diff --git a/core/pom.xml b/core/pom.xml
index 276cf10aa205..e51a13ee0ac4 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -274,6 +274,10 @@
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-xml_${scala.binary.version}</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala b/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala
index 53d69ba26811..3abb2d8a11f3 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala
@@ -41,7 +41,7 @@ import org.apache.spark.internal.Logging
* There is no particular relationship between an operation scope and a stage or a job.
* A scope may live inside one stage (e.g. map) or span across multiple jobs (e.g. take).
*/
-@JsonInclude(Include.NON_NULL)
+@JsonInclude(Include.NON_ABSENT)
@JsonPropertyOrder(Array("id", "name", "parent"))
private[spark] class RDDOperationScope(
val name: String,
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala b/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
index 76af33c1a18d..61305a0770d9 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
@@ -49,7 +49,7 @@ private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{
}
mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule)
mapper.enable(SerializationFeature.INDENT_OUTPUT)
- mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
+ mapper.setSerializationInclusion(JsonInclude.Include.NON_ABSENT)
mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat)
override def isWriteable(
diff --git a/core/src/test/scala/org/apache/spark/JsonTestUtils.scala b/core/src/test/scala/org/apache/spark/JsonTestUtils.scala
index ba367cd47614..9eb82467cb61 100644
--- a/core/src/test/scala/org/apache/spark/JsonTestUtils.scala
+++ b/core/src/test/scala/org/apache/spark/JsonTestUtils.scala
@@ -21,7 +21,7 @@ import org.json4s.jackson.JsonMethods
trait JsonTestUtils {
def assertValidDataInJson(validateJson: JValue, expectedJson: JValue) {
- val Diff(c, a, d) = validateJson.diff(expectedJson)
+ val Diff(c, a, d) = expectedJson.diff(validateJson)
val validatePretty = JsonMethods.pretty(validateJson)
val expectedPretty = JsonMethods.pretty(expectedJson)
val errorMessage = s"Expected:\n$expectedPretty\nFound:\n$validatePretty"
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 4c0619322536..9f3367e292b1 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -491,7 +491,7 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
json match {
case JNothing => Seq()
case apps: JArray =>
- apps.filter(app => {
+ apps.children.filter(app => {
(app \ "attempts") match {
case attempts: JArray =>
val state = (attempts.children.head \ "completed").asInstanceOf[JBool]
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
index 0f20eea73504..70cd5357ec80 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -131,7 +131,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
val storageJson = getJson(ui, "storage/rdd")
storageJson.children.length should be (1)
- (storageJson \ "storageLevel").extract[String] should be (StorageLevels.DISK_ONLY.description)
+ (storageJson.children.head \ "storageLevel").extract[String] should be (StorageLevels.DISK_ONLY.description)
val rddJson = getJson(ui, "storage/rdd/0")
(rddJson \ "storageLevel").extract[String] should be (StorageLevels.DISK_ONLY.description)
@@ -150,7 +150,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
val updatedStorageJson = getJson(ui, "storage/rdd")
updatedStorageJson.children.length should be (1)
- (updatedStorageJson \ "storageLevel").extract[String] should be (
+ (updatedStorageJson.children.head \ "storageLevel").extract[String] should be (
StorageLevels.MEMORY_ONLY.description)
val updatedRddJson = getJson(ui, "storage/rdd/0")
(updatedRddJson \ "storageLevel").extract[String] should be (
@@ -204,7 +204,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
}
val stageJson = getJson(sc.ui.get, "stages")
stageJson.children.length should be (1)
- (stageJson \ "status").extract[String] should be (StageStatus.FAILED.name())
+ (stageJson.children.head \ "status").extract[String] should be (StageStatus.FAILED.name())
// Regression test for SPARK-2105
class NotSerializable
@@ -325,11 +325,11 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
find(cssSelector(".progress-cell .progress")).get.text should be ("2/2 (1 failed)")
}
val jobJson = getJson(sc.ui.get, "jobs")
- (jobJson \ "numTasks").extract[Int]should be (2)
- (jobJson \ "numCompletedTasks").extract[Int] should be (3)
- (jobJson \ "numFailedTasks").extract[Int] should be (1)
- (jobJson \ "numCompletedStages").extract[Int] should be (2)
- (jobJson \ "numFailedStages").extract[Int] should be (1)
+ (jobJson.children.head \ "numTasks").extract[Int]should be (2)
+ (jobJson.children.head \ "numCompletedTasks").extract[Int] should be (3)
+ (jobJson.children.head \ "numFailedTasks").extract[Int] should be (1)
+ (jobJson.children.head \ "numCompletedStages").extract[Int] should be (2)
+ (jobJson.children.head \ "numFailedStages").extract[Int] should be (1)
val stageJson = getJson(sc.ui.get, "stages")
for {
@@ -656,11 +656,11 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
sc.ui.get.webUrl + "/api/v1/applications"))
val appListJsonAst = JsonMethods.parse(appListRawJson)
appListJsonAst.children.length should be (1)
- val attempts = (appListJsonAst \ "attempts").children
+ val attempts = (appListJsonAst.children.head \ "attempts").children
attempts.size should be (1)
- (attempts(0) \ "completed").extract[Boolean] should be (false)
- parseDate(attempts(0) \ "startTime") should be (sc.startTime)
- parseDate(attempts(0) \ "endTime") should be (-1)
+ (attempts.head \ "completed").extract[Boolean] should be (false)
+ parseDate(attempts.head \ "startTime") should be (sc.startTime)
+ parseDate(attempts.head \ "endTime") should be (-1)
val oneAppJsonAst = getJson(sc.ui.get, "")
oneAppJsonAst should be (appListJsonAst.children(0))
}
diff --git a/dev/deps/spark-deps-hadoop-2.6 b/dev/deps/spark-deps-hadoop-2.6
index 4f0794d6f1a1..af27790a1f44 100644
--- a/dev/deps/spark-deps-hadoop-2.6
+++ b/dev/deps/spark-deps-hadoop-2.6
@@ -17,6 +17,7 @@ arpack_combined_all-0.1.jar
arrow-format-0.8.0.jar
arrow-memory-0.8.0.jar
arrow-vector-0.8.0.jar
+audience-annotations-0.5.0.jar
automaton-1.11-8.jar
avro-1.7.7.jar
avro-ipc-1.7.7.jar
@@ -89,16 +90,16 @@ htrace-core-3.0.4.jar
httpclient-4.5.4.jar
httpcore-4.4.8.jar
ivy-2.4.0.jar
-jackson-annotations-2.6.7.jar
-jackson-core-2.6.7.jar
+jackson-annotations-2.9.6.jar
+jackson-core-2.9.6.jar
jackson-core-asl-1.9.13.jar
-jackson-databind-2.6.7.1.jar
-jackson-dataformat-yaml-2.6.7.jar
+jackson-databind-2.9.6.jar
+jackson-dataformat-yaml-2.9.6.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
-jackson-module-jaxb-annotations-2.6.7.jar
-jackson-module-paranamer-2.7.9.jar
-jackson-module-scala_2.11-2.6.7.1.jar
+jackson-module-jaxb-annotations-2.9.6.jar
+jackson-module-paranamer-2.9.6.jar
+jackson-module-scala_2.11-2.9.6.jar
jackson-xc-1.9.13.jar
janino-3.0.8.jar
java-xmlbuilder-1.1.jar
@@ -126,9 +127,10 @@ jline-2.12.1.jar
joda-time-2.9.3.jar
jodd-core-3.5.2.jar
jpam-1.1.jar
-json4s-ast_2.11-3.2.11.jar
-json4s-core_2.11-3.2.11.jar
-json4s-jackson_2.11-3.2.11.jar
+json4s-ast_2.11-3.6.2.jar
+json4s-core_2.11-3.6.2.jar
+json4s-jackson_2.11-3.6.2.jar
+json4s-scalap_2.11-3.6.2.jar
jsr305-1.3.9.jar
jta-1.1.jar
jtransforms-2.4.0.jar
@@ -173,14 +175,13 @@ py4j-0.10.7.jar
pyrolite-4.13.jar
scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
+scala-parser-combinators_2.11-1.1.1.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.5.jar
-scalap-2.11.8.jar
+scala-xml_2.11-1.1.1.jar
shapeless_2.11-2.3.2.jar
slf4j-api-1.7.16.jar
slf4j-log4j12-1.7.16.jar
-snakeyaml-1.15.jar
+snakeyaml-1.18.jar
snappy-0.2.jar
snappy-java-1.1.2.6.jar
spire-macros_2.11-0.13.0.jar
@@ -197,5 +198,5 @@ xercesImpl-2.9.1.jar
xmlenc-0.52.jar
xz-1.0.jar
zjsonpatch-0.3.0.jar
-zookeeper-3.4.6.jar
+zookeeper-3.4.13.jar
zstd-jni-1.3.2-2.jar
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index df2be777ff5a..e6a41be7a019 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -17,6 +17,7 @@ arpack_combined_all-0.1.jar
arrow-format-0.8.0.jar
arrow-memory-0.8.0.jar
arrow-vector-0.8.0.jar
+audience-annotations-0.5.0.jar
automaton-1.11-8.jar
avro-1.7.7.jar
avro-ipc-1.7.7.jar
@@ -66,21 +67,21 @@ gson-2.2.4.jar
guava-14.0.1.jar
guice-3.0.jar
guice-servlet-3.0.jar
-hadoop-annotations-2.7.3.jar
-hadoop-auth-2.7.3.jar
-hadoop-client-2.7.3.jar
-hadoop-common-2.7.3.jar
-hadoop-hdfs-2.7.3.jar
-hadoop-mapreduce-client-app-2.7.3.jar
-hadoop-mapreduce-client-common-2.7.3.jar
-hadoop-mapreduce-client-core-2.7.3.jar
-hadoop-mapreduce-client-jobclient-2.7.3.jar
-hadoop-mapreduce-client-shuffle-2.7.3.jar
-hadoop-yarn-api-2.7.3.jar
-hadoop-yarn-client-2.7.3.jar
-hadoop-yarn-common-2.7.3.jar
-hadoop-yarn-server-common-2.7.3.jar
-hadoop-yarn-server-web-proxy-2.7.3.jar
+hadoop-annotations-2.7.7.jar
+hadoop-auth-2.7.7.jar
+hadoop-client-2.7.7.jar
+hadoop-common-2.7.7.jar
+hadoop-hdfs-2.7.7.jar
+hadoop-mapreduce-client-app-2.7.7.jar
+hadoop-mapreduce-client-common-2.7.7.jar
+hadoop-mapreduce-client-core-2.7.7.jar
+hadoop-mapreduce-client-jobclient-2.7.7.jar
+hadoop-mapreduce-client-shuffle-2.7.7.jar
+hadoop-yarn-api-2.7.7.jar
+hadoop-yarn-client-2.7.7.jar
+hadoop-yarn-common-2.7.7.jar
+hadoop-yarn-server-common-2.7.7.jar
+hadoop-yarn-server-web-proxy-2.7.7.jar
hk2-api-2.4.0-b34.jar
hk2-locator-2.4.0-b34.jar
hk2-utils-2.4.0-b34.jar
@@ -89,16 +90,16 @@ htrace-core-3.1.0-incubating.jar
httpclient-4.5.4.jar
httpcore-4.4.8.jar
ivy-2.4.0.jar
-jackson-annotations-2.6.7.jar
-jackson-core-2.6.7.jar
+jackson-annotations-2.9.6.jar
+jackson-core-2.9.6.jar
jackson-core-asl-1.9.13.jar
-jackson-databind-2.6.7.1.jar
-jackson-dataformat-yaml-2.6.7.jar
+jackson-databind-2.9.6.jar
+jackson-dataformat-yaml-2.9.6.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
-jackson-module-jaxb-annotations-2.6.7.jar
-jackson-module-paranamer-2.7.9.jar
-jackson-module-scala_2.11-2.6.7.1.jar
+jackson-module-jaxb-annotations-2.9.6.jar
+jackson-module-paranamer-2.9.6.jar
+jackson-module-scala_2.11-2.9.6.jar
jackson-xc-1.9.13.jar
janino-3.0.8.jar
java-xmlbuilder-1.1.jar
@@ -121,14 +122,16 @@ jersey-media-jaxb-2.22.2.jar
jersey-server-2.22.2.jar
jets3t-0.9.4.jar
jetty-6.1.26.jar
+jetty-sslengine-6.1.26.jar
jetty-util-6.1.26.jar
jline-2.12.1.jar
joda-time-2.9.3.jar
jodd-core-3.5.2.jar
jpam-1.1.jar
-json4s-ast_2.11-3.2.11.jar
-json4s-core_2.11-3.2.11.jar
-json4s-jackson_2.11-3.2.11.jar
+json4s-ast_2.11-3.6.2.jar
+json4s-core_2.11-3.6.2.jar
+json4s-jackson_2.11-3.6.2.jar
+json4s-scalap_2.11-3.6.2.jar
jsp-api-2.1.jar
jsr305-1.3.9.jar
jta-1.1.jar
@@ -174,14 +177,13 @@ py4j-0.10.7.jar
pyrolite-4.13.jar
scala-compiler-2.11.8.jar
scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
+scala-parser-combinators_2.11-1.1.1.jar
scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.5.jar
-scalap-2.11.8.jar
+scala-xml_2.11-1.1.1.jar
shapeless_2.11-2.3.2.jar
slf4j-api-1.7.16.jar
slf4j-log4j12-1.7.16.jar
-snakeyaml-1.15.jar
+snakeyaml-1.18.jar
snappy-0.2.jar
snappy-java-1.1.2.6.jar
spire-macros_2.11-0.13.0.jar
@@ -198,5 +200,5 @@ xercesImpl-2.9.1.jar
xmlenc-0.52.jar
xz-1.0.jar
zjsonpatch-0.3.0.jar
-zookeeper-3.4.6.jar
+zookeeper-3.4.13.jar
zstd-jni-1.3.2-2.jar
diff --git a/pom.xml b/pom.xml
index 7c6bb4bcc8a1..afeb2a041cde 100644
--- a/pom.xml
+++ b/pom.xml
@@ -121,7 +121,7 @@
<protobuf.version>2.5.0</protobuf.version>
<yarn.version>${hadoop.version}</yarn.version>
<flume.version>1.6.0</flume.version>
- <zookeeper.version>3.4.6</zookeeper.version>
+ <zookeeper.version>3.4.13</zookeeper.version>
<curator.version>2.6.0</curator.version>
<hive.group>org.spark-project.hive</hive.group>
@@ -157,9 +157,11 @@
<commons.collections.version>3.2.2</commons.collections.version>
<scala.version>2.11.8</scala.version>
<scala.binary.version>2.11</scala.binary.version>
+ <scala.xml.version>1.1.1</scala.xml.version>
+ <scala.parser.combinators.version>1.1.1</scala.parser.combinators.version>
<codehaus.jackson.version>1.9.13</codehaus.jackson.version>
- <fasterxml.jackson.version>2.6.7</fasterxml.jackson.version>
- <fasterxml.jackson.databind.version>2.6.7.1</fasterxml.jackson.databind.version>
+ <fasterxml.jackson.version>2.9.6</fasterxml.jackson.version>
+ <fasterxml.jackson.databind.version>2.9.6</fasterxml.jackson.databind.version>
<snappy.version>1.1.2.6</snappy.version>
<netlib.java.version>1.1.2</netlib.java.version>
<calcite.version>1.2.0-incubating</calcite.version>
@@ -708,7 +710,7 @@
<dependency>
<groupId>org.json4s</groupId>
<artifactId>json4s-jackson_${scala.binary.version}</artifactId>
- <version>3.2.11</version>
+ <version>3.6.2</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
@@ -733,7 +735,7 @@
<dependency>
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
- <version>1.0.4</version>
+ <version>${scala.parser.combinators.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
@@ -746,6 +748,11 @@
<artifactId>jline</artifactId>
<version>2.12.1</version>
</dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-xml_${scala.binary.version}</artifactId>
+ <version>${scala.xml.version}</version>
+ </dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
@@ -2664,7 +2671,7 @@
<profile>
<id>hadoop-2.7</id>
<properties>
- <hadoop.version>2.7.3</hadoop.version>
+ <hadoop.version>2.7.7</hadoop.version>
<curator.version>2.7.1</curator.version>
</properties>
</profile>
diff --git a/sql/catalyst/pom.xml b/sql/catalyst/pom.xml
index be9d087b6393..686b952a3493 100644
--- a/sql/catalyst/pom.xml
+++ b/sql/catalyst/pom.xml
@@ -43,8 +43,11 @@
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
-
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ </dependency>
+ <dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${project.version}</version>