diff --git a/pom.xml b/pom.xml
index 2ea699371b44f..e24e32f213de5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1996,75 +1996,6 @@
-      <dependency>
-        <groupId>${hive.group}</groupId>
-        <artifactId>hive-contrib</artifactId>
-        <version>${hive.version}</version>
-        <scope>test</scope>
-        <exclusions>
-          <exclusion>
-            <groupId>${hive.group}</groupId>
-            <artifactId>hive-exec</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>${hive.group}</groupId>
-            <artifactId>hive-serde</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>${hive.group}</groupId>
-            <artifactId>hive-shims</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>commons-codec</groupId>
-            <artifactId>commons-codec</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <dependency>
-        <groupId>${hive.group}.hcatalog</groupId>
-        <artifactId>hive-hcatalog-core</artifactId>
-        <version>${hive.version}</version>
-        <scope>test</scope>
-        <exclusions>
-          <exclusion>
-            <groupId>${hive.group}</groupId>
-            <artifactId>hive-exec</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>${hive.group}</groupId>
-            <artifactId>hive-metastore</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>${hive.group}</groupId>
-            <artifactId>hive-cli</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>${hive.group}</groupId>
-            <artifactId>hive-common</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.codehaus.jackson</groupId>
-            <artifactId>jackson-mapper-asl</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>*</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
       <dependency>
         <groupId>org.apache.orc</groupId>
         <artifactId>orc-core</artifactId>
diff --git a/sql/hive-thriftserver/pom.xml b/sql/hive-thriftserver/pom.xml
index 5b1352adddd89..9dd927084298a 100644
--- a/sql/hive-thriftserver/pom.xml
+++ b/sql/hive-thriftserver/pom.xml
@@ -77,15 +77,6 @@
       <groupId>${hive.group}</groupId>
       <artifactId>hive-beeline</artifactId>
     </dependency>
-    <dependency>
-      <groupId>${hive.group}</groupId>
-      <artifactId>hive-contrib</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>${hive.group}.hcatalog</groupId>
-      <artifactId>hive-hcatalog-core</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-server</artifactId>
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 6d45041e12821..db33d1d4a07dd 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -27,12 +27,11 @@ import scala.concurrent.Promise
import scala.concurrent.duration._
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.SparkFunSuite
import org.apache.spark.internal.Logging
-import org.apache.spark.sql.hive.test.HiveTestUtils
+import org.apache.spark.sql.hive.test.HiveTestJars
import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
import org.apache.spark.util.{ThreadUtils, Utils}
@@ -202,7 +201,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
}
test("Commands using SerDe provided in --jars") {
- val jarFile = HiveTestUtils.getHiveHcatalogCoreJar.getCanonicalPath
+ val jarFile = HiveTestJars.getHiveHcatalogCoreJar().getCanonicalPath
val dataFilePath =
Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
@@ -297,11 +296,12 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
}
test("Support hive.aux.jars.path") {
- val hiveContribJar = HiveTestUtils.getHiveContribJar.getCanonicalPath
+ val hiveContribJar = HiveTestJars.getHiveContribJar().getCanonicalPath
runCliWithin(
1.minute,
Seq("--conf", s"spark.hadoop.${ConfVars.HIVEAUXJARS}=$hiveContribJar"))(
- s"CREATE TEMPORARY FUNCTION example_format AS '${classOf[UDFExampleFormat].getName}';" -> "",
+ "CREATE TEMPORARY FUNCTION example_format AS " +
+ "'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat';" -> "",
"SELECT example_format('%o', 93);" -> "135"
)
}
@@ -319,7 +319,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
test("SPARK-28840 test --jars and hive.aux.jars.path command") {
val jarFile = new File("../../sql/hive/src/test/resources/SPARK-21101-1.0.jar").getCanonicalPath
- val hiveContribJar = HiveTestUtils.getHiveContribJar.getCanonicalPath
+ val hiveContribJar = HiveTestJars.getHiveContribJar().getCanonicalPath
runCliWithin(
1.minute,
Seq("--jars", s"$jarFile", "--conf",
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 3fef23f8eed3e..bc62cfaf62272 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -43,7 +43,7 @@ import org.scalatest.BeforeAndAfterAll
import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.hive.HiveUtils
-import org.apache.spark.sql.hive.test.HiveTestUtils
+import org.apache.spark.sql.hive.test.HiveTestJars
import org.apache.spark.sql.internal.StaticSQLConf.HIVE_THRIFT_SERVER_SINGLESESSION
import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
import org.apache.spark.util.{ThreadUtils, Utils}
@@ -492,7 +492,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
withMultipleConnectionJdbcStatement("smallKV", "addJar")(
{
statement =>
- val jarFile = HiveTestUtils.getHiveHcatalogCoreJar.getCanonicalPath
+ val jarFile = HiveTestJars.getHiveHcatalogCoreJar().getCanonicalPath
statement.executeQuery(s"ADD JAR $jarFile")
},
diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml
index d37f0c8573659..f627227aa0380 100644
--- a/sql/hive/pom.xml
+++ b/sql/hive/pom.xml
@@ -103,14 +103,6 @@
       <groupId>${hive.group}</groupId>
       <artifactId>hive-metastore</artifactId>
     </dependency>
-    <dependency>
-      <groupId>${hive.group}</groupId>
-      <artifactId>hive-contrib</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>${hive.group}.hcatalog</groupId>
-      <artifactId>hive-hcatalog-core</artifactId>
-    </dependency>