Skip to content
Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Revert "init"
This reverts commit 15e9159.
  • Loading branch information
LuciferYang committed Dec 25, 2023
commit 1813f97efa13e8eb0814daf77d716ee680c8548c
Original file line number Diff line number Diff line change
Expand Up @@ -18,20 +18,22 @@
package org.apache.spark.sql.hive

import java.io.File
import java.net.URL
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import scala.sys.process._
import scala.util.control.NonFatal

import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.hadoop.conf.Configuration
import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._

import org.apache.spark.{SparkConf, TestUtils}
import org.apache.spark.deploy.SparkSubmitTestUtils
import org.apache.spark.internal.config.{JAR_IVY_REPO_PATH, MASTER_REST_SERVER_ENABLED}
import org.apache.spark.internal.config.MASTER_REST_SERVER_ENABLED
import org.apache.spark.internal.config.UI.UI_ENABLED
import org.apache.spark.launcher.JavaModuleOptions
import org.apache.spark.sql.{QueryTest, Row, SparkSession}
Expand Down Expand Up @@ -205,13 +207,16 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
}
}

val ivyTestDir = new File(sparkTestingDir, "ivy2")
PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version, index) =>
val sparkHome = new File(sparkTestingDir, s"spark-$version")
if (!sparkHome.exists()) {
tryDownloadSpark(version, sparkTestingDir.getCanonicalPath)
}

Files.deleteIfExists(Paths.get(sparkHome.getCanonicalPath, "jars", "ivy-2.5.1.jar"))
val ivyUrl = new URL("https://repo1.maven.org/maven2/org/apache/ivy/ivy/2.5.2/ivy-2.5.2.jar")
IOUtils.copy(ivyUrl, new File(s"${sparkHome.getCanonicalPath}/jars", "ivy-2.5.2.jar"))
Copy link
Contributor Author

@LuciferYang LuciferYang Dec 25, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I have been investigating the previous Ivy upgrade failure recently. I found that if the Spark version being tested in HiveExternalCatalogVersionsSuite also uses Ivy 2.5.2, errors like the one in #44477 (comment) no longer occur... But I haven't come up with a better solution yet.

cc @bjornjorgensen @dongjoon-hyun


// Extract major.minor for testing Spark 3.1.x and 3.0.x with metastore 2.3.9 and Java 11.
val hiveMetastoreVersion = """^\d+\.\d+""".r.findFirstIn(hiveVersion).get
val args = Seq(
Expand All @@ -221,7 +226,6 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
"--conf", s"${MASTER_REST_SERVER_ENABLED.key}=false",
"--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=$hiveMetastoreVersion",
"--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
"--conf", s"${JAR_IVY_REPO_PATH.key}=${ivyTestDir.getCanonicalPath}",
"--conf", s"${WAREHOUSE_PATH.key}=${wareHousePath.getCanonicalPath}",
"--conf", s"spark.sql.test.version.index=$index",
"--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath} " +
Expand Down