Commit 0c76584

put user defined repo before default repo

1 parent 5c8727d
7 files changed (+70 lines, -12 lines)

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 5 additions & 4 deletions

@@ -1031,10 +1031,8 @@ private[spark] object SparkSubmitUtils {
     val cr = new ChainResolver
     cr.setName("user-list")

-    // add current default resolver, if any
-    Option(ivySettings.getDefaultResolver).foreach(cr.add)
-
-    // add additional repositories, last resolution in chain takes precedence
+    // before default resolvers, add additional repositories,
+    // last resolution in chain takes precedence
     repositoryList.zipWithIndex.foreach { case (repo, i) =>
       val brr: IBiblioResolver = new IBiblioResolver
       brr.setM2compatible(true)
@@ -1047,6 +1045,9 @@ private[spark] object SparkSubmitUtils {
       // scalastyle:on println
     }

+    // add current default resolver, if any
+    Option(ivySettings.getDefaultResolver).foreach(cr.add)
+
     ivySettings.addResolver(cr)
     ivySettings.setDefaultResolver(cr.getName)
   }
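The change is purely about ordering: resolvers in an Ivy ChainResolver are consulted in the order they were added, so moving the user-defined repositories ahead of the default resolver gives them the first chance to resolve each coordinate. A minimal, self-contained sketch of the chain this code now builds, using only the Ivy API already visible in the diff (the repository URL is a made-up placeholder):

    import org.apache.ivy.core.settings.IvySettings
    import org.apache.ivy.plugins.resolver.{ChainResolver, IBiblioResolver}

    val ivySettings = new IvySettings
    val cr = new ChainResolver
    cr.setName("user-list")

    // User-defined repository first: it is consulted before anything else
    // when both it and the default resolver can supply an artifact.
    val userRepo = new IBiblioResolver
    userRepo.setM2compatible(true)
    userRepo.setRoot("http://repo.example.com/maven2") // placeholder URL
    userRepo.setName("repo-1")
    cr.add(userRepo)

    // Pre-existing default resolver (e.g. Maven Central) goes last, if any.
    Option(ivySettings.getDefaultResolver).foreach(cr.add)

    ivySettings.addResolver(cr)
    ivySettings.setDefaultResolver(cr.getName)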

core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala

Lines changed: 2 additions & 2 deletions

@@ -31,7 +31,7 @@ import org.apache.ivy.core.settings.IvySettings
 import org.apache.spark.TestUtils.{createCompiledClass, JavaSourceFromString}
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate

-private[deploy] object IvyTestUtils {
+object IvyTestUtils {

   /**
    * Create the path for the jar and pom from the maven coordinate. Extension should be `jar`
@@ -355,7 +355,7 @@ private[deploy] object IvyTestUtils {
    * @param withPython Whether to pack python files inside the jar for extensive testing.
    * @return Root path of the repository. Will be `rootDir` if supplied.
    */
-  private[deploy] def withRepository(
+  def withRepository(
       artifact: MavenCoordinate,
       dependencies: Option[String],
       rootDir: Option[File],
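Both visibility changes serve the same purpose: the sql/hive test suites touched below live outside the deploy package and need to reuse this fixture. The call shape is unchanged; a sketch of such a cross-package use (the coordinate is arbitrary):

    import org.apache.spark.deploy.{IvyTestUtils, SparkSubmitUtils}
    import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate

    val main = new MavenCoordinate("my.great.lib", "mylib", "0.1")
    // Publishes the artifact into a temporary Maven repository, invokes the
    // body with that repository's URI, and cleans up afterwards.
    IvyTestUtils.withRepository(main, dependencies = None, rootDir = None) { repoUri =>
      val settings = SparkSubmitUtils.buildIvySettings(Option(repoUri), None)
      // ... resolve coordinates against the temporary repository ...
    }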

core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala

Lines changed: 54 additions & 3 deletions

@@ -17,7 +17,7 @@

 package org.apache.spark.deploy

-import java.io.{File, OutputStream, PrintStream}
+import java.io.{File, FileInputStream, OutputStream, PrintStream}
 import java.nio.charset.StandardCharsets

 import scala.collection.mutable.ArrayBuffer
@@ -30,6 +30,7 @@ import org.scalatest.BeforeAndAfterAll

 import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
+import org.apache.spark.unsafe.types.UTF8String
 import org.apache.spark.util.Utils

 class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
@@ -142,12 +143,13 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   test("search for artifact at local repositories") {
     val main = new MavenCoordinate("my.great.lib", "mylib", "0.1")
     val dep = "my.great.dep:mydep:0.5"
-    // Local M2 repository
+
+    // Local M2 repository
     IvyTestUtils.withRepository(main, Some(dep), Some(SparkSubmitUtils.m2Path)) { repo =>
       val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
         main.toString,
         SparkSubmitUtils.buildIvySettings(None, None),
-        isTest = true)
+        isTest = false)
       assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
       assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
     }
@@ -258,4 +260,53 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
       assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
     }
   }
+
+  test("search for artifacts in user defined repositories before default repositories") {
+    val main = new MavenCoordinate("a", "b", "0.1")
+
+    def isSameFile(left: String, right: String): Boolean = {
+      val leftInput: FileInputStream = new FileInputStream(left)
+      val leftMd5 = UTF8String.fromString(org.apache.commons.codec
+        .digest.DigestUtils.md5Hex(leftInput))
+
+      val rightInput: FileInputStream = new FileInputStream(right)
+      val rightMd5 = UTF8String.fromString(org.apache.commons.codec
+        .digest.DigestUtils.md5Hex(rightInput))
+
+      leftMd5 == rightMd5
+    }
+
+    val userDefinedRepo = Utils.createTempDir("my_m2")
+    try {
+      IvyTestUtils.withRepository(main, None, Some(userDefinedRepo)) { repo =>
+        IvyTestUtils.withRepository(main, None, Some(SparkSubmitUtils.m2Path)) {
+          defaultRepo =>
+            val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
+              main.toString,
+              SparkSubmitUtils.buildIvySettings(Option(repo), None),
+              isTest = false)
+            assert(isSameFile(Seq(userDefinedRepo, main.groupId, main.artifactId, main.version,
+              "b-0.1.jar").mkString(File.separatorChar.toString), jarPath))
+            assert(jarPath.indexOf("b") >= 0, "should find artifact")
+
+        }
+      }
+
+      IvyTestUtils.withRepository(main, None, Some(SparkSubmitUtils.m2Path)) { defaultRepo =>
+        IvyTestUtils.withRepository(main, None, Some(userDefinedRepo)) {
+          repo =>
+            val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
+              main.toString,
+              SparkSubmitUtils.buildIvySettings(Option(repo), None),
+              isTest = false)
+            assert(isSameFile(Seq(SparkSubmitUtils.m2Path.getCanonicalPath, main.groupId,
+              main.artifactId, main.version, "b-0.1.jar").mkString(File.separatorChar.toString),
+              jarPath))
+            assert(jarPath.indexOf("b") >= 0, "should find artifact")
+        }
+      }
+    } finally {
+      Utils.deleteRecursively(userDefinedRepo)
+    }
+  }
 }
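The isSameFile helper is what actually pins down precedence: it checks that the resolved jar is byte-identical to the copy in the expected repository, not merely that some jar was found. Comparing MD5 hex digests is equivalent to comparing file contents directly; assuming commons-io is on the test classpath, the same check is a one-liner, shown here only as an aside:

    import java.io.File
    import org.apache.commons.io.FileUtils

    // Byte-for-byte comparison; same verdict as comparing MD5 digests.
    def isSameFile(left: String, right: String): Boolean =
      FileUtils.contentEquals(new File(left), new File(right))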

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala

Lines changed: 1 addition & 0 deletions

@@ -328,6 +328,7 @@ private[spark] object HiveUtils extends Logging {
         sparkConf = conf,
         hadoopConf = hadoopConf,
         config = configurations,
+        ivyPath = conf.getOption("spark.jars.ivy"),
         barrierPrefixes = hiveMetastoreBarrierPrefixes,
         sharedPrefixes = hiveMetastoreSharedPrefixes)
     } else {
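This one-liner threads spark.jars.ivy through to the isolated client loader, so Hive metastore jars downloaded via Ivy land in the same directory the user configured for spark-submit --packages. Setting it is an ordinary conf entry; a sketch (the path is a placeholder):

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .set("spark.sql.hive.metastore.jars", "maven") // download metastore jars via Ivy
      .set("spark.jars.ivy", "/tmp/custom-ivy")      // placeholder path, now honored here too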

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala

Lines changed: 1 addition & 1 deletion

@@ -136,7 +136,7 @@ private[hive] object IsolatedClientLoader extends Logging {

   // A map from a given pair of HiveVersion and Hadoop version to jar files.
   // It is only used by forVersion.
-  private val resolvedVersions =
+  private[hive] val resolvedVersions =
     new scala.collection.mutable.HashMap[(HiveVersion, String), Seq[URL]]
 }
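resolvedVersions caches the downloaded jars per (HiveVersion, Hadoop version) pair, so repeated forVersion calls skip Ivy resolution entirely. Widening it to private[hive] lets the Hive test suites reset that cache and force a fresh download; a hypothetical test-side use:

    import org.apache.spark.sql.hive.client.IsolatedClientLoader

    // Drop jars cached by earlier tests so the next metastore client
    // construction resolves through Ivy again.
    IsolatedClientLoader.resolvedVersions.clear()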

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUtilsSuite.scala

Lines changed: 5 additions & 1 deletion

@@ -17,11 +17,15 @@

 package org.apache.spark.sql.hive

+import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-
+import org.apache.spark.SparkConf
+import org.apache.spark.deploy.IvyTestUtils
+import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.test.SQLTestUtils
 import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.hive.client.IsolatedClientLoader

 class HiveUtilsSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
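Only the import hunk of this suite appears in the diff view; the new imports imply a test body elsewhere in the file that publishes a dummy artifact with IvyTestUtils and builds an isolated metastore client from it. A purely illustrative sketch of the scaffolding these imports enable (coordinate and conf values are invented):

    val main = new MavenCoordinate("org.apache.hive.fake", "dummy", "0.1") // invented coordinate
    IvyTestUtils.withRepository(main, None, None) { repoUri =>
      val sparkConf = new SparkConf().set("spark.jars.ivy", sys.props("java.io.tmpdir"))
      val hadoopConf = new Configuration()
      // ... construct a metastore client via IsolatedClientLoader, pointing
      // its resolution at repoUri ...
    }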

sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala

Lines changed: 2 additions & 1 deletion

@@ -19,8 +19,9 @@ package org.apache.spark.sql.hive.client

 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hive.conf.HiveConf
-
 import org.apache.spark.SparkFunSuite
+import org.apache.spark.deploy.IvyTestUtils
+import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal}
 import org.apache.spark.sql.hive.HiveUtils
