Skip to content
Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Updating unit test to cover all three scenarios.
  • Loading branch information
kishorvpatil committed Nov 3, 2016
commit 51eefa50faeb26c6c1fd9d689b8450358291d569
41 changes: 26 additions & 15 deletions yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -284,33 +284,44 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll

test("distribute archive multiple times") {
  val libs = Utils.createTempDir()
  // A valid SPARK_HOME needs a "jars" dir and a RELEASE marker file,
  // otherwise Client fails early with IllegalStateException instead of
  // exercising the duplicate-distribution check we want to test.
  val jarsDir = new File(libs, "jars")
  assert(jarsDir.mkdir())
  new FileOutputStream(new File(libs, "RELEASE")).close()

  // A single jar reused below so the same path can be listed twice.
  val userLib1 = Utils.createTempDir()
  val testJar = TestUtils.createJarWithFiles(Map(), userLib1)

  // Case 1: the same file in both FILES_TO_DISTRIBUTE and
  // ARCHIVES_TO_DISTRIBUTE must be rejected.
  val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath))
    .set(FILES_TO_DISTRIBUTE, Seq(testJar.getPath))
    .set(ARCHIVES_TO_DISTRIBUTE, Seq(testJar.getPath))

  val client = createClient(sparkConf)
  val tempDir = Utils.createTempDir()
  intercept[IllegalArgumentException] {
    client.prepareLocalResources(new Path(tempDir.getAbsolutePath()), Nil)
  }

  // Case 2: duplicate entries within FILES_TO_DISTRIBUTE alone must be rejected.
  val sparkConfFiles = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath))
    .set(FILES_TO_DISTRIBUTE, Seq(testJar.getPath, testJar.getPath))

  val clientFiles = createClient(sparkConfFiles)
  val tempDirForFiles = Utils.createTempDir()
  intercept[IllegalArgumentException] {
    clientFiles.prepareLocalResources(new Path(tempDirForFiles.getAbsolutePath()), Nil)
  }

  // Case 3: duplicate entries within ARCHIVES_TO_DISTRIBUTE alone must be rejected.
  val sparkConfArchives = new SparkConfWithEnv(Map("SPARK_HOME" -> libs.getAbsolutePath))
    .set(ARCHIVES_TO_DISTRIBUTE, Seq(testJar.getPath, testJar.getPath))

  val clientArchives = createClient(sparkConfArchives)
  val tempDirForArchives = Utils.createTempDir()
  intercept[IllegalArgumentException] {
    clientArchives.prepareLocalResources(new Path(tempDirForArchives.getAbsolutePath()), Nil)
  }
}

test("distribute local spark jars") {
Expand Down