Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Drop http jar support for now
  • Loading branch information
kimoonkim committed Dec 16, 2017
commit bde1cf9881c863d53d138d90816554ab2ec8af0e
Original file line number Diff line number Diff line change
Expand Up @@ -37,13 +37,10 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
private val APP_LOCATOR_LABEL = UUID.randomUUID().toString.replaceAll("-", "")
private var kubernetesTestComponents: KubernetesTestComponents = _
private var testAppConf: TestAppConf = _
private var staticAssetServerLauncher: StaticAssetServerLauncher = _

// One-time suite setup: initializes the test backend, builds the Kubernetes
// test components from the backend's client, and constructs the static asset
// server launcher scoped to the test namespace. (NOTE(review): this is a diff
// view — the staticAssetServerLauncher lines are removed by this commit.)
override def beforeAll(): Unit = {
testBackend.initialize()
kubernetesTestComponents = new KubernetesTestComponents(testBackend.getKubernetesClient)
staticAssetServerLauncher = new StaticAssetServerLauncher(
kubernetesTestComponents.kubernetesClient.inNamespace(kubernetesTestComponents.namespace))
}

override def afterAll(): Unit = {
Expand All @@ -69,16 +66,6 @@ private[spark] class KubernetesSuite extends FunSuite with BeforeAndAfterAll wit
runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
}

// Integration test: runs SparkPi with application jars referenced by remote
// URIs (served from the launched static asset server) instead of the resource
// staging server. Only runs against the Minikube backend.
// NOTE(review): removed by this commit ("Drop http jar support for now").
test("Use remote resources without the resource staging server.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
// Launches the asset server and gets its base URI — presumably an
// in-cluster HTTP endpoint serving the jar files; confirm in the launcher.
val assetServerUri = staticAssetServerLauncher.launchStaticAssetServer()
// Point spark.jars at the example and helper jars hosted on that server.
testAppConf.setJars(Seq(
s"$assetServerUri/${EXAMPLES_JAR_FILE.getName}",
s"$assetServerUri/${HELPER_JAR_FILE.getName}"
))
runSparkPiAndVerifyCompletion()
}

test("Submit small local files without the resource staging server.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
testAppConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,6 @@ private[spark] class SparkDockerImageBuilder
// Dockerfile paths (relative to the Docker build context) for each image
// this builder produces. NOTE(review): the asset-server Dockerfile constant
// is removed by this commit along with the corresponding buildImage call.
private val BASE_DOCKER_FILE = "dockerfiles/spark-base/Dockerfile"
private val DRIVER_DOCKER_FILE = "dockerfiles/driver/Dockerfile"
private val EXECUTOR_DOCKER_FILE = "dockerfiles/executor/Dockerfile"
private val STATIC_ASSET_SERVER_DOCKER_FILE =
"dockerfiles/integration-test-asset-server/Dockerfile"
// Patience settings for ScalaTest's eventually-style waits on Docker
// operations: give up after 2 minutes, poll every 2 seconds.
private val TIMEOUT = PatienceConfiguration.Timeout(Span(2, Minutes))
private val INTERVAL = PatienceConfiguration.Interval(Span(2, Seconds))
private val dockerHost = dockerEnv.getOrElse("DOCKER_HOST",
Expand Down Expand Up @@ -64,7 +62,6 @@ private[spark] class SparkDockerImageBuilder
buildImage("spark-base", BASE_DOCKER_FILE)
buildImage("spark-driver", DRIVER_DOCKER_FILE)
buildImage("spark-executor", EXECUTOR_DOCKER_FILE)
buildImage("spark-integration-test-asset-server", STATIC_ASSET_SERVER_DOCKER_FILE)
}

private def buildImage(name: String, dockerFile: String): Unit = {
Expand Down