This repository was archived by the owner on Jan 9, 2020. It is now read-only.
Changes from 1 commit
Commits (41)
7570eab
[SPARK-22788][STREAMING] Use correct hadoop config for fs append supp…
Dec 20, 2017
7798c9e
[SPARK-22824] Restore old offset for binary compatibility
jose-torres Dec 20, 2017
d762d11
[SPARK-22832][ML] BisectingKMeans unpersist unused datasets
zhengruifeng Dec 20, 2017
c89b431
[SPARK-22849] ivy.retrieve pattern should also consider `classifier`
gatorsmile Dec 20, 2017
792915c
[SPARK-22830] Scala Coding style has been improved in Spark Examples
chetkhatri Dec 20, 2017
b176014
[SPARK-22847][CORE] Remove redundant code in AppStatusListener while …
Ngone51 Dec 20, 2017
0114c89
[SPARK-22845][SCHEDULER] Modify spark.kubernetes.allocation.batch.del…
foxish Dec 21, 2017
fb0562f
[SPARK-22810][ML][PYSPARK] Expose Python API for LinearRegression wit…
yanboliang Dec 21, 2017
9c289a5
[SPARK-22387][SQL] Propagate session configs to data source read/writ…
jiangxb1987 Dec 21, 2017
d3ae3e1
[SPARK-19634][SQL][ML][FOLLOW-UP] Improve interface of dataframe vect…
WeichenXu123 Dec 21, 2017
cb9fc8d
[SPARK-22848][SQL] Eliminate mutable state from Stack
kiszk Dec 21, 2017
59d5263
[SPARK-22324][SQL][PYTHON] Upgrade Arrow to 0.8.0
BryanCutler Dec 21, 2017
0abaf31
[SPARK-22852][BUILD] Exclude -Xlint:unchecked from sbt javadoc flags
easel Dec 21, 2017
4c2efde
[SPARK-22855][BUILD] Add -no-java-comments to sbt docs/scalacOptions
easel Dec 21, 2017
8a0ed5a
[SPARK-22668][SQL] Ensure no global variables in arguments of method …
cloud-fan Dec 21, 2017
d3a1d95
[SPARK-22786][SQL] only use AppStatusPlugin in history server
cloud-fan Dec 21, 2017
4e107fd
[SPARK-22822][TEST] Basic tests for WindowFrameCoercion and DecimalPr…
wangyum Dec 21, 2017
fe65361
[SPARK-22042][FOLLOW-UP][SQL] ReorderJoinPredicates can break when ch…
tejasapatil Dec 21, 2017
7beb375
[SPARK-22861][SQL] SQLAppStatusListener handles multi-job executions.
squito Dec 21, 2017
7ab165b
[SPARK-22648][K8S] Spark on Kubernetes - Documentation
foxish Dec 22, 2017
c0abb1d
[SPARK-22854][UI] Read Spark version from event logs.
Dec 22, 2017
c6f01ca
[SPARK-22750][SQL] Reuse mutable states when possible
mgaido91 Dec 22, 2017
a36b78b
[SPARK-22450][CORE][MLLIB][FOLLOWUP] safely register class for mllib …
zhengruifeng Dec 22, 2017
22e1849
[SPARK-22866][K8S] Fix path issue in Kubernetes dockerfile
foxish Dec 22, 2017
8df1da3
[SPARK-22862] Docs on lazy elimination of columns missing from an enc…
marmbrus Dec 22, 2017
13190a4
[SPARK-22874][PYSPARK][SQL] Modify checking pandas version to use Loo…
ueshin Dec 22, 2017
d23dc5b
[SPARK-22346][ML] VectorSizeHint Transformer for using VectorAssemble…
MrBago Dec 22, 2017
d3cbbdd
[SPARK-22757][Kubernetes] Enable use of remote dependencies in Kubern…
liyinan926 Dec 12, 2017
5d2cbc8
Addressed first round of comments
liyinan926 Dec 15, 2017
4ee76af
Addressed the second round of comments
liyinan926 Dec 16, 2017
9c8051a
Create one task per jar/file to download in the init-container
liyinan926 Dec 16, 2017
1f65417
More review comments
liyinan926 Dec 18, 2017
109ad80
Shorten variable names
liyinan926 Dec 19, 2017
c21fdcf
Removed traits that have only a single implementation
liyinan926 Dec 19, 2017
a3cd71d
Remove unused class arguments
liyinan926 Dec 19, 2017
23c5cd9
Improved documentation
liyinan926 Dec 19, 2017
2ec15c4
Addressed latest round of comments
liyinan926 Dec 20, 2017
5d1f889
Addressed more comments
liyinan926 Dec 21, 2017
9d9c841
Updated names of two configuration properties
liyinan926 Dec 22, 2017
c51bc56
Addressed more comments
liyinan926 Dec 25, 2017
28343fb
Addressed one more comment
liyinan926 Dec 26, 2017
Addressed the second round of comments
liyinan926 committed Dec 23, 2017
commit 4ee76afa3500a4315d3a62fb911f219c4f3a7cd7
Config.scala
@@ -145,9 +145,9 @@ private[spark] object Config extends Logging {
.stringConf
.createWithDefault("/var/spark-data/spark-files")

val INIT_CONTAINER_DOCKER_IMAGE =
ConfigBuilder("spark.kubernetes.initContainer.docker.image")
.doc("Image for the driver and executor's init-container that downloads dependencies.")
val INIT_CONTAINER_IMAGE =
ConfigBuilder("spark.kubernetes.initContainer.image")
.doc("Image for the driver and executor's init-container for downloading dependencies.")
.stringConf
.createOptional

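This hunk renames both the Scala constant and the config key itself (spark.kubernetes.initContainer.docker.image becomes spark.kubernetes.initContainer.image). A minimal sketch of setting the renamed key on the submission side, assuming a SparkConf-based launcher; the image reference is illustrative only:

import org.apache.spark.SparkConf

// Hypothetical submission-side configuration; the image name is made up.
val conf = new SparkConf()
  .set("spark.kubernetes.initContainer.image", "registry.example.com/spark-init:2.3.0")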
InitContainerBootstrapImpl.scala
@@ -68,11 +68,12 @@ private[spark] class InitContainerBootstrapImpl(
}
val initContainerCustomEnvVars = sparkConf.getAllWithPrefix(initContainerCustomEnvVarKeyPrefix)
.toSeq
.map(env =>
.map { env =>
new EnvVarBuilder()
.withName(env._1)
.withValue(env._2)
.build())
.build()
}

val initContainer = new ContainerBuilder(podWithDetachedInitContainer.initContainer)
.withName("spark-init")
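The change above is purely stylistic: a multi-line lambda moves from parentheses to braces. A self-contained sketch of the idiom, with plain strings standing in for the fabric8 EnvVarBuilder chain:

// (name, value) pairs as they would come from sparkConf.getAllWithPrefix(...)
val initContainerCustomEnvVars = Seq(("SPARK_ENV_A", "1"), ("SPARK_ENV_B", "2"))
val envVars = initContainerCustomEnvVars.map { env =>
  // stand-in for new EnvVarBuilder().withName(env._1).withValue(env._2).build()
  s"${env._1}=${env._2}"
}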
MountSecretsBootstrapImpl.scala
@@ -38,7 +38,7 @@ private[spark] class MountSecretsBootstrapImpl(

override def mountSecrets(pod: Pod, container: Container): (Pod, Container) = {
var podBuilder = new PodBuilder(pod)
secretNamesToMountPaths.keys.foreach(name =>
secretNamesToMountPaths.keys.foreach { name =>
podBuilder = podBuilder
.editOrNewSpec()
.addNewVolume()
@@ -47,16 +47,17 @@ private[spark] class MountSecretsBootstrapImpl(
.withSecretName(name)
.endSecret()
.endVolume()
.endSpec())
.endSpec()
}

var containerBuilder = new ContainerBuilder(container)
secretNamesToMountPaths.foreach(namePath =>
secretNamesToMountPaths.foreach { namePath =>
containerBuilder = containerBuilder
.addNewVolumeMount()
.withName(secretVolumeName(namePath._1))
.withMountPath(namePath._2)
.endVolumeMount()
)
}

(podBuilder.build(), containerBuilder.build())
}
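The rewritten loops still accumulate into var builders. A foldLeft is the usual var-free alternative; a sketch under the assumption that plain strings stand in for the fabric8 pod and container builders:

val secretNamesToMountPaths = Map("db-creds" -> "/etc/secrets/db") // illustrative
val volumes = secretNamesToMountPaths.keys.foldLeft(Seq.empty[String]) { (acc, name) =>
  acc :+ s"$name-volume" // stand-in for editOrNewSpec().addNewVolume()...endSpec()
}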
DriverConfigOrchestrator.scala
@@ -25,15 +25,15 @@ import org.apache.spark.deploy.k8s.{ConfigurationUtils, MountSecretsBootstrapImp
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.submit.steps._
import org.apache.spark.deploy.k8s.submit.steps.initcontainer.InitContainerConfigurationStepsOrchestrator
import org.apache.spark.deploy.k8s.submit.steps.initcontainer.InitContainerConfigOrchestrator
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.util.SystemClock
import org.apache.spark.util.Utils

/**
* Constructs the complete list of driver configuration steps to run to deploy the Spark driver.
*/
private[spark] class DriverConfigurationStepsOrchestrator(
private[spark] class DriverConfigOrchestrator(
namespace: String,
kubernetesAppId: String,
launchTime: Long,
@@ -125,6 +125,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(

val mayBeInitContainerBootstrapStep =
if (areAnyFilesNonContainerLocal(sparkJars ++ sparkFiles)) {
@@ -147,6 +148,26 @@ private[spark] class DriverConfigurationStepsOrchestrator(
val orchestrator = new InitContainerConfigOrchestrator(
namespace,
kubernetesResourceNamePrefix,
sparkJars,
sparkFiles,
jarsDownloadPath,
filesDownloadPath,
dockerImagePullPolicy,
allDriverLabels,
initContainerConfigMapName,
INIT_CONTAINER_PROPERTIES_FILE_NAME,
submissionSparkConf)
val bootstrapStep = new DriverInitContainerBootstrapStep(
orchestrator.getAllConfigurationSteps(),
initContainerConfigMapName,
INIT_CONTAINER_PROPERTIES_FILE_NAME)

Some(bootstrapStep)
} else {
None
}
@@ -169,7 +190,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(

private def areAnyFilesNonContainerLocal(files: Seq[String]): Boolean = {
files.exists { uri =>
Option(Utils.resolveURI(uri).getScheme).getOrElse("file") != "local"
Utils.resolveURI(uri).getScheme != "local"
}
}
}
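Note what the simplified check assumes: dropping the Option(...).getOrElse("file") guard presumes the resolved URI always carries a scheme (the same direct getScheme read recurs in KubernetesFileUtils below). A sketch with java.net.URI standing in for Utils.resolveURI, where a schemeless path yields null:

import java.net.URI

def isNonContainerLocal(uri: String): Boolean =
  new URI(uri).getScheme != "local"

isNonContainerLocal("hdfs://host:9000/app.jar") // true: remote
isNonContainerLocal("local:///opt/app.jar")     // false: container-local
isNonContainerLocal("/opt/app.jar")             // true: getScheme returns null here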
KubernetesClientApplication.scala
@@ -210,7 +210,7 @@ private[spark] class KubernetesClientApplication extends SparkApplication {
val loggingPodStatusWatcher = new LoggingPodStatusWatcherImpl(
kubernetesAppId, loggingInterval)

val configurationStepsOrchestrator = new DriverConfigurationStepsOrchestrator(
val configurationStepsOrchestrator = new DriverConfigOrchestrator(
namespace,
kubernetesAppId,
launchTime,
KubernetesFileUtils.scala
@@ -51,12 +51,10 @@ private[spark] object KubernetesFileUtils {
* Get from a given collection of file URIs the ones that represent remote files.
*/
def getOnlyRemoteFiles(uris: Iterable[String]): Iterable[String] = {
filterUriStringsByScheme(uris, scheme => scheme != "file" && scheme != "local")
}

private def filterUriStringsByScheme(
uris: Iterable[String], schemeFilter: (String => Boolean)): Iterable[String] = {
uris.filter(uri => schemeFilter(Option(Utils.resolveURI(uri).getScheme).getOrElse("file")))
uris.filter { uri =>
val scheme = Utils.resolveURI(uri).getScheme
scheme != "file" && scheme != "local"
}
}

private def resolveFileUri(
DriverInitContainerBootstrapStep.scala
@@ -76,7 +76,8 @@ private[spark] class DriverInitContainerBootstrapStep(
config: Map[String, String]): ConfigMap = {
val properties = new Properties()
config.foreach { entry =>
properties.setProperty(entry._1, entry._2) }
properties.setProperty(entry._1, entry._2)
}
val propertiesWriter = new StringWriter()
properties.store(propertiesWriter,
s"Java properties built from Kubernetes config map with name: $configMapName " +
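For context, the reformatted loop is the standard Map-to-Properties bridge used before serializing the init-container's config map. A self-contained sketch of the surrounding logic, with an illustrative key:

import java.io.StringWriter
import java.util.Properties

val config = Map("spark.kubernetes.initContainer.image" -> "spark-init:latest") // illustrative
val properties = new Properties()
config.foreach { entry =>
  properties.setProperty(entry._1, entry._2)
}
val propertiesWriter = new StringWriter()
properties.store(propertiesWriter, "illustrative header comment")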
BaseInitContainerConfigurationStep.scala
@@ -34,15 +34,15 @@ private[spark] class BaseInitContainerConfigurationStep(
val remoteJarsConf = if (remoteJarsToDownload.nonEmpty) {
Map(INIT_CONTAINER_REMOTE_JARS.key -> remoteJarsToDownload.mkString(","))
} else {
Map.empty[String, String]
Map()
}
val remoteFilesConf = if (remoteFilesToDownload.nonEmpty) {
Map(INIT_CONTAINER_REMOTE_FILES.key -> remoteFilesToDownload.mkString(","))
} else {
Map.empty[String, String]
Map()
}

val baseInitContainerConfig = Map[String, String](
val baseInitContainerConfig = Map(
JARS_DOWNLOAD_LOCATION.key -> jarsDownloadPath,
FILES_DOWNLOAD_LOCATION.key -> filesDownloadPath) ++
remoteJarsConf ++
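A sketch of the branch shape above, with the result type annotated so the bare Map() is unambiguously a Map[String, String]:

val remoteJarsToDownload = Seq.empty[String] // illustrative
val remoteJarsConf: Map[String, String] =
  if (remoteJarsToDownload.nonEmpty) {
    Map("init-container.remoteJars" -> remoteJarsToDownload.mkString(",")) // key name illustrative
  } else {
    Map()
  }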
InitContainerConfigOrchestrator.scala
@@ -25,7 +25,7 @@ import org.apache.spark.deploy.k8s.Constants._
* Returns the complete ordered list of steps required to configure the init-container. This is
* only used when there are remote application dependencies to localize.
*/
private[spark] class InitContainerConfigurationStepsOrchestrator(
private[spark] class InitContainerConfigOrchestrator(
namespace: String,
kubernetesResourceNamePrefix: String,
sparkJars: Seq[String],
@@ -39,9 +39,9 @@ private[spark] class InitContainerConfigurationStepsOrchestrator(
submissionSparkConf: SparkConf) {

private val initContainerImage = submissionSparkConf
.get(INIT_CONTAINER_DOCKER_IMAGE)
.get(INIT_CONTAINER_IMAGE)
.getOrElse(throw new SparkException(
"Must specify the init-container Docker image when there are remote dependencies"))
"Must specify the init-container image when there are remote dependencies"))
private val downloadTimeoutMinutes = submissionSparkConf.get(INIT_CONTAINER_MOUNT_TIMEOUT)

def getAllConfigurationSteps(): Seq[InitContainerConfigurationStep] = {
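The renamed constant keeps its fail-fast lookup: a missing image is an error once remote dependencies exist. A minimal sketch of that pattern, with a plain Option and RuntimeException in place of Spark's ConfigEntry and SparkException:

def requiredInitContainerImage(conf: Map[String, String]): String =
  conf.get("spark.kubernetes.initContainer.image").getOrElse(
    throw new RuntimeException(
      "Must specify the init-container image when there are remote dependencies"))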

This file was deleted.

SparkPodInitContainer.scala
@@ -36,7 +36,7 @@ import org.apache.spark.util.{ThreadUtils, Utils}
* with different configurations for different download sources, or using the same container to
* download everything at once.
*/
private[spark] class KubernetesSparkDependencyDownloadInitContainer(
private[spark] class SparkPodInitContainer(
sparkConf: SparkConf,
fileFetcher: FileFetcher) extends Logging {

@@ -70,9 +70,10 @@ private[spark] class KubernetesSparkDependencyDownloadInitContainer(
s"Remote files download directory specified at $filesDownloadDir does not exist " +
"or is not a directory.")
}
waitForFutures(
remoteJarsDownload,
remoteFilesDownload)

Seq(remoteJarsDownload, remoteFilesDownload).foreach {
ThreadUtils.awaitResult(_, Duration.create(downloadTimeoutMinutes, TimeUnit.MINUTES))
}
}

private def downloadFiles(
@@ -86,16 +87,9 @@
fileFetcher.fetchFile(file, downloadDir)
}
}

private def waitForFutures(futures: Future[_]*) {
futures.foreach {
ThreadUtils.awaitResult(_, Duration.create(downloadTimeoutMinutes, TimeUnit.MINUTES))
}
}
}

private class FileFetcherImpl(sparkConf: SparkConf, securityManager: SparkSecurityManager)
extends FileFetcher {
private class FileFetcher(sparkConf: SparkConf, securityManager: SparkSecurityManager) {

def fetchFile(uri: String, targetDir: File): Unit = {
Utils.fetchFile(
@@ -109,7 +103,7 @@ private class FileFetcherImpl(sparkConf: SparkConf, securityManager: SparkSecurityManager)
}
}

object KubernetesSparkDependencyDownloadInitContainer extends Logging {
object SparkPodInitContainer extends Logging {

def main(args: Array[String]): Unit = {
logInfo("Starting init-container to download Spark application dependencies.")
@@ -120,10 +114,8 @@ object KubernetesSparkDependencyDownloadInitContainer extends Logging {
}

val securityManager = new SparkSecurityManager(sparkConf)
val fileFetcher = new FileFetcherImpl(sparkConf, securityManager)
new KubernetesSparkDependencyDownloadInitContainer(
sparkConf,
fileFetcher).run()
val fileFetcher = new FileFetcher(sparkConf, securityManager)
new SparkPodInitContainer(sparkConf, fileFetcher).run()
logInfo("Finished downloading application dependencies.")
}
}
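The one-off waitForFutures helper is inlined into a foreach over the two download futures. A self-contained sketch of the same shape, with scala.concurrent.Await standing in for Spark's ThreadUtils.awaitResult and an illustrative timeout:

import java.util.concurrent.TimeUnit
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration

val remoteJarsDownload = Future { /* fetch jars */ }
val remoteFilesDownload = Future { /* fetch files */ }
Seq(remoteJarsDownload, remoteFilesDownload).foreach {
  Await.result(_, Duration.create(5, TimeUnit.MINUTES))
}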
KubernetesClusterManager.scala
@@ -66,9 +66,9 @@ private[spark] class KubernetesClusterManager extends ExternalClusterManager with Logging {
configMapKey <- maybeInitContainerConfigMapKey
} yield {
val initContainerImage = sparkConf
.get(INIT_CONTAINER_DOCKER_IMAGE)
.get(INIT_CONTAINER_IMAGE)
.getOrElse(throw new SparkException(
"Must specify the init-container Docker image when there are remote dependencies"))
"Must specify the init-container image when there are remote dependencies"))
new InitContainerBootstrapImpl(
initContainerImage,
sparkConf.get(CONTAINER_IMAGE_PULL_POLICY),
DriverConfigOrchestratorSuite.scala
@@ -20,7 +20,7 @@ import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.submit.steps._

class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
class DriverConfigOrchestratorSuite extends SparkFunSuite {

private val NAMESPACE = "default"
private val DRIVER_IMAGE = "driver-image"
@@ -38,7 +38,7 @@ class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
val sparkConf = new SparkConf(false)
.set(DRIVER_CONTAINER_IMAGE, DRIVER_IMAGE)
val mainAppResource = JavaMainAppResource("local:///var/apps/jars/main.jar")
val orchestrator = new DriverConfigurationStepsOrchestrator(
val orchestrator = new DriverConfigOrchestrator(
NAMESPACE,
APP_ID,
LAUNCH_TIME,
@@ -58,8 +58,13 @@ class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {

test("Base submission steps without a main app resource.") {
val sparkConf = new SparkConf(false)
.set(DRIVER_CONTAINER_IMAGE, DRIVER_IMAGE)
val orchestrator = new DriverConfigOrchestrator(
NAMESPACE,
APP_ID,
LAUNCH_TIME,
@@ -78,11 +83,16 @@ class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {

test("Submission steps with an init-container.") {
val sparkConf = new SparkConf(false)
.set(DRIVER_CONTAINER_IMAGE, DRIVER_IMAGE)
.set(INIT_CONTAINER_IMAGE, INIT_CONTAINER_IMAGE)
.set("spark.jars", "hdfs://localhost:9000/var/apps/jars/jar1.jar")
val mainAppResource = JavaMainAppResource("local:///var/apps/jars/main.jar")
val orchestrator = new DriverConfigurationStepsOrchestrator(
val orchestrator = new DriverConfigOrchestrator(
NAMESPACE,
APP_ID,
LAUNCH_TIME,
@@ -106,7 +116,7 @@ class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
.set(s"$KUBERNETES_DRIVER_SECRETS_PREFIX$SECRET_FOO", SECRET_MOUNT_PATH)
.set(s"$KUBERNETES_DRIVER_SECRETS_PREFIX$SECRET_BAR", SECRET_MOUNT_PATH)
val mainAppResource = JavaMainAppResource("local:///var/apps/jars/main.jar")
val orchestrator = new DriverConfigurationStepsOrchestrator(
val orchestrator = new DriverConfigOrchestrator(
NAMESPACE,
APP_ID,
LAUNCH_TIME,
@@ -125,8 +135,8 @@ class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
}

private def validateStepTypes(
orchestrator: DriverConfigurationStepsOrchestrator,
types: Class[_ <: DriverConfigurationStep]*): Unit = {
orchestrator: DriverConfigOrchestrator,
types: Class[_ <: DriverConfigurationStep]*): Unit = {
val steps = orchestrator.getAllConfigurationSteps()
assert(steps.size === types.size)
assert(steps.map(_.getClass) === types)
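The reindented validateStepTypes keeps the same assertion shape: sizes match, then classes match positionally. A self-contained sketch of that check:

val steps: Seq[Any] = Seq("step-a", Integer.valueOf(42))
val types: Seq[Class[_]] = Seq(classOf[String], classOf[java.lang.Integer])
assert(steps.size == types.size)
assert(steps.map(_.getClass) == types)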