This repository was archived by the owner on Jan 9, 2020. It is now read-only.
Closed
Changes from 1 commit
Commits (37)
999ec13  [SPARK-22570][SQL] Avoid to create a lot of global variables by using… (kiszk, Nov 30, 2017)
6ac57fd  [SPARK-21417][SQL] Infer join conditions using propagated constraints (Nov 30, 2017)
bcceab6  [SPARK-22489][SQL] Shouldn't change broadcast join buildSide if user … (wangyum, Nov 30, 2017)
f5f8e84  [SPARK-22614] Dataset API: repartitionByRange(...) (adrian-ionescu, Nov 30, 2017)
7e5f669  [SPARK-22428][DOC] Add spark application garbage collector configurat… (gaborgsomogyi, Dec 1, 2017)
7da1f57  [SPARK-22373] Bump Janino dependency version to fix thread safety issue… (Victsm, Dec 1, 2017)
dc36542  [SPARK-22653] executorAddress registered in CoarseGrainedSchedulerBac… (tgravescs, Dec 1, 2017)
16adaf6  [SPARK-22601][SQL] Data load is getting displayed successful on provi… (sujith71955, Dec 1, 2017)
9d06a9e  [SPARK-22393][SPARK-SHELL] spark-shell can't find imported types in c… (mpetruska, Dec 1, 2017)
ee10ca7  [SPARK-22638][SS] Use a separate queue for StreamingQueryListenerBus (zsxwing, Dec 1, 2017)
aa4cf2b  [SPARK-22651][PYTHON][ML] Prevent initiating multiple Hive clients fo… (HyukjinKwon, Dec 2, 2017)
d2cf95a  [SPARK-22634][BUILD] Update Bouncy Castle to 1.58 (srowen, Dec 2, 2017)
f23dddf  [SPARK-20682][SPARK-15474][SPARK-21791] Add new ORCFileFormat based o… (dongjoon-hyun, Dec 3, 2017)
2c16267  [SPARK-22669][SQL] Avoid unnecessary function calls in code generation (mgaido91, Dec 3, 2017)
dff440f  [SPARK-22626][SQL] deals with wrong Hive's statistics (zero rowCount) (wangyum, Dec 3, 2017)
4131ad0  [SPARK-22489][DOC][FOLLOWUP] Update broadcast behavior changes in mig… (wangyum, Dec 4, 2017)
3927bb9  [SPARK-22473][FOLLOWUP][TEST] Remove deprecated Date functions (mgaido91, Dec 4, 2017)
f81401e  [SPARK-22162] Executors and the driver should use consistent JobIDs i… (Dec 4, 2017)
e1dd03e  [SPARK-22372][CORE, YARN] Make cluster submission use SparkApplication. (Dec 4, 2017)
dcaac45  Spark on Kubernetes - basic submission client (liyinan926, Nov 10, 2017)
27c67ff  Addressed first round of review comments (liyinan926, Nov 27, 2017)
6d597d0  Made Client implement the SparkApplication trait (liyinan926, Nov 28, 2017)
5b9fa39  Addressed the second round of comments (liyinan926, Nov 28, 2017)
5ccadb5  Added missing step for supporting local:// dependencies and addressed… (liyinan926, Nov 30, 2017)
12f2797  Fixed Scala style check errors (liyinan926, Nov 30, 2017)
c35fe48  Addressed another round of comments (liyinan926, Dec 4, 2017)
faa2849  Rebased on master and added a constant val for the Client class (liyinan926, Dec 4, 2017)
347ed69  Addressed another major round of comments (liyinan926, Dec 5, 2017)
0e8ca01  Addressed one more round of comments (liyinan926, Dec 5, 2017)
3a0b8e3  Removed mentioning of kubernetes-namespace (liyinan926, Dec 6, 2017)
83d0b9c  Fixed a couple of bugs found during manual tests (liyinan926, Dec 7, 2017)
44c40b1  Guard against client mode in SparkContext (liyinan926, Dec 8, 2017)
67bc847  Added libc6-compat into the base docker image (liyinan926, Dec 8, 2017)
7d2b303  Addressed latest comments (liyinan926, Dec 8, 2017)
caf2206  Addressed docs comments (liyinan926, Dec 9, 2017)
2e7810b  Fixed a comment (liyinan926, Dec 11, 2017)
cbcd30e  Addressed latest comments (liyinan926, Dec 11, 2017)
Addressed first round of review comments
liyinan926 committed Dec 4, 2017
commit 27c67ff2693a1aaac3d0863410c67691a0784ea9
7 changes: 4 additions & 3 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -718,12 +718,13 @@ object SparkSubmit extends CommandLineUtils with Logging {

     if (isKubernetesCluster) {
       childMainClass = "org.apache.spark.deploy.k8s.submit.Client"
-      childArgs ++= Array("--primary-java-resource", args.primaryResource)
+      if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
+        childArgs ++= Array("--primary-java-resource", args.primaryResource)
+      }
       childArgs ++= Array("--main-class", args.mainClass)
       if (args.childArgs != null) {
         args.childArgs.foreach { arg =>
-          childArgs += "--arg"
-          childArgs += arg
+          childArgs += ("--arg", arg)
         }
       }
     }
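Two things change in this hunk: the --primary-java-resource flag is now passed only when a primary resource is actually set (SparkLauncher.NO_RESOURCE is the sentinel for "none"), and the two separate appends collapse into one. A minimal sketch, assuming Scala 2.11/2.12 collections, of why the one-line form is equivalent:

import scala.collection.mutable.ArrayBuffer

// `buf += (a, b)` resolves to Growable's varargs overload
// `+=(elem1, elem2, elems*)`, appending both strings rather than a tuple,
// so "--arg" and its value still arrive as two separate child arguments.
val childArgs = ArrayBuffer.empty[String]
childArgs += ("--arg", "10")
assert(childArgs == ArrayBuffer("--arg", "10"))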
@@ -64,10 +64,10 @@ private[spark] object Config extends Logging {

   val KUBERNETES_SERVICE_ACCOUNT_NAME =
     ConfigBuilder(s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.serviceAccountName")
-      .doc("Service account that is used when running the driver pod. The driver pod uses" +
-        " this service account when requesting executor pods from the API server. If specific" +
-        " credentials are given for the driver pod to use, the driver will favor" +
-        " using those credentials instead.")
+      .doc("Service account that is used when running the driver pod. The driver pod uses " +
+        "this service account when requesting executor pods from the API server. If specific " +
+        "credentials are given for the driver pod to use, the driver will favor " +
+        "using those credentials instead.")
       .stringConf
       .createOptional
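A side note for readers new to ConfigBuilder: an entry finished with .createOptional reads back as an Option, so call sites branch on presence without string parsing. A minimal sketch of the read side, assuming Spark's internal typed SparkConf.get(ConfigEntry) accessor and a sparkConf value in scope:

// Sketch only: KUBERNETES_SERVICE_ACCOUNT_NAME was declared with
// .stringConf.createOptional, so the typed getter yields Option[String].
val serviceAccount: Option[String] = sparkConf.get(KUBERNETES_SERVICE_ACCOUNT_NAME)
val effectiveAccount: String = serviceAccount.getOrElse("default")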

@@ -85,10 +85,10 @@ private[spark] object Config extends Logging {

   val KUBERNETES_DRIVER_MEMORY_OVERHEAD =
     ConfigBuilder("spark.kubernetes.driver.memoryOverhead")
-      .doc("The amount of off-heap memory (in megabytes) to be allocated for the driver and the" +
-        " driver submission server. This is memory that accounts for things like VM overheads," +
-        " interned strings, other native overheads, etc. This tends to grow with the driver's" +
-        " memory size (typically 6-10%).")
+      .doc("The amount of off-heap memory (in megabytes) to be allocated for the driver and the " +
+        "driver submission server. This is memory that accounts for things like VM overheads, " +
+        "interned strings, other native overheads, etc. This tends to grow with the driver's " +
+        "memory size (typically 6-10%).")
       .bytesConf(ByteUnit.MiB)
       .createOptional

@@ -141,8 +141,8 @@ private[spark] object Config extends Logging {

   val WAIT_FOR_APP_COMPLETION =
     ConfigBuilder("spark.kubernetes.submission.waitAppCompletion")
-      .doc("In cluster mode, whether to wait for the application to finish before exiting the" +
-        " launcher process.")
+      .doc("In cluster mode, whether to wait for the application to finish before exiting the " +
+        "launcher process.")
       .booleanConf
       .createWithDefault(true)

@@ -166,17 +166,16 @@ private[spark] object Config extends Logging {
   val KUBERNETES_DRIVER_ENV_KEY = "spark.kubernetes.driverEnv."

   def getK8sMasterUrl(rawMasterString: String): String = {
-    if (!rawMasterString.startsWith("k8s://")) {
-      throw new IllegalArgumentException("Master URL should start with k8s:// in Kubernetes mode.")
-    }
+    require(rawMasterString.startsWith("k8s://"),
+      "Master URL should start with k8s:// in Kubernetes mode.")
     val masterWithoutK8sPrefix = rawMasterString.replaceFirst("k8s://", "")
     if (masterWithoutK8sPrefix.startsWith("http://")
         || masterWithoutK8sPrefix.startsWith("https://")) {
       masterWithoutK8sPrefix
     } else {
       val resolvedURL = s"https://$masterWithoutK8sPrefix"
-      logInfo("No scheme specified for kubernetes master URL, so defaulting to https. Resolved" +
-        s" URL is $resolvedURL")
+      logInfo("No scheme specified for kubernetes master URL, so defaulting to https. Resolved " +
+        s"URL is $resolvedURL")
       resolvedURL
     }
   }
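To make the resolution rules concrete, a few illustrative calls (the addresses are made up; the behavior follows directly from the code above):

getK8sMasterUrl("k8s://https://10.0.0.1:6443")  // "https://10.0.0.1:6443" (scheme kept)
getK8sMasterUrl("k8s://http://10.0.0.1:8080")   // "http://10.0.0.1:8080" (scheme kept)
getK8sMasterUrl("k8s://10.0.0.1:6443")          // "https://10.0.0.1:6443" (https assumed, logged)
getK8sMasterUrl("10.0.0.1:6443")                // IllegalArgumentException from the require(...)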
@@ -33,6 +33,13 @@ import org.apache.spark.deploy.k8s.submit.steps.DriverConfigurationStep
 import org.apache.spark.internal.Logging
 import org.apache.spark.util.Utils

+/**
+ * Encapsulates arguments to the submission client.
+ *
+ * @param mainAppResource the main application resource
+ * @param mainClass the main class of the application to run
+ * @param driverArgs arguments to the driver
+ */
 private[spark] case class ClientArguments(
     mainAppResource: MainAppResource,
     mainClass: String,
@@ -58,7 +65,7 @@ private[spark] object ClientArguments {
     }

     require(mainAppResource.isDefined,
-      "Main app resource must be defined by either --primary-py-file or --primary-java-resource.")
+      "Main app resource must be defined by --primary-java-resource.")
     require(mainClass.isDefined, "Main class must be specified via --main-class")

     ClientArguments(
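For reference, a hypothetical invocation showing how the flags that SparkSubmit forwards map onto ClientArguments (the fromCommandLineArgs entry point and all values below are assumptions for illustration, not shown in this hunk):

// Hypothetical: parse the flags built for Kubernetes cluster mode.
val clientArgs = ClientArguments.fromCommandLineArgs(Array(
  "--primary-java-resource", "local:///opt/spark/examples/jars/spark-examples.jar",
  "--main-class", "org.apache.spark.examples.SparkPi",
  "--arg", "10"))
// clientArgs.mainClass == "org.apache.spark.examples.SparkPi";
// omitting --main-class would fail the require(...) above.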
@@ -68,6 +75,19 @@
   }
 }

+/**
+ * Submits a Spark application to run on Kubernetes by creating the driver pod and starting a
+ * watcher that monitors and logs the application status. Waits for the application to terminate if
+ * spark.kubernetes.submission.waitAppCompletion is true.
+ *
+ * @param submissionSteps steps that collectively configure the driver
+ * @param submissionSparkConf the submission client Spark configuration
+ * @param kubernetesClient the client to talk to the Kubernetes API server
+ * @param waitForAppCompletion a flag indicating whether the client should wait for the application
+ *        to complete
+ * @param appName the application name
+ * @param loggingPodStatusWatcher a watcher that monitors and logs the application status
+ */
 private[spark] class Client(
     submissionSteps: Seq[DriverConfigurationStep],
     submissionSparkConf: SparkConf,
@@ -51,8 +51,12 @@ private[spark] class DriverConfigurationStepsOrchestrator(
       submissionSparkConf,
       KUBERNETES_DRIVER_LABEL_PREFIX)
     require(!driverCustomLabels.contains(SPARK_APP_ID_LABEL), "Label with key " +
-      s" $SPARK_APP_ID_LABEL is not allowed as it is reserved for Spark bookkeeping" +
-      " operations.")
+      s"$SPARK_APP_ID_LABEL is not allowed as it is reserved for Spark bookkeeping " +
+      "operations.")
+    require(!driverCustomLabels.contains(SPARK_ROLE_LABEL), "Label with key " +
+      s"$SPARK_ROLE_LABEL is not allowed as it is reserved for Spark bookkeeping " +
+      "operations.")

     val allDriverLabels = driverCustomLabels ++ Map(
       SPARK_APP_ID_LABEL -> kubernetesAppId,
       SPARK_ROLE_LABEL -> SPARK_POD_DRIVER_ROLE)
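The net effect is fail-fast validation: a custom driver label that collides with a reserved key is now rejected at submission instead of being silently overwritten. An illustrative conf, assuming SPARK_ROLE_LABEL resolves to "spark-role" and KUBERNETES_DRIVER_LABEL_PREFIX to "spark.kubernetes.driver.label.":

// Illustrative only: this user label collides with the reserved role key,
// so constructing the orchestrator throws IllegalArgumentException.
val conf = new SparkConf()
  .set("spark.kubernetes.driver.label.spark-role", "my-role")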
@@ -41,8 +41,9 @@ private[k8s] trait LoggingPodStatusWatcher extends Watcher[Pod] {
  * number.
  */
 private[k8s] class LoggingPodStatusWatcherImpl(
-    appId: String, maybeLoggingInterval: Option[Long])
-  extends LoggingPodStatusWatcher with Logging {
+    appId: String,
+    maybeLoggingInterval: Option[Long])
+  extends LoggingPodStatusWatcher with Logging {

   private val podCompletedFuture = new CountDownLatch(1)
   // start timer for periodic logging
@@ -66,10 +67,7 @@ private[k8s] class LoggingPodStatusWatcherImpl(
   override def eventReceived(action: Action, pod: Pod): Unit = {
     this.pod = Option(pod)
     action match {
-      case Action.DELETED =>
-        closeWatch()
-
-      case Action.ERROR =>
+      case Action.DELETED | Action.ERROR =>
         closeWatch()

       case _ =>
@@ -83,10 +83,11 @@ private[spark] class BaseDriverConfigurationStep(
       " Spark bookkeeping operations.")

     val driverCustomEnvs = submissionSparkConf.getAllWithPrefix(KUBERNETES_DRIVER_ENV_KEY).toSeq
-      .map(env => new EnvVarBuilder()
-        .withName(env._1)
-        .withValue(env._2)
-        .build())
+      .map(env =>
+        new EnvVarBuilder()
+          .withName(env._1)
+          .withValue(env._2)
+          .build())

     val allDriverAnnotations = driverCustomAnnotations ++ Map(SPARK_APP_NAME_ANNOTATION -> appName)

@@ -237,8 +237,8 @@ private[spark] class DriverKubernetesCredentialsStep(

 private class OptionSettableSparkConf(sparkConf: SparkConf) {
   def setOption(configEntry: String, option: Option[String]): SparkConf = {
-    option.map( opt => {
+    option.map( opt =>
       sparkConf.set(configEntry, opt)
-    }).getOrElse(sparkConf)
+    ).getOrElse(sparkConf)
   }
 }
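For illustration, a call site of the helper (the config key is only an example):

// Sets the entry when the Option is defined; otherwise returns the conf
// unchanged, sparing callers an explicit pattern match.
val updated: SparkConf = new OptionSettableSparkConf(sparkConf)
  .setOption("spark.kubernetes.authenticate.driver.serviceAccountName", Some("spark"))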
@@ -52,9 +52,9 @@ private[spark] class DriverServiceBootstrapStep(
     } else {
       val randomServiceId = clock.getTimeMillis()
       val shorterServiceName = s"spark-$randomServiceId$DRIVER_SVC_POSTFIX"
-      logWarning(s"Driver's hostname would preferably be $preferredServiceName, but this is" +
-        s" too long (must be <= 63 characters). Falling back to use $shorterServiceName" +
-        " as the driver service's name.")
+      logWarning(s"Driver's hostname would preferably be $preferredServiceName, but this is " +
+        s"too long (must be <= $MAX_SERVICE_NAME_LENGTH characters). Falling back to use " +
+        s"$shorterServiceName as the driver service's name.")
       shorterServiceName
     }
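For context, the ceiling exists because Kubernetes Service names must be valid DNS labels, which are capped at 63 characters. A made-up example that would trip the fallback (the preferred-name format shown here is an assumption):

// Illustrative only: a long app name plus the "-<launchTime>-driver-svc"
// suffix overruns the 63-character DNS label limit, so the step falls back
// to a short name like "spark-1512345678901-driver-svc".
val preferredServiceName =
  "structured-streaming-sessionization-benchmark-1512345678901-driver-svc"
assert(preferredServiceName.length > 63)  // 70 characters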
