[SPARK-18278] [Scheduler] Spark on Kubernetes - Basic Scheduler Backend #19468
Changes from 1 commit
ConfigurationUtils (org.apache.spark.deploy.k8s):

@@ -17,10 +17,9 @@
 package org.apache.spark.deploy.k8s

-import org.apache.spark.{SparkConf, SparkException}
-import org.apache.spark.internal.Logging
+import org.apache.spark.SparkConf

-private[spark] object ConfigurationUtils extends Logging {
+private[spark] object ConfigurationUtils {
   def parsePrefixedKeyValuePairs(
       sparkConf: SparkConf,
       prefix: String,
@@ -34,4 +33,24 @@ private[spark] object ConfigurationUtils extends Logging {
     }
     fromPrefix.toMap
   }
+
+  def requireBothOrNeitherDefined(
+      opt1: Option[_],
+      opt2: Option[_],
+      errMessageWhenFirstIsMissing: String,
+      errMessageWhenSecondIsMissing: String): Unit = {
+    requireSecondIfFirstIsDefined(opt1, opt2, errMessageWhenSecondIsMissing)
+    requireSecondIfFirstIsDefined(opt2, opt1, errMessageWhenFirstIsMissing)
+  }
+
+  def requireSecondIfFirstIsDefined(
+      opt1: Option[_], opt2: Option[_], errMessageWhenSecondIsMissing: String): Unit = {
+    opt1.foreach { _ =>
+      require(opt2.isDefined, errMessageWhenSecondIsMissing)
+    }
+  }
+
+  def requireNandDefined(opt1: Option[_], opt2: Option[_], errMessage: String): Unit = {
+    opt1.foreach { _ => require(opt2.isEmpty, errMessage) }
+  }
 }
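The three helpers added to ConfigurationUtils above express common constraints between pairs of optional settings: require both or neither, require the second when the first is present, and forbid setting both. A usage sketch follows; the config keys, object name, and error messages are illustrative and not taken from this diff.

package org.apache.spark.deploy.k8s   // placed here only because ConfigurationUtils is private[spark]

import org.apache.spark.SparkConf

// Hypothetical validation helper, for illustration only.
private[spark] object KubernetesAuthValidationSketch {

  def validate(sparkConf: SparkConf): Unit = {
    val oauthToken = sparkConf.getOption("spark.kubernetes.authenticate.oauthToken")
    val oauthTokenFile = sparkConf.getOption("spark.kubernetes.authenticate.oauthTokenFile")
    val clientKeyFile = sparkConf.getOption("spark.kubernetes.authenticate.clientKeyFile")
    val clientCertFile = sparkConf.getOption("spark.kubernetes.authenticate.clientCertFile")

    // A token may be supplied directly or through a file, but not both.
    ConfigurationUtils.requireNandDefined(
      oauthToken,
      oauthTokenFile,
      "Cannot specify an OAuth token value and an OAuth token file at the same time.")

    // TLS client authentication needs both halves of the credential, or neither.
    ConfigurationUtils.requireBothOrNeitherDefined(
      clientKeyFile,
      clientCertFile,
      "A client cert file was provided without a client key file.",
      "A client key file was provided without a client cert file.")
  }
}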
(A file was also deleted in this commit; its name and contents are not shown in this view.)
ExecutorPodFactoryImpl (org.apache.spark.scheduler.cluster.k8s):

@@ -18,8 +18,7 @@ package org.apache.spark.scheduler.cluster.k8s
 import scala.collection.JavaConverters._

-import io.fabric8.kubernetes.api.model.{ContainerBuilder, ContainerPortBuilder, EnvVar, EnvVarBuilder, EnvVarSourceBuilder, Pod, PodBuilder, QuantityBuilder}
-import org.apache.commons.io.FilenameUtils
+import io.fabric8.kubernetes.api.model._

 import org.apache.spark.{SparkConf, SparkException}
 import org.apache.spark.deploy.k8s.ConfigurationUtils
@@ -48,7 +47,7 @@
     org.apache.spark.internal.config.EXECUTOR_CLASS_PATH)
   private val executorJarsDownloadDir = sparkConf.get(INIT_CONTAINER_JARS_DOWNLOAD_LOCATION)

-  private val executorLabels = ConfigurationUtils.parsePrefixedKeyValuePairs (
+  private val executorLabels = ConfigurationUtils.parsePrefixedKeyValuePairs(
     sparkConf,
     KUBERNETES_EXECUTOR_LABEL_PREFIX,
     "executor label")
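For context on the call reformatted above: parsePrefixedKeyValuePairs strips a configuration prefix and returns the remaining key suffixes and their values as a map, which the pod factory attaches to executor pods as Kubernetes labels. A minimal sketch, assuming KUBERNETES_EXECUTOR_LABEL_PREFIX resolves to "spark.kubernetes.executor.label." (the constant's value is not shown in this diff):

package org.apache.spark.deploy.k8s   // placed here only because ConfigurationUtils is private[spark]

import org.apache.spark.SparkConf

object ExecutorLabelParsingSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(loadDefaults = false)
      .set("spark.kubernetes.executor.label.team", "data-eng")
      .set("spark.kubernetes.executor.label.tier", "batch")

    val executorLabels = ConfigurationUtils.parsePrefixedKeyValuePairs(
      conf,
      "spark.kubernetes.executor.label.",   // assumed value of KUBERNETES_EXECUTOR_LABEL_PREFIX
      "executor label")

    // The prefix is stripped, so each suffix/value pair can become a pod label.
    assert(executorLabels == Map("team" -> "data-eng", "tier" -> "batch"))
  }
}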
KubernetesClusterSchedulerBackendSuite:

@@ -27,7 +27,7 @@ import io.fabric8.kubernetes.client.Watcher.Action
 import io.fabric8.kubernetes.client.dsl.{FilterWatchListDeletable, MixedOperation, NonNamespaceOperation, PodResource}
 import org.mockito.{AdditionalAnswers, ArgumentCaptor, Mock, MockitoAnnotations}
 import org.mockito.Matchers.{any, eq => mockitoEq}
-import org.mockito.Mockito.{mock => _, _}
+import org.mockito.Mockito.{doNothing, never, times, verify, when}
 import org.scalatest.BeforeAndAfter
 import org.scalatest.mock.MockitoSugar._
@@ -74,7 +74,7 @@ private[spark] class KubernetesClusterSchedulerBackendSuite
       .build()

   private type PODS = MixedOperation[Pod, PodList, DoneablePod, PodResource[Pod, DoneablePod]]
-  private type LABELLED_PODS = FilterWatchListDeletable[
+  private type LABELED_PODS = FilterWatchListDeletable[
     Pod, PodList, java.lang.Boolean, Watch, Watcher[Pod]]
   private type IN_NAMESPACE_PODS = NonNamespaceOperation[
     Pod, PodList, DoneablePod, PodResource[Pod, DoneablePod]]
@@ -104,7 +104,7 @@ private[spark] class KubernetesClusterSchedulerBackendSuite
   private var podOperations: PODS = _

   @Mock
-  private var podsWithLabelOperations: LABELLED_PODS = _
+  private var podsWithLabelOperations: LABELED_PODS = _

   @Mock
   private var podsInNamespace: IN_NAMESPACE_PODS = _
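The import change above replaces the wildcard import org.mockito.Mockito.{mock => _, _} (which had to hide Mockito's own mock so it would not clash with ScalaTest's MockitoSugar) with an explicit list of the five members the suite actually uses. A small self-contained sketch of that style; this toy suite is not part of the PR:

import org.mockito.Mockito.{never, times, verify, when}
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar._

class ExplicitMockitoImportsSuite extends FunSuite {

  test("only the Mockito members in use are imported explicitly") {
    val pods = mock[java.util.List[String]]   // mock[T] comes from MockitoSugar, not Mockito
    when(pods.get(0)).thenReturn("executor-pod-1")

    assert(pods.get(0) === "executor-pod-1")

    verify(pods, times(1)).get(0)   // the stubbing call inside when(...) is not counted
    verify(pods, never()).clear()
  }
}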
Review comment: We should add a comment to explain what this function does; it not only returns the configs, it also ensures that no duplicate configs are set.
Reply: Done.
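The comment appears to refer to parsePrefixedKeyValuePairs, the helper that both returns the prefixed configs and performs a check. Its body is not shown in this diff, so the following is only a sketch of how the requested doc comment might read; the third parameter name (configType) and the grouping-based duplicate check are assumptions, not taken from the PR.

import org.apache.spark.SparkConf

object ConfigurationUtilsDocSketch {
  /**
   * Extract the Spark configuration properties that start with `prefix` and return them
   * as a map with the prefix stripped from each key. Besides returning the configs, this
   * also verifies that no key under the prefix is given more than one value; `configType`
   * (e.g. "executor label") is only used to build a readable error message.
   */
  def parsePrefixedKeyValuePairs(
      sparkConf: SparkConf,
      prefix: String,
      configType: String): Map[String, String] = {
    val fromPrefix = sparkConf.getAllWithPrefix(prefix)
    fromPrefix.groupBy(_._1).foreach {
      case (key, values) =>
        require(values.size == 1,
          s"Cannot have multiple values for a given $configType key, got key $key with" +
            s" values ${values.map(_._2).mkString(", ")}")
    }
    fromPrefix.toMap
  }
}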