Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1005,8 +1005,9 @@ private[spark] class SparkSubmit extends Logging {
case t: Throwable =>
throw findCause(t)
} finally {
if (args.master.startsWith("k8s") && !isShell(args.primaryResource) &&
!isSqlShell(args.mainClass) && !isThriftServer(args.mainClass)) {
if (sparkConf.get(AUTO_STOP_ACTIVE_SPARK_CONTEXTS) && args.master.startsWith("k8s") &&
!isShell(args.primaryResource) && !isSqlShell(args.mainClass) &&
!isThriftServer(args.mainClass)) {
try {
SparkContext.getActive.foreach(_.stop())
} catch {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2020,6 +2020,14 @@ package object config {
.toSequence
.createWithDefault(Nil)

// Kubernetes-only escape hatch: when enabled, SparkSubmit stops any SparkContext
// still active once a non-shell application's main method returns.
private[spark] val AUTO_STOP_ACTIVE_SPARK_CONTEXTS = {
  val description =
    "When set to true, on Kubernetes Spark will stop all the active Spark contexts after " +
      "the finish of non-shell applications' main method."
  ConfigBuilder("spark.kubernetes.submit.autoStopActiveSparkContexts")
    .doc(description)
    .version("3.4.0")
    .booleanConf
    .createWithDefault(false)
}

private[spark] val SCHEDULER_ALLOCATION_FILE =
ConfigBuilder("spark.scheduler.allocation.file")
.version("0.8.1")
Expand Down