diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
index e811b82efa36..3b6a4e648273 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
@@ -460,7 +460,7 @@ private[spark] class MesosClusterScheduler(
     containerInfo
   }
 
-  private def getDriverCommandValue(desc: MesosDriverDescription): String = {
+  private[scheduler] def getDriverCommandValue(desc: MesosDriverDescription): String = {
     val dockerDefined = desc.conf.contains("spark.mesos.executor.docker.image")
     val executorUri = getDriverExecutorURI(desc)
     // Gets the path to run spark-submit, and the path to the Mesos sandbox.
@@ -508,14 +508,14 @@ private[spark] class MesosClusterScheduler(
 
   private def generateCmdOption(desc: MesosDriverDescription, sandboxPath: String): Seq[String] = {
     var options = Seq(
-      "--name", desc.conf.get("spark.app.name"),
+      "--name", shellEscape(desc.conf.get("spark.app.name")),
       "--master", s"mesos://${conf.get("spark.master")}",
       "--driver-cores", desc.cores.toString,
       "--driver-memory", s"${desc.mem}M")
 
     // Assume empty main class means we're running python
     if (!desc.command.mainClass.equals("")) {
-      options ++= Seq("--class", desc.command.mainClass)
+      options ++= Seq("--class", shellEscape(desc.command.mainClass))
     }
 
     desc.conf.getOption("spark.executor.memory").foreach { v =>
@@ -542,7 +542,7 @@ private[spark] class MesosClusterScheduler(
       .filter { case (key, _) => !replicatedOptionsBlacklist.contains(key) }
       .toMap
     (defaultConf ++ driverConf).foreach { case (key, value) =>
-      options ++= Seq("--conf", s""""$key=${shellEscape(value)}"""".stripMargin) }
+      options ++= Seq("--conf", s"$key=${shellEscape(value)}") }
 
     options
   }
diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
index 57c966bb9c89..b03254cf06a5 100644
--- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
+++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
@@ -58,6 +58,12 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
   }
 
   private def testDriverDescription(submissionId: String): MesosDriverDescription = {
+    testDriverDescription(submissionId, Map[String, String]())
+  }
+
+  private def testDriverDescription(
+      submissionId: String,
+      schedulerProps: Map[String, String]): MesosDriverDescription = {
     new MesosDriverDescription(
       "d1",
       "jar",
@@ -65,7 +71,7 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
       1,
       true,
       command,
-      Map[String, String](),
+      schedulerProps,
       submissionId,
       new Date())
   }
@@ -199,6 +205,46 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
     })
   }
 
+  test("escapes spark.app.name correctly") {
+    setScheduler()
+
+    val driverDesc = testDriverDescription("s1", Map[String, String](
+      "spark.app.name" -> "AnApp With $pecialChars.py",
+      "spark.mesos.executor.home" -> "test"
+    ))
+
+    val cmdString = scheduler.getDriverCommandValue(driverDesc)
+    assert(cmdString.contains("AnApp With \\$pecialChars.py"))
+  }
+
+  test("escapes extraJavaOptions correctly") {
+    setScheduler()
+
+    val driverDesc = testDriverDescription("s1", Map[String, String](
+      "spark.app.name" -> "app.py",
+      "spark.mesos.executor.home" -> "test",
+      "spark.driver.extraJavaOptions" -> "-DparamA=\"val1 val2\" -Dpath=$PATH"
+    ))
+
+    val cmdString = scheduler.getDriverCommandValue(driverDesc)
+    assert(cmdString.contains(
+      "spark.driver.extraJavaOptions=\"-DparamA=\\\"val1 val2\\\" -Dpath=\\$PATH"))
+  }
+
+  test("does not escape $MESOS_SANDBOX for --py-files when using a docker image") {
+    setScheduler()
+
+    val driverDesc = testDriverDescription("s1", Map[String, String](
+      "spark.app.name" -> "app.py",
+      "spark.mesos.executor.docker.image" -> "test/spark:01",
+      "spark.submit.pyFiles" -> "http://site.com/extraPythonFile.py"
+    ))
+
+    val cmdString = scheduler.getDriverCommandValue(driverDesc)
+    assert(!cmdString.contains("\\$MESOS_SANDBOX/extraPythonFile.py"))
+    assert(cmdString.contains("$MESOS_SANDBOX/extraPythonFile.py"))
+  }
+
   test("supports spark.mesos.driverEnv.*") {
     setScheduler()
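
Note for reviewers: the patch leans on the shellEscape helper already defined in MesosClusterScheduler.scala. As a rough guide to the quoting behavior the new tests assert, here is a minimal, self-contained sketch; it assumes shellEscape double-quotes any value containing shell metacharacters while backslash-escaping embedded double quotes, backticks, dollar signs, and backslashes, and passes already-quoted or harmless strings through untouched. ShellEscapeSketch is a hypothetical name for illustration, not the verbatim Spark implementation.

// Sketch only: approximates the observable behavior of shellEscape per the tests above.
object ShellEscapeSketch {
  // Values the user already wrapped in matching quotes are passed through as-is.
  private val WrappedInQuotes = """^(".+"|'.+')$""".r
  // Any shell metacharacter in the value triggers quoting.
  private val ShellSpecialChars = """.*([ '<>&|\?\*;!#\\(\)"$`]).*""".r

  def shellEscape(value: String): String = value match {
    case WrappedInQuotes(_) => value       // user-quoted: don't touch it
    case ShellSpecialChars(_) =>
      // Wrap in double quotes, backslash-escaping ", `, $ and \ inside.
      "\"" + value.replaceAll("""(["`\$\\])""", """\\$1""") + "\""
    case _ => value                        // harmless strings pass through
  }

  def main(args: Array[String]): Unit = {
    // Mirrors the "escapes spark.app.name correctly" expectation:
    println(shellEscape("AnApp With $pecialChars.py"))
    // prints: "AnApp With \$pecialChars.py"

    // Mirrors the extraJavaOptions expectation:
    println(shellEscape("-DparamA=\"val1 val2\" -Dpath=$PATH"))
    // prints: "-DparamA=\"val1 val2\" -Dpath=\$PATH"
  }
}

This also explains the third test: --py-files values such as $MESOS_SANDBOX/extraPythonFile.py are built by getDriverCommandValue outside the shellEscape'd options, so the sandbox variable must still be expanded by the shell rather than escaped.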