diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
index a569f3e3b863..b15ae5d64c76 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
@@ -611,14 +611,12 @@ private[spark] class MesosClusterScheduler(
         partitionResources(remainingResources.asJava, "mem", desc.mem)
       offer.remainingResources = finalResources.asJava
 
-      val appName = desc.conf.get("spark.app.name")
-
       val driverLabels = MesosProtoUtils.mesosLabels(desc.conf.get(config.DRIVER_LABELS)
         .getOrElse(""))
 
       TaskInfo.newBuilder()
         .setTaskId(taskId)
-        .setName(s"Driver for ${appName}")
+        .setName(s"Driver for ${desc.name}")
         .setSlaveId(offer.offer.getSlaveId)
         .setCommand(buildDriverCommand(desc))
         .setContainer(getContainerInfo(desc))
diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
index 22de3b1bd808..0362194f90e7 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
@@ -182,19 +182,12 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
 
   private val metricsSource = new MesosCoarseGrainedSchedulerSource(this)
 
-  private val schedulerUuid: String = UUID.randomUUID().toString
-
-  private var nextMesosTaskId = 0
-
   @volatile var appId: String = _
 
   private var schedulerDriver: SchedulerDriver = _
 
-  def newMesosTaskId(): String = {
-    val id = nextMesosTaskId
-    nextMesosTaskId += 1
-    id.toString
-  }
+  private val schedulerUuid: String = UUID.randomUUID().toString
+  private val nextExecutorNumber = new AtomicLong()
 
   override def start() {
     super.start()
@@ -527,7 +520,8 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
           if (canLaunchTask(slaveId, offer.getHostname, resources)) {
             // Create a task
             launchTasks = true
-            val taskId = newMesosTaskId()
+            val taskSeqNumber = nextExecutorNumber.getAndIncrement()
+            val taskId = s"${schedulerUuid}-$taskSeqNumber"
             val offerCPUs = getResource(resources, "cpus").toInt
             val offerGPUs = getResource(resources, "gpus").toInt
             var taskGPUs = executorGpus(offerGPUs)
@@ -540,10 +534,10 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
               partitionTaskResources(resources, taskCPUs, taskMemory, taskGPUs)
 
             val taskBuilder = MesosTaskInfo.newBuilder()
-              .setTaskId(TaskID.newBuilder().setValue( s"$schedulerUuid-$taskId").build())
+              .setTaskId(TaskID.newBuilder().setValue(taskId).build())
               .setSlaveId(offer.getSlaveId)
               .setCommand(createCommand(offer, taskCPUs + extraCoresPerExecutor, taskId))
-              .setName(s"${sc.appName} $taskId")
+              .setName(s"${sc.appName} $taskSeqNumber")
               .setLabels(MesosProtoUtils.mesosLabels(taskLabels))
               .addAllResources(resourcesToUse.asJava)
               .setContainer(getContainerInfo(sc.conf))
diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala
index 8ef1e18f83de..f2da10c7c7f5 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala
@@ -557,6 +557,7 @@ trait MesosSchedulerUtils extends Logging {
    * the same frameworkID. To enforce that only the first driver registers with the configured
    * framework ID, the driver calls this method after the first registration.
    */
+  @deprecated("Multiple Spark Contexts and fine-grained scheduler are deprecated")
   def unsetFrameworkID(sc: SparkContext) {
     sc.conf.remove("spark.mesos.driver.frameworkId")
     System.clearProperty("spark.mesos.driver.frameworkId")