Resolve review
zeotuan committed May 29, 2024
commit 6db64bd8b3b35df1b48a4f6e8300f48ae983592f
@@ -784,6 +784,7 @@ object LogKeys {
case object URL extends LogKey
case object URL2 extends LogKey
case object URLS extends LogKey
+case object EXECUTOR_USER_CLASS_PATH_FIRST extends LogKey
case object USER_ID extends LogKey
case object USER_NAME extends LogKey
case object UUID extends LogKey
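
For context, the registry pattern being extended here: every structured-logging field is declared once as a case object extending the LogKey trait, giving each field a single typed, greppable name instead of ad-hoc strings at call sites. A minimal self-contained sketch (the real definitions live in org.apache.spark.internal and carry more machinery):

    // Sketch of the LogKeys registry pattern used in this diff.
    trait LogKey

    object LogKeys {
      // One case object per structured-log field.
      case object EXECUTOR_USER_CLASS_PATH_FIRST extends LogKey
      case object SESSION_ID extends LogKey
    }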
core/src/main/scala/org/apache/spark/executor/Executor.scala (7 changes: 4 additions & 3 deletions)
@@ -221,7 +221,7 @@ private[spark] class Executor(
if (sessionBasedRoot.isDirectory && sessionBasedRoot.exists()) {
Utils.deleteRecursively(sessionBasedRoot)
}
logInfo(log"Session evicted: ${LogMDC(UUID, state.sessionUUID)}")
logInfo(log"Session evicted: ${LogMDC(SESSION_ID, state.sessionUUID)}")
}
})
.build[String, IsolatedSessionState]
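
For reference, a minimal sketch of the eviction branch above, assuming Spark's Logging trait, the MDC binding, and Utils.deleteRecursively as used in this diff; the SessionJanitor class is made up for illustration:

    import java.io.File

    import org.apache.spark.internal.{Logging, MDC}
    import org.apache.spark.internal.LogKeys.SESSION_ID
    import org.apache.spark.util.Utils

    // Hypothetical helper mirroring the removal-listener body above.
    class SessionJanitor extends Logging {
      def evict(sessionUUID: String, sessionBasedRoot: File): Unit = {
        if (sessionBasedRoot.isDirectory && sessionBasedRoot.exists()) {
          Utils.deleteRecursively(sessionBasedRoot)
        }
        // SESSION_ID names the field for what it is; the generic UUID key
        // said nothing about which UUID was being logged.
        logInfo(log"Session evicted: ${MDC(SESSION_ID, sessionUUID)}")
      }
    }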
@@ -1087,7 +1087,8 @@ private[spark] class Executor(
private def createClassLoader(urls: Array[URL], useStub: Boolean): MutableURLClassLoader = {
logInfo(
log"Starting executor with user classpath" +
log" (userClassPathFirst = ${LogMDC(CLASS_PATH, userClassPathFirst)}): " +
log" (userClassPathFirst =" +
log" ${LogMDC(LogKeys.EXECUTOR_USER_CLASS_PATH_FIRST, userClassPathFirst)}): " +
log"${LogMDC(URLS, urls.mkString("'", ",", "'"))}"
)

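The message is split across several log"" fragments because fragments concatenate with + into a single structured entry, so long lines can wrap without losing any MDC bindings. A rough sketch of the pattern, with the UrlLogger class invented for illustration:

    import java.net.URL

    import org.apache.spark.internal.{Logging, LogKeys, MDC}

    // Hypothetical class showing multi-fragment structured messages.
    class UrlLogger extends Logging {
      def describe(urls: Array[URL], userClassPathFirst: Boolean): Unit = {
        // Adjacent log"" fragments joined with + form one log entry.
        logInfo(
          log"Starting executor with user classpath" +
          log" (userClassPathFirst =" +
          log" ${MDC(LogKeys.EXECUTOR_USER_CLASS_PATH_FIRST, userClassPathFirst)}): " +
          log"${MDC(LogKeys.URLS, urls.mkString("'", ",", "'"))}")
      }
    }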
@@ -1138,7 +1139,7 @@
parent
}
logInfo(log"Created or updated repl class loader ${LogMDC(CLASS_LOADER, classLoader)}" +
log" for ${LogMDC(UUID, sessionUUID)}.")
log" for ${LogMDC(SESSION_ID, sessionUUID)}.")
classLoader
}

core/src/main/scala/org/apache/spark/internal/io/SparkHadoopWriter.scala
@@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.task.{TaskAttemptContextImpl => NewTaskAttemp
import org.apache.spark.{SerializableWritable, SparkConf, SparkException, TaskContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.{Logging, MDC}
-import org.apache.spark.internal.LogKeys.{JOB_ID, TASK_ATTEMPT_ID, TOTAL_TIME}
+import org.apache.spark.internal.LogKeys.{DURATION, JOB_ID, TASK_ATTEMPT_ID}
import org.apache.spark.internal.io.FileCommitProtocol.TaskCommitMessage
import org.apache.spark.rdd.{HadoopRDD, RDD}
import org.apache.spark.util.{SerializableConfiguration, SerializableJobConf, Utils}
@@ -102,7 +102,7 @@ object SparkHadoopWriter extends Logging {
val (_, duration) = Utils
.timeTakenMs { committer.commitJob(jobContext, ret.toImmutableArraySeq) }
logInfo(log"Write Job ${MDC(JOB_ID, jobContext.getJobID)} committed." +
log" Elapsed time: ${MDC(TOTAL_TIME, duration)} ms.")
log" Elapsed time: ${MDC(DURATION, duration)} ms.")
} catch {
case cause: Throwable =>
logError(log"Aborting job ${MDC(JOB_ID, jobContext.getJobID)}.", cause)