diff --git a/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala b/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala
index b8a43a03d8b6..1a018811974c 100644
--- a/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala
+++ b/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala
@@ -31,7 +31,8 @@ object LogKey extends Enumeration {
   val MAX_SIZE = Value
   val MIN_SIZE = Value
   val REMOTE_ADDRESS = Value
-  val POD_ID = Value
+  val KUBERNETES_NAMESPACE = Value
+  val KUBERNETES_POD_NAME = Value
 
   type LogKey = Value
 }
diff --git a/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala b/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala
index 84b9debb2afd..96512129a88c 100644
--- a/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -97,20 +97,26 @@ trait Logging {
   }
 
   implicit class LogStringContext(val sc: StringContext) {
-    def log(args: MDC*): MessageWithContext = {
+    def log(args: Any*): MessageWithContext = {
       val processedParts = sc.parts.iterator
       val sb = new StringBuilder(processedParts.next())
       val context = new java.util.HashMap[String, String]()
 
-      args.foreach { mdc =>
-        sb.append(mdc.value.toString)
-        if (Logging.isStructuredLoggingEnabled) {
-          context.put(mdc.key.toString.toLowerCase(Locale.ROOT), mdc.value.toString)
-        }
+      args.foreach { arg =>
+        arg match {
+          case mdc: MDC =>
+            sb.append(mdc.value.toString)
+            if (Logging.isStructuredLoggingEnabled) {
+              context.put(mdc.key.toString.toLowerCase(Locale.ROOT), mdc.value.toString)
+            }
+          // Non-MDC arguments are appended verbatim and contribute no structured context.
+          case other =>
+            sb.append(other)
+        }
 
         if (processedParts.hasNext) {
           sb.append(processedParts.next())
         }
       }
 
       MessageWithContext(sb.toString(), context)
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala
index f4d80c24d01f..3fb775330666 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala
@@ -533,8 +533,10 @@ class ExecutorPodsAllocator(
         currentTime - creationTime > executorIdleTimeout
       } catch {
         case e: Exception =>
-          logError(log"Cannot get the creationTimestamp of the pod: " +
-            log"${MDC(LogKey.POD_ID, state.pod)}", e)
+          logError(log"Cannot get the creationTimestamp of the pod " +
+            log"${MDC(LogKey.KUBERNETES_POD_NAME, state.pod.getMetadata.getName)} in namespace " +
+            log"${MDC(LogKey.KUBERNETES_NAMESPACE, state.pod.getMetadata.getNamespace)}. " +
+            log"Resource details: ${state.pod}", e)
           true
       }
     }