diff --git a/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala b/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala
index 4776439c2350d..ad08fa0480cc1 100644
--- a/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala
+++ b/common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala
@@ -81,6 +81,7 @@ object LogKeys {
   case object CLASS_LOADER extends LogKey
   case object CLASS_NAME extends LogKey
   case object CLASS_PATH extends LogKey
+  case object CLASS_PATHS extends LogKey
   case object CLAUSES extends LogKey
   case object CLUSTER_CENTROIDS extends LogKey
   case object CLUSTER_ID extends LogKey
@@ -474,7 +475,6 @@ object LogKeys {
   case object PROCESS extends LogKey
   case object PROCESSING_TIME extends LogKey
   case object PRODUCER_ID extends LogKey
-  case object PROPERTY_FILE extends LogKey
   case object PROPERTY_NAME extends LogKey
   case object PROVIDER extends LogKey
   case object PUSHED_FILTERS extends LogKey
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 430d4fdf9f9d5..d4455e76ffb9b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -964,7 +964,7 @@ private[spark] class SparkSubmit extends Logging {
       // sysProps may contain sensitive information, so redact before printing
       logInfo(log"Spark config:\n" +
         log"${MDC(LogKeys.CONFIG, Utils.redact(sparkConf.getAll.toMap).sorted.mkString("\n"))}")
-      logInfo(log"Classpath elements:\n${MDC(LogKeys.CLASS_PATH, childClasspath.mkString("\n"))}")
+      logInfo(log"Classpath elements:\n${MDC(LogKeys.CLASS_PATHS, childClasspath.mkString("\n"))}")
       logInfo("\n")
     }
     assert(!(args.deployMode == "cluster" && args.proxyUser != null && childClasspath.nonEmpty) ||
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index ccd9a1ddb7464..61235a7019070 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -89,7 +89,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   lazy val defaultSparkProperties: HashMap[String, String] = {
     val defaultProperties = new HashMap[String, String]()
     if (verbose) {
-      logInfo(log"Using properties file: ${MDC(PROPERTY_FILE, propertiesFile)}")
+      logInfo(log"Using properties file: ${MDC(PATH, propertiesFile)}")
     }
     Option(propertiesFile).foreach { filename =>
       val properties = Utils.getPropertiesFromFile(filename)
@@ -99,7 +99,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       // Property files may contain sensitive information, so redact before printing
       if (verbose) {
         Utils.redact(properties).foreach { case (k, v) =>
-          logInfo(log"Adding default property: ${MDC(PROPERTY_NAME, k)}=${MDC(VALUE, v)}")
+          logInfo(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
         }
       }
     }
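For context, the pattern these call sites rely on is Spark's structured-logging framework: every `${MDC(...)}` placeholder in a `log"..."` interpolation must reference a `case object` declared in `LogKeys`, so consolidating or renaming a key (here: dropping `PROPERTY_FILE` in favor of the existing `PATH`/`KEY` keys, and adding `CLASS_PATHS`) changes the typed key surface, not just a log string. Below is a minimal, self-contained sketch of that pattern; the `log` interpolator and `MDC` here are simplified stand-ins written for illustration, not Spark's actual implementation:

// Sketch of an MDC-keyed structured-logging pattern, loosely modeled on
// org.apache.spark.internal.{LogKey, LogKeys, MDC}. Stand-in code only.
sealed trait LogKey

object LogKeys {
  case object PATH extends LogKey
  case object KEY extends LogKey
  case object VALUE extends LogKey
  case object CLASS_PATHS extends LogKey
}

// A placeholder carrying both the typed key and the runtime value, so a
// backend can emit (key -> value) pairs as structured fields.
final case class MDC(key: LogKey, value: Any)

object StructuredLogDemo {
  // Toy `log` interpolator: renders values inline for plain-text output;
  // a JSON backend would instead attach each MDC as a named field.
  implicit class LogInterpolator(private val sc: StringContext) extends AnyVal {
    def log(args: MDC*): String =
      sc.s(args.map(_.value.toString): _*) // reuse `s`-interpolation to assemble
  }

  def main(args: Array[String]): Unit = {
    import LogKeys._
    val propertiesFile = "/path/to/spark-defaults.conf" // hypothetical path
    val (k, v) = ("spark.master", "local[*]")
    // Mirrors the call sites in SparkSubmitArguments after this change:
    println(log"Using properties file: ${MDC(PATH, propertiesFile)}")
    println(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
  }
}

Because the keys are `case object`s rather than raw strings, a deleted key such as `PROPERTY_FILE` fails at compile time wherever it is still referenced, which is what keeps the `LogKeys` registry and the call sites in sync.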