From fedd01c031017af4410bf998e3f067e5a9cc2687 Mon Sep 17 00:00:00 2001
From: Cheolsoo Park
Date: Tue, 21 Apr 2015 12:12:22 -0700
Subject: [PATCH 1/2] Ignore non-spark properties with a warning message in all cases

---
 .../spark/deploy/SparkSubmitArguments.scala | 19 ++++++++++++-------
 1 file changed, 12 insertions(+), 7 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index faa8780288ea..c62e570f1c33 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -61,7 +61,16 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var pyFiles: String = null
   var isR: Boolean = false
   var action: SparkSubmitAction = null
-  val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
+  val sparkProperties: HashMap[String, String] = new HashMap[String, String]() {
+    override def put(k: String, v: String): Option[String] = {
+      if (k.startsWith("spark.")) {
+        super.put(k, v)
+      } else {
+        SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
+        Option[String](null)
+      }
+    }
+  }
   var proxyUser: String = null
 
   // Standalone cluster mode only
@@ -77,12 +86,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
     Option(propertiesFile).foreach { filename =>
       Utils.getPropertiesFromFile(filename).foreach { case (k, v) =>
-        if (k.startsWith("spark.")) {
-          defaultProperties(k) = v
-          if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
-        } else {
-          SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
-        }
+        defaultProperties(k) = v
+        if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
       }
     }
     defaultProperties

From 89579502ae6c069de080ab418f761cef4c55b225 Mon Sep 17 00:00:00 2001
From: Cheolsoo Park
Date: Tue, 21 Apr 2015 21:07:42 -0700
Subject: [PATCH 2/2] Add ignoreNonSparkProperties method

---
 .../spark/deploy/SparkSubmitArguments.scala | 25 +++++++++++++++----------
 1 file changed, 15 insertions(+), 10 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index c62e570f1c33..c896842943f2 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -61,16 +61,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var pyFiles: String = null
   var isR: Boolean = false
   var action: SparkSubmitAction = null
-  val sparkProperties: HashMap[String, String] = new HashMap[String, String]() {
-    override def put(k: String, v: String): Option[String] = {
-      if (k.startsWith("spark.")) {
-        super.put(k, v)
-      } else {
-        SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
-        Option[String](null)
-      }
-    }
-  }
+  val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
   var proxyUser: String = null
 
   // Standalone cluster mode only
@@ -102,6 +93,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   }
   // Populate `sparkProperties` map from properties file
   mergeDefaultSparkProperties()
+  // Remove keys that don't start with "spark." from `sparkProperties`.
+  ignoreNonSparkProperties()
   // Use `sparkProperties` map along with env vars to fill in any missing parameters
   loadEnvironmentArguments()
 
@@ -122,6 +115,18 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     }
   }
 
+  /**
+   * Remove keys that don't start with "spark." from `sparkProperties`.
+   */
+  private def ignoreNonSparkProperties(): Unit = {
+    sparkProperties.foreach { case (k, v) =>
+      if (!k.startsWith("spark.")) {
+        sparkProperties -= k
+        SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
+      }
+    }
+  }
+
   /**
    * Load arguments from environment variables, Spark properties etc.
    */