From 62f4d2ffc1af8aa64a945195faa9b2ef74b5ee9b Mon Sep 17 00:00:00 2001
From: Luc Bourlier
Date: Wed, 16 Dec 2015 11:17:30 +0000
Subject: [PATCH] Do not send SPARK_HOME through Spark submit REST interface

SPARK_HOME is usually an invalid location on the remote machine
executing the job. It is picked up by the Mesos support in cluster
mode, and most of the time causes the job to fail.

Fixes SPARK-12345
---
 .../org/apache/spark/deploy/rest/RestSubmissionClient.scala | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
index f0dd667ea1b2..0744c64d5e94 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
@@ -428,8 +428,10 @@ private[spark] object RestSubmissionClient {
    * Filter non-spark environment variables from any environment.
    */
   private[rest] def filterSystemEnvironment(env: Map[String, String]): Map[String, String] = {
-    env.filter { case (k, _) =>
-      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED") || k.startsWith("MESOS_")
+    env.filterKeys { k =>
+      // SPARK_HOME is filtered out because it is usually wrong on the remote machine (SPARK-12345)
+      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
+      k.startsWith("MESOS_")
     }
   }
 }
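
Note (reviewer sketch, not part of the patch): the snippet below inlines the
patched filter to show which variables survive it. The object name, sample
environment, and values are invented for illustration, and it assumes a
Scala 2.10/2.11 toolchain (as Spark used at the time), where Map.filterKeys
still returns a Map.

  object FilterSystemEnvironmentExample {
    // Same predicate as the patched filterSystemEnvironment, inlined so the
    // sketch is self-contained.
    def filterSystemEnvironment(env: Map[String, String]): Map[String, String] = {
      env.filterKeys { k =>
        // SPARK_HOME is dropped: it points at the submitter's install, not the remote one.
        (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
        k.startsWith("MESOS_")
      }
    }

    def main(args: Array[String]): Unit = {
      val env = Map(
        "SPARK_HOME" -> "/opt/spark-local",        // dropped: wrong on the remote machine
        "SPARK_ENV_LOADED" -> "1",                 // dropped: internal bootstrap flag
        "SPARK_DRIVER_MEMORY" -> "2g",             // kept: ordinary SPARK_* setting
        "MESOS_NATIVE_JAVA_LIBRARY" -> "/usr/lib/libmesos.so", // kept: MESOS_* setting
        "PATH" -> "/usr/bin"                       // dropped: unrelated to Spark/Mesos
      )
      // Prints something like (entry order may vary):
      // Map(SPARK_DRIVER_MEMORY -> 2g, MESOS_NATIVE_JAVA_LIBRARY -> /usr/lib/libmesos.so)
      println(filterSystemEnvironment(env))
    }
  }

The change deliberately keeps the rest of the SPARK_*/MESOS_* pass-through
intact; only SPARK_HOME joins SPARK_ENV_LOADED on the exclusion list, so
the remote end resolves its own installation directory instead of
inheriting the submitter's.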