From 62c8552c778637ef4b074b9f27e9a5de0669c259 Mon Sep 17 00:00:00 2001 From: xiaochang-wu Date: Thu, 22 Dec 2016 15:31:40 +0800 Subject: [PATCH] add executor launch prefix support --- conf/spark-env.sh.template | 3 +++ .../org/apache/spark/launcher/WorkerCommandBuilder.scala | 4 ++++ .../org/apache/spark/deploy/yarn/ExecutorRunnable.scala | 6 ++++++ .../org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala | 2 +- 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template index 5c1e876ef9afc..2a91e6bd527fd 100755 --- a/conf/spark-env.sh.template +++ b/conf/spark-env.sh.template @@ -56,6 +56,9 @@ # - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y") # - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y") # - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers +# - SPARK_EXECUTOR_LAUNCH_PREFIX, to set the prefix of executor launch command lines; +# only supported in standalone mode. YARN client mode should instead use +# spark.yarn.appMasterEnv.SPARK_EXECUTOR_LAUNCH_PREFIX # Generic options for the daemons used in the standalone deploy mode # - SPARK_CONF_DIR Alternate conf dir. 
(Default: ${SPARK_HOME}/conf) diff --git a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala index 31b9c5edf003f..f801d7a97c379 100644 --- a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala +++ b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala @@ -41,6 +41,10 @@ private[spark] class WorkerCommandBuilder(sparkHome: String, memoryMb: Int, comm command.javaOpts.foreach(cmd.add) CommandBuilderUtils.addPermGenSizeOpt(cmd) addOptionString(cmd, getenv("SPARK_JAVA_OPTS")) + + val prefix = getenv("SPARK_EXECUTOR_LAUNCH_PREFIX") + if (prefix != null) + cmd.add(0, prefix) cmd } diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala index 868c2edc5a463..0ae5626a446c9 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala @@ -206,7 +206,13 @@ private[yarn] class ExecutorRunnable( }.toSeq YarnSparkHadoopUtil.addOutOfMemoryErrorArgument(javaOpts) + + // Add support for extra executor launch prefix. 
+ val executorLaunchPrefix = (if (sys.env.contains("SPARK_EXECUTOR_LAUNCH_PREFIX")) + sys.env("SPARK_EXECUTOR_LAUNCH_PREFIX") else ""); + val commands = prefixEnv ++ Seq( + executorLaunchPrefix, YarnSparkHadoopUtil.expandEnvironment(Environment.JAVA_HOME) + "/bin/java", "-server") ++ javaOpts ++ diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala index cc53b1b06e94a..defbb87c0f3a3 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala @@ -207,7 +207,7 @@ object YarnSparkHadoopUtil { if (Utils.isWindows) { javaOpts += escapeForShell("-XX:OnOutOfMemoryError=taskkill /F /PID %%%%p") } else { - javaOpts += "-XX:OnOutOfMemoryError='kill %p'" + javaOpts += escapeForShell("-XX:OnOutOfMemoryError='kill %p'") } } }