
Commit 7bfc75b

Backport of SPARK-4267 for branch-1.2: Before passing to YARN, escape arguments in "extraJavaOptions" args, in order to correctly handle cases like -Dfoo="one two three". Also standardize how these args are handled and ensure that individual args are treated as stand-alone args, not one string.
Parent: 64254ee

3 files changed (+17 lines, -12 lines)
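In short: the "spark.driver.extraJavaOptions" / "spark.executor.extraJavaOptions" strings used to be appended to the YARN launch command unsplit and unescaped, so a value such as -Dfoo="one two three" was re-tokenized by the shell on the YARN node into several arguments. With this commit the string is first split into stand-alone arguments (Utils.splitCommandString) and each argument is then shell-escaped (YarnSparkHadoopUtil.escapeForShell). Below is a minimal, self-contained sketch of that idea; splitLikeShell and escapeLikeShell are simplified stand-ins for the Spark helpers, not their actual implementations, and the -Dbar=baz argument is only an illustrative extra.

object ExtraJavaOptionsSketch {

  // Split a command-line string into separate args, honoring double quotes.
  def splitLikeShell(s: String): Seq[String] = {
    val parts = scala.collection.mutable.ArrayBuffer[String]()
    val cur = new StringBuilder
    var inQuotes = false
    s.foreach {
      case '"' => inQuotes = !inQuotes           // quotes group words and are dropped
      case c if c.isWhitespace && !inQuotes =>   // unquoted whitespace ends an arg
        if (cur.nonEmpty) { parts += cur.toString; cur.clear() }
      case c => cur += c
    }
    if (cur.nonEmpty) parts += cur.toString
    parts.toSeq
  }

  // Wrap an arg in single quotes so the shell on the YARN node keeps it as one token.
  def escapeLikeShell(arg: String): String =
    "'" + arg.replace("'", "'\\''") + "'"

  def main(args: Array[String]): Unit = {
    val opts = "-Dfoo=\"one two three\" -Dbar=baz"
    val escaped = splitLikeShell(opts).map(escapeLikeShell)
    println(escaped.mkString(" "))
    // prints: '-Dfoo=one two three' '-Dbar=baz'
  }
}

The quoted value stays one argument per option, which is exactly what the per-argument escaping in the diffs below relies on.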

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala

Lines changed: 4 additions & 2 deletions
@@ -350,9 +350,11 @@ private[spark] trait ClientBase extends Logging {
 
     // Include driver-specific java options if we are launching a driver
     if (isLaunchingDriver) {
-      sparkConf.getOption("spark.driver.extraJavaOptions")
+      val driverOpts = sparkConf.getOption("spark.driver.extraJavaOptions")
         .orElse(sys.env.get("SPARK_JAVA_OPTS"))
-        .foreach(opts => javaOpts += opts)
+      driverOpts.foreach { opts =>
+        javaOpts ++= Utils.splitCommandString(opts).map(YarnSparkHadoopUtil.escapeForShell)
+      }
       val libraryPaths = Seq(sys.props.get("spark.driver.extraLibraryPath"),
         sys.props.get("spark.driver.libraryPath")).flatten
       if (libraryPaths.nonEmpty) {

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala

Lines changed: 9 additions & 8 deletions
@@ -55,14 +55,15 @@ trait ExecutorRunnableUtil extends Logging {
 
     // Set the JVM memory
     val executorMemoryString = executorMemory + "m"
-    javaOpts += "-Xms" + executorMemoryString + " -Xmx" + executorMemoryString + " "
+    javaOpts += "-Xms" + executorMemoryString
+    javaOpts += "-Xmx" + executorMemoryString
 
     // Set extra Java options for the executor, if defined
     sys.props.get("spark.executor.extraJavaOptions").foreach { opts =>
-      javaOpts += opts
+      javaOpts ++= Utils.splitCommandString(opts).map(YarnSparkHadoopUtil.escapeForShell)
     }
     sys.env.get("SPARK_JAVA_OPTS").foreach { opts =>
-      javaOpts += opts
+      javaOpts ++= Utils.splitCommandString(opts).map(YarnSparkHadoopUtil.escapeForShell)
     }
     sys.props.get("spark.executor.extraLibraryPath").foreach { p =>
       prefixEnv = Some(Utils.libraryPathEnvPrefix(Seq(p)))
@@ -96,11 +97,11 @@ trait ExecutorRunnableUtil extends Logging {
         // multi-tennent machines
         // The options are based on
         // http://www.oracle.com/technetwork/java/gc-tuning-5-138395.html#0.0.0.%20When%20to%20Use%20the%20Concurrent%20Low%20Pause%20Collector|outline
-        javaOpts += " -XX:+UseConcMarkSweepGC "
-        javaOpts += " -XX:+CMSIncrementalMode "
-        javaOpts += " -XX:+CMSIncrementalPacing "
-        javaOpts += " -XX:CMSIncrementalDutyCycleMin=0 "
-        javaOpts += " -XX:CMSIncrementalDutyCycle=10 "
+        javaOpts += "-XX:+UseConcMarkSweepGC"
+        javaOpts += "-XX:+CMSIncrementalMode"
+        javaOpts += "-XX:+CMSIncrementalPacing"
+        javaOpts += "-XX:CMSIncrementalDutyCycleMin=0"
+        javaOpts += "-XX:CMSIncrementalDutyCycle=10"
       }
     */
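A note on the ExecutorRunnableUtil change above: the JVM memory settings (and the commented-out CMS flags) used to be packed into padded strings such as "-Xms512m -Xmx512m " and " -XX:+UseConcMarkSweepGC ", relying on later whitespace joining, whereas now each flag is its own element of javaOpts, so it can be escaped individually. A tiny sketch of the difference, assuming an example executor memory of 512 MB:

import scala.collection.mutable.ListBuffer

object ExecutorJavaOptsSketch {
  def main(args: Array[String]): Unit = {
    val executorMemory = 512  // assumed example value, in MB
    val executorMemoryString = executorMemory + "m"

    val javaOpts = ListBuffer[String]()
    javaOpts += "-Xms" + executorMemoryString  // stand-alone element "-Xms512m"
    javaOpts += "-Xmx" + executorMemoryString  // stand-alone element "-Xmx512m"

    // Before this commit the same flags were one padded element, "-Xms512m -Xmx512m ",
    // which per-argument escaping would then wrap as a single (broken) shell token.
    println(javaOpts.mkString(" | "))  // prints: -Xms512m | -Xmx512m
  }
}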

yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala

Lines changed: 4 additions & 2 deletions
@@ -51,6 +51,8 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers wit
   private var oldConf: Map[String, String] = _
 
   override def beforeAll() {
+    super.beforeAll()
+
     tempDir = Utils.createTempDir()
 
     val logConfDir = new File(tempDir, "log4j")
@@ -102,8 +104,8 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers wit
     sys.props += ("spark.executor.instances" -> "1")
     sys.props += ("spark.driver.extraClassPath" -> childClasspath)
     sys.props += ("spark.executor.extraClassPath" -> childClasspath)
-
-    super.beforeAll()
+    sys.props += ("spark.executor.extraJavaOptions" -> "-Dfoo=\"one two three\"")
+    sys.props += ("spark.driver.extraJavaOptions" -> "-Dfoo=\"one two three\"")
   }
 
   override def afterAll() {
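With the two properties added above, every driver and executor launched by YarnClusterSuite's mini YARN cluster receives -Dfoo="one two three" among its extra Java options, so the suite itself exercises the new splitting and escaping (note that super.beforeAll() now runs at the start of the override rather than at the end). A check along these lines inside the launched application would confirm the quoted value survives end to end; the assertion below is hypothetical and not part of this diff:

// Hypothetical assertion inside the application the suite runs
// (not part of this commit): the quoted value must arrive as one
// system property value.
assert(sys.props.get("foo") == Some("one two three"))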
