From add84163ea0403d23dbbda540766d58329a27d76 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Mon, 9 Jun 2014 16:15:24 -0700
Subject: [PATCH 1/7] [SPARK-2778] [yarn] Add yarn integration tests.

This patch adds a couple of currently very simple integration tests
to make sure both client and cluster modes are working. The tests don't
do much yet other than run a simple job, but the plan is to enhance
them after we get the framework in.

The cluster tests are noisy, so redirect all log output to a file like
other tests do. Copying the conf around sucks, but it's less work than
messing with maven/sbt and having to clean up other projects.

Note the tests are only added for yarn-stable. The code compiles against
yarn-alpha, but there are two issues I ran into that I could not
overcome:
- an old netty dependency kept creeping into the classpath and causing
  akka to not work when using sbt; the old netty was correctly suppressed
  under maven.
- MiniYARNCluster kept failing to execute containers because it did not
  create the NM's local dir itself; this is apparently a known behavior,
  but I'm not sure how to work around it.

Neither issue is present with stable Yarn.

Also, these tests are a little slow to run. Apparently Spark doesn't yet
tag tests (so that these could be isolated in a "slow" batch), so this is
something to keep in mind.
---
 pom.xml                                            |  31 +++-
 yarn/pom.xml                                       |   3 +-
 yarn/stable/pom.xml                                |   9 ++
 .../src/test/resources/log4j.properties            |  28 ++++
 .../spark/deploy/yarn/YarnClusterSuite.scala       | 141 ++++++++++++++++++
 5 files changed, 209 insertions(+), 3 deletions(-)
 create mode 100644 yarn/stable/src/test/resources/log4j.properties
 create mode 100644 yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
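Note: on the commit message's point about tagging slow tests — ScalaTest
does support test tags, and a minimal sketch of how suites like this one
could eventually be marked looks like the following. The tag name and
suite are hypothetical, not something this patch defines:

    import org.scalatest.{FunSuite, Tag}

    // Hypothetical tag; not an existing Spark identifier.
    object SlowTest extends Tag("org.apache.spark.SlowTest")

    class TaggedSuite extends FunSuite {
      // Tagged tests can be excluded from a default run with
      // scalatest's "-l org.apache.spark.SlowTest" option.
      test("an expensive integration test", SlowTest) {
        // ... slow test body ...
      }
    }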
diff --git a/pom.xml b/pom.xml
index a5eaea80afd71..0fa71253c1c42 100644
--- a/pom.xml
+++ b/pom.xml
@@ -698,6 +698,35 @@
           </exclusion>
         </exclusions>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-tests</artifactId>
+        <version>${yarn.version}</version>
+        <classifier>tests</classifier>
+        <scope>test</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-yarn-server-web-proxy</artifactId>
@@ -1170,7 +1199,7 @@
       <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
-        <version>3.4.5-mapr-1406</version>
+        <version>3.4.5-mapr-1406</version>
       </dependency>
     </dependencies>

diff --git a/yarn/pom.xml b/yarn/pom.xml
index 3faaf053634d6..4e125f3dc4db2 100644
--- a/yarn/pom.xml
+++ b/yarn/pom.xml
@@ -126,7 +126,6 @@
             <environmentVariables>
              <SPARK_HOME>${basedir}/../..</SPARK_HOME>
-             <SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
            </environmentVariables>
          </configuration>
        </plugin>
@@ -134,7 +133,7 @@
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <resources>
+    <resources>
       <resource>
         <directory>../common/src/main/resources</directory>

diff --git a/yarn/stable/pom.xml b/yarn/stable/pom.xml
index b6c8456d06684..fc14e8eb7a94d 100644
--- a/yarn/stable/pom.xml
+++ b/yarn/stable/pom.xml
@@ -32,4 +32,13 @@
   <packaging>jar</packaging>
   <name>Spark Project YARN Stable API</name>

+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-tests</artifactId>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
 </project>

diff --git a/yarn/stable/src/test/resources/log4j.properties b/yarn/stable/src/test/resources/log4j.properties
new file mode 100644
index 0000000000000..26b73a1b39744
--- /dev/null
+++ b/yarn/stable/src/test/resources/log4j.properties
@@ -0,0 +1,28 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#

+# Set everything to be logged to the file target/unit-tests.log
+log4j.rootCategory=INFO, file
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.append=false
+log4j.appender.file.file=target/unit-tests.log
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n

+# Ignore messages below warning level from Jetty, because it's a bit verbose
+log4j.logger.org.eclipse.jetty=WARN
+org.eclipse.jetty.LEVEL=WARN
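Note: for readers less familiar with log4j 1.2 properties files, the
configuration above is roughly equivalent to the following programmatic
setup. This is a sketch only, assuming log4j 1.2 on the test classpath;
the object name is illustrative and not part of the patch:

    import org.apache.log4j.{FileAppender, Level, Logger, PatternLayout}

    object UnitTestLogging {
      def configure(): Unit = {
        // Root logger at INFO, writing to target/unit-tests.log,
        // truncating the file on each run (append = false).
        val layout = new PatternLayout("%d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n")
        val file = new FileAppender(layout, "target/unit-tests.log", false)
        val root = Logger.getRootLogger
        root.setLevel(Level.INFO)
        root.addAppender(file)
        // Jetty is verbose below WARN, so raise its threshold.
        Logger.getLogger("org.eclipse.jetty").setLevel(Level.WARN)
      }
    }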
diff --git a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
new file mode 100644
index 0000000000000..c8c7967df808f
--- /dev/null
+++ b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.deploy.yarn
+
+import java.io.File
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.HashMap
+
+import com.google.common.base.Charsets
+import com.google.common.io.Files
+import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
+
+import org.apache.hadoop.yarn.conf.YarnConfiguration
+import org.apache.hadoop.yarn.server.MiniYARNCluster
+
+import org.apache.spark.{Logging, SparkConf, SparkContext}
+import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.util.Utils
+
+class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
+
+  private val oldConf = new HashMap[String, String]()
+  private var yarnCluster: MiniYARNCluster = _
+  private var tempDir: File = _
+  private var fakeSparkJar: File = _
+
+  override def beforeAll() {
+    tempDir = Utils.createTempDir()
+
+    yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1, 1, false)
+    yarnCluster.init(new YarnConfiguration())
+    yarnCluster.start()
+
+    val sysProps = sys.props.map { case (k, v) => (k, v) }
+    sysProps.foreach { case (k, v) =>
+      if (k.startsWith("spark.")) {
+        oldConf += (k -> v)
+        sys.props -= k
+      }
+    }
+
+    yarnCluster.getConfig().foreach { e =>
+      sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
+    }
+
+    fakeSparkJar = File.createTempFile("sparkJar", null, tempDir)
+    sys.props += ("spark.yarn.jar" -> ("local:" + fakeSparkJar.getAbsolutePath()))
+    sys.props += ("spark.executor.instances" -> "1")
+    sys.props += ("spark.driver.extraClassPath" -> sys.props("java.class.path"))
+    sys.props += ("spark.executor.extraClassPath" -> sys.props("java.class.path"))
+
+    super.beforeAll()
+  }
+
+  override def afterAll() {
+    yarnCluster.stop()
+
+    val sysProps = sys.props.map { case (k, v) => (k, v) }
+    sysProps.foreach { case (k, v) =>
+      if (k.startsWith("spark.")) {
+        sys.props -= k
+      }
+    }
+
+    oldConf.foreach { case (k, v) => sys.props += (k -> v) }
+
+    super.afterAll()
+  }
+
+  test("run Spark in yarn-client mode") {
+    val result = File.createTempFile("result", null, tempDir)
+    YarnClusterDriver.main(Array("yarn-client", result.getAbsolutePath()))
+    checkResult(result)
+  }
+
+  test("run Spark in yarn-cluster mode") {
+    val main = YarnClusterDriver.getClass.getName().stripSuffix("$")
+    val result = File.createTempFile("result", null, tempDir)
+
+    // The Client object will call System.exit() after the job is done, and we don't want
+    // that because it messes up the scalatest monitoring. So replicate some of what main()
+    // does here.
+    val args = Array("--class", main,
+      "--jar", "file:" + fakeSparkJar.getAbsolutePath(),
+      "--arg", "yarn-cluster",
+      "--arg", result.getAbsolutePath(),
+      "--num-executors", "4")
+    val sparkConf = new SparkConf()
+    val yarnConf = SparkHadoopUtil.get.newConfiguration(sparkConf)
+    val clientArgs = new ClientArguments(args, sparkConf)
+    new Client(clientArgs, yarnConf, sparkConf).run()
+    checkResult(result)
+  }
+
+  /**
+   * This is a workaround for an issue with yarn-cluster mode: the Client class will not provide
+   * any sort of error when the job process finishes successfully, but the job itself fails. So
+   * the tests enforce that something is written to a file after everything is ok to indicate
+   * that the job succeeded.
+   */
+  private def checkResult(result: File) = {
+    val resultString = Files.toString(result, Charsets.UTF_8)
+    resultString should be ("success")
+  }
+
+}
+
+private object YarnClusterDriver extends Logging with Matchers {
+
+  def main(args: Array[String]) = {
+    val sc = new SparkContext(new SparkConf().setMaster(args(0))
+      .setAppName("yarn \"test app\" 'with quotes'"))
+    val status = new File(args(1))
+    var result = "failure"
+    try {
+      val data = sc.parallelize(1 to 4).map(i => i).collect().toSet
+      data should be (Set(1, 2, 3, 4))
+      result = "success"
+    } finally {
+      sc.stop()
+      Files.write(result, status, Charsets.UTF_8)
+    }
+  }
+
+}
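Note: one detail worth calling out in the cluster-mode test above is
YarnClusterDriver.getClass.getName().stripSuffix("$"). A Scala object
compiles to a JVM class whose runtime name carries a trailing "$", while
the name a launcher must be given (the --class argument) is the one
without it. A tiny standalone demonstration, with a hypothetical object
name:

    object NameDemo {
      def main(args: Array[String]): Unit = {
        println(getClass.getName)                  // prints "NameDemo$"
        println(getClass.getName.stripSuffix("$")) // "NameDemo", what --class needs
      }
    }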
From 68fbbbfc03d9d18eec58c1a6dad058014157e9da Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Mon, 8 Sep 2014 17:17:27 -0700
Subject: [PATCH 2/7] Use older constructor available in older Hadoop releases.

---
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index c8c7967df808f..81019cbd955f0 100644
--- a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -43,7 +43,7 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
   override def beforeAll() {
     tempDir = Utils.createTempDir()

-    yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1, 1, false)
+    yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1)
     yarnCluster.init(new YarnConfiguration())
     yarnCluster.start()
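Note: the constructor swap above matters because the six-argument
MiniYARNCluster overload used initially only exists in newer Hadoop
releases, while the four-argument form — test name, number of
NodeManagers, local dirs, log dirs — is available across the versions
Spark builds against. A minimal standalone usage sketch, assuming
hadoop-yarn-server-tests (classifier "tests") is on the classpath; the
object name is hypothetical:

    import org.apache.hadoop.yarn.conf.YarnConfiguration
    import org.apache.hadoop.yarn.server.MiniYARNCluster

    object MiniClusterDemo {
      def main(args: Array[String]): Unit = {
        // Args: test name, NodeManagers, local dirs and log dirs per NM.
        val cluster = new MiniYARNCluster("MiniClusterDemo", 1, 1, 1)
        cluster.init(new YarnConfiguration())
        cluster.start()
        try {
          // Ports are picked dynamically, so read the effective RM
          // address back out of the cluster's configuration.
          println(cluster.getConfig().get(YarnConfiguration.RM_ADDRESS))
        } finally {
          cluster.stop()
        }
      }
    }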
From f01517c2916e1041086722916f50747bbb596134 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Mon, 15 Sep 2014 10:51:14 -0700
Subject: [PATCH 3/7] Review feedback.

---
 .../spark/deploy/yarn/YarnClusterSuite.scala | 18 +++++-------------
 1 file changed, 5 insertions(+), 13 deletions(-)

diff --git a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 81019cbd955f0..f0b8b9276d795 100644
--- a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.deploy.yarn
 import java.io.File

 import scala.collection.JavaConversions._
-import scala.collection.mutable.HashMap

 import com.google.common.base.Charsets
 import com.google.common.io.Files
@@ -35,7 +34,7 @@ import org.apache.spark.util.Utils

 class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {

-  private val oldConf = new HashMap[String, String]()
+  private var oldConf: Map[String, String] = _
   private var yarnCluster: MiniYARNCluster = _
   private var tempDir: File = _
   private var fakeSparkJar: File = _
@@ -47,14 +46,7 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
     yarnCluster.init(new YarnConfiguration())
     yarnCluster.start()

-    val sysProps = sys.props.map { case (k, v) => (k, v) }
-    sysProps.foreach { case (k, v) =>
-      if (k.startsWith("spark.")) {
-        oldConf += (k -> v)
-        sys.props -= k
-      }
-    }
-
+    oldConf = sys.props.toMap
     yarnCluster.getConfig().foreach { e =>
       sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
     }
@@ -78,7 +70,7 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
       }
     }

-    oldConf.foreach { case (k, v) => sys.props += (k -> v) }
+    sys.props ++= oldConf

     super.afterAll()
   }
@@ -100,7 +92,7 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
       "--jar", "file:" + fakeSparkJar.getAbsolutePath(),
       "--arg", "yarn-cluster",
       "--arg", result.getAbsolutePath(),
-      "--num-executors", "4")
+      "--num-executors", "1")
     val sparkConf = new SparkConf()
     val yarnConf = SparkHadoopUtil.get.newConfiguration(sparkConf)
     val clientArgs = new ClientArguments(args, sparkConf)
@@ -125,7 +117,7 @@ private object YarnClusterDriver extends Logging with Matchers {

   def main(args: Array[String]) = {
     val sc = new SparkContext(new SparkConf().setMaster(args(0))
-      .setAppName("yarn \"test app\" 'with quotes'"))
+      .setAppName("yarn \"test app\" 'with quotes' and \\back\\slashes and $dollarSigns"))
     val status = new File(args(1))
     var result = "failure"
     try {
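Note: the oddly quoted app name added above is deliberate. The name
rides through the YARN container launch command line, so it exercises
the backend's shell escaping of quotes, backslashes and dollar signs.
For illustration, an escaping routine in the spirit of what the YARN
backend has to do — a hedged sketch, not Spark's actual code; names are
illustrative:

    object ShellEscapeDemo {
      // Wrap the argument in single quotes, escaping the characters the
      // shell would otherwise interpret inside the launch script.
      def escapeForShell(arg: String): String = {
        val escaped = new StringBuilder("'")
        arg.foreach {
          case '$'  => escaped.append("\\$")
          case '"'  => escaped.append("\\\"")
          case '\'' => escaped.append("'\\''")
          case c    => escaped.append(c)
        }
        escaped.append("'").toString()
      }

      def main(args: Array[String]): Unit = {
        println(escapeForShell("yarn \"test app\" 'with quotes' and \\back\\slashes and $dollarSigns"))
      }
    }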
From 67f5b02d6a3866685194e6fede43bafde35375cb Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Tue, 23 Sep 2014 14:21:13 -0700
Subject: [PATCH 4/7] Review feedback.

---
 .../spark/deploy/yarn/YarnClusterSuite.scala | 26 +++++++++----------
 1 file changed, 12 insertions(+), 14 deletions(-)

diff --git a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index f0b8b9276d795..f9abb38537efa 100644
--- a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -34,7 +34,6 @@ import org.apache.spark.util.Utils

 class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {

-  private var oldConf: Map[String, String] = _
   private var yarnCluster: MiniYARNCluster = _
   private var tempDir: File = _
   private var fakeSparkJar: File = _
@@ -45,8 +44,6 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
     yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1)
     yarnCluster.init(new YarnConfiguration())
     yarnCluster.start()
-
-    oldConf = sys.props.toMap
     yarnCluster.getConfig().foreach { e =>
       sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
     }
@@ -62,16 +59,7 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {

   override def afterAll() {
     yarnCluster.stop()
-
-    val sysProps = sys.props.map { case (k, v) => (k, v) }
-    sysProps.foreach { case (k, v) =>
-      if (k.startsWith("spark.")) {
-        sys.props -= k
-      }
-    }
-
-    sys.props ++= oldConf
-
+    sys.props.retain { case (k, v) => !k.startsWith("spark.") }
     super.afterAll()
   }

@@ -116,12 +104,22 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
 private object YarnClusterDriver extends Logging with Matchers {

   def main(args: Array[String]) = {
+    if (args.length != 2) {
+      System.err.println(
+        s"""
+        |Invalid command line: ${args.mkString(" ")}
+        |
+        |Usage: YarnClusterDriver [master] [result file]
+        """.stripMargin)
+      System.exit(1)
+    }
+
     val sc = new SparkContext(new SparkConf().setMaster(args(0))
       .setAppName("yarn \"test app\" 'with quotes' and \\back\\slashes and $dollarSigns"))
     val status = new File(args(1))
     var result = "failure"
     try {
-      val data = sc.parallelize(1 to 4).map(i => i).collect().toSet
+      val data = sc.parallelize(1 to 4, 4).collect().toSet
       data should be (Set(1, 2, 3, 4))
       result = "success"
     } finally {

From ec73f17f79d692377d3be8faa5cac9256f55ff3c Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Tue, 23 Sep 2014 15:12:57 -0700
Subject: [PATCH 5/7] More review feedback.

---
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index f9abb38537efa..459fea90267d2 100644
--- a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -37,10 +37,13 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
   private var yarnCluster: MiniYARNCluster = _
   private var tempDir: File = _
   private var fakeSparkJar: File = _
+  private var oldConf: Map[String, String] = _

   override def beforeAll() {
     tempDir = Utils.createTempDir()

+    oldConf = sys.props.filter { case (k, v) => k.startsWith("spark.") }.toMap
+
     yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1)
     yarnCluster.init(new YarnConfiguration())
     yarnCluster.start()
@@ -60,6 +63,7 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
   override def afterAll() {
     yarnCluster.stop()
     sys.props.retain { case (k, v) => !k.startsWith("spark.") }
+    sys.props ++= oldConf
     super.afterAll()
   }
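Note: patches 3 through 5 converge on a save/filter/restore discipline
for spark.* system properties — snapshot them before the suite runs,
drop whatever the tests set, then put the originals back. The same idea
expressed as a reusable loan-pattern helper; this helper is hypothetical
(the suite implements it inline in beforeAll()/afterAll()):

    object SysPropsFixture {
      // Run `body` with `extra` added to the JVM system properties, then
      // remove every spark.* key the body may have introduced and restore
      // the saved originals.
      def withSparkSysProps[T](extra: Map[String, String])(body: => T): T = {
        val saved = sys.props.filter { case (k, _) => k.startsWith("spark.") }.toMap
        sys.props ++= extra
        try {
          body
        } finally {
          sys.props.retain { case (k, _) => !k.startsWith("spark.") }
          sys.props ++= saved
        }
      }
    }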
From 5c2b56fc1947f973525abd0f9aa7cd77267c8fb3 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Tue, 23 Sep 2014 16:19:46 -0700
Subject: [PATCH 6/7] Use custom log4j conf for Yarn containers.

---
 .../spark/deploy/yarn/YarnClusterSuite.scala | 23 +++++++++++++++++--
 1 file changed, 21 insertions(+), 2 deletions(-)

diff --git a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 459fea90267d2..857a4447dd738 100644
--- a/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -34,6 +34,16 @@ import org.apache.spark.util.Utils

 class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {

+  // log4j configuration for the Yarn containers, so that their output is collected
+  // by Yarn instead of trying to overwrite unit-tests.log.
+  private val LOG4J_CONF = """
+    |log4j.rootCategory=DEBUG, console
+    |log4j.appender.console=org.apache.log4j.ConsoleAppender
+    |log4j.appender.console.target=System.err
+    |log4j.appender.console.layout=org.apache.log4j.PatternLayout
+    |log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+    """.stripMargin
+
   private var yarnCluster: MiniYARNCluster = _
   private var tempDir: File = _
   private var fakeSparkJar: File = _
@@ -42,6 +52,15 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
   override def beforeAll() {
     tempDir = Utils.createTempDir()

+    val logConfDir = new File(tempDir, "log4j")
+    logConfDir.mkdir()
+
+    val logConfFile = new File(logConfDir, "log4j.properties")
+    Files.write(LOG4J_CONF, logConfFile, Charsets.UTF_8)
+
+    val childClasspath = logConfDir.getAbsolutePath() + File.pathSeparator +
+      sys.props("java.class.path")
+
     oldConf = sys.props.filter { case (k, v) => k.startsWith("spark.") }.toMap

     yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1)
@@ -54,8 +73,8 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
     fakeSparkJar = File.createTempFile("sparkJar", null, tempDir)
     sys.props += ("spark.yarn.jar" -> ("local:" + fakeSparkJar.getAbsolutePath()))
     sys.props += ("spark.executor.instances" -> "1")
-    sys.props += ("spark.driver.extraClassPath" -> sys.props("java.class.path"))
-    sys.props += ("spark.executor.extraClassPath" -> sys.props("java.class.path"))
+    sys.props += ("spark.driver.extraClassPath" -> childClasspath)
+    sys.props += ("spark.executor.extraClassPath" -> childClasspath)

     super.beforeAll()
   }
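Note: the container-side config above logs to stderr so that YARN's own
log collection picks the output up. The companion mechanism — whose flag
is fixed in the next patch — is a JVM property that exposes the container
log directory to log4j. A sketch of how the two fit together; the
appender path in the comment is illustrative, not from this series:

    import org.apache.hadoop.yarn.api.ApplicationConstants

    object ContainerLogDirDemo {
      def main(args: Array[String]): Unit = {
        // YARN substitutes the real container log directory for <LOG_DIR>
        // when it builds the container launch command.
        val flag = "-Dspark.yarn.app.container.log.dir=" +
          ApplicationConstants.LOG_DIR_EXPANSION_VAR
        println(flag) // -Dspark.yarn.app.container.log.dir=<LOG_DIR>

        // A container log4j.properties could then route a file appender
        // there, e.g.:
        //   log4j.appender.file.file=${spark.yarn.app.container.log.dir}/spark.log
      }
    }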
From 6d5b84e8b5987683591d8c07b3ff8557d9581871 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Wed, 24 Sep 2014 10:41:06 -0700
Subject: [PATCH 7/7] Fix wrong system property being set.

The bug was introduced by the fix for SPARK-2668: a stray equals sign
turned the flag into "-D=spark.yarn.app.container.log.dir=...", creating
a bad system property that the Jetty initialization code was tripping on.

Also fixed a "MatchError" that could be hit in ApplicationMaster.
---
 .../apache/spark/deploy/yarn/ApplicationMaster.scala | 10 +++++-----
 .../org/apache/spark/deploy/yarn/ClientBase.scala    |  2 +-
 .../spark/deploy/yarn/ExecutorRunnableUtil.scala     |  2 +-
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 9050808157257..b51daeb437516 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -401,17 +401,17 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments,
         // it has an uncaught exception thrown out. It needs a shutdown hook to set SUCCEEDED.
         status = FinalApplicationStatus.SUCCEEDED
       } catch {
-        case e: InvocationTargetException => {
+        case e: InvocationTargetException =>
           e.getCause match {
-            case _: InterruptedException => {
+            case _: InterruptedException =>
               // Reporter thread can interrupt to stop user class
-            }
+
+            case e => throw e
           }
-        }
       } finally {
         logDebug("Finishing main")
+        finalStatus = status
       }
-      finalStatus = status
     }
   }
   userClassThread.setName("Driver")

diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 4870b0cb3ddaf..1cf19c198509c 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -348,7 +348,7 @@ private[spark] trait ClientBase extends Logging {
     }

     // For log4j configuration to reference
-    javaOpts += "-D=spark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR
+    javaOpts += ("-Dspark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR)

     val userClass =
       if (args.userClass != null) {

diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
index bbbf615510762..d7a7175d5e578 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
@@ -98,7 +98,7 @@ trait ExecutorRunnableUtil extends Logging {
     */

     // For log4j configuration to reference
-    javaOpts += "-D=spark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR
+    javaOpts += ("-Dspark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR)

     val commands = Seq(Environment.JAVA_HOME.$() + "/bin/java",
       "-server",