Skip to content

Commit 5b0ba8e

Browse files
committed
Don't ship executor envs
1 parent 84cc5e5 commit 5b0ba8e

File tree

3 files changed

+5
-13
lines changed

3 files changed

+5
-13
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -248,10 +248,7 @@ class SparkContext(config: SparkConf) extends Logging {
248248

249249
// Environment variables to pass to our executors
250250
private[spark] val executorEnvs = HashMap[String, String]()
251-
for (key <- Seq("SPARK_CLASSPATH", "SPARK_LIBRARY_PATH", "SPARK_JAVA_OPTS");
252-
value <- Option(System.getenv(key))) {
253-
executorEnvs(key) = value
254-
}
251+
255252
// Convert java options to env vars as a work around
256253
// since we can't set env vars directly in sbt.
257254
for { (envKey, propKey) <- Seq(("SPARK_HOME", "spark.home"), ("SPARK_TESTING", "spark.testing"))

core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -46,12 +46,6 @@ object CommandUtils extends Logging {
4646
* the way the JAVA_OPTS are assembled there.
4747
*/
4848
def buildJavaOpts(command: Command, memory: Int, sparkHome: String): Seq[String] = {
49-
val libraryOpts = getEnv("SPARK_LIBRARY_PATH", command)
50-
.map(p => List("-Djava.library.path=" + p))
51-
.getOrElse(Nil)
52-
val workerLocalOpts = Option(getenv("SPARK_JAVA_OPTS"))
53-
.map(Utils.splitCommandString).getOrElse(Nil)
54-
val userOpts = getEnv("SPARK_JAVA_OPTS", command).map(Utils.splitCommandString).getOrElse(Nil)
5549
val memoryOpts = Seq(s"-Xms${memory}M", s"-Xmx${memory}M")
5650

5751
// Figure out our classpath with the external compute-classpath script
@@ -60,7 +54,7 @@ object CommandUtils extends Logging {
6054
Seq(sparkHome + "/bin/compute-classpath" + ext),
6155
extraEnvironment=command.environment)
6256

63-
Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts
57+
Seq("-cp", classPath) ++ memoryOpts
6458
}
6559

6660
/** Spawn a thread that will redirect a given stream to a file */

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,9 +44,10 @@ private[spark] class SparkDeploySchedulerBackend(
4444
val driverUrl = "akka.tcp://spark@%s:%s/user/%s".format(
4545
conf.get("spark.driver.host"), conf.get("spark.driver.port"),
4646
CoarseGrainedSchedulerBackend.ACTOR_NAME)
47-
val extraOpts = sc.conf.get("spark.executor.extraJavaOptions", "null")
48-
val args = Seq(extraOpts, driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}",
47+
val args = sc.conf.get("spark.executor.extraJavaOptions").split(" ") ++
48+
Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}",
4949
"{{CORES}}", "{{WORKER_URL}}")
50+
5051
val command = Command(
5152
"org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs)
5253
val sparkHome = sc.getSparkHome()

0 commit comments

Comments (0)