Skip to content

Commit d0f20db

Browse files
committed
Don't pass empty library paths, classpath, java opts etc.
This clarifies the default order of the different ways to set the configs. In particular, we no longer set an empty -Djava.library.path if no library paths are given.
1 parent a78cb26 commit d0f20db

File tree

2 files changed

+16
-9
lines changed

2 files changed

+16
-9
lines changed

conf/spark-defaults.conf.template

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,4 +6,5 @@
66
# spark.eventLog.enabled true
77
# spark.eventLog.dir hdfs://namenode:8021/directory
88
# spark.serializer org.apache.spark.serializer.KryoSerializer
9+
# spark.driver.memory 5g
910
# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"

core/src/main/scala/org/apache/spark/deploy/SparkSubmitDriverBootstrapper.scala

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -58,39 +58,45 @@ private[spark] object SparkSubmitDriverBootstrapper {
5858
assume(defaultDriverMemory != null, "OUR_JAVA_MEM must be set")
5959
assume(deployMode == "client", "SPARK_SUBMIT_DEPLOY_MODE must be \"client\"!")
6060
assume(propertiesFile != null, "SPARK_SUBMIT_PROPERTIES_FILE must be set")
61-
assume(bootstrapDriver != null, "SPARK_SUBMIT_BOOTSTRAP_DRIVER must be set!")
61+
assume(bootstrapDriver != null, "SPARK_SUBMIT_BOOTSTRAP_DRIVER must be set")
6262

6363
// Parse the properties file for the equivalent spark.driver.* configs
6464
val properties = SparkSubmitArguments.getPropertiesFromFile(new File(propertiesFile)).toMap
65-
val confDriverMemory = properties.get("spark.driver.memory").getOrElse(defaultDriverMemory)
66-
val confLibraryPath = properties.get("spark.driver.extraLibraryPath").getOrElse("")
67-
val confClasspath = properties.get("spark.driver.extraClassPath").getOrElse("")
68-
val confJavaOpts = properties.get("spark.driver.extraJavaOptions").getOrElse("")
65+
val confDriverMemory = properties.get("spark.driver.memory")
66+
val confLibraryPath = properties.get("spark.driver.extraLibraryPath")
67+
val confClasspath = properties.get("spark.driver.extraClassPath")
68+
val confJavaOpts = properties.get("spark.driver.extraJavaOptions")
6969

7070
// Favor Spark submit arguments over the equivalent configs in the properties file.
7171
// Note that we do not actually use the Spark submit values for library path, classpath,
7272
// and Java opts here, because we have already captured them in Bash.
73-
val newDriverMemory = submitDriverMemory.getOrElse(confDriverMemory)
73+
74+
val newDriverMemory = submitDriverMemory
75+
.orElse(confDriverMemory)
76+
.getOrElse(defaultDriverMemory)
77+
7478
val newLibraryPath =
7579
if (submitLibraryPath.isDefined) {
7680
// SPARK_SUBMIT_LIBRARY_PATH is already captured in JAVA_OPTS
7781
""
7882
} else {
79-
"-Djava.library.path=" + confLibraryPath
83+
confLibraryPath.map("-Djava.library.path=" + _).getOrElse("")
8084
}
85+
8186
val newClasspath =
8287
if (submitClasspath.isDefined) {
8388
// SPARK_SUBMIT_CLASSPATH is already captured in CLASSPATH
8489
classpath
8590
} else {
86-
classpath + sys.props("path.separator") + confClasspath
91+
classpath + confClasspath.map(sys.props("path.separator") + _).getOrElse("")
8792
}
93+
8894
val newJavaOpts =
8995
if (submitJavaOpts.isDefined) {
9096
// SPARK_SUBMIT_OPTS is already captured in JAVA_OPTS
9197
javaOpts
9298
} else {
93-
javaOpts + " " + confJavaOpts
99+
javaOpts + confJavaOpts.map(" " + _).getOrElse("")
94100
}
95101

96102
// Build up command

0 commit comments

Comments (0)