@@ -52,8 +52,6 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     }
   }
 
-  validateSettings()
-
   /** Set a configuration variable. */
   def set(key: String, value: String): SparkConf = {
     if (key == null) {
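
With this hunk the constructor no longer validates eagerly: validateSettings() becomes private[spark] and mutating (second hunk below), so it is presumably invoked once by the caller, e.g. SparkContext, after the conf is fully populated. A minimal caller-side sketch under that assumption (not part of this diff; the method is only reachable from the org.apache.spark package):

  // Hypothetical call site: validate once, after all settings are applied,
  // so the deprecated-to-supported conversion happens exactly once.
  val conf = new SparkConf()
    .setAppName("demo")
    .set("spark.executor.memory", "2g")
  conf.validateSettings()  // throws on illegal settings, warns on deprecated ones
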
@@ -210,25 +208,79 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     new SparkConf(false).setAll(settings)
   }
 
-  /** Checks for illegal or deprecated config settings. Throws an exception for the former. */
-  private def validateSettings() {
+  /** Checks for illegal or deprecated config settings. Throws an exception for the former. Not
+   * idempotent - may mutate this conf object to convert deprecated settings to supported ones. */
+  private[spark] def validateSettings() {
     if (settings.contains("spark.local.dir")) {
       val msg = "In Spark 1.0 and later spark.local.dir will be overridden by the value set by " +
         "the cluster manager (via SPARK_LOCAL_DIRS in mesos/standalone and LOCAL_DIRS in YARN)."
       logWarning(msg)
     }
+
     val executorOptsKey = "spark.executor.extraJavaOptions"
+    val executorClasspathKey = "spark.executor.extraClassPath"
+    val driverOptsKey = "spark.driver.extraJavaOptions"
+    val driverClassPathKey = "spark.driver.extraClassPath"
+
+    // Validate spark.executor.extraJavaOptions
     settings.get(executorOptsKey).map { javaOpts =>
       if (javaOpts.contains("-Dspark")) {
         val msg = s"$executorOptsKey is not allowed to set Spark options. Was '$javaOpts'"
         throw new Exception(msg)
       }
       if (javaOpts.contains("-Xmx") || javaOpts.contains("-Xms")) {
-        val msg = s"$executorOptsKey is not allowed to alter memory settings (was '$javaOpts'). Use " +
-          "spark.executor.memory instead."
+        val msg = s"$executorOptsKey is not allowed to alter memory settings (was '$javaOpts'). " +
+          "Use spark.executor.memory instead."
         throw new Exception(msg)
       }
     }
+
+    // Check for legacy configs
+    sys.env.get("SPARK_JAVA_OPTS").foreach { value =>
+      val error =
+        s"""
+          |SPARK_JAVA_OPTS was detected (set to '$value').
+          |This has undefined behavior when running on a cluster and is deprecated in Spark 1.0+.
+          |
+          |Please instead use:
+          | - ./spark-submit with conf/spark-defaults.conf to set properties for an application
+          | - ./spark-submit with --driver-java-options to set -X options for a driver
+          | - spark.executor.extraJavaOptions to set -X options for executors
+          | - SPARK_DAEMON_OPTS to set java options for standalone daemons (i.e. master, worker)
+        """.stripMargin
+      logError(error)
+
+      for (key <- Seq(executorOptsKey, driverOptsKey)) {
+        if (getOption(key).isDefined) {
+          throw new SparkException(s"Found both $key and SPARK_JAVA_OPTS. Use only the former.")
+        } else {
+          logWarning(s"Setting '$key' to '$value' as a work-around.")
+          set(key, value)
+        }
+      }
+    }
+
+    sys.env.get("SPARK_CLASSPATH").foreach { value =>
+      val error =
+        s"""
+          |SPARK_CLASSPATH was detected (set to '$value').
+          |This has undefined behavior when running on a cluster and is deprecated in Spark 1.0+.
+          |
+          |Please instead use:
+          | - ./spark-submit with --driver-class-path to augment the driver classpath
+          | - spark.executor.extraClassPath to augment the executor classpath
272+ """ .stripMargin
273+ logError(error)
274+
275+ for (key <- Seq (executorClasspathKey, driverClassPathKey)) {
276+ if (getOption(key).isDefined) {
277+ throw new SparkException (s " Found both $key and SPARK_CLASSPATH. Use only the former. " )
278+ } else {
279+ logWarning(s " Setting ' $key' to ' $value' as a work-around. " )
280+ set(key, value)
281+ }
282+ }
283+ }
232284 }
233285
234286 /**
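
For illustration, a self-contained sketch that mirrors the legacy-variable conversion added above, with a plain mutable Map standing in for SparkConf (the object and method names here are hypothetical, not part of this commit):

  import scala.collection.mutable

  object LegacyEnvDemo {
    val conf = mutable.Map[String, String]()

    // Mirror of the foreach blocks in validateSettings(): fail if a new-style
    // key is already set alongside the legacy variable, otherwise copy the
    // legacy value forward as a work-around.
    def convert(envVar: String, keys: Seq[String]): Unit = {
      sys.env.get(envVar).foreach { value =>
        for (key <- keys) {
          if (conf.contains(key)) {
            sys.error(s"Found both $key and $envVar. Use only the former.")
          } else {
            conf(key) = value
          }
        }
      }
    }

    def main(args: Array[String]): Unit = {
      convert("SPARK_JAVA_OPTS",
        Seq("spark.executor.extraJavaOptions", "spark.driver.extraJavaOptions"))
      convert("SPARK_CLASSPATH",
        Seq("spark.executor.extraClassPath", "spark.driver.extraClassPath"))
      println(conf)
    }
  }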