@@ -17,6 +17,8 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
+rem Any changes to this file must be reflected in SparkSubmitDriverBootstrapper.scala!
+
 setlocal enabledelayedexpansion
 
 set SCALA_VERSION=2.10
@@ -38,7 +40,7 @@ if not "x%1"=="x" goto arg_given
 
 if not "x%SPARK_MEM%"=="x" (
   echo Warning: SPARK_MEM is deprecated, please use a more specific config option
-  echo e.g., spark.executor.memory or SPARK_DRIVER_MEMORY.
+  echo e.g., spark.executor.memory or spark.driver.memory.
 )
 
 rem Use SPARK_MEM or 512m as the default memory, to be overridden by specific options
@@ -67,18 +69,26 @@ rem Executors use SPARK_JAVA_OPTS + SPARK_EXECUTOR_MEMORY.
   set OUR_JAVA_OPTS=%SPARK_JAVA_OPTS% %SPARK_EXECUTOR_OPTS%
   if not "x%SPARK_EXECUTOR_MEMORY%"=="x" set OUR_JAVA_MEM=%SPARK_EXECUTOR_MEMORY%
 
-rem All drivers use SPARK_JAVA_OPTS + SPARK_DRIVER_MEMORY. The repl also uses SPARK_REPL_OPTS.
-) else if "%1"=="org.apache.spark.repl.Main" (
-  set OUR_JAVA_OPTS=%SPARK_JAVA_OPTS% %SPARK_REPL_OPTS%
+rem Spark submit uses SPARK_JAVA_OPTS + SPARK_SUBMIT_OPTS +
+rem SPARK_DRIVER_MEMORY + SPARK_SUBMIT_DRIVER_MEMORY.
+rem The repl also uses SPARK_REPL_OPTS.
+) else if "%1"=="org.apache.spark.deploy.SparkSubmit" (
+  set OUR_JAVA_OPTS=%SPARK_JAVA_OPTS% %SPARK_SUBMIT_OPTS% %SPARK_REPL_OPTS%
+  if not "x%SPARK_SUBMIT_LIBRARY_PATH%"=="x" (
+    set OUR_JAVA_OPTS=!OUR_JAVA_OPTS! -Djava.library.path=%SPARK_SUBMIT_LIBRARY_PATH%
+  ) else if not "x%SPARK_LIBRARY_PATH%"=="x" (
+    set OUR_JAVA_OPTS=!OUR_JAVA_OPTS! -Djava.library.path=%SPARK_LIBRARY_PATH%
+  )
   if not "x%SPARK_DRIVER_MEMORY%"=="x" set OUR_JAVA_MEM=%SPARK_DRIVER_MEMORY%
+  if not "x%SPARK_SUBMIT_DRIVER_MEMORY%"=="x" set OUR_JAVA_MEM=%SPARK_SUBMIT_DRIVER_MEMORY%
 ) else (
   set OUR_JAVA_OPTS=%SPARK_JAVA_OPTS%
   if not "x%SPARK_DRIVER_MEMORY%"=="x" set OUR_JAVA_MEM=%SPARK_DRIVER_MEMORY%
 )
 
-rem Set JAVA_OPTS to be able to load native libraries and to set heap size
-set JAVA_OPTS=-XX:MaxPermSize=128m %OUR_JAVA_OPTS% -Djava.library.path=%SPARK_LIBRARY_PATH% -Xms%OUR_JAVA_MEM% -Xmx%OUR_JAVA_MEM%
 rem Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
+rem Set JAVA_OPTS to be able to load native libraries and to set heap size
+set JAVA_OPTS=-XX:MaxPermSize=128m %OUR_JAVA_OPTS% -Xms%OUR_JAVA_MEM% -Xmx%OUR_JAVA_MEM%
 
 rem Test whether the user has built Spark
 if exist "%FWDIR%RELEASE" goto skip_build_test
@@ -109,5 +119,27 @@ rem Figure out where java is.
 set RUNNER=java
 if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
 
-"%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
+rem In Spark submit client mode, the driver is launched in the same JVM as Spark submit itself.
+rem Here we must parse the properties file for relevant "spark.driver.*" configs before launching
+rem the driver JVM itself. Instead of handling this complexity here, we launch a separate JVM
+rem to prepare the launch environment of this driver JVM.
+
+rem In this case, leave out the main class (org.apache.spark.deploy.SparkSubmit) and use our own.
+rem Leaving out the first argument is surprisingly difficult to do in Windows. Note that this must
+rem be done here because the Windows "shift" command does not work in a conditional block.
+set BOOTSTRAP_ARGS=
+shift
+:start_parse
+if "%~1" == "" goto end_parse
+set BOOTSTRAP_ARGS=%BOOTSTRAP_ARGS% %~1
+shift
+goto start_parse
+:end_parse
+
+if not [%SPARK_SUBMIT_BOOTSTRAP_DRIVER%] == [] (
+  set SPARK_CLASS=1
+  "%RUNNER%" org.apache.spark.deploy.SparkSubmitDriverBootstrapper %BOOTSTRAP_ARGS%
+) else (
+  "%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
+)
 :exit