From 180607d1e3a0c130c14101eb213d766a1f0dedcd Mon Sep 17 00:00:00 2001
From: Jonathan Maurer
Date: Sat, 16 Jan 2016 22:25:32 -0600
Subject: [PATCH 1/5] quote to support spaces in path

---
 bin/pyspark.cmd      | 2 +-
 bin/run-example.cmd  | 2 +-
 bin/spark-class.cmd  | 2 +-
 bin/spark-shell.cmd  | 2 +-
 bin/spark-submit.cmd | 2 +-
 bin/sparkR.cmd       | 2 +-
 6 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/bin/pyspark.cmd b/bin/pyspark.cmd
index 7c26fbbac28b8..72d046a4ba2cf 100644
--- a/bin/pyspark.cmd
+++ b/bin/pyspark.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running PySpark. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C %~dp0pyspark2.cmd %*
+cmd /V /E /C "%~dp0pyspark2.cmd" %*
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index 5b2d048d6ed50..64f6bc3728d07 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark example. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.

-cmd /V /E /C %~dp0run-example2.cmd %*
+cmd /V /E /C "%~dp0run-example2.cmd" %*
diff --git a/bin/spark-class.cmd b/bin/spark-class.cmd
index 19850db9e1e5d..3bf3d20cb57b5 100644
--- a/bin/spark-class.cmd
+++ b/bin/spark-class.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark class. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.

-cmd /V /E /C %~dp0spark-class2.cmd %*
+cmd /V /E /C "%~dp0spark-class2.cmd" %*
diff --git a/bin/spark-shell.cmd b/bin/spark-shell.cmd
index 8f90ba5a0b3b8..991423da6ab99 100644
--- a/bin/spark-shell.cmd
+++ b/bin/spark-shell.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark shell. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C %~dp0spark-shell2.cmd %*
+cmd /V /E /C "%~dp0spark-shell2.cmd" %*
diff --git a/bin/spark-submit.cmd b/bin/spark-submit.cmd
index 8f3b84c7b971d..f301606933a95 100644
--- a/bin/spark-submit.cmd
+++ b/bin/spark-submit.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark submit. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C %~dp0spark-submit2.cmd %*
+cmd /V /E /C "%~dp0spark-submit2.cmd" %*
diff --git a/bin/sparkR.cmd b/bin/sparkR.cmd
index d7b60183ca8e0..1e5ea6a623219 100644
--- a/bin/sparkR.cmd
+++ b/bin/sparkR.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running SparkR. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C %~dp0sparkR2.cmd %*
+cmd /V /E /C "%~dp0sparkR2.cmd" %*
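Note on PATCH 1/5: %~dp0 expands to the drive and directory of the running script, trailing backslash included. Left unquoted, an install path containing spaces gets split into several tokens by cmd, so the launcher tries to execute only the first fragment of the path. Quoting the whole expansion keeps it a single token. A minimal sketch of the before/after behavior (the install path is hypothetical):

  rem Suppose this launcher lives in C:\Program Files\spark\bin\.
  rem Before: %~dp0 expands in place and cmd splits on the space,
  rem so this effectively tries to run "C:\Program" and fails:
  cmd /V /E /C %~dp0pyspark2.cmd %*
  rem After: the quoted expansion stays one token and works:
  cmd /V /E /C "%~dp0pyspark2.cmd" %*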
From 9a3a4cc2989f17848eb8ebcce46dd592179ead87 Mon Sep 17 00:00:00 2001
From: Jonathan Maurer
Date: Tue, 19 Jan 2016 22:38:13 -0600
Subject: [PATCH 2/5] changed the way dp0 is used to accept spaces in path

---
 bin/beeline.cmd        |  6 +++---
 bin/load-spark-env.cmd |  4 +++-
 bin/pyspark.cmd        |  4 +++-
 bin/pyspark2.cmd       |  4 +++-
 bin/run-example.cmd    |  4 +++-
 bin/run-example2.cmd   | 15 +++++++--------
 bin/spark-class.cmd    |  4 +++-
 bin/spark-class2.cmd   |  8 +++++---
 bin/spark-shell.cmd    |  4 +++-
 bin/spark-shell2.cmd   |  5 +++--
 bin/spark-submit.cmd   |  5 +++--
 bin/spark-submit2.cmd  |  4 +++-
 bin/sparkR.cmd         |  4 +++-
 bin/sparkR2.cmd        |  4 +++-
 14 files changed, 48 insertions(+), 27 deletions(-)

diff --git a/bin/beeline.cmd b/bin/beeline.cmd
index 8293f311029dd..fbcb436026f46 100644
--- a/bin/beeline.cmd
+++ b/bin/beeline.cmd
@@ -1,5 +1,5 @@
 @echo off
-
+pushd %~dp0
 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
 rem contributor license agreements. See the NOTICE file distributed with
@@ -17,5 +17,5 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set SPARK_HOME=%~dp0..
-cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*
+cmd /V /E /C spark-class.cmd org.apache.hive.beeline.BeeLine %*
+popd
diff --git a/bin/load-spark-env.cmd b/bin/load-spark-env.cmd
index 59080edd294f2..06f89331f81cf 100644
--- a/bin/load-spark-env.cmd
+++ b/bin/load-spark-env.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -27,7 +28,7 @@ if [%SPARK_ENV_LOADED%] == [] (
   if not [%SPARK_CONF_DIR%] == [] (
     set user_conf_dir=%SPARK_CONF_DIR%
   ) else (
-    set user_conf_dir=%~dp0..\conf
+    set user_conf_dir=\conf
   )

   call :LoadSparkEnv
@@ -57,3 +58,4 @@ exit /b 0
 if exist "%user_conf_dir%\spark-env.cmd" (
   call "%user_conf_dir%\spark-env.cmd"
 )
+popd
diff --git a/bin/pyspark.cmd b/bin/pyspark.cmd
index 72d046a4ba2cf..5b6eb22efe2dd 100644
--- a/bin/pyspark.cmd
+++ b/bin/pyspark.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
 rem This is the entry point for running PySpark. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C "%~dp0pyspark2.cmd" %*
+cmd /V /E /C pyspark2.cmd %*
+popd
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 51d6d15f66c69..1b24995d617e5 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -18,7 +19,7 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+set SPARK_HOME=..\

 call %SPARK_HOME%\bin\load-spark-env.cmd
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
@@ -36,3 +37,4 @@ set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py

 call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main --name "PySparkShell" %*
+popd
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index 64f6bc3728d07..59236db93757a 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
 rem This is the entry point for running a Spark example. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.

-cmd /V /E /C "%~dp0run-example2.cmd" %*
+cmd /V /E /C run-example2.cmd %*
+popd
diff --git a/bin/run-example2.cmd b/bin/run-example2.cmd
index c3e0221fb62e3..a1ed474d1338e 100644
--- a/bin/run-example2.cmd
+++ b/bin/run-example2.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,10 +21,7 @@ rem
 set SCALA_VERSION=2.10

 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
-
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
+set SPARK_HOME=..\

 call %SPARK_HOME%\bin\load-spark-env.cmd

@@ -36,12 +34,12 @@ if not "x%1"=="x" goto arg_given
   goto exit
 :arg_given

-set EXAMPLES_DIR=%FWDIR%examples
+set EXAMPLES_DIR=%SPARK_HOME%examples

 rem Figure out the JAR file that our examples were packaged into.
 set SPARK_EXAMPLES_JAR=
-if exist "%FWDIR%RELEASE" (
-  for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
+if exist "%SPARK_HOME%RELEASE" (
+  for %%d in ("%SPARK_HOME%lib\spark-examples*.jar") do (
     set SPARK_EXAMPLES_JAR=%%d
   )
 ) else (
@@ -80,9 +78,10 @@ if "%~1" neq "" (
 )
 if defined ARGS set ARGS=%ARGS:~1%

-call "%FWDIR%bin\spark-submit.cmd" ^
+call "%SPARK_HOME%bin\spark-submit.cmd" ^
   --master %EXAMPLE_MASTER% ^
   --class %EXAMPLE_CLASS% ^
   "%SPARK_EXAMPLES_JAR%" %ARGS%

 :exit
+popd
diff --git a/bin/spark-class.cmd b/bin/spark-class.cmd
index 3bf3d20cb57b5..c8257324f3955 100644
--- a/bin/spark-class.cmd
+++ b/bin/spark-class.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
 rem This is the entry point for running a Spark class. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.

-cmd /V /E /C "%~dp0spark-class2.cmd" %*
+cmd /V /E /C spark-class2.cmd %*
+popd
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd
index db09fa27e51a6..08fa4f1fbaadf 100644
--- a/bin/spark-class2.cmd
+++ b/bin/spark-class2.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -18,9 +19,9 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+set SPARK_HOME=..\

-call %SPARK_HOME%\bin\load-spark-env.cmd
+call %SPARK_HOME%bin\load-spark-env.cmd

 rem Test that an argument was given
 if "x%1"=="x" (
@@ -34,7 +35,7 @@ set SPARK_ASSEMBLY_JAR=0
 if exist "%SPARK_HOME%\RELEASE" (
   set ASSEMBLY_DIR=%SPARK_HOME%\lib
 ) else (
-  set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
+  set ASSEMBLY_DIR=%SPARK_HOME\%assembly\target\scala-%SPARK_SCALA_VERSION%
 )

 for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
@@ -68,3 +69,4 @@ for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
 )
 del %LAUNCHER_OUTPUT%
 %SPARK_CMD%
+popd
diff --git a/bin/spark-shell.cmd b/bin/spark-shell.cmd
index 991423da6ab99..9faed8dced871 100644
--- a/bin/spark-shell.cmd
+++ b/bin/spark-shell.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
 rem This is the entry point for running Spark shell. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C "%~dp0spark-shell2.cmd" %*
+cmd /V /E /C spark-shell2.cmd %*
+popd
diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd
index b9b0f510d7f5d..1bfb1f4f3dd37 100644
--- a/bin/spark-shell2.cmd
+++ b/bin/spark-shell2.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -17,7 +18,6 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set SPARK_HOME=%~dp0..
 set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]

 rem SPARK-4161: scala does not assume use of the java classpath,
@@ -32,4 +32,5 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
 set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"

 :run_shell
-%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
+spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
+popd
diff --git a/bin/spark-submit.cmd b/bin/spark-submit.cmd
index f301606933a95..d480635fede96 100644
--- a/bin/spark-submit.cmd
+++ b/bin/spark-submit.cmd
@@ -1,5 +1,5 @@
 @echo off
-
+pushd %~dp0
 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
 rem contributor license agreements. See the NOTICE file distributed with
@@ -20,4 +20,5 @@ rem
 rem This is the entry point for running Spark submit. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C "%~dp0spark-submit2.cmd" %*
+cmd /V /E /C spark-submit2.cmd %*
+popd
diff --git a/bin/spark-submit2.cmd b/bin/spark-submit2.cmd
index 651376e526928..0d60365f4a4ad 100644
--- a/bin/spark-submit2.cmd
+++ b/bin/spark-submit2.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -24,4 +25,5 @@ rem disable randomized hash for string in Python 3.3+
 set PYTHONHASHSEED=0

 set CLASS=org.apache.spark.deploy.SparkSubmit
-%~dp0spark-class2.cmd %CLASS% %*
+spark-class2.cmd %CLASS% %*
+popd
diff --git a/bin/sparkR.cmd b/bin/sparkR.cmd
index 1e5ea6a623219..32a51a3daacaa 100644
--- a/bin/sparkR.cmd
+++ b/bin/sparkR.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
 rem This is the entry point for running SparkR. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C "%~dp0sparkR2.cmd" %*
+cmd /V /E /C sparkR2.cmd %*
+popd
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index e47f22c7300bb..c34c90d3e727d 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -1,4 +1,5 @@
 @echo off
+pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -18,9 +19,10 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+set SPARK_HOME=..\

 call %SPARK_HOME%\bin\load-spark-env.cmd

 call %SPARK_HOME%\bin\spark-submit2.cmd sparkr-shell-main %*
+popd
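Note on PATCH 2/5: instead of quoting every expansion, each script now switches into its own directory, so sibling scripts and relative paths such as ..\conf resolve without embedding %~dp0 anywhere. In cmd, pushd saves the current directory on a stack and changes to the given one; popd restores it. A minimal sketch of the idiom as the patch applies it:

  @echo off
  rem Save the caller's current directory and switch to the
  rem directory this script lives in (%~dp0).
  pushd %~dp0
  rem Sibling scripts can now be invoked by bare name, even when the
  rem install path contains spaces.
  cmd /V /E /C spark-class2.cmd %*
  rem Restore the caller's original directory before exiting.
  popd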
From 29793f33d5c05e74c4cde79b16412d299719fe4d Mon Sep 17 00:00:00 2001
From: Jon Maurer
Date: Tue, 26 Jan 2016 19:54:31 -0600
Subject: [PATCH 3/5] Revert "changed the way dp0 is used to accept spaces in path"

This reverts commit 9a3a4cc2989f17848eb8ebcce46dd592179ead87.

---
 bin/beeline.cmd        |  6 +++---
 bin/load-spark-env.cmd |  4 +---
 bin/pyspark.cmd        |  4 +---
 bin/pyspark2.cmd       |  4 +---
 bin/run-example.cmd    |  4 +---
 bin/run-example2.cmd   | 15 ++++++++-------
 bin/spark-class.cmd    |  4 +---
 bin/spark-class2.cmd   |  8 +++-----
 bin/spark-shell.cmd    |  4 +---
 bin/spark-shell2.cmd   |  5 ++---
 bin/spark-submit.cmd   |  5 ++---
 bin/spark-submit2.cmd  |  4 +---
 bin/sparkR.cmd         |  4 +---
 bin/sparkR2.cmd        |  4 +---
 14 files changed, 27 insertions(+), 48 deletions(-)

diff --git a/bin/beeline.cmd b/bin/beeline.cmd
index fbcb436026f46..8293f311029dd 100644
--- a/bin/beeline.cmd
+++ b/bin/beeline.cmd
@@ -1,5 +1,5 @@
 @echo off
-pushd %~dp0
+
 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
 rem contributor license agreements. See the NOTICE file distributed with
@@ -17,5 +17,5 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-cmd /V /E /C spark-class.cmd org.apache.hive.beeline.BeeLine %*
-popd
+set SPARK_HOME=%~dp0..
+cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*
diff --git a/bin/load-spark-env.cmd b/bin/load-spark-env.cmd
index 06f89331f81cf..59080edd294f2 100644
--- a/bin/load-spark-env.cmd
+++ b/bin/load-spark-env.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -28,7 +27,7 @@ if [%SPARK_ENV_LOADED%] == [] (
   if not [%SPARK_CONF_DIR%] == [] (
     set user_conf_dir=%SPARK_CONF_DIR%
   ) else (
-    set user_conf_dir=\conf
+    set user_conf_dir=%~dp0..\conf
   )

   call :LoadSparkEnv
@@ -58,4 +57,3 @@ exit /b 0
 if exist "%user_conf_dir%\spark-env.cmd" (
   call "%user_conf_dir%\spark-env.cmd"
 )
-popd
diff --git a/bin/pyspark.cmd b/bin/pyspark.cmd
index 5b6eb22efe2dd..72d046a4ba2cf 100644
--- a/bin/pyspark.cmd
+++ b/bin/pyspark.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -21,5 +20,4 @@ rem
 rem This is the entry point for running PySpark. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C pyspark2.cmd %*
-popd
+cmd /V /E /C "%~dp0pyspark2.cmd" %*
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 1b24995d617e5..51d6d15f66c69 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -19,7 +18,7 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=..\
+set SPARK_HOME=%~dp0..

 call %SPARK_HOME%\bin\load-spark-env.cmd
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
@@ -37,4 +36,3 @@ set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py

 call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main --name "PySparkShell" %*
-popd
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index 59236db93757a..64f6bc3728d07 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -21,5 +20,4 @@ rem
 rem This is the entry point for running a Spark example. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.

-cmd /V /E /C run-example2.cmd %*
-popd
+cmd /V /E /C "%~dp0run-example2.cmd" %*
diff --git a/bin/run-example2.cmd b/bin/run-example2.cmd
index a1ed474d1338e..c3e0221fb62e3 100644
--- a/bin/run-example2.cmd
+++ b/bin/run-example2.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -21,7 +20,10 @@ rem
 set SCALA_VERSION=2.10

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=..\
+set FWDIR=%~dp0..\
+
+rem Export this as SPARK_HOME
+set SPARK_HOME=%FWDIR%

 call %SPARK_HOME%\bin\load-spark-env.cmd

@@ -34,12 +36,12 @@ if not "x%1"=="x" goto arg_given
   goto exit
 :arg_given

-set EXAMPLES_DIR=%SPARK_HOME%examples
+set EXAMPLES_DIR=%FWDIR%examples

 rem Figure out the JAR file that our examples were packaged into.
 set SPARK_EXAMPLES_JAR=
-if exist "%SPARK_HOME%RELEASE" (
-  for %%d in ("%SPARK_HOME%lib\spark-examples*.jar") do (
+if exist "%FWDIR%RELEASE" (
+  for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
     set SPARK_EXAMPLES_JAR=%%d
   )
 ) else (
@@ -78,10 +80,9 @@ if "%~1" neq "" (
 )
 if defined ARGS set ARGS=%ARGS:~1%

-call "%SPARK_HOME%bin\spark-submit.cmd" ^
+call "%FWDIR%bin\spark-submit.cmd" ^
   --master %EXAMPLE_MASTER% ^
   --class %EXAMPLE_CLASS% ^
   "%SPARK_EXAMPLES_JAR%" %ARGS%

 :exit
-popd
diff --git a/bin/spark-class.cmd b/bin/spark-class.cmd
index c8257324f3955..3bf3d20cb57b5 100644
--- a/bin/spark-class.cmd
+++ b/bin/spark-class.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -21,5 +20,4 @@ rem
 rem This is the entry point for running a Spark class. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.

-cmd /V /E /C spark-class2.cmd %*
-popd
+cmd /V /E /C "%~dp0spark-class2.cmd" %*
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd
index 08fa4f1fbaadf..db09fa27e51a6 100644
--- a/bin/spark-class2.cmd
+++ b/bin/spark-class2.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -19,9 +18,9 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=..\
+set SPARK_HOME=%~dp0..

-call %SPARK_HOME%bin\load-spark-env.cmd
+call %SPARK_HOME%\bin\load-spark-env.cmd

 rem Test that an argument was given
 if "x%1"=="x" (
@@ -35,7 +34,7 @@ set SPARK_ASSEMBLY_JAR=0
 if exist "%SPARK_HOME%\RELEASE" (
   set ASSEMBLY_DIR=%SPARK_HOME%\lib
 ) else (
-  set ASSEMBLY_DIR=%SPARK_HOME\%assembly\target\scala-%SPARK_SCALA_VERSION%
+  set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
 )

 for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
@@ -69,4 +68,3 @@ for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
 )
 del %LAUNCHER_OUTPUT%
 %SPARK_CMD%
-popd
diff --git a/bin/spark-shell.cmd b/bin/spark-shell.cmd
index 9faed8dced871..991423da6ab99 100644
--- a/bin/spark-shell.cmd
+++ b/bin/spark-shell.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -21,5 +20,4 @@ rem
 rem This is the entry point for running Spark shell. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C spark-shell2.cmd %*
-popd
+cmd /V /E /C "%~dp0spark-shell2.cmd" %*
diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd
index 1bfb1f4f3dd37..b9b0f510d7f5d 100644
--- a/bin/spark-shell2.cmd
+++ b/bin/spark-shell2.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -18,6 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

+set SPARK_HOME=%~dp0..
 set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]

 rem SPARK-4161: scala does not assume use of the java classpath,
@@ -32,5 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
 set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"

 :run_shell
-spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
-popd
+%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
diff --git a/bin/spark-submit.cmd b/bin/spark-submit.cmd
index d480635fede96..f301606933a95 100644
--- a/bin/spark-submit.cmd
+++ b/bin/spark-submit.cmd
@@ -1,5 +1,5 @@
 @echo off
-pushd %~dp0
+
 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
 rem contributor license agreements. See the NOTICE file distributed with
@@ -20,5 +20,4 @@ rem
 rem This is the entry point for running Spark submit. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C spark-submit2.cmd %*
-popd
+cmd /V /E /C "%~dp0spark-submit2.cmd" %*
diff --git a/bin/spark-submit2.cmd b/bin/spark-submit2.cmd
index 0d60365f4a4ad..651376e526928 100644
--- a/bin/spark-submit2.cmd
+++ b/bin/spark-submit2.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -25,5 +24,4 @@ rem disable randomized hash for string in Python 3.3+
 set PYTHONHASHSEED=0

 set CLASS=org.apache.spark.deploy.SparkSubmit
-spark-class2.cmd %CLASS% %*
-popd
+%~dp0spark-class2.cmd %CLASS% %*
diff --git a/bin/sparkR.cmd b/bin/sparkR.cmd
index 32a51a3daacaa..1e5ea6a623219 100644
--- a/bin/sparkR.cmd
+++ b/bin/sparkR.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -21,5 +20,4 @@ rem
 rem This is the entry point for running SparkR. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C sparkR2.cmd %*
-popd
+cmd /V /E /C "%~dp0sparkR2.cmd" %*
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index c34c90d3e727d..e47f22c7300bb 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -1,5 +1,4 @@
 @echo off
-pushd %~dp0

 rem
 rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -19,10 +18,9 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=..\
+set SPARK_HOME=%~dp0..

 call %SPARK_HOME%\bin\load-spark-env.cmd

 call %SPARK_HOME%\bin\spark-submit2.cmd sparkr-shell-main %*
-popd
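Note on PATCH 3/5: the commit message gives no reason for the revert, but the pushd approach has a well-known downside in cmd: while the script runs, the current directory is the bin directory, so any relative path the user passes as an argument resolves against the wrong location. A hypothetical session illustrating the hazard (paths invented for the example):

  C:\work> C:\spark\bin\spark-submit.cmd my-app.py
  rem After "pushd %~dp0", my-app.py is looked up under C:\spark\bin\
  rem rather than C:\work\, so the file is not found.

The reverted commit also left a latent bug visible in its own diff: load-spark-env.cmd's "set user_conf_dir=\conf" is a drive-root-relative path, pointing at the root of the current drive rather than Spark's conf directory.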
From 7ed79f4788123075ac713f7f441fa04f272d7608 Mon Sep 17 00:00:00 2001
From: Jon Maurer
Date: Tue, 26 Jan 2016 20:24:26 -0600
Subject: [PATCH 4/5] quoted directory paths to handle spaces

---
 bin/beeline.cmd        |  4 ++--
 bin/load-spark-env.cmd |  6 +++---
 bin/pyspark2.cmd       |  4 ++--
 bin/run-example2.cmd   |  4 ++--
 bin/spark-class2.cmd   | 24 ++++++++++++------------
 bin/spark-shell2.cmd   |  4 ++--
 bin/spark-submit2.cmd  |  2 +-
 bin/sparkR2.cmd        |  6 +++---
 8 files changed, 27 insertions(+), 27 deletions(-)

diff --git a/bin/beeline.cmd b/bin/beeline.cmd
index 8293f311029dd..70feb3a43634d 100644
--- a/bin/beeline.cmd
+++ b/bin/beeline.cmd
@@ -17,5 +17,5 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set SPARK_HOME=%~dp0..
-cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*
+set SPARK_HOME="%~dp0.."
+cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*
diff --git a/bin/load-spark-env.cmd b/bin/load-spark-env.cmd
index 59080edd294f2..e97e1e637b7e5 100644
--- a/bin/load-spark-env.cmd
+++ b/bin/load-spark-env.cmd
@@ -27,7 +27,7 @@ if [%SPARK_ENV_LOADED%] == [] (
   if not [%SPARK_CONF_DIR%] == [] (
     set user_conf_dir=%SPARK_CONF_DIR%
   ) else (
-    set user_conf_dir=%~dp0..\conf
+    set user_conf_dir="%~dp0..\conf"
   )

   call :LoadSparkEnv
@@ -35,8 +35,8 @@ if [%SPARK_ENV_LOADED%] == [] (

 rem Setting SPARK_SCALA_VERSION if not already set.

-set ASSEMBLY_DIR2=%SPARK_HOME%/assembly/target/scala-2.11
-set ASSEMBLY_DIR1=%SPARK_HOME%/assembly/target/scala-2.10
+set ASSEMBLY_DIR2="%SPARK_HOME%/assembly/target/scala-2.11"
+set ASSEMBLY_DIR1="%SPARK_HOME%/assembly/target/scala-2.10"

 if [%SPARK_SCALA_VERSION%] == [] (
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 51d6d15f66c69..d359c15615add 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+set SPARK_HOME="%~dp0.."

 call %SPARK_HOME%\bin\load-spark-env.cmd
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
@@ -35,4 +35,4 @@ set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.9.1-src.zip;%PYTHONPATH%
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py

-call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main --name "PySparkShell" %*
+call "%SPARK_HOME%\bin\spark-submit2.cmd" pyspark-shell-main --name "PySparkShell" %*
diff --git a/bin/run-example2.cmd b/bin/run-example2.cmd
index c3e0221fb62e3..d69d4da9ac2ae 100644
--- a/bin/run-example2.cmd
+++ b/bin/run-example2.cmd
@@ -20,7 +20,7 @@ rem
 set SCALA_VERSION=2.10

 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
+set FWDIR="%~dp0..\"

 rem Export this as SPARK_HOME
 set SPARK_HOME=%FWDIR%
@@ -36,7 +36,7 @@ if not "x%1"=="x" goto arg_given
 goto exit
 :arg_given

-set EXAMPLES_DIR=%FWDIR%examples
+set EXAMPLES_DIR="%FWDIR%examples"

 rem Figure out the JAR file that our examples were packaged into.
 set SPARK_EXAMPLES_JAR=
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd
index db09fa27e51a6..a1aa157ff9aca 100644
--- a/bin/spark-class2.cmd
+++ b/bin/spark-class2.cmd
@@ -18,9 +18,9 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+set SPARK_HOME="%~dp0.."

-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"

 rem Test that an argument was given
 if "x%1"=="x" (
@@ -32,13 +32,13 @@ rem Find assembly jar
 set SPARK_ASSEMBLY_JAR=0

 if exist "%SPARK_HOME%\RELEASE" (
-  set ASSEMBLY_DIR=%SPARK_HOME%\lib
+  set ASSEMBLY_DIR="%SPARK_HOME%\lib"
 ) else (
-  set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
+  set ASSEMBLY_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%"
 )

-for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
-  set SPARK_ASSEMBLY_JAR=%%d
+for %%d in ("%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar") do (
+  set SPARK_ASSEMBLY_JAR="%%d"
 )
 if "%SPARK_ASSEMBLY_JAR%"=="0" (
   echo Failed to find Spark assembly JAR.
@@ -46,14 +46,14 @@ if "%SPARK_ASSEMBLY_JAR%"=="0" (
   exit /b 1
 )

-set LAUNCH_CLASSPATH=%SPARK_ASSEMBLY_JAR%
+set LAUNCH_CLASSPATH="%SPARK_ASSEMBLY_JAR%"

 rem Add the launcher build dir to the classpath if requested.
 if not "x%SPARK_PREPEND_CLASSES%"=="x" (
-  set LAUNCH_CLASSPATH=%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%
+  set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%"
 )

-set _SPARK_ASSEMBLY=%SPARK_ASSEMBLY_JAR%
+set _SPARK_ASSEMBLY="%SPARK_ASSEMBLY_JAR%"

 rem Figure out where java is.
 set RUNNER=java
@@ -61,9 +61,9 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java

 rem The launcher library prints the command to be executed in a single line suitable for being
 rem executed by the batch interpreter. So read all the output of the launcher into a variable.
-set LAUNCHER_OUTPUT=%temp%\spark-class-launcher-output-%RANDOM%.txt
-"%RUNNER%" -cp %LAUNCH_CLASSPATH% org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
-for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
+set LAUNCHER_OUTPUT="%temp%\spark-class-launcher-output-%RANDOM%.txt"
+"%RUNNER%" -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > "%LAUNCHER_OUTPUT%"
+for /f "tokens=*" %%i in ("%LAUNCHER_OUTPUT%") do (
   set SPARK_CMD=%%i
 )
 del %LAUNCHER_OUTPUT%
diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd
index b9b0f510d7f5d..5c6531336d344 100644
--- a/bin/spark-shell2.cmd
+++ b/bin/spark-shell2.cmd
@@ -17,7 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set SPARK_HOME=%~dp0..
+set SPARK_HOME="%~dp0.."
 set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]

 rem SPARK-4161: scala does not assume use of the java classpath,
@@ -32,4 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
 set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"

 :run_shell
-%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
+"%SPARK_HOME%\bin\spark-submit2.cmd" --class org.apache.spark.repl.Main --name "Spark shell" %*
diff --git a/bin/spark-submit2.cmd b/bin/spark-submit2.cmd
index 651376e526928..49e350fa5c416 100644
--- a/bin/spark-submit2.cmd
+++ b/bin/spark-submit2.cmd
@@ -24,4 +24,4 @@ rem disable randomized hash for string in Python 3.3+
 set PYTHONHASHSEED=0

 set CLASS=org.apache.spark.deploy.SparkSubmit
-%~dp0spark-class2.cmd %CLASS% %*
+"%~dp0spark-class2.cmd" %CLASS% %*
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index e47f22c7300bb..f245fe94ec7d2 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -18,9 +18,9 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+set SPARK_HOME="%~dp0.."

-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"

-call %SPARK_HOME%\bin\spark-submit2.cmd sparkr-shell-main %*
+call "%SPARK_HOME%\bin\spark-submit2.cmd" sparkr-shell-main %*
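Note on PATCH 4/5: quoting at the set site is subtle in cmd, because everything after the equals sign, quotes included, becomes part of the stored value. After set SPARK_HOME="%~dp0..", later expansions such as "%SPARK_HOME%\bin\..." therefore contain embedded or doubled quotes, which is exactly what PATCH 5/5 goes on to fix by quoting at the use sites instead. A minimal sketch of the pitfall (the path is hypothetical):

  @echo off
  rem The quotes are stored as part of the value itself:
  set SPARK_HOME="C:\Program Files\spark"
  rem Expands to "C:\Program Files\spark"\bin, with stray quotes mid-path:
  echo %SPARK_HOME%\bin
  rem Quoting again at the use site doubles them:
  rem ""C:\Program Files\spark"\bin"
  echo "%SPARK_HOME%\bin"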
From eacab83aed5e6e28547731124bf90969f1051e63 Mon Sep 17 00:00:00 2001
From: Jon Maurer
Date: Wed, 3 Feb 2016 23:55:36 -0600
Subject: [PATCH 5/5] quoted spaces in windows cmd scripts

---
 bin/beeline.cmd        |  2 +-
 bin/load-spark-env.cmd |  6 +++---
 bin/pyspark2.cmd       |  4 ++--
 bin/run-example2.cmd   | 15 ++++++---------
 bin/spark-class2.cmd   | 16 ++++++++--------
 bin/spark-shell2.cmd   |  2 +-
 bin/spark-submit.cmd   |  2 +-
 bin/sparkR2.cmd        |  2 +-
 8 files changed, 23 insertions(+), 26 deletions(-)

diff --git a/bin/beeline.cmd b/bin/beeline.cmd
index 70feb3a43634d..8ddaa419967a5 100644
--- a/bin/beeline.cmd
+++ b/bin/beeline.cmd
@@ -17,5 +17,5 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set SPARK_HOME="%~dp0.."
+set SPARK_HOME=%~dp0..
 cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*
diff --git a/bin/load-spark-env.cmd b/bin/load-spark-env.cmd
index e97e1e637b7e5..0977025c2036e 100644
--- a/bin/load-spark-env.cmd
+++ b/bin/load-spark-env.cmd
@@ -27,7 +27,7 @@ if [%SPARK_ENV_LOADED%] == [] (
   if not [%SPARK_CONF_DIR%] == [] (
     set user_conf_dir=%SPARK_CONF_DIR%
   ) else (
-    set user_conf_dir="%~dp0..\conf"
+    set user_conf_dir=..\conf
   )

   call :LoadSparkEnv
@@ -35,8 +35,8 @@ if [%SPARK_ENV_LOADED%] == [] (

 rem Setting SPARK_SCALA_VERSION if not already set.

-set ASSEMBLY_DIR2="%SPARK_HOME%/assembly/target/scala-2.11"
-set ASSEMBLY_DIR1="%SPARK_HOME%/assembly/target/scala-2.10"
+set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
+set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.10"

 if [%SPARK_SCALA_VERSION%] == [] (
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index d359c15615add..21fe28155a596 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -18,9 +18,9 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME="%~dp0.."
+set SPARK_HOME=%~dp0..

-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]

 rem Figure out which Python to use.
diff --git a/bin/run-example2.cmd b/bin/run-example2.cmd
index d69d4da9ac2ae..fada43581d184 100644
--- a/bin/run-example2.cmd
+++ b/bin/run-example2.cmd
@@ -20,12 +20,9 @@ rem
 set SCALA_VERSION=2.10

 rem Figure out where the Spark framework is installed
-set FWDIR="%~dp0..\"
-
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
+set SPARK_HOME=%~dp0..

-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"

 rem Test that an argument was given
 if not "x%1"=="x" goto arg_given
@@ -36,12 +33,12 @@ if not "x%1"=="x" goto arg_given
 goto exit
 :arg_given

-set EXAMPLES_DIR="%FWDIR%examples"
+set EXAMPLES_DIR=%SPARK_HOME%\examples

 rem Figure out the JAR file that our examples were packaged into.
 set SPARK_EXAMPLES_JAR=
-if exist "%FWDIR%RELEASE" (
-  for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
+if exist "%SPARK_HOME%\RELEASE" (
+  for %%d in ("%SPARK_HOME%\lib\spark-examples*.jar") do (
     set SPARK_EXAMPLES_JAR=%%d
   )
 ) else (
@@ -80,7 +77,7 @@ if "%~1" neq "" (
 )
 if defined ARGS set ARGS=%ARGS:~1%

-call "%FWDIR%bin\spark-submit.cmd" ^
+call "%SPARK_HOME%\bin\spark-submit.cmd" ^
   --master %EXAMPLE_MASTER% ^
   --class %EXAMPLE_CLASS% ^
   "%SPARK_EXAMPLES_JAR%" %ARGS%
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd
index a1aa157ff9aca..c4fadb822323d 100644
--- a/bin/spark-class2.cmd
+++ b/bin/spark-class2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME="%~dp0.."
+set SPARK_HOME=%~dp0..

 call "%SPARK_HOME%\bin\load-spark-env.cmd"

@@ -37,8 +37,8 @@ if exist "%SPARK_HOME%\RELEASE" (
   set ASSEMBLY_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%"
 )

-for %%d in ("%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar") do (
-  set SPARK_ASSEMBLY_JAR="%%d"
+for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
+  set SPARK_ASSEMBLY_JAR=%%d
 )
 if "%SPARK_ASSEMBLY_JAR%"=="0" (
   echo Failed to find Spark assembly JAR.
@@ -46,14 +46,14 @@ if "%SPARK_ASSEMBLY_JAR%"=="0" (
   exit /b 1
 )

-set LAUNCH_CLASSPATH="%SPARK_ASSEMBLY_JAR%"
+set LAUNCH_CLASSPATH=%SPARK_ASSEMBLY_JAR%

 rem Add the launcher build dir to the classpath if requested.
 if not "x%SPARK_PREPEND_CLASSES%"=="x" (
   set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%"
 )

-set _SPARK_ASSEMBLY="%SPARK_ASSEMBLY_JAR%"
+set _SPARK_ASSEMBLY=%SPARK_ASSEMBLY_JAR%

 rem Figure out where java is.
 set RUNNER=java
@@ -61,9 +61,9 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java

 rem The launcher library prints the command to be executed in a single line suitable for being
 rem executed by the batch interpreter. So read all the output of the launcher into a variable.
-set LAUNCHER_OUTPUT="%temp%\spark-class-launcher-output-%RANDOM%.txt"
-"%RUNNER%" -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > "%LAUNCHER_OUTPUT%"
-for /f "tokens=*" %%i in ("%LAUNCHER_OUTPUT%") do (
+set LAUNCHER_OUTPUT=%temp%\spark-class-launcher-output-%RANDOM%.txt
+"%RUNNER%" -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
+for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
   set SPARK_CMD=%%i
 )
 del %LAUNCHER_OUTPUT%
diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd
index 5c6531336d344..7b5d396be888c 100644
--- a/bin/spark-shell2.cmd
+++ b/bin/spark-shell2.cmd
@@ -17,7 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set SPARK_HOME="%~dp0.."
+set SPARK_HOME=%~dp0..
 set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]

 rem SPARK-4161: scala does not assume use of the java classpath,
diff --git a/bin/spark-submit.cmd b/bin/spark-submit.cmd
index f301606933a95..f121b62a53d24 100644
--- a/bin/spark-submit.cmd
+++ b/bin/spark-submit.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark submit. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C "%~dp0spark-submit2.cmd" %*
+cmd /V /E /C spark-submit2.cmd %*
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index f245fe94ec7d2..459b780e2ae33 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem

 rem Figure out where the Spark framework is installed
-set SPARK_HOME="%~dp0.."
+set SPARK_HOME=%~dp0..

 call "%SPARK_HOME%\bin\load-spark-env.cmd"
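The series thus converges on a single convention: store paths unquoted at the set site and quote each individual use. A condensed sketch of the final pattern, assembled from the PATCH 5/5 hunks above:

  rem Store the raw path; %~dp0.. may contain spaces, which is fine here.
  set SPARK_HOME=%~dp0..
  rem Quote at every use site so the expanded path stays a single token.
  call "%SPARK_HOME%\bin\load-spark-env.cmd"
  cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*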