Skip to content

Commit 5f4ddcc

Browse files
author
Marcelo Vanzin
committed
Better usage messages.
Print the actual error message before the usual usage string.
1 parent 92a9cfb commit 5f4ddcc

File tree

8 files changed

+89
-34
lines changed

8 files changed

+89
-34
lines changed

bin/pyspark

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,12 @@ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
2323
source "$SPARK_HOME"/bin/load-spark-env.sh
2424

2525
function usage() {
26+
if [ -n "$1" ]; then
27+
echo $1
28+
fi
2629
echo "Usage: ./bin/pyspark [options]" 1>&2
2730
"$SPARK_HOME"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
28-
exit 0
31+
exit $2
2932
}
3033
export -f usage
3134

bin/spark-shell

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,12 @@ set -o posix
3131
export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
3232

3333
usage() {
34+
if [ -n "$1" ]; then
35+
echo $1
36+
fi
3437
echo "Usage: ./bin/spark-shell [options]"
3538
"$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
36-
exit $1
39+
exit $2
3740
}
3841
export -f usage
3942

bin/spark-shell2.cmd

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,13 +39,14 @@ set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"
3939
:run_shell
4040
call %SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main %*
4141
set SPARK_ERROR_LEVEL=%ERRORLEVEL%
42-
if "%SPARK_LAUNCHER_USAGE_ERROR%"=="1" (
42+
if not "x%SPARK_LAUNCHER_USAGE_ERROR%"=="x" (
4343
call :usage
4444
exit /b 1
4545
)
4646
exit /b %SPARK_ERROR_LEVEL%
4747

4848
:usage
49+
echo %SPARK_LAUNCHER_USAGE_ERROR%
4950
echo "Usage: .\bin\spark-shell.cmd [options]" >&2
5051
call %SPARK_HOME%\bin\spark-submit2.cmd --help 2>&1 | findstr /V "Usage" 1>&2
5152
goto :eof

bin/spark-sql

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,9 @@ export CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
3131
export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
3232

3333
function usage {
34+
if [ -n "$1" ]; then
35+
echo $1
36+
fi
3437
echo "Usage: ./bin/spark-sql [options] [cli option]"
3538
pattern="usage"
3639
pattern+="\|Spark assembly has been built with Hive"
@@ -43,12 +46,12 @@ function usage {
4346
echo
4447
echo "CLI options:"
4548
"$FWDIR"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
46-
exit $1
49+
exit $2
4750
}
4851
export -f usage
4952

5053
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
51-
usage 0
54+
usage "" 0
5255
fi
5356

5457
exec "$FWDIR"/bin/spark-submit --class $CLASS "$@"

bin/spark-submit

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,16 @@
1919

2020
SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
2121

22-
usage() {
23-
"$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit --help
24-
exit $1
25-
}
26-
export -f usage
22+
# Only define a usage function if an upstream script hasn't done so.
23+
if ! type -t usage >/dev/null 2>&1; then
24+
usage() {
25+
if [ -n "$1" ]; then
26+
echo $1
27+
fi
28+
"$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit --help
29+
exit $2
30+
}
31+
export -f usage
32+
fi
2733

2834
exec "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"

bin/spark-submit.cmd

Lines changed: 1 addition & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -20,15 +20,4 @@ rem
2020
rem This is the entry point for running Spark submit. To avoid polluting the
2121
rem environment, it just launches a new cmd to do the real work.
2222

23-
set CLASS=org.apache.spark.deploy.SparkSubmit
24-
call %~dp0spark-class2.cmd %CLASS% %*
25-
set SPARK_ERROR_LEVEL=%ERRORLEVEL%
26-
if "%SPARK_LAUNCHER_USAGE_ERROR%"=="1" (
27-
call :usage
28-
exit /b 1
29-
)
30-
exit /b %SPARK_ERROR_LEVEL%
31-
32-
:usage
33-
call %SPARK_HOME%\bin\spark-class2.cmd %CLASS% --help
34-
goto :eof
23+
cmd /V /E /C %~dp0spark-submit2.cmd %*

bin/spark-submit2.cmd

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
@echo off
2+
3+
rem
4+
rem Licensed to the Apache Software Foundation (ASF) under one or more
5+
rem contributor license agreements. See the NOTICE file distributed with
6+
rem this work for additional information regarding copyright ownership.
7+
rem The ASF licenses this file to You under the Apache License, Version 2.0
8+
rem (the "License"); you may not use this file except in compliance with
9+
rem the License. You may obtain a copy of the License at
10+
rem
11+
rem http://www.apache.org/licenses/LICENSE-2.0
12+
rem
13+
rem Unless required by applicable law or agreed to in writing, software
14+
rem distributed under the License is distributed on an "AS IS" BASIS,
15+
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16+
rem See the License for the specific language governing permissions and
17+
rem limitations under the License.
18+
rem
19+
20+
rem This is the entry point for running Spark submit. To avoid polluting the
21+
rem environment, it just launches a new cmd to do the real work.
22+
23+
set CLASS=org.apache.spark.deploy.SparkSubmit
24+
call %~dp0spark-class2.cmd %CLASS% %*
25+
set SPARK_ERROR_LEVEL=%ERRORLEVEL%
26+
if not "x%SPARK_LAUNCHER_USAGE_ERROR%"=="x" (
27+
call :usage
28+
exit /b 1
29+
)
30+
exit /b %SPARK_ERROR_LEVEL%
31+
32+
:usage
33+
echo %SPARK_LAUNCHER_USAGE_ERROR%
34+
call %SPARK_HOME%\bin\spark-class2.cmd %CLASS% --help
35+
goto :eof

launcher/src/main/java/org/apache/spark/launcher/Main.java

Lines changed: 27 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,7 @@ public static void main(String[] argsArray) throws Exception {
5454
String className = args.remove(0);
5555

5656
boolean printLaunchCommand;
57+
boolean printUsage;
5758
AbstractCommandBuilder builder;
5859
try {
5960
if (className.equals("org.apache.spark.deploy.SparkSubmit")) {
@@ -62,9 +63,11 @@ public static void main(String[] argsArray) throws Exception {
6263
builder = new SparkClassCommandBuilder(className, args);
6364
}
6465
printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
66+
printUsage = false;
6567
} catch (IllegalArgumentException e) {
66-
builder = new UsageLauncher();
68+
builder = new UsageCommandBuilder(e.getMessage());
6769
printLaunchCommand = false;
70+
printUsage = true;
6871
}
6972

7073
Map<String, String> env = new HashMap<String, String>();
@@ -75,7 +78,13 @@ public static void main(String[] argsArray) throws Exception {
7578
}
7679

7780
if (isWindows()) {
78-
System.out.println(prepareWindowsCommand(cmd, env));
81+
// When printing the usage message, we can't use "cmd /v" since that prevents the env
82+
// variable from being seen in the caller script. So do not call prepareWindowsCommand().
83+
if (printUsage) {
84+
System.out.println(join(" ", cmd));
85+
} else {
86+
System.out.println(prepareWindowsCommand(cmd, env));
87+
}
7988
} else {
8089
// In bash, use NULL as the arg separator since it cannot be used in an argument.
8190
List<String> bashCmd = prepareBashCommand(cmd, env);
@@ -133,23 +142,29 @@ private static List<String> prepareBashCommand(List<String> cmd, Map<String, Str
133142
* Internal launcher used when command line parsing fails. This will behave differently depending
134143
* on the platform:
135144
*
136-
* - On Unix-like systems, it will print a call to the "usage" function with argument "1". The
137-
* function is expected to print the command's usage and exit with the provided exit code.
138-
* The script should use "export -f usage" after declaring a function called "usage", so that
139-
* the function is available to downstream scripts.
145+
* - On Unix-like systems, it will print a call to the "usage" function with two arguments: the
145+
*   error string, and the exit code to use. The function is expected to print the command's
147+
* usage and exit with the provided exit code. The script should use "export -f usage" after
148+
* declaring a function called "usage", so that the function is available to downstream scripts.
140149
*
141-
* - On Windows it will set the variable "SPARK_LAUNCHER_USAGE_ERROR" to "1". The batch script
142-
* should check for this variable and print its usage, since batch scripts don't really support
143-
* the "export -f" functionality used in bash.
150+
* - On Windows it will set the variable "SPARK_LAUNCHER_USAGE_ERROR" to the usage error message.
151+
* The batch script should check for this variable and print its usage, since batch scripts
152+
* don't really support the "export -f" functionality used in bash.
144153
*/
145-
private static class UsageLauncher extends AbstractCommandBuilder {
154+
private static class UsageCommandBuilder extends AbstractCommandBuilder {
155+
156+
private final String message;
157+
158+
UsageCommandBuilder(String message) {
159+
this.message = message;
160+
}
146161

147162
@Override
148163
public List<String> buildCommand(Map<String, String> env) {
149164
if (isWindows()) {
150-
return Arrays.asList("set", "SPARK_LAUNCHER_USAGE_ERROR=1");
165+
return Arrays.asList("set", "SPARK_LAUNCHER_USAGE_ERROR=" + message);
151166
} else {
152-
return Arrays.asList("usage", "1");
167+
return Arrays.asList("usage", message, "1");
153168
}
154169
}
155170

0 commit comments

Comments
 (0)