Skip to content

Commit 1b3f6e9

Browse files
author
Marcelo Vanzin
committed
Call SparkSubmit from the spark-class launcher for unknown classes.
For new-style launchers, perform the launch via SparkSubmit; this will hopefully be the preferred method of launching any new daemons. Currently it handles the Thrift server daemon.
1 parent 25c5ae6 commit 1b3f6e9

File tree

5 files changed: +27 additions, -22 deletions

launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -194,15 +194,15 @@ protected List<String> buildClassPath(String appClassPath) throws IOException {
194194
}
195195

196196
boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES"));
197-
boolean isTesting = !isEmpty(getenv("SPARK_TESTING"));
197+
boolean isTesting = "1".equals(getenv("SPARK_TESTING"));
198198
if (prependClasses || isTesting) {
199-
System.err.println(
200-
"NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark classes ahead of " +
201-
"assembly.");
202199
List<String> projects = Arrays.asList("core", "repl", "mllib", "bagel", "graphx",
203200
"streaming", "tools", "sql/catalyst", "sql/core", "sql/hive", "sql/hive-thriftserver",
204201
"yarn", "launcher");
205202
if (prependClasses) {
203+
System.err.println(
204+
"NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark classes ahead of " +
205+
"assembly.");
206206
for (String project : projects) {
207207
addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project,
208208
scala));

launcher/src/main/java/org/apache/spark/launcher/SparkClassLauncher.java

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,6 @@ class SparkClassLauncher extends AbstractLauncher<SparkClassLauncher> {
4242

4343
@Override
4444
protected List<String> buildLauncherCommand() throws IOException {
45-
List<String> cmd = createJavaCommand();
46-
4745
List<String> javaOptsKeys = new ArrayList<String>();
4846
String memKey = null;
4947
String extraClassPath = null;
@@ -88,8 +86,12 @@ protected List<String> buildLauncherCommand() throws IOException {
8886
toolsDir.getAbsolutePath(), className);
8987

9088
javaOptsKeys.add("SPARK_JAVA_OPTS");
89+
} else {
90+
// Any classes not explicitly listed above are submitted using SparkSubmit.
91+
return buildSparkSubmitCommand();
9192
}
9293

94+
List<String> cmd = createJavaCommand();
9395
for (String key : javaOptsKeys) {
9496
addOptionString(cmd, System.getenv(key));
9597
}
@@ -104,4 +106,14 @@ protected List<String> buildLauncherCommand() throws IOException {
104106
return prepareForOs(cmd, null, Collections.<String, String>emptyMap());
105107
}
106108

109+
private List<String> buildSparkSubmitCommand() throws IOException {
110+
List<String> sparkSubmitArgs = new ArrayList<String>(classArgs);
111+
sparkSubmitArgs.add(SparkSubmitOptionParser.CLASS);
112+
sparkSubmitArgs.add(className);
113+
114+
SparkSubmitCliLauncher launcher = new SparkSubmitCliLauncher(true, sparkSubmitArgs);
115+
launcher.setAppResource("spark-internal");
116+
return launcher.buildLauncherCommand();
117+
}
118+
107119
}

launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCliLauncher.java

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -49,16 +49,16 @@ public class SparkSubmitCliLauncher extends SparkLauncher {
4949
}
5050

5151
private final List<String> driverArgs;
52-
private boolean isShell;
52+
private boolean hasMixedArguments;
5353

5454
SparkSubmitCliLauncher(List<String> args) {
5555
this(false, args);
5656
}
5757

58-
SparkSubmitCliLauncher(boolean isShell, List<String> args) {
58+
SparkSubmitCliLauncher(boolean hasMixedArguments, List<String> args) {
5959
boolean sparkSubmitOptionsEnded = false;
6060
this.driverArgs = new ArrayList<String>();
61-
this.isShell = isShell;
61+
this.hasMixedArguments = hasMixedArguments;
6262
new OptionParser().parse(args);
6363
}
6464

@@ -126,7 +126,7 @@ protected boolean handle(String opt, String value) {
126126
// non-spark-submit arguments.
127127
setClass(value);
128128
if (shells.containsKey(value)) {
129-
isShell = true;
129+
hasMixedArguments = true;
130130
setAppResource(shells.get(value));
131131
}
132132
} else {
@@ -137,10 +137,10 @@ protected boolean handle(String opt, String value) {
137137

138138
@Override
139139
protected boolean handleUnknown(String opt) {
140-
// When running a shell, add unrecognized parameters directly to the user arguments list.
140+
// When mixing arguments, add unrecognized parameters directly to the user arguments list.
141141
// In normal mode, any unrecognized parameter triggers the end of command line parsing.
142142
// The remaining params will be appended to the list of SparkSubmit arguments.
143-
if (isShell) {
143+
if (hasMixedArguments) {
144144
addArgs(opt);
145145
return true;
146146
} else {

sbin/spark-daemon.sh

Lines changed: 2 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -124,7 +124,7 @@ fi
124124

125125
case $option in
126126

127-
(start|spark-submit)
127+
(start)
128128

129129
mkdir -p "$SPARK_PID_DIR"
130130

@@ -142,14 +142,7 @@ case $option in
142142

143143
spark_rotate_log "$log"
144144
echo starting $command, logging to $log
145-
if [ $option == spark-submit ]; then
146-
source "$SPARK_HOME"/bin/utils.sh
147-
gatherSparkSubmitOpts "$@"
148-
nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-submit --class $command \
149-
"${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}" >> "$log" 2>&1 < /dev/null &
150-
else
151-
nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
152-
fi
145+
nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
153146
newpid=$!
154147
echo $newpid > $pid
155148
sleep 2

sbin/start-thriftserver.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,4 +50,4 @@ fi
5050

5151
export SUBMIT_USAGE_FUNCTION=usage
5252

53-
exec "$FWDIR"/sbin/spark-daemon.sh spark-submit $CLASS 1 "$@"
53+
exec "$FWDIR"/sbin/spark-daemon.sh start $CLASS 1 "$@"

0 commit comments

Comments (0)