Commit 44cd5f7

Author: Marcelo Vanzin
Add package-info.java, clean up javadocs.
Mostly plays around with visibility modifiers so that most of the internal APIs are removed from the generated javadoc. The remaining leakage comes from CommandUtils.scala, which lives in a different package and therefore needs some methods to be protected instead of package-private...
1 parent f7cacff commit 44cd5f7
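A quick refresher on why the visibility change has this effect: by default, javadoc documents only public and protected members, so anything demoted to package-private disappears from the generated API docs, while protected members stay documented and remain reachable from subclasses in other packages. A hypothetical two-file sketch (illustration only, not Spark code):

```java
// File: launcher/Base.java -- hypothetical illustration, not Spark code.
package launcher;

import java.util.ArrayList;
import java.util.List;

public abstract class Base {
  // Package-private: omitted from default javadoc output, but visible only
  // to classes inside the 'launcher' package itself.
  List<String> buildLauncherCommand() {
    throw new UnsupportedOperationException("Subclasses must implement this method.");
  }

  // Protected: shows up in the javadoc, but it is the only way a subclass
  // living in another package can reach the helper.
  protected List<String> buildJavaCommand() {
    List<String> cmd = new ArrayList<String>();
    cmd.add("java");
    return cmd;
  }
}

// File: worker/WorkerLauncher.java -- a cross-package subclass.
package worker;

import java.util.List;

public class WorkerLauncher extends launcher.Base {
  public List<String> buildCommand() {
    return buildJavaCommand(); // OK: protected is visible to subclasses
    // buildLauncherCommand() would NOT compile here: package-private
    // members are invisible outside 'launcher'.
  }
}
```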

File tree

8 files changed: +103 −61 lines changed

core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala

Lines changed: 3 additions & 6 deletions

```diff
@@ -59,8 +59,7 @@ object CommandUtils extends Logging {
   private def buildCommandSeq(command: Command, memory: Int, sparkHome: String): Seq[String] = {
     // SPARK-698: do not call the run.cmd script, as process.destroy()
     // fails to kill a process tree on Windows
-    val cmd = new CommandLauncher(sparkHome, memory, command.environment)
-      .buildLauncherCommand(command.environment)
+    val cmd = new CommandLauncher(sparkHome, memory, command.environment).buildCommand()
     cmd.toSeq ++ Seq(command.mainClass) ++ command.arguments
   }

@@ -117,10 +116,8 @@ private class CommandLauncher(sparkHome: String, memoryMb: Int, env: Map[String,

   setSparkHome(sparkHome)

-  override def buildLauncherCommand(env: JMap[String, String]): JList[String] = {
-    val cmd = buildJavaCommand()
-    cmd.add("-cp")
-    cmd.add(buildClassPath(null).mkString(File.pathSeparator))
+  def buildCommand(): JList[String] = {
+    val cmd = buildJavaCommand(null)
     cmd.add(s"-Xms${memoryMb}M")
     cmd.add(s"-Xmx${memoryMb}M")
     addOptionString(cmd, getenv("SPARK_JAVA_OPTS"))
```
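The net effect on the worker side: CommandLauncher no longer assembles the classpath itself. buildJavaCommand(null), in the base class (see the AbstractLauncher diff below), now returns the java binary plus -cp, and buildCommand() only appends memory flags and JVM options. A simplified sketch of the resulting command shape, with example values assumed for illustration:

```java
// Simplified sketch (example values, not the actual implementation) of the
// command shape the worker-side CommandLauncher now produces.
import java.util.ArrayList;
import java.util.List;

class WorkerCommandSketch {
  static List<String> buildCommand(int memoryMb, String classPath) {
    List<String> cmd = new ArrayList<String>();
    cmd.add("java");
    cmd.add("-cp");  // now appended by the base class's buildJavaCommand()
    cmd.add(classPath);
    cmd.add("-Xms" + memoryMb + "M");
    cmd.add("-Xmx" + memoryMb + "M");
    return cmd;
  }

  public static void main(String[] args) {
    // Prints: [java, -cp, /opt/spark/lib/*, -Xms512M, -Xmx512M]
    System.out.println(buildCommand(512, "/opt/spark/lib/*"));
  }
}
```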

launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java

Lines changed: 21 additions & 13 deletions

```diff
@@ -42,15 +42,15 @@ public abstract class AbstractLauncher<T extends AbstractLauncher> extends Launc

   private static final String ENV_SPARK_HOME = "SPARK_HOME";
   private static final String DEFAULT_PROPERTIES_FILE = "spark-defaults.conf";
-  protected static final String DEFAULT_MEM = "512m";
+  static final String DEFAULT_MEM = "512m";

-  protected String javaHome;
-  protected String sparkHome;
-  protected String propertiesFile;
-  protected final Map<String, String> conf;
-  protected final Map<String, String> launcherEnv;
+  String javaHome;
+  String sparkHome;
+  String propertiesFile;
+  final Map<String, String> conf;
+  final Map<String, String> launcherEnv;

-  protected AbstractLauncher() {
+  AbstractLauncher() {
     this(Collections.<String, String>emptyMap());
   }
```

```diff
@@ -95,10 +95,16 @@ public T setConf(String key, String value) {
   /**
    * Launchers should implement this to create the command to be executed. This method should
    * also update the environment map with any environment variables needed by the child process.
+   * <p/>
+   * Note that this method is a no-op in the base class, even though subclasses in this package
+   * really must implement it. This approach was taken to allow this method to be package private
+   * while still allowing CommandUtils.scala to extend this class for its use.
    *
    * @param env Map containing environment variables to set for the Spark job.
    */
-  protected abstract List<String> buildLauncherCommand(Map<String, String> env) throws IOException;
+  List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
+    throw new UnsupportedOperationException("Subclasses must implement this method.");
+  }

   /**
    * Prepares the launcher command for execution from a shell script. This is used by the `Main`
```
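Worth spelling out why this stopped being abstract: Java has no visibility level that is both hidden from javadoc and overridable from another package, so the commit trades `protected abstract` for a concrete, package-private method that fails fast if a subclass forgets to override it. A minimal sketch of the pattern (illustrative names, not Spark code):

```java
// Minimal sketch of the fail-fast pseudo-abstract pattern used above.
import java.util.Collections;
import java.util.List;
import java.util.Map;

abstract class LauncherSketch {
  // Package-private, so it stays out of the public javadoc. Subclasses in
  // the same package override it; a subclass that forgets fails loudly at
  // runtime instead of at compile time, which is the price of dropping
  // the 'abstract' keyword.
  List<String> buildLauncherCommand(Map<String, String> env) {
    throw new UnsupportedOperationException("Subclasses must implement this method.");
  }
}

class UsageLauncherSketch extends LauncherSketch {
  @Override
  List<String> buildLauncherCommand(Map<String, String> env) {
    return Collections.singletonList("echo usage");
  }
}
```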
```diff
@@ -115,7 +121,7 @@ List<String> buildShellCommand() throws IOException {
    * user-specified properties file, or the spark-defaults.conf file under the Spark configuration
    * directory.
    */
-  protected Properties loadPropertiesFile() throws IOException {
+  Properties loadPropertiesFile() throws IOException {
     Properties props = new Properties();
     File propsFile;
     if (propertiesFile != null) {
```
```diff
@@ -144,14 +150,14 @@ protected Properties loadPropertiesFile() throws IOException {
     return props;
   }

-  protected String getSparkHome() {
+  String getSparkHome() {
     String path = getenv(ENV_SPARK_HOME);
     checkState(path != null,
       "Spark home not found; set it explicitly or use the SPARK_HOME environment variable.");
     return path;
   }

-  protected List<String> buildJavaCommand() throws IOException {
+  protected List<String> buildJavaCommand(String extraClassPath) throws IOException {
     List<String> cmd = new ArrayList<String>();
     if (javaHome == null) {
       cmd.add(join(File.separator, System.getProperty("java.home"), "bin", "java"));
```
```diff
@@ -180,6 +186,8 @@ protected List<String> buildJavaCommand() throws IOException {
       }
     }

+    cmd.add("-cp");
+    cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));
     return cmd;
   }
```
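With this hunk, classpath assembly lives in exactly one place: every launcher hands its (possibly null) extra classpath to buildJavaCommand() instead of repeating the cmd.add("-cp") dance. The joining itself is plain path-separator concatenation; a standalone sketch with made-up entries:

```java
// Standalone sketch of the classpath joining now centralized in
// buildJavaCommand(). Paths below are made up for illustration.
import java.io.File;
import java.util.Arrays;
import java.util.List;

class ClassPathSketch {
  public static void main(String[] args) {
    List<String> entries = Arrays.asList(
        "/opt/spark/conf/",                    // directories keep a trailing slash
        "/opt/spark/lib/spark-assembly.jar",
        "/extra/app-classes/");                // e.g. the extraClassPath argument
    // File.pathSeparator is ":" on Unix-like systems and ";" on Windows.
    String cp = String.join(File.pathSeparator, entries);
    System.out.println("java -cp " + cp + " <mainClass>");
  }
}
```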
```diff
@@ -196,7 +204,7 @@ protected void addOptionString(List<String> cmd, String options) {
    * each entry is formatted in the way expected by <i>java.net.URLClassLoader</i> (more
    * specifically, with trailing slashes for directories).
    */
-  protected List<String> buildClassPath(String appClassPath) throws IOException {
+  List<String> buildClassPath(String appClassPath) throws IOException {
     String sparkHome = getSparkHome();
     String scala = getScalaVersion();
```
```diff
@@ -313,7 +321,7 @@ private void addToClassPath(List<String> cp, String entries) {
     }
   }

-  protected String getScalaVersion() {
+  String getScalaVersion() {
     String scala = getenv("SPARK_SCALA_VERSION");
     if (scala != null) {
       return scala;
```

launcher/src/main/java/org/apache/spark/launcher/LauncherCommon.java

Lines changed: 12 additions & 12 deletions

```diff
@@ -51,12 +51,12 @@ public class LauncherCommon {
   public static final String EXECUTOR_CORES = "spark.executor.cores";

   /** Returns whether the given string is null or empty. */
-  protected static boolean isEmpty(String s) {
+  static boolean isEmpty(String s) {
     return s == null || s.isEmpty();
   }

   /** Joins a list of strings using the given separator. */
-  protected static String join(String sep, String... elements) {
+  static String join(String sep, String... elements) {
     StringBuilder sb = new StringBuilder();
     for (String e : elements) {
       if (e != null) {

@@ -70,7 +70,7 @@ protected static String join(String sep, String... elements) {
   }

   /** Joins a list of strings using the given separator. */
-  protected static String join(String sep, Iterable<String> elements) {
+  static String join(String sep, Iterable<String> elements) {
     StringBuilder sb = new StringBuilder();
     for (String e : elements) {
       if (e != null) {

@@ -84,7 +84,7 @@ protected static String join(String sep, Iterable<String> elements) {
   }

   /** Returns the first value mapped to the given key in the given maps. */
-  protected static String find(String key, Map<?, ?>... maps) {
+  static String find(String key, Map<?, ?>... maps) {
     for (Map<?, ?> map : maps) {
       String value = (String) map.get(key);
       if (!isEmpty(value)) {

@@ -95,7 +95,7 @@ protected static String find(String key, Map<?, ?>... maps) {
   }

   /** Returns the first non-empty, non-null string in the given list. */
-  protected static String firstNonEmpty(String... candidates) {
+  static String firstNonEmpty(String... candidates) {
     for (String s : candidates) {
       if (!isEmpty(s)) {
         return s;

@@ -105,7 +105,7 @@ protected static String firstNonEmpty(String... candidates) {
   }

   /** Returns the name of the env variable that holds the native library path. */
-  protected static String getLibPathEnvName() {
+  static String getLibPathEnvName() {
     if (isWindows()) {
       return "PATH";
     }

@@ -119,7 +119,7 @@ protected static String getLibPathEnvName() {
   }

   /** Returns whether the OS is Windows. */
-  protected static boolean isWindows() {
+  static boolean isWindows() {
     String os = System.getProperty("os.name");
     return os.startsWith("Windows");
   }

@@ -128,7 +128,7 @@ protected static boolean isWindows() {
    * Updates the user environment to contain the merged value of "envKey" after appending
    * the given path list.
    */
-  protected static void mergeEnvPathList(Map<String, String> userEnv, String envKey, String pathList) {
+  static void mergeEnvPathList(Map<String, String> userEnv, String envKey, String pathList) {
     if (!isEmpty(pathList)) {
       String current = firstNonEmpty(userEnv.get(envKey), System.getenv(envKey));
       userEnv.put(envKey, join(File.pathSeparator, current, pathList));

@@ -142,7 +142,7 @@ protected static void mergeEnvPathList(Map<String, String> userEnv, String envKe
    * Input: "\"ab cd\" efgh 'i \" j'"
    * Output: [ "ab cd", "efgh", "i \" j" ]
    */
-  protected static List<String> parseOptionString(String s) {
+  static List<String> parseOptionString(String s) {
     List<String> opts = new ArrayList<String>();
     StringBuilder opt = new StringBuilder();
     boolean inOpt = false;

@@ -224,21 +224,21 @@ protected static List<String> parseOptionString(String s) {
   }

   /** Throws IllegalArgumentException if the given object is null. */
-  protected static void checkNotNull(Object o, String arg) {
+  static void checkNotNull(Object o, String arg) {
     if (o == null) {
       throw new IllegalArgumentException(String.format("'%s' must not be null.", arg));
     }
   }

   /** Throws IllegalArgumentException with the given message if the check is false. */
-  protected static void checkArgument(boolean check, String msg, Object... args) {
+  static void checkArgument(boolean check, String msg, Object... args) {
     if (!check) {
       throw new IllegalArgumentException(String.format(msg, args));
     }
   }

   /** Throws IllegalStateException with the given message if the check is false. */
-  protected static void checkState(boolean check, String msg, Object... args) {
+  static void checkState(boolean check, String msg, Object... args) {
     if (!check) {
       throw new IllegalStateException(String.format(msg, args));
     }
```
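Now that these helpers are package-private they disappear from the public javadoc, so their semantics are worth pinning down: join skips null elements rather than rendering "null", and firstNonEmpty treats empty strings like nulls. A standalone reimplementation for illustration (the real firstNonEmpty's fallback when nothing matches is not visible in the hunk; returning null is an assumption here):

```java
// Illustrative reimplementation of the helper semantics shown above;
// not the Spark classes themselves.
class HelperSemanticsSketch {
  static boolean isEmpty(String s) {
    return s == null || s.isEmpty();
  }

  // Assumed fallback: the hunk only shows the loop, not what happens
  // when every candidate is empty.
  static String firstNonEmpty(String... candidates) {
    for (String s : candidates) {
      if (!isEmpty(s)) {
        return s;
      }
    }
    return null;
  }

  static String join(String sep, String... elements) {
    StringBuilder sb = new StringBuilder();
    for (String e : elements) {
      if (e != null) {            // null entries are skipped, not rendered
        if (sb.length() > 0) {
          sb.append(sep);
        }
        sb.append(e);
      }
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(join(":", "a", null, "b"));        // prints "a:b"
    System.out.println(firstNonEmpty(null, "", "512m"));  // prints "512m"
  }
}
```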

launcher/src/main/java/org/apache/spark/launcher/Main.java

Lines changed: 2 additions & 2 deletions

```diff
@@ -25,7 +25,7 @@
 /**
  * Command line interface for the Spark launcher. Used internally by Spark scripts.
  */
-public class Main extends LauncherCommon {
+class Main extends LauncherCommon {

   /**
    * Usage: Main [class] [class args]

@@ -97,7 +97,7 @@ public static void main(String[] argsArray) throws Exception {
   private static class UsageLauncher extends AbstractLauncher<UsageLauncher> {

     @Override
-    protected List<String> buildLauncherCommand(Map<String, String> env) {
+    List<String> buildLauncherCommand(Map<String, String> env) {
       if (isWindows()) {
         return Arrays.asList("set SPARK_LAUNCHER_USAGE_ERROR=1");
       } else {
```

launcher/src/main/java/org/apache/spark/launcher/SparkClassLauncher.java

Lines changed: 2 additions & 4 deletions

```diff
@@ -41,7 +41,7 @@ class SparkClassLauncher extends AbstractLauncher<SparkClassLauncher> {
   }

   @Override
-  protected List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
+  List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
     List<String> javaOptsKeys = new ArrayList<String>();
     String memKey = null;
     String extraClassPath = null;

@@ -91,16 +91,14 @@ protected List<String> buildLauncherCommand(Map<String, String> env) throws IOEx
       return buildSparkSubmitCommand(env);
     }

-    List<String> cmd = buildJavaCommand();
+    List<String> cmd = buildJavaCommand(extraClassPath);
     for (String key : javaOptsKeys) {
       addOptionString(cmd, System.getenv(key));
     }

     String mem = firstNonEmpty(memKey != null ? System.getenv(memKey) : null, DEFAULT_MEM);
     cmd.add("-Xms" + mem);
     cmd.add("-Xmx" + mem);
-    cmd.add("-cp");
-    cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));
     cmd.add(className);
     cmd.addAll(classArgs);
     return cmd;
```
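The memory flags above resolve in a fixed order: the environment variable named by memKey, when the class being launched declared one, and DEFAULT_MEM otherwise. A tiny sketch of that resolution (the env var name below is a plausible example, not taken from this diff):

```java
// Sketch of the memory resolution used above. The env var name is an
// illustrative assumption.
class MemorySketch {
  static final String DEFAULT_MEM = "512m";

  static String resolveMem(String memKey) {
    String fromEnv = (memKey != null) ? System.getenv(memKey) : null;
    return (fromEnv != null && !fromEnv.isEmpty()) ? fromEnv : DEFAULT_MEM;
  }

  public static void main(String[] args) {
    String mem = resolveMem("SPARK_DAEMON_MEMORY");  // assumed example key
    System.out.println("-Xms" + mem + " -Xmx" + mem);
  }
}
```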

launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java

Lines changed: 16 additions & 22 deletions

```diff
@@ -31,25 +31,22 @@
  * Use this class to start Spark applications programmatically. The class uses a builder pattern
  * to allow clients to configure the Spark application and launch it as a child process.
  * <p/>
- * There's also support for running the application on a separate thread, although that is to
- * be considered experimental and avoided in production environments.
- * <p/>
  * Note that launching Spark applications using this class will not automatically load environment
  * variables from the "spark-env.sh" or "spark-env.cmd" scripts in the configuration directory.
  */
 public class SparkLauncher extends AbstractLauncher<SparkLauncher> {

-  protected boolean verbose;
-  protected String appName;
-  protected String master;
-  protected String deployMode;
-  protected String mainClass;
-  protected String appResource;
-  protected final List<String> sparkArgs;
-  protected final List<String> appArgs;
-  protected final List<String> jars;
-  protected final List<String> files;
-  protected final List<String> pyFiles;
+  boolean verbose;
+  String appName;
+  String master;
+  String deployMode;
+  String mainClass;
+  String appResource;
+  final List<String> sparkArgs;
+  final List<String> appArgs;
+  final List<String> jars;
+  final List<String> files;
+  final List<String> pyFiles;

   public SparkLauncher() {
     this.sparkArgs = new ArrayList<String>();

@@ -218,20 +215,17 @@ List<String> buildSparkSubmitArgs() {
   }

   @Override
-  protected List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
-    List<String> cmd = buildJavaCommand();
-    addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
-    addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));
-
+  List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
     // Load the properties file and check whether spark-submit will be running the app's driver
     // or just launching a cluster app. When running the driver, the JVM's argument will be
     // modified to cover the driver's configuration.
     Properties props = loadPropertiesFile();
     boolean isClientMode = isClientMode(props);
-
     String extraClassPath = isClientMode ? find(DRIVER_EXTRA_CLASSPATH, conf, props) : null;
-    cmd.add("-cp");
-    cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));
+
+    List<String> cmd = buildJavaCommand(extraClassPath);
+    addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
+    addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));

     if (isClientMode) {
       // Figuring out where the memory value come from is a little tricky due to precedence.
```
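The hunk cuts off right at the precedence comment, so the following is a hedged sketch only: in client mode the driver memory is presumably resolved through a firstNonEmpty-style chain like the one below, whose exact order is an assumption for illustration, not something this diff shows:

```java
// Hedged sketch of a precedence chain like the one the trailing comment
// alludes to. The order below is assumed for illustration: explicit conf,
// then properties file, then an environment variable, then the default.
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

class PrecedenceSketch {
  static String firstNonEmpty(String... candidates) {
    for (String s : candidates) {
      if (s != null && !s.isEmpty()) {
        return s;
      }
    }
    return null;
  }

  public static void main(String[] args) {
    Map<String, String> conf = new HashMap<String, String>();
    Properties props = new Properties();
    props.setProperty("spark.driver.memory", "1g");

    String mem = firstNonEmpty(
        conf.get("spark.driver.memory"),          // 1. explicit setConf()
        props.getProperty("spark.driver.memory"), // 2. properties file
        System.getenv("SPARK_DRIVER_MEMORY"),     // 3. environment (assumed)
        "512m");                                  // 4. DEFAULT_MEM
    System.out.println("-Xmx" + mem);             // prints -Xmx1g
  }
}
```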

launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCliLauncher.java

Lines changed: 2 additions & 2 deletions

```diff
@@ -37,7 +37,7 @@
  * <p/>
  * This class has also some special features to aid PySparkLauncher.
  */
-public class SparkSubmitCliLauncher extends SparkLauncher {
+class SparkSubmitCliLauncher extends SparkLauncher {

   /**
    * Name of the app resource used to identify the PySpark shell. The command line parser expects

@@ -83,7 +83,7 @@ public class SparkSubmitCliLauncher extends SparkLauncher {
   }

   @Override
-  protected List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
+  List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
     if (PYSPARK_SHELL.equals(appResource)) {
       return buildPySparkShellCommand(env);
     } else {
```
launcher/src/main/java/org/apache/spark/launcher/package-info.java

Lines changed: 45 additions & 0 deletions

```diff
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Library for launching Spark applications.
+ * <p/>
+ * This library allows applications to launch Spark programmatically. There's only one entry
+ * point to the library - the {@link org.apache.spark.launcher.SparkLauncher} class.
+ * <p/>
+ * To launch a Spark application, just instantiate a {@link org.apache.spark.launcher.SparkLauncher}
+ * and configure the application to run. For example:
+ *
+ * <pre>
+ * {@code
+ *   import org.apache.spark.launcher.SparkLauncher;
+ *
+ *   public class MyLauncher {
+ *     public static void main(String[] args) throws Exception {
+ *       Process spark = new SparkLauncher()
+ *         .setAppResource("/my/app.jar")
+ *         .setMainClass("my.spark.app.Main")
+ *         .setMaster("local")
+ *         .setConf(SparkLauncher.DRIVER_MEMORY, "2g")
+ *         .launch();
+ *       spark.waitFor();
+ *     }
+ *   }
+ * }
+ * </pre>
+ */
+package org.apache.spark.launcher;
```
