
Commit 63ccdef

jerryshao authored and Andrew Or committed
[SPARK-10123][DEPLOY] Support specifying deploy mode from configuration
Please help to review, thanks a lot.

Author: jerryshao <[email protected]>

Closes #10195 from jerryshao/SPARK-10123.
1 parent 765a488 commit 63ccdef


5 files changed, +64 -7 lines changed


core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 4 additions & 1 deletion
@@ -176,7 +176,10 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
     packagesExclusions = Option(packagesExclusions)
       .orElse(sparkProperties.get("spark.jars.excludes")).orNull
-    deployMode = Option(deployMode).orElse(env.get("DEPLOY_MODE")).orNull
+    deployMode = Option(deployMode)
+      .orElse(sparkProperties.get("spark.submit.deployMode"))
+      .orElse(env.get("DEPLOY_MODE"))
+      .orNull
     numExecutors = Option(numExecutors)
       .getOrElse(sparkProperties.get("spark.executor.instances").orNull)
     keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
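
Taken together, the new chain resolves the deploy mode in this order: the explicit --deploy-mode flag, then the new spark.submit.deployMode property, then the legacy DEPLOY_MODE environment variable; when all three are absent, SparkSubmit later falls back to client mode (as the new test below asserts). A minimal standalone Scala sketch of that precedence, with illustrative names that are not part of Spark's code:

// Illustrative sketch only: mirrors the Option chain added above, outside of Spark's classes.
def resolveDeployMode(
    cliDeployMode: Option[String],          // value of --deploy-mode, if given
    sparkProperties: Map[String, String],   // properties from --conf / spark-defaults.conf
    env: Map[String, String]): String = {   // process environment
  cliDeployMode
    .orElse(sparkProperties.get("spark.submit.deployMode"))
    .orElse(env.get("DEPLOY_MODE"))
    .getOrElse("client")                    // a missing value ultimately means client mode
}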

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 41 additions & 0 deletions
@@ -136,6 +136,47 @@ class SparkSubmitSuite
     appArgs.childArgs should be (Seq("--master", "local", "some", "--weird", "args"))
   }
 
+  test("specify deploy mode through configuration") {
+    val clArgs = Seq(
+      "--master", "yarn",
+      "--conf", "spark.submit.deployMode=client",
+      "--class", "org.SomeClass",
+      "thejar.jar"
+    )
+    val appArgs = new SparkSubmitArguments(clArgs)
+    val (_, _, sysProps, _) = prepareSubmitEnvironment(appArgs)
+
+    appArgs.deployMode should be ("client")
+    sysProps("spark.submit.deployMode") should be ("client")
+
+    // Both cmd line and configuration are specified, cmdline option takes the priority
+    val clArgs1 = Seq(
+      "--master", "yarn",
+      "--deploy-mode", "cluster",
+      "--conf", "spark.submit.deployMode=client",
+      "--class", "org.SomeClass",
+      "thejar.jar"
+    )
+    val appArgs1 = new SparkSubmitArguments(clArgs1)
+    val (_, _, sysProps1, _) = prepareSubmitEnvironment(appArgs1)
+
+    appArgs1.deployMode should be ("cluster")
+    sysProps1("spark.submit.deployMode") should be ("cluster")
+
+    // Neither cmdline nor configuration are specified, client mode is the default choice
+    val clArgs2 = Seq(
+      "--master", "yarn",
+      "--class", "org.SomeClass",
+      "thejar.jar"
+    )
+    val appArgs2 = new SparkSubmitArguments(clArgs2)
+    appArgs2.deployMode should be (null)
+
+    val (_, _, sysProps2, _) = prepareSubmitEnvironment(appArgs2)
+    appArgs2.deployMode should be ("client")
+    sysProps2("spark.submit.deployMode") should be ("client")
+  }
+
   test("handles YARN cluster mode") {
     val clArgs = Seq(
       "--deploy-mode", "cluster",

docs/configuration.md

Lines changed: 12 additions & 3 deletions
@@ -48,7 +48,7 @@ The following format is accepted:
     1y (years)
 
 
-Properties that specify a byte size should be configured with a unit of size.
+Properties that specify a byte size should be configured with a unit of size.
 The following format is accepted:
 
     1b (bytes)

(Whitespace-only change: the modified line differs only in trailing whitespace.)
@@ -192,6 +192,15 @@ of the most common options to set are:
     <a href="submitting-applications.html#master-urls"> allowed master URL's</a>.
   </td>
 </tr>
+<tr>
+  <td><code>spark.submit.deployMode</code></td>
+  <td>(none)</td>
+  <td>
+    The deploy mode of the Spark driver program, either "client" or "cluster",
+    which means to launch the driver program locally ("client")
+    or remotely ("cluster") on one of the nodes inside the cluster.
+  </td>
+</tr>
 </table>
 
 Apart from these, the following properties are also available, and may be useful in some situations:
@@ -1095,7 +1104,7 @@ Apart from these, the following properties are also available, and may be useful
   <td><code>spark.rpc.lookupTimeout</code></td>
   <td>120s</td>
   <td>
-    Duration for an RPC remote endpoint lookup operation to wait before timing out.
+    Duration for an RPC remote endpoint lookup operation to wait before timing out.
   </td>
 </tr>
 </table>

(Whitespace-only change: the modified line differs only in trailing whitespace.)
@@ -1559,7 +1568,7 @@ Apart from these, the following properties are also available, and may be useful
   <td><code>spark.streaming.stopGracefullyOnShutdown</code></td>
   <td>false</td>
   <td>
-    If <code>true</code>, Spark shuts down the <code>StreamingContext</code> gracefully on JVM
+    If <code>true</code>, Spark shuts down the <code>StreamingContext</code> gracefully on JVM
     shutdown rather than immediately.
   </td>
 </tr>

(Whitespace-only change: the modified line differs only in trailing whitespace.)
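
Since SparkSubmit writes the resolved mode back into the submission properties (as the sysProps assertions in the test above show), an application could presumably read it at runtime from its own SparkConf. A hypothetical sketch, assuming the program is launched through spark-submit so that spark.master and the resolved deploy mode are already supplied; the app name is a placeholder:

import org.apache.spark.{SparkConf, SparkContext}

object DeployModeCheck {
  def main(args: Array[String]): Unit = {
    // Assumes launch via spark-submit, which provides spark.master and spark.submit.deployMode.
    val sc = new SparkContext(new SparkConf().setAppName("deploy-mode-check"))
    val mode = sc.getConf.get("spark.submit.deployMode", "client")  // fall back to client if unset
    println(s"Driver is running in $mode mode")
    sc.stop()
  }
}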

launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java

Lines changed: 3 additions & 0 deletions
@@ -40,6 +40,9 @@ public class SparkLauncher {
   /** The Spark master. */
   public static final String SPARK_MASTER = "spark.master";
 
+  /** The Spark deploy mode. */
+  public static final String DEPLOY_MODE = "spark.submit.deployMode";
+
   /** Configuration key for the driver memory. */
   public static final String DRIVER_MEMORY = "spark.driver.memory";
   /** Configuration key for the driver class path. */
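
The new constant gives programmatic launchers a named handle on the same property that --conf spark.submit.deployMode=... (or a spark-defaults.conf entry) sets on the command line. A hypothetical usage sketch in Scala; the jar path, main class, and master below are placeholders rather than values from this commit:

import org.apache.spark.launcher.SparkLauncher

object LauncherExample {
  def main(args: Array[String]): Unit = {
    val spark = new SparkLauncher()
      .setAppResource("/path/to/thejar.jar")          // placeholder application jar
      .setMainClass("org.SomeClass")                  // placeholder main class
      .setMaster("yarn")
      .setConf(SparkLauncher.DEPLOY_MODE, "cluster")  // the constant added above
      .launch()                                       // returns a java.lang.Process
    spark.waitFor()
  }
}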

launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java

Lines changed: 4 additions & 3 deletions
@@ -294,10 +294,11 @@ private void constructEnvVarArgs(
 
   private boolean isClientMode(Map<String, String> userProps) {
     String userMaster = firstNonEmpty(master, userProps.get(SparkLauncher.SPARK_MASTER));
-    // Default master is "local[*]", so assume client mode in that case.
+    String userDeployMode = firstNonEmpty(deployMode, userProps.get(SparkLauncher.DEPLOY_MODE));
+    // Default master is "local[*]", so assume client mode in that case
     return userMaster == null ||
-      "client".equals(deployMode) ||
-      (!userMaster.equals("yarn-cluster") && deployMode == null);
+      "client".equals(userDeployMode) ||
+      (!userMaster.equals("yarn-cluster") && userDeployMode == null);
   }
 
   /**
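
The behavioral change is that isClientMode now consults the user's properties (via the new SparkLauncher.DEPLOY_MODE key) rather than only the builder's own deployMode field. A simplified Scala restatement of the updated decision, for illustration only:

// Illustrative only; the real logic is the Java method above.
def isClientMode(userMaster: Option[String], userDeployMode: Option[String]): Boolean = {
  userMaster.isEmpty ||                               // default master is "local[*]", so assume client mode
    userDeployMode.contains("client") ||
    (userMaster.exists(_ != "yarn-cluster") && userDeployMode.isEmpty)
}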
