Skip to content

Commit 91b244a

Browse files
committed
Change format to --conf PROP=VALUE
1 parent 8fabe77 commit 91b244a

File tree

3 files changed

+15
-14
lines changed

3 files changed

+15
-14
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -291,6 +291,14 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
291291
jars = Utils.resolveURIs(value)
292292
parse(tail)
293293

294+
case ("--conf") :: value :: tail =>
295+
val equalsIndex = value.indexOf('=')
296+
if (equalsIndex == -1) {
297+
SparkSubmit.printErrorAndExit(s"Spark config without '=': $value")
298+
}
299+
sparkProperties(value.substring(0, equalsIndex)) = value.substring(equalsIndex+1)
300+
parse(tail)
301+
294302
case ("--help" | "-h") :: tail =>
295303
printUsageAndExit(0)
296304

@@ -305,14 +313,6 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
305313
case v if v.startsWith("--") && v.contains("=") && v.split("=").size == 2 =>
306314
val parts = v.split("=")
307315
parse(Seq(parts(0), parts(1)) ++ tail)
308-
// spark config property
309-
case v if v.startsWith("--spark.") =>
310-
if (tail.isEmpty) {
311-
val errMessage = s"Spark config without value: $v"
312-
SparkSubmit.printErrorAndExit(errMessage)
313-
}
314-
sparkProperties(v.substring(2)) = tail.head
315-
parse(tail.tail)
316316
case v if v.startsWith("-") =>
317317
val errMessage = s"Unrecognized option '$value'."
318318
SparkSubmit.printErrorAndExit(errMessage)
@@ -372,6 +372,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
372372
|
373373
| --help, -h Show this help message and exit
374374
| --verbose, -v Print additional debug output
375+
| --conf PROP=VALUE Arbitrary Spark configuration property.
375376
|
376377
| Spark standalone with cluster deploy mode only:
377378
| --driver-cores NUM Cores for driver (Default: 1).

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ class SparkSubmitSuite extends FunSuite with Matchers {
120120
"--archives", "archive1.txt,archive2.txt",
121121
"--num-executors", "6",
122122
"--name", "beauty",
123-
"--spark.shuffle.spill", "false",
123+
"--conf", "spark.shuffle.spill=false",
124124
"thejar.jar",
125125
"arg1", "arg2")
126126
val appArgs = new SparkSubmitArguments(clArgs)
@@ -158,7 +158,7 @@ class SparkSubmitSuite extends FunSuite with Matchers {
158158
"--archives", "archive1.txt,archive2.txt",
159159
"--num-executors", "6",
160160
"--name", "trill",
161-
"--spark.shuffle.spill", "false",
161+
"--conf", "spark.shuffle.spill=false",
162162
"thejar.jar",
163163
"arg1", "arg2")
164164
val appArgs = new SparkSubmitArguments(clArgs)
@@ -190,7 +190,7 @@ class SparkSubmitSuite extends FunSuite with Matchers {
190190
"--supervise",
191191
"--driver-memory", "4g",
192192
"--driver-cores", "5",
193-
"--spark.shuffle.spill", "false",
193+
"--conf", "spark.shuffle.spill=false",
194194
"thejar.jar",
195195
"arg1", "arg2")
196196
val appArgs = new SparkSubmitArguments(clArgs)
@@ -214,7 +214,7 @@ class SparkSubmitSuite extends FunSuite with Matchers {
214214
"--total-executor-cores", "5",
215215
"--class", "org.SomeClass",
216216
"--driver-memory", "4g",
217-
"--spark.shuffle.spill", "false",
217+
"--conf", "spark.shuffle.spill=false",
218218
"thejar.jar",
219219
"arg1", "arg2")
220220
val appArgs = new SparkSubmitArguments(clArgs)
@@ -236,7 +236,7 @@ class SparkSubmitSuite extends FunSuite with Matchers {
236236
"--total-executor-cores", "5",
237237
"--class", "org.SomeClass",
238238
"--driver-memory", "4g",
239-
"--spark.shuffle.spill", "false",
239+
"--conf", "spark.shuffle.spill=false",
240240
"thejar.jar",
241241
"arg1", "arg2")
242242
val appArgs = new SparkSubmitArguments(clArgs)

docs/configuration.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ val sc = new SparkContext(new SparkConf())
4242

4343
Then, you can supply configuration values at runtime:
4444
{% highlight bash %}
45-
./bin/spark-submit --name "My fancy app" --master local[4] myApp.jar --spark.shuffle.spill false
45+
./bin/spark-submit --name "My app" --master local[4] myApp.jar --conf spark.shuffle.spill=false
4646
{% endhighlight %}
4747

4848
The Spark shell and [`spark-submit`](cluster-overview.html#launching-applications-with-spark-submit)

0 commit comments

Comments (0)