Commit 77774ba

Author: Andrew Or

Minor fixes
1 parent 206cae4 commit 77774ba

3 files changed (+9 / -8 lines)

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 3 additions & 4 deletions
@@ -118,10 +118,9 @@ object SparkSubmit {
    */
   private def submit(args: SparkSubmitArguments): Unit = {
     val (childArgs, childClasspath, sysProps, childMainClass) = prepareSubmitEnvironment(args)
-    val isStandaloneCluster = args.master.startsWith("spark://") && args.deployMode == "cluster"
     // In standalone cluster mode, use the stable application submission REST protocol.
     // Otherwise, just call the main method of the child class.
-    if (isStandaloneCluster) {
+    if (args.isStandaloneCluster) {
       // NOTE: since we mutate the values of some configs in `prepareSubmitEnvironment`, we
       // must update the corresponding fields in the original SparkSubmitArguments to reflect
       // these changes.
@@ -146,7 +145,7 @@ object SparkSubmit {
    */
   private[spark] def prepareSubmitEnvironment(args: SparkSubmitArguments)
       : (Seq[String], Seq[String], Map[String, String], String) = {
-    // Environment needed to launch the child main class
+    // Return values
     val childArgs = new ArrayBuffer[String]()
     val childClasspath = new ArrayBuffer[String]()
     val sysProps = new HashMap[String, String]()
@@ -158,7 +157,7 @@ object SparkSubmit {
       case m if m.startsWith("spark") => STANDALONE
       case m if m.startsWith("mesos") => MESOS
       case m if m.startsWith("local") => LOCAL
-      case _ => printErrorAndExit("Master must start with yarn, spark, mesos, local, or rest"); -1
+      case _ => printErrorAndExit("Master must start with yarn, spark, mesos or local"); -1
     }

     // Set the deploy mode; default is client mode
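For context, the change above moves the standalone-cluster check onto the arguments object, so submit() now branches only on args.isStandaloneCluster. A minimal sketch of that control flow, with a hypothetical SubmitArgs stand-in for SparkSubmitArguments and println placeholders for the two launch paths:

// Simplified sketch of submit()'s branching after this commit.
// SubmitArgs and the println bodies are illustrative stand-ins, not Spark code.
case class SubmitArgs(master: String, deployMode: String) {
  // The check the commit factors out of submit() itself.
  def isStandaloneCluster: Boolean =
    master.startsWith("spark://") && deployMode == "cluster"
}

object SubmitSketch {
  def submit(args: SubmitArgs): Unit = {
    if (args.isStandaloneCluster) {
      // Standalone cluster mode: submit through the REST application gateway.
      println(s"Submitting to ${args.master} via the REST protocol")
    } else {
      // Any other mode: run the child class's main method directly.
      println("Invoking the child main class")
    }
  }
}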

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 6 additions & 2 deletions
@@ -231,7 +231,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
   }

   private def validateKillArguments(): Unit = {
-    if (!master.startsWith("spark://") || deployMode != "cluster") {
+    if (!isStandaloneCluster) {
       SparkSubmit.printErrorAndExit("Killing drivers is only supported in standalone cluster mode")
     }
     if (driverToKill == null) {
@@ -240,7 +240,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
   }

   private def validateStatusRequestArguments(): Unit = {
-    if (!master.startsWith("spark://") || deployMode != "cluster") {
+    if (!isStandaloneCluster) {
       SparkSubmit.printErrorAndExit(
         "Requesting driver statuses is only supported in standalone cluster mode")
     }
@@ -249,6 +249,10 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
     }
   }

+  def isStandaloneCluster: Boolean = {
+    master.startsWith("spark://") && deployMode == "cluster"
+  }
+
   override def toString = {
     s"""Parsed arguments:
     |  master                  $master
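The extracted predicate is easy to exercise in isolation. A small, hypothetical demo (not part of the patch) showing which master/deployMode combinations count as standalone cluster mode:

// Hypothetical demo of the extracted check; ArgsDemo is not a Spark class.
class ArgsDemo(val master: String, val deployMode: String) {
  // Same condition the commit factors out of the two validate* methods.
  def isStandaloneCluster: Boolean =
    master.startsWith("spark://") && deployMode == "cluster"
}

object IsStandaloneClusterDemo extends App {
  println(new ArgsDemo("spark://host:7077", "cluster").isStandaloneCluster) // true
  println(new ArgsDemo("spark://host:7077", "client").isStandaloneCluster)  // false
  println(new ArgsDemo("yarn", "cluster").isStandaloneCluster)              // false
}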

core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala

Lines changed: 0 additions & 2 deletions
@@ -135,8 +135,6 @@ private[spark] class StandaloneRestServerHandler(
     // Translate all fields to the relevant Spark properties
     val conf = new SparkConf(false)
       .setAll(sparkProperties)
-      // Use the actual master URL instead of the one that refers to this REST server
-      // Otherwise, once the driver is launched it will contact with the wrong server
       .set("spark.master", masterUrl)
       .set("spark.app.name", appName)
     jars.foreach { j => conf.set("spark.jars", j) }
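The deleted comments explained why the handler overwrites spark.master; the behavior itself is unchanged by this commit. A standalone sketch of that overwrite, assuming spark-core on the classpath (buildDriverConf is a hypothetical helper, not the handler's real method):

import org.apache.spark.SparkConf

object MasterOverrideSketch {
  // Client-supplied properties may point spark.master at the REST server,
  // so the handler's real master URL is applied last and wins.
  def buildDriverConf(
      sparkProperties: Map[String, String],
      masterUrl: String,
      appName: String): SparkConf = {
    new SparkConf(false)
      .setAll(sparkProperties)
      .set("spark.master", masterUrl)
      .set("spark.app.name", appName)
  }
}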
