
Commit c9a8ad7

Committed by Andrew Or
Do not include appResource and mainClass as properties
1 parent 6fc7670 commit c9a8ad7

6 files changed: 34 additions & 27 deletions

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 2 additions & 3 deletions
@@ -304,8 +304,6 @@ object SparkSubmit {
       OptionAssigner(args.ivyRepoPath, STANDALONE, CLUSTER, sysProp = "spark.jars.ivy"),
       OptionAssigner(args.driverMemory, STANDALONE, CLUSTER, sysProp = "spark.driver.memory"),
       OptionAssigner(args.driverCores, STANDALONE, CLUSTER, sysProp = "spark.driver.cores"),
-      OptionAssigner(args.primaryResource, STANDALONE, CLUSTER, sysProp = "spark.app.resource"),
-      OptionAssigner(args.mainClass, STANDALONE, CLUSTER, sysProp = "spark.app.mainClass"),
       OptionAssigner(args.supervise.toString, STANDALONE, CLUSTER,
         sysProp = "spark.driver.supervise"),

@@ -375,6 +373,7 @@ object SparkSubmit {
     if (args.isStandaloneCluster) {
       if (args.useRest) {
         childMainClass = "org.apache.spark.deploy.rest.StandaloneRestClient"
+        childArgs += (args.primaryResource, args.mainClass)
       } else {
         // In legacy standalone cluster mode, use Client as a wrapper around the user class
         childMainClass = "org.apache.spark.deploy.Client"

@@ -773,7 +772,7 @@ private[spark] object SparkSubmitUtils {
  * Provides an indirection layer for passing arguments as system properties or flags to
  * the user's driver program or to downstream launcher tools.
  */
-private[spark] case class OptionAssigner(
+private case class OptionAssigner(
     value: String,
     clusterManager: Int,
     deployMode: Int,
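
Taken together, the two hunks above move the primary resource and main class out of system properties and into the REST client's positional arguments. A minimal, self-contained sketch of that hand-off (the values and the ChildArgsSketch wrapper are made up for illustration; only the variable names come from the diff):

object ChildArgsSketch {
  def main(args: Array[String]): Unit = {
    val primaryResource = "file:/tmp/app.jar"   // hypothetical application jar
    val mainClass = "com.example.MyApp"         // hypothetical user main class
    val userArgs = Seq("--iterations", "10")    // hypothetical application arguments

    // Previously carried as sysProps "spark.app.resource" / "spark.app.mainClass";
    // now they are the first two child arguments handed to StandaloneRestClient.main.
    val childArgs = Seq(primaryResource, mainClass) ++ userArgs
    println(childArgs.mkString(" "))
  }
}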

core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestClient.scala

Lines changed: 16 additions & 9 deletions
@@ -56,13 +56,10 @@ private[spark] class StandaloneRestClient extends Logging {
    */
   def createSubmission(
       master: String,
-      appArgs: Array[String],
-      sparkProperties: Map[String, String],
-      environmentVariables: Map[String, String]): SubmitRestProtocolResponse = {
+      request: CreateSubmissionRequest): SubmitRestProtocolResponse = {
     logInfo(s"Submitting a request to launch a driver in $master.")
     validateMaster(master)
     val url = getSubmitUrl(master)
-    val request = constructSubmitRequest(appArgs, sparkProperties, environmentVariables)
     val response = postJson(url, request.toJson)
     response match {
       case s: CreateSubmissionResponse =>

@@ -202,11 +199,15 @@ private[spark] class StandaloneRestClient extends Logging {

   /** Construct a message that captures the specified parameters for submitting an application. */
   def constructSubmitRequest(
+      appResource: String,
+      mainClass: String,
       appArgs: Array[String],
       sparkProperties: Map[String, String],
       environmentVariables: Map[String, String]): CreateSubmissionRequest = {
     val message = new CreateSubmissionRequest
     message.clientSparkVersion = sparkVersion
+    message.appResource = appResource
+    message.mainClass = mainClass
     message.appArgs = appArgs
     message.sparkProperties = sparkProperties
     message.environmentVariables = environmentVariables

@@ -286,17 +287,23 @@ private[spark] object StandaloneRestClient {
   val REPORT_DRIVER_STATUS_MAX_TRIES = 10
   val PROTOCOL_VERSION = "v1"

-  /**
-   * Submit an application, assuming parameters are specified through system properties.
-   * Usage: StandaloneRestClient [app args*]
-   */
+  /** Submit an application, assuming parameters are specified through system properties. */
   def main(args: Array[String]): Unit = {
+    if (args.size < 2) {
+      sys.error("Usage: StandaloneRestClient [app resource] [main class] [app args*]")
+      sys.exit(1)
+    }
+    val appResource = args(0)
+    val mainClass = args(1)
+    val appArgs = args.slice(2, args.size)
     val client = new StandaloneRestClient
     val master = sys.props.get("spark.master").getOrElse {
       throw new IllegalArgumentException("'spark.master' must be set.")
     }
     val sparkProperties = new SparkConf().getAll.toMap
     val environmentVariables = sys.env.filter { case (k, _) => k.startsWith("SPARK_") }
-    client.createSubmission(master, args, sparkProperties, environmentVariables)
+    val submitRequest = client.constructSubmitRequest(
+      appResource, mainClass, appArgs, sparkProperties, environmentVariables)
+    client.createSubmission(master, submitRequest)
   }
 }
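
With this refactoring, callers build the request explicitly and then post it. A sketch of the two-step flow (the jar path, class name, and master URL are made up; note that StandaloneRestClient is private[spark], so this only compiles from inside an org.apache.spark package):

import org.apache.spark.deploy.rest.StandaloneRestClient

object RestSubmitSketch {
  def main(args: Array[String]): Unit = {
    val client = new StandaloneRestClient
    // Step 1: capture the submission parameters in a CreateSubmissionRequest.
    val request = client.constructSubmitRequest(
      appResource = "file:/tmp/app.jar",              // hypothetical application jar
      mainClass = "com.example.MyApp",                // hypothetical user main class
      appArgs = Array("--input", "/data"),            // hypothetical application arguments
      sparkProperties = Map("spark.app.name" -> "MyApp"),
      environmentVariables = Map.empty)
    // Step 2: post the request; createSubmission validates the master URL before sending.
    client.createSubmission("spark://host:6066", request)  // hypothetical master URL
  }
}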

core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala

Lines changed: 3 additions & 5 deletions
@@ -337,17 +337,15 @@ private[spark] class SubmitRequestServlet(master: Master) extends StandaloneRest
    * cluster mode yet.
    */
   private def buildDriverDescription(request: CreateSubmissionRequest): DriverDescription = {
-    val sparkProperties = request.sparkProperties

     // Required fields, including the main class because python is not yet supported
-    val appResource = sparkProperties.get("spark.app.resource").getOrElse {
-      throw new SubmitRestMissingFieldException("Main application resource is missing.")
-    }
-    val mainClass = sparkProperties.get("spark.app.mainClass").getOrElse {
+    val appResource = request.appResource
+    val mainClass = Option(request.mainClass).getOrElse {
       throw new SubmitRestMissingFieldException("Main class is missing.")
     }

     // Optional fields
+    val sparkProperties = request.sparkProperties
     val driverMemory = sparkProperties.get("spark.driver.memory")
     val driverCores = sparkProperties.get("spark.driver.cores")
     val driverExtraJavaOptions = sparkProperties.get("spark.driver.extraJavaOptions")
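
One asymmetry worth noting in the hunk above: appResource is read directly because the protocol layer now rejects requests that omit it (see the SubmitRestProtocolRequest change below), while mainClass is still a nullable message field, so the servlet guards it with Option(...). A tiny standalone sketch of that guard pattern (FakeRequest is a made-up stand-in, not a Spark class):

object NullGuardSketch {
  // Made-up stand-in for a JSON-deserialized message whose fields default to null.
  class FakeRequest { var mainClass: String = null }

  def requireMainClass(request: FakeRequest): String =
    Option(request.mainClass).getOrElse {
      // Mirrors the SubmitRestMissingFieldException thrown by the servlet.
      throw new IllegalArgumentException("Main class is missing.")
    }

  def main(args: Array[String]): Unit = {
    val good = new FakeRequest
    good.mainClass = "com.example.MyApp"
    println(requireMainClass(good))                             // prints the class name
    println(scala.util.Try(requireMainClass(new FakeRequest)))  // Failure(...) when the field is null
  }
}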

core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala

Lines changed: 3 additions & 1 deletion
@@ -32,15 +32,17 @@ private[spark] abstract class SubmitRestProtocolRequest extends SubmitRestProtoc
  * A request to submit a driver in the REST application submission protocol.
  */
 private[spark] class CreateSubmissionRequest extends SubmitRestProtocolRequest {
+  var appResource: String = null
+  var mainClass: String = null
   var appArgs: Array[String] = null
   var sparkProperties: Map[String, String] = null
   var environmentVariables: Map[String, String] = null

   protected override def doValidate(): Unit = {
     super.doValidate()
     assert(sparkProperties != null, "No Spark properties set!")
+    assertFieldIsSet(appResource, "appResource")
     assertPropertyIsSet("spark.app.name")
-    assertPropertyIsSet("spark.app.resource")
     assertPropertyIsBoolean("spark.driver.supervise")
     assertPropertyIsNumeric("spark.driver.cores")
     assertPropertyIsNumeric("spark.cores.max")

core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala

Lines changed: 3 additions & 2 deletions
@@ -128,8 +128,9 @@ class StandaloneRestSubmitSuite extends FunSuite with BeforeAndAfterAll with Bef
       mainJar) ++ appArgs
     val args = new SparkSubmitArguments(commandLineArgs)
     val (_, _, sparkProperties, _) = SparkSubmit.prepareSubmitEnvironment(args)
-    val response = client.createSubmission(
-      args.master, appArgs.toArray, sparkProperties.toMap, Map.empty)
+    val request = client.constructSubmitRequest(
+      mainJar, mainClass, appArgs.toArray, sparkProperties.toMap, Map.empty)
+    val response = client.createSubmission(masterRestUrl, request)
     val submitResponse = getSubmitResponse(response)
     val submissionId = submitResponse.submissionId
     assert(submissionId != null, "Application submission was unsuccessful!")

core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala

Lines changed: 7 additions & 7 deletions
@@ -100,13 +100,13 @@ class SubmitRestProtocolSuite extends FunSuite {
     val message = new CreateSubmissionRequest
     intercept[SubmitRestProtocolException] { message.validate() }
     message.clientSparkVersion = "1.2.3"
+    message.appResource = "honey-walnut-cherry.jar"
+    message.mainClass = "org.apache.spark.examples.SparkPie"
     val conf = new SparkConf(false)
     conf.set("spark.app.name", "SparkPie")
-    conf.set("spark.app.resource", "honey-walnut-cherry.jar")
     message.sparkProperties = conf.getAll.toMap
     message.validate()
     // optional fields
-    conf.set("spark.app.mainClass", "org.apache.spark.examples.SparkPie")
     conf.set("spark.jars", "mayonnaise.jar,ketchup.jar")
     conf.set("spark.files", "fireball.png")
     conf.set("spark.driver.memory", "512m")

@@ -137,9 +137,9 @@
     assertJsonEquals(json, submitDriverRequestJson)
     val newMessage = SubmitRestProtocolMessage.fromJson(json, classOf[CreateSubmissionRequest])
     assert(newMessage.clientSparkVersion === "1.2.3")
+    assert(newMessage.appResource === "honey-walnut-cherry.jar")
+    assert(newMessage.mainClass === "org.apache.spark.examples.SparkPie")
     assert(newMessage.sparkProperties("spark.app.name") === "SparkPie")
-    assert(newMessage.sparkProperties("spark.app.resource") === "honey-walnut-cherry.jar")
-    assert(newMessage.sparkProperties("spark.app.mainClass") === "org.apache.spark.examples.SparkPie")
     assert(newMessage.sparkProperties("spark.jars") === "mayonnaise.jar,ketchup.jar")
     assert(newMessage.sparkProperties("spark.files") === "fireball.png")
     assert(newMessage.sparkProperties("spark.driver.memory") === "512m")

@@ -261,10 +261,12 @@
     |{
     |  "action" : "CreateSubmissionRequest",
     |  "appArgs" : [ "two slices", "a hint of cinnamon" ],
+    |  "appResource" : "honey-walnut-cherry.jar",
     |  "clientSparkVersion" : "1.2.3",
     |  "environmentVariables" : {
     |    "PATH" : "/dev/null"
     |  },
+    |  "mainClass" : "org.apache.spark.examples.SparkPie",
     |  "sparkProperties" : {
     |    "spark.driver.extraLibraryPath" : "pickle.jar",
     |    "spark.jars" : "mayonnaise.jar,ketchup.jar",

@@ -273,12 +275,10 @@
     |    "spark.cores.max" : "10000",
     |    "spark.driver.memory" : "512m",
     |    "spark.files" : "fireball.png",
-    |    "spark.app.resource" : "honey-walnut-cherry.jar",
     |    "spark.driver.cores" : "180",
     |    "spark.driver.extraJavaOptions" : " -Dslices=5 -Dcolor=mostly_red",
     |    "spark.executor.memory" : "256m",
-    |    "spark.driver.extraClassPath" : "food-coloring.jar",
-    |    "spark.app.mainClass" : "org.apache.spark.examples.SparkPie"
+    |    "spark.driver.extraClassPath" : "food-coloring.jar"
     |  }
     |}
   """.stripMargin
