
Commit 3db7379

Authored and committed by Andrew Or
Fix comments and name fields for better error messages
This commit also includes comprehensive cleanups across the board and simplifies the serialization process by eliminating the naming constraints on the JSON fields.
1 parent: 8d43486

14 files changed: +301, -269 lines
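
The pattern behind most of the diff below: every SubmitRestProtocolField is now constructed with its own name, so assertFieldIsSet no longer needs a separately supplied string such as "driver_id" that had to be kept in sync by hand. A minimal sketch of that idea, assuming a field wrapper and message base class of roughly this shape (illustrative only, not the actual Spark source):

// Illustrative sketch of the naming pattern this commit introduces; the real
// SubmitRestProtocolField and message base class live in the deploy.rest package.
class SubmitRestProtocolField[T](val name: String) {
  private var value: Option[T] = None
  def isSet: Boolean = value.isDefined
  def setValue(v: T): Unit = { value = Some(v) }
  override def toString: String = value.map(_.toString).orNull
}

abstract class SubmitRestProtocolMessageSketch {
  /** Fail validation with an error message that names the unset field. */
  protected def assertFieldIsSet(field: SubmitRestProtocolField[_]): Unit = {
    if (!field.isSet) {
      throw new IllegalArgumentException(s"Field '${field.name}' is not set.")
    }
  }

  /** Store a value and return `this`, enabling the fluent setter chaining seen below. */
  protected def setField[T](field: SubmitRestProtocolField[T], value: T): this.type = {
    field.setValue(value)
    this
  }
}

With the name attached to the field itself, a failed validation can report exactly which field is missing, which is what the commit title means by better error messages.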

core/src/main/scala/org/apache/spark/deploy/rest/DriverStatusRequest.scala

Lines changed: 7 additions & 2 deletions
@@ -17,12 +17,17 @@
 
 package org.apache.spark.deploy.rest
 
+/**
+ * A request to query the status of a driver in the REST application submission protocol.
+ */
 class DriverStatusRequest extends SubmitRestProtocolRequest {
-  private val driverId = new SubmitRestProtocolField[String]
+  private val driverId = new SubmitRestProtocolField[String]("driverId")
+
   def getDriverId: String = driverId.toString
   def setDriverId(s: String): this.type = setField(driverId, s)
+
   override def validate(): Unit = {
     super.validate()
-    assertFieldIsSet(driverId, "driver_id")
+    assertFieldIsSet(driverId)
   }
 }

core/src/main/scala/org/apache/spark/deploy/rest/DriverStatusResponse.scala

Lines changed: 11 additions & 7 deletions
@@ -17,12 +17,16 @@
 
 package org.apache.spark.deploy.rest
 
+/**
+ * A response to the [[DriverStatusRequest]] in the REST application submission protocol.
+ */
 class DriverStatusResponse extends SubmitRestProtocolResponse {
-  private val driverId = new SubmitRestProtocolField[String]
-  private val success = new SubmitRestProtocolField[Boolean]
-  private val driverState = new SubmitRestProtocolField[String]
-  private val workerId = new SubmitRestProtocolField[String]
-  private val workerHostPort = new SubmitRestProtocolField[String]
+  private val driverId = new SubmitRestProtocolField[String]("driverId")
+  private val success = new SubmitRestProtocolField[Boolean]("success")
+  // standalone cluster mode only
+  private val driverState = new SubmitRestProtocolField[String]("driverState")
+  private val workerId = new SubmitRestProtocolField[String]("workerId")
+  private val workerHostPort = new SubmitRestProtocolField[String]("workerHostPort")
 
   def getDriverId: String = driverId.toString
   def getSuccess: String = success.toString
@@ -38,7 +42,7 @@ class DriverStatusResponse extends SubmitRestProtocolResponse {
 
   override def validate(): Unit = {
     super.validate()
-    assertFieldIsSet(driverId, "driver_id")
-    assertFieldIsSet(success, "success")
+    assertFieldIsSet(driverId)
+    assertFieldIsSet(success)
   }
 }
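
Of the five fields above, validate() only asserts driverId and success; driverState, workerId, and workerHostPort apply to standalone cluster mode only and may legitimately be absent. A hypothetical server-side construction of this response, mirroring the handler code later in this commit (the version string, driver ID, and the setSuccess setter are assumptions for illustration):

// Hypothetical construction; values are placeholders, and setSuccess is assumed
// to exist analogously to the other setters shown in this diff.
val response = new DriverStatusResponse()
  .setSparkVersion("1.3.0")
  .setDriverId("driver-20150201000000-0000")
  .setSuccess("true")
response.validate()  // passes once the required fields are set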

core/src/main/scala/org/apache/spark/deploy/rest/ErrorResponse.scala

Lines changed: 4 additions & 1 deletion
@@ -17,9 +17,12 @@
 
 package org.apache.spark.deploy.rest
 
+/**
+ * An error response message used in the REST application submission protocol.
+ */
 class ErrorResponse extends SubmitRestProtocolResponse {
   override def validate(): Unit = {
     super.validate()
-    assertFieldIsSet(message, "message")
+    assertFieldIsSet(message)
   }
 }

core/src/main/scala/org/apache/spark/deploy/rest/KillDriverRequest.scala

Lines changed: 7 additions & 2 deletions
@@ -17,12 +17,17 @@
 
 package org.apache.spark.deploy.rest
 
+/**
+ * A request to kill a driver in the REST application submission protocol.
+ */
 class KillDriverRequest extends SubmitRestProtocolRequest {
-  private val driverId = new SubmitRestProtocolField[String]
+  private val driverId = new SubmitRestProtocolField[String]("driverId")
+
   def getDriverId: String = driverId.toString
   def setDriverId(s: String): this.type = setField(driverId, s)
+
   override def validate(): Unit = {
     super.validate()
-    assertFieldIsSet(driverId, "driver_id")
+    assertFieldIsSet(driverId)
   }
 }

core/src/main/scala/org/apache/spark/deploy/rest/KillDriverResponse.scala

Lines changed: 7 additions & 4 deletions
@@ -17,9 +17,12 @@
 
 package org.apache.spark.deploy.rest
 
+/**
+ * A response to the [[KillDriverRequest]] in the REST application submission protocol.
+ */
 class KillDriverResponse extends SubmitRestProtocolResponse {
-  private val driverId = new SubmitRestProtocolField[String]
-  private val success = new SubmitRestProtocolField[Boolean]
+  private val driverId = new SubmitRestProtocolField[String]("driverId")
+  private val success = new SubmitRestProtocolField[Boolean]("success")
 
   def getDriverId: String = driverId.toString
   def getSuccess: String = success.toString
@@ -29,7 +32,7 @@ class KillDriverResponse extends SubmitRestProtocolResponse {
 
   override def validate(): Unit = {
     super.validate()
-    assertFieldIsSet(driverId, "driver_id")
-    assertFieldIsSet(success, "success")
+    assertFieldIsSet(driverId)
+    assertFieldIsSet(success)
   }
 }

core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestClient.scala

Lines changed: 37 additions & 23 deletions
@@ -21,11 +21,10 @@ import java.net.URL
 
 import org.apache.spark.{SPARK_VERSION => sparkVersion}
 import org.apache.spark.deploy.SparkSubmitArguments
-import org.apache.spark.util.Utils
 
 /**
- * A client that submits applications to the standalone Master using the stable REST protocol.
- * This client is intended to communicate with the StandaloneRestServer. Cluster mode only.
+ * A client that submits applications to the standalone Master using the REST protocol
+ * This client is intended to communicate with the [[StandaloneRestServer]]. Cluster mode only.
  */
 private[spark] class StandaloneRestClient extends SubmitRestClient {
   import StandaloneRestClient._
@@ -38,7 +37,8 @@ private[spark] class StandaloneRestClient extends SubmitRestClient {
    * this reports failure and logs an error message provided by the REST server.
    */
   override def submitDriver(args: SparkSubmitArguments): SubmitDriverResponse = {
-    val submitResponse = super.submitDriver(args).asInstanceOf[SubmitDriverResponse]
+    validateSubmitArgs(args)
+    val submitResponse = super.submitDriver(args)
     val submitSuccess = submitResponse.getSuccess.toBoolean
     if (submitSuccess) {
       val driverId = submitResponse.getDriverId
@@ -51,14 +51,25 @@
     submitResponse
   }
 
+  /** Request that the REST server kill the specified driver. */
+  override def killDriver(master: String, driverId: String): KillDriverResponse = {
+    validateMaster(master)
+    super.killDriver(master, driverId)
+  }
+
+  /** Request the status of the specified driver from the REST server. */
+  override def requestDriverStatus(master: String, driverId: String): DriverStatusResponse = {
+    validateMaster(master)
+    super.requestDriverStatus(master, driverId)
+  }
+
   /**
-   * Poll the status of the driver that was just submitted and report it.
-   * This retries up to a fixed number of times until giving up.
+   * Poll the status of the driver that was just submitted and log it.
+   * This retries up to a fixed number of times before giving up.
    */
   private def pollSubmittedDriverStatus(master: String, driverId: String): Unit = {
     (1 to REPORT_DRIVER_STATUS_MAX_TRIES).foreach { _ =>
       val statusResponse = requestDriverStatus(master, driverId)
-        .asInstanceOf[DriverStatusResponse]
       val statusSuccess = statusResponse.getSuccess.toBoolean
       if (statusSuccess) {
         val driverState = statusResponse.getDriverState
@@ -75,13 +86,13 @@
         exception.foreach { e => logError(e) }
         return
       }
+      Thread.sleep(REPORT_DRIVER_STATUS_INTERVAL)
     }
     logError(s"Error: Master did not recognize driver $driverId.")
   }
 
   /** Construct a submit driver request message. */
-  override protected def constructSubmitRequest(
-      args: SparkSubmitArguments): SubmitDriverRequest = {
+  protected override def constructSubmitRequest(args: SparkSubmitArguments): SubmitDriverRequest = {
     val message = new SubmitDriverRequest()
       .setSparkVersion(sparkVersion)
       .setAppName(args.name)
@@ -99,12 +110,14 @@
       .setTotalExecutorCores(args.totalExecutorCores)
     args.childArgs.foreach(message.addAppArg)
     args.sparkProperties.foreach { case (k, v) => message.setSparkProperty(k, v) }
-    // TODO: send special environment variables?
+    sys.env.foreach { case (k, v) =>
+      if (k.startsWith("SPARK_")) { message.setEnvironmentVariable(k, v) }
+    }
     message
   }
 
   /** Construct a kill driver request message. */
-  override protected def constructKillRequest(
+  protected override def constructKillRequest(
       master: String,
       driverId: String): KillDriverRequest = {
     new KillDriverRequest()
@@ -113,33 +126,34 @@
   }
 
   /** Construct a driver status request message. */
-  override protected def constructStatusRequest(
+  protected override def constructStatusRequest(
      master: String,
      driverId: String): DriverStatusRequest = {
     new DriverStatusRequest()
       .setSparkVersion(sparkVersion)
       .setDriverId(driverId)
   }
 
+  /** Extract the URL portion of the master address. */
+  protected override def getHttpUrl(master: String): URL = {
+    validateMaster(master)
+    new URL("http://" + master.stripPrefix("spark://"))
+  }
+
   /** Throw an exception if this is not standalone mode. */
-  override protected def validateMaster(master: String): Unit = {
+  private def validateMaster(master: String): Unit = {
     if (!master.startsWith("spark://")) {
       throw new IllegalArgumentException("This REST client is only supported in standalone mode.")
     }
   }
 
-  /** Throw an exception if this is not cluster deploy mode. */
-  override protected def validateDeployMode(deployMode: String): Unit = {
-    if (deployMode != "cluster") {
-      throw new IllegalArgumentException("This REST client is only supported in cluster mode.")
+  /** Throw an exception if this is not standalone cluster mode. */
+  private def validateSubmitArgs(args: SparkSubmitArguments): Unit = {
+    if (!args.isStandaloneCluster) {
+      throw new IllegalArgumentException(
+        "This REST client is only supported in standalone cluster mode.")
     }
   }
-
-  /** Extract the URL portion of the master address. */
-  override protected def getHttpUrl(master: String): URL = {
-    validateMaster(master)
-    new URL("http://" + master.stripPrefix("spark://"))
-  }
 }
 
 private object StandaloneRestClient {
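
The companion object is truncated above; it supplies the REPORT_DRIVER_STATUS_MAX_TRIES and REPORT_DRIVER_STATUS_INTERVAL constants that bound the polling loop. For a sense of how the public client methods in this diff are driven, a hypothetical invocation (the master URL and driver ID are placeholders, error handling omitted):

// Hypothetical usage of the client API shown in this diff; the master URL and
// driver ID are placeholders, not real values.
val client = new StandaloneRestClient
val status = client.requestDriverStatus("spark://localhost:7077", "driver-20150201000000-0000")
if (status.getSuccess.toBoolean) {
  println(s"Driver state: ${status.getDriverState}")
}
val kill = client.killDriver("spark://localhost:7077", "driver-20150201000000-0000")
println(s"Kill acknowledged: ${kill.getSuccess}")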

core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala

Lines changed: 11 additions & 18 deletions
@@ -24,23 +24,22 @@ import akka.actor.ActorRef
 import org.apache.spark.{SPARK_VERSION => sparkVersion}
 import org.apache.spark.SparkConf
 import org.apache.spark.util.{AkkaUtils, Utils}
-import org.apache.spark.deploy.{Command, DriverDescription}
+import org.apache.spark.deploy.{Command, DeployMessages, DriverDescription}
 import org.apache.spark.deploy.ClientArguments._
-import org.apache.spark.deploy.DeployMessages
 import org.apache.spark.deploy.master.Master
 
 /**
- * A server that responds to requests submitted by the StandaloneRestClient.
- * This is intended to be embedded in the standalone Master. Cluster mode only.
+ * A server that responds to requests submitted by the [[StandaloneRestClient]].
+ * This is intended to be embedded in the standalone Master. Cluster mode only
  */
 private[spark] class StandaloneRestServer(master: Master, host: String, requestedPort: Int)
   extends SubmitRestServer(host, requestedPort, master.conf) {
-  override protected val handler = new StandaloneRestServerHandler(master)
+  protected override val handler = new StandaloneRestServerHandler(master)
 }
 
 /**
- * A handler for requests submitted to the standalone Master
- * via the stable application submission REST protocol.
+ * A handler for requests submitted to the standalone
+ * Master via the REST application submission protocol.
  */
 private[spark] class StandaloneRestServerHandler(
     conf: SparkConf,
@@ -55,8 +54,7 @@ private[spark] class StandaloneRestServerHandler(
   }
 
   /** Handle a request to submit a driver. */
-  override protected def handleSubmit(
-      request: SubmitDriverRequest): SubmitDriverResponse = {
+  protected override def handleSubmit(request: SubmitDriverRequest): SubmitDriverResponse = {
     val driverDescription = buildDriverDescription(request)
     val response = AkkaUtils.askWithReply[DeployMessages.SubmitDriverResponse](
       DeployMessages.RequestSubmitDriver(driverDescription), masterActor, askTimeout)
@@ -68,8 +66,7 @@
   }
 
   /** Handle a request to kill a driver. */
-  override protected def handleKill(
-      request: KillDriverRequest): KillDriverResponse = {
+  protected override def handleKill(request: KillDriverRequest): KillDriverResponse = {
     val driverId = request.getDriverId
     val response = AkkaUtils.askWithReply[DeployMessages.KillDriverResponse](
       DeployMessages.RequestKillDriver(driverId), masterActor, askTimeout)
@@ -81,16 +78,11 @@
   }
 
   /** Handle a request for a driver's status. */
-  override protected def handleStatus(
-      request: DriverStatusRequest): DriverStatusResponse = {
+  protected override def handleStatus(request: DriverStatusRequest): DriverStatusResponse = {
     val driverId = request.getDriverId
     val response = AkkaUtils.askWithReply[DeployMessages.DriverStatusResponse](
       DeployMessages.RequestDriverStatus(driverId), masterActor, askTimeout)
-    // Format exception nicely, if it exists
-    val message = response.exception.map { e =>
-      val stackTraceString = e.getStackTrace.map { "\t" + _ }.mkString("\n")
-      s"Exception from the cluster:\n$e\n$stackTraceString"
-    }
+    val message = response.exception.map { s"Exception from the cluster:\n" + formatException(_) }
     new DriverStatusResponse()
       .setSparkVersion(sparkVersion)
       .setDriverId(driverId)
@@ -103,6 +95,7 @@
 
   /**
    * Build a driver description from the fields specified in the submit request.
+   *
    * This does not currently consider fields used by python applications since
   * python is not supported in standalone cluster mode yet.
   */
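
The handleStatus change above replaces the removed inline stack-trace formatting with a formatException helper whose definition is not part of this excerpt. Judging from the removed code, an equivalent helper would look roughly like this (a sketch, not necessarily the actual implementation):

// Sketch of a formatException helper equivalent to the removed inline code:
// the exception itself, then its stack trace with one tab-indented frame per line.
def formatException(e: Exception): String = {
  val stackTraceString = e.getStackTrace.map { "\t" + _ }.mkString("\n")
  s"$e\n$stackTraceString"
}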
