Skip to content

Commit 09272b7

Browse files
committed
Always use Worker's working directory as spark home
1 parent 0da07da commit 09272b7

File tree

7 files changed

+7
-15
lines changed

7 files changed

+7
-15
lines changed

core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,6 @@ private[spark] class ApplicationDescription(
2222
val maxCores: Option[Int],
2323
val memoryPerSlave: Int,
2424
val command: Command,
25-
val sparkHome: Option[String],
2625
var appUiUrl: String,
2726
val eventLogDir: Option[String] = None)
2827
extends Serializable {

core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -56,7 +56,6 @@ private[spark] object JsonProtocol {
5656
("cores" -> obj.maxCores) ~
5757
("memoryperslave" -> obj.memoryPerSlave) ~
5858
("user" -> obj.user) ~
59-
("sparkhome" -> obj.sparkHome) ~
6059
("command" -> obj.command.toString)
6160
}
6261

core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -48,9 +48,8 @@ private[spark] object TestClient {
4848
val conf = new SparkConf
4949
val (actorSystem, _) = AkkaUtils.createActorSystem("spark", Utils.localIpAddress, 0,
5050
conf = conf, securityManager = new SecurityManager(conf))
51-
val desc = new ApplicationDescription(
52-
"TestClient", Some(1), 512, Command("spark.deploy.client.TestExecutor", Seq(), Map(),
53-
Seq(), Seq(), Seq()), Some("dummy-spark-home"), "ignored")
51+
val desc = new ApplicationDescription("TestClient", Some(1), 512,
52+
Command("spark.deploy.client.TestExecutor", Seq(), Map(), Seq(), Seq(), Seq()), "ignored")
5453
val listener = new TestListener
5554
val client = new AppClient(actorSystem, Array(url), desc, listener, new SparkConf)
5655
client.start()

core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -233,9 +233,7 @@ private[spark] class Worker(
233233
try {
234234
logInfo("Asked to launch executor %s/%d for %s".format(appId, execId, appDesc.name))
235235
val manager = new ExecutorRunner(appId, execId, appDesc, cores_, memory_,
236-
self, workerId, host,
237-
appDesc.sparkHome.map(userSparkHome => new File(userSparkHome)).getOrElse(sparkHome),
238-
workDir, akkaUrl, conf, ExecutorState.RUNNING)
236+
self, workerId, host, sparkHome, workDir, akkaUrl, conf, ExecutorState.RUNNING)
239237
executors(appId + "/" + execId) = manager
240238
manager.start()
241239
coresUsed += cores_

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -60,9 +60,8 @@ private[spark] class SparkDeploySchedulerBackend(
6060
val javaOpts = sparkJavaOpts ++ extraJavaOpts
6161
val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
6262
args, sc.executorEnvs, classPathEntries, libraryPathEntries, javaOpts)
63-
val sparkHome = sc.getSparkHome()
6463
val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
65-
sparkHome, sc.ui.appUIAddress, sc.eventLogger.map(_.logDir))
64+
sc.ui.appUIAddress, sc.eventLogger.map(_.logDir))
6665

6766
client = new AppClient(sc.env.actorSystem, masters, appDesc, this, conf)
6867
client.start()

core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -89,7 +89,7 @@ class JsonProtocolSuite extends FunSuite {
8989

9090
def createAppDesc(): ApplicationDescription = {
9191
val cmd = new Command("mainClass", List("arg1", "arg2"), Map(), Seq(), Seq(), Seq())
92-
new ApplicationDescription("name", Some(4), 1234, cmd, Some("sparkHome"), "appUiUrl")
92+
new ApplicationDescription("name", Some(4), 1234, cmd, "appUiUrl")
9393
}
9494

9595
def createAppInfo() : ApplicationInfo = {
@@ -169,8 +169,7 @@ object JsonConstants {
169169
val appDescJsonStr =
170170
"""
171171
|{"name":"name","cores":4,"memoryperslave":1234,
172-
|"user":"%s","sparkhome":"sparkHome",
173-
|"command":"Command(mainClass,List(arg1, arg2),Map(),List(),List(),List())"}
172+
|"user":"%s","command":"Command(mainClass,List(arg1, arg2),Map(),List(),List(),List())"}
174173
""".format(System.getProperty("user.name", "<unknown>")).stripMargin
175174

176175
val executorRunnerJsonStr =

core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -29,8 +29,7 @@ class ExecutorRunnerTest extends FunSuite {
2929
def f(s:String) = new File(s)
3030
val sparkHome = sys.env.get("SPARK_HOME").orElse(sys.props.get("spark.home"))
3131
val appDesc = new ApplicationDescription("app name", Some(8), 500,
32-
Command("foo", Seq(), Map(), Seq(), Seq(), Seq()),
33-
sparkHome, "appUiUrl")
32+
Command("foo", Seq(), Map(), Seq(), Seq(), Seq()), "appUiUrl")
3433
val appId = "12345-worker321-9876"
3534
val er = new ExecutorRunner(appId, 1, appDesc, 8, 500, null, "blah", "worker321", f(sparkHome.getOrElse(".")),
3635
f("ooga"), "blah", new SparkConf, ExecutorState.RUNNING)

0 commit comments

Comments (0)