Commit fd9da51

Formatting changes (minor)
1 parent: 85d3596

4 files changed: +17 -9 lines

core/src/main/scala/org/apache/spark/deploy/Client.scala

Lines changed: 1 addition & 1 deletion
@@ -51,7 +51,7 @@ private class ClientActor(driverArgs: ClientArguments, conf: SparkConf) extends
     // truncate filesystem paths similar to what YARN does. For now, we just require
     // people call `addJar` assuming the jar is in the same directory.
     val env = Map[String, String]()
-    System.getenv().foreach{case (k, v) => env(k) = v}
+    System.getenv().foreach { case (k, v) => env(k) = v }

     val mainClass = "org.apache.spark.deploy.worker.DriverWrapper"
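The hunk applies the usual Scala spacing for a brace-delimited closure argument: a space before the opening brace and around the case clause. A minimal self-contained sketch of the same pattern (the object name and the mutable-map setup are illustrative, not taken from the diff):

import scala.collection.JavaConverters._
import scala.collection.mutable

object EnvCopySketch {
  def main(args: Array[String]): Unit = {
    // Copy the JVM's environment into a mutable Scala map, written with the
    // spaced brace style the fixed line uses.
    val env = mutable.Map[String, String]()
    System.getenv().asScala.foreach { case (k, v) => env(k) = v }
    println(s"copied ${env.size} environment variables")
  }
}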

core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala

Lines changed: 2 additions & 2 deletions
@@ -62,7 +62,7 @@ object CommandUtils extends Logging {
       val joined = command.libraryPathEntries.mkString(File.pathSeparator)
       Seq(s"-Djava.library.path=$joined")
     } else {
-      Seq()
+      Seq()
     }

     val permGenOpt = Seq("-XX:MaxPermSize=128m")
@@ -71,7 +71,7 @@ object CommandUtils extends Logging {
     val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
     val classPath = Utils.executeAndGetOutput(
       Seq(sparkHome + "/bin/compute-classpath" + ext),
-      extraEnvironment=command.environment)
+      extraEnvironment = command.environment)
     val userClassPath = command.classPathEntries ++ Seq(classPath)

     Seq("-cp", userClassPath.filterNot(_.isEmpty).mkString(File.pathSeparator)) ++

core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala

Lines changed: 7 additions & 4 deletions
@@ -72,7 +72,7 @@ private[spark] class ExecutorRunner(
   }

   /**
-   * kill executor process, wait for exit and notify worker to update resource status
+   * Kill executor process, wait for exit and notify worker to update resource status.
    *
    * @param message the exception message which caused the executor's death
    */
@@ -114,9 +114,12 @@ private[spark] class ExecutorRunner(
   }

   def getCommandSeq = {
-    val command = Command(appDesc.command.mainClass,
-      appDesc.command.arguments.map(substituteVariables) ++ Seq(appId), appDesc.command.environment,
-      appDesc.command.classPathEntries, appDesc.command.libraryPathEntries,
+    val command = Command(
+      appDesc.command.mainClass,
+      appDesc.command.arguments.map(substituteVariables) ++ Seq(appId),
+      appDesc.command.environment,
+      appDesc.command.classPathEntries,
+      appDesc.command.libraryPathEntries,
       appDesc.command.extraJavaOptions)
     CommandUtils.buildCommandSeq(command, memory, sparkHome.getAbsolutePath)
   }
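The first hunk capitalizes the Scaladoc summary and ends it with a period; the second rewraps a long constructor call to one argument per line. A trimmed, self-contained sketch of that call-site style, using a stand-in Command case class whose field names follow the diff (the field types and all values are assumptions):

// Stand-in for Spark's Command; field names follow the diff, types are guesses.
case class Command(
    mainClass: String,
    arguments: Seq[String],
    environment: Map[String, String],
    classPathEntries: Seq[String],
    libraryPathEntries: Seq[String],
    extraJavaOptions: Option[String])

object CommandSketch {
  def main(args: Array[String]): Unit = {
    // When the argument list overflows one line, each argument moves to its
    // own line, indented two spaces past the line that opens the call.
    val command = Command(
      "org.apache.spark.deploy.worker.DriverWrapper",
      Seq("launch", "app-20140101000000-0000"),
      sys.env,
      Seq("conf"),
      Seq("lib"),
      None)
    println(command.mainClass)
  }
}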

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala

Lines changed: 7 additions & 2 deletions
@@ -98,8 +98,13 @@ private[spark] class CoarseGrainedExecutorBackend(
 }

 private[spark] object CoarseGrainedExecutorBackend extends Logging {
-  def run(driverUrl: String, executorId: String, hostname: String, cores: Int,
-      workerUrl: Option[String]) {
+
+  private def run(
+      driverUrl: String,
+      executorId: String,
+      hostname: String,
+      cores: Int,
+      workerUrl: Option[String]) {

     SignalLogger.register(log)
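On the declaration side, the same wrapping rule puts one parameter per line, indented four spaces past `def`. A minimal sketch of the reformatted signature with a stub body (the body, return type annotation, and argument values are illustrative, not from the diff):

object RunSignatureSketch {
  // Declaration style from the diff: one parameter per line, indented four
  // spaces, with the opening brace on the last parameter's line.
  private def run(
      driverUrl: String,
      executorId: String,
      hostname: String,
      cores: Int,
      workerUrl: Option[String]): Unit = {
    println(s"executor $executorId: $cores cores on $hostname, driver at $driverUrl")
    workerUrl.foreach(url => println(s"watching worker at $url"))
  }

  def main(args: Array[String]): Unit = {
    run("spark://CoarseGrainedScheduler@driver:7077", "1", "localhost", 2, None)
  }
}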
