
Commit 8c11a0d

Clean up imports / comments (minor)
1 parent: 2678d13

File tree

core/src/main/scala/org/apache/spark/SparkConf.scala
yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala

3 files changed: +2, -8 lines

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 2 additions & 3 deletions
@@ -305,9 +305,8 @@ private[spark] object SparkConf {
   /**
    * Return whether the given config should be passed to an executor on start-up.
    *
-   * When connecting to the scheduler, the executor backend needs certain akka and authentication
-   * settings to connect to the scheduler, while the rest of the spark configs can be inherited
-   * from the driver later.
+   * Certain akka and authentication configs are required of the executor when it connects to
+   * the scheduler, while the rest of the spark configs can be inherited from the driver later.
    */
   def isExecutorStartupConf(name: String): Boolean = {
     isAkkaConf(name) || name.startsWith("spark.akka") || name.startsWith("spark.auth")
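For context, a minimal self-contained sketch of the behavior this scaladoc describes. The isAkkaConf here is a simplified stand-in for the private helper defined elsewhere in SparkConf (the real check may differ); the predicate itself mirrors the diff above:

object StartupConfFilterSketch {
  // Simplified stand-in for SparkConf's private isAkkaConf helper.
  def isAkkaConf(name: String): Boolean = name.startsWith("akka.")

  // Mirrors isExecutorStartupConf from the diff above: only akka and
  // authentication settings are forwarded to an executor at start-up;
  // everything else is inherited from the driver later.
  def isExecutorStartupConf(name: String): Boolean =
    isAkkaConf(name) || name.startsWith("spark.akka") || name.startsWith("spark.auth")

  def main(args: Array[String]): Unit = {
    Seq("spark.akka.frameSize", "spark.authenticate", "spark.executor.memory")
      .foreach(c => println(s"$c -> ${isExecutorStartupConf(c)}"))
    // spark.akka.frameSize -> true
    // spark.authenticate -> true
    // spark.executor.memory -> false
  }
}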

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala

Lines changed: 0 additions & 3 deletions
@@ -37,9 +37,7 @@ import org.apache.hadoop.yarn.api.protocolrecords._
 import org.apache.hadoop.yarn.api.records._
 import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.util.Records
-
 import org.apache.spark.{SparkException, Logging, SparkConf, SparkContext}
-import org.apache.spark.util.Utils
 
 /**
  * The entry point (starting in Client#main() and Client#run()) for launching Spark on YARN. The

@@ -386,7 +384,6 @@ trait ClientBase extends Logging {
     // Forward the Spark configuration to the application master / executors.
     // TODO: it might be nicer to pass these as an internal environment variable rather than
     // as Java options, due to complications with string parsing of nested quotes.
-    // TODO: Use Utils.sparkJavaOpts here once we figure out how to deal with quotes and backslashes
     for ((k, v) <- sparkConf.getAll) {
       javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\""
     }
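As a rough illustration of the loop in the second hunk (not the actual ClientBase code), here is how the escaped quoting plays out, with a plain Seq standing in for sparkConf.getAll:

object ForwardConfSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical stand-in for sparkConf.getAll.
    val confs = Seq("spark.app.name" -> "my app", "spark.akka.frameSize" -> "10")
    val javaOpts = scala.collection.mutable.ListBuffer[String]()
    for ((k, v) <- confs) {
      // Each config becomes -Dkey=\"value\"; the escaped quotes are meant to
      // survive the shell command YARN builds for the container, which is the
      // nested-quoting complication the TODO comment refers to.
      javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\""
    }
    javaOpts.foreach(println)
    // -Dspark.app.name=\"my app\"
    // -Dspark.akka.frameSize=\"10\"
  }
}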

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala

Lines changed: 0 additions & 2 deletions
@@ -31,7 +31,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.util.{ConverterUtils, Records}
 
 import org.apache.spark.{Logging, SparkConf}
-import org.apache.spark.util.Utils
 
 trait ExecutorRunnableUtil extends Logging {
 

@@ -67,7 +66,6 @@ trait ExecutorRunnableUtil extends Logging {
     // registers with the Scheduler and transfers the spark configs. Since the Executor backend
     // uses Akka to connect to the scheduler, the akka settings are needed as well as the
     // authentication settings.
-    // TODO: Use Utils.sparkJavaOpts here once we figure out how to deal with quotes and backslashes
     sparkConf.getAll.
       filter { case (k, v) => k.startsWith("spark.auth") || k.startsWith("spark.akka") }.
       foreach { case (k, v) => javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\"" }
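A runnable sketch of the filter/foreach chain in the second hunk, again with a plain Seq standing in for sparkConf.getAll; only the akka and authentication settings reach the executor's java options, matching the isExecutorStartupConf predicate in SparkConf.scala above:

object ExecutorOptsSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical stand-in for sparkConf.getAll.
    val confs = Seq(
      "spark.akka.frameSize"  -> "10",
      "spark.authenticate"    -> "true",
      "spark.executor.memory" -> "2g")
    val javaOpts = scala.collection.mutable.ListBuffer[String]()
    confs
      .filter { case (k, _) => k.startsWith("spark.auth") || k.startsWith("spark.akka") }
      .foreach { case (k, v) => javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\"" }
    javaOpts.foreach(println)
    // -Dspark.akka.frameSize=\"10\"
    // -Dspark.authenticate=\"true\"
  }
}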
