Commit 46555c1

Review feedback and import clean-ups

1 parent 437aed1
3 files changed: +15 −33 lines

docs/configuration.md

Lines changed: 3 additions & 3 deletions
```diff
@@ -650,9 +650,9 @@ Apart from these, the following properties are also available, and may be useful
   <td>spark.executor.extraJavaOptions</td>
   <td>(none)</td>
   <td>
-    A string of extra JVM options to pass to executors. For instance, GC settings or custom
-    paths for native code. Note that it is illegal to set Spark properties or heap size
-    settings with this option.
+    A string of extra JVM options to pass to executors. For instance, GC settings or other
+    logging. Note that it is illegal to set Spark properties or heap size settings with this
+    option.
   </td>
 </tr>
```
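
To illustrate the documented behavior, here is a minimal sketch of supplying GC-logging flags through this option from application code (the app name and flags are hypothetical; per the doc text, Spark properties and heap settings such as `-Xmx` must not go here):

```scala
import org.apache.spark.SparkConf

// Sketch: pass GC-logging flags to executor JVMs via the documented option.
// Illegal here, per the doc text above: Spark properties and heap sizes (-Xmx).
val conf = new SparkConf()
  .setAppName("ExtraJavaOptionsExample") // hypothetical app name
  .set("spark.executor.extraJavaOptions", "-verbose:gc -XX:+PrintGCDetails")
```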

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala

Lines changed: 6 additions & 17 deletions
```diff
@@ -17,36 +17,25 @@
 
 package org.apache.spark.deploy.yarn
 
-import java.net.{InetAddress, UnknownHostException, URI}
-import java.nio.ByteBuffer
+import java.net.{InetAddress, URI, UnknownHostException}
 
 import scala.collection.JavaConversions._
-import scala.collection.mutable.HashMap
-import scala.collection.mutable.Map
+import scala.collection.mutable.{HashMap, Map}
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs._
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.DataOutputBuffer
+import org.apache.hadoop.fs.permission.FsPermission
 import org.apache.hadoop.mapred.Master
 import org.apache.hadoop.mapreduce.MRJobConfig
-import org.apache.hadoop.net.NetUtils
 import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hadoop.util.StringUtils
 import org.apache.hadoop.yarn.api._
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
 import org.apache.hadoop.yarn.api.protocolrecords._
 import org.apache.hadoop.yarn.api.records._
 import org.apache.hadoop.yarn.conf.YarnConfiguration
-import org.apache.hadoop.yarn.ipc.YarnRPC
-import org.apache.hadoop.yarn.util.{Records, Apps}
-
+import org.apache.hadoop.yarn.util.{Apps, Records}
 import org.apache.spark.{Logging, SparkConf}
-import org.apache.spark.util.Utils
-import org.apache.spark.deploy.SparkHadoopUtil
-import org.apache.spark.deploy.ExecutorLauncher
-import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
-
 
 /**
  * The entry point (starting in Client#main() and Client#run()) for launching Spark on YARN. The
@@ -355,8 +344,8 @@ trait ClientBase extends Logging {
       JAVA_OPTS += s"-D$k=$v"
     }
     // TODO: honor driver classpath here: sys.props.get("spark.driver.classPath")
-    sys.props.get("spark.driver.javaOpts").map(opts => JAVA_OPTS += opts)
-    sys.props.get("spark.driver.libraryPath").map(p => JAVA_OPTS + s"-Djava.library.path=$p")
+    sys.props.get("spark.driver.javaOpts").foreach(opts => JAVA_OPTS += opts)
+    sys.props.get("spark.driver.libraryPath").foreach(p => JAVA_OPTS += s"-Djava.library.path=$p")
   }
 
   if (!localResources.contains(ClientBase.LOG4J_PROP)) {
```
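
The two rewritten lines here are more than style: `Option.map` was being used purely for its side effect, and the library-path line used `+`, which builds a new string and discards it, so the flag was never appended. A self-contained sketch of the difference, assuming `JAVA_OPTS` is the accumulating String var the surrounding code uses:

```scala
object DriverOptsDemo extends App {
  var JAVA_OPTS = "" // stand-in for the options string ClientBase accumulates
  sys.props("spark.driver.libraryPath") = "/opt/native" // hypothetical value

  // Before the fix: `+` concatenates into a throwaway string, so JAVA_OPTS is
  // untouched, and `map` hides the intended side effect in a discarded Option.
  sys.props.get("spark.driver.libraryPath").map(p => JAVA_OPTS + s" -Djava.library.path=$p")
  println(s"with map/+     : '$JAVA_OPTS'") // ''

  // After the fix: `foreach` states the side effect and `+=` reassigns the var.
  sys.props.get("spark.driver.libraryPath").foreach(p => JAVA_OPTS += s" -Djava.library.path=$p")
  println(s"with foreach/+=: '$JAVA_OPTS'") // ' -Djava.library.path=/opt/native'
}
```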

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala

Lines changed: 6 additions & 13 deletions
```diff
@@ -18,26 +18,18 @@
 package org.apache.spark.deploy.yarn
 
 import java.net.URI
-import java.nio.ByteBuffer
-import java.security.PrivilegedExceptionAction
 
 import scala.collection.JavaConversions._
 import scala.collection.mutable.HashMap
 
-import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
-import org.apache.hadoop.io.DataOutputBuffer
-import org.apache.hadoop.net.NetUtils
-import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hadoop.yarn.api._
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
 import org.apache.hadoop.yarn.api.records._
-import org.apache.hadoop.yarn.api.protocolrecords._
-import org.apache.hadoop.yarn.util.{Apps, ConverterUtils, Records}
-
-import org.apache.spark.{SparkConf, Logging}
 import org.apache.hadoop.yarn.conf.YarnConfiguration
+import org.apache.hadoop.yarn.util.{Apps, ConverterUtils, Records}
 
+import org.apache.spark.{Logging, SparkConf}
 
 trait ExecutorRunnableUtil extends Logging {
 
@@ -58,9 +50,10 @@ trait ExecutorRunnableUtil extends Logging {
     val executorMemoryString = executorMemory + "m"
     JAVA_OPTS += "-Xms" + executorMemoryString + " -Xmx" + executorMemoryString + " "
 
-    // Set extra Java options for the executor
-    val executorOpts = sys.props.find(_._1.contains("spark.executor.extraJavaOptions"))
-    JAVA_OPTS += executorOpts
+    // Set extra Java options for the executor, if defined
+    sys.props.get("spark.executor.extraJavaOptions").foreach { opts =>
+      JAVA_OPTS += opts
+    }
 
     JAVA_OPTS += " -Djava.io.tmpdir=" +
       new Path(Environment.PWD.$(), YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR) + " "
```
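
The replaced lines fix an actual bug: `sys.props.find(_._1.contains(key))` yields an `Option[(String, String)]`, so `JAVA_OPTS += executorOpts` appended the string form `Some((key,value))` (or `None`) to the executor's JVM options rather than the value itself. A small sketch, again treating `JAVA_OPTS` as an accumulating String var:

```scala
object ExecutorOptsDemo extends App {
  sys.props("spark.executor.extraJavaOptions") = "-verbose:gc" // hypothetical value

  // Before the fix: find returns Option[(String, String)], and String's `+`
  // happily stringifies the whole Option into the flags.
  var JAVA_OPTS = ""
  val executorOpts = sys.props.find(_._1.contains("spark.executor.extraJavaOptions"))
  JAVA_OPTS += executorOpts
  println(JAVA_OPTS) // Some((spark.executor.extraJavaOptions,-verbose:gc))

  // After the fix: look up the exact key and append only the value when defined.
  JAVA_OPTS = ""
  sys.props.get("spark.executor.extraJavaOptions").foreach { opts =>
    JAVA_OPTS += opts
  }
  println(JAVA_OPTS) // -verbose:gc
}
```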
