Commit f6680cd

Thomas Graves authored and Marcelo Vanzin committed
[SPARK-11555] spark on yarn spark-class --num-workers doesn't work
I tested the various ways of specifying the number of executors with both spark-submit and spark-class, in client and cluster mode where each applies: --num-workers, --num-executors, spark.executor.instances, SPARK_EXECUTOR_INSTANCES, and the default when nothing is supplied.

Author: Thomas Graves <[email protected]>

Closes #9523 from tgravescs/SPARK-11555.
1 parent c447c9d commit f6680cd

2 files changed: +6 -3 lines changed

yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala

Lines changed: 1 addition & 1 deletion

@@ -81,7 +81,7 @@ private[spark] class ClientArguments(args: Array[String], sparkConf: SparkConf)
       .orNull
     // If dynamic allocation is enabled, start at the configured initial number of executors.
     // Default to minExecutors if no initialExecutors is set.
-    numExecutors = YarnSparkHadoopUtil.getInitialTargetExecutorNumber(sparkConf)
+    numExecutors = YarnSparkHadoopUtil.getInitialTargetExecutorNumber(sparkConf, numExecutors)
     principal = Option(principal)
       .orElse(sparkConf.getOption("spark.yarn.principal"))
       .orNull
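
For context, the numExecutors value now forwarded above is whatever the client-side parser extracted from --num-executors (or the older --num-workers). The following is a much-simplified, hypothetical sketch of that mapping; NumExecutorsArgSketch and parseNumExecutors are illustrative names rather than the actual ClientArguments parser, and the default of 2 is assumed:

object NumExecutorsArgSketch {
  // Hypothetical, simplified stand-in for the argument parsing in ClientArguments;
  // the real parseArgs loop handles many more flags and treats --num-workers as
  // deprecated. This only illustrates where the numExecutors value that is now
  // forwarded to getInitialTargetExecutorNumber comes from.
  def parseNumExecutors(args: List[String], default: Int = 2): Int = args match {
    case ("--num-executors" | "--num-workers") :: value :: _ => value.toInt
    case _ :: tail                                            => parseNumExecutors(tail, default)
    case Nil                                                  => default
  }

  def main(args: Array[String]): Unit = {
    println(parseNumExecutors(List("--num-workers", "4")))   // 4
    println(parseNumExecutors(List("--num-executors", "6"))) // 6
    println(parseNumExecutors(Nil))                          // 2, the assumed default
  }
}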

yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala

Lines changed: 5 additions & 2 deletions

@@ -392,8 +392,11 @@ object YarnSparkHadoopUtil {
   /**
    * Getting the initial target number of executors depends on whether dynamic allocation is
    * enabled.
+   * If not using dynamic allocation it gets the number of executors requested by the user.
    */
-  def getInitialTargetExecutorNumber(conf: SparkConf): Int = {
+  def getInitialTargetExecutorNumber(
+      conf: SparkConf,
+      numExecutors: Int = DEFAULT_NUMBER_EXECUTORS): Int = {
     if (Utils.isDynamicAllocationEnabled(conf)) {
       val minNumExecutors = conf.getInt("spark.dynamicAllocation.minExecutors", 0)
       val initialNumExecutors =
@@ -406,7 +409,7 @@ object YarnSparkHadoopUtil {
       initialNumExecutors
     } else {
       val targetNumExecutors =
-        sys.env.get("SPARK_EXECUTOR_INSTANCES").map(_.toInt).getOrElse(DEFAULT_NUMBER_EXECUTORS)
+        sys.env.get("SPARK_EXECUTOR_INSTANCES").map(_.toInt).getOrElse(numExecutors)
       // System property can override environment variable.
       conf.getInt("spark.executor.instances", targetNumExecutors)
     }
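
Taken together, when dynamic allocation is off the patched method now resolves the executor count in this order: spark.executor.instances, then SPARK_EXECUTOR_INSTANCES, then the numExecutors argument fed from --num-executors / --num-workers, then the built-in default. Below is a minimal, self-contained sketch of that resolution order; ExecutorCountPrecedence, resolve, and the parameter names are illustrative stand-ins, not Spark's API:

object ExecutorCountPrecedence {
  // Assumed default; in Spark the constant YarnSparkHadoopUtil.DEFAULT_NUMBER_EXECUTORS
  // plays this role.
  val DefaultNumberExecutors = 2

  // confExecutorInstances stands in for spark.executor.instances,
  // envExecutorInstances for SPARK_EXECUTOR_INSTANCES, and numExecutors for the
  // value parsed from --num-executors / --num-workers.
  def resolve(
      confExecutorInstances: Option[Int],
      envExecutorInstances: Option[Int],
      numExecutors: Int = DefaultNumberExecutors): Int = {
    // Before this commit the CLI value never reached this point: the environment
    // variable fell straight back to the hard-coded default.
    val fromEnvOrCli = envExecutorInstances.getOrElse(numExecutors)
    // The spark.executor.instances property still overrides everything else.
    confExecutorInstances.getOrElse(fromEnvOrCli)
  }

  def main(args: Array[String]): Unit = {
    println(resolve(None, None, numExecutors = 5))       // 5: --num-executors is now honored
    println(resolve(None, Some(4), numExecutors = 5))    // 4: the env var beats the CLI value
    println(resolve(Some(3), Some(4), numExecutors = 5)) // 3: the conf property wins
  }
}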
