@@ -19,7 +19,7 @@ package org.apache.spark.deploy.yarn
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkException, SparkConf}
 import org.apache.spark.deploy.yarn.YarnSparkHadoopUtil._
 import org.apache.spark.util.{Utils, IntParam, MemoryParam}
 
@@ -95,6 +95,10 @@ private[spark] class ClientArguments(args: Array[String], sparkConf: SparkConf)
       throw new IllegalArgumentException(
         "You must specify at least 1 executor!\n" + getUsageMessage())
     }
+    if (executorCores < sparkConf.getInt("spark.task.cpus", 1)) {
+      throw new SparkException("spark.executor.cores must not be less than " +
+        "spark.task.cpus.")
+    }
     if (isClusterMode) {
       for (key <- Seq(amMemKey, amMemOverheadKey, amCoresKey)) {
         if (sparkConf.contains(key)) {
@@ -222,7 +226,7 @@ private[spark] class ClientArguments(args: Array[String], sparkConf: SparkConf)
      |  --arg ARG                Argument to be passed to your application's main class.
      |                           Multiple invocations are possible, each will be passed in order.
      |  --num-executors NUM      Number of executors to start (Default: 2)
-     |  --executor-cores NUM     Number of cores for the executors (Default: 1).
+     |  --executor-cores NUM     Number of cores per executor (Default: 1).
      |  --driver-memory MEM      Memory for driver (e.g. 1000M, 2G) (Default: 512 Mb)
      |  --driver-cores NUM       Number of cores used by the driver (Default: 1).
      |  --executor-memory MEM    Memory per executor (e.g. 1000M, 2G) (Default: 1G)
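
The new check fails fast when each executor has fewer cores than a single task requires, since such an executor could never schedule any task. Below is a minimal sketch of that validation in isolation; the `ExecutorCoresCheck` object and `validate` method are hypothetical names for illustration, not part of the patch, and running it assumes spark-core is on the classpath:

```scala
import org.apache.spark.{SparkConf, SparkException}

// Hypothetical standalone version of the check added in this diff:
// reject configurations where spark.executor.cores < spark.task.cpus.
object ExecutorCoresCheck {
  def validate(executorCores: Int, sparkConf: SparkConf): Unit = {
    // spark.task.cpus defaults to 1, matching the diff above.
    if (executorCores < sparkConf.getInt("spark.task.cpus", 1)) {
      throw new SparkException("spark.executor.cores must not be less than " +
        "spark.task.cpus.")
    }
  }

  def main(args: Array[String]): Unit = {
    // With 2 cpus per task but only 1 core per executor,
    // no task could ever run, so validate throws.
    val conf = new SparkConf(false).set("spark.task.cpus", "2")
    ExecutorCoresCheck.validate(executorCores = 1, conf)
  }
}
```

Throwing `SparkException` at argument-parsing time surfaces the misconfiguration immediately, instead of letting the application start and then hang with executors that can never accept a task.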