@@ -69,7 +69,8 @@ object SparkSubmit extends CommandLineUtils {
6969 private val STANDALONE = 2
7070 private val MESOS = 4
7171 private val LOCAL = 8
72- private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | LOCAL
72+ private val KUBERNETES = 16
73+ private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | KUBERNETES | LOCAL
7374
7475 // Deploy modes
7576 private val CLIENT = 1
@@ -229,9 +230,10 @@ object SparkSubmit extends CommandLineUtils {
229230 YARN
230231 case m if m.startsWith(" spark" ) => STANDALONE
231232 case m if m.startsWith(" mesos" ) => MESOS
233+ case m if m.startsWith(" k8s" ) => KUBERNETES
232234 case m if m.startsWith(" local" ) => LOCAL
233235 case _ =>
234- printErrorAndExit(" Master must either be yarn or start with spark, mesos, local" )
236+ printErrorAndExit(" Master must either be yarn or start with spark, mesos, k8s, or local" )
235237 - 1
236238 }
237239
@@ -274,6 +276,7 @@ object SparkSubmit extends CommandLineUtils {
274276 }
275277 val isYarnCluster = clusterManager == YARN && deployMode == CLUSTER
276278 val isMesosCluster = clusterManager == MESOS && deployMode == CLUSTER
279+ val isKubernetesCluster = clusterManager == KUBERNETES && deployMode == CLUSTER
277280
278281 // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
279282 // too for packages that include Python code
@@ -320,6 +323,10 @@ object SparkSubmit extends CommandLineUtils {
320323
321324 // The following modes are not supported or applicable
322325 (clusterManager, deployMode) match {
326+ case (KUBERNETES , CLIENT ) =>
327+ printErrorAndExit(" Client mode is currently not supported for Kubernetes." )
328+ case (KUBERNETES , CLUSTER ) if args.isPython || args.isR =>
329+ printErrorAndExit(" Kubernetes does not currently support python or R applications." )
323330 case (STANDALONE , CLUSTER ) if args.isPython =>
324331 printErrorAndExit(" Cluster deploy mode is currently not supported for python " +
325332 " applications on standalone clusters." )
@@ -453,17 +460,21 @@ object SparkSubmit extends CommandLineUtils {
453460 OptionAssigner (args.principal, YARN , ALL_DEPLOY_MODES , sysProp = " spark.yarn.principal" ),
454461 OptionAssigner (args.keytab, YARN , ALL_DEPLOY_MODES , sysProp = " spark.yarn.keytab" ),
455462
456- // Other options
463+ OptionAssigner (args.kubernetesNamespace, KUBERNETES , ALL_DEPLOY_MODES ,
464+ sysProp = " spark.kubernetes.namespace" ),
465+
466+ // Other options
457467 OptionAssigner (args.executorCores, STANDALONE | YARN , ALL_DEPLOY_MODES ,
458468 sysProp = " spark.executor.cores" ),
459469 OptionAssigner (args.executorMemory, STANDALONE | MESOS | YARN , ALL_DEPLOY_MODES ,
460470 sysProp = " spark.executor.memory" ),
461471 OptionAssigner (args.totalExecutorCores, STANDALONE | MESOS , ALL_DEPLOY_MODES ,
462472 sysProp = " spark.cores.max" ),
463- OptionAssigner (args.files, LOCAL | STANDALONE | MESOS , ALL_DEPLOY_MODES ,
473+ OptionAssigner (args.files, LOCAL | STANDALONE | MESOS | KUBERNETES , ALL_DEPLOY_MODES ,
464474 sysProp = " spark.files" ),
465475 OptionAssigner (args.jars, LOCAL , CLIENT , sysProp = " spark.jars" ),
466- OptionAssigner (args.jars, STANDALONE | MESOS , ALL_DEPLOY_MODES , sysProp = " spark.jars" ),
476+ OptionAssigner (args.jars, STANDALONE | MESOS | KUBERNETES , ALL_DEPLOY_MODES ,
477+ sysProp = " spark.jars" ),
467478 OptionAssigner (args.driverMemory, STANDALONE | MESOS | YARN , CLUSTER ,
468479 sysProp = " spark.driver.memory" ),
469480 OptionAssigner (args.driverCores, STANDALONE | MESOS | YARN , CLUSTER ,
@@ -496,8 +507,9 @@ object SparkSubmit extends CommandLineUtils {
496507
497508 // Add the application jar automatically so the user doesn't have to call sc.addJar
498509 // For YARN cluster mode, the jar is already distributed on each node as "app.jar"
510+ // In Kubernetes cluster mode, the jar will be uploaded by the client separately.
499511 // For python and R files, the primary resource is already distributed as a regular file
500- if (! isYarnCluster && ! args.isPython && ! args.isR) {
512+ if (! isYarnCluster && ! isKubernetesCluster && ! args.isPython && ! args.isR) {
501513 var jars = sysProps.get(" spark.jars" ).map(x => x.split(" ," ).toSeq).getOrElse(Seq .empty)
502514 if (isUserJar(args.primaryResource)) {
503515 jars = jars ++ Seq (args.primaryResource)
@@ -596,6 +608,13 @@ object SparkSubmit extends CommandLineUtils {
596608 }
597609 }
598610
611+ if (isKubernetesCluster) {
612+ childMainClass = " org.apache.spark.deploy.kubernetes.Client"
613+ childArgs += args.primaryResource
614+ childArgs += args.mainClass
615+ childArgs ++= args.childArgs
616+ }
617+
599618 // Load any properties specified through --conf and the default properties file
600619 for ((k, v) <- args.sparkProperties) {
601620 sysProps.getOrElseUpdate(k, v)