diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 0da9a04684fe..51b7396b4d0b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -481,6 +481,8 @@ object SparkSubmit extends CommandLineUtils {
OptionAssigner(args.kubernetesNamespace, KUBERNETES, ALL_DEPLOY_MODES,
sysProp = "spark.kubernetes.namespace"),
+ OptionAssigner(args.kubernetesDnsZone, KUBERNETES, ALL_DEPLOY_MODES,
+ sysProp = "spark.kubernetes.dnsZone"),
// Other options
OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 3350987d17a8..2ef80260f073 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -73,6 +73,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
// Kubernetes only
var kubernetesNamespace: String = null
+ var kubernetesDnsZone: String = null
// Standalone cluster mode only
var supervise: Boolean = false
@@ -199,6 +200,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
kubernetesNamespace = Option(kubernetesNamespace)
.orElse(sparkProperties.get("spark.kubernetes.namespace"))
.orNull
+ kubernetesDnsZone = Option(kubernetesDnsZone)
+ .orElse(sparkProperties.get("spark.kubernetes.dnsZone"))
+ .orNull
// Try to set main class from JAR if no --class argument is given
if (mainClass == null && !isPython && !isR && primaryResource != null) {
@@ -440,6 +444,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
case KUBERNETES_NAMESPACE =>
kubernetesNamespace = value
+ case KUBERNETES_DNS_ZONE =>
+ kubernetesDnsZone = value
+
case HELP =>
printUsageAndExit(0)
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index faaa60269686..0f79abf263e7 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -388,6 +388,7 @@ class SparkSubmitSuite
"--executor-memory", "5g",
"--class", "org.SomeClass",
"--kubernetes-namespace", "foo",
+ "--kubernetes-dns-zone", "my.domain",
"--driver-memory", "4g",
"--conf", "spark.kubernetes.driver.docker.image=bar",
"/home/thejar.jar",
@@ -404,6 +405,7 @@ class SparkSubmitSuite
sysProps("spark.executor.memory") should be ("5g")
sysProps("spark.driver.memory") should be ("4g")
sysProps("spark.kubernetes.namespace") should be ("foo")
+ sysProps("spark.kubernetes.dnsZone") should be ("my.domain")
sysProps("spark.kubernetes.driver.docker.image") should be ("bar")
}
diff --git a/docs/running-on-kubernetes.md b/docs/running-on-kubernetes.md
index 1b070973afe7..9e904f2e7c16 100644
--- a/docs/running-on-kubernetes.md
+++ b/docs/running-on-kubernetes.md
@@ -361,6 +361,15 @@ from the other deployment modes. See the [configuration page](configuration.html
--kubernetes-namespace command line argument.
+
+ spark.kubernetes.dnsZone |
+ cluster.local |
+
+ The DNS zone that the Kubernetes cluster uses. When using spark-submit in cluster mode,
+ this can also be passed to spark-submit via the
+ --kubernetes-dns-zone command line argument.
+ |
+
spark.kubernetes.driver.docker.image |
spark-driver:2.2.0 |
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
index a4d43c0795ab..33e8df72f91f 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
@@ -78,6 +78,7 @@ class SparkSubmitOptionParser {
// Kubernetes-only options.
protected final String KUBERNETES_NAMESPACE = "--kubernetes-namespace";
+ protected final String KUBERNETES_DNS_ZONE = "--kubernetes-dns-zone";
/**
* This is the canonical list of spark-submit options. Each entry in the array contains the
@@ -118,7 +119,8 @@ class SparkSubmitOptionParser {
{ REPOSITORIES },
{ STATUS },
{ TOTAL_EXECUTOR_CORES },
- { KUBERNETES_NAMESPACE }
+ { KUBERNETES_NAMESPACE },
+ { KUBERNETES_DNS_ZONE }
};
/**
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/config.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/config.scala
index e395fed810a3..0352e2453a51 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/config.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/config.scala
@@ -34,6 +34,14 @@ package object config extends Logging {
.stringConf
.createWithDefault("default")
+ private[spark] val KUBERNETES_DNS_ZONE =
+ ConfigBuilder("spark.kubernetes.dnsZone")
+ .doc("The DNS zone that the Kubernetes cluster uses. When using" +
+ " spark-submit in cluster mode, this can also be passed to spark-submit via the" +
+ " --kubernetes-dns-zone command line argument.")
+ .stringConf
+ .createWithDefault("cluster.local")
+
private[spark] val DRIVER_DOCKER_IMAGE =
ConfigBuilder("spark.kubernetes.driver.docker.image")
.doc("Docker image to use for the driver. Specify this using the standard Docker tag format.")
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStep.scala
index 1ee1851c61e6..67bb1330bb78 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStep.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStep.scala
@@ -80,7 +80,8 @@ private[spark] class DriverServiceBootstrapStep(
.build()
val namespace = submissionSparkConf.get(KUBERNETES_NAMESPACE)
- val driverHostname = s"${driverService.getMetadata.getName}.$namespace.svc.cluster.local"
+ val dnsZone = submissionSparkConf.get(KUBERNETES_DNS_ZONE)
+ val driverHostname = s"${driverService.getMetadata.getName}.$namespace.svc.$dnsZone"
val resolvedSparkConf = driverSpec.driverSparkConf.clone()
.set(org.apache.spark.internal.config.DRIVER_HOST_ADDRESS, driverHostname)
.set("spark.driver.port", driverPort.toString)
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStepSuite.scala
index 7359017cc780..52cd76c599a2 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStepSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/submitsteps/DriverServiceBootstrapStepSuite.scala
@@ -78,13 +78,14 @@ private[spark] class DriverServiceBootstrapStepSuite
sparkConf
.set("spark.driver.port", "9000")
.set(org.apache.spark.internal.config.DRIVER_BLOCK_MANAGER_PORT, 8080)
- .set(KUBERNETES_NAMESPACE, "my-namespace"),
+ .set(KUBERNETES_NAMESPACE, "my-namespace")
+ .set(KUBERNETES_DNS_ZONE, "my.domain"),
clock)
val baseDriverSpec = KubernetesDriverSpec.initialSpec(sparkConf.clone())
val resolvedDriverSpec = configurationStep.configureDriver(baseDriverSpec)
val expectedServiceName = SHORT_RESOURCE_NAME_PREFIX +
DriverServiceBootstrapStep.DRIVER_SVC_POSTFIX
- val expectedHostName = s"$expectedServiceName.my-namespace.svc.cluster.local"
+ val expectedHostName = s"$expectedServiceName.my-namespace.svc.my.domain"
verifySparkConfHostNames(resolvedDriverSpec.driverSparkConf, expectedHostName)
}