Skip to content
This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit 2773b77

Browse files
foxishash211
authored and committed
Fix issue with DNS resolution (#118)
* Fix issue with DNS resolution * Address comments
1 parent a800e20 commit 2773b77

File tree

4 files changed

+6
-5
lines changed

4 files changed

+6
-5
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/KubernetesClientBuilder.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ import com.google.common.base.Charsets
2222
import com.google.common.io.Files
2323
import io.fabric8.kubernetes.client.{Config, ConfigBuilder, DefaultKubernetesClient}
2424

25+
import org.apache.spark.deploy.kubernetes.constants._
26+
2527
private[spark] object KubernetesClientBuilder {
2628
private val API_SERVER_TOKEN = new File(Config.KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH)
2729
private val CA_CERT_FILE = new File(Config.KUBERNETES_SERVICE_ACCOUNT_CA_CRT_PATH)
@@ -33,11 +35,10 @@ private[spark] object KubernetesClientBuilder {
3335
* into the pod's disk space.
3436
*/
3537
def buildFromWithinPod(
36-
kubernetesMaster: String,
3738
kubernetesNamespace: String): DefaultKubernetesClient = {
3839
var clientConfigBuilder = new ConfigBuilder()
3940
.withApiVersion("v1")
40-
.withMasterUrl(kubernetesMaster)
41+
.withMasterUrl(KUBERNETES_MASTER_INTERNAL_URL)
4142
.withNamespace(kubernetesNamespace)
4243

4344
if (CA_CERT_FILE.isFile) {

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,4 +67,5 @@ package object constants {
6767
// Miscellaneous
6868
private[spark] val DRIVER_CONTAINER_NAME = "spark-kubernetes-driver"
6969
private[spark] val KUBERNETES_SUBMIT_SSL_NAMESPACE = "kubernetes.submit"
70+
private[spark] val KUBERNETES_MASTER_INTERNAL_URL = "https://kubernetes.default.svc"
7071
}

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/KubernetesClusterSchedulerBackend.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,6 @@ private[spark] class KubernetesClusterSchedulerBackend(
4444
private val EXECUTOR_MODIFICATION_LOCK = new Object
4545
private val runningExecutorPods = new scala.collection.mutable.HashMap[String, Pod]
4646

47-
private val kubernetesMaster = "https://kubernetes"
4847
private val executorDockerImage = conf.get(EXECUTOR_DOCKER_IMAGE)
4948
private val kubernetesNamespace = conf.get(KUBERNETES_NAMESPACE)
5049
private val executorPort = conf.getInt("spark.executor.port", DEFAULT_STATIC_PORT)
@@ -77,7 +76,7 @@ private[spark] class KubernetesClusterSchedulerBackend(
7776
ThreadUtils.newDaemonCachedThreadPool("kubernetes-executor-requests"))
7877

7978
private val kubernetesClient = KubernetesClientBuilder
80-
.buildFromWithinPod(kubernetesMaster, kubernetesNamespace)
79+
.buildFromWithinPod(kubernetesNamespace)
8180

8281
private val driverPod = try {
8382
kubernetesClient.pods().inNamespace(kubernetesNamespace).

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/minikube/Minikube.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ private[spark] object Minikube extends Logging {
7676

7777
def getDockerEnv: Map[String, String] = synchronized {
7878
assert(MINIKUBE_EXECUTABLE_DEST.exists(), EXPECTED_DOWNLOADED_MINIKUBE_MESSAGE)
79-
executeMinikube("docker-env")
79+
executeMinikube("docker-env", "--shell", "bash")
8080
.filter(_.startsWith("export"))
8181
.map(_.replaceFirst("export ", "").split('='))
8282
.map(arr => (arr(0), arr(1).replaceAllLiterally("\"", "")))

0 commit comments

Comments (0)