Skip to content

Commit 6a1353f

Browse files
Committed: "Addressed comments"
1 parent: e92ef5b · commit: 6a1353f

File tree

3 files changed

+5
-3
lines changed

3 files changed

+5
-3
lines changed

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -251,7 +251,7 @@ private[spark] object KubernetesSuite {
251251
val TEST_SECRET_NAME = "test-secret"
252252
val TEST_SECRET_KEY = "test-key"
253253
val TEST_SECRET_VALUE = "test-data"
254-
val TEST_SECRET_MOUNT_PATH = "/wtc/secrets"
254+
val TEST_SECRET_MOUNT_PATH = "/etc/secrets"
255255

256256
case object ShuffleNotReadyException extends Exception
257257
}

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ private[spark] class KubernetesTestComponents(defaultClient: DefaultKubernetesCl
4747
val namespaceList = defaultClient
4848
.namespaces()
4949
.list()
50-
.getItems()
50+
.getItems
5151
.asScala
5252
require(!namespaceList.exists(_.getMetadata.getName == namespace))
5353
}
@@ -61,6 +61,8 @@ private[spark] class KubernetesTestComponents(defaultClient: DefaultKubernetesCl
6161
System.getProperty("spark.docker.test.driverImage", "spark-driver:latest"))
6262
.set("spark.kubernetes.executor.container.image",
6363
System.getProperty("spark.docker.test.executorImage", "spark-executor:latest"))
64+
.set("spark.kubernetes.initContainer.image",
65+
System.getProperty("spark.docker.test.initContainerImage", "spark-init-container:latest"))
6466
.set("spark.executor.memory", "500m")
6567
.set("spark.executor.cores", "1")
6668
.set("spark.executors.instances", "1")

integration-test/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/docker/SparkDockerImageBuilder.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ import org.apache.spark.deploy.k8s.integrationtest.constants.SPARK_DISTRO_PATH
2828
import org.apache.spark.deploy.k8s.integrationtest.Logging
2929

3030
private[spark] class SparkDockerImageBuilder
31-
(private val dockerEnv: Map[String, String]) extends Logging{
31+
(private val dockerEnv: Map[String, String]) extends Logging {
3232

3333
private val DOCKER_BUILD_PATH = SPARK_DISTRO_PATH
3434
// Dockerfile paths must be relative to the build path.

Commit comments: 0