From b1b4eeb6a3b06fac7a5ca797b8e532401701fde8 Mon Sep 17 00:00:00 2001
From: mcheah
Date: Fri, 18 Aug 2017 14:37:46 -0700
Subject: [PATCH 1/2] Use a list of environment variables for JVM options.

---
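Notes:

spark.driver.extraJavaOptions can contain options with embedded spaces,
e.g. -Dkey='a value with spaces'. Passing every option through a single
SPARK_DRIVER_JAVA_OPTS environment variable and word-splitting it in the
container breaks such options apart at the spaces. The submission client
therefore publishes one environment variable per option
(SPARK_JAVA_OPT_0, SPARK_JAVA_OPT_1, ...), and the driver container
rebuilds the argument array from those variables so that each option
survives intact. A minimal sketch of the submission-side mapping, using
hypothetical option values (the real list is built from the resolved
driver SparkConf plus spark.driver.extraJavaOptions):

    import io.fabric8.kubernetes.api.model.{EnvVar, EnvVarBuilder}
    import org.apache.spark.util.Utils

    // Hypothetical inputs standing in for the resolved driver options.
    val opts: Seq[String] =
      Seq("-Dspark.logConf=true") ++
        Utils.splitCommandString("-Dkey='a value with spaces'")
    // One env var per option, indexed so that no single value has to
    // encode a space-separated list.
    val optEnvs: Seq[EnvVar] = opts.zipWithIndex.map { case (opt, i) =>
      new EnvVarBuilder()
        .withName(s"SPARK_JAVA_OPT_$i")
        .withValue(opt)
        .build()
    }

On the container side, the Dockerfiles grep the SPARK_JAVA_OPT_ values
back out of the environment and read them into a bash array before
invoking java; readarray is a bash builtin, so the spark-base image now
points /bin/sh at bash.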
 .../spark/deploy/kubernetes/constants.scala   |  1 +
 .../deploy/kubernetes/submit/Client.scala     | 28 ++++----
 .../kubernetes/submit/ClientSuite.scala       | 29 +++++---
 .../src/main/docker/driver-py/Dockerfile      |  6 +-
 .../src/main/docker/driver/Dockerfile         |  6 +-
 .../src/main/docker/spark-base/Dockerfile     |  2 +
 .../jobs/JavaOptionsTest.scala                | 68 +++++++++++++++++++
 .../integrationtest/KubernetesSuite.scala     | 40 ++++++++++-
 8 files changed, 150 insertions(+), 30 deletions(-)
 create mode 100644 resource-managers/kubernetes/integration-tests-spark-jobs/src/main/scala/org/apache/spark/deploy/kubernetes/integrationtest/jobs/JavaOptionsTest.scala

diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala
index 92f051b2ac298..a377cc4942ad4 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala
@@ -69,6 +69,7 @@ package object constants {
   private[spark] val ENV_MOUNTED_FILES_DIR = "SPARK_MOUNTED_FILES_DIR"
   private[spark] val ENV_PYSPARK_FILES = "PYSPARK_FILES"
   private[spark] val ENV_PYSPARK_PRIMARY = "PYSPARK_PRIMARY"
+  private[spark] val ENV_JAVA_OPT_PREFIX = "SPARK_JAVA_OPT_"
 
   // Bootstrapping dependencies with the init-container
   private[spark] val INIT_CONTAINER_ANNOTATION = "pod.beta.kubernetes.io/init-containers"
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/Client.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/Client.scala
index 2fa9b416330e5..1901f1198a84f 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/Client.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/Client.scala
@@ -18,9 +18,10 @@ package org.apache.spark.deploy.kubernetes.submit
 
 import java.util.{Collections, UUID}
 
-import io.fabric8.kubernetes.api.model.{ContainerBuilder, OwnerReferenceBuilder, PodBuilder}
+import io.fabric8.kubernetes.api.model.{ContainerBuilder, EnvVar, EnvVarBuilder, OwnerReferenceBuilder, PodBuilder}
 import io.fabric8.kubernetes.client.KubernetesClient
 import scala.collection.mutable
+import scala.collection.JavaConverters._
 
 import org.apache.spark.SparkConf
 import org.apache.spark.deploy.kubernetes.config._
@@ -92,18 +93,21 @@ private[spark] class Client(
       currentDriverSpec = nextStep.configureDriver(currentDriverSpec)
     }
     val resolvedDriverJavaOpts = currentDriverSpec
-        .driverSparkConf
-        // We don't need this anymore since we just set the JVM options on the environment
-        .remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
-        .getAll
-        .map {
-          case (confKey, confValue) => s"-D$confKey=$confValue"
-        }.mkString(" ") + driverJavaOptions.map(" " + _).getOrElse("")
+      .driverSparkConf
+      // We don't need this anymore since we just set the JVM options on the environment
+      .remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
+      .getAll
+      .map {
+        case (confKey, confValue) => s"-D$confKey=$confValue"
+      } ++ driverJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
+    val driverJavaOptsEnvs: Seq[EnvVar] = resolvedDriverJavaOpts.zipWithIndex.map {
+      case (option, index) => new EnvVarBuilder()
+        .withName(s"$ENV_JAVA_OPT_PREFIX$index")
+        .withValue(option)
+        .build()
+    }
     val resolvedDriverContainer = new ContainerBuilder(currentDriverSpec.driverContainer)
-      .addNewEnv()
-        .withName(ENV_DRIVER_JAVA_OPTS)
-        .withValue(resolvedDriverJavaOpts)
-        .endEnv()
+      .addAllToEnv(driverJavaOptsEnvs.asJava)
       .build()
     val resolvedDriverPod = new PodBuilder(currentDriverSpec.driverPod)
       .editSpec()
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/ClientSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/ClientSuite.scala
index 965ee75c248b8..0100dce454a3f 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/ClientSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/ClientSuite.scala
@@ -135,7 +135,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter {
       .set("spark.logConf", "true")
       .set(
         org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS,
-        "-XX:+|-HeapDumpOnOutOfMemoryError")
+        "-XX:+HeapDumpOnOutOfMemoryError -XX:+PrintGCDetails")
     val submissionClient = new Client(
       submissionSteps,
       sparkConf,
@@ -147,15 +147,22 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter {
     val createdPod = createdPodArgumentCaptor.getValue
     val driverContainer = Iterables.getOnlyElement(createdPod.getSpec.getContainers)
     assert(driverContainer.getName === SecondTestConfigurationStep.containerName)
-    val driverJvmOptsEnv = Iterables.getOnlyElement(driverContainer.getEnv)
-    assert(driverJvmOptsEnv.getName === ENV_DRIVER_JAVA_OPTS)
-    val driverJvmOpts = driverJvmOptsEnv.getValue.split(" ").toSet
-    assert(driverJvmOpts.contains("-Dspark.logConf=true"))
-    assert(driverJvmOpts.contains(
+    val driverJvmOptsEnvs = driverContainer.getEnv.asScala.filter { env =>
+      env.getName.startsWith(ENV_JAVA_OPT_PREFIX)
+    }.sortBy(_.getName)
+    assert(driverJvmOptsEnvs.size === 4)
+
+    val expectedJvmOptsValues = Seq(
+      "-Dspark.logConf=true",
       s"-D${SecondTestConfigurationStep.sparkConfKey}=" +
-      SecondTestConfigurationStep.sparkConfValue))
-    assert(driverJvmOpts.contains(
-      "-XX:+|-HeapDumpOnOutOfMemoryError"))
+        s"${SecondTestConfigurationStep.sparkConfValue}",
+      s"-XX:+HeapDumpOnOutOfMemoryError",
+      s"-XX:+PrintGCDetails")
+    driverJvmOptsEnvs.zip(expectedJvmOptsValues).zipWithIndex.foreach {
+      case ((resolvedEnv, expectedJvmOpt), index) =>
+        assert(resolvedEnv.getName === s"$ENV_JAVA_OPT_PREFIX$index")
+        assert(resolvedEnv.getValue === expectedJvmOpt)
+    }
   }
 
   test("Waiting for app completion should stall on the watcher") {
@@ -211,8 +218,8 @@ private object SecondTestConfigurationStep extends DriverConfigurationStep {
   override def configureDriver(driverSpec: KubernetesDriverSpec): KubernetesDriverSpec = {
     val modifiedPod = new PodBuilder(driverSpec.driverPod)
       .editMetadata()
-      .addToAnnotations(annotationKey, annotationValue)
-      .endMetadata()
+        .addToAnnotations(annotationKey, annotationValue)
+        .endMetadata()
       .build()
     val resolvedSparkConf = driverSpec.driverSparkConf.clone().set(sparkConfKey, sparkConfValue)
     val modifiedContainer = new ContainerBuilder(driverSpec.driverContainer)
diff --git a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile
index 731ea897458ce..41e4b31446c59 100644
--- a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile
+++ b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile
@@ -39,10 +39,10 @@ ENV PYSPARK_DRIVER_PYTHON python
 ENV PYTHONPATH ${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.4-src.zip:${PYTHONPATH}
 
 CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
+    env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \
+    readarray -t SPARK_DRIVER_JAVA_OPTS < /tmp/java_opts.txt && \
     if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
-    ${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH \
-      -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY \
-      $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
+    ${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
diff --git a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile
index bd28af950f4dd..c2ff8f4f55822 100644
--- a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile
+++ b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile
@@ -24,8 +24,10 @@ FROM spark-base
 COPY examples /opt/spark/examples
 
 CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
+    env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \
+    readarray -t SPARK_DRIVER_JAVA_OPTS < /tmp/java_opts.txt && \
     if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
-    if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
-    ${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
+    if ! [ -z ${SPARK_MOUNTED_FILES_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
+    ${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
diff --git a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/spark-base/Dockerfile b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/spark-base/Dockerfile
index 61d295a5b37c2..a982fc5fd30ce 100644
--- a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/spark-base/Dockerfile
+++ b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/spark-base/Dockerfile
@@ -24,6 +24,8 @@ RUN apk upgrade --no-cache && \
     apk add --no-cache bash tini && \
     mkdir -p /opt/spark && \
     touch /opt/spark/RELEASE && \
+    rm /bin/sh && \
+    ln -sv /bin/bash /bin/sh && \
     chgrp root /etc/passwd && chmod ug+rw /etc/passwd
 
 COPY jars /opt/spark/jars
diff --git a/resource-managers/kubernetes/integration-tests-spark-jobs/src/main/scala/org/apache/spark/deploy/kubernetes/integrationtest/jobs/JavaOptionsTest.scala b/resource-managers/kubernetes/integration-tests-spark-jobs/src/main/scala/org/apache/spark/deploy/kubernetes/integrationtest/jobs/JavaOptionsTest.scala
new file mode 100644
index 0000000000000..114f8ec0408fa
--- /dev/null
+++ b/resource-managers/kubernetes/integration-tests-spark-jobs/src/main/scala/org/apache/spark/deploy/kubernetes/integrationtest/jobs/JavaOptionsTest.scala
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.deploy.kubernetes.integrationtest.jobs
+
+import java.io.{File, FileInputStream}
+import java.util.Properties
+
+import com.google.common.collect.Maps
+import scala.collection.JavaConverters._
+
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.util.Utils
+
+private[spark] object JavaOptionsTest {
+
+  def main(args: Array[String]): Unit = {
+    // scalastyle:off println
+    if (args.length != 1) {
+      println(s"Invalid arguments: ${args.mkString(",")}." +
+        s" Usage: JavaOptionsTest <driver-java-options-properties-file>")
+      System.exit(1)
+    }
+    val expectedDriverJavaOptions = loadPropertiesFromFile(args(0))
+    val nonMatchingDriverOptions = expectedDriverJavaOptions.filter {
+      case (optKey, optValue) => System.getProperty(optKey) != optValue
+    }
+    if (nonMatchingDriverOptions.nonEmpty) {
+      println(s"The driver's JVM options did not match. Expected $expectedDriverJavaOptions." +
+        s" But these options did not match: $nonMatchingDriverOptions.")
+      val sysProps = Maps.fromProperties(System.getProperties).asScala
+      println("System properties are:")
+      for (prop <- sysProps) {
+        println(s"Key: ${prop._1}, Value: ${prop._2}")
+      }
+      System.exit(1)
+    }
+
+    // TODO support spark.executor.extraJavaOptions and test here.
+    println(s"All expected JVM options were present on the driver and executors.")
+    // scalastyle:on println
+  }
+
+  private def loadPropertiesFromFile(filePath: String): Map[String, String] = {
+    val file = new File(filePath)
+    if (!file.isFile) {
+      throw new IllegalArgumentException(s"File not found at $filePath or is not a file.")
+    }
+    val properties = new Properties()
+    Utils.tryWithResource(new FileInputStream(file)) { is =>
+      properties.load(is)
+    }
+    Maps.fromProperties(properties).asScala.toMap
+  }
+}
+ println(s"All expected JVM options were present on the driver and executors.") + // scalastyle:on println + } + + private def loadPropertiesFromFile(filePath: String): Map[String, String] = { + val file = new File(filePath) + if (!file.isFile) { + throw new IllegalArgumentException(s"File not found at $filePath or is not a file.") + } + val properties = new Properties() + Utils.tryWithResource(new FileInputStream(file)) { is => + properties.load(is) + } + Maps.fromProperties(properties).asScala.toMap + } +} diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala index c6cd6a74c88d1..84d22381d090b 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala @@ -16,9 +16,9 @@ */ package org.apache.spark.deploy.kubernetes.integrationtest -import java.io.File +import java.io.{File, FileOutputStream} import java.nio.file.Paths -import java.util.UUID +import java.util.{Properties, UUID} import com.google.common.base.Charsets import com.google.common.io.Files @@ -226,6 +226,26 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter { Seq.empty[String]) } + test("Setting JVM options on the driver and executors with spaces.") { + assume(testBackend.name == MINIKUBE_TEST_BACKEND) + launchStagingServer(SSLOptions(), None) + val driverJvmOptionsFile = storeJvmOptionsInTempFile( + Map("simpleDriverConf" -> "simpleDriverConfValue", + "driverconfwithspaces" -> "driver conf with spaces value"), + "driver-jvm-options.properties", + "JVM options that should be set on the driver.") + sparkConf.set(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, + "-DsimpleDriverConf=simpleDriverConfValue" + + " -Ddriverconfwithspaces='driver conf with spaces value'") + sparkConf.set("spark.files", driverJvmOptionsFile.getAbsolutePath) + runSparkApplicationAndVerifyCompletion( + JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE), + JAVA_OPTIONS_MAIN_CLASS, + Seq(s"All expected JVM options were present on the driver and executors."), + Array(driverJvmOptionsFile.getName), + Seq.empty[String]) + } + test("Use a very long application name.") { assume(testBackend.name == MINIKUBE_TEST_BACKEND) @@ -339,6 +359,20 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter { } } } + + private def storeJvmOptionsInTempFile( + options: Map[String, String], + propertiesFileName: String, + comments: String): File = { + val tempDir = Utils.createTempDir() + val propertiesFile = new File(tempDir, propertiesFileName) + val properties = new Properties() + options.foreach { case (propKey, propValue) => properties.setProperty(propKey, propValue) } + Utils.tryWithResource(new FileOutputStream(propertiesFile)) { os => + properties.store(os, comments) + } + propertiesFile + } } private[spark] object KubernetesSuite { @@ -368,6 +402,8 @@ private[spark] object KubernetesSuite { ".integrationtest.jobs.FileExistenceTest" val GROUP_BY_MAIN_CLASS = "org.apache.spark.deploy.kubernetes" + ".integrationtest.jobs.GroupByTest" + val JAVA_OPTIONS_MAIN_CLASS = "org.apache.spark.deploy.kubernetes" + + ".integrationtest.jobs.JavaOptionsTest" val 
TEST_EXISTENCE_FILE_CONTENTS = "contents" case object ShuffleNotReadyException extends Exception From af1a1b85c07a719ebe8474feb0c81ccf8321b003 Mon Sep 17 00:00:00 2001 From: mcheah Date: Mon, 21 Aug 2017 15:30:31 -0700 Subject: [PATCH 2/2] Fix merge conflicts. --- .../apache/spark/deploy/kubernetes/constants.scala | 4 ---- .../src/main/docker/driver-py/Dockerfile | 14 ++------------ .../src/main/docker/driver/Dockerfile | 10 +--------- .../integrationtest/KubernetesSuite.scala | 4 ---- 4 files changed, 3 insertions(+), 29 deletions(-) diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala index 39634833af6aa..3c4b451a271f2 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala @@ -69,12 +69,8 @@ package object constants { private[spark] val ENV_MOUNTED_FILES_DIR = "SPARK_MOUNTED_FILES_DIR" private[spark] val ENV_PYSPARK_FILES = "PYSPARK_FILES" private[spark] val ENV_PYSPARK_PRIMARY = "PYSPARK_PRIMARY" -<<<<<<< HEAD private[spark] val ENV_JAVA_OPT_PREFIX = "SPARK_JAVA_OPT_" -||||||| merged common ancestors -======= private[spark] val ENV_MOUNTED_FILES_FROM_SECRET_DIR = "SPARK_MOUNTED_FILES_FROM_SECRET_DIR" ->>>>>>> apache-spark-on-k8s/branch-2.2-kubernetes // Bootstrapping dependencies with the init-container private[spark] val INIT_CONTAINER_ANNOTATION = "pod.beta.kubernetes.io/init-containers" diff --git a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile index b12ad79925ca1..7b1effa911f19 100644 --- a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile +++ b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile @@ -44,16 +44,6 @@ CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \ if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \ if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \ if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \ - if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \ -<<<<<<< HEAD + if ! [ -z ${SPARK_MOUNTED_FILES_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \ + if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." .; fi && \ ${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS -||||||| merged common ancestors - ${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH \ - -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY \ - $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS -======= - if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." 
 .../apache/spark/deploy/kubernetes/constants.scala |  4 ----
 .../src/main/docker/driver-py/Dockerfile           | 14 ++------------
 .../src/main/docker/driver/Dockerfile              | 10 +---------
 .../integrationtest/KubernetesSuite.scala          |  4 ----
 4 files changed, 3 insertions(+), 29 deletions(-)

diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala
index 39634833af6aa..3c4b451a271f2 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala
@@ -69,12 +69,8 @@ package object constants {
   private[spark] val ENV_MOUNTED_FILES_DIR = "SPARK_MOUNTED_FILES_DIR"
   private[spark] val ENV_PYSPARK_FILES = "PYSPARK_FILES"
   private[spark] val ENV_PYSPARK_PRIMARY = "PYSPARK_PRIMARY"
-<<<<<<< HEAD
   private[spark] val ENV_JAVA_OPT_PREFIX = "SPARK_JAVA_OPT_"
-||||||| merged common ancestors
-=======
   private[spark] val ENV_MOUNTED_FILES_FROM_SECRET_DIR = "SPARK_MOUNTED_FILES_FROM_SECRET_DIR"
->>>>>>> apache-spark-on-k8s/branch-2.2-kubernetes
 
   // Bootstrapping dependencies with the init-container
   private[spark] val INIT_CONTAINER_ANNOTATION = "pod.beta.kubernetes.io/init-containers"
diff --git a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile
index b12ad79925ca1..7b1effa911f19 100644
--- a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile
+++ b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver-py/Dockerfile
@@ -44,16 +44,6 @@ CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
     if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
-    if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
-<<<<<<< HEAD
+    if ! [ -z ${SPARK_MOUNTED_FILES_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
+    if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." .; fi && \
     ${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
-||||||| merged common ancestors
-    ${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH \
-      -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY \
-      $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
-=======
-    if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." .; fi && \
-    ${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH \
-      -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY \
-      $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
->>>>>>> apache-spark-on-k8s/branch-2.2-kubernetes
diff --git a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile
index d165a448aed6a..26d1d805fde2b 100644
--- a/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile
+++ b/resource-managers/kubernetes/docker-minimal-bundle/src/main/docker/driver/Dockerfile
@@ -29,14 +29,6 @@ CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
     if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
     if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
-<<<<<<< HEAD
     if ! [ -z ${SPARK_MOUNTED_FILES_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
-    ${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
-||||||| merged common ancestors
-    if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
-    ${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
-=======
-    if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
     if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." .; fi && \
-    ${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
->>>>>>> apache-spark-on-k8s/branch-2.2-kubernetes
+    ${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala
index 60ba0fcc39924..9c1f9775681e1 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala
@@ -229,7 +229,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       Seq.empty[String])
   }
 
-<<<<<<< HEAD
   test("Setting JVM options on the driver and executors with spaces.") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
     launchStagingServer(SSLOptions(), None)
@@ -250,8 +249,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       Seq.empty[String])
   }
 
-||||||| merged common ancestors
-=======
   test("Submit small local files without the resource staging server.") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)
     sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
@@ -270,7 +267,6 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       Seq.empty[String])
   }
 
->>>>>>> apache-spark-on-k8s/branch-2.2-kubernetes
   test("Use a very long application name.") {
     assume(testBackend.name == MINIKUBE_TEST_BACKEND)