This repository was archived by the owner on Jan 9, 2020. It is now read-only.
File tree Expand file tree Collapse file tree 5 files changed +8
-8
lines changed
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s
submit/submitsteps/hadoopsteps Expand file tree Collapse file tree 5 files changed +8
-8
lines changed Original file line number Diff line number Diff line change @@ -784,7 +784,7 @@ from the other deployment modes. See the [configuration page](configuration.html
784784 <td >(none)</td >
785785 <td >
786786 Assuming you have set <code>spark.kubernetes.kerberos.enabled</code> to be true. This will let you specify
787- the principal that you wish to use to handle renewing of Delegation Tokens. This is optional as you
787+ the principal that you wish to use to handle renewing of Delegation Tokens. This is optional as
788788 we will set the principal to be the job user's principal by default.
789789 </td >
790790</tr >
Original file line number Diff line number Diff line change @@ -57,9 +57,9 @@ private[spark] class HadoopConfBootstrapImpl(
5757 .editSpec()
5858 .addNewVolume()
5959 .withName(HADOOP_FILE_VOLUME )
60- .withNewConfigMap()
61- .withName(hadoopConfConfigMapName)
62- .withItems(keyPaths.asJava)
60+ .withNewConfigMap()
61+ .withName(hadoopConfConfigMapName)
62+ .withItems(keyPaths.asJava)
6363 .endConfigMap()
6464 .endVolume()
6565 .endSpec()
Original file line number Diff line number Diff line change @@ -67,6 +67,7 @@ private[spark] class HadoopUGIUtil{
6767 val byteStream = new ByteArrayOutputStream
6868 val dataStream = new DataOutputStream (byteStream)
6969 creds.writeTokenStorageToStream(dataStream)
70+ dataStream.close()
7071 byteStream.toByteArray
7172 }
7273
Original file line number Diff line number Diff line change @@ -81,7 +81,6 @@ package object constants {
8181 private [spark] val ENV_SPARK_USER = " SPARK_USER"
8282
8383 // Bootstrapping dependencies with the init-container
84- private [spark] val INIT_CONTAINER_ANNOTATION = " pod.beta.kubernetes.io/init-containers"
8584 private [spark] val INIT_CONTAINER_SECRET_VOLUME_MOUNT_PATH =
8685 " /mnt/secrets/spark-init"
8786 private [spark] val INIT_CONTAINER_SUBMITTED_JARS_SECRET_KEY =
@@ -107,7 +106,7 @@ package object constants {
107106 private [spark] val ENV_HADOOP_CONF_DIR = " HADOOP_CONF_DIR"
108107 private [spark] val HADOOP_CONF_DIR_LOC = " spark.kubernetes.hadoop.conf.dir"
109108 private [spark] val HADOOP_CONFIG_MAP_SPARK_CONF_NAME =
110- " spark.kubernetes.hadoop.executor.hadoopconfigmapname "
109+ " spark.kubernetes.hadoop.executor.hadoopConfigMapName "
111110
112111 // Kerberos Configuration
113112 private [spark] val HADOOP_KERBEROS_SECRET_NAME =
Original file line number Diff line number Diff line change @@ -50,7 +50,7 @@ private[spark] class HadoopKerberosKeytabResolverStep(
5050 maybePrincipal : Option [String ],
5151 maybeKeytab : Option [File ],
5252 maybeRenewerPrincipal : Option [String ],
53- hadoopUGI : HadoopUGIUtil ) extends HadoopConfigurationStep with Logging {
53+ hadoopUGI : HadoopUGIUtil ) extends HadoopConfigurationStep with Logging {
5454 private var originalCredentials : Credentials = _
5555 private var dfs : FileSystem = _
5656 private var renewer : String = _
@@ -59,7 +59,7 @@ private[spark] class HadoopKerberosKeytabResolverStep(
5959
6060 override def configureContainers (hadoopConfigSpec : HadoopConfigSpec ): HadoopConfigSpec = {
6161 val hadoopConf = SparkHadoopUtil .get.newConfiguration(submissionSparkConf)
62- if (hadoopUGI.isSecurityEnabled) logDebug(" Hadoop not configured with Kerberos" )
62+ if (! hadoopUGI.isSecurityEnabled) logDebug(" Hadoop not configured with Kerberos" )
6363 val maybeJobUserUGI =
6464 for {
6565 principal <- maybePrincipal
You can’t perform that action at this time.
0 commit comments