From 6bd916a14d3eb7df827d1df5570fd02e0df19bc3 Mon Sep 17 00:00:00 2001 From: Lukas Majercak Date: Mon, 7 May 2018 14:02:10 -0700 Subject: [PATCH] Add spark.executor.bindAddress --- core/src/main/scala/org/apache/spark/SparkEnv.scala | 3 ++- .../org/apache/spark/internal/config/package.scala | 5 +++++ docs/configuration.md | 12 ++++++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala index 72123f2232532..0e6fe94c79ca0 100644 --- a/core/src/main/scala/org/apache/spark/SparkEnv.scala +++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala @@ -197,10 +197,11 @@ object SparkEnv extends Logging { numCores: Int, ioEncryptionKey: Option[Array[Byte]], isLocal: Boolean): SparkEnv = { + val bindAddress = conf.get(EXECUTOR_BIND_ADDRESS) val env = create( conf, executorId, - hostname, + bindAddress, hostname, None, isLocal, diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala index 6bb98c37b4479..c2e2d1ca65bf4 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/package.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala @@ -95,6 +95,11 @@ package object config { .bytesConf(ByteUnit.MiB) .createOptional + private[spark] val EXECUTOR_BIND_ADDRESS = ConfigBuilder("spark.executor.bindAddress") + .doc("Address where to bind network listen sockets on the executor.") + .stringConf + .createWithDefault(Utils.localHostName()) + private[spark] val MEMORY_OFFHEAP_ENABLED = ConfigBuilder("spark.memory.offHeap.enabled") .doc("If true, Spark will attempt to use off-heap memory for certain operations. 
" + "If off-heap memory use is enabled, then spark.memory.offHeap.size must be positive.") diff --git a/docs/configuration.md b/docs/configuration.md index 8a1aacef85760..a54f7efe4fd92 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -189,6 +189,18 @@ of the most common options to set are: This option is currently supported on YARN and Kubernetes. + +spark.executor.bindAddress + (local hostname) + + Hostname or IP address where to bind listening sockets. This config overrides the SPARK_LOCAL_IP + environment variable (see below). +
It also allows a different address from the local one to be advertised to the driver and other + executors or external systems. This is useful, for example, when running containers with bridged networking. + For this to properly work, the different ports used by the executor (RPC and block manager) need to be + forwarded from the container's host. + + + + spark.extraListeners (none)