
Commit 7deb67c

ueshin authored and HyukjinKwon committed
[SPARK-32160][CORE][PYSPARK][FOLLOWUP] Change the config name to switch allow/disallow SparkContext in executors
### What changes were proposed in this pull request?

This is a follow-up of #29278. This PR changes the config name to switch allow/disallow `SparkContext` in executors as per the comment #29278 (review).

### Why are the changes needed?

The config name `spark.executor.allowSparkContext` is more reasonable.

### Does this PR introduce _any_ user-facing change?

Yes, the config name is changed.

### How was this patch tested?

Updated tests.

Closes #29340 from ueshin/issues/SPARK-32160/change_config_name.

Authored-by: Takuya UESHIN <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
1 parent 9bbe8c7 commit 7deb67c
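For quick reference, a minimal sketch of what the rename means for user configuration. The key names are taken from the diffs below; `demo` and `local` are placeholder values, and the old key is shown only for contrast:

```scala
import org.apache.spark.SparkConf

// Old key (before this commit): "spark.driver.allowSparkContextInExecutors"
// New key (after this commit):
val conf = new SparkConf()
  .setAppName("demo")
  .setMaster("local")
  .set("spark.executor.allowSparkContext", "true")
```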

File tree

8 files changed: +11 −11 lines


core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion
@@ -83,7 +83,7 @@ class SparkContext(config: SparkConf) extends Logging {
   // The call site where this SparkContext was constructed.
   private val creationSite: CallSite = Utils.getCallSite()
 
-  if (!config.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+  if (!config.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
     // In order to prevent SparkContext from being created in executors.
     SparkContext.assertOnDriver()
   }
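The guard delegates to `SparkContext.assertOnDriver()`, which is not part of this diff. A plausible sketch of such a check, assuming it uses `TaskContext.get` to detect that code is running inside a task on an executor; the exception type and message are illustrative:

```scala
// Hypothetical reconstruction, for illustration only; not from this commit.
private def assertOnDriver(): Unit = {
  // TaskContext is defined only while a task is executing on an executor.
  if (TaskContext.get != null) {
    throw new IllegalStateException(
      "SparkContext should only be created and accessed on the driver.")
  }
}
```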

core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 2 additions & 2 deletions
@@ -1909,8 +1909,8 @@ package object config {
     .booleanConf
     .createWithDefault(false)
 
-  private[spark] val ALLOW_SPARK_CONTEXT_IN_EXECUTORS =
-    ConfigBuilder("spark.driver.allowSparkContextInExecutors")
+  private[spark] val EXECUTOR_ALLOW_SPARK_CONTEXT =
+    ConfigBuilder("spark.executor.allowSparkContext")
       .doc("If set to true, SparkContext can be created in executors.")
       .version("3.0.1")
       .booleanConf
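The entry uses Spark's internal `ConfigBuilder` DSL. For illustration, the full shape of the renamed entry; the `.createWithDefault(false)` line falls outside the hunk above, so the default shown is an assumption (it matches the migration-guide behavior of throwing unless the flag is set):

```scala
// Inside `package object config` (org.apache.spark.internal.config):
private[spark] val EXECUTOR_ALLOW_SPARK_CONTEXT =
  ConfigBuilder("spark.executor.allowSparkContext")
    .doc("If set to true, SparkContext can be created in executors.")
    .version("3.0.1")
    .booleanConf
    .createWithDefault(false) // assumed default: disallowed unless opted in
```

Callers then read the typed entry via `conf.get(EXECUTOR_ALLOW_SPARK_CONTEXT)`, as the SparkContext and SparkSession hunks show.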

core/src/test/scala/org/apache/spark/SparkContextSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -952,7 +952,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
 
     sc.range(0, 1).foreach { _ =>
       new SparkContext(new SparkConf().setAppName("test").setMaster("local")
-        .set(ALLOW_SPARK_CONTEXT_IN_EXECUTORS, true)).stop()
+        .set(EXECUTOR_ALLOW_SPARK_CONTEXT, true)).stop()
     }
   }
 }

docs/core-migration-guide.md

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ license: |
 
 ## Upgrading from Core 3.0 to 3.1
 
-- In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors. You can allow it by setting the configuration `spark.driver.allowSparkContextInExecutors` when creating `SparkContext` in executors.
+- In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors. You can allow it by setting the configuration `spark.executor.allowSparkContext` when creating `SparkContext` in executors.
 
 ## Upgrading from Core 2.4 to 3.0
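To make the migration note concrete, a minimal sketch mirroring the updated SparkContextSuite test above; the app names and master URL are placeholders:

```scala
import org.apache.spark.{SparkConf, SparkContext}

val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local"))

// Since Spark 3.1 this nested creation throws unless the flag is set.
sc.range(0, 1).foreach { _ =>
  new SparkContext(new SparkConf()
    .setAppName("nested")
    .setMaster("local")
    .set("spark.executor.allowSparkContext", "true")).stop()
}
sc.stop()
```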

python/pyspark/context.py

Lines changed: 1 addition & 1 deletion
@@ -118,7 +118,7 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
         ValueError:...
         """
         if (conf is None or
-                conf.get("spark.driver.allowSparkContextInExecutors", "false").lower() != "true"):
+                conf.get("spark.executor.allowSparkContext", "false").lower() != "true"):
             # In order to prevent SparkContext from being created in executors.
             SparkContext._assert_on_driver()

python/pyspark/tests/test_context.py

Lines changed: 1 addition & 1 deletion
@@ -279,7 +279,7 @@ def test_allow_to_create_spark_context_in_executors(self):
         # SPARK-32160: SparkContext can be created in executors if the config is set.
 
         def create_spark_context():
-            conf = SparkConf().set("spark.driver.allowSparkContextInExecutors", "true")
+            conf = SparkConf().set("spark.executor.allowSparkContext", "true")
             with SparkContext(conf=conf):
                 pass

sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala

Lines changed: 2 additions & 2 deletions
@@ -29,7 +29,7 @@ import org.apache.spark.{SPARK_VERSION, SparkConf, SparkContext, TaskContext}
 import org.apache.spark.annotation.{DeveloperApi, Experimental, Stable, Unstable}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.internal.Logging
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
 import org.apache.spark.sql.catalog.Catalog
@@ -904,7 +904,7 @@ object SparkSession extends Logging {
     val sparkConf = new SparkConf()
     options.foreach { case (k, v) => sparkConf.set(k, v) }
 
-    if (!sparkConf.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+    if (!sparkConf.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
       assertOnDriver()
     }

sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
 import org.apache.spark.internal.config.UI.UI_ENABLED
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.StaticSQLConf._
@@ -277,7 +277,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach {
 
     session.range(1).foreach { v =>
       SparkSession.builder.master("local")
-        .config(ALLOW_SPARK_CONTEXT_IN_EXECUTORS.key, true).getOrCreate().stop()
+        .config(EXECUTOR_ALLOW_SPARK_CONTEXT.key, true).getOrCreate().stop()
       ()
     }
   }
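The suite exercises the internal constant's `.key`; user code outside the `org.apache.spark` package would pass the string key directly. A minimal sketch of the equivalent public call:

```scala
import org.apache.spark.sql.SparkSession

// Equivalent to .config(EXECUTOR_ALLOW_SPARK_CONTEXT.key, true) in the test above.
val spark = SparkSession.builder
  .master("local")
  .config("spark.executor.allowSparkContext", "true")
  .getOrCreate()
spark.stop()
```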
