
Commit 6d7ae4a

ueshin authored and HyukjinKwon committed
[SPARK-32160][CORE][PYSPARK][3.0][FOLLOWUP] Change the config name to switch allow/disallow SparkContext in executors
### What changes were proposed in this pull request?

This is a follow-up of #29294. This PR changes the config name to switch allow/disallow `SparkContext` in executors as per the comment #29278 (review).

### Why are the changes needed?

The config name `spark.executor.allowSparkContext` is more reasonable.

### Does this PR introduce _any_ user-facing change?

Yes, the config name is changed.

### How was this patch tested?

Updated tests.

Closes #29341 from ueshin/issues/SPARK-32160/3.0/change_config_name.

Authored-by: Takuya UESHIN <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
1 parent c148a98 commit 6d7ae4a
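
Since the rename is user-facing, the practical effect is on how an application opts in to creating a `SparkContext` inside executors. A minimal before/after sketch, assuming a plain local-mode application (the app name and master below are illustrative, not from this diff):

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Before this patch (3.0 branch): the key lived under the driver namespace.
// val conf = new SparkConf().set("spark.driver.allowSparkContextInExecutors", "true")

// After this patch: the key lives under the executor namespace.
val conf = new SparkConf()
  .setAppName("demo")    // illustrative app name
  .setMaster("local[2]") // illustrative master
  .set("spark.executor.allowSparkContext", "true")

val sc = new SparkContext(conf)
```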

File tree

7 files changed: +10 -10 lines changed


core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ class SparkContext(config: SparkConf) extends Logging {
   // The call site where this SparkContext was constructed.
   private val creationSite: CallSite = Utils.getCallSite()
 
-  if (!config.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+  if (!config.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
     // In order to prevent SparkContext from being created in executors.
     SparkContext.assertOnDriver()
   }
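
The hunk only shows the call site; `SparkContext.assertOnDriver()` itself is not part of this patch. A plausible sketch of what such a guard does, based on `TaskContext` being non-null only inside a running task (this is an assumption, not taken from the diff):

```scala
import org.apache.spark.{SparkException, TaskContext}

object DriverGuard {
  // Hypothetical sketch: TaskContext.get() returns null on the driver and a
  // non-null context inside an executor task, so a non-null value means the
  // code is running where a SparkContext must not be created.
  def assertOnDriver(): Unit = {
    if (TaskContext.get != null) {
      throw new SparkException(
        "SparkContext should only be created and accessed on the driver.")
    }
  }
}
```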

core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 2 additions & 2 deletions
@@ -1814,8 +1814,8 @@ package object config {
     .bytesConf(ByteUnit.BYTE)
     .createOptional
 
-  private[spark] val ALLOW_SPARK_CONTEXT_IN_EXECUTORS =
-    ConfigBuilder("spark.driver.allowSparkContextInExecutors")
+  private[spark] val EXECUTOR_ALLOW_SPARK_CONTEXT =
+    ConfigBuilder("spark.executor.allowSparkContext")
       .doc("If set to true, SparkContext can be created in executors.")
       .version("3.0.1")
       .booleanConf
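
The hunk ends at `.booleanConf`, so the entry's default is not visible here. The PySpark check in context.py below falls back to `"true"`, which suggests the builder chain on this branch ends with `createWithDefault(true)`; a hedged sketch of the presumed full definition inside `package object config` (the final line is inferred, not shown in the diff):

```scala
// Presumed continuation of the entry above; only the lines through
// .booleanConf appear in the hunk. The default is inferred from the
// PySpark fallback conf.get(..., "true") elsewhere in this commit.
private[spark] val EXECUTOR_ALLOW_SPARK_CONTEXT =
  ConfigBuilder("spark.executor.allowSparkContext")
    .doc("If set to true, SparkContext can be created in executors.")
    .version("3.0.1")
    .booleanConf
    .createWithDefault(true)
```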

core/src/test/scala/org/apache/spark/SparkContextSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -957,7 +957,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
     val error = intercept[SparkException] {
       sc.range(0, 1).foreach { _ =>
         new SparkContext(new SparkConf().setAppName("test").setMaster("local")
-          .set(ALLOW_SPARK_CONTEXT_IN_EXECUTORS, false))
+          .set(EXECUTOR_ALLOW_SPARK_CONTEXT, false))
       }
     }.getMessage()

python/pyspark/context.py

Lines changed: 1 addition & 1 deletion
@@ -120,7 +120,7 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
             ValueError:...
         """
         if (conf is not None and
-                conf.get("spark.driver.allowSparkContextInExecutors", "true").lower() != "true"):
+                conf.get("spark.executor.allowSparkContext", "true").lower() != "true"):
             # In order to prevent SparkContext from being created in executors.
             SparkContext._assert_on_driver()

python/pyspark/tests/test_context.py

Lines changed: 1 addition & 1 deletion
@@ -271,7 +271,7 @@ def test_disallow_to_create_spark_context_in_executors(self):
         # SPARK-32160: SparkContext should not created in executors if the config is set.
 
         def create_spark_context():
-            conf = SparkConf().set("spark.driver.allowSparkContextInExecutors", "false")
+            conf = SparkConf().set("spark.executor.allowSparkContext", "false")
             with SparkContext(conf=conf):
                 pass
277277

sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala

Lines changed: 2 additions & 2 deletions
@@ -29,7 +29,7 @@ import org.apache.spark.{SPARK_VERSION, SparkConf, SparkContext, TaskContext}
 import org.apache.spark.annotation.{DeveloperApi, Experimental, Stable, Unstable}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.internal.Logging
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
 import org.apache.spark.sql.catalog.Catalog

@@ -904,7 +904,7 @@ object SparkSession extends Logging {
     val sparkConf = new SparkConf()
     options.foreach { case (k, v) => sparkConf.set(k, v) }
 
-    if (!sparkConf.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+    if (!sparkConf.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
       assertOnDriver()
     }
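
For `SparkSession`, the same guard now keys off the renamed entry inside `getOrCreate()`. A minimal sketch of how a caller would exercise it (the local master and the explicit string key are illustrative):

```scala
import org.apache.spark.sql.SparkSession

// On the driver this behaves as before; if the same builder ran inside a
// task (e.g. within rdd.foreach) with the config set to "false", the guard
// above would call assertOnDriver() and fail with a SparkException.
val spark = SparkSession.builder()
  .master("local[2]")
  .config("spark.executor.allowSparkContext", "false")
  .getOrCreate()
```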
910910

sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
 import org.apache.spark.internal.config.UI.UI_ENABLED
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.StaticSQLConf._

@@ -248,7 +248,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach {
     val error = intercept[SparkException] {
       session.range(1).foreach { v =>
         SparkSession.builder.master("local")
-          .config(ALLOW_SPARK_CONTEXT_IN_EXECUTORS.key, false).getOrCreate()
+          .config(EXECUTOR_ALLOW_SPARK_CONTEXT.key, false).getOrCreate()
         ()
       }
     }.getMessage()
