2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -83,7 +83,7 @@ class SparkContext(config: SparkConf) extends Logging {
   // The call site where this SparkContext was constructed.
   private val creationSite: CallSite = Utils.getCallSite()
 
-  if (!config.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+  if (!config.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
     // In order to prevent SparkContext from being created in executors.
     SparkContext.assertOnDriver()
   }
core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -1909,8 +1909,8 @@ package object config {
       .booleanConf
       .createWithDefault(false)
 
-  private[spark] val ALLOW_SPARK_CONTEXT_IN_EXECUTORS =
-    ConfigBuilder("spark.driver.allowSparkContextInExecutors")
+  private[spark] val EXECUTOR_ALLOW_SPARK_CONTEXT =
+    ConfigBuilder("spark.executor.allowSparkContext")
       .doc("If set to true, SparkContext can be created in executors.")
       .version("3.0.1")
       .booleanConf
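
The renamed entry keeps its boolean type and `false` default; only the key moves from the `spark.driver.*` to the `spark.executor.*` namespace. A minimal sketch of how the typed entry behaves, assuming code compiled under the `org.apache.spark` package (the entry and the typed `SparkConf` accessors are `private[spark]`); the `demo` package and object name are hypothetical:

```scala
package org.apache.spark.demo  // hypothetical; must sit under org.apache.spark

import org.apache.spark.SparkConf
import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT

object ConfigRenameSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()

    // Typed lookup falls back to the declared default (false) when unset.
    assert(!conf.get(EXECUTOR_ALLOW_SPARK_CONTEXT))

    // Setting through the typed entry is the same as
    // conf.set("spark.executor.allowSparkContext", "true").
    conf.set(EXECUTOR_ALLOW_SPARK_CONTEXT, true)
    assert(conf.get(EXECUTOR_ALLOW_SPARK_CONTEXT))
  }
}
```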
core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -952,7 +952,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu

     sc.range(0, 1).foreach { _ =>
       new SparkContext(new SparkConf().setAppName("test").setMaster("local")
-        .set(ALLOW_SPARK_CONTEXT_IN_EXECUTORS, true)).stop()
+        .set(EXECUTOR_ALLOW_SPARK_CONTEXT, true)).stop()
     }
   }
 }
2 changes: 1 addition & 1 deletion docs/core-migration-guide.md
@@ -24,7 +24,7 @@ license: |

 ## Upgrading from Core 3.0 to 3.1
 
-- In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors. You can allow it by setting the configuration `spark.driver.allowSparkContextInExecutors` when creating `SparkContext` in executors.
+- In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors; you can allow it by setting the configuration `spark.executor.allowSparkContext` to `true`.

 ## Upgrading from Core 2.4 to 3.0
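
As an illustration of this escape hatch, here is a sketch modeled on the Scala test in this change set. The app names are placeholders; the outer master must put executors in separate JVMs (a `local-cluster` or real cluster master), since only one SparkContext may run per JVM. The string key is used because the typed entry is `private[spark]`:

```scala
import org.apache.spark.{SparkConf, SparkContext}

object AllowContextInExecutors {
  def main(args: Array[String]): Unit = {
    // Executors live in their own JVMs under this master, so the context
    // created inside the task below does not collide with this one.
    val sc = new SparkContext(new SparkConf()
      .setAppName("outer")
      .setMaster("local-cluster[1, 1, 1024]"))

    // The conf of the SparkContext built inside the task must itself set
    // spark.executor.allowSparkContext=true; otherwise Spark 3.1+ throws
    // instead of constructing it.
    sc.range(0, 1).foreach { _ =>
      new SparkContext(new SparkConf()
        .setAppName("inner")
        .setMaster("local")
        .set("spark.executor.allowSparkContext", "true")).stop()
    }
    sc.stop()
  }
}
```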
2 changes: 1 addition & 1 deletion python/pyspark/context.py
@@ -118,7 +118,7 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
         ValueError:...
         """
         if (conf is None or
-                conf.get("spark.driver.allowSparkContextInExecutors", "false").lower() != "true"):
+                conf.get("spark.executor.allowSparkContext", "false").lower() != "true"):
             # In order to prevent SparkContext from being created in executors.
             SparkContext._assert_on_driver()

2 changes: 1 addition & 1 deletion python/pyspark/tests/test_context.py
@@ -279,7 +279,7 @@ def test_allow_to_create_spark_context_in_executors(self):
         # SPARK-32160: SparkContext can be created in executors if the config is set.
 
         def create_spark_context():
-            conf = SparkConf().set("spark.driver.allowSparkContextInExecutors", "true")
+            conf = SparkConf().set("spark.executor.allowSparkContext", "true")
             with SparkContext(conf=conf):
                 pass

sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -29,7 +29,7 @@ import org.apache.spark.{SPARK_VERSION, SparkConf, SparkContext, TaskContext}
 import org.apache.spark.annotation.{DeveloperApi, Experimental, Stable, Unstable}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.internal.Logging
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
 import org.apache.spark.sql.catalog.Catalog
@@ -904,7 +904,7 @@ object SparkSession extends Logging {
       val sparkConf = new SparkConf()
       options.foreach { case (k, v) => sparkConf.set(k, v) }
 
-      if (!sparkConf.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+      if (!sparkConf.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
         assertOnDriver()
       }
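
The same guard protects `SparkSession.builder`, so the session-level opt-in mirrors the context-level one. A sketch along the lines of the suite change below, with the same separate-JVM caveat and placeholder names as above:

```scala
import org.apache.spark.sql.SparkSession

object AllowSessionInExecutors {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("outer-session")
      .master("local-cluster[1, 1, 1024]")  // executors in separate JVMs
      .getOrCreate()

    // Building a session inside a task succeeds only when its conf sets
    // spark.executor.allowSparkContext to true.
    spark.range(1).foreach { _ =>
      SparkSession.builder()
        .master("local")
        .config("spark.executor.allowSparkContext", "true")
        .getOrCreate()
        .stop()
      ()
    }
    spark.stop()
  }
}
```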

sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
 import org.apache.spark.internal.config.UI.UI_ENABLED
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.StaticSQLConf._
@@ -277,7 +277,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach {

     session.range(1).foreach { v =>
       SparkSession.builder.master("local")
-        .config(ALLOW_SPARK_CONTEXT_IN_EXECUTORS.key, true).getOrCreate().stop()
+        .config(EXECUTOR_ALLOW_SPARK_CONTEXT.key, true).getOrCreate().stop()
       ()
     }
   }