Skip to content

Commit e11d90b

Browse files
committed
[SPARK-20946][SQL] simplify the config setting logic in SparkSession.getOrCreate
## What changes were proposed in this pull request? The current conf setting logic is a little complex and has some duplication; this PR simplifies it. ## How was this patch tested? Existing tests. Author: Wenchen Fan <[email protected]> Closes #18172 from cloud-fan/session.
1 parent d1b80ab commit e11d90b

File tree

3 files changed

+10
-21
lines changed

3 files changed

+10
-21
lines changed

mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -820,15 +820,13 @@ class ALSCleanerSuite extends SparkFunSuite {
820820
FileUtils.listFiles(localDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE).asScala.toSet
821821
try {
822822
conf.set("spark.local.dir", localDir.getAbsolutePath)
823-
val sc = new SparkContext("local[2]", "test", conf)
823+
val sc = new SparkContext("local[2]", "ALSCleanerSuite", conf)
824824
try {
825825
sc.setCheckpointDir(checkpointDir.getAbsolutePath)
826826
// Generate test data
827827
val (training, _) = ALSSuite.genImplicitTestData(sc, 20, 5, 1, 0.2, 0)
828828
// Implicitly test the cleaning of parents during ALS training
829829
val spark = SparkSession.builder
830-
.master("local[2]")
831-
.appName("ALSCleanerSuite")
832830
.sparkContext(sc)
833831
.getOrCreate()
834832
import spark.implicits._

mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,8 +43,6 @@ private[ml] object TreeTests extends SparkFunSuite {
4343
categoricalFeatures: Map[Int, Int],
4444
numClasses: Int): DataFrame = {
4545
val spark = SparkSession.builder()
46-
.master("local[2]")
47-
.appName("TreeTests")
4846
.sparkContext(data.sparkContext)
4947
.getOrCreate()
5048
import spark.implicits._

sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala

Lines changed: 9 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -757,6 +757,8 @@ object SparkSession {
757757

758758
private[this] var userSuppliedContext: Option[SparkContext] = None
759759

760+
// The `SparkConf` inside the given `SparkContext` may get changed if you specify some options
761+
// for this builder.
760762
private[spark] def sparkContext(sparkContext: SparkContext): Builder = synchronized {
761763
userSuppliedContext = Option(sparkContext)
762764
this
@@ -854,7 +856,7 @@ object SparkSession {
854856
*
855857
* @since 2.2.0
856858
*/
857-
def withExtensions(f: SparkSessionExtensions => Unit): Builder = {
859+
def withExtensions(f: SparkSessionExtensions => Unit): Builder = synchronized {
858860
f(extensions)
859861
this
860862
}
@@ -899,22 +901,14 @@ object SparkSession {
899901

900902
// No active nor global default session. Create a new one.
901903
val sparkContext = userSuppliedContext.getOrElse {
902-
// set app name if not given
903-
val randomAppName = java.util.UUID.randomUUID().toString
904904
val sparkConf = new SparkConf()
905-
options.foreach { case (k, v) => sparkConf.set(k, v) }
906-
if (!sparkConf.contains("spark.app.name")) {
907-
sparkConf.setAppName(randomAppName)
908-
}
909-
val sc = SparkContext.getOrCreate(sparkConf)
910-
// maybe this is an existing SparkContext, update its SparkConf which maybe used
911-
// by SparkSession
912-
options.foreach { case (k, v) => sc.conf.set(k, v) }
913-
if (!sc.conf.contains("spark.app.name")) {
914-
sc.conf.setAppName(randomAppName)
915-
}
916-
sc
905+
options.get("spark.master").foreach(sparkConf.setMaster)
906+
// set a random app name if not given.
907+
sparkConf.setAppName(options.getOrElse("spark.app.name",
908+
java.util.UUID.randomUUID().toString))
909+
SparkContext.getOrCreate(sparkConf)
917910
}
911+
options.foreach { case (k, v) => sparkContext.conf.set(k, v) }
918912

919913
// Initialize extensions if the user has defined a configurator class.
920914
val extensionConfOption = sparkContext.conf.get(StaticSQLConf.SPARK_SESSION_EXTENSIONS)
@@ -935,7 +929,6 @@ object SparkSession {
935929
}
936930

937931
session = new SparkSession(sparkContext, None, None, extensions)
938-
options.foreach { case (k, v) => session.sessionState.conf.setConfString(k, v) }
939932
defaultSession.set(session)
940933

941934
// Register a successfully instantiated context to the singleton. This should be at the

0 commit comments

Comments (0)