File tree — 4 files changed, +19 −12 lines changed:
  examples/src/main/scala/org/apache/spark/examples/sql
  scala-2.10/src/main/scala/org/apache/spark/repl
  scala-2.11/src/main/scala/org/apache/spark/repl
  sql/core/src/main/scala/org/apache/spark/sql
Diff (left column: original line number, right column: new line number):

18 18   // scalastyle:off println
19 19   package org.apache.spark.examples.sql
20 20
21      - import org.apache.spark.{SparkConf, SparkContext}
22 21   import org.apache.spark.sql.{SaveMode, SparkSession}
23 22
24 23   // One method for defining the schema of an RDD is to make a case class with the desired column
@@ -27,14 +26,12 @@ case class Record(key: Int, value: String)
27 26
28 27   object RDDRelation {
29 28     def main(args: Array[String]) {
30      -     val sparkConf = new SparkConf().setAppName("RDDRelation")
31      -     val sc = new SparkContext(sparkConf)
32      -     val spark = new SparkSession(sc)
   29   +     val spark = SparkSession.builder.appName("RDDRelation").getOrCreate()
33 30
34 31       // Importing the SparkSession gives access to all the SQL functions and implicit conversions.
35 32       import spark.implicits._
36 33
37      -     val df = sc.parallelize((1 to 100).map(i => Record(i, s"val_$i"))).toDF()
   34   +     val df = spark.createDataFrame((1 to 100).map(i => Record(i, s"val_$i")))
38 35       // Any RDD containing case classes can be registered as a table. The schema of the table is
39 36       // automatically inferred using scala reflection.
40 37       df.registerTempTable("records")
@@ -70,7 +67,7 @@ object RDDRelation {
70 67       parquetFile.registerTempTable("parquetFile")
71 68       spark.sql("SELECT * FROM parquetFile").collect().foreach(println)
72 69
73      -     sc.stop()
   70   +     spark.stop()
74 71     }
75 72   }
76 73   // scalastyle:on println
@@ -1030,10 +1030,10 @@ class SparkILoop(
1030 1030     def createSparkSession(): SparkSession = {
1031 1031       if (SparkSession.hiveClassesArePresent) {
1032 1032         logInfo("Creating Spark session with Hive support")
1033          -       SparkSession.withHiveSupport(sparkContext)
     1033     +       SparkSession.builder.enableHiveSupport().getOrCreate()
1034 1034       } else {
1035 1035         logInfo("Creating Spark session")
1036          -       new SparkSession(sparkContext)
     1036     +       SparkSession.builder.getOrCreate()
1037 1037       }
1038 1038     }
1039 1039
@@ -94,10 +94,10 @@ object Main extends Logging {
94 94
95 95     def createSparkSession(): SparkSession = {
96 96       if (SparkSession.hiveClassesArePresent) {
97        -       sparkSession = SparkSession.withHiveSupport(sparkContext)
   97     +       sparkSession = SparkSession.builder.enableHiveSupport().getOrCreate()
98 98         logInfo("Created Spark session with Hive support")
99 99       } else {
100        -       sparkSession = new SparkSession(sparkContext)
    100   +       sparkSession = SparkSession.builder.getOrCreate()
101 101       logInfo("Created Spark session")
102 102     }
103 103     sparkSession
@@ -54,6 +54,7 @@ import org.apache.spark.util.Utils
54 54    * {{{
55 55    *   SparkSession.builder()
56 56    *     .master("local")
    57   + *     .appName("Word Count")
57 58    *     .config("spark.some.config.option", "some-value").
58 59    *     .getOrCreate()
59 60    * }}}
@@ -63,7 +64,7 @@ class SparkSession private(
63 64       @transient private val existingSharedState: Option[SharedState])
64 65     extends Serializable with Logging { self =>
65 66
66        -   def this(sc: SparkContext) {
   67     +   private[sql] def this(sc: SparkContext) {
67 68         this(sc, None)
68 69       }
@@ -573,7 +574,7 @@ class SparkSession private(
573 574    * common Scala objects into [[DataFrame]]s.
574 575    *
575 576    * {{{
576        -  *   val sparkSession = new SparkSession(sc)
    577    +  *   val sparkSession = SparkSession.builder.getOrCreate()
577 578    *   import sparkSession.implicits._
578 579    * }}}
579 580    *
@@ -586,6 +587,15 @@ class SparkSession private(
586587 }
587588 // scalastyle:on
588589
590+ /**
591+ * Stop the underlying [[SparkContext ]].
592+ *
593+ * @since 2.0.0
594+ */
595+ def stop (): Unit = {
596+ sparkContext.stop()
597+ }
598+
589599 protected [sql] def parseSql (sql : String ): LogicalPlan = {
590600 sessionState.sqlParser.parsePlan(sql)
591601 }
You can’t perform that action at this time.
0 commit comments