@@ -63,12 +63,8 @@ import org.apache.spark.util._
  *
  * @param config a Spark Config object describing the application configuration. Any settings in
  *   this config overrides the default configs as well as system properties.
- * @param sparkListeners an optional list of [[SparkListener]]s to register.
  */
-class SparkContext(
-    config: SparkConf,
-    sparkListeners: Seq[SparkListener] = Nil
-  ) extends Logging with ExecutorAllocationClient {
+class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient {
 
   // The call site where this SparkContext was constructed.
   private val creationSite: CallSite = Utils.getCallSite()
@@ -93,15 +89,7 @@ class SparkContext(
    * Create a SparkContext that loads settings from system properties (for instance, when
    * launching with ./bin/spark-submit).
    */
-  def this() = this(new SparkConf(), Nil)
-
-  /**
-   * Alternative constructor for binary compatibility.
-   *
-   * @param config a Spark Config object describing the application configuration. Any settings in
-   *   this config overrides the default configs as well as system properties.
-   */
-  def this(config: SparkConf) = this(config, Nil)
+  def this() = this(new SparkConf())
 
   /**
    * :: DeveloperApi ::
@@ -136,40 +124,19 @@ class SparkContext(
    * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
    *   system or HDFS, HTTP, HTTPS, or FTP URLs.
    * @param environment Environment variables to set on worker nodes.
-   * @param sparkListeners an optional list of [[SparkListener]]s to register.
    */
   def this(
       master: String,
       appName: String,
       sparkHome: String = null,
       jars: Seq[String] = Nil,
       environment: Map[String, String] = Map(),
-      preferredNodeLocationData: Map[String, Set[SplitInfo]] = Map(),
-      sparkListeners: Seq[SparkListener] = Nil) = {
-    this(SparkContext.updatedConf(new SparkConf(), master, appName, sparkHome, jars, environment),
-      sparkListeners)
+      preferredNodeLocationData: Map[String, Set[SplitInfo]] = Map()) =
+  {
+    this(SparkContext.updatedConf(new SparkConf(), master, appName, sparkHome, jars, environment))
     this.preferredNodeLocationData = preferredNodeLocationData
   }
 
-  /**
-   * Alternative constructor for binary compatibility.
-   *
-   * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
-   * @param appName A name for your application, to display on the cluster web UI.
-   * @param sparkHome Location where Spark is installed on cluster nodes.
-   * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
-   *   system or HDFS, HTTP, HTTPS, or FTP URLs.
-   * @param environment Environment variables to set on worker nodes.
-   */
-  def this(
-      master: String,
-      appName: String,
-      sparkHome: String,
-      jars: Seq[String],
-      environment: Map[String, String],
-      preferredNodeLocationData: Map[String, Set[SplitInfo]]) =
-    this(master, appName, sparkHome, jars, environment, preferredNodeLocationData, Nil)
-
   // NOTE: The below constructors could be consolidated using default arguments. Due to
   // Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
   // Until we have a good workaround for that bug the constructors remain broken out.
@@ -181,7 +148,7 @@ class SparkContext(
    * @param appName A name for your application, to display on the cluster web UI.
    */
   private[spark] def this(master: String, appName: String) =
-    this(master, appName, null, Nil, Map(), Map(), Nil)
+    this(master, appName, null, Nil, Map(), Map())
 
   /**
    * Alternative constructor that allows setting common Spark properties directly
@@ -191,7 +158,7 @@ class SparkContext(
    * @param sparkHome Location where Spark is installed on cluster nodes.
    */
   private[spark] def this(master: String, appName: String, sparkHome: String) =
-    this(master, appName, sparkHome, Nil, Map(), Map(), Nil)
+    this(master, appName, sparkHome, Nil, Map(), Map())
 
   /**
    * Alternative constructor that allows setting common Spark properties directly
@@ -203,7 +170,7 @@ class SparkContext(
    *   system or HDFS, HTTP, HTTPS, or FTP URLs.
    */
   private[spark] def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
-    this(master, appName, sparkHome, jars, Map(), Map(), Nil)
+    this(master, appName, sparkHome, jars, Map(), Map())
 
   // log out Spark Version in Spark driver log
   logInfo(s"Running Spark version $SPARK_VERSION")
@@ -412,8 +379,6 @@ class SparkContext(
   }
   executorAllocationManager.foreach(_.start())
 
-  sparkListeners.foreach(listenerBus.addListener)
-
   // At this point, all relevant SparkListeners have been registered, so begin releasing events
   listenerBus.start()
 
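With the sparkListeners constructor parameter removed, a listener can no longer be handed to SparkContext at construction time. A minimal sketch of the replacement pattern, registering a listener after construction via the existing addSparkListener developer API (the ListenerExample object and the listener body are illustrative, not part of this commit):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd}

object ListenerExample {
  def main(args: Array[String]): Unit = {
    // The simplified primary constructor now takes only a SparkConf.
    val conf = new SparkConf().setMaster("local[2]").setAppName("listener-example")
    val sc = new SparkContext(conf)

    // Register the listener after construction instead of via a constructor argument.
    sc.addSparkListener(new SparkListener {
      override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit =
        println(s"Job ${jobEnd.jobId} finished: ${jobEnd.jobResult}")
    })

    sc.parallelize(1 to 10).count()  // runs a job, so onJobEnd fires
    sc.stop()
  }
}

Note that a listener registered this way can miss events emitted while the context is still starting up; if the build includes the spark.extraListeners configuration property (an assumption about this version), naming the listener class there gets it registered before listenerBus.start() is called.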