@@ -90,11 +90,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
90  90    // NOTE: this must be placed at the beginning of the SparkContext constructor.
91  91    SparkContext.markPartiallyConstructed(this, allowMultipleContexts)
92  92
93      - // This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
94      - // etc) too. This is typically generated from InputFormatInfo.computePreferredLocations. It
95      - // contains a map from hostname to a list of input format splits on the host.
96      - private[spark] var preferredNodeLocationData: Map[String, Set[SplitInfo]] = Map()
97      -
98  93    val startTime = System.currentTimeMillis()
99  94
100 95    private[spark] val stopped: AtomicBoolean = new AtomicBoolean(false)
@@ -116,16 +111,13 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
116 111    * Alternative constructor for setting preferred locations where Spark will create executors.
117 112    *
118 113    * @param config a [[org.apache.spark.SparkConf]] object specifying other Spark parameters
119     -  * @param preferredNodeLocationData used in YARN mode to select nodes to launch containers on.
120     -  * Can be generated using [[org.apache.spark.scheduler.InputFormatInfo.computePreferredLocations]]
121     -  * from a list of input files or InputFormats for the application.
    114 +  * @param preferredNodeLocationData not used. Left for backward compatibility.
122 115    */
123 116   @deprecated("Passing in preferred locations has no effect at all, see SPARK-8949", "1.5.0")
124 117   @DeveloperApi
125 118   def this(config: SparkConf, preferredNodeLocationData: Map[String, Set[SplitInfo]]) = {
126 119     this(config)
127 120     logWarning("Passing in preferred locations has no effect at all, see SPARK-8949")
128     -   this.preferredNodeLocationData = preferredNodeLocationData
129 121   }
130 122
131 123   /**
@@ -147,10 +139,9 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
147 139    * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
148 140    * system or HDFS, HTTP, HTTPS, or FTP URLs.
149 141    * @param environment Environment variables to set on worker nodes.
150     -  * @param preferredNodeLocationData used in YARN mode to select nodes to launch containers on.
151     -  * Can be generated using [[org.apache.spark.scheduler.InputFormatInfo.computePreferredLocations]]
152     -  * from a list of input files or InputFormats for the application.
    142 +  * @param preferredNodeLocationData not used. Left for backward compatibility.
153 143    */
    144 + @deprecated("Passing in preferred locations has no effect at all, see SPARK-10921", "1.6.0")
154 145   def this(
155 146       master: String,
156 147       appName: String,
@@ -163,7 +154,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
163 154     if (preferredNodeLocationData.nonEmpty) {
164 155       logWarning("Passing in preferred locations has no effect at all, see SPARK-8949")
165 156     }
166     -   this.preferredNodeLocationData = preferredNodeLocationData
167 157   }
168 158
169 159   // NOTE: The below constructors could be consolidated using default arguments. Due to
@@ -177,7 +167,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
177 167    * @param appName A name for your application, to display on the cluster web UI.
178 168    */
179 169   private[spark] def this(master: String, appName: String) =
180     -   this(master, appName, null, Nil, Map(), Map())
    170 +   this(master, appName, null, Nil, Map())
181 171
182 172   /**
183 173    * Alternative constructor that allows setting common Spark properties directly
@@ -187,7 +177,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
187 177    * @param sparkHome Location where Spark is installed on cluster nodes.
188 178    */
189 179   private[spark] def this(master: String, appName: String, sparkHome: String) =
190     -   this(master, appName, sparkHome, Nil, Map(), Map())
    180 +   this(master, appName, sparkHome, Nil, Map())
191 181
192 182   /**
193 183    * Alternative constructor that allows setting common Spark properties directly
@@ -199,7 +189,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
199 189    * system or HDFS, HTTP, HTTPS, or FTP URLs.
200 190    */
201 191   private[spark] def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
202     -   this(master, appName, sparkHome, jars, Map(), Map())
    192 +   this(master, appName, sparkHome, jars, Map())
203 193
204 194   // log out Spark Version in Spark driver log
205 195   logInfo(s"Running Spark version $SPARK_VERSION")
0 commit comments