@@ -272,8 +272,6 @@ private[spark] object PythonRDD {
     }
   }
 
-  // PySpark / Hadoop InputFormat//
-
   /** Create and RDD from a path using [[org.apache.hadoop.mapred.SequenceFileInputFormat]] */
   def sequenceFile[K, V](sc: JavaSparkContext,
       path: String,
@@ -295,14 +293,15 @@ private[spark] object PythonRDD {
    * Create an RDD from a file path, using an arbitrary [[org.apache.hadoop.mapreduce.InputFormat]],
    * key and value class
    */
-  def newAPIHadoopFile[K, V, F <: NewInputFormat[K, V]](sc: JavaSparkContext,
-    path: String,
-    inputFormatClazz: String,
-    keyClazz: String,
-    valueClazz: String,
-    keyWrapper: String,
-    valueWrapper: String,
-    confAsMap: java.util.HashMap[String, String]) = {
+  def newAPIHadoopFile[K, V, F <: NewInputFormat[K, V]](
+      sc: JavaSparkContext,
+      path: String,
+      inputFormatClazz: String,
+      keyClazz: String,
+      valueClazz: String,
+      keyWrapper: String,
+      valueWrapper: String,
+      confAsMap: java.util.HashMap[String, String]) = {
     val conf = PythonHadoopUtil.mapToConf(confAsMap)
     val baseConf = sc.hadoopConfiguration()
     val mergedConf = PythonHadoopUtil.mergeConfs(baseConf, conf)
@@ -314,16 +313,18 @@ private[spark] object PythonRDD {
   }
 
   /**
-   * Create an RDD from a [[org.apache.hadoop.conf.Configuration]] converted from a map that is passed in from Python,
-   * using an arbitrary [[org.apache.hadoop.mapreduce.InputFormat]], key and value class
+   * Create an RDD from a [[org.apache.hadoop.conf.Configuration]] converted from a map that is
+   * passed in from Python, using an arbitrary [[org.apache.hadoop.mapreduce.InputFormat]],
+   * key and value class
    */
-  def newAPIHadoopRDD[K, V, F <: NewInputFormat[K, V]](sc: JavaSparkContext,
-    inputFormatClazz: String,
-    keyClazz: String,
-    valueClazz: String,
-    keyWrapper: String,
-    valueWrapper: String,
-    confAsMap: java.util.HashMap[String, String]) = {
+  def newAPIHadoopRDD[K, V, F <: NewInputFormat[K, V]](
+      sc: JavaSparkContext,
+      inputFormatClazz: String,
+      keyClazz: String,
+      valueClazz: String,
+      keyWrapper: String,
+      valueWrapper: String,
+      confAsMap: java.util.HashMap[String, String]) = {
     val conf = PythonHadoopUtil.mapToConf(confAsMap)
     val rdd =
       newAPIHadoopRDDFromClassNames[K, V, F](sc,
@@ -332,12 +333,13 @@ private[spark] object PythonRDD {
     JavaRDD.fromRDD(SerDeUtil.serMsgPack[K, V](converted))
   }
 
-  private def newAPIHadoopRDDFromClassNames[K, V, F <: NewInputFormat[K, V]](sc: JavaSparkContext,
-    path: Option[String] = None,
-    inputFormatClazz: String,
-    keyClazz: String,
-    valueClazz: String,
-    conf: Configuration) = {
+  private def newAPIHadoopRDDFromClassNames[K, V, F <: NewInputFormat[K, V]](
+      sc: JavaSparkContext,
+      path: Option[String] = None,
+      inputFormatClazz: String,
+      keyClazz: String,
+      valueClazz: String,
+      conf: Configuration) = {
     implicit val kcm = ClassTag(Class.forName(keyClazz)).asInstanceOf[ClassTag[K]]
     implicit val vcm = ClassTag(Class.forName(valueClazz)).asInstanceOf[ClassTag[V]]
     implicit val fcm = ClassTag(Class.forName(inputFormatClazz)).asInstanceOf[ClassTag[F]]
@@ -356,14 +358,15 @@ private[spark] object PythonRDD {
    * Create an RDD from a file path, using an arbitrary [[org.apache.hadoop.mapred.InputFormat]],
    * key and value class
    */
-  def hadoopFile[K, V, F <: InputFormat[K, V]](sc: JavaSparkContext,
-    path: String,
-    inputFormatClazz: String,
-    keyClazz: String,
-    valueClazz: String,
-    keyWrapper: String,
-    valueWrapper: String,
-    confAsMap: java.util.HashMap[String, String]) = {
+  def hadoopFile[K, V, F <: InputFormat[K, V]](
+      sc: JavaSparkContext,
+      path: String,
+      inputFormatClazz: String,
+      keyClazz: String,
+      valueClazz: String,
+      keyWrapper: String,
+      valueWrapper: String,
+      confAsMap: java.util.HashMap[String, String]) = {
     val conf = PythonHadoopUtil.mapToConf(confAsMap)
     val baseConf = sc.hadoopConfiguration()
     val mergedConf = PythonHadoopUtil.mergeConfs(baseConf, conf)
@@ -375,16 +378,18 @@ private[spark] object PythonRDD {
   }
 
   /**
-   * Create an RDD from a [[org.apache.hadoop.conf.Configuration]] converted from a map that is passed in from Python,
-   * using an arbitrary [[org.apache.hadoop.mapred.InputFormat]], key and value class
+   * Create an RDD from a [[org.apache.hadoop.conf.Configuration]] converted from a map
+   * that is passed in from Python, using an arbitrary [[org.apache.hadoop.mapred.InputFormat]],
+   * key and value class
    */
-  def hadoopRDD[K, V, F <: InputFormat[K, V]](sc: JavaSparkContext,
-    inputFormatClazz: String,
-    keyClazz: String,
-    valueClazz: String,
-    keyWrapper: String,
-    valueWrapper: String,
-    confAsMap: java.util.HashMap[String, String]) = {
+  def hadoopRDD[K, V, F <: InputFormat[K, V]](
+      sc: JavaSparkContext,
+      inputFormatClazz: String,
+      keyClazz: String,
+      valueClazz: String,
+      keyWrapper: String,
+      valueWrapper: String,
+      confAsMap: java.util.HashMap[String, String]) = {
     val conf = PythonHadoopUtil.mapToConf(confAsMap)
     val rdd =
       hadoopRDDFromClassNames[K, V, F](sc,
@@ -393,12 +398,13 @@ private[spark] object PythonRDD {
     JavaRDD.fromRDD(SerDeUtil.serMsgPack[K, V](converted))
   }
 
-  private def hadoopRDDFromClassNames[K, V, F <: InputFormat[K, V]](sc: JavaSparkContext,
-    path: Option[String] = None,
-    inputFormatClazz: String,
-    keyClazz: String,
-    valueClazz: String,
-    conf: Configuration) = {
+  private def hadoopRDDFromClassNames[K, V, F <: InputFormat[K, V]](
+      sc: JavaSparkContext,
+      path: Option[String] = None,
+      inputFormatClazz: String,
+      keyClazz: String,
+      valueClazz: String,
+      conf: Configuration) = {
     implicit val kcm = ClassTag(Class.forName(keyClazz)).asInstanceOf[ClassTag[K]]
     implicit val vcm = ClassTag(Class.forName(valueClazz)).asInstanceOf[ClassTag[V]]
     implicit val fcm = ClassTag(Class.forName(inputFormatClazz)).asInstanceOf[ClassTag[F]]
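
Note on the *FromClassNames helpers reformatted above: they turn the class-name strings handed over
from Python into Scala ClassTags via reflection. The following standalone sketch (not part of this
patch; ClassTagSketch, classTagFor, and the JDK class name are illustrative stand-ins) shows that
same pattern in isolation:

    import scala.reflect.ClassTag

    object ClassTagSketch extends App {
      // Build a ClassTag[T] from a fully qualified class name string, mirroring the
      // kcm/vcm/fcm implicits in the newAPIHadoopRDDFromClassNames/hadoopRDDFromClassNames
      // helpers in the diff above.
      def classTagFor[T](className: String): ClassTag[T] =
        ClassTag(Class.forName(className)).asInstanceOf[ClassTag[T]]

      // A JDK class stands in for the Writable key/value classes used by PySpark.
      val kcm = classTagFor[CharSequence]("java.lang.String")
      println(kcm.runtimeClass)  // prints: class java.lang.String
    }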