@@ -47,44 +47,6 @@ import org.apache.spark.util.collection.OpenHashMap
 import org.apache.spark.util.random.{BernoulliSampler, PoissonSampler, BernoulliCellSampler,
   SamplingUtils}
 
-
-object RDD {
-
-  // The following implicit functions were in SparkContext before 1.2 and users had to
-  // `import SparkContext._` to enable them. Now we move them here to make the compiler find
-  // them automatically. However, we still keep the old functions in SparkContext for backward
-  // compatibility and forward to the following functions directly.
-
-  implicit def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])
-    (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null): PairRDDFunctions[K, V] = {
-    new PairRDDFunctions(rdd)
-  }
-
-  implicit def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]): AsyncRDDActions[T] = {
-    new AsyncRDDActions(rdd)
-  }
-
-  implicit def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
-    rdd: RDD[(K, V)]): SequenceFileRDDFunctions[K, V] = {
-    new SequenceFileRDDFunctions(rdd)
-  }
-
-  implicit def rddToOrderedRDDFunctions[K: Ordering: ClassTag, V: ClassTag](rdd: RDD[(K, V)])
-    : OrderedRDDFunctions[K, V, (K, V)] = {
-    new OrderedRDDFunctions[K, V, (K, V)](rdd)
-  }
-
-  implicit def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]): DoubleRDDFunctions = {
-    new DoubleRDDFunctions(rdd)
-  }
-
-  implicit def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T])
-    : DoubleRDDFunctions = {
-    new DoubleRDDFunctions(rdd.map(x => num.toDouble(x)))
-  }
-}
-
-
 /**
  * A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. Represents an immutable,
  * partitioned collection of elements that can be operated on in parallel. This class contains the
@@ -1423,3 +1385,46 @@ abstract class RDD[T: ClassTag](
     new JavaRDD(this)(elementClassTag)
   }
 }
+
+
+/**
+ * Defines implicit functions that provide extra functionality on RDDs of specific types.
+ *
+ * For example, [[RDD.rddToPairRDDFunctions]] converts an RDD into a [[PairRDDFunctions]] for
+ * key-value-pair RDDs, enabling extra functionality such as [[PairRDDFunctions.reduceByKey]].
+ */
+object RDD {
+
+  // The following implicit functions were in SparkContext before 1.2 and users had to
+  // `import SparkContext._` to enable them. Now we move them here to make the compiler find
+  // them automatically. However, we still keep the old functions in SparkContext for backward
+  // compatibility and forward to the following functions directly.
+
+  implicit def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])
+    (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null): PairRDDFunctions[K, V] = {
+    new PairRDDFunctions(rdd)
+  }
+
+  implicit def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]): AsyncRDDActions[T] = {
+    new AsyncRDDActions(rdd)
+  }
+
+  implicit def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
+    rdd: RDD[(K, V)]): SequenceFileRDDFunctions[K, V] = {
+    new SequenceFileRDDFunctions(rdd)
+  }
+
+  implicit def rddToOrderedRDDFunctions[K: Ordering: ClassTag, V: ClassTag](rdd: RDD[(K, V)])
+    : OrderedRDDFunctions[K, V, (K, V)] = {
+    new OrderedRDDFunctions[K, V, (K, V)](rdd)
+  }
+
+  implicit def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]): DoubleRDDFunctions = {
+    new DoubleRDDFunctions(rdd)
+  }
+
+  implicit def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T])
+    : DoubleRDDFunctions = {
+    new DoubleRDDFunctions(rdd.map(x => num.toDouble(x)))
+  }
+}
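For context, here is a minimal sketch of how the relocated implicits are picked up after this change: because they now live in `object RDD` (the companion object of the `RDD` class), the Scala compiler finds them in the implicit scope of `RDD[T]` without an explicit `import SparkContext._`. The local-master setup, the object name, and the sample data below are illustrative assumptions, not part of this patch; only the implicit conversions themselves come from the diff above.

import org.apache.spark.{SparkConf, SparkContext}

object ImplicitScopeExample {
  def main(args: Array[String]): Unit = {
    // Assumed local setup, for illustration only.
    val conf = new SparkConf().setAppName("rdd-implicits").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // No `import SparkContext._` needed: RDD's companion object is searched
    // automatically, so rddToPairRDDFunctions supplies reduceByKey here...
    val counts = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3))).reduceByKey(_ + _)

    // ...and numericRDDToDoubleRDDFunctions supplies mean() on an RDD[Int].
    val avg = sc.parallelize(1 to 10).mean()

    println(counts.collect().mkString(", ") + " / mean = " + avg)
    sc.stop()
  }
}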