Skip to content

Commit 365d0be

Browse files
committed
Make classes private[python]. Add docs and @experimental annotation to Converter interface.
1 parent eeb8205 commit 365d0be

File tree

1 file changed

+17
-3
lines changed

1 file changed

+17
-3
lines changed

core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,13 +22,24 @@ import org.apache.spark.{Logging, SparkContext}
2222
import org.apache.hadoop.conf.Configuration
2323
import org.apache.hadoop.io._
2424
import scala.util.{Failure, Success, Try}
25+
import org.apache.spark.annotation.Experimental
2526

2627

28+
/**
29+
 * :: Experimental ::
30+
 * A trait for use with reading custom classes in PySpark. Implement this trait and add custom
31+
 * transformation code by overriding the convert method.
32+
 */
33+
@Experimental
2734
trait Converter {
2835
  /**
   * Performs the custom transformation on the given object and returns the converted result.
   * NOTE(review): obj is typed Any — presumably a Hadoop Writable key or value read from an
   * InputFormat (see DefaultConverter below); confirm against callers.
   */
  def convert(obj: Any): Any
2936
}
3037

31-
object DefaultConverter extends Converter {
38+
/**
39+
* A converter that handles conversion of common [[org.apache.hadoop.io.Writable]] objects.
40+
* Other objects are passed through without conversion.
41+
*/
42+
private[python] object DefaultConverter extends Converter {
3243

3344
/**
3445
* Converts a [[org.apache.hadoop.io.Writable]] to the underlying primitive, String or
@@ -63,7 +74,11 @@ object DefaultConverter extends Converter {
6374
}
6475
}
6576

66-
class ConverterRegistry extends Logging {
77+
/**
78+
* The converter registry holds a key and value converter, so that they are only instantiated
79+
* once per RDD partition.
80+
*/
81+
private[python] class ConverterRegistry extends Logging {
6782

6883
var keyConverter: Converter = DefaultConverter
6984
var valueConverter: Converter = DefaultConverter
@@ -92,7 +107,6 @@ class ConverterRegistry extends Logging {
92107
logError(s"Failed to register converter: $converterClass")
93108
throw err
94109
}
95-
96110
}
97111
}
98112

0 commit comments

Comments (0)