
Commit 97ef708

Remove old writeToStream
1 parent 2beeedb commit 97ef708


1 file changed: 3 additions, 18 deletions


core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala

Lines changed: 3 additions & 18 deletions
@@ -255,6 +255,9 @@ private[spark] object PythonRDD extends Logging {
         case other =>
           throw new SparkException("Unexpected element type " + first.getClass)
       }
+    }
+  }
+
   // PySpark / Hadoop InputFormat//
 
   /** Create and RDD from a path using [[org.apache.hadoop.mapred.SequenceFileInputFormat]] */
@@ -396,23 +399,6 @@ private[spark] object PythonRDD extends Logging {
     rdd
   }
 
-  def writeToStream(elem: Any, dataOut: DataOutputStream) {
-    elem match {
-      case bytes: Array[Byte] =>
-        dataOut.writeInt(bytes.length)
-        dataOut.write(bytes)
-      case (a: Array[Byte], b: Array[Byte]) =>
-        dataOut.writeInt(a.length)
-        dataOut.write(a)
-        dataOut.writeInt(b.length)
-        dataOut.write(b)
-      case str: String =>
-        dataOut.writeUTF(str)
-      case other =>
-        throw new SparkException("Unexpected element type " + other.getClass)
-    }
-  }
-
   def writeUTF(str: String, dataOut: DataOutputStream) {
     val bytes = str.getBytes("UTF-8")
     dataOut.writeInt(bytes.length)
@@ -429,7 +415,6 @@ private[spark] object PythonRDD extends Logging {
     writeIteratorToStream(items, file)
     file.close()
   }
-
 }
 
 private class BytesToString extends org.apache.spark.api.java.function.Function[Array[Byte], String] {
