Skip to content

Commit 258a1e4

Browse files
committed
provide default do-nothing implementation for OutputWriter.init
1 parent fd1ee62 commit 258a1e4

File tree

10 files changed

+1
-19
lines changed

10 files changed

+1
-19
lines changed

external/avro/src/main/scala/org/apache/spark/sql/avro/AvroOutputWriter.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,8 +60,6 @@ private[avro] class AvroOutputWriter(
6060

6161
}.getRecordWriter(context)
6262

63-
override def init(): Unit = {}
64-
6563
override def write(row: InternalRow): Unit = {
6664
val key = new AvroKey(serializer.serialize(row).asInstanceOf[GenericRecord])
6765
recordWriter.write(key, NullWritable.get())

mllib/src/main/scala/org/apache/spark/ml/source/libsvm/LibSVMRelation.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,8 +49,6 @@ private[libsvm] class LibSVMOutputWriter(
4949
// This `asInstanceOf` is safe because it's guaranteed by `LibSVMFileFormat.verifySchema`
5050
private val udt = dataSchema(1).dataType.asInstanceOf[VectorUDT]
5151

52-
override def init(): Unit = {}
53-
5452
override def write(row: InternalRow): Unit = {
5553
val label = row.getDouble(0)
5654
val vector = udt.deserialize(row.getStruct(1, udt.sqlType.length))

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/OutputWriter.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ abstract class OutputWriterFactory extends Serializable {
5858
*/
5959
abstract class OutputWriter {
6060
/** Initializes before writing any rows. Invoked on the executor side. */
61-
def init(): Unit
61+
def init(): Unit = {}
6262

6363
/**
6464
* Persists a single row. Invoked on the executor side. When writing to dynamically partitioned

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -177,8 +177,6 @@ private[json] class JsonOutputWriter(
177177

178178
private var jacksonGenerator: Option[JacksonGenerator] = None
179179

180-
override def init(): Unit = {}
181-
182180
override def write(row: InternalRow): Unit = {
183181
val gen = jacksonGenerator.getOrElse {
184182
val os = CodecStreams.createOutputStreamWriter(context, new Path(path), encoding)

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcOutputWriter.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,8 +50,6 @@ private[orc] class OrcOutputWriter(
5050
recordWriter
5151
}
5252

53-
override def init(): Unit = {}
54-
5553
override def write(row: InternalRow): Unit = {
5654
recordWriter.write(NullWritable.get(), serializer.serialize(row))
5755
}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetOutputWriter.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,8 +37,6 @@ private[parquet] class ParquetOutputWriter(path: String, context: TaskAttemptCon
3737
}.getRecordWriter(context)
3838
}
3939

40-
override def init(): Unit = {}
41-
4240
override def write(row: InternalRow): Unit = recordWriter.write(null, row)
4341

4442
override def close(): Unit = recordWriter.close(context)

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/text/TextFileFormat.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -152,8 +152,6 @@ class TextOutputWriter(
152152

153153
private var outputStream: Option[OutputStream] = None
154154

155-
override def init(): Unit = {}
156-
157155
override def write(row: InternalRow): Unit = {
158156
val os = outputStream.getOrElse{
159157
val newStream = CodecStreams.createOutputStream(context, new Path(path))

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveFileFormat.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,8 +141,6 @@ class HiveOutputWriter(
141141
private val wrappers = fieldOIs.zip(dataTypes).map { case (f, dt) => wrapperFor(f, dt) }
142142
private val outputData = new Array[Any](fieldOIs.length)
143143

144-
override def init(): Unit = {}
145-
146144
override def write(row: InternalRow): Unit = {
147145
var i = 0
148146
while (i < fieldOIs.length) {

sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -272,8 +272,6 @@ private[orc] class OrcOutputWriter(
272272
).asInstanceOf[RecordWriter[NullWritable, Writable]]
273273
}
274274

275-
override def init(): Unit = {}
276-
277275
override def write(row: InternalRow): Unit = {
278276
recordWriter.write(NullWritable.get(), serializer.serialize(row))
279277
}

sql/hive/src/test/scala/org/apache/spark/sql/sources/SimpleTextRelation.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,8 +122,6 @@ class SimpleTextOutputWriter(path: String, dataSchema: StructType, context: Task
122122

123123
private val writer = CodecStreams.createOutputStreamWriter(context, new Path(path))
124124

125-
override def init(): Unit = {}
126-
127125
override def write(row: InternalRow): Unit = {
128126
val serialized = row.toSeq(dataSchema).map { v =>
129127
if (v == null) "" else v.toString

0 commit comments

Comments (0)