From dee3d85226b0a8e8aad20682b452be622bc126f1 Mon Sep 17 00:00:00 2001
From: Sandy Ryza
Date: Tue, 5 May 2015 12:54:45 -0700
Subject: [PATCH] Some minor cleanup after SPARK-4550.

---
 .../org/apache/spark/storage/BlockObjectWriter.scala | 9 ++-------
 .../collection/PartitionedSerializedPairBuffer.scala | 4 ++--
 2 files changed, 4 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
index 499dd97c0656..8bc4e205bc3c 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
@@ -59,7 +59,7 @@ private[spark] abstract class BlockObjectWriter(val blockId: BlockId) extends Ou
   def write(key: Any, value: Any)
 
   /**
-   * Notify the writer that a record worth of bytes has been written with writeBytes.
+   * Notify the writer that a record worth of bytes has been written with OutputStream#write.
    */
   def recordWritten()
 
@@ -215,12 +215,7 @@ private[spark] class DiskBlockObjectWriter(
 
     objOut.writeKey(key)
     objOut.writeValue(value)
-    numRecordsWritten += 1
-    writeMetrics.incShuffleRecordsWritten(1)
-
-    if (numRecordsWritten % 32 == 0) {
-      updateBytesWritten()
-    }
+    recordWritten()
   }
 
   override def write(b: Int): Unit = throw new UnsupportedOperationException()

diff --git a/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala b/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
index b5ca0c62a04f..ac9ea6393628 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
@@ -71,10 +71,10 @@ private[spark] class PartitionedSerializedPairBuffer[K, V](
     if (keyStart < 0) {
       throw new Exception(s"Can't grow buffer beyond ${1 << 31} bytes")
     }
-    kvSerializationStream.writeObject[Any](key)
+    kvSerializationStream.writeKey[Any](key)
     kvSerializationStream.flush()
     val valueStart = kvBuffer.size
-    kvSerializationStream.writeObject[Any](value)
+    kvSerializationStream.writeValue[Any](value)
     kvSerializationStream.flush()
     val valueEnd = kvBuffer.size