
Commit 31fe08e

Removing un-needed semi-colons
1 parent 9df0276 commit 31fe08e
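
For context on why these semicolons are unneeded: Scala's parser infers the end of a statement at a line break, so a trailing semicolon adds nothing; it is only required when two statements share a line. A minimal standalone sketch (not taken from the Spark sources) showing that both forms compile to the same thing:

```scala
object SemicolonInference {
  def main(args: Array[String]): Unit = {
    // Legal but noisy: trailing semicolons at end of line
    var counter = 0;
    counter += 1;

    // Idiomatic Scala: the newline already terminates each statement
    var counter2 = 0
    counter2 += 1

    // Semicolons are only needed to separate statements on one line
    val a = 1; val b = 2

    println(counter + counter2 + a + b)
  }
}
```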

File tree

3 files changed (+9, -9 lines)

  core/src/main/scala/org/apache/spark/CacheManager.scala
  core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
  core/src/main/scala/org/apache/spark/storage/MemoryStore.scala


core/src/main/scala/org/apache/spark/CacheManager.scala

Lines changed: 2 additions & 2 deletions
@@ -77,8 +77,8 @@ private[spark] class CacheManager(blockManager: BlockManager) extends Logging {
           case Some(values) =>
             return new InterruptibleIterator(context, values.asInstanceOf[Iterator[T]])
           case None =>
-            logInfo("Failure to store %s".format(key));
-            return null;
+            logInfo("Failure to store %s".format(key))
+            return null
         }
       } else {
         val elements = new ArrayBuffer[Any]

core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala

Lines changed: 5 additions & 5 deletions
@@ -25,19 +25,19 @@ import org.apache.spark.SparkConf
 
 private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
   val objOut = new ObjectOutputStream(out)
-  var counter = 0;
+  var counter = 0
   /* Calling reset to avoid memory leak:
    * http://stackoverflow.com/questions/1281549/memory-leak-traps-in-the-java-standard-api
    * But only call it every 1000th time to avoid bloated serialization streams (when
    * the stream 'resets' object class descriptions have to be re-written)
    */
   def writeObject[T](t: T): SerializationStream = {
-    objOut.writeObject(t);
+    objOut.writeObject(t)
     if (counter >= 1000) {
-      objOut.reset();
-      counter = 0;
+      objOut.reset()
+      counter = 0
     } else {
-      counter+=1;
+      counter += 1
     }
     this
   }
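
The hunk above keeps the existing reset-every-1000-writes behavior: ObjectOutputStream remembers every object it has written so it can emit back-references, and reset() clears that table at the cost of re-writing class descriptors afterwards. A minimal standalone sketch of the same pattern (names here are illustrative, not part of the Spark API):

```scala
import java.io.{ByteArrayOutputStream, ObjectOutputStream}

object ResetEveryN {
  def main(args: Array[String]): Unit = {
    val bytes = new ByteArrayOutputStream()
    val objOut = new ObjectOutputStream(bytes)
    var counter = 0
    for (i <- 1 to 5000) {
      objOut.writeObject(Integer.valueOf(i))
      if (counter >= 1000) {
        objOut.reset()   // drop the internal handle table to avoid unbounded growth
        counter = 0
      } else {
        counter += 1
      }
    }
    objOut.close()
    println(s"serialized ${bytes.size()} bytes")
  }
}
```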

core/src/main/scala/org/apache/spark/storage/MemoryStore.scala

Lines changed: 2 additions & 2 deletions
@@ -71,8 +71,8 @@ private class MemoryStore(blockManager: BlockManager, maxMemory: Long)
     : PutResult = {
 
     if (level.deserialized) {
-      val valueEntries = new ArrayBuffer[Any]();
-      valueEntries ++= values;
+      val valueEntries = new ArrayBuffer[Any]()
+      valueEntries ++= values
       val sizeEstimate = SizeEstimator.estimate(valueEntries.asInstanceOf[AnyRef])
       tryToPut(blockId, valueEntries, sizeEstimate, true)
       PutResult(sizeEstimate, Left(valueEntries.toIterator))
