@@ -19,6 +19,7 @@ package org.apache.spark.util.collection
 
 import org.apache.spark.SparkEnv
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config._
 import org.apache.spark.memory.{MemoryConsumer, MemoryMode, TaskMemoryManager}
 
 /**
@@ -41,7 +42,7 @@ private[spark] abstract class Spillable[C](taskMemoryManager: TaskMemoryManager)
   protected def forceSpill(): Boolean
 
   // Number of elements read from input since last spill
-  protected def elementsRead: Long = _elementsRead
+  protected def elementsRead: Int = _elementsRead
 
   // Called by subclasses every time a record is read
   // It's used for checking spilling frequency
@@ -54,15 +55,15 @@ private[spark] abstract class Spillable[C](taskMemoryManager: TaskMemoryManager)
 
   // Force this collection to spill when there are this many elements in memory
   // For testing only
-  private[this] val numElementsForceSpillThreshold: Long =
-    SparkEnv.get.conf.getLong("spark.shuffle.spill.numElementsForceSpillThreshold", Long.MaxValue)
+  private[this] val numElementsForceSpillThreshold: Int =
+    SparkEnv.get.conf.get(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD)
 
   // Threshold for this collection's size in bytes before we start tracking its memory usage
   // To avoid a large number of small spills, initialize this to a value orders of magnitude > 0
   @volatile private[this] var myMemoryThreshold = initialMemoryThreshold
 
   // Number of elements read from input since last spill
-  private[this] var _elementsRead = 0
+  private[this] var _elementsRead = 0
 
   // Number of bytes spilled in total
   @volatile private[this] var _memoryBytesSpilled = 0L
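
For context on the typed constant the patch switches to: an entry like `SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD` would live in `org.apache.spark.internal.config` and be declared with Spark's `ConfigBuilder` API. A minimal sketch of what such a definition looks like (the `.internal()` flag and the exact shape are assumptions based on the ConfigBuilder pattern, not quoted from this patch):

```scala
// Sketch of a typed config entry in org.apache.spark.internal.config.
// Declared with Spark's ConfigBuilder; details here are assumed.
private[spark] val SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD =
  ConfigBuilder("spark.shuffle.spill.numElementsForceSpillThreshold")
    .internal()                           // testing-only knob, hidden from user docs
    .intConf                              // typed as Int, matching the new counter type
    .createWithDefault(Integer.MAX_VALUE) // replaces the old Long.MaxValue fallback
```

Because the entry is declared with `.intConf`, `SparkEnv.get.conf.get(...)` returns an `Int` directly, which is why `numElementsForceSpillThreshold` and the `_elementsRead` counter can both drop from `Long` to `Int`.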
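The counter and the threshold meet in `Spillable`'s spill-decision logic. A simplified sketch of that interplay, assuming the general shape of `maybeSpill` (abridged, not the full implementation):

```scala
// Simplified sketch of the spill decision; abridged and assumed.
protected def maybeSpill(collection: C, currentMemory: Long): Boolean = {
  var shouldSpill = false
  // Periodically try to grow the memory threshold; spill if memory can't be granted.
  if (elementsRead % 32 == 0 && currentMemory >= myMemoryThreshold) {
    val amountToRequest = 2 * currentMemory - myMemoryThreshold
    val granted = acquireMemory(amountToRequest)
    myMemoryThreshold += granted
    shouldSpill = currentMemory >= myMemoryThreshold
  }
  // The Int counter is compared against the Int threshold here, so a Long
  // counter buys nothing: the force-spill config caps out at Int.MaxValue.
  shouldSpill = shouldSpill || _elementsRead > numElementsForceSpillThreshold
  if (shouldSpill) {
    spill(collection)  // subclass-specific spill to disk
    _elementsRead = 0  // counter is reset after every spill
  }
  shouldSpill
}
```

Since `_elementsRead` is reset on every spill and force-spill triggers at or below `Int.MaxValue`, the counter can never meaningfully exceed the `Int` range, which is what makes the narrowing safe.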