Commit 162e899

for testing only, change default to always spill after read

1 parent a3c1c12 · commit 162e899
2 files changed: +3 -3 lines changed
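With this change, the "spark.shuffle.spillAfterRead" flag (read via SparkEnv.get.conf in the diffs below; it does not appear to be a standard upstream Spark setting, so treat it as specific to this branch) defaults to true, meaning shuffle reads force a spill unless the flag is explicitly disabled. A minimal sketch of how a job running against this build could restore the previous behavior, assuming the fork-specific flag:

import org.apache.spark.{SparkConf, SparkContext}

// Sketch only: after this commit the unset default for the flag is true,
// so opting out of spill-after-read requires an explicit "false".
val conf = new SparkConf()
  .setAppName("spill-after-read-opt-out")   // hypothetical app name, for illustration
  .set("spark.shuffle.spillAfterRead", "false")
val sc = new SparkContext(conf)

The same override could be passed at submit time with --conf spark.shuffle.spillAfterRead=false.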

core/src/main/scala/org/apache/spark/Aggregator.scala

Lines changed: 2 additions & 2 deletions
@@ -39,7 +39,7 @@ case class Aggregator[K, V, C] (
       context: TaskContext): Iterator[(K, C)] = {
     val combiners = new ExternalAppendOnlyMap[K, V, C](createCombiner, mergeValue, mergeCombiners)
     combiners.insertAll(iter)
-    if (SparkEnv.get.conf.getBoolean("spark.shuffle.spillAfterRead", false)) {
+    if (SparkEnv.get.conf.getBoolean("spark.shuffle.spillAfterRead", true)) {
       combiners.spill()
     }
     updateMetrics(context, combiners)
@@ -51,7 +51,7 @@ case class Aggregator[K, V, C] (
       context: TaskContext): Iterator[(K, C)] = {
     val combiners = new ExternalAppendOnlyMap[K, C, C](identity, mergeCombiners, mergeCombiners)
     combiners.insertAll(iter)
-    if (SparkEnv.get.conf.getBoolean("spark.shuffle.spillAfterRead", false)) {
+    if (SparkEnv.get.conf.getBoolean("spark.shuffle.spillAfterRead", true)) {
       combiners.spill()
     }
     updateMetrics(context, combiners)

core/src/main/scala/org/apache/spark/shuffle/BlockStoreShuffleReader.scala

Lines changed: 1 addition & 1 deletion
@@ -103,7 +103,7 @@ private[spark] class BlockStoreShuffleReader[K, C](
         val sorter =
           new ExternalSorter[K, C, C](context, ordering = Some(keyOrd), serializer = dep.serializer)
         sorter.insertAll(aggregatedIter)
-        if (SparkEnv.get.conf.getBoolean("spark.shuffle.spillAfterRead", false)) {
+        if (SparkEnv.get.conf.getBoolean("spark.shuffle.spillAfterRead", true)) {
          sorter.spill()
         }
         context.taskMetrics().incMemoryBytesSpilled(sorter.memoryBytesSpilled)
