File tree (Expand / Collapse) — 1 file changed: +4 −2 lines
sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate — 1 file changed: +4 −2 lines
Original file line number | Diff line number | Diff line change
18  18   package org.apache.spark.sql.execution.aggregate
19  19
20  20   import org.apache.spark.TaskContext
21      - import org.apache.spark.memory.TaskMemoryManager
    21  + import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager}
22  22   import org.apache.spark.rdd.RDD
23  23   import org.apache.spark.sql.catalyst.InternalRow
24  24   import org.apache.spark.sql.catalyst.errors._
@@ -762,6 +762,8 @@ case class HashAggregateExec(
762  762       ("true", "true", "", "")
763  763     }
764  764
     765  +  val oomeClassName = classOf[SparkOutOfMemoryError].getName
     766  +
765  767     val findOrInsertRegularHashMap: String =
766  768       s"""
767  769          |// generate grouping key
@@ -787,7 +789,7 @@ case class HashAggregateExec(
787  789          | $unsafeRowKeys, ${hashEval.value});
788  790          | if ($unsafeRowBuffer == null) {
789  791          |   // failed to allocate the first page
790       -      |   throw new OutOfMemoryError("No enough memory for aggregation");
     792  +      |   throw new $oomeClassName("No enough memory for aggregation");
791  793          | }
792  794          |}
       """.stripMargin
You can’t perform that action at this time.
0 commit comments