From f07ab0938563fe63dd20fa756543b14478a27c2f Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Thu, 8 Nov 2018 13:59:35 +0900
Subject: [PATCH] Throw `SparkOutOfMemoryError` in `HashAggregateExec`, too.

---
 .../spark/sql/execution/aggregate/HashAggregateExec.scala | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
index 25d8e7dff3d9..08dcdf33fb8f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.aggregate
 
 import org.apache.spark.TaskContext
-import org.apache.spark.memory.TaskMemoryManager
+import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.errors._
 import org.apache.spark.sql.catalyst.expressions._
@@ -762,6 +762,8 @@ case class HashAggregateExec(
       ("true", "true", "", "")
     }
 
+    val oomeClassName = classOf[SparkOutOfMemoryError].getName
+
     val findOrInsertRegularHashMap: String =
       s"""
         |// generate grouping key
@@ -787,7 +789,7 @@ case class HashAggregateExec(
          |    $unsafeRowKeys, ${hashEval.value});
          |  if ($unsafeRowBuffer == null) {
          |    // failed to allocate the first page
-         |    throw new OutOfMemoryError("No enough memory for aggregation");
+         |    throw new $oomeClassName("No enough memory for aggregation");
          |  }
          |}
        """.stripMargin
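
Context for the class swap: `SparkOutOfMemoryError` lives in `org.apache.spark.memory` and extends `java.lang.OutOfMemoryError`, but it signals that a task failed to acquire execution memory from the memory manager rather than that the JVM heap is exhausted, so callers can fail only the task instead of treating the error as fatal to the executor. The snippet below is a minimal sketch, not code from this patch: `allocateBuffer` and `findOrInsert` are hypothetical stand-ins for the generated hash-map code, and the `String` constructor is the one available in this Spark version.

import org.apache.spark.memory.SparkOutOfMemoryError

object OomSketch {
  // Hypothetical stand-in for the generated hash-map allocation; returns
  // null when no page can be allocated, mirroring the generated code above.
  def allocateBuffer(): AnyRef = null

  def findOrInsert(): AnyRef = {
    val buffer = allocateBuffer()
    if (buffer == null) {
      // failed to allocate the first page; same message as the patch
      throw new SparkOutOfMemoryError("No enough memory for aggregation")
    }
    buffer
  }

  def main(args: Array[String]): Unit = {
    try findOrInsert() catch {
      // SparkOutOfMemoryError extends OutOfMemoryError, so match it
      // specifically; a plain OutOfMemoryError would typically be left
      // to the JVM's fatal-error handling.
      case e: SparkOutOfMemoryError =>
        println(s"task-level OOM, recoverable: ${e.getMessage}")
    }
  }
}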