Commit 225b661

fix bug
1 parent 4bdaead commit 225b661

File tree: 1 file changed (+2 -3 lines)

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala

Lines changed: 2 additions & 3 deletions
@@ -498,7 +498,6 @@ case class HashAggregateExec(
     // quite inefficient and can potentially OOM the executor.
     val isNotByteArrayDecimalType = bufferSchema.map(_.dataType).filter(_.isInstanceOf[DecimalType])
       .forall(!DecimalType.isByteArrayDecimalType(_))
-
     isSupported && isNotByteArrayDecimalType &&
       schemaLength <= sqlContext.conf.vectorizedAggregateMapMaxColumns
   }
@@ -558,9 +557,9 @@
     // doing some smart decision logic to pick between rowbased or vectorized fast hashmap
     // TODO: make the decision based on more comprehensive benchmarking
     // we now defaults to vectorized hashmap because it was used previously
-    if (!enableVectorizedHashMap(ctx)) {
+    if (enableVectorizedHashMap(ctx)) {
       isVectorizedHashMapEnabled = true
-    } else if (!enableVectorizedHashMap(ctx)) {
+    } else if (enableVectorizedHashMap(ctx)) {
       isRowBasedHashMapEnabled = true
     }
   }
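
The second hunk flips the negated predicates, so the vectorized fast hash map is enabled when enableVectorizedHashMap(ctx) returns true rather than when it returns false. Below is a minimal, standalone Scala sketch of the decision flow after this commit; enableVectorized, vectorizedEnabled, and rowBasedEnabled are hypothetical stand-in names for illustration, not the actual Spark fields, and the repeated predicate in the else-if mirrors the committed code as-is.

// Standalone sketch (hypothetical names) of the post-fix choice between the
// vectorized and row-based fast hash maps in HashAggregateExec.
object FastHashMapChoiceSketch {
  // `enableVectorized` stands in for enableVectorizedHashMap(ctx).
  def choose(enableVectorized: Boolean): (Boolean, Boolean) = {
    var vectorizedEnabled = false
    var rowBasedEnabled = false
    if (enableVectorized) {
      // Post-fix: the vectorized map is enabled exactly when the config asks for it.
      vectorizedEnabled = true
    } else if (enableVectorized) {
      // Mirrors the committed code: same predicate as above, so this branch is never
      // taken and the row-based flag stays false in this sketch.
      rowBasedEnabled = true
    }
    (vectorizedEnabled, rowBasedEnabled)
  }

  def main(args: Array[String]): Unit = {
    println(choose(true))   // (true,false): vectorized map selected
    println(choose(false))  // (false,false): with the pre-fix negated check, this case
                            // would instead have enabled the vectorized map
  }
}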
