1 file changed: +2 −3 lines
sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate

@@ -498,7 +498,6 @@ case class HashAggregateExec(
     // quite inefficient and can potentially OOM the executor.
     val isNotByteArrayDecimalType = bufferSchema.map(_.dataType).filter(_.isInstanceOf[DecimalType])
       .forall(!DecimalType.isByteArrayDecimalType(_))
-
     isSupported && isNotByteArrayDecimalType &&
       schemaLength <= sqlContext.conf.vectorizedAggregateMapMaxColumns
   }
@@ -558,9 +557,9 @@ case class HashAggregateExec(
     // doing some smart decision logic to pick between rowbased or vectorized fast hashmap
     // TODO: make the decision based on more comprehensive benchmarking
     // we now defaults to vectorized hashmap because it was used previously
-    if (!enableVectorizedHashMap(ctx)) {
+    if (enableVectorizedHashMap(ctx)) {
       isVectorizedHashMapEnabled = true
-    } else if (!enableVectorizedHashMap(ctx)) {
+    } else if (enableVectorizedHashMap(ctx)) {
       isRowBasedHashMapEnabled = true
     }
   }
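
Below is a minimal, hypothetical Scala sketch (not the actual HashAggregateExec code; the object and method names are invented for illustration) of the choice the comments above describe: prefer the vectorized fast hash map when it is enabled, otherwise fall back to the row-based one. Note that in the patched code both branches still test the same predicate, so the else-if remains unreachable as long as enableVectorizedHashMap(ctx) is deterministic; the sketch assumes the intended fallback is the row-based map.

// Hypothetical standalone sketch of the fast hash map selection; not Spark source.
object FastHashMapChoiceSketch {

  // Returns (isVectorizedHashMapEnabled, isRowBasedHashMapEnabled):
  // exactly one of the two flags is set, driven by a single predicate.
  def choose(vectorizedHashMapEnabled: Boolean): (Boolean, Boolean) =
    if (vectorizedHashMapEnabled) (true, false) else (false, true)

  def main(args: Array[String]): Unit = {
    println(choose(vectorizedHashMapEnabled = true))   // (true,false): vectorized fast hash map
    println(choose(vectorizedHashMapEnabled = false))  // (false,true): row-based fast hash map
  }
}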