
Commit 1f45a04

set aggPushDownEnabledKey to false explicitly to get the expected result row
1 parent b561d09


sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala

Lines changed: 8 additions & 2 deletions
@@ -269,7 +269,10 @@ trait FileSourceAggregatePushDownSuite
       spark.read.format(format).load(dir.getCanonicalPath).createOrReplaceTempView("tmp");
       val query = "SELECT count(*), count(id), p, max(id), p, count(p), max(id)," +
         " min(id), p FROM tmp group by p"
-      val expected = sql(query).collect
+      var expected = Array.empty[Row]
+      withSQLConf(aggPushDownEnabledKey -> "false") {
+        expected = sql(query).collect
+      }
       Seq("false", "true").foreach { enableVectorizedReader =>
         withSQLConf(aggPushDownEnabledKey -> "true",
           vectorizedReaderEnabledKey -> enableVectorizedReader) {
@@ -303,7 +306,10 @@ trait FileSourceAggregatePushDownSuite
       spark.read.format(format).load(dir.getCanonicalPath).createOrReplaceTempView("tmp")
       val query = "SELECT count(*), count(value), max(value), min(value)," +
         " p4, p2, p3, p1 FROM tmp GROUP BY p1, p2, p3, p4"
-      val expected = sql(query).collect
+      var expected = Array.empty[Row]
+      withSQLConf(aggPushDownEnabledKey -> "false") {
+        expected = sql(query).collect
+      }
       Seq("false", "true").foreach { enableVectorizedReader =>
         withSQLConf(aggPushDownEnabledKey -> "true",
           vectorizedReaderEnabledKey -> enableVectorizedReader) {
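
For context, here is a minimal, self-contained sketch of the test pattern this change converges on; it is not the committed code. The reference rows are collected once with aggregate push down disabled, then the same query is verified with push down enabled for both vectorized-reader settings. It assumes the suite's withSQLConf, sql and checkAnswer test helpers and its aggPushDownEnabledKey / vectorizedReaderEnabledKey config-key members; the query string is illustrative.

import org.apache.spark.sql.Row

// Hypothetical, simplified query over the same "tmp" temp view used in the suite.
val query = "SELECT count(*), max(id), min(id), p FROM tmp GROUP BY p"

// Reference result collected with aggregate push down turned off, so the
// expected rows do not come from the code path under test.
var expected = Array.empty[Row]
withSQLConf(aggPushDownEnabledKey -> "false") {
  expected = sql(query).collect
}

// Check that the push-down-enabled plan returns the same rows, with and
// without the vectorized reader.
Seq("false", "true").foreach { enableVectorizedReader =>
  withSQLConf(aggPushDownEnabledKey -> "true",
      vectorizedReaderEnabledKey -> enableVectorizedReader) {
    checkAnswer(sql(query), expected)
  }
}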
