sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet
1 file changed, +2 -1 lines

@@ -80,7 +80,6 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext
         assert(f.getClass === filterClass)
       }
     }
-
     checkPlan(query)
     checker(query, expected)
   }
@@ -381,6 +380,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext
 
   test("SPARK-11103: Filter applied on merged Parquet schema with new column fails") {
     import testImplicits._
+
     withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
       SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "true") {
       withTempPath { dir =>
@@ -454,6 +454,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext
   // The unsafe row RecordReader does not support row by row filtering so run it with it disabled.
   test("SPARK-11661 Still pushdown filters returned by unhandledFilters") {
     import testImplicits._
+
     withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
       withSQLConf(SQLConf.PARQUET_UNSAFE_ROW_RECORD_READER_ENABLED.key -> "false") {
         withTempPath { dir =>
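For readers unfamiliar with these helpers: both tests touched above wrap their bodies in the SQLTestUtils utilities withSQLConf and withTempPath, which override the named SQL configs and create a temporary directory only for the duration of the block. The snippet below is a minimal sketch of that nesting pattern, not the actual test body; the DataFrame being written, the sqlContext handle, and the final assertion are illustrative assumptions.

// Minimal sketch (not the real test body) of the withSQLConf / withTempPath
// nesting used by the tests in this diff. Assumes it runs inside a suite that
// mixes in SharedSQLContext and ParquetTest, which provide `sqlContext`,
// `testImplicits`, `withSQLConf`, and `withTempPath`.
import testImplicits._

withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
  // The unsafe row RecordReader cannot filter row by row, so it is switched
  // off while pushdown of the unhandled filters is exercised.
  withSQLConf(SQLConf.PARQUET_UNSAFE_ROW_RECORD_READER_ENABLED.key -> "false") {
    withTempPath { dir =>
      val path = dir.getCanonicalPath
      // Hypothetical data, written only so there is something to read back.
      (1 to 10).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path)
      // Reading with a comparison predicate goes through the Parquet
      // filter-pushdown code path enabled above.
      val filtered = sqlContext.read.parquet(path).filter("a > 5")
      assert(filtered.count() === 5)
    }
  }
}

Because withSQLConf restores the previous values when its block exits, these overrides do not leak into the other tests in the suite.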