Skip to content

Commit 0e149da

Browse files
committed
Update indentations
1 parent 67d4533 commit 0e149da

File tree

1 file changed: +2 additions, -1 deletion

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,6 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext
         assert(f.getClass === filterClass)
       }
     }
-
     checkPlan(query)
     checker(query, expected)
   }
@@ -381,6 +380,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext

   test("SPARK-11103: Filter applied on merged Parquet schema with new column fails") {
     import testImplicits._
+
     withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
       SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "true") {
       withTempPath { dir =>
@@ -454,6 +454,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext
   // The unsafe row RecordReader does not support row by row filtering so run it with it disabled.
   test("SPARK-11661 Still pushdown filters returned by unhandledFilters") {
     import testImplicits._
+
     withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
       withSQLConf(SQLConf.PARQUET_UNSAFE_ROW_RECORD_READER_ENABLED.key -> "false") {
        withTempPath { dir =>

0 commit comments

Comments (0)