1 file changed: +2 additions, −1 deletion

File: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala

@@ -347,6 +347,7 @@ class ParquetFileFormat
347347 val pushDownDecimal = sqlConf.parquetFilterPushDownDecimal
348348 val pushDownStringStartWith = sqlConf.parquetFilterPushDownStringStartWith
349349 val pushDownInFilterThreshold = sqlConf.parquetFilterPushDownInFilterThreshold
350+ val isCaseSensitive = sqlConf.caseSensitiveAnalysis
350351
351352 (file : PartitionedFile ) => {
352353 assert(file.partitionValues.numFields == partitionSchema.size)
@@ -377,7 +378,7 @@ class ParquetFileFormat
377378 // Collects all converted Parquet filter predicates. Notice that not all predicates can be
378379 // converted (`ParquetFilters.createFilter` returns an `Option`). That's why a `flatMap`
379380 // is used here.
380- .flatMap(parquetFilters.createFilter(parquetSchema, _, sqlConf.caseSensitiveAnalysis ))
381+ .flatMap(parquetFilters.createFilter(parquetSchema, _, isCaseSensitive ))
381382 .reduceOption(FilterApi .and)
382383 } else {
383384 None
You can’t perform that action at this time.
0 commit comments