diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala
index a8c813a03e0ca..dc59526bb316b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala
@@ -53,7 +53,8 @@ case class OrcScanBuilder(
 
   override def pushFilters(filters: Array[Filter]): Array[Filter] = {
     if (sparkSession.sessionState.conf.orcFilterPushDown) {
-      val dataTypeMap = OrcFilters.getSearchableTypeMap(schema, SQLConf.get.caseSensitiveAnalysis)
+      val dataTypeMap = OrcFilters.getSearchableTypeMap(
+        readDataSchema(), SQLConf.get.caseSensitiveAnalysis)
       _pushedFilters = OrcFilters.convertibleFilters(dataTypeMap, filters).toArray
     }
     filters
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
index e7fd139c73ca4..396d227218ab8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
@@ -460,7 +460,7 @@ class ExplainSuite extends ExplainSuiteHelper with DisableAdaptiveExecutionSuite
       "parquet" ->
         "|PushedFilters: \\[IsNotNull\\(value\\), GreaterThan\\(value,2\\)\\]",
       "orc" ->
-        "|PushedFilters: \\[.*\\(id\\), .*\\(value\\), .*\\(id,1\\), .*\\(value,2\\)\\]",
+        "|PushedFilters: \\[IsNotNull\\(value\\), GreaterThan\\(value,2\\)\\]",
       "csv" ->
         "|PushedFilters: \\[IsNotNull\\(value\\), GreaterThan\\(value,2\\)\\]",
       "json" ->