3 files changed: +20 −2 lines

  catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans
  main/scala/org/apache/spark/sql/execution/datasources/v2
  test/scala/org/apache/spark/sql

@@ -40,6 +40,7 @@ trait AliasAwareOutputExpression extends SQLConfHelper {
   // more than `aliasCandidateLimit` attributes for an expression. In those cases the old logic
   // handled only the last alias so we need to make sure that we give precedence to that.
   // If the `outputExpressions` contain simple attributes we need to add those too to the map.
+  @transient
   private lazy val aliasMap = {
     val aliases = mutable.Map[Expression, mutable.ArrayBuffer[Attribute]]()
     outputExpressions.reverse.foreach {
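The @transient annotation keeps the derived aliasMap out of the Java-serialized form of the node; because it is a lazy val, it can simply be rebuilt on first access after deserialization (the usual Spark idiom for caches derived from other fields). A minimal standalone sketch of that idiom, using hypothetical Plan/derived names and plain Java serialization rather than the Spark classes:

    import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

    // Hypothetical stand-in for a plan-like node: `derived` is computed from `exprs`,
    // so it does not need to be shipped with the object. @transient excludes the cached
    // value from serialization; the lazy val recomputes it on first access afterwards.
    case class Plan(exprs: Seq[String]) {
      @transient lazy val derived: Map[String, Int] = exprs.zipWithIndex.toMap
    }

    object TransientLazyDemo {
      def main(args: Array[String]): Unit = {
        val p = Plan(Seq("a", "b"))
        p.derived                                   // force the lazy val before serializing
        val out = new ByteArrayOutputStream()
        new ObjectOutputStream(out).writeObject(p)
        val in = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray))
        val copy = in.readObject().asInstanceOf[Plan]
        println(copy.derived)                       // rebuilt lazily: Map(a -> 0, b -> 1)
      }
    }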
@@ -44,12 +44,13 @@ case class BatchScanExec(
     applyPartialClustering: Boolean = false,
     replicatePartitions: Boolean = false) extends DataSourceV2ScanExecBase {
 
-  @transient lazy val batch = scan.toBatch
+  @transient lazy val batch = if (scan == null) null else scan.toBatch
 
   // TODO: unify the equal/hashCode implementation for all data source v2 query plans.
   override def equals(other: Any): Boolean = other match {
     case other: BatchScanExec =>
-      this.batch == other.batch && this.runtimeFilters == other.runtimeFilters &&
+      this.batch != null && this.batch == other.batch &&
+        this.runtimeFilters == other.runtimeFilters &&
       this.commonPartitionValues == other.commonPartitionValues &&
       this.replicatePartitions == other.replicatePartitions &&
       this.applyPartialClustering == other.applyPartialClustering
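Together these two hunks make the derived batch tolerant of a missing scan: the lazy val returns null instead of throwing a NullPointerException when scan is null, and equals only treats two nodes as equal when a real batch is present, so two batch-less copies are not considered equal merely because both sides are null. A minimal standalone sketch of the same guarded pattern, with hypothetical Source/ScanNode names rather than the Spark classes:

    // Hypothetical miniature of the pattern above: `source` may be absent (null) on some
    // copies of the node, so both the derived value and the equality check guard against it.
    final class Source {
      def toBatch: String = "batch"
    }

    case class ScanNode(source: Source, filters: Seq[String]) {
      @transient lazy val batch: String =
        if (source == null) null else source.toBatch   // no NPE for a source-less copy

      override def equals(other: Any): Boolean = other match {
        case o: ScanNode =>
          this.batch != null && this.batch == o.batch && this.filters == o.filters
        case _ => false
      }
      override def hashCode(): Int = filters.hashCode()
    }

    // ScanNode(null, Nil).batch                    // -> null, not a NullPointerException
    // ScanNode(null, Nil) == ScanNode(null, Nil)   // -> false: null batches never match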
@@ -2679,4 +2679,20 @@ class SubquerySuite extends QueryTest
       Row(8, 6))
     }
   }
+
+  test("SPARK-42745: Improved AliasAwareOutputExpression works with DSv2") {
+    withSQLConf(
+      SQLConf.USE_V1_SOURCE_LIST.key -> "") {
+      withTempPath { path =>
+        spark.range(0)
+          .write
+          .mode("overwrite")
+          .parquet(path.getCanonicalPath)
+        withTempView("t1") {
+          spark.read.parquet(path.toString).createOrReplaceTempView("t1")
+          checkAnswer(sql("select (select sum(id) from t1)"), Row(null))
+        }
+      }
+    }
+  }
 }
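The regression test reads an empty parquet table through the DSv2 path (clearing spark.sql.sources.useV1SourceList makes file sources plan a BatchScanExec rather than the V1 scan) and runs a scalar subquery over it; SUM over zero rows is NULL, hence the expected Row(null). A rough way to double-check that the V2 path is actually taken, not part of the test above and assuming a SparkSession `spark` and parquet `path` as in the test:

    import org.apache.spark.sql.execution.datasources.v2.BatchScanExec

    // With spark.sql.sources.useV1SourceList set to "", the planned read should
    // contain a BatchScanExec node somewhere in the physical plan.
    val df = spark.read.parquet(path.toString)
    val usesV2 = df.queryExecution.executedPlan.collectFirst {
      case b: BatchScanExec => b
    }.isDefined
    assert(usesV2, "expected the DSv2 BatchScanExec path")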