Skip to content

Commit 745de51

Browse files
committed
add test
1 parent d4e578c commit 745de51

File tree

2 files changed

+32
-0
lines changed

2 files changed

+32
-0
lines changed

sql/core/src/test/resources/sql-tests/inputs/explain.sql

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,11 @@ CREATE table explain_temp4 (key int, val string) USING PARQUET;
 
 SET spark.sql.codegen.wholeStage = true;
 
+-- distinct func
+EXPLAIN EXTENDED
+SELECT sum(distinct val)
+FROM explain_temp1;
+
 -- single table
 EXPLAIN FORMATTED
 SELECT key, max(val)

sql/core/src/test/resources/sql-tests/results/explain.sql.out

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,33 @@ struct<key:string,value:string>
 spark.sql.codegen.wholeStage	true
 
 
+-- !query
+== Parsed Logical Plan ==
+'Project [unresolvedalias('sum(distinct 'val), None)]
++- 'UnresolvedRelation [explain_temp1]
+
+== Analyzed Logical Plan ==
+sum(DISTINCT val): bigint
+Aggregate [sum(distinct cast(val#x as bigint)) AS sum(DISTINCT val)#x]
++- SubqueryAlias spark_catalog.default.explain_temp1
+   +- Relation[key#x,val#x] parquet
+
+== Optimized Logical Plan ==
+Aggregate [sum(distinct cast(val#x as bigint)) AS sum(DISTINCT val)#x]
++- Project [val#x]
+   +- Relation[key#x,val#x] parquet
+
+== Physical Plan ==
+*(3) HashAggregate(keys=[], functions=[sum(distinct cast(val#x as bigint)#x)], output=[sum(DISTINCT val)#x])
++- Exchange SinglePartition, true, [id=#x]
+   +- *(2) HashAggregate(keys=[], functions=[partial_sum(distinct cast(val#x as bigint)#x)], output=[sum#x])
+      +- *(2) HashAggregate(keys=[cast(val#x as bigint)#x], functions=[], output=[cast(val#x as bigint)#x])
+         +- Exchange hashpartitioning(cast(val#x as bigint)#x, 200), true, [id=#x]
+            +- *(1) HashAggregate(keys=[cast(val#x as bigint) AS cast(val#x as bigint)#x], functions=[], output=[cast(val#x as bigint)#x])
+               +- *(1) ColumnarToRow
+                  +- FileScan parquet default.explain_temp1[val#x] Batched: true, DataFilters: [], Format: Parquet, Location: InMemoryFileIndex[[not included in comparison]/{warehouse_dir}/explain_temp1], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<val:int>
+
+
 -- !query
 EXPLAIN FORMATTED
 SELECT key, max(val)

0 commit comments

Comments
 (0)