
Commit c472eb1

cloud-fan authored and rxin committed
[SPARK-8970][SQL] remove unnecessary abstraction for ExtractValue
Author: Wenchen Fan <[email protected]>

Closes apache#7339 from cloud-fan/minor and squashes the following commits:

84a2128 [Wenchen Fan] remove unapply
6a37c12 [Wenchen Fan] remove unnecessary abstraction for ExtractValue
1 parent 0c5207c commit c472eb1
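
The change itself is small: the ExtractValue marker trait and the ExtractValueWithStruct abstract class existed only to group the four concrete extractor expressions, so they are removed and every call site now matches the concrete case classes directly. Below is a simplified, self-contained Scala sketch of the resulting shape (toy types, not the actual Catalyst classes):

sealed trait Expr
case class AttributeRef(name: String) extends Expr

// After the change the extractors are standalone case classes; the shared
// trait and the ExtractValueWithStruct base class are gone.
case class GetStructField(child: Expr, fieldName: String) extends Expr
case class GetArrayStructFields(child: Expr, fieldName: String) extends Expr
case class GetArrayItem(child: Expr, ordinal: Expr) extends Expr
case class GetMapValue(child: Expr, key: Expr) extends Expr

// Call sites that used to match the abstraction now enumerate the concrete
// types, e.g. to pick an output name the way the Analyzer change below does.
def outputName(e: Expr): Option[String] = e match {
  case g: GetStructField       => Some(g.fieldName)
  case g: GetArrayStructFields => Some(g.fieldName)
  case _                       => None
}

// outputName(GetStructField(AttributeRef("address"), "city")) == Some("city")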

File tree

3 files changed (+15, -32 lines)


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 2 additions & 1 deletion
@@ -141,7 +141,8 @@ class Analyzer(
         child match {
           case _: UnresolvedAttribute => u
           case ne: NamedExpression => ne
-          case ev: ExtractValueWithStruct => Alias(ev, ev.field.name)()
+          case g: GetStructField => Alias(g, g.field.name)()
+          case g: GetArrayStructFields => Alias(g, g.field.name)()
           case g: Generator if g.resolved && g.elementTypes.size > 1 => MultiAlias(g, Nil)
           case e if !e.resolved => u
           case other => Alias(other, s"_c$i")()
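
In effect the analyzer keeps the old behaviour: an unaliased struct-field extraction is still named after the field it extracts, and only the types being matched changed. A hedged usage sketch (Spark 1.x shell with a SQLContext named sqlContext; the input file and its `address` struct column are hypothetical, and the column name shown is what this aliasing rule is expected to produce):

val df = sqlContext.read.json("people.json")  // hypothetical file with a struct column `address`
val cities = df.select(df("address.city"))    // resolves to a GetStructField under the hood
cities.columns                                // expected: Array("city"), not an auto-generated "_c0"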

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala

Lines changed: 7 additions & 29 deletions
@@ -78,12 +78,6 @@ object ExtractValue {
     }
   }
 
-  def unapply(g: ExtractValue): Option[(Expression, Expression)] = g match {
-    case o: GetArrayItem => Some((o.child, o.ordinal))
-    case o: GetMapValue => Some((o.child, o.key))
-    case s: ExtractValueWithStruct => Some((s.child, null))
-  }
-
   /**
    * Find the ordinal of StructField, report error if no desired field or over one
    * desired fields are found.
@@ -103,32 +97,17 @@ object ExtractValue {
   }
 }
 
-/**
- * A common interface of all kinds of extract value expressions.
- * Note: concrete extract value expressions are created only by `ExtractValue.apply`,
- * we don't need to do type check for them.
- */
-trait ExtractValue {
-  self: Expression =>
-}
-
-abstract class ExtractValueWithStruct extends UnaryExpression with ExtractValue {
-  self: Product =>
-
-  def field: StructField
-  override def toString: String = s"$child.${field.name}"
-}
-
 /**
  * Returns the value of fields in the Struct `child`.
  *
  * No need to do type checking since it is handled by [[ExtractValue]].
  */
 case class GetStructField(child: Expression, field: StructField, ordinal: Int)
-  extends ExtractValueWithStruct {
+  extends UnaryExpression {
 
   override def dataType: DataType = field.dataType
   override def nullable: Boolean = child.nullable || field.nullable
+  override def toString: String = s"$child.${field.name}"
 
   protected override def nullSafeEval(input: Any): Any =
     input.asInstanceOf[InternalRow](ordinal)
@@ -155,10 +134,11 @@ case class GetArrayStructFields(
     child: Expression,
     field: StructField,
     ordinal: Int,
-    containsNull: Boolean) extends ExtractValueWithStruct {
+    containsNull: Boolean) extends UnaryExpression {
 
   override def dataType: DataType = ArrayType(field.dataType, containsNull)
   override def nullable: Boolean = child.nullable || containsNull || field.nullable
+  override def toString: String = s"$child.${field.name}"
 
   protected override def nullSafeEval(input: Any): Any = {
     input.asInstanceOf[Seq[InternalRow]].map { row =>
@@ -191,8 +171,7 @@ case class GetArrayStructFields(
  *
  * No need to do type checking since it is handled by [[ExtractValue]].
  */
-case class GetArrayItem(child: Expression, ordinal: Expression)
-  extends BinaryExpression with ExtractValue {
+case class GetArrayItem(child: Expression, ordinal: Expression) extends BinaryExpression {
 
   override def toString: String = s"$child[$ordinal]"
 
@@ -231,12 +210,11 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
 }
 
 /**
- * Returns the value of key `ordinal` in Map `child`.
+ * Returns the value of key `key` in Map `child`.
  *
  * No need to do type checking since it is handled by [[ExtractValue]].
  */
-case class GetMapValue(child: Expression, key: Expression)
-  extends BinaryExpression with ExtractValue {
+case class GetMapValue(child: Expression, key: Expression) extends BinaryExpression {
 
   override def toString: String = s"$child[$key]"
 
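
What remains in this file is the ExtractValue.apply factory, which already dispatches on the child's data type to decide which concrete extractor to build; once that factory exists, a shared trait over the extractors adds nothing. A minimal, self-contained sketch of that kind of dispatch (toy types and simplified signatures, not the real ExtractValue.apply, which also resolves field ordinals and reports analysis errors):

sealed trait ToyType
case object StructLike extends ToyType         // extraction by field name
case object ArrayOfStructLike extends ToyType  // extraction by field name over each element
case object ArrayLike extends ToyType          // extraction by integral ordinal
case object MapLike extends ToyType            // extraction by key

// The constructor shapes mirror the case classes in the diff above.
def describeExtractor(childType: ToyType): String = childType match {
  case StructLike        => "GetStructField(child, field, ordinal)"
  case ArrayOfStructLike => "GetArrayStructFields(child, field, ordinal, containsNull)"
  case ArrayLike         => "GetArrayItem(child, ordinal)"
  case MapLike           => "GetMapValue(child, key)"
}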

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala

Lines changed: 6 additions & 2 deletions
@@ -275,8 +275,12 @@ object NullPropagation extends Rule[LogicalPlan] {
       case e @ Count(Literal(null, _)) => Cast(Literal(0L), e.dataType)
       case e @ IsNull(c) if !c.nullable => Literal.create(false, BooleanType)
       case e @ IsNotNull(c) if !c.nullable => Literal.create(true, BooleanType)
-      case e @ ExtractValue(Literal(null, _), _) => Literal.create(null, e.dataType)
-      case e @ ExtractValue(_, Literal(null, _)) => Literal.create(null, e.dataType)
+      case e @ GetArrayItem(Literal(null, _), _) => Literal.create(null, e.dataType)
+      case e @ GetArrayItem(_, Literal(null, _)) => Literal.create(null, e.dataType)
+      case e @ GetMapValue(Literal(null, _), _) => Literal.create(null, e.dataType)
+      case e @ GetMapValue(_, Literal(null, _)) => Literal.create(null, e.dataType)
+      case e @ GetStructField(Literal(null, _), _, _) => Literal.create(null, e.dataType)
+      case e @ GetArrayStructFields(Literal(null, _), _, _, _) => Literal.create(null, e.dataType)
       case e @ EqualNullSafe(Literal(null, _), r) => IsNull(r)
       case e @ EqualNullSafe(l, Literal(null, _)) => IsNull(l)
       case e @ Count(expr) if !expr.nullable => Count(Literal(1))
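
With the ExtractValue unapply gone, NullPropagation spells out one case per concrete extractor, but the logic is unchanged: an extraction whose child (or ordinal/key) is a null literal folds to a null literal of the expression's result type. A minimal standalone sketch of that folding (toy expression types; the real rule uses Catalyst's Literal and e.dataType):

sealed trait TExpr
case class TLiteral(value: Any, dataType: String) extends TExpr
case class TGetArrayItem(child: TExpr, ordinal: TExpr, dataType: String) extends TExpr

def propagateNull(e: TExpr): TExpr = e match {
  // indexing a null array, or indexing with a null ordinal, is always null
  case TGetArrayItem(TLiteral(null, _), _, dt) => TLiteral(null, dt)
  case TGetArrayItem(_, TLiteral(null, _), dt) => TLiteral(null, dt)
  case other => other
}

// e.g. propagateNull(TGetArrayItem(TLiteral(null, "array<int>"), TLiteral(0, "int"), "int"))
//      == TLiteral(null, "int")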
