Skip to content

Commit 4efea20

Browse files
committed
Don't check children resolved for UDF resolution.
1 parent 2ebe549 commit 4efea20

File tree

3 files changed

+10
-9
lines changed

3 files changed

+10
-9
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,6 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
6868
protected val FULL = Keyword("FULL")
6969
protected val GROUP = Keyword("GROUP")
7070
protected val HAVING = Keyword("HAVING")
71-
protected val IF = Keyword("IF")
7271
protected val IN = Keyword("IN")
7372
protected val INNER = Keyword("INNER")
7473
protected val INSERT = Keyword("INSERT")
@@ -277,6 +276,7 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
277276
lexical.normalizeKeyword(udfName) match {
278277
case "sum" => SumDistinct(exprs.head)
279278
case "count" => CountDistinct(exprs)
279+
case _ => throw new AnalysisException(s"function $udfName does not support DISTINCT")
280280
}
281281
}
282282
| APPROXIMATE ~> ident ~ ("(" ~ DISTINCT ~> expression <~ ")") ^^ { case udfName ~ exp =>

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -460,7 +460,7 @@ class Analyzer(
460460
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
461461
case q: LogicalPlan =>
462462
q transformExpressions {
463-
case u @ UnresolvedFunction(name, children) if u.childrenResolved =>
463+
case u @ UnresolvedFunction(name, children) =>
464464
withPosition(u) {
465465
registry.lookupFunction(name, children)
466466
}
@@ -494,20 +494,21 @@ class Analyzer(
494494
object UnresolvedHavingClauseAttributes extends Rule[LogicalPlan] {
495495
def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
496496
case filter @ Filter(havingCondition, aggregate @ Aggregate(_, originalAggExprs, _))
497-
if aggregate.resolved && containsAggregate(havingCondition) => {
497+
if aggregate.resolved && containsAggregate(havingCondition) =>
498+
498499
val evaluatedCondition = Alias(havingCondition, "havingCondition")()
499500
val aggExprsWithHaving = evaluatedCondition +: originalAggExprs
500501

501502
Project(aggregate.output,
502503
Filter(evaluatedCondition.toAttribute,
503504
aggregate.copy(aggregateExpressions = aggExprsWithHaving)))
504-
}
505505
}
506506

507-
protected def containsAggregate(condition: Expression): Boolean =
507+
protected def containsAggregate(condition: Expression): Boolean = {
508508
condition
509509
.collect { case ae: AggregateExpression => ae }
510510
.nonEmpty
511+
}
511512
}
512513

513514
/**

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,10 @@ import org.apache.spark.sql.types._
2525

2626

2727
/**
28-
* For Catalyst to work correctly, concrete implementations of [[Expression]]s must be case classes
29-
* whose constructor arguments are all Expressions types. In addition, if we want to support more
30-
* than one constructor, define those constructors explicitly as apply methods in the companion
31-
* object.
28+
* If an expression wants to be exposed in the function registry (so users can call it with
29+
 * "name(arguments...)"), the concrete implementation must be a case class whose constructor
30+
 * arguments are all Expression types. In addition, if it needs to support more than one
31+
* constructor, define those constructors explicitly as apply methods in the companion object.
3232
*
3333
* See [[Substring]] for an example.
3434
*/

0 commit comments

Comments
 (0)