Commit 4435f20

Add ConvertAggregateFunction to HiveContext's analyzer.
Parent: 1b490ed

2 files changed (+9, −0 lines)


sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala

Lines changed: 6 additions & 0 deletions
@@ -205,6 +205,12 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
     def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
       case logical.Aggregate(groupingExpressions, resultExpressions, child)
         if sqlContext.conf.useSqlAggregate2 =>
+        // 0. Make sure we can convert.
+        resultExpressions.foreach {
+          case agg1: AggregateExpression =>
+            sys.error(s"$agg1 is not supported. Please set spark.sql.useAggregate2 to false.")
+          case _ => // ok
+        }
         // 1. Extracts all distinct aggregate expressions from the resultExpressions.
         val aggregateExpressions = resultExpressions.flatMap { expr =>
           expr.collect {
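
With spark.sql.useAggregate2 enabled, the planner now fails fast instead of silently planning an old-style AggregateExpression that the new aggregate2 path cannot handle. A user who hits this error can fall back to the old aggregation path; a minimal sketch, assuming the conf key quoted in the error message:

  // Hypothetical fallback, using the conf key quoted in the error message above.
  // Turning the flag off keeps queries on the original aggregation code path.
  sqlContext.setConf("spark.sql.useAggregate2", "false")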

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 3 additions & 0 deletions
@@ -21,6 +21,8 @@ import java.io.File
 import java.net.{URL, URLClassLoader}
 import java.sql.Timestamp
 
+import org.apache.spark.sql.execution.aggregate2.ConvertAggregateFunction
+
 import scala.collection.JavaConversions._
 import scala.collection.mutable.HashMap
 import scala.language.implicitConversions
@@ -385,6 +387,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
         ExtractPythonUDFs ::
         ResolveHiveWindowFunction ::
         sources.PreInsertCastAndRename ::
+        ConvertAggregateFunction(self) ::
         Nil
 
       override val extendedCheckRules = Seq(
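
This commit only registers ConvertAggregateFunction among HiveContext's extended resolution rules; the rule itself lives in org.apache.spark.sql.execution.aggregate2 and is not shown in this diff. For orientation, a rough sketch of the general shape such an analyzer rule takes, where the constructor parameter type (the HiveContext passed in as self), the use of transformAllExpressions, and the no-op body are assumptions rather than code from this commit:

  // Hypothetical sketch only: the real ConvertAggregateFunction is not part of this diff.
  package org.apache.spark.sql.execution.aggregate2

  import org.apache.spark.sql.SQLContext
  import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
  import org.apache.spark.sql.catalyst.rules.Rule

  case class ConvertAggregateFunction(sqlContext: SQLContext) extends Rule[LogicalPlan] {
    // An analyzer rule is a LogicalPlan => LogicalPlan transformation; the real rule
    // would rewrite old-style aggregate functions into their aggregate2 equivalents.
    override def apply(plan: LogicalPlan): LogicalPlan = plan transformAllExpressions {
      case e => e // placeholder: conversion cases omitted in this sketch
    }
  }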
