4 changes: 4 additions & 0 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
100644 → 100755
@@ -81,6 +81,7 @@ class SqlParser extends StandardTokenParsers with PackratParsers {
protected val DISTINCT = Keyword("DISTINCT")
protected val FALSE = Keyword("FALSE")
protected val FIRST = Keyword("FIRST")
protected val LAST = Keyword("LAST")
protected val FROM = Keyword("FROM")
protected val FULL = Keyword("FULL")
protected val GROUP = Keyword("GROUP")
@@ -124,6 +125,7 @@ class SqlParser extends StandardTokenParsers with PackratParsers {
protected val SUBSTR = Keyword("SUBSTR")
protected val SUBSTRING = Keyword("SUBSTRING")
protected val SQRT = Keyword("SQRT")
protected val ABS = Keyword("ABS")

// Use reflection to find the reserved words defined in this class.
protected val reservedWords =
@@ -311,6 +313,7 @@ class SqlParser extends StandardTokenParsers with PackratParsers {
case s ~ _ ~ _ ~ _ ~ _ ~ e => ApproxCountDistinct(e, s.toDouble)
} |
FIRST ~> "(" ~> expression <~ ")" ^^ { case exp => First(exp) } |
LAST ~> "(" ~> expression <~ ")" ^^ { case exp => Last(exp) } |
AVG ~> "(" ~> expression <~ ")" ^^ { case exp => Average(exp) } |
MIN ~> "(" ~> expression <~ ")" ^^ { case exp => Min(exp) } |
MAX ~> "(" ~> expression <~ ")" ^^ { case exp => Max(exp) } |
@@ -326,6 +329,7 @@ class SqlParser extends StandardTokenParsers with PackratParsers {
case s ~ "," ~ p ~ "," ~ l => Substring(s,p,l)
} |
SQRT ~> "(" ~> expression <~ ")" ^^ { case exp => Sqrt(exp) } |
ABS ~> "(" ~> expression <~ ")" ^^ { case exp => Abs(exp) } |
ident ~ "(" ~ repsep(expression, ",") <~ ")" ^^ {
case udfName ~ _ ~ exprs => UnresolvedFunction(udfName, exprs)
}
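Note for readers unfamiliar with the combinator style above: LAST ~> "(" ~> expression <~ ")" drops the keyword and parentheses, keeps only the inner expression's parse result, and ^^ maps that result into the corresponding Catalyst node. A minimal, self-contained sketch of the same pattern in plain scala-parser-combinators (bundled with Scala 2.10, a separate module on newer Scala versions; not Spark code, the AST names are stand-ins):

  import scala.util.parsing.combinator.JavaTokenParsers

  // Toy AST standing in for Catalyst expressions.
  sealed trait Expr
  case class Num(v: Double) extends Expr
  case class LastCall(child: Expr) extends Expr
  case class AbsCall(child: Expr) extends Expr

  object FuncParserSketch extends JavaTokenParsers {
    // The "expression" here is just a numeric literal, enough for the sketch.
    def expression: Parser[Expr] = floatingPointNumber ^^ { s => Num(s.toDouble) }

    // Same shape as the rules added above: drop keyword and parens, wrap the child.
    def function: Parser[Expr] =
      "LAST" ~> "(" ~> expression <~ ")" ^^ { exp => LastCall(exp) } |
      "ABS"  ~> "(" ~> expression <~ ")" ^^ { exp => AbsCall(exp) }

    def main(args: Array[String]): Unit =
      println(parseAll(function, "ABS(-1.3)"))   // parsed: AbsCall(Num(-1.3))
  }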
1 change: 1 addition & 0 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
100644 → 100755
@@ -132,6 +132,7 @@ package object dsl {
def approxCountDistinct(e: Expression, rsd: Double = 0.05) = ApproxCountDistinct(e, rsd)
def avg(e: Expression) = Average(e)
def first(e: Expression) = First(e)
def last(e: Expression) = Last(e)
def min(e: Expression) = Min(e)
def max(e: Expression) = Max(e)
def upper(e: Expression) = Upper(e)
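The DSL addition is just a convenience constructor: last(e) builds the same Last aggregate node you could construct by hand, alongside the existing first/avg/min/max helpers. A rough usage sketch, assuming these helpers are in scope via the usual org.apache.spark.sql.catalyst.dsl.expressions._ import and that package's Symbol-to-attribute conversions:

  import org.apache.spark.sql.catalyst.dsl.expressions._

  val n     = 'n.int        // DSL shorthand for an IntegerType attribute named "n"
  val agg   = last(n)       // equivalent to writing Last(n) directly
  val named = agg as 'lastN // alias the aggregate for use in a projection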
28 changes: 28 additions & 0 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
100644 → 100755
@@ -344,6 +344,21 @@ case class First(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def newInstance() = new FirstFunction(child, this)
}

case class Last(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def references = child.references
override def nullable = true
override def dataType = child.dataType
override def toString = s"LAST($child)"

override def asPartial: SplitEvaluation = {
val partialLast = Alias(Last(child), "PartialLast")()
SplitEvaluation(
Last(partialLast.toAttribute),
partialLast :: Nil)
}
override def newInstance() = new LastFunction(child, this)
}

case class AverageFunction(expr: Expression, base: AggregateExpression)
extends AggregateFunction {

@@ -489,3 +504,16 @@ case class FirstFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {

override def eval(input: Row): Any = result
}

case class LastFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
def this() = this(null, null) // Required for serialization.

var result: Any = null

override def update(input: Row): Unit = {
result = input
}

override def eval(input: Row): Any = if (result != null) expr.eval(result.asInstanceOf[Row])
else null
}
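The asPartial method above follows the same split the other aggregates use for distributed execution: each partition first computes its own PartialLast, and the final step applies Last over those partial results, so only the last partition's last value survives. A plain-Scala analogue of that two-phase evaluation (not Spark code; it only gives a meaningful answer when the ordering of partitions and rows is well defined):

  // Two-phase "last": per-partition partial results, then a final pass over them.
  val partitions: Seq[Seq[Int]] = Seq(Seq(1, 2), Seq(3, 4))
  val partialLasts = partitions.map(_.last)   // one "PartialLast" per partition -> Seq(2, 4)
  val finalLast    = partialLasts.last        // Last over the partial results   -> 4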
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.sql.catalyst.analysis.UnresolvedException
import org.apache.spark.sql.catalyst.types._
import scala.math.pow

case class UnaryMinus(child: Expression) extends UnaryExpression {
type EvaluatedType = Any
@@ -129,3 +130,17 @@ case class MaxOf(left: Expression, right: Expression) extends Expression {

override def toString = s"MaxOf($left, $right)"
}

/**
* A function that gets the absolute value of a numeric value.
*/
case class Abs(child: Expression) extends UnaryExpression {
type EvaluatedType = Any

def dataType = child.dataType
override def foldable = child.foldable
def nullable = child.nullable
override def toString = s"Abs($child)"

override def eval(input: Row): Any = n1(child, input, _.abs(_))
}
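Here n1 is Catalyst's null-safe helper for unary numeric expressions: roughly, it evaluates the child, returns null if the child evaluates to null, and otherwise applies the supplied function (here _.abs(_)) through the Numeric instance for the child's data type. A plain-Scala sketch of that behavior (not Spark code):

  // Null-safe abs via a Numeric instance, mirroring n1(child, input, _.abs(_)).
  def absSketch[T](value: Any)(implicit num: Numeric[T]): Any =
    if (value == null) null else num.abs(value.asInstanceOf[T])

  absSketch[Double](-1.3)   // 1.3
  absSketch[Int](-5)        // 5
  absSketch[Double](null)   // null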
23 changes: 21 additions & 2 deletions sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -41,6 +41,25 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
}


test("SPARK-3176 Added Parser of SQL ABS()") {
checkAnswer(
sql("SELECT ABS(-1.3)"),
1.3)
checkAnswer(
sql("SELECT ABS(0.0)"),
0.0)
checkAnswer(
sql("SELECT ABS(2.5)"),
2.5)
}

test("SPARK-3176 Added Parser of SQL LAST()") {
checkAnswer(
sql("SELECT LAST(n) FROM lowerCaseData"),
4)
}


test("SPARK-2041 column name equals tablename") {
checkAnswer(
sql("SELECT tableName FROM tableName"),
@@ -53,14 +72,14 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
(1 to 100).map(x => Row(math.sqrt(x.toDouble))).toSeq
)
}

test("SQRT with automatic string casts") {
checkAnswer(
sql("SELECT SQRT(CAST(key AS STRING)) FROM testData"),
(1 to 100).map(x => Row(math.sqrt(x.toDouble))).toSeq
)
}

test("SPARK-2407 Added Parser of SQL SUBSTR()") {
checkAnswer(
sql("SELECT substr(tableName, 1, 2) FROM tableName"),