Skip to content

Commit a138953

Browse files
committed
[SPARK-8347][SQL] Add unit tests for abs.
Also addressed code review feedback from apache#6754. Author: Reynold Xin <[email protected]>. Closes apache#6803 from rxin/abs and squashes the following commits: d07beba [Reynold Xin] [SPARK-8347] Add unit tests for abs.
1 parent ddec452 commit a138953

File tree

5 files changed

+31
-33
lines changed

5 files changed

+31
-33
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -672,13 +672,13 @@ trait HiveTypeCoercion {
672672
findTightestCommonTypeToString(left.dataType, right.dataType).map { widestType =>
673673
val newLeft = if (left.dataType == widestType) left else Cast(left, widestType)
674674
val newRight = if (right.dataType == widestType) right else Cast(right, widestType)
675-
i.makeCopy(Array(pred, newLeft, newRight))
675+
If(pred, newLeft, newRight)
676676
}.getOrElse(i) // If there is no applicable conversion, leave expression unchanged.
677677

678678
// Convert If(null literal, _, _) into boolean type.
679679
// In the optimizer, we should short-circuit this directly into false value.
680-
case i @ If(pred, left, right) if pred.dataType == NullType =>
681-
i.makeCopy(Array(Literal.create(null, BooleanType), left, right))
680+
case If(pred, left, right) if pred.dataType == NullType =>
681+
If(Literal.create(null, BooleanType), left, right)
682682
}
683683
}
684684

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala

Lines changed: 16 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -21,7 +21,7 @@ import org.scalatest.Matchers._
2121

2222
import org.apache.spark.SparkFunSuite
2323
import org.apache.spark.sql.catalyst.dsl.expressions._
24-
import org.apache.spark.sql.types.{DoubleType, IntegerType}
24+
import org.apache.spark.sql.types.{Decimal, DoubleType, IntegerType}
2525

2626

2727
class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
@@ -75,6 +75,21 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
7575
checkDoubleEvaluation(c3 % c2, (1.1 +- 0.001), row)
7676
}
7777

78+
test("Abs") {
79+
def testAbs(convert: (Int) => Any): Unit = {
80+
checkEvaluation(Abs(Literal(convert(0))), convert(0))
81+
checkEvaluation(Abs(Literal(convert(1))), convert(1))
82+
checkEvaluation(Abs(Literal(convert(-1))), convert(1))
83+
}
84+
testAbs(_.toByte)
85+
testAbs(_.toShort)
86+
testAbs(identity)
87+
testAbs(_.toLong)
88+
testAbs(_.toFloat)
89+
testAbs(_.toDouble)
90+
testAbs(Decimal(_))
91+
}
92+
7893
test("Divide") {
7994
checkEvaluation(Divide(Literal(2), Literal(1)), 2)
8095
checkEvaluation(Divide(Literal(1.0), Literal(2.0)), 0.5)

sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala

Lines changed: 0 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -369,23 +369,6 @@ class ColumnExpressionSuite extends QueryTest {
369369
)
370370
}
371371

372-
test("abs") {
373-
checkAnswer(
374-
testData.select(abs('key)).orderBy('key.asc),
375-
(1 to 100).map(n => Row(n))
376-
)
377-
378-
checkAnswer(
379-
negativeData.select(abs('key)).orderBy('key.desc),
380-
(1 to 100).map(n => Row(n))
381-
)
382-
383-
checkAnswer(
384-
testData.select(abs(lit(null))),
385-
(1 to 100).map(_ => Row(null))
386-
)
387-
}
388-
389372
test("upper") {
390373
checkAnswer(
391374
lowerCaseData.select(upper('l)),

sql/core/src/test/scala/org/apache/spark/sql/MathExpressionsSuite.scala

Lines changed: 12 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -236,6 +236,18 @@ class MathExpressionsSuite extends QueryTest {
236236
testOneToOneNonNegativeMathFunction(log1p, math.log1p)
237237
}
238238

239+
test("abs") {
240+
val input =
241+
Seq[(java.lang.Double, java.lang.Double)]((null, null), (0.0, 0.0), (1.5, 1.5), (-2.5, 2.5))
242+
checkAnswer(
243+
input.toDF("key", "value").select(abs($"key").alias("a")).sort("a"),
244+
input.map(pair => Row(pair._2)))
245+
246+
checkAnswer(
247+
input.toDF("key", "value").selectExpr("abs(key) a").sort("a"),
248+
input.map(pair => Row(pair._2)))
249+
}
250+
239251
test("log2") {
240252
val df = Seq((1, 2)).toDF("a", "b")
241253
checkAnswer(

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 0 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -178,18 +178,6 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll with SQLTestUtils {
178178
Seq(Row("1"), Row("2")))
179179
}
180180

181-
test("SPARK-3176 Added Parser of SQL ABS()") {
182-
checkAnswer(
183-
sql("SELECT ABS(-1.3)"),
184-
Row(1.3))
185-
checkAnswer(
186-
sql("SELECT ABS(0.0)"),
187-
Row(0.0))
188-
checkAnswer(
189-
sql("SELECT ABS(2.5)"),
190-
Row(2.5))
191-
}
192-
193181
test("aggregation with codegen") {
194182
val originalValue = sqlContext.conf.codegenEnabled
195183
sqlContext.setConf(SQLConf.CODEGEN_ENABLED, "true")

0 commit comments

Comments (0)