diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
index 568dcd10d1166..2dc7e82f77226 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
@@ -308,27 +308,35 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
         val mulResult = Decimal(mulExact.setScale(mulType.scale, RoundingMode.HALF_UP))
         val mulExpected = if (mulResult.precision > DecimalType.MAX_PRECISION) null else mulResult
-        checkEvaluation(mulActual, mulExpected)
+        checkEvaluationOrException(mulActual, mulExpected)
 
         val divType = Divide(null, null).resultDecimalType(p1, s1, p2, s2)
         val divResult = Decimal(divExact.setScale(divType.scale, RoundingMode.HALF_UP))
         val divExpected = if (divResult.precision > DecimalType.MAX_PRECISION) null else divResult
-        checkEvaluation(divActual, divExpected)
+        checkEvaluationOrException(divActual, divExpected)
 
         val remType = Remainder(null, null).resultDecimalType(p1, s1, p2, s2)
         val remResult = Decimal(remExact.setScale(remType.scale, RoundingMode.HALF_UP))
         val remExpected = if (remResult.precision > DecimalType.MAX_PRECISION) null else remResult
-        checkEvaluation(remActual, remExpected)
+        checkEvaluationOrException(remActual, remExpected)
 
         val quotType = IntegralDivide(null, null).resultDecimalType(p1, s1, p2, s2)
         val quotResult = Decimal(quotExact.setScale(quotType.scale, RoundingMode.HALF_UP))
         val quotExpected = if (quotResult.precision > DecimalType.MAX_PRECISION) null else quotResult
-        checkEvaluation(quotActual, quotExpected.toLong)
+        checkEvaluationOrException(quotActual, quotExpected.toLong)
       }
     }
+
+    def checkEvaluationOrException(actual: BinaryArithmetic, expected: Any): Unit =
+      if (SQLConf.get.ansiEnabled && expected == null) {
+        checkExceptionInExpression[SparkArithmeticException](actual,
+          "NUMERIC_VALUE_OUT_OF_RANGE")
+      } else {
+        checkEvaluation(actual, expected)
+      }
   }
 }
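
For readers skimming the patch, the sketch below restates the evaluate-or-expect-exception pattern that the new checkEvaluationOrException helper applies: when ANSI mode is enabled and the expected value is null (i.e. the exact decimal result overflows the result type), evaluation must raise a NUMERIC_VALUE_OUT_OF_RANGE error instead of returning null; otherwise the evaluated value is compared as before. This is a self-contained Scala sketch with hypothetical names (EvalOrExceptionSketch, OverflowError, an explicit ansiEnabled flag); it does not use Spark's SQLConf, checkEvaluation, or checkExceptionInExpression test helpers.

// Standalone sketch (hypothetical names, not Spark's test helpers) of the pattern
// the new checkEvaluationOrException helper follows.
object EvalOrExceptionSketch {
  final case class OverflowError(message: String) extends RuntimeException(message)

  // When ansiEnabled and the expected value is null (overflow), evaluation must raise;
  // otherwise the evaluated value is compared against the expected one.
  def checkEvaluationOrException(evaluate: () => Any, expected: Any, ansiEnabled: Boolean): Unit =
    if (ansiEnabled && expected == null) {
      try {
        evaluate()
        throw new AssertionError("expected an OverflowError, but evaluation succeeded")
      } catch {
        case e: OverflowError =>
          assert(e.getMessage.contains("NUMERIC_VALUE_OUT_OF_RANGE"))
      }
    } else {
      assert(evaluate() == expected)
    }

  def main(args: Array[String]): Unit = {
    // Non-ANSI mode: an overflowing result is represented as null, so plain comparison is used.
    checkEvaluationOrException(() => null, expected = null, ansiEnabled = false)
    // ANSI mode: the same overflow must surface as an error carrying the error class name.
    checkEvaluationOrException(
      () => throw OverflowError("NUMERIC_VALUE_OUT_OF_RANGE"),
      expected = null,
      ansiEnabled = true)
    println("sketch assertions passed")
  }
}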