diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index a5b5b91e4ab3a..ac9901f88b6aa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -52,10 +52,10 @@ object TypeCoercion {
     DecimalPrecision ::
     BooleanEquality ::
     StringToIntegralCasts ::
+    Division ::
     FunctionArgumentConversion ::
     CaseWhenCoercion ::
     IfCoercion ::
-    Division ::
     PropagateTypes ::
     ImplicitTypeCasts ::
     DateTimeOperations ::
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 90465b65bdb1c..f1ea352207484 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -2843,4 +2843,19 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       sql(s"SELECT '$literal' AS DUMMY"),
       Row(s"$expected") :: Nil)
   }
+
+  test("SPARK-15776: Type coercion incorrect") {
+    Seq(
+      (1, 11),
+      (3, 25),
+      (5, 37)
+    ).toDF("k", "v").createOrReplaceTempView("tc")
+
+    // If the Division rule runs after FunctionArgumentConversion, the first query's
+    // output data type is bigint while the second's is double. Both queries should
+    // actually produce a double output data type.
+    checkAnswer(
+      sql("SELECT SUM(CASE WHEN k in (3, 5) THEN v / 10 ELSE 0 END) FROM tc"),
+      sql("SELECT SUM(CASE WHEN k in (3, 5) THEN v / 10.0 ELSE 0 END) FROM tc"))
+  }
 }