From 0d268e956f9ceb6a574895760afb0e4ebe7b89c7 Mon Sep 17 00:00:00 2001
From: Anton Okolnychyi
Date: Tue, 22 Jan 2019 11:58:35 +0000
Subject: [PATCH] [SPARK-26706][SQL] Fix Cast$mayTruncate for bytes

---
 .../spark/sql/catalyst/expressions/Cast.scala |  2 +-
 .../sql/catalyst/expressions/CastSuite.scala  | 36 +++++++++++++++++++
 .../org/apache/spark/sql/DatasetSuite.scala   |  9 +++++
 3 files changed, 46 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index ff6a68b290206..a6926d8996bbe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -131,7 +131,7 @@ object Cast {
   private def illegalNumericPrecedence(from: DataType, to: DataType): Boolean = {
     val fromPrecedence = TypeCoercion.numericPrecedence.indexOf(from)
     val toPrecedence = TypeCoercion.numericPrecedence.indexOf(to)
-    toPrecedence > 0 && fromPrecedence > toPrecedence
+    toPrecedence >= 0 && fromPrecedence > toPrecedence
   }
 
   /**
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index 94dee7ea048c3..11956e1b7f199 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -25,6 +25,7 @@ import scala.util.Random
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.TypeCoercion.numericPrecedence
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
@@ -955,4 +956,39 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
     val ret6 = cast(Literal.create((1, Map(1 -> "a", 2 -> "b", 3 -> "c"))), StringType)
     checkEvaluation(ret6, "[1, [1 -> a, 2 -> b, 3 -> c]]")
   }
+
+  test("SPARK-26706: Fix Cast.mayTruncate for bytes") {
+    assert(!Cast.mayTruncate(ByteType, ByteType))
+    assert(!Cast.mayTruncate(DecimalType.ByteDecimal, ByteType))
+    assert(Cast.mayTruncate(ShortType, ByteType))
+    assert(Cast.mayTruncate(IntegerType, ByteType))
+    assert(Cast.mayTruncate(LongType, ByteType))
+    assert(Cast.mayTruncate(FloatType, ByteType))
+    assert(Cast.mayTruncate(DoubleType, ByteType))
+    assert(Cast.mayTruncate(DecimalType.IntDecimal, ByteType))
+  }
+
+  test("canSafeCast and mayTruncate must be consistent for numeric types") {
+    import DataTypeTestUtils._
+
+    def isCastSafe(from: NumericType, to: NumericType): Boolean = (from, to) match {
+      case (_, dt: DecimalType) => dt.isWiderThan(from)
+      case (dt: DecimalType, _) => dt.isTighterThan(to)
+      case _ => numericPrecedence.indexOf(from) <= numericPrecedence.indexOf(to)
+    }
+
+    numericTypes.foreach { from =>
+      val (safeTargetTypes, unsafeTargetTypes) = numericTypes.partition(to => isCastSafe(from, to))
+
+      safeTargetTypes.foreach { to =>
+        assert(Cast.canSafeCast(from, to), s"It should be possible to safely cast $from to $to")
+        assert(!Cast.mayTruncate(from, to), s"No truncation is expected when casting $from to $to")
+      }
+
+      unsafeTargetTypes.foreach { to =>
+        assert(!Cast.canSafeCast(from, to), s"It shouldn't be possible to safely cast $from to $to")
+        assert(Cast.mayTruncate(from, to), s"Truncation is expected when casting $from to $to")
+      }
+    }
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index ab8294838e755..105cec5d0d9ce 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
@@ -1678,6 +1678,15 @@ class DatasetSuite extends QueryTest with SharedSQLContext {
     assert(serializer.serializer.size == 1)
     checkAnswer(ds, Seq(Row("a"), Row("b"), Row("c")))
   }
+
+  test("SPARK-26706: Fix Cast.mayTruncate for bytes") {
+    val thrownException = intercept[AnalysisException] {
+      spark.range(Long.MaxValue - 10, Long.MaxValue).as[Byte]
+        .map(b => b - 1)
+        .collect()
+    }
+    assert(thrownException.message.contains("Cannot up cast `id` from bigint to tinyint"))
+  }
 }
 
 case class TestDataUnion(x: Int, y: Int, z: Int)
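
Reviewer note, not part of the patch itself: a minimal sketch of the off-by-one that the one-character fix addresses. It relies on what the diff already shows: TypeCoercion.numericPrecedence orders the numeric types from narrowest to widest (which is why isCastSafe in the new suite compares indices), so ByteType sits at index 0 and the old `toPrecedence > 0` guard could never fire for casts to bytes. The object name below is hypothetical, spark-catalyst 2.4.x is assumed on the classpath, and numericPrecedence is a catalyst-internal value, so treat this as a scratch illustration rather than supported API usage.

    import org.apache.spark.sql.catalyst.analysis.TypeCoercion
    import org.apache.spark.sql.types.{ByteType, LongType}

    // Hypothetical scratch object reproducing the predicate before and after the fix.
    object MayTruncateSketch {
      def main(args: Array[String]): Unit = {
        // numericPrecedence = IndexedSeq(ByteType, ShortType, IntegerType,
        // LongType, FloatType, DoubleType), so ByteType is at index 0.
        val fromPrecedence = TypeCoercion.numericPrecedence.indexOf(LongType) // 3
        val toPrecedence = TypeCoercion.numericPrecedence.indexOf(ByteType)   // 0

        // Old guard: `toPrecedence > 0` is false for any cast targeting ByteType,
        // so a narrowing long-to-byte cast was never reported as truncating.
        println(toPrecedence > 0 && fromPrecedence > toPrecedence)   // false

        // Fixed guard: `>= 0` only rejects types absent from the precedence list
        // (indexOf returns -1 for those), so the narrowing cast is now flagged.
        println(toPrecedence >= 0 && fromPrecedence > toPrecedence)  // true
      }
    }

Since Cast.mayTruncate rests on illegalNumericPrecedence (the only production change in this patch), the analyzer previously accepted the up-cast in spark.range(Long.MaxValue - 10, Long.MaxValue).as[Byte] and truncated the longs at runtime; the new DatasetSuite test pins down the fixed behavior, where analysis now fails with "Cannot up cast `id` from bigint to tinyint".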