From ec3d55296abc9f355a0f0db0f40e04abb4b58d94 Mon Sep 17 00:00:00 2001 From: Weiqing Yang Date: Tue, 11 Oct 2016 23:14:48 -0700 Subject: [PATCH 1/2] [SPARK-17108][SQL]: Fix BIGINT and INT comparison failure in spark sql --- .../org/apache/spark/sql/types/DataType.scala | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala index 312585df1516b..e89c04004a012 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala @@ -78,6 +78,17 @@ abstract class DataType extends AbstractDataType { private[spark] def sameType(other: DataType): Boolean = DataType.equalsIgnoreNullability(this, other) + /** + * Check if two integers are compatible. Returns true if `right` can be converted to `left`. + */ + private[spark] def isCompatibleIntegralType(left: DataType, right: DataType): Boolean = { + (left, right) match { + case (l, r) => l.isInstanceOf[IntegralType] && r.isInstanceOf[IntegralType] && + l.defaultSize >= r.defaultSize + case _ => false + } + } + /** * Returns the same data type but set all nullability fields are true * (`StructField.nullable`, `ArrayType.containsNull`, and `MapType.valueContainsNull`). 
@@ -91,7 +102,8 @@ abstract class DataType extends AbstractDataType { override private[sql] def defaultConcreteType: DataType = this - override private[sql] def acceptsType(other: DataType): Boolean = sameType(other) + override private[sql] def acceptsType(other: DataType): Boolean = sameType(other) || + isCompatibleIntegralType(this, other) } From 8707caf6149f4dbc832878d17029fcfe94c3575b Mon Sep 17 00:00:00 2001 From: Weiqing Yang Date: Fri, 14 Oct 2016 17:29:00 -0700 Subject: [PATCH 2/2] Added implicit casting to the GetMapValue --- .../expressions/complexTypeExtractors.scala | 2 +- .../org/apache/spark/sql/types/DataType.scala | 14 +------------- .../spark/sql/hive/execution/SQLQuerySuite.scala | 11 +++++++++++ 3 files changed, 13 insertions(+), 14 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala index abb5594bfa7f8..0c256c3d890f1 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala @@ -260,7 +260,7 @@ case class GetArrayItem(child: Expression, ordinal: Expression) * We need to do type checking here as `key` expression maybe unresolved. 
*/ case class GetMapValue(child: Expression, key: Expression) - extends BinaryExpression with ExpectsInputTypes with ExtractValue { + extends BinaryExpression with ImplicitCastInputTypes with ExtractValue { private def keyType = child.dataType.asInstanceOf[MapType].keyType diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala index e89c04004a012..312585df1516b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala @@ -78,17 +78,6 @@ abstract class DataType extends AbstractDataType { private[spark] def sameType(other: DataType): Boolean = DataType.equalsIgnoreNullability(this, other) - /** - * Check if two integers are compatible. Returns true if `right` can be converted to `left`. - */ - private[spark] def isCompatibleIntegralType(left: DataType, right: DataType): Boolean = { - (left, right) match { - case (l, r) => l.isInstanceOf[IntegralType] && r.isInstanceOf[IntegralType] && - l.defaultSize >= r.defaultSize - case _ => false - } - } - /** * Returns the same data type but set all nullability fields are true * (`StructField.nullable`, `ArrayType.containsNull`, and `MapType.valueContainsNull`). 
@@ -102,8 +91,7 @@ abstract class DataType extends AbstractDataType { override private[sql] def defaultConcreteType: DataType = this - override private[sql] def acceptsType(other: DataType): Boolean = sameType(other) || - isCompatibleIntegralType(this, other) + override private[sql] def acceptsType(other: DataType): Boolean = sameType(other) } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala index 6f2a16662bf10..c186d8878902f 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala @@ -1886,6 +1886,17 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton { } } + test("SPARK-17108: Fix BIGINT and INT comparison failure in spark sql") { + sql("create table t1(a map<int, array<string>>)") + sql("select * from t1 where a[1] is not null") + + sql("create table t2(a map<bigint, array<string>>)") + sql("select * from t2 where a[1] is not null") + + sql("create table t3(a map<bigint, array<string>>)") + sql("select * from t3 where a[1L] is not null") + } + def testCommandAvailable(command: String): Boolean = { val attempt = Try(Process(command).run(ProcessLogger(_ => ())).exitValue()) attempt.isSuccess && attempt.get == 0