From 64d55c4c86273f397d99fd29fe85735523877539 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Sat, 16 Dec 2017 01:07:23 -0800
Subject: [PATCH 1/3] fix.

---
 .../spark/sql/catalyst/expressions/decimalExpressions.scala   | 2 ++
 .../org/apache/spark/sql/catalyst/optimizer/expressions.scala | 1 -
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala
index 752dea23e1f7a..db1579ba28671 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala
@@ -70,10 +70,12 @@ case class MakeDecimal(child: Expression, precision: Int, scale: Int) extends Un
 case class PromotePrecision(child: Expression) extends UnaryExpression {
   override def dataType: DataType = child.dataType
   override def eval(input: InternalRow): Any = child.eval(input)
+  /** Just a simple pass-through for code generation. */
   override def genCode(ctx: CodegenContext): ExprCode = child.genCode(ctx)
   override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = ev.copy("")
   override def prettyName: String = "promote_precision"
   override def sql: String = child.sql
+  override lazy val canonicalized: Expression = child.canonicalized
 }
 
 /**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
index 6305b6c84bae3..85295aff19808 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
@@ -614,7 +614,6 @@ object SimplifyCasts extends Rule[LogicalPlan] {
 object RemoveDispensableExpressions extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = plan transformAllExpressions {
     case UnaryPositive(child) => child
-    case PromotePrecision(child) => child
   }
 }
 
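Note on the change above: PromotePrecision is a no-op wrapper that the analyzer inserts around decimal operands; eval, codegen, and sql all just forward to the child. Overriding canonicalized to return child.canonicalized makes the wrapper invisible to semantic-equality checks directly, instead of relying on the optimizer rule RemoveDispensableExpressions (removed above) to strip it first. Below is a minimal standalone sketch of that idea; Expr, Attr, and Promote are hypothetical stand-ins, not Spark's real Expression API.

// Standalone sketch only: hypothetical stand-ins for Expression,
// AttributeReference, and PromotePrecision.
sealed trait Expr {
  // Default: an expression is its own canonical form.
  def canonicalized: Expr = this
  // Two expressions are "semantically equal" when their canonical forms match.
  def semanticEquals(other: Expr): Boolean = canonicalized == other.canonicalized
}

case class Attr(name: String) extends Expr

// Like PromotePrecision, this wrapper adds nothing at evaluation time,
// so it should also disappear when expressions are compared.
case class Promote(child: Expr) extends Expr {
  override def canonicalized: Expr = child.canonicalized
}

object CanonicalizedDemo extends App {
  val a = Attr("a")
  // With the override, the wrapper no longer blocks equality in either direction.
  assert(Promote(a).semanticEquals(a))
  assert(a.semanticEquals(Promote(a)))
  println("Promote(a) and a canonicalize to the same expression")
}
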
From 571ed448b1ecb4df3d185e1c4b5390c1b9206de0 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Sat, 16 Dec 2017 22:10:06 -0800
Subject: [PATCH 2/3] fix.

---
 .../spark/sql/catalyst/analysis/StreamingJoinHelper.scala | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
index 072dc954879ca..f67c1adfbbe45 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
@@ -20,7 +20,10 @@ package org.apache.spark.sql.catalyst.analysis
 import scala.util.control.NonFatal
 
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.catalyst.expressions.{Add, AttributeReference, AttributeSet, Cast, CheckOverflow, Expression, ExpressionSet, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual, Literal, Multiply, PreciseTimestampConversion, PredicateHelper, Subtract, TimeAdd, TimeSub, UnaryMinus}
+import org.apache.spark.sql.catalyst.expressions.{Add, AttributeReference, AttributeSet, Cast, CheckOverflow, Expression}
+import org.apache.spark.sql.catalyst.expressions.{ExpressionSet, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual}
+import org.apache.spark.sql.catalyst.expressions.{Literal, Multiply, PreciseTimestampConversion, PredicateHelper}
+import org.apache.spark.sql.catalyst.expressions.{PromotePrecision, Subtract, TimeAdd, TimeSub, UnaryMinus}
 import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
 import org.apache.spark.sql.catalyst.plans.logical.{EventTimeWatermark, LogicalPlan}
 import org.apache.spark.sql.catalyst.plans.logical.EventTimeWatermark._
@@ -238,6 +241,8 @@ object StreamingJoinHelper extends PredicateHelper with Logging {
         collect(child, !negate)
       case CheckOverflow(child, _) =>
         collect(child, negate)
+      case PromotePrecision(child) =>
+        collect(child, negate)
       case Cast(child, dataType, _) =>
         dataType match {
           case _: NumericType | _: TimestampType => collect(child, negate)
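Note on the change above: the new PromotePrecision case in collect mirrors the existing CheckOverflow case. When StreamingJoinHelper flattens a join condition into signed watermark terms, a precision-promotion wrapper contributes nothing by itself and should simply recurse into its child with the same sign. A simplified, self-contained sketch of that pass-through pattern follows; the types are illustrative, not the real catalyst expressions.

// Illustrative only: Lit/Add/Sub/Neg/PassThrough stand in for catalyst's
// Literal, Add, Subtract, UnaryMinus, and wrappers like PromotePrecision.
sealed trait E
case class Lit(v: Double) extends E
case class Add(l: E, r: E) extends E
case class Sub(l: E, r: E) extends E
case class Neg(c: E) extends E
case class PassThrough(c: E) extends E

object CollectDemo extends App {
  // Flattens an arithmetic expression into signed terms; a wrapper node
  // recurses into its child without flipping the sign (the case the patch adds).
  def collect(e: E, negate: Boolean): Seq[Double] = e match {
    case Lit(v)         => Seq(if (negate) -v else v)
    case Add(l, r)      => collect(l, negate) ++ collect(r, negate)
    case Sub(l, r)      => collect(l, negate) ++ collect(r, !negate)
    case Neg(c)         => collect(c, !negate)
    case PassThrough(c) => collect(c, negate)
  }

  // PassThrough(10.0) - (-5.0) flattens to the terms 10.0 and 5.0.
  val terms = collect(Sub(PassThrough(Lit(10.0)), Neg(Lit(5.0))), negate = false)
  assert(terms == Seq(10.0, 5.0))
  println(terms)
}
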
From 3f27c4b839a352716c204d1580f17d6625f7bee4 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Mon, 18 Dec 2017 17:14:51 -0800
Subject: [PATCH 3/3] address comments.

---
 .../spark/sql/catalyst/analysis/StreamingJoinHelper.scala | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
index f67c1adfbbe45..7a0aa08289efa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
@@ -20,10 +20,7 @@ package org.apache.spark.sql.catalyst.analysis
 import scala.util.control.NonFatal
 
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.catalyst.expressions.{Add, AttributeReference, AttributeSet, Cast, CheckOverflow, Expression}
-import org.apache.spark.sql.catalyst.expressions.{ExpressionSet, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual}
-import org.apache.spark.sql.catalyst.expressions.{Literal, Multiply, PreciseTimestampConversion, PredicateHelper}
-import org.apache.spark.sql.catalyst.expressions.{PromotePrecision, Subtract, TimeAdd, TimeSub, UnaryMinus}
+import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
 import org.apache.spark.sql.catalyst.plans.logical.{EventTimeWatermark, LogicalPlan}
 import org.apache.spark.sql.catalyst.plans.logical.EventTimeWatermark._