diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index d494ae7b71d1..6df3db1aed80 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -96,7 +96,7 @@ class SqlLexical extends StdLexical {
     ",", ";", "%", "{", "}", ":", "[", "]", ".", "&", "|", "^", "~", "<=>"
   )
 
-  protected override def processIdent(name: String) = {
+  override protected def processIdent(name: String) = {
     val token = normalizeKeyword(name)
     if (reserved contains token) Keyword(token) else Identifier(name)
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 7f2383dedc03..78daba33654c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -419,7 +419,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
 
   private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
 
-  protected override def nullSafeEval(input: Any): Any = cast(input)
+  override protected def nullSafeEval(input: Any): Any = cast(input)
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
     // TODO: Add support for more data types.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 54ec10444c4f..573ab917b492 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -344,6 +344,18 @@ abstract class BinaryOperator extends BinaryExpression {
   def symbol: String
 
   override def toString: String = s"($left $symbol $right)"
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    if (left.dataType != right.dataType) {
+      TypeCheckResult.TypeCheckFailure(
+        s"differing types in ${this.getClass.getSimpleName} " +
+        s"(${left.dataType} and ${right.dataType}).")
+    } else {
+      checkTypesInternal(dataType)
+    }
+  }
+
+  protected def checkTypesInternal(t: DataType): TypeCheckResult
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 8476af4a5d8d..acd34df0552d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -42,7 +42,7 @@ case class UnaryMinus(child: Expression) extends UnaryArithmetic {
     case dt: NumericType => defineCodeGen(ctx, ev, c => s"(${ctx.javaType(dt)})(-($c))")
   }
 
-  protected override def nullSafeEval(input: Any): Any = numeric.negate(input)
+  override protected def nullSafeEval(input: Any): Any = numeric.negate(input)
 }
 
 case class UnaryPositive(child: Expression) extends UnaryArithmetic {
@@ -51,7 +51,7 @@ case class UnaryPositive(child: Expression) extends UnaryArithmetic {
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String =
     defineCodeGen(ctx, ev, c => c)
 
-  protected override def nullSafeEval(input: Any): Any = input
+  override protected def nullSafeEval(input: Any): Any = input
 }
 
 /**
@@ -63,7 +63,7 @@ case class Abs(child: Expression) extends UnaryArithmetic {
 
   private lazy val numeric = TypeUtils.getNumeric(dataType)
 
-  protected override def nullSafeEval(input: Any): Any = numeric.abs(input)
+  override protected def nullSafeEval(input: Any): Any = numeric.abs(input)
 }
 
 abstract class BinaryArithmetic extends BinaryOperator {
@@ -71,18 +71,6 @@ abstract class BinaryArithmetic extends BinaryOperator {
 
   override def dataType: DataType = left.dataType
 
-  override def checkInputDataTypes(): TypeCheckResult = {
-    if (left.dataType != right.dataType) {
-      TypeCheckResult.TypeCheckFailure(
-        s"differing types in ${this.getClass.getSimpleName} " +
-        s"(${left.dataType} and ${right.dataType}).")
-    } else {
-      checkTypesInternal(dataType)
-    }
-  }
-
-  protected def checkTypesInternal(t: DataType): TypeCheckResult
-
   /** Name of the function for this expression on a [[Decimal]] type. */
   def decimalMethod: String =
     sys.error("BinaryArithmetics must override either decimalMethod or genCode")
@@ -110,12 +98,12 @@ case class Add(left: Expression, right: Expression) extends BinaryArithmetic {
   override lazy val resolved =
     childrenResolved && checkInputDataTypes().isSuccess && !DecimalType.isFixed(dataType)
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForNumericExpr(t, "operator " + symbol)
 
   private lazy val numeric = TypeUtils.getNumeric(dataType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = numeric.plus(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = numeric.plus(input1, input2)
 }
 
 case class Subtract(left: Expression, right: Expression) extends BinaryArithmetic {
@@ -125,12 +113,12 @@ case class Subtract(left: Expression, right: Expression) extends BinaryArithmeti
   override lazy val resolved =
     childrenResolved && checkInputDataTypes().isSuccess && !DecimalType.isFixed(dataType)
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForNumericExpr(t, "operator " + symbol)
 
   private lazy val numeric = TypeUtils.getNumeric(dataType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = numeric.minus(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = numeric.minus(input1, input2)
 }
 
 case class Multiply(left: Expression, right: Expression) extends BinaryArithmetic {
@@ -140,12 +128,12 @@ case class Multiply(left: Expression, right: Expression) extends BinaryArithmeti
   override lazy val resolved =
     childrenResolved && checkInputDataTypes().isSuccess && !DecimalType.isFixed(dataType)
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
    TypeUtils.checkForNumericExpr(t, "operator " + symbol)
 
   private lazy val numeric = TypeUtils.getNumeric(dataType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = numeric.times(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = numeric.times(input1, input2)
 }
 
 case class Divide(left: Expression, right: Expression) extends BinaryArithmetic {
@@ -157,7 +145,7 @@ case class Divide(left: Expression, right: Expression) extends BinaryArithmetic
   override lazy val resolved =
     childrenResolved && checkInputDataTypes().isSuccess && !DecimalType.isFixed(dataType)
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForNumericExpr(t, "operator " + symbol)
 
   private lazy val div: (Any, Any) => Any = dataType match {
@@ -223,7 +211,7 @@ case class Remainder(left: Expression, right: Expression) extends BinaryArithmet
   override lazy val resolved =
     childrenResolved && checkInputDataTypes().isSuccess && !DecimalType.isFixed(dataType)
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForNumericExpr(t, "operator " + symbol)
 
   private lazy val integral = dataType match {
@@ -283,7 +271,7 @@ case class Remainder(left: Expression, right: Expression) extends BinaryArithmet
 case class MaxOf(left: Expression, right: Expression) extends BinaryArithmetic {
   override def nullable: Boolean = left.nullable && right.nullable
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForOrderingExpr(t, "function maxOf")
 
   private lazy val ordering = TypeUtils.getOrdering(dataType)
@@ -337,7 +325,7 @@ case class MaxOf(left: Expression, right: Expression) extends BinaryArithmetic {
 case class MinOf(left: Expression, right: Expression) extends BinaryArithmetic {
   override def nullable: Boolean = left.nullable && right.nullable
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForOrderingExpr(t, "function minOf")
 
   private lazy val ordering = TypeUtils.getOrdering(dataType)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitwise.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitwise.scala
index 2d47124d247e..b7fa7d5af763 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitwise.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitwise.scala
@@ -31,7 +31,7 @@ import org.apache.spark.sql.types._
 case class BitwiseAnd(left: Expression, right: Expression) extends BinaryArithmetic {
   override def symbol: String = "&"
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForBitwiseExpr(t, "operator " + symbol)
 
   private lazy val and: (Any, Any) => Any = dataType match {
@@ -45,7 +45,7 @@ case class BitwiseAnd(left: Expression, right: Expression) extends BinaryArithme
       ((evalE1: Long, evalE2: Long) => evalE1 & evalE2).asInstanceOf[(Any, Any) => Any]
   }
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = and(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = and(input1, input2)
 }
 
 /**
@@ -56,7 +56,7 @@ case class BitwiseAnd(left: Expression, right: Expression) extends BinaryArithme
 case class BitwiseOr(left: Expression, right: Expression) extends BinaryArithmetic {
   override def symbol: String = "|"
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForBitwiseExpr(t, "operator " + symbol)
 
   private lazy val or: (Any, Any) => Any = dataType match {
@@ -70,7 +70,7 @@ case class BitwiseOr(left: Expression, right: Expression) extends BinaryArithmet
       ((evalE1: Long, evalE2: Long) => evalE1 | evalE2).asInstanceOf[(Any, Any) => Any]
   }
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = or(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = or(input1, input2)
 }
 
 /**
@@ -81,7 +81,7 @@ case class BitwiseOr(left: Expression, right: Expression) extends BinaryArithmet
 case class BitwiseXor(left: Expression, right: Expression) extends BinaryArithmetic {
   override def symbol: String = "^"
 
-  protected def checkTypesInternal(t: DataType) =
+  override protected def checkTypesInternal(t: DataType) =
     TypeUtils.checkForBitwiseExpr(t, "operator " + symbol)
 
   private lazy val xor: (Any, Any) => Any = dataType match {
@@ -95,7 +95,7 @@ case class BitwiseXor(left: Expression, right: Expression) extends BinaryArithme
       ((evalE1: Long, evalE2: Long) => evalE1 ^ evalE2).asInstanceOf[(Any, Any) => Any]
   }
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = xor(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = xor(input1, input2)
 }
 
 /**
@@ -122,5 +122,5 @@ case class BitwiseNot(child: Expression) extends UnaryArithmetic {
     defineCodeGen(ctx, ev, c => s"(${ctx.javaType(dataType)}) ~($c)")
   }
 
-  protected override def nullSafeEval(input: Any): Any = not(input)
+  override protected def nullSafeEval(input: Any): Any = not(input)
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index 73cc930c4583..8a82084e65da 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -130,7 +130,7 @@ case class GetStructField(child: Expression, field: StructField, ordinal: Int)
   override def dataType: DataType = field.dataType
   override def nullable: Boolean = child.nullable || field.nullable
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     input.asInstanceOf[InternalRow](ordinal)
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
@@ -160,7 +160,7 @@ case class GetArrayStructFields(
   override def dataType: DataType = ArrayType(field.dataType, containsNull)
   override def nullable: Boolean = child.nullable || containsNull || field.nullable
 
-  protected override def nullSafeEval(input: Any): Any = {
+  override protected def nullSafeEval(input: Any): Any = {
     input.asInstanceOf[Seq[InternalRow]].map { row =>
       if (row == null) null else row(ordinal)
     }
@@ -204,7 +204,7 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
 
   override def dataType: DataType = child.dataType.asInstanceOf[ArrayType].elementType
 
-  protected override def nullSafeEval(value: Any, ordinal: Any): Any = {
+  override protected def nullSafeEval(value: Any, ordinal: Any): Any = {
     // TODO: consider using Array[_] for ArrayType child to avoid
     // boxing of primitives
     val baseValue = value.asInstanceOf[Seq[_]]
@@ -248,7 +248,7 @@ case class GetMapValue(child: Expression, key: Expression)
 
   override def dataType: DataType = child.dataType.asInstanceOf[MapType].valueType
 
-  protected override def nullSafeEval(value: Any, ordinal: Any): Any = {
+  override protected def nullSafeEval(value: Any, ordinal: Any): Any = {
     val baseValue = value.asInstanceOf[Map[Any, _]]
     baseValue.get(ordinal).orNull
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
index 2fa74b4ffc5d..f36be4b78ba2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
@@ -30,7 +30,7 @@ case class UnscaledValue(child: Expression) extends UnaryExpression {
   override def dataType: DataType = LongType
   override def toString: String = s"UnscaledValue($child)"
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     input.asInstanceOf[Decimal].toUnscaledLong
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
@@ -48,7 +48,7 @@ case class MakeDecimal(child: Expression, precision: Int, scale: Int) extends Un
   override def dataType: DataType = DecimalType(precision, scale)
   override def toString: String = s"MakeDecimal($child,$precision,$scale)"
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     Decimal(input.asInstanceOf[Long], precision, scale)
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
index c31890e27fb5..98edb576a00f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
@@ -62,7 +62,7 @@ abstract class UnaryMathExpression(f: Double => Double, name: String)
   override def nullable: Boolean = true
   override def toString: String = s"$name($child)"
 
-  protected override def nullSafeEval(input: Any): Any = {
+  override protected def nullSafeEval(input: Any): Any = {
     val result = f(input.asInstanceOf[Double])
     if (result.isNaN) null else result
   }
@@ -97,7 +97,7 @@ abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)
 
   override def dataType: DataType = DoubleType
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     val result = f(input1.asInstanceOf[Double], input2.asInstanceOf[Double])
     if (result.isNaN) null else result
   }
@@ -183,7 +183,7 @@ case class Factorial(child: Expression) extends UnaryExpression with ExpectsInpu
   // If the value not in the range of [0, 20], it still will be null, so set it to be true here.
   override def nullable: Boolean = true
 
-  protected override def nullSafeEval(input: Any): Any = {
+  override protected def nullSafeEval(input: Any): Any = {
     val value = input.asInstanceOf[jl.Integer]
     if (value > 20 || value < 0) {
       null
@@ -256,7 +256,7 @@ case class Bin(child: Expression)
   override def inputTypes: Seq[DataType] = Seq(LongType)
   override def dataType: DataType = StringType
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     UTF8String.fromString(jl.Long.toBinaryString(input.asInstanceOf[Long]))
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
@@ -293,7 +293,7 @@ case class Hex(child: Expression) extends UnaryExpression with ExpectsInputTypes
 
   override def dataType: DataType = StringType
 
-  protected override def nullSafeEval(num: Any): Any = child.dataType match {
+  override protected def nullSafeEval(num: Any): Any = child.dataType match {
     case LongType => hex(num.asInstanceOf[Long])
     case BinaryType => hex(num.asInstanceOf[Array[Byte]])
     case StringType => hex(num.asInstanceOf[UTF8String].getBytes)
@@ -337,7 +337,7 @@ case class Unhex(child: Expression) extends UnaryExpression with ExpectsInputTyp
   override def nullable: Boolean = true
   override def dataType: DataType = BinaryType
 
-  protected override def nullSafeEval(num: Any): Any =
+  override protected def nullSafeEval(num: Any): Any =
     unhex(num.asInstanceOf[UTF8String].getBytes)
 
   private[this] def unhex(bytes: Array[Byte]): Array[Byte] = {
@@ -383,7 +383,7 @@ case class Unhex(child: Expression) extends UnaryExpression with ExpectsInputTyp
 case class Atan2(left: Expression, right: Expression)
   extends BinaryMathExpression(math.atan2, "ATAN2") {
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     // With codegen, the values returned by -0.0 and 0.0 are different. Handled with +0.0
     val result = math.atan2(input1.asInstanceOf[Double] + 0.0, input2.asInstanceOf[Double] + 0.0)
     if (result.isNaN) null else result
@@ -423,7 +423,7 @@ case class ShiftLeft(left: Expression, right: Expression)
 
   override def dataType: DataType = left.dataType
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     input1 match {
       case l: jl.Long => l << input2.asInstanceOf[jl.Integer]
       case i: jl.Integer => i << input2.asInstanceOf[jl.Integer]
@@ -449,7 +449,7 @@ case class ShiftRight(left: Expression, right: Expression)
 
   override def dataType: DataType = left.dataType
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     input1 match {
       case l: jl.Long => l >> input2.asInstanceOf[jl.Integer]
       case i: jl.Integer => i >> input2.asInstanceOf[jl.Integer]
@@ -475,7 +475,7 @@ case class ShiftRightUnsigned(left: Expression, right: Expression)
 
   override def dataType: DataType = left.dataType
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     input1 match {
       case l: jl.Long => l >>> input2.asInstanceOf[jl.Integer]
       case i: jl.Integer => i >>> input2.asInstanceOf[jl.Integer]
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
index 3b59cd431b87..7cdabb80b7bf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
@@ -37,7 +37,7 @@ case class Md5(child: Expression) extends UnaryExpression with ExpectsInputTypes
 
   override def inputTypes: Seq[DataType] = Seq(BinaryType)
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     UTF8String.fromString(DigestUtils.md5Hex(input.asInstanceOf[Array[Byte]]))
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
@@ -61,7 +61,7 @@ case class Sha2(left: Expression, right: Expression)
 
   override def inputTypes: Seq[DataType] = Seq(BinaryType, IntegerType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     val bitLength = input2.asInstanceOf[Int]
     val input = input1.asInstanceOf[Array[Byte]]
     bitLength match {
@@ -124,7 +124,7 @@ case class Sha1(child: Expression) extends UnaryExpression with ExpectsInputType
 
   override def inputTypes: Seq[DataType] = Seq(BinaryType)
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     UTF8String.fromString(DigestUtils.shaHex(input.asInstanceOf[Array[Byte]]))
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
@@ -144,7 +144,7 @@ case class Crc32(child: Expression) extends UnaryExpression with ExpectsInputTyp
 
   override def inputTypes: Seq[DataType] = Seq(BinaryType)
 
-  protected override def nullSafeEval(input: Any): Any = {
+  override protected def nullSafeEval(input: Any): Any = {
     val checksum = new CRC32
     checksum.update(input.asInstanceOf[Array[Byte]], 0, input.asInstanceOf[Array[Byte]].length)
     checksum.getValue
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index f74fd0461971..4db002559ec5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -75,7 +75,7 @@ case class Not(child: Expression) extends UnaryExpression with Predicate with Ex
 
   override def inputTypes: Seq[DataType] = Seq(BooleanType)
 
-  protected override def nullSafeEval(input: Any): Any = !input.asInstanceOf[Boolean]
+  override protected def nullSafeEval(input: Any): Any = !input.asInstanceOf[Boolean]
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
     defineCodeGen(ctx, ev, c => s"!($c)")
@@ -213,18 +213,6 @@ case class Or(left: Expression, right: Expression)
 abstract class BinaryComparison extends BinaryOperator with Predicate {
   self: Product =>
 
-  override def checkInputDataTypes(): TypeCheckResult = {
-    if (left.dataType != right.dataType) {
-      TypeCheckResult.TypeCheckFailure(
-        s"differing types in ${this.getClass.getSimpleName} " +
-        s"(${left.dataType} and ${right.dataType}).")
-    } else {
-      checkTypesInternal(dataType)
-    }
-  }
-
-  protected def checkTypesInternal(t: DataType): TypeCheckResult
-
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
     if (ctx.isPrimitiveType(left.dataType)) {
       // faster version
@@ -253,7 +241,7 @@ case class EqualTo(left: Expression, right: Expression) extends BinaryComparison
 
   override protected def checkTypesInternal(t: DataType) = TypeCheckResult.TypeCheckSuccess
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     if (left.dataType != BinaryType) input1 == input2
     else java.util.Arrays.equals(input1.asInstanceOf[Array[Byte]], input2.asInstanceOf[Array[Byte]])
   }
@@ -306,7 +294,7 @@ case class LessThan(left: Expression, right: Expression) extends BinaryCompariso
 
   private lazy val ordering = TypeUtils.getOrdering(left.dataType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.lt(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = ordering.lt(input1, input2)
 }
 
 case class LessThanOrEqual(left: Expression, right: Expression) extends BinaryComparison {
@@ -317,7 +305,7 @@ case class LessThanOrEqual(left: Expression, right: Expression) extends BinaryCo
 
   private lazy val ordering = TypeUtils.getOrdering(left.dataType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.lteq(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = ordering.lteq(input1, input2)
 }
 
 case class GreaterThan(left: Expression, right: Expression) extends BinaryComparison {
@@ -328,7 +316,7 @@ case class GreaterThan(left: Expression, right: Expression) extends BinaryCompar
 
   private lazy val ordering = TypeUtils.getOrdering(left.dataType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.gt(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = ordering.gt(input1, input2)
 }
 
 case class GreaterThanOrEqual(left: Expression, right: Expression) extends BinaryComparison {
@@ -339,5 +327,5 @@ case class GreaterThanOrEqual(left: Expression, right: Expression) extends Binar
 
   private lazy val ordering = TypeUtils.getOrdering(left.dataType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = ordering.gteq(input1, input2)
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = ordering.gteq(input1, input2)
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
index 49b2026364cd..0c85e5c92e57 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
@@ -185,7 +185,7 @@ case class CountSet(child: Expression) extends UnaryExpression {
 
   override def dataType: DataType = LongType
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     input.asInstanceOf[OpenHashSet[Any]].size.toLong
 
   override def toString: String = s"$child.count()"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
index f64899c1ed84..77ad7cf7cca7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
@@ -53,7 +53,7 @@ trait StringRegexExpression extends ExpectsInputTypes {
 
   protected def pattern(str: String) = if (cache == null) compile(str) else cache
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     val regex = pattern(input2.asInstanceOf[UTF8String].toString())
     if(regex == null) {
       null
@@ -113,7 +113,7 @@ trait String2StringExpression extends ExpectsInputTypes {
   override def dataType: DataType = StringType
   override def inputTypes: Seq[DataType] = Seq(StringType)
 
-  protected override def nullSafeEval(input: Any): Any =
+  override protected def nullSafeEval(input: Any): Any =
     convert(input.asInstanceOf[UTF8String])
 }
 
@@ -149,7 +149,7 @@ trait StringComparison extends ExpectsInputTypes {
 
   override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any =
+  override protected def nullSafeEval(input1: Any, input2: Any): Any =
     compare(input1.asInstanceOf[UTF8String], input2.asInstanceOf[UTF8String])
 
   override def toString: String = s"$nodeName($left, $right)"
@@ -559,7 +559,7 @@ case class StringLength(child: Expression) extends UnaryExpression with ExpectsI
   override def dataType: DataType = IntegerType
   override def inputTypes: Seq[DataType] = Seq(StringType)
 
-  protected override def nullSafeEval(string: Any): Any =
+  override protected def nullSafeEval(string: Any): Any =
     string.asInstanceOf[UTF8String].numChars
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
@@ -579,7 +579,7 @@ case class Levenshtein(left: Expression, right: Expression) extends BinaryExpres
 
   override def dataType: DataType = IntegerType
 
-  protected override def nullSafeEval(leftValue: Any, rightValue: Any): Any =
+  override protected def nullSafeEval(leftValue: Any, rightValue: Any): Any =
     leftValue.asInstanceOf[UTF8String].levenshteinDistance(rightValue.asInstanceOf[UTF8String])
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
@@ -595,7 +595,7 @@ case class Ascii(child: Expression) extends UnaryExpression with ExpectsInputTyp
   override def dataType: DataType = IntegerType
   override def inputTypes: Seq[DataType] = Seq(StringType)
 
-  protected override def nullSafeEval(string: Any): Any = {
+  override protected def nullSafeEval(string: Any): Any = {
     val bytes = string.asInstanceOf[UTF8String].getBytes
     if (bytes.length > 0) {
       bytes(0).asInstanceOf[Int]
@@ -612,7 +612,7 @@ case class Base64(child: Expression) extends UnaryExpression with ExpectsInputTy
   override def dataType: DataType = StringType
   override def inputTypes: Seq[DataType] = Seq(BinaryType)
 
-  protected override def nullSafeEval(bytes: Any): Any = {
+  override protected def nullSafeEval(bytes: Any): Any = {
     UTF8String.fromBytes(
       org.apache.commons.codec.binary.Base64.encodeBase64(
         bytes.asInstanceOf[Array[Byte]]))
@@ -626,7 +626,7 @@ case class UnBase64(child: Expression) extends UnaryExpression with ExpectsInput
   override def dataType: DataType = BinaryType
   override def inputTypes: Seq[DataType] = Seq(StringType)
 
-  protected override def nullSafeEval(string: Any): Any =
+  override protected def nullSafeEval(string: Any): Any =
     org.apache.commons.codec.binary.Base64.decodeBase64(string.asInstanceOf[UTF8String].toString)
 }
 
@@ -643,7 +643,7 @@ case class Decode(bin: Expression, charset: Expression)
   override def dataType: DataType = StringType
   override def inputTypes: Seq[DataType] = Seq(BinaryType, StringType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     val fromCharset = input2.asInstanceOf[UTF8String].toString
     UTF8String.fromString(new String(input1.asInstanceOf[Array[Byte]], fromCharset))
   }
@@ -662,7 +662,7 @@ case class Encode(value: Expression, charset: Expression)
   override def dataType: DataType = BinaryType
   override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
 
-  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
+  override protected def nullSafeEval(input1: Any, input2: Any): Any = {
     val toCharset = input2.asInstanceOf[UTF8String].toString
     input1.asInstanceOf[UTF8String].toString.getBytes(toCharset)
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/package.scala
index ea6aa1850db4..e2683ae25fbd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/package.scala
@@ -35,7 +35,7 @@ import org.apache.spark.Logging
  */
 package object trees extends Logging {
   // Since we want tree nodes to be lightweight, we create one logger for all treenode instances.
-  protected override def logName = "catalyst.trees"
+  override protected def logName = "catalyst.trees"
 
   /**
    * A [[TreeNode]] companion for reference equality for Hash based Collection.
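For reference, the sketch below is a minimal, self-contained Scala mock of the type-check consolidation this patch makes in BinaryOperator: the shared checkInputDataTypes rejects mismatched left/right types, and each operator only supplies checkTypesInternal. The TypeCheckResult/DataType stand-ins and the AddLike operator are illustrative assumptions, not Spark's actual classes.

// Simplified stand-ins for illustration only; not Spark's real Catalyst types.
sealed trait TypeCheckResult
case object TypeCheckSuccess extends TypeCheckResult
final case class TypeCheckFailure(message: String) extends TypeCheckResult

sealed trait DataType
case object IntType extends DataType
case object StringType extends DataType

abstract class BinaryOperator {
  def left: DataType
  def right: DataType
  def dataType: DataType = left

  // Shared structural check: both operands must have the same type,
  // then the operator-specific constraint is applied.
  def checkInputDataTypes(): TypeCheckResult =
    if (left != right) {
      TypeCheckFailure(s"differing types in ${this.getClass.getSimpleName} ($left and $right).")
    } else {
      checkTypesInternal(dataType)
    }

  // Subclasses only describe which data types they accept.
  protected def checkTypesInternal(t: DataType): TypeCheckResult
}

// A hypothetical numeric operator that accepts IntType only.
final case class AddLike(left: DataType, right: DataType) extends BinaryOperator {
  override protected def checkTypesInternal(t: DataType): TypeCheckResult =
    if (t == IntType) TypeCheckSuccess else TypeCheckFailure(s"operator + does not accept $t")
}

object TypeCheckDemo extends App {
  println(AddLike(IntType, IntType).checkInputDataTypes())        // TypeCheckSuccess
  println(AddLike(IntType, StringType).checkInputDataTypes())     // differing types in AddLike ...
  println(AddLike(StringType, StringType).checkInputDataTypes())  // operator + does not accept StringType
}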