diff --git a/docs/sql-keywords.md b/docs/sql-keywords.md
index 9ad6984913a27..4f50ba6d440cb 100644
--- a/docs/sql-keywords.md
+++ b/docs/sql-keywords.md
@@ -19,7 +19,7 @@ license: |
   limitations under the License.
 ---
 
-When `spark.sql.dialect=PostgreSQL` or keep default `spark.sql.dialect=Spark` with setting `spark.sql.dialect.spark.ansi.enabled` to true, Spark SQL will use the ANSI mode parser.
+When `spark.sql.ansi.enabled` is true, Spark SQL will use the ANSI mode parser.
 In this mode, Spark SQL has two kinds of keywords:
 * Reserved keywords: Keywords that are reserved and can't be used as identifiers for table, view, column, function, alias, etc.
 * Non-reserved keywords: Keywords that have a special meaning only in particular contexts and can be used as identifiers in other contexts. For example, `SELECT 1 WEEK` is an interval literal, but WEEK can be used as an identifier in other places.
@@ -28,7 +28,7 @@ When the ANSI mode is disabled, Spark SQL has two kinds of keywords:
 * Non-reserved keywords: Same definition as the one when the ANSI mode is enabled.
 * Strict-non-reserved keywords: A strict version of non-reserved keywords, which cannot be used as a table alias.
 
-By default `spark.sql.dialect.spark.ansi.enabled` is false.
+By default, `spark.sql.ansi.enabled` is false.
 
 Below is a list of all the keywords in Spark SQL.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 659e4a5c86ec1..e292514667500 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -232,7 +232,6 @@ class Analyzer(
       ResolveBinaryArithmetic(conf) ::
       TypeCoercion.typeCoercionRules(conf) ++
       extendedResolutionRules : _*),
-    Batch("PostgreSQL Dialect", Once, PostgreSQLDialect.postgreSQLDialectRules: _*),
     Batch("Post-Hoc Resolution", Once, postHocResolutionRules: _*),
     Batch("Remove Unresolved Hints", Once,
       new ResolveHints.RemoveAllHints(conf)),
@@ -287,11 +286,7 @@ class Analyzer(
         case (_, CalendarIntervalType) => Cast(TimeSub(l, r), l.dataType)
         case (TimestampType, _) => SubtractTimestamps(l, r)
         case (_, TimestampType) => SubtractTimestamps(l, r)
-        case (_, DateType) => if (conf.usePostgreSQLDialect) {
-          DateDiff(l, r)
-        } else {
-          SubtractDates(l, r)
-        }
+        case (_, DateType) => SubtractDates(l, r)
         case (DateType, _) => DateSub(l, r)
         case _ => s
       }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala
deleted file mode 100644
index e7f0e571804d3..0000000000000
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.spark.sql.catalyst.analysis - -import org.apache.spark.internal.Logging -import org.apache.spark.sql.catalyst.expressions.Cast -import org.apache.spark.sql.catalyst.expressions.postgreSQL.PostgreCastToBoolean -import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan -import org.apache.spark.sql.catalyst.rules.Rule -import org.apache.spark.sql.internal.SQLConf -import org.apache.spark.sql.types.{BooleanType, StringType} - -object PostgreSQLDialect { - val postgreSQLDialectRules: List[Rule[LogicalPlan]] = - CastToBoolean :: - Nil - - object CastToBoolean extends Rule[LogicalPlan] with Logging { - override def apply(plan: LogicalPlan): LogicalPlan = { - // The SQL configuration `spark.sql.dialect` can be changed in runtime. - // To make sure the configuration is effective, we have to check it during rule execution. - val conf = SQLConf.get - if (conf.usePostgreSQLDialect) { - plan.transformExpressions { - case Cast(child, dataType, timeZoneId) - if child.dataType != BooleanType && dataType == BooleanType => - PostgreCastToBoolean(child, timeZoneId) - } - } else { - plan - } - } - } -} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala index e76193fd94222..dd174ef28f309 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala @@ -59,7 +59,7 @@ object TypeCoercion { CaseWhenCoercion :: IfCoercion :: StackCoercion :: - Division(conf) :: + Division :: ImplicitTypeCasts :: DateTimeOperations :: WindowFrameCoercion :: @@ -662,7 +662,7 @@ object TypeCoercion { * Hive only performs integral division with the DIV operator. The arguments to / are always * converted to fractional types. 
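   * For example, after this rule `SELECT 1 / 2` evaluates to 0.5 (a double); integral division
   * remains available through the `div` operator, e.g. `SELECT 1 div 2` returns 0.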
   */
-  case class Division(conf: SQLConf) extends TypeCoercionRule {
+  object Division extends TypeCoercionRule {
     override protected def coerceTypes(
         plan: LogicalPlan): LogicalPlan = plan resolveExpressions {
       // Skip nodes that have not been resolved yet,
@@ -673,13 +673,7 @@ object TypeCoercion {
       case d: Divide if d.dataType == DoubleType => d
       case d: Divide if d.dataType.isInstanceOf[DecimalType] => d
       case Divide(left, right) if isNumericOrNull(left) && isNumericOrNull(right) =>
-        val preferIntegralDivision = conf.usePostgreSQLDialect
-        (left.dataType, right.dataType) match {
-          case (_: IntegralType, _: IntegralType) if preferIntegralDivision =>
-            IntegralDivide(left, right)
-          case _ =>
-            Divide(Cast(left, DoubleType), Cast(right, DoubleType))
-        }
+        Divide(Cast(left, DoubleType), Cast(right, DoubleType))
     }
 
     private def isNumericOrNull(ex: Expression): Boolean = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index a871a746d64ff..fa27a48419dbb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -276,7 +276,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   private[this] def needsTimeZone: Boolean = Cast.needsTimeZone(child.dataType, dataType)
 
   // [[func]] assumes the input is no longer null because eval already does the null check.
-  @inline protected def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])
+  @inline private[this] def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])
 
   private lazy val dateFormatter = DateFormatter(zoneId)
   private lazy val timestampFormatter = TimestampFormatter.getFractionFormatter(zoneId)
@@ -387,7 +387,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   }
 
   // UDFToBoolean
-  protected[this] def castToBoolean(from: DataType): Any => Any = from match {
+  private[this] def castToBoolean(from: DataType): Any => Any = from match {
     case StringType =>
       buildCast[UTF8String](_, s => {
         if (StringUtils.isTrueString(s)) {
@@ -602,7 +602,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
    * Change the precision / scale in a given decimal to those set in `decimalType` (if any),
    * modifying `value` in-place and returning it if successful. If an overflow occurs, it
    * either returns null or throws an exception according to the value set for
-   * `spark.sql.dialect.spark.ansi.enabled`.
+   * `spark.sql.ansi.enabled`.
    *
    * NOTE: this modifies `value` in-place, so don't call it on external data.
    */
@@ -621,7 +621,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
 
   /**
    * Create new `Decimal` with precision and scale given in `decimalType` (if any).
-   * If overflow occurs, if `spark.sql.dialect.spark.ansi.enabled` is false, null is returned;
+   * If an overflow occurs and `spark.sql.ansi.enabled` is false, null is returned;
    * otherwise, an `ArithmeticException` is thrown.
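   * For example, `toPrecision(Decimal("134.12"), DecimalType(3, 2))` overflows, so it returns
   * null when `spark.sql.ansi.enabled` is false and throws when it is true.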
   */
   private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal =
@@ -794,7 +794,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     }
   }
 
-  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val eval = child.genCode(ctx)
     val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx)
 
@@ -804,7 +804,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
 
   // The function arguments are: `input`, `result` and `resultIsNull`. We don't need `inputIsNull`
   // in parameter list, because the returned code will be put in null safe evaluation region.
-  protected type CastFunction = (ExprValue, ExprValue, ExprValue) => Block
+  private[this] type CastFunction = (ExprValue, ExprValue, ExprValue) => Block
 
   private[this] def nullSafeCastFunction(
       from: DataType,
@@ -1254,7 +1254,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   private[this] def timestampToDoubleCode(ts: ExprValue): Block =
     code"$ts / (double)$MICROS_PER_SECOND"
 
-  protected[this] def castToBooleanCode(from: DataType): CastFunction = from match {
+  private[this] def castToBooleanCode(from: DataType): CastFunction = from match {
     case StringType =>
       val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}"
       (c, evPrim, evNull) =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 7650fb07a61cd..82a8e6d80a0bd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -150,7 +150,7 @@ abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant {
     sys.error("BinaryArithmetics must override either calendarIntervalMethod or genCode")
 
   // Name of the function for the exact version of this expression in [[Math]].
-  // If the option "spark.sql.dialect.spark.ansi.enabled" is enabled and there is corresponding
+  // If the option "spark.sql.ansi.enabled" is enabled and there is a corresponding
   // function in [[Math]], the exact function will be called instead of evaluation with [[symbol]].
   def exactMathMethod: Option[String] = None
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToBoolean.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToBoolean.scala
deleted file mode 100644
index 02bc6f0d0d8bf..0000000000000
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToBoolean.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.spark.sql.catalyst.expressions.postgreSQL - -import org.apache.spark.sql.catalyst.analysis.TypeCheckResult -import org.apache.spark.sql.catalyst.expressions.{CastBase, Expression, TimeZoneAwareExpression} -import org.apache.spark.sql.catalyst.expressions.codegen.Block._ -import org.apache.spark.sql.catalyst.util.postgreSQL.StringUtils -import org.apache.spark.sql.types._ -import org.apache.spark.unsafe.types.UTF8String - -case class PostgreCastToBoolean(child: Expression, timeZoneId: Option[String]) - extends CastBase { - - override protected def ansiEnabled = - throw new UnsupportedOperationException("PostgreSQL dialect doesn't support ansi mode") - - override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = - copy(timeZoneId = Option(timeZoneId)) - - override def checkInputDataTypes(): TypeCheckResult = child.dataType match { - case StringType | IntegerType | NullType => - TypeCheckResult.TypeCheckSuccess - case _ => - TypeCheckResult.TypeCheckFailure(s"cannot cast type ${child.dataType} to boolean") - } - - override def castToBoolean(from: DataType): Any => Any = from match { - case StringType => - buildCast[UTF8String](_, str => { - val s = str.trimAll().toLowerCase() - if (StringUtils.isTrueString(s)) { - true - } else if (StringUtils.isFalseString(s)) { - false - } else { - throw new IllegalArgumentException(s"invalid input syntax for type boolean: $s") - } - }) - case IntegerType => - super.castToBoolean(from) - } - - override def castToBooleanCode(from: DataType): CastFunction = from match { - case StringType => - val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}" - (c, evPrim, evNull) => - code""" - if ($stringUtils.isTrueString($c.trim().toLowerCase())) { - $evPrim = true; - } else if ($stringUtils.isFalseString($c.trim().toLowerCase())) { - $evPrim = false; - } else { - throw new IllegalArgumentException("invalid input syntax for type boolean: $c"); - } - """ - - case IntegerType => - super.castToBooleanCode(from) - } - - override def dataType: DataType = BooleanType - - override def nullable: Boolean = child.nullable - - override def toString: String = s"PostgreCastToBoolean($child as ${dataType.simpleString})" - - override def sql: String = s"CAST(${child.sql} AS ${dataType.sql})" -} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala index b968848224c54..c96f2da9a5289 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala @@ -28,7 +28,6 @@ import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.catalyst.trees.Origin import org.apache.spark.sql.internal.SQLConf -import org.apache.spark.sql.internal.SQLConf.Dialect import org.apache.spark.sql.types.{DataType, StructType} /** @@ -89,20 +88,13 @@ abstract class AbstractSqlParser(conf: SQLConf) extends 
ParserInterface with Log
   protected def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
     logDebug(s"Parsing command: $command")
 
-    // When we use PostgreSQL dialect or use Spark dialect with setting
-    // `spark.sql.dialect.spark.ansi.enabled=true`, the parser will use ANSI SQL standard keywords.
-    val SQLStandardKeywordBehavior = conf.dialect match {
-      case Dialect.POSTGRESQL => true
-      case Dialect.SPARK => conf.dialectSparkAnsiEnabled
-    }
-
     val lexer = new SqlBaseLexer(new UpperCaseCharStream(CharStreams.fromString(command)))
     lexer.removeErrorListeners()
     lexer.addErrorListener(ParseErrorListener)
     lexer.legacy_setops_precedence_enbled = conf.setOpsPrecedenceEnforced
     lexer.legacy_exponent_literal_as_decimal_enabled = conf.exponentLiteralAsDecimalEnabled
     lexer.legacy_create_hive_table_by_default_enabled = conf.createHiveTableByDefaultEnabled
-    lexer.SQL_standard_keyword_behavior = SQLStandardKeywordBehavior
+    lexer.SQL_standard_keyword_behavior = conf.ansiEnabled
 
     val tokenStream = new CommonTokenStream(lexer)
     val parser = new SqlBaseParser(tokenStream)
@@ -112,7 +104,7 @@ abstract class AbstractSqlParser(conf: SQLConf) extends ParserInterface with Log
     parser.legacy_setops_precedence_enbled = conf.setOpsPrecedenceEnforced
     parser.legacy_exponent_literal_as_decimal_enabled = conf.exponentLiteralAsDecimalEnabled
     parser.legacy_create_hive_table_by_default_enabled = conf.createHiveTableByDefaultEnabled
-    parser.SQL_standard_keyword_behavior = SQLStandardKeywordBehavior
+    parser.SQL_standard_keyword_behavior = conf.ansiEnabled
 
     try {
       try {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala
index 51ab3df0e0bfb..b42ae4e45366e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala
@@ -38,7 +38,7 @@ object StringUtils extends Logging {
    * throw an [[AnalysisException]].
    *
    * @param pattern the SQL pattern to convert
-   * @param escapeStr the escape string contains one character.
+   * @param escapeChar the escape character.
    * @return the equivalent Java regular expression of the pattern
    */
   def escapeLikeRegex(pattern: String, escapeChar: Char): String = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/postgreSQL/StringUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/postgreSQL/StringUtils.scala
deleted file mode 100644
index 1ae15df29d6e7..0000000000000
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/postgreSQL/StringUtils.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -package org.apache.spark.sql.catalyst.util.postgreSQL - -import org.apache.spark.unsafe.types.UTF8String - -object StringUtils { - // "true", "yes", "1", "false", "no", "0", and unique prefixes of these strings are accepted. - private[this] val trueStrings = - Set("true", "tru", "tr", "t", "yes", "ye", "y", "on", "1").map(UTF8String.fromString) - - private[this] val falseStrings = - Set("false", "fals", "fal", "fa", "f", "no", "n", "off", "of", "0").map(UTF8String.fromString) - - def isTrueString(s: UTF8String): Boolean = trueStrings.contains(s) - - def isFalseString(s: UTF8String): Boolean = falseStrings.contains(s) -} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index f535792489fb4..8024c5e3377c6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -1669,37 +1669,6 @@ object SQLConf { .booleanConf .createWithDefault(false) - object Dialect extends Enumeration { - val SPARK, POSTGRESQL = Value - } - - val DIALECT = - buildConf("spark.sql.dialect") - .doc("The specific features of the SQL language to be adopted, which are available when " + - "accessing the given database. Currently, Spark supports two database dialects, `Spark` " + - "and `PostgreSQL`. With `PostgreSQL` dialect, Spark will: " + - "1. perform integral division with the / operator if both sides are integral types; " + - "2. accept \"true\", \"yes\", \"1\", \"false\", \"no\", \"0\", and unique prefixes as " + - "input and trim input for the boolean data type.") - .stringConf - .transform(_.toUpperCase(Locale.ROOT)) - .checkValues(Dialect.values.map(_.toString)) - .createWithDefault(Dialect.SPARK.toString) - - val ANSI_ENABLED = buildConf("spark.sql.ansi.enabled") - .internal() - .doc("This configuration is deprecated and will be removed in the future releases." + - "It is replaced by spark.sql.dialect.spark.ansi.enabled.") - .booleanConf - .createWithDefault(false) - - val DIALECT_SPARK_ANSI_ENABLED = buildConf("spark.sql.dialect.spark.ansi.enabled") - .doc("When true, Spark tries to conform to the ANSI SQL specification: 1. Spark will " + - "throw a runtime exception if an overflow occurs in any operation on integral/decimal " + - "field. 2. Spark will forbid using the reserved keywords of ANSI SQL as identifiers in " + - "the SQL parser.") - .fallbackConf(ANSI_ENABLED) - val VALIDATE_PARTITION_COLUMNS = buildConf("spark.sql.sources.validatePartitionColumns") .internal() @@ -1820,6 +1789,14 @@ object SQLConf { .checkValues(IntervalStyle.values.map(_.toString)) .createWithDefault(IntervalStyle.MULTI_UNITS.toString) + val ANSI_ENABLED = buildConf("spark.sql.ansi.enabled") + .doc("When true, Spark tries to conform to the ANSI SQL specification: 1. Spark will " + + "throw a runtime exception if an overflow occurs in any operation on integral/decimal " + + "field. 2. 
Spark will forbid using the reserved keywords of ANSI SQL as identifiers in " + + "the SQL parser.") + .booleanConf + .createWithDefault(false) + val SORT_BEFORE_REPARTITION = buildConf("spark.sql.execution.sortBeforeRepartition") .internal() @@ -2555,13 +2532,7 @@ class SQLConf extends Serializable with Logging { def intervalOutputStyle: IntervalStyle.Value = IntervalStyle.withName(getConf(INTERVAL_STYLE)) - def dialect: Dialect.Value = Dialect.withName(getConf(DIALECT)) - - def usePostgreSQLDialect: Boolean = dialect == Dialect.POSTGRESQL - - def dialectSparkAnsiEnabled: Boolean = getConf(DIALECT_SPARK_ANSI_ENABLED) - - def ansiEnabled: Boolean = usePostgreSQLDialect || dialectSparkAnsiEnabled + def ansiEnabled: Boolean = getConf(ANSI_ENABLED) def nestedSchemaPruningEnabled: Boolean = getConf(NESTED_SCHEMA_PRUNING_ENABLED) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala index f35617b374c99..0d6f9bcedb6a2 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala @@ -1425,7 +1425,7 @@ class TypeCoercionSuite extends AnalysisTest { test("SPARK-15776 Divide expression's dataType should be casted to Double or Decimal " + "in aggregation function like sum") { - val rules = Seq(FunctionArgumentConversion, Division(conf)) + val rules = Seq(FunctionArgumentConversion, Division) // Casts Integer to Double ruleTest(rules, sum(Divide(4, 3)), sum(Divide(Cast(4, DoubleType), Cast(3, DoubleType)))) // Left expression is Double, right expression is Int. Another rule ImplicitTypeCasts will @@ -1444,35 +1444,12 @@ class TypeCoercionSuite extends AnalysisTest { } test("SPARK-17117 null type coercion in divide") { - val rules = Seq(FunctionArgumentConversion, Division(conf), ImplicitTypeCasts) + val rules = Seq(FunctionArgumentConversion, Division, ImplicitTypeCasts) val nullLit = Literal.create(null, NullType) ruleTest(rules, Divide(1L, nullLit), Divide(Cast(1L, DoubleType), Cast(nullLit, DoubleType))) ruleTest(rules, Divide(nullLit, 1L), Divide(Cast(nullLit, DoubleType), Cast(1L, DoubleType))) } - test("SPARK-28395 Division operator support integral division") { - val rules = Seq(FunctionArgumentConversion, Division(conf)) - Seq(SQLConf.Dialect.SPARK, SQLConf.Dialect.POSTGRESQL).foreach { dialect => - withSQLConf(SQLConf.DIALECT.key -> dialect.toString) { - val result1 = if (dialect == SQLConf.Dialect.POSTGRESQL) { - IntegralDivide(1L, 1L) - } else { - Divide(Cast(1L, DoubleType), Cast(1L, DoubleType)) - } - ruleTest(rules, Divide(1L, 1L), result1) - val result2 = if (dialect == SQLConf.Dialect.POSTGRESQL) { - IntegralDivide(1, Cast(1, ShortType)) - } else { - Divide(Cast(1, DoubleType), Cast(Cast(1, ShortType), DoubleType)) - } - ruleTest(rules, Divide(1, Cast(1, ShortType)), result2) - - ruleTest(rules, Divide(1L, 1D), Divide(Cast(1L, DoubleType), Cast(1D, DoubleType))) - ruleTest(rules, Divide(Decimal(1.1), 1L), Divide(Decimal(1.1), 1L)) - } - } - } - test("binary comparison with string promotion") { val rule = TypeCoercion.PromoteStrings(conf) ruleTest(rule, diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala index 62e688e4d4bd6..c1f1be3b30e4b 100644 --- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala @@ -436,7 +436,7 @@ class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTes testAndVerifyNotLeakingReflectionObjects( s"overflowing $testName, ansiEnabled=$ansiEnabled") { withSQLConf( - SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> ansiEnabled.toString + SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString ) { // Need to construct Encoder here rather than implicitly resolving it // so that SQLConf changes are respected. diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala index fe068f7a5f6c2..1a1cab823d4f3 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala @@ -169,7 +169,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest { } private def testDecimalOverflow(schema: StructType, row: Row): Unit = { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { val encoder = RowEncoder(schema).resolveAndBind() intercept[Exception] { encoder.toRow(row) @@ -182,7 +182,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest { } } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { val encoder = RowEncoder(schema).resolveAndBind() assert(encoder.fromRow(encoder.toRow(row)).get(0) == null) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala index 6e3fc438e41ea..ad8b1a1673679 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala @@ -61,7 +61,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(Add(positiveLongLit, negativeLongLit), -1L) Seq("true", "false").foreach { checkOverflow => - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> checkOverflow) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> checkOverflow) { DataTypeTestUtils.numericAndInterval.foreach { tpe => checkConsistencyBetweenInterpretedAndCodegenAllowingException(Add, tpe, tpe) } @@ -80,7 +80,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(UnaryMinus(Literal(Int.MinValue)), Int.MinValue) checkEvaluation(UnaryMinus(Literal(Short.MinValue)), Short.MinValue) checkEvaluation(UnaryMinus(Literal(Byte.MinValue)), Byte.MinValue) - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkExceptionInExpression[ArithmeticException]( UnaryMinus(Literal(Long.MinValue)), "overflow") checkExceptionInExpression[ArithmeticException]( @@ -122,7 +122,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(Subtract(positiveLongLit, negativeLongLit), positiveLong - negativeLong) Seq("true", "false").foreach { checkOverflow => - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> 
checkOverflow) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> checkOverflow) { DataTypeTestUtils.numericAndInterval.foreach { tpe => checkConsistencyBetweenInterpretedAndCodegenAllowingException(Subtract, tpe, tpe) } @@ -144,7 +144,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(Multiply(positiveLongLit, negativeLongLit), positiveLong * negativeLong) Seq("true", "false").foreach { checkOverflow => - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> checkOverflow) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> checkOverflow) { DataTypeTestUtils.numericTypeWithoutDecimal.foreach { tpe => checkConsistencyBetweenInterpretedAndCodegenAllowingException(Multiply, tpe, tpe) } @@ -445,12 +445,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minLongLiteral, minLongLiteral) val e5 = Subtract(minLongLiteral, maxLongLiteral) val e6 = Multiply(minLongLiteral, minLongLiteral) - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Long.MinValue) checkEvaluation(e2, Long.MinValue) checkEvaluation(e3, -2L) @@ -469,12 +469,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minIntLiteral, minIntLiteral) val e5 = Subtract(minIntLiteral, maxIntLiteral) val e6 = Multiply(minIntLiteral, minIntLiteral) - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Int.MinValue) checkEvaluation(e2, Int.MinValue) checkEvaluation(e3, -2) @@ -493,12 +493,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minShortLiteral, minShortLiteral) val e5 = Subtract(minShortLiteral, maxShortLiteral) val e6 = Multiply(minShortLiteral, minShortLiteral) - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Short.MinValue) checkEvaluation(e2, Short.MinValue) checkEvaluation(e3, (-2).toShort) @@ -517,12 +517,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minByteLiteral, minByteLiteral) val e5 = Subtract(minByteLiteral, maxByteLiteral) val e6 = Multiply(minByteLiteral, minByteLiteral) - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Byte.MinValue) checkEvaluation(e2, Byte.MinValue) checkEvaluation(e3, (-2).toByte) diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala index dde25ad994c5b..2d8f22c34ade7 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala @@ -685,10 +685,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { checkCast("y", true) checkCast("yes", true) checkCast("1", true) - checkCast("1 ", true) - checkCast("f", false) - checkCast("f\t", false) checkCast("false", false) checkCast("FAlsE", false) checkCast("n", false) @@ -696,8 +693,6 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { checkCast("0", false) checkEvaluation(cast("abc", BooleanType), null) - checkEvaluation(cast("tru", BooleanType), null) - checkEvaluation(cast("fla", BooleanType), null) checkEvaluation(cast("", BooleanType), null) } @@ -894,8 +889,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { } test("Throw exception on casting out-of-range value to decimal type") { - withSQLConf( - SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { checkExceptionInExpression[ArithmeticException]( cast(Literal("134.12"), DecimalType(3, 2)), "cannot be represented") checkExceptionInExpression[ArithmeticException]( @@ -961,8 +955,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { } test("Throw exception on casting out-of-range value to byte type") { - withSQLConf( - SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { testIntMaxAndMin(ByteType) Seq(Byte.MaxValue + 1, Byte.MinValue - 1).foreach { value => checkExceptionInExpression[ArithmeticException](cast(value, ByteType), "overflow") @@ -987,8 +980,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { } test("Throw exception on casting out-of-range value to short type") { - withSQLConf( - SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { testIntMaxAndMin(ShortType) Seq(Short.MaxValue + 1, Short.MinValue - 1).foreach { value => checkExceptionInExpression[ArithmeticException](cast(value, ShortType), "overflow") @@ -1013,8 +1005,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { } test("Throw exception on casting out-of-range value to int type") { - withSQLConf( - SQLConf.DIALECT_SPARK_ANSI_ENABLED.key ->requiredAnsiEnabledForOverflowTestCases.toString) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { testIntMaxAndMin(IntegerType) testLongMaxAndMin(IntegerType) @@ -1031,8 +1022,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { } test("Throw exception on casting out-of-range value to long type") { - withSQLConf( - SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> requiredAnsiEnabledForOverflowTestCases.toString) { testLongMaxAndMin(LongType) Seq(Long.MaxValue, 0, 
Long.MinValue).foreach { value => @@ -1209,7 +1199,7 @@ class CastSuite extends CastSuiteBase { } test("SPARK-28470: Cast should honor nullOnOverflow property") { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(Cast(Literal("134.12"), DecimalType(3, 2)), null) checkEvaluation( Cast(Literal(Timestamp.valueOf("2019-07-25 22:04:36")), DecimalType(3, 2)), null) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala index 8609d888b7bc9..36bc3db580400 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala @@ -32,7 +32,7 @@ class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper { } test("MakeDecimal") { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1")) checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null) val overflowExpr = MakeDecimal(Literal.create(1000L, LongType), 3, 1) @@ -41,7 +41,7 @@ class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper { evaluateWithoutCodegen(overflowExpr, null) checkEvaluationWithUnsafeProjection(overflowExpr, null) } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1")) checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null) val overflowExpr = MakeDecimal(Literal.create(1000L, LongType), 3, 1) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala index cf6ebfb0ecefb..c5ffc381b58e2 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala @@ -57,7 +57,7 @@ class ScalaUDFSuite extends SparkFunSuite with ExpressionEvalHelper { } test("SPARK-28369: honor nullOnOverflow config for ScalaUDF") { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { val udf = ScalaUDF( (a: java.math.BigDecimal) => a.multiply(new java.math.BigDecimal(100)), DecimalType.SYSTEM_DEFAULT, @@ -69,7 +69,7 @@ class ScalaUDFSuite extends SparkFunSuite with ExpressionEvalHelper { } assert(e2.getCause.isInstanceOf[ArithmeticException]) } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { val udf = ScalaUDF( (a: java.math.BigDecimal) => a.multiply(new java.math.BigDecimal(100)), DecimalType.SYSTEM_DEFAULT, diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/CastSuite.scala deleted file mode 100644 index 6c5218b379f31..0000000000000 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/CastSuite.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation 
(ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.spark.sql.catalyst.expressions.postgreSQL - -import java.sql.{Date, Timestamp} - -import org.apache.spark.SparkFunSuite -import org.apache.spark.sql.AnalysisException -import org.apache.spark.sql.catalyst.expressions.{ExpressionEvalHelper, Literal} - -class CastSuite extends SparkFunSuite with ExpressionEvalHelper { - private def checkPostgreCastToBoolean(v: Any, expected: Any): Unit = { - checkEvaluation(PostgreCastToBoolean(Literal(v), None), expected) - } - - test("cast string to boolean") { - checkPostgreCastToBoolean("true", true) - checkPostgreCastToBoolean("tru", true) - checkPostgreCastToBoolean("tr", true) - checkPostgreCastToBoolean("t", true) - checkPostgreCastToBoolean("tRUe", true) - checkPostgreCastToBoolean(" tRue ", true) - checkPostgreCastToBoolean(" tRu ", true) - checkPostgreCastToBoolean("yes", true) - checkPostgreCastToBoolean("ye", true) - checkPostgreCastToBoolean("y", true) - checkPostgreCastToBoolean("1", true) - checkPostgreCastToBoolean("on", true) - - checkPostgreCastToBoolean("false", false) - checkPostgreCastToBoolean("fals", false) - checkPostgreCastToBoolean("fal", false) - checkPostgreCastToBoolean("fa", false) - checkPostgreCastToBoolean("f", false) - checkPostgreCastToBoolean(" fAlse ", false) - checkPostgreCastToBoolean(" fAls ", false) - checkPostgreCastToBoolean(" FAlsE ", false) - checkPostgreCastToBoolean("no", false) - checkPostgreCastToBoolean("n", false) - checkPostgreCastToBoolean("0", false) - checkPostgreCastToBoolean("off", false) - checkPostgreCastToBoolean("of", false) - - intercept[IllegalArgumentException](PostgreCastToBoolean(Literal("o"), None).eval()) - intercept[IllegalArgumentException](PostgreCastToBoolean(Literal("abc"), None).eval()) - intercept[IllegalArgumentException](PostgreCastToBoolean(Literal(""), None).eval()) - } - - test("unsupported data types to cast to boolean") { - assert(PostgreCastToBoolean(Literal(new Timestamp(1)), None).checkInputDataTypes().isFailure) - assert(PostgreCastToBoolean(Literal(new Date(1)), None).checkInputDataTypes().isFailure) - assert(PostgreCastToBoolean(Literal(1.toLong), None).checkInputDataTypes().isFailure) - assert(PostgreCastToBoolean(Literal(1.toShort), None).checkInputDataTypes().isFailure) - assert(PostgreCastToBoolean(Literal(1.toByte), None).checkInputDataTypes().isFailure) - assert(PostgreCastToBoolean(Literal(BigDecimal(1.0)), None).checkInputDataTypes().isFailure) - assert(PostgreCastToBoolean(Literal(1.toDouble), None).checkInputDataTypes().isFailure) - assert(PostgreCastToBoolean(Literal(1.toFloat), None).checkInputDataTypes().isFailure) - } -} diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 0b694ea954156..90bb487ec6168 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -645,7 +645,7 @@ class ExpressionParserSuite extends AnalysisTest { assertEqual(s"${sign}interval $intervalValue", expectedLiteral) // SPARK-23264 Support interval values without INTERVAL clauses if ANSI SQL enabled - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { assertEqual(intervalValue, expected) } } @@ -732,12 +732,12 @@ class ExpressionParserSuite extends AnalysisTest { test("SPARK-23264 Interval Compatibility tests") { def checkIntervals(intervalValue: String, expected: Literal): Unit = { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { assertEqual(intervalValue, expected) } // Compatibility tests: If ANSI SQL disabled, `intervalValue` should be parsed as an alias - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { val aliases = defaultParser.parseExpression(intervalValue).collect { case a @ Alias(_: Literal, name) if intervalUnits.exists { unit => name.startsWith(unit.toString) } => a @@ -835,12 +835,12 @@ class ExpressionParserSuite extends AnalysisTest { } test("current date/timestamp braceless expressions") { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { assertEqual("current_date", CurrentDate()) assertEqual("current_timestamp", CurrentTimestamp()) } - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { assertEqual("current_date", UnresolvedAttribute.quoted("current_date")) assertEqual("current_timestamp", UnresolvedAttribute.quoted("current_timestamp")) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala index 904ae384154c8..23063bbab7aa2 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala @@ -660,7 +660,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { } test("table identifier - reserved/non-reserved keywords if ANSI mode enabled") { - withSQLConf(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { reservedKeywordsInAnsiMode.foreach { keyword => val errMsg = intercept[ParseException] { parseTableIdentifier(keyword) diff --git a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/boolean.sql b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/boolean.sql index 178823bcfe9d6..3a949c834deb5 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/boolean.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/boolean.sql @@ -98,7 +98,6 @@ SELECT boolean('f') <= boolean('t') AS true; -- explicit casts to/from text SELECT boolean(string('TrUe')) AS true, boolean(string('fAlse')) AS `false`; - SELECT boolean(string(' true ')) AS true, boolean(string(' FALSE')) AS `false`; SELECT 
string(boolean(true)) AS true, string(boolean(false)) AS `false`; diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out old mode 100644 new mode 100755 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out index e5f3425efc458..39f239d7dbf2e 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out @@ -53,10 +53,9 @@ true -- !query 6 SELECT boolean('test') AS error -- !query 6 schema -struct<> +struct -- !query 6 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: test +NULL -- !query 7 @@ -70,10 +69,9 @@ false -- !query 8 SELECT boolean('foo') AS error -- !query 8 schema -struct<> +struct -- !query 8 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: foo +NULL -- !query 9 @@ -95,10 +93,9 @@ true -- !query 11 SELECT boolean('yeah') AS error -- !query 11 schema -struct<> +struct -- !query 11 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: yeah +NULL -- !query 12 @@ -120,10 +117,9 @@ false -- !query 14 SELECT boolean('nay') AS error -- !query 14 schema -struct<> +struct -- !query 14 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: nay +NULL -- !query 15 @@ -131,7 +127,7 @@ SELECT boolean('on') AS true -- !query 15 schema struct -- !query 15 output -true +NULL -- !query 16 @@ -139,7 +135,7 @@ SELECT boolean('off') AS `false` -- !query 16 schema struct -- !query 16 output -false +NULL -- !query 17 @@ -147,34 +143,31 @@ SELECT boolean('of') AS `false` -- !query 17 schema struct -- !query 17 output -false +NULL -- !query 18 SELECT boolean('o') AS error -- !query 18 schema -struct<> +struct -- !query 18 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: o +NULL -- !query 19 SELECT boolean('on_') AS error -- !query 19 schema -struct<> +struct -- !query 19 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: on_ +NULL -- !query 20 SELECT boolean('off_') AS error -- !query 20 schema -struct<> +struct -- !query 20 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: off_ +NULL -- !query 21 @@ -188,10 +181,9 @@ true -- !query 22 SELECT boolean('11') AS error -- !query 22 schema -struct<> +struct -- !query 22 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: 11 +NULL -- !query 23 @@ -205,19 +197,17 @@ false -- !query 24 SELECT boolean('000') AS error -- !query 24 schema -struct<> +struct -- !query 24 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: 000 +NULL -- !query 25 SELECT boolean('') AS error -- !query 25 schema -struct<> +struct -- !query 25 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: +NULL -- !query 26 @@ -320,19 +310,17 @@ true false -- !query 38 SELECT boolean(string(' tru e ')) AS invalid -- !query 38 schema -struct<> +struct -- !query 38 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: tru e +NULL -- !query 39 SELECT boolean(string('')) AS invalid -- !query 39 schema -struct<> +struct -- !query 39 output -java.lang.IllegalArgumentException -invalid input syntax for type boolean: +NULL -- !query 40 diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out index 348198b060238..a3410684e85dd 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out @@ -176,28 +176,28 @@ struct -- !query 18 SELECT CASE WHEN 1=0 THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END -- !query 18 schema -struct +struct -- !query 18 output -1 +1.0 -- !query 19 SELECT CASE 1 WHEN 0 THEN 1/0 WHEN 1 THEN 1 ELSE 2/0 END -- !query 19 schema -struct +struct -- !query 19 output -1 +1.0 -- !query 20 SELECT CASE WHEN i > 100 THEN 1/0 ELSE 0 END FROM case_tbl -- !query 20 schema -struct 100) THEN (1 div 0) ELSE 0 END:int> +struct 100) THEN (CAST(1 AS DOUBLE) / CAST(0 AS DOUBLE)) ELSE CAST(0 AS DOUBLE) END:double> -- !query 20 output -0 -0 -0 -0 +0.0 +0.0 +0.0 +0.0 -- !query 21 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out old mode 100644 new mode 100755 index 5371e07423c28..700476e91eaac --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out @@ -482,93 +482,93 @@ SELECT date '5874898-01-01' -- !query 46 SELECT f1 - date '2000-01-01' AS `Days From 2K` FROM DATE_TBL -- !query 46 schema -struct +struct -- !query 46 output --1035 --1036 --1037 --1400 --1401 --1402 --1403 --15542 --15607 -13977 -14343 -14710 -91 -92 -93 +-2 years -10 months +-2 years -10 months -1 days +-2 years -9 months -30 days +-3 years -10 months +-3 years -10 months -1 days +-3 years -10 months -2 days +-3 years -9 months -30 days +-42 years -6 months -18 days +-42 years -8 months -22 days +3 months +3 months 1 days +3 months 2 days +38 years 3 months 7 days +39 years 3 months 8 days +40 years 3 months 9 days -- !query 47 SELECT f1 - date 'epoch' AS `Days From Epoch` FROM DATE_TBL -- !query 47 schema -struct +struct -- !query 47 output --4585 --4650 -11048 -11049 -11050 -24934 -25300 -25667 -9554 -9555 -9556 -9557 -9920 -9921 -9922 +-12 years -6 months -18 days +-12 years -8 months -22 days +26 years 1 months 27 days +26 years 1 months 28 days +26 years 2 months +26 years 2 months 1 days +27 years 1 months 27 days +27 years 2 months +27 years 2 months 1 days +30 years 3 months +30 years 3 months 1 days +30 years 3 months 2 days +68 years 3 months 7 days +69 years 3 months 8 days +70 years 3 months 9 days -- !query 48 SELECT date 'yesterday' - date 'today' AS `One day` -- !query 48 schema -struct +struct -- !query 48 output --1 +-1 days -- !query 49 SELECT date 'today' - date 'tomorrow' AS `One day` -- !query 49 schema -struct +struct -- !query 49 output --1 +-1 days -- !query 50 SELECT date 'yesterday' - date 'tomorrow' AS `Two days` -- !query 50 schema -struct +struct -- !query 50 output --2 +-2 days -- !query 51 SELECT date 'tomorrow' - date 'today' AS `One day` -- !query 51 schema -struct +struct -- !query 51 output -1 +1 days -- !query 52 SELECT date 'today' - date 'yesterday' AS `One day` -- !query 52 schema -struct +struct -- !query 52 output -1 +1 days -- !query 53 SELECT date 'tomorrow' - date 'yesterday' AS `Two days` -- !query 53 schema -struct +struct -- !query 53 output -2 +2 days -- !query 54 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int2.sql.out old mode 100644 new 
mode 100755 index d0a14618a5163..d6e9f7ec79d68 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int2.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int2.sql.out @@ -268,25 +268,25 @@ struct -- !query 27 SELECT '' AS five, i.f1, i.f1 / smallint('2') AS x FROM INT2_TBL i -- !query 27 schema -struct +struct -- !query 27 output - -1234 -617 - -32767 -16383 - 0 0 - 1234 617 - 32767 16383 + -1234 -617.0 + -32767 -16383.5 + 0 0.0 + 1234 617.0 + 32767 16383.5 -- !query 28 SELECT '' AS five, i.f1, i.f1 / int('2') AS x FROM INT2_TBL i -- !query 28 schema -struct +struct -- !query 28 output - -1234 -617 - -32767 -16383 - 0 0 - 1234 617 - 32767 16383 + -1234 -617.0 + -32767 -16383.5 + 0 0.0 + 1234 617.0 + 32767 16383.5 -- !query 29 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out old mode 100644 new mode 100755 index 30afd6e695842..e1cb96d09ec75 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out @@ -321,25 +321,25 @@ struct -- !query 33 SELECT '' AS five, i.f1, i.f1 / smallint('2') AS x FROM INT4_TBL i -- !query 33 schema -struct +struct -- !query 33 output - -123456 -61728 - -2147483647 -1073741823 - 0 0 - 123456 61728 - 2147483647 1073741823 + -123456 -61728.0 + -2147483647 -1.0737418235E9 + 0 0.0 + 123456 61728.0 + 2147483647 1.0737418235E9 -- !query 34 SELECT '' AS five, i.f1, i.f1 / int('2') AS x FROM INT4_TBL i -- !query 34 schema -struct +struct -- !query 34 output - -123456 -61728 - -2147483647 -1073741823 - 0 0 - 123456 61728 - 2147483647 1073741823 + -123456 -61728.0 + -2147483647 -1.0737418235E9 + 0 0.0 + 123456 61728.0 + 2147483647 1.0737418235E9 -- !query 35 @@ -417,17 +417,17 @@ struct -- !query 44 SELECT 2 + 2 / 2 AS three -- !query 44 schema -struct +struct -- !query 44 output -3 +3.0 -- !query 45 SELECT (2 + 2) / 2 AS two -- !query 45 schema -struct +struct -- !query 45 output -2 +2.0 -- !query 46 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out old mode 100644 new mode 100755 index 7a8e706cb8d85..8e2700caa3345 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out @@ -409,13 +409,13 @@ struct -- !query 42 SELECT '' AS five, q1, q2, q1 / q2 AS divide, q1 % q2 AS mod FROM INT8_TBL -- !query 42 schema -struct +struct -- !query 42 output - 123 456 0 123 - 123 4567890123456789 0 123 - 4567890123456789 -4567890123456789 -1 0 - 4567890123456789 123 37137318076884 57 - 4567890123456789 4567890123456789 1 0 + 123 456 0.26973684210526316 123 + 123 4567890123456789 2.6927092525360204E-14 123 + 4567890123456789 -4567890123456789 -1.0 0 + 4567890123456789 123 3.713731807688446E13 57 + 4567890123456789 4567890123456789 1.0 0 -- !query 43 @@ -493,49 +493,49 @@ struct -- !query 49 SELECT q1 + int(42) AS `8plus4`, q1 - int(42) AS `8minus4`, q1 * int(42) AS `8mul4`, q1 / int(42) AS `8div4` FROM INT8_TBL -- !query 49 schema -struct<8plus4:bigint,8minus4:bigint,8mul4:bigint,8div4:bigint> +struct<8plus4:bigint,8minus4:bigint,8mul4:bigint,8div4:double> -- !query 49 output -165 81 5166 2 -165 81 5166 2 -4567890123456831 4567890123456747 191851385185185138 108759288653733 -4567890123456831 4567890123456747 191851385185185138 108759288653733 -4567890123456831 
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
old mode 100644
new mode 100755
index 7a8e706cb8d85..8e2700caa3345
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -409,13 +409,13 @@ struct
 -- !query 42
 SELECT '' AS five, q1, q2, q1 / q2 AS divide, q1 % q2 AS mod FROM INT8_TBL
 -- !query 42 schema
-struct
+struct
 -- !query 42 output
- 123 456 0 123
- 123 4567890123456789 0 123
- 4567890123456789 -4567890123456789 -1 0
- 4567890123456789 123 37137318076884 57
- 4567890123456789 4567890123456789 1 0
+ 123 456 0.26973684210526316 123
+ 123 4567890123456789 2.6927092525360204E-14 123
+ 4567890123456789 -4567890123456789 -1.0 0
+ 4567890123456789 123 3.713731807688446E13 57
+ 4567890123456789 4567890123456789 1.0 0

 -- !query 43
@@ -493,49 +493,49 @@ struct
 -- !query 49
 SELECT q1 + int(42) AS `8plus4`, q1 - int(42) AS `8minus4`, q1 * int(42) AS `8mul4`, q1 / int(42) AS `8div4` FROM INT8_TBL
 -- !query 49 schema
-struct<8plus4:bigint,8minus4:bigint,8mul4:bigint,8div4:bigint>
+struct<8plus4:bigint,8minus4:bigint,8mul4:bigint,8div4:double>
 -- !query 49 output
-165 81 5166 2
-165 81 5166 2
-4567890123456831 4567890123456747 191851385185185138 108759288653733
-4567890123456831 4567890123456747 191851385185185138 108759288653733
-4567890123456831 4567890123456747 191851385185185138 108759288653733
+165 81 5166 2.9285714285714284
+165 81 5166 2.9285714285714284
+4567890123456831 4567890123456747 191851385185185138 1.0875928865373308E14
+4567890123456831 4567890123456747 191851385185185138 1.0875928865373308E14
+4567890123456831 4567890123456747 191851385185185138 1.0875928865373308E14

 -- !query 50
 SELECT int(246) + q1 AS `4plus8`, int(246) - q1 AS `4minus8`, int(246) * q1 AS `4mul8`, int(246) / q1 AS `4div8` FROM INT8_TBL
 -- !query 50 schema
-struct<4plus8:bigint,4minus8:bigint,4mul8:bigint,4div8:bigint>
+struct<4plus8:bigint,4minus8:bigint,4mul8:bigint,4div8:double>
 -- !query 50 output
-369 123 30258 2
-369 123 30258 2
-4567890123457035 -4567890123456543 1123700970370370094 0
-4567890123457035 -4567890123456543 1123700970370370094 0
-4567890123457035 -4567890123456543 1123700970370370094 0
+369 123 30258 2.0
+369 123 30258 2.0
+4567890123457035 -4567890123456543 1123700970370370094 5.385418505072041E-14
+4567890123457035 -4567890123456543 1123700970370370094 5.385418505072041E-14
+4567890123457035 -4567890123456543 1123700970370370094 5.385418505072041E-14

 -- !query 51
 SELECT q1 + smallint(42) AS `8plus2`, q1 - smallint(42) AS `8minus2`, q1 * smallint(42) AS `8mul2`, q1 / smallint(42) AS `8div2` FROM INT8_TBL
 -- !query 51 schema
-struct<8plus2:bigint,8minus2:bigint,8mul2:bigint,8div2:bigint>
+struct<8plus2:bigint,8minus2:bigint,8mul2:bigint,8div2:double>
 -- !query 51 output
-165 81 5166 2
-165 81 5166 2
-4567890123456831 4567890123456747 191851385185185138 108759288653733
-4567890123456831 4567890123456747 191851385185185138 108759288653733
-4567890123456831 4567890123456747 191851385185185138 108759288653733
+165 81 5166 2.9285714285714284
+165 81 5166 2.9285714285714284
+4567890123456831 4567890123456747 191851385185185138 1.0875928865373308E14
+4567890123456831 4567890123456747 191851385185185138 1.0875928865373308E14
+4567890123456831 4567890123456747 191851385185185138 1.0875928865373308E14

 -- !query 52
 SELECT smallint(246) + q1 AS `2plus8`, smallint(246) - q1 AS `2minus8`, smallint(246) * q1 AS `2mul8`, smallint(246) / q1 AS `2div8` FROM INT8_TBL
 -- !query 52 schema
-struct<2plus8:bigint,2minus8:bigint,2mul8:bigint,2div8:bigint>
+struct<2plus8:bigint,2minus8:bigint,2mul8:bigint,2div8:double>
 -- !query 52 output
-369 123 30258 2
-369 123 30258 2
-4567890123457035 -4567890123456543 1123700970370370094 0
-4567890123457035 -4567890123456543 1123700970370370094 0
-4567890123457035 -4567890123456543 1123700970370370094 0
+369 123 30258 2.0
+369 123 30258 2.0
+4567890123457035 -4567890123456543 1123700970370370094 5.385418505072041E-14
+4567890123457035 -4567890123456543 1123700970370370094 5.385418505072041E-14
+4567890123457035 -4567890123456543 1123700970370370094 5.385418505072041E-14

 -- !query 53
@@ -569,7 +569,7 @@ struct
 -- !query 56
 select bigint('9223372036854775800') / bigint('0')
 -- !query 56 schema
-struct<(CAST(9223372036854775800 AS BIGINT) div CAST(0 AS BIGINT)):bigint>
+struct<(CAST(CAST(9223372036854775800 AS BIGINT) AS DOUBLE) / CAST(CAST(0 AS BIGINT) AS DOUBLE)):double>
 -- !query 56 output
 NULL
@@ -577,7 +577,7 @@ NULL
 -- !query 57
 select bigint('-9223372036854775808') / smallint('0')
 -- !query 57 schema
-struct<(CAST(-9223372036854775808 AS BIGINT) div CAST(CAST(0 AS SMALLINT) AS BIGINT)):bigint>
+struct<(CAST(CAST(-9223372036854775808 AS BIGINT) AS DOUBLE) / CAST(CAST(0 AS SMALLINT) AS DOUBLE)):double>
 -- !query 57 output
 NULL
@@ -585,7 +585,7 @@ NULL
 -- !query 58
 select smallint('100') / bigint('0')
 -- !query 58 schema
-struct<(CAST(CAST(100 AS SMALLINT) AS BIGINT) div CAST(0 AS BIGINT)):bigint>
+struct<(CAST(CAST(100 AS SMALLINT) AS DOUBLE) / CAST(CAST(0 AS BIGINT) AS DOUBLE)):double>
 -- !query 58 output
 NULL
@@ -740,9 +740,9 @@ long overflow
 -- !query 74
 SELECT bigint((-9223372036854775808)) / bigint((-1))
 -- !query 74 schema
-struct<(CAST(-9223372036854775808 AS BIGINT) div CAST(-1 AS BIGINT)):bigint>
+struct<(CAST(CAST(-9223372036854775808 AS BIGINT) AS DOUBLE) / CAST(CAST(-1 AS BIGINT) AS DOUBLE)):double>
 -- !query 74 output
--9223372036854775808
+9.223372036854776E18

 -- !query 75
@@ -765,9 +765,9 @@ long overflow
 -- !query 77
 SELECT bigint((-9223372036854775808)) / int((-1))
 -- !query 77 schema
-struct<(CAST(-9223372036854775808 AS BIGINT) div CAST(CAST(-1 AS INT) AS BIGINT)):bigint>
+struct<(CAST(CAST(-9223372036854775808 AS BIGINT) AS DOUBLE) / CAST(CAST(-1 AS INT) AS DOUBLE)):double>
 -- !query 77 output
--9223372036854775808
+9.223372036854776E18

 -- !query 78
@@ -790,9 +790,9 @@ long overflow
 -- !query 80
 SELECT bigint((-9223372036854775808)) / smallint((-1))
 -- !query 80 schema
-struct<(CAST(-9223372036854775808 AS BIGINT) div CAST(CAST(-1 AS SMALLINT) AS BIGINT)):bigint>
+struct<(CAST(CAST(-9223372036854775808 AS BIGINT) AS DOUBLE) / CAST(CAST(-1 AS SMALLINT) AS DOUBLE)):double>
 -- !query 80 output
--9223372036854775808
+9.223372036854776E18

 -- !query 81
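The int8 results surface two consequences of evaluating `/` in double precision: `Long.MinValue / -1` no longer overflows (the result `9.223372036854776E18` is just a double), and quotients of very large longs lose exactness, e.g. `4567890123456789 / 123` prints `3.713731807688446E13` where the integral answer is `37137318076884`. A sketch contrasting the two operators, reusing the queries from the golden file:

```scala
// Fractional division: no long overflow, but only ~15-16 significant digits.
spark.sql("SELECT bigint((-9223372036854775808)) / bigint((-1)) AS d").show()
// 9.223372036854776E18

// Integral division keeps exact long arithmetic (and so can still overflow).
spark.sql("SELECT bigint(4567890123456789) div 123 AS q").show()
// 37137318076884
```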
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out
old mode 100644
new mode 100755
index 0675820b381da..e9ba62801d6a2
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out
@@ -239,31 +239,36 @@ struct
 SELECT a/2, a/2 FROM test_missing_target ORDER BY a/2
 -- !query 23 schema
-struct<(a div 2):int,(a div 2):int>
+struct<(CAST(a AS DOUBLE) / CAST(2 AS DOUBLE)):double,(CAST(a AS DOUBLE) / CAST(2 AS DOUBLE)):double>
 -- !query 23 output
-0 0
-0 0
-1 1
-1 1
-2 2
-2 2
-3 3
-3 3
-4 4
-4 4
+0.0 0.0
+0.5 0.5
+1.0 1.0
+1.5 1.5
+2.0 2.0
+2.5 2.5
+3.0 3.0
+3.5 3.5
+4.0 4.0
+4.5 4.5

 -- !query 24
 SELECT a/2, a/2 FROM test_missing_target GROUP BY a/2 ORDER BY a/2
 -- !query 24 schema
-struct<(a div 2):int,(a div 2):int>
+struct<(CAST(a AS DOUBLE) / CAST(2 AS DOUBLE)):double,(CAST(a AS DOUBLE) / CAST(2 AS DOUBLE)):double>
 -- !query 24 output
-0 0
-1 1
-2 2
-3 3
-4 4
+0.0 0.0
+0.5 0.5
+1.0 1.0
+1.5 1.5
+2.0 2.0
+2.5 2.5
+3.0 3.0
+3.5 3.5
+4.0 4.0
+4.5 4.5

 -- !query 25
@@ -331,7 +336,8 @@ SELECT count(b) FROM test_missing_target GROUP BY b/2 ORDER BY b/2
 struct
 -- !query 30 output
 1
-5
+2
+3
 4

@@ -370,8 +376,10 @@ SELECT count(b) FROM test_missing_target
 -- !query 33 schema
 struct
 -- !query 33 output
-7
+4
 3
+2
+1

 -- !query 34
@@ -390,11 +398,12 @@ SELECT x.b/2, count(x.b) FROM test_missing_target x, test_missing_target y
 WHERE x.a = y.a GROUP BY x.b/2 ORDER BY x.b/2
 -- !query 35 schema
-struct<(b div 2):int,count(b):bigint>
+struct<(CAST(b AS DOUBLE) / CAST(2 AS DOUBLE)):double,count(b):bigint>
 -- !query 35 output
-0 1
-1 5
-2 4
+0.5 1
+1.0 2
+1.5 3
+2.0 4

 -- !query 36
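The GROUP BY deltas follow directly from the type change: `a div 2` collapsed 0..9 into five truncated buckets, while `a/2` as a double keeps every half distinct, so the grouped results gain rows. A small reproduction against a throwaway view (the view name `t` is made up for illustration):

```scala
val df = spark.range(10).selectExpr("CAST(id AS INT) AS a")
df.createOrReplaceTempView("t")
spark.sql("SELECT a/2 AS half, count(*) AS n FROM t GROUP BY a/2 ORDER BY half").show()
// 10 groups (0.0, 0.5, ..., 4.5) where integral `a div 2` produced only 5
```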
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part1.sql.out
old mode 100644
new mode 100755
index 45bc98ae97640..61b86b556e1a8
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part1.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part1.sql.out
@@ -387,7 +387,7 @@ struct
 -- !query 23
 SELECT avg(four) OVER (PARTITION BY four ORDER BY thousand / 100) FROM tenk1 WHERE unique2 < 10
 -- !query 23 schema
-struct
+struct
 -- !query 23 output
 0.0
 0.0
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
old mode 100644
new mode 100755
index 44a764ce4e6dd..7012b15213662
--- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
@@ -176,28 +176,28 @@ struct
 -- !query 18
 SELECT CASE WHEN udf(1=0) THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END
 -- !query 18 schema
-struct
+struct
 -- !query 18 output
-1
+1.0

 -- !query 19
 SELECT CASE 1 WHEN 0 THEN 1/udf(0) WHEN 1 THEN 1 ELSE 2/0 END
 -- !query 19 schema
-struct
+struct
 -- !query 19 output
-1
+1.0

 -- !query 20
 SELECT CASE WHEN i > 100 THEN udf(1/0) ELSE udf(0) END FROM case_tbl
 -- !query 20 schema
-struct<CASE WHEN (i > 100) THEN CAST(udf(cast((1 div 0) as string)) AS INT) ELSE CAST(udf(cast(0 as string)) AS INT) END:int>
+struct<CASE WHEN (i > 100) THEN CAST(udf(cast((cast(1 as double) / cast(0 as double)) as string)) AS DOUBLE) ELSE CAST(CAST(udf(cast(0 as string)) AS INT) AS DOUBLE) END:double>
 -- !query 20 output
-0
-0
-0
-0
+0.0
+0.0
+0.0
+0.0

 -- !query 21
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out
index a60cbf33b9b24..2540f0260f09c 100755
--- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out
@@ -242,31 +242,36 @@ struct
 SELECT udf(a/2), udf(a/2) FROM test_missing_target ORDER BY udf(a/2)
 -- !query 23 schema
-struct
+struct
 -- !query 23 output
-0 0
-0 0
-1 1
-1 1
-2 2
-2 2
-3 3
-3 3
-4 4
-4 4
+0.0 0.0
+0.5 0.5
+1.0 1.0
+1.5 1.5
+2.0 2.0
+2.5 2.5
+3.0 3.0
+3.5 3.5
+4.0 4.0
+4.5 4.5

 -- !query 24
 SELECT udf(a/2), udf(a/2) FROM test_missing_target GROUP BY udf(a/2) ORDER BY udf(a/2)
 -- !query 24 schema
-struct
+struct
 -- !query 24 output
-0 0
-1 1
-2 2
-3 3
-4 4
+0.0 0.0
+0.5 0.5
+1.0 1.0
+1.5 1.5
+2.0 2.0
+2.5 2.5
+3.0 3.0
+3.5 3.5
+4.0 4.0
+4.5 4.5

 -- !query 25
@@ -334,7 +339,8 @@ SELECT udf(count(b)) FROM test_missing_target GROUP BY udf(b/2) ORDER BY udf(b/2
 struct
 -- !query 30 output
 1
-5
+2
+3
 4

@@ -373,8 +379,10 @@ SELECT udf(count(b)) FROM test_missing_target
 -- !query 33 schema
 struct
 -- !query 33 output
-7
+4
 3
+2
+1

 -- !query 34
@@ -394,11 +402,12 @@ test_missing_target y WHERE udf(x.a) = udf(y.a)
 GROUP BY udf(x.b/2) ORDER BY udf(x.b/2)
 -- !query 35 schema
-struct
+struct
 -- !query 35 output
-0 1
-1 5
-2 4
+0.5 1
+1.0 2
+1.5 3
+2.0 4

 -- !query 36
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 13c2f9a810dcb..5269da1651e76 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -164,7 +164,7 @@ class DataFrameSuite extends QueryTest with SharedSparkSession {
       DecimalData(BigDecimal("9"* 20 + ".123"), BigDecimal("9"* 20 + ".123")) :: Nil).toDF()
     Seq(true, false).foreach { ansiEnabled =>
-      withSQLConf((SQLConf.DIALECT_SPARK_ANSI_ENABLED.key, ansiEnabled.toString)) {
+      withSQLConf((SQLConf.ANSI_ENABLED.key, ansiEnabled.toString)) {
         val structDf = largeDecimals.select("a").agg(sum("a"))
         if (!ansiEnabled) {
           checkAnswer(structDf, Row(null))
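On the Scala side the rename is mechanical: the old `spark.sql.dialect.spark.ansi.enabled` entry collapses into `SQLConf.ANSI_ENABLED` (`spark.sql.ansi.enabled`), so suites toggle a single key. The pattern used above, sketched with the `withSQLConf` helper from `SQLTestUtils`:

```scala
import org.apache.spark.sql.internal.SQLConf

Seq(true, false).foreach { ansiEnabled =>
  withSQLConf(SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString) {
    // assertions that depend on the ANSI setting go here
  }
}
```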
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/PostgreSQLDialectQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/PostgreSQLDialectQuerySuite.scala
deleted file mode 100644
index 7056f483609a9..0000000000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/PostgreSQLDialectQuerySuite.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql
-
-import org.apache.spark.SparkConf
-import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.test.SharedSparkSession
-
-class PostgreSQLDialectQuerySuite extends QueryTest with SharedSparkSession {
-
-  override def sparkConf: SparkConf =
-    super.sparkConf.set(SQLConf.DIALECT.key, SQLConf.Dialect.POSTGRESQL.toString)
-
-  test("cast string to boolean") {
-    Seq("true", "tru", "tr", "t", " tRue ", " tRu ", "yes", "ye",
-      "y", "1", "on").foreach { input =>
-      checkAnswer(sql(s"select cast('$input' as boolean)"), Row(true))
-    }
-    Seq("false", "fals", "fal", "fa", "f", " fAlse ", " fAls ", "no", "n",
-      "0", "off", "of").foreach { input =>
-      checkAnswer(sql(s"select cast('$input' as boolean)"), Row(false))
-    }
-
-    Seq("o", "abc", "").foreach { input =>
-      intercept[IllegalArgumentException](sql(s"select cast('$input' as boolean)").collect())
-    }
-  }
-}
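Deleting this suite removes the last user of the permissive PostgreSQL-style boolean cast, which accepted unambiguous prefixes such as 'tru', 'ye', and 'of'. Spark's standard cast recognizes only its fixed token set (roughly 't'/'true'/'y'/'yes'/'1' and the corresponding false strings), and under the default non-ANSI cast anything else becomes NULL. A sketch of the behavior one would expect after this change:

```scala
spark.sql("SELECT cast('yes' AS boolean) AS a, cast('tru' AS boolean) AS b").show()
// a = true, b = null -- 'tru' is no longer a recognized boolean token
```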
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index e6dcf0b86308a..9169b3819f0a4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -332,9 +332,9 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
         localSparkSession.udf.register("boolne", (b1: Boolean, b2: Boolean) => b1 != b2)
         // vol used by boolean.sql and case.sql.
         localSparkSession.udf.register("vol", (s: String) => s)
-        localSparkSession.conf.set(SQLConf.DIALECT.key, SQLConf.Dialect.POSTGRESQL.toString)
+        localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
       case _: AnsiTest =>
-        localSparkSession.conf.set(SQLConf.DIALECT_SPARK_ANSI_ENABLED.key, true)
+        localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
       case _ =>
     }
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
index b564bb2d24005..7d2963f3c21f0 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
@@ -117,14 +117,10 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite {
     }

     testCase match {
-      case _: PgSQLTest =>
-        statement.execute(s"SET ${SQLConf.DIALECT.key} = ${SQLConf.Dialect.POSTGRESQL.toString}")
-      case _: AnsiTest =>
-        statement.execute(s"SET ${SQLConf.DIALECT.key} = ${SQLConf.Dialect.SPARK.toString}")
-        statement.execute(s"SET ${SQLConf.DIALECT_SPARK_ANSI_ENABLED.key} = true")
+      case _: PgSQLTest | _: AnsiTest =>
+        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
       case _ =>
-        statement.execute(s"SET ${SQLConf.DIALECT.key} = ${SQLConf.Dialect.SPARK.toString}")
-        statement.execute(s"SET ${SQLConf.DIALECT_SPARK_ANSI_ENABLED.key} = false")
+        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
     }

     // Run the SQL queries preparing them for comparison.
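End to end, the same flag is all a Thrift-server client needs to set; there is no dialect to select first. A minimal sketch over JDBC, assuming a running Spark Thrift server on the default port and the Hive JDBC driver on the classpath (connection details are illustrative):

```scala
import java.sql.DriverManager

val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "user", "")
val stmt = conn.createStatement()
stmt.execute("SET spark.sql.ansi.enabled = true")  // single switch, formerly two configs
val rs = stmt.executeQuery("SELECT 1/2")
while (rs.next()) println(rs.getDouble(1))         // 0.5
conn.close()
```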