From 3a1bac1c593e1a3726911d518c3864161c22dee5 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 30 May 2024 09:51:01 +0200
Subject: [PATCH 01/20] Fix OracleIntegrationSuite for JDK8

---
 .../org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
index 2c97a588670a..f753882b18a0 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
@@ -95,7 +95,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
       " bonus BINARY_DOUBLE)").executeUpdate()
     connection.prepareStatement(
       s"""CREATE TABLE pattern_testing_table (
-         |pattern_testing_col VARCHAR(50)
+         |pattern_testing_col VARCHAR2(50)
          |)
          """.stripMargin
     ).executeUpdate()

From 3d340801f8d66f2980e525a73414ed78e7a81abc Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 30 May 2024 10:07:38 +0200
Subject: [PATCH 02/20] Revert "Fix OracleIntegrationSuite for JDK8"

This reverts commit d54453ee8a8e04256d2cd85b83f845b352e7caf9.

---
 .../org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
index f753882b18a0..2c97a588670a 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleIntegrationSuite.scala
@@ -95,7 +95,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTes
       " bonus BINARY_DOUBLE)").executeUpdate()
     connection.prepareStatement(
       s"""CREATE TABLE pattern_testing_table (
-         |pattern_testing_col VARCHAR2(50)
+         |pattern_testing_col VARCHAR(50)
          |)
          """.stripMargin
     ).executeUpdate()
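The remaining patches in the series replace the legacy `_LEGACY_ERROR_TEMP_2000` condition with properly named error conditions. The moving parts are always the same three: an entry in `error-conditions.json`, a `QueryExecutionErrors` helper that raises it, and tests that assert the condition by name. As a rough sketch of the test-side pattern, reusing the `checkErrorInExpression` helper that appears in the diffs below (the `MakeDate` arguments and the expected message here are illustrative, not taken from the patch):

```scala
// Sketch only: under ANSI mode an out-of-range month should surface the named
// condition instead of a _LEGACY_ERROR_TEMP_* code. The literal values are
// made up for illustration.
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
  checkErrorInExpression[SparkDateTimeException](
    MakeDate(Literal(2013), Literal(13), Literal(1)),
    "INVALID_DATE_ARGUMENT_VALUE",
    Map(
      "message" -> "Invalid value for MonthOfYear (valid values 1 - 12): 13",
      "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
}
```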
From f8470aa28fdcabe5c24e680c998f7353fb2f3392 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 3 Oct 2024 08:58:04 +0200
Subject: [PATCH 03/20] Rename legacy error

---
 .../src/main/resources/error/error-conditions.json   | 13 +++++++------
 .../catalyst/expressions/datetimeExpressions.scala   |  4 ++--
 .../spark/sql/catalyst/util/DateTimeUtils.scala      |  4 ++--
 .../spark/sql/errors/QueryExecutionErrors.scala      |  8 ++++----
 .../catalyst/expressions/DateExpressionsSuite.scala  |  2 +-
 .../sql/catalyst/util/DateTimeUtilsSuite.scala       |  4 ++--
 .../resources/sql-tests/results/ansi/date.sql.out    |  4 ++--
 .../sql-tests/results/ansi/timestamp.sql.out         |  6 +++---
 .../sql-tests/results/postgreSQL/date.sql.out        |  6 +++---
 .../results/timestampNTZ/timestamp-ansi.sql.out      |  6 +++---
 10 files changed, 29 insertions(+), 28 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 3786643125a9..a7e5f6a1f829 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2148,6 +2148,12 @@
     },
     "sqlState" : "HY109"
   },
+  "INVALID_DATE_ARGUMENT_VALUE" : {
+    "message" : [
+      "<message>. If necessary set <ansiConfig> to false to bypass this error."
+    ],
+    "sqlState" : "22023"
+  },
   "INVALID_DATETIME_PATTERN" : {
     "message" : [
       "Unrecognized datetime pattern: <pattern>."
     ],
     "sqlState" : "22007"
   },
@@ -6700,12 +6706,7 @@
     "message" : [
       "Sinks cannot request distribution and ordering in continuous execution mode."
     ]
-  },
-  "_LEGACY_ERROR_TEMP_2000" : {
-    "message" : [
-      "<message>. If necessary set <ansiConfig> to false to bypass this error."
-    ]
-  },
+  }
   "_LEGACY_ERROR_TEMP_2003" : {
     "message" : [
       "Unsuccessful try to zip maps with unique keys due to exceeding the array size limit <size>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index b166d235557f..304ab5558660 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2489,7 +2489,7 @@ case class MakeDate(
       localDateToDays(ld)
     } catch {
       case e: java.time.DateTimeException =>
-        if (failOnError) throw QueryExecutionErrors.ansiDateTimeError(e) else null
+        if (failOnError) throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e) else null
     }
   }
@@ -2723,7 +2723,7 @@ case class MakeTimestamp(
     } catch {
       case e: SparkDateTimeException if failOnError => throw e
       case e: DateTimeException if failOnError =>
-        throw QueryExecutionErrors.ansiDateTimeError(e)
+        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e)
       case _: DateTimeException => null
     }
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index e27ce29fc231..9fe092afae34 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -304,8 +304,8 @@ object DateTimeUtils extends SparkDateTimeUtils {
       start: Int,
       interval: CalendarInterval): Int = {
     if (interval.microseconds != 0) {
-      throw QueryExecutionErrors.ansiIllegalArgumentError(
-        "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date")
+      throw QueryExecutionErrors.ansiNoMicrosecondArgumentSupported(
+        "Cannot add hours, minutes, seconds or microseconds to a date")
     }
     val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
     localDateToDays(ld)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 4a23e9766fc5..9b907ccdd3af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -277,9 +277,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
     summary = "")
   }

-  def ansiDateTimeError(e: Exception): SparkDateTimeException = {
+  def ansiDateTimeArgumentOutOfRange(e: Exception): SparkDateTimeException = {
     new SparkDateTimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2000",
+      errorClass = "INVALID_DATE_ARGUMENT_VALUE",
       messageParameters = Map(
         "message" -> e.getMessage,
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
       context = Array.empty,
       summary = "")
   }

-  def ansiIllegalArgumentError(message: String): SparkIllegalArgumentException = {
+  def ansiNoMicrosecondArgumentSupported(message: String): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "_LEGACY_ERROR_TEMP_2000",
+      errorClass = "INVALID_DATE_ARGUMENT_VALUE",
       messageParameters = Map(
         "message" -> message,
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 21ae35146282..2df33bbd5ede 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -436,7 +436,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
       checkErrorInExpression[SparkIllegalArgumentException](
         DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
-        "_LEGACY_ERROR_TEMP_2000",
+        "INVALID_DATE_ARGUMENT_VALUE",
         Map("message" ->
           "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
           "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 96aaf13052b0..c233aa7820b6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -542,9 +542,9 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
     checkError(
       exception = intercept[SparkIllegalArgumentException](
         dateAddInterval(input, new CalendarInterval(36, 47, 1))),
-      condition = "_LEGACY_ERROR_TEMP_2000",
+      condition = "INVALID_DATE_ARGUMENT_VALUE",
       parameters = Map(
-        "message" -> "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
+        "message" -> "Cannot add hours, minutes, seconds or microseconds to a date",
         "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
   }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index 67cd23faf255..aadb5237391f 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -53,7 +53,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
@@ -68,7 +68,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index d7a58e321b0f..f66df7342dc0 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -154,7 +154,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
@@ -185,7 +185,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
@@ -200,7 +200,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index 8caf8c54b9f3..7a0c9a909c32 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -687,7 +687,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'FEBRUARY 30'"
@@ -702,7 +702,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
@@ -717,7 +717,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index cd94674d2bf2..344c41b1bc36 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -154,7 +154,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
@@ -185,7 +185,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
@@ -200,7 +200,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"

From 1e275eef1cd7419d15b4b874e9f3181946f8094f Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 3 Oct 2024 09:02:32 +0200
Subject: [PATCH 04/20] Fix comma deletion

---
 common/utils/src/main/resources/error/error-conditions.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index a7e5f6a1f829..85f82cfafd76 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -6706,7 +6706,7 @@
     "message" : [
       "Sinks cannot request distribution and ordering in continuous execution mode."
     ]
-  }
+  },
   "_LEGACY_ERROR_TEMP_2003" : {
     "message" : [
       "Unsuccessful try to zip maps with unique keys due to exceeding the array size limit <size>."

From 13ee2b0551b6fb838f2ce46798a8e5ac2631e215 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 3 Oct 2024 10:54:21 +0200
Subject: [PATCH 05/20] Fix ordering

---
 .../src/main/resources/error/error-conditions.json | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 85f82cfafd76..671719962786 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2148,12 +2148,6 @@
     },
     "sqlState" : "HY109"
   },
-  "INVALID_DATE_ARGUMENT_VALUE" : {
-    "message" : [
-      "<message>. If necessary set <ansiConfig> to false to bypass this error."
-    ],
-    "sqlState" : "22023"
-  },
   "INVALID_DATETIME_PATTERN" : {
     "message" : [
       "Unrecognized datetime pattern: <pattern>."
@@ -2171,6 +2171,12 @@
     },
     "sqlState" : "22007"
   },
+  "INVALID_DATE_ARGUMENT_VALUE" : {
+    "message" : [
+      "<message>. If necessary set <ansiConfig> to false to bypass this error."
+    ],
+    "sqlState" : "22023"
+  },
   "INVALID_DEFAULT_VALUE" : {
     "message" : [
       "Failed to execute <statement> command because the destination column or variable <colName> has a DEFAULT value <defaultValue>,"

From b2eec806924ff7dfd4295e896e3e71d528e09e50 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 3 Oct 2024 11:10:51 +0200
Subject: [PATCH 06/20] Add new error condition

---
 .../utils/src/main/resources/error/error-conditions.json | 5 +++++
 .../apache/spark/sql/catalyst/util/DateTimeUtils.scala    | 3 +--
 .../apache/spark/sql/errors/QueryExecutionErrors.scala    | 8 +++-----
 .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala      | 6 ++----
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 671719962786..89cdf4a96552 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1646,6 +1646,11 @@
       "Illegal input for day of week: <string>."
     ],
     "sqlState" : "22009"
+  },"ILLEGAL_INTERVAL_ARGUMENT_VALUE" : {
+    "message" : [
+      "Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to false to bypass this error."
+    ],
+    "sqlState" : "22023"
   },
   "ILLEGAL_STATE_STORE_VALUE" : {
     "message" : [
       "Illegal value provided to the State Store"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 9fe092afae34..c41d90f7eece 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -304,8 +304,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
       start: Int,
       interval: CalendarInterval): Int = {
     if (interval.microseconds != 0) {
-      throw QueryExecutionErrors.ansiNoMicrosecondArgumentSupported(
-        "Cannot add hours, minutes, seconds or microseconds to a date")
+      throw QueryExecutionErrors.ansiIllegalIntervalArgumentValue()
     }
     val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
     localDateToDays(ld)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 9b907ccdd3af..51f420c7eaff 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -287,12 +287,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
     summary = "")
   }

-  def ansiNoMicrosecondArgumentSupported(message: String): SparkIllegalArgumentException = {
+  def ansiIllegalIntervalArgumentValue(): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "INVALID_DATE_ARGUMENT_VALUE",
-      messageParameters = Map(
-        "message" -> message,
-        "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
+      errorClass = "ILLEGAL_INTERVAL_ARGUMENT_VALUE",
+      messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index c233aa7820b6..d52d577acf91 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -542,10 +542,8 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
     checkError(
       exception = intercept[SparkIllegalArgumentException](
         dateAddInterval(input, new CalendarInterval(36, 47, 1))),
-      condition = "INVALID_DATE_ARGUMENT_VALUE",
-      parameters = Map(
-        "message" -> "Cannot add hours, minutes, seconds or microseconds to a date",
-        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+      condition = "ILLEGAL_INTERVAL_ARGUMENT_VALUE",
+      parameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
   }
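The dedicated condition added in PATCH 06 fires whenever `dateAddInterval` receives an interval with a non-zero microseconds component, a path the `DateAddInterval` expression only reaches under ANSI mode. A rough sketch of the failing call, using the `days` helper from the test utilities that the suite above already relies on (the interval values are made up):

```scala
// Sketch only: CalendarInterval(months, days, microseconds). A sub-day time
// component cannot be added to a DATE, so this raises a
// SparkIllegalArgumentException with condition ILLEGAL_INTERVAL_ARGUMENT_VALUE.
val start = days(2024, 1, 1)
val oneDayAndOneSecond = new CalendarInterval(0, 1, 1000000L)
dateAddInterval(start, oneDayAndOneSecond) // throws
```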
From a2b93715c4c5070e99092298d5d4f6fd7aae9c02 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 3 Oct 2024 11:25:55 +0200
Subject: [PATCH 07/20] Improve error name

---
 .../src/main/resources/error/error-conditions.json   | 12 ++++++------
 .../spark/sql/errors/QueryExecutionErrors.scala      |  2 +-
 .../catalyst/expressions/DateExpressionsSuite.scala  |  6 ++----
 .../resources/sql-tests/results/ansi/date.sql.out    |  4 ++--
 .../sql-tests/results/ansi/timestamp.sql.out         |  6 +++---
 .../sql-tests/results/postgreSQL/date.sql.out        |  6 +++---
 .../results/timestampNTZ/timestamp-ansi.sql.out      |  6 +++---
 7 files changed, 20 insertions(+), 22 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 89cdf4a96552..777b35e900c8 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1087,6 +1087,12 @@
     ],
     "sqlState" : "42K03"
   },
+  "DATETIME_ARGUMENT_OUT_OF_RANGE" : {
+    "message" : [
+      "<message>. If necessary set <ansiConfig> to false to bypass this error."
+    ],
+    "sqlState" : "22023"
+  },
   "DATETIME_OVERFLOW" : {
     "message" : [
       "Datetime operation overflow: <operation>."
@@ -2176,12 +2176,6 @@
     },
     "sqlState" : "22007"
   },
-  "INVALID_DATE_ARGUMENT_VALUE" : {
-    "message" : [
-      "<message>. If necessary set <ansiConfig> to false to bypass this error."
-    ],
-    "sqlState" : "22023"
-  },
   "INVALID_DEFAULT_VALUE" : {
     "message" : [
       "Failed to execute <statement> command because the destination column or variable <colName> has a DEFAULT value <defaultValue>,"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 51f420c7eaff..eee707b81448 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -279,7 +279,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE

   def ansiDateTimeArgumentOutOfRange(e: Exception): SparkDateTimeException = {
     new SparkDateTimeException(
-      errorClass = "INVALID_DATE_ARGUMENT_VALUE",
+      errorClass = "DATETIME_ARGUMENT_OUT_OF_RANGE",
       messageParameters = Map(
         "message" -> e.getMessage,
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 2df33bbd5ede..de6a9e0e4c1c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -436,10 +436,8 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
       checkErrorInExpression[SparkIllegalArgumentException](
         DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
-        "INVALID_DATE_ARGUMENT_VALUE",
-        Map("message" ->
-          "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
-          "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+        "ILLEGAL_INTERVAL_ARGUMENT_VALUE",
+        Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }

     withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index aadb5237391f..600a4646b35b 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -53,7 +53,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
@@ -68,7 +68,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index f66df7342dc0..0bdd49ac6164 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -154,7 +154,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
@@ -185,7 +185,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
@@ -200,7 +200,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index 7a0c9a909c32..dae6ee7efcda 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -687,7 +687,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'FEBRUARY 30'"
@@ -702,7 +702,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
@@ -717,7 +717,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 344c41b1bc36..ad584d43ebaf 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -154,7 +154,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
@@ -185,7 +185,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
@@ -200,7 +200,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "INVALID_DATE_ARGUMENT_VALUE",
+  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"

From 468df0df4383184563d8ea0d1fc2684b8c17b213 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 3 Oct 2024 19:37:52 +0200
Subject: [PATCH 08/20] Update common/utils/src/main/resources/error/error-conditions.json

Co-authored-by: Maxim Gekk

---
 common/utils/src/main/resources/error/error-conditions.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 777b35e900c8..1a82563d69a1 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1654,7 +1654,7 @@
     "sqlState" : "22009"
   },"ILLEGAL_INTERVAL_ARGUMENT_VALUE" : {
     "message" : [
-      "Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to false to bypass this error."
+      "Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to \"false\" to bypass this error."
     ],
     "sqlState" : "22023"
   },

From a8efe0ff880d3928d85ecedf761ea1a0ae0778b5 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Thu, 3 Oct 2024 19:38:57 +0200
Subject: [PATCH 09/20] Update error-conditions.json

---
 common/utils/src/main/resources/error/error-conditions.json | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 1a82563d69a1..45b4e4bfc12f 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1652,7 +1652,8 @@
       "Illegal input for day of week: <string>."
     ],
     "sqlState" : "22009"
-  },"ILLEGAL_INTERVAL_ARGUMENT_VALUE" : {
+  },
+  "ILLEGAL_INTERVAL_ARGUMENT_VALUE" : {
     "message" : [
       "Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to \"false\" to bypass this error."
     ],

From 84da3d33809c11282cca758afb581c1586a62d4f Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Fri, 4 Oct 2024 08:19:26 +0200
Subject: [PATCH 10/20] Fix tests

---
 .../spark/sql/catalyst/expressions/datetimeExpressions.scala | 2 +-
 .../src/test/resources/sql-tests/results/ansi/date.sql.out   | 2 ++
 .../test/resources/sql-tests/results/ansi/timestamp.sql.out  | 3 +++
 .../test/resources/sql-tests/results/postgreSQL/date.sql.out | 3 +++
 .../sql-tests/results/timestampNTZ/timestamp-ansi.sql.out    | 3 +++
 5 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 304ab5558660..20516ea2a981 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2496,7 +2496,7 @@ case class MakeDate(
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
     val failOnErrorBranch = if (failOnError) {
-      "throw QueryExecutionErrors.ansiDateTimeError(e);"
+      "throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
     } else {
       s"${ev.isNull} = true;"
     }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index 600a4646b35b..9455191e3ee2 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -54,6 +54,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
@@ -69,6 +70,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 0bdd49ac6164..29a492ffd4d1 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -155,6 +155,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
@@ -186,6 +187,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
@@ -201,6 +203,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index dae6ee7efcda..bca2fe22876f 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -688,6 +688,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid date 'FEBRUARY 30'"
@@ -703,6 +704,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
@@ -719,6 +720,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index ad584d43ebaf..a188cb67c895 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -155,6 +155,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
@@ -186,6 +187,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
@@ -201,6 +203,7 @@ struct<>
 org.apache.spark.SparkDateTimeException
 {
   "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"

From 58336d2f020e045a5f2e55562db71c408c68242a Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Fri, 4 Oct 2024 09:20:33 +0200
Subject: [PATCH 11/20] Fix test

---
 .../spark/sql/catalyst/expressions/datetimeExpressions.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 20516ea2a981..40767a0d15ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2754,7 +2754,7 @@ case class MakeTimestamp(
     val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
     val d = Decimal.getClass.getName.stripSuffix("$")
     val failOnErrorBranch = if (failOnError) {
-      "throw QueryExecutionErrors.ansiDateTimeError(e);"
+      "throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
     } else {
       s"${ev.isNull} = true;"
     }
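PATCH 12 below reworks the interval error once more: instead of a standalone `ILLEGAL_INTERVAL_ARGUMENT_VALUE`, it becomes a parameterized `INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS` sub-condition, and `dateAddInterval` now threads the calling function's name into the message. Roughly how that surfaces in a test, mirroring the suite change in the patch (the function name here is a made-up caller for illustration):

```scala
// Sketch only: the funcName argument passed to dateAddInterval ends up as the
// quoted `functionName` message parameter. "date_add" is illustrative.
checkError(
  exception = intercept[SparkIllegalArgumentException](
    dateAddInterval(days(1997, 2, 28), new CalendarInterval(36, 47, 1), "date_add")),
  condition = "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
  parameters = Map(
    "parameter" -> "`interval`",
    "functionName" -> "`date_add`",
    "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
```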
From cdd7e4292c3f87a2cb253b2f19701e3ce976fbff Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Fri, 4 Oct 2024 12:55:25 +0200
Subject: [PATCH 12/20] Incorporate changes

---
 .../main/resources/error/error-conditions.json  | 13 ++++++-------
 .../expressions/datetimeExpressions.scala       |  2 +-
 .../spark/sql/catalyst/util/DateTimeUtils.scala |  5 +++--
 .../spark/sql/errors/QueryExecutionErrors.scala |  9 ++++++---
 .../expressions/DateExpressionsSuite.scala      |  2 +-
 .../sql/catalyst/util/DateTimeUtilsSuite.scala  | 15 +++++++++------
 6 files changed, 26 insertions(+), 20 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 45b4e4bfc12f..d1e3a5e3bc38 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1089,7 +1089,7 @@
   },
   "DATETIME_ARGUMENT_OUT_OF_RANGE" : {
     "message" : [
-      "<message>. If necessary set <ansiConfig> to false to bypass this error."
+      "<message>. If necessary set <ansiConfig> to \"false\" to bypass this error."
     ],
     "sqlState" : "22023"
   },
@@ -1653,12 +1653,6 @@
     ],
     "sqlState" : "22009"
   },
-  "ILLEGAL_INTERVAL_ARGUMENT_VALUE" : {
-    "message" : [
-      "Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to \"false\" to bypass this error."
-    ],
-    "sqlState" : "22023"
-  },
   "ILLEGAL_STATE_STORE_VALUE" : {
     "message" : [
       "Illegal value provided to the State Store"
@@ -2844,6 +2838,11 @@
         "expects an integer literal, but got <invalidValue>."
       ]
     },
+    "INTERVAL_WITH_MICROSECONDS" : {
+      "message" : [
+        "Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to \"false\" to bypass this error."
+      ]
+    },
     "LENGTH" : {
       "message" : [
         "Expects `length` greater than or equal to 0, but got <length>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 40767a0d15ba..cafdc099cf5e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1723,7 +1723,7 @@ case class DateAddInterval(
   override def nullSafeEval(start: Any, interval: Any): Any = {
     val itvl = interval.asInstanceOf[CalendarInterval]
     if (ansiEnabled || itvl.microseconds == 0) {
-      DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl)
+      DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl, prettyName)
     } else {
       val startTs = DateTimeUtils.daysToMicros(start.asInstanceOf[Int], zoneId)
       val resultTs = DateTimeUtils.timestampAddInterval(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index c41d90f7eece..4a21e995eaa7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -302,9 +302,10 @@ object DateTimeUtils extends SparkDateTimeUtils {
    */
   def dateAddInterval(
       start: Int,
-      interval: CalendarInterval): Int = {
+      interval: CalendarInterval,
+      funcName: String): Int = {
     if (interval.microseconds != 0) {
-      throw QueryExecutionErrors.ansiIllegalIntervalArgumentValue()
+      throw QueryExecutionErrors.invalidIntervalWithMicrosecondsError(funcName)
     }
     val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
     localDateToDays(ld)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index eee707b81448..88a3772d9285 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -287,10 +287,13 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
     summary = "")
   }

-  def ansiIllegalIntervalArgumentValue(): SparkIllegalArgumentException = {
+  def invalidIntervalWithMicrosecondsError(funcName: String): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "ILLEGAL_INTERVAL_ARGUMENT_VALUE",
-      messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
+      errorClass = "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
+      messageParameters = Map(
+        "parameter" -> toSQLId("interval"),
+        "functionName" -> toSQLId(funcName),
+        "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index de6a9e0e4c1c..11aa50e3c76a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -436,7 +436,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
       checkErrorInExpression[SparkIllegalArgumentException](
        DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
-        "ILLEGAL_INTERVAL_ARGUMENT_VALUE",
+        "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
         Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index d52d577acf91..4208ff815394 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -536,14 +536,17 @@
   test("date add interval with day precision") {
     val input = days(1997, 2, 28)
-    assert(dateAddInterval(input, new CalendarInterval(36, 0, 0)) === days(2000, 2, 28))
-    assert(dateAddInterval(input, new CalendarInterval(36, 47, 0)) === days(2000, 4, 15))
-    assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0)) === days(1996, 1, 28))
+    assert(dateAddInterval(input, new CalendarInterval(36, 0, 0), "") === days(2000, 2, 28))
+    assert(dateAddInterval(input, new CalendarInterval(36, 47, 0), "") === days(2000, 4, 15))
+    assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0), "") === days(1996, 1, 28))
     checkError(
       exception = intercept[SparkIllegalArgumentException](
-        dateAddInterval(input, new CalendarInterval(36, 47, 1))),
-      condition = "ILLEGAL_INTERVAL_ARGUMENT_VALUE",
-      parameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+        dateAddInterval(input, new CalendarInterval(36, 47, 1), "test")),
+      condition = "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
+      parameters = Map(
+        "parameter" -> "`interval`",
+        "functionName" -> "`test`",
+        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
   }

From 6c3ee3d8f6c5423793525237f08f03c424a9c5ea Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Fri, 4 Oct 2024 16:34:16 +0200
Subject: [PATCH 13/20] Fix duplicates

---
 common/utils/src/main/resources/error/error-conditions.json  | 2 +-
 .../org/apache/spark/sql/errors/QueryExecutionErrors.scala   | 2 +-
 .../src/test/resources/sql-tests/results/ansi/date.sql.out   | 4 ++--
 .../test/resources/sql-tests/results/ansi/timestamp.sql.out  | 6 +++---
 .../resources/sql-tests/results/postgreSQL/date.sql.out      | 6 +++---
 .../sql-tests/results/timestampNTZ/timestamp-ansi.sql.out    | 6 +++---
 6 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index d1e3a5e3bc38..3de9b87e79d1 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1089,7 +1089,7 @@
   },
   "DATETIME_ARGUMENT_OUT_OF_RANGE" : {
     "message" : [
-      "<message>. If necessary set <ansiConfig> to \"false\" to bypass this error."
+      "<rangeMessage>. If necessary set <ansiConfig> to \"false\" to bypass this error."
     ],
     "sqlState" : "22023"
   },
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 88a3772d9285..a034da74285f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -281,7 +281,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
     new SparkDateTimeException(
       errorClass = "DATETIME_ARGUMENT_OUT_OF_RANGE",
       messageParameters = Map(
-        "message" -> e.getMessage,
+        "rangeMessage" -> e.getMessage,
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
       context = Array.empty,
       summary = "")
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index 9455191e3ee2..f5755a377c1a 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -57,7 +57,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
+    "rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
   }
 }
@@ -73,7 +73,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
+    "rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 29a492ffd4d1..c1396fb24899 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -158,7 +158,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
   }
 }
@@ -190,7 +190,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
   }
 }
@@ -206,7 +206,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index bca2fe22876f..515ef61a39f5 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -691,7 +691,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid date 'FEBRUARY 30'"
+    "rangeMessage" : "Invalid date 'FEBRUARY 30'"
   }
 }
@@ -707,7 +707,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
+    "rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
   }
 }
@@ -723,7 +723,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
+    "rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index a188cb67c895..3778431edf45 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -158,7 +158,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
   }
 }
@@ -190,7 +190,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
   }
 }
@@ -206,7 +206,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
   }
 }

From 093cfe4148402601e7bf759c15628a4ad394bb31 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Fri, 4 Oct 2024 20:56:06 +0200
Subject: [PATCH 14/20] Fix codegen

---
 .../spark/sql/catalyst/expressions/datetimeExpressions.scala | 4 ++--
 .../spark/sql/catalyst/expressions/DateExpressionsSuite.scala | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index cafdc099cf5e..b197cf76a2f3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1735,14 +1735,14 @@ case class DateAddInterval(
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
     nullSafeCodeGen(ctx, ev, (sd, i) => if (ansiEnabled) {
-      s"""${ev.value} = $dtu.dateAddInterval($sd, $i);"""
+      s"""${ev.value} = $dtu.dateAddInterval($sd, $i, "$prettyName");"""
     } else {
       val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
       val startTs = ctx.freshName("startTs")
       val resultTs = ctx.freshName("resultTs")
       s"""
         |if ($i.microseconds == 0) {
-        |  ${ev.value} = $dtu.dateAddInterval($sd, $i);
+        |  ${ev.value} = $dtu.dateAddInterval($sd, $i, "$prettyName");
         |} else {
         |  long $startTs = $dtu.daysToMicros($sd, $zid);
         |  long $resultTs =
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 11aa50e3c76a..e7e57a5c8e18 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -437,7 +437,8 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       checkErrorInExpression[SparkIllegalArgumentException](
         DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
         "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
-        Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+        Map("parameter" -> "`interval`", "functionName" -> "`dateaddinterval`",
+          "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }
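The final rename in PATCH 15 settles on `DATETIME_FIELD_OUT_OF_BOUNDS` for the out-of-range case. From the user's side only the condition name changes; a rough sketch of what a caller observes under ANSI mode (the query and assertion are illustrative, and `getErrorClass` comes from `SparkThrowable`):

```scala
// Sketch only: seconds = 61 is outside 0-59, so ANSI mode raises the renamed
// condition. The query literal is made up for illustration.
val e = intercept[SparkDateTimeException] {
  spark.sql("SELECT make_timestamp(2024, 1, 1, 0, 0, 61)").collect()
}
assert(e.getErrorClass == "DATETIME_FIELD_OUT_OF_BOUNDS")
```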
From 0843220ebf9e86d83c96ba5decf517b8d06a353a Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Fri, 8 Nov 2024 08:02:44 +0100
Subject: [PATCH 15/20] Fix error message and align with other PR

---
 common/utils/src/main/resources/error/error-conditions.json | 4 ++--
 .../src/test/resources/sql-tests/results/ansi/date.sql.out | 4 ++--
 .../test/resources/sql-tests/results/ansi/timestamp.sql.out | 6 +++---
 .../resources/sql-tests/results/postgreSQL/date.sql.out | 6 +++---
 .../sql-tests/results/timestampNTZ/timestamp-ansi.sql.out | 6 +++---
 5 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index ad3729bd406c..0bfddb59a26f 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1111,7 +1111,7 @@
     ],
     "sqlState" : "42K03"
   },
-  "DATETIME_ARGUMENT_OUT_OF_RANGE" : {
+  "DATETIME_FIELD_OUT_OF_BOUNDS" : {
     "message" : [
       "<rangeMessage>. If necessary set <ansiConfig> to \"false\" to bypass this error."
     ],
@@ -2901,7 +2901,7 @@
     },
     "INTERVAL_WITH_MICROSECONDS" : {
      "message" : [
-       "Cannot add hours, minutes, seconds or microseconds to a date. If necessary set <ansiConfig> to \"false\" to bypass this error."
+       "Cannot add an interval to a date because its microseconds part is not 0. If necessary set <ansiConfig> to \"false\" to bypass this error."
      ]
    },
    "LENGTH" : {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index f5755a377c1a..aa283d324961 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -53,7 +53,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
@@ -69,7 +69,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index a7851e7ceeca..e3cf1a154922 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -154,7 +154,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
@@ -186,7 +186,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
@@ -202,7 +202,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index 515ef61a39f5..d9f4301dd0e8 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -687,7 +687,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
@@ -703,7 +703,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
@@ -719,7 +719,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 221b51e6d1b5..681306ba9f40 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -154,7 +154,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
@@ -186,7 +186,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
@@ -202,7 +202,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_ARGUMENT_OUT_OF_RANGE",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
   "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",

From 42f353941fa8ae623d88b8b44b094b4896633e14 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Fri, 8 Nov 2024 08:55:45 +0100
Subject: [PATCH 16/20] Fix problems

---
 .../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index bcc886ae76c9..3286ee47afe7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -279,7 +279,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
 
   def ansiDateTimeArgumentOutOfRange(e: Exception): SparkDateTimeException = {
     new SparkDateTimeException(
-      errorClass = "DATETIME_ARGUMENT_OUT_OF_RANGE",
+      errorClass = "DATETIME_FIELD_OUT_OF_BOUNDS",
       messageParameters = Map(
         "rangeMessage" -> e.getMessage,
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
From 03ef7382202ae0e10aa25c8006903affc669fd37 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Tue, 12 Nov 2024 09:38:05 +0100
Subject: [PATCH 17/20] Fix wrong message

---
 .../src/main/resources/error/error-conditions.json | 11 ++++++-----
 .../spark/sql/catalyst/util/DateTimeUtils.scala | 2 +-
 .../spark/sql/errors/QueryExecutionErrors.scala | 9 +++------
 .../catalyst/expressions/DateExpressionsSuite.scala | 5 ++---
 .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala | 7 ++-----
 5 files changed, 14 insertions(+), 20 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 0bfddb59a26f..e16fe8295b5b 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2613,6 +2613,12 @@
     },
     "sqlState" : "22006"
   },
+  "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION" : {
+    "message" : [
+      "Cannot add an interval to a date because its microseconds part is not 0. If necessary set <ansiConfig> to \"false\" to bypass this error."
+    ],
+    "sqlState" : "22006"
+  },
   "INVALID_INVERSE_DISTRIBUTION_FUNCTION" : {
     "message" : [
       "Invalid inverse distribution function <funcName>."
@@ -2899,11 +2905,6 @@
         "expects an integer literal, but got <invalidValue>."
       ]
     },
-    "INTERVAL_WITH_MICROSECONDS" : {
-      "message" : [
-        "Cannot add an interval to a date because its microseconds part is not 0. If necessary set <ansiConfig> to \"false\" to bypass this error."
-      ]
-    },
     "LENGTH" : {
       "message" : [
         "Expects `length` greater than or equal to 0, but got <length>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 4a21e995eaa7..7930bc3ba87a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -305,7 +305,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
       interval: CalendarInterval,
       funcName: String): Int = {
     if (interval.microseconds != 0) {
-      throw QueryExecutionErrors.invalidIntervalWithMicrosecondsError(funcName)
+      throw QueryExecutionErrors.invalidIntervalWithMicrosecondsAdditionError(funcName)
     }
     val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
     localDateToDays(ld)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 3286ee47afe7..05c67f582bdf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -287,13 +287,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
     summary = "")
   }
 
-  def invalidIntervalWithMicrosecondsError(funcName: String): SparkIllegalArgumentException = {
+  def invalidIntervalWithMicrosecondsAdditionError(funcName: String): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
-      messageParameters = Map(
-        "parameter" -> toSQLId("interval"),
-        "functionName" -> toSQLId(funcName),
-        "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
+      errorClass = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
+      messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
   }
 
   def overflowInSumOfDecimalError(
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index d77e62270bdb..5cd974838fa2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -436,9 +436,8 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
       checkErrorInExpression[SparkIllegalArgumentException](
         DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
-        "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
-        Map("parameter" -> "`interval`", "functionName" -> "`dateaddinterval`",
-          "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+        "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
+        Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }
 
     withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
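
The new top-level condition can also be exercised directly through DateTimeUtils (a sketch against Spark-internal API, using the three-argument signature as of this patch; the funcName argument is removed again in PATCH 19):

    import org.apache.spark.SparkThrowable
    import org.apache.spark.sql.catalyst.util.DateTimeUtils
    import org.apache.spark.unsafe.types.CalendarInterval

    // days since epoch for 1997-02-28, the fixture used by DateTimeUtilsSuite
    val input = java.time.LocalDate.of(1997, 2, 28).toEpochDay.toInt
    try {
      // a single stray microsecond is enough to raise the error
      DateTimeUtils.dateAddInterval(input, new CalendarInterval(36, 47, 1), "test")
    } catch {
      case e: SparkThrowable =>
        assert(e.getErrorClass == "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION")
    }
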
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 4208ff815394..248210d952a8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -542,11 +542,8 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
     checkError(
       exception = intercept[SparkIllegalArgumentException](
         dateAddInterval(input, new CalendarInterval(36, 47, 1), "test")),
-      condition = "INVALID_PARAMETER_VALUE.INTERVAL_WITH_MICROSECONDS",
-      parameters = Map(
-        "parameter" -> "`interval`",
-        "functionName" -> "`test`",
-        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+      condition = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
+      parameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
   }
 
   test("timestamp add interval") {

From d7c176aef83cadad2bd3bcd8f32b083affd1abd0 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Tue, 12 Nov 2024 10:19:08 +0100
Subject: [PATCH 18/20] Fix style

---
 .../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 05c67f582bdf..e603e8796739 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -287,7 +287,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
     summary = "")
   }
 
-  def invalidIntervalWithMicrosecondsAdditionError(funcName: String): SparkIllegalArgumentException = {
+  def invalidIntervalWithMicrosecondsAdditionError(
+      funcName: String): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
       errorClass = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
       messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))

From fb34be0b0f58e07c198d7cb19e98bcf586cbf82c Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Wed, 13 Nov 2024 11:46:32 +0100
Subject: [PATCH 19/20] Incorporate changes

---
 .../sql/catalyst/expressions/datetimeExpressions.scala | 2 +-
 .../apache/spark/sql/catalyst/util/DateTimeUtils.scala | 5 ++---
 .../apache/spark/sql/errors/QueryExecutionErrors.scala | 3 +--
 .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala | 8 ++++----
 4 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 795154337a6d..86586e9222af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1723,7 +1723,7 @@ case class DateAddInterval(
   override def nullSafeEval(start: Any, interval: Any): Any = {
     val itvl = interval.asInstanceOf[CalendarInterval]
     if (ansiEnabled || itvl.microseconds == 0) {
-      DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl, prettyName)
+      DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl)
     } else {
       val startTs = DateTimeUtils.daysToMicros(start.asInstanceOf[Int], zoneId)
       val
resultTs = DateTimeUtils.timestampAddInterval(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 7930bc3ba87a..c9ca3ed864c1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -302,10 +302,9 @@ object DateTimeUtils extends SparkDateTimeUtils {
    */
   def dateAddInterval(
       start: Int,
-      interval: CalendarInterval,
-      funcName: String): Int = {
+      interval: CalendarInterval): Int = {
     if (interval.microseconds != 0) {
-      throw QueryExecutionErrors.invalidIntervalWithMicrosecondsAdditionError(funcName)
+      throw QueryExecutionErrors.invalidIntervalWithMicrosecondsAdditionError()
     }
     val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
     localDateToDays(ld)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index e603e8796739..c37b7352acda 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -287,8 +287,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
     summary = "")
   }
 
-  def invalidIntervalWithMicrosecondsAdditionError(
-      funcName: String): SparkIllegalArgumentException = {
+  def invalidIntervalWithMicrosecondsAdditionError(): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
       errorClass = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
       messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 248210d952a8..790c834d83e9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -536,12 +536,12 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
   test("date add interval with day precision") {
     val input = days(1997, 2, 28)
-    assert(dateAddInterval(input, new CalendarInterval(36, 0, 0), "") === days(2000, 2, 28))
-    assert(dateAddInterval(input, new CalendarInterval(36, 47, 0), "") === days(2000, 4, 15))
-    assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0), "") === days(1996, 1, 28))
+    assert(dateAddInterval(input, new CalendarInterval(36, 0, 0)) === days(2000, 2, 28))
+    assert(dateAddInterval(input, new CalendarInterval(36, 47, 0)) === days(2000, 4, 15))
+    assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0)) === days(1996, 1, 28))
     checkError(
       exception = intercept[SparkIllegalArgumentException](
-        dateAddInterval(input, new CalendarInterval(36, 47, 1), "test")),
+        dateAddInterval(input, new CalendarInterval(36, 47, 1))),
       condition = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
       parameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
   }
 
   test("timestamp add interval") {
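
The day-precision expectations in the suite follow directly from java.time arithmetic, which is what dateAddInterval delegates to via plusMonths/plusDays. A standalone check of the same fixtures (a sketch, independent of Spark):

    import java.time.LocalDate

    val start = LocalDate.of(1997, 2, 28)
    assert(start.plusMonths(36) == LocalDate.of(2000, 2, 28))              // CalendarInterval(36, 0, 0)
    assert(start.plusMonths(36).plusDays(47) == LocalDate.of(2000, 4, 15)) // CalendarInterval(36, 47, 0)
    assert(start.plusMonths(-13) == LocalDate.of(1996, 1, 28))             // CalendarInterval(-13, 0, 0)
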
From 2a1f41c42077f5f80d5eb90218add58cba64dc38 Mon Sep 17 00:00:00 2001
From: Mihailo Milosevic
Date: Wed, 13 Nov 2024 14:48:44 +0100
Subject: [PATCH 20/20] Fix codeGen

---
 .../spark/sql/catalyst/expressions/datetimeExpressions.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 86586e9222af..5042fec49727 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1735,14 +1735,14 @@ case class DateAddInterval(
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
     nullSafeCodeGen(ctx, ev, (sd, i) => if (ansiEnabled) {
-      s"""${ev.value} = $dtu.dateAddInterval($sd, $i, "$prettyName");"""
+      s"""${ev.value} = $dtu.dateAddInterval($sd, $i);"""
     } else {
       val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
       val startTs = ctx.freshName("startTs")
       val resultTs = ctx.freshName("resultTs")
       s"""
         |if ($i.microseconds == 0) {
-        |  ${ev.value} = $dtu.dateAddInterval($sd, $i, "$prettyName");
+        |  ${ev.value} = $dtu.dateAddInterval($sd, $i);
         |} else {
         |  long $startTs = $dtu.daysToMicros($sd, $zid);
         |  long $resultTs =