diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 5e1c3f46fd110..eb772f053a889 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1101,6 +1101,12 @@
     ],
     "sqlState" : "42K03"
   },
+  "DATETIME_FIELD_OUT_OF_BOUNDS" : {
+    "message" : [
+      "<rangeMessage>. If necessary set <ansiConfig> to \"false\" to bypass this error."
+    ],
+    "sqlState" : "22023"
+  },
   "DATETIME_OVERFLOW" : {
     "message" : [
       "Datetime operation overflow: <operation>."
@@ -2609,6 +2615,12 @@
     },
     "sqlState" : "22006"
   },
+  "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION" : {
+    "message" : [
+      "Cannot add an interval to a date because its microseconds part is not 0. If necessary set <ansiConfig> to \"false\" to bypass this error."
+    ],
+    "sqlState" : "22006"
+  },
   "INVALID_INVERSE_DISTRIBUTION_FUNCTION" : {
     "message" : [
       "Invalid inverse distribution function <funcName>."
@@ -6905,11 +6917,6 @@
       "Sinks cannot request distribution and ordering in continuous execution mode."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2000" : {
-    "message" : [
-      "<message>. If necessary set <ansiConfig> to false to bypass this error."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2003" : {
     "message" : [
       "Unsuccessful try to zip maps with unique keys due to exceeding the array size limit <size>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index f2ba3ed95b850..fba3927a0bc9c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2507,14 +2507,14 @@ case class MakeDate(
       localDateToDays(ld)
     } catch {
       case e: java.time.DateTimeException =>
-        if (failOnError) throw QueryExecutionErrors.ansiDateTimeError(e) else null
+        if (failOnError) throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e) else null
     }
   }
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
     val failOnErrorBranch = if (failOnError) {
-      "throw QueryExecutionErrors.ansiDateTimeError(e);"
+      "throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
     } else {
       s"${ev.isNull} = true;"
     }
@@ -2839,7 +2839,7 @@ case class MakeTimestamp(
     } catch {
       case e: SparkDateTimeException if failOnError => throw e
       case e: DateTimeException if failOnError =>
-        throw QueryExecutionErrors.ansiDateTimeError(e)
+        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e)
       case _: DateTimeException => null
     }
   }
@@ -2870,7 +2870,7 @@ case class MakeTimestamp(
     val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
     val d = Decimal.getClass.getName.stripSuffix("$")
     val failOnErrorBranch = if (failOnError) {
-      "throw QueryExecutionErrors.ansiDateTimeError(e);"
+      "throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
     } else {
       s"${ev.isNull} = true;"
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index e27ce29fc2318..c9ca3ed864c16 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -304,8 +304,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
       start: Int,
       interval: CalendarInterval): Int = {
     if (interval.microseconds != 0) {
-      throw QueryExecutionErrors.ansiIllegalArgumentError(
-        "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date")
+      throw QueryExecutionErrors.invalidIntervalWithMicrosecondsAdditionError()
     }
     val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
     localDateToDays(ld)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index fb39d3c5d7c6b..ba48000f2aeca 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -277,22 +277,20 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
       summary = "")
   }
 
-  def ansiDateTimeError(e: Exception): SparkDateTimeException = {
+  def ansiDateTimeArgumentOutOfRange(e: Exception): SparkDateTimeException = {
     new SparkDateTimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2000",
+      errorClass = "DATETIME_FIELD_OUT_OF_BOUNDS",
       messageParameters = Map(
-        "message" -> e.getMessage,
+        "rangeMessage" -> e.getMessage,
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
       context = Array.empty,
       summary = "")
   }
 
-  def ansiIllegalArgumentError(message: String): SparkIllegalArgumentException = {
+  def invalidIntervalWithMicrosecondsAdditionError(): SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "_LEGACY_ERROR_TEMP_2000",
-      messageParameters = Map(
-        "message" -> message,
-        "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
+      errorClass = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
+      messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
   }
 
   def overflowInSumOfDecimalError(
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 05d68504a7270..5cd974838fa24 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -436,10 +436,8 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
       checkErrorInExpression[SparkIllegalArgumentException](
         DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
-        "_LEGACY_ERROR_TEMP_2000",
-        Map("message" ->
-          "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
-          "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+        "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
+        Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }
 
     withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 96aaf13052b02..790c834d83e97 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -542,10 +542,8 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
     checkError(
       exception = intercept[SparkIllegalArgumentException](
         dateAddInterval(input, new CalendarInterval(36, 47, 1))),
-      condition = "_LEGACY_ERROR_TEMP_2000",
-      parameters = Map(
-        "message" -> "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
-        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+      condition = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
+      parameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
   }
 
   test("timestamp add interval") {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index 67cd23faf2556..aa283d3249617 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -53,10 +53,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
+    "rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
   }
 }
 
@@ -68,10 +69,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
+    "rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
   }
 }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index d75380b16cc83..e3cf1a1549228 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -154,10 +154,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
   }
 }
 
@@ -185,10 +186,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
   }
 }
 
@@ -200,10 +202,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
   }
 }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index 8caf8c54b9f39..d9f4301dd0e8d 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -687,10 +687,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid date 'FEBRUARY 30'"
+    "rangeMessage" : "Invalid date 'FEBRUARY 30'"
   }
 }
 
@@ -702,10 +703,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
+    "rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
   }
 }
 
@@ -717,10 +719,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
+    "rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
   }
 }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 79996d838c1e5..681306ba9f405 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -154,10 +154,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
   }
 }
 
@@ -185,10 +186,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
   }
 }
 
@@ -200,10 +202,11 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
+  "sqlState" : "22023",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
+    "rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
  }
 }
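
For reviewers, a minimal sketch of how the first renamed condition surfaces to users. This is illustrative, not part of the patch: it assumes an existing `SparkSession` named `spark`, and it uses the `SparkThrowable` accessors `getCondition` and `getSqlState` from recent master (on older branches, `getErrorClass` is the equivalent). The condition name, SQLSTATE, and expected field message follow the `error-conditions.json` entry and the `ansi/date.sql.out` golden file above.

```scala
// Illustrative sketch only: trigger DATETIME_FIELD_OUT_OF_BOUNDS, which
// replaces _LEGACY_ERROR_TEMP_2000 for out-of-range datetime fields.
// Assumes a running SparkSession named `spark`.
import org.apache.spark.SparkThrowable

spark.conf.set("spark.sql.ansi.enabled", "true")

try {
  // Month 13 is outside the valid range 1 - 12, so make_date throws under ANSI.
  spark.sql("SELECT make_date(2000, 13, 1)").collect()
} catch {
  case t: SparkThrowable =>
    assert(t.getCondition == "DATETIME_FIELD_OUT_OF_BOUNDS")
    assert(t.getSqlState == "22023")
}
```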
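And a similar sketch for the second renamed condition. `make_interval` is used here because it produces a legacy `CalendarInterval`, which is what routes `DATE + interval` through `DateTimeUtils.dateAddInterval`; that routing is my reading of the analyzer, not something this diff states, so treat the query as an assumption.

```scala
// Illustrative sketch only: trigger INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION
// by adding a CalendarInterval with a non-zero time part to a DATE under ANSI.
// Assumes a running SparkSession named `spark`.
import org.apache.spark.SparkThrowable

spark.conf.set("spark.sql.ansi.enabled", "true")

try {
  // make_interval(years, months, weeks, days, hours, mins, secs):
  // 1 day and 25 hours, mirroring the 25 * MICROS_PER_HOUR case in the suite.
  spark.sql("SELECT DATE'2024-01-01' + make_interval(0, 0, 0, 1, 25, 0, 0)").collect()
} catch {
  case t: SparkThrowable =>
    assert(t.getCondition == "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION")
    assert(t.getSqlState == "22006")
}
```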