17 changes: 12 additions & 5 deletions common/utils/src/main/resources/error/error-conditions.json
@@ -1101,6 +1101,12 @@
],
"sqlState" : "42K03"
},
"DATETIME_FIELD_OUT_OF_BOUNDS" : {
"message" : [
"<rangeMessage>. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22023"
},
"DATETIME_OVERFLOW" : {
"message" : [
"Datetime operation overflow: <operation>."
@@ -2609,6 +2615,12 @@
},
"sqlState" : "22006"
},
"INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION" : {
"message" : [
"Cannot add an interval to a date because its microseconds part is not 0. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22006"
},
"INVALID_INVERSE_DISTRIBUTION_FUNCTION" : {
"message" : [
"Invalid inverse distribution function <funcName>."
@@ -6905,11 +6917,6 @@
"Sinks cannot request distribution and ordering in continuous execution mode."
]
},
"_LEGACY_ERROR_TEMP_2000" : {
"message" : [
"<message>. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2003" : {
"message" : [
"Unsuccessful try to zip maps with <size> unique keys due to exceeding the array size limit <maxRoundedArrayLength>."
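Together, the two new conditions cover both code paths that previously raised _LEGACY_ERROR_TEMP_2000. A minimal sketch of how the first one surfaces under ANSI mode (the session setup and values are illustrative, not part of this PR):

import org.apache.spark.SparkDateTimeException
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[*]")
  .config("spark.sql.ansi.enabled", "true")
  .getOrCreate()

// Month 13 is outside the valid 1-12 range, so ANSI mode now fails with
// DATETIME_FIELD_OUT_OF_BOUNDS rather than the legacy temp condition.
try {
  spark.sql("SELECT make_date(2024, 13, 1)").collect()
} catch {
  case e: SparkDateTimeException =>
    println(e.getErrorClass) // DATETIME_FIELD_OUT_OF_BOUNDS
}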
@@ -2507,14 +2507,14 @@ case class MakeDate(
localDateToDays(ld)
} catch {
case e: java.time.DateTimeException =>
-if (failOnError) throw QueryExecutionErrors.ansiDateTimeError(e) else null
+if (failOnError) throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e) else null
}
}

override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
val failOnErrorBranch = if (failOnError) {
"throw QueryExecutionErrors.ansiDateTimeError(e);"
"throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
} else {
s"${ev.isNull} = true;"
}
@@ -2839,7 +2839,7 @@ case class MakeTimestamp(
} catch {
case e: SparkDateTimeException if failOnError => throw e
case e: DateTimeException if failOnError =>
-throw QueryExecutionErrors.ansiDateTimeError(e)
+throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e)
case _: DateTimeException => null
}
}
@@ -2870,7 +2870,7 @@
val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
val d = Decimal.getClass.getName.stripSuffix("$")
val failOnErrorBranch = if (failOnError) {
"throw QueryExecutionErrors.ansiDateTimeError(e);"
"throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e);"
} else {
s"${ev.isNull} = true;"
}
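The rename keeps MakeDate's and MakeTimestamp's two-mode behavior intact: with failOnError set, an out-of-range field raises the new condition; without it, the expression evaluates to null. A rough expression-level sketch (the literal values are illustrative):

import org.apache.spark.sql.catalyst.expressions.{Literal, MakeDate}

// failOnError = true corresponds to spark.sql.ansi.enabled=true; eval()
// throws a SparkDateTimeException carrying DATETIME_FIELD_OUT_OF_BOUNDS
// because month 13 is invalid.
val strict = MakeDate(Literal(2024), Literal(13), Literal(1), failOnError = true)

// With failOnError = false the same input quietly evaluates to NULL.
val lenient = MakeDate(Literal(2024), Literal(13), Literal(1), failOnError = false)
assert(lenient.eval() == null)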
@@ -304,8 +304,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
start: Int,
interval: CalendarInterval): Int = {
if (interval.microseconds != 0) {
-throw QueryExecutionErrors.ansiIllegalArgumentError(
-  "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date")
+throw QueryExecutionErrors.invalidIntervalWithMicrosecondsAdditionError()
}
val ld = daysToLocalDate(start).plusMonths(interval.months).plusDays(interval.days)
localDateToDays(ld)
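Behavior at this call site is unchanged except for the error condition raised. A small sketch mirroring the updated DateTimeUtilsSuite test further down (epoch day 0 and the interval values are illustrative):

import org.apache.spark.SparkIllegalArgumentException
import org.apache.spark.sql.catalyst.util.DateTimeUtils.dateAddInterval
import org.apache.spark.unsafe.types.CalendarInterval

// Adding 36 months and 47 days to a date is fine...
dateAddInterval(0, new CalendarInterval(36, 47, 0))

// ...but a non-zero microseconds component now raises
// INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION instead of _LEGACY_ERROR_TEMP_2000.
try {
  dateAddInterval(0, new CalendarInterval(36, 47, 1))
} catch {
  case e: SparkIllegalArgumentException =>
    println(e.getErrorClass) // INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION
}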
@@ -277,22 +277,20 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
summary = "")
}

-def ansiDateTimeError(e: Exception): SparkDateTimeException = {
+def ansiDateTimeArgumentOutOfRange(e: Exception): SparkDateTimeException = {
Member:
Changes in this PR clash with #48242 by @itholic.

Contributor Author:
Let's resolve that ticket first, but keep this one open for now, as I am not sure what the complete scope of the other ticket will be, and this PR's goal is to remove _LEGACY_ERROR_TEMP_2000 completely.

Contributor Author:
@MaxGekk I aligned my changes with @itholic's PR, so we can go ahead and merge this change; the other PR can then improve the message, while this one stays scoped to assigning proper error classes.

Member:
sgtm
new SparkDateTimeException(
errorClass = "_LEGACY_ERROR_TEMP_2000",
errorClass = "DATETIME_FIELD_OUT_OF_BOUNDS",
messageParameters = Map(
"message" -> e.getMessage,
"rangeMessage" -> e.getMessage,
"ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
context = Array.empty,
summary = "")
}

-def ansiIllegalArgumentError(message: String): SparkIllegalArgumentException = {
+def invalidIntervalWithMicrosecondsAdditionError(): SparkIllegalArgumentException = {
new SparkIllegalArgumentException(
errorClass = "_LEGACY_ERROR_TEMP_2000",
messageParameters = Map(
"message" -> message,
"ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
errorClass = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
messageParameters = Map("ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
}

def overflowInSumOfDecimalError(
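Both builders are thin wrappers over the new conditions; ansiDateTimeArgumentOutOfRange forwards the java.time exception's message as the <rangeMessage> parameter. A quick sketch of what callers get back (the DateTimeException text mimics the java.time wording seen in the golden files below):

import java.time.DateTimeException
import org.apache.spark.sql.errors.QueryExecutionErrors

// Note: QueryExecutionErrors is private[sql], so this snippet only
// compiles from code inside the org.apache.spark.sql package.
val cause = new DateTimeException(
  "Invalid value for MonthOfYear (valid values 1 - 12): 13")
val err = QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(cause)
assert(err.getErrorClass == "DATETIME_FIELD_OUT_OF_BOUNDS")
assert(err.getMessageParameters.get("rangeMessage") == cause.getMessage)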
@@ -436,10 +436,8 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
checkErrorInExpression[SparkIllegalArgumentException](
DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))),
"_LEGACY_ERROR_TEMP_2000",
Map("message" ->
"Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
"ansiConfig" -> "\"spark.sql.ansi.enabled\""))
"INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
}

withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
@@ -542,10 +542,8 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
checkError(
exception = intercept[SparkIllegalArgumentException](
dateAddInterval(input, new CalendarInterval(36, 47, 1))),
condition = "_LEGACY_ERROR_TEMP_2000",
parameters = Map(
"message" -> "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date",
"ansiConfig" -> "\"spark.sql.ansi.enabled\""))
condition = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
parameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
}

test("timestamp add interval") {
10 changes: 6 additions & 4 deletions sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -53,10 +53,11 @@ struct<>
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
"rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
}
}

@@ -68,10 +69,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
"rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33"
}
}

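The golden file elides the queries themselves in this view; hypothetical reproductions consistent with the two messages above (assuming an active SparkSession named spark) would be:

// Hypothetical reproductions; each fails with DATETIME_FIELD_OUT_OF_BOUNDS
// once ANSI mode is on.
spark.conf.set("spark.sql.ansi.enabled", "true")
spark.sql("SELECT make_date(2000, 13, 1)").collect() // MonthOfYear: 13
spark.sql("SELECT make_date(2000, 1, 33)").collect() // DayOfMonth: 33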
@@ -154,10 +154,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
}
}

@@ -185,10 +186,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
}
}

@@ -200,10 +202,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
}
}

@@ -687,10 +687,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid date 'FEBRUARY 30'"
"rangeMessage" : "Invalid date 'FEBRUARY 30'"
}
}

@@ -702,10 +703,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
"rangeMessage" : "Invalid value for MonthOfYear (valid values 1 - 12): 13"
}
}

@@ -717,10 +719,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
"rangeMessage" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1"
}
}

@@ -154,10 +154,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61"
}
}

@@ -185,10 +186,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99"
}
}

@@ -200,10 +202,11 @@
-- !query output
org.apache.spark.SparkDateTimeException
{
"errorClass" : "_LEGACY_ERROR_TEMP_2000",
"errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS",
"sqlState" : "22023",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
"rangeMessage" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999"
}
}
