27 changes: 12 additions & 15 deletions common/utils/src/main/resources/error/error-conditions.json
@@ -1,4 +1,10 @@
 {
+  "ADD_DEFAULT_UNSUPPORTED" : {
+    "message" : [
+      "Failed to execute <statementType> command because DEFAULT values are not supported when adding new columns to previously existing target data source with table provider: \"<dataSource>\"."
+    ],
+    "sqlState" : "42623"
+  },
   "AGGREGATE_FUNCTION_WITH_NONDETERMINISTIC_EXPRESSION" : {
     "message" : [
       "Non-deterministic expression <sqlExpr> should not appear in the arguments of an aggregate function."
@@ -1090,6 +1096,12 @@
     ],
     "sqlState" : "42608"
   },
+  "DEFAULT_UNSUPPORTED" : {
+    "message" : [
+      "Failed to execute <statementType> command because DEFAULT values are not supported for target data source with table provider: \"<dataSource>\"."
+    ],
+    "sqlState" : "42623"
+  },
   "DISTINCT_WINDOW_FUNCTION_UNSUPPORTED" : {
     "message" : [
       "Distinct window functions are not supported: <windowExpr>."
@@ -6626,21 +6638,6 @@
       "Sinks cannot request distribution and ordering in continuous execution mode."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1344" : {
-    "message" : [
-      "Invalid DEFAULT value for column <fieldName>: <defaultValue> fails to parse as a valid literal value."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1345" : {
-    "message" : [
-      "Failed to execute <statementType> command because DEFAULT values are not supported for target data source with table provider: \"<dataSource>\"."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1346" : {
-    "message" : [
-      "Failed to execute <statementType> command because DEFAULT values are not supported when adding new columns to previously existing target data source with table provider: \"<dataSource>\"."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2000" : {
     "message" : [
       "<message>. If necessary set <ansiConfig> to false to bypass this error."
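Both new entries reuse SQLSTATE 42623 and take over from _LEGACY_ERROR_TEMP_1345 and _LEGACY_ERROR_TEMP_1346, which are deleted further down in the same file. A minimal sketch of how the conditions surface, assuming a SparkSession named spark and a configuration under which the provider disallows DEFAULT values, as arranged by the InsertSuite changes below:

    // Raises AnalysisException with condition DEFAULT_UNSUPPORTED when the
    // target provider does not support DEFAULT values at all:
    spark.sql("create table t(a string default 'abc') using parquet")

    // Raises ADD_DEFAULT_UNSUPPORTED when the provider accepts DEFAULT at
    // CREATE time but is denylisted for adding new columns with defaults:
    spark.sql("alter table t1 add column (b string default 'abc')")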
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.util
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.apache.spark.{SparkThrowable, SparkUnsupportedOperationException}
+import org.apache.spark.{SparkException, SparkThrowable, SparkUnsupportedOperationException}
 import org.apache.spark.internal.{Logging, MDC}
 import org.apache.spark.internal.LogKeys._
 import org.apache.spark.sql.AnalysisException
@@ -412,8 +412,11 @@ object ResolveDefaultColumns extends QueryErrorsBase
           case _: ExprLiteral | _: Cast => expr
         }
       } catch {
-        case _: AnalysisException | _: MatchError =>
-          throw QueryCompilationErrors.failedToParseExistenceDefaultAsLiteral(field.name, text)
+        // AnalysisException thrown from analyze is already formatted, throw it directly.
+        case ae: AnalysisException => throw ae
+        case _: MatchError =>
+          throw SparkException.internalError(s"parse existence default as literal err," +
+            s" field name: ${field.name}, value: $text")
       }
       // The expression should be a literal value by this point, possibly wrapped in a cast
       // function. This is enforced by the execution of commands that assign default values.
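The rewritten catch block separates user-facing failures from Spark bugs: an AnalysisException raised by analyze is already a fully formatted error and is rethrown untouched, while a MatchError (the analyzed default is neither a literal nor a cast) is reported as an internal error. A small sketch of the resulting behavior, assuming SparkException.internalError wraps its message in the INTERNAL_ERROR condition as it does elsewhere in Spark:

    import org.apache.spark.SparkException

    val e = SparkException.internalError(
      "parse existence default as literal err, field name: c1, value: invalid")
    // Internal errors signal a bug in Spark itself, not a mistake in user SQL:
    assert(e.getErrorClass == "INTERNAL_ERROR")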
@@ -3516,29 +3516,21 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
         "cond" -> toSQLExpr(cond)))
   }
 
-  def failedToParseExistenceDefaultAsLiteral(fieldName: String, defaultValue: String): Throwable = {
-    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1344",
-      messageParameters = Map(
-        "fieldName" -> fieldName,
-        "defaultValue" -> defaultValue))
-  }
-
   def defaultReferencesNotAllowedInDataSource(
       statementType: String, dataSource: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1345",
+      errorClass = "DEFAULT_UNSUPPORTED",
       messageParameters = Map(
-        "statementType" -> statementType,
+        "statementType" -> toSQLStmt(statementType),
         "dataSource" -> dataSource))
   }
 
   def addNewDefaultColumnToExistingTableNotAllowed(
       statementType: String, dataSource: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1346",
+      errorClass = "ADD_DEFAULT_UNSUPPORTED",
       messageParameters = Map(
-        "statementType" -> statementType,
+        "statementType" -> toSQLStmt(statementType),
         "dataSource" -> dataSource))
   }
 
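Besides swapping the legacy codes for named conditions, both factory methods now pass statementType through toSQLStmt, the QueryErrorsBase helper that normalizes statement names for error messages. A sketch of the observable effect, under the assumption that the helper upper-cases its input (the definition below is a hypothetical stand-in, not the real one):

    import java.util.Locale

    // Stand-in for QueryErrorsBase.toSQLStmt, assuming it upper-cases:
    def toSQLStmt(text: String): String = text.toUpperCase(Locale.ROOT)

    assert(toSQLStmt("alter table add columns") == "ALTER TABLE ADD COLUMNS")

This keeps the rendered <statementType> consistent with the uppercase values the updated tests assert, such as "CREATE TABLE" and "ALTER TABLE ADD COLUMNS".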
@@ -564,7 +564,6 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper {
         .putString(ResolveDefaultColumns.EXISTS_DEFAULT_COLUMN_METADATA_KEY, "1 + 1")
         .putString(ResolveDefaultColumns.CURRENT_DEFAULT_COLUMN_METADATA_KEY, "1 + 1")
         .build())))
-    val error = "fails to parse as a valid literal value"
     assert(ResolveDefaultColumns.existenceDefaultValues(source2).length == 1)
     assert(ResolveDefaultColumns.existenceDefaultValues(source2)(0) == 2)
 
@@ -576,9 +575,13 @@
         .putString(ResolveDefaultColumns.EXISTS_DEFAULT_COLUMN_METADATA_KEY, "invalid")
         .putString(ResolveDefaultColumns.CURRENT_DEFAULT_COLUMN_METADATA_KEY, "invalid")
         .build())))
-    assert(intercept[AnalysisException] {
-      ResolveDefaultColumns.existenceDefaultValues(source3)
-    }.getMessage.contains(error))
+
+    checkError(
+      exception = intercept[AnalysisException] {
+        ResolveDefaultColumns.existenceDefaultValues(source3)
+      },
+      condition = "INVALID_DEFAULT_VALUE.UNRESOLVED_EXPRESSION",
+      parameters = Map("statement" -> "", "colName" -> "`c1`", "defaultValue" -> "invalid"))
 
     // Negative test: StructType.defaultValues fails because the existence default value fails to
     // resolve.
@@ -592,9 +595,15 @@
           ResolveDefaultColumns.CURRENT_DEFAULT_COLUMN_METADATA_KEY,
           "(SELECT 'abc' FROM missingtable)")
         .build())))
-    assert(intercept[AnalysisException] {
-      ResolveDefaultColumns.existenceDefaultValues(source4)
-    }.getMessage.contains(error))
+
+    checkError(
+      exception = intercept[AnalysisException] {
+        ResolveDefaultColumns.existenceDefaultValues(source4)
+      },
+      condition = "INVALID_DEFAULT_VALUE.SUBQUERY_EXPRESSION",
+      parameters = Map("statement" -> "",
+        "colName" -> "`c1`",
+        "defaultValue" -> "(SELECT 'abc' FROM missingtable)"))
   }
 
   test("SPARK-46629: Test STRUCT DDL with NOT NULL round trip") {
@@ -1998,15 +1998,15 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         sql(s"create table t(a string default 'abc') using parquet")
       },
-      condition = "_LEGACY_ERROR_TEMP_1345",
+      condition = "DEFAULT_UNSUPPORTED",
       parameters = Map("statementType" -> "CREATE TABLE", "dataSource" -> "parquet"))
     withTable("t") {
       sql(s"create table t(a string, b int) using parquet")
       checkError(
         exception = intercept[AnalysisException] {
           sql("alter table t add column s bigint default 42")
         },
-        condition = "_LEGACY_ERROR_TEMP_1345",
+        condition = "DEFAULT_UNSUPPORTED",
         parameters = Map(
           "statementType" -> "ALTER TABLE ADD COLUMNS",
           "dataSource" -> "parquet"))
@@ -2314,7 +2314,7 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
           // provider is now in the denylist.
           sql(s"alter table t1 add column (b string default 'abc')")
         },
-        condition = "_LEGACY_ERROR_TEMP_1346",
+        condition = "ADD_DEFAULT_UNSUPPORTED",
         parameters = Map(
           "statementType" -> "ALTER TABLE ADD COLUMNS",
           "dataSource" -> provider))
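Unlike the _LEGACY_ERROR_TEMP_* codes, named conditions are intended as stable identifiers, so callers can match on them instead of on message text. A sketch of programmatic handling, assuming a SparkSession named spark and a provider configuration under which parquet rejects DEFAULT values, as arranged in the tests above:

    import org.apache.spark.sql.AnalysisException

    try {
      spark.sql("alter table t add column s bigint default 42")
    } catch {
      case e: AnalysisException if e.getErrorClass == "DEFAULT_UNSUPPORTED" =>
        // Fall back to adding the column without a DEFAULT clause.
        spark.sql("alter table t add column s bigint")
    }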