Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
65 commits
Select commit Hold shift + click to select a range
0860f9d
introduce table constraint
gengliangwang Feb 20, 2025
0b222ba
add AlterTableAddConstraintParseSuite and AlterTableDropConstraintPar…
gengliangwang Feb 20, 2025
40d27ae
update error conditions and add tests
gengliangwang Feb 21, 2025
f244c75
rename logical plans
gengliangwang Feb 21, 2025
b60e100
add methods in CatalogV2Util and tests
gengliangwang Feb 22, 2025
d2b7f0a
add DropConstraintSuite
gengliangwang Feb 24, 2025
8264d5a
refactor parse changes
gengliangwang Feb 24, 2025
5c57ce2
fix visitCreateTableClauses
gengliangwang Feb 25, 2025
c3a7c41
rename as AddCheckConstraint
gengliangwang Feb 25, 2025
90e1404
save for now
gengliangwang Feb 27, 2025
6381a24
refactor check constraint
gengliangwang Mar 4, 2025
5eaa069
rename AddConstraint
gengliangwang Mar 4, 2025
a63c1d9
introduce CheckConstraint expr
gengliangwang Mar 4, 2025
0063155
introduce Constraints expression
gengliangwang Mar 4, 2025
af2fa61
add CreateTableConstraintParseSuite
gengliangwang Mar 5, 2025
5d86c93
add new create table api
gengliangwang Mar 5, 2025
a8972ec
add CreateTableConstraintSuite
gengliangwang Mar 5, 2025
ea23855
fix CreateTableConstraintSuite
gengliangwang Mar 5, 2025
3944f60
resolve constraints with a fake project
gengliangwang Mar 5, 2025
6b1a4af
resolve constraint with a default analyzer
gengliangwang Mar 6, 2025
46aa448
improve error message
gengliangwang Mar 6, 2025
18f7c65
move constraint class
gengliangwang Mar 20, 2025
3acca28
remove Constraint.java
gengliangwang Mar 20, 2025
a0ab768
[SPARK-51441][SQL] Add DSv2 APIs for constraints
aokolnychyi Mar 11, 2025
d029a99
Flatten the structure, add builders
aokolnychyi Mar 20, 2025
12fe567
fix compiling
gengliangwang Mar 20, 2025
6c30863
refactor parser and fix tests
gengliangwang Mar 22, 2025
cdf55b5
new syntax; compiling version
gengliangwang Mar 24, 2025
c80e898
getOriginalText
gengliangwang Mar 24, 2025
41dcc23
add more tests for characteristic
gengliangwang Mar 25, 2025
8bc6c32
add tests in CreateTableConstraintParseSuite
gengliangwang Mar 25, 2025
24e3bf7
add tests for ConstraintCharacteristics in CheckConstraintSuite
gengliangwang Mar 25, 2025
252332e
save for now
gengliangwang Mar 25, 2025
462de00
fix create table constraint syntax
gengliangwang Mar 25, 2025
60f2d0b
refactor syntax
gengliangwang Mar 26, 2025
ff3133a
revise syntax; fix test failures
gengliangwang Mar 26, 2025
7a33e57
refactor test code
gengliangwang Mar 26, 2025
1923e1b
remove Expression Constraints
gengliangwang Mar 27, 2025
3c6cac5
rename ConstraintExpression as TableConstraint
gengliangwang Mar 27, 2025
7aea90c
refactor syntax as per standard
gengliangwang Mar 27, 2025
78c3904
remove column check constraint test cases
gengliangwang Mar 27, 2025
20d7cf9
add pk&fk
gengliangwang Mar 27, 2025
af3fb91
parse column constraint
gengliangwang Mar 27, 2025
337c35f
support PK & Unique; add tests
gengliangwang Mar 27, 2025
c8edfd0
create table with FK
gengliangwang Mar 28, 2025
5317f28
refactor alter
gengliangwang Mar 28, 2025
9ada207
add test case for unique
gengliangwang Mar 28, 2025
3451452
add test case for fk
gengliangwang Mar 28, 2025
657fce1
remove legacy CreateTableConstraintSuite.scala
gengliangwang Mar 28, 2025
1b5cc2f
change default value of rely; add PrimaryKeyConstraintSuite
gengliangwang Mar 28, 2025
44bbd78
change check constraint to NORELY
gengliangwang Mar 28, 2025
87d2206
change the valid status of check
gengliangwang Mar 28, 2025
258d10d
disallow enforce in pk/fk/unique
gengliangwang Mar 31, 2025
05bff35
refactor tests
gengliangwang Mar 31, 2025
d1abaf3
add test for unique and fk
gengliangwang Mar 31, 2025
528b6ff
save for now
gengliangwang Apr 1, 2025
d14f5e5
support generated name
gengliangwang Apr 1, 2025
0b84a72
unnamed check constraint
gengliangwang Apr 1, 2025
d3f7df4
simplify test
gengliangwang Apr 1, 2025
7e8273e
support unnamed constraint in replace table;add test cases for replac…
gengliangwang Apr 1, 2025
5cc47a3
handle Nondeterministic check
gengliangwang Apr 1, 2025
675b573
add more tests in CheckConstraintParseSuite
gengliangwang Apr 1, 2025
e51cbd0
fix PrimaryKeyConstraintParseSuite
gengliangwang Apr 1, 2025
aead1b2
fix ForeignKeyConstraintParseSuite
gengliangwang Apr 1, 2025
dc24937
fix UniqueConstraintParseSuite
gengliangwang Apr 2, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 60 additions & 0 deletions common/utils/src/main/resources/error/error-conditions.json
Original file line number Diff line number Diff line change
Expand Up @@ -788,6 +788,20 @@
},
"sqlState" : "XX000"
},
"CONSTRAINT_ALREADY_EXISTS" : {
"message" : [
"Constraint '<constraintName>' already exists. Please delete the old constraint first.",
"Old constraint:",
"<oldConstraint>"
],
"sqlState" : "42710"
},
"CONSTRAINT_DOES_NOT_EXIST" : {
"message" : [
"Cannot drop nonexistent constraint <constraintName> from table <tableName>."
],
"sqlState" : "42704"
},
"CONVERSION_INVALID_INPUT" : {
"message" : [
"The value <str> (<fmt>) cannot be converted to <targetType> because it is malformed. Correct the value as per the syntax, or change its format. Use <suggestion> to tolerate malformed input and return NULL instead."
Expand Down Expand Up @@ -2303,6 +2317,29 @@
],
"sqlState" : "22P03"
},
"INVALID_CHECK_CONSTRAINT": {
"message" : [
"The check constraint expression is invalid."
],
"subClass" : {
"INVALID_V2_PREDICATE" : {
"message" : [
"It cannot be converted to a data source V2 predicate."
]
},
"NONDETERMINISTIC" : {
"message" : [
"It contains nondeterministic expression."
]
},
"MISSING_NAME": {
"message": [
"The check constraint must have a name."
]
}
},
"sqlState": "42621"
},
"INVALID_COLUMN_NAME_AS_PATH" : {
"message" : [
"The datasource <datasource> cannot save the column <columnName> because its name contains some characters that are not allowed in file paths. Please, use an alias to rename it."
Expand All @@ -2328,6 +2365,12 @@
},
"sqlState" : "22022"
},
"INVALID_CONSTRAINT_CHARACTERISTICS": {
"message": [
"Constraint characteristics [<characteristics>] are duplicated or conflict with each other."
],
"sqlState": "42613"
},
"INVALID_CORRUPT_RECORD_TYPE" : {
"message" : [
"The column <columnName> for corrupt records must have the nullable STRING type, but got <actualType>."
Expand Down Expand Up @@ -3884,6 +3927,12 @@
],
"sqlState" : "42P20"
},
"MULTIPLE_PRIMARY_KEYS" : {
"message" : [
"Multiple primary keys are defined. Please ensure that only one primary key is defined for the table."
],
"sqlState" : "42K0E"
},
"MULTIPLE_QUERY_RESULT_CLAUSES_WITH_PIPE_OPERATORS" : {
"message" : [
"<clause1> and <clause2> cannot coexist in the same SQL pipe operator using '|>'. Please separate the multiple result clauses into separate pipe operators and then retry the query again."
Expand Down Expand Up @@ -5452,6 +5501,12 @@
},
"sqlState" : "0A000"
},
"UNSUPPORTED_CONSTRAINT_CHARACTERISTIC": {
"message": [
"Constraint characteristic '<characteristic>' is not supported for constraint type '<constraintType>'."
],
"sqlState": "0A000"
},
"UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY" : {
"message" : [
"Unsupported data source type for direct query on files: <dataSourceType>"
Expand Down Expand Up @@ -5611,6 +5666,11 @@
"Attach a comment to the namespace <namespace>."
]
},
"CONSTRAINT_TYPE" : {
"message" : [
"Constraint <constraint>."
]
},
"CONTINUE_EXCEPTION_HANDLER" : {
"message" : [
"CONTINUE exception handler is not supported. Use EXIT handler."
Expand Down
21 changes: 17 additions & 4 deletions core/src/test/scala/org/apache/spark/SparkFunSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -371,10 +371,17 @@ abstract class SparkFunSuite
"Invalid objectType of a query context Actual:" + actual.toString)
assert(actual.objectName() === expected.objectName,
"Invalid objectName of a query context. Actual:" + actual.toString)
assert(actual.startIndex() === expected.startIndex,
"Invalid startIndex of a query context. Actual:" + actual.toString)
assert(actual.stopIndex() === expected.stopIndex,
"Invalid stopIndex of a query context. Actual:" + actual.toString)
// If startIndex and stopIndex are -1, it means we simply want to check the
// fragment of the query context. This should be the case when the fragment is
// distinguished within the query text.
if (expected.startIndex != -1) {
assert(actual.startIndex() === expected.startIndex,
"Invalid startIndex of a query context. Actual:" + actual.toString)
}
if (expected.stopIndex != -1) {
assert(actual.stopIndex() === expected.stopIndex,
"Invalid stopIndex of a query context. Actual:" + actual.toString)
}
assert(actual.fragment() === expected.fragment,
"Invalid fragment of a query context. Actual:" + actual.toString)
} else if (actual.contextType() == QueryContextType.DataFrame) {
Expand Down Expand Up @@ -478,6 +485,12 @@ abstract class SparkFunSuite
ExpectedContext("", "", start, stop, fragment)
}

/**
 * Builds an `ExpectedContext` that checks only the query fragment, leaving the
 * start/stop indexes unset (-1). Intended for cases where the fragment is
 * distinguished (appears unambiguously) within the query text.
 */
def apply(fragment: String): ExpectedContext =
  ExpectedContext("", "", -1, -1, fragment)

def apply(
objectType: String,
objectName: String,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -222,6 +222,7 @@ DROP: 'DROP';
ELSE: 'ELSE';
ELSEIF: 'ELSEIF';
END: 'END';
ENFORCED: 'ENFORCED';
ESCAPE: 'ESCAPE';
ESCAPED: 'ESCAPED';
EVOLUTION: 'EVOLUTION';
Expand Down Expand Up @@ -290,6 +291,7 @@ ITEMS: 'ITEMS';
ITERATE: 'ITERATE';
JOIN: 'JOIN';
JSON: 'JSON';
KEY: 'KEY';
KEYS: 'KEYS';
LANGUAGE: 'LANGUAGE';
LAST: 'LAST';
Expand Down Expand Up @@ -337,6 +339,8 @@ NOT: 'NOT';
NORELY: 'NORELY';
NOVALIDATE: 'NOVALIDATE';
NULL: 'NULL';
NULLS: 'NULLS';
NUMERIC: 'NUMERIC';
OF: 'OF';
OFFSET: 'OFFSET';
ON: 'ON';
Expand Down Expand Up @@ -376,6 +380,7 @@ RECURSIVE: 'RECURSIVE';
REDUCE: 'REDUCE';
REFERENCES: 'REFERENCES';
REFRESH: 'REFRESH';
RELY: 'RELY';
RENAME: 'RENAME';
REPAIR: 'REPAIR';
REPEAT: 'REPEAT';
Expand Down Expand Up @@ -475,6 +480,7 @@ UPDATE: 'UPDATE';
USE: 'USE';
USER: 'USER';
USING: 'USING';
VALIDATE: 'VALIDATE';
VALUE: 'VALUE';
VALUES: 'VALUES';
VARCHAR: 'VARCHAR';
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ statement
(RESTRICT | CASCADE)? #dropNamespace
| SHOW namespaces ((FROM | IN) multipartIdentifier)?
(LIKE? pattern=stringLit)? #showNamespaces
| createTableHeader (LEFT_PAREN colDefinitionList RIGHT_PAREN)? tableProvider?
| createTableHeader (LEFT_PAREN tableElementList RIGHT_PAREN)? tableProvider?
createTableClauses
(AS? query)? #createTable
| CREATE TABLE (IF errorCapturingNot EXISTS)? target=tableIdentifier
Expand All @@ -208,7 +208,7 @@ statement
createFileFormat |
locationSpec |
(TBLPROPERTIES tableProps=propertyList))* #createTableLike
| replaceTableHeader (LEFT_PAREN colDefinitionList RIGHT_PAREN)? tableProvider?
| replaceTableHeader (LEFT_PAREN tableElementList RIGHT_PAREN)? tableProvider?
createTableClauses
(AS? query)? #replaceTable
| ANALYZE TABLE identifierReference partitionSpec? COMPUTE STATISTICS
Expand Down Expand Up @@ -261,6 +261,10 @@ statement
| ALTER TABLE identifierReference
(clusterBySpec | CLUSTER BY NONE) #alterClusterBy
| ALTER TABLE identifierReference collationSpec #alterTableCollation
| ALTER TABLE identifierReference ADD tableConstraintDefinition #addTableConstraint
| ALTER TABLE identifierReference
DROP CONSTRAINT (IF EXISTS)? name=identifier
(RESTRICT | CASCADE)? #dropTableConstraint
| DROP TABLE (IF EXISTS)? identifierReference PURGE? #dropTable
| DROP VIEW (IF EXISTS)? identifierReference #dropView
| CREATE (OR REPLACE)? (GLOBAL? TEMPORARY)?
Expand Down Expand Up @@ -1334,6 +1338,15 @@ colType
: colName=errorCapturingIdentifier dataType (errorCapturingNot NULL)? commentSpec?
;

// Comma-separated list of table elements inside the parentheses of a
// CREATE TABLE / REPLACE TABLE statement.
tableElementList
: tableElement (COMMA tableElement)*
;

// A single element of a table definition: either a table-level constraint
// definition or a column definition.
tableElement
: tableConstraintDefinition
| colDefinition
;

colDefinitionList
: colDefinition (COMMA colDefinition)*
;
Expand All @@ -1347,6 +1360,7 @@ colDefinitionOption
| defaultExpression
| generationExpression
| commentSpec
| columnConstraintDefinition
;

generationExpression
Expand Down Expand Up @@ -1516,6 +1530,62 @@ number
| MINUS? BIGDECIMAL_LITERAL #bigDecimalLiteral
;

// A column-level constraint: an optional explicit name (CONSTRAINT <name>)
// followed by the constraint itself and zero or more characteristics.
columnConstraintDefinition
: (CONSTRAINT name=errorCapturingIdentifier)? columnConstraint constraintCharacteristic*
;

// Constraints allowed directly on a column definition:
// CHECK (...), UNIQUE / PRIMARY KEY, or a REFERENCES clause.
columnConstraint
: checkConstraint
| uniqueSpec
| referenceSpec
;

// A table-level constraint: an optional explicit name (CONSTRAINT <name>)
// followed by the constraint itself and zero or more characteristics.
tableConstraintDefinition
: (CONSTRAINT name=errorCapturingIdentifier)? tableConstraint constraintCharacteristic*
;

// Constraints allowed at table level: CHECK (...), UNIQUE/PRIMARY KEY over a
// column list, or a FOREIGN KEY constraint.
tableConstraint
: checkConstraint
| uniqueConstraint
| foreignKeyConstraint
;

// CHECK constraint over an arbitrary boolean expression.
checkConstraint
: CHECK LEFT_PAREN (expr=booleanExpression) RIGHT_PAREN
;

// Keyword form shared by unique constraints: UNIQUE or PRIMARY KEY.
uniqueSpec
: UNIQUE
| PRIMARY KEY
;

// Table-level UNIQUE / PRIMARY KEY constraint over a list of columns.
uniqueConstraint
: uniqueSpec identifierList
;

// Reference to a parent table, with an optional list of parent columns.
referenceSpec
: REFERENCES multipartIdentifier (parentColumns=identifierList)?
;

// Table-level FOREIGN KEY constraint: referencing columns plus the parent
// table reference.
foreignKeyConstraint
: FOREIGN KEY identifierList referenceSpec
;

// A constraint characteristic: either an enforcement clause or a RELY clause.
constraintCharacteristic
: enforcedCharacteristic
| relyCharacteristic
;

// Whether the constraint is enforced by the engine: ENFORCED or NOT ENFORCED.
enforcedCharacteristic
: ENFORCED
| NOT ENFORCED
;

// Whether the optimizer may rely on the constraint: RELY or NORELY.
relyCharacteristic
: RELY
| NORELY
;

alterColumnSpecList
: alterColumnSpec (COMMA alterColumnSpec)*
;
Expand Down Expand Up @@ -1673,6 +1743,7 @@ ansiNonReserved
| DOUBLE
| DROP
| ELSEIF
| ENFORCED
| ESCAPED
| EVOLUTION
| EXCHANGE
Expand Down Expand Up @@ -1761,6 +1832,8 @@ ansiNonReserved
| NANOSECONDS
| NO
| NONE
| NORELY
| NOVALIDATE
| NULLS
| NUMERIC
| OF
Expand Down Expand Up @@ -1792,6 +1865,7 @@ ansiNonReserved
| RECOVER
| REDUCE
| REFRESH
| RELY
| RENAME
| REPAIR
| REPEAT
Expand Down Expand Up @@ -1875,6 +1949,7 @@ ansiNonReserved
| UNTIL
| UPDATE
| USE
| VALIDATE
| VALUE
| VALUES
| VARCHAR
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -791,4 +791,24 @@ private[sql] object QueryParsingErrors extends DataTypeErrorsBase {
/**
 * Parse error for specifying CLUSTER BY together with bucketing in the same
 * table definition, which is not allowed.
 */
def clusterByWithBucketing(ctx: ParserRuleContext): Throwable = {
new ParseException(errorClass = "SPECIFY_CLUSTER_BY_WITH_BUCKETING_IS_NOT_ALLOWED", ctx)
}

/**
 * Parse error for constraint characteristics (e.g. ENFORCED, RELY) that are
 * duplicated or conflict with each other in a single constraint definition.
 *
 * @param ctx the parser rule context used to locate the error in the query
 * @param characteristics the offending characteristics, rendered as a string
 */
def invalidConstraintCharacteristics(
    ctx: ParserRuleContext,
    characteristics: String): Throwable = {
  val messageParams = Map("characteristics" -> characteristics)
  new ParseException(
    errorClass = "INVALID_CONSTRAINT_CHARACTERISTICS",
    messageParameters = messageParams,
    ctx)
}

/**
 * Parse error for a constraint type that is not supported.
 *
 * @param ctx the parser rule context used to locate the error in the query
 * @param constraint a description of the unsupported constraint
 */
def constraintNotSupportedError(ctx: ParserRuleContext, constraint: String): Throwable = {
  val messageParams = Map("constraint" -> constraint)
  new ParseException(
    errorClass = "UNSUPPORTED_FEATURE.CONSTRAINT_TYPE",
    messageParameters = messageParams,
    ctx)
}

/**
 * Parse error for a table definition that declares more than one PRIMARY KEY.
 *
 * @param ctx the parser rule context used to locate the error in the query
 */
def multiplePrimaryKeysError(ctx: ParserRuleContext): Throwable =
  new ParseException(errorClass = "MULTIPLE_PRIMARY_KEYS", ctx)
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.spark.sql.connector.catalog;

import org.apache.spark.annotation.Evolving;
import org.apache.spark.sql.connector.catalog.constraints.Constraint;
import org.apache.spark.sql.connector.expressions.Transform;
import org.apache.spark.sql.types.StructType;

Expand Down Expand Up @@ -83,4 +84,9 @@ default Map<String, String> properties() {
* Returns the set of capabilities for this table.
*/
Set<TableCapability> capabilities();

/**
 * Returns the constraints for this table.
 * <p>
 * The default implementation returns an empty array, i.e. the table reports
 * no constraints.
 *
 * @return the constraints defined on this table; empty by default
 */
default Constraint[] constraints() { return new Constraint[0]; }
}
Loading
Loading