
Commit 12796fb

Add case sensitivity doc to TableCatalog, rename methods in TableChange.
1 parent 06713bc commit 12796fb

3 files changed (+20, -11 lines)

sql/catalyst/src/main/java/org/apache/spark/sql/catalog/v2/TableCatalog.java

Lines changed: 6 additions & 0 deletions
@@ -28,6 +28,12 @@
 
 /**
  * Catalog methods for working with Tables.
+ * <p>
+ * TableCatalog implementations may be case sensitive or case insensitive. Spark will pass
+ * {@link Identifier table identifiers} without modification. Field names passed to
+ * {@link #alterTable(Identifier, TableChange...)} will be normalized to match the case used in the
+ * table schema when updating, renaming, or dropping existing columns when catalyst analysis is case
+ * insensitive.
  */
 public interface TableCatalog extends CatalogPlugin {
   /**
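
To make the documented contract concrete, here is a minimal, hypothetical sketch (the class, method, and column names are illustrative and not part of this change): assuming a table whose schema declares a column named "Id", and case-insensitive catalyst analysis, the catalog receives the field name in the schema's casing while the identifier is passed through unmodified.

import org.apache.spark.sql.catalog.v2.Identifier;
import org.apache.spark.sql.catalog.v2.TableCatalog;
import org.apache.spark.sql.catalog.v2.TableChange;
import org.apache.spark.sql.types.DataTypes;

// Illustrative helper only; "Id" is an assumed column name in the table's schema.
class CaseSensitivityExample {
  static void widenIdColumn(TableCatalog catalog, Identifier ident) throws Exception {
    // The identifier is forwarded to the catalog without modification.
    // The field name has already been normalized to the schema's casing ("Id"),
    // even if the user wrote "id" under case-insensitive analysis.
    catalog.alterTable(
        ident,
        TableChange.updateColumnType(new String[] {"Id"}, DataTypes.LongType));
  }
}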

sql/catalyst/src/main/java/org/apache/spark/sql/catalog/v2/TableChange.java

Lines changed: 6 additions & 3 deletions
@@ -138,7 +138,7 @@ static TableChange renameColumn(String[] fieldNames, String newName) {
    * @param newDataType the new data type
    * @return a TableChange for the update
    */
-  static TableChange updateColumn(String[] fieldNames, DataType newDataType) {
+  static TableChange updateColumnType(String[] fieldNames, DataType newDataType) {
     return new UpdateColumnType(fieldNames, newDataType, true);
   }
 
@@ -153,7 +153,10 @@ static TableChange updateColumn(String[] fieldNames, DataType newDataType) {
    * @param newDataType the new data type
    * @return a TableChange for the update
    */
-  static TableChange updateColumn(String[] fieldNames, DataType newDataType, boolean isNullable) {
+  static TableChange updateColumnType(
+      String[] fieldNames,
+      DataType newDataType,
+      boolean isNullable) {
     return new UpdateColumnType(fieldNames, newDataType, isNullable);
   }
 
@@ -168,7 +171,7 @@ static TableChange updateColumn(String[] fieldNames, DataType newDataType, boole
    * @param newComment the new comment
    * @return a TableChange for the update
    */
-  static TableChange updateComment(String[] fieldNames, String newComment) {
+  static TableChange updateColumnComment(String[] fieldNames, String newComment) {
     return new UpdateColumnComment(fieldNames, newComment);
   }
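
For reference, a short sketch of the renamed factory methods in use after this change (the class name, field names, types, and comment text below are illustrative): updateColumn becomes updateColumnType and updateComment becomes updateColumnComment, while the UpdateColumnType and UpdateColumnComment change classes are untouched.

import org.apache.spark.sql.catalog.v2.TableChange;
import org.apache.spark.sql.types.DataTypes;

class RenamedFactoriesExample {
  static TableChange[] sampleChanges() {
    return new TableChange[] {
      // Formerly updateColumn(fieldNames, newDataType); keeps the column nullable.
      TableChange.updateColumnType(new String[] {"id"}, DataTypes.LongType),
      // Formerly updateColumn(fieldNames, newDataType, isNullable).
      TableChange.updateColumnType(new String[] {"id"}, DataTypes.LongType, false),
      // Formerly updateComment(fieldNames, newComment).
      TableChange.updateColumnComment(new String[] {"id"}, "primary key")
    };
  }
}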

sql/catalyst/src/test/scala/org/apache/spark/sql/catalog/v2/TableCatalogSuite.scala

Lines changed: 8 additions & 8 deletions
@@ -359,7 +359,7 @@ class TableCatalogSuite extends SparkFunSuite {
 
     assert(table.schema == schema)
 
-    val updated = catalog.alterTable(testIdent, TableChange.updateColumn(Array("id"), LongType))
+    val updated = catalog.alterTable(testIdent, TableChange.updateColumnType(Array("id"), LongType))
 
     val expectedSchema = new StructType().add("id", LongType).add("data", StringType)
     assert(updated.schema == expectedSchema)
@@ -376,7 +376,7 @@
     assert(table.schema == originalSchema)
 
     val updated = catalog.alterTable(testIdent,
-      TableChange.updateColumn(Array("id"), LongType, true))
+      TableChange.updateColumnType(Array("id"), LongType, true))
 
     val expectedSchema = new StructType().add("id", LongType).add("data", StringType)
     assert(updated.schema == expectedSchema)
@@ -390,7 +390,7 @@
     assert(table.schema == schema)
 
     val exc = intercept[IllegalArgumentException] {
-      catalog.alterTable(testIdent, TableChange.updateColumn(Array("id"), LongType, false))
+      catalog.alterTable(testIdent, TableChange.updateColumnType(Array("id"), LongType, false))
     }
 
     assert(exc.getMessage.contains("Cannot change optional column to required"))
@@ -406,7 +406,7 @@
 
     val exc = intercept[IllegalArgumentException] {
       catalog.alterTable(testIdent,
-        TableChange.updateColumn(Array("missing_col"), LongType))
+        TableChange.updateColumnType(Array("missing_col"), LongType))
     }
 
     assert(exc.getMessage.contains("missing_col"))
@@ -421,7 +421,7 @@
     assert(table.schema == schema)
 
     val updated = catalog.alterTable(testIdent,
-      TableChange.updateComment(Array("id"), "comment text"))
+      TableChange.updateColumnComment(Array("id"), "comment text"))
 
     val expectedSchema = new StructType()
       .add("id", IntegerType, nullable = true, "comment text")
@@ -436,14 +436,14 @@
 
     assert(table.schema == schema)
 
-    catalog.alterTable(testIdent, TableChange.updateComment(Array("id"), "comment text"))
+    catalog.alterTable(testIdent, TableChange.updateColumnComment(Array("id"), "comment text"))
 
     val expectedSchema = new StructType()
       .add("id", IntegerType, nullable = true, "replacement comment")
       .add("data", StringType)
 
     val updated = catalog.alterTable(testIdent,
-      TableChange.updateComment(Array("id"), "replacement comment"))
+      TableChange.updateColumnComment(Array("id"), "replacement comment"))
 
     assert(updated.schema == expectedSchema)
   }
@@ -457,7 +457,7 @@
 
     val exc = intercept[IllegalArgumentException] {
       catalog.alterTable(testIdent,
-        TableChange.updateComment(Array("missing_col"), "comment"))
+        TableChange.updateColumnComment(Array("missing_col"), "comment"))
     }
 
     assert(exc.getMessage.contains("missing_col"))
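
The updated tests exercise the renamed methods one change at a time; since alterTable is declared with TableChange varargs, multiple changes can also be applied in a single call. A brief hypothetical Java sketch (class, identifier, column name, and comment text are illustrative):

import org.apache.spark.sql.catalog.v2.Identifier;
import org.apache.spark.sql.catalog.v2.TableCatalog;
import org.apache.spark.sql.catalog.v2.TableChange;
import org.apache.spark.sql.types.DataTypes;

class CombinedAlterExample {
  static void widenAndDocument(TableCatalog catalog, Identifier ident) throws Exception {
    // Apply a type change and a comment change to the same column in one alterTable call.
    catalog.alterTable(
        ident,
        TableChange.updateColumnType(new String[] {"id"}, DataTypes.LongType),
        TableChange.updateColumnComment(new String[] {"id"}, "widened to bigint"));
  }
}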
