 
 package org.apache.spark.sql.hive
 
+import java.net.URI
+
 import org.apache.hadoop.conf.Configuration
 
 import org.apache.spark.SparkConf
@@ -178,4 +180,42 @@ class HiveExternalCatalogSuite extends ExternalCatalogSuite {
     assertThrows[QueryExecutionException](client.runSqlHive(
       "INSERT overwrite directory \"fs://localhost/tmp\" select 1 as a"))
   }
+
+  test("SPARK-31061: alterTable should be able to change table provider") {
+    val catalog = newBasicCatalog()
+    val parquetTable = CatalogTable(
+      identifier = TableIdentifier("parq_tbl", Some("db1")),
+      tableType = CatalogTableType.MANAGED,
+      storage = storageFormat.copy(locationUri = Some(new URI("file:/some/path"))),
+      schema = new StructType().add("col1", "int").add("col2", "string"),
+      provider = Some("parquet"))
+    catalog.createTable(parquetTable, ignoreIfExists = false)
+
+    val rawTable = externalCatalog.getTable("db1", "parq_tbl")
+    assert(rawTable.provider === Some("parquet"))
+
+    val fooTable = parquetTable.copy(provider = Some("foo"))
+    catalog.alterTable(fooTable)
+    val alteredTable = externalCatalog.getTable("db1", "parq_tbl")
+    assert(alteredTable.provider === Some("foo"))
+  }
+
+  test("SPARK-31061: alterTable should be able to change table provider from hive") {
+    val catalog = newBasicCatalog()
+    val hiveTable = CatalogTable(
+      identifier = TableIdentifier("parq_tbl", Some("db1")),
+      tableType = CatalogTableType.MANAGED,
+      storage = storageFormat,
+      schema = new StructType().add("col1", "int").add("col2", "string"),
+      provider = Some("hive"))
+    catalog.createTable(hiveTable, ignoreIfExists = false)
+
+    val rawTable = externalCatalog.getTable("db1", "parq_tbl")
+    assert(rawTable.provider === Some("hive"))
+
+    val fooTable = rawTable.copy(provider = Some("foo"))
+    catalog.alterTable(fooTable)
+    val alteredTable = externalCatalog.getTable("db1", "parq_tbl")
+    assert(alteredTable.provider === Some("foo"))
+  }
 }
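
For reference, a minimal sketch of the behaviour these tests exercise, written outside the test suite. It assumes an already-initialised ExternalCatalog instance named `catalog` (for example a HiveExternalCatalog) whose metastore already contains a database `db1`; the `CatalogStorageFormat.empty`-based storage and the `file:/some/path` location are placeholder values for illustration, not requirements of the API:

import java.net.URI

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.types.StructType

// Hypothetical setup: `catalog` is an ExternalCatalog backed by a metastore
// that already contains the database "db1".
val table = CatalogTable(
  identifier = TableIdentifier("parq_tbl", Some("db1")),
  tableType = CatalogTableType.MANAGED,
  storage = CatalogStorageFormat.empty.copy(locationUri = Some(new URI("file:/some/path"))),
  schema = new StructType().add("col1", "int").add("col2", "string"),
  provider = Some("parquet"))

catalog.createTable(table, ignoreIfExists = false)
assert(catalog.getTable("db1", "parq_tbl").provider === Some("parquet"))

// The behaviour under test: after SPARK-31061, alterTable can change the table's provider.
catalog.alterTable(table.copy(provider = Some("foo")))
assert(catalog.getTable("db1", "parq_tbl").provider === Some("foo"))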