Skip to content

Commit 81a5eec

Browse files
committed
Throw an exception when the table location is not consistent
1 parent 43a949c commit 81a5eec

File tree

3 files changed

+16
-22
lines changed

3 files changed

+16
-22
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -545,10 +545,13 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
545545
}
546546

547547
private def getLocationFromStorageProps(table: CatalogTable): Option[String] = {
548-
if (conf.get(HiveUtils.FOLLOW_TABLE_LOCATION)) {
549-
table.storage.locationUri.map(_.toString)
548+
val storageLoc = table.storage.locationUri.map(_.toString)
549+
val storageProp = CaseInsensitiveMap(table.storage.properties).get("path")
550+
if (storageLoc.equals(storageProp)) {
551+
storageProp
550552
} else {
551-
CaseInsensitiveMap(table.storage.properties).get("path")
553+
throw new AnalysisException(s"path in location ${storageLoc} " +
554+
s"not equal to table prop path ${storageProp}, please use alter table in spark")
552555
}
553556
}
554557

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -165,13 +165,6 @@ private[spark] object HiveUtils extends Logging {
165165
.booleanConf
166166
.createWithDefault(true)
167167

168-
val FOLLOW_TABLE_LOCATION =
169-
buildStaticConf("spark.sql.hive.follow.table.location")
170-
.doc("If prefer hive table location to compatible with legacy `path` in catalog")
171-
.version("3.1.0")
172-
.booleanConf
173-
.createWithDefault(false)
174-
175168
/**
176169
* The version of the hive client that will be used to communicate with the metastore. Note that
177170
* this does not necessarily need to be the same version of Hive that is used internally by

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogSuite.scala

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import java.net.URI
2222
import org.apache.hadoop.conf.Configuration
2323

2424
import org.apache.spark.SparkConf
25+
import org.apache.spark.sql.AnalysisException
2526
import org.apache.spark.sql.catalyst.TableIdentifier
2627
import org.apache.spark.sql.catalyst.catalog._
2728
import org.apache.spark.sql.execution.QueryExecutionException
@@ -228,19 +229,16 @@ class HiveExternalCatalogSuite extends ExternalCatalogSuite {
228229
schema = new StructType().add("col1", "int"),
229230
provider = Some("parquet"))
230231
catalog.createTable(hiveTable, ignoreIfExists = false)
232+
val beforeAlterTable = externalCatalog.getTable("db1", "parq_alter")
233+
assert(beforeAlterTable.storage.locationUri.toString.contains("parq_alter"))
234+
231235
externalCatalog.client.runSqlHive(
232236
"alter table db1.parq_alter rename to db1.parq_alter2")
233-
val noFollowTable = externalCatalog.getTable("db1", "parq_alter2")
234-
assert(!noFollowTable.storage.locationUri.toString.contains("parq_alter2"))
235-
236-
val confField = classOf[HiveExternalCatalog].getDeclaredField("conf")
237-
confField.setAccessible(true)
238-
val sparkConf = confField.get(externalCatalog).asInstanceOf[SparkConf]
239-
sparkConf.set("spark.sql.hive.follow.table.location", "true")
240-
val followTable = externalCatalog.getTable("db1", "parq_alter2")
241-
assert(followTable.storage.locationUri.toString.contains("parq_alter2"))
242-
243-
sparkConf.set("spark.sql.hive.follow.table.location", "false")
244-
confField.setAccessible(false)
237+
238+
val e = intercept[AnalysisException](
239+
externalCatalog.getTable("db1", "parq_alter2")
240+
)
241+
assert(e.getMessage.contains("not equal to table prop path")
242+
&& e.getMessage.contains("parq_alter2"))
245243
}
246244
}

0 commit comments

Comments (0)