Skip to content

Commit 2ce35b1

Browse files
committed
Yin's comments
1 parent cae4413 commit 2ce35b1

File tree

2 files changed

+9
-6
lines changed

2 files changed

+9
-6
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -323,13 +323,12 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
323323

324324
// TODO: Support persisting partitioned data source relations in Hive compatible format
325325
val qualifiedTableName = tableIdent.quotedString
326-
val skipHiveMetadata = options.getOrElse("skip_hive_metadata", "false").toBoolean
326+
val skipHiveMetadata = options.getOrElse("skipHiveMetadata", "false").toBoolean
327327
val (hiveCompatibleTable, logMessage) = (maybeSerDe, dataSource.relation) match {
328-
case (Some(serde), relation: HadoopFsRelation) if skipHiveMetadata =>
328+
case _ if skipHiveMetadata =>
329329
val message =
330330
s"Persisting partitioned data source relation $qualifiedTableName into " +
331-
"Hive metastore in Spark SQL specific format, which is NOT compatible with Hive. " +
332-
"Input path(s): " + relation.paths.mkString("\n", "\n", "")
331+
"Hive metastore in Spark SQL specific format, which is NOT compatible with Hive."
333332
(None, message)
334333

335334
case (Some(serde), relation: HadoopFsRelation)

sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -857,9 +857,11 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
857857
partitionColumns = Array.empty[String],
858858
bucketSpec = None,
859859
provider = "parquet",
860-
options = Map("path" -> "just a dummy path", "skip_hive_metadata" -> "false"),
860+
options = Map("path" -> "just a dummy path", "skipHiveMetadata" -> "false"),
861861
isExternal = false)
862862

863+
// As a proxy for verifying that the table was stored in Hive-compatible format, we verify that
864+
// each column of the table is of the native type StringType.
863865
assert(catalog.client.getTable("default", "not_skip_hive_metadata").schema
864866
.forall(column => HiveMetastoreTypes.toDataType(column.hiveType) == StringType))
865867

@@ -869,9 +871,11 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
869871
partitionColumns = Array.empty[String],
870872
bucketSpec = None,
871873
provider = "parquet",
872-
options = Map("path" -> "just a dummy path", "skip_hive_metadata" -> "true"),
874+
options = Map("path" -> "just a dummy path", "skipHiveMetadata" -> "true"),
873875
isExternal = false)
874876

877+
// As a proxy for verifying that the table was stored in the Spark SQL specific format, we verify that
878+
// each column of the table is of type ArrayType(StringType).
875879
assert(catalog.client.getTable("default", "skip_hive_metadata").schema
876880
.forall(column => HiveMetastoreTypes.toDataType(column.hiveType) == ArrayType(StringType)))
877881
}

0 commit comments

Comments
 (0)