@@ -857,9 +857,11 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
       partitionColumns = Array.empty[String],
       bucketSpec = None,
       provider = "parquet",
-      options = Map("path" -> "just a dummy path", "skip_hive_metadata" -> "false"),
+      options = Map("path" -> "just a dummy path", "skipHiveMetadata" -> "false"),
       isExternal = false)
 
+    // As a proxy for verifying that the table was stored in Hive compatible format, we verify that
+    // each column of the table is of native type StringType.
     assert(catalog.client.getTable("default", "not_skip_hive_metadata").schema
       .forall(column => HiveMetastoreTypes.toDataType(column.hiveType) == StringType))
 
@@ -869,9 +871,11 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
       partitionColumns = Array.empty[String],
       bucketSpec = None,
       provider = "parquet",
-      options = Map("path" -> "just a dummy path", "skip_hive_metadata" -> "true"),
+      options = Map("path" -> "just a dummy path", "skipHiveMetadata" -> "true"),
       isExternal = false)
 
+    // As a proxy for verifying that the table was stored in SparkSQL format, we verify that
+    // the table has a column type as array of StringType.
     assert(catalog.client.getTable("default", "skip_hive_metadata").schema
       .forall(column => HiveMetastoreTypes.toDataType(column.hiveType) == ArrayType(StringType)))
   }
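
For context, the renamed `skipHiveMetadata` option is what both assertions above exercise: when it is false, the table is registered in the metastore in a Hive-compatible layout with native column types, and when it is true, Spark persists the table in its own format and Hive sees only a placeholder schema. Below is a minimal sketch of how such a flag could be read from a table's options map; the object and method names are illustrative, not Spark's actual internals.

```scala
// Minimal sketch, not Spark's real implementation: reading a boolean
// "skipHiveMetadata" flag from a table's options map, defaulting to
// false as the first test case above assumes.
object SkipHiveMetadataSketch {
  def shouldSkipHiveMetadata(options: Map[String, String]): Boolean =
    options.getOrElse("skipHiveMetadata", "false").toBoolean

  def main(args: Array[String]): Unit = {
    val opts = Map("path" -> "just a dummy path", "skipHiveMetadata" -> "true")
    // Prints "true": the table would be stored in SparkSQL-specific format.
    println(shouldSkipHiveMetadata(opts))
  }
}
```

One consequence of a lookup like this, assuming the option is matched by exact key: after the rename, the old snake_case key `skip_hive_metadata` would no longer match and a caller still passing it would silently fall back to the default, which is presumably why the test was updated to the camelCase name in the same change.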