@@ -19,6 +19,8 @@ package org.apache.spark.sql.hive.execution
1919
2020import scala .util .Try
2121
22+ import org .apache .hadoop .hive .conf .HiveConf .ConfVars
23+
2224import org .apache .spark .SparkException
2325import org .apache .spark .sql .hive ._
2426import org .apache .spark .sql .hive .test .TestHive
@@ -545,6 +547,45 @@ class HiveQuerySuite extends HiveComparisonTest {
545547 |DROP TABLE IF EXISTS dynamic_part_table;
546548 """ .stripMargin)
547549
// Verifies the on-disk folder layout produced by dynamic-partition INSERTs,
// including Hive's placeholder folder name for NULL partition values
// (ConfVars.DEFAULTPARTITIONNAME, i.e. "__HIVE_DEFAULT_PARTITION__").
test("Dynamic partition folder layout") {
  sql("DROP TABLE IF EXISTS dynamic_part_table")
  sql("CREATE TABLE dynamic_part_table(intcol INT) PARTITIONED BY (partcol1 INT, partcol2 INT)")
  // Strict mode would reject an INSERT where every partition column is dynamic.
  sql("SET hive.exec.dynamic.partition.mode=nonstrict")

  // Each entry maps the (partcol1, partcol2) string values to the row value
  // inserted for that partition; "NULL" exercises the default-partition path.
  val data = Map(
    Seq("1", "1") -> 1,
    Seq("1", "NULL") -> 2,
    Seq("NULL", "1") -> 3,
    Seq("NULL", "NULL") -> 4)

  data.foreach { case (parts, value) =>
    // "WHERE key=150" matches exactly one row in src, so exactly one row
    // lands in each dynamic partition.
    sql(
      s"""INSERT INTO TABLE dynamic_part_table PARTITION(partcol1, partcol2)
         |SELECT $value, ${parts.mkString(", ")} FROM src WHERE key=150
       """.stripMargin)

    // Expected partition directory, e.g. "partcol1=1/partcol2=__HIVE_DEFAULT_PARTITION__".
    val partFolder = Seq("partcol1", "partcol2")
      .zip(parts)
      .map { case (k, v) =>
        if (v == "NULL") {
          s"$k=${ConfVars.DEFAULTPARTITIONNAME.defaultVal}"
        } else {
          s"$k=$v"
        }
      }
      .mkString("/")

    // Loads partition data to a temporary table to verify contents
    val path = s"$warehousePath/dynamic_part_table/$partFolder/part-00000"

    sql("DROP TABLE IF EXISTS dp_verify")
    sql("CREATE TABLE dp_verify(intcol INT)")
    sql(s"LOAD DATA LOCAL INPATH '$path' INTO TABLE dp_verify")

    assert(sql("SELECT * FROM dp_verify").collect() === Array(Row(value)))
  }
}
588+
548589 test(" Partition spec validation" ) {
549590 sql(" DROP TABLE IF EXISTS dp_test" )
550591 sql(" CREATE TABLE dp_test(key INT, value STRING) PARTITIONED BY (dp INT, sp INT)" )
0 commit comments