@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.execution
 
 import scala.util.Try
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._
@@ -334,7 +335,7 @@ class HiveQuerySuite extends HiveComparisonTest {
 
   def isExplanation(result: SchemaRDD) = {
     val explanation = result.select('plan).collect().map { case Row(plan: String) => plan }
-    explanation.exists(_ == "== Physical Plan ==")
+    explanation.contains("== Physical Plan ==")
   }
 
   test("SPARK-1704: Explain commands as a SchemaRDD") {
@@ -544,6 +545,30 @@ class HiveQuerySuite extends HiveComparisonTest {
         |DROP TABLE IF EXISTS dynamic_part_table;
       """.stripMargin)
 
+  test("Partition spec validation") {
+    sql("DROP TABLE IF EXISTS dp_test")
+    sql("CREATE TABLE dp_test(key INT, value STRING) PARTITIONED BY (dp INT, sp INT)")
+    sql("SET hive.exec.dynamic.partition.mode=strict")
+
+    // Should throw when using strict dynamic partition mode without any static partition
+    intercept[SparkException] {
+      sql(
+        """INSERT INTO TABLE dp_test PARTITION(dp)
+          |SELECT key, value, key % 5 FROM src
+        """.stripMargin)
+    }
+
+    sql("SET hive.exec.dynamic.partition.mode=nonstrict")
+
+    // Should throw when a static partition appears after a dynamic partition
+    intercept[SparkException] {
+      sql(
+        """INSERT INTO TABLE dp_test PARTITION(dp, sp = 1)
+          |SELECT key, value, key % 5 FROM src
+        """.stripMargin)
+    }
+  }
+
   test("SPARK-3414 regression: should store analyzed logical plan when registering a temp table") {
     sparkContext.makeRDD(Seq.empty[LogEntry]).registerTempTable("rawLogs")
     sparkContext.makeRDD(Seq.empty[LogFile]).registerTempTable("logFiles")
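For context on the two failure cases exercised above: Hive's dynamic partitioning rules require every static partition column to precede the dynamic ones in the PARTITION clause, and strict mode additionally requires at least one static partition. The following sketch is illustrative only and not part of this commit; it reuses the dp_test table and the standard src test table from the new test to show an INSERT that the validation accepts:

// Illustrative sketch (not in this diff): the static partition dp = 1 comes
// before the dynamic partition sp, so validation passes even in strict mode.
sql("SET hive.exec.dynamic.partition.mode=strict")
sql(
  """INSERT INTO TABLE dp_test PARTITION(dp = 1, sp)
    |SELECT key, value, key % 5 FROM src
  """.stripMargin)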
@@ -601,27 +626,27 @@ class HiveQuerySuite extends HiveComparisonTest {
     assert(sql("SET").collect().size == 0)
 
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql(s"SET $testKey=$testVal"))
+      collectResults(sql(s"SET $testKey=$testVal"))
     }
 
     assert(hiveconf.get(testKey, "") == testVal)
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql("SET"))
+      collectResults(sql("SET"))
     }
 
     sql(s"SET ${testKey + testKey}=${testVal + testVal}")
     assert(hiveconf.get(testKey + testKey, "") == testVal + testVal)
     assertResult(Set(testKey -> testVal, (testKey + testKey) -> (testVal + testVal))) {
-      collectResults(hql("SET"))
+      collectResults(sql("SET"))
     }
 
     // "set key"
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql(s"SET $testKey"))
+      collectResults(sql(s"SET $testKey"))
     }
 
     assertResult(Set(nonexistentKey -> "<undefined>")) {
-      collectResults(hql(s"SET $nonexistentKey"))
+      collectResults(sql(s"SET $nonexistentKey"))
     }
 
     // Assert that sql() should have the same effects as sql() by repeating the above using sql().