@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.execution
 
 import scala.util.Try
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._
@@ -334,7 +335,7 @@ class HiveQuerySuite extends HiveComparisonTest {
 
   def isExplanation(result: SchemaRDD) = {
     val explanation = result.select('plan).collect().map { case Row(plan: String) => plan }
-    explanation.exists(_ == "== Physical Plan ==")
+    explanation.contains("== Physical Plan ==")
   }
 
   test("SPARK-1704: Explain commands as a SchemaRDD") {
@@ -544,6 +545,30 @@ class HiveQuerySuite extends HiveComparisonTest {
         |DROP TABLE IF EXISTS dynamic_part_table;
       """.stripMargin)
 
+  test("Partition spec validation") {
+    sql("DROP TABLE IF EXISTS dp_test")
+    sql("CREATE TABLE dp_test(key INT, value STRING) PARTITIONED BY (dp INT, sp INT)")
+    sql("SET hive.exec.dynamic.partition.mode=strict")
+
+    // Should throw when using strict dynamic partition mode without any static partition
+    intercept[SparkException] {
+      sql(
+        """INSERT INTO TABLE dp_test PARTITION(dp)
+          |SELECT key, value, key % 5 FROM src
+        """.stripMargin)
+    }
+
+    sql("SET hive.exec.dynamic.partition.mode=nonstrict")
+
+    // Should throw when a static partition appears after a dynamic partition
+    intercept[SparkException] {
+      sql(
+        """INSERT INTO TABLE dp_test PARTITION(dp, sp = 1)
+          |SELECT key, value, key % 5 FROM src
+        """.stripMargin)
+    }
+  }
+
   test("SPARK-3414 regression: should store analyzed logical plan when registering a temp table") {
     sparkContext.makeRDD(Seq.empty[LogEntry]).registerTempTable("rawLogs")
     sparkContext.makeRDD(Seq.empty[LogFile]).registerTempTable("logFiles")
@@ -601,27 +626,27 @@ class HiveQuerySuite extends HiveComparisonTest {
     assert(sql("SET").collect().size == 0)
 
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql(s"SET $testKey=$testVal"))
+      collectResults(sql(s"SET $testKey=$testVal"))
     }
 
     assert(hiveconf.get(testKey, "") == testVal)
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql("SET"))
+      collectResults(sql("SET"))
     }
 
     sql(s"SET ${testKey + testKey}=${testVal + testVal}")
     assert(hiveconf.get(testKey + testKey, "") == testVal + testVal)
     assertResult(Set(testKey -> testVal, (testKey + testKey) -> (testVal + testVal))) {
-      collectResults(hql("SET"))
+      collectResults(sql("SET"))
     }
 
     // "set key"
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql(s"SET $testKey"))
+      collectResults(sql(s"SET $testKey"))
     }
 
     assertResult(Set(nonexistentKey -> "<undefined>")) {
-      collectResults(hql(s"SET $nonexistentKey"))
+      collectResults(sql(s"SET $nonexistentKey"))
     }
 
     // Assert that sql() should have the same effects as sql() by repeating the above using sql().
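
The new "Partition spec validation" test above exercises two rules for dynamic-partition INSERTs: under strict mode at least one partition column must be static, and a static partition column may not appear after a dynamic one. The following is a minimal, self-contained Scala sketch of those two checks; it is not part of the patch and not Spark's actual implementation, and all names in it are illustrative.

// Standalone sketch of the partition-spec rules exercised by the new test.
// Illustrative only; this is not how Spark implements the validation.
object PartitionSpecCheck {
  // Each partition column is either static (Some(literal)) or dynamic (None).
  def validate(spec: Seq[(String, Option[String])], strictMode: Boolean): Either[String, Unit] = {
    val dynamicIdx = spec.zipWithIndex.collect { case ((_, None), i) => i }
    val staticIdx  = spec.zipWithIndex.collect { case ((_, Some(_)), i) => i }
    if (strictMode && dynamicIdx.nonEmpty && staticIdx.isEmpty)
      Left("strict mode requires at least one static partition column")
    else if (dynamicIdx.nonEmpty && staticIdx.exists(_ > dynamicIdx.min))
      Left("static partition column may not follow a dynamic partition column")
    else
      Right(())
  }

  def main(args: Array[String]): Unit = {
    // PARTITION(dp) in strict mode -> rejected (mirrors the first intercept in the test)
    assert(validate(Seq("dp" -> None), strictMode = true).isLeft)
    // PARTITION(dp, sp = 1) -> rejected (mirrors the second intercept in the test)
    assert(validate(Seq("dp" -> None, "sp" -> Some("1")), strictMode = false).isLeft)
    // PARTITION(sp = 1, dp) -> accepted
    assert(validate(Seq("sp" -> Some("1"), "dp" -> None), strictMode = false).isRight)
  }
}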