
Commit 44b283f

Adds test case for SPARK-7616
1 parent: 6733276

File tree

2 files changed (+21, -5 lines)


sql/core/src/main/scala/org/apache/spark/sql/test/SQLTestUtils.scala

Lines changed: 7 additions & 0 deletions
@@ -78,4 +78,11 @@ trait SQLTestUtils {
   protected def withTempTable(tableName: String)(f: => Unit): Unit = {
     try f finally sqlContext.dropTempTable(tableName)
   }
+
+  /**
+   * Drops table `tableName` after calling `f`.
+   */
+  protected def withTable(tableName: String)(f: => Unit): Unit = {
+    try f finally sqlContext.sql(s"DROP TABLE IF EXISTS $tableName")
+  }
 }
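
For context, a minimal usage sketch (not part of the commit) of how a suite mixing in SQLTestUtils might call the new withTable helper. The table name "t1", the example DataFrame, and the implicits import are illustrative assumptions; checkAnswer is assumed to come from QueryTest, as in the relation suites below.

    // Hypothetical test in a suite that extends QueryTest with SQLTestUtils.
    // Assumes `sqlContext` is provided by the suite and toDF comes from the import below.
    import org.apache.spark.sql.hive.test.TestHive.implicits._

    test("example: withTable drops the table even if assertions fail") {
      val df = Seq((1, "a"), (2, "b")).toDF("id", "name")
      df.write.saveAsTable("t1")
      withTable("t1") {
        checkAnswer(sqlContext.table("t1"), df.collect())
      }
      // Here "t1" has been dropped via DROP TABLE IF EXISTS, whether or not the
      // checkAnswer above succeeded, so later tests start from a clean catalog.
    }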

sql/hive/src/test/scala/org/apache/spark/sql/sources/hadoopFsRelationSuites.scala

Lines changed: 14 additions & 5 deletions
@@ -22,7 +22,6 @@ import org.apache.hadoop.fs.Path
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.sql._
 import org.apache.spark.sql.hive.test.TestHive
-import org.apache.spark.sql.parquet.ParquetTest
 import org.apache.spark.sql.test.SQLTestUtils
 import org.apache.spark.sql.types._
 
@@ -237,10 +236,6 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils {
     }
   }
 
-  def withTable(tableName: String)(f: => Unit): Unit = {
-    try f finally sql(s"DROP TABLE $tableName")
-  }
-
   test("saveAsTable()/load() - non-partitioned table - Overwrite") {
     testDF.write.format(dataSourceName).mode(SaveMode.Overwrite)
       .option("dataSchema", dataSchema.json)
@@ -521,4 +516,18 @@ class ParquetHadoopFsRelationSuite extends HadoopFsRelationTest {
         .load(file.getCanonicalPath))
     }
   }
+
+  test("SPARK-7616: adjust column name order accordingly when saving partitioned table") {
+    val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
+
+    df.write
+      .format("parquet")
+      .mode(SaveMode.Overwrite)
+      .partitionBy("c", "a")
+      .saveAsTable("t")
+
+    withTable("t") {
+      checkAnswer(table("t"), df.select('b, 'c, 'a).collect())
+    }
+  }
 }
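
To make the expected behavior concrete, here is a rough interactive sketch, assuming a spark-shell style session where sqlContext is a HiveContext and its implicits are imported; it is not part of the commit. The new test asserts that a DataFrame written from columns (a, b, c) with partitionBy("c", "a") reads back with its columns reordered to (b, c, a), i.e. data columns first and the partition columns last.

    // Sketch only; assumes `sqlContext: HiveContext` and `import sqlContext.implicits._`.
    import org.apache.spark.sql.SaveMode

    val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")

    df.write
      .format("parquet")
      .mode(SaveMode.Overwrite)
      .partitionBy("c", "a")
      .saveAsTable("t")

    // Data column "b" comes first, then the partition columns, which is
    // exactly what the test above checks via df.select('b, 'c, 'a):
    sqlContext.table("t").columns    // expected: Array("b", "c", "a")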
