|
17 | 17 |
|
18 | 18 | package org.apache.spark.sql |
19 | 19 |
|
| 20 | +import java.io.File |
| 21 | + |
20 | 22 | import scala.language.postfixOps |
21 | 23 |
|
| 24 | +import org.apache.spark.sql.catalyst.plans.logical.OneRowRelation |
22 | 25 | import org.apache.spark.sql.functions._ |
23 | 26 | import org.apache.spark.sql.types._ |
24 | | -import org.apache.spark.sql.test.{ExamplePointUDT, ExamplePoint} |
25 | | - |
| 27 | +import org.apache.spark.sql.test.{ExamplePointUDT, ExamplePoint, SQLTestUtils} |
26 | 28 |
|
27 | | -class DataFrameSuite extends QueryTest { |
| 29 | +class DataFrameSuite extends QueryTest with SQLTestUtils { |
28 | 30 | import org.apache.spark.sql.TestData._ |
29 | 31 |
|
30 | 32 | lazy val ctx = org.apache.spark.sql.test.TestSQLContext |
31 | 33 | import ctx.implicits._ |
32 | 34 |
|
  // Exposes this suite's SQLContext (the shared TestSQLContext bound to `ctx` above).
  // NOTE(review): presumably the abstract member required by the SQLTestUtils mixin
  // (which supplies helpers such as withTempDir) — confirm against SQLTestUtils.
  def sqlContext: SQLContext = ctx
| 36 | + |
33 | 37 | test("analysis error should be eagerly reported") { |
34 | 38 | val oldSetting = ctx.conf.dataFrameEagerAnalysis |
35 | 39 | // Eager analysis. |
@@ -761,4 +765,49 @@ class DataFrameSuite extends QueryTest { |
761 | 765 | assert(f.getMessage.contains("column3")) |
762 | 766 | assert(!f.getMessage.contains("column2")) |
763 | 767 | } |
| 768 | + |
  // Regression test for SPARK-6941: inserting into a table backed by an in-memory
  // RDD (rather than an external data source) must fail analysis with a clear,
  // user-facing message instead of an obscure internal error. Tables backed by
  // HadoopFsRelation (parquet) or InsertableRelation (json) must still accept inserts.
  test("SPARK-6941: Better error message for inserting into RDD-based Table") {
    withTempDir { dir =>

      // Scratch locations inside the temp dir; withTempDir removes them afterwards.
      val tempParquetFile = new File(dir, "tmp_parquet")
      val tempJsonFile = new File(dir, "tmp_json")

      // `df` seeds each table's initial contents; `insertion` is the row we try to insert.
      val df = Seq(Tuple1(1)).toDF()
      val insertion = Seq(Tuple1(2)).toDF("col")

      // pass case: parquet table (HadoopFsRelation)
      df.write.mode(SaveMode.Overwrite).parquet(tempParquetFile.getCanonicalPath)
      val pdf = ctx.read.parquet(tempParquetFile.getCanonicalPath)
      pdf.registerTempTable("parquet_base")
      insertion.write.insertInto("parquet_base")

      // pass case: json table (InsertableRelation)
      df.write.mode(SaveMode.Overwrite).json(tempJsonFile.getCanonicalPath)
      val jdf = ctx.read.json(tempJsonFile.getCanonicalPath)
      jdf.registerTempTable("json_base")
      insertion.write.mode(SaveMode.Overwrite).insertInto("json_base")

      // error cases: insert into an RDD
      // `df` was built via toDF() from a local Seq, so its plan is RDD-based.
      df.registerTempTable("rdd_base")
      val e1 = intercept[AnalysisException] {
        insertion.write.insertInto("rdd_base")
      }
      assert(e1.getMessage.contains("Inserting into an RDD-based table is not allowed."))

      // error case: insert into a logical plan that is not a LeafNode
      // (a select+filter over the parquet table — insertable only at leaf relations).
      val indirectDS = pdf.select("_1").filter($"_1" > 5)
      indirectDS.registerTempTable("indirect_ds")
      val e2 = intercept[AnalysisException] {
        insertion.write.insertInto("indirect_ds")
      }
      assert(e2.getMessage.contains("Inserting into an RDD-based table is not allowed."))

      // error case: insert into an OneRowRelation
      // (the dummy single-row plan used for SELECTs without a FROM clause).
      new DataFrame(ctx, OneRowRelation).registerTempTable("one_row")
      val e3 = intercept[AnalysisException] {
        insertion.write.insertInto("one_row")
      }
      assert(e3.getMessage.contains("Inserting into an RDD-based table is not allowed."))
    }
  }
764 | 813 | } |
0 commit comments