 
 package org.apache.spark.sql
 
+import org.apache.spark.sql.catalyst.plans.logical.OneRowRelation
+
 import scala.language.postfixOps
 
 import org.apache.spark.sql.functions._
@@ -762,7 +764,7 @@ class DataFrameSuite extends QueryTest {
   }
 
   test("SPARK-6941: Better error message for inserting into RDD-based Table") {
-    val df = Seq(Tuple1(1)).toDF("col")
+    val df = Seq(Tuple1(1)).toDF()
     val insertion = Seq(Tuple1(2)).toDF("col")
 
     // pass case: parquet table (HadoopFsRelation)
@@ -782,14 +784,21 @@ class DataFrameSuite extends QueryTest {
     val e1 = intercept[AnalysisException] {
       insertion.write.insertInto("rdd_base")
     }
-    assert(e1.getMessage.contains("Attempt to insert into a RDD-based table"))
+    assert(e1.getMessage.contains("Inserting into an RDD-based table is not allowed."))
 
     // error case: insert into a RDD based on data source
-    val indirectDS = pdf.select("col").filter($"col" > 5)
+    val indirectDS = pdf.select("_1").filter($"_1" > 5)
     indirectDS.registerTempTable("indirect_ds")
     val e2 = intercept[AnalysisException] {
       insertion.write.insertInto("indirect_ds")
     }
-    assert(e2.getMessage.contains("Attempt to insert into a RDD-based table"))
+    assert(e2.getMessage.contains("Inserting into an RDD-based table is not allowed."))
+
+    // error case: insert into a OneRowRelation
+    new DataFrame(ctx, OneRowRelation).registerTempTable("one_row")
+    val e3 = intercept[AnalysisException] {
+      insertion.write.insertInto("one_row")
+    }
+    assert(e3.getMessage.contains("Inserting into an RDD-based table is not allowed."))
   }
 }
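
A side note on the toDF("col") -> toDF() change in this hunk: when no column names are supplied, a Tuple1-based DataFrame keeps the default generated field name _1, which is why the select/filter calls switch from "col" to "_1". A minimal sketch of the difference, assuming the suite's SQLContext is named ctx as in the added test code:

    import ctx.implicits._                  // assumes a SQLContext named ctx, as used elsewhere in this suite

    val unnamed = Seq(Tuple1(1)).toDF()     // column name defaults to the tuple field name: _1
    unnamed.select("_1")                    // resolves; select("col") would fail to resolve

    val named = Seq(Tuple1(1)).toDF("col")  // explicit name replaces the default
    named.select("col")                     // resolves against the supplied name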