@@ -21,6 +21,7 @@ import java.io.File
 
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql._
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.sources._
@@ -32,7 +33,7 @@ case class OrcData(intField: Int, stringField: String)
 
 abstract class OrcSuite extends QueryTest
   with TestHiveSingleton with SQLTestUtils with BeforeAndAfterAll {
-  import spark._
+  import spark.implicits._
 
   var orcTableDir: File = null
   var orcTableAsDir: File = null
@@ -194,30 +195,6 @@ abstract class OrcSuite extends QueryTest
       assert(e.contains("Codec [illegal] is not available. Known codecs are"))
     }
   }
-}
-
-class OrcSourceSuite extends OrcSuite {
-  import spark.implicits._
-
-  override def beforeAll(): Unit = {
-    super.beforeAll()
-
-    spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_source
-         |USING org.apache.spark.sql.hive.orc
-         |OPTIONS (
-         |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
-         |)
-       """.stripMargin)
-
-    spark.sql(
-      s"""CREATE TEMPORARY TABLE normal_orc_as_source
-         |USING org.apache.spark.sql.hive.orc
-         |OPTIONS (
-         |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
-         |)
-       """.stripMargin)
-  }
 
   test("orc - API") {
     val userSchema = new StructType().add("s", StringType)
@@ -240,27 +217,52 @@ class OrcSourceSuite extends OrcSuite {
     // Test explicit calls to single arg method - SPARK-16009
     testRead(Option(dir).map(spark.read.orc).get, data, schema)
 
-    // Reader, with user specified schema, should just apply user schema on the file data
+    // Reader, with user-specified schema, should report an exception, since the schema in the
+    // files differs from the user-specified schema.
     testRead(spark.read.schema(userSchema).orc(), Seq.empty, userSchema)
-    spark.read.schema(userSchema).orc(dir).printSchema()
-
-    spark.read.schema(userSchema).orc(dir).explain(true)
-
-    spark.read.schema(userSchema).orc().show()
-    spark.read.schema(userSchema).orc(dir).show()
-    val expData = Seq[String](null, null, null)
-    testRead(spark.read.schema(userSchema).orc(dir), expData, userSchema)
-    testRead(spark.read.schema(userSchema).orc(dir, dir), expData ++ expData, userSchema)
-    testRead(spark.read.schema(userSchema).orc(Seq(dir, dir): _*), expData ++ expData, userSchema)
-
+    var e = intercept[SparkException] {
+      testRead(spark.read.schema(userSchema).orc(dir), Seq.empty, userSchema)
+    }.getMessage
+    assert(e.contains("Field \"s\" does not exist"))
+    e = intercept[SparkException] {
+      testRead(spark.read.schema(userSchema).orc(dir, dir), Seq.empty, userSchema)
+    }.getMessage
+    assert(e.contains("Field \"s\" does not exist"))
+    e = intercept[SparkException] {
+      testRead(spark.read.schema(userSchema).orc(Seq(dir, dir): _*), Seq.empty, userSchema)
+    }.getMessage
+    assert(e.contains("Field \"s\" does not exist"))
   }
+
   private def testRead(
       df: => DataFrame,
       expectedResult: Seq[String],
       expectedSchema: StructType): Unit = {
     checkAnswer(df, spark.createDataset(expectedResult).toDF())
     assert(df.schema === expectedSchema)
   }
+}
+
+class OrcSourceSuite extends OrcSuite {
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+
+    spark.sql(
+      s"""CREATE TEMPORARY TABLE normal_orc_source
+         |USING org.apache.spark.sql.hive.orc
+         |OPTIONS (
+         |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
+         |)
+       """.stripMargin)
+
+    spark.sql(
+      s"""CREATE TEMPORARY TABLE normal_orc_as_source
+         |USING org.apache.spark.sql.hive.orc
+         |OPTIONS (
+         |  PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
+         |)
+       """.stripMargin)
+  }
 
   test("SPARK-12218 Converting conjunctions into ORC SearchArguments") {
     // The `LessThan` should be converted while the `StringContains` shouldn't
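For reference, the behavior the new assertions encode can be reproduced outside the test harness. The sketch below is illustrative only: the object name, master setting, and scratch path are hypothetical, and it assumes a Spark build where the Hive ORC reader fails at execution time when the user-specified schema names a field that the files do not contain (the `Field "s" does not exist` message asserted above).

```scala
// Minimal sketch, not part of the patch. Assumes a local Spark build with
// Hive support; the object name and scratch path below are hypothetical.
import org.apache.spark.SparkException
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{StringType, StructType}

object OrcSchemaMismatchSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("orc-schema-mismatch")
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._

    val path = "/tmp/orc-schema-mismatch" // hypothetical scratch location
    // Write ORC files whose only column is "str".
    Seq("a", "b", "c").toDF("str").write.mode("overwrite").orc(path)

    // Read them back with a user schema naming a field, "s", that the files
    // do not contain. The read is lazy, so force execution with collect().
    val userSchema = new StructType().add("s", StringType)
    try {
      spark.read.schema(userSchema).orc(path).collect()
      println("unexpected: read succeeded")
    } catch {
      case e: SparkException =>
        // Mirrors the assertion in the test above.
        assert(e.getMessage.contains("Field \"s\" does not exist"))
        println("got the expected SparkException")
    }
    spark.stop()
  }
}
```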