Commit a6926db

Joseph Batchik committed

added test cases for data source loader

1 parent 208a2a8 commit a6926db

File tree: 5 files changed, +81 -3 lines

.rat-excludes

Lines changed: 1 addition & 1 deletion

@@ -93,4 +93,4 @@ INDEX
 .lintr
 gen-java.*
 .*avpr
-org.apache.spark.sql.sources.DataSourceProvider
+services/*
Lines changed: 1 addition & 1 deletion

@@ -1,3 +1,3 @@
+org.apache.spark.sql.jdbc.DefaultSource
 org.apache.spark.sql.json.DefaultSource
 org.apache.spark.sql.parquet.DefaultSource
-org.apache.spark.sql.jdbc.DefaultSource

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala

Lines changed: 1 addition & 1 deletion

@@ -202,7 +202,7 @@ private[sql] object ResolvedDataSource extends Logging {
     val sl = ServiceLoader.load(classOf[DataSourceProvider], loader)

     sl.iterator().filter(_.format() == provider).toList match {
-      case Nil => logInfo(s"provider: $provider is not registered in the service loader")
+      case Nil => logDebug(s"provider: $provider is not registered in the service loader")
       case head :: Nil => return head.getClass
       case sources => sys.error(s"Multiple sources found for $provider, " +
        s"(${sources.map(_.getClass.getName).mkString(", ")}, " +
Lines changed: 3 additions & 0 deletions

@@ -0,0 +1,3 @@
+org.apache.spark.sql.sources.FakeSourceOne
+org.apache.spark.sql.sources.FakeSourceTwo
+org.apache.spark.sql.sources.FakeSourceThree
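For context on why this plain text file registers anything at all: java.util.ServiceLoader discovers implementations by reading classpath resources under META-INF/services/ whose file name is the fully qualified name of the service interface, one implementation class per line, each with a public no-arg constructor. A quick sketch of how entries like these become visible at runtime, reusing the hypothetical FormatProvider trait from the sketch above because this view does not show the actual file name or interface:

import java.util.ServiceLoader
import scala.collection.JavaConverters._

// Enumerate whatever is registered for the (assumed) FormatProvider service on the classpath;
// ServiceLoader instantiates each listed class and hands the instances back lazily.
val registered = ServiceLoader.load(classOf[FormatProvider]).asScala.toList
registered.foreach(p => println(s"${p.getClass.getName} -> ${p.format()}"))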
Lines changed: 75 additions & 0 deletions

@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.sources
+
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.sql.types.{StringType, StructField, StructType}
+
+class FakeSourceOne extends RelationProvider {
+
+  override def format() = "Fluet da Bomb"
+
+  override def createRelation(cont: SQLContext, param: Map[String, String]) = new BaseRelation {
+    override def sqlContext: SQLContext = cont
+
+    override def schema: StructType =
+      StructType(Seq(StructField("stringType", StringType, nullable = false)))
+  }
+}
+
+class FakeSourceTwo extends RelationProvider {
+
+  override def format() = "Fluet da Bomb"
+
+  override def createRelation(cont: SQLContext, param: Map[String, String]) = new BaseRelation {
+    override def sqlContext: SQLContext = cont
+
+    override def schema: StructType =
+      StructType(Seq(StructField("stringType", StringType, nullable = false)))
+  }
+}
+
+class FakeSourceThree extends RelationProvider {
+  override def format() = "gathering quorum"
+
+  override def createRelation(cont: SQLContext, param: Map[String, String]) = new BaseRelation {
+    override def sqlContext: SQLContext = cont
+
+    override def schema: StructType =
+      StructType(Seq(StructField("stringType", StringType, nullable = false)))
+  }
+}
+// please note that META-INF/services had to be modified in the test directory for this to work
+class DDLSourceLoadSuite extends DataSourceTest {
+
+  test("data sources with the same name") {
+    intercept[RuntimeException] {
+      caseInsensitiveContext.read.format("Fluet da Bomb").load()
+    }
+  }
+
+  test("load data source from format alias") {
+    caseInsensitiveContext.read.format("gathering quorum").load().schema ==
+      StructType(Seq(StructField("stringType", StringType, nullable = false)))
+  }
+
+  test("specify full classname with duplicate formats") {
+    caseInsensitiveContext.read.format("org.apache.spark.sql.sources.FakeSourceOne")
+      .load().schema == StructType(Seq(StructField("stringType", StringType, nullable = false)))
+  }
+}
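One caveat when reading the last two tests above: the schema comparisons use ==, and the resulting Boolean is discarded, so those tests exercise the load path but would not actually fail on a schema mismatch. A sketch of an asserted variant, assuming ScalaTest's assert is in scope through the suite's base class:

  test("load data source from format alias") {
    val df = caseInsensitiveContext.read.format("gathering quorum").load()
    assert(df.schema == StructType(Seq(StructField("stringType", StringType, nullable = false))))
  }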
