@@ -19,22 +19,31 @@ package org.apache.spark.sql.jdbc
1919
2020import java .math .BigDecimal
2121import java .sql .DriverManager
22- import java .util .{Calendar , GregorianCalendar }
22+ import java .util .{Calendar , GregorianCalendar , Properties }
2323
2424import org .apache .spark .sql .test ._
25+ import org .h2 .jdbc .JdbcSQLException
2526import org .scalatest .{FunSuite , BeforeAndAfter }
2627import TestSQLContext ._
2728import TestSQLContext .implicits ._
2829
2930class JDBCSuite extends FunSuite with BeforeAndAfter {
3031 val url = " jdbc:h2:mem:testdb0"
32+ val urlWithUserAndPass = " jdbc:h2:mem:testdb0;user=testUser;password=testPass"
3133 var conn : java.sql.Connection = null
3234
3335 val testBytes = Array [Byte ](99 .toByte, 134 .toByte, 135 .toByte, 200 .toByte, 205 .toByte)
3436
3537 before {
3638 Class .forName(" org.h2.Driver" )
37- conn = DriverManager .getConnection(url)
39+ // Extra properties that will be specified for our database. We need these to test
40+ // usage of parameters from OPTIONS clause in queries.
41+ val properties = new Properties ()
42+ properties.setProperty(" user" , " testUser" )
43+ properties.setProperty(" password" , " testPass" )
44+ properties.setProperty(" rowId" , " false" )
45+
46+ conn = DriverManager .getConnection(url, properties)
3847 conn.prepareStatement(" create schema test" ).executeUpdate()
3948 conn.prepareStatement(" create table test.people (name TEXT(32) NOT NULL, theid INTEGER NOT NULL)" ).executeUpdate()
4049 conn.prepareStatement(" insert into test.people values ('fred', 1)" ).executeUpdate()
@@ -46,15 +55,15 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
4655 s """
4756 |CREATE TEMPORARY TABLE foobar
4857 |USING org.apache.spark.sql.jdbc
49- |OPTIONS (url ' $url', dbtable 'TEST.PEOPLE')
58+ |OPTIONS (url ' $url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass' )
5059 """ .stripMargin.replaceAll(" \n " , " " ))
5160
5261 sql(
5362 s """
5463 |CREATE TEMPORARY TABLE parts
5564 |USING org.apache.spark.sql.jdbc
56- |OPTIONS (url ' $url', dbtable 'TEST.PEOPLE',
57- |partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3')
65+ |OPTIONS (url ' $url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
66+ | partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3')
5867 """ .stripMargin.replaceAll(" \n " , " " ))
5968
6069 conn.prepareStatement(" create table test.inttypes (a INT, b BOOLEAN, c TINYINT, "
@@ -68,12 +77,12 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
6877 s """
6978 |CREATE TEMPORARY TABLE inttypes
7079 |USING org.apache.spark.sql.jdbc
71- |OPTIONS (url ' $url', dbtable 'TEST.INTTYPES')
80+ |OPTIONS (url ' $url', dbtable 'TEST.INTTYPES', user 'testUser', password 'testPass' )
7281 """ .stripMargin.replaceAll(" \n " , " " ))
7382
7483 conn.prepareStatement(" create table test.strtypes (a BINARY(20), b VARCHAR(20), "
7584 + " c VARCHAR_IGNORECASE(20), d CHAR(20), e BLOB, f CLOB)" ).executeUpdate()
76- var stmt = conn.prepareStatement(" insert into test.strtypes values (?, ?, ?, ?, ?, ?)" )
85+ val stmt = conn.prepareStatement(" insert into test.strtypes values (?, ?, ?, ?, ?, ?)" )
7786 stmt.setBytes(1 , testBytes)
7887 stmt.setString(2 , " Sensitive" )
7988 stmt.setString(3 , " Insensitive" )
@@ -85,7 +94,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
8594 s """
8695 |CREATE TEMPORARY TABLE strtypes
8796 |USING org.apache.spark.sql.jdbc
88- |OPTIONS (url ' $url', dbtable 'TEST.STRTYPES')
97+ |OPTIONS (url ' $url', dbtable 'TEST.STRTYPES', user 'testUser', password 'testPass' )
8998 """ .stripMargin.replaceAll(" \n " , " " ))
9099
91100 conn.prepareStatement(" create table test.timetypes (a TIME, b DATE, c TIMESTAMP)"
@@ -97,7 +106,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
97106 s """
98107 |CREATE TEMPORARY TABLE timetypes
99108 |USING org.apache.spark.sql.jdbc
100- |OPTIONS (url ' $url', dbtable 'TEST.TIMETYPES')
109+ |OPTIONS (url ' $url', dbtable 'TEST.TIMETYPES', user 'testUser', password 'testPass' )
101110 """ .stripMargin.replaceAll(" \n " , " " ))
102111
103112
@@ -112,7 +121,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
112121 s """
113122 |CREATE TEMPORARY TABLE flttypes
114123 |USING org.apache.spark.sql.jdbc
115- |OPTIONS (url ' $url', dbtable 'TEST.FLTTYPES')
124+ |OPTIONS (url ' $url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass' )
116125 """ .stripMargin.replaceAll(" \n " , " " ))
117126
118127 // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
@@ -174,16 +183,17 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
174183 }
175184
test("Basic API") {
  // The `before` fixture inserts exactly three rows into TEST.PEOPLE; reading
  // through the credentialed URL must authenticate and return all of them.
  assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE").collect.size == 3)
}
179188
test("Partitioning via JDBCPartitioningInfo API") {
  // Partition TEST.PEOPLE on THEID into 3 partitions over the bounds [0, 4);
  // the union of all partitions must still yield the full 3-row table.
  assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE", "THEID", 0, 4, 3)
    .collect.size == 3)
}
183193
test("Partitioning via list-of-where-clauses API") {
  // Two complementary WHERE clauses partition the table; together they must
  // cover every row exactly once, so the union is still all 3 rows.
  val parts = Array[String]("THEID < 2", "THEID >= 2")
  assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts).collect.size == 3)
}
188198
189199 test(" H2 integral types" ) {
@@ -216,7 +226,6 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
216226 assert(rows(0 ).getString(5 ).equals(" I am a clob!" ))
217227 }
218228
219-
220229 test(" H2 time types" ) {
221230 val rows = sql(" SELECT * FROM timetypes" ).collect()
222231 val cal = new GregorianCalendar (java.util.Locale .ROOT )
@@ -246,17 +255,31 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
246255 .equals(new BigDecimal (" 123456789012345.54321543215432100000" )))
247256 }
248257
249-
test("SQL query as table name") {
  // A parenthesized SELECT may be supplied wherever the `dbtable` option
  // expects a table name; the JDBC source wraps it as a derived table.
  sql(
    s"""
      |CREATE TEMPORARY TABLE hack
      |USING org.apache.spark.sql.jdbc
      |OPTIONS (url '$url', dbtable '(SELECT B, B*B FROM TEST.FLTTYPES)',
      |         user 'testUser', password 'testPass')
    """.stripMargin.replaceAll("\n", " "))
  val rows = sql("SELECT * FROM hack").collect()
  assert(rows(0).getDouble(0) == 1.00000011920928955) // Yes, I meant ==.
  // For some reason, H2 computes this square incorrectly...
  assert(math.abs(rows(0).getDouble(1) - 1.00000023841859331) < 1e-12)
}
271+
test("Pass extra properties via OPTIONS") {
  // The fixture connected with rowId=false, which means the _ROWID_ pseudo
  // column should be absent from all tables, so selecting it must throw. If
  // the extra property were NOT forwarded, H2's default (rowId=true) would
  // make this query succeed and the intercept would fail the test.
  intercept[JdbcSQLException] {
    sql(
      s"""
        |CREATE TEMPORARY TABLE abc
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable '(SELECT _ROWID_ FROM test.people)',
        |         user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\n", " "))
  }
}
262285}
0 commit comments