@@ -27,6 +27,7 @@ import org.scalatest.BeforeAndAfter
 import org.apache.spark.SparkException
 import org.apache.spark.sql.{Row, SaveMode}
 import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils
@@ -175,14 +176,14 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
 
     df.write.jdbc(url, "TEST.APPENDTEST", new Properties())
 
-    withSQLConf("spark.sql.caseSensitive" -> "true") {
+    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
       val m = intercept[SparkException] {
         df2.write.mode(SaveMode.Append).jdbc(url, "TEST.APPENDTEST", new Properties())
       }.getMessage
       assert(m.contains("Column \"NAME\" not found"))
     }
 
-    withSQLConf("spark.sql.caseSensitive" -> "false") {
+    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
       df2.write.mode(SaveMode.Append).jdbc(url, "TEST.APPENDTEST", new Properties())
       assert(3 === spark.read.jdbc(url, "TEST.APPENDTEST", new Properties()).count())
       assert(2 === spark.read.jdbc(url, "TEST.APPENDTEST", new Properties()).collect()(0).length)
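
For context, the change swaps a hard-coded config key string for the `SQLConf.CASE_SENSITIVE` constant, so a mistyped key fails at compile time instead of being silently ignored at runtime. A minimal sketch of the pattern, using a hypothetical suite name for illustration and assuming a suite that (like `SharedSQLContext` here) mixes in Spark's SQL test utilities, which provide `withSQLConf` and restore the previous value when the block exits:

```scala
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext

// Hypothetical suite for illustration only; not part of the diff.
class CaseSensitivitySketchSuite extends SharedSQLContext {
  test("reference a SQL config via its SQLConf constant") {
    // SQLConf.CASE_SENSITIVE.key == "spark.sql.caseSensitive"
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
      // Code in this block runs with spark.sql.caseSensitive=true;
      // the test helper restores the prior setting afterwards.
      assert(spark.conf.get(SQLConf.CASE_SENSITIVE.key) === "true")
    }
  }
}
```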