@@ -20,8 +20,6 @@ package org.apache.spark.sql.execution.datasources.jdbc
 import java.sql.{Connection, DriverManager}
 import java.util.Properties
 
-import scala.collection.mutable.ArrayBuffer
-
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
 
 /**
@@ -41,16 +39,23 @@ class JDBCOptions(
       JDBCOptions.JDBC_TABLE_NAME -> table)))
   }
 
+  /**
+   * Returns a property with all options.
+   */
   val asProperties: Properties = {
     val properties = new Properties()
     parameters.foreach { case (k, v) => properties.setProperty(k, v) }
     properties
   }
 
+  /**
+   * Returns a property with all options except Spark internal data source options like `url`,
+   * `dbtable`, and `numPartitions`. This should be used when invoking JDBC APIs such as
+   * `Driver.connect`, because each DBMS vendor has its own property list for its JDBC driver.
+   * See SPARK-17776.
+   */
   val asConnectionProperties: Properties = {
     val properties = new Properties()
-    // We should avoid to pass the options into properties. See SPARK-17776.
-    parameters.filterKeys(key => !jdbcOptionNames.contains(key.toLowerCase))
+    parameters.filterKeys(key => !jdbcOptionNames(key.toLowerCase))
       .foreach { case (k, v) => properties.setProperty(k, v) }
     properties
   }
@@ -132,7 +137,7 @@ class JDBCOptions(
 }
 
 object JDBCOptions {
-  private val jdbcOptionNames = ArrayBuffer.empty[String]
+  private val jdbcOptionNames = collection.mutable.Set[String]()
 
   private def newOption(name: String): String = {
     jdbcOptionNames += name.toLowerCase
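
A note on the change itself: switching `jdbcOptionNames` from an `ArrayBuffer` to a `mutable.Set` makes membership checks O(1) instead of O(n), and it lets the filter call `jdbcOptionNames(key.toLowerCase)` directly, since `Set.apply` is an alias for `contains`. Below is a minimal, self-contained sketch of the resulting registration/filtering pattern; the `OptionRegistry` object and its two option constants are hypothetical stand-ins, and only `jdbcOptionNames`, `newOption`, and the `filterKeys` line mirror the patched code.

```scala
import java.util.Properties

import scala.collection.mutable

// Hypothetical standalone sketch; `OptionRegistry` and the constants below
// are illustrative names, not part of the actual Spark patch.
object OptionRegistry {
  // A mutable Set gives O(1) membership tests (vs. O(n) for
  // ArrayBuffer.contains), and Set.apply is an alias for contains,
  // which lets the filter below call jdbcOptionNames(key) directly.
  private val jdbcOptionNames = mutable.Set[String]()

  private def newOption(name: String): String = {
    jdbcOptionNames += name.toLowerCase
    name
  }

  val JDBC_URL: String = newOption("url")
  val JDBC_TABLE_NAME: String = newOption("dbtable")

  // Strips the registered internal keys so that only vendor-specific
  // options are handed to a JDBC driver.
  def connectionProperties(parameters: Map[String, String]): Properties = {
    val properties = new Properties()
    parameters.filterKeys(key => !jdbcOptionNames(key.toLowerCase))
      .foreach { case (k, v) => properties.setProperty(k, v) }
    properties
  }
}

// Example: "url" and "dbtable" are filtered out; "user" passes through
// because only registered option names are dropped.
// OptionRegistry.connectionProperties(
//   Map("url" -> "jdbc:h2:mem:test", "dbtable" -> "t1", "user" -> "sa"))
```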