
Commit b38a1a8

Change all SQL types to upper case.
1 parent 1e2c1d9 commit b38a1a8

4 files changed: 17 additions, 17 deletions

docs/sql-programming-guide.md

Lines changed: 1 addition & 1 deletion

@@ -1339,7 +1339,7 @@ the following case-insensitive options:
   <tr>
     <td><code>customDataFrameColumnTypes</code></td>
     <td>
-      The DataFrame column data types to use instead of the defaults when reading data from jdbc API. (e.g: <code>"id decimal(38, 0), name string")</code>. The specified types should be valid spark sql data types. This option applies only to reading.
+      The DataFrame column data types to use instead of the defaults when reading data from jdbc API. (e.g: <code>"id DECIMAL(38, 0), name STRING")</code>. The specified types should be valid spark sql data types. This option applies only to reading.
     </td>
   </tr>
 </table>
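
For context, this option is consumed on the read path. A minimal usage sketch, assuming a SparkSession named `spark`; the JDBC URL, table name, and credentials below are illustrative placeholders, not part of this commit:

```scala
import java.util.Properties

// Hypothetical connection details, for illustration only.
val jdbcUrl = "jdbc:postgresql://localhost:5432/testdb"
val props = new Properties()
props.put("user", "testUser")
props.put("password", "testPass")
// Override the default JDBC-to-Catalyst column mapping with explicit
// Spark SQL types, written in upper case as the updated docs show.
props.put("customDataFrameColumnTypes", "id DECIMAL(38, 0), name STRING")

val df = spark.read.jdbc(jdbcUrl, "people", props)
df.printSchema()  // id: decimal(38,0), name: string
```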

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -80,7 +80,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
     conn.commit()

     conn.prepareStatement(
-      "CREATE TABLE tableWithCustomSchema (id NUMBER, n1 number(1), n2 number(1))").executeUpdate()
+      "CREATE TABLE tableWithCustomSchema (id NUMBER, n1 NUMBER(1), n2 NUMBER(1))").executeUpdate()
     conn.prepareStatement(
       "INSERT INTO tableWithCustomSchema values(12312321321321312312312312123, 1, 0)").executeUpdate()
     conn.commit()
@@ -291,7 +291,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
     // custom schema can read data
     val props = new Properties()
     props.put("customDataFrameColumnTypes",
-      s"ID decimal(${DecimalType.MAX_PRECISION}, 0), N1 int, N2 boolean")
+      s"ID DECIMAL(${DecimalType.MAX_PRECISION}, 0), N1 INT, N2 BOOLEAN")
     val dfRead = spark.read.jdbc(jdbcUrl, "tableWithCustomSchema", props)

     val rows = dfRead.collect()
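
A sketch of the kind of assertion the test can then make; the field names and nullability here are inferred from the snippet above (Oracle folds unquoted identifiers to upper case) and are not shown in this hunk:

```scala
import org.apache.spark.sql.types._

// The overridden types surface directly in the DataFrame schema:
// Oracle NUMBER columns read back as the requested Spark SQL types.
assert(dfRead.schema === StructType(Seq(
  StructField("ID", DecimalType(DecimalType.MAX_PRECISION, 0), true),
  StructField("N1", IntegerType, true),
  StructField("N2", BooleanType, true))))
```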

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtilsSuite.scala

Lines changed: 12 additions & 12 deletions

@@ -31,55 +31,55 @@ class JdbcUtilsSuite extends SparkFunSuite {

   test("Parse user specified column types") {
     assert(
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "C1 date, C2 string", caseInsensitive) ===
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "C1 DATE, C2 STRING", caseInsensitive) ===
       StructType(Seq(StructField("C1", DateType, true), StructField("C2", StringType, true))))
-    assert(JdbcUtils.parseUserSpecifiedColumnTypes(schema, "C1 date, C2 string", caseSensitive) ===
+    assert(JdbcUtils.parseUserSpecifiedColumnTypes(schema, "C1 DATE, C2 STRING", caseSensitive) ===
       StructType(Seq(StructField("C1", DateType, true), StructField("C2", StringType, true))))
     assert(
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c1 date, C2 string", caseInsensitive) ===
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c1 DATE, C2 STRING", caseInsensitive) ===
       StructType(Seq(StructField("c1", DateType, true), StructField("C2", StringType, true))))
     assert(JdbcUtils.parseUserSpecifiedColumnTypes(
-      schema, "c1 decimal(38, 0), C2 string", caseInsensitive) ===
+      schema, "c1 DECIMAL(38, 0), C2 STRING", caseInsensitive) ===
       StructType(Seq(StructField("c1", DecimalType(38, 0), true),
         StructField("C2", StringType, true))))

     // Throw AnalysisException
     val duplicate = intercept[AnalysisException]{
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c1 date, c1 string", caseInsensitive) ===
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c1 DATE, c1 STRING", caseInsensitive) ===
         StructType(Seq(StructField("c1", DateType, true), StructField("c1", StringType, true)))
     }
     assert(duplicate.getMessage.contains(
       "Found duplicate column(s) in the createTableColumnTypes option value"))

     val allColumns = intercept[AnalysisException]{
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "C1 string", caseSensitive) ===
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "C1 STRING", caseSensitive) ===
         StructType(Seq(StructField("C1", DateType, true)))
     }
     assert(allColumns.getMessage.contains("Please provide all the columns,"))

     val caseSensitiveColumnNotFound = intercept[AnalysisException]{
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c1 date, C2 string", caseSensitive) ===
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c1 DATE, C2 STRING", caseSensitive) ===
         StructType(Seq(StructField("c1", DateType, true), StructField("C2", StringType, true)))
     }
     assert(caseSensitiveColumnNotFound.getMessage.contains(
       s"${JDBCOptions.JDBC_CUSTOM_DATAFRAME_COLUMN_TYPES} option column c1 not found in schema"))

     val caseInsensitiveColumnNotFound = intercept[AnalysisException]{
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c3 date, C2 string", caseInsensitive) ===
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c3 DATE, C2 STRING", caseInsensitive) ===
         StructType(Seq(StructField("c3", DateType, true), StructField("C2", StringType, true)))
     }
     assert(caseInsensitiveColumnNotFound.getMessage.contains(
       s"${JDBCOptions.JDBC_CUSTOM_DATAFRAME_COLUMN_TYPES} option column c3 not found in schema"))

     // Throw ParseException
-    val DataTypeNotSupported = intercept[ParseException]{
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c3 datee, C2 string", caseInsensitive) ===
+    val dataTypeNotSupported = intercept[ParseException]{
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c3 DATEE, C2 STRING", caseInsensitive) ===
         StructType(Seq(StructField("c3", DateType, true), StructField("C2", StringType, true)))
     }
-    assert(DataTypeNotSupported.getMessage.contains("DataType datee is not supported"))
+    assert(dataTypeNotSupported.getMessage.contains("DataType datee is not supported"))

     val mismatchedInput = intercept[ParseException]{
-      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c3 date. C2 string", caseInsensitive) ===
+      JdbcUtils.parseUserSpecifiedColumnTypes(schema, "c3 DATE. C2 STRING", caseInsensitive) ===
         StructType(Seq(StructField("c3", DateType, true), StructField("C2", StringType, true)))
     }
     assert(mismatchedInput.getMessage.contains("mismatched input '.' expecting"))
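
`parseUserSpecifiedColumnTypes` is internal to the JDBC data source, but the option value uses the same comma-separated `name type` DDL syntax as Spark's public schema parser. A rough equivalence check, assuming a Spark version that exposes `StructType.fromDDL`:

```scala
import org.apache.spark.sql.types._

// Parse the same "name type" list with the public API; fields default
// to nullable, matching the expected StructTypes in the test above.
val parsed = StructType.fromDDL("C1 DATE, C2 STRING")
assert(parsed === StructType(Seq(
  StructField("C1", DateType, true),
  StructField("C2", StringType, true))))
```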

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -971,7 +971,7 @@ class JDBCSuite extends SparkFunSuite
   test("jdbc API support custom schema") {
     val parts = Array[String]("THEID < 2", "THEID >= 2")
     val props = new Properties()
-    props.put("customDataFrameColumnTypes", "NAME string, THEID bigint")
+    props.put("customDataFrameColumnTypes", "NAME STRING, THEID BIGINT")
     val schema = StructType(Seq(
       StructField("NAME", StringType, true), StructField("THEID", LongType, true)))
     val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, props)
@@ -987,7 +987,7 @@ class JDBCSuite extends SparkFunSuite
         |CREATE TEMPORARY VIEW people_view
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass',
-        |customDataFrameColumnTypes 'NAME string, THEID int')
+        |customDataFrameColumnTypes 'NAME STRING, THEID INT')
       """.stripMargin.replaceAll("\n", " "))
     val schema = StructType(
       Seq(StructField("NAME", StringType, true), StructField("THEID", IntegerType, true)))
