Skip to content

Commit 60b007d

Browse files
author
YIHAODIAN\wangshuangshuang
committed
SPARK-19726: add a new boolean parameter alwaysNullable to getSchema
1 parent 3b1785c commit 60b007d

File tree

4 files changed

+14
-97
lines changed

4 files changed

+14
-97
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -61,13 +61,7 @@ object JDBCRDD extends Logging {
6161
try {
6262
val rs = statement.executeQuery()
6363
try {
64-
val metaStructType = JdbcUtils.getSchema(rs, dialect)
65-
StructType(metaStructType.map(f =>
66-
if(f.nullable)
67-
f
68-
else
69-
StructField(f.name, f.dataType, true, f.metadata)
70-
))
64+
JdbcUtils.getSchema(rs, dialect)
7165
} finally {
7266
rs.close()
7367
}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -273,6 +273,9 @@ object JdbcUtils extends Logging {
273273
val rsmd = resultSet.getMetaData
274274
val ncols = rsmd.getColumnCount
275275
val fields = new Array[StructField](ncols)
276+
// if true, spark will propagate null to underlying DB engine instead of using type default value
277+
val alwaysNullable = true
278+
276279
var i = 0
277280
while (i < ncols) {
278281
val columnName = rsmd.getColumnLabel(i + 1)
@@ -290,7 +293,7 @@ object JdbcUtils extends Logging {
290293
rsmd.getClass.getName == "org.apache.hive.jdbc.HiveResultSetMetaData" => true
291294
}
292295
}
293-
val nullable = rsmd.isNullable(i + 1) != ResultSetMetaData.columnNoNulls
296+
val nullable = if (alwaysNullable) alwaysNullable else rsmd.isNullable(i + 1) != ResultSetMetaData.columnNoNulls
294297
val metadata = new MetadataBuilder()
295298
.putString("name", columnName)
296299
.putLong("scale", fieldScale)

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,10 @@ package org.apache.spark.sql.jdbc
2020
import java.sql.{Date, DriverManager, Timestamp}
2121
import java.util.Properties
2222

23-
import scala.collection.JavaConverters.propertiesAsScalaMapConverter
23+
import org.apache.spark.SparkException
2424

25+
import scala.collection.JavaConverters.propertiesAsScalaMapConverter
2526
import org.scalatest.BeforeAndAfter
26-
2727
import org.apache.spark.sql.{AnalysisException, DataFrame, Row, SaveMode}
2828
import org.apache.spark.sql.catalyst.parser.ParseException
2929
import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JdbcUtils}
@@ -506,4 +506,11 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {
506506
"schema struct<name:string,id:int>"))
507507
}
508508
}
509+
510+
test("SPARK-19726: INSERT null to a NOT NULL column") {
511+
val e = intercept[SparkException] {
512+
sql("INSERT INTO PEOPLE1 values (null, null)")
513+
}.getMessage
514+
assert(e.contains("NULL not allowed for column \"NAME\""))
515+
}
509516
}

sql/core/src/test/scala/org/apache/spark/sql/sources/JdbcInsertSuite.scala

Lines changed: 0 additions & 87 deletions
This file was deleted.

0 commit comments

Comments (0)