diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala
index f7a88b98c0b4..7c2cc5475eb9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala
@@ -59,6 +59,7 @@ class DDLParser(parseQuery: String => LogicalPlan)
   protected val AS = Keyword("AS")
   protected val COMMENT = Keyword("COMMENT")
   protected val REFRESH = Keyword("REFRESH")
+  protected val NULL = Keyword("NULL")

   protected lazy val ddl: Parser[LogicalPlan] =
     createTable | describeTable | refreshTable
@@ -173,13 +174,15 @@ class DDLParser(parseQuery: String => LogicalPlan)
     optionName ~ stringLit ^^ { case k ~ v => (k, v) }

   protected lazy val column: Parser[StructField] =
-    ident ~ dataType ~ (COMMENT ~> stringLit).? ^^ { case columnName ~ typ ~ cm =>
-      val meta = cm match {
-        case Some(comment) =>
-          new MetadataBuilder().putString(COMMENT.str.toLowerCase, comment).build()
-        case None => Metadata.empty
-      }
-
-      StructField(columnName, typ, nullable = true, meta)
+    ident ~ dataType ~ (NOT ~ NULL).? ~ (COMMENT ~> stringLit).? ^^ {
+      case columnName ~ typ ~ notNull ~ cm =>
+        val meta = cm match {
+          case Some(comment) =>
+            new MetadataBuilder().putString(COMMENT.str.toLowerCase, comment).build()
+          case None => Metadata.empty
+        }
+
+        val isNullable = notNull.isEmpty
+        StructField(columnName, typ, nullable = isNullable, meta)
     }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
index 5f8514e1a241..213654c7cd2d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
@@ -113,4 +113,23 @@ class DDLTestSuite extends DataSourceTest with SharedSQLContext {
     assert(attributes.map(_.name) === Seq("col_name", "data_type", "comment"))
     assert(attributes.map(_.dataType).toSet === Set(StringType))
   }
+
+  test("SPARK-7012 Create table statement should support NOT NULL modifier for columns") {
+    withTempPath { dir =>
+      val path = dir.getCanonicalPath
+      sql(
+        s"""
+           |CREATE TEMPORARY TABLE tempTableDDL
+           |( tCol1 INT NOT NULL,
+           |  tCol2 STRING
+           |)
+           |USING parquet
+           |OPTIONS (
+           |  path '$path'
+           |)
+         """.stripMargin
+      )
+      caseInsensitiveContext.dropTempTable("tempTableDDL")
+    }
+  }
 }
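
Reviewer note (not part of the patch): a minimal sketch of the user-specified schema the revised `column` rule is expected to produce for the column list used in the new test. The `expectedSchema` value and the explicit StructType construction are illustrative assumptions, not code from this change.

    import org.apache.spark.sql.types._

    // Column list from the test: ( tCol1 INT NOT NULL, tCol2 STRING )
    // NOT NULL turns nullable off for that column; columns without the
    // modifier keep the previous default of nullable = true.
    val expectedSchema = StructType(Seq(
      StructField("tCol1", IntegerType, nullable = false),
      StructField("tCol2", StringType, nullable = true)))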