Skip to content

Commit a9cbcd2

Browse files
Andrew Or and roygao94
authored and committed
[SPARK-13139][SQL] Follow-ups to apache#11573
Addressing outstanding comments in apache#11573. Jenkins, new test case in `DDLCommandSuite`.

Author: Andrew Or <[email protected]>

Closes apache#11667 from andrewor14/ddl-parser-followups.
1 parent d18186d commit a9cbcd2

File tree

4 files changed

+94
-68
lines changed

4 files changed

+94
-68
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala

Lines changed: 40 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,21 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
3434
}
3535

3636
/**
37-
* For each node, extract properties in the form of a list ['key1', 'key2', 'key3', 'value']
38-
* into a pair (key1.key2.key3, value).
37+
* For each node, extract properties in the form of a list
38+
* ['key_part1', 'key_part2', 'key_part3', 'value']
39+
* into a pair (key_part1.key_part2.key_part3, value).
40+
*
41+
* Example format:
42+
*
43+
* TOK_TABLEPROPERTY
44+
* :- 'k1'
45+
* +- 'v1'
46+
* TOK_TABLEPROPERTY
47+
* :- 'k2'
48+
* +- 'v2'
49+
* TOK_TABLEPROPERTY
50+
* :- 'k3'
51+
* +- 'v3'
3952
*/
4053
private def extractProps(
4154
props: Seq[ASTNode],
@@ -101,6 +114,16 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
101114
}
102115
val props = dbprops.toSeq.flatMap {
103116
case Token("TOK_DATABASEPROPERTIES", Token("TOK_DBPROPLIST", propList) :: Nil) =>
117+
// Example format:
118+
//
119+
// TOK_DATABASEPROPERTIES
120+
// +- TOK_DBPROPLIST
121+
// :- TOK_TABLEPROPERTY
122+
// : :- 'k1'
123+
// : +- 'v1'
124+
// :- TOK_TABLEPROPERTY
125+
// :- 'k2'
126+
// +- 'v2'
104127
extractProps(propList, "TOK_TABLEPROPERTY")
105128
case _ => parseFailed("Invalid CREATE DATABASE command", node)
106129
}.toMap
@@ -112,16 +135,16 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
112135
// Example format:
113136
//
114137
// TOK_CREATEFUNCTION
115-
// :- db_name
116-
// :- func_name
117-
// :- alias
118-
// +- TOK_RESOURCE_LIST
119-
// :- TOK_RESOURCE_URI
120-
// : :- TOK_JAR
121-
// : +- '/path/to/jar'
122-
// +- TOK_RESOURCE_URI
123-
// :- TOK_FILE
124-
// +- 'path/to/file'
138+
// :- db_name
139+
// :- func_name
140+
// :- alias
141+
// +- TOK_RESOURCE_LIST
142+
// :- TOK_RESOURCE_URI
143+
// : :- TOK_JAR
144+
// : +- '/path/to/jar'
145+
// +- TOK_RESOURCE_URI
146+
// :- TOK_FILE
147+
// +- 'path/to/file'
125148
val (funcNameArgs, otherArgs) = args.partition {
126149
case Token("TOK_RESOURCE_LIST", _) => false
127150
case Token("TOK_TEMPORARY", _) => false
@@ -139,9 +162,9 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
139162
}
140163
// Extract other keywords, if they exist
141164
val Seq(rList, temp) = getClauses(Seq("TOK_RESOURCE_LIST", "TOK_TEMPORARY"), otherArgs)
142-
val resourcesMap = rList.toSeq.flatMap {
143-
case Token("TOK_RESOURCE_LIST", resources) =>
144-
resources.map {
165+
val resources: Seq[(String, String)] = rList.toSeq.flatMap {
166+
case Token("TOK_RESOURCE_LIST", resList) =>
167+
resList.map {
145168
case Token("TOK_RESOURCE_URI", rType :: Token(rPath, Nil) :: Nil) =>
146169
val resourceType = rType match {
147170
case Token("TOK_JAR", Nil) => "jar"
@@ -153,8 +176,8 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
153176
case _ => parseFailed("Invalid CREATE FUNCTION command", node)
154177
}
155178
case _ => parseFailed("Invalid CREATE FUNCTION command", node)
156-
}.toMap
157-
CreateFunction(funcName, alias, resourcesMap, temp.isDefined)(node.source)
179+
}
180+
CreateFunction(funcName, alias, resources, temp.isDefined)(node.source)
158181

159182
case Token("TOK_ALTERTABLE", alterTableArgs) =>
160183
AlterTableCommandParser.parse(node)

sql/core/src/main/scala/org/apache/spark/sql/execution/command/AlterTableCommandParser.scala

Lines changed: 48 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -55,20 +55,22 @@ object AlterTableCommandParser {
5555
/**
5656
* Extract partition spec from the given [[ASTNode]] as a map, assuming it exists.
5757
*
58-
* Expected format:
59-
* +- TOK_PARTSPEC
60-
* :- TOK_PARTVAL
61-
* : :- dt
62-
* : +- '2008-08-08'
63-
* +- TOK_PARTVAL
64-
* :- country
65-
* +- 'us'
58+
* Example format:
59+
*
60+
* TOK_PARTSPEC
61+
* :- TOK_PARTVAL
62+
* : :- dt
63+
* : +- '2008-08-08'
64+
* +- TOK_PARTVAL
65+
* :- country
66+
* +- 'us'
6667
*/
6768
private def parsePartitionSpec(node: ASTNode): Map[String, String] = {
6869
node match {
6970
case Token("TOK_PARTSPEC", partitions) =>
7071
partitions.map {
7172
// Note: sometimes there's a "=", "<" or ">" between the key and the value
73+
// (e.g. when dropping all partitions with value > than a certain constant)
7274
case Token("TOK_PARTVAL", ident :: conj :: constant :: Nil) =>
7375
(cleanAndUnquoteString(ident.text), cleanAndUnquoteString(constant.text))
7476
case Token("TOK_PARTVAL", ident :: constant :: Nil) =>
@@ -86,15 +88,16 @@ object AlterTableCommandParser {
8688
/**
8789
* Extract table properties from the given [[ASTNode]] as a map, assuming it exists.
8890
*
89-
* Expected format:
90-
* +- TOK_TABLEPROPERTIES
91-
* +- TOK_TABLEPROPLIST
92-
* :- TOK_TABLEPROPERTY
93-
* : :- 'test'
94-
* : +- 'value'
95-
* +- TOK_TABLEPROPERTY
96-
* :- 'comment'
97-
* +- 'new_comment'
91+
* Example format:
92+
*
93+
* TOK_TABLEPROPERTIES
94+
* +- TOK_TABLEPROPLIST
95+
* :- TOK_TABLEPROPERTY
96+
* : :- 'test'
97+
* : +- 'value'
98+
* +- TOK_TABLEPROPERTY
99+
* :- 'comment'
100+
* +- 'new_comment'
98101
*/
99102
private def extractTableProps(node: ASTNode): Map[String, String] = {
100103
node match {
@@ -209,21 +212,21 @@ object AlterTableCommandParser {
209212
Token("TOK_TABCOLNAME", colNames) :: colValues :: rest) :: Nil) :: _ =>
210213
// Example format:
211214
//
212-
// +- TOK_ALTERTABLE_SKEWED
213-
// :- TOK_TABLESKEWED
214-
// : :- TOK_TABCOLNAME
215-
// : : :- dt
216-
// : : +- country
217-
// :- TOK_TABCOLVALUE_PAIR
218-
// : :- TOK_TABCOLVALUES
219-
// : : :- TOK_TABCOLVALUE
220-
// : : : :- '2008-08-08'
221-
// : : : +- 'us'
222-
// : :- TOK_TABCOLVALUES
223-
// : : :- TOK_TABCOLVALUE
224-
// : : : :- '2009-09-09'
225-
// : : : +- 'uk'
226-
// +- TOK_STOREASDIR
215+
// TOK_ALTERTABLE_SKEWED
216+
// :- TOK_TABLESKEWED
217+
// : :- TOK_TABCOLNAME
218+
// : : :- dt
219+
// : : +- country
220+
// :- TOK_TABCOLVALUE_PAIR
221+
// : :- TOK_TABCOLVALUES
222+
// : : :- TOK_TABCOLVALUE
223+
// : : : :- '2008-08-08'
224+
// : : : +- 'us'
225+
// : :- TOK_TABCOLVALUES
226+
// : : :- TOK_TABCOLVALUE
227+
// : : : :- '2009-09-09'
228+
// : : : +- 'uk'
229+
// +- TOK_STOREASDIR
227230
val names = colNames.map { n => cleanAndUnquoteString(n.text) }
228231
val values = colValues match {
229232
case Token("TOK_TABCOLVALUE", vals) =>
@@ -260,20 +263,20 @@ object AlterTableCommandParser {
260263
case Token("TOK_ALTERTABLE_SKEWED_LOCATION",
261264
Token("TOK_SKEWED_LOCATIONS",
262265
Token("TOK_SKEWED_LOCATION_LIST", locationMaps) :: Nil) :: Nil) :: _ =>
263-
// Expected format:
266+
// Example format:
264267
//
265-
// +- TOK_ALTERTABLE_SKEWED_LOCATION
266-
// +- TOK_SKEWED_LOCATIONS
267-
// +- TOK_SKEWED_LOCATION_LIST
268-
// :- TOK_SKEWED_LOCATION_MAP
269-
// : :- 'col1'
270-
// : +- 'loc1'
271-
// +- TOK_SKEWED_LOCATION_MAP
272-
// :- TOK_TABCOLVALUES
273-
// : +- TOK_TABCOLVALUE
274-
// : :- 'col2'
275-
// : +- 'col3'
276-
// +- 'loc2'
268+
// TOK_ALTERTABLE_SKEWED_LOCATION
269+
// +- TOK_SKEWED_LOCATIONS
270+
// +- TOK_SKEWED_LOCATION_LIST
271+
// :- TOK_SKEWED_LOCATION_MAP
272+
// : :- 'col1'
273+
// : +- 'loc1'
274+
// +- TOK_SKEWED_LOCATION_MAP
275+
// :- TOK_TABCOLVALUES
276+
// : +- TOK_TABCOLVALUE
277+
// : :- 'col2'
278+
// : +- 'col3'
279+
// +- 'loc2'
277280
val skewedMaps = locationMaps.flatMap {
278281
case Token("TOK_SKEWED_LOCATION_MAP", col :: loc :: Nil) =>
279282
col match {

sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ case class CreateDatabase(
5555
case class CreateFunction(
5656
functionName: String,
5757
alias: String,
58-
resourcesMap: Map[String, String],
58+
resources: Seq[(String, String)],
5959
isTemp: Boolean)(sql: String)
6060
extends NativeDDLCommand(sql) with Logging
6161

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -48,26 +48,26 @@ class DDLCommandSuite extends PlanTest {
4848
val sql1 =
4949
"""
5050
|CREATE TEMPORARY FUNCTION helloworld as
51-
|'com.matthewrathbone.example.SimpleUDFExample' USING JAR '/path/to/jar',
52-
|FILE 'path/to/file'
51+
|'com.matthewrathbone.example.SimpleUDFExample' USING JAR '/path/to/jar1',
52+
|JAR '/path/to/jar2'
5353
""".stripMargin
5454
val sql2 =
5555
"""
5656
|CREATE FUNCTION hello.world as
5757
|'com.matthewrathbone.example.SimpleUDFExample' USING ARCHIVE '/path/to/archive',
58-
|FILE 'path/to/file'
58+
|FILE '/path/to/file'
5959
""".stripMargin
6060
val parsed1 = parser.parsePlan(sql1)
6161
val parsed2 = parser.parsePlan(sql2)
6262
val expected1 = CreateFunction(
6363
"helloworld",
6464
"com.matthewrathbone.example.SimpleUDFExample",
65-
Map("jar" -> "/path/to/jar", "file" -> "path/to/file"),
65+
Seq(("jar", "/path/to/jar1"), ("jar", "/path/to/jar2")),
6666
isTemp = true)(sql1)
6767
val expected2 = CreateFunction(
6868
"hello.world",
6969
"com.matthewrathbone.example.SimpleUDFExample",
70-
Map("archive" -> "/path/to/archive", "file" -> "path/to/file"),
70+
Seq(("archive", "/path/to/archive"), ("file", "/path/to/file")),
7171
isTemp = false)(sql2)
7272
comparePlans(parsed1, expected1)
7373
comparePlans(parsed2, expected2)

0 commit comments

Comments (0)