Skip to content

Commit 16c829e

Browse files
committed
address comments.
1 parent 536cf36 commit 16c829e

File tree

6 files changed

+101
-92
lines changed

6 files changed

+101
-92
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala

Lines changed: 0 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -17,14 +17,12 @@
1717

1818
package org.apache.spark.sql.execution.command
1919

20-
import java.io.File
2120
import java.util.NoSuchElementException
2221

2322
import org.apache.spark.internal.Logging
2423
import org.apache.spark.rdd.RDD
2524
import org.apache.spark.sql.{Dataset, Row, SQLContext}
2625
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, TableIdentifier}
27-
import org.apache.spark.sql.catalyst.catalog.CatalogDatabase
2826
import org.apache.spark.sql.catalyst.errors.TreeNodeException
2927
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
3028
import org.apache.spark.sql.catalyst.plans.logical
@@ -319,50 +317,6 @@ case class DescribeCommand(
319317
}
320318
}
321319

322-
/**
323-
* A command for users to show the name of the database, its comment (if one has been set), and its
324-
* root location on the filesystem. When extended is true, it also shows the database's properties.
325-
* If the database does not exist, an error message will be issued to indicate the database
326-
* does not exist.
327-
* The syntax of using this command in SQL is
328-
* {{{
329-
* DESCRIBE DATABASE [EXTENDED] db_name
330-
* }}}
331-
*/
332-
case class DescribeDatabase(
333-
databaseName: String,
334-
extended: Boolean)
335-
extends RunnableCommand {
336-
337-
override def run(sqlContext: SQLContext): Seq[Row] = {
338-
val dbMetadata: CatalogDatabase = sqlContext.sessionState.catalog.getDatabase(databaseName)
339-
val result =
340-
Row("Database Name", dbMetadata.name) ::
341-
Row("Description", dbMetadata.description) ::
342-
Row("Location", dbMetadata.locationUri) :: Nil
343-
344-
if (extended) {
345-
val properties =
346-
if (dbMetadata.properties.isEmpty) {
347-
""
348-
} else {
349-
dbMetadata.properties.toSeq.mkString("(", ", ", ")")
350-
}
351-
result :+ Row("Properties", properties)
352-
} else {
353-
result
354-
}
355-
}
356-
357-
override val output: Seq[Attribute] = {
358-
val schema = StructType(
359-
StructField("database_description_item", StringType, nullable = false) ::
360-
StructField("database_description_value", StringType, nullable = false) :: Nil)
361-
362-
schema.toAttributes
363-
}
364-
}
365-
366320
/**
367321
* A command for users to get tables in the given database.
368322
* If a databaseName is not given, the current database will be used.

sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala

Lines changed: 44 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ case class CreateDatabase(
8282
/**
8383
* A command for users to remove a database from the system.
8484
*
85-
* 'ignoreIfNotExists':
85+
* 'ifExists':
8686
* - true, if database_name doesn't exist, no action
8787
* - false (default), if database_name doesn't exist, a warning message will be issued
8888
* 'cascade':
@@ -97,12 +97,12 @@ case class CreateDatabase(
9797
*/
9898
case class DropDatabase(
9999
databaseName: String,
100-
ignoreIfNotExists: Boolean,
100+
ifExists: Boolean,
101101
cascade: Boolean)
102102
extends RunnableCommand {
103103

104104
override def run(sqlContext: SQLContext): Seq[Row] = {
105-
sqlContext.sessionState.catalog.dropDatabase(databaseName, ignoreIfNotExists, cascade)
105+
sqlContext.sessionState.catalog.dropDatabase(databaseName, ifExists, cascade)
106106
Seq.empty[Row]
107107
}
108108

@@ -134,6 +134,47 @@ case class AlterDatabaseProperties(
134134
override val output: Seq[Attribute] = Seq.empty
135135
}
136136

137+
/**
138+
* A command for users to show the name of the database, its comment (if one has been set), and its
139+
* root location on the filesystem. When extended is true, it also shows the database's properties.
140+
* If the database does not exist, an error message will be issued to indicate the database
141+
* does not exist.
142+
* The syntax of using this command in SQL is
143+
* {{{
144+
* DESCRIBE DATABASE [EXTENDED] db_name
145+
* }}}
146+
*/
147+
case class DescribeDatabase(
148+
databaseName: String,
149+
extended: Boolean)
150+
extends RunnableCommand {
151+
152+
override def run(sqlContext: SQLContext): Seq[Row] = {
153+
val dbMetadata: CatalogDatabase = sqlContext.sessionState.catalog.getDatabase(databaseName)
154+
val result =
155+
Row("Database Name", dbMetadata.name) ::
156+
Row("Description", dbMetadata.description) ::
157+
Row("Location", dbMetadata.locationUri) :: Nil
158+
159+
if (extended) {
160+
val properties =
161+
if (dbMetadata.properties.isEmpty) {
162+
""
163+
} else {
164+
dbMetadata.properties.toSeq.mkString("(", ", ", ")")
165+
}
166+
result :+ Row("Properties", properties)
167+
} else {
168+
result
169+
}
170+
}
171+
172+
override val output: Seq[Attribute] = {
173+
AttributeReference("database_description_item", StringType, nullable = false)() ::
174+
AttributeReference("database_description_value", StringType, nullable = false)() :: Nil
175+
}
176+
}
177+
137178
case class CreateFunction(
138179
databaseName: Option[String],
139180
functionName: String,

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -65,19 +65,19 @@ class DDLCommandSuite extends PlanTest {
6565

6666
val expected1 = DropDatabase(
6767
"database_name",
68-
ignoreIfNotExists = true,
68+
ifExists = true,
6969
cascade = false)
7070
val expected2 = DropDatabase(
7171
"database_name",
72-
ignoreIfNotExists = true,
72+
ifExists = true,
7373
cascade = true)
7474
val expected3 = DropDatabase(
7575
"database_name",
76-
ignoreIfNotExists = false,
76+
ifExists = false,
7777
cascade = false)
7878
val expected4 = DropDatabase(
7979
"database_name",
80-
ignoreIfNotExists = false,
80+
ifExists = false,
8181
cascade = true)
8282

8383
comparePlans(parsed1, expected1)

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala

Lines changed: 49 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@
1717

1818
package org.apache.spark.sql.execution.command
1919

20+
import java.io.File
21+
2022
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
2123
import org.apache.spark.sql.catalyst.catalog.CatalogDatabase
2224
import org.apache.spark.sql.catalyst.parser.ParserUtils._
@@ -35,30 +37,67 @@ class DDLSuite extends QueryTest with SharedSQLContext {
3537
}
3638
}
3739

38-
test("Create/Drop/Alter/Describe Database - basic") {
40+
test("Create/Drop Database") {
3941
val catalog = sqlContext.sessionState.catalog
4042

4143
val databaseNames = Seq("db1", "`database`")
4244

4345
databaseNames.foreach { dbName =>
4446
withDatabase(dbName) {
4547
val dbNameWithoutBackTicks = cleanIdentifier(dbName)
48+
4649
sql(s"CREATE DATABASE $dbName")
4750
val db1 = catalog.getDatabase(dbNameWithoutBackTicks)
4851
assert(db1 == CatalogDatabase(
49-
dbNameWithoutBackTicks, "", s"$dbNameWithoutBackTicks.db", Map.empty))
52+
dbNameWithoutBackTicks,
53+
"",
54+
System.getProperty("java.io.tmpdir") + File.separator + s"$dbNameWithoutBackTicks.db",
55+
Map.empty))
56+
sql(s"DROP DATABASE $dbName CASCADE")
57+
assert(!catalog.databaseExists(dbNameWithoutBackTicks))
58+
}
59+
}
60+
}
5061

51-
checkAnswer(
52-
sql(s"DESCRIBE DATABASE $dbName"),
53-
Row("Database Name", dbNameWithoutBackTicks) ::
54-
Row("Description", "") ::
55-
Row("Location", s"$dbNameWithoutBackTicks.db") :: Nil)
62+
test("Create Database - database already exists") {
63+
val catalog = sqlContext.sessionState.catalog
64+
val databaseNames = Seq("db1", "`database`")
65+
66+
databaseNames.foreach { dbName =>
67+
val dbNameWithoutBackTicks = cleanIdentifier(dbName)
68+
withDatabase(dbName) {
69+
sql(s"CREATE DATABASE $dbName")
70+
val db1 = catalog.getDatabase(dbNameWithoutBackTicks)
71+
assert(db1 == CatalogDatabase(
72+
dbNameWithoutBackTicks,
73+
"",
74+
System.getProperty("java.io.tmpdir") + File.separator + s"$dbNameWithoutBackTicks.db",
75+
Map.empty))
76+
77+
val message = intercept[AnalysisException] {
78+
sql(s"CREATE DATABASE $dbName")
79+
}.getMessage
80+
assert(message.contains(s"Database '$dbNameWithoutBackTicks' already exists."))
81+
}
82+
}
83+
}
84+
85+
test("Alter/Describe Database") {
86+
val catalog = sqlContext.sessionState.catalog
87+
val databaseNames = Seq("db1", "`database`")
88+
89+
databaseNames.foreach { dbName =>
90+
withDatabase(dbName) {
91+
val dbNameWithoutBackTicks = cleanIdentifier(dbName)
92+
val location =
93+
System.getProperty("java.io.tmpdir") + File.separator + s"$dbNameWithoutBackTicks.db"
94+
sql(s"CREATE DATABASE $dbName")
5695

5796
checkAnswer(
5897
sql(s"DESCRIBE DATABASE EXTENDED $dbName"),
5998
Row("Database Name", dbNameWithoutBackTicks) ::
6099
Row("Description", "") ::
61-
Row("Location", s"$dbNameWithoutBackTicks.db") ::
100+
Row("Location", location) ::
62101
Row("Properties", "") :: Nil)
63102

64103
sql(s"ALTER DATABASE $dbName SET DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')")
@@ -67,7 +106,7 @@ class DDLSuite extends QueryTest with SharedSQLContext {
67106
sql(s"DESCRIBE DATABASE EXTENDED $dbName"),
68107
Row("Database Name", dbNameWithoutBackTicks) ::
69108
Row("Description", "") ::
70-
Row("Location", s"$dbNameWithoutBackTicks.db") ::
109+
Row("Location", location) ::
71110
Row("Properties", "((a,a), (b,b), (c,c))") :: Nil)
72111

73112
sql(s"ALTER DATABASE $dbName SET DBPROPERTIES ('d'='d')")
@@ -76,31 +115,8 @@ class DDLSuite extends QueryTest with SharedSQLContext {
76115
sql(s"DESCRIBE DATABASE EXTENDED $dbName"),
77116
Row("Database Name", dbNameWithoutBackTicks) ::
78117
Row("Description", "") ::
79-
Row("Location", s"$dbNameWithoutBackTicks.db") ::
118+
Row("Location", location) ::
80119
Row("Properties", "((a,a), (b,b), (c,c), (d,d))") :: Nil)
81-
82-
sql(s"DROP DATABASE $dbName CASCADE")
83-
assert(!catalog.databaseExists(dbNameWithoutBackTicks))
84-
}
85-
}
86-
}
87-
88-
test("Create Database - database already exists") {
89-
val catalog = sqlContext.sessionState.catalog
90-
val databaseNames = Seq("db1", "`database`")
91-
92-
databaseNames.foreach { dbName =>
93-
val dbNameWithoutBackTicks = cleanIdentifier(dbName)
94-
withDatabase(dbName) {
95-
sql(s"CREATE DATABASE $dbName")
96-
val db1 = catalog.getDatabase(dbNameWithoutBackTicks)
97-
assert(db1 == CatalogDatabase(
98-
dbNameWithoutBackTicks, "", s"$dbNameWithoutBackTicks.db", Map.empty))
99-
100-
val message = intercept[AnalysisException] {
101-
sql(s"CREATE DATABASE $dbName")
102-
}.getMessage
103-
assert(message.contains(s"Database '$dbNameWithoutBackTicks' already exists."))
104120
}
105121
}
106122
}

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
183183
test("Single command with --database") {
184184
runCliWithin(2.minute)(
185185
"CREATE DATABASE hive_test_db;"
186-
-> "OK",
186+
-> "",
187187
"USE hive_test_db;"
188188
-> "",
189189
"CREATE TABLE hive_test(key INT, val STRING);"

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,7 @@
1717

1818
package org.apache.spark.sql.hive
1919

20-
import java.io.File
21-
20+
import org.apache.hadoop.fs.Path
2221
import org.apache.hadoop.hive.conf.HiveConf
2322

2423
import org.apache.spark.sql.catalyst.TableIdentifier
@@ -62,9 +61,8 @@ class HiveSessionCatalog(
6261
// ----------------------------------------------------------------
6362

6463
override def getDefaultDBPath(db: String): String = {
65-
val defaultPath = client.getConf(HiveConf.ConfVars.METASTOREWAREHOUSE.varname,
66-
HiveConf.ConfVars.METASTOREWAREHOUSE.defaultStrVal)
67-
defaultPath + File.separator + db + ".db"
64+
val defaultPath = context.hiveconf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE)
65+
new Path(new Path(defaultPath), db + ".db").toString
6866
}
6967

7068
// Catalog for handling data source tables. TODO: This really doesn't belong here since it is

0 commit comments

Comments
 (0)