@@ -76,7 +76,7 @@ case class CatalogStorageFormat(

def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
locationUri.foreach(l => map.put("Location", l.toString))
locationUri.foreach(l => map.put("Location", CatalogUtils.URIToString(l)))
serde.foreach(map.put("Serde Library", _))
inputFormat.foreach(map.put("InputFormat", _))
outputFormat.foreach(map.put("OutputFormat", _))
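Reviewer note on the one-line change above (illustrative, not part of the patch): URI.toString keeps the raw, percent-encoded form of the location, while CatalogUtils.URIToString renders it through a Hadoop Path, matching how locations appear elsewhere in DESCRIBE output. A minimal Scala sketch of the difference, assuming URIToString is essentially a Path-based conversion:

import java.net.URI
import org.apache.hadoop.fs.Path

val loc = new URI("file:/warehouse/dir%20with%20space")

loc.toString           // "file:/warehouse/dir%20with%20space"  (raw, percent-encoded)
new Path(loc).toString // "file:/warehouse/dir with space"      (decoded, human-readable)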
@@ -318,14 +318,7 @@ DropTableCommand `spark_catalog`.`default`.`v`, false, true, false
-- !query
CREATE TABLE d (a STRING DEFAULT 'default-value', b INT DEFAULT 42) USING parquet COMMENT 'table_comment'
-- !query analysis
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
{
"errorClass" : "TABLE_OR_VIEW_ALREADY_EXISTS",
"sqlState" : "42P07",
"messageParameters" : {
"relationName" : "`spark_catalog`.`default`.`d`"
}
}
CreateDataSourceTableCommand `spark_catalog`.`default`.`d`, false


-- !query
@@ -355,14 +348,7 @@ DescribeTableCommand `spark_catalog`.`default`.`d`, true, [col_name#x, data_type
-- !query
CREATE TABLE e (a STRING DEFAULT CONCAT('a\n b\n ', 'c\n d'), b INT DEFAULT 42) USING parquet COMMENT 'table_comment'
-- !query analysis
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
{
"errorClass" : "TABLE_OR_VIEW_ALREADY_EXISTS",
"sqlState" : "42P07",
"messageParameters" : {
"relationName" : "`spark_catalog`.`default`.`e`"
}
}
CreateDataSourceTableCommand `spark_catalog`.`default`.`e`, false


-- !query
@@ -387,3 +373,52 @@ DescribeTableCommand `spark_catalog`.`default`.`e`, true, [col_name#x, data_type
DESC FORMATTED e
-- !query analysis
DescribeTableCommand `spark_catalog`.`default`.`e`, true, [col_name#x, data_type#x, comment#x]


-- !query
CREATE TABLE f USING json PARTITIONED BY (B, C) AS SELECT 'APACHE' A, CAST('SPARK' AS BINARY) B, TIMESTAMP'2018-11-17 13:33:33' C
-- !query analysis
CreateDataSourceTableAsSelectCommand `spark_catalog`.`default`.`f`, ErrorIfExists, [A, B, C]
+- Project [APACHE AS A#x, cast(SPARK as binary) AS B#x, 2018-11-17 13:33:33 AS C#x]
+- OneRowRelation


-- !query
DESC FORMATTED f PARTITION (B='SPARK', C=TIMESTAMP'2018-11-17 13:33:33')
-- !query analysis
DescribeTableCommand `spark_catalog`.`default`.`f`, [B=SPARK, C=2018-11-17 13:33:33], true, [col_name#x, data_type#x, comment#x]


-- !query
DESC TABLE EXTENDED f PARTITION (B='SPARK', C=TIMESTAMP'2018-11-17 13:33:33') AS JSON
-- !query analysis
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42601",
"messageParameters" : {
"error" : "'JSON'",
"hint" : ": extra input 'JSON'"
}
}


-- !query
DROP TABLE d
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.d


-- !query
DROP TABLE e
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.e


-- !query
DROP TABLE f
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.f
12 changes: 12 additions & 0 deletions sql/core/src/test/resources/sql-tests/inputs/describe.sql
@@ -119,3 +119,15 @@ DESC EXTENDED e;
DESC TABLE EXTENDED e;

DESC FORMATTED e;

CREATE TABLE f USING json PARTITIONED BY (B, C) AS SELECT 'APACHE' A, CAST('SPARK' AS BINARY) B, TIMESTAMP'2018-11-17 13:33:33' C;

DESC FORMATTED f PARTITION (B='SPARK', C=TIMESTAMP'2018-11-17 13:33:33');

DESC TABLE EXTENDED f PARTITION (B='SPARK', C=TIMESTAMP'2018-11-17 13:33:33') AS JSON;

DROP TABLE d;

DROP TABLE e;

DROP TABLE f;
74 changes: 74 additions & 0 deletions sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -864,3 +864,77 @@ Location [not included in comparison]/{warehouse_dir}/e
# Column Default Values
a string CONCAT('a\n b\n ', 'c\n d')
b int 42


-- !query
CREATE TABLE f USING json PARTITIONED BY (B, C) AS SELECT 'APACHE' A, CAST('SPARK' AS BINARY) B, TIMESTAMP'2018-11-17 13:33:33' C
-- !query schema
struct<>
-- !query output



-- !query
DESC FORMATTED f PARTITION (B='SPARK', C=TIMESTAMP'2018-11-17 13:33:33')
-- !query schema
struct<col_name:string,data_type:string,comment:string>
-- !query output
A string
B binary
C timestamp
# Partition Information
# col_name data_type comment
B binary
C timestamp

# Detailed Partition Information
Database default
Table f
Partition Values [B=SPARK, C=2018-11-17 13:33:33]
Location [not included in comparison]/{warehouse_dir}/f/B=SPARK/C=2018-11-17 13%3A33%3A33
Partition Parameters {numFiles=1, totalSize=15, transient_lastDdlTime=[not included in comparison]}
Created Time [not included in comparison]
Last Access [not included in comparison]

# Storage Information
Location [not included in comparison]/{warehouse_dir}/f
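Reviewer note (not part of the expected output above): the ':' characters in the partition value are percent-escaped in the partition directory name, which is why the partition Location ends in 13%3A33%3A33 while the space stays literal. A minimal Scala sketch of that escaping, assuming ExternalCatalogUtils.escapePathName is the helper that builds these directory names:

import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils

// ':' is escaped to %3A in partition paths; spaces are left as-is,
// matching the Location shown above.
ExternalCatalogUtils.escapePathName("2018-11-17 13:33:33")
// expected: "2018-11-17 13%3A33%3A33"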


-- !query
DESC TABLE EXTENDED f PARTITION (B='SPARK', C=TIMESTAMP'2018-11-17 13:33:33') AS JSON
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42601",
"messageParameters" : {
"error" : "'JSON'",
"hint" : ": extra input 'JSON'"
}
}


-- !query
DROP TABLE d
-- !query schema
struct<>
-- !query output



-- !query
DROP TABLE e
-- !query schema
struct<>
-- !query output



-- !query
DROP TABLE f
-- !query schema
struct<>
-- !query output

@@ -48,6 +48,9 @@ trait SQLQueryTestHelper extends Logging {
.replaceAll(s"file:[^\\s,]*$clsName", s"file:$notIncludedMsg/{warehouse_dir}")
.replaceAll("Created By.*", s"Created By $notIncludedMsg")
.replaceAll("Created Time.*", s"Created Time $notIncludedMsg")
.replaceAll(s"transient_lastDdlTime=\\d+", s"transient_lastDdlTime=$notIncludedMsg")
.replaceAll(s""""transient_lastDdlTime":"\\d+"""",
s""""transient_lastDdlTime $notIncludedMsg":"None"""")
.replaceAll("Last Access.*", s"Last Access $notIncludedMsg")
.replaceAll("Owner\t.*", s"Owner\t$notIncludedMsg")
.replaceAll("Partition Statistics\t\\d+", s"Partition Statistics\t$notIncludedMsg")