Commit 68bb05c

fix.
1 parent 1d72079 commit 68bb05c

4 files changed: +106 -140 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala

Lines changed: 72 additions & 47 deletions
@@ -21,6 +21,7 @@ import java.net.URI
 import java.util.Date
 
 import com.google.common.base.Objects
+import scala.collection.mutable
 
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{CatalystConf, FunctionIdentifier, InternalRow, TableIdentifier}
@@ -57,20 +58,25 @@ case class CatalogStorageFormat(
     properties: Map[String, String]) {
 
   override def toString: String = {
-    val serdePropsToString = CatalogUtils.maskCredentials(properties) match {
-      case props if props.isEmpty => ""
-      case props => "Properties: " + props.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
-    }
-    val output =
-      Seq(locationUri.map("Location: " + _).getOrElse(""),
-        inputFormat.map("InputFormat: " + _).getOrElse(""),
-        outputFormat.map("OutputFormat: " + _).getOrElse(""),
-        if (compressed) "Compressed" else "",
-        serde.map("Serde: " + _).getOrElse(""),
-        serdePropsToString)
-    output.filter(_.nonEmpty).mkString("Storage(", ", ", ")")
+    toLinkedHashMap.map { case ((key, value)) =>
+      if (value.isEmpty) key else s"$key: $value"
+    }.mkString("Storage(", ", ", ")")
   }
 
+  def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
+    val map = new mutable.LinkedHashMap[String, String]()
+    locationUri.foreach(l => map.put("Location", l.toString))
+    serde.foreach(map.put("Serde Library", _))
+    inputFormat.foreach(map.put("InputFormat", _))
+    outputFormat.foreach(map.put("OutputFormat", _))
+    if (compressed) map.put("Compressed", "")
+    CatalogUtils.maskCredentials(properties) match {
+      case props if props.isEmpty => // No-op
+      case props =>
+        map.put("Properties", props.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]"))
+    }
+    map
+  }
 }
 
 object CatalogStorageFormat {
@@ -91,20 +97,27 @@ case class CatalogTablePartition(
     storage: CatalogStorageFormat,
     parameters: Map[String, String] = Map.empty) {
 
-  private def toStringSeq: Seq[String] = {
+  def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
+    val map = new mutable.LinkedHashMap[String, String]()
     val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
-    Seq(
-      s"Partition Values: [$specString]",
-      s"$storage",
-      s"Partition Parameters:{${parameters.map(p => p._1 + "=" + p._2).mkString(", ")}}")
+    map.put("Partition Values", s"[$specString]")
+    map ++= storage.toLinkedHashMap
+    if (parameters.nonEmpty) {
+      map.put("Partition Parameters", s"{${parameters.map(p => p._1 + "=" + p._2).mkString(", ")}}")
+    }
+    map
   }
 
   override def toString: String = {
-    toStringSeq.filter(_.nonEmpty).mkString("CatalogPartition(\n\t", "\n\t", ")")
+    toLinkedHashMap.map { case ((key, value)) =>
+      if (value.isEmpty) key else s"$key: $value"
+    }.mkString("CatalogPartition(\n\t", "\n\t", ")")
   }
 
   def simpleString: String = {
-    toStringSeq.filter(_.nonEmpty).mkString("", "\n", "")
+    toLinkedHashMap.map { case ((key, value)) =>
+      if (value.isEmpty) key else s"$key: $value"
+    }.mkString("", "\n", "")
   }
 
   /** Return the partition location, assuming it is specified. */
@@ -159,6 +172,14 @@ case class BucketSpec(
     }
     s"$numBuckets buckets, $bucketString$sortString"
   }
+
+  def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
+    mutable.LinkedHashMap[String, String](
+      "Num Buckets" -> numBuckets.toString,
+      "Bucket Columns" -> bucketColumnNames.map(quoteIdentifier).mkString("[", ", ", "]"),
+      "Sort Columns" -> sortColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
+    )
+  }
 }
 
 /**
@@ -266,44 +287,48 @@ case class CatalogTable(
       locationUri, inputFormat, outputFormat, serde, compressed, properties))
   }
 
-  private def toStringSeq: Seq[String] = {
+
+  def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
+    val map = new mutable.LinkedHashMap[String, String]()
     val tableProperties = properties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
     val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
-    val bucketStrings = bucketSpec match {
-      case Some(BucketSpec(numBuckets, bucketColumnNames, sortColumnNames)) =>
-        val bucketColumnsString = bucketColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
-        val sortColumnsString = sortColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
-        Seq(
-          s"Num Buckets: $numBuckets",
-          if (bucketColumnNames.nonEmpty) s"Bucket Columns: $bucketColumnsString" else "",
-          if (sortColumnNames.nonEmpty) s"Sort Columns: $sortColumnsString" else ""
-        )
-      case _ => Nil
+
+    map.put("Table", identifier.quotedString)
+    if (owner.nonEmpty) map.put("Owner", owner)
+    map.put("Created", new Date(createTime).toString)
+    map.put("Last Access", new Date(lastAccessTime).toString)
+    map.put("Type", tableType.name)
+    provider.foreach(map.put("Provider", _))
+    bucketSpec.foreach(map ++= _.toLinkedHashMap)
+    comment.foreach(map.put("Comment", _))
+    if (tableType == CatalogTableType.VIEW) {
+      viewText.foreach(map.put("View Text", _))
+      viewDefaultDatabase.foreach(map.put("View Default Database", _))
+      if (viewQueryColumnNames.nonEmpty) {
+        map.put("View Query Output Columns", viewQueryColumnNames.mkString("[", ", ", "]"))
+      }
     }
 
-    Seq(s"Table: ${identifier.quotedString}",
-      if (owner.nonEmpty) s"Owner: $owner" else "",
-      s"Created: ${new Date(createTime).toString}",
-      s"Last Access: ${new Date(lastAccessTime).toString}",
-      s"Type: ${tableType.name}",
-      if (provider.isDefined) s"Provider: ${provider.get}" else "",
-      if (partitionColumnNames.nonEmpty) s"Partition Columns: $partitionColumns" else ""
-    ) ++ bucketStrings ++ Seq(
-      viewText.map("View: " + _).getOrElse(""),
-      comment.map("Comment: " + _).getOrElse(""),
-      if (properties.nonEmpty) s"Properties: $tableProperties" else "",
-      if (stats.isDefined) s"Statistics: ${stats.get.simpleString}" else "",
-      s"$storage",
-      if (tracksPartitionsInCatalog) "Partition Provider: Catalog" else "",
-      if (schema.nonEmpty) s"Schema: ${schema.treeString}" else "")
+    if (properties.nonEmpty) map.put("Properties", tableProperties)
+    stats.foreach(s => map.put("Statistics", s.simpleString))
+    map ++= storage.toLinkedHashMap
+    if (tracksPartitionsInCatalog) map.put("Partition Provider", "Catalog")
+    if (partitionColumnNames.nonEmpty) map.put("Partition Columns", partitionColumns)
+    if (schema.nonEmpty) map.put("Schema", schema.treeString)
+
+    map
   }
 
   override def toString: String = {
-    toStringSeq.filter(_.nonEmpty).mkString("CatalogTable(\n", "\n", ")")
+    toLinkedHashMap.map { case ((key, value)) =>
+      if (value.isEmpty) key else s"$key: $value"
+    }.mkString("CatalogTable(\n", "\n", ")")
  }
 
   def simpleString: String = {
-    toStringSeq.filter(_.nonEmpty).mkString("", "\n", "")
+    toLinkedHashMap.map { case ((key, value)) =>
+      if (value.isEmpty) key else s"$key: $value"
+    }.mkString("", "\n", "")
   }
 }

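For readers skimming the diff: the refactor above replaces several ad-hoc Seq[String] builders with one ordered key/value map per object, so toString, simpleString, and DESCRIBE output all render fields identically. Below is a minimal, self-contained Scala sketch of that rendering pattern; the object name and field values are hypothetical stand-ins, not the Spark classes. LinkedHashMap preserves insertion order, and an entry with an empty value prints as a bare key.

import scala.collection.mutable

// A minimal sketch of the rendering pattern introduced in this commit.
object RenderPatternSketch {
  // Hypothetical field values standing in for a CatalogStorageFormat-like object.
  def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
    val map = new mutable.LinkedHashMap[String, String]()
    map.put("Location", "file:/tmp/t")          // ordinary key/value entry
    map.put("Compressed", "")                   // flag-style entry: empty value
    map.put("Serde Library", "LazySimpleSerDe") // insertion order is preserved
    map
  }

  // Empty values render as a bare key; everything else as "key: value".
  def render: String =
    toLinkedHashMap.map { case (key, value) =>
      if (value.isEmpty) key else s"$key: $value"
    }.mkString("Storage(", ", ", ")")

  def main(args: Array[String]): Unit =
    println(render) // Storage(Location: file:/tmp/t, Compressed, Serde Library: LazySimpleSerDe)
}

Keeping the map as the single source of truth means display order only has to be fixed in one place, instead of in each toString/simpleString/DESCRIBE code path.
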
sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala

Lines changed: 11 additions & 66 deletions
@@ -567,64 +567,7 @@ case class DescribeTableCommand(
   private def describeFormattedTableInfo(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
     append(buffer, "", "", "")
     append(buffer, "# Detailed Table Information", "", "")
-    append(buffer, "Database:", table.database, "")
-    append(buffer, "Owner:", table.owner, "")
-    append(buffer, "Created:", new Date(table.createTime).toString, "")
-    append(buffer, "Last Access:", new Date(table.lastAccessTime).toString, "")
-    append(buffer, "Location:", table.storage.locationUri.map(CatalogUtils.URIToString(_))
-      .getOrElse(""), "")
-    append(buffer, "Table Type:", table.tableType.name, "")
-    append(buffer, "Comment:", table.comment.getOrElse(""), "")
-    table.stats.foreach(s => append(buffer, "Statistics:", s.simpleString, ""))
-
-    append(buffer, "Table Parameters:", "", "")
-    table.properties.foreach { case (key, value) =>
-      append(buffer, s"  $key", value, "")
-    }
-
-    describeStorageInfo(table, buffer)
-
-    if (table.tableType == CatalogTableType.VIEW) describeViewInfo(table, buffer)
-
-    if (DDLUtils.isDatasourceTable(table) && table.tracksPartitionsInCatalog) {
-      append(buffer, "Partition Provider:", "Catalog", "")
-    }
-  }
-
-  private def describeStorageInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
-    append(buffer, "", "", "")
-    append(buffer, "# Storage Information", "", "")
-    metadata.storage.serde.foreach(serdeLib => append(buffer, "SerDe Library:", serdeLib, ""))
-    metadata.storage.inputFormat.foreach(format => append(buffer, "InputFormat:", format, ""))
-    metadata.storage.outputFormat.foreach(format => append(buffer, "OutputFormat:", format, ""))
-    append(buffer, "Compressed:", if (metadata.storage.compressed) "Yes" else "No", "")
-    describeBucketingInfo(metadata, buffer)
-
-    append(buffer, "Storage Desc Parameters:", "", "")
-    val maskedProperties = CatalogUtils.maskCredentials(metadata.storage.properties)
-    maskedProperties.foreach { case (key, value) =>
-      append(buffer, s"  $key", value, "")
-    }
-  }
-
-  private def describeViewInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
-    append(buffer, "", "", "")
-    append(buffer, "# View Information", "", "")
-    append(buffer, "View Text:", metadata.viewText.getOrElse(""), "")
-    append(buffer, "View Default Database:", metadata.viewDefaultDatabase.getOrElse(""), "")
-    append(buffer, "View Query Output Columns:",
-      metadata.viewQueryColumnNames.mkString("[", ", ", "]"), "")
-  }
-
-  private def describeBucketingInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
-    metadata.bucketSpec match {
-      case Some(BucketSpec(numBuckets, bucketColumnNames, sortColumnNames)) =>
-        append(buffer, "Num Buckets:", numBuckets.toString, "")
-        append(buffer, "Bucket Columns:", bucketColumnNames.mkString("[", ", ", "]"), "")
-        append(buffer, "Sort Columns:", sortColumnNames.mkString("[", ", ", "]"), "")
-
-      case _ =>
-    }
+    table.toLinkedHashMap.foreach(s => append(buffer, s._1 + ":", s._2, ""))
   }
 
   private def describeDetailedPartitionInfo(
@@ -642,7 +585,6 @@ case class DescribeTableCommand(
       describeExtendedDetailedPartitionInfo(table, metadata, partition, result)
     } else if (isFormatted) {
       describeFormattedDetailedPartitionInfo(table, metadata, partition, result)
-      describeStorageInfo(metadata, result)
     }
   }
 
@@ -652,7 +594,8 @@ case class DescribeTableCommand(
       partition: CatalogTablePartition,
       buffer: ArrayBuffer[Row]): Unit = {
     append(buffer, "", "", "")
-    append(buffer, "Detailed Partition Information " + partition.toString, "", "")
+    append(buffer, "# Detailed Partition Information", "", "")
+    append(buffer, partition.simpleString, "", "")
   }
 
   private def describeFormattedDetailedPartitionInfo(
@@ -662,15 +605,17 @@ case class DescribeTableCommand(
       buffer: ArrayBuffer[Row]): Unit = {
     append(buffer, "", "", "")
     append(buffer, "# Detailed Partition Information", "", "")
-    append(buffer, "Partition Value:", s"[${partition.spec.values.mkString(", ")}]", "")
     append(buffer, "Database:", table.database, "")
     append(buffer, "Table:", tableIdentifier.table, "")
-    append(buffer, "Location:", partition.storage.locationUri.map(CatalogUtils.URIToString(_))
-      .getOrElse(""), "")
-    append(buffer, "Partition Parameters:", "", "")
-    partition.parameters.foreach { case (key, value) =>
-      append(buffer, s"  $key", value, "")
+    partition.toLinkedHashMap.foreach(s => append(buffer, s._1 + ":", s._2, ""))
+    append(buffer, "", "", "")
+    append(buffer, "# Table Storage Information", "", "")
+    table.bucketSpec match {
+      case Some(spec) =>
+        spec.toLinkedHashMap.foreach(s => append(buffer, s._1 + ":", s._2, ""))
+      case _ =>
     }
+    table.storage.toLinkedHashMap.foreach(s => append(buffer, s._1 + ":", s._2, ""))
  }
 
   private def describeSchema(schema: StructType, buffer: ArrayBuffer[Row]): Unit = {

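On the command side, describeFormattedTableInfo and the partition variants now reduce to a single foreach over the map. A toy Scala sketch of that consumption pattern follows; Row and append here are simplified stand-ins for the ones in DescribeTableCommand's scope, and the map contents are invented for illustration.

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

object DescribeSketch {
  // Simplified stand-ins for Spark's Row and DescribeTableCommand.append (hypothetical).
  final case class Row(colName: String, dataType: String, comment: String)

  private def append(buffer: ArrayBuffer[Row], column: String, value: String, comment: String): Unit = {
    buffer += Row(column, value, comment)
  }

  def main(args: Array[String]): Unit = {
    val buffer = new ArrayBuffer[Row]()
    // Invented entries; in the real code this is table.toLinkedHashMap.
    val info = mutable.LinkedHashMap("Table" -> "`default`.`t`", "Type" -> "MANAGED")
    // The same one-liner shape as the new describeFormattedTableInfo body.
    info.foreach(s => append(buffer, s._1 + ":", s._2, ""))
    buffer.foreach(println)
  }
}

The payoff is visible in the golden-file diffs below: DESCRIBE, SHOW TABLE EXTENDED, and the toString forms now all emit the same labels in the same order.
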
sql/core/src/test/resources/sql-tests/results/describe.sql.out

Lines changed: 19 additions & 21 deletions
@@ -78,19 +78,20 @@ c string
 d string
 
 # Detailed Table Information
-Database: default
-Owner:
+Table: `default`.`t`
 Created:
 Last Access:
-Location: sql/core/spark-warehouse/t
-Table Type: MANAGED
+Type: MANAGED
+Provider: parquet
 Comment: table_comment
-Table Parameters:
-
-# Storage Information
-Compressed: No
-Storage Desc Parameters:
-Partition Provider: Catalog
+Location: sql/core/spark-warehouse/t
+Partition Provider: Catalog
+Partition Columns: [`c`, `d`]
+Schema: root
+ |-- a: string (nullable = true)
+ |-- b: integer (nullable = true)
+ |-- c: string (nullable = true)
+ |-- d: string (nullable = true)
 
 
 -- !query 6
@@ -113,10 +114,10 @@ Created:
 Last Access:
 Type: MANAGED
 Provider: parquet
-Partition Columns: [`c`, `d`]
 Comment: table_comment
-Storage(Location: sql/core/spark-warehouse/t)
+Location: sql/core/spark-warehouse/t
 Partition Provider: Catalog
+Partition Columns: [`c`, `d`]
 Schema: root
  |-- a: string (nullable = true)
  |-- b: integer (nullable = true)
@@ -153,10 +154,9 @@ d string
 c string
 d string
 
-Detailed Partition Information CatalogPartition(
-	Partition Values: [c=Us, d=1]
-	Storage(Location: sql/core/spark-warehouse/t/c=Us/d=1)
-	Partition Parameters:{})
+# Detailed Partition Information
+Partition Values: [c=Us, d=1]
+Location: sql/core/spark-warehouse/t/c=Us/d=1
 
 
 -- !query 9
@@ -174,15 +174,13 @@ c string
 d string
 
 # Detailed Partition Information
-Partition Value: [Us, 1]
 Database: default
 Table: t
+Partition Values: [c=Us, d=1]
 Location: sql/core/spark-warehouse/t/c=Us/d=1
-Partition Parameters:
 
-# Storage Information
-Compressed: No
-Storage Desc Parameters:
+# Table Storage Information
+Location: sql/core/spark-warehouse/t
 
 
 -- !query 10

sql/core/src/test/resources/sql-tests/results/show-tables.sql.out

Lines changed: 4 additions & 6 deletions
@@ -122,7 +122,6 @@ show_t3 true Table: `show_t3`
 Created:
 Last Access:
 Type: VIEW
-Storage()
 Schema: root
  |-- e: integer (nullable = true)
 
@@ -132,9 +131,9 @@ Created:
 Last Access:
 Type: MANAGED
 Provider: parquet
-Partition Columns: [`c`, `d`]
-Storage(Location: sql/core/spark-warehouse/showdb.db/show_t1)
+Location: sql/core/spark-warehouse/showdb.db/show_t1
 Partition Provider: Catalog
+Partition Columns: [`c`, `d`]
 Schema: root
  |-- a: string (nullable = true)
  |-- b: integer (nullable = true)
@@ -147,7 +146,7 @@ Created:
 Last Access:
 Type: MANAGED
 Provider: parquet
-Storage(Location: sql/core/spark-warehouse/showdb.db/show_t2)
+Location: sql/core/spark-warehouse/showdb.db/show_t2
 Schema: root
  |-- b: string (nullable = true)
  |-- d: integer (nullable = true)
@@ -173,8 +172,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us', d=1)
 struct<database:string,tableName:string,isTemporary:boolean,information:string>
 -- !query 14 output
 showdb show_t1 false Partition Values: [c=Us, d=1]
-Storage(Location: sql/core/spark-warehouse/showdb.db/show_t1/c=Us/d=1)
-Partition Parameters:{}
+Location: sql/core/spark-warehouse/showdb.db/show_t1/c=Us/d=1
 
 
 -- !query 15

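The two .sql.out files above are golden files owned by SQLQueryTestSuite; changes like these are normally produced by rerunning the suite with SPARK_GENERATE_GOLDEN_FILES=1 set in the environment (per the suite's header comment of this era; verify against your checkout) rather than edited by hand.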