Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.DescribeCommandSchema
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, quoteIdentifier, CaseInsensitiveMap}
import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, quoteIdentifier, CaseInsensitiveMap, CharVarcharUtils}
import org.apache.spark.sql.execution.datasources.DataSource
import org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
import org.apache.spark.sql.execution.datasources.json.JsonFileFormat
Expand Down Expand Up @@ -631,7 +631,7 @@ case class DescribeTableCommand(
}
describeSchema(catalog.lookupRelation(table).schema, result, header = false)
} else {
val metadata = catalog.getTableMetadata(table)
val metadata = catalog.getTableRawMetadata(table)
if (metadata.schema.isEmpty) {
// In older version(prior to 2.1) of Spark, the table schema can be empty and should be
// inferred at runtime. We should still support it.
Expand Down Expand Up @@ -782,9 +782,11 @@ case class DescribeColumnCommand(
None
}

val dataType = CharVarcharUtils.getRawType(field.metadata)
.getOrElse(field.dataType).catalogString
val buffer = ArrayBuffer[Row](
Row("col_name", field.name),
Row("data_type", field.dataType.catalogString),
Row("data_type", dataType),
Row("comment", comment.getOrElse("NULL"))
)
if (isExtended) {
Expand Down Expand Up @@ -1111,7 +1113,7 @@ case class ShowCreateTableCommand(table: TableIdentifier)
throw new AnalysisException(
s"SHOW CREATE TABLE is not supported on a temporary view: ${table.identifier}")
} else {
val tableMetadata = catalog.getTableMetadata(table)
val tableMetadata = catalog.getTableRawMetadata(table)

// TODO: [SPARK-28692] unify this after we unify the
// CREATE TABLE syntax for hive serde and data source table.
Expand Down Expand Up @@ -1262,7 +1264,7 @@ case class ShowCreateTableAsSerdeCommand(table: TableIdentifier)

override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
val tableMetadata = catalog.getTableMetadata(table)
val tableMetadata = catalog.getTableRawMetadata(table)

val stmt = if (DDLUtils.isDatasourceTable(tableMetadata)) {
throw new AnalysisException(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources.v2

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeSet, GenericRowWithSchema}
import org.apache.spark.sql.catalyst.expressions.{Attribute, GenericRowWithSchema}
import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Table}

/**
Expand All @@ -30,8 +30,6 @@ case class ShowTablePropertiesExec(
catalogTable: Table,
propertyKey: Option[String]) extends V2CommandExec {

override def producedAttributes: AttributeSet = AttributeSet(output)

override protected def run(): Seq[InternalRow] = {
import scala.collection.JavaConverters._
val toRow = RowEncoder(schema).resolveAndBind().createSerializer()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.datasources.v2

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.AttributeSet
import org.apache.spark.sql.execution.SparkPlan

/**
Expand Down Expand Up @@ -55,4 +56,7 @@ abstract class V2CommandExec extends SparkPlan {
}

// V2 command nodes are leaf physical plans: they carry no child plans.
override def children: Seq[SparkPlan] = Nil

// Every output attribute originates from the command itself (there are no
// children to inherit attributes from), so report the full output set as produced.
override def producedAttributes: AttributeSet = outputSet

}
Original file line number Diff line number Diff line change
Expand Up @@ -443,6 +443,14 @@ trait CharVarcharTestSuite extends QueryTest with SQLTestUtils {
("c1 IN (c2)", true)))
}
}

test("SPARK-33892: DESCRIBE TABLE w/ char/varchar") {
  withTable("t") {
    sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
    // DESCRIBE must surface the declared char/varchar types, not a widened string type.
    val charColumns = sql("desc t").selectExpr("data_type").where("data_type like '%char%'")
    checkAnswer(charColumns, Seq(Row("char(5)"), Row("varchar(3)")))
  }
}
}

// Some basic char/varchar tests which doesn't rely on table implementation.
Expand Down Expand Up @@ -603,6 +611,27 @@ class FileSourceCharVarcharTestSuite extends CharVarcharTestSuite with SharedSpa
}
}
}

// TODO(SPARK-33875): Move these tests to super after DESCRIBE COLUMN v2 implemented
test("SPARK-33892: DESCRIBE COLUMN w/ char/varchar") {
  withTable("t") {
    sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
    // Each column description must keep the declared char/varchar length.
    val varcharInfo = sql("desc t v").selectExpr("info_value").where("info_value like '%char%'")
    checkAnswer(varcharInfo, Row("varchar(3)"))
    val charInfo = sql("desc t c").selectExpr("info_value").where("info_value like '%char%'")
    checkAnswer(charInfo, Row("char(5)"))
  }
}

// TODO(SPARK-33898): Move these tests to super after SHOW CREATE TABLE for v2 implemented
test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
  withTable("t") {
    sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
    // The generated DDL must echo the raw char/varchar column definitions.
    val ddl = sql("SHOW CREATE TABLE t").head().getString(0)
    assert(ddl.contains("VARCHAR(3)"))
    assert(ddl.contains("CHAR(5)"))
  }
}
}

class DSV2CharVarcharTestSuite extends CharVarcharTestSuite
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,15 @@ class HiveCharVarcharTestSuite extends CharVarcharTestSuite with TestHiveSinglet
}
super.afterAll()
}

test("SPARK-33892: SHOW CREATE TABLE AS SERDE w/ char/varchar") {
  withTable("t") {
    sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
    // The Hive-serde flavored DDL must also keep the declared char/varchar lengths.
    val serdeDdl = sql("SHOW CREATE TABLE t AS SERDE").head().getString(0)
    assert(serdeDdl.contains("VARCHAR(3)"))
    assert(serdeDdl.contains("CHAR(5)"))
  }
}
}

class HiveCharVarcharDDLTestSuite extends CharVarcharDDLTestBase with TestHiveSingleton {
Expand Down