Skip to content

Commit 80d2f50

Browse files
committed
address comments
1 parent b781ef8 commit 80d2f50

File tree

1 file changed

+10
-4
lines changed

1 file changed

+10
-4
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPa
4343
import org.apache.spark.sql.catalyst.catalog._
4444
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
4545
import org.apache.spark.sql.catalyst.expressions.Expression
46-
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
46+
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException}
4747
import org.apache.spark.sql.execution.QueryExecutionException
4848
import org.apache.spark.sql.execution.command.DDLUtils
4949
import org.apache.spark.sql.types.{StructField, StructType}
@@ -728,11 +728,17 @@ private[hive] class HiveClientImpl(
728728
}
729729

730730
/**
 * Converts a Hive column descriptor ([[FieldSchema]]) into a Catalyst [[StructField]].
 *
 * The Hive type string is parsed with [[CatalystSqlParser]]; a type string the
 * parser cannot recognize is surfaced as a `SparkException` that carries the
 * original `ParseException` as its cause, so callers see which type string failed.
 * Columns coming from Hive are always treated as nullable, and the Hive column
 * comment, when present, is carried over onto the resulting field.
 */
private def fromHiveColumn(hc: FieldSchema): StructField = {
  val dataType =
    try CatalystSqlParser.parseDataType(hc.getType)
    catch {
      case e: ParseException =>
        throw new SparkException("Cannot recognize hive type string: " + hc.getType, e)
    }
  val base = StructField(
    name = hc.getName,
    dataType = dataType,
    nullable = true)
  // Attach the Hive comment when one exists; otherwise return the field unchanged.
  Option(hc.getComment).fold(base)(base.withComment)
}
737743

738744
private def toHiveTable(table: CatalogTable): HiveTable = {

0 commit comments

Comments (0)