@@ -64,7 +64,11 @@ case class CreateTableAsSelect(
 
   override def output: Seq[Attribute] = Seq.empty[Attribute]
   override lazy val resolved: Boolean =
-    tableDesc.specifiedDatabase.isDefined && tableDesc.schema.size > 0 && childrenResolved
+    // TODO add more conditions?
+    tableDesc.specifiedDatabase.isDefined &&
+      tableDesc.schema.size > 0 &&
+      tableDesc.serde.isDefined &&
+      childrenResolved
 }
 
 /** Provides a mapping from HiveQL statements to catalyst logical plans and expression trees. */
@@ -607,26 +611,24 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
         serde = None,
         viewText = None)
 
-      // default serde & input/output format
-      tableDesc = if ("SequenceFile".equalsIgnoreCase(
-        hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
+      // default storage type abbreviation (e.g. RCFile, ORC, PARQUET, etc.)
+      val defaultStorageType = hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT)
+      // handle the default format for the storage type abbreviation
+      tableDesc = if ("SequenceFile".equalsIgnoreCase(defaultStorageType)) {
         tableDesc.copy(
           inputFormat = Option("org.apache.hadoop.mapred.SequenceFileInputFormat"),
           outputFormat = Option("org.apache.hadoop.mapred.SequenceFileOutputFormat"))
-      } else if ("RCFile".equalsIgnoreCase(
-        hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
+      } else if ("RCFile".equalsIgnoreCase(defaultStorageType)) {
         tableDesc.copy(
           inputFormat = Option("org.apache.hadoop.hive.ql.io.RCFileInputFormat"),
           outputFormat = Option("org.apache.hadoop.hive.ql.io.RCFileOutputFormat"),
           serde = Option(hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE)))
-      } else if ("ORC".equalsIgnoreCase(
-        hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
+      } else if ("ORC".equalsIgnoreCase(defaultStorageType)) {
         tableDesc.copy(
           inputFormat = Option("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"),
           outputFormat = Option("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"),
           serde = Option("org.apache.hadoop.hive.ql.io.orc.OrcSerde"))
-      } else if ("PARQUET".equalsIgnoreCase(
-        hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
+      } else if ("PARQUET".equalsIgnoreCase(defaultStorageType)) {
         tableDesc.copy(
           inputFormat =
             Option("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat"),
@@ -766,12 +768,6 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
         case _ => // Unsupported features
       }
 
-      if (tableDesc.serde.isEmpty) {
-        // add default serde
-        tableDesc = tableDesc.copy(
-          serde = Some("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"))
-      }
-
       CreateTableAsSelect(tableDesc, nodeToPlan(query), allowExisting != None)
 
       // If it's not a "CTAS" like above then take it as a native command
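
For context on the refactor above: the repeated `hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT)` lookups are folded into a single `defaultStorageType` value, and each supported format fills in the table descriptor's input/output formats plus, where one exists, a default serde. Below is a minimal, self-contained sketch of that dispatch pattern; `TableDesc` and `applyDefaultStorage` are simplified stand-ins introduced here for illustration only, not the actual HiveQl types.

// Simplified stand-in for the Hive table descriptor used in the patch (hypothetical type).
case class TableDesc(
    inputFormat: Option[String] = None,
    outputFormat: Option[String] = None,
    serde: Option[String] = None)

// Map the configured default storage type to concrete storage settings,
// mirroring the if/else chain in the hunk above.
def applyDefaultStorage(desc: TableDesc, defaultStorageType: String): TableDesc =
  defaultStorageType.toLowerCase match {
    case "sequencefile" =>
      desc.copy(
        inputFormat = Some("org.apache.hadoop.mapred.SequenceFileInputFormat"),
        outputFormat = Some("org.apache.hadoop.mapred.SequenceFileOutputFormat"))
    case "orc" =>
      desc.copy(
        inputFormat = Some("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"),
        outputFormat = Some("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"),
        serde = Some("org.apache.hadoop.hive.ql.io.orc.OrcSerde"))
    case _ =>
      // RCFile and Parquet follow the same pattern; unrecognized values leave the
      // descriptor unchanged in this sketch.
      desc
  }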