diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 25423e510157..606d92306144 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2946,7 +2946,8 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
     val location = visitLocationSpecList(ctx.locationSpec())
     val (cleanedOptions, newLocation) = cleanTableOptions(ctx, options, location)
     val comment = visitCommentSpecList(ctx.commentSpec())
-    val serdeInfo = getSerdeInfo(ctx.rowFormat.asScala, ctx.createFileFormat.asScala, ctx)
+    val serdeInfo =
+      getSerdeInfo(ctx.rowFormat.asScala.toSeq, ctx.createFileFormat.asScala.toSeq, ctx)
     (partTransforms, partCols, bucketSpec, cleanedProperties, cleanedOptions, newLocation,
       comment, serdeInfo)
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index a92f0775f1c0..568c7112954f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -440,7 +440,7 @@ class SparkSqlAstBuilder extends AstBuilder {
     val location = visitLocationSpecList(ctx.locationSpec())
     // TODO: Do not skip serde check for CREATE TABLE LIKE.
     val serdeInfo = getSerdeInfo(
-      ctx.rowFormat.asScala, ctx.createFileFormat.asScala, ctx, skipCheck = true)
+      ctx.rowFormat.asScala.toSeq, ctx.createFileFormat.asScala.toSeq, ctx, skipCheck = true)
     if (provider.isDefined && serdeInfo.isDefined) {
       operationNotAllowed(s"CREATE TABLE LIKE ... USING ... ${serdeInfo.get.describe}", ctx)
     }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index f330d6a8c99e..a0bc65d3f905 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -114,7 +114,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
   private def toOptions(properties: Map[String, String]): Map[String, String] = {
     properties.filterKeys(_.startsWith(TableCatalog.OPTION_PREFIX)).map {
       case (key, value) => key.drop(TableCatalog.OPTION_PREFIX.length) -> value
-    }
+    }.toMap
   }
 
   override def alterTable(
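The explicit conversions above follow the usual pattern for code that must cross-build against Scala 2.13, where `asScala` on a Java list yields a `mutable.Buffer` (no longer assignable to the now-immutable `scala.Seq`) and `filterKeys` returns a lazy `MapView` rather than a strict `Map`. The snippet below is a minimal standalone sketch of that behavior, not part of the patch; the object name `Scala213CollectionsSketch` and the sample data are made up for illustration.

    // Standalone sketch (hypothetical, not from the patch): why .toSeq / .toMap are needed.
    import scala.collection.JavaConverters._

    object Scala213CollectionsSketch {
      def main(args: Array[String]): Unit = {
        val javaList = java.util.Arrays.asList("a", "b")

        // asScala yields a mutable.Buffer. In Scala 2.13, scala.Seq aliases
        // scala.collection.immutable.Seq, so a Buffer no longer satisfies a
        // Seq[String] parameter; .toSeq makes the conversion explicit.
        val asImmutableSeq: Seq[String] = javaList.asScala.toSeq

        // In Scala 2.13, filterKeys returns a MapView and mapping over it yields
        // another view; .toMap materializes it back into a strict Map.
        val props = Map("option.path" -> "/tmp", "owner" -> "spark")
        val options: Map[String, String] =
          props.filterKeys(_.startsWith("option.")).map {
            case (key, value) => key.drop("option.".length) -> value
          }.toMap

        println(asImmutableSeq) // List(a, b)
        println(options)        // Map(path -> /tmp)
      }
    }

Both added calls are no-ops (or cheap copies) on Scala 2.12, so the same source compiles under both versions.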