@@ -375,11 +375,11 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
375375 df.sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName) match {
376376      case CatalogObjectIdentifier(Some(catalog), ident) =>
377377 insertInto(catalog, ident)
378+ // TODO(SPARK-28667): Support the V2SessionCatalog
378379      case AsTableIdentifier(tableIdentifier) =>
379380 insertInto(tableIdentifier)
380381 case other =>
381-        // TODO(SPARK-28667): This should go through V2SessionCatalog
382-        throw new UnsupportedOperationException(
382+        throw new AnalysisException(
383383          s"Couldn't find a catalog to handle the identifier ${other.quoted}.")
384384 }
385385 }
@@ -499,13 +499,13 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
499499 session.sessionState.sqlParser.parseMultipartIdentifier(tableName) match {
500500      case CatalogObjectIdentifier(Some(catalog), ident) =>
501501 saveAsTable(catalog.asTableCatalog, ident, modeForDSV2)
502+ // TODO(SPARK-28666): This should go through V2SessionCatalog
502503
503504      case AsTableIdentifier(tableIdentifier) =>
504505 saveAsTable(tableIdentifier)
505506
506507 case other =>
507-        // TODO(SPARK-28666): This should go through V2SessionCatalog
508-        throw new UnsupportedOperationException(
508+        throw new AnalysisException(
509509          s"Couldn't find a catalog to handle the identifier ${other.quoted}.")
510510 }
511511 }
0 commit comments