@@ -219,37 +219,37 @@ case class LoadDataCommand(
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
     val targetTable = catalog.getTableMetadata(table)
-    val qualifiedName = targetTable.identifier.quotedString
+    val tableIdentwithDB = targetTable.identifier.quotedString
 
     if (targetTable.tableType == CatalogTableType.VIEW) {
-      throw new AnalysisException(s"Target table in LOAD DATA cannot be a view: $qualifiedName")
+      throw new AnalysisException(s"Target table in LOAD DATA cannot be a view: $tableIdentwithDB")
     }
     if (DDLUtils.isDatasourceTable(targetTable)) {
       throw new AnalysisException(
-        s"LOAD DATA is not supported for datasource tables: $qualifiedName")
+        s"LOAD DATA is not supported for datasource tables: $tableIdentwithDB")
     }
     if (targetTable.partitionColumnNames.nonEmpty) {
       if (partition.isEmpty) {
-        throw new AnalysisException(s"LOAD DATA target table $qualifiedName is partitioned, " +
+        throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is partitioned, " +
           s"but no partition spec is provided")
       }
       if (targetTable.partitionColumnNames.size != partition.get.size) {
-        throw new AnalysisException(s"LOAD DATA target table $qualifiedName is partitioned, " +
+        throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is partitioned, " +
           s"but number of columns in provided partition spec (${partition.get.size}) " +
           s"do not match number of partitioned columns in table " +
           s"(s${targetTable.partitionColumnNames.size})")
       }
       partition.get.keys.foreach { colName =>
         if (!targetTable.partitionColumnNames.contains(colName)) {
-          throw new AnalysisException(s"LOAD DATA target table $qualifiedName is partitioned, " +
+          throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is partitioned, " +
             s"but the specified partition spec refers to a column that is not partitioned: " +
             s"'$colName'")
         }
       }
     } else {
       if (partition.nonEmpty) {
-        throw new AnalysisException(s"LOAD DATA target table $qualifiedName is not partitioned, " +
-          s"but a partition spec was provided.")
+        throw new AnalysisException(s"LOAD DATA target table $tableIdentwithDB is not " +
+          s"partitioned, but a partition spec was provided.")
       }
     }
 
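The renamed value is still just `targetTable.identifier.quotedString`, i.e. the back-quoted, database-qualified form of the table name. A minimal sketch of what that string holds (the database and table names are invented for illustration, not taken from this patch):

import org.apache.spark.sql.catalyst.TableIdentifier

// quotedString back-quotes each part and includes the database when it is set,
// which is what the LOAD DATA error messages above now interpolate.
val ident = TableIdentifier("sales", Some("reporting"))
println(ident.quotedString)    // `reporting`.`sales`
println(ident.unquotedString)  // reporting.sales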
@@ -338,26 +338,26 @@ case class TruncateTableCommand(
   override def run(spark: SparkSession): Seq[Row] = {
     val catalog = spark.sessionState.catalog
     val table = catalog.getTableMetadata(tableName)
-    val qualifiedName = table.identifier.quotedString
+    val tableIdentwithDB = table.identifier.quotedString
 
     if (table.tableType == CatalogTableType.EXTERNAL) {
       throw new AnalysisException(
-        s"Operation not allowed: TRUNCATE TABLE on external tables: $qualifiedName")
+        s"Operation not allowed: TRUNCATE TABLE on external tables: $tableIdentwithDB")
     }
     if (table.tableType == CatalogTableType.VIEW) {
       throw new AnalysisException(
-        s"Operation not allowed: TRUNCATE TABLE on views: $qualifiedName")
+        s"Operation not allowed: TRUNCATE TABLE on views: $tableIdentwithDB")
     }
     val isDatasourceTable = DDLUtils.isDatasourceTable(table)
     if (isDatasourceTable && partitionSpec.isDefined) {
       throw new AnalysisException(
         s"Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported " +
-          s"for tables created using the data sources API: $qualifiedName")
+          s"for tables created using the data sources API: $tableIdentwithDB")
     }
     if (table.partitionColumnNames.isEmpty && partitionSpec.isDefined) {
       throw new AnalysisException(
         s"Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported " +
-          s"for tables that are not partitioned: $qualifiedName")
+          s"for tables that are not partitioned: $tableIdentwithDB")
     }
     val locations =
       if (isDatasourceTable) {
@@ -378,7 +378,7 @@ case class TruncateTableCommand(
         } catch {
           case NonFatal(e) =>
             throw new AnalysisException(
-              s"Failed to truncate table $qualifiedName when removing data of the path: $path " +
+              s"Failed to truncate table $tableIdentwithDB when removing data of the path: $path " +
                 s"because of ${e.toString}")
         }
       }
@@ -391,7 +391,7 @@ case class TruncateTableCommand(
       spark.sharedState.cacheManager.uncacheQuery(spark.table(table.identifier))
     } catch {
       case NonFatal(e) =>
-        log.warn(s"Exception when attempting to uncache table $qualifiedName", e)
+        log.warn(s"Exception when attempting to uncache table $tableIdentwithDB", e)
     }
     Seq.empty[Row]
   }
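A hedged sketch of how these guards surface to a caller, assuming a running SparkSession named `spark` and a hypothetical external table `warehouse`.`events` (neither is part of this patch). Command nodes such as TruncateTableCommand run eagerly when `spark.sql` is invoked, so the AnalysisException is thrown at the call site:

import scala.util.{Failure, Success, Try}
import org.apache.spark.sql.AnalysisException

Try(spark.sql("TRUNCATE TABLE warehouse.events")) match {
  case Failure(e: AnalysisException) =>
    // With this change the message ends in the fully qualified name,
    // e.g. "... on external tables: `warehouse`.`events`".
    println(e.getMessage)
  case Failure(other) => throw other
  case Success(_) => println("truncated")
}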
@@ -646,7 +646,7 @@ case class ShowPartitionsCommand(
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
     val table = catalog.getTableMetadata(tableName)
-    val qualifiedName = table.identifier.quotedString
+    val tableIdentWithDB = table.identifier.quotedString
 
     /**
      * Validate and throws an [[AnalysisException]] exception under the following conditions:
@@ -655,17 +655,17 @@ case class ShowPartitionsCommand(
      * 3. If it is a view.
      */
     if (table.tableType == VIEW) {
-      throw new AnalysisException(s"SHOW PARTITIONS is not allowed on a view: $qualifiedName")
+      throw new AnalysisException(s"SHOW PARTITIONS is not allowed on a view: $tableIdentWithDB")
     }
 
     if (table.partitionColumnNames.isEmpty) {
       throw new AnalysisException(
-        s"SHOW PARTITIONS is not allowed on a table that is not partitioned: $qualifiedName")
+        s"SHOW PARTITIONS is not allowed on a table that is not partitioned: $tableIdentWithDB")
     }
 
     if (DDLUtils.isDatasourceTable(table)) {
       throw new AnalysisException(
-        s"SHOW PARTITIONS is not allowed on a datasource table: $qualifiedName")
+        s"SHOW PARTITIONS is not allowed on a datasource table: $tableIdentWithDB")
     }
 
     /**
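To round out the picture, a small usage sketch for SHOW PARTITIONS, assuming a SparkSession named `spark` and a hypothetical partitioned Hive table `logs`.`requests` (not from this patch):

// On a valid, partitioned, non-datasource table this returns one row per
// partition spec string; on a view, a non-partitioned table, or a datasource
// table the checks above throw an AnalysisException whose message now quotes
// the database-qualified name.
spark.sql("SHOW PARTITIONS logs.requests").show(truncate = false)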