Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,20 @@ import org.apache.spark.sql.catalyst.analysis.PartitionsAlreadyExistException
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.internal.SQLConf

/**
* This base suite contains unified tests for the `ALTER TABLE .. ADD PARTITION` command that
* check V1 and V2 table catalogs. The tests that cannot run for all supported catalogs are
* located in more specific test suites:
*
* - V2 table catalog tests:
* `org.apache.spark.sql.execution.command.v2.AlterTableAddPartitionSuite`
* - V1 table catalog tests:
* `org.apache.spark.sql.execution.command.v1.AlterTableAddPartitionSuiteBase`
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterTableAddPartitionSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterTableAddPartitionSuite`
*/
trait AlterTableAddPartitionSuiteBase extends QueryTest with DDLCommandTestUtils {
override val command = "ALTER TABLE .. ADD PARTITION"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,20 @@ import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
import org.apache.spark.sql.internal.SQLConf

/**
* This base suite contains unified tests for the `ALTER TABLE .. DROP PARTITION` command that
* check V1 and V2 table catalogs. The tests that cannot run for all supported catalogs are
* located in more specific test suites:
*
* - V2 table catalog tests:
* `org.apache.spark.sql.execution.command.v2.AlterTableDropPartitionSuite`
* - V1 table catalog tests:
* `org.apache.spark.sql.execution.command.v1.AlterTableDropPartitionSuiteBase`
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterTableDropPartitionSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterTableDropPartitionSuite`
*/
trait AlterTableDropPartitionSuiteBase extends QueryTest with DDLCommandTestUtils {
override val command = "ALTER TABLE .. DROP PARTITION"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,20 @@ package org.apache.spark.sql.execution.command

import org.apache.spark.sql.QueryTest

/**
 * This base suite contains unified tests for the `ALTER TABLE .. RENAME PARTITION` command that
 * check V1 and V2 table catalogs. The tests that cannot run for all supported catalogs are
 * located in more specific test suites:
 *
 * - V2 table catalog tests:
 *   `org.apache.spark.sql.execution.command.v2.AlterTableRenamePartitionSuite`
 * - V1 table catalog tests:
 *   `org.apache.spark.sql.execution.command.v1.AlterTableRenamePartitionSuiteBase`
 * - V1 In-Memory catalog:
 *   `org.apache.spark.sql.execution.command.v1.AlterTableRenamePartitionSuite`
 * - V1 Hive External catalog:
 *   `org.apache.spark.sql.hive.execution.command.AlterTableRenamePartitionSuite`
 */
trait AlterTableRenamePartitionSuiteBase extends QueryTest with DDLCommandTestUtils {
  // Used by `DDLCommandTestUtils.test` as the prefix of every test name in this suite.
  override val command = "ALTER TABLE .. RENAME PARTITION"
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,26 @@ import org.apache.spark.sql.Row
import org.apache.spark.sql.execution.datasources.PartitioningUtils
import org.apache.spark.sql.test.SQLTestUtils

/**
* The common settings and utility functions for all v1 and v2 test suites. When a function
* is not applicable to all supported catalogs, it should be placed in a specific trait:
*
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.CommandSuiteBase`
* - V1 Hive External catalog: `org.apache.spark.sql.hive.execution.command.CommandSuiteBase`
* - V2 In-Memory catalog: `org.apache.spark.sql.execution.command.v2.CommandSuiteBase`
*/
trait DDLCommandTestUtils extends SQLTestUtils {
// The version of the catalog under testing such as "V1", "V2", "Hive V1".
protected def version: String
// Name of the command as SQL statement, for instance "SHOW PARTITIONS"
protected def command: String
// The catalog name which can be used in SQL statements under testing
protected def catalog: String
// The clause is used in creating tables for testing
protected def defaultUsing: String

// Overrides the `test` method, adding a prefix that makes it easy to identify the catalog
// to which a failed test in the logs belongs.
override def test(testName: String, testTags: Tag*)(testFun: => Any)
(implicit pos: Position): Unit = {
super.test(s"$command $version: " + testName, testTags: _*)(testFun)
Expand All @@ -49,6 +61,7 @@ trait DDLCommandTestUtils extends SQLTestUtils {
}
}

// Checks that the table `t` contains only the `expected` partitions.
protected def checkPartitions(t: String, expected: Map[String, String]*): Unit = {
val partitions = sql(s"SHOW PARTITIONS $t")
.collect()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,16 @@ package org.apache.spark.sql.execution.command

import org.apache.spark.sql.{AnalysisException, QueryTest, Row}

/**
* This base suite contains unified tests for the `DROP TABLE` command that check V1 and V2
* table catalogs. The tests that cannot run for all supported catalogs are located in more
* specific test suites:
*
* - V2 table catalog tests: `org.apache.spark.sql.execution.command.v2.DropTableSuite`
* - V1 table catalog tests: `org.apache.spark.sql.execution.command.v1.DropTableSuiteBase`
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.DropTableSuite`
* - V1 Hive External catalog: `org.apache.spark.sql.hive.execution.command.DropTableSuite`
*/
trait DropTableSuiteBase extends QueryTest with DDLCommandTestUtils {
override val command = "DROP TABLE"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,17 @@ import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{StringType, StructType}

/**
* This base suite contains unified tests for the `SHOW PARTITIONS` command that check V1 and V2
* table catalogs. The tests that cannot run for all supported catalogs are located in more
* specific test suites:
*
* - V2 table catalog tests: `org.apache.spark.sql.execution.command.v2.ShowPartitionsSuite`
* - V1 table catalog tests: `org.apache.spark.sql.execution.command.v1.ShowPartitionsSuiteBase`
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.ShowPartitionsSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.ShowPartitionsSuite`
*/
trait ShowPartitionsSuiteBase extends QueryTest with DDLCommandTestUtils {
override val command = "SHOW PARTITIONS"
// Gets the schema of `SHOW PARTITIONS`
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,16 @@ import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.StructType

/**
* This base suite contains unified tests for the `SHOW TABLES` command that check V1 and V2
* table catalogs. The tests that cannot run for all supported catalogs are located in more
* specific test suites:
*
* - V2 table catalog tests: `org.apache.spark.sql.execution.command.v2.ShowTablesSuite`
* - V1 table catalog tests: `org.apache.spark.sql.execution.command.v1.ShowTablesSuiteBase`
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.ShowTablesSuite`
* - V1 Hive External catalog: `org.apache.spark.sql.hive.execution.command.ShowTablesSuite`
*/
trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils {
override val command = "SHOW TABLES"
protected def defaultNamespace: Seq[String]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,16 @@ import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.execution.command

/**
* This base suite contains unified tests for the `ALTER TABLE .. ADD PARTITION` command that
* check V1 table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterTableAddPartitionSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterTableAddPartitionSuite`
*/
trait AlterTableAddPartitionSuiteBase extends command.AlterTableAddPartitionSuiteBase {
override protected def checkLocation(
t: String,
Expand Down Expand Up @@ -49,4 +59,8 @@ trait AlterTableAddPartitionSuiteBase extends command.AlterTableAddPartitionSuit
}
}

/**
 * The class contains tests for the `ALTER TABLE .. ADD PARTITION` command to check the
 * V1 In-Memory table catalog. It combines the V1-specific tests from
 * `AlterTableAddPartitionSuiteBase` with the In-Memory catalog settings mixed in from
 * `CommandSuiteBase`.
 */
class AlterTableAddPartitionSuite extends AlterTableAddPartitionSuiteBase with CommandSuiteBase
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,16 @@ package org.apache.spark.sql.execution.command.v1
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.command

/**
* This base suite contains unified tests for the `ALTER TABLE .. DROP PARTITION` command that
* check V1 table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterTableDropPartitionSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterTableDropPartitionSuite`
*/
trait AlterTableDropPartitionSuiteBase extends command.AlterTableDropPartitionSuiteBase {
override protected val notFullPartitionSpecErr = "The following partitions not found in table"

Expand All @@ -34,6 +44,10 @@ trait AlterTableDropPartitionSuiteBase extends command.AlterTableDropPartitionSu
}
}

/**
* The class contains tests for the `ALTER TABLE .. DROP PARTITION` command to check
* V1 In-Memory table catalog.
*/
class AlterTableDropPartitionSuite
extends AlterTableDropPartitionSuiteBase
with CommandSuiteBase {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,16 @@ import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException, Partiti
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.internal.SQLConf

/**
* This base suite contains unified tests for the `ALTER TABLE .. RENAME PARTITION` command that
* check V1 table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterTableRenamePartitionSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterTableRenamePartitionSuite`
*/
trait AlterTableRenamePartitionSuiteBase extends command.AlterTableRenamePartitionSuiteBase {
protected def createSinglePartTable(t: String): Unit = {
sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
Expand Down Expand Up @@ -164,6 +174,10 @@ trait AlterTableRenamePartitionSuiteBase extends command.AlterTableRenamePartiti
}
}

/**
 * The class contains tests for the `ALTER TABLE .. RENAME PARTITION` command to check the
 * V1 In-Memory table catalog. It combines the V1-specific tests from
 * `AlterTableRenamePartitionSuiteBase` with the In-Memory catalog settings mixed in from
 * `CommandSuiteBase`.
 */
class AlterTableRenamePartitionSuite
  extends AlterTableRenamePartitionSuiteBase
  with CommandSuiteBase
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,14 @@ package org.apache.spark.sql.execution.command.v1
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.test.SharedSparkSession

/**
 * The trait contains settings and utility functions. It can be mixed into test suites for
 * the datasource v1 In-Memory catalog. This trait complements the common trait
 * `org.apache.spark.sql.execution.command.DDLCommandTestUtils` with utility functions and
 * settings for all unified datasource V1 and V2 test suites.
 */
trait CommandSuiteBase extends SharedSparkSession {
  def version: String = "V1" // The prefix is added to test names
  def catalog: String = CatalogManager.SESSION_CATALOG_NAME
  def defaultUsing: String = "USING parquet" // The clause is used in creating tables under testing
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,14 @@ package org.apache.spark.sql.execution.command.v1

import org.apache.spark.sql.execution.command

/**
* This base suite contains unified tests for the `DROP TABLE` command that check V1
* table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.DropTableSuite`
* - V1 Hive External catalog: `org.apache.spark.sql.hive.execution.command.DropTableSuite`
*/
trait DropTableSuiteBase extends command.DropTableSuiteBase {
test("purge option") {
withNamespace(s"$catalog.ns") {
Expand All @@ -33,5 +41,8 @@ trait DropTableSuiteBase extends command.DropTableSuiteBase {
}
}

/**
 * The class contains tests for the `DROP TABLE` command to check the V1 In-Memory table
 * catalog. It combines the V1-specific tests from `DropTableSuiteBase` with the In-Memory
 * catalog settings mixed in from `CommandSuiteBase`.
 */
class DropTableSuite extends DropTableSuiteBase with CommandSuiteBase

Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,14 @@ package org.apache.spark.sql.execution.command.v1
import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
import org.apache.spark.sql.execution.command

/**
* This base suite contains unified tests for the `SHOW PARTITIONS` command that check V1
* table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.ShowPartitionsSuite`
* - V1 Hive External catalog: `org.apache.spark.sql.hive.execution.command.ShowPartitionsSuite`
*/
trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
test("show everything in the default database") {
val table = "dateTable"
Expand Down Expand Up @@ -63,6 +71,9 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
}
}

/**
* The class contains tests for the `SHOW PARTITIONS` command to check V1 In-Memory table catalog.
*/
class ShowPartitionsSuite extends ShowPartitionsSuiteBase with CommandSuiteBase {
// The test is placed here because it fails with `USING HIVE`:
// org.apache.spark.sql.AnalysisException:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,14 @@ import org.apache.spark.sql.execution.command
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{BooleanType, StringType, StructType}

/**
* This base suite contains unified tests for the `SHOW TABLES` command that check V1
* table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.ShowTablesSuite`
* - V1 Hive External catalog: `org.apache.spark.sql.hive.execution.command.ShowTablesSuite`
*/
trait ShowTablesSuiteBase extends command.ShowTablesSuiteBase {
override def defaultNamespace: Seq[String] = Seq("default")
override def showSchema: StructType = {
Expand Down Expand Up @@ -102,6 +110,9 @@ trait ShowTablesSuiteBase extends command.ShowTablesSuiteBase {
}
}

/**
* The class contains tests for the `SHOW TABLES` command to check V1 In-Memory table catalog.
*/
class ShowTablesSuite extends ShowTablesSuiteBase with CommandSuiteBase {
test("SPARK-33670: show partitions from a datasource table") {
import testImplicits._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,10 @@ import org.apache.spark.sql.connector.InMemoryPartitionTable
import org.apache.spark.sql.connector.catalog.{CatalogV2Implicits, Identifier}
import org.apache.spark.sql.execution.command

/**
* The class contains tests for the `ALTER TABLE .. ADD PARTITION` command
* to check V2 table catalogs.
*/
class AlterTableAddPartitionSuite
extends command.AlterTableAddPartitionSuiteBase
with CommandSuiteBase {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ package org.apache.spark.sql.execution.command.v2
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.command

/**
* The class contains tests for the `ALTER TABLE .. DROP PARTITION` command
* to check V2 table catalogs.
*/
class AlterTableDropPartitionSuite
extends command.AlterTableDropPartitionSuiteBase
with CommandSuiteBase {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ package org.apache.spark.sql.execution.command.v2
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.command

/**
* The class contains tests for the `ALTER TABLE .. RENAME PARTITION` command
* to check V2 table catalogs.
*/
class AlterTableRenamePartitionSuite
extends command.AlterTableRenamePartitionSuiteBase
with CommandSuiteBase {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,18 @@ import org.apache.spark.SparkConf
import org.apache.spark.sql.connector.{InMemoryPartitionTableCatalog, InMemoryTableCatalog}
import org.apache.spark.sql.test.SharedSparkSession

/**
* The trait contains settings and utility functions. It can be mixed into test suites for
* datasource v2 catalogs (in-memory test catalogs). This trait complements the trait
* `org.apache.spark.sql.execution.command.DDLCommandTestUtils` with common utility functions
* for all unified datasource V1 and V2 test suites.
*/
trait CommandSuiteBase extends SharedSparkSession {
def version: String = "V2"
def catalog: String = "test_catalog"
def defaultUsing: String = "USING _"
def version: String = "V2" // The prefix is added to test names
def catalog: String = "test_catalog" // The default V2 catalog for testing
def defaultUsing: String = "USING _" // The clause is used in creating v2 tables under testing

// V2 catalogs created and used especially for testing
override def sparkConf: SparkConf = super.sparkConf
.set(s"spark.sql.catalog.$catalog", classOf[InMemoryPartitionTableCatalog].getName)
.set(s"spark.sql.catalog.non_part_$catalog", classOf[InMemoryTableCatalog].getName)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ import org.apache.spark.sql.connector.InMemoryTableSessionCatalog
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.internal.SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION

/**
* The class contains tests for the `DROP TABLE` command to check V2 table catalogs.
*/
class DropTableSuite extends command.DropTableSuiteBase with CommandSuiteBase {
test("purge option") {
withNamespaceAndTable("ns", "tbl") { t =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ package org.apache.spark.sql.execution.command.v2
import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
import org.apache.spark.sql.execution.command

/**
* The class contains tests for the `SHOW PARTITIONS` command to check V2 table catalogs.
*/
class ShowPartitionsSuite extends command.ShowPartitionsSuiteBase with CommandSuiteBase {
test("a table does not support partitioning") {
val table = s"non_part_$catalog.tab1"
Expand Down
Loading