diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
index 97f521a378eb7..6e76b74c7d830 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
@@ -119,6 +119,8 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest
 
   override def supportsIndex: Boolean = true
 
+  override def supportListIndexes: Boolean = true
+
   override def indexOptions: String = "KEY_BLOCK_SIZE=10"
 
   testVarPop()
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
index 5f0033490d5c5..0f85bd534c393 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
@@ -197,6 +197,8 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
 
   def supportsIndex: Boolean = false
 
+  def supportListIndexes: Boolean = false
+
   def indexOptions: String = ""
 
   test("SPARK-36895: Test INDEX Using SQL") {
@@ -219,11 +221,21 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
       s" The supported Index Types are:"))
 
     sql(s"CREATE index i1 ON $catalogName.new_table USING BTREE (col1)")
+    assert(jdbcTable.indexExists("i1"))
+    if (supportListIndexes) {
+      val indexes = jdbcTable.listIndexes()
+      assert(indexes.size == 1)
+      assert(indexes.head.indexName() == "i1")
+    }
+
     sql(s"CREATE index i2 ON $catalogName.new_table (col2, col3, col5)" +
       s" OPTIONS ($indexOptions)")
-
-    assert(jdbcTable.indexExists("i1") == true)
-    assert(jdbcTable.indexExists("i2") == true)
+    assert(jdbcTable.indexExists("i2"))
+    if (supportListIndexes) {
+      val indexes = jdbcTable.listIndexes()
+      assert(indexes.size == 2)
+      assert(indexes.map(_.indexName()).sorted === Array("i1", "i2"))
+    }
 
     // This should pass without exception
     sql(s"CREATE index IF NOT EXISTS i1 ON $catalogName.new_table (col1)")
@@ -234,10 +246,18 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
     assert(m.contains("Failed to create index i1 in new_table"))
 
     sql(s"DROP index i1 ON $catalogName.new_table")
-    sql(s"DROP index i2 ON $catalogName.new_table")
-
     assert(jdbcTable.indexExists("i1") == false)
+    if (supportListIndexes) {
+      val indexes = jdbcTable.listIndexes()
+      assert(indexes.size == 1)
+      assert(indexes.head.indexName() == "i2")
+    }
+
+    sql(s"DROP index i2 ON $catalogName.new_table")
     assert(jdbcTable.indexExists("i2") == false)
+    if (supportListIndexes) {
+      assert(jdbcTable.listIndexes().isEmpty)
+    }
 
     // This should pass without exception
     sql(s"DROP index IF EXISTS i1 ON $catalogName.new_table")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
index 24f9bac74f86d..c4cb5369af9e7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
@@ -206,7 +206,7 @@ private case object MySQLDialect extends JdbcDialect with SQLConfHelper {
       val indexName = rs.getString("key_name")
       val colName = rs.getString("column_name")
       val indexType = rs.getString("index_type")
-      val indexComment = rs.getString("Index_comment")
+      val indexComment = rs.getString("index_comment")
       if (indexMap.contains(indexName)) {
         val index = indexMap.get(indexName).get
         val newIndex = new TableIndex(indexName, indexType,