From 39aa1fd040c62d63f48de7723e853180b31065f5 Mon Sep 17 00:00:00 2001
From: Yuming Wang
Date: Wed, 7 Aug 2019 18:23:45 +0800
Subject: [PATCH 1/3] Hide credentials in show create table

---
 .../spark/sql/execution/command/tables.scala  |  3 +-
 .../org/apache/spark/sql/jdbc/JDBCSuite.scala | 29 ++++++++++++++++++-
 2 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index ca42de39db86..25d39c71cdf8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -48,6 +48,7 @@ import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetDataSourceV2
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.SchemaUtils
+import org.apache.spark.util.Utils
 
 /**
  * A command to create a table with the same definition of the given existing table.
@@ -1099,7 +1100,7 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
   private def showDataSourceTableOptions(metadata: CatalogTable, builder: StringBuilder): Unit = {
     builder ++= s"USING ${metadata.provider.get}\n"
 
-    val dataSourceOptions = metadata.storage.properties.map {
+    val dataSourceOptions = Utils.redact(metadata.storage.properties).map {
       case (key, value) => s"${quoteIdentifier(key)} '${escapeSingleQuotedString(value)}'"
     }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 89eaac8e5927..b651ad158314 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -26,10 +26,11 @@ import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
 
 import org.apache.spark.SparkException
 import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeTestUtils}
 import org.apache.spark.sql.execution.DataSourceScanExec
-import org.apache.spark.sql.execution.command.ExplainCommand
+import org.apache.spark.sql.execution.command.{ExplainCommand, ShowCreateTableCommand}
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JDBCPartition, JDBCRDD, JDBCRelation, JdbcUtils}
 import org.apache.spark.sql.execution.metric.InputOutputMetricsHelper
@@ -1029,6 +1030,32 @@ class JDBCSuite extends QueryTest
     }
   }
 
+  test("Hide credentials in show create table") {
+    val password = "testPass"
+    val tableName = "tab1"
+    withTable(tableName) {
+      val df = sql(
+        s"""
+           |CREATE TABLE $tableName
+           |USING org.apache.spark.sql.jdbc
+           |OPTIONS (
+           |  url '$urlWithUserAndPass',
+           |  dbtable 'TEST.PEOPLE',
+           |  user 'testUser',
+           |  password '$password')
+         """.stripMargin)
+
+      val show = ShowCreateTableCommand(TableIdentifier(tableName))
+      spark.sessionState.executePlan(show).executedPlan.executeCollect().foreach { r =>
+        assert(!r.toString.contains(password))
+      }
+
+      sql(s"SHOW CREATE TABLE $tableName").collect().foreach { r =>
+        assert(!r.toString().contains(password))
+      }
+    }
+  }
+
   test("SPARK 12941: The data type mapping for StringType to Oracle") {
     val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
     assert(oracleDialect.getJDBCType(StringType).

From 20bf38a931820fbb48b8f75dae453c0fd4a28f5c Mon Sep 17 00:00:00 2001
From: Yuming Wang
Date: Thu, 8 Aug 2019 23:47:31 +0800
Subject: [PATCH 2/3] Utils.redact -> SQLConf.get.redactOptions?

---
 .../scala/org/apache/spark/sql/execution/command/tables.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 25d39c71cdf8..af1e1bc59cbb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -48,7 +48,6 @@ import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetDataSourceV2
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.SchemaUtils
-import org.apache.spark.util.Utils
 
 /**
  * A command to create a table with the same definition of the given existing table.
@@ -1100,7 +1099,7 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
   private def showDataSourceTableOptions(metadata: CatalogTable, builder: StringBuilder): Unit = {
     builder ++= s"USING ${metadata.provider.get}\n"
 
-    val dataSourceOptions = Utils.redact(metadata.storage.properties).map {
+    val dataSourceOptions = SQLConf.get.redactOptions(metadata.storage.properties).map {
       case (key, value) => s"${quoteIdentifier(key)} '${escapeSingleQuotedString(value)}'"
     }
 

From e764f9eb66c8ecd199dd89687e9115b2a0f8bad5 Mon Sep 17 00:00:00 2001
From: Yuming Wang
Date: Fri, 9 Aug 2019 06:55:55 +0800
Subject: [PATCH 3/3] Remove val df =

---
 .../src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index b651ad158314..9f7faa2baf56 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -1034,7 +1034,7 @@ class JDBCSuite extends QueryTest
     val password = "testPass"
     val tableName = "tab1"
     withTable(tableName) {
-      val df = sql(
+      sql(
         s"""
           |CREATE TABLE $tableName
          |USING org.apache.spark.sql.jdbc