[SPARK-21054][SQL] Reset command: support resetting a specific property. #18268
Changes from all commits: f050f39, f2bf940, 4e76a31, 579adf9, cbb424f, 99436d6, ecc2650, d8e7106, e4cb39c
**SetCommand.scala**
```diff
@@ -150,12 +150,21 @@ object SetCommand {
 /**
  * This command is for resetting SQLConf to the default values. Command that runs
  * {{{
  *   reset;
+ *   reset key;
+ *   reset key1 key2 ...;
  * }}}
  */
-case object ResetCommand extends RunnableCommand with Logging {
+case class ResetCommand(key: Option[String]) extends RunnableCommand with Logging {
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    sparkSession.sessionState.conf.clear()
+    key match {
+      case None =>
+        sparkSession.sessionState.conf.clear()
+      // "RESET key" clears a specific property.
+      case Some(key) =>
+        key.split("\\s+")
```
> **Member:** What happens if the space is part of the property key? For example, …
>
> **Author:** OK, I plan on adding … I thought we should open another issue.
>
> **Member:** See PR #18368.
```diff
+          .foreach(confName => if (!confName.isEmpty) sparkSession.conf.unset(confName))
+    }
     Seq.empty[Row]
   }
 }
```
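The review thread above concerns the `split("\\s+")` call. As a REPL-style sketch (plain Scala, hypothetical key names; not part of the PR), this is why a key that itself contains whitespace cannot survive this parsing — the follow-up referenced above is PR #18368:

```scala
// Any run of whitespace acts as a separator, so a key containing a space
// is indistinguishable from two separate keys.
val withSpace = "my key"                          // hypothetical key with a space
val parsed = withSpace.split("\\s+").filter(_.nonEmpty)
assert(parsed.sameElements(Array("my", "key")))   // the key "my key" itself is lost
```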
**SparkSqlParserSuite.scala**
```diff
@@ -301,4 +301,10 @@ class SparkSqlParserSuite extends PlanTest {
       "SELECT a || b || c FROM t",
       Project(UnresolvedAlias(concat) :: Nil, UnresolvedRelation(TableIdentifier("t"))))
   }
+
+  test("reset") {
+    assertEqual("reset", ResetCommand(None))
+    assertEqual("reset spark.test.property", ResetCommand(Some("spark.test.property")))
+    assertEqual("reset #$a! !a$# \t ", ResetCommand(Some("#$a! !a$#")))
+  }
 }
```
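As a usage illustration of the round trip from SQL text to command (a sketch with a hypothetical property name, assuming a build with this patch applied; not part of the PR):

```scala
import org.apache.spark.sql.SparkSession

object ResetDemo extends App {
  val spark = SparkSession.builder().master("local[1]").appName("reset-demo").getOrCreate()

  spark.sql("SET spark.test.property=false")   // define a session-level property
  // Parsed as ResetCommand(Some("spark.test.property")): unsets just that key.
  spark.sql("RESET spark.test.property")
  assert(spark.conf.getOption("spark.test.property").isEmpty)
  // Parsed as ResetCommand(None): resets the whole SQLConf to its defaults.
  spark.sql("RESET")

  spark.stop()
}
```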
**SQLConfSuite.scala**
```diff
@@ -114,50 +114,79 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     }
   }
 
-  test("reset - public conf") {
-    spark.sessionState.conf.clear()
-    val original = spark.conf.get(SQLConf.GROUP_BY_ORDINAL)
-    try {
-      assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
-      sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=false")
-      assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === false)
-      assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 1)
-      sql(s"reset")
-      assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
-      assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0)
-    } finally {
-      sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original")
+  Seq("reset", s"reset ${SQLConf.GROUP_BY_ORDINAL.key}").foreach { resetCmd =>
+    test(s"$resetCmd - public conf") {
+      spark.sessionState.conf.clear()
+      val original = spark.conf.get(SQLConf.GROUP_BY_ORDINAL)
+      try {
+        assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
+        sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=false")
+        assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === false)
+        assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 1)
+        sql(resetCmd)
+        assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
+        assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0)
+      } finally {
+        sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original")
```
> **Contributor:** Not introduced in this PR, but we don't need to do this, as we call …
>
> **Author:** Do you mean it is a duplicated cleanup action? I suppose the purpose is to prevent impact on other tests.
>
> **Contributor:** I think this is OK. After …
```diff
+      }
+    }
+  }
```
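For readers unfamiliar with the pattern above: `Seq(...).foreach { resetCmd => test(...) }` registers one named test per command string at suite construction time, so the `reset` and `reset <key>` variants run as separate cases. A minimal standalone sketch (assuming ScalaTest 3.x; suite and test names are hypothetical):

```scala
import org.scalatest.funsuite.AnyFunSuite

class ResetVariantsDemo extends AnyFunSuite {
  // Each element yields its own test case with a distinct name,
  // so both command variants are exercised independently.
  Seq("reset", "reset spark.test.property").foreach { resetCmd =>
    test(s"$resetCmd - demo") {
      assert(resetCmd.startsWith("reset"))
    }
  }
}
```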
| test("reset - internal conf") { | ||
| spark.sessionState.conf.clear() | ||
| val original = spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) | ||
| try { | ||
| assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100) | ||
| sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=10") | ||
| assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 10) | ||
| assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 1) | ||
| sql(s"reset") | ||
| assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100) | ||
| assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 0) | ||
| } finally { | ||
| sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS}=$original") | ||
| Seq("reset", s"reset ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}").foreach { resetCmd => | ||
| test(s"$resetCmd - internal conf") { | ||
| spark.sessionState.conf.clear() | ||
| val original = spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) | ||
| try { | ||
| assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100) | ||
| sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=10") | ||
| assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 10) | ||
| assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 1) | ||
| sql(resetCmd) | ||
| assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100) | ||
| assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 0) | ||
| } finally { | ||
| sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS}=$original") | ||
| } | ||
| } | ||
| } | ||
|
|
||
| test("reset - user-defined conf") { | ||
| spark.sessionState.conf.clear() | ||
| val userDefinedConf = "x.y.z.reset" | ||
| try { | ||
| assert(spark.conf.getOption(userDefinedConf).isEmpty) | ||
| sql(s"set $userDefinedConf=false") | ||
| assert(spark.conf.get(userDefinedConf) === "false") | ||
| assert(sql(s"set").where(s"key = '$userDefinedConf'").count() == 1) | ||
| sql(s"reset") | ||
| assert(spark.conf.getOption(userDefinedConf).isEmpty) | ||
| } finally { | ||
| spark.conf.unset(userDefinedConf) | ||
| Seq("reset", s"reset $testKey").foreach { resetCmd => | ||
| test(s"$resetCmd - user-defined conf") { | ||
| spark.sessionState.conf.clear() | ||
| try { | ||
| assert(spark.conf.getOption(testKey).isEmpty) | ||
| sql(s"set $testKey=false") | ||
| assert(spark.conf.get(testKey) === "false") | ||
| assert(sql(s"set").where(s"key = '$testKey'").count() == 1) | ||
| sql(resetCmd) | ||
| assert(spark.conf.getOption(testKey).isEmpty) | ||
| } finally { | ||
| spark.conf.unset(testKey) | ||
| } | ||
| } | ||
| } | ||
|
|
||
| Seq("reset", s"reset ${testKey}1 \t ${testKey}2 \t ").foreach { resetCmd => | ||
| test(s"$resetCmd - multiple conf") { | ||
| spark.sessionState.conf.clear() | ||
| val key1 = testKey + "1" | ||
| val key2 = testKey + "2" | ||
| try { | ||
| assert(spark.conf.getOption(key1).isEmpty) | ||
| assert(spark.conf.getOption(key2).isEmpty) | ||
| sql(s"set $key1=false") | ||
| sql(s"set $key2=true") | ||
| assert(spark.conf.get(key1) === "false") | ||
| assert(spark.conf.get(key2) === "true") | ||
| assert(sql(s"set").where(s"key = '$key1'").count() == 1) | ||
| assert(sql(s"set").where(s"key = '$key2'").count() == 1) | ||
| sql(resetCmd) | ||
| assert(spark.conf.getOption(key1).isEmpty) | ||
| assert(spark.conf.getOption(key2).isEmpty) | ||
| } finally { | ||
| spark.conf.unset(key1) | ||
| spark.conf.unset(key2) | ||
| } | ||
| } | ||
| } | ||
|
|
||
|
|
||
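The `\t`-padded command in the last test exercises the empty-token handling in `ResetCommand`. A REPL-style sketch (plain Scala, hypothetical keys) of how that input tokenizes:

```scala
// String.split with a regex drops trailing empty tokens, and the
// !confName.isEmpty guard in ResetCommand filters out a leading one.
val raw = " spark.test.key1 \t spark.test.key2 \t "
val keys = raw.split("\\s+").filter(_.nonEmpty)
assert(keys.sameElements(Array("spark.test.key1", "spark.test.key2")))
```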