@@ -160,7 +160,7 @@ statement
    | op=(ADD | LIST) identifier .*?                               #manageResource
    | SET ROLE .*?                                                 #failNativeCommand
    | SET .*?                                                      #setConfiguration
-   | RESET                                                        #resetConfiguration
+   | RESET .*?                                                    #resetConfiguration
    | unsupportedHiveNativeCommands .*?                            #failNativeCommand
    ;
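
With `.*?` appended, everything after the RESET keyword is now captured and handed to the visitor instead of being rejected by the parser. A minimal sketch of statements the relaxed rule accepts (assuming an active SparkSession named spark; the keys are made up):

// Statements now routed to #resetConfiguration (hypothetical keys):
spark.sql("RESET")                                  // clears the whole session conf, as before
spark.sql("RESET spark.test.some.key")              // clears a single property
spark.sql("RESET spark.test.key1 spark.test.key2")  // clears several properties at once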

@@ -82,11 +82,13 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
   * Example SQL :
   * {{{
   *   RESET;
+  *   RESET key;

Contributor:
What about RESET `special#$!`?

   * }}}
   */
  override def visitResetConfiguration(
      ctx: ResetConfigurationContext): LogicalPlan = withOrigin(ctx) {
-   ResetCommand
+   val raw = remainder(ctx.RESET.getSymbol)
+   if (raw.nonEmpty) ResetCommand(Some(raw.trim)) else ResetCommand(None)
  }
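
remainder is the AstBuilder helper used here to return the raw source text following a token, so the whole tail of the statement arrives as one string. A self-contained sketch of the mapping, with a stand-in case class in place of the real command:

// Stand-in for the real ResetCommand, so this snippet runs on its own.
case class ResetCommand(key: Option[String])

// Mirrors the branch above: an empty remainder means "reset everything".
def parseReset(raw: String): ResetCommand =
  if (raw.nonEmpty) ResetCommand(Some(raw.trim)) else ResetCommand(None)

assert(parseReset("") == ResetCommand(None))                 // RESET
assert(parseReset(" a.b.c ") == ResetCommand(Some("a.b.c"))) // RESET a.b.c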

@@ -150,12 +150,21 @@ object SetCommand {
  * This command is for resetting SQLConf to the default values. Command that runs
  * {{{
  *   reset;
+ *   reset key;
+ *   reset key1 key2 ...;
  * }}}
  */
- case object ResetCommand extends RunnableCommand with Logging {
+ case class ResetCommand(key: Option[String]) extends RunnableCommand with Logging {

    override def run(sparkSession: SparkSession): Seq[Row] = {
-     sparkSession.sessionState.conf.clear()
+     key match {
+       case None =>
+         sparkSession.sessionState.conf.clear()
+       // "RESET key" clears a specific property.
+       case Some(key) =>
+         key.split("\\s+")

Member:
What happens if the space is part of the property key? For example, RESET `a b` `c d`?

Author:
OK, I plan to add quoted-identifier support for this special situation; after all, the quote symbol will be the only special delimiter. The final use case looks like this:

spark-sql> set a b=1;
key	value
a b	1
Time taken: 0.018 seconds, Fetched 1 row(s)
spark-sql> set c d=2;
key	value
c d	2
Time taken: 0.018 seconds, Fetched 1 row(s)
spark-sql> reset a b c d;
Time taken: 0.01 seconds
spark-sql> set a b;
key	value
a b	1
spark-sql> reset `a b` `c d`;
Time taken: 0.01 seconds
spark-sql> set a b;
key	value
a b	<undefined>
Time taken: 0.016 seconds, Fetched 1 row(s)
spark-sql> set c d;
key	value
c d	<undefined>

spark-sql> reset `a b;
Error in query: Invalid usage of '`' in expression;

I thought we should open another issue for this.

Member:
See the PR #18368.

+           .foreach(confName => if (!confName.isEmpty) sparkSession.conf.unset(confName))
+     }
      Seq.empty[Row]
    }
  }
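
Because the stored key string is split on whitespace, RESET key1 \t key2 unsets each key independently; equally, a property name that itself contains a space cannot be addressed this way, which is the limitation raised in the thread above. A quick illustration of the tokenization (hypothetical keys):

// What the split in run() produces for a multi-key remainder:
"key1 \t key2".split("\\s+").toSeq  // Seq("key1", "key2")

// A space inside a property name is torn into separate tokens:
"a b".split("\\s+").toSeq           // Seq("a", "b"), not Seq("a b")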
@@ -301,4 +301,10 @@ class SparkSqlParserSuite extends PlanTest {
"SELECT a || b || c FROM t",
Project(UnresolvedAlias(concat) :: Nil, UnresolvedRelation(TableIdentifier("t"))))
}

test("reset") {
assertEqual("reset", ResetCommand(None))
assertEqual("reset spark.test.property", ResetCommand(Some("spark.test.property")))

Contributor:
Can we add a test for reset `#a!`?

Author:
OK, now added one, plus a SQL-parse test for RESET with a special-character key.


assertEqual("reset #$a! !a$# \t ", ResetCommand(Some("#$a! !a$#")))
}
}
@@ -114,50 +114,79 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
    }
  }

test("reset - public conf") {
spark.sessionState.conf.clear()
val original = spark.conf.get(SQLConf.GROUP_BY_ORDINAL)
try {
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=false")
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === false)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 1)
sql(s"reset")
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0)
} finally {
sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original")
Seq("reset", s"reset ${SQLConf.GROUP_BY_ORDINAL.key}").foreach { resetCmd =>
test(s"$resetCmd - public conf") {
spark.sessionState.conf.clear()
val original = spark.conf.get(SQLConf.GROUP_BY_ORDINAL)
try {
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=false")
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === false)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 1)
sql(resetCmd)
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0)
} finally {
sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original")

Contributor:
Not introduced in this PR, but we don't need to do this, as we call spark.sessionState.conf.clear() at the beginning.

Author:
Do you mean it is a duplicated cleanup action? I suppose the purpose is to prevent it from affecting other tests.

Contributor:
I think this is OK. After clear(), original gets the default value of the config, so here it just restores that default.

+     }
+   }
+ }

test("reset - internal conf") {
spark.sessionState.conf.clear()
val original = spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS)
try {
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=10")
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 10)
assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 1)
sql(s"reset")
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 0)
} finally {
sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS}=$original")
Seq("reset", s"reset ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}").foreach { resetCmd =>
test(s"$resetCmd - internal conf") {
spark.sessionState.conf.clear()
val original = spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS)
try {
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=10")
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 10)
assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 1)
sql(resetCmd)
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 0)
} finally {
sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS}=$original")
}
}
}

test("reset - user-defined conf") {
spark.sessionState.conf.clear()
val userDefinedConf = "x.y.z.reset"
try {
assert(spark.conf.getOption(userDefinedConf).isEmpty)
sql(s"set $userDefinedConf=false")
assert(spark.conf.get(userDefinedConf) === "false")
assert(sql(s"set").where(s"key = '$userDefinedConf'").count() == 1)
sql(s"reset")
assert(spark.conf.getOption(userDefinedConf).isEmpty)
} finally {
spark.conf.unset(userDefinedConf)
Seq("reset", s"reset $testKey").foreach { resetCmd =>
test(s"$resetCmd - user-defined conf") {
spark.sessionState.conf.clear()
try {
assert(spark.conf.getOption(testKey).isEmpty)
sql(s"set $testKey=false")
assert(spark.conf.get(testKey) === "false")
assert(sql(s"set").where(s"key = '$testKey'").count() == 1)
sql(resetCmd)
assert(spark.conf.getOption(testKey).isEmpty)
} finally {
spark.conf.unset(testKey)
}
}
}

Seq("reset", s"reset ${testKey}1 \t ${testKey}2 \t ").foreach { resetCmd =>
test(s"$resetCmd - multiple conf") {
spark.sessionState.conf.clear()
val key1 = testKey + "1"
val key2 = testKey + "2"
try {
assert(spark.conf.getOption(key1).isEmpty)
assert(spark.conf.getOption(key2).isEmpty)
sql(s"set $key1=false")
sql(s"set $key2=true")
assert(spark.conf.get(key1) === "false")
assert(spark.conf.get(key2) === "true")
assert(sql(s"set").where(s"key = '$key1'").count() == 1)
assert(sql(s"set").where(s"key = '$key2'").count() == 1)
sql(resetCmd)
assert(spark.conf.getOption(key1).isEmpty)
assert(spark.conf.getOption(key2).isEmpty)
} finally {
spark.conf.unset(key1)
spark.conf.unset(key2)
}
}
}
