Skip to content

Commit f8983d1

Browse files
Minor changes per review comments.
1 parent 1ce8a5e commit f8983d1

File tree

4 files changed

+14
-9
lines changed

4 files changed

+14
-9
lines changed

sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,10 @@ import java.util.Properties
2222
import scala.collection.mutable
2323

2424
/**
25-
* SQLConf holds potentially query-dependent, mutable config parameters and hints.
25+
* SQLConf holds mutable config parameters and hints. These can be set and
26+
* queried either by passing SET commands into Spark SQL's DSL
27+
* functions (sql(), hql(), etc.), or by programmatically using setters and
28+
* getters of this class.
2629
*/
2730
class SQLConf {
2831

@@ -39,12 +42,8 @@ class SQLConf {
3942
}
4043

4144
def set(key: String, value: String): SQLConf = {
42-
if (key == null) {
43-
throw new NullPointerException("null key")
44-
}
45-
if (value == null) {
46-
throw new NullPointerException("null value")
47-
}
45+
require(key != null, "key cannot be null")
46+
require(value != null, s"value cannot be null for ${key}")
4847
settings(key) = value
4948
this
5049
}

sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,20 +29,25 @@ import org.apache.spark.sql.catalyst.expressions.{GenericRow, Attribute}
2929
case class SetCommandPhysical(key: Option[String], value: Option[String])
3030
(@transient context: SQLContext) extends LeafNode {
3131
def execute(): RDD[Row] = (key, value) match {
32+
// Set value for key k; the actual setting has already
33+
// been performed eagerly in QueryExecution.
3234
case (Some(k), Some(v)) => context.emptyResult
35+
// Query the value bound to key k.
3336
case (Some(k), None) =>
3437
val resultString = context.sqlConf.getOption(k) match {
3538
case Some(v) => s"$k=$v"
3639
case None => s"$k is undefined"
3740
}
3841
context.sparkContext.parallelize(Seq(new GenericRow(Array[Any](resultString))), 1)
42+
// Query all key-value pairs that are set in the SQLConf of the context.
3943
case (None, None) =>
4044
val pairs = context.sqlConf.getAll
4145
val rows = pairs.map { case (k, v) =>
4246
new GenericRow(Array[Any](s"$k=$v"))
4347
}.toSeq
4448
// Assume config parameters can fit into one split (machine) ;)
4549
context.sparkContext.parallelize(rows, 1)
50+
// The only remaining case, (None, Some(v)), is semantically invalid and should be impossible.
4651
case _ => context.emptyResult
4752
}
4853

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -133,8 +133,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
133133
@transient override lazy val sqlConf: SQLConf = new SQLConf(hiveconf.getAllProperties) {
134134
override def set(key: String, value: String): SQLConf = {
135135
runSqlHive(s"SET $key=$value")
136-
settings(key) = value
137-
this
136+
super.set(key, value)
138137
}
139138
}
140139
@transient protected[hive] lazy val sessionState = new SessionState(hiveconf)

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,8 @@ private[hive] object HiveQl {
208208
def parseSql(sql: String): LogicalPlan = {
209209
try {
210210
if (sql.trim.toLowerCase.startsWith("set")) {
211+
// Split into two parts, since we treat the part before the first "="
212+
// as key, and the part after as value, which may contain other "=" signs.
211213
sql.trim.drop(3).split("=", 2).map(_.trim) match {
212214
case Array("") => // "set"
213215
SetCommand(None, None)

0 commit comments

Comments
 (0)