diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
index 4daf9e916ae8..db36434288c2 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
@@ -138,7 +138,8 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm
     // Queries all key-value pairs that are set in the SQLConf of the sqlContext.
     case None =>
       val runFunc = (sqlContext: SQLContext) => {
-        sqlContext.getAllConfs.map { case (k, v) => Row(k, v) }.toSeq
+        sqlContext.getAllConfs.toSeq.sortBy(_._1).map { case (k, v) => Row(k, v) } ++
+          getEnvList(withDoc = false)
       }
       (keyValueOutput, runFunc)
 
@@ -146,9 +147,9 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm
     // SQLConf of the sqlContext.
     case Some(("-v", None)) =>
       val runFunc = (sqlContext: SQLContext) => {
-        sqlContext.conf.getAllDefinedConfs.map { case (key, defaultValue, doc) =>
+        sqlContext.conf.getAllDefinedConfs.sortBy(_._1).map { case (key, defaultValue, doc) =>
           Row(key, defaultValue, doc)
-        }
+        } ++ getEnvList(withDoc = true)
       }
       val schema = StructType(
         StructField("key", StringType, nullable = false) ::
@@ -182,4 +183,18 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm
 
   override def run(sqlContext: SQLContext): Seq[Row] = runFunc(sqlContext)
 
+  /**
+   * Gets the environment variables and JVM system properties as a sequence of rows.
+   *
+   * @param withDoc whether each row carries a doc column or not
+   * @return the rows holding the key/value pairs, with keys prefixed by "env:" or "system:"
+   */
+  private def getEnvList(withDoc: Boolean) = {
+    sys.env.toSeq.sortBy(_._1).map {
+      case (k, v) => if (withDoc) Row(s"env:$k", v, "") else Row(s"env:$k", v)
+    } ++
+      sys.props.toSeq.sortBy(_._1).map {
+        case (k, v) => if (withDoc) Row(s"system:$k", v, "") else Row(s"system:$k", v)
+      }
+  }
 }
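For context, a minimal sketch of what the patched SET command could print from a Scala shell. The session below is hypothetical: sqlContext stands for a REPL-provided SQLContext, and the keys and values shown are illustrative assumptions, not output taken from the patch.

// Hypothetical session with this patch applied. SQLConf entries come first,
// sorted by key, followed by the rows from getEnvList: "env:"-prefixed
// environment variables, then "system:"-prefixed JVM system properties,
// each group sorted by key.
sqlContext.sql("SET").collect().foreach(println)
// [spark.sql.shuffle.partitions,200]
// [env:HOME,/home/user]
// [system:java.version,1.8.0_66]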
- assert(sql("SET").collect().size === TestSQLContext.overrideConfs.size) + val additionalSize = sys.env.size + sys.props.size + assert(sql("SET").collect().size === TestSQLContext.overrideConfs.size + additionalSize) + val expectedMap = TestSQLContext.overrideConfs ++ + sys.env.map { case (k, v) => (s"env:$k", v) } ++ + sys.props.map { case (k, v) => (s"system:$k", v) } sql("SET").collect().foreach { row => val key = row.getString(0) val value = row.getString(1) assert( - TestSQLContext.overrideConfs.contains(key), + expectedMap.contains(key), s"$key should exist in SQLConf.") assert( - TestSQLContext.overrideConfs(key) === value, - s"The value of $key should be ${TestSQLContext.overrideConfs(key)} instead of $value.") + expectedMap(key) === value, + s"The value of $key should be ${expectedMap(key)} instead of $value.") } val overrideConfs = sql("SET").collect() diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala index 93d63f224132..185216d36736 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala @@ -1145,12 +1145,15 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter { }.toSet conf.clear() - val expectedConfs = conf.getAllDefinedConfs.toSet + val expectedConfs = conf.getAllDefinedConfs.toSet ++ + sys.env.map { case (k, v) => (s"env:$k", v, "") }.toSet ++ + sys.props.map { case (k, v) => (s"system:$k", v, "") }.toSet assertResult(expectedConfs)(collectResults(sql("SET -v"))) // "SET" itself returns all config variables currently specified in SQLConf. // TODO: Should we be listing the default here always? probably... - assert(sql("SET").collect().size === TestHiveContext.overrideConfs.size) + val additionalSize = sys.env.size + sys.props.size + assert(sql("SET").collect().size === TestHiveContext.overrideConfs.size + additionalSize) val defaults = collectResults(sql("SET")) assertResult(Set(testKey -> testVal)) {