|
17 | 17 |
|
18 | 18 | package org.apache.spark.sql.execution |
19 | 19 |
|
| 20 | +import java.util.NoSuchElementException |
| 21 | + |
20 | 22 | import org.apache.spark.Logging |
21 | 23 | import org.apache.spark.annotation.DeveloperApi |
22 | 24 | import org.apache.spark.rdd.RDD |
@@ -94,8 +96,9 @@ case class SetCommand( |
94 | 96 | s"Property ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} is deprecated, " + |
95 | 97 | s"automatically converted to ${SQLConf.SHUFFLE_PARTITIONS.key} instead.") |
96 | 98 | if (value.toInt < 1) { |
97 | | - val msg = s"Setting negative ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} for automatically " + |
98 | | - "determining the number of reducers is not supported." |
| 99 | + val msg = |
| 100 | + s"Setting negative ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} for automatically " + |
| 101 | + "determining the number of reducers is not supported." |
99 | 102 | throw new IllegalArgumentException(msg) |
100 | 103 | } else { |
101 | 104 | sqlContext.setConf(SQLConf.SHUFFLE_PARTITIONS.key, value) |
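
Note on the hunk above: the only functional code here is the guard itself; the +/- lines merely reflow the error-message string. For reference, a minimal sketch of how the deprecated key behaves after this validation (assumes a live SQLContext named sqlContext; the session setup is hypothetical):

    // Accepted: positive values are transparently redirected to
    // spark.sql.shuffle.partitions, with a deprecation warning logged.
    sqlContext.sql("SET mapred.reduce.tasks=10")

    // Rejected: value.toInt < 1 trips the guard above and throws
    // IllegalArgumentException instead of enabling the legacy
    // "auto-determine reducers" convention.
    sqlContext.sql("SET mapred.reduce.tasks=-1")
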
@@ -147,7 +150,13 @@ case class SetCommand( |
147 | 150 | // Queries a single property. |
148 | 151 | case Some((key, None)) => |
149 | 152 | val runFunc = (sqlContext: SQLContext) => { |
150 | | - Seq(Row(key, sqlContext.getConf(key, "<undefined>"))) |
| 153 | + val value = |
| 154 | + try { |
| 155 | + sqlContext.getConf(key) |
| 156 | + } catch { |
| 157 | + case _: NoSuchElementException => "<undefined>" |
| 158 | + } |
| 159 | + Seq(Row(key, value)) |
151 | 160 | } |
152 | 161 | (keyValueOutput, runFunc) |
153 | 162 | } |
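
The second hunk changes what SET <key> reports for a key that was never explicitly set. The old getConf(key, "<undefined>") returned the literal "<undefined>" for any unset key; the one-argument getConf(key) lets an entry registered in SQLConf fall back to its default value, and only keys unknown to SQLConf throw the NoSuchElementException that is caught and mapped to "<undefined>". A hedged sketch of the resulting behavior (the unknown key name is made up, and the outputs in comments are illustrative):

    // Registered entry, never explicitly set: now reports its default
    // (200 for spark.sql.shuffle.partitions) instead of "<undefined>".
    sqlContext.sql("SET spark.sql.shuffle.partitions").collect()
    // => Array([spark.sql.shuffle.partitions,200])

    // Key that SQLConf has never seen: still reports "<undefined>".
    sqlContext.sql("SET some.unknown.key").collect()
    // => Array([some.unknown.key,<undefined>])
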
|