@@ -194,7 +194,7 @@ private[spark] object HiveUtils extends Logging {
194194 //
195195 // Here we enumerate all time `ConfVar`s and convert their values to numeric strings according
196196 // to their output time units.
197- Seq (
197+ val commonTimeVars = Seq(
198198 ConfVars .METASTORE_CLIENT_CONNECT_RETRY_DELAY -> TimeUnit .SECONDS ,
199199 ConfVars .METASTORE_CLIENT_SOCKET_TIMEOUT -> TimeUnit .SECONDS ,
200200 ConfVars .METASTORE_CLIENT_SOCKET_LIFETIME -> TimeUnit .SECONDS ,
@@ -207,8 +207,6 @@ private[spark] object HiveUtils extends Logging {
207207 ConfVars .METASTORE_AGGREGATE_STATS_CACHE_MAX_READER_WAIT -> TimeUnit .MILLISECONDS ,
208208 ConfVars .HIVES_AUTO_PROGRESS_TIMEOUT -> TimeUnit .SECONDS ,
209209 ConfVars .HIVE_LOG_INCREMENTAL_PLAN_PROGRESS_INTERVAL -> TimeUnit .MILLISECONDS ,
210- ConfVars .HIVE_STATS_JDBC_TIMEOUT -> TimeUnit .SECONDS ,
211- ConfVars .HIVE_STATS_RETRIES_WAIT -> TimeUnit .MILLISECONDS ,
212210 ConfVars .HIVE_LOCK_SLEEP_BETWEEN_RETRIES -> TimeUnit .SECONDS ,
213211 ConfVars .HIVE_ZOOKEEPER_SESSION_TIMEOUT -> TimeUnit .MILLISECONDS ,
214212 ConfVars .HIVE_ZOOKEEPER_CONNECTION_BASESLEEPTIME -> TimeUnit .MILLISECONDS ,
@@ -236,7 +234,18 @@ private[spark] object HiveUtils extends Logging {
236234 ConfVars .SPARK_RPC_CLIENT_HANDSHAKE_TIMEOUT -> TimeUnit .MILLISECONDS
237235 ).map { case (confVar, unit) =>
238236 confVar.varname -> HiveConf .getTimeVar(hadoopConf, confVar, unit).toString
239- }.toMap
237+ }
238+
239+ // The following configurations were removed by HIVE-12164 (Hive 2.0)
240+ val removedTimeVars = Seq(
241+ ("hive.stats.jdbc.timeout", "30s") -> TimeUnit.SECONDS,
242+ ("hive.stats.retries.wait", "3000ms") -> TimeUnit.MILLISECONDS
243+ ).map { case ((key, defaultValue), unit) =>
244+ val value = hadoopConf.get(key, defaultValue)
245+ key -> HiveConf.toTime(value, unit, unit).toString
246+ }
247+
248+ (commonTimeVars ++ removedTimeVars).toMap
240249 }
241250
242251 /**
0 commit comments