Skip to content

Commit 07dd32e

Browse files
committed
still detach SessionState
1 parent 098a432 commit 07dd32e

File tree

2 files changed

+5
-3
lines changed

2 files changed

+5
-3
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -294,6 +294,7 @@ private[hive] class HiveClientImpl(
294294
def withHiveState[A](f: => A): A = retryLocked {
295295
val original = Thread.currentThread().getContextClassLoader
296296
val originalConfLoader = state.getConf.getClassLoader
297+
val originState = SessionState.get()
297298
// The classloader in clientLoader could be changed after addJar, always use the latest
298299
// classloader. We explicitly set the context class loader since "conf.setClassLoader" does
299300
// not do that, and the Hive client libraries may need to load classes defined by the client's
@@ -311,6 +312,9 @@ private[hive] class HiveClientImpl(
311312
val ret = try f finally {
312313
state.getConf.setClassLoader(originalConfLoader)
313314
Thread.currentThread().setContextClassLoader(original)
315+
if (originState != null) {
316+
SessionState.setCurrentSessionState(originState)
317+
}
314318
HiveCatalogMetrics.incrementHiveClientCalls(1)
315319
}
316320
ret

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScanExec.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -87,8 +87,6 @@ case class HiveTableScanExec(
8787
BindReferences.bindReference(pred, relation.partitionCols)
8888
}
8989

90-
@transient private lazy val hiveClient: HiveClient = sparkSession.sharedState.externalCatalog
91-
.unwrapped.asInstanceOf[HiveExternalCatalog].client
9290
@transient private lazy val hiveQlTable = HiveClientImpl.toHiveTable(relation.tableMeta)
9391
@transient private lazy val tableDesc = new TableDesc(
9492
hiveQlTable.getInputFormatClass,
@@ -97,7 +95,7 @@ case class HiveTableScanExec(
9795

9896
// Create a local copy of hadoopConf, so that scan specific modifications should not impact
9997
// other queries
100-
@transient private lazy val hadoopConf = hiveClient.withHiveState {
98+
@transient private lazy val hadoopConf = {
10199
val c = sparkSession.sessionState.newHadoopConf()
102100
// append columns ids and names before broadcast
103101
addColumnMetadataToConf(c)

0 commit comments

Comments (0)