
Commit 35c961f

Solve this by setting the class loader when constructing HiveClientImpl's SessionState.
1 parent 340e0b6 commit 35c961f

2 files changed: +2 −12 lines

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala

Lines changed: 1 addition & 0 deletions
@@ -190,6 +190,7 @@ private[hive] class HiveClientImpl(
     if (clientLoader.cachedHive != null) {
       Hive.set(clientLoader.cachedHive.asInstanceOf[Hive])
     }
+    state.getConf.setClassLoader(initClassLoader)
     SessionState.start(state)
     state.out = new PrintStream(outputBuffer, true, UTF_8.name())
     state.err = new PrintStream(outputBuffer, true, UTF_8.name())
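
The added line is the substance of the fix: the SessionState's HiveConf is pointed at the isolated initClassLoader before SessionState.start, so classes that Hive later resolves through that conf (for example SerDes) come from the same loader the client was built with. A minimal, self-contained sketch of the pattern, assuming a fresh HiveConf and an arbitrary loader standing in for initClassLoader:

import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.session.SessionState

object SessionBootstrapSketch {
  // Sketch only, not the HiveClientImpl code: `loader` stands in for the
  // isolated class loader that the real client threads through.
  def startSession(loader: ClassLoader): SessionState = {
    val conf = new HiveConf(classOf[SessionState])
    // Class lookups done through this conf now resolve against `loader`,
    // mirroring state.getConf.setClassLoader(initClassLoader) in the diff above.
    conf.setClassLoader(loader)
    val state = new SessionState(conf)
    SessionState.start(state)
    state
  }
}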

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScanExec.scala

Lines changed: 1 addition & 12 deletions
@@ -22,7 +22,6 @@ import scala.collection.JavaConverters._
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hive.ql.metadata.{Partition => HivePartition}
 import org.apache.hadoop.hive.ql.plan.TableDesc
-import org.apache.hadoop.hive.ql.session.SessionState
 import org.apache.hadoop.hive.serde.serdeConstants
 import org.apache.hadoop.hive.serde2.objectinspector._
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption
@@ -121,16 +120,7 @@ case class HiveTableScanExec(

     HiveShim.appendReadColumns(hiveConf, neededColumnIDs, output.map(_.name))

-    val currentState = SessionState.get()
-    val deserializer = if (currentState != null) {
-      val originClassLoader = currentState.getConf.getClassLoader
-      currentState.getConf.setClassLoader(sparkSession.sharedState.jarClassLoader)
-      val instance = tableDesc.getDeserializerClass.getConstructor().newInstance()
-      currentState.getConf.setClassLoader(originClassLoader)
-      instance
-    } else {
-      tableDesc.getDeserializerClass.getConstructor().newInstance()
-    }
+    val deserializer = tableDesc.getDeserializerClass.getConstructor().newInstance()
     deserializer.initialize(hiveConf, tableDesc.getProperties)

     // Specifies types and object inspectors of columns to be scanned.
@@ -191,7 +181,6 @@ case class HiveTableScanExec(
   }

   protected override def doExecute(): RDD[InternalRow] = {
-    Thread.currentThread().setContextClassLoader(sparkSession.sharedState.jarClassLoader)
     // Using dummyCallSite, as getCallSite can turn out to be expensive with
     // multiple partitions.
     val rdd = if (!relation.isPartitioned) {
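
With the class loader fixed at session construction in HiveClientImpl, the scan no longer has to swap loaders around deserializer creation or reset the thread context loader in doExecute; the conf it hands to the deserializer already carries the right loader. The deleted lines implemented a swap-and-restore pattern inline; a hypothetical generic helper (not part of this patch) capturing that pattern would look like:

import org.apache.hadoop.conf.Configuration

object ClassLoaderSwapSketch {
  // Hypothetical helper: temporarily point a Configuration at another class
  // loader while running `body`, then restore the original loader. This is
  // the pattern the removed HiveTableScanExec code performed by hand.
  def withClassLoader[T](conf: Configuration, loader: ClassLoader)(body: => T): T = {
    val original = conf.getClassLoader
    conf.setClassLoader(loader)
    try body
    finally conf.setClassLoader(original)
  }
}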
