10 changes: 10 additions & 0 deletions project/SparkBuild.scala
@@ -412,6 +412,8 @@ object SparkBuild extends PomBuild {
   /* Hive console settings */
   enable(Hive.settings)(hive)
 
+  enable(HiveThriftServer.settings)(hiveThriftServer)
+
   enable(SparkConnectCommon.settings)(connectCommon)
   enable(SparkConnect.settings)(connect)
   enable(SparkConnectClient.settings)(connectClient)
@@ -1203,6 +1205,14 @@ object Hive {
   )
 }
 
+object HiveThriftServer {
+  lazy val settings = Seq(
+    excludeDependencies ++= Seq(
+      ExclusionRule("org.apache.hive", "hive-llap-common"),
+      ExclusionRule("org.apache.hive", "hive-llap-client"))
+  )
+}
+
 object YARN {
   val genConfigProperties = TaskKey[Unit]("gen-config-properties",
     "Generate config.properties which contains a setting whether Hadoop is provided or not")
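For context on the sbt mechanics: `excludeDependencies` filters the matching artifacts out of the project's entire resolved dependency graph, transitive edges included, and because the settings are enabled only on the `hiveThriftServer` subproject, other modules' classpaths are unaffected. A minimal standalone sketch of the same technique (the `hive-service` coordinate is only a hypothetical example of a dependency that pulls hive-llap in transitively):

```scala
// build.sbt -- illustrative sketch only, not part of this PR
lazy val thriftServerLike = (project in file("."))
  .settings(
    // A dependency that (transitively) drags in hive-llap artifacts.
    libraryDependencies += "org.apache.hive" % "hive-service" % "2.3.10",
    // Drop the LLAP jars from the whole resolution graph,
    // whether they arrive directly or transitively.
    excludeDependencies ++= Seq(
      ExclusionRule("org.apache.hive", "hive-llap-common"),
      ExclusionRule("org.apache.hive", "hive-llap-client")
    )
  )
```

The effect can be checked with `Test/dependencyTree`, as shown in the review thread below.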
10 changes: 0 additions & 10 deletions sql/hive-thriftserver/pom.xml
@@ -148,16 +148,6 @@
       <artifactId>byte-buddy-agent</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>${hive.group}</groupId>
-      <artifactId>hive-llap-common</artifactId>
-      <scope>${hive.llap.scope}</scope>
-    </dependency>
-    <dependency>
-      <groupId>${hive.group}</groupId>
-      <artifactId>hive-llap-client</artifactId>
-      <scope>${hive.llap.scope}</scope>
-    </dependency>
     <dependency>
       <groupId>net.sf.jpam</groupId>
       <artifactId>jpam</artifactId>
Contributor:

Is it possible for us to add some configuration in SparkBuild.scala to ensure that hive-llap is not included in the classpath too when testing the thriftserver module with sbt?

Member Author:

Done by updating SparkBuild.scala. It can be verified with

    build/sbt -Phive-thriftserver hive-thriftserver/Test/dependencyTree | grep hive-llap

Before:

    [info]   +-org.apache.hive:hive-llap-client:2.3.10
    [info]   +-org.apache.hive:hive-llap-common:2.3.10
    [info]   | +-org.apache.hive:hive-llap-client:2.3.10
    [info]   | +-org.apache.hive:hive-llap-common:2.3.10

Now the result is empty.
sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -673,15 +673,23 @@ public void close() throws HiveSQLException {
         hiveHist.closeStream();
       }
       try {
+        // Forcibly initialize thread local Hive so that
+        // SessionState#unCacheDataNucleusClassLoaders won't trigger
+        // Hive built-in UDFs initialization.
+        Hive.getWithoutRegisterFns(sessionState.getConf());
         sessionState.close();
       } finally {
         sessionState = null;
       }
-    } catch (IOException ioe) {
+    } catch (IOException | HiveException ioe) {
       throw new HiveSQLException("Failure to close", ioe);
     } finally {
       if (sessionState != null) {
         try {
+          // Forcibly initialize thread local Hive so that
+          // SessionState#unCacheDataNucleusClassLoaders won't trigger
+          // Hive built-in UDFs initialization.
+          Hive.getWithoutRegisterFns(sessionState.getConf());
           sessionState.close();
         } catch (Throwable t) {
           LOG.warn("Error closing session", t);
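Why the extra call matters: `SessionState#close` internally fetches the thread-local `Hive` client (to un-cache DataNucleus class loaders), and letting that fetch fall through to a plain `Hive.get()` would register Hive's built-in functions, some of which reference the LLAP classes this PR removes from the classpath. Pre-initializing the thread local via `Hive.getWithoutRegisterFns` avoids that path. A minimal Scala sketch of the pattern (a hypothetical helper; the actual change above is Java):

```scala
import org.apache.hadoop.hive.ql.metadata.Hive
import org.apache.hadoop.hive.ql.session.SessionState

// Sketch assuming Hive 2.3.x APIs: Hive.getWithoutRegisterFns(HiveConf)
// populates the thread-local Hive client without registering built-in
// UDFs, so the lookup inside SessionState.close() becomes a cache hit
// rather than a full initialization that could touch missing LLAP classes.
def closeSessionState(state: SessionState): Unit = {
  Hive.getWithoutRegisterFns(state.getConf)
  state.close()
}
```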
3 changes: 3 additions & 0 deletions sql/hive-thriftserver/src/test/resources/log4j2.properties
@@ -92,3 +92,6 @@ logger.parquet2.level = error

 logger.thriftserver.name = org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation
 logger.thriftserver.level = off
+
+logger.dagscheduler.name = org.apache.spark.scheduler.DAGScheduler
+logger.dagscheduler.level = error
Member Author:
To suppress noisy logs like:

    20:44:53.029 WARN org.apache.spark.scheduler.DAGScheduler: Failed to cancel job group 2f794b16-abee-4bbe-9caa-8be3416c500b. Cannot find active jobs for it.
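If editing log4j2.properties is not an option (say, when silencing the warning from inside a single test suite), a programmatic equivalent via Log4j 2's Configurator should work; a small sketch:

```scala
import org.apache.logging.log4j.Level
import org.apache.logging.log4j.core.config.Configurator

// Programmatic counterpart of the two properties added above: raise the
// DAGScheduler logger to ERROR so its WARN-level "Failed to cancel job
// group ..." messages are dropped.
Configurator.setLevel("org.apache.spark.scheduler.DAGScheduler", Level.ERROR)
```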
