Skip to content

Commit f6ee80b

Browse files
kayousterhout authored and liancheng committed
[SPARK-5846] Correctly set job description and pool for SQL jobs
This is #4630 but modified for the 1.2 branch, because I'm guessing it makes sense to fix this issue in that branch (again, unless I missed something obvious here...)

Author: Kay Ousterhout <[email protected]>

Closes #4631 from kayousterhout/SPARK-5846_1.2.1 and squashes the following commits:

ffe8ff2 [Kay Ousterhout] [SPARK-5846] Correctly set job description and pool for SQL jobs
1 parent 36e15b4 commit f6ee80b

File tree

2 files changed

+10
-10
lines changed
  • sql/hive-thriftserver
    • v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver
    • v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver

2 files changed

+10
-10
lines changed

sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -186,6 +186,11 @@ private[hive] class SparkExecuteStatementOperation(
186186
def run(): Unit = {
187187
logInfo(s"Running query '$statement'")
188188
setState(OperationState.RUNNING)
189+
val groupId = round(random * 1000000).toString
190+
hiveContext.sparkContext.setJobGroup(groupId, statement)
191+
sessionToActivePool.get(parentSession).foreach { pool =>
192+
hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
193+
}
189194
try {
190195
result = hiveContext.sql(statement)
191196
logDebug(result.queryExecution.toString())
@@ -196,11 +201,6 @@ private[hive] class SparkExecuteStatementOperation(
196201
case _ =>
197202
}
198203

199-
val groupId = round(random * 1000000).toString
200-
hiveContext.sparkContext.setJobGroup(groupId, statement)
201-
sessionToActivePool.get(parentSession).foreach { pool =>
202-
hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
203-
}
204204
iter = {
205205
val useIncrementalCollect =
206206
hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean

sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -157,6 +157,11 @@ private[hive] class SparkExecuteStatementOperation(
157157
def run(): Unit = {
158158
logInfo(s"Running query '$statement'")
159159
setState(OperationState.RUNNING)
160+
val groupId = round(random * 1000000).toString
161+
hiveContext.sparkContext.setJobGroup(groupId, statement)
162+
sessionToActivePool.get(parentSession).foreach { pool =>
163+
hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
164+
}
160165
try {
161166
result = hiveContext.sql(statement)
162167
logDebug(result.queryExecution.toString())
@@ -167,11 +172,6 @@ private[hive] class SparkExecuteStatementOperation(
167172
case _ =>
168173
}
169174

170-
val groupId = round(random * 1000000).toString
171-
hiveContext.sparkContext.setJobGroup(groupId, statement)
172-
sessionToActivePool.get(parentSession).foreach { pool =>
173-
hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
174-
}
175175
iter = {
176176
val useIncrementalCollect =
177177
hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean

0 commit comments

Comments (0)