Commit 1853db3

shahidki31 authored and srowen committed

[SPARK-27125][SQL][TEST] Add test suite for sql execution page

## What changes were proposed in this pull request?

Added a test suite for the AllExecutionsPage class, covering the scenarios from SPARK-27019 and SPARK-27075.

## How was this patch tested?

Added UT; manually tested.

Closes #24052 from shahidki31/SPARK-27125.

Authored-by: Shahid <[email protected]>
Signed-off-by: Sean Owen <[email protected]>

1 parent b154233 commit 1853db3
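(Note: to run the suite locally, the standard Spark test invocation should work from the source root, e.g. build/sbt "sql/testOnly *AllExecutionsPageSuite" — the sbt project name is assumed from the Spark build conventions.)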

File tree

1 file changed: 127 additions, 0 deletions
@@ -0,0 +1,127 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.ui

import java.util
import java.util.{Locale, Properties}
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}

import org.apache.spark.scheduler.{JobFailed, SparkListenerJobEnd, SparkListenerJobStart}
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.{SparkPlanInfo, SQLExecution}
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.status.ElementTrackingStore
import org.apache.spark.util.kvstore.InMemoryStore

class AllExecutionsPageSuite extends SharedSQLContext {

  import testImplicits._

  test("SPARK-27019: correctly display SQL page when event reordering happens") {
    val statusStore = createStatusStore
    val tab = mock(classOf[SQLTab], RETURNS_SMART_NULLS)
    when(tab.sqlStore).thenReturn(statusStore)

    val request = mock(classOf[HttpServletRequest])
    when(tab.appName).thenReturn("testing")
    when(tab.headerTabs).thenReturn(Seq.empty)

    val html = renderSQLPage(request, tab, statusStore).toString().toLowerCase(Locale.ROOT)
    assert(html.contains("failed queries"))
    // An execution created from out-of-order events must not fall back to the
    // default epoch timestamp (rendered as a 1970 date).
    assert(!html.contains("1970"))
  }

  test("sorting should be successful") {
    val statusStore = createStatusStore
    val tab = mock(classOf[SQLTab], RETURNS_SMART_NULLS)
    val request = mock(classOf[HttpServletRequest])

    when(tab.sqlStore).thenReturn(statusStore)
    when(tab.appName).thenReturn("testing")
    when(tab.headerTabs).thenReturn(Seq.empty)
    // Ask the failed-queries table to sort by duration; rendering must not throw.
    when(request.getParameter("failed.sort")).thenReturn("Duration")
    val map = new util.HashMap[String, Array[String]]()
    map.put("failed.sort", Array("duration"))
    when(request.getParameterMap()).thenReturn(map)
    val html = renderSQLPage(request, tab, statusStore).toString().toLowerCase(Locale.ROOT)
    // html is lower-cased above, so match the lower-cased exception name.
    assert(!html.contains("illegalargumentexception"))
    assert(html.contains("duration"))
  }

  private def createStatusStore: SQLAppStatusStore = {
    val conf = sparkContext.conf
    val store = new ElementTrackingStore(new InMemoryStore, conf)
    val listener = new SQLAppStatusListener(conf, store, live = true)
    new SQLAppStatusStore(store, Some(listener))
  }

  private def createTestDataFrame: DataFrame = {
    Seq(
      (1, 1),
      (2, 2)
    ).toDF().filter("_1 > 1")
  }

  /**
   * Renders the SQL executions page for the given request and returns the HTML.
   * This also feeds dummy executions through the listener to populate the page
   * with useful content.
   */
  private def renderSQLPage(
      request: HttpServletRequest,
      tab: SQLTab,
      statusStore: SQLAppStatusStore): Seq[Node] = {

    val listener = statusStore.listener.get

    val page = new AllExecutionsPage(tab)
    Seq(0, 1).foreach { executionId =>
      val df = createTestDataFrame
      listener.onOtherEvent(SparkListenerSQLExecutionStart(
        executionId,
        "test",
        "test",
        df.queryExecution.toString,
        SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
        System.currentTimeMillis()))
      listener.onOtherEvent(SparkListenerSQLExecutionEnd(
        executionId, System.currentTimeMillis()))
      // The job events arrive after the execution end event, reproducing the
      // reordering scenario from SPARK-27019.
      listener.onJobStart(SparkListenerJobStart(
        jobId = 0,
        time = System.currentTimeMillis(),
        stageInfos = Nil,
        createProperties(executionId)))
      listener.onJobEnd(SparkListenerJobEnd(
        jobId = 0,
        time = System.currentTimeMillis(),
        JobFailed(new RuntimeException("Oops"))))
    }
    page.render(request)
  }

  private def createProperties(executionId: Long): Properties = {
    val properties = new Properties()
    properties.setProperty(SQLExecution.EXECUTION_ID_KEY, executionId.toString)
    properties
  }
}
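For readers who want to inspect the same state without going through the rendered HTML, here is a minimal sketch (not part of the commit; it assumes the same SharedSQLContext test environment as the suite above) that builds the status store the way createStatusStore does and queries the executions directly:

  val conf = sparkContext.conf
  val store = new ElementTrackingStore(new InMemoryStore, conf)
  val listener = new SQLAppStatusListener(conf, store, live = true)
  val statusStore = new SQLAppStatusStore(store, Some(listener))

  // After feeding listener events (as renderSQLPage does), the tracked
  // executions can be inspected without rendering the page:
  statusStore.executionsList().foreach { exec =>
    println(s"execution ${exec.executionId} completed at ${exec.completionTime}")
  }

This is the same store-plus-listener pairing the suite uses, so assertions on executionsList() can complement (but not replace) the HTML-level checks that guard the rendering path itself.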
