diff --git a/core/src/main/resources/org/apache/spark/ui/static/historypage.js b/core/src/main/resources/org/apache/spark/ui/static/historypage.js
index d2161662d5679..177120aaa6c1f 100644
--- a/core/src/main/resources/org/apache/spark/ui/static/historypage.js
+++ b/core/src/main/resources/org/apache/spark/ui/static/historypage.js
@@ -15,6 +15,14 @@
* limitations under the License.
*/
+// Cap on how many applications the summary page requests from the REST API;
+// HistoryPage injects the configured value via an inline setAppLimit() call.
+var appLimit = -1;
+
+function setAppLimit(val) {
+  appLimit = val;
+}
+
// this function works exactly the same as UIUtils.formatDuration
function formatDuration(milliseconds) {
if (milliseconds < 100) {
@@ -111,7 +117,7 @@ $(document).ready(function() {
requestedIncomplete = getParameterByName("showIncomplete", searchString);
requestedIncomplete = (requestedIncomplete == "true" ? true : false);
- $.getJSON("api/v1/applications", function(response,status,jqXHR) {
+ $.getJSON("api/v1/applications?limit=" + appLimit, function(response,status,jqXHR) {
var array = [];
var hasMultipleAttempts = false;
for (i in response) {
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
index 2fad1120cdc8a..a120b6c5fcdff 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
@@ -44,7 +44,8 @@ private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("")
if (allAppsSize > 0) {
<script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
<div id="history-summary" class="row-fluid"></div> ++
- <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script>
+ <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
+ <script>setAppLimit({parent.maxApplications})</script>
} else if (requestedIncomplete) {
<h4>No incomplete applications found!</h4>
} else {
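
For reviewers less familiar with Scala's XML literals: `++` concatenates the elements into a single `NodeSeq`, so the inline `setAppLimit` call is emitted right after the `historypage.js` tag and runs only once that script (which defines the function) has loaded. A minimal standalone sketch of the same pattern, assuming scala-xml on the classpath (the object name and hard-coded src are illustrative only):

```scala
import scala.xml.NodeSeq

object AppLimitMarkupSketch {
  def main(args: Array[String]): Unit = {
    val maxApplications = Int.MaxValue
    // ++ joins the two literals into one NodeSeq; the browser therefore
    // sees the script defining setAppLimit before the call to it.
    val nodes: NodeSeq =
      <script src="/static/historypage.js"></script> ++
      <script>setAppLimit({maxApplications})</script>
    println(nodes.mkString)
  }
}
```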
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index d821474bdb590..c178917d8da3b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -28,6 +28,7 @@ import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config._
import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationInfo, ApplicationsListResource, UIRoot}
import org.apache.spark.ui.{SparkUI, UIUtils, WebUI}
import org.apache.spark.ui.JettyUtils._
@@ -55,6 +56,9 @@ class HistoryServer(
// How many applications to retain
private val retainedApplications = conf.getInt("spark.history.retainedApplications", 50)
+ // How many applications the summary UI displays
+ private[history] val maxApplications = conf.get(HISTORY_UI_MAX_APPS)
+
// application
private val appCache = new ApplicationCache(this, retainedApplications, new SystemClock())
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 5a59aee652712..f28a9a5cf81d6 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -108,4 +108,8 @@ package object config {
private[spark] val UI_RETAINED_TASKS = ConfigBuilder("spark.ui.retainedTasks")
.intConf
.createWithDefault(100000)
+
+ // To limit how many applications are shown in the History Server summary UI
+ private[spark] val HISTORY_UI_MAX_APPS =
+ ConfigBuilder("spark.history.ui.maxApplications").intConf.createWithDefault(Integer.MAX_VALUE)
}
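
For context, the new entry behaves like any other typed config key. A hypothetical sketch of capping the summary page at 500 applications programmatically (operators would normally set the same key in spark-defaults.conf before starting the history server; the object name is invented):

```scala
import org.apache.spark.SparkConf

object MaxApplicationsExample {
  def main(args: Array[String]): Unit = {
    // Show at most 500 applications on the history summary page; apps
    // beyond the cap remain reachable via their direct URLs.
    val conf = new SparkConf()
      .set("spark.history.ui.maxApplications", "500")
    println(conf.get("spark.history.ui.maxApplications")) // 500
  }
}
```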
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
index 02fd2985fa20d..075b9ba37dc84 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
@@ -29,7 +29,8 @@ private[v1] class ApplicationListResource(uiRoot: UIRoot) {
def appList(
@QueryParam("status") status: JList[ApplicationStatus],
@DefaultValue("2010-01-01") @QueryParam("minDate") minDate: SimpleDateParam,
- @DefaultValue("3000-01-01") @QueryParam("maxDate") maxDate: SimpleDateParam)
+ @DefaultValue("3000-01-01") @QueryParam("maxDate") maxDate: SimpleDateParam,
+ @QueryParam("limit") limit: Integer)
: Iterator[ApplicationInfo] = {
val allApps = uiRoot.getApplicationInfoList
val adjStatus = {
@@ -41,7 +42,7 @@ private[v1] class ApplicationListResource(uiRoot: UIRoot) {
}
val includeCompleted = adjStatus.contains(ApplicationStatus.COMPLETED)
val includeRunning = adjStatus.contains(ApplicationStatus.RUNNING)
- allApps.filter { app =>
+ val appList = allApps.filter { app =>
val anyRunning = app.attempts.exists(!_.completed)
// if any attempt is still running, we consider the app to also still be running
val statusOk = (!anyRunning && includeCompleted) ||
@@ -53,6 +54,11 @@ private[v1] class ApplicationListResource(uiRoot: UIRoot) {
}
statusOk && dateOk
}
+ if (limit != null) {
+ appList.take(limit)
+ } else {
+ appList
+ }
}
}
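
Two details of the added branch are easy to miss: `@QueryParam` binds to a boxed `java.lang.Integer`, so `limit` is `null` rather than 0 when the parameter is absent, and `Iterator.take(n)` is lazy and treats `n <= 0` as "take nothing". A standalone sketch of those semantics (not Spark code; names are invented):

```scala
object LimitSemanticsSketch {
  // Mirrors the filter-then-take shape above: a null limit means "no cap".
  def limited[A](apps: Iterator[A], limit: java.lang.Integer): Iterator[A] =
    if (limit != null) apps.take(limit) else apps

  def main(args: Array[String]): Unit = {
    val names = List("app-1", "app-2", "app-3", "app-4")
    println(limited(names.iterator, 2).toList)    // List(app-1, app-2)
    println(limited(names.iterator, null).toList) // all four apps
    println(limited(names.iterator, -1).toList)   // List(): negative acts like 0
  }
}
```

Note that the last case is why the JavaScript default of `appLimit = -1` only works because the inline `setAppLimit` call in HistoryPage.scala always replaces it before the AJAX request fires.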
diff --git a/core/src/test/resources/HistoryServerExpectations/limit_app_list_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/limit_app_list_json_expectation.json
new file mode 100644
index 0000000000000..9165f549d7d25
--- /dev/null
+++ b/core/src/test/resources/HistoryServerExpectations/limit_app_list_json_expectation.json
@@ -0,0 +1,67 @@
+[ {
+ "id" : "local-1430917381534",
+ "name" : "Spark shell",
+ "attempts" : [ {
+ "startTime" : "2015-05-06T13:03:00.893GMT",
+ "endTime" : "2015-05-06T13:03:11.398GMT",
+ "lastUpdated" : "",
+ "duration" : 10505,
+ "sparkUser" : "irashid",
+ "completed" : true,
+ "startTimeEpoch" : 1430917380893,
+ "endTimeEpoch" : 1430917391398,
+ "lastUpdatedEpoch" : 0
+ } ]
+}, {
+ "id" : "local-1430917381535",
+ "name" : "Spark shell",
+ "attempts" : [ {
+ "attemptId" : "2",
+ "startTime" : "2015-05-06T13:03:00.893GMT",
+ "endTime" : "2015-05-06T13:03:00.950GMT",
+ "lastUpdated" : "",
+ "duration" : 57,
+ "sparkUser" : "irashid",
+ "completed" : true,
+ "startTimeEpoch" : 1430917380893,
+ "endTimeEpoch" : 1430917380950,
+ "lastUpdatedEpoch" : 0
+ }, {
+ "attemptId" : "1",
+ "startTime" : "2015-05-06T13:03:00.880GMT",
+ "endTime" : "2015-05-06T13:03:00.890GMT",
+ "lastUpdated" : "",
+ "duration" : 10,
+ "sparkUser" : "irashid",
+ "completed" : true,
+ "startTimeEpoch" : 1430917380880,
+ "endTimeEpoch" : 1430917380890,
+ "lastUpdatedEpoch" : 0
+ } ]
+}, {
+ "id" : "local-1426533911241",
+ "name" : "Spark shell",
+ "attempts" : [ {
+ "attemptId" : "2",
+ "startTime" : "2015-03-17T23:11:50.242GMT",
+ "endTime" : "2015-03-17T23:12:25.177GMT",
+ "lastUpdated" : "",
+ "duration" : 34935,
+ "sparkUser" : "irashid",
+ "completed" : true,
+ "startTimeEpoch" : 1426633910242,
+ "endTimeEpoch" : 1426633945177,
+ "lastUpdatedEpoch" : 0
+ }, {
+ "attemptId" : "1",
+ "startTime" : "2015-03-16T19:25:10.242GMT",
+ "endTime" : "2015-03-16T19:25:45.177GMT",
+ "lastUpdated" : "",
+ "duration" : 34935,
+ "sparkUser" : "irashid",
+ "completed" : true,
+ "startTimeEpoch" : 1426533910242,
+ "endTimeEpoch" : 1426533945177,
+ "lastUpdatedEpoch" : 0
+ } ]
+} ]
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 631a7cd9d5d7a..ae3f5d9c012ea 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -100,6 +100,7 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
"minDate app list json" -> "applications?minDate=2015-02-10",
"maxDate app list json" -> "applications?maxDate=2015-02-10",
"maxDate2 app list json" -> "applications?maxDate=2015-02-03T16:42:40.000GMT",
+ "limit app list json" -> "applications?limit=3",
"one app json" -> "applications/local-1422981780767",
"one app multi-attempt json" -> "applications/local-1426533911241",
"job list json" -> "applications/local-1422981780767/jobs",
diff --git a/docs/monitoring.md b/docs/monitoring.md
index ee932cfc6d705..1bc3d266b66b4 100644
--- a/docs/monitoring.md
+++ b/docs/monitoring.md
@@ -114,8 +114,17 @@ The history server can be configured as follows:
<tr>
  <td>spark.history.retainedApplications</td>
  <td>50</td>
  <td>
-   The number of application UIs to retain. If this cap is exceeded, then the oldest
-   applications will be removed.
+   The number of applications to retain UI data for in the cache. If this cap is exceeded, then
+   the oldest applications will be removed from the cache. If an application is not in the cache,
+   it will have to be loaded from disk if it is accessed from the UI.
+  </td>
+</tr>
+<tr>
+  <td>spark.history.ui.maxApplications</td>
+  <td>Int.MaxValue</td>
+  <td>
+   The number of applications to display on the history summary page. Application UIs are still
+   available by accessing their URLs directly even if they are not displayed on the history summary page.
  </td>
</tr>
@@ -242,7 +251,8 @@ can be identified by their `[attempt-id]`. In the API listed below, when running
<br>Examples:
<br><code>?minDate=2015-02-10</code>
<br><code>?minDate=2015-02-03T16:42:40.000GMT</code>
- <br><code>?maxDate=[date]</code> latest date/time to list; uses same format as minDate.</td>
+ <br><code>?maxDate=[date]</code> latest date/time to list; uses same format as minDate.
+ <br><code>?limit=[limit]</code> limits the number of applications listed.</td>
</tr>
<tr>
  <td><code>/applications/[app-id]/jobs</code></td>