
Commit 427b26c

Small comment & visibility cleanup
1 parent 3839d31 commit 427b26c


3 files changed, +4 -4 lines changed


core/src/main/scala/org/apache/spark/ExecutorAllocationClient.scala

Lines changed: 1 addition & 1 deletion
@@ -103,7 +103,7 @@ private[spark] trait ExecutorAllocationClient {
 
   /**
    * Request that the cluster manager decommission the specified executor.
-   * Default implementation delegates to kill, scheduler can override
+   * Default implementation delegates to decommissionExecutors, scheduler can override
    * if it supports graceful decommissioning.
    *
    * @param executorId identifiers of executor to decommission
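
The updated Scaladoc describes a delegation pattern: the single-executor request has a default body that forwards to the bulk decommissionExecutors call, and a scheduler backend that supports graceful decommissioning can override it. A minimal standalone sketch of that pattern (simplified, hypothetical signatures, not Spark's actual trait):

trait AllocationClientSketch {
  // Bulk operation: returns the ids whose decommission request was accepted.
  def decommissionExecutors(executorIds: Seq[String]): Seq[String]

  // Default implementation delegates to decommissionExecutors; a scheduler
  // backend with graceful decommissioning support can override this.
  def decommissionExecutor(executorId: String): Boolean =
    decommissionExecutors(Seq(executorId)).nonEmpty
}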

core/src/main/scala/org/apache/spark/scheduler/dynalloc/ExecutorMonitor.scala

Lines changed: 2 additions & 2 deletions
@@ -451,12 +451,12 @@ private[spark] class ExecutorMonitor(
   }
 
   // Visible for testing
-  def executorsPendingToRemove(): Set[String] = {
+  private[spark] def executorsPendingToRemove(): Set[String] = {
     executors.asScala.filter { case (_, exec) => exec.pendingRemoval }.keys.toSet
   }
 
   // Visible for testing
-  def executorsDecommissioning(): Set[String] = {
+  private[spark] def executorsDecommissioning(): Set[String] = {
     executors.asScala.filter { case (_, exec) => exec.pendingDecommissioning }.keys.toSet
   }
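
For context, private[spark] is Scala's package-qualified visibility: the member stays callable from any code under the org.apache.spark package, which keeps the "Visible for testing" intent intact while hiding the method from the public API. A small standalone illustration with a hypothetical class:

package org.apache.spark.example

class MonitorSketch {
  // Accessible from anywhere under org.apache.spark (e.g. test suites),
  // but invisible to callers outside that package.
  private[spark] def pendingForTests(): Set[String] = Set.empty
}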

streaming/src/test/scala/org/apache/spark/streaming/scheduler/ExecutorAllocationManagerSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ class ExecutorAllocationManagerSuite extends TestSuiteBase
         Map.empty)}
   }
 
-  /** Verify that particular executors was killed */
+  /** Verify that a particular executor was scaled down. */
   def verifyKilledExec(expectedKilledExec: Option[String]): Unit = {
     if (expectedKilledExec.nonEmpty) {
       val decomInfo = ExecutorDecommissionInfo("spark scale down", false)
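
The renamed helper follows an Option-based verification pattern: Some(id) asserts that a specific executor was scaled down, while None asserts that no executor was removed. A self-contained sketch of that pattern (hypothetical names, not the suite's actual implementation):

object VerifySketch {
  // removed stands in for whatever the real suite records through its mocks.
  def verifyScaledDown(removed: Set[String], expected: Option[String]): Unit =
    expected match {
      case Some(id) => assert(removed.contains(id), s"executor $id was not scaled down")
      case None     => assert(removed.isEmpty, "no executor should have been scaled down")
    }
}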
