diff --git a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
index b93536e6536e2..1548583cca74a 100644
--- a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
+++ b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
@@ -232,6 +232,14 @@ private[spark] class ExecutorAllocationManager(
     executor.scheduleAtFixedRate(scheduleTask, 0, intervalMillis, TimeUnit.MILLISECONDS)
   }
 
+  /**
+   * Reset numExecutorsTarget to the configured initial number of executors.
+   */
+  def resetNumExecutorsTarget(): Unit = {
+    numExecutorsTarget = conf.getInt("spark.dynamicAllocation.initialExecutors", minNumExecutors)
+    logDebug(s"Reset numExecutorsTarget to $numExecutorsTarget")
+  }
+
   /**
    * Stop the allocation manager.
    */
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
index 6a4b536dee191..fb3379318fbff 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
@@ -170,6 +170,7 @@ private[spark] abstract class YarnSchedulerBackend(
     case RegisterClusterManager(am) =>
       logInfo(s"ApplicationMaster registered as $am")
       amEndpoint = Some(am)
+      scheduler.sc.executorAllocationManager.foreach(_.resetNumExecutorsTarget())
 
     case AddWebUIFilter(filterName, filterParams, proxyBase) =>
       addWebUIFilter(filterName, filterParams, proxyBase)