@@ -18,9 +18,9 @@
 package org.apache.spark

 import java.lang.ref.{ReferenceQueue, WeakReference}
-import java.util.concurrent.{ScheduledExecutorService, TimeUnit}
+import java.util.concurrent.{ConcurrentLinkedQueue, ScheduledExecutorService, TimeUnit}

-import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}
+import scala.collection.JavaConverters._

 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.rdd.{RDD, ReliableRDDCheckpointData}
@@ -57,13 +57,11 @@ private class CleanupTaskWeakReference(
  */
 private[spark] class ContextCleaner(sc: SparkContext) extends Logging {

-  private val referenceBuffer = new ArrayBuffer[CleanupTaskWeakReference]
-    with SynchronizedBuffer[CleanupTaskWeakReference]
+  private val referenceBuffer = new ConcurrentLinkedQueue[CleanupTaskWeakReference]()

   private val referenceQueue = new ReferenceQueue[AnyRef]

-  private val listeners = new ArrayBuffer[CleanerListener]
-    with SynchronizedBuffer[CleanerListener]
+  private val listeners = new ConcurrentLinkedQueue[CleanerListener]()

   private val cleaningThread = new Thread() { override def run() { keepCleaning() }}
@@ -111,7 +109,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {

   /** Attach a listener object to get information of when objects are cleaned. */
   def attachListener(listener: CleanerListener): Unit = {
-    listeners += listener
+    listeners.add(listener)
   }

   /** Start the cleaner. */
@@ -166,7 +164,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {

   /** Register an object for cleanup. */
   private def registerForCleanup(objectForCleanup: AnyRef, task: CleanupTask): Unit = {
-    referenceBuffer += new CleanupTaskWeakReference(task, objectForCleanup, referenceQueue)
+    referenceBuffer.add(new CleanupTaskWeakReference(task, objectForCleanup, referenceQueue))
   }

   /** Keep cleaning RDD, shuffle, and broadcast state. */
@@ -179,7 +177,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
         synchronized {
           reference.map(_.task).foreach { task =>
             logDebug("Got cleaning task " + task)
-            referenceBuffer -= reference.get
+            referenceBuffer.remove(reference.get)
             task match {
               case CleanRDD(rddId) =>
                 doCleanupRDD(rddId, blocking = blockOnCleanupTasks)
@@ -206,7 +204,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
     try {
       logDebug("Cleaning RDD " + rddId)
       sc.unpersistRDD(rddId, blocking)
-      listeners.foreach(_.rddCleaned(rddId))
+      listeners.asScala.foreach(_.rddCleaned(rddId))
       logInfo("Cleaned RDD " + rddId)
     } catch {
       case e: Exception => logError("Error cleaning RDD " + rddId, e)
@@ -219,7 +217,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
       logDebug("Cleaning shuffle " + shuffleId)
       mapOutputTrackerMaster.unregisterShuffle(shuffleId)
       blockManagerMaster.removeShuffle(shuffleId, blocking)
-      listeners.foreach(_.shuffleCleaned(shuffleId))
+      listeners.asScala.foreach(_.shuffleCleaned(shuffleId))
       logInfo("Cleaned shuffle " + shuffleId)
     } catch {
       case e: Exception => logError("Error cleaning shuffle " + shuffleId, e)
@@ -231,7 +229,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
     try {
       logDebug(s"Cleaning broadcast $broadcastId")
       broadcastManager.unbroadcast(broadcastId, true, blocking)
-      listeners.foreach(_.broadcastCleaned(broadcastId))
+      listeners.asScala.foreach(_.broadcastCleaned(broadcastId))
       logDebug(s"Cleaned broadcast $broadcastId")
     } catch {
       case e: Exception => logError("Error cleaning broadcast " + broadcastId, e)
@@ -243,7 +241,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
     try {
       logDebug("Cleaning accumulator " + accId)
       Accumulators.remove(accId)
-      listeners.foreach(_.accumCleaned(accId))
+      listeners.asScala.foreach(_.accumCleaned(accId))
       logInfo("Cleaned accumulator " + accId)
     } catch {
       case e: Exception => logError("Error cleaning accumulator " + accId, e)
@@ -258,7 +256,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
     try {
       logDebug("Cleaning rdd checkpoint data " + rddId)
       ReliableRDDCheckpointData.cleanCheckpoint(sc, rddId)
-      listeners.foreach(_.checkpointCleaned(rddId))
+      listeners.asScala.foreach(_.checkpointCleaned(rddId))
       logInfo("Cleaned rdd checkpoint data " + rddId)
     }
     catch {
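
The remaining hunks migrate every access site in the same way: `+=`/`-=` become `add`/`remove`, and each `listeners.foreach` becomes `listeners.asScala.foreach`. Two properties of `ConcurrentLinkedQueue` make this safe (my reading, not stated in the diff): its iterator is weakly consistent, so the cleaning thread can traverse `listeners` while another thread attaches a listener, and `remove(Object)` is a linear scan, so `keepCleaning`'s per-reference removal keeps roughly the cost `-=` had on the buffer. A self-contained sketch of the concurrent-traversal behavior, with illustrative names:

import java.util.concurrent.ConcurrentLinkedQueue
import scala.collection.JavaConverters._

object WeaklyConsistentIterationSketch {
  def main(args: Array[String]): Unit = {
    val queue = new ConcurrentLinkedQueue[Int]()
    (1 to 1000).foreach(queue.add)

    // Keep mutating from a second thread while the main thread iterates.
    val writer = new Thread(new Runnable {
      override def run(): Unit = (1001 to 2000).foreach(queue.add)
    })
    writer.start()

    // The weakly consistent iterator may or may not see the writer's
    // additions, but it never throws ConcurrentModificationException and
    // never observes a corrupted structure, unlike an unsynchronized buffer.
    val seen = queue.asScala.size
    writer.join()
    println(s"observed $seen elements mid-write")
  }
}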