-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-1103] [WIP] Automatic garbage collection of RDD, shuffle and broadcast data #126
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
1e752f1
80dd977
e427a9e
8512612
a24fefc
cb0a5a6
ae9da88
e61daa0
a7260d3
892b952
e1fba5f
f2881fd
620eca3
a007307
d2f8b97
6c9dcf6
c7ccef1
ba52e00
d0edef3
544ac86
e95479c
f201a8d
c92e4d9
0d17060
34f436f
fbfeec8
88904a3
e442246
8557c12
7edbc98
634a097
7ed72fb
5016375
f0aabb1
762a4d8
a6460d4
c5b1d98
a2cc8bc
ada45f0
cd72d19
b27f8e8
a430f06
104a89a
6222697
41c9ece
2b95b5e
4d05314
cff023c
d25a86e
f489fdc
61b8d6e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
This largely accounts for the cases in which the value referenced by a WeakReference is no longer strongly reachable; in such cases the map should return None for all get() operations and should skip the entry in all listing operations.
- Loading branch information
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -18,47 +18,61 @@ | |
| package org.apache.spark.util | ||
|
|
||
| import java.lang.ref.WeakReference | ||
| import java.util.concurrent.atomic.AtomicInteger | ||
|
|
||
| import scala.collection.{immutable, mutable} | ||
| import scala.collection.mutable | ||
|
|
||
| import org.apache.spark.Logging | ||
|
|
||
| /** | ||
| * A wrapper of TimeStampedHashMap that ensures the values are weakly referenced and timestamped. | ||
| * | ||
| * If the value is garbage collected and the weak reference is null, get() operation returns | ||
| * a non-existent value. However, the corresponding key is actually not removed in the current | ||
| * implementation. Key-value pairs whose timestamps are older than a particular threshold time | ||
| * can then be removed using the clearOldValues method. It exposes a scala.collection.mutable.Map | ||
| * interface to allow it to be a drop-in replacement for Scala HashMaps. | ||
| * If the value is garbage collected and the weak reference is null, get() will return a | ||
| * non-existent value. These entries are removed from the map periodically (every N inserts), as | ||
| * their values are no longer strongly reachable. Further, key-value pairs whose timestamps are | ||
| * older than a particular threshold can be removed using the clearOldValues method. | ||
| * | ||
| * Internally, it uses a Java ConcurrentHashMap, so all operations on this HashMap are thread-safe. | ||
| * TimeStampedWeakValueHashMap exposes a scala.collection.mutable.Map interface, which allows it | ||
| * to be a drop-in replacement for Scala HashMaps. Internally, it uses a Java ConcurrentHashMap, | ||
| * so all operations on this HashMap are thread-safe. | ||
| * | ||
| * @param updateTimeStampOnGet Whether timestamp of a pair will be updated when it is accessed. | ||
| */ | ||
| private[spark] class TimeStampedWeakValueHashMap[A, B](updateTimeStampOnGet: Boolean = false) | ||
| extends mutable.Map[A, B]() { | ||
| extends mutable.Map[A, B]() with Logging { | ||
|
|
||
| import TimeStampedWeakValueHashMap._ | ||
|
|
||
| private val internalMap = new TimeStampedHashMap[A, WeakReference[B]](updateTimeStampOnGet) | ||
| private val insertCount = new AtomicInteger(0) | ||
|
|
||
| /** Return a map consisting only of entries whose values are still strongly reachable. */ | ||
| private def nonNullReferenceMap = internalMap.filter { case (_, ref) => ref.get != null } | ||
|
|
||
| def get(key: A): Option[B] = internalMap.get(key) | ||
|
|
||
| def iterator: Iterator[(A, B)] = internalMap.iterator | ||
| def iterator: Iterator[(A, B)] = nonNullReferenceMap.iterator | ||
|
|
||
| override def + [B1 >: B](kv: (A, B1)): mutable.Map[A, B1] = { | ||
| val newMap = new TimeStampedWeakValueHashMap[A, B1] | ||
| val oldMap = nonNullReferenceMap.asInstanceOf[mutable.Map[A, WeakReference[B1]]] | ||
| newMap.internalMap.putAll(oldMap.toMap) | ||
| newMap.internalMap += kv | ||
| newMap | ||
| } | ||
|
|
||
| override def - (key: A): mutable.Map[A, B] = { | ||
| val newMap = new TimeStampedWeakValueHashMap[A, B] | ||
| newMap.internalMap.putAll(nonNullReferenceMap.toMap) | ||
| newMap.internalMap -= key | ||
| newMap | ||
| } | ||
|
|
||
| override def += (kv: (A, B)): this.type = { | ||
| internalMap += kv | ||
| if (insertCount.incrementAndGet() % CLEAR_NULL_VALUES_INTERVAL == 0) { | ||
| clearNullValues() | ||
| } | ||
| this | ||
| } | ||
|
|
||
|
|
@@ -71,31 +85,53 @@ private[spark] class TimeStampedWeakValueHashMap[A, B](updateTimeStampOnGet: Boo | |
|
|
||
| override def apply(key: A): B = internalMap.apply(key) | ||
|
|
||
| override def filter(p: ((A, B)) => Boolean): mutable.Map[A, B] = internalMap.filter(p) | ||
| override def filter(p: ((A, B)) => Boolean): mutable.Map[A, B] = nonNullReferenceMap.filter(p) | ||
|
|
||
| override def empty: mutable.Map[A, B] = new TimeStampedWeakValueHashMap[A, B]() | ||
|
|
||
| override def size: Int = internalMap.size | ||
|
|
||
| override def foreach[U](f: ((A, B)) => U) = internalMap.foreach(f) | ||
| override def foreach[U](f: ((A, B)) => U) = nonNullReferenceMap.foreach(f) | ||
|
|
||
| def putIfAbsent(key: A, value: B): Option[B] = internalMap.putIfAbsent(key, value) | ||
|
|
||
| def toMap: immutable.Map[A, B] = iterator.toMap | ||
| def toMap: Map[A, B] = iterator.toMap | ||
|
|
||
| /** | ||
| * Remove old key-value pairs that have timestamp earlier than `threshTime`. | ||
| */ | ||
| /** Remove old key-value pairs with timestamps earlier than `threshTime`. */ | ||
| def clearOldValues(threshTime: Long) = internalMap.clearOldValues(threshTime) | ||
|
|
||
| /** Remove entries with values that are no longer strongly reachable. */ | ||
| def clearNullValues() { | ||
| val it = internalMap.getEntrySet.iterator | ||
| while (it.hasNext) { | ||
| val entry = it.next() | ||
| if (entry.getValue.value.get == null) { | ||
| logDebug("Removing key " + entry.getKey + " because it is no longer strongly reachable.") | ||
| it.remove() | ||
| } | ||
| } | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. How about ?
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Not the same logic. When the value is null, this makes the function return Some(null) instead of None. Changing map to flatMap is the solution. |
||
| } | ||
|
|
||
| // For testing | ||
|
|
||
| def getTimestamp(key: A): Option[Long] = { | ||
| internalMap.getTimeStampedValue(key).map(_.timestamp) | ||
| } | ||
|
|
||
| def getReference(key: A): Option[WeakReference[B]] = { | ||
| internalMap.getTimeStampedValue(key).map(_.value) | ||
| } | ||
| } | ||
|
|
||
| /** | ||
| * Helper methods for converting to and from WeakReferences. | ||
| */ | ||
| private[spark] object TimeStampedWeakValueHashMap { | ||
| private object TimeStampedWeakValueHashMap { | ||
|
|
||
| /* Implicit conversion methods to WeakReferences */ | ||
| // Number of inserts after which entries with null references are removed | ||
| val CLEAR_NULL_VALUES_INTERVAL = 100 | ||
|
|
||
| /* Implicit conversion methods to WeakReferences. */ | ||
|
|
||
| implicit def toWeakReference[V](v: V): WeakReference[V] = new WeakReference[V](v) | ||
|
|
||
|
|
@@ -107,12 +143,15 @@ private[spark] object TimeStampedWeakValueHashMap { | |
| (kv: (K, WeakReference[V])) => p(kv) | ||
| } | ||
|
|
||
| /* Implicit conversion methods from WeakReferences */ | ||
| /* Implicit conversion methods from WeakReferences. */ | ||
|
|
||
| implicit def fromWeakReference[V](ref: WeakReference[V]): V = ref.get | ||
|
|
||
| implicit def fromWeakReferenceOption[V](v: Option[WeakReference[V]]): Option[V] = { | ||
| v.map(fromWeakReference) | ||
| v match { | ||
| case Some(ref) => Option(fromWeakReference(ref)) | ||
| case None => None | ||
| } | ||
| } | ||
|
|
||
| implicit def fromWeakReferenceTuple[K, V](kv: (K, WeakReference[V])): (K, V) = { | ||
|
|
@@ -128,5 +167,4 @@ private[spark] object TimeStampedWeakValueHashMap { | |
| map: mutable.Map[K, WeakReference[V]]) : mutable.Map[K, V] = { | ||
| mutable.Map(map.mapValues(fromWeakReference).toSeq: _*) | ||
| } | ||
|
|
||
| } | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This map is used for storing persisted RDDs in SparkContext |
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Use @aarondav 's import organizer!
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I do! Too bad that the user of the tool (i.e., me) forgets to engage its keyboard shortcut!