1 file changed: +3 -5 lines
core/src/main/scala/org/apache/spark/util/collection

@@ -199,11 +199,8 @@ class AppendOnlyMap[K, V](initialCapacity: Int = 64)
 
   /** Increase table size by 1, rehashing if necessary */
   private def incrementSize() {
-    if (curSize == MAXIMUM_CAPACITY) {
-      throw new IllegalStateException(s"Can't put more that ${MAXIMUM_CAPACITY} elements")
-    }
     curSize += 1
-    if (curSize > growThreshold && capacity < MAXIMUM_CAPACITY) {
+    if (curSize > growThreshold) {
       growTable()
     }
   }
@@ -216,7 +213,8 @@ class AppendOnlyMap[K, V](initialCapacity: Int = 64)
   /** Double the table's size and re-hash everything */
   protected def growTable() {
     // capacity < MAXIMUM_CAPACITY (2 ^ 29) so capacity * 2 won't overflow
-    val newCapacity = (capacity * 2).min(MAXIMUM_CAPACITY)
+    val newCapacity = capacity * 2
+    require(newCapacity <= MAXIMUM_CAPACITY, s"Can't contain more than ${growThreshold} elements")
     val newData = new Array[AnyRef](2 * newCapacity)
     val newMask = newCapacity - 1
     // Insert all our old values into the new array. Note that because our old keys are
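Note on the change: the per-insert cap check is removed from incrementSize() and replaced by a require inside growTable(), which only runs when the table actually needs to double. The sketch below is a minimal, self-contained Scala illustration of that flow, not Spark's AppendOnlyMap itself: the MAXIMUM_CAPACITY and LOAD_FACTOR values, the mutable fields, and the main driver are assumptions made purely for illustration.

// Minimal standalone sketch (illustrative only, not Spark's AppendOnlyMap):
// the size cap is enforced by require() inside growTable(), so incrementSize()
// stays a plain counter bump plus a load-factor check.
object GrowthSketch {
  // Assumed constants for illustration; the real class defines its own values.
  val MAXIMUM_CAPACITY: Int = 1 << 29
  val LOAD_FACTOR: Double = 0.7

  private var capacity: Int = 64
  private var growThreshold: Int = (LOAD_FACTOR * capacity).toInt
  private var curSize: Int = 0

  /** Increase table size by 1, growing when the load factor is exceeded. */
  def incrementSize(): Unit = {
    curSize += 1
    if (curSize > growThreshold) {
      growTable()
    }
  }

  /** Double the capacity; capacity < MAXIMUM_CAPACITY, so capacity * 2 cannot overflow. */
  def growTable(): Unit = {
    val newCapacity = capacity * 2
    require(newCapacity <= MAXIMUM_CAPACITY, s"Can't contain more than ${growThreshold} elements")
    capacity = newCapacity
    growThreshold = (LOAD_FACTOR * newCapacity).toInt
  }

  def main(args: Array[String]): Unit = {
    (1 to 1000).foreach(_ => incrementSize())
    println(s"curSize=$curSize capacity=$capacity growThreshold=$growThreshold")
  }
}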