diff --git a/src/FastSerialization/SegmentedDictionary/HashHelpers.cs b/src/FastSerialization/SegmentedDictionary/HashHelpers.cs
new file mode 100644
index 000000000..bb07be670
--- /dev/null
+++ b/src/FastSerialization/SegmentedDictionary/HashHelpers.cs
@@ -0,0 +1,115 @@
+// Copied from the dotnet/roslyn repo. Original source code can be found here:
+// https://github.com/dotnet/roslyn/blob/main/src/Dependencies/Collections/Internal/HashHelpers.cs
+
+using System;
+using System.Diagnostics;
+using System.Runtime.CompilerServices;
+
+namespace Microsoft.Diagnostics.FastSerialization
+{
+ internal static class HashHelpers
+ {
+ // This is the maximum prime smaller than Array.MaxArrayLength
+ public const int MaxPrimeArrayLength = 0x7FEFFFFD;
+
+ public const int HashPrime = 101;
+
+ // Table of prime numbers to use as hash table sizes.
+ // A typical resize algorithm would pick the smallest prime number in this array
+ // that is larger than twice the previous capacity.
+ // Suppose our Hashtable currently has capacity x and enough elements are added
+ // such that a resize needs to occur. Resizing first computes 2x then finds the
+ // first prime in the table greater than 2x, i.e. if primes are ordered
+ // p_1, p_2, ..., p_i, ..., it finds p_n such that p_n-1 < 2x < p_n.
+ // Doubling is important for preserving the asymptotic complexity of the
+ // hashtable operations such as add. Having a prime guarantees that double
+ // hashing does not lead to infinite loops. IE, your hash function will be
+ // h1(key) + i*h2(key), 0 <= i < size. h2 and the size must be relatively prime.
+ // We prefer the low computation costs of higher prime numbers over the increased
+ // memory allocation of a fixed prime number i.e. when right sizing a HashSet.
+ private static readonly int[] s_primes =
+ {
+ 3, 7, 11, 17, 23, 29, 37, 47, 59, 71, 89, 107, 131, 163, 197, 239, 293, 353, 431, 521, 631, 761, 919,
+ 1103, 1327, 1597, 1931, 2333, 2801, 3371, 4049, 4861, 5839, 7013, 8419, 10103, 12143, 14591,
+ 17519, 21023, 25229, 30293, 36353, 43627, 52361, 62851, 75431, 90523, 108631, 130363, 156437,
+ 187751, 225307, 270371, 324449, 389357, 467237, 560689, 672827, 807403, 968897, 1162687, 1395263,
+ 1674319, 2009191, 2411033, 2893249, 3471899, 4166287, 4999559, 5999471, 7199369
+ };
+
+ public static bool IsPrime(int candidate)
+ {
+ if ((candidate & 1) != 0)
+ {
+ var limit = (int)Math.Sqrt(candidate);
+ for (var divisor = 3; divisor <= limit; divisor += 2)
+ {
+ if ((candidate % divisor) == 0)
+ return false;
+ }
+ return true;
+ }
+ return candidate == 2;
+ }
+
+ public static int GetPrime(int min)
+ {
+ if (min < 0)
+ throw new ArgumentException("Collection's capacity overflowed and went negative.");
+
+ foreach (var prime in s_primes)
+ {
+ if (prime >= min)
+ return prime;
+ }
+
+ // Outside of our predefined table. Compute the hard way.
+ for (var i = (min | 1); i < int.MaxValue; i += 2)
+ {
+ if (IsPrime(i) && ((i - 1) % HashPrime != 0))
+ return i;
+ }
+ return min;
+ }
+
+ // Returns size of hashtable to grow to.
+ public static int ExpandPrime(int oldSize)
+ {
+ var newSize = 2 * oldSize;
+
+ // Allow the hashtables to grow to maximum possible size (~2G elements) before encountering capacity overflow.
+ // Note that this check works even when _items.Length overflowed thanks to the (uint) cast
+ if ((uint)newSize > MaxPrimeArrayLength && MaxPrimeArrayLength > oldSize)
+ {
+ Debug.Assert(MaxPrimeArrayLength == GetPrime(MaxPrimeArrayLength), "Invalid MaxPrimeArrayLength");
+ return MaxPrimeArrayLength;
+ }
+
+ return GetPrime(newSize);
+ }
+
+ /// Returns approximate reciprocal of the divisor: ceil(2**64 / divisor).
+ /// This should only be used on 64-bit.
+ public static ulong GetFastModMultiplier(uint divisor) =>
+ ulong.MaxValue / divisor + 1;
+
+ /// Performs a mod operation using the multiplier pre-computed with .
+ ///
+ /// PERF: This improves performance in 64-bit scenarios at the expense of performance in 32-bit scenarios. Since
+ /// we only build a single AnyCPU binary, we opt for improved performance in the 64-bit scenario.
+ ///
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static uint FastMod(uint value, uint divisor, ulong multiplier)
+ {
+ // We use modified Daniel Lemire's fastmod algorithm (https://github.com/dotnet/runtime/pull/406),
+ // which allows to avoid the long multiplication if the divisor is less than 2**31.
+ Debug.Assert(divisor <= int.MaxValue);
+
+ // This is equivalent of (uint)Math.BigMul(multiplier * value, divisor, out _). This version
+ // is faster than BigMul currently because we only need the high bits.
+ var highbits = (uint)(((((multiplier * value) >> 32) + 1) * divisor) >> 32);
+
+ Debug.Assert(highbits == value % divisor);
+ return highbits;
+ }
+ }
+}
diff --git a/src/FastSerialization/SegmentedDictionary/SegmentedDictionary.cs b/src/FastSerialization/SegmentedDictionary/SegmentedDictionary.cs
new file mode 100644
index 000000000..4e2991fcf
--- /dev/null
+++ b/src/FastSerialization/SegmentedDictionary/SegmentedDictionary.cs
@@ -0,0 +1,1346 @@
+using System.Diagnostics;
+using System.Runtime.CompilerServices;
+using Microsoft.Diagnostics.FastSerialization;
+
+namespace System.Collections.Generic
+{
+    /// <summary>
+    /// Represents a collection of keys and values.
+    /// </summary>
+    /// <remarks>
+    /// <para>This collection has similar performance characteristics to
+    /// <see cref="Dictionary{TKey, TValue}"/>, but uses segmented lists to avoid
+    /// allocations in the Large Object Heap.</para>
+    /// <para>This implementation was based on the SegmentedDictionary implementation made for dotnet/roslyn. Original source code:
+    /// https://github.com/dotnet/roslyn/blob/release/dev17.0/src/Dependencies/Collections/SegmentedDictionary%602.cs</para>
+    /// </remarks>
+    /// <typeparam name="TKey">The type of the keys in the dictionary.</typeparam>
+    /// <typeparam name="TValue">The type of the values in the dictionary.</typeparam>
+ public sealed class SegmentedDictionary : IDictionary, IDictionary
+ {
        #region Private Fields
        // Sentinel copied into FindEntry's out parameter before the search runs. Entry is a struct, so
        // assignments copy by value and this shared instance is never aliased.
        // NOTE(review): runtime naming convention would be s_entryPlaceholder.
        private static Entry EntryPlaceholder = new Entry();

        // Bucket heads (1-based entry indices; 0 means "empty bucket") and the entry storage.
        // Both are segmented lists so large tables avoid Large Object Heap allocations.
        private SegmentedList _buckets = new SegmentedList(defaultSegmentSize);
        private SegmentedList _entries = new SegmentedList(defaultSegmentSize);

        // Number of elements in each segment of the backing segmented lists.
        private const int defaultSegmentSize = 8_192;

        private int _count;               // high-water mark of used entry slots (includes slots on the free list)
        private int _freeList;            // index of the first free entry slot, or -1 when the free list is empty
        private int _freeCount;           // number of slots currently on the free list
        private ulong _fastModMultiplier; // precomputed multiplier for HashHelpers.FastMod over _buckets.Capacity
        private int _version;             // mutation counter; enumerators snapshot it and throw on mismatch

        private readonly IEqualityComparer _comparer;

        // Lazily created wrapper collections handed out by the Keys/Values properties.
        private KeyCollection _keys = null;
        private ValueCollection _values = null;
        // Base used to encode free-list links in Entry._next (see Entry._next doc below).
        private const int StartOfFreeList = -3;

        // How TryInsert treats a key that is already present.
        private enum InsertionBehavior
        {
            None, OverwriteExisting, ThrowOnExisting
        }

        private struct Entry
        {
            public uint _hashCode;
            /// <summary>
            /// 0-based index of next entry in chain: -1 means end of chain
            /// also encodes whether this entry _itself_ is part of the free list by changing sign and subtracting 3,
            /// so -2 means end of free list, -3 means index 0 but on free list, -4 means index 1 but on free list, etc.
            /// </summary>
            public int _next;
            public TKey _key; // Key of entry
            public TValue _value; // Value of entry
        }

        #endregion
+
+ #region Helper Methods
+
        /// <summary>
        /// Allocates bucket and entry storage sized to the smallest suitable prime >= <paramref name="capacity"/>,
        /// resets the free list, and caches the fast-mod multiplier. Returns the allocated (prime) size.
        /// </summary>
        private int Initialize(int capacity)
        {
            var size = HashHelpers.GetPrime(capacity);
            var buckets = new SegmentedList(defaultSegmentSize, size);
            var entries = new SegmentedList(defaultSegmentSize, size);

            // Assign member variables after both arrays allocated to guard against corruption from OOM if second fails
            _freeList = -1;
            _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)buckets.Capacity);
            _buckets = buckets;
            _entries = entries;

            return size;
        }

        /// <summary>
        /// Returns a writable reference to the bucket slot for <paramref name="hashCode"/>, using the
        /// precomputed fast-mod multiplier instead of the % operator.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private ref int GetBucket(uint hashCode)
        {
            var buckets = _buckets;
            return ref buckets.GetElementByReference((int)HashHelpers.FastMod(hashCode, (uint)buckets.Capacity, _fastModMultiplier));
        }
+
+ private bool FindEntry(TKey key, out Entry entry)
+ {
+ entry = EntryPlaceholder;
+
+ if (key == null)
+ {
+ throw new ArgumentNullException("Key cannot be null.");
+ }
+
+ if (_buckets.Capacity > 0)
+ {
+ Debug.Assert(_entries.Capacity > 0, "expected entries to be non-empty");
+ var comparer = _comparer;
+
+ var hashCode = (uint)comparer.GetHashCode(key);
+ var i = GetBucket(hashCode) - 1; // Value in _buckets is 1-based; subtract 1 from i. We do it here so it fuses with the following conditional.
+ var entries = _entries;
+ uint collisionCount = 0;
+
+ do
+ {
+ // Should be a while loop https://github.com/dotnet/runtime/issues/9422
+ // Test in if to drop range check for following array access
+ if ((uint)i >= (uint)entries.Capacity)
+ {
+ return false;
+ }
+
+ ref var currentEntry = ref entries.GetElementByReference(i);
+ if (currentEntry._hashCode == hashCode && comparer.Equals(currentEntry._key, key))
+ {
+ entry = currentEntry;
+ return true;
+ }
+
+ i = currentEntry._next;
+
+ collisionCount++;
+ } while (collisionCount <= (uint)entries.Capacity);
+
+ // The chain of entries forms a loop; which means a concurrent update has happened.
+ // Break out of the loop and throw, rather than looping forever.
+ throw new InvalidOperationException("Dictionary does not support concurrent operations.");
+ }
+
+ return false;
+ }
+
+ private bool TryInsert(TKey key, TValue value, InsertionBehavior behavior)
+ {
+ if (key == null)
+ {
+ throw new ArgumentNullException("Key cannot be null.");
+ }
+
+ if (_buckets.Capacity == 0)
+ {
+ Initialize(0);
+ }
+ Debug.Assert(_buckets.Capacity > 0);
+
+ var entries = _entries;
+ Debug.Assert(entries.Capacity > 0, "expected entries to be non-empty");
+
+ var comparer = _comparer;
+ var hashCode = (uint)comparer.GetHashCode(key);
+
+ uint collisionCount = 0;
+ ref var bucket = ref GetBucket(hashCode);
+ var i = bucket - 1; // Value in _buckets is 1-based
+
+ while (true)
+ {
+ // Should be a while loop https://github.com/dotnet/runtime/issues/9422
+ // Test uint in if rather than loop condition to drop range check for following array access
+ if ((uint)i >= (uint)entries.Capacity)
+ {
+ break;
+ }
+
+ if (entries[i]._hashCode == hashCode && comparer.Equals(entries[i]._key, key))
+ {
+ if (behavior == InsertionBehavior.OverwriteExisting)
+ {
+ entries.GetElementByReference(i)._value = value;
+ return true;
+ }
+
+ if (behavior == InsertionBehavior.ThrowOnExisting)
+ {
+ throw new ArgumentException($"The key with value {key} is already present in the dictionary.");
+ }
+
+ return false;
+ }
+
+ i = entries[i]._next;
+
+ collisionCount++;
+ if (collisionCount > (uint)entries.Capacity)
+ {
+ // The chain of entries forms a loop; which means a concurrent update has happened.
+ // Break out of the loop and throw, rather than looping forever.
+ throw new InvalidOperationException("Dictionary does not support concurrent operations.");
+ }
+ }
+
+
+ int index;
+ if (_freeCount > 0)
+ {
+ index = _freeList;
+ Debug.Assert((StartOfFreeList - entries[_freeList]._next) >= -1, "shouldn't overflow because `next` cannot underflow");
+ _freeList = StartOfFreeList - entries[_freeList]._next;
+ _freeCount--;
+ }
+ else
+ {
+ var count = _count;
+ if (count == entries.Capacity)
+ {
+ Resize();
+ bucket = ref GetBucket(hashCode);
+ }
+ index = count;
+ _count = count + 1;
+ entries = _entries;
+ }
+
+ ref var entry = ref entries.GetElementByReference(index);
+ entry._hashCode = hashCode;
+ entry._next = bucket - 1; // Value in _buckets is 1-based
+ entry._key = key;
+ entry._value = value; // Value in _buckets is 1-based
+ bucket = index + 1;
+ _version++;
+ return true;
+ }
+
        // Grows the table to the next suitable prime at least double the current count.
        private void Resize()
            => Resize(HashHelpers.ExpandPrime(_count));

        /// <summary>
        /// Rebuilds the table at <paramref name="newSize"/> capacity: copies the first _count entries,
        /// allocates fresh (zeroed) buckets, and re-threads every live entry (_next >= -1) into them.
        /// Free-list entries keep their encoded _next values and are skipped.
        /// </summary>
        private void Resize(int newSize)
        {
            Debug.Assert(_entries.Capacity > 0, "_entries should be non-empty");
            Debug.Assert(newSize >= _entries.Capacity);

            var entries = new SegmentedList(defaultSegmentSize, newSize);

            var count = _count;

            entries.AppendFrom(_entries, 0, count);

            // Assign member variables after both arrays allocated to guard against corruption from OOM if second fails
            _buckets = new SegmentedList(defaultSegmentSize, newSize);
            _fastModMultiplier = HashHelpers.GetFastModMultiplier((uint)_buckets.Capacity);
            for (var i = 0; i < count; i++)
            {
                if (entries[i]._next >= -1)
                {
                    ref var bucket = ref GetBucket(entries[i]._hashCode);
                    entries.GetElementByReference(i)._next = bucket - 1; // Value in _buckets is 1-based
                    bucket = i + 1;
                }
            }

            _entries = entries;
        }

        /// <summary>
        /// Throws for a null key; otherwise reports whether <paramref name="key"/> is a TKey.
        /// </summary>
        private static bool IsCompatibleKey(object key)
        {
            if (key == null)
            {
                throw new ArgumentNullException(nameof(key));
            }
            return key is TKey;
        }
+
+ #endregion
+
+ #region Constructors
+
        /// <summary>Initializes an empty dictionary with default capacity and the default key comparer.</summary>
        public SegmentedDictionary()
            : this(0, null)
        {
        }

        /// <summary>Initializes an empty dictionary that can hold at least <paramref name="capacity"/> elements.</summary>
        public SegmentedDictionary(int capacity)
            : this(capacity, null)
        {
        }

        /// <summary>Initializes an empty dictionary that compares keys with <paramref name="comparer"/>.</summary>
        public SegmentedDictionary(IEqualityComparer comparer)
            : this(0, comparer)
        {
        }
+
+ public SegmentedDictionary(int capacity, IEqualityComparer comparer)
+ {
+ if (capacity < 0)
+ {
+ throw new ArgumentException(nameof(capacity));
+ }
+
+ if (capacity > 0)
+ {
+ Initialize(capacity);
+ }
+
+ if (comparer != null && comparer != EqualityComparer.Default) // first check for null to avoid forcing default comparer instantiation unnecessarily
+ {
+ _comparer = comparer;
+ }
+ else
+ {
+ _comparer = EqualityComparer.Default;
+ }
+ }
+
        /// <summary>Initializes the dictionary with the contents of <paramref name="dictionary"/>.</summary>
        public SegmentedDictionary(IDictionary dictionary)
            : this(dictionary, null)
        {
        }

        /// <summary>
        /// Initializes the dictionary with the contents of <paramref name="dictionary"/>, comparing keys
        /// with <paramref name="comparer"/>.
        /// </summary>
        /// <exception cref="ArgumentNullException">If <paramref name="dictionary"/> is null. Note the chained
        /// constructor runs first with a 0 capacity in that case; the throw happens afterwards.</exception>
        public SegmentedDictionary(IDictionary dictionary, IEqualityComparer comparer)
            : this(dictionary != null ? dictionary.Count : 0, comparer)
        {
            if (dictionary == null)
            {
                throw new ArgumentNullException(nameof(dictionary));
            }

            // It is likely that the passed-in dictionary is SegmentedDictionary. When this is the case,
            // avoid the enumerator allocation and overhead by looping through the entries array directly.
            // We only do this when dictionary is SegmentedDictionary and not a subclass, to maintain
            // back-compat with subclasses that may have overridden the enumerator behavior.
            if (dictionary.GetType() == typeof(SegmentedDictionary))
            {
                var d = (SegmentedDictionary)dictionary;
                var count = d._count;
                var entries = d._entries;
                for (var i = 0; i < count; i++)
                {
                    // _next >= -1 marks a live entry; more-negative values are free-list links.
                    if (entries[i]._next >= -1)
                    {
                        Add(entries[i]._key, entries[i]._value);
                    }
                }
                return;
            }

            foreach (var pair in dictionary)
            {
                Add(pair.Key, pair.Value);
            }
        }
+
+ #endregion
+
+ #region IDictionary Implementation
+
        /// <summary>
        /// Gets the value for <paramref name="key"/>, throwing KeyNotFoundException when absent;
        /// the setter adds the pair or overwrites an existing value.
        /// </summary>
        public TValue this[TKey key]
        {
            get
            {
                if (FindEntry(key, out Entry entry))
                {
                    return entry._value;
                }

                ThrowHelper.ThrowKeyNotFoundException(key);
                return default; // unreachable (the helper throws); satisfies definite-return analysis
            }
            set
            {
                var modified = TryInsert(key, value, InsertionBehavior.OverwriteExisting);
                Debug.Assert(modified);
            }
        }

        ICollection IDictionary.Keys => Keys;

        ICollection IDictionary.Values => Values;

        /// <summary>Adds the pair; throws ArgumentException when the key already exists.</summary>
        public void Add(TKey key, TValue value)
        {
            var modified = TryInsert(key, value, InsertionBehavior.ThrowOnExisting);
            Debug.Assert(modified); // If there was an existing key and the Add failed, an exception will already have been thrown.
        }

        /// <summary>Returns true when <paramref name="key"/> is present.</summary>
        public bool ContainsKey(TKey key)
        {
            return FindEntry(key, out Entry entry);
        }

        /// <summary>Removes <paramref name="key"/>; returns false when it was not present.</summary>
        public bool Remove(TKey key)
        {
            return Remove(key, out TValue _);
        }

        /// <summary>Non-throwing lookup; <paramref name="value"/> is default on a miss.</summary>
        public bool TryGetValue(TKey key, out TValue value)
        {
            bool entryFound = FindEntry(key, out Entry entry);
            if (entryFound)
            {
                value = entry._value;
                return true;
            }

            value = default;
            return false;
        }
+
+ #endregion
+
+ #region ICollection> Implementation
+
        /// <summary>Number of live pairs: used slots minus slots sitting on the free list.</summary>
        public int Count => _count - _freeCount;

        public bool IsReadOnly => false;

        public void Add(KeyValuePair item) =>
            Add(item.Key, item.Value);

        /// <summary>Removes all entries while keeping the allocated capacity for reuse.</summary>
        public void Clear()
        {
            var count = _count;
            if (count > 0)
            {
                Debug.Assert(_buckets.Capacity > 0, "_buckets should be non-empty");
                Debug.Assert(_entries.Capacity > 0, "_entries should be non-empty");

                _buckets.Clear();

                _count = 0;
                _freeList = -1;
                _freeCount = 0;
                _entries.Clear();
            }
        }

        /// <summary>
        /// True when the key exists and its stored value equals item.Value using the default value comparer.
        /// </summary>
        public bool Contains(KeyValuePair item)
        {
            bool valueFound = FindEntry(item.Key, out Entry entry);
            if (valueFound && EqualityComparer.Default.Equals(entry._value, item.Value))
            {
                return true;
            }

            return false;
        }

        /// <summary>Copies the live pairs into <paramref name="array"/> starting at <paramref name="index"/>.</summary>
        public void CopyTo(KeyValuePair[] array, int index)
        {
            if (array == null)
            {
                throw new ArgumentNullException(nameof(array));
            }

            if ((uint)index > (uint)array.Length)
            {
                ThrowHelper.ThrowIndexArgumentOutOfRange_NeedNonNegNumException();
            }

            if (array.Length - index < Count)
            {
                throw new ArgumentException(ThrowHelper.CommonStrings.Arg_ArrayPlusOffTooSmall);
            }

            var count = _count;
            var entries = _entries;
            for (var i = 0; i < count; i++)
            {
                // _next >= -1 marks a live entry; free-list slots are skipped.
                if (entries[i]._next >= -1)
                {
                    array[index++] = new KeyValuePair(entries[i]._key, entries[i]._value);
                }
            }
        }

        /// <summary>Removes the pair only when both the key and (default-comparer-equal) value match.</summary>
        public bool Remove(KeyValuePair item)
        {
            if (FindEntry(item.Key, out Entry entry) && EqualityComparer.Default.Equals(item.Value, entry._value))
            {
                return Remove(item.Key, out TValue _);
            }

            return false;
        }
+
+ #endregion
+
+ #region IEnumerable> Implementation
+
        /// <summary>Returns a struct enumerator yielding KeyValuePair items (boxed via the interface).</summary>
        public IEnumerator> GetEnumerator() =>
            new Enumerator(this, Enumerator.KeyValuePair);

        #endregion

        #region IEnumerable Implementation

        // Non-generic enumeration shares the same enumerator in KeyValuePair mode.
        IEnumerator IEnumerable.GetEnumerator() =>
            new Enumerator(this, Enumerator.KeyValuePair);
+
+ #endregion
+
+ #region IDictionary Implementation
+
        /// <summary>
        /// Non-generic indexer: the getter returns null for missing or incompatible keys; the setter casts
        /// and delegates to the generic indexer, translating cast failures into argument exceptions.
        /// </summary>
        public object this[object key]
        {
            get
            {
                if (IsCompatibleKey(key))
                {
                    if (FindEntry((TKey)key, out Entry entry))
                    {
                        return entry._value;
                    }
                }

                return null;
            }
            set
            {
                if (key == null)
                {
                    throw new ArgumentNullException(nameof(key));
                }

                ThrowHelper.IfNullAndNullsAreIllegalThenThrow(value, nameof(value));

                try
                {
                    var tempKey = (TKey)key;
                    try
                    {
                        this[tempKey] = (TValue)value;
                    }
                    catch (InvalidCastException)
                    {
                        // value was not a TValue
                        ThrowHelper.ThrowWrongTypeArgumentException(value, typeof(TValue));
                    }
                }
                catch (InvalidCastException)
                {
                    // key was not a TKey
                    ThrowHelper.ThrowWrongTypeArgumentException(key, typeof(TKey));
                }
            }
        }

        ICollection IDictionary.Keys => Keys;

        ICollection IDictionary.Values => Values;

        public bool IsFixedSize => false;

        /// <summary>
        /// Non-generic Add; wrong-type keys or values surface as argument exceptions naming the offender.
        /// </summary>
        public void Add(object key, object value)
        {
            if (key == null)
            {
                throw new ArgumentNullException(nameof(key));
            }

            ThrowHelper.IfNullAndNullsAreIllegalThenThrow(value, nameof(value));

            try
            {
                var tempKey = (TKey)key;

                try
                {
                    Add(tempKey, (TValue)value);
                }
                catch (InvalidCastException)
                {
                    // value was not a TValue
                    ThrowHelper.ThrowWrongTypeArgumentException(value, typeof(TValue));
                }
            }
            catch (InvalidCastException)
            {
                // key was not a TKey
                ThrowHelper.ThrowWrongTypeArgumentException(key, typeof(TKey));
            }
        }

        /// <summary>Non-generic key containment; false when <paramref name="key"/> is not a TKey.</summary>
        public bool Contains(object key)
        {
            if (IsCompatibleKey(key))
            {
                return ContainsKey((TKey)key);
            }

            return false;
        }

        // Non-generic enumeration yields DictionaryEntry items.
        IDictionaryEnumerator IDictionary.GetEnumerator() =>
            new Enumerator(this, Enumerator.DictEntry);

        /// <summary>Non-generic Remove; silently ignores keys that are not TKey.</summary>
        public void Remove(object key)
        {
            if (IsCompatibleKey(key))
            {
                Remove((TKey)key);
            }
        }
+
+ #endregion
+
+ #region ICollection Implementation
+
        // NOTE(review): exposing `this` as SyncRoot matches legacy collection behavior but lets external
        // code lock the dictionary instance.
        public object SyncRoot => this;

        public bool IsSynchronized => false;

        /// <summary>
        /// Non-generic CopyTo: supports KeyValuePair[], DictionaryEntry[] and object[] destinations,
        /// validating rank, lower bound, index and remaining space first.
        /// </summary>
        public void CopyTo(Array array, int index)
        {
            if (array == null)
            {
                throw new ArgumentNullException(nameof(array));
            }

            if (array.Rank != 1)
            {
                throw new ArgumentException(ThrowHelper.CommonStrings.Arg_RankMultiDimNotSupported);
            }

            if (array.GetLowerBound(0) != 0)
            {
                throw new ArgumentException(ThrowHelper.CommonStrings.Arg_NonZeroLowerBound);
            }

            if ((uint)index > (uint)array.Length)
            {
                ThrowHelper.ThrowIndexArgumentOutOfRange_NeedNonNegNumException();
            }

            if (array.Length - index < Count)
            {
                throw new ArgumentException(ThrowHelper.CommonStrings.Arg_ArrayPlusOffTooSmall);
            }

            if (array is KeyValuePair[] pairs)
            {
                CopyTo(pairs, index);
            }
            else if (array is DictionaryEntry[] dictEntryArray)
            {
                var entries = _entries;
                for (var i = 0; i < _count; i++)
                {
                    // _next >= -1 marks a live entry.
                    if (entries[i]._next >= -1)
                    {
                        dictEntryArray[index++] = new DictionaryEntry(entries[i]._key, entries[i]._value);
                    }
                }
            }
            else
            {
                var objects = array as object[];
                if (objects == null)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Argument_InvalidArrayType);
                }

                try
                {
                    var count = _count;
                    var entries = _entries;
                    for (var i = 0; i < count; i++)
                    {
                        if (entries[i]._next >= -1)
                        {
                            objects[index++] = new KeyValuePair(entries[i]._key, entries[i]._value);
                        }
                    }
                }
                catch (ArrayTypeMismatchException)
                {
                    // e.g. a string[] passed as object[]; normalize to the standard argument error.
                    throw new ArgumentException(ThrowHelper.CommonStrings.Argument_InvalidArrayType);
                }
            }
        }
+
+ #endregion
+
+ #region Public Properties
+
        /// <summary>The comparer used to hash and compare keys.</summary>
        public IEqualityComparer Comparer
        {
            get
            {
                // _comparer is always assigned by the constructor; the fallback is defensive.
                return _comparer ?? EqualityComparer.Default;
            }
        }

        /// <summary>Lazily created live view over the dictionary's keys.</summary>
        public KeyCollection Keys
        {
            get
            {
                if (_keys == null)
                {
                    _keys = new KeyCollection(this);
                }

                return _keys;
            }
        }

        /// <summary>Lazily created live view over the dictionary's values.</summary>
        public ValueCollection Values
        {
            get
            {
                if (_values == null)
                {
                    _values = new ValueCollection(this);
                }

                return _values;
            }
        }
+
+ #endregion
+
+ #region Public Methods
+
        /// <summary>Adds the pair only when the key is absent; returns false (no throw) when it exists.</summary>
        public bool TryAdd(TKey key, TValue value) =>
            TryInsert(key, value, InsertionBehavior.None);

        /// <summary>
        /// Removes <paramref name="key"/>, returning its value in <paramref name="value"/> on success;
        /// the freed entry slot is pushed onto the free list for reuse.
        /// </summary>
        public bool Remove(TKey key, out TValue value)
        {
            // If performance becomes an issue, you can copy this implementation over to the other Remove method overloads.

            if (key == null)
            {
                throw new ArgumentNullException(nameof(key));
            }

            if (_buckets.Capacity > 0)
            {
                Debug.Assert(_entries.Capacity > 0, "entries should be non-empty");
                uint collisionCount = 0;
                // NOTE(review): unlike FindEntry/TryInsert this null-guards _comparer, although the
                // constructor always assigns it.
                var hashCode = (uint)(_comparer?.GetHashCode(key) ?? key.GetHashCode());
                ref var bucket = ref GetBucket(hashCode);
                var entries = _entries;
                var last = -1;
                var i = bucket - 1; // Value in buckets is 1-based
                while (i >= 0)
                {
                    ref var entry = ref entries.GetElementByReference(i);

                    if (entry._hashCode == hashCode && (_comparer?.Equals(entry._key, key) ?? EqualityComparer.Default.Equals(entry._key, key)))
                    {
                        // Unlink the entry from its collision chain.
                        if (last < 0)
                        {
                            bucket = entry._next + 1; // Value in buckets is 1-based
                        }
                        else
                        {
                            entries.GetElementByReference(last)._next = entry._next;
                        }

                        value = entry._value;

                        Debug.Assert((StartOfFreeList - _freeList) < 0, "shouldn't underflow because max hashtable length is MaxPrimeArrayLength = 0x7FEFFFFD(2146435069) _freelist underflow threshold 2147483646");
                        // Encode the free-list link (see Entry._next) and clear key/value so the GC can
                        // collect referenced objects.
                        entry._next = StartOfFreeList - _freeList;

                        entry._key = default;
                        entry._value = default;

                        _freeList = i;
                        _freeCount++;
                        return true;
                    }

                    last = i;
                    i = entry._next;

                    collisionCount++;
                    if (collisionCount > (uint)entries.Capacity)
                    {
                        // The chain of entries forms a loop; which means a concurrent update has happened.
                        // Break out of the loop and throw, rather than looping forever.
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_ConcurrentOperationsNotSupported);
                    }
                }
            }

            value = default;
            return false;
        }

        /// <summary>
        /// Grows (never shrinks) the table so it can hold at least <paramref name="capacity"/> elements;
        /// returns the resulting capacity.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">If <paramref name="capacity"/> is negative.</exception>
        public int EnsureCapacity(int capacity)
        {
            if (capacity < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(capacity));
            }

            var currentCapacity = _entries.Capacity;
            if (currentCapacity >= capacity)
            {
                return currentCapacity;
            }

            _version++;

            if (_buckets.Capacity == 0)
            {
                return Initialize(capacity);
            }

            var newSize = HashHelpers.GetPrime(capacity);
            Resize(newSize);
            return newSize;
        }

        /// <summary>Linear scan over live entries for <paramref name="value"/> (O(n)).</summary>
        public bool ContainsValue(TValue value)
        {
            var entries = _entries;
            if (value == null)
            {
                for (var i = 0; i < _count; i++)
                {
                    if (entries[i]._next >= -1 && entries[i]._value == null)
                    {
                        return true;
                    }
                }
            }
            else
            {
                // Object type: Shared Generic, EqualityComparer.Default won't devirtualize
                // https://github.com/dotnet/runtime/issues/10050
                // So cache in a local rather than get EqualityComparer per loop iteration
                var defaultComparer = EqualityComparer.Default;
                for (var i = 0; i < _count; i++)
                {
                    if (entries[i]._next >= -1 && defaultComparer.Equals(entries[i]._value, value))
                    {
                        return true;
                    }
                }
            }

            return false;
        }
+
+ #endregion
+
        /// <summary>
        /// Struct enumerator over live entries in slot order. Snapshots _version at construction and
        /// throws if the dictionary is mutated during enumeration. Can present items either as
        /// KeyValuePair or DictionaryEntry depending on the mode passed to the constructor.
        /// </summary>
        public struct Enumerator : IEnumerator>, IDictionaryEnumerator
        {
            private readonly SegmentedDictionary _dictionary;
            private readonly int _version;   // version snapshot for fail-fast invalidation
            private int _index;              // next slot to inspect; _count + 1 once finished
            private KeyValuePair _current;
            private readonly int _getEnumeratorRetType; // What should Enumerator.Current return?

            internal const int DictEntry = 1;
            internal const int KeyValuePair = 2;

            internal Enumerator(SegmentedDictionary dictionary, int getEnumeratorRetType)
            {
                _dictionary = dictionary;
                _version = dictionary._version;
                _index = 0;
                _getEnumeratorRetType = getEnumeratorRetType;
                _current = default;
            }

            /// <summary>Advances to the next live entry, skipping free-list slots.</summary>
            public bool MoveNext()
            {
                if (_version != _dictionary._version)
                {
                    throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumFailedVersion);
                }

                // Use unsigned comparison since we set index to dictionary.count+1 when the enumeration ends.
                // dictionary.count+1 could be negative if dictionary.count is int.MaxValue
                while ((uint)_index < (uint)_dictionary._count)
                {
                    ref var entry = ref _dictionary._entries.GetElementByReference(_index++);

                    if (entry._next >= -1)
                    {
                        _current = new KeyValuePair(entry._key, entry._value);
                        return true;
                    }
                }

                _index = _dictionary._count + 1;
                _current = default;
                return false;
            }

            public KeyValuePair Current => _current;

            public void Dispose()
            {
            }

            // Boxing Current for the non-generic interface; shape depends on the construction mode.
            object IEnumerator.Current
            {
                get
                {
                    // _index == 0: before first MoveNext; _count + 1: after enumeration ended.
                    if (_index == 0 || (_index == _dictionary._count + 1))
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumOpCantHappen);
                    }

                    if (_getEnumeratorRetType == DictEntry)
                    {
                        return new DictionaryEntry(_current.Key, _current.Value);
                    }

                    return new KeyValuePair(_current.Key, _current.Value);
                }
            }

            void IEnumerator.Reset()
            {
                if (_version != _dictionary._version)
                {
                    throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumFailedVersion);
                }

                _index = 0;
                _current = default;
            }

            DictionaryEntry IDictionaryEnumerator.Entry
            {
                get
                {
                    if (_index == 0 || (_index == _dictionary._count + 1))
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumOpCantHappen);
                    }

                    return new DictionaryEntry(_current.Key, _current.Value);
                }
            }

            object IDictionaryEnumerator.Key
            {
                get
                {
                    if (_index == 0 || (_index == _dictionary._count + 1))
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumOpCantHappen);
                    }

                    return _current.Key;
                }
            }

            object IDictionaryEnumerator.Value
            {
                get
                {
                    if (_index == 0 || (_index == _dictionary._count + 1))
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumOpCantHappen);
                    }

                    return _current.Value;
                }
            }
        }
+
        /// <summary>
        /// Read-only live view over the dictionary's keys; mutating members throw NotSupportedException.
        /// </summary>
        public sealed class KeyCollection : ICollection, ICollection, IReadOnlyCollection
        {
            private readonly SegmentedDictionary _dictionary;

            public KeyCollection(SegmentedDictionary dictionary)
            {
                if (dictionary == null)
                {
                    throw new ArgumentNullException(nameof(dictionary));
                }

                _dictionary = dictionary;
            }

            public Enumerator GetEnumerator()
                => new Enumerator(_dictionary);

            /// <summary>Copies the live keys into <paramref name="array"/> starting at <paramref name="index"/>.</summary>
            public void CopyTo(TKey[] array, int index)
            {
                if (array == null)
                {
                    throw new ArgumentNullException(nameof(array));
                }

                if (index < 0 || index > array.Length)
                {
                    ThrowHelper.ThrowIndexArgumentOutOfRange_NeedNonNegNumException();
                }

                if (array.Length - index < _dictionary.Count)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_ArrayPlusOffTooSmall);
                }

                var count = _dictionary._count;
                var entries = _dictionary._entries;
                for (var i = 0; i < count; i++)
                {
                    // _next >= -1 marks a live entry.
                    if (entries[i]._next >= -1)
                        array[index++] = entries[i]._key;
                }
            }

            public int Count => _dictionary.Count;

            bool ICollection.IsReadOnly => true;

            void ICollection.Add(TKey item)
                => throw new NotSupportedException();

            void ICollection.Clear()
                => throw new NotSupportedException();

            /// <summary>Delegates to the dictionary's key lookup (O(1) expected).</summary>
            public bool Contains(TKey item)
                => _dictionary.ContainsKey(item);

            bool ICollection.Remove(TKey item)
            {
                throw new NotSupportedException();
            }

            IEnumerator IEnumerable.GetEnumerator()
                => new Enumerator(_dictionary);

            IEnumerator IEnumerable.GetEnumerator()
                => new Enumerator(_dictionary);

            /// <summary>Non-generic CopyTo supporting TKey[] and object[] destinations.</summary>
            void ICollection.CopyTo(Array array, int index)
            {
                if (array == null)
                {
                    throw new ArgumentNullException(nameof(array));
                }

                if (array.Rank != 1)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_RankMultiDimNotSupported);
                }

                if (array.GetLowerBound(0) != 0)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_NonZeroLowerBound);
                }

                if ((uint)index > (uint)array.Length)
                {
                    ThrowHelper.ThrowIndexArgumentOutOfRange_NeedNonNegNumException();
                }

                if (array.Length - index < _dictionary.Count)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_ArrayPlusOffTooSmall);
                }

                if (array is TKey[] keys)
                {
                    CopyTo(keys, index);
                }
                else
                {
                    var objects = array as object[];
                    if (objects == null)
                    {
                        throw new ArgumentException(ThrowHelper.CommonStrings.Argument_InvalidArrayType);
                    }

                    var count = _dictionary._count;
                    var entries = _dictionary._entries;
                    try
                    {
                        for (var i = 0; i < count; i++)
                        {
                            if (entries[i]._next >= -1)
                                objects[index++] = entries[i]._key;
                        }
                    }
                    catch (ArrayTypeMismatchException)
                    {
                        throw new ArgumentException(ThrowHelper.CommonStrings.Argument_InvalidArrayType);
                    }
                }
            }

            bool ICollection.IsSynchronized => false;

            object ICollection.SyncRoot => ((ICollection)_dictionary).SyncRoot;

            /// <summary>
            /// Struct enumerator over live keys; fails fast when the dictionary is mutated (via _version).
            /// </summary>
            public struct Enumerator : IEnumerator, IEnumerator
            {
                private readonly SegmentedDictionary _dictionary;
                private int _index;
                private readonly int _version;
                private TKey _currentKey;

                internal Enumerator(SegmentedDictionary dictionary)
                {
                    _dictionary = dictionary;
                    _version = dictionary._version;
                    _index = 0;
                    _currentKey = default;
                }

                public void Dispose()
                {
                }

                public bool MoveNext()
                {
                    if (_version != _dictionary._version)
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumFailedVersion);
                    }

                    while ((uint)_index < (uint)_dictionary._count)
                    {
                        ref var entry = ref _dictionary._entries.GetElementByReference(_index++);

                        if (entry._next >= -1)
                        {
                            _currentKey = entry._key;
                            return true;
                        }
                    }

                    _index = _dictionary._count + 1;
                    _currentKey = default;
                    return false;
                }

                public TKey Current => _currentKey;

                object IEnumerator.Current
                {
                    get
                    {
                        // _index == 0: before first MoveNext; _count + 1: after enumeration ended.
                        if (_index == 0 || (_index == _dictionary._count + 1))
                        {
                            throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumOpCantHappen);
                        }

                        return _currentKey;
                    }
                }

                void IEnumerator.Reset()
                {
                    if (_version != _dictionary._version)
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumFailedVersion);
                    }

                    _index = 0;
                    _currentKey = default;
                }
            }
        }
+
        /// <summary>
        /// Read-only live view over the dictionary's values; mutating members throw NotSupportedException.
        /// </summary>
        public sealed class ValueCollection : ICollection, ICollection, IReadOnlyCollection
        {
            private readonly SegmentedDictionary _dictionary;

            public ValueCollection(SegmentedDictionary dictionary)
            {
                if (dictionary == null)
                {
                    throw new ArgumentNullException(nameof(dictionary));
                }

                _dictionary = dictionary;
            }

            public Enumerator GetEnumerator()
                => new Enumerator(_dictionary);

            /// <summary>Copies the live values into <paramref name="array"/> starting at <paramref name="index"/>.</summary>
            public void CopyTo(TValue[] array, int index)
            {
                if (array == null)
                {
                    throw new ArgumentNullException(nameof(array));
                }

                if ((uint)index > array.Length)
                {
                    ThrowHelper.ThrowIndexArgumentOutOfRange_NeedNonNegNumException();
                }

                if (array.Length - index < _dictionary.Count)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_ArrayPlusOffTooSmall);
                }

                var count = _dictionary._count;
                var entries = _dictionary._entries;
                for (var i = 0; i < count; i++)
                {
                    // _next >= -1 marks a live entry.
                    if (entries[i]._next >= -1)
                        array[index++] = entries[i]._value;
                }
            }

            public int Count => _dictionary.Count;

            bool ICollection.IsReadOnly => true;

            void ICollection.Add(TValue item)
                => throw new NotSupportedException();

            bool ICollection.Remove(TValue item)
                => throw new NotSupportedException();

            void ICollection.Clear()
                => throw new NotSupportedException();

            /// <summary>Delegates to the dictionary's O(n) value scan.</summary>
            public bool Contains(TValue item)
                => _dictionary.ContainsValue(item);

            IEnumerator IEnumerable.GetEnumerator()
                => new Enumerator(_dictionary);

            IEnumerator IEnumerable.GetEnumerator()
                => new Enumerator(_dictionary);

            /// <summary>Non-generic CopyTo supporting TValue[] and object[] destinations.</summary>
            void ICollection.CopyTo(Array array, int index)
            {
                if (array == null)
                {
                    throw new ArgumentNullException(nameof(array));
                }

                if (array.Rank != 1)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_RankMultiDimNotSupported);
                }

                if (array.GetLowerBound(0) != 0)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_NonZeroLowerBound);
                }

                if ((uint)index > (uint)array.Length)
                {
                    ThrowHelper.ThrowIndexArgumentOutOfRange_NeedNonNegNumException();
                }

                if (array.Length - index < _dictionary.Count)
                {
                    throw new ArgumentException(ThrowHelper.CommonStrings.Arg_ArrayPlusOffTooSmall);
                }

                if (array is TValue[] values)
                {
                    CopyTo(values, index);
                }
                else
                {
                    var objects = array as object[];
                    if (objects == null)
                    {
                        throw new ArgumentException(ThrowHelper.CommonStrings.Argument_InvalidArrayType);
                    }

                    var count = _dictionary._count;
                    var entries = _dictionary._entries;
                    try
                    {
                        for (var i = 0; i < count; i++)
                        {
                            if (entries[i]._next >= -1)
                                objects[index++] = entries[i]._value;
                        }
                    }
                    catch (ArrayTypeMismatchException)
                    {
                        throw new ArgumentException(ThrowHelper.CommonStrings.Argument_InvalidArrayType);
                    }
                }
            }

            bool ICollection.IsSynchronized => false;

            object ICollection.SyncRoot => ((ICollection)_dictionary).SyncRoot;

            /// <summary>
            /// Struct enumerator over live values; fails fast when the dictionary is mutated (via _version).
            /// </summary>
            public struct Enumerator : IEnumerator, IEnumerator
            {
                private readonly SegmentedDictionary _dictionary;
                private int _index;
                private readonly int _version;
                private TValue _currentValue;

                internal Enumerator(SegmentedDictionary dictionary)
                {
                    _dictionary = dictionary;
                    _version = dictionary._version;
                    _index = 0;
                    _currentValue = default;
                }

                public void Dispose()
                {
                }

                public bool MoveNext()
                {
                    if (_version != _dictionary._version)
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumFailedVersion);
                    }

                    while ((uint)_index < (uint)_dictionary._count)
                    {
                        ref var entry = ref _dictionary._entries.GetElementByReference(_index++);

                        if (entry._next >= -1)
                        {
                            _currentValue = entry._value;
                            return true;
                        }
                    }
                    _index = _dictionary._count + 1;
                    _currentValue = default;
                    return false;
                }

                public TValue Current => _currentValue;

                object IEnumerator.Current
                {
                    get
                    {
                        // _index == 0: before first MoveNext; _count + 1: after enumeration ended.
                        if (_index == 0 || (_index == _dictionary._count + 1))
                        {
                            throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumOpCantHappen);
                        }

                        return _currentValue;
                    }
                }

                void IEnumerator.Reset()
                {
                    if (_version != _dictionary._version)
                    {
                        throw new InvalidOperationException(ThrowHelper.CommonStrings.InvalidOperation_EnumFailedVersion);
                    }

                    _index = 0;
                    _currentValue = default;
                }
            }
        }
+ }
+}
diff --git a/src/FastSerialization/SegmentedDictionary/ThrowHelper.cs b/src/FastSerialization/SegmentedDictionary/ThrowHelper.cs
new file mode 100644
index 000000000..72594c0c4
--- /dev/null
+++ b/src/FastSerialization/SegmentedDictionary/ThrowHelper.cs
@@ -0,0 +1,63 @@
+// Copied from dotnet/roslyn repo. Original source code can be found here:
+// https://github.com/dotnet/roslyn/blob/main/src/Dependencies/Collections/Internal/ThrowHelper.cs
+
+using System;
+using System.Collections.Generic;
+using System.Runtime.CompilerServices;
+
+namespace Microsoft.Diagnostics.FastSerialization
+{
+ /// <summary>
+ /// Utility class for exception throwing for the SegmentedDictionary.
+ /// </summary>
+ internal static class ThrowHelper
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal static void IfNullAndNullsAreIllegalThenThrow<T>(object value, string argName)
+ {
+ // Note that default(T) is not equal to null for value types except when T is Nullable<T>.
+ if (!(default(T) == null) && value == null)
+ throw new ArgumentNullException(argName);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal static void ThrowKeyNotFoundException<T>(T key)
+ {
+ throw new KeyNotFoundException($"The given key '{key}' was not present in the dictionary.");
+ }
+
+ internal static void ThrowIndexArgumentOutOfRange_NeedNonNegNumException()
+ {
+ throw GetArgumentOutOfRangeException("index",
+ CommonStrings.ArgumentOutOfRange_NeedNonNegNum);
+ }
+
+ internal static void ThrowWrongTypeArgumentException<T>(T value, Type targetType)
+ {
+ throw GetWrongTypeArgumentException(value, targetType);
+ }
+
+ private static ArgumentOutOfRangeException GetArgumentOutOfRangeException(string argument, string message)
+ {
+ return new ArgumentOutOfRangeException(argument, message);
+ }
+
+ private static ArgumentException GetWrongTypeArgumentException(object value, Type targetType)
+ {
+ return new ArgumentException($"The value '{value}' is not of type '{targetType}' and cannot be used in this generic collection.",
+ nameof(value));
+ }
+
+ internal static class CommonStrings
+ {
+ public static readonly string Arg_ArrayPlusOffTooSmall = "Destination array is not long enough to copy all the items in the collection. Check array index and length.";
+ public static readonly string ArgumentOutOfRange_NeedNonNegNum = "Non-negative number required.";
+ public static readonly string Arg_RankMultiDimNotSupported = "Only single dimensional arrays are supported for the requested action.";
+ public static readonly string Arg_NonZeroLowerBound = "The lower bound of target array must be zero.";
+ public static readonly string Argument_InvalidArrayType = "Target array type is not compatible with the type of items in the collection.";
+ public static readonly string InvalidOperation_ConcurrentOperationsNotSupported = "Operations that change non-concurrent collections must have exclusive access. A concurrent update was performed on this collection and corrupted its state. The collection's state is no longer correct.";
+ public static readonly string InvalidOperation_EnumFailedVersion = "Collection was modified; enumeration operation may not execute.";
+ public static readonly string InvalidOperation_EnumOpCantHappen = "Enumeration has either not started or has already finished.";
+ }
+ }
+}
diff --git a/src/FastSerialization/SegmentedList.cs b/src/FastSerialization/SegmentedList.cs
index f78b80c51..cb422e0fb 100644
--- a/src/FastSerialization/SegmentedList.cs
+++ b/src/FastSerialization/SegmentedList.cs
@@ -93,6 +93,8 @@ public int Count
}
}
+ public int Capacity => this.capacity;
+
/// <summary>
/// Copy to Array
/// </summary>
@@ -149,6 +151,9 @@ public T this[int index]
}
}
+ public ref T GetElementByReference(int index) =>
+ ref this.items[index >> this.segmentShift][index & this.offsetMask];
+
/// <summary>
/// Necessary if the list is being used as an array since it creates the segments lazily.
/// </summary>
diff --git a/src/MemoryGraph/MemoryGraph.cs b/src/MemoryGraph/MemoryGraph.cs
index 7e5ee2e95..447ec1ec7 100644
--- a/src/MemoryGraph/MemoryGraph.cs
+++ b/src/MemoryGraph/MemoryGraph.cs
@@ -10,7 +10,17 @@ public class MemoryGraph : Graph, IFastSerializable
public MemoryGraph(int expectedSize)
: base(expectedSize)
{
- m_addressToNodeIndex = new Dictionary<Address, NodeIndex>(expectedSize);
+ // If we have too many addresses we will reach the Dictionary's internal array's size limit and throw.
+ // Therefore use a new implementation of it that is similar in performance but that can handle the extra load.
+ if (expectedSize > 200_000)
+ {
+ m_addressToNodeIndex = new SegmentedDictionary<Address, NodeIndex>(expectedSize);
+ }
+ else
+ {
+ m_addressToNodeIndex = new Dictionary<Address, NodeIndex>(expectedSize);
+ }
+
m_nodeAddresses = new SegmentedList<Address>(SegmentSize, expectedSize);
}
@@ -111,7 +121,7 @@ public bool IsInGraph(Address objectAddress)
/// THis table maps the ID that CLRProfiler uses (an address), to the NodeIndex we have assigned to it.
/// It is only needed while the file is being read in.
///
- protected Dictionary<Address, NodeIndex> m_addressToNodeIndex; // This field is only used during construction
+ protected IDictionary<Address, NodeIndex> m_addressToNodeIndex; // This field is only used during construction
#endregion
#region private
diff --git a/src/TraceEvent/TraceEvent.Tests/SegmentedDictionary/AssertExtensions.cs b/src/TraceEvent/TraceEvent.Tests/SegmentedDictionary/AssertExtensions.cs
new file mode 100644
index 000000000..59bbc2404
--- /dev/null
+++ b/src/TraceEvent/TraceEvent.Tests/SegmentedDictionary/AssertExtensions.cs
@@ -0,0 +1,21 @@
+// Tests copied from dotnet/runtime repo. Original source code can be found here:
+// https://github.com/dotnet/runtime/blob/main/src/libraries/Common/tests/TestUtilities/System/AssertExtensions.cs
+
+using System;
+using Xunit;
+
+namespace PerfView.Collections.Tests
+{
+ internal static class AssertExtensions
+ {
+ public static T Throws<T>(string expectedParamName, Action action)
+ where T : ArgumentException
+ {
+ T exception = Assert.Throws<T>(action);
+
+ Assert.Equal(expectedParamName, exception.ParamName);
+
+ return exception;
+ }
+ }
+}
diff --git a/src/TraceEvent/TraceEvent.Tests/SegmentedDictionary/CollectionAsserts.cs b/src/TraceEvent/TraceEvent.Tests/SegmentedDictionary/CollectionAsserts.cs
new file mode 100644
index 000000000..1ad7a1555
--- /dev/null
+++ b/src/TraceEvent/TraceEvent.Tests/SegmentedDictionary/CollectionAsserts.cs
@@ -0,0 +1,31 @@
+// Tests copied from dotnet/runtime repo. Original source code can be found here:
+// https://github.com/dotnet/runtime/blob/main/src/libraries/Common/tests/System/Collections/CollectionAsserts.cs
+
+using System.Collections.Generic;
+using System.Linq;
+using Xunit;
+
+namespace PerfView.Collections.Tests
+{
+ internal static class CollectionAsserts
+ {
+ public static void EqualUnordered(ICollection expected, ICollection actual)
+ {
+ Assert.Equal(expected == null, actual == null);
+ if (expected == null)
+ {
+ return;
+ }
+
+ // Lookups are an aggregated collections (enumerable contents), but ordered.
+ ILookup