diff --git a/eng/MSBuild/LegacySupport.props b/eng/MSBuild/LegacySupport.props
index 842951ab867..2983903a196 100644
--- a/eng/MSBuild/LegacySupport.props
+++ b/eng/MSBuild/LegacySupport.props
@@ -23,7 +23,7 @@
-
+
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/BufferChunk.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/BufferChunk.cs
index 0d7d54cfdd6..d17eacb3484 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/BufferChunk.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/BufferChunk.cs
@@ -15,11 +15,13 @@ namespace Microsoft.Extensions.Caching.Hybrid.Internal;
internal readonly struct BufferChunk
{
private const int FlagReturnToPool = (1 << 31);
-
private readonly int _lengthAndPoolFlag;
- public byte[]? Array { get; } // null for default
+ public byte[]? OversizedArray { get; } // null for default
+
+ public bool HasValue => OversizedArray is not null;
+ public int Offset { get; }
public int Length => _lengthAndPoolFlag & ~FlagReturnToPool;
public bool ReturnToPool => (_lengthAndPoolFlag & FlagReturnToPool) != 0;
@@ -27,8 +29,9 @@ internal readonly struct BufferChunk
public BufferChunk(byte[] array)
{
Debug.Assert(array is not null, "expected valid array input");
- Array = array;
+ OversizedArray = array;
_lengthAndPoolFlag = array!.Length;
+ Offset = 0;
// assume not pooled, if exact-sized
// (we don't expect array.Length to be negative; we're really just saying
@@ -39,11 +42,12 @@ public BufferChunk(byte[] array)
Debug.Assert(Length == array.Length, "array length not respected");
}
- public BufferChunk(byte[] array, int length, bool returnToPool)
+ public BufferChunk(byte[] array, int offset, int length, bool returnToPool)
{
Debug.Assert(array is not null, "expected valid array input");
Debug.Assert(length >= 0, "expected valid length");
- Array = array;
+ OversizedArray = array;
+ Offset = offset;
_lengthAndPoolFlag = length | (returnToPool ? FlagReturnToPool : 0);
Debug.Assert(ReturnToPool == returnToPool, "return-to-pool not respected");
Debug.Assert(Length == length, "length not respected");
@@ -58,7 +62,7 @@ public byte[] ToArray()
}
var copy = new byte[length];
- Buffer.BlockCopy(Array!, 0, copy, 0, length);
+ Buffer.BlockCopy(OversizedArray!, Offset, copy, 0, length);
return copy;
// Note on nullability of Array; the usage here is that a non-null array
@@ -73,15 +77,19 @@ internal void RecycleIfAppropriate()
{
if (ReturnToPool)
{
- ArrayPool<byte>.Shared.Return(Array!);
+ ArrayPool<byte>.Shared.Return(OversizedArray!);
}
Unsafe.AsRef(in this) = default; // anti foot-shotgun double-return guard; not 100%, but worth doing
- Debug.Assert(Array is null && !ReturnToPool, "expected clean slate after recycle");
+ Debug.Assert(OversizedArray is null && !ReturnToPool, "expected clean slate after recycle");
}
+ internal ArraySegment<byte> AsArraySegment() => Length == 0 ? default! : new(OversizedArray!, Offset, Length);
+
+ internal ReadOnlySpan<byte> AsSpan() => Length == 0 ? default : new(OversizedArray!, Offset, Length);
+
// get the data as a ROS; for note on null-logic of Array!, see comment in ToArray
- internal ReadOnlySequence<byte> AsSequence() => Length == 0 ? default : new ReadOnlySequence<byte>(Array!, 0, Length);
+ internal ReadOnlySequence<byte> AsSequence() => Length == 0 ? default : new ReadOnlySequence<byte>(OversizedArray!, Offset, Length);
internal BufferChunk DoNotReturnToPool()
{
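For orientation: the reshaped `BufferChunk` pairs a possibly oversized pooled array with an explicit Offset/Length window, and every reader (`AsSpan`, `AsSequence`, `ToArray`) honours that window. A minimal usage sketch, assuming access to the internal type (for example from within the library or its test project); the helper name and scenario are illustrative only:

```csharp
using System;
using System.Buffers;
using System.Diagnostics;
using Microsoft.Extensions.Caching.Hybrid.Internal;

internal static class BufferChunkSketch
{
    internal static byte[] Snapshot(ReadOnlySpan<byte> source)
    {
        byte[] oversized = ArrayPool<byte>.Shared.Rent(source.Length); // pool may hand back a larger array
        source.CopyTo(oversized);

        // the chunk owns the pooled array but exposes only the committed window
        var chunk = new BufferChunk(oversized, offset: 0, length: source.Length, returnToPool: true);
        Debug.Assert(chunk.AsSpan().Length == source.Length, "window should be right-sized");

        byte[] copy = chunk.ToArray();  // defensive, right-sized copy independent of the pool
        chunk.RecycleIfAppropriate();   // single return to the pool (guarded against double-return)
        return copy;
    }
}
```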
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs
index 05edc65dc06..59d2f59df41 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs
@@ -3,6 +3,7 @@
using System;
using System.Diagnostics;
+using System.Runtime.CompilerServices;
using System.Threading;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
@@ -13,10 +14,22 @@ internal partial class DefaultHybridCache
{
internal abstract class CacheItem
{
+ private readonly long _creationTimestamp;
+
+ protected CacheItem(long creationTimestamp, TagSet tags)
+ {
+ Tags = tags;
+ _creationTimestamp = creationTimestamp;
+ }
+
private int _refCount = 1; // the number of pending operations against this cache item
public abstract bool DebugIsImmutable { get; }
+ public long CreationTimestamp => _creationTimestamp;
+
+ public TagSet Tags { get; }
+
// Note: the ref count is the number of callers anticipating this value at any given time. Initially,
// it is one for a simple "get the value" flow, but if another call joins with us, it'll be incremented.
// If either cancels, it will get decremented, with the entire flow being cancelled if it ever becomes
@@ -27,6 +40,9 @@ internal abstract class CacheItem
internal int RefCount => Volatile.Read(ref _refCount);
+ internal void UnsafeSetCreationTimestamp(long timestamp)
+ => Unsafe.AsRef(in _creationTimestamp) = timestamp;
+
internal static readonly PostEvictionDelegate SharedOnEviction = static (key, value, reason, state) =>
{
if (value is CacheItem item)
@@ -88,6 +104,11 @@ protected virtual void OnFinalRelease() // any required release semantics
internal abstract class CacheItem<T> : CacheItem
{
+ protected CacheItem(long creationTimestamp, TagSet tags)
+ : base(creationTimestamp, tags)
+ {
+ }
+
public abstract bool TryGetSize(out long size);
// Attempt to get a value that was *not* previously reserved.
@@ -112,6 +133,7 @@ public T GetReservedValue(ILogger log)
static void Throw() => throw new ObjectDisposedException("The cache item has been recycled before the value was obtained");
}
- internal static CacheItem<T> Create() => ImmutableTypeCache<T>.IsImmutable ? new ImmutableCacheItem<T>() : new MutableCacheItem<T>();
+ internal static CacheItem<T> Create(long creationTimestamp, TagSet tags) => ImmutableTypeCache<T>.IsImmutable
+ ? new ImmutableCacheItem<T>(creationTimestamp, tags) : new MutableCacheItem<T>(creationTimestamp, tags);
}
}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Debug.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Debug.cs
index a9901103555..cf40c8e9ed1 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Debug.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Debug.cs
@@ -47,14 +47,13 @@ internal void DebugOnlyIncrementOutstandingBuffers()
}
#endif
- private partial class MutableCacheItem<T>
+ internal partial class MutableCacheItem<T>
{
#if DEBUG
private DefaultHybridCache? _cache; // for buffer-tracking - only needed in DEBUG
#endif
[Conditional("DEBUG")]
- [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "Instance state used in debug")]
internal void DebugOnlyTrackBuffer(DefaultHybridCache cache)
{
#if DEBUG
@@ -63,11 +62,12 @@ internal void DebugOnlyTrackBuffer(DefaultHybridCache cache)
{
_cache?.DebugOnlyIncrementOutstandingBuffers();
}
+#else
+ _ = this; // dummy just to prevent CA1822, never hit
#endif
}
[Conditional("DEBUG")]
- [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "Instance state used in debug")]
private void DebugOnlyDecrementOutstandingBuffers()
{
#if DEBUG
@@ -75,6 +75,8 @@ private void DebugOnlyDecrementOutstandingBuffers()
{
_cache?.DebugOnlyDecrementOutstandingBuffers();
}
+#else
+ _ = this; // dummy just to prevent CA1822, never hit
#endif
}
}
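The `_ = this;` lines above replace the previous CA1822 suppressions: the method only touches instance state inside `#if DEBUG`, so release builds would otherwise see a member that could be made static. A standalone sketch of the same pattern (type and member names are illustrative, not from the diff):

```csharp
using System.Diagnostics;

internal sealed class Widget
{
#if DEBUG
    private int _debugCounter; // instance state that only exists in DEBUG builds
#endif

    [Conditional("DEBUG")] // call sites are removed entirely in release builds
    internal void DebugOnlyTrack()
    {
#if DEBUG
        _debugCounter++; // real work, DEBUG only
#else
        _ = this;        // touch instance state so CA1822 ("mark members as static") stays quiet
#endif
    }
}
```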
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs
index 2e803d87ad6..1898cf2bb9c 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs
@@ -8,10 +8,15 @@ namespace Microsoft.Extensions.Caching.Hybrid.Internal;
internal partial class DefaultHybridCache
{
- private sealed class ImmutableCacheItem<T> : CacheItem<T> // used to hold types that do not require defensive copies
+ internal sealed class ImmutableCacheItem<T> : CacheItem<T> // used to hold types that do not require defensive copies
{
private static ImmutableCacheItem<T>? _sharedDefault;
+ public ImmutableCacheItem(long creationTimestamp, TagSet tags)
+ : base(creationTimestamp, tags)
+ {
+ }
+
private T _value = default!; // deferred until SetValue
public long Size { get; private set; } = -1;
@@ -25,7 +30,7 @@ public static ImmutableCacheItem GetReservedShared()
ImmutableCacheItem<T>? obj = Volatile.Read(ref _sharedDefault);
if (obj is null || !obj.TryReserve())
{
- obj = new();
+ obj = new(0, TagSet.Empty); // timestamp doesn't matter - not used in L1/L2
_ = obj.TryReserve(); // this is reliable on a new instance
Volatile.Write(ref _sharedDefault, obj);
}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs
index 230a657bdc3..33dfc9bb75d 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs
@@ -2,6 +2,8 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System;
+using System.Buffers;
+using System.Buffers.Binary;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
@@ -14,11 +16,15 @@ namespace Microsoft.Extensions.Caching.Hybrid.Internal;
internal partial class DefaultHybridCache
{
+ private const int MaxCacheDays = 1000;
+ private const string TagKeyPrefix = "__MSFT_HCT__";
+ private static readonly DistributedCacheEntryOptions _tagInvalidationEntryOptions = new() { AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(MaxCacheDays) };
+
[SuppressMessage("Performance", "CA1849:Call async methods when in an async method", Justification = "Manual sync check")]
[SuppressMessage("Usage", "VSTHRD003:Avoid awaiting foreign Tasks", Justification = "Manual sync check")]
[SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "Explicit async exception handling")]
[SuppressMessage("Reliability", "CA2000:Dispose objects before losing scope", Justification = "Deliberate recycle only on success")]
- internal ValueTask<BufferChunk> GetFromL2Async(string key, CancellationToken token)
+ internal ValueTask<BufferChunk> GetFromL2DirectAsync(string key, CancellationToken token)
{
switch (GetFeatures(CacheFeatures.BackendCache | CacheFeatures.BackendBuffers))
{
@@ -48,7 +54,7 @@ internal ValueTask GetFromL2Async(string key, CancellationToken tok
}
BufferChunk result = pendingBuffers.GetAwaiter().GetResult()
- ? new(writer.DetachCommitted(out var length), length, returnToPool: true)
+ ? new(writer.DetachCommitted(out var length), 0, length, returnToPool: true)
: default;
writer.Dispose(); // it is not accidental that this isn't "using"; avoid recycling if not 100% sure what happened
return new(result);
@@ -65,35 +71,114 @@ static async Task AwaitedLegacyAsync(Task pending, Default
static async Task<BufferChunk> AwaitedBuffersAsync(ValueTask<bool> pending, RecyclableArrayBufferWriter<byte> writer)
{
BufferChunk result = await pending.ConfigureAwait(false)
- ? new(writer.DetachCommitted(out var length), length, returnToPool: true)
+ ? new(writer.DetachCommitted(out var length), 0, length, returnToPool: true)
: default;
writer.Dispose(); // it is not accidental that this isn't "using"; avoid recycling if not 100% sure what happened
return result;
}
}
- internal ValueTask SetL2Async(string key, in BufferChunk buffer, HybridCacheEntryOptions? options, CancellationToken token)
+ internal ValueTask SetL2Async(string key, CacheItem cacheItem, in BufferChunk buffer, HybridCacheEntryOptions? options, CancellationToken token)
+ => HasBackendCache ? WritePayloadAsync(key, cacheItem, buffer, options, token) : default;
+
+ internal ValueTask SetDirectL2Async(string key, in BufferChunk buffer, DistributedCacheEntryOptions options, CancellationToken token)
{
- Debug.Assert(buffer.Array is not null, "array should be non-null");
+ Debug.Assert(buffer.OversizedArray is not null, "array should be non-null");
switch (GetFeatures(CacheFeatures.BackendCache | CacheFeatures.BackendBuffers))
{
case CacheFeatures.BackendCache: // legacy byte[]-based
- var arr = buffer.Array!;
- if (arr.Length != buffer.Length)
+ var arr = buffer.OversizedArray!;
+ if (buffer.Offset != 0 || arr.Length != buffer.Length)
{
// we'll need a right-sized snapshot
arr = buffer.ToArray();
}
- return new(_backendCache!.SetAsync(key, arr, GetOptions(options), token));
+ return new(_backendCache!.SetAsync(key, arr, options, token));
case CacheFeatures.BackendCache | CacheFeatures.BackendBuffers: // ReadOnlySequence-based
var cache = Unsafe.As<IBufferDistributedCache>(_backendCache!); // type-checked already
- return cache.SetAsync(key, buffer.AsSequence(), GetOptions(options), token);
+ return cache.SetAsync(key, buffer.AsSequence(), options, token);
}
return default;
}
+ [SuppressMessage("Performance", "CA1849:Call async methods when in an async method", Justification = "Manual async core implementation")]
+ internal ValueTask InvalidateL2TagAsync(string tag, long timestamp, CancellationToken token)
+ {
+ if (!HasBackendCache)
+ {
+ return default; // no L2
+ }
+
+ byte[] oversized = ArrayPool<byte>.Shared.Rent(sizeof(long));
+ BinaryPrimitives.WriteInt64LittleEndian(oversized, timestamp);
+ var pending = SetDirectL2Async(TagKeyPrefix + tag, new BufferChunk(oversized, 0, sizeof(long), false), _tagInvalidationEntryOptions, token);
+
+ if (pending.IsCompletedSuccessfully)
+ {
+ pending.GetAwaiter().GetResult(); // ensure observed (IVTS etc)
+ ArrayPool<byte>.Shared.Return(oversized);
+ return default;
+ }
+ else
+ {
+ return AwaitedAsync(pending, oversized);
+ }
+
+ static async ValueTask AwaitedAsync(ValueTask pending, byte[] oversized)
+ {
+ await pending.ConfigureAwait(false);
+ ArrayPool<byte>.Shared.Return(oversized);
+ }
+ }
+
+ [SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "Cancellation handled internally")]
+ [SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "All failure is critical")]
+ internal async Task<long> SafeReadTagInvalidationAsync(string tag)
+ {
+ Debug.Assert(HasBackendCache, "shouldn't be here without L2");
+
+ const int READ_TIMEOUT = 4000;
+
+ try
+ {
+ using var cts = new CancellationTokenSource(millisecondsDelay: READ_TIMEOUT);
+ var buffer = await GetFromL2DirectAsync(TagKeyPrefix + tag, cts.Token).ConfigureAwait(false);
+
+ long timestamp;
+ if (buffer.OversizedArray is not null)
+ {
+ if (buffer.Length == sizeof(long))
+ {
+ timestamp = BinaryPrimitives.ReadInt64LittleEndian(buffer.AsSpan());
+ }
+ else
+ {
+ // not what we expected! assume invalid
+ timestamp = CurrentTimestamp();
+ }
+
+ buffer.RecycleIfAppropriate();
+ }
+ else
+ {
+ timestamp = 0; // never invalidated
+ }
+
+ return timestamp;
+ }
+ catch (Exception ex)
+ {
+ // ^^^ this catch is the "Safe" in "SafeReadTagInvalidationAsync"
+ Debug.WriteLine(ex.Message);
+
+ // if anything goes wrong reading tag invalidations, we have to assume the tag is invalid
+ return CurrentTimestamp();
+ }
+ }
+
internal void SetL1(string key, CacheItem value, HybridCacheEntryOptions? options)
{
// incr ref-count for the cache itself; this *may* be released via the NeedsEvictionCallback path
@@ -105,7 +190,7 @@ internal void SetL1(string key, CacheItem value, HybridCacheEntryOptions?
// that actually commits the add - so: if we fault, we don't want to try
// committing a partially configured cache entry
ICacheEntry cacheEntry = _localCache.CreateEntry(key);
- cacheEntry.AbsoluteExpirationRelativeToNow = options?.LocalCacheExpiration ?? _defaultLocalCacheExpiration;
+ cacheEntry.AbsoluteExpirationRelativeToNow = GetL1AbsoluteExpirationRelativeToNow(options);
cacheEntry.Value = value;
if (value.TryGetSize(out var size))
@@ -128,6 +213,20 @@ internal void SetL1(string key, CacheItem value, HybridCacheEntryOptions?
}
}
+ private async ValueTask WritePayloadAsync(string key, CacheItem cacheItem, BufferChunk payload, HybridCacheEntryOptions? options, CancellationToken token)
+ {
+ // bundle a serialized payload inside the wrapper used at the DC layer
+ var maxLength = HybridCachePayload.GetMaxBytes(key, cacheItem.Tags, payload.Length);
+ var oversized = ArrayPool.Shared.Rent(maxLength);
+
+ var length = HybridCachePayload.Write(oversized, key, cacheItem.CreationTimestamp, GetL2AbsoluteExpirationRelativeToNow(options),
+ HybridCachePayload.PayloadFlags.None, cacheItem.Tags, payload.AsSequence());
+
+ await SetDirectL2Async(key, new(oversized, 0, length, true), GetL2DistributedCacheOptions(options), token).ConfigureAwait(false);
+
+ ArrayPool.Shared.Return(oversized);
+ }
+
private BufferChunk GetValidPayloadSegment(byte[]? payload)
{
if (payload is not null)
@@ -154,12 +253,17 @@ private void ThrowPayloadLengthExceeded(int size) // splitting the exception bit
#if NET8_0_OR_GREATER
[SuppressMessage("Maintainability", "CA1508:Avoid dead conditional code", Justification = "False positive from unsafe accessor")]
#endif
- private DistributedCacheEntryOptions GetOptions(HybridCacheEntryOptions? options)
+ private DistributedCacheEntryOptions GetL2DistributedCacheOptions(HybridCacheEntryOptions? options)
{
DistributedCacheEntryOptions? result = null;
- if (options is not null && options.Expiration.HasValue && options.Expiration.GetValueOrDefault() != _defaultExpiration)
+ if (options is not null)
{
- result = ToDistributedCacheEntryOptions(options);
+ var expiration = GetL2AbsoluteExpirationRelativeToNow(options);
+ if (expiration != _defaultExpiration)
+ {
+ // ^^^ avoid creating unnecessary DC options objects if the expiration still matches the default
+ result = ToDistributedCacheEntryOptions(options);
+ }
}
return result ?? _defaultDistributedCacheExpiration;
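The tag-invalidation markers written by `InvalidateL2TagAsync` above are ordinary distributed-cache entries: the key is the reserved prefix plus the tag, and the value is the invalidation timestamp as eight little-endian bytes with a very long (`MaxCacheDays`) expiry. A standalone sketch of that round trip against a plain `IDistributedCache` (the cache instance, helper class, and tag name are hypothetical):

```csharp
using System;
using System.Buffers.Binary;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Distributed;

internal static class TagMarkerSketch
{
    private const string TagKeyPrefix = "__MSFT_HCT__"; // mirrors the constant above

    internal static async Task<long> WriteAndReadAsync(IDistributedCache l2, string tag)
    {
        // write: 8 bytes, little-endian UTC ticks, with a long absolute expiry
        byte[] payload = new byte[sizeof(long)];
        BinaryPrimitives.WriteInt64LittleEndian(payload, DateTimeOffset.UtcNow.UtcTicks);
        await l2.SetAsync(TagKeyPrefix + tag, payload,
            new DistributedCacheEntryOptions { AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1000) });

        // read back, mirroring SafeReadTagInvalidationAsync:
        // missing => "never invalidated" (0); unexpected length => assume invalidated "now"
        byte[]? stored = await l2.GetAsync(TagKeyPrefix + tag);
        if (stored is null)
        {
            return 0;
        }

        return stored.Length == sizeof(long)
            ? BinaryPrimitives.ReadInt64LittleEndian(stored)
            : DateTimeOffset.UtcNow.UtcTicks;
    }
}
```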
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs
index db95e8c4590..8bd2310b4be 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs
@@ -8,12 +8,17 @@ namespace Microsoft.Extensions.Caching.Hybrid.Internal;
internal partial class DefaultHybridCache
{
- private sealed partial class MutableCacheItem<T> : CacheItem<T> // used to hold types that require defensive copies
+ internal sealed partial class MutableCacheItem<T> : CacheItem<T> // used to hold types that require defensive copies
{
private IHybridCacheSerializer<T>? _serializer;
private BufferChunk _buffer;
private T? _fallbackValue; // only used in the case of serialization failures
+ public MutableCacheItem(long creationTimestamp, TagSet tags)
+ : base(creationTimestamp, tags)
+ {
+ }
+
public override bool NeedsEvictionCallback => _buffer.ReturnToPool;
public override bool DebugIsImmutable => false;
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Serialization.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Serialization.cs
index d12b2cce592..cb39696d532 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Serialization.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Serialization.cs
@@ -67,7 +67,7 @@ private bool TrySerialize(T value, out BufferChunk buffer, out IHybridCacheSe
serializer.Serialize(value, writer);
- buffer = new(writer.DetachCommitted(out var length), length, returnToPool: true); // remove buffer ownership from the writer
+ buffer = new(writer.DetachCommitted(out var length), 0, length, returnToPool: true); // remove buffer ownership from the writer
writer.Dispose(); // we're done with the writer
return true;
}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Stampede.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Stampede.cs
index ef5c570c670..660233e41ef 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Stampede.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Stampede.cs
@@ -3,6 +3,7 @@
using System;
using System.Collections.Concurrent;
+using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
@@ -13,7 +14,7 @@ internal partial class DefaultHybridCache
private readonly ConcurrentDictionary<StampedeKey, StampedeState> _currentOperations = new();
// returns true for a new session (in which case: we need to start the work), false for a pre-existing session
- public bool GetOrCreateStampedeState<TState, T>(string key, HybridCacheEntryFlags flags, out StampedeState<TState, T> stampedeState, bool canBeCanceled)
+ public bool GetOrCreateStampedeState<TState, T>(string key, HybridCacheEntryFlags flags, out StampedeState<TState, T> stampedeState, bool canBeCanceled, IEnumerable<string>? tags)
{
var stampedeKey = new StampedeKey(key, flags);
@@ -27,7 +28,7 @@ public bool GetOrCreateStampedeState(string key, HybridCacheEntryFlag
// Most common scenario here, then, is that we're not fighting with anyone else
// go ahead and create a placeholder state object and *try* to add it.
- stampedeState = new StampedeState<TState, T>(this, stampedeKey, canBeCanceled);
+ stampedeState = new StampedeState<TState, T>(this, stampedeKey, TagSet.Create(tags), canBeCanceled);
if (_currentOperations.TryAdd(stampedeKey, stampedeState))
{
// successfully added; indeed, no-one else was fighting: we're done
@@ -56,8 +57,9 @@ public bool GetOrCreateStampedeState(string key, HybridCacheEntryFlag
// Check whether the value was L1-cached by an outgoing operation (for *us* to check needs local-cache-read,
// and for *them* to have updated needs local-cache-write, but since the shared us/them key includes flags,
// we can skip this if *either* flag is set).
- if ((flags & HybridCacheEntryFlags.DisableLocalCache) == 0 && _localCache.TryGetValue(key, out var untyped)
- && untyped is CacheItem<T> typed && typed.TryReserve())
+ if ((flags & HybridCacheEntryFlags.DisableLocalCache) == 0
+ && TryGetExisting<T>(key, out var typed)
+ && typed.TryReserve())
{
stampedeState.SetResultDirect(typed);
return false; // the work has ALREADY been done
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs
index 77322eecee6..19615674af5 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs
@@ -2,8 +2,10 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System;
+using System.Buffers;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
+using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -28,14 +30,14 @@ internal sealed class StampedeState : StampedeState
internal void SetResultDirect(CacheItem<T> value)
=> _result?.TrySetResult(value);
- public StampedeState(DefaultHybridCache cache, in StampedeKey key, bool canBeCanceled)
- : base(cache, key, CacheItem<T>.Create(), canBeCanceled)
+ public StampedeState(DefaultHybridCache cache, in StampedeKey key, TagSet tags, bool canBeCanceled)
+ : base(cache, key, CacheItem<T>.Create(cache.CurrentTimestamp(), tags), canBeCanceled)
{
_result = new(TaskCreationOptions.RunContinuationsAsynchronously);
}
- public StampedeState(DefaultHybridCache cache, in StampedeKey key, CancellationToken token)
- : base(cache, key, CacheItem<T>.Create(), token)
+ public StampedeState(DefaultHybridCache cache, in StampedeKey key, TagSet tags, CancellationToken token)
+ : base(cache, key, CacheItem<T>.Create(cache.CurrentTimestamp(), tags), token)
{
// no TCS in this case - this is for SetValue only
}
@@ -161,14 +163,28 @@ static async Task AwaitedAsync(ILogger log, Task> task)
[SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "In this case the cancellation token is provided internally via SharedToken")]
[SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "Exception is passed through to faulted task result")]
+ [SuppressMessage("Reliability", "EA0002:Use 'System.TimeProvider' to make the code easier to test", Justification = "Does not apply")]
private async Task BackgroundFetchAsync()
{
bool eventSourceEnabled = HybridCacheEventSource.Log.IsEnabled();
try
{
+ var activeFlags = Key.Flags;
+ if ((activeFlags & HybridCacheEntryFlags.DisableDistributedCache) != HybridCacheEntryFlags.DisableDistributedCache)
+ {
+ // in order to use distributed cache, the tags and keys must be valid unicode, to avoid security complications
+ if (!ValidateUnicodeCorrectness(Cache._logger, Key.Key, CacheItem.Tags))
+ {
+ activeFlags |= HybridCacheEntryFlags.DisableDistributedCache;
+ }
+ }
+
// read from L2 if appropriate
- if ((Key.Flags & HybridCacheEntryFlags.DisableDistributedCacheRead) == 0)
+ if ((activeFlags & HybridCacheEntryFlags.DisableDistributedCacheRead) == 0)
{
+ // kick off any necessary tag invalidation fetches
+ Cache.PrefetchTags(CacheItem.Tags);
+
BufferChunk result;
try
{
@@ -177,10 +193,10 @@ private async Task BackgroundFetchAsync()
HybridCacheEventSource.Log.DistributedCacheGet();
}
- result = await Cache.GetFromL2Async(Key.Key, SharedToken).ConfigureAwait(false);
+ result = await Cache.GetFromL2DirectAsync(Key.Key, SharedToken).ConfigureAwait(false);
if (eventSourceEnabled)
{
- if (result.Array is not null)
+ if (result.HasValue)
{
HybridCacheEventSource.Log.DistributedCacheHit();
}
@@ -210,15 +226,40 @@ private async Task BackgroundFetchAsync()
result = default; // treat as "miss"
}
- if (result.Array is not null)
+ if (result.HasValue)
{
- SetResultAndRecycleIfAppropriate(ref result);
- return;
+ // result is the wider payload including HC headers; unwrap it:
+ var parseResult = HybridCachePayload.TryParse(result.AsArraySegment(), Key.Key, CacheItem.Tags, Cache, out var payload,
+ out var flags, out var entropy, out var pendingTags, out var fault);
+ switch (parseResult)
+ {
+ case HybridCachePayload.HybridCachePayloadParseResult.Success:
+ // check any pending expirations, if necessary
+ if (pendingTags.IsEmpty || !await Cache.IsAnyTagExpiredAsync(pendingTags, CacheItem.CreationTimestamp).ConfigureAwait(false))
+ {
+ // move into the payload segment (minus any framing/header/etc data)
+ result = new(payload.Array!, payload.Offset, payload.Count, result.ReturnToPool);
+ SetResultAndRecycleIfAppropriate(ref result);
+ return;
+ }
+
+ break;
+ case HybridCachePayload.HybridCachePayloadParseResult.ExpiredByEntry:
+ case HybridCachePayload.HybridCachePayloadParseResult.ExpiredByWildcard:
+ case HybridCachePayload.HybridCachePayloadParseResult.ExpiredByTag:
+ // we don't need to log anything in the case of expiration
+ break;
+ default:
+ Cache._logger.CacheBackendDataRejected(parseResult, fault);
+ break;
+ }
+
+ result.RecycleIfAppropriate();
}
}
// nothing from L2; invoke the underlying data store
- if ((Key.Flags & HybridCacheEntryFlags.DisableUnderlyingData) == 0)
+ if ((activeFlags & HybridCacheEntryFlags.DisableUnderlyingData) == 0)
{
// invoke the callback supplied by the caller
T newValue;
@@ -253,6 +294,35 @@ private async Task BackgroundFetchAsync()
throw;
}
+ // check whether we're going to hit a timing problem with tag invalidation
+ if (!Cache.IsValid(CacheItem))
+ {
+ // When writing to L1, we need to avoid a problem where either "*" or one of
+ // the active tags matches "now" - we get into a problem whereby it is
+ // ambiguous whether the data is invalidated; consider all of the following happen
+ // *in the same measured instance*:
+ // - write with value A
+ // - invalidate by tag (or wildcard)
+ // - write with value B
+ // Both A and B have the same timestamp as the invalidated one; to avoid this problem,
+ // we need to detect this (very rare) scenario, and inject an artificial delay, such that
+ // B effectively gets written at a later time.
+ var time = Cache.CurrentTimestamp();
+ if (time <= CacheItem.CreationTimestamp)
+ {
+ // Clock hasn't changed; this is *very rare*, and honestly mostly applies to
+ // tests with dummy fetch calls; inject an artificial delay and re-fetch
+ // the time.
+ await System.Threading.Tasks.Task.Delay(1, CancellationToken.None).ConfigureAwait(false);
+ time = Cache.CurrentTimestamp();
+ }
+
+ // We can safely update the timestamp without fear of torn values etc; no competing code
+ // will access this until we set it into L1, which happens towards the *end* of this method,
+ // and we (the current thread/path) are the only execution for this instance.
+ CacheItem.UnsafeSetCreationTimestamp(time);
+ }
+
// If we're writing this value *anywhere*, we're going to need to serialize; this is obvious
// in the case of L2, but we also need it for L1, because MemoryCache might be enforcing
// SizeLimit (we can't know - it is an abstraction), and for *that* we need to know the item size.
@@ -261,9 +331,8 @@ private async Task BackgroundFetchAsync()
// Rephrasing that: the only scenario in which we *do not* need to serialize is if:
// - it is an ImmutableCacheItem (so we don't need bytes for the CacheItem, L1)
// - we're not writing to L2
-
CacheItem<T> cacheItem = CacheItem;
- bool skipSerialize = cacheItem is ImmutableCacheItem && (Key.Flags & FlagsDisableL1AndL2Write) == FlagsDisableL1AndL2Write;
+ bool skipSerialize = cacheItem is ImmutableCacheItem && (activeFlags & FlagsDisableL1AndL2Write) == FlagsDisableL1AndL2Write;
if (skipSerialize)
{
@@ -274,7 +343,6 @@ private async Task BackgroundFetchAsync()
// ^^^ The first thing we need to do is make sure we're not getting into a thread race over buffer disposal.
// In particular, if this cache item is somehow so short-lived that the buffers would be released *before* we're
// done writing them to L2, which happens *after* we've provided the value to consumers.
-
BufferChunk bufferToRelease = default;
if (Cache.TrySerialize(newValue, out var buffer, out var serializer))
{
@@ -297,12 +365,12 @@ private async Task BackgroundFetchAsync()
// from this point onwards happens in the background, from the perspective of the calling code.
// Write to L2 if appropriate.
- if ((Key.Flags & HybridCacheEntryFlags.DisableDistributedCacheWrite) == 0)
+ if ((activeFlags & HybridCacheEntryFlags.DisableDistributedCacheWrite) == 0)
{
// We already have the payload serialized, so this is trivial to do.
try
{
- await Cache.SetL2Async(Key.Key, in buffer, _options, SharedToken).ConfigureAwait(false);
+ await Cache.SetL2Async(Key.Key, cacheItem, in buffer, _options, SharedToken).ConfigureAwait(false);
if (eventSourceEnabled)
{
@@ -375,7 +443,7 @@ private void SetDefaultResult()
private void SetResultAndRecycleIfAppropriate(ref BufferChunk value)
{
// set a result from L2 cache
- Debug.Assert(value.Array is not null, "expected buffer");
+ Debug.Assert(value.OversizedArray is not null, "expected buffer");
IHybridCacheSerializer<T> serializer = Cache.GetSerializer<T>();
CacheItem<T> cacheItem;
@@ -383,7 +451,7 @@ private void SetResultAndRecycleIfAppropriate(ref BufferChunk value)
{
case ImmutableCacheItem<T> immutable:
// deserialize; and store object; buffer can be recycled now
- immutable.SetValue(serializer.Deserialize(new(value.Array!, 0, value.Length)), value.Length);
+ immutable.SetValue(serializer.Deserialize(new(value.OversizedArray!, value.Offset, value.Length)), value.Length);
value.RecycleIfAppropriate();
cacheItem = immutable;
break;
@@ -472,4 +540,86 @@ private void SetResult(CacheItem value)
}
}
}
+
+ [SuppressMessage("Major Code Smell", "S1121:Assignments should not be made from within sub-expressions", Justification = "Reasonable in this case, due to stack alloc scope.")]
+ private static bool ValidateUnicodeCorrectness(ILogger logger, string key, TagSet tags)
+ {
+ var maxChars = Math.Max(key.Length, tags.MaxLength());
+ var maxBytes = HybridCachePayload.Encoding.GetMaxByteCount(maxChars);
+
+ byte[] leasedBytes = [];
+ char[] leasedChars = [];
+
+ Span<byte> byteBuffer = maxBytes <= 128 ? stackalloc byte[128] : (leasedBytes = ArrayPool<byte>.Shared.Rent(maxBytes));
+ Span<char> charBuffer = maxChars <= 128 ? stackalloc char[128] : (leasedChars = ArrayPool<char>.Shared.Rent(maxChars));
+
+ bool isValid = true;
+
+ if (!Test(key, byteBuffer, charBuffer))
+ {
+ Log.KeyInvalidUnicode(logger);
+ isValid = false;
+ }
+
+ if (isValid)
+ {
+ switch (tags.Count)
+ {
+ case 0:
+ break;
+ case 1:
+ if (!Test(tags.GetSinglePrechecked(), byteBuffer, charBuffer))
+ {
+ Log.TagInvalidUnicode(logger);
+ isValid = false;
+ break;
+ }
+
+ break;
+ default:
+ foreach (var tag in tags.GetSpanPrechecked())
+ {
+ if (!Test(tag, byteBuffer, charBuffer))
+ {
+ Log.TagInvalidUnicode(logger);
+ isValid = false;
+ break;
+ }
+ }
+
+ break;
+ }
+ }
+
+ ArrayPool<char>.Shared.Return(leasedChars);
+ ArrayPool<byte>.Shared.Return(leasedBytes);
+ return isValid;
+
+ static unsafe bool Test(string value, Span<byte> byteBuffer, Span<char> charBuffer)
+ {
+ // for reliable confidence of unicode correctness: encode and decode, and verify equality
+#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
+ byteBuffer = byteBuffer.Slice(0, HybridCachePayload.Encoding.GetBytes(value.AsSpan(), byteBuffer));
+ charBuffer = charBuffer.Slice(0, HybridCachePayload.Encoding.GetChars(byteBuffer, charBuffer));
+#else
+ unsafe
+ {
+ int bytes;
+ fixed (byte* bPtr = byteBuffer)
+ {
+ fixed (char* cPtr = value)
+ {
+ bytes = HybridCachePayload.Encoding.GetBytes(cPtr, value.Length, bPtr, byteBuffer.Length);
+ }
+
+ fixed (char* cPtr = charBuffer)
+ {
+ charBuffer = charBuffer.Slice(0, HybridCachePayload.Encoding.GetChars(bPtr, bytes, cPtr, charBuffer.Length));
+ }
+ }
+ }
+#endif
+ return charBuffer.SequenceEqual(value.AsSpan());
+ }
+ }
}
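The key/tag validation above relies on a simple property: with a non-throwing UTF-8 encoding, any string containing ill-formed UTF-16 (for example a lone surrogate) does not survive an encode/decode round trip, because invalid units are replaced with U+FFFD. A standalone sketch of the same idea, allocating for clarity rather than using the pooled buffers above (type and member names are illustrative):

```csharp
using System.Text;

internal static class UnicodeRoundTripSketch
{
    // mirrors the configuration of HybridCachePayload.Encoding: no BOM, no throw on invalid data
    private static readonly UTF8Encoding _utf8 = new(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: false);

    internal static bool RoundTripsCleanly(string value)
    {
        byte[] bytes = _utf8.GetBytes(value);    // ill-formed UTF-16 becomes U+FFFD replacement output
        string decoded = _utf8.GetString(bytes); // decode back and compare with the original
        return decoded == value;
    }
}

// e.g. RoundTripsCleanly("region:eu") is true; RoundTripsCleanly("\ud800") is false.
```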
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.TagInvalidation.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.TagInvalidation.cs
new file mode 100644
index 00000000000..cdfa0476a55
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.TagInvalidation.cs
@@ -0,0 +1,262 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Concurrent;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ private static readonly Task<long> _zeroTimestamp = Task.FromResult(0L);
+
+ private readonly ConcurrentDictionary<string, Task<long>> _tagInvalidationTimes = [];
+
+#if NET9_0_OR_GREATER
+ private readonly ConcurrentDictionary<string, Task<long>>.AlternateLookup<ReadOnlySpan<char>> _tagInvalidationTimesBySpan;
+ private readonly bool _tagInvalidationTimesUseAltLookup;
+#endif
+
+ private Task<long> _globalInvalidateTimestamp;
+
+ public override ValueTask RemoveByTagAsync(string tag, CancellationToken token = default)
+ {
+ if (string.IsNullOrWhiteSpace(tag))
+ {
+ return default; // nothing sensible to do
+ }
+
+ var now = CurrentTimestamp();
+ InvalidateTagLocalCore(tag, now, isNow: true); // isNow to be 100% explicit
+ return InvalidateL2TagAsync(tag, now, token);
+ }
+
+ public bool IsValid(CacheItem cacheItem)
+ {
+ var timestamp = cacheItem.CreationTimestamp;
+
+ if (IsWildcardExpired(timestamp))
+ {
+ return false;
+ }
+
+ var tags = cacheItem.Tags;
+ switch (tags.Count)
+ {
+ case 0:
+ return true;
+
+ case 1:
+ return !IsTagExpired(tags.GetSinglePrechecked(), timestamp, out _);
+
+ default:
+ bool allValid = true;
+ foreach (var tag in tags.GetSpanPrechecked())
+ {
+ if (IsTagExpired(tag, timestamp, out _))
+ {
+ allValid = false; // but check them all, to kick-off tag fetch
+ }
+ }
+
+ return allValid;
+ }
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Usage", "VSTHRD002:Avoid problematic synchronous waits", Justification = "Completion-checked")]
+ public bool IsWildcardExpired(long timestamp)
+ {
+ if (_globalInvalidateTimestamp.IsCompleted)
+ {
+ if (timestamp <= _globalInvalidateTimestamp.Result)
+ {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Usage", "VSTHRD002:Avoid problematic synchronous waits", Justification = "Completion-checked")]
+ public bool IsTagExpired(ReadOnlySpan<char> tag, long timestamp, out bool isPending)
+ {
+ isPending = false;
+#if NET9_0_OR_GREATER
+ if (_tagInvalidationTimesUseAltLookup && _tagInvalidationTimesBySpan.TryGetValue(tag, out var pending))
+ {
+ if (pending.IsCompleted)
+ {
+ return timestamp <= pending.Result;
+ }
+ else
+ {
+ isPending = true;
+ return true; // assume invalid until completed
+ }
+ }
+ else if (!HasBackendCache)
+ {
+ // not invalidated, and no L2 to check
+ return false;
+ }
+#endif
+
+ // fallback to using a string
+ return IsTagExpired(tag.ToString(), timestamp, out isPending);
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Usage", "VSTHRD002:Avoid problematic synchronous waits", Justification = "Completion-checked")]
+ public bool IsTagExpired(string tag, long timestamp, out bool isPending)
+ {
+ isPending = false;
+ if (!_tagInvalidationTimes.TryGetValue(tag, out var pending))
+ {
+ // not in the tag invalidation cache; if we have L2, need to check there
+ if (HasBackendCache)
+ {
+ pending = SafeReadTagInvalidationAsync(tag);
+ _ = _tagInvalidationTimes.TryAdd(tag, pending);
+ }
+ else
+ {
+ // not invalidated, and no L2 to check
+ return false;
+ }
+ }
+
+ if (pending.IsCompleted)
+ {
+ return timestamp <= pending.Result;
+ }
+ else
+ {
+ isPending = true;
+ return true; // assume invalid until completed
+ }
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "Ack")]
+ public ValueTask<bool> IsAnyTagExpiredAsync(TagSet tags, long timestamp)
+ {
+ return tags.Count switch
+ {
+ 0 => new(false),
+ 1 => IsTagExpiredAsync(tags.GetSinglePrechecked(), timestamp),
+ _ => SlowAsync(this, tags, timestamp),
+ };
+
+ static async ValueTask<bool> SlowAsync(DefaultHybridCache @this, TagSet tags, long timestamp)
+ {
+ int count = tags.Count;
+ for (int i = 0; i < count; i++)
+ {
+ if (await @this.IsTagExpiredAsync(tags[i], timestamp).ConfigureAwait(false))
+ {
+ return true;
+ }
+ }
+
+ return false;
+ }
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "Ack")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1849:Call async methods when in an async method", Justification = "Completion-checked")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Usage", "VSTHRD003:Avoid awaiting foreign Tasks", Justification = "Manual async unwrap")]
+ public ValueTask<bool> IsTagExpiredAsync(string tag, long timestamp)
+ {
+ if (!_tagInvalidationTimes.TryGetValue(tag, out var pending))
+ {
+ // not in the tag invalidation cache; if we have L2, need to check there
+ if (HasBackendCache)
+ {
+ pending = SafeReadTagInvalidationAsync(tag);
+ _ = _tagInvalidationTimes.TryAdd(tag, pending);
+ }
+ else
+ {
+ // not invalidated, and no L2 to check
+ return new(false);
+ }
+ }
+
+ if (pending.IsCompleted)
+ {
+ return new(timestamp <= pending.Result);
+ }
+ else
+ {
+ return AwaitedAsync(pending, timestamp);
+ }
+
+ static async ValueTask<bool> AwaitedAsync(Task<long> pending, long timestamp) => timestamp <= await pending.ConfigureAwait(false);
+ }
+
+ internal void DebugInvalidateTag(string tag, Task<long> pending)
+ {
+ if (tag == TagSet.WildcardTag)
+ {
+ _globalInvalidateTimestamp = pending;
+ }
+ else
+ {
+ _tagInvalidationTimes[tag] = pending;
+ }
+ }
+
+ internal long CurrentTimestamp() => _clock.GetUtcNow().UtcTicks;
+
+ internal void PrefetchTags(TagSet tags)
+ {
+ if (HasBackendCache && !tags.IsEmpty)
+ {
+ // only needed if L2 exists
+ switch (tags.Count)
+ {
+ case 1:
+ PrefetchTagWithBackendCache(tags.GetSinglePrechecked());
+ break;
+ default:
+ foreach (var tag in tags.GetSpanPrechecked())
+ {
+ PrefetchTagWithBackendCache(tag);
+ }
+
+ break;
+ }
+ }
+ }
+
+ private void PrefetchTagWithBackendCache(string tag)
+ {
+ if (!_tagInvalidationTimes.TryGetValue(tag, out _))
+ {
+ _ = _tagInvalidationTimes.TryAdd(tag, SafeReadTagInvalidationAsync(tag));
+ }
+ }
+
+ private void InvalidateTagLocalCore(string tag, long timestamp, bool isNow)
+ {
+ var timestampTask = Task.FromResult(timestamp);
+ if (tag == TagSet.WildcardTag)
+ {
+ _globalInvalidateTimestamp = timestampTask;
+ if (isNow && !HasBackendCache)
+ {
+ // no L2, so we don't need any prior invalidated tags any more; can clear
+ _tagInvalidationTimes.Clear();
+ }
+ }
+ else
+ {
+ _tagInvalidationTimes[tag] = timestampTask;
+
+ if (HybridCacheEventSource.Log.IsEnabled())
+ {
+ HybridCacheEventSource.Log.TagInvalidated();
+ }
+ }
+ }
+}
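All of the expiry checks in this file reduce to one comparison: an entry is treated as expired by a tag (or by the wildcard) when its creation timestamp is at or before the timestamp at which that tag was last invalidated. A minimal sketch of that rule and its same-instant edge case, which the artificial delay in `BackgroundFetchAsync` is there to avoid (values are illustrative):

```csharp
// Sketch of the validity rule, not library API; timestamps are UTC ticks from TimeProvider.
static bool IsExpiredBy(long entryCreationTimestamp, long tagInvalidationTimestamp)
    => entryCreationTimestamp <= tagInvalidationTimestamp;

// Same-instant writes lose (the "<=" is deliberate): a value written at t=100 is expired by an
// invalidation at t=100, which is why the timestamp gets bumped when the clock has not moved;
// a value re-written at t=101 survives the invalidation at t=100.
System.Console.WriteLine(IsExpiredBy(100, 100)); // True
System.Console.WriteLine(IsExpiredBy(101, 100)); // False
```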
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.cs
index 71dbf71fd54..7a517a20cac 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.cs
@@ -3,6 +3,7 @@
using System;
using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
@@ -20,8 +21,11 @@ namespace Microsoft.Extensions.Caching.Hybrid.Internal;
/// <summary>
/// The inbuilt implementation of <see cref="HybridCache"/>, as registered via <see cref="HybridCacheServiceExtensions.AddHybridCache(IServiceCollection)"/>.
/// </summary>
+[SkipLocalsInit]
internal sealed partial class DefaultHybridCache : HybridCache
{
+ internal const int DefaultExpirationMinutes = 5;
+
// reserve non-printable characters from keys, to prevent potential L2 abuse
private static readonly char[] _keyReservedCharacters = Enumerable.Range(0, 32).Select(i => (char)i).ToArray();
@@ -35,6 +39,7 @@ internal sealed partial class DefaultHybridCache : HybridCache
private readonly HybridCacheOptions _options;
private readonly ILogger _logger;
private readonly CacheFeatures _features; // used to avoid constant type-testing
+ private readonly TimeProvider _clock;
private readonly HybridCacheEntryFlags _hardFlags; // *always* present (for example, because no L2)
private readonly HybridCacheEntryFlags _defaultFlags; // note this already includes hardFlags
@@ -60,13 +65,15 @@ internal enum CacheFeatures
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private CacheFeatures GetFeatures(CacheFeatures mask) => _features & mask;
+ internal bool HasBackendCache => (_features & CacheFeatures.BackendCache) != 0;
+
public DefaultHybridCache(IOptions<HybridCacheOptions> options, IServiceProvider services)
{
_services = Throw.IfNull(services);
_localCache = services.GetRequiredService<IMemoryCache>();
_options = options.Value;
_logger = services.GetService<ILoggerFactory>()?.CreateLogger(typeof(HybridCache)) ?? NullLogger.Instance;
-
+ _clock = services.GetService<TimeProvider>() ?? TimeProvider.System;
_backendCache = services.GetService<IDistributedCache>(); // note optional
// ignore L2 if it is really just the same L1, wrapped
@@ -104,9 +111,16 @@ public DefaultHybridCache(IOptions options, IServiceProvider
}
_defaultFlags = (defaultEntryOptions?.Flags ?? HybridCacheEntryFlags.None) | _hardFlags;
- _defaultExpiration = defaultEntryOptions?.Expiration ?? TimeSpan.FromMinutes(5);
- _defaultLocalCacheExpiration = defaultEntryOptions?.LocalCacheExpiration ?? TimeSpan.FromMinutes(1);
+ _defaultExpiration = defaultEntryOptions?.Expiration ?? TimeSpan.FromMinutes(DefaultExpirationMinutes);
+ _defaultLocalCacheExpiration = GetEffectiveLocalCacheExpiration(defaultEntryOptions) ?? _defaultExpiration;
_defaultDistributedCacheExpiration = new DistributedCacheEntryOptions { AbsoluteExpirationRelativeToNow = _defaultExpiration };
+
+#if NET9_0_OR_GREATER
+ _tagInvalidationTimesUseAltLookup = _tagInvalidationTimes.TryGetAlternateLookup(out _tagInvalidationTimesBySpan);
+#endif
+
+ // do this last
+ _globalInvalidateTimestamp = _backendCache is null ? _zeroTimestamp : SafeReadTagInvalidationAsync(TagSet.WildcardTag);
}
internal IDistributedCache? BackendCache => _backendCache;
@@ -131,10 +145,11 @@ public override ValueTask GetOrCreateAsync(string key, TState stat
}
bool eventSourceEnabled = HybridCacheEventSource.Log.IsEnabled();
+
if ((flags & HybridCacheEntryFlags.DisableLocalCacheRead) == 0)
{
- if (_localCache.TryGetValue(key, out var untyped)
- && untyped is CacheItem<T> typed && typed.TryGetValue(_logger, out var value))
+ if (TryGetExisting<T>(key, out var typed)
+ && typed.TryGetValue(_logger, out var value))
{
// short-circuit
if (eventSourceEnabled)
@@ -153,7 +168,7 @@ public override ValueTask GetOrCreateAsync(string key, TState stat
}
}
- if (GetOrCreateStampedeState(key, flags, out var stampede, canBeCanceled))
+ if (GetOrCreateStampedeState(key, flags, out var stampede, canBeCanceled, tags))
{
// new query; we're responsible for making it happen
if (canBeCanceled)
@@ -187,18 +202,24 @@ public override ValueTask RemoveAsync(string key, CancellationToken token = defa
return _backendCache is null ? default : new(_backendCache.RemoveAsync(key, token));
}
- public override ValueTask RemoveByTagAsync(string tag, CancellationToken token = default)
- => default; // tags not yet implemented
-
public override ValueTask SetAsync(string key, T value, HybridCacheEntryOptions? options = null, IEnumerable? tags = null, CancellationToken token = default)
{
// since we're forcing a write: disable L1+L2 read; we'll use a direct pass-thru of the value as the callback, to reuse all the code
// note also that stampede token is not shared with anyone else
var flags = GetEffectiveFlags(options) | (HybridCacheEntryFlags.DisableLocalCacheRead | HybridCacheEntryFlags.DisableDistributedCacheRead);
- var state = new StampedeState<T, T>(this, new StampedeKey(key, flags), token);
+ var state = new StampedeState<T, T>(this, new StampedeKey(key, flags), TagSet.Create(tags), token);
return new(state.ExecuteDirectAsync(value, static (state, _) => new(state), options)); // note this spans L2 write etc
}
+ // exposed as internal for testability
+ internal TimeSpan GetL1AbsoluteExpirationRelativeToNow(HybridCacheEntryOptions? options) => GetEffectiveLocalCacheExpiration(options) ?? _defaultLocalCacheExpiration;
+
+ internal TimeSpan GetL2AbsoluteExpirationRelativeToNow(HybridCacheEntryOptions? options) => options?.Expiration ?? _defaultExpiration;
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal HybridCacheEntryFlags GetEffectiveFlags(HybridCacheEntryOptions? options)
+ => (options?.Flags | _hardFlags) ?? _defaultFlags;
+
private static ValueTask<T> RunWithoutCacheAsync<TState, T>(HybridCacheEntryFlags flags, TState state,
Func<TState, CancellationToken, ValueTask<T>> underlyingDataCallback,
CancellationToken cancellationToken)
@@ -207,9 +228,15 @@ private static ValueTask RunWithoutCacheAsync(HybridCacheEntryFlag
? underlyingDataCallback(state, cancellationToken) : default;
}
- [MethodImpl(MethodImplOptions.AggressiveInlining)]
- private HybridCacheEntryFlags GetEffectiveFlags(HybridCacheEntryOptions? options)
- => (options?.Flags | _hardFlags) ?? _defaultFlags;
+ private static TimeSpan? GetEffectiveLocalCacheExpiration(HybridCacheEntryOptions? options)
+ {
+ // If LocalCacheExpiration is not specified, then use the option's Expiration, to keep the two in sync by default.
+ // Or in other words: the inheritance of "LocalCacheExpiration : Expiration" within a single options object takes
+ // precedence over the inheritance between per-entry options and global options, and if a caller
+ // provides a per-entry option with *just* the Expiration specified, then that is assumed to also
+ // specify the LocalCacheExpiration.
+ return options is not null ? options.LocalCacheExpiration ?? options.Expiration : null;
+ }
private bool ValidateKey(string key)
{
@@ -234,4 +261,25 @@ private bool ValidateKey(string key)
// nothing to complain about
return true;
}
+
+ private bool TryGetExisting<T>(string key, [NotNullWhen(true)] out CacheItem<T>? value)
+ {
+ if (_localCache.TryGetValue(key, out var untyped) && untyped is CacheItem<T> typed)
+ {
+ // check tag-based and global invalidation
+ if (IsValid(typed))
+ {
+ value = typed;
+ return true;
+ }
+
+ // remove from L1; note there's a little unavoidable race here; worst case is that
+ // a fresher value gets dropped - we'll have to accept it
+ _localCache.Remove(key);
+ }
+
+ // failure
+ value = null;
+ return false;
+ }
}
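The net effect of `GetEffectiveLocalCacheExpiration` and the new `GetL1`/`GetL2` helpers is that a per-entry `Expiration` now also drives the L1 expiration unless `LocalCacheExpiration` is set explicitly. An illustrative expectation, mirroring the fallback expression above with hypothetical values (not taken from the diff):

```csharp
using System;
using Microsoft.Extensions.Caching.Hybrid;

// Only Expiration is set: the single value now drives both L2 and L1,
// instead of L1 silently reverting to the global LocalCacheExpiration default.
var onlyExpiration = new HybridCacheEntryOptions { Expiration = TimeSpan.FromMinutes(2) };

// Both are set: L2 uses 10 minutes, L1 uses the explicit 1 minute.
var both = new HybridCacheEntryOptions
{
    Expiration = TimeSpan.FromMinutes(10),
    LocalCacheExpiration = TimeSpan.FromMinutes(1),
};

Console.WriteLine(onlyExpiration.LocalCacheExpiration ?? onlyExpiration.Expiration); // 00:02:00
Console.WriteLine(both.LocalCacheExpiration);                                        // 00:01:00
```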
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCacheEventSource.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCacheEventSource.cs
index 2db179cfc4c..412f713034f 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCacheEventSource.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCacheEventSource.cs
@@ -27,6 +27,7 @@ internal sealed class HybridCacheEventSource : EventSource
internal const int EventIdStampedeJoin = 12;
internal const int EventIdUnderlyingDataQueryCanceled = 13;
internal const int EventIdDistributedCacheCanceled = 14;
+ internal const int EventIdTagInvalidated = 15;
// fast local counters
private long _totalLocalCacheHit;
@@ -39,6 +40,7 @@ internal sealed class HybridCacheEventSource : EventSource
private long _totalLocalCacheWrite;
private long _totalDistributedCacheWrite;
private long _totalStampedeJoin;
+ private long _totalTagInvalidations;
#if !(NETSTANDARD2_0 || NET462)
// full Counter infrastructure
@@ -60,6 +62,7 @@ public void ResetCounters()
Volatile.Write(ref _totalLocalCacheWrite, 0);
Volatile.Write(ref _totalDistributedCacheWrite, 0);
Volatile.Write(ref _totalStampedeJoin, 0);
+ Volatile.Write(ref _totalTagInvalidations, 0);
}
[Event(EventIdLocalCacheHit, Level = EventLevel.Verbose)]
@@ -185,6 +188,14 @@ internal void StampedeJoin()
WriteEvent(EventIdStampedeJoin);
}
+ [Event(EventIdTagInvalidated, Level = EventLevel.Verbose)]
+ internal void TagInvalidated()
+ {
+ DebugAssertEnabled();
+ _ = Interlocked.Increment(ref _totalTagInvalidations);
+ WriteEvent(EventIdTagInvalidated);
+ }
+
#if !(NETSTANDARD2_0 || NET462)
[System.Diagnostics.CodeAnalysis.SuppressMessage("Reliability", "CA2000:Dispose objects before losing scope", Justification = "Lifetime exceeds obvious scope; handed to event source")]
[NonEvent]
@@ -204,6 +215,7 @@ protected override void OnEventCommand(EventCommandEventArgs command)
new PollingCounter("total-local-cache-writes", this, () => Volatile.Read(ref _totalLocalCacheWrite)) { DisplayName = "Total Local Cache Writes" },
new PollingCounter("total-distributed-cache-writes", this, () => Volatile.Read(ref _totalDistributedCacheWrite)) { DisplayName = "Total Distributed Cache Writes" },
new PollingCounter("total-stampede-joins", this, () => Volatile.Read(ref _totalStampedeJoin)) { DisplayName = "Total Stampede Joins" },
+ new PollingCounter("total-tag-invalidations", this, () => Volatile.Read(ref _totalTagInvalidations)) { DisplayName = "Total Tag Invalidations" },
];
}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCachePayload.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCachePayload.cs
new file mode 100644
index 00000000000..edb7b5d2c98
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCachePayload.cs
@@ -0,0 +1,417 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Buffers;
+using System.Buffers.Binary;
+using System.Text;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+// logic related to the payload that we send to IDistributedCache
+internal static class HybridCachePayload
+{
+ // FORMAT (v1):
+ // fixed-size header (so that it can be reliably broadcast)
+ // 2 bytes: sentinel+version
+ // 2 bytes: entropy (a random value, to help disambiguate multi-node writes that collide at the same time)
+ // 8 bytes: creation time (UTC ticks, little-endian)
+
+ // and the dynamic part
+ // varint: flags (little-endian)
+ // varint: payload size
+ // varint: duration (ticks relative to creation time)
+ // varint: tag count
+ // varint+utf8: key
+ // (for each tag): varint+utf8: tagN
+ // (payload-size bytes): payload
+ // 2 bytes: sentinel+version (repeated, for reliability)
+ // (at this point, all bytes *must* be exhausted, or it is treated as failure)
+
+ // the encoding for varint etc is akin to BinaryWriter, also comparable to FormatterBinaryWriter in OutputCaching
+
+ private const int MaxVarint64Length = 10;
+ private const byte SentinelPrefix = 0x03;
+ private const byte ProtocolVersion = 0x01;
+ private const ushort UInt16SentinelPrefixPair = (ProtocolVersion << 8) | SentinelPrefix;
+
+ private static readonly Random _entropySource = new(); // doesn't need to be cryptographic
+
+ [Flags]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Minor Code Smell", "S2344:Enumeration type names should not have \"Flags\" or \"Enum\" suffixes", Justification = "Clarity")]
+ internal enum PayloadFlags : uint
+ {
+ None = 0,
+ }
+
+ internal enum HybridCachePayloadParseResult
+ {
+ Success = 0,
+ FormatNotRecognized = 1,
+ InvalidData = 2,
+ InvalidKey = 3,
+ ExpiredByEntry = 4,
+ ExpiredByTag = 5,
+ ExpiredByWildcard = 6,
+ ParseFault = 7,
+ }
+
+ public static UTF8Encoding Encoding { get; } = new(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: false);
+
+ public static int GetMaxBytes(string key, TagSet tags, int payloadSize)
+ {
+ int length =
+ 2 // sentinel+version
+ + 2 // entropy
+ + 8 // creation time
+ + MaxVarint64Length // flags
+ + MaxVarint64Length // payload size
+ + MaxVarint64Length // duration
+ + MaxVarint64Length // tag count
+ + 2 // trailing sentinel + version
+ + GetMaxStringLength(key.Length) // key
+ + payloadSize; // the payload itself
+
+ // keys
+ switch (tags.Count)
+ {
+ case 0:
+ break;
+ case 1:
+ length += GetMaxStringLength(tags.GetSinglePrechecked().Length);
+ break;
+ default:
+ foreach (var tag in tags.GetSpanPrechecked())
+ {
+ length += GetMaxStringLength(tag.Length);
+ }
+
+ break;
+ }
+
+ return length;
+
+ // pay the cost to get the actual length, to avoid significant
+ // over-estimate in ASCII cases
+ static int GetMaxStringLength(int charCount) =>
+ MaxVarint64Length + Encoding.GetMaxByteCount(charCount);
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S109:Magic numbers should not be used", Justification = "Encoding details; clear in context")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Security", "CA5394:Do not use insecure randomness", Justification = "Not cryptographic")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S107:Methods should not have too many parameters", Justification = "Borderline")]
+ public static int Write(byte[] destination,
+ string key, long creationTime, TimeSpan duration, PayloadFlags flags, TagSet tags, ReadOnlySequence<byte> payload)
+ {
+ var payloadLength = checked((int)payload.Length);
+
+ BinaryPrimitives.WriteUInt16LittleEndian(destination.AsSpan(0, 2), UInt16SentinelPrefixPair);
+ BinaryPrimitives.WriteUInt16LittleEndian(destination.AsSpan(2, 2), (ushort)_entropySource.Next(0, 0x010000)); // Next is exclusive at RHS
+ BinaryPrimitives.WriteInt64LittleEndian(destination.AsSpan(4, 8), creationTime);
+ var len = 12;
+
+ long durationTicks = duration.Ticks;
+ if (durationTicks < 0)
+ {
+ durationTicks = 0;
+ }
+
+ Write7BitEncodedInt64(destination, ref len, (uint)flags);
+ Write7BitEncodedInt64(destination, ref len, (ulong)payloadLength);
+ Write7BitEncodedInt64(destination, ref len, (ulong)durationTicks);
+ Write7BitEncodedInt64(destination, ref len, (ulong)tags.Count);
+ WriteString(destination, ref len, key);
+ switch (tags.Count)
+ {
+ case 0:
+ break;
+ case 1:
+ WriteString(destination, ref len, tags.GetSinglePrechecked());
+ break;
+ default:
+ foreach (var tag in tags.GetSpanPrechecked())
+ {
+ WriteString(destination, ref len, tag);
+ }
+
+ break;
+ }
+
+ payload.CopyTo(destination.AsSpan(len, payloadLength));
+ len += payloadLength;
+ BinaryPrimitives.WriteUInt16LittleEndian(destination.AsSpan(len, 2), UInt16SentinelPrefixPair);
+ return len + 2;
+
+ static void Write7BitEncodedInt64(byte[] target, ref int offset, ulong value)
+ {
+ // Write out an int 7 bits at a time. The high bit of the byte,
+ // when on, tells reader to continue reading more bytes.
+ //
+ // Using the constants 0x7F and ~0x7F below offers smaller
+ // codegen than using the constant 0x80.
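+ //
+ // e.g. 300 (0b1_0010_1100) is written as [0xAC, 0x02]: the low 7 bits (0x2C)
+ // with the continuation bit set, then the remaining bits (0x02).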
+
+ while (value > 0x7Fu)
+ {
+ target[offset++] = (byte)((uint)value | ~0x7Fu);
+ value >>= 7;
+ }
+
+ target[offset++] = (byte)value;
+ }
+
+ static void WriteString(byte[] target, ref int offset, string value)
+ {
+ var len = Encoding.GetByteCount(value);
+ Write7BitEncodedInt64(target, ref offset, (ulong)len);
+ offset += Encoding.GetBytes(value, 0, value.Length, target, offset);
+ }
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("StyleCop.CSharp.ReadabilityRules",
+ "SA1108:Block statements should not contain embedded comments", Justification = "Byte offset comments for clarity")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("StyleCop.CSharp.ReadabilityRules",
+ "SA1122:Use string.Empty for empty strings", Justification = "Subjective, but; ugly")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("StyleCop.CSharp.OrderingRules", "SA1204:Static elements should appear before instance elements", Justification = "False positive?")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S109:Magic numbers should not be used", Justification = "Encoding details; clear in context")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S107:Methods should not have too many parameters", Justification = "Borderline")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "Exposed for logging")]
+ public static HybridCachePayloadParseResult TryParse(ArraySegment<byte> source, string key, TagSet knownTags, DefaultHybridCache cache,
+ out ArraySegment<byte> payload, out PayloadFlags flags, out ushort entropy, out TagSet pendingTags, out Exception? fault)
+ {
+ fault = null;
+
+ // note "cache" is used primarily for expiration checks; we don't automatically add etc
+ entropy = 0;
+ payload = default;
+ flags = 0;
+ string[] pendingTagBuffer = [];
+ int pendingTagsCount = 0;
+
+ pendingTags = TagSet.Empty;
+ ReadOnlySpan<byte> bytes = new(source.Array!, source.Offset, source.Count);
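+ // 19 = 12-byte fixed header + four single-byte varints (flags, payload length, duration, tag count)
+ // + a 1-byte length prefix for an empty key + 2-byte trailing sentinel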
+ if (bytes.Length < 19) // minimum needed for empty payload and zero tags
+ {
+ return HybridCachePayloadParseResult.FormatNotRecognized;
+ }
+
+ var now = cache.CurrentTimestamp();
+ char[] scratch = [];
+ try
+ {
+ switch (BinaryPrimitives.ReadUInt16LittleEndian(bytes))
+ {
+ case UInt16SentinelPrefixPair:
+ entropy = BinaryPrimitives.ReadUInt16LittleEndian(bytes.Slice(2));
+ var creationTime = BinaryPrimitives.ReadInt64LittleEndian(bytes.Slice(4));
+ bytes = bytes.Slice(12); // the end of the fixed part
+
+ if (cache.IsWildcardExpired(creationTime))
+ {
+ return HybridCachePayloadParseResult.ExpiredByWildcard;
+ }
+
+ if (!TryRead7BitEncodedInt64(ref bytes, out var u64)) // flags
+ {
+ return HybridCachePayloadParseResult.InvalidData;
+ }
+
+ flags = (PayloadFlags)u64;
+
+ if (!TryRead7BitEncodedInt64(ref bytes, out u64) || u64 > int.MaxValue) // payload length
+ {
+ return HybridCachePayloadParseResult.InvalidData;
+ }
+
+ var payloadLength = (int)u64;
+
+ if (!TryRead7BitEncodedInt64(ref bytes, out var duration)) // duration
+ {
+ return HybridCachePayloadParseResult.InvalidData;
+ }
+
+ if ((creationTime + (long)duration) <= now)
+ {
+ return HybridCachePayloadParseResult.ExpiredByEntry;
+ }
+
+ if (!TryRead7BitEncodedInt64(ref bytes, out u64) || u64 > int.MaxValue) // tag count
+ {
+ return HybridCachePayloadParseResult.InvalidData;
+ }
+
+ var tagCount = (int)u64;
+
+ if (!TryReadString(ref bytes, ref scratch, out var stringSpan))
+ {
+ return HybridCachePayloadParseResult.InvalidData;
+ }
+
+ if (!stringSpan.SequenceEqual(key.AsSpan()))
+ {
+ return HybridCachePayloadParseResult.InvalidKey; // key must match!
+ }
+
+ for (int i = 0; i < tagCount; i++)
+ {
+ if (!TryReadString(ref bytes, ref scratch, out stringSpan))
+ {
+ return HybridCachePayloadParseResult.InvalidData;
+ }
+
+ bool isTagExpired;
+ bool isPending;
+ if (knownTags.TryFind(stringSpan, out var tagString))
+ {
+ // prefer to re-use existing tag strings when they exist
+ isTagExpired = cache.IsTagExpired(tagString, creationTime, out isPending);
+ }
+ else
+ {
+ // unknown tag; we may need to materialize a new string for it below
+ isTagExpired = cache.IsTagExpired(stringSpan, creationTime, out isPending);
+ }
+
+ if (isPending)
+ {
+ // might be expired, but the operation is still in-flight
+ if (pendingTagsCount == pendingTagBuffer.Length)
+ {
+ var newBuffer = ArrayPool<string>.Shared.Rent(Math.Max(4, pendingTagsCount * 2));
+ pendingTagBuffer.CopyTo(newBuffer, 0);
+ ArrayPool<string>.Shared.Return(pendingTagBuffer);
+ pendingTagBuffer = newBuffer;
+ }
+
+ pendingTagBuffer[pendingTagsCount++] = tagString ?? stringSpan.ToString();
+ }
+ else if (isTagExpired)
+ {
+ // definitely an expired tag
+ return HybridCachePayloadParseResult.ExpiredByTag;
+ }
+ }
+
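+ // after the key and tags, only the payload plus the 2-byte trailing sentinel should remain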
+ if (bytes.Length != payloadLength + 2
+ || BinaryPrimitives.ReadUInt16LittleEndian(bytes.Slice(payloadLength)) != UInt16SentinelPrefixPair)
+ {
+ return HybridCachePayloadParseResult.InvalidData;
+ }
+
+ var start = source.Offset + source.Count - (payloadLength + 2);
+ payload = new(source.Array!, start, payloadLength);
+
+ // finalize the pending tag buffer (in-flight tag expirations)
+ switch (pendingTagsCount)
+ {
+ case 0:
+ break;
+ case 1:
+ pendingTags = new(pendingTagBuffer[0]);
+ break;
+ default:
+ var final = new string[pendingTagsCount];
+ pendingTagBuffer.CopyTo(final, 0);
+ pendingTags = new(final);
+ break;
+ }
+
+ return HybridCachePayloadParseResult.Success;
+ default:
+ return HybridCachePayloadParseResult.FormatNotRecognized;
+ }
+ }
+ catch (Exception ex)
+ {
+ fault = ex;
+ return HybridCachePayloadParseResult.ParseFault;
+ }
+ finally
+ {
+ ArrayPool<char>.Shared.Return(scratch);
+ ArrayPool<string>.Shared.Return(pendingTagBuffer);
+ }
+
+ static bool TryReadString(ref ReadOnlySpan<byte> buffer, ref char[] scratch, out ReadOnlySpan<char> value)
+ {
+ int length;
+ if (!TryRead7BitEncodedInt64(ref buffer, out var u64Length)
+ || u64Length > int.MaxValue
+ || buffer.Length < (length = (int)u64Length)) // note buffer is now past the prefix via "ref"
+ {
+ value = default;
+ return false;
+ }
+
+ // make sure we have enough buffer space
+ var maxChars = Encoding.GetMaxCharCount(length);
+ if (scratch.Length < maxChars)
+ {
+ ArrayPool<char>.Shared.Return(scratch);
+ scratch = ArrayPool<char>.Shared.Rent(maxChars);
+ }
+
+ // decode
+#if NETCOREAPP3_1_OR_GREATER
+ var charCount = Encoding.GetChars(buffer.Slice(0, length), scratch);
+#else
+ int charCount;
+ unsafe
+ {
+ fixed (byte* bPtr = buffer)
+ {
+ fixed (char* cPtr = scratch)
+ {
+ charCount = Encoding.GetChars(bPtr, length, cPtr, scratch.Length);
+ }
+ }
+ }
+#endif
+ value = new(scratch, 0, charCount);
+ buffer = buffer.Slice(length);
+ return true;
+ }
+
+ static bool TryRead7BitEncodedInt64(ref ReadOnlySpan<byte> buffer, out ulong result)
+ {
+ byte byteReadJustNow;
+
+ // Read the integer 7 bits at a time. The high bit
+ // of the byte when on means to continue reading more bytes.
+ //
+ // There are two failure cases: we've read more than 10 bytes,
+ // or the tenth byte is about to cause integer overflow.
+ // This means that we can read the first 9 bytes without
+ // worrying about integer overflow.
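+ //
+ // e.g. [0xAC, 0x02] decodes as (0xAC & 0x7F) | (0x02 << 7) = 44 + 256 = 300.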
+
+ const int MaxBytesWithoutOverflow = 9;
+ result = 0;
+ int index = 0;
+ for (int shift = 0; shift < MaxBytesWithoutOverflow * 7; shift += 7)
+ {
+ // indexing past the end of a truncated buffer throws; TryParse reports that as a parse fault
+ byteReadJustNow = buffer[index++];
+ result |= (byteReadJustNow & 0x7Ful) << shift;
+
+ if (byteReadJustNow <= 0x7Fu)
+ {
+ buffer = buffer.Slice(index);
+ return true; // early exit
+ }
+ }
+
+ // Read the 10th byte. Since we already read 63 bits,
+ // the value of this byte must fit within 1 bit (64 - 63),
+ // and it must not have the high bit set.
+
+ byteReadJustNow = buffer[index++];
+ if (byteReadJustNow > 0b_1u)
+ {
+ throw new OverflowException();
+ }
+
+ result |= (ulong)byteReadJustNow << (MaxBytesWithoutOverflow * 7);
+ buffer = buffer.Slice(index);
+ return true;
+ }
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/Log.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/Log.cs
index 785107c32ec..c6fbd70c7bf 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/Log.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/Log.cs
@@ -3,6 +3,7 @@
using System;
using Microsoft.Extensions.Logging;
+using static Microsoft.Extensions.Caching.Hybrid.Internal.HybridCachePayload;
namespace Microsoft.Extensions.Caching.Hybrid.Internal;
@@ -16,6 +17,9 @@ internal static partial class Log
internal const int IdCacheBackendReadFailure = 6;
internal const int IdCacheBackendWriteFailure = 7;
internal const int IdKeyInvalidContent = 8;
+ internal const int IdKeyInvalidUnicode = 9;
+ internal const int IdTagInvalidUnicode = 10;
+ internal const int IdCacheBackendDataRejected = 11;
[LoggerMessage(LogLevel.Error, "Cache MaximumPayloadBytes ({Bytes}) exceeded.", EventName = "MaximumPayloadBytesExceeded", EventId = IdMaximumPayloadBytesExceeded, SkipEnabledCheck = false)]
internal static partial void MaximumPayloadBytesExceeded(this ILogger logger, Exception e, int bytes);
@@ -46,4 +50,16 @@ internal static partial class Log
[LoggerMessage(LogLevel.Error, "Cache key contains invalid content.", EventName = "KeyInvalidContent", EventId = IdKeyInvalidContent, SkipEnabledCheck = false)]
internal static partial void KeyInvalidContent(this ILogger logger); // for PII etc reasons, we won't include the actual key
+
+ [LoggerMessage(LogLevel.Error, "Key contains malformed unicode.",
+ EventName = "KeyInvalidUnicode", EventId = IdKeyInvalidUnicode, SkipEnabledCheck = false)]
+ internal static partial void KeyInvalidUnicode(this ILogger logger);
+
+ [LoggerMessage(LogLevel.Error, "Tag contains malformed unicode.",
+ EventName = "TagInvalidUnicode", EventId = IdTagInvalidUnicode, SkipEnabledCheck = false)]
+ internal static partial void TagInvalidUnicode(this ILogger logger);
+
+ [LoggerMessage(LogLevel.Warning, "Cache backend data rejected: {reason}.",
+ EventName = "CacheBackendDataRejected", EventId = IdCacheBackendDataRejected, SkipEnabledCheck = false)]
+ internal static partial void CacheBackendDataRejected(this ILogger logger, HybridCachePayloadParseResult reason, Exception? ex);
}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/RecyclableArrayBufferWriter.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/RecyclableArrayBufferWriter.cs
index 985d55c9f0e..82d7fba4755 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/RecyclableArrayBufferWriter.cs
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/RecyclableArrayBufferWriter.cs
@@ -131,6 +131,8 @@ public Span GetSpan(int sizeHint = 0)
// create a standalone isolated copy of the buffer
public T[] ToArray() => _buffer.AsSpan(0, _index).ToArray();
+ public ReadOnlySequence<T> AsSequence() => new(_buffer, 0, _index);
+
/// <summary>
/// Disconnect the current buffer so that we can store it without it being recycled.
/// </summary>
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/TagSet.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/TagSet.cs
new file mode 100644
index 00000000000..063fcb1034a
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/TagSet.cs
@@ -0,0 +1,238 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Buffers;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+/// <summary>
+/// Represents zero (null), one (string) or more (string[]) tags, avoiding the additional array overhead where possible.
+/// </summary>
+[System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1066:Implement IEquatable when overriding Object.Equals", Justification = "Equals throws by intent")]
+[System.Diagnostics.DebuggerDisplay("{ToString()}")]
+internal readonly struct TagSet
+{
+ public static readonly TagSet Empty = default!;
+
+ // this array is used in CopyTo to efficiently copy out of collections
+ [ThreadStatic]
+ private static string[]? _perThreadSingleLengthArray;
+
+ private readonly object? _tagOrTags;
+
+ internal TagSet(string tag)
+ {
+ Validate(tag);
+ _tagOrTags = tag;
+ }
+
+ internal TagSet(string[] tags)
+ {
+ Debug.Assert(tags is { Length: > 1 }, "should be non-trivial array");
+ foreach (var tag in tags)
+ {
+ Validate(tag);
+ }
+
+ Array.Sort(tags, StringComparer.InvariantCulture);
+ _tagOrTags = tags;
+ }
+
+ public string GetSinglePrechecked() => (string)_tagOrTags!; // we expect this to fail if used on incorrect types
+ public Span<string> GetSpanPrechecked() => (string[])_tagOrTags!; // we expect this to fail if used on incorrect types
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1065:Do not raise exceptions in unexpected locations", Justification = "Intentional; should not be used")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Blocker Code Smell", "S3877:Exceptions should not be thrown from unexpected methods", Justification = "Intentional; should not be used")]
+ public override bool Equals(object? obj) => throw new NotSupportedException();
+
+ // [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1065:Do not raise exceptions in unexpected locations", Justification = "Intentional; should not be used")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Blocker Code Smell", "S3877:Exceptions should not be thrown from unexpected methods", Justification = "Intentional; should not be used")]
+ public override int GetHashCode() => throw new NotSupportedException();
+
+ public override string ToString() => _tagOrTags switch
+ {
+ string tag => tag,
+ string[] tags => string.Join(", ", tags),
+ _ => "(no tags)",
+ };
+
+ public bool IsEmpty => _tagOrTags is null;
+
+ public int Count => _tagOrTags switch
+ {
+ null => 0,
+ string => 1,
+ string[] arr => arr.Length,
+ _ => 0, // should never happen, but treat as empty
+ };
+
+ internal bool IsArray => _tagOrTags is string[];
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Usage", "CA2201:Do not raise reserved exception types", Justification = "This is the most appropriate exception here.")]
+ public string this[int index] => _tagOrTags switch
+ {
+ string tag when index == 0 => tag,
+ string[] tags => tags[index],
+ _ => throw new IndexOutOfRangeException(nameof(index)),
+ };
+
+ public void CopyTo(Span<string> target)
+ {
+ switch (_tagOrTags)
+ {
+ case string tag:
+ target[0] = tag;
+ break;
+ case string[] tags:
+ tags.CopyTo(target);
+ break;
+ }
+ }
+
+ internal static TagSet Create(IEnumerable<string>? tags)
+ {
+ if (tags is null)
+ {
+ return Empty;
+ }
+
+ // note that in multi-tag scenarios we always create a defensive copy
+ if (tags is ICollection<string> collection)
+ {
+ switch (collection.Count)
+ {
+ case 0:
+ return Empty;
+ case 1 when collection is IList<string> list:
+ return new TagSet(list[0]);
+ case 1:
+ // avoid the GetEnumerator() alloc
+ var arr = _perThreadSingleLengthArray ??= new string[1];
+ collection.CopyTo(arr, 0);
+ return new TagSet(arr[0]);
+ default:
+ arr = new string[collection.Count];
+ collection.CopyTo(arr, 0);
+ return new TagSet(arr);
+ }
+ }
+
+ // perhaps overkill, but: avoid allocations as much as possible when unrolling manually
+ using var iterator = tags.GetEnumerator();
+ if (!iterator.MoveNext())
+ {
+ return Empty;
+ }
+
+ var firstTag = iterator.Current;
+ if (!iterator.MoveNext())
+ {
+ return new TagSet(firstTag);
+ }
+
+ string[] oversized = ArrayPool<string>.Shared.Rent(8);
+ oversized[0] = firstTag;
+ int count = 1;
+ do
+ {
+ if (count == oversized.Length)
+ {
+ // grow
+ var bigger = ArrayPool<string>.Shared.Rent(count * 2);
+ oversized.CopyTo(bigger, 0);
+ ArrayPool<string>.Shared.Return(oversized);
+ oversized = bigger;
+ }
+
+ oversized[count++] = iterator.Current;
+ }
+ while (iterator.MoveNext());
+
+ if (count == oversized.Length)
+ {
+ return new TagSet(oversized);
+ }
+ else
+ {
+ var final = oversized.AsSpan(0, count).ToArray();
+ ArrayPool<string>.Shared.Return(oversized);
+ return new TagSet(final);
+ }
+ }
+
+ internal string[] ToArray() // for testing only
+ {
+ var arr = new string[Count];
+ CopyTo(arr);
+ return arr;
+ }
+
+ internal const string WildcardTag = "*";
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("StyleCop.CSharp.ReadabilityRules", "SA1122:Use string.Empty for empty strings", Justification = "Not needed")]
+ internal bool TryFind(ReadOnlySpan<char> span, [NotNullWhen(true)] out string? tag)
+ {
+ switch (_tagOrTags)
+ {
+ case string single when span.SequenceEqual(single.AsSpan()):
+ tag = single;
+ return true;
+ case string[] tags:
+ foreach (string test in tags)
+ {
+ if (span.SequenceEqual(test.AsSpan()))
+ {
+ tag = test;
+ return true;
+ }
+ }
+
+ break;
+ }
+
+ tag = null;
+ return false;
+ }
+
+ internal int MaxLength()
+ {
+ switch (_tagOrTags)
+ {
+ case string single:
+ return single.Length;
+ case string[] tags:
+ int max = 0;
+ foreach (string test in tags)
+ {
+ max = Math.Max(max, test.Length);
+ }
+
+ return max;
+ default:
+ return 0;
+ }
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S3928:Parameter names used into ArgumentException constructors should match an existing one ",
+ Justification = "Using parameter name from public callable API")]
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Usage", "CA2208:Instantiate argument exceptions correctly", Justification = "Using parameter name from public callable API")]
+ private static void Validate(string tag)
+ {
+ if (string.IsNullOrWhiteSpace(tag))
+ {
+ ThrowEmpty();
+ }
+
+ if (tag == WildcardTag)
+ {
+ ThrowReserved();
+ }
+
+ static void ThrowEmpty() => throw new ArgumentException("Tags cannot be empty.", "tags");
+ static void ThrowReserved() => throw new ArgumentException($"The tag '{WildcardTag}' is reserved and cannot be used in this context.", "tags");
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.csproj b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.csproj
index ede3b88ca36..b8aff39eb98 100644
--- a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.csproj
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.csproj
@@ -29,6 +29,8 @@
EXTEXP0018
86
50
+ Fundamentals
+ true
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs
index 4996406c09a..39cd6355b41 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs
@@ -7,12 +7,13 @@
using Microsoft.Extensions.Caching.Hybrid.Internal;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using static Microsoft.Extensions.Caching.Hybrid.Internal.DefaultHybridCache;
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class BufferReleaseTests // note that buffer ref-counting is only enabled for DEBUG builds; can only verify general behaviour without that
+public class BufferReleaseTests : IClassFixture // note that buffer ref-counting is only enabled for DEBUG builds; can only verify general behaviour without that
{
private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Action? config = null)
{
@@ -121,7 +122,11 @@ private static bool Write(IBufferWriter destination, byte[]? buffer)
using (RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(int.MaxValue))
{
serializer.Serialize(await GetAsync(), writer);
- cache.BackendCache.Set(key, writer.ToArray());
+
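+ // wrap the serialized value in a HybridCachePayload envelope so that the backend
+ // entry is in the format the cache now expects to read back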
+ var arr = ArrayPool<byte>.Shared.Rent(HybridCachePayload.GetMaxBytes(key, TagSet.Empty, writer.CommittedBytes));
+ var bytes = HybridCachePayload.Write(arr, key, cache.CurrentTimestamp(), TimeSpan.FromHours(1), 0, TagSet.Empty, writer.AsSequence());
+ cache.BackendCache.Set(key, new ReadOnlySpan<byte>(arr, 0, bytes).ToArray());
+ ArrayPool<byte>.Shared.Return(arr);
}
#if DEBUG
cache.DebugOnlyGetOutstandingBuffers(flush: true);
@@ -180,7 +185,11 @@ private static bool Write(IBufferWriter destination, byte[]? buffer)
using (RecyclableArrayBufferWriter writer = RecyclableArrayBufferWriter.Create(int.MaxValue))
{
serializer.Serialize(await GetAsync(), writer);
- cache.BackendCache.Set(key, writer.ToArray());
+
+ var arr = ArrayPool<byte>.Shared.Rent(HybridCachePayload.GetMaxBytes(key, TagSet.Empty, writer.CommittedBytes));
+ var bytes = HybridCachePayload.Write(arr, key, cache.CurrentTimestamp(), TimeSpan.FromHours(1), 0, TagSet.Empty, writer.AsSequence());
+ cache.BackendCache.Set(key, new ReadOnlySpan<byte>(arr, 0, bytes).ToArray());
+ ArrayPool<byte>.Shared.Return(arr);
}
#if DEBUG
cache.DebugOnlyGetOutstandingBuffers(flush: true);
@@ -225,6 +234,54 @@ private static bool Write(IBufferWriter destination, byte[]? buffer)
static ValueTask GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
}
+ [Fact]
+ public void ImmutableCacheItem_Reservation()
+ {
+ var obj = Assert.IsType<ImmutableCacheItem<string>>(CacheItem<string>.Create(12345, TagSet.Empty));
+ Assert.True(obj.DebugIsImmutable);
+ obj.SetValue("abc", 3);
+ Assert.False(obj.TryReserveBuffer(out _));
+ Assert.True(obj.TryGetValue(NullLogger.Instance, out var value));
+ Assert.Equal("abc", value);
+ Assert.True(obj.TryGetSize(out var size));
+ Assert.Equal(3, size);
+ }
+
+ [Fact]
+ public void MutableCacheItem_Reservation()
+ {
+ var obj = Assert.IsType<MutableCacheItem<Customer>>(CacheItem<Customer>.Create(12345, TagSet.Empty));
+
+ Assert.True(new DefaultJsonSerializerFactory().TryCreateSerializer<Customer>(out var serializer));
+ var target = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
+ serializer.Serialize(new Customer { Id = 42, Name = "Fred" }, target);
+ var arr = target.DetachCommitted(out var length);
+ var chunk = new BufferChunk(arr, 0, length, true);
+ target.Dispose();
+
+ Assert.False(obj.DebugIsImmutable);
+ obj.SetValue(ref chunk, serializer);
+
+ for (int i = 0; i < 3; i++)
+ {
+ Assert.True(obj.TryReserveBuffer(out var lease));
+ Assert.Equal(length, lease.Length);
+ Assert.False(obj.Release());
+
+ Assert.True(obj.TryGetValue(NullLogger.Instance, out var value));
+ Assert.Equal(42, value.Id);
+ Assert.Equal("Fred", value.Name);
+
+ Assert.True(obj.TryGetSize(out var size));
+ Assert.Equal(length, size);
+ }
+
+ Assert.True(obj.Release());
+ Assert.False(obj.TryReserveBuffer(out _));
+ Assert.False(obj.TryGetValue(NullLogger.Instance, out _));
+ Assert.False(obj.TryGetSize(out var _));
+ }
+
public class Customer
{
public int Id { get; set; }
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs
index 5a565866f63..0e86c742c5a 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs
@@ -15,7 +15,7 @@ namespace Microsoft.Extensions.Caching.Hybrid.Tests;
///
/// Validate over-arching expectations of DC implementations, in particular behaviour re IBufferDistributedCache added for HybridCache.
///
-public abstract class DistributedCacheTests
+public abstract class DistributedCacheTests : IClassFixture
{
protected DistributedCacheTests(ITestOutputHelper log)
{
@@ -26,18 +26,17 @@ protected DistributedCacheTests(ITestOutputHelper log)
protected abstract ValueTask ConfigureAsync(IServiceCollection services);
protected abstract bool CustomClockSupported { get; }
- protected FakeTime Clock { get; } = new();
+ internal FakeTime Clock { get; } = new();
- protected sealed class FakeTime : TimeProvider, ISystemClock
+ internal sealed class FakeTime : TimeProvider, ISystemClock
{
- private DateTimeOffset _now = DateTimeOffset.UtcNow;
- public void Reset() => _now = DateTimeOffset.UtcNow;
+ public void Reset() => UtcNow = DateTimeOffset.UtcNow;
- DateTimeOffset ISystemClock.UtcNow => _now;
+ public DateTimeOffset UtcNow { get; private set; } = DateTimeOffset.UtcNow;
- public override DateTimeOffset GetUtcNow() => _now;
+ public override DateTimeOffset GetUtcNow() => UtcNow;
- public void Add(TimeSpan delta) => _now += delta;
+ public void Add(TimeSpan delta) => UtcNow += delta;
}
private async ValueTask InitAsync()
@@ -185,7 +184,7 @@ public async Task ReadOnlySequenceBufferRoundtrip(int size, SequenceKind kind)
Assert.Equal(size, expected.Length);
cache.Set(key, payload, _fiveMinutes);
- RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
+ var writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
Assert.True(cache.TryGet(key, writer));
Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span));
writer.ResetInPlace();
@@ -247,7 +246,7 @@ public async Task ReadOnlySequenceBufferRoundtripAsync(int size, SequenceKind ki
Assert.Equal(size, expected.Length);
await cache.SetAsync(key, payload, _fiveMinutes);
- RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
+ var writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
Assert.True(await cache.TryGetAsync(key, writer));
Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span));
writer.ResetInPlace();
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs
index 5edd99722ac..7b8396eb50d 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs
@@ -6,7 +6,7 @@
using Microsoft.Extensions.DependencyInjection;
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class FunctionalTests
+public class FunctionalTests : IClassFixture
{
private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Action? config = null)
{
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/HybridCacheEventSourceTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/HybridCacheEventSourceTests.cs
index 74876053e34..8e23143475f 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/HybridCacheEventSourceTests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/HybridCacheEventSourceTests.cs
@@ -207,6 +207,19 @@ public async Task StampedeJoin()
listener.AssertRemainingCountersZero();
}
+ [SkippableFact]
+ public async Task TagInvalidated()
+ {
+ AssertEnabled();
+
+ listener.Reset().Source.TagInvalidated();
+ listener.AssertSingleEvent(HybridCacheEventSource.EventIdTagInvalidated, "TagInvalidated", EventLevel.Verbose);
+
+ await AssertCountersAsync();
+ listener.AssertCounter("total-tag-invalidations", "Total Tag Invalidations", 1);
+ listener.AssertRemainingCountersZero();
+ }
+
private void AssertEnabled()
{
// including this data for visibility when tests fail - ETW subsystem can be ... weird
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs
index 850c6a054b9..f82e716f854 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs
@@ -11,7 +11,7 @@
using Xunit.Abstractions;
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class L2Tests(ITestOutputHelper log)
+public class L2Tests(ITestOutputHelper log) : IClassFixture
{
private static string CreateString(bool work = false)
{
@@ -52,7 +52,7 @@ public async Task AssertL2Operations_Immutable(bool buffers)
var backend = Assert.IsAssignableFrom(cache.BackendCache);
Log.WriteLine("Inventing key...");
var s = await cache.GetOrCreateAsync(Me(), ct => new ValueTask(CreateString(true)));
- Assert.Equal(2, backend.OpCount); // GET, SET
+ Assert.Equal(3, backend.OpCount); // (wildcard timestamp GET), GET, SET
Log.WriteLine("Reading with L1...");
for (var i = 0; i < 5; i++)
@@ -62,7 +62,7 @@ public async Task AssertL2Operations_Immutable(bool buffers)
Assert.Same(s, x);
}
- Assert.Equal(2, backend.OpCount); // shouldn't be hit
+ Assert.Equal(3, backend.OpCount); // shouldn't be hit
Log.WriteLine("Reading without L1...");
for (var i = 0; i < 5; i++)
@@ -72,7 +72,7 @@ public async Task AssertL2Operations_Immutable(bool buffers)
Assert.NotSame(s, x);
}
- Assert.Equal(7, backend.OpCount); // should be read every time
+ Assert.Equal(8, backend.OpCount); // should be read every time
Log.WriteLine("Setting value directly");
s = CreateString(true);
@@ -84,16 +84,16 @@ public async Task AssertL2Operations_Immutable(bool buffers)
Assert.Same(s, x);
}
- Assert.Equal(8, backend.OpCount); // SET
+ Assert.Equal(9, backend.OpCount); // SET
Log.WriteLine("Removing key...");
await cache.RemoveAsync(Me());
- Assert.Equal(9, backend.OpCount); // DEL
+ Assert.Equal(10, backend.OpCount); // DEL
Log.WriteLine("Fetching new...");
var t = await cache.GetOrCreateAsync(Me(), ct => new ValueTask(CreateString(true)));
Assert.NotEqual(s, t);
- Assert.Equal(11, backend.OpCount); // GET, SET
+ Assert.Equal(12, backend.OpCount); // GET, SET
}
public sealed class Foo
@@ -110,7 +110,7 @@ public async Task AssertL2Operations_Mutable(bool buffers)
var backend = Assert.IsAssignableFrom(cache.BackendCache);
Log.WriteLine("Inventing key...");
var s = await cache.GetOrCreateAsync(Me(), ct => new ValueTask(new Foo { Value = CreateString(true) }), _expiry);
- Assert.Equal(2, backend.OpCount); // GET, SET
+ Assert.Equal(3, backend.OpCount); // (wildcard timestamp GET), GET, SET
Log.WriteLine("Reading with L1...");
for (var i = 0; i < 5; i++)
@@ -120,7 +120,7 @@ public async Task AssertL2Operations_Mutable(bool buffers)
Assert.NotSame(s, x);
}
- Assert.Equal(2, backend.OpCount); // shouldn't be hit
+ Assert.Equal(3, backend.OpCount); // shouldn't be hit
Log.WriteLine("Reading without L1...");
for (var i = 0; i < 5; i++)
@@ -130,7 +130,7 @@ public async Task AssertL2Operations_Mutable(bool buffers)
Assert.NotSame(s, x);
}
- Assert.Equal(7, backend.OpCount); // should be read every time
+ Assert.Equal(8, backend.OpCount); // should be read every time
Log.WriteLine("Setting value directly");
s = new Foo { Value = CreateString(true) };
@@ -142,16 +142,16 @@ public async Task AssertL2Operations_Mutable(bool buffers)
Assert.NotSame(s, x);
}
- Assert.Equal(8, backend.OpCount); // SET
+ Assert.Equal(9, backend.OpCount); // SET
Log.WriteLine("Removing key...");
await cache.RemoveAsync(Me());
- Assert.Equal(9, backend.OpCount); // DEL
+ Assert.Equal(10, backend.OpCount); // DEL
Log.WriteLine("Fetching new...");
var t = await cache.GetOrCreateAsync(Me(), ct => new ValueTask(new Foo { Value = CreateString(true) }), _expiry);
Assert.NotEqual(s.Value, t.Value);
- Assert.Equal(11, backend.OpCount); // GET, SET
+ Assert.Equal(12, backend.OpCount); // GET, SET
}
private class BufferLoggingCache : LoggingCache, IBufferDistributedCache
@@ -204,7 +204,7 @@ async ValueTask IBufferDistributedCache.TryGetAsync(string key, IBufferWri
}
}
- private class LoggingCache(ITestOutputHelper log, IDistributedCache tail) : IDistributedCache
+ internal class LoggingCache(ITestOutputHelper log, IDistributedCache tail) : IDistributedCache
{
protected ITestOutputHelper Log => log;
protected IDistributedCache Tail => tail;
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/LocalInvalidationTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/LocalInvalidationTests.cs
new file mode 100644
index 00000000000..310f7d5cdce
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/LocalInvalidationTests.cs
@@ -0,0 +1,130 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.Caching.Memory;
+using Microsoft.Extensions.DependencyInjection;
+using Xunit.Abstractions;
+using static Microsoft.Extensions.Caching.Hybrid.Tests.L2Tests;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+public class LocalInvalidationTests(ITestOutputHelper log) : IClassFixture
+{
+ private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Action<IServiceCollection>? config = null)
+ {
+ var services = new ServiceCollection();
+ config?.Invoke(services);
+ services.AddHybridCache();
+ ServiceProvider provider = services.BuildServiceProvider();
+ cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
+ return provider;
+ }
+
+ [Fact]
+ public async Task GlobalInvalidateNoTags()
+ {
+ using var services = GetDefaultCache(out var cache);
+ var value = await cache.GetOrCreateAsync("abc", ct => new(Guid.NewGuid()));
+
+ // should work immediately as-is
+ Assert.Equal(value, await cache.GetOrCreateAsync("abc", ct => new(Guid.NewGuid())));
+
+ // invalidating a normal tag should have no effect
+ await cache.RemoveByTagAsync("foo");
+ Assert.Equal(value, await cache.GetOrCreateAsync("abc", ct => new(Guid.NewGuid())));
+
+ // invalidating everything should force a re-fetch
+ await cache.RemoveByTagAsync("*");
+ var newValue = await cache.GetOrCreateAsync("abc", ct => new(Guid.NewGuid()));
+ Assert.NotEqual(value, newValue);
+
+ // which should now be repeatable again
+ Assert.Equal(newValue, await cache.GetOrCreateAsync("abc", ct => new(Guid.NewGuid())));
+ }
+
+ [Theory]
+ [InlineData(false, false)]
+ [InlineData(true, false)]
+ [InlineData(false, true)]
+ [InlineData(true, true)]
+ public async Task TagBasedInvalidate(bool withL2, bool withExtraTag)
+ {
+ using IMemoryCache l1 = new MemoryCache(new MemoryCacheOptions());
+ IDistributedCache? l2 = null;
+ if (withL2)
+ {
+ MemoryDistributedCacheOptions options = new();
+ MemoryDistributedCache mdc = new(Options.Create(options));
+ l2 = new LoggingCache(log, mdc);
+ }
+
+ Guid lastValue = Guid.Empty;
+
+ // loop because we want to test pre-existing L1/L2 impact
+ for (int i = 0; i < 3; i++)
+ {
+ using var services = GetDefaultCache(out var cache, svc =>
+ {
+ svc.AddSingleton(l1);
+ if (l2 is not null)
+ {
+ svc.AddSingleton(l2);
+ }
+ });
+ var clock = services.GetRequiredService<TimeProvider>();
+
+ string key = "mykey";
+ string tag = "abc";
+ string[] tags = withExtraTag ? [tag, "other"] : [tag];
+ var value = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+ log.WriteLine($"First value: {value}");
+ if (lastValue != Guid.Empty)
+ {
+ Assert.Equal(lastValue, value);
+ }
+
+ // should work immediately as-is
+ var tmp = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+ log.WriteLine($"Second value: {tmp} (should be {value})");
+ Assert.Equal(value, tmp);
+
+ // invalidating a normal tag should have no effect
+ await cache.RemoveByTagAsync("foo");
+ tmp = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+ log.WriteLine($"Value after invalidating tag foo: {tmp} (should be {value})");
+ Assert.Equal(value, tmp);
+
+ // invalidating a tag we have should force a re-fetch
+ await cache.RemoveByTagAsync(tag);
+ var newValue = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+ log.WriteLine($"Value after invalidating tag {tag}: {newValue} (should not be {value})");
+ Assert.NotEqual(value, newValue);
+
+ // which should now be repeatable again
+ tmp = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+ log.WriteLine($"And repeating: {tmp} (should be {newValue})");
+ Assert.Equal(newValue, tmp);
+ value = newValue;
+
+ // invalidating everything should force a re-fetch
+ await cache.RemoveByTagAsync("*");
+ newValue = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+ log.WriteLine($"Value after invalidating tag *: {newValue} (should not be {value})");
+ Assert.NotEqual(value, newValue);
+
+ // which should now be repeatable again
+ tmp = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+ log.WriteLine($"And repeating: {tmp} (should be {newValue})");
+ Assert.Equal(newValue, tmp);
+ lastValue = newValue;
+
+ var now = clock.GetUtcNow().UtcTicks;
+ do
+ {
+ await Task.Delay(10);
+ }
+ while (clock.GetUtcNow().UtcTicks == now);
+ }
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Microsoft.Extensions.Caching.Hybrid.Tests.csproj b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Microsoft.Extensions.Caching.Hybrid.Tests.csproj
index b32d9224462..fb8863cf776 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Microsoft.Extensions.Caching.Hybrid.Tests.csproj
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Microsoft.Extensions.Caching.Hybrid.Tests.csproj
@@ -21,7 +21,6 @@
-
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Options.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Options.cs
new file mode 100644
index 00000000000..ab7b1ac83db
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Options.cs
@@ -0,0 +1,24 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.Extensions.Options;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+
+internal static class Options
+{
+ public static IOptions<T> Create<T>(T value)
+ where T : class
+ => new OptionsImpl<T>(value);
+
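+ // illustrative usage (mirrors the payload tests): IOptions<MemoryDistributedCacheOptions> options = Options.Create(new MemoryDistributedCacheOptions());
+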
+ private sealed class OptionsImpl<T> : IOptions<T>
+ where T : class
+ {
+ public OptionsImpl(T value)
+ {
+ Value = value;
+ }
+
+ public T Value { get; }
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/PayloadTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/PayloadTests.cs
new file mode 100644
index 00000000000..0c3d90ad239
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/PayloadTests.cs
@@ -0,0 +1,324 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.Caching.Memory;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Xunit.Abstractions;
+using static Microsoft.Extensions.Caching.Hybrid.Tests.DistributedCacheTests;
+using static Microsoft.Extensions.Caching.Hybrid.Tests.L2Tests;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+public class PayloadTests(ITestOutputHelper log) : IClassFixture
+{
+ private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Action<IServiceCollection>? config = null)
+ {
+ var services = new ServiceCollection();
+ config?.Invoke(services);
+ services.AddHybridCache();
+ ServiceProvider provider = services.BuildServiceProvider();
+ cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
+ return provider;
+ }
+
+ [Theory]
+ [InlineData("", 1054, 0)]
+ [InlineData("some_tag", 1063, 1)]
+ [InlineData("some_tag,another_tag", 1075, 2)]
+ public void RoundTrip_Success(string delimitedTags, int expectedLength, int tagCount)
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key";
+ var tags = string.IsNullOrEmpty(delimitedTags)
+ ? TagSet.Empty : TagSet.Create(delimitedTags.Split(','));
+ Assert.Equal(tagCount, tags.Count);
+
+ var maxLen = HybridCachePayload.GetMaxBytes(key, tags, bytes.Length);
+ var oversized = ArrayPool<byte>.Shared.Rent(maxLen);
+
+ int actualLength = HybridCachePayload.Write(oversized, key, cache.CurrentTimestamp(), TimeSpan.FromMinutes(1), 0, tags, new(bytes));
+ log.WriteLine($"bytes written: {actualLength}");
+
+ Assert.Equal(expectedLength, actualLength);
+
+ clock.Add(TimeSpan.FromSeconds(10));
+ var result = HybridCachePayload.TryParse(new(oversized, 0, actualLength), key, tags, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ log.WriteLine($"Entropy: {entropy}; Flags: {flags}");
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.Success, result);
+ Assert.True(payload.SequenceEqual(bytes));
+ Assert.True(pendingTags.IsEmpty);
+ }
+
+ [Fact]
+ public void RoundTrip_SelfExpiration()
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key";
+ var tags = TagSet.Create(["some_tag"]);
+ var maxLen = HybridCachePayload.GetMaxBytes(key, tags, bytes.Length);
+ var oversized = ArrayPool<byte>.Shared.Rent(maxLen);
+
+ int actualLength = HybridCachePayload.Write(oversized, key, cache.CurrentTimestamp(), TimeSpan.FromMinutes(1), 0, tags, new(bytes));
+ log.WriteLine($"bytes written: {actualLength}");
+ Assert.Equal(1063, actualLength);
+
+ clock.Add(TimeSpan.FromSeconds(58));
+ var result = HybridCachePayload.TryParse(new(oversized, 0, actualLength), key, tags, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.Success, result);
+ Assert.True(payload.SequenceEqual(bytes));
+ Assert.True(pendingTags.IsEmpty);
+
+ clock.Add(TimeSpan.FromSeconds(4));
+ result = HybridCachePayload.TryParse(new(oversized, 0, actualLength), key, tags, cache, out payload, out flags, out entropy, out pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.ExpiredByEntry, result);
+ Assert.Equal(0, payload.Count);
+ Assert.True(pendingTags.IsEmpty);
+ }
+
+ [Fact]
+ public async Task RoundTrip_WildcardExpiration()
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key";
+ var tags = TagSet.Create(["some_tag"]);
+ var maxLen = HybridCachePayload.GetMaxBytes(key, tags, bytes.Length);
+ var oversized = ArrayPool<byte>.Shared.Rent(maxLen);
+
+ int actualLength = HybridCachePayload.Write(oversized, key, cache.CurrentTimestamp(), TimeSpan.FromMinutes(1), 0, tags, new(bytes));
+ log.WriteLine($"bytes written: {actualLength}");
+ Assert.Equal(1063, actualLength);
+
+ clock.Add(TimeSpan.FromSeconds(2));
+ await cache.RemoveByTagAsync("*");
+
+ var result = HybridCachePayload.TryParse(new(oversized, 0, actualLength), key, tags, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.ExpiredByWildcard, result);
+ Assert.Equal(0, payload.Count);
+ Assert.True(pendingTags.IsEmpty);
+ }
+
+ [Fact]
+ public async Task RoundTrip_TagExpiration()
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key";
+ var tags = TagSet.Create(["some_tag"]);
+ var maxLen = HybridCachePayload.GetMaxBytes(key, tags, bytes.Length);
+ var oversized = ArrayPool<byte>.Shared.Rent(maxLen);
+
+ int actualLength = HybridCachePayload.Write(oversized, key, cache.CurrentTimestamp(), TimeSpan.FromMinutes(1), 0, tags, new(bytes));
+ log.WriteLine($"bytes written: {actualLength}");
+ Assert.Equal(1063, actualLength);
+
+ clock.Add(TimeSpan.FromSeconds(2));
+ await cache.RemoveByTagAsync("other_tag");
+
+ var result = HybridCachePayload.TryParse(new(oversized, 0, actualLength), key, tags, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.Success, result);
+ Assert.True(payload.SequenceEqual(bytes));
+ Assert.True(pendingTags.IsEmpty);
+
+ await cache.RemoveByTagAsync("some_tag");
+ result = HybridCachePayload.TryParse(new(oversized, 0, actualLength), key, tags, cache, out payload, out flags, out entropy, out pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.ExpiredByTag, result);
+ Assert.Equal(0, payload.Count);
+ Assert.True(pendingTags.IsEmpty);
+ }
+
+ [Fact]
+ public async Task RoundTrip_TagExpiration_Pending()
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key";
+ var tags = TagSet.Create(["some_tag"]);
+ var maxLen = HybridCachePayload.GetMaxBytes(key, tags, bytes.Length);
+ var oversized = ArrayPool<byte>.Shared.Rent(maxLen);
+
+ var creation = cache.CurrentTimestamp();
+ int actualLength = HybridCachePayload.Write(oversized, key, creation, TimeSpan.FromMinutes(1), 0, tags, new(bytes));
+ log.WriteLine($"bytes written: {actualLength}");
+ Assert.Equal(1063, actualLength);
+
+ clock.Add(TimeSpan.FromSeconds(2));
+
+ var tcs = new TaskCompletionSource<long>();
+ cache.DebugInvalidateTag("some_tag", tcs.Task);
+ var result = HybridCachePayload.TryParse(new(oversized, 0, actualLength), key, tags, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.Success, result);
+ Assert.True(payload.SequenceEqual(bytes));
+ Assert.Equal(1, pendingTags.Count);
+ Assert.Equal("some_tag", pendingTags[0]);
+
+ tcs.SetResult(cache.CurrentTimestamp());
+ Assert.True(await cache.IsAnyTagExpiredAsync(pendingTags, creation));
+ }
+
+ [Fact]
+ public void Gibberish()
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ var result = HybridCachePayload.TryParse(new(bytes), "whatever", TagSet.Empty, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.FormatNotRecognized, result);
+ Assert.Equal(0, payload.Count);
+ Assert.True(pendingTags.IsEmpty);
+ }
+
+ [Fact]
+ public void RoundTrip_Truncated()
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key";
+ var tags = TagSet.Create(["some_tag"]);
+ var maxLen = HybridCachePayload.GetMaxBytes(key, tags, bytes.Length);
+ var oversized = ArrayPool<byte>.Shared.Rent(maxLen);
+
+ int actualLength = HybridCachePayload.Write(oversized, key, cache.CurrentTimestamp(), TimeSpan.FromMinutes(1), 0, tags, new(bytes));
+ log.WriteLine($"bytes written: {actualLength}");
+ Assert.Equal(1063, actualLength);
+
+ var result = HybridCachePayload.TryParse(new(oversized, 0, actualLength - 1), key, tags, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.InvalidData, result);
+ Assert.Equal(0, payload.Count);
+ Assert.True(pendingTags.IsEmpty);
+ }
+
+ [Fact]
+ public void RoundTrip_Oversized()
+ {
+ var clock = new FakeTime();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ config.AddSingleton(clock);
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key";
+ var tags = TagSet.Create(["some_tag"]);
+ var maxLen = HybridCachePayload.GetMaxBytes(key, tags, bytes.Length) + 1;
+ var oversized = ArrayPool<byte>.Shared.Rent(maxLen);
+
+ int actualLength = HybridCachePayload.Write(oversized, key, cache.CurrentTimestamp(), TimeSpan.FromMinutes(1), 0, tags, new(bytes));
+ log.WriteLine($"bytes written: {actualLength}");
+ Assert.Equal(1063, actualLength);
+
+ var result = HybridCachePayload.TryParse(new(oversized, 0, actualLength + 1), key, tags, cache, out var payload, out var flags, out var entropy, out var pendingTags, out _);
+ Assert.Equal(HybridCachePayload.HybridCachePayloadParseResult.InvalidData, result);
+ Assert.Equal(0, payload.Count);
+ Assert.True(pendingTags.IsEmpty);
+ }
+
+ [Fact]
+ public async Task MalformedKeyDetected()
+ {
+ using var collector = new LogCollector();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ var localCache = new MemoryDistributedCache(Options.Create(new MemoryDistributedCacheOptions()));
+ config.AddSingleton<IDistributedCache>(new LoggingCache(log, localCache));
+ config.AddLogging(options =>
+ {
+ options.ClearProviders();
+ options.AddProvider(collector);
+ });
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my\uD801\uD802key"; // malformed
+ string[] tags = ["mytag"];
+
+ _ = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+
+ collector.WriteTo(log);
+ collector.AssertErrors([Log.IdKeyInvalidUnicode]);
+ }
+
+ [Fact]
+ public async Task MalformedTagDetected()
+ {
+ using var collector = new LogCollector();
+ using var provider = GetDefaultCache(out var cache, config =>
+ {
+ var localCache = new MemoryDistributedCache(Options.Create(new MemoryDistributedCacheOptions()));
+ config.AddSingleton<IDistributedCache>(new LoggingCache(log, localCache));
+ config.AddLogging(options =>
+ {
+ options.ClearProviders();
+ options.AddProvider(collector);
+ });
+ });
+
+ byte[] bytes = new byte[1024];
+ new Random().NextBytes(bytes);
+
+ string key = "my key"; // malformed
+ string[] tags = ["my\uD801\uD802tag"];
+
+ _ = await cache.GetOrCreateAsync(key, ct => new(Guid.NewGuid()), tags: tags);
+
+ collector.WriteTo(log);
+ collector.AssertErrors([Log.IdTagInvalidUnicode]);
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs
index 0172525b128..f03803fa2bc 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SampleUsage.cs
@@ -8,7 +8,7 @@
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class SampleUsage
+public class SampleUsage : IClassFixture
{
[Fact]
public async Task DistributedCacheWorks()
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SerializerTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SerializerTests.cs
new file mode 100644
index 00000000000..3a9ad47cd25
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SerializerTests.cs
@@ -0,0 +1,71 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+
+public class SerializerTests
+{
+ [Fact]
+ public void RoundTripString()
+ {
+ IHybridCacheSerializer<string> serializer = InbuiltTypeSerializer.Instance;
+
+ using var target = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
+ serializer.Serialize("test value", target);
+ Assert.True("test value"u8.SequenceEqual(target.GetCommittedMemory().Span));
+ Assert.Equal("test value", serializer.Deserialize(target.AsSequence()));
+
+ // and deserialize with multi-chunk
+ Assert.Equal("test value", serializer.Deserialize(Split(target.AsSequence())));
+ }
+
+ [Fact]
+ public void RoundTripByteArray()
+ {
+ IHybridCacheSerializer<byte[]> serializer = InbuiltTypeSerializer.Instance;
+ var value = "test value"u8.ToArray();
+ using var target = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
+ serializer.Serialize(value, target);
+ Assert.True("test value"u8.SequenceEqual(target.GetCommittedMemory().Span));
+ Assert.Equal(value, serializer.Deserialize(target.AsSequence()));
+
+ // and deserialize with multi-chunk
+ Assert.Equal(value, serializer.Deserialize(Split(target.AsSequence())));
+ }
+
+ private static ReadOnlySequence<byte> Split(ReadOnlySequence<byte> value)
+ {
+ // ensure the value is a multi-chunk segment
+ if (!value.IsSingleSegment || value.Length <= 1)
+ {
+ // already multiple chunks, or cannot be split
+ return value;
+ }
+
+ var chunk = value.First; // actually, single
+
+ Segment first = new(chunk.Slice(0, 1), null);
+ Segment second = new(chunk.Slice(1), first);
+ var result = new ReadOnlySequence<byte>(first, 0, second, chunk.Length - 1);
+ Assert.False(result.IsSingleSegment, "should be multi-segment");
+ return result;
+ }
+
+ private sealed class Segment : ReadOnlySequenceSegment<byte>
+ {
+ public Segment(ReadOnlyMemory<byte> memory, Segment? previous)
+ {
+ if (previous is not null)
+ {
+ RunningIndex = previous.RunningIndex + previous.Memory.Length;
+ previous.Next = this;
+ }
+
+ Memory = memory;
+ }
+ }
+
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs
index decc47d3964..72307d1f642 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/ServiceConstructionTests.cs
@@ -20,7 +20,7 @@
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class ServiceConstructionTests
+public class ServiceConstructionTests : IClassFixture
{
[Fact]
public void CanCreateDefaultService()
@@ -204,6 +204,82 @@ public void SubclassMemoryCacheIsNotIgnored(bool manual)
Assert.NotNull(cache.BackendCache);
}
+ [Theory]
+
+ // first 4 tests; regardless of which options objects are supplied, since nothing specified: defaults are assumed
+ [InlineData(false, null, null, null, false, null, null, null)]
+ [InlineData(true, null, null, null, false, null, null, null)]
+ [InlineData(false, null, null, null, true, null, null, null)]
+ [InlineData(true, null, null, null, true, null, null, null)]
+
+ // flags; per-item wins, without merge
+ [InlineData(false, null, null, null, true, null, null, HybridCacheEntryFlags.None)]
+ [InlineData(false, null, null, null, true, null, null, HybridCacheEntryFlags.DisableLocalCacheRead, null, null, HybridCacheEntryFlags.DisableLocalCacheRead)]
+ [InlineData(true, null, null, HybridCacheEntryFlags.None, true, null, null, HybridCacheEntryFlags.DisableLocalCacheRead, null, null, HybridCacheEntryFlags.DisableLocalCacheRead)]
+ [InlineData(true, null, null, HybridCacheEntryFlags.DisableLocalCacheWrite, true, null, null, HybridCacheEntryFlags.DisableLocalCacheRead, null, null, HybridCacheEntryFlags.DisableLocalCacheRead)]
+
+ // flags; global wins if per-item omits, or no per-item flags
+ [InlineData(true, null, null, HybridCacheEntryFlags.DisableLocalCacheWrite, true, null, null, null, null, null, HybridCacheEntryFlags.DisableLocalCacheWrite)]
+ [InlineData(true, null, null, HybridCacheEntryFlags.DisableLocalCacheWrite, false, null, null, null, null, null, HybridCacheEntryFlags.DisableLocalCacheWrite)]
+
+ // local expiration; per-item wins; expiration bleeds into local expiration (but not the other way around)
+ [InlineData(false, null, null, null, true, 42, null, null, 42, 42)]
+ [InlineData(false, null, null, null, true, 42, 43, null, 42, 43)]
+ [InlineData(false, null, null, null, true, null, 43, null, null, 43)]
+
+ // global expiration; expiration bleeds into local expiration (but not the other way around)
+ [InlineData(true, 42, null, null, false, null, null, null, 42, 42)]
+ [InlineData(true, 42, 43, null, false, null, null, null, 42, 43)]
+ [InlineData(true, null, 43, null, false, null, null, null, null, 43)]
+
+ // both expirations specified; expiration bleeds into local expiration (but not the other way around)
+ [InlineData(true, 42, 43, null, true, null, null, null, 42, 43)]
+ [InlineData(true, 42, 43, null, true, 44, null, null, 44, 44)]
+ [InlineData(true, 42, 43, null, true, 44, 45, null, 44, 45)]
+ [InlineData(true, 42, 43, null, true, null, 45, null, 42, 45)]
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S107:Methods should not have too many parameters",
+ Justification = "Most pragmatic and readable way of expressing multiple scenarios.")]
+ public void VerifyCacheEntryOptionsScenarios(
+ bool defaultsSpecified, int? defaultExpiration, int? defaultLocalCacheExpiration, HybridCacheEntryFlags? defaultFlags,
+ bool perItemSpecified, int? perItemExpiration, int? perItemLocalCacheExpiration, HybridCacheEntryFlags? perItemFlags,
+ int? expectedExpiration = null, int? expectedLocalCacheExpiration = null, HybridCacheEntryFlags expectedFlags = HybridCacheEntryFlags.None)
+ {
+ expectedFlags |= HybridCacheEntryFlags.DisableDistributedCache; // hard flag because no L2 present
+
+ var services = new ServiceCollection();
+ services.AddHybridCache(options =>
+ {
+ if (defaultsSpecified)
+ {
+ options.DefaultEntryOptions = new()
+ {
+ Expiration = defaultExpiration is null ? null : TimeSpan.FromMinutes(defaultExpiration.GetValueOrDefault()),
+ LocalCacheExpiration = defaultLocalCacheExpiration is null ? null : TimeSpan.FromMinutes(defaultLocalCacheExpiration.GetValueOrDefault()),
+ Flags = defaultFlags,
+ };
+ }
+ });
+
+ using ServiceProvider provider = services.BuildServiceProvider();
+ var cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
+
+ HybridCacheEntryOptions? itemOptions = null;
+ if (perItemSpecified)
+ {
+ itemOptions = new()
+ {
+ Expiration = perItemExpiration is null ? null : TimeSpan.FromMinutes(perItemExpiration.GetValueOrDefault()),
+ LocalCacheExpiration = perItemLocalCacheExpiration is null ? null : TimeSpan.FromMinutes(perItemLocalCacheExpiration.GetValueOrDefault()),
+ Flags = perItemFlags,
+ };
+ }
+
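+ // The internal helpers below resolve what the cache would actually use for this call,
+ // applying the per-item -> global -> built-in default precedence exercised by the rows above.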
+ Assert.Equal(expectedFlags, cache.GetEffectiveFlags(itemOptions));
+ Assert.Equal(TimeSpan.FromMinutes(expectedExpiration ?? DefaultHybridCache.DefaultExpirationMinutes), cache.GetL2AbsoluteExpirationRelativeToNow(itemOptions));
+ Assert.Equal(TimeSpan.FromMinutes(expectedLocalCacheExpiration ?? DefaultHybridCache.DefaultExpirationMinutes), cache.GetL1AbsoluteExpirationRelativeToNow(itemOptions));
+ }
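+
+ // Illustrative only (not part of the shipped tests): the merge rules verified above mean a
+ // call-site can rely on the global DefaultEntryOptions and override just what it needs, e.g.:
+ //
+ //   services.AddHybridCache(o => o.DefaultEntryOptions = new() { Expiration = TimeSpan.FromMinutes(5) });
+ //   ...
+ //   await cache.GetOrCreateAsync(key, factory,
+ //       new HybridCacheEntryOptions { Flags = HybridCacheEntryFlags.DisableLocalCacheRead });
+ //
+ //   // expiration comes from the global default; flags come from the per-call options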
+
private class CustomMemoryCache : MemoryCache
{
public CustomMemoryCache(IOptions<MemoryCacheOptions> options)
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs
index 66f4fc7628d..8085a4318c0 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/SizeTests.cs
@@ -11,7 +11,7 @@
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class SizeTests(ITestOutputHelper log)
+public class SizeTests(ITestOutputHelper log) : IClassFixture
{
[Theory]
[InlineData("abc", null, true, null, null)] // does not enforce size limits
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs
index 4680f589f98..d9addf03aa2 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/StampedeTests.cs
@@ -10,7 +10,7 @@
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class StampedeTests
+public class StampedeTests : IClassFixture
{
private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache)
{
@@ -59,6 +59,24 @@ public void Dispose()
bool IMemoryCache.TryGetValue(object key, out object? value) => throw new NotSupportedException("Intentionally not provided");
}
+ [Fact]
+ public void ToString_Key()
+ {
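+ // the stampede key renders as "key (flags)", and the owning StampedeState surfaces the same text via its own ToString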
+ var services = new ServiceCollection();
+ services.AddHybridCache();
+ using var provider = services.BuildServiceProvider();
+ var cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
+
+ var key = new DefaultHybridCache.StampedeKey("test_key", HybridCacheEntryFlags.DisableLocalCache);
+
+ const string Expected = "test_key (DisableLocalCache)";
+
+ Assert.Equal(Expected, key.ToString());
+
+ var state = new DefaultHybridCache.StampedeState(cache, in key, TagSet.Empty, true);
+ Assert.Equal(Expected, state.ToString());
+ }
+
[Theory]
[InlineData(1, false)]
[InlineData(1, true)]
@@ -436,6 +454,8 @@ public void ValidatePartitioning()
for (int i = 0; i < 1024; i++)
{
var key = new DefaultHybridCache.StampedeKey(Guid.NewGuid().ToString(), default);
+ Assert.True(key.Equals(key), "typed equality self");
+ Assert.True(key.Equals((object)key), "object equality self");
var obj = cache.GetPartitionedSyncLock(in key);
if (!counts.TryGetValue(obj, out var count))
{
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/TagSetTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/TagSetTests.cs
new file mode 100644
index 00000000000..1c63ff5e5c2
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/TagSetTests.cs
@@ -0,0 +1,180 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+public class TagSetTests
+{
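+ // The tests below exercise TagSet's storage strategy: zero tags report IsEmpty, a single tag is
+ // stored inline (IsArray is false), and two or more tags are stored as a defensively copied array
+ // (IsArray is true); empty and reserved ("*") tags are rejected.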
+ [Fact]
+ public void DefaultEmpty()
+ {
+ var tags = TagSet.Empty;
+ Assert.Equal(0, tags.Count);
+ Assert.True(tags.IsEmpty);
+ Assert.False(tags.IsArray);
+ Assert.Equal("(no tags)", tags.ToString());
+ tags.CopyTo(default);
+ }
+
+ [Fact]
+ public void EmptyArray()
+ {
+ var tags = TagSet.Create([]);
+ Assert.Equal(0, tags.Count);
+ Assert.True(tags.IsEmpty);
+ Assert.False(tags.IsArray);
+ Assert.Equal("(no tags)", tags.ToString());
+ tags.CopyTo(default);
+ }
+
+ [Fact]
+ public void EmptyCustom()
+ {
+ var tags = TagSet.Create(Custom());
+ Assert.Equal(0, tags.Count);
+ Assert.True(tags.IsEmpty);
+ Assert.False(tags.IsArray);
+ Assert.Equal("(no tags)", tags.ToString());
+ tags.CopyTo(default);
+
+ static IEnumerable<string> Custom()
+ {
+ yield break;
+ }
+ }
+
+ [Fact]
+ public void SingleFromArray()
+ {
+ string[] arr = ["abc"];
+ var tags = TagSet.Create(arr);
+ arr.AsSpan().Clear(); // to check defensive copy
+ Assert.Equal(1, tags.Count);
+ Assert.False(tags.IsEmpty);
+ Assert.False(tags.IsArray);
+ Assert.Equal("abc", tags.ToString());
+ var scratch = tags.ToArray();
+ Assert.Equal("abc", scratch[0]);
+ }
+
+ [Fact]
+ public void SingleFromCustom()
+ {
+ var tags = TagSet.Create(Custom());
+ Assert.Equal(1, tags.Count);
+ Assert.False(tags.IsEmpty);
+ Assert.False(tags.IsArray);
+ Assert.Equal("abc", tags.ToString());
+ var scratch = tags.ToArray();
+ Assert.Equal("abc", scratch[0]);
+
+ static IEnumerable<string> Custom()
+ {
+ yield return "abc";
+ }
+ }
+
+ [Fact]
+ public void MultipleFromArray()
+ {
+ string[] arr = ["abc", "def", "ghi"];
+ var tags = TagSet.Create(arr);
+ arr.AsSpan().Clear(); // to check defensive copy
+ Assert.Equal(3, tags.Count);
+ Assert.False(tags.IsEmpty);
+ Assert.True(tags.IsArray);
+ Assert.Equal("abc, def, ghi", tags.ToString());
+ var scratch = tags.ToArray();
+ Assert.Equal("abc", scratch[0]);
+ Assert.Equal("def", scratch[1]);
+ Assert.Equal("ghi", scratch[2]);
+ }
+
+ [Fact]
+ public void MultipleFromCustom()
+ {
+ var tags = TagSet.Create(Custom());
+ Assert.Equal(3, tags.Count);
+ Assert.False(tags.IsEmpty);
+ Assert.True(tags.IsArray);
+ Assert.Equal("abc, def, ghi", tags.ToString());
+ var scratch = tags.ToArray();
+ Assert.Equal("abc", scratch[0]);
+ Assert.Equal("def", scratch[1]);
+ Assert.Equal("ghi", scratch[2]);
+
+ static IEnumerable<string> Custom()
+ {
+ yield return "abc";
+ yield return "def";
+ yield return "ghi";
+ }
+ }
+
+ [Fact]
+ public void ManyFromArray()
+ {
+ string[] arr = LongCustom().ToArray();
+ var tags = TagSet.Create(arr);
+ arr.AsSpan().Clear(); // to check defensive copy
+ Assert.Equal(128, tags.Count);
+ Assert.False(tags.IsEmpty);
+ Assert.True(tags.IsArray);
+ var scratch = tags.ToArray();
+ Assert.Equal(128, scratch.Length);
+ }
+
+ [Fact]
+ public void ManyFromCustom()
+ {
+ var tags = TagSet.Create(LongCustom());
+ Assert.Equal(128, tags.Count);
+ Assert.False(tags.IsEmpty);
+ Assert.True(tags.IsArray);
+ var scratch = tags.ToArray();
+ Assert.Equal(128, scratch.Length);
+ }
+
+ [Fact]
+ public void InvalidEmpty()
+ {
+ var ex = Assert.Throws<ArgumentException>(() => TagSet.Create(["abc", "", "ghi"]));
+ Assert.Equal("tags", ex.ParamName);
+ Assert.StartsWith("Tags cannot be empty.", ex.Message);
+ }
+
+ [Fact]
+ public void InvalidReserved()
+ {
+ var ex = Assert.Throws<ArgumentException>(() => TagSet.Create(["abc", "*", "ghi"]));
+ Assert.Equal("tags", ex.ParamName);
+ Assert.StartsWith("The tag '*' is reserved and cannot be used in this context.", ex.Message);
+ }
+
+ private static IEnumerable<string> LongCustom()
+ {
+ var rand = new Random();
+ for (int i = 0; i < 128; i++)
+ {
+ yield return Create();
+ }
+
+ string Create()
+ {
+ const string Alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
+ var len = rand.Next(3, 8);
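+ // .NET Framework has no string(ReadOnlySpan<char>) constructor, so build from a char[] there;
+ // elsewhere a stackalloc span avoids the intermediate array allocation.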
+#if NET462
+ char[] chars = new char[len];
+#else
+ Span<char> chars = stackalloc char[len];
+#endif
+ for (int i = 0; i < chars.Length; i++)
+ {
+ chars[i] = Alphabet[rand.Next(0, Alphabet.Length)];
+ }
+
+ return new string(chars);
+ }
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/UnreliableL2Tests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/UnreliableL2Tests.cs
index 7af85f9cba2..f6232357651 100644
--- a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/UnreliableL2Tests.cs
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/UnreliableL2Tests.cs
@@ -12,7 +12,7 @@
namespace Microsoft.Extensions.Caching.Hybrid.Tests;
// validate HC stability when the L2 is unreliable
-public class UnreliableL2Tests(ITestOutputHelper testLog)
+public class UnreliableL2Tests(ITestOutputHelper testLog) : IClassFixture
{
[Theory]
[InlineData(BreakType.None)]