Explorar el Código

HybridCache - minor post-PR fixes (#55251)

* additional HybridCache post-PR fixes
Marc Gravell hace 1 año
padre
commit
b27c028f67
Se han modificado 23 ficheros con 511 adiciones y 367 borrados
  1. 0 5
      src/Caching/Hybrid/src/HybridCacheBuilderExtensions.cs
  2. 0 7
      src/Caching/Hybrid/src/HybridCacheOptions.cs
  3. 0 6
      src/Caching/Hybrid/src/HybridCacheServiceExtensions.cs
  4. 0 5
      src/Caching/Hybrid/src/IHybridCacheBuilder.cs
  5. 6 1
      src/Caching/Hybrid/src/Internal/BufferChunk.cs
  6. 58 8
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.CacheItem.cs
  7. 10 10
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.Debug.cs
  8. 15 14
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.ImmutableCacheItem.cs
  9. 9 6
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.L2.cs
  10. 7 37
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.MutableCacheItem.cs
  11. 0 2
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.Serialization.cs
  12. 11 1
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.Stampede.cs
  13. 13 1
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.StampedeKey.cs
  14. 22 27
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.StampedeState.cs
  15. 58 35
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.StampedeStateT.cs
  16. 34 0
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.SyncLock.cs
  17. 1 1
      src/Caching/Hybrid/src/Internal/DefaultHybridCache.cs
  18. 16 0
      src/Caching/Hybrid/src/Internal/RecyclableArrayBufferWriter.cs
  19. 18 12
      src/Caching/Hybrid/test/BufferReleaseTests.cs
  20. 0 188
      src/Caching/Hybrid/test/DisposableValueTests.cs
  21. 200 0
      src/Caching/Hybrid/test/SampleUsage.cs
  22. 32 0
      src/Caching/Hybrid/test/StampedeTests.cs
  23. 1 1
      src/Caching/SqlServer/src/DatabaseOperations.cs

+ 0 - 5
src/Caching/Hybrid/src/HybridCacheBuilderExtensions.cs

@@ -1,12 +1,7 @@
 // Licensed to the .NET Foundation under one or more agreements.
 // The .NET Foundation licenses this file to you under the MIT license.
 
-using System;
-using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
 using Microsoft.Extensions.DependencyInjection;
 
 namespace Microsoft.Extensions.Caching.Hybrid;

+ 0 - 7
src/Caching/Hybrid/src/HybridCacheOptions.cs

@@ -1,13 +1,6 @@
 // Licensed to the .NET Foundation under one or more agreements.
 // The .NET Foundation licenses this file to you under the MIT license.
 
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Options;
-
 namespace Microsoft.Extensions.Caching.Hybrid;
 
 /// <summary>

+ 0 - 6
src/Caching/Hybrid/src/HybridCacheServiceExtensions.cs

@@ -2,15 +2,9 @@
 // The .NET Foundation licenses this file to you under the MIT license.
 
 using System;
-using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
 using Microsoft.Extensions.Caching.Hybrid.Internal;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.DependencyInjection.Extensions;
-using Microsoft.Extensions.Internal;
 
 namespace Microsoft.Extensions.Caching.Hybrid;
 

+ 0 - 5
src/Caching/Hybrid/src/IHybridCacheBuilder.cs

@@ -1,11 +1,6 @@
 // Licensed to the .NET Foundation under one or more agreements.
 // The .NET Foundation licenses this file to you under the MIT license.
 
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
 using Microsoft.Extensions.DependencyInjection;
 
 namespace Microsoft.Extensions.Caching.Hybrid;

+ 6 - 1
src/Caching/Hybrid/src/Internal/BufferChunk.cs

@@ -9,7 +9,8 @@ using System.Runtime.CompilerServices;
 namespace Microsoft.Extensions.Caching.Hybrid.Internal;
 
 // used to convey buffer status; like ArraySegment<byte>, but Offset is always
-// zero, and we use the MSB of the length to track whether or not to recycle this value
+// zero, and we use the most significant bit (MSB, the sign flag) of the length
+// to track whether or not to recycle this value
 internal readonly struct BufferChunk
 {
     private const int MSB = (1 << 31);
@@ -40,6 +41,10 @@ internal readonly struct BufferChunk
         Array = array;
         _lengthAndPoolFlag = array.Length;
         // assume not pooled, if exact-sized
+        // (we don't expect array.Length to be negative; we're really just saying
+        // "we expect the result of assigning array.Length to _lengthAndPoolFlag
+        // to give the expected Length *and* not have the MSB set; we're just
+        // checking that we haven't fat-fingered our MSB logic)
         Debug.Assert(!ReturnToPool, "do not return right-sized arrays");
         Debug.Assert(Length == array.Length, "array length not respected");
     }

+ 58 - 8
src/Caching/Hybrid/src/Internal/DefaultHybridCache.CacheItem.cs

@@ -2,6 +2,8 @@
 // The .NET Foundation licenses this file to you under the MIT license.
 
 using System;
+using System.Diagnostics;
+using System.Threading;
 using Microsoft.Extensions.Caching.Memory;
 
 namespace Microsoft.Extensions.Caching.Hybrid.Internal;
@@ -10,37 +12,85 @@ partial class DefaultHybridCache
 {
     internal abstract class CacheItem
     {
+        private int _refCount = 1; // the number of pending operations against this cache item
+        // note: the ref count is the number of callers anticipating this value at any given time; initially,
+        // it is one for a simple "get the value" flow, but if another call joins with us, it'll be incremented;
+        // if either cancels, it will get decremented, with the entire flow being cancelled if it ever becomes
+        // zero
+        // this counter also drives cache lifetime, with the cache itself incrementing the count by one; in the
+        // case of mutable data, cache eviction may reduce this to zero (in cooperation with any concurrent readers,
+        // who incr/decr around their fetch), allowing safe buffer recycling
+
+        internal int RefCount => Volatile.Read(ref _refCount);
+
         internal static readonly PostEvictionDelegate _sharedOnEviction = static (key, value, reason, state) =>
         {
             if (value is CacheItem item)
             {
-                // perform per-item clean-up; this could be buffer recycling (if defensive copies needed),
-                // or could be disposal
-                item.OnEviction();
+                item.Release();
             }
         };
 
-        public virtual void Release() { } // for recycling purposes
+        public virtual bool NeedsEvictionCallback => false; // do we need to call Release when evicted?
 
-        public abstract bool NeedsEvictionCallback { get; } // do we need to call Release when evicted?
-
-        public virtual void OnEviction() { } // only invoked if NeedsEvictionCallback reported true
+        protected virtual void OnFinalRelease() { } // any required release semantics
 
         public abstract bool TryReserveBuffer(out BufferChunk buffer);
 
         public abstract bool DebugIsImmutable { get; }
+
+        public bool Release() // returns true ONLY for the final release step
+        {
+            var newCount = Interlocked.Decrement(ref _refCount);
+            Debug.Assert(newCount >= 0, "over-release detected");
+            if (newCount == 0)
+            {
+                // perform per-item clean-up, i.e. buffer recycling (if defensive copies needed)
+                OnFinalRelease();
+                return true;
+            }
+            return false;
+        }
+
+        public bool TryReserve()
+        {
+            // this is basically interlocked increment, but with a check against:
+            // a) incrementing upwards from zero
+            // b) overflowing *back* to zero
+            var oldValue = Volatile.Read(ref _refCount);
+            do
+            {
+                if (oldValue is 0 or -1)
+                {
+                    return false; // already burned, or about to roll around back to zero
+                }
+
+                var updated = Interlocked.CompareExchange(ref _refCount, oldValue + 1, oldValue);
+                if (updated == oldValue)
+                {
+                    return true; // we exchanged
+                }
+                oldValue = updated; // we failed, but we have an updated state
+            } while (true);
+        }
+
     }
 
     internal abstract class CacheItem<T> : CacheItem
     {
+        internal static CacheItem<T> Create() => ImmutableTypeCache<T>.IsImmutable ? new ImmutableCacheItem<T>() : new MutableCacheItem<T>();
+
+        // attempt to get a value that was *not* previously reserved
         public abstract bool TryGetValue(out T value);
 
-        public T GetValue()
+        // get a value that *was* reserved, countermanding our reservation in the process
+        public T GetReservedValue()
         {
             if (!TryGetValue(out var value))
             {
                 Throw();
             }
+            Release();
             return value;
 
             static void Throw() => throw new ObjectDisposedException("The cache item has been recycled before the value was obtained");

+ 10 - 10
src/Caching/Hybrid/src/Internal/DefaultHybridCache.Debug.cs

@@ -24,17 +24,17 @@ partial class DefaultHybridCache
 
     private int _outstandingBufferCount;
 
-    internal int DebugGetOutstandingBuffers(bool flush = false)
+    internal int DebugOnlyGetOutstandingBuffers(bool flush = false)
                 => flush ? Interlocked.Exchange(ref _outstandingBufferCount, 0) : Volatile.Read(ref _outstandingBufferCount);
 
     [Conditional("DEBUG")]
-    internal void DebugDecrementOutstandingBuffers()
+    internal void DebugOnlyDecrementOutstandingBuffers()
     {
         Interlocked.Decrement(ref _outstandingBufferCount);
     }
 
     [Conditional("DEBUG")]
-    internal void DebugIncrementOutstandingBuffers()
+    internal void DebugOnlyIncrementOutstandingBuffers()
     {
         Interlocked.Increment(ref _outstandingBufferCount);
     }
@@ -42,27 +42,27 @@ partial class DefaultHybridCache
 
     partial class MutableCacheItem<T>
     {
-        partial void DebugDecrementOutstandingBuffers();
-        partial void DebugTrackBufferCore(DefaultHybridCache cache);
+        partial void DebugOnlyDecrementOutstandingBuffers();
+        partial void DebugOnlyTrackBufferCore(DefaultHybridCache cache);
 
         [Conditional("DEBUG")]
-        internal void DebugTrackBuffer(DefaultHybridCache cache) => DebugTrackBufferCore(cache);
+        internal void DebugOnlyTrackBuffer(DefaultHybridCache cache) => DebugOnlyTrackBufferCore(cache);
 
 #if DEBUG
         private DefaultHybridCache? _cache; // for buffer-tracking - only enabled in DEBUG
-        partial void DebugDecrementOutstandingBuffers()
+        partial void DebugOnlyDecrementOutstandingBuffers()
         {
             if (_buffer.ReturnToPool)
             {
-                _cache?.DebugDecrementOutstandingBuffers();
+                _cache?.DebugOnlyDecrementOutstandingBuffers();
             }
         }
-        partial void DebugTrackBufferCore(DefaultHybridCache cache)
+        partial void DebugOnlyTrackBufferCore(DefaultHybridCache cache)
         {
             _cache = cache;
             if (_buffer.ReturnToPool)
             {
-                _cache?.DebugIncrementOutstandingBuffers();
+                _cache?.DebugOnlyIncrementOutstandingBuffers();
             }
         }
 #endif

+ 15 - 14
src/Caching/Hybrid/src/Internal/DefaultHybridCache.ImmutableCacheItem.cs

@@ -1,9 +1,7 @@
 // Licensed to the .NET Foundation under one or more agreements.
 // The .NET Foundation licenses this file to you under the MIT license.
 
-using System;
-using System.Diagnostics;
-using System.Diagnostics.CodeAnalysis;
+using System.Threading;
 
 namespace Microsoft.Extensions.Caching.Hybrid.Internal;
 
@@ -11,23 +9,26 @@ partial class DefaultHybridCache
 {
     private sealed class ImmutableCacheItem<T> : CacheItem<T> // used to hold types that do not require defensive copies
     {
-        private readonly T _value;
-        public ImmutableCacheItem(T value) => _value = value;
+        private T _value = default!; // deferred until SetValue
 
-        private static ImmutableCacheItem<T>? _sharedDefault;
+        public void SetValue(T value) => _value = value;
 
-        // this is only used when the underlying store is disabled; we don't need 100% singleton; "good enough is"
-        public static ImmutableCacheItem<T> Default => _sharedDefault ??= new(default!);
+        private static ImmutableCacheItem<T>? _sharedDefault;
 
-        public override void OnEviction()
+        // get a shared instance that passes as "reserved"; doesn't need to be 100% singleton,
+        // but we don't want to break the reservation rules either; if we can't reserve: create new
+        public static ImmutableCacheItem<T> GetReservedShared()
         {
-            var obj = _value as IDisposable;
-            Debug.Assert(obj is not null, "shouldn't be here for non-disposable types");
-            obj?.Dispose();
+            var obj = Volatile.Read(ref _sharedDefault);
+            if (obj is null || !obj.TryReserve())
+            {
+                obj = new();
+                obj.TryReserve(); // this is reliable on a new instance
+                Volatile.Write(ref _sharedDefault, obj);
+            }
+            return obj;
         }
 
-        public override bool NeedsEvictionCallback => ImmutableTypeCache<T>.IsDisposable;
-
         public override bool TryGetValue(out T value)
         {
             value = _value;

+ 9 - 6
src/Caching/Hybrid/src/Internal/DefaultHybridCache.L2.cs

@@ -113,13 +113,16 @@ partial class DefaultHybridCache
 
     internal void SetL1<T>(string key, CacheItem<T> value, HybridCacheEntryOptions? options)
     {
-        // based on CacheExtensions.Set<TItem>, but with post-eviction recycling
-        using var cacheEntry = _localCache.CreateEntry(key);
-        cacheEntry.AbsoluteExpirationRelativeToNow = options?.LocalCacheExpiration ?? _defaultLocalCacheExpiration;
-        cacheEntry.Value = value;
-        if (value.NeedsEvictionCallback)
+        if (value.TryReserve()) // incr ref-count for the cache itself; this *may* be released via the NeedsEvictionCallback path
         {
-            cacheEntry.RegisterPostEvictionCallback(CacheItem._sharedOnEviction);
+            // based on CacheExtensions.Set<TItem>, but with post-eviction recycling
+            using var cacheEntry = _localCache.CreateEntry(key);
+            cacheEntry.AbsoluteExpirationRelativeToNow = options?.LocalCacheExpiration ?? _defaultLocalCacheExpiration;
+            cacheEntry.Value = value;
+            if (value.NeedsEvictionCallback)
+            {
+                cacheEntry.RegisterPostEvictionCallback(CacheItem._sharedOnEviction);
+            }
         }
     }
 }

+ 7 - 37
src/Caching/Hybrid/src/Internal/DefaultHybridCache.MutableCacheItem.cs

@@ -1,28 +1,23 @@
 // Licensed to the .NET Foundation under one or more agreements.
 // The .NET Foundation licenses this file to you under the MIT license.
 
-using System;
-using System.Diagnostics;
-using System.Threading;
-
 namespace Microsoft.Extensions.Caching.Hybrid.Internal;
 
 partial class DefaultHybridCache
 {
     private sealed partial class MutableCacheItem<T> : CacheItem<T> // used to hold types that require defensive copies
     {
-        private readonly IHybridCacheSerializer<T> _serializer;
-        private readonly BufferChunk _buffer;
-        private int _refCount = 1; // buffer released when this becomes zero
+        private IHybridCacheSerializer<T> _serializer = null!; // deferred until SetValue
+        private BufferChunk _buffer;
 
-        public MutableCacheItem(ref BufferChunk buffer, IHybridCacheSerializer<T> serializer)
+        public void SetValue(ref BufferChunk buffer, IHybridCacheSerializer<T> serializer)
         {
             _serializer = serializer;
             _buffer = buffer;
             buffer = default; // we're taking over the lifetime; the caller no longer has it!
         }
 
-        public MutableCacheItem(T value, IHybridCacheSerializer<T> serializer, int maxLength)
+        public void SetValue(T value, IHybridCacheSerializer<T> serializer, int maxLength)
         {
             _serializer = serializer;
             var writer = RecyclableArrayBufferWriter<byte>.Create(maxLength);
@@ -34,35 +29,10 @@ partial class DefaultHybridCache
 
         public override bool NeedsEvictionCallback => _buffer.ReturnToPool;
 
-        public override void OnEviction() => Release();
-
-        public override void Release()
+        protected override void OnFinalRelease()
         {
-            var newCount = Interlocked.Decrement(ref _refCount);
-            if (newCount == 0)
-            {
-                DebugDecrementOutstandingBuffers();
-                _buffer.RecycleIfAppropriate();
-            }
-        }
-
-        public bool TryReserve()
-        {
-            var oldValue = Volatile.Read(ref _refCount);
-            do
-            {
-                if (oldValue is 0 or -1)
-                {
-                    return false; // already burned, or about to roll around back to zero
-                }
-
-                var updated = Interlocked.CompareExchange(ref _refCount, oldValue + 1, oldValue);
-                if (updated == oldValue)
-                {
-                    return true; // we exchanged
-                }
-                oldValue = updated; // we failed, but we have an updated state
-            } while (true);
+            DebugOnlyDecrementOutstandingBuffers();
+            _buffer.RecycleIfAppropriate();
         }
 
         public override bool TryGetValue(out T value)

+ 0 - 2
src/Caching/Hybrid/src/Internal/DefaultHybridCache.Serialization.cs

@@ -57,8 +57,6 @@ partial class DefaultHybridCache
     {
         // note for blittable types: a pure struct will be a full copy every time - nothing shared to mutate
         public static readonly bool IsImmutable = (typeof(T).IsValueType && IsBlittable<T>()) || IsImmutable(typeof(T));
-
-        public static bool IsDisposable => typeof(IDisposable).IsAssignableFrom(typeof(T));
     }
 
     private static bool IsBlittable<T>() // minimize the generic portion

+ 11 - 1
src/Caching/Hybrid/src/Internal/DefaultHybridCache.Stampede.cs

@@ -43,7 +43,7 @@ partial class DefaultHybridCache
         // hmm; failed to add - there's concurrent activity on the same key; we're now
         // in very rare race condition territory; go ahead and take a lock while we
         // collect our thoughts
-        lock (_currentOperations)
+        lock (GetPartitionedSyncLock(in stampedeKey)) // see notes in SyncLock.cs
         {
             // check again while we hold the lock
             if (TryJoinExistingSession(this, stampedeKey, out existing))
@@ -57,6 +57,16 @@ partial class DefaultHybridCache
                 // the floor; in the grand scheme of things, that's OK; this is a rare outcome
             }
 
+            // and check whether the value was L1-cached by an outgoing operation (for *us* to check needs local-cache-read,
+            // and for *them* to have updated needs local-cache-write, but since the shared us/them key includes flags,
+            // we can skip this if *either* flag is set)
+            if ((flags & HybridCacheEntryFlags.DisableLocalCache) == 0 && _localCache.TryGetValue(key, out var untyped)
+                && untyped is CacheItem<T> typed && typed.TryReserve())
+            {
+                stampedeState.SetResultDirect(typed);
+                return false; // the work has ALREADY been done
+            }
+
             // otherwise, either nothing existed - or the thing that already exists can't be joined;
             // in that case, go ahead and use the state that we invented a moment ago (outside of the lock)
             _currentOperations[stampedeKey] = stampedeState;

+ 13 - 1
src/Caching/Hybrid/src/Internal/DefaultHybridCache.StampedeKey.cs

@@ -15,10 +15,18 @@ partial class DefaultHybridCache
         private readonly int _hashCode; // we know we'll need it; compute it once only
         public StampedeKey(string key, HybridCacheEntryFlags flags)
         {
+            // We'll use both the key *and* the flags as combined flag; in reality, we *expect*
+            // the flags to be consistent between calls on the same operation, and it must be
+            // noted that the *cache items* only use the key (not the flags), but: it gets
+            // very hard to grok what the correct behaviour should be if combining two calls
+            // with different flags, since they could have mutually exclusive behaviours!
+
+            // As such, we'll treat conflicting calls entirely separately from a stampede
+            // perspective.
             _key = key;
             _flags = flags;
 #if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
-            _hashCode = HashCode.Combine(key, flags);
+            _hashCode = System.HashCode.Combine(key, flags);
 #else
             _hashCode = key.GetHashCode() ^ (int)flags;
 #endif
@@ -27,6 +35,10 @@ partial class DefaultHybridCache
         public string Key => _key;
         public HybridCacheEntryFlags Flags => _flags;
 
+        // allow direct access to the pre-computed hash-code, semantically emphasizing that
+        // this is a constant-time operation against a known value
+        internal int HashCode => _hashCode;
+
 public bool Equals(StampedeKey other) => _flags == other._flags && _key == other._key;
 
         public override bool Equals([NotNullWhen(true)] object? obj)

+ 22 - 27
src/Caching/Hybrid/src/Internal/DefaultHybridCache.StampedeState.cs

@@ -15,22 +15,27 @@ partial class DefaultHybridCache
 #endif
     {
         private readonly DefaultHybridCache _cache;
-        private int _activeCallers = 1;
+        private readonly CacheItem _cacheItem;
 
         // because multiple callers can enlist, we need to track when the *last* caller cancels
         // (and keep going until then); that means we need to run with custom cancellation
         private readonly CancellationTokenSource? _sharedCancellation;
         internal readonly CancellationToken SharedToken; // this might have a value even when _sharedCancellation is null
 
-        public StampedeKey Key { get; }
+        // we expose the key as a by-ref readonly; this minimizes the stack work involved in passing the key around
+        // (both in terms of width and copy-semantics)
+        private readonly StampedeKey _key;
+        public ref readonly StampedeKey Key => ref _key;
+        protected CacheItem CacheItem => _cacheItem;
 
         /// <summary>
         /// Create a stampede token optionally with shared cancellation support
         /// </summary>
-        protected StampedeState(DefaultHybridCache cache, in StampedeKey key, bool canBeCanceled)
+        protected StampedeState(DefaultHybridCache cache, in StampedeKey key, CacheItem cacheItem, bool canBeCanceled)
         {
             _cache = cache;
-            Key = key;
+            _key = key;
+            _cacheItem = cacheItem;
             if (canBeCanceled)
             {
                 // if the first (or any) caller can't be cancelled; we'll never get to zero; no point tracking
@@ -47,10 +52,11 @@ partial class DefaultHybridCache
         /// <summary>
         /// Create a stampede token using a fixed cancellation token
         /// </summary>
-        protected StampedeState(DefaultHybridCache cache, in StampedeKey key, CancellationToken token)
+        protected StampedeState(DefaultHybridCache cache, in StampedeKey key, CacheItem cacheItem, CancellationToken token)
         {
             _cache = cache;
-            Key = key;
+            _key = key;
+            _cacheItem = cacheItem;
             SharedToken = token;
         }
 
@@ -68,14 +74,14 @@ partial class DefaultHybridCache
 
         public abstract void SetCanceled();
 
-        public int DebugCallerCount => Volatile.Read(ref _activeCallers);
+        public int DebugCallerCount => _cacheItem.RefCount;
 
         public abstract Type Type { get; }
 
-        public void RemoveCaller()
+        public void CancelCaller()
         {
             // note that TryAddCaller has protections to avoid getting back from zero
-            if (Interlocked.Decrement(ref _activeCallers) == 0)
+            if (_cacheItem.Release())
             {
                 // we're the last to leave; turn off the lights
                 _sharedCancellation?.Cancel();
@@ -83,25 +89,14 @@ partial class DefaultHybridCache
             }
         }
 
-        public bool TryAddCaller() // essentially just interlocked-increment, but with a leading zero check and overflow detection
+        public bool TryAddCaller() => _cacheItem.TryReserve();
+    }
+
+    private void RemoveStampedeState(in StampedeKey key)
+    {
+        lock (GetPartitionedSyncLock(in key)) // see notes in SyncLock.cs
         {
-            var oldValue = Volatile.Read(ref _activeCallers);
-            do
-            {
-                if (oldValue is 0 or -1)
-                {
-                    return false; // already burned or about to roll around back to zero
-                }
-
-                var updated = Interlocked.CompareExchange(ref _activeCallers, oldValue + 1, oldValue);
-                if (updated == oldValue)
-                {
-                    return true; // we exchanged
-                }
-                oldValue = updated; // we failed, but we have an updated state
-            } while (true);
+            _currentOperations.TryRemove(key, out _);
         }
     }
-
-    private void RemoveStampede(StampedeKey key) => _currentOperations.TryRemove(key, out _);
 }

+ 58 - 35
src/Caching/Hybrid/src/Internal/DefaultHybridCache.StampedeStateT.cs

@@ -3,9 +3,9 @@
 
 using System;
 using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
 using System.Threading;
 using System.Threading.Tasks;
-using Microsoft.Extensions.Caching.Memory;
 
 namespace Microsoft.Extensions.Caching.Hybrid.Internal;
 
@@ -20,7 +20,7 @@ partial class DefaultHybridCache
         private Task<T>? _sharedUnwrap; // allows multiple non-cancellable callers to share a single task (when no defensive copy needed)
 
         public StampedeState(DefaultHybridCache cache, in StampedeKey key, bool canBeCanceled)
-            : base(cache, key, canBeCanceled)
+            : base(cache, key, CacheItem<T>.Create(), canBeCanceled)
         {
             _result = new(TaskCreationOptions.RunContinuationsAsynchronously);
         }
@@ -28,7 +28,7 @@ partial class DefaultHybridCache
         public override Type Type => typeof(T);
 
         public StampedeState(DefaultHybridCache cache, in StampedeKey key, CancellationToken token)
-            : base(cache, key, token) { } // no TCS in this case - this is for SetValue only
+            : base(cache, key, CacheItem<T>.Create(), token) { } // no TCS in this case - this is for SetValue only
 
         public void QueueUserWorkItem(in TState state, Func<TState, CancellationToken, ValueTask<T>> underlying, HybridCacheEntryOptions? options)
         {
@@ -134,11 +134,15 @@ partial class DefaultHybridCache
         {
             if (_result is not null)
             {
-                Cache.RemoveStampede(Key);
+                Cache.RemoveStampedeState(in Key);
                 _result.TrySetException(ex);
             }
         }
 
+        // ONLY set the result, without any other side-effects
+        internal void SetResultDirect(CacheItem<T> value)
+            => _result?.TrySetResult(value);
+
         private void SetResult(CacheItem<T> value)
         {
             if ((Key.Flags & HybridCacheEntryFlags.DisableLocalCacheWrite) == 0)
@@ -148,7 +152,7 @@ partial class DefaultHybridCache
 
             if (_result is not null)
             {
-                Cache.RemoveStampede(Key);
+                Cache.RemoveStampedeState(in Key);
                 _result.TrySetResult(value);
             }
         }
@@ -158,8 +162,8 @@ partial class DefaultHybridCache
             // note we don't store this dummy result in L1 or L2
             if (_result is not null)
             {
-                Cache.RemoveStampede(Key);
-                _result.TrySetResult(ImmutableCacheItem<T>.Default);
+                Cache.RemoveStampedeState(in Key);
+                _result.TrySetResult(ImmutableCacheItem<T>.GetReservedShared());
             }
         }
 
@@ -170,36 +174,50 @@ partial class DefaultHybridCache
 
             var serializer = Cache.GetSerializer<T>();
             CacheItem<T> cacheItem;
-            if (ImmutableTypeCache<T>.IsImmutable)
-            {
-                // deserialize; and store object; buffer can be recycled now
-                cacheItem = new ImmutableCacheItem<T>(serializer.Deserialize(new(value.Array!, 0, value.Length)));
-                value.RecycleIfAppropriate();
-            }
-            else
+            switch (CacheItem)
             {
-                // use the buffer directly as the backing in the cache-item; do *not* recycle now
-                var tmp = new MutableCacheItem<T>(ref value, serializer);
-                tmp.DebugTrackBuffer(Cache); // conditional: DEBUG
-                cacheItem = tmp;
+                case ImmutableCacheItem<T> immutable:
+                    // deserialize; and store object; buffer can be recycled now
+                    immutable.SetValue(serializer.Deserialize(new(value.Array!, 0, value.Length)));
+                    value.RecycleIfAppropriate();
+                    cacheItem = immutable;
+                    break;
+                case MutableCacheItem<T> mutable:
+                    // use the buffer directly as the backing in the cache-item; do *not* recycle now
+                    mutable.SetValue(ref value, serializer);
+                    mutable.DebugOnlyTrackBuffer(Cache);
+                    cacheItem = mutable;
+                    break;
+                default:
+                    cacheItem = ThrowUnexpectedCacheItem();
+                    break;
             }
-
             SetResult(cacheItem);
         }
 
+        [DoesNotReturn]
+        private static CacheItem<T> ThrowUnexpectedCacheItem() => throw new InvalidOperationException("Unexpected cache item");
+
         private CacheItem<T> SetResult(T value)
         {
             // set a result from a value we calculated directly
             CacheItem<T> cacheItem;
-            if (ImmutableTypeCache<T>.IsImmutable)
-            {
-                cacheItem = new ImmutableCacheItem<T>(value); // no serialize needed
-            }
-            else
+            switch (CacheItem)
             {
-                var tmp = new MutableCacheItem<T>(value, Cache.GetSerializer<T>(), MaximumPayloadBytes); // serialization happens here
-                tmp.DebugTrackBuffer(Cache); // conditional: DEBUG
-                cacheItem = tmp;
+                case ImmutableCacheItem<T> immutable:
+                    // no serialize needed
+                    immutable.SetValue(value);
+                    cacheItem = immutable;
+                    break;
+                case MutableCacheItem<T> mutable:
+                    // serialization happens here
+                    mutable.SetValue(value, Cache.GetSerializer<T>(), MaximumPayloadBytes);
+                    mutable.DebugOnlyTrackBuffer(Cache);
+                    cacheItem = mutable;
+                    break;
+                default:
+                    cacheItem = ThrowUnexpectedCacheItem();
+                    break;
             }
             SetResult(cacheItem);
             return cacheItem;
@@ -207,7 +225,7 @@ partial class DefaultHybridCache
 
         public override void SetCanceled() => _result?.TrySetCanceled(SharedToken);
 
-        internal ValueTask<T> UnwrapAsync()
+        internal ValueTask<T> UnwrapReservedAsync()
         {
             var task = Task;
 #if NETCOREAPP2_0_OR_GREATER || NETSTANDARD2_1_OR_GREATER
@@ -216,22 +234,23 @@ partial class DefaultHybridCache
             if (task.Status == TaskStatus.RanToCompletion)
 #endif
             {
-                return new(task.Result.GetValue());
+                return new(task.Result.GetReservedValue());
             }
 
-            // if the type is immutable, callers can share the final step too
+            // if the type is immutable, callers can share the final step too (this may leave dangling
+            // reservation counters, but that's OK)
             var result = ImmutableTypeCache<T>.IsImmutable ? (_sharedUnwrap ??= Awaited(Task)) : Awaited(Task);
             return new(result);
 
             static async Task<T> Awaited(Task<CacheItem<T>> task)
-                => (await task.ConfigureAwait(false)).GetValue();
+                => (await task.ConfigureAwait(false)).GetReservedValue();
         }
 
         public ValueTask<T> JoinAsync(CancellationToken token)
         {
             // if the underlying has already completed, and/or our local token can't cancel: we
             // can simply wrap the shared task; otherwise, we need our own cancellation state
-            return token.CanBeCanceled && !Task.IsCompleted ? WithCancellation(this, token) : UnwrapAsync();
+            return token.CanBeCanceled && !Task.IsCompleted ? WithCancellation(this, token) : UnwrapReservedAsync();
 
             static async ValueTask<T> WithCancellation(StampedeState<TState, T> stampede, CancellationToken token)
             {
@@ -241,6 +260,7 @@ partial class DefaultHybridCache
                     ((TaskCompletionSource<bool>)obj!).TrySetResult(true);
                 }, cancelStub);
 
+                CacheItem<T> result;
                 try
                 {
                     var first = await System.Threading.Tasks.Task.WhenAny(stampede.Task, cancelStub.Task).ConfigureAwait(false);
@@ -252,12 +272,15 @@ partial class DefaultHybridCache
                     Debug.Assert(ReferenceEquals(first, stampede.Task));
 
                     // this has already completed, but we'll get the stack nicely
-                    return (await stampede.Task.ConfigureAwait(false)).GetValue();
+                    result = await stampede.Task.ConfigureAwait(false);
                 }
-                finally
+                catch
                 {
-                    stampede.RemoveCaller();
+                    stampede.CancelCaller();
+                    throw;
                 }
+                // outside the catch, so we know we only decrement one way or the other
+                return result.GetReservedValue();
             }
         }
     }

+ 34 - 0
src/Caching/Hybrid/src/Internal/DefaultHybridCache.SyncLock.cs

@@ -0,0 +1,34 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+partial class DefaultHybridCache
+{
+    // HybridCache's stampede protection requires some level of synchronization to avoid unnecessary runs
+    // of the underlying data fetch; this is *minimized* by the use of double-checked locking and
+    // interlocked join (adding a new request to an existing execution), but: that would leave a race
+    // condition where the *remove* step of the stampede would be in a race with the *add new* step; the
+    // *add new* step is inside a lock, but we need the *remove* step to share that lock, to avoid
+    // the race. We deal with that by taking the same lock during remove, but *that* means we're locking
+    // on all executions.
+    //
+    // To minimize lock contention, we will therefore use partitioning of the lock-token, by using the
+    // low 3 bits of the hash-code (which we calculate eagerly only once, so: already known). This gives
+    // us a fast way to split contention by 8, almost an order-of-magnitude, which is sufficient. We *could*
+    // use an array for this, but: for directness, let's inline it instead (avoiding bounds-checks,
+    // an extra layer of dereferencing, and the allocation; I will acknowledge these are minuscule, but:
+    // it costs us nothing to do)
+
+    private readonly object _syncLock0 = new(), _syncLock1 = new(), _syncLock2 = new(), _syncLock3 = new(),
+        _syncLock4 = new(), _syncLock5 = new(), _syncLock6 = new(), _syncLock7 = new();
+
+    internal object GetPartitionedSyncLock(in StampedeKey key)
+        => (key.HashCode & 0b111) switch // generate 8 partitions using the low 3 bits
+        {
+            0 => _syncLock0, 1 => _syncLock1,
+            2 => _syncLock2, 3 => _syncLock3,
+            4 => _syncLock4, 5 => _syncLock5,
+            6 => _syncLock6, _ => _syncLock7,
+        };
+}

+ 1 - 1
src/Caching/Hybrid/src/Internal/DefaultHybridCache.cs

@@ -137,7 +137,7 @@ internal sealed partial class DefaultHybridCache : HybridCache
             {
                 // we're going to run to completion; no need to get complicated
                 _ = stampede.ExecuteDirectAsync(in state, underlyingDataCallback, options); // this larger task includes L2 write etc
-                return stampede.UnwrapAsync();
+                return stampede.UnwrapReservedAsync();
             }
         }
 

+ 16 - 0
src/Caching/Hybrid/src/Internal/RecyclableArrayBufferWriter.cs

@@ -13,6 +13,22 @@ namespace Microsoft.Extensions.Caching.Hybrid.Internal;
 // except it uses the array pool for allocations
 internal sealed class RecyclableArrayBufferWriter<T> : IBufferWriter<T>, IDisposable
 {
+    // Usage note: *normally* you might want to use "using" for this, and that is fine;
+    // however, caution should be exercised in exception scenarios where we don't 100%
+    // know that the caller has stopped touching the buffer; in particular, this means
+    // scenarios involving a combination of external code and (for example) "async".
+    // In those cases, it may be preferable to manually dispose in the success case,
+    // and just drop the buffers in the failure case, i.e. instead of:
+    //
+    // using (writer)
+    // { DoStuff(); }
+    //
+    // simply:
+    //
+    // DoStuff();
+    // writer.Dispose();
+    //
+    // This does not represent a problem, and is consistent with many ArrayPool use-cases.
 
     // Copy of Array.MaxLength.
     // Used by projects targeting .NET Framework.

+ 18 - 12
src/Caching/Hybrid/test/BufferReleaseTests.cs

@@ -29,17 +29,17 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
     {
         using var provider = GetDefaultCache(out var cache);
 #if DEBUG
-        cache.DebugGetOutstandingBuffers(flush: true);
+        cache.DebugOnlyGetOutstandingBuffers(flush: true);
 #endif
 
         var key = Me();
 #if DEBUG
-        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+        Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         var first = await cache.GetOrCreateAsync(key, _ => GetAsync());
         Assert.NotNull(first);
 #if DEBUG
-        Assert.Equal(1, cache.DebugGetOutstandingBuffers());
+        Assert.Equal(1, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
 
@@ -52,6 +52,7 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
         Assert.NotNull(second);
         Assert.NotSame(first, second);
 
+        Assert.Equal(1, cacheItem.RefCount);
         await cache.RemoveKeyAsync(key);
         var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
         Assert.Null(third);
@@ -62,10 +63,11 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
             await Task.Delay(250);
         }
 #if DEBUG
-        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+        Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         // assert that we can *no longer* reserve this buffer, because we've already recycled it
         Assert.False(cacheItem.TryReserveBuffer(out _));
+        Assert.Equal(0, cacheItem.RefCount);
         Assert.False(cacheItem.NeedsEvictionCallback, "should be recycled now");
         static ValueTask<Customer> GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
     }
@@ -116,13 +118,13 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
             cache.BackendCache.Set(key, writer.ToArray());
         }
 #if DEBUG
-        cache.DebugGetOutstandingBuffers(flush: true);
-        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+        cache.DebugOnlyGetOutstandingBuffers(flush: true);
+        Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         var first = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying); // we expect this to come from L2, hence NoUnderlying
         Assert.NotNull(first);
 #if DEBUG
-        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+        Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
 
@@ -135,6 +137,7 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
         Assert.NotNull(second);
         Assert.NotSame(first, second);
 
+        Assert.Equal(1, cacheItem.RefCount);
         await cache.RemoveKeyAsync(key);
         var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
         Assert.Null(third);
@@ -146,11 +149,12 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
             await Task.Delay(250);
         }
 #if DEBUG
-        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+        Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         // assert that we can *no longer* reserve this buffer, because we've already recycled it
         Assert.True(cacheItem.TryReserveBuffer(out _)); // always readable
         cacheItem.Release();
+        Assert.Equal(1, cacheItem.RefCount); // not decremented because there was no need to add the hook
 
         Assert.False(cacheItem.NeedsEvictionCallback, "should still not need recycling");
         static ValueTask<Customer> GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
@@ -172,13 +176,13 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
             cache.BackendCache.Set(key, writer.ToArray());
         }
 #if DEBUG
-        cache.DebugGetOutstandingBuffers(flush: true);
-        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+        cache.DebugOnlyGetOutstandingBuffers(flush: true);
+        Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         var first = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying); // we expect this to come from L2, hence NoUnderlying
         Assert.NotNull(first);
 #if DEBUG
-        Assert.Equal(1, cache.DebugGetOutstandingBuffers());
+        Assert.Equal(1, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
 
@@ -191,6 +195,7 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
         Assert.NotNull(second);
         Assert.NotSame(first, second);
 
+        Assert.Equal(1, cacheItem.RefCount);
         await cache.RemoveKeyAsync(key);
         var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
         Assert.Null(third);
@@ -202,10 +207,11 @@ public class BufferReleaseTests // note that buffer ref-counting is only enabled
             await Task.Delay(250);
         }
 #if DEBUG
-        Assert.Equal(0, cache.DebugGetOutstandingBuffers());
+        Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
 #endif
         // assert that we can *no longer* reserve this buffer, because we've already recycled it
         Assert.False(cacheItem.TryReserveBuffer(out _)); // released now
+        Assert.Equal(0, cacheItem.RefCount);
 
         Assert.False(cacheItem.NeedsEvictionCallback, "should be recycled by now");
         static ValueTask<Customer> GetAsync() => new(new Customer { Id = 42, Name = "Fred" });

+ 0 - 188
src/Caching/Hybrid/test/DisposableValueTests.cs

@@ -1,188 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Collections.Generic;
-using System.ComponentModel;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using System.Text;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Caching.Hybrid.Internal;
-using Microsoft.Extensions.DependencyInjection;
-
-namespace Microsoft.Extensions.Caching.Hybrid.Tests;
-public class DisposableValueTests
-{
-    // We can only reasonable be expected to be responsible for disposal (IDisposable) of objects
-    // if we're keeping hold of references, which means: things we consider immutable.
-    // It is noted that this creates an oddity whereby for *mutable* types, the caller needs to dispose
-    // per fetch (GetOrCreateAsync), and for *immutable* types: they're not - but that is unavoidable.
-    // In reality, it is expected to be pretty rare to hold disposable types here.
-
-    private static ServiceProvider GetCache(out DefaultHybridCache cache)
-    {
-        var services = new ServiceCollection();
-        services.AddHybridCache();
-        var provider = services.BuildServiceProvider();
-        cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
-        return provider;
-    }
-
-    [Fact]
-    public async void NonDisposableImmutableTypeDoesNotNeedEvictionCallback()
-    {
-        using var provider = GetCache(out var cache);
-        var key = Me();
-
-        var s = await cache.GetOrCreateAsync(key, _ => GetSomeString());
-        Assert.NotNull(s);
-        Assert.False(string.IsNullOrWhiteSpace(s));
-        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
-        Assert.True(cacheItem.DebugIsImmutable);
-        Assert.False(cacheItem.NeedsEvictionCallback);
-
-        static ValueTask<string> GetSomeString() => new(Guid.NewGuid().ToString());
-    }
-
-    [Fact]
-    public async void NonDisposableBlittableTypeDoesNotNeedEvictionCallback()
-    {
-        using var provider = GetCache(out var cache);
-        var key = Me();
-
-        var g = await cache.GetOrCreateAsync(key, _ => GetSomeGuid());
-        Assert.NotEqual(Guid.Empty, g);
-        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
-        Assert.True(cacheItem.DebugIsImmutable);
-        Assert.False(cacheItem.NeedsEvictionCallback);
-
-        static ValueTask<Guid> GetSomeGuid() => new(Guid.NewGuid());
-    }
-
-    [Fact]
-    public async Task DispsableRefTypeNeedsEvictionCallback()
-    {
-        using var provider = GetCache(out var cache);
-        var key = Me();
-
-        var inst = new DisposableTestClass();
-        var obj = await cache.GetOrCreateAsync(key, _ => new ValueTask<DisposableTestClass>(inst));
-        Assert.Same(inst, obj);
-        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
-        Assert.True(cacheItem.DebugIsImmutable);
-        Assert.True(cacheItem.NeedsEvictionCallback);
-
-        Assert.Equal(0, inst.DisposeCount);
-
-        // now remove it
-        await cache.RemoveKeyAsync(key);
-
-        // give it a moment for the eviction callback to kick in
-        for (var i = 0; i < 10; i++)
-        {
-            await Task.Delay(250);
-            if (inst.DisposeCount != 0)
-            {
-                break;
-            }
-        }
-        Assert.Equal(1, inst.DisposeCount);
-    }
-
-    [Fact]
-    public async Task DisposableValueTypeNeedsEvictionCallback()
-    {
-        using var provider = GetCache(out var cache);
-        var key = Me();
-
-        // disposal of value-type
-        var inst = new DisposableTestClass();
-        var v = await cache.GetOrCreateAsync(key, _ => new ValueTask<DisposableTestValue>(new DisposableTestValue(inst)));
-        Assert.Same(inst, v.Wrapped);
-        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
-        Assert.True(cacheItem.DebugIsImmutable);
-        Assert.True(cacheItem.NeedsEvictionCallback);
-
-        Assert.Equal(0, inst.DisposeCount);
-
-        // now remove it
-        await cache.RemoveKeyAsync(key);
-
-        // give it a moment for the eviction callback to kick in
-        for (var i = 0; i < 10; i++)
-        {
-            await Task.Delay(250);
-            if (inst.DisposeCount != 0)
-            {
-                break;
-            }
-        }
-        Assert.Equal(1, inst.DisposeCount);
-    }
-
-    [Fact]
-    public async Task NonDispsableRefTypeDoesNotNeedEvictionCallback()
-    {
-        using var provider = GetCache(out var cache);
-        var key = Me();
-
-        var inst = new NonDisposableTestClass();
-        var obj = await cache.GetOrCreateAsync(key, _ => new ValueTask<NonDisposableTestClass>(inst));
-        Assert.Same(inst, obj);
-        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
-        Assert.True(cacheItem.DebugIsImmutable);
-        Assert.False(cacheItem.NeedsEvictionCallback);
-    }
-
-    [Fact]
-    public async Task NonDisposableValueTypeDoesNotNeedEvictionCallback()
-    {
-        using var provider = GetCache(out var cache);
-        var key = Me();
-
-        // disposal of value-type
-        var inst = new DisposableTestClass();
-        var v = await cache.GetOrCreateAsync(key, _ => new ValueTask<NonDisposableTestValue>(new NonDisposableTestValue(inst)));
-        Assert.Same(inst, v.Wrapped);
-        Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
-        Assert.True(cacheItem.DebugIsImmutable);
-        Assert.False(cacheItem.NeedsEvictionCallback);
-    }
-
-    [ImmutableObject(true)]
-    public sealed class DisposableTestClass : IDisposable
-    {
-        private int _disposeCount;
-        public void Dispose() => Interlocked.Increment(ref _disposeCount);
-
-        public int DisposeCount => Volatile.Read(ref _disposeCount);
-    }
-
-    [ImmutableObject(true)]
-    public readonly struct DisposableTestValue : IDisposable
-    {
-        public DisposableTestClass Wrapped { get; }
-        public DisposableTestValue(DisposableTestClass inner) => Wrapped = inner;
-        public void Dispose() => Wrapped.Dispose();
-    }
-
-    [ImmutableObject(true)]
-    public sealed class NonDisposableTestClass
-    {
-        private int _disposeCount;
-        public void Dispose() => Interlocked.Increment(ref _disposeCount);
-
-        public int DisposeCount => Volatile.Read(ref _disposeCount);
-    }
-
-    [ImmutableObject(true)]
-    public readonly struct NonDisposableTestValue
-    {
-        public DisposableTestClass Wrapped { get; }
-        public NonDisposableTestValue(DisposableTestClass inner) => Wrapped = inner;
-        public void Dispose() => Wrapped.Dispose();
-    }
-
-    private static string Me([CallerMemberName] string caller = "") => caller;
-}

+ 200 - 0
src/Caching/Hybrid/test/SampleUsage.cs

@@ -0,0 +1,200 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Linq;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.DependencyInjection;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+
+public class SampleUsage
+{
+    [Fact]
+    public async void DistributedCacheWorks()
+    {
+        var services = new ServiceCollection();
+        services.AddDistributedMemoryCache();
+        services.AddTransient<SomeDCService>();
+        using var provider = services.BuildServiceProvider();
+
+        var obj = provider.GetRequiredService<SomeDCService>();
+        string name = "abc";
+        int id = 42;
+        var x = await obj.GetSomeInformationAsync(name, id);
+        var y = await obj.GetSomeInformationAsync(name, id);
+        Assert.NotSame(x, y);
+        Assert.Equal(id, x.Id);
+        Assert.Equal(name, x.Name);
+        Assert.Equal(id, y.Id);
+        Assert.Equal(name, y.Name);
+    }
+
+    [Fact]
+    public async void HybridCacheWorks()
+    {
+        var services = new ServiceCollection();
+        services.AddHybridCache();
+        services.AddTransient<SomeHCService>();
+        using var provider = services.BuildServiceProvider();
+
+        var obj = provider.GetRequiredService<SomeHCService>();
+        string name = "abc";
+        int id = 42;
+        var x = await obj.GetSomeInformationAsync(name, id);
+        var y = await obj.GetSomeInformationAsync(name, id);
+        Assert.NotSame(x, y);
+        Assert.Equal(id, x.Id);
+        Assert.Equal(name, x.Name);
+        Assert.Equal(id, y.Id);
+        Assert.Equal(name, y.Name);
+    }
+
+    [Fact]
+    public async void HybridCacheNoCaptureWorks()
+    {
+        var services = new ServiceCollection();
+        services.AddHybridCache();
+        services.AddTransient<SomeHCServiceNoCapture>();
+        using var provider = services.BuildServiceProvider();
+
+        var obj = provider.GetRequiredService<SomeHCServiceNoCapture>();
+        string name = "abc";
+        int id = 42;
+        var x = await obj.GetSomeInformationAsync(name, id);
+        var y = await obj.GetSomeInformationAsync(name, id);
+        Assert.NotSame(x, y);
+        Assert.Equal(id, x.Id);
+        Assert.Equal(name, x.Name);
+        Assert.Equal(id, y.Id);
+        Assert.Equal(name, y.Name);
+    }
+
+    [Fact]
+    public async void HybridCacheNoCaptureObjReuseWorks()
+    {
+        var services = new ServiceCollection();
+        services.AddHybridCache();
+        services.AddTransient<SomeHCServiceNoCaptureObjReuse>();
+        using var provider = services.BuildServiceProvider();
+
+        var obj = provider.GetRequiredService<SomeHCServiceNoCaptureObjReuse>();
+        string name = "abc";
+        int id = 42;
+        var x = await obj.GetSomeInformationAsync(name, id);
+        var y = await obj.GetSomeInformationAsync(name, id);
+        Assert.Same(x, y);
+        Assert.Equal(id, x.Id);
+        Assert.Equal(name, x.Name);
+    }
+
+    public class SomeDCService(IDistributedCache cache)
+    {
+        public async Task<SomeInformation> GetSomeInformationAsync(string name, int id, CancellationToken token = default)
+        {
+            var key = $"someinfo:{name}:{id}"; // unique key for this combination
+
+            var bytes = await cache.GetAsync(key, token); // try to get from cache
+            SomeInformation info;
+            if (bytes is null)
+            {
+                // cache miss; get the data from the real source
+                info = await SomeExpensiveOperationAsync(name, id, token);
+
+                // serialize and cache it
+                bytes = SomeSerializer.Serialize(info);
+                await cache.SetAsync(key, bytes, token);
+            }
+            else
+            {
+                // cache hit; deserialize it
+                info = SomeSerializer.Deserialize<SomeInformation>(bytes);
+            }
+            return info;
+        }
+    }
+
+    public class SomeHCService(HybridCache cache)
+    {
+        public async Task<SomeInformation> GetSomeInformationAsync(string name, int id, CancellationToken token = default)
+        {
+            return await cache.GetOrCreateAsync(
+                $"someinfo:{name}:{id}", // unique key for this combination
+                async ct => await SomeExpensiveOperationAsync(name, id, ct),
+                token: token
+                );
+        }
+    }
+
+    // this is the work we're trying to cache
+    private static Task<SomeInformation> SomeExpensiveOperationAsync(string name, int id,
+        CancellationToken token = default)
+    {
+        return Task.FromResult(new SomeInformation { Id = id, Name = name });
+    }
+    private static Task<SomeInformationReuse> SomeExpensiveOperationReuseAsync(string name, int id,
+    CancellationToken token = default)
+    {
+        return Task.FromResult(new SomeInformationReuse { Id = id, Name = name });
+    }
+
+    public class SomeHCServiceNoCapture(HybridCache cache)
+    {
+        public async Task<SomeInformation> GetSomeInformationAsync(string name, int id, CancellationToken token = default)
+        {
+            return await cache.GetOrCreateAsync(
+                $"someinfo:{name}:{id}", // unique key for this combination
+                (name, id), // all of the state we need for the final call, if needed
+                static async (state, token) =>
+                    await SomeExpensiveOperationAsync(state.name, state.id, token),
+                token: token
+            );
+        }
+    }
+
+    public class SomeHCServiceNoCaptureObjReuse(HybridCache cache, CancellationToken token = default)
+    {
+        public async Task<SomeInformationReuse> GetSomeInformationAsync(string name, int id)
+        {
+            return await cache.GetOrCreateAsync(
+                $"someinfo:{name}:{id}", // unique key for this combination
+                (name, id), // all of the state we need for the final call, if needed
+                static async (state, token) =>
+                    await SomeExpensiveOperationReuseAsync(state.name, state.id, token),
+                token: token
+            );
+        }
+    }
+
+    static class SomeSerializer
+    {
+        internal static T Deserialize<T>(byte[] bytes)
+        {
+            return JsonSerializer.Deserialize<T>(bytes)!;
+        }
+
+        internal static byte[] Serialize<T>(T info)
+        {
+            using var ms = new MemoryStream();
+            JsonSerializer.Serialize(ms, info);
+            return ms.ToArray();
+        }
+    }
+    public class SomeInformation
+    {
+        public int Id { get; set; }
+        public string? Name { get; set; }
+    }
+
+    [ImmutableObject(true)]
+    public sealed class SomeInformationReuse
+    {
+        public int Id { get; set; }
+        public string? Name { get; set; }
+    }
+}

+ 32 - 0
src/Caching/Hybrid/test/StampedeTests.cs

@@ -365,6 +365,38 @@ public class StampedeTests
         Assert.NotSame(x, y);
     }
 
+    [Fact]
+    public void ValidatePartitioning()
+    {
+        // we just want to validate that key-level partitioning is
+        // happening to some degree, i.e. it isn't fundamentally broken
+        using var scope = GetDefaultCache(out var cache);
+        Dictionary<object, int> counts = [];
+        for(int i = 0; i < 1024; i++)
+        {
+            var key = new DefaultHybridCache.StampedeKey(Guid.NewGuid().ToString(), default);
+            var obj = cache.GetPartitionedSyncLock(in key);
+            if (!counts.TryGetValue(obj, out var count))
+            {
+                count = 0;
+            }
+            counts[obj] = count + 1;
+        }
+
+        // We just want to prove that we got 8 non-empty partitions.
+        // This is *technically* non-deterministic, but: we'd
+        // need to be having a very bad day for the math gods
+        // to conspire against us that badly - if this test
+        // starts failing, maybe buy a lottery ticket?
+        Assert.Equal(8, counts.Count);
+        foreach (var pair in counts)
+        {
+            // the *median* should be 128 here; let's
+            // not be aggressive about it, though
+            Assert.True(pair.Value > 16);
+        }
+    }
+
     class Mutable(Guid value)
     {
         public Guid Value => value;

+ 1 - 1
src/Caching/SqlServer/src/DatabaseOperations.cs

@@ -308,7 +308,7 @@ internal sealed class DatabaseOperations : IDatabaseOperations
         return value;
     }
 
-    static long StreamOut(SqlDataReader source, int ordinal, IBufferWriter<byte> destination)
+    private static long StreamOut(SqlDataReader source, int ordinal, IBufferWriter<byte> destination)
     {
         long dataIndex = 0;
         int read = 0;