diff --git a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs index dc592913..dc22398c 100644 --- a/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs +++ b/BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs @@ -348,6 +348,58 @@ public void WhenKeysAreContinuouslyRequestedInTheOrderTheyAreAddedCountIsBounded testOutputHelper.WriteLine($"Total: {lru.Count} Hot: {lru.HotCount} Warm: {lru.WarmCount} Cold: {lru.ColdCount}"); lru.Count.Should().BeLessOrEqualTo(capacity + 1); } + } + + public class KeysInOrderTestDataGenerator : IEnumerable<object[]> + { + private readonly List<object[]> _data = new List<object[]> + { + new object[] { new EqualCapacityPartition(hotCap + warmCap + coldCap) }, + new object[] { new EqualCapacityPartition(128) }, + new object[] { new EqualCapacityPartition(256) }, + new object[] { new EqualCapacityPartition(1024) }, + new object[] { new FavorWarmPartition(128) }, + new object[] { new FavorWarmPartition(256) }, + new object[] { new FavorWarmPartition(1024) }, + new object[] { new FavorWarmPartition(128, 0.6) }, + new object[] { new FavorWarmPartition(256, 0.6) }, + new object[] { new FavorWarmPartition(1024, 0.6) }, + //new object[] { new FavorWarmPartition(10*1024, 0.6) }, + //new object[] { new FavorWarmPartition(100*1024, 0.6) }, + }; + + public IEnumerator<object[]> GetEnumerator() => _data.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); + } + + [Theory] + [ClassData(typeof(KeysInOrderTestDataGenerator))] + public void WhenKeysAreContinuouslyRequestedInTheOrderTheyAreAddedCountIsBounded2(ICapacityPartition p) + { + int capacity = p.Hot + p.Cold + p.Warm; + lru = new ConcurrentLru<int, string>(capacity, p, EqualityComparer<int>.Default); + + testOutputHelper.WriteLine($"Capacity: {lru.Capacity} (Hot: {p.Hot} Warm: {p.Warm} Cold: {p.Cold})"); + + for (int i = 0; i < capacity + 10; i++) + { + lru.GetOrAdd(i, valueFactory.Create); + + // Touch all items already cached in hot, warm and cold. 
+ // This is worst case scenario, since we touch them in the exact order they + // were added. + for (int j = 0; j < i; j++) + { + lru.GetOrAdd(j, valueFactory.Create); + } + } + + // For larger cache sizes, I have observed capacity + 5. This is linked to the number of attempts. + // This is clearly a bug that needs further investigation, but considered not harmful at this point + // since growth is bounded, we just allow 4 more items than we should in the absolute worst case. + testOutputHelper.WriteLine($"Total: {lru.Count} Hot: {lru.HotCount} Warm: {lru.WarmCount} Cold: {lru.ColdCount}"); + lru.Count.Should().BeLessOrEqualTo(capacity + 1); } [Fact] diff --git a/BitFaster.Caching/Lru/ConcurrentLruCore.cs b/BitFaster.Caching/Lru/ConcurrentLruCore.cs index f26b4fce..fc0eac56 100644 --- a/BitFaster.Caching/Lru/ConcurrentLruCore.cs +++ b/BitFaster.Caching/Lru/ConcurrentLruCore.cs @@ -541,13 +541,14 @@ private void Cycle(int hotCount) // Attempt to recover. It is possible that multiple threads read the same queue count here, // so this process has races that could reduce cache size below capacity. This manifests // in 'off by one' which is considered harmless. 
- (dest, count) = CycleWarm(Volatile.Read(ref counter.warm)); + + (dest, count) = CycleCold(Volatile.Read(ref counter.cold)); if (dest != ItemDestination.Remove) { continue; } - (dest, count) = CycleCold(Volatile.Read(ref counter.cold)); + (dest, count) = CycleWarm(Volatile.Read(ref counter.warm)); if (dest != ItemDestination.Remove) { continue; @@ -637,7 +638,7 @@ private void CycleDuringWarmup(int hotCount) [MethodImpl(MethodImplOptions.AggressiveInlining)] private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason) { - Interlocked.Decrement(ref this.counter.warm); + int wc = Interlocked.Decrement(ref this.counter.warm); if (this.warmQueue.TryDequeue(out var item)) { @@ -646,7 +647,7 @@ private void CycleDuringWarmup(int hotCount) // When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold. // This only happens when hit rate is high, in which case we can consider all items relatively equal in // terms of which was least recently used. - if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm) + if (where == ItemDestination.Warm && wc <= this.capacity.Warm) { return (ItemDestination.Warm, this.Move(item, where, removedReason)); }