Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 52 additions & 0 deletions BitFaster.Caching.UnitTests/Lru/ConcurrentLruTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -348,6 +348,58 @@ public void WhenKeysAreContinuouslyRequestedInTheOrderTheyAreAddedCountIsBounded
testOutputHelper.WriteLine($"Total: {lru.Count} Hot: {lru.HotCount} Warm: {lru.WarmCount} Cold: {lru.ColdCount}");
lru.Count.Should().BeLessOrEqualTo(capacity + 1);
}
}

// Supplies ICapacityPartition test cases to xUnit [ClassData] theories:
// equal splits plus warm-favoring splits (default and 0.6 warm ratio)
// at several total capacities.
public class KeysInOrderTestDataGenerator : IEnumerable<object[]>
{
    // Each element is the single-argument row passed to the theory.
    private readonly List<object[]> cases = new List<object[]>();

    public KeysInOrderTestDataGenerator()
    {
        Add(new EqualCapacityPartition(hotCap + warmCap + coldCap));
        Add(new EqualCapacityPartition(128));
        Add(new EqualCapacityPartition(256));
        Add(new EqualCapacityPartition(1024));
        Add(new FavorWarmPartition(128));
        Add(new FavorWarmPartition(256));
        Add(new FavorWarmPartition(1024));
        Add(new FavorWarmPartition(128, 0.6));
        Add(new FavorWarmPartition(256, 0.6));
        Add(new FavorWarmPartition(1024, 0.6));
        //Add(new FavorWarmPartition(10*1024, 0.6));
        //Add(new FavorWarmPartition(100*1024, 0.6));
    }

    // Wraps a partition into the object[] row shape xUnit expects.
    private void Add(ICapacityPartition partition) => cases.Add(new object[] { partition });

    public IEnumerator<object[]> GetEnumerator() => cases.GetEnumerator();

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
}

[Theory]
[ClassData(typeof(KeysInOrderTestDataGenerator))]
public void WhenKeysAreContinuouslyRequestedInTheOrderTheyAreAddedCountIsBounded2(ICapacityPartition p)
{
    // Drive the cache with the worst-case access pattern for each partition:
    // after every insert, re-touch all previously inserted keys in the exact
    // order they were added, then verify the item count stays bounded.
    int capacity = p.Hot + p.Cold + p.Warm;
    lru = new ConcurrentLru<int, string>(capacity, p, EqualityComparer<int>.Default);

    testOutputHelper.WriteLine($"Capacity: {lru.Capacity} (Hot: {p.Hot} Warm: {p.Warm} Cold: {p.Cold})");

    int totalKeys = capacity + 10;

    for (int key = 0; key < totalKeys; key++)
    {
        lru.GetOrAdd(key, valueFactory.Create);

        // Touch every item already cached in hot, warm and cold, in the
        // exact order they were added - the worst case for cycling.
        for (int previous = 0; previous < key; previous++)
        {
            lru.GetOrAdd(previous, valueFactory.Create);
        }
    }

    // For larger cache sizes, capacity + 5 has been observed; this is linked
    // to the number of attempts. Considered not harmful since growth is
    // bounded, so the assertion permits one extra item in the worst case.
    testOutputHelper.WriteLine($"Total: {lru.Count} Hot: {lru.HotCount} Warm: {lru.WarmCount} Cold: {lru.ColdCount}");
    lru.Count.Should().BeLessOrEqualTo(capacity + 1);
}

[Fact]
Expand Down
9 changes: 5 additions & 4 deletions BitFaster.Caching/Lru/ConcurrentLruCore.cs
Original file line number Diff line number Diff line change
Expand Up @@ -541,13 +541,14 @@ private void Cycle(int hotCount)
// Attempt to recover. It is possible that multiple threads read the same queue count here,
// so this process has races that could reduce cache size below capacity. This manifests
// in 'off by one' which is considered harmless.
(dest, count) = CycleWarm(Volatile.Read(ref counter.warm));

(dest, count) = CycleCold(Volatile.Read(ref counter.cold));
if (dest != ItemDestination.Remove)
{
continue;
}

(dest, count) = CycleCold(Volatile.Read(ref counter.cold));
(dest, count) = CycleWarm(Volatile.Read(ref counter.warm));
if (dest != ItemDestination.Remove)
{
continue;
Expand Down Expand Up @@ -637,7 +638,7 @@ private void CycleDuringWarmup(int hotCount)
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private (ItemDestination, int) CycleWarmUnchecked(ItemRemovedReason removedReason)
{
Interlocked.Decrement(ref this.counter.warm);
int wc = Interlocked.Decrement(ref this.counter.warm);

if (this.warmQueue.TryDequeue(out var item))
{
Expand All @@ -646,7 +647,7 @@ private void CycleDuringWarmup(int hotCount)
// When the warm queue is full, we allow an overflow of 1 item before redirecting warm items to cold.
// This only happens when hit rate is high, in which case we can consider all items relatively equal in
// terms of which was least recently used.
if (where == ItemDestination.Warm && Volatile.Read(ref this.counter.warm) <= this.capacity.Warm)
if (where == ItemDestination.Warm && wc <= this.capacity.Warm)
{
return (ItemDestination.Warm, this.Move(item, where, removedReason));
}
Expand Down