C#C
C#8mo ago
Core

LRU cache thread safe implementation

Hello,
I have a basic implementation of a LRU cache, but it's not thread safe. A dictionary has a thread safe implementation, but not a list. Currently I haven't used the lock.

There are 2 options:
  1. Use ConcurrentDictionary, and use the lock only for operations on the list (not atomic, but could anything go wrong) ?
  2. Use Dictionary, and use the lock for both operations (this is atomic for sure)
I kinda feel like the 2nd option is better, what do you think?

c#
public sealed class UaDetectorMemoryCache<T>
{
    private readonly UaDetectorCacheOptions _cacheOptions;
    private readonly ConcurrentDictionary<string, LinkedListNode<T>> _cache = new();
    private readonly LinkedList<T> _lruList = [];
    private readonly object _lock = new();

    public UaDetectorMemoryCache(UaDetectorCacheOptions cacheOptions)
    {
        _cacheOptions = cacheOptions;
    }
    
    public bool TryGet(string key, [NotNullWhen(true)] out T? result)
    {
        if (_cache.TryGetValue(key, out var node))
        {
            _lruList.Remove(node);
            _lruList.AddFirst(node);
            
            result = node.Value;
        }
        else
        {
            result = default;
        }
        
        return result is not null;
    } 

    public void Put(string key, T value)
    {
        if (_cache.TryGetValue(key, out var node))
        {
            _lruList.Remove(node);
            _lruList.AddFirst(node);
        }
        else
        {
            node = new LinkedListNode<T>(value);

            if (_cache.Count == _cacheOptions.Capacity)
            {
                _lruList.RemoveLast();
            }

            _lruList.AddFirst(node);
            _cache.TryAdd(key, node);
        }
    } 
}
Was this page helpful?