Locking pattern for proper use of the .NET MemoryCache

I assume this code has concurrency issues:

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    string expensiveString = null;
    if (MemoryCache.Default.Contains(CacheKey))
    {
        expensiveString = MemoryCache.Default[CacheKey] as string;
    }
    else
    {
        CacheItemPolicy cip = new CacheItemPolicy()
        {
            AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
        };
        expensiveString = SomeHeavyAndExpensiveCalculation();
        MemoryCache.Default.Set(CacheKey, expensiveString, cip);
    }
    return expensiveString;
}

The reason for the concurrency issue is that multiple threads can get a cache miss (a null value for the key) and then all try to insert the data into the cache.

What is the shortest and cleanest way to make this code concurrency-proof? I like to follow a good pattern across my cache-related code. A link to an online article would be a great help.

Update:

I came up with the code below based on @Scott Chamberlain's answer. Can anyone find any performance or concurrency problems with it? If this works, it would save many lines of code and errors.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.Caching;

namespace CachePoc
{
    class Program
    {
        static object everoneUseThisLockObject4CacheXYZ = new object();
        const string CacheXYZ = "CacheXYZ";
        static object everoneUseThisLockObject4CacheABC = new object();
        const string CacheABC = "CacheABC";

        static void Main(string[] args)
        {
            string xyzData = MemoryCacheHelper.GetCachedData<string>(CacheXYZ, everoneUseThisLockObject4CacheXYZ, 20, SomeHeavyAndExpensiveXYZCalculation);
            string abcData = MemoryCacheHelper.GetCachedData<string>(CacheABC, everoneUseThisLockObject4CacheABC, 20, SomeHeavyAndExpensiveABCCalculation);
        }

        private static string SomeHeavyAndExpensiveXYZCalculation() { return "Expensive"; }
        private static string SomeHeavyAndExpensiveABCCalculation() { return "Expensive"; }

        public static class MemoryCacheHelper
        {
            public static T GetCachedData<T>(string cacheKey, object cacheLock, int cacheTimePolicyMinutes, Func<T> GetData)
                where T : class
            {
                //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
                T cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                if (cachedData != null)
                {
                    return cachedData;
                }

                lock (cacheLock)
                {
                    //Check to see if anyone wrote to the cache while we were waiting our turn to write the new value.
                    cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                    if (cachedData != null)
                    {
                        return cachedData;
                    }

                    //The value still did not exist so we now write it in to the cache.
                    CacheItemPolicy cip = new CacheItemPolicy()
                    {
                        AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(cacheTimePolicyMinutes))
                    };
                    cachedData = GetData();
                    MemoryCache.Default.Set(cacheKey, cachedData, cip);
                    return cachedData;
                }
            }
        }
    }
}

This is my second iteration of the code. Because MemoryCache is thread-safe, you don't need to lock on the initial read: just read, and if the cache returns null, do the lock check to see if you need to create the string. It greatly simplifies the code.

const string CacheKey = "CacheKey";
static readonly object cacheLock = new object();
private static string GetCachedData()
{
    //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
    var cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

    if (cachedString != null)
    {
        return cachedString;
    }

    lock (cacheLock)
    {
        //Check to see if anyone wrote to the cache while we were waiting our turn to write the new value.
        cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

        if (cachedString != null)
        {
            return cachedString;
        }

        //The value still did not exist so we now write it in to the cache.
        var expensiveString = SomeHeavyAndExpensiveCalculation();
        CacheItemPolicy cip = new CacheItemPolicy()
        {
            AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
        };
        MemoryCache.Default.Set(CacheKey, expensiveString, cip);
        return expensiveString;
    }
}

Edit: The code below is unnecessary, but I wanted to leave it to show the original method. It may be useful to future visitors who are using a different collection that has thread-safe reads but non-thread-safe writes (almost all of the classes under the System.Collections namespace are like that).

Here is how I would do it using ReaderWriterLockSlim to protect access. You need to do a kind of "double-checked locking" to see if anyone else created the cached item while we were waiting to take the lock.

const string CacheKey = "CacheKey";
static readonly ReaderWriterLockSlim cacheLock = new ReaderWriterLockSlim();
static string GetCachedData()
{
    //First we do a read lock to see if it already exists, this allows multiple readers at the same time.
    cacheLock.EnterReadLock();
    try
    {
        //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
        var cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

        if (cachedString != null)
        {
            return cachedString;
        }
    }
    finally
    {
        cacheLock.ExitReadLock();
    }

    //Only one UpgradeableReadLock can exist at one time, but it can co-exist with many ReadLocks
    cacheLock.EnterUpgradeableReadLock();
    try
    {
        //We need to check again to see if the string was created while we were waiting to enter the EnterUpgradeableReadLock
        var cachedString = MemoryCache.Default.Get(CacheKey, null) as string;

        if (cachedString != null)
        {
            return cachedString;
        }

        //The entry still does not exist so we need to create it and enter the write lock
        var expensiveString = SomeHeavyAndExpensiveCalculation();
        cacheLock.EnterWriteLock(); //This will block till all the Readers flush.
        try
        {
            CacheItemPolicy cip = new CacheItemPolicy()
            {
                AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
            };
            MemoryCache.Default.Set(CacheKey, expensiveString, cip);
            return expensiveString;
        }
        finally
        {
            cacheLock.ExitWriteLock();
        }
    }
    finally
    {
        cacheLock.ExitUpgradeableReadLock();
    }
}

I solved this issue by making use of the AddOrGetExisting method on MemoryCache together with lazy initialization.

In essence, my code looks something like this:

static string GetCachedData(string key, DateTimeOffset offset)
{
    Lazy<String> lazyObject = new Lazy<String>(() => SomeHeavyAndExpensiveCalculationThatReturnsAString());
    var returnedLazyObject = MemoryCache.Default.AddOrGetExisting(key, lazyObject, offset);
    if (returnedLazyObject == null)
        return lazyObject.Value;
    return ((Lazy<String>)returnedLazyObject).Value;
}

The worst-case scenario here is that you create the same Lazy object twice, but that is pretty trivial. The use of AddOrGetExisting guarantees that you only ever get one instance of the Lazy object, so you are also guaranteed that the expensive initialization method is only invoked once.
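If you want to reuse this pattern for arbitrary value types, a small generic wrapper might look like the sketch below (the helper name GetOrAddLazy and its shape are my own, not part of the original answer). LazyThreadSafetyMode.ExecutionAndPublication, which is also the Lazy<T> default, guarantees the factory runs at most once per Lazy instance:

using System;
using System.Runtime.Caching;
using System.Threading;

public static class LazyCacheHelper
{
    // Hypothetical helper generalizing the Lazy<T> + AddOrGetExisting pattern shown above.
    public static T GetOrAddLazy<T>(string key, Func<T> valueFactory, DateTimeOffset absoluteExpiration)
    {
        // ExecutionAndPublication ensures valueFactory runs at most once for this Lazy instance.
        var newValue = new Lazy<T>(valueFactory, LazyThreadSafetyMode.ExecutionAndPublication);

        // AddOrGetExisting returns null if our Lazy was inserted, or the already-cached Lazy otherwise.
        var existing = (Lazy<T>)MemoryCache.Default.AddOrGetExisting(key, newValue, absoluteExpiration);

        return (existing ?? newValue).Value;
    }
}

One caveat to keep in mind: if valueFactory throws, Lazy<T> caches the exception, so a failed computation stays poisoned in the cache until the entry expires or is removed.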

I assume this code has concurrency issues:

Actually, it's quite possibly fine, though with a possible improvement.

Now, in general, the pattern where multiple threads set a shared value on first use, without locking on the value being obtained and set, can be:

  1. Disastrous: other code will assume only one instance exists.
  2. Disastrous: the code that obtains the instance cannot tolerate more than one (or perhaps a certain small number of) concurrent operations.
  3. Disastrous: the means of storage is not thread-safe (e.g. two threads adding to a dictionary can produce all sorts of nasty errors).
  4. Sub-optimal: the overall performance is worse than if locking had ensured only one thread did the work of obtaining the value.
  5. Optimal: the cost of having multiple threads do redundant work is less than the cost of preventing it, especially since that can only happen during a relatively brief period.

However, considering here that MemoryCache may evict entries:

  1. If it is disastrous to have more than one instance, then MemoryCache is the wrong approach.
  2. If you must prevent simultaneous creation, you should do so at the point of creation.
  3. MemoryCache is thread-safe in terms of access to that object, so that is not a concern here.

Both of these possibilities have to be thought about, of course, though the only time having two instances of the same string can be a problem is if you're doing very particular optimizations that don't apply here*.

So, we're left with the possibilities:

  1. It is cheaper to avoid the cost of duplicate calls to SomeHeavyAndExpensiveCalculation().
  2. It is cheaper not to avoid the cost of duplicate calls to SomeHeavyAndExpensiveCalculation().

Working that out can be difficult (indeed, it's the sort of thing worth profiling rather than assuming you can reason it out). It's worth considering here, though, that the most obvious ways of locking on insert will prevent all additions to the cache, including those that are unrelated.

This means that if we had 50 threads trying to set 50 different values, we would have to make all 50 threads wait on each other, even though they weren't even going to do the same calculation (see the sketch below).
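To make that concrete, here is a minimal sketch of what a single global lock looks like (my illustration, not code from the answer); the globalCacheLock field and helper name are invented for the example, and the point is that two callers with different keys still serialize on the same lock:

static readonly object globalCacheLock = new object();

static T GetCachedData<T>(string cacheKey, Func<T> getData) where T : class
{
    var cached = MemoryCache.Default.Get(cacheKey) as T;
    if (cached != null)
        return cached;

    // One lock for the whole cache: a thread computing "ReportA" blocks a thread computing "ReportB".
    lock (globalCacheLock)
    {
        cached = MemoryCache.Default.Get(cacheKey) as T;
        if (cached != null)
            return cached;

        cached = getData();
        MemoryCache.Default.Set(cacheKey, cached, DateTimeOffset.Now.AddMinutes(20));
        return cached;
    }
}

A per-key lock (as in some of the answers below) avoids exactly this cross-key contention.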

As such, you're probably better off with the code you have than with code that avoids the race condition; and if the race condition is a problem, you quite likely either need to handle it somewhere else, or need a different caching strategy than one that expels old entries†.

The one thing I would change is that I'd replace the call to Set() with a call to AddOrGetExisting(). From the above it should be clear that it probably isn't necessary, but it would allow the newly obtained item to be collected, reducing overall memory use and allowing a higher ratio of low-generation to high-generation collections.
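Applied to the code from the question, that suggestion might look like this sketch (my adaptation rather than code posted in the answer); if another thread wins the race, AddOrGetExisting hands back the value that is already cached and the locally computed string simply becomes collectible garbage:

const string CacheKey = "CacheKey";

static string GetCachedData()
{
    var cached = MemoryCache.Default[CacheKey] as string;
    if (cached != null)
        return cached;

    var cip = new CacheItemPolicy
    {
        AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
    };

    var expensiveString = SomeHeavyAndExpensiveCalculation();

    // Returns null if our value was inserted, or the existing entry if another thread beat us to it.
    var existing = MemoryCache.Default.AddOrGetExisting(CacheKey, expensiveString, cip) as string;
    return existing ?? expensiveString;
}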

So yes, you could use double-checked locking to prevent concurrency, but either the concurrency isn't actually a problem, or you're storing the values in the wrong way, or double-locking on the store isn't the best way to solve it.

*If you know only one of each of a set of strings exists, you can optimize equality comparisons, which is about the only time having two copies of a string can be incorrect rather than just sub-optimal; but you'd want to be doing very different types of caching for that to make sense, e.g. the sort XmlReader does internally.

†Quite likely either stored indefinitely, or making use of weak references so it will only expel entries when there are no existing uses.

There is an open-source library [disclaimer: I wrote it]: LazyCache, which IMO covers your requirement with two lines of code:

IAppCache cache = new CachingService();
var cachedResults = cache.GetOrAdd("CacheKey",
    () => SomeHeavyAndExpensiveCalculation());

It has built-in locking by default, so the cacheable method will only execute once per cache miss, and it uses a lambda so you can do "get or add" in one go. It defaults to a 20-minute expiration.
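If you need a lifetime other than the default, newer LazyCache versions also offer GetOrAdd overloads that accept an expiration; this is a hedged sketch based on my reading of the library, so verify the exact overloads against the version you install:

IAppCache cache = new CachingService();

// Absolute expiration: the factory runs at most once per cache miss, and the entry lives for 5 minutes.
var cachedResults = cache.GetOrAdd(
    "CacheKey",
    () => SomeHeavyAndExpensiveCalculation(),
    DateTimeOffset.Now.AddMinutes(5));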

There's even a NuGet package ;)

A console example of MemoryCache: "How to save/get simple class objects"

Output after launching and then pressing any key except Esc:

Saving to cache!
Getting from cache!
some1
some2

class Some
{
    public String text { get; set; }

    public Some(String text)
    {
        this.text = text;
    }

    public override string ToString()
    {
        return text;
    }
}

public static MemoryCache cache = new MemoryCache("cache");

public static string cache_name = "mycache";

static void Main(string[] args)
{
    Some some1 = new Some("some1");
    Some some2 = new Some("some2");

    List<Some> list = new List<Some>();
    list.Add(some1);
    list.Add(some2);

    do
    {
        if (cache.Contains(cache_name))
        {
            Console.WriteLine("Getting from cache!");
            List<Some> list_c = cache.Get(cache_name) as List<Some>;
            foreach (Some s in list_c) Console.WriteLine(s);
        }
        else
        {
            Console.WriteLine("Saving to cache!");
            cache.Set(cache_name, list, DateTime.Now.AddMinutes(10));
        }
    } while (Console.ReadKey(true).Key != ConsoleKey.Escape);
}
public interface ILazyCacheProvider : IAppCache
{
    /// <summary>
    /// Get data loaded - always return the cached result (even when the data is older than needed), but very fast!
    /// </summary>
    /// <param name="key"></param>
    /// <param name="getData"></param>
    /// <param name="slidingExpiration"></param>
    /// <typeparam name="T"></typeparam>
    /// <returns></returns>
    T GetOrAddPermanent<T>(string key, Func<T> getData, TimeSpan slidingExpiration);
}

/// <summary>
/// Initialize LazyCache in runtime
/// </summary>
public class LazzyCacheProvider : CachingService, ILazyCacheProvider
{
    private readonly Logger _logger = LogManager.GetLogger("MemCashe");
    private readonly Hashtable _hash = new Hashtable();
    private readonly List<string> _reloader = new List<string>();
    private readonly ConcurrentDictionary<string, DateTime> _lastLoad = new ConcurrentDictionary<string, DateTime>();

    T ILazyCacheProvider.GetOrAddPermanent<T>(string dataKey, Func<T> getData, TimeSpan slidingExpiration)
    {
        var currentPrincipal = Thread.CurrentPrincipal;
        if (!ObjectCache.Contains(dataKey) && !_hash.Contains(dataKey))
        {
            _hash[dataKey] = null;
            _logger.Debug($"{dataKey} - first start");
            _lastLoad[dataKey] = DateTime.Now;
            _hash[dataKey] = ((object)GetOrAdd(dataKey, getData, slidingExpiration)).CloneObject();
            _lastLoad[dataKey] = DateTime.Now;
            _logger.Debug($"{dataKey} - first");
        }
        else
        {
            if ((!ObjectCache.Contains(dataKey) || _lastLoad[dataKey].AddMinutes(slidingExpiration.Minutes) < DateTime.Now) && _hash[dataKey] != null)
                Task.Run(() =>
                {
                    if (_reloader.Contains(dataKey)) return;
                    lock (_reloader)
                    {
                        if (ObjectCache.Contains(dataKey))
                        {
                            if (_lastLoad[dataKey].AddMinutes(slidingExpiration.Minutes) > DateTime.Now)
                                return;
                            _lastLoad[dataKey] = DateTime.Now;
                            Remove(dataKey);
                        }
                        _reloader.Add(dataKey);
                        Thread.CurrentPrincipal = currentPrincipal;
                        _logger.Debug($"{dataKey} - reload start");
                        _hash[dataKey] = ((object)GetOrAdd(dataKey, getData, slidingExpiration)).CloneObject();
                        _logger.Debug($"{dataKey} - reload");
                        _reloader.Remove(dataKey);
                    }
                });
        }
        if (_hash[dataKey] != null) return (T)(_hash[dataKey]);

        _logger.Debug($"{dataKey} - dummy start");
        var data = GetOrAdd(dataKey, getData, slidingExpiration);
        _logger.Debug($"{dataKey} - dummy");
        return (T)((object)data).CloneObject();
    }
}

A bit late to the party, but here is a full implementation:

[HttpGet]
public async Task<HttpResponseMessage> GetPageFromUriOrBody(RequestQuery requestQuery)
{
    log(nameof(GetPageFromUriOrBody), nameof(requestQuery));
    var responseResult = await _requestQueryCache.GetOrCreate(
        nameof(GetPageFromUriOrBody)
        , requestQuery
        , (x) => getPageContent(x).Result);
    return Request.CreateResponse(System.Net.HttpStatusCode.Accepted, responseResult);
}
static MemoryCacheWithPolicy<RequestQuery, string> _requestQueryCache = new MemoryCacheWithPolicy<RequestQuery, string>();

This is the signature of getPageContent:

async Task<string> getPageContent(RequestQuery requestQuery);

And this is the MemoryCacheWithPolicy implementation:

public class MemoryCacheWithPolicy<TParameter, TResult>
{
    static ILogger _nlogger = new AppLogger().Logger;
    private MemoryCache _cache = new MemoryCache(new MemoryCacheOptions()
    {
        //Size limit amount: this is actually a memory size limit value!
        SizeLimit = 1024
    });

    /// <summary>
    /// Gets or creates a new memory cache record for a main data
    /// along with parameter data that is associated with the main model.
    /// </summary>
    /// <param name="key">Main data cache memory key.</param>
    /// <param name="param">Parameter model that is associated to the main model (request result).</param>
    /// <param name="createCacheData">A delegate to create a new main data to cache.</param>
    /// <returns></returns>
    public async Task<TResult> GetOrCreate(object key, TParameter param, Func<TParameter, TResult> createCacheData)
    {
        // this key is used for param cache memory.
        var paramKey = key + nameof(param);

        if (!_cache.TryGetValue(key, out TResult cacheEntry))
        {
            // key is not in the cache, create data through the delegate.
            cacheEntry = createCacheData(param);
            createMemoryCache(key, cacheEntry, paramKey, param);

            _nlogger.Warn(" cache is created.");
        }
        else
        {
            // data is cached so far..., check if the param model is the same (or changed)?
            if (!_cache.TryGetValue(paramKey, out TParameter cacheParam))
            {
                //exception: this case should not happen!
            }

            if (!cacheParam.Equals(param))
            {
                // request param is changed, create data through the delegate.
                cacheEntry = createCacheData(param);
                createMemoryCache(key, cacheEntry, paramKey, param);
                _nlogger.Warn(" cache is re-created (param model has been changed).");
            }
            else
            {
                _nlogger.Trace(" cache is used.");
            }
        }
        return await Task.FromResult<TResult>(cacheEntry);
    }

    MemoryCacheEntryOptions createMemoryCacheEntryOptions(TimeSpan slidingOffset, TimeSpan relativeOffset)
    {
        // Cache data within [slidingOffset] seconds,
        // request new result after [relativeOffset] seconds.
        return new MemoryCacheEntryOptions()

            // Size amount: this is actually an entry count per
            // key limit value! not an actual memory size value!
            .SetSize(1)

            // Priority on removing when reaching size limit (memory pressure)
            .SetPriority(CacheItemPriority.High)

            // Keep in cache for this amount of time, reset it if accessed.
            .SetSlidingExpiration(slidingOffset)

            // Remove from cache after this time, regardless of sliding expiration
            .SetAbsoluteExpiration(relativeOffset);
    }

    void createMemoryCache(object key, TResult cacheEntry, object paramKey, TParameter param)
    {
        // Cache data within 2 seconds,
        // request new result after 5 seconds.
        var cacheEntryOptions = createMemoryCacheEntryOptions(
            TimeSpan.FromSeconds(2)
            , TimeSpan.FromSeconds(5));

        // Save data in cache.
        _cache.Set(key, cacheEntry, cacheEntryOptions);

        // Save param in cache.
        _cache.Set(paramKey, param, cacheEntryOptions);
    }

    void checkCacheEntry<T>(object key, string name)
    {
        _cache.TryGetValue(key, out T value);
        _nlogger.Fatal("Key: {0}, Name: {1}, Value: {2}", key, name, value);
    }
}

nlogger is just an nLog object used to trace the MemoryCacheWithPolicy behavior. The memory cache is re-created if the request object (RequestQuery requestQuery) changes through the delegate (Func<TParameter, TResult> createCacheData), or re-created when the sliding or absolute time reaches its limit. Note that everything is async as well ;)

It's hard to say which is better, lock or ReaderWriterLockSlim. You need real-world statistics of read and write counts, ratios, etc.

But if you believe using "lock" is the correct way, then here is a different solution for different needs. I also include Allan Xu's solution in the code, because both can be needed for different scenarios.

Here are the requirements that drove me to this solution:

  1. You don't want to or cannot supply the 'GetData' function for some reason. Perhaps the 'GetData' function is located in some other class with a heavy constructor, and you don't even want to create an instance until you're sure it is unavoidable.
  2. You need to access the same cached data from different locations/tiers of the application, and those different locations don't have access to the same locker object.
  3. You don't have constant cache keys. For example: you need to cache some data using a sessionId as the cache key.

Code:

using System;
using System.Runtime.Caching;
using System.Collections.Concurrent;
using System.Collections.Generic;

namespace CachePoc
{
    class Program
    {
        static object everoneUseThisLockObject4CacheXYZ = new object();
        const string CacheXYZ = "CacheXYZ";
        static object everoneUseThisLockObject4CacheABC = new object();
        const string CacheABC = "CacheABC";

        static void Main(string[] args)
        {
            //Allan Xu's usage
            string xyzData = MemoryCacheHelper.GetCachedDataOrAdd<string>(CacheXYZ, everoneUseThisLockObject4CacheXYZ, 20, SomeHeavyAndExpensiveXYZCalculation);
            string abcData = MemoryCacheHelper.GetCachedDataOrAdd<string>(CacheABC, everoneUseThisLockObject4CacheABC, 20, SomeHeavyAndExpensiveABCCalculation);

            //My usage
            string sessionId = System.Web.HttpContext.Current.Session["CurrentUser.SessionId"].ToString();
            string yvz = MemoryCacheHelper.GetCachedData<string>(sessionId);
            if (string.IsNullOrWhiteSpace(yvz))
            {
                object locker = MemoryCacheHelper.GetLocker(sessionId);
                lock (locker)
                {
                    yvz = MemoryCacheHelper.GetCachedData<string>(sessionId);
                    if (string.IsNullOrWhiteSpace(yvz))
                    {
                        DatabaseRepositoryWithHeavyConstructorOverHead dbRepo = new DatabaseRepositoryWithHeavyConstructorOverHead();
                        yvz = dbRepo.GetDataExpensiveDataForSession(sessionId);
                        MemoryCacheHelper.AddDataToCache(sessionId, yvz, 5);
                    }
                }
            }
        }

        private static string SomeHeavyAndExpensiveXYZCalculation() { return "Expensive"; }
        private static string SomeHeavyAndExpensiveABCCalculation() { return "Expensive"; }

        public static class MemoryCacheHelper
        {
            //Allan Xu's solution
            public static T GetCachedDataOrAdd<T>(string cacheKey, object cacheLock, int minutesToExpire, Func<T> GetData) where T : class
            {
                //Returns null if the string does not exist, prevents a race condition where the cache invalidates between the contains check and the retrieval.
                T cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                if (cachedData != null)
                    return cachedData;

                lock (cacheLock)
                {
                    //Check to see if anyone wrote to the cache while we were waiting our turn to write the new value.
                    cachedData = MemoryCache.Default.Get(cacheKey, null) as T;

                    if (cachedData != null)
                        return cachedData;

                    cachedData = GetData();
                    MemoryCache.Default.Set(cacheKey, cachedData, DateTime.Now.AddMinutes(minutesToExpire));
                    return cachedData;
                }
            }

            #region "My Solution"

            readonly static ConcurrentDictionary<string, object> Lockers = new ConcurrentDictionary<string, object>();
            public static object GetLocker(string cacheKey)
            {
                CleanupLockers();

                return Lockers.GetOrAdd(cacheKey, item => (cacheKey, new object()));
            }

            public static T GetCachedData<T>(string cacheKey) where T : class
            {
                CleanupLockers();

                T cachedData = MemoryCache.Default.Get(cacheKey) as T;
                return cachedData;
            }

            public static void AddDataToCache(string cacheKey, object value, int cacheTimePolicyMinutes)
            {
                CleanupLockers();

                MemoryCache.Default.Add(cacheKey, value, DateTimeOffset.Now.AddMinutes(cacheTimePolicyMinutes));
            }

            static DateTimeOffset lastCleanUpTime = DateTimeOffset.MinValue;
            static void CleanupLockers()
            {
                if (DateTimeOffset.Now.Subtract(lastCleanUpTime).TotalMinutes > 1)
                {
                    lock (Lockers) //maybe a better locker is needed?
                    {
                        try //bypass exceptions
                        {
                            List<string> lockersToRemove = new List<string>();
                            foreach (var locker in Lockers)
                            {
                                if (!MemoryCache.Default.Contains(locker.Key))
                                    lockersToRemove.Add(locker.Key);
                            }

                            object dummy;
                            foreach (string lockerKey in lockersToRemove)
                                Lockers.TryRemove(lockerKey, out dummy);

                            lastCleanUpTime = DateTimeOffset.Now;
                        }
                        catch (Exception)
                        { }
                    }
                }
            }

            #endregion
        }
    }

    class DatabaseRepositoryWithHeavyConstructorOverHead
    {
        internal string GetDataExpensiveDataForSession(string sessionId)
        {
            return "Expensive data from database";
        }
    }
}

To avoid the global lock, you can use SingletonCache to implement one lock per key, without exploding memory usage (the lock objects are removed when no longer referenced, and acquire/release is thread-safe, guaranteeing via compare-and-swap that only one instance is ever in use).

Using it looks like this:

static readonly SingletonCache<string, object> keyLocks = new SingletonCache<string, object>();

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    string expensiveString = null;
    if (MemoryCache.Default.Contains(CacheKey))
    {
        return MemoryCache.Default[CacheKey] as string;
    }

    // double checked lock
    using (var lifetime = keyLocks.Acquire(CacheKey))
    {
        lock (lifetime.Value)
        {
            if (MemoryCache.Default.Contains(CacheKey))
            {
                return MemoryCache.Default[CacheKey] as string;
            }

            CacheItemPolicy cip = new CacheItemPolicy()
            {
                AbsoluteExpiration = new DateTimeOffset(DateTime.Now.AddMinutes(20))
            };
            expensiveString = SomeHeavyAndExpensiveCalculation();
            MemoryCache.Default.Set(CacheKey, expensiveString, cip);
            return expensiveString;
        }
    }
}

The code is on GitHub: https://github.com/bitfaster/BitFaster.Caching

Install-Package BitFaster.Caching

There is also an LRU implementation that is lighter weight than MemoryCache and has several advantages: faster concurrent reads and writes, bounded size, no background thread, internal perf counters, etc. (disclaimer: I wrote it).
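For reference, here is a minimal usage sketch of that LRU based on my understanding of the BitFaster.Caching API (treat the exact type and method names as something to double-check against the project's README):

using BitFaster.Caching.Lru;

class LruExample
{
    // Bounded cache: once the capacity of 128 entries is exceeded, least recently used items are evicted.
    private static readonly ConcurrentLru<string, string> lru = new ConcurrentLru<string, string>(128);

    public static string GetCachedData()
    {
        // The value factory is invoked only on a cache miss.
        return lru.GetOrAdd("CacheKey", key => SomeHeavyAndExpensiveCalculation());
    }

    private static string SomeHeavyAndExpensiveCalculation() => "Expensive";
}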

A slightly old question, but maybe still useful: you may want to take a look at FusionCache, which I recently released.

The feature you are looking for is described here, and you can use it like this:

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    return fusionCache.GetOrSet(
        CacheKey,
        _ => SomeHeavyAndExpensiveCalculation(),
        TimeSpan.FromMinutes(20)
    );
}

You may also find some of the other features interesting, such as fail-safe and advanced timeouts with background factory completion, plus support for an optional, distributed 2nd-level cache.

If you decide to give it a try, please let me know what you think.

/shameless-plug