Azure cache GetOrAdd (without locking)

Inspired by the non-locking implementation proposed in this thread: Locking during cache population, the following code is an attempt to do (roughly) the same using the Azure cache. As I'm totally green on the Azure cache, I'd appreciate input on best practices or pitfalls. I should add that this method is assumed to be the only interaction with the Azure cache in the application.
public class AzureCacheWrapper
{
    private readonly DataCache cache;

    public AzureCacheWrapper(DataCacheFactory cacheFactory)
    {
        this.cache = cacheFactory.GetDefaultCache();
    }

    public T GetOrAdd<T>(string key, Func<T> factoryMethod)
    {
        var factoryMethodAsLazy = new Lazy<T>(factoryMethod);
        var cachedFactory = cache.Get(key) as Lazy<T>;
        if (cachedFactory == null)
        {
            try
            {
                cache.Add(key, factoryMethodAsLazy);
                cachedFactory = factoryMethodAsLazy;
            }
            catch (DataCacheException ex)
            {
                if (ex.ErrorCode != DataCacheErrorCode.KeyAlreadyExists)
                {
                    throw;
                }

                // We know for sure that the key exists at this point:
                // two concurrent callers tried to add it, and this thread
                // happened to be the one that lost the race.
                cachedFactory = (Lazy<T>)cache.Get(key);
            }
        }

        return cachedFactory.Value;
    }
}
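For context, here is a minimal sketch of how I picture the wrapper being used. The Product type, the product key format, and the LoadProductFromDatabase helper are just illustrative assumptions, not part of the wrapper itself:

// Created once and reused, since constructing a DataCacheFactory is relatively expensive.
var cacheWrapper = new AzureCacheWrapper(new DataCacheFactory());

// The factory delegate is only meant to run when "product:42" is not yet cached;
// a caller that loses the Add race falls back to the Lazy<T> stored by the winner.
Product product = cacheWrapper.GetOrAdd("product:42", () => LoadProductFromDatabase(42));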