EXPIRATION_TIME = config_instance().CACHE_SETTINGS.CACHE_DEFAULT_TIMEOUT


+class RedisErrorManager:
+    def __init__(self, use_redis: bool = True):
+        self.use_redis: bool = use_redis
+        self._permanent_off = use_redis
+
+        self.cache_errors: int = 0
+
+        self.error_threshold: int = 10
+        self.min_error_threshold: int = 5
+        self.initial_off_time: int = 60
+        self.max_off_time: int = 3600
+        self.time_since_last_error: int = 0
+
+    async def turn_off_redis(self, off_time: int):
+        self.use_redis = False
+        self.time_since_last_error = 0
+        # additional code to shut down Redis or perform other tasks
+        if off_time == 0:
+            self._permanent_off = False
+            return
+
+        await asyncio.sleep(off_time)
+
+    async def turn_on_redis(self):
+        self.use_redis = True
+        # additional code to initialize Redis or perform other tasks
+
+    async def check_error_threshold(self):
+        if self.cache_errors >= self.error_threshold and self.time_since_last_error <= self.max_off_time:
+            off_time = self.initial_off_time * 2 ** (self.cache_errors - self.min_error_threshold)
+            off_time = min(off_time, self.max_off_time)
+            await self.turn_off_redis(off_time)
+        elif self.cache_errors < self.min_error_threshold and not self.use_redis:
+            await self.turn_on_redis()
+        else:
+            self.time_since_last_error += 1
+
+    async def increment_cache_errors(self):
+        self.cache_errors += 1
+
+    async def can_use_redis(self):
+        return self.use_redis and self._permanent_off
+
+
class Cache:
    """
    A class to handle caching of data, both in-memory and in Redis.
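
Aside on the class added above: check_error_threshold derives the off time exponentially from the error count, initial_off_time * 2 ** (cache_errors - min_error_threshold), capped at max_off_time. A minimal sketch of that arithmetic with the defaults from this diff (60s initial, minimum threshold 5, cap 3600s); the helper name backoff_seconds is illustrative only, not part of the PR:

def backoff_seconds(cache_errors: int,
                    initial_off_time: int = 60,
                    min_error_threshold: int = 5,
                    max_off_time: int = 3600) -> int:
    # mirrors RedisErrorManager.check_error_threshold: double the off time for
    # every error beyond the minimum threshold, never exceeding max_off_time
    return min(initial_off_time * 2 ** (cache_errors - min_error_threshold), max_off_time)

# with those defaults: 10 errors -> 1920s, 11 errors -> 3600s (cap reached)
assert backoff_seconds(10) == 1920
assert backoff_seconds(11) == 3600

So once the error count reaches error_threshold (10), Redis is switched off for roughly half an hour, and each further error doubles the window until the one-hour cap.
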
@@ -38,12 +82,13 @@ def __init__(self, cache_name: str = "mem", max_size: int = MEM_CACHE_SIZE, expi
        self.max_size = max_size
        self.expiration_time = expiration_time
        self._cache_name = cache_name
+        self.redis_errors = RedisErrorManager(use_redis=use_redis)
        self._cache = {}
        self._cache_lock = threading.Lock()
-        self._use_redis = use_redis
+
        self._logger = init_logger(camel_to_snake(self.__class__.__name__))

-        if self._use_redis:
+        if self.redis_errors.use_redis:
            redis_host = config_instance().REDIS_CACHE.CACHE_REDIS_HOST
            redis_port = config_instance().REDIS_CACHE.CACHE_REDIS_PORT
            password = config_instance().REDIS_CACHE.REDIS_PASSWORD
@@ -58,14 +103,11 @@ def __init__(self, cache_name: str = "mem", max_size: int = MEM_CACHE_SIZE, expi
                config_instance().DEBUG and self._logger.info("Cache -- Redis connected")
            except (ConnectionError, AuthenticationError):
                config_instance().DEBUG and self._logger.error(msg="Redis failed to connect....")
-                self.turn_off_redis()
+                self.redis_errors.turn_off_redis(off_time=0)

    @property
-    async def can_use_redis(self):
-        return self._use_redis
-
-    async def turn_off_redis(self):
-        self._use_redis = False
+    async def can_use_redis(self) -> bool:
+        return await self.redis_errors.can_use_redis()

    async def on_delete(self):
        """
@@ -87,7 +129,7 @@ async def _serialize_value(self, value: Any, default=None) -> str:
        except (JSONDecodeError, pickle.PicklingError):
            config_instance().DEBUG and self._logger.error(f"Serializer Error")
            return default
-        except TypeError as e:
+        except TypeError:
            config_instance().DEBUG and self._logger.error(f"Serializer Error")
            return default

@@ -142,10 +184,11 @@ async def set(self, key: str, value: Any, ttl: int = 0):
            await self._remove_oldest_entry()

        try:
-            if self._use_redis:
+            if await self.redis_errors.can_use_redis():
                self._redis_client.set(key, value, ex=exp_time)
        except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError):
            # TODO -- keep a count of redis errors; if they pass a threshold then switch off redis
+            await self.redis_errors.increment_cache_errors()
            pass
        try:
            await self._set_mem_cache(key=key, value=value, ttl=exp_time)
@@ -187,20 +230,23 @@ async def _async_redis_get(get: Callable, _key: str):
            value = await asyncio.wait_for(self._get_memcache(key=key), timeout=timeout)
        except (asyncio.TimeoutError, KeyError):
            # Timed out waiting for the memcache lookup, or KeyError - as a result of cache eviction
+            await self.redis_errors.increment_cache_errors()
            value = None

        # will only try and return a value in redis if memcache value does not exist
-        if self._use_redis and (value is None):
+        if await self.redis_errors.can_use_redis() and (value is None):
            try:
                # Wait for the result of the redis lookup with a timeout
                redis_get = functools.partial(_async_redis_get, get=self._redis_client.get)
                value = await asyncio.wait_for(redis_get(_key=key), timeout=timeout)
            except (redis.exceptions.TimeoutError, asyncio.TimeoutError):
                # Timed out waiting for the redis lookup
                config_instance().DEBUG and self._logger.error("Timeout Error Reading from redis")
+                await self.redis_errors.increment_cache_errors()
                value = None
            except redis.exceptions.ConnectionError:
                config_instance().DEBUG and self._logger.error("ConnectionError Reading from redis")
+                await self.redis_errors.increment_cache_errors()
                value = None

        return await self._deserialize_value(value, value) if value else None
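
For reference, a rough sketch of how the pieces added in this diff are meant to interact. The Cache methods only increment the error counter; nothing shown here calls check_error_threshold, so some caller would still have to invoke it for the back-off to trigger. The import path and the driver function below are assumptions for illustration only:

import asyncio

from cache import RedisErrorManager  # hypothetical import path for the class added above


async def main():
    errors = RedisErrorManager(use_redis=True)

    # simulate the set()/get() paths above reporting ten consecutive Redis failures
    for _ in range(10):
        await errors.increment_cache_errors()

    # check_error_threshold awaits turn_off_redis, which sleeps for the whole
    # back-off window (1920s at ten errors), so schedule it as a background task
    asyncio.create_task(errors.check_error_threshold())
    await asyncio.sleep(0)               # yield so the task runs up to its sleep
    print(await errors.can_use_redis())  # False while Redis is switched off


asyncio.run(main())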