class CacheModule:
    """A read-through cache backed by an LRU store.

    Missing keys are fetched in bulk via ``from_remote``; keys the remote
    does not return are negative-cached with an empty-dict sentinel so they
    are not re-fetched on every lookup.
    """

    def __init__(self, name, max_size=128, from_remote=None):
        """
        :param name: label reported by :meth:`statistics`
        :param max_size: maximum number of entries held in the LRU cache
        :param from_remote: bulk loader ``f(keys) -> dict`` mapping each
            key that exists remotely to its value
        """
        self.local_cache = LRU(max_size=max_size)
        self.from_remote = from_remote
        # Sentinel stored for keys the remote reported as missing
        # (negative caching).
        self.empty = {}
        self.name = name

    def get_all(self, keys: list) -> dict:
        """Return ``{key: value}`` for every key found locally or remotely.

        Keys known to be missing (negative-cached) are omitted.
        """
        result, missing_keys = {}, []
        for key in keys:
            val = self.local_cache.get(key)
            if val is not None:
                # The empty sentinel means "known missing" -- skip it.
                if val != self.empty:
                    result[key] = val
            else:
                missing_keys.append(key)
        # Cache misses: fetch them from the remote source in one call.
        if missing_keys:
            result = {**result, **self.reload(missing_keys)}
        return result

    def remove(self, keys: list):
        """Evict the given keys from the local cache.

        Raises KeyError if a key is not cached (same as the original
        direct ``__delitem__`` call).
        """
        for key in keys:
            # Use the del statement instead of invoking the dunder directly.
            del self.local_cache[key]

    def statistics(self):
        """Return a snapshot of the cache's hit/miss statistics."""
        total = self.local_cache.hit_count + self.local_cache.miss_count
        # Guard against division by zero before any lookup has happened,
        # and scale to a percentage to match the trailing "%" suffix.
        hit_rate = self.local_cache.hit_count / total * 100 if total else 0.0
        return {
            "name": self.name,
            "hit_count": self.local_cache.hit_count,
            "miss_count": self.local_cache.miss_count,
            "size": len(self.local_cache.keys()),
            "hitRateAsString": '%.3f' % hit_rate + "%",
        }

    def reload(self, keys: list) -> dict:
        """Fetch *keys* from the remote source and cache the results.

        Keys the remote does not return are negative-cached with the empty
        sentinel so repeated lookups do not hit the remote again.
        """
        d = {}
        missing_vals = self.from_remote(keys)
        if missing_vals:
            for key, val in missing_vals.items():
                self.local_cache.setdefault(key, val)
                d[key] = val
        # Any key still absent from the cache did not exist remotely:
        # mark it missing so the next get_all() skips the remote call.
        for missing_key in keys:
            self.local_cache.setdefault(missing_key, self.empty)
        return d
class Cache(MutableMapping):
    """Cache that supports saving the items to files

    Set `cache_location` to save all newly set items to .npy files in
    cache_location.

    .. warning ::

        Items that have been set before setting `cache_location` won't be
        saved to files!
    """

    def __init__(self, cache_location=None, pickle_cache=False,
                 memory_cache_size=None):
        """
        :param cache_location: directory for .npy spill files (created lazily)
        :param pickle_cache: if True, the in-memory cache is included when
            the object is pickled (see ``__getstate__``)
        :param memory_cache_size: if set, bound the in-memory cache with an LRU
        """
        if memory_cache_size:
            self._cache = LRU(max_size=memory_cache_size)
        else:
            self._cache = {}
        # Remember the size so clear()/__setstate__ can rebuild the same
        # kind of cache (fixes a NameError in the original __setstate__).
        self.memory_cache_size = memory_cache_size
        self.cache_location = cache_location
        self.pickle_cache = pickle_cache

    def clear(self):
        """Clear the in-memory cache; files on disk are left untouched."""
        # Rebuild the same cache type instead of always downgrading to a
        # plain dict, which would silently drop the LRU size bound.
        if self.memory_cache_size:
            self._cache = LRU(max_size=self.memory_cache_size)
        else:
            self._cache = {}

    def filename(self, key):
        """Return the on-disk path for *key* inside ``cache_location``."""
        return os.path.join(self.cache_location, '{}.npy'.format(key))

    def __getitem__(self, key):
        if key not in self._cache:
            # Fall back to the on-disk copy, if we have a cache directory.
            if self.cache_location is not None:
                filename = self.filename(key)
                if os.path.exists(filename):
                    value = np.load(filename)
                    self._cache[key] = value
                else:
                    raise KeyError(
                        'Key {} neither in cache nor on disk'.format(key))
        return self._cache[key]

    def __setitem__(self, key, value):
        if not isinstance(key, str):
            raise TypeError('Only string keys are supported right now!')
        if self.cache_location is not None:
            if not os.path.exists(self.cache_location):
                os.makedirs(self.cache_location)
            filename = self.filename(key)
            np.save(filename, value)
        self._cache[key] = value

    def __delitem__(self, key):
        # Remove the spilled file first (best effort), then the memory entry.
        if self.cache_location is not None:
            filename = self.filename(key)
            if os.path.exists(filename):
                os.remove(filename)
        del self._cache[key]

    def __iter__(self):
        if self.cache_location is not None:
            # Keys are the union of in-memory keys and on-disk .npy stems,
            # without yielding any key twice.
            filenames = iglob(self.filename('*'))
            keys = map(
                lambda f: os.path.splitext(os.path.basename(f))[0], filenames)
            new_keys = filterfalse(lambda key: key in self._cache.keys(), keys)
            return chain(iter(self._cache), new_keys)
        else:
            # Py3 replacement for the legacy iterkeys() helper.
            return iter(self._cache)

    def __len__(self):
        i = iter(self)
        return len(list(i))

    def __getstate__(self):
        # We don't want to pickle the (possibly huge) cache contents
        # unless explicitly requested.
        state = dict(self.__dict__)
        if not self.pickle_cache:
            state.pop('_cache')
        return state

    def __setstate__(self, state):
        # `_cache` may have been dropped by __getstate__; rebuild it with
        # the same type it originally had (fixes the NameError caused by
        # referencing a bare, undefined `memory_cache_size`).
        if '_cache' not in state:
            if state.get('memory_cache_size'):
                state['_cache'] = LRU(max_size=state['memory_cache_size'])
            else:
                state['_cache'] = {}
        self.__dict__ = dict(state)