def __init__(self, gc):
    """Bind a HardCacheBackend built from *gc* as this cache's backend."""
    self.backend = HardCacheBackend(gc)
class HardCache(CacheUtils):
    """Cache layer backed by permanent ("hard") storage.

    Keys are strings of the form "<category>-<ids>": the first dash splits
    the key into a backend *category* and an *ids* string, and the backend
    is addressed with the two parts separately.  NoneResult sentinels are
    never written here -- negative-result caching is handled by other
    members of the cache chain.

    NOTE(review): this definition was whitespace-mangled onto a single
    line in the original file (a syntax error); the code below restores
    the statement sequence verbatim with conventional formatting.
    """

    backend = None
    permanent = True

    def __init__(self, gc):
        # gc is presumably a globals/config object consumed by the
        # backend -- confirm against HardCacheBackend's constructor.
        self.backend = HardCacheBackend(gc)

    def _split_key(self, key):
        """Split "category-ids" into (category, ids).

        Raises ValueError when *key* contains no dash.  Only the first
        dash splits; the ids portion may itself contain dashes.
        """
        tokens = key.split("-", 1)
        if len(tokens) != 2:
            raise ValueError("key %s has no dash" % key)
        category, ids = tokens
        return category, ids

    def set(self, key, val, time=0):
        """Store val under key, silently skipping NoneResult sentinels."""
        if val == NoneResult:
            # NoneResult caching is for other parts of the chain
            return
        category, ids = self._split_key(key)
        self.backend.set(category, ids, val, time)

    def simple_get_multi(self, keys):
        """Fetch many keys at once.

        Keys are grouped by category so the backend can be queried per
        category, and each category's id list is chunked (50 at a time)
        to bound the size of any single backend request.  Returns a dict
        of whatever the backend reports as found.
        """
        results = {}
        category_bundles = {}
        for key in keys:
            category, ids = self._split_key(key)
            category_bundles.setdefault(category, []).append(ids)
        for category in category_bundles:
            idses = category_bundles[category]
            chunks = in_chunks(idses, size=50)
            for chunk in chunks:
                new_results = self.backend.get_multi(category, chunk)
                results.update(new_results)
        return results

    def set_multi(self, keys, prefix='', time=0):
        """Store each (k, v) in *keys* (a dict), prefixing every key.

        NoneResult values are skipped, matching set()'s behavior.
        """
        for k, v in keys.iteritems():
            if v != NoneResult:
                self.set(prefix + str(k), v, time=time)

    def get(self, key, default=None):
        """Return the cached value for key, or *default* when absent."""
        category, ids = self._split_key(key)
        r = self.backend.get(category, ids)
        if r is None:
            return default
        return r

    def delete(self, key, time=0):
        # Potential optimization: When on a negative-result caching chain,
        # shove NoneResult throughout the chain when a key is deleted.
        category, ids = self._split_key(key)
        self.backend.delete(category, ids)

    def add(self, key, value, time=0):
        """Store value only if key is not already present; returns the backend's result."""
        category, ids = self._split_key(key)
        return self.backend.add(category, ids, value, time=time)

    def incr(self, key, delta=1, time=0):
        """Atomically increment the value at key by delta via the backend."""
        category, ids = self._split_key(key)
        return self.backend.incr(category, ids, delta=delta, time=time)
# NOTE(review): this is a second, near-identical copy of the HardCache
# class already defined earlier in this file.  Python will simply rebind
# the name, so only this later definition takes effect.  One copy should
# almost certainly be removed -- confirm against version history before
# deleting either.
class HardCache(CacheUtils):
    """Permanent-storage cache keyed by "category-ids" strings.

    The first dash in a key separates the backend category from the id
    string.  NoneResult sentinel values are never persisted here; that
    negative-result bookkeeping belongs to other links in the cache chain.

    NOTE(review): the original definition was whitespace-mangled onto one
    physical line (a syntax error); formatting is restored below without
    changing the statement sequence's behavior.
    """

    backend = None
    permanent = True

    def __init__(self, gc):
        # Backend construction delegates entirely to HardCacheBackend;
        # gc is presumably a globals/config object -- confirm at caller.
        self.backend = HardCacheBackend(gc)

    def _split_key(self, key):
        """Return (category, ids) from "category-ids"; ValueError if dash-less."""
        category, sep, ids = key.partition("-")
        if not sep:
            raise ValueError("key %s has no dash" % key)
        return category, ids

    def set(self, key, val, time=0):
        """Persist val under key unless it is the NoneResult sentinel."""
        if val == NoneResult:
            # NoneResult caching is for other parts of the chain
            return
        category, ids = self._split_key(key)
        self.backend.set(category, ids, val, time)

    def simple_get_multi(self, keys):
        """Bulk fetch: group ids by category, query the backend 50 ids at a time."""
        by_category = {}
        for key in keys:
            category, ids = self._split_key(key)
            by_category.setdefault(category, []).append(ids)

        found = {}
        for category, id_list in by_category.items():
            for batch in in_chunks(id_list, size=50):
                found.update(self.backend.get_multi(category, batch))
        return found

    def set_multi(self, keys, prefix='', time=0):
        """Store every non-NoneResult (k, v) from the dict *keys*, prefixed."""
        for k, v in keys.iteritems():
            if v == NoneResult:
                continue
            self.set(prefix + str(k), v, time=time)

    def get(self, key, default=None):
        """Look up key; a backend miss (None) yields *default* instead."""
        category, ids = self._split_key(key)
        hit = self.backend.get(category, ids)
        return default if hit is None else hit

    def delete(self, key, time=0):
        # Potential optimization: When on a negative-result caching chain,
        # shove NoneResult throughout the chain when a key is deleted.
        category, ids = self._split_key(key)
        self.backend.delete(category, ids)

    def add(self, key, value, time=0):
        """Add-if-absent semantics, forwarded straight to the backend."""
        category, ids = self._split_key(key)
        return self.backend.add(category, ids, value, time=time)

    def incr(self, key, delta=1, time=0):
        """Increment the stored counter at key by delta via the backend."""
        category, ids = self._split_key(key)
        return self.backend.incr(category, ids, delta=delta, time=time)