def iterator(self):
    superiter = self._no_monkey.iterator
    cache_this = self._cacheprofile is not None and 'fetch' in self._cacheops

    if cache_this:
        cache_key = self._cache_key()
        if not self._cache_write_only:
            # Try to get the data from cache
            cache_data = redis_conn.get(cache_key)
            if cache_data is not None:
                results = pickle.loads(cache_data)
                for obj in results:
                    yield obj
                return

    # Cache miss - fall back to the overridden implementation
    results = []
    for obj in superiter(self):
        if cache_this:
            results.append(obj)
        yield obj

    if cache_this:
        self._cache_results(cache_key, results)
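The _no_monkey attribute above is the hook back to the stock implementation. A minimal sketch of the monkey-patching glue that could install such a method on Django's QuerySet; monkey_mix and MonkeyProxy are assumed names for illustration, not necessarily the real implementation:

class MonkeyProxy(object):
    pass

def monkey_mix(cls, mixin):
    # Copy methods from the mixin onto the target class, stashing any
    # overridden originals on cls._no_monkey so that replacements can
    # still call them (names here are assumptions).
    proxy = MonkeyProxy()
    for name, value in vars(mixin).items():
        if name.startswith('__'):
            continue
        if hasattr(cls, name):
            setattr(proxy, name, getattr(cls, name))
        setattr(cls, name, value)
    cls._no_monkey = proxy

# After monkey_mix(QuerySet, QuerySetMixin), self._no_monkey.iterator
# refers to the original QuerySet.iterator, just as superiter expects.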
def iterator(self):
    superiter = self._no_monkey.iterator
    cache_this = self._cacheprofile and 'fetch' in self._cacheops

    if cache_this:
        cache_key = self._cache_key()
        if not self._cache_write_only:
            # Try to get the data from cache
            cache_data = redis_conn.get(cache_key)
            if cache_data is not None:
                results = pickle.loads(cache_data)
                for obj in results:
                    # Unpickling skips Model.__init__, so re-send the
                    # init signals for every object served from cache
                    signals.pre_init.send(sender=obj.__class__, *[], **obj.__dict__)
                    signals.post_init.send(sender=obj.__class__, instance=obj)
                    yield obj
                return

    # Cache miss - fall back to the overridden implementation
    results = []
    for obj in superiter(self):
        if cache_this:
            results.append(obj)
        yield obj

    if cache_this:
        self._cache_results(cache_key, results)
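The only difference from the first version is the signal replay: pickle.loads rebuilds model instances without calling Model.__init__, so pre_init/post_init receivers would otherwise never fire for cache hits. A small illustration of what would be skipped; the receiver is hypothetical:

import time
from django.db.models import signals

def touch_instance(sender, instance, **kwargs):
    # Fires for every instance created via Model.__init__; without the
    # replay above it would silently be skipped for cached objects.
    instance.loaded_at = time.time()

signals.post_init.connect(touch_instance)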
def wrapper(*args):
    # NOTE: These args must not affect the function result.
    #       I'm keeping them to cache view functions.
    cache_data = redis_conn.get(cache_key)
    if cache_data is not None:
        return pickle.loads(cache_data)

    result = func(*args)
    cache_thing(queryset.model, cache_key, result, cond_dnf,
                timeout or queryset._cachetimeout)
    return result
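Here func, cache_key, queryset, cond_dnf, and timeout are all closed over, so this wrapper is evidently returned by a decorator factory. A hedged usage sketch, assuming a cached_as(queryset, timeout=...) style entry point and an Article model, neither of which is defined above:

@cached_as(Article.objects.filter(visible=True), timeout=600)
def visible_articles():
    # Cached until a write to Article invalidates the conditions
    # captured in cond_dnf above.
    return list(Article.objects.filter(visible=True))

first = visible_articles()   # hits the database, caches the pickle
second = visible_articles()  # served from Redis until invalidation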
def wrapper(self, *args, **kwargs):
    cache_this = self._cacheprofile is not None and action in self._cacheops
    if cache_this:
        cache_key = self._cache_key(extra=key_extra)
        cache_data = redis_conn.get(cache_key)
        if cache_data is not None:
            return pickle.loads(cache_data)

    result = func(self, *args, **kwargs)
    if cache_this:
        self._cache_results(cache_key, result)
    return result
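The free variables action, key_extra, and func again point at a factory. A minimal sketch of how one could bind them and patch a single queryset method, with cached_method as an assumed name and redis_conn assumed from module scope:

import pickle
import functools

def cached_method(action, key_extra=None):
    # Hypothetical factory producing the wrapper above for one
    # QuerySet method at a time.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            cache_this = self._cacheprofile is not None and action in self._cacheops
            if cache_this:
                cache_key = self._cache_key(extra=key_extra)
                cache_data = redis_conn.get(cache_key)
                if cache_data is not None:
                    return pickle.loads(cache_data)
            result = func(self, *args, **kwargs)
            if cache_this:
                self._cache_results(cache_key, result)
            return result
        return wrapper
    return decorator

# e.g. QuerySet.count = cached_method('count', key_extra='count')(QuerySet.count)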