def iterator(self):
    """Iterate over queryset results, transparently caching them in redis.

    On a cache hit, yields the unpickled cached objects. On a miss (or
    when fetch-caching is disabled for this queryset) falls back to the
    un-monkeypatched iterator and stores the collected rows afterwards.
    """
    # TODO: do not cache empty queries in Django 1.6
    superiter = self._no_monkey.iterator
    cache_this = self._cacheprofile and 'fetch' in self._cacheconf['ops']

    if cache_this:
        cache_key = self._cache_key()
        if not self._cacheconf['write_only']:
            # Trying get data from cache
            cache_data = redis_client.get(cache_key)
            if cache_data is not None:
                # NOTE: pickle.loads is only safe because we wrote this
                # cache entry ourselves in _cache_results below.
                results = pickle.loads(cache_data)
                for obj in results:
                    yield obj
                # FIX: 'raise StopIteration' inside a generator is a
                # RuntimeError since Python 3.7 (PEP 479) — use return.
                return

    # Cache miss - fallback to overriden implementation
    results = []
    for obj in superiter(self):
        if cache_this:
            results.append(obj)
        yield obj

    if cache_this:
        self._cache_results(cache_key, results)
    # No trailing 'raise StopIteration': falling off the end of a
    # generator ends iteration naturally (and PEP 479 forbids it).
def iterator(self):
    """Iterate over queryset results, transparently caching them in redis.

    On a cache hit, yields the unpickled cached objects. On a miss (or
    when fetch-caching is disabled for this queryset) falls back to the
    un-monkeypatched iterator and stores the collected rows afterwards.
    """
    superiter = self._no_monkey.iterator
    cache_this = self._cacheprofile and 'fetch' in self._cacheops

    if cache_this:
        cache_key = self._cache_key()
        if not self._cache_write_only:
            # Trying get data from cache
            cache_data = redis_client.get(cache_key)
            if cache_data is not None:
                # NOTE: pickle.loads is only safe because we wrote this
                # cache entry ourselves in _cache_results below.
                results = pickle.loads(cache_data)
                for obj in results:
                    yield obj
                # FIX: 'raise StopIteration' inside a generator is a
                # RuntimeError since Python 3.7 (PEP 479) — use return.
                return

    # Cache miss - fallback to overriden implementation
    results = []
    for obj in superiter(self):
        if cache_this:
            results.append(obj)
        yield obj

    if cache_this:
        self._cache_results(cache_key, results)
    # No trailing 'raise StopIteration': falling off the end of a
    # generator ends iteration naturally (and PEP 479 forbids it).
def iterator(self):
    """Iterate over queryset results, transparently caching them in redis.

    On a cache hit, yields the unpickled cached objects. On a miss (or
    when fetch-caching is disabled for this queryset) falls back to the
    un-monkeypatched iterator and stores the collected rows afterwards.
    """
    superiter = self._no_monkey.iterator
    cache_this = self._cacheprofile and "fetch" in self._cacheops

    if cache_this:
        cache_key = self._cache_key()
        if not self._cache_write_only:
            # Trying get data from cache
            cache_data = redis_client.get(cache_key)
            if cache_data is not None:
                # NOTE: pickle.loads is only safe because we wrote this
                # cache entry ourselves in _cache_results below.
                results = pickle.loads(cache_data)
                for obj in results:
                    yield obj
                # FIX: 'raise StopIteration' inside a generator is a
                # RuntimeError since Python 3.7 (PEP 479) — use return.
                return

    # Cache miss - fallback to overriden implementation
    results = []
    for obj in superiter(self):
        if cache_this:
            results.append(obj)
        yield obj

    if cache_this:
        self._cache_results(cache_key, results)
    # No trailing 'raise StopIteration': falling off the end of a
    # generator ends iteration naturally (and PEP 479 forbids it).
def wrapper(*args):
    """Return the cached result for a fixed cache_key, computing it on a miss.

    NOTE: *args must not affect the function result — they are only
    accepted so that view functions (which receive a request) can be
    cached under a single precomputed key.
    """
    cached = redis_client.get(cache_key)
    if cached is None:
        # Cache miss: compute and persist the result for later calls.
        result = func(*args)
        queryset._cache_results(cache_key, result, timeout)
        return result
    return pickle.loads(cached)
def wrapper(*args):
    """Cache func's result under a key derived from its positional args.

    The key embeds the stringified args; on a hit the pickled value is
    returned, otherwise func is called and the result is stored.
    """
    # FIX: build the key fragment with str.join instead of repeated
    # string += (quadratic). Falsy args still contribute nothing,
    # exactly matching the original key scheme.
    # NOTE(review): concatenating args without a separator can collide,
    # e.g. (1, 23) and (12, 3) yield the same key — verify acceptable.
    argstr = ''.join(str(arg) if arg else '' for arg in args)
    cache_key = queryset._cache_key(extra=key_extra + 'args%s' % argstr)

    cache_data = redis_client.get(cache_key)
    if cache_data is not None:
        return pickle.loads(cache_data)

    result = func(*args)
    queryset._cache_results(cache_key, result, timeout)
    return result