def _cache_results(self, cache_key, results):
    """Store queryset results under cache_key, tagged for invalidation.

    The WHERE clause is converted to disjunctive normal form so that
    invalidators can be registered alongside the cached data.
    """
    conditions = dnf(self.query.where, self.model._meta.db_table)
    cache_thing(self.model, cache_key, results, conditions,
                timeout=self._cachetimeout)
def cache_page(cache_key, cache_querysets, content):
    """Sets page cache content and adds invalidators for cache_querysets.

    Args:
        cache_key: key under which the rendered page content is stored.
        cache_querysets: iterable of model instances and/or querysets whose
            invalidation should drop this page from the cache.
        content: the page content to cache.

    The content is stored only under the queryset(s) with the minimum cache
    timeout; the remaining querysets contribute invalidation conjunctions
    only (only_conj=True).
    """
    min_timeout = None
    querysets = []
    for object_or_queryset in cache_querysets:
        # Getting queryset for model instance or queryset
        if isinstance(object_or_queryset, Model):
            # ConcreteModel.objects.filter(pk=obj.pk)
            model = object_or_queryset.__class__
            objects = getattr(model, 'objects')
            qs = objects.filter(pk=object_or_queryset.pk)
        else:
            qs = object_or_queryset
        querysets.append(qs)
        timeout = get_cache_timeout(qs)
        # FIX: compare against None rather than truthiness — a legitimate
        # timeout of 0 previously read as "no minimum seen yet" and let
        # every later timeout overwrite the minimum.
        if min_timeout is None or timeout < min_timeout:
            min_timeout = timeout
    for qs in querysets:
        # Computing DNF for queryset
        # (see `cacheops.query.QuerySetMixin._cache_results`)
        conj_dnf = dnf(qs)
        timeout = get_cache_timeout(qs)
        # Set cache key value only for minimum timeout
        cache_data = min_timeout is None or timeout == min_timeout
        cache_page_by_queryset(qs.model, cache_key, content, conj_dnf,
                               only_conj=not cache_data)
def exists(self):
    """
    HACK: invalidation in the post_save signal needs both the old and the
    new object data. To obtain the old data without an extra db request we
    piggyback on the exists() call made by django's Model.save_base().
    Yes, calling .exists() yourself can therefore cause a memory leak.
    """
    # TODO: refactor this one to more understandable something
    if not self._cacheprofile:
        return self._no_monkey.exists(self)

    query_dnf = dnf(self)
    # A single-conjunction, single-condition DNF on the pk column is the
    # signature of the existence probe issued by Model.save_base().
    is_pk_lookup = (
        len(query_dnf) == 1
        and len(query_dnf[0]) == 1
        and query_dnf[0][0][0] == self.model._meta.pk.name
    )
    if not is_pk_lookup:
        return self._no_monkey.exists(self)

    found = len(self.nocache()) > 0
    if found:
        # Stash the freshly fetched old object for the post_save handler.
        pk_value = query_dnf[0][0][1]
        _old_objs[get_model_name(self.model)][pk_value] = self._result_cache[0]
    return found
def exists(self):
    """
    HACK: invalidation in the post_save signal needs both the old and the
    new object data. To obtain the old data without an extra db request we
    piggyback on the exists() call made by django's Model.save_base().
    Yes, calling .exists() yourself can therefore cause a memory leak.
    """
    # TODO: refactor this one to more understandable something
    if not self._cacheprofile:
        return self._no_monkey.exists(self)

    query_dnf = dnf(self)
    # Only a one-conjunction, one-condition DNF on the pk column matches
    # the existence probe issued by Model.save_base().
    if len(query_dnf) != 1 or len(query_dnf[0]) != 1 \
            or query_dnf[0][0][0] != self.model._meta.pk.name:
        return self._no_monkey.exists(self)

    result = len(self.nocache()) > 0
    if result:
        # Stash the freshly fetched old object for the post_save handler,
        # keyed by model class and pk value.
        _old_objs[self.model][query_dnf[0][0][1]] = self._result_cache[0]
    return result
def decorator(func):
    """Wrap func so its result is cached under the queryset's cache key.

    The cached value is invalidated together with the queryset via the
    DNF of its WHERE clause.
    """
    key_extra = extra if extra else '%s.%s' % (func.__module__, func.__name__)
    cache_key = queryset._cache_key(extra=key_extra)
    cond_dnf = dnf(queryset.query.where, queryset.model._meta.db_table)

    @wraps(func)
    def wrapper(*args):
        # NOTE: These args must not effect function result.
        # I'm keeping them to cache view functions.
        cached = redis_conn.get(cache_key)
        if cached is None:
            result = func(*args)
            cache_thing(queryset.model, cache_key, result, cond_dnf,
                        timeout or queryset._cachetimeout)
            return result
        return pickle.loads(cached)

    return wrapper
def _cache_results(self, cache_key, results, timeout=None):
    """Write results to cache under cache_key with invalidation tags.

    Uses the explicit timeout when given, otherwise falls back to the
    timeout from this queryset's cache configuration.
    """
    effective_timeout = timeout or self._cacheconf['timeout']
    cache_thing(self.model, cache_key, results, dnf(self), effective_timeout)
def _cache_results(self, cache_key, results, timeout=None):
    """Write results to cache under cache_key with invalidation tags.

    Uses the explicit timeout when given, otherwise falls back to this
    queryset's default cache timeout.
    """
    effective_timeout = timeout or self._cachetimeout
    cache_thing(self.model, cache_key, results, dnf(self), effective_timeout)
def _dnf(self):
    """Return the DNF of the cached queryset's query conditions."""
    return dnf(self.get_cached_queryset())