def cache_thing(model, cache_key, data, cond_dnf=None, timeout=None):
    """
    Writes data to cache and creates appropriate invalidators.

    Args:
        model: Django model the cached data belongs to.
        cache_key: redis key under which the pickled data is stored.
        data: the value to cache (pickled before storage).
        cond_dnf: query conditions in disjunctive normal form — a list of
            conjunctions. Defaults to [[]], a single empty conjunction.
        timeout: expiry in seconds; taken from the model profile when None.
    """
    # Fix: avoid the shared mutable default argument ([[]]) — build it per call.
    if cond_dnf is None:
        cond_dnf = [[]]
    if timeout is None:
        profile = model_profile(model)
        timeout = profile['timeout']

    # Ensure that all schemes of current query are "known"
    schemes = map(conj_scheme, cond_dnf)
    cache_schemes.ensure_known(model, schemes)

    txn = redis_conn.pipeline()

    # Write data to cache
    pickled_data = pickle.dumps(data, -1)
    if timeout is not None:
        # NOTE(review): (key, value, time) order matches redis-py < 3.0;
        # redis-py >= 3.0 expects setex(name, time, value) — other variants in
        # this file use that order. Confirm which client version is pinned.
        txn.setex(cache_key, pickled_data, timeout)
    else:
        txn.set(cache_key, pickled_data)

    # Add new cache_key to list of dependencies for every conjunction in dnf
    for conj in cond_dnf:
        conj_key = conj_cache_key(model, conj)
        txn.sadd(conj_key, cache_key)
        if timeout is not None:
            # Invalidator timeout should be larger than timeout of any key it
            # references, so we take timeout from profile which is our upper
            # limit. Add few extra seconds to be extra safe.
            txn.expire(conj_key, model._cacheprofile['timeout'] + 10)

    txn.execute()
def cache_thing(model, cache_key, data, cond_dnf=None, timeout=None):
    """
    Writes data to cache and creates appropriate invalidators.

    Args:
        model: Django model (proxies are resolved to their concrete model).
        cache_key: redis key under which the pickled data is stored.
        data: the value to cache (pickled before storage).
        cond_dnf: query conditions in disjunctive normal form — a list of
            conjunctions. Defaults to [[]], a single empty conjunction.
        timeout: expiry in seconds; taken from the model profile when None.
    """
    # Fix: avoid the shared mutable default argument ([[]]) — build it per call.
    if cond_dnf is None:
        cond_dnf = [[]]
    model = non_proxy(model)

    if timeout is None:
        profile = model_profile(model)
        timeout = profile['timeout']

    # Ensure that all schemes of current query are "known"
    schemes = map(conj_scheme, cond_dnf)
    cache_schemes.ensure_known(model, schemes)

    txn = redis_client.pipeline()

    # Write data to cache
    pickled_data = pickle.dumps(data, -1)
    if timeout is not None:
        # NOTE(review): (key, value, time) order matches redis-py < 3.0;
        # redis-py >= 3.0 expects setex(name, time, value) — confirm the
        # pinned client version (other variants in this file use that order).
        txn.setex(cache_key, pickled_data, timeout)
    else:
        txn.set(cache_key, pickled_data)

    # Add new cache_key to list of dependencies for every conjunction in dnf
    for conj in cond_dnf:
        conj_key = conj_cache_key(model, conj)
        txn.sadd(conj_key, cache_key)
        if timeout is not None:
            # Invalidator timeout should be larger than timeout of any key it
            # references, so we take timeout from profile which is our upper
            # limit. Add few extra seconds to be extra safe.
            txn.expire(conj_key, model._cacheprofile['timeout'] + 10)

    txn.execute()
def _install_cacheops(self, cls):
    """Attach cache invalidation signal handlers to *cls*, at most once."""
    cls._cacheprofile = model_profile(cls)
    model_name = get_model_name(cls)
    # Nothing to do for uncached models or models already wired up.
    if cls._cacheprofile is None or model_name in _old_objs:
        return
    # Setting up signals
    post_save.connect(self._post_save, sender=cls)
    post_delete.connect(self._post_delete, sender=cls)
    _old_objs[model_name] = {}
def cache_page_by_queryset(model, cache_key, data, cond_dnf=None, timeout=None,
                           only_conj=False):
    """
    Overridden method `cacheops.query.cache_thing` which doesn't pickle data
    and can set only invalidation conjunctions.

    Args:
        model: Django model (proxies are resolved to their concrete model).
        cache_key: redis key under which the raw data is stored.
        data: raw value to cache (stored as-is, no pickling).
        cond_dnf: query conditions in disjunctive normal form — a list of
            conjunctions. Defaults to [[]], a single empty conjunction.
        timeout: expiry in seconds; taken from the model profile when None.
        only_conj: when True, skip writing the data and register only the
            invalidation conjunctions.
    """
    # Fix: avoid the shared mutable default argument ([[]]) — build it per call.
    if cond_dnf is None:
        cond_dnf = [[]]
    model = non_proxy(model)

    if timeout is None:
        profile = model_profile(model)
        timeout = profile['timeout']

    # Ensure that all schemes of current query are "known"
    schemes = map(conj_scheme, cond_dnf)
    cache_schemes.ensure_known(model, schemes)

    txn = redis_client.pipeline()

    # Here was data pickling, we don't need it because of caching raw value
    # pickled_data = pickle.dumps(data, -1)

    # Check whether setting data is allowed in `only_conj` argument
    if not only_conj:
        if timeout is not None:
            txn.setex(cache_key, timeout, data)
        else:
            txn.set(cache_key, data)

    # Register cache_key as a dependency of every conjunction in the dnf.
    for conj in cond_dnf:
        conj_key = conj_cache_key(model, conj)
        txn.sadd(conj_key, cache_key)
        if timeout is not None:
            # Invalidator must outlive every key it references; the profile
            # timeout is the upper limit, plus a few seconds of slack.
            txn.expire(conj_key, model._cacheprofile['timeout'] + 10)

    txn.execute()
def __init__(self, *args, **kwargs):
    """Run the unpatched __init__, then attach the cache profile and config."""
    self._no_monkey.__init__(self, *args, **kwargs)
    self._cloning = 1000
    profile = model_profile(self.model)
    self._cacheprofile = profile
    if profile:
        # Work on a copy so the shared profile dict is never mutated.
        conf = profile.copy()
        conf['write_only'] = False
        self._cacheconf = conf
def contribute_to_class(self, cls, name):
    """Run the stock contribute_to_class, then hook invalidation signals once."""
    self._no_monkey.contribute_to_class(self, cls, name)
    cls._cacheprofile = model_profile(cls)
    model_name = get_model_name(cls)
    # Skip uncached models and models that were already wired up.
    if cls._cacheprofile is None or model_name in _old_objs:
        return
    # Setting up signals
    post_save.connect(self._post_save, sender=cls)
    post_delete.connect(self._post_delete, sender=cls)
    _old_objs[model_name] = {}
def _install_cacheops(self, cls):
    """Hook invalidation signals for *cls* and make auto-created models picklable."""
    cls._cacheprofile = model_profile(cls)
    model_name = get_model_name(cls)
    # Nothing to do for uncached models or models already wired up.
    if cls._cacheprofile is None or model_name in _old_objs:
        return
    # Set up signals
    post_save.connect(self._post_save, sender=cls)
    post_delete.connect(self._post_delete, sender=cls)
    _old_objs[model_name] = {}

    # Install auto-created models as their module attributes to make them picklable
    module = sys.modules[cls.__module__]
    if not hasattr(module, cls.__name__):
        setattr(module, cls.__name__, cls)
def __init__(self, *args, **kwargs):
    """Run the unpatched __init__, then derive cache ops/timeout from the profile."""
    self._no_monkey.__init__(self, *args, **kwargs)
    self._cloning = 1000
    # Compute the profile only when not already provided (e.g. as a class
    # attribute) and a model is attached.
    if not hasattr(self, '_cacheprofile') and self.model:
        self._cacheprofile = model_profile(self.model)
    self._cache_write_only = False
    profile = self._cacheprofile
    if profile is None:
        self._cacheops = None
        self._cachetimeout = None
    else:
        self._cacheops = profile['ops']
        self._cachetimeout = profile['timeout']
def _install_cacheops(self, cls):
    """Hook invalidation signals for *cls* and make auto-created models picklable."""
    cls._cacheprofile = model_profile(cls)
    model_name = get_model_name(cls)
    # Uncached models and models already wired up need no further work.
    if cls._cacheprofile is None or model_name in _old_objs:
        return
    # Set up signals
    post_save.connect(self._post_save, sender=cls)
    post_delete.connect(self._post_delete, sender=cls)
    _old_objs[model_name] = {}

    # Install auto-created models as their module attributes to make them picklable
    module = sys.modules[cls.__module__]
    if not hasattr(module, cls.__name__):
        setattr(module, cls.__name__, cls)
def __init__(self, *args, **kwargs):
    """Run the unpatched __init__, then derive cache ops/timeout from the profile."""
    self._no_monkey.__init__(self, *args, **kwargs)
    self._cloning = 1000
    # No model (e.g. an unbound queryset) means no cache profile.
    profile = model_profile(self.model) if self.model else None
    self._cacheprofile = profile
    self._cache_write_only = False
    if profile is None:
        self._cacheops = None
        self._cachetimeout = None
    else:
        self._cacheops = profile['ops']
        self._cachetimeout = profile['timeout']
def __init__(self, *args, **kwargs):
    """Run the unpatched __init__, then derive cache ops/timeout from the profile."""
    self._no_monkey.__init__(self, *args, **kwargs)
    self._cloning = 1000
    # A missing model (e.g. an unbound queryset) means no cache profile.
    profile = model_profile(self.model) if self.model else None
    self._cacheprofile = profile
    self._cache_write_only = False
    if profile is None:
        self._cacheops = None
        self._cachetimeout = None
    else:
        self._cacheops = profile["ops"]
        self._cachetimeout = profile["timeout"]
def _install_cacheops(self, cls):
    """Hook invalidation signals for *cls* and make auto-created models picklable."""
    cls._cacheprofile = model_profile(cls)
    # Uncached models and models already wired up need no further work.
    if cls._cacheprofile is None or cls in _old_objs:
        return
    # Set up signals.
    # Django 1.6+ doesn't make select on save by default, which we use to
    # fetch old state, so we fetch it ourselves in a pre_save handler.
    if not getattr(cls._meta, "select_on_save", True):
        pre_save.connect(self._pre_save, sender=cls)
    post_save.connect(self._post_save, sender=cls)
    post_delete.connect(self._post_delete, sender=cls)
    _old_objs[cls] = {}

    # Install auto-created models as their module attributes to make them picklable
    module = sys.modules[cls.__module__]
    if not hasattr(module, cls.__name__):
        setattr(module, cls.__name__, cls)
def _install_cacheops(self, cls):
    """Hook invalidation signals for *cls* and make auto-created models picklable."""
    cls._cacheprofile = model_profile(cls)
    # Skip uncached models and models that were already wired up.
    if cls._cacheprofile is None or cls in _old_objs:
        return
    # Set up signals.
    # Django 1.6+ doesn't make select on save by default, which we use to
    # fetch old state, so we fetch it ourselves in a pre_save handler.
    if not getattr(cls._meta, 'select_on_save', True):
        pre_save.connect(self._pre_save, sender=cls)
    post_save.connect(self._post_save, sender=cls)
    post_delete.connect(self._post_delete, sender=cls)
    _old_objs[cls] = {}

    # Install auto-created models as their module attributes to make them picklable
    module = sys.modules[cls.__module__]
    if not hasattr(module, cls.__name__):
        setattr(module, cls.__name__, cls)
def cache_thing(model, cache_key, data, cond_dnf=None, timeout=None):
    """
    Effectively a copy of cacheops' cache_thing method with two changes:
    - it is a plain function rather than an object method
    - data is NOT serialized with pickle.dumps (the raw value is cached)

    Args:
        model: Django model (proxies are resolved to their concrete model).
        cache_key: redis key under which the raw data is stored.
        data: raw value to cache, stored as-is.
        cond_dnf: query conditions in disjunctive normal form — a list of
            conjunctions. Defaults to [[]], a single empty conjunction.
        timeout: expiry in seconds; taken from the model profile when None.
    """
    # Fix: avoid the shared mutable default argument ([[]]) — build it per call.
    if cond_dnf is None:
        cond_dnf = [[]]
    model = non_proxy(model)

    if timeout is None:
        profile = model_profile(model)
        timeout = profile['timeout']

    # Ensure that all schemes of current query are "known"
    schemes = map(conj_scheme, cond_dnf)
    cache_schemes.ensure_known(model, schemes)

    txn = redis_client.pipeline()

    # Write data to cache
    if timeout is not None:
        txn.setex(cache_key, timeout, data)
    else:
        txn.set(cache_key, data)

    # Add new cache_key to list of dependencies for
    # every conjunction in dnf
    for conj in cond_dnf:
        conj_key = conj_cache_key(model, conj)
        txn.sadd(conj_key, cache_key)
        if timeout is not None:
            # Invalidator timeout should be larger than
            # timeout of any key it references
            # So we take timeout from profile which is our upper limit
            # Add few extra seconds to be extra safe
            txn.expire(conj_key, model._cacheprofile['timeout'] + 10)

    txn.execute()
def cache_thing(model, cache_key, data, cond_dnf=None, timeout=None):
    """
    Writes data to cache and creates appropriate invalidators.

    Delegates the actual write to the 'cache_thing' redis Lua script, which
    stores the pickled data and registers the invalidation conjunctions
    atomically on the server side.

    Args:
        model: Django model the cached data belongs to.
        cache_key: redis key under which the pickled data is stored.
        data: the value to cache (pickled before storage).
        cond_dnf: query conditions in disjunctive normal form — a list of
            conjunctions. Defaults to [[]], a single empty conjunction.
        timeout: expiry in seconds; taken from the model profile when None.
    """
    # Fix: avoid the shared mutable default argument ([[]]) — build it per call.
    if cond_dnf is None:
        cond_dnf = [[]]
    model = non_proxy(model)

    if timeout is None:
        profile = model_profile(model)
        timeout = profile['timeout']

    pickled_data = pickle.dumps(data, -1)
    load_script('cache_thing')(
        keys=[cache_key],
        args=[
            pickled_data,
            get_model_name(model),
            # default=str lets non-JSON-native condition values serialize
            json.dumps(cond_dnf, default=str),
            timeout,
            # Invalidator timeout should be larger than timeout of any key it
            # references, so we take timeout from profile which is our upper
            # limit. Add few extra seconds to be extra safe.
            model._cacheprofile['timeout'] + 10
        ]
    )
def _cacheprofile(self):
    """Return the model's cache profile, caching a write config on self when set."""
    profile = model_profile(self.model)
    if not profile:
        return profile
    # Copy before mutating so the shared profile dict stays untouched.
    conf = profile.copy()
    conf['write_only'] = False
    self._cacheconf = conf
    return profile