Example #1
 def _install_cacheops(self, cls):
     cls._cacheprofile = model_profile(cls)
     if cls._cacheprofile is not None and get_model_name(cls) not in _old_objs:
         # Setting up signals
         post_save.connect(self._post_save, sender=cls)
         post_delete.connect(self._post_delete, sender=cls)
         _old_objs[get_model_name(cls)] = {}
Example #2
 def contribute_to_class(self, cls, name):
     self._no_monkey.contribute_to_class(self, cls, name)
     cls._cacheprofile = model_profile(cls)
     if cls._cacheprofile is not None and get_model_name(cls) not in _old_objs:
         # Setting up signals
         post_save.connect(self._post_save, sender=cls)
         post_delete.connect(self._post_delete, sender=cls)
         _old_objs[get_model_name(cls)] = {}
Example #3
    def _install_cacheops(self, cls):
        cls._cacheprofile = model_profile(cls)
        if cls._cacheprofile is not None and get_model_name(cls) not in _old_objs:
            # Set up signals
            post_save.connect(self._post_save, sender=cls)
            post_delete.connect(self._post_delete, sender=cls)
            _old_objs[get_model_name(cls)] = {}

            # Install auto-created models as their module attributes to make them picklable
            module = sys.modules[cls.__module__]
            if not hasattr(module, cls.__name__):
                setattr(module, cls.__name__, cls)
Example #4
    def _install_cacheops(self, cls):
        cls._cacheprofile = model_profile(cls)
        if cls._cacheprofile is not None and get_model_name(
                cls) not in _old_objs:
            # Set up signals
            post_save.connect(self._post_save, sender=cls)
            post_delete.connect(self._post_delete, sender=cls)
            _old_objs[get_model_name(cls)] = {}

            # Install auto-created models as their module attributes to make them picklable
            module = sys.modules[cls.__module__]
            if not hasattr(module, cls.__name__):
                setattr(module, cls.__name__, cls)
Example #5
    def ensure_known(self, model, new_schemes):
        """
        Ensure that `new_schemes` are known, registering any that are not
        """
        new_schemes = set(new_schemes)
        model_name = get_model_name(model)
        loaded = False

        if model_name not in self.local:
            self.load_schemes(model)
            loaded = True
        schemes = self.local[model_name]

        if new_schemes - schemes:
            if not loaded:
                schemes = self.load_schemes(model)
            if new_schemes - schemes:
                # Write new schemes to redis
                txn = redis_conn.pipeline()
                txn.incr(self.get_version_key(model_name))  # Bump the schemes version

                lookup_key = self.get_lookup_key(model_name)
                for scheme in new_schemes - schemes:
                    txn.sadd(lookup_key, serialize_scheme(scheme))
                txn.execute()

                # Update the local schemes copy
                self.local[model_name].update(new_schemes)
                # We increment the version here instead of using the incr result from redis,
                # because even our updated collection could already be obsolete
                self.versions[model_name] += 1
Example #6
    def ensure_known(self, model, new_schemes):
        """
        Ensure that `new_schemes` are known, registering any that are not
        """
        new_schemes = set(new_schemes)
        model_name = get_model_name(model)
        loaded = False

        if model_name not in self.local:
            self.load_schemes(model)
            loaded = True
        schemes = self.local[model_name]

        if new_schemes - schemes:
            if not loaded:
                schemes = self.load_schemes(model)
            if new_schemes - schemes:
                # Write new schemes to redis
                txn = redis_client.pipeline()
                txn.incr(self.get_version_key(model_name))  # Bump the schemes version

                lookup_key = self.get_lookup_key(model_name)
                for scheme in new_schemes - schemes:
                    txn.sadd(lookup_key, serialize_scheme(scheme))
                txn.execute()

                # Update the local schemes copy
                self.local[model_name].update(new_schemes)
                # We increment the version here instead of using the incr result from redis,
                # because even our updated collection could already be obsolete
                self.versions[model_name] += 1
Example #7
def invalidate_obj(obj):
    """
    Invalidates caches that can possibly be influenced by the object
    """
    model = non_proxy(obj.__class__)
    load_script('invalidate')(args=[
        get_model_name(model),
        serialize_object(model, obj)
    ])
Example #8
def invalidate_model(model):
    """
    Invalidates all caches for the given model.
    NOTE: This is heavy artillery which uses a redis KEYS request,
          which could be relatively slow on large datasets.
    """
    conjs_keys = redis_client.keys('conj:%s:*' % get_model_name(model))
    if conjs_keys:
        cache_keys = redis_client.sunion(conjs_keys)
        redis_client.delete(*(list(cache_keys) + conjs_keys))
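
For context, a minimal usage sketch of the two invalidation helpers shown above (invalidate_obj from Example #7 and invalidate_model from Example #8). The Post model, the field values and the top-level import are assumptions made for the illustration, not taken from the examples:

    # Hypothetical usage; cacheops is assumed to be configured for Post.
    from cacheops import invalidate_obj, invalidate_model

    post = Post.objects.get(pk=1)
    # QuerySet.update() does not send post_save, so the signal handlers from the
    # examples above never fire; here we invalidate the affected object by hand.
    Post.objects.filter(pk=1).update(title='new title')
    invalidate_obj(post)

    # The heavier option: drop every cached queryset for the model at once.
    invalidate_model(Post)
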
Example #9
    def load_schemes(self, model):
        model_name = get_model_name(model)

        txn = redis_client.pipeline()
        txn.get(self.get_version_key(model))
        txn.smembers(self.get_lookup_key(model_name))
        version, members = txn.execute()

        self.local[model_name] = set(map(deserialize_scheme, members))
        self.local[model_name].add(())  # Always add the empty scheme
        self.versions[model_name] = int(version or 0)
        return self.local[model_name]
Example #10
    def load_schemes(self, model):
        model_name = get_model_name(model)

        txn = redis_conn.pipeline()
        txn.get(self.get_version_key(model))
        txn.smembers(self.get_lookup_key(model_name))
        version, members = txn.execute()

        self.local[model_name] = set(map(deserialize_scheme, members))
        self.local[model_name].add(())  # Always add the empty scheme
        self.versions[model_name] = int(version or 0)
        return self.local[model_name]
Example #11
def invalidate_model(model):
    """
    Invalidates all caches for the given model.
    NOTE: This is heavy artillery which uses a redis KEYS request,
          which could be relatively slow on large datasets.
    """
    conjs_keys = redis_conn.keys('conj:%s:*' % get_model_name(model))
    if isinstance(conjs_keys, str):
        conjs_keys = conjs_keys.split()

    if conjs_keys:
        cache_keys = redis_conn.sunion(conjs_keys)
        redis_conn.delete(*(list(cache_keys) + conjs_keys))

    cache_schemes.clear(model)
Example #12
def invalidate_model(model):
    """
    Invalidates all caches for the given model.
    NOTE: This is heavy artillery which uses a redis KEYS request,
          which could be relatively slow on large datasets.
    """
    conjs_keys = redis_conn.keys('conj:%s:*' % get_model_name(model))
    if isinstance(conjs_keys, str):
        conjs_keys = conjs_keys.split()

    if conjs_keys:
        cache_keys = redis_conn.sunion(conjs_keys)
        redis_conn.delete(*(list(cache_keys) + conjs_keys))

    cache_schemes.clear(model)
Example #13
def invalidate_model(model):
    """
    Invalidates all caches for the given model.
    NOTE: This is heavy artillery which uses a redis KEYS request,
          which could be relatively slow on large datasets.
    """
    conjs_keys = redis_client.keys('conj:%s:*' % get_model_name(model))
    if isinstance(conjs_keys, str):
        conjs_keys = conjs_keys.split()

    if conjs_keys:
        cache_keys = redis_client.sunion(conjs_keys)
        redis_client.delete(*(list(cache_keys) + conjs_keys))

    # BUG: there is a race here, ignored since invalidate_model() is not meant for hot production use
    cache_schemes.clear(model)
Example #14
 def exists(self):
     """
     HACK: handling invalidation in the post_save signal requires both
           old and new object data; to get the old data without an extra db request
           we hook into the exists() call made from Django's Model.save_base().
           Yes, if you use .exists() yourself this can cause a memory leak.
     """
     # TODO: refactor this into something more understandable
     if self._cacheprofile:
         query_dnf = dnf(self)
         if len(query_dnf) == 1 and len(query_dnf[0]) == 1 and query_dnf[0][0][0] == self.model._meta.pk.name:
             result = len(self.nocache()) > 0
             if result:
                 _old_objs[get_model_name(self.model)][query_dnf[0][0][1]] = self._result_cache[0]
             return result
     return self._no_monkey.exists(self)
Example #15
def invalidate_model(model):
    """
    Invalidates all caches for the given model.
    NOTE: This is heavy artillery which uses a redis KEYS request,
          which could be relatively slow on large datasets.
    """
    conjs_keys = redis_client.keys("conj:%s:*" % get_model_name(model))
    if isinstance(conjs_keys, str):
        conjs_keys = conjs_keys.split()

    if conjs_keys:
        cache_keys = redis_client.sunion(conjs_keys)
        redis_client.delete(*(list(cache_keys) + conjs_keys))

    # BUG: there is a race here, ignored since invalidate_model() is not meant for hot production use
    cache_schemes.clear(model)
Example #16
    def _post_save(self, sender, instance, **kwargs):
        """
        Invokes invalidation for both the old and the new version of the saved object
        """
        old = _old_objs[get_model_name(instance.__class__)].pop(
            instance.pk, None)
        if old:
            invalidate_obj(old)
        invalidate_obj(instance)

        # When cache_on_save is enabled we write the saved object to cache.
        # Later it can be retrieved with .get(<cache_on_save_field>=<value>).
        # <cache_on_save_field> is pk unless specified otherwise.
        # This sweet trick saves a db request and helps with slave lag.
        cache_on_save = instance._cacheprofile.get('cache_on_save')
        if cache_on_save:
            # HACK: We get this object "from field" so it can contain
            #       some undesirable attributes or other objects attached.
            #       RelatedField accessors do that, for example.
            #
            #       So we strip down any _*_cache attrs before saving
            #       and later reassign them
            # Stripping up undesirable attributes
            unwanted_attrs = [
                k for k in instance.__dict__.keys()
                if k.startswith('_') and k.endswith('_cache')
            ]
            unwanted_dict = dict(
                (k, instance.__dict__[k]) for k in unwanted_attrs)
            for k in unwanted_attrs:
                del instance.__dict__[k]

            key = cache_on_save if isinstance(cache_on_save,
                                              basestring) else 'pk'
            # Django doesn't allow filters like related_id = 1337.
            # So, as a hack, we just strip a trailing _id from the key.
            # TODO: do this properly, _meta.get_field() should help
            filter_key = key[:-3] if key.endswith('_id') else key
            cached_as(instance.__class__.objects \
                .filter(**{filter_key: getattr(instance, key)}), extra='') \
                (lambda: [instance])()

            # Reverting stripped attributes
            instance.__dict__.update(unwanted_dict)
Example #17
 def exists(self):
     """
     HACK: handling invalidation in the post_save signal requires both
           old and new object data; to get the old data without an extra db request
           we hook into the exists() call made from Django's Model.save_base().
           Yes, if you use .exists() yourself this can cause a memory leak.
     """
     # TODO: refactor this into something more understandable
     if self._cacheprofile:
         query_dnf = dnf(self)
         if len(query_dnf) == 1 and len(
                 query_dnf[0]
         ) == 1 and query_dnf[0][0][0] == self.model._meta.pk.name:
             result = len(self.nocache()) > 0
             if result:
                 _old_objs[get_model_name(self.model)][
                     query_dnf[0][0][1]] = self._result_cache[0]
             return result
     return self._no_monkey.exists(self)
Example #18
    def _post_save(self, sender, instance, **kwargs):
        """
        Invokes invalidation for both the old and the new version of the saved object
        """
        old = _old_objs[get_model_name(instance.__class__)].pop(instance.pk, None)
        if old:
            invalidate_obj(old)
        invalidate_obj(instance)

        # When cache_on_save is enabled we write the saved object to cache.
        # Later it can be retrieved with .get(<cache_on_save_field>=<value>).
        # <cache_on_save_field> is pk unless specified otherwise.
        # This sweet trick saves a db request and helps with slave lag.
        cache_on_save = instance._cacheprofile.get('cache_on_save')
        if cache_on_save:
            # HACK: We get this object "from field" so it can contain
            #       some undesirable attributes or other objects attached.
            #       RelatedField accessors do that, for example.
            #
            #       So we strip down any _*_cache attrs before saving
            #       and later reassign them
            # Stripping up undesirable attributes
            unwanted_attrs = [k for k in instance.__dict__.keys()
                                if k.startswith('_') and k.endswith('_cache')]
            unwanted_dict = dict((k, instance.__dict__[k]) for k in unwanted_attrs)
            for k in unwanted_attrs:
                del instance.__dict__[k]

            key = cache_on_save if isinstance(cache_on_save, basestring) else 'pk'
            # Django doesn't allow filters like related_id = 1337.
            # So, as a hack, we just strip a trailing _id from the key.
            # TODO: do this properly, _meta.get_field() should help
            filter_key = key[:-3] if key.endswith('_id') else key

            cond = {filter_key: getattr(instance, key)}
            qs = instance.__class__.objects.inplace().filter(**cond).order_by()
            qs._cache_results(qs._cache_key(), [instance])

            # Reverting stripped attributes
            instance.__dict__.update(unwanted_dict)
Example #19
def cache_thing(model, cache_key, data, cond_dnf=[[]], timeout=None):
    """
    Writes data to cache and creates appropriate invalidators.
    """
    model = non_proxy(model)

    if timeout is None:
        profile = model_profile(model)
        timeout = profile['timeout']

    pickled_data = pickle.dumps(data, -1)
    load_script('cache_thing')(
        keys=[cache_key],
        args=[
            pickled_data,
            get_model_name(model),
            json.dumps(cond_dnf, default=str),
            timeout,
            # The invalidator timeout should be larger than the timeout of any key it references,
            # so we take the timeout from the profile, which is our upper limit,
            # and add a few extra seconds to be safe.
            model._cacheprofile['timeout'] + 10
        ]
    )
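
A hedged sketch of how cache_thing above might be invoked. The Post model, the cache key and the timeout are made-up assumptions; the shape of cond_dnf (a list of conjunctions, each a list of (field, value) pairs) is inferred from how query_dnf is indexed in the exists() examples:

    # Illustrative only: cache the materialized results of a queryset under a key
    # and register an invalidator for the single conjunction category_id=2.
    posts = list(Post.objects.filter(category_id=2).nocache())
    cache_thing(
        Post,
        'q:deadbeef',                      # hypothetical cache key for this queryset
        posts,
        cond_dnf=[[('category_id', 2)]],   # one conjunction of (field, value) pairs
        timeout=60,
    )
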
Example #20
 def get_version_key(self, model_or_name):
     if not isinstance(model_or_name, str):
         model_or_name = get_model_name(model_or_name)
     return 'schemes:%s:version' % model_or_name
Example #21
 def get_lookup_key(self, model_or_name):
     if not isinstance(model_or_name, str):
         model_or_name = get_model_name(model_or_name)
     return 'schemes:%s' % model_or_name
Example #22
def conj_cache_key_from_scheme(model, scheme, values):
    return 'conj:%s:' % get_model_name(model) + '&'.join('%s=%s' %
                                                         (f, values[f])
                                                         for f in scheme)
Example #23
 def schemes(self, model):
     model_name = get_model_name(model)
     try:
         return self.local[model_name]
     except KeyError:
         return self.load_schemes(model)
Example #24
 def _require_cacheprofile(self):
     if self._cacheprofile is None:
         raise ImproperlyConfigured(
             'Cacheops is not enabled for %s model.\n'
             'If you don\'t want to cache anything by default you can "just_enable" it.'
                 % get_model_name(self.model))
Example #25
def conj_cache_key_from_scheme(model, scheme, values):
    return "conj:%s:" % get_model_name(model) + "&".join("%s=%s" % (f, values[f]) for f in scheme)
Example #26
 def version(self, model):
     try:
         return self.versions[get_model_name(model)]
     except KeyError:
         return 0
Example #27
 def _require_cacheprofile(self):
     if self._cacheprofile is None:
         raise ImproperlyConfigured(
             'Cacheops is not enabled for %s model.\n'
             'If you don\'t want to cache anything by default you can "just_enable" it.'
             % get_model_name(self.model))
Example #28
 def get_version_key(self, model_or_name):
     if not isinstance(model_or_name, str):
         model_or_name = get_model_name(model_or_name)
     return "schemes:%s:version" % model_or_name
Example #29
def conj_cache_key_from_scheme(model, scheme, values):
    return 'conj:%s:' % get_model_name(model) + '&'.join('%s=%s' % (f, values[f]) for f in scheme)
Example #30
 def version(self, model):
     try:
         return self.versions[get_model_name(model)]
     except KeyError:
         return 0
Example #31
 def get_lookup_key(self, model_or_name):
     if not isinstance(model_or_name, basestring):
         model_or_name = get_model_name(model_or_name)
     return 'schemes:%s' % model_or_name
Example #32
 def schemes(self, model):
     model_name = get_model_name(model)
     try:
         return self.local[model_name]
     except KeyError:
         return self.load_schemes(model)
Example #33
 def get_version_key(self, model_or_name):
     if not isinstance(model_or_name, basestring):
         model_or_name = get_model_name(model_or_name)
     return 'schemes:%s:version' % model_or_name
Example #34
def conj_cache_key_from_scheme(model, scheme, obj):
    return 'conj:%s:' % get_model_name(model) \
         + '&'.join('%s=%s' % (f, getattr(obj, f)) for f in scheme)
Example #35
def conj_cache_key(model, conj):
    return "conj:%s:" % get_model_name(model) + "&".join("%s=%s" % t for t in sorted(conj))
Example #36
def conj_cache_key(model, conj):
    return 'conj:%s:' % get_model_name(model) + '&'.join('%s=%s' % t for t in sorted(conj))
Example #37
def conj_cache_key(model, conj):
    return 'conj:%s:' % get_model_name(model) + '&'.join('%s=%s' % t
                                                         for t in sorted(conj))
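
To make the key layout concrete, here is what conj_cache_key above produces for a sample conjunction; the 'app.post' model name and the field values are assumptions (the exact name depends on get_model_name):

    # Assuming get_model_name(Post) == 'app.post':
    conj = [('visible', True), ('category_id', 2)]
    conj_cache_key(Post, conj)
    # -> 'conj:app.post:category_id=2&visible=True'  (pairs are sorted by field name)

This is the same key family that invalidate_model matches with its 'conj:%s:*' KEYS pattern.
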
Example #38
 def get_lookup_key(self, model_or_name):
     if not isinstance(model_or_name, str):
         model_or_name = get_model_name(model_or_name)
     return "schemes:%s" % model_or_name
Example #39
def conj_cache_key_from_scheme(model, scheme, obj):
    return 'conj:%s:' % get_model_name(model) \
         + '&'.join('%s=%s' % (f, getattr(obj, f)) for f in scheme)