예제 #1
0
def invalidate_from_dict(model, values):
    """
    Invalidate all cache keys that could be affected by the given field values.

    Runs a WATCH-based optimistic transaction against redis: derives the
    conjunction ("invalidator") keys from the model's schemes and the supplied
    values, collects every cache key those invalidators reference, and deletes
    both the cache keys and the invalidator sets atomically. If schemes or any
    watched key change mid-flight, the transaction is retried by
    redis_client.transaction().
    """
    def _invalidate(pipe):
        # Create a list of invalidators from the list of schemes and the values
        # of the object's fields
        schemes = cache_schemes.schemes(model)
        conjs_keys = [conj_cache_key_from_scheme(model, scheme, values) for scheme in schemes]

        # Optimistic locking: we hope schemes and invalidators won't change
        # while we remove them. Ignoring this could leave a cache key hanging
        # with its invalidator removed.
        version_key = cache_schemes.get_version_key(model)
        pipe.watch(version_key, *conjs_keys)

        # Check whether our local version of the schemes for this model is
        # obsolete; if so, update them and redo the whole transaction. This
        # shouldn't happen too often once schemes are filled a bit.
        version = pipe.get(version_key)
        if int(version or 0) != cache_schemes.version(model):
            cache_schemes.load_schemes(model)
            # Drop the watch state before signalling a retry.
            pipe.reset()
            raise WatchError() # redo

        # Get a union of all cache keys registered in the invalidators
        cache_keys = pipe.sunion(conjs_keys)
        if cache_keys or conjs_keys:
            # `conjs_keys` are keys of sets containing `cache_keys` we are going to delete,
            # so we'll remove them too.
            # NOTE: There could be some other invalidators not matched with the current
            #       object, which reference cache keys we delete; they will be hanging
            #       around for a while.
            # The transaction will fail if the schemes or any of the invalidator sets
            # change in between; in that case we redo the whole thing.
            pipe.multi()
            pipe.delete(*(list(cache_keys) + conjs_keys))

    redis_client.transaction(_invalidate)
예제 #2
0
def invalidate_obj(obj):
    """
    Invalidate all cache keys that could be affected by the given model instance.

    Derives conjunction ("invalidator") keys from the model's schemes and the
    object's field values, then deletes both the invalidator sets and every
    cache key they reference. If the locally known scheme version turns out to
    be stale, schemes are reloaded from redis and the invalidation is redone
    on a second pass.

    Args:
        obj: a model instance whose field values determine which caches to drop.
    """
    model = non_proxy(obj.__class__)

    # Loading model schemes from local memory (or from redis)
    schemes = cache_schemes.schemes(model)

    # Schemes come back as None when redis is unavailable; there is nothing we
    # can invalidate in that case, so bail out instead of crashing on iteration.
    if schemes is None:
        return

    # We hope that our schemes are valid, but if not we will update them and redo invalidation
    # on second pass
    for _ in (1, 2):
        # Create a list of invalidators from list of schemes and values of object fields
        conjs_keys = [
            conj_cache_key_from_scheme(model, scheme, obj)
            for scheme in schemes
        ]

        # Reading scheme version, cache_keys and deleting invalidators in
        # a single transaction.
        def _invalidate_conjs(pipe):
            # get schemes version to check later that it's not obsolete
            pipe.get(cache_schemes.get_version_key(model))
            # Get a union of all cache keys registered in invalidators
            pipe.sunion(conjs_keys)
            # `conjs_keys` are keys of sets containing `cache_keys` we are going to delete,
            # so we'll remove them too.
            # NOTE: There could be some other invalidators not matched with current object,
            #       which reference cache keys we delete, they will be hanging out for a while.
            pipe.delete(*conjs_keys)

        # NOTE: we delete fetched cache_keys later which makes a tiny possibility that something
        #       will fail in the middle leaving those cache keys hanging without invalidators.
        #       The alternative WATCH-based optimistic locking proved to be pessimistic.
        version, cache_keys, _ = redis_client.transaction(_invalidate_conjs)
        if cache_keys:
            redis_client.delete(*cache_keys)

        # OK, we invalidated all conjunctions we had in memory, but model schema
        # may have been changed in redis long time ago. If this happened,
        # schema version will not match and we should load new schemes and redo our thing
        if int(version or 0) != cache_schemes.version(model):
            schemes = cache_schemes.load_schemes(model)
        else:
            break
예제 #3
0
def invalidate_from_dict(model, values):
    """
    Drop every cache entry that the given field values could affect.

    Looks up the model's invalidation schemes, derives the conjunction
    ("invalidator") keys for these values, and deletes both the conjunction
    sets and all cache keys they reference. Retries once with freshly loaded
    schemes if the scheme version stored in redis differs from the local one.
    """
    # Schemes are served from local memory, falling back to redis.
    schemes = cache_schemes.schemes(model)

    # A None result signals that redis could not be reached - nothing to do.
    if schemes is None:
        return

    # First pass uses the schemes we already have; the second pass runs only
    # if they proved to be out of date.
    for _attempt in range(2):
        # Conjunction keys derived from the object's field values.
        conjs_keys = [conj_cache_key_from_scheme(model, scheme, values)
                      for scheme in schemes]

        def _run_invalidation(pipe):
            # One transaction: read the scheme version, collect the cache keys
            # referenced by the invalidators, and drop the invalidator sets.
            pipe.multi()
            pipe.get(cache_schemes.get_version_key(model))
            pipe.sunion(conjs_keys)
            # NOTE: invalidators belonging to other objects may still point at
            #       cache keys we delete; those sets linger for a while.
            pipe.delete(*conjs_keys)

        # The fetched cache keys are deleted after the transaction, so a crash
        # in between could leave cache keys without invalidators. The
        # WATCH-based alternative proved to be pessimistic.
        version, cache_keys, _ = redis_client.transaction(_run_invalidation)
        if cache_keys:
            redis_client.delete(*cache_keys)

        # If the scheme version stored in redis differs from the one we used,
        # the model schema changed under us: reload schemes and redo the whole
        # thing once. Otherwise we are finished.
        if int(version or 0) != cache_schemes.version(model):
            schemes = cache_schemes.load_schemes(model)
        else:
            break