Beispiel #1
0
def cacheops_stats(request):
    """
    Render the cacheops statistics page.

    A truthy ``reset`` GET parameter wipes the model registry and every
    ``cache_stats:*`` hash, then redirects back to this view.
    """
    if request.GET.get('reset'):
        redis_client.delete('stats_models')
        for stats_key in redis_client.keys('cache_stats:*'):
            redis_client.delete(stats_key)
        return redirect('cacheops_stats')

    data = {}
    # NOTE(review): assumes KEYS has exactly three entries, as the original did
    graph_data = dict.fromkeys(KEYS, 0)
    for model in redis_client.smembers('stats_models'):
        # Only the first two stat kinds participate in the per-model total
        totals = dict.fromkeys(KEYS[:2], 0)
        values = {}
        for idx, stat_name in enumerate(KEYS):
            counts = redis_client.hgetall('cache_stats:%s:%d' % (model, idx))
            if counts:
                values[stat_name] = counts
                subtotal = sum(int(v) for v in counts.values())
                totals[stat_name] = subtotal
                graph_data[stat_name] += subtotal

        totals['total'] = sum(totals.values())
        model_entry = {'data': values}
        model_entry.update(totals)
        data[model] = model_entry

    graph_data['total'] = sum(graph_data.values())
    return render(request, 'cacheops/stats.html', {
        'data_list': data, 'graph_data': graph_data
    })
Beispiel #2
0
 def clear(self, model):
     """
     Drop the stored lookup schemes for *model* and bump its version key.

     Best-effort: silently returns if redis cannot be reached.
     """
     try:
         lookup_key = self.get_lookup_key(model)
         redis_client.delete(lookup_key)
         version_key = self.get_version_key(model)
         redis_client.incr(version_key)
     except ConnectionError:
         # Redis unavailable -- give up quietly
         return
def invalidate_model(model):
    """
    Invalidates all caches for given model.
    NOTE: This is a heavy artilery which uses redis KEYS request,
          which could be relatively slow on large datasets.
    """
    pattern = 'conj:%s:*' % get_model_name(model)
    conjs_keys = redis_client.keys(pattern)
    if not conjs_keys:
        return
    # Delete every cache key referenced by the invalidator sets,
    # plus the invalidator sets themselves, in one call.
    cache_keys = list(redis_client.sunion(conjs_keys))
    redis_client.delete(*(cache_keys + conjs_keys))
Beispiel #4
0
def invalidate_model(model):
    """
    Invalidates all caches for given model.
    NOTE: This is a heavy artilery which uses redis KEYS request,
          which could be relatively slow on large datasets.
    """
    conjs_keys = redis_client.keys("conj:%s:*" % get_model_name(model))
    # Some client configurations return a whitespace-separated string
    # instead of a list -- normalize to a list of keys.
    if isinstance(conjs_keys, str):
        conjs_keys = conjs_keys.split()

    if conjs_keys:
        # Union of every cache key registered in the invalidator sets;
        # remove them together with the invalidator sets themselves.
        doomed = list(redis_client.sunion(conjs_keys)) + conjs_keys
        redis_client.delete(*doomed)

    # BUG: a race bug here, ignoring since invalidate_model() is not for hot production use
    cache_schemes.clear(model)
Beispiel #5
0
def invalidate_model(model):
    """
    Invalidates all caches for given model.
    NOTE: This is a heavy artilery which uses redis KEYS request,
          which could be relatively slow on large datasets.
    """
    pattern = 'conj:%s:*' % get_model_name(model)
    conjs_keys = redis_client.keys(pattern)
    if isinstance(conjs_keys, str):
        # Normalize a whitespace-separated string reply into a list
        conjs_keys = conjs_keys.split()

    if conjs_keys:
        cache_keys = list(redis_client.sunion(conjs_keys))
        redis_client.delete(*(cache_keys + conjs_keys))

    # BUG: a race bug here, ignoring since invalidate_model() is not for hot production use
    cache_schemes.clear(model)
def invalidate_obj(obj):
    """
    Invalidates caches that can possibly be influenced by object.

    Resolves the non-proxy model of *obj*, builds conjunction (invalidator)
    keys from the cached schemes, and deletes both the invalidator sets and
    every cache key they reference. Retries once if the local schemes turn
    out to be stale.
    """
    model = non_proxy(obj.__class__)

    # Loading model schemes from local memory (or from redis)
    schemes = cache_schemes.schemes(model)

    # In case when redis is unavailable: schemes() returns None then, and
    # iterating it would raise TypeError. Same guard as invalidate_from_dict().
    if schemes is None:
        return

    # We hope that our schemes are valid, but if not we will update them and redo invalidation
    # on second pass
    for _ in (1, 2):
        # Create a list of invalidators from list of schemes and values of object fields
        conjs_keys = [
            conj_cache_key_from_scheme(model, scheme, obj)
            for scheme in schemes
        ]

        # Reading scheme version, cache_keys and deleting invalidators in
        # a single transaction.
        def _invalidate_conjs(pipe):
            # get schemes version to check later that it's not obsolete
            pipe.get(cache_schemes.get_version_key(model))
            # Get a union of all cache keys registered in invalidators
            pipe.sunion(conjs_keys)
            # `conjs_keys` are keys of sets containing `cache_keys` we are going to delete,
            # so we'll remove them too.
            # NOTE: There could be some other invalidators not matched with current object,
            #       which reference cache keys we delete, they will be hanging out for a while.
            pipe.delete(*conjs_keys)

        # NOTE: we delete fetched cache_keys later which makes a tiny possibility that something
        #       will fail in the middle leaving those cache keys hanging without invalidators.
        #       The alternative WATCH-based optimistic locking proved to be pessimistic.
        version, cache_keys, _ = redis_client.transaction(_invalidate_conjs)
        if cache_keys:
            redis_client.delete(*cache_keys)

        # OK, we invalidated all conjunctions we had in memory, but model schema
        # may have been changed in redis long time ago. If this happened,
        # schema version will not match and we should load new schemes and redo our thing
        if int(version or 0) != cache_schemes.version(model):
            schemes = cache_schemes.load_schemes(model)
        else:
            break
Beispiel #7
0
def invalidate_from_dict(model, values):
    """
    Invalidates caches that can possibly be influenced by object.

    *values* is passed to conj_cache_key_from_scheme() for each scheme --
    presumably a mapping of the object's field values (TODO confirm against
    callers). Returns None; silently no-ops when redis is unavailable.
    """
    # Loading model schemes from local memory (or from redis)
    schemes = cache_schemes.schemes(model)

    # In case when redis is unavailable
    if schemes is None:
        return

    # We hope that our schemes are valid, but if not we will update them and redo invalidation
    # on second pass
    for _ in (1, 2):
        # Create a list of invalidators from list of schemes and values of object fields
        conjs_keys = [conj_cache_key_from_scheme(model, scheme, values) for scheme in schemes]

        # Reading scheme version, cache_keys and deleting invalidators in
        # a single transaction.
        def _invalidate_conjs(pipe):
            # Explicitly start buffering so all three commands below are
            # queued and executed as one MULTI/EXEC transaction
            pipe.multi()
            pipe.get(cache_schemes.get_version_key(model))
            # Get a union of all cache keys registered in invalidators
            pipe.sunion(conjs_keys)
            # `conjs_keys` are keys of sets containing `cache_keys` we are going to delete,
            # so we'll remove them too.
            # NOTE: There could be some other invalidators not matched with current object,
            #       which reference cache keys we delete, they will be hanging out for a while.
            pipe.delete(*conjs_keys)

        # NOTE: we delete fetched cache_keys later which makes a tiny possibility that something
        #       will fail in the middle leaving those cache keys hanging without invalidators.
        #       The alternative WATCH-based optimistic locking proved to be pessimistic.
        # Unpacks the three replies: GET result, SUNION result, DEL count.
        version, cache_keys, _ = redis_client.transaction(_invalidate_conjs)
        if cache_keys:
            redis_client.delete(*cache_keys)

        # OK, we invalidated all conjunctions we had in memory, but model schema
        # may have been changed in redis long time ago. If this happened,
        # schema version will not match and we should load new schemes and redo our thing
        if int(version or 0) != cache_schemes.version(model):
            schemes = cache_schemes.load_schemes(model)
        else:
            break
Beispiel #8
0
def invalidate_get():
    """Delete the redis entry stored under the enclosing ``get_key``."""
    key = get_key
    redis_client.delete(key)
Beispiel #9
0
def invalidate_fetch():
    """Delete the redis entry stored under the enclosing ``fetch_key``."""
    key = fetch_key
    redis_client.delete(key)
Beispiel #10
0
def invalidate_count():
    """Delete the redis entry stored under the enclosing ``count_key``."""
    key = count_key
    redis_client.delete(key)
Beispiel #11
0
 def clear(self, model):
     """
     Remove the stored lookup schemes for *model* and bump its version key.
     """
     lookup_key = self.get_lookup_key(model)
     redis_client.delete(lookup_key)
     version_key = self.get_version_key(model)
     redis_client.incr(version_key)
 def clear(self, model):
     """
     Drop *model*'s schemes from redis and increment its version counter.
     """
     # Delete first, then bump the version so readers reload fresh schemes
     redis_client.delete(self.get_lookup_key(model))
     redis_client.incr(self.get_version_key(model))
Beispiel #13
0
def invalidate_count():
    """Drop the cached value kept under the enclosing ``count_key``."""
    key = count_key
    redis_client.delete(key)
Beispiel #14
0
def invalidate_fetch():
    """Drop the cached value kept under the enclosing ``fetch_key``."""
    key = fetch_key
    redis_client.delete(key)
Beispiel #15
0
def invalidate_get():
    """Drop the cached value kept under the enclosing ``get_key``."""
    key = get_key
    redis_client.delete(key)