def cacheops_stats(request):
    """
    Admin/debug view rendering cacheops cache statistics per model.

    A truthy ``?reset`` query parameter wipes every stats key in redis and
    redirects back to this view; otherwise the per-model counters stored in
    ``cache_stats:<model>:<i>`` hashes are aggregated and rendered.
    """
    if request.GET.get('reset'):
        redis_client.delete('stats_models')
        # KEYS is a full keyspace scan -- acceptable for a debug/admin
        # view, never for a hot path.
        for key in redis_client.keys('cache_stats:*'):
            redis_client.delete(key)
        return redirect('cacheops_stats')
    data = {}
    # Site-wide counter per entry of KEYS.  NOTE(review): the zip with a
    # fixed [0, 0, 0] assumes len(KEYS) == 3 and silently truncates
    # otherwise -- confirm against where KEYS is defined.
    graph_data = dict(zip(KEYS, [0, 0, 0]))
    for model in redis_client.smembers('stats_models'):
        # Per-model totals.  Only the first two KEYS get a 0 default; any
        # remaining key appears in `total` only when redis holds data for
        # it (presumably intentional -- TODO confirm).
        total = dict(zip(KEYS[:2], [0, 0]))
        values = {}
        for i, k in enumerate(KEYS):
            # Counters for stat kind k live in hash 'cache_stats:<model>:<i>'.
            d = redis_client.hgetall('cache_stats:%s:%d' % (model, i))
            if d:
                values[k] = d
                # Hash values come back as strings; sum them as ints.
                total[k] = sum(map(int, values[k].values()))
                graph_data[k] += total[k]
        total['total'] = sum(total.values())
        data[model] = {'data': values}
        data[model].update(total)
    graph_data['total'] = sum(graph_data.values())
    return render(request, 'cacheops/stats.html', {
        'data_list': data,
        'graph_data': graph_data
    })
def invalidate_model(model):
    """
    Drop every cached queryset registered for *model*.

    Heavy artillery: relies on redis set scans over the whole conjunction
    set, which can be relatively slow on large datasets -- not intended
    for hot production paths.
    """
    set_key = 'conj:%s' % get_model_name(model)
    members = redis_client.smembers(set_key)

    # Normalize whatever the client returned into a plain list of keys.
    # A str splits into a list, so the two branches are mutually exclusive.
    if isinstance(members, str):
        members = members.split()
    elif isinstance(members, set):
        members = list(members)

    if members:
        # Every cache key referenced by any conjunction, in one round trip.
        cached = redis_client.sunion(members)
        # Delete the cached querysets together with the conjunction sets.
        redis_client.delete(*(list(cached) + members))
        # BUG: a race bug here, ignoring since invalidate_model() is not
        # for hot production use
        for member in members:
            redis_client.srem(set_key, member)

    cache_schemes.clear(model)