def handle_query():
    """Flask handler: answer one or more KairosDB-style queries, cache-assisted.

    Deserializes the query payload (POST body or `query` GET arg), resolves each
    contained kquery against the Redis cache, and returns the aggregate JSON
    response plus an `X-tscached-mode` header describing overall cache behavior.

    Returns:
        (body, status) on deserialization/backend failure, or
        (body, 200, headers) on success.
    """
    try:
        if request.method == 'POST':
            payload = json.loads(request.data)  # dict
        else:
            payload = json.loads(request.args.get('query'))
    # json.loads raises ValueError on malformed JSON and TypeError when the
    # 'query' arg is absent (None). The original bare `except:` also swallowed
    # SystemExit/KeyboardInterrupt — narrowed to the real failure modes.
    except (ValueError, TypeError):
        err = 'Cannot deserialize JSON payload.'
        logging.error(err)
        return json.dumps({'error': err}), 500

    config = app.config['tscached']
    logging.info('Query')
    redis_client = getRedisClient()
    kairos_time_range = populate_time_range(payload)
    ret_data = {'queries': []}
    overall_cache_mode = None

    # HTTP request may contain one or more kqueries
    for kquery in KQuery.from_request(payload, redis_client):
        try:
            # get whatever is in redis for this kquery
            kq_result = kquery.get_cached()

            # readahead shadow load support
            process_for_readahead(config, redis_client, kquery.get_key(),
                                  request.referrer, request.headers)

            if kq_result:
                kq_resp, cache_mode = process_cache_hit(config, redis_client, kquery, kairos_time_range)
            else:
                kq_resp = cold(config, redis_client, kquery, kairos_time_range)
                cache_mode = 'cold_miss'
        except BackendQueryFailure as e:
            # KairosDB is broken so we fail fast.
            logging.error('BackendQueryFailure: %s' % e.message)
            return json.dumps({'error': e.message}), 500
        except redis.exceptions.RedisError as e:
            # Redis is broken, so we pretend it's a cache miss. This will eat any further exceptions.
            logging.error('RedisError: ' + e.message)
            kq_resp = cold(config, redis_client, kquery, kairos_time_range)
            cache_mode = 'cold_proxy'

        ret_data['queries'].append(kq_resp)

        # Collapse per-kquery modes into one header value; differing modes
        # become 'mixed'.
        if not overall_cache_mode:
            overall_cache_mode = cache_mode
        elif cache_mode != overall_cache_mode:
            overall_cache_mode = 'mixed'

    return json.dumps(ret_data), 200, {'Content-Type': 'application/json',
                                       'X-tscached-mode': overall_cache_mode}
def handle_query():
    """Flask handler: answer one or more KairosDB-style queries, cache-assisted.

    Deserializes the query payload (POST body or `query` GET arg), resolves each
    contained kquery against the Redis cache, and returns the aggregate JSON
    response plus an `X-tscached-mode` header describing overall cache behavior.

    Returns:
        (body, status) on deserialization/backend failure, or
        (body, 200, headers) on success.
    """
    try:
        if request.method == 'POST':
            payload = json.loads(request.data)  # dict
        else:
            payload = json.loads(request.args.get('query'))
    # json.loads raises ValueError on malformed JSON and TypeError when the
    # 'query' arg is absent (None). The original bare `except:` also swallowed
    # SystemExit/KeyboardInterrupt — narrowed to the real failure modes.
    except (ValueError, TypeError):
        err = 'Cannot deserialize JSON payload.'
        logging.error(err)
        return json.dumps({'error': err}), 500

    config = app.config['tscached']
    logging.info('Query')
    redis_client = redis.StrictRedis(host=config['redis']['host'],
                                     port=config['redis']['port'])
    kairos_time_range = populate_time_range(payload)
    ret_data = {'queries': []}
    overall_cache_mode = None

    # HTTP request may contain one or more kqueries
    for kquery in KQuery.from_request(payload, redis_client):
        try:
            # get whatever is in redis for this kquery
            kq_result = kquery.get_cached()

            # readahead shadow load support
            process_for_readahead(config, redis_client, kquery.get_key(),
                                  request.referrer, request.headers)

            if kq_result:
                kq_resp, cache_mode = process_cache_hit(config, redis_client, kquery, kairos_time_range)
            else:
                kq_resp = cold(config, redis_client, kquery, kairos_time_range)
                cache_mode = 'cold_miss'
        except BackendQueryFailure as e:
            # KairosDB is broken so we fail fast.
            logging.error('BackendQueryFailure: %s' % e.message)
            return json.dumps({'error': e.message}), 500
        except redis.exceptions.RedisError as e:
            # Redis is broken, so we pretend it's a cache miss. This will eat any further exceptions.
            logging.error('RedisError: ' + e.message)
            kq_resp = cold(config, redis_client, kquery, kairos_time_range)
            cache_mode = 'cold_proxy'

        ret_data['queries'].append(kq_resp)

        # Collapse per-kquery modes into one header value; differing modes
        # become 'mixed'.
        if not overall_cache_mode:
            overall_cache_mode = cache_mode
        elif cache_mode != overall_cache_mode:
            overall_cache_mode = 'mixed'

    return json.dumps(ret_data), 200, {'Content-Type': 'application/json',
                                       'X-tscached-mode': overall_cache_mode}
def test_process_for_readahead_no():
    """A request whose headers opt out of readahead must leave the shadow list untouched."""
    mock_redis = MockRedis()
    process_for_readahead(EX_CONFIG, mock_redis, 'tscached:kquery:WAT',
                          'http://wooo?edit', HEADER_NO)
    assert mock_redis.sadd_parms == []
def test_process_for_readahead_yes():
    """A request whose headers enable readahead must add the kquery key to the shadow list."""
    mock_redis = MockRedis()
    process_for_readahead(EX_CONFIG, mock_redis, 'tscached:kquery:WAT',
                          'http://wooo?edit', HEADER_YES)
    expected = [['tscached:shadow_list', 'tscached:kquery:WAT']]
    assert mock_redis.sadd_parms == expected