def handle_query():
    """Flask view: answer a KairosDB-style query, serving from Redis cache when possible.

    Accepts the query as JSON in the POST body or in the ``query`` GET
    parameter. Returns a (body, status[, headers]) Flask response tuple; on
    success the ``X-tscached-mode`` header describes how the cache behaved
    across all kqueries in the request.
    """
    try:
        if request.method == 'POST':
            payload = json.loads(request.data)  # dict
        else:
            payload = json.loads(request.args.get('query'))
    except (TypeError, ValueError):
        # ValueError: malformed JSON; TypeError: missing 'query' GET arg (None).
        # Was a bare `except:`, which would also swallow SystemExit/KeyboardInterrupt.
        err = 'Cannot deserialize JSON payload.'
        logging.error(err)
        return json.dumps({'error': err}), 500

    config = app.config['tscached']
    logging.info('Query')
    redis_client = getRedisClient()
    kairos_time_range = populate_time_range(payload)
    ret_data = {'queries': []}
    overall_cache_mode = None

    # HTTP request may contain one or more kqueries
    for kquery in KQuery.from_request(payload, redis_client):
        try:
            # get whatever is in redis for this kquery
            kq_result = kquery.get_cached()

            # readahead shadow load support
            process_for_readahead(config, redis_client, kquery.get_key(),
                                  request.referrer, request.headers)

            if kq_result:
                kq_resp, cache_mode = process_cache_hit(config, redis_client, kquery, kairos_time_range)
            else:
                kq_resp = cold(config, redis_client, kquery, kairos_time_range)
                cache_mode = 'cold_miss'
        except BackendQueryFailure as e:
            # KairosDB is broken so we fail fast.
            logging.error('BackendQueryFailure: %s' % e.message)
            return json.dumps({'error': e.message}), 500
        except redis.exceptions.RedisError as e:
            # Redis is broken, so we pretend it's a cache miss. This will eat any further exceptions.
            logging.error('RedisError: ' + e.message)
            kq_resp = cold(config, redis_client, kquery, kairos_time_range)
            cache_mode = 'cold_proxy'

        ret_data['queries'].append(kq_resp)

        # Aggregate per-kquery modes: identical modes pass through unchanged;
        # any disagreement collapses to 'mixed'.
        if not overall_cache_mode:
            overall_cache_mode = cache_mode
        elif cache_mode != overall_cache_mode:
            overall_cache_mode = 'mixed'

    return json.dumps(ret_data), 200, {
        'Content-Type': 'application/json',
        'X-tscached-mode': overall_cache_mode,
    }
def handle_query():
    """Flask view: answer a KairosDB-style query, serving from Redis cache when possible.

    Accepts the query as JSON in the POST body or in the ``query`` GET
    parameter. Connects to Redis using host/port from the 'tscached' app
    config. Returns a (body, status[, headers]) Flask response tuple; on
    success the ``X-tscached-mode`` header describes how the cache behaved.
    """
    try:
        if request.method == 'POST':
            payload = json.loads(request.data)  # dict
        else:
            payload = json.loads(request.args.get('query'))
    except (TypeError, ValueError):
        # ValueError: malformed JSON; TypeError: missing 'query' GET arg (None).
        # Was a bare `except:`, which would also swallow SystemExit/KeyboardInterrupt.
        err = 'Cannot deserialize JSON payload.'
        logging.error(err)
        return json.dumps({'error': err}), 500

    config = app.config['tscached']
    logging.info('Query')
    redis_client = redis.StrictRedis(host=config['redis']['host'], port=config['redis']['port'])
    kairos_time_range = populate_time_range(payload)
    ret_data = {'queries': []}
    overall_cache_mode = None

    # HTTP request may contain one or more kqueries
    for kquery in KQuery.from_request(payload, redis_client):
        try:
            # get whatever is in redis for this kquery
            kq_result = kquery.get_cached()

            # readahead shadow load support
            process_for_readahead(config, redis_client, kquery.get_key(),
                                  request.referrer, request.headers)

            if kq_result:
                kq_resp, cache_mode = process_cache_hit(config, redis_client, kquery, kairos_time_range)
            else:
                kq_resp = cold(config, redis_client, kquery, kairos_time_range)
                cache_mode = 'cold_miss'
        except BackendQueryFailure as e:
            # KairosDB is broken so we fail fast.
            logging.error('BackendQueryFailure: %s' % e.message)
            return json.dumps({'error': e.message}), 500
        except redis.exceptions.RedisError as e:
            # Redis is broken, so we pretend it's a cache miss. This will eat any further exceptions.
            logging.error('RedisError: ' + e.message)
            kq_resp = cold(config, redis_client, kquery, kairos_time_range)
            cache_mode = 'cold_proxy'

        ret_data['queries'].append(kq_resp)

        # Aggregate per-kquery modes: identical modes pass through unchanged;
        # any disagreement collapses to 'mixed'.
        if not overall_cache_mode:
            overall_cache_mode = cache_mode
        elif cache_mode != overall_cache_mode:
            overall_cache_mode = 'mixed'

    return json.dumps(ret_data), 200, {'Content-Type': 'application/json',
                                       'X-tscached-mode': overall_cache_mode}
def test_from_request():
    """from_request yields one KQuery per metric, in request order, without touching redis."""
    redis_cli = MockRedis()
    example_request = {
        "metrics": [{"hello": "some query"}, {"goodbye": "another_query"}],
        "start_relative": {"value": "1", "unit": "hours"},
    }
    ret_vals = KQuery.from_request(example_request, redis_cli)
    assert isinstance(ret_vals, GeneratorType)

    # enumerate replaces the original hand-rolled counter
    seen = 0
    for ctr, kq in enumerate(ret_vals):
        assert isinstance(kq, KQuery)
        assert kq.query == example_request["metrics"][ctr]
        seen = ctr + 1
    # Guard against a vacuously-passing loop: both metrics must be yielded.
    assert seen == 2
    assert redis_cli.set_call_count == 0 and redis_cli.get_call_count == 0
def test_from_request_replace_align_sampling():
    """KQuery.from_request rewrites 'align_sampling' to 'align_start_time' in aggregators."""
    mock_redis = MockRedis()

    incoming = {
        'metrics': [{
            'hello': 'some query',
            'aggregators': [{
                'name': 'sum',
                'align_sampling': True,
                'sampling': {'value': '1', 'unit': 'minutes'},
            }],
        }],
        'start_relative': {'value': '1', 'unit': 'hours'},
    }
    expected = {
        'metrics': [{
            'hello': 'some query',
            'aggregators': [{
                'name': 'sum',
                'align_start_time': True,
                'sampling': {'value': '1', 'unit': 'minutes'},
            }],
        }],
        'start_relative': {'value': '1', 'unit': 'hours'},
    }

    generated = KQuery.from_request(incoming, mock_redis)
    assert isinstance(generated, GeneratorType)

    kqueries = list(generated)
    assert len(kqueries) == 1
    assert isinstance(kqueries[0], KQuery)
    assert kqueries[0].query == expected['metrics'][0]
    # No redis traffic may happen during request parsing.
    assert mock_redis.set_call_count == 0 and mock_redis.get_call_count == 0
def test_from_request_replace_align_sampling():
    """KQuery.from_request rewrites 'align_sampling' to 'align_start_time' in aggregators."""
    mock_redis = MockRedis()

    # Build the incoming request and the post-rewrite expectation separately
    # so a mutation bug in from_request cannot mask itself via shared objects.
    incoming = {
        "metrics": [{
            "hello": "some query",
            "aggregators": [{"name": "sum", "align_sampling": True,
                             "sampling": {"value": "1", "unit": "minutes"}}],
        }],
        "start_relative": {"value": "1", "unit": "hours"},
    }
    expected = {
        "metrics": [{
            "hello": "some query",
            "aggregators": [{"name": "sum", "align_start_time": True,
                             "sampling": {"value": "1", "unit": "minutes"}}],
        }],
        "start_relative": {"value": "1", "unit": "hours"},
    }

    generated = KQuery.from_request(incoming, mock_redis)
    assert isinstance(generated, GeneratorType)

    kqueries = list(generated)
    assert len(kqueries) == 1
    assert isinstance(kqueries[0], KQuery)
    assert kqueries[0].query == expected["metrics"][0]
    # No redis traffic may happen during request parsing.
    assert mock_redis.set_call_count == 0 and mock_redis.get_call_count == 0
def test_from_request():
    """from_request yields one KQuery per metric, in request order, without touching redis."""
    redis_cli = MockRedis()
    example_request = {
        'metrics': [{'hello': 'some query'}, {'goodbye': 'another_query'}],
        'start_relative': {'value': '1', 'unit': 'hours'},
    }
    ret_vals = KQuery.from_request(example_request, redis_cli)
    assert isinstance(ret_vals, GeneratorType)

    # enumerate replaces the original hand-rolled counter
    seen = 0
    for ctr, kq in enumerate(ret_vals):
        assert isinstance(kq, KQuery)
        assert kq.query == example_request['metrics'][ctr]
        seen = ctr + 1
    # Guard against a vacuously-passing loop: both metrics must be yielded.
    assert seen == 2
    assert redis_cli.set_call_count == 0 and redis_cli.get_call_count == 0