def test_proxy_to_kairos_chunked_happy(m_query_kairos):
    m_query_kairos.return_value = {'queries': [{'name': 'first'}, {'name': 'second'}]}

    kq = KQuery(MockRedis())
    kq.query = {'hello': 'goodbye'}
    then = datetime.datetime.fromtimestamp(1234567890)
    diff = datetime.timedelta(minutes=30)
    time_ranges = [(then - diff, then), (then - diff - diff, then - diff)]

    results = kq.proxy_to_kairos_chunked('localhost', 8080, time_ranges)
    assert len(results) == 2
    assert m_query_kairos.call_count == 2

    expected_query = {'cache_time': 0, 'metrics': [{'hello': 'goodbye'}]}
    expected_query['start_absolute'] = int((then - diff).strftime('%s')) * 1000
    expected_query['end_absolute'] = int(then.strftime('%s')) * 1000
    assert m_query_kairos.call_args_list[0] == (('localhost', 8080, expected_query),
                                                {'propagate': False})

    expected_query['start_absolute'] = int((then - diff - diff).strftime('%s')) * 1000
    expected_query['end_absolute'] = int((then - diff).strftime('%s')) * 1000
    assert m_query_kairos.call_args_list[1] == (('localhost', 8080, expected_query),
                                                {'propagate': False})

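# NOTE: the m_* parameters throughout these tests are injected by mock.patch
# decorators that were stripped from this excerpt, and the hard-coded
# timestamps only assert cleanly under a frozen test clock (e.g. freezegun).
# A plausible scaffold for the test above -- the patch target and frozen date
# are assumptions, not confirmed by this excerpt:
#
#     import mock
#     from freezegun import freeze_time
#
#     @freeze_time('2016-01-01')
#     @mock.patch('tscached.kquery.query_kairos', autospec=True)
#     def test_proxy_to_kairos_chunked_happy(m_query_kairos):
#         ...
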
def test_perform_readahead_happy_path(m_process, m_from_cache, m_release_leader, m_become_leader):
    redis_cli = MockRedis()

    def _smem(_):
        return set(['tscached:kquery:superspecial'])
    redis_cli.smembers = _smem

    m_become_leader.return_value = True
    kqueries = []
    for ndx in xrange(10):
        kq = KQuery(redis_cli)
        kq.cached_data = {'last_add_data': int(datetime.datetime.now().strftime('%s')) - 1800,
                          'redis_key': 'tscached:kquery:' + str(ndx)}
        kqueries.append(kq)
    m_from_cache.return_value = kqueries
    m_process.return_value = {'sample_size': 666}, 'warm_append'

    assert perform_readahead({}, redis_cli) is None
    assert m_become_leader.call_count == 1
    assert m_release_leader.call_count == 1
    assert m_from_cache.call_count == 1
    assert m_from_cache.call_args_list[0][0] == (['tscached:kquery:superspecial'], redis_cli)
    assert m_process.call_count == 10

    k_t_r = {'start_relative': {'unit': 'minutes', 'value': '24194605'}}
    for ndx in xrange(10):
        assert m_process.call_args_list[ndx][0] == ({}, redis_cli, kqueries[ndx], k_t_r)

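# The magic '24194605' above is a lookback in minutes: roughly 46 years, i.e.
# the span from the Unix epoch to an early-2016 date. That only asserts
# deterministically under a frozen clock, and suggests (an inference, not
# shown in this excerpt) that perform_readahead falls back to the epoch when
# the cached kquery carries no 'earliest_data' field, as these fixtures do not.
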
def test_hot(m_from_cache):
    redis_cli = MockRedis()

    def _fake_build_response(_b, response_kquery, _c=True):
        response_kquery['sample_size'] += 100
        response_kquery['results'].append({'hello': 'goodbye'})
        return response_kquery

    mts_list = []
    for i in xrange(3):
        mts = MTS(redis_cli)
        mts.build_response = _fake_build_response
        mts_list.append(mts)
    m_from_cache.return_value = mts_list

    kq = KQuery(redis_cli)
    kq.cached_data = {'mts_keys': ['kquery:mts:1', 'kquery:mts:2', 'kquery:mts:3']}
    kairos_time_range = {'start_relative': {'unit': 'hours', 'value': '1'}}

    out = cache_calls.hot(redis_cli, kq, kairos_time_range)
    assert out['sample_size'] == 300
    assert len(out['results']) == 3

def test_proxy_to_kairos(m_query_kairos):
    m_query_kairos.return_value = {'queries': [{'name': 'first'}, {'name': 'second'}]}

    kq = KQuery(MockRedis())
    kq.query = {'hello': 'goodbye'}
    time_range = {'start_absolute': 1234567890000}
    kq.proxy_to_kairos('localhost', 8080, time_range)

    called_query = {'metrics': [{'hello': 'goodbye'}], 'cache_time': 0,
                    'start_absolute': 1234567890000}
    m_query_kairos.assert_called_once_with('localhost', 8080, called_query)

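# What the assertion above pins down: proxy_to_kairos wraps the single cached
# metric into a complete KairosDB request body, disabling KairosDB's own
# result cache and splicing in the caller's time range:
#
#     {'metrics': [kq.query], 'cache_time': 0, 'start_absolute': ...}
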
def test_proxy_to_kairos_chunked_raises_except(m_query_kairos):
    m_query_kairos.return_value = {'error': 'some error message', 'status_code': 500}

    kq = KQuery(MockRedis())
    kq.query = {'hello': 'goodbye'}
    then = datetime.datetime.fromtimestamp(1234567890)
    diff = datetime.timedelta(minutes=30)
    time_ranges = [(then - diff, then), (then - diff - diff, then - diff)]

    with pytest.raises(BackendQueryFailure):
        kq.proxy_to_kairos_chunked('localhost', 8080, time_ranges)

def test_upsert():
    class FakeMTS(object):
        def get_key(self):
            return 'rick-and-morty'

    redis_cli = MockRedis()
    kq = KQuery(redis_cli)
    kq.query = {'hello': 'some_query'}
    kq.add_mts(FakeMTS())

    kq.upsert(datetime.datetime.fromtimestamp(1234567890), None)
    assert redis_cli.set_call_count == 1
    assert redis_cli.get_call_count == 0
    assert kq.query['mts_keys'] == ['rick-and-morty']
    assert kq.query['last_add_data'] == time.time()  # exact match presumes a frozen clock
    assert kq.query['earliest_data'] == 1234567890
    assert sorted(kq.query.keys()) == ['earliest_data', 'hello', 'last_add_data', 'mts_keys']

    kq.upsert(datetime.datetime.fromtimestamp(1234567890),
              datetime.datetime.fromtimestamp(1234569890))
    assert redis_cli.set_call_count == 2
    assert redis_cli.get_call_count == 0
    assert kq.query['last_add_data'] == 1234569890
    assert kq.query['earliest_data'] == 1234567890

def test__init__etc():
    """ Test __init__, key_basis, add_mts. """
    kq = KQuery(MockRedis())
    kq.query = {'wubbalubba': 'dubdub'}
    assert kq.related_mts == set()

    kq.add_mts('hello')
    kq.add_mts('goodbye')
    kq.add_mts('hello')  # duplicate adds are collapsed by the backing set

    testset = set(['hello', 'goodbye'])
    assert kq.related_mts == testset
    assert kq.key_basis() == {'wubbalubba': 'dubdub'}

def test_from_result():
    """ Test from_result """
    redis_cli = MockRedis()
    results = {'results': [{'wubba-lubba': 'dub-dub'}, {'thats-the-way': 'the-news-goes'}]}
    kq = KQuery(redis_cli)
    kq.query = 'wat'

    ret_vals = MTS.from_result(results, redis_cli, kq)
    assert isinstance(ret_vals, GeneratorType)

    ctr = 0
    for mts in ret_vals:
        assert isinstance(mts, MTS)
        assert mts.result == results['results'][ctr]
        assert mts.expiry == 10800
        assert mts.cache_type == 'mts'
        assert mts.query_mask == 'wat'
        ctr += 1
    assert redis_cli.set_call_count == 0 and redis_cli.get_call_count == 0

def test_perform_readahead_backend_error(m_process, m_from_cache, m_release_leader, m_become_leader):
    redis_cli = MockRedis()

    def _smem(_):
        return set(['tscached:kquery:superspecial'])
    redis_cli.smembers = _smem

    m_become_leader.return_value = True
    kqueries = []
    for ndx in xrange(10):
        kq = KQuery(redis_cli)
        kq.cached_data = {'last_add_data': int(datetime.datetime.now().strftime('%s')) - 1800,
                          'redis_key': 'tscached:kquery:' + str(ndx)}
        kqueries.append(kq)
    m_from_cache.return_value = kqueries
    m_process.side_effect = BackendQueryFailure('OOPS!')

    assert perform_readahead({}, redis_cli) is None
    assert m_become_leader.call_count == 1
    assert m_release_leader.call_count == 1
    assert m_from_cache.call_count == 1
    assert m_process.call_count == 1  # fails fast: the first error stops further processing

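# Control flow of perform_readahead as the two readahead tests imply it; a
# sketch only. The collaborators are injected as arguments because their real
# names and homes are not shown in this excerpt (they are what the mock.patch
# decorators replace), and the readahead set's key name is assumed.
def _perform_readahead_sketch(config, redis_cli, become_leader, release_leader,
                              from_cache, process):
    if not become_leader(config, redis_cli):  # exactly one worker runs readahead
        return None
    try:
        keys = list(redis_cli.smembers('tscached:kquery-readahead'))
        for kquery in from_cache(keys, redis_cli):
            # Time range is derived from the kquery's cached timestamps
            # (derivation not shown in this excerpt).
            kairos_time_range = {'start_relative': {'unit': 'minutes', 'value': '...'}}
            # process() raises BackendQueryFailure when KairosDB is down; the
            # error-path test shows processing stops at the first failure.
            process(config, redis_cli, kquery, kairos_time_range)
    except BackendQueryFailure:
        pass  # swallowed, so perform_readahead still returns None
    finally:
        release_leader(config, redis_cli)  # leadership is always released
    return None
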
def handle_query():
    try:
        if request.method == 'POST':
            payload = json.loads(request.data)  # dict
        else:
            payload = json.loads(request.args.get('query'))
    except Exception:
        err = 'Cannot deserialize JSON payload.'
        logging.error(err)
        return json.dumps({'error': err}), 500

    config = app.config['tscached']
    logging.info('Query')
    redis_client = redis.StrictRedis(host=config['redis']['host'], port=config['redis']['port'])
    kairos_time_range = populate_time_range(payload)
    ret_data = {'queries': []}
    overall_cache_mode = None

    # HTTP request may contain one or more kqueries
    for kquery in KQuery.from_request(payload, redis_client):
        try:
            # get whatever is in redis for this kquery
            kq_result = kquery.get_cached()

            # readahead shadow load support
            process_for_readahead(config, redis_client, kquery.get_key(),
                                  request.referrer, request.headers)

            if kq_result:
                kq_resp, cache_mode = process_cache_hit(config, redis_client, kquery,
                                                        kairos_time_range)
            else:
                kq_resp = cold(config, redis_client, kquery, kairos_time_range)
                cache_mode = 'cold_miss'
        except BackendQueryFailure as e:
            # KairosDB is broken, so we fail fast.
            logging.error('BackendQueryFailure: %s' % e.message)
            return json.dumps({'error': e.message}), 500
        except redis.exceptions.RedisError as e:
            # Redis is broken, so we pretend it's a cache miss. This will eat any further exceptions.
            logging.error('RedisError: ' + e.message)
            kq_resp = cold(config, redis_client, kquery, kairos_time_range)
            cache_mode = 'cold_proxy'

        ret_data['queries'].append(kq_resp)
        if not overall_cache_mode:
            overall_cache_mode = cache_mode
        elif cache_mode != overall_cache_mode:
            overall_cache_mode = 'mixed'

    return json.dumps(ret_data), 200, {'Content-Type': 'application/json',
                                       'X-tscached-mode': overall_cache_mode}

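# handle_query's route decorator is not part of this excerpt; since tscached
# mimics the KairosDB HTTP API, registration presumably looks something like
# the following (path assumed):
#
#     @app.route('/api/v1/datapoints/query', methods=['POST', 'GET'])
#     def handle_query():
#         ...
#
# A GET carries the JSON in a ?query= parameter and a POST carries it in the
# request body, which is exactly the branch at the top of the function.
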
def test_from_request():
    redis_cli = MockRedis()
    example_request = {
        'metrics': [{'hello': 'some query'}, {'goodbye': 'another_query'}],
        'start_relative': {'value': '1', 'unit': 'hours'},
    }

    ret_vals = KQuery.from_request(example_request, redis_cli)
    assert isinstance(ret_vals, GeneratorType)

    ctr = 0
    for kq in ret_vals:
        assert isinstance(kq, KQuery)
        assert kq.query == example_request['metrics'][ctr]
        ctr += 1
    assert redis_cli.set_call_count == 0 and redis_cli.get_call_count == 0

def test_from_request_replace_align_sampling():
    redis_cli = MockRedis()
    aggregator = {'name': 'sum', 'align_sampling': True,
                  'sampling': {'value': '1', 'unit': 'minutes'}}
    example_request = {
        'metrics': [{'hello': 'some query', 'aggregators': [aggregator]}],
        'start_relative': {'value': '1', 'unit': 'hours'},
    }
    agg_out = {'name': 'sum', 'align_start_time': True,
               'sampling': {'value': '1', 'unit': 'minutes'}}
    request_out = {
        'metrics': [{'hello': 'some query', 'aggregators': [agg_out]}],
        'start_relative': {'value': '1', 'unit': 'hours'},
    }

    ret_vals = KQuery.from_request(example_request, redis_cli)
    assert isinstance(ret_vals, GeneratorType)
    ret_vals = list(ret_vals)
    assert len(ret_vals) == 1
    assert isinstance(ret_vals[0], KQuery)
    assert ret_vals[0].query == request_out['metrics'][0]
    assert redis_cli.set_call_count == 0 and redis_cli.get_call_count == 0

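# The rewrite that test exercises, as a standalone sketch (assumed to mirror
# what KQuery.from_request does internally; the helper name is hypothetical).
# Swapping align_sampling for align_start_time presumably keeps aggregation
# buckets stable across the shifting time windows tscached issues, so cached
# chunks remain mergeable.
def _replace_align_sampling(metric):
    for agg in metric.get('aggregators', []):
        if agg.pop('align_sampling', None):
            agg['align_start_time'] = True
    return metric
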
def test_from_cache():
    redis_cli = MockRedis()
    keys = ['tscached:kquery:deadbeef', 'tscached:kquery:deadcafe']

    ret = KQuery.from_cache(keys, redis_cli)
    assert isinstance(ret, GeneratorType)
    values = list(ret)
    assert redis_cli.derived_pipeline.pipe_get_call_count == 2
    assert redis_cli.derived_pipeline.execute_count == 1

    ctr = 0
    for kq in values:
        assert isinstance(kq, KQuery)
        assert kq.redis_key == keys[ctr]
        assert kq.query == {'hello': 'goodbye'}  # see testing.MockRedisPipeline
        assert kq.cached_data == kq.query
        ctr += 1
    assert redis_cli.set_call_count == 0 and redis_cli.get_call_count == 0
    assert redis_cli.derived_pipeline.pipe_set_call_count == 0

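# A minimal sketch of the MockRedis test double, inferred from the attributes
# these tests touch. The real one lives in the project's testing module; the
# canned '{"hello": "goodbye"}' payload matches the from_cache assertions, and
# everything else here is an assumption.
class MockRedisPipeline(object):
    def __init__(self):
        self.pipe_get_call_count = 0
        self.pipe_set_call_count = 0
        self.execute_count = 0

    def get(self, key):
        self.pipe_get_call_count += 1  # reads are queued; results come from execute()

    def execute(self):
        self.execute_count += 1
        return ['{"hello": "goodbye"}'] * self.pipe_get_call_count


class MockRedis(object):
    def __init__(self):
        self.set_call_count = 0
        self.get_call_count = 0
        self.derived_pipeline = MockRedisPipeline()

    def get(self, key):
        self.get_call_count += 1
        return '{"hello": "goodbye"}'

    def set(self, key, value, ex=None):
        self.set_call_count += 1

    def smembers(self, key):
        return set()

    def pipeline(self):
        return self.derived_pipeline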