def test_hot(m_from_cache):
    """hot path: response aggregates build_response output from every cached MTS."""
    redis_cli = MockRedis()

    def _stub_build_response(_b, response_kquery, _c=True):
        # Each stubbed MTS contributes 100 samples and one result entry.
        response_kquery['sample_size'] += 100
        response_kquery['results'].append({'hello': 'goodbye'})
        return response_kquery

    # Three MTS objects, all sharing the stubbed build_response.
    series_list = []
    for _ in xrange(3):
        series = MTS(redis_cli)
        series.build_response = _stub_build_response
        series_list.append(series)
    m_from_cache.return_value = series_list

    kq = KQuery(redis_cli)
    kq.cached_data = {
        'mts_keys': ['kquery:mts:1', 'kquery:mts:2', 'kquery:mts:3']
    }
    kairos_time_range = {'start_relative': {'unit': 'hours', 'value': '1'}}

    out = cache_calls.hot(redis_cli, kq, kairos_time_range)

    # 3 MTS x 100 samples each, one result dict apiece.
    assert out['sample_size'] == 300
    assert len(out['results']) == 3
def test_perform_readahead_happy_path(m_process, m_from_cache, m_release_leader, m_become_leader):
    """Happy path: the leader pulls the readahead set and processes every KQuery in it."""
    redis_cli = MockRedis()
    # Fake the Redis set membership lookup for the readahead key set.
    redis_cli.smembers = lambda _: set(['tscached:kquery:superspecial'])
    m_become_leader.return_value = True

    # Ten KQueries whose cached data is 30 minutes stale.
    kqueries = []
    for ndx in xrange(10):
        kq = KQuery(redis_cli)
        kq.cached_data = {
            'last_add_data': int(datetime.datetime.now().strftime('%s')) - 1800,
            'redis_key': 'tscached:kquery:' + str(ndx),
        }
        kqueries.append(kq)
    m_from_cache.return_value = kqueries
    m_process.return_value = {'sample_size': 666}, 'warm_append'

    assert perform_readahead({}, redis_cli) is None

    # Leadership is acquired and released exactly once.
    assert m_become_leader.call_count == 1
    assert m_release_leader.call_count == 1

    # The cache is read once, seeded from the fake smembers result.
    assert m_from_cache.call_count == 1
    assert m_from_cache.call_args_list[0][0] == (['tscached:kquery:superspecial'], redis_cli)

    # Every KQuery is processed with the same (huge) relative time range.
    assert m_process.call_count == 10
    k_t_r = {'start_relative': {'unit': 'minutes', 'value': '24194605'}}
    for ndx in xrange(10):
        assert m_process.call_args_list[ndx][0] == ({}, redis_cli, kqueries[ndx], k_t_r)
def test_perform_readahead_backend_error(m_process, m_from_cache, m_release_leader, m_become_leader):
    """Backend failure: processing stops after the first BackendQueryFailure,
    but leadership is still released."""
    redis_cli = MockRedis()
    # Fake the Redis set membership lookup for the readahead key set.
    redis_cli.smembers = lambda _: set(['tscached:kquery:superspecial'])
    m_become_leader.return_value = True

    # Ten KQueries whose cached data is 30 minutes stale.
    kqueries = []
    for ndx in xrange(10):
        kq = KQuery(redis_cli)
        kq.cached_data = {
            'last_add_data': int(datetime.datetime.now().strftime('%s')) - 1800,
            'redis_key': 'tscached:kquery:' + str(ndx),
        }
        kqueries.append(kq)
    m_from_cache.return_value = kqueries

    # First processing attempt blows up with a backend error.
    m_process.side_effect = BackendQueryFailure('OOPS!')

    assert perform_readahead({}, redis_cli) is None

    # Leadership is acquired and released exactly once despite the failure.
    assert m_become_leader.call_count == 1
    assert m_release_leader.call_count == 1
    assert m_from_cache.call_count == 1
    # Processing aborts on the first failure instead of trying all ten.
    assert m_process.call_count == 1
def test_hot_inline_cached_keys(m_from_cache):
    """hot path: response aggregates build_response output from every cached MTS.

    NOTE(review): this function was previously also named ``test_hot``,
    duplicating the earlier test of the same name in this module. Under
    pytest, the later definition shadows the earlier one, so only one of
    the two ever ran. Renamed so both tests are collected and executed.
    """
    redis_cli = MockRedis()

    def _fake_build_response(_b, response_kquery, _c=True):
        # Each stubbed MTS contributes 100 samples and one result entry.
        response_kquery['sample_size'] += 100
        response_kquery['results'].append({'hello': 'goodbye'})
        return response_kquery

    # Three MTS objects, all sharing the stubbed build_response.
    mts_list = []
    for i in xrange(3):
        mts = MTS(redis_cli)
        mts.build_response = _fake_build_response
        mts_list.append(mts)
    m_from_cache.return_value = mts_list

    kq = KQuery(redis_cli)
    kq.cached_data = {'mts_keys': ['kquery:mts:1', 'kquery:mts:2', 'kquery:mts:3']}
    kairos_time_range = {'start_relative': {'unit': 'hours', 'value': '1'}}

    out = cache_calls.hot(redis_cli, kq, kairos_time_range)

    # 3 MTS x 100 samples each, one result dict apiece.
    assert out['sample_size'] == 300
    assert len(out['results']) == 3