def test_get_needed_absolute_time_range(m_dt):
    """Absolute and relative request ranges resolve to the expected datetimes."""
    # Partial-mock datetime.datetime: freeze now(), but keep the constructor working.
    # Magic: http://www.voidspace.org.uk/python/mock/examples.html#partial-mocking
    m_dt.now.return_value = datetime.datetime.fromtimestamp(1455390419)
    m_dt.side_effect = lambda *args, **kw: datetime.datetime(*args, **kw)

    # Only a start given: end stays open (None). Timestamps arrive in epoch ms.
    payload = {'start_absolute': 1234567890000}
    start, end = get_needed_absolute_time_range(payload)
    assert end is None
    assert start == datetime.datetime.fromtimestamp(1234567890)

    # Both start and end given as absolute epoch-millisecond values.
    payload = {'start_absolute': 1234567890000, 'end_absolute': 1234657890000}
    start, end = get_needed_absolute_time_range(payload)
    assert start == datetime.datetime.fromtimestamp(1234567890)
    assert end == datetime.datetime.fromtimestamp(1234657890)

    # Relative values are resolved against the (mocked) current time.
    payload = {
        'start_relative': {'value': '1', 'unit': 'hours'},
        'end_relative': {'value': '1', 'unit': 'minutes'},
    }
    start, end = get_needed_absolute_time_range(payload)
    assert start == datetime.datetime.now() - datetime.timedelta(hours=1)
    assert end == datetime.datetime.now() - datetime.timedelta(minutes=1)
def test_get_needed_absolute_time_range(m_dt):
    """Check start/end resolution for absolute-only, absolute-pair, and relative inputs."""
    # Freeze now() on the patched datetime.datetime while delegating construction
    # to the real class. See:
    # http://www.voidspace.org.uk/python/mock/examples.html#partial-mocking
    m_dt.now.return_value = datetime.datetime.fromtimestamp(1455390419)
    m_dt.side_effect = lambda *args, **kw: datetime.datetime(*args, **kw)

    # Case 1: start_absolute alone -> end is None (epoch ms in, datetime out).
    request = {'start_absolute': 1234567890000}
    got_start, got_end = get_needed_absolute_time_range(request)
    assert got_end is None
    assert got_start == datetime.datetime.fromtimestamp(1234567890)

    # Case 2: start_absolute plus end_absolute.
    request = {'start_absolute': 1234567890000, 'end_absolute': 1234657890000}
    got_start, got_end = get_needed_absolute_time_range(request)
    assert got_start == datetime.datetime.fromtimestamp(1234567890)
    assert got_end == datetime.datetime.fromtimestamp(1234657890)

    # Case 3: relative offsets, computed back from the mocked now().
    request = {
        'start_relative': {'value': '1', 'unit': 'hours'},
        'end_relative': {'value': '1', 'unit': 'minutes'},
    }
    got_start, got_end = get_needed_absolute_time_range(request)
    assert got_start == datetime.datetime.now() - datetime.timedelta(hours=1)
    assert got_end == datetime.datetime.now() - datetime.timedelta(minutes=1)
def process_cache_hit(config, redis_client, kquery, kairos_time_range):
    """ KQuery found in cache. Decide whether to return solely cached data or to update cached data.
        If cached data should be updated, figure out how to do it.
        :param config: 'tscached' level from config file.
        :param redis_client: redis.StrictRedis
        :param kquery: kquery.KQuery object
        :param kairos_time_range: dict, time range straight from the HTTP request payload
        :return: 2-tuple: (dict: kquery resp to be added to HTTP resp, str: type of cache operation)
        :raise: utils.BackendQueryFailure, if a Kairos lookup failed.
    """
    # this relies on KQuery.get_cached() having a side effect. it must be called before this function.
    kq_result = kquery.cached_data
    try:
        start_cache = datetime.datetime.fromtimestamp(float(kq_result['earliest_data']))
        end_cache = datetime.datetime.fromtimestamp(float(kq_result['last_add_data']))
    except (KeyError, TypeError, ValueError, OverflowError):
        # Cache metadata missing or malformed (absent keys, non-numeric or
        # out-of-range values): fall back to "no usable cached range".
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        start_cache = None
        end_cache = None

    start_request, end_request = get_needed_absolute_time_range(kairos_time_range)
    staleness_threshold = config['data']['staleness_threshold']
    range_needed = get_range_needed(start_request, end_request, start_cache, end_cache,
                                    staleness_threshold, kquery.window_size)

    if not range_needed:
        # hot cache: everything the request needs is already cached and fresh.
        return hot(redis_client, kquery, kairos_time_range), 'hot'

    merge_method = range_needed[2]
    if merge_method == FETCH_ALL:
        # warm, but data doesn't support merging: overwrite with a full fetch.
        logging.info('Odd COLD scenario: data exists.')
        return cold(config, redis_client, kquery, kairos_time_range), 'cold_overwrite'
    elif merge_method in [FETCH_BEFORE, FETCH_AFTER]:
        # warm, merging supported: fetch only the missing edge of the range.
        mode = 'warm_' + merge_method
        return warm(config, redis_client, kquery, kairos_time_range, range_needed), mode
    else:
        raise BackendQueryFailure("Received unsupported range_needed value: %s" % merge_method)
def process_cache_hit(config, redis_client, kquery, kairos_time_range):
    """ KQuery found in cache. Decide whether to return solely cached data or to update cached data.
        If cached data should be updated, figure out how to do it.
        :param config: 'tscached' level from config file.
        :param redis_client: redis.StrictRedis
        :param kquery: kquery.KQuery object
        :param kairos_time_range: dict, time range straight from the HTTP request payload
        :return: 2-tuple: (dict: kquery resp to be added to HTTP resp, str: type of cache operation)
        :raise: utils.BackendQueryFailure, if a Kairos lookup failed.
    """
    # this relies on KQuery.get_cached() having a side effect. it must be called before this function.
    kq_result = kquery.cached_data
    try:
        start_cache = datetime.datetime.fromtimestamp(float(kq_result['earliest_data']))
        end_cache = datetime.datetime.fromtimestamp(float(kq_result['last_add_data']))
    except (KeyError, TypeError, ValueError, OverflowError):
        # Cache metadata missing or malformed (absent keys, non-numeric or
        # out-of-range values): fall back to "no usable cached range".
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        start_cache = None
        end_cache = None

    start_request, end_request = get_needed_absolute_time_range(kairos_time_range)
    staleness_threshold = config['data']['staleness_threshold']
    range_needed = get_range_needed(start_request, end_request, start_cache, end_cache,
                                    staleness_threshold, kquery.window_size)

    if not range_needed:
        # hot cache: everything the request needs is already cached and fresh.
        return hot(redis_client, kquery, kairos_time_range), 'hot'

    merge_method = range_needed[2]
    if merge_method == FETCH_ALL:
        # warm, but data doesn't support merging: overwrite with a full fetch.
        logging.info('Odd COLD scenario: data exists.')
        return cold(config, redis_client, kquery, kairos_time_range), 'cold_overwrite'
    elif merge_method in [FETCH_BEFORE, FETCH_AFTER]:
        # warm, merging supported: fetch only the missing edge of the range.
        mode = 'warm_' + merge_method
        return warm(config, redis_client, kquery, kairos_time_range, range_needed), mode
    else:
        raise BackendQueryFailure("Received unsupported range_needed value: %s" % merge_method)