def test_get_range_needed():
    now = datetime.datetime.now()

    def past(mins):
        return now - datetime.timedelta(minutes=mins)

    # Test broken cache data, and cache miss.
    assert get_range_needed(past(60), None, None, now) == (past(60), now, FETCH_ALL)

    # Test for hot cache: cache has [-60, -5], request wants [-30, -10].
    assert get_range_needed(past(30), past(10), past(60), past(5)) is False

    # Test for "stale-but-not-stale-enough": 2 min expiry; cache [-60, -1]; request [-60, 0].
    assert get_range_needed(past(60), None, past(60), past(1), 120) is False

    # Test for too-stale (append): same as above, but with a 30 second threshold.
    assert get_range_needed(past(60), None, past(60), past(1), 30) == (past(1), now, FETCH_AFTER)

    # Test for new-enough but missing old data (prepend): cache has 30m, user wants 2h.
    assert get_range_needed(past(120), now, past(30), now, 10) == (past(120), past(30), FETCH_BEFORE)

    # Test for data in middle, but missing beginning *and* end: cached [-20, -10], request [-30, 0].
    assert get_range_needed(past(30), None, past(20), past(10)) == (past(30), now, FETCH_ALL)
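# The project's get_range_needed() is not shown in this section; the sketch below is a
# hypothetical reference implementation that satisfies the assertions above. Assumptions:
# FETCH_ALL / FETCH_BEFORE / FETCH_AFTER are module-level constants, a falsy end_request
# means "up to now", the default staleness threshold is 300 seconds (the real default is
# not shown), and the real function's window_size argument is accepted but ignored here.
# Exact equality against the test's pre-captured `now` presumes a frozen test clock.
def _sketch_get_range_needed(start_request, end_request, start_cache, end_cache,
                             staleness_threshold=300, window_size=None):
    now = datetime.datetime.now()
    if not end_request:
        end_request = now  # open-ended request: "up to now"

    # Broken or missing cache metadata: refetch everything.
    if not start_cache or not end_cache:
        return (start_request, end_request, FETCH_ALL)

    have_earlier = start_cache <= start_request
    have_later = (end_request - end_cache).total_seconds() <= staleness_threshold

    if have_earlier and have_later:
        return False  # hot: cache fully covers the requested range
    elif have_earlier:
        return (end_cache, end_request, FETCH_AFTER)  # append newer data only
    elif have_later:
        return (start_request, start_cache, FETCH_BEFORE)  # prepend older data only
    return (start_request, end_request, FETCH_ALL)  # gaps at both ends: overwrite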
def process_cache_hit(config, redis_client, kquery, kairos_time_range):
    """ KQuery found in cache. Decide whether to return solely cached data or to update cached data.
        If cached data should be updated, figure out how to do it.
        :param config: 'tscached' level from config file.
        :param redis_client: redis.StrictRedis
        :param kquery: kquery.KQuery object
        :param kairos_time_range: dict, time range straight from the HTTP request payload
        :return: 2-tuple: (dict: kquery resp to be added to HTTP resp, str: type of cache operation)
        :raise: utils.BackendQueryFailure, if a Kairos lookup failed.
    """
    # This relies on KQuery.get_cached() having a side effect; it must be called before this function.
    kq_result = kquery.cached_data
    try:
        start_cache = datetime.datetime.fromtimestamp(float(kq_result['earliest_data']))
        end_cache = datetime.datetime.fromtimestamp(float(kq_result['last_add_data']))
    except Exception:
        # Some sort of cache malformation or error; doesn't matter what.
        start_cache = None
        end_cache = None

    start_request, end_request = get_needed_absolute_time_range(kairos_time_range)
    staleness_threshold = config['data']['staleness_threshold']
    range_needed = get_range_needed(start_request, end_request, start_cache, end_cache,
                                    staleness_threshold, kquery.window_size)

    if not range_needed:
        # Hot cache: cached data fully covers the request.
        return hot(redis_client, kquery, kairos_time_range), 'hot'
    else:
        merge_method = range_needed[2]
        if merge_method == FETCH_ALL:
            # Warm, but data doesn't support merging.
            logging.info('Odd COLD scenario: data exists.')
            return cold(config, redis_client, kquery, kairos_time_range), 'cold_overwrite'
        elif merge_method in [FETCH_BEFORE, FETCH_AFTER]:
            # Warm, merging supported.
            mode = 'warm_' + merge_method
            return warm(config, redis_client, kquery, kairos_time_range, range_needed), mode
        else:
            raise BackendQueryFailure("Received unsupported range_needed value: %s" % range_needed[2])
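# Hypothetical wiring sketch (not the project's handler code): it shows only the shapes
# process_cache_hit reads -- a nested config dict, a StrictRedis client, a KQuery-like
# object whose get_cached() side effect has already populated cached_data, and the raw
# KairosDB time-range fields from the request payload. Actually executing it requires
# live Redis/KairosDB backends behind the hot/warm/cold helpers.
import redis


class _StubKQuery(object):
    """Stand-in for kquery.KQuery with just the attributes process_cache_hit uses."""
    window_size = None
    cached_data = {'earliest_data': '1500000000', 'last_add_data': '1500003600'}


def _example_process_cache_hit_call():
    config = {'data': {'staleness_threshold': 60}}  # seconds; the 'tscached' level of the config
    redis_client = redis.StrictRedis(host='localhost', port=6379)
    kairos_time_range = {'start_relative': {'value': '1', 'unit': 'hours'}}

    response, mode = process_cache_hit(config, redis_client, _StubKQuery(), kairos_time_range)
    # mode is one of 'hot', 'cold_overwrite', or 'warm_' + FETCH_BEFORE / FETCH_AFTER;
    # response is the per-KQuery dict to merge into the HTTP response.
    return response, mode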