Example #1
File: shadow.py  Project: zlim/tscached
def perform_readahead(config, redis_client):
    """ The heart of the readahead script.
        :param config: dict, tscached level of config.
        :param redis_client: redis.StrictRedis
        :return: void
    """
    lock = become_leader(config, redis_client)
    if not lock:
        logging.info('Could not become leader; exiting.')
        return

    try:
        redis_keys = list(redis_client.smembers(SHADOW_LIST))
        logging.info('Found %d KQuery keys in the shadow list' % len(redis_keys))

        for kq in kquery.KQuery.from_cache(redis_keys, redis_client):
            last_ts = kq.cached_data['last_add_data']  # unix timestamp, seconds
            mins_in_past = int((time.time() - last_ts) / 60) + 5  # elapsed minutes since the last add, plus 5m of margin

            # all that really matters is that end_ values are unset.
            kairos_time_range = {'start_relative': {'unit': 'minutes', 'value': str(mins_in_past)}}
            # throw away the diagnostic mode info for the moment.
            kq_resp, _ = cache_calls.process_cache_hit(config, redis_client, kq, kairos_time_range)
            size = kq_resp.get('sample_size', -1)
            logging.debug('Processed KQuery %s; sample size now at %d' % (kq.redis_key, size))
    except BackendQueryFailure as e:
        logging.error('BackendQueryFailure: %s' % e.message)
    except redis.exceptions.RedisError as e:
        logging.error('RedisError: ' + e.message)

    release_leader(lock, redis_client)
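
For context, perform_readahead is typically driven by a small standalone script rather than called inline. The sketch below is illustrative only: the tscached.shadow import path, the config file name, and the YAML layout are assumptions, not the project's actual entry point.

import logging

import redis
import yaml

from tscached.shadow import perform_readahead  # assumed import path


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    # Hypothetical config file; only the ['tscached']['redis'] block is used here.
    with open('tscached.yaml') as handle:
        config = yaml.safe_load(handle)['tscached']
    redis_client = redis.StrictRedis(host=config['redis']['host'],
                                     port=config['redis']['port'])
    perform_readahead(config, redis_client)
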
Example #2
def handle_query():
    try:
        if request.method == 'POST':
            payload = json.loads(request.data)  # dict
        else:
            payload = json.loads(request.args.get('query'))
    except (ValueError, TypeError):
        err = 'Cannot deserialize JSON payload.'
        logging.error(err)
        return json.dumps({'error': err}), 500

    config = app.config['tscached']

    logging.info('Query')
    redis_client = getRedisClient()
    kairos_time_range = populate_time_range(payload)
    ret_data = {'queries': []}
    overall_cache_mode = None
    # HTTP request may contain one or more kqueries
    for kquery in KQuery.from_request(payload, redis_client):
        try:
            # get whatever is in redis for this kquery
            kq_result = kquery.get_cached()

            # readahead shadow load support
            process_for_readahead(config, redis_client, kquery.get_key(),
                                  request.referrer, request.headers)
            if kq_result:
                kq_resp, cache_mode = process_cache_hit(
                    config, redis_client, kquery, kairos_time_range)
            else:
                kq_resp = cold(config, redis_client, kquery, kairos_time_range)
                cache_mode = 'cold_miss'
        except BackendQueryFailure as e:
            # KairosDB is broken so we fail fast.
            logging.error('BackendQueryFailure: %s' % e.message)
            return json.dumps({'error': e.message}), 500
        except redis.exceptions.RedisError as e:
            # Redis is broken, so we pretend it's a cache miss. This will eat any further exceptions.
            logging.error('RedisError: ' + e.message)
            kq_resp = cold(config, redis_client, kquery, kairos_time_range)
            cache_mode = 'cold_proxy'
        ret_data['queries'].append(kq_resp)

        if not overall_cache_mode:
            overall_cache_mode = cache_mode
        elif cache_mode != overall_cache_mode:
            overall_cache_mode = 'mixed'
    return json.dumps(ret_data), 200, {
        'Content-Type': 'application/json',
        'X-tscached-mode': overall_cache_mode
    }
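
From a client's perspective, handle_query accepts a KairosDB-style query (POSTed JSON, or a GET with a 'query' parameter) and reports how it was served in the X-tscached-mode response header. The sketch below assumes the route mirrors KairosDB's /api/v1/datapoints/query path and a local deployment on port 8008; both the URL and the metric name are assumptions for illustration.

import json

import requests  # any HTTP client works; requests is used here for brevity

# KairosDB-style body: a relative start time plus one or more metrics.
payload = {
    'start_relative': {'value': 1, 'unit': 'hours'},
    'metrics': [{'name': 'system.cpu.load', 'tags': {'host': ['web01']}}],
}

# Assumed URL: tscached is presumed to mirror KairosDB's query path; adjust
# host, port, and path to match your deployment.
resp = requests.post('http://localhost:8008/api/v1/datapoints/query',
                     data=json.dumps(payload))

print(resp.headers.get('X-tscached-mode'))  # e.g. 'cold_miss', 'cold_proxy', or 'mixed'
print(len(resp.json()['queries']))          # one result entry per KQuery in the request
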
Example #3
def handle_query():
    try:
        if request.method == 'POST':
            payload = json.loads(request.data)  # dict
        else:
            payload = json.loads(request.args.get('query'))
    except (ValueError, TypeError):
        err = 'Cannot deserialize JSON payload.'
        logging.error(err)
        return json.dumps({'error': err}), 500

    config = app.config['tscached']

    logging.info('Query')
    redis_client = redis.StrictRedis(host=config['redis']['host'], port=config['redis']['port'])
    kairos_time_range = populate_time_range(payload)
    ret_data = {'queries': []}
    overall_cache_mode = None

    # HTTP request may contain one or more kqueries
    for kquery in KQuery.from_request(payload, redis_client):
        try:
            # get whatever is in redis for this kquery
            kq_result = kquery.get_cached()

            # readahead shadow load support
            process_for_readahead(config, redis_client, kquery.get_key(), request.referrer,
                                  request.headers)
            if kq_result:
                kq_resp, cache_mode = process_cache_hit(config, redis_client, kquery, kairos_time_range)
            else:
                kq_resp = cold(config, redis_client, kquery, kairos_time_range)
                cache_mode = 'cold_miss'
        except BackendQueryFailure as e:
            # KairosDB is broken so we fail fast.
            logging.error('BackendQueryFailure: %s' % e.message)
            return json.dumps({'error': e.message}), 500
        except redis.exceptions.RedisError as e:
            # Redis is broken, so we pretend it's a cache miss. This will eat any further exceptions.
            logging.error('RedisError: ' + e.message)
            kq_resp = cold(config, redis_client, kquery, kairos_time_range)
            cache_mode = 'cold_proxy'
        ret_data['queries'].append(kq_resp)

        if not overall_cache_mode:
            overall_cache_mode = cache_mode
        elif cache_mode != overall_cache_mode:
            overall_cache_mode = 'mixed'

    return json.dumps(ret_data), 200, {'Content-Type': 'application/json', 'X-tscached-mode': overall_cache_mode}
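
Both handlers reduce the per-query cache modes to a single X-tscached-mode value: the shared mode when every KQuery agrees, 'mixed' otherwise. The same reduction, restated as a standalone helper; the function name and the standalone form are illustrative, not part of tscached.

def overall_mode(modes):
    """Collapse per-query cache modes into one header value.

    Returns the common mode when all entries agree, 'mixed' when they differ,
    and None for an empty sequence -- equivalent to the loop in handle_query above.
    """
    overall = None
    for mode in modes:
        if overall is None:
            overall = mode
        elif mode != overall:
            return 'mixed'
    return overall


assert overall_mode(['cold_miss', 'cold_miss']) == 'cold_miss'
assert overall_mode(['cold_miss', 'cold_proxy']) == 'mixed'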