# Example 1
0
def update_query_to_limit_nb_points_for_query(backend_host, backend_port, user,
                                              password, from_parts, query,
                                              parsed_query,
                                              aggregation_properties,
                                              max_nb_points_per_query):
    """Rework *query* so its expected result stays under the point budget.

    Asks the backend for an estimate of how many points the query would
    return.  When that estimate exceeds ``max_nb_points_per_query``, the
    GROUP BY time interval is enlarged proportionally, and sum() aggregations
    are rescaled by the inverse factor so the series keeps its magnitude.

    Args:
        backend_host/backend_port/user/password: backend connection info,
            forwarded to the estimation helper.
        from_parts: parsed FROM clause; only ``from_parts['schema']`` is read.
        query: the original query string.
        parsed_query: the sqlparse-style parsed form of *query*.
        aggregation_properties: unused here (kept for interface
            compatibility with the other rework helpers).
        max_nb_points_per_query: maximum allowed number of points.

    Returns:
        The reworked query string, or ``None`` when the estimation failed or
        no rework is needed.
    """
    schema = from_parts['schema']

    expected_nb_points_per_query = get_expected_nb_points_for_query(
        backend_host, backend_port, user, password, schema, query,
        parsed_query)
    if expected_nb_points_per_query is None:
        # Estimation failed: leave the query untouched.
        return None
    if expected_nb_points_per_query['nb_points'] <= max_nb_points_per_query:
        # Already within budget: nothing to rework.
        return None

    logging.info(
        'Expected nb of points per query %s is bigger than max allowed '
        'one (%s)',
        expected_nb_points_per_query['nb_points'], max_nb_points_per_query)

    # Enlarge the GROUP BY time interval by the overflow factor so the
    # reworked query returns at most max_nb_points_per_query points.
    my_factor = (expected_nb_points_per_query['nb_points'] /
                 max_nb_points_per_query)
    split_group_by_time_interval = influx_date_manipulation.split_influx_time(
        expected_nb_points_per_query['group_by_time_interval'])
    adjusted_group_by_time_value = int(
        math.ceil(my_factor * split_group_by_time_interval['number']))
    new_group_by_time_interval = (str(adjusted_group_by_time_value) +
                                  split_group_by_time_interval['unit'])

    parsed_query = influx_query_modification.change_group_by_time_interval(
        parsed_query, new_group_by_time_interval)

    # sum() over a larger interval inflates values; rescale by 1/factor so
    # the aggregated series keeps the same order of magnitude.
    if influx_query_parsing.is_sum_group_by_time(parsed_query):
        parsed_query = influx_query_modification.change_sum_group_by_time_factor(
            parsed_query, '1/' + str(my_factor))

    query = influx_query_parsing.stringify_sqlparsed(parsed_query)
    logging.info('Reworked query (limit nb points per query): %s', query)

    return query
# Example 2
0
def update_query_with_right_rp(from_parts,
                               query,
                               parsed_query,
                               known_retention_policies,
                               aggregation_properties,
                               override_explicit_rp=False):
    """Rework *query* to target the most appropriate retention policy (RP).

    Asks ``get_right_rp_for_query`` which RP (and, optionally, which
    adjusted GROUP BY time interval / sum rescaling factor) should be used,
    then applies those changes to the parsed query.

    Args:
        from_parts: parsed FROM clause dict; reads ``schema`` and
            ``measurement``, and records the chosen RP under ``rp``.
        query: the original query string.
        parsed_query: the sqlparse-style parsed form of *query*.
        known_retention_policies: RP metadata consulted by the helper.
        aggregation_properties: used to decide the counter aggregation mode
            (defaults to 'mean' when unknown).
        override_explicit_rp: when True, an RP already written in the query
            may be replaced.

    Returns:
        The reworked query string, or ``None`` when no suitable RP was
        found.
    """
    output = get_right_rp_for_query(from_parts['schema'], query, parsed_query,
                                    known_retention_policies,
                                    override_explicit_rp)

    if output is None:
        return None

    counter_aggregation_mode = get_counter_aggregation_mode(
        from_parts, aggregation_properties)
    if counter_aggregation_mode is None:
        counter_aggregation_mode = 'mean'

    is_changed = False

    if 'rp' in output:
        # Replace the FROM target with the fully-qualified
        # "schema"."rp"."measurement" form.
        from_id = influx_query_parsing.extract_from_helper(
            parsed_query, 'index')
        parsed_query.tokens[
            from_id] = '"' + from_parts['schema'] + '"."' + output[
                'rp'] + '"."' + from_parts['measurement'] + '"'
        # Bug fix: record the chosen RP only when one was returned.
        # Previously this assignment ran unconditionally after the guards,
        # raising KeyError when 'rp' was absent from the helper's output.
        from_parts['rp'] = output['rp']
        is_changed = True
    if 'group_by_time_interval' in output:
        parsed_query = influx_query_modification.change_group_by_time_interval(
            parsed_query, output['group_by_time_interval'])
        is_changed = True
    if 'sum_group_by_time_interval_factor' in output:
        # Rescaling sum() is only meaningful for counters aggregated by sum.
        if counter_aggregation_mode == 'sum':
            parsed_query = influx_query_modification.change_sum_group_by_time_factor(
                parsed_query, output['sum_group_by_time_interval_factor'])
            is_changed = True

    if is_changed:
        query = influx_query_parsing.stringify_sqlparsed(parsed_query)

    logging.info('Reworked query (auto RP): %s', query)

    return query