Example #1
def get_new_group_by_time_interval_according_to_rp(
        current_group_by_time_interval, measurement, new_rp):
    # TODO: we might want to be able to (re)define it on a per-measurement basis
    current = influx_date_manipulation.influx_interval_to_timedelta(
        current_group_by_time_interval)
    new = influx_date_manipulation.influx_interval_to_timedelta(
        new_rp['interval'])
    if current < new:
        logging.debug('Selected RP (' + new_rp['name'] +
                      ') has lower precision interval (' + new_rp['interval'] +
                      ') than what query asks for (' +
                      current_group_by_time_interval + '), changing to: ' +
                      new_rp['interval'])
        return new_rp['interval']
    else:
        logging.debug('Selected RP (' + new_rp['name'] +
                      ') has higher or equal precision interval (' +
                      new_rp['interval'] + ') than what query asks for (' +
                      current_group_by_time_interval + '), no change')
        # no change: keep the interval the query already asked for
        return current_group_by_time_interval
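
For reference, a self-contained sketch of the decision this helper makes, using plain datetime.timedelta values instead of the project's influx_date_manipulation helpers (which are assumed here to turn interval strings such as '1m' or '1h' into timedeltas):

from datetime import timedelta

def pick_interval(current, new):
    # keep the coarser (larger) of the two intervals, mirroring the choice made
    # above when the retention policy is less precise than the query's GROUP BY
    return new if current < new else current

assert pick_interval(timedelta(minutes=1), timedelta(hours=1)) == timedelta(hours=1)
assert pick_interval(timedelta(hours=1), timedelta(minutes=1)) == timedelta(hours=1)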
Example #2
def extract_lower_time_bound(query):
    from_time = None
    match_from_interval = lower_time_bound_relative_re.match(query)
    if match_from_interval:
        from_time_interval = match_from_interval.groupdict()['from']
        from_timedelta = influx_date_manipulation.influx_interval_to_timedelta(
            from_time_interval)
        from_time = influx_date_manipulation.apply_timedelta_on_now(
            -1 * from_timedelta)
    else:
        match_from = lower_time_bound_absolute_re.match(query)
        if match_from:
            from_time = match_from.groupdict()['from']
            from_time = influx_date_manipulation.influx_timestamp_to_datetime(
                from_time)
    return from_time
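
The regexes this function relies on (lower_time_bound_relative_re and lower_time_bound_absolute_re) are defined elsewhere in the project; the stand-ins below are purely illustrative of the two WHERE-clause shapes being distinguished, a relative now() offset versus an absolute timestamp:

import re

# hypothetical patterns; the real module-level regexes may differ
relative_re = re.compile(r".*time\s*>=?\s*now\(\)\s*-\s*(?P<from>\d+[smhdw])")
absolute_re = re.compile(r".*time\s*>=?\s*'(?P<from>[^']+)'")

m = relative_re.match(
    "SELECT mean(value) FROM cpu WHERE time > now() - 6h GROUP BY time(1m)")
print(m.groupdict()['from'])  # '6h': converted to a timedelta, then subtracted from now()

m = absolute_re.match(
    "SELECT mean(value) FROM cpu WHERE time > '2021-01-01T00:00:00Z'")
print(m.groupdict()['from'])  # parsed into a datetime by the absolute branch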
Example #3
def extract_upper_time_bound(query):
    to_time = None
    match_to_interval = upper_time_bound_relative_re.match(query)
    match_to_interval_2 = upper_time_bound_is_now_re.match(query)
    if match_to_interval:
        to_time_interval = match_to_interval.groupdict()['to']
        to_timedelta = influx_date_manipulation.influx_interval_to_timedelta(
            to_time_interval)
        to_time = influx_date_manipulation.apply_timedelta_on_now(-1 *
                                                                  to_timedelta)
    elif match_to_interval_2:
        return None
    else:
        match_to = upper_time_bound_absolute_re.match(query)
        if match_to:
            to_time = match_to.groupdict()['to']
            to_time = influx_date_manipulation.influx_timestamp_to_datetime(
                to_time)
    return to_time
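
The only branch that differs from the lower-bound case is the explicit now() check: when the upper bound is simply now(), the function returns None instead of a timestamp. A hypothetical stand-in for upper_time_bound_is_now_re illustrates the distinction (the real pattern may differ):

import re

is_now_re = re.compile(r".*time\s*<=?\s*now\(\)(?!\s*-)")

print(bool(is_now_re.match("SELECT mean(value) FROM cpu WHERE time < now()")))       # True: return None
print(bool(is_now_re.match("SELECT mean(value) FROM cpu WHERE time < now() - 1h")))  # False: relative branch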
Example #4
    def rework_query(query, parsed_query):
        group_by_interval_influx = influx_query_parsing.extract_time_interval_group_by(
            parsed_query)
        if group_by_interval_influx is None:
            logging.error(
                'Could not extract group by time interval from query')
            return None

        group_by_interval = influx_date_manipulation.influx_interval_to_timedelta(
            group_by_interval_influx)
        group_by_interval_parts = influx_date_manipulation.split_influx_time(
            group_by_interval_influx)
        number_group_by_interval = group_by_interval_parts['number']
        unit_group_by_interval = group_by_interval_parts['unit']

        # NB: we extend the lower time bound by twice the group-by interval to
        # compensate for the first interval being removed
        query_time_shift = str(
            2 * number_group_by_interval) + unit_group_by_interval
        query = influx_query_modification.extend_lower_time_bound(
            query, query_time_shift)
        # logging.debug("new query: {0}".format(query))

        return [query, group_by_interval]
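
A minimal sketch of the time-shift computation above, assuming split_influx_time returns a dict such as {'number': 30, 'unit': 's'} for '30s' (the shape is inferred from how the helper is used here):

parts = {'number': 30, 'unit': 's'}  # assumed output of split_influx_time('30s')
query_time_shift = str(2 * parts['number']) + parts['unit']
print(query_time_shift)  # '60s': the amount by which the lower time bound is pushed back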
Example #5
    def action(self, user, password, schema, query, more=None):

        overflow_value = more['overflow_value']

        nnd_interval_list = influx_query_parsing.extract_non_negative_derivative_time_interval(
            query)
        nnd_column_list = influx_query_parsing.extract_non_negative_derivative_column_name(
            query)

        nnd_interval_ms_list = []
        for nnd_interval in nnd_interval_list:
            nnd_interval_delta = influx_date_manipulation.influx_interval_to_timedelta(
                nnd_interval)
            nnd_interval_ms = int(nnd_interval_delta.total_seconds() * 1000)
            nnd_interval_ms_list.append(nnd_interval_ms)

        # give repeated default 'non_negative_derivative' column names a
        # positional suffix so they stay distinguishable, and remember which
        # positions used the default name
        nb_default_column_name = 0
        default_column_name_map = {}
        for i, nnd_column in enumerate(nnd_column_list):
            if nnd_column == 'non_negative_derivative':
                if nb_default_column_name >= 1:
                    nnd_column_list[i] = nnd_column + '_' + str(i)
                nb_default_column_name += 1
                default_column_name_map[i] = nnd_column

        alt_query = influx_query_modification.remove_non_negative_derivative(
            query, None, forced_column_name_map=default_column_name_map)
        if alt_query is None:
            return None

        group_by_interval_influx = influx_query_parsing.extract_time_interval_group_by(
            query)
        if group_by_interval_influx is None:
            logging.error(
                'Could not extract group by time interval from query')
            return None
        group_by_interval_parts = influx_date_manipulation.split_influx_time(
            group_by_interval_influx)
        number_group_by_interval = group_by_interval_parts['number']
        unit_group_by_interval = group_by_interval_parts['unit']
        query_time_shift = str(
            2 * number_group_by_interval) + unit_group_by_interval
        alt_query = influx_query_modification.extend_lower_time_bound(
            alt_query, query_time_shift)
        result_df_dict = pd_query(self.backend_host, self.backend_port, user,
                                  password, schema, alt_query)

        # remove counter wrapping: whenever a raw counter value drops, lift it
        # past the previous (already unwrapped) value by whole overflow steps,
        # tracking the previous value per column
        for series_name in result_df_dict:
            df = result_df_dict[series_name]
            prev_values = {}
            for index, row in df.iterrows():

                for nnd_column in nnd_column_list:
                    value = row[nnd_column]

                    if numpy.isnan(value):
                        continue

                    prev_value = prev_values.get(nnd_column)
                    if prev_value is None:
                        prev_values[nnd_column] = value
                        continue

                    diff = value - prev_value
                    if diff < 0:

                        shift = overflow_value - abs(diff)
                        while shift <= 0:
                            shift += overflow_value

                        new_value = prev_value + shift
                        df.at[index, nnd_column] = new_value
                        prev_values[nnd_column] = new_value
                    else:
                        prev_values[nnd_column] = value
            result_df_dict[series_name] = df

        # apply nnd: recompute the non-negative derivative from the unwrapped
        # counters, scaled to the interval requested in the original query,
        # tracking the previous value and timestamp per column
        for series_name in result_df_dict:
            df = result_df_dict[series_name]
            prev_values = {}
            prev_indexes = {}
            first_index = None
            for index, row in df.iterrows():
                for i, nnd_column in enumerate(nnd_column_list):
                    value = row[nnd_column]

                    if numpy.isnan(value):
                        continue

                    if nnd_column not in prev_values:
                        prev_values[nnd_column] = value
                        prev_indexes[nnd_column] = index
                        if first_index is None:
                            first_index = index
                        df.at[index, nnd_column] = 0
                        continue

                    diff = value - prev_values[nnd_column]

                    if diff < 0:
                        df.at[index, nnd_column] = 0
                    else:
                        time_diff = index.value - prev_indexes[nnd_column].value
                        new_value = diff * nnd_interval_ms_list[i] / time_diff
                        df.at[index, nnd_column] = new_value

                    prev_values[nnd_column] = value
                    prev_indexes[nnd_column] = index
            # the first row only seeded the derivative; drop it from the result
            if first_index is not None:
                df = df.drop(first_index)
            result_df_dict[series_name] = df

        return result_df_dict
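
The counter-unwrapping step is the least obvious part of this handler, so here is a self-contained toy version of the same arithmetic (the 8-bit overflow_value is purely illustrative; the real value arrives via more['overflow_value']):

overflow_value = 256                 # toy 8-bit counter
raw = [250.0, 253.0, 2.0, 10.0]      # the counter wraps between 253 and 2

prev = None
unwrapped = []
for value in raw:
    if prev is not None and value - prev < 0:
        # lift the wrapped sample past the previous (already unwrapped) one
        # by whole multiples of the overflow, as the loop above does
        shift = overflow_value - abs(value - prev)
        while shift <= 0:
            shift += overflow_value
        value = prev + shift
    unwrapped.append(value)
    prev = value

print(unwrapped)  # [250.0, 253.0, 258.0, 266.0]: monotonically increasing again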