Example #1
def test_group_aggregate_all_forward(dtstart, delta, N, step):
    """Aggregate all data and check result"""
    nseries = 10
    begin = dtstart
    end = dtstart + delta*(N + 1)
    agg_funcs = ["min", "max", "count", "sum"]
    query = att.make_group_aggregate_query("test", begin, end, 
                                           agg_funcs, 
                                           step,
                                           output=dict(format='csv'),
                                           where={"tag3": "D", "tag2": "C"})
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            timestamp = att.parse_timestamp(columns[1].strip())

            tserrormsg = "Unexpected timestamp value: {0}".format(columns[1].strip())
            if timestamp.second != dtstart.second:
                raise ValueError(tserrormsg)
            if timestamp.microsecond != dtstart.microsecond:
                raise ValueError(tserrormsg)

            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations == 0:
        raise ValueError("Results incomplete")
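For reference, att.make_group_aggregate_query is expected to assemble a JSON body roughly like the sketch below, following Akumuli's documented group-aggregate query layout; the timestamp strings and concrete field values are illustrative placeholders, not output captured from the helper.

# Illustrative sketch only; the real dict is built by att.make_group_aggregate_query.
example_query = {
    "group-aggregate": {
        "metric": "test",
        "step": "10ms",
        "func": ["min", "max", "count", "sum"],
    },
    "range": {
        "from": "20170101T000000.000000",   # placeholder for begin
        "to":   "20170101T000100.000000",   # placeholder for end
    },
    "where": {"tag3": "D", "tag2": "C"},
    "output": {"format": "csv"},
}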
Example #2
def group_aggregate_query(metric, begin, end, agg_funcs, step, **extra_args):
    '''Query database, return series of tuples'''
    query = att.make_group_aggregate_query(metric, begin, end, 
                                           agg_funcs, 
                                           "{0}ms".format(step), 
                                           output=dict(format='csv'),
                                           **extra_args)

    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    tuple_len = len(agg_funcs)
    for line in response:
        columns = line.split(',')
        tagline = columns[0].strip()
        timestamp = att.parse_timestamp(columns[1].strip())
        output = {"series": tagline, "timestamp": timestamp}
        for i in range(0, tuple_len):
            value = float(columns[2 + i].strip())
            output[agg_funcs[i]] = value
        yield output
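A usage sketch for the generator above. It assumes HOST, HTTPPORT and the att helper module are configured as in the rest of the test suite, that a metric named "test" exists, and that the helper accepts datetime objects for the range boundaries; the concrete dates are placeholders.

import datetime

begin = datetime.datetime(2017, 1, 1, 0, 0, 0)      # assumed start of the stored data
end = begin + datetime.timedelta(seconds=10)
# step=100 is turned into the "100ms" bucket size by the wrapper above
for row in group_aggregate_query("test", begin, end, ["min", "max"], 100):
    # each row is a dict with the series name, bucket timestamp and one
    # entry per requested aggregation function
    print("{0} {1} min={2} max={3}".format(
        row["series"], row["timestamp"], row["min"], row["max"]))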
Example #3
def group_aggregate_nonexistent_time_range(dtstart, delta, N):
    begin = dtstart + delta*(N*2)
    end = dtstart + delta*(N*3)
    query = att.make_group_aggregate_query("test", begin, end, ["sum"], "10ms")
    require_empty_response(query)
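require_empty_response comes from the surrounding test module; a minimal sketch of what such a check might do is shown below. This is an assumption for illustration, not the suite's actual helper, and it relies on the same module-level HOST, HTTPPORT, urlopen and json names used elsewhere in these examples.

def require_empty_response_sketch(query):
    """Illustrative stand-in: send the query and fail on any non-blank data row."""
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    for line in response:
        if line.strip():
            raise ValueError("Expected empty response, got: {0}".format(line))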
Example #4
def group_aggregate_nonexistent_metric(dtstart, delta, N):
    begin = dtstart
    end = dtstart + delta*(N + 1)
    query = att.make_group_aggregate_query("err", begin, end, ["sum"], "10ms")
    msg = "-not found"
    check_error_message(dtstart, delta, N, query, msg)
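check_error_message is likewise a helper from the test module; below is a rough sketch under the assumption that it simply looks for the expected text in the response body (the real helper also takes dtstart, delta and N, which are omitted here).

def check_error_message_sketch(query, expected_msg):
    """Illustrative stand-in: fail unless the response contains the expected error text."""
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    body = response.read()
    if expected_msg not in body:
        raise ValueError("Expected error message not found: {0}".format(expected_msg))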
Example #5
def test_group_aggregate_all_backward(dtstart, delta, N, nsteps):
    """Aggregate all data and check result"""
    nseries = 10
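    # begin > end: the range is given newest-first, so the query is expected to
    # scan backward (latest aggregation buckets are returned first)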
    begin = dtstart + delta*(N-1)
    end = dtstart - delta
    step = int((delta * N * 1000).total_seconds() / nsteps)
    agg_funcs = ["min", "max", "count", "sum"]
    query = att.make_group_aggregate_query("test", begin, end, 
                                           agg_funcs, 
                                           "{0}ms".format(step), 
                                           output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    registered_values = {}
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            min_value = float(columns[2].strip())
            max_value = float(columns[3].strip())
            cnt_value = float(columns[4].strip())
            sum_value = float(columns[5].strip())
            max_index = len(expected_tags) - 1
            exp_tag = expected_tags[max_index - (iterations % len(expected_tags))]

            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, exp_tag)
                raise ValueError(msg)

            cnt_expected = N/nsteps/nseries
            if cnt_expected == 0:
                # the expected count is less than 1 but not 0:
                # there is more than one aggregation step per raw data point
                cnt_expected = 1

            if cnt_value != cnt_expected:
                msg = "Invalid cnt value, expected: {0}, actual: {1}".format(cnt_expected, cnt_value)
                raise ValueError(msg)


            prev_val = registered_values.get(tagline)
            if prev_val is not None:
                if abs(prev_val['min'] - max_value) - nseries > 10E-5:
                    msg = "Invalid value, expected: {0}, actual: {1}".format(prev_val['min'], max_value)
                    raise ValueError(msg)

            new_val = dict(max=max_value, min=min_value, cnt=cnt_value, sum=sum_value)
            registered_values[tagline] = new_val

            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations == 0:
        raise ValueError("Results incomplete")
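To make the step and count arithmetic in the test above concrete, here is a small worked example with assumed numbers (purely illustrative, not values used by the suite).

import datetime

delta = datetime.timedelta(milliseconds=1)   # assumed spacing between consecutive points
N = 100000                                   # assumed total number of points
nsteps = 10                                  # number of aggregation buckets requested
nseries = 10                                 # number of series the points are spread over

# (delta * N * 1000).total_seconds() equals the span N*delta expressed in milliseconds
step = int((delta * N * 1000).total_seconds() / nsteps)   # 10000 -> "10000ms" buckets
cnt_expected = N / nsteps / nseries                       # 1000 points per bucket per series
print("step={0}ms expected count={1}".format(step, cnt_expected))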