Example #1
def test_aggregate_all(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query = att.make_aggregate_query("test",
                                     begin,
                                     end,
                                     "sum",
                                     output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    M = N / 10
    expected_values = [
        5 * M**2 - 5 * M,
        5 * M**2 - 4 * M,
        5 * M**2 - 3 * M,
        5 * M**2 - 2 * M,
        5 * M**2 - M,
        5 * M**2,
        5 * M**2 + M,
        5 * M**2 + 2 * M,
        5 * M**2 + 3 * M,
        5 * M**2 + 4 * M,
        5 * M**2 + 5 * M,
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tag = expected_tags[iterations % len(expected_tags)]
            exp_val = expected_values[iterations % len(expected_values)]
            if abs(value - exp_val) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(
                    exp_val, value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(
                    tagline, exp_tag)
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != len(expected_tags) * 2:
        raise ValueError("Results incomplete")
Example #2
def count_elements(metric, tag, val, begin, end):
    query_params = {"output": {"format": "csv"}, "where": {tag: [val]}}
    query = att.make_aggregate_query(metric, begin, end, "count",
                                     **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = list(urlopen(queryurl, json.dumps(query)))
    for line in response:
        arr = line.split(',')
        return int(arr[-1])
    raise ValueError("Empty response")
Example #3
def count_elements(metric, tag, val, begin, end):
    query_params = {
        "output": { "format":  "csv" },
        "where": {
            tag: [val]
            }
        }
    query = att.make_aggregate_query(metric, begin, end, "count", **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = list(urlopen(queryurl, json.dumps(query)))
    for line in response:
        arr = line.split(',')
        return int(arr[-1])
    raise ValueError("Empty response")
Example #4
def test_aggregate_all(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta*(N-1)
    end = dtstart - delta
    query = att.make_aggregate_query("test", begin, end, "sum", output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    M = N/10
    expected_values = [
        5*M**2 - 5*M,
        5*M**2 - 4*M,
        5*M**2 - 3*M,
        5*M**2 - 2*M,
        5*M**2 - M,
        5*M**2,
        5*M**2 + M,
        5*M**2 + 2*M,
        5*M**2 + 3*M,
        5*M**2 + 4*M,
        5*M**2 + 5*M,
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tag = expected_tags[iterations % len(expected_tags)]
            exp_val = expected_values[iterations % len(expected_values)]
            if abs(value - exp_val) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(exp_val, value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, exp_tag)
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != len(expected_tags)*2:
        raise ValueError("Results incomplete")
Example #5
def test_aggregate_last_timestamp(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart
    end = dtstart + delta*N
    query = att.make_aggregate_query("test", begin, end, "last_timestamp", output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    expected_timestamps = [
        dtstart + (N-10)*delta,
        dtstart + (N-9)*delta,
        dtstart + (N-8)*delta,
        dtstart + (N-7)*delta,
        dtstart + (N-6)*delta,
        dtstart + (N-5)*delta,
        dtstart + (N-4)*delta,
        dtstart + (N-3)*delta,
        dtstart + (N-2)*delta,
        dtstart + (N-1)*delta,
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            if len(columns) != 2:
                msg = "Invalid reply format, 2 columns expected, actual: {0}".format(len(columns))
                raise ValueError(msg)
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            exp_ts = expected_timestamps[iterations % len(expected_timestamps)]
            exp_tag = expected_tags[iterations % len(expected_tags)]
            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, exp_tag)
                raise ValueError(msg)
            if timestamp != exp_ts:
                msg = "Unexpected timestamp: {0}, expected: {1}".format(timestamp, exp_ts)
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != len(expected_tags)*2:
        raise ValueError("Results incomplete")
Example #6
def test_aggregate_all_group_by(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {
        "output": {
            "format": "csv"
        },
        "group-by": ["tag1"],
    }
    query = att.make_aggregate_query("test", begin, end, "sum", **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    # All values will be collapsed into one!
    expected_tags = [
        "tag1=A",
    ]
    expected_values = [0.5 * (N**2 - N)]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            if abs(value - expected_values[0]) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(
                    expected_values[0], value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(expected_tags[0]):
                msg = "Unexpected tag value: {0}, expected: {1}".format(
                    tagline, expected_tags[0])
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != 1:
        raise ValueError("Bad number of results")
Example #7
def test_aggregate_all_group_by(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta*(N-1)
    end = dtstart - delta
    query_params = {
        "output": { "format":  "csv" },
        "group-by": [ "tag1" ],
    }
    query = att.make_aggregate_query("test", begin, end, "sum", **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    # All values will be collapsed into one!
    expected_tags = [
        "tag1=A",
    ]
    expected_values = [
        0.5*(N**2 - N)
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            if abs(value - expected_values[0]) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(expected_values[0], value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(expected_tags[0]):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, expected_tags[0])
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != 1:
        raise ValueError("Bad number of results")
Example #8
def aggregate_nonexistent_time_range(dtstart, delta, N):
    begin = dtstart + delta*(N*2)
    end = dtstart + delta*(N*3)
    query = att.make_aggregate_query("test", begin, end, "sum")
    require_empty_response(query)
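require_empty_response is not shown in these snippets. A hypothetical version, reusing the same HOST/HTTPPORT constants and /api/query endpoint as the other examples (the placeholder values below are assumptions), might look like:

import json
from urllib2 import urlopen  # the snippets read as Python 2; urllib.request in Python 3

HOST = "localhost"   # placeholders; the real test module defines these elsewhere
HTTPPORT = 8181

# Hypothetical helper (not shown in the source): fail if the query returns any data.
def require_empty_response(query):
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    lines = [line for line in response if line.strip()]
    if lines:
        raise ValueError("Expected empty response, got {0} line(s)".format(len(lines)))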
Example #9
def aggregate_nonexistent_metric(dtstart, delta, N):
    begin = dtstart
    end = dtstart + delta*(N + 1)
    query = att.make_aggregate_query("err", begin, end, "sum")
    msg = "-not found"
    check_error_message(dtstart, delta, N, query, msg)
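check_error_message is likewise external. The call passes dtstart, delta and N along with the query and the expected error text ("-not found"); a hypothetical shape for it, which ignores the extra arguments, could be:

import json
from urllib2 import urlopen  # Python 2 style, matching the snippets above

HOST = "localhost"   # placeholders; defined elsewhere in the real test module
HTTPPORT = 8181

# Hypothetical helper (not shown in the source): send the query and verify that
# the expected error text appears somewhere in the reply.
def check_error_message(dtstart, delta, N, query, msg):
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    body = "".join(response)
    if msg not in body:
        raise ValueError("Expected error containing {0!r}, got: {1!r}".format(msg, body))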