Example #1
def line2tup(seq):
    for line in seq:
        columns = line.split(',')
        tagline = columns[0].strip()
        timestamp = att.parse_timestamp(columns[1].strip())
        value = float(columns[2].strip())
        yield tagline, timestamp, value
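A minimal usage sketch for line2tup. The att object here is a stub standing in for the project's akumulid_test_tools helper (only parse_timestamp is mocked), and the sample lines plus the %Y%m%dT%H%M%S.%f timestamp layout are illustrative assumptions, not output captured from a server.
# Hypothetical driver for line2tup; "att" is a stub, not the real test-tools module.
from datetime import datetime

class att(object):
    @staticmethod
    def parse_timestamp(text):
        # Assumed timestamp layout, mirroring the strftime format used elsewhere in these tests
        return datetime.strptime(text, "%Y%m%dT%H%M%S.%f")

sample = [
    "test tag=Foo, 20170101T000000.000000, 0.0",
    "test tag=Foo, 20170101T000010.000000, 1.0",
]

for tagline, timestamp, value in line2tup(sample):
    print("{0} {1} {2}".format(tagline, timestamp, value))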
Example #2
def line2tup(seq):
    for line in seq:
        columns = line.split(',')
        tagline = columns[0].strip()
        timestamp = att.parse_timestamp(columns[1].strip())
        value = float(columns[2].strip())
        yield tagline, timestamp, value
Example #3
def test_group_aggregate_all_forward(dtstart, delta, N, step):
    """Aggregate all data and check result"""
    nseries = 10
    begin = dtstart
    end = dtstart + delta*(N + 1)
    agg_funcs = ["min", "max", "count", "sum"]
    query = att.make_group_aggregate_query("test", begin, end, 
                                           agg_funcs, 
                                           step,
                                           output=dict(format='csv'),
                                           where={"tag3": "D", "tag2": "C"})
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            timestamp = att.parse_timestamp(columns[1].strip())

            tserrormsg = "Unexpected timestamp value: {0}".format(columns[1].strip())
            if timestamp.second != dtstart.second:
                raise ValueError(tserrormsg)
            if timestamp.microsecond != dtstart.microsecond:
                raise ValueError(tserrormsg)

            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations == 0:
        raise ValueError("Results incomplete")
Example #4
def test_aggregate_all(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query = att.make_aggregate_query("test",
                                     begin,
                                     end,
                                     "sum",
                                     output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    M = N / 10
    expected_values = [
        5 * M**2 - 5 * M,
        5 * M**2 - 4 * M,
        5 * M**2 - 3 * M,
        5 * M**2 - 2 * M,
        5 * M**2 - M,
        5 * M**2,
        5 * M**2 + M,
        5 * M**2 + 2 * M,
        5 * M**2 + 3 * M,
        5 * M**2 + 4 * M,
        5 * M**2 + 5 * M,
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tag = expected_tags[iterations % len(expected_tags)]
            exp_val = expected_values[iterations % len(expected_values)]
            if abs(value - exp_val) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(
                    exp_val, value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(
                    tagline, exp_tag)
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != len(expected_tags) * 2:
        raise ValueError("Results incomplete")
Example #5
def test_group_aggregate_join_backward(dtstart, delta, N, step, agg_func):
    """Aggregate all data and check result"""
    begin = dtstart + delta * N
    end = dtstart - delta
    metrics = ["cpu.user", "cpu.syst"]
    query = att.make_group_aggregate_join_query(metrics,
                                                agg_func,
                                                begin,
                                                end,
                                                step,
                                                output={"format": "csv"},
                                                where={
                                                    "tag3": "D",
                                                    "tag2": "C"
                                                },
                                                apply=[{
                                                    "name":
                                                    "eval",
                                                    "expr":
                                                    "cpu.user - cpu.syst"
                                                }])

    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    iterations = 0
    exptimestamp = begin
    for line in response:
        try:
            columns = line.split(',')
            if len(columns) != 3:
                raise ValueError("Unexpected number of columns in the output")
            sname = columns[0]
            if not sname.startswith("|".join(metrics)):
                raise ValueError("Unexpected series name {0}".format(
                    columns[0]))

            timestamp = att.parse_timestamp(columns[1].strip())
            if timestamp != exptimestamp:
                tserrormsg = "Actual timestamp value: {0}\nExpected timestamp value {1}".format(
                    columns[1].strip(), exptimestamp)
                raise ValueError(tserrormsg)
            exptimestamp -= delta

            # The eval expression (cpu.user - cpu.syst) should reduce to zero
            zero = int(columns[2])
            if zero != 0:
                raise ValueError("Unexpected value {0}".format(zero))

            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != N:
        raise ValueError(
            "Invalid number of results: expected {0}, received {1}".format(
                N, iterations))
Example #6
def reader(dtstart, delta, N):
    # Start writer process
    wproc = multiprocessing.Process(name='Writer',
                                    target=writer,
                                    args=[dtstart, delta, N])
    wproc.start()

    try:
        window = att.get_window_width()
        end = dtstart + delta * (N - 1) - 2 * window
        begin = dtstart
        timedelta = end - begin
        points_required = int(
            math.ceil(
                (timedelta.seconds * 1000000.0 + timedelta.microseconds) /
                (delta.seconds * 1000000.0 + delta.microseconds))) + 1
        query_params = {"output": {"format": "csv"}}
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = begin
        exp_value = 0
        iterations = 0

        print("Test #1 - continuous queries")

        for line in response:
            try:
                columns = line.split(',')
                tagline = columns[0].strip()
                timestamp = att.parse_timestamp(columns[1].strip())
                value = float(columns[2].strip())

                exp_tags = 'test tag=Foo'

                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)

                exp_ts += delta
                exp_value += 1
                iterations += 1
            except:
                print("Error at line: {0}".format(line))
                raise

        print("Query completed")
        # Check that we received all values
        if iterations != points_required:
            raise ValueError(
                "Expect {0} data points, get {1} data points".format(
                    points_required, iterations))
        print("Test #1 passed")
    finally:
        wproc.join()
Example #7
def test_group_aggregate_join_forward(dtstart, delta, N, step, agg_func):
    """Aggregate all data and check result"""
    begin = dtstart
    end = dtstart + delta * (N + 1)
    metrics = ["cpu.user", "cpu.system", "cpu.idle"]
    query = att.make_group_aggregate_join_query(metrics,
                                                agg_func,
                                                begin,
                                                end,
                                                step,
                                                output={"format": "csv"},
                                                where={
                                                    "tag3": "D",
                                                    "tag2": "C"
                                                })

    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    iterations = 0
    exptimestamp = begin
    for line in response:
        try:
            columns = line.split(',')

            if len(columns) != 5:
                raise ValueError("Unexpected number of columns in the output")

            sname = columns[0]

            if not sname.startswith("|".join(metrics)):
                raise ValueError("Unexpected series name {0}".format(
                    columns[0]))

            timestamp = att.parse_timestamp(columns[1].strip())
            if timestamp != exptimestamp:
                tserrormsg = "Actual timestamp value: {0}\nExpected timestamp value {1}".format(
                    columns[1].strip(), exptimestamp)
                raise ValueError(tserrormsg)
            exptimestamp += delta

            # Check that all three values are the same
            user, syst, idle = tuple([float(it) for it in columns[2:]])
            if user != syst or syst != idle:
                raise ValueError("Unexpected value {0} {1} {2}".format(
                    user, syst, idle))

            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != N:
        raise ValueError(
            "Invalid number of results: expected {0}, received {1}".format(
                N, iterations))
Example #8
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    window = att.get_window_width()
    end = dtstart + delta * (N - 1) - window
    begin = dtstart
    timedelta = end - begin
    points_required = int(
        math.ceil((timedelta.seconds * 1000000.0 + timedelta.microseconds) /
                  (delta.seconds * 1000000.0 + delta.microseconds))) + 1
    # We need to add 1 because query will include both begin and end timestamps.

    query_params = {
        "output": {
            "format": "csv"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    print("Test #6 - filter by tag")
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != points_required:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            points_required, iterations))
    print("Test #6 passed")
Example #9
def line2tup(seq):
    for ix, line in enumerate(seq):
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            yield tagline, timestamp, value
        except:
            print("Error in line: {0}".format(ix))
            print(line)
            raise
Example #10
def line2tup(seq):
    for ix, line in enumerate(seq):
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            yield tagline, timestamp, value
        except:
            print("Error in line: {0}".format(ix))
            print(line)
            raise
Example #11
def test_where_clause_with_groupby_in_backward_direction(dtstart, delta, N):
    """Filter data by tag and group by another tag"""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query_params = {
        "output": {
            "format": "csv"
        },
        "group-by": {
            "tag": "tag3"
        },
        "where": {
            "tag2": ["C"],  # read only odd
        }
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    print("Test #4 - where + group-by")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts -= 2 * delta
            exp_value -= 2
            iterations += 2
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #4 passed")
Example #12
def test_group_by_tag_in_backward_direction(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query_params = {
        "output": {
            "format": "csv"
        },
        "group-by": {
            "tag": "tag3"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    print("Test #2 - group by tag in backward direction")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #2 passed")
Example #13
def test_aggregate_all(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta*(N-1)
    end = dtstart - delta
    query = att.make_aggregate_query("test", begin, end, "sum", output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    M = N/10
    expected_values = [
        5*M**2 - 5*M,
        5*M**2 - 4*M,
        5*M**2 - 3*M,
        5*M**2 - 2*M,
        5*M**2 - M,
        5*M**2,
        5*M**2 + M,
        5*M**2 + 2*M,
        5*M**2 + 3*M,
        5*M**2 + 4*M,
        5*M**2 + 5*M,
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tag = expected_tags[iterations % len(expected_tags)]
            exp_val = expected_values[iterations % len(expected_values)]
            if abs(value - exp_val) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(exp_val, value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, exp_tag)
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != len(expected_tags)*2:
        raise ValueError("Results incomplete")
Example #14
def test_aggregate_last_timestamp(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart
    end = dtstart + delta*N
    query = att.make_aggregate_query("test", begin, end, "last_timestamp", output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    expected_timestamps = [
        dtstart + (N-10)*delta,
        dtstart + (N-9)*delta,
        dtstart + (N-8)*delta,
        dtstart + (N-7)*delta,
        dtstart + (N-6)*delta,
        dtstart + (N-5)*delta,
        dtstart + (N-4)*delta,
        dtstart + (N-3)*delta,
        dtstart + (N-2)*delta,
        dtstart + (N-1)*delta,
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            if len(columns) != 2:
                msg = "Invalid reply format, 2 columns expected, actual: {0}".format(len(columns))
                raise ValueError(msg)
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            exp_ts = expected_timestamps[iterations % len(expected_timestamps)]
            exp_tag = expected_tags[iterations % len(expected_tags)]
            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, exp_tag)
                raise ValueError(msg)
            if timestamp != exp_ts:
                msg = "Unexpected timestamp: {0}, expected: {1}".format(timestamp, exp_ts)
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != len(expected_tags)*2:
        raise ValueError("Results incomplete")
Example #15
def reader(dtstart, delta, N):
    # Start writer process
    wproc = multiprocessing.Process(name='Writer', target=writer, args=[dtstart, delta, N])
    wproc.start()

    try:
        window = att.get_window_width()
        end = dtstart + delta*(N-1) - 2*window
        begin = dtstart
        timedelta = end - begin
        points_required = int(math.ceil((timedelta.seconds*1000000.0 + timedelta.microseconds) / 
                                        (delta.seconds*1000000.0 + delta.microseconds))) + 1
        query_params = {"output": { "format":  "csv" }}
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = begin
        exp_value = 0
        iterations = 0

        print("Test #1 - continuous queries")

        for line in response:
            try:
                columns = line.split(',')
                tagline = columns[0].strip()
                timestamp = att.parse_timestamp(columns[1].strip())
                value = float(columns[2].strip())

                exp_tags = 'test tag=Foo'

                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

                exp_ts += delta
                exp_value += 1
                iterations += 1
            except:
                print("Error at line: {0}".format(line))
                raise

        print("Query completed")
        # Check that we received all values
        if iterations != points_required:
            raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
        print("Test #1 passed")
    finally:
        wproc.join()
Example #16
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    window = att.get_window_width()
    end = dtstart + delta*(N-1) - window
    begin = dtstart
    timedelta = end - begin
    points_required = int(math.ceil((timedelta.seconds*1000000.0 + timedelta.microseconds) / (delta.seconds*1000000.0 + delta.microseconds))) + 1
    # We need to add 1 because query will include both begin and end timestamps.

    query_params = {
        "output": { "format":  "csv" },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    print("Test #6 - filter by tag")
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != points_required:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
    print("Test #6 passed")
Example #17
def test_join_query_backward(columns, dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    timedelta = begin - end

    query_params = {
        "output": {
            "format": "csv"
        },
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    print("Test #2 - read forward, order by time")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            points_required, iterations))
    print("Test #2 - passed")
Example #18
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart
    end = dtstart + delta * (N + 1)
    timedelta = end - begin

    query_params = {
        "output": {
            "format": "csv"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            points_required, iterations))
Example #19
def test_where_clause_with_groupby_in_backward_direction(dtstart, delta, N):
    """Filter data by tag and group by another tag"""
    begin = dtstart + delta*(N-1)
    end = dtstart
    query_params = {
        "output": { "format":  "csv" },
        "group-by": { "tag": "tag3" },
        "where": {
            "tag2": ["C"], # read only odd
        }
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N-1
    iterations = 0
    print("Test #4 - where + group-by")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= 2*delta
            exp_value -= 2
            iterations += 2
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #4 passed")
Example #20
def test_group_by_tag_in_backward_direction(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta*(N-1)
    end = dtstart
    query_params = {
        "output": { "format":  "csv" },
        "group-by": {  "tag": "tag3" },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N-1
    iterations = 0
    print("Test #2 - group by tag in backward direction")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N-iterations-1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #2 passed")
Example #21
def test_select_events_backward(dtstart, delta, N):
    """Read events in backward direction"""
    nseries = 10
    end = dtstart - delta
    begin = dtstart + delta * (N + 1)
    query = {
        "select-events": "!foo",
        "range": {
            "from": begin.strftime('%Y%m%dT%H%M%S.%f'),
            "to": end.strftime('%Y%m%dT%H%M%S.%f'),
        },
        "order-by": "time",
        "output": {
            "format": "csv"
        }
    }
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    iterations = 0
    for line in response:
        try:
            expts, expname = allevents[-(iterations + 1)]
            columns = line.split(',')
            timestamp = att.parse_timestamp(columns[1].strip())
            event = columns[2].lstrip().rstrip('\n')

            if timestamp != expts:
                print("Unexpected timestamp in line {0}".format(line))
                raise ValueError("Wrong timestamp {0}, expected {1}".format(
                    str(timestamp), str(expts)))

            if expname != event:
                print("Unexpected value in line {0}".format(line))
                raise ValueError("Wrong value {0}, expected {1}".format(
                    event, expname))

            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != len(allevents):
        raise ValueError(
            "Results incomplete, {0} received, {1} expected".format(
                iterations, len(allevents)))
Example #22
def test_join_query_backward(columns, dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart + delta*(N - 1)
    end = dtstart - delta
    timedelta = begin - end

    query_params = {
        "output": { "format":  "csv" },
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    print("Test #2 - read forward, order by time")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
    print("Test #2 - passed")
Example #23
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * N
    end = dtstart - delta
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = dtstart + delta * (N - 1)
    exp_value = N - 1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test passed")
Example #24
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart
    end = dtstart + delta*(N + 1)
    timedelta = end - begin

    query_params = {
        "output": { "format":  "csv" },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
Example #25
def group_aggregate_query(metric, begin, end, agg_funcs, step, **extra_args):
    '''Query database, return series of tuples'''
    query = att.make_group_aggregate_query(metric, begin, end, 
                                           agg_funcs, 
                                           "{0}ms".format(step), 
                                           output=dict(format='csv'),
                                           **extra_args)

    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    tuple_len = len(agg_funcs)
    for line in response:
        columns = line.split(',')
        tagline = columns[0].strip()
        timestamp = att.parse_timestamp(columns[1].strip())
        output = {"seris": tagline, "timestamp": timestamp}
        for i in range(0, tuple_len):
            value = float(columns[2 + i].strip())
            output[agg_funcs[i]] = value
        yield output
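A usage sketch for the generator above. begin, end, HOST and HTTPPORT are assumed to be set up elsewhere in the test module, and the where filter and the 10 ms step are illustrative values only.
# Hypothetical driver for group_aggregate_query; each yielded dict carries
# the series name, the bucket timestamp, and one entry per aggregation
# function requested.
for row in group_aggregate_query("test", begin, end, ["min", "max"], 10,
                                 where={"tag3": "D"}):
    print("{0} min={1} max={2}".format(row["timestamp"], row["min"], row["max"]))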
Example #26
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta*N
    end = dtstart - delta
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = dtstart + delta*(N-1)
    exp_value = N-1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N-iterations-1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test passed")
Example #27
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = None
    exp_value = N - 1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            timestamp = att.parse_timestamp(columns[1].strip())

            if exp_ts is None:
                exp_ts = timestamp

            if exp_ts and exp_ts != timestamp:
                raise ValueError("Invalid timestamp at {0}".format(iterations))

            exp_ts -= delta
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("Unable to read any data")
    print("Test passed")
Example #28
File: test_kill.py, Project: mindis/Akumuli
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta*(N-1)
    end = dtstart
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = None
    exp_value = N-1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            timestamp = att.parse_timestamp(columns[1].strip())

            if exp_ts is None:
                exp_ts = timestamp

            if exp_ts and exp_ts != timestamp:
                raise ValueError("Invalid timestamp at {0}".format(iterations))

            exp_ts -= delta
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("Unable to read any data")
    print("Test passed")
Example #29
def test_read_all(exp_tags, dtstart, delta, N):
    """Read all series one by one in backward direction.
    All data should be received as expected."""
    for tags in exp_tags:
        begin = dtstart + delta*(N-1)
        end = dtstart
        query_params = {
            "output": { "format":  "csv" },
            "where": tags
        }
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = None
        print("Test - read all data in backward direction")
        prev_line = ''
        iterations = 0
        for line in response:
            try:
                columns = line.split(',')
                timestamp = att.parse_timestamp(columns[1].strip())
                if exp_ts is None:
                    exp_ts = timestamp

                if exp_ts and exp_ts != timestamp:
                    raise ValueError("Invalid timestamp at {0}, expected {1}, actual {2}".format(iterations, exp_ts, timestamp))

                exp_ts -= delta
                iterations += 1
                prev_line = line
            except ValueError as err:
                print(err)
                raise

        # Check that we received all values
        if iterations == 0:
            raise ValueError("Unable to read any data")

    print("Test passed")
Example #30
def test_aggregate_all_group_by(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {
        "output": {
            "format": "csv"
        },
        "group-by": ["tag1"],
    }
    query = att.make_aggregate_query("test", begin, end, "sum", **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    # All values will be collapsed into one!
    expected_tags = [
        "tag1=A",
    ]
    expected_values = [0.5 * (N**2 - N)]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            if abs(value - expected_values[0]) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(
                    expected_values[0], value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(expected_tags[0]):
                msg = "Unexpected tag value: {0}, expected: {1}".format(
                    tagline, expected_tags[0])
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != 1:
        raise ValueError("Bad number of results")
Example #31
def test_read_all(exp_tags, dtstart, delta, N):
    """Read all series one by one in backward direction.
    All data should be received as expected."""
    for tags in exp_tags:
        begin = dtstart + delta * (N - 1)
        end = dtstart
        query_params = {"output": {"format": "csv"}, "where": tags}
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = None
        print("Test - read all data in backward direction")
        prev_line = ''
        iterations = 0
        for line in response:
            try:
                columns = line.split(',')
                timestamp = att.parse_timestamp(columns[1].strip())
                if exp_ts is None:
                    exp_ts = timestamp

                if exp_ts and exp_ts != timestamp:
                    raise ValueError(
                        "Invalid timestamp at {0}, expected {1}, actual {2}".
                        format(iterations, exp_ts, timestamp))

                exp_ts -= delta
                iterations += 1
                prev_line = line
            except ValueError as err:
                print(err)
                raise

        # Check that we received all values
        if iterations == 0:
            raise ValueError("Unable to read any data")

    print("Test passed")
Example #32
def group_aggregate_query(metric, begin, end, agg_funcs, step, **extra_args):
    '''Query database, return series of tuples'''
    query = att.make_group_aggregate_query(metric,
                                           begin,
                                           end,
                                           agg_funcs,
                                           "{0}ms".format(step),
                                           output=dict(format='csv'),
                                           **extra_args)

    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    tuple_len = len(agg_funcs)
    for line in response:
        columns = line.split(',')
        tagline = columns[0].strip()
        timestamp = att.parse_timestamp(columns[1].strip())
        output = {"seris": tagline, "timestamp": timestamp}
        for i in range(0, tuple_len):
            value = float(columns[2 + i].strip())
            output[agg_funcs[i]] = value
        yield output
Example #33
def test_aggregate_all_group_by(dtstart, delta, N):
    """Aggregate all data and check result"""
    begin = dtstart + delta*(N-1)
    end = dtstart - delta
    query_params = {
        "output": { "format":  "csv" },
        "group-by": [ "tag1" ],
    }
    query = att.make_aggregate_query("test", begin, end, "sum", **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    # All values will be collapsed into one!
    expected_tags = [
        "tag1=A",
    ]
    expected_values = [
        0.5*(N**2 - N)
    ]
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            if abs(value - expected_values[0]) > 10E-5:
                msg = "Invalid value, expected: {0}, actual: {1}".format(expected_values[0], value)
                print(msg)
                raise ValueError(msg)
            if not tagline.endswith(expected_tags[0]):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, expected_tags[0])
                raise ValueError(msg)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations != 1:
        raise ValueError("Bad number of results")
Example #34
def test_group_aggregate_all_backward(dtstart, delta, N, nsteps):
    """Aggregate all data and check result"""
    nseries = 10
    begin = dtstart + delta*(N-1)
    end = dtstart - delta
    step = int((delta * N * 1000).total_seconds() / nsteps)
    agg_funcs = ["min", "max", "count", "sum"]
    query = att.make_group_aggregate_query("test", begin, end, 
                                           agg_funcs, 
                                           "{0}ms".format(step), 
                                           output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    registered_values = {}
    iterations = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            min_value = float(columns[2].strip())
            max_value = float(columns[3].strip())
            cnt_value = float(columns[4].strip())
            sum_value = float(columns[5].strip())
            max_index = len(expected_tags) - 1
            exp_tag = expected_tags[max_index - (iterations % len(expected_tags))]

            if not tagline.endswith(exp_tag):
                msg = "Unexpected tag value: {0}, expected: {1}".format(tagline, exp_tag)
                raise ValueError(msg)

            cnt_expected = N/nsteps/nseries
            if cnt_expected == 0:
                # the expected count per bucket is less than 1 (but not 0):
                # there is more than one step per value in the raw series
                cnt_expected = 1

            if cnt_value != cnt_expected:
                msg = "Invalid cnt value, expected: {0}, actual: {1}".format(cnt_expected, cnt_value)
                raise ValueError(msg)


            prev_val = registered_values.get(tagline)
            if prev_val is not None:
                if abs(prev_val['min'] - max_value) - nseries > 10E-5:
                    msg = "Invalid value, expected: {0}, actual: {1}".format(prev_val['min'], max_value)
                    raise ValueError(msg)

            new_val = dict(max=max_value, min=min_value, cnt=cnt_value, sum=sum_value)
            registered_values[tagline] = new_val

            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    if iterations == 0:
        raise ValueError("Results incomplete")
Example #35
def test_paa_in_backward_direction(testname, dtstart, delta, N, fn, query):
    expected_values = [
        reversed(range(9, 100000, 10)),
        reversed(range(8, 100000, 10)),
        reversed(range(7, 100000, 10)),
        reversed(range(6, 100000, 10)),
        reversed(range(5, 100000, 10)),
        reversed(range(4, 100000, 10)),
        reversed(range(3, 100000, 10)),
        reversed(range(2, 100000, 10)),
        reversed(range(1, 100000, 10)),
        reversed(range(0, 100000, 10)),
    ]

    def sliding_window(values, winlen, func):
        top = [0] * winlen
        for ix, it in enumerate(values):
            k = ix % winlen
            top[k] = it
            if (ix + 1) % winlen == 0:
                yield func(top)

    def round_robin(sequences, maxlen):
        l = len(sequences)
        for i in xrange(0, maxlen):
            seq = sequences[i % l]
            it = seq.next()
            yield it

    begin = dtstart + delta * N
    end = dtstart
    query_params = {
        "sample": [{
            "name": query
        }],
        "output": {
            "format": "csv"
        },
        "group-by": {
            "time": "1s"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    iterations = 0
    print(testname)
    expected_tags = [
        "tag3=H",
        "tag3=G",
        "tag3=F",
        "tag3=E",
        "tag3=D",
    ]
    sequences = [sliding_window(it, 100, fn) for it in expected_values]
    exp_values = round_robin(sequences, N)
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())

            exp_tags = expected_tags[iterations % len(expected_tags)]
            exp_value = exp_values.next()
            if timestamp != exp_ts:
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_ts, timestamp))
            if value != exp_value:
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_value, value))
            if not tagline.endswith(exp_tags):
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_tags, tagline))

            if (iterations + 1) % 10 == 0:
                exp_ts -= datetime.timedelta(seconds=1)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != 990:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            990, iterations))
    print("{0} passed".format(testname[:testname.index(" - ")]))
Example #36
def test_paa_in_backward_direction(dtstart, delta, N, fn, query):
    expected_values = [
        reversed(range(9, 100000, 10)),
        reversed(range(8, 100000, 10)),
        reversed(range(7, 100000, 10)),
        reversed(range(6, 100000, 10)),
        reversed(range(5, 100000, 10)),
        reversed(range(4, 100000, 10)),
        reversed(range(3, 100000, 10)),
        reversed(range(2, 100000, 10)),
        reversed(range(1, 100000, 10)),
        reversed(range(0, 100000, 10)),
    ]

    def sliding_window(values, winlen, func):
        top = [0]*winlen
        for ix, it in enumerate(values):
            k = ix % winlen
            top[k] = it
            if (ix + 1) % winlen == 0:
                yield func(top)

    def round_robin(sequences, maxlen):
        l = len(sequences)
        for i in xrange(0, maxlen):
            seq = sequences[i % l]
            it = seq.next()
            yield it

    begin = dtstart + delta*N
    end = dtstart
    query_params = {
        "sample": [{   "name": query }],
        "output":  { "format": "csv" },
        "group-by":{   "time": "1s"  },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    iterations = 0
    expected_tags = [
        "tag3=H",
        "tag3=G",
        "tag3=F",
        "tag3=E",
        "tag3=D",
    ]
    sequences = [sliding_window(it, 100, fn) for it in expected_values]
    exp_values = round_robin(sequences, N)
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())

            exp_tags = expected_tags[iterations % len(expected_tags)]
            exp_value = exp_values.next()
            if timestamp != exp_ts:
                raise ValueError("Expected {0}, actual {1}".format(exp_ts, timestamp))
            if value != exp_value:
                raise ValueError("Expected {0}, actual {1}".format(exp_value, value))
            if not tagline.endswith(exp_tags):
                raise ValueError("Expected {0}, actual {1}".format(exp_tags, tagline))

            if (iterations + 1) % 10 == 0:
                exp_ts -= datetime.timedelta(seconds=1)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != 990:
        raise ValueError("Expect {0} data points, get {1} data points".format(990, iterations))
Example #37
def test_join_query_backward_by_series(columns, dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    timedelta = begin - end

    query_params = {"output": {"format": "csv"}, "order-by": "series"}
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    bsize = count_elements("col1", "tag2", "B", begin, end)
    csize = count_elements("col1", "tag2", "C", begin, end)
    dsize = count_elements("col1", "tag2", "D", begin, end)
    sizes = [
        bsize,
        csize,
        dsize,
    ]
    steps = [
        bsize,
        bsize + csize,
        bsize + csize + dsize,
    ]
    nseries = len(expected_tags)
    print("Test #4 - read forward, order by series")
    prev_tag = None
    reset_ix = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]
            tagix = 0
            while iterations >= steps[tagix]:
                tagix += 1

            exp_tags = expected_tags[tagix]

            if prev_tag != tagline:
                exp_ts = dtstart + reset_ix * delta + delta * (sizes[tagix] -
                                                               1) * nseries
                exp_value = reset_ix + (sizes[tagix] - 1) * nseries
                prev_tag = tagline
                reset_ix += 1

            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)

            exp_ts -= nseries * delta
            exp_value -= nseries
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            points_required, iterations))
    print("Test #4 - passed")
Example #38
def test_join_query_backward_by_series(columns, dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart + delta*(N - 1)
    end = dtstart - delta
    timedelta = begin - end

    query_params = {
        "output": { "format":  "csv" },
        "order-by": "series"
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N-1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    bsize = count_elements("col1", "tag2", "B", begin, end)
    csize = count_elements("col1", "tag2", "C", begin, end)
    dsize = count_elements("col1", "tag2", "D", begin, end)
    sizes = [
            bsize,
            csize,
            dsize,
            ]
    steps = [
            bsize,
            bsize + csize,
            bsize + csize + dsize,
            ]
    nseries = len(expected_tags)
    print("Test #4 - read forward, order by series")
    prev_tag = None
    reset_ix = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]
            tagix = 0
            while iterations >= steps[tagix]:
                tagix += 1

            exp_tags = expected_tags[tagix]

            if prev_tag != tagline:
                exp_ts = dtstart + reset_ix*delta + delta*(sizes[tagix]-1)*nseries
                exp_value = reset_ix + (sizes[tagix]-1)*nseries
                prev_tag = tagline
                reset_ix += 1

            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= nseries*delta
            exp_value -= nseries
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
    print("Test #4 - passed")