Example no. 1
def require_empty(metric_name, dtstart, delta, N):
    """Try to read all series one by one in backward direction.
    The result should be empty."""

    begin = dtstart - delta * N
    end = dtstart + delta * N
    query_params = {
        "output": {
            "format": "csv"
        },
    }
    query = att.makequery(metric_name, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    print("Test - check that response is empty")
    errormsg = False
    for line in response:
        # There are three possible outcomes:
        # - the data is fully recovered (which means that the WAL is enabled)
        # - the query returns a '-no data' response (which means that the metadata wasn't saved)
        # - the query returns an empty response (which means that the metadata was saved)
        if not errormsg and line.startswith('-'):
            errormsg = True
        else:
            raise ValueError("Unexpected value")

    print("Test passed")
Example no. 2
def require_empty(metric_name, dtstart, delta, N):
    """Try to read all series one by one in backward direction.
    The result should be empty."""

    begin = dtstart - delta*N
    end = dtstart + delta*N
    query_params = {
        "output": { "format":  "csv" },
    }
    query = att.makequery(metric_name, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    print("Test - check that response is empty")
    errormsg = False
    for line in response:
        # There are three possible outcomes:
        # - the data is fully recovered (which means that the WAL is enabled)
        # - the query returns a '-no data' response (which means that the metadata wasn't saved)
        # - the query returns an empty response (which means that the metadata was saved)
        if not errormsg and line.startswith('-'):
            errormsg = True
        else:
            raise ValueError("Unexpected value")

    print("Test passed")
Example no. 3
def reader(dtstart, delta, N):
    # Start writer process
    wproc = multiprocessing.Process(name='Writer',
                                    target=writer,
                                    args=[dtstart, delta, N])
    wproc.start()

    def cmp_tuples(lhs, rhs):
        # ignore tags
        timedelta = lhs[1] - rhs[1]
        if timedelta != delta:
            raise ValueError(
                "Invalid timestamps, current {0}, previous {1}".format(
                    lhs[1], rhs[1]))
        valdelta = lhs[2] - rhs[2]
        if valdelta - 1.0 > 0.000001:
            raise ValueError("Invalid value, current {0}, previous {1}".format(
                lhs[2], rhs[2]))

    try:
        print("Test #1")
        end = dtstart + delta * N
        begin = dtstart
        timedelta = end - begin
        query_params = {"output": {"format": "csv"}}
        http_err_cnt = 0
        while True:
            try:
                query = att.makequery("test", begin, end, **query_params)
                print("Query: {0}".format(query))
                queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
                response = urlopen(queryurl, json.dumps(query))

                def count_lines(seq):
                    global processed
                    for msg in seq:
                        yield msg
                        processed += 1

                tuples = line2tup(count_lines(response))
                first, last = require_continuous(tuples, cmp_tuples)
                print("First: {0}".format(
                    first and first[1].strftime("%Y%m%dT%H%M%S.%f") or "None"))
                print("Last : {0}".format(
                    last and last[1].strftime("%Y%m%dT%H%M%S.%f") or "None"))
                if last is not None:
                    begin = last[1]
                if first[1] == (end - delta):
                    break
            except HTTPError as err:
                print("HTTP error: {0}".format(err))
                http_err_cnt += 1
                if http_err_cnt == 10:
                    raise

        print("Test passed")
    finally:
        print("{0} messages processed".format(processed))
        wproc.join()
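
Example no. 3 (and its duplicate, no. 9) calls two helpers, line2tup and require_continuous, that are not shown in this listing. Below is a minimal sketch of what they might look like, inferred only from how reader() uses them (CSV lines of the form series,timestamp,value and a pairwise comparison callback); the timestamp format and the exact return contract are assumptions, not the project's actual implementation.

# Hypothetical reconstruction of the helpers used above; names and parsing
# details are assumptions based on how reader() calls them.
import datetime


def line2tup(lines):
    """Turn CSV lines of the form 'series,timestamp,value' into tuples."""
    for line in lines:
        columns = line.split(',')
        series = columns[0].strip()
        # The timestamp format is assumed to match the strftime pattern
        # used when printing 'First'/'Last' above.
        timestamp = datetime.datetime.strptime(columns[1].strip(),
                                               "%Y%m%dT%H%M%S.%f")
        value = float(columns[2].strip())
        yield (series, timestamp, value)


def require_continuous(tuples, cmp_tuples):
    """Run cmp_tuples(current, previous) on every adjacent pair.

    Returns the (first, last) tuples seen, or (None, None) for an empty
    sequence; cmp_tuples is expected to raise on any gap or bad value.
    """
    first = None
    prev = None
    for cur in tuples:
        if first is None:
            first = cur
        if prev is not None:
            cmp_tuples(cur, prev)
        prev = cur
    return first, prev
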
Example no. 4
def test_sax_in_backward_direction(dtstart, delta, N):
    begin = dtstart + delta * N
    end = dtstart
    query_params = {
        "sample": [{
            "name": "sax",
            "alphabet_size": "5",
            "window_width": "10"
        }],
        "output": {
            "format": "csv"
        },
        "group-by": {
            "time": "1ms"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    print(query)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    iterations = 0
    exp_ts = begin  # initialized here so the periodic shift below does not raise NameError
    print("Test")
    expected_tags = [
        "tag3=H",
        "tag3=G",
        "tag3=F",
        "tag3=E",
        "tag3=D",
    ]
    exp_value = "aabbccddee"
    for line in response:
        print(line)
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            value = columns[2].strip()

            exp_tags = expected_tags[iterations % len(expected_tags)]
            if value != exp_value:
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_value, value))
            if not tagline.endswith(exp_tags):
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_tags, tagline))

            if (iterations + 1) % 50 == 0:
                exp_ts -= datetime.timedelta(seconds=5)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N / 10:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N / 10, iterations))
    print("Test passed")
Example no. 5
def reader(dtstart, delta, N):
    # Start writer process
    wproc = multiprocessing.Process(name='Writer',
                                    target=writer,
                                    args=[dtstart, delta, N])
    wproc.start()

    try:
        window = att.get_window_width()
        end = dtstart + delta * (N - 1) - 2 * window
        begin = dtstart
        timedelta = end - begin
        points_required = int(
            math.ceil(
                (timedelta.seconds * 1000000.0 + timedelta.microseconds) /
                (delta.seconds * 1000000.0 + delta.microseconds))) + 1
        query_params = {"output": {"format": "csv"}}
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = begin
        exp_value = 0
        iterations = 0

        print("Test #1 - continuous queries")

        for line in response:
            try:
                columns = line.split(',')
                tagline = columns[0].strip()
                timestamp = att.parse_timestamp(columns[1].strip())
                value = float(columns[2].strip())

                exp_tags = 'test tag=Foo'

                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)

                exp_ts += delta
                exp_value += 1
                iterations += 1
            except:
                print("Error at line: {0}".format(line))
                raise

        print("Query completed")
        # Check that we received all values
        if iterations != points_required:
            raise ValueError(
                "Expect {0} data points, get {1} data points".format(
                    points_required, iterations))
        print("Test #1 passed")
    finally:
        wproc.join()
Example no. 6
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    window = att.get_window_width()
    end = dtstart + delta * (N - 1) - window
    begin = dtstart
    timedelta = end - begin
    points_required = int(
        math.ceil((timedelta.seconds * 1000000.0 + timedelta.microseconds) /
                  (delta.seconds * 1000000.0 + delta.microseconds))) + 1
    # We need to add 1 because query will include both begin and end timestamps.

    query_params = {
        "output": {
            "format": "csv"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    print("Test #6 - filter by tag")
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != points_required:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            points_required, iterations))
    print("Test #6 passed")
Example no. 7
def test_where_clause_with_groupby_in_backward_direction(dtstart, delta, N):
    """Filter data by tag and group by another tag"""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query_params = {
        "output": {
            "format": "csv"
        },
        "group-by": {
            "tag": "tag3"
        },
        "where": {
            "tag2": ["C"],  # read only odd
        }
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    print("Test #4 - where + group-by")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts -= 2 * delta
            exp_value -= 2
            iterations += 2
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #4 passed")
Example no. 8
def test_group_by_tag_in_backward_direction(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query_params = {
        "output": {
            "format": "csv"
        },
        "group-by": {
            "tag": "tag3"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    print("Test #2 - group by tag in backward direction")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #2 passed")
Example no. 9
def reader(dtstart, delta, N):
    # Start writer process
    wproc = multiprocessing.Process(name='Writer', target=writer, args=[dtstart, delta, N])
    wproc.start()

    def cmp_tuples(lhs, rhs):
        # ignore tags
        timedelta = lhs[1] - rhs[1]
        if timedelta != delta:
            raise ValueError("Invalid timestamps, current {0}, previous {1}".format(lhs[1], rhs[1]))
        valdelta = lhs[2] - rhs[2]
        if valdelta - 1.0 > 0.000001:
            raise ValueError("Invalid value, current {0}, previous {1}".format(lhs[2], rhs[2]))

    try:
        print("Test #1")
        end = dtstart + delta*N
        begin = dtstart
        timedelta = end - begin
        query_params = {"output": { "format":  "csv" }}
        http_err_cnt = 0
        while True:
            try:
                query = att.makequery("test", begin, end, **query_params)
                print("Query: {0}".format(query))
                queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
                response = urlopen(queryurl, json.dumps(query))
                def count_lines(seq):
                    global processed
                    for msg in seq:
                        yield msg
                        processed += 1
                tuples = line2tup(count_lines(response))
                first, last = require_continuous(tuples, cmp_tuples)
                print("First: {0}".format(first and first[1].strftime("%Y%m%dT%H%M%S.%f") or "None"))
                print("Last : {0}".format( last and  last[1].strftime("%Y%m%dT%H%M%S.%f") or "None"))
                if last is not None:
                    begin = last[1]
                if first[1] == (end - delta):
                    break
            except HTTPError as err:
                print("HTTP error: {0}".format(err))
                http_err_cnt += 1
                if http_err_cnt == 10:
                    raise

        print("Test passed")
    finally:
        print("{0} messages processed".format(processed))
        wproc.join()
Example no. 10
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    window = att.get_window_width()
    end = dtstart + delta*(N-1) - window
    begin = dtstart
    timedelta = end - begin
    points_required = int(math.ceil((timedelta.seconds*1000000.0 + timedelta.microseconds) / (delta.seconds*1000000.0 + delta.microseconds))) + 1
    # We need to add 1 because query will include both begin and end timestamps.

    query_params = {
        "output": { "format":  "csv" },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    print("Test #6 - filter by tag")
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != points_required:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
    print("Test #6 passed")
Example no. 11
def reader(dtstart, delta, N):
    # Start writer process
    wproc = multiprocessing.Process(name='Writer', target=writer, args=[dtstart, delta, N])
    wproc.start()

    try:
        window = att.get_window_width()
        end = dtstart + delta*(N-1) - 2*window
        begin = dtstart
        timedelta = end - begin
        points_required = int(math.ceil((timedelta.seconds*1000000.0 + timedelta.microseconds) / 
                                        (delta.seconds*1000000.0 + delta.microseconds))) + 1
        query_params = {"output": { "format":  "csv" }}
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = begin
        exp_value = 0
        iterations = 0

        print("Test #1 - continuous queries")

        for line in response:
            try:
                columns = line.split(',')
                tagline = columns[0].strip()
                timestamp = att.parse_timestamp(columns[1].strip())
                value = float(columns[2].strip())

                exp_tags = 'test tag=Foo'

                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

                exp_ts += delta
                exp_value += 1
                iterations += 1
            except:
                print("Error at line: {0}".format(line))
                raise

        print("Query completed")
        # Check that we received all values
        if iterations != points_required:
            raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
        print("Test #1 passed")
    finally:
        wproc.join()
Example no. 12
def test_sax_in_backward_direction(dtstart, delta, N):
    begin = dtstart + delta*N
    end = dtstart
    query_params = {
            "sample": [{          "name": "sax", 
                         "alphabet_size": "5", 
                          "window_width": "10" }],
            "output":  {        "format": "csv" },
            "group-by":{          "time": "1ms" },
    }
    query = att.makequery("test", begin, end, **query_params)
    print(query)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    iterations = 0
    exp_ts = begin  # initialized here so the periodic shift below does not raise NameError
    print("Test")
    expected_tags = [
        "tag3=H",
        "tag3=G",
        "tag3=F",
        "tag3=E",
        "tag3=D",
    ]
    exp_value = "aabbccddee"
    for line in response:
        print(line)
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            value = columns[2].strip()

            exp_tags = expected_tags[iterations % len(expected_tags)]
            if value != exp_value:
                raise ValueError("Expected {0}, actual {1}".format(exp_value, value))
            if not tagline.endswith(exp_tags):
                raise ValueError("Expected {0}, actual {1}".format(exp_tags, tagline))

            if (iterations + 1) % 50 == 0:
                exp_ts -= datetime.timedelta(seconds=5)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N/10:
        raise ValueError("Expect {0} data points, get {1} data points".format(N/10, iterations))
    print("Test passed")
Example no. 13
def require_empty_response(metric_name):
    """Read all data in backward direction.
    No data should be returned, otherwise the error will be generated.
    """
    begin = datetime.datetime(year=2100, month=1, day=1)
    end = datetime.datetime(year=1970, month=1, day=1)
    query = att.makequery(metric_name, begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(host, httpport)
    response = urllib.urlopen(queryurl, json.dumps(query))

    iterations = 0
    print("Test #2 - read evicted")
    for line in response:
        raise ValueError("Unexpected value returned: " + line)

    print("Test passed")
Example no. 14
def test_where_clause_with_groupby_in_backward_direction(dtstart, delta, N):
    """Filter data by tag and group by another tag"""
    begin = dtstart + delta*(N-1)
    end = dtstart
    query_params = {
        "output": { "format":  "csv" },
        "group-by": { "tag": "tag3" },
        "where": {
            "tag2": ["C"], # read only odd
        }
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N-1
    iterations = 0
    print("Test #4 - where + group-by")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= 2*delta
            exp_value -= 2
            iterations += 2
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #4 passed")
Example no. 15
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart
    end = dtstart + delta * (N + 1)
    timedelta = end - begin

    query_params = {
        "output": {
            "format": "csv"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            points_required, iterations))
Example no. 16
def test_group_by_tag_in_backward_direction(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta*(N-1)
    end = dtstart
    query_params = {
        "output": { "format":  "csv" },
        "group-by": {  "tag": "tag3" },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N-1
    iterations = 0
    print("Test #2 - group by tag in backward direction")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N-iterations-1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'EQ', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #2 passed")
Example no. 17
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * N
    end = dtstart - delta
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = dtstart + delta * (N - 1)
    exp_value = N - 1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test passed")
Example no. 18
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    begin = dtstart
    end = dtstart + delta*(N + 1)
    timedelta = end - begin

    query_params = {
        "output": { "format":  "csv" },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = 0
    iterations = 0
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(iterations) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts += delta
            exp_value += 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
Example no. 19
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = None
    exp_value = N - 1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            timestamp = att.parse_timestamp(columns[1].strip())

            if exp_ts is None:
                exp_ts = timestamp

            if exp_ts and exp_ts != timestamp:
                raise ValueError("Invalid timestamp at {0}".format(iterations))

            exp_ts -= delta
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("Unable to read any data")
    print("Test passed")
Example no. 20
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta*N
    end = dtstart - delta
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = dtstart + delta*(N-1)
    exp_value = N-1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[(N-iterations-1) % len(expected_tags)]

            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test passed")
Example no. 21
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta*(N-1)
    end = dtstart
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    exp_ts = None
    exp_value = N-1
    iterations = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            columns = line.split(',')
            timestamp = att.parse_timestamp(columns[1].strip())

            if exp_ts is None:
                exp_ts = timestamp

            if exp_ts and exp_ts != timestamp:
                raise ValueError("Invalid timestamp at {0}".format(iterations))

            exp_ts -= delta
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("Unable to read any data")
    print("Test passed")
Example no. 22
def test_read_all(exp_tags, dtstart, delta, N):
    """Read all series one by one in backward direction.
    All data should be received as expected."""
    for tags in exp_tags:
        begin = dtstart + delta*(N-1)
        end = dtstart
        query_params = {
            "output": { "format":  "csv" },
            "where": tags
        }
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = None
        print("Test - read all data in backward direction")
        prev_line = ''
        iterations = 0
        for line in response:
            try:
                columns = line.split(',')
                timestamp = att.parse_timestamp(columns[1].strip())
                if exp_ts is None:
                    exp_ts = timestamp

                if exp_ts and exp_ts != timestamp:
                    raise ValueError("Invalid timestamp at {0}, expected {1}, actual {2}".format(iterations, exp_ts, timestamp))

                exp_ts -= delta
                iterations += 1
                prev_line = line
            except ValueError as err:
                print(err)
                raise

        # Check that we received all values
        if iterations == 0:
            raise ValueError("Unable to read any data")

    print("Test passed")
Example no. 23
def test_read_all(exp_tags, dtstart, delta, N):
    """Read all series one by one in backward direction.
    All data should be received as expected."""
    for tags in exp_tags:
        begin = dtstart + delta * (N - 1)
        end = dtstart
        query_params = {"output": {"format": "csv"}, "where": tags}
        query = att.makequery("test", begin, end, **query_params)
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))

        exp_ts = None
        print("Test - read all data in backward direction")
        prev_line = ''
        iterations = 0
        for line in response:
            try:
                columns = line.split(',')
                timestamp = att.parse_timestamp(columns[1].strip())
                if exp_ts is None:
                    exp_ts = timestamp

                if exp_ts and exp_ts != timestamp:
                    raise ValueError(
                        "Invalid timestamp at {0}, expected {1}, actual {2}".
                        format(iterations, exp_ts, timestamp))

                exp_ts -= delta
                iterations += 1
                prev_line = line
            except ValueError as err:
                print(err)
                raise

        # Check that we received all values
        if iterations == 0:
            raise ValueError("Unable to read any data")

    print("Test passed")
Example no. 24
def read_in_backward_direction(batch_size):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = datetime.datetime(year=2100, month=1, day=1)
    end = datetime.datetime(year=1970, month=1, day=1)
    query = att.makequery("temp", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}".format(host, httpport)
    response = urllib.urlopen(queryurl, json.dumps(query))

    iterations = 0
    print("Test #1 - read all data in backward direction")
    pivot = None
    exp_value = None
    val_count = 0
    num_off = 0
    failcnt = 0
    for line in response:
        try:
            columns = line.split(',')
            #tagline = columns[0].strip()
            #timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())

            if exp_value is None and pivot is None:
                pivot = int(float(value))

            if pivot is not None and exp_value is None:
                if pivot != int(float(value)):
                    print("Off-elements count: %d" % num_off)
                    exp_value = int(float(value))
                else:
                    num_off += 1

            if exp_value:
                val_count += 1
                if float(exp_value) != value:
                    failcnt += 1
                    print("Unexpected value at {0}, actual {1}, expected {2}".format(iterations, value, exp_value))
                    exp_value = None
                    pivot = None
                    val_count = 0
                    num_off = 0

            if exp_value:
                if val_count % batch_size == 0:
                    exp_value -= 1

            if iterations % batch_size == 0:
                if iterations % (batch_size*1000) == 0:
                    print("Read {0}".format(iterations))

            iterations += 1
        except:
            print("Error at line {0}: `{1}`".format(iterations, line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("Expect {0} data points, get {1} data points".format('--', iterations))

    if failcnt != 0:
        raise ValueError("Some data was lost")

    print("Test passed")
Example no. 25
def test_paa_in_backward_direction(dtstart, delta, N, fn, query):
    expected_values = [
        reversed(range(9, 100000, 10)),
        reversed(range(8, 100000, 10)),
        reversed(range(7, 100000, 10)),
        reversed(range(6, 100000, 10)),
        reversed(range(5, 100000, 10)),
        reversed(range(4, 100000, 10)),
        reversed(range(3, 100000, 10)),
        reversed(range(2, 100000, 10)),
        reversed(range(1, 100000, 10)),
        reversed(range(0, 100000, 10)),
    ]

    def sliding_window(values, winlen, func):
        top = [0]*winlen
        for ix, it in enumerate(values):
            k = ix % winlen
            top[k] = it
            if (ix + 1) % winlen == 0:
                yield func(top)

    def round_robin(sequences, maxlen):
        l = len(sequences)
        for i in xrange(0, maxlen):
            seq = sequences[i % l]
            it = seq.next()
            yield it

    begin = dtstart + delta*N
    end = dtstart
    query_params = {
        "sample": [{   "name": query }],
        "output":  { "format": "csv" },
        "group-by":{   "time": "1s"  },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    iterations = 0
    expected_tags = [
        "tag3=H",
        "tag3=G",
        "tag3=F",
        "tag3=E",
        "tag3=D",
    ]
    sequences = [sliding_window(it, 100, fn) for it in expected_values]
    exp_values = round_robin(sequences, N)
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())

            exp_tags = expected_tags[iterations % len(expected_tags)]
            exp_value = exp_values.next()
            if timestamp != exp_ts:
                raise ValueError("Expected {0}, actual {1}".format(exp_ts, timestamp))
            if value != exp_value:
                raise ValueError("Expected {0}, actual {1}".format(exp_value, value))
            if not tagline.endswith(exp_tags):
                raise ValueError("Expected {0}, actual {1}".format(exp_tags, tagline))

            if (iterations + 1) % 10 == 0:
                exp_ts -= datetime.timedelta(seconds=1)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != 990:
        raise ValueError("Expect {0} data points, get {1} data points".format(990, iterations))
Example no. 26
def read_in_backward_direction(batch_size):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = datetime.datetime(year=2100, month=1, day=1)
    end = datetime.datetime(year=1970, month=1, day=1)
    query = att.makequery("temp", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(host, httpport)
    response = urllib.urlopen(queryurl, json.dumps(query))

    iterations = 0
    print("Test #1 - read all data in backward direction")
    pivot = None
    exp_value = None
    val_count = 0
    num_off = 0
    failcnt = 0
    for line in response:
        try:
            columns = line.split(',')
            #tagline = columns[0].strip()
            #timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())

            if exp_value is None and pivot is None:
                pivot = int(float(value))

            if pivot is not None and exp_value is None:
                if pivot != int(float(value)):
                    print("Off-elements count: %d" % num_off)
                    exp_value = int(float(value))
                else:
                    num_off += 1

            if exp_value:
                val_count += 1
                if float(exp_value) != value:
                    failcnt += 1
                    print("Unexpected value at {0}, actual {1}, expected {2}".
                          format(iterations, value, exp_value))
                    exp_value = None
                    pivot = None
                    val_count = 0
                    num_off = 0

            if exp_value:
                if val_count % batch_size == 0:
                    exp_value -= 1

            if iterations % batch_size == 0:
                if iterations % (batch_size * 1000) == 0:
                    print("Read {0}".format(iterations))

            iterations += 1
        except:
            print("Error at line {0}: `{1}`".format(iterations, line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            '--', iterations))

    if failcnt != 0:
        raise ValueError("Some data was lost")

    print("Test passed")
Example no. 27
def test_paa_in_backward_direction(testname, dtstart, delta, N, fn, query):
    expected_values = [
        reversed(range(9, 100000, 10)),
        reversed(range(8, 100000, 10)),
        reversed(range(7, 100000, 10)),
        reversed(range(6, 100000, 10)),
        reversed(range(5, 100000, 10)),
        reversed(range(4, 100000, 10)),
        reversed(range(3, 100000, 10)),
        reversed(range(2, 100000, 10)),
        reversed(range(1, 100000, 10)),
        reversed(range(0, 100000, 10)),
    ]

    def sliding_window(values, winlen, func):
        top = [0] * winlen
        for ix, it in enumerate(values):
            k = ix % winlen
            top[k] = it
            if (ix + 1) % winlen == 0:
                yield func(top)

    def round_robin(sequences, maxlen):
        l = len(sequences)
        for i in xrange(0, maxlen):
            seq = sequences[i % l]
            it = seq.next()
            yield it

    begin = dtstart + delta * N
    end = dtstart
    query_params = {
        "sample": [{
            "name": query
        }],
        "output": {
            "format": "csv"
        },
        "group-by": {
            "time": "1s"
        },
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    iterations = 0
    print(testname)
    expected_tags = [
        "tag3=H",
        "tag3=G",
        "tag3=F",
        "tag3=E",
        "tag3=D",
    ]
    sequences = [sliding_window(it, 100, fn) for it in expected_values]
    exp_values = round_robin(sequences, N)
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())

            exp_tags = expected_tags[iterations % len(expected_tags)]
            exp_value = exp_values.next()
            if timestamp != exp_ts:
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_ts, timestamp))
            if value != exp_value:
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_value, value))
            if not tagline.endswith(exp_tags):
                raise ValueError("Expected {0}, actual {1}".format(
                    exp_tags, tagline))

            if (iterations + 1) % 10 == 0:
                exp_ts -= datetime.timedelta(seconds=1)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != 990:
        raise ValueError("Expect {0} data points, get {1} data points".format(
            990, iterations))
    print("{0} passed".format(testname[:testname.index(" - ")]))