def reader(dtstart, delta, N):
    """Run the writer in a subprocess and verify the streamed query results."""
    # Launch the writer that produces the data this reader queries back.
    wproc = multiprocessing.Process(name='Writer', target=writer,
                                    args=[dtstart, delta, N])
    wproc.start()
    try:
        window = att.get_window_width()
        begin = dtstart
        end = dtstart + delta * (N - 1) - 2 * window
        span = end - begin
        # Number of points in [begin, end], both endpoints included.
        span_us = span.seconds * 1000000.0 + span.microseconds
        step_us = delta.seconds * 1000000.0 + delta.microseconds
        points_required = int(math.ceil(span_us / step_us)) + 1
        query = att.makequery("test", begin, end, output={"format": "csv"})
        queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(queryurl, json.dumps(query))
        print("Test #1 - continuous queries")
        iterations = 0
        for line in response:
            try:
                fields = line.split(',')
                tagline = fields[0].strip()
                timestamp = att.parse_timestamp(fields[1].strip())
                value = float(fields[2].strip())
                # Expected timestamp/value follow directly from the row index.
                att.check_values('test tag=Foo', tagline, 'ENDS',
                                 begin + iterations * delta, timestamp,
                                 float(iterations), value, iterations)
                iterations += 1
            except:
                print("Error at line: {0}".format(line))
                raise
        print("Query completed")
        # Check that we received all values
        if iterations != points_required:
            raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
        print("Test #1 passed")
    finally:
        wproc.join()
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    window = att.get_window_width()
    begin = dtstart
    end = dtstart + delta * (N - 1) - window
    span = end - begin
    # +1 because the query range includes both the begin and end timestamps.
    points_required = 1 + int(math.ceil(
        (span.seconds * 1000000.0 + span.microseconds) /
        (delta.seconds * 1000000.0 + delta.microseconds)))
    query = att.makequery("test", begin, end, output={"format": "csv"})
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    expected_tags = ["tag3=D", "tag3=E", "tag3=F", "tag3=G", "tag3=H"]
    print("Test #6 - filter by tag")
    iterations = 0
    for line in response:
        try:
            fields = line.split(',')
            tagline = fields[0].strip()
            timestamp = att.parse_timestamp(fields[1].strip())
            value = float(fields[2].strip())
            # Series tags cycle; timestamp/value follow the row index.
            att.check_values(expected_tags[iterations % len(expected_tags)],
                             tagline, 'ENDS', begin + iterations * delta,
                             timestamp, float(iterations), value, iterations)
            iterations += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values
    if iterations != points_required:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, iterations))
    print("Test #6 passed")
def test_where_clause_with_groupby_in_backward_direction(dtstart, delta, N):
    """Filter data by tag and group by another tag"""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query_params = {
        "output": {"format": "csv"},
        "group-by": {"tag": "tag3"},
        "where": {"tag2": ["C"]},  # read only odd
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    print("Test #4 - where + group-by")
    expected_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    expected_ts = begin
    expected_value = N - 1
    matched = 0
    for line in response:
        try:
            cells = line.split(',')
            series = cells[0].strip()
            ts = att.parse_timestamp(cells[1].strip())
            val = float(cells[2].strip())
            tag = expected_tags[(N - matched - 1) % len(expected_tags)]
            att.check_values(tag, series, 'EQ', expected_ts, ts,
                             expected_value * 1.0, val, matched)
            # The filter drops every other point, so step by two.
            expected_ts -= 2 * delta
            expected_value -= 2
            matched += 2
        except:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values
    if matched != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, matched))
    print("Test #4 passed")
def test_group_by_tag_in_backward_direction(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    query_params = {
        "output": {"format": "csv"},
        "group-by": {"tag": "tag3"},
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    print("Test #2 - group by tag in backward direction")
    grouped_tags = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    rows = 0
    for line in response:
        try:
            cells = line.split(',')
            series = cells[0].strip()
            ts = att.parse_timestamp(cells[1].strip())
            val = float(cells[2].strip())
            expected = N - rows - 1  # points come back newest-first
            att.check_values(grouped_tags[expected % len(grouped_tags)], series,
                             'EQ', begin - rows * delta, ts, float(expected),
                             val, rows)
            rows += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values
    if rows != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, rows))
    print("Test #2 passed")
def test_join_query_backward(columns, dtstart, delta, N):
    """Read joined data in backward direction, ordered by time.

    Verifies that every row carries the expected cycling tag2 value and
    that timestamps/values decrease monotonically.
    """
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {
        "output": {"format": "csv"},
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    print("Test #2 - read forward, order by time")
    for line in response:
        try:
            # Renamed from `columns`: the original shadowed the parameter.
            fields = line.split(',')
            tagline = fields[0].strip()
            timestamp = att.parse_timestamp(fields[1].strip())
            values = [float(it.strip()) for it in fields[2:]]
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)
            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except Exception:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values.
    # BUG FIX: the message formatted the undefined name `points_required`,
    # which raised NameError and masked the real failure; the expected
    # total for this query is N.
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #2 - passed")
def reader(dtstart, delta, N):
    """Spawn the writer process and validate every CSV row the query returns."""
    writer_proc = multiprocessing.Process(name='Writer', target=writer,
                                          args=[dtstart, delta, N])
    writer_proc.start()
    try:
        window = att.get_window_width()
        begin = dtstart
        end = dtstart + delta * (N - 1) - 2 * window
        elapsed = end - begin
        # Inclusive point count over the interval, hence the trailing +1.
        total_us = elapsed.seconds * 1000000.0 + elapsed.microseconds
        step_us = delta.seconds * 1000000.0 + delta.microseconds
        points_required = int(math.ceil(total_us / step_us)) + 1
        query = att.makequery("test", begin, end, **{"output": {"format": "csv"}})
        url = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
        response = urlopen(url, json.dumps(query))
        expected_ts = begin
        expected_value = 0
        count = 0
        print("Test #1 - continuous queries")
        for line in response:
            try:
                parts = line.split(',')
                series = parts[0].strip()
                ts = att.parse_timestamp(parts[1].strip())
                val = float(parts[2].strip())
                att.check_values('test tag=Foo', series, 'ENDS', expected_ts,
                                 ts, expected_value * 1.0, val, count)
                expected_ts += delta
                expected_value += 1
                count += 1
            except:
                print("Error at line: {0}".format(line))
                raise
        print("Query completed")
        # Every generated point inside the window must be present.
        if count != points_required:
            raise ValueError("Expect {0} data points, get {1} data points".format(points_required, count))
        print("Test #1 passed")
    finally:
        writer_proc.join()
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction"""
    window = att.get_window_width()
    begin = dtstart
    end = dtstart + delta * (N - 1) - window
    elapsed = end - begin
    # The query range is inclusive on both ends, hence the trailing +1.
    usec_total = elapsed.seconds * 1000000.0 + elapsed.microseconds
    usec_step = delta.seconds * 1000000.0 + delta.microseconds
    points_required = int(math.ceil(usec_total / usec_step)) + 1
    query_params = {"output": {"format": "csv"}}
    query = att.makequery("test", begin, end, **query_params)
    response = urlopen("http://{0}:{1}".format(HOST, HTTPPORT),
                       json.dumps(query))
    cursor_ts = begin
    cursor_val = 0
    rows = 0
    print("Test #6 - filter by tag")
    tags = ["tag3=D", "tag3=E", "tag3=F", "tag3=G", "tag3=H"]
    for line in response:
        try:
            cells = line.split(',')
            series = cells[0].strip()
            ts = att.parse_timestamp(cells[1].strip())
            val = float(cells[2].strip())
            att.check_values(tags[rows % len(tags)], series, 'ENDS',
                             cursor_ts, ts, cursor_val * 1.0, val, rows)
            cursor_ts += delta
            cursor_val += 1
            rows += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values
    if rows != points_required:
        raise ValueError("Expect {0} data points, get {1} data points".format(points_required, rows))
    print("Test #6 passed")
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction.

    Queries past the last written point (delta * (N + 1)) and checks that
    exactly N points with cycling tag3 values come back in write order.
    """
    begin = dtstart
    end = dtstart + delta * (N + 1)  # deliberately overshoots; only N points exist
    query_params = {
        "output": {"format": "csv"},
    }
    query = att.makequery("test", begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    exp_value = 0
    iterations = 0
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            value = float(columns[2].strip())
            exp_tags = expected_tags[iterations % len(expected_tags)]
            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)
            exp_ts += delta
            exp_value += 1
            iterations += 1
        except Exception:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values.
    # BUG FIX: the message referenced the undefined name `points_required`,
    # so a count mismatch raised NameError instead of the intended
    # ValueError; the expected count here is N.
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
def test_where_clause_with_groupby_in_backward_direction(dtstart, delta, N):
    """Filter data by tag and group by another tag"""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    params = {
        "output": {"format": "csv"},
        "group-by": {"tag": "tag3"},
        "where": {"tag2": ["C"]},  # read only odd
    }
    query = att.makequery("test", begin, end, **params)
    url = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(url, json.dumps(query))
    next_ts = begin
    next_value = N - 1
    consumed = 0
    print("Test #4 - where + group-by")
    tag_cycle = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    for line in response:
        try:
            parts = line.split(',')
            series = parts[0].strip()
            ts = att.parse_timestamp(parts[1].strip())
            val = float(parts[2].strip())
            att.check_values(tag_cycle[(N - consumed - 1) % len(tag_cycle)],
                             series, 'EQ', next_ts, ts, next_value * 1.0,
                             val, consumed)
            # The where-clause keeps every other point, so advance by two.
            next_ts -= 2 * delta
            next_value -= 2
            consumed += 2
        except:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values
    if consumed != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, consumed))
    print("Test #4 passed")
def test_group_by_tag_in_backward_direction(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * (N - 1)
    end = dtstart
    params = {
        "output": {"format": "csv"},
        "group-by": {"tag": "tag3"},
    }
    query = att.makequery("test", begin, end, **params)
    url = "http://{0}:{1}".format(HOST, HTTPPORT)
    response = urlopen(url, json.dumps(query))
    next_ts = begin
    next_value = N - 1
    received = 0
    print("Test #2 - group by tag in backward direction")
    tag_cycle = [
        "test tag3=D",
        "test tag3=E",
        "test tag3=F",
        "test tag3=G",
        "test tag3=H",
    ]
    ncycle = len(tag_cycle)
    for line in response:
        try:
            parts = line.split(',')
            series = parts[0].strip()
            ts = att.parse_timestamp(parts[1].strip())
            val = float(parts[2].strip())
            att.check_values(tag_cycle[(N - received - 1) % ncycle], series,
                             'EQ', next_ts, ts, next_value * 1.0, val, received)
            next_ts -= delta
            next_value -= 1
            received += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    # Every point written must come back.
    if received != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, received))
    print("Test #2 passed")
def test_join_query_backward(columns, dtstart, delta, N):
    """Read joined data in backward direction, ordered by time.

    Each CSV row carries one value per joined column; all of them must
    match the expected (decreasing) value for that row.
    """
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {
        "output": {"format": "csv"},
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    print("Test #2 - read forward, order by time")
    for line in response:
        try:
            # Loop local renamed: the original `columns` shadowed the parameter.
            fields = line.split(',')
            tagline = fields[0].strip()
            timestamp = att.parse_timestamp(fields[1].strip())
            values = [float(it.strip()) for it in fields[2:]]
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)
            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except Exception:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values.
    # BUG FIX: previously formatted the undefined `points_required`, raising
    # NameError and hiding the real failure; the expected total is N.
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #2 - passed")
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * N
    end = dtstart - delta
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    tag_cycle = ["tag3=D", "tag3=E", "tag3=F", "tag3=G", "tag3=H"]
    print("Test - read all data in backward direction")
    rows = 0
    for line in response:
        try:
            cells = line.split(',')
            series = cells[0].strip()
            ts = att.parse_timestamp(cells[1].strip())
            val = float(cells[2].strip())
            expected = N - rows - 1  # newest point arrives first
            att.check_values(tag_cycle[expected % len(tag_cycle)], series,
                             'ENDS', dtstart + expected * delta, ts,
                             float(expected), val, rows)
            rows += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values
    if rows != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, rows))
    print("Test passed")
def test_read_in_forward_direction(dtstart, delta, N):
    """Read data in forward direction.

    The end of the range intentionally lies past the last written point;
    exactly N points with cycling tag3 values must be returned in order.
    """
    begin = dtstart
    end = dtstart + delta * (N + 1)  # overshoot on purpose; only N points exist
    params = {
        "output": {"format": "csv"},
    }
    query = att.makequery("test", begin, end, **params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    exp_value = 0
    iterations = 0
    expected_tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    for line in response:
        try:
            cells = line.split(',')
            tagline = cells[0].strip()
            timestamp = att.parse_timestamp(cells[1].strip())
            value = float(cells[2].strip())
            exp_tags = expected_tags[iterations % len(expected_tags)]
            att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                             exp_value * 1.0, value, iterations)
            exp_ts += delta
            exp_value += 1
            iterations += 1
        except Exception:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values.
    # BUG FIX: the format call used the undefined name `points_required`,
    # turning a count mismatch into a NameError; the expected count is N.
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
def test_read_all(dtstart, delta, N):
    """Read all data in backward direction.
    All data should be received as expected."""
    begin = dtstart + delta * N
    end = dtstart - delta
    query = att.makequery("test", begin, end, output=dict(format='csv'))
    url = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(url, json.dumps(query))
    tags = [
        "tag3=D",
        "tag3=E",
        "tag3=F",
        "tag3=G",
        "tag3=H",
    ]
    next_ts = dtstart + delta * (N - 1)
    next_value = N - 1
    seen = 0
    print("Test - read all data in backward direction")
    for line in response:
        try:
            parts = line.split(',')
            series = parts[0].strip()
            ts = att.parse_timestamp(parts[1].strip())
            val = float(parts[2].strip())
            att.check_values(tags[(N - seen - 1) % len(tags)], series, 'ENDS',
                             next_ts, ts, next_value * 1.0, val, seen)
            next_ts -= delta
            next_value -= 1
            seen += 1
        except:
            print("Error at line: {0}".format(line))
            raise
    # Every written point must be returned.
    if seen != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, seen))
    print("Test passed")
def test_join_query_backward_by_series(columns, dtstart, delta, N):
    """Read joined data backward, ordered by series.

    Rows must arrive grouped per series (tag2=B, then C, then D), with
    timestamps decreasing inside each group. Series sizes are taken from
    count_elements so the expected value can be rebuilt at group changes.
    """
    begin = dtstart + delta*(N - 1)
    end = dtstart - delta
    query_params = {
        "output": { "format": "csv" },
        "order-by": "series"
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    exp_value = N-1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    # Per-series row counts inside the queried range.
    bsize = count_elements("col1", "tag2", "B", begin, end)
    csize = count_elements("col1", "tag2", "C", begin, end)
    dsize = count_elements("col1", "tag2", "D", begin, end)
    sizes = [
        bsize,
        csize,
        dsize,
    ]
    # Cumulative boundaries: global row index where the next series begins.
    steps = [
        bsize,
        bsize + csize,
        bsize + csize + dsize,
    ]
    nseries = len(expected_tags)
    print("Test #4 - read forward, order by series")
    prev_tag = None
    reset_ix = 0
    for line in response:
        try:
            # Renamed from `columns` to stop shadowing the parameter.
            fields = line.split(',')
            tagline = fields[0].strip()
            timestamp = att.parse_timestamp(fields[1].strip())
            values = [float(it.strip()) for it in fields[2:]]
            tagix = 0
            while iterations >= steps[tagix]:
                tagix += 1
            exp_tags = expected_tags[tagix]
            if prev_tag != tagline:
                # Series boundary: restart expectations at the newest point
                # of this series (series are interleaved every nseries steps).
                exp_ts = dtstart + reset_ix*delta + delta*(sizes[tagix]-1)*nseries
                exp_value = reset_ix + (sizes[tagix]-1)*nseries
                prev_tag = tagline
                reset_ix += 1
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp, exp_value*1.0, value, iterations)
            exp_ts -= nseries*delta
            exp_value -= nseries
            iterations += 1
        except Exception:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values.
    # BUG FIX: the message referenced the undefined `points_required`,
    # raising NameError instead of the intended ValueError; the expected
    # total is N.
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #4 - passed")
def test_join_query_backward_by_series(columns, dtstart, delta, N):
    """Read joined data backward, ordered by series.

    Expects rows grouped by series (tag2=B, C, D in that order), each
    group's timestamps/values decreasing by nseries steps per row.
    """
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {"output": {"format": "csv"}, "order-by": "series"}
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))
    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    # Per-series row counts inside the queried range.
    bsize = count_elements("col1", "tag2", "B", begin, end)
    csize = count_elements("col1", "tag2", "C", begin, end)
    dsize = count_elements("col1", "tag2", "D", begin, end)
    sizes = [
        bsize,
        csize,
        dsize,
    ]
    # Cumulative boundaries: global row index where the next series begins.
    steps = [
        bsize,
        bsize + csize,
        bsize + csize + dsize,
    ]
    nseries = len(expected_tags)
    print("Test #4 - read forward, order by series")
    prev_tag = None
    reset_ix = 0
    for line in response:
        try:
            # Renamed from `columns` to stop shadowing the parameter.
            fields = line.split(',')
            tagline = fields[0].strip()
            timestamp = att.parse_timestamp(fields[1].strip())
            values = [float(it.strip()) for it in fields[2:]]
            tagix = 0
            while iterations >= steps[tagix]:
                tagix += 1
            exp_tags = expected_tags[tagix]
            if prev_tag != tagline:
                # Series boundary: reset expectations to this series' newest
                # point (series values are interleaved every nseries steps).
                exp_ts = dtstart + reset_ix * delta + delta * (sizes[tagix] - 1) * nseries
                exp_value = reset_ix + (sizes[tagix] - 1) * nseries
                prev_tag = tagline
                reset_ix += 1
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)
            exp_ts -= nseries * delta
            exp_value -= nseries
            iterations += 1
        except Exception:
            print("Error at line: {0}".format(line))
            raise
    # Check that we received all values.
    # BUG FIX: the failure message formatted the undefined name
    # `points_required` (NameError); the expected total is N.
    if iterations != N:
        raise ValueError("Expect {0} data points, get {1} data points".format(N, iterations))
    print("Test #4 - passed")