def test_join_query_backward(columns, dtstart, delta, N):
    """Read joined data in backward direction, ordered by time.

    Expects N rows, newest timestamp first; values count down from N - 1.
    Series tags are interleaved B/C/D, so going backward the expected tag
    cycles through `expected_tags` starting from the last written point.
    Raises ValueError if the row count differs from N.
    """
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {
        "output": {"format": "csv"},
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    print("Test #2 - read forward, order by time")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]

            # Walking backward: row `iterations` was written at index
            # N - iterations - 1, which fixes the expected tag.
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except Exception:  # was a bare `except:`; don't trap KeyboardInterrupt
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        # BUG FIX: original formatted an undefined name `points_required`,
        # raising NameError instead of the intended ValueError.
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #2 - passed")
def run_join_query(columns, thresholds, begin, end, **query_params):
    """Run a join query with a per-column gt/lt filter and validate the rows.

    `thresholds[i]` is a (gt, lt) pair applied to `columns[i]`.  Each CSV row
    must carry an expected series name, and any non-empty value must fall in
    the corresponding threshold window.  Raises ValueError on a bad row or an
    empty result set.

    NOTE(review): this definition is shadowed by an identical later
    definition of `run_join_query` in the same file — likely a copy/paste
    duplicate; confirm and remove one.
    """
    flt = {}
    for ix, column in enumerate(columns):
        flt[column] = dict(gt=thresholds[ix][0], lt=thresholds[ix][1])
    query_params["output"] = {"format": "csv"}
    query_params["filter"] = flt
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    iterations = 0
    # NOTE(review): range() membership only matches integral values, so this
    # assumes the stored points are integers — confirm against the writer.
    col1_range = range(thresholds[0][0], thresholds[0][1])
    col2_range = range(thresholds[1][0], thresholds[1][1])
    expected_tags = [
        "col1|col2 tag1=A tag2=B",
        "col1|col2 tag1=A tag2=C",
        "col1|col2 tag1=A tag2=D",
    ]
    for line in response:  # enumerate index was unused in the original
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            values = [it.strip() for it in columns[2:]]
            if tagline not in expected_tags:
                raise ValueError("Unexpected series name")
            # Empty fields are legal in a join (no point at that timestamp).
            if values[0] != '' and float(values[0]) not in col1_range:
                raise ValueError("Unexpected col1 value")
            if values[1] != '' and float(values[1]) not in col2_range:
                raise ValueError("Unexpected col2 value")
            iterations += 1
        except Exception:  # was a bare `except:`; don't trap KeyboardInterrupt
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("No data returned")
def run_join_query(columns, thresholds, begin, end, **query_params):
    """Run a join query with a per-column gt/lt filter and validate the rows.

    `thresholds[i]` is a (gt, lt) pair applied to `columns[i]`.  Every CSV
    row must use an expected series name; any non-empty value must lie in
    the matching threshold window.  Raises ValueError on a bad row or if no
    rows come back at all.

    NOTE(review): an identical earlier definition of `run_join_query`
    exists in this file; this one shadows it — likely a duplicate to remove.
    """
    # Build the per-column filter, e.g. {"col1": {"gt": a, "lt": b}, ...}.
    flt = {column: dict(gt=lo, lt=hi)
           for column, (lo, hi) in zip(columns, thresholds)}
    query_params["output"] = {"format": "csv"}
    query_params["filter"] = flt
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    iterations = 0
    # NOTE(review): range() membership only matches integral values, so this
    # assumes the stored points are integers — confirm against the writer.
    col1_range = range(thresholds[0][0], thresholds[0][1])
    col2_range = range(thresholds[1][0], thresholds[1][1])
    expected_tags = [
        "col1|col2 tag1=A tag2=B",
        "col1|col2 tag1=A tag2=C",
        "col1|col2 tag1=A tag2=D",
    ]
    for line in response:  # enumerate index was unused in the original
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            values = [it.strip() for it in columns[2:]]
            if tagline not in expected_tags:
                raise ValueError("Unexpected series name")
            # Empty fields are legal in a join (no point at that timestamp).
            if values[0] != '' and float(values[0]) not in col1_range:
                raise ValueError("Unexpected col1 value")
            if values[1] != '' and float(values[1]) not in col2_range:
                raise ValueError("Unexpected col2 value")
            iterations += 1
        except Exception:  # was a bare `except:`; don't trap KeyboardInterrupt
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations == 0:
        raise ValueError("No data returned")
def test_join_query_backward(columns, dtstart, delta, N):
    """Read joined data in backward direction, ordered by time.

    Expects N rows, newest timestamp first, with values counting down from
    N - 1 and series tags cycling B/C/D from the last written point.
    Raises ValueError if the row count differs from N.

    NOTE(review): an identical earlier definition of this function exists
    in the file; this one shadows it — likely a duplicate to remove.
    """
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {
        "output": {"format": "csv"},
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    print("Test #2 - read forward, order by time")
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]

            # Row `iterations` in backward order was written at index
            # N - iterations - 1, which determines the expected tag.
            exp_tags = expected_tags[(N - iterations - 1) % len(expected_tags)]
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)

            exp_ts -= delta
            exp_value -= 1
            iterations += 1
        except Exception:  # was a bare `except:`; don't trap KeyboardInterrupt
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        # BUG FIX: original formatted an undefined name `points_required`,
        # raising NameError instead of the intended ValueError.
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #2 - passed")
def test_join_query_backward_by_series(columns, dtstart, delta, N):
    """Read joined data in backward direction, ordered by series.

    Rows arrive grouped by series (tag2=B, then C, then D); within each
    group timestamps run backward with stride `nseries * delta` because the
    three series were written interleaved.  Raises ValueError if a row
    mismatches expectations or the total row count differs from N.
    """
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {
        "output": {"format": "csv"},
        "order-by": "series",
    }
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    # Per-series row counts inside [end, begin]; used to locate each
    # series' group boundaries in the output stream.
    bsize = count_elements("col1", "tag2", "B", begin, end)
    csize = count_elements("col1", "tag2", "C", begin, end)
    dsize = count_elements("col1", "tag2", "D", begin, end)
    sizes = [
        bsize,
        csize,
        dsize,
    ]
    # Cumulative boundaries: row index `iterations` belongs to the first
    # series whose boundary it is below.
    steps = [
        bsize,
        bsize + csize,
        bsize + csize + dsize,
    ]
    nseries = len(expected_tags)
    print("Test #4 - read forward, order by series")
    prev_tag = None
    reset_ix = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]
            tagix = 0
            while iterations >= steps[tagix]:
                tagix += 1
            exp_tags = expected_tags[tagix]
            if prev_tag != tagline:
                # New series group: jump to that series' newest point
                # (series are interleaved with stride `nseries`).
                exp_ts = dtstart + reset_ix*delta + delta*(sizes[tagix]-1)*nseries
                exp_value = reset_ix + (sizes[tagix]-1)*nseries
                prev_tag = tagline
                reset_ix += 1
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value*1.0, value, iterations)
            exp_ts -= nseries*delta
            exp_value -= nseries
            iterations += 1
        except Exception:  # was a bare `except:`; don't trap KeyboardInterrupt
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        # BUG FIX: original formatted an undefined name `points_required`,
        # raising NameError instead of the intended ValueError.
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #4 - passed")
def test_join_query_backward_by_series(columns, dtstart, delta, N):
    """Read joined data in backward direction, ordered by series.

    Rows come grouped by series (tag2=B, then C, then D), each group's
    timestamps decreasing with stride `nseries * delta` since the series
    were written interleaved.  Raises ValueError on a mismatched row or a
    wrong total row count.

    NOTE(review): an identical earlier definition of this function exists
    in the file; this one shadows it — likely a duplicate to remove.
    """
    begin = dtstart + delta * (N - 1)
    end = dtstart - delta
    query_params = {"output": {"format": "csv"}, "order-by": "series"}
    query = att.make_join_query(columns, begin, end, **query_params)
    queryurl = "http://{0}:{1}/api/query".format(HOST, HTTPPORT)
    response = urlopen(queryurl, json.dumps(query))

    exp_ts = begin
    exp_value = N - 1
    iterations = 0
    expected_tags = [
        "tag2=B",
        "tag2=C",
        "tag2=D",
    ]
    # Per-series row counts inside [end, begin].
    bsize = count_elements("col1", "tag2", "B", begin, end)
    csize = count_elements("col1", "tag2", "C", begin, end)
    dsize = count_elements("col1", "tag2", "D", begin, end)
    sizes = [
        bsize,
        csize,
        dsize,
    ]
    # Cumulative group boundaries in the ordered output.
    steps = [
        bsize,
        bsize + csize,
        bsize + csize + dsize,
    ]
    nseries = len(expected_tags)
    print("Test #4 - read forward, order by series")
    prev_tag = None
    reset_ix = 0
    for line in response:
        try:
            columns = line.split(',')
            tagline = columns[0].strip()
            timestamp = att.parse_timestamp(columns[1].strip())
            values = [float(it.strip()) for it in columns[2:]]
            tagix = 0
            while iterations >= steps[tagix]:
                tagix += 1
            exp_tags = expected_tags[tagix]
            if prev_tag != tagline:
                # New series group: jump to that series' newest point
                # (series interleaved with stride `nseries`).
                exp_ts = dtstart + reset_ix * delta + delta * (sizes[tagix] - 1) * nseries
                exp_value = reset_ix + (sizes[tagix] - 1) * nseries
                prev_tag = tagline
                reset_ix += 1
            for value in values:
                att.check_values(exp_tags, tagline, 'ENDS', exp_ts, timestamp,
                                 exp_value * 1.0, value, iterations)
            exp_ts -= nseries * delta
            exp_value -= nseries
            iterations += 1
        except Exception:  # was a bare `except:`; don't trap KeyboardInterrupt
            print("Error at line: {0}".format(line))
            raise

    # Check that we received all values
    if iterations != N:
        # BUG FIX: original formatted an undefined name `points_required`,
        # raising NameError instead of the intended ValueError.
        raise ValueError("Expect {0} data points, get {1} data points".format(
            N, iterations))
    print("Test #4 - passed")
def join_nonexistent_time_range(dtstart, delta, N):
    """Query a window entirely past the stored data; expect an empty result."""
    # [2N, 3N) in step units lies wholly beyond the last written point (N-1).
    window_start = dtstart + 2 * N * delta
    window_stop = dtstart + 3 * N * delta
    query = att.make_join_query(["test", "test"], window_start, window_stop)
    require_empty_response(query)
def join_nonexistent_metrics(dtstart, delta, N):
    """Join two metrics that were never written; expect a 'not found' error."""
    begin = dtstart
    end = begin + delta * (N + 1)
    query = att.make_join_query(["foo", "bar"], begin, end)
    # The server's error text is expected to end with this suffix.
    expected_error = "-not found"
    check_error_message(dtstart, delta, N, query, expected_error)