def test_create_retention():
    """RETENTION keeps only samples within the window behind the latest timestamp,
    and a negative retention value is rejected at creation time."""
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'tester', 'RETENTION', 1000)

        assert r.execute_command('TS.ADD', 'tester', 500, 10)
        expected_result = [[500, b'10']]
        actual_result = r.execute_command('TS.range', 'tester', '-', '+')
        assert expected_result == actual_result
        # check for (lastTimestamp - retention < 0): nothing may be trimmed yet
        assert _get_ts_info(r, 'tester').total_samples == 1

        assert r.execute_command('TS.ADD', 'tester', 1001, 20)
        expected_result = [[500, b'10'], [1001, b'20']]
        actual_result = r.execute_command('TS.range', 'tester', '-', '+')
        assert expected_result == actual_result
        assert _get_ts_info(r, 'tester').total_samples == 2

        # 2000 - 1000 = 1000 > 500, so the first sample falls out of the window
        assert r.execute_command('TS.ADD', 'tester', 2000, 30)
        expected_result = [[1001, b'20'], [2000, b'30']]
        actual_result = r.execute_command('TS.range', 'tester', '-', '+')
        assert expected_result == actual_result
        assert _get_ts_info(r, 'tester').total_samples == 2

        # negative retention is invalid
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.CREATE', 'negative', 'RETENTION', -10)
def test_delete_rule(self):
    """TS.DELETERULE removes individual compaction rules; referencing a
    non-existent source or destination key raises an error."""
    key_name = 'tester{abc}'  # hash tag keeps all related keys in one cluster slot
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', key_name)
        assert r.execute_command('TS.CREATE', '{}_agg_max_10'.format(key_name))
        assert r.execute_command('TS.CREATE', '{}_agg_min_20'.format(key_name))
        assert r.execute_command('TS.CREATE', '{}_agg_avg_30'.format(key_name))
        assert r.execute_command('TS.CREATE', '{}_agg_last_40'.format(key_name))
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_max_10'.format(key_name), 'AGGREGATION', 'MAX', 10)
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_min_20'.format(key_name), 'AGGREGATION', 'MIN', 20)
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_avg_30'.format(key_name), 'AGGREGATION', 'AVG', 30)
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_last_40'.format(key_name), 'AGGREGATION', 'LAST', 40)

        # deleting a rule with an unknown destination or source must fail
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.DELETERULE', key_name, 'non_existent')
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.DELETERULE', 'non_existent', key_name)

        # each successful delete shrinks the rule list by exactly one
        assert len(_get_ts_info(r, key_name).rules) == 4
        assert r.execute_command('TS.DELETERULE', key_name, '{}_agg_avg_30'.format(key_name))
        assert len(_get_ts_info(r, key_name).rules) == 3
        assert r.execute_command('TS.DELETERULE', key_name, '{}_agg_max_10'.format(key_name))
        assert len(_get_ts_info(r, key_name).rules) == 2
def test_uncompressed():
    """Basic add/get/range on an UNCOMPRESSED series, a DUMP/RESTORE round-trip,
    and final key deletion."""
    with Env().getClusterConnectionIfNeeded() as r:
        # test simple commands
        r.execute_command('ts.create', 'not_compressed', 'UNCOMPRESSED')
        assert 1 == r.execute_command('ts.add', 'not_compressed', 1, 3.5)
        assert 3.5 == float(r.execute_command('ts.get', 'not_compressed')[1])
        assert 2 == r.execute_command('ts.add', 'not_compressed', 2, 4.5)
        assert 3 == r.execute_command('ts.add', 'not_compressed', 3, 5.5)
        assert 5.5 == float(r.execute_command('ts.get', 'not_compressed')[1])
        assert [[1, b'3.5'], [2, b'4.5'], [3, b'5.5']] == \
            r.execute_command('ts.range', 'not_compressed', 0, -1)
        info = _get_ts_info(r, 'not_compressed')
        # 4136 matches the current uncompressed chunk layout — implementation-defined
        assert info.total_samples == 3 and info.memory_usage == 4136
        # rdb load
        data = r.execute_command('dump', 'not_compressed')
        r.execute_command('del', 'not_compressed')
    # fresh connection: restored key must be byte-for-byte equivalent
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command('RESTORE', 'not_compressed', 0, data)
        assert [[1, b'3.5'], [2, b'4.5'], [3, b'5.5']] == \
            r.execute_command('ts.range', 'not_compressed', 0, -1)
        info = _get_ts_info(r, 'not_compressed')
        assert info.total_samples == 3 and info.memory_usage == 4136
        # test deletion
        assert r.delete('not_compressed')
def test_rdb():
    """DUMP/RESTORE round-trip: sample data survives, but compaction-rule links
    are not serialized and come back empty."""
    start_ts = 1511885909
    samples_count = 1500
    data = None
    key_name = 'tester{abc}'  # hash tag: all keys share one cluster slot
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', key_name, 'RETENTION', '0', 'CHUNK_SIZE', '360',
                                 'LABELS', 'name', 'brown', 'color', 'pink')
        assert r.execute_command('TS.CREATE', '{}_agg_avg_10'.format(key_name))
        assert r.execute_command('TS.CREATE', '{}_agg_max_10'.format(key_name))
        assert r.execute_command('TS.CREATE', '{}_agg_sum_10'.format(key_name))
        assert r.execute_command('TS.CREATE', '{}_agg_stds_10'.format(key_name))
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_avg_10'.format(key_name), 'AGGREGATION', 'AVG', 10)
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_max_10'.format(key_name), 'AGGREGATION', 'MAX', 10)
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_sum_10'.format(key_name), 'AGGREGATION', 'SUM', 10)
        assert r.execute_command('TS.CREATERULE', key_name, '{}_agg_stds_10'.format(key_name), 'AGGREGATION', 'STD.S', 10)
        _insert_data(r, key_name, start_ts, samples_count, 5)

        data = r.execute_command('DUMP', key_name)
        avg_data = r.execute_command('DUMP', '{}_agg_avg_10'.format(key_name))
        r.execute_command('DEL', key_name, '{}_agg_avg_10'.format(key_name))
        r.execute_command('RESTORE', key_name, 0, data)
        r.execute_command('RESTORE', '{}_agg_avg_10'.format(key_name), 0, avg_data)

        expected_result = [[start_ts + i, b'5'] for i in range(samples_count)]
        actual_result = r.execute_command('TS.range', key_name, start_ts, start_ts + samples_count)
        assert expected_result == actual_result
        actual_result = r.execute_command('TS.range', key_name, start_ts, start_ts + samples_count, 'count', 3)
        assert expected_result[:3] == actual_result

        # rules are intentionally not persisted: restored keys are unlinked
        assert _get_ts_info(r, key_name).rules == []
        assert _get_ts_info(r, '{}_agg_avg_10'.format(key_name)).sourceKey == None
def test_trim():
    """A series with RETENTION trims down to the last chunks; one without keeps
    every sample, for both compressed and uncompressed encodings."""
    with Env().getClusterConnectionIfNeeded() as r:
        for mode in ["UNCOMPRESSED", "COMPRESSED"]:
            samples = 2000
            chunk_size = 64 * SAMPLE_SIZE  # chunk size in bytes (64 samples per chunk)
            # expected chunk count = samples / samples-per-chunk
            total_chunk_count = math.ceil( float(samples) / float(chunk_size) * SAMPLE_SIZE)
            r.execute_command('ts.create', 'trim_me', 'CHUNK_SIZE', chunk_size, 'RETENTION', 10, mode)
            r.execute_command('ts.create', 'dont_trim_me', 'CHUNK_SIZE', chunk_size, mode)
            for i in range(samples):
                r.execute_command('ts.add', 'trim_me', i, i * 1.1)
                r.execute_command('ts.add', 'dont_trim_me', i, i * 1.1)

            trimmed_info = _get_ts_info(r, 'trim_me')
            untrimmed_info = _get_ts_info(r, 'dont_trim_me')
            # retention keeps only the current chunk plus one older chunk
            assert 2 == trimmed_info.chunk_count
            assert samples == untrimmed_info.total_samples
            # extra test for uncompressed
            if mode == "UNCOMPRESSED":
                # RETENTION 10 behind last ts 1999 keeps samples 1989..1999 (11 total)
                assert 11 == trimmed_info.total_samples
                assert total_chunk_count == untrimmed_info.chunk_count
            r.delete("trim_me")
            r.delete("dont_trim_me")
def test_downsampling_current():
    """The open (current) compaction bucket stays consistent with on-demand
    aggregation for every aggregator, across in-place updates and
    out-of-order writes into already-closed buckets."""
    with Env().getClusterConnectionIfNeeded() as r:
        key = 'src{a}'
        agg_key = 'dest{a}'
        type_list = ['', 'uncompressed']
        agg_list = ['avg', 'sum', 'min', 'max', 'count', 'range', 'first', 'last',
                    'std.p', 'std.s', 'var.p', 'var.s']  # more
        for chunk_type in type_list:
            for agg_type in agg_list:
                # DUPLICATE_POLICY LAST so re-adding a timestamp overwrites its value
                assert r.execute_command('TS.CREATE', key, chunk_type, "DUPLICATE_POLICY", "LAST")
                assert r.execute_command('TS.CREATE', agg_key, chunk_type)
                assert r.execute_command('TS.CREATERULE', key, agg_key, "AGGREGATION", agg_type, 10)

                # present update: overwrite ts=5 inside the open bucket
                assert r.execute_command('TS.ADD', key, 3, 3) == 3
                assert r.execute_command('TS.ADD', key, 5, 5) == 5
                assert r.execute_command('TS.ADD', key, 7, 7) == 7
                assert r.execute_command('TS.ADD', key, 5, 2) == 5
                assert r.execute_command('TS.ADD', key, 10, 10) == 10
                expected_result = r.execute_command('TS.RANGE', key, 0, -1, 'aggregation', agg_type, 10)
                actual_result = r.execute_command('TS.RANGE', agg_key, 0, -1)
                assert expected_result[0] == actual_result[0]

                # present add: new samples, including one out of order (14 after 15)
                assert r.execute_command('TS.ADD', key, 11, 11) == 11
                assert r.execute_command('TS.ADD', key, 15, 15) == 15
                assert r.execute_command('TS.ADD', key, 14, 14) == 14
                assert r.execute_command('TS.ADD', key, 20, 20) == 20
                expected_result = r.execute_command('TS.RANGE', key, 0, -1, 'aggregation', agg_type, 10)
                actual_result = r.execute_command('TS.RANGE', agg_key, 0, -1)
                assert expected_result[0:1] == actual_result[0:1]

                # present + past add: writes into both the open and closed buckets
                assert r.execute_command('TS.ADD', key, 23, 23) == 23
                assert r.execute_command('TS.ADD', key, 15, 22) == 15
                assert r.execute_command('TS.ADD', key, 27, 27) == 27
                assert r.execute_command('TS.ADD', key, 23, 25) == 23
                assert r.execute_command('TS.ADD', key, 30, 30) == 30
                expected_result = r.execute_command('TS.RANGE', key, 0, -1, 'aggregation', agg_type, 10)
                actual_result = r.execute_command('TS.RANGE', agg_key, 0, -1)
                assert expected_result[0:3] == actual_result[0:3]

                # 3 closed buckets compacted; 11 distinct timestamps in the source
                assert 3 == _get_ts_info(r, agg_key).total_samples
                assert 11 == _get_ts_info(r, key).total_samples
                # fresh keys for the next aggregator
                r.execute_command('DEL', key)
                r.execute_command('DEL', agg_key)
def test_empty():
    """TS.RANGE / TS.GET on freshly-created, sample-less series return empty
    results, for both the default and uncompressed encodings."""
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command('ts.create', 'empty')
        info = _get_ts_info(r, 'empty')
        assert info.total_samples == 0
        assert [] == r.execute_command('TS.range', 'empty', 0, -1)
        assert [] == r.execute_command('TS.get', 'empty')

        r.execute_command('ts.create', 'empty_uncompressed', 'uncompressed')
        info = _get_ts_info(r, 'empty_uncompressed')
        assert info.total_samples == 0
        assert [] == r.execute_command('TS.range', 'empty_uncompressed', 0, -1)
        # BUGFIX: previously queried 'empty' again (copy-paste), leaving the
        # uncompressed series' TS.GET untested
        assert [] == r.execute_command('TS.get', 'empty_uncompressed')
def test_lazy_del_src():
    """Deleting the source of a compaction rule lazily detaches the destination:
    its sourceKey is cleared and no rules remain on either side."""
    src = 'src{test}'
    dst = 'dst{test}'
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command("ts.create", src)
        r.execute_command("ts.create", dst)
        r.execute_command("ts.createrule", src, dst, 'AGGREGATION', 'avg', 60000)

        # both sides of the link are visible in TS.INFO
        assert _get_ts_info(r, dst).sourceKey.decode() == src
        src_rules = _get_ts_info(r, src).rules
        assert len(src_rules) == 1
        assert src_rules[0][0].decode() == dst

        # drop the source; the destination must be unlinked
        r.execute_command('DEL', src)
        dst_info = _get_ts_info(r, dst)
        assert dst_info.sourceKey is None
        assert len(dst_info.rules) == 0
def test_empty():
    """Non-cluster variant: empty series answer TS.RANGE / TS.GET with empty
    results for both encodings (commands passed as single space-joined strings)."""
    with Env().getConnection() as r:
        r.execute_command('ts.create empty')
        info = _get_ts_info(r, 'empty')
        assert info.total_samples == 0
        assert [] == r.execute_command('TS.range empty 0 -1')
        assert [] == r.execute_command('TS.get empty')

        r.execute_command('ts.create empty_uncompressed uncompressed')
        info = _get_ts_info(r, 'empty_uncompressed')
        assert info.total_samples == 0
        assert [] == r.execute_command('TS.range empty_uncompressed 0 -1')
        # BUGFIX: previously queried 'empty' again (copy-paste), leaving the
        # uncompressed series' TS.GET untested
        assert [] == r.execute_command('TS.get empty_uncompressed')
def test_add_create_key():
    """TS.ADD on a missing key implicitly creates the series, honoring any
    RETENTION / LABELS options supplied alongside the sample."""
    with Env().getClusterConnectionIfNeeded() as r:
        ts = time.time()
        ts_int = int(ts)

        # implicit create with retention and a single label
        assert ts_int == r.execute_command('TS.ADD', 'tester1', str(ts_int), str(ts),
                                           'RETENTION', '666', 'LABELS', 'name', 'blabla')
        info = _get_ts_info(r, 'tester1')
        assert info.total_samples == 1
        assert info.retention_msecs == 666
        assert info.labels == {b'name': b'blabla'}

        # implicit create with two labels and default retention
        assert r.execute_command('TS.ADD', 'tester2', str(ts_int), str(ts),
                                 'LABELS', 'name', 'blabla2', 'location', 'earth')
        info = _get_ts_info(r, 'tester2')
        assert info.total_samples == 1
        assert info.labels == {b'location': b'earth', b'name': b'blabla2'}
def test_compaction_rules(self):
    """TS.CREATERULE rejects non-positive bucket durations; a valid rule
    aggregates inserted data once each bucket closes."""
    with Env().getConnection() as r:
        assert r.execute_command('TS.CREATE', 'tester', 'CHUNK_SIZE', '360')
        assert r.execute_command('TS.CREATE', 'tester_agg_max_10')
        # bucket duration must be a positive integer
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_max_10', 'AGGREGATION', 'avg', -10)
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_max_10', 'AGGREGATION', 'avg', 0)
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_max_10', 'AGGREGATION', 'avg', 10)

        start_ts = 1488823384
        samples_count = 1500
        _insert_data(r, 'tester', start_ts, samples_count, 5)
        last_ts = start_ts + samples_count + 10
        # a sample past the final bucket closes it so its aggregate is flushed
        r.execute_command('TS.ADD', 'tester', last_ts, 5)

        actual_result = r.execute_command('TS.RANGE', 'tester_agg_max_10', start_ts, start_ts + samples_count)
        assert len(actual_result) == samples_count / 10
        info = _get_ts_info(r, 'tester')
        assert info.rules == [[b'tester_agg_max_10', 10, b'AVG']]
def test_compaction_rules(self):
    """Cluster variant of the compaction-rule test: invalid bucket durations are
    rejected, valid rules aggregate data as buckets close."""
    # NOTE(review): key_name / agg_key_name appear to be module-level globals
    # defined outside this chunk — confirm
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', key_name, 'CHUNK_SIZE', '360')
        assert r.execute_command('TS.CREATE', agg_key_name)
        # bucket duration must be a positive integer
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.CREATERULE', key_name, agg_key_name, 'AGGREGATION', 'avg', -10)
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.CREATERULE', key_name, agg_key_name, 'AGGREGATION', 'avg', 0)
        assert r.execute_command('TS.CREATERULE', key_name, agg_key_name, 'AGGREGATION', 'avg', 10)

        start_ts = 1488823384
        samples_count = 1500
        _insert_data(r, key_name, start_ts, samples_count, 5)
        last_ts = start_ts + samples_count + 10
        # a sample past the final bucket closes it so its aggregate is flushed
        r.execute_command('TS.ADD', key_name, last_ts, 5)

        actual_result = r.execute_command('TS.RANGE', agg_key_name, start_ts, start_ts + samples_count)
        assert len(actual_result) == samples_count / 10
        info = _get_ts_info(r, key_name)
        assert info.rules == [[agg_key_name.encode('ascii'), 12, b'AVG']] if False else info.rules == [[agg_key_name.encode('ascii'), 10, b'AVG']]
def testRDB():
    """DUMP/RESTORE yields a series whose DEBUG info is identical to the
    original's, apart from the key's own name."""
    env = Env()
    env.skipOnCluster()  # single-key DUMP/RESTORE; cluster routing not under test
    with Env().getConnection() as r:
        assert r.execute_command('TS.CREATE', 'ts1', 'RETENTION', '1000', 'CHUNK_SIZE', '1024',
                                 'ENCODING', 'UNCOMPRESSED', 'DUPLICATE_POLICY', 'min',
                                 'LABELS', 'name', 'brown', 'color', 'pink')
        r.execute_command('TS.ADD', 'ts1', 100, 99)
        r.execute_command('TS.ADD', 'ts1', 110, 500.5)
        dump = r.execute_command("dump", "ts1")
        assert r.execute_command("restore", "ts2", "0", dump)
        info1 = _get_ts_info(r, 'ts1', 'DEBUG')
        info2 = _get_ts_info(r, 'ts2', 'DEBUG')
        # the self-name is the only expected difference; align it before comparing
        info1.key_SelfName = b'ts2'
        assert info1 == info2
def test_533_dump_rules():
    """Regression test (issue 533): DUMP/RESTORE of a compaction source key must
    preserve its rules and the destination's back-link."""
    src = 'ts1{a}'
    dst = 'ts2{a}'
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command('TS.CREATE', src)
        r.execute_command('TS.CREATE', dst)
        r.execute_command('TS.CREATERULE', src, dst, 'AGGREGATION', 'avg', 60000)

        # sanity: the rule is linked in both directions before the round-trip
        assert _get_ts_info(r, dst).sourceKey.decode() == src
        assert len(_get_ts_info(r, src).rules) == 1

        payload = r.execute_command('DUMP', src)
        r.execute_command('DEL', src)
        r.execute_command('restore', src, 0, payload)

        # the restored source still carries its rule, and dst still points back
        assert len(_get_ts_info(r, src).rules) == 1
        assert _get_ts_info(r, dst).sourceKey.decode() == src
def test_check_retention_64bit():
    """RETENTION values larger than uint32 are stored and enforced correctly."""
    with Env().getClusterConnectionIfNeeded() as r:
        huge_timestamp = 4000000000  # larger than uint32
        r.execute_command('TS.CREATE', 'tester', 'RETENTION', huge_timestamp)
        assert _get_ts_info(r, 'tester').retention_msecs == huge_timestamp

        # samples at 0, 1e9, 2e9, ... 9e9; the retention window spans 4e9 ms
        for i in range(10):
            r.execute_command('TS.ADD', 'tester', int(huge_timestamp * i / 4), i)

        # only samples within huge_timestamp of the last one (9e9) survive
        step = huge_timestamp // 4
        survivors = [[step * i, str(i).encode('ascii')] for i in range(5, 10)]
        assert r.execute_command('TS.RANGE', 'tester', 0, "+") == survivors
def test_ts_del_multi_chunk():
    """TS.DEL spanning a chunk boundary removes the samples and collapses the
    now-empty chunk, for both encodings."""
    for CHUNK_TYPE in ["compressed","uncompressed"]:
        sample_len = 1
        e = Env()
        with e.getClusterConnectionIfNeeded() as r:
            r.execute_command("ts.create", 'test_key', CHUNK_TYPE)
            # keep adding (ts == sample index) until a second chunk is allocated
            while(_get_ts_info(r, 'test_key').chunk_count<2):
                assert sample_len == r.execute_command("ts.add", 'test_key', sample_len, '1')
                sample_len = sample_len + 1
            sample_len = sample_len -1  # timestamp of the last sample actually added
            res = r.execute_command('ts.range', 'test_key', 0, sample_len - 1)
            i = 1
            for sample in res:
                e.assertEqual(sample, [i, '1'.encode('ascii')])
                i += 1
            # delete every sample except the last one; TS.DEL returns the count removed
            assert sample_len - 1 == r.execute_command('ts.del', 'test_key', 0, sample_len - 1)
            res = r.execute_command('ts.range', 'test_key', 0, sample_len)
            # the emptied chunk is gone and exactly one sample remains
            e.assertEqual(_get_ts_info(r, 'test_key').chunk_count,1)
            e.assertEqual(len(res), 1)
        e.flush()
def test_delete_key():
    """Deleting either endpoint of a compaction rule cleans up the dangling
    link, and the pair can be re-linked afterwards."""
    # NOTE(review): key_name / agg_key_name appear to be module-level globals
    # defined outside this chunk — confirm
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', key_name, 'CHUNK_SIZE', '360')
        assert r.execute_command('TS.CREATE', agg_key_name)
        assert r.execute_command('TS.CREATERULE', key_name, agg_key_name, 'AGGREGATION', 'avg', 10)
        # dropping the destination removes the rule from the source
        assert r.delete(agg_key_name)
        assert _get_ts_info(r, key_name).rules == []

        assert r.execute_command('TS.CREATE', agg_key_name)
        assert r.execute_command('TS.CREATERULE', key_name, agg_key_name, 'AGGREGATION', 'avg', 11)
        # dropping the source clears the destination's back-link
        assert r.delete(key_name)
        assert _get_ts_info(r, agg_key_name).sourceKey == None

        # re-create and re-link: the new rule is fully registered
        assert r.execute_command('TS.CREATE', key_name)
        assert r.execute_command('TS.CREATERULE', key_name, agg_key_name, 'AGGREGATION', 'avg', 12)
        assert _get_ts_info(
            r, key_name).rules == [[agg_key_name.encode('ascii'), 12, b'AVG']]
def test_delete_key():
    """Non-cluster variant: deleting either endpoint of a compaction rule cleans
    up the dangling link, and the pair can be re-linked afterwards."""
    with Env().getConnection() as r:
        assert r.execute_command('TS.CREATE', 'tester', 'CHUNK_SIZE', '360')
        assert r.execute_command('TS.CREATE', 'tester_agg_max_10')
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_max_10', 'AGGREGATION', 'avg', 10)
        # dropping the destination removes the rule from the source
        assert r.delete('tester_agg_max_10')
        assert _get_ts_info(r, 'tester').rules == []

        assert r.execute_command('TS.CREATE', 'tester_agg_max_10')
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_max_10', 'AGGREGATION', 'avg', 11)
        # dropping the source clears the destination's back-link
        assert r.delete('tester')
        assert _get_ts_info(r, 'tester_agg_max_10').sourceKey == None

        # re-create and re-link: the new rule is fully registered
        assert r.execute_command('TS.CREATE', 'tester')
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_max_10', 'AGGREGATION', 'avg', 12)
        assert _get_ts_info(
            r, 'tester').rules == [[b'tester_agg_max_10', 12, b'AVG']]
def test_downsampling_rules(self):
    """
    Test downsampling rules - avg,sum,count,max,min with 4 resolutions each.
    Downsample in resolution of:
    1sec (should be the same length as the original series),
    3sec (number of samples is divisible by 10),
    10s (number of samples is not divisible by 10),
    1000sec (series should be empty since there are not enough samples)
    Insert some data and check that the length, the values and the info of the
    downsample series are as expected.
    """
    with Env().getConnection() as r:
        assert r.execute_command('TS.CREATE', 'tester')
        rules = ['avg', 'sum', 'count', 'max', 'min']
        resolutions = [1, 3, 10, 1000]
        for rule in rules:
            for resolution in resolutions:
                assert r.execute_command(
                    'TS.CREATE', 'tester_{}_{}'.format(rule, resolution))
                assert r.execute_command(
                    'TS.CREATERULE', 'tester', 'tester_{}_{}'.format(rule, resolution),
                    'AGGREGATION', rule, resolution)

        start_ts = 0
        samples_count = 501
        end_ts = start_ts + samples_count
        values = list(range(samples_count))
        _insert_data(r, 'tester', start_ts, samples_count, values)
        # a far-future sample closes every open bucket so aggregates are flushed
        r.execute_command('TS.ADD', 'tester', 3000, 7.77)

        for rule in rules:
            for resolution in resolutions:
                actual_result = r.execute_command(
                    'TS.RANGE', 'tester_{}_{}'.format(rule, resolution),
                    start_ts, end_ts)
                assert len(actual_result) == math.ceil(samples_count / float(resolution))
                expected_result = calc_rule(rule, values, resolution)
                assert _get_series_value(actual_result) == expected_result
                # last time stamp should be the beginning of the last bucket
                assert _get_ts_info(r, 'tester_{}_{}'.format(rule, resolution)).last_time_stamp == \
                    (samples_count - 1) - (samples_count - 1) % resolution

        # test for results after empty buckets
        r.execute_command('TS.ADD', 'tester', 6000, 0)
        for rule in rules:
            for resolution in resolutions:
                actual_result = r.execute_command(
                    'TS.RANGE', 'tester_{}_{}'.format(rule, resolution),
                    3000, 6000)
                assert len(actual_result) == 1
                # the single sample at 3000 aggregates to 7.77 (value rules)
                # or 1 (count rule)
                assert _get_series_value(actual_result) == [7.77] or \
                    _get_series_value(actual_result) == [1]
def test_delete_rule(self):
    """Non-cluster variant: TS.DELETERULE removes individual rules; unknown
    source or destination keys raise errors."""
    with Env().getConnection() as r:
        assert r.execute_command('TS.CREATE', 'tester')
        assert r.execute_command('TS.CREATE', 'tester_agg_max_10')
        assert r.execute_command('TS.CREATE', 'tester_agg_min_20')
        assert r.execute_command('TS.CREATE', 'tester_agg_avg_30')
        assert r.execute_command('TS.CREATE', 'tester_agg_last_40')
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_max_10', 'AGGREGATION', 'MAX', 10)
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_min_20', 'AGGREGATION', 'MIN', 20)
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_avg_30', 'AGGREGATION', 'AVG', 30)
        assert r.execute_command('TS.CREATERULE', 'tester', 'tester_agg_last_40', 'AGGREGATION', 'LAST', 40)

        # deleting a rule with an unknown destination or source must fail
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.DELETERULE', 'tester', 'non_existent')
        with pytest.raises(redis.ResponseError) as excinfo:
            assert r.execute_command('TS.DELETERULE', 'non_existent', 'tester')

        # each successful delete shrinks the rule list by exactly one
        assert len(_get_ts_info(r, 'tester').rules) == 4
        assert r.execute_command('TS.DELETERULE', 'tester', 'tester_agg_avg_30')
        assert len(_get_ts_info(r, 'tester').rules) == 3
        assert r.execute_command('TS.DELETERULE', 'tester', 'tester_agg_max_10')
        assert len(_get_ts_info(r, 'tester').rules) == 2
def test_rule_timebucket_64bit(self):
    """Compaction-rule bucket durations above the 32-bit signed limit are
    accepted and reported back intact by TS.INFO."""
    with Env().getConnection() as r:
        below_limit = 2147483647   # 2**31 - 1
        above_limit = 2147483648   # 2**31
        r.execute_command("ts.create", 'test_key', 'RETENTION', above_limit)
        r.execute_command("ts.create", 'below_32bit_limit')
        r.execute_command("ts.create", 'above_32bit_limit')
        r.execute_command("ts.createrule", 'test_key', 'below_32bit_limit',
                          'AGGREGATION', 'max', below_limit)
        r.execute_command("ts.createrule", 'test_key', 'above_32bit_limit',
                          'AGGREGATION', 'max', above_limit)

        # rule entry layout: [dest_key, bucket_duration, aggregator]
        rules = _get_ts_info(r, 'test_key').rules
        assert rules[0][1] == below_limit
        assert rules[1][1] == above_limit
def test_ooo_split(self):
    """Out-of-order inserts that force chunk splits leave the series fully
    ordered and readable, for both encodings."""
    with Env().getClusterConnectionIfNeeded() as r:
        quantity = 5000
        type_list = ['', 'UNCOMPRESSED']
        for chunk_type in type_list:
            r.execute_command('ts.create', 'split', chunk_type)
            # anchor one sample at the far end, then backfill everything before it
            r.execute_command('ts.add', 'split', quantity, 42)
            for i in range(quantity):
                r.execute_command('ts.add', 'split', i, i * 1.01)
            # exact chunk count depends on encoding; both layouts are acceptable
            assert _get_ts_info(r, 'split').chunk_count in [13, 32]
            res = r.execute_command('ts.range', 'split', '-', '+')
            # consecutive timestamps and values must form the arithmetic sequence
            for i in range(quantity - 1):
                assert res[i][0] + 1 == res[i + 1][0]
                assert round(float(res[i][1]) + 1.01, 2) == round(float(res[i + 1][1]), 2)
            r.execute_command('DEL', 'split')
def test_sanity():
    """End-to-end sanity: create a labeled series, insert samples, range them
    back, and verify the full TS.INFO reply."""
    start_ts = 1511885909
    samples_count = 1500
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'tester', 'RETENTION', '0', 'CHUNK_SIZE', '1024',
                                 'LABELS', 'name', 'brown', 'color', 'pink')
        _insert_data(r, 'tester', start_ts, samples_count, 5)

        expected_result = [[start_ts + i, str(5).encode('ascii')] for i in range(samples_count)]
        actual_result = r.execute_command('TS.range', 'tester', start_ts, start_ts + samples_count)
        assert expected_result == actual_result

        # raw TS.INFO reply (alternating field-name / value pairs) for this series;
        # memoryUsage 1166 matches the current compressed chunk layout
        expected_result = [
            b'totalSamples', 1500, b'memoryUsage', 1166,
            b'firstTimestamp', start_ts, b'chunkCount', 1,
            b'labels', [[b'name', b'brown'], [b'color', b'pink']],
            b'lastTimestamp', start_ts + samples_count - 1,
            b'chunkSize', 1024, b'retentionTime', 0,
            b'sourceKey', None, b'rules', []
        ]
        assert TSInfo(expected_result) == _get_ts_info(r, 'tester')