def test_mrange_expire_issue549():
    """Regression test for issue #549: TS.MRANGE racing against key expiration."""
    Env().skipOnDebugger()
    env = Env()
    set_hertz(env)
    with Env().getClusterConnectionIfNeeded() as r:
        # Two series share label l=1; k1 is set to expire almost immediately.
        assert r.execute_command('ts.add', 'k1', 1, 10, 'LABELS', 'l', '1') == 1
        assert r.execute_command('ts.add', 'k2', 2, 20, 'LABELS', 'l', '1') == 2
        assert r.execute_command('expire', 'k1', '1') == 1
        # Hammer MRANGE so some queries run while k1 is being expired.
        for _ in range(5000):
            reply = env.getConnection().execute_command(
                'ts.mrange - + aggregation avg 10 withlabels filter l=1')
            assert reply is not None
def test_mget_with_expire_cmd():
    """TS.MGET must reply consistently while matching keys expire underneath it."""
    # Lower hz value to make it more likely that mget triggers key expiration
    set_hertz(Env())
    with Env().getClusterConnectionIfNeeded() as r:
        for key in ("X", "Y", "Z"):
            assert r.execute_command("TS.ADD", key, "*", "1", "LABELS", "type", "DELAYED")
        current_ts = time.time()
        for key, ttl in (("X", 5), ("Y", 6), ("Z", 7)):
            assert r.execute_command("EXPIRE", key, ttl)
        # Keep querying until every key has had a chance to expire.
        while time.time() < current_ts + 10:
            reply = r.execute_command('TS.MGET', 'FILTER', 'type=DELAYED')
            assert 0 <= len(reply) <= 3
        assert r.execute_command("PING")
def test_groupby_reduce_errors():
    """Malformed GROUPBY/REDUCE clauses must raise a ResponseError."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.ADD', 's1', 1, 100, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'user')
        assert r.execute_command('TS.ADD', 's2', 2, 55, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'user')
        assert r.execute_command('TS.ADD', 's3', 2, 40, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'system')
        assert r.execute_command('TS.ADD', 's1', 2, 95)

        bad_suffixes = [
            ['GROUPBY'],                                      # wrong arity: no label
            ['GROUPBY', 'metric_name'],                       # wrong arity: no REDUCE
            ['GROUPBY', 'metric_name', 'abc', 'abc'],         # 'abc' is not REDUCE
            ['GROUPBY', 'metric_name', 'REDUCE', 'bla'],      # unknown reducer
        ]
        for suffix in bad_suffixes:
            with pytest.raises(redis.ResponseError):
                r.execute_command('TS.mrange', '-', '+', 'WITHLABELS',
                                  'FILTER', 'metric_family=cpu', *suffix)
def test_mrange_withlabels():
    """TS.MRANGE ... WITHLABELS returns each series with its full label set."""
    start_ts = 1511885909
    samples_count = 50
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'tester1', 'LABELS', 'name', 'bob', 'class', 'middle', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester2', 'LABELS', 'name', 'rudy', 'class', 'junior', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester3', 'LABELS', 'name', 'fabi', 'class', 'top', 'generation', 'x')
        for key, value in (('tester1', 5), ('tester2', 15), ('tester3', 25)):
            _insert_data(r, key, start_ts, samples_count, value)

        expected_result = [[start_ts + i, b'5'] for i in range(samples_count)]
        actual_result = r.execute_command('TS.mrange', start_ts, start_ts + samples_count,
                                          'WITHLABELS', 'FILTER', 'name=bob')
        assert actual_result == [[
            b'tester1',
            [[b'name', b'bob'], [b'class', b'middle'], [b'generation', b'x']],
            expected_result,
        ]]

        actual_result = r.execute_command('TS.mrange', start_ts + 1, start_ts + samples_count,
                                          'WITHLABELS', 'AGGREGATION', 'COUNT', 1,
                                          'FILTER', 'generation=x')
        # each of the three returned series carries its 3 labels (name, class, generation)
        for idx in range(3):
            assert len(actual_result[idx][1]) == 3
def test_groupby_reduce_multiple_groups():
    """GROUPBY REGION REDUCE max must produce one series per distinct region."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.ADD', 's1', 1, 100, 'LABELS', 'HOST', 'A', 'REGION', 'EU', 'PROVIDER', 'AWS')
        assert r.execute_command('TS.ADD', 's2', 1, 55, 'LABELS', 'HOST', 'B', 'REGION', 'EU', 'PROVIDER', 'AWS')
        assert r.execute_command('TS.ADD', 's2', 2, 90, 'LABELS', 'HOST', 'B', 'REGION', 'EU', 'PROVIDER', 'AWS')
        assert r.execute_command('TS.ADD', 's3', 2, 40, 'LABELS', 'HOST', 'C', 'REGION', 'US', 'PROVIDER', 'AWS')

        actual_result = r.execute_command(
            'TS.mrange', '-', '+', 'WITHLABELS', 'FILTER', 'PROVIDER=AWS',
            'GROUPBY', 'REGION', 'REDUCE', 'max')

        # EU group: per-timestamp max over s1 and s2
        name, labels, values = actual_result[0][0], actual_result[0][1], actual_result[0][2]
        env.assertEqual(values, [[1, b'100'], [2, b'90']])
        env.assertEqual(name, b'REGION=EU')
        env.assertEqual(labels[0][0], b'REGION')
        env.assertEqual(labels[0][1], b'EU')
        env.assertEqual(labels[1][0], b'__reducer__')
        env.assertEqual(labels[1][1], b'max')
        env.assertEqual(labels[2][0], b'__source__')
        env.assertEqual(sorted(labels[2][1].decode("ascii").split(",")), ['s1', 's2'])

        # US group: only s3 contributes
        name, labels, values = actual_result[1][0], actual_result[1][1], actual_result[1][2]
        env.assertEqual(values, [[2, b'40']])
        env.assertEqual(name, b'REGION=US')
        env.assertEqual(labels[0][0], b'REGION')
        env.assertEqual(labels[0][1], b'US')
        env.assertEqual(labels[1][0], b'__reducer__')
        env.assertEqual(labels[1][1], b'max')
        env.assertEqual(labels[2][0], b'__source__')
        env.assertEqual(labels[2][1], b's3')
def test_evict():
    """Evicting a series under allkeys-lru must also remove it from the label index.

    Caps maxmemory slightly above current usage, creates series until the LRU
    policy evicts t{1}, then checks TS.QUERYINDEX no longer returns it.
    """
    env = Env()
    env.skipOnCluster()
    skip_on_rlec()
    with env.getClusterConnectionIfNeeded() as r:
        info = r.execute_command('INFO')
        # leave only ~1MB of headroom so evictions start quickly
        max_mem = info['used_memory'] + 1024 * 1024
        assert r.execute_command('CONFIG', 'SET', 'maxmemory', str(max_mem) + 'b')
        assert r.execute_command('CONFIG', 'SET', 'maxmemory-policy', 'allkeys-lru')
        init(env, r)
        # make sure t{1} deleted: keep creating new keys until LRU evicts it
        res = r.execute_command('keys *')
        i = 4
        while b't{1}' in res:
            assert r.execute_command('TS.CREATE', 't{%s}' % (i, ))
            i += 1
            res = r.execute_command('keys *')
        # t{1} is gone from the keyspace; the label index must not return it either
        res = r.execute_command('TS.QUERYINDEX', 'name=(mush)')
        env.assertEqual(res, [])
        # restore maxmemory
        assert r.execute_command('CONFIG', 'SET', 'maxmemory', '0')
def test_multilabel_filter():
    """Value-list and negated-list filters on both TS.MRANGE and TS.MGET."""
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'tester1', 'LABELS', 'name', 'bob', 'class', 'middle', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester2', 'LABELS', 'name', 'rudy', 'class', 'junior', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester3', 'LABELS', 'name', 'fabi', 'class', 'top', 'generation', 'x')
        for idx, key in enumerate(('tester1', 'tester2', 'tester3'), start=1):
            assert r.execute_command('TS.ADD', key, 0, idx) == 0

        result = r.execute_command('TS.mrange', '-', '+', 'WITHLABELS',
                                   'FILTER', 'name=(bob,rudy)')
        assert {serie[0] for serie in result} == {b'tester1', b'tester2'}

        result = r.execute_command('TS.mrange', 0, -1, 'WITHLABELS',
                                   'FILTER', 'name=(bob,rudy)', 'class!=(middle,top)')
        assert result[0][0] == b'tester2'

        result = r.execute_command('TS.mget', 'WITHLABELS', 'FILTER', 'name=(bob,rudy)')
        assert {serie[0] for serie in result} == {b'tester1', b'tester2'}

        result = r.execute_command('TS.mget', 'WITHLABELS',
                                   'FILTER', 'name=(bob,rudy)', 'class!=(middle,top)')
        assert result[0][0] == b'tester2'
def test_non_local_filtered_data():
    """FILTER_BY_TS replies must match regardless of which shard runs the query."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        r.execute_command('TS.ADD', '{host1}_metric_1', 1, 100, 'LABELS', 'metric', 'cpu')
        r.execute_command('TS.ADD', '{host1}_metric_2', 2, 40, 'LABELS', 'metric', 'cpu')
        r.execute_command('TS.ADD', '{host1}_metric_1', 2, 95)
        r.execute_command('TS.ADD', '{host1}_metric_1', 10, 99)

        # ensure that initiating the query on different shards always replies
        # with the same series
        previous_results = []
        for shard in range(env.shardsCount):
            shard_conn = env.getConnection(shard)
            # send unordered timestamps to test for sorting
            reply = shard_conn.execute_command(
                'TS.MRANGE - + FILTER_BY_TS 11 5 25 55 101 18 9 1900 2 FILTER metric=cpu')
            env.assertEqual(len(reply), 2)
            # the reply must be filtered by timestamp: only ts=2 survives
            for serie in reply:
                samples = serie[2]
                env.assertEqual(len(samples), 1)
                env.assertEqual(samples[0][0], 2)
            for prev in previous_results:
                ensure_replies_series_match(env, prev, reply)
            previous_results.append(reply)
def test_revrange():
    """TS.REVRANGE must be the exact reverse of TS.RANGE, compressed or not."""
    start_ts = 1511885908
    samples_count = 200

    with Env().getClusterConnectionIfNeeded() as r:
        def check_reversed(*args):
            # REVRANGE with the same arguments, reversed in place, equals RANGE.
            forward = r.execute_command('TS.RANGE', 'tester1', *args)
            backward = r.execute_command('TS.REVRANGE', 'tester1', *args)
            backward.reverse()
            assert forward == backward

        r.execute_command('TS.CREATE', 'tester1', 'uncompressed')
        for i in range(samples_count):
            r.execute_command('TS.ADD', 'tester1', start_ts + i, i)
        check_reversed(0, "+")
        check_reversed(1511885910, 1511886000)
        check_reversed(0, '+', 'AGGREGATION', 'sum', 50)

        # with compression
        r.execute_command('DEL', 'tester1')
        r.execute_command('TS.CREATE', 'tester1')
        for i in range(samples_count):
            r.execute_command('TS.ADD', 'tester1', start_ts + i, i)
        check_reversed(0, '+')
        check_reversed(1511885910, 1511886000)
        check_reversed(0, '+', 'AGGREGATION', 'sum', 50)

        # REVRANGE COUNT 5 == the last 5 samples, newest first
        rev_head = r.execute_command('TS.REVRANGE', 'tester1', 0, '+', 'COUNT', 5)
        full = r.execute_command('TS.RANGE', 'tester1', 0, '+')
        full.reverse()
        assert len(rev_head) == 5
        assert full[0:5] == rev_head[0:5]
def test_rename_dst():
    """Renaming a downsample destination must update the source's rule list."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'a{2}')
        assert r.execute_command('TS.CREATE', 'b{2}')
        assert r.execute_command('TS.CREATERULE', 'a{2}', 'b{2}', 'AGGREGATION', 'AVG', 5000)
        env.assertTrue(r.execute_command('RENAME', 'b{2}', 'b1{2}'))

        info = TSInfo(r.execute_command('TS.INFO', 'a{2}'))
        env.assertEqual(info.sourceKey, None)
        env.assertEqual(info.rules[0][0], b'b1{2}')

        # add two more rules, then rename one of the three destinations
        assert r.execute_command('TS.CREATE', 'c{2}')
        assert r.execute_command('TS.CREATERULE', 'a{2}', 'c{2}', 'AGGREGATION', 'COUNT', 2000)
        assert r.execute_command('TS.CREATE', 'd{2}')
        assert r.execute_command('TS.CREATERULE', 'a{2}', 'd{2}', 'AGGREGATION', 'SUM', 3000)
        env.assertTrue(r.execute_command('RENAME', 'c{2}', 'c1{2}'))

        info = TSInfo(r.execute_command('TS.INFO', 'a{2}'))
        env.assertEqual(info.sourceKey, None)
        env.assertEqual(info.rules[0][0], b'b1{2}')
        env.assertEqual(info.rules[1][0], b'c1{2}')
        env.assertEqual(info.rules[2][0], b'd{2}')
def test_mrevrange():
    """TS.MREVRANGE: plain, COUNT-limited, and aggregated reverse queries."""
    start_ts = 1511885909
    samples_count = 50
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'tester1', 'LABELS', 'name', 'bob', 'class', 'middle', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester2', 'LABELS', 'name', 'rudy', 'class', 'junior', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester3', 'LABELS', 'name', 'fabi', 'class', 'top', 'generation', 'x')
        for key, value in (('tester1', 5), ('tester2', 15), ('tester3', 25)):
            _insert_data(r, key, start_ts, samples_count, value)

        # expected samples, newest first
        expected_result = [[start_ts + i, b'5'] for i in range(samples_count - 1, -1, -1)]
        actual = r.execute_command('TS.mrevrange', start_ts, start_ts + samples_count,
                                   'FILTER', 'name=bob')
        assert actual == [[b'tester1', [], expected_result]]

        actual = r.execute_command('TS.mrevrange', start_ts, start_ts + samples_count,
                                   'COUNT', '5', 'FILTER', 'generation=x')
        actual.sort(key=lambda serie: serie[0])
        assert actual == [
            [b'tester1', [], [[1511885958, b'5'], [1511885957, b'5'], [1511885956, b'5'],
                              [1511885955, b'5'], [1511885954, b'5']]],
            [b'tester2', [], [[1511885958, b'15'], [1511885957, b'15'], [1511885956, b'15'],
                              [1511885955, b'15'], [1511885954, b'15']]],
            [b'tester3', [], [[1511885958, b'25'], [1511885957, b'25'], [1511885956, b'25'],
                              [1511885955, b'25'], [1511885954, b'25']]],
        ]

        # aggregated reverse result is the aggregated forward result, reversed
        agg = r.execute_command('TS.mrange', 0, -1, 'AGGREGATION', 'sum', 50,
                                'FILTER', 'name=bob')[0][2]
        rev_agg = r.execute_command('TS.mrevrange', 0, -1, 'AGGREGATION', 'sum', 50,
                                    'FILTER', 'name=bob')[0][2]
        rev_agg.reverse()
        assert rev_agg == agg

        # COUNT on a reverse aggregation takes buckets from the end
        last_buckets = list(reversed(agg))[0:3]
        assert r.execute_command('TS.mrevrange', 0, -1, 'AGGREGATION', 'sum', 50,
                                 'COUNT', 3, 'FILTER', 'name=bob')[0][2] == last_buckets
def main(args, prt): config = tf.ConfigProto() #tensorflow的参数配置对象 config.gpu_options.allow_growth = True #动态申请显存,需要多少就申请多少 sess = tf.Session(config=config) # 加载任务特定的类 DataGenerator, Env, reward_func, AttentionActor, AttentionCritic = \ load_task_specific_components(args['task_name']) dataGen = DataGenerator(args) dataGen.reset() env = Env(args) #创建一个 RL(强化学习)代理 agent = RLAgent(args, prt, env, dataGen, reward_func, AttentionActor, AttentionCritic, is_train=args['is_train']) agent.Initialize(sess) #训练或评估 start_time = time.time() # 计算时间用的,记录开始时间 if args['is_train']:# 如果参数是训练 prt.print_out('训练开始') train_time_beg = time.time() # 开始训练的时间 for step in range(args['n_train']): summary = agent.run_train_step() _, _ , actor_loss_val, critic_loss_val, actor_gra_and_var_val, critic_gra_and_var_val,\ R_val, v_val, logprobs_val,probs_val, actions_val, idxs_val= summary if step%args['save_interval'] == 0: agent.saver.save(sess,args['model_dir']+'/model.ckpt', global_step=step)#sess是之前tensorflow的初始化对象 if step%args['log_interval'] == 0: train_time_end = time.time()-train_time_beg prt.print_out('训练步数: {} -- 时间: {} -- 训练奖励: {} -- 值: {}'\ .format(step,time.strftime("%H:%M:%S", time.gmtime(\ train_time_end)),np.mean(R_val),np.mean(v_val))) prt.print_out(' actor loss: {} -- critic loss: {}'\ .format(np.mean(actor_loss_val),np.mean(critic_loss_val))) train_time_beg = time.time() if step%args['test_interval'] == 0: agent.inference(args['infer_type']) else: # 否则就是在推论 prt.print_out('评估开始') agent.inference(args['infer_type']) prt.print_out('总时间为: {}'.format(\ time.strftime("%H:%M:%S", time.gmtime(time.time()-start_time)))) # 运行完毕之后输出运行总时间
def test_rename_indexed():
    """A renamed series must stay reachable through the label index."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.ADD', 'a{3}', 100, 200, 'LABELS', 'sensor_id', '2', 'area_id', '32')
        env.assertEqual(r.execute_command('TS.MGET', 'FILTER', 'area_id=32'),
                        [[b'a{3}', [], [100, b'200']]])
        env.assertTrue(r.execute_command('RENAME', 'a{3}', 'a1{3}'))
        # the same filter now resolves to the new key name
        env.assertEqual(r.execute_command('TS.MGET', 'FILTER', 'area_id=32'),
                        [[b'a1{3}', [], [100, b'200']]])
def test_unlink():
    """UNLINK must remove the series from the label index like DEL does."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        init(env, r)
        r.execute_command('UNLINK', 't{1}')
        index = r.execute_command('TS.QUERYINDEX', 'name=(mush,zavi,rex)')
        env.assertEqual(sorted(index), sorted([b't{2}', b't{1}_agg']))
        mget = r.execute_command('TS.MGET', 'filter', 'name=(mush,zavi,rex)')
        env.assertEqual(sorted(mget), sorted([[b't{2}', [], []], [b't{1}_agg', [], []]]))
def test_groupby_reduce_empty():
    """GROUPBY on a label no matched series carries must return an empty reply."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.ADD', 's1', 1, 100, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'user')
        assert r.execute_command('TS.ADD', 's2', 2, 55, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'user')
        assert r.execute_command('TS.ADD', 's3', 2, 40, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'system')
        assert r.execute_command('TS.ADD', 's1', 2, 95)
        reply = r.execute_command(
            'TS.mrange', '-', '+', 'WITHLABELS', 'FILTER', 'metric_family=cpu',
            'GROUPBY', 'labelX', 'REDUCE', 'max')
        env.assertEqual(reply, [])
def test_label_index():
    """TS.QUERYINDEX over equality, negation, empty-value and list filters."""
    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'tester1', 'LABELS', 'name', 'bob', 'class', 'middle', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester2', 'LABELS', 'name', 'rudy', 'class', 'junior', 'generation', 'x')
        assert r.execute_command('TS.CREATE', 'tester3', 'LABELS', 'name', 'fabi', 'class', 'top', 'generation', 'x', 'x', '2')
        assert r.execute_command('TS.CREATE', 'tester4', 'LABELS', 'name', 'anybody', 'class', 'top', 'type', 'noone', 'x', '2', 'z', '3')

        def assert_data(query, expected_data):
            assert sorted(expected_data) == sorted(r.execute_command(*query))

        assert_data(['TS.QUERYINDEX', 'generation=x'], [b'tester1', b'tester2', b'tester3'])
        assert_data(['TS.QUERYINDEX', 'generation=x', 'x='], [b'tester1', b'tester2'])
        assert_data(['TS.QUERYINDEX', 'generation=x', 'x=2'], [b'tester3'])
        assert_data(['TS.QUERYINDEX', 'x=2'], [b'tester3', b'tester4'])
        assert_data(['TS.QUERYINDEX', 'generation=x', 'class!=middle', 'x='], [b'tester2'])
        assert_data(['TS.QUERYINDEX', 'generation=x', 'class=top', 'x='], [])
        assert_data(['TS.QUERYINDEX', 'generation=x', 'class=top', 'z='], [b'tester3'])
        assert_data(['TS.QUERYINDEX', 'z=', 'x=2'], [b'tester3'])
        # this matcher combination is rejected by the index
        with pytest.raises(redis.ResponseError):
            r.execute_command('TS.QUERYINDEX', 'z=', 'x!=2')

        # Test filter list
        assert_data(['TS.QUERYINDEX', 'generation=x', 'class=(middle,junior)'], [b'tester1', b'tester2'])
        assert_data(['TS.QUERYINDEX', 'generation=x', 'class=(a,b,c)'], [])
        assert sorted(r.execute_command('TS.QUERYINDEX', 'generation=x')) == \
               sorted(r.execute_command('TS.QUERYINDEX', 'generation=(x)'))
        assert_data(['TS.QUERYINDEX', 'generation=x', 'class=()'], [])
        assert_data(['TS.QUERYINDEX', 'class=(middle,junior,top)', 'name!=(bob,rudy,fabi)'],
                    [b'tester4'])

        # malformed list filters and pure-negation queries must error
        for bad_query in (('generation=x', 'class=('),
                          ('generation=x', 'class=(ab'),
                          ('generation!=(x,y)',)):
            with pytest.raises(redis.ResponseError):
                r.execute_command('TS.QUERYINDEX', *bad_query)
def test_issue400():
    """Regression: range/revrange windows around every sample must not be empty."""
    with Env().getClusterConnectionIfNeeded() as r:
        times = 300
        r.execute_command('ts.create', 'issue376', 'UNCOMPRESSED')
        for i in range(1, times):
            r.execute_command('ts.add', 'issue376', i * 5, i)
        # query a small window around each sample, in both directions
        for command in ('ts.range', 'ts.revrange'):
            for i in range(1, times):
                reply = r.execute_command(command, 'issue376', i * 5 - 1, i * 5 + 60)
                assert len(reply) > 0
def test_alter_cmd():
    """TS.ALTER of retention, chunk size and labels, plus index maintenance."""
    start_ts = 1511885909
    samples_count = 1500
    end_ts = start_ts + samples_count
    key = 'tester'

    with Env().getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', key, 'CHUNK_SIZE', '360',
                                 'LABELS', 'name', 'brown', 'color', 'pink')
        _insert_data(r, key, start_ts, samples_count, 5)
        expected_data = [[start_ts + i, b'5'] for i in range(samples_count)]

        # alter retention, chunk size and labels all at once
        expected_labels = [[b'A', b'1'], [b'B', b'2'], [b'C', b'3']]
        expected_retention = 500
        expected_chunk_size = 100
        _ts_alter_cmd(r, key, expected_retention, expected_chunk_size, expected_labels)
        _assert_alter_cmd(r, key, end_ts - 501, end_ts, expected_data[-501:],
                          expected_retention, expected_chunk_size, expected_labels)

        # alter retention only
        expected_retention = 200
        _ts_alter_cmd(r, key, set_retention=expected_retention)
        _assert_alter_cmd(r, key, end_ts - 201, end_ts, expected_data[-201:],
                          expected_retention, expected_chunk_size, expected_labels)

        # alter chunk size only
        expected_chunk_size = 100
        expected_labels = [[b'A', b'1'], [b'B', b'2'], [b'C', b'3']]
        _ts_alter_cmd(r, key, set_chunk_size=expected_chunk_size)
        _assert_alter_cmd(r, key, end_ts - 201, end_ts, expected_data[-201:],
                          expected_retention, expected_chunk_size, expected_labels)

        # alter labels only
        expected_labels = [[b'A', b'1']]
        _ts_alter_cmd(r, key, expected_retention, set_labels=expected_labels)
        _assert_alter_cmd(r, key, end_ts - 201, end_ts, expected_data[-201:],
                          expected_retention, expected_chunk_size, expected_labels)

        # the indexer must reflect the new labels only
        assert r.execute_command('TS.QUERYINDEX', 'A=1') == [key.encode('ascii')]
        assert r.execute_command('TS.QUERYINDEX', 'name=brown') == []
def test_large_key_value_pairs():
    """TS.QUERYINDEX must handle very long label value-list filters.

    Creates `number_series` series sharing the same four labels, then checks
    that each of the four filters — including one with a ~50-entry value
    list — matches every series.
    """
    with Env().getClusterConnectionIfNeeded() as r:
        number_series = 100
        for i in range(0, number_series):
            assert r.execute_command('TS.CREATE', 'ts-{}'.format(i), 'LABELS', 'baseAsset', '17049', 'counterAsset', '840', 'source', '1000', 'dataType', 'PRICE_TICK')
        kv_label1 = 'baseAsset=(13830,10249,16019,10135,17049,10777,10138,11036,11292,15778,11043,10025,11436,12207,13359,10807,12216,11833,10170,10811,12864,12738,10053,11334,12487,12619,12364,13266,11219,15827,12374,11223,10071,12249,11097,14430,13282,16226,13667,11365,12261,12646,12650,12397,12785,13941,10231,16254,12159,15103)'
        kv_label2 = 'counterAsset=(840)'
        kv_label3 = 'source=(1000)'
        kv_label4 = 'dataType=(PRICE_TICK)'
        kv_labels = [kv_label1, kv_label2, kv_label3, kv_label4]
        for kv_label in kv_labels:
            # BUG FIX: the loop previously queried kv_label1 on every
            # iteration, so labels 2-4 were never actually exercised.
            res = r.execute_command('TS.QUERYINDEX', kv_label)
            assert len(res) == number_series
def test_flush():
    """FLUSHALL and FLUSHDB must both clear the label index."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        for flush_cmd in ('FLUSHALL', 'FLUSHDB'):
            init(env, r)
            assert r.execute_command(flush_cmd)
            env.assertEqual(r.execute_command('TS.QUERYINDEX', 'name=(mush,zavi,rex)'), [])
def test_groupby_reduce():
    """GROUPBY metric_name with max/sum/min reducers, plus COUNT interaction."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.ADD', 's1', 1, 100, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'user')
        assert r.execute_command('TS.ADD', 's2', 2, 55, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'user')
        assert r.execute_command('TS.ADD', 's3', 2, 40, 'LABELS', 'metric_family', 'cpu', 'metric_name', 'system')
        assert r.execute_command('TS.ADD', 's1', 2, 95)

        reply = r.execute_command(
            'TS.mrange', '-', '+', 'WITHLABELS', 'FILTER', 'metric_family=cpu',
            'GROUPBY', 'metric_name', 'REDUCE', 'max')

        # group 'system': only s3 contributes
        name, labels, values = reply[0][0], reply[0][1], reply[0][2]
        env.assertEqual(values, [[2, b'40']])
        env.assertEqual(name, b'metric_name=system')
        env.assertEqual(labels[0][0], b'metric_name')
        env.assertEqual(labels[0][1], b'system')

        # group 'user': per-timestamp max over s1 and s2
        name, labels, values = reply[1][0], reply[1][1], reply[1][2]
        env.assertEqual(name, b'metric_name=user')
        env.assertEqual(labels[0][0], b'metric_name')
        env.assertEqual(labels[0][1], b'user')
        env.assertEqual(labels[1][0], b'__reducer__')
        env.assertEqual(labels[1][1], b'max')
        env.assertEqual(labels[2][0], b'__source__')
        env.assertEqual(sorted(labels[2][1].decode("ascii").split(",")), ['s1', 's2'])
        env.assertEqual(values, [[1, b'100'], [2, b'95']])

        # same 'user' group with the other reducers
        reply = r.execute_command(
            'TS.mrange', '-', '+', 'WITHLABELS', 'FILTER', 'metric_family=cpu',
            'GROUPBY', 'metric_name', 'REDUCE', 'sum')
        env.assertEqual(reply[1][2], [[1, b'100'], [2, b'150']])

        reply = r.execute_command(
            'TS.mrange', '-', '+', 'WITHLABELS', 'FILTER', 'metric_family=cpu',
            'GROUPBY', 'metric_name', 'REDUCE', 'min')
        env.assertEqual(reply[1][2], [[1, b'100'], [2, b'55']])

        # with COUNT 1 only the first sample remains in the 'user' group
        reply = r.execute_command(
            'TS.mrange', '-', '+', 'WITHLABELS', 'COUNT', 1, 'FILTER', 'metric_family=cpu',
            'GROUPBY', 'metric_name', 'REDUCE', 'min')
        env.assertEqual(reply[1][2], [[1, b'100']])
def test_incrby_with_timestamp():
    """TS.INCRBY with explicit, automatic ('*'), and stale timestamps."""
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command('ts.create', 'tester')
        for ts in range(20):
            assert r.execute_command('ts.incrby', 'tester', '5', 'TIMESTAMP', ts) == ts
        samples = r.execute_command('TS.RANGE', 'tester', 0, 20)
        assert len(samples) == 20
        assert samples[19][1] == b'100'  # 20 increments of 5

        # '*' stamps with the server clock (milliseconds)
        reply_ts = r.execute_command('ts.incrby', 'tester', '5', 'TIMESTAMP', '*') / 1000
        now = int(time.time())
        assert now <= reply_ts <= now + 1

        # a timestamp older than the last sample is rejected
        with pytest.raises(redis.ResponseError):
            r.execute_command('ts.incrby', 'tester', '5', 'TIMESTAMP', '10')
def test_rename_none_ts():
    """Renaming non-timeseries keys must leave existing series untouched."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'a{4}')
        assert r.execute_command('SET', 'key1{4}', 'val1')
        assert r.execute_command('SET', 'key2{4}', 'val2')
        env.assertTrue(r.execute_command('RENAME', 'key1{4}', 'key3{4}'))
        env.assertTrue(r.execute_command('RENAME', 'key2{4}', 'key1{4}'))
        assert r.execute_command('SET', 'key1{4}', 'val3')
        assert r.execute_command('SET', 'key3{4}', 'val4')
        # the series must be unaffected by the string-key shuffling above
        info = TSInfo(r.execute_command('TS.INFO', 'a{4}'))
        env.assertEqual(info.sourceKey, None)
        env.assertEqual(info.rules, [])
def test_alter_key(self):
    """TS.ALTER DUPLICATE_POLICY LAST lets a later ADD overwrite a sample."""
    with Env().getClusterConnectionIfNeeded() as r:
        key = 'tester'
        r.execute_command('TS.CREATE', key)
        date_ranges = _fill_data(r, key)
        overrided_ts = date_ranges[0][0] + 10
        # default policy: re-adding an existing timestamp is an error
        with pytest.raises(redis.ResponseError):
            r.execute_command('TS.ADD', key, overrided_ts, 10)
        r.execute_command('TS.ALTER', key, 'DUPLICATE_POLICY', 'LAST')
        # original sample still intact after the failed ADD
        assert r.execute_command('TS.RANGE', key, overrided_ts, overrided_ts) == \
               [[overrided_ts, str(overrided_ts).encode("ascii")]]
        # with LAST, the newer value wins
        r.execute_command('TS.ADD', key, overrided_ts, 10)
        assert r.execute_command('TS.RANGE', key, overrided_ts, overrided_ts) == \
               [[overrided_ts, b'10']]
def test_filterby():
    """FILTER_BY_VALUE / FILTER_BY_TS combined with AGGREGATION and GROUPBY.

    Builds two expectation maps from the raw on-disk test data:
      * high_temps:    per country, hourly count of readings >= 30
      * specific_days: per country, count of readings at two fixed timestamps
    then loads the same data into Redis and checks TS.MRANGE reproduces them.
    """
    env = Env()
    high_temps = defaultdict(lambda: defaultdict(lambda: 0))
    specific_days = defaultdict(lambda: defaultdict(lambda: 0))
    days = [1335830400000, 1338508800000]  # the two timestamps FILTER_BY_TS selects
    for row in create_test_rdb_file.read_from_disk():
        timestamp = create_test_rdb_file.parse_timestamp(row[0])
        # parentheses in country names are swapped to brackets — presumably to
        # avoid clashing with the '(...)' list-filter syntax; TODO confirm
        country = row[create_test_rdb_file.Country].replace('(', '[').replace(
            ')', ']')
        if timestamp in days:
            specific_days[country][timestamp] += 1
        if row[1] and float(row[1]) >= 30:
            if timestamp > 0:
                high_temps[country][timestamp] += 1
    with env.getClusterConnectionIfNeeded() as r:
        create_test_rdb_file.load_into_redis(r)

        def assert_results(results, expected_results):
            # results: list of [name, labels, samples]; labels[0][1] is the country
            for row in results:
                country = row[1][0][1].decode()
                points = dict([(point[0], int(point[1])) for point in row[2]])
                # per-point check first, for a precise failure message
                for k in points:
                    env.assertEqual(points[k], expected_results[country][k],
                                    message="timestamp {} not equal".format(k))
                env.assertEqual(points, expected_results[country],
                                message="country {} not eq".format(country))

        # hourly (3600000 ms) count of values in [30, 100], grouped by country
        results = r.execute_command("TS.MRANGE", "-", "+", "withlabels",
                                    "FILTER_BY_VALUE", 30, 100,
                                    "AGGREGATION", "count", 3600000,
                                    "filter", "metric=temperature",
                                    "groupby", "country", "reduce", "sum")
        assert_results(results, high_temps)
        # same aggregation, but restricted to the two selected timestamps
        results = r.execute_command("TS.MRANGE", "-", "+", "withlabels",
                                    "FILTER_BY_TS", 1335830400000, 1338508800000,
                                    "AGGREGATION", "count", 3600000,
                                    "filter", "metric=temperature",
                                    "groupby", "country", "reduce", "sum")
        assert_results(results, specific_days)
def test_incrby_with_update_latest():
    """TS.INCRBY / TS.DECRBY at the latest timestamp update that sample in place."""
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command('ts.create', 'tester')
        for i in range(1, 21):
            assert r.execute_command('ts.incrby', 'tester', '5', 'TIMESTAMP', i) == i
        result = r.execute_command('TS.RANGE', 'tester', 0, 20)
        assert len(result) == 20
        assert result[19] == [20, b'100']  # 20 increments of 5

        # Incrementing at the current latest timestamp (20) adds to that sample.
        # FIX: these assertions previously compared against the leaked loop
        # variable `i` instead of the explicit timestamp 20.
        assert r.execute_command('ts.incrby', 'tester', '5', 'TIMESTAMP', 20) == 20
        result = r.execute_command('TS.RANGE', 'tester', 0, 20)
        assert len(result) == 20
        assert result[19] == [20, b'105']

        assert r.execute_command('ts.decrby', 'tester', '10', 'TIMESTAMP', 20) == 20
        result = r.execute_command('TS.RANGE', 'tester', 0, 20)
        assert len(result) == 20
        assert result[19] == [20, b'95']
def test_mrange_with_expire_cmd():
    """TS.MRANGE must reply consistently while matching keys expire underneath it."""
    env = Env()
    set_hertz(env)
    with env.getClusterConnectionIfNeeded() as r:
        for key in ("X", "Y", "Z"):
            assert r.execute_command("TS.ADD", key, "*", "1", "LABELS", "type", "DELAYED")
        current_ts = time.time()
        for key, ttl in (("X", 5), ("Y", 6), ("Z", 7)):
            assert r.execute_command("EXPIRE", key, ttl)
        # Keep querying until every key has had a chance to expire.
        while time.time() < current_ts + 10:
            reply = r.execute_command('TS.mrange', '-', '+', 'FILTER', 'type=DELAYED')
            assert 0 <= len(reply) <= 3
        assert r.execute_command("PING")
def test_non_local_data():
    """MRANGE replies must match regardless of which shard initiates the query."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        r.execute_command('TS.ADD', '{host1}_metric_1', 1, 100, 'LABELS', 'metric', 'cpu')
        r.execute_command('TS.ADD', '{host1}_metric_2', 2, 40, 'LABELS', 'metric', 'cpu')
        r.execute_command('TS.ADD', '{host1}_metric_1', 2, 95)
        r.execute_command('TS.ADD', '{host1}_metric_1', 10, 99)

        # ensure that initiating the query on different shards always replies
        # with the same series
        previous_results = []
        for shard in range(env.shardsCount):
            reply = env.getConnection(shard).execute_command(
                'TS.MRANGE - + FILTER metric=cpu')
            env.assertEqual(len(reply), 2)
            for prev in previous_results:
                ensure_replies_series_match(env, prev, reply)
            previous_results.append(reply)
def test_incrby():
    """Auto-timestamped TS.INCRBY/TS.DECRBY accumulate into the latest value."""
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command('ts.create', 'tester')

        start_incr_time = int(time.time() * 1000)
        for _ in range(20):
            r.execute_command('ts.incrby', 'tester', '5')
            time.sleep(0.001)

        start_decr_time = int(time.time() * 1000)
        for _ in range(20):
            r.execute_command('ts.decrby', 'tester', '1.5')
            time.sleep(0.001)

        now = int(time.time() * 1000)
        samples = r.execute_command('TS.RANGE', 'tester', 0, now)
        # 20 * 5 - 20 * 1.5 = 70
        assert samples[-1][1] == b'70'
        assert samples[-1][0] <= now
        assert samples[0][0] >= start_incr_time
        # at most one sample per operation
        assert len(samples) <= 40
def test_rename_src():
    """Renaming a rule's source key must update the destination's sourceKey."""
    env = Env()
    with env.getClusterConnectionIfNeeded() as r:
        assert r.execute_command('TS.CREATE', 'a1{1}')
        assert r.execute_command('TS.CREATE', 'b{1}')
        env.assertTrue(r.execute_command('RENAME', 'a1{1}', 'a2{1}'))
        info_a = TSInfo(r.execute_command('TS.INFO', 'a2{1}'))
        env.assertEqual(info_a.sourceKey, None)
        env.assertEqual(info_a.rules, [])

        assert r.execute_command('TS.CREATERULE', 'a2{1}', 'b{1}', 'AGGREGATION', 'AVG', 5000)
        info_b = TSInfo(r.execute_command('TS.INFO', 'b{1}'))
        env.assertEqual(info_b.sourceKey, b'a2{1}')
        env.assertEqual(info_b.rules, [])

        # rename the source again, after the rule exists
        env.assertTrue(r.execute_command('RENAME', 'a2{1}', 'a3{1}'))
        info_b = TSInfo(r.execute_command('TS.INFO', 'b{1}'))
        env.assertEqual(info_b.sourceKey, b'a3{1}')
        env.assertEqual(info_b.rules, [])