def test_history_loooks_at_first_node_only(chain, requests_mock):
    """History must be served from instance 0 only; a dead second instance
    must not interfere with the query."""
    chain.instances[1] = None  # shouldn't interfere
    ip = chain.instances[0].public_ip_address
    t = datetime.now(tz=pytz.UTC)
    # Ten identical block metas, as the /blockchain endpoint would return.
    block_meta = {
        'header': {
            'app_hash': "",
            'height': "",
            'time': t.isoformat()
        }
    }
    requests_mock.add(
        requests_mock.GET,
        re.compile(r'http://' + ip +
                   r':46657/blockchain\?minHeight=1\&maxHeight=10'),
        json={"result": [0, {'block_metas': [block_meta] * 10}]},
        status=200)
    # Succeeds only if no request is made to the (None) second instance.
    Chainmanager.get_history(chain, 1, 10)
def test_history_default_to_fromm(requests_mock, chain, mock_ethermint_requests):
    """Calling get_history without explicit bounds must behave the same as
    asking for the range ending at the current height."""
    ip = chain.instances[0].public_ip_address
    t = datetime.now(tz=pytz.UTC)

    def reset_mocks():
        # Current height reported as 10 by every instance.
        mock_ethermint_requests(
            10, t, "hash",
            [inst.public_ip_address for inst in chain.instances])
        block_meta = {
            'header': {
                'app_hash': "",
                'height': "",
                'time': t.isoformat()
            }
        }
        requests_mock.add(
            requests_mock.GET,
            re.compile(r'http://' + ip +
                       r':46657/blockchain\?minHeight=9\&maxHeight=10'),
            json={"result": [0, {'block_metas': [block_meta]}]},
            status=200)

    reset_mocks()
    history1 = Chainmanager.get_history(chain, 9)
    reset_mocks()
    history2 = Chainmanager.get_history(chain)
    assert history1 == history2
def create(regions, ethermint_version, name_root, num_processes, no_ami_cache, chain_file):
    """Create an ethermint network and persist its serialized form.

    :param regions: AWS regions to create instances in
    :param ethermint_version: ethermint version to deploy
    :param name_root: base name for the created instances
    :param num_processes: parallelism passed to Chainmanager
    :param no_ami_cache: when True, force a rebuild of the AMI
    :param chain_file: writable file object receiving the serialized chain
    """
    chainmanager = Chainmanager(num_processes=num_processes)
    chain = chainmanager.create_ethermint_network(
        regions, ethermint_version, name_root, no_ami_cache=no_ami_cache)
    chain_file.write(json.dumps(chain.serialize()))
    # BUG FIX: the original message had no "{}" placeholder, so .format(chain)
    # silently dropped the chain from the log line.
    logger.info("Created a chain {}".format(chain))
def network_fault(chain_file, num_steps, delay_step, interval):
    """Run a network-fault experiment on a serialized chain and print the
    resulting report as JSON."""
    serialized = json.loads(chain_file.read())
    chain = Chain.deserialize(serialized)
    fault_report = Chainmanager.get_network_fault(
        chain, num_steps, delay_step, interval)
    print(json.dumps(fault_report))
def test_network_fault(requests_mock, chain, mock_ethermint_requests, mocksubprocess):
    # FIXME: test-all smoke test for now, consider splitting into separate logical assertions
    ip = chain.instances[0].public_ip_address
    t = datetime.now(tz=pytz.UTC)
    # Before the fault: height 1 at time t; after: height 10, ten seconds later.
    mock_ethermint_requests(1, t, "hash", [ip])
    mock_ethermint_requests(
        10, t + timedelta(seconds=10), "hash2",
        [inst.public_ip_address for inst in chain.instances])
    # Mock the /blockchain range query used to collect the block history.
    requests_mock.add(
        requests_mock.GET,
        re.compile(r'http://' + ip + r':46657/blockchain\?minHeight=1\&maxHeight=10'),
        json={
            "result": [
                0,
                {
                    'block_metas': 10 * [{
                        'header': {
                            'app_hash': "",
                            'height': "",
                            'time': t.isoformat()
                        }
                    }]
                }
            ]
        },
        status=200)
    # fake time when a remote machine should report having run it's tc command
    delay_step_time1 = (t + timedelta(seconds=5)).isoformat()
    delay_step_time2 = (t + timedelta(seconds=7)).isoformat()
    delay_step_time3 = (t + timedelta(seconds=7)).isoformat()

    def _side_effect(*args, **kwargs):
        # Dispatch on the remote command being executed over ssh.
        if "get_datetime.sh" in args[0]:
            assert ip in args[0]  # only instance 0 is queried for date
            return t.isoformat()
        elif "run_tcs.sh" in args[0]:
            # Expected invocation: 2 steps of 123ms delay, 1s interval,
            # starting after the per-instance preparation allowance.
            assert "run_tcs.sh 2 123 1 eth0 {}".format((t + timedelta(
                seconds=(NETWORK_FAULT_PREPARATION_TIME_PER_INSTANCE * len(chain.instances)))).isoformat()) in args[0]
            assert any([
                inst.public_ip_address in args[0] for inst in chain.instances
            ])
            # Each instance reports the times its three tc steps ran.
            return "{}\n{}\n{}".format(delay_step_time1, delay_step_time2, delay_step_time3)
        # Any other subprocess invocation is unexpected in this test.
        assert False

    mocksubprocess.side_effect = _side_effect
    result = Chainmanager.get_network_fault(chain, 2, 123, 1)
    # 9 intervals between the 10 mocked blocks, all at identical time t.
    check_history(result["blocktimes"], 9, 0, "", t)
    assert len(result["delay_steps"]) == 3
    # Steps ramp up by 123ms each, then drop back to 0 at the end.
    assert result["delay_steps"] == [(123, delay_step_time1), (246, delay_step_time2), (0, delay_step_time3)]
def test_history_gets_blocks(requests_mock, chain):
    """get_history must surface the block metas returned by /blockchain."""
    ip = chain.instances[0].public_ip_address
    t = datetime.now(tz=pytz.UTC)
    height = 18
    block_meta = {
        'header': {
            'app_hash': "",
            'height': height,
            'time': t.isoformat()
        }
    }
    requests_mock.add(
        requests_mock.GET,
        re.compile(r'http://' + ip +
                   r':46657/blockchain\?minHeight=1\&maxHeight=10'),
        json={"result": [0, {'block_metas': [block_meta] * 10}]},
        status=200)
    history = Chainmanager.get_history(chain, 1, 10)
    # 9 intervals between 10 blocks, zero spread, all at height/time mocked.
    check_history(history, 9, 0, height, t)
def roster(chain_files):
    """Print a YAML roster built from the given serialized chain files.

    :param chain_files: paths to JSON files produced by `create`
    """
    chain_objects = []
    for chain_file in chain_files:
        with open(chain_file, 'r') as json_data:
            chain_objects.append(
                Chain.deserialize(json.loads(json_data.read())))
    # FIX: use the print() function instead of the Python 2 print statement,
    # consistent with every other command in this file and Python 3 compatible.
    print(yaml.dump(Chainmanager().get_roster(chain_objects),
                    default_flow_style=False))
def test_chainmanager_calls_mints(monkeypatch, mockossystem, mocksubprocess,
                                  mockregions, ethermint_version,
                                  mockamibuilder, tmp_files_dir, moto):
    """Creating a network must run ethermint init once and generate one
    tendermint validator per region."""
    # mock out all reading of *mint calls results
    # NOTE(review): this patches in the MagicMock *class* (every call builds a
    # fresh mock); if call assertions are ever needed, use MagicMock() instead.
    monkeypatch.setattr('chainmanager.fill_validators', MagicMock)
    chainmanager = Chainmanager()
    chainmanager.create_ethermint_network(mockregions, ethermint_version)

    calls = mockossystem.call_args_list
    # FIX: list comprehensions instead of filter() — under Python 3 filter()
    # returns an iterator, which has no len(); behavior under Python 2 is
    # unchanged.
    ethermint_calls = [
        call for call in calls
        if all(x in call[0][0] for x in ["ethermint -datadir", "init"])
    ]
    tendermint_calls = [
        call for call in calls
        if "tendermint gen_validator | tail -n +3 > " in call[0][0]
    ]
    assert len(ethermint_calls) == 1
    assert len(tendermint_calls) == len(mockregions)
def chainmanager(monkeypatch, mockossystem, mocksubprocess, mockamibuilder,
                 tmp_files_dir, fake_ethermint_files, moto):
    # generic "all mocked out" instance
    # The fixture parameters are not used directly; requesting them activates
    # the corresponding mocks before the Chainmanager is constructed.
    manager = Chainmanager()
    return manager
def history(chain_file, fromm, to):
    """Print the chain's block history between `fromm` and `to`, one entry
    per line. (`fromm` avoids shadowing the `from` keyword.)"""
    chain = Chain.deserialize(json.loads(chain_file.read()))
    entries = Chainmanager.get_history(chain, fromm, to)
    print("\n".join(str(entry) for entry in entries))
def status(chain_file):
    """Print the chain's status report as JSON."""
    serialized = chain_file.read()
    chain = Chain.deserialize(json.loads(serialized))
    status_report = Chainmanager.get_status(chain)
    print(json.dumps(status_report))
def isalive(chain_file):
    """Print whether the deserialized chain is alive."""
    raw = json.loads(chain_file.read())
    chain = Chain.deserialize(raw)
    print(Chainmanager.isalive(chain))
def test_history_invalid_from_to(chain):
    # A range whose start exceeds its end must be rejected with ValueError.
    with pytest.raises(ValueError):
        Chainmanager.get_history(chain, 123, 1)