def get_private_key(key_path: Path, password_path: Optional[Path] = None) -> Optional[PrivateKey]:
    """Open a JSON-encoded private key and return it.

    If a password file is provided, uses it to decrypt the key. If not, the
    password is asked interactively. Raw hex-encoded private keys are
    supported, but deprecated.

    Args:
        key_path: path to the keystore file (JSON keyfile or raw hex key)
        password_path: optional path to a file containing the decryption
            password; when omitted the password is prompted interactively

    Returns:
        The decrypted private key, or ``None`` on any validation/decryption
        failure (errors are logged, not raised).
    """
    if not key_path:
        log.fatal(f"key_path has to be something but got {key_path}")
        return None
    if not os.path.exists(key_path):
        log.fatal("%s: no such file", key_path)
        return None
    # Refuse world/group-readable key material.
    if not check_permission_safety(key_path):
        log.fatal("Private key file %s must be readable only by its owner.", key_path)
        return None
    if password_path and not check_permission_safety(password_path):
        log.fatal("Password file %s must be readable only by its owner.", password_path)
        return None

    with open(key_path) as keyfile:
        raw_keyfile = keyfile.readline().strip()
        # Deprecated format: a bare hex-encoded 32-byte key on the first line.
        if is_hex(raw_keyfile) and len(decode_hex(raw_keyfile)) == 32:
            log.warning("Private key in raw format. Consider switching to JSON-encoded")
            return PrivateKey(decode_hex(raw_keyfile))

        # JSON keystore format: rewind and parse the whole file.
        keyfile.seek(0)
        try:
            json_data = json.load(keyfile)
            if password_path:
                with open(password_path) as password_file:
                    password = password_file.readline().strip()
            else:
                password = getpass.getpass("Enter the private key password: ")
            # NOTE(review): original text here was redacted/garbled; reconstructed
            # from context — pbkdf2 keyfiles need the password as bytes.
            if json_data["crypto"]["kdf"] == "pbkdf2":
                password = password.encode()  # type: ignore
            return PrivateKey(decode_keyfile_json(json_data, password))
        except ValueError:
            log.fatal("Invalid private key format or password!")
            return None
def monitoring_service_mock() -> Generator[MonitoringService, None, None]:
    """Yield a MonitoringService wired to mocked web3 and contract objects."""
    chain = Web3Mock()

    # User-deposit contract stub: fixed effective balance and token address.
    user_deposit = Mock(address=bytes([8] * 20))
    user_deposit.functions.effectiveBalance.return_value.call.return_value = 10000
    user_deposit.functions.token.return_value.call.return_value = to_checksum_address(
        bytes([7] * 20)
    )

    contract_map = {
        CONTRACT_TOKEN_NETWORK_REGISTRY: Mock(address=bytes([9] * 20)),
        CONTRACT_USER_DEPOSIT: user_deposit,
        CONTRACT_MONITORING_SERVICE: Mock(address=bytes([1] * 20)),
        CONTRACT_SERVICE_REGISTRY: Mock(address=bytes([2] * 20)),
    }
    signing_key = PrivateKey(
        decode_hex("3a1076bf45ab87712ad64ccb3b10217737f7faacbf2872e88fdd9a537d8fe266")
    )

    yield MonitoringService(
        web3=chain,
        private_key=signing_key,
        db_filename=":memory:",
        contracts=contract_map,
        sync_start_block=BlockNumber(0),
        required_confirmations=BlockTimeout(0),
        poll_interval=0,
    )
def pathfinding_service_mock_empty() -> Generator[PathfindingService, None, None]:
    """Yield a PathfindingService on mocked web3/contracts, with the Matrix
    listener patched out; the service is stopped after the test."""
    with patch("pathfinding_service.service.MatrixListener", new=Mock):
        chain = Web3Mock()

        # User-deposit contract stub: fixed effective balance and token address.
        user_deposit = Mock(address=bytes([8] * 20))
        user_deposit.functions.effectiveBalance.return_value.call.return_value = 10000
        user_deposit.functions.token.return_value.call.return_value = to_checksum_address(
            bytes([7] * 20)
        )

        service = PathfindingService(
            web3=chain,
            contracts={
                CONTRACT_TOKEN_NETWORK_REGISTRY: Mock(address=bytes([9] * 20)),
                CONTRACT_USER_DEPOSIT: user_deposit,
            },
            sync_start_block=BlockNumber(0),
            required_confirmations=BlockTimeout(0),
            poll_interval=0,
            private_key=PrivateKey(
                decode_hex("3a1076bf45ab87712ad64ccb3b10217737f7faacbf2872e88fdd9a537d8fe266")
            ),
            db_filename=":memory:",
        )

        yield service
        service.stop()
def build_request_monitoring():
    """Return a factory producing signed RequestMonitoring messages.

    All messages produced by one factory instance share the same non-closing
    signer key.
    """
    non_closing_privkey = PrivateKey(get_random_privkey())
    non_closing_address = private_key_to_address(non_closing_privkey)

    def f(
        chain_id: ChainID = TEST_CHAIN_ID,
        amount: TokenAmount = TokenAmount(50),
        nonce: Nonce = Nonce(1),
        channel_id: ChannelID = ChannelID(1),
    ) -> RequestMonitoring:
        proof = HashedBalanceProof(
            channel_identifier=channel_id,
            token_network_address=TokenNetworkAddress(b"1" * 20),
            chain_id=chain_id,
            nonce=nonce,
            additional_hash="",
            balance_hash=encode_hex(bytes([amount])),
            priv_key=PrivateKey(get_random_privkey()),
        )
        monitoring_request = proof.get_request_monitoring(
            privkey=non_closing_privkey,
            reward_amount=TokenAmount(55),
            monitoring_service_contract_address=TEST_MSC_ADDRESS,
        )
        # Not normally an attribute of RequestMonitoring; attached here purely
        # for convenience in these tests.
        monitoring_request.non_closing_signer = to_checksum_address(non_closing_address)
        return monitoring_request

    return f
def test_account_check(web3, capsys):
    """check_gas_reserve prints a warning when the balance is below reserve."""
    key = PrivateKey(
        decode_hex("0F951D6EAF7685D420AACCA3900127E669892FE5CA6C8E4C572A59B0609AAE6B")
    )
    check_gas_reserve(web3, key)
    captured = capsys.readouterr()
    assert "Your account's balance is below the estimated gas reserve of" in captured.out
def test_get_iou(api_sut: PFSApi, api_url: str, token_network_model: TokenNetwork, make_iou):
    """Exercise the /payment/iou endpoint: missing IOU, successful lookup,
    tampered signature, outdated timestamp, and timezone-aware timestamps."""
    privkey = PrivateKey(get_random_privkey())
    sender = private_key_to_address(privkey)
    url = api_url + f"/v1/{to_checksum_address(token_network_model.address)}/payment/iou"

    def make_params(timestamp: str):
        # Signed request params: the signature covers sender + receiver + timestamp.
        params = {
            "sender": to_checksum_address(sender),
            "receiver": to_checksum_address(api_sut.pathfinding_service.address),
            "timestamp": timestamp,
        }
        local_signer = LocalSigner(private_key=privkey)
        params["signature"] = encode_hex(
            local_signer.sign(
                to_canonical_address(params["sender"])
                + to_canonical_address(params["receiver"])
                + params["timestamp"].encode("utf8")
            )
        )
        return params

    # Request without IOU in database
    params = make_params(datetime.utcnow().isoformat())
    response = requests.get(url, params=params)
    assert response.status_code == 404, response.json()
    assert response.json() == {"last_iou": None}

    # Add IOU to database
    iou = make_iou(
        privkey, api_sut.pathfinding_service.address, one_to_n_address=api_sut.one_to_n_address
    )
    iou.claimed = False
    api_sut.pathfinding_service.database.upsert_iou(iou)

    # Is the returned IOU the one saved into the db?
    response = requests.get(url, params=params)
    assert response.status_code == 200, response.json()
    iou_dict = IOU.Schema(exclude=["claimed"]).dump(iou)
    assert response.json()["last_iou"] == iou_dict

    # Invalid signatures must fail
    params["signature"] = encode_hex((int(params["signature"], 16) + 1).to_bytes(65, "big"))
    response = requests.get(url, params=params)
    assert response.status_code == 400, response.json()
    assert response.json()["error_code"] == exceptions.InvalidSignature.error_code

    # Timestamp must not be too old, to prevent replay attacks
    old_timestamp = datetime.utcnow() - timedelta(days=1)
    params = make_params(old_timestamp.isoformat())
    response = requests.get(url, params=params)
    assert response.status_code == 400, response.json()
    assert response.json()["error_code"] == exceptions.RequestOutdated.error_code

    # Timestamp with timezone info is invalid
    for timestamp in (datetime.now(tz=timezone.utc).isoformat(), "2019-11-07T12:52:25.079Z"):
        params = make_params(timestamp)
        response = requests.get(url, params=params)
        assert response.status_code == 400, response.json()
        assert response.json()["error_code"] == exceptions.InvalidRequest.error_code
def test_sign_privatekey_not_string(get_private_key: Callable, get_accounts: Callable) -> None:
    """sign() raises when the private key is not a string"""
    account = get_accounts(1)[0]
    key = get_private_key(account)
    message = bytes("a" * 32, "ascii")
    with pytest.raises(TypeError):
        sign(PrivateKey(bytes(key, "ascii")), message, v=27)
def test_payment_with_new_iou_rejected(  # pylint: disable=too-many-locals
    api_sut,
    api_url: str,
    addresses: List[Address],
    token_network_model: TokenNetwork,
    make_iou: Callable,
):
    """Regression test for https://github.com/raiden-network/raiden-services/issues/624"""
    initiator_address = to_checksum_address(addresses[0])
    target_address = to_checksum_address(addresses[1])
    url = api_url + "/v1/" + to_checksum_address(token_network_model.address) + "/paths"
    default_params = {
        "from": initiator_address,
        "to": target_address,
        "value": 5,
        "max_paths": 3,
    }

    def request_path_with(status_code=400, **kwargs):
        # POST the default params overridden by kwargs; assert expected status.
        payload = dict(default_params, **kwargs)
        reply = requests.post(url, json=payload)
        assert reply.status_code == status_code, reply.json()
        return reply

    # test with payment
    api_sut.service_fee = 100
    sender = PrivateKey(get_random_privkey())
    iou = make_iou(
        sender,
        api_sut.pathfinding_service.address,
        one_to_n_address=api_sut.one_to_n_address,
        amount=100,
        expiration_block=1_234_567,
    )
    first_iou_dict = iou.Schema().dump(iou)

    second_iou = make_iou(
        sender,
        api_sut.pathfinding_service.address,
        one_to_n_address=api_sut.one_to_n_address,
        amount=200,
        expiration_block=1_234_568,
    )
    second_iou_dict = second_iou.Schema().dump(second_iou)

    # The first IOU is accepted; a second, different IOU must be rejected.
    response = request_path_with(status_code=200, iou=first_iou_dict)
    assert response.status_code == 200

    response = request_path_with(iou=second_iou_dict)
    assert response.json()["error_code"] == exceptions.UseThisIOU.error_code
def test_mr_with_unknown_signatures(context: Context):
    """The signatures are valid but don't belong to the participants."""
    context = setup_state_with_closed_channel(context)

    def expect_ignored(monitor_request):
        # Store the MR, fire the trigger, and verify monitor() was never called.
        context.database.upsert_monitor_request(monitor_request)
        trigger = ActionMonitoringTriggeredEvent(
            token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
            channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
            non_closing_participant=DEFAULT_PARTICIPANT2,
        )
        action_monitoring_triggered_event_handler(trigger, context)
        assert not context.monitoring_service_contract.functions.monitor.called

    expect_ignored(
        create_signed_monitor_request(closing_privkey=PrivateKey(get_random_privkey()))
    )
    expect_ignored(
        create_signed_monitor_request(nonclosing_privkey=PrivateKey(get_random_privkey()))
    )
def _open_keystore(keystore_file: str, password: str) -> PrivateKey:
    """Decrypt the JSON keystore at *keystore_file* and return the private key.

    Exits the process with status 1 when the file cannot be parsed or the
    password does not match.
    """
    with open(keystore_file, mode="r", encoding="utf-8") as keystore:
        try:
            # ValueError covers both malformed JSON and a wrong password.
            decrypted = Account.decrypt(keyfile_json=json.load(keystore), password=password)
            return PrivateKey(bytes(decrypted))
        except ValueError as error:
            log.critical(
                "Could not decode keyfile with given password. Please try again.",
                reason=str(error),
            )
            sys.exit(1)
def get_random_privkey() -> PrivateKey:
    """Return a random private key (a nonzero 256-bit integer, big-endian)."""
    secret_int = random.randint(1, UINT256_MAX)
    return PrivateKey(secret_int.to_bytes(32, byteorder="big"))
def test_pfs_with_mocked_client(  # pylint: disable=too-many-arguments
    web3,
    token_network_registry_contract,
    channel_descriptions_case_1: List,
    get_accounts,
    user_deposit_contract,
    token_network,
    custom_token,
    create_channel,
    get_private_key,
):  # pylint: disable=too-many-locals
    """Instantiates some MockClients and the PathfindingService.

    Mocks blockchain events to setup a token network with a given topology,
    specified in the channel_description fixture. Tests all PFS methods
    w.r.t. that topology.
    """
    clients = get_accounts(7)
    token_network_address = TokenNetworkAddress(to_canonical_address(token_network.address))

    with patch("pathfinding_service.service.MatrixListener", new=Mock):
        pfs = PathfindingService(
            web3=web3,
            contracts={
                CONTRACT_TOKEN_NETWORK_REGISTRY: token_network_registry_contract,
                CONTRACT_USER_DEPOSIT: user_deposit_contract,
            },
            required_confirmations=BlockTimeout(1),
            db_filename=":memory:",
            poll_interval=0.1,
            sync_start_block=BlockNumber(0),
            private_key=PrivateKey(
                decode_hex("3a1076bf45ab87712ad64ccb3b10217737f7faacbf2872e88fdd9a537d8fe266")
            ),
        )

    # greenlet needs to be started and context switched to
    pfs.start()
    pfs.updated.wait(timeout=5)

    # there should be one token network registered
    assert len(pfs.token_networks) == 1

    token_network_model = pfs.token_networks[token_network_address]
    graph = token_network_model.G
    channel_identifiers = []

    # Open a channel and fund both sides for every entry of the topology fixture.
    for (
        p1_index,
        p1_capacity,
        _p1_fee,
        _p1_reveal_timeout,
        _p1_reachability,
        p2_index,
        p2_capacity,
        _p2_fee,
        _p2_reveal_timeout,
        _p2_reachability,
    ) in channel_descriptions_case_1:
        # order is important here because we check order later
        channel_id = create_channel(clients[p1_index], clients[p2_index])[0]
        channel_identifiers.append(channel_id)

        for address, partner_address, amount in [
            (clients[p1_index], clients[p2_index], p1_capacity),
            (clients[p2_index], clients[p1_index], p2_capacity),
        ]:
            if amount == 0:
                continue
            custom_token.functions.mint(amount).transact({"from": address})
            custom_token.functions.approve(token_network.address, amount).transact(
                {"from": address}
            )
            token_network.functions.setTotalDeposit(
                channel_id, address, amount, partner_address
            ).transact({"from": address})
        web3.testing.mine(1)  # 1 confirmation block
    pfs.updated.wait(timeout=5)

    # there should be as many open channels as described
    assert len(token_network_model.channel_id_to_addresses.keys()) == len(
        channel_descriptions_case_1
    )

    # check that deposits, settle_timeout and transfers got registered
    for index in range(len(channel_descriptions_case_1)):
        channel_identifier = channel_identifiers[index]
        p1_address, p2_address = token_network_model.channel_id_to_addresses[channel_identifier]
        view1: ChannelView = graph[p1_address][p2_address]["view"]
        view2: ChannelView = graph[p2_address][p1_address]["view"]
        assert view1.reveal_timeout == DEFAULT_REVEAL_TIMEOUT
        assert view2.reveal_timeout == DEFAULT_REVEAL_TIMEOUT

    # now close all channels
    for (
        index,
        (
            p1_index,
            _p1_capacity,
            _p1_fee,
            _p1_reveal_timeout,
            _p1_reachability,
            p2_index,
            _p2_capacity,
            _p2_fee,
            _p2_reveal_timeout,
            _p2_reachability,
        ),
    ) in enumerate(channel_descriptions_case_1):
        channel_id = channel_identifiers[index]
        # Empty balance proof signed by p2, counter-signed by p1.
        balance_proof = HashedBalanceProof(
            nonce=Nonce(1),
            transferred_amount=0,
            priv_key=get_private_key(clients[p2_index]),
            channel_identifier=channel_id,
            token_network_address=TokenNetworkAddress(
                to_canonical_address(token_network.address)
            ),
            chain_id=TEST_CHAIN_ID,
            additional_hash="0x%064x" % 0,
            locked_amount=0,
            locksroot=encode_hex(LOCKSROOT_OF_NO_LOCKS),
        )
        token_network.functions.closeChannel(
            channel_id,
            clients[p2_index],
            clients[p1_index],
            balance_proof.balance_hash,
            balance_proof.nonce,
            balance_proof.additional_hash,
            balance_proof.signature,
            balance_proof.get_counter_signature(get_private_key(clients[p1_index])),
        ).transact({"from": clients[p1_index], "gas": 200_000})
        web3.testing.mine(1)  # 1 confirmation block
    pfs.updated.wait(timeout=5)

    # there should be no channels
    assert len(token_network_model.channel_id_to_addresses.keys()) == 0
    pfs.stop()
def private_keys() -> List[PrivateKey]:
    """Deterministic private keys for the test nodes, derived via keccak."""
    seed = 14789632
    return [PrivateKey(keccak(seed + index)) for index in range(NUMBER_OF_NODES)]
def test_get_paths_validation(
    api_sut: PFSApi,
    api_url: str,
    addresses: List[Address],
    token_network_model: TokenNetwork,
    make_iou: Callable,
):
    """Exercise input validation and payment error paths of the /paths endpoint."""
    initiator_address = to_checksum_address(addresses[0])
    target_address = to_checksum_address(addresses[1])
    url = api_url + "/v1/" + to_checksum_address(token_network_model.address) + "/paths"
    default_params = {
        "from": initiator_address,
        "to": target_address,
        "value": 5,
        "max_paths": 3,
    }

    def request_path_with(status_code=400, **kwargs):
        # POST the default params overridden by kwargs; assert expected status.
        params = default_params.copy()
        params.update(kwargs)
        response = requests.post(url, json=params)
        assert response.status_code == status_code, response.json()
        return response

    # Missing JSON body is rejected outright.
    response = requests.post(url)
    assert response.status_code == 400
    assert response.json()["errors"].startswith("JSON payload expected")

    # Malformed or non-checksummed addresses are invalid for both endpoints.
    for address in ["notanaddress", to_normalized_address(initiator_address)]:
        response = request_path_with(**{"from": address})
        assert response.json()["error_code"] == exceptions.InvalidRequest.error_code
        assert "from" in response.json()["error_details"]

        response = request_path_with(to=address)
        assert response.json()["error_code"] == exceptions.InvalidRequest.error_code
        assert "to" in response.json()["error_details"]

    response = request_path_with(value=-10)
    assert response.json()["error_code"] == exceptions.InvalidRequest.error_code
    assert "value" in response.json()["error_details"]

    response = request_path_with(max_paths=-1)
    assert response.json()["error_code"] == exceptions.InvalidRequest.error_code
    assert "max_paths" in response.json()["error_details"]

    # successful request without payment
    request_path_with(status_code=200)

    # Exemplary test for payment errors. Different errors are serialized the
    # same way in the rest API. Checking for specific errors is tested in
    # payment_tests.
    api_sut.service_fee = TokenAmount(1)
    response = request_path_with()
    assert response.json()["error_code"] == exceptions.MissingIOU.error_code

    # prepare iou for payment tests
    iou = make_iou(
        PrivateKey(get_random_privkey()),
        api_sut.pathfinding_service.address,
        one_to_n_address=api_sut.one_to_n_address,
    )
    good_iou_dict = iou.Schema().dump(iou)

    # malformed iou: required field missing
    bad_iou_dict = good_iou_dict.copy()
    del bad_iou_dict["amount"]
    response = request_path_with(iou=bad_iou_dict)
    assert response.json()["error_code"] == exceptions.InvalidRequest.error_code

    # malformed iou: wrong value encoding (web3.js-style hex objects)
    bad_iou_dict = {
        "amount": {"_hex": "0x64"},
        "chain_id": {"_hex": "0x05"},
        "expiration_block": {"_hex": "0x188cba"},
        "one_to_n_address": "0x0000000000000000000000000000000000000000",
        "receiver": "0x94DEe8e391410A9ebbA791B187df2d993212c849",
        "sender": "0x2046F7341f15D0211ca1EBeFb19d029c4Bc4c4e7",
        "signature": (
            "0x0c3066e6a954d660028695f96dfe88cabaf0bc8a385e51781ac4d21003d0b6cd7a8b2"
            "a1134115845655d1a509061f48459cd401565b5df7845c913ed329cd2351b"
        ),
    }
    response = request_path_with(iou=bad_iou_dict)
    assert response.json()["error_code"] == exceptions.InvalidRequest.error_code

    # bad signature
    bad_iou_dict = good_iou_dict.copy()
    bad_iou_dict["signature"] = "0x" + "1" * 130
    response = request_path_with(iou=bad_iou_dict)
    assert response.json()["error_code"] == exceptions.InvalidSignature.error_code

    # with successful payment
    request_path_with(iou=good_iou_dict, status_code=200)
from monitoring_service.states import ( Channel, HashedBalanceProof, MonitorRequest, OnChainUpdateStatus, ) from raiden_contracts.constants import ChannelState from raiden_contracts.utils.type_aliases import PrivateKey from raiden_libs.utils import private_key_to_address from tests.constants import TEST_CHAIN_ID, TEST_MSC_ADDRESS DEFAULT_TOKEN_NETWORK_ADDRESS = TokenNetworkAddress(bytes([1] * 20)) DEFAULT_TOKEN_ADDRESS = TokenAddress(bytes([9] * 20)) DEFAULT_CHANNEL_IDENTIFIER = ChannelID(3) DEFAULT_PRIVATE_KEY1 = PrivateKey(decode_hex("0x" + "1" * 64)) DEFAULT_PRIVATE_KEY2 = PrivateKey(decode_hex("0x" + "2" * 64)) DEFAULT_PARTICIPANT1 = private_key_to_address(DEFAULT_PRIVATE_KEY1) DEFAULT_PARTICIPANT2 = private_key_to_address(DEFAULT_PRIVATE_KEY2) DEFAULT_PRIVATE_KEY_OTHER = PrivateKey(decode_hex("0x" + "3" * 64)) DEFAULT_PARTICIPANT_OTHER = private_key_to_address(DEFAULT_PRIVATE_KEY_OTHER) DEFAULT_REWARD_AMOUNT = TokenAmount(1) DEFAULT_SETTLE_TIMEOUT = 100 * 15 # time in seconds def create_signed_monitor_request( chain_id: ChainID = TEST_CHAIN_ID, nonce: Nonce = Nonce(5), reward_amount: TokenAmount = DEFAULT_REWARD_AMOUNT, closing_privkey: PrivateKey = DEFAULT_PRIVATE_KEY1, nonclosing_privkey: PrivateKey = DEFAULT_PRIVATE_KEY2,
def test_crash(tmpdir, mockchain):  # pylint: disable=too-many-locals
    """Process blocks and compare results with/without crash

    A somewhat meaningful crash handling is simulated by not including the
    UpdatedHeadBlockEvent in every block.
    """
    token_address = TokenAddress(bytes([1] * 20))
    token_network_address = TokenNetworkAddress(bytes([2] * 20))
    channel_id = ChannelID(1)
    p1 = Address(bytes([3] * 20))
    p2 = Address(bytes([4] * 20))
    # One inner list per block; head-block events are deliberately sparse so
    # the "crashy" instance must recover state after each simulated crash.
    events = [
        [
            ReceiveTokenNetworkCreatedEvent(
                token_address=token_address,
                token_network_address=token_network_address,
                settle_timeout=DEFAULT_TOKEN_NETWORK_SETTLE_TIMEOUT,
                block_number=BlockNumber(1),
            )
        ],
        [UpdatedHeadBlockEvent(BlockNumber(2))],
        [
            ReceiveChannelOpenedEvent(
                token_network_address=token_network_address,
                channel_identifier=channel_id,
                participant1=p1,
                participant2=p2,
                block_number=BlockNumber(3),
            )
        ],
        [UpdatedHeadBlockEvent(BlockNumber(4))],
    ]
    mockchain(events)

    server_private_key = PrivateKey(get_random_privkey())
    contracts = {
        CONTRACT_TOKEN_NETWORK_REGISTRY: ContractMock(),
        CONTRACT_USER_DEPOSIT: ContractMock(),
    }

    def new_service(filename):
        # Fresh PathfindingService backed by its own on-disk database.
        service = PathfindingService(
            web3=Web3Mock(),
            private_key=server_private_key,
            contracts=contracts,
            sync_start_block=BlockNumber(0),
            required_confirmations=BlockTimeout(0),
            poll_interval=0,
            db_filename=os.path.join(tmpdir, filename),
        )
        return service

    # initialize stable service
    stable_service = new_service("stable.db")

    # process each block and compare results between crashy and stable service
    for to_block in range(len(events)):
        crashy_service = new_service("crashy.db")  # new instance to simulate crash
        result_state: List[dict] = []
        for service in [stable_service, crashy_service]:
            service._process_new_blocks(BlockNumber(to_block))  # pylint: disable=protected-access
            result_state.append(dict(db_dump=list(service.database.conn.iterdump())))

        # both instances should have the same state after processing
        for stable_state, crashy_state in zip(result_state[0].values(), result_state[1].values()):
            if isinstance(stable_state, BlockchainState):
                assert stable_state.chain_id == crashy_state.chain_id
                assert (
                    stable_state.token_network_registry_address
                    == crashy_state.token_network_registry_address
                )
                assert stable_state.latest_committed_block == crashy_state.latest_committed_block
                assert (
                    stable_state.monitor_contract_address == crashy_state.monitor_contract_address
                )
                # Do not compare `current_event_filter_interval`, this is allowed to be different
            else:
                assert stable_state == crashy_state

        crashy_service.database.conn.close()  # close the db connection so we can access it again
def test_crash(
    tmpdir, get_accounts, get_private_key, mockchain
):  # pylint: disable=too-many-locals
    """Process blocks and compare results with/without crash

    A somewhat meaningful crash handling is simulated by not including the
    UpdatedHeadBlockEvent in every block.
    """
    channel_identifier = ChannelID(3)
    c1, c2 = get_accounts(2)
    token_network_address = TokenNetworkAddress(to_canonical_address(get_random_address()))
    balance_proof = HashedBalanceProof(
        nonce=Nonce(1),
        transferred_amount=TokenAmount(2),
        priv_key=get_private_key(c1),
        channel_identifier=channel_identifier,
        token_network_address=token_network_address,
        chain_id=ChainID(61),
        additional_hash="0x%064x" % 0,
        locked_amount=0,
        locksroot=encode_hex(LOCKSROOT_OF_NO_LOCKS),
    )
    monitor_request = balance_proof.get_monitor_request(
        get_private_key(c2), reward_amount=TokenAmount(0), msc_address=TEST_MSC_ADDRESS
    )

    # One inner list per block; head-block events are deliberately sparse so
    # the "crashy" instance must recover state after each simulated crash.
    events = [
        [
            ReceiveChannelOpenedEvent(
                token_network_address=token_network_address,
                channel_identifier=channel_identifier,
                participant1=c1,
                participant2=c2,
                block_number=BlockNumber(0),
            )
        ],
        [UpdatedHeadBlockEvent(BlockNumber(1))],
        [
            ActionMonitoringTriggeredEvent(
                token_network_address=token_network_address,
                channel_identifier=channel_identifier,
                non_closing_participant=c2,
            )
        ],
        [UpdatedHeadBlockEvent(BlockNumber(3))],
    ]
    mockchain(events)

    server_private_key = PrivateKey(get_random_privkey())
    contracts = {
        CONTRACT_TOKEN_NETWORK_REGISTRY: ContractMock(),
        CONTRACT_MONITORING_SERVICE: ContractMock(),
        CONTRACT_USER_DEPOSIT: ContractMock(),
        CONTRACT_SERVICE_REGISTRY: ContractMock(),
    }

    def new_ms(filename):
        # Fresh MonitoringService with a mocked monitor() transact call so
        # on-chain submissions can be asserted without a real contract.
        ms = MonitoringService(
            web3=Web3Mock(),
            private_key=server_private_key,
            contracts=contracts,
            db_filename=os.path.join(tmpdir, filename),
            poll_interval=0,
            required_confirmations=BlockTimeout(0),
            sync_start_block=BlockNumber(0),
        )
        msc = Mock()
        ms.context.monitoring_service_contract = msc
        ms.monitor_mock = msc.functions.monitor.return_value.transact
        ms.monitor_mock.return_value = bytes(0)
        return ms

    # initialize both monitoring services
    stable_ms = new_ms("stable.db")
    crashy_ms = new_ms("crashy.db")
    for ms in [stable_ms, crashy_ms]:
        # mock database time to make results reproducible
        ms.database.conn.create_function("CURRENT_TIMESTAMP", 1, lambda: "2000-01-01")
        ms.database.conn.execute(
            "INSERT INTO token_network (address, settle_timeout) VALUES (?, ?)",
            [to_checksum_address(token_network_address), DEFAULT_TOKEN_NETWORK_SETTLE_TIMEOUT],
        )
        ms.context.ms_state.blockchain_state.token_network_addresses = [token_network_address]
        ms.database.upsert_monitor_request(monitor_request)
        ms.database.conn.commit()

    # process each block and compare results between crashy and stable ms
    for to_block in range(len(events)):
        crashy_ms = new_ms("crashy.db")  # new instance to simulate crash
        stable_ms.monitor_mock.reset_mock()  # clear calls from last block
        result_state: List[dict] = []
        for ms in [stable_ms, crashy_ms]:
            ms._process_new_blocks(BlockNumber(to_block))  # pylint: disable=protected-access
            result_state.append(
                dict(
                    blockchain_state=ms.context.ms_state.blockchain_state,
                    db_dump=list(ms.database.conn.iterdump()),
                    monitor_calls=ms.monitor_mock.mock_calls,
                )
            )

        # both instances should have the same state after processing
        for stable_state, crashy_state in zip(result_state[0].values(), result_state[1].values()):
            if isinstance(stable_state, BlockchainState):
                assert stable_state.chain_id == crashy_state.chain_id
                assert (
                    stable_state.token_network_registry_address
                    == crashy_state.token_network_registry_address
                )
                assert stable_state.latest_committed_block == crashy_state.latest_committed_block
                assert (
                    stable_state.monitor_contract_address == crashy_state.monitor_contract_address
                )
                # Do not compare `current_event_filter_interval`, this is allowed to be different
            else:
                assert stable_state == crashy_state
Locksroot, PrivateKey, Signature, ) UINT256_MAX = 2**256 - 1 NOT_ADDRESS = "0xaaa" FAKE_ADDRESS = HexAddress(HexStr("0x00112233445566778899aabbccddeeff00112233")) EMPTY_HEXADDRESS = "0x0000000000000000000000000000000000000000" EMPTY_BALANCE_HASH = BalanceHash(b"\x00" * 32) EMPTY_ADDITIONAL_HASH = AdditionalHash(b"\x00" * 32) EMPTY_LOCKSROOT = Locksroot(b"\x00" * 32) EMPTY_SIGNATURE = Signature(b"\x00" * 65) passphrase = "0" FAUCET_PRIVATE_KEY = PrivateKey( decode_hex( "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")) FAUCET_ADDRESS = private_key_to_address(FAUCET_PRIVATE_KEY) FAUCET_ALLOWANCE = 100 * int(units["ether"]) DEPLOYER_ADDRESS = FAUCET_ADDRESS NONEXISTENT_LOCKSROOT = b"\x00" * 32 SECONDS_PER_DAY = 60 * 60 * 24 # Constants for ServiceRegistry testing SERVICE_DEPOSIT = 5000 * (10**18) DEFAULT_BUMP_NUMERATOR = 6 DEFAULT_BUMP_DENOMINATOR = 5 DEFAULT_DECAY_CONSTANT = 200 * SECONDS_PER_DAY DEFAULT_REGISTRATION_DURATION = 180 * SECONDS_PER_DAY DEFAULT_MIN_PRICE = 1000