def test_waiting_messages(pathfinding_service_mock):
    """Waiting messages are stored and popped exactly once per channel."""
    privkey, signer_address = make_privkey_address()
    tn_address = TokenNetworkAddress(b"1" * 20)
    chan_id = ChannelID(1)

    # register token network internally
    db = pathfinding_service_mock.database
    db.conn.execute(
        "INSERT INTO token_network(address) VALUES (?)",
        [to_checksum_address(tn_address)],
    )

    def make_canonical_id():
        # Fresh identifier per message, mirroring the per-message construction.
        return CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=tn_address,
            channel_identifier=chan_id,
        )

    fee_update = PFSFeeUpdate(
        canonical_identifier=make_canonical_id(),
        updating_participant=signer_address,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    fee_update.sign(LocalSigner(privkey))

    capacity_update = PFSCapacityUpdate(
        canonical_identifier=make_canonical_id(),
        updating_participant=make_address(),
        other_participant=make_address(),
        updating_nonce=Nonce(1),
        other_nonce=Nonce(1),
        updating_capacity=TokenAmount(100),
        other_capacity=TokenAmount(111),
        reveal_timeout=50,
        signature=EMPTY_SIGNATURE,
    )
    capacity_update.sign(LocalSigner(privkey))

    def pop_all():
        return list(
            db.pop_waiting_messages(token_network_address=tn_address, channel_id=chan_id)
        )

    for message in (fee_update, capacity_update):
        db.insert_waiting_message(message)
        # the message must come back exactly once ...
        popped = pop_all()
        assert len(popped) == 1
        assert popped[0] == message
        # ... and a second pop yields nothing
        assert len(pop_all()) == 0
def populate_token_network_random(
    token_network_model: TokenNetwork, private_keys: List[str]
) -> None:
    """Populate the token network model with pseudo-random channels.

    Opens NUMBER_OF_CHANNELS channels between random pairs drawn from
    ``private_keys``, deposits a random amount on each side and feeds the
    corresponding balance-update (UpdatePFS) messages into the model.

    Improvement: the two near-identical UpdatePFS constructions were
    duplicated inline; they are now built by one local helper.
    """
    # seed for pseudo-randomness from config constant, that changes from time to time
    random.seed(NUMBER_OF_CHANNELS)

    def apply_balance_update(
        channel_id, updater, other, updating_nonce, other_nonce, updating_capacity, other_capacity
    ):
        # Build one UpdatePFS message for the channel and apply it to the model.
        token_network_model.handle_channel_balance_update_message(
            UpdatePFS(
                canonical_identifier=CanonicalIdentifier(
                    chain_identifier=ChainID(1),
                    channel_identifier=channel_id,
                    token_network_address=TokenNetworkAddressBytes(
                        decode_hex(token_network_model.address)
                    ),
                ),
                updating_participant=decode_hex(updater),
                other_participant=decode_hex(other),
                updating_nonce=updating_nonce,
                other_nonce=other_nonce,
                updating_capacity=updating_capacity,
                other_capacity=other_capacity,
                reveal_timeout=2,
                mediation_fee=FeeAmount(0),
            )
        )

    for channel_id_int in range(NUMBER_OF_CHANNELS):
        channel_id = ChannelID(channel_id_int)

        private_key1, private_key2 = random.sample(private_keys, 2)
        address1 = Address(private_key_to_address(private_key1))
        address2 = Address(private_key_to_address(private_key2))
        settle_timeout = 15
        token_network_model.handle_channel_opened_event(
            channel_id, address1, address2, settle_timeout
        )

        # deposit to channels
        deposit1 = TokenAmount(random.randint(0, 1000))
        deposit2 = TokenAmount(random.randint(0, 1000))
        # re-read participant order as stored by the model
        address1, address2 = token_network_model.channel_id_to_addresses[channel_id]
        token_network_model.handle_channel_new_deposit_event(channel_id, address1, deposit1)
        token_network_model.handle_channel_new_deposit_event(channel_id, address2, deposit2)

        # One balance update per direction; nonces (1,1) then (2,1) as before.
        apply_balance_update(
            channel_id, address1, address2, Nonce(1), Nonce(1), deposit1, deposit2
        )
        apply_balance_update(
            channel_id, address2, address1, Nonce(2), Nonce(1), deposit2, deposit1
        )
def fetch_all_events_for_a_deployment(
    contract_manager: ContractManager,
    web3: Web3,
    token_network_registry_address: TokenNetworkRegistryAddress,
    secret_registry_address: SecretRegistryAddress,
    start_block: BlockNumber,
    target_block: BlockNumber,
) -> Iterable[Dict]:
    """ Read all the events of a whole deployment, starting at the network registry,
    and following the registered networks.
    """
    event_source = BlockchainEvents(
        web3=web3,
        chain_id=ChainID(web3.eth.chainId),
        contract_manager=contract_manager,
        last_fetched_block=start_block,
        event_filters=[
            token_network_registry_events(token_network_registry_address, contract_manager),
            secret_registry_events(secret_registry_address, contract_manager),
        ],
        block_batch_size_config=BlockBatchSizeConfig(),
    )
    while event_source.last_fetched_block < target_block:
        batch = event_source.fetch_logs_in_batch(target_block)
        if batch is None:
            # No blocks could be fetched (due to timeout), retry
            continue
        yield from (event.event_data for event in batch.events)
def get_updatepfs_message(
    updating_participant: Address,
    other_participant: Address,
    chain_identifier=ChainID(1),
    channel_identifier=ChannelID(0),
    token_network_address: TokenNetworkAddressBytes = DEFAULT_TOKEN_NETWORK_ADDRESS_BYTES,
    updating_nonce=Nonce(1),
    other_nonce=Nonce(0),
    updating_capacity=TokenAmount(90),
    other_capacity=TokenAmount(110),
    reveal_timeout: int = 2,
    mediation_fee: FeeAmount = FeeAmount(0),
    privkey_signer: bytes = PRIVAT_KEY_EXAMPLE_1,
) -> UpdatePFS:
    """Build and sign an UpdatePFS capacity message for tests."""
    message = UpdatePFS(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_identifier,
            channel_identifier=channel_identifier,
            token_network_address=token_network_address,
        ),
        updating_participant=decode_hex(updating_participant),
        other_participant=decode_hex(other_participant),
        updating_nonce=updating_nonce,
        other_nonce=other_nonce,
        updating_capacity=updating_capacity,
        other_capacity=other_capacity,
        reveal_timeout=reveal_timeout,
        mediation_fee=mediation_fee,
    )
    message.sign(LocalSigner(privkey_signer))
    return message
def test_pfs_rejects_capacity_update_with_wrong_channel_identifier(
    pathfinding_service_web3_mock,
):
    """A capacity update for a channel id the PFS never saw must be rejected."""
    service = pathfinding_service_web3_mock
    service.chain_id = ChainID(1)

    token_network = TokenNetwork(token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS)
    service.token_networks[token_network.address] = token_network

    participant1 = private_key_to_address(PRIVAT_KEY_EXAMPLE_1)
    participant2 = private_key_to_address(PRIVAT_KEY_EXAMPLE_2)
    token_network.handle_channel_opened_event(
        channel_identifier=ChannelID(0),
        participant1=participant1,
        participant2=participant2,
        settle_timeout=15,
    )
    # Check that the new channel has id == 0
    assert token_network.channel_id_to_addresses[ChannelID(0)] == (participant1, participant2)

    # Channel 35 was never opened, so this update must be rejected.
    message = get_updatepfs_message(
        channel_identifier=ChannelID(35),
        updating_participant=participant1,
        other_participant=participant2,
        privkey_signer=PRIVAT_KEY_EXAMPLE_1,
    )
    with pytest.raises(InvalidCapacityUpdate) as exinfo:
        service.on_pfs_update(message)
    assert "unknown channel identifier in token network" in str(exinfo.value)
def f(
    chain_id: ChainID = ChainID(1),
    amount: TokenAmount = TokenAmount(50),
    nonce: Nonce = Nonce(1),
) -> RequestMonitoring:
    """Build a signed RequestMonitoring message for these tests."""
    proof = HashedBalanceProof(
        channel_identifier=ChannelID(1),
        token_network_address=TokenNetworkAddress(b"1" * 20),
        chain_id=chain_id,
        nonce=nonce,
        additional_hash="",
        balance_hash=encode_hex(bytes([amount])),
        priv_key=get_random_privkey(),
    )
    monitor_request = proof.get_request_monitoring(
        privkey=non_closing_privkey,
        reward_amount=TokenAmount(55),
        monitoring_service_contract_address=Address(bytes([11] * 20)),
    )
    # usually not a property of RequestMonitoring, but added for convenience in these tests
    monitor_request.non_closing_signer = to_checksum_address(  # type: ignore
        non_closing_address
    )
    return monitor_request
def test_serialize_contract_send_subclass(chain_state):
    """Serializing must preserve class

    Regression test for https://github.com/raiden-network/raiden/issues/6075
    """
    pending = ContractSendChannelClose(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=TokenNetworkAddress(factories.make_address()),
            channel_identifier=factories.make_channel_identifier(),
        ),
        triggered_by_block_hash=factories.make_block_hash(),
        balance_proof=None,
    )
    chain_state.pending_transactions = [pending]

    roundtripped = JSONSerializer.deserialize(JSONSerializer.serialize(chain_state))

    # The concrete subclass must survive the round trip, not just the base.
    original_cls = chain_state.pending_transactions[0].__class__.__name__
    restored_cls = roundtripped.pending_transactions[0].__class__.__name__
    assert original_cls == restored_cls
    assert chain_state == roundtripped
def connect_to_blockchain(
    eth_rpc: str, used_contracts: List[str], address_overwrites: Dict[str, Address]
) -> Tuple[Web3, Dict[str, Contract], BlockNumber]:
    """Connect to the Ethereum node at ``eth_rpc`` and build contract proxies.

    Exits the process when no connection can be established.
    """
    try:
        provider = HTTPProvider(eth_rpc)
        web3 = Web3(provider)
        # Will throw ConnectionError on bad Ethereum client
        chain_id = ChainID(int(web3.net.version))
    except requests.exceptions.ConnectionError:
        log.error(
            "Can not connect to the Ethereum client. Please check that it is running and that "
            "your settings are correct.",
            eth_rpc=eth_rpc,
        )
        sys.exit(1)

    # Add POA middleware for geth POA chains, no/op for other chains
    web3.middleware_stack.inject(geth_poa_middleware, layer=0)

    # give web3 some time between retries before failing
    provider.middlewares.replace("http_retry_request", http_retry_with_backoff_middleware)

    addresses, start_block = get_contract_addresses_and_start_block(
        chain_id=chain_id,
        contracts=used_contracts,
        address_overwrites=address_overwrites,
    )
    contracts = {
        name: web3.eth.contract(abi=CONTRACT_MANAGER.get_contract_abi(name), address=address)
        for name, address in addresses.items()
    }
    return web3, contracts, start_block
def get_fee_update_message(  # pylint: disable=too-many-arguments
    updating_participant: Address,
    chain_id=ChainID(61),
    channel_identifier=DEFAULT_CHANNEL_ID,
    token_network_address: TokenNetworkAddress = DEFAULT_TOKEN_NETWORK_ADDRESS,
    fee_schedule: "Optional[FeeScheduleState]" = None,
    timestamp: "Optional[datetime]" = None,
    privkey_signer: bytes = PRIVATE_KEY_1,
) -> PFSFeeUpdate:
    """Build and sign a PFSFeeUpdate message for tests.

    Bug fix: the previous defaults ``FeeScheduleState(...)`` and
    ``datetime.utcnow()`` were evaluated once at function-definition time,
    so every call shared one mutable fee schedule object and a timestamp
    frozen at import. Both now use a ``None`` sentinel and are created
    fresh per call; explicit arguments behave exactly as before.
    """
    if fee_schedule is None:
        fee_schedule = FeeScheduleState(
            cap_fees=True, flat=FeeAmount(1), proportional=ProportionalFeeAmount(1)
        )
    if timestamp is None:
        timestamp = datetime.utcnow()
    fee_message = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_id,
            channel_identifier=channel_identifier,
            token_network_address=token_network_address,
        ),
        updating_participant=updating_participant,
        fee_schedule=fee_schedule,
        timestamp=timestamp,
        signature=EMPTY_SIGNATURE,
    )
    fee_message.sign(LocalSigner(privkey_signer))
    return fee_message
def get_capacity_update_message(  # pylint: disable=too-many-arguments
    updating_participant: Address,
    other_participant: Address,
    chain_id=ChainID(61),
    channel_identifier=DEFAULT_CHANNEL_ID,
    token_network_address: TokenNetworkAddress = DEFAULT_TOKEN_NETWORK_ADDRESS,
    updating_nonce=Nonce(1),
    other_nonce=Nonce(0),
    updating_capacity=TA(90),
    other_capacity=TA(110),
    reveal_timeout: BlockTimeout = BlockTimeout(2),
    privkey_signer: bytes = PRIVATE_KEY_1,
) -> PFSCapacityUpdate:
    """Build and sign a PFSCapacityUpdate message for tests."""
    message = PFSCapacityUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_id,
            channel_identifier=channel_identifier,
            token_network_address=token_network_address,
        ),
        updating_participant=updating_participant,
        other_participant=other_participant,
        updating_nonce=updating_nonce,
        other_nonce=other_nonce,
        updating_capacity=updating_capacity,
        other_capacity=other_capacity,
        reveal_timeout=reveal_timeout,
        signature=EMPTY_SIGNATURE,
    )
    message.sign(LocalSigner(privkey_signer))
    return message
def test_update_fee(order, pathfinding_service_mock, token_network_model):
    """Fee updates are applied regardless of arrival order vs. channel open."""
    service = pathfinding_service_mock
    service.database.insert("token_network", dict(address=token_network_model.address))
    if order == "normal":
        setup_channel(service, token_network_model)

    fee_schedule = FeeScheduleState(
        flat=FeeAmount(1),
        proportional=ProportionalFeeAmount(int(0.1e9)),
        imbalance_penalty=[
            (TokenAmount(0), FeeAmount(0)),
            (TokenAmount(10), FeeAmount(10)),
        ],
    )
    fee_update = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=token_network_model.address,
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        fee_schedule=fee_schedule,
        timestamp=datetime.now(timezone.utc),
        signature=EMPTY_SIGNATURE,
    )
    fee_update.sign(LocalSigner(PARTICIPANT1_PRIVKEY))
    service.handle_message(fee_update)

    if order == "fee_update_before_channel_open":
        setup_channel(service, token_network_model)

    # The stored sender-side fee schedule must match the one we sent.
    channel_view = token_network_model.G[PARTICIPANT1][PARTICIPANT2]["view"]
    for attribute in ("flat", "proportional", "imbalance_penalty"):
        assert getattr(channel_view.fee_schedule_sender, attribute) == getattr(
            fee_schedule, attribute
        )
def main(
    private_key: str,
    state_db: str,
    web3: Web3,
    contracts: Dict[str, Contract],
    start_block: BlockNumber,
    rdn_per_eth: float,
    expires_within: BlockNumber,
) -> None:
    """Claim all IOUs that become claimable within ``expires_within`` blocks."""
    database = PFSDatabase(
        filename=state_db,
        chain_id=ChainID(web3.eth.chainId),
        pfs_address=private_key_to_address(private_key),
        sync_start_block=start_block,
    )
    claim_cost_rdn = calc_claim_cost_rdn(web3, rdn_per_eth)

    ious = list(
        get_claimable_ious(
            database,
            expires_after=web3.eth.blockNumber,
            expires_before=BlockNumber(web3.eth.blockNumber + expires_within),
            claim_cost_rdn=claim_cost_rdn,
        )
    )
    print(f"Found {len(ious)} claimable IOUs")

    _, failures = claim_ious(
        ious, claim_cost_rdn, contracts[CONTRACT_ONE_TO_N], web3, database
    )
    if failures:
        sys.exit(1)
def __init__(self, jsonrpc_client: JSONRPCClient, contract_manager: ContractManager):
    """Set up empty proxy caches and one creation lock per proxy type."""
    self.client = jsonrpc_client
    self.contract_manager = contract_manager

    # Proxy caches, keyed by on-chain address; payment channels are keyed
    # by (token network address, channel id).
    self.address_to_discovery: Dict[Address, Discovery] = dict()
    self.address_to_secret_registry: Dict[Address, SecretRegistry] = dict()
    self.address_to_token: Dict[Address, Token] = dict()
    self.address_to_token_network: Dict[TokenNetworkAddress, TokenNetwork] = dict()
    self.address_to_token_network_registry: Dict[Address, TokenNetworkRegistry] = dict()
    self.address_to_user_deposit: Dict[Address, UserDeposit] = dict()
    self.address_to_service_registry: Dict[Address, ServiceRegistry] = dict()
    self.identifier_to_payment_channel: Dict[
        Tuple[TokenNetworkAddress, ChannelID], PaymentChannel
    ] = dict()

    # Ask for the network id only once and store it here
    self.network_id = ChainID(int(self.client.web3.version.network))

    # One semaphore per proxy type — presumably guards concurrent proxy
    # creation for the same address (confirm against the accessor methods).
    self._token_creation_lock = Semaphore()
    self._discovery_creation_lock = Semaphore()
    self._token_network_creation_lock = Semaphore()
    self._token_network_registry_creation_lock = Semaphore()
    self._secret_registry_creation_lock = Semaphore()
    self._service_registry_creation_lock = Semaphore()
    self._payment_channel_creation_lock = Semaphore()
    self._user_deposit_creation_lock = Semaphore()
def get_updatepfs_message(  # pylint: disable=too-many-arguments
    updating_participant: Address,
    other_participant: Address,
    chain_identifier=ChainID(1),
    channel_identifier=DEFAULT_CHANNEL_ID,
    token_network_address: TokenNetworkAddress = DEFAULT_TOKEN_NETWORK_ADDRESS,
    updating_nonce=Nonce(1),
    other_nonce=Nonce(0),
    updating_capacity=TokenAmount(90),
    other_capacity=TokenAmount(110),
    reveal_timeout: int = 2,
    mediation_fee: FeeAmount = FeeAmount(0),
    privkey_signer: bytes = PRIVATE_KEY_1,
) -> UpdatePFS:
    """Build and sign an UpdatePFS message for tests."""
    message = UpdatePFS(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_identifier,
            channel_identifier=channel_identifier,
            token_network_address=token_network_address,
        ),
        updating_participant=updating_participant,
        other_participant=other_participant,
        updating_nonce=updating_nonce,
        other_nonce=other_nonce,
        updating_capacity=updating_capacity,
        other_capacity=other_capacity,
        reveal_timeout=reveal_timeout,
        mediation_fee=mediation_fee,
        signature=EMPTY_SIGNATURE,
    )
    message.sign(LocalSigner(privkey_signer))
    return message
def check_ethereum_network_id(given_network_id: ChainID, web3: Web3) -> None:
    """ Takes the given network id and checks it against the connected network

    If they don't match, exits the program with an error. If they do adds it
    to the configuration and then returns it and whether it is a known network
    """
    node_network_id = ChainID(int(web3.version.network))  # pylint: disable=no-member

    if node_network_id != given_network_id:
        given_name = ID_TO_NETWORKNAME.get(given_network_id)
        network_name = ID_TO_NETWORKNAME.get(node_network_id)
        given_description = f'{given_name or "Unknown"} (id {given_network_id})'
        network_description = f'{network_name or "Unknown"} (id {node_network_id})'

        # TODO: fix cyclic import
        from raiden.ui.cli import ETH_NETWORKID_OPTION

        raise RaidenError(
            f"The configured network {given_description} differs "
            f"from the Ethereum client's network {network_description}. The "
            # Bug fix: the flag name and the next sentence used to be joined
            # without any separator ("...--network-idPlease check...").
            f"network_id can be configured using the flag {ETH_NETWORKID_OPTION}. "
            f"Please check your settings."
        )
def from_dict(cls, data: Dict[str, Any]) -> "CanonicalIdentifier":
    """Deserialize a CanonicalIdentifier from its dict representation."""
    chain_id = ChainID(int(data["chain_identifier"]))
    network_address = TokenNetworkAddress(to_bytes(hexstr=data["token_network_address"]))
    channel_id = ChannelID(int(data["channel_identifier"]))
    return cls(
        chain_identifier=chain_id,
        token_network_address=network_address,
        channel_identifier=channel_id,
    )
def test_get_ious_via_debug_endpoint(
    api_sut_with_debug: ServiceApi, api_url: str, addresses: List[Address]
):
    """The IOU debug endpoint reports stored IOUs per sender, {} otherwise."""
    hex_addrs = [to_checksum_address(addr) for addr in addresses]
    iou = IOU(
        sender=addresses[0],
        receiver=addresses[4],
        amount=TokenAmount(111),
        expiration_block=BlockNumber(7619644),
        signature=Signature(
            decode_hex("118a93e9fd0a3a1c3d6edbad194b5c9d95715c754881d80e23e985793b1e13de")
        ),
        claimed=False,
        chain_id=ChainID(1),
        one_to_n_address=api_sut_with_debug.one_to_n_address,
    )
    api_sut_with_debug.pathfinding_service.database.upsert_iou(iou)

    def fetch_ious_for(sender_hex):
        response = requests.get(api_url + f"/_debug/ious/{sender_hex}")
        assert response.status_code == 200
        return response.json()

    # now there must be an iou debug endpoint for a request of a sender in the database
    assert fetch_ious_for(hex_addrs[0]) == {
        "sender": hex_addrs[0],
        "amount": 111,
        "expiration_block": 7619644,
    }
    # but there is no iou debug endpoint for a request of a sender not in the database
    assert fetch_ious_for(hex_addrs[1]) == {}
def create_signed_monitor_request(
    nonce: Nonce = Nonce(5),
    reward_amount: TokenAmount = DEFAULT_REWARD_AMOUNT,
    closing_privkey: str = DEFAULT_PRIVATE_KEY1,
    nonclosing_privkey: str = DEFAULT_PRIVATE_KEY2,
) -> MonitorRequest:
    """Build a MonitorRequest signed by both channel participants."""
    balance_proof = HashedBalanceProof(
        channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        chain_id=ChainID(1),
        balance_hash="",
        nonce=nonce,
        additional_hash="",
        priv_key=closing_privkey,
    )
    request = balance_proof.get_monitor_request(
        privkey=nonclosing_privkey,
        reward_amount=reward_amount,
        msc_address=TEST_MSC_ADDRESS,
    )

    # Some signature correctness checks
    expected_non_closer = private_key_to_address(nonclosing_privkey)
    assert request.signer == private_key_to_address(closing_privkey)
    assert request.non_closing_signer == expected_non_closer
    assert request.reward_proof_signer == expected_non_closer
    return request
def _get_all_token_network_events(
    contract_manager: ContractManager,
    web3: Web3,
    token_network_address: TokenNetworkAddress,
    start_block: BlockNumber,
    target_block: BlockNumber,
) -> Iterable[Dict]:
    """ Read all TokenNetwork events up to the current confirmed head. """
    event_source = BlockchainEvents(
        web3=web3,
        chain_id=ChainID(web3.eth.chainId),
        contract_manager=contract_manager,
        last_fetched_block=start_block,
        event_filter=RaidenContractFilter(token_network_addresses={token_network_address}),
        block_batch_size_config=BlockBatchSizeConfig(),
        node_address=Address(b"1" * 20),  # only relevant if filtering for channels
    )
    while event_source.last_fetched_block < target_block:
        batch = event_source.fetch_logs_in_batch(target_block)
        if batch is None:
            # No blocks could be fetched (due to timeout), retry
            continue
        yield from (event.event_data for event in batch.events)
def main(
    private_key: str,
    state_db: str,
    web3: Web3,
    contracts: Dict[str, Contract],
    start_block: BlockNumber,
    rdn_per_eth: float,
    expires_within: BlockNumber,
) -> None:
    """Claim all IOUs expiring within ``expires_within`` blocks from now."""
    database = PFSDatabase(
        filename=state_db,
        chain_id=ChainID(int(web3.net.version)),
        pfs_address=private_key_to_address(private_key),
    )

    # Gas cost of a single claim in ETH-wei terms, converted into RDN
    # using the given exchange rate.
    claim_cost_eth = 90897
    claim_cost_rdn = TokenAmount(int(claim_cost_eth / rdn_per_eth))

    ious = list(
        get_claimable_ious(
            database,
            expires_before=web3.eth.blockNumber + expires_within,
            claim_cost_rdn=claim_cost_rdn,
        )
    )
    print(f"Found {len(ious)} claimable IOUs")

    _, failures = claim_ious(
        ious, claim_cost_rdn, contracts[CONTRACT_ONE_TO_N], web3, database
    )
    if failures:
        sys.exit(1)
def _get_onchain_locksroots(
    raiden: "RaidenService",
    storage: SQLiteStorage,
    token_network: Dict[str, Any],
    channel: Dict[str, Any],
) -> Tuple[Locksroot, Locksroot]:
    """Fetch both participants' on-chain locksroots for the given channel."""
    channel_new_state_change = _find_channel_new_state_change(
        storage=storage,
        token_network_address=token_network["address"],
        channel_identifier=channel["identifier"],
    )
    if not channel_new_state_change:
        raise RaidenUnrecoverableError(
            f'Could not find the state change for channel {channel["identifier"]}, '
            f'token network address: {token_network["address"]} being created. '
        )

    canonical_identifier = CanonicalIdentifier(
        chain_identifier=ChainID(-1),  # chain id is irrelevant for this lookup
        token_network_address=TokenNetworkAddress(
            to_canonical_address(token_network["address"])
        ),
        channel_identifier=ChannelID(int(channel["identifier"])),
    )
    return get_onchain_locksroots(
        chain=raiden.chain,
        canonical_identifier=canonical_identifier,
        participant1=to_canonical_address(channel["our_state"]["address"]),
        participant2=to_canonical_address(channel["partner_state"]["address"]),
        block_identifier="latest",
    )
def test_events_loaded_from_storage_should_deserialize(tmp_path):
    """Events written to storage must come back equal after deserialization."""
    db_path = Path(f"{tmp_path}/v{RAIDEN_DB_VERSION}_log.db")
    storage = SerializedSQLiteStorage(db_path, serializer=JSONSerializer())

    # Satisfy the foreign-key constraint for state change ID
    state_change = ActionInitChain(
        pseudo_random_generator=random.Random(),
        block_number=BlockNumber(1),
        block_hash=factories.make_block_hash(),
        our_address=factories.make_address(),
        chain_id=ChainID(1),
    )
    ids = storage.write_state_changes([state_change])

    canonical_identifier = factories.make_canonical_identifier()
    recipient = factories.make_address()
    participant = factories.make_address()
    event = SendWithdrawRequest(
        recipient=recipient,
        canonical_identifier=canonical_identifier,
        message_identifier=factories.make_message_identifier(),
        total_withdraw=WithdrawAmount(1),
        participant=participant,
        expiration=BlockExpiration(10),
        nonce=Nonce(15),
    )
    storage.write_events([(ids[0], event)])

    assert storage.get_events()[0] == event
def __init__(  # pylint: disable=too-many-arguments
    self,
    web3: Web3,
    contracts: Dict[str, Contract],
    private_key: PrivateKey,
    db_filename: str,
    sync_start_block: BlockNumber,
    required_confirmations: BlockTimeout,
    poll_interval: float,
    matrix_servers: Optional[List[str]] = None,
):
    """Initialize the pathfinding service.

    Wires together the database, the cached blockchain state and the Matrix
    listener. Order matters: the database must exist before BlockchainState
    reads the latest committed block from it, and both must exist before
    the token networks are loaded.

    Args:
        web3: connected Web3 instance used for all chain queries
        contracts: deployed contract proxies, keyed by contract name
        private_key: key identifying this PFS (payment address + Matrix login)
        db_filename: path of the sqlite database (created if missing)
        sync_start_block: first block to sync events from for a fresh database
        required_confirmations: blocks to wait before trusting an event
        poll_interval: seconds between blockchain polling rounds
        matrix_servers: optional explicit list of Matrix homeservers
    """
    super().__init__()
    self.web3 = web3
    self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
    self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
    # On-chain call: the token the UDC accepts for service payments.
    self.service_token_address = self.user_deposit_contract.functions.token().call()
    self.chain_id = ChainID(web3.eth.chainId)
    self.address = private_key_to_address(private_key)
    self.required_confirmations = required_confirmations
    self._poll_interval = poll_interval
    self._is_running = gevent.event.Event()

    log.info("PFS payment address", address=self.address)

    self.database = PFSDatabase(
        filename=db_filename,
        pfs_address=self.address,
        sync_start_block=sync_start_block,
        token_network_registry_address=to_canonical_address(self.registry_address),
        chain_id=self.chain_id,
        user_deposit_contract_address=to_canonical_address(
            self.user_deposit_contract.address
        ),
        allow_create=True,
    )
    # Resume event processing from where the database left off.
    self.blockchain_state = BlockchainState(
        latest_committed_block=self.database.get_latest_committed_block(),
        token_network_registry_address=to_canonical_address(self.registry_address),
        chain_id=self.chain_id,
    )

    self.matrix_listener = MatrixListener(
        private_key=private_key,
        chain_id=self.chain_id,
        device_id=DeviceIDs.PFS,
        message_received_callback=self.handle_message,
        servers=matrix_servers,
    )

    self.token_networks = self._load_token_networks()
    self.updated = gevent.event.Event()  # set whenever blocks are processed
    self.startup_finished = gevent.event.AsyncResult()

    self._init_metrics()
def ms_database() -> Database:
    """In-memory monitoring-service database fixture (chain id 61)."""
    registry = Address(bytes([3] * 20))
    receiver = Address(bytes([4] * 20))
    return Database(
        filename=":memory:",
        chain_id=ChainID(61),
        msc_address=TEST_MSC_ADDRESS,
        registry_address=registry,
        receiver=receiver,
    )
def test_invalid_chain_id():
    """An unknown chain id must terminate the program."""
    unknown_chain = ChainID(123456789)
    with pytest.raises(SystemExit):
        get_contract_addresses_and_start_block(
            chain_id=unknown_chain,
            contracts_version=DEFAULT_VERSION,
            contracts=[],
            address_overwrites={},
        )
def ms_database():
    """In-memory monitoring-service database fixture (chain id 1)."""
    msc = Address(bytes([2] * 20))
    registry = Address(bytes([3] * 20))
    receiver = Address(bytes([4] * 20))
    return Database(
        filename=":memory:",
        chain_id=ChainID(1),
        msc_address=msc,
        registry_address=registry,
        receiver=receiver,
    )
def test_contract_info_returns_nothing_with_partial_invalid_config():
    """A partial config with an unknown chain id yields no contract info."""
    registry_address = Address('0x' + '1' * 40)
    infos = get_contract_addresses_and_start_block(
        chain_id=ChainID(123456789),
        contracts_version=DEFAULT_VERSION,
        token_network_registry_address=registry_address,
    )
    assert infos is None
def test_pfs_rejects_capacity_update_with_wrong_nonces(
    pathfinding_service_web3_mock,
):
    """A repeated capacity update (stale nonces) raises InvalidCapacityUpdate."""
    service = pathfinding_service_web3_mock
    service.chain_id = ChainID(1)

    token_network = TokenNetwork(token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS)
    service.token_networks[token_network.address] = token_network

    participant1 = private_key_to_address(PRIVAT_KEY_EXAMPLE_1)
    participant2 = private_key_to_address(PRIVAT_KEY_EXAMPLE_2)
    token_network.handle_channel_opened_event(
        channel_identifier=ChannelID(0),
        participant1=participant1,
        participant2=participant2,
        settle_timeout=15,
    )
    for receiver in (participant1, participant2):
        token_network.handle_channel_new_deposit_event(
            channel_identifier=ChannelID(0), receiver=receiver, total_deposit=100
        )

    # Check that the new channel has id == 0
    assert token_network.channel_id_to_addresses[ChannelID(0)] == (participant1, participant2)

    message = get_updatepfs_message(
        updating_participant=participant1,
        other_participant=participant2,
        privkey_signer=PRIVAT_KEY_EXAMPLE_1,
    )

    # Check first capacity update succeeded
    service.on_pfs_update(message)
    view_to_partner, view_from_partner = token_network.get_channel_views_for_partner(
        channel_identifier=ChannelID(0),
        updating_participant=participant1,
        other_participant=participant2,
    )
    assert view_to_partner.capacity == 90
    assert view_to_partner.update_nonce == 1
    assert view_from_partner.capacity == 110
    assert view_from_partner.update_nonce == 0

    # Send the same Capacity Update again - leads to an exception
    with pytest.raises(InvalidCapacityUpdate) as exinfo:
        service.on_pfs_update(message)
    assert "Capacity Update already received" in str(exinfo.value)
def test_client_manager_start(get_accounts, get_private_key):
    """The ClientManager restarts a client whose sync worker dies.

    A mocked matrix client is handed to ClientManager; its ``sync_worker``
    AsyncResult is resolved twice to simulate two sync terminations, and the
    start counter must show that ``_start_client`` ran twice (initial start
    plus one reconnect).

    NOTE(review): the ``gevent.sleep(2)`` calls give the spawned
    ``connect_client_forever`` greenlet time to react — timing-sensitive.
    """
    server_urls = [f"https://example0{i}.com" for i in range(5)]
    (c1, ) = get_accounts(1)
    private_key = get_private_key(c1)

    # A single Mock stands in for every matrix client that gets "started".
    client_mock = Mock()
    client_mock.api.base_url = "https://example00.com"
    client_mock.user_id = "1"
    client_mock.sync_worker = AsyncResult()

    start_client_counter = 0

    def mock_start_client(server_url: str):  # pylint: disable=unused-argument
        # Replace the finished sync worker with a fresh one and count starts.
        nonlocal start_client_counter
        client_mock.sync_worker = AsyncResult()
        start_client_counter += 1
        return client_mock

    with patch.multiple(
        "raiden_libs.matrix",
        make_client=Mock(return_value=client_mock),
        get_matrix_servers=Mock(return_value=server_urls),
        login=Mock(),
        join_broadcast_room=Mock(),
    ):
        client_manager = ClientManager(
            available_servers=[f"https://example0{i}.com" for i in range(5)],
            broadcast_room_alias_prefix="_service",
            chain_id=ChainID(1),
            device_id=DeviceIDs.PFS,
            private_key=private_key,
            handle_matrix_sync=lambda s: True,
        )
        client_manager._start_client = mock_start_client  # pylint: disable=protected-access
        assert client_manager.known_servers == server_urls

        uam = MultiClientUserAddressManager(
            client=client_manager.main_client,
            displayname_cache=DisplayNameCache(),
        )
        uam.start()
        client_manager.user_manager = uam
        client_manager.stop_event.clear()

        # First start happens inside the reconnect loop.
        gevent.spawn(client_manager.connect_client_forever, client_mock.api.base_url)
        gevent.sleep(2)
        # Simulate sync termination -> manager must restart the client once.
        client_mock.sync_worker.set(True)
        gevent.sleep(2)
        # Stop the manager; the final set() lets the loop exit cleanly.
        client_manager.stop_event.set()
        client_mock.sync_worker.set(True)
        assert start_client_counter == 2
def __init__(  # pylint: disable=too-many-arguments
    self,
    web3: Web3,
    contracts: Dict[str, Contract],
    private_key: str,
    db_filename: str,
    sync_start_block: BlockNumber = BlockNumber(0),
    required_confirmations: int = 8,
    poll_interval: float = 10,
    matrix_servers: List[str] = None,
):
    """Initialize the pathfinding service.

    Args:
        web3: connected Web3 instance used for all chain queries
        contracts: deployed contract proxies, keyed by contract name
        private_key: key identifying this PFS (payment address + Matrix login)
        db_filename: path of the sqlite database (created if missing)
        sync_start_block: first block to sync events from
        required_confirmations: blocks to wait before trusting an event
        poll_interval: seconds between blockchain polling rounds
        matrix_servers: optional explicit list of Matrix homeservers

    NOTE(review): the blockchain state starts at block 0 here, not at the
    database's last committed block — presumably resynced elsewhere; confirm.
    """
    super().__init__()
    self.web3 = web3
    self.registry_address = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY].address
    self.user_deposit_contract = contracts[CONTRACT_USER_DEPOSIT]
    self.chain_id = ChainID(int(web3.net.version))
    self.address = private_key_to_address(private_key)
    self.required_confirmations = required_confirmations
    self._poll_interval = poll_interval
    self._is_running = gevent.event.Event()

    log.info("PFS payment address", address=self.address)

    self.blockchain_state = BlockchainState(
        latest_commited_block=BlockNumber(0),
        token_network_registry_address=self.registry_address,
        chain_id=self.chain_id,
    )
    self.database = PFSDatabase(
        filename=db_filename,
        pfs_address=self.address,
        sync_start_block=sync_start_block,
        token_network_registry_address=self.registry_address,
        chain_id=self.chain_id,
        user_deposit_contract_address=self.user_deposit_contract.address,
        allow_create=True,
    )

    self.matrix_listener = MatrixListener(
        private_key=private_key,
        chain_id=self.chain_id,
        service_room_suffix=PATH_FINDING_BROADCASTING_ROOM,
        message_received_callback=self.handle_message,
        address_reachability_changed_callback=self.handle_reachability_change,
        servers=matrix_servers,
    )

    # Tracks the last known presence/reachability per peer address.
    self.address_to_reachability: Dict[Address, AddressReachability] = dict()
    self.token_networks = self._load_token_networks()