def test_logging_processor():
    # test if our logging processor changes bytes to checksum addresses
    # even if bytes-addresses are entangled into events
    logger = Mock()
    log_method = Mock()

    # Case 1: a bare bytes address in the event dict is checksummed.
    address = b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd9"
    address_log = format_to_hex(
        _logger=logger, _log_method=log_method, event_dict=dict(address=address)
    )
    assert to_checksum_address(address) == address_log["address"]

    # Case 2: addresses nested inside an event object are checksummed too,
    # and the event's class name is exposed as "type_name".
    address2 = b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd1"
    event = ReceiveTokenNetworkCreatedEvent(
        token_address=Address(address),
        token_network_address=TokenNetworkAddress(address2),
        block_number=BlockNumber(1),
    )
    event_log = format_to_hex(_logger=logger, _log_method=log_method, event_dict=dict(event=event))
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address) == event_log["event"]["token_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address2) == event_log["event"]["token_network_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        event_log["event"]["type_name"] == "ReceiveTokenNetworkCreatedEvent"
    )

    # Case 3: an address nested two levels deep inside a message
    # (within its CanonicalIdentifier) is checksummed as well.
    message = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=TokenNetworkAddress(address),
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    message_log = format_to_hex(
        _logger=logger, _log_method=log_method, event_dict=dict(message=message)
    )
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address)
        == message_log["message"]["canonical_identifier"]["token_network_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        message_log["message"]["type_name"] == "PFSFeeUpdate"
    )
def get_feedback_token(
    self,
    token_id: UUID,
    token_network_address: TokenNetworkAddress,
    route: List[Address],
) -> Optional[FeedbackToken]:
    """Look up the stored feedback token matching id, network and route.

    Returns the reconstructed ``FeedbackToken``, or ``None`` when no
    matching row exists.
    """
    # Routes are stored as JSON lists of checksummed addresses.
    checksummed_route = [to_checksum_address(hop) for hop in route]
    row = self.conn.execute(
        """SELECT * FROM feedback
           WHERE token_id = ?
             AND token_network_address = ?
             AND route = ?;
        """,
        [
            token_id.hex,
            to_checksum_address(token_network_address),
            json.dumps(checksummed_route),
        ],
    ).fetchone()

    if not row:
        return None

    return FeedbackToken(
        token_network_address=TokenNetworkAddress(
            to_canonical_address(row["token_network_address"])
        ),
        id=UUID(row["token_id"]),
        creation_time=row["creation_time"],
    )
def test_token_channel_opened(pathfinding_service_mock, token_network_model):
    """A ChannelOpened event for an unregistered token network must not
    change the tracked networks or channels."""
    setup_channel(pathfinding_service_mock, token_network_model)
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 1

    # Test invalid token network address
    channel_event = ReceiveChannelOpenedEvent(
        token_network_address=TokenNetworkAddress(bytes([2] * 20)),
        channel_identifier=ChannelID(1),
        participant1=PARTICIPANT1,
        participant2=PARTICIPANT2,
        settle_timeout=20,
        block_number=BlockNumber(1),
    )
    pathfinding_service_mock.handle_event(channel_event)
    # Counts unchanged: the event was ignored.
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 1

    # Check that presence of these addresses is followed
    pathfinding_service_mock.matrix_listener.follow_address_presence.assert_has_calls(
        [
            call(bytes([1] * 20), refresh=True),
            call(bytes([2] * 20), refresh=True),
        ]
    )
def test_get_pfs_iou():
    """get_last_iou returns None when the PFS has no previous IOU and the
    IOU dict when one is returned by the mocked endpoint."""
    token_network_address = TokenNetworkAddress(bytes([1] * 20))
    privkey = bytes([2] * 32)
    sender = to_checksum_address(privatekey_to_address(privkey))
    receiver = factories.make_checksum_address()
    with patch('raiden.network.pathfinding.requests.get') as get_mock:
        # No previous IOU
        get_mock.return_value.json.return_value = {'last_iou': None}
        assert get_last_iou('http://example.com', token_network_address, sender, receiver) is None

        # Previous IOU
        iou = dict(sender=sender, receiver=receiver, amount=10, expiration_block=1000)
        iou['signature'] = sign_one_to_n_iou(
            privatekey=encode_hex(privkey),
            sender=sender,
            receiver=receiver,
            amount=iou['amount'],
            expiration=iou['expiration_block'],
        )
        get_mock.return_value.json.return_value = {'last_iou': iou}
        assert get_last_iou('http://example.com', token_network_address, sender, receiver) == iou
def test_prometheus_event_handling_raise_exception(pathfinding_service_mock_empty):
    """An exception raised inside an instrumented event handler must be
    counted in the `events_exceptions_total` metric."""
    metrics_state = save_metrics_state(metrics.REGISTRY)
    pfs = pathfinding_service_mock_empty

    event = ReceiveTokenNetworkCreatedEvent(
        token_address=TokenAddress(bytes([1] * 20)),
        token_network_address=TokenNetworkAddress(bytes([2] * 20)),
        block_number=BlockNumber(1),
    )

    # Make the concrete handler fail deterministically.
    pfs.handle_token_network_created = Mock(side_effect=KeyError())

    with pytest.raises(KeyError):
        pfs.handle_event(event)
        # The exceptions raised in the wrapped part of the prometheus logging
        # will not be handled anywhere at the moment.
        # Force an exception and test correct logging of it anyways,
        # since at some point higher in the call stack we could catch exceptions.

    assert (
        metrics_state.get_delta(
            "events_exceptions_total",
            labels={"event_type": "ReceiveTokenNetworkCreatedEvent"},
        )
        == 1.0
    )
def __init__(self, channels: List[dict], default_capacity: TA = TA(1000)): super().__init__(token_network_address=TokenNetworkAddress(a(255))) # open channels channel_ids = itertools.count(100) for chan in channels: self.handle_channel_opened_event( channel_identifier=ChannelID(next(channel_ids)), participant1=a(chan["participant1"]), participant2=a(chan["participant2"]), settle_timeout=100, ) cv1: ChannelView = self.G[a(chan["participant1"])][a( chan["participant2"])]["view"] cv1.capacity = chan.get("capacity1", default_capacity) cv2: ChannelView = self.G[a(chan["participant2"])][a( chan["participant1"])]["view"] cv2.capacity = chan.get("capacity2", default_capacity) # create reachability mapping for testing self.address_to_reachability: Dict[Address, AddressReachability] = { node: AddressReachability.REACHABLE for node in self.G.nodes }
def test_get_pfs_iou():
    """get_last_iou returns None without a previous IOU and the signed IOU
    dict when the mocked PFS endpoint provides one."""
    token_network_address = TokenNetworkAddress(bytes([1] * 20))
    privkey = bytes([2] * 32)
    sender = privatekey_to_address(privkey)
    receiver = factories.make_address()
    with patch("raiden.network.pathfinding.requests.get") as get_mock:
        # No previous IOU
        get_mock.return_value.json.return_value = {"last_iou": None}
        assert (
            get_last_iou("http://example.com", token_network_address, sender, receiver, PRIVKEY)
            is None
        )

        # Previous IOU
        iou = dict(sender=sender, receiver=receiver, amount=10, expiration_block=1000)
        iou["signature"] = sign_one_to_n_iou(
            privatekey=encode_hex(privkey),
            sender=to_checksum_address(sender),
            receiver=to_checksum_address(receiver),
            amount=iou["amount"],
            expiration=iou["expiration_block"],
        )
        get_mock.return_value.json.return_value = {"last_iou": iou}
        assert (
            get_last_iou("http://example.com", token_network_address, sender, receiver, PRIVKEY)
            == iou
        )
def install_all_blockchain_filters(
    self,
    token_network_registry_proxy: TokenNetworkRegistry,
    secret_registry_proxy: SecretRegistry,
    from_block: BlockNumber,
):
    """Register blockchain event listeners for the token network registry,
    the secret registry, and every known token network, starting at
    ``from_block``.

    Runs under ``event_poll_lock`` so listener installation does not race
    with event polling.
    """
    with self.event_poll_lock:
        node_state = views.state_from_raiden(self)
        token_networks = views.get_token_network_identifiers(
            node_state,
            token_network_registry_proxy.address,
        )

        self.blockchain_events.add_token_network_registry_listener(
            token_network_registry_proxy=token_network_registry_proxy,
            contract_manager=self.contract_manager,
            from_block=from_block,
        )
        self.blockchain_events.add_secret_registry_listener(
            secret_registry_proxy=secret_registry_proxy,
            contract_manager=self.contract_manager,
            from_block=from_block,
        )

        # One listener per token network currently known to the node state.
        for token_network in token_networks:
            token_network_proxy = self.chain.token_network(
                TokenNetworkAddress(token_network),
            )
            self.blockchain_events.add_token_network_listener(
                token_network_proxy=token_network_proxy,
                contract_manager=self.contract_manager,
                from_block=from_block,
            )
def test_get_pfs_iou(one_to_n_address):
    """get_last_iou returns None without a previous IOU, and the parsed IOU
    object when the mocked session returns its JSON representation."""
    token_network_address = TokenNetworkAddress(bytes([1] * 20))
    privkey = bytes([2] * 32)
    sender = privatekey_to_address(privkey)
    receiver = factories.make_address()

    response = mocked_json_response(response_data={"last_iou": None})
    with patch.object(session, "get", return_value=response):
        assert (
            get_last_iou("http://example.com", token_network_address, sender, receiver, PRIVKEY)
            is None
        )

    # Previous IOU
    iou = IOU(
        sender=sender,
        receiver=receiver,
        amount=10,
        expiration_block=1000,
        one_to_n_address=one_to_n_address,
        chain_id=4,
    )
    iou.sign(privkey)

    response = mocked_json_response(response_data={"last_iou": iou.as_json()})
    with patch.object(session, "get", return_value=response):
        assert (
            get_last_iou("http://example.com", token_network_address, sender, receiver, PRIVKEY)
            == iou
        )
def maybe_add_tokennetwork(
    chain_state: ChainState,
    payment_network_identifier: PaymentNetworkID,
    token_network_state: TokenNetworkState,
) -> None:
    """Idempotently register a token network (and, if missing, its payment
    network) in the chain state's lookup mappings."""
    token_network_identifier = token_network_state.address
    token_address = token_network_state.token_address

    payment_network_state, token_network_state_previous = get_networks(
        chain_state, payment_network_identifier, token_address
    )

    if payment_network_state is None:
        # First token network for this payment network: create the payment
        # network state wrapping it.
        payment_network_state = PaymentNetworkState(
            payment_network_identifier, [token_network_state]
        )
        ids_to_payments = chain_state.identifiers_to_paymentnetworks
        ids_to_payments[payment_network_identifier] = payment_network_state

    if token_network_state_previous is None:
        # Token network not registered yet: index it by identifier and by
        # token address.
        ids_to_tokens = payment_network_state.tokenidentifiers_to_tokennetworks
        addresses_to_ids = payment_network_state.tokenaddresses_to_tokenidentifiers
        ids_to_tokens[token_network_identifier] = token_network_state
        addresses_to_ids[token_address] = token_network_identifier

    mapping = chain_state.tokennetworkaddresses_to_paymentnetworkaddresses
    # FIXME: Remove cast once TokenNetworkAddress or TokenNetworkID are removed
    mapping[TokenNetworkAddress(token_network_identifier)] = payment_network_identifier
def _get_onchain_locksroots(
    raiden: "RaidenService",
    storage: SQLiteStorage,
    token_network: Dict[str, Any],
    channel: Dict[str, Any],
) -> Tuple[Locksroot, Locksroot]:
    """Fetch both participants' on-chain locksroots for the given channel.

    Raises:
        RaidenUnrecoverableError: when the channel-creation state change
            cannot be found in storage.
    """
    channel_new_state_change = _find_channel_new_state_change(
        storage=storage,
        token_network_address=token_network["address"],
        channel_identifier=channel["identifier"],
    )

    if not channel_new_state_change:
        raise RaidenUnrecoverableError(
            f'Could not find the state change for channel {channel["identifier"]}, '
            f'token network address: {token_network["address"]} being created. '
        )

    canonical_identifier = CanonicalIdentifier(
        # NOTE(review): ChainID(-1) looks like a placeholder value for this
        # query — confirm the chain id is not used by get_onchain_locksroots.
        chain_identifier=ChainID(-1),
        token_network_address=TokenNetworkAddress(
            to_canonical_address(token_network["address"])
        ),
        channel_identifier=ChannelID(int(channel["identifier"])),
    )
    our_locksroot, partner_locksroot = get_onchain_locksroots(
        chain=raiden.chain,
        canonical_identifier=canonical_identifier,
        participant1=to_canonical_address(channel["our_state"]["address"]),
        participant2=to_canonical_address(channel["partner_state"]["address"]),
        block_identifier="latest",
    )
    return our_locksroot, partner_locksroot
def from_event(cls, event: 'SendLockedTransfer') -> 'LockedTransfer':
    """Build a LockedTransfer message from a SendLockedTransfer event."""
    transfer = event.transfer
    balance_proof = transfer.balance_proof

    # Re-wrap the transfer's lock into a message-level Lock instance.
    message_lock = Lock(
        amount=transfer.lock.amount,
        expiration=transfer.lock.expiration,
        secrethash=transfer.lock.secrethash,
    )

    return cls(
        chain_id=balance_proof.chain_id,
        message_identifier=event.message_identifier,
        payment_identifier=transfer.payment_identifier,
        nonce=balance_proof.nonce,
        token_network_address=TokenNetworkAddress(balance_proof.token_network_identifier),
        token=transfer.token,
        channel_identifier=balance_proof.channel_identifier,
        transferred_amount=balance_proof.transferred_amount,
        locked_amount=balance_proof.locked_amount,
        recipient=event.recipient,
        locksroot=balance_proof.locksroot,
        lock=message_lock,
        target=transfer.target,
        initiator=transfer.initiator,
        fee=0,
    )
def payment_channel(
        self, canonical_identifier: CanonicalIdentifier) -> PaymentChannel:
    """Return the PaymentChannel proxy for the given canonical identifier,
    creating and caching it on first use.

    Raises:
        ValueError: if the token network address is not a valid binary
            address, or the channel identifier has the wrong type.
    """
    token_network_address = TokenNetworkAddress(
        canonical_identifier.token_network_address)
    channel_id = canonical_identifier.channel_identifier

    if not is_binary_address(token_network_address):
        raise ValueError("address must be a valid address")
    if not isinstance(channel_id, T_ChannelID):
        raise ValueError("channel identifier must be of type T_ChannelID")

    # Lock so that two threads do not create the same proxy concurrently.
    with self._payment_channel_creation_lock:
        dict_key = (token_network_address, channel_id)

        if dict_key not in self.identifier_to_payment_channel:
            token_network = self.token_network(token_network_address)

            self.identifier_to_payment_channel[dict_key] = PaymentChannel(
                token_network=token_network,
                channel_identifier=channel_id,
                contract_manager=self.contract_manager,
            )

    return self.identifier_to_payment_channel[dict_key]
def f(
    chain_id: ChainID = ChainID(1),
    amount: TokenAmount = TokenAmount(50),
    nonce: Nonce = Nonce(1),
    channel_id: ChannelID = ChannelID(1),
) -> RequestMonitoring:
    """Factory for RequestMonitoring test fixtures backed by a signed
    HashedBalanceProof."""
    balance_proof = HashedBalanceProof(
        channel_identifier=channel_id,
        token_network_address=TokenNetworkAddress(b"1" * 20),
        chain_id=chain_id,
        nonce=nonce,
        additional_hash="",
        # NOTE(review): bytes([amount]) only works for amount < 256 — fine
        # for the default of 50, but confirm callers never pass more.
        balance_hash=encode_hex(bytes([amount])),
        priv_key=get_random_privkey(),
    )
    request_monitoring = balance_proof.get_request_monitoring(
        privkey=non_closing_privkey,
        reward_amount=TokenAmount(55),
        monitoring_service_contract_address=TEST_MSC_ADDRESS,
    )

    # usually not a property of RequestMonitoring, but added for convenience in these tests
    request_monitoring.non_closing_signer = to_checksum_address(  # type: ignore
        non_closing_address)
    return request_monitoring
def test_feedback_stats(pathfinding_service_mock):
    """Feedback counters must track prepared routes, received feedback and
    successful feedback independently."""
    token_network_address = TokenNetworkAddress(b"1" * 20)
    default_path = [b"1" * 20, b"2" * 20, b"3" * 20]
    feedback_token = FeedbackToken(token_network_address)
    database = pathfinding_service_mock.database

    # One route prepared, no feedback given yet.
    database.prepare_feedback(feedback_token, default_path)
    assert database.get_num_routes_feedback() == 1
    assert database.get_num_routes_feedback(only_with_feedback=True) == 0
    assert database.get_num_routes_feedback(only_successful=True) == 0

    # Unsuccessful feedback counts as feedback but not as success.
    database.update_feedback(feedback_token, default_path, False)
    assert database.get_num_routes_feedback() == 1
    assert database.get_num_routes_feedback(only_with_feedback=True) == 1
    assert database.get_num_routes_feedback(only_successful=True) == 0

    # Second route prepared with a fresh token.
    default_path2 = default_path[1:]
    feedback_token2 = FeedbackToken(token_network_address)
    database.prepare_feedback(feedback_token2, default_path2)
    assert database.get_num_routes_feedback() == 2
    assert database.get_num_routes_feedback(only_with_feedback=True) == 1
    assert database.get_num_routes_feedback(only_successful=True) == 0

    # Successful feedback increments both feedback and success counters.
    database.update_feedback(feedback_token2, default_path2, True)
    assert database.get_num_routes_feedback() == 2
    assert database.get_num_routes_feedback(only_with_feedback=True) == 2
    assert database.get_num_routes_feedback(only_successful=True) == 1
def from_dict(cls, data: Dict[str, Any]) -> "CanonicalIdentifier":
    """Deserialize a CanonicalIdentifier from its dict representation."""
    chain_id = ChainID(int(data["chain_identifier"]))
    network_address = TokenNetworkAddress(to_bytes(hexstr=data["token_network_address"]))
    channel_id = ChannelID(int(data["channel_identifier"]))
    return cls(
        chain_identifier=chain_id,
        token_network_address=network_address,
        channel_identifier=channel_id,
    )
def test_save_and_load_token_networks(pathfinding_service_mock_empty):
    """A token network built from events must survive a round-trip through
    the database."""
    pfs = pathfinding_service_mock_empty

    token_address = Address(bytes([1] * 20))
    token_network_address = TokenNetworkAddress(bytes([2] * 20))
    channel_id = ChannelID(1)
    p1 = Address(bytes([3] * 20))
    p2 = Address(bytes([4] * 20))
    events = [
        ReceiveTokenNetworkCreatedEvent(
            token_address=token_address,
            token_network_address=token_network_address,
            block_number=BlockNumber(1),
        ),
        ReceiveChannelOpenedEvent(
            token_network_address=token_network_address,
            channel_identifier=channel_id,
            participant1=p1,
            participant2=p2,
            settle_timeout=2 ** 65,  # larger than max_uint64 to check hex storage
            block_number=BlockNumber(2),
        ),
    ]
    for event in events:
        pfs.handle_event(event)
    assert len(pfs.token_networks) == 1

    loaded_networks = pfs._load_token_networks()  # pylint: disable=protected-access
    assert len(loaded_networks) == 1

    # The reloaded network must match the in-memory one.
    orig = list(pfs.token_networks.values())[0]
    loaded = list(loaded_networks.values())[0]
    assert loaded.address == orig.address
    assert loaded.channel_id_to_addresses == orig.channel_id_to_addresses
    assert loaded.G.nodes == orig.G.nodes
def get_contractreceivechannelnew_data_from_event(
        chain_state: ChainState, event: DecodedEvent) -> Optional[NewChannelDetails]:
    """Extract NewChannelDetails from a decoded ChannelOpened event.

    Returns ``None`` when this node is not one of the channel participants.
    """
    token_network_address = TokenNetworkAddress(event.originating_contract)
    data = event.event_data
    args = data["args"]
    participant1 = args["participant1"]
    participant2 = args["participant2"]

    # Determine which participant is the partner, if we participate at all.
    our_address = chain_state.our_address
    if our_address == participant1:
        partner_address = participant2
    elif our_address == participant2:
        partner_address = participant1
    else:
        # Not a channel which this node is a participant
        return None

    token_network_registry = views.get_token_network_registry_by_token_network_address(
        chain_state, token_network_address)
    assert token_network_registry is not None, "Token network registry missing"

    token_network = views.get_token_network_by_address(
        chain_state=chain_state, token_network_address=token_network_address)
    assert token_network is not None, "Token network missing"

    return NewChannelDetails(
        chain_id=event.chain_id,
        token_network_registry_address=token_network_registry.address,
        token_address=token_network.token_address,
        token_network_address=token_network_address,
        our_address=our_address,
        partner_address=partner_address,
    )
def test_serialize_contract_send_subclass(chain_state):
    """Serializing must preserve class

    Regression test for https://github.com/raiden-network/raiden/issues/6075
    """
    canonical_identifier = CanonicalIdentifier(
        chain_identifier=ChainID(1),
        token_network_address=TokenNetworkAddress(factories.make_address()),
        channel_identifier=factories.make_channel_identifier(),
    )
    chain_state.pending_transactions = [
        ContractSendChannelClose(
            canonical_identifier=canonical_identifier,
            triggered_by_block_hash=factories.make_block_hash(),
            balance_proof=None,
        )
    ]

    serialized_chain_state = JSONSerializer.serialize(chain_state)
    deserialized_chain_state = JSONSerializer.deserialize(
        serialized_chain_state)
    # The concrete subclass (not a base class) must come back after the
    # round-trip, and the full state must compare equal.
    assert (
        chain_state.pending_transactions[0].__class__.__name__
        == deserialized_chain_state.pending_transactions[0].__class__.__name__)
    assert chain_state == deserialized_chain_state
def test_feedback(pathfinding_service_mock):
    """Feedback is recorded only for the exact (token, route) pair, and an
    update is effective only once per pair."""
    token_network_address = TokenNetworkAddress(b"1" * 20)
    route = [Address(b"2" * 20), Address(b"3" * 20)]
    other_route = [Address(b"2" * 20), Address(b"4" * 20)]
    token = FeedbackToken(token_network_address=token_network_address)
    other_token = FeedbackToken(token_network_address=token_network_address)
    database = pathfinding_service_mock.database

    # Nothing stored yet.
    assert not db_has_feedback_for(database=database, token=token, route=route)
    assert not db_has_feedback_for(database=database, token=token, route=other_route)
    assert not db_has_feedback_for(database=database, token=other_token, route=route)

    # Preparing feedback does not count as having feedback.
    database.prepare_feedback(token=token, route=route)
    assert not db_has_feedback_for(database=database, token=token, route=route)
    assert not db_has_feedback_for(database=database, token=other_token, route=route)
    assert not db_has_feedback_for(database=database, token=token, route=other_route)

    # First update affects exactly one row and only this token/route pair.
    rowcount = database.update_feedback(token=token, route=route, successful=True)
    assert rowcount == 1
    assert db_has_feedback_for(database=database, token=token, route=route)
    assert not db_has_feedback_for(database=database, token=other_token, route=route)
    assert not db_has_feedback_for(database=database, token=token, route=other_route)

    # A repeated update for the same pair changes no rows.
    rowcount = database.update_feedback(token=token, route=route, successful=True)
    assert rowcount == 0
def test_insert_feedback_token(pathfinding_service_mock):
    """A stored feedback token is found only for the exact combination of
    token id, token network address and route.

    Fix: the negative cases for a wrong network address, wrong route and
    empty route previously passed ``token_id=uuid4()``, so they succeeded
    merely because the UUID differed and never exercised the dimension they
    claimed to test. They now use the matching ``token.uuid``.
    """
    token_network_address = TokenNetworkAddress(b"1" * 20)
    route = [Address(b"2" * 20), Address(b"3" * 20)]
    estimated_fee = -123
    token = FeedbackToken(token_network_address=token_network_address)
    database = pathfinding_service_mock.database
    database.prepare_feedback(token=token, route=route, estimated_fee=estimated_fee)

    # Test round-trip
    stored = database.get_feedback_token(
        token_id=token.uuid, token_network_address=token_network_address, route=route)
    assert stored == token

    # Test different UUID
    stored = database.get_feedback_token(
        token_id=uuid4(), token_network_address=token_network_address, route=route)
    assert stored is None

    # Test different token network address (matching UUID, so the lookup
    # fails because of the address alone)
    token_network_address_wrong = TokenNetworkAddress(b"9" * 20)
    stored = database.get_feedback_token(
        token_id=token.uuid, token_network_address=token_network_address_wrong, route=route)
    assert stored is None

    # Test different route (matching UUID and network)
    route_wrong = [Address(b"2" * 20), Address(b"3" * 20), Address(b"4" * 20)]
    stored = database.get_feedback_token(
        token_id=token.uuid, token_network_address=token_network_address, route=route_wrong)
    assert stored is None

    # Test empty route (matching UUID and network)
    stored = database.get_feedback_token(
        token_id=token.uuid, token_network_address=token_network_address, route=[])
    assert stored is None
def test_waiting_messages(pathfinding_service_mock):
    """Waiting messages can be stored and are popped exactly once."""
    participant1_privkey, participant1 = make_privkey_address()
    token_network_address = TokenNetworkAddress(b"1" * 20)
    channel_id = ChannelID(1)

    # register token network internally
    database = pathfinding_service_mock.database
    database.conn.execute(
        "INSERT INTO token_network(address) VALUES (?)",
        [to_checksum_address(token_network_address)],
    )

    fee_update = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=token_network_address,
            channel_identifier=channel_id,
        ),
        updating_participant=participant1,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    fee_update.sign(LocalSigner(participant1_privkey))

    capacity_update = PFSCapacityUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=token_network_address,
            channel_identifier=channel_id,
        ),
        updating_participant=make_address(),
        other_participant=make_address(),
        updating_nonce=Nonce(1),
        other_nonce=Nonce(1),
        updating_capacity=TokenAmount(100),
        other_capacity=TokenAmount(111),
        reveal_timeout=50,
        signature=EMPTY_SIGNATURE,
    )
    capacity_update.sign(LocalSigner(participant1_privkey))

    # For each message type: insert it, pop it back once, and verify that a
    # second pop yields nothing.
    for message in (fee_update, capacity_update):
        database.insert_waiting_message(message)

        recovered_messages = list(
            database.pop_waiting_messages(
                token_network_address=token_network_address, channel_id=channel_id
            )
        )
        assert len(recovered_messages) == 1
        assert message == recovered_messages[0]

        recovered_messages2 = list(
            database.pop_waiting_messages(
                token_network_address=token_network_address, channel_id=channel_id
            )
        )
        assert len(recovered_messages2) == 0
def handle_token_network_created(
        self, event: ReceiveTokenNetworkCreatedEvent) -> None:
    """Register a token network the first time its creation event is seen."""
    address = TokenNetworkAddress(event.token_network_address)
    if self.follows_token_network(address):
        # Already tracked — nothing to do.
        return
    log.info("Found new token network", event_=event)
    self.token_networks[address] = TokenNetwork(address)
    self.database.upsert_token_network(address)
def get(  # pylint: disable=no-self-use
    self,
    token_network_address: str,
    source_address: str,
    target_address: Optional[str] = None,
) -> Tuple[dict, int]:
    """Debug endpoint: collect recent failed path requests and stored
    feedback routes matching the given token network and source address
    (optionally filtered by target).

    Returns a ``(payload, http_status)`` tuple; status is always 200.
    """
    request_count = 0
    responses = []

    # Collect matching entries from the in-memory failed-request log.
    for req in last_failed_requests:
        log.debug("Last Requests Values:", req=req)
        matches_params = is_same_address(
            token_network_address, req["token_network_address"]) and is_same_address(
                source_address, req["source"])
        if target_address is not None:
            matches_params = matches_params and is_same_address(
                target_address, req["target"])
        if matches_params:
            request_count += 1
            responses.append(
                dict(source=req["source"], target=req["target"], routes=req["routes"]))

    decoded_target_address: Optional[Address] = None
    if target_address:
        decoded_target_address = to_canonical_address(target_address)

    feedback_routes = self.pathfinding_service.database.get_feedback_routes(
        TokenNetworkAddress(to_canonical_address(token_network_address)),
        to_canonical_address(source_address),
        decoded_target_address,
    )

    # Group routes after request (each request shares the `token_id`)
    grouped_routes: Dict[str, List[Dict]] = collections.defaultdict(list)
    for route in feedback_routes:
        grouped_routes[route["token_id"]].append(route)

    # One response entry per original request (i.e. per token_id group).
    for requests in grouped_routes.values():
        routes = [{
            "path": route["route"],
            "estimated_fee": route["estimated_fee"]
        } for route in requests]
        responses.append({
            "source": requests[0]["source_address"],
            "target": requests[0]["target_address"],
            "routes": routes,
        })
        request_count += 1

    return dict(request_count=request_count, responses=responses), 200
def create_scheduled_event(row: sqlite3.Row) -> ScheduledEvent:
    """Rebuild a ScheduledEvent from its database row."""
    # The stored event-type id selects the concrete event class.
    sub_event_cls = EVENT_ID_TYPE_MAP[row["event_type"]]
    inner_event = sub_event_cls(
        TokenNetworkAddress(to_canonical_address(row["token_network_address"])),
        row["channel_identifier"],
        row["non_closing_participant"],
    )
    return ScheduledEvent(
        trigger_block_number=row["trigger_block_number"],
        event=inner_event,
    )
def _validate_token_network_argument(self, token_network_address: str) -> TokenNetwork:
    """Resolve a checksummed token network address to a TokenNetwork.

    Raises InvalidTokenNetwork for a non-checksummed address and
    UnsupportedTokenNetwork when the service does not know the network.
    """
    if not is_checksum_address(token_network_address):
        raise exceptions.InvalidTokenNetwork(
            msg="The token network needs to be given as a checksummed address",
            token_network=token_network_address,
        )

    canonical = TokenNetworkAddress(to_canonical_address(token_network_address))
    token_network = self.pathfinding_service.get_token_network(canonical)
    if token_network is None:
        raise exceptions.UnsupportedTokenNetwork(token_network=token_network_address)

    return token_network
def test_feedback_token_validity():
    """A fresh token is valid; one older than the maximum age is not."""
    network = TokenNetworkAddress(b"1" * 20)

    # Newly created token is valid
    fresh_token = FeedbackToken(token_network_address=network)
    assert fresh_token.is_valid()

    # Test expiry in is_valid
    expired_creation = datetime.utcnow() - MAX_AGE_OF_FEEDBACK_REQUESTS - timedelta(seconds=1)
    stale_token = FeedbackToken(
        creation_time=expired_creation,
        token_network_address=network,
    )
    assert not stale_token.is_valid()
def contractreceivechannelnew_from_event(
        new_channel_details: NewChannelDetails,
        channel_config: ChannelConfig,
        event: DecodedEvent) -> ContractReceiveChannelNew:
    """Build a ContractReceiveChannelNew state change from a decoded
    ChannelOpened blockchain event."""
    data = event.event_data
    args = data["args"]
    settle_timeout = args["settle_timeout"]

    block_number = event.block_number
    identifier = args["channel_identifier"]
    token_network_address = TokenNetworkAddress(event.originating_contract)

    # Both channel ends start with a zero balance.
    our_state = NettingChannelEndState(new_channel_details.our_address, Balance(0))
    partner_state = NettingChannelEndState(new_channel_details.partner_address, Balance(0))

    open_transaction = TransactionExecutionStatus(
        None, block_number, TransactionExecutionStatus.SUCCESS)

    # If the node was offline for a long period, the channel may have been
    # closed already, if that is the case during initialization the node will
    # process the other events and update the channel's state
    close_transaction: Optional[TransactionExecutionStatus] = None
    settle_transaction: Optional[TransactionExecutionStatus] = None

    channel_state = NettingChannelState(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=new_channel_details.chain_id,
            token_network_address=token_network_address,
            channel_identifier=identifier,
        ),
        token_address=new_channel_details.token_address,
        token_network_registry_address=new_channel_details.
        token_network_registry_address,
        reveal_timeout=channel_config.reveal_timeout,
        settle_timeout=settle_timeout,
        fee_schedule=channel_config.fee_schedule,
        our_state=our_state,
        partner_state=partner_state,
        open_transaction=open_transaction,
        close_transaction=close_transaction,
        settle_transaction=settle_transaction,
    )

    return ContractReceiveChannelNew(
        channel_state=channel_state,
        transaction_hash=event.transaction_hash,
        block_number=block_number,
        block_hash=event.block_hash,
    )
def test_pfs_rejects_capacity_update_with_wrong_token_network_address(
    pathfinding_service_web3_mock: PathfindingService,
):
    """A capacity update for an unknown token network must be rejected."""
    setup_channel(pathfinding_service_web3_mock)

    unknown_network = TokenNetworkAddress(decode_hex("0x" + "1" * 40))
    message = get_updatepfs_message(
        token_network_address=unknown_network,
        updating_participant=PRIVATE_KEY_1_ADDRESS,
        other_participant=PRIVATE_KEY_2_ADDRESS,
        privkey_signer=PRIVATE_KEY_1,
    )

    with pytest.raises(InvalidCapacityUpdate) as exinfo:
        pathfinding_service_web3_mock.on_pfs_update(message)
    assert "unknown token network" in str(exinfo.value)
def _canonical_id_from_string(string: str) -> CanonicalIdentifier:
    """Parse a "chain_id|token_network_address|channel_id" string back into
    a CanonicalIdentifier.

    Raises ValueError when the string does not have exactly three parts or
    a part cannot be converted.
    """
    try:
        chain_part, address_part, channel_part = string.split("|")
        return CanonicalIdentifier(
            chain_identifier=ChainID(int(chain_part)),
            token_network_address=TokenNetworkAddress(to_canonical_address(address_part)),
            channel_identifier=ChannelID(int(channel_part)),
        )
    except ValueError:
        raise ValueError(
            f"Could not reconstruct canonical identifier from string: {string}"
        )