def test_channel_settled_event_handler_leaves_existing_channel(context: Context):
    """A settle event naming an unknown channel id must leave the stored channel alone."""
    context = setup_state_with_closed_channel(context)

    # Channel id 4 does not match the channel created by the setup helper.
    settle_event = ReceiveChannelSettledEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=ChannelID(4),
        block_number=BlockNumber(52),
    )
    channel_settled_event_handler(settle_event, context)

    # The single closed channel must still be present and unchanged.
    assert context.database.channel_count() == 1
    assert_channel_state(context, ChannelState.CLOSED)
def from_dict(cls, data: Dict[str, Any]) -> "SendSecretRequest":
    """Rebuild a SendSecretRequest event from its serialized dict form."""
    secrethash = deserialize_secret_hash(data["secrethash"])
    return cls(
        recipient=to_canonical_address(data["recipient"]),
        channel_identifier=ChannelID(int(data["channel_identifier"])),
        message_identifier=MessageID(int(data["message_identifier"])),
        payment_identifier=PaymentID(int(data["payment_identifier"])),
        amount=PaymentWithFeeAmount(int(data["amount"])),
        expiration=BlockExpiration(int(data["expiration"])),
        secrethash=secrethash,
    )
def from_dict(cls, data: Dict[str, Any]) -> "SendBalanceProof":
    """Rebuild a SendBalanceProof event from its serialized dict form."""
    secret = deserialize_secret(data["secret"])
    return cls(
        recipient=to_canonical_address(data["recipient"]),
        channel_identifier=ChannelID(int(data["channel_identifier"])),
        message_identifier=MessageID(int(data["message_identifier"])),
        payment_identifier=PaymentID(int(data["payment_identifier"])),
        token_address=to_canonical_address(data["token_address"]),
        secret=secret,
        balance_proof=data["balance_proof"],
    )
def from_dict(cls, data: Dict[str, Any]) -> 'ContractReceiveRouteNew':
    """Deserialize a ContractReceiveRouteNew state change from a dict."""
    # The chain id is not part of the serialized data, so it stays unspecified.
    canonical_identifier = CanonicalIdentifier(
        chain_identifier=CHAIN_ID_UNSPECIFIED,
        token_network_address=to_canonical_address(data['token_network_identifier']),
        channel_identifier=ChannelID(int(data['channel_identifier'])),
    )
    return cls(
        transaction_hash=deserialize_transactionhash(data['transaction_hash']),
        canonical_identifier=canonical_identifier,
        participant1=to_canonical_address(data['participant1']),
        participant2=to_canonical_address(data['participant2']),
        block_number=BlockNumber(int(data['block_number'])),
        block_hash=BlockHash(deserialize_bytes(data['block_hash'])),
    )
def from_dict(cls, data: Dict[str, Any]) -> 'ContractReceiveChannelSettled':
    """Deserialize a ContractReceiveChannelSettled state change from a dict."""
    # The chain id is not part of the serialized data, so it stays unspecified.
    canonical_identifier = CanonicalIdentifier(
        chain_identifier=CHAIN_ID_UNSPECIFIED,
        token_network_address=to_canonical_address(data['token_network_identifier']),
        channel_identifier=ChannelID(int(data['channel_identifier'])),
    )
    return cls(
        transaction_hash=deserialize_transactionhash(data['transaction_hash']),
        canonical_identifier=canonical_identifier,
        our_onchain_locksroot=deserialize_locksroot(data['our_onchain_locksroot']),
        partner_onchain_locksroot=deserialize_locksroot(data['partner_onchain_locksroot']),
        block_number=BlockNumber(int(data['block_number'])),
        block_hash=BlockHash(deserialize_bytes(data['block_hash'])),
    )
def test_channel_closed_event_handler_leaves_existing_channel(context: Context):
    """A close event for an unknown channel id must not touch the stored channel.

    The open channel created by the setup helper has a different identifier
    than the one named in the event, so the handler must ignore the event and
    leave the channel OPENED.
    """
    context = setup_state_with_open_channel(context)
    event = ReceiveChannelClosedEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=ChannelID(4),
        closing_participant=DEFAULT_PARTICIPANT2,
        block_number=BlockNumber(52),
    )
    channel_closed_event_handler(event, context)

    # The sibling handler tests access the store as ``context.database``;
    # using the same attribute here keeps the Context interface consistent.
    assert context.database.channel_count() == 1
    assert_channel_state(context, ChannelState.OPENED)
def test_get_initial_payment_for_final_target_amount(
    flat_fee: FeeAmount,
    prop_fee: ProportionalFeeAmount,
    balance: TokenAmount,
    final_amount: PaymentAmount,
    initial_amount: PaymentWithFeeAmount,
    expected_fees: List[FeeAmount],
):
    """Two-hop fee calculation: the initial amount required so that
    ``final_amount`` arrives after mediation fees must equal ``initial_amount``.

    Parametrized; concrete values come from the test's parameters/fixtures.
    """
    # Convert the parts-per-million proportional fee into the per-channel form.
    prop_fee = ppm_fee_per_channel(prop_fee)
    channel_set = make_channel_set([
        # Incoming channel: capacity sits on the partner's side.
        NettingChannelStateProperties(
            canonical_identifier=factories.create(
                CanonicalIdentifierProperties(
                    channel_identifier=ChannelID(1))),
            our_state=NettingChannelEndStateProperties(balance=TokenAmount(0)),
            partner_state=NettingChannelEndStateProperties(balance=balance),
            fee_schedule=FeeScheduleState(flat=flat_fee, proportional=prop_fee),
        ),
        # Outgoing channel: capacity sits on our side.
        NettingChannelStateProperties(
            canonical_identifier=factories.create(
                CanonicalIdentifierProperties(
                    channel_identifier=ChannelID(2))),
            our_state=NettingChannelEndStateProperties(balance=balance),
            partner_state=NettingChannelEndStateProperties(
                balance=TokenAmount(0)),
            fee_schedule=FeeScheduleState(flat=flat_fee, proportional=prop_fee),
        ),
    ])
    calculation = get_initial_amount_for_amount_after_fees(
        amount_after_fees=final_amount,
        channels=[(channel_set.channels[0], channel_set.channels[1])],
    )
    assert calculation is not None
    assert calculation.total_amount == initial_amount
    assert calculation.mediation_fees == expected_fees
def from_dict(cls, data: Dict[str, Any]) -> 'ContractSendChannelBatchUnlock':
    """Deserialize a ContractSendChannelBatchUnlock event from a dict."""
    triggered_by = BlockHash(deserialize_bytes(data['triggered_by_block_hash']))
    return cls(
        token_address=to_canonical_address(data['token_address']),
        token_network_identifier=to_canonical_address(data['token_network_identifier']),
        channel_identifier=ChannelID(int(data['channel_identifier'])),
        participant=to_canonical_address(data['participant']),
        triggered_by_block_hash=triggered_by,
    )
def test_channel_constraints(pathfinding_service_mock): """Regression test for https://github.com/raiden-network/raiden-services/issues/693""" # Participants need to be ordered parts = sorted([make_address(), make_address()]) token_network_address1 = make_token_network_address() token_network_address2 = make_token_network_address() # register token network internally database = pathfinding_service_mock.database database.upsert_token_network(token_network_address1) database.upsert_token_network(token_network_address2) channel1 = Channel( token_network_address=token_network_address1, channel_id=ChannelID(1), participant1=parts[0], participant2=parts[1], settle_timeout=BlockTimeout(100), ) channel2 = Channel( token_network_address=token_network_address2, channel_id=ChannelID(1), participant1=parts[0], participant2=parts[1], settle_timeout=BlockTimeout(100), ) # Test `upsert_channel` and `get_channels` database.upsert_channel(channel1) assert [chan.channel_id for chan in database.get_channels()] == [channel1.channel_id] database.upsert_channel(channel2) assert [chan.channel_id for chan in database.get_channels()] == [ channel1.channel_id, channel2.channel_id, ]
def from_dict(cls, data: Dict[str, Any]) -> 'SendSecretRequest':
    """Rebuild a SendSecretRequest event from its serialized dict form."""
    secrethash = SecretHash(serialization.deserialize_bytes(data['secrethash']))
    return cls(
        recipient=to_canonical_address(data['recipient']),
        channel_identifier=ChannelID(int(data['channel_identifier'])),
        message_identifier=MessageID(int(data['message_identifier'])),
        payment_identifier=PaymentID(int(data['payment_identifier'])),
        amount=TokenAmount(int(data['amount'])),
        expiration=BlockExpiration(int(data['expiration'])),
        secrethash=secrethash,
    )
def from_dict(cls, data: Dict[str, Any]) -> 'ContractSendChannelClose':
    """Deserialize a ContractSendChannelClose event from a dict."""
    # The chain id is not part of the serialized data, so it stays unspecified.
    canonical_identifier = CanonicalIdentifier(
        chain_identifier=CHAIN_ID_UNSPECIFIED,
        token_network_address=to_canonical_address(data['token_network_identifier']),
        channel_identifier=ChannelID(int(data['channel_identifier'])),
    )
    return cls(
        canonical_identifier=canonical_identifier,
        token_address=to_canonical_address(data['token_address']),
        balance_proof=data['balance_proof'],
        triggered_by_block_hash=BlockHash(deserialize_bytes(data['triggered_by_block_hash'])),
    )
def test_pfs_rejects_capacity_update_with_wrong_channel_identifier(
    pathfinding_service_web3_mock: PathfindingService,
):
    """A capacity update for a channel id the PFS does not know gets deferred."""
    setup_channel(pathfinding_service_web3_mock)

    # Channel id 35 was never opened on the mocked service.
    unknown_channel_message = get_capacity_update_message(
        channel_identifier=ChannelID(35),
        updating_participant=PRIVATE_KEY_1_ADDRESS,
        other_participant=PRIVATE_KEY_2_ADDRESS,
        privkey_signer=PRIVATE_KEY_1,
    )
    with pytest.raises(DeferMessage):
        pathfinding_service_web3_mock.on_capacity_update(unknown_channel_message)
def from_dict(cls, data: Dict[str, Any]) -> 'ContractSendChannelUpdateTransfer':
    """Deserialize a ContractSendChannelUpdateTransfer event from a dict."""
    triggered_by = BlockHash(deserialize_bytes(data['triggered_by_block_hash']))
    return cls(
        expiration=int(data['expiration']),
        channel_identifier=ChannelID(int(data['channel_identifier'])),
        token_network_identifier=to_canonical_address(data['token_network_identifier']),
        balance_proof=data['balance_proof'],
        triggered_by_block_hash=triggered_by,
    )
def open_channel(self, partner_address: Address) -> ChannelID:
    """Opens channel with a single partner

    Sends an on-chain ``openChannel`` transaction, decodes the resulting
    ``ChannelOpened`` event, records the channel id locally and mirrors the
    channel on the partner's client object.

    Parameters:
        partner_address - a valid ethereum address of the partner

    Return:
        channel_id - id of the channel
    """
    assert is_checksum_address(partner_address)
    assert partner_address in self.client_registry
    # disallow multiple open channels with a same partner
    if partner_address in self.partner_to_channel_id:
        return self.partner_to_channel_id[partner_address]
    # if it doesn't exist, register new channel
    # NOTE(review): 15 is presumably the settle timeout in blocks — confirm
    # against the contract's openChannel signature.
    txid = self.contract.functions.openChannel(self.address, partner_address, 15).transact(
        {"from": self.address}
    )
    assert txid is not None
    tx = self.web3.eth.getTransactionReceipt(txid)
    assert tx is not None
    # Exactly one log entry is expected: the ChannelOpened event.
    assert len(tx["logs"]) == 1
    event = get_event_data(
        find_matching_event_abi(self.contract.abi, "ChannelOpened"), tx["logs"][0]
    )
    channel_id = event["args"]["channel_identifier"]
    assert isinstance(channel_id, T_ChannelID)
    assert 0 < channel_id <= UINT256_MAX
    # Sanity-check that both we and the partner appear as participants
    # (the contract may list them in either order).
    assert is_same_address(event["args"]["participant1"], self.address) or is_same_address(
        event["args"]["participant2"], self.address
    )
    assert is_same_address(event["args"]["participant1"], partner_address) or is_same_address(
        event["args"]["participant2"], partner_address
    )
    self.partner_to_channel_id[partner_address] = ChannelID(channel_id)
    # Mirror the channel on the partner's client object.
    self.client_registry[partner_address].open_channel(self.address)
    return ChannelID(channel_id)
def test_pfs_rejects_capacity_update_with_wrong_channel_identifier(
        pathfinding_service_web3_mock: PathfindingService):
    """An update naming an unknown channel id raises InvalidCapacityUpdate."""
    setup_channel(pathfinding_service_web3_mock)

    # Channel id 35 does not exist in the token network set up above.
    bad_message = get_updatepfs_message(
        channel_identifier=ChannelID(35),
        updating_participant=PRIVATE_KEY_1_ADDRESS,
        other_participant=PRIVATE_KEY_2_ADDRESS,
        privkey_signer=PRIVATE_KEY_1,
    )
    with pytest.raises(InvalidCapacityUpdate) as exinfo:
        pathfinding_service_web3_mock.on_pfs_update(bad_message)
    assert "unknown channel identifier in token network" in str(exinfo.value)
def test_logging_processor():
    # test if our logging processor changes bytes to checksum addresses
    # even if bytes-addresses are entangled into events
    logger = Mock()
    log_method = Mock()

    # A plain bytes address directly in the event dict is checksummed.
    address = b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd9"
    address_log = format_to_hex(
        _logger=logger, _log_method=log_method, event_dict=dict(address=address)
    )
    assert to_checksum_address(address) == address_log["address"]

    # Addresses nested inside a dataclass event are converted as well.
    address2 = b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd1"
    event = ReceiveTokenNetworkCreatedEvent(
        token_address=Address(address),
        token_network_address=TokenNetworkAddress(address2),
        block_number=BlockNumber(1),
    )
    event_log = format_to_hex(_logger=logger, _log_method=log_method, event_dict=dict(event=event))
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address) == event_log["event"]["token_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address2) == event_log["event"]["token_network_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        event_log["event"]["type_name"] == "ReceiveTokenNetworkCreatedEvent"
    )

    # Addresses nested two levels deep inside a message are converted, too.
    message = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=TokenNetworkAddress(address),
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    message_log = format_to_hex(
        _logger=logger, _log_method=log_method, event_dict=dict(message=message)
    )
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address)
        == message_log["message"]["canonical_identifier"]["token_network_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        message_log["message"]["type_name"] == "PFSFeeUpdate"
    )
def _canonical_id_from_string(string: str) -> CanonicalIdentifier:
    """Parse a ``"<chain_id>|<token_network_address_hex>|<channel_id>"`` string
    back into a CanonicalIdentifier.

    Raises:
        ValueError: when the string does not have exactly three ``|``-separated
            fields or a numeric field cannot be parsed as an integer.
    """
    try:
        chain_id_str, token_network_address_hex, channel_id_str = string.split("|")
        return CanonicalIdentifier(
            chain_identifier=ChainID(int(chain_id_str)),
            token_network_address=to_bytes(hexstr=token_network_address_hex),
            channel_identifier=ChannelID(int(channel_id_str)),
        )
    except ValueError as ex:
        # Chain the original error so the root cause (wrong field count or a
        # non-numeric field) stays visible in the traceback.
        raise ValueError(
            f"Could not reconstruct canonical identifier from string: {string}"
        ) from ex
def test_update_fee(order, pathfinding_service_mock, token_network_model):
    """Fee updates are applied to the channel view; receiving one before the
    channel exists raises (and the exception is counted in the metrics).

    ``order`` selects whether the channel is set up before ("normal") or after
    ("fee_update_before_channel_open") the fee update arrives.
    """
    metrics_state = save_metrics_state(metrics.REGISTRY)
    pathfinding_service_mock.database.insert(
        "token_network", dict(address=token_network_model.address)
    )
    if order == "normal":
        setup_channel(pathfinding_service_mock, token_network_model)
        exception_expected = False
    else:
        # Channel does not exist yet -> handling the update should fail.
        exception_expected = True

    fee_schedule = FeeScheduleState(
        flat=FeeAmount(1),
        proportional=ProportionalFeeAmount(int(0.1e9)),
        imbalance_penalty=[(TokenAmount(0), FeeAmount(0)), (TokenAmount(10), FeeAmount(10))],
    )
    fee_update = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(61),
            token_network_address=token_network_model.address,
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        fee_schedule=fee_schedule,
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    fee_update.sign(LocalSigner(PARTICIPANT1_PRIVKEY))
    pathfinding_service_mock.handle_message(fee_update)

    # Test for metrics having seen the processing of the message
    assert (
        metrics_state.get_delta(
            "messages_processing_duration_seconds_sum",
            labels={"message_type": "PFSFeeUpdate"},
        )
        > 0.0
    )
    assert metrics_state.get_delta(
        "messages_exceptions_total", labels={"message_type": "PFSFeeUpdate"}
    ) == float(exception_expected)

    if order == "fee_update_before_channel_open":
        setup_channel(pathfinding_service_mock, token_network_model)

    # The sender-side fee schedule on the channel view must match the update.
    cv = token_network_model.G[PARTICIPANT1][PARTICIPANT2]["view"]
    for key in ("flat", "proportional", "imbalance_penalty"):
        assert getattr(cv.fee_schedule_sender, key) == getattr(fee_schedule, key)
def test_tn_multiple_channels_for_two_participants_opened(
    token_network_model: TokenNetwork, addresses: List[Address]
):
    """Two channels between the same participant pair are tracked independently."""
    for identifier in (1, 2):
        token_network_model.handle_channel_opened_event(
            channel_identifier=ChannelID(identifier),
            participant1=addresses[0],
            participant2=addresses[1],
            settle_timeout=15,
        )

    # Both channels must be registered.
    assert len(token_network_model.channel_id_to_addresses) == 2

    # Closing one of them must leave the other intact.
    token_network_model.handle_channel_closed_event(channel_identifier=ChannelID(1))
    assert len(token_network_model.channel_id_to_addresses) == 1
def test_purge_old_monitor_requests(
    ms_database: Database,
    build_request_monitoring,
    request_collector,
    monitoring_service: MonitoringService,
):
    """Only monitor requests that are both old AND still waiting for their
    channel get purged; recent ones and ones whose channel exists are kept."""
    # We'll test the purge on MRs for three different channels
    req_mons = [
        build_request_monitoring(channel_id=1),
        build_request_monitoring(channel_id=2),
        build_request_monitoring(channel_id=3),
    ]
    for req_mon in req_mons:
        request_collector.on_monitor_request(req_mon)

    # Channel 1 exists in the db
    token_network_address = req_mons[0].balance_proof.token_network_address
    ms_database.conn.execute("INSERT INTO token_network VALUES (?)",
                             [to_checksum_address(token_network_address)])
    ms_database.upsert_channel(
        Channel(
            identifier=ChannelID(1),
            token_network_address=token_network_address,
            participant1=Address(b"1" * 20),
            participant2=Address(b"2" * 20),
            settle_timeout=10,
        ))

    # The request for channel 2 is recent (default), but the one for channel 3
    # has been added 16 minutes ago.
    ms_database.conn.execute(
        """
        UPDATE monitor_request
        SET saved_at = datetime('now', '-16 minutes')
        WHERE channel_identifier = ?
        """,
        [hex256(3)],
    )
    monitoring_service._purge_old_monitor_requests()  # pylint: disable=protected-access

    # Channel 3's MR was stale and still waiting, so it is gone; 1 and 2 remain.
    remaining_mrs = ms_database.conn.execute("""
        SELECT channel_identifier, waiting_for_channel
        FROM monitor_request ORDER BY channel_identifier
    """).fetchall()
    assert [tuple(mr) for mr in remaining_mrs] == [(1, False), (2, True)]
def populate_token_network_random(token_network_model: TokenNetwork, private_keys: List[str]) -> None:
    """Fill ``token_network_model`` with pseudo-random channels and deposits.

    Deterministic: the RNG is seeded with NUMBER_OF_CHANNELS, so successive
    runs produce the same network topology.
    """
    # seed for pseudo-randomness from config constant, that changes from time to time
    random.seed(NUMBER_OF_CHANNELS)

    for channel_id_int in range(NUMBER_OF_CHANNELS):
        channel_id = ChannelID(channel_id_int)

        private_key1, private_key2 = random.sample(private_keys, 2)
        address1 = Address(private_key_to_address(private_key1))
        address2 = Address(private_key_to_address(private_key2))
        settle_timeout = 15
        token_network_model.handle_channel_opened_event(
            channel_id, address1, address2, settle_timeout)

        # deposit to channels
        deposit1, deposit2 = random.sample(range(1000), 2)
        # Re-read the participants in the order the model stored them.
        address1, address2 = token_network_model.channel_id_to_addresses[
            channel_id]
        token_network_model.handle_channel_new_deposit_event(
            channel_id, address1, deposit1)
        token_network_model.handle_channel_new_deposit_event(
            channel_id, address2, deposit2)
        token_network_model.handle_channel_balance_update_message(
            channel_identifier=channel_id,
            updating_participant=address1,
            other_participant=address2,
            updating_nonce=Nonce(1),
            other_nonce=Nonce(1),
            updating_capacity=TokenAmount(deposit1),
            other_capacity=TokenAmount(deposit2),
            reveal_timeout=2,
            mediation_fee=FeeAmount(0),
        )
        # NOTE(review): this update from address2's side passes
        # updating_capacity=deposit1 / other_capacity=deposit2 — the mirror of
        # the call above would be deposit2 / deposit1. Possibly intentional
        # for random test data, but worth confirming.
        token_network_model.handle_channel_balance_update_message(
            channel_identifier=channel_id,
            updating_participant=address2,
            other_participant=address1,
            updating_nonce=Nonce(2),
            other_nonce=Nonce(1),
            updating_capacity=TokenAmount(deposit1),
            other_capacity=TokenAmount(deposit2),
            reveal_timeout=2,
            mediation_fee=FeeAmount(0),
        )
def test_token_channel_opened(pathfinding_service_mock, token_network_model):
    """An open event for an unregistered token network address is ignored."""
    setup_channel(pathfinding_service_mock, token_network_model)
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 1

    # The zero address is not a registered token network, so handling this
    # event must not create a new channel or network.
    bogus_event = ReceiveChannelOpenedEvent(
        token_network_address=TokenNetworkAddress("0x" + "0" * 40),
        channel_identifier=ChannelID(1),
        participant1=PARTICIPANT1,
        participant2=PARTICIPANT2,
        settle_timeout=20,
        block_number=BlockNumber(1),
    )
    pathfinding_service_mock.handle_event(bogus_event)
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 1
def test_invalid_fee_update(pathfinding_service_mock, token_network_model):
    """A fee update whose signature does not match the sender is rejected."""
    setup_channel(pathfinding_service_mock, token_network_model)

    unsigned_update = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=token_network_model.address,
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.now(timezone.utc),
        signature=EMPTY_SIGNATURE,
    )
    # bad/missing signature
    with pytest.raises(exceptions.InvalidPFSFeeUpdate):
        pathfinding_service_mock.on_fee_update(unsigned_update)
def test_prometheus_event_handling_no_exceptions(pathfinding_service_mock_empty):
    """Successful event handling records processing time and zero exceptions."""
    metrics_state = save_metrics_state(metrics.REGISTRY)
    pfs = pathfinding_service_mock_empty

    token_address = TokenAddress(bytes([1] * 20))
    token_network_address = TokenNetworkAddress(bytes([2] * 20))
    channel_id = ChannelID(1)
    p1 = Address(bytes([3] * 20))
    p2 = Address(bytes([4] * 20))
    events = [
        ReceiveTokenNetworkCreatedEvent(
            token_address=token_address,
            token_network_address=token_network_address,
            block_number=BlockNumber(1),
        ),
        ReceiveChannelOpenedEvent(
            token_network_address=token_network_address,
            channel_identifier=channel_id,
            participant1=p1,
            participant2=p2,
            settle_timeout=BlockTimeout(10),
            block_number=BlockNumber(2),
        ),
    ]
    for event in events:
        pfs.handle_event(event)

        # check that we have non-zero processing time for the events we created
        assert (
            metrics_state.get_delta(
                "events_processing_duration_seconds_sum",
                labels={"event_type": event.__class__.__name__},
            )
            > 0.0
        )
        # there should be no exception raised
        assert (
            metrics_state.get_delta(
                "events_exceptions_total", labels={"event_type": event.__class__.__name__}
            )
            == 0.0
        )
def test_channel_closed_event_handler_channel_not_in_database(context: Context):
    """Closing an unknown channel stores nothing and logs a STATE error metric."""
    metrics_state = save_metrics_state(metrics.REGISTRY)

    # Only the token network is registered; no channels exist yet.
    create_default_token_network(context)
    close_event = ReceiveChannelClosedEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=ChannelID(4),
        closing_participant=DEFAULT_PARTICIPANT2,
        block_number=BlockNumber(52),
    )
    assert context.database.channel_count() == 0

    channel_closed_event_handler(close_event, context)

    assert context.database.channel_count() == 0
    error_delta = metrics_state.get_delta(
        "events_log_errors_total", labels=metrics.ErrorCategory.STATE.to_label_dict()
    )
    assert error_delta == 1.0
def request_monitoring_message(token_network, get_accounts, get_private_key) -> RequestMonitoring:
    """Fixture: a RequestMonitoring where account c2 signs the balance proof
    and c1 (the requesting client) signs the monitoring request itself."""
    c1, c2 = get_accounts(2)

    # Balance proof for channel 1 of the given token network, signed by c2.
    balance_proof_c2 = HashedBalanceProof(
        channel_identifier=ChannelID(1),
        token_network_address=TokenNetworkAddress(to_canonical_address(token_network.address)),
        chain_id=ChainID(61),
        nonce=Nonce(2),
        additional_hash="0x%064x" % 0,
        transferred_amount=TokenAmount(1),
        locked_amount=TokenAmount(0),
        locksroot=encode_hex(LOCKSROOT_OF_NO_LOCKS),
        priv_key=get_private_key(c2),
    )
    return balance_proof_c2.get_request_monitoring(
        privkey=get_private_key(c1),
        reward_amount=TokenAmount(1),
        monitoring_service_contract_address=MonitoringServiceAddress(bytes([11] * 20)),
    )
def __init__(self, channels: List[dict]): super().__init__(token_network_address=TokenNetworkAddress(a(255))) # open channels for chan in channels: self.handle_channel_opened_event( channel_identifier=ChannelID(100), participant1=a(chan["participant1"]), participant2=a(chan["participant2"]), settle_timeout=100, ) # set default capacity for _, _, cv in self.G.edges(data="view"): cv.capacity = 100 # create reachability mapping for testing self.address_to_reachability: Dict[Address, AddressReachability] = { node: AddressReachability.REACHABLE for node in self.G.nodes }
def test_update_fee(pathfinding_service_mock, token_network_model):
    """A FeeUpdate message sets the sender-side fee schedule on the channel view."""
    setup_channel(pathfinding_service_mock, token_network_model)

    schedule = FeeSchedule(
        flat=FeeAmount(1),
        proportional=0.1,
        imbalance_penalty=[(TokenAmount(0), FeeAmount(0)), (TokenAmount(10), FeeAmount(10))],
    )
    update = FeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(1),
            token_network_address=token_network_model.address,
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        other_participant=PARTICIPANT2,
        fee_schedule=schedule,
    )
    pathfinding_service_mock.handle_message(update)

    channel_view = token_network_model.G[PARTICIPANT1][PARTICIPANT2]["view"]
    assert channel_view.fee_schedule_sender == schedule
def test_token_channel_closed(pathfinding_service_mock, token_network_model):
    """Close events are applied only when the token network address matches."""
    setup_channel(pathfinding_service_mock, token_network_model)

    close_event = ReceiveChannelClosedEvent(
        token_network_address=TokenNetworkAddress("0x" + "0" * 40),
        channel_identifier=ChannelID(1),
        closing_participant=PARTICIPANT1,
        block_number=BlockNumber(2),
    )
    # Unknown token network address: the channel must survive.
    pathfinding_service_mock.handle_event(close_event)
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 1

    # Correct token network address: the channel is removed.
    close_event.token_network_address = token_network_model.address
    pathfinding_service_mock.handle_event(close_event)
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 0
def test_token_channel_new_deposit(pathfinding_service_mock, token_network_model):
    """Deposit events are applied only for known token network addresses."""
    setup_channel(pathfinding_service_mock, token_network_model)

    deposit_event = ReceiveChannelNewDepositEvent(
        token_network_address=token_network_model.address,
        channel_identifier=ChannelID(1),
        participant_address=PARTICIPANT1,
        total_deposit=TokenAmount(123),
        block_number=BlockNumber(2),
    )
    pathfinding_service_mock.handle_event(deposit_event)
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 1

    # An unknown token network address must leave the model untouched.
    deposit_event.token_network_address = TokenNetworkAddress("0x" + "0" * 40)
    pathfinding_service_mock.handle_event(deposit_event)
    assert len(pathfinding_service_mock.token_networks) == 1
    assert len(token_network_model.channel_id_to_addresses) == 1