def test_action_monitoring_triggered_event_handler_does_not_trigger_monitor_call_when_nonce_to_small(  # noqa
    context: Context,
):
    """The MS must not call monitor() when the stored MR is outdated.

    A balance proof with nonce 5 is registered on-chain first; the monitor
    request persisted afterwards only carries nonce 4, so submitting it could
    not improve the on-chain state and must be skipped.
    """
    context = setup_state_with_closed_channel(context)

    event3 = ReceiveMonitoringNewBalanceProofEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
        reward_amount=TokenAmount(1),
        nonce=Nonce(5),
        ms_address=Address(bytes([3] * 20)),
        raiden_node_address=DEFAULT_PARTICIPANT2,
        block_number=BlockNumber(23),
    )

    channel = context.db.get_channel(event3.token_network_address, event3.channel_identifier)
    assert channel
    assert channel.update_status is None

    monitor_new_balance_proof_event_handler(event3, context)

    # add MR to DB, with nonce being smaller than in event3
    context.db.upsert_monitor_request(create_signed_monitor_request(nonce=Nonce(4)))

    event4 = ActionMonitoringTriggeredEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
        non_closing_participant=DEFAULT_PARTICIPANT2,
    )

    channel = context.db.get_channel(event4.token_network_address, event4.channel_identifier)
    assert channel
    assert channel.update_status is not None
    assert channel.monitor_tx_hash is None

    action_monitoring_triggered_event_handler(event4, context)

    assert context.db.channel_count() == 1
    # Re-fetch the channel from the database: asserting on the stale
    # in-memory object would not detect a monitor_tx_hash written to the
    # database by the handler.
    channel = context.db.get_channel(event4.token_network_address, event4.channel_identifier)
    assert channel
    assert channel.monitor_tx_hash is None
def test_channel_bp_updated_event_handler_sets_update_status_if_not_set(context: Context,):
    """Receiving non-closing balance proof updates must create the channel's
    update_status and keep it in sync with the latest nonce."""
    context = setup_state_with_closed_channel(context)

    def make_update_event(nonce: int, block_number: int) -> ReceiveNonClosingBalanceProofUpdatedEvent:
        # All events target the default closed channel.
        return ReceiveNonClosingBalanceProofUpdatedEvent(
            token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
            channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
            closing_participant=DEFAULT_PARTICIPANT2,
            nonce=Nonce(nonce),
            block_number=BlockNumber(block_number),
        )

    def fetch_channel():
        return context.db.get_channel(DEFAULT_TOKEN_NETWORK_ADDRESS, DEFAULT_CHANNEL_IDENTIFIER)

    first_update = make_update_event(nonce=2, block_number=23)

    channel = fetch_channel()
    assert channel
    assert channel.update_status is None

    non_closing_balance_proof_updated_event_handler(first_update, context)

    assert context.db.channel_count() == 1
    channel = fetch_channel()
    assert channel
    assert channel.update_status is not None
    assert channel.update_status.nonce == 2
    assert channel.update_status.update_sender_address == DEFAULT_PARTICIPANT1

    # A second update with a higher nonce must overwrite the stored status.
    second_update = make_update_event(nonce=5, block_number=53)
    non_closing_balance_proof_updated_event_handler(second_update, context)

    assert context.db.channel_count() == 1
    channel = fetch_channel()
    assert channel
    assert channel.update_status is not None
    assert channel.update_status.nonce == 5
    assert channel.update_status.update_sender_address == DEFAULT_PARTICIPANT1
class ChannelView:
    """ Unidirectional view of a bidirectional channel.

    Holds the state needed for routing in one direction of a channel
    (participant1 -> participant2, per __repr__'s from/to): capacity,
    timeouts, deposit and an absolute + relative mediation fee.
    """

    token_network_address: TokenNetworkAddress
    channel_id: ChannelID
    participant1: Address  # the "from" end of this view
    participant2: Address  # the "to" end of this view
    settle_timeout: int
    # Only `None` until __post_init__ runs, where it defaults to `deposit`.
    capacity: TokenAmount = None  # type: ignore
    reveal_timeout: int = DEFAULT_REVEAL_TIMEOUT
    deposit: TokenAmount = TokenAmount(0)
    update_nonce: Nonce = Nonce(0)
    absolute_fee: FeeAmount = FeeAmount(0)
    relative_fee: float = 0  # fee as a fraction of the transferred amount
    Schema: ClassVar[Type[marshmallow.Schema]]

    def __post_init__(self) -> None:
        # Participants must be given as checksummed address strings.
        assert is_checksum_address(self.participant1)
        assert is_checksum_address(self.participant2)
        if self.capacity is None:
            self.capacity = self.deposit

    # TODO: define another function update_deposit
    def update_capacity(
        self,
        nonce: Nonce = Nonce(0),
        capacity: TokenAmount = TokenAmount(0),
        reveal_timeout: int = None,
        deposit: TokenAmount = None,
        mediation_fee: FeeAmount = FeeAmount(0),
    ) -> None:
        """Overwrite this view's state from a capacity update.

        NOTE(review): when `deposit` is given, the full deposit is added on
        top of the freshly assigned `capacity` (not the delta against the
        previous deposit) — confirm this is the intended semantics, see the
        FIXME below.
        """
        self.update_nonce = nonce
        self.capacity = capacity
        if reveal_timeout is not None:
            self.reveal_timeout = reveal_timeout
        # FIXME: think about edge cases
        if deposit is not None:
            self.deposit = deposit
            if self.capacity is not None:
                self.capacity = TokenAmount(self.capacity + deposit)

        self.absolute_fee = mediation_fee

    def fee(self, amount: TokenAmount) -> int:
        """Return the mediation fee for this channel when transferring the given amount"""
        return int(self.absolute_fee + amount * self.relative_fee)

    def __repr__(self) -> str:
        return "<ChannelView from={} to={} capacity={}>".format(
            self.participant1, self.participant2, self.capacity)
def handle_contract_send_channelclose(
    raiden: "RaidenService",
    chain_state: ChainState,
    channel_close_event: ContractSendChannelClose,
) -> None:
    """Sign the closing data and submit the on-chain close transaction for
    the channel referenced by ``channel_close_event``.

    Raises:
        RaidenUnrecoverableError: if the channel is unknown to ``chain_state``.
    """
    proof = channel_close_event.balance_proof
    if proof:
        nonce = proof.nonce
        balance_hash = proof.balance_hash
        partner_signature = proof.signature
        message_hash = proof.message_hash
        canonical_identifier = proof.canonical_identifier
    else:
        # No balance proof received yet: close with the sentinel empty values.
        nonce = Nonce(0)
        balance_hash = EMPTY_BALANCE_HASH
        partner_signature = EMPTY_SIGNATURE
        message_hash = EMPTY_MESSAGE_HASH
        canonical_identifier = channel_close_event.canonical_identifier

    closing_data = pack_signed_balance_proof(
        msg_type=MessageTypeId.BALANCE_PROOF,
        nonce=nonce,
        balance_hash=balance_hash,
        additional_hash=message_hash,
        canonical_identifier=canonical_identifier,
        partner_signature=partner_signature,
    )
    our_signature = raiden.signer.sign(data=closing_data)

    confirmed_block_identifier = state_from_raiden(raiden).block_hash
    channel_state = get_channelstate_by_canonical_identifier(
        chain_state=chain_state,
        canonical_identifier=channel_close_event.canonical_identifier,
    )
    if channel_state is None:
        raise RaidenUnrecoverableError("ContractSendChannelClose for non-existing channel.")

    channel_proxy = raiden.proxy_manager.payment_channel(
        channel_state=channel_state, block_identifier=confirmed_block_identifier
    )
    channel_proxy.close(
        nonce=nonce,
        balance_hash=balance_hash,
        additional_hash=message_hash,
        non_closing_signature=partner_signature,
        closing_signature=our_signature,
        block_identifier=channel_close_event.triggered_by_block_hash,
    )
class ChannelView:
    """ Unidirectional view of a bidirectional channel.

    Holds the routing state for one direction of a channel
    (participant1 -> participant2, per __repr__'s from/to): capacity,
    deposit, timeouts and both parties' fee schedules.
    """

    channel_id: ChannelID
    participant1: Address = field(
        metadata={"marshmallow_field": ChecksumAddress(required=True)})
    participant2: Address = field(
        metadata={"marshmallow_field": ChecksumAddress(required=True)})
    settle_timeout: int
    token_network_address: TokenNetworkAddress = field(
        metadata={"marshmallow_field": ChecksumAddress(required=True)})
    # Only `None` until __post_init__ runs, where it defaults to `deposit`.
    capacity: TokenAmount = None  # type: ignore
    reveal_timeout: int = DEFAULT_REVEAL_TIMEOUT
    deposit: TokenAmount = TokenAmount(0)
    update_nonce: Nonce = Nonce(0)
    fee_schedule_sender: FeeSchedule = field(default_factory=FeeSchedule)
    fee_schedule_receiver: FeeSchedule = field(default_factory=FeeSchedule)
    Schema: ClassVar[Type[marshmallow.Schema]]

    def __post_init__(self) -> None:
        if self.capacity is None:
            self.capacity = self.deposit

    def update_deposit(self, total_deposit: TokenAmount) -> None:
        """Record a new on-chain total deposit; capacity grows by the delta.

        Values not larger than the current deposit are ignored.
        """
        if total_deposit > self.deposit:
            self.capacity = TokenAmount(self.capacity + total_deposit - self.deposit)
            self.deposit = TokenAmount(total_deposit)

    def update_capacity(self, capacity: TokenAmount, nonce: Nonce = Nonce(0),
                        reveal_timeout: int = None) -> None:
        """Overwrite capacity (and optionally reveal_timeout); `nonce` is the
        sequence number of the update message that carried the values."""
        self.update_nonce = nonce
        self.capacity = capacity
        if reveal_timeout is not None:
            self.reveal_timeout = reveal_timeout

    def fee_sender(self, amount: TokenAmount) -> FeeAmount:
        """Return the mediation fee for this channel when transferring the given amount"""
        return self.fee_schedule_sender.fee(amount, self.capacity)

    def fee_receiver(self, amount: TokenAmount) -> FeeAmount:
        """Return the mediation fee for this channel when receiving the given amount"""
        return self.fee_schedule_receiver.fee(amount, self.capacity)

    def __repr__(self) -> str:
        return "<ChannelView from={} to={} capacity={}>".format(
            self.participant1, self.participant2, self.capacity)
def test_received_lockedtransfer_closedchannel(raiden_network, reveal_timeout, token_addresses, deposit):
    """A LockedTransfer received for an already closed channel must be
    ignored: the local channel state stays unchanged."""
    app0, app1 = raiden_network
    registry_address = app0.raiden.default_registry.address
    token_address = token_addresses[0]
    token_network_address = views.get_token_network_address_by_token_address(
        views.state_from_app(app0), app0.raiden.default_registry.address, token_address)
    assert token_network_address
    channel0 = get_channelstate(app0, app1, token_network_address)

    # app1 closes the channel on-chain, then we wait for one more block so the
    # close can be processed before the transfer arrives.
    RaidenAPI(app1.raiden).channel_close(registry_address, token_address, app0.raiden.address)

    app0.raiden.proxy_manager.wait_until_block(
        target_block_number=app0.raiden.rpc_client.block_number() + 1)

    # Now receive one mediated transfer for the closed channel
    lock_amount = TokenAmount(10)
    payment_identifier = PaymentID(1)
    expiration = reveal_timeout * 2
    mediated_transfer_message = LockedTransfer(
        chain_id=UNIT_CHAIN_ID,
        message_identifier=make_message_identifier(),
        payment_identifier=payment_identifier,
        nonce=Nonce(1),
        token_network_address=token_network_address,
        token=token_address,
        channel_identifier=channel0.identifier,
        transferred_amount=TokenAmount(0),
        locked_amount=lock_amount,
        recipient=app1.raiden.address,
        locksroot=make_locksroot(),
        lock=Lock(
            amount=PaymentWithFeeAmount(lock_amount),
            expiration=expiration,
            secrethash=UNIT_SECRETHASH,
        ),
        target=app1.raiden.address,
        initiator=app0.raiden.address,
        signature=EMPTY_SIGNATURE,
        metadata=Metadata(routes=[RouteMetadata(route=[app1.raiden.address])]),
    )

    sign_and_inject(mediated_transfer_message, app0.raiden.signer, app1)

    # The local state must not change since the channel is already closed
    assert_synced_channel_state(token_network_address, app0, deposit, [], app1, deposit, [])
def handle_contract_send_channelclose(
    raiden: "RaidenService",
    chain_state: ChainState,
    channel_close_event: ContractSendChannelClose,
) -> None:
    """Sign the closing data and submit the on-chain close transaction for
    the channel referenced by ``channel_close_event``."""
    proof = channel_close_event.balance_proof
    if proof:
        nonce = proof.nonce
        balance_hash = proof.balance_hash
        partner_signature = proof.signature
        message_hash = proof.message_hash
        canonical_identifier = proof.canonical_identifier
    else:
        # No balance proof received yet: close with the sentinel empty values.
        nonce = Nonce(0)
        balance_hash = EMPTY_BALANCE_HASH
        partner_signature = EMPTY_SIGNATURE
        message_hash = EMPTY_MESSAGE_HASH
        canonical_identifier = channel_close_event.canonical_identifier

    closing_data = pack_signed_balance_proof(
        msg_type=MessageTypeId.BALANCE_PROOF,
        nonce=nonce,
        balance_hash=balance_hash,
        additional_hash=message_hash,
        canonical_identifier=canonical_identifier,
        partner_signature=partner_signature,
    )
    our_signature = raiden.signer.sign(data=closing_data)

    channel_proxy = raiden.proxy_manager.payment_channel(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_state.chain_id,
            token_network_address=channel_close_event.token_network_address,
            channel_identifier=channel_close_event.channel_identifier,
        )
    )
    channel_proxy.close(
        nonce=nonce,
        balance_hash=balance_hash,
        additional_hash=message_hash,
        non_closing_signature=partner_signature,
        closing_signature=our_signature,
        block_identifier=channel_close_event.triggered_by_block_hash,
    )
def request_monitoring_message(token_network, get_accounts, get_private_key) -> RequestMonitoring:
    """Fixture: a RequestMonitoring for channel 1.

    The balance proof is signed by c2; the monitoring request on top of it
    (reward 1) is signed by c1.
    """
    c1, c2 = get_accounts(2)

    bp_signed_by_c2 = HashedBalanceProof(
        chain_id=ChainID(1),
        token_network_address=token_network.address,
        channel_identifier=ChannelID(1),
        nonce=Nonce(2),
        transferred_amount=TokenAmount(1),
        locked_amount=TokenAmount(0),
        locksroot=encode_hex(EMPTY_LOCKSROOT),
        additional_hash="0x%064x" % 0,
        priv_key=get_private_key(c2),
    )
    monitoring_request = bp_signed_by_c2.get_request_monitoring(
        privkey=get_private_key(c1), reward_amount=TokenAmount(1)
    )
    return monitoring_request
def update_capacity(
    self,
    nonce: Nonce = Nonce(0),
    capacity: TokenAmount = TokenAmount(0),
    reveal_timeout: int = None,
    deposit: TokenAmount = None,
    mediation_fee: FeeAmount = FeeAmount(0),
) -> None:
    """Overwrite this view's state from a capacity update.

    NOTE(review): when `deposit` is given, the full deposit is added on top
    of the freshly assigned `capacity` (not the delta against the previous
    deposit) — confirm this is the intended semantics, see the FIXME below.
    """
    self.update_nonce = nonce
    self.capacity = capacity
    if reveal_timeout is not None:
        self.reveal_timeout = reveal_timeout
    # FIXME: think about edge cases
    if deposit is not None:
        self.deposit = deposit
        if self.capacity is not None:
            self.capacity = TokenAmount(self.capacity + deposit)

    self.absolute_fee = mediation_fee
def update(self, amount, lock):
    """Add `lock` to the pending locks and grow the locked amount by
    `amount`, creating or advancing the balance proof properties."""
    self._pending_locks = channel.compute_locks_with(self._pending_locks, lock)
    assert self._pending_locks

    new_locksroot = compute_locksroot(self._pending_locks)
    if not self.properties:
        # First lock: start a fresh balance proof at nonce 1.
        self.properties = factories.BalanceProofProperties(
            transferred_amount=TokenAmount(0),
            locked_amount=amount,
            nonce=Nonce(1),
            locksroot=new_locksroot,
            canonical_identifier=self._canonical_identifier,
        )
        return

    # Subsequent lock: bump nonce and locked amount, refresh the locksroot.
    self.properties = factories.replace(
        self.properties,
        locked_amount=self.properties.locked_amount + amount,
        locksroot=new_locksroot,
        nonce=self.properties.nonce + 1,
    )
def test_receive_lockedtransfer_invalidrecipient(raiden_network, token_addresses, reveal_timeout, deposit):
    """A LockedTransfer whose recipient field does not match app1 must be
    dropped: neither side's channel state may change."""
    app0, app1 = raiden_network
    token_address = token_addresses[0]
    token_network_address = views.get_token_network_address_by_token_address(
        views.state_from_app(app0), app0.raiden.default_registry.address, token_address
    )
    assert token_network_address
    channel0 = get_channelstate(app0, app1, token_network_address)

    # Craft a transfer whose `recipient` points at a random third address.
    wrong_recipient = make_address()
    transfer_amount = TokenAmount(10)
    lock_expiration = reveal_timeout * 2
    bogus_transfer = LockedTransfer(
        chain_id=UNIT_CHAIN_ID,
        message_identifier=make_message_identifier(),
        payment_identifier=PaymentID(1),
        nonce=Nonce(1),
        token_network_address=token_network_address,
        token=token_address,
        channel_identifier=channel0.identifier,
        transferred_amount=TokenAmount(0),
        locked_amount=transfer_amount,
        recipient=wrong_recipient,
        locksroot=make_locksroot(),
        lock=Lock(
            amount=PaymentWithFeeAmount(transfer_amount),
            expiration=lock_expiration,
            secrethash=UNIT_SECRETHASH,
        ),
        target=app1.raiden.address,
        initiator=app0.raiden.address,
        signature=EMPTY_SIGNATURE,
        metadata=Metadata(routes=[RouteMetadata(route=[app1.raiden.address])]),
    )
    sign_and_inject(bogus_transfer, app0.raiden.signer, app1)

    # Nothing must have changed on either side.
    assert_synced_channel_state(token_network_address, app0, deposit, [], app1, deposit, [])
def test_monitor_new_balance_proof_event_handler_idempotency(context: Context):
    """Processing the same ReceiveMonitoringNewBalanceProofEvent twice must
    not schedule a second event nor change the stored update status."""
    context = setup_state_with_closed_channel(context)

    new_balance_event = ReceiveMonitoringNewBalanceProofEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
        reward_amount=TokenAmount(1),
        nonce=Nonce(2),
        ms_address=Address(bytes([3] * 20)),
        raiden_node_address=DEFAULT_PARTICIPANT2,
        block_number=BlockNumber(23),
    )

    def fetch_channel():
        return context.database.get_channel(
            new_balance_event.token_network_address, new_balance_event.channel_identifier)

    def assert_state_matches_event():
        # Exactly one scheduled event, one channel, and an update status
        # mirroring the event's nonce and sender.
        assert context.database.scheduled_event_count() == 1
        assert context.database.channel_count() == 1
        channel = fetch_channel()
        assert channel
        assert channel.update_status is not None
        assert channel.update_status.nonce == 2
        assert channel.update_status.update_sender_address == bytes([3] * 20)

    channel = fetch_channel()
    assert channel
    assert channel.update_status is None

    monitor_new_balance_proof_event_handler(new_balance_event, context)
    assert_state_matches_event()

    # Handling the identical event again must leave everything unchanged.
    monitor_new_balance_proof_event_handler(new_balance_event, context)
    assert_state_matches_event()
def handle_contract_send_channelclose(
    raiden: "RaidenService",
    chain_state: ChainState,
    channel_close_event: ContractSendChannelClose,
):
    """Submit the on-chain close transaction for ``channel_close_event``,
    delegating to close_light when a pre-signed close transaction is
    provided."""
    balance_proof = channel_close_event.balance_proof
    if balance_proof:
        nonce = balance_proof.nonce
        balance_hash = balance_proof.balance_hash
        signature = balance_proof.signature
        message_hash = balance_proof.message_hash
    else:
        # No balance proof received yet: close with the sentinel empty values.
        nonce = Nonce(0)
        balance_hash = EMPTY_BALANCE_HASH
        signature = EMPTY_SIGNATURE
        message_hash = EMPTY_MESSAGE_HASH

    channel_proxy = raiden.chain.payment_channel(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_state.chain_id,
            token_network_address=channel_close_event.token_network_identifier,
            channel_identifier=channel_close_event.channel_identifier,
        ))

    # Both close variants share the same balance-proof arguments.
    close_kwargs = dict(
        nonce=nonce,
        balance_hash=balance_hash,
        additional_hash=message_hash,
        signature=signature,
        block_identifier=channel_close_event.triggered_by_block_hash,
    )
    signed_close_tx = channel_close_event.signed_close_tx
    if signed_close_tx is None:
        channel_proxy.close(**close_kwargs)
    else:
        channel_proxy.close_light(signed_close_tx=signed_close_tx, **close_kwargs)
def request_monitoring_message(token_network, get_accounts, get_private_key) -> RequestMonitoring:
    """Fixture: a RequestMonitoring for channel 1.

    The balance proof is signed by c2; the monitoring request on top of it
    (reward 1) is signed by c1.
    """
    c1, c2 = get_accounts(2)

    bp_signed_by_c2 = HashedBalanceProof(
        chain_id=ChainID(61),
        token_network_address=TokenNetworkAddress(to_canonical_address(token_network.address)),
        channel_identifier=ChannelID(1),
        nonce=Nonce(2),
        transferred_amount=TokenAmount(1),
        locked_amount=TokenAmount(0),
        locksroot=encode_hex(LOCKSROOT_OF_NO_LOCKS),
        additional_hash="0x%064x" % 0,
        priv_key=get_private_key(c2),
    )
    return bp_signed_by_c2.get_request_monitoring(
        privkey=get_private_key(c1),
        reward_amount=TokenAmount(1),
        monitoring_service_contract_address=MonitoringServiceAddress(bytes([11] * 20)),
    )
def test_receive_lockedtransfer_invalidsender(
    raiden_network: List[RaidenService], token_addresses, deposit, reveal_timeout
):
    """A LockedTransfer signed by an address that has no channel with app0
    must be dropped without modifying the existing channel state."""
    app0, app1 = raiden_network
    token_address = token_addresses[0]
    stranger_key, stranger_address = make_privkey_address()
    token_network_address = views.get_token_network_address_by_token_address(
        views.state_from_raiden(app0), app0.default_registry.address, token_address
    )
    assert token_network_address
    channel0 = get_channelstate(app0, app1, token_network_address)

    amount_locked = LockedAmount(10)
    lock_expiration = reveal_timeout * 2
    forged_transfer = LockedTransfer(
        chain_id=UNIT_CHAIN_ID,
        message_identifier=make_message_identifier(),
        payment_identifier=PaymentID(1),
        nonce=Nonce(1),
        token_network_address=token_network_address,
        token=token_address,
        channel_identifier=channel0.identifier,
        transferred_amount=TokenAmount(0),
        locked_amount=amount_locked,
        recipient=app0.address,
        locksroot=make_locksroot(),
        lock=Lock(
            amount=PaymentWithFeeAmount(amount_locked),
            expiration=lock_expiration,
            secrethash=UNIT_SECRETHASH,
        ),
        target=TargetAddress(app0.address),
        initiator=InitiatorAddress(stranger_address),
        signature=EMPTY_SIGNATURE,
        metadata=Metadata(routes=[RouteMetadata(route=[app0.address])]),
    )
    # Sign with the stranger's key and deliver the message to app0.
    sign_and_inject(forged_transfer, LocalSigner(stranger_key), app0)

    assert_synced_channel_state(token_network_address, app0, deposit, [], app1, deposit, [])
def test_action_monitoring_rescheduling_when_user_lacks_funds(context: Context):
    """When the user's UDC balance cannot cover the reward, the monitor call
    must be skipped and the trigger event rescheduled; once funds suffice,
    monitor() must be called."""
    reward = TokenAmount(10)
    context = setup_state_with_closed_channel(context)
    context.database.upsert_monitor_request(
        create_signed_monitor_request(nonce=Nonce(6), reward_amount=reward))

    trigger_event = ActionMonitoringTriggeredEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
        non_closing_participant=DEFAULT_PARTICIPANT2,
    )
    events_before = context.database.get_scheduled_events(
        max_trigger_block=BlockNumber(10000))

    # Try to call monitor when the user has insufficient funds
    with patch("monitoring_service.handlers.get_pessimistic_udc_balance", Mock(return_value=0)):
        action_monitoring_triggered_event_handler(trigger_event, context)
    assert not context.monitoring_service_contract.functions.monitor.called

    # Now the event must have been rescheduled
    # TODO: check that the event is rescheduled to trigger at the right block
    events_after = context.database.get_scheduled_events(
        max_trigger_block=BlockNumber(10000))
    new_events = set(events_after) - set(events_before)
    assert len(new_events) == 1
    assert new_events.pop().event == trigger_event

    # With sufficient funds it must succeed
    with patch(
        "monitoring_service.handlers.get_pessimistic_udc_balance",
        Mock(return_value=reward * UDC_SECURITY_MARGIN_FACTOR_MS),
    ):
        action_monitoring_triggered_event_handler(trigger_event, context)
    assert context.monitoring_service_contract.functions.monitor.called
def test_channel_bp_updated_event_handler_invalid_closing_participant(context: Context):
    """An update naming a closing participant that is not part of the channel
    must leave the channel untouched and count as a protocol error."""
    metrics_state = save_metrics_state(metrics.REGISTRY)
    context = setup_state_with_closed_channel(context)

    bogus_update = ReceiveNonClosingBalanceProofUpdatedEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
        closing_participant=DEFAULT_PARTICIPANT_OTHER,  # not a channel participant
        nonce=Nonce(2),
        block_number=BlockNumber(23),
    )
    channel = context.database.get_channel(
        bogus_update.token_network_address, bogus_update.channel_identifier)
    assert context.database.channel_count() == 1
    assert channel
    assert channel.update_status is None

    non_closing_balance_proof_updated_event_handler(bogus_update, context)

    # Exactly one protocol error must have been logged.
    assert (
        metrics_state.get_delta(
            "events_log_errors_total", labels=metrics.ErrorCategory.PROTOCOL.to_label_dict()
        )
        == 1.0
    )
def test_channel_bp_updated_event_handler_channel_not_in_database(context: Context):
    """A balance proof update for a channel unknown to the database must be
    ignored and counted as a state error."""
    metrics_state = save_metrics_state(metrics.REGISTRY)
    # only setup the token network without channels
    create_default_token_network(context)

    orphan_update = ReceiveNonClosingBalanceProofUpdatedEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=DEFAULT_CHANNEL_IDENTIFIER,
        closing_participant=DEFAULT_PARTICIPANT2,
        nonce=Nonce(2),
        block_number=BlockNumber(23),
    )
    channel = context.database.get_channel(
        orphan_update.token_network_address, orphan_update.channel_identifier)
    assert channel is None
    assert context.database.channel_count() == 0

    non_closing_balance_proof_updated_event_handler(orphan_update, context)

    # Exactly one state error must have been logged.
    assert (
        metrics_state.get_delta(
            "events_log_errors_total", labels=metrics.ErrorCategory.STATE.to_label_dict()
        )
        == 1.0
    )
def test_pfs_with_mocked_client(  # pylint: disable=too-many-arguments
    web3,
    token_network_registry_contract,
    channel_descriptions_case_1: List,
    get_accounts,
    wait_for_blocks,
    user_deposit_contract,
    token_network,
    custom_token,
    create_channel,
    get_private_key,
):  # pylint: disable=too-many-locals
    """ Instantiates some MockClients and the PathfindingService.

    Mocks blockchain events to setup a token network with a given topology, specified in
    the channel_description fixture. Tests all PFS methods w.r.t. to that topology:
    channels are opened and funded on-chain, the PFS graph is checked against the
    fixture, then all channels are closed again and the graph must be empty.
    """
    clients = get_accounts(7)
    token_network_address = decode_hex(token_network.address)

    with patch("pathfinding_service.service.MatrixListener", new=Mock):
        pfs = PathfindingService(
            web3=web3,
            contracts={
                CONTRACT_TOKEN_NETWORK_REGISTRY: token_network_registry_contract,
                CONTRACT_USER_DEPOSIT: user_deposit_contract,
            },
            required_confirmations=1,
            db_filename=":memory:",
            poll_interval=0.1,
            sync_start_block=BlockNumber(0),
            private_key="3a1076bf45ab87712ad64ccb3b10217737f7faacbf2872e88fdd9a537d8fe266",
        )

    # greenlet needs to be started and context switched to
    pfs.start()
    wait_for_blocks(1)
    gevent.sleep(0.1)

    # there should be one token network registered
    assert len(pfs.token_networks) == 1

    token_network_model = pfs.token_networks[token_network_address]
    graph = token_network_model.G
    channel_identifiers = []
    # Open and fund one on-chain channel per fixture row.
    for (
        p1_index,
        p1_deposit,
        _p1_capacity,
        _p1_fee,
        _p1_reveal_timeout,
        _p1_reachability,
        p2_index,
        p2_deposit,
        _p2_capacity,
        _p2_fee,
        _p2_reveal_timeout,
        _p2_reachability,
        _settle_timeout,
    ) in channel_descriptions_case_1:
        # order is important here because we check order later
        channel_id = create_channel(clients[p1_index], clients[p2_index])[0]
        channel_identifiers.append(channel_id)

        # Mint, approve and deposit for both participants.
        for address, partner_address, amount in [
            (clients[p1_index], clients[p2_index], p1_deposit),
            (clients[p2_index], clients[p1_index], p2_deposit),
        ]:
            custom_token.functions.mint(amount).transact({"from": address})
            custom_token.functions.approve(token_network.address, amount).transact(
                {"from": address}
            )
            token_network.functions.setTotalDeposit(
                channel_id, address, amount, partner_address
            ).transact({"from": address})
        gevent.sleep()

    wait_for_blocks(1)
    gevent.sleep(0.1)

    # there should be as many open channels as described
    assert len(token_network_model.channel_id_to_addresses.keys()) == len(
        channel_descriptions_case_1
    )

    # check that deposits, settle_timeout and transfers got registered
    for (
        index,
        (
            _p1_index,
            p1_deposit,
            _p1_capacity,
            _p1_fee,
            _p1_reveal_timeout,
            _p1_reachability,
            _p2_index,
            p2_deposit,
            _p2_capacity,
            _p2_fee,
            _p2_reveal_timeout,
            _p2_reachability,
            _settle_timeout,
        ),
    ) in enumerate(channel_descriptions_case_1):
        channel_identifier = channel_identifiers[index]
        p1_address, p2_address = token_network_model.channel_id_to_addresses[channel_identifier]
        # One directed view per participant in the PFS routing graph.
        view1: ChannelView = graph[p1_address][p2_address]["view"]
        view2: ChannelView = graph[p2_address][p1_address]["view"]
        assert view1.deposit == p1_deposit
        assert view2.deposit == p2_deposit
        assert view1.settle_timeout == TEST_SETTLE_TIMEOUT_MIN
        assert view2.settle_timeout == TEST_SETTLE_TIMEOUT_MIN
        assert view1.reveal_timeout == DEFAULT_REVEAL_TIMEOUT
        assert view2.reveal_timeout == DEFAULT_REVEAL_TIMEOUT

    # now close all channels
    for (
        index,
        (
            p1_index,
            _p1_deposit,
            _p1_capacity,
            _p1_fee,
            _p1_reveal_timeout,
            _p1_reachability,
            p2_index,
            _p2_deposit,
            _p2_capacity,
            _p2_fee,
            _p2_reveal_timeout,
            _p2_reachability,
            _settle_timeout,
        ),
    ) in enumerate(channel_descriptions_case_1):
        channel_id = channel_identifiers[index]
        # p1 closes with a balance proof signed by p2.
        balance_proof = HashedBalanceProof(
            nonce=Nonce(1),
            transferred_amount=0,
            priv_key=get_private_key(clients[p2_index]),
            channel_identifier=channel_id,
            token_network_address=token_network.address,
            chain_id=ChainID(1),
            additional_hash="0x%064x" % 0,
            locked_amount=0,
            locksroot=encode_hex(LOCKSROOT_OF_NO_LOCKS),
        )
        token_network.functions.closeChannel(
            channel_id,
            clients[p2_index],
            balance_proof.balance_hash,
            balance_proof.nonce,
            balance_proof.additional_hash,
            balance_proof.signature,
        ).transact({"from": clients[p1_index], "gas": 200_000})

    wait_for_blocks(1)
    gevent.sleep(0.1)

    # there should be no channels
    assert len(token_network_model.channel_id_to_addresses.keys()) == 0

    pfs.stop()
def parity_discover_next_available_nonce(web3: Web3, address: AddressHex) -> Nonce:
    """Return the next available transaction nonce for ``address``.

    Uses the Parity/OpenEthereum-specific ``parity_nextNonce`` RPC and
    decodes its hex-encoded response into an integer nonce.
    """
    encoded_nonce = web3.manager.request_blocking("parity_nextNonce", [address])
    return Nonce(int(encoded_nonce, 16))
def test_crash(tmpdir, get_accounts, get_private_key, mockchain):  # pylint: disable=too-many-locals
    """ Process blocks and compare results with/without crash

    A somewhat meaningful crash handling is simulated by not including the
    UpdatedHeadBlockEvent in every block: the "crashy" MS is recreated from
    its database before every block and must end up in the same state as the
    continuously running "stable" MS.
    """
    channel_identifier = ChannelID(3)
    c1, c2 = get_accounts(2)
    token_network_address = TokenNetworkAddress(to_canonical_address(get_random_address()))
    # Balance proof signed by c1; c2 will request monitoring for it.
    balance_proof = HashedBalanceProof(
        nonce=Nonce(1),
        transferred_amount=TokenAmount(2),
        priv_key=get_private_key(c1),
        channel_identifier=channel_identifier,
        token_network_address=token_network_address,
        chain_id=ChainID(1),
        additional_hash="0x%064x" % 0,
        locked_amount=0,
        locksroot=encode_hex(LOCKSROOT_OF_NO_LOCKS),
    )
    monitor_request = balance_proof.get_monitor_request(
        get_private_key(c2), reward_amount=TokenAmount(0), msc_address=TEST_MSC_ADDRESS)

    # One sub-list per block; head-block updates are deliberately sparse so a
    # restart has to re-process events (see docstring).
    events = [
        [
            ReceiveChannelOpenedEvent(
                token_network_address=token_network_address,
                channel_identifier=channel_identifier,
                participant1=c1,
                participant2=c2,
                settle_timeout=20,
                block_number=BlockNumber(0),
            )
        ],
        [UpdatedHeadBlockEvent(BlockNumber(1))],
        [
            ActionMonitoringTriggeredEvent(
                token_network_address=token_network_address,
                channel_identifier=channel_identifier,
                non_closing_participant=c2,
            )
        ],
        [UpdatedHeadBlockEvent(BlockNumber(3))],
    ]
    mockchain(events)

    server_private_key = get_random_privkey()
    contracts = {
        CONTRACT_TOKEN_NETWORK_REGISTRY: ContractMock(),
        CONTRACT_MONITORING_SERVICE: ContractMock(),
        CONTRACT_USER_DEPOSIT: ContractMock(),
        CONTRACT_SERVICE_REGISTRY: ContractMock(),
    }

    def new_ms(filename):
        # Build a MonitoringService on a file-backed DB so a "crashed"
        # instance can be recreated from the persisted state.
        ms = MonitoringService(
            web3=Web3Mock(),
            private_key=server_private_key,
            contracts=contracts,
            db_filename=os.path.join(tmpdir, filename),
        )
        msc = Mock()
        ms.context.monitoring_service_contract = msc
        ms.monitor_mock = msc.functions.monitor.return_value.transact  # type: ignore
        ms.monitor_mock.return_value = bytes(0)  # type: ignore
        return ms

    # initialize both monitoring services
    stable_ms = new_ms("stable.db")
    crashy_ms = new_ms("crashy.db")
    for ms in [stable_ms, crashy_ms]:
        ms.database.conn.execute(
            "INSERT INTO token_network(address) VALUES (?)",
            [to_checksum_address(token_network_address)],
        )
        ms.context.ms_state.blockchain_state.token_network_addresses = [token_network_address]
        ms.database.upsert_monitor_request(monitor_request)
        ms.database.conn.commit()

    # process each block and compare results between crashy and stable ms
    for to_block in range(len(events)):
        crashy_ms = new_ms("crashy.db")  # new instance to simulate crash
        stable_ms.monitor_mock.reset_mock()  # clear calls from last block
        result_state: List[dict] = []
        for ms in [stable_ms, crashy_ms]:
            ms._process_new_blocks(to_block)  # pylint: disable=protected-access
            result_state.append(
                dict(
                    blockchain_state=ms.context.ms_state.blockchain_state,
                    db_dump=list(ms.database.conn.iterdump()),
                    monitor_calls=ms.monitor_mock.mock_calls,
                ))

        # both instances should have the same state after processing
        for stable_state, crashy_state in zip(result_state[0].values(), result_state[1].values()):
            # do asserts for each key separately to get better error messages
            assert stable_state == crashy_state
def test_e2e(  # pylint: disable=too-many-arguments,too-many-locals
    web3,
    monitoring_service_contract,
    user_deposit_contract,
    wait_for_blocks,
    service_registry,
    monitoring_service: MonitoringService,
    request_collector: RequestCollector,
    contracts_manager,
    deposit_to_udc,
    create_channel,
    token_network,
    get_accounts,
    get_private_key,
):
    """Test complete message lifecycle
        1) client opens channel & submits monitoring request
        2) other client closes channel
        3) MS registers channelClose event
        4) MS calls monitoring contract update
        5) wait for channel settle
        6) MS claims the reward
    """
    query = create_ms_contract_events_query(
        web3, contracts_manager, monitoring_service_contract.address)
    initial_balance = user_deposit_contract.functions.balances(monitoring_service.address).call()
    c1, c2 = get_accounts(2)

    # add deposit for c1
    node_deposit = 10
    deposit_to_udc(c1, node_deposit)

    # The MS must have a service-registry deposit to be eligible for rewards.
    deposit = service_registry.functions.deposits(monitoring_service.address).call()
    assert deposit > 0

    # each client does a transfer
    channel_id = create_channel(
        c1, c2, settle_timeout=5)[0]  # TODO: reduce settle_timeout to speed up test

    shared_bp_args = dict(
        channel_identifier=channel_id,
        token_network_address=token_network.address,
        chain_id=ChainID(1),
        additional_hash="0x%064x" % 0,
        locked_amount=TokenAmount(0),
        locksroot=encode_hex(EMPTY_LOCKSROOT),
    )
    transferred_c1 = 5
    balance_proof_c1 = HashedBalanceProof(
        nonce=Nonce(1),
        transferred_amount=transferred_c1,
        priv_key=get_private_key(c1),
        **shared_bp_args)
    transferred_c2 = 6
    balance_proof_c2 = HashedBalanceProof(
        nonce=Nonce(2),
        transferred_amount=transferred_c2,
        priv_key=get_private_key(c2),
        **shared_bp_args)

    ms_greenlet = gevent.spawn(monitoring_service.start, gevent.sleep)

    # need to wait here till the MS has some time to react
    gevent.sleep()
    assert monitoring_service.context.ms_state.blockchain_state.token_network_addresses

    # c1 asks MS to monitor the channel
    reward_amount = TokenAmount(1)
    request_monitoring = balance_proof_c2.get_request_monitoring(
        get_private_key(c1), reward_amount)
    request_collector.on_monitor_request(request_monitoring)

    # c2 closes the channel
    token_network.functions.closeChannel(
        channel_id,
        c1,
        balance_proof_c1.balance_hash,
        balance_proof_c1.nonce,
        balance_proof_c1.additional_hash,
        balance_proof_c1.signature,
    ).transact({"from": c2})

    # Wait until the MS reacts, which it does after giving the client some time
    # to update the channel itself.
    wait_for_blocks(3)  # 1 block for close + 30% of 5 blocks = 2

    # Now give the monitoring service a chance to submit the missing BP
    gevent.sleep(0.1)

    assert [e.event for e in query()] == [MonitoringServiceEvent.NEW_BALANCE_PROOF_RECEIVED]

    # wait for settle timeout
    wait_for_blocks(2)  # timeout is 5, but we've already waited 3 blocks before

    token_network.functions.settleChannel(
        channel_id,
        c1,  # participant_B
        transferred_c1,  # participant_B_transferred_amount
        0,  # participant_B_locked_amount
        EMPTY_LOCKSROOT,  # participant_B_locksroot
        c2,  # participant_A
        transferred_c2,  # participant_A_transferred_amount
        0,  # participant_A_locked_amount
        EMPTY_LOCKSROOT,  # participant_A_locksroot
    ).transact()

    # Wait until the ChannelSettled is confirmed
    # Let the MS claim its reward
    gevent.sleep(0.1)
    assert [e.event for e in query()] == [
        MonitoringServiceEvent.NEW_BALANCE_PROOF_RECEIVED,
        MonitoringServiceEvent.REWARD_CLAIMED,
    ]

    # The reward must have been credited to the MS inside the UDC.
    final_balance = user_deposit_contract.functions.balances(monitoring_service.address).call()
    assert final_balance == (initial_balance + reward_amount)

    ms_greenlet.kill()
def populate_token_network_random(
    token_network_model: TokenNetwork, private_keys: List[str]
) -> None:
    """Fill ``token_network_model`` with 300 pseudo-random funded channels.

    The RNG is seeded with a constant so the generated topology is
    deterministic across runs.
    """
    number_of_channels = 300
    # seed for pseudo-randomness from config constant, that changes from time to time
    random.seed(number_of_channels)

    for channel_index in range(number_of_channels):
        channel_id = ChannelID(channel_index)

        key1, key2 = random.sample(private_keys, 2)
        address1 = private_key_to_address(key1)
        address2 = private_key_to_address(key2)
        token_network_model.handle_channel_opened_event(
            channel_identifier=channel_id,
            participant1=address1,
            participant2=address2,
            settle_timeout=BlockTimeout(15),
        )

        # deposit to channels
        deposit1 = TokenAmount(random.randint(0, 1000))
        deposit2 = TokenAmount(random.randint(0, 1000))
        address1, address2 = token_network_model.channel_id_to_addresses[channel_id]

        # One capacity update per direction; the second update additionally
        # reports the partner deposits observed so far.
        updates = (
            (address1, address2, Nonce(1), deposit1, deposit2, TokenAmount(0), TokenAmount(0)),
            (
                address2,
                address1,
                Nonce(2),
                deposit2,
                deposit1,
                TokenAmount(deposit1),
                TokenAmount(deposit2),
            ),
        )
        for (
            updater,
            partner,
            nonce,
            own_capacity,
            partner_capacity,
            own_cap_partner,
            other_cap_partner,
        ) in updates:
            token_network_model.handle_channel_balance_update_message(
                PFSCapacityUpdate(
                    canonical_identifier=CanonicalIdentifier(
                        chain_identifier=ChainID(61),
                        channel_identifier=channel_id,
                        token_network_address=TokenNetworkAddress(token_network_model.address),
                    ),
                    updating_participant=updater,
                    other_participant=partner,
                    updating_nonce=nonce,
                    other_nonce=Nonce(1),
                    updating_capacity=own_capacity,
                    other_capacity=partner_capacity,
                    reveal_timeout=BlockTimeout(2),
                    signature=EMPTY_SIGNATURE,
                ),
                updating_capacity_partner=own_cap_partner,
                other_capacity_partner=other_cap_partner,
            )
def populate_token_network(
    token_network: TokenNetwork,
    reachability_state: SimpleReachabilityContainer,
    addresses: List[Address],
    channel_descriptions: List,
):
    """Open one channel per description and record participant reachability.

    Each entry of ``channel_descriptions`` describes both channel ends:
    participant index, capacity, fee (unused here), reveal timeout and
    reachability, followed by the settle timeout.
    """
    for channel_id, description in enumerate(channel_descriptions):
        (
            p1_index,
            p1_capacity,
            _p1_fee,
            p1_reveal_timeout,
            p1_reachability,
            p2_index,
            p2_capacity,
            _p2_fee,
            p2_reveal_timeout,
            p2_reachability,
            settle_timeout,
        ) = description
        participant1 = addresses[p1_index]
        participant2 = addresses[p2_index]

        token_network.handle_channel_opened_event(
            channel_identifier=ChannelID(channel_id),
            participant1=participant1,
            participant2=participant2,
            settle_timeout=settle_timeout,
        )

        # One capacity update per direction; the second one also reports the
        # partner capacities seen in the first update.
        capacity_updates = (
            (
                participant1,
                participant2,
                Nonce(1),
                p1_capacity,
                p2_capacity,
                p1_reveal_timeout,
                TokenAmount(0),
                TokenAmount(0),
            ),
            (
                participant2,
                participant1,
                Nonce(2),
                p2_capacity,
                p1_capacity,
                p2_reveal_timeout,
                TokenAmount(p1_capacity),
                TokenAmount(p2_capacity),
            ),
        )
        for (
            updater,
            partner,
            nonce,
            own_capacity,
            partner_capacity,
            reveal,
            own_cap_partner,
            other_cap_partner,
        ) in capacity_updates:
            token_network.handle_channel_balance_update_message(
                PFSCapacityUpdate(
                    canonical_identifier=CanonicalIdentifier(
                        chain_identifier=ChainID(61),
                        channel_identifier=ChannelID(channel_id),
                        token_network_address=TokenNetworkAddress(token_network.address),
                    ),
                    updating_participant=updater,
                    other_participant=partner,
                    updating_nonce=nonce,
                    other_nonce=Nonce(1),
                    updating_capacity=own_capacity,
                    other_capacity=partner_capacity,
                    reveal_timeout=reveal,
                    signature=EMPTY_SIGNATURE,
                ),
                updating_capacity_partner=own_cap_partner,
                other_capacity_partner=other_cap_partner,
            )

        # Update presence state according to scenario
        reachability_state.reachabilities[participant1] = p1_reachability
        reachability_state.reachabilities[participant2] = p2_reachability
def test_receive_secrethashtransfer_unknown(
    raiden_network: List[RaidenService], token_addresses
):
    """Inject transfer-related messages from a stranger into app0.

    All four messages (refund transfer, unlock, secret request, reveal
    secret) are signed by HOP1, a party app0 presumably has no channel
    with — the test only verifies that handling them raises no exception.
    """
    app0 = raiden_network[0]
    token_address = token_addresses[0]
    token_network_address = views.get_token_network_address_by_token_address(
        views.state_from_raiden(app0), app0.default_registry.address, token_address
    )
    assert token_network_address

    # HOP1 acts as the unknown sender; its messages reference a channel
    # identifier fabricated by the factory, not a real channel of app0.
    other_key = HOP1_KEY
    other_signer = LocalSigner(other_key)
    canonical_identifier = factories.make_canonical_identifier(
        token_network_address=token_network_address
    )

    amount = TokenAmount(10)
    locksroot = Locksroot(make_32bytes())
    refund_transfer_message = factories.create(
        factories.RefundTransferProperties(
            payment_identifier=PaymentID(1),
            nonce=Nonce(1),
            token=token_address,
            canonical_identifier=canonical_identifier,
            transferred_amount=amount,
            recipient=TargetAddress(app0.address),
            locksroot=locksroot,
            amount=amount,
            secret=UNIT_SECRET,
        )
    )
    sign_and_inject(refund_transfer_message, other_signer, app0)

    unlock = Unlock(
        chain_id=UNIT_CHAIN_ID,
        message_identifier=make_message_identifier(),
        payment_identifier=PaymentID(1),
        nonce=Nonce(1),
        channel_identifier=canonical_identifier.channel_identifier,
        token_network_address=token_network_address,
        transferred_amount=amount,
        locked_amount=LockedAmount(0),
        locksroot=locksroot,
        secret=UNIT_SECRET,
        signature=EMPTY_SIGNATURE,
    )
    sign_and_inject(unlock, other_signer, app0)

    secret_request_message = SecretRequest(
        message_identifier=make_message_identifier(),
        payment_identifier=PaymentID(1),
        secrethash=UNIT_SECRETHASH,
        amount=PaymentAmount(1),
        expiration=refund_transfer_message.lock.expiration,
        signature=EMPTY_SIGNATURE,
    )
    sign_and_inject(secret_request_message, other_signer, app0)

    reveal_secret_message = RevealSecret(
        message_identifier=make_message_identifier(),
        secret=UNIT_SECRET,
        signature=EMPTY_SIGNATURE,
    )
    sign_and_inject(reveal_secret_message, other_signer, app0)
def test_regression_multiple_revealsecret(
    raiden_network: List[App], token_addresses: List[TokenAddress]
) -> None:
    """Multiple RevealSecret messages arriving at the same time must be
    handled properly.

    Unlock handling followed these steps:

        The Unlock message arrives
        The secret is registered
        The channel is updated and the corresponding lock is removed
        * A balance proof for the new channel state is created and sent to
          the payer
        The channel is unregistered for the given secrethash

    The step marked with an asterisk above introduced a context-switch. This
    allowed a second Reveal Unlock message to be handled before the channel
    was unregistered. And because the channel was already updated an
    exception was raised for an unknown secret.
    """
    app0, app1 = raiden_network
    token = token_addresses[0]
    token_network_address = views.get_token_network_address_by_token_address(
        views.state_from_app(app0), app0.raiden.default_registry.address, token
    )
    assert token_network_address
    channelstate_0_1 = get_channelstate(app0, app1, token_network_address)

    payment_identifier = PaymentID(1)
    secret, secrethash = make_secret_with_hash()
    expiration = BlockExpiration(app0.raiden.get_block_number() + 100)
    lock_amount = PaymentWithFeeAmount(10)
    lock = Lock(amount=lock_amount, expiration=expiration, secrethash=secrethash)

    # Craft the locked transfer from app0 to app1 by hand so the follow-up
    # Unlock below can reference the exact same lock/nonce.
    nonce = Nonce(1)
    transferred_amount = TokenAmount(0)
    mediated_transfer = LockedTransfer(
        chain_id=UNIT_CHAIN_ID,
        message_identifier=make_message_identifier(),
        payment_identifier=payment_identifier,
        nonce=nonce,
        token_network_address=token_network_address,
        token=token,
        channel_identifier=channelstate_0_1.identifier,
        transferred_amount=transferred_amount,
        locked_amount=LockedAmount(lock_amount),
        recipient=app1.raiden.address,
        locksroot=Locksroot(lock.lockhash),
        lock=lock,
        target=TargetAddress(app1.raiden.address),
        initiator=InitiatorAddress(app0.raiden.address),
        signature=EMPTY_SIGNATURE,
        metadata=Metadata(
            routes=[RouteMetadata(route=[app0.raiden.address, app1.raiden.address])]
        ),
    )
    app0.raiden.sign(mediated_transfer)
    app1.raiden.on_messages([mediated_transfer])

    reveal_secret = RevealSecret(
        message_identifier=make_message_identifier(),
        secret=secret,
        signature=EMPTY_SIGNATURE,
    )
    app0.raiden.sign(reveal_secret)

    token_network_address = channelstate_0_1.token_network_address
    # The Unlock moves the locked amount into transferred_amount and clears
    # the lock (empty locksroot).
    unlock = Unlock(
        chain_id=UNIT_CHAIN_ID,
        message_identifier=make_message_identifier(),
        payment_identifier=payment_identifier,
        nonce=Nonce(mediated_transfer.nonce + 1),
        token_network_address=token_network_address,
        channel_identifier=channelstate_0_1.identifier,
        transferred_amount=TokenAmount(lock_amount),
        locked_amount=LockedAmount(0),
        locksroot=LOCKSROOT_OF_NO_LOCKS,
        secret=secret,
        signature=EMPTY_SIGNATURE,
    )
    app0.raiden.sign(unlock)

    # Deliver both messages (near-)simultaneously to provoke the historical
    # race described in the docstring; raise_error surfaces any regression.
    messages = [unlock, reveal_secret]
    receive_method = app1.raiden.on_messages
    wait = set(gevent.spawn_later(0.1, receive_method, [data]) for data in messages)

    gevent.joinall(wait, raise_error=True)
def populate_token_network(
    token_network: TokenNetwork,
    address_to_reachability: Dict[Address, AddressReachability],
    addresses: List[Address],
    channel_descriptions: List,
):
    """Open, fund and balance-update one channel per description.

    Each entry of ``channel_descriptions`` carries, for both ends: participant
    index, deposit, capacity, fee (unused here), reveal timeout and
    reachability, followed by the settle timeout. The reachability of each
    participant is recorded in ``address_to_reachability``.
    """
    for channel_id, description in enumerate(channel_descriptions):
        (
            p1_index,
            p1_deposit,
            p1_capacity,
            _p1_fee,
            p1_reveal_timeout,
            p1_reachability,
            p2_index,
            p2_deposit,
            p2_capacity,
            _p2_fee,
            p2_reveal_timeout,
            p2_reachability,
            settle_timeout,
        ) = description
        participant1 = addresses[p1_index]
        participant2 = addresses[p2_index]

        token_network.handle_channel_opened_event(
            channel_identifier=ChannelID(channel_id),
            participant1=participant1,
            participant2=participant2,
            settle_timeout=settle_timeout,
        )

        # Both ends deposit into the freshly opened channel.
        for receiver, total_deposit in (
            (participant1, p1_deposit),
            (participant2, p2_deposit),
        ):
            token_network.handle_channel_new_deposit_event(
                channel_identifier=ChannelID(channel_id),
                receiver=receiver,
                total_deposit=total_deposit,
            )

        # One capacity update per direction; the second one also reports the
        # partner capacities seen in the first update.
        updates = (
            (
                participant1,
                participant2,
                Nonce(1),
                p1_capacity,
                p2_capacity,
                p1_reveal_timeout,
                TokenAmount(0),
                TokenAmount(0),
            ),
            (
                participant2,
                participant1,
                Nonce(2),
                p2_capacity,
                p1_capacity,
                p2_reveal_timeout,
                TokenAmount(p1_capacity),
                TokenAmount(p2_capacity),
            ),
        )
        for (
            updater,
            partner,
            nonce,
            own_capacity,
            partner_capacity,
            reveal,
            own_cap_partner,
            other_cap_partner,
        ) in updates:
            token_network.handle_channel_balance_update_message(
                UpdatePFS(
                    canonical_identifier=CanonicalIdentifier(
                        chain_identifier=ChainID(1),
                        channel_identifier=ChannelID(channel_id),
                        token_network_address=TokenNetworkAddress(token_network.address),
                    ),
                    updating_participant=updater,
                    other_participant=partner,
                    updating_nonce=nonce,
                    other_nonce=Nonce(1),
                    updating_capacity=own_capacity,
                    other_capacity=partner_capacity,
                    reveal_timeout=reveal,
                    mediation_fee=FeeAmount(0),
                    signature=EMPTY_SIGNATURE,
                ),
                updating_capacity_partner=own_cap_partner,
                other_capacity_partner=other_cap_partner,
            )

        # Update presence state according to scenario
        address_to_reachability[participant1] = p1_reachability
        address_to_reachability[participant2] = p2_reachability
def test_payment_channel_proxy_basics(
    token_network_registry_address: TokenNetworkRegistryAddress,
    token_network_proxy: TokenNetwork,
    token_proxy: Token,
    chain_id: ChainID,
    private_keys: List[PrivateKey],
    web3: Web3,
    contract_manager: ContractManager,
    reveal_timeout: BlockTimeout,
) -> None:
    """Walk a payment channel proxy through its whole lifecycle.

    Covers: open -> deposit -> close (empty balance proof) -> settle, checking
    the emitted on-chain events after every step, then verifies that a second
    close on the already-closed channel and a deposit on it fail with the
    expected exception types.
    """
    token_network_address = token_network_proxy.address
    partner = privatekey_to_address(private_keys[0])

    rpc_client = JSONRPCClient(web3, private_keys[1])
    proxy_manager = ProxyManager(
        rpc_client=rpc_client,
        contract_manager=contract_manager,
        metadata=ProxyManagerMetadata(
            token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER,
            filters_start_at=GENESIS_BLOCK_NUMBER,
        ),
    )
    token_network_proxy = proxy_manager.token_network(
        address=token_network_address, block_identifier=BLOCK_ID_LATEST
    )
    # Remember where we started so event queries below only see this test's events.
    start_block = web3.eth.blockNumber

    channel_details = token_network_proxy.new_netting_channel(
        partner=partner,
        settle_timeout=TEST_SETTLE_TIMEOUT_MIN,
        given_block_identifier=BLOCK_ID_LATEST,
    )
    channel_identifier = channel_details.channel_identifier
    assert channel_identifier is not None

    channel_state = NettingChannelState(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_id,
            token_network_address=token_network_address,
            channel_identifier=channel_identifier,
        ),
        token_address=token_network_proxy.token_address(),
        token_network_registry_address=token_network_registry_address,
        reveal_timeout=reveal_timeout,
        settle_timeout=BlockTimeout(TEST_SETTLE_TIMEOUT_MIN),
        fee_schedule=FeeScheduleState(),
        our_state=NettingChannelEndState(
            address=token_network_proxy.client.address, contract_balance=Balance(0)
        ),
        partner_state=NettingChannelEndState(address=partner, contract_balance=Balance(0)),
        open_transaction=SuccessfulTransactionState(finished_block_number=BlockNumber(0)),
    )
    channel_proxy_1 = proxy_manager.payment_channel(
        channel_state=channel_state, block_identifier=BLOCK_ID_LATEST
    )

    assert channel_proxy_1.channel_identifier == channel_identifier
    assert channel_proxy_1.opened(BLOCK_ID_LATEST) is True

    # Test deposit
    initial_token_balance = 100
    token_proxy.transfer(rpc_client.address, TokenAmount(initial_token_balance))
    assert token_proxy.balance_of(rpc_client.address) == initial_token_balance
    assert token_proxy.balance_of(partner) == 0
    channel_proxy_1.approve_and_set_total_deposit(
        total_deposit=TokenAmount(10), block_identifier=BLOCK_ID_LATEST
    )

    # ChannelOpened, ChannelNewDeposit
    channel_events = get_all_netting_channel_events(
        proxy_manager=proxy_manager,
        token_network_address=token_network_address,
        netting_channel_identifier=channel_proxy_1.channel_identifier,
        contract_manager=contract_manager,
        from_block=start_block,
        to_block=web3.eth.blockNumber,
    )
    assert len(channel_events) == 2

    # Saved so the "close at an old block" precondition check below can use it.
    block_before_close = web3.eth.blockNumber
    # Close with an all-zero balance proof (no transfers happened).
    empty_balance_proof = BalanceProof(
        channel_identifier=channel_proxy_1.channel_identifier,
        token_network_address=token_network_address,
        balance_hash=EMPTY_BALANCE_HASH,
        nonce=0,
        chain_id=chain_id,
        transferred_amount=TokenAmount(0),
    )
    closing_data = (
        empty_balance_proof.serialize_bin(msg_type=MessageTypeId.BALANCE_PROOF)
        + EMPTY_SIGNATURE
    )
    channel_proxy_1.close(
        nonce=Nonce(0),
        balance_hash=EMPTY_BALANCE_HASH,
        additional_hash=EMPTY_MESSAGE_HASH,
        non_closing_signature=EMPTY_SIGNATURE,
        closing_signature=LocalSigner(private_keys[1]).sign(data=closing_data),
        block_identifier=BLOCK_ID_LATEST,
    )
    assert channel_proxy_1.closed(BLOCK_ID_LATEST) is True

    # ChannelOpened, ChannelNewDeposit, ChannelClosed
    channel_events = get_all_netting_channel_events(
        proxy_manager=proxy_manager,
        token_network_address=token_network_address,
        netting_channel_identifier=channel_proxy_1.channel_identifier,
        contract_manager=contract_manager,
        from_block=start_block,
        to_block=web3.eth.blockNumber,
    )
    assert len(channel_events) == 3

    # check the settlement timeouts again
    assert channel_proxy_1.settle_timeout() == TEST_SETTLE_TIMEOUT_MIN

    # update transfer -- we need to wait on +1 since we use the latest block on parity for
    # estimate gas and at the time the latest block is the settle timeout block.
    # More info: https://github.com/raiden-network/raiden/pull/3699#discussion_r270477227
    rpc_client.wait_until_block(
        target_block_number=BlockNumber(rpc_client.block_number() + TEST_SETTLE_TIMEOUT_MIN + 1)
    )

    transaction_hash = channel_proxy_1.settle(
        transferred_amount=TokenAmount(0),
        locked_amount=LockedAmount(0),
        locksroot=LOCKSROOT_OF_NO_LOCKS,
        partner_transferred_amount=TokenAmount(0),
        partner_locked_amount=LockedAmount(0),
        partner_locksroot=LOCKSROOT_OF_NO_LOCKS,
        block_identifier=BLOCK_ID_LATEST,
    )
    assert is_tx_hash_bytes(transaction_hash)
    assert channel_proxy_1.settled(BLOCK_ID_LATEST) is True

    # ChannelOpened, ChannelNewDeposit, ChannelClosed, ChannelSettled
    channel_events = get_all_netting_channel_events(
        proxy_manager=proxy_manager,
        token_network_address=token_network_address,
        netting_channel_identifier=channel_proxy_1.channel_identifier,
        contract_manager=contract_manager,
        from_block=start_block,
        to_block=web3.eth.blockNumber,
    )
    assert len(channel_events) == 4

    # Open a second channel with the same partner to verify the proxy manager
    # distinguishes it from the settled one.
    channel_details = token_network_proxy.new_netting_channel(
        partner=partner,
        settle_timeout=TEST_SETTLE_TIMEOUT_MIN,
        given_block_identifier=BLOCK_ID_LATEST,
    )
    new_channel_identifier = channel_details.channel_identifier
    assert new_channel_identifier is not None

    channel_state = NettingChannelState(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_id,
            token_network_address=token_network_address,
            channel_identifier=new_channel_identifier,
        ),
        token_address=token_network_proxy.token_address(),
        token_network_registry_address=token_network_registry_address,
        reveal_timeout=reveal_timeout,
        settle_timeout=BlockTimeout(TEST_SETTLE_TIMEOUT_MIN),
        fee_schedule=FeeScheduleState(),
        our_state=NettingChannelEndState(
            address=token_network_proxy.client.address, contract_balance=Balance(0)
        ),
        partner_state=NettingChannelEndState(address=partner, contract_balance=Balance(0)),
        open_transaction=SuccessfulTransactionState(finished_block_number=BlockNumber(0)),
    )
    channel_proxy_2 = proxy_manager.payment_channel(
        channel_state=channel_state, block_identifier=BLOCK_ID_LATEST
    )

    assert channel_proxy_2.channel_identifier == new_channel_identifier
    assert channel_proxy_2.opened(BLOCK_ID_LATEST) is True

    # pytest.fail only runs if close() did NOT raise, turning a silent pass
    # into an explicit failure.
    msg = "The channel was already closed, the second call must fail"
    with pytest.raises(RaidenRecoverableError):
        channel_proxy_1.close(
            nonce=Nonce(0),
            balance_hash=EMPTY_BALANCE_HASH,
            additional_hash=EMPTY_MESSAGE_HASH,
            non_closing_signature=EMPTY_SIGNATURE,
            closing_signature=LocalSigner(private_keys[1]).sign(data=closing_data),
            block_identifier=block_before_close,
        )
        pytest.fail(msg)

    msg = "The channel is not open at latest, this must raise"
    with pytest.raises(RaidenUnrecoverableError):
        channel_proxy_1.close(
            nonce=Nonce(0),
            balance_hash=EMPTY_BALANCE_HASH,
            additional_hash=EMPTY_MESSAGE_HASH,
            non_closing_signature=EMPTY_SIGNATURE,
            closing_signature=LocalSigner(private_keys[1]).sign(data=closing_data),
            block_identifier=BLOCK_ID_LATEST,
        )
        pytest.fail(msg)

    msg = (
        "The channel was not opened at the provided block (latest). "
        "This call should never have been attempted."
    )
    with pytest.raises(BrokenPreconditionError):
        channel_proxy_1.approve_and_set_total_deposit(
            total_deposit=TokenAmount(20), block_identifier=BLOCK_ID_LATEST
        )
        pytest.fail(msg)
def test_discover_next_available_nonce(deploy_client: JSONRPCClient) -> None:
    """`parity_discover_next_available_nonce` returns the *next available nonce*.

    Notes:
    - This is not the same as the *highest unused nonce*, additional details
      on issue #4976.
    - The behaviour of `geth_discover_next_available_nonce` and
      `parity_discover_next_available_nonce` should match.
    """
    web3 = deploy_client.web3
    random_address = make_address()
    gas_price = web3.eth.gasPrice  # pylint: disable=no-member
    eth_node = deploy_client.eth_node
    next_nonce = discover_next_available_nonce(web3, eth_node, deploy_client.address)

    # Should be larger than the number of transactions that can fit in a single
    # block, to ensure all transactions from the pool are accounted for.
    QTY_TRANSACTIONS = 1000

    # Test the next available nonce
    for _ in range(QTY_TRANSACTIONS):
        transaction = {
            "to": to_checksum_address(random_address),
            "gas": TRANSACTION_INTRINSIC_GAS,
            "nonce": next_nonce,
            "value": 1,
            "gasPrice": gas_price,
        }
        signed_txn = deploy_client.web3.eth.account.sign_transaction(
            transaction, deploy_client.privkey
        )
        deploy_client.web3.eth.sendRawTransaction(signed_txn.rawTransaction)

        next_nonce = Nonce(next_nonce + 1)
        msg = "The nonce must increment when a new transaction is sent."
        assert (
            discover_next_available_nonce(web3, eth_node, deploy_client.address) == next_nonce
        ), msg

    # Fix: wrap in Nonce() for consistency with every other nonce computation
    # in this test (previously a bare int).
    # Leave a gap at `next_nonce` so the pool contains only unusable nonces.
    skip_nonce = Nonce(next_nonce + 1)

    # Test the next available nonce is not the same as the highest unused
    # nonce.
    for _ in range(QTY_TRANSACTIONS):
        transaction = {
            "to": to_checksum_address(random_address),
            "gas": TRANSACTION_INTRINSIC_GAS,
            "nonce": skip_nonce,
            "value": 1,
            "gasPrice": gas_price,
        }
        signed_txn = deploy_client.web3.eth.account.sign_transaction(
            transaction, deploy_client.privkey
        )
        deploy_client.web3.eth.sendRawTransaction(signed_txn.rawTransaction)

        available_nonce = discover_next_available_nonce(web3, eth_node, deploy_client.address)

        # Fix: the previous message ("Expected the latest unused nonce.")
        # contradicted the assertions, which check for the *next available*
        # nonce and explicitly NOT the highest unused one.
        msg = "Expected the next available nonce, not the highest unused one."
        assert available_nonce == next_nonce, msg
        assert available_nonce != skip_nonce, msg

        skip_nonce = Nonce(skip_nonce + 1)
def get_next_transaction(self) -> TransactionSlot:
    """Atomically reserve the next available nonce and return a slot for it.

    The lock ensures concurrent callers each receive a distinct nonce.
    """
    with self._nonce_lock:
        reserved = self._available_nonce
        slot = TransactionSlot(self, reserved)
        self._available_nonce = Nonce(reserved + 1)
        return slot