def test_handle_onchain_secretreveal():
    """ The target node must update the lock state when the secret is
    registered in the blockchain.
    """
    setup = make_target_state(
        block_number=1,
        expiration=1 + factories.UNIT_REVEAL_TIMEOUT,
    )
    assert factories.UNIT_SECRETHASH in setup.channel.partner_state.secrethashes_to_lockedlocks

    offchain_secret_reveal_iteration = target.state_transition(
        target_state=setup.new_state,
        state_change=ReceiveSecretReveal(UNIT_SECRET, setup.initiator),
        channel_state=setup.channel,
        pseudo_random_generator=setup.pseudo_random_generator,
        block_number=setup.block_number,
    )
    assert UNIT_SECRETHASH in setup.channel.partner_state.secrethashes_to_unlockedlocks
    assert UNIT_SECRETHASH not in setup.channel.partner_state.secrethashes_to_lockedlocks

    # Make sure that an empty-hash on-chain reveal is rejected.
    block_number_prior_the_expiration = setup.expiration - 2
    onchain_reveal = ContractReceiveSecretReveal(
        transaction_hash=factories.make_address(),
        secret_registry_address=factories.make_address(),
        secrethash=EMPTY_HASH_KECCAK,
        secret=EMPTY_HASH,
        block_number=block_number_prior_the_expiration,
    )
    onchain_secret_reveal_iteration = target.state_transition(
        target_state=offchain_secret_reveal_iteration.new_state,
        state_change=onchain_reveal,
        channel_state=setup.channel,
        pseudo_random_generator=setup.pseudo_random_generator,
        block_number=block_number_prior_the_expiration,
    )
    unlocked_onchain = setup.channel.partner_state.secrethashes_to_onchain_unlockedlocks
    assert EMPTY_HASH_KECCAK not in unlocked_onchain

    # now let's go for the actual secret
    onchain_reveal.secret = UNIT_SECRET
    onchain_reveal.secrethash = UNIT_SECRETHASH

    onchain_secret_reveal_iteration = target.state_transition(
        target_state=offchain_secret_reveal_iteration.new_state,
        state_change=onchain_reveal,
        channel_state=setup.channel,
        pseudo_random_generator=setup.pseudo_random_generator,
        block_number=block_number_prior_the_expiration,
    )
    unlocked_onchain = setup.channel.partner_state.secrethashes_to_onchain_unlockedlocks
    assert UNIT_SECRETHASH in unlocked_onchain

    # Check that after we register a lock on-chain handling the block again will
    # not cause us to attempt an onchain re-register
    extra_block_handle_transition = target.handle_block(
        target_state=onchain_secret_reveal_iteration.new_state,
        channel_state=setup.channel,
        block_number=block_number_prior_the_expiration + 1,
    )
    assert len(extra_block_handle_transition.events) == 0
def make_from_route_from_counter(counter):
    from_channel = factories.make_channel(
        partner_balance=next(counter),
        partner_address=factories.HOP1,
        token_address=factories.make_address(),
        channel_identifier=next(counter),
    )
    from_route = factories.route_from_channel(from_channel)

    expiration = factories.UNIT_REVEAL_TIMEOUT + 1

    from_transfer = factories.make_signed_transfer_for(
        from_channel,
        factories.LockedTransferSignedStateProperties(
            transfer=factories.LockedTransferProperties(
                balance_proof=factories.BalanceProofProperties(
                    transferred_amount=0,
                    token_network_identifier=from_channel.token_network_identifier,
                ),
                amount=1,
                expiration=expiration,
                secret=sha3(factories.make_secret(next(counter))),
                initiator=factories.make_address(),
                target=factories.make_address(),
                payment_identifier=next(counter),
            ),
            sender=factories.HOP1,
            pkey=factories.HOP1_KEY,
        ),
    )
    return from_route, from_transfer
def make_signed_transfer_from_counter(counter):
    lock = Lock(
        amount=next(counter),
        expiration=next(counter),
        secrethash=sha3(factories.make_secret(next(counter))),
    )
    signed_transfer = factories.make_signed_transfer(
        amount=next(counter),
        initiator=factories.make_address(),
        target=factories.make_address(),
        expiration=next(counter),
        secret=factories.make_secret(next(counter)),
        payment_identifier=next(counter),
        message_identifier=next(counter),
        nonce=next(counter),
        transferred_amount=next(counter),
        locked_amount=next(counter),
        locksroot=sha3(lock.as_bytes),
        recipient=factories.make_address(),
        channel_identifier=next(counter),
        token_network_address=factories.make_address(),
        token=factories.make_address(),
        pkey=factories.HOP1_KEY,
        sender=factories.HOP1,
    )
    return signed_transfer
def initialize(self, block_number, random, random_seed):
    self.random_seed = random_seed

    self.block_number = block_number
    self.random = random
    self.private_key, self.address = factories.make_privkey_address()

    self.chain_state = ChainState(
        self.random,
        self.block_number,
        self.address,
        factories.UNIT_CHAIN_ID,
    )

    self.token_network_id = factories.make_address()
    self.token_id = factories.make_address()
    self.token_network_state = TokenNetworkState(self.token_network_id, self.token_id)

    self.payment_network_id = factories.make_payment_network_identifier()
    self.payment_network_state = PaymentNetworkState(
        self.payment_network_id,
        [self.token_network_state],
    )

    self.chain_state.identifiers_to_paymentnetworks[
        self.payment_network_id
    ] = self.payment_network_state

    return self.new_channel_with_transaction()
def make_transfer_from_counter(counter):
    return factories.make_transfer(
        amount=next(counter),
        initiator=factories.make_address(),
        target=factories.make_address(),
        expiration=next(counter),
        secret=factories.make_secret(next(counter)),
    )
def test_handle_contract_send_channelunlock_already_unlocked():
    """This is a test for the scenario where the onchain unlock has already
    happened when we get to handle our own send unlock transaction.

    Regression test for https://github.com/raiden-network/raiden/issues/3152
    """
    channel_identifier = 1
    token_network_identifier = make_address()
    token_address = make_address()
    participant = make_address()
    raiden = MockRaidenService()

    def detail_participants(participant1, participant2, channel_identifier):
        transferred_amount = 1
        locked_amount = 1
        locksroot = make_32bytes()
        balance_hash = hash_balance_data(transferred_amount, locked_amount, locksroot)
        our_details = ParticipantDetails(
            address=raiden.address,
            deposit=5,
            withdrawn=0,
            is_closer=False,
            balance_hash=balance_hash,
            nonce=1,
            locksroot=locksroot,
            locked_amount=locked_amount,
        )

        transferred_amount = 1
        locked_amount = 1
        # Let's mock here that partner locksroot is 0x0
        locksroot = EMPTY_HASH
        balance_hash = hash_balance_data(transferred_amount, locked_amount, locksroot)
        partner_details = ParticipantDetails(
            address=participant,
            deposit=5,
            withdrawn=0,
            is_closer=True,
            balance_hash=balance_hash,
            nonce=1,
            locksroot=locksroot,
            locked_amount=locked_amount,
        )
        return ParticipantsDetails(our_details, partner_details)

    # make sure detail_participants returns partner data with a locksroot of 0x0
    raiden.chain.token_network.detail_participants = detail_participants

    event = ContractSendChannelBatchUnlock(
        token_address=token_address,
        token_network_identifier=token_network_identifier,
        channel_identifier=channel_identifier,
        participant=participant,
    )

    # This should not throw an unrecoverable error
    RaidenEventHandler().on_raiden_event(raiden=raiden, event=event)
def test_channel_settle_must_properly_cleanup():
    open_block_number = 10
    pseudo_random_generator = random.Random()

    token_network_id = factories.make_address()
    token_id = factories.make_address()
    token_network_state = TokenNetworkState(token_network_id, token_id)

    amount = 30
    our_balance = amount + 50
    channel_state = factories.make_channel(our_balance=our_balance)

    channel_new_state_change = ContractReceiveChannelNew(token_network_id, channel_state)
    channel_new_iteration = token_network.state_transition(
        token_network_state,
        channel_new_state_change,
        pseudo_random_generator,
        open_block_number,
    )

    closed_block_number = open_block_number + 10
    channel_close_state_change = ContractReceiveChannelClosed(
        token_network_id,
        channel_state.identifier,
        channel_state.partner_state.address,
        closed_block_number,
    )
    channel_closed_iteration = token_network.state_transition(
        channel_new_iteration.new_state,
        channel_close_state_change,
        pseudo_random_generator,
        closed_block_number,
    )

    settle_block_number = closed_block_number + channel_state.settle_timeout + 1
    channel_settled_state_change = ContractReceiveChannelSettled(
        token_network_id,
        channel_state.identifier,
        settle_block_number,
    )
    channel_settled_iteration = token_network.state_transition(
        channel_closed_iteration.new_state,
        channel_settled_state_change,
        pseudo_random_generator,
        closed_block_number,
    )

    token_network_state_after_settle = channel_settled_iteration.new_state
    ids_to_channels = token_network_state_after_settle.channelidentifiers_to_channels
    assert channel_state.identifier not in ids_to_channels
def test_contract_receive_channelnew_must_be_idempotent():
    block_number = 10
    pseudo_random_generator = random.Random()

    token_network_id = factories.make_address()
    token_id = factories.make_address()
    token_network_state = TokenNetworkState(token_network_id, token_id)
    payment_network_identifier = factories.make_payment_network_identifier()

    amount = 30
    our_balance = amount + 50
    channel_state1 = factories.make_channel(our_balance=our_balance)
    channel_state2 = copy.deepcopy(channel_state1)

    state_change1 = ContractReceiveChannelNew(
        factories.make_transaction_hash(),
        token_network_id,
        channel_state1,
        block_number,
    )
    token_network.state_transition(
        payment_network_identifier,
        token_network_state,
        state_change1,
        pseudo_random_generator,
        block_number,
    )

    state_change2 = ContractReceiveChannelNew(
        factories.make_transaction_hash(),
        token_network_id,
        channel_state2,
        block_number + 1,
    )

    # replay the ContractReceiveChannelNew state change
    iteration = token_network.state_transition(
        payment_network_identifier,
        token_network_state,
        state_change2,
        pseudo_random_generator,
        block_number,
    )

    msg = 'the channel must not have been overwritten'
    channelmap_by_id = iteration.new_state.channelidentifiers_to_channels
    assert channelmap_by_id[channel_state1.identifier] == channel_state1, msg

    channelmap_by_address = iteration.new_state.partneraddresses_to_channelidentifiers
    partner_channels_ids = channelmap_by_address[channel_state1.partner_state.address]
    assert channel_state1.identifier in partner_channels_ids, msg
def test_initiator_handle_contract_receive_after_channel_closed():
    """ Initiator must accept on-chain secret reveal if the channel is closed.
    However, the off-chain unlock must not be done!

    This will happen because secrets are registered after a channel is closed,
    during the settlement window.
    """
    block_number = 10
    setup = setup_initiator_tests(amount=UNIT_TRANSFER_AMOUNT * 2, block_number=block_number)

    transfer = setup.current_state.initiator.transfer
    assert transfer.lock.secrethash in setup.channel.our_state.secrethashes_to_lockedlocks

    channel_closed = ContractReceiveChannelClosed(
        transaction_hash=factories.make_transaction_hash(),
        transaction_from=factories.make_address(),
        token_network_identifier=setup.channel.token_network_identifier,
        channel_identifier=setup.channel.identifier,
        block_number=block_number,
    )
    channel_close_transition = channel.state_transition(
        channel_state=setup.channel,
        state_change=channel_closed,
        pseudo_random_generator=setup.prng,
        block_number=block_number,
    )
    channel_state = channel_close_transition.new_state

    state_change = ContractReceiveSecretReveal(
        transaction_hash=factories.make_transaction_hash(),
        secret_registry_address=factories.make_address(),
        secrethash=transfer.lock.secrethash,
        secret=UNIT_SECRET,
        block_number=transfer.lock.expiration,
    )
    channel_map = {
        channel_state.identifier: channel_state,
    }
    iteration = initiator_manager.handle_onchain_secretreveal(
        payment_state=setup.current_state,
        state_change=state_change,
        channelidentifiers_to_channels=channel_map,
        pseudo_random_generator=setup.prng,
    )
    secrethash = setup.current_state.initiator.transfer_description.secrethash
    assert secrethash in channel_state.our_state.secrethashes_to_onchain_unlockedlocks

    msg = 'The channel is closed already, the balance proof must not be sent off-chain'
    assert not events.must_contain_entry(iteration.events, SendBalanceProof, {}), msg
def test_channel_close_called_only_once():
    class MockCheckCallsToClose:
        def __init__(self):
            self.address = 'mockcheckcallstoclosemockcheckcallstoclo'
            self.close_calls = 0

        def opened(self):
            return 1

        def closed(self):
            return 0

        def close(self, nonce, transferred_amount, locksroot, extra_hash, signature):
            self.close_calls += 1

    netting_channel = NettingChannelMock()
    token_address = make_address()
    privkey1, address1 = make_privkey_address()
    address2 = make_address()

    balance1 = 70
    balance2 = 110

    reveal_timeout = 5
    settle_timeout = 15

    our_state = ChannelEndState(address1, balance1, None, EMPTY_MERKLE_TREE)
    partner_state = ChannelEndState(address2, balance2, None, EMPTY_MERKLE_TREE)

    channel_for_hashlock = list()
    netting_channel = MockCheckCallsToClose()

    external_state = ChannelExternalState(
        lambda *args: channel_for_hashlock.append(args),
        netting_channel,
    )

    test_channel = Channel(
        our_state,
        partner_state,
        external_state,
        token_address,
        reveal_timeout,
        settle_timeout,
    )

    test_channel.external_state.close(None)
    test_channel.external_state.close(None)

    assert netting_channel.close_calls == 1
def test_timestamped_event():
    event = EventPaymentSentFailed(
        factories.make_payment_network_identifier(),
        factories.make_address(),
        1,
        factories.make_address(),
        'whatever',
    )
    log_time = '2018-09-07T20:02:35.000'

    timestamped = TimestampedEvent(event, log_time)
    assert timestamped.log_time == log_time
    assert timestamped.reason == timestamped.wrapped_event.reason == 'whatever'
    assert timestamped.identifier == 1
def test_contract_receive_channelnew_must_be_idempotent():
    block_number = 10
    pseudo_random_generator = random.Random()

    token_network_id = factories.make_address()
    token_id = factories.make_address()
    token_network_state = TokenNetworkState(token_network_id, token_id)

    amount = 30
    our_balance = amount + 50
    channel_state1 = factories.make_channel(our_balance=our_balance)
    channel_state2 = copy.deepcopy(channel_state1)

    state_change1 = ContractReceiveChannelNew(token_network_id, channel_state1)
    token_network.state_transition(
        token_network_state,
        state_change1,
        pseudo_random_generator,
        block_number,
    )

    # change the existing channel
    payment_identifier = 1
    message_identifier = random.randint(0, UINT64_MAX)
    channel.send_directtransfer(
        channel_state1,
        amount,
        message_identifier,
        payment_identifier,
    )

    state_change2 = ContractReceiveChannelNew(token_network_id, channel_state2)

    # replay the ContractReceiveChannelNew state change
    iteration = token_network.state_transition(
        token_network_state,
        state_change2,
        pseudo_random_generator,
        block_number,
    )

    msg = 'the channel must not have been overwritten'
    channelmap_by_id = iteration.new_state.channelidentifiers_to_channels
    assert channelmap_by_id[channel_state1.identifier] == channel_state1, msg

    channelmap_by_address = iteration.new_state.partneraddresses_to_channels
    assert channelmap_by_address[channel_state1.partner_state.address] == channel_state1, msg
def test_invalid_timeouts():
    token_address = make_address()
    reveal_timeout = 5
    settle_timeout = 15

    address1 = make_address()
    address2 = make_address()
    balance1 = 10
    balance2 = 10

    our_state = ChannelEndState(address1, balance1, None, EMPTY_MERKLE_TREE)
    partner_state = ChannelEndState(address2, balance2, None, EMPTY_MERKLE_TREE)
    external_state = make_external_state()

    # do not allow a reveal timeout larger than the settle timeout
    with pytest.raises(ValueError):
        large_reveal_timeout = 50
        small_settle_timeout = 49

        Channel(
            our_state,
            partner_state,
            external_state,
            token_address,
            large_reveal_timeout,
            small_settle_timeout,
        )

    for invalid_value in (-1, 0, 1.1, 1.0, 'a', [], {}):
        with pytest.raises(ValueError):
            Channel(
                our_state,
                partner_state,
                external_state,
                token_address,
                invalid_value,
                settle_timeout,
            )

        with pytest.raises(ValueError):
            Channel(
                our_state,
                partner_state,
                external_state,
                token_address,
                reveal_timeout,
                invalid_value,
            )
def test_actioninitchain_restore():
    """ ActionInitChain *must* restore the previous pseudo random generator
    state.

    Message identifiers are used for confirmation messages, e.g. delivered and
    processed messages, so each message identifier must not collide with a
    previous identifier; this is why the PRNG is used.

    Additionally, during restarts the state changes are reapplied, and the
    re-execution of the state changes must be deterministic, otherwise
    undefined behavior may happen. For this reason the state of the PRNG must
    be restored.

    If the above is not respected, the message ids generated during restart
    will not match the previous IDs and the message queues won't be properly
    cleared up.
    """
    pseudo_random_generator = random.Random()
    block_number = 577
    our_address = factories.make_address()
    chain_id = 777

    original_obj = state_change.ActionInitChain(
        pseudo_random_generator,
        block_number,
        our_address,
        chain_id,
    )

    decoded_obj = JSONSerializer.deserialize(
        JSONSerializer.serialize(original_obj),
    )

    assert original_obj == decoded_obj
def test_endpointregistry(private_keys, blockchain_services, contract_manager):
    chain = blockchain_services.blockchain_services[0]
    my_address = privatekey_to_address(private_keys[0])

    endpointregistry_address = deploy_contract_web3(
        contract_name=CONTRACT_ENDPOINT_REGISTRY,
        deploy_client=chain.client,
        contract_manager=contract_manager,
    )
    discovery_proxy = chain.discovery(endpointregistry_address)

    contract_discovery = ContractDiscovery(my_address, discovery_proxy)

    unregistered_address = make_address()

    # get should raise for unregistered addresses
    with pytest.raises(UnknownAddress):
        contract_discovery.get(my_address)

    with pytest.raises(UnknownAddress):
        contract_discovery.get(unregistered_address)

    contract_discovery.register(my_address, '127.0.0.1', 44444)
    assert contract_discovery.get(my_address) == ('127.0.0.1', 44444)

    contract_discovery.register(my_address, '127.0.0.1', 88888)
    assert contract_discovery.get(my_address) == ('127.0.0.1', 88888)

    with pytest.raises(UnknownAddress):
        contract_discovery.get(unregistered_address)
def test_initiator_handle_contract_receive_secret_reveal():
    """ Initiator must unlock off-chain if the secret is revealed on-chain and
    the channel is open.
    """
    setup = setup_initiator_tests(amount=UNIT_TRANSFER_AMOUNT * 2, block_number=10)

    transfer = setup.current_state.initiator.transfer
    assert transfer.lock.secrethash in setup.channel.our_state.secrethashes_to_lockedlocks

    state_change = ContractReceiveSecretReveal(
        transaction_hash=factories.make_transaction_hash(),
        secret_registry_address=factories.make_address(),
        secrethash=transfer.lock.secrethash,
        secret=UNIT_SECRET,
        block_number=transfer.lock.expiration,
    )

    message_identifier = message_identifier_from_prng(deepcopy(setup.prng))

    iteration = initiator_manager.handle_onchain_secretreveal(
        payment_state=setup.current_state,
        state_change=state_change,
        channelidentifiers_to_channels=setup.channel_map,
        pseudo_random_generator=setup.prng,
    )

    payment_identifier = setup.current_state.initiator.transfer_description.payment_identifier
    assert events.must_contain_entry(iteration.events, SendBalanceProof, {
        'message_identifier': message_identifier,
        'payment_identifier': payment_identifier,
    })
def test_update_must_fail_with_a_channel_address(tester_channels, private_keys):
    """ updateTransfer must not accept a transfer signed with the wrong channel address. """
    pkey0, pkey1, nettingchannel, channel0, channel1 = tester_channels[0]
    opened_block = nettingchannel.opened(sender=pkey0)
    wrong_channel = make_address()

    # make a transfer where pkey1 is the target
    transfer_wrong_recipient = DirectTransfer(
        identifier=1,
        nonce=1 + (opened_block * (2 ** 32)),
        token=channel0.token_address,
        channel=wrong_channel,
        transferred_amount=10,
        recipient=channel1.our_address,
        locksroot=EMPTY_MERKLE_ROOT,
    )

    our_address = privatekey_to_address(pkey0)
    our_sign_key = PrivateKey(pkey0)
    transfer_wrong_recipient.sign(our_sign_key, our_address)

    nettingchannel.close(sender=pkey0)

    transfer_wrong_recipient_hash = sha3(transfer_wrong_recipient.packed().data[:-65])
    with pytest.raises(TransactionFailed):
        nettingchannel.updateTransfer(
            transfer_wrong_recipient.nonce,
            transfer_wrong_recipient.transferred_amount,
            transfer_wrong_recipient.locksroot,
            transfer_wrong_recipient_hash,
            transfer_wrong_recipient.signature,
            sender=pkey1,
        )
def test_initiator_handle_contract_receive_secret_reveal_expired():
    """ Initiator must *not* unlock off-chain if the secret is revealed
    on-chain *after* the lock expiration.
    """
    setup = setup_initiator_tests(amount=UNIT_TRANSFER_AMOUNT * 2, block_number=10)

    transfer = setup.current_state.initiator.transfer
    assert transfer.lock.secrethash in setup.channel.our_state.secrethashes_to_lockedlocks

    state_change = ContractReceiveSecretReveal(
        transaction_hash=factories.make_transaction_hash(),
        secret_registry_address=factories.make_address(),
        secrethash=transfer.lock.secrethash,
        secret=UNIT_SECRET,
        block_number=transfer.lock.expiration + 1,
    )

    iteration = initiator_manager.handle_onchain_secretreveal(
        payment_state=setup.current_state,
        state_change=state_change,
        channelidentifiers_to_channels=setup.channel_map,
        pseudo_random_generator=setup.prng,
    )

    assert events.must_contain_entry(iteration.events, SendBalanceProof, {}) is None
def test_channel_must_accept_expired_locks():
    """ A node may go offline for an undetermined period of time, and when it
    comes back online it must accept the messages that are waiting, otherwise
    the partner node won't make progress with its queue.

    If a node N goes offline for a number B of blocks, and the partner does
    not close the channel, when N comes back online some of the messages from
    its partner may have expired. Nevertheless these messages are ordered and
    must be accepted for the partner to make progress with its queue.

    Note: Accepting a message with an expired lock does *not* imply the token
    transfer happened, and the receiver node must *not* forward the transfer,
    only accept the message allowing the partner to progress with its message
    queue.
    """
    balance1 = 70
    balance2 = 110
    reveal_timeout = 5
    settle_timeout = 15
    privkey1, address1 = make_privkey_address()
    privkey2, address2 = make_privkey_address()
    token_address = make_address()

    our_state = ChannelEndState(
        address1,
        balance1,
        None,
        EMPTY_MERKLE_TREE,
    )
    partner_state = ChannelEndState(
        address2,
        balance2,
        None,
        EMPTY_MERKLE_TREE,
    )
    external_state = make_external_state()

    test_channel = Channel(
        our_state,
        partner_state,
        external_state,
        token_address,
        reveal_timeout,
        settle_timeout,
    )

    block_number = 10
    transfer = make_mediated_transfer(
        nonce=test_channel.get_next_nonce(),
        token=test_channel.token_address,
        channel=test_channel.channel_address,
        expiration=block_number + settle_timeout,
        recipient=address1,
    )
    transfer.sign(privkey2, address2)

    test_channel.register_transfer(
        block_number + settle_timeout + 1,
        transfer,
    )
def test_endpointregistry(private_keys, blockchain_services):
    chain = blockchain_services.blockchain_services[0]
    my_address = privatekey_to_address(private_keys[0])

    endpointregistry_address = chain.deploy_contract(
        'EndpointRegistry',
        get_contract_path('EndpointRegistry.sol'),
    )
    discovery_proxy = chain.discovery(endpointregistry_address)

    contract_discovery = ContractDiscovery(my_address, discovery_proxy)

    unregistered_address = make_address()

    # get should raise for unregistered addresses
    with pytest.raises(UnknownAddress):
        contract_discovery.get(my_address)

    with pytest.raises(UnknownAddress):
        contract_discovery.get(unregistered_address)

    assert contract_discovery.nodeid_by_host_port(('127.0.0.1', 44444)) is None

    contract_discovery.register(my_address, '127.0.0.1', 44444)
    assert contract_discovery.nodeid_by_host_port(('127.0.0.1', 44444)) == my_address
    assert contract_discovery.get(my_address) == ('127.0.0.1', 44444)

    contract_discovery.register(my_address, '127.0.0.1', 88888)
    assert contract_discovery.nodeid_by_host_port(('127.0.0.1', 88888)) == my_address
    assert contract_discovery.get(my_address) == ('127.0.0.1', 88888)

    with pytest.raises(UnknownAddress):
        contract_discovery.get(unregistered_address)
def mock_udp(
        raiden_udp_ports,
        throttle_capacity,
        throttle_fill_rate,
        retry_interval,
        retries_before_backoff,
        nat_invitation_timeout,
        nat_keepalive_retries,
        nat_keepalive_timeout,
):
    throttle_policy = TokenBucket(throttle_capacity, throttle_fill_rate)

    host = '127.0.0.1'
    port = raiden_udp_ports[0]
    address = make_address()

    config = dict(
        retry_interval=retry_interval,
        retries_before_backoff=retries_before_backoff,
        nat_invitation_timeout=nat_invitation_timeout,
        nat_keepalive_retries=nat_keepalive_retries,
        nat_keepalive_timeout=nat_keepalive_timeout,
    )

    transport = UDPTransport(
        address,
        MockDiscovery,
        server._udp_socket((host, port)),  # pylint: disable=protected-access
        throttle_policy,
        config,
    )
    transport.raiden = MockRaidenService(ADDR)

    return transport
def test_initiator_handle_contract_receive_emptyhash_secret_reveal():
    """ Initiator must not accept an on-chain secret reveal with the empty hash. """
    setup = setup_initiator_tests(amount=UNIT_TRANSFER_AMOUNT * 2, block_number=10)

    transfer = setup.current_state.initiator.transfer
    assert transfer.lock.secrethash in setup.channel.our_state.secrethashes_to_lockedlocks

    state_change = ContractReceiveSecretReveal(
        transaction_hash=factories.make_transaction_hash(),
        secret_registry_address=factories.make_address(),
        secrethash=transfer.lock.secrethash,
        secret=EMPTY_HASH,
        block_number=transfer.lock.expiration,
    )

    iteration = initiator_manager.handle_onchain_secretreveal(
        payment_state=setup.current_state,
        state_change=state_change,
        channelidentifiers_to_channels=setup.channel_map,
        pseudo_random_generator=setup.prng,
    )
    assert len(iteration.events) == 0

    # make sure the original lock wasn't moved
    assert transfer.lock.secrethash in setup.channel.our_state.secrethashes_to_lockedlocks
def test_close_wrong_channel(tester_channels):
    """ Close must not accept a transfer aimed at a different channel. """
    pkey0, pkey1, nettingchannel, channel0, _ = tester_channels[0]
    opened_block = nettingchannel.opened(sender=pkey0)
    wrong_address = make_address()

    # make a transfer whose channel address is wrong
    transfer_wrong_channel = DirectTransfer(
        identifier=1,
        nonce=1 + (opened_block * (2 ** 32)),
        token=channel0.token_address,
        channel=wrong_address,
        transferred_amount=10,
        recipient=channel0.our_address,
        locksroot=EMPTY_MERKLE_ROOT,
    )

    transfer_wrong_channel.sign(PrivateKey(pkey1), privatekey_to_address(pkey1))

    transfer_wrong_channel_hash = sha3(transfer_wrong_channel.packed().data[:-65])
    with pytest.raises(TransactionFailed):
        nettingchannel.close(
            transfer_wrong_channel.nonce,
            transfer_wrong_channel.transferred_amount,
            transfer_wrong_channel.locksroot,
            transfer_wrong_channel_hash,
            transfer_wrong_channel.signature,
            sender=pkey0,
        )
def test_delivered_message_must_clean_unordered_messages(chain_id):
    pseudo_random_generator = random.Random()
    block_number = 10
    our_address = factories.make_address()
    recipient = factories.make_address()
    channel_identifier = 1
    message_identifier = random.randint(0, 2 ** 16)
    secret = factories.random_secret()

    chain_state = state.ChainState(
        pseudo_random_generator,
        block_number,
        our_address,
        chain_id,
    )
    queue_identifier = QueueIdentifier(
        recipient,
        events.CHANNEL_IDENTIFIER_GLOBAL_QUEUE,
    )

    # Regression test:
    # The delivered_message handler previously worked only with a queue of
    # one element.
    first_message = events.SendSecretReveal(
        recipient,
        channel_identifier,
        message_identifier,
        secret,
    )
    second_message = events.SendSecretReveal(
        recipient,
        channel_identifier,
        random.randint(0, 2 ** 16),
        secret,
    )

    chain_state.queueids_to_queues[queue_identifier] = [first_message, second_message]

    delivered_message = state_change.ReceiveDelivered(recipient, message_identifier)

    iteration = node.handle_delivered(chain_state, delivered_message)
    new_queue = iteration.new_state.queueids_to_queues.get(queue_identifier, [])

    assert first_message not in new_queue
def test_token_network_registry(
        deploy_client,
        contract_manager,
        token_network_registry_proxy: TokenNetworkRegistry,
):
    assert token_network_registry_proxy.settlement_timeout_min() == TEST_SETTLE_TIMEOUT_MIN
    assert token_network_registry_proxy.settlement_timeout_max() == TEST_SETTLE_TIMEOUT_MAX

    bad_token_address = make_address()

    # try to register a non-existing token
    with pytest.raises(RaidenUnrecoverableError):
        token_network_registry_proxy.add_token(bad_token_address)

    # create a token and register it
    test_token = deploy_token(
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        initial_amount=1000,
        decimals=0,
        token_name='TKN',
        token_symbol='TKN',
    )
    test_token_address = to_canonical_address(test_token.contract.address)
    event_filter = token_network_registry_proxy.tokenadded_filter()
    token_network_address = token_network_registry_proxy.add_token(
        test_token_address,
    )

    # registering the same token a second time must fail
    with pytest.raises(RaidenRecoverableError) as exc:
        token_network_address = token_network_registry_proxy.add_token(
            test_token_address,
        )
    assert 'Token already registered' in str(exc)

    logs = event_filter.get_all_entries()
    assert len(logs) == 1
    decoded_event = token_network_registry_proxy.proxy.decode_event(logs[0])
    assert is_same_address(decoded_event['args']['token_address'], test_token.contract.address)
    assert is_same_address(
        decoded_event['args']['token_network_address'],
        token_network_address,
    )

    # test other getters
    assert token_network_registry_proxy.get_token_network(bad_token_address) is None
    assert is_same_address(
        token_network_registry_proxy.get_token_network(test_token_address),
        token_network_address,
    )

    with pytest.raises(ValueError):
        assert token_network_registry_proxy.get_token_network(None) is None
    assert token_network_registry_proxy.get_token_network(bad_token_address) is None
    assert token_network_registry_proxy.get_token_network(token_network_address) is None
    assert token_network_registry_proxy.get_token_network(test_token_address) is not None
def test_channel_increase_nonce_and_transferred_amount():
    """ The nonce must increase with each new transfer. """
    token_address = make_address()
    privkey1, address1 = make_privkey_address()
    address2 = make_address()

    balance1 = 70
    balance2 = 110

    reveal_timeout = 5
    settle_timeout = 15

    our_state = ChannelEndState(address1, balance1, None, EMPTY_MERKLE_TREE)
    partner_state = ChannelEndState(address2, balance2, None, EMPTY_MERKLE_TREE)
    external_state = make_external_state()

    test_channel = Channel(
        our_state,
        partner_state,
        external_state,
        token_address,
        reveal_timeout,
        settle_timeout,
    )

    previous_nonce = test_channel.get_next_nonce()
    previous_transferred = test_channel.transferred_amount

    amount = 7
    block_number = 1
    for _ in range(10):
        direct_transfer = test_channel.create_directtransfer(amount, identifier=1)
        direct_transfer.sign(privkey1, address1)
        test_channel.register_transfer(block_number, direct_transfer)

        new_nonce = test_channel.get_next_nonce()
        new_transferred = test_channel.transferred_amount

        assert new_nonce == previous_nonce + 1
        assert new_transferred == previous_transferred + amount

        previous_nonce = new_nonce
        previous_transferred = new_transferred
def make_balance_proof_from_counter(counter) -> BalanceProofUnsignedState:
    return BalanceProofUnsignedState(
        nonce=next(counter),
        transferred_amount=next(counter),
        locked_amount=next(counter),
        locksroot=sha3(next(counter).to_bytes(1, 'big')),
        token_network_identifier=factories.make_address(),
        channel_identifier=next(counter),
        chain_id=next(counter),
    )
def test_delivered_processed_message_cleanup():
    recipient = factories.make_address()
    channel_identifier = 1
    secret = factories.random_secret()

    first_message = events.SendSecretReveal(
        recipient,
        channel_identifier,
        random.randint(0, 2 ** 16),
        secret,
    )
    second_message = events.SendSecretReveal(
        recipient,
        channel_identifier,
        random.randint(0, 2 ** 16),
        secret,
    )
    message_queue = [first_message, second_message]

    fake_message_identifier = random.randint(0, 2 ** 16)
    node.inplace_delete_message(
        message_queue,
        state_change.ReceiveDelivered(recipient, fake_message_identifier),
    )
    assert first_message in message_queue, 'invalid message id must be ignored'
    assert second_message in message_queue, 'invalid message id must be ignored'

    invalid_sender_address = factories.make_address()
    node.inplace_delete_message(
        message_queue,
        state_change.ReceiveDelivered(invalid_sender_address, first_message.message_identifier),
    )
    assert first_message in message_queue, 'invalid sender id must be ignored'
    assert second_message in message_queue, 'invalid sender id must be ignored'

    node.inplace_delete_message(
        message_queue,
        state_change.ReceiveProcessed(recipient, first_message.message_identifier),
    )
    msg = 'message must be cleared when a valid processed is received'
    assert first_message not in message_queue, msg
    assert second_message in message_queue, msg
def test_v1_event_payment_sent_failed_schema():
    event = EventPaymentSentFailed(
        payment_network_identifier=factories.make_payment_network_identifier(),
        token_network_identifier=factories.make_address(),
        identifier=1,
        target=factories.make_address(),
        reason='whatever',
    )
    log_time = '2018-09-07T20:02:35.000'

    timestamped = TimestampedEvent(event, log_time)

    dumped = EventPaymentSentFailedSchema().dump(timestamped)

    expected = {
        'event': 'EventPaymentSentFailed',
        'log_time': log_time,
        'reason': 'whatever',
    }

    assert all(dumped.data.get(key) == value for key, value in expected.items())
def test_addchannel_must_not_overwrite():  # pylint: disable=too-many-locals
    """ Calling add_channel for an existing channel must not overwrite it. """
    our_address = make_address()
    partner_address = make_address()
    channel_manager_address = make_address()
    token_address = make_address()
    channel_address = make_address()

    our_state = ParticipantStateMock(our_address)
    partner_state = ParticipantStateMock(partner_address)

    netting_channel = NettingChannelMock(channel_address)
    external_state = ExternalStateMock(netting_channel)

    reveal_timeout = 5
    settle_timeout = 10

    channel_detail = ChannelDetails(
        channel_address,
        our_state,
        partner_state,
        external_state,
        reveal_timeout,
        settle_timeout,
    )

    edge_list = []
    channel_detail_list = [channel_detail]
    graph = ChannelGraph(
        our_address,
        channel_manager_address,
        token_address,
        edge_list,
        channel_detail_list,
    )

    first_instance = graph.address_to_channel[channel_address]
    graph.add_channel(channel_detail)

    assert first_instance is graph.address_to_channel[channel_address]
def test_update_must_fail_with_a_channel_address(tester_channels):
    """ updateTransfer must not accept a transfer signed with the wrong channel address. """
    pkey0, pkey1, nettingchannel, channel0, channel1 = tester_channels[0]
    opened_block = nettingchannel.opened(sender=pkey0)
    wrong_channel = factories.make_address()

    # make a transfer where pkey1 is the target
    message_identifier = random.randint(0, UINT64_MAX)
    transfer_wrong_recipient = DirectTransfer(
        message_identifier=message_identifier,
        payment_identifier=1,
        nonce=1 + (opened_block * (2 ** 32)),
        token=channel0.token_address,
        channel=wrong_channel,
        transferred_amount=10,
        recipient=channel1.our_state.address,
        locksroot=EMPTY_MERKLE_ROOT,
    )

    our_address = privatekey_to_address(pkey0)
    our_sign_key = PrivateKey(pkey0)
    transfer_wrong_recipient.sign(our_sign_key, our_address)

    nettingchannel.close(sender=pkey0)

    transfer_wrong_recipient_hash = sha3(transfer_wrong_recipient.packed().data[:-65])
    with pytest.raises(TransactionFailed):
        nettingchannel.updateTransfer(
            transfer_wrong_recipient.nonce,
            transfer_wrong_recipient.transferred_amount,
            transfer_wrong_recipient.locksroot,
            transfer_wrong_recipient_hash,
            transfer_wrong_recipient.signature,
            sender=pkey1,
        )
def test_channelstate_directtransfer_overspent():
    """Receiving a direct transfer with an amount larger than the
    distributable must be ignored.
    """
    our_model1, _ = create_model(70)
    partner_model1, privkey2 = create_model(100)
    channel_state = create_channel_from_models(our_model1, partner_model1)
    payment_network_identifier = factories.make_address()

    distributable = channel.get_distributable(channel_state.partner_state, channel_state.our_state)

    nonce = 1
    transferred_amount = distributable + 1
    receive_lockedtransfer = make_receive_transfer_direct(
        payment_network_identifier,
        channel_state,
        privkey2,
        nonce,
        transferred_amount,
    )

    is_valid, _ = channel.is_valid_directtransfer(
        receive_lockedtransfer,
        channel_state,
        channel_state.partner_state,
        channel_state.our_state,
    )
    assert not is_valid, 'message is invalid because it is spending more than the distributable'

    iteration = channel.handle_receive_directtransfer(
        channel_state,
        receive_lockedtransfer,
    )

    assert must_contain_entry(iteration.events, EventTransferReceivedInvalidDirectTransfer, {})
    assert_partner_state(channel_state.our_state, channel_state.partner_state, our_model1)
    assert_partner_state(channel_state.partner_state, channel_state.our_state, partner_model1)
def test_endpointregistry(private_keys, blockchain_services):
    chain = blockchain_services.blockchain_services[0]
    my_address = privatekey_to_address(private_keys[0])

    endpointregistry_address = deploy_contract_web3(
        CONTRACT_ENDPOINT_REGISTRY,
        chain.client,
        num_confirmations=None,
    )
    discovery_proxy = chain.discovery(endpointregistry_address)

    contract_discovery = ContractDiscovery(my_address, discovery_proxy)

    unregistered_address = make_address()

    # get should raise for unregistered addresses
    with pytest.raises(UnknownAddress):
        contract_discovery.get(my_address)

    with pytest.raises(UnknownAddress):
        contract_discovery.get(unregistered_address)

    assert contract_discovery.nodeid_by_host_port(('127.0.0.1', 44444)) is None

    contract_discovery.register(my_address, '127.0.0.1', 44444)
    assert contract_discovery.nodeid_by_host_port(('127.0.0.1', 44444)) == my_address
    assert contract_discovery.get(my_address) == ('127.0.0.1', 44444)

    contract_discovery.register(my_address, '127.0.0.1', 88888)
    assert contract_discovery.nodeid_by_host_port(('127.0.0.1', 88888)) == my_address
    assert contract_discovery.get(my_address) == ('127.0.0.1', 88888)

    with pytest.raises(UnknownAddress):
        contract_discovery.get(unregistered_address)
def test_handle_node_change_network_state(chain_state, netting_channel_state, monkeypatch):
    state_change = ActionChangeNodeNetworkState(
        node_address=factories.make_address(),
        network_state=NetworkState.REACHABLE,
    )
    transition_result = handle_action_change_node_network_state(chain_state, state_change)
    # no events if no mediator tasks are there to apply to
    assert not transition_result.events

    mediator_state = MediatorTransferState(
        secrethash=UNIT_SECRETHASH,
        routes=[
            RouteState(
                route=[netting_channel_state.partner_state.address],
                forward_channel_id=netting_channel_state.canonical_identifier.channel_identifier,
            )
        ],
    )
    subtask = MediatorTask(
        token_network_address=netting_channel_state.canonical_identifier.token_network_address,
        mediator_state=mediator_state,
    )
    chain_state.payment_mapping.secrethashes_to_task[UNIT_SECRETHASH] = subtask

    lock = factories.HashTimeLockState(amount=0, expiration=2, secrethash=UNIT_SECRETHASH)
    netting_channel_state.partner_state.secrethashes_to_lockedlocks[UNIT_SECRETHASH] = lock
    netting_channel_state.partner_state.pending_locks = PendingLocksState([bytes(lock.encoded)])

    result = object()
    monkeypatch.setattr(
        raiden.transfer.node,
        "subdispatch_mediatortask",
        lambda *args, **kwargs: TransitionResult(chain_state, [result]),
    )
    transition_result = handle_action_change_node_network_state(chain_state, state_change)
    assert transition_result.events == [result]
def make_initiator_state(
        routes,
        transfer_description,
        channelmap,
        block_number,
        payment_network_identifier=None,
):
    if payment_network_identifier is None:
        payment_network_identifier = factories.make_address()

    init_state_change = ActionInitInitiator(
        payment_network_identifier,
        transfer_description,
        routes,
    )

    initial_state = None
    iteration = initiator_manager.state_transition(
        initial_state,
        init_state_change,
        channelmap,
        block_number,
    )

    return iteration.new_state
def test_trigger_scheduled_events(monitoring_service: MonitoringService):
    monitoring_service.context.required_confirmations = 5

    create_default_token_network(monitoring_service.context)
    triggered_event = ActionMonitoringTriggeredEvent(
        token_network_address=DEFAULT_TOKEN_NETWORK_ADDRESS,
        channel_identifier=make_channel_identifier(),
        non_closing_participant=make_address(),
    )

    current_confirmed_block = monitoring_service.context.latest_confirmed_block
    # Trigger the event on a currently unconfirmed block
    trigger_block = BlockNumber(current_confirmed_block + 1)

    assert len(monitoring_service.database.get_scheduled_events(trigger_block)) == 0
    monitoring_service.context.database.upsert_scheduled_event(
        ScheduledEvent(trigger_block_number=trigger_block, event=triggered_event)
    )
    assert len(monitoring_service.database.get_scheduled_events(trigger_block)) == 1

    # Now run `_trigger_scheduled_events` and see if the event is removed
    monitoring_service._trigger_scheduled_events()  # pylint: disable=protected-access
    assert len(monitoring_service.database.get_scheduled_events(trigger_block)) == 0
def test_geth_discover_next_available_nonce(
        deploy_client,
        skip_if_parity,  # pylint: disable=unused-argument
):
    """ Test that geth_discover_next_available_nonce works correctly

    Reproduced the problem seen here:
    https://github.com/raiden-network/raiden/pull/3683#issue-264551799
    """
    greenlets = set()
    for _ in range(100):
        greenlets.add(
            gevent.spawn(
                deploy_client.send_transaction,
                make_address(),  # to
                50000,  # startgas
            )
        )

    gevent.sleep(0.5)
    nonce = geth_discover_next_available_nonce(
        web3=deploy_client.web3,
        address=deploy_client.address,
    )
    assert nonce > 0
    assert nonce < 100

    gevent.joinall(greenlets, raise_error=True)
def test_receive_directtransfer_invalidtoken(raiden_network, deposit, token_addresses):
    app0, app1 = raiden_network
    token_address = token_addresses[0]
    channel0 = get_channelstate(app0, app1, token_address)

    identifier = 1
    invalid_token_address = make_address()
    channel_identifier = channel0.identifier
    direct_transfer_message = DirectTransfer(
        identifier=identifier,
        nonce=1,
        token=invalid_token_address,
        channel=channel_identifier,
        transferred_amount=0,
        recipient=app1.raiden.address,
        locksroot=EMPTY_MERKLE_ROOT,
    )

    sign_and_inject(
        direct_transfer_message,
        app0.raiden.private_key,
        app0.raiden.address,
        app1,
    )

    assert_synched_channel_state(
        token_address,
        app0, deposit, [],
        app1, deposit, [],
    )
def test_transfer_update_event(tester_channels, tester_events):
    """ The event TransferUpdated is emitted after a successful call to updateTransfer. """
    pkey0, pkey1, nettingchannel, channel0, channel1 = tester_channels[0]
    address1 = privatekey_to_address(pkey1)
    payment_network_identifier = factories.make_address()

    direct0 = make_direct_transfer_from_channel(
        payment_network_identifier,
        channel0,
        channel1,
        amount=90,
        pkey=pkey0,
    )

    nettingchannel.close(sender=pkey0)
    previous_events = list(tester_events)

    direct0_hash = sha3(direct0.packed().data[:-65])
    nettingchannel.updateTransfer(
        direct0.nonce,
        direct0.transferred_amount,
        direct0.locksroot,
        direct0_hash,
        direct0.signature,
        sender=pkey1,
    )
    assert len(previous_events) + 1 == len(tester_events)

    last_event = event_decoder(tester_events[-1], nettingchannel.translator)
    assert last_event == {
        '_event_type': b'TransferUpdated',
        'node_address': address_encoder(address1),
    }
def test_receive_directtransfer_invalidtoken(raiden_network, deposit, token_addresses):
    app0, app1 = raiden_network
    registry = app0.raiden.default_registry.address
    token_address = token_addresses[0]
    channel0 = get_channelstate(app0, app1, token_address)

    message_identifier = random.randint(0, UINT64_MAX)
    payment_identifier = 1
    invalid_token_address = make_address()
    channel_identifier = channel0.identifier
    direct_transfer_message = DirectTransfer(
        message_identifier=message_identifier,
        payment_identifier=payment_identifier,
        nonce=1,
        registry_address=registry,
        token=invalid_token_address,
        channel=channel_identifier,
        transferred_amount=0,
        locked_amount=0,
        recipient=app1.raiden.address,
        locksroot=EMPTY_MERKLE_ROOT,
    )

    sign_and_inject(
        direct_transfer_message,
        app0.raiden.private_key,
        app0.raiden.address,
        app1,
    )

    assert_synched_channel_state(
        token_address,
        app0, deposit, [],
        app1, deposit, [],
    )
def test_setup_proxies_all_addresses_are_known(routing_mode):
    """ Test that startup for proxies works fine if all addresses are given and routing is basic """
    network_id = 42
    config = {
        'environment_type': Environment.DEVELOPMENT,
        'chain_id': network_id,
        'services': {},
    }
    contracts, contract_addresses_known = setup_contracts_or_exit(config, network_id)
    blockchain_service = MockChain(network_id=network_id, node_address=make_address())

    with patched_get_for_succesful_pfs_info():
        proxies = setup_proxies_or_exit(
            config=config,
            tokennetwork_registry_contract_address=None,
            secret_registry_contract_address=None,
            endpoint_registry_contract_address=None,
            user_deposit_contract_address=None,
            service_registry_contract_address=None,
            contract_addresses_known=contract_addresses_known,
            blockchain_service=blockchain_service,
            contracts=contracts,
            routing_mode=routing_mode,
            pathfinding_service_address='my-pfs',
            pathfinding_eth_address=make_checksum_address(),
        )

    assert proxies
    assert proxies.token_network_registry
    assert proxies.secret_registry
    assert proxies.user_deposit
    assert proxies.service_registry
def test_receive_hashlocktransfer_unknown(raiden_network):
    app0 = raiden_network[0]  # pylint: disable=unbalanced-tuple-unpacking

    graph0 = app0.raiden.token_to_channelgraph.values()[0]

    other_key = PrivateKey(HASH2)
    other_address = privatekey_to_address(HASH2)
    amount = 10
    refund_transfer = make_refund_transfer(
        identifier=1,
        nonce=1,
        token=graph0.token_address,
        channel=other_address,
        transferred_amount=amount,
        recipient=app0.raiden.address,
        locksroot=UNIT_HASHLOCK,
        amount=amount,
        hashlock=UNIT_HASHLOCK,
    )
    sign_and_send(refund_transfer, other_key, other_address, app0)

    secret = Secret(
        identifier=1,
        nonce=1,
        channel=make_address(),
        transferred_amount=amount,
        locksroot=UNIT_HASHLOCK,
        secret=UNIT_SECRET,
    )
    sign_and_send(secret, other_key, other_address, app0)

    secret_request = SecretRequest(1, UNIT_HASHLOCK, 1)
    sign_and_send(secret_request, other_key, other_address, app0)

    reveal_secret = RevealSecret(UNIT_SECRET)
    sign_and_send(reveal_secret, other_key, other_address, app0)
def test_write_read_log():
    wal = new_wal()

    block_number = 1337
    block = Block(block_number)

    unlocked_amount = 10
    returned_amount = 5
    participant = factories.make_address()
    partner = factories.make_address()
    locksroot = sha3(b'test_write_read_log')
    contract_receive_unlock = ContractReceiveChannelBatchUnlock(
        factories.make_transaction_hash(),
        factories.make_address(),
        participant,
        partner,
        locksroot,
        unlocked_amount,
        returned_amount,
    )

    state_changes1 = wal.storage.get_statechanges_by_identifier(
        from_identifier=0,
        to_identifier='latest',
    )
    count1 = len(state_changes1)

    wal.log_and_dispatch(block)

    state_changes2 = wal.storage.get_statechanges_by_identifier(
        from_identifier=0,
        to_identifier='latest',
    )
    count2 = len(state_changes2)
    assert count1 + 1 == count2

    wal.log_and_dispatch(contract_receive_unlock)

    state_changes3 = wal.storage.get_statechanges_by_identifier(
        from_identifier=0,
        to_identifier='latest',
    )
    count3 = len(state_changes3)
    assert count2 + 1 == count3

    result1, result2 = state_changes3[-2:]
    assert isinstance(result1, Block)
    assert result1.block_number == block_number

    assert isinstance(result2, ContractReceiveChannelBatchUnlock)
    assert result2.participant == participant
    assert result2.partner == partner
    assert result2.locksroot == locksroot
    assert result2.unlocked_amount == unlocked_amount
    assert result2.returned_tokens == returned_amount

    # Make sure state snapshot can only go for corresponding state change ids
    with pytest.raises(sqlite3.IntegrityError):
        wal.storage.write_state_snapshot(34, 'AAAA')

    # Make sure we can only have a single state snapshot
    assert wal.storage.get_latest_state_snapshot() is None

    wal.storage.write_state_snapshot(1, 'AAAA')
    assert wal.storage.get_latest_state_snapshot() == (1, 'AAAA')

    wal.storage.write_state_snapshot(2, 'BBBB')
    assert wal.storage.get_latest_state_snapshot() == (2, 'BBBB')
def test_channelstate_filters():
    test_state = factories.make_chain_state(number_of_channels=5)
    chain_state = test_state.chain_state
    token_network_registry_address = test_state.token_network_registry_address
    token_address = test_state.token_address

    channel_open, channel_closing, channel_closed, channel_settling, channel_settled = (
        test_state.channels
    )

    in_progress = TransactionExecutionStatus(started_block_number=chain_state.block_number)
    done = TransactionExecutionStatus(
        started_block_number=chain_state.block_number,
        finished_block_number=chain_state.block_number,
        result=TransactionExecutionStatus.SUCCESS,
    )

    channel_closing.close_transaction = in_progress
    channel_closed.close_transaction = done
    channel_settling.close_transaction = done
    channel_settling.settle_transaction = in_progress
    channel_settled.close_transaction = done
    channel_settled.settle_transaction = done

    unknown_token = factories.make_address()
    assert (
        views.get_channelstate_open(
            chain_state=chain_state,
            token_network_registry_address=token_network_registry_address,
            token_address=unknown_token,
        )
        == []
    )

    opened = views.get_channelstate_open(
        chain_state=chain_state,
        token_network_registry_address=token_network_registry_address,
        token_address=token_address,
    )
    assert opened == [channel_open]

    closing = views.get_channelstate_closing(
        chain_state=chain_state,
        token_network_registry_address=token_network_registry_address,
        token_address=token_address,
    )
    assert closing == [channel_closing]

    closed = views.get_channelstate_closed(
        chain_state=chain_state,
        token_network_registry_address=token_network_registry_address,
        token_address=token_address,
    )
    assert closed == [channel_closed]

    settling = views.get_channelstate_settling(
        chain_state=chain_state,
        token_network_registry_address=token_network_registry_address,
        token_address=token_address,
    )
    assert settling == [channel_settling]

    settled = views.get_channelstate_settled(
        chain_state=chain_state,
        token_network_registry_address=token_network_registry_address,
        token_address=token_address,
    )
    assert settled == [channel_settled]
def test_python_channel():
    token_address = make_address()
    privkey1, address1 = make_privkey_address()
    address2 = make_address()

    balance1 = 70
    balance2 = 110

    reveal_timeout = 5
    settle_timeout = 15

    block_number = 10

    our_state = ChannelEndState(address1, balance1, None, EMPTY_MERKLE_TREE)
    partner_state = ChannelEndState(address2, balance2, None, EMPTY_MERKLE_TREE)
    external_state = make_external_state()

    test_channel = Channel(
        our_state,
        partner_state,
        external_state,
        token_address,
        reveal_timeout,
        settle_timeout,
    )

    assert test_channel.contract_balance == our_state.contract_balance
    assert test_channel.transferred_amount == our_state.transferred_amount
    assert test_channel.distributable == our_state.contract_balance
    assert test_channel.outstanding == our_state.amount_locked
    assert test_channel.outstanding == 0
    assert test_channel.locked == partner_state.amount_locked
    assert test_channel.our_state.amount_locked == 0
    assert test_channel.partner_state.amount_locked == 0
    assert test_channel.get_next_nonce() == 1

    with pytest.raises(ValueError):
        test_channel.create_directtransfer(
            -10,
            identifier=1,
        )

    with pytest.raises(ValueError):
        test_channel.create_directtransfer(
            balance1 + 10,
            identifier=1,
        )

    amount1 = 10
    directtransfer = test_channel.create_directtransfer(
        amount1,
        identifier=1,
    )
    directtransfer.sign(privkey1, address1)
    test_channel.register_transfer(
        block_number,
        directtransfer,
    )

    assert test_channel.contract_balance == balance1
    assert test_channel.balance == balance1 - amount1
    assert test_channel.transferred_amount == amount1
    assert test_channel.distributable == balance1 - amount1
    assert test_channel.outstanding == 0
    assert test_channel.locked == 0
    assert test_channel.our_state.amount_locked == 0
    assert test_channel.partner_state.amount_locked == 0
    assert test_channel.get_next_nonce() == 2

    secret = sha3('test_channel')
    hashlock = sha3(secret)
    amount2 = 10
    fee = 0
    expiration = block_number + settle_timeout - 5
    identifier = 1
    mediatedtransfer = test_channel.create_mediatedtransfer(
        address1,
        address2,
        fee,
        amount2,
        identifier,
        expiration,
        hashlock,
    )
    mediatedtransfer.sign(privkey1, address1)
    test_channel.register_transfer(
        block_number,
        mediatedtransfer,
    )

    assert test_channel.contract_balance == balance1
    assert test_channel.balance == balance1 - amount1
    assert test_channel.transferred_amount == amount1
    assert test_channel.distributable == balance1 - amount1 - amount2
    assert test_channel.outstanding == 0
    assert test_channel.locked == amount2
    assert test_channel.our_state.amount_locked == amount2
    assert test_channel.partner_state.amount_locked == 0
    assert test_channel.get_next_nonce() == 3

    secret_message = test_channel.create_secret(identifier, secret)
    secret_message.sign(privkey1, address1)
    test_channel.register_transfer(block_number, secret_message)

    assert test_channel.contract_balance == balance1
    assert test_channel.balance == balance1 - amount1 - amount2
    assert test_channel.transferred_amount == amount1 + amount2
    assert test_channel.distributable == balance1 - amount1 - amount2
    assert test_channel.outstanding == 0
    assert test_channel.locked == 0
    assert test_channel.our_state.amount_locked == 0
    assert test_channel.partner_state.amount_locked == 0
    assert test_channel.get_next_nonce() == 4
def test_invalid_timeouts():
    token_address = factories.make_address()
    reveal_timeout = 5
    settle_timeout = 10
    identifier = factories.make_address()

    address1 = factories.make_address()
    address2 = factories.make_address()
    balance1 = 10
    balance2 = 10

    opened_transaction = TransactionExecutionStatus(
        None,
        1,
        TransactionExecutionStatus.SUCCESS,
    )
    closed_transaction = None
    settled_transaction = None

    our_state = NettingChannelEndState(address1, balance1)
    partner_state = NettingChannelEndState(address2, balance2)

    # do not allow a reveal timeout larger than the settle timeout
    with pytest.raises(ValueError):
        large_reveal_timeout = 50
        small_settle_timeout = 49

        NettingChannelState(
            identifier,
            token_address,
            large_reveal_timeout,
            small_settle_timeout,
            our_state,
            partner_state,
            opened_transaction,
            closed_transaction,
            settled_transaction,
        )

    # TypeError: 'a', [], {}
    for invalid_value in (-1, 0, 1.1, 1.0):
        with pytest.raises(ValueError):
            NettingChannelState(
                identifier,
                token_address,
                invalid_value,
                settle_timeout,
                our_state,
                partner_state,
                opened_transaction,
                closed_transaction,
                settled_transaction,
            )

        with pytest.raises(ValueError):
            NettingChannelState(
                identifier,
                token_address,
                reveal_timeout,
                invalid_value,
                our_state,
                partner_state,
                opened_transaction,
                closed_transaction,
                settled_transaction,
            )
def test_end_state():
    token_address = make_address()
    privkey1, address1 = make_privkey_address()
    address2 = make_address()
    channel_address = make_address()

    balance1 = 70
    balance2 = 110

    lock_secret = sha3(b'test_end_state')
    lock_amount = 30
    lock_expiration = 10
    lock_hashlock = sha3(lock_secret)

    state1 = ChannelEndState(address1, balance1, None, EMPTY_MERKLE_TREE)
    state2 = ChannelEndState(address2, balance2, None, EMPTY_MERKLE_TREE)

    assert state1.contract_balance == balance1
    assert state2.contract_balance == balance2
    assert state1.balance(state2) == balance1
    assert state2.balance(state1) == balance2

    assert state1.is_locked(lock_hashlock) is False
    assert state2.is_locked(lock_hashlock) is False

    assert merkleroot(state1.merkletree) == EMPTY_MERKLE_ROOT
    assert merkleroot(state2.merkletree) == EMPTY_MERKLE_ROOT

    assert state1.nonce is None
    assert state2.nonce is None

    lock = Lock(
        lock_amount,
        lock_expiration,
        lock_hashlock,
    )
    lock_hash = sha3(lock.as_bytes)

    transferred_amount = 0
    locksroot = state2.compute_merkleroot_with(lock)

    locked_transfer = LockedTransfer(
        1,
        nonce=1,
        token=token_address,
        channel=channel_address,
        transferred_amount=transferred_amount,
        recipient=state2.address,
        locksroot=locksroot,
        lock=lock,
    )

    transfer_target = make_address()
    transfer_initiator = make_address()
    fee = 0
    mediated_transfer = locked_transfer.to_mediatedtransfer(
        transfer_target,
        transfer_initiator,
        fee,
    )
    mediated_transfer.sign(privkey1, address1)

    state1.register_locked_transfer(mediated_transfer)

    assert state1.contract_balance == balance1
    assert state2.contract_balance == balance2
    assert state1.balance(state2) == balance1
    assert state2.balance(state1) == balance2

    assert state1.distributable(state2) == balance1 - lock_amount
    assert state2.distributable(state1) == balance2

    assert state1.amount_locked == lock_amount
    assert state2.amount_locked == 0

    assert state1.is_locked(lock_hashlock) is True
    assert state2.is_locked(lock_hashlock) is False

    assert merkleroot(state1.merkletree) == lock_hash
    assert merkleroot(state2.merkletree) == EMPTY_MERKLE_ROOT

    assert state1.nonce == 1
    assert state2.nonce is None

    with pytest.raises(ValueError):
        state1.update_contract_balance(balance1 - 10)

    state1.update_contract_balance(balance1 + 10)

    assert state1.contract_balance == balance1 + 10
    assert state2.contract_balance == balance2
    assert state1.balance(state2) == balance1 + 10
    assert state2.balance(state1) == balance2

    assert state1.distributable(state2) == balance1 - lock_amount + 10
    assert state2.distributable(state1) == balance2

    assert state1.amount_locked == lock_amount
    assert state2.amount_locked == 0

    assert state1.is_locked(lock_hashlock) is True
    assert state2.is_locked(lock_hashlock) is False

    assert merkleroot(state1.merkletree) == lock_hash
    assert merkleroot(state2.merkletree) == EMPTY_MERKLE_ROOT

    assert state1.nonce == 1
    assert state2.nonce is None

    # registering the secret should not change the locked amount
    state1.register_secret(lock_secret)

    assert state1.contract_balance == balance1 + 10
    assert state2.contract_balance == balance2
    assert state1.balance(state2) == balance1 + 10
    assert state2.balance(state1) == balance2

    assert state1.is_locked(lock_hashlock) is False
    assert state2.is_locked(lock_hashlock) is False

    assert merkleroot(state1.merkletree) == lock_hash
    assert merkleroot(state2.merkletree) == EMPTY_MERKLE_ROOT

    assert state1.nonce == 1
    assert state2.nonce is None

    secret_message = Secret(
        identifier=1,
        nonce=2,
        channel=channel_address,
        transferred_amount=transferred_amount + lock_amount,
        locksroot=EMPTY_MERKLE_ROOT,
        secret=lock_secret,
    )
    secret_message.sign(privkey1, address1)
    state1.register_secretmessage(secret_message)

    assert state1.contract_balance == balance1 + 10
    assert state2.contract_balance == balance2
    assert state1.balance(state2) == balance1 + 10 - lock_amount
    assert state2.balance(state1) == balance2 + lock_amount

    assert state1.distributable(state2) == balance1 + 10 - lock_amount
    assert state2.distributable(state1) == balance2 + lock_amount

    assert state1.amount_locked == 0
    assert state2.amount_locked == 0

    assert state1.is_locked(lock_hashlock) is False
    assert state2.is_locked(lock_hashlock) is False

    assert merkleroot(state1.merkletree) == EMPTY_MERKLE_ROOT
    assert merkleroot(state2.merkletree) == EMPTY_MERKLE_ROOT

    assert state1.nonce == 2
    assert state2.nonce is None
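# A minimal sketch of the bookkeeping the assertions above follow: a pending
# lock reduces the distributable amount immediately, while only the unlock (the
# Secret message) moves the transferred amount and therefore the balances.
# `SimpleEndState` is illustrative only and is not the real ChannelEndState.
class SimpleEndState:
    def __init__(self, contract_balance):
        self.contract_balance = contract_balance
        self.transferred_amount = 0  # total amount already sent to the partner
        self.amount_locked = 0       # total amount of pending (locked) transfers

    def balance(self, partner):
        # Own deposit, plus what the partner transferred to us, minus what we
        # transferred to the partner.
        return self.contract_balance + partner.transferred_amount - self.transferred_amount

    def distributable(self, partner):
        # Locked tokens cannot be spent until they are unlocked or expire.
        return self.balance(partner) - self.amount_locked


def test_simple_end_state_sketch():
    ours, partner = SimpleEndState(70), SimpleEndState(110)

    ours.amount_locked += 30  # a 30 token lock is registered
    assert ours.distributable(partner) == 70 - 30
    assert partner.balance(ours) == 110

    ours.amount_locked -= 30       # the Secret message unlocks the lock ...
    ours.transferred_amount += 30  # ... and moves the transferred amount
    assert ours.balance(partner) == 70 - 30
    assert partner.balance(ours) == 110 + 30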
def test_deposit_must_wait_for_confirmation(): block_number = 10 confirmed_deposit_block_number = block_number + DEFAULT_NUMBER_OF_CONFIRMATIONS_BLOCK + 1 our_model1, _ = create_model(0) partner_model1, _ = create_model(0) channel_state = create_channel_from_models(our_model1, partner_model1) payment_network_identifier = factories.make_address() token_address = factories.make_address() deposit_amount = 10 balance1_new = our_model1.balance + deposit_amount our_model2 = our_model1._replace( balance=balance1_new, distributable=balance1_new, contract_balance=balance1_new, ) partner_model2 = partner_model1 assert channel_state.our_state.contract_balance == 0 assert channel_state.partner_state.contract_balance == 0 deposit_transaction = TransactionChannelNewBalance( channel_state.our_state.address, deposit_amount, block_number, ) new_balance = ContractReceiveChannelNewBalance( payment_network_identifier, token_address, channel_state.identifier, deposit_transaction, ) pseudo_random_generator = random.Random() iteration = channel.state_transition( deepcopy(channel_state), new_balance, pseudo_random_generator, block_number, ) unconfirmed_state = iteration.new_state for block_number in range(block_number, confirmed_deposit_block_number): unconfirmed_block = Block(block_number) iteration = channel.state_transition( deepcopy(unconfirmed_state), unconfirmed_block, pseudo_random_generator, block_number, ) unconfirmed_state = iteration.new_state assert_partner_state( unconfirmed_state.our_state, unconfirmed_state.partner_state, our_model1, ) assert_partner_state( unconfirmed_state.partner_state, unconfirmed_state.our_state, partner_model1, ) confirmed_block = Block(confirmed_deposit_block_number) iteration = channel.state_transition( deepcopy(unconfirmed_state), confirmed_block, pseudo_random_generator, confirmed_deposit_block_number, ) confirmed_state = iteration.new_state assert_partner_state(confirmed_state.our_state, confirmed_state.partner_state, our_model2) assert_partner_state(confirmed_state.partner_state, confirmed_state.our_state, partner_model2)
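# The confirmation window used above is plain block arithmetic: a deposit mined
# at block B only becomes effective once enough confirmation blocks have been
# seen on top of it.  `is_deposit_confirmed` is a hypothetical helper written
# for illustration; the off-by-one convention mirrors the test's
# confirmed_deposit_block_number and is an assumption, not the channel.py rule.
def is_deposit_confirmed(deposit_block_number, current_block_number, confirmation_blocks):
    return current_block_number >= deposit_block_number + confirmation_blocks + 1


def test_is_deposit_confirmed_sketch():
    deposit_block = 10
    confirmations = 5

    assert not is_deposit_confirmed(deposit_block, deposit_block, confirmations)
    assert not is_deposit_confirmed(deposit_block, deposit_block + confirmations, confirmations)
    assert is_deposit_confirmed(deposit_block, deposit_block + confirmations + 1, confirmations)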
def test_token_identifiers_empty_list_for_token_network_registry_none( chain_state): assert (get_token_identifiers( chain_state=chain_state, token_network_registry_address=factories.make_address()) == list())
def test_handle_contract_send_channelunlock_already_unlocked():
    """Test the scenario where the on-chain unlock has already happened by the
    time we handle our own ContractSendChannelBatchUnlock event.

    Regression test for https://github.com/raiden-network/raiden/issues/3152
    """
    channel_identifier = 1
    payment_network_identifier = make_address()
    token_network_identifier = make_address()
    participant = make_address()

    raiden = make_raiden_service_mock(
        payment_network_identifier=payment_network_identifier,
        token_network_identifier=token_network_identifier,
        channel_identifier=channel_identifier,
        partner=participant,
    )

    channel_state = get_channelstate_by_token_network_and_partner(
        chain_state=state_from_raiden(raiden),
        token_network_id=token_network_identifier,
        partner_address=participant,
    )
    channel_state.our_state.onchain_locksroot = EMPTY_MERKLE_ROOT
    channel_state.partner_state.onchain_locksroot = EMPTY_MERKLE_ROOT

    def detail_participants(  # pylint: disable=unused-argument
            participant1,
            participant2,
            block_identifier,
            channel_identifier,
    ):
        transferred_amount = 1
        locked_amount = 1
        locksroot = make_32bytes()
        balance_hash = hash_balance_data(transferred_amount, locked_amount, locksroot)
        our_details = ParticipantDetails(
            address=raiden.address,
            deposit=5,
            withdrawn=0,
            is_closer=False,
            balance_hash=balance_hash,
            nonce=1,
            locksroot=locksroot,
            locked_amount=locked_amount,
        )

        transferred_amount = 1
        locked_amount = 1
        # Mock the partner's locksroot as 0x0, i.e. the unlock already happened
        balance_hash = hash_balance_data(transferred_amount, locked_amount, locksroot)
        partner_details = ParticipantDetails(
            address=participant,
            deposit=5,
            withdrawn=0,
            is_closer=True,
            balance_hash=balance_hash,
            nonce=1,
            locksroot=EMPTY_HASH,
            locked_amount=locked_amount,
        )
        return ParticipantsDetails(our_details, partner_details)

    # make sure detail_participants returns partner data with a locksroot of 0x0
    raiden.chain.token_network.detail_participants = detail_participants

    event = ContractSendChannelBatchUnlock(
        canonical_identifier=make_canonical_identifier(
            token_network_address=token_network_identifier,
            channel_identifier=channel_identifier,
        ),
        participant=participant,
        triggered_by_block_hash=make_block_hash(),
    )

    # This must not raise an unrecoverable error
    RaidenEventHandler().on_raiden_event(
        raiden=raiden,
        chain_state=raiden.wal.state_manager.current_state,
        event=event,
    )
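# The regression above boils down to one guard: if the partner's on-chain
# locksroot is already empty by the time the ContractSendChannelBatchUnlock
# event is handled, the unlock has effectively happened and the handler must
# skip sending the transaction instead of raising.  `should_send_onchain_unlock`
# is an illustrative sketch of that decision, not the RaidenEventHandler code.
EMPTY_LOCKSROOT = bytes(32)


def should_send_onchain_unlock(partner_onchain_locksroot):
    # An empty locksroot on-chain means there is nothing left to unlock.
    return partner_onchain_locksroot != EMPTY_LOCKSROOT


def test_should_send_onchain_unlock_sketch():
    assert should_send_onchain_unlock(b'\x01' * 32) is True
    assert should_send_onchain_unlock(EMPTY_LOCKSROOT) is False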
def test_discover_next_available_nonce(deploy_client: JSONRPCClient) -> None:
    """`parity_discover_next_available_nonce` returns the *next available nonce*.

    Notes:
    - This is not the same as the *highest unused nonce*, see issue #4976 for
      additional details.
    - The behaviour of `geth_discover_next_available_nonce` and
      `parity_discover_next_available_nonce` should match.
    """
    web3 = deploy_client.web3
    random_address = make_address()
    gas_price = web3.eth.gasPrice  # pylint: disable=no-member
    eth_node = deploy_client.eth_node
    next_nonce = discover_next_available_nonce(web3, eth_node, deploy_client.address)

    # Should be larger than the number of transactions that can fit in a single
    # block, to ensure all transactions from the pool are accounted for.
    QTY_TRANSACTIONS = 1000

    # The next available nonce must track every transaction sent to the pool.
    for _ in range(QTY_TRANSACTIONS):
        transaction = {
            "to": to_checksum_address(random_address),
            "gas": TRANSACTION_INTRINSIC_GAS,
            "nonce": next_nonce,
            "value": 1,
            "gasPrice": gas_price,
        }
        signed_txn = deploy_client.web3.eth.account.sign_transaction(
            transaction, deploy_client.privkey
        )
        deploy_client.web3.eth.sendRawTransaction(signed_txn.rawTransaction)

        next_nonce = Nonce(next_nonce + 1)
        msg = "The nonce must increment when a new transaction is sent."
        assert (
            discover_next_available_nonce(web3, eth_node, deploy_client.address) == next_nonce
        ), msg

    skip_nonce = next_nonce + 1

    # The next available nonce is not the same as the highest unused nonce:
    # transactions sent with a gap in the nonce sequence must not change it.
    for _ in range(QTY_TRANSACTIONS):
        transaction = {
            "to": to_checksum_address(random_address),
            "gas": TRANSACTION_INTRINSIC_GAS,
            "nonce": skip_nonce,
            "value": 1,
            "gasPrice": gas_price,
        }
        signed_txn = deploy_client.web3.eth.account.sign_transaction(
            transaction, deploy_client.privkey
        )
        deploy_client.web3.eth.sendRawTransaction(signed_txn.rawTransaction)

        available_nonce = discover_next_available_nonce(web3, eth_node, deploy_client.address)

        msg = "Expected the lowest unused nonce, not the highest."
        assert available_nonce == next_nonce, msg
        assert available_nonce != skip_nonce, msg

        skip_nonce = Nonce(skip_nonce + 1)
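# The distinction the docstring above draws can be made concrete with plain
# data: given the set of nonces already used by pending or mined transactions,
# the *next available nonce* is the first gap, while the highest unused nonce
# can be much larger once a gap exists.  This is a standalone illustration,
# not the geth/parity RPC logic itself.
def first_available_nonce(used_nonces):
    candidate = 0
    while candidate in used_nonces:
        candidate += 1
    return candidate


def test_first_available_nonce_sketch():
    used = {0, 1, 2, 4, 5}  # nonce 3 was skipped

    assert first_available_nonce(used) == 3  # the gap is the next available nonce
    assert max(used) + 1 == 6                # the highest unused nonce is larger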
def test_count_token_network_channels_no_token_network(chain_state): assert (count_token_network_channels( chain_state=chain_state, token_network_registry_address=factories.make_address(), token_address=factories.make_address(), ) == 0)
def test_matrix_message_retry(
    local_matrix_servers,
    retry_interval_initial,
    retry_interval_max,
    retries_before_backoff,
    broadcast_rooms,
):
    """ Test the retry mechanism implemented into the matrix client.

    The test creates a transport and sends a message. Given that the
    receiver was online, the initial message is sent but the receiver
    doesn't respond in time and goes offline. The retrier should then
    wait for the `retry_interval` duration to pass and send the message
    again but this won't work because the receiver is offline. Once
    the receiver comes back online, the message should be sent again.
    """
    partner_address = factories.make_address()

    transport = MatrixTransport(
        config=MatrixTransportConfig(
            broadcast_rooms=broadcast_rooms,
            retries_before_backoff=retries_before_backoff,
            retry_interval_initial=retry_interval_initial,
            retry_interval_max=retry_interval_max,
            server=local_matrix_servers[0],
            available_servers=[local_matrix_servers[0]],
        ),
        environment=Environment.DEVELOPMENT,
    )
    transport._send_raw = MagicMock()

    raiden_service = MockRaidenService(None)

    transport.start(raiden_service, [], None)
    transport.log = MagicMock()

    # Receiver is online
    transport._address_mgr._address_to_reachabilitystate[partner_address] = ReachabilityState(
        AddressReachability.REACHABLE, datetime.now()
    )

    queueid = QueueIdentifier(
        recipient=partner_address, canonical_identifier=CANONICAL_IDENTIFIER_UNORDERED_QUEUE
    )
    chain_state = raiden_service.wal.state_manager.current_state

    retry_queue: _RetryQueue = transport._get_retrier(partner_address)
    assert bool(retry_queue), "retry_queue not running"

    # Send the initial message
    message = Processed(message_identifier=0, signature=EMPTY_SIGNATURE)
    transport._raiden_service.sign(message)
    chain_state.queueids_to_queues[queueid] = [message]
    retry_queue.enqueue_unordered(message)

    gevent.idle()

    assert transport._send_raw.call_count == 1

    # Receiver goes offline
    transport._address_mgr._address_to_reachabilitystate[partner_address] = ReachabilityState(
        AddressReachability.UNREACHABLE, datetime.now()
    )

    with gevent.Timeout(retry_interval_initial + 2):
        wait_assert(
            transport.log.debug.assert_called_with,
            "Partner not reachable. Skipping.",
            partner=to_checksum_address(partner_address),
            status=AddressReachability.UNREACHABLE,
        )

    # Retrier did not call send_raw given that the receiver is still offline
    assert transport._send_raw.call_count == 1

    # Receiver comes back online
    transport._address_mgr._address_to_reachabilitystate[partner_address] = ReachabilityState(
        AddressReachability.REACHABLE, datetime.now()
    )

    # Retrier should send the message again
    with gevent.Timeout(retry_interval_initial + 2):
        while transport._send_raw.call_count != 2:
            gevent.sleep(0.1)

    transport.stop()
    transport.greenlet.get()
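# A sketch of the retry schedule the transport parameters above describe: the
# first `retries_before_backoff` attempts reuse the initial interval, after
# which the wait grows until it is capped at `retry_interval_max`.  The exact
# doubling rule is an assumption made for illustration; the real timing lives
# in the transport's _RetryQueue.
def retry_intervals(retry_interval_initial, retry_interval_max, retries_before_backoff):
    interval = retry_interval_initial
    attempt = 0
    while True:
        yield min(interval, retry_interval_max)
        attempt += 1
        if attempt >= retries_before_backoff:
            interval = min(interval * 2, retry_interval_max)


def test_retry_intervals_sketch():
    schedule = retry_intervals(
        retry_interval_initial=1,
        retry_interval_max=60,
        retries_before_backoff=3,
    )
    first_attempts = [next(schedule) for _ in range(6)]
    assert first_attempts == [1, 1, 1, 2, 4, 8]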
def test_matrix_message_retry(
        local_matrix_servers,
        private_rooms,
        retry_interval,
        retries_before_backoff,
):
    """ Test the retry mechanism implemented into the matrix client.

    The test creates a transport and sends a message. Given that the
    receiver was online, the initial message is sent but the receiver
    doesn't respond in time and goes offline. The retrier should then
    wait for the `retry_interval` duration to pass and send the message
    again but this won't work because the receiver is offline. Once
    the receiver comes back online, the message should be sent again.
    """
    partner_address = make_address()

    transport = MatrixTransport({
        'discovery_room': 'discovery',
        'retries_before_backoff': retries_before_backoff,
        'retry_interval': retry_interval,
        'server': local_matrix_servers[0],
        'server_name': local_matrix_servers[0].netloc,
        'available_servers': [],
        'private_rooms': private_rooms,
    })
    transport._send_raw = MagicMock()

    raiden_service = MockRaidenService(None)

    transport.start(
        raiden_service,
        raiden_service.message_handler,
        None,
    )
    transport.log = MagicMock()

    # Receiver is online
    transport._address_to_presence[partner_address] = UserPresence.ONLINE

    queueid = QueueIdentifier(
        recipient=partner_address,
        channel_identifier=CHANNEL_IDENTIFIER_GLOBAL_QUEUE,
    )
    chain_state = raiden_service.wal.state_manager.current_state

    retry_queue: _RetryQueue = transport._get_retrier(partner_address)
    assert bool(retry_queue), 'retry_queue not running'

    # Send the initial message
    message = Processed(0)
    message.sign(transport._raiden_service.private_key)
    chain_state.queueids_to_queues[queueid] = [message]
    retry_queue.enqueue_global(message)

    gevent.sleep(1)

    assert transport._send_raw.call_count == 1

    # Receiver goes offline
    transport._address_to_presence[partner_address] = UserPresence.OFFLINE

    gevent.sleep(retry_interval)

    transport.log.debug.assert_called_with(
        'Partner not reachable. Skipping.',
        partner=pex(partner_address),
        status=UserPresence.OFFLINE,
    )

    # Retrier did not call send_raw given that the receiver is still offline
    assert transport._send_raw.call_count == 1

    # Receiver comes back online
    transport._address_to_presence[partner_address] = UserPresence.ONLINE

    gevent.sleep(retry_interval)

    # Retrier now should have sent the message again
    assert transport._send_raw.call_count == 2

    transport.stop()
    transport.get()
def test_get_participants_addresses_no_token_network(chain_state): assert (get_participants_addresses( chain_state=chain_state, token_network_registry_address=factories.make_address(), token_address=factories.make_address(), ) == set())
def test_get_event_with_balance_proof():
    """ All events which contain a balance proof must be found when querying
    the database.
    """
    serializer = JSONSerializer
    storage = SQLiteStorage(':memory:', serializer)
    counter = itertools.count()

    lock_expired = SendLockExpired(
        recipient=factories.make_address(),
        message_identifier=next(counter),
        balance_proof=make_balance_proof_from_counter(counter),
        secrethash=sha3(factories.make_secret(next(counter))),
    )
    locked_transfer = SendLockedTransfer(
        recipient=factories.make_address(),
        channel_identifier=factories.make_channel_identifier(),
        message_identifier=next(counter),
        transfer=make_transfer_from_counter(counter),
    )
    balance_proof = SendBalanceProof(
        recipient=factories.make_address(),
        channel_identifier=factories.make_channel_identifier(),
        message_identifier=next(counter),
        payment_identifier=next(counter),
        token_address=factories.make_address(),
        secret=factories.make_secret(next(counter)),
        balance_proof=make_balance_proof_from_counter(counter),
    )
    refund_transfer = SendRefundTransfer(
        recipient=factories.make_address(),
        channel_identifier=factories.make_channel_identifier(),
        message_identifier=next(counter),
        transfer=make_transfer_from_counter(counter),
    )

    events_balanceproofs = [
        (lock_expired, lock_expired.balance_proof),
        (locked_transfer, locked_transfer.balance_proof),
        (balance_proof, balance_proof.balance_proof),
        (refund_transfer, refund_transfer.transfer.balance_proof),
    ]

    timestamp = datetime.utcnow().isoformat(timespec='milliseconds')
    state_change = ''

    for event, _ in events_balanceproofs:
        state_change_identifier = storage.write_state_change(
            state_change,
            timestamp,
        )
        storage.write_events(
            state_change_identifier=state_change_identifier,
            events=[event],
            log_time=timestamp,
        )

    for event, balance_proof in events_balanceproofs:
        event_record = get_event_with_balance_proof(
            storage=storage,
            chain_id=balance_proof.chain_id,
            token_network_identifier=balance_proof.token_network_identifier,
            channel_identifier=balance_proof.channel_identifier,
            balance_hash=balance_proof.balance_hash,
        )
        assert event_record.data == event
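# The lookup exercised above amounts to "find the latest event whose embedded
# balance proof matches the given identifiers and balance hash".  The stand-in
# below uses only the standard library (sqlite3 + json); the table layout and
# field names are assumptions for illustration and differ from the real
# SQLiteStorage schema and serializer.
def test_get_event_with_balance_proof_sketch():
    import json
    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE state_events (identifier INTEGER PRIMARY KEY, data TEXT)')
    events = [
        {'type': 'SendLockExpired', 'balance_proof': {'chain_id': 1, 'channel_identifier': 7, 'balance_hash': '0xaa'}},
        {'type': 'SendBalanceProof', 'balance_proof': {'chain_id': 1, 'channel_identifier': 8, 'balance_hash': '0xbb'}},
    ]
    conn.executemany(
        'INSERT INTO state_events (data) VALUES (?)',
        [(json.dumps(event),) for event in events],
    )

    def query(chain_id, channel_identifier, balance_hash):
        # Newest first, mirroring "latest matching event wins".
        cursor = conn.execute('SELECT data FROM state_events ORDER BY identifier DESC')
        for (data,) in cursor:
            event = json.loads(data)
            balance_proof = event.get('balance_proof') or {}
            if (
                balance_proof.get('chain_id') == chain_id
                and balance_proof.get('channel_identifier') == channel_identifier
                and balance_proof.get('balance_hash') == balance_hash
            ):
                return event
        return None

    assert query(1, 8, '0xbb')['type'] == 'SendBalanceProof'
    assert query(1, 99, '0xcc') is None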
def make_receive_transfer_mediated( channel_state, privkey, nonce, transferred_amount, lock, merkletree_leaves=None, token_network_address=UNIT_REGISTRY_IDENTIFIER, locked_amount=None, chain_id=UNIT_CHAIN_ID, ): if not isinstance(lock, HashTimeLockState): raise ValueError('lock must be of type HashTimeLockState') address = privatekey_to_address(privkey.secret) if address not in (channel_state.our_state.address, channel_state.partner_state.address): raise ValueError('Private key does not match any of the participants.') if merkletree_leaves is None: layers = [[lock.lockhash]] else: assert lock.lockhash in merkletree_leaves layers = compute_layers(merkletree_leaves) if locked_amount is None: locked_amount = lock.amount assert locked_amount >= lock.amount locksroot = layers[MERKLEROOT][0] payment_identifier = nonce transfer_target = factories.make_address() transfer_initiator = factories.make_address() mediated_transfer_msg = LockedTransfer( chain_id=chain_id, message_identifier=random.randint(0, UINT64_MAX), payment_identifier=payment_identifier, nonce=nonce, token_network_address=token_network_address, token=channel_state.token_address, channel_identifier=channel_state.identifier, transferred_amount=transferred_amount, locked_amount=locked_amount, recipient=channel_state.partner_state.address, locksroot=locksroot, lock=lock, target=transfer_target, initiator=transfer_initiator, ) mediated_transfer_msg.sign(privkey) balance_proof = balanceproof_from_envelope(mediated_transfer_msg) receive_lockedtransfer = LockedTransferSignedState( random.randint(0, UINT64_MAX), payment_identifier, channel_state.token_address, balance_proof, lock, transfer_initiator, transfer_target, ) return receive_lockedtransfer
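# make_receive_transfer_mediated derives the locksroot from the merkle layers
# of the pending lock hashes.  The sketch below shows the general shape of that
# computation with standard-library pieces only; the hash function (sha256
# instead of keccak) and the odd-leaf/pair-ordering rules are placeholders, not
# the actual raiden merkle-tree conventions.
def merkle_root_sketch(leaves):
    from hashlib import sha256

    if not leaves:
        return bytes(32)

    layer = sorted(leaves)
    while len(layer) > 1:
        if len(layer) % 2:
            layer.append(layer[-1])  # placeholder rule: duplicate the odd leaf
        layer = [
            sha256(layer[i] + layer[i + 1]).digest()
            for i in range(0, len(layer), 2)
        ]
    return layer[0]


def test_merkle_root_sketch():
    from hashlib import sha256

    single_leaf = sha256(b'lock-1').digest()
    assert merkle_root_sketch([]) == bytes(32)               # empty tree: zero root
    assert merkle_root_sketch([single_leaf]) == single_leaf  # one leaf: root == leaf hash

    two_leaves = [sha256(b'lock-1').digest(), sha256(b'lock-2').digest()]
    assert merkle_root_sketch(two_leaves) == merkle_root_sketch(list(reversed(two_leaves)))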
def test_regression_onchain_secret_reveal_must_update_channel_state():
    """ If a secret is learned off-chain and then on-chain, the state of the
    lock must be updated in the channel.
    """
    pseudo_random_generator = random.Random()
    setup = factories.make_transfers_pair(2, block_number=10)

    mediator_state = MediatorTransferState(
        secrethash=UNIT_SECRETHASH,
        routes=setup.channels.get_routes(),
    )
    mediator_state.transfers_pair = setup.transfers_pair

    secret = UNIT_SECRET
    secrethash = UNIT_SECRETHASH
    payer_channel = mediator.get_payer_channel(setup.channel_map, setup.transfers_pair[0])
    payee_channel = mediator.get_payee_channel(setup.channel_map, setup.transfers_pair[0])
    lock = payer_channel.partner_state.secrethashes_to_lockedlocks[secrethash]

    mediator.state_transition(
        mediator_state=mediator_state,
        state_change=ReceiveSecretReveal(secret, payee_channel.partner_state.address),
        channelidentifiers_to_channels=setup.channel_map,
        nodeaddresses_to_networkstates=setup.channels.nodeaddresses_to_networkstates,
        pseudo_random_generator=pseudo_random_generator,
        block_number=setup.block_number,
        block_hash=setup.block_hash,
    )
    assert secrethash in payer_channel.partner_state.secrethashes_to_unlockedlocks

    secret_registry_address = factories.make_address()
    transaction_hash = factories.make_address()
    mediator.state_transition(
        mediator_state=mediator_state,
        state_change=ContractReceiveSecretReveal(
            transaction_hash=transaction_hash,
            secret_registry_address=secret_registry_address,
            secrethash=secrethash,
            secret=secret,
            block_number=setup.block_number,
            block_hash=setup.block_hash,
        ),
        channelidentifiers_to_channels=setup.channel_map,
        nodeaddresses_to_networkstates=setup.channels.nodeaddresses_to_networkstates,
        pseudo_random_generator=pseudo_random_generator,
        block_number=setup.block_number,
        block_hash=setup.block_hash,
    )
    assert secrethash in payer_channel.partner_state.secrethashes_to_onchain_unlockedlocks

    # Create a lock-expired message as if it came from the *partner*
    send_lock_expired, _ = channel.create_sendexpiredlock(
        sender_end_state=payer_channel.partner_state,
        locked_lock=lock,
        pseudo_random_generator=pseudo_random_generator,
        chain_id=payer_channel.chain_id,
        token_network_identifier=payer_channel.token_network_identifier,
        channel_identifier=payer_channel.identifier,
        recipient=payer_channel.our_state.address,
    )
    assert send_lock_expired
    expired_message = message_from_sendevent(send_lock_expired, setup.channels.our_address(0))
    expired_message.sign(LocalSigner(setup.channels.partner_privatekeys[0]))
    balance_proof = balanceproof_from_envelope(expired_message)

    message_identifier = message_identifier_from_prng(pseudo_random_generator)
    expired_block_number = channel.get_sender_expiration_threshold(lock)

    mediator.state_transition(
        mediator_state=mediator_state,
        state_change=ReceiveLockExpired(
            balance_proof=balance_proof,
            secrethash=secrethash,
            message_identifier=message_identifier,
        ),
        channelidentifiers_to_channels=setup.channel_map,
        nodeaddresses_to_networkstates=setup.channels.nodeaddresses_to_networkstates,
        pseudo_random_generator=pseudo_random_generator,
        block_number=expired_block_number,
        block_hash=factories.make_block_hash(),
    )
    assert secrethash in payer_channel.partner_state.secrethashes_to_onchain_unlockedlocks
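# The regression test walks one lock through the three dictionaries a channel
# end state keeps: secrethashes_to_lockedlocks (secret unknown),
# secrethashes_to_unlockedlocks (secret learned off-chain) and
# secrethashes_to_onchain_unlockedlocks (secret registered on-chain), and then
# checks that a later LockExpired leaves the on-chain entry untouched.  The
# sketch below models only that bookkeeping with plain dicts; it illustrates
# the expected transitions and is not the channel.py implementation.
def register_offchain_secret_sketch(end_state, secrethash):
    lock = end_state['lockedlocks'].pop(secrethash, None)
    if lock is not None:
        end_state['unlockedlocks'][secrethash] = lock


def register_onchain_secret_sketch(end_state, secrethash):
    lock = (
        end_state['lockedlocks'].pop(secrethash, None)
        or end_state['unlockedlocks'].pop(secrethash, None)
    )
    if lock is not None:
        end_state['onchain_unlockedlocks'][secrethash] = lock


def test_lock_state_transitions_sketch():
    partner_end = {'lockedlocks': {'hash1': 'lock'}, 'unlockedlocks': {}, 'onchain_unlockedlocks': {}}

    register_offchain_secret_sketch(partner_end, 'hash1')
    assert 'hash1' in partner_end['unlockedlocks']

    register_onchain_secret_sketch(partner_end, 'hash1')
    assert 'hash1' in partner_end['onchain_unlockedlocks']
    assert 'hash1' not in partner_end['unlockedlocks']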
def test_get_token_network_registry_by_token_network_address_is_none( chain_state): assert (get_token_network_registry_by_token_network_address( chain_state=chain_state, token_network_address=factories.make_address()) is None)