Example #1
def test_check_pfs_for_production(service_registry_address, private_keys, web3,
                                  contract_manager) -> None:
    chain_id = ChainID(int(web3.net.version))
    service_registry, _ = deploy_service_registry_and_set_urls(
        private_keys=private_keys,
        web3=web3,
        contract_manager=contract_manager,
        service_registry_address=service_registry_address,
    )

    # Configuring an address that doesn't match the registered url should error
    pfs_info = PFSInfo(
        url="http://ourgivenaddress",
        price=TokenAmount(0),
        chain_id=chain_id,
        token_network_registry_address=token_network_registry_address_test_default,
        payment_address=privatekey_to_address(private_keys[0]),
        message="",
        operator="",
        version="",
        user_deposit_address=privatekey_to_address(private_keys[1]),
        confirmed_block_number=BlockNumber(10),
        matrix_server="http://matrix.example.com",
    )
    with pytest.raises(RaidenError):
        check_pfs_for_production(service_registry=service_registry,
                                 pfs_info=pfs_info)

    # Configuring a PFS payment address that isn't registered should error
    pfs_info = PFSInfo(
        url="http://foo",
        price=TokenAmount(0),
        chain_id=chain_id,
        token_network_registry_address=token_network_registry_address_test_default,
        payment_address=to_canonical_address(
            "0x2222222222222222222222222222222222222221"),
        message="",
        operator="",
        version="",
        user_deposit_address=privatekey_to_address(private_keys[1]),
        confirmed_block_number=BlockNumber(10),
        matrix_server="http://matrix.example.com",
    )
    with pytest.raises(RaidenError):
        check_pfs_for_production(service_registry=service_registry,
                                 pfs_info=pfs_info)
Example #2
def deploy_user_deposit_and_return_address(
    proxy_manager: ProxyManager,
    deploy_client: JSONRPCClient,
    contract_manager: ContractManager,
    token_proxy: Token,
    private_keys: List[PrivateKey],
    environment_type: Environment,
) -> Optional[Address]:
    """ Deploy UserDeposit and fund accounts with some balances """
    if environment_type != Environment.DEVELOPMENT:
        return None

    constructor_arguments = [token_proxy.address, UINT256_MAX]
    user_deposit_address = deploy_contract_web3(
        contract_name=CONTRACT_USER_DEPOSIT,
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        constructor_arguments=constructor_arguments,
    )

    user_deposit = proxy_manager.user_deposit(
        UserDepositAddress(user_deposit_address))

    participants = [privatekey_to_address(key) for key in private_keys]
    for transfer_to in participants:
        user_deposit.deposit(
            beneficiary=transfer_to,
            total_deposit=MONITORING_REWARD,
            given_block_identifier="latest",
        )

    return user_deposit_address
Example #3
def test_token(deploy_client, token_proxy, private_keys, web3,
               contract_manager):
    privkey = private_keys[1]
    address = privatekey_to_address(privkey)
    address = to_canonical_address(address)
    other_client = JSONRPCClient(web3, privkey)
    other_token_proxy = Token(
        jsonrpc_client=other_client,
        token_address=to_canonical_address(token_proxy.proxy.address),
        contract_manager=contract_manager,
        block_identifier=BLOCK_ID_LATEST,
    )

    # send some funds from deployer to generated address
    transfer_funds = 100
    transaction_hash = token_proxy.transfer(address, transfer_funds)
    assert is_tx_hash_bytes(transaction_hash)
    assert transfer_funds == token_proxy.balance_of(address)
    allow_funds = 100

    transaction_hash = token_proxy.approve(address, allow_funds)
    assert is_tx_hash_bytes(transaction_hash)
    assert allow_funds == token_proxy.proxy.functions.allowance(
        deploy_client.address, address).call(block_identifier=BLOCK_ID_LATEST)
    other_token_proxy.transfer(deploy_client.address, transfer_funds)
    assert token_proxy.balance_of(address) == 0
Example #4
    def get_request_monitoring(
        self,
        privkey: PrivateKey,
        reward_amount: TokenAmount,
        monitoring_service_contract_address: MonitoringServiceAddress,
    ) -> RequestMonitoring:
        """Returns raiden client's RequestMonitoring object"""
        non_closing_signer = LocalSigner(privkey)
        partner_signed_self = SignedBlindedBalanceProof(
            channel_identifier=self.channel_identifier,
            token_network_address=self.token_network_address,
            nonce=self.nonce,
            additional_hash=AdditionalHash(decode_hex(self.additional_hash)),
            chain_id=self.chain_id,
            signature=self.signature,
            balance_hash=BalanceHash(decode_hex(self.balance_hash)),
        )
        request_monitoring = RequestMonitoring(
            balance_proof=partner_signed_self,
            non_closing_participant=privatekey_to_address(privkey),
            reward_amount=reward_amount,
            signature=EMPTY_SIGNATURE,
            monitoring_service_contract_address=monitoring_service_contract_address,
        )
        request_monitoring.sign(non_closing_signer)
        return request_monitoring
Example #5
def test_get_pfs_iou(one_to_n_address):
    token_network_address = TokenNetworkAddress(bytes([1] * 20))
    privkey = bytes([2] * 32)
    sender = privatekey_to_address(privkey)
    receiver = factories.make_address()

    response = mocked_json_response(response_data={"last_iou": None})
    with patch.object(session, "get", return_value=response):
        assert (get_last_iou("http://example.com", token_network_address,
                             sender, receiver, PRIVKEY) is None)

        # Previous IOU
        iou = IOU(
            sender=sender,
            receiver=receiver,
            amount=10,
            expiration_block=1000,
            one_to_n_address=one_to_n_address,
            chain_id=4,
        )
        iou.sign(privkey)

    response = mocked_json_response(response_data={"last_iou": iou.as_json()})
    with patch.object(session, "get", return_value=response):
        assert (get_last_iou("http://example.com", token_network_address,
                             sender, receiver, PRIVKEY) == iou)
Example #6
def test_update_iou():
    privkey = bytes([2] * 32)
    sender = Address(privatekey_to_address(privkey))
    receiver = Address(bytes([1] * 20))
    one_to_n_address = Address(bytes([2] * 20))

    # prepare iou
    iou = IOU(
        sender=sender,
        receiver=receiver,
        amount=10,
        expiration_block=1000,
        chain_id=4,
        one_to_n_address=one_to_n_address,
    )
    iou.sign(privkey)

    # update and compare
    added_amount = 10
    new_iou = update_iou(iou=replace(iou),
                         privkey=privkey,
                         added_amount=added_amount)
    assert new_iou.amount == iou.amount + added_amount
    assert new_iou.sender == iou.sender
    assert new_iou.receiver == iou.receiver
    assert new_iou.signature != iou.signature

    # Previous IOU with increased amount by evil PFS
    tampered_iou = replace(new_iou)
    tampered_iou.amount += 10
    with pytest.raises(ServiceRequestFailed):
        update_iou(iou=tampered_iou,
                   privkey=privkey,
                   added_amount=added_amount)
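The tamper check above works because changing the amount invalidates the IOU's signature. As a generic illustration of that idea (this is not Raiden's IOU wire format, just a standalone sketch using eth_account), a signed payload can be verified by recovering the signer and comparing it to the expected sender:

# Standalone sketch -- the payload layout is an assumption, not Raiden's IOU encoding.
from eth_account import Account
from eth_account.messages import encode_defunct


def is_untampered(payload: bytes, signature: bytes, expected_sender: str) -> bool:
    # Recover the address that produced `signature` over `payload` and compare it
    # to the expected sender; any change to the payload (e.g. a bumped amount)
    # makes the recovered address differ.
    recovered = Account.recover_message(encode_defunct(payload), signature=signature)
    return recovered.lower() == expected_sender.lower()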
Example #7
def setup_testchain(eth_client: EthClient, free_port_generator: Iterator[Port],
                    base_datadir: str,
                    base_logdir: str) -> Iterator[Dict[str, Any]]:

    ensure_executable(eth_client.value)

    rpc_port = next(free_port_generator)
    p2p_port = next(free_port_generator)

    eth_rpc_endpoint = URI(f"http://127.0.0.1:{rpc_port}")
    web3 = Web3(HTTPProvider(endpoint_uri=eth_rpc_endpoint))
    web3.middleware_onion.inject(make_sane_poa_middleware, layer=0)

    eth_nodes = [
        EthNodeDescription(
            private_key=TEST_PRIVKEY,
            rpc_port=rpc_port,
            p2p_port=p2p_port,
            miner=True,
            extra_config={},
            blockchain_type=eth_client.value,
        )
    ]

    random_marker = remove_0x_prefix(HexStr(hex(random.getrandbits(100))))
    genesis_description = GenesisDescription(
        prefunded_accounts=[
            AccountDescription(TEST_ACCOUNT_ADDRESS, DEFAULT_BALANCE)
        ],
        random_marker=random_marker,
        chain_id=CHAINNAME_TO_ID["smoketest"],
    )

    datadir = eth_node_to_datadir(privatekey_to_address(TEST_PRIVKEY),
                                  base_datadir)
    if eth_client is EthClient.GETH:
        keystore = geth_keystore(datadir)
    elif eth_client is EthClient.PARITY:
        keystore = parity_keystore(datadir)

    eth_node_runner = run_private_blockchain(
        web3=web3,
        eth_nodes=eth_nodes,
        base_datadir=base_datadir,
        log_dir=base_logdir,
        verbosity="info",
        genesis_description=genesis_description,
    )
    with eth_node_runner as node_executors:
        yield dict(
            eth_client=eth_client,
            base_datadir=base_datadir,
            eth_rpc_endpoint=eth_rpc_endpoint,
            keystore=keystore,
            node_executors=node_executors,
            web3=web3,
        )
Example #8
    def __init__(
        self,
        web3: Web3,
        privkey: Optional[PrivateKey],
        gas_price_strategy: Callable = rpc_gas_price_strategy,
        gas_estimate_correction: Callable = lambda gas: gas,
        block_num_confirmations: int = 0,
    ) -> None:
        if privkey is None or len(privkey) != 32:
            raise ValueError("Invalid private key")

        if block_num_confirmations < 0:
            raise ValueError("Number of confirmations has to be positive")

        monkey_patch_web3(web3, gas_price_strategy)

        version = web3.version.node
        supported, eth_node, _ = is_supported_client(version)

        if not supported:
            raise EthNodeInterfaceError(
                f"Unsupported Ethereum client {version}")

        address = privatekey_to_address(privkey)
        address_checksummed = to_checksum_address(address)

        if eth_node is EthClient.PARITY:
            parity_assert_rpc_interfaces(web3)
            available_nonce = parity_discover_next_available_nonce(
                web3, address_checksummed)

        elif eth_node is EthClient.GETH:
            geth_assert_rpc_interfaces(web3)
            available_nonce = geth_discover_next_available_nonce(
                web3, address_checksummed)

        self.eth_node = eth_node
        self.privkey = privkey
        self.address = address
        self.web3 = web3
        self.default_block_num_confirmations = block_num_confirmations

        # Ask for the chain id only once and store it here
        self.chain_id = ChainID(int(self.web3.version.network))

        self._available_nonce = available_nonce
        self._nonce_lock = Semaphore()
        self._gas_estimate_correction = gas_estimate_correction

        log.debug(
            "JSONRPCClient created",
            node=to_checksum_address(self.address),
            available_nonce=available_nonce,
            client=version,
        )
Example #9
def test_account_from_keystore_without_address_and_uuid():
    keystore = dict(KEYSTORE)
    keystore.pop("address")
    keystore.pop("id")
    account = Account(keystore)
    assert account.address is None

    account.unlock(PASSWORD)
    assert account.address == privatekey_to_address(PRIVKEY)
    assert account.uuid is None
    account.uuid = new_uuid = UUID(hex="1234567890abcdef1234567890abcdef")
    assert str(new_uuid) in repr(account)
Example #10
def test_check_pfs_transport_configuration(chain_id, private_keys, caplog):
    matrix_server_url = "http://matrix.example.com"
    matrix_room_id = "!room-id:matrix.example.com"
    pfs_info = PFSInfo(
        url="http://foo",
        price=TokenAmount(0),
        chain_id=chain_id,
        token_network_registry_address=token_network_registry_address_test_default,
        payment_address=to_canonical_address("0x2222222222222222222222222222222222222221"),
        message="",
        operator="",
        version="",
        user_deposit_address=privatekey_to_address(private_keys[1]),
        confirmed_block_number=BlockNumber(10),
        matrix_server=matrix_server_url,
        matrix_room_id=matrix_room_id,
    )

    # Room id mismatch, must raise
    with pytest.raises(RaidenError):
        check_pfs_transport_configuration(
            pfs_info=pfs_info,
            pfs_was_autoselected=True,
            transport_pfs_broadcast_room_id="!this-is-not-the-room-youre-looking-for:example.com",
            matrix_server_url=matrix_server_url,
            matrix_server_was_autoselected=True,
        )

    # Room ids match, must not raise
    check_pfs_transport_configuration(
        pfs_info=pfs_info,
        pfs_was_autoselected=True,
        transport_pfs_broadcast_room_id=matrix_room_id,
        matrix_server_url=matrix_server_url,
        matrix_server_was_autoselected=True,
    )

    # With the matrix_room_id missing from the PFS response the check can't be performed
    pfs_info_no_room_id = dataclasses.replace(pfs_info, matrix_room_id=None)
    with caplog.at_level(logging.WARNING):
        check_pfs_transport_configuration(
            pfs_info=pfs_info_no_room_id,
            pfs_was_autoselected=True,
            transport_pfs_broadcast_room_id="!not-this-again:matrix.org",
            matrix_server_url=matrix_server_url,
            matrix_server_was_autoselected=True,
        )
        assert "Can't check PFS transport configuration" in (
            record.msg["event"] for record in caplog.records
        )
Example #11
    def __init__(self,
                 message_handler=None,
                 state_transition=None,
                 private_key=None):
        if private_key is None:
            self.privkey, self.address = factories.make_privkey_address()
        else:
            self.privkey = private_key
            self.address = privatekey_to_address(private_key)

        self.rpc_client = MockJSONRPCClient(self.address)
        self.proxy_manager = MockProxyManager(node_address=self.address)
        self.signer = LocalSigner(self.privkey)

        self.message_handler = message_handler
        self.routing_mode = RoutingMode.PRIVATE
        self.config = RaidenConfig(chain_id=self.rpc_client.chain_id,
                                   environment_type=Environment.DEVELOPMENT)

        self.default_user_deposit = Mock()
        self.default_registry = Mock()
        self.default_registry.address = factories.make_address()
        self.default_one_to_n_address = factories.make_address()
        self.default_msc_address = factories.make_address()

        self.targets_to_identifiers_to_statuses: Dict[Address, dict] = defaultdict(dict)
        self.route_to_feedback_token: dict = {}

        if state_transition is None:
            state_transition = node.state_transition

        serializer = JSONSerializer()
        state_manager = StateManager(state_transition, None)
        storage = SerializedSQLiteStorage(":memory:", serializer)
        self.wal = WriteAheadLog(state_manager, storage)

        state_change = ActionInitChain(
            pseudo_random_generator=random.Random(),
            block_number=BlockNumber(0),
            block_hash=factories.make_block_hash(),
            our_address=self.rpc_client.address,
            chain_id=self.rpc_client.chain_id,
        )
        with self.wal.process_state_change_atomically() as dispatcher:
            dispatcher.dispatch(state_change)

        self.transport = Mock()
Example #12
def test_service_registry_random_pfs(service_registry_address, private_keys,
                                     web3, contract_manager):
    addresses = [privatekey_to_address(key) for key in private_keys]
    c1_service_proxy, urls = deploy_service_registry_and_set_urls(
        private_keys=private_keys,
        web3=web3,
        contract_manager=contract_manager,
        service_registry_address=service_registry_address,
    )
    assert c1_service_proxy.ever_made_deposits_len(BLOCK_ID_LATEST) == 3

    # Test that getting the url for each service address works
    for idx, address in enumerate(addresses):
        assert c1_service_proxy.get_service_url(BLOCK_ID_LATEST,
                                                address) == urls[idx]
    # Test that getting the url for a non-existing service address returns None
    assert c1_service_proxy.get_service_url(BLOCK_ID_LATEST, HOP1) is None

    # Test that get_service_address by index works
    for idx, address in enumerate(addresses):
        assert c1_service_proxy.ever_made_deposits(BLOCK_ID_LATEST,
                                                   idx) == address

    # Test that getting the address for an index out of bounds returns None
    assert not c1_service_proxy.ever_made_deposits(BLOCK_ID_LATEST, 9999)

    mock_get_pfs_info = Mock()
    mock_get_pfs_info.return_value.price = 100
    with patch("raiden.network.pathfinding.get_pfs_info", mock_get_pfs_info):
        # Make sure that too expensive PFSes are not considered valid
        assert not get_valid_pfs_url(c1_service_proxy,
                                     0,
                                     BLOCK_ID_LATEST,
                                     pathfinding_max_fee=FeeAmount(99))

        # ...but ones with the expected price are fine
        assert (get_valid_pfs_url(
            c1_service_proxy,
            0,
            BLOCK_ID_LATEST,
            pathfinding_max_fee=FeeAmount(100)) == urls[0])

        # Test that getting a random service from the proxy works
        assert (get_random_pfs(c1_service_proxy,
                               BLOCK_ID_LATEST,
                               pathfinding_max_fee=FeeAmount(100)) in urls)
Example #13
def eth_node_config(node_pkey: bytes, p2p_port: Port, rpc_port: Port,
                    **extra_config: Any) -> Dict[str, Any]:
    address = privatekey_to_address(node_pkey)
    pub = privatekey_to_publickey(node_pkey).hex()

    config = extra_config.copy()
    config.update({
        "nodekey": node_pkey,
        "nodekeyhex": remove_0x_prefix(encode_hex(node_pkey)),
        "pub": pub,
        "address": address,
        "port": p2p_port,
        "rpcport": rpc_port,
        "enode": f"enode://{pub}@127.0.0.1:{p2p_port}",
    })

    return config
Example #14
def web3(
    deploy_key,
    eth_nodes_configuration,
    private_keys,
    account_genesis_eth_balance,
    random_marker,
    tmpdir,
    chain_id,
    logs_storage,
    blockchain_type,
):
    """ Starts a private chain with accounts funded. """
    # include the deploy key in the list of funded accounts
    keys_to_fund = sorted(set(private_keys + [deploy_key]))

    host = "127.0.0.1"
    rpc_port = eth_nodes_configuration[0].rpc_port
    endpoint = f"http://{host}:{rpc_port}"
    web3 = Web3(HTTPProvider(URI(endpoint)))

    accounts_to_fund = [
        AccountDescription(privatekey_to_address(key), account_genesis_eth_balance)
        for key in keys_to_fund
    ]

    # The private chain data is always discarded on the CI
    base_datadir = str(tmpdir)

    # Save the Ethereum node's log for debugging
    base_logdir = os.path.join(logs_storage, blockchain_type)

    genesis_description = GenesisDescription(
        prefunded_accounts=accounts_to_fund, chain_id=chain_id, random_marker=random_marker
    )
    eth_node_runner = run_private_blockchain(
        web3=web3,
        eth_nodes=eth_nodes_configuration,
        base_datadir=base_datadir,
        log_dir=base_logdir,
        verbosity="info",
        genesis_description=genesis_description,
    )
    with eth_node_runner:
        yield web3

    cleanup_tasks()
Example #15
def deploy_all_tokens_register_and_return_their_addresses(
    token_amount: TokenAmount,
    number_of_tokens: int,
    private_keys: List[PrivateKey],
    proxy_manager: ProxyManager,
    token_network_registry_address: TokenNetworkRegistryAddress,
    register_tokens: bool,
    contract_manager: ContractManager,
    token_contract_name: str,
) -> List[TokenAddress]:
    """ Fixture that yields `number_of_tokens` ERC20 token addresses, where the
    `token_amount` (per token) is distributed among the addresses behind `deploy_client` and
    potentially pre-registered with the Raiden Registry.
    The following arguments can control the behavior:

    Args:
        token_amount: the overall number of units minted per token
        number_of_tokens: the number of token instances
        register_tokens: controls if tokens will be registered with raiden Registry
    """

    participants = [privatekey_to_address(key) for key in private_keys]
    token_addresses = deploy_tokens_and_fund_accounts(
        token_amount=token_amount,
        number_of_tokens=number_of_tokens,
        proxy_manager=proxy_manager,
        participants=participants,
        contract_manager=contract_manager,
        token_contract_name=token_contract_name,
    )

    if register_tokens:
        for token in token_addresses:
            registry = proxy_manager.token_network_registry(
                token_network_registry_address)
            registry.add_token(
                token_address=token,
                channel_participant_deposit_limit=RED_EYES_PER_CHANNEL_PARTICIPANT_LIMIT,
                token_network_deposit_limit=RED_EYES_PER_TOKEN_NETWORK_LIMIT,
                block_identifier=proxy_manager.client.blockhash_from_blocknumber("latest"),
            )

    return token_addresses
Example #16
    def get_monitor_request(
        self,
        privkey: PrivateKey,
        reward_amount: TokenAmount,
        msc_address: MonitoringServiceAddress,
    ) -> "MonitorRequest":
        """Get monitor request message for a given balance proof."""
        return UnsignedMonitorRequest(
            channel_identifier=self.channel_identifier,
            token_network_address=self.token_network_address,
            chain_id=self.chain_id,
            balance_hash=self.balance_hash,
            nonce=self.nonce,
            additional_hash=self.additional_hash,
            closing_signature=self.signature,
            reward_amount=reward_amount,
            non_closing_participant=privatekey_to_address(privkey),
            msc_address=msc_address,
        ).sign(privkey)
Example #17
def test_make_iou():
    privkey = bytes([2] * 32)
    sender = Address(privatekey_to_address(privkey))
    receiver = Address(bytes([1] * 20))
    one_to_n_address = Address(bytes([2] * 20))
    chain_id = ChainID(4)
    max_fee = 100

    pfs_config_copy = replace(PFS_CONFIG)
    pfs_config_copy.info = replace(pfs_config_copy.info, payment_address=receiver)
    iou = make_iou(
        pfs_config=pfs_config_copy,
        our_address=sender,
        privkey=privkey,
        block_number=10,
        one_to_n_address=one_to_n_address,
        chain_id=chain_id,
        offered_fee=TokenAmount(1),
    )

    assert iou.sender == sender
    assert iou.receiver == receiver
    assert 0 < iou.amount <= max_fee
Example #18
def test_token(deploy_client, token_proxy, private_keys, web3,
               contract_manager):
    privkey = private_keys[1]
    address = privatekey_to_address(privkey)
    address = to_canonical_address(address)
    other_client = JSONRPCClient(web3, privkey)
    other_token_proxy = Token(
        jsonrpc_client=other_client,
        token_address=to_canonical_address(token_proxy.proxy.contract.address),
        contract_manager=contract_manager,
    )

    # send some funds from deployer to generated address
    transfer_funds = 100
    token_proxy.transfer(address, transfer_funds)
    assert transfer_funds == token_proxy.balance_of(address)
    allow_funds = 100
    token_proxy.approve(address, allow_funds)
    assert allow_funds == token_proxy.proxy.contract.functions.allowance(
        to_checksum_address(deploy_client.address),
        to_checksum_address(address)).call(block_identifier="latest")
    other_token_proxy.transfer(deploy_client.address, transfer_funds)
    assert token_proxy.balance_of(address) == 0
Example #19
def test_payment_channel_proxy_basics(
    token_network_registry_address: TokenNetworkRegistryAddress,
    token_network_proxy: TokenNetwork,
    token_proxy: Token,
    chain_id: ChainID,
    private_keys: List[PrivateKey],
    web3: Web3,
    contract_manager: ContractManager,
    reveal_timeout: BlockTimeout,
) -> None:
    token_network_address = token_network_proxy.address
    partner = privatekey_to_address(private_keys[0])

    rpc_client = JSONRPCClient(web3, private_keys[1])
    proxy_manager = ProxyManager(
        rpc_client=rpc_client,
        contract_manager=contract_manager,
        metadata=ProxyManagerMetadata(
            token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER,
            filters_start_at=GENESIS_BLOCK_NUMBER,
        ),
    )
    token_network_proxy = proxy_manager.token_network(
        address=token_network_address, block_identifier=BLOCK_ID_LATEST
    )
    start_block = web3.eth.blockNumber

    channel_details = token_network_proxy.new_netting_channel(
        partner=partner,
        settle_timeout=TEST_SETTLE_TIMEOUT_MIN,
        given_block_identifier=BLOCK_ID_LATEST,
    )
    channel_identifier = channel_details.channel_identifier
    assert channel_identifier is not None

    channel_state = NettingChannelState(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_id,
            token_network_address=token_network_address,
            channel_identifier=channel_identifier,
        ),
        token_address=token_network_proxy.token_address(),
        token_network_registry_address=token_network_registry_address,
        reveal_timeout=reveal_timeout,
        settle_timeout=BlockTimeout(TEST_SETTLE_TIMEOUT_MIN),
        fee_schedule=FeeScheduleState(),
        our_state=NettingChannelEndState(
            address=token_network_proxy.client.address, contract_balance=Balance(0)
        ),
        partner_state=NettingChannelEndState(address=partner, contract_balance=Balance(0)),
        open_transaction=SuccessfulTransactionState(finished_block_number=BlockNumber(0)),
    )
    channel_proxy_1 = proxy_manager.payment_channel(
        channel_state=channel_state, block_identifier=BLOCK_ID_LATEST
    )

    assert channel_proxy_1.channel_identifier == channel_identifier
    assert channel_proxy_1.opened(BLOCK_ID_LATEST) is True

    # Test deposit
    initial_token_balance = 100
    token_proxy.transfer(rpc_client.address, TokenAmount(initial_token_balance))
    assert token_proxy.balance_of(rpc_client.address) == initial_token_balance
    assert token_proxy.balance_of(partner) == 0
    channel_proxy_1.approve_and_set_total_deposit(
        total_deposit=TokenAmount(10), block_identifier=BLOCK_ID_LATEST
    )

    # ChannelOpened, ChannelNewDeposit
    channel_events = get_all_netting_channel_events(
        proxy_manager=proxy_manager,
        token_network_address=token_network_address,
        netting_channel_identifier=channel_proxy_1.channel_identifier,
        contract_manager=contract_manager,
        from_block=start_block,
        to_block=web3.eth.blockNumber,
    )

    assert len(channel_events) == 2

    block_before_close = web3.eth.blockNumber
    empty_balance_proof = BalanceProof(
        channel_identifier=channel_proxy_1.channel_identifier,
        token_network_address=token_network_address,
        balance_hash=EMPTY_BALANCE_HASH,
        nonce=0,
        chain_id=chain_id,
        transferred_amount=TokenAmount(0),
    )
    closing_data = (
        empty_balance_proof.serialize_bin(msg_type=MessageTypeId.BALANCE_PROOF) + EMPTY_SIGNATURE
    )
    channel_proxy_1.close(
        nonce=Nonce(0),
        balance_hash=EMPTY_BALANCE_HASH,
        additional_hash=EMPTY_MESSAGE_HASH,
        non_closing_signature=EMPTY_SIGNATURE,
        closing_signature=LocalSigner(private_keys[1]).sign(data=closing_data),
        block_identifier=BLOCK_ID_LATEST,
    )
    assert channel_proxy_1.closed(BLOCK_ID_LATEST) is True
    # ChannelOpened, ChannelNewDeposit, ChannelClosed
    channel_events = get_all_netting_channel_events(
        proxy_manager=proxy_manager,
        token_network_address=token_network_address,
        netting_channel_identifier=channel_proxy_1.channel_identifier,
        contract_manager=contract_manager,
        from_block=start_block,
        to_block=web3.eth.blockNumber,
    )
    assert len(channel_events) == 3

    # check the settlement timeouts again
    assert channel_proxy_1.settle_timeout() == TEST_SETTLE_TIMEOUT_MIN

    # update transfer -- we need to wait on +1 since we use the latest block on parity for
    # estimate gas and at the time the latest block is the settle timeout block.
    # More info: https://github.com/raiden-network/raiden/pull/3699#discussion_r270477227
    rpc_client.wait_until_block(
        target_block_number=BlockNumber(rpc_client.block_number() + TEST_SETTLE_TIMEOUT_MIN + 1)
    )

    transaction_hash = channel_proxy_1.settle(
        transferred_amount=TokenAmount(0),
        locked_amount=LockedAmount(0),
        locksroot=LOCKSROOT_OF_NO_LOCKS,
        partner_transferred_amount=TokenAmount(0),
        partner_locked_amount=LockedAmount(0),
        partner_locksroot=LOCKSROOT_OF_NO_LOCKS,
        block_identifier=BLOCK_ID_LATEST,
    )
    assert is_tx_hash_bytes(transaction_hash)
    assert channel_proxy_1.settled(BLOCK_ID_LATEST) is True
    # ChannelOpened, ChannelNewDeposit, ChannelClosed, ChannelSettled
    channel_events = get_all_netting_channel_events(
        proxy_manager=proxy_manager,
        token_network_address=token_network_address,
        netting_channel_identifier=channel_proxy_1.channel_identifier,
        contract_manager=contract_manager,
        from_block=start_block,
        to_block=web3.eth.blockNumber,
    )
    assert len(channel_events) == 4

    channel_details = token_network_proxy.new_netting_channel(
        partner=partner,
        settle_timeout=TEST_SETTLE_TIMEOUT_MIN,
        given_block_identifier=BLOCK_ID_LATEST,
    )
    new_channel_identifier = channel_details.channel_identifier
    assert new_channel_identifier is not None

    channel_state = NettingChannelState(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=chain_id,
            token_network_address=token_network_address,
            channel_identifier=new_channel_identifier,
        ),
        token_address=token_network_proxy.token_address(),
        token_network_registry_address=token_network_registry_address,
        reveal_timeout=reveal_timeout,
        settle_timeout=BlockTimeout(TEST_SETTLE_TIMEOUT_MIN),
        fee_schedule=FeeScheduleState(),
        our_state=NettingChannelEndState(
            address=token_network_proxy.client.address, contract_balance=Balance(0)
        ),
        partner_state=NettingChannelEndState(address=partner, contract_balance=Balance(0)),
        open_transaction=SuccessfulTransactionState(finished_block_number=BlockNumber(0)),
    )
    channel_proxy_2 = proxy_manager.payment_channel(
        channel_state=channel_state, block_identifier=BLOCK_ID_LATEST
    )

    assert channel_proxy_2.channel_identifier == new_channel_identifier
    assert channel_proxy_2.opened(BLOCK_ID_LATEST) is True

    msg = "The channel was already closed, the second call must fail"
    with pytest.raises(RaidenRecoverableError):
        channel_proxy_1.close(
            nonce=Nonce(0),
            balance_hash=EMPTY_BALANCE_HASH,
            additional_hash=EMPTY_MESSAGE_HASH,
            non_closing_signature=EMPTY_SIGNATURE,
            closing_signature=LocalSigner(private_keys[1]).sign(data=closing_data),
            block_identifier=block_before_close,
        )
        pytest.fail(msg)

    msg = "The channel is not open at latest, this must raise"
    with pytest.raises(RaidenUnrecoverableError):
        channel_proxy_1.close(
            nonce=Nonce(0),
            balance_hash=EMPTY_BALANCE_HASH,
            additional_hash=EMPTY_MESSAGE_HASH,
            non_closing_signature=EMPTY_SIGNATURE,
            closing_signature=LocalSigner(private_keys[1]).sign(data=closing_data),
            block_identifier=BLOCK_ID_LATEST,
        )
        pytest.fail(msg)

    msg = (
        "The channel was not opened at the provided block (latest). "
        "This call should never have been attempted."
    )
    with pytest.raises(BrokenPreconditionError):
        channel_proxy_1.approve_and_set_total_deposit(
            total_deposit=TokenAmount(20), block_identifier=BLOCK_ID_LATEST
        )
        pytest.fail(msg)
Example #20
def test_events_can_happen_in_the_deployment_block(web3: Web3,
                                                   deploy_key: bytes) -> None:
    """It is possible to send transactions to a smart contract that has not
    been mined yet, resulting in events being emitted in the same block the
    smart contract was deployed.
    """
    address = privatekey_to_address(deploy_key)

    contract_name = "RpcTest"
    contracts, contract_key = compile_test_smart_contract(contract_name)
    contract = contracts[contract_key]

    _, eth_node, _ = is_supported_client(web3.clientVersion)
    assert eth_node, "unknown eth_node."
    nonce = discover_next_available_nonce(web3, eth_node, address)

    retries = 5

    for _ in range(retries):
        contract_address = to_canonical_address(
            keccak(rlp.encode([address, nonce]))[:20])
        contract_object = web3.eth.contract(address=contract_address,
                                            abi=contract["abi"],
                                            bytecode=contract["bin"])

        deploy_transaction_data = contract_object.constructor(
        ).buildTransaction()
        call_transaction_data = contract_object.functions.createEvent(
            1).buildTransaction()

        deploy_transaction_data["nonce"] = nonce
        nonce = Nonce(nonce + 1)
        call_transaction_data["nonce"] = nonce
        nonce = Nonce(nonce + 1)

        deploy_signed_txn = web3.eth.account.sign_transaction(
            deploy_transaction_data, deploy_key)
        call_signed_txn = web3.eth.account.sign_transaction(
            call_transaction_data, deploy_key)

        deploy_tx_hash = web3.eth.sendRawTransaction(
            deploy_signed_txn.rawTransaction)
        call_tx_hash = web3.eth.sendRawTransaction(
            call_signed_txn.rawTransaction)

        while True:
            try:
                deploy_tx_receipt = web3.eth.getTransactionReceipt(
                    deploy_tx_hash)
                call_tx_receipt = web3.eth.getTransactionReceipt(call_tx_hash)

                # This is the condition this test is trying to hit, when both
                # the deployment of the transaction and its first call happen
                # in the same block. As a consequence, because this can happen
                # in at least one Ethereum implementation (e.g. Geth 1.9.15),
                # all filters *must* start in the same block as the smart
                # contract deployment block.
                if deploy_tx_receipt["blockHash"] == call_tx_receipt[
                        "blockHash"]:
                    return

                break

            except TransactionNotFound:
                gevent.sleep(1.0)

    assert False, f"None of the {retries} transactions got mined in the same block."
Example #21
def web3(
    blockchain_p2p_ports,
    blockchain_private_keys,
    blockchain_rpc_ports,
    blockchain_type,
    blockchain_extra_config,
    deploy_key,
    private_keys,
    account_genesis_eth_balance,
    random_marker,
    tmpdir,
    chain_id,
    logs_storage,
):
    """ Starts a private chain with accounts funded. """
    # include the deploy key in the list of funded accounts
    keys_to_fund = sorted(set(private_keys + [deploy_key]))

    if blockchain_type not in {client.value for client in EthClient}:
        raise ValueError(f"unknown blockchain_type {blockchain_type}")

    host = "127.0.0.1"
    rpc_port = blockchain_rpc_ports[0]
    endpoint = f"http://{host}:{rpc_port}"
    web3 = Web3(HTTPProvider(endpoint))

    assert len(blockchain_private_keys) == len(blockchain_rpc_ports)
    assert len(blockchain_private_keys) == len(blockchain_p2p_ports)

    eth_nodes = [
        EthNodeDescription(
            private_key=key,
            rpc_port=rpc,
            p2p_port=p2p,
            miner=(pos == 0),
            extra_config=blockchain_extra_config,
            blockchain_type=blockchain_type,
        ) for pos, (key, rpc, p2p) in enumerate(
            zip(blockchain_private_keys, blockchain_rpc_ports,
                blockchain_p2p_ports))
    ]

    accounts_to_fund = [
        AccountDescription(privatekey_to_address(key),
                           account_genesis_eth_balance) for key in keys_to_fund
    ]

    # The private chain data is always discarded on the CI
    base_datadir = str(tmpdir)

    # Save the Ethereum node's log for debugging
    base_logdir = os.path.join(logs_storage, blockchain_type)

    genesis_description = GenesisDescription(
        prefunded_accounts=accounts_to_fund,
        chain_id=chain_id,
        random_marker=random_marker)
    eth_node_runner = run_private_blockchain(
        web3=web3,
        eth_nodes=eth_nodes,
        base_datadir=base_datadir,
        log_dir=base_logdir,
        verbosity="info",
        genesis_description=genesis_description,
    )
    with eth_node_runner:
        yield web3

    cleanup_tasks()
Example #22
    TEST_SETTLE_TIMEOUT_MAX,
    TEST_SETTLE_TIMEOUT_MIN,
)
from raiden_contracts.contract_manager import ContractManager, contracts_precompiled_path

if TYPE_CHECKING:
    # pylint: disable=unused-import
    from raiden.tests.utils.transport import ParsedURL  # noqa: F401

# the smoketest will assert that a different endpoint got successfully registered
TEST_DEPOSIT_AMOUNT = TokenAmount(5)

TEST_PRIVKEY = PrivateKey(
    b"\xad\xd4\xd3\x10\xba\x04$hy\x1d\xd7\xbf\x7fn\xae\x85\xac"
    b"\xc4\xdd\x14?\xfa\x81\x0e\xf1\x80\x9aj\x11\xf2\xbcD")
TEST_ACCOUNT_ADDRESS = privatekey_to_address(TEST_PRIVKEY)


class StepPrinter(Protocol):
    def __call__(self, description: str, error: bool = False) -> None:
        ...


def ensure_executable(cmd):
    """look for the given command and make sure it can be executed"""
    if not shutil.which(cmd):
        raise ValueError(
            "Error: unable to locate %s binary.\n"
            "Make sure it is installed and added to the PATH variable." % cmd)

Example #23
    def _fill_address(self) -> None:
        if "address" in self.keystore:
            self._address = Address(decode_hex(self.keystore["address"]))
        elif not self.locked:
            assert self.privkey is not None, "`privkey` not set, maybe call `unlock` before."
            self._address = privatekey_to_address(self.privkey)
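When the keystore lacks an address, _fill_address falls back to deriving it from the unlocked private key via privatekey_to_address. Below is a minimal sketch of the standard Ethereum derivation this implies (uncompressed public key, keccak-256, last 20 bytes); it assumes coincurve and eth_utils and is not claimed to be Raiden's actual implementation:

from coincurve import PublicKey
from eth_utils import keccak


def derive_address(privkey: bytes) -> bytes:
    # The uncompressed public key is 65 bytes and starts with a 0x04 prefix;
    # the address is the last 20 bytes of keccak-256 over the remaining 64 bytes.
    pubkey = PublicKey.from_valid_secret(privkey).format(compressed=False)
    return keccak(pubkey[1:])[-20:]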
Example #24
def run_private_blockchain(
    web3: Web3,
    eth_nodes: List[EthNodeDescription],
    base_datadir: str,
    log_dir: str,
    verbosity: str,
    genesis_description: GenesisDescription,
) -> Iterator[List[JSONRPCExecutor]]:
    """ Starts a private network with private_keys accounts funded.

    Args:
        web3: A Web3 instance used to check when the private chain is running.
        eth_nodes: A list of geth node
            description, containing the details of each node of the private
            chain.
        base_datadir: Directory used to store the geth databases.
        log_dir: Directory used to store the geth logs.
        verbosity: Verbosity used by the geth nodes.
    """
    # pylint: disable=too-many-locals,too-many-statements,too-many-arguments,too-many-branches

    password_path = os.path.join(base_datadir, "pw")
    with open(password_path, "w") as handler:
        handler.write(DEFAULT_PASSPHRASE)

    nodes_configuration = []
    for node in eth_nodes:
        config = eth_node_config(node.private_key, node.p2p_port,
                                 node.rpc_port, **node.extra_config)

        if node.miner:
            config["unlock"] = to_checksum_address(config["address"])
            config["mine"] = True
            config["password"] = os.path.join(base_datadir, "pw")

        nodes_configuration.append(config)

    eth_node_config_set_bootnodes(nodes_configuration)

    blockchain_type = eth_nodes[0].blockchain_type

    # This is not configurable because it must be one of the running eth
    # nodes.
    seal_account = privatekey_to_address(eth_nodes[0].private_key)

    if blockchain_type == "geth":
        genesis_path = os.path.join(base_datadir, "custom_genesis.json")
        geth_generate_poa_genesis(
            genesis_path=genesis_path,
            genesis_description=genesis_description,
            seal_account=seal_account,
        )

        for config in nodes_configuration:
            if config.get("mine"):
                datadir = eth_node_to_datadir(config["address"], base_datadir)
                keyfile_path = geth_keyfile(datadir)
                eth_create_account_file(keyfile_path, config["nodekey"])

    elif blockchain_type == "parity":
        genesis_path = os.path.join(base_datadir, "chainspec.json")
        parity_generate_chain_spec(
            genesis_path=genesis_path,
            genesis_description=genesis_description,
            seal_account=seal_account,
        )

        for config in nodes_configuration:
            if config.get("mine"):
                datadir = eth_node_to_datadir(config["address"], base_datadir)
                keyfile_path = parity_keyfile(datadir)
                eth_create_account_file(keyfile_path, config["nodekey"])

    else:
        raise TypeError(f'Unknown blockchain client type "{blockchain_type}"')

    runner: ContextManager[List[JSONRPCExecutor]] = eth_run_nodes(
        eth_node_descs=eth_nodes,
        nodes_configuration=nodes_configuration,
        base_datadir=base_datadir,
        genesis_file=genesis_path,
        chain_id=genesis_description.chain_id,
        random_marker=genesis_description.random_marker,
        verbosity=verbosity,
        logdir=log_dir,
    )
    with runner as executors:
        eth_check_balance(web3, [
            account.address
            for account in genesis_description.prefunded_accounts
        ])
        yield executors
Example #25
def deploy_smart_contract_bundle_concurrently(
    deploy_client: JSONRPCClient,
    contract_manager: ContractManager,
    proxy_manager: ProxyManager,
    chain_id: ChainID,
    environment_type: Environment,
    max_token_networks: int,
    number_of_tokens: int,
    private_keys: List[PrivateKey],
    register_tokens: bool,
    settle_timeout_max: int,
    settle_timeout_min: int,
    token_amount: TokenAmount,
    token_contract_name: str,
) -> FixtureSmartContracts:

    greenlets: Set[Greenlet] = set()
    participants = [privatekey_to_address(key) for key in private_keys]

    secret_registry_deploy_greenlet = gevent.spawn(
        deploy_secret_registry,
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        proxy_manager=proxy_manager,
    )
    greenlets.add(secret_registry_deploy_greenlet)

    token_network_registry_deploy_greenlet = gevent.spawn(
        deploy_token_network_registry,
        secret_registry_deploy_result=secret_registry_deploy_greenlet.get,
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        proxy_manager=proxy_manager,
        chain_id=chain_id,
        settle_timeout_min=settle_timeout_min,
        settle_timeout_max=settle_timeout_max,
        max_token_networks=max_token_networks,
    )
    greenlets.add(token_network_registry_deploy_greenlet)

    # ERC20 tokens used for token networks
    token_contracts_greenlets = list()
    for _ in range(number_of_tokens):
        token_deploy_greenlet = gevent.spawn(
            deploy_token,
            deploy_client=deploy_client,
            contract_manager=contract_manager,
            initial_amount=token_amount,
            decimals=2,
            token_name="raiden",
            token_symbol="Rd",
            token_contract_name=token_contract_name,
        )
        greenlets.add(token_deploy_greenlet)
        token_contracts_greenlets.append(token_deploy_greenlet)

        # Fund the nodes
        for transfer_to in participants:
            fund_node_greenlet = gevent.spawn(
                fund_node,
                token_result=token_deploy_greenlet.get,
                proxy_manager=proxy_manager,
                to_address=transfer_to,
                amount=TokenAmount(token_amount // len(participants)),
            )
            greenlets.add(fund_node_greenlet)

        if register_tokens:
            register_grenlet = gevent.spawn(
                register_token,
                token_deploy_result=token_deploy_greenlet.get,
                token_network_registry_deploy_result=token_network_registry_deploy_greenlet.get,
            )
            greenlets.add(register_grenlet)

        del token_deploy_greenlet

    if environment_type == Environment.DEVELOPMENT:
        utility_token_deploy_greenlet = gevent.spawn(
            deploy_token,
            deploy_client=deploy_client,
            contract_manager=contract_manager,
            initial_amount=TokenAmount(1000 * 10**18),
            decimals=0,
            token_name="TKN",
            token_symbol="TKN",
            token_contract_name=token_contract_name,
        )
        greenlets.add(utility_token_deploy_greenlet)

        if register_tokens:
            register_utility_token_grenlet = gevent.spawn(
                register_token,
                token_deploy_result=utility_token_deploy_greenlet.get,
                token_network_registry_deploy_result=token_network_registry_deploy_greenlet.get,
            )
            greenlets.add(register_utility_token_grenlet)

        service_registry_deploy_greenlet = gevent.spawn(
            deploy_service_registry,
            token_deploy_result=utility_token_deploy_greenlet.get,
            deploy_client=deploy_client,
            contract_manager=contract_manager,
            proxy_manager=proxy_manager,
        )
        greenlets.add(service_registry_deploy_greenlet)

        user_deposit_deploy_greenlet = gevent.spawn(
            deploy_user_deposit,
            token_deploy_result=utility_token_deploy_greenlet.get,
            deploy_client=deploy_client,
            contract_manager=contract_manager,
            proxy_manager=proxy_manager,
        )
        greenlets.add(user_deposit_deploy_greenlet)

        one_to_n_deploy_greenlet = gevent.spawn(
            deploy_one_to_n,
            user_deposit_deploy_result=user_deposit_deploy_greenlet.get,
            service_registry_deploy_result=service_registry_deploy_greenlet.get,
            deploy_client=deploy_client,
            contract_manager=contract_manager,
            proxy_manager=proxy_manager,
            chain_id=chain_id,
        )
        greenlets.add(one_to_n_deploy_greenlet)

        monitoring_service_deploy_greenlet = gevent.spawn(
            deploy_monitoring_service,
            token_deploy_result=utility_token_deploy_greenlet.get,
            user_deposit_deploy_result=user_deposit_deploy_greenlet.get,
            service_registry_deploy_result=service_registry_deploy_greenlet.get,
            token_network_registry_deploy_result=token_network_registry_deploy_greenlet.get,
            deploy_client=deploy_client,
            contract_manager=contract_manager,
            proxy_manager=proxy_manager,
        )
        greenlets.add(monitoring_service_deploy_greenlet)

        for transfer_to in participants:
            transfer_grenlet = gevent.spawn(
                transfer_user_deposit_tokens,
                user_deposit_deploy_result=user_deposit_deploy_greenlet.get,
                transfer_to=transfer_to,
            )
            greenlets.add(transfer_grenlet)

    gevent.joinall(greenlets, raise_error=True)

    secret_registry_proxy = secret_registry_deploy_greenlet.get()
    token_network_registry_proxy = token_network_registry_deploy_greenlet.get()
    token_contracts = [
        token_deploy_greenlet.get()
        for token_deploy_greenlet in token_contracts_greenlets
    ]

    services_smart_contracts: Optional[ServicesSmartContracts] = None
    if environment_type == Environment.DEVELOPMENT:
        one_to_n_proxy = one_to_n_deploy_greenlet.get()
        user_deposit_proxy = user_deposit_deploy_greenlet.get()
        service_registry_proxy = service_registry_deploy_greenlet.get()
        utility_token_contract = utility_token_deploy_greenlet.get()
        monitoring_service_proxy = monitoring_service_deploy_greenlet.get()

        utility_token_proxy = Token(deploy_client,
                                    utility_token_contract.address,
                                    contract_manager, BLOCK_ID_LATEST)

        utility_token_network_proxy: Optional[TokenNetwork] = None
        if register_tokens:
            utility_token_network_address = register_utility_token_grenlet.get()
            utility_token_network_proxy = proxy_manager.token_network(
                utility_token_network_address, BLOCK_ID_LATEST)

        services_smart_contracts = ServicesSmartContracts(
            utility_token_proxy=utility_token_proxy,
            utility_token_network_proxy=utility_token_network_proxy,
            one_to_n_proxy=one_to_n_proxy,
            user_deposit_proxy=user_deposit_proxy,
            service_registry_proxy=service_registry_proxy,
            monitoring_service=monitoring_service_proxy,
        )

    return FixtureSmartContracts(
        secret_registry_proxy=secret_registry_proxy,
        token_network_registry_proxy=token_network_registry_proxy,
        token_contracts=token_contracts,
        services_smart_contracts=services_smart_contracts,
    )
Example #26
    def _fill_address(self) -> None:
        if "address" in self.keystore:
            self._address = Address(decode_hex(self.keystore["address"]))
        elif not self.locked:
            assert self.privkey
            self._address = privatekey_to_address(self.privkey)
Example #27
def test_configure_pfs(service_registry_address, private_keys, web3,
                       contract_manager):
    chain_id = ChainID(int(web3.net.version))
    service_registry, urls = deploy_service_registry_and_set_urls(
        private_keys=private_keys,
        web3=web3,
        contract_manager=contract_manager,
        service_registry_address=service_registry_address,
    )
    json_data = {
        "price_info": 0,
        "network_info": {
            "chain_id": chain_id,
            "token_network_registry_address": to_checksum_address(
                token_network_registry_address_test_default
            ),
            "user_deposit_address": to_checksum_address(
                privatekey_to_address(private_keys[1])
            ),
            "confirmed_block": {"number": 10},
        },
        "message": "This is your favorite pathfinding service",
        "operator": "John Doe",
        "version": "0.0.1",
        "payment_address": to_checksum_address(privatekey_to_address(private_keys[0])),
    }

    response = mocked_json_response(response_data=json_data)

    # With local routing configure_pfs should raise assertion
    with pytest.raises(AssertionError):
        _ = configure_pfs_or_exit(
            pfs_url="",
            routing_mode=RoutingMode.LOCAL,
            service_registry=service_registry,
            node_network_id=chain_id,
            token_network_registry_address=token_network_registry_address_test_default,
            pathfinding_max_fee=DEFAULT_PATHFINDING_MAX_FEE,
        )

    # With private routing configure_pfs should raise assertion
    with pytest.raises(AssertionError):
        _ = configure_pfs_or_exit(
            pfs_url="",
            routing_mode=RoutingMode.PRIVATE,
            service_registry=service_registry,
            node_network_id=chain_id,
            token_network_registry_address=token_network_registry_address_test_default,
            pathfinding_max_fee=DEFAULT_PATHFINDING_MAX_FEE,
        )

    # Asking for auto address
    # To make this deterministic we need to patch the random selection function
    patch_random = patch("raiden.network.pathfinding.get_random_pfs",
                         return_value="http://foo")
    with patch.object(requests, "get", return_value=response), patch_random:
        config = configure_pfs_or_exit(
            pfs_url=MATRIX_AUTO_SELECT_SERVER,
            routing_mode=RoutingMode.PFS,
            service_registry=service_registry,
            node_network_id=chain_id,
            token_network_registry_address=token_network_registry_address_test_default,
            pathfinding_max_fee=DEFAULT_PATHFINDING_MAX_FEE,
        )
    assert config.url in urls
    assert is_canonical_address(config.payment_address)

    # Configuring a valid given address
    given_address = "http://foo"
    with patch.object(requests, "get", return_value=response):
        config = configure_pfs_or_exit(
            pfs_url=given_address,
            routing_mode=RoutingMode.PFS,
            service_registry=service_registry,
            node_network_id=chain_id,
            token_network_registry_address=token_network_registry_address_test_default,
            pathfinding_max_fee=DEFAULT_PATHFINDING_MAX_FEE,
        )
    assert config.url == given_address
    assert is_same_address(config.payment_address,
                           json_data["payment_address"])
    assert config.price == json_data["price_info"]

    # Bad address, should exit the program
    bad_address = "http://badaddress"
    with pytest.raises(RaidenError):
        with patch.object(requests,
                          "get",
                          side_effect=requests.RequestException()):
            # Configuring a given address
            _ = configure_pfs_or_exit(
                pfs_url=bad_address,
                routing_mode=RoutingMode.PFS,
                service_registry=service_registry,
                node_network_id=chain_id,
                token_network_registry_address=token_network_registry_address_test_default,
                pathfinding_max_fee=DEFAULT_PATHFINDING_MAX_FEE,
            )

    # Addresses of token network registries of pfs and client conflict, should exit the client
    response = mocked_json_response(response_data=json_data)
    with pytest.raises(RaidenError):
        with patch.object(requests, "get", return_value=response):
            _ = configure_pfs_or_exit(
                pfs_url="http://foo",
                routing_mode=RoutingMode.PFS,
                service_registry=service_registry,
                node_network_id=chain_id,
                token_network_registry_address=TokenNetworkRegistryAddress(
                    to_canonical_address(
                        "0x2222222222222222222222222222222222222221")),
                pathfinding_max_fee=DEFAULT_PATHFINDING_MAX_FEE,
            )

    # ChainIDs of pfs and client conflict, should exit the client
    response = mocked_json_response(response_data=json_data)
    with pytest.raises(RaidenError):
        with patch.object(requests, "get", return_value=response):
            configure_pfs_or_exit(
                pfs_url="http://foo",
                routing_mode=RoutingMode.PFS,
                service_registry=service_registry,
                node_network_id=ChainID(chain_id + 1),
                token_network_registry_address=token_network_registry_address_test_default,
                pathfinding_max_fee=DEFAULT_PATHFINDING_MAX_FEE,
            )
Example #28
from raiden_contracts.constants import NETWORKNAME_TO_ID

NUM_GETH_NODES = 3
NUM_RAIDEN_ACCOUNTS = 10
START_PORT = 30301
START_RPCPORT = 8101

DEFAULT_ACCOUNTS_SEEDS = [
    "127.0.0.1:{}".format(START_PORT + i).encode()
    for i in range(NUM_RAIDEN_ACCOUNTS)
]
DEFAULT_ACCOUNTS_KEYS: List[PrivateKey] = [
    PrivateKey(keccak(seed)) for seed in DEFAULT_ACCOUNTS_SEEDS
]
DEFAULT_ACCOUNTS = [
    AccountDescription(privatekey_to_address(key),
                       TokenAmount(DEFAULT_BALANCE))
    for key in DEFAULT_ACCOUNTS_KEYS
]
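For clarity, the first entry of DEFAULT_ACCOUNTS can be reproduced step by step; this only restates the derivation already used above, reusing the same names (keccak, PrivateKey, privatekey_to_address):

first_seed = "127.0.0.1:{}".format(START_PORT).encode()  # DEFAULT_ACCOUNTS_SEEDS[0]
first_key = PrivateKey(keccak(first_seed))                # DEFAULT_ACCOUNTS_KEYS[0]
first_address = privatekey_to_address(first_key)          # DEFAULT_ACCOUNTS[0].address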


def main() -> None:
    tmpdir = tempfile.mkdtemp()

    geth_nodes = []
    for i in range(NUM_GETH_NODES):
        is_miner = i == 0
        node_key = PrivateKey(sha3(f"node:{i}".encode()))
        p2p_port = Port(START_PORT + i)
        rpc_port = Port(START_RPCPORT + i)