Example #1
    def open_channel_with_funding(
        self,
        registry_address_hex: AddressHex,
        token_address_hex: AddressHex,
        peer_address_hex: AddressHex,
        total_deposit: TokenAmount,
        settle_timeout: BlockTimeout = None,
    ) -> None:
        """ Convenience method to open a channel.

        Args:
            registry_address_hex: hex encoded address of the registry for the channel.
            token_address_hex: hex encoded address of the token for the channel.
            peer_address_hex: hex encoded address of the channel peer.
            total_deposit: amount of total funding for the channel.
            settle_timeout: number of blocks for the settle timeout (if None, use app defaults).

        Returns:
            None. The channel is opened and the deposit is set as side effects.
        """
        # Convert the hex-encoded addresses to their canonical binary form
        registry_address = TokenNetworkRegistryAddress(
            to_canonical_address(registry_address_hex))
        peer_address = to_canonical_address(peer_address_hex)
        token_address = TokenAddress(to_canonical_address(token_address_hex))

        self._api.channel_open(registry_address,
                               token_address,
                               peer_address,
                               settle_timeout=settle_timeout)

        self._api.set_total_channel_deposit(registry_address, token_address,
                                            peer_address, total_deposit)
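A hypothetical call of the method above could look like the following sketch. The `tools` instance and the placeholder addresses are assumptions for illustration, not part of the example.

from raiden.utils.typing import TokenAmount

# Hypothetical usage; `tools` stands for an instance of the class that defines
# open_channel_with_funding above, and the hex addresses are placeholders.
registry_hex = "0x" + "11" * 20
token_hex = "0x" + "22" * 20
peer_hex = "0x" + "33" * 20

tools.open_channel_with_funding(
    registry_address_hex=registry_hex,
    token_address_hex=token_hex,
    peer_address_hex=peer_hex,
    total_deposit=TokenAmount(100),
    settle_timeout=None,  # fall back to the application defaults
)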
Example #2
    def register_token(
        self,
        registry_address_hex: AddressHex,
        token_address_hex: AddressHex,
        retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
    ) -> TokenNetwork:
        """ Register a token with the raiden token manager.

        Args:
            registry_address_hex: a hex encoded registry address.
            token_address_hex: a hex encoded token address.
            retry_timeout: interval in seconds between retries while waiting for the token network.

        Returns:
            The token network proxy.
        """
        registry_address = TokenNetworkRegistryAddress(
            to_canonical_address(registry_address_hex))
        token_address = TokenAddress(to_canonical_address(token_address_hex))

        registry = self._raiden.proxy_manager.token_network_registry(
            registry_address, BLOCK_ID_LATEST)

        token_network_address = registry.add_token(
            token_address=token_address,
            channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
            token_network_deposit_limit=TokenAmount(UINT256_MAX),
            given_block_identifier=BLOCK_ID_LATEST,
        )
        waiting.wait_for_token_network(self._raiden, registry.address,
                                       token_address, retry_timeout)

        return self._raiden.proxy_manager.token_network(
            token_network_address, BLOCK_ID_LATEST)
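A hypothetical call of register_token, assuming `manager` is an instance of the class that defines the method above and both hex addresses are placeholders:

# Hypothetical usage; `manager` and the addresses are placeholders.
token_network_proxy = manager.register_token(
    registry_address_hex="0x" + "11" * 20,
    token_address_hex="0x" + "22" * 20,
)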
Example #3
def wait_for_settle_all_channels(raiden: "RaidenService",
                                 retry_timeout: float) -> None:
    """Wait until all channels are settled.

    Note:
        This does not time out, use gevent.Timeout.
    """
    chain_state = views.state_from_raiden(raiden)

    id_paymentnetworkstate = chain_state.identifiers_to_paymentnetworks.items()
    for payment_network_id, payment_network_state in id_paymentnetworkstate:

        id_tokennetworkstate = payment_network_state.tokenidentifiers_to_tokennetworks.items()
        for token_network_id, token_network_state in id_tokennetworkstate:
            channel_ids = cast(
                List[ChannelID],
                token_network_state.channelidentifiers_to_channels.keys())

            wait_for_settle(
                raiden=raiden,
                payment_network_id=payment_network_id,
                token_address=TokenAddress(token_network_id),
                channel_ids=channel_ids,
                retry_timeout=retry_timeout,
            )
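Because the helper above never times out on its own, callers would typically bound it with gevent.Timeout, as the docstring suggests. A minimal sketch, assuming `raiden_service` is a running RaidenService instance and 120 seconds is an arbitrary bound:

import gevent

# Bound the wait as recommended by the docstring; the timeout value and the
# `raiden_service` instance are assumptions for illustration.
timeout_error = RuntimeError("channels did not settle within 120 seconds")
with gevent.Timeout(120, timeout_error):
    wait_for_settle_all_channels(raiden=raiden_service, retry_timeout=0.5)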
Example #4
def contractreceivenewtokennetwork_from_event(
    event: DecodedEvent,
    pendingtokenregistration: Dict[TokenNetworkAddress,
                                   Tuple[TokenNetworkRegistryAddress,
                                         TokenAddress]],
) -> ContractReceiveNewTokenNetwork:
    data = event.event_data
    args = data["args"]

    token_network_address = args["token_network_address"]
    token_address = TokenAddress(args["token_address"])
    token_network_registry_address = TokenNetworkRegistryAddress(
        event.originating_contract)

    pendingtokenregistration[token_network_address] = (
        token_network_registry_address,
        token_address,
    )

    return ContractReceiveNewTokenNetwork(
        token_network_registry_address=token_network_registry_address,
        token_network=TokenNetworkState(
            address=token_network_address,
            token_address=token_address,
            network_graph=TokenNetworkGraphState(token_network_address),
        ),
        transaction_hash=event.transaction_hash,
        block_number=event.block_number,
        block_hash=event.block_hash,
    )
Example #5
def test_prometheus_event_handling_raise_exception(pathfinding_service_mock_empty):
    metrics_state = save_metrics_state(metrics.REGISTRY)
    pfs = pathfinding_service_mock_empty

    event = ReceiveTokenNetworkCreatedEvent(
        token_address=TokenAddress(bytes([1] * 20)),
        token_network_address=TokenNetworkAddress(bytes([2] * 20)),
        block_number=BlockNumber(1),
    )

    pfs.handle_token_network_created = Mock(side_effect=KeyError())

    with pytest.raises(KeyError):
        pfs.handle_event(event)

    # The exceptions raised in the wrapped part of the Prometheus logging
    # are not handled anywhere at the moment. Force an exception and test
    # that it is logged correctly anyway, since at some point higher in the
    # call stack we could catch exceptions.
    assert (
        metrics_state.get_delta(
            "events_exceptions_total",
            labels={"event_type": "ReceiveTokenNetworkCreatedEvent"},
        )
        == 1.0
    )
Example #6
def test_save_and_load_token_networks(pathfinding_service_mock_empty):
    pfs = pathfinding_service_mock_empty

    token_address = TokenAddress(bytes([1] * 20))
    token_network_address = TokenNetworkAddress(bytes([2] * 20))
    channel_id = ChannelID(1)
    p1 = Address(bytes([3] * 20))
    p2 = Address(bytes([4] * 20))
    events = [
        ReceiveTokenNetworkCreatedEvent(
            token_address=token_address,
            token_network_address=token_network_address,
            block_number=BlockNumber(1),
        ),
        ReceiveChannelOpenedEvent(
            token_network_address=token_network_address,
            channel_identifier=channel_id,
            participant1=p1,
            participant2=p2,
            settle_timeout=BlockTimeout(2 ** 65),  # larger than max_uint64 to check hex storage
            block_number=BlockNumber(2),
        ),
    ]
    for event in events:
        pfs.handle_event(event)
    assert len(pfs.token_networks) == 1

    loaded_networks = pfs._load_token_networks()  # pylint: disable=protected-access
    assert len(loaded_networks) == 1

    orig = list(pfs.token_networks.values())[0]
    loaded = list(loaded_networks.values())[0]
    assert loaded.address == orig.address
    assert loaded.channel_id_to_addresses == orig.channel_id_to_addresses
    assert loaded.G.nodes == orig.G.nodes
Example #7
def unregistered_token(token_amount, deploy_client, contract_manager) -> TokenAddress:
    contract_proxy, _ = deploy_client.deploy_single_contract(
        contract_name=CONTRACT_HUMAN_STANDARD_TOKEN,
        contract=contract_manager.get_contract(CONTRACT_HUMAN_STANDARD_TOKEN),
        constructor_parameters=(token_amount, 2, "raiden", "Rd"),
    )
    return TokenAddress(to_canonical_address(contract_proxy.address))
Example #8
def unregistered_token(token_amount, deploy_client,
                       contract_manager) -> TokenAddress:
    return TokenAddress(
        deploy_contract_web3(
            CONTRACT_HUMAN_STANDARD_TOKEN,
            deploy_client,
            contract_manager=contract_manager,
            constructor_arguments=(token_amount, 2, "raiden", "Rd"),
        ))
Example #9
def fund_node(
    token_result: Callable[[], Contract],
    proxy_manager: ProxyManager,
    to_address: Address,
    amount: TokenAmount,
) -> None:
    token_contract = token_result()
    token_proxy = proxy_manager.token(
        TokenAddress(to_canonical_address(token_contract.address)),
        BLOCK_ID_LATEST)
    token_proxy.transfer(to_address=to_address, amount=amount)
Example #10
    def setup_token_contract_for_token_network(
            self, proxy_manager: ProxyManager) -> CustomToken:
        """Ensure there is a deployed token contract and return a `CustomToken`
        proxy to it. This token will be used for the scenario's token network.

        This will either:

        - Use the token from the address provided in the scenario
          configuration.
        - Use a previously deployed token, with the details loaded from the
          disk.
        - Deploy a new token if neither of the above options is used.
        """
        token_definition = self.definition.token
        reuse_token_from_file = token_definition.can_reuse_token

        if token_definition.address:
            token_address = to_canonical_address(token_definition.address)
        elif reuse_token_from_file:
            token_details = load_token_configuration_from_file(
                token_definition.token_file)
            token_address = to_canonical_address(token_details["address"])
        else:
            contract_data = proxy_manager.contract_manager.get_contract(
                CONTRACT_CUSTOM_TOKEN)
            contract, receipt = self.client.deploy_single_contract(
                contract_name=CONTRACT_CUSTOM_TOKEN,
                contract=contract_data,
                constructor_parameters=(
                    ORCHESTRATION_MAXIMUM_BALANCE,
                    token_definition.decimals,
                    token_definition.name,
                    token_definition.symbol,
                ),
            )
            token_address = to_canonical_address(contract.address)

            if token_definition.should_reuse_token:
                details = TokenDetails({
                    "name": token_definition.name,
                    "address": to_checksum_address(token_address),
                    "block": receipt["blockNumber"],
                })
                save_token_configuration_to_file(token_definition.token_file,
                                                 details)

        return proxy_manager.custom_token(TokenAddress(token_address),
                                          "latest")
Example #11
def test_logging_processor():
    # Test that our logging processor converts byte addresses to checksum
    # addresses, even when they are nested inside events.
    logger = Mock()
    log_method = Mock()

    address = TokenAddress(
        b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd9")
    address_log = format_to_hex(_logger=logger,
                                _log_method=log_method,
                                event_dict=dict(address=address))
    assert to_checksum_address(address) == address_log["address"]

    address2 = Address(
        b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd1")
    event = ReceiveTokenNetworkCreatedEvent(
        token_address=address,
        token_network_address=TokenNetworkAddress(address2),
        block_number=BlockNumber(1),
    )
    event_log = format_to_hex(_logger=logger,
                              _log_method=log_method,
                              event_dict=dict(event=event))
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address) == event_log["event"]["token_address"])
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address2) == event_log["event"]
        ["token_network_address"])
    assert (  # pylint: disable=unsubscriptable-object
        event_log["event"]["type_name"] == "ReceiveTokenNetworkCreatedEvent")

    message = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(61),
            token_network_address=TokenNetworkAddress(address),
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    message_log = format_to_hex(_logger=logger,
                                _log_method=log_method,
                                event_dict=dict(message=message))
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address) == message_log["message"]
        ["canonical_identifier"]["token_network_address"])
    assert (  # pylint: disable=unsubscriptable-object
        message_log["message"]["type_name"] == "PFSFeeUpdate")
Example #12
def register_token(
    token_network_registry_deploy_result: Callable[[], TokenNetworkRegistry],
    token_deploy_result: Callable[[], Contract],
) -> TokenNetworkAddress:
    token_network_registry_proxy = token_network_registry_deploy_result()
    token_contract = token_deploy_result()

    return token_network_registry_proxy.add_token(
        token_address=TokenAddress(to_canonical_address(token_contract.address)),
        channel_participant_deposit_limit=RED_EYES_PER_CHANNEL_PARTICIPANT_LIMIT,
        token_network_deposit_limit=RED_EYES_PER_TOKEN_NETWORK_LIMIT,
        given_block_identifier=token_contract.web3.eth.blockNumber,
    )
Example #13
def token_addresses_fixture(
    deploy_smart_contract_bundle_concurrently: FixtureSmartContracts,
) -> List[TokenAddress]:
    """Fixture that yields `number_of_tokens` ERC20 token addresses, where the
    `token_amount` (per token) is distributed among the addresses behind `deploy_client` and
    potentially pre-registered with the Raiden Registry.
    The following pytest arguments can control the behavior:

    Args:
        token_amount: the overall number of units minted per token
        number_of_tokens: the number of token instances
        register_tokens: controls whether the tokens will be registered with the Raiden Registry
    """
    return [
        TokenAddress(to_canonical_address(token.address))
        for token in deploy_smart_contract_bundle_concurrently.token_contracts
    ]
Example #14
def deploy_token_and_return_proxy(deploy_client: JSONRPCClient,
                                  contract_manager: ContractManager,
                                  token_contract_name: str) -> Token:
    token_contract = deploy_token(
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        initial_amount=TokenAmount(1000 * 10**18),
        decimals=0,
        token_name="TKN",
        token_symbol="TKN",
        token_contract_name=token_contract_name,
    )

    return Token(
        jsonrpc_client=deploy_client,
        token_address=TokenAddress(token_contract.contract_address),
        contract_manager=contract_manager,
    )
Example #15
def test_prometheus_event_handling_no_exceptions(pathfinding_service_mock_empty):

    metrics_state = save_metrics_state(metrics.REGISTRY)
    pfs = pathfinding_service_mock_empty

    token_address = TokenAddress(bytes([1] * 20))
    token_network_address = TokenNetworkAddress(bytes([2] * 20))
    channel_id = ChannelID(1)
    p1 = Address(bytes([3] * 20))
    p2 = Address(bytes([4] * 20))
    events = [
        ReceiveTokenNetworkCreatedEvent(
            token_address=token_address,
            token_network_address=token_network_address,
            block_number=BlockNumber(1),
        ),
        ReceiveChannelOpenedEvent(
            token_network_address=token_network_address,
            channel_identifier=channel_id,
            participant1=p1,
            participant2=p2,
            settle_timeout=BlockTimeout(10),
            block_number=BlockNumber(2),
        ),
    ]
    for event in events:
        pfs.handle_event(event)

        # check that we have non-zero processing time for the events we created
        assert (
            metrics_state.get_delta(
                "events_processing_duration_seconds_sum",
                labels={"event_type": event.__class__.__name__},
            )
            > 0.0
        )
        # there should be no exception raised
        assert (
            metrics_state.get_delta(
                "events_exceptions_total", labels={"event_type": event.__class__.__name__}
            )
            == 0.0
        )
Example #16
def test_token_network_created(pathfinding_service_mock):
    token_address = TokenAddress(bytes([1] * 20))
    token_network_address = TokenNetworkAddress(bytes([2] * 20))
    network_event = ReceiveTokenNetworkCreatedEvent(
        token_address=token_address,
        token_network_address=token_network_address,
        block_number=BlockNumber(1),
    )

    assert not pathfinding_service_mock.follows_token_network(token_network_address)
    assert len(pathfinding_service_mock.token_networks) == 1

    pathfinding_service_mock.handle_event(network_event)
    assert pathfinding_service_mock.follows_token_network(token_network_address)
    assert len(pathfinding_service_mock.token_networks) == 2

    # Test idempotency
    pathfinding_service_mock.handle_event(network_event)
    assert pathfinding_service_mock.follows_token_network(token_network_address)
    assert len(pathfinding_service_mock.token_networks) == 2
Example #17
def contractreceivenewtokennetwork_from_event(
    event: DecodedEvent, ) -> ContractReceiveNewTokenNetwork:
    data = event.event_data
    args = data["args"]

    token_network_address = args["token_network_address"]
    token_address = TokenAddress(args["token_address"])
    token_network_registry_address = TokenNetworkRegistryAddress(
        event.originating_contract)

    return ContractReceiveNewTokenNetwork(
        token_network_registry_address=token_network_registry_address,
        token_network=TokenNetworkState(
            address=token_network_address,
            token_address=token_address,
        ),
        transaction_hash=event.transaction_hash,
        block_number=event.block_number,
        block_hash=event.block_hash,
    )
Example #18
def deploy_tokens_and_fund_accounts(
    token_amount: TokenAmount,
    number_of_tokens: int,
    proxy_manager: ProxyManager,
    participants: List[Address],
    contract_manager: ContractManager,
    token_contract_name: str,
) -> List[TokenAddress]:
    """ Deploy `number_of_tokens` ERC20 token instances with `token_amount` minted and
    distributed among `blockchain_services`. Optionally the instances will be registered with
    the raiden registry.

    Args:
        token_amount: number of units that will be created per token
        number_of_tokens: number of token instances that will be created
        proxy_manager: the proxy manager used to create the token proxy
        participants: participant addresses that will receive tokens
    """
    result = list()
    for _ in range(number_of_tokens):
        token_address = TokenAddress(
            deploy_contract_web3(
                contract_name=token_contract_name,
                deploy_client=proxy_manager.client,
                contract_manager=contract_manager,
                constructor_arguments=(token_amount, 2, "raiden", "Rd"),
            ))

        result.append(token_address)

        # only the creator of the token starts with a balance (deploy_service),
        # transfer from the creator to the other nodes
        for transfer_to in participants:
            proxy_manager.token(token_address).transfer(
                to_address=transfer_to,
                amount=TokenAmount(token_amount // len(participants)))

    return result
Example #19
def setup_raiden(
    matrix_server: str,
    print_step: StepPrinter,
    contracts_version,
    eth_rpc_endpoint: str,
    web3: Web3,
    base_datadir: Path,
    keystore: Path,
) -> RaidenTestSetup:
    print_step("Deploying Raiden contracts")

    client = JSONRPCClient(web3, get_private_key(keystore))
    contract_manager = ContractManager(
        contracts_precompiled_path(contracts_version))

    proxy_manager = ProxyManager(
        rpc_client=client,
        contract_manager=contract_manager,
        metadata=ProxyManagerMetadata(
            token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER,
            filters_start_at=GENESIS_BLOCK_NUMBER,
        ),
    )

    token = deploy_token(
        deploy_client=client,
        contract_manager=contract_manager,
        initial_amount=TokenAmount(1000),
        decimals=0,
        token_name="TKN",
        token_symbol="TKN",
        token_contract_name=CONTRACT_CUSTOM_TOKEN,
    )
    contract_addresses = deploy_smoketest_contracts(
        client=client,
        chain_id=CHAINNAME_TO_ID["smoketest"],
        contract_manager=contract_manager,
        token_address=token.address,
    )
    confirmed_block_identifier = client.get_confirmed_blockhash()
    registry = proxy_manager.token_network_registry(
        TokenNetworkRegistryAddress(
            contract_addresses[CONTRACT_TOKEN_NETWORK_REGISTRY]),
        block_identifier=confirmed_block_identifier,
    )

    registry.add_token(
        token_address=TokenAddress(to_canonical_address(token.address)),
        channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
        token_network_deposit_limit=TokenAmount(UINT256_MAX),
        given_block_identifier=confirmed_block_identifier,
    )

    print_step("Setting up Raiden")
    user_deposit_contract_address = to_checksum_address(
        contract_addresses[CONTRACT_USER_DEPOSIT])

    args = {
        "address": to_checksum_address(TEST_ACCOUNT_ADDRESS),
        "datadir": keystore,
        "eth_rpc_endpoint": eth_rpc_endpoint,
        "gas_price": "fast",
        "keystore_path": keystore,
        "matrix_server": matrix_server,
        "chain_id": str(CHAINNAME_TO_ID["smoketest"]),
        "password_file": click.File()(os.path.join(base_datadir, "pw")),
        "user_deposit_contract_address": user_deposit_contract_address,
        "sync_check": False,
        "environment_type": Environment.DEVELOPMENT,
    }

    # Wait until the secret registry is confirmed, otherwise the RaidenService
    # initialization will fail. This is needed for the check
    # `check_ethereum_confirmed_block_is_not_pruned`.
    current_block = client.block_number()
    target_block_number = current_block + DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS
    while current_block < target_block_number:
        current_block = client.block_number()
        sleep(0.5)

    return RaidenTestSetup(args=args,
                           token=token,
                           contract_addresses=contract_addresses)
Example #20
CHECK_RDN_MIN_DEPOSIT_INTERVAL = 5 * 60
CHECK_GAS_RESERVE_INTERVAL = 5 * 60
CHECK_VERSION_INTERVAL = 3 * 60 * 60
CHECK_NETWORK_ID_INTERVAL = 5 * 60

DEFAULT_HTTP_REQUEST_TIMEOUT = 1.0  # seconds

DISCOVERY_DEFAULT_ROOM = "discovery"
MONITORING_BROADCASTING_ROOM = "monitoring"
PATH_FINDING_BROADCASTING_ROOM = "path_finding"

# According to the smart contracts as of 07/08:
# https://github.com/raiden-network/raiden-contracts/blob/fff8646ebcf2c812f40891c2825e12ed03cc7628/raiden_contracts/contracts/TokenNetwork.sol#L213
# channel_identifier can never be 0. We make this a requirement in the client and
# use this fact to signify that messages passed with a channel_identifier of `0`
# are added to the global queue.
EMPTY_ADDRESS = b"\0" * 20

# Keep in sync with .circleci/config.yaml
HIGHEST_SUPPORTED_GETH_VERSION = "1.9.2"
LOWEST_SUPPORTED_GETH_VERSION = "1.8.21"
# this is the last stable version as of this comment
HIGHEST_SUPPORTED_PARITY_VERSION = "2.5.5"
LOWEST_SUPPORTED_PARITY_VERSION = "1.7.6"

WETH_TOKEN_ADDRESS = TokenAddress(
    to_canonical_address("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"))
DAI_TOKEN_ADDRESS = TokenAddress(
    to_canonical_address("0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359"))
Example #21
# Thresholds for the ``eth.getLogs`` call. Used to automatically adjust the block batch size.
ETH_GET_LOGS_TIMEOUT = 10
ETH_GET_LOGS_THRESHOLD_FAST = ETH_GET_LOGS_TIMEOUT // 4
ETH_GET_LOGS_THRESHOLD_SLOW = ETH_GET_LOGS_TIMEOUT // 2

# Keep in sync with .circleci/config.yaml
HIGHEST_SUPPORTED_GETH_VERSION = "1.9.21"
LOWEST_SUPPORTED_GETH_VERSION = "1.9.7"
# this is the last stable version as of this comment
HIGHEST_SUPPORTED_PARITY_VERSION = "3.1.0"
LOWEST_SUPPORTED_PARITY_VERSION = "1.7.6"

WEB3_BLOCK_NOT_FOUND_RETRY_COUNT = 3

WETH_TOKEN_ADDRESS = TokenAddress(
    to_canonical_address("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2")
)
DAI_TOKEN_ADDRESS = TokenAddress(
    to_canonical_address("0x6B175474E89094C44Da98b954EedeAC495271d0F")
)

FLAT_MED_FEE_MIN = 0
PROPORTIONAL_MED_FEE_MIN = 0
# This needs to limit the total slope of the fee function < 1
# This is also the per-hop fee, so the actual value per-channel is X / (2 + X)
PROPORTIONAL_MED_FEE_MAX = 1_000_000
IMBALANCE_MED_FEE_MIN = 0
IMBALANCE_MED_FEE_MAX = 50_000


# Web RTC constants
Example #22
def test_deposit_amount_must_be_smaller_than_the_token_network_limit(
        raiden_network: List[App], contract_manager: ContractManager,
        retry_timeout: float) -> None:
    """The Python API must properly check the requested deposit will not exceed
    the token network deposit limit.

    This is a regression test for #3135.

    As of version `v0.18.1` (commit 786347b23), the proxy was not properly
    checking that the requested deposit amount was smaller than the smart
    contract deposit limit. This led to two errors:

    - The error message was vague and incorrect: "Deposit amount decreased"
    - The exception used was not handled and crashed the node.

    This test checks that the limit is properly checked from the REST API.
    """
    app1 = raiden_network[0]

    registry_address = app1.raiden.default_registry.address

    token_supply = 1_000_000
    token_address = TokenAddress(
        deploy_contract_web3(
            contract_name=CONTRACT_HUMAN_STANDARD_TOKEN,
            deploy_client=app1.raiden.rpc_client,
            contract_manager=contract_manager,
            constructor_arguments=(token_supply, 2, "raiden", "Rd"),
        ))

    # Wait until Raiden can start using the token contract.
    # Here, the block at which the contract was deployed should be confirmed by Raiden.
    # Therefore, wait until that block is received.
    waiting.wait_for_block(
        raiden=app1.raiden,
        block_number=BlockNumber(app1.raiden.get_block_number() +
                                 DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS + 1),
        retry_timeout=retry_timeout,
    )

    api1 = RaidenAPI(app1.raiden)

    msg = "Token is not registered yet, it must not be in the token list."
    assert token_address not in api1.get_tokens_list(registry_address), msg

    token_network_deposit_limit = TokenAmount(100)
    api1.token_network_register(
        registry_address=registry_address,
        token_address=token_address,
        channel_participant_deposit_limit=token_network_deposit_limit,
        token_network_deposit_limit=token_network_deposit_limit,
    )

    exception = RuntimeError(
        "Did not see the token registration within 30 seconds")
    with gevent.Timeout(seconds=30, exception=exception):
        wait_for_state_change(
            app1.raiden,
            ContractReceiveNewTokenNetwork,
            {"token_network": {
                "token_address": token_address
            }},
            retry_timeout,
        )

    msg = "Token has been registered, yet must be available in the token list."
    assert token_address in api1.get_tokens_list(registry_address), msg

    partner_address = make_address()
    api1.channel_open(
        registry_address=app1.raiden.default_registry.address,
        token_address=token_address,
        partner_address=partner_address,
    )

    with pytest.raises(DepositOverLimit):
        api1.set_total_channel_deposit(
            registry_address=app1.raiden.default_registry.address,
            token_address=token_address,
            partner_address=partner_address,
            total_deposit=TokenAmount(token_network_deposit_limit + 1),
        )

        pytest.fail(
            "The deposit must fail if the requested deposit exceeds the token "
            "network deposit limit.")
Example #23
def test_participant_deposit_amount_must_be_smaller_than_the_limit(
        raiden_network: List[App], contract_manager: ContractManager,
        retry_timeout: float) -> None:
    """The Python API must properly check the requested participant deposit
    will not exceed the smart contract limit.

    This is companion test for
    `test_deposit_amount_must_be_smaller_than_the_token_network_limit`. The
    participant deposit limit was introduced for the bug bounty with the PR
    https://github.com/raiden-network/raiden-contracts/pull/276/ , the limit is
    available since version 0.4.0 of the smart contract.
    """
    app1 = raiden_network[0]

    registry_address = app1.raiden.default_registry.address

    token_supply = 1_000_000
    token_address = TokenAddress(
        deploy_contract_web3(
            contract_name=CONTRACT_HUMAN_STANDARD_TOKEN,
            deploy_client=app1.raiden.rpc_client,
            contract_manager=contract_manager,
            constructor_arguments=(token_supply, 2, "raiden", "Rd"),
        ))

    api1 = RaidenAPI(app1.raiden)

    msg = "Token is not registered yet, it must not be in the token list."
    assert token_address not in api1.get_tokens_list(registry_address), msg

    # Wait until Raiden can start using the token contract.
    # Here, the block at which the contract was deployed should be confirmed by Raiden.
    # Therefore, wait until that block is received.
    waiting.wait_for_block(
        raiden=app1.raiden,
        block_number=BlockNumber(app1.raiden.get_block_number() +
                                 DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS + 1),
        retry_timeout=retry_timeout,
    )

    token_network_participant_deposit_limit = TokenAmount(100)
    api1.token_network_register(
        registry_address=registry_address,
        token_address=token_address,
        channel_participant_deposit_limit=token_network_participant_deposit_limit,
        token_network_deposit_limit=TokenAmount(UINT256_MAX),
    )

    exception = RuntimeError(
        "Did not see the token registration within 30 seconds")
    with gevent.Timeout(seconds=30, exception=exception):
        wait_for_state_change(
            app1.raiden,
            ContractReceiveNewTokenNetwork,
            {"token_network": {
                "token_address": token_address
            }},
            retry_timeout,
        )

    msg = "Token has been registered, yet must be available in the token list."
    assert token_address in api1.get_tokens_list(registry_address), msg

    partner_address = make_address()
    api1.channel_open(
        registry_address=app1.raiden.default_registry.address,
        token_address=token_address,
        partner_address=partner_address,
    )

    with pytest.raises(DepositOverLimit):
        api1.set_total_channel_deposit(
            registry_address=app1.raiden.default_registry.address,
            token_address=token_address,
            partner_address=partner_address,
            total_deposit=TokenAmount(token_network_participant_deposit_limit +
                                      1),
        )

        pytest.fail(
            "The deposit must fail if the requested deposit exceeds the participant deposit limit."
        )
Example #24
def test_token_network_registry(
    deploy_client: JSONRPCClient,
    contract_manager: ContractManager,
    token_network_registry_address: TokenNetworkRegistryAddress,
    token_contract_name: str,
) -> None:
    proxy_manager = ProxyManager(
        rpc_client=deploy_client,
        contract_manager=contract_manager,
        metadata=ProxyManagerMetadata(
            token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER,
            filters_start_at=GENESIS_BLOCK_NUMBER,
        ),
    )

    confirmed_block_identifier = deploy_client.get_confirmed_blockhash()

    token_network_registry_proxy = proxy_manager.token_network_registry(
        token_network_registry_address,
        block_identifier=confirmed_block_identifier)

    assert (token_network_registry_proxy.settlement_timeout_min(
        BLOCK_ID_LATEST) == TEST_SETTLE_TIMEOUT_MIN)
    assert (token_network_registry_proxy.settlement_timeout_max(
        BLOCK_ID_LATEST) == TEST_SETTLE_TIMEOUT_MAX)
    assert (token_network_registry_proxy.get_token_network_created(
        block_identifier=BLOCK_ID_LATEST) == 0)

    bad_token_address = make_token_address()

    # Registering an address that has no deployed token contract must fail
    with pytest.raises(AddressWithoutCode):
        token_network_registry_proxy.add_token(
            token_address=bad_token_address,
            channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
            token_network_deposit_limit=TokenAmount(UINT256_MAX),
            given_block_identifier=confirmed_block_identifier,
        )

    test_token = deploy_token(
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        initial_amount=TokenAmount(1000),
        decimals=0,
        token_name="TKN",
        token_symbol="TKN",
        token_contract_name=token_contract_name,
    )
    test_token_address = TokenAddress(to_canonical_address(test_token.address))

    # Check that the proper exception is raised if the token does not comply with
    # the ERC20 interface. In this case the token does not have the totalSupply()
    # function implemented (see issue #3697), which is validated in the smart contract.
    with patch.object(Token, "total_supply", return_value=None):
        with pytest.raises(InvalidToken):
            token_network_registry_proxy.add_token(
                token_address=test_token_address,
                channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
                token_network_deposit_limit=TokenAmount(UINT256_MAX),
                given_block_identifier=deploy_client.get_confirmed_blockhash(),
            )

    # Register a valid token
    preblockhash = deploy_client.get_confirmed_blockhash()
    token_network_address = token_network_registry_proxy.add_token(
        token_address=test_token_address,
        channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
        token_network_deposit_limit=TokenAmount(UINT256_MAX),
        given_block_identifier=preblockhash,
    )
    assert token_network_address is not None
    assert (token_network_registry_proxy.get_token_network_created(
        block_identifier=BLOCK_ID_LATEST) == 1)

    # Re-registering the same token should fail with a recoverable error
    # because it is a race condition.
    with pytest.raises(RaidenRecoverableError):
        token_network_registry_proxy.add_token(
            token_address=test_token_address,
            channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
            token_network_deposit_limit=TokenAmount(UINT256_MAX),
            given_block_identifier=preblockhash,
        )

    logs = token_network_registry_proxy.filter_token_added_events()
    assert is_same_address(logs[0]["args"]["token_address"],
                           test_token.address)
    assert is_same_address(logs[0]["args"]["token_network_address"],
                           token_network_address)
    assert (token_network_registry_proxy.get_token_network(
        bad_token_address, BLOCK_ID_LATEST) is None)

    result_address = token_network_registry_proxy.get_token_network(
        test_token_address, BLOCK_ID_LATEST)

    assert result_address
    assert to_normalized_address(result_address) == to_normalized_address(
        token_network_address)

    with pytest.raises(ValueError):
        assert token_network_registry_proxy.get_token_network(
            None,
            BLOCK_ID_LATEST  # type: ignore
        )

    # Look up both unregistered and registered token addresses
    assert (token_network_registry_proxy.get_token_network(
        bad_token_address, BLOCK_ID_LATEST) is None)
    assert (token_network_registry_proxy.get_token_network(
        test_token_address, BLOCK_ID_LATEST) is not None)
    address = token_network_registry_proxy.get_token_network(
        TokenAddress(token_network_address), BLOCK_ID_LATEST)
    assert address is None
Example #25
def test_token_network_registry_allows_the_last_slot_to_be_used(
        deploy_client, token_network_registry_address, contract_manager,
        token_contract_name):
    proxy_manager = ProxyManager(
        rpc_client=deploy_client,
        contract_manager=contract_manager,
        metadata=ProxyManagerMetadata(
            token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER,
            filters_start_at=GENESIS_BLOCK_NUMBER,
        ),
    )
    confirmed_block_identifier = deploy_client.get_confirmed_blockhash()

    token_network_registry_proxy = proxy_manager.token_network_registry(
        token_network_registry_address,
        block_identifier=confirmed_block_identifier)

    assert (token_network_registry_proxy.get_token_network_created(
        block_identifier=BLOCK_ID_LATEST) == 0)

    test_token = deploy_token(
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        initial_amount=TokenAmount(1000),
        decimals=0,
        token_name="TKN",
        token_symbol="TKN",
        token_contract_name=token_contract_name,
    )
    first_token_address = TokenAddress(to_canonical_address(
        test_token.address))
    preblockhash = deploy_client.get_confirmed_blockhash()

    # Register a valid token; this is the last slot and should succeed
    token_network_registry_proxy.add_token(
        token_address=first_token_address,
        channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
        token_network_deposit_limit=TokenAmount(UINT256_MAX),
        given_block_identifier=preblockhash,
    )

    test_token = deploy_token(
        deploy_client=deploy_client,
        contract_manager=contract_manager,
        initial_amount=TokenAmount(1000),
        decimals=0,
        token_name="TKN",
        token_symbol="TKN",
        token_contract_name=token_contract_name,
    )
    second_token_address = TokenAddress(
        to_canonical_address(test_token.address))
    preblockhash = deploy_client.get_confirmed_blockhash()

    # Tries to register a new valid token after all slots have been used. This
    # has to fail.
    with pytest.raises(MaxTokenNetworkNumberReached):
        token_network_registry_proxy.add_token(
            token_address=second_token_address,
            channel_participant_deposit_limit=TokenAmount(UINT256_MAX),
            token_network_deposit_limit=TokenAmount(UINT256_MAX),
            given_block_identifier=preblockhash,
        )
Example #26
from raiden.utils.typing import BlockTimeout, FeeAmount, TokenAddress, TokenAmount
from scenario_player.utils.configuration.settings import (
    EnvironmentConfig,
    PFSSettingsConfig,
    ServiceSettingsConfig,
    SettingsConfig,
    UDCSettingsConfig,
    UDCTokenSettings,
)

dummy_env = EnvironmentConfig(
    pfs_fee=FeeAmount(100),
    environment_file_name="tests",
    environment_type="development",
    matrix_servers=[],
    transfer_token=TokenAddress(bytes([1] * 20)),
    pfs_with_fee=URI("http://www.example.com"),
    eth_rpc_endpoints=[URI("http://www.example.com")],
    ms_reward_with_margin=TokenAmount(1),
    settlement_timeout_min=BlockTimeout(100),
    raiden_client="raiden",
    wait_short=5,
    wait_long=10,
)


class TestSettingsConfig:
    @pytest.mark.parametrize("key", ["timeout", "gas_price"])
    def test_class_returns_expected_default_for_key(
        self, key, expected_defaults, minimal_definition_dict
    ):
Example #27
def get_blockchain_events(
    web3: Web3,
    token_network_addresses: List[TokenNetworkAddress],
    chain_state: BlockchainState,
    from_block: BlockNumber,
    to_block: BlockNumber,
) -> List[Event]:
    # Check if the current block was already processed
    if from_block > to_block:
        return []

    log.info(
        "Querying new block(s)",
        from_block=from_block,
        to_block=to_block,
        # When `to_block` == `from_block` we query one block, so add one
        num_blocks=to_block - from_block + 1,
    )

    # first check for new token networks and add to state
    registry_events = query_blockchain_events(
        web3=web3,
        contract_addresses=[chain_state.token_network_registry_address],
        from_block=from_block,
        to_block=to_block,
    )

    events: List[Event] = []
    for event_dict in registry_events:
        token_network_address = TokenNetworkAddress(
            to_canonical_address(event_dict["args"]["token_network_address"]))
        events.append(
            ReceiveTokenNetworkCreatedEvent(
                token_network_address=token_network_address,
                token_address=TokenAddress(
                    to_canonical_address(event_dict["args"]["token_address"])),
                block_number=event_dict["blockNumber"],
            ))
        token_network_addresses.append(token_network_address)

    # then check all token networks
    network_events = query_blockchain_events(
        web3=web3,
        contract_addresses=token_network_addresses,  # type: ignore
        from_block=from_block,
        to_block=to_block,
    )

    for event_dict in network_events:
        event = parse_token_network_event(event_dict)
        if event:
            events.append(event)

    # get events from monitoring service contract, this only queries the chain
    # if the monitor contract address is set in chain_state
    monitoring_events = get_monitoring_blockchain_events(
        web3=web3,
        monitor_contract_address=chain_state.monitor_contract_address,
        from_block=from_block,
        to_block=to_block,
    )
    events.extend(monitoring_events)

    # commit new block number
    events.append(UpdatedHeadBlockEvent(head_block_number=to_block))

    return events
Example #28
 def token_address(self, block_identifier: BlockIdentifier) -> TokenAddress:
     return TokenAddress(
         to_canonical_address(
             self.proxy.functions.token().call(block_identifier=block_identifier)
         )
     )
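A hypothetical read through the method above, assuming `token_network_proxy` is the proxy object that defines token_address and eth_utils is available:

from eth_utils import to_checksum_address

# `token_network_proxy` is a placeholder for the proxy object defined above.
token = token_network_proxy.token_address(block_identifier="latest")
print(to_checksum_address(token))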
Example #29
 def token_address(self,
                   block_identifier: BlockSpecification) -> TokenAddress:
     return TokenAddress(
         to_canonical_address(self.proxy.contract.functions.token().call(
             block_identifier=block_identifier)))
Example #30
def run_smoketest(print_step: StepPrinter, setup: RaidenTestSetup) -> None:
    print_step("Starting Raiden")

    app = None
    try:
        app = run_raiden_service(**setup.args)
        raiden_api = app.raiden_api
        assert raiden_api is not None  # for mypy
        partner_address = Address(b"1" * 20)

        block = BlockNumber(app.get_block_number() +
                            DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS)
        # Proxies now use the confirmed block hash to query the chain for
        # prerequisite checks. Wait a bit here to make sure that the confirmed
        # block hash contains the deployed token network or else things break
        wait_for_block(raiden=app, block_number=block, retry_timeout=1.0)

        raiden_api.channel_open(
            registry_address=TokenNetworkRegistryAddress(
                setup.contract_addresses[CONTRACT_TOKEN_NETWORK_REGISTRY]),
            token_address=TokenAddress(
                to_canonical_address(setup.token.address)),
            partner_address=partner_address,
        )
        raiden_api.set_total_channel_deposit(
            registry_address=TokenNetworkRegistryAddress(
                setup.contract_addresses[CONTRACT_TOKEN_NETWORK_REGISTRY]),
            token_address=TokenAddress(
                to_canonical_address(setup.token.address)),
            partner_address=partner_address,
            total_deposit=TEST_DEPOSIT_AMOUNT,
        )
        token_addresses = [to_checksum_address(setup.token.address)]  # type: ignore

        print_step("Running smoketest")

        raiden_service = app
        token_network_added_events = raiden_service.default_registry.filter_token_added_events()
        events_token_addresses = [
            event["args"]["token_address"]
            for event in token_network_added_events
        ]

        assert events_token_addresses == token_addresses

        token_networks = views.get_token_identifiers(
            views.state_from_raiden(raiden_service),
            raiden_service.default_registry.address)
        assert len(token_networks) == 1

        channel_state = views.get_channelstate_for(
            chain_state=views.state_from_raiden(raiden_service),
            token_network_registry_address=raiden_service.default_registry.address,
            token_address=token_networks[0],
            partner_address=partner_address,
        )
        assert channel_state

        distributable = channel.get_distributable(channel_state.our_state,
                                                  channel_state.partner_state)
        assert distributable == TEST_DEPOSIT_AMOUNT
        assert Balance(
            distributable) == channel_state.our_state.contract_balance
        assert channel.get_status(channel_state) == ChannelState.STATE_OPENED

        port_number = raiden_service.config.rest_api.port
        response = requests.get(
            f"http://localhost:{port_number}/api/v1/channels")

        assert response.status_code == HTTPStatus.OK

        response_json = json.loads(response.content)
        assert response_json[0]["partner_address"] == to_checksum_address(
            partner_address)
        assert response_json[0]["state"] == "opened"
        assert int(response_json[0]["balance"]) > 0
    finally:
        if app is not None:
            app.stop()
            app.greenlet.get()