Example #1
    def _encode_abi(cls, abi, arguments, data=None):
        argument_types = get_abi_input_types(abi)

        if not check_if_arguments_can_be_encoded(abi, arguments, {}):
            raise TypeError(
                "One or more arguments could not be encoded to the necessary "
                "ABI type.  Expected types are: {0}".format(
                    ', '.join(argument_types),
                )
            )

        try:
            encoded_arguments = encode_abi(
                argument_types,
                force_obj_to_bytes(arguments),
            )
        except EncodingError as e:
            raise TypeError(
                "One or more arguments could not be encoded to the necessary "
                "ABI type: {0}".format(str(e))
            )

        if data:
            return add_0x_prefix(
                force_bytes(remove_0x_prefix(data)) +
                force_bytes(remove_0x_prefix(encode_hex(encoded_arguments)))
            )
        else:
            return encode_hex(encoded_arguments)
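A minimal sketch of the underlying encoding step, assuming an eth_abi release that still exposes encode_abi (newer releases expose it as eth_abi.encode); the argument values are illustrative.

# Sketch only: two static arguments encode to two 32-byte ABI words.
from eth_abi import encode_abi
from eth_utils import encode_hex

encoded = encode_abi(["uint256", "address"], [7, "0x" + "11" * 20])
assert len(encoded) == 64
assert encode_hex(encoded).startswith("0x")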
Example #2
def to_hex(value):
    """
    Automatically converts any supported value into its hex representation.
    """
    if is_boolean(value):
        return "0x1" if value else "0x0"

    if is_dict(value):
        return encode_hex(json.dumps(value, sort_keys=True))

    if is_string(value):
        if is_prefixed(value, '-0x'):
            return from_decimal(value)
        elif is_0x_prefixed(value):
            return value
        else:
            return encode_hex(value)

    if is_integer(value):
        return from_decimal(value)

    raise TypeError(
        "Unsupported type: '{0}'.  Must be one of Boolean, Dictionary, String, "
        "or Integer.".format(repr(type(value)))
    )
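A usage sketch of the primitives the helper above builds on, assuming a current eth_utils release where encode_hex returns a 0x-prefixed str.

# Sketch only: encode_hex maps bytes to a 0x-prefixed hex string,
# while integers go through Python's built-in hex().
from eth_utils import encode_hex

assert encode_hex(b"abc") == "0x616263"
assert encode_hex(b"") == "0x"
assert hex(255) == "0xff"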
Example #3
def hex_encode_abi_type(abi_type, value, force_size=None):
    """
    Encodes value into a hex string in format of abi_type
    """
    validate_abi_type(abi_type)
    validate_abi_value(abi_type, value)

    data_size = force_size or size_of_type(abi_type)
    if is_array_type(abi_type):
        sub_type = sub_type_of_array_type(abi_type)
        return "".join([remove_0x_prefix(hex_encode_abi_type(sub_type, v, 256)) for v in value])
    elif is_bool_type(abi_type):
        return to_hex_with_size(value, data_size)
    elif is_uint_type(abi_type):
        return to_hex_with_size(value, data_size)
    elif is_int_type(abi_type):
        return to_hex_twos_compliment(value, data_size)
    elif is_address_type(abi_type):
        return pad_hex(value, data_size)
    elif is_bytes_type(abi_type):
        if is_bytes(value):
            return encode_hex(value)
        else:
            return value
    elif is_string_type(abi_type):
        return encode_hex(value)
    else:
        raise ValueError(
            "Unsupported ABI type: {0}".format(abi_type)
        )
Example #4
def to_hex(value=None, hexstr=None, text=None):
    """
    Automatically converts any supported value into its hex representation.

    Trims leading zeros, as defined in:
    https://github.com/ethereum/wiki/wiki/JSON-RPC#hex-value-encoding
    """
    assert_one_val(value, hexstr=hexstr, text=text)

    if hexstr is not None:
        return add_0x_prefix(hexstr.lower())

    if text is not None:
        return encode_hex(text.encode('utf-8'))

    if is_boolean(value):
        return "0x1" if value else "0x0"

    if is_dict(value):
        return encode_hex(json.dumps(value, sort_keys=True))

    if isinstance(value, bytes):
        return encode_hex(value)
    elif is_string(value):
        return to_hex(text=value)

    if is_integer(value):
        return hex(value)

    raise TypeError(
        "Unsupported type: '{0}'.  Must be one of Boolean, Dictionary, String, "
        "or Integer.".format(repr(type(value)))
    )
Example #5
 def to_dict(self):
     return {
         'type': self.__class__.__name__,
         'message_identifier': self.message_identifier,
         'secret': encode_hex(self.secret),
         'signature': encode_hex(self.signature),
     }
Example #6
def construct_event_topic_set(event_abi, arguments=None):
    if arguments is None:
        arguments = {}
    if isinstance(arguments, (list, tuple)):
        if len(arguments) != len(event_abi['inputs']):
            raise ValueError(
                "When passing an argument list, the number of arguments must "
                "match the event constructor."
            )
        arguments = {
            arg['name']: [arg_value]
            for arg, arg_value
            in zip(event_abi['inputs'], arguments)
        }

    normalized_args = {
        key: value if is_list_like(value) else [value]
        for key, value in arguments.items()
    }

    event_topic = encode_hex(event_abi_to_log_topic(event_abi))
    indexed_args = get_indexed_event_inputs(event_abi)
    zipped_abi_and_args = [
        (arg, normalized_args.get(arg['name'], [None]))
        for arg in indexed_args
    ]
    encoded_args = [
        [
            None if option is None else encode_hex(encode_single(arg['type'], option))
            for option in arg_options]
        for arg, arg_options in zipped_abi_and_args
    ]

    topics = list(normalize_topic_list([event_topic] + encoded_args))
    return topics
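A sketch of how the first entry of the topic set is derived, using an illustrative Transfer(address,address,uint256) event ABI that is not taken from the example above.

# Sketch only: the event topic is the keccak-256 hash of the canonical
# event signature, hex-encoded with a 0x prefix.
from eth_utils import encode_hex, event_abi_to_log_topic, keccak

event_abi = {
    "name": "Transfer",
    "type": "event",
    "inputs": [
        {"name": "_from", "type": "address", "indexed": True},
        {"name": "_to", "type": "address", "indexed": True},
        {"name": "_value", "type": "uint256", "indexed": False},
    ],
}

event_topic = encode_hex(event_abi_to_log_topic(event_abi))
assert event_topic == encode_hex(keccak(text="Transfer(address,address,uint256)"))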
Example #7
 def to_dict(self):
     return {
         'type': self.__class__.__name__,
         'message_identifier': self.message_identifier,
         'payment_identifier': self.payment_identifier,
         'secrethash': encode_hex(self.secrethash),
         'amount': self.amount,
         'signature': encode_hex(self.signature),
     }
Example #8
def generate_accounts(seeds):
    """Create private keys and addresses for all seeds.
    """
    return {
        seed: {
            'privatekey': encode_hex(sha3(seed)),
            'address': encode_hex(privatekey_to_address(sha3(seed))),
        }
        for seed in seeds
    }
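A sketch of the per-seed derivation, assuming sha3 above is keccak-256 as in the Raiden utilities; the seed value is illustrative.

# Sketch only: the 'privatekey' entry is the hex of keccak256(seed),
# i.e. 32 bytes rendered as 64 hex characters plus the 0x prefix.
from eth_utils import encode_hex, keccak

seed = b"alice"
privatekey = encode_hex(keccak(seed))
assert privatekey.startswith("0x") and len(privatekey) == 66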
Example #9
def test_function_access(
    owner,
    get_accounts,
    uraiden_contract,
    uraiden_instance,
    token_instance,
    get_channel):
    (A, B, C, D) = get_accounts(4)
    uraiden_instance2 = uraiden_contract()
    channel = get_channel(uraiden_instance, token_instance, 100, A, B)[:3]
    (sender, receiver, open_block_number) = channel

    uraiden_instance.call().getKey(*channel)
    uraiden_instance.call().getChannelInfo(*channel)

    # even if the call raises TransactionFailed, the function is public / external
    with pytest.raises(tester.TransactionFailed):
        uraiden_instance.transact().extractBalanceProofSignature(
            receiver,
            open_block_number,
            10,
            encode_hex(bytearray(65))
        )
    with pytest.raises(tester.TransactionFailed):
        uraiden_instance.transact().tokenFallback(sender, 10, encode_hex(bytearray(20)))
    with pytest.raises(tester.TransactionFailed):
        uraiden_instance.transact({'from': C}).createChannel(D, 10)
    with pytest.raises(tester.TransactionFailed):
        uraiden_instance.transact().topUp(receiver, open_block_number, 10)
    with pytest.raises(tester.TransactionFailed):
        uraiden_instance.transact().uncooperativeClose(receiver, open_block_number, 10)
    with pytest.raises(tester.TransactionFailed):
        uraiden_instance.transact().cooperativeClose(
            receiver,
            open_block_number,
            10,
            encode_hex(bytearray(65)),
            encode_hex(bytearray(65))
        )
    with pytest.raises(tester.TransactionFailed):
        uraiden_instance.transact().settle(receiver, open_block_number)

    # Test functions are private
    # raise ValueError("No matching functions found")
    with pytest.raises(ValueError):
        uraiden_instance.transact().createChannelPrivate(*channel)
    with pytest.raises(ValueError):
        uraiden_instance.transact().topUpPrivate(*channel, 10)
    with pytest.raises(ValueError):
        uraiden_instance.transact().initChallengePeriod(receiver, open_block_number, 10)
    with pytest.raises(ValueError):
        uraiden_instance.transact().settleChannel(*channel, 10)
Example #10
    def close(
            self,
            partner: typing.Address,
            nonce: typing.Nonce,
            balance_hash: typing.BalanceHash,
            additional_hash: typing.AdditionalHash,
            signature: typing.Signature,
    ):
        """ Close the channel using the provided balance proof.

        Raises:
            ChannelBusyError: If the channel is busy with another operation.
            ChannelIncorrectStateError: If the channel is not in the open state.
        """

        log_details = {
            'token_network': pex(self.address),
            'node': pex(self.node_address),
            'partner': pex(partner),
            'nonce': nonce,
            'balance_hash': encode_hex(balance_hash),
            'additional_hash': encode_hex(additional_hash),
            'signature': encode_hex(signature),
        }
        log.info('close called', **log_details)

        if not self.channel_is_opened(self.node_address, partner):
            raise ChannelIncorrectStateError(
                'Channel is not in an opened state. It cannot be closed.',
            )

        with self.channel_operations_lock[partner]:
            transaction_hash = self.proxy.transact(
                'closeChannel',
                partner,
                balance_hash,
                nonce,
                additional_hash,
                signature,
            )
            self.client.poll(unhexlify(transaction_hash))

            receipt_or_none = check_transaction_threw(self.client, transaction_hash)
            if receipt_or_none:
                log.critical('close failed', **log_details)
                if not self.channel_is_opened(self.node_address, partner):
                    raise ChannelIncorrectStateError(
                        'Channel is not in an opened state. It cannot be closed.',
                    )
                raise TransactionThrew('Close', receipt_or_none)

            log.info('close successful', **log_details)
Example #11
 def __repr__(self):
     return (
         '<'
         'LockedTransferUnsignedState id:{} token:{} balance_proof:{} '
         'lock:{} target:{}'
         '>'
     ).format(
         self.payment_identifier,
         encode_hex(self.token),
         self.balance_proof,
         self.lock,
         encode_hex(self.target),
     )
Example #12
 def __repr__(self):
     return (
         '<'
         'LockedTransferSignedState msgid:{} id:{} token:{} lock:{}'
         ' target:{}'
         '>'
     ).format(
         self.message_identifier,
         self.payment_identifier,
         encode_hex(self.token),
         self.lock,
         encode_hex(self.target),
     )
Example #13
    def _login_or_register(self):
        # password is signed server address
        password = encode_hex(self._sign(self._server_name.encode()))
        seed = int.from_bytes(self._sign(b'seed')[-32:], 'big')
        rand = Random()  # deterministic, random secret for username suffixes
        rand.seed(seed)
        # try login and register on first 5 possible accounts
        for i in range(5):
            base_username = to_normalized_address(self._raiden_service.address)
            username = base_username
            if i:
                username = f'{username}.{rand.randint(0, 0xffffffff):08x}'

            try:
                self._client.sync_token = None
                self._client.login(username, password)
                self.log.info(
                    'LOGIN',
                    homeserver=self._server_name,
                    server_url=self._server_url,
                    username=username,
                )
                break
            except MatrixRequestError as ex:
                if ex.code != 403:
                    raise
                self.log.debug(
                    'Could not login. Trying register',
                    homeserver=self._server_name,
                    server_url=self._server_url,
                    username=username,
                )
                try:
                    self._client.register_with_password(username, password)
                    self.log.info(
                        'REGISTER',
                        homeserver=self._server_name,
                        server_url=self._server_url,
                        username=username,
                    )
                    break
                except MatrixRequestError as ex:
                    if ex.code != 400:
                        raise
                    self.log.debug('Username taken. Continuing')
                    continue
        else:
            raise ValueError('Could not register or login!')
        # TODO: persist access_token, to avoid generating a new login every time
        name = encode_hex(self._sign(self._client.user_id.encode()))
        self._get_user(self._client.user_id).set_display_name(name)
Example #14
    def _set_function_info(self):
        self.abi = find_matching_fn_abi(self.contract_abi,
                                        self.function_identifier,
                                        self.args,
                                        self.kwargs)

        if self.function_identifier is FallbackFn:
            self.selector = encode_hex(b'')
        elif is_text(self.function_identifier):
            self.selector = encode_hex(function_abi_to_4byte_selector(self.abi))
        else:
            raise TypeError("Unsupported function identifier")

        self.arguments = merge_args_and_kwargs(self.abi, self.args, self.kwargs)
Example #15
    def all_events_filter(
            self,
            from_block: typing.BlockSpecification = None,
            to_block: typing.BlockSpecification = None,
    ) -> typing.Tuple[Filter, Filter]:

        channel_topics = [
            None,  # event topic is any
            encode_hex(encode_single('bytes32', self.channel_identifier)),  # channel_id
        ]

        # This will match the events:
        # ChannelOpened, ChannelNewDeposit, ChannelWithdraw, ChannelClosed,
        # NonClosingBalanceProofUpdated, ChannelSettled
        channel_filter = self.token_network.client.new_filter(
            contract_address=self.token_network.address,
            topics=channel_topics,
            from_block=from_block,
            to_block=to_block,
        )

        # This will match the events:
        # ChannelUnlocked
        #
        # These topics must not be joined with the channel_filter, otherwise
        # the filter ChannelSettled won't match (observed with geth
        # 1.8.11-stable-dea1ce05)

        event_unlock_abi = CONTRACT_MANAGER.get_event_abi(
            CONTRACT_TOKEN_NETWORK,
            EVENT_CHANNEL_UNLOCKED,
        )

        event_unlock_topic = encode_hex(event_abi_to_log_topic(event_unlock_abi))
        participant1_topic = encode_hex(self.participant1.rjust(32, b'\0'))
        participant2_topic = encode_hex(self.participant2.rjust(32, b'\0'))

        unlock_topics = [
            event_unlock_topic,
            [participant1_topic, participant2_topic],  # event participant1 is us or them
            [participant2_topic, participant1_topic],  # event participant2 is us or them
        ]

        unlock_filter = self.token_network.client.new_filter(
            contract_address=self.token_network.address,
            topics=unlock_topics,
            from_block=from_block,
            to_block=to_block,
        )
        return channel_filter, unlock_filter
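A sketch of the participant topic padding used above: a 20-byte address is left-padded to the 32-byte log-topic width before hex-encoding (the address bytes are illustrative).

from eth_utils import encode_hex

participant = b"\x11" * 20
topic = encode_hex(participant.rjust(32, b"\0"))
assert topic == "0x" + "00" * 12 + "11" * 20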
Example #16
def encode_payments(payments):
    args = []
    value_sum = 0
    for idx, v in payments:
        addr = ethereum.tester.accounts[idx]
        value_sum += v
        v = int(v)
        assert v < 2**96
        vv = zpad(int_to_big_endian(v), 12)
        mix = vv + addr
        assert len(mix) == 32
        print(encode_hex(mix), "v: ", v, "addr", encode_hex(addr))
        args.append(mix)
    return args, value_sum
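A sketch of the 32-byte payment word built above, assuming zpad left-pads with zero bytes like bytes.rjust; the value and address are illustrative.

# Sketch only: 12 bytes of big-endian value followed by a 20-byte address.
from eth_utils import encode_hex, int_to_big_endian

value = 10**18
addr = b"\x22" * 20
mix = int_to_big_endian(value).rjust(12, b"\0") + addr
assert len(mix) == 32
print(encode_hex(mix), "v:", value, "addr", encode_hex(addr))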
Example #17
def test_cooperative_wrong_balance_proof(
        channel_manager: ChannelManager,
        confirmed_open_channel: Channel,
        sender_address: str
):
    channel_id = (confirmed_open_channel.sender, confirmed_open_channel.block)
    channel_rec = channel_manager.channels[channel_id]

    sig1 = encode_hex(confirmed_open_channel.create_transfer(5))
    channel_manager.register_payment(sender_address, confirmed_open_channel.block, 5, sig1)

    sig2 = encode_hex(confirmed_open_channel.create_transfer(1))
    with pytest.raises(InvalidBalanceProof):
        channel_manager.sign_close(sender_address, confirmed_open_channel.block, sig2)
    assert channel_rec.is_closed is False
Example #18
 def to_dict(self):
     return {
         'type': self.__class__.__name__,
         'chain_id': self.chain_id,
         'message_identifier': self.message_identifier,
         'payment_identifier': self.payment_identifier,
         'secret': encode_hex(self.secret),
         'nonce': self.nonce,
         'token_network_address': to_normalized_address(self.token_network_address),
         'channel': encode_hex(self.channel),
         'transferred_amount': self.transferred_amount,
         'locked_amount': self.locked_amount,
         'locksroot': encode_hex(self.locksroot),
         'signature': encode_hex(self.signature),
     }
Example #19
def geth_node_config(miner_pkey, p2p_port, rpc_port):
    address = privatekey_to_address(miner_pkey)
    pub = remove_0x_prefix(encode_hex(privtopub(miner_pkey)))

    config = {
        'nodekey': miner_pkey,
        'nodekeyhex': remove_0x_prefix(encode_hex(miner_pkey)),
        'pub': pub,
        'address': address,
        'port': p2p_port,
        'rpcport': rpc_port,
        'enode': f'enode://{pub}@127.0.0.1:{p2p_port}',
    }

    return config
Example #20
def wait_for_txs(client_or_web3, txhashes, timeout=360):
    if isinstance(client_or_web3, Web3):
        web3 = client_or_web3
    else:
        web3 = client_or_web3.web3
    start = time.monotonic()
    outstanding = False
    txhashes = txhashes[:]
    while txhashes and time.monotonic() - start < timeout:
        remaining_timeout = timeout - (time.monotonic() - start)
        if outstanding != len(txhashes) or int(remaining_timeout) % 10 == 0:
            outstanding = len(txhashes)
            log.debug(
                "Waiting for tx confirmations",
                outstanding=outstanding,
                timeout_remaining=int(remaining_timeout),
            )
        for txhash in txhashes[:]:
            tx = web3.eth.getTransaction(txhash)
            if tx and tx['blockNumber'] is not None:
                txhashes.remove(txhash)
            time.sleep(.1)
        time.sleep(1)
    if len(txhashes):
        txhashes_str = ', '.join(encode_hex(txhash) for txhash in txhashes)
        raise ScenarioTxError(
            f"Timeout waiting for txhashes: {txhashes_str}",
        )
Example #21
def create_signed_transaction(
        private_key: str,
        web3: Web3,
        to: str,
        value: int=0,
        data=b'',
        nonce_offset: int = 0,
        gas_price: Union[int, None] = None,
        gas_limit: int = NETWORK_CFG.POT_GAS_LIMIT
) -> str:
    """
    Creates a signed on-chain transaction compliant with EIP155.
    """
    if gas_price is None:
        gas_price = NETWORK_CFG.GAS_PRICE
    tx = create_transaction(
        web3=web3,
        from_=privkey_to_addr(private_key),
        to=to,
        value=value,
        data=data,
        nonce_offset=nonce_offset,
        gas_price=gas_price,
        gas_limit=gas_limit
    )
    sign_transaction(tx, private_key, int(web3.version.network))
    return encode_hex(rlp.encode(tx))
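A sketch of the final serialization step only: rlp.encode yields raw bytes and encode_hex turns them into the 0x-prefixed string handed to the node (the payload here is a stand-in, not a real signed transaction).

import rlp
from eth_utils import encode_hex

raw = rlp.encode([b"\x01", b"\x02"])  # stand-in payload, not a signed tx
assert encode_hex(raw).startswith("0x")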
Example #22
def test_receiver_validation(channel_manager, client, wait_for_blocks):
    n = 100
    # open channel
    channel = client.open_channel(channel_manager.state.receiver, n)
    wait_for_blocks(channel_manager.blockchain.n_confirmations)
    gevent.sleep(channel_manager.blockchain.poll_interval)
    assert (channel.sender, channel.block) in channel_manager.channels

    # prepare balance proofs
    t_start = time.time()
    balance_proofs = [encode_hex(channel.create_transfer(1)) for _ in range(n)]
    t_diff = time.time() - t_start
    log.info("%d balance proofs prepared in %s (%f / s)",
             n, datetime.timedelta(seconds=t_diff), n / t_diff)

    # validate
    t_start = time.time()
    for i, balance_proof in enumerate(balance_proofs):
        log.debug('Transfer {}'.format(i))
        sender, received = channel_manager.register_payment(
            channel.sender,
            channel.block,
            i + 1,
            balance_proof)
        assert sender == channel.sender
        assert received == 1

    t_diff = time.time() - t_start
    log.info("%d balance proofs verified in %s (%f / s)",
             n, datetime.timedelta(seconds=t_diff), n / t_diff)
Example #23
def create_signed_contract_transaction(
        private_key: str,
        contract: Contract,
        func_name: str,
        args: List[Any],
        value: int=0,
        nonce_offset: int = 0,
        gas_price: Union[int, None] = None,
        gas_limit: int = NETWORK_CFG.GAS_LIMIT
) -> str:
    """
    Creates a signed on-chain contract transaction compliant with EIP155.
    """
    if gas_price is None:
        gas_price = NETWORK_CFG.GAS_PRICE
    tx = create_contract_transaction(
        contract=contract,
        from_=privkey_to_addr(private_key),
        func_name=func_name,
        args=args,
        value=value,
        nonce_offset=nonce_offset,
        gas_price=gas_price,
        gas_limit=gas_limit
    )
    sign_transaction(tx, private_key, int(contract.web3.version.network))
    return encode_hex(rlp.encode(tx))
Example #24
 def to_dict(self):
     return {
         'type': self.__class__.__name__,
         'amount': self.amount,
         'expiration': self.expiration,
         'secrethash': encode_hex(self.secrethash),
     }
Example #25
def test_cooperative(
        channel_manager: ChannelManager,
        confirmed_open_channel: Channel,
        receiver_address: str,
        web3: Web3,
        token_contract: Contract,
        wait_for_blocks,
        sender_address: str
):
    blockchain = channel_manager.blockchain
    channel_id = (confirmed_open_channel.sender, confirmed_open_channel.block)

    sig1 = encode_hex(confirmed_open_channel.create_transfer(5))
    channel_manager.register_payment(sender_address, confirmed_open_channel.block, 5, sig1)

    receiver_sig = channel_manager.sign_close(sender_address, confirmed_open_channel.block, 5)
    channel_rec = channel_manager.channels[channel_id]
    assert channel_rec.is_closed is True
    block_before = web3.eth.blockNumber
    confirmed_open_channel.close_cooperatively(receiver_sig)
    wait_for_blocks(blockchain.n_confirmations)
    gevent.sleep(blockchain.poll_interval)
    logs = get_logs(token_contract, 'Transfer', from_block=block_before - 1)
    assert len([l for l in logs
                if is_same_address(l['args']['_to'], receiver_address) and
                l['args']['_value'] == 5]) == 1
    assert len([l for l in logs
                if is_same_address(l['args']['_to'], sender_address) and
                l['args']['_value'] == 5]) == 1
    wait_for_blocks(blockchain.n_confirmations)
    gevent.sleep(blockchain.poll_interval)
    assert channel_id not in channel_manager.channels
Example #26
def test_static_price(
        empty_proxy: PaywalledProxy,
        api_endpoint_address: str,
        client: Client,
        wait_for_blocks
):
    proxy = empty_proxy
    endpoint_url = "http://" + api_endpoint_address

    proxy.add_paywalled_resource(StaticPriceResource, '/resource', 3)

    # test GET
    response = requests.get(endpoint_url + '/resource')
    assert response.status_code == 402
    headers = HTTPHeaders.deserialize(response.headers)
    assert int(headers.price) == 3

    channel = client.get_suitable_channel(headers.receiver_address, int(headers.price) * 4)
    wait_for_blocks(6)
    channel.update_balance(int(headers.price))

    headers = Munch()
    headers.balance = str(channel.balance)
    headers.balance_signature = encode_hex(channel.balance_sig)
    headers.sender_address = channel.sender
    headers.open_block = str(channel.block)
    headers = HTTPHeaders.serialize(headers)

    response = requests.get(endpoint_url + '/resource', headers=headers)
    assert response.status_code == 200
    assert response.text.strip() == 'GET'

    assert_method(requests.post, endpoint_url + '/resource', headers, channel, 'POST')
    assert_method(requests.put, endpoint_url + '/resource', headers, channel, 'PUT')
    assert_method(requests.delete, endpoint_url + '/resource', headers, channel, 'DEL')
Example #27
    def send_transaction(
            self,
            to: Address,
            value: int = 0,
            data: bytes = b'',
            startgas: int = None,
            gasprice: int = None,
    ):
        """ Helper to send signed messages.

        This method will use the `privkey` provided in the constructor to
        locally sign the transaction. This requires an extended server
        implementation that accepts the variables v, r, and s.
        """
        if to == to_canonical_address(NULL_ADDRESS):
            warnings.warn('For contract creation the empty string must be used.')

        transaction = dict(
            nonce=self.nonce(),
            gasPrice=gasprice or self.gasprice(),
            gas=self.check_startgas(startgas),
            value=value,
            data=data,
        )

        # add the to address if not deploying a contract
        if to != b'':
            transaction['to'] = to_checksum_address(to)

        signed_txn = self.web3.eth.account.signTransaction(transaction, self.privkey)

        result = self.web3.eth.sendRawTransaction(signed_txn.rawTransaction)
        encoded_result = encode_hex(result)
        return remove_0x_prefix(encoded_result)
Example #28
def test_balances(
        channel_manager: ChannelManager,
        confirmed_open_channel: Channel,
        wait_for_blocks,
        sender_address: str,
        use_tester: bool
):
    blockchain = channel_manager.blockchain
    initial_liquid_balance = channel_manager.get_liquid_balance()
    initial_locked_balance = channel_manager.get_locked_balance()
    if use_tester:
        assert initial_liquid_balance == 0
        assert initial_locked_balance == 0

    sig = encode_hex(confirmed_open_channel.create_transfer(5))
    channel_manager.register_payment(sender_address, confirmed_open_channel.block, 5, sig)

    assert channel_manager.get_liquid_balance() == initial_liquid_balance
    assert channel_manager.get_locked_balance() == 5

    receiver_sig = channel_manager.sign_close(sender_address, confirmed_open_channel.block, 5)
    confirmed_open_channel.close_cooperatively(receiver_sig)
    wait_for_blocks(blockchain.n_confirmations)
    gevent.sleep(blockchain.poll_interval)

    assert channel_manager.get_liquid_balance() == initial_liquid_balance + 5
    assert channel_manager.get_locked_balance() == initial_locked_balance
Example #29
    def getStorageAt(self, address, position, at_block):
        if not is_integer(position) or position < 0:
            raise TypeError("Position of storage must be a whole number, but was: %r" % position)

        with state_at_block(self._chain, at_block) as state:
            stored_val = state.get_storage(address, position)
        return encode_hex(int_to_big_endian(stored_val))
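A sketch of the return formatting above: the storage word is hex-encoded from its big-endian bytes, so small values come back short and unpadded.

from eth_utils import encode_hex, int_to_big_endian

assert encode_hex(int_to_big_endian(255)) == "0xff"
assert encode_hex(int_to_big_endian(1)) == "0x01"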
Example #30
def test_explicit_json(
        empty_proxy: PaywalledProxy,
        api_endpoint_address: str,
        client: Client,
        wait_for_blocks
):
    proxy = empty_proxy
    endpoint_url = "http://" + api_endpoint_address

    proxy.add_paywalled_resource(JSONResource, '/resource', 3)

    # test GET
    response = requests.get(endpoint_url + '/resource')
    assert response.status_code == 402
    headers = HTTPHeaders.deserialize(response.headers)
    assert int(headers.price) == 3

    channel = client.get_suitable_channel(headers.receiver_address, int(headers.price) * 4)
    wait_for_blocks(6)
    channel.update_balance(int(headers.price))

    headers = Munch()
    headers.balance = str(channel.balance)
    headers.balance_signature = encode_hex(channel.balance_sig)
    headers.sender_address = channel.sender
    headers.open_block = str(channel.block)
    headers = HTTPHeaders.serialize(headers)

    response = requests.get(endpoint_url + '/resource', headers=headers)
    assert response.status_code == 200
    # If headers don't merge properly, this results in 'application/json,application/json'.
    assert response.headers['Content-Type'] == 'application/json'
    assert response.json() == {'GET': 1}
Example #31
async def test_lightchain_integration(request, event_loop, caplog,
                                      geth_ipc_path, enode, geth_process):
    """Test LightChainSyncer/LightPeerChain against a running geth instance.

    In order to run this manually, you can use `tox -e py36-lightchain_integration` or:

        pytest --integration --capture=no tests/trinity/integration/test_lightchain_integration.py

    The fixture for this test was generated with:

        geth --testnet --syncmode full

    It only needs the first 11 blocks for this test to succeed.
    """
    if not pytest.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    # will almost certainly want verbose logging in a failure
    caplog.set_level(logging.DEBUG)

    # make sure geth has been launched
    wait_for_socket(geth_ipc_path)

    remote = Node.from_uri(enode)
    base_db = MemoryDB()
    chaindb = FakeAsyncChainDB(base_db)
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    headerdb = FakeAsyncHeaderDB(base_db)
    peer_pool = PeerPool(
        LESPeer,
        FakeAsyncHeaderDB(base_db),
        ROPSTEN_NETWORK_ID,
        ecies.generate_privkey(),
        ROPSTEN_VM_CONFIGURATION,
    )
    chain = FakeAsyncRopstenChain(base_db)
    syncer = LightChainSyncer(chain, chaindb, peer_pool)
    syncer.min_peers_to_sync = 1
    peer_chain = LightPeerChain(headerdb, peer_pool)

    asyncio.ensure_future(peer_pool.run())
    asyncio.ensure_future(connect_to_peers_loop(peer_pool, tuple([remote])))
    asyncio.ensure_future(peer_chain.run())
    asyncio.ensure_future(syncer.run())
    await asyncio.sleep(0)  # Yield control to give the LightChainSyncer a chance to start

    def finalizer():
        event_loop.run_until_complete(peer_pool.cancel())
        event_loop.run_until_complete(peer_chain.cancel())
        event_loop.run_until_complete(syncer.cancel())

    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while headerdb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)

    await asyncio.wait_for(wait_for_header_sync(n), 5)

    # https://ropsten.etherscan.io/block/11
    header = headerdb.get_canonical_block_header_by_number(n)
    body = await peer_chain.get_block_body_by_hash(header.hash)
    assert len(body['transactions']) == 15

    receipts = await peer_chain.get_receipts(header.hash)
    assert len(receipts) == 15
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')

    assert len(peer_pool) == 1
    head_info = peer_pool.highest_td_peer.head_info
    head = await peer_chain.get_block_header_by_hash(head_info.block_hash)
    assert head.block_number == head_info.block_number

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to lookup the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('0x8B09D9ac6A4F7778fCb22852e879C7F3B2bEeF81')
    contract_code = await peer_chain.get_contract_code(head.hash,
                                                       contract_addr)
    assert encode_hex(contract_code) == '0x600060006000600060006000356000f1'

    account = await peer_chain.get_account(head.hash, contract_addr)
    assert account.code_hash == keccak(contract_code)
    assert account.balance == 0
Example #32
    def run_test(self):
        priv_key = default_config["GENESIS_PRI_KEY"]
        sender = eth_utils.encode_hex(priv_to_addr(priv_key))

        self.rpc = RpcClient(self.nodes[0])

        # apply filter, we expect no logs
        filter = Filter(from_epoch="earliest", to_epoch="latest_mined")
        result = self.rpc.get_logs(filter)
        assert_equal(result, [])

        # deploy contract
        bytecode_file = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), CONTRACT_PATH)
        assert (os.path.isfile(bytecode_file))
        bytecode = open(bytecode_file).read()
        _, contractAddr = self.deploy_contract(sender, priv_key, bytecode)

        # apply filter, we expect a single log with 2 topics
        filter = Filter(from_epoch="earliest", to_epoch="latest_mined")
        logs0 = self.rpc.get_logs(filter)

        self.assert_response_format_correct(logs0)
        assert_equal(len(logs0), 1)

        assert_equal(len(logs0[0]["topics"]), 2)
        assert_equal(logs0[0]["topics"][0], CONSTRUCTED_TOPIC)
        assert_equal(logs0[0]["topics"][1], self.address_to_topic(sender))
        assert_equal(logs0[0]["data"], self.address_to_topic(sender))

        # call method
        receipt = self.call_contract(sender,
                                     priv_key,
                                     contractAddr,
                                     encode_hex_0x(keccak(b"foo()")),
                                     storage_limit=64)

        # apply filter, we expect two logs with 2 and 3 topics respectively
        filter = Filter(from_epoch="earliest", to_epoch="latest_mined")
        logs1 = self.rpc.get_logs(filter)

        self.assert_response_format_correct(logs1)
        assert_equal(len(logs1), 2)
        assert_equal(logs1[0], logs0[0])

        assert_equal(len(logs1[1]["topics"]), 3)
        assert_equal(logs1[1]["topics"][0], CALLED_TOPIC)
        assert_equal(logs1[1]["topics"][1], self.address_to_topic(sender))
        assert_equal(logs1[1]["topics"][2], self.number_to_topic(1))

        # apply filter for specific block, we expect a single log with 3 topics
        filter = Filter(block_hashes=[receipt["blockHash"]])
        logs = self.rpc.get_logs(filter)

        self.assert_response_format_correct(logs)
        assert_equal(len(logs), 1)
        assert_equal(logs[0], logs1[1])

        # call many times
        for ii in range(0, NUM_CALLS - 2):
            self.call_contract(sender,
                               priv_key,
                               contractAddr,
                               encode_hex_0x(keccak(b"foo()")),
                               storage_limit=0)

        # apply filter, we expect NUM_CALLS log entries with increasing uint32 fields
        filter = Filter(from_epoch="earliest", to_epoch="latest_mined")
        logs = self.rpc.get_logs(filter)

        self.assert_response_format_correct(logs)
        assert_equal(len(logs), NUM_CALLS)

        for ii in range(2, NUM_CALLS):
            assert_equal(len(logs[ii]["topics"]), 3)
            assert_equal(logs[ii]["topics"][0], CALLED_TOPIC)
            assert (logs[ii]["topics"][1] == self.address_to_topic(sender))
            assert_equal(logs[ii]["topics"][2], self.number_to_topic(ii))

        # apply filter for specific topics
        filter = Filter(topics=[CONSTRUCTED_TOPIC])
        logs = self.rpc.get_logs(filter)
        self.assert_response_format_correct(logs)
        assert_equal(len(logs), 1)

        filter = Filter(topics=[CALLED_TOPIC])
        logs = self.rpc.get_logs(filter)
        self.assert_response_format_correct(logs)
        assert_equal(len(logs), NUM_CALLS - 1)

        filter = Filter(topics=[None, self.address_to_topic(sender)])
        logs = self.rpc.get_logs(filter)
        self.assert_response_format_correct(logs)
        assert_equal(len(logs), NUM_CALLS)

        # find logs with `CALLED_TOPIC` as 1st topic and `3` or `4` as 3rd topic
        filter = Filter(topics=[
            CALLED_TOPIC, None,
            [self.number_to_topic(3),
             self.number_to_topic(4)]
        ])
        logs = self.rpc.get_logs(filter)
        self.assert_response_format_correct(logs)
        assert_equal(len(logs), 2)

        # apply filter with limit
        filter = Filter(limit=("0x%x" % (NUM_CALLS // 2)))
        logs = self.rpc.get_logs(filter)

        self.assert_response_format_correct(logs)
        assert_equal(len(logs), NUM_CALLS // 2)

        # apply filter for specific contract address
        _, contractAddr2 = self.deploy_contract(sender, priv_key, bytecode)

        filter = Filter(address=[contractAddr])
        logs = self.rpc.get_logs(filter)
        self.assert_response_format_correct(logs)
        assert_equal(len(logs), NUM_CALLS)

        filter = Filter(address=[contractAddr2])
        logs = self.rpc.get_logs(filter)
        self.assert_response_format_correct(logs)
        assert_equal(len(logs), 1)

        # apply filter to very first epoch, we expect no logs
        filter = Filter(from_epoch="0x0", to_epoch="0x0")
        result = self.rpc.get_logs(filter)
        assert_equal(result, [])

        self.log.info("Pass")
Example #33
    def register_secret_batch(self, secrets: List[Secret]) -> List[TransactionHash]:
        """Register a batch of secrets. Check if they are already registered at
        the given block identifier."""
        secrets_to_register = list()
        secrethashes_to_register = list()
        secrethashes_not_sent = list()
        secrets_results = list()
        transaction_result = AsyncResult()
        wait_for = set()

        # secret registration has no preconditions:
        #
        # - The action does not depend on any state, it's always valid to call
        #   it.
        # - This action is always susceptible to race conditions.
        #
        # Therefore this proxy only needs to detect if the secret is already
        # registered, to avoid sending obviously unnecessary transactions, and
        # it has to handle race conditions.

        with self._open_secret_transactions_lock:
            verification_block_hash = self.client.get_confirmed_blockhash()

            for secret in secrets:
                secrethash = sha256_secrethash(secret)
                secrethash_hex = encode_hex(secrethash)

                # Do the local test on `open_secret_transactions` first, then
                # if necessary do an RPC call.
                #
                # The call to `is_secret_registered` has two conflicting
                # requirements:
                #
                # - Avoid sending duplicated transactions for the same lock
                # - Operating on a consistent/confirmed view of the blockchain
                #   (if a secret has been registered in a block that is not
                #   confirmed it doesn't count yet, an optimization would be to
                #   *not* send the transaction and wait for the confirmation)
                #
                # The code below respects the consistent blockchain view,
                # meaning that if this proxy method is called with an old
                # blockhash an unnecessary transaction will be sent, and the
                # error will be treated as a race-condition.
                other_result = self.open_secret_transactions.get(secret)

                if other_result is not None:
                    wait_for.add(other_result)
                    secrethashes_not_sent.append(secrethash_hex)
                    secrets_results.append(other_result)
                elif not self.is_secret_registered(secrethash, verification_block_hash):
                    secrets_to_register.append(secret)
                    secrethashes_to_register.append(secrethash_hex)
                    self.open_secret_transactions[secret] = transaction_result
                    secrets_results.append(transaction_result)

        # From here on the lock is not required. Context-switches will happen
        # for the gas estimation and the transaction, however the
        # synchronization data is limited to the open_secret_transactions
        if secrets_to_register:
            log_details = {"secrethashes_not_sent": secrethashes_not_sent}
            self._register_secret_batch(secrets_to_register, transaction_result, log_details)

        gevent.joinall(wait_for, raise_error=True)
        return [result.get() for result in secrets_results]
Example #34
def data_encoder(data: bytes, length: int = 0) -> str:
    data = remove_0x_prefix(encode_hex(data))
    return add_0x_prefix(
        data.rjust(length * 2, b'0').decode(),
    )
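The rjust(..., b'0').decode() call above assumes an eth_utils version where encode_hex returns bytes; with current releases it returns str, so an equivalent sketch of the intended behaviour pads a string instead (data_encoder_str is a hypothetical name).

from eth_utils import add_0x_prefix, encode_hex, remove_0x_prefix

def data_encoder_str(data: bytes, length: int = 0) -> str:
    # Same left-padding to `length` bytes, written against the
    # str-returning encode_hex of current eth_utils releases.
    hex_str = remove_0x_prefix(encode_hex(data))
    return add_0x_prefix(hex_str.rjust(length * 2, "0"))

assert data_encoder_str(b"\x01", 4) == "0x00000001"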
Example #35
 def generate_block_with_fake_txs(self, txs: list, adaptive=False, tx_data_len: int = 0) -> str:
     encoded_txs = eth_utils.encode_hex(rlp.encode(txs))
     block_hash = self.node.test_generateblockwithfaketxs(encoded_txs, adaptive, tx_data_len)
     assert_is_hash_string(block_hash)
     return block_hash
Example #36
 def rand_hash(self, seed: bytes = None) -> str:
     if seed is None:
         seed = os.urandom(32)
     
     return eth_utils.encode_hex(sha3_256(seed))
Example #37
    def generate_blocks(self):
        priv_key = default_config["GENESIS_PRI_KEY"]
        sender = encode_hex(priv_to_addr(priv_key))

        # deploy contract
        bytecode_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), CONTRACT_PATH)
        assert(os.path.isfile(bytecode_file))
        bytecode = open(bytecode_file).read()
        _, contract_addr = self.deploy_contract(sender, priv_key, bytecode)
        self.log.info("Contract deployed")

        parent_hash = self.rpc[FULLNODE0].block_by_epoch("latest_mined")['hash']
        nonce = self.rpc[FULLNODE0].get_nonce(sender)

        hashes = []
        num_events = 0
        num_blamed = 0

        for _ in range(0, NORMAL_CHAIN_LENGTH):
            rnd = random.random()

            # ~20% of all blocks have events
            if rnd < 0.2:
                tx = self.rpc[FULLNODE0].new_contract_tx(
                    receiver=contract_addr,
                    data_hex=encode_hex_0x(keccak(b"foo()")),
                    sender=sender, priv_key=priv_key,
                    storage_limit=64,
                    nonce = nonce
                )

                parent_hash = self.rpc[FULLNODE0].generate_custom_block(parent_hash=parent_hash, txs=[tx], referee=[])

                nonce += 1
                hashes.append(tx.hash_hex())
                num_events += 1

            # ~10% of all blocks are incorrect and blamed
            elif rnd < 0.3:
                blame_info = {}
                blame_info['blame'] = "0x1"
                blame_info['deferredStateRoot'] = "0x1111111111111111111111111111111111111111111111111111111111111111"
                parent_hash = self.nodes[FULLNODE0].test_generateblockwithblameinfo(1, 0, blame_info)

                num_blamed += 1

            # the rest are empty
            else:
                parent_hash = self.rpc[FULLNODE0].generate_block_with_parent(parent_hash=parent_hash)

            # TODO: generate blamed blocks with txs in them (overlap)

        # generate a pivot chain section where we might not be able to decide blaming
        # in this section, all headers will have blame=1
        # odd-numbered blocks are incorrect, even-numbered blocks are correct
        for _ in range(0, BLAMED_SECTION_LENGTH):
            blame_info = {}
            blame_info['blame'] = "0x1"
            parent_hash = self.nodes[FULLNODE0].test_generateblockwithblameinfo(1, 0, blame_info)

        num_blamed += BLAMED_SECTION_LENGTH // 2

        # mine some more blocks to keep blame check offset
        for _ in range(0, BLAMED_SECTION_LENGTH):
            parent_hash = self.rpc[FULLNODE0].generate_custom_block(parent_hash=parent_hash, txs=[], referee=[])

        # check if all txs have been executed successfully
        for hash in hashes:
            receipt = self.rpc[FULLNODE0].get_transaction_receipt(hash)
            assert_equal(receipt["outcomeStatus"], "0x0")

        length = NORMAL_CHAIN_LENGTH + 2 * BLAMED_SECTION_LENGTH
        self.log.info(f"Generated {length} blocks with {num_events} events and {num_blamed} incorrect blocks")
Example #38
 def _serialize(self, value, attr, obj):
     return encode_hex(value)
Example #39
def geth_create_blockchain(
    deploy_key,
    web3,
    private_keys,
    blockchain_private_keys,
    rpc_ports,
    p2p_ports,
    base_datadir,
    verbosity,
    random_marker,
    genesis_path=None,
    logdirectory=None,
):
    # pylint: disable=too-many-locals,too-many-statements,too-many-arguments,too-many-branches

    nodes_configuration = []
    key_p2p_rpc = zip(blockchain_private_keys, p2p_ports, rpc_ports)

    for pos, (key, p2p_port, rpc_port) in enumerate(key_p2p_rpc):
        config = dict()

        address = privatekey_to_address(key)
        # make the first node miner
        if pos == 0:
            config['unlock'] = 0

        config['nodekey'] = key
        config['nodekeyhex'] = remove_0x_prefix(encode_hex(key))
        config['pub'] = remove_0x_prefix(encode_hex(privtopub(key)))
        config['address'] = address
        config['port'] = p2p_port
        config['rpcport'] = rpc_port
        config['enode'] = 'enode://{pub}@127.0.0.1:{port}'.format(
            pub=config['pub'],
            port=config['port'],
        )
        nodes_configuration.append(config)

    for config in nodes_configuration:
        config['bootnodes'] = ','.join(node['enode']
                                       for node in nodes_configuration)

    all_keys = set(private_keys)
    all_keys.add(deploy_key)
    all_keys = sorted(all_keys)

    cmds = []
    for i, config in enumerate(nodes_configuration):
        # HACK: Use only the first 8 characters to avoid golang's issue
        # https://github.com/golang/go/issues/6895 (IPC bind fails with path
        # longer than 108 characters).
        # BSD (and therefore macOS) socket path length limit is 104 chars
        nodekey_part = config['nodekeyhex'][:8]
        nodedir = os.path.join(base_datadir, nodekey_part)
        node_genesis_path = os.path.join(nodedir, 'custom_genesis.json')

        assert len(nodedir + '/geth.ipc') <= 104, 'geth data path is too large'

        os.makedirs(nodedir)

        if genesis_path is None:
            geth_bare_genesis(node_genesis_path, all_keys, random_marker)
        else:
            shutil.copy(genesis_path, node_genesis_path)

        geth_init_datadir(nodedir, node_genesis_path)

        if 'unlock' in config:
            geth_create_account(nodedir, all_keys[i])

        commandline = geth_to_cmd(config, nodedir, verbosity)
        cmds.append(commandline)

    # save current term settings before running geth
    if isinstance(
            sys.stdin,
            io.IOBase):  # check that the test is running on non-capture mode
        term_settings = termios.tcgetattr(sys.stdin)

    stdout = None
    stderr = None
    processes_list = []
    for pos, cmd in enumerate(cmds):
        if logdirectory:
            log_path = os.path.join(logdirectory, str(pos))
            logfile = open(log_path, 'w')

            stdout = logfile
            stderr = logfile

        if '--unlock' in cmd:
            cmd.append('--mine')
            process = subprocess.Popen(
                cmd,
                universal_newlines=True,
                stdin=subprocess.PIPE,
                stdout=stdout,
                stderr=stderr,
            )

            # --password won't work, write the password to stdin to unlock
            process.stdin.write(DEFAULT_PASSPHRASE + os.linesep)  # Passphrase:
            process.stdin.write(DEFAULT_PASSPHRASE +
                                os.linesep)  # Repeat passphrase:
        else:
            process = subprocess.Popen(
                cmd,
                universal_newlines=True,
                stdout=stdout,
                stderr=stderr,
            )

        processes_list.append(process)

    try:
        geth_wait_and_check(web3, private_keys, random_marker)

        for process in processes_list:
            process.poll()

            if process.returncode is not None:
                raise ValueError('geth failed to start')

    except (ValueError, RuntimeError) as e:
        # If geth_wait_and_check or the above loop throw an exception make sure
        # we don't end up with a rogue geth process running in the background
        for process in processes_list:
            process.terminate()
        raise e

    finally:
        # re-enter echo mode (disabled by the geth passphrase prompt)
        if isinstance(sys.stdin, io.IOBase):
            termios.tcsetattr(sys.stdin, termios.TCSADRAIN, term_settings)

    return processes_list
Example #40
def sign_as_hex(text2sign, priv_key):
    """see pyethereum ecsign"""
    pk = coincurve.PrivateKey(normalize_key(priv_key))

    signature = pk.sign_recoverable(eth_text2sign(text2sign), hasher=None)
    return encode_hex(signature)
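A sketch of the signature shape, assuming coincurve's recoverable signing as used above: the 65-byte signature hex-encodes to 130 characters plus the 0x prefix (the key and payload are illustrative).

import coincurve
from eth_utils import encode_hex, keccak

pk = coincurve.PrivateKey(b"\x01" * 32)  # illustrative key, never use a fixed key
signature = pk.sign_recoverable(keccak(b"payload"), hasher=None)
assert len(signature) == 65
assert len(encode_hex(signature)) == 2 + 130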
Example #41
def pubtoaddr(public_key):
    raw_pub_key = decode_hex(public_key)
    return encode_hex(sha3(raw_pub_key)[12:]).lower()
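A sketch of the address shape, assuming sha3 above is keccak-256: the address is the last 20 bytes of the hash of the 64-byte uncompressed public key (a dummy key is used here).

from eth_utils import encode_hex, keccak

dummy_pub = b"\x01" * 64  # illustrative 64-byte public key
address = encode_hex(keccak(dummy_pub)[12:]).lower()
assert len(address) == 42  # '0x' plus 40 hex characters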
Example #42
def test_payment_channel_proxy_basics(
    token_network_proxy,
    private_keys,
    token_proxy,
    chain_id,
    web3,
    contract_manager,
    skip_if_parity,
):
    token_network_address = to_canonical_address(
        token_network_proxy.proxy.contract.address)

    c1_client = JSONRPCClient(web3, private_keys[1])
    c1_chain = BlockChainService(
        jsonrpc_client=c1_client,
        contract_manager=contract_manager,
    )
    c2_client = JSONRPCClient(web3, private_keys[2])
    c1_token_network_proxy = TokenNetwork(
        jsonrpc_client=c1_client,
        token_network_address=token_network_address,
        contract_manager=contract_manager,
    )
    c2_token_network_proxy = TokenNetwork(
        jsonrpc_client=c2_client,
        token_network_address=token_network_address,
        contract_manager=contract_manager,
    )

    start_block = web3.eth.blockNumber

    # create a channel
    channel_identifier = c1_token_network_proxy.new_netting_channel(
        partner=c2_client.address,
        settle_timeout=TEST_SETTLE_TIMEOUT_MIN,
        given_block_identifier='latest',
    )
    assert channel_identifier is not None

    # create channel proxies
    channel_proxy_1 = PaymentChannel(
        token_network=c1_token_network_proxy,
        channel_identifier=channel_identifier,
        contract_manager=contract_manager,
    )
    channel_proxy_2 = PaymentChannel(
        token_network=c2_token_network_proxy,
        channel_identifier=channel_identifier,
        contract_manager=contract_manager,
    )

    channel_filter = channel_proxy_1.all_events_filter(
        from_block=start_block,
        to_block='latest',
    )

    assert channel_proxy_1.channel_identifier == channel_identifier
    assert channel_proxy_2.channel_identifier == channel_identifier

    assert channel_proxy_1.opened('latest') is True
    assert channel_proxy_2.opened('latest') is True

    # check the settlement timeouts
    assert channel_proxy_1.settle_timeout() == channel_proxy_2.settle_timeout()
    assert channel_proxy_1.settle_timeout() == TEST_SETTLE_TIMEOUT_MIN

    events = channel_filter.get_all_entries()
    assert len(events) == 1  # ChannelOpened

    # test deposits
    initial_token_balance = 100
    token_proxy.transfer(c1_client.address, initial_token_balance, 'latest')
    initial_balance_c1 = token_proxy.balance_of(c1_client.address)
    assert initial_balance_c1 == initial_token_balance
    initial_balance_c2 = token_proxy.balance_of(c2_client.address)
    assert initial_balance_c2 == 0

    # actual deposit
    channel_proxy_1.set_total_deposit(total_deposit=10,
                                      block_identifier='latest')

    events = channel_filter.get_all_entries()
    assert len(events) == 2  # ChannelOpened, ChannelNewDeposit

    # balance proof by c2
    transferred_amount = 3
    balance_proof = BalanceProof(
        channel_identifier=channel_identifier,
        token_network_address=to_checksum_address(token_network_address),
        nonce=1,
        chain_id=chain_id,
        transferred_amount=transferred_amount,
    )
    balance_proof.signature = encode_hex(
        LocalSigner(private_keys[1]).sign(
            data=balance_proof.serialize_bin(), ), )
    # correct close
    c2_token_network_proxy.close(
        channel_identifier=channel_identifier,
        partner=c1_client.address,
        balance_hash=decode_hex(balance_proof.balance_hash),
        nonce=balance_proof.nonce,
        additional_hash=decode_hex(balance_proof.additional_hash),
        signature=decode_hex(balance_proof.signature),
        given_block_identifier='latest',
    )
    assert channel_proxy_1.closed('latest') is True
    assert channel_proxy_2.closed('latest') is True

    events = channel_filter.get_all_entries()
    assert len(events) == 3  # ChannelOpened, ChannelNewDeposit, ChannelClosed

    # check the settlement timeouts again
    assert channel_proxy_1.settle_timeout() == channel_proxy_2.settle_timeout()
    assert channel_proxy_1.settle_timeout() == TEST_SETTLE_TIMEOUT_MIN

    # update transfer
    c1_chain.wait_until_block(target_block_number=c1_client.block_number() +
                              TEST_SETTLE_TIMEOUT_MIN, )

    c2_token_network_proxy.settle(
        channel_identifier=channel_identifier,
        transferred_amount=0,
        locked_amount=0,
        locksroot=EMPTY_HASH,
        partner=c1_client.address,
        partner_transferred_amount=transferred_amount,
        partner_locked_amount=0,
        partner_locksroot=EMPTY_HASH,
        given_block_identifier='latest',
    )
    assert channel_proxy_1.settled('latest') is True
    assert channel_proxy_2.settled('latest') is True

    events = channel_filter.get_all_entries()

    assert len(
        events
    ) == 4  # ChannelOpened, ChannelNewDeposit, ChannelClosed, ChannelSettled
Example #43
 def get_transaction_receipt(self, tx_hash: bytes):
     return self.web3.eth.getTransactionReceipt(encode_hex(tx_hash))
Example #44
 def session_id(self) -> str:
     """Session ID as used for OneToN.settled_sessions"""
     return encode_hex(
         keccak(self.receiver + self.sender + encode_single("uint256", self.claimable_until))
     )
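A sketch of the keccak pre-image layout used above, assuming an eth_abi release that still provides encode_single (newer releases use eth_abi.encode); the addresses and timestamp are illustrative.

from eth_abi import encode_single
from eth_utils import encode_hex, keccak

receiver = b"\x11" * 20
sender = b"\x22" * 20
claimable_until = 1_700_000_000

packed = receiver + sender + encode_single("uint256", claimable_until)
assert len(packed) == 20 + 20 + 32
session_id = encode_hex(keccak(packed))
assert len(session_id) == 66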
Example #45
def random_private_key(bound):
    """Randomly gnerate a private key smaller than a certain bound."""
    n = random.randint(1, bound)  # nosec
    private_key = encode_hex(pad_left(int_to_big_endian(n), 32, '\0'))
    return private_key
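An equivalent sketch with plain eth_utils helpers, assuming pad_left above left-pads the big-endian bytes to 32 bytes as bytes.rjust does.

import random

from eth_utils import encode_hex, int_to_big_endian

n = random.randint(1, 2**255)  # nosec - illustration only
private_key = encode_hex(int_to_big_endian(n).rjust(32, b"\x00"))
assert len(private_key) == 66  # '0x' plus 32 zero-padded bytes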
Example #46
 def _block_to_dict(self, block):
     logs_bloom = encode_hex(int_to_big_endian(block.header.bloom))[2:]
     logs_bloom = '0x' + logs_bloom.rjust(512, '0')
     return {
         "difficulty": hex(block.header.difficulty),
         "extraData": encode_hex(block.header.extra_data),
         "gasLimit": hex(block.header.gas_limit),
         "gasUsed": hex(block.header.gas_used),
         "hash": encode_hex(block.header.hash),
         "logsBloom": logs_bloom,
         "mixHash": encode_hex(block.header.mix_hash),
         "nonce": encode_hex(block.header.nonce),
         "number": hex(block.header.block_number),
         "parentHash": encode_hex(block.header.parent_hash),
         "receiptsRoot": encode_hex(block.header.receipt_root),
         "sha3Uncles": encode_hex(block.header.uncles_hash),
         "stateRoot": encode_hex(block.header.state_root),
         "timestamp": hex(block.header.timestamp),
         "totalDifficulty": hex(self.chain.chaindb.get_score(block.hash)),
         "transactions": [encode_hex(tx.hash) for tx in block.transactions],
         "transactionsRoot": encode_hex(block.header.transaction_root),
         "uncles": [encode_hex(uncle.hash) for uncle in block.uncles],
         "size": hex(len(rlp.encode(block))),
         "miner": encode_hex(block.header.coinbase),
     }
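A sketch of the logsBloom formatting above: the 2048-bit bloom filter is rendered as exactly 512 hex characters, left-padded with zeros (the bloom value is illustrative).

from eth_utils import encode_hex, int_to_big_endian

bloom = 1  # mostly-empty bloom filter
logs_bloom = "0x" + encode_hex(int_to_big_endian(bloom))[2:].rjust(512, "0")
assert len(logs_bloom) == 2 + 512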
Example #47
 def __str__(self):
     return "HeadInfo{{block:{}, hash:{}, td:{}, reorg_depth:{}}}".format(
         self.block_number, encode_hex(self.block_hash), self.total_difficulty,
         self.reorg_depth)
Example #48
 def format_integer(value, sedes):
     return encode_hex(sedes.serialize(value))
Example #49
0
def hexbytes_to_str(map_: Dict):
    """ Converts values that are of type `HexBytes` to strings. """
    for k, v in map_.items():
        if isinstance(v, HexBytes):
            map_[k] = encode_hex(v)
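
The conversion happens in place and only touches HexBytes values, leaving already-serializable entries untouched. A usage sketch, assuming the hexbytes package and the helper above (plus its typing.Dict import) are in scope:

from hexbytes import HexBytes

record = {'hash': HexBytes('0x01ff'), 'status': 1}
hexbytes_to_str(record)
assert record == {'hash': '0x01ff', 'status': 1}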
Example #50
0
 def add(self, node_id: NodeID) -> None:
     if node_id not in self:
         self.logger.debug("Adding entry %s", encode_hex(node_id))
         self.entries.appendleft(node_id)
     else:
         raise ValueError(f"Entry {encode_hex(node_id)} already present in the routing table")
Example #51
0
def run_app(
        address,
        keystore_path,
        gas_price,
        eth_rpc_endpoint,
        tokennetwork_registry_contract_address,
        secret_registry_contract_address,
        endpoint_registry_contract_address,
        listen_address,
        mapped_socket,
        max_unresponsive_time,
        api_address,
        rpc,
        sync_check,
        console,
        password_file,
        web_ui,
        datadir,
        transport,
        matrix_server,
        network_id,
        environment_type,
        unrecoverable_error_should_crash,
        config=None,
        extra_config=None,
        **kwargs,
):
    # pylint: disable=too-many-locals,too-many-branches,too-many-statements,unused-argument

    from raiden.app import App

    _assert_sql_version()

    if transport == 'udp' and not mapped_socket:
        raise RuntimeError('Missing socket')

    if datadir is None:
        datadir = os.path.join(os.path.expanduser('~'), '.raiden')

    address_hex = to_normalized_address(address) if address else None
    address_hex, privatekey_bin = prompt_account(address_hex, keystore_path, password_file)
    address = to_canonical_address(address_hex)

    (listen_host, listen_port) = split_endpoint(listen_address)
    (api_host, api_port) = split_endpoint(api_address)

    config['transport']['udp']['host'] = listen_host
    config['transport']['udp']['port'] = listen_port
    config['console'] = console
    config['rpc'] = rpc
    config['web_ui'] = rpc and web_ui
    config['api_host'] = api_host
    config['api_port'] = api_port
    if mapped_socket:
        config['socket'] = mapped_socket.socket
        config['transport']['udp']['external_ip'] = mapped_socket.external_ip
        config['transport']['udp']['external_port'] = mapped_socket.external_port
    config['transport_type'] = transport
    config['transport']['matrix']['server'] = matrix_server
    config['transport']['udp']['nat_keepalive_retries'] = DEFAULT_NAT_KEEPALIVE_RETRIES
    timeout = max_unresponsive_time / DEFAULT_NAT_KEEPALIVE_RETRIES
    config['transport']['udp']['nat_keepalive_timeout'] = timeout
    config['privatekey_hex'] = encode_hex(privatekey_bin)
    config['unrecoverable_error_should_crash'] = unrecoverable_error_should_crash

    parsed_eth_rpc_endpoint = urlparse(eth_rpc_endpoint)
    if not parsed_eth_rpc_endpoint.scheme:
        eth_rpc_endpoint = f'http://{eth_rpc_endpoint}'

    web3 = _setup_web3(eth_rpc_endpoint)

    rpc_client = JSONRPCClient(
        web3,
        privatekey_bin,
        gas_price_strategy=gas_price,
        block_num_confirmations=DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS,
        uses_infura='infura.io' in eth_rpc_endpoint,
    )

    blockchain_service = BlockChainService(
        privatekey_bin=privatekey_bin,
        jsonrpc_client=rpc_client,
        # Not giving the contract manager here, but injecting it later
        # since we first need blockchain service to calculate the network id
    )

    if network_id == 1:
        # disallow Raiden on the mainnet for now, until the Red Eyes release
        click.secho(
            'Running Raiden on the mainnet is currently not supported.',
            fg='red',
        )
        sys.exit(1)

    given_network_id = network_id
    node_network_id = blockchain_service.network_id
    known_given_network_id = given_network_id in ID_TO_NETWORKNAME
    known_node_network_id = node_network_id in ID_TO_NETWORKNAME

    if node_network_id != given_network_id:
        if known_given_network_id and known_node_network_id:
            click.secho(
                f"The chosen ethereum network '{ID_TO_NETWORKNAME[given_network_id]}' "
                f"differs from the ethereum client '{ID_TO_NETWORKNAME[node_network_id]}'. "
                "Please update your settings.",
                fg='red',
            )
        else:
            click.secho(
                f"The chosen ethereum network id '{given_network_id}' differs "
                f"from the ethereum client '{node_network_id}'. "
                "Please update your settings.",
                fg='red',
            )
        sys.exit(1)

    config['chain_id'] = given_network_id

    # interpret the provided string argument
    if environment_type == Environment.PRODUCTION:
        # Safe configuration: restrictions for mainnet apply and matrix rooms have to be private
        config['environment_type'] = Environment.PRODUCTION
        config['transport']['matrix']['private_rooms'] = True
    else:
        config['environment_type'] = Environment.DEVELOPMENT

    environment_type = config['environment_type']
    print(f'Raiden is running in {environment_type.value.lower()} mode')

    chain_config = {}
    contract_addresses_known = False
    contracts = dict()
    config['contracts_path'] = contracts_precompiled_path()

    if node_network_id in ID_TO_NETWORKNAME and ID_TO_NETWORKNAME[node_network_id] != 'smoketest':
        contracts_version = 'pre_limits' if environment_type == Environment.DEVELOPMENT else None
        deployment_data = get_contracts_deployed(node_network_id, contracts_version)
        config['contracts_path'] = contracts_precompiled_path(contracts_version)
        not_allowed = (  # for now we only disallow mainnet with test configuration
            network_id == 1 and
            environment_type == Environment.DEVELOPMENT
        )
        if not_allowed:
            click.secho(
                f'The chosen network ({ID_TO_NETWORKNAME[node_network_id]}) is not a testnet, '
                'but the "development" environment was selected.\n'
                'This is not allowed. Please start again with a safe environment setting '
                '(--environment production).',
                fg='red',
            )
            sys.exit(1)

        contracts = deployment_data['contracts']
        contract_addresses_known = True

    blockchain_service.inject_contract_manager(ContractManager(config['contracts_path']))

    if sync_check:
        check_synced(blockchain_service, known_node_network_id)

    contract_addresses_given = (
        tokennetwork_registry_contract_address is not None and
        secret_registry_contract_address is not None and
        endpoint_registry_contract_address is not None
    )

    if not contract_addresses_given and not contract_addresses_known:
        click.secho(
            f"There are no known contract addresses for network id '{given_network_id}'. "
            "Please provide them on the command line or in the configuration file.",
            fg='red',
        )
        sys.exit(1)

    try:
        token_network_registry = blockchain_service.token_network_registry(
            tokennetwork_registry_contract_address or to_canonical_address(
                contracts[CONTRACT_TOKEN_NETWORK_REGISTRY]['address'],
            ),
        )
    except ContractVersionMismatch as e:
        handle_contract_version_mismatch(e)
    except AddressWithoutCode:
        handle_contract_no_code('token network registry', tokennetwork_registry_contract_address)
    except AddressWrongContract:
        handle_contract_wrong_address(
            'token network registry',
            tokennetwork_registry_contract_address,
        )

    try:
        secret_registry = blockchain_service.secret_registry(
            secret_registry_contract_address or to_canonical_address(
                contracts[CONTRACT_SECRET_REGISTRY]['address'],
            ),
        )
    except ContractVersionMismatch as e:
        handle_contract_version_mismatch(e)
    except AddressWithoutCode:
        handle_contract_no_code('secret registry', secret_registry_contract_address)
    except AddressWrongContract:
        handle_contract_wrong_address('secret registry', secret_registry_contract_address)

    database_path = os.path.join(
        datadir,
        f'node_{pex(address)}',
        f'netid_{given_network_id}',
        f'network_{pex(token_network_registry.address)}',
        f'v{RAIDEN_DB_VERSION}_log.db',
    )
    config['database_path'] = database_path

    print(
        '\nYou are connected to the \'{}\' network and the DB path is: {}'.format(
            ID_TO_NETWORKNAME.get(given_network_id, given_network_id),
            database_path,
        ),
    )

    discovery = None
    if transport == 'udp':
        transport, discovery = _setup_udp(
            config,
            blockchain_service,
            address,
            contracts,
            endpoint_registry_contract_address,
        )
    elif transport == 'matrix':
        transport = _setup_matrix(config)
    else:
        raise RuntimeError(f'Unknown transport type "{transport}" given')

    raiden_event_handler = RaidenEventHandler()
    message_handler = MessageHandler()

    try:
        if 'contracts' in chain_config:
            start_block = chain_config['contracts']['TokenNetworkRegistry']['block_number']
        else:
            start_block = 0

        raiden_app = App(
            config=config,
            chain=blockchain_service,
            query_start_block=start_block,
            default_registry=token_network_registry,
            default_secret_registry=secret_registry,
            transport=transport,
            raiden_event_handler=raiden_event_handler,
            message_handler=message_handler,
            discovery=discovery,
        )
    except RaidenError as e:
        click.secho(f'FATAL: {e}', fg='red')
        sys.exit(1)

    try:
        raiden_app.start()
    except RuntimeError as e:
        click.secho(f'FATAL: {e}', fg='red')
        sys.exit(1)
    except filelock.Timeout:
        name_or_id = ID_TO_NETWORKNAME.get(given_network_id, given_network_id)
        click.secho(
            f'FATAL: Another Raiden instance already running for account {address_hex} on '
            f'network id {name_or_id}',
            fg='red',
        )
        sys.exit(1)

    return raiden_app
Example #52
0
def encode_byte_values(map_: Dict):
    """ Converts values that are of type `bytes` to strings. """
    for k, v in map_.items():
        if isinstance(v, bytes):
            map_[k] = encode_hex(v)
Example #53
0
    def close(
        self,
        channel_identifier: typing.ChannelID,
        partner: typing.Address,
        balance_hash: typing.BalanceHash,
        nonce: typing.Nonce,
        additional_hash: typing.AdditionalHash,
        signature: typing.Signature,
    ):
        """ Close the channel using the provided balance proof.

        Raises:
            ChannelBusyError: If the channel is busy with another operation.
            ChannelIncorrectStateError: If the channel is not in the open state.
        """

        log_details = {
            'token_network': pex(self.address),
            'node': pex(self.node_address),
            'partner': pex(partner),
            'nonce': nonce,
            'balance_hash': encode_hex(balance_hash),
            'additional_hash': encode_hex(additional_hash),
            'signature': encode_hex(signature),
        }
        log.info('close called', **log_details)

        self._check_for_outdated_channel(
            self.node_address,
            partner,
            channel_identifier,
        )

        if not self.channel_is_opened(self.node_address, partner,
                                      channel_identifier):
            raise ChannelIncorrectStateError(
                'Channel is not in an opened state. It cannot be closed.', )

        with self.channel_operations_lock[partner]:
            transaction_hash = self.proxy.transact(
                'closeChannel',
                channel_identifier,
                partner,
                balance_hash,
                nonce,
                additional_hash,
                signature,
            )
            self.client.poll(transaction_hash)

            receipt_or_none = check_transaction_threw(self.client,
                                                      transaction_hash)
            if receipt_or_none:
                log.critical('close failed', **log_details)
                if not self.channel_is_opened(self.node_address, partner,
                                              channel_identifier):
                    raise ChannelIncorrectStateError(
                        'Channel is not in an opened state. It cannot be closed.',
                    )
                raise TransactionThrew('Close', receipt_or_none)

            log.info('close successful', **log_details)
Example #54
0
 def rand_account(self) -> (str, bytes):
     priv_key = eth_utils.encode_hex(os.urandom(32))
     addr = eth_utils.encode_hex(priv_to_addr(priv_key))
     return (addr, priv_key)
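
Note that both returned values end up as 0x-prefixed hex strings, despite the (str, bytes) annotation, since encode_hex is applied to each. A rough equivalent using eth_keys directly (an assumption; the snippet's priv_to_addr helper is project-specific):

import os
import eth_utils
from eth_keys import keys

priv_bytes = os.urandom(32)
priv_key = eth_utils.encode_hex(priv_bytes)
addr = eth_utils.encode_hex(keys.PrivateKey(priv_bytes).public_key.to_canonical_address())
print(addr, priv_key)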
Example #55
0
    def settle(
        self,
        channel_identifier: typing.ChannelID,
        transferred_amount: int,
        locked_amount: int,
        locksroot: typing.Locksroot,
        partner: typing.Address,
        partner_transferred_amount: int,
        partner_locked_amount: int,
        partner_locksroot: typing.Locksroot,
    ):
        """ Settle the channel.

        Raises:
            ChannelBusyError: If the channel is busy with another operation
        """
        log_details = {
            'token_network': pex(self.address),
            'node': pex(self.node_address),
            'partner': pex(partner),
            'transferred_amount': transferred_amount,
            'locked_amount': locked_amount,
            'locksroot': encode_hex(locksroot),
            'partner_transferred_amount': partner_transferred_amount,
            'partner_locked_amount': partner_locked_amount,
            'partner_locksroot': encode_hex(partner_locksroot),
        }
        log.info('settle called', **log_details)

        self._check_for_outdated_channel(
            self.node_address,
            partner,
            channel_identifier,
        )

        with self.channel_operations_lock[partner]:
            if self._verify_settle_state(
                    transferred_amount,
                    locked_amount,
                    locksroot,
                    partner,
                    partner_transferred_amount,
                    partner_locked_amount,
                    partner_locksroot,
            ) is False:
                raise ChannelIncorrectStateError(
                    'Local state cannot be used to call settle')
            our_maximum = transferred_amount + locked_amount
            partner_maximum = partner_transferred_amount + partner_locked_amount

            # settleChannel expects the participant with the larger
            # transferred + locked amount to be passed second
            our_bp_is_larger = our_maximum > partner_maximum

            if our_bp_is_larger:
                transaction_hash = self.proxy.transact(
                    'settleChannel',
                    channel_identifier,
                    partner,
                    partner_transferred_amount,
                    partner_locked_amount,
                    partner_locksroot,
                    self.node_address,
                    transferred_amount,
                    locked_amount,
                    locksroot,
                )
            else:
                transaction_hash = self.proxy.transact(
                    'settleChannel',
                    channel_identifier,
                    self.node_address,
                    transferred_amount,
                    locked_amount,
                    locksroot,
                    partner,
                    partner_transferred_amount,
                    partner_locked_amount,
                    partner_locksroot,
                )

            self.client.poll(transaction_hash)
            receipt_or_none = check_transaction_threw(self.client,
                                                      transaction_hash)
            if receipt_or_none:
                channel_exists = self.channel_exists_and_not_settled(
                    self.node_address,
                    partner,
                    channel_identifier,
                )

                if not channel_exists:
                    log.info('settle failed, channel already settled',
                             **log_details)
                    raise ChannelIncorrectStateError(
                        'Channel already settled or non-existent', )

                channel_closed = self.channel_is_closed(
                    participant1=self.node_address,
                    participant2=partner,
                    channel_identifier=channel_identifier,
                )
                if channel_closed is False:
                    log.info('settle failed, channel is not closed',
                             **log_details)
                    raise ChannelIncorrectStateError(
                        'Channel is not in a closed state. It cannot be settled',
                    )

                log.info('settle failed', **log_details)
                raise TransactionThrew('Settle', receipt_or_none)

            log.info('settle successful', **log_details)
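
The branch above encodes settleChannel's ordering requirement: the participant with the smaller transferred + locked total is passed first and the larger one second. A small, self-contained sketch of just that decision (hypothetical helper, no chain access):

def settle_argument_order(our_total, partner_total, us, partner):
    """Return (first_participant, second_participant) for settleChannel."""
    if our_total > partner_total:
        return (partner, us)
    return (us, partner)

assert settle_argument_order(5, 3, 'us', 'partner') == ('partner', 'us')
assert settle_argument_order(2, 3, 'us', 'partner') == ('us', 'partner')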
Example #56
0
def pex(data: bytes) -> str:
    return remove_0x_prefix(encode_hex(data))[:8]
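
pex is a logging helper: it returns the first 4 bytes (8 hex characters) of any byte value, without the 0x prefix. Usage sketch, assuming the function above and its eth_utils imports are in scope:

address = bytes.fromhex('deadbeef' + '00' * 16)   # a 20-byte address
assert pex(address) == 'deadbeef'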
Example #57
0
    def _request_resource(
            self,
            method: str,
            url: str,
            **kwargs
    ) -> Tuple[Union[None, Response], bool]:
        """
        Performs a request with the given HTTP method, adding headers that
        represent the current channel state.
        """
        headers = Munch()
        headers.contract_address = self.client.context.channel_manager.address
        if self.channel is not None:
            headers.balance = str(self.channel.balance)
            headers.balance_signature = encode_hex(self.channel.balance_sig)
            headers.sender_address = self.channel.sender
            headers.receiver_address = self.channel.receiver
            headers.open_block = str(self.channel.block)

        headers = HTTPHeaders.serialize(headers)
        if 'headers' in kwargs:
            headers.update(kwargs['headers'])
            kwargs['headers'] = headers
        else:
            kwargs['headers'] = headers
        response = requests.Session.request(self, method, url, **kwargs)

        if self.on_http_response(method, url, response, **kwargs) is False:
            return response, False  # user requested abort

        if response.status_code == requests.codes.OK:
            return response, self.on_success(method, url, response, **kwargs)

        elif response.status_code == requests.codes.PAYMENT_REQUIRED:
            if HTTPHeaders.NONEXISTING_CHANNEL in response.headers:
                return response, self.on_nonexisting_channel(method, url, response, **kwargs)

            elif HTTPHeaders.INSUF_CONFS in response.headers:
                return response, self.on_insufficient_confirmations(
                    method,
                    url,
                    response,
                    **kwargs
                )

            elif HTTPHeaders.INSUF_FUNDS in response.headers:
                return response, self.on_insufficient_funds(method, url, response, **kwargs)

            elif HTTPHeaders.CONTRACT_ADDRESS not in response.headers or not is_same_address(
                response.headers.get(HTTPHeaders.CONTRACT_ADDRESS),
                self.client.context.channel_manager.address
            ):
                return response, self.on_invalid_contract_address(method, url, response, **kwargs)

            elif HTTPHeaders.INVALID_AMOUNT in response.headers:
                return response, self.on_invalid_amount(method, url, response, **kwargs)

            else:
                return response, self.on_payment_requested(method, url, response, **kwargs)
        else:
            return response, self.on_http_error(method, url, response, **kwargs)
Example #58
0
def test_payment_channel_outdated_channel_close(
    token_network_proxy,
    private_keys,
    chain_id,
    web3,
    contract_manager,
    skip_if_parity,
):
    token_network_address = to_canonical_address(
        token_network_proxy.proxy.contract.address)

    partner = privatekey_to_address(private_keys[0])

    client = JSONRPCClient(web3, private_keys[1])
    chain = BlockChainService(
        jsonrpc_client=client,
        contract_manager=contract_manager,
    )
    token_network_proxy = TokenNetwork(
        jsonrpc_client=client,
        token_network_address=token_network_address,
        contract_manager=contract_manager,
    )
    start_block = web3.eth.blockNumber

    # create a channel
    channel_identifier = token_network_proxy.new_netting_channel(
        partner=partner,
        settle_timeout=TEST_SETTLE_TIMEOUT_MIN,
        given_block_identifier='latest',
    )
    assert channel_identifier is not None

    # create channel proxies
    channel_proxy_1 = PaymentChannel(
        token_network=token_network_proxy,
        channel_identifier=channel_identifier,
        contract_manager=contract_manager,
    )

    channel_filter = channel_proxy_1.all_events_filter(
        from_block=start_block,
        to_block='latest',
    )

    assert channel_proxy_1.channel_identifier == channel_identifier

    assert channel_proxy_1.opened('latest') is True

    # balance proof signed by the partner
    balance_proof = BalanceProof(
        channel_identifier=channel_identifier,
        token_network_address=to_checksum_address(token_network_address),
        nonce=0,
        chain_id=chain_id,
        transferred_amount=0,
    )
    balance_proof.signature = encode_hex(
        LocalSigner(private_keys[0]).sign(data=balance_proof.serialize_bin()),
    )
    # correct close
    token_network_proxy.close(
        channel_identifier=channel_identifier,
        partner=partner,
        balance_hash=bytes(32),
        nonce=balance_proof.nonce,
        additional_hash=bytes(32),
        signature=decode_hex(balance_proof.signature),
        given_block_identifier='latest',
    )
    assert channel_proxy_1.closed('latest') is True

    events = channel_filter.get_all_entries()
    assert len(events) == 2  # ChannelOpened, ChannelClosed

    # check the settlement timeout
    assert channel_proxy_1.settle_timeout() == TEST_SETTLE_TIMEOUT_MIN

    # wait for the settlement window to expire, then settle
    chain.wait_until_block(
        target_block_number=client.block_number() + TEST_SETTLE_TIMEOUT_MIN,
    )

    token_network_proxy.settle(
        channel_identifier=channel_identifier,
        transferred_amount=0,
        locked_amount=0,
        locksroot=EMPTY_HASH,
        partner=partner,
        partner_transferred_amount=0,
        partner_locked_amount=0,
        partner_locksroot=EMPTY_HASH,
        given_block_identifier='latest',
    )
    assert channel_proxy_1.settled('latest') is True

    events = channel_filter.get_all_entries()

    assert len(events) == 3  # ChannelOpened, ChannelClosed, ChannelSettled

    # Create a new channel with a different identifier
    new_channel_identifier = token_network_proxy.new_netting_channel(
        partner=partner,
        settle_timeout=TEST_SETTLE_TIMEOUT_MIN,
        given_block_identifier='latest',
    )
    assert new_channel_identifier is not None
    # create channel proxies
    channel_proxy_2 = PaymentChannel(
        token_network=token_network_proxy,
        channel_identifier=new_channel_identifier,
        contract_manager=contract_manager,
    )

    assert channel_proxy_2.channel_identifier == new_channel_identifier
    assert channel_proxy_2.opened('latest') is True

    with pytest.raises(ChannelOutdatedError):
        token_network_proxy.close(
            channel_identifier=channel_identifier,
            partner=partner,
            balance_hash=bytes(32),
            nonce=balance_proof.nonce,
            additional_hash=bytes(32),
            signature=decode_hex(balance_proof.signature),
            given_block_identifier='latest',
        )
Example #59
0
def test_token_network_proxy_update_transfer(
    token_network_proxy,
    private_keys,
    blockchain_rpc_ports,
    token_proxy,
    chain_id,
    web3,
):
    """Tests channel lifecycle, with `update_transfer` before settling"""
    token_network_address = to_canonical_address(
        token_network_proxy.proxy.contract.address)

    c1_client = JSONRPCClient(
        '0.0.0.0',
        blockchain_rpc_ports[0],
        private_keys[1],
        web3=web3,
    )
    c2_client = JSONRPCClient(
        '0.0.0.0',
        blockchain_rpc_ports[0],
        private_keys[2],
        web3=web3,
    )
    c1_token_network_proxy = TokenNetwork(
        c1_client,
        token_network_address,
    )
    c2_token_network_proxy = TokenNetwork(
        c2_client,
        token_network_address,
    )
    # create a channel
    channel_identifier = c1_token_network_proxy.new_netting_channel(
        c2_client.sender,
        TEST_SETTLE_TIMEOUT_MIN,
    )
    # deposit to the channel
    initial_balance = 100
    token_proxy.transfer(c1_client.sender, initial_balance)
    token_proxy.transfer(c2_client.sender, initial_balance)
    initial_balance_c1 = token_proxy.balance_of(c1_client.sender)
    assert initial_balance_c1 == initial_balance
    initial_balance_c2 = token_proxy.balance_of(c2_client.sender)
    assert initial_balance_c2 == initial_balance
    c1_token_network_proxy.set_total_deposit(
        10,
        c2_client.sender,
    )
    c2_token_network_proxy.set_total_deposit(
        10,
        c1_client.sender,
    )
    # balance proof signed by c1
    transferred_amount_c1 = 1
    transferred_amount_c2 = 3
    balance_proof_c1 = BalanceProof(
        channel_identifier=encode_hex(channel_identifier),
        token_network_address=to_checksum_address(token_network_address),
        nonce=1,
        chain_id=chain_id,
        transferred_amount=transferred_amount_c1,
    )
    balance_proof_c1.signature = encode_hex(
        sign_data(encode_hex(private_keys[1]), balance_proof_c1.serialize_bin()),
    )
    # balance proof signed by c2
    balance_proof_c2 = BalanceProof(
        channel_identifier=encode_hex(channel_identifier),
        token_network_address=to_checksum_address(token_network_address),
        nonce=2,
        chain_id=chain_id,
        transferred_amount=transferred_amount_c2,
    )
    balance_proof_c2.signature = encode_hex(
        sign_data(encode_hex(private_keys[2]), balance_proof_c2.serialize_bin()),
    )
    # close by c1
    c1_token_network_proxy.close(
        c2_client.sender,
        balance_proof_c2.nonce,
        decode_hex(balance_proof_c2.balance_hash),
        decode_hex(balance_proof_c2.additional_hash),
        decode_hex(balance_proof_c2.signature),
    )

    # use an invalid non-closing signature
    # A common mistake when calling update_transfer: the balance proof
    # signature is missing from the signed data
    non_closing_data = balance_proof_c1.serialize_bin()
    non_closing_signature = sign_data(
        encode_hex(c2_client.privkey),
        non_closing_data,
    )
    with pytest.raises(TransactionThrew):
        c2_token_network_proxy.update_transfer(
            c1_client.sender,
            balance_proof_c1.nonce,
            decode_hex(balance_proof_c1.balance_hash),
            decode_hex(balance_proof_c1.additional_hash),
            decode_hex(balance_proof_c1.signature),
            non_closing_signature,
        )

    non_closing_data = balance_proof_c1.serialize_bin() + decode_hex(
        balance_proof_c1.signature)
    non_closing_signature = sign_data(
        encode_hex(c2_client.privkey),
        non_closing_data,
    )
    c2_token_network_proxy.update_transfer(
        c1_client.sender,
        balance_proof_c1.nonce,
        decode_hex(balance_proof_c1.balance_hash),
        decode_hex(balance_proof_c1.additional_hash),
        decode_hex(balance_proof_c1.signature),
        non_closing_signature,
    )
    wait_blocks(c1_client.web3, TEST_SETTLE_TIMEOUT_MIN)

    # settling with an invalid amount
    with pytest.raises(TransactionThrew):
        c1_token_network_proxy.settle(
            2,
            0,
            EMPTY_HASH,
            c2_client.sender,
            2,
            0,
            EMPTY_HASH,
        )
    # proper settle
    c1_token_network_proxy.settle(
        transferred_amount_c1,
        0,
        EMPTY_HASH,
        c2_client.sender,
        transferred_amount_c2,
        0,
        EMPTY_HASH,
    )
    assert token_proxy.balance_of(c2_client.sender) == (
        initial_balance_c2 + transferred_amount_c1 - transferred_amount_c2
    )
    assert token_proxy.balance_of(c1_client.sender) == (
        initial_balance_c1 + transferred_amount_c2 - transferred_amount_c1
    )
Example #60
0
def test_token_network_proxy_basics(
    token_network_proxy,
    private_keys,
    blockchain_rpc_ports,
    token_proxy,
    chain_id,
    web3,
):
    # check settlement timeouts
    assert token_network_proxy.settlement_timeout_min() == TEST_SETTLE_TIMEOUT_MIN
    assert token_network_proxy.settlement_timeout_max() == TEST_SETTLE_TIMEOUT_MAX

    token_network_address = to_canonical_address(
        token_network_proxy.proxy.contract.address)

    c1_client = JSONRPCClient(
        '0.0.0.0',
        blockchain_rpc_ports[0],
        private_keys[1],
        web3=web3,
    )
    c2_client = JSONRPCClient(
        '0.0.0.0',
        blockchain_rpc_ports[0],
        private_keys[2],
        web3=web3,
    )
    c1_token_network_proxy = TokenNetwork(
        c1_client,
        token_network_address,
    )
    c2_token_network_proxy = TokenNetwork(
        c2_client,
        token_network_address,
    )

    # instantiating a new channel - test basic assumptions
    assert c1_token_network_proxy.channel_exists(c1_client.sender,
                                                 c2_client.sender) is False
    assert c1_token_network_proxy.channel_is_opened(c1_client.sender,
                                                    c2_client.sender) is False
    assert c1_token_network_proxy.channel_is_closed(c1_client.sender,
                                                    c2_client.sender) is False
    # test timeout limits
    with pytest.raises(InvalidSettleTimeout):
        c1_token_network_proxy.new_netting_channel(
            c2_client.sender,
            TEST_SETTLE_TIMEOUT_MIN - 1,
        )
    with pytest.raises(InvalidSettleTimeout):
        c1_token_network_proxy.new_netting_channel(
            c2_client.sender,
            TEST_SETTLE_TIMEOUT_MAX + 1,
        )
    # channel to self
    with pytest.raises(SamePeerAddress):
        c1_token_network_proxy.new_netting_channel(
            c1_client.sender,
            TEST_SETTLE_TIMEOUT_MIN,
        )
    # actually create a channel
    channel_identifier = c1_token_network_proxy.new_netting_channel(
        c2_client.sender,
        TEST_SETTLE_TIMEOUT_MIN,
    )
    assert channel_identifier is not None
    # multiple channels with the same peer are not allowed
    with pytest.raises(DuplicatedChannelError):
        c1_token_network_proxy.new_netting_channel(
            c2_client.sender,
            TEST_SETTLE_TIMEOUT_MIN,
        )
    assert c1_token_network_proxy.channel_exists(c1_client.sender,
                                                 c2_client.sender) is True
    assert c1_token_network_proxy.channel_is_opened(c1_client.sender,
                                                    c2_client.sender) is True

    # channel is open.
    # deposit with no balance
    with pytest.raises(ValueError):
        c1_token_network_proxy.set_total_deposit(
            10,
            c2_client.sender,
        )
    # test deposits
    initial_token_balance = 100
    token_proxy.transfer(c1_client.sender, initial_token_balance)
    initial_balance_c1 = token_proxy.balance_of(c1_client.sender)
    assert initial_balance_c1 == initial_token_balance
    initial_balance_c2 = token_proxy.balance_of(c2_client.sender)
    assert initial_balance_c2 == 0
    # no negative deposit
    with pytest.raises(ValueError):
        c1_token_network_proxy.set_total_deposit(
            -1,
            c2_client.sender,
        )
    # actual deposit
    c1_token_network_proxy.set_total_deposit(
        10,
        c2_client.sender,
    )
    # balance proof signed by c1, used by c2 to close the channel
    transferred_amount = 3
    balance_proof = BalanceProof(
        channel_identifier=encode_hex(channel_identifier),
        token_network_address=to_checksum_address(token_network_address),
        nonce=1,
        chain_id=chain_id,
        transferred_amount=transferred_amount,
    )
    balance_proof.signature = encode_hex(
        sign_data(encode_hex(private_keys[1]), balance_proof.serialize_bin()),
    )
    # close with invalid signature
    with pytest.raises(TransactionThrew):
        c2_token_network_proxy.close(
            c1_client.sender,
            balance_proof.nonce,
            decode_hex(balance_proof.balance_hash),
            decode_hex(balance_proof.additional_hash),
            b'\x11' * 65,
        )
    # correct close
    c2_token_network_proxy.close(
        c1_client.sender,
        balance_proof.nonce,
        decode_hex(balance_proof.balance_hash),
        decode_hex(balance_proof.additional_hash),
        decode_hex(balance_proof.signature),
    )
    assert c1_token_network_proxy.channel_is_closed(c1_client.sender,
                                                    c2_client.sender) is True
    assert c1_token_network_proxy.channel_exists(c1_client.sender,
                                                 c2_client.sender) is True
    # closing already closed channel
    with pytest.raises(ChannelIncorrectStateError):
        c2_token_network_proxy.close(
            c1_client.sender,
            balance_proof.nonce,
            decode_hex(balance_proof.balance_hash),
            decode_hex(balance_proof.additional_hash),
            decode_hex(balance_proof.signature),
        )
    # wait for the settlement window to expire, then settle
    wait_blocks(c1_client.web3, TEST_SETTLE_TIMEOUT_MIN)

    c2_token_network_proxy.settle(
        0,
        0,
        EMPTY_HASH,
        c1_client.sender,
        transferred_amount,
        0,
        EMPTY_HASH,
    )
    assert c1_token_network_proxy.channel_exists(c1_client.sender,
                                                 c2_client.sender) is False
    assert token_proxy.balance_of(c1_client.sender) == (initial_balance_c1 -
                                                        transferred_amount)
    assert token_proxy.balance_of(c2_client.sender) == (initial_balance_c2 +
                                                        transferred_amount)