def wait_for_contract(self, contract_address_hex, timeout=None):
    """ Wait until a contract is mined.

    Args:
        contract_address_hex (string): hex encoded address of the contract
        timeout (int): seconds to wait for the contract to get mined;
            ``None`` waits forever.

    Returns:
        True if the contract got mined, False if the timeout expired first.
    """
    contract_address = decode_hex(contract_address_hex)
    start_time = time.time()
    result = self._raiden.chain.client.web3.eth.getCode(
        to_checksum_address(contract_address),
    )

    current_time = time.time()
    while not result:
        # Bug fix: the original condition (`start_time + timeout > current_time`)
        # was inverted — it bailed out while still *within* the timeout window.
        # Give up only once the deadline has actually passed.
        if timeout and current_time > start_time + timeout:
            return False

        result = self._raiden.chain.client.web3.eth.getCode(
            to_checksum_address(contract_address),
        )
        gevent.sleep(0.5)

        current_time = time.time()

    return len(result) > 0
def test_token(
        deploy_client,
        token_proxy,
        private_keys,
        web3,
        contract_manager,
):
    """Token proxy round-trip: fund a second account, approve it, and have
    it transfer the funds back to the deployer."""
    other_privkey = private_keys[1]
    other_address = to_canonical_address(privatekey_to_address(other_privkey))

    other_client = JSONRPCClient(web3, other_privkey)
    other_token_proxy = Token(
        jsonrpc_client=other_client,
        token_address=to_canonical_address(token_proxy.proxy.contract.address),
        contract_manager=contract_manager,
    )

    # fund the generated account from the deployer
    transfer_funds = 100
    token_proxy.transfer(other_address, transfer_funds)
    assert token_proxy.balance_of(other_address) == transfer_funds

    allow_funds = 100
    token_proxy.approve(other_address, allow_funds)
    allowance = token_proxy.proxy.contract.functions.allowance(
        to_checksum_address(deploy_client.address),
        to_checksum_address(other_address),
    ).call(block_identifier='latest')
    assert allowance == allow_funds

    # return everything to the deployer
    other_token_proxy.transfer(deploy_client.address, transfer_funds)
    assert token_proxy.balance_of(other_address) == 0
def test_api_transfers(api_backend, raiden_network, token_addresses):
    """POST a payment through the REST API and check the echoed payload."""
    _, app1 = raiden_network
    amount = 200
    identifier = 42
    token_address = token_addresses[0]
    target_address = app1.raiden.address

    api_server, _ = api_backend
    our_address = api_server.rest_api.raiden_api.address

    # the API is expected to echo the full transfer description back
    expected = {
        'initiator_address': to_checksum_address(our_address),
        'target_address': to_checksum_address(target_address),
        'token_address': to_checksum_address(token_address),
        'amount': amount,
        'identifier': identifier,
    }

    request = grequests.post(
        api_url_for(
            api_backend,
            'transfertotargetresource',
            token_address=to_checksum_address(token_address),
            target_address=to_checksum_address(target_address),
        ),
        json={'amount': amount, 'identifier': identifier},
    )
    response = request.send().response
    assert_proper_response(response)
    assert response.json() == expected
def test_token(
        deploy_client,
        token_proxy,
        private_keys,
        blockchain_rpc_ports,
        web3,
):
    """Token proxy round-trip with the host/port client constructor: fund a
    second account, approve it, and have it send the funds back."""
    other_privkey = private_keys[1]
    other_address = to_canonical_address(privatekey_to_address(other_privkey))

    other_client = JSONRPCClient(
        '0.0.0.0',
        blockchain_rpc_ports[0],
        other_privkey,
        web3=web3,
    )
    other_token_proxy = Token(
        other_client,
        to_canonical_address(token_proxy.proxy.contract.address),
    )

    # fund the generated account from the deployer
    transfer_funds = 100
    token_proxy.transfer(other_address, transfer_funds)
    assert token_proxy.balance_of(other_address) == transfer_funds

    allow_funds = 100
    token_proxy.approve(other_address, allow_funds)
    allowance = token_proxy.proxy.contract.functions.allowance(
        to_checksum_address(deploy_client.sender),
        to_checksum_address(other_address),
    ).call()
    assert allowance == allow_funds

    # return everything to the deployer
    other_token_proxy.transfer(deploy_client.sender, transfer_funds)
    assert token_proxy.balance_of(other_address) == 0
def test_api_open_close_and_settle_channel(
        api_backend,
        token_addresses,
        reveal_timeout,
):
    """Open a channel through the REST API, verify the returned payload,
    then close it and verify the closed-channel payload."""
    # let's create a new channel
    partner_address = '0x61C808D82A3Ac53231750daDc13c777b59310bD9'
    token_address = token_addresses[0]
    settle_timeout = 1650
    channel_data_obj = {
        'partner_address': partner_address,
        'token_address': to_checksum_address(token_address),
        'settle_timeout': settle_timeout,
    }
    request = grequests.put(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
        json=channel_data_obj,
    )
    response = request.send().response

    balance = 0
    assert_proper_response(response, status_code=HTTPStatus.CREATED)
    response = response.json()
    # the request payload plus the server-side defaults must be echoed back;
    # identifiers are assigned by the node, so they are ignored in comparison
    expected_response = channel_data_obj
    expected_response['balance'] = balance
    expected_response['state'] = CHANNEL_STATE_OPENED
    expected_response['reveal_timeout'] = reveal_timeout
    expected_response['channel_identifier'] = assert_dicts_are_equal.IGNORE_VALUE
    expected_response['token_network_identifier'] = assert_dicts_are_equal.IGNORE_VALUE
    assert_dicts_are_equal(response, expected_response)

    token_network_identifier = response['token_network_identifier']

    # let's close the channel
    request = grequests.patch(
        api_url_for(
            api_backend,
            'channelsresourcebytokenandpartneraddress',
            token_address=token_address,
            partner_address=partner_address,
        ),
        json={'state': CHANNEL_STATE_CLOSED},
    )
    response = request.send().response
    assert_proper_response(response)
    expected_response = {
        'token_network_identifier': token_network_identifier,
        'channel_identifier': assert_dicts_are_equal.IGNORE_VALUE,
        'partner_address': partner_address,
        'token_address': to_checksum_address(token_address),
        'settle_timeout': settle_timeout,
        'reveal_timeout': reveal_timeout,
        'state': CHANNEL_STATE_CLOSED,
        'balance': balance,
    }
    assert_dicts_are_equal(response.json(), expected_response)
def to_dict(self) -> Dict[str, Any]:
    """Serialize this object into a JSON-compatible dictionary; byte
    fields are hex-serialized and numeric identifiers stringified."""
    serialized = {
        'transaction_hash': serialize_bytes(self.transaction_hash),
        'transaction_from': to_checksum_address(self.transaction_from),
        'token_network_identifier': to_checksum_address(self.token_network_identifier),
        'channel_identifier': str(self.channel_identifier),
        'block_number': str(self.block_number),
    }
    return serialized
def test_routing_mocked_pfs_bad_http_code(
        chain_state,
        payment_network_state,
        token_network_state,
        our_address,
):
    """When the mocked PFS answers with a non-success HTTP code (400), its
    response must be ignored and routing falls back to the internal order.

    NOTE(review): the PFS answer lists address2 before address1, while the
    assertions expect address1 first — presumably the internal routing order;
    confirm against `get_best_routes`.
    """
    token_network_state, addresses, channel_states = create_square_network_topology(
        payment_network_state=payment_network_state,
        token_network_state=token_network_state,
        our_address=our_address,
    )
    address1, address2, address3 = addresses
    channel_state1, channel_state2 = channel_states

    # test routing with all nodes available
    chain_state.nodeaddresses_to_networkstates = {
        address1: NODE_NETWORK_REACHABLE,
        address2: NODE_NETWORK_REACHABLE,
        address3: NODE_NETWORK_REACHABLE,
    }

    # channel 1 and 2 are flipped here, to see when the PFS gets called
    json_data = {
        'result': [
            {
                'path': [to_checksum_address(our_address), to_checksum_address(address2)],
                'fees': 0,
            },
            {
                'path': [to_checksum_address(our_address), to_checksum_address(address1)],
                'fees': 0,
            },
        ],
    }

    # simulate a PFS that replies, but with HTTP 400
    response = Mock()
    response.configure_mock(status_code=400)
    response.json = Mock(return_value=json_data)

    with patch.object(requests, 'get', return_value=response):
        routes = get_best_routes(
            chain_state=chain_state,
            token_network_id=token_network_state.address,
            from_address=our_address,
            to_address=address1,
            amount=50,
            previous_address=None,
            config={
                'services': {
                    'pathfinding_service_address': 'my-pfs',
                    'pathfinding_max_paths': 3,
                },
            },
        )
        # routes come back in the non-PFS (internal) order, not the flipped
        # order the mocked PFS returned
        assert routes[0].node_address == address1
        assert routes[0].channel_identifier == channel_state1.identifier
        assert routes[1].node_address == address2
        assert routes[1].channel_identifier == channel_state2.identifier
def test_api_deposit_limit(
        api_backend,
        token_addresses,
        reveal_timeout,
):
    """Open a channel with a deposit exactly at the token deposit limit
    (accepted), then one just above it (rejected with 417)."""
    # let's create a new channel and deposit exactly the limit amount
    first_partner_address = '0x61C808D82A3Ac53231750daDc13c777b59310bD9'
    token_address = token_addresses[0]
    settle_timeout = 1650
    balance_working = MAX_TOKENS_DEPLOY * (10 ** 2)  # token has two digits
    channel_data_obj = {
        'partner_address': first_partner_address,
        'token_address': to_checksum_address(token_address),
        'settle_timeout': settle_timeout,
        'reveal_timeout': reveal_timeout,
        'balance': balance_working,
    }

    request = grequests.put(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
        json=channel_data_obj,
    )
    response = request.send().response

    assert_proper_response(response, HTTPStatus.CREATED)
    response = response.json()
    # identifiers are assigned server-side, so they are ignored in comparison
    expected_response = channel_data_obj
    expected_response['balance'] = balance_working
    expected_response['state'] = CHANNEL_STATE_OPENED
    expected_response['channel_identifier'] = assert_dicts_are_equal.IGNORE_VALUE
    expected_response['token_network_identifier'] = assert_dicts_are_equal.IGNORE_VALUE
    assert_dicts_are_equal(response, expected_response)

    # now let's open a channel and deposit a bit more than the limit
    second_partner_address = '0x29FA6cf0Cce24582a9B20DB94Be4B6E017896038'
    balance_failing = balance_working + 1  # token has two digits
    channel_data_obj = {
        'partner_address': second_partner_address,
        'token_address': to_checksum_address(token_address),
        'settle_timeout': settle_timeout,
        'reveal_timeout': reveal_timeout,
        'balance': balance_failing,
    }
    request = grequests.put(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
        json=channel_data_obj,
    )
    response = request.send().response

    assert_proper_response(response, HTTPStatus.EXPECTATION_FAILED)
    response = response.json()
    assert response['errors'] == 'The deposit of 10001 is bigger than the current limit of 10000'
def to_dict(self) -> typing.Dict[str, typing.Any]:
    """Serialize this object into a JSON-compatible dictionary; addresses
    are checksummed and the identifier stringified."""
    serialized = {
        'payment_identifier': str(self.payment_identifier),
        'token': to_checksum_address(self.token),
        'balance_proof': self.balance_proof,
        'lock': self.lock,
        'initiator': to_checksum_address(self.initiator),
        'target': to_checksum_address(self.target),
    }
    return serialized
def approve(self, allowed_address: Address, allowance: TokenAmount):
    """ Approve `allowed_address` to transfer up to `allowance` amount of token.

    Note:
        For channel deposit please use the channel proxy, since it does
        additional validations.

    Raises:
        RaidenUnrecoverableError: if the gas estimation indicated the call
            would fail, or the mined transaction threw.
    """
    log_details = {
        'node': pex(self.node_address),
        'contract': pex(self.address),
        'allowed_address': pex(allowed_address),
        'allowance': allowance,
    }
    error_prefix = 'Call to approve will fail'
    # estimate against 'pending' so the allowance check sees queued txs
    gas_limit = self.proxy.estimate_gas(
        'pending',
        'approve',
        to_checksum_address(allowed_address),
        allowance,
    )
    if gas_limit:
        # estimation succeeded: actually send the transaction
        error_prefix = 'Call to approve failed'
        log.debug('approve called', **log_details)
        transaction_hash = self.proxy.transact(
            'approve',
            safe_gas_limit(gas_limit),
            to_checksum_address(allowed_address),
            allowance,
        )

        self.client.poll(transaction_hash)
        receipt_or_none = check_transaction_threw(self.client, transaction_hash)

    transaction_executed = gas_limit is not None
    # note: when the transaction was not executed, short-circuit evaluation
    # guarantees `receipt_or_none` is never read (it may be unbound here)
    if not transaction_executed or receipt_or_none:
        if transaction_executed:
            block = receipt_or_none['blockNumber']
        else:
            block = 'pending'

        # distinguish "out of eth" from an actual contract-side failure
        self.proxy.jsonrpc_client.check_for_insufficient_eth(
            transaction_name='approve',
            transaction_executed=transaction_executed,
            required_gas=GAS_REQUIRED_FOR_APPROVE,
            block_identifier=block,
        )
        msg = self._check_why_approved_failed(allowance, block)
        error_msg = f'{error_prefix}. {msg}'
        log.critical(error_msg, **log_details)
        raise RaidenUnrecoverableError(error_msg)

    log.info('approve successful', **log_details)
def to_dict(self) -> Dict[str, Any]:
    """Serialize this object into a JSON-compatible dictionary; addresses
    are checksummed and the channel identifier stringified."""
    return {
        'token_address': to_checksum_address(self.token_address),
        'token_network_identifier': to_checksum_address(self.token_network_identifier),
        'channel_identifier': str(self.channel_identifier),
        'participant': to_checksum_address(self.participant),
    }
def to_dict(self) -> Dict[str, Any]:
    """Serialize this object into a JSON-compatible dictionary; addresses
    are checksummed, the balance proof is passed through as-is."""
    return {
        'channel_identifier': str(self.channel_identifier),
        'token_address': to_checksum_address(self.token_address),
        'token_network_identifier': to_checksum_address(self.token_network_identifier),
        'balance_proof': self.balance_proof,
    }
def to_dict(self) -> typing.Dict[str, typing.Any]:
    """Serialize this object into a JSON-compatible dictionary; addresses
    are checksummed, numbers stringified, and the secret hex-serialized."""
    serialized = {
        'payment_network_identifier': to_checksum_address(self.payment_network_identifier),
        'payment_identifier': str(self.payment_identifier),
        'amount': str(self.amount),
        'token_network_identifier': to_checksum_address(self.token_network_identifier),
        'initiator': to_checksum_address(self.initiator),
        'target': to_checksum_address(self.target),
        'secret': serialization.serialize_bytes(self.secret),
    }
    return serialized
def to_dict(self) -> Dict[str, Any]:
    """Serialize this object into a JSON-compatible dictionary; addresses
    are checksummed, the identifier stringified, the reason passed through."""
    return {
        'payment_network_identifier': to_checksum_address(self.payment_network_identifier),
        'token_network_identifier': to_checksum_address(self.token_network_identifier),
        'identifier': str(self.identifier),
        'target': to_checksum_address(self.target),
        'reason': self.reason,
    }
def to_dict(self) -> Dict[str, Any]:
    """Serialize this object into a JSON-compatible dictionary; addresses
    are checksummed and numeric fields stringified."""
    return {
        'payment_network_identifier': to_checksum_address(self.payment_network_identifier),
        'token_network_identifier': to_checksum_address(self.token_network_identifier),
        'identifier': str(self.identifier),
        'amount': str(self.amount),
        'initiator': to_checksum_address(self.initiator),
    }
def test_register_token(api_backend, token_amount, token_addresses, raiden_network):
    """Register a freshly deployed token via the REST API (201), then check
    re-registration conflicts (409) and registration without eth (402)."""
    app0 = raiden_network[0]
    new_token_address = deploy_contract_web3(
        CONTRACT_HUMAN_STANDARD_TOKEN,
        app0.raiden.chain.client,
        num_confirmations=None,
        constructor_arguments=(
            token_amount,
            2,
            'raiden',
            'Rd',
        ),
    )
    # second token deployed up-front, used later for the out-of-eth case
    other_token_address = deploy_contract_web3(
        CONTRACT_HUMAN_STANDARD_TOKEN,
        app0.raiden.chain.client,
        num_confirmations=None,
        constructor_arguments=(
            token_amount,
            2,
            'raiden',
            'Rd',
        ),
    )

    register_request = grequests.put(api_url_for(
        api_backend,
        'registertokenresource',
        token_address=to_checksum_address(new_token_address),
    ))
    register_response = register_request.send().response
    assert_proper_response(register_response, status_code=HTTPStatus.CREATED)
    response_json = register_response.json()
    assert 'token_network_address' in response_json
    assert is_checksum_address(response_json['token_network_address'])

    # now try to reregister it and get the error
    conflict_request = grequests.put(api_url_for(
        api_backend,
        'registertokenresource',
        token_address=to_checksum_address(new_token_address),
    ))
    conflict_response = conflict_request.send().response
    assert_response_with_error(conflict_response, HTTPStatus.CONFLICT)

    # Burn all the eth and then make sure we get the appropriate API error
    burn_all_eth(app0.raiden)
    poor_request = grequests.put(api_url_for(
        api_backend,
        'registertokenresource',
        token_address=to_checksum_address(other_token_address),
    ))
    poor_response = poor_request.send().response
    assert_response_with_error(poor_response, HTTPStatus.PAYMENT_REQUIRED)
def to_dict(self) -> Dict[str, Any]:
    """Serialize this object into a JSON-compatible dictionary; byte fields
    are hex-serialized, addresses checksummed, numbers stringified."""
    serialized = {
        'transaction_hash': serialize_bytes(self.transaction_hash),
        'token_network_identifier': to_checksum_address(self.token_network_identifier),
        'participant': to_checksum_address(self.participant),
        'partner': to_checksum_address(self.partner),
        'locksroot': serialize_bytes(self.locksroot),
        'unlocked_amount': str(self.unlocked_amount),
        'returned_tokens': str(self.returned_tokens),
        'block_number': str(self.block_number),
    }
    return serialized
def get_or_deploy_token(runner) -> ContractProxy:
    """ Deploy or reuse a CustomToken contract for a scenario run.

    Behavior driven by the scenario's ``token`` config section:
    - ``address``: reuse the token deployed at that address.
    - ``reuse``: remember/reload the deployed address in ``token.addr``
      under the runner's data path (mutually exclusive with ``address``).
    Otherwise a fresh CustomToken is deployed with a generated name/symbol.
    """
    contract_manager = ContractManager(contracts_precompiled_path())
    token_contract = contract_manager.get_contract(CONTRACT_CUSTOM_TOKEN)

    token_config = runner.scenario.get('token', {})
    if not token_config:
        token_config = {}
    address = token_config.get('address')
    reuse = token_config.get('reuse', False)

    token_address_file = runner.data_path.joinpath('token.addr')
    if reuse:
        if address:
            raise ScenarioError('Token settings "address" and "reuse" are mutually exclusive.')
        if token_address_file.exists():
            address = token_address_file.read_text()
    if address:
        # reuse path: verify there is actually a contract at the address
        check_address_has_code(runner.client, address, 'Token')
        token_ctr = runner.client.new_contract_proxy(token_contract['abi'], address)

        log.debug(
            "Reusing token",
            address=to_checksum_address(address),
            name=token_ctr.contract.functions.name().call(),
            symbol=token_ctr.contract.functions.symbol().call(),
        )
        return token_ctr

    # deploy path: generate a unique name/symbol unless configured
    token_id = uuid.uuid4()
    now = datetime.now()
    name = token_config.get('name', f"Scenario Test Token {token_id!s} {now:%Y-%m-%dT%H:%M}")
    symbol = token_config.get('symbol', f"T{token_id!s:.3}")
    decimals = token_config.get('decimals', 0)
    log.debug("Deploying token", name=name, symbol=symbol, decimals=decimals)

    token_ctr = runner.client.deploy_solidity_contract(
        'CustomToken',
        contract_manager.contracts,
        constructor_parameters=(0, decimals, name, symbol),
    )
    contract_checksum_address = to_checksum_address(token_ctr.contract_address)
    # persist the address so later runs with `reuse` pick it up
    if reuse:
        token_address_file.write_text(contract_checksum_address)

    log.info(
        "Deployed token",
        address=contract_checksum_address,
        name=name,
        symbol=symbol,
    )
    return token_ctr
def to_dict(self) -> Dict[str, Any]:
    """Serialize this object into a JSON-compatible dictionary; addresses
    are checksummed, identifiers stringified, the secret hex-serialized."""
    return {
        'recipient': to_checksum_address(self.recipient),
        'channel_identifier': str(self.queue_identifier.channel_identifier),
        'message_identifier': str(self.message_identifier),
        'payment_identifier': str(self.payment_identifier),
        'token_address': to_checksum_address(self.token),
        'secret': serialization.serialize_bytes(self.secret),
        'balance_proof': self.balance_proof,
    }
def test_api_tokens(api_backend, blockchain_services, token_addresses):
    """Open a channel for each of two tokens, then verify the token list
    endpoint returns both token addresses."""
    partner_address = '0x61C808D82A3Ac53231750daDc13c777b59310bD9'
    token_address1 = token_addresses[0]
    token_address2 = token_addresses[1]
    settle_timeout = 1650

    channel_data_obj = {
        'partner_address': partner_address,
        'token_address': to_checksum_address(token_address1),
        'settle_timeout': settle_timeout,
    }
    request = grequests.put(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
        json=channel_data_obj,
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)

    partner_address = '0x61C808D82A3Ac53231750daDc13c777b59310bD9'
    settle_timeout = 1650
    channel_data_obj = {
        'partner_address': partner_address,
        'token_address': to_checksum_address(token_address2),
        'settle_timeout': settle_timeout,
    }
    request = grequests.put(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
        json=channel_data_obj,
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)

    # and now let's get the token list
    request = grequests.get(
        api_url_for(
            api_backend,
            'tokensresource',
        ),
    )
    response = request.send().response
    assert_proper_response(response)
    response = response.json()
    expected_response = [
        to_checksum_address(token_address1),
        to_checksum_address(token_address2),
    ]
    # order of the token list is not guaranteed, compare as sets
    assert set(response) == set(expected_response)
def test_call_inexisting_address(deploy_client):
    """ A JSON RPC call to an inexisting address returns the empty string. """
    inexisting_address = b'\x01\x02\x03\x04\x05' * 4

    # no contract code may exist at the made-up address
    deployed_code = deploy_client.web3.eth.getCode(to_checksum_address(inexisting_address))
    assert len(deployed_code) == 0

    transaction = {
        'from': to_checksum_address(deploy_client.sender),
        'to': to_checksum_address(inexisting_address),
        'data': b'',
        'value': 0,
    }
    assert deploy_client.web3.eth.call(transaction) == b''
def test_api_get_channel_list(
        api_backend,
        token_addresses,
        reveal_timeout,
):
    """The channel list endpoint is empty initially and contains the channel
    after one is opened through the API."""
    partner_address = '0x61C808D82A3Ac53231750daDc13c777b59310bD9'

    request = grequests.get(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.OK)
    assert response.json() == []

    # let's create a new channel
    token_address = token_addresses[0]
    settle_timeout = 1650
    channel_data_obj = {
        'partner_address': partner_address,
        'token_address': to_checksum_address(token_address),
        'settle_timeout': settle_timeout,
        'reveal_timeout': reveal_timeout,
    }

    request = grequests.put(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
        json=channel_data_obj,
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)

    request = grequests.get(
        api_url_for(
            api_backend,
            'channelsresource',
        ),
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.OK)
    channel_info = response.json()[0]
    assert channel_info['partner_address'] == partner_address
    assert channel_info['token_address'] == to_checksum_address(token_address)
    assert 'token_network_identifier' in channel_info
def locked_amount_by_locksroot(
        self,
        participant1: typing.Address,
        participant2: typing.Address,
        locksroot: typing.Locksroot,
) -> int:
    """ Returns the locked amount for a specific participant's locksroot. """
    # forward straight to the contract call; the helper already validates
    # the result
    return self._call_and_check_result(
        'getParticipantLockedAmount',
        to_checksum_address(participant1),
        to_checksum_address(participant2),
        locksroot,
    )
def close_all_channels_cooperatively(
        client: Client,
        private_keys: List[str],
        contract_address: str,
        balance: int=None,
):
    """Cooperatively close every non-closed channel of `client` for which a
    matching receiver private key is available."""
    # map checksummed receiver address -> private key for quick lookup
    addresses_to_keys = {
        to_checksum_address(privkey_to_addr(key)): key
        for key in private_keys
    }

    client.sync_channels()
    closable_channels = [
        channel for channel in client.channels
        if channel.state != Channel.State.closed
    ]
    log.info('Closing {} channels.'.format(len(closable_channels)))

    for channel in closable_channels:
        receiver_key = addresses_to_keys.get(to_checksum_address(channel.receiver))
        # channels whose receiver key is unknown are skipped
        if receiver_key is not None:
            close_channel_cooperatively(channel, receiver_key, contract_address, balance)
def patch_channel(
        self,
        registry_address,
        token_address,
        partner_address,
        total_deposit=None,
        state=None,
):
    """Update a channel: either deposit (`total_deposit`) or change its
    `state` — exactly one of the two must be provided.

    Returns an api_error response for invalid argument combinations, an
    unknown channel, or an invalid state value; otherwise the result of the
    deposit/close operation.
    """
    # exactly one of total_deposit / state may be given
    if total_deposit is not None and state is not None:
        return api_error(
            errors="Can not update a channel's total deposit and state at the same time",
            status_code=HTTPStatus.CONFLICT,
        )

    if total_deposit is None and state is None:
        return api_error(
            errors="Nothing to do. Should either provide 'total_deposit' or 'state' argument",
            status_code=HTTPStatus.BAD_REQUEST,
        )

    try:
        channel_state = self.raiden_api.get_channel(
            registry_address=registry_address,
            token_address=token_address,
            partner_address=partner_address,
        )
    except ChannelNotFound:
        return api_error(
            errors='Requested channel for token {} and partner {} not found'.format(
                to_checksum_address(token_address),
                to_checksum_address(partner_address),
            ),
            status_code=HTTPStatus.CONFLICT,
        )

    if total_deposit is not None:
        result = self._deposit(registry_address, channel_state, total_deposit)
    elif state == CHANNEL_STATE_CLOSED:
        result = self._close(registry_address, channel_state)
    else:
        # should never happen, channel_state is validated in the schema
        result = api_error(
            errors='Provided invalid channel state {}'.format(state),
            status_code=HTTPStatus.BAD_REQUEST,
        )
    return result
def detail_channel(self, participant1: typing.Address, participant2: typing.Address) -> Dict:
    """ Returns a dictionary with the channel specific information. """
    channel_data = self._call_and_check_result(
        'getChannelInfo',
        to_checksum_address(participant1),
        to_checksum_address(participant2),
    )

    assert isinstance(channel_data[0], typing.T_ChannelID)

    # contract returns (identifier, settle block, state) positionally
    channel_identifier, settle_block_number, state = channel_data[:3]
    return {
        'channel_identifier': channel_identifier,
        'settle_block_number': settle_block_number,
        'state': state,
    }
def send_transaction(
        self,
        to: Address,
        value: int = 0,
        data: bytes = b'',
        startgas: int = None,
        gasprice: int = None,
):
    """ Helper to send signed messages.

    This method will use the `privkey` provided in the constructor to
    locally sign the transaction. This requires an extended server
    implementation that accepts the variables v, r, and s.
    """
    if to == to_canonical_address(NULL_ADDRESS):
        warnings.warn('For contract creation the empty string must be used.')

    transaction = {
        'nonce': self.nonce(),
        'gasPrice': gasprice or self.gasprice(),
        'gas': self.check_startgas(startgas),
        'value': value,
        'data': data,
    }

    # contract creation omits the 'to' field entirely
    if to != b'':
        transaction['to'] = to_checksum_address(to)

    signed_txn = self.web3.eth.account.signTransaction(transaction, self.privkey)
    tx_hash = self.web3.eth.sendRawTransaction(signed_txn.rawTransaction)
    return remove_0x_prefix(encode_hex(tx_hash))
def __init__(
        self,
        private_key: str = None,
        key_password_path: str = None,
        channel_manager_address: str = None,
        web3: Web3 = None
) -> None:
    """Create a client bound to a private key and a channel manager.

    Args:
        private_key: either a raw 64-char hex private key or a path to a
            key file on disk.
        key_password_path: password file used when loading a key file.
        channel_manager_address: manager contract address; falls back to
            NETWORK_CFG.CHANNEL_MANAGER_ADDRESS when not given.
        web3: an existing Web3 instance; a default HTTP-provider instance
            is created when omitted.
    """
    is_hex_key = is_hex(private_key) and len(remove_0x_prefix(private_key)) == 64
    is_path = os.path.exists(private_key)
    assert is_hex_key or is_path, 'Private key must either be a hex key or a file path.'

    # Load private key from file if none is specified on command line.
    if is_path:
        private_key = get_private_key(private_key, key_password_path)
        assert private_key is not None, 'Could not load private key from file.'

    self.channels = []  # type: List[Channel]

    # Create web3 context if none is provided, either by using the proxies' context or creating
    # a new one.
    if not web3:
        web3 = Web3(HTTPProvider(WEB3_PROVIDER_DEFAULT))

    channel_manager_address = to_checksum_address(
        channel_manager_address or NETWORK_CFG.CHANNEL_MANAGER_ADDRESS
    )

    self.context = Context(private_key, web3, channel_manager_address)

    self.sync_channels()
def get_filter_args_for_specific_event_from_channel(
        token_network_address: TokenNetworkAddress,
        channel_identifier: ChannelID,
        event_name: str,
        contract_manager: ContractManager,
        from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
        to_block: BlockSpecification = 'latest',
):
    """ Return the filter params for a specific event of a given channel. """
    if not event_name:
        raise ValueError('Event name must be given')

    event_abi = contract_manager.get_event_abi(CONTRACT_TOKEN_NETWORK, event_name)

    # The topics list starts with the event signature; for token network
    # events the first encoded parameter is always the channel identifier.
    _, filter_params = construct_event_filter_params(
        event_abi=event_abi,
        contract_address=to_checksum_address(token_network_address),
        argument_filters={'channel_identifier': channel_identifier},
        fromBlock=from_block,
        toBlock=to_block,
    )
    return filter_params
def setup_address(self, name, address=default, transact=None):
    '''
    Set up the name to point to the supplied address.
    The sender of the transaction must own the name, or
    its parent name.

    Example: If the caller owns ``parentname.eth`` with no subdomains
    and calls this method with ``sub.parentname.eth``,
    then ``sub`` will be created as part of this call.

    :param str name: ENS name to set up, in checksum format
    :param str address: name will point to this address. If ``None``, erase the record.
        If not specified, name will point to the owner's address.
    :param dict transact: the transaction configuration, like in
        :meth:`~web3.eth.Eth.sendTransaction`. Defaults to a fresh empty dict.
    :raises InvalidName: if ``name`` has invalid syntax
    :raises UnauthorizedError: if ``'from'`` in `transact` does not own `name`
    '''
    # Bug fix: the previous default `transact={}` was a shared mutable
    # default; this method writes `transact['from']`, so the 'from' key
    # leaked between unrelated calls. Use the None-sentinel idiom instead.
    if transact is None:
        transact = {}
    owner = self.setup_owner(name, transact=transact)
    self._assert_control(owner, name)
    if not address or address == EMPTY_ADDR_HEX:
        address = None
    elif address is default:
        # `default` sentinel: point the name at its owner
        address = owner
    elif is_binary_address(address):
        address = to_checksum_address(address)
    elif not is_checksum_address(address):
        raise ValueError("You must supply the address in checksum format")
    if self.address(name) == address:
        # record already matches, nothing to do
        return None
    if address is None:
        address = EMPTY_ADDR_HEX
    transact['from'] = owner
    resolver = self._set_resolver(name, transact=transact)
    return resolver.setAddr(dot_eth_namehash(name), address, transact=transact)
def open(
        self,
        registry_address: typing.PaymentNetworkID,
        partner_address: typing.Address,
        token_address: typing.TokenAddress,
        settle_timeout: typing.BlockTimeout = None,
        total_deposit: typing.TokenAmount = None,
):
    """Open a channel with `partner_address` for `token_address`, optionally
    making an initial deposit of `total_deposit`.

    Returns an api_response with the serialized channel state (201 CREATED)
    on success, or an api_error (409 CONFLICT / 402 PAYMENT REQUIRED) for
    the various failure modes surfaced by the raiden API.
    """
    log.debug(
        'Opening channel',
        node=pex(self.raiden_api.address),
        registry_address=to_checksum_address(registry_address),
        partner_address=to_checksum_address(partner_address),
        token_address=to_checksum_address(token_address),
        settle_timeout=settle_timeout,
    )
    try:
        token = self.raiden_api.raiden.chain.token(token_address)
    except AddressWithoutCode as e:
        return api_error(
            errors=str(e),
            status_code=HTTPStatus.CONFLICT,
        )
    balance = token.balance_of(self.raiden_api.raiden.address)

    # fail early if the node cannot cover the requested deposit
    if total_deposit is not None and total_deposit > balance:
        error_msg = 'Not enough balance to deposit. {} Available={} Needed={}'.format(
            pex(token_address),
            balance,
            total_deposit,
        )
        return api_error(
            errors=error_msg,
            status_code=HTTPStatus.PAYMENT_REQUIRED,
        )

    try:
        self.raiden_api.channel_open(
            registry_address,
            token_address,
            partner_address,
            settle_timeout,
        )
    except (InvalidAddress, InvalidSettleTimeout, SamePeerAddress,
            AddressWithoutCode, DuplicatedChannelError, TokenNotRegistered) as e:
        return api_error(
            errors=str(e),
            status_code=HTTPStatus.CONFLICT,
        )
    except (InsufficientFunds, InsufficientGasReserve) as e:
        return api_error(
            errors=str(e),
            status_code=HTTPStatus.PAYMENT_REQUIRED,
        )

    if total_deposit:
        # make initial deposit
        log.debug(
            'Depositing to new channel',
            node=pex(self.raiden_api.address),
            registry_address=to_checksum_address(registry_address),
            token_address=to_checksum_address(token_address),
            partner_address=to_checksum_address(partner_address),
            total_deposit=total_deposit,
        )
        try:
            self.raiden_api.set_total_channel_deposit(
                registry_address=registry_address,
                token_address=token_address,
                partner_address=partner_address,
                total_deposit=total_deposit,
            )
        except InsufficientFunds as e:
            return api_error(
                errors=str(e),
                status_code=HTTPStatus.PAYMENT_REQUIRED,
            )
        except (DepositOverLimit, DepositMismatch) as e:
            return api_error(
                errors=str(e),
                status_code=HTTPStatus.CONFLICT,
            )

    # re-read the freshly created channel state for the response payload
    channel_state = views.get_channelstate_for(
        views.state_from_raiden(self.raiden_api.raiden),
        registry_address,
        token_address,
        partner_address,
    )

    result = self.channel_schema.dump(channel_state)
    return api_response(
        result=result.data,
        status_code=HTTPStatus.CREATED,
    )
def get_our_address(self):
    """Return this node's address as a checksummed hex string, wrapped in
    an api_response."""
    checksummed = to_checksum_address(self.raiden_api.address)
    return api_response(result=dict(our_address=checksummed))
def address(self):
    """Derive the checksummed Ethereum address from the private key."""
    # keccak-256 over the uncompressed public key without its one-byte
    # prefix; the address is the last 20 bytes of the digest
    hasher = keccak.new(digest_bits=256)
    uncompressed_pubkey = self.private_key.public_key.format(compressed=False)
    hasher.update(uncompressed_pubkey[1:])
    return to_checksum_address(hasher.digest()[-20:])
def recover_public_address(message, signature_bytes):
    """Recover the signer's checksummed address from `message` and its
    signature."""
    pubkey = recover_public_key(message, signature_bytes)
    recovered = public_key_bytes_to_address(pubkey.to_bytes())
    return to_checksum_address(recovered)
def _verify_deployed_contract(web3, contract_manager, deployment_data, contract_name):
    """Verify one deployed contract against the precompiled deployment data.

    Checks, in order:
      - the on-chain runtime bytecode matches the compiled bytecode,
      - the deployment receipt (block number, gas cost, contract address)
        matches the recorded deployment data,
      - the on-chain ``contract_version()`` matches ``contracts_version``.

    Returns the instantiated web3 contract proxy and its recorded address.
    Raises AssertionError with a descriptive message on any mismatch.
    """
    contracts = deployment_data['contracts']
    contract_address = contracts[contract_name]['address']
    contract_instance = web3.eth.contract(
        abi=contract_manager.get_contract_abi(contract_name),
        address=contract_address,
    )

    # Check that the deployed bytecode matches the precompiled data
    blockchain_bytecode = web3.eth.getCode(contract_address).hex()
    compiled_bytecode = runtime_hexcode(
        contract_manager,
        contract_name,
        len(blockchain_bytecode),
    )
    assert blockchain_bytecode == compiled_bytecode

    # Check blockchain transaction hash & block information
    receipt = web3.eth.getTransactionReceipt(
        contracts[contract_name]['transaction_hash'],
    )
    assert receipt['blockNumber'] == contracts[contract_name]['block_number'], \
        f"We have block_number {contracts[contract_name]['block_number']} " \
        f"instead of {receipt['blockNumber']}"
    assert receipt['gasUsed'] == contracts[contract_name]['gas_cost'], \
        f"We have gasUsed {contracts[contract_name]['gas_cost']} " \
        f"instead of {receipt['gasUsed']}"
    assert receipt['contractAddress'] == contracts[contract_name]['address'], \
        f"We have contractAddress {contracts[contract_name]['address']} " \
        f"instead of {receipt['contractAddress']}"

    # Check the contract version
    version = contract_instance.functions.contract_version().call()
    assert version == deployment_data['contracts_version']

    print(
        f'{contract_name} at {contract_address} '
        f'matches the compiled data from contracts.json',
    )
    return contract_instance, contract_address


def verify_deployed_contracts(web3: Web3, contract_manager: ContractManager, deployment_data=None):
    """Verify all deployed Raiden contracts against precompiled deployment data.

    Args:
        web3: connection to the chain holding the deployed contracts.
        contract_manager: provides ABIs and compiled bytecode.
        deployment_data: optional pre-loaded deployment data; when None it is
            loaded from the bundled deployment file for the connected chain.

    Raises AssertionError on any mismatch between chain state and the
    deployment data.
    """
    chain_id = int(web3.version.network)
    deployment_file_path = None
    if deployment_data is None:
        deployment_data = get_contracts_deployed(chain_id, contract_manager.contracts_version)
        deployment_file_path = contracts_deployed_path(
            chain_id,
            contract_manager.contracts_version,
        )
    contracts = deployment_data['contracts']

    assert contract_manager.contracts_version == deployment_data['contracts_version']
    assert chain_id == deployment_data['chain_id']

    # The same bytecode/receipt/version checks apply to every contract; the
    # shared logic lives in _verify_deployed_contract.
    _verify_deployed_contract(
        web3, contract_manager, deployment_data, CONTRACT_ENDPOINT_REGISTRY,
    )
    _verify_deployed_contract(
        web3, contract_manager, deployment_data, CONTRACT_SECRET_REGISTRY,
    )
    token_network_registry, _ = _verify_deployed_contract(
        web3, contract_manager, deployment_data, CONTRACT_TOKEN_NETWORK_REGISTRY,
    )
    secret_registry_address = contracts[CONTRACT_SECRET_REGISTRY]['address']

    # The token network registry additionally records its constructor
    # parameters; check them against the live contract state.
    constructor_arguments = contracts[CONTRACT_TOKEN_NETWORK_REGISTRY]['constructor_arguments']
    assert to_checksum_address(
        token_network_registry.functions.secret_registry_address().call(),
    ) == secret_registry_address
    assert secret_registry_address == constructor_arguments[0]

    chain_id = token_network_registry.functions.chain_id().call()
    assert chain_id == constructor_arguments[1]

    settlement_timeout_min = token_network_registry.functions.settlement_timeout_min().call()
    settlement_timeout_max = token_network_registry.functions.settlement_timeout_max().call()
    assert settlement_timeout_min == constructor_arguments[2]
    assert settlement_timeout_max == constructor_arguments[3]

    if deployment_file_path is not None:
        print(f'Deployment info from {deployment_file_path} has been verified and it is CORRECT.')
def addresses_checksummed(abi_type, data):
    """Normalize address-typed ABI values to checksum form.

    Returns an ``(abi_type, checksummed_value)`` pair for ``'address'``
    entries, and ``None`` for every other ABI type.
    """
    if abi_type != 'address':
        return None
    return abi_type, to_checksum_address(data)
def load_account_obj(keystore_file, password):
    """Load and unlock an Account from a JSON keystore file."""
    with open(keystore_file, "r") as keystore:
        keystore_json = json.load(keystore)
    account = Account(keystore_json, password, keystore_file)
    log.info("Using account", account=to_checksum_address(account.address))
    return account
def abi_address_to_hex(abi_type, data):
    """Convert binary address-typed ABI values to checksummed hex.

    Validates the address first; returns ``(abi_type, checksummed)`` only
    when ``data`` is a binary address, otherwise ``None``.
    """
    if abi_type != 'address':
        return None
    validate_address(data)
    if is_binary_address(data):
        return abi_type, to_checksum_address(data)
    return None
def save(self, *args, **kwargs):
    """Persist the model, normalising `address` to checksum form first (when set)."""
    if self.address:
        self.address = to_checksum_address(self.address)
    super().save(*args, **kwargs)
def __init__(self, address):
    """Wrap an address; raw values are checksummed, Address instances unwrapped."""
    self.address = (
        address.address
        if isinstance(address, Address)
        else eth_utils.to_checksum_address(address)
    )
def _leave_unused_rooms(self, _address_to_room_ids: Dict[AddressHex, List[_RoomID]] = None):
    """ Checks for rooms we've joined and which partner isn't health-checked and leave"""
    # cache in a set all healthchecked addresses
    healthchecked_hex_addresses: Set[AddressHex] = {
        to_checksum_address(address)
        for address in self._address_to_userids
    }
    changed = False
    if _address_to_room_ids is None:
        # load the persisted address -> rooms mapping from matrix account data
        _address_to_room_ids = self._client.account_data.get(
            'network.raiden.rooms',
            {},
        ).copy()
    else:
        # if received _address_to_room_ids, assume it was already modified
        changed = True

    keep_rooms: Set[_RoomID] = set()

    # iterate over a list copy because entries may be popped while iterating
    for address_hex, room_ids in list(_address_to_room_ids.items()):
        if not room_ids:  # None or empty
            room_ids = list()
        if not isinstance(room_ids, list):  # old version, single room
            room_ids = [room_ids]

        if address_hex not in healthchecked_hex_addresses:
            # partner no longer healthchecked: forget its rooms entirely
            _address_to_room_ids.pop(address_hex)
            changed = True
            continue

        counters = [0, 0]  # public, private
        new_room_ids: List[_RoomID] = list()

        # limit to at most 2 public and 2 private rooms, preserving order
        for room_id in room_ids:
            if room_id not in self._client.rooms:
                continue
            elif self._client.rooms[room_id].invite_only is None:
                new_room_ids.append(room_id)  # not known, postpone cleaning
            elif counters[self._client.rooms[room_id].invite_only] < 2:
                counters[self._client.rooms[room_id].invite_only] += 1
                new_room_ids.append(room_id)  # not enough rooms of this type yet
            else:
                continue  # enough rooms, leave and clean

        keep_rooms |= set(new_room_ids)
        if room_ids != new_room_ids:
            _address_to_room_ids[address_hex] = new_room_ids
            changed = True

    # snapshot current rooms before possibly leaving some below
    rooms: List[Tuple[_RoomID, Room]] = list(self._client.rooms.items())

    if changed:
        # persist the cleaned-up mapping back to matrix account data
        self._client.set_account_data('network.raiden.rooms', _address_to_room_ids)

    def leave(room: Room):
        """A race between /leave and /sync may remove the room before
        del on _client.rooms key. Suppress it, as the end result is the same: no more room"""
        try:
            return room.leave()
        except KeyError:
            return True

    for room_id, room in rooms:
        if self._discovery_room and room_id == self._discovery_room.room_id:
            # don't leave discovery room
            continue
        if room_id not in keep_rooms:
            self._spawn(leave, room)
def new_account(self, password: str) -> str:
    """Create a new account via parity's personal API and return its address."""
    created = self.w3.parity.personal.newAccount(password)
    # cast and validate
    return to_checksum_address(created)
def main(scenario_file, keystore_file, password, rpc_url, auth, mailgun_api_key):
    """Run a scenario file to completion, driving a terminal UI and optionally
    sending a result notification mail via Mailgun.

    Returns 1 when a notification mail was requested but no API key was given;
    otherwise falls through (implicit None).
    """
    gevent.get_hub().exception_stream = DummyStream()
    scenario_basename = basename(scenario_file.name)
    log_file_name = f'scenario-player_{scenario_basename}_{datetime.now():%Y-%m-%dT%H:%M:%S}.log'
    click.secho(f'Writing log to {log_file_name}', fg='yellow')
    configure_logging(
        {'': 'INFO', 'raiden': 'DEBUG', 'scenario_player': 'DEBUG'},
        debug_log_file_name=log_file_name,
        _first_party_packages=frozenset(['raiden', 'scenario_player']),
    )
    log_buffer = LogBuffer()
    # redirect the first stream handler into the in-memory buffer shown by the UI
    for handler in logging.getLogger('').handlers:
        if isinstance(handler, logging.StreamHandler):
            handler.stream = log_buffer
            break

    with open(keystore_file, 'r') as keystore:
        account = Account(json.load(keystore), password, keystore_file)
        log.info("Using account", account=to_checksum_address(account.address))

    # Collect tasks
    collect_tasks(tasks)

    runner = ScenarioRunner(account, rpc_url, auth, scenario_file)
    terminal = Terminal()
    # Disable line wrapping
    print(terminal.rmam, end='')
    gevent.spawn(_ui, terminal, runner, log_file_name, log_buffer)
    try:
        assert_errors = runner.run_scenario()
        if assert_errors:
            log.error('Run finished', result='assertion errors')
        else:
            log.info('Run finished', result='success')
        if runner.notification_email:
            if not mailgun_api_key:
                log.error("Can't send notification mail. No API key provided")
                return 1
            log.info('Sending notification mail')
            if assert_errors:
                send_notification_mail(
                    runner.notification_email,
                    f'Unexpected channel balances in {scenario_file.name}',
                    json.dumps(assert_errors),
                    mailgun_api_key,
                )
            else:
                send_notification_mail(
                    runner.notification_email,
                    f'Scenario successful {scenario_file.name}',
                    'Success',
                    mailgun_api_key,
                )
    except Exception:
        # best-effort failure notification, then log; don't re-raise
        if runner.notification_email and mailgun_api_key:
            send_notification_mail(
                runner.notification_email,
                f'Error running scenario {scenario_file.name}',
                traceback.format_exc(),
                mailgun_api_key,
            )
        log.exception('Exception while running scenario')
    finally:
        try:
            # keep the UI alive on a tty until the user interrupts
            if terminal.is_a_tty:
                log.warning('Press Ctrl-C to exit')
                while True:
                    gevent.sleep(1)
        finally:
            # Re-enable line wrapping
            print(terminal.smam, end='')
def serialize_networkx_graph(graph: networkx.Graph) -> str:
    """JSON-encode the graph's edges as pairs of checksummed addresses."""
    checksummed_edges = [
        (to_checksum_address(node_a), to_checksum_address(node_b))
        for node_a, node_b in graph.edges
    ]
    return json.dumps(checksummed_edges)
def test_straightbonddetails_normal_1(self, client, session, shared_contract):
    """Normal case: details of a listed straight-bond token are returned."""
    config.BOND_TOKEN_ENABLED = True
    # Test account
    issuer = eth_account['issuer']

    # TokenList contract
    token_list = TestV2TokenStraightBondTokenDetails.tokenlist_contract()
    config.TOKEN_LIST_CONTRACT_ADDRESS = token_list['address']

    # Prepare data: issue a new straight bond
    exchange_address = to_checksum_address(
        shared_contract['IbetStraightBondExchange']['address'])
    personal_info = to_checksum_address(
        shared_contract['PersonalInfo']['address'])
    attribute = TestV2TokenStraightBondTokenDetails.bond_token_attribute(
        exchange_address, personal_info)
    bond_token = issue_bond_token(issuer, attribute)
    register_bond_list(issuer, bond_token, token_list)

    # Insert listed-token data
    TestV2TokenStraightBondTokenDetails.list_token(session, bond_token)

    apiurl = self.apiurl_base + bond_token['address']
    query_string = ''
    resp = client.simulate_get(apiurl, query_string=query_string)

    # expected response body, mirroring bond_token_attribute above
    assumed_body = {
        'token_address': bond_token['address'],
        'token_template': 'IbetStraightBond',
        'owner_address': issuer['account_address'],
        'company_name': '',
        'rsa_publickey': '',
        'name': 'テスト債券',
        'symbol': 'BOND',
        'total_supply': 1000000,
        'face_value': 10000,
        'interest_rate': 0.0602,
        'interest_payment_date1': '0101',
        'interest_payment_date2': '0201',
        'interest_payment_date3': '0301',
        'interest_payment_date4': '0401',
        'interest_payment_date5': '0501',
        'interest_payment_date6': '0601',
        'interest_payment_date7': '0701',
        'interest_payment_date8': '0801',
        'interest_payment_date9': '0901',
        'interest_payment_date10': '1001',
        'interest_payment_date11': '1101',
        'interest_payment_date12': '1201',
        'isRedeemed': False,
        'redemption_date': '20191231',
        'redemption_value': 10000,
        'return_date': '20191231',
        'return_amount': '商品券をプレゼント',
        'purpose': '新商品の開発資金として利用。',
        'image_url': [{
            'id': 1,
            'url': ''
        }, {
            'id': 2,
            'url': ''
        }, {
            'id': 3,
            'url': ''
        }],
        'certification': [],
        'initial_offering_status': False,
        'max_holding_quantity': 1,
        'max_sell_amount': 1000,
        'contact_information': '問い合わせ先',
        'privacy_policy': 'プライバシーポリシー',
        'transferable': True
    }

    assert resp.status_code == 200
    assert resp.json['meta'] == {'code': 200, 'message': 'OK'}
    assert resp.json['data'] == assumed_body
3: {  # Ropsten testnet (chain id 3) contract deployment addresses
    CONTRACT_ENDPOINT_REGISTRY: to_canonical_address(ROPSTEN_DISCOVERY_ADDRESS),
    CONTRACT_SECRET_REGISTRY: to_canonical_address(ROPSTEN_SECRET_REGISTRY_ADDRESS),
    CONTRACT_TOKEN_NETWORK_REGISTRY: to_canonical_address(ROPSTEN_REGISTRY_ADDRESS),
    START_QUERY_BLOCK_KEY: 3604000,  # 924 blocks before token network registry deployment
},
}

# reverse lookup: network name -> chain id
NETWORKNAME_TO_ID = {name: id for id, name in ID_TO_NETWORKNAME.items()}

MIN_REQUIRED_SOLC = 'v0.4.23'

# 20 zero bytes and its checksummed hex form, used as the "no address" sentinel
NULL_ADDRESS_BYTES = bytes(20)
NULL_ADDRESS = to_checksum_address(NULL_ADDRESS_BYTES)

SNAPSHOT_STATE_CHANGES_COUNT = 500

# calculated as of raiden-contracts@d3c30e6d081ac3ed8fbf3f16381889baa3963ea7
# https://github.com/raiden-network/raiden-contracts/tree/d3c30e6d081ac3ed8fbf3f16381889baa3963ea7
GAS_REQUIRED_FOR_OPEN_CHANNEL = 109933
GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT = 42214
GAS_REQUIRED_FOR_REGISTER_SECRET = 46161
GAS_REQUIRED_FOR_CLOSE_CHANNEL = 112684
GAS_REQUIRED_FOR_BALANCE_PROOF = 96284
GAS_REQUIRED_FOR_SETTLE_CHANNEL = 125009
GAS_REQUIRED_FOR_UNLOCK_1_LOCKS = 33547
GAS_REQUIRED_FOR_UNLOCK_6_LOCKS = 73020
def handle_contract_no_code(name: str, address: typing.Address) -> None:
    """Print an error for a contract address holding no code, then abort."""
    hex_addr = to_checksum_address(address)
    message = f'Error: Provided {name} {hex_addr} contract does not contain code'
    click.secho(message, fg='red')
    sys.exit(1)
def run_private_blockchain(
        web3: Web3,
        eth_nodes: List[EthNodeDescription],
        base_datadir: str,
        log_dir: str,
        verbosity: str,
        genesis_description: GenesisDescription,
) -> ContextManager[List[JSONRPCExecutor]]:
    """ Starts a private network with private_keys accounts funded.

    Args:
        web3: A Web3 instance used to check when the private chain is running.
        eth_nodes: A list of geth node description, containing the details of each
            node of the private chain.
        base_datadir: Directory used to store the geth databases.
        log_dir: Directory used to store the geth logs.
        verbosity: Verbosity used by the geth nodes.
        genesis_description: Genesis parameters (chain id, prefunded accounts,
            random marker) used to generate the chain spec.
    """
    # pylint: disable=too-many-locals,too-many-statements,too-many-arguments,too-many-branches
    password_path = os.path.join(base_datadir, "pw")
    with open(password_path, "w") as handler:
        handler.write(DEFAULT_PASSPHRASE)

    nodes_configuration = []
    for node in eth_nodes:
        config = eth_node_config(node.private_key, node.p2p_port, node.rpc_port, **node.extra_config)

        if node.miner:
            # miner node must be unlocked so it can seal blocks
            config["unlock"] = to_checksum_address(config["address"])
            config["mine"] = True
            config["password"] = os.path.join(base_datadir, "pw")

        nodes_configuration.append(config)

    blockchain_type = eth_nodes[0].blockchain_type

    # This is not be configurable because it must be one of the running eth
    # nodes.
    seal_account = privatekey_to_address(eth_nodes[0].private_key)

    if blockchain_type == "geth":
        eth_node_config_set_bootnodes(nodes_configuration)

        genesis_path = os.path.join(base_datadir, "custom_genesis.json")
        geth_generate_poa_genesis(
            genesis_path=genesis_path,
            genesis_description=genesis_description,
            seal_account=seal_account,
        )

        for config in nodes_configuration:
            if config.get("mine"):
                # pre-create the keyfile so the miner account can be unlocked
                datadir = eth_node_to_datadir(config["nodekeyhex"], base_datadir)
                keyfile_path = geth_keyfile(datadir, config["address"])
                eth_create_account_file(keyfile_path, config["nodekey"])

    elif blockchain_type == "parity":
        genesis_path = os.path.join(base_datadir, "chainspec.json")
        parity_generate_chain_spec(
            genesis_path=genesis_path,
            genesis_description=genesis_description,
            seal_account=seal_account,
        )

        for config in nodes_configuration:
            if config.get("mine"):
                datadir = eth_node_to_datadir(config["nodekeyhex"], base_datadir)
                keyfile_path = parity_keyfile(datadir)
                eth_create_account_file(keyfile_path, config["nodekey"])

    else:
        raise TypeError(f'Unknown blockchain client type "{blockchain_type}"')

    runner = eth_run_nodes(
        eth_node_descs=eth_nodes,
        nodes_configuration=nodes_configuration,
        base_datadir=base_datadir,
        genesis_file=genesis_path,
        chain_id=genesis_description.chain_id,
        random_marker=genesis_description.random_marker,
        verbosity=verbosity,
        logdir=log_dir,
    )
    with runner as executors:
        # ensure all prefunded accounts really have funds before yielding
        eth_check_balance(web3, genesis_description.prefunded_accounts)
        yield executors
def _get_random_checksum_address():
    """Return a checksummed address built from 20 random bytes."""
    return to_checksum_address(os.urandom(20))
def __init__(self, address: Address, w3: Web3) -> None:
    """Bind a PackageRegistry contract proxy deployed at `address`."""
    manifest = get_solidity_registry_manifest()
    registry_abi = manifest["contract_types"]["PackageRegistry"]["abi"]
    self.registry = w3.eth.contract(address=address, abi=registry_abi)
    self.address = to_checksum_address(address)
    self.w3 = w3
def test_routing_mocked_pfs_unavailabe_peer(
        chain_state,
        payment_network_state,
        token_network_state,
        our_address,
):
    """When a PFS-suggested first hop is unreachable, routing falls back to
    the next path whose first hop is reachable."""
    token_network_state, addresses, channel_states = create_square_network_topology(
        payment_network_state=payment_network_state,
        token_network_state=token_network_state,
        our_address=our_address,
    )
    address1, address2, address3 = addresses
    _, channel_state2 = channel_states

    # test routing with all nodes available
    chain_state.nodeaddresses_to_networkstates = {
        address1: NODE_NETWORK_REACHABLE,
        address2: NODE_NETWORK_REACHABLE,
        address3: NODE_NETWORK_REACHABLE,
    }

    # channel 1 and 2 are flipped here, to see when the PFS gets called
    json_data = {
        'result': [
            {
                'path': [to_checksum_address(our_address), to_checksum_address(address2)],
                'fees': 0,
            },
            {
                'path': [to_checksum_address(our_address), to_checksum_address(address1)],
                'fees': 0,
            },
        ],
    }

    # test routing with node 2 unavailable
    chain_state.nodeaddresses_to_networkstates = {
        address1: NODE_NETWORK_REACHABLE,
        address2: NODE_NETWORK_UNREACHABLE,
        address3: NODE_NETWORK_REACHABLE,
    }

    # fake PFS HTTP response carrying the two candidate paths above
    response = Mock()
    response.configure_mock(status_code=200)
    response.json = Mock(return_value=json_data)

    with patch.object(requests, 'get', return_value=response):
        routes = get_best_routes(
            chain_state=chain_state,
            token_network_id=token_network_state.address,
            from_address=our_address,
            to_address=address1,
            amount=50,
            previous_address=None,
            config={
                'services': {
                    'pathfinding_service_address': 'my-pfs',
                    'pathfinding_max_paths': 3,
                },
            },
        )
        assert routes[0].node_address == address2
        assert routes[0].channel_identifier == channel_state2.identifier
def initiate_payment(
        self,
        registry_address: typing.PaymentNetworkID,
        token_address: typing.TokenAddress,
        target_address: typing.Address,
        amount: typing.TokenAmount,
        identifier: typing.PaymentID,
        secret: typing.Secret,
        secret_hash: typing.SecretHash,
):
    """Start a payment of `amount` tokens to `target_address` and block
    until it completes.

    Returns the serialized payment on success, a 409 CONFLICT on invalid
    arguments or an undeliverable payment, or 402 PAYMENT_REQUIRED on
    insufficient funds.
    """
    log.debug(
        'Initiating payment',
        node=pex(self.raiden_api.address),
        registry_address=to_checksum_address(registry_address),
        token_address=to_checksum_address(token_address),
        target_address=to_checksum_address(target_address),
        amount=amount,
        payment_identifier=identifier,
        secret=secret,
        secret_hash=secret_hash,
    )

    if identifier is None:
        identifier = create_default_identifier()

    try:
        payment_status = self.raiden_api.transfer(
            registry_address=registry_address,
            token_address=token_address,
            target=target_address,
            amount=amount,
            identifier=identifier,
            secret=secret,
            secret_hash=secret_hash,
        )
    except (
            InvalidAmount,
            InvalidAddress,
            InvalidSecretOrSecretHash,
            PaymentConflict,
            UnknownTokenAddress,
    ) as e:
        return api_error(
            errors=str(e),
            status_code=HTTPStatus.CONFLICT,
        )
    except InsufficientFunds as e:
        return api_error(
            errors=str(e),
            status_code=HTTPStatus.PAYMENT_REQUIRED,
        )

    # blocks until the payment either succeeds or fails
    if payment_status.payment_done.get() is False:
        return api_error(
            errors="Payment couldn't be completed "
                   "(insufficient funds, no route to target or target offline).",
            status_code=HTTPStatus.CONFLICT,
        )

    payment = {
        'initiator_address': self.raiden_api.address,
        'registry_address': registry_address,
        'token_address': token_address,
        'target_address': target_address,
        'amount': amount,
        'identifier': identifier,
        'secret': to_hex(payment_status.secret),
        'secret_hash': to_hex(payment_status.secret_hash),
    }
    result = self.payment_schema.dump(payment)
    return api_response(result=result.data)
def accounts(self) -> List[str]:
    """Return the checksummed addresses of all accounts known to the node."""
    raw_addresses = self.__ipc_request(endpoint="account_list")
    return [to_checksum_address(address) for address in raw_addresses]
def patch_channel(
        self,
        registry_address: typing.PaymentNetworkID,
        token_address: typing.TokenAddress,
        partner_address: typing.Address,
        total_deposit: typing.TokenAmount = None,
        state: str = None,
):
    """Update an existing channel: either raise its total deposit or change
    its state (close it) — exactly one of `total_deposit`/`state` must be given.
    """
    log.debug(
        'Patching channel',
        node=pex(self.raiden_api.address),
        registry_address=to_checksum_address(registry_address),
        token_address=to_checksum_address(token_address),
        partner_address=to_checksum_address(partner_address),
        total_deposit=total_deposit,
        state=state,
    )

    # exactly one of the two mutations may be requested per call
    if total_deposit is not None and state is not None:
        return api_error(
            errors="Can not update a channel's total deposit and state at the same time",
            status_code=HTTPStatus.CONFLICT,
        )

    if total_deposit is None and state is None:
        return api_error(
            errors="Nothing to do. Should either provide 'total_deposit' or 'state' argument",
            status_code=HTTPStatus.BAD_REQUEST,
        )

    if total_deposit and total_deposit < 0:
        return api_error(
            errors="Amount to deposit must not be negative.",
            status_code=HTTPStatus.CONFLICT,
        )

    try:
        channel_state = self.raiden_api.get_channel(
            registry_address=registry_address,
            token_address=token_address,
            partner_address=partner_address,
        )
    except ChannelNotFound:
        return api_error(
            errors='Requested channel for token {} and partner {} not found'.format(
                to_checksum_address(token_address),
                to_checksum_address(partner_address),
            ),
            status_code=HTTPStatus.CONFLICT,
        )
    except InvalidAddress as e:
        return api_error(
            errors=str(e),
            status_code=HTTPStatus.CONFLICT,
        )

    if total_deposit is not None:
        result = self._deposit(registry_address, channel_state, total_deposit)
    elif state == CHANNEL_STATE_CLOSED:
        result = self._close(registry_address, channel_state)
    else:
        # should never happen, channel_state is validated in the schema
        result = api_error(
            errors='Provided invalid channel state {}'.format(state),
            status_code=HTTPStatus.BAD_REQUEST,
        )
    return result
def privkey_to_addr(privkey: str) -> str:
    """Derive the checksummed address for a hex-encoded private key."""
    private_key = PrivateKey.from_hex(remove_0x_prefix(privkey))
    return to_checksum_address(pubkey_to_addr(private_key.public_key))
def main(
        ctx,
        scenario_file,
        keystore_file,
        password,
        chains,
        data_path,
        auth,
        mailgun_api_key,
):
    """CLI entry point: either dispatch to a subcommand or run the given
    scenario file, driving an urwid UI and sending result mail notifications.
    """
    gevent.get_hub().exception_stream = DummyStream()

    is_subcommand = ctx.invoked_subcommand is not None
    if not is_subcommand and scenario_file is None:
        ctx.fail('No scenario definition file provided')

    if is_subcommand:
        log_file_name = (
            f'scenario-player-{ctx.invoked_subcommand}_{datetime.now():%Y-%m-%dT%H:%M:%S}.log'
        )
    else:
        scenario_basename = basename(scenario_file.name)
        log_file_name = (
            f'scenario-player_{scenario_basename}_{datetime.now():%Y-%m-%dT%H:%M:%S}.log'
        )
    click.secho(f'Writing log to {log_file_name}', fg='yellow')
    configure_logging(
        {'': 'INFO', 'raiden': 'DEBUG', 'scenario_player': 'DEBUG'},
        debug_log_file_name=log_file_name,
        _first_party_packages=_FIRST_PARTY_PACKAGES | frozenset(['scenario_player']),
        _debug_log_file_additional_level_filters={'scenario_player': 'DEBUG'},
    )

    log_buffer = None
    if sys.stdout.isatty() and not is_subcommand:
        # interactive run: route log records into the urwid UI walker
        log_buffer = UrwidLogWalker([])
        for handler in logging.getLogger('').handlers:
            if isinstance(handler, logging.StreamHandler):
                handler.terminator = ConcatenableNone()
                handler.formatter = NonStringifyingProcessorFormatter(
                    UrwidLogRenderer(),
                    foreign_pre_chain=LOGGING_PROCESSORS,
                )
                handler.stream = log_buffer
                break

    # group rpc urls per chain name
    chain_rpc_urls = defaultdict(list)
    for chain_name, chain_rpc_url in chains:
        chain_rpc_urls[chain_name].append(chain_rpc_url)

    with open(keystore_file, 'r') as keystore:
        account = Account(json.load(keystore), password, keystore_file)
        log.info("Using account", account=to_checksum_address(account.address))

    if is_subcommand:
        # subcommands only need the shared context, not a scenario run
        ctx.obj = dict(
            account=account,
            chain_rpc_urls=chain_rpc_urls,
            data_path=data_path,
        )
        return

    # Collect tasks
    collect_tasks(tasks)

    runner = ScenarioRunner(account, chain_rpc_urls, auth, Path(data_path), scenario_file)
    ui = ScenarioUI(runner, log_buffer, log_file_name)
    ui_greenlet = ui.run()
    success = False
    try:
        try:
            runner.run_scenario()
            success = True
            log.info('Run finished', result='success')
            send_notification_mail(
                runner.notification_email,
                f'Scenario successful {scenario_file.name}',
                'Success',
                mailgun_api_key,
            )
        except ScenarioAssertionError as ex:
            log.error('Run finished', result='assertion errors')
            send_notification_mail(
                runner.notification_email,
                f'Assertion mismatch in {scenario_file.name}',
                str(ex),
                mailgun_api_key,
            )
        except ScenarioError:
            log.exception('Run finished', result='scenario error')
            send_notification_mail(
                runner.notification_email,
                f'Invalid scenario {scenario_file.name}',
                traceback.format_exc(),
                mailgun_api_key,
            )
    except Exception:
        log.exception('Exception while running scenario')
        send_notification_mail(
            runner.notification_email,
            f'Error running scenario {scenario_file.name}',
            traceback.format_exc(),
            mailgun_api_key,
        )
    finally:
        try:
            # keep the UI up until the user quits, then tear everything down
            if sys.stdout.isatty():
                ui.set_success(success)
                log.warning('Press q to exit')
                while not ui_greenlet.dead:
                    gevent.sleep(1)
        finally:
            if runner.is_managed:
                runner.node_controller.stop()
            if not ui_greenlet.dead:
                ui_greenlet.kill(ExitMainLoop)
                ui_greenlet.join()
def checksum_address(self) -> str:
    """Return the wrapped account address in checksummed form."""
    return to_checksum_address(self.__account)
def generate_contract_address(address, nonce):
    """Compute the CREATE contract address for a sender and nonce.

    Implements the Ethereum rule ``keccak256(rlp([sender, nonce]))[-20:]``.

    Args:
        address: hex-encoded ('0x'-prefixed) sender address.
        nonce: the sender's transaction nonce.

    Returns:
        The checksummed hex address of the contract that would be created.
    """
    # bytes.fromhex replaces the roundabout bytes(bytearray.fromhex(...))
    sender = bytes.fromhex(address[2:])
    contract_address = keccak(rlp.encode([sender, nonce]))[-20:]
    return to_checksum_address('0x' + contract_address.hex())
def _handle_invite(self, room_id: _RoomID, state: dict):
    """ Join rooms invited by healthchecked partners """
    if self._stop_event.ready():
        return

    # extract the invite membership event addressed to us
    invite_events = [
        event
        for event in state['events']
        if event['type'] == 'm.room.member' and
        event['content'].get('membership') == 'invite' and
        event['state_key'] == self._user_id
    ]
    if not invite_events:
        return  # there should always be one and only one invite membership event for us
    invite_event = invite_events[0]
    sender = invite_event['sender']

    # extract the sender's own join event to read their displayname
    sender_join_events = [
        event
        for event in state['events']
        if event['type'] == 'm.room.member' and
        event['content'].get('membership') == 'join' and
        event['state_key'] == sender
    ]
    if not sender_join_events:
        return  # there should always be one and only one join membership event for the sender
    sender_join_event = sender_join_events[0]

    user = self._get_user(sender)
    user.displayname = sender_join_event['content'].get('displayname') or user.displayname
    peer_address = self._validate_userid_signature(user)
    if not peer_address:
        self.log.debug(
            'Got invited to a room by invalid signed user - ignoring',
            room_id=room_id,
            user=user,
        )
        return

    if peer_address not in self._address_to_userids:
        self.log.debug(
            'Got invited by a non-healthchecked user - ignoring',
            room_id=room_id,
            user=user,
        )
        return

    join_rules_events = [
        event
        for event in state['events']
        if event['type'] == 'm.room.join_rules'
    ]

    # room privacy as seen from the event
    private_room: bool = False
    if join_rules_events:
        join_rules_event = join_rules_events[0]
        private_room: bool = join_rules_event['content'].get('join_rule') == 'invite'

    # we join room and _set_room_id_for_address despite room privacy and requirements,
    # _get_room_ids_for_address will take care of returning only matching rooms and
    # _leave_unused_rooms will clear it in the future, if and when needed
    room = self._client.join_room(room_id)
    if not room.listeners:
        room.add_listener(self._handle_message, 'm.room.message')

    # at this point, room state is not populated yet, so we populate
    # 'invite_only' from event
    room.invite_only = private_room

    self._set_room_id_for_address(peer_address, room_id)

    self.log.debug(
        'Invited and joined a room',
        room_id=room_id,
        aliases=room.aliases,
        peer=to_checksum_address(peer_address),
    )
def _handle_message(self, room, event) -> bool:
    """ Handle text messages sent to listening rooms

    Returns True only when the event carried a valid, accepted protocol
    message; False for everything that was filtered out or failed to parse.
    """
    if (
            event['type'] != 'm.room.message' or
            event['content']['msgtype'] != 'm.text' or
            self._stop_event.ready()
    ):
        # Ignore non-messages and non-text messages
        return False

    sender_id = event['sender']

    if sender_id == self._user_id:
        # Ignore our own messages
        return False

    user = self._get_user(sender_id)
    peer_address = self._validate_userid_signature(user)
    if not peer_address:
        self.log.debug(
            'message from invalid user displayName signature',
            peer_user=user.user_id,
            room=room,
        )
        return False

    # don't proceed if user isn't healthchecked (yet)
    if peer_address not in self._address_to_userids:
        # user not start_health_check'ed
        self.log.debug(
            'message from non-healthchecked peer - ignoring',
            sender=user,
            sender_address=pex(peer_address),
            room=room,
        )
        return False

    # rooms we created and invited user, or were invited specifically by them
    room_ids = self._get_room_ids_for_address(peer_address)

    if room.room_id not in room_ids:
        # this should not happen, but is not fatal, as we may not know user yet
        if bool(room.invite_only) < self._private_rooms:
            reason = 'required private room, but received message in a public'
        else:
            reason = 'unknown room for user'
        self.log.debug(
            'received peer message in an invalid room - ignoring',
            peer_user=user.user_id,
            peer_address=pex(peer_address),
            room=room,
            reason=reason,
        )
        return False

    if not room_ids or room.room_id != room_ids[0]:
        # remember this room as the peer's primary comms room
        self.log.debug(
            'received message triggered new comms room for peer',
            peer_user=user.user_id,
            peer_address=pex(peer_address),
            known_user_rooms=room_ids,
            room=room,
        )
        self._set_room_id_for_address(peer_address, room.room_id)

    data = event['content']['body']
    if not isinstance(data, str):
        self.log.warning(
            'Received message body not a string',
            peer_user=user.user_id,
            peer_address=to_checksum_address(peer_address),
            room=room,
        )
        return False

    if data.startswith('0x'):
        # hex payload: binary-encoded protocol message
        try:
            message = message_from_bytes(decode_hex(data))
            if not message:
                raise InvalidProtocolMessage
        except (DecodeError, AssertionError) as ex:
            self.log.warning(
                "Can't parse message binary data",
                message_data=data,
                peer_address=pex(peer_address),
                _exc=ex,
            )
            return False
        except InvalidProtocolMessage as ex:
            self.log.warning(
                "Received message binary data is not a valid message",
                message_data=data,
                peer_address=pex(peer_address),
                _exc=ex,
            )
            return False
    else:
        # otherwise: JSON-encoded protocol message
        try:
            message_dict = json.loads(data)
            message = message_from_dict(message_dict)
        except (UnicodeDecodeError, json.JSONDecodeError) as ex:
            self.log.warning(
                "Can't parse message data JSON",
                message_data=data,
                peer_address=pex(peer_address),
                _exc=ex,
            )
            return False
        except InvalidProtocolMessage as ex:
            self.log.warning(
                "Message data JSON are not a valid message",
                message_data=data,
                peer_address=pex(peer_address),
                _exc=ex,
            )
            return False

    self.log.debug(
        'MESSAGE_DATA',
        data=data,
        sender=pex(peer_address),
        sender_user=user,
        room=room,
    )

    if isinstance(message, Ping):
        self.log.warning(
            'Not required Ping received',
            message=data,
        )
        return False
    elif isinstance(message, SignedMessage):
        # the signature must match the user we resolved from the displayname
        if message.sender != peer_address:
            self.log.warning(
                'Message not signed by sender!',
                message=message,
                signer=message.sender,
                peer_address=peer_address,
            )
            return False
        if isinstance(message, Delivered):
            self._receive_delivered(message)
        else:
            self._receive_message(message)
    else:
        self.log.warning(
            'Received Invalid message',
            message=data,
        )
        return False

    return True