def run_smoketests(raiden_service, test_config, debug=False):
    """ Test that the assembled raiden_service correctly reflects the
    configuration from the smoketest_genesis.

    Returns the formatted traceback string on failure, None on success.
    """
    try:
        chain = raiden_service.chain
        # Registry and token addresses must match the genesis contracts.
        assert (
            raiden_service.default_registry.address ==
            address_decoder(test_config['contracts']['registry_address'])
        )
        assert (
            raiden_service.default_registry.token_addresses() ==
            [address_decoder(test_config['contracts']['token_address'])]
        )
        # Exactly one discovery contract is expected, and it must be the
        # one from the test configuration.
        assert len(chain.address_to_discovery.keys()) == 1
        assert (
            list(chain.address_to_discovery.keys())[0] ==
            address_decoder(test_config['contracts']['discovery_address'])
        )
        discovery = list(chain.address_to_discovery.values())[0]
        assert discovery.endpoint_by_address(raiden_service.address) != TEST_ENDPOINT
        # A single token network with one open, funded channel to the
        # well-known test partner.
        assert len(raiden_service.token_to_channelgraph.values()) == 1
        graph = list(raiden_service.token_to_channelgraph.values())[0]
        channel = graph.partneraddress_to_channel[unhexlify(TEST_PARTNER_ADDRESS)]
        assert channel.can_transfer
        assert channel.contract_balance == channel.distributable == TEST_DEPOSIT_AMOUNT
        assert channel.state == CHANNEL_STATE_OPENED
        run_restapi_smoketests(raiden_service, test_config)
    except Exception:
        error = traceback.format_exc()
        if debug:
            pdb.post_mortem()
        return error
def run_smoketests(raiden_service, test_config, debug=False):
    """ Test that the assembled raiden_service correctly reflects the
    configuration from the smoketest_genesis.

    Returns the formatted traceback string on failure, None on success.
    """
    try:
        chain = raiden_service.chain
        # The deployed registry/token/discovery contracts must be the ones
        # recorded in the smoketest configuration.
        assert (raiden_service.default_registry.address == address_decoder(
            test_config['contracts']['registry_address']))
        assert (raiden_service.default_registry.token_addresses() == [
            address_decoder(test_config['contracts']['token_address'])
        ])
        assert len(chain.address_to_discovery.keys()) == 1
        assert (list(chain.address_to_discovery.keys())[0] == address_decoder(
            test_config['contracts']['discovery_address']))
        discovery = list(chain.address_to_discovery.values())[0]
        # The node must have registered a real endpoint (not the placeholder).
        assert discovery.endpoint_by_address(
            raiden_service.address) != TEST_ENDPOINT
        # One token network, one open channel to the test partner with the
        # expected deposit.
        assert len(raiden_service.token_to_channelgraph.values()) == 1
        graph = list(raiden_service.token_to_channelgraph.values())[0]
        channel = graph.partneraddress_to_channel[unhexlify(
            TEST_PARTNER_ADDRESS)]
        assert channel.can_transfer
        assert channel.contract_balance == channel.distributable == TEST_DEPOSIT_AMOUNT
        assert channel.state == CHANNEL_STATE_OPENED
        run_restapi_smoketests(raiden_service, test_config)
    except Exception:
        error = traceback.format_exc()
        if debug:
            pdb.post_mortem()
        return error
def decode_event_to_internal2(event):
    """ Enforce the binary encoding of address for internal usage.

    Every hex-encoded address listed for the event type is copied out of
    `args` into the top level of the event data, decoded to binary.
    """
    data = event.event_data

    # Event type -> names of the address fields that need decoding.
    address_fields = {
        EVENT_TOKEN_ADDED2: ('token_network_address', 'token_address'),
        EVENT_CHANNEL_NEW2: ('participant1', 'participant2'),
        EVENT_CHANNEL_NEW_BALANCE2: ('participant',),
        EVENT_CHANNEL_WITHDRAW: ('participant',),
        EVENT_CHANNEL_UNLOCK: ('participant',),
        EVENT_TRANSFER_UPDATED: ('closing_participant',),
        EVENT_CHANNEL_CLOSED: ('closing_participant',),
    }

    for field in address_fields.get(data['event'], ()):
        data[field] = address_decoder(data['args'][field])

    return event
def decode_event(event):
    """ Enforce the binary encoding of address for internal usage.

    Decodes, in place, every hex-encoded address field of the event data.
    """
    data = event.event_data
    assert isinstance(data['_event_type'], bytes)

    # Event type -> address fields that must be converted to binary.
    fields_by_event = {
        b'TokenAdded': ('channel_manager_address', 'token_address'),
        b'ChannelNew': ('participant1', 'participant2', 'netting_channel'),
        b'ChannelNewBalance': ('token_address', 'participant'),
        b'ChannelClosed': ('closing_address',),
        b'ChannelSecretRevealed': ('receiver_address',),
    }

    for field in fields_by_event.get(data['_event_type'], ()):
        data[field] = address_decoder(data[field])

    return event
def specify_tokenswap_input(self, tokenswap_input, target_address, identifier):
    """We don't test the actual tokenswap but only that the input makes it
    to the backend in the expected format"""
    swap = dict(tokenswap_input)
    # The backend expects binary addresses; the inputs arrive hex-encoded.
    for token_field in ('sending_token', 'receiving_token'):
        swap[token_field] = address_decoder(swap[token_field])
    swap['identifier'] = identifier
    swap['target_address'] = address_decoder(target_address)
    self.tokenswap_input = swap
def event_to_state_change(event):  # pylint: disable=too-many-return-statements
    """ Map a blockchain event to the corresponding internal state change,
    decoding hex-encoded addresses to binary. Returns None for unknown
    event types.
    """
    contract_address = event.originating_contract
    event = event.event_data

    # Note: All addresses inside the event_data must be decoded.
    if event['_event_type'] == b'TokenAdded':
        result = ContractReceiveTokenAdded(
            contract_address,
            address_decoder(event['token_address']),
            address_decoder(event['channel_manager_address']),
        )
    elif event['_event_type'] == b'ChannelNew':
        result = ContractReceiveNewChannel(
            contract_address,
            address_decoder(event['netting_channel']),
            address_decoder(event['participant1']),
            address_decoder(event['participant2']),
            event['settle_timeout'],
        )
    elif event['_event_type'] == b'ChannelNewBalance':
        result = ContractReceiveBalance(
            contract_address,
            address_decoder(event['token_address']),
            address_decoder(event['participant']),
            event['balance'],
            event['block_number'],
        )
    elif event['_event_type'] == b'ChannelClosed':
        result = ContractReceiveClosed(
            contract_address,
            address_decoder(event['closing_address']),
            event['block_number'],
        )
    elif event['_event_type'] == b'ChannelSettled':
        result = ContractReceiveSettled(
            contract_address,
            event['block_number'],
        )
    elif event['_event_type'] == b'ChannelSecretRevealed':
        result = ContractReceiveWithdraw(
            contract_address,
            event['secret'],
            address_decoder(event['receiver_address']),
        )
    else:
        # Unknown event types are deliberately ignored by the caller.
        result = None

    return result
def event_to_state_change(event):  # pylint: disable=too-many-return-statements
    """ Map a blockchain event to the corresponding internal state change,
    decoding hex-encoded addresses to binary. Returns None for unknown
    event types.

    NOTE(review): this variant compares `_event_type` against str values
    while a sibling variant in this file uses bytes — presumably different
    revisions; verify which matches the event source in use.
    """
    contract_address = event.originating_contract
    event = event.event_data

    # Note: All addresses inside the event_data must be decoded.
    if event['_event_type'] == 'TokenAdded':
        result = ContractReceiveTokenAdded(
            contract_address,
            address_decoder(event['token_address']),
            address_decoder(event['channel_manager_address']),
        )
    elif event['_event_type'] == 'ChannelNew':
        result = ContractReceiveNewChannel(
            contract_address,
            address_decoder(event['netting_channel']),
            address_decoder(event['participant1']),
            address_decoder(event['participant2']),
            event['settle_timeout'],
        )
    elif event['_event_type'] == 'ChannelNewBalance':
        result = ContractReceiveBalance(
            contract_address,
            address_decoder(event['token_address']),
            address_decoder(event['participant']),
            event['balance'],
            event['block_number'],
        )
    elif event['_event_type'] == 'ChannelClosed':
        result = ContractReceiveClosed(
            contract_address,
            address_decoder(event['closing_address']),
            event['block_number'],
        )
    elif event['_event_type'] == 'ChannelSettled':
        result = ContractReceiveSettled(
            contract_address,
            event['block_number'],
        )
    elif event['_event_type'] == 'ChannelSecretRevealed':
        result = ContractReceiveWithdraw(
            contract_address,
            event['secret'],
            address_decoder(event['receiver_address']),
        )
    else:
        # Unknown event types are deliberately ignored by the caller.
        result = None

    return result
def from_dict(cls, data):
    """ Rebuild a message instance from its dict representation,
    decoding hex fields back to binary.
    """
    assert data['type'] == cls.__name__
    fields = dict(
        message_identifier=data['message_identifier'],
        payment_identifier=data['payment_identifier'],
        secret=data_decoder(data['secret']),
        nonce=data['nonce'],
        token_network_address=address_decoder(data['token_network_address']),
        channel=address_decoder(data['channel']),
        transferred_amount=data['transferred_amount'],
        locked_amount=data['locked_amount'],
        locksroot=data_decoder(data['locksroot']),
    )
    instance = cls(**fields)
    # The signature is attached after construction, matching serialization.
    instance.signature = data_decoder(data['signature'])
    return instance
def _get_peer_address_from_room(self, room_alias):
    """ Extract the partner's binary address from a room alias, or None if
    the alias does not match the expected pattern or is ambiguous.
    """
    match = self._room_alias_re.match(room_alias)
    if not match:
        return None
    # Decode both peers, then drop our own address; exactly one peer must
    # remain for the alias to identify a partner.
    peers = {address_decoder(encoded) for encoded in match.group('peer1', 'peer2')}
    peers.discard(self._raiden_service.address)
    if len(peers) == 1:
        return peers.pop()
def token_addresses(request, token_amount, number_of_tokens, blockchain_services, cached_genesis, register_tokens):
    """ Fixture that yields `number_of_tokens` ERC20 token addresses, where
    the `token_amount` (per token) is distributed among the addresses behind
    `blockchain_services` and potentially pre-registered with the raiden
    Registry.

    Args:
        token_amount (int): the overall number of units minted per token
        number_of_tokens (int): the number of token instances
        register_tokens (bool): controls if tokens will be registered with raiden Registry
    """
    if cached_genesis:
        # The tokens were already deployed in the cached genesis; just
        # decode the recorded addresses.
        return [
            address_decoder(encoded)
            for encoded in cached_genesis['config']['tokenAddresses']
        ]

    participants = [
        privatekey_to_address(service.private_key)
        for service in blockchain_services.blockchain_services
    ]
    return _token_addresses(
        token_amount,
        number_of_tokens,
        blockchain_services.deploy_service,
        blockchain_services.deploy_registry,
        participants,
        register_tokens,
    )
def _query_filter(self, function: str) -> List[Dict]:
    """ Query the installed filter via `function` and normalize the raw
    geth log entries into decoded dicts.
    """
    filter_changes = self.client.call(function, self.filter_id_raw)

    # geth could return None
    if filter_changes is None:
        return []

    events = []
    for log_event in filter_changes:
        raw_block = log_event.get('blockNumber')
        events.append({
            'topics': [topic_decoder(topic) for topic in log_event['topics']],
            'data': data_decoder(log_event['data']),
            'address': address_decoder(log_event['address']),
            # Pending logs have no block number; geth encodes it as hex.
            'block_number': int(raw_block, 0) if raw_block else 0,
        })
    return events
def blockchain_services(
        request,
        deploy_key,
        deploy_client,
        private_keys,
        poll_timeout,
        blockchain_backend,  # required: starts the geth subprocesses
        blockchain_rpc_ports,
        blockchain_type,
        tester_blockgas_limit,
        cached_genesis):
    """ Fixture assembling the per-node blockchain service bundle. Only the
    'geth' cluster type is supported.
    """
    registry_address = None
    if cached_genesis and 'defaultRegistryAddress' in cached_genesis['config']:
        registry_address = address_decoder(
            cached_genesis['config']['defaultRegistryAddress'],
        )

    if blockchain_type != 'geth':
        raise ValueError('unknown cluster type {}'.format(blockchain_type))

    # _jsonrpc_services will handle a None registry_address
    return _jsonrpc_services(
        deploy_key,
        deploy_client,
        private_keys,
        request.config.option.verbose,
        poll_timeout,
        registry_address,
    )
def _query_filter(self, function: str) -> List[Dict]:
    """ Call `function` with the raw filter id and decode each returned log
    entry (topics, data, address, block number) for internal use.
    """
    filter_changes = self.client.call(function, self.filter_id_raw)

    # geth could return None
    if filter_changes is None:
        return []

    def _decode(log_event):
        block_number = log_event.get('blockNumber')
        return {
            'topics': [topic_decoder(topic) for topic in log_event['topics']],
            'data': data_decoder(log_event['data']),
            'address': address_decoder(log_event['address']),
            # Missing for pending logs; otherwise hex-encoded.
            'block_number': int(block_number, 0) if block_number else 0,
        }

    return [_decode(log_event) for log_event in filter_changes]
def blockchain_services(
        request,
        deploy_key,
        deploy_client,
        private_keys,
        poll_timeout,
        blockchain_backend,  # needed so the geth subprocesses are started
        blockchain_rpc_ports,
        blockchain_type,
        tester_blockgas_limit,
        cached_genesis):
    """ Fixture building the JSON-RPC blockchain services for a geth
    cluster, reusing the registry from the cached genesis when present.
    """
    registry_address = None
    if cached_genesis and 'defaultRegistryAddress' in cached_genesis['config']:
        registry_address = address_decoder(
            cached_genesis['config']['defaultRegistryAddress'],
        )

    if blockchain_type == 'geth':
        # _jsonrpc_services tolerates registry_address being None
        return _jsonrpc_services(
            deploy_key,
            deploy_client,
            private_keys,
            request.config.option.verbose,
            poll_timeout,
            registry_address,
        )

    raise ValueError('unknown cluster type {}'.format(blockchain_type))
def new_netting_channel(self, other_peer, settle_timeout):
    """ Open a new netting channel with `other_peer` and return its binary
    address.

    Raises:
        ValueError: If other_peer is not a valid address or settle_timeout
            is out of range.
        SamePeerAddress: If other_peer is our own address.
        DuplicatedChannelError: If the channel-creation transaction threw,
            which here is taken to mean the channel already exists.
    """
    if not isaddress(other_peer):
        raise ValueError('The other_peer must be a valid address')

    invalid_timeout = (
        settle_timeout < NETTINGCHANNEL_SETTLE_TIMEOUT_MIN or
        settle_timeout > NETTINGCHANNEL_SETTLE_TIMEOUT_MAX
    )
    if invalid_timeout:
        raise ValueError('settle_timeout must be in range [{}, {}]'.format(
            NETTINGCHANNEL_SETTLE_TIMEOUT_MIN,
            NETTINGCHANNEL_SETTLE_TIMEOUT_MAX,
        ))

    local_address = privatekey_to_address(self.client.privkey)
    if local_address == other_peer:
        raise SamePeerAddress('The other peer must not have the same address as the client.')

    transaction_hash = estimate_and_transact(
        self.proxy,
        'newChannel',
        self.startgas,
        self.gasprice,
        other_peer,
        settle_timeout,
    )

    # Wait for the transaction to be mined before querying the result.
    self.client.poll(unhexlify(transaction_hash), timeout=self.poll_timeout)

    if check_transaction_threw(self.client, transaction_hash):
        raise DuplicatedChannelError('Duplicated channel')

    netting_channel_results_encoded = self.proxy.call(
        'getChannelWith',
        other_peer,
        startgas=self.startgas,
    )

    # getChannelWith returns the channel address directly.
    netting_channel_address_encoded = netting_channel_results_encoded

    if not netting_channel_address_encoded:
        log.error(
            'netting_channel_address failed',
            peer1=pex(local_address),
            peer2=pex(other_peer),
        )
        raise RuntimeError('netting_channel_address failed')

    netting_channel_address_bin = address_decoder(netting_channel_address_encoded)

    if log.isEnabledFor(logging.INFO):
        log.info(
            'new_netting_channel called',
            peer1=pex(local_address),
            peer2=pex(other_peer),
            netting_channel=pex(netting_channel_address_bin),
        )

    return netting_channel_address_bin
def from_dict(cls, data):
    """ Rebuild a Processed message from its dict representation. """
    assert data['type'] == cls.__name__
    instance = cls(
        sender=address_decoder(data['sender']),
        message_identifier=data['message_identifier'],
    )
    # The signature is restored after construction, mirroring to_dict.
    instance.signature = data_decoder(data['signature'])
    return instance
def api_url_for(api_backend, endpoint, **kwargs):
    """ Build the REST URL for `endpoint`, converting any hex-encoded
    address kwarg to binary since url_for() expects binary addresses.
    """
    api_server, _ = api_backend
    converted = {
        key: address_decoder(val)
        if isinstance(val, basestring) and val.startswith('0x')
        else val
        for key, val in kwargs.iteritems()
    }
    with api_server.flask_app.app_context():
        return url_for('v1_resources.{}'.format(endpoint), **converted)
def token_address(self):
    """ Returns the type of token that can be transferred by the channel.

    Raises:
        AddressWithoutCode: If the channel was settled prior to the call.
    """
    return address_decoder(self._call_and_check_result('tokenAddress'))
def api_url_for(api_backend, endpoint, **kwargs):
    """ Build the REST URL for `endpoint`; hex-encoded address kwargs are
    decoded to binary because url_for() expects binary addresses.
    """
    api_server, _ = api_backend
    converted = {
        name: address_decoder(value)
        if isinstance(value, str) and value.startswith('0x')
        else value
        for name, value in kwargs.items()
    }
    with api_server.flask_app.app_context():
        return url_for('v1_resources.{}'.format(endpoint), **converted)
def closing_address(self):
    """Returns the address of the participant that called close, or None if
    the channel is not closed.
    """
    self._check_exists()
    closer = self.proxy.closingAddress()
    # A None result means the channel is still open.
    return address_decoder(closer) if closer is not None else None
def channels_by_participant(self, participant_address):  # pylint: disable=invalid-name
    """ Return a list of channel address that `participant_address` is a
    participant.
    """
    encoded_addresses = self.proxy.nettingContractsByAddress.call(
        participant_address,
        startgas=self.startgas,
    )
    return [address_decoder(entry) for entry in encoded_addresses]
def channels_by_participant(self, participant_address: Address) -> List[Address]:
    """ Return a list of channel address that `participant_address` is a
    participant.
    """
    # web3 contract calls require checksummed addresses.
    caller = {'from': to_checksum_address(self.client.sender)}
    encoded_addresses = self.proxy.contract.functions.nettingContractsByAddress(
        to_checksum_address(participant_address),
    ).call(caller)
    return [address_decoder(entry) for entry in encoded_addresses]
def channels_by_participant(self, participant_address: Address) -> List[Address]:
    """ Return a list of channel address that `participant_address` is a
    participant.
    """
    encoded_addresses = self.proxy.call(
        'nettingContractsByAddress',
        participant_address,
    )
    return [address_decoder(entry) for entry in encoded_addresses]
def get_filter_events(
        jsonrpc_client: JSONRPCClient,
        contract_address: address,
        topics: Optional[List[int]],
        from_block: Union[str, int] = 0,
        to_block: Union[str, int] = 'latest') -> List[Dict]:
    """ Get filter.

    This handles bad encoding from geth rpc.
    """
    json_data = {
        # geth wants block numbers hex-encoded.
        'fromBlock': hex(from_block) if isinstance(from_block, int) else from_block,
        'toBlock': hex(to_block) if isinstance(to_block, int) else to_block,
        'address': address_encoder(normalize_address(contract_address)),
    }

    if topics is not None:
        json_data['topics'] = [topic_encoder(topic) for topic in topics]

    filter_changes = jsonrpc_client.call('eth_getLogs', json_data)

    # geth could return None
    if filter_changes is None:
        return []

    events = []
    for log_event in filter_changes:
        raw_block = log_event.get('blockNumber')
        events.append({
            'topics': [topic_decoder(entry) for entry in log_event['topics']],
            'data': data_decoder(log_event['data']),
            'address': address_decoder(log_event['address']),
            # Pending logs carry no block number.
            'block_number': int(raw_block, 0) if raw_block else 0,
        })
    return events
def run_smoketests(raiden_service, test_config, debug=False):
    """ Test that the assembled raiden_service correctly reflects the
    configuration from the smoketest_genesis.

    Returns the formatted traceback string on failure, None on success.
    """
    try:
        chain = raiden_service.chain
        # The deployed contracts must match the smoketest configuration.
        assert (raiden_service.default_registry.address == address_decoder(
            test_config['contracts']['registry_address']))
        assert (raiden_service.default_registry.token_addresses() == [
            address_decoder(test_config['contracts']['token_address'])
        ])
        assert len(chain.address_to_discovery.keys()) == 1
        assert (list(chain.address_to_discovery.keys())[0] == address_decoder(
            test_config['contracts']['discovery_address']))
        discovery = list(chain.address_to_discovery.values())[0]
        # The node must have registered a real endpoint (not the placeholder).
        assert discovery.endpoint_by_address(
            raiden_service.address) != TEST_ENDPOINT

        # Exactly one token network must exist in the node's state.
        token_networks = views.get_token_network_addresses_for(
            views.state_from_raiden(raiden_service),
            raiden_service.default_registry.address,
        )
        assert len(token_networks) == 1

        channel_state = views.get_channelstate_for(
            views.state_from_raiden(raiden_service),
            raiden_service.default_registry.address,
            token_networks[0],
            unhexlify(TEST_PARTNER_ADDRESS),
        )

        # The whole deposit must still be distributable, i.e. no transfers
        # have happened yet and the channel is open.
        distributable = channel.get_distributable(
            channel_state.our_state,
            channel_state.partner_state,
        )
        assert distributable == TEST_DEPOSIT_AMOUNT
        assert distributable == channel_state.our_state.contract_balance
        assert channel.get_status(channel_state) == CHANNEL_STATE_OPENED

        run_restapi_smoketests(raiden_service, test_config)
    except Exception:
        error = traceback.format_exc()
        if debug:
            pdb.post_mortem()
        return error
def token_to_tokennetwork(self, token_address: typing.TokenAddress):
    """ Return the token network address for the given token or None if
    there is no correspoding address.
    """
    network_address = self.proxy.contract.functions.token_to_token_networks(
        token_address).call()
    # The contract returns an empty value when no network is registered.
    if network_address == b'':
        return None
    return address_decoder(network_address)
def channels_addresses(self) -> List[Tuple[Address, Address]]:
    """ Return the channel participant pairs.

    The smart contract returns a flat list where every two consecutive
    items form one channel's participant pair.
    """
    flat_encoded = self.proxy.call('getChannelsParticipants')
    flat_decoded = [address_decoder(entry) for entry in flat_encoded]

    # [a, b, c, d] -> [(a, b), (c, d)]
    pairwise = iter(flat_decoded)
    return list(zip(pairwise, pairwise))
def detail(self):
    """ Return a dict with the channel details from the point of view of
    this node: our/partner address and balance plus the settle timeout.

    FIXME: 'our_address' is only needed for the pure python mock
    implementation.

    Raises:
        ValueError: If this node is not a participant of the channel.
    """
    self._check_exists()
    data = self.proxy.addressAndBalance()
    settle_timeout = self.proxy.settleTimeout()
    our_address = privatekey_to_address(self.private_key)

    # Decode each participant address once instead of re-decoding it for
    # every comparison and result field.
    first_address = address_decoder(data[0])
    second_address = address_decoder(data[2])

    if first_address == our_address:
        return {
            'our_address': first_address,
            'our_balance': data[1],
            'partner_address': second_address,
            'partner_balance': data[3],
            'settle_timeout': settle_timeout,
        }

    if second_address == our_address:
        return {
            'our_address': second_address,
            'our_balance': data[3],
            'partner_address': first_address,
            'partner_balance': data[1],
            'settle_timeout': settle_timeout,
        }

    raise ValueError('We [{}] are not a participant of the given channel ({}, {})'.format(
        pex(our_address),
        data[0],
        data[2],
    ))
def detail(self):
    """ Returns a dictionary with the details of the netting channel.

    Raises:
        AddressWithoutCode: If the channel was settled prior to the call.
        ValueError: If this node is not a participant of the channel.
    """
    data = self._call_and_check_result('addressAndBalance')
    settle_timeout = self.settle_timeout()
    our_address = privatekey_to_address(self.client.privkey)

    # Decode each participant address once instead of re-decoding it for
    # every comparison and result field.
    first_address = address_decoder(data[0])
    second_address = address_decoder(data[2])

    if first_address == our_address:
        return {
            'our_address': first_address,
            'our_balance': data[1],
            'partner_address': second_address,
            'partner_balance': data[3],
            'settle_timeout': settle_timeout,
        }

    if second_address == our_address:
        return {
            'our_address': second_address,
            'our_balance': data[3],
            'partner_address': first_address,
            'partner_balance': data[1],
            'settle_timeout': settle_timeout,
        }

    raise ValueError('We [{}] are not a participant of the given channel ({}, {})'.format(
        pex(our_address),
        data[0],
        data[2],
    ))
def detail(self):
    """ Return a dict with the channel details (addresses, balances and
    settle timeout) oriented to this node's point of view.

    FIXME: 'our_address' is only needed for the pure python mock
    implementation.

    Raises:
        ValueError: If this node is not a participant of the channel.
    """
    self._check_exists()
    data = self.proxy.addressAndBalance()
    settle_timeout = self.proxy.settleTimeout()
    our_address = privatekey_to_address(self.private_key)

    # Decode each participant address once instead of re-decoding it for
    # every comparison and result field.
    first_address = address_decoder(data[0])
    second_address = address_decoder(data[2])

    if first_address == our_address:
        return {
            'our_address': first_address,
            'our_balance': data[1],
            'partner_address': second_address,
            'partner_balance': data[3],
            'settle_timeout': settle_timeout,
        }

    if second_address == our_address:
        return {
            'our_address': second_address,
            'our_balance': data[3],
            'partner_address': first_address,
            'partner_balance': data[1],
            'settle_timeout': settle_timeout,
        }

    raise ValueError(
        'We [{}] are not a participant of the given channel ({}, {})'.
        format(
            pex(our_address),
            data[0],
            data[2],
        ))
def channels_addresses(self):
    """ Return the channel participant pairs.

    The smart contract returns a flat list; consecutive pairs of entries
    form one channel's participants.
    """
    flat_encoded = self.proxy.getChannelsParticipants.call(
        startgas=self.startgas)
    flat_decoded = [address_decoder(entry) for entry in flat_encoded]

    # [a, b, c, d] -> [(a, b), (c, d)]
    pairwise = iter(flat_decoded)
    return zip(pairwise, pairwise)
def token_address(self):
    """ Returns the type of token that can be transferred by the channel.

    Raises:
        AddressWithoutCode: If the channel was settled prior to the call.
    """
    encoded_address = self.proxy.tokenAddress.call()
    if encoded_address == '':
        # An empty result may mean the contract is gone; _check_exists
        # raises the more specific error in that case.
        self._check_exists()
        raise RuntimeError('token address returned empty')
    return address_decoder(encoded_address)
def closing_address(self):
    """ Returns the address of the closer, if the channel is closed, None
    otherwise.

    Raises:
        AddressWithoutCode: If the channel was settled prior to the call.
    """
    closer = self.proxy.call('closingAddress')
    # An empty/falsy result means the channel has not been closed.
    return address_decoder(closer) if closer else None
def manager_address_by_token(self, token_address):
    """ Return the channel manager address for the given token or None if
    there is no correspoding address.
    """
    manager_address = self.proxy.contract.functions.channelManagerByToken(
        to_checksum_address(token_address),
    ).call()

    if manager_address != b'':
        return address_decoder(manager_address)

    # An empty result either means "no manager registered" or the registry
    # contract itself is missing; the latter raises here.
    check_address_has_code(self.client, self.address)
    return None
def get_filter_events(jsonrpc_client, contract_address, topics, from_block=None, to_block=None):
    """ Get filter.

    This handles bad encoding from geth rpc.
    """
    json_data = {
        # geth wants hex-encoded block numbers; default to genesis/latest.
        'fromBlock': hex(from_block) if isinstance(from_block, int) else (from_block or hex(0)),
        'toBlock': hex(to_block) if isinstance(to_block, int) else (to_block or 'latest'),
        'address': address_encoder(normalize_address(contract_address)),
    }

    if topics is not None:
        json_data['topics'] = [topic_encoder(topic) for topic in topics]

    filter_changes = jsonrpc_client.call('eth_getLogs', json_data)

    # geth could return None
    if filter_changes is None:
        return []

    events = []
    for log_event in filter_changes:
        raw_block = log_event.get('blockNumber')
        events.append({
            'topics': [topic_decoder(entry) for entry in log_event['topics']],
            'data': data_decoder(log_event['data']),
            'address': address_decoder(log_event['address']),
            # Pending logs carry no block number.
            'block_number': int(raw_block, 0) if raw_block else 0,
        })
    return events
def get_filter_events(jsonrpc_client: JSONRPCClient,
                      contract_address: Address,
                      topics: Optional[List[int]],
                      from_block: Union[str, int] = 0,
                      to_block: Union[str, int] = 'latest') -> List[Dict]:
    """ Get filter.

    This handles bad encoding from geth rpc.
    """
    # geth expects block numbers hex-encoded.
    if isinstance(from_block, int):
        from_block = hex(from_block)
    if isinstance(to_block, int):
        to_block = hex(to_block)

    json_data = {
        'fromBlock': from_block,
        'toBlock': to_block,
        'address': address_encoder(to_canonical_address(contract_address)),
    }

    if topics is not None:
        json_data['topics'] = [topic_encoder(topic) for topic in topics]

    filter_changes = jsonrpc_client.rpccall_with_retry('eth_getLogs', json_data)

    # geth could return None
    if filter_changes is None:
        return []

    result = []
    for log_event in filter_changes:
        address = address_decoder(log_event['address'])
        data = data_decoder(log_event['data'])
        topics = [topic_decoder(topic) for topic in log_event['topics']]
        block_number = log_event.get('blockNumber')
        # Pending logs have no block number; otherwise it is hex-encoded.
        if not block_number:
            block_number = 0
        else:
            block_number = int(block_number, 0)

        result.append({
            'topics': topics,
            'data': data,
            'address': address,
            'block_number': block_number,
        })

    return result
def manager_address_by_token(self, token_address):
    """ Return the channel manager address for the given token or None if
    there is no correspoding address.
    """
    manager_address = self.proxy.channelManagerByToken.call(
        token_address,
        startgas=self.startgas,
    )

    if manager_address != '':
        return address_decoder(manager_address)

    # Distinguish "no manager registered" from "registry contract missing";
    # the latter raises inside check_address_has_code.
    check_address_has_code(self.client, self.address)
    return None
def manager_address_by_token(self, token_address):
    """ Return the channel manager address for the given token or None if
    there is no correspoding address.
    """
    manager_address = self.proxy.call(
        'channelManagerByToken',
        token_address,
        startgas=self.startgas,
    )

    if manager_address != b'':
        return address_decoder(manager_address)

    # Empty result: either no manager is registered, or the registry
    # contract itself is gone (check_address_has_code raises then).
    check_address_has_code(self.client, self.address)
    return None
def endpoint_discovery_services(blockchain_services, cached_genesis):
    """ Fixture building one ContractDiscovery per node, reusing the
    discovery contract from the cached genesis or deploying a fresh one.
    """
    discovery_address = None
    if cached_genesis and 'defaultDiscoveryAddress' in cached_genesis['config']:
        discovery_address = address_decoder(
            cached_genesis['config']['defaultDiscoveryAddress'],
        )

    if discovery_address is None:
        # No cached deployment available, deploy the contract now.
        discovery_address = blockchain_services.deploy_service.deploy_contract(
            'EndpointRegistry',
            get_contract_path('EndpointRegistry.sol'),
        )

    services = []
    for chain in blockchain_services.blockchain_services:
        services.append(
            ContractDiscovery(chain.node_address, chain.discovery(discovery_address)),
        )
    return services
def new_netting_channel(self, other_peer, settle_timeout):
    """ Creates a new netting contract between peer1 and peer2.

    Raises:
        ValueError: If other_peer is not a valid address, or if
            settle_timeout is out of range.
        SamePeerAddress: If other_peer is our own address.
        DuplicatedChannelError: If the creation transaction failed, which
            is taken to mean the channel already exists.
    """
    if not isaddress(other_peer):
        raise ValueError('The other_peer must be a valid address')

    local_address = privatekey_to_address(self.private_key)
    if local_address == other_peer:
        raise SamePeerAddress('The other peer must not have the same address as the client.')

    invalid_timeout = (
        settle_timeout < NETTINGCHANNEL_SETTLE_TIMEOUT_MIN or
        settle_timeout > NETTINGCHANNEL_SETTLE_TIMEOUT_MAX
    )
    if invalid_timeout:
        raise ValueError('settle_timeout must be in range [{}, {}]'.format(
            NETTINGCHANNEL_SETTLE_TIMEOUT_MIN,
            NETTINGCHANNEL_SETTLE_TIMEOUT_MAX
        ))

    try:
        netting_channel_address_hex = self.proxy.newChannel(
            other_peer,
            settle_timeout,
            sender=self.private_key
        )
    except TransactionFailed:
        raise DuplicatedChannelError('Duplicated channel')

    # Mine a block so the channel creation is confirmed in the tester chain.
    self.tester_chain.mine(number_of_blocks=1)

    channel = NettingChannelTesterMock(
        self.tester_chain,
        self.private_key,
        netting_channel_address_hex,
    )

    return address_decoder(channel.address)
def token_addresses(
        request,
        token_amount,
        number_of_tokens,
        blockchain_services,
        cached_genesis,
        register_tokens):
    """ Fixture that yields `number_of_tokens` ERC20 token addresses, where
    the `token_amount` (per token) is distributed among the addresses behind
    `blockchain_services` and potentially pre-registered with the raiden
    Registry.

    Args:
        token_amount (int): the overall number of units minted per token
        number_of_tokens (int): the number of token instances
        register_tokens (bool): controls if tokens will be registered with raiden Registry
    """
    if cached_genesis:
        # Tokens were already deployed for the cached genesis.
        return [
            address_decoder(encoded)
            for encoded in cached_genesis['config']['tokenAddresses']
        ]

    participants = [
        privatekey_to_address(service.private_key)
        for service in blockchain_services.blockchain_services
    ]
    return _token_addresses(
        token_amount,
        number_of_tokens,
        blockchain_services.deploy_service,
        blockchain_services.deploy_registry,
        participants,
        register_tokens,
    )
def token_addresses(self):
    """ Return the binary addresses of all tokens registered with the
    registry contract.
    """
    encoded_addresses = self.proxy.call('tokenAddresses', startgas=self.startgas)
    return [address_decoder(entry) for entry in encoded_addresses]
def convert(self, value, param, ctx):
    """ Click type conversion: decode a hex-encoded address to binary,
    failing the parameter parse on invalid input.
    """
    try:
        decoded = address_decoder(value)
    except TypeError:
        self.fail('Please specify a valid hex-encoded address.')
    else:
        return decoded
def app(
        address,
        keystore_path,
        gas_price,
        eth_rpc_endpoint,
        registry_contract_address,
        discovery_contract_address,
        listen_address,
        rpccorsdomain,
        mapped_socket,
        logging,
        logfile,
        log_json,
        max_unresponsive_time,
        send_ping_time,
        api_address,
        rpc,
        sync_check,
        console,
        password_file,
        web_ui,
        datadir,
        eth_client_communication,
        nat):
    # pylint: disable=too-many-locals,too-many-branches,too-many-statements,unused-argument
    """Assemble and return a configured raiden App from the CLI options.

    Builds the runtime config dict, unlocks the account, connects to the
    eth JSON-RPC node, verifies funds for discovery registration, and wires
    up the registry/discovery proxies and the per-account database path.
    Several parameters (rpccorsdomain, logging, logfile, log_json,
    send_ping_time, eth_client_communication, nat) are accepted for CLI
    compatibility but not read here — hence the ``unused-argument`` pragma.
    """
    # imported lazily, presumably to keep CLI startup cheap — TODO confirm
    from raiden.app import App
    from raiden.network.blockchain_service import BlockChainService

    (listen_host, listen_port) = split_endpoint(listen_address)
    (api_host, api_port) = split_endpoint(api_address)

    config = App.DEFAULT_CONFIG.copy()
    config['host'] = listen_host
    config['port'] = listen_port
    config['console'] = console
    config['rpc'] = rpc
    # the web UI is only served when the REST API itself is enabled
    config['web_ui'] = rpc and web_ui
    config['api_host'] = api_host
    config['api_port'] = api_port

    if mapped_socket:
        # NAT-traversed socket: advertise the externally visible ip/port
        config['socket'] = mapped_socket.socket
        config['external_ip'] = mapped_socket.external_ip
        config['external_port'] = mapped_socket.external_port
    else:
        config['socket'] = None
        config['external_ip'] = listen_host
        config['external_port'] = listen_port

    config['protocol']['nat_keepalive_retries'] = DEFAULT_NAT_KEEPALIVE_RETRIES
    # spread the allowed unresponsive window over the keepalive retries
    timeout = max_unresponsive_time / DEFAULT_NAT_KEEPALIVE_RETRIES
    config['protocol']['nat_keepalive_timeout'] = timeout

    # prompt_account may select a different account when none was given,
    # so re-derive the binary address from the hex it returns
    address_hex = address_encoder(address) if address else None
    address_hex, privatekey_bin = prompt_account(address_hex, keystore_path, password_file)
    address = address_decoder(address_hex)

    privatekey_hex = hexlify(privatekey_bin)
    config['privatekey_hex'] = privatekey_hex

    endpoint = eth_rpc_endpoint

    # Fallback to default port if only an IP address is given
    rpc_port = 8545

    if eth_rpc_endpoint.startswith('http://'):
        endpoint = eth_rpc_endpoint[len('http://'):]
        rpc_port = 80
    elif eth_rpc_endpoint.startswith('https://'):
        endpoint = eth_rpc_endpoint[len('https://'):]
        rpc_port = 443

    if ':' not in endpoint:  # no port was given in url
        rpc_host = endpoint
    else:
        rpc_host, rpc_port = split_endpoint(endpoint)

    rpc_client = JSONRPCClient(
        rpc_host,
        rpc_port,
        privatekey_bin,
    )

    # this assumes the eth node is already online
    if not check_json_rpc(rpc_client):
        sys.exit(1)

    blockchain_service = BlockChainService(
        privatekey_bin,
        rpc_client,
        GAS_LIMIT,
        gas_price,
    )

    if sync_check:
        check_synced(blockchain_service)

    # block until the account can afford the discovery registration tx,
    # re-prompting the user to top up the account in between checks
    discovery_tx_cost = gas_price * DISCOVERY_REGISTRATION_GAS
    while True:
        balance = blockchain_service.client.balance(address)
        if discovery_tx_cost <= balance:
            break
        print(
            'Account has insufficient funds for discovery registration.\n'
            'Needed: {} ETH\n'
            'Available: {} ETH.\n'
            'Please deposit additional funds into this account.'
            .format(discovery_tx_cost / denoms.ether, balance / denoms.ether)
        )
        if not click.confirm('Try again?'):
            sys.exit(1)

    registry = blockchain_service.registry(
        registry_contract_address,
    )

    discovery = ContractDiscovery(
        blockchain_service.node_address,
        blockchain_service.discovery(discovery_contract_address)
    )

    if datadir is None:
        # default database directory
        raiden_directory = os.path.join(os.path.expanduser('~'), '.raiden')
    else:
        raiden_directory = datadir

    if not os.path.exists(raiden_directory):
        os.makedirs(raiden_directory)
    # per-account subdirectory keyed by the first 8 hex chars of the address
    user_db_dir = os.path.join(raiden_directory, address_hex[:8])
    if not os.path.exists(user_db_dir):
        os.makedirs(user_db_dir)
    database_path = os.path.join(user_db_dir, 'log.db')
    config['database_path'] = database_path

    return App(
        config,
        blockchain_service,
        registry,
        discovery,
    )
def manager_addresses(self):
    """Return the channel manager addresses from the registry, decoded to binary."""
    return [
        address_decoder(raw_address)
        for raw_address in self.registry_proxy.channelManagerAddresses()
    ]
def token_addresses(self):
    """Return the registered token addresses, decoded to binary."""
    return [
        address_decoder(raw_address)
        for raw_address in self.registry_proxy.tokenAddresses()
    ]
def specify_channel_for_events(self, channel_address):
    """Record the (decoded) channel address used when querying events.

    The address is not part of the event payload itself; this mock stores
    it so tests can verify the channel address survives the REST api
    round-trip intact.
    """
    decoded_address = address_decoder(channel_address)
    self.channel_for_events = decoded_address
def token_address(self):
    """Return the contract's token address, decoded to binary."""
    return address_decoder(self.proxy.tokenAddress())
def specify_token_for_channelnew(self, token_address):
    """Record the (decoded) token address used when querying channelnew events.

    The address is not part of the event payload itself; this mock stores
    it so tests can verify the token address survives the REST api
    round-trip intact.
    """
    decoded_address = address_decoder(token_address)
    self.token_for_channelnew = decoded_address
def manager_addresses(self):
    """Return every channel manager address known to the contract, decoded to binary."""
    raw_addresses = self.proxy.call('channelManagerAddresses', startgas=self.startgas)
    return list(map(address_decoder, raw_addresses))
def token_address(self):
    """Return the contract's token address, decoded from hex to binary."""
    return address_decoder(self.proxy.tokenAddress())
def deploy_solidity_contract(
        self,  # pylint: disable=too-many-locals
        sender,
        contract_name,
        all_contracts,
        libraries,
        constructor_parameters,
        contract_path=None,
        timeout=None,
        gasprice=GAS_PRICE):
    """ Deploy a solidity contract.

    Resolves and deploys any library dependencies first (in build order),
    substituting each deployed library address into the dependent
    bytecode, then deploys the target contract itself and returns a proxy
    for it. NOTE: the entries of ``all_contracts`` are mutated in place
    (``bin_hex``/``bin`` are overwritten with the symbol-resolved
    bytecode).

    Args:
        sender (address): the sender address
        contract_name (str): the name of the contract to compile
        all_contracts (dict): the json dictionary containing the result of compiling a file
        libraries (list): A list of libraries to use in deployment
        constructor_parameters (tuple): A tuple of arguments to pass to the constructor
        contract_path (str): If we are dealing with solc >= v0.4.9 then the path
            to the contract is a required argument to extract the contract data
            from the `all_contracts` dict.
        timeout (int): Amount of time to poll the chain to confirm deployment
        gasprice: The gasprice to provide for the transaction

    Raises:
        ValueError: If the contract cannot be found in ``all_contracts``.
        RuntimeError: If a deployed address ends up with no code (likely
            out-of-gas).
    """
    # solc >= 0.4.9 keys contracts as "<filename>:<name>"; try the bare
    # name first and fall back to the path-qualified key
    if contract_name in all_contracts:
        contract_key = contract_name
    elif contract_path is not None:
        _, filename = os.path.split(contract_path)
        contract_key = filename + ':' + contract_name
        if contract_key not in all_contracts:
            raise ValueError('Unknown contract {}'.format(contract_name))
    else:
        raise ValueError(
            'Unknown contract {} and no contract_path given'.format(contract_name)
        )

    # copy so library addresses deployed below don't leak into the caller's list
    libraries = dict(libraries)
    contract = all_contracts[contract_key]
    contract_interface = contract['abi']
    symbols = solidity_unresolved_symbols(contract['bin_hex'])

    if symbols:
        available_symbols = list(map(solidity_library_symbol, all_contracts.keys()))

        unknown_symbols = set(symbols) - set(available_symbols)
        if unknown_symbols:
            msg = 'Cannot deploy contract, known symbols {}, unresolved symbols {}.'.format(
                available_symbols,
                unknown_symbols,
            )
            raise Exception(msg)

        # deploy library dependencies bottom-up so each one's address is
        # available when resolving the contracts that link against it
        dependencies = deploy_dependencies_symbols(all_contracts)
        deployment_order = dependencies_order_of_build(contract_key, dependencies)

        deployment_order.pop()  # remove `contract_name` from the list
        log.debug('Deploying dependencies: {}'.format(str(deployment_order)))

        for deploy_contract in deployment_order:
            dependency_contract = all_contracts[deploy_contract]

            hex_bytecode = solidity_resolve_symbols(dependency_contract['bin_hex'], libraries)
            bytecode = unhexlify(hex_bytecode)

            dependency_contract['bin_hex'] = hex_bytecode
            dependency_contract['bin'] = bytecode

            # empty `to` means contract creation
            transaction_hash_hex = self.send_transaction(
                sender,
                to=b'',
                data=bytecode,
                gasprice=gasprice,
            )
            transaction_hash = unhexlify(transaction_hash_hex)

            self.poll(transaction_hash, timeout=timeout)
            receipt = self.eth_getTransactionReceipt(transaction_hash)

            contract_address = receipt['contractAddress']
            # remove the hexadecimal prefix 0x from the address
            contract_address = contract_address[2:]

            libraries[deploy_contract] = contract_address

            # a receipt with an address but no code means the deployment
            # transaction ran out of gas
            deployed_code = self.eth_getCode(address_decoder(contract_address))

            if len(deployed_code) == 0:
                raise RuntimeError('Contract address has no code, check gas usage.')

        # re-resolve the target contract's bytecode now that all library
        # addresses are known
        hex_bytecode = solidity_resolve_symbols(contract['bin_hex'], libraries)
        bytecode = unhexlify(hex_bytecode)

        contract['bin_hex'] = hex_bytecode
        contract['bin'] = bytecode

    if constructor_parameters:
        # constructor arguments are ABI-encoded and appended to the bytecode
        translator = ContractTranslator(contract_interface)
        parameters = translator.encode_constructor_arguments(constructor_parameters)
        bytecode = contract['bin'] + parameters
    else:
        bytecode = contract['bin']

    transaction_hash_hex = self.send_transaction(
        sender,
        to=b'',
        data=bytecode,
        gasprice=gasprice,
    )
    transaction_hash = unhexlify(transaction_hash_hex)

    self.poll(transaction_hash, timeout=timeout)
    receipt = self.eth_getTransactionReceipt(transaction_hash)
    contract_address = receipt['contractAddress']

    deployed_code = self.eth_getCode(address_decoder(contract_address))

    if len(deployed_code) == 0:
        raise RuntimeError(
            'Deployment of {} failed. Contract address has no code, check gas usage.'.format(
                contract_name,
            )
        )

    return self.new_contract_proxy(
        contract_interface,
        contract_address,
    )
def channels_by_participant(self, peer_address):
    """Return the netting channel addresses involving ``peer_address``, decoded to binary."""
    raw_addresses = self.proxy.nettingContractsByAddress(peer_address)
    return list(map(address_decoder, raw_addresses))