def geth_wait_and_check(privatekeys, rpc_ports):
    """ Wait until the geth cluster is ready. """
    rpc_port = rpc_ports[0]
    first_address = address_encoder(privatekey_to_address(privatekeys[0]))
    probe_client = JSONRPCClient(host="0.0.0.0", port=rpc_port, privkey=privatekeys[0], print_communication=False)

    # first make sure the JSON-RPC interface itself is answering
    attempts_left = 5
    rpc_ready = False
    while attempts_left > 0 and not rpc_ready:
        try:
            probe_client.call("eth_getBalance", first_address, "latest")
            rpc_ready = True
        except ConnectionError:
            gevent.sleep(0.5)
            attempts_left -= 1

    if not rpc_ready:
        raise ValueError("geth didnt start the jsonrpc interface")

    # then wait for every test account to be funded
    for key in set(privatekeys):
        account = address_encoder(privatekey_to_address(key))
        account_client = JSONRPCClient(host="0.0.0.0", port=rpc_port, privkey=key, print_communication=False)

        attempts_left = 10
        balance = "0x0"
        while attempts_left > 0 and balance == "0x0":
            balance = account_client.call("eth_getBalance", account, "latest")
            gevent.sleep(1)
            attempts_left -= 1

        if balance == "0x0":
            raise ValueError("account is with a balance of 0")
def main(private_key, eth_amount, targets_file, port, host):
    """ Fund every address listed in ``targets_file`` with ``eth_amount`` eth.

    Args:
        private_key: key of the funding account.
        eth_amount (int): amount of eth (not wei) to send to each target.
        targets_file: iterable of lines, one target address per line.
        port / host: JSON-RPC endpoint of the Ethereum node.
    """
    client = JSONRPCClient(
        host=host,
        port=port,
        privkey=private_key,
        print_communication=False,
    )
    patch_send_transaction(client)
    patch_send_message(client)

    targets = [t.strip() for t in targets_file]
    balance = client.balance(client.sender)

    balance_needed = len(targets) * eth_amount
    if balance_needed * WEI_TO_ETH > balance:
        print(
            "Not enough balance to fund {} accounts with {} eth each. Need {}, have {}"
            .format(len(targets), eth_amount, balance_needed, balance / WEI_TO_ETH))
        # bug fix: previously execution fell through and sent the
        # transactions anyway even though the balance was insufficient
        return

    print("Sending {} eth to:".format(eth_amount))
    for target in targets:
        print(" - {}".format(target))
        client.send_transaction(sender=client.sender, to=target, value=eth_amount * WEI_TO_ETH)
def discovery_blockchain(request, private_keys, geth_cluster, poll_timeout):
    """ Deploy the EndpointRegistry contract and return a ContractDiscovery
    bound to it, together with the deployer address.
    """
    gevent.sleep(2)

    deploy_key = private_keys[0]
    deploy_address = privtoaddr(deploy_key)

    client = JSONRPCClient(
        host='0.0.0.0',
        privkey=deploy_key,
        print_communication=False,
    )
    patch_send_transaction(client)

    # deploy discovery contract
    contract_path = get_contract_path('EndpointRegistry.sol')
    compiled_contracts = compile_file(contract_path, libraries=dict())
    contract_proxy = client.deploy_solidity_contract(
        deploy_address,
        'EndpointRegistry',
        compiled_contracts,
        dict(),
        tuple(),
        timeout=poll_timeout,
    )

    # initialize and return ContractDiscovery object
    from raiden.network.discovery import ContractDiscovery
    return ContractDiscovery(client, contract_proxy.address), deploy_address
def _jsonrpc_services(private_keys, verbose, poll_timeout):
    """ Deploy the Registry contract and build one BlockChainService per key. """
    # chatty RPC logging only at very high verbosity levels
    print_communication = verbose > 7

    deploy_key = private_keys[0]
    deploy_address = privatekey_to_address(deploy_key)
    client = JSONRPCClient(
        host='0.0.0.0',
        privkey=deploy_key,
        print_communication=print_communication,
    )
    patch_send_transaction(client)

    registry_path = get_contract_path('Registry.sol')
    registry_contracts = compile_file(registry_path, libraries=dict())

    log.info('Deploying registry contract')
    registry_proxy = client.deploy_solidity_contract(
        deploy_address,
        'Registry',
        registry_contracts,
        dict(),
        tuple(),
        timeout=poll_timeout,
    )
    registry_address = registry_proxy.address

    # one blockchain service per private key, all sharing the registry
    return [
        BlockChainService(privkey, registry_address)
        for privkey in private_keys
    ]
def __init__(self, host_port, privkey, address, registry_address):
    """ Args:
        host_port (Tuple[(str, int)]): two-tuple with the host and port of
            the JSON-RPC server.
    """
    # load the ABIs for the three contracts this service talks to
    channel_manager_abi = get_abi_from_file(get_contract_path('channelManagerContract.sol'))
    netting_contract_abi = get_abi_from_file(get_contract_path('nettingChannelContract.sol'))
    registry_abi = get_abi_from_file(get_contract_path('registry.sol'))

    client = JSONRPCClient(
        sender=address,
        privkey=privkey,
    )

    self.asset_managerproxy = dict()
    self.contract_by_address = dict()

    self.host_port = host_port
    self.privkey = privkey
    self.client = client
    self.registry_address = registry_address
    self.registry_proxy = client.new_abi_contract(registry_abi, registry_address)

    self.channel_manager_abi = channel_manager_abi
    self.netting_contract_abi = netting_contract_abi
    self.registry_abi = registry_abi

    # fail fast when pointed at an address without deployed code
    if not self._code_exists(registry_address):
        raise ValueError('Registry {} does not exists'.format(registry_address))
class Client():
    """ JSON-RPC helper that fetches blocks/transactions and decodes
    token-transfer and wallet-execute calldata.
    """

    def __init__(self, tokens):
        # NOTE(review): ``host`` and ``port`` are not parameters — presumably
        # module-level globals; confirm where they are defined.
        self.client = JSONRPCClient(
            privkey=None,
            host=host,
            port=port,
            print_communication=False,
        )
        self.wallet_translator = ContractTranslator(wallet_abi)
        # token registry used to map contract addresses to token names
        self.tokens = tokens

    def get_block(self, num):
        # full_transactions=True so the block includes the tx objects
        return self.client.call(
            'eth_getBlockByNumber',
            quantity_encoder(num),
            True
        )

    def get_transaction(self, hash):
        return self.client.eth_getTransactionByHash(hash)

    def decode_token_transfer(self, txdata, to_address):
        """ Decode an ERC20 ``transfer`` call; return the transferred value,
        or None when ``txdata`` is not a transfer.
        """
        # 'a9059cbb' is the 4-byte selector of transfer(address,uint256)
        if len(txdata) < 8 or txdata[:8] != 'a9059cbb':
            return None

        # get rid of signature
        txdata = txdata[8:]

        # here we got ourselves a token transfer
        # transfer(address _to, uint256 _value)
        token_name = self.tokens.address_is_token(to_address)
        if token_name is None:
            print('WARNING: Unknown token {} transferred'.format(to_address))
            token_name = 'UNKNOWN'

        # each ABI argument is a 32-byte word; decode them individually
        hexdata = txdata.decode('hex')
        transfer_to = decode_abi(['address'], hexdata[:32])[0]
        transfer_value = decode_abi(['uint256'], hexdata[32:])[0]

        if address_is_whitehat(transfer_to) is None:
            print('WARNING: {} token sent to non-whitehat address?'.format(token_name))

        return transfer_value

    def decode_execute(self, txdata):
        """ Decode a wallet ``execute`` call; return (sent_to, amount_in_wei,
        token_value) where token_value is None for plain ether sends.
        """
        # get rid of signature and 0x
        txdata = txdata[10:]

        # unfortunately the pyethapp way does not work
        # fndata = c.wallet_translator.function_data['execute']
        # return decode_abi(fndata['encode_types'], txdata.decode('hex'))

        # ... but decoding each arg individually does work
        sent_to = decode_abi(['address'], txdata.decode('hex')[:32])[0]
        amount_in_wei = decode_abi(['uint256'], txdata.decode('hex')[32:64])[0]

        # the embedded calldata (possibly a token transfer) starts at hex
        # offset 256 — NOTE(review): offset assumes the execute() dynamic
        # argument layout; confirm against the wallet ABI
        token_value = self.decode_token_transfer(txdata[256:], sent_to)

        return sent_to, amount_in_wei, token_value
def _jsonrpc_services(
        deploy_key,
        private_keys,
        verbose,
        poll_timeout,
        rpc_port,
        registry_address=None):
    """ Create the deploy BlockChainService plus one per private key,
    deploying the Registry contract first when no address is given.
    """
    host = '0.0.0.0'
    deploy_client = JSONRPCClient(
        host=host,
        port=rpc_port,
        privkey=deploy_key,
        print_communication=verbose > 6,
    )

    # we cannot instantiate BlockChainService without a registry, so first
    # deploy it directly with a JSONRPCClient
    if registry_address is None:
        deployer_address = privatekey_to_address(deploy_key)
        patch_send_transaction(deploy_client)
        patch_send_message(deploy_client)

        registry_path = get_contract_path('Registry.sol')

        log.info('Deploying registry contract')
        registry_proxy = deploy_client.deploy_solidity_contract(
            deployer_address,
            'Registry',
            compile_file(registry_path, libraries=dict()),
            dict(),
            tuple(),
            contract_path=registry_path,
            gasprice=default_gasprice,
            timeout=poll_timeout,
        )
        registry_address = registry_proxy.address

    deploy_blockchain = BlockChainService(
        deploy_key,
        registry_address,
        host,
        deploy_client.port,
    )

    node_blockchains = [
        BlockChainService(privkey, registry_address, host, deploy_client.port)
        for privkey in private_keys
    ]

    return BlockchainServices(deploy_blockchain, node_blockchains)
def connect_client(self):
    """ Retry until the JSON-RPC endpoint accepts connections. """
    while True:
        try:
            self.client = JSONRPCClient(port=self.port, print_communication=False)
            # cheap call just to prove the server is up
            self.client.call('web3_clientVersion')
        except ConnectionError:
            time.sleep(0.5)
        else:
            break
def __init__(self, tokens):
    """ Wrap a read-only JSON-RPC client plus a wallet ABI translator. """
    # NOTE(review): host/port are not parameters — presumably module globals
    self.client = JSONRPCClient(
        host=host,
        port=port,
        privkey=None,
        print_communication=False,
    )
    self.wallet_translator = ContractTranslator(wallet_abi)
    self.tokens = tokens
def _jsonrpc_services(
        deploy_key,
        private_keys,
        verbose,
        poll_timeout,
        rpc_port,
        registry_address=None):
    """ Create the deploy BlockChainService plus one per private key,
    deploying the Registry contract first when no address is given.
    """
    host = '0.0.0.0'
    deploy_client = JSONRPCClient(
        host=host,
        port=rpc_port,
        privkey=deploy_key,
    )

    # we cannot instantiate BlockChainService without a registry, so first
    # deploy it directly with a JSONRPCClient
    if registry_address is None:
        deployer_address = privatekey_to_address(deploy_key)
        patch_send_transaction(deploy_client)

        registry_path = get_contract_path('Registry.sol')

        log.info('Deploying registry contract')
        registry_proxy = deploy_client.deploy_solidity_contract(
            deployer_address,
            'Registry',
            compile_file(registry_path, libraries=dict()),
            dict(),
            tuple(),
            timeout=poll_timeout,
        )
        registry_address = registry_proxy.address

    deploy_blockchain = BlockChainService(
        deploy_key,
        registry_address,
        host,
        deploy_client.port,
    )

    node_blockchains = [
        BlockChainService(privkey, registry_address, host, deploy_client.port)
        for privkey in private_keys
    ]

    return BlockchainServices(deploy_blockchain, node_blockchains)
def __init__(
        self,
        privatekey_bin,
        registry_address,
        host,
        port,
        poll_timeout=DEFAULT_POLL_TIMEOUT,
        **kwargs):
    """ Wire up a patched JSON-RPC client and resolve the default registry. """
    # per-address proxy caches
    self.address_token = dict()
    self.address_discovery = dict()
    self.address_manager = dict()
    self.address_contract = dict()
    self.address_registry = dict()
    self.token_manager = dict()

    # if this object becomes a problem for testing consider using one of
    # the mock blockchains
    client = JSONRPCClient(
        privkey=privatekey_bin,
        host=host,
        port=port,
        print_communication=kwargs.get('print_communication', False),
    )
    patch_send_transaction(client)
    patch_send_message(client)

    self.client = client
    self.private_key = privatekey_bin
    self.node_address = privatekey_to_address(privatekey_bin)
    self.poll_timeout = poll_timeout
    self.default_registry = self.registry(registry_address)
def main(num_clients, num_txs=1): import time clients = [JSONRPCClient(4000 + i) for i in range(num_clients)] nonce = clients[0].nonce(clients[0].coinbase) for i in range(num_txs): do_tx(clients[0], nonce=nonce + i) print 'tx', i
def __init__(
        self,
        privatekey_bin,
        registry_address,
        host,
        port,
        poll_timeout=DEFAULT_POLL_TIMEOUT,
        **kwargs):
    """ Wire up a patched JSON-RPC client and resolve the default registry. """
    # per-address proxy caches
    self.address_to_token = dict()
    self.address_to_discovery = dict()
    self.address_to_channelmanager = dict()
    self.address_to_nettingchannel = dict()
    self.address_to_registry = dict()
    self.token_to_channelmanager = dict()

    client = JSONRPCClient(
        privkey=privatekey_bin,
        host=host,
        port=port,
        print_communication=kwargs.get('print_communication', False),
    )
    patch_send_transaction(client)
    patch_send_message(client)

    self.client = client
    self.private_key = privatekey_bin
    self.node_address = privatekey_to_address(privatekey_bin)
    self.poll_timeout = poll_timeout
    self.default_registry = self.registry(registry_address)
def _jsonrpc_services(
        deploy_key,
        deploy_client,
        private_keys,
        verbose,
        poll_timeout,
        registry_address=None):
    """ Build the deploy BlockChainService plus one patched client/service
    per private key, deploying the Registry first when needed.
    """
    # we cannot instantiate BlockChainService without a registry, so first
    # deploy it directly with a JSONRPCClient
    if registry_address is None:
        deployer_address = privatekey_to_address(deploy_key)

        registry_path = get_contract_path('Registry.sol')

        log.info('Deploying registry contract')
        registry_proxy = deploy_client.deploy_solidity_contract(
            deployer_address,
            'Registry',
            compile_file(registry_path, libraries=dict()),
            dict(),
            tuple(),
            contract_path=registry_path,
            gasprice=default_gasprice,
            timeout=poll_timeout,
        )
        registry_address = registry_proxy.address

    # at this point the blockchain must be running, this will overwrite the
    # method so even if the client is patched twice, it should work fine
    patch_send_transaction(deploy_client)

    deploy_blockchain = BlockChainService(
        deploy_key,
        registry_address,
        deploy_client,
    )

    host = '0.0.0.0'
    node_blockchains = []
    for privkey in private_keys:
        node_client = JSONRPCClient(
            privkey=privkey,
            host=host,
            port=deploy_client.port,
            print_communication=False,
        )
        patch_send_transaction(node_client)
        patch_send_message(node_client)

        node_blockchains.append(BlockChainService(
            privkey,
            registry_address,
            node_client,
        ))

    return BlockchainServices(deploy_blockchain, node_blockchains)
def connect(host="127.0.0.1", port=8545, use_ssl=False): """Create a jsonrpcclient instance, using the 'zero-privatekey'. """ client = JSONRPCClient(host, port, privkey="1" * 64) patch_send_transaction(client) return client
def hydrachain_wait(privatekeys, number_of_nodes):
    """ Wait until the hydrchain cluster is ready.

    Polls ``net_peerCount`` until it equals ``number_of_nodes`` or the retry
    budget is exhausted.
    """
    jsonrpc_client = JSONRPCClient(
        host='0.0.0.0',
        privkey=privatekeys[0],
        print_communication=False,
    )

    # bug fix: the first response must also be run through quantity_decoder,
    # otherwise a hex string is compared against an int and never matches
    quantity = quantity_decoder(jsonrpc_client.call('net_peerCount'))
    tries = 5
    while quantity != number_of_nodes and tries > 0:
        gevent.sleep(0.5)
        quantity = quantity_decoder(jsonrpc_client.call('net_peerCount'))
        # bug fix: tries was never decremented, making this loop infinite
        # when the cluster never converged
        tries -= 1

    if quantity != number_of_nodes:
        raise Exception('hydrachain is taking to long to initialize')
def test_set_host():
    """ The host passed to the constructor must end up in the transport
    endpoint, displacing the default. """
    configured_host = '1.1.1.1'
    fallback_host = '127.0.0.1'

    client = JSONRPCClient(configured_host)

    expected_endpoint = 'http://{}:{}'.format(configured_host, client.port)
    unexpected_endpoint = 'http://{}:{}'.format(fallback_host, client.port)

    assert client.transport.endpoint == expected_endpoint
    assert client.transport.endpoint != unexpected_endpoint
def app(privatekey, eth_rpc_endpoint, registry_contract_address,
        discovery_contract_address, listen_address, external_listen_address,
        logging, logfile):
    """ Build and return a Raiden App connected to the given Ethereum node
    and registered with the on-chain discovery contract.
    """
    slogging.configure(logging, log_file=logfile)

    if not external_listen_address:
        # notify('if you are behind a NAT, you should set
        # `external_listen_address` and configure port forwarding on your router')
        external_listen_address = listen_address

    # config_file = args.config_file
    (listen_host, listen_port) = split_endpoint(listen_address)

    config = App.default_config.copy()
    config['host'] = listen_host
    config['port'] = listen_port
    config['privatekey_hex'] = privatekey

    endpoint = eth_rpc_endpoint
    use_ssl = False

    # bug fix: rpc_port was unbound (NameError) when the endpoint had neither
    # an http/https scheme nor an explicit port; default to the standard
    # Ethereum JSON-RPC port
    rpc_port = 8545

    if eth_rpc_endpoint.startswith("http://"):
        endpoint = eth_rpc_endpoint[len("http://"):]
        rpc_port = 80
    elif eth_rpc_endpoint.startswith("https://"):
        endpoint = eth_rpc_endpoint[len("https://"):]
        use_ssl = True
        rpc_port = 443

    if ':' not in endpoint:  # no port was given in url
        rpc_host = endpoint
    else:
        rpc_host, rpc_port = split_endpoint(endpoint)

    jsonrpc_client = JSONRPCClient(
        privkey=privatekey,
        host=rpc_host,
        port=rpc_port,
        print_communication=False,
        use_ssl=use_ssl,
    )

    blockchain_service = BlockChainService(
        privatekey.decode('hex'),
        registry_contract_address.decode('hex'),
    )

    discovery = ContractDiscovery(
        jsonrpc_client,
        discovery_contract_address.decode('hex'))  # FIXME: double encoding

    app = App(config, blockchain_service, discovery)

    discovery.register(app.raiden.address, *split_endpoint(external_listen_address))

    app.raiden.register_registry(blockchain_service.default_registry)

    return app
def tps_run(host, port, config, privatekey, rpc_server, registry_address,
            token_address, transfer_amount, parallel):
    # pylint: disable=too-many-locals,too-many-arguments
    """ Run the throughput test: connect to the chain, find ourselves in the
    config, then spawn ``parallel`` greenlets doing random transfers.
    """
    ourprivkey, _ = hostport_to_privkeyaddr(host, port)

    # bug fix: the RPC endpoint is parsed into its own names instead of
    # clobbering ``host``/``port`` — the original rebound them, so the
    # self-lookup loop below compared against the RPC host and the node
    # could never find itself in the configuration file
    rpc_connection = rpc_server.split(':')
    rpc_host, rpc_port = (rpc_connection[0], int(rpc_connection[1]))

    with codecs.open(config, encoding='utf8') as handler:
        config = yaml.load(handler)

    config['host'] = host
    config['port'] = port
    config['privkey'] = ourprivkey

    rpc_client = JSONRPCClient(
        privkey=privatekey,
        host=rpc_host,
        port=rpc_port,
        print_communication=False,
    )

    blockchain_service = BlockChainService(
        privatekey,
        registry_address,
        rpc_client,
    )

    discovery = Discovery()
    found_ouraddress = False
    for node in config['nodes']:
        _, address = hostport_to_privkeyaddr(node['host'], node['port'])

        discovery.register(address, node['host'], node['port'])

        if host == node['host'] and str(port) == node['port']:
            found_ouraddress = True

    if not found_ouraddress:
        print('We are not registered in the configuration file')
        sys.exit(1)

    app = App(config, blockchain_service, discovery)

    for _ in range(parallel):
        gevent.spawn(random_transfer, app, token_address, transfer_amount)

    # wait for interrupt
    event = gevent.event.Event()
    gevent.signal(signal.SIGQUIT, event.set)
    gevent.signal(signal.SIGTERM, event.set)
    gevent.signal(signal.SIGINT, event.set)
    event.wait()

    app.stop()
def app(privatekey, eth_rpc_endpoint, registry_contract_address,
        discovery_contract_address, listen_address, external_listen_address,
        logging):
    """ Build a Raiden App, register it with on-chain discovery, start an
    interactive console, and block until a termination signal arrives.
    """
    slogging.configure(logging)

    if not external_listen_address:
        # notify('if you are behind a NAT, you should set
        # `external_listen_address` and configure port forwarding on your router')
        external_listen_address = listen_address

    # config_file = args.config_file
    rpc_connection = split_endpoint(eth_rpc_endpoint)
    (listen_host, listen_port) = split_endpoint(listen_address)

    config = App.default_config.copy()
    config['host'] = listen_host
    config['port'] = listen_port
    config['privatekey_hex'] = privatekey

    jsonrpc_client = JSONRPCClient(
        privkey=privatekey,
        host=rpc_connection[0],
        port=rpc_connection[1],
        print_communication=False,
    )

    # NOTE(review): unlike sibling variants this passes the client (not the
    # raw private key) as the first BlockChainService argument — confirm the
    # constructor signature in use here
    blockchain_service = BlockChainService(
        jsonrpc_client,
        registry_contract_address.decode('hex'),
    )

    discovery = ContractDiscovery(
        jsonrpc_client,
        discovery_contract_address.decode('hex'))  # FIXME: double encoding

    app = App(config, blockchain_service, discovery)

    # announce our externally reachable endpoint on-chain
    discovery.register(app.raiden.address, *split_endpoint(external_listen_address))

    app.raiden.register_registry(blockchain_service.default_registry)

    # TODO:
    # - Ask for confirmation to quit if there are any locked transfers that did
    # not timeout.
    console = Console(app)
    console.start()

    # wait for interrupt
    event = gevent.event.Event()
    gevent.signal(signal.SIGQUIT, event.set)
    gevent.signal(signal.SIGTERM, event.set)
    gevent.signal(signal.SIGINT, event.set)
    event.wait()

    app.stop()
class BuiltinDriver(base.Base):
    """ Transaction driver that polls an Ethereum node for pending
    transactions over JSON-RPC.
    """

    @property
    def type(self):
        # driver category identifier consumed by the surrounding framework
        return 'tx_driver'

    def __init__(self):
        self.logger = logger
        # connection settings come from the global FLAGS object
        self.rpc_cli = JSONRPCClient(host=FLAGS.rpc_host, port=FLAGS.rpc_port, print_communication=False)
        super(BuiltinDriver, self).__init__()

    def pending_txs(self):
        """ Poll forever, printing each batch of new pending-tx filter
        changes; sleeps between empty polls. Never returns.
        """
        # install a server-side filter for new pending transactions
        self.filter_id = self.rpc_cli.call(constant.METHOD_NEW_PENDING_TX_FILTER)
        while True:
            res = self.rpc_cli.call(constant.METHOD_GET_FILTER_CHANGES, self.filter_id)
            if res:
                print res
            else:
                # nothing new — back off before the next poll
                time.sleep(FLAGS.poll_interval)
def geth_wait_and_check(privatekeys):
    """ Wait until the geth cluster is ready. """
    first_address = address_encoder(privtoaddr(privatekeys[0]))
    probe_client = JSONRPCClient(
        host='0.0.0.0',
        privkey=privatekeys[0],
        print_communication=False,
    )

    # first make sure the JSON-RPC interface itself is answering
    attempts_left = 5
    rpc_ready = False
    while attempts_left > 0 and not rpc_ready:
        try:
            probe_client.call('eth_getBalance', first_address, 'latest')
            rpc_ready = True
        except ConnectionError:
            gevent.sleep(0.5)
            attempts_left -= 1

    if not rpc_ready:
        raise ValueError('geth didnt start the jsonrpc interface')

    # then wait for every test account to be funded
    for key in set(privatekeys):
        account = address_encoder(privtoaddr(key))
        account_client = JSONRPCClient(
            host='0.0.0.0',
            privkey=key,
            print_communication=False,
        )

        attempts_left = 10
        balance = '0x0'
        while attempts_left > 0 and balance == '0x0':
            balance = account_client.call('eth_getBalance', account, 'latest')
            gevent.sleep(1)
            attempts_left -= 1

        if balance == '0x0':
            raise ValueError('account is with a balance of 0')
def connect(host='127.0.0.1', port=8545, use_ssl=False):
    """Create a jsonrpcclient instance, using the 'zero-privatekey'. """
    zero_privkey = '1' * 64
    client = JSONRPCClient(
        host,
        port,
        privkey=zero_privkey,
        print_communication=False,
        use_ssl=use_ssl,
    )
    # install the patched send helpers on the fresh client
    patch_send_transaction(client)
    patch_send_message(client)
    return client
def __init__(self, host_port, privkey, address, registry_address):
    """ Args:
        host_port (Tuple[(str, int)]): two-tuple with the host and port of
            the JSON-RPC server.
    """
    # load the ABIs for the three contracts this service talks to
    channel_manager_abi = get_abi_from_file(
        get_contract_path('channelManagerContract.sol'))
    netting_contract_abi = get_abi_from_file(
        get_contract_path('nettingChannelContract.sol'))
    registry_abi = get_abi_from_file(get_contract_path('registry.sol'))

    client = JSONRPCClient(
        sender=address,
        privkey=privkey,
    )
    registry_proxy = client.new_abi_contract(registry_abi, registry_address)

    self.asset_managerproxy = dict()
    self.contract_by_address = dict()

    self.host_port = host_port
    self.privkey = privkey
    self.client = client
    self.registry_address = registry_address
    self.registry_proxy = registry_proxy

    self.channel_manager_abi = channel_manager_abi
    self.netting_contract_abi = netting_contract_abi
    self.registry_abi = registry_abi

    # fail fast when pointed at an address without deployed code
    if not self._code_exists(registry_address):
        raise ValueError(
            'Registry {} does not exists'.format(registry_address))
def start(self):
    """ Start the app and seed the account service with three deterministic
    test accounts (high-balance, low-balance, locked) backed by an
    in-process RPC transport.
    """
    super(TestApp, self).start()
    log.debug('adding test accounts')
    # high balance account
    self.services.accounts.add_account(Account.new('', tester.keys[0]), store=False)
    # low balance account
    self.services.accounts.add_account(Account.new('', tester.keys[1]), store=False)
    # locked account
    locked_account = Account.new('', tester.keys[2])
    locked_account.lock()
    self.services.accounts.add_account(locked_account, store=False)
    self.privkey = None
    # sanity check: the three accounts map exactly to the first three
    # well-known tester addresses
    assert set(acct.address for acct in self.services.accounts) == set(tester.accounts[:3])
    # route RPC calls through an in-process transport instead of a socket
    test_transport = TestTransport(call_func=self.rpc_request)
    self.client = JSONRPCClient(transport=test_transport)
def deploy_client(blockchain_type, blockchain_rpc_ports, deploy_key, request):
    """ Build the deploy JSON-RPC client for geth-backed test runs; other
    blockchain types yield None.
    """
    if blockchain_type != 'geth':
        return None

    client = JSONRPCClient(
        host='0.0.0.0',
        port=blockchain_rpc_ports[0],
        privkey=deploy_key,
        print_communication=False,
    )

    # cant patch transaction because the blockchain may not be running yet
    # patch_send_transaction(deploy_client)
    patch_send_message(client)
    return client
def setup_tps(rpc_server, config_path, privatekey, registry_address,
              token_address, deposit, settle_timeout):
    """ Creates the required contract and the fully connected Raiden network
    prior to running the test.

    Args:
        rpc_server (str): A string in the format '{host}:{port}' used to
            define the JSON-RPC end-point.
        config_path (str): A full/relative path to the yaml configuration
            file.
        channelmanager_address (str): The address of the channel manager
            contract.
        token_address (str): The address of the token used for testing.
        deposit (int): The default deposit that will be made for all test
            nodes.
    """
    host, port = rpc_server.split(':')
    rpc_client = JSONRPCClient(
        privkey=privatekey,
        host=host,
        # bug fix: split() yields a string; sibling code converts the port
        # to int before handing it to the client
        port=int(port),
        print_communication=False,
    )

    blockchain_service = BlockChainService(
        privatekey,
        registry_address,
        rpc_client,
    )
    blockchain_service.default_registry.add_token(token_address)

    with codecs.open(config_path, encoding='utf8') as handler:
        config = yaml.load(handler)

    # each node's identity is derived deterministically from host:port
    node_addresses = []
    for node in config['nodes']:
        privkey = sha3('{}:{}'.format(node['host'], node['port']))
        node_addresses.append(privatekey_to_address(privkey))

    random_raiden_network(
        token_address,
        blockchain_service,
        node_addresses,
        deposit,
        settle_timeout,
    )
def main(privatekey_hex, pretty, gas_price, port):
    """ Deploy all contracts through a local node and print the result as JSON. """
    slogging.configure(":debug")

    # Fix pyethapp.rpc_client not using slogging library
    rpc_logger = logging.getLogger('pyethapp.rpc_client')
    rpc_logger.setLevel(logging.DEBUG)
    rpc_logger.parent = slogging.getLogger()

    patch_deploy_solidity_contract()

    client = JSONRPCClient(
        port=port,
        privkey=decode_hex(privatekey_hex),
        print_communication=False,
    )
    patch_send_transaction(client)
    patch_send_message(client)

    deployed = deploy_all(client, gas_price)

    indent = 2 if pretty else None
    print(json.dumps(deployed, indent=indent))
def geth_wait_and_check(privatekeys, rpc_ports):
    """ Wait until the geth cluster is ready. """
    rpc_port = rpc_ports[0]
    first_account = address_encoder(privatekey_to_address(privatekeys[0]))
    probe_client = JSONRPCClient(
        host='0.0.0.0',
        port=rpc_port,
        privkey=privatekeys[0],
        print_communication=False,
    )

    # step 1: wait for the JSON-RPC interface to answer at all
    remaining = 5
    interface_up = False
    while not interface_up and remaining > 0:
        try:
            probe_client.call('eth_getBalance', first_account, 'latest')
            interface_up = True
        except ConnectionError:
            gevent.sleep(0.5)
            remaining -= 1

    if not interface_up:
        raise ValueError('geth didnt start the jsonrpc interface')

    # step 2: wait for each unique key's account to be funded
    for key in set(privatekeys):
        account = address_encoder(privatekey_to_address(key))
        account_client = JSONRPCClient(
            host='0.0.0.0',
            port=rpc_port,
            privkey=key,
            print_communication=False,
        )

        remaining = 10
        balance = '0x0'
        while balance == '0x0' and remaining > 0:
            balance = account_client.call('eth_getBalance', account, 'latest')
            gevent.sleep(1)
            remaining -= 1

        if balance == '0x0':
            raise ValueError('account is with a balance of 0')
def run(privatekey, registry_contract_address, discovery_contract_address,
        listen_address, logging, logfile, scenario, stage_prefix,
        results_filename):  # pylint: disable=unused-argument
    """ Scenario driver: start a Raiden node, open/fund channels as described
    by the scenario JSON, run the transfers, and dump timing results.

    Coordination with the other scenario nodes happens through stage marker
    files ('{prefix}.stage1'..'.stage3') and SIGUSR2.
    """
    # TODO: only enabled logging on "initiators"
    slogging.configure(logging, log_file=logfile)

    (listen_host, listen_port) = split_endpoint(listen_address)

    config = App.DEFAULT_CONFIG.copy()
    config['host'] = listen_host
    config['port'] = listen_port
    config['privatekey_hex'] = privatekey

    privatekey_bin = decode_hex(privatekey)

    # NOTE(review): RPC endpoint is hard-coded to a local node; the port is
    # passed as a string — confirm the client accepts that
    rpc_client = JSONRPCClient(
        privkey=privatekey_bin,
        host='127.0.0.1',
        port='8545',
        print_communication=False,
    )

    blockchain_service = BlockChainService(
        privatekey_bin,
        decode_hex(registry_contract_address),
        rpc_client,
    )

    discovery = ContractDiscovery(
        blockchain_service,
        decode_hex(discovery_contract_address)
    )

    app = App(config, blockchain_service, discovery)

    app.discovery.register(
        app.raiden.address,
        listen_host,
        listen_port,
    )

    app.raiden.register_registry(app.raiden.chain.default_registry.address)

    if scenario:
        script = json.load(scenario)

        tools = ConsoleTools(
            app.raiden,
            app.discovery,
            app.config['settle_timeout'],
            app.config['reveal_timeout'],
        )

        # peer -> number of transfers we must send to it
        transfers_by_peer = {}

        tokens = script['tokens']
        token_address = None
        peer = None
        our_node = app.raiden.address.encode('hex')
        log.warning("our address is {}".format(our_node))
        for token in tokens:
            # skip tokens that we're not part of
            nodes = token['channels']
            if our_node not in nodes:
                continue

            # allow for prefunded tokens
            if 'token_address' in token:
                token_address = token['token_address']
            else:
                token_address = tools.create_token()

            transfers_with_amount = token['transfers_with_amount']

            # FIXME: in order to do bidirectional channels, only one side
            # (i.e. only token['channels'][0]) should
            # open; others should join by calling
            # raiden.api.deposit, AFTER the channel came alive!

            # NOTE: leaving unidirectional for now because it most
            # probably will get to higher throughput
            log.warning("Waiting for all nodes to come online")

            while not all(tools.ping(node) for node in nodes if node != our_node):
                gevent.sleep(5)

            log.warning("All nodes are online")

            if our_node != nodes[-1]:
                # we open a channel towards the next node in the chain
                our_index = nodes.index(our_node)
                peer = nodes[our_index + 1]

                tools.register_token(token_address)
                amount = transfers_with_amount[nodes[-1]]

                # retry loops: each step may fail while the peer is still
                # coming up, so back off a random interval and try again
                while True:
                    try:
                        app.discovery.get(peer.decode('hex'))
                        break
                    except KeyError:
                        log.warning("Error: peer {} not found in discovery".format(peer))
                        time.sleep(random.randrange(30))

                while True:
                    try:
                        log.warning("Opening channel with {} for {}".format(peer, token_address))
                        app.raiden.api.open(token_address, peer)
                        break
                    except KeyError:
                        log.warning("Error: could not open channel with {}".format(peer))
                        time.sleep(random.randrange(30))

                while True:
                    try:
                        log.warning("Funding channel with {} for {}".format(peer, token_address))
                        app.raiden.api.deposit(token_address, peer, amount)
                        break
                    except Exception:
                        log.warning("Error: could not deposit {} for {}".format(amount, peer))
                        time.sleep(random.randrange(30))

                # only the first node in the chain initiates transfers
                if our_index == 0:
                    last_node = nodes[-1]
                    transfers_by_peer[last_node] = int(amount)
            else:
                peer = nodes[-2]

        if stage_prefix is not None:
            # signal "initialization done" and wait for the coordinator
            open('{}.stage1'.format(stage_prefix), 'a').close()
            log.warning("Done with initialization, waiting to continue...")
            event = gevent.event.Event()
            gevent.signal(signal.SIGUSR2, event.set)
            event.wait()

        transfer_results = {'total_time': 0, 'timestamps': []}

        def transfer(token_address, amount_per_transfer, total_transfers, peer, is_async):
            # runs total_transfers transfers to peer, recording per-transfer
            # timestamps into the shared transfer_results dict
            def transfer_():
                log.warning("Making {} transfers to {}".format(total_transfers, peer))
                initial_time = time.time()
                times = [0] * total_transfers
                for index in xrange(total_transfers):
                    app.raiden.api.transfer(
                        token_address.decode('hex'),
                        amount_per_transfer,
                        peer,
                    )
                    times[index] = time.time()

                transfer_results['total_time'] = time.time() - initial_time
                transfer_results['timestamps'] = times

                log.warning("Making {} transfers took {}".format(
                    total_transfers, transfer_results['total_time']))
                log.warning("Times: {}".format(times))

            if is_async:
                return gevent.spawn(transfer_)
            else:
                transfer_()

        # If sending to multiple targets, do it asynchronously, otherwise
        # keep it simple and just send to the single target on my thread.
        if len(transfers_by_peer) > 1:
            greenlets = []
            for peer_, amount in transfers_by_peer.items():
                greenlet = transfer(token_address, 1, amount, peer_, True)
                if greenlet is not None:
                    greenlets.append(greenlet)
            gevent.joinall(greenlets)

        elif len(transfers_by_peer) == 1:
            for peer_, amount in transfers_by_peer.items():
                transfer(token_address, 1, amount, peer_, False)

        log.warning("Waiting for termination")

        # stage2 marker + wait for the coordinator's SIGUSR2 before writing
        open('{}.stage2'.format(stage_prefix), 'a').close()
        log.warning("Waiting for transfers to finish, will write results...")
        event = gevent.event.Event()
        gevent.signal(signal.SIGUSR2, event.set)
        event.wait()

        results = tools.channel_stats_for(token_address, peer)
        if transfer_results['total_time'] != 0:
            results['total_time'] = transfer_results['total_time']
        if len(transfer_results['timestamps']) > 0:
            results['timestamps'] = transfer_results['timestamps']
        results['channel'] = repr(results['channel'])  # FIXME

        log.warning("Results: {}".format(results))

        with open(results_filename, 'w') as fp:
            json.dump(results, fp, indent=2)

        # stage3 marker, then block until an interrupt/termination signal
        open('{}.stage3'.format(stage_prefix), 'a').close()
        event = gevent.event.Event()
        gevent.signal(signal.SIGQUIT, event.set)
        gevent.signal(signal.SIGTERM, event.set)
        gevent.signal(signal.SIGINT, event.set)
        event.wait()

    else:
        log.warning("No scenario file supplied, doing nothing!")

        open('{}.stage2'.format(stage_prefix), 'a').close()
        event = gevent.event.Event()
        gevent.signal(signal.SIGQUIT, event.set)
        gevent.signal(signal.SIGTERM, event.set)
        gevent.signal(signal.SIGINT, event.set)
        event.wait()

    app.stop()
def test_blockchain(request):  # pylint: disable=too-many-locals
    """Smoke-test contract interaction over JSON-RPC against a local
    hydrachain cluster: deploy a token and a registry, register the
    token, and verify the emitted logs via ``eth_getLogs``.
    """
    from hydrachain import app
    app.slogging.configure(':ERROR')

    quantity = 3
    base_port = 29870
    timeout = 3  # seconds

    # throwaway datadir for the cluster's chain state
    tmp_datadir = tempfile.mktemp()

    # deterministic keys so the test is reproducible across runs
    private_keys = [
        mk_privkey('raidentest:{}'.format(position))
        for position in range(quantity)
    ]

    addresses = [
        privtoaddr(priv)
        for priv in private_keys
    ]

    # NOTE(review): return value unused, but the call boots the cluster
    # as a side effect — keep it.
    hydrachain_apps = hydrachain_network(private_keys, base_port, tmp_datadir)

    privatekey = private_keys[0]
    address = privtoaddr(privatekey)

    jsonrpc_client = JSONRPCClient(privkey=private_keys[0], print_communication=False)

    # deploy the token; constructor args: (supply, name, decimals, symbol)
    humantoken_path = get_contract_path('HumanStandardToken.sol')
    humantoken_contracts = compile_file(humantoken_path, libraries=dict())
    token_abi = jsonrpc_client.deploy_solidity_contract(
        address,
        'HumanStandardToken',
        humantoken_contracts,
        dict(),
        (9999, 'raiden', 2, 'Rd'),
        timeout=timeout,
    )

    registry_path = get_contract_path('Registry.sol')
    registry_contracts = compile_file(registry_path, libraries=dict())
    registry_abi = jsonrpc_client.deploy_solidity_contract(
        address,
        'Registry',
        registry_contracts,
        dict(),
        tuple(),
        timeout=timeout,
    )

    # no logs expected before any registry interaction
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 0

    # pylint: disable=no-member
    assert token_abi.balanceOf(address) == 9999
    transaction_hash = registry_abi.addAsset(token_abi.address)
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=timeout)

    # registering the asset must have produced exactly one log entry
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 1

    channel_manager_address_encoded = registry_abi.channelManagerByAsset.call(token_abi.address)
    channel_manager_address = channel_manager_address_encoded.decode('hex')

    # the log's data field carries the manager address: strip the '0x'
    # prefix and left-pad back to 20 bytes (40 hex chars) before decoding
    log_channel_manager_address_encoded = log_list[0]['data']
    log_channel_manager_address = log_channel_manager_address_encoded[2:].lstrip('0').rjust(40, '0').decode('hex')

    assert channel_manager_address == log_channel_manager_address

    channel_manager_abi = jsonrpc_client.new_contract_proxy(
        registry_contracts['ChannelManagerContract']['abi'],
        channel_manager_address,
    )

    # opening a channel emits a second log
    transaction_hash = channel_manager_abi.newChannel(addresses[1], 10)
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=timeout)

    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 2

    # sanity-check the channel is retrievable (result intentionally unused)
    channel_manager_abi.get.call(
        address.encode('hex'),
        addresses[1].encode('hex'),
    )
def __init__(self): self.logger = logger self.rpc_cli = JSONRPCClient(host = FLAGS.rpc_host, port = FLAGS.rpc_port, print_communication = False) super(BuiltinDriver, self).__init__()
class BuiltinDriver(base.TokenBuiltinBase):
    """Indexer driver for the DGD token.

    Pulls the token's event logs over JSON-RPC, mirrors each transfer
    into the per-token transfer table and keeps per-account balance
    documents in sync.  ``revert_log`` undoes a previously applied log
    (e.g. after a chain reorg).
    """

    @property
    def type(self):
        # Token symbol; used as the table-name suffix throughout.
        return 'DGD'

    @property
    def event(self):
        return constant.DGD_EVENT

    @property
    def last_block(self):
        # Highest block already indexed, or None when starting fresh.
        return self._last_block

    def __init__(self):
        self.logger = logger
        self.rpc_cli = JSONRPCClient(
            host=FLAGS.rpc_host,
            port=FLAGS.rpc_port,
            print_communication=False,
        )
        super(BuiltinDriver, self).__init__()

    def initialize(self):
        """Create indexes, restore the last indexed block from the DB and
        upsert the token's static metadata (source, ABI, address)."""
        self.add_indexes_for_token(self.type)
        res = self.db_proxy.get(
            FLAGS.token_prefix + self.type,
            {"type": self.event},
            multi=True,
            sort_key="block",
            ascend=False,
            limit=1,
        )
        if res:
            self._last_block = res[0]['block']
        else:
            self._last_block = None
        # add token basic info
        source_code = self.get_source_code(self.type)
        abi = self.get_abi(self.type, constant.DGD_CONTRACT_NAME)
        basic_info = dict(
            token=self.type,
            source_code=source_code,
            abi=abi,
            address=constant.DGD_ADDR,
        )
        self.db_proxy.update(
            FLAGS.token_basic,
            {"token": self.type},
            {"$set": basic_info},
            upsert=True,
        )

    def get_past_logs(self):
        """Install an eth filter for the token's event starting at the last
        indexed block and replay every matching log through handle_log."""
        self.abi = self.get_abi(self.type, constant.DGD_CONTRACT_NAME)
        event_id = self.get_event_id(self.abi, self.event)
        from_block = hex(self.last_block) if self.last_block else "0x0"
        params = {
            "fromBlock": from_block,
            "toBlock": "latest",
            "address": constant.DGD_ADDR,
            "topics": [event_id, None, None],
        }
        self.filter_id = self.rpc_cli.call(constant.METHOD_NEW_FILTER, params)
        res = self.rpc_cli.call(constant.METHOD_GET_FILTER_LOGS, self.filter_id)
        for log in res:
            self.handle_log(log)

    def handle_log(self, log):
        """Upsert one event log into the transfer table and apply the
        transferred value to the from/to balance documents."""
        # dict.has_key() is deprecated (removed in Python 3); `in` is
        # equivalent and works on both.
        if "removed" in log:
            del log['removed']
        # renamed from `hash`, which shadowed the builtin
        log_hash = _utils.hash_log(log)
        transfer_table = FLAGS.token_prefix + self.type
        balance_table = FLAGS.balance_prefix + self.type
        data = log['data'][2:].decode('hex')
        data_params = self.data_params(self.abi, self.event)
        value = _abi.decode_abi(data_params, data)[0]
        # topics[1]/topics[2] are the indexed from/to addresses padded to
        # 32 bytes; keep only the low 20 bytes (40 hex chars).
        f = '0x' + log['topics'][1].lower()[26:]
        to = '0x' + log['topics'][2].lower()[26:]
        operation = {
            "$set": {
                "hash": log_hash,
                "from": f,
                "to": to,
                "value": value,
                "transactionHash": log["transactionHash"],
                "logIndex": log["logIndex"],
                "block": int(log["blockNumber"], 16),
                "blockHash": log["blockHash"],
                "type": self.event,
            }
        }
        upserted_id = self.db_proxy.update(
            transfer_table,
            {"hash": log_hash},
            operation,
            multi=False,
            upsert=True,
        ).upserted_id
        if upserted_id is None:
            # upsert matched an existing document: log was already indexed
            self.logger.info("event log %s has been add, ignore it", log_hash)
            return
        # update balance
        # TODO parse demical of token
        operation = {
            "$inc": {"balance": value * -1}
        }
        self.db_proxy.update(balance_table, {"account": f}, operation, upsert=True)
        operation2 = {
            "$inc": {"balance": value}
        }
        self.db_proxy.update(balance_table, {"account": to}, operation2, upsert=True)

    def revert_log(self, log):
        """Undo handle_log for a reorged-out log: delete the transfer row
        and apply the inverse balance adjustments."""
        if "removed" in log:
            del log['removed']
        log_hash = _utils.hash_log(log)
        transfer_table = FLAGS.token_prefix + self.type
        balance_table = FLAGS.balance_prefix + self.type
        data = log['data'][2:].decode('hex')
        data_params = self.data_params(self.abi, self.event)
        value = _abi.decode_abi(data_params, data)[0]
        f = '0x' + log['topics'][1].lower()[26:]
        to = '0x' + log['topics'][2].lower()[26:]
        deleted_count = self.db_proxy.delete(
            transfer_table,
            {
                "hash": log_hash,
            },
            multi=False,
        ).deleted_count
        if deleted_count == 0:
            # nothing to undo — the log was never (or already un-) indexed
            self.logger.info("event log %s has been deleted, ignore it", log_hash)
            return
        # update balance (inverse of handle_log; `value * 1` simplified)
        # TODO parse demical of token
        operation = {
            "$inc": {"balance": value}
        }
        self.db_proxy.update(balance_table, {"account": f}, operation, upsert=True)
        operation2 = {
            "$inc": {"balance": value * -1}
        }
        self.db_proxy.update(balance_table, {"account": to}, operation2, upsert=True)

    def wait(self):
        # Block until the listener finishes.
        self.listener.join()
def test_blockchain(
        blockchain_type,
        blockchain_backend,  # required to start the geth backend
        blockchain_rpc_ports,
        private_keys,
        poll_timeout):
    # pylint: disable=too-many-locals
    """End-to-end JSON-RPC test against a geth backend: deploy token and
    registry contracts, register the token, and verify the decoded event
    logs and the resulting channel manager.
    """

    # this test is for interaction with a blockchain using json-rpc, so it
    # doesnt make sense to execute it against mock or tester
    if blockchain_type not in ('geth',):
        return

    addresses = [
        privatekey_to_address(priv)
        for priv in private_keys
    ]

    privatekey = private_keys[0]
    address = privatekey_to_address(privatekey)

    total_token = 100

    jsonrpc_client = JSONRPCClient(
        port=blockchain_rpc_ports[0],
        privkey=privatekey,
        print_communication=False,
    )
    patch_send_transaction(jsonrpc_client)
    patch_send_message(jsonrpc_client)

    # token constructor args: (supply, name, decimals, symbol)
    humantoken_path = get_contract_path('HumanStandardToken.sol')
    humantoken_contracts = compile_file(humantoken_path, libraries=dict())
    token_proxy = jsonrpc_client.deploy_solidity_contract(
        address,
        'HumanStandardToken',
        humantoken_contracts,
        dict(),
        (total_token, 'raiden', 2, 'Rd'),
        contract_path=humantoken_path,
        gasprice=default_gasprice,
        timeout=poll_timeout,
    )

    registry_path = get_contract_path('Registry.sol')
    registry_contracts = compile_file(registry_path)
    registry_proxy = jsonrpc_client.deploy_solidity_contract(
        address,
        'Registry',
        registry_contracts,
        dict(),
        tuple(),
        contract_path=registry_path,
        gasprice=default_gasprice,
        timeout=poll_timeout,
    )

    # nothing registered yet, so no logs expected
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 0

    # pylint: disable=no-member
    assert token_proxy.balanceOf(address) == total_token
    transaction_hash = registry_proxy.addToken.transact(
        token_proxy.address,
        gasprice=denoms.wei,
    )
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=poll_timeout)
    assert len(registry_proxy.tokenAddresses.call()) == 1

    # registering the token must have produced exactly one log
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 1

    channel_manager_address_encoded = registry_proxy.channelManagerByToken.call(
        token_proxy.address,
    )
    channel_manager_address = channel_manager_address_encoded.decode('hex')

    # decode the event through the contract translator and cross-check
    # against the registry's own view of the channel manager address
    log = log_list[0]
    log_topics = [
        decode_topic(topic)
        for topic in log['topics']  # pylint: disable=invalid-sequence-index
    ]
    log_data = log['data']
    event = registry_proxy.translator.decode_event(
        log_topics,
        log_data[2:].decode('hex'),
    )

    assert channel_manager_address == event['channel_manager_address'].decode('hex')
    assert token_proxy.address == event['token_address'].decode('hex')

    channel_manager_proxy = jsonrpc_client.new_contract_proxy(
        CHANNEL_MANAGER_ABI,
        channel_manager_address,
    )

    # opening a channel emits a second log
    transaction_hash = channel_manager_proxy.newChannel.transact(
        addresses[1],
        10,
        gasprice=denoms.wei,
    )
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=poll_timeout)

    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 2
def test_blockchain(
        blockchain_type,
        blockchain_backend,  # required to start the geth backend
        blockchain_rpc_ports,
        private_keys,
        poll_timeout):
    # pylint: disable=too-many-locals
    """End-to-end JSON-RPC test (older asset-based API variant): deploy a
    token and a registry, add the token as an asset, and verify the
    decoded event logs and resulting channel manager.
    """

    # this test is for interaction with a blockchain using json-rpc, so it
    # doesnt make sense to execute it against mock or tester
    if blockchain_type not in ('geth',):
        return

    addresses = [
        privatekey_to_address(priv)
        for priv in private_keys
    ]

    privatekey = private_keys[0]
    address = privatekey_to_address(privatekey)

    total_asset = 100

    jsonrpc_client = JSONRPCClient(
        port=blockchain_rpc_ports[0],
        privkey=privatekey,
        print_communication=False,
    )
    patch_send_transaction(jsonrpc_client)

    # token constructor args: (supply, name, decimals, symbol)
    humantoken_path = get_contract_path('HumanStandardToken.sol')
    humantoken_contracts = compile_file(humantoken_path, libraries=dict())
    token_proxy = jsonrpc_client.deploy_solidity_contract(
        address,
        'HumanStandardToken',
        humantoken_contracts,
        dict(),
        (total_asset, 'raiden', 2, 'Rd'),
        timeout=poll_timeout,
    )

    registry_path = get_contract_path('Registry.sol')
    registry_contracts = compile_file(registry_path)
    registry_proxy = jsonrpc_client.deploy_solidity_contract(
        address,
        'Registry',
        registry_contracts,
        dict(),
        tuple(),
        timeout=poll_timeout,
    )

    # nothing registered yet, so no logs expected
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 0

    # pylint: disable=no-member
    assert token_proxy.balanceOf(address) == total_asset
    transaction_hash = registry_proxy.addAsset.transact(
        token_proxy.address,
        gasprice=denoms.wei,
    )
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=poll_timeout)
    assert len(registry_proxy.assetAddresses.call()) == 1

    # registering the asset must have produced exactly one log
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 1

    channel_manager_address_encoded = registry_proxy.channelManagerByAsset.call(
        token_proxy.address,
    )
    channel_manager_address = channel_manager_address_encoded.decode('hex')

    # decode the event and cross-check against the registry's own view
    log = log_list[0]
    log_topics = [
        decode_topic(topic)
        for topic in log['topics']  # pylint: disable=invalid-sequence-index
    ]
    log_data = log['data']
    event = registry_proxy.translator.decode_event(
        log_topics,
        log_data[2:].decode('hex'),
    )

    assert channel_manager_address == event['channel_manager_address'].decode('hex')
    assert token_proxy.address == event['asset_address'].decode('hex')

    channel_manager_proxy = jsonrpc_client.new_contract_proxy(
        registry_contracts['ChannelManagerContract']['abi'],
        channel_manager_address,
    )

    # opening a channel emits a second log
    transaction_hash = channel_manager_proxy.newChannel.transact(
        addresses[1],
        10,
        gasprice=denoms.wei,
    )
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=poll_timeout)

    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 2
name = name_from_file(c) proxy = client.deploy_solidity_contract(client.sender, name, compiled_contracts, libraries, '', gasprice=denoms.shannon * 20) libraries[name] = proxy.address return {name: addr.encode('hex') for name, addr in libraries.items()} def deploy_all(client): patch_send_transaction(client) deployed = dict() deployed.update(deploy_files(RAIDEN_CONTRACT_FILES, client)) deployed.update(deploy_files(DISCOVERY_CONTRACT_FILES, client)) return deployed if __name__ == "__main__": # FIXME: client params should be read from cmdline-args! pretty = False client = JSONRPCClient( port=8545, privkey='1' * 64, print_communication=False, ) patch_send_transaction(client) deployed = deploy_all(client) print json.dumps(deployed, indent=2 if pretty else None)
def app(address,
        keystore_path,
        eth_rpc_endpoint,
        registry_contract_address,
        discovery_contract_address,
        listen_address,
        rpccorsdomain,  # pylint: disable=unused-argument
        mapped_socket,
        logging,
        logfile,
        log_json,
        max_unresponsive_time,
        send_ping_time,
        api_address,
        rpc,
        console,
        password_file,
        web_ui,
        datadir,
        eth_client_communication):
    """Build and return a configured raiden App.

    Assembles the runtime config from CLI options, unlocks the account,
    connects a JSON-RPC client to the eth node, checks the account can
    afford discovery registration, and prepares the per-user database
    directory.  Exits the process on unrecoverable setup errors.
    """

    from raiden.app import App
    from raiden.network.rpc.client import BlockChainService

    # config_file = args.config_file
    (listen_host, listen_port) = split_endpoint(listen_address)
    (api_host, api_port) = split_endpoint(api_address)

    config = App.DEFAULT_CONFIG.copy()
    config['host'] = listen_host
    config['port'] = listen_port
    config['console'] = console
    config['rpc'] = rpc
    # the web UI is only usable when the REST API is enabled
    config['web_ui'] = rpc and web_ui
    config['api_host'] = api_host
    config['api_port'] = api_port

    if mapped_socket:
        # NAT-traversed socket: advertise the external endpoint
        config['socket'] = mapped_socket.socket
        config['external_ip'] = mapped_socket.external_ip
        config['external_port'] = mapped_socket.external_port
    else:
        config['socket'] = None
        config['external_ip'] = listen_host
        config['external_port'] = listen_port

    # spread the allowed unresponsive window over the keepalive retries
    config['protocol']['nat_keepalive_retries'] = DEFAULT_NAT_KEEPALIVE_RETRIES
    timeout = max_unresponsive_time / DEFAULT_NAT_KEEPALIVE_RETRIES
    config['protocol']['nat_keepalive_timeout'] = timeout

    address_hex = address_encoder(address) if address else None
    # may prompt the user interactively for account/password
    address_hex, privatekey_bin = prompt_account(address_hex, keystore_path, password_file)
    privatekey_hex = privatekey_bin.encode('hex')
    config['privatekey_hex'] = privatekey_hex

    endpoint = eth_rpc_endpoint

    # Fallback default port if only an IP address is given
    rpc_port = 8545

    if eth_rpc_endpoint.startswith("http://"):
        endpoint = eth_rpc_endpoint[len("http://"):]
        rpc_port = 80
    elif eth_rpc_endpoint.startswith("https://"):
        endpoint = eth_rpc_endpoint[len("https://"):]
        rpc_port = 443

    if ':' not in endpoint:  # no port was given in url
        rpc_host = endpoint
    else:
        rpc_host, rpc_port = split_endpoint(endpoint)

    rpc_client = JSONRPCClient(
        privkey=privatekey_bin,
        host=rpc_host,
        port=rpc_port,
        print_communication=eth_client_communication,
    )

    # this assumes the eth node is already online
    patch_send_transaction(rpc_client)
    patch_send_message(rpc_client)

    try:
        blockchain_service = BlockChainService(
            privatekey_bin,
            registry_contract_address,
            rpc_client,
        )
    except ValueError as e:
        # ValueError exception raised if:
        # - The registry contract address doesn't have code, this might happen
        # if the connected geth process is not synced or if the wrong address
        # is provided (e.g. using the address from a smart contract deployed on
        # ropsten with a geth node connected to morden)
        print(e.message)
        sys.exit(1)

    # loop until the account can afford the discovery registration tx,
    # letting the user top up and retry (or abort)
    discovery_tx_cost = GAS_PRICE * DISCOVERY_REGISTRATION_GAS
    while True:
        balance = blockchain_service.client.balance(address_hex)
        if discovery_tx_cost <= balance:
            break
        print('Account has insufficient funds for discovery registration.\n'
              'Needed: {} ETH\n'
              'Available: {} ETH.\n'
              'Please deposit additional funds on this account.'.format(
                  discovery_tx_cost / float(denoms.ether), balance / float(denoms.ether)))
        if not click.confirm('Try again?'):
            sys.exit(1)

    discovery = ContractDiscovery(
        blockchain_service.node_address,
        blockchain_service.discovery(discovery_contract_address))

    if datadir is None:
        # default database directory
        raiden_directory = os.path.join(os.path.expanduser('~'), '.raiden')
    else:
        raiden_directory = datadir

    if not os.path.exists(raiden_directory):
        os.makedirs(raiden_directory)
    # one database subdirectory per account (first 8 hex chars of address)
    user_db_dir = os.path.join(raiden_directory, address_hex[:8])
    if not os.path.exists(user_db_dir):
        os.makedirs(user_db_dir)
    database_path = os.path.join(user_db_dir, 'log.db')
    config['database_path'] = database_path

    return App(config, blockchain_service, discovery)
def deployed_network(request,
                     private_keys,
                     channels_per_node,
                     deposit,
                     number_of_assets,
                     settle_timeout,
                     poll_timeout,
                     transport_class,
                     geth_cluster):
    """Test fixture: deploy a registry plus `number_of_assets` tokens on a
    running geth cluster, distribute token balances to every node, and
    return raiden apps wired into a sequential channel network.
    """

    # give the geth cluster a moment to settle before issuing calls
    gevent.sleep(2)
    assert channels_per_node in (0, 1, 2, CHAIN), (
        'deployed_network uses create_sequential_network that can only work '
        'with 0, 1 or 2 channels'
    )

    privatekey = private_keys[0]
    address = privtoaddr(privatekey)
    blockchain_service_class = BlockChainService

    jsonrpc_client = JSONRPCClient(
        host='0.0.0.0',
        privkey=privatekey,
        print_communication=False,
    )
    patch_send_transaction(jsonrpc_client)

    humantoken_path = get_contract_path('HumanStandardToken.sol')
    registry_path = get_contract_path('Registry.sol')

    humantoken_contracts = compile_file(humantoken_path, libraries=dict())
    registry_contracts = compile_file(registry_path, libraries=dict())

    registry_proxy = jsonrpc_client.deploy_solidity_contract(
        address,
        'Registry',
        registry_contracts,
        dict(),
        tuple(),
        timeout=poll_timeout,
    )
    registry_address = registry_proxy.address

    # Using 3 * deposit because we assume that is the maximum number of
    # channels that will be created.
    # `total_per_node = channels_per_node * deposit`
    total_per_node = 3 * deposit
    total_asset = total_per_node * len(private_keys)
    asset_addresses = []
    for _ in range(number_of_assets):
        # token constructor args: (supply, name, decimals, symbol)
        token_proxy = jsonrpc_client.deploy_solidity_contract(
            address,
            'HumanStandardToken',
            humantoken_contracts,
            dict(),
            (total_asset, 'raiden', 2, 'Rd'),
            timeout=poll_timeout,
        )
        asset_address = token_proxy.address
        assert len(asset_address)
        asset_addresses.append(asset_address)

        transaction_hash = registry_proxy.addAsset(asset_address)  # pylint: disable=no-member
        jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=poll_timeout)

        # only the creator of the token starts with a balance, transfer from
        # the creator to the other nodes
        for transfer_to in private_keys:
            if transfer_to != jsonrpc_client.privkey:
                transaction_hash = token_proxy.transfer(  # pylint: disable=no-member
                    privtoaddr(transfer_to),
                    total_per_node,
                    startgas=GAS_LIMIT,
                )
                jsonrpc_client.poll(transaction_hash.decode('hex'))

        # every node (creator included) must end with the same balance
        for key in private_keys:
            assert token_proxy.balanceOf(privtoaddr(key)) == total_per_node  # pylint: disable=no-member

    # the sequential network is built only over the first asset
    raiden_apps = create_sequential_network(
        private_keys,
        asset_addresses[0],
        registry_address,
        channels_per_node,
        deposit,
        settle_timeout,
        poll_timeout,
        transport_class,
        blockchain_service_class,
    )

    # register teardown so the apps are stopped when the test finishes
    _raiden_cleanup(request, raiden_apps)

    return raiden_apps
def test_blockchain(blockchain_backend, private_keys, number_of_nodes, poll_timeout):
    # pylint: disable=too-many-locals
    """End-to-end JSON-RPC test (camelCase-event variant): deploy a token
    and a registry, add the token as an asset, and verify the decoded
    event logs and the resulting channel manager.
    """
    addresses = [privatekey_to_address(priv) for priv in private_keys]

    privatekey = private_keys[0]
    address = privatekey_to_address(privatekey)
    total_asset = 100

    jsonrpc_client = JSONRPCClient(
        privkey=privatekey,
        print_communication=False,
    )
    patch_send_transaction(jsonrpc_client)

    # token constructor args: (supply, name, decimals, symbol)
    humantoken_path = get_contract_path('HumanStandardToken.sol')
    humantoken_contracts = compile_file(humantoken_path, libraries=dict())
    token_proxy = jsonrpc_client.deploy_solidity_contract(
        address,
        'HumanStandardToken',
        humantoken_contracts,
        dict(),
        (total_asset, 'raiden', 2, 'Rd'),
        timeout=poll_timeout,
    )

    registry_path = get_contract_path('Registry.sol')
    registry_contracts = compile_file(registry_path)
    registry_proxy = jsonrpc_client.deploy_solidity_contract(
        address,
        'Registry',
        registry_contracts,
        dict(),
        tuple(),
        timeout=poll_timeout,
    )

    # nothing registered yet, so no logs expected
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 0

    # pylint: disable=no-member
    assert token_proxy.balanceOf(address) == total_asset
    transaction_hash = registry_proxy.addAsset.transact(
        token_proxy.address,
        gasprice=denoms.wei,
    )
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=poll_timeout)
    assert len(registry_proxy.assetAddresses.call()) == 1

    # registering the asset must have produced exactly one log
    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 1

    channel_manager_address_encoded = registry_proxy.channelManagerByAsset.call(
        token_proxy.address)
    channel_manager_address = channel_manager_address_encoded.decode('hex')

    # decode the event and cross-check against the registry's own view
    log = log_list[0]
    log_topics = [
        decode_topic(topic)
        for topic in log['topics']  # pylint: disable=invalid-sequence-index
    ]
    log_data = log['data']
    event = registry_proxy.translator.decode_event(
        log_topics,
        log_data[2:].decode('hex'),
    )

    assert channel_manager_address == event['channelManagerAddress'].decode(
        'hex')
    assert token_proxy.address == event['assetAddress'].decode('hex')

    channel_manager_proxy = jsonrpc_client.new_contract_proxy(
        registry_contracts['ChannelManagerContract']['abi'],
        channel_manager_address,
    )

    # opening a channel emits a second log
    transaction_hash = channel_manager_proxy.newChannel.transact(
        addresses[1],
        10,
        gasprice=denoms.wei,
    )
    jsonrpc_client.poll(transaction_hash.decode('hex'), timeout=poll_timeout)

    log_list = jsonrpc_client.call(
        'eth_getLogs',
        {
            'fromBlock': '0x0',
            'toBlock': 'latest',
            'topics': [],
        },
    )
    assert len(log_list) == 2
class BlockTest(unittest.TestCase):
    """Integration tests for handler.BlockHandler.

    NOTE(review): these tests require a live JSON-RPC node synced past the
    referenced mainnet blocks and a local MongoDB — they are integration
    tests, not unit tests.  Expected values (hashes, gas, counts) are
    pinned to specific mainnet blocks/transactions.
    """

    def setUp(self):
        # fresh DB proxy, RPC client and handler for every test
        self.db_proxy = dbproxy.MongoDBProxy()
        self.rpc_cli = JSONRPCClient(host = FLAGS.rpc_host, port = FLAGS.rpc_port, print_communication = False)
        self.handler = handler.BlockHandler(self.rpc_cli, logger, self.db_proxy)

    def test_fork(self):
        """Process a block, then replay a forged replacement block and check
        the old tx/uncle data is replaced by the fork's."""
        # old
        self.handler.blk_number = 2074903
        old_block = self.rpc_cli.call("eth_getBlockByNumber", 2074903, True)
        self.handler.process_block(old_block)
        # new: same height, forged hash, single transaction, no uncles
        new_block = self.rpc_cli.call("eth_getBlockByNumber", 2074903, True)
        new_block['uncles'] = []
        tx = self.rpc_cli.call("eth_getTransactionByHash", "0xa2d7bdf90e507979d7005399f2af77918a538d5288076b0e2a1308e7a419f1bc")
        tx["blockNumber"] = hex(2074903)
        new_block['hash'] = "0x123"
        new_block['transactions'] = [tx]
        self.handler.process_fork(old_block, new_block)
        # tx: the original block's tx must be gone, the fork's tx present
        origin_tx_hash = "0xec50f325f70e08de1750c2655d867217a49dbba75ef09c66e1661be75e5fcafe"
        acc = self.db_proxy.get(FLAGS.accounts, {"address":"0x03da00938219676af361cfc22a49ab1e4a64fd6f"}, block_height = self.handler.blk_number)
        self.assertNotIn(origin_tx_hash, acc['tx_out'])
        txs = self.db_proxy.get(FLAGS.txs, {"blockNumber": hex(2074903)}, multi = True, block_height = 2074903)
        tx_hashes = [tx['hash'] for tx in txs]
        self.assertNotIn(origin_tx_hash, tx_hashes)
        self.assertIn(tx['hash'], tx_hashes)
        # block info: the stored block now carries the forged hash
        block = self.db_proxy.get(FLAGS.blocks, {"number":2074903}, block_height = self.handler.blk_number)
        self.assertEqual(block['hash'], "0x123")
        # uncle: removed, since the fork block has no uncles
        buncle1 = self.db_proxy.get(FLAGS.uncles, {"mainNumber":2074903, "hash":"0xe83ede60f9ee37d506101d542578d7a26236829364a36652c0bd0d9e6652a0db"}, block_height = self.handler.blk_number)
        self.assertEqual(buncle1, None)

    def test_process_block(self):
        """Process mainnet block 2074903 and verify block, miner account,
        uncle and transaction records."""
        self.handler.blk_number = 2074903
        block = self.rpc_cli.call("eth_getBlockByNumber", 2074903, True)
        self.handler.process_block(block)
        # block
        blk = self.db_proxy.get(FLAGS.blocks, {"number":self.handler.blk_number}, block_height = self.handler.blk_number)
        self.assertEqual(blk['hash'], '0xcb3d7de2ed7817fb5c5763c7cf8429ad0efb12ad4f14420c9ab56b71664f77d4')
        # account
        res = self.db_proxy.get(FLAGS.accounts, {"address":"0x2a65aca4d5fc5b5c859090a6c34d164135398226"}, block_height = self.handler.blk_number)
        # 1 mine
        mine = res['mine']
        self.assertIn(blk['hash'], mine)
        # 2 uncle
        buncle = self.db_proxy.get(FLAGS.uncles, {"mainNumber":2074903}, block_height = self.handler.blk_number)
        self.assertEqual(buncle['mainNumber'], 2074903)
        # 3 tx
        txs = self.db_proxy.search(FLAGS.txs, {"blockNumber": hex(2074903)}, multi = True)
        self.assertEqual(len(txs), 2)

    def test_process_by_hash(self):
        """Same checks as test_process_block, but driving the handler by
        block hash via execute()."""
        self.handler.blk_number = 2074903
        block = "0xcb3d7de2ed7817fb5c5763c7cf8429ad0efb12ad4f14420c9ab56b71664f77d4"
        self.handler.execute(block, True)
        # block
        blk = self.db_proxy.get(FLAGS.blocks, {"number":self.handler.blk_number}, block_height = self.handler.blk_number)
        self.assertEqual(blk['hash'], '0xcb3d7de2ed7817fb5c5763c7cf8429ad0efb12ad4f14420c9ab56b71664f77d4')
        # account
        res = self.db_proxy.get(FLAGS.accounts, {"address":"0x2a65aca4d5fc5b5c859090a6c34d164135398226"}, block_height = self.handler.blk_number)
        # 1 mine
        mine = res['mine']
        self.assertIn(blk['hash'], mine)
        # 2 uncle
        buncle = self.db_proxy.get(FLAGS.uncles, {"mainNumber":2074903}, block_height = self.handler.blk_number)
        self.assertEqual(buncle['mainNumber'], 2074903)
        # 3 tx
        txs = self.db_proxy.search(FLAGS.txs, {"blockNumber": hex(2074903)}, multi = True)
        self.assertEqual(len(txs), 2)

    def test_revert(self):
        """Process a block then revert it; all derived records must be gone."""
        self.handler.blk_number = 2074903
        block = self.rpc_cli.call("eth_getBlockByNumber", 2074903, True)
        txs = block['transactions']
        self.handler.process_block(block)
        blk = self.db_proxy.get(FLAGS.blocks, {"number":self.handler.blk_number}, block_height = self.handler.blk_number)
        self.handler.revert(blk,txs)
        # block
        blk = self.db_proxy.get(FLAGS.blocks, {"number":self.handler.blk_number}, block_height = self.handler.blk_number)
        self.assertEqual(blk, None)
        # account
        res = self.db_proxy.get(FLAGS.accounts, {"address":"0x2a65aca4d5fc5b5c859090a6c34d164135398226"}, block_height = self.handler.blk_number)
        # 1 mine
        mine = res['mine']
        self.assertEqual(mine, [])
        # 2 tx
        txs = self.db_proxy.search(FLAGS.txs, {"blockNumber": hex(2074903)}, multi = True)
        self.assertEqual(len(txs), 0)

    def test_add_genesis(self):
        """Import the frontier genesis file and spot-check block 0, one
        genesis account and the genesis allocation count."""
        FLAGS.genesis_data = "../genesisdata/genesis_frontier.json"
        driver = builtin.BuiltinDriver()
        driver.add_genesis_data()
        # block
        blk = self.db_proxy.get(FLAGS.blocks, {"number":0}, block_height = 0)
        self.assertEqual(blk['hash'], '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3')
        # account
        acct = self.db_proxy.get(FLAGS.accounts, {"address":"0x3282791d6fd713f1e94f4bfd565eaa78b3a0599d"}, block_height = 0)
        # 1 tx
        res = self.db_proxy.get(FLAGS.txs, None, block_height=0, multi = True)
        self.assertEqual(len(res), 8893)

    def test_process_tx(self):
        """Process a plain value transfer and a contract-creation tx and
        verify the tx rows and the affected account documents."""
        # block 1700002
        tx_hash = "0xa2d7bdf90e507979d7005399f2af77918a538d5288076b0e2a1308e7a419f1bc"
        tx_obj = self.rpc_cli.call("eth_getTransactionByHash", tx_hash)
        self.handler.blk_number = 1700002
        self.handler.process_tx(tx_obj, None)
        # tx
        tx = self.db_proxy.get(FLAGS.txs, {"hash":tx_hash}, block_height = 1700002, multi = False)
        self.assertEqual(tx["hash"], tx_hash)
        self.assertEqual(utils.convert_to_int(tx['gasUsed']), 21000)
        # 1 account
        acc1 = self.db_proxy.get(FLAGS.accounts, {"address":"0x4bb96091ee9d802ed039c4d1a5f6216f90f81b01"}, block_height = 1700002, multi = False)
        acc2 = self.db_proxy.get(FLAGS.accounts, {"address":"0xae8f3c8d1134e50a7c63c39d78406ab7334149ac"}, block_height = 1700002, multi = False)
        self.assertIn(tx['hash'], acc1['tx_out'])
        self.assertIn(tx['hash'], acc2['tx_in'])
        # create contract tx
        # insert
        tx_hash = "0xfeae1ff3cf9b6927d607744e3883ea105fb16042d4639857d9cfce3eba644286"
        tx_obj = self.rpc_cli.call("eth_getTransactionByHash", tx_hash)
        self.handler.blk_number = 1883496
        self.handler.process_tx(tx_obj, None)
        # tx
        tx = self.db_proxy.get(FLAGS.txs, {"hash":tx_hash}, block_height = 1883496, multi = False)
        self.assertEqual(tx["hash"], tx_hash)
        self.assertEqual(utils.convert_to_int(tx['gasUsed']), 368040)
        # account
        # 1 account
        acc1 = self.db_proxy.get(FLAGS.accounts, {"address":"0x2ef1f605af5d03874ee88773f41c1382ac71c239"}, block_height = 1883496, multi = False)
        acc2 = self.db_proxy.get(FLAGS.accounts, {"address":"0xbf4ed7b27f1d666546e30d74d50d173d20bca754"}, block_height = 1883496, multi = False)
        self.assertIn(tx['hash'], acc1['tx_out'])
        self.assertIn(tx['hash'], acc2['tx_in'])
        # receiver of a creation tx must be flagged as a contract
        self.assertEqual(acc2['is_contract'], 1)

    def test_revert_tx(self):
        """Process then revert both tx kinds; rows and account references
        must be removed."""
        # insert
        tx_hash = "0xa2d7bdf90e507979d7005399f2af77918a538d5288076b0e2a1308e7a419f1bc"
        tx_obj = self.rpc_cli.call("eth_getTransactionByHash", tx_hash)
        self.handler.blk_number = 1700002
        self.handler.process_tx(tx_obj, None)
        # revert
        tx = self.db_proxy.get(FLAGS.txs, {"hash":tx_hash}, block_height = 1700002, multi = False)
        self.handler.revert_tx(tx, 1700002)
        # tx
        res = self.db_proxy.get(FLAGS.txs, {"hash":tx_hash}, block_height = 1700002, multi = False)
        self.assertEqual(res, None)
        # account
        # 1 account
        acc1 = self.db_proxy.get(FLAGS.accounts, {"address":"0x4bb96091ee9d802ed039c4d1a5f6216f90f81b01"}, block_height = 1700002, multi = False)
        acc2 = self.db_proxy.get(FLAGS.accounts, {"address":"0xae8f3c8d1134e50a7c63c39d78406ab7334149ac"}, block_height = 1700002, multi = False)
        self.assertNotIn(tx['hash'], acc1['tx_out'])
        self.assertNotIn(tx['hash'], acc2['tx_in'])
        # create contract tx
        # insert
        tx_hash = "0xfeae1ff3cf9b6927d607744e3883ea105fb16042d4639857d9cfce3eba644286"
        tx_obj = self.rpc_cli.call("eth_getTransactionByHash", tx_hash)
        self.handler.blk_number = 1883496
        self.handler.process_tx(tx_obj, None)
        # revert
        tx = self.db_proxy.get(FLAGS.txs, {"hash":tx_hash}, block_height = 1883496, multi = False)
        self.handler.revert_tx(tx, 1883496)
        # tx
        res = self.db_proxy.get(FLAGS.txs, {"hash":tx_hash}, block_height = 1883496, multi = False)
        self.assertEqual(res, None)
        # account
        # 1 account
        acc1 = self.db_proxy.get(FLAGS.accounts, {"address":"0x2ef1f605af5d03874ee88773f41c1382ac71c239"}, block_height = 1883496, multi = False)
        acc2 = self.db_proxy.get(FLAGS.accounts, {"address":"0xbf4ed7b27f1d666546e30d74d50d173d20bca754"}, block_height = 1883496, multi = False)
        # reverting a creation tx removes the created contract account
        self.assertEqual(acc2, None)
        self.assertNotIn(tx['hash'], acc1['tx_out'])

    def test_process_uncle(self):
        """Process a block's uncles and verify the uncle row, its reward,
        and the miner account's uncle list."""
        # block 2122962
        blk_hash = "0xa9389966cec0062be52f16440e9ee9447e849698934b62aac93138fdfdb751b1"
        blk_obj = self.rpc_cli.call("eth_getBlockByHash", blk_hash, False)
        self.handler.blk_number = 2122962
        self.handler.process_uncle(blk_obj)
        # uncle
        uncle = self.db_proxy.get(FLAGS.uncles, {"hash":"0xa05ba9c6f686d92ef62a1adf18c3c97ed9041b3341de74c20d3cb421216a7f48"}, block_height = 2122962, multi = False)
        self.assertEqual(uncle['mainNumber'], 2122962)
        self.assertEqual(uncle['hash'], "0xa05ba9c6f686d92ef62a1adf18c3c97ed9041b3341de74c20d3cb421216a7f48")
        # expected uncle reward for this depth: 4.375 ETH in wei
        self.assertEqual(uncle['reward'], utils.unit_convert_from_ether(4.375))
        # account
        acc = self.db_proxy.get(FLAGS.accounts, {"address":"0x2a65aca4d5fc5b5c859090a6c34d164135398226"}, block_height = 2122962, multi = False)
        # 1 mine
        mine_uncles = acc['uncles']
        self.assertIn(uncle['hash'], mine_uncles)

    def test_revert_uncle(self):
        """Process then revert uncles; the uncle row and the account's
        uncle reference must be removed."""
        # block 2122962
        blk_hash = "0xa9389966cec0062be52f16440e9ee9447e849698934b62aac93138fdfdb751b1"
        blk_obj = self.rpc_cli.call("eth_getBlockByHash", blk_hash, False)
        self.handler.blk_number = 2122962
        self.handler.process_uncle(blk_obj)
        self.handler.revert_uncle(blk_obj)
        # uncle
        uncle = self.db_proxy.get(FLAGS.uncles, {"hash":"0xa05ba9c6f686d92ef62a1adf18c3c97ed9041b3341de74c20d3cb421216a7f48"}, block_height = 2122962, multi = False)
        self.assertEqual(uncle, None)
        # account
        acc = self.db_proxy.get(FLAGS.accounts, {"address":"0x2a65aca4d5fc5b5c859090a6c34d164135398226"}, block_height = 2122962, multi = False)
        # 1 mine
        mine_uncles = acc['uncles']
        self.assertEqual(mine_uncles, [])

    def test_set_balance(self):
        """With the balance-setting flag enabled, stored balances must match
        the node's eth_getBalance for every touched account."""
        block_num = 2142168
        block = self.rpc_cli.call("eth_getBlockByNumber", block_num, True)
        # the extra True enables balance fetching in the handler
        shandler = handler.BlockHandler(self.rpc_cli, logger, self.db_proxy, True)
        shandler.blk_number = block_num
        shandler.process_block(block)
        # miner, tx-out acct tx-in acct uncle-miner
        accts = ["0xbcdfc35b86bedf72f0cda046a3c16829a2ef41d1","0x362db1e4830bf2c401d7f9f45034f5e6e1c46a0b",
                 "0xbfc39b6f805a9e40e77291aff27aee3c96915bdd", "0x6cafe7473925998db07a497ac3fd10405637a46d"]
        balances = [self.rpc_cli.call("eth_getBalance", acct, block_num) for acct in accts]
        for _,acct in enumerate(accts):
            res = self.db_proxy.get(FLAGS.accounts, {"address":acct}, multi = False, block_height = block_num)
            self.assertEqual(res['balance'], balances[_])

    def test_sync_balance(self):
        """_sync_balance must walk accounts spread across table shards and
        record the sync watermark in the meta collection."""
        # insert account data
        accounts = ["0x3282791d6fd713f1e94f4bfd565eaa78b3a0599d",
                    "0x17961d633bcf20a7b029a7d94b7df4da2ec5427f",
                    "0x493a67fe23decc63b10dda75f3287695a81bd5ab"]
        # place each account in a different shard (one per table_capacity)
        for _, acct in enumerate(accounts):
            self.db_proxy.insert(FLAGS.accounts, {"address":acct}, block_height = _ * FLAGS.table_capacity)
        self.handler._sync_balance(100)
        res = self.db_proxy.get(FLAGS.meta, {"sync_record":"ethereum"}, multi = False)
        self.assertEqual(res["last_sync_block"], 100)

    def tearDown(self):
        # drop the whole test database so every test starts clean
        self.db_proxy.drop_db(FLAGS.mongodb_default_db)
import time from ethereum._solidity import solc_wrapper from pyethapp.rpc_client import JSONRPCClient, address_encoder from ethereum.processblock import mk_contract_address from ethereum.utils import denoms RPC_PORT = 4000 # change for Geth client = JSONRPCClient(port=RPC_PORT, print_communication=False) sender = client.coinbase code = open('spread.sol').read() def create_creator(): # create Creator contract contract_name = 'Creator' binary = solc_wrapper.compile(code, contract_name=contract_name) abi = solc_wrapper.mk_full_signature(code, 'Spread') print abi print 'creating Creator contract' assert sender creator_contract = client.send_transaction(sender, to='', value=0, data=binary, startgas=400000) assert len(creator_contract) == 40 print 'creator contract at', creator_contract return creator_contract def create_spread(creator_contract):
def setUp(self): self.db_proxy = dbproxy.MongoDBProxy() self.rpc_cli = JSONRPCClient(host = FLAGS.rpc_host, port = FLAGS.rpc_port, print_communication = False) self.handler = handler.BlockHandler(self.rpc_cli, logger, self.db_proxy)