def geth_wait_and_check(privatekeys, rpc_ports):
    """ Wait until the geth cluster is ready. """
    first_key = privatekeys[0]
    probe_address = address_encoder(privatekey_to_address(first_key))
    rpc_port = rpc_ports[0]
    probe_client = JSONRPCClient(
        host="0.0.0.0",
        port=rpc_port,
        privkey=first_key,
        print_communication=False,
    )

    # Poll the JSON-RPC endpoint; each failed attempt backs off half a second.
    for _ in range(5):
        try:
            probe_client.call("eth_getBalance", probe_address, "latest")
            break
        except ConnectionError:
            gevent.sleep(0.5)
    else:
        # all five attempts failed with ConnectionError
        raise ValueError("geth didnt start the jsonrpc interface")

    # Every configured account must have been funded by the genesis block.
    for key in set(privatekeys):
        account = address_encoder(privatekey_to_address(key))
        account_client = JSONRPCClient(
            host="0.0.0.0",
            port=rpc_port,
            privkey=key,
            print_communication=False,
        )

        balance = "0x0"
        for _ in range(10):
            balance = account_client.call("eth_getBalance", account, "latest")
            gevent.sleep(1)
            if balance != "0x0":
                break

        if balance == "0x0":
            raise ValueError("account is with a balance of 0")
def test_pending_transaction_filter(test_app):
    """ A pending-transaction filter must report exactly the hashes of the
    transactions sent since the last poll, and nothing on an immediate re-poll.
    """
    filter_id = test_app.rpc_request('eth_newPendingTransactionFilter')
    assert test_app.rpc_request('eth_getFilterChanges', filter_id) == []
    tx = {
        'from': address_encoder(test_app.services.accounts.unlocked_accounts()[0].address),
        'to': address_encoder('\xff' * 20)
    }

    def test_sequence(s):
        # 't' sends a transaction, 'b' mines a block.
        tx_hashes = []
        for c in s:
            if c == 't':
                tx_hashes.append(test_app.rpc_request('eth_sendTransaction', tx))
            elif c == 'b':
                test_app.mine_next_block()
            else:
                assert False
        assert test_app.rpc_request('eth_getFilterChanges', filter_id) == tx_hashes
        assert test_app.rpc_request('eth_getFilterChanges', filter_id) == []

    sequences = [
        't',
        'b',
        'ttt',
        'tbt',
        'ttbttt',
        'bttbtttbt',
        'bttbtttbttbb',
    ]
    # Fix: `map` used only for its side effects — on Python 3 it is lazy and
    # would silently run none of the sequences. Iterate explicitly.
    for sequence in sequences:
        test_sequence(sequence)
def __init__(
        self,
        jsonrpc_client,
        channel_address,
        startgas=GAS_LIMIT,
        gasprice=GAS_PRICE,
        poll_timeout=DEFAULT_POLL_TIMEOUT):
    # pylint: disable=too-many-arguments
    """ Build a proxy for a deployed netting channel contract.

    Raises:
        ValueError: when `channel_address` has no deployed code.
    """
    code = jsonrpc_client.call(
        'eth_getCode',
        address_encoder(channel_address),
        'latest',
    )
    if code == '0x':
        raise ValueError('Netting channel address {} does not contain code'.format(
            address_encoder(channel_address),
        ))

    self.address = channel_address
    self.proxy = jsonrpc_client.new_abi_contract(
        NETTING_CHANNEL_ABI,
        address_encoder(channel_address),
    )
    self.client = jsonrpc_client
    self.startgas = startgas
    self.gasprice = gasprice
    self.poll_timeout = poll_timeout

    # check we are a participant of the given channel
    self.node_address = privatekey_to_address(self.client.privkey)
    self.detail(self.node_address)
def __init__(
        self,
        jsonrpc_client,
        manager_address,
        startgas=GAS_LIMIT,
        gasprice=GAS_PRICE,
        poll_timeout=DEFAULT_POLL_TIMEOUT):
    # pylint: disable=too-many-arguments
    """ Build a proxy for a deployed channel manager contract.

    Raises:
        ValueError: when `manager_address` has no deployed code.
    """
    code = jsonrpc_client.call(
        'eth_getCode',
        address_encoder(manager_address),
        'latest',
    )
    if code == '0x':
        raise ValueError('Channel manager address {} does not contain code'.format(
            address_encoder(manager_address),
        ))

    self.address = manager_address
    self.proxy = jsonrpc_client.new_abi_contract(
        CHANNEL_MANAGER_ABI,
        address_encoder(manager_address),
    )
    self.client = jsonrpc_client
    self.startgas = startgas
    self.gasprice = gasprice
    self.poll_timeout = poll_timeout
def __init__(
        self,
        jsonrpc_client,
        discovery_address,
        startgas=GAS_LIMIT,
        gasprice=GAS_PRICE,
        poll_timeout=DEFAULT_POLL_TIMEOUT):
    """ Build a proxy for a deployed endpoint registry contract.

    Raises:
        ValueError: when `discovery_address` has no deployed code.
    """
    code = jsonrpc_client.call(
        'eth_getCode',
        address_encoder(discovery_address),
        'latest',
    )
    if code == '0x':
        raise ValueError('Discovery address {} does not contain code'.format(
            address_encoder(discovery_address),
        ))

    self.address = discovery_address
    self.proxy = jsonrpc_client.new_abi_contract(
        ENDPOINT_REGISTRY_ABI,
        address_encoder(discovery_address),
    )
    self.client = jsonrpc_client
    self.startgas = startgas
    self.gasprice = gasprice
    self.poll_timeout = poll_timeout
def test_send_transaction(test_app):
    """ eth_sendTransaction must enter the candidate block immediately and move
    value once mined; a sender that cannot pay gas is silently dropped.
    """
    chain = test_app.services.chain.chain
    receiver = "\xff" * 20
    assert chain.head_candidate.get_balance(receiver) == 0

    sender = test_app.services.accounts.unlocked_accounts()[0].address
    assert chain.head_candidate.get_balance(sender) > 0

    tx = {
        "from": address_encoder(sender),
        "to": address_encoder(receiver),
        "value": quantity_encoder(1),
    }
    tx_hash = data_decoder(test_app.rpc_request("eth_sendTransaction", tx))
    assert tx_hash == chain.head_candidate.get_transaction(0).hash
    assert chain.head_candidate.get_balance(receiver) == 1

    test_app.mine_next_block()
    assert tx_hash == chain.head.get_transaction(0).hash
    assert chain.head.get_balance(receiver) == 1

    # send transactions from account which can't pay gas
    tx["from"] = address_encoder(test_app.services.accounts.unlocked_accounts()[1].address)
    tx_hash = data_decoder(test_app.rpc_request("eth_sendTransaction", tx))
    assert chain.head_candidate.get_transactions() == []
def test_send_transaction(test_app):
    """ A value transfer sent over RPC shows up in the candidate block and,
    after mining, in the chain head; an unfunded sender produces no candidate tx.
    """
    chain = test_app.services.chain.chain
    destination = '\xff' * 20
    assert chain.head_candidate.get_balance(destination) == 0

    tx = {
        'from': address_encoder(test_app.services.accounts.unlocked_accounts()[0].address),
        'to': address_encoder(destination),
        'value': quantity_encoder(1),
    }
    tx_hash = data_decoder(test_app.rpc_request('eth_sendTransaction', tx))
    assert tx_hash == chain.head_candidate.get_transaction(0).hash
    assert chain.head_candidate.get_balance(destination) == 1

    test_app.mine_next_block()
    assert tx_hash == chain.head.get_transaction(0).hash
    assert chain.head.get_balance(destination) == 1

    # send transactions from account which can't pay gas
    tx['from'] = address_encoder(test_app.services.accounts.unlocked_accounts()[1].address)
    tx_hash = data_decoder(test_app.rpc_request('eth_sendTransaction', tx))
    assert chain.head_candidate.get_transactions() == []
def new_filter(jsonrpc_client, contract_address, topics):
    """ Custom new filter implementation to handle bad encoding from geth rpc. """
    encoded_topics = [topic_encoder(topic) for topic in topics]
    params = {
        'fromBlock': '',
        'toBlock': '',
        'address': address_encoder(normalize_address(contract_address)),
        'topics': encoded_topics,
    }
    return jsonrpc_client.call('eth_newFilter', params)
def test_pending_transaction_filter(test_app):
    """ A pending-transaction filter must report exactly the hashes of the
    transactions sent since the last poll, and nothing on an immediate re-poll.
    """
    filter_id = test_app.rpc_request("eth_newPendingTransactionFilter")
    assert test_app.rpc_request("eth_getFilterChanges", filter_id) == []
    tx = {
        "from": address_encoder(test_app.services.accounts.unlocked_accounts()[0].address),
        "to": address_encoder("\xff" * 20),
    }

    def test_sequence(s):
        # "t" sends a transaction, "b" mines a block.
        tx_hashes = []
        for c in s:
            if c == "t":
                tx_hashes.append(test_app.rpc_request("eth_sendTransaction", tx))
            elif c == "b":
                test_app.mine_next_block()
            else:
                assert False
        assert test_app.rpc_request("eth_getFilterChanges", filter_id) == tx_hashes
        assert test_app.rpc_request("eth_getFilterChanges", filter_id) == []

    sequences = ["t", "b", "ttt", "tbt", "ttbttt", "bttbtttbt", "bttbtttbttbb"]
    # Fix: `map` used only for its side effects — on Python 3 it is lazy and
    # would silently run none of the sequences. Iterate explicitly.
    for sequence in sequences:
        test_sequence(sequence)
def geth_wait_and_check(privatekeys):
    """ Wait until the geth cluster is ready. """
    first_key = privatekeys[0]
    probe_address = address_encoder(privtoaddr(first_key))
    probe_client = JSONRPCClient(
        host='0.0.0.0',
        privkey=first_key,
        print_communication=False,
    )

    # Poll the JSON-RPC endpoint; each failed attempt backs off half a second.
    for _ in range(5):
        try:
            probe_client.call('eth_getBalance', probe_address, 'latest')
            break
        except ConnectionError:
            gevent.sleep(0.5)
    else:
        # all five attempts failed with ConnectionError
        raise ValueError('geth didnt start the jsonrpc interface')

    # Every configured account must have been funded by the genesis block.
    for key in set(privatekeys):
        account = address_encoder(privtoaddr(key))
        account_client = JSONRPCClient(
            host='0.0.0.0',
            privkey=key,
            print_communication=False,
        )

        balance = '0x0'
        for _ in range(10):
            balance = account_client.call('eth_getBalance', account, 'latest')
            gevent.sleep(1)
            if balance != '0x0':
                break

        if balance == '0x0':
            raise ValueError('account is with a balance of 0')
def wait_for_contract(self, contract_address, timeout=None):
    """ Poll until `contract_address` has code deployed.

    Args:
        contract_address: binary address of the expected contract.
        timeout (float): maximum seconds to wait; `None` waits indefinitely.

    Returns:
        bool: True once code is present, False if the timeout expired first.
    """
    start_time = time.time()
    hex_address = address_encoder(contract_address)

    result = self._raiden.chain.client.call(
        'eth_getCode',
        hex_address,
        'latest',
    )
    while result == '0x':
        # Fix: the original comparison was inverted
        # (`start_time + timeout > current_time`), which returned False while
        # still *inside* the timeout window and never on expiry.
        if timeout and time.time() - start_time > timeout:
            return False

        gevent.sleep(0.5)
        result = self._raiden.chain.client.call(
            'eth_getCode',
            hex_address,
            'latest',
        )

    return result != '0x'
def geth_bare_genesis(genesis_path, private_keys):
    """Write a bare genesis block to `genesis_path`.

    Every account derived from `private_keys` is pre-funded with
    `DEFAULT_BALANCE_BIN` in the genesis `alloc`.

    Args:
        genesis_path (str): path of the genesis JSON file to write.
        private_keys (list): private keys whose addresses get an allocation.
    """
    # deduplicate keys so each address is allocated only once
    account_addresses = [privatekey_to_address(key) for key in set(private_keys)]
    alloc = {address_encoder(address): {"balance": DEFAULT_BALANCE_BIN} for address in account_addresses}
    genesis = GENESIS_STUB.copy()
    genesis["alloc"] = alloc
    with open(genesis_path, "w") as handler:
        json.dump(genesis, handler)
def all_contract_events_raw(rpc, contract_address, start_block=None, end_block=None):
    """Find all events for a deployed contract given its `contract_address`.

    Args:
        rpc (pyethapp.rpc_client.JSONRPCClient): client instance.
        contract_address (string): hex encoded contract address.
        start_block (int): read event-logs starting from this block number.
        end_block (int): read event-logs up to this block number.

    Returns:
        events (list)
    """
    from_block = str(start_block or 0)
    to_block = str(end_block or 'latest')
    filter_params = {
        'fromBlock': from_block,
        'toBlock': to_block,
        'address': address_encoder(normalize_address(contract_address)),
        'topics': [],
    }
    return rpc.call('eth_getLogs', filter_params)
def signed_tx_example(to=z_address, value=100): from ethereum.transactions import Transaction from pyethapp.accounts import mk_privkey, privtoaddr secret_seed = 'wow' privkey = mk_privkey(secret_seed) sender = privtoaddr(privkey) # fetch nonce nonce = quantity_decoder( JSONRPCClient().call('eth_getTransactionCount', address_encoder(sender), 'pending')) # create transaction tx = Transaction(nonce, default_gasprice, default_startgas, to=z_address, value=value, data='') tx.sign(privkey) tx_dict = tx.to_dict() tx_dict.pop('hash') res = JSONRPCClient().eth_sendTransaction(**tx_dict) if len(res) == 20: print 'contract created @', res.encode('hex') else: assert len(res) == 32 print 'tx hash', res.encode('hex')
def signed_tx_example(): from ethereum.transactions import Transaction from pyethapp.accounts import mk_privkey, privtoaddr secret_seed = "wow" privkey = mk_privkey(secret_seed) sender = privtoaddr(privkey) # fetch nonce nonce = quantity_decoder(JSONRPCClient().call("eth_getTransactionCount", address_encoder(sender), "pending")) # create transaction tx = Transaction(nonce, default_gasprice, default_startgas, to=z_address, value=100, data="") tx.sign(privkey) tx_dict = tx.to_dict() tx_dict.pop("hash") res = JSONRPCClient().eth_sendTransaction(**tx_dict) if len(res) == 20: print "contract created @", res.encode("hex") else: assert len(res) == 32 print "tx hash", res.encode("hex")
def nonce(self, address):
    """ Return the pending transaction count for `address`.

    Accepts either a binary address or its 40-char hex encoding.
    """
    binary_address = address.decode('hex') if len(address) == 40 else address
    response = self.call('eth_getTransactionCount', address_encoder(binary_address), 'pending')
    return quantity_decoder(response)
def create_geth_cluster(private_keys, geth_private_keys, p2p_base_port, base_datadir):  # pylint: disable=too-many-locals,too-many-statements
    """ Spawn a private geth cluster, one process per key in `geth_private_keys`.

    The first node is configured as the miner; all accounts derived from
    `private_keys` are pre-funded through the generated genesis block.

    Returns:
        list: the started `subprocess.Popen` handles.
    """
    # TODO: handle better the errors cases:
    # - cant bind, port in use
    start_rpcport = 4000

    account_addresses = [
        privtoaddr(key)
        for key in set(private_keys)
    ]

    alloc = {
        address_encoder(address): {
            'balance': DEFAULT_BALANCE,
        }
        for address in account_addresses
    }

    genesis = {
        'config': {
            'homesteadBlock': 0,
        },
        'nonce': '0x0000000000000042',
        'mixhash': '0x0000000000000000000000000000000000000000000000000000000000000000',
        'difficulty': '0x40',
        'coinbase': '0x0000000000000000000000000000000000000000',
        'timestamp': '0x00',
        'parentHash': '0x0000000000000000000000000000000000000000000000000000000000000000',
        'extraData': 'raiden',
        'gasLimit': GAS_LIMIT_HEX,
        'alloc': alloc,
    }

    nodes_configuration = []
    for pos, key in enumerate(geth_private_keys):
        config = dict()

        # make the first node miner
        if pos == 0:
            config['minerthreads'] = 1  # conservative
            config['unlock'] = 0

        config['nodekey'] = key
        config['nodekeyhex'] = encode_hex(key)
        config['pub'] = encode_hex(privtopub(key))
        config['address'] = privtoaddr(key)
        config['port'] = p2p_base_port + pos
        config['rpcport'] = start_rpcport + pos
        config['enode'] = 'enode://{pub}@127.0.0.1:{port}'.format(
            pub=config['pub'],
            port=config['port'],
        )
        # each node bootstraps from all the nodes configured before it
        config['bootnodes'] = ','.join(node['enode'] for node in nodes_configuration)

        nodes_configuration.append(config)

    cmds = []
    for i, config in enumerate(nodes_configuration):
        nodedir = os.path.join(base_datadir, config['nodekeyhex'])

        os.makedirs(nodedir)
        geth_init_datadir(genesis, nodedir)

        # only the mining node needs an unlocked account
        if 'minerthreads' in config:
            geth_create_account(nodedir, private_keys[i])

        cmds.append(geth_to_cmd(config, nodedir))

    # save current term settings before running geth
    if isinstance(sys.stdin, file):  # check that the test is running on non-capture mode
        term_settings = termios.tcgetattr(sys.stdin)

    processes_list = []
    for cmd in cmds:
        if '--unlock' in cmd:
            process = subprocess.Popen(cmd, universal_newlines=True, stdin=subprocess.PIPE)

            # --password wont work, write password to unlock
            process.stdin.write(DEFAULT_PASSPHRASE + os.linesep)  # Passphrase:
            process.stdin.write(DEFAULT_PASSPHRASE + os.linesep)  # Repeat passphrase:
        else:
            process = subprocess.Popen(cmd)

        processes_list.append(process)
        # a non-None returncode means the process already died
        assert process.returncode is None

    geth_wait_and_check(private_keys)

    # reenter echo mode (disabled by geth pasphrase prompt)
    if isinstance(sys.stdin, file):
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, term_settings)

    return processes_list
def test_get_filter_changes(test_app):
    """ pending/latest/range log filters must each report a contract log
    exactly once, with the correct pending vs. mined field values.
    """
    test_app.mine_next_block()  # start with a fresh block
    sender = address_encoder(test_app.services.accounts.unlocked_accounts()[0].address)

    # deploy the log-emitting contract
    contract_creation = {"from": sender, "data": data_encoder(LOG_EVM)}
    tx_hash = test_app.rpc_request("eth_sendTransaction", contract_creation)
    test_app.mine_next_block()
    receipt = test_app.rpc_request("eth_getTransactionReceipt", tx_hash)
    contract_address = receipt["contractAddress"]
    tx = {"from": sender, "to": contract_address}

    pending_filter_id = test_app.rpc_request("eth_newFilter", {"fromBlock": "pending", "toBlock": "pending"})
    latest_filter_id = test_app.rpc_request("eth_newFilter", {"fromBlock": "latest", "toBlock": "latest"})
    tx_hashes = []
    logs = []

    # tx in pending block
    tx_hashes.append(test_app.rpc_request("eth_sendTransaction", tx))
    logs.append(test_app.rpc_request("eth_getFilterChanges", pending_filter_id))
    assert len(logs[-1]) == 1
    assert logs[-1][0]["type"] == "pending"
    # Fix: `is None` instead of `== None` (PEP 8 identity comparison).
    assert logs[-1][0]["logIndex"] is None
    assert logs[-1][0]["transactionIndex"] is None
    assert logs[-1][0]["transactionHash"] is None
    assert logs[-1][0]["blockHash"] is None
    assert logs[-1][0]["blockNumber"] is None
    assert logs[-1][0]["address"] == contract_address
    pending_log = logs[-1][0]

    logs.append(test_app.rpc_request("eth_getFilterChanges", pending_filter_id))
    assert logs[-1] == []
    logs.append(test_app.rpc_request("eth_getFilterChanges", latest_filter_id))
    assert logs[-1] == []

    test_app.mine_next_block()
    logs.append(test_app.rpc_request("eth_getFilterChanges", latest_filter_id))
    assert len(logs[-1]) == 1  # log from before, but now mined
    assert logs[-1][0]["type"] == "mined"
    assert logs[-1][0]["logIndex"] == "0x0"
    assert logs[-1][0]["transactionIndex"] == "0x0"
    assert logs[-1][0]["transactionHash"] == tx_hashes[-1]
    assert logs[-1][0]["blockHash"] == data_encoder(test_app.services.chain.chain.head.hash)
    assert logs[-1][0]["blockNumber"] == quantity_encoder(test_app.services.chain.chain.head.number)
    assert logs[-1][0]["address"] == contract_address
    logs_in_range = [logs[-1][0]]

    # send tx and mine block
    tx_hashes.append(test_app.rpc_request("eth_sendTransaction", tx))
    test_app.mine_next_block()
    logs.append(test_app.rpc_request("eth_getFilterChanges", pending_filter_id))
    assert len(logs[-1]) == 1
    assert logs[-1][0]["type"] == "mined"
    assert logs[-1][0]["logIndex"] == "0x0"
    assert logs[-1][0]["transactionIndex"] == "0x0"
    assert logs[-1][0]["transactionHash"] == tx_hashes[-1]
    assert logs[-1][0]["blockHash"] == data_encoder(test_app.services.chain.chain.head.hash)
    assert logs[-1][0]["blockNumber"] == quantity_encoder(test_app.services.chain.chain.head.number)
    assert logs[-1][0]["address"] == contract_address
    logs_in_range.append(logs[-1][0])

    logs.append(test_app.rpc_request("eth_getFilterChanges", latest_filter_id))
    assert logs[-1] == logs[-2]  # latest and pending filter see same (mined) log
    logs.append(test_app.rpc_request("eth_getFilterChanges", latest_filter_id))
    assert logs[-1] == []

    test_app.mine_next_block()
    logs.append(test_app.rpc_request("eth_getFilterChanges", pending_filter_id))
    assert logs[-1] == []

    # a range filter covering the last few blocks plus pending sees everything
    range_filter_id = test_app.rpc_request(
        "eth_newFilter",
        {"fromBlock": quantity_encoder(test_app.services.chain.chain.head.number - 3),
         "toBlock": "pending"},
    )
    tx_hashes.append(test_app.rpc_request("eth_sendTransaction", tx))
    logs.append(test_app.rpc_request("eth_getFilterChanges", range_filter_id))
    assert sorted(logs[-1]) == sorted(logs_in_range + [pending_log])
def get_our_address(self):
    """ Return this node's hex encoded address wrapped in a standard API response. """
    our_address = address_encoder(self.raiden_api.address)
    return api_response(result=dict(our_address=our_address))
def get_our_address(self):
    """ Return a mapping with this node's hex encoded address. """
    return dict(our_address=address_encoder(self.raiden_api.address))
def _check_exists(self):
    """ Raise `AddressWithoutCode` when the channel address holds no contract code. """
    code = self.tester_state.block.get_code(self.address)
    if code == '':
        raise AddressWithoutCode(
            'Netting channel address {} does not contain code'.format(
                address_encoder(self.address),
            ))
def _serialize(self, value, attr, obj):
    """ Serialize a binary address to its hex encoded form. """
    encoded = address_encoder(value)
    return encoded
def to_url(self, value):
    """ Convert a binary address to its hex encoded URL representation. """
    return address_encoder(value)
def get_balance(account):
    """ Return the pending balance of `account`. """
    response = JSONRPCClient().call('eth_getBalance', address_encoder(account), 'pending')
    return quantity_decoder(response)
def app(address,
        keystore_path,
        eth_rpc_endpoint,
        registry_contract_address,
        discovery_contract_address,
        listen_address,
        rpccorsdomain,
        mapped_socket,
        logging,
        logfile,
        log_json,
        max_unresponsive_time,
        send_ping_time,
        api_address,
        rpc,
        sync_check,
        console,
        password_file,
        web_ui,
        datadir,
        eth_client_communication,
        nat):
    # pylint: disable=too-many-locals,too-many-branches,too-many-statements,unused-argument
    """ Assemble and return a configured raiden `App` from the CLI options.

    NOTE(review): several parameters (rpccorsdomain, logging, logfile,
    log_json, send_ping_time, nat) are not used in this body — presumably
    consumed by CLI decorators/other layers; confirm before removing.
    """
    from raiden.app import App
    from raiden.network.rpc.client import BlockChainService

    (listen_host, listen_port) = split_endpoint(listen_address)
    (api_host, api_port) = split_endpoint(api_address)

    config = App.DEFAULT_CONFIG.copy()
    config['host'] = listen_host
    config['port'] = listen_port
    config['console'] = console
    config['rpc'] = rpc
    config['web_ui'] = rpc and web_ui
    config['api_host'] = api_host
    config['api_port'] = api_port

    # when NAT traversal provided a mapped socket, advertise the external
    # address; otherwise fall back to the local listen endpoint
    if mapped_socket:
        config['socket'] = mapped_socket.socket
        config['external_ip'] = mapped_socket.external_ip
        config['external_port'] = mapped_socket.external_port
    else:
        config['socket'] = None
        config['external_ip'] = listen_host
        config['external_port'] = listen_port

    config['protocol']['nat_keepalive_retries'] = DEFAULT_NAT_KEEPALIVE_RETRIES
    timeout = max_unresponsive_time / DEFAULT_NAT_KEEPALIVE_RETRIES
    config['protocol']['nat_keepalive_timeout'] = timeout

    address_hex = address_encoder(address) if address else None
    address_hex, privatekey_bin = prompt_account(address_hex, keystore_path, password_file)
    privatekey_hex = privatekey_bin.encode('hex')
    config['privatekey_hex'] = privatekey_hex

    endpoint = eth_rpc_endpoint

    # Fallback default port if only an IP address is given
    rpc_port = 8545
    if eth_rpc_endpoint.startswith("http://"):
        endpoint = eth_rpc_endpoint[len("http://"):]
        rpc_port = 80
    elif eth_rpc_endpoint.startswith("https://"):
        endpoint = eth_rpc_endpoint[len("https://"):]
        rpc_port = 443

    if ':' not in endpoint:  # no port was given in url
        rpc_host = endpoint
    else:
        rpc_host, rpc_port = split_endpoint(endpoint)

    rpc_client = JSONRPCClient(
        privkey=privatekey_bin,
        host=rpc_host,
        port=rpc_port,
        print_communication=eth_client_communication,
    )

    # this assumes the eth node is already online
    patch_send_transaction(rpc_client)
    patch_send_message(rpc_client)

    try:
        blockchain_service = BlockChainService(
            privatekey_bin,
            rpc_client,
        )
    except ValueError as e:
        # ValueError exception raised if:
        # - The registry contract address doesn't have code, this might happen
        # if the connected geth process is not synced or if the wrong address
        # is provided (e.g. using the address from a smart contract deployed on
        # ropsten with a geth node connected to morden)
        print(e.message)
        sys.exit(1)

    if sync_check:
        try:
            net_id = int(blockchain_service.client.call('net_version'))
            network = ID_TO_NETWORKNAME[net_id]
        except:  # pylint: disable=bare-except
            print(
                "Couldn't determine the network the ethereum node is connected to.\n"
                "Because of this there is not way to determine the latest\n"
                "block with an oracle, and the events from the ethereum\n"
                "node cannot be trusted. Giving up.\n"
            )
            sys.exit(1)

        url = ETHERSCAN_API.format(
            network=network,
            action='eth_blockNumber',
        )
        wait_for_sync(
            blockchain_service,
            url=url,
            tolerance=ORACLE_BLOCKNUMBER_DRIFT_TOLERANCE,
            sleep=3,
        )

    # block until the account can afford the discovery registration
    discovery_tx_cost = GAS_PRICE * DISCOVERY_REGISTRATION_GAS
    while True:
        balance = blockchain_service.client.balance(address_hex)
        if discovery_tx_cost <= balance:
            break
        print('Account has insufficient funds for discovery registration.\n'
              'Needed: {} ETH\n'
              'Available: {} ETH.\n'
              'Please deposit additional funds on this account.'.format(
                  discovery_tx_cost / float(denoms.ether), balance / float(denoms.ether)))
        if not click.confirm('Try again?'):
            sys.exit(1)

    registry = blockchain_service.registry(
        registry_contract_address,
    )

    discovery = ContractDiscovery(
        blockchain_service.node_address,
        blockchain_service.discovery(discovery_contract_address))

    if datadir is None:
        # default database directory
        raiden_directory = os.path.join(os.path.expanduser('~'), '.raiden')
    else:
        raiden_directory = datadir

    if not os.path.exists(raiden_directory):
        os.makedirs(raiden_directory)
    # per-account database directory keyed by the address prefix
    user_db_dir = os.path.join(raiden_directory, address_hex[:8])
    if not os.path.exists(user_db_dir):
        os.makedirs(user_db_dir)
    database_path = os.path.join(user_db_dir, 'log.db')
    config['database_path'] = database_path

    return App(
        config,
        blockchain_service,
        registry,
        discovery,
    )
def cached_genesis(request, blockchain_type):
    """ Deploy all contracts that are required by the fixtures into a tester and
    then serialize the accounts into a genesis block.

    Returns:
        dict: A dictionary representing the genesis block.
    """
    if not request.config.option.blockchain_cache:
        return

    # only the geth backend consumes a genesis block
    if blockchain_type != 'geth':
        return

    # this will create the tester _and_ deploy the Registry
    deploy_key = request.getfixturevalue('deploy_key')
    private_keys = request.getfixturevalue('private_keys')
    deploy_service, blockchain_services = _tester_services(
        deploy_key,
        private_keys,
        request.getfixturevalue('tester_blockgas_limit'),
    )

    # create_network only registers the tokens,
    # the contracts must be deployed previously
    register = True
    token_contract_addresses = _token_addresses(
        request.getfixturevalue('token_amount'),
        request.getfixturevalue('number_of_tokens'),
        deploy_service,
        blockchain_services,
        register)

    raiden_apps = create_apps(
        blockchain_services,
        request.getfixturevalue('raiden_udp_ports'),
        DummyTransport,  # Do not use a UDP server to avoid port reuse in MacOSX
        request.config.option.verbose,
        request.getfixturevalue('send_ping_time'),
        request.getfixturevalue('max_unresponsive_time'),
        request.getfixturevalue('reveal_timeout'),
        request.getfixturevalue('settle_timeout'),
        request.getfixturevalue('database_paths'),
    )

    if 'raiden_network' in request.fixturenames:
        create_network_channels(
            raiden_apps,
            token_contract_addresses,
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    elif 'raiden_chain' in request.fixturenames:
        create_sequential_channels(
            raiden_apps,
            token_contract_addresses[0],
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    # else: a test that is not creating channels

    for app in raiden_apps:
        app.stop()

    # save the state from the last block into a genesis dict
    tester = blockchain_services[0].tester_state
    tester.mine()
    registry_address = blockchain_services[0].default_registry.address

    genesis_alloc = dict()
    for account_address in tester.block.state.to_dict():
        account_alloc = tester.block.account_to_dict(account_address)

        # Both keys and values of the account storage associative array
        # must now be encoded with 64 hex digits
        if account_alloc['storage']:
            new_storage = dict()
            for key, val in account_alloc['storage'].iteritems():
                # account_to_dict() from pyethereum can return 0x for a storage
                # position. That is an invalid way of representing 0x0, which we
                # have to take care of here.
                new_key = '0x%064x' % int(key if key != '0x' else '0x0', 16)
                new_val = '0x%064x' % int(val, 16)
                new_storage[new_key] = new_val
            account_alloc['storage'] = new_storage

        # code must be hex encoded with 0x prefix
        account_alloc['code'] = account_alloc.get('code', '')

        # account_to_dict returns accounts with nonce=0 and the nonce must
        # be encoded with 16 hex digits
        account_alloc['nonce'] = '0x%016x' % tester.block.get_nonce(
            account_address)

        genesis_alloc[account_address] = account_alloc

    account_addresses = [
        privatekey_to_address(key)
        for key in set(private_keys)
    ]

    # make sure every test account starts with the default balance
    for address in account_addresses:
        genesis_alloc[address]['balance'] = DEFAULT_BALANCE_BIN

    alloc = {
        address_encoder(address_maybe_bin): data
        for address_maybe_bin, data in genesis_alloc.iteritems()
    }

    genesis = GENESIS_STUB.copy()
    genesis['alloc'] = alloc
    genesis['config']['defaultRegistryAddress'] = address_encoder(
        registry_address)
    genesis['config']['tokenAddresses'] = [
        address_encoder(token_address)
        for token_address in token_contract_addresses
    ]

    return genesis
def test_query_events(raiden_chain, deposit, settle_timeout, events_poll_timeout):
    """ The event handler must surface TokenAdded, ChannelNew,
    ChannelNewBalance, ChannelClosed and ChannelSettled events queried from the
    chain, in that order, as the channel goes through its full lifecycle.
    """
    app0, app1 = raiden_chain  # pylint: disable=unbalanced-tuple-unpacking

    token_address = app0.raiden.chain.default_registry.token_addresses()[0]

    # no channels exist yet on either node
    assert len(app0.raiden.managers_by_token_address[token_address].address_channel) == 0
    assert len(app1.raiden.managers_by_token_address[token_address].address_channel) == 0

    token0 = app0.raiden.chain.token(token_address)
    manager0 = app0.raiden.chain.manager_by_token(token_address)

    events = app0.raiden.event_handler.get_network_events(from_block=0)
    assert len(events) == 1
    assert event_dicts_are_equal(events[0], {
        '_event_type': 'TokenAdded',
        'channel_manager_address': address_encoder(manager0.address),
        'token_address': address_encoder(token_address),
    })

    netcontract_address = manager0.new_netting_channel(
        app0.raiden.address,
        app1.raiden.address,
        settle_timeout,
    )

    events = app0.raiden.event_handler.get_token_network_events(
        token_address=address_encoder(token_address),
        from_block=0)
    assert len(events) == 1
    assert event_dicts_are_equal(events[0], {
        '_event_type': 'ChannelNew',
        'settle_timeout': 10,
        'netting_channel': address_encoder(netcontract_address),
        'participant1': address_encoder(app0.raiden.address),
        'participant2': address_encoder(app1.raiden.address),
    })

    netting_channel0 = app0.raiden.chain.netting_channel(netcontract_address)

    # give the event polling task a chance to pick up the new channel
    gevent.sleep(events_poll_timeout)

    # channel is created but not opened and without funds
    assert len(app0.raiden.managers_by_token_address[token_address].address_channel) == 1
    assert len(app1.raiden.managers_by_token_address[token_address].address_channel) == 1

    channel0 = app0.raiden.managers_by_token_address[token_address].address_channel.values()[0]
    channel1 = app1.raiden.managers_by_token_address[token_address].address_channel.values()[0]

    assert_synched_channels(
        channel0, 0, [],
        channel1, 0, [],
    )

    token0.approve(netcontract_address, deposit)
    netting_channel0.deposit(app0.raiden.address, deposit)

    gevent.sleep(events_poll_timeout)

    events = app0.raiden.event_handler.get_channel_events(
        channel_address=address_encoder(netcontract_address),
        event_id=CHANNELNEWBALANCE_EVENTID,
        from_block=0)
    assert len(events) == 1
    assert event_dicts_are_equal(events[0], {
        '_event_type': 'ChannelNewBalance',
        'token_address': address_encoder(token_address),
        'participant': address_encoder(app0.raiden.address),
        'balance': deposit,
        'block_number': 'ignore',
    })

    channel0.external_state.close(app0.raiden.address, '')

    events = app0.raiden.event_handler.get_channel_events(
        channel_address=address_encoder(netcontract_address),
        event_id=CHANNELCLOSED_EVENTID,
        from_block=0)
    assert len(events) == 1
    assert event_dicts_are_equal(events[0], {
        '_event_type': 'ChannelClosed',
        'closing_address': address_encoder(app0.raiden.address),
        'block_number': 'ignore',
    })

    # advance past the settlement window so settle() is allowed
    settle_expiration = app0.raiden.chain.block_number() + settle_timeout + 1
    wait_until_block(app0.raiden.chain, settle_expiration)

    channel1.external_state.settle()

    events = app0.raiden.event_handler.get_channel_events(
        channel_address=address_encoder(netcontract_address),
        event_id=CHANNELSETTLED_EVENTID,
        from_block=0)
    assert len(events) == 1
    assert event_dicts_are_equal(events[0], {
        '_event_type': 'ChannelSettled',
        'block_number': 'ignore',
    })
def test_get_filter_changes(test_app):
    """ pending/latest/range log filters must each report a contract log
    exactly once, with the correct pending vs. mined field values.
    """
    test_app.mine_next_block()  # start with a fresh block
    sender = address_encoder(
        test_app.services.accounts.unlocked_accounts()[0].address)

    # deploy the log-emitting contract
    contract_creation = {'from': sender, 'data': data_encoder(LOG_EVM)}
    tx_hash = test_app.rpc_request('eth_sendTransaction', contract_creation)
    test_app.mine_next_block()
    receipt = test_app.rpc_request('eth_getTransactionReceipt', tx_hash)
    contract_address = receipt['contractAddress']
    tx = {'from': sender, 'to': contract_address}

    pending_filter_id = test_app.rpc_request('eth_newFilter', {
        'fromBlock': 'pending',
        'toBlock': 'pending'
    })
    latest_filter_id = test_app.rpc_request('eth_newFilter', {
        'fromBlock': 'latest',
        'toBlock': 'latest'
    })
    tx_hashes = []
    logs = []

    # tx in pending block
    tx_hashes.append(test_app.rpc_request('eth_sendTransaction', tx))
    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert len(logs[-1]) == 1
    assert logs[-1][0]['type'] == 'pending'
    # Fix: `is None` instead of `== None` (PEP 8 identity comparison).
    assert logs[-1][0]['logIndex'] is None
    assert logs[-1][0]['transactionIndex'] is None
    assert logs[-1][0]['transactionHash'] is None
    assert logs[-1][0]['blockHash'] is None
    assert logs[-1][0]['blockNumber'] is None
    assert logs[-1][0]['address'] == contract_address
    pending_log = logs[-1][0]

    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert logs[-1] == []
    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert logs[-1] == []

    test_app.mine_next_block()
    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert len(logs[-1]) == 1  # log from before, but now mined
    assert logs[-1][0]['type'] == 'mined'
    assert logs[-1][0]['logIndex'] == '0x0'
    assert logs[-1][0]['transactionIndex'] == '0x0'
    assert logs[-1][0]['transactionHash'] == tx_hashes[-1]
    assert logs[-1][0]['blockHash'] == data_encoder(
        test_app.services.chain.chain.head.hash)
    assert logs[-1][0]['blockNumber'] == quantity_encoder(
        test_app.services.chain.chain.head.number)
    assert logs[-1][0]['address'] == contract_address
    logs_in_range = [logs[-1][0]]

    # send tx and mine block
    tx_hashes.append(test_app.rpc_request('eth_sendTransaction', tx))
    test_app.mine_next_block()
    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert len(logs[-1]) == 1
    assert logs[-1][0]['type'] == 'mined'
    assert logs[-1][0]['logIndex'] == '0x0'
    assert logs[-1][0]['transactionIndex'] == '0x0'
    assert logs[-1][0]['transactionHash'] == tx_hashes[-1]
    assert logs[-1][0]['blockHash'] == data_encoder(
        test_app.services.chain.chain.head.hash)
    assert logs[-1][0]['blockNumber'] == quantity_encoder(
        test_app.services.chain.chain.head.number)
    assert logs[-1][0]['address'] == contract_address
    logs_in_range.append(logs[-1][0])

    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert logs[-1] == logs[-2]  # latest and pending filter see same (mined) log
    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert logs[-1] == []

    test_app.mine_next_block()
    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert logs[-1] == []

    # a range filter covering the last few blocks plus pending sees everything
    range_filter_id = test_app.rpc_request(
        'eth_newFilter', {
            'fromBlock': quantity_encoder(test_app.services.chain.chain.head.number - 3),
            'toBlock': 'pending'
        })
    tx_hashes.append(test_app.rpc_request('eth_sendTransaction', tx))
    logs.append(test_app.rpc_request('eth_getFilterChanges', range_filter_id))
    assert sorted(logs[-1]) == sorted(logs_in_range + [pending_log])
def cached_genesis(request, blockchain_type):
    """ Deploy all contracts that are required by the fixtures into a tester
    and then serialize the accounts into a genesis block.

    Only applies when the blockchain cache is enabled and the backend is
    geth; otherwise returns None and the caller falls back to a full deploy.

    Returns:
        dict: A dictionary representing the genesis block.
    """
    if not request.config.option.blockchain_cache:
        return

    if blockchain_type != 'geth':
        return

    # this will create the tester _and_ deploy the Registry
    deploy_key = request.getfixturevalue('deploy_key')
    private_keys = request.getfixturevalue('private_keys')
    deploy_service, blockchain_services = _tester_services(
        deploy_key,
        private_keys,
        request.getfixturevalue('tester_blockgas_limit'),
    )

    # create_network only registers the tokens,
    # the contracts must be deployed previously
    register = True
    participants = [
        privatekey_to_address(privatekey)
        for privatekey in private_keys
    ]
    token_contract_addresses = _token_addresses(
        request.getfixturevalue('token_amount'),
        request.getfixturevalue('number_of_tokens'),
        deploy_service,
        participants,
        register)

    endpoint_discovery_address = deploy_service.deploy_contract(
        'EndpointRegistry',
        'EndpointRegistry.sol',
    )

    endpoint_discovery_services = [
        ContractDiscovery(
            chain.node_address,
            chain.discovery(endpoint_discovery_address),
        )
        for chain in blockchain_services
    ]

    raiden_apps = create_apps(
        blockchain_services,
        endpoint_discovery_services,
        request.getfixturevalue('raiden_udp_ports'),
        DummyTransport,  # Do not use a UDP server to avoid port reuse in MacOSX
        request.config.option.verbose,
        request.getfixturevalue('reveal_timeout'),
        request.getfixturevalue('settle_timeout'),
        request.getfixturevalue('database_paths'),
        request.getfixturevalue('retry_interval'),
        request.getfixturevalue('retries_before_backoff'),
        request.getfixturevalue('throttle_capacity'),
        request.getfixturevalue('throttle_fill_rate'),
        request.getfixturevalue('nat_invitation_timeout'),
        request.getfixturevalue('nat_keepalive_retries'),
        request.getfixturevalue('nat_keepalive_timeout'),
    )

    if 'raiden_network' in request.fixturenames:
        create_network_channels(
            raiden_apps,
            token_contract_addresses,
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    elif 'raiden_chain' in request.fixturenames:
        create_sequential_channels(
            raiden_apps,
            token_contract_addresses[0],
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    # else: a test that is not creating channels

    for app in raiden_apps:
        app.stop(leave_channels=False)

    # save the state from the last block into a genesis dict
    tester = blockchain_services[0].tester_state
    tester.mine()
    registry_address = blockchain_services[0].default_registry.address

    genesis_alloc = dict()
    for account_address in tester.block.state.to_dict():
        account_alloc = tester.block.account_to_dict(account_address)

        # Both keys and values of the account storage associative array
        # must now be encoded with 64 hex digits
        if account_alloc['storage']:
            account_alloc['storage'] = fix_tester_storage(
                account_alloc['storage'])

        # code must be hex encoded with 0x prefix
        account_alloc['code'] = account_alloc.get('code', '')

        # account_to_dict returns accounts with nonce=0 and the nonce must
        # be encoded with 16 hex digits
        account_alloc['nonce'] = '0x%016x' % tester.block.get_nonce(
            account_address)

        genesis_alloc[account_address] = account_alloc

    account_addresses = [
        privatekey_to_address(key)
        for key in set(private_keys)
    ]

    for address in account_addresses:
        genesis_alloc[address]['balance'] = DEFAULT_BALANCE_BIN

    alloc = {
        address_encoder(address_maybe_bin): data
        for address_maybe_bin, data in genesis_alloc.iteritems()
    }

    genesis = GENESIS_STUB.copy()
    # BUG FIX: dict.copy() is shallow, so mutating genesis['config'] below
    # would also mutate the shared GENESIS_STUB constant across calls.
    # Re-copy the nested 'config' dict before writing into it.
    genesis['config'] = GENESIS_STUB['config'].copy()
    genesis['alloc'] = alloc
    genesis['config']['defaultDiscoveryAddress'] = address_encoder(
        endpoint_discovery_address)
    genesis['config']['defaultRegistryAddress'] = address_encoder(
        registry_address)
    genesis['config']['tokenAddresses'] = [
        address_encoder(token_address)
        for token_address in token_contract_addresses
    ]

    return genesis
def to_url(self, value):
    """Serialize *value* (a binary address) into its URL (hex) form."""
    encoded_address = address_encoder(value)
    return encoded_address
def test_query_events(raiden_chain, deposit, settle_timeout, events_poll_timeout):
    """Query registry, channel-manager and netting-channel events through a
    full channel lifecycle: token registered -> channel created -> deposit ->
    close -> settle.

    For each stage the test asserts both the full event list (ALL_EVENTS)
    and the filtered query, and that out-of-range block queries are empty.
    """
    app0, app1 = raiden_chain  # pylint: disable=unbalanced-tuple-unpacking

    token_address = app0.raiden.chain.default_registry.token_addresses()[0]

    # no channels exist yet for this token on either node
    assert len(app0.raiden.channelgraphs[token_address].address_channel) == 0
    assert len(app1.raiden.channelgraphs[token_address].address_channel) == 0

    token0 = app0.raiden.chain.token(token_address)
    manager0 = app0.raiden.chain.manager_by_token(token_address)

    # registry: the token registration must have emitted exactly one
    # TokenAdded event
    events = get_all_registry_events(
        app0.raiden.chain,
        app0.raiden.chain.default_registry.address,
        events=ALL_EVENTS,
        from_block=0,
        to_block='latest',
    )

    assert len(events) == 1
    assert event_dicts_are_equal(events[0], {
        '_event_type': 'TokenAdded',
        'channel_manager_address': address_encoder(manager0.address),
        'token_address': address_encoder(token_address),
    })

    # a block range far in the future must match nothing
    events = get_all_registry_events(
        app0.raiden.chain,
        app0.raiden.chain.default_registry.address,
        events=ALL_EVENTS,
        from_block=999999998,
        to_block=999999999,
    )
    assert not events

    netcontract_address = manager0.new_netting_channel(
        app0.raiden.address,
        app1.raiden.address,
        settle_timeout,
    )

    # channel manager: creating the channel emits a single ChannelNew event
    events = get_all_channel_manager_events(
        app0.raiden.chain,
        manager0.address,
        events=ALL_EVENTS,
        from_block=0,
        to_block='latest',
    )

    assert len(events) == 1
    assert event_dicts_are_equal(events[0], {
        '_event_type': 'ChannelNew',
        'settle_timeout': settle_timeout,
        'netting_channel': address_encoder(netcontract_address),
        'participant1': address_encoder(app0.raiden.address),
        'participant2': address_encoder(app1.raiden.address),
    })

    events = get_all_channel_manager_events(
        app0.raiden.chain,
        manager0.address,
        events=ALL_EVENTS,
        from_block=999999998,
        to_block=999999999,
    )
    assert not events

    netting_channel0 = app0.raiden.chain.netting_channel(netcontract_address)

    # give both nodes time to poll the new channel from the chain
    gevent.sleep(events_poll_timeout * 2)

    # channel is created but not opened and without funds
    assert len(app0.raiden.channelgraphs[token_address].address_channel) == 1
    assert len(app1.raiden.channelgraphs[token_address].address_channel) == 1

    # Python 2: dict.values() returns a list, so [0] indexing is valid
    channel0 = app0.raiden.channelgraphs[token_address].address_channel.values()[0]
    channel1 = app1.raiden.channelgraphs[token_address].address_channel.values()[0]

    assert_synched_channels(
        channel0, 0, [],
        channel1, 0, [],
    )

    token0.approve(netcontract_address, deposit)
    netting_channel0.deposit(deposit)

    # netting channel: the deposit emits a ChannelNewBalance event
    all_netting_channel_events = get_all_netting_channel_events(
        app0.raiden.chain,
        netting_channel_address=netcontract_address,
        from_block=0,
        to_block='latest',
    )

    events = get_all_netting_channel_events(
        app0.raiden.chain,
        netcontract_address,
        events=[CHANNELNEWBALANCE_EVENTID],
    )

    assert len(all_netting_channel_events) == 1
    assert len(events) == 1

    new_balance_event = {
        '_event_type': 'ChannelNewBalance',
        'token_address': address_encoder(token_address),
        'participant': address_encoder(app0.raiden.address),
        'balance': deposit,
        'block_number': 'ignore',  # block number is non-deterministic
    }

    assert event_dicts_are_equal(all_netting_channel_events[-1], new_balance_event)
    assert event_dicts_are_equal(events[0], new_balance_event)

    # closing with an empty transfer proof
    channel0.external_state.close('')

    all_netting_channel_events = get_all_netting_channel_events(
        app0.raiden.chain,
        netting_channel_address=netcontract_address,
        from_block=0,
        to_block='latest',
    )

    events = get_all_netting_channel_events(
        app0.raiden.chain,
        netcontract_address,
        events=[CHANNELCLOSED_EVENTID],
    )

    assert len(all_netting_channel_events) == 2
    assert len(events) == 1

    closed_event = {
        '_event_type': 'ChannelClosed',
        'closing_address': address_encoder(app0.raiden.address),
        'block_number': 'ignore',
    }

    assert event_dicts_are_equal(all_netting_channel_events[-1], closed_event)
    assert event_dicts_are_equal(events[0], closed_event)

    # wait past the settlement window before settling
    settle_expiration = app0.raiden.chain.block_number() + settle_timeout + 1
    wait_until_block(app0.raiden.chain, settle_expiration)

    channel1.external_state.settle()

    all_netting_channel_events = get_all_netting_channel_events(
        app0.raiden.chain,
        netting_channel_address=netcontract_address,
        from_block=0,
        to_block='latest',
    )

    events = get_all_netting_channel_events(
        app0.raiden.chain,
        netcontract_address,
        events=[CHANNELSETTLED_EVENTID],
    )

    assert len(all_netting_channel_events) == 3
    assert len(events) == 1

    settled_event = {
        '_event_type': 'ChannelSettled',
        'block_number': 'ignore',
    }

    assert event_dicts_are_equal(all_netting_channel_events[-1], settled_event)
    assert event_dicts_are_equal(events[0], settled_event)
def cached_genesis(request, blockchain_type):
    """ Deploy all contracts that are required by the fixtures into a tester
    and then serialize the accounts into a genesis block.

    Returns None (no caching) when the blockchain cache option is off or the
    backend is the mock blockchain.

    Returns:
        dict: A dictionary representing the genesis block.
    """
    if not request.config.option.blockchain_cache:
        return

    # cannot cache for mock blockchain
    if blockchain_type == 'mock':
        return

    # this will create the tester _and_ deploy the Registry
    deploy_key = request.getfixturevalue('deploy_key')
    private_keys = request.getfixturevalue('private_keys')
    deploy_service, blockchain_services = _tester_services(
        deploy_key,
        private_keys,
        request.getfixturevalue('tester_blockgas_limit'),
    )

    # create_network only registers the tokens,
    # the contracts must be deployed previously
    token_contract_addresses = _tokens_addresses(
        request.getfixturevalue('token_amount'),
        request.getfixturevalue('number_of_tokens'),
        deploy_service,
        blockchain_services,
    )

    raiden_apps = create_apps(
        blockchain_services,
        request.getfixturevalue('raiden_udp_ports'),
        DummyTransport,  # Do not use a UDP server to avoid port reuse in MacOSX
        request.config.option.verbose,
        request.getfixturevalue('send_ping_time'),
        request.getfixturevalue('max_unresponsive_time'),
        request.getfixturevalue('reveal_timeout'),
    )

    if 'raiden_network' in request.fixturenames:
        create_network_channels(
            raiden_apps,
            token_contract_addresses,
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    elif 'raiden_chain' in request.fixturenames:
        create_sequential_channels(
            raiden_apps,
            token_contract_addresses[0],
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    # else: a test that is not creating channels

    for app in raiden_apps:
        app.stop()

    # save the state from the last block into a genesis dict
    tester = blockchain_services[0].tester_state
    tester.mine()
    registry_address = blockchain_services[0].default_registry.address

    genesis_alloc = dict()
    for account_address in tester.block.state.to_dict():
        account_alloc = tester.block.account_to_dict(account_address)

        # code must be hex encoded without 0x prefix
        account_alloc['code'] = safe_lstrip_hex(account_alloc.get('code', ''))

        # account_to_dict returns accounts with nonce=0
        account_alloc['nonce'] = tester.block.get_nonce(account_address)

        genesis_alloc[account_address] = account_alloc

    account_addresses = [
        privatekey_to_address(key)
        for key in set(private_keys)
    ]

    for address in account_addresses:
        genesis_alloc[address]['balance'] = DEFAULT_BALANCE_BIN

    alloc = {
        safe_lstrip_hex(address_encoder(address_maybe_bin)): data
        for address_maybe_bin, data in genesis_alloc.iteritems()
    }

    genesis = GENESIS_STUB.copy()
    # BUG FIX: dict.copy() is shallow, so writing into genesis['config']
    # below would mutate the shared GENESIS_STUB constant across calls.
    # Re-copy the nested 'config' dict before writing into it.
    genesis['config'] = GENESIS_STUB['config'].copy()
    genesis['alloc'] = alloc
    genesis['config']['defaultRegistryAddress'] = address_encoder(
        registry_address)
    genesis['config']['tokenAddresses'] = [
        address_encoder(token_address)
        for token_address in token_contract_addresses
    ]

    return genesis
def cached_genesis(request, blockchain_type):
    """ Deploy all contracts that are required by the fixtures into a tester
    and then serialize the accounts into a genesis block.

    Returns None (no caching) when the blockchain cache option is off or the
    backend is the mock blockchain.

    Returns:
        dict: A dictionary representing the genesis block.
    """
    if not request.config.option.blockchain_cache:
        return

    # cannot cache for mock blockchain
    if blockchain_type == 'mock':
        return

    # this will create the tester _and_ deploy the Registry
    deploy_key = request.getfixturevalue('deploy_key')
    private_keys = request.getfixturevalue('private_keys')
    deploy_service, blockchain_services = _tester_services(
        deploy_key,
        private_keys,
        request.getfixturevalue('tester_blockgas_limit'),
    )

    # create_network only register the assets, the contracts must be deployed
    # previously
    asset_contract_addresses = _assets_addresses(
        request.getfixturevalue('asset_amount'),
        request.getfixturevalue('number_of_assets'),
        deploy_service,
        blockchain_services,
    )

    raiden_apps = create_apps(
        blockchain_services,
        request.getfixturevalue('transport_class'),
        request.config.option.verbose,
        request.getfixturevalue('send_ping_time'),
        request.getfixturevalue('max_unresponsive_time'),
    )

    if 'raiden_network' in request.fixturenames:
        create_network_channels(
            raiden_apps,
            asset_contract_addresses,
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    elif 'raiden_chain' in request.fixturenames:
        create_sequential_channels(
            raiden_apps,
            asset_contract_addresses[0],
            request.getfixturevalue('channels_per_node'),
            request.getfixturevalue('deposit'),
            request.getfixturevalue('settle_timeout'),
        )
    # else: a test that is not creating channels

    for app in raiden_apps:
        app.stop()

    # save the state from the last block into a genesis dict
    tester = blockchain_services[0].tester_state
    tester.mine()
    registry_address = blockchain_services[0].default_registry.address

    genesis_alloc = dict()
    for account_address in tester.block.state.to_dict():
        account_alloc = tester.block.account_to_dict(account_address)

        # code must be hex encoded without 0x prefix
        account_alloc['code'] = safe_lstrip_hex(account_alloc.get('code', ''))

        # account_to_dict returns accounts with nonce=0
        account_alloc['nonce'] = tester.block.get_nonce(account_address)

        genesis_alloc[account_address] = account_alloc

    account_addresses = [
        privatekey_to_address(key)
        for key in set(private_keys)
    ]

    for address in account_addresses:
        genesis_alloc[address]['balance'] = DEFAULT_BALANCE_BIN

    alloc = {
        safe_lstrip_hex(address_encoder(address_maybe_bin)): data
        for address_maybe_bin, data in genesis_alloc.iteritems()
    }

    genesis = GENESIS_STUB.copy()
    # BUG FIX: dict.copy() is shallow, so writing into genesis['config']
    # below would mutate the shared GENESIS_STUB constant across calls.
    # Re-copy the nested 'config' dict before writing into it.
    genesis['config'] = GENESIS_STUB['config'].copy()
    genesis['alloc'] = alloc
    genesis['config']['defaultRegistryAddress'] = address_encoder(registry_address)
    genesis['config']['assetAddresses'] = [
        address_encoder(asset_address)
        for asset_address in asset_contract_addresses
    ]

    return genesis
def app(address,
        keystore_path,
        eth_rpc_endpoint,
        registry_contract_address,
        discovery_contract_address,
        listen_address,
        rpccorsdomain,  # pylint: disable=unused-argument
        mapped_socket,
        logging,
        logfile,
        log_json,
        max_unresponsive_time,
        send_ping_time,
        api_address,
        rpc,
        console,
        password_file,
        web_ui,
        datadir):
    """Build and return a configured Raiden ``App`` from CLI options.

    Assembles the runtime config dict, unlocks the account, connects to the
    ethereum JSON-RPC endpoint, verifies the account can afford the discovery
    registration, sets up contract-based discovery and the per-account
    database directory.

    Exits the process (``sys.exit(1)``) when the registry contract has no
    code or the user gives up on funding the account.

    NOTE(review): ``logging``, ``logfile`` and ``log_json`` are accepted but
    not referenced in this body — presumably consumed by the CLI layer;
    confirm before removing.
    """
    from raiden.app import App
    from raiden.network.rpc.client import BlockChainService

    # config_file = args.config_file
    (listen_host, listen_port) = split_endpoint(listen_address)
    (api_host, api_port) = split_endpoint(api_address)

    # NOTE(review): DEFAULT_CONFIG.copy() is shallow, and the nested
    # config['protocol'] dict is mutated below — this writes through to
    # App.DEFAULT_CONFIG. Harmless for a once-per-process CLI entry, but
    # worth confirming it is intentional.
    config = App.DEFAULT_CONFIG.copy()
    config['host'] = listen_host
    config['port'] = listen_port
    config['console'] = console
    config['rpc'] = rpc
    config['web_ui'] = rpc and web_ui  # the web UI needs the REST API
    config['api_host'] = api_host
    config['api_port'] = api_port

    if mapped_socket:
        # a NAT-traversal socket was set up for us; advertise its mapping
        config['socket'] = mapped_socket.socket
        config['external_ip'] = mapped_socket.external_ip
        config['external_port'] = mapped_socket.external_port
    else:
        config['socket'] = None
        config['external_ip'] = listen_host
        config['external_port'] = listen_port

    # NOTE(review): dividing a time by a *retries* constant to obtain a
    # retry count looks suspicious — confirm DEFAULT_NAT_KEEPALIVE_RETRIES
    # is the intended divisor. Python 2 '/' on ints truncates here.
    retries = max_unresponsive_time / DEFAULT_NAT_KEEPALIVE_RETRIES
    config['protocol']['nat_keepalive_retries'] = retries
    config['protocol']['nat_keepalive_timeout'] = send_ping_time

    address_hex = address_encoder(address) if address else None
    # prompt_account may ask the user to pick/unlock an account
    address_hex, privatekey_bin = prompt_account(address_hex, keystore_path, password_file)
    privatekey_hex = privatekey_bin.encode('hex')  # Python 2 hex codec
    config['privatekey_hex'] = privatekey_hex

    endpoint = eth_rpc_endpoint

    # Fallback default port if only an IP address is given
    rpc_port = 8545

    if eth_rpc_endpoint.startswith("http://"):
        endpoint = eth_rpc_endpoint[len("http://"):]
        rpc_port = 80
    elif eth_rpc_endpoint.startswith("https://"):
        endpoint = eth_rpc_endpoint[len("https://"):]
        rpc_port = 443

    if ':' not in endpoint:  # no port was given in url
        rpc_host = endpoint
    else:
        # an explicit port overrides the scheme/default port chosen above
        rpc_host, rpc_port = split_endpoint(endpoint)

    try:
        blockchain_service = BlockChainService(
            privatekey_bin,
            registry_contract_address,
            host=rpc_host,
            port=rpc_port,
        )
    except ValueError as e:
        # ValueError exception raised if:
        # - The registry contract address doesn't have code, this might happen
        # if the connected geth process is not synced or if the wrong address
        # is provided (e.g. using the address from a smart contract deployed on
        # ropsten with a geth node connected to morden)
        print(e.message)
        sys.exit(1)

    # block until the account can pay for the discovery registration,
    # re-prompting the user each time the balance is still too low
    discovery_tx_cost = GAS_PRICE * DISCOVERY_REGISTRATION_GAS
    while True:
        balance = blockchain_service.client.balance(address_hex)
        if discovery_tx_cost <= balance:
            break
        print('Account has insufficient funds for discovery registration.\n'
              'Needed: {} ETH\n'
              'Available: {} ETH.\n'
              'Please deposit additional funds on this account.'.format(
                  discovery_tx_cost / float(denoms.ether), balance / float(denoms.ether)))
        if not click.confirm('Try again?'):
            sys.exit(1)

    discovery = ContractDiscovery(
        blockchain_service.node_address,
        blockchain_service.discovery(discovery_contract_address))

    if datadir is None:
        # default database directory
        raiden_directory = os.path.join(os.path.expanduser('~'), '.raiden')
    else:
        raiden_directory = datadir

    if not os.path.exists(raiden_directory):
        os.makedirs(raiden_directory)
    # one database directory per account, keyed on the address prefix
    user_db_dir = os.path.join(raiden_directory, address_hex[:8])
    if not os.path.exists(user_db_dir):
        os.makedirs(user_db_dir)
    database_path = os.path.join(user_db_dir, 'log.db')
    config['database_path'] = database_path

    return App(config, blockchain_service, discovery)
def test_logfilters_topics(test_app):
    """Exercise topic matching of eth_newFilter / eth_getFilterChanges.

    Compiles a sample contract with three events, fires each once
    (Event1(1), Event2(100), Event3(1000)), and checks 21 filters that
    combine event-signature topics (topic1..3) and argument topics
    (topica=1, topicb=100, topicc=1000), including OR-lists of topics.
    """
    sample_compiled = _solidity.compile_code(
        sample_sol_code,
        combined='bin,abi',
    )
    filepath = None
    contract_data = _solidity.solidity_get_contract_data(
        sample_compiled,
        filepath,
        'SampleContract')
    theabi = contract_data['abi']
    theevm = contract_data['bin_hex']

    sender_address = test_app.services.accounts.unlocked_accounts[0].address
    sender = address_encoder(sender_address)

    # event-signature topics derived from the ABI
    event1 = get_event(theabi, 'Event1')
    event2 = get_event(theabi, 'Event2')
    event3 = get_event(theabi, 'Event3')
    event1_id = event_id(*get_eventname_types(event1))
    event2_id = event_id(*get_eventname_types(event2))
    event3_id = event_id(*get_eventname_types(event3))

    test_app.mine_next_block()  # start with a fresh block
    n0 = test_app.services.chain.chain.head.number
    assert n0 == 1

    # deploy the sample contract
    contract_creation = {
        'from': sender,
        'data': '0x' + theevm,
        'gas': quantity_encoder(1000000)
    }
    tx_hash = test_app.client.call('eth_sendTransaction', contract_creation)
    test_app.mine_next_block()
    receipt = test_app.client.call('eth_getTransactionReceipt', tx_hash)
    contract_address = receipt['contractAddress']

    sample_contract = ContractProxy(sender_address, theabi, contract_address,
                                    test_app.client.call,
                                    test_app.client.send_transaction)

    # hex() of a Python 2 long ends in 'L'; strip it for the RPC encoding
    topic1 = hex(event1_id).rstrip("L")
    topic2 = hex(event2_id).rstrip("L")
    topic3 = hex(event3_id).rstrip("L")
    # argument topics: 1, 100 (0x64) and 1000 (0x3e8) as 32-byte words
    topica, topicb, topicc = \
        '0x0000000000000000000000000000000000000000000000000000000000000001',\
        '0x0000000000000000000000000000000000000000000000000000000000000064',\
        '0x00000000000000000000000000000000000000000000000000000000000003e8'

    # filters 1-3: match each event signature alone
    topic_filter_1 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic1]
    })
    topic_filter_2 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic2]
    })
    topic_filter_3 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic3]
    })
    # filters 4-6: signature plus (some of) its argument topics in order
    topic_filter_4 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic1, topica]
    })
    topic_filter_5 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic2, topica, topicb]
    })
    topic_filter_6 = test_app.client.call(
        'eth_newFilter', {
            'fromBlock': 0,
            'toBlock': 'pending',
            'topics': [topic3, topica, topicb, topicc]
        })
    # filters 7-13: position-sensitive combinations; wrong order/position
    # must not match
    topic_filter_7 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topica, topicb, topicc]
    })
    topic_filter_8 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic3, topica, topicb]
    })
    topic_filter_9 = test_app.client.call(
        'eth_newFilter', {
            'fromBlock': 0,
            'toBlock': 'pending',
            'topics': [topicc, topicb, topica, topic3]
        })
    topic_filter_10 = test_app.client.call(
        'eth_newFilter', {
            'fromBlock': 0,
            'toBlock': 'pending',
            'topics': [topicb, topicc, topica, topic3]
        })
    topic_filter_11 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic2, topica]
    })
    topic_filter_12 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic3, topica]
    })
    topic_filter_13 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topica, topicb]
    })
    # filters 14-21: nested lists are OR-alternatives for that position
    topic_filter_14 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic2, [topica, topicb]]
    })
    topic_filter_15 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [[topic1, topic2], topica]
    })
    topic_filter_16 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [[topic1, topic2, topic3]]
    })
    topic_filter_17 = test_app.client.call(
        'eth_newFilter', {
            'fromBlock': 0,
            'toBlock': 'pending',
            'topics': [[topic1, topic2, topic3, topica, topicb, topicc]]
        })
    topic_filter_18 = test_app.client.call(
        'eth_newFilter', {
            'fromBlock': 0,
            'toBlock': 'pending',
            'topics': [topic2, topica, topicb, [topic2, topica, topicb]]
        })
    topic_filter_19 = test_app.client.call('eth_newFilter', {
        'fromBlock': 0,
        'toBlock': 'pending',
        'topics': [topic1, topica, topicb]
    })
    topic_filter_20 = test_app.client.call(
        'eth_newFilter', {
            'fromBlock': 0,
            'toBlock': 'pending',
            'topics': [[topic1, topic2], [topica, topicb], [topica, topicb]]
        })
    topic_filter_21 = test_app.client.call(
        'eth_newFilter', {
            'fromBlock': 0,
            'toBlock': 'pending',
            'topics': [[topic2, topic3], [topica, topicb], [topica, topicb]]
        })

    # sanity check: the contract is actually deployed ('0x' == no code)
    thecode = test_app.client.call('eth_getCode',
                                   address_encoder(sample_contract.address))
    assert len(thecode) > 2

    # fire each event in its own block
    sample_contract.trigger1(1)
    test_app.mine_next_block()
    sample_contract.trigger2(100)
    test_app.mine_next_block()
    sample_contract.trigger3(1000)
    test_app.mine_next_block()

    tl1 = test_app.client.call('eth_getFilterChanges', topic_filter_1)
    assert len(tl1) == 1
    tl2 = test_app.client.call('eth_getFilterChanges', topic_filter_2)
    assert len(tl2) == 1
    tl3 = test_app.client.call('eth_getFilterChanges', topic_filter_3)
    assert len(tl3) == 1
    tl4 = test_app.client.call('eth_getFilterChanges', topic_filter_4)
    assert len(tl4) == 1
    tl5 = test_app.client.call('eth_getFilterChanges', topic_filter_5)
    assert len(tl5) == 1
    tl6 = test_app.client.call('eth_getFilterChanges', topic_filter_6)
    assert len(tl6) == 1
    tl7 = test_app.client.call('eth_getFilterChanges', topic_filter_7)
    assert len(tl7) == 0
    tl8 = test_app.client.call('eth_getFilterChanges', topic_filter_8)
    assert len(tl8) == 1
    tl9 = test_app.client.call('eth_getFilterChanges', topic_filter_9)
    assert len(tl9) == 0
    tl10 = test_app.client.call('eth_getFilterChanges', topic_filter_10)
    assert len(tl10) == 0
    tl11 = test_app.client.call('eth_getFilterChanges', topic_filter_11)
    assert len(tl11) == 1
    tl12 = test_app.client.call('eth_getFilterChanges', topic_filter_12)
    assert len(tl12) == 1
    tl13 = test_app.client.call('eth_getFilterChanges', topic_filter_13)
    assert len(tl13) == 0
    tl14 = test_app.client.call('eth_getFilterChanges', topic_filter_14)
    assert len(tl14) == 1
    tl15 = test_app.client.call('eth_getFilterChanges', topic_filter_15)
    assert len(tl15) == 2
    tl16 = test_app.client.call('eth_getFilterChanges', topic_filter_16)
    assert len(tl16) == 3
    tl17 = test_app.client.call('eth_getFilterChanges', topic_filter_17)
    assert len(tl17) == 3
    tl18 = test_app.client.call('eth_getFilterChanges', topic_filter_18)
    assert len(tl18) == 0
    tl19 = test_app.client.call('eth_getFilterChanges', topic_filter_19)
    assert len(tl19) == 0
    tl20 = test_app.client.call('eth_getFilterChanges', topic_filter_20)
    assert len(tl20) == 1
    tl21 = test_app.client.call('eth_getFilterChanges', topic_filter_21)
    assert len(tl21) == 2
def patch_channel(self, channel_address, balance=None, state=None):
    """PATCH handler for a single channel.

    Exactly one of ``balance`` or ``state`` must be given:
    - ``balance``: deposit the given amount into an open channel.
    - ``state``: transition the channel — close an open channel, or settle
      a closed one whose settlement period has elapsed.

    Returns the JSON serialization of the updated channel on success, or an
    error response with the appropriate HTTP status code.
    """
    if balance is not None and state is not None:
        return make_response(
            'Can not update balance and change channel state at the same time',
            httplib.CONFLICT,
        )
    if balance is None and state is None:
        return make_response(
            'Nothing to do. Should either provide \'balance\' or \'state\' argument',
            httplib.BAD_REQUEST,
        )

    # find the channel
    try:
        channel = self.raiden_api.get_channel(channel_address)
    except ChannelNotFound:
        return make_response(
            "Requested channel {} not found".format(
                address_encoder(channel_address)),
            httplib.CONFLICT
        )

    current_state = channel.state

    # if we patch with `balance` it's a deposit
    if balance is not None:
        if current_state != CHANNEL_STATE_OPENED:
            return make_response(
                "Can't deposit on a closed channel",
                httplib.CONFLICT,
            )
        try:
            raiden_service_result = self.raiden_api.deposit(
                channel.token_address,
                channel.partner_address,
                balance
            )
        except InsufficientFunds as e:
            return make_response(str(e), httplib.PAYMENT_REQUIRED)
        result = self.channel_schema.dump(
            channel_to_api_dict(raiden_service_result))
        return jsonify(result.data)

    if state == CHANNEL_STATE_CLOSED:
        if current_state != CHANNEL_STATE_OPENED:
            return make_response(
                'Attempted to close an already closed channel',
                httplib.CONFLICT,
            )
        raiden_service_result = self.raiden_api.close(
            channel.token_address,
            channel.partner_address
        )
        result = self.channel_schema.dump(
            channel_to_api_dict(raiden_service_result))
        return jsonify(result.data)

    if state == CHANNEL_STATE_SETTLED:
        if current_state == CHANNEL_STATE_SETTLED or current_state == CHANNEL_STATE_OPENED:
            return make_response(
                'Attempted to settle a channel at its {} state'.format(current_state),
                httplib.CONFLICT,
            )
        try:
            raiden_service_result = self.raiden_api.settle(
                channel.token_address,
                channel.partner_address
            )
        except InvalidState:
            # BUG FIX: the error response used to be stored in `result` and
            # fall through to `jsonify(result.data)`, which replaced the
            # CONFLICT status with a 200 and serialized the raw response
            # body. Return the error response directly instead.
            return make_response(
                'Settlement period is not yet over',
                httplib.CONFLICT,
            )
        result = self.channel_schema.dump(
            channel_to_api_dict(raiden_service_result))
        return jsonify(result.data)

    # should never happen, channel_state is validated in the schema
    return make_response(
        'Provided invalid channel state {}'.format(state),
        httplib.BAD_REQUEST,
    )
def test_get_logs(test_app):
    """Exercise eth_getLogs over pending/latest/numeric block ranges.

    Deploys a log-emitting contract (LOG_EVM) and checks that:
    - logs of pending transactions carry no index/hash/block position fields,
    - once mined, the same logs are reported with their block position,
    - block ranges ('latest'..'pending', n0..'pending') aggregate correctly.
    """
    test_app.mine_next_block()  # start with a fresh block
    n0 = test_app.services.chain.chain.head.number
    sender = address_encoder(
        test_app.services.accounts.unlocked_accounts()[0].address)

    # deploy the contract whose code emits a log entry when called
    contract_creation = {
        'from': sender,
        'data': data_encoder(LOG_EVM)
    }
    tx_hash = test_app.rpc_request('eth_sendTransaction', contract_creation)
    test_app.mine_next_block()
    receipt = test_app.rpc_request('eth_getTransactionReceipt', tx_hash)
    contract_address = receipt['contractAddress']
    tx = {
        'from': sender,
        'to': contract_address
    }

    # single log in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs1 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'pending',
        'toBlock': 'pending'
    })
    assert len(logs1) == 1
    assert logs1[0]['type'] == 'pending'
    # a pending log is not part of a block yet, so every position field is
    # null (was `== None`; identity comparison per PEP 8)
    assert logs1[0]['logIndex'] is None
    assert logs1[0]['transactionIndex'] is None
    assert logs1[0]['transactionHash'] is None
    assert logs1[0]['blockHash'] is None
    assert logs1[0]['blockNumber'] is None
    assert logs1[0]['address'] == contract_address

    # querying again yields the same result (unlike a filter, no state)
    logs2 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'pending',
        'toBlock': 'pending'
    })
    assert logs2 == logs1

    # same log, but now mined in head
    test_app.mine_next_block()
    logs3 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'latest',
        'toBlock': 'latest'
    })
    assert len(logs3) == 1
    assert logs3[0]['type'] == 'mined'
    assert logs3[0]['logIndex'] == '0x0'
    assert logs3[0]['transactionIndex'] == '0x0'
    assert logs3[0]['blockHash'] == data_encoder(
        test_app.services.chain.chain.head.hash)
    assert logs3[0]['blockNumber'] == quantity_encoder(
        test_app.services.chain.chain.head.number)
    assert logs3[0]['address'] == contract_address

    # another log in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs4 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'latest',
        'toBlock': 'pending'
    })
    assert logs4 == [logs1[0], logs3[0]] or logs4 == [logs3[0], logs1[0]]

    # two logs in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs5 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'pending',
        'toBlock': 'pending'
    })
    assert len(logs5) == 2
    assert logs5[0] == logs5[1] == logs1[0]

    # two logs in head
    test_app.mine_next_block()
    logs6 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'latest',
        'toBlock': 'pending'
    })
    for log in logs6:
        assert log['type'] == 'mined'
        assert log['logIndex'] == '0x0'
        assert log['blockHash'] == data_encoder(
            test_app.services.chain.chain.head.hash)
        assert log['blockNumber'] == quantity_encoder(
            test_app.services.chain.chain.head.number)
        assert log['address'] == contract_address
    assert sorted([log['transactionIndex'] for log in logs6]) == ['0x0', '0x1']

    # everything together with another log in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs7 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': quantity_encoder(n0),
        'toBlock': 'pending'
    })
    assert sorted(logs7) == sorted(logs3 + logs6 + logs1)
def test_get_logs(test_app):
    """Exercise eth_getLogs over pending/latest/numeric block ranges.

    Deploys a log-emitting contract (LOG_EVM) and checks that:
    - logs of pending transactions carry no index/hash/block position fields,
    - once mined, the same logs are reported with their block position,
    - block ranges ("latest".."pending", n0.."pending") aggregate correctly.
    """
    test_app.mine_next_block()  # start with a fresh block
    n0 = test_app.services.chain.chain.head.number
    sender = address_encoder(
        test_app.services.accounts.unlocked_accounts()[0].address)

    # deploy the contract whose code emits a log entry when called
    contract_creation = {"from": sender, "data": data_encoder(LOG_EVM)}
    tx_hash = test_app.rpc_request("eth_sendTransaction", contract_creation)
    test_app.mine_next_block()
    receipt = test_app.rpc_request("eth_getTransactionReceipt", tx_hash)
    contract_address = receipt["contractAddress"]
    tx = {"from": sender, "to": contract_address}

    # single log in pending block
    test_app.rpc_request("eth_sendTransaction", tx)
    logs1 = test_app.rpc_request("eth_getLogs", {
        "fromBlock": "pending",
        "toBlock": "pending"
    })
    assert len(logs1) == 1
    assert logs1[0]["type"] == "pending"
    # a pending log is not part of a block yet, so every position field is
    # null (was `== None`; identity comparison per PEP 8)
    assert logs1[0]["logIndex"] is None
    assert logs1[0]["transactionIndex"] is None
    assert logs1[0]["transactionHash"] is None
    assert logs1[0]["blockHash"] is None
    assert logs1[0]["blockNumber"] is None
    assert logs1[0]["address"] == contract_address

    # querying again yields the same result (unlike a filter, no state)
    logs2 = test_app.rpc_request("eth_getLogs", {
        "fromBlock": "pending",
        "toBlock": "pending"
    })
    assert logs2 == logs1

    # same log, but now mined in head
    test_app.mine_next_block()
    logs3 = test_app.rpc_request("eth_getLogs", {
        "fromBlock": "latest",
        "toBlock": "latest"
    })
    assert len(logs3) == 1
    assert logs3[0]["type"] == "mined"
    assert logs3[0]["logIndex"] == "0x0"
    assert logs3[0]["transactionIndex"] == "0x0"
    assert logs3[0]["blockHash"] == data_encoder(
        test_app.services.chain.chain.head.hash)
    assert logs3[0]["blockNumber"] == quantity_encoder(
        test_app.services.chain.chain.head.number)
    assert logs3[0]["address"] == contract_address

    # another log in pending block
    test_app.rpc_request("eth_sendTransaction", tx)
    logs4 = test_app.rpc_request("eth_getLogs", {
        "fromBlock": "latest",
        "toBlock": "pending"
    })
    assert logs4 == [logs1[0], logs3[0]] or logs4 == [logs3[0], logs1[0]]

    # two logs in pending block
    test_app.rpc_request("eth_sendTransaction", tx)
    logs5 = test_app.rpc_request("eth_getLogs", {
        "fromBlock": "pending",
        "toBlock": "pending"
    })
    assert len(logs5) == 2
    assert logs5[0] == logs5[1] == logs1[0]

    # two logs in head
    test_app.mine_next_block()
    logs6 = test_app.rpc_request("eth_getLogs", {
        "fromBlock": "latest",
        "toBlock": "pending"
    })
    for log in logs6:
        assert log["type"] == "mined"
        assert log["logIndex"] == "0x0"
        assert log["blockHash"] == data_encoder(
            test_app.services.chain.chain.head.hash)
        assert log["blockNumber"] == quantity_encoder(
            test_app.services.chain.chain.head.number)
        assert log["address"] == contract_address
    assert sorted([log["transactionIndex"] for log in logs6]) == ["0x0", "0x1"]

    # everything together with another log in pending block
    test_app.rpc_request("eth_sendTransaction", tx)
    logs7 = test_app.rpc_request("eth_getLogs", {
        "fromBlock": quantity_encoder(n0),
        "toBlock": "pending"
    })
    assert sorted(logs7) == sorted(logs3 + logs6 + logs1)
def test_get_filter_changes(test_app):
    """Exercise ``eth_newFilter`` / ``eth_getFilterChanges`` with 'pending',
    'latest' and block-range filters while transactions move from the
    pending block into mined blocks.
    """
    test_app.mine_next_block()  # start with a fresh block
    sender = address_encoder(test_app.services.accounts.unlocked_accounts()[0].address)

    # deploy the log-emitting test contract and mine it
    contract_creation = {
        'from': sender,
        'data': data_encoder(LOG_EVM),
    }
    tx_hash = test_app.rpc_request('eth_sendTransaction', contract_creation)
    test_app.mine_next_block()
    receipt = test_app.rpc_request('eth_getTransactionReceipt', tx_hash)
    contract_address = receipt['contractAddress']
    tx = {
        'from': sender,
        'to': contract_address,
    }

    pending_filter_id = test_app.rpc_request('eth_newFilter', {
        'fromBlock': 'pending',
        'toBlock': 'pending',
    })
    latest_filter_id = test_app.rpc_request('eth_newFilter', {
        'fromBlock': 'latest',
        'toBlock': 'latest',
    })
    tx_hashes = []
    logs = []

    # tx in pending block
    tx_hashes.append(test_app.rpc_request('eth_sendTransaction', tx))
    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert len(logs[-1]) == 1
    assert logs[-1][0]['type'] == 'pending'
    # pending logs carry no position information yet (PEP 8: use `is None`)
    assert logs[-1][0]['logIndex'] is None
    assert logs[-1][0]['transactionIndex'] is None
    assert logs[-1][0]['transactionHash'] is None
    assert logs[-1][0]['blockHash'] is None
    assert logs[-1][0]['blockNumber'] is None
    assert logs[-1][0]['address'] == contract_address
    pending_log = logs[-1][0]

    # a second poll must not report the same pending log again
    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert logs[-1] == []
    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert logs[-1] == []

    test_app.mine_next_block()
    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert len(logs[-1]) == 1  # log from before, but now mined
    assert logs[-1][0]['type'] == 'mined'
    assert logs[-1][0]['logIndex'] == '0x0'
    assert logs[-1][0]['transactionIndex'] == '0x0'
    assert logs[-1][0]['transactionHash'] == tx_hashes[-1]
    assert logs[-1][0]['blockHash'] == data_encoder(test_app.services.chain.chain.head.hash)
    assert logs[-1][0]['blockNumber'] == quantity_encoder(test_app.services.chain.chain.head.number)
    assert logs[-1][0]['address'] == contract_address
    logs_in_range = [logs[-1][0]]

    # send tx and mine block
    tx_hashes.append(test_app.rpc_request('eth_sendTransaction', tx))
    test_app.mine_next_block()
    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert len(logs[-1]) == 1
    assert logs[-1][0]['type'] == 'mined'
    assert logs[-1][0]['logIndex'] == '0x0'
    assert logs[-1][0]['transactionIndex'] == '0x0'
    assert logs[-1][0]['transactionHash'] == tx_hashes[-1]
    assert logs[-1][0]['blockHash'] == data_encoder(test_app.services.chain.chain.head.hash)
    assert logs[-1][0]['blockNumber'] == quantity_encoder(test_app.services.chain.chain.head.number)
    assert logs[-1][0]['address'] == contract_address
    logs_in_range.append(logs[-1][0])

    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert logs[-1] == logs[-2]  # latest and pending filter see same (mined) log

    logs.append(test_app.rpc_request('eth_getFilterChanges', latest_filter_id))
    assert logs[-1] == []

    test_app.mine_next_block()
    logs.append(test_app.rpc_request('eth_getFilterChanges', pending_filter_id))
    assert logs[-1] == []

    # a fresh range filter sees the already-mined logs plus the new pending one
    range_filter_id = test_app.rpc_request('eth_newFilter', {
        'fromBlock': quantity_encoder(test_app.services.chain.chain.head.number - 3),
        'toBlock': 'pending',
    })
    tx_hashes.append(test_app.rpc_request('eth_sendTransaction', tx))
    logs.append(test_app.rpc_request('eth_getFilterChanges', range_filter_id))
    assert sorted(logs[-1]) == sorted(logs_in_range + [pending_log])
def create_geth_cluster(private_keys, geth_private_keys, p2p_base_port, base_datadir):  # pylint: disable=too-many-locals,too-many-statements
    """Start a local cluster of geth nodes.

    Writes one datadir per node under `base_datadir`, initializes each with a
    shared genesis that pre-funds every address in `private_keys`, starts the
    processes (the first node mines), waits until the JSON-RPC interface is
    up and the accounts are funded, and returns the `subprocess.Popen`
    handles.
    """
    # TODO: handle better the errors cases:
    # - cant bind, port in use
    start_rpcport = 4000

    account_addresses = [privtoaddr(key) for key in set(private_keys)]
    alloc = {
        address_encoder(address): {
            'balance': DEFAULT_BALANCE,
        }
        for address in account_addresses
    }
    genesis = {
        'config': {
            'homesteadBlock': 0,
        },
        'nonce': '0x0000000000000042',
        'mixhash': '0x0000000000000000000000000000000000000000000000000000000000000000',
        'difficulty': '0x40',
        'coinbase': '0x0000000000000000000000000000000000000000',
        'timestamp': '0x00',
        'parentHash': '0x0000000000000000000000000000000000000000000000000000000000000000',
        'extraData': 'raiden',
        'gasLimit': GAS_LIMIT_HEX,
        'alloc': alloc,
    }

    nodes_configuration = []
    for pos, key in enumerate(geth_private_keys):
        config = {}

        # make the first node miner
        if pos == 0:
            config['minerthreads'] = 1  # conservative
            config['unlock'] = 0

        config['nodekey'] = key
        config['nodekeyhex'] = encode_hex(key)
        config['pub'] = encode_hex(privtopub(key))
        config['address'] = privtoaddr(key)
        config['port'] = p2p_base_port + pos
        config['rpcport'] = start_rpcport + pos
        config['enode'] = 'enode://{pub}@127.0.0.1:{port}'.format(
            pub=config['pub'],
            port=config['port'],
        )
        # each node bootstraps through the nodes configured before it
        config['bootnodes'] = ','.join(node['enode'] for node in nodes_configuration)

        nodes_configuration.append(config)

    cmds = []
    for i, config in enumerate(nodes_configuration):
        nodedir = os.path.join(base_datadir, config['nodekeyhex'])
        os.makedirs(nodedir)
        geth_init_datadir(genesis, nodedir)

        # only the mining node needs an unlocked, funded account
        if 'minerthreads' in config:
            geth_create_account(nodedir, private_keys[i])

        cmds.append(geth_to_cmd(config, nodedir))

    # save current term settings before running geth
    if isinstance(sys.stdin, file):  # check that the test is running on non-capture mode
        term_settings = termios.tcgetattr(sys.stdin)

    processes_list = []
    for cmd in cmds:
        if '--unlock' in cmd:
            process = subprocess.Popen(cmd, universal_newlines=True, stdin=subprocess.PIPE)

            # --password wont work, write password to unlock
            process.stdin.write(DEFAULT_PASSPHRASE + os.linesep)  # Passphrase:
            process.stdin.write(DEFAULT_PASSPHRASE + os.linesep)  # Repeat passphrase:
        else:
            process = subprocess.Popen(cmd)

        processes_list.append(process)
        assert process.returncode is None

    # fix: geth_wait_and_check takes the list of rpc ports as second argument
    rpc_ports = [node['rpcport'] for node in nodes_configuration]
    geth_wait_and_check(private_keys, rpc_ports)

    # reenter echo mode (disabled by geth pasphrase prompt)
    if isinstance(sys.stdin, file):
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, term_settings)

    return processes_list
def test_get_logs(test_app):
    """Exercise ``eth_getLogs`` over pending and mined blocks, using a
    contract (LOG_EVM) that emits one log entry per transaction sent to it.
    """
    test_app.mine_next_block()  # start with a fresh block
    n0 = test_app.services.chain.chain.head.number
    sender = address_encoder(
        test_app.services.accounts.unlocked_accounts()[0].address)

    # deploy the log-emitting test contract and mine it
    contract_creation = {'from': sender, 'data': data_encoder(LOG_EVM)}
    tx_hash = test_app.rpc_request('eth_sendTransaction', contract_creation)
    test_app.mine_next_block()
    receipt = test_app.rpc_request('eth_getTransactionReceipt', tx_hash)
    contract_address = receipt['contractAddress']
    tx = {'from': sender, 'to': contract_address}

    # single log in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs1 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'pending',
        'toBlock': 'pending'
    })
    assert len(logs1) == 1
    assert logs1[0]['type'] == 'pending'
    # pending logs carry no position information yet (PEP 8: use `is None`)
    assert logs1[0]['logIndex'] is None
    assert logs1[0]['transactionIndex'] is None
    assert logs1[0]['transactionHash'] is None
    assert logs1[0]['blockHash'] is None
    assert logs1[0]['blockNumber'] is None
    assert logs1[0]['address'] == contract_address

    # polling again must return the same pending log
    logs2 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'pending',
        'toBlock': 'pending'
    })
    assert logs2 == logs1

    # same log, but now mined in head
    test_app.mine_next_block()
    logs3 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'latest',
        'toBlock': 'latest'
    })
    assert len(logs3) == 1
    assert logs3[0]['type'] == 'mined'
    assert logs3[0]['logIndex'] == '0x0'
    assert logs3[0]['transactionIndex'] == '0x0'
    assert logs3[0]['blockHash'] == data_encoder(
        test_app.services.chain.chain.head.hash)
    assert logs3[0]['blockNumber'] == quantity_encoder(
        test_app.services.chain.chain.head.number)
    assert logs3[0]['address'] == contract_address

    # another log in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs4 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'latest',
        'toBlock': 'pending'
    })
    # ordering between the mined and the pending log is not specified
    assert logs4 == [logs1[0], logs3[0]] or logs4 == [logs3[0], logs1[0]]

    # two logs in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs5 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'pending',
        'toBlock': 'pending'
    })
    assert len(logs5) == 2
    assert logs5[0] == logs5[1] == logs1[0]

    # two logs in head
    test_app.mine_next_block()
    logs6 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': 'latest',
        'toBlock': 'pending'
    })
    for log in logs6:
        assert log['type'] == 'mined'
        assert log['logIndex'] == '0x0'
        assert log['blockHash'] == data_encoder(
            test_app.services.chain.chain.head.hash)
        assert log['blockNumber'] == quantity_encoder(
            test_app.services.chain.chain.head.number)
        assert log['address'] == contract_address
    assert sorted([log['transactionIndex'] for log in logs6]) == ['0x0', '0x1']

    # everything together with another log in pending block
    test_app.rpc_request('eth_sendTransaction', tx)
    logs7 = test_app.rpc_request('eth_getLogs', {
        'fromBlock': quantity_encoder(n0),
        'toBlock': 'pending'
    })
    assert sorted(logs7) == sorted(logs3 + logs6 + logs1)
def to_url(self, value):
    """Hex-encode *value* (a binary address) and delegate URL quoting to the
    base converter.
    """
    value = address_encoder(value)
    # fix: the unbound base-class method must receive `self` explicitly;
    # as written, `value` was passed as the instance and the call failed.
    return BaseConverter.to_url(self, value)
def balance(self, account):
    """Return the balance of ``account`` in the 'pending' state as an int."""
    hex_address = address_encoder(account)
    raw_quantity = self.call('eth_getBalance', hex_address, 'pending')
    return quantity_decoder(raw_quantity)