def setup_computation(vm_class, create_address, code):
    """Build a computation for *vm_class* with a canned zero-value message.

    The message goes from CANONICAL_ADDRESS_B to CANONICAL_ADDRESS_A and
    carries the supplied *code* and *create_address*.
    """
    msg = Message(
        to=CANONICAL_ADDRESS_A,
        sender=CANONICAL_ADDRESS_B,
        create_address=create_address,
        value=0,
        data=b'',
        code=code,
        gas=1000000,
    )
    context = vm_class._state_class.transaction_context_class(
        gas_price=1,
        origin=CANONICAL_ADDRESS_B,
    )
    # A throwaway VM over an empty database supplies the execution state.
    vm = vm_class(GENESIS_HEADER, ChainDB(AtomicDB()))
    return vm_class._state_class.computation_class(
        state=vm.state,
        message=msg,
        transaction_context=context,
    )
def database_server_ipc_path():
    """Start a chaindb server in a subprocess and yield its IPC path.

    The server is killed gracefully when the consumer is done with the path.
    """
    core_db = AtomicDB()
    core_db[b'key-a'] = b'value-a'

    # TODO: use a custom chain class only for testing.
    ChainDB(core_db).persist_header(ROPSTEN_GENESIS_HEADER)

    with tempfile.TemporaryDirectory() as temp_dir:
        trinity_config = TrinityConfig(
            network_id=ROPSTEN_NETWORK_ID,
            data_dir=temp_dir,
        )
        manager = get_chaindb_manager(trinity_config, core_db)
        server_process = multiprocessing.Process(
            target=serve_chaindb,
            args=(manager, ),
        )
        server_process.start()

        # Block until the server is actually accepting connections.
        wait_for_ipc(trinity_config.database_ipc_path)

        try:
            yield trinity_config.database_ipc_path
        finally:
            kill_process_gracefully(server_process, logging.getLogger())
def test_mainnet_eip1085_matches_mainnet_genesis_header(mainnet_genesis_config):
    """The EIP-1085 mainnet config must reproduce the canonical mainnet chain."""
    genesis_data = extract_genesis_data(mainnet_genesis_config)
    state = {
        addr: acct.to_dict()
        for addr, acct in genesis_data.state.items()
    }
    params = genesis_data.params.to_dict()

    chain = Chain.configure(
        vm_configuration=genesis_data.vm_configuration,
        chain_id=genesis_data.chain_id,
    ).from_genesis(AtomicDB(), params, state)

    assert chain.get_canonical_head() == MAINNET_GENESIS_HEADER
    assert chain.chain_id == MainnetChain.chain_id

    # Fork block numbers must line up with the canonical configuration.
    actual_fork_blocks = tuple(zip(*chain.vm_configuration))[0]
    expected_fork_blocks = tuple(zip(*MainnetChain.vm_configuration))[0]
    assert actual_fork_blocks == expected_fork_blocks

    # The homestead VM (second entry) must carry DAO-fork support.
    actual_homestead_vm = chain.vm_configuration[1][1]
    expected_homestead_vm = MainnetChain.vm_configuration[1][1]
    assert issubclass(actual_homestead_vm, HomesteadVM)
    assert actual_homestead_vm.support_dao_fork is True
    assert (
        actual_homestead_vm.get_dao_fork_block_number()
        == expected_homestead_vm.get_dao_fork_block_number()
    )
def __init__(self, eth_node_ip=None, eth_node_port=None) -> None:
    """Set up a fuzz-testing chain backed by an in-memory database.

    When a remote node address is given and ``settings.REMOTE_FUZZING`` is
    enabled, a Web3 connection to it is kept on ``self.w3``.
    """
    chain_class = Chain.configure(
        __name__='Blockchain',
        vm_configuration=(
            (constants.GENESIS_BLOCK_NUMBER, FrontierVMForFuzzTesting),
            (HOMESTEAD_MAINNET_BLOCK, HomesteadVMForFuzzTesting),
            (TANGERINE_WHISTLE_MAINNET_BLOCK, TangerineWhistleVMForFuzzTesting),
            (SPURIOUS_DRAGON_MAINNET_BLOCK, SpuriousDragonVMForFuzzTesting),
            (BYZANTIUM_MAINNET_BLOCK, ByzantiumVMForFuzzTesting),
            (PETERSBURG_MAINNET_BLOCK, PetersburgVMForFuzzTesting),
        ),
    )

    class MyMemoryDB(MemoryDB):
        # In-memory store partitioned into storage/account/code buckets.
        # ``rst`` resets it to the pristine layout between fuzzing runs.
        def __init__(self) -> None:
            self.rst()

        def rst(self) -> None:
            self.kv_store = {'storage': dict(), 'account': dict(), 'code': dict()}

    if eth_node_ip and eth_node_port and settings.REMOTE_FUZZING:
        self.w3 = Web3(HTTPProvider('http://%s:%s' % (eth_node_ip, eth_node_port)))
    else:
        self.w3 = None

    self.chain = chain_class.from_genesis_header(AtomicDB(MyMemoryDB()), MAINNET_GENESIS_HEADER)
    self.logger = initialize_logger("EVM")
    self.accounts = list()
    self.snapshot = None
    self.vm = None
def db(request):
    """Yield a database wrapper of the parametrized type around a MemoryDB."""
    base_db = MemoryDB()
    param = request.param
    if param is JournalDB:
        yield JournalDB(base_db)
    elif param is BatchDB:
        yield BatchDB(base_db)
    elif param is MemoryDB:
        yield base_db
    elif param is AtomicDB:
        # AtomicDB is exercised through an open atomic batch.
        with AtomicDB(base_db).atomic_batch() as batch:
            yield batch
    elif param is CacheDB:
        yield CacheDB(base_db)
    else:
        raise Exception("Invariant")
def prepare_computation(vm_class):
    """Create a computation whose state has a touched empty account and one
    account with code pre-installed."""
    message = Message(
        to=CANONICAL_ADDRESS_A,
        sender=CANONICAL_ADDRESS_B,
        value=100,
        data=b'',
        code=b'',
        gas=800,
    )
    transaction_context = vm_class._state_class.transaction_context_class(
        gas_price=1,
        origin=CANONICAL_ADDRESS_B,
    )
    vm = vm_class(GENESIS_HEADER, ChainDB(AtomicDB()))
    computation = vm_class._state_class.computation_class(
        state=vm.state,
        message=message,
        transaction_context=transaction_context,
    )

    # Seed the state with the two well-known fixture accounts.
    account_db = computation.state.account_db
    account_db.touch_account(decode_hex(EMPTY_ADDRESS_IN_STATE))
    account_db.set_code(decode_hex(ADDRESS_WITH_CODE[0]), ADDRESS_WITH_CODE[1])
    return computation
def atomic_db(request, tmpdir):
    """Return the parametrized database backend: in-memory atomic or LevelDB."""
    param = request.param
    if param == 'atomic':
        return AtomicDB()
    if param == 'level':
        return LevelDB(db_path=tmpdir.mkdir("level_db_path"))
    raise ValueError("Unexpected database type: {}".format(param))
def initialize_vm_and_state(state_test):
    """Construct an Istanbul VM and a state primed with the test's ``pre`` accounts.

    Returns a ``(vm, state, current_block_header)`` triple built from the
    fixture's ``pre`` and ``env`` sections.
    """
    account_state = decode_account_state(state_test["pre"])

    base_db = AtomicDB()
    chain = MainnetChain(base_db)
    # Placeholder parent header so the fixture's block can be applied on top.
    pre_genesis_header = BlockHeader(difficulty=0, block_number=-1, gas_limit=0)
    chain_context = ChainContext(MAINNET_CHAIN_ID)

    state = IstanbulVM.build_state(base_db, pre_genesis_header, chain_context)
    # Seed the state trie with the fixture's custom account data.
    apply_state_dict(state, account_state)
    state.persist()

    current_block_header = BlockHeader(**decode_current_block_params(state_test["env"]))

    vm = IstanbulVM(
        header=pre_genesis_header,
        chaindb=chain.chaindb,
        chain_context=chain_context,
        consensus_context=chain.consensus_context,
    )
    return vm, state, current_block_header
def genesis(chain_class: ChainAPI,
            db: AtomicDatabaseAPI = None,
            params: Dict[str, HeaderParams] = None,
            state: GeneralState = None) -> ChainAPI:
    """
    Initialize the given chain class with the given genesis header parameters
    and chain state.

    Missing arguments fall back to an empty state, default genesis params
    derived from that state, and a fresh in-memory ``AtomicDB``.
    """
    genesis_state: AccountState = (
        {} if state is None else _fill_and_normalize_state(state)
    )

    defaults = _get_default_genesis_params(genesis_state)
    genesis_params = defaults if params is None else merge(defaults, params)

    base_db: AtomicDatabaseAPI = AtomicDB() if db is None else db

    return chain_class.from_genesis(base_db, genesis_params, genesis_state)
def _create(
    cls, model_class: Type[BaseBeaconChain], *args: Any, **kwargs: Any
) -> BaseBeaconChain:
    """
    Create a BeaconChain according to the factory definition.

    NOTE: clients of this class may provide a ``branch`` keyword in the
    ``kwargs`` to construct a chain with a
    ``Collection[BaseSignedBeaconBlock]``. This ``branch`` is NOT assumed to
    have been constructed according to the full set of validity rules, e.g.
    lacking a proper signature so the ``perform_validation`` option to
    ``import_block`` is disabled.
    """
    override_lengths(cls.config)

    num_validators = kwargs.get("num_validators", cls.num_validators)

    if kwargs["genesis_state"] is None:
        # No state supplied: synthesize a mock genesis for ``num_validators``.
        keymap = mk_keymap_of_size(num_validators)
        genesis_state, genesis_block = create_mock_genesis(
            config=cls.config,
            pubkeys=tuple(keymap.keys()),
            keymap=keymap,
            genesis_block_class=SerenityBeaconBlock,
            genesis_time=Timestamp(int(time.time())),
        )
    elif kwargs["genesis_block"] is None:
        # State supplied without a block: derive the block from the state root.
        genesis_state = kwargs["genesis_state"]
        genesis_block = get_genesis_block(
            genesis_state.hash_tree_root, SerenityBeaconBlock
        )
    else:
        genesis_state = kwargs["genesis_state"]
        genesis_block = kwargs["genesis_block"]

    db = kwargs.pop("db", AtomicDB())
    genesis_config = model_class.get_genesis_state_machine_class().config
    chain = model_class.from_genesis(
        base_db=db,
        genesis_state=genesis_state,
        genesis_block=genesis_block,
        genesis_config=genesis_config,
    )

    branch = kwargs["branch"]
    if branch is not None:
        for block in branch:
            if block.is_genesis:
                continue
            # NOTE: ideally we use the ``import_block`` method
            # on ``chain`` but for the time being we skip some
            # validation corresponding to assumptions made in clients of
            # this class. A future refactoring should use the external API.
            chain.chaindb.persist_block(
                block, SerenitySignedBeaconBlock, HigherSlotScoring()
            )

    return chain
async def test_generate_eth_cap_enr_field():
    """The generated ``eth`` ENR field must encode the Ropsten genesis fork id."""
    base_db = AtomicDB()
    ChainDB(base_db).persist_header(ROPSTEN_GENESIS_HEADER)

    field = await generate_eth_cap_enr_field(
        ROPSTEN_VM_CONFIGURATION, AsyncHeaderDB(base_db))

    enr = ENRFactory(custom_kv_pairs={field[0]: field[1]})
    expected = ForkID(hash=to_bytes(hexstr='0x30c7ddbc'), next=10)
    assert extract_forkid(enr) == expected
def get_fresh_chain_db():
    """Return a new BeaconChainDB seeded with a genesis block at slot 0."""
    chain_db = BeaconChainDB(AtomicDB())
    chain_db.persist_block(create_test_block(slot=0), BeaconBlock)
    return chain_db
async def _test_trie_sync():
    """Sync a randomly generated trie into a fresh DB and verify its contents."""
    src_trie, contents = make_random_trie(random)
    dest_db = AtomicDB()
    nodes_cache = AtomicDB()
    scheduler = HexaryTrieSync(src_trie.root_hash, dest_db, nodes_cache,
                               ExtendedDebugLogger("test"))
    requests = scheduler.next_batch()
    while len(requests) > 0:
        # Answer each request straight from the source trie's node store.
        results = [
            [request.node_key, src_trie.db[request.node_key]]
            for request in requests
        ]
        await scheduler.process(results)
        requests = scheduler.next_batch(10)

    # The destination trie must now resolve every original key/value pair.
    dest_trie = HexaryTrie(dest_db, src_trie.root_hash)
    for key, value in contents.items():
        assert dest_trie[key] == value
def chaindb_uncle(leveldb_1000, leveldb_uncle_chain):
    """Return a chaindb for a chain that forks off the canonical one at block 475."""
    canonical_chain = load_mining_chain(AtomicDB(leveldb_1000))
    uncle_chain = load_mining_chain(AtomicDB(leveldb_uncle_chain))
    # This fixture shares a common history with `leveldb_1000` from genesis till
    # block 474. It then forks off and contains uncles from 475 till 1000. These
    # numbers were picked because it fully spans the first gap defined in
    # `chaindb_with_gaps` (test_sync.py) and only partially spans the second gap
    # defined in `chaindb_with_gaps`.
    shared_header = canonical_chain.get_canonical_block_header_by_number(474)
    assert uncle_chain.get_canonical_block_header_by_number(474) == shared_header

    # Forks at header 475
    fork_header = canonical_chain.get_canonical_block_header_by_number(475)
    assert uncle_chain.get_canonical_block_header_by_number(475) != fork_header

    assert uncle_chain.chaindb.get_canonical_head().block_number == 1000
    return uncle_chain.chaindb
def test_chaindb_persist_unexecuted_block(
        chain, chain_without_block_validation_factory, funded_address,
        funded_address_private_key, use_persist_unexecuted_block):
    """Persist mined-but-unexecuted blocks into a pristine chaindb and verify
    that transactions and receipts can be retrieved by index afterwards.

    Parametrized by ``use_persist_unexecuted_block``: the ``False`` branch
    exists as an XFAIL to show that plain ``persist_block`` is insufficient.
    """
    # We need one chain to create blocks and a second one with a pristine database to test
    # persisting blocks that have not been executed.
    second_chain = chain_without_block_validation_factory(AtomicDB())
    assert chain.get_canonical_head() == second_chain.get_canonical_head()
    assert chain != second_chain

    NUMBER_BLOCKS_IN_CHAIN = 5
    TRANSACTIONS_IN_BLOCK = 10
    # The block/receipt coordinates that the assertions below will look up.
    REQUIRED_BLOCK_NUMBER = 2
    REQUIRED_RECEIPT_INDEX = 3

    for (block, receipts) in mine_blocks_with_receipts(
            chain,
            NUMBER_BLOCKS_IN_CHAIN,
            TRANSACTIONS_IN_BLOCK,
            funded_address,
            funded_address_private_key,
    ):
        # Remember the expected tx/receipt from the block we will query later.
        if block.header.block_number == REQUIRED_BLOCK_NUMBER:
            actual_receipt = receipts[REQUIRED_RECEIPT_INDEX]
            actual_tx = block.transactions[REQUIRED_RECEIPT_INDEX]
            tx_class = block.transaction_class
        if use_persist_unexecuted_block:
            second_chain.chaindb.persist_unexecuted_block(block, receipts)
        else:
            # We just use this for an XFAIL to prove `persist_block` does not properly
            # persist blocks that were not executed.
            second_chain.chaindb.persist_block(block)

    chaindb_retrieved_tx = second_chain.chaindb.get_transaction_by_index(
        REQUIRED_BLOCK_NUMBER, REQUIRED_RECEIPT_INDEX, tx_class)
    assert chaindb_retrieved_tx == actual_tx

    # Check that the receipt retrieved is indeed the actual one
    chaindb_retrieved_receipt = second_chain.chaindb.get_receipt_by_index(
        REQUIRED_BLOCK_NUMBER,
        REQUIRED_RECEIPT_INDEX,
    )
    assert chaindb_retrieved_receipt == actual_receipt

    # Raise error if block number is not found
    with pytest.raises(ReceiptNotFound):
        second_chain.chaindb.get_receipt_by_index(
            NUMBER_BLOCKS_IN_CHAIN + 1,
            REQUIRED_RECEIPT_INDEX,
        )

    # Raise error if receipt index is out of range
    with pytest.raises(ReceiptNotFound):
        second_chain.chaindb.get_receipt_by_index(
            NUMBER_BLOCKS_IN_CHAIN,
            TRANSACTIONS_IN_BLOCK + 1,
        )
def _create(cls, model_class: Type[TestnetChain], *args: Any,
            **kwargs: Any) -> BaseBeaconChain:
    """Instantiate the testnet chain from the module-level genesis fixtures."""
    state_machine_config = model_class.get_genesis_state_machine_class().config
    return model_class.from_genesis(
        base_db=AtomicDB(),
        genesis_state=genesis_state,
        genesis_block=genesis_block,
        genesis_config=Eth2GenesisConfig(state_machine_config),
    )
def setup_vm(vm_class, chain_id=None):
    """Return a freshly constructed *vm_class* on an empty genesis header."""
    db = AtomicDB()
    header = vm_class.create_genesis_header(
        difficulty=constants.GENESIS_DIFFICULTY,
        timestamp=0,
    )
    return vm_class(header, ChainDB(db), ChainContext(chain_id),
                    ConsensusContext(db))
def chaindb_cold_state(leveldb_cold_state):
    """Load the 42-block "cold state" chain fixture and return its chaindb."""
    chaindb = load_mining_chain(AtomicDB(leveldb_cold_state)).chaindb
    head = chaindb.get_canonical_head()
    # Sanity-check that the fixture database is the expected snapshot.
    assert head.block_number == 42
    assert head.state_root == (
        b"\xecVG\x1dT\xd7l'M/\xfd\xfe\xf961:\xc2\x10\xc5\xbd)+&\xd6\x82\xe43\x1c$$\xb3\xb5"
    )
    return chaindb
def instantiate_vm(vm_class):
    """Build *vm_class* on a mainnet-like genesis header with no chain id."""
    genesis_header = BlockHeader(
        difficulty=17179869184,
        block_number=BlockNumber(0),
        gas_limit=5000,
    )
    db = AtomicDB()
    return vm_class(genesis_header, ChainDB(db), ChainContext(None),
                    ConsensusContext(db))
async def main() -> None:
    """Run a standalone discovery service against a preconfigured network.

    Parses -bootnode/-networkid/-l from the command line, seeds an in-memory
    header DB with the network's genesis header, then serves discovery over
    UDP on port 30304 until the service finishes.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-bootnode', type=str, help="The enode to use as bootnode")
    parser.add_argument('-networkid', type=int, choices=[ROPSTEN_NETWORK_ID, MAINNET_NETWORK_ID],
                        default=ROPSTEN_NETWORK_ID, help="1 for mainnet, 3 for testnet")
    parser.add_argument('-l', type=str, help="Log level", default="info")
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s %(levelname)s: %(message)s',
                        datefmt='%H:%M:%S')
    # "debug2" is a custom, more verbose level than logging.DEBUG.
    if args.l == "debug2":  # noqa: E741
        log_level = DEBUG2_LEVEL_NUM
    else:
        log_level = getattr(logging, args.l.upper())
    logging.getLogger('p2p').setLevel(log_level)

    network_cfg = PRECONFIGURED_NETWORKS[args.networkid]

    # Listen on a port other than 30303 so that we can test against a local geth instance
    # running on that port.
    listen_port = 30304
    # Use a hard-coded privkey so that our enode is always the same.
    privkey = keys.PrivateKey(
        b'~\x054{4\r\xd64\x0f\x98\x1e\x85;\xcc\x08\x1eQ\x10t\x16\xc0\xb0\x7f)=\xc4\x1b\xb7/\x8b&\x83'
    )  # noqa: E501
    addr = kademlia.Address('127.0.0.1', listen_port, listen_port)

    # Prefer an explicit bootnode; otherwise fall back to the network defaults.
    if args.bootnode:
        bootstrap_nodes = tuple([kademlia.Node.from_uri(args.bootnode)])
    else:
        bootstrap_nodes = tuple(
            kademlia.Node.from_uri(enode) for enode in network_cfg.bootnodes)

    # Unique IPC path so multiple instances don't collide.
    ipc_path = Path(f"networking-{uuid.uuid4()}.ipc")
    networking_connection_config = ConnectionConfig(
        name=NETWORKING_EVENTBUS_ENDPOINT, path=ipc_path)

    # Header DB seeded with genesis so the eth capability ENR field can be built.
    headerdb = TrioHeaderDB(AtomicDB(MemoryDB()))
    headerdb.persist_header(network_cfg.genesis_header)
    vm_config = network_cfg.vm_configuration
    enr_field_providers = (functools.partial(generate_eth_cap_enr_field,
                                             vm_config, headerdb), )

    socket = trio.socket.socket(family=trio.socket.AF_INET,
                                type=trio.socket.SOCK_DGRAM)
    await socket.bind(('0.0.0.0', listen_port))
    async with TrioEndpoint.serve(networking_connection_config) as endpoint:
        service = DiscoveryService(privkey, addr, bootstrap_nodes, endpoint,
                                   socket, enr_field_providers)
        service.logger.info("Enode: %s", service.this_node.uri())
        async with background_trio_service(service):
            await service.manager.wait_finished()
async def test_persisting_and_looking_up():
    """Witness hashes round-trip through the AsyncWitnessDB."""
    wit_db = AsyncWitnessDB(AtomicDB())
    header_hash = Hash32Factory()

    # Looking up an unknown header hash must raise.
    with pytest.raises(WitnessHashesUnavailable):
        await wit_db.coro_get_witness_hashes(header_hash)

    witnesses = tuple(Hash32Factory.create_batch(5))
    await wit_db.coro_persist_witness_hashes(header_hash, witnesses)
    assert await wit_db.coro_get_witness_hashes(header_hash) == witnesses
async def get_chain_db(blocks=(),
                       genesis_config=SERENITY_GENESIS_CONFIG,
                       fork_choice_scoring=higher_slot_scoring):
    """Return an ``AsyncBeaconChainDB`` with *blocks* persisted.

    :param blocks: blocks to persist into the fresh database
    :param genesis_config: genesis configuration for the chain db
    :param fork_choice_scoring: fork-choice scoring applied to each
        persisted block (defaults to ``higher_slot_scoring``)
    """
    db = AtomicDB()
    chain_db = AsyncBeaconChainDB(db=db, genesis_config=genesis_config)
    # Bug fix: previously the module-level ``higher_slot_scoring`` was used
    # here unconditionally, silently ignoring a caller-supplied
    # ``fork_choice_scoring`` argument.
    await chain_db.coro_persist_block_chain(
        blocks,
        BeaconBlock,
        (fork_choice_scoring, ) * len(blocks),
    )
    return chain_db
def __init__(self):
    """Create a simulator chain with a single custom VM at genesis."""
    chain_class = MyChain.configure(
        __name__='EVMSimulatorChain',
        vm_configuration=((constants.GENESIS_BLOCK_NUMBER, MyVm), ))
    self.used_addresses = {MASTER_ADDRESS}
    self.chain = chain_class.from_genesis(AtomicDB(), GENESIS_PARAMS, GENESIS_STATE)
    self.vm = self.chain.get_vm()
    # Route all computation through our instrumented computation class.
    self.vm.state.computation_class = MyComputation
    self.vm.get_state_class().computation_class = MyComputation
    # Seed derived from the current time; hex digest keeps it printable.
    self.seed = keccak_256(time.time().hex().encode("utf-8")).hexdigest()
def get_chain(vm: Type[BaseVM]) -> MiningChain:
    """Return a PoW-disabled mining chain with two funded test accounts."""
    funded_accounts = [
        AddressSetup(address=FUNDED_ADDRESS,
                     balance=DEFAULT_INITIAL_BALANCE,
                     code=b''),
        AddressSetup(address=SECOND_ADDRESS,
                     balance=DEFAULT_INITIAL_BALANCE,
                     code=b''),
    ]
    return chain_without_pow(
        AtomicDB(), vm, GENESIS_PARAMS, genesis_state(funded_accounts))
def get_chain(trinity_config: TrinityConfig) -> ChainAPI:
    """Open (and initialize if needed) the on-disk chain for *trinity_config*."""
    app_config = trinity_config.get_app_config(Eth1AppConfig)
    ensure_eth1_dirs(app_config)

    base_db = LevelDB(db_path=app_config.database_dir)
    chain_config = app_config.get_chain_config()
    chain = chain_config.full_chain_class(AtomicDB(base_db))
    initialize_database(chain_config, chain.chaindb, base_db)
    return chain
async def test_state_sync():
    """Sync a random 1000-account state and verify every account's fields."""
    raw_db, state_root, contents = make_random_state(1000)
    dest_db = AtomicDB()
    nodes_cache = AtomicDB()
    scheduler = StateSync(state_root, dest_db, nodes_cache,
                          ExtendedDebugLogger('test'))
    requests = scheduler.next_batch(10)
    while requests:
        # Serve each requested node straight from the source database.
        responses = [[req.node_key, raw_db[req.node_key]] for req in requests]
        await scheduler.process(responses)
        requests = scheduler.next_batch(10)

    result_account_db = AccountDB(dest_db, state_root)
    for addr, (balance, nonce, storage, code) in contents.items():
        assert result_account_db.get_balance(addr) == balance
        assert result_account_db.get_nonce(addr) == nonce
        assert result_account_db.get_storage(addr, 0) == storage
        assert result_account_db.get_code(addr) == code
def test_goerli_eip1085_matches_goerli_chain(goerli_genesis_config):
    """The EIP-1085 Goerli config must reproduce the canonical Goerli genesis header."""
    genesis_data = extract_genesis_data(goerli_genesis_config)
    state = {
        addr: acct.to_dict()
        for addr, acct in genesis_data.state.items()
    }
    chain = Chain.configure(
        vm_configuration=genesis_data.vm_configuration,
        chain_id=genesis_data.chain_id,
    ).from_genesis(AtomicDB(), genesis_data.params.to_dict(), state)

    assert chain.get_canonical_head() == GOERLI_GENESIS_HEADER
def test_chain_config_from_preconfigured_network(network_id):
    """Preconfigured chain configs must match the canonical chain classes."""
    chain_config = Eth1ChainConfig.from_preconfigured_network(network_id)
    chain = chain_config.initialize_chain(AtomicDB(MemoryDB()))

    # Resolve the canonical chain class and genesis for this network id.
    if network_id == MAINNET_NETWORK_ID:
        expected_chain = MainnetChain
        expected_genesis = MAINNET_GENESIS_HEADER
    elif network_id == ROPSTEN_NETWORK_ID:
        expected_chain = RopstenChain
        expected_genesis = ROPSTEN_GENESIS_HEADER
    else:
        raise AssertionError("Invariant: unreachable code path")

    assert chain_config.chain_id == expected_chain.chain_id
    assert_vm_configuration_equal(chain_config.vm_configuration,
                                  expected_chain.vm_configuration)
    assert chain.get_canonical_head() == expected_genesis
def new_chain_from_fixture(fixture, chain_cls=MainnetChain):
    """Build a fresh chain from a test fixture's genesis params and state."""
    chain_class = chain_cls.configure(
        'ChainFromFixture',
        vm_configuration=chain_vm_configuration(fixture),
    )
    return chain_class.from_genesis(
        AtomicDB(),
        genesis_params=genesis_params_from_fixture(fixture),
        genesis_state=fixture['pre'],
    )
def get_server(privkey, address):
    """Construct a ParagonServer wired to a fresh Ropsten-genesis database."""
    base_db = AtomicDB()
    headerdb = FakeAsyncHeaderDB(base_db)
    chaindb = ChainDB(base_db)
    chaindb.persist_header(ROPSTEN_GENESIS_HEADER)
    chain = RopstenChain(base_db)
    return ParagonServer(
        privkey=privkey,
        port=address.tcp_port,
        chain=chain,
        chaindb=chaindb,
        headerdb=headerdb,
        base_db=base_db,
        network_id=NETWORK_ID,
    )