def generate_genesis_state(cls,
                           get_genesis_time: Callable[[], Timestamp],
                           network_dir: Path,
                           keymap: Dict[BLSPubkey, int],
                           clients: Tuple[Client, ...]) -> None:
    """Create a mock genesis state and distribute it as a YAML file.

    :param get_genesis_time: called *after* the slow state creation so the
        chosen genesis timestamp stays accurate.
    :param network_dir: directory receiving the canonical genesis file.
    :param keymap: validator BLS pubkey -> private key.
    :param clients: every client also gets a copy of the genesis file.
    """
    logger = cls.get_logger()
    state_machine_class = XiaoLongBaoStateMachine
    # NOTE: see https://github.com/ethereum/trinity/issues/786
    override_lengths(XiaoLongBaoStateMachine.config)
    # Since create_mock_genesis takes a long time, update the real genesis_time later
    dummy_time = Timestamp(int(time.time()))
    state, _ = create_mock_genesis(
        # A dict keys view is not a Sequence; materialize it instead of
        # papering over the mismatch with a cast (matches sibling call sites).
        pubkeys=tuple(keymap.keys()),
        config=state_machine_class.config,
        keymap=keymap,
        genesis_block_class=state_machine_class.block_class,
        genesis_time=dummy_time,
    )
    genesis_time = get_genesis_time()
    logger.info(
        "Genesis time will be %s from now",
        humanize_seconds(genesis_time - int(time.time())),
    )
    state = state.copy(genesis_time=genesis_time)
    # The output here can be trusted, so use unsafe mode for performance
    yaml = YAML(typ='unsafe')
    with open(network_dir / GENESIS_FILE, "w") as f:
        yaml.dump(to_formatted_dict(state), f)
    # Distribute genesis file to clients
    for client in clients:
        with open(client.client_dir / GENESIS_FILE, "w") as f:
            yaml.dump(to_formatted_dict(state), f)
def generate_genesis_state(cls, get_genesis_time: Callable[[], Timestamp], network_dir: Path, keymap: Dict[Any, Any], clients: Tuple[Client, ...]) -> None:
    """Build a mock genesis state and write it to the network dir and each client dir."""
    logger = cls.get_logger()
    sm_class = XiaoLongBaoStateMachine
    # create_mock_genesis is slow; stamp a placeholder now and patch in the
    # real genesis_time once the heavy lifting is done.
    placeholder_time = Timestamp(int(time.time()))
    state, _ = create_mock_genesis(
        num_validators=len(keymap),
        config=sm_class.config,
        keymap=keymap,
        genesis_block_class=sm_class.block_class,
        genesis_time=placeholder_time,
    )
    genesis_time = get_genesis_time()
    logger.info(
        "Genesis time will be %s from now",
        humanize_seconds(genesis_time - int(time.time())),
    )
    state = state.copy(genesis_time=genesis_time)
    # The output here can be trusted, so use unsafe mode for performance
    yaml = YAML(typ='unsafe')
    # One pass over every destination: the network dir first, then each client.
    destinations = (network_dir,) + tuple(client.client_dir for client in clients)
    for directory in destinations:
        with open(directory / GENESIS_FILE, "w") as f:
            yaml.dump(to_formatted_dict(state), f)
def generate_genesis_state(cls, genesis_delay: Second, network_dir: Path, keymap: Dict[Any, Any], clients: Tuple[Client, ...]) -> None:
    """Create a mock genesis state whose genesis_time is ``now + genesis_delay``
    and distribute it as a YAML file to the network dir and each client dir.

    :param genesis_delay: seconds from now at which genesis should occur.
    :param network_dir: directory receiving the canonical genesis file.
    :param keymap: validator pubkey -> private key mapping.
    :param clients: every client also gets a copy of the genesis file.
    """
    logger = cls.get_logger()
    state_machine_class = XiaoLongBaoStateMachine
    # Since create_mock_genesis takes a long time, update the real genesis_time later
    dummy_time = Timestamp(int(time.time()))
    state, _ = create_mock_genesis(
        num_validators=len(keymap.keys()),
        config=state_machine_class.config,
        keymap=keymap,
        genesis_block_class=state_machine_class.block_class,
        genesis_time=dummy_time,
    )
    # Lazy %-style args (consistent with the other logging in this module):
    # formatting is skipped entirely when INFO is disabled.
    logger.info("Genesis time will be %s seconds from now", genesis_delay)
    genesis_time = Timestamp(int(time.time()) + genesis_delay)
    state = state.copy(genesis_time=genesis_time)
    yaml = YAML()
    with open(network_dir / GENESIS_FILE, "w") as f:
        yaml.dump(to_formatted_dict(state), f)
    # Distribute genesis file to clients
    for client in clients:
        with open(client.client_dir / GENESIS_FILE, "w") as f:
            yaml.dump(to_formatted_dict(state), f)
def _create(
    cls, model_class: Type[BaseBeaconChain], *args: Any, **kwargs: Any
) -> BaseBeaconChain:
    """
    Create a BeaconChain according to the factory definition.

    NOTE: clients of this class may provide a ``branch`` keyword in the ``kwargs`` to
    construct a chain with a ``Collection[BaseSignedBeaconBlock]``. This ``branch`` is NOT
    assumed to have been constructed according to the full set of validity rules, e.g.
    lacking a proper signature so the ``perform_validation`` option to ``import_block`` is
    disabled.
    """
    override_lengths(cls.config)
    # Allow the caller to override the validator count; otherwise use the
    # factory's class-level default.
    if "num_validators" in kwargs:
        num_validators = kwargs["num_validators"]
    else:
        num_validators = cls.num_validators
    if kwargs["genesis_state"] is None:
        # No state supplied: derive a full mock genesis (state + block) from
        # freshly generated validator keys.
        keymap = mk_keymap_of_size(num_validators)
        genesis_state, genesis_block = create_mock_genesis(
            config=cls.config,
            pubkeys=tuple(keymap.keys()),
            keymap=keymap,
            genesis_block_class=SerenityBeaconBlock,
            genesis_time=Timestamp(int(time.time())),
        )
    elif kwargs["genesis_block"] is None:
        # State supplied but no block: synthesize the matching genesis block
        # from the state's hash tree root.
        genesis_state = kwargs["genesis_state"]
        genesis_block = get_genesis_block(
            genesis_state.hash_tree_root, SerenityBeaconBlock
        )
    else:
        # Both supplied: trust the caller's state/block pairing as-is.
        genesis_state = kwargs["genesis_state"]
        genesis_block = kwargs["genesis_block"]
    # ``db`` may be injected for tests; default to a fresh in-memory AtomicDB.
    db = kwargs.pop("db", AtomicDB())
    genesis_config = model_class.get_genesis_state_machine_class().config
    chain = model_class.from_genesis(
        base_db=db,
        genesis_state=genesis_state,
        genesis_block=genesis_block,
        genesis_config=genesis_config,
    )
    if kwargs["branch"] is not None:
        branch = kwargs["branch"]
        for block in branch:
            if block.is_genesis:
                continue
            # NOTE: ideally we use the ``import_block`` method
            # on ``chain`` but for the time being we skip some
            # validation corresponding to assumptions made in clients of
            # this class. A future refactoring should use the external API.
            chain.chaindb.persist_block(
                block, SerenitySignedBeaconBlock, HigherSlotScoring()
            )
    return chain
def genesis_state(number_of_validators, config):
    """Return a mock genesis state for ``number_of_validators`` freshly keyed validators."""
    validator_keys = mk_keymap_of_size(number_of_validators)
    state, _ = create_mock_genesis(
        config=config,
        pubkeys=tuple(validator_keys),
        keymap=validator_keys,
        genesis_block_class=SerenityBeaconBlock,
        genesis_time=Timestamp(int(time.time())),
    )
    return state
async def test_json_rpc_http_server(aiohttp_raw_server, aiohttp_client, event_bus, base_db, ipc_path):
    """Serve ``beacon_head`` over JSON-RPC/HTTP against a genesis-only chain DB
    and verify the returned slot and roots match the genesis block and state.
    """
    manager = DBManager(base_db)
    with manager.run(ipc_path):
        # Set chaindb
        override_lengths(SERENITY_CONFIG)
        db = DBClient.connect(ipc_path)
        genesis_config = SERENITY_CONFIG
        chaindb = AsyncBeaconChainDB(db, genesis_config)
        fork_choice_scoring = HigherSlotScoring()
        # Empty validator set is enough: only the head lookup is exercised.
        genesis_state, genesis_block = create_mock_genesis(
            pubkeys=(),
            config=SERENITY_CONFIG,
            keymap=dict(),
            genesis_block_class=BeaconBlock,
            genesis_time=0,
        )
        chaindb.persist_state(genesis_state)
        chaindb.persist_block(
            SignedBeaconBlock.create(message=genesis_block),
            SignedBeaconBlock,
            fork_choice_scoring,
        )
        # Pre-bind so the ``finally`` clause cannot raise UnboundLocalError
        # (masking the original exception) if server construction fails below.
        raw_server = None
        try:
            rpc = RPCServer(initialize_beacon_modules(chaindb, event_bus), chaindb, event_bus)
            raw_server = await aiohttp_raw_server(
                RPCHandler.handle(rpc.execute))
            client = await aiohttp_client(raw_server)
            request_id = 1
            request_data = {
                "jsonrpc": "2.0",
                "method": "beacon_head",
                "params": [],
                "id": request_id,
            }
            response = await client.post("/", json=request_data)
            response_data = await response.json()
            assert response_data["id"] == request_id
            result = response_data["result"]
            assert result["slot"] == 0
            assert decode_hex(
                result["block_root"]) == genesis_block.hash_tree_root
            assert decode_hex(
                result["state_root"]) == genesis_state.hash_tree_root
        except KeyboardInterrupt:
            pass
        finally:
            if raw_server is not None:
                await raw_server.close()
            db.close()
def initialize_chain(self, base_db: BaseAtomicDB) -> 'BeaconChain':
    """Build a BeaconChain seeded with a mock genesis derived from this
    instance's genesis data (testing only)."""
    chain_class = self.beacon_chain_class
    _, state_machine = chain_class.sm_configuration[0]
    genesis_state, genesis_block = create_mock_genesis(
        num_validators=self.genesis_data.num_validators,
        config=state_machine.config,
        keymap=self.genesis_data.keymap,
        genesis_block_class=state_machine.block_class,
        genesis_time=self.genesis_time,
    )
    chain = chain_class.from_genesis(
        base_db=base_db,
        genesis_state=genesis_state,
        genesis_block=genesis_block,
    )
    return cast('BeaconChain', chain)
def initialize_chain(self, base_db: BaseAtomicDB) -> 'BeaconChain':
    """Build a BeaconChain with a deterministic 10-validator mock genesis.

    Only used for testing: private keys are the fixed sequence 2**0 .. 2**9
    and the genesis time is pinned to ``ZERO_TIMESTAMP``, so the resulting
    chain is reproducible across runs.
    """
    config = SERENITY_CONFIG
    # Only used for testing
    num_validators = 10
    privkeys = tuple(2**i for i in range(num_validators))
    # Dict comprehension replaces the manual accumulate-in-a-loop pattern.
    keymap = {bls.privtopub(k): k for k in privkeys}
    state, block = create_mock_genesis(
        num_validators=num_validators,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
        genesis_time=ZERO_TIMESTAMP,
    )
    return cast(
        'BeaconChain',
        self.beacon_chain_class.from_genesis(
            base_db=base_db,
            genesis_state=state,
            genesis_block=block,
        ))
def _create(cls, model_class: Type[BaseBeaconChain], *args: Any, **kwargs: Any) -> BaseBeaconChain:
    """factory_boy hook: build a chain from a freshly generated mock genesis."""
    override_lengths(cls.config)
    validator_keys = mk_keymap_of_size(cls.num_validators)
    state, block = create_mock_genesis(
        config=cls.config,
        pubkeys=tuple(validator_keys),
        keymap=validator_keys,
        genesis_block_class=SerenityBeaconBlock,
        genesis_time=Timestamp(int(time.time())),
    )
    genesis_config = Eth2GenesisConfig(
        model_class.get_genesis_state_machine_class().config)
    # ``db`` may be injected by the caller; default to a fresh AtomicDB.
    return model_class.from_genesis(
        base_db=kwargs.pop("db", AtomicDB()),
        genesis_state=state,
        genesis_block=block,
        genesis_config=genesis_config,
    )
def test_demo(base_db, validator_count, keymap, pubkeys, fork_choice_scoring):
    """Drive three epochs of block production with full mock attestation
    participation and assert the chain reaches the expected slot.

    NOTE(review): the final assertions expect justification/finalization to
    remain at the genesis epoch — presumably because this chain is too short
    to finalize; confirm against the fork-choice rules in use.
    """
    bls.use_noop_backend()
    slots_per_epoch = 8
    config = SERENITY_CONFIG._replace(
        SLOTS_PER_EPOCH=slots_per_epoch,
        GENESIS_EPOCH=slot_to_epoch(SERENITY_CONFIG.GENESIS_SLOT, slots_per_epoch),
        TARGET_COMMITTEE_SIZE=3,
        SHARD_COUNT=2,
        MIN_ATTESTATION_INCLUSION_DELAY=2,
    )
    override_vector_lengths(config)
    fixture_sm_class = SerenityStateMachine.configure(
        __name__='SerenityStateMachineForTesting',
        config=config,
    )
    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db, config)
    attestation_pool = AttestationPool()
    # TODO(ralexstokes) clean up how the cache is populated
    # NOTE(review): the bare index access below appears to exist only for its
    # side effect on the ``pubkeys`` fixture's cache — confirm.
    for i in range(validator_count):
        pubkeys[i]
    genesis_state, genesis_block = create_mock_genesis(
        num_validators=validator_count,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    # Every mock validator must be active from the genesis slot.
    for i in range(validator_count):
        assert genesis_state.validators[i].is_active(genesis_slot)
    chaindb.persist_block(genesis_block, SerenityBeaconBlock, fork_choice_scoring)
    chaindb.persist_state(genesis_state)
    state = genesis_state
    block = genesis_block
    chain_length = 3 * config.SLOTS_PER_EPOCH
    blocks = (block, )
    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]
    for current_slot in range(genesis_slot + 1, genesis_slot + chain_length + 1):
        # Include attestations only once they satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()
        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                attestation_pool,
                blocks[-1].slot,
            ),
            block_class=SerenityBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )
        # Get state machine instance
        sm = fixture_sm_class(
            chaindb,
            attestation_pool,
            blocks[-1].slot,
        )
        state, _ = sm.import_block(block)
        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenityBeaconBlock,
                              fork_choice_scoring)
        blocks += (block, )
        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                attestation_pool,
                block.slot,
            ),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signing_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations
    assert state.slot == chain_length + genesis_slot
    # Justification assertions
    assert state.current_justified_epoch == genesis_epoch
    assert state.finalized_epoch == genesis_epoch
# NOTE(review): fragment — the opening ``privkeys = tuple(`` of the generator
# expression below and the remainder of ``BeaconChainFactory._create`` fall
# outside this chunk; code left token-identical, only reformatted.
    int.from_bytes(hash_eth2(str(i).encode("utf-8"))[:4], "big")
    for i in range(NUM_VALIDATORS)
)
index_to_pubkey = {}
keymap = {}  # pub -> priv
# Derive each validator's pubkey and record both index->pubkey and pubkey->privkey.
for i, k in enumerate(privkeys):
    pubkey = bls.privtopub(k)
    index_to_pubkey[i] = pubkey
    keymap[pubkey] = k
override_lengths(XIAO_LONG_BAO_CONFIG)
genesis_state, genesis_block = create_mock_genesis(
    config=XIAO_LONG_BAO_CONFIG,
    pubkeys=tuple(keymap.keys()),
    keymap=keymap,
    genesis_block_class=SerenityBeaconBlock,
    genesis_time=Timestamp(int(time.time())),
)


class BeaconChainFactory(factory.Factory):
    class Meta:
        model = TestnetChain

    @classmethod
    def _create(
        cls, model_class: Type[TestnetChain], *args: Any, **kwargs: Any
    ) -> BaseBeaconChain:
        # NOTE(review): method body truncated in this chunk after the line below.
        db = kwargs.pop("db", AtomicDB())
def test_demo(base_db, num_validators, config, keymap, fixture_sm_class):
    """Drive three epochs of block production with full mock attestation
    participation, then assert the chain justified epoch 2 and finalized
    epoch 1 relative to genesis.
    """
    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db)
    genesis_state, genesis_block = create_mock_genesis(
        num_validators=num_validators,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    # Every mock validator must be active from the genesis slot.
    for i in range(num_validators):
        assert genesis_state.validator_registry[i].is_active(genesis_slot)
    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)
    state = genesis_state
    block = genesis_block
    chain_length = 3 * config.SLOTS_PER_EPOCH
    blocks = (block, )
    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]
    for current_slot in range(genesis_slot + 1, genesis_slot + chain_length + 1):
        # Include attestations only once they satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()
        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                blocks[-1],
            ),
            block_class=SerenityBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )
        # Get state machine instance
        sm = fixture_sm_class(
            chaindb,
            blocks[-1],
        )
        state, _ = sm.import_block(block)
        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenityBeaconBlock)
        blocks += (block, )
        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                block,
            ),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signed_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations
    assert state.slot == chain_length + genesis_slot
    assert isinstance(sm.block, SerenityBeaconBlock)
    # Justification assertions
    assert state.current_justified_epoch == 2 + genesis_epoch
    assert state.finalized_epoch == 1 + genesis_epoch
def test_demo(base_db, validator_count, keymap, pubkeys, fork_choice_scoring):
    """Drive four epochs of block production with full mock attestation
    participation and assert the resulting justification/finalization
    checkpoints (see the detailed walkthrough before the final asserts).
    """
    bls.use_noop_backend()
    config = MINIMAL_SERENITY_CONFIG
    override_lengths(config)
    fixture_sm_class = SerenityStateMachine.configure(
        __name__="SerenityStateMachineForTesting", config=config)
    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db, config)
    genesis_state, genesis_block = create_mock_genesis(
        pubkeys=pubkeys[:validator_count],
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    # Every mock validator must be active from the genesis slot.
    for i in range(validator_count):
        assert genesis_state.validators[i].is_active(genesis_slot)
    chaindb.persist_block(
        SerenitySignedBeaconBlock.create(message=genesis_block),
        SerenitySignedBeaconBlock,
        fork_choice_scoring,
    )
    chaindb.persist_state(genesis_state)
    state = genesis_state
    block = SerenitySignedBeaconBlock.create(message=genesis_block)
    chain_length = 4 * config.SLOTS_PER_EPOCH
    blocks = (block, )
    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]
    for current_slot in range(genesis_slot + 1, genesis_slot + chain_length + 1):
        # Include attestations only once they satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()
        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(chaindb),
            signed_block_class=SerenitySignedBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )
        # Get state machine instance
        sm = fixture_sm_class(chaindb)
        state, _ = sm.import_block(block, state)
        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenitySignedBeaconBlock, fork_choice_scoring)
        blocks += (block, )
        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(chaindb),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signing_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations
    assert state.slot == chain_length + genesis_slot
    # Justification assertions
    # NOTE: why are the number `2` or `3` used in the checks below?
    # Answer:
    # "We do not check any justification and finality during epochs 0 or 1. We do check for
    # justification and finality from epoch 2 onward."
    # [epoch 0]------[epoch 1]------>
    #
    # "In epoch 2, we justify the current epoch. This epoch is in fact justified but we do not
    # recognize it in the protocol due to an artifact of the construction of the genesis state
    # (using the `zero` value for `Checkpoint` type)."
    # [epoch 0]------[epoch 1]------[epoch 2]*------>
    # []*: checkpoint justified
    # []**: checkpoint finalized
    #
    # "In epoch 3, we have the previous justified checkpoint at the prior current justified
    # checkpoint (so `GENESIS_EPOCH + 2`) and we justify this current epoch. we check finality here
    # and see that we finalize the prior justified checkpoint at epoch 2."
    # [epoch 0]------[epoch 1]------[epoch 2]**------[epoch 3]*------>
    #
    # "Given the way we handle epoch processing (i.e. process a given epoch at the start of
    # the next epoch), we need to transition through `4 * SLOTS_PER_EPOCH` worth of slots to
    # include the processing of epoch 3."
    #
    # source: https://github.com/ethereum/trinity/pull/1214#issuecomment-546184080
    #
    # epoch | prev_justified_checkpoint | cur_justified_checkpoint | finalized_checkpoint
    # ------|---------------------------|--------------------------|---------------------
    # 0     | 0                         | 0                        | 0
    # 1     | 0                         | 0                        | 0
    # 2     | 0                         | 0                        | 0
    # 3     | 0                         | 2                        | 0
    # 4     | 2                         | 3                        | 2
    assert state.previous_justified_checkpoint.epoch == 2 + genesis_epoch
    assert state.current_justified_checkpoint.epoch == 3 + genesis_epoch
    assert state.finalized_checkpoint.epoch == 2 + genesis_epoch
# Module-level test fixtures: deterministic keys, a mock genesis, and the
# chain class derived from a testnet BeaconChainConfig.
# NOTE(review): fragment — the body of ``FakeProtocol`` at the end falls
# outside this chunk; code left token-identical, only reformatted.
privkeys = tuple(
    int.from_bytes(hash_eth2(str(i).encode('utf-8'))[:4], 'big')
    for i in range(NUM_VALIDATORS))
index_to_pubkey = {}
keymap = {}  # pub -> priv
# Derive each validator's pubkey and record both index->pubkey and pubkey->privkey.
for i, k in enumerate(privkeys):
    pubkey = bls.privtopub(k)
    index_to_pubkey[i] = pubkey
    keymap[pubkey] = k
genesis_time = int(time.time())
genesis_state, genesis_block = create_mock_genesis(
    num_validators=NUM_VALIDATORS,
    config=XIAO_LONG_BAO_CONFIG,
    keymap=keymap,
    genesis_block_class=SerenityBeaconBlock,
    genesis_time=genesis_time,
)
genesis_data = BeaconGenesisData(
    genesis_time=genesis_time,
    genesis_slot=XIAO_LONG_BAO_CONFIG.GENESIS_SLOT,
    keymap=keymap,
    num_validators=NUM_VALIDATORS,
)
beacon_chain_config = BeaconChainConfig(chain_name='TestTestTest', genesis_data=genesis_data)
chain_class = beacon_chain_config.beacon_chain_class


class FakeProtocol:
def test_demo(base_db, keymap):
    """Drive three epochs of block production (40 validators, shrunken config)
    with full mock attestation participation, then assert the chain justified
    epoch 2 and finalized epoch 1 relative to genesis.
    """
    slots_per_epoch = 8
    # Shrink the config so three epochs of work stay fast in a test run.
    config = SERENITY_CONFIG._replace(
        SLOTS_PER_EPOCH=slots_per_epoch,
        GENESIS_EPOCH=slot_to_epoch(SERENITY_CONFIG.GENESIS_SLOT, slots_per_epoch),
        TARGET_COMMITTEE_SIZE=3,
        SHARD_COUNT=2,
        MIN_ATTESTATION_INCLUSION_DELAY=2,
    )
    fixture_sm_class = SerenityStateMachine.configure(
        __name__='SerenityStateMachineForTesting',
        config=config,
    )
    num_validators = 40
    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db, config)
    genesis_state, genesis_block = create_mock_genesis(
        num_validators=num_validators,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    # Every mock validator must be active from the genesis slot.
    for i in range(num_validators):
        assert genesis_state.validator_registry[i].is_active(genesis_slot)
    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)
    state = genesis_state
    block = genesis_block
    chain_length = 3 * config.SLOTS_PER_EPOCH
    blocks = (block, )
    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]
    for current_slot in range(genesis_slot + 1, genesis_slot + chain_length + 1):
        # Include attestations only once they satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()
        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                blocks[-1],
            ),
            block_class=SerenityBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )
        # Get state machine instance
        sm = fixture_sm_class(
            chaindb,
            blocks[-1],
        )
        state, _ = sm.import_block(block)
        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenityBeaconBlock)
        blocks += (block, )
        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                block,
            ),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signing_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations
    assert state.slot == chain_length + genesis_slot
    assert isinstance(sm.block, SerenityBeaconBlock)
    # Justification assertions
    assert state.current_justified_epoch == 2 + genesis_epoch
    assert state.finalized_epoch == 1 + genesis_epoch
def test_demo(base_db, num_validators, config, keymap, fixture_sm_class):
    """Import three epochs' worth of blocks with mock attestations and confirm
    the chain reaches the final slot.

    Fixes over the previous revision: dropped a dead ``current_slot = 1``
    assignment (the loop immediately rebinds it) and corrected the misleading
    "two epochs" comment — ``chain_length`` spans three epochs.
    """
    chaindb = BeaconChainDB(base_db)
    genesis_state, genesis_block = create_mock_genesis(
        num_validators=num_validators,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    # Every mock validator must be active at slot 0.
    for i in range(num_validators):
        assert genesis_state.validator_registry[i].is_active(0)
    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)
    state = genesis_state
    chain_length = 3 * config.EPOCH_LENGTH
    attestations = ()
    for current_slot in range(chain_length):  # three epochs
        block = create_mock_block(
            state=state,
            config=config,
            block_class=SerenityBeaconBlock,
            parent_block=genesis_block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )
        block = block.copy(
            body=block.body.copy(
                attestations=attestations,
            )
        )
        # Get state machine instance
        sm = fixture_sm_class(
            chaindb,
            block,
            parent_block_class=SerenityBeaconBlock,
        )
        state, _ = sm.import_block(block)
        # TODO: move to chain level?
        block = block.copy(
            state_root=state.root,
        )
        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenityBeaconBlock)
        # Build attestations for the slot that is now old enough to be included.
        if current_slot > config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestation_slot = current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY
            attestations = create_mock_signed_attestations_at_slot(
                state,
                config,
                attestation_slot,
                keymap,
                1.0,
            )
        else:
            attestations = ()
    assert state.slot == chain_length - 1
    assert isinstance(sm.block, SerenityBeaconBlock)