def get_beacon_shell_context(
        database_dir: Path,
        trinity_config: TrinityConfig) -> Iterator[Dict[str, Any]]:
    """Yield a namespace of objects useful in an interactive beacon shell.

    The database handle comes from ``_get_base_db``, which decides whether to
    attach to a running trinity process or open the database directly.
    """
    app_config = trinity_config.get_app_config(BeaconAppConfig)
    ipc_path = trinity_config.database_ipc_path
    # Presence of the IPC file is taken to mean another trinity process owns the DB.
    trinity_already_running = ipc_path.exists()
    with _get_base_db(database_dir, ipc_path) as db:
        chain_config = app_config.get_chain_config()
        chain = chain_config.beacon_chain_class(db, chain_config.genesis_config)
        chaindb = BeaconChainDB(db, chain_config.genesis_config)
        head = chaindb.get_canonical_head(BeaconBlock)
        context = {
            'db': db,
            'chaindb': chaindb,
            'trinity_config': trinity_config,
            'chain_config': chain_config,
            'chain': chain,
            'block_number': head.slot,
            'hex_hash': head.hash_tree_root.hex(),
            'state_root_hex': encode_hex(head.state_root),
            'trinity_already_running': trinity_already_running,
        }
        yield context
def get_beacon_shell_context(database_dir: Path,
                             trinity_config: TrinityConfig) -> Dict[str, Any]:
    """Return a namespace of objects useful in an interactive beacon shell.

    Attaches to a running trinity database process over IPC when one exists,
    otherwise opens the LevelDB directory directly.
    """
    app_config = trinity_config.get_app_config(BeaconAppConfig)
    ipc_path = trinity_config.database_ipc_path
    # Presence of the IPC file is taken to mean another trinity process owns the DB.
    trinity_already_running = ipc_path.exists()
    db: DatabaseAPI
    if trinity_already_running:
        db = DBClient.connect(ipc_path)
    else:
        db = LevelDB(database_dir)
    chain_config = app_config.get_chain_config()
    chain = chain_config.beacon_chain_class(db, chain_config.genesis_config)
    chaindb = BeaconChainDB(db, chain_config.genesis_config)
    head = chaindb.get_canonical_head(BeaconBlock)
    shell_context = {
        'db': db,
        'chaindb': chaindb,
        'trinity_config': trinity_config,
        'chain_config': chain_config,
        'chain': chain,
        'block_number': head.slot,
        'hex_hash': head.hash_tree_root.hex(),
        'state_root_hex': encode_hex(head.state_root),
        'trinity_already_running': trinity_already_running,
    }
    return shell_context
def get_beacon_shell_context(database_dir: Path,
                             trinity_config: TrinityConfig) -> Dict[str, Any]:
    """Return a namespace of objects useful in an interactive beacon shell.

    Attaches to a running trinity database process over IPC when one exists,
    otherwise opens the LevelDB directory directly.
    """
    app_config = trinity_config.get_app_config(BeaconAppConfig)
    ipc_path = trinity_config.database_ipc_path
    # Presence of the IPC file is taken to mean another trinity process owns the DB.
    trinity_already_running = ipc_path.exists()
    if trinity_already_running:
        db_manager = beacon.manager.create_db_consumer_manager(
            ipc_path)  # type: ignore
        db = db_manager.get_db()
    else:
        db = LevelDB(database_dir)
    chain_config = app_config.get_chain_config()
    # BUGFIX: the class object was first assigned to `chain` and then
    # immediately overwritten by the instantiation below — dead store removed.
    attestation_pool = AttestationPool()
    chain = chain_config.beacon_chain_class(db, attestation_pool,
                                            chain_config.genesis_config)
    chaindb = BeaconChainDB(db, chain_config.genesis_config)
    head = chaindb.get_canonical_head(BeaconBlock)
    return {
        'db': db,
        'chaindb': chaindb,
        'trinity_config': trinity_config,
        'chain_config': chain_config,
        'chain': chain,
        'block_number': head.slot,
        'hex_hash': head.hash_tree_root.hex(),
        'state_root_hex': encode_hex(head.state_root),
        'trinity_already_running': trinity_already_running
    }
def get_fresh_chain_db():
    """Return a new in-memory BeaconChainDB seeded with a slot-0 test block."""
    genesis_block = create_test_block(slot=0)
    chain_db = BeaconChainDB(AtomicDB())
    chain_db.persist_block(genesis_block, BeaconBlock)
    return chain_db
def apply_advance_to_slot(
        test_case: StateTestCase,
        sm_class: Type[SerenityStateMachine],
        chaindb: BeaconChainDB,
        attestation_pool: AttestationPool,
        state: BeaconState) -> Tuple[BeaconState, BeaconChainDB]:
    """Advance ``state`` to the test case's target slot.

    Returns the advanced state together with the (mutated) chain DB.
    """
    target_slot = test_case.pre.slot + test_case.slots
    working_state = state.copy()
    sm = sm_class(chaindb, attestation_pool, None, working_state)
    # The state machine reads the persisted state during slot processing.
    chaindb.persist_state(working_state)
    return advance_to_slot(sm, working_state, target_slot), chaindb
def test_per_slot_transition(base_db, genesis_block, genesis_state,
                             fixture_sm_class, config, state_slot, keymap):
    """cache_state + per_slot_transition advance the slot by one and update
    the latest_block_roots / historical_roots accumulators."""
    chaindb = BeaconChainDB(base_db)
    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)
    state = genesis_state

    # Create a block
    block = create_mock_block(
        state=state,
        config=config,
        state_machine=fixture_sm_class(
            chaindb,
            genesis_block,
        ),
        block_class=SerenityBeaconBlock,
        parent_block=genesis_block,
        keymap=keymap,
        slot=state_slot,
    )

    # Store in chaindb
    chaindb.persist_block(block, SerenityBeaconBlock)

    # Get state machine instance
    sm = fixture_sm_class(
        chaindb,
        block,
    )

    # Get state transition instance
    st = sm.state_transition_class(sm.config)

    # NOTE: we want to run both functions, however they are run independently
    # so we have two function calls
    updated_state = st.cache_state(state)
    updated_state = st.per_slot_transition(updated_state)

    # Ensure that slot gets increased by 1
    assert updated_state.slot == state.slot + 1

    # latest_block_roots: parent root is recorded at the prior slot's index.
    latest_block_roots_index = (updated_state.slot -
                                1) % st.config.SLOTS_PER_HISTORICAL_ROOT
    assert updated_state.latest_block_roots[
        latest_block_roots_index] == block.previous_block_root

    # historical_roots: a batch is appended once per SLOTS_PER_HISTORICAL_ROOT
    # boundary; otherwise the accumulator is untouched.
    if updated_state.slot % st.config.SLOTS_PER_HISTORICAL_ROOT == 0:
        historical_batch = HistoricalBatch(
            block_roots=state.latest_block_roots,
            state_roots=state.latest_state_roots,
            slots_per_historical_root=config.SLOTS_PER_HISTORICAL_ROOT,
        )
        assert updated_state.historical_roots[
            -1] == historical_batch.hash_tree_root
    else:
        assert updated_state.historical_roots == state.historical_roots
def create_db_server_manager(trinity_config: TrinityConfig,
                             base_db: BaseAtomicDB) -> BaseManager:
    """Build a BaseManager exposing the beacon DB and chain DB over IPC."""
    app_config = trinity_config.get_app_config(BeaconAppConfig)
    chain_config = app_config.get_chain_config()
    chaindb = BeaconChainDB(base_db)
    # Initialize the beacon database lazily on first launch.
    if not is_beacon_database_initialized(chaindb, BeaconBlock):
        initialize_beacon_database(chain_config, chaindb, base_db, BeaconBlock)

    class DBManager(BaseManager):
        pass

    DBManager.register(
        'get_db',
        callable=lambda: TracebackRecorder(base_db),
        proxytype=AsyncDBProxy,
    )
    DBManager.register(
        'get_chaindb',
        callable=lambda: TracebackRecorder(chaindb),
        proxytype=AsyncBeaconChainDBProxy,
    )
    return DBManager(
        address=str(trinity_config.database_ipc_path))  # type: ignore
def create_db_server_manager(trinity_config: TrinityConfig,
                             base_db: BaseAtomicDB) -> BaseManager:
    """Build a BaseManager exposing the beacon DB and chain DB over IPC."""
    app_config = trinity_config.get_app_config(BeaconAppConfig)
    chain_config = app_config.get_chain_config()
    chaindb = BeaconChainDB(base_db, chain_config.genesis_config)
    # Initialize the beacon database lazily on first launch.
    if not is_beacon_database_initialized(chaindb, BeaconBlock):
        initialize_beacon_database(chain_config, chaindb, base_db, BeaconBlock)

    # This enables connection when clients launch from another process on the shell
    multiprocessing.current_process().authkey = AUTH_KEY

    class DBManager(BaseManager):
        pass

    DBManager.register(
        'get_db',
        callable=lambda: TracebackRecorder(base_db),
        proxytype=AsyncDBProxy,
    )
    DBManager.register(
        'get_chaindb',
        callable=lambda: TracebackRecorder(chaindb),
        proxytype=AsyncBeaconChainDBProxy,
    )
    return DBManager(
        address=str(trinity_config.database_ipc_path))  # type: ignore
def apply_blocks(test_case: StateTestCase,
                 sm_class: Type[SerenityStateMachine],
                 chaindb: BeaconChainDB,
                 attestation_pool: AttestationPool,
                 state: BeaconState) -> Tuple[BeaconState, BeaconChainDB]:
    """Import each block of ``test_case`` on top of ``state``.

    Raises ``ValidationError`` when an imported block yields a state root
    other than the one recorded on the block itself.
    """
    result_state = state.copy()
    for block in test_case.blocks:
        sm = sm_class(chaindb, attestation_pool, None, result_state)
        result_state, imported_block = sm.import_block(block)
        chaindb.persist_state(result_state)
        if imported_block.state_root != block.state_root:
            message = (
                f"Block did not have the expected state root:\n"
                f"\tExpected: {encode_hex(block.state_root)}\n"
                f"\tResult: {encode_hex(imported_block.state_root)}\n")
            raise ValidationError(message)
    return result_state, chaindb
def get_base_db(boot_info: BootInfo) -> LevelDB:
    """Open the beacon node's LevelDB database, initializing it if fresh."""
    app_config = boot_info.trinity_config.get_app_config(BeaconAppConfig)
    chain_config = app_config.get_chain_config()
    db = LevelDB(db_path=app_config.database_dir)
    beacon_db = BeaconChainDB(db, chain_config.genesis_config)
    # First launch: write genesis data into the empty database.
    if not is_beacon_database_initialized(beacon_db):
        initialize_beacon_database(chain_config, beacon_db, db)
    return db
def test_per_slot_transition(base_db, genesis_block, genesis_state,
                             fixture_sm_class, config, state_slot, keymap):
    """per_slot_transition advances the slot by one and records the parent
    block root in latest_block_roots / historical_roots."""
    chaindb = BeaconChainDB(base_db)
    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)
    state = genesis_state

    # Create a block
    block = create_mock_block(
        state=state,
        config=config,
        state_machine=fixture_sm_class(
            chaindb,
            genesis_block,
        ),
        block_class=SerenityBeaconBlock,
        parent_block=genesis_block,
        keymap=keymap,
        slot=state_slot,
    )

    # Store in chaindb
    chaindb.persist_block(block, SerenityBeaconBlock)

    # Get state machine instance
    sm = fixture_sm_class(
        chaindb,
        block,
    )

    # Get state transition instance
    st = sm.state_transition_class(sm.config)

    updated_state = st.per_slot_transition(state, block.parent_root)

    # Ensure that slot gets increased by 1
    assert updated_state.slot == state.slot + 1

    # latest_block_roots: parent root is recorded at the prior slot's index.
    latest_block_roots_index = (updated_state.slot -
                                1) % st.config.SLOTS_PER_HISTORICAL_ROOT
    assert updated_state.latest_block_roots[
        latest_block_roots_index] == block.parent_root

    # historical_roots: merkleized batch appended only on the
    # SLOTS_PER_HISTORICAL_ROOT boundary.
    if updated_state.slot % st.config.SLOTS_PER_HISTORICAL_ROOT == 0:
        assert updated_state.historical_roots[-1] == get_merkle_root(
            updated_state.latest_block_roots)
    else:
        assert updated_state.historical_roots == state.historical_roots
def _initialize_beacon_filesystem_and_db(boot_info: BootInfo) -> None:
    """Create the beacon data directories and initialize the database if new."""
    app_config = boot_info.trinity_config.get_app_config(BeaconAppConfig)
    ensure_beacon_dirs(app_config)
    db = LevelDB(db_path=app_config.database_dir)
    chain_config = app_config.get_chain_config()
    beacon_db = BeaconChainDB(db)
    # Only write genesis data when the database is empty.
    if not is_beacon_database_initialized(beacon_db):
        initialize_beacon_database(chain_config, beacon_db, db)
def test_process_attestations(genesis_state, genesis_block,
                              sample_beacon_block_params,
                              sample_beacon_block_body_params, config, keymap,
                              fixture_sm_class, base_db, success):
    """process_attestations accepts a block of valid mock attestations and
    rejects one that contains an attestation slotted in the future."""
    attestation_slot = 0
    current_slot = attestation_slot + config.MIN_ATTESTATION_INCLUSION_DELAY
    state = genesis_state.copy(slot=current_slot, )
    attestations = create_mock_signed_attestations_at_slot(
        state=state,
        config=config,
        state_machine=fixture_sm_class(
            BeaconChainDB(base_db),
            genesis_block,
        ),
        attestation_slot=attestation_slot,
        beacon_block_root=genesis_block.signed_root,
        keymap=keymap,
        voted_attesters_ratio=1.0,
    )
    assert len(attestations) > 0

    if not success:
        # create invalid attestation in the future
        invalid_attestation_data = attestations[-1].data.copy(slot=state.slot +
                                                              10, )
        invalid_attestation = attestations[-1].copy(
            data=invalid_attestation_data, )
        # Replace the last valid attestation with the invalid one.
        attestations = attestations[:-1] + (invalid_attestation, )

    block_body = BeaconBlockBody(**sample_beacon_block_body_params).copy(
        attestations=attestations, )
    block = SerenityBeaconBlock(**sample_beacon_block_params).copy(
        slot=current_slot,
        body=block_body,
    )

    if success:
        new_state = process_attestations(
            state,
            block,
            config,
        )
        assert len(new_state.current_epoch_attestations) == len(attestations)
    else:
        with pytest.raises(ValidationError):
            process_attestations(
                state,
                block,
                config,
            )
def run_state_execution(test_case: StateTestCase,
                        sm_class: Type[SerenityStateMachine],
                        chaindb: BeaconChainDB,
                        attestation_pool: AttestationPool,
                        state: BeaconState) -> BeaconState:
    """Execute a state test: advance to the target slot, then apply blocks."""
    chaindb.persist_state(state)
    post_state, chaindb = apply_advance_to_slot(
        test_case,
        sm_class,
        chaindb,
        attestation_pool,
        state,
    )
    post_state, chaindb = apply_blocks(
        test_case,
        sm_class,
        chaindb,
        attestation_pool,
        post_state,
    )
    return post_state
def run_database_process(trinity_config: TrinityConfig,
                         db_class: Type[LevelDB]) -> None:
    """Serve the beacon database over IPC until the manager is stopped."""
    with trinity_config.process_id_file('database'):
        app_config = trinity_config.get_app_config(BeaconAppConfig)
        chain_config = app_config.get_chain_config()
        db = db_class(db_path=app_config.database_dir)
        beacon_db = BeaconChainDB(db, chain_config.genesis_config)
        # Write genesis data on first launch only.
        if not is_beacon_database_initialized(beacon_db, BeaconBlock):
            initialize_beacon_database(chain_config, beacon_db, db,
                                       BeaconBlock)
        db_manager = DBManager(db)
        with db_manager.run(trinity_config.database_ipc_path):
            try:
                db_manager.wait_stopped()
            except KeyboardInterrupt:
                # Ctrl-C is the expected shutdown path; exit quietly.
                pass
def execute_state_transtion(test_case, base_db):
    """Run a fixture-defined state transition and diff the result against the
    expected post-state, field by field."""
    bls.use_noop_backend()
    dict_config = test_case['config']
    verify_signatures = test_case['verify_signatures']
    dict_initial_state = test_case['initial_state']
    dict_blocks = test_case['blocks']
    dict_expected_state = test_case['expected_state']

    # TODO: make it case by case
    assert verify_signatures is False

    # Build the config and size the SSZ vector fields accordingly.
    config = generate_config_by_dict(dict_config)
    override_vector_lengths(config)

    # Deserialize the pre-state and the block sequence.
    pre_state = from_formatted_dict(dict_initial_state, BeaconState)
    blocks = tuple(
        from_formatted_dict(dict_block, SerenityBeaconBlock)
        for dict_block in dict_blocks)

    sm_class = SerenityStateMachine.configure(
        __name__='SerenityStateMachineForTesting',
        config=config,
    )
    chaindb = BeaconChainDB(base_db, Eth2GenesisConfig(config))
    attestation_pool = AttestationPool()

    post_state = pre_state.copy()
    for block in blocks:
        sm = sm_class(chaindb, attestation_pool, None, post_state)
        post_state, _ = sm.import_block(block)

    # Use dict diff, easier to see the diff
    dict_post_state = to_formatted_dict(post_state, BeaconState)
    for key, expected_value in dict_expected_state.items():
        # JSON fixtures encode tuples as lists; normalize before comparing.
        if isinstance(expected_value, list):
            expected_value = tuple(expected_value)
        assert dict_post_state[key] == expected_value
def test_process_max_attestations(genesis_state, genesis_block,
                                  sample_beacon_block_params,
                                  sample_beacon_block_body_params, config,
                                  keymap, fixture_sm_class, base_db):
    """A block carrying more than MAX_ATTESTATIONS attestations must fail
    validation in process_attestations."""
    attestation_slot = config.GENESIS_SLOT
    current_slot = attestation_slot + config.MIN_ATTESTATION_INCLUSION_DELAY
    state = genesis_state.copy(slot=current_slot, )
    attestations = create_mock_signed_attestations_at_slot(
        state=state,
        config=config,
        state_machine=fixture_sm_class(
            BeaconChainDB(base_db),
            genesis_block,
        ),
        attestation_slot=attestation_slot,
        beacon_block_root=genesis_block.signed_root,
        keymap=keymap,
        voted_attesters_ratio=1.0,
    )
    attestations_count = len(attestations)
    assert attestations_count > 0

    # Repeat the attestation set enough times to exceed MAX_ATTESTATIONS.
    block_body = BeaconBlockBody(**sample_beacon_block_body_params).copy(
        attestations=attestations *
        (attestations_count // config.MAX_ATTESTATIONS + 1), )
    block = SerenityBeaconBlock(**sample_beacon_block_params).copy(
        slot=current_slot,
        body=block_body,
    )
    with pytest.raises(ValidationError):
        process_attestations(
            state,
            block,
            config,
        )
def create_db_server_manager(trinity_config: TrinityConfig,
                             base_db: BaseAtomicDB) -> BaseManager:
    """Build a BaseManager exposing the beacon DB and chain DB over IPC."""
    chaindb = BeaconChainDB(base_db)
    # TODO: handle initialization

    class DBManager(BaseManager):
        pass

    DBManager.register(
        'get_db',
        callable=lambda: TracebackRecorder(base_db),
        proxytype=AsyncDBProxy,
    )
    DBManager.register(
        'get_chaindb',
        callable=lambda: TracebackRecorder(chaindb),
        proxytype=AsyncBeaconChainDBProxy,
    )
    return DBManager(
        address=str(trinity_config.database_ipc_path))  # type: ignore
def __init__(
        self,
        local_node_key: PrivateKey,
        eth2_config: Eth2Config,
        chain_config: BeaconChainConfig,
        database_dir: Path,
        chain_class: Type[BaseBeaconChain],
        time_provider: TimeProvider = get_unix_time,
) -> None:
    """Set up the node's networking key pair, clock, databases, and chain.

    ``time_provider`` defaults to wall-clock UNIX time and exists so tests
    can inject a deterministic clock source.
    """
    self._local_key_pair = create_new_key_pair(local_node_key.to_bytes())
    self._eth2_config = eth2_config
    # Clock is derived from the eth2 config and the chain's genesis time.
    self._clock = _mk_clock(eth2_config, chain_config.genesis_time,
                            time_provider)
    self._base_db = LevelDB(db_path=database_dir)
    self._chain_db = BeaconChainDB(self._base_db, eth2_config)
    # First launch: write genesis data into the fresh database.
    if not is_beacon_database_initialized(self._chain_db):
        initialize_beacon_database(chain_config, self._chain_db,
                                   self._base_db)
    self._chain = chain_class(self._base_db, eth2_config)
def test_demo(base_db, sample_beacon_block_params, genesis_state,
              fixture_sm_class, config, privkeys, pubkeys):
    """Smoke test: sign a block as the expected proposer, import it through
    the state machine, and check the state advanced to the block's slot."""
    chaindb = BeaconChainDB(base_db)
    state = genesis_state
    block = SerenityBeaconBlock(**sample_beacon_block_params).copy(
        slot=state.slot + 2,
        state_root=state.root,
    )

    # Sign block
    beacon_proposer_index = get_beacon_proposer_index(
        state,
        block.slot,
        config.EPOCH_LENGTH,
    )
    # Look up the proposer's private key via its registered pubkey.
    index_in_privkeys = pubkeys.index(
        state.validator_registry[beacon_proposer_index].pubkey)
    beacon_proposer_privkey = privkeys[index_in_privkeys]
    empty_signature_block_root = block.block_without_signature_root
    proposal_root = ProposalSignedData(
        block.slot,
        config.BEACON_CHAIN_SHARD_NUMBER,
        empty_signature_block_root,
    ).root
    block = block.copy(signature=bls.sign(
        message=proposal_root,
        privkey=beacon_proposer_privkey,
        domain=SignatureDomain.DOMAIN_PROPOSAL,
    ), )

    # Store in chaindb
    chaindb.persist_block(block, SerenityBeaconBlock)
    chaindb.persist_state(state)

    # Get state machine instance
    sm = fixture_sm_class(chaindb, block.root, SerenityBeaconBlock)
    result_state, _ = sm.import_block(block)

    assert state.slot == 0
    assert result_state.slot == block.slot
    assert isinstance(sm.block, SerenityBeaconBlock)
def test_demo(base_db, validator_count, keymap, pubkeys, fork_choice_scoring):
    """Drive three epochs of mock blocks and attestations through the state
    machine; with this chain length, justification/finalization remain at the
    genesis epoch."""
    bls.use_noop_backend()
    slots_per_epoch = 8
    config = SERENITY_CONFIG._replace(
        SLOTS_PER_EPOCH=slots_per_epoch,
        GENESIS_EPOCH=slot_to_epoch(SERENITY_CONFIG.GENESIS_SLOT,
                                    slots_per_epoch),
        TARGET_COMMITTEE_SIZE=3,
        SHARD_COUNT=2,
        MIN_ATTESTATION_INCLUSION_DELAY=2,
    )
    override_vector_lengths(config)
    fixture_sm_class = SerenityStateMachine.configure(
        __name__='SerenityStateMachineForTesting',
        config=config,
    )
    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db, config)
    attestation_pool = AttestationPool()

    # TODO(ralexstokes) clean up how the cache is populated
    for i in range(validator_count):
        pubkeys[i]

    genesis_state, genesis_block = create_mock_genesis(
        num_validators=validator_count,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    for i in range(validator_count):
        assert genesis_state.validators[i].is_active(genesis_slot)

    chaindb.persist_block(genesis_block, SerenityBeaconBlock,
                          fork_choice_scoring)
    chaindb.persist_state(genesis_state)

    state = genesis_state
    block = genesis_block

    chain_length = 3 * config.SLOTS_PER_EPOCH
    blocks = (block, )

    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]

    for current_slot in range(genesis_slot + 1,
                              genesis_slot + chain_length + 1):
        # Only include attestations old enough to satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()

        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                attestation_pool,
                blocks[-1].slot,
            ),
            block_class=SerenityBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )

        # Get state machine instance
        sm = fixture_sm_class(
            chaindb,
            attestation_pool,
            blocks[-1].slot,
        )
        state, _ = sm.import_block(block)

        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenityBeaconBlock, fork_choice_scoring)

        blocks += (block, )

        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                attestation_pool,
                block.slot,
            ),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signing_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations

    assert state.slot == chain_length + genesis_slot

    # Justification assertions
    assert state.current_justified_epoch == genesis_epoch
    assert state.finalized_epoch == genesis_epoch
def chaindb(base_db, config):
    """Chain-database fixture built on top of the ``base_db`` fixture."""
    beacon_db = BeaconChainDB(base_db, config)
    return beacon_db
def test_demo(base_db, num_validators, config, keymap, fixture_sm_class):
    """Three epochs of mock blocks/attestations justify epoch 2 and finalize
    epoch 1 (relative to the genesis epoch)."""
    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db)
    genesis_state, genesis_block = create_mock_genesis(
        num_validators=num_validators,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    for i in range(num_validators):
        assert genesis_state.validator_registry[i].is_active(genesis_slot)

    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)

    state = genesis_state
    block = genesis_block

    chain_length = 3 * config.SLOTS_PER_EPOCH
    blocks = (block, )

    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]

    for current_slot in range(genesis_slot + 1,
                              genesis_slot + chain_length + 1):
        # Only include attestations old enough to satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()

        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                blocks[-1],
            ),
            block_class=SerenityBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )

        # Get state machine instance
        sm = fixture_sm_class(
            chaindb,
            blocks[-1],
        )
        state, _ = sm.import_block(block)

        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenityBeaconBlock)

        blocks += (block, )

        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                block,
            ),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signed_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations

    assert state.slot == chain_length + genesis_slot
    assert isinstance(sm.block, SerenityBeaconBlock)

    # Justification assertions
    assert state.current_justified_epoch == 2 + genesis_epoch
    assert state.finalized_epoch == 1 + genesis_epoch
def test_per_slot_transition(base_db, genesis_block, genesis_state,
                             fixture_sm_class, config, state_slot, keymap):
    """per_slot_transition bumps the slot and updates the proposer's
    randao_layers, the randao mix, latest_block_roots and batched_block_roots."""
    chaindb = BeaconChainDB(base_db)
    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)
    state = genesis_state

    # Create a block
    block = create_mock_block(
        state=state,
        config=config,
        state_machine=fixture_sm_class(
            chaindb,
            genesis_block,
        ),
        block_class=SerenityBeaconBlock,
        parent_block=genesis_block,
        keymap=keymap,
        slot=state_slot,
    )

    # Store in chaindb
    chaindb.persist_block(block, SerenityBeaconBlock)

    # Get state machine instance
    sm = fixture_sm_class(
        chaindb,
        block,
    )

    # Get state transition instance
    st = sm.state_transition_class(sm.config)

    updated_state = st.per_slot_transition(state, block.parent_root)

    # Ensure that slot gets increased by 1
    assert updated_state.slot == state.slot + 1

    # Validator Registry
    # Tweaking the slot, so that we get the correct proposer index
    beacon_proposer_index = get_beacon_proposer_index(
        state,
        state.slot + 1,
        st.config.GENESIS_EPOCH,
        st.config.EPOCH_LENGTH,
        st.config.TARGET_COMMITTEE_SIZE,
        st.config.SHARD_COUNT,
    )
    for validator_index, _ in enumerate(updated_state.validator_registry):
        if validator_index != beacon_proposer_index:
            # Validator Record shouldn't change if not proposer
            assert (updated_state.validator_registry[validator_index] ==
                    state.validator_registry[validator_index])
        else:
            # randao layers of proposer's record should increase by 1
            assert (
                updated_state.validator_registry[validator_index].randao_layers
                == state.validator_registry[validator_index].randao_layers + 1)

    # latest_randao_mixes: carried over unchanged for the new slot.
    assert (updated_state.latest_randao_mixes[
        updated_state.slot % st.config.LATEST_RANDAO_MIXES_LENGTH] ==
            state.latest_randao_mixes[(state.slot) %
                                      st.config.LATEST_RANDAO_MIXES_LENGTH])

    # latest_block_roots: parent root is recorded at the prior slot's index.
    latest_block_roots_index = (updated_state.slot -
                                1) % st.config.LATEST_BLOCK_ROOTS_LENGTH
    assert updated_state.latest_block_roots[
        latest_block_roots_index] == block.parent_root

    # batched_block_roots: merkleized batch appended only on the
    # LATEST_BLOCK_ROOTS_LENGTH boundary.
    if updated_state.slot % st.config.LATEST_BLOCK_ROOTS_LENGTH == 0:
        assert updated_state.batched_block_roots[-1] == get_merkle_root(
            updated_state.latest_block_roots)
    else:
        assert updated_state.batched_block_roots == state.batched_block_roots
def chaindb(base_db):
    """Chain-database fixture built on top of the ``base_db`` fixture."""
    beacon_db = BeaconChainDB(base_db)
    return beacon_db
def test_demo(base_db, validator_count, keymap, pubkeys, fork_choice_scoring):
    """Run four epochs of mock blocks/attestations and check the chain
    justifies and finalizes the expected checkpoints."""
    bls.use_noop_backend()
    config = MINIMAL_SERENITY_CONFIG
    override_lengths(config)
    fixture_sm_class = SerenityStateMachine.configure(
        __name__="SerenityStateMachineForTesting", config=config)
    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db, config)
    genesis_state, genesis_block = create_mock_genesis(
        pubkeys=pubkeys[:validator_count],
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    for i in range(validator_count):
        assert genesis_state.validators[i].is_active(genesis_slot)

    chaindb.persist_block(
        SerenitySignedBeaconBlock.create(message=genesis_block),
        SerenitySignedBeaconBlock,
        fork_choice_scoring,
    )
    chaindb.persist_state(genesis_state)

    state = genesis_state
    block = SerenitySignedBeaconBlock.create(message=genesis_block)

    chain_length = 4 * config.SLOTS_PER_EPOCH
    blocks = (block, )

    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]

    for current_slot in range(genesis_slot + 1,
                              genesis_slot + chain_length + 1):
        # Only include attestations old enough to satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()

        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(chaindb),
            signed_block_class=SerenitySignedBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )

        # Get state machine instance
        sm = fixture_sm_class(chaindb)
        state, _ = sm.import_block(block, state)

        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenitySignedBeaconBlock,
                              fork_choice_scoring)

        blocks += (block, )

        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(chaindb),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signing_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations

    assert state.slot == chain_length + genesis_slot

    # Justification assertions
    # NOTE: why are the number `2` or `3` used in the checks below?
    # Answer:
    # "We do not check any justification and finality during epochs 0 or 1. We do check for
    # justification and finality from epoch 2 onward."
    # [epoch 0]------[epoch 1]------>
    #
    # "In epoch 2, we justify the current epoch. This epoch is in fact justified but we do not
    # recognize it in the protocol due to an artifact of the construction of the genesis state
    # (using the `zero` value for `Checkpoint` type)."
    # [epoch 0]------[epoch 1]------[epoch 2]*------>
    # []*: checkpoint justified
    # []**: checkpoint finalized
    #
    # "In epoch 3, we have the previous justified checkpoint at the prior current justified
    # checkpoint (so `GENESIS_EPOCH + 2`) and we justify this current epoch. we check finality here
    # and see that we finalize the prior justified checkpoint at epoch 2."
    # [epoch 0]------[epoch 1]------[epoch 2]**------[epoch 3]*------>
    #
    # "Given the way we handle epoch processing (i.e. process a given epoch at the start of
    # the next epoch), we need to transition through `4 * SLOTS_PER_EPOCH` worth of slots to
    # include the processing of epoch 3."
    #
    # source: https://github.com/ethereum/trinity/pull/1214#issuecomment-546184080
    #
    # epoch | prev_justified_checkpoint | cur_justified_checkpoint | finalized_checkpoint
    # ------|---------------------------|--------------------------|---------------------
    # 0     | 0                         | 0                        | 0
    # 1     | 0                         | 0                        | 0
    # 2     | 0                         | 0                        | 0
    # 3     | 0                         | 2                        | 0
    # 4     | 2                         | 3                        | 2
    assert state.previous_justified_checkpoint.epoch == 2 + genesis_epoch
    assert state.current_justified_checkpoint.epoch == 3 + genesis_epoch
    assert state.finalized_checkpoint.epoch == 2 + genesis_epoch
def chaindb_at_genesis(base_db, genesis_state, genesis_block,
                       fork_choice_scoring):
    """Chain-database fixture initialized from the genesis state and block.

    ``genesis_block`` is requested only to force the fixture dependency;
    ``from_genesis`` derives the block from the state itself.
    """
    return BeaconChainDB.from_genesis(
        base_db,
        genesis_state,
        SerenitySignedBeaconBlock,
        fork_choice_scoring,
    )
def test_demo(base_db, keymap):
    """Three epochs of mock blocks/attestations with 40 validators justify
    epoch 2 and finalize epoch 1 (relative to the genesis epoch)."""
    slots_per_epoch = 8
    config = SERENITY_CONFIG._replace(
        SLOTS_PER_EPOCH=slots_per_epoch,
        GENESIS_EPOCH=slot_to_epoch(SERENITY_CONFIG.GENESIS_SLOT,
                                    slots_per_epoch),
        TARGET_COMMITTEE_SIZE=3,
        SHARD_COUNT=2,
        MIN_ATTESTATION_INCLUSION_DELAY=2,
    )
    fixture_sm_class = SerenityStateMachine.configure(
        __name__='SerenityStateMachineForTesting',
        config=config,
    )
    num_validators = 40

    genesis_slot = config.GENESIS_SLOT
    genesis_epoch = config.GENESIS_EPOCH
    chaindb = BeaconChainDB(base_db, config)
    genesis_state, genesis_block = create_mock_genesis(
        num_validators=num_validators,
        config=config,
        keymap=keymap,
        genesis_block_class=SerenityBeaconBlock,
    )
    for i in range(num_validators):
        assert genesis_state.validator_registry[i].is_active(genesis_slot)

    chaindb.persist_block(genesis_block, SerenityBeaconBlock)
    chaindb.persist_state(genesis_state)

    state = genesis_state
    block = genesis_block

    chain_length = 3 * config.SLOTS_PER_EPOCH
    blocks = (block, )

    attestations_map = {}  # Dict[Slot, Sequence[Attestation]]

    for current_slot in range(genesis_slot + 1,
                              genesis_slot + chain_length + 1):
        # Only include attestations old enough to satisfy the inclusion delay.
        if current_slot > genesis_slot + config.MIN_ATTESTATION_INCLUSION_DELAY:
            attestations = attestations_map[
                current_slot - config.MIN_ATTESTATION_INCLUSION_DELAY]
        else:
            attestations = ()

        block = create_mock_block(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                blocks[-1],
            ),
            block_class=SerenityBeaconBlock,
            parent_block=block,
            keymap=keymap,
            slot=current_slot,
            attestations=attestations,
        )

        # Get state machine instance
        sm = fixture_sm_class(
            chaindb,
            blocks[-1],
        )
        state, _ = sm.import_block(block)

        chaindb.persist_state(state)
        chaindb.persist_block(block, SerenityBeaconBlock)

        blocks += (block, )

        # Mock attestations
        attestation_slot = current_slot
        attestations = create_mock_signed_attestations_at_slot(
            state=state,
            config=config,
            state_machine=fixture_sm_class(
                chaindb,
                block,
            ),
            attestation_slot=attestation_slot,
            beacon_block_root=block.signing_root,
            keymap=keymap,
            voted_attesters_ratio=1.0,
        )
        attestations_map[attestation_slot] = attestations

    assert state.slot == chain_length + genesis_slot
    assert isinstance(sm.block, SerenityBeaconBlock)

    # Justification assertions
    assert state.current_justified_epoch == 2 + genesis_epoch
    assert state.finalized_epoch == 1 + genesis_epoch
def chaindb(base_db, genesis_config):
    """Chain-database fixture parameterized by the genesis config fixture."""
    beacon_db = BeaconChainDB(base_db, genesis_config)
    return beacon_db
def get_chaindb_of_config(base_db, config):
    """Return a BeaconChainDB whose genesis config is derived from ``config``."""
    genesis_config = Eth2GenesisConfig(config)
    return BeaconChainDB(base_db, genesis_config)