def test_chaindb_get_score(chaindb):
    genesis = BlockHeader(difficulty=1, block_number=0, gas_limit=0)
    chaindb.persist_header(genesis)

    genesis_score_key = SchemaV1.make_block_hash_to_score_lookup_key(genesis.hash)
    genesis_score = rlp.decode(chaindb.db.get(genesis_score_key), sedes=rlp.sedes.big_endian_int)
    assert genesis_score == 1
    assert chaindb.get_score(genesis.hash) == 1

    block1 = BlockHeader(
        difficulty=10,
        block_number=1,
        gas_limit=0,
        parent_hash=genesis.hash,
        timestamp=genesis.timestamp + 1,
    )
    chaindb.persist_header(block1)

    block1_score_key = SchemaV1.make_block_hash_to_score_lookup_key(block1.hash)
    block1_score = rlp.decode(chaindb.db.get(block1_score_key), sedes=rlp.sedes.big_endian_int)
    assert block1_score == 11
    assert chaindb.get_score(block1.hash) == 11
def from_genesis(cls,
                 base_db: AtomicDatabaseAPI,
                 genesis_params: Dict[str, HeaderParams],
                 genesis_state: AccountState = None) -> 'BaseChain':
    genesis_vm_class = cls.get_vm_class_for_block_number(BlockNumber(0))

    pre_genesis_header = BlockHeader(difficulty=0, block_number=-1, gas_limit=0)
    chain_context = ChainContext(cls.chain_id)
    state = genesis_vm_class.build_state(base_db, pre_genesis_header, chain_context)

    if genesis_state is None:
        genesis_state = {}

    # mutation
    apply_state_dict(state, genesis_state)
    state.persist()

    if 'state_root' not in genesis_params:
        # If the genesis state_root was not specified, use the value
        # computed from the initialized state database.
        genesis_params = assoc(genesis_params, 'state_root', state.state_root)
    elif genesis_params['state_root'] != state.state_root:
        # If the genesis state_root was specified, validate that it matches
        # the computed state from the initialized state database.
        raise ValidationError(
            "The provided genesis state root does not match the computed "
            f"genesis state root. Got {state.state_root!r}. "
            f"Expected {genesis_params['state_root']!r}"
        )

    genesis_header = BlockHeader(**genesis_params)
    return cls.from_genesis_header(base_db, genesis_header)
def set_vm(self, block_identifier='latest'):
    _block = None
    if self.w3:
        if block_identifier == 'latest':
            block_identifier = self.w3.eth.blockNumber
        validate_uint256(block_identifier)
        _block = self.w3.eth.getBlock(block_identifier)
    if not _block:
        if block_identifier in [HOMESTEAD_MAINNET_BLOCK, BYZANTIUM_MAINNET_BLOCK, PETERSBURG_MAINNET_BLOCK]:
            _block = self.get_cached_block_by_id(block_identifier)
        else:
            self.logger.error("Unknown block identifier.")
            sys.exit(-4)

    block_header = BlockHeader(
        difficulty=_block.difficulty,
        block_number=_block.number,
        gas_limit=_block.gasLimit,
        timestamp=_block.timestamp,
        coinbase=ZERO_ADDRESS,  # default value
        parent_hash=_block.parentHash,
        uncles_hash=_block.uncles,
        state_root=_block.stateRoot,
        transaction_root=_block.transactionsRoot,
        receipt_root=_block.receiptsRoot,
        bloom=0,  # default value
        gas_used=_block.gasUsed,
        extra_data=_block.extraData,
        mix_hash=_block.mixHash,
        nonce=_block.nonce,
    )
    self.vm = self.chain.get_vm(block_header)
def generate_header_from_parent_header(
        compute_difficulty_fn: Callable[[BlockHeader, int], int],
        parent_header: BlockHeader,
        coinbase: Address,
        timestamp: Optional[int] = None,
        extra_data: bytes = b'') -> BlockHeader:
    """
    Generate BlockHeader from state_root and parent_header
    """
    if timestamp is None:
        timestamp = max(int(time.time()), parent_header.timestamp + 1)
    elif timestamp <= parent_header.timestamp:
        raise ValueError(
            "header.timestamp ({}) should be higher than "
            "parent_header.timestamp ({})".format(
                timestamp,
                parent_header.timestamp,
            )
        )
    header = BlockHeader(
        difficulty=compute_difficulty_fn(parent_header, timestamp),
        block_number=(parent_header.block_number + 1),
        gas_limit=compute_gas_limit(
            parent_header,
            gas_limit_floor=GENESIS_GAS_LIMIT,
        ),
        timestamp=timestamp,
        parent_hash=parent_header.hash,
        state_root=parent_header.state_root,
        coinbase=coinbase,
        extra_data=extra_data,
    )
    return header
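# Usage sketch (not from the original sources): drive the helper above with a
# hypothetical constant-difficulty function. `_constant_difficulty`, `genesis`,
# and the field values are illustrative assumptions, mirroring the minimal
# genesis headers used elsewhere in these snippets.
def _constant_difficulty(parent_header, timestamp):
    return parent_header.difficulty


genesis = BlockHeader(difficulty=100, block_number=0, gas_limit=3000000)
child = generate_header_from_parent_header(
    _constant_difficulty,
    genesis,
    coinbase=b'\x00' * 20,
    timestamp=genesis.timestamp + 1,
)
assert child.block_number == 1
assert child.parent_hash == genesis.hash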
def mk_uncle(block_number):
    return BlockHeader(
        state_root=os.urandom(32),
        difficulty=1000000,
        block_number=block_number,
        gas_limit=3141592,
        timestamp=int(time.time()),
    )
def mk_header_chain(length):
    assert length >= 1
    genesis = BlockHeader(difficulty=100, block_number=0, gas_limit=3000000)
    yield genesis
    parent = genesis
    if length == 1:
        return

    for _ in range(length - 1):
        header = BlockHeader(
            difficulty=100,
            block_number=parent.block_number + 1,
            parent_hash=parent.hash,
            gas_limit=3000000,
        )
        yield header
        parent = header
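# Usage sketch (illustrative, not part of the original tests): materialize the
# generator above and spot-check the parent/child linkage it produces.
headers = tuple(mk_header_chain(3))
assert len(headers) == 3
assert headers[0].block_number == 0
assert headers[1].parent_hash == headers[0].hash
assert headers[2].parent_hash == headers[1].hash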
def test_blockchain_fixtures(fixture_data, fixture):
    try:
        chain = new_chain_from_fixture(fixture)
    except ValueError as e:
        raise AssertionError("could not load chain for {}".format((fixture_data,))) from e

    genesis_params = genesis_params_from_fixture(fixture)
    expected_genesis_header = BlockHeader(**genesis_params)

    # TODO: find out if this is supposed to pass?
    # if 'genesisRLP' in fixture:
    #     assert rlp.encode(genesis_header) == fixture['genesisRLP']

    genesis_block = chain.get_canonical_block_by_number(0)
    genesis_header = genesis_block.header

    assert_imported_genesis_header_unchanged(expected_genesis_header, genesis_header)

    # 1 - mine the genesis block
    # 2 - loop over blocks:
    #     - apply transactions
    #     - mine block
    # 3 - diff resulting state with expected state
    # 4 - check that all previous blocks were valid
    for block_fixture in fixture['blocks']:
        should_be_good_block = 'blockHeader' in block_fixture

        if 'rlp_error' in block_fixture:
            assert not should_be_good_block
            continue

        if should_be_good_block:
            (block, mined_block, block_rlp) = apply_fixture_block_to_chain(
                block_fixture,
                chain,
                perform_validation=False,  # we manually validate below
            )
            assert_mined_block_unchanged(block, mined_block)
            chain.validate_block(block)
        else:
            try:
                apply_fixture_block_to_chain(block_fixture, chain)
            except (TypeError, rlp.DecodingError, rlp.DeserializationError, ValidationError) as err:
                # failure is expected on this bad block
                pass
            else:
                raise AssertionError("Block should have caused a validation error")

    latest_block_hash = chain.get_canonical_block_by_number(chain.get_block().number - 1).hash
    if latest_block_hash != fixture['lastblockhash']:
        verify_account_db(fixture['postState'], chain.get_vm().state.account_db)
def instantiate_vm(vm_class):
    GENESIS_HEADER = BlockHeader(
        difficulty=17179869184,
        block_number=BlockNumber(0),
        gas_limit=5000,
    )
    chain_context = ChainContext(None)
    db = AtomicDB()
    return vm_class(GENESIS_HEADER, ChainDB(db), chain_context, ConsensusContext(db))
def add_receipt_to_header(self, old_header: BlockHeader, receipt: Receipt) -> BlockHeader:
    # Skip merkelizing the account data and persisting it to disk on every transaction.
    # Starting in Byzantium, this is no longer necessary, because the state root isn't
    # in the receipt anymore.
    return old_header.copy(
        bloom=int(BloomFilter(old_header.bloom) | receipt.bloom),
        gas_used=receipt.gas_used,
    )
def test_headerdb_persist_header_disallows_unknown_parent(headerdb):
    header = BlockHeader(
        difficulty=GENESIS_DIFFICULTY,
        block_number=GENESIS_BLOCK_NUMBER,
        gas_limit=GENESIS_GAS_LIMIT,
        parent_hash=b'\x0f' * 32,
    )
    with pytest.raises(ParentNotFound, match="unknown parent"):
        headerdb.persist_header(header)
def deserialize(cls, encoded: List[bytes]) -> BlockHeaderAPI:
    num_fields = len(encoded)
    if num_fields == 16:
        return LondonBlockHeader.deserialize(encoded)
    elif num_fields == 15:
        return BlockHeader.deserialize(encoded)
    else:
        raise ValueError(
            "London & earlier can only handle headers of 15 or 16 fields. "
            f"Got {num_fields} in {encoded!r}"
        )
def mk_header_and_receipts(block_number, num_receipts):
    receipts = mk_receipts(num_receipts)
    root_hash, trie_root_and_data = make_trie_root_and_nodes(receipts)
    header = BlockHeader(
        difficulty=1000000,
        block_number=block_number,
        gas_limit=3141592,
        timestamp=int(time.time()),
        receipt_root=root_hash,
    )
    return header, receipts, (root_hash, trie_root_and_data)
def mk_header_chain(base_header, length):
    previous_header = base_header
    for _ in range(length):
        next_header = BlockHeader.from_parent(
            parent=previous_header,
            timestamp=previous_header.timestamp + 1,
            gas_limit=previous_header.gas_limit,
            difficulty=previous_header.difficulty,
            extra_data=keccak(random.randint(0, 1e18)),
        )
        yield next_header
        previous_header = next_header
def test_blockchain_fixtures(fixture_data, fixture):
    try:
        chain = new_chain_from_fixture(fixture)
    except ValueError as e:
        raise AssertionError("could not load chain for %r" % fixture_data) from e

    genesis_params = genesis_params_from_fixture(fixture)
    expected_genesis_header = BlockHeader(**genesis_params)

    # TODO: find out if this is supposed to pass?
    # if 'genesisRLP' in fixture:
    #     assert rlp.encode(genesis_header) == fixture['genesisRLP']

    genesis_block = chain.get_canonical_block_by_number(0)
    genesis_header = genesis_block.header

    assert_rlp_equal(genesis_header, expected_genesis_header)

    # 1 - mine the genesis block
    # 2 - loop over blocks:
    #     - apply transactions
    #     - mine block
    # 4 - profit!!
    for block_fixture in fixture['blocks']:
        should_be_good_block = 'blockHeader' in block_fixture

        if 'rlp_error' in block_fixture:
            assert not should_be_good_block
            continue

        if should_be_good_block:
            (block, mined_block, block_rlp) = apply_fixture_block_to_chain(block_fixture, chain)
            assert_rlp_equal(block, mined_block)
        else:
            try:
                apply_fixture_block_to_chain(block_fixture, chain)
            except (TypeError, rlp.DecodingError, rlp.DeserializationError, ValidationError) as err:
                # failure is expected on this bad block
                pass
            else:
                raise AssertionError("Block should have caused a validation error")

    latest_block_hash = chain.get_canonical_block_by_number(chain.get_block().number - 1).hash
    assert latest_block_hash == fixture['lastblockhash']

    verify_account_db(fixture['postState'], chain.get_vm().state.account_db)
def build_state(cls,
                db: BaseAtomicDB,
                header: BlockHeader,
                previous_hashes: Iterable[Hash32] = ()) -> BaseState:
    """
    You probably want `VM().state` instead of this.

    Occasionally, you want to build custom state against a particular header and DB,
    even if you don't have the VM initialized. This is a convenience method to do that.
    """
    execution_context = header.create_execution_context(previous_hashes)
    return cls.get_state_class()(db, execution_context, header.state_root)
def __init__(self, vm_class):
    header = BlockHeader(
        difficulty=1,
        gas_limit=8000000,
        gas_used=0,
        bloom=0,
        mix_hash=constants.ZERO_HASH32,
        nonce=constants.GENESIS_NONCE,
        block_number=0,
        parent_hash=constants.ZERO_HASH32,
        receipt_root=constants.BLANK_ROOT_HASH,
        uncles_hash=constants.EMPTY_UNCLE_HASH,
        timestamp=0,
        transaction_root=constants.BLANK_ROOT_HASH,
    )
    self.vm = vm_class(header, ChainDB(AtomicDB()))
async def preview_transactions(self,
                               header: BlockHeader,
                               transactions: Tuple[BaseTransaction, ...],
                               parent_state_root: Hash32,
                               lagging: bool = True) -> None:

    self.run_task(self._preview_address_load(header, parent_state_root, transactions))

    # This is a hack, so that preview executions can load ancestor block-hashes
    self._db[header.hash] = rlp.encode(header)

    # Always broadcast, to start previewing transactions that are further ahead in the block
    old_state_header = header.copy(state_root=parent_state_root)
    self._event_bus.broadcast_nowait(DoStatelessBlockPreview(old_state_header, transactions))
def validate_header(cls,
                    header: BlockHeader,
                    parent_header: BlockHeader,
                    check_seal: bool = True) -> None:
    """
    :raise eth.exceptions.ValidationError: if the header is not valid
    """
    if parent_header is None:
        # to validate the genesis header, check that it equals the canonical header at block number 0
        raise ValidationError("Must have access to parent header to validate current header")
    else:
        validate_length_lte(header.extra_data, 32, title="BlockHeader.extra_data")

        validate_gas_limit(header.gas_limit, parent_header.gas_limit)

        if header.block_number != parent_header.block_number + 1:
            raise ValidationError(
                "Blocks must be numbered consecutively. Block number #{} has parent #{}".format(
                    header.block_number,
                    parent_header.block_number,
                )
            )

        # timestamp
        if header.timestamp <= parent_header.timestamp:
            raise ValidationError(
                "timestamp must be strictly later than parent, but is {} seconds before.\n"
                "- child  : {}\n"
                "- parent : {}. ".format(
                    parent_header.timestamp - header.timestamp,
                    header.timestamp,
                    parent_header.timestamp,
                )
            )

        if check_seal:
            try:
                cls.validate_seal(header)
            except ValidationError:
                cls.cls_logger.warning(
                    "Failed to validate header proof of work on header: %r",
                    header.as_dict(),
                )
                raise
def make_next_header(previous_header,
                     signer_private_key,
                     coinbase=ZERO_ADDRESS,
                     nonce=NONCE_DROP,
                     difficulty=2):

    next_header = sign_block_header(
        BlockHeader.from_parent(
            coinbase=coinbase,
            nonce=nonce,
            parent=previous_header,
            timestamp=previous_header.timestamp + 1,
            gas_limit=previous_header.gas_limit,
            difficulty=difficulty,
            # FIXME: I think our sign_block_header is wrong
            extra_data=VANITY_LENGTH * b'0' + SIGNATURE_LENGTH * b'0',
        ),
        signer_private_key,
    )
    return next_header
def create_fhe_header_from_parent(parent_header, **header_params):
    if 'difficulty' not in header_params:
        # Use setdefault to ensure the new header has the same timestamp we use to calculate its
        # difficulty.
        header_params.setdefault('timestamp', parent_header.timestamp + 1)
        header_params['difficulty'] = compute_fhe_difficulty(
            parent_header,
            header_params['timestamp'],
        )
    if 'gas_limit' not in header_params:
        header_params['gas_limit'] = compute_gas_limit(
            parent_header,
            gas_limit_floor=GENESIS_GAS_LIMIT,
        )

    header = BlockHeader.from_parent(parent=parent_header, **header_params)
    return header
def test_difficulty_fixtures(fixture):
    fork_name = list(fixture.keys())[1]
    if fork_name not in VM_FORK_MAP.keys():
        raise NotImplementedError(f'VM_FORK_MAP needs to be updated to support {fork_name}.')

    vm = VM_FORK_MAP[fork_name]
    fixture_payload = fixture[fork_name].items()

    for _, test_payload in fixture_payload:
        formatted_test_payload = {
            # hexstr -> int for all values in test_payload
            k: to_int(hexstr=v) for k, v in test_payload.items()
        }
        parent_uncle_hash = (
            # 'parentUncles' is either 0 or 1, depending on whether the parent has uncles.
            # Therefore, use EMPTY_UNCLE_HASH when 0 and a non-empty hash when 1.
            EMPTY_UNCLE_HASH if formatted_test_payload['parentUncles'] == 0
            else "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"
        )
        current_block_number = formatted_test_payload['currentBlockNumber']

        parent_header = BlockHeader(
            difficulty=formatted_test_payload['parentDifficulty'],
            block_number=current_block_number - 1,
            timestamp=formatted_test_payload['parentTimestamp'],
            uncles_hash=parent_uncle_hash,
            gas_limit=0,  # necessary for instantiation but arbitrary for this test
        )

        # calculate the current difficulty using the parent header
        difficulty = vm.compute_difficulty(
            parent_header=parent_header,
            timestamp=formatted_test_payload['currentTimestamp'],
        )
        assert difficulty == formatted_test_payload['currentDifficulty']
def set_block_transactions(self,
                           base_block: BaseBlock,
                           new_header: BlockHeader,
                           transactions: Tuple[BaseTransaction, ...],
                           receipts: Tuple[Receipt, ...]) -> BaseBlock:

    tx_root_hash, tx_kv_nodes = make_trie_root_and_nodes(transactions)
    self.chaindb.persist_trie_data_dict(tx_kv_nodes)

    receipt_root_hash, receipt_kv_nodes = make_trie_root_and_nodes(receipts)
    self.chaindb.persist_trie_data_dict(receipt_kv_nodes)

    return base_block.copy(
        transactions=transactions,
        header=new_header.copy(
            transaction_root=tx_root_hash,
            receipt_root=receipt_root_hash,
        ),
    )
def mk_header_and_body(block_number, num_transactions, num_uncles):
    transactions = tuple(mk_transaction() for _ in range(num_transactions))
    uncles = tuple(mk_uncle(block_number - 1) for _ in range(num_uncles))

    transaction_root, trie_data = make_trie_root_and_nodes(transactions)
    uncles_hash = keccak(rlp.encode(uncles))

    body = BlockBody(transactions=transactions, uncles=uncles)

    header = BlockHeader(
        difficulty=1000000,
        block_number=block_number,
        gas_limit=3141592,
        timestamp=int(time.time()),
        transaction_root=transaction_root,
        uncles_hash=uncles_hash,
    )

    return header, body, transaction_root, trie_data, uncles_hash
def create_frontier_header_from_parent(parent_header: BlockHeaderAPI,
                                       **header_params: Any) -> BlockHeader:
    if 'timestamp' not in header_params:
        header_params['timestamp'] = new_timestamp_from_parent(parent_header)

    if 'difficulty' not in header_params:
        # Compute the difficulty from the same timestamp the new header will use.
        header_params['difficulty'] = compute_frontier_difficulty(
            parent_header,
            header_params['timestamp'],
        )
    if 'gas_limit' not in header_params:
        header_params['gas_limit'] = compute_gas_limit(
            parent_header,
            genesis_gas_limit=GENESIS_GAS_LIMIT,
        )

    all_fields = fill_header_params_from_parent(parent_header, **header_params)
    return BlockHeader(**all_fields)
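# Usage sketch (illustrative, not from the original module): derive a child header
# from a parent, overriding only the coinbase. The `parent` values are arbitrary,
# and the asserts assume fill_header_params_from_parent fills block_number and
# parent_hash from the parent, as its name suggests.
parent = BlockHeader(difficulty=131072, block_number=0, gas_limit=3141592)
child = create_frontier_header_from_parent(parent, coinbase=b'\xaa' * 20)
assert child.block_number == parent.block_number + 1
assert child.parent_hash == parent.hash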
def from_genesis(cls,
                 base_db: BaseDB,
                 genesis_params: Dict[str, HeaderParams],
                 genesis_state: AccountState = None) -> 'BaseChain':
    """
    Initializes the Chain from a genesis state.
    """
    genesis_vm_class = cls.get_vm_class_for_block_number(BlockNumber(0))
    account_db = genesis_vm_class.get_state_class().get_account_db_class()(
        base_db,
        BLANK_ROOT_HASH,
    )

    if genesis_state is None:
        genesis_state = {}

    # mutation
    apply_state_dict(account_db, genesis_state)
    account_db.persist()

    if 'state_root' not in genesis_params:
        # If the genesis state_root was not specified, use the value
        # computed from the initialized state database.
        genesis_params = assoc(genesis_params, 'state_root', account_db.state_root)
    elif genesis_params['state_root'] != account_db.state_root:
        # If the genesis state_root was specified, validate that it matches
        # the computed state from the initialized state database.
        raise ValidationError(
            "The provided genesis state root does not match the computed "
            "genesis state root. Got {0}. Expected {1}".format(
                account_db.state_root,
                genesis_params['state_root'],
            )
        )

    genesis_header = BlockHeader(**genesis_params)
    return cls.from_genesis_header(base_db, genesis_header)
def apply_transaction(self,
                      header: BlockHeader,
                      transaction: BaseTransaction,
                      ) -> Tuple[BlockHeader, Receipt, BaseComputation]:
    """
    Apply the transaction to the current block. This is a wrapper around
    :func:`~eth.vm.state.State.apply_transaction` with some extra orchestration logic.

    :param header: header of the block before application
    :param transaction: to apply
    """
    self.validate_transaction_against_header(header, transaction)
    state_root, computation = self.state.apply_transaction(transaction)
    receipt = self.make_receipt(header, transaction, computation, self.state)
    self.validate_receipt(receipt)

    new_header = header.copy(
        bloom=int(BloomFilter(header.bloom) | receipt.bloom),
        gas_used=receipt.gas_used,
        state_root=state_root,
    )

    return new_header, receipt, computation
async def preview_transactions(self,
                               header: BlockHeader,
                               transactions: Tuple[BaseTransaction, ...],
                               parent_state_root: Hash32,
                               lagging: bool = True) -> None:

    if not self.manager.is_running:
        # If the service is shutting down, we can ignore preview requests
        return

    self.manager.run_task(self._preview_address_load, header, parent_state_root, transactions)

    # This is a hack, so that preview executions can load ancestor block-hashes
    self._db[header.hash] = rlp.encode(header)

    # Always broadcast, to start previewing transactions that are further ahead in the block
    old_state_header = header.copy(state_root=parent_state_root)
    self._event_bus.broadcast_nowait(
        DoStatelessBlockPreview(old_state_header, transactions),
        FIRE_AND_FORGET_BROADCASTING,
    )

    self._backfiller.set_root_hash(parent_state_root)
MAINNET_VM_CONFIGURATION = tuple(zip(MAINNET_FORK_BLOCKS, MAINNET_VMS))


class BaseMainnetChain:
    chain_id = MAINNET_CHAIN_ID
    vm_configuration = MAINNET_VM_CONFIGURATION  # type: Tuple[Tuple[int, Type[BaseVM]], ...]


class MainnetChain(BaseMainnetChain, Chain):
    pass


MAINNET_GENESIS_HEADER = BlockHeader(
    difficulty=eth_constants.GENESIS_DIFFICULTY,
    extra_data=decode_hex("0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa"),
    gas_limit=eth_constants.GENESIS_GAS_LIMIT,
    gas_used=0,
    bloom=0,
    mix_hash=eth_constants.ZERO_HASH32,
    nonce=eth_constants.GENESIS_NONCE,
    block_number=0,
    parent_hash=eth_constants.ZERO_HASH32,
    receipt_root=eth_constants.BLANK_ROOT_HASH,
    uncles_hash=eth_constants.EMPTY_UNCLE_HASH,
    state_root=decode_hex("0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"),
    timestamp=0,
    transaction_root=eth_constants.BLANK_ROOT_HASH,
)
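# Sanity-check sketch (not part of the original module): the header above should
# hash to the well-known mainnet genesis block hash, assuming the BlockHeader
# fields follow the Frontier layout. `MAINNET_GENESIS_HASH` is introduced here
# for illustration only.
from eth_utils import encode_hex

MAINNET_GENESIS_HASH = '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3'
assert encode_hex(MAINNET_GENESIS_HEADER.hash) == MAINNET_GENESIS_HASH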
from eth.vm.message import (
    Message,
)

NORMALIZED_ADDRESS_A = "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6"
NORMALIZED_ADDRESS_B = "0xcd1722f3947def4cf144679da39c4c32bdc35681"
ADDRESS_WITH_CODE = ("0xddd722f3947def4cf144679da39c4c32bdc35681", b'pseudocode')
EMPTY_ADDRESS_IN_STATE = NORMALIZED_ADDRESS_A
ADDRESS_NOT_IN_STATE = NORMALIZED_ADDRESS_B
CANONICAL_ADDRESS_A = to_canonical_address("0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6")
CANONICAL_ADDRESS_B = to_canonical_address("0xcd1722f3947def4cf144679da39c4c32bdc35681")

GENESIS_HEADER = BlockHeader(
    difficulty=constants.GENESIS_DIFFICULTY,
    block_number=constants.GENESIS_BLOCK_NUMBER,
    gas_limit=constants.GENESIS_GAS_LIMIT,
)


def prepare_computation(vm_class):
    message = Message(
        to=CANONICAL_ADDRESS_A,
        sender=CANONICAL_ADDRESS_B,
        value=100,
        data=b'',
        code=b'',
        gas=800,
    )
class ETC_VM(MainnetHomesteadVM):
    support_dao_fork = False


# Ethereum mainnet headers, from two headers before to ten headers after the fork:
ETH_HEADERS_NEAR_FORK = [
    BlockHeader(
        difficulty=62352470509925,
        block_number=1919998,
        gas_limit=4712388,
        timestamp=1469020835,
        coinbase=b'\xbc\xdf\xc3[\x86\xbe\xdfr\xf0\xcd\xa0F\xa3\xc1h)\xa2\xefA\xd1',
        parent_hash=b'\xe7\xe3\xe8+\xf3C\xbe\xf9\xa2R\xb8\x7f\x06r\x9adZop\x9b.RK\x9e\xf4\xf9;\xb9\xf2]S\x8d',  # noqa: E501
        uncles_hash=b'\x1d\xccM\xe8\xde\xc7]z\xab\x85\xb5g\xb6\xcc\xd4\x1a\xd3\x12E\x1b\x94\x8at\x13\xf0\xa1B\xfd@\xd4\x93G',  # noqa: E501
        state_root=b'\x1f!\x88?4\xde&\x93\xb4\xadGD\xc26a\xdbd\xca\xcb=\xa2\x1dr \xceW\xb97d\xb3\xbb\xfe',  # noqa: E501
        transaction_root=b'\xf2n\xb9\x94\x0e\xbb\xe8\x0c\xc3\xab\xbc\x9ev\xe9\xb7\xb1\x0f\xbcG\xc0\xd2\x12\xf9\x81\xa6q/\xf7\xf4\x97\xd3\xb4',  # noqa: E501
        receipt_root=b'D\xda\xa2\x9c4?\xa0/\xe8\x8fH\xf8?z\xc2\x1e\xfa\xc8j\xb0w8\r\xed\x81[(n\xd2jx\x1f',  # noqa: E501
        bloom=0,
        gas_used=420000,
        extra_data=b'\xd7\x83\x01\x04\n\x84Geth\x87go1.6.2\x85linux',
        mix_hash=b'\x8d\x03\xe0$?1\xa6\xcd\x11\x04E\x1f\xfc\x10#[\x04\x16N\xbe[\xd4u-\xa6\xb54t\x8d\x87}\x9f',  # noqa: E501
        nonce=b'a\xd8\xc5\xdf\xfd\x0e\xb2v',
    ),
    BlockHeader(
        difficulty=62382916183238,
        block_number=1919999,
        gas_limit=4707788,
        timestamp=1469020838,
        coinbase=b'*e\xac\xa4\xd5\xfc[\\\x85\x90\x90\xa6\xc3M\x16A59\x82&',
        parent_hash=b'P_\xfd!\xf4\xcb\xf2\xc5\xc3O\xa8L\xd8\xc9%%\xf3\xa7\x19\xb7\xad\x18\x85+\xff\xdd\xad`\x105\xf5\xf4',  # noqa: E501