def test_chaindb_get_score(chaindb):
    """Scores are stored per header hash and accumulate parent difficulty."""
    genesis = BlockHeader(difficulty=1, block_number=0, gas_limit=0)
    chaindb.persist_header(genesis)

    # The raw database entry for genesis must decode to its difficulty.
    genesis_key = SchemaV1.make_block_hash_to_score_lookup_key(genesis.hash)
    raw_genesis_score = rlp.decode(
        chaindb.db.get(genesis_key), sedes=rlp.sedes.big_endian_int)
    assert raw_genesis_score == 1
    assert chaindb.get_score(genesis.hash) == 1

    child = BlockHeader(
        difficulty=10,
        block_number=1,
        gas_limit=0,
        parent_hash=genesis.hash,
        timestamp=genesis.timestamp + 1,
    )
    chaindb.persist_header(child)

    # Child score is cumulative: 1 (genesis) + 10 (child) == 11.
    child_key = SchemaV1.make_block_hash_to_score_lookup_key(child.hash)
    raw_child_score = rlp.decode(
        chaindb.db.get(child_key), sedes=rlp.sedes.big_endian_int)
    assert raw_child_score == 11
    assert chaindb.get_score(child.hash) == 11
def _decanonicalize_descendant_orphans(
        cls,
        db: DatabaseAPI,
        header: BlockHeaderAPI,
        checkpoints: Tuple[Hash32, ...]) -> None:
    """
    Walk the canonical block numbers above ``header`` and de-canonicalize every
    contiguous child that does not descend from ``header``, re-opening gaps for
    each removed number.

    :raises CheckpointsMustBeCanonical: if any block that would be
        de-canonicalized is one of the given ``checkpoints``.
    """
    # Determine if any children need to be de-canonicalized because they are not children of
    # the new chain head
    new_gaps = starting_gaps = cls._get_header_chain_gaps(db)

    child_number = BlockNumber(header.block_number + 1)
    try:
        child = cls._get_canonical_block_header_by_number(db, child_number)
    except HeaderNotFound:
        # There is no canonical block here
        next_invalid_child = None
    else:
        if child.parent_hash != header.hash:
            if child.hash in checkpoints:
                raise CheckpointsMustBeCanonical(
                    f"Trying to decanonicalize {child} while making {header} the chain tip"
                )
            else:
                next_invalid_child = child
        else:
            # The immediate child already descends from the new tip; nothing to do.
            next_invalid_child = None

    while next_invalid_child:
        # decanonicalize, and add gap for tracking
        db.delete(SchemaV1.make_block_number_to_hash_lookup_key(child_number))
        new_gaps = reopen_gap(child_number, new_gaps)

        # find next child
        child_number = BlockNumber(child_number + 1)
        try:
            # All contiguous children must now be made invalid
            next_invalid_child = cls._get_canonical_block_header_by_number(db, child_number)
        except HeaderNotFound:
            # Found the end of this streak of canonical blocks
            break
        else:
            if next_invalid_child.hash in checkpoints:
                raise CheckpointsMustBeCanonical(
                    f"Trying to decanonicalize {next_invalid_child} while making {header} the"
                    " chain tip"
                )

    # Persist the gap tracking only if it actually changed.
    if new_gaps != starting_gaps:
        db.set(
            SchemaV1.make_header_chain_gaps_lookup_key(),
            rlp.encode(new_gaps, sedes=chain_gaps)
        )
def _set_as_canonical_chain_head(self, block_hash: Hash32
                                 ) -> Tuple[Tuple[BlockHeader, ...], Tuple[BlockHeader, ...]]:
    """
    Set the canonical chain HEAD to the header identified by ``block_hash``,
    removing transaction lookups for blocks knocked off the canonical chain.

    :returns: a tuple of (headers newly on the canonical chain,
        headers no longer on the canonical chain)
    :raises ValueError: if ``block_hash`` does not identify a known header
    """
    try:
        header = self.get_block_header_by_hash(block_hash)
    except HeaderNotFound:
        # Fix: the message previously formatted ``header.hash``, but ``header``
        # is unbound when the lookup fails, so an UnboundLocalError masked the
        # intended ValueError. Use the known ``block_hash`` instead.
        raise ValueError("Cannot use unknown block hash as canonical head: {}".format(
            block_hash))

    new_canonical_headers = tuple(reversed(self._find_new_ancestors(header)))
    old_canonical_headers = []

    # remove transaction lookups for blocks that are no longer canonical
    for h in new_canonical_headers:
        try:
            old_hash = self.get_canonical_block_hash(h.block_number)
        except HeaderNotFound:
            # no old block, and no more possible
            break
        else:
            old_header = self.get_block_header_by_hash(old_hash)
            old_canonical_headers.append(old_header)
            for transaction_hash in self.get_block_transaction_hashes(old_header):
                self._remove_transaction_from_canonical_chain(transaction_hash)

    for h in new_canonical_headers:
        self._add_block_number_to_hash_lookup(h)

    self.db.set(SchemaV1.make_canonical_head_hash_lookup_key(), header.hash)

    return new_canonical_headers, tuple(old_canonical_headers)
def test_chaindb_add_block_number_to_hash_lookup(chaindb, block):
    """Persisting a block creates its number->hash lookup and fills the chain gap."""
    lookup_key = SchemaV1.make_block_number_to_hash_lookup_key(block.number)

    # Before persisting: no lookup entry and pristine genesis gap tracking.
    assert not chaindb.exists(lookup_key)
    assert chaindb.get_chain_gaps() == GENESIS_CHAIN_GAPS

    chaindb.persist_block(block)
    assert chaindb.exists(lookup_key)
def _decanonicalize_single(cls, db: DatabaseAPI, block_num: BlockNumber,
                           base_gaps: ChainGaps) -> ChainGaps:
    """
    A single block number was found to no longer be canonical. At
    doc-time, this only happens because it does not link up with a
    checkpoint header. So de-canonicalize this block number and insert
    a gap in the tracked chain gaps.
    """
    db.delete(SchemaV1.make_block_number_to_hash_lookup_key(block_num))
    updated_gaps = reopen_gap(block_num, base_gaps)

    # Only write the gap tracking back if it actually changed.
    if updated_gaps != base_gaps:
        encoded_gaps = rlp.encode(updated_gaps, sedes=chain_gaps)
        db.set(SchemaV1.make_header_chain_gaps_lookup_key(), encoded_gaps)

    return updated_gaps
def _get_score(db: BaseDB, block_hash: Hash32) -> int:
    """
    Return the cumulative-difficulty score stored for ``block_hash``.

    :raises HeaderNotFound: if no score entry exists for the hash
    """
    score_key = SchemaV1.make_block_hash_to_score_lookup_key(block_hash)
    try:
        encoded_score = db[score_key]
    except KeyError:
        raise HeaderNotFound("No header with hash {0} found".format(
            encode_hex(block_hash)))
    return rlp.decode(encoded_score, sedes=rlp.sedes.big_endian_int)
def _set_as_canonical_chain_head(cls, db: BaseDB, block_hash: Hash32
                                 ) -> Tuple[Tuple[BlockHeader, ...], Tuple[BlockHeader, ...]]:
    """
    Sets the canonical chain HEAD to the block header as specified by the
    given block hash.

    :return: a tuple of the headers that are newly in the canonical chain, and the headers that
        are no longer in the canonical chain
    """
    try:
        header = cls._get_block_header_by_hash(db, block_hash)
    except HeaderNotFound:
        raise ValueError(
            "Cannot use unknown block hash as canonical head: {}".format(block_hash)
        )

    new_canonical_headers = tuple(reversed(cls._find_new_ancestors(db, header)))

    # Collect the headers these new entries displace; stop at the first block
    # number that had no previous canonical entry.
    displaced_headers = []
    for new_header in new_canonical_headers:
        try:
            displaced_hash = cls._get_canonical_block_hash(db, new_header.block_number)
        except HeaderNotFound:
            break
        displaced_headers.append(cls._get_block_header_by_hash(db, displaced_hash))

    for new_header in new_canonical_headers:
        cls._add_block_number_to_hash_lookup(db, new_header)

    db.set(SchemaV1.make_canonical_head_hash_lookup_key(), header.hash)

    return new_canonical_headers, tuple(displaced_headers)
def _update_header_chain_gaps(cls, db: DatabaseAPI, persisting_header: BlockHeaderAPI,
                              base_gaps: ChainGaps = None) -> GapInfo:
    """
    Track header-chain gap changes for ``persisting_header``, additionally
    re-opening a block gap when a previously canonical header at the same
    height is replaced by a different header (a de-canonicalized uncle).
    """
    # The only reason we overwrite this here is to be able to detect when the HeaderDB
    # de-canonicalizes an uncle that should cause us to re-open a block gap.
    gap_change, gaps = super()._update_header_chain_gaps(
        db, persisting_header, base_gaps)

    # If the header gap tracking changed (or this is genesis), there is no
    # pre-existing header at this height to worry about.
    if gap_change is not GapChange.NoChange or persisting_header.block_number == 0:
        return gap_change, gaps

    # We have written a header for which block number we've already had a header.
    # This might be a sign of a de-canonicalized uncle.
    current_gaps = cls._get_chain_gaps(db)
    if not is_block_number_in_gap(persisting_header.block_number, current_gaps):
        # ChainDB believes we have that block. If the header has changed, we need to re-open
        # a gap for the corresponding block.
        old_canonical_header = cls._get_canonical_block_header_by_number(
            db, persisting_header.block_number)
        if old_canonical_header != persisting_header:
            updated_gaps = reopen_gap(persisting_header.block_number, current_gaps)
            db.set(SchemaV1.make_chain_gaps_lookup_key(),
                   rlp.encode(updated_gaps, sedes=chain_gaps))

    return gap_change, gaps
def _set_as_canonical_chain_head(cls, db: BaseDB, block_hash: Hash32,
                                 ) -> Tuple[Tuple[BlockHeader, ...], Tuple[BlockHeader, ...]]:
    """
    Set the canonical chain HEAD to the header identified by ``block_hash``,
    removing transaction lookups for blocks knocked off the canonical chain.

    :return: a tuple of (headers newly on the canonical chain,
        headers no longer on the canonical chain)
    :raise ValueError: if ``block_hash`` does not identify a known header
    """
    try:
        header = cls._get_block_header_by_hash(db, block_hash)
    except HeaderNotFound:
        # Fix: the message previously formatted ``header.hash``, but ``header``
        # is unbound when the lookup fails, so an UnboundLocalError masked the
        # intended ValueError. Use the known ``block_hash`` instead.
        raise ValueError("Cannot use unknown block hash as canonical head: {}".format(
            block_hash))

    new_canonical_headers = tuple(reversed(cls._find_new_ancestors(db, header)))
    old_canonical_headers = []

    # remove transaction lookups for blocks that are no longer canonical
    for h in new_canonical_headers:
        try:
            old_hash = cls._get_canonical_block_hash(db, h.block_number)
        except HeaderNotFound:
            # no old block, and no more possible
            break
        else:
            old_header = cls._get_block_header_by_hash(db, old_hash)
            old_canonical_headers.append(old_header)
            for transaction_hash in cls._get_block_transaction_hashes(db, old_header):
                cls._remove_transaction_from_canonical_chain(db, transaction_hash)

    for h in new_canonical_headers:
        cls._add_block_number_to_hash_lookup(db, h)

    db.set(SchemaV1.make_canonical_head_hash_lookup_key(), header.hash)

    return new_canonical_headers, tuple(old_canonical_headers)
def _persist_header_chain(
        cls,
        db: BaseDB,
        headers: Iterable[BlockHeader]
) -> Tuple[Tuple[BlockHeader, ...], Tuple[BlockHeader, ...]]:
    """
    Persist a contiguous run of headers, accumulating scores, and promote the
    tail to canonical head if its score beats the current head's.

    :return: (headers newly canonical, headers no longer canonical)
    :raises ValidationError: if consecutive headers do not link parent->child
    :raises ParentNotFound: if the first header's parent is unknown (non-genesis)
    """
    try:
        first_header = first(headers)
    except StopIteration:
        # Empty input: nothing persisted, nothing changed.
        return tuple(), tuple()
    else:
        # NOTE(review): ``first(headers)`` followed by re-iterating ``headers``
        # (both in sliding_window and the main loop) assumes ``headers`` is a
        # re-iterable sequence, not a one-shot iterator — confirm at call sites.
        for parent, child in sliding_window(2, headers):
            if parent.hash != child.parent_hash:
                raise ValidationError(
                    "Non-contiguous chain. Expected {} to have {} as parent but was {}"
                    .format(
                        encode_hex(child.hash),
                        encode_hex(parent.hash),
                        encode_hex(child.parent_hash),
                    ))

        is_genesis = first_header.parent_hash == GENESIS_PARENT_HASH
        if not is_genesis and not cls._header_exists(
                db, first_header.parent_hash):
            raise ParentNotFound(
                "Cannot persist block header ({}) with unknown parent ({})"
                .format(encode_hex(first_header.hash),
                        encode_hex(first_header.parent_hash)))

        # Seed the running score with the parent's score (0 for genesis).
        score = 0 if is_genesis else cls._get_score(
            db, first_header.parent_hash)

        for header in headers:
            db.set(
                header.hash,
                rlp.encode(header),
            )
            # Cumulative difficulty is stored per header hash.
            score += header.difficulty
            db.set(
                SchemaV1.make_block_hash_to_score_lookup_key(header.hash),
                rlp.encode(score, sedes=rlp.sedes.big_endian_int),
            )

        try:
            previous_canonical_head = cls._get_canonical_head(db).hash
            head_score = cls._get_score(db, previous_canonical_head)
        except CanonicalHeadNotFound:
            # No head yet: the last persisted header becomes the head.
            (new_canonical_headers,
             old_canonical_headers) = cls._set_as_canonical_chain_head(
                db, header.hash)
        else:
            if score > head_score:
                (new_canonical_headers,
                 old_canonical_headers) = cls._set_as_canonical_chain_head(
                    db, header.hash)
            else:
                # Existing head still has the better score; canonical chain unchanged.
                new_canonical_headers = tuple()
                old_canonical_headers = tuple()

        return new_canonical_headers, old_canonical_headers
def _get_canonical_head(cls, db: BaseDB) -> BlockHeader:
    """
    Return the header at the canonical chain head.

    :raises CanonicalHeadNotFound: if no head has been set
    """
    head_key = SchemaV1.make_canonical_head_hash_lookup_key()
    try:
        canonical_head_hash = db[head_key]
    except KeyError:
        raise CanonicalHeadNotFound("No canonical head set for this chain")
    return cls._get_block_header_by_hash(db, canonical_head_hash)
def _get_chain_gaps(cls, db: DatabaseAPI) -> ChainGaps:
    """Return the persisted chain-gap tracking, or the genesis default if unset."""
    try:
        encoded = db[SchemaV1.make_chain_gaps_lookup_key()]
    except KeyError:
        # Nothing stored yet: fall back to the pristine genesis gap state.
        return GENESIS_CHAIN_GAPS
    return rlp.decode(encoded, sedes=chain_gaps)
def test_chaindb_persist_block(chaindb, block):
    """Persisting a block stores a score entry keyed by the block hash."""
    block = block.copy(header=set_empty_root(chaindb, block.header))
    # This is the hash->score lookup key (the original local name was misleading).
    score_key = SchemaV1.make_block_hash_to_score_lookup_key(block.hash)
    assert not chaindb.exists(score_key)
    chaindb.persist_block(block)
    assert chaindb.exists(score_key)
def _set_as_canonical_chain_head(
        self, block_hash: Hash32) -> Tuple[BlockHeader, ...]:
    """
    Sets the canonical chain HEAD to the block header as specified by the
    given block hash.

    Returns iterable of headers newly on the canonical head
    """
    try:
        new_head = self.get_block_header_by_hash(block_hash)
    except HeaderNotFound:
        raise ValueError(
            "Cannot use unknown block hash as canonical head: {}".format(
                block_hash))

    newly_canonical = tuple(reversed(self._find_new_ancestors(new_head)))
    for canonical_header in newly_canonical:
        self._add_block_number_to_hash_lookup(canonical_header)
    self.db.set(SchemaV1.make_canonical_head_hash_lookup_key(), new_head.hash)
    return newly_canonical
def _get_checkpoints(cls, db: DatabaseAPI) -> Tuple[Hash32, ...]:
    """Return all recorded checkpoint hashes (stored as one concatenated blob)."""
    raw_checkpoints = db.get(SchemaV1.make_checkpoint_headers_key())
    if raw_checkpoints is None:
        return ()
    # Each checkpoint hash occupies exactly 32 bytes of the blob.
    return tuple(
        Hash32(raw_checkpoints[offset:offset + 32])
        for offset in range(0, len(raw_checkpoints), 32)
    )
def _remove_transaction_from_canonical_chain(
        db: BaseDB, transaction_hash: Hash32) -> None:
    """
    Removes the transaction specified by the given hash from the canonical
    chain.
    """
    lookup_key = SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash)
    db.delete(lookup_key)
def get_canonical_head(self) -> BlockHeader:
    """
    Returns the current block header at the head of the chain.
    """
    head_key = SchemaV1.make_canonical_head_hash_lookup_key()
    try:
        head_hash = self.db[head_key]
    except KeyError:
        raise CanonicalHeadNotFound("No canonical head set for this chain")
    return self.get_block_header_by_hash(head_hash)
def test_chaindb_persist_header(chaindb, header):
    """An unknown header raises; after persisting it is retrievable with a score entry."""
    with pytest.raises(HeaderNotFound):
        chaindb.get_block_header_by_hash(header.hash)

    # This is the hash->score lookup key (the original local name was misleading).
    score_key = SchemaV1.make_block_hash_to_score_lookup_key(header.hash)
    assert not chaindb.exists(score_key)

    chaindb.persist_header(header)
    assert chaindb.get_block_header_by_hash(header.hash) == header
    assert chaindb.exists(score_key)
def _set_hash_scores_to_db(cls, db: DatabaseAPI, header: BlockHeaderAPI, score: int) -> int:
    """
    Store the cumulative score for ``header`` (parent ``score`` plus this
    header's difficulty) under its hash->score key, and return it.
    """
    updated_score = score + header.difficulty
    encoded_score = rlp.encode(updated_score, sedes=rlp.sedes.big_endian_int)
    db.set(SchemaV1.make_block_hash_to_score_lookup_key(header.hash), encoded_score)
    return updated_score
def get_transaction_index(self, transaction_hash: Hash32) -> Tuple[BlockNumber, int]:
    """
    Return the (block number, index) locating ``transaction_hash`` in the
    canonical chain.

    :raises TransactionNotFound: if no canonical lookup entry exists
    """
    lookup_key = SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash)
    try:
        encoded_location = self.db[lookup_key]
    except KeyError:
        raise TransactionNotFound(
            f"Transaction {encode_hex(transaction_hash)} not found in canonical chain"
        )
    location = rlp.decode(encoded_location, sedes=TransactionKey)
    return (location.block_number, location.index)
def _add_block_number_to_hash_lookup(self, header: BlockHeader) -> None:
    """
    Sets a record in the database to allow looking up this header by its
    block number.
    """
    lookup_key = SchemaV1.make_block_number_to_hash_lookup_key(header.block_number)
    encoded_hash = rlp.encode(header.hash, sedes=rlp.sedes.binary)
    self.db.set(lookup_key, encoded_hash)
def _get_canonical_block_hash(db: BaseDB, block_number: BlockNumber) -> Hash32:
    """
    Return the canonical block hash stored for ``block_number``.

    :raises HeaderNotFound: if no canonical entry exists at that height
    """
    validate_block_number(block_number)
    lookup_key = SchemaV1.make_block_number_to_hash_lookup_key(block_number)
    try:
        encoded_hash = db[lookup_key]
    except KeyError:
        raise HeaderNotFound(
            "No canonical header for block number #{0}".format(block_number)
        )
    return rlp.decode(encoded_hash, sedes=rlp.sedes.binary)
def _persist_checkpoint_header( cls, db: DatabaseAPI, header: BlockHeaderAPI, score: int ) -> None: db.set( header.hash, rlp.encode(header), ) # Add new checkpoint header previous_checkpoints = cls._get_checkpoints(db) new_checkpoints = previous_checkpoints + (header.hash,) db.set( SchemaV1.make_checkpoint_headers_key(), b''.join(new_checkpoints), ) previous_score = score - header.difficulty cls._set_hash_scores_to_db(db, header, previous_score) cls._set_as_canonical_chain_head(db, header, GENESIS_PARENT_HASH) _, gaps = cls._update_header_chain_gaps(db, header) # check if the parent block number exists, and is not a match for checkpoint.parent_hash parent_block_num = BlockNumber(header.block_number - 1) try: parent_hash = cls._get_canonical_block_hash(db, parent_block_num) except HeaderNotFound: # no parent to check pass else: # User is asserting that the checkpoint must be canonical, so if the parent doesn't # match, then the parent must not be canonical, and should be de-canonicalized. if parent_hash != header.parent_hash: # does the correct header exist in the database? try: true_parent = cls._get_block_header_by_hash(db, header.parent_hash) except HeaderNotFound: # True parent unavailable, just delete the now non-canonical one cls._decanonicalize_single(db, parent_block_num, gaps) else: # True parent should have already been canonicalized during # _set_as_canonical_chain_head() raise ValidationError( f"Why was a non-matching parent header {parent_hash!r} left as canonical " f"after _set_as_canonical_chain_head() and {true_parent} is available?" ) cls._decanonicalize_descendant_orphans(db, header, new_checkpoints)
def _update_chain_gaps(cls, db: DatabaseAPI, persisted_block: BlockAPI,
                       base_gaps: ChainGaps = None) -> GapInfo:
    """
    Fill the chain-gap tracking for ``persisted_block`` and persist the result
    if anything changed, returning the (change kind, updated gaps) pair.

    Callers making many sequential updates may pass the previously returned
    gaps as ``base_gaps`` to avoid reloading them from the database each time.
    """
    effective_base = cls._get_chain_gaps(db) if base_gaps is None else base_gaps
    gap_change, gaps = fill_gap(persisted_block.number, effective_base)

    if gap_change is not GapChange.NoChange:
        encoded_gaps = rlp.encode(gaps, sedes=chain_gaps)
        db.set(SchemaV1.make_chain_gaps_lookup_key(), encoded_gaps)

    return gap_change, gaps
def _add_transaction_to_canonical_chain(self, transaction_hash: Hash32,
                                        block_header: BlockHeader, index: int) -> None:
    """
    :param bytes transaction_hash: the hash of the transaction to add the
        lookup for
    :param block_header: The header of the block with the txn that is in the
        canonical chain
    :param int index: the position of the transaction in the block

    - add lookup from transaction hash to the block number and index that the
      body is stored at
    - remove transaction hash to body lookup in the pending pool
    """
    lookup_key = SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash)
    encoded_location = rlp.encode(TransactionKey(block_header.block_number, index))
    self.db.set(lookup_key, encoded_location)
def get_transaction_index(self, transaction_hash: Hash32) -> Tuple[BlockNumber, int]:
    """
    Returns a 2-tuple of (block_number, transaction_index) indicating which
    block the given transaction can be found in and at what index in the
    block transactions.

    Raises TransactionNotFound if the transaction_hash is not found in the
    canonical chain.
    """
    lookup_key = SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash)
    try:
        encoded_location = self.db[lookup_key]
    except KeyError:
        raise TransactionNotFound(
            "Transaction {} not found in canonical chain".format(encode_hex(transaction_hash)))
    location = rlp.decode(encoded_location, sedes=TransactionKey)
    return (location.block_number, location.index)
def persist_header(
        self,
        header: BlockHeader
) -> Tuple[Tuple[BlockHeader, ...], Tuple[BlockHeader, ...]]:
    """
    Persist ``header`` with its cumulative score, promoting it to canonical
    head when its score beats the current head's (or when no head exists).

    :returns: iterable of headers newly on the canonical chain
    """
    is_genesis = header.parent_hash == GENESIS_PARENT_HASH
    if not is_genesis:
        try:
            self.get_block_header_by_hash(header.parent_hash)
        except HeaderNotFound:
            raise ParentNotFound(
                "Cannot persist block header ({}) with unknown parent ({})"
                .format(encode_hex(header.hash), encode_hex(header.parent_hash)))

    self.db.set(
        header.hash,
        rlp.encode(header),
    )

    # Cumulative score: parent's score plus this header's difficulty.
    if is_genesis:
        score = header.difficulty
    else:
        score = self.get_score(header.parent_hash) + header.difficulty
    self.db.set(
        SchemaV1.make_block_hash_to_score_lookup_key(header.hash),
        rlp.encode(score, sedes=rlp.sedes.big_endian_int),
    )

    try:
        head_score = self.get_score(self.get_canonical_head().hash)
    except CanonicalHeadNotFound:
        # No head yet: this header becomes the canonical head unconditionally.
        should_promote = True
    else:
        should_promote = score > head_score

    if should_promote:
        return self._set_as_canonical_chain_head(header.hash)
    return tuple(), tuple()
def _set_as_canonical_chain_head(
        cls,
        db: DatabaseAPI,
        block_hash: Hash32,
) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:  # noqa: E501
    """
    Set the canonical chain HEAD to the header identified by ``block_hash``,
    removing transaction lookups for displaced blocks (tolerating blocks whose
    transactions were never stored).

    :return: a tuple of (headers newly on the canonical chain,
        headers no longer on the canonical chain)
    :raise ValueError: if ``block_hash`` does not identify a known header
    """
    try:
        header = cls._get_block_header_by_hash(db, block_hash)
    except HeaderNotFound:
        # Fix: the message previously formatted ``header.hash``, but ``header``
        # is unbound when the lookup fails, so an UnboundLocalError masked the
        # intended ValueError. Use the known ``block_hash`` instead.
        raise ValueError(
            "Cannot use unknown block hash as canonical head: {}".format(
                block_hash))

    new_canonical_headers = tuple(
        reversed(cls._find_new_ancestors(db, header)))
    old_canonical_headers = []

    # remove transaction lookups for blocks that are no longer canonical
    for h in new_canonical_headers:
        try:
            old_hash = cls._get_canonical_block_hash(db, h.block_number)
        except HeaderNotFound:
            # no old block, and no more possible
            break
        else:
            old_header = cls._get_block_header_by_hash(db, old_hash)
            old_canonical_headers.append(old_header)
            try:
                for transaction_hash in cls._get_block_transaction_hashes(
                        db, old_header):
                    cls._remove_transaction_from_canonical_chain(
                        db, transaction_hash)
            except MissingTrieNode:
                # If the transactions were never stored for the (now) non-canonical chain,
                # then you don't need to remove them from the canonical chain lookup.
                pass

    for h in new_canonical_headers:
        cls._add_block_number_to_hash_lookup(db, h)

    db.set(SchemaV1.make_canonical_head_hash_lookup_key(), header.hash)

    return new_canonical_headers, tuple(old_canonical_headers)
def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32:
    """
    Returns the block hash for the canonical block at the given number.

    Raises BlockNotFound if there's no block header with the given number in
    the canonical chain.
    """
    validate_block_number(block_number, title="Block Number")
    lookup_key = SchemaV1.make_block_number_to_hash_lookup_key(block_number)
    try:
        encoded_hash = self.db[lookup_key]
    except KeyError:
        raise HeaderNotFound(
            "No canonical header for block number #{0}".format(block_number))
    return rlp.decode(encoded_hash, sedes=rlp.sedes.binary)
def _set_as_canonical_chain_head(
        cls,
        db: DatabaseAPI,
        header: BlockHeaderAPI,
        genesis_parent_hash: Hash32,
) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:
    """
    Sets the canonical chain HEAD to the block header as specified by the
    given block hash.

    :return: a tuple of the headers that are newly in the canonical chain, and
        the headers that are no longer in the canonical chain
    :raises CheckpointsMustBeCanonical: if trying to set a head that would
        de-canonicalize a checkpoint
    """
    try:
        current_head = cls._get_canonical_head_hash(db)
    except CanonicalHeadNotFound:
        current_head = None

    new_canonical_headers: Tuple[BlockHeaderAPI, ...]
    old_canonical_headers: Tuple[BlockHeaderAPI, ...]

    is_simple_extension = bool(current_head) and header.parent_hash == current_head
    if is_simple_extension:
        # Fast path: extending the current head by exactly one header. The
        # _find_new_ancestors / _find_headers_to_decanonicalize scans are
        # relatively expensive and provably unnecessary here, so skip them.
        new_canonical_headers = (header, )
        old_canonical_headers = ()
        cls._add_block_number_to_hash_lookup(db, header)
    else:
        (
            new_canonical_headers,
            old_canonical_headers,
        ) = cls._canonicalize_header(db, header, genesis_parent_hash)

    db.set(SchemaV1.make_canonical_head_hash_lookup_key(), header.hash)
    return new_canonical_headers, old_canonical_headers