def merkleize(chunks: Sequence[Hash32]) -> Hash32:
    """
    Compute the Merkle root of ``chunks``.

    The chunks are first padded via ``pad_chunks`` (presumably to a
    power-of-two length — the ``log2`` below assumes it; confirm against
    ``pad_chunks``), then hashed pairwise layer by layer until a single
    root remains.
    """
    current_layer = pad_chunks(chunks)
    # A tree over 2**k leaves has k + 1 layers; applying hash_layer
    # (number_of_layers - 1) times reduces the leaves to a single root.
    number_of_layers = int(math.log2(len(current_layer))) + 1
    for _ in range(number_of_layers - 1):
        current_layer = hash_layer(current_layer)
    (root,) = current_layer
    return root
def _find_oldest_unpruned_task_id(self, finished_task_id: TTaskID) -> TTaskID:
    """
    Follow dependency links upward from ``finished_task_id`` exactly
    ``self._max_depth`` times and return the task id reached.
    """
    task_id = finished_task_id
    for _ in range(self._max_depth):
        # Look up the tracked task entry, follow its dependency link,
        # then validate that the resulting id is itself tracked.
        tracked = self._tasks.get(task_id)
        task_id = self._dependency_of(tracked.task)
        self._validate_has_task(task_id)
    return task_id
def get_ancestors(self, limit: int, header: BlockHeaderAPI) -> Tuple[BlockAPI, ...]:
    """
    Return up to ``limit`` ancestors of ``header``, closest ancestor
    first, capped by the header's own block number.
    """
    ancestor_count = min(header.block_number, limit)

    # Wrap the header in a throwaway block so the walk below can treat
    # the starting point uniformly with every fetched ancestor.
    vm_class = self.get_vm_class_for_block_number(header.block_number)
    block_class = vm_class.get_block_class()
    current = block_class(header=header, uncles=[], transactions=[])

    ancestors = []
    for _ in range(ancestor_count):
        # Each step fetches the parent of the block we are currently at.
        current = self.get_block_by_hash(current.header.parent_hash)
        ancestors.append(current)
    return tuple(ancestors)
def set_chunk_in_tree(hash_tree: RawHashTree, index: int, chunk: Hash32) -> RawHashTree:
    """
    Return a copy of ``hash_tree`` with the chunk at ``index`` replaced
    and every hash on the path from that chunk to the top recomputed.
    """
    # Write the new chunk into layer 0 of the (persistent) tree.
    updated_tree = hash_tree.transform((0, index), chunk)

    # Walk upward: the affected hash in layer i sits at index // 2**i.
    hash_index = index
    for layer_index in range(1, len(hash_tree)):
        hash_index //= 2
        updated_tree = recompute_hash_in_tree(
            updated_tree,
            layer_index=layer_index,
            hash_index=hash_index,
        )

    top_layer = updated_tree[-1]
    if len(top_layer) == 1:
        return updated_tree
    elif len(top_layer) == 2:
        # NOTE(review): a two-element top layer appears to mean the root
        # lives one layer above the stored ones — confirm against
        # recompute_hash_in_tree.
        return recompute_hash_in_tree(updated_tree, len(hash_tree), 0)
    else:
        raise Exception("Unreachable")
from eth_typing import Hash32
from eth_utils.toolz import iterate, take

from ssz.hash import hash_eth2

CHUNK_SIZE = 32  # named BYTES_PER_CHUNK in the spec
# A chunk consisting entirely of zero bytes, used as padding.
EMPTY_CHUNK = Hash32(b"\x00" * CHUNK_SIZE)

SIGNATURE_FIELD_NAME = "signature"

# number of bytes for a serialized offset
OFFSET_SIZE = 4

FIELDS_META_ATTR = "fields"

ZERO_BYTES32 = Hash32(b"\x00" * 32)
# Number of precomputed zero-subtree hashes below.
MAX_ZERO_HASHES_LAYER = 100
# ZERO_HASHES[i] is the root of an all-zero subtree of depth i:
# ZERO_HASHES[0] == ZERO_BYTES32 and
# ZERO_HASHES[i + 1] == hash_eth2(ZERO_HASHES[i] + ZERO_HASHES[i]).
ZERO_HASHES = tuple(
    take(
        MAX_ZERO_HASHES_LAYER,
        iterate(lambda child: hash_eth2(child + child), ZERO_BYTES32),
    )
)
BASE_TYPES = (int, bytes, bool)
from eth_typing import (
    Hash32,
)

from .common import (  # noqa: F401
    _calc_parent_hash,
    _hash_layer,
    get_branch_indices,
    get_merkle_proof,
    get_root,
    MerkleTree,
    MerkleProof,
)

if TYPE_CHECKING:
    from typing import Tuple  # noqa: F401

# Fixed depth of the Merkle tree this module operates on.
TreeDepth = 32

# EmptyNodeHashes[i] is the hash of an all-zero subtree of height i:
# level 0 is 32 zero bytes, level i + 1 hashes two copies of level i.
EmptyNodeHashes = tuple(
    take(
        TreeDepth,
        iterate(lambda node_hash: hash_eth2(node_hash + node_hash), b'\x00' * 32)))


def verify_merkle_proof(root: Hash32,
                        leaf: Hash32,
                        index: int,
                        proof: MerkleProof) -> bool:
    """
    Verify that the given ``leaf`` at position ``index`` is on the merkle
    branch ``proof`` anchored at the given ``root``.
    """
    assert len(proof) == TreeDepth
    value = leaf
    for i in range(TreeDepth):
        # Bit i of the index selects whether the sibling at level i is on
        # the left or the right of the running hash.
        if index // (2**i) % 2:
            value = hash_eth2(proof[i] + value)
        else:
            value = hash_eth2(value + proof[i])
    # BUG FIX: the function is declared ``-> bool`` but previously fell off
    # the end and returned None. The recomputed root must match the given one.
    return value == root
MerkleTree, _calc_parent_hash, _hash_layer, get_branch_indices, get_merkle_proof, get_root, ) if TYPE_CHECKING: from typing import Tuple # noqa: F401 TreeDepth = 32 EmptyNodeHashes = tuple( take( TreeDepth, iterate(lambda node_hash: hash_eth2(node_hash + node_hash), b"\x00" * 32), ) ) def verify_merkle_proof( root: Hash32, leaf: Hash32, index: int, proof: MerkleProof ) -> bool: """ Verify that the given ``item`` is on the merkle branch ``proof`` starting with the given ``root``. """ assert len(proof) == TreeDepth value = leaf for i in range(TreeDepth): if index // (2 ** i) % 2: