def _mk_bytecode(opcodes, data):
    # Yield (offset, instruction) pairs, where `offset` is the byte offset of
    # each opcode within the resulting bytecode.
    index_tracker = itertools.count(0)
    for opcode in opcodes:
        opcode_as_byte = bytes((opcode,))
        if opcode in NON_PUSH_CODES:
            yield next(index_tracker), opcode_as_byte
        else:
            # PUSH1 (0x60) through PUSH32 (0x7f) carry `opcode - 0x5f` bytes of
            # immediate data, drawn here from the hypothesis `data` strategy.
            data_size = opcode - 95
            push_data = data.draw(
                st.binary(min_size=data_size, max_size=data_size))
            yield next(index_tracker), opcode_as_byte + push_data
            # Advance the offset tracker past the bytes occupied by the push
            # data so the next opcode receives the correct offset.
            for _ in range(data_size):
                next(index_tracker)
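# A minimal usage sketch, not from the original module: driving `_mk_bytecode`
# from a hypothesis test through the `data` strategy. The opcode strategy and
# the test body below are hypothetical.
#
#   from hypothesis import given, strategies as st
#
#   opcode_st = st.sampled_from(sorted(NON_PUSH_CODES)) | st.integers(min_value=0x60, max_value=0x7F)
#
#   @given(opcodes=st.lists(opcode_st), data=st.data())
#   def test_mk_bytecode_offsets(opcodes, data):
#       instructions = list(_mk_bytecode(opcodes, data))
#       bytecode = b"".join(instruction for _offset, instruction in instructions)
#       # Every instruction is at least one byte long.
#       assert len(bytecode) >= len(instructions)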
async def new_sync_headers(
        self, max_batch_size: int = None) -> AsyncIterator[Tuple[BlockHeader, ...]]:
    while True:
        next_batch = tuple(take(max_batch_size, self._headers_to_emit))
        if not next_batch:
            # Nothing queued up: block until new headers arrive.
            self._new_data.clear()
            await self._new_data.wait()
            continue
        yield next_batch
        # Drop the headers that were just emitted; whatever remains is yielded
        # in the next batch.
        self._headers_to_emit = tuple(drop(max_batch_size, self._headers_to_emit))
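# A minimal consumer sketch (hypothetical: `syncer` stands for whatever object
# exposes `new_sync_headers`, and `persist_headers` is an assumed coroutine):
#
#   async def consume_headers(syncer):
#       async for batch in syncer.new_sync_headers(max_batch_size=10):
#           await persist_headers(batch)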
def set_chunk_in_tree(hash_tree: RawHashTree, index: int, chunk: Hash32) -> RawHashTree:
    # Replace the chunk in the bottom layer (layer 0) of the tree.
    hash_tree_with_updated_chunk = hash_tree.transform((0, index), chunk)

    # Recompute the hashes on the branch from the updated chunk towards the
    # root: in each parent layer, the affected hash sits at half the index of
    # the layer below.
    parent_layer_indices = drop(1, range(len(hash_tree)))
    parent_hash_indices = drop(
        1, take(len(hash_tree), iterate(lambda index: index // 2, index)))
    update_functions = (
        partial(recompute_hash_in_tree, layer_index=layer_index, hash_index=hash_index)
        for layer_index, hash_index in zip(parent_layer_indices, parent_hash_indices)
    )
    hash_tree_with_updated_branch = pipe(hash_tree_with_updated_chunk, *update_functions)

    if len(hash_tree_with_updated_branch[-1]) == 1:
        return hash_tree_with_updated_branch
    elif len(hash_tree_with_updated_branch[-1]) == 2:
        # The topmost layer still holds two hashes, so hash them once more to
        # obtain the root layer.
        return recompute_hash_in_tree(hash_tree_with_updated_branch, len(hash_tree), 0)
    else:
        raise Exception("Unreachable")
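# A minimal usage sketch (assumptions: `hash_tree` is a RawHashTree built
# elsewhere and `new_chunk` is a 32-byte Hash32). Only the chunk and the hashes
# on the branch above it change; the result is a new tree value:
#
#   updated_tree = set_chunk_in_tree(hash_tree, 3, new_chunk)
#   root = updated_tree[-1][0]  # the root hash is expected to sit alone in the top layer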