async def get_blocks_by_hash(self, header_hashes: List[bytes32]) -> List[FullBlock]:
    """
    Fetch the full blocks for the given header hashes, returned in the same
    order in which the hashes were passed in.

    Raises:
        ValueError: if any requested header hash is not in the store.
    """
    if not header_hashes:
        return []
    # One "?" placeholder per requested hash.
    placeholders = ",".join("?" for _ in header_hashes)
    query = f"SELECT block from full_blocks WHERE header_hash in ({placeholders})"
    cursor = await self.db.execute(query, tuple(hh.hex() for hh in header_hashes))
    rows = await cursor.fetchall()
    await cursor.close()
    # Index the results by header hash so we can restore the caller's order.
    found: Dict[bytes32, FullBlock] = {}
    for (block_bytes,) in rows:
        block = FullBlock.from_bytes(block_bytes)
        found[block.header_hash] = block
    ordered: List[FullBlock] = []
    for hh in header_hashes:
        if hh not in found:
            raise ValueError(f"Header hash {hh} not in the blockchain")
        ordered.append(found[hh])
    return ordered
async def get_full_blocks_at(self, heights: List[uint32]) -> List[FullBlock]:
    """Return the full blocks stored at the given heights (database row order)."""
    if not heights:
        return []
    params = tuple(heights)
    query = f'SELECT block from full_blocks WHERE height in ({"?," * (len(params) - 1)}?)'
    cursor = await self.db.execute(query, params)
    rows = await cursor.fetchall()
    await cursor.close()
    result: List[FullBlock] = []
    for (raw_block,) in rows:
        result.append(FullBlock.from_bytes(raw_block))
    return result
async def get_full_block(self, header_hash: bytes32) -> Optional[FullBlock]:
    """
    Return the full block with the given header hash, or None if it is not stored.

    Serves from the in-memory block cache when possible and, on a cache miss,
    stores the block fetched from the database back into the cache.
    """
    cached = self.block_cache.get(header_hash)
    if cached is not None:
        return cached
    cursor = await self.db.execute("SELECT block from full_blocks WHERE header_hash=?", (header_hash.hex(),))
    row = await cursor.fetchone()
    await cursor.close()
    if row is not None:
        block = FullBlock.from_bytes(row[0])
        # Populate the cache on a miss so repeated lookups are served from
        # memory (previously the cache was only ever read here, never written,
        # unlike the logging variant of this method which does put()).
        self.block_cache.put(header_hash, block)
        return block
    return None
async def get_full_block(self, header_hash: bytes32) -> Optional[FullBlock]:
    """
    Look up a full block by header hash, serving from the in-memory cache when
    possible and caching database hits for next time. Returns None when the
    block is not stored.
    """
    block = self.block_cache.get(header_hash)
    if block is not None:
        log.debug(f"cache hit for block {header_hash.hex()}")
        return block
    log.debug(f"cache miss for block {header_hash.hex()}")
    cursor = await self.db.execute("SELECT block from full_blocks WHERE header_hash=?", (header_hash.hex(),))
    row = await cursor.fetchone()
    await cursor.close()
    if row is None:
        return None
    fetched = FullBlock.from_bytes(row[0])
    self.block_cache.put(header_hash, fetched)
    return fetched
async def get_header_blocks_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, HeaderBlock]:
    """
    Return a mapping from header hash to HeaderBlock for every block whose
    height lies in the inclusive range [start, stop].
    """
    # Use a parameterized query instead of interpolating start/stop into the
    # SQL text: avoids SQL injection / malformed queries for non-int inputs
    # and lets sqlite reuse the prepared statement.
    cursor = await self.db.execute(
        "SELECT header_hash,block from full_blocks WHERE height >= ? and height <= ?",
        (start, stop),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, HeaderBlock] = {}
    for row in rows:
        # Ugly hack, until full_block.get_block_header is rewritten as part of
        # the generator runner change: yield to the event loop each iteration
        # so this CPU-bound loop doesn't starve other tasks.
        await asyncio.sleep(0.001)
        header_hash = bytes.fromhex(row[0])
        full_block: FullBlock = FullBlock.from_bytes(row[1])
        ret[header_hash] = full_block.get_block_header()
    return ret
def persistent_blocks(
    num_of_blocks: int,
    db_name: str,
    seed: bytes = b"",
    empty_sub_slots=0,
    normalized_to_identity_cc_eos: bool = False,
    normalized_to_identity_icc_eos: bool = False,
    normalized_to_identity_cc_sp: bool = False,
    normalized_to_identity_cc_ip: bool = False,
):
    """
    Return ``num_of_blocks`` test blocks, loading them from a pickled cache
    file under ``~/.chia/blocks`` when one with the right block count exists,
    and otherwise generating (and persisting) a fresh chain via new_test_db.
    """
    # try loading from disc, if not create new blocks.db file
    # TODO hash fixtures.py and blocktool.py, add to path, delete if the files changed
    block_path_dir = Path("~/.chia/blocks").expanduser()
    file_path = block_path_dir / db_name
    # Create the cache directory and any missing parents in one call; this is
    # a no-op when it already exists and avoids the TOCTOU race (and the
    # FileExistsError on a pre-existing parent) of an exists()-then-mkdir pair.
    block_path_dir.mkdir(parents=True, exist_ok=True)
    if file_path.exists():
        try:
            bytes_list = file_path.read_bytes()
            block_bytes_list: List[bytes] = pickle.loads(bytes_list)
            blocks: List[FullBlock] = [FullBlock.from_bytes(b) for b in block_bytes_list]
            if len(blocks) == num_of_blocks:
                print(f"\n loaded {file_path} with {len(blocks)} blocks")
                return blocks
        # A truncated file raises EOFError; a corrupted one raises
        # UnpicklingError. Either way we fall through and regenerate.
        except (EOFError, pickle.UnpicklingError):
            print("\n error reading db file")
    return new_test_db(
        file_path,
        num_of_blocks,
        seed,
        empty_sub_slots,
        normalized_to_identity_cc_eos,
        normalized_to_identity_icc_eos,
        normalized_to_identity_cc_sp,
        normalized_to_identity_cc_ip,
    )
# Tail of a ConditionOpcode -> integer-id mapping; the opening of this dict
# literal (and its name) is outside the visible chunk.
ConditionOpcode.ASSERT_SECONDS_ABSOLUTE[0]: 13,
ConditionOpcode.ASSERT_HEIGHT_RELATIVE[0]: 14,
ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE[0]: 15,
}
# Rebuild the height -> header_hash list by scanning every stored block in
# height order. sys.argv[1] is the path to the sqlite blockchain database.
c = sqlite3.connect(sys.argv[1])
rows = c.execute("SELECT header_hash, height, block FROM full_blocks ORDER BY height")
height_to_hash: List[bytes] = []
for r in rows:
    hh = bytes.fromhex(r[0])
    height = r[1]
    block = FullBlock.from_bytes(r[2])
    if len(height_to_hash) <= height:
        # First block seen at this height: heights must arrive contiguously,
        # so we can only ever append the next height.
        assert len(height_to_hash) == height
        height_to_hash.append(hh)
    else:
        # Height seen before: a later row at the same height replaces the
        # earlier entry (the later block is taken as canonical).
        height_to_hash[height] = hh
    if height > 0:
        # Walk backwards, repointing ancestor entries that disagree with this
        # block's prev pointer.
        # NOTE(review): this chunk appears truncated here — the loop body that
        # reads `ref` and advances prev_hh/h is not visible; confirm against
        # the full script before relying on this loop's termination.
        prev_hh = block.prev_header_hash
        h = height - 1
        while height_to_hash[h] != prev_hh:
            height_to_hash[h] = prev_hh
            ref = c.execute("SELECT block FROM full_blocks WHERE header_hash=?", (prev_hh.hex(),))
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
) -> List[bytes]:
    """
    Pre-validate a batch of blocks, returning one serialized PreValidationResult
    per input block, in input order. Exactly one of ``full_blocks_pickled``
    (full-block validation) or ``header_blocks_pickled`` (header-only
    validation) must be provided.

    Raises:
        ValueError: if both block lists are passed.
    """
    # Fail fast on misuse, before any expensive deserialization. This was
    # previously `assert ValueError(...)`, which never fires — an exception
    # *instance* is truthy — and asserts are stripped under `python -O`.
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        raise ValueError("Only one should be passed here")
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                if block.height in npc_results:
                    # A cached NPC result was supplied for this height; use it.
                    npc_result = NPCResult.from_bytes(npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []
                if block.transactions_generator is not None and npc_result is None:
                    # No cached result: run the block's generator to compute
                    # removals and additions.
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator, min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost), True
                    )
                    removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
                header_block = get_block_header(block, tx_additions, removals)
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(PreValidationResult(error_int, required_iters, npc_result))
            except Exception:
                # Any failure for one block yields an UNKNOWN result for that
                # block; the rest of the batch is still processed.
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(PreValidationResult(error_int, required_iters, None))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
def maybe_decompress(self, block_bytes: bytes) -> FullBlock:
    """Deserialize a FullBlock, zstd-decompressing first when the DB is v2
    (v2 stores blocks compressed; earlier versions store them raw)."""
    raw = zstd.decompress(block_bytes) if self.db_wrapper.db_version == 2 else block_bytes
    return FullBlock.from_bytes(raw)
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_signatures: bool,
) -> List[bytes]:
    """
    Pre-validate a batch of blocks, returning one serialized PreValidationResult
    per input block, in input order. Exactly one of ``full_blocks_pickled``
    (full-block validation, optionally with aggregate-signature checking) or
    ``header_blocks_pickled`` (header-only validation) must be provided.

    Raises:
        ValueError: if both block lists are passed.
    """
    # Fail fast on misuse, before any expensive deserialization. This was
    # previously `assert ValueError(...)`, which never fires — an exception
    # *instance* is truthy — and asserts are stripped under `python -O`.
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        raise ValueError("Only one should be passed here")
    blocks: Dict[bytes, BlockRecord] = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(ConsensusConstants, constants_dict)
    # In this case, we are validating full blocks, not headers
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                if block.height in npc_results:
                    # A cached NPC result was supplied for this height; use it.
                    npc_result = NPCResult.from_bytes(npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []
                if block.transactions_generator is not None and npc_result is None:
                    # No cached result: run the block's generator to compute
                    # removals and additions.
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
                        cost_per_byte=constants.COST_PER_BYTE,
                        mempool_mode=False,
                        height=block.height,
                    )
                    removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
                if npc_result is not None and npc_result.error is not None:
                    # CLVM execution failed: record the error and skip the
                    # (now pointless) header and signature validation.
                    results.append(PreValidationResult(uint16(npc_result.error), None, npc_result, False))
                    continue
                header_block = get_block_header(block, tx_additions, removals)
                # TODO: address hint error and remove ignore
                # error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                # "Dict[bytes32, BlockRecord]" [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)
                successfully_validated_signatures = False
                # If we failed CLVM, no need to validate signature, the block is already invalid
                if error_int is None:
                    # If this is False, it means either we don't have a signature (not a tx block) or we have an invalid
                    # signature (which also puts in an error) or we didn't validate the signature because we want to
                    # validate it later. receive_block will attempt to validate the signature later.
                    if validate_signatures:
                        if npc_result is not None and block.transactions_info is not None:
                            pairs_pks, pairs_msgs = pkm_pairs(npc_result.npc_list, constants.AGG_SIG_ME_ADDITIONAL_DATA)
                            pks_objects: List[G1Element] = [G1Element.from_bytes(pk) for pk in pairs_pks]
                            if not AugSchemeMPL.aggregate_verify(
                                pks_objects, pairs_msgs, block.transactions_info.aggregated_signature
                            ):
                                error_int = uint16(Err.BAD_AGGREGATE_SIGNATURE.value)
                            else:
                                successfully_validated_signatures = True
                results.append(
                    PreValidationResult(error_int, required_iters, npc_result, successfully_validated_signatures)
                )
            except Exception:
                # Any failure for one block yields an UNKNOWN result for that
                # block; the rest of the batch is still processed.
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None, False))
    # In this case, we are validating header blocks
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                # TODO: address hint error and remove ignore
                # error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                # "Dict[bytes32, BlockRecord]" [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(PreValidationResult(error_int, required_iters, None, False))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None, False))
    return [bytes(r) for r in results]