async def get_block_records_close_to_peak(
    self, blocks_n: int
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Return every block record whose height is >= peak height - blocks_n,
    together with the peak's header hash (empty dict and None if no peak).
    """
    peak = await self.get_peak()
    if peak is None:
        return {}, None

    cutoff = peak[1] - blocks_n
    records: Dict[bytes32, BlockRecord] = {}
    if self.db_wrapper.db_version == 2:
        # v2 schema: header_hash stored as raw bytes in full_blocks.
        async with self.db.execute(
            "SELECT header_hash, block_record FROM full_blocks WHERE height >= ?",
            (cutoff,),
        ) as cursor:
            rows = await cursor.fetchall()
        for hash_field, record_blob in rows:
            records[bytes32(hash_field)] = BlockRecord.from_bytes(record_blob)
    else:
        # v1 schema: header_hash may be hex-encoded; maybe_from_hex normalizes.
        async with self.db.execute(
            f"SELECT header_hash, block from block_records WHERE height >= {cutoff}"
        ) as cursor:
            rows = await cursor.fetchall()
        for hash_field, record_blob in rows:
            records[bytes32(self.maybe_from_hex(hash_field))] = BlockRecord.from_bytes(record_blob)
    return records, peak[0]
async def get_block_records_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, BlockRecord]:
    """
    Returns a dictionary with all blocks in range between start and stop
    if present.
    """
    ret: Dict[bytes32, BlockRecord] = {}
    if self.db_wrapper.db_version == 2:
        async with self.db.execute(
            "SELECT header_hash, block_record FROM full_blocks WHERE height >= ? AND height <= ?",
            (start, stop),
        ) as cursor:
            for row in await cursor.fetchall():
                header_hash = bytes32(row[0])
                ret[header_hash] = BlockRecord.from_bytes(row[1])
    else:
        formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {start} and height <= {stop}"
        # Fix: removed the stray `await` before the context manager; the
        # execute() result is used directly as an async context manager,
        # matching every other db-v1 query in this file.
        async with self.db.execute(formatted_str) as cursor:
            for row in await cursor.fetchall():
                header_hash = bytes32(self.maybe_from_hex(row[0]))
                ret[header_hash] = BlockRecord.from_bytes(row[1])
    return ret
async def get_block_records_by_hash(self, header_hashes: List[bytes32]):
    """
    Return BlockRecords in the same order as the header_hashes passed in.
    Raises ValueError if any requested block is not present.
    """
    if not header_hashes:
        return []
    placeholders = f'({"?," * (len(header_hashes) - 1)}?)'
    found: Dict[bytes32, BlockRecord] = {}
    if self.db_wrapper.db_version == 2:
        async with self.db.execute(
            "SELECT header_hash,block_record FROM full_blocks "
            f"WHERE header_hash in {placeholders}",
            tuple(header_hashes),
        ) as cursor:
            for hash_field, record_blob in await cursor.fetchall():
                found[bytes32(hash_field)] = BlockRecord.from_bytes(record_blob)
    else:
        # v1 stores header hashes hex-encoded, so convert the keys for the query.
        async with self.db.execute(
            f"SELECT block from block_records WHERE header_hash in {placeholders}",
            tuple([hh.hex() for hh in header_hashes]),
        ) as cursor:
            for (record_blob,) in await cursor.fetchall():
                record: BlockRecord = BlockRecord.from_bytes(record_blob)
                found[record.header_hash] = record
    ordered: List[BlockRecord] = []
    for hh in header_hashes:
        if hh not in found:
            raise ValueError(f"Header hash {hh} not in the blockchain")
        ordered.append(found[hh])
    return ordered
async def get_block_records_close_to_peak(
    self, blocks_n: int
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Returns a dictionary with all blocks with height >= peak height - blocks_n,
    as well as the header hash of the peak (None when no peak is set).
    """
    res = await self.db.execute(
        "SELECT header_hash, height from block_records WHERE is_peak = 1")
    row = await res.fetchone()
    await res.close()
    if row is None:
        return {}, None
    header_hash_bytes, peak_height = row
    peak: bytes32 = bytes32(bytes.fromhex(header_hash_bytes))

    formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {peak_height - blocks_n}"
    cursor = await self.db.execute(formatted_str)
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for row in rows:
        header_hash_bytes, block_record_bytes = row
        # Fix: wrap in bytes32 so keys match the declared Dict[bytes32, ...]
        # return type (previously plain bytes objects were used as keys).
        header_hash = bytes32(bytes.fromhex(header_hash_bytes))
        ret[header_hash] = BlockRecord.from_bytes(block_record_bytes)
    return ret, peak
async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
    """Fetch a single BlockRecord by header hash, or None if not stored."""
    cursor = await self.db.execute(
        "SELECT block from block_records WHERE header_hash=?",
        (header_hash.hex(),),
    )
    row = await cursor.fetchone()
    await cursor.close()
    return None if row is None else BlockRecord.from_bytes(row[0])
async def get_block_records_at(self, heights: List[uint32]) -> List[BlockRecord]:
    """Return the BlockRecords at the given heights, ordered by ascending height."""
    if not heights:
        return []
    params = tuple(heights)
    query = (
        f'SELECT block from block_records WHERE height in ({"?," * (len(params) - 1)}?) ORDER BY height ASC;'
    )
    cursor = await self.db.execute(query, params)
    rows = await cursor.fetchall()
    await cursor.close()
    return [BlockRecord.from_bytes(record_blob) for (record_blob,) in rows]
async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
    """Fetch a single BlockRecord by header hash (schema-version aware), or None."""
    if self.db_wrapper.db_version == 2:
        # v2: raw-bytes key into full_blocks.
        query = "SELECT block_record FROM full_blocks WHERE header_hash=?"
        param = header_hash
    else:
        # v1: hex-encoded key into block_records.
        query = "SELECT block from block_records WHERE header_hash=?"
        param = header_hash.hex()
    async with self.db.execute(query, (param,)) as cursor:
        row = await cursor.fetchone()
    if row is None:
        return None
    return BlockRecord.from_bytes(row[0])
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    header_blocks_pickled: List[bytes],
    transaction_generators: List[Optional[bytes]],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_transactions: bool,
) -> List[bytes]:
    # Validates a batch of serialized header blocks (intended to be run in a
    # worker process, hence all-bytes arguments) and returns one serialized
    # PreValidationResult per header block, in input order.
    assert len(header_blocks_pickled) == len(transaction_generators)
    # Rebuild the BlockRecord cache from its pickled form.
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    for i in range(len(header_blocks_pickled)):
        try:
            header_block: HeaderBlock = HeaderBlock.from_bytes(
                header_blocks_pickled[i])
            generator: Optional[bytes] = transaction_generators[i]
            required_iters, error = validate_finished_header_block(
                constants,
                BlockCache(blocks),
                header_block,
                check_filter,
                expected_difficulty[i],
                expected_sub_slot_iters[i],
            )
            cost_result: Optional[CostResult] = None
            error_int: Optional[uint16] = None
            if error is not None:
                error_int = uint16(error.code.value)
            # CLVM program cost is only computed off-mainnet, and only when the
            # header validated and a generator is present.
            if constants_dict["NETWORK_TYPE"] == NetworkType.MAINNET.value:
                cost_result = None
            else:
                if not error and generator is not None and validate_transactions:
                    cost_result = calculate_cost_of_program(
                        SerializedProgram.from_bytes(generator),
                        constants.CLVM_COST_RATIO_CONSTANT)
            results.append(
                PreValidationResult(error_int, required_iters, cost_result))
        except Exception:
            # One bad block must not abort the batch; record UNKNOWN and move on.
            error_stack = traceback.format_exc()
            log.error(f"Exception: {error_stack}")
            results.append(
                PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
async def get_block_records(
    self,
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Returns a dictionary with all blocks, as well as the header hash of the peak,
    if present.
    """
    cursor = await self.db.execute("SELECT * from block_records")
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    peak: Optional[bytes32] = None
    for row in rows:
        # Fix: wrap in bytes32 so the dict keys and peak value match the
        # declared annotations (previously plain bytes were used).
        header_hash = bytes32(bytes.fromhex(row[0]))
        # row[3] is the serialized BlockRecord, row[5] is the is_peak flag.
        ret[header_hash] = BlockRecord.from_bytes(row[3])
        if row[5]:
            assert peak is None  # Sanity check, only one peak
            peak = header_hash
    return ret, peak
async def get_block_records_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, BlockRecord]:
    """
    Returns a dictionary with all blocks in range between start and stop
    if present.
    """
    formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {start} and height <= {stop}"
    cursor = await self.db.execute(formatted_str)
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for row in rows:
        # Fix: wrap in bytes32 so keys match the declared Dict[bytes32, ...]
        # return type (previously plain bytes objects were used as keys).
        header_hash = bytes32(bytes.fromhex(row[0]))
        ret[header_hash] = BlockRecord.from_bytes(row[1])
    return ret
async def get_block_records_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, BlockRecord]:
    """
    Returns a dictionary with all blocks in range between start and stop
    if present.
    """
    # Fix: docstring corrected — this method returns only the dict, not the
    # peak header hash (the old text was copy-pasted from get_block_records).
    formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {start} and height <= {stop}"
    cursor = await self.db.execute(formatted_str)
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for row in rows:
        header_hash_bytes, block_record_bytes = row
        # Fix: wrap in bytes32 so keys match the declared Dict[bytes32, ...]
        # return type (previously plain bytes objects were used as keys).
        header_hash = bytes32(bytes.fromhex(header_hash_bytes))
        ret[header_hash] = BlockRecord.from_bytes(block_record_bytes)
    return ret
async def get_block_records_by_hash(self, header_hashes: List[bytes32]):
    """
    Returns a list of Block Records ordered the same as the header_hashes
    passed in. Raises ValueError if any of the blocks is not present.
    """
    if not header_hashes:
        return []
    hex_hashes = tuple([hh.hex() for hh in header_hashes])
    query = f'SELECT block from block_records WHERE header_hash in ({"?," * (len(hex_hashes) - 1)}?)'
    cursor = await self.db.execute(query, hex_hashes)
    rows = await cursor.fetchall()
    await cursor.close()
    # Index the fetched records by header hash; SQL "IN" gives no ordering
    # guarantee, so re-order against the caller's list below.
    by_hash: Dict[bytes32, BlockRecord] = {}
    for (record_blob,) in rows:
        record: BlockRecord = BlockRecord.from_bytes(record_blob)
        by_hash[record.header_hash] = record
    ordered: List[BlockRecord] = []
    for hh in header_hashes:
        if hh not in by_hash:
            raise ValueError(f"Header hash {hh} not in the blockchain")
        ordered.append(by_hash[hh])
    return ordered
async def get_block_records_close_to_peak(
    self, blocks_n: int
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Returns a dictionary with all blocks that have height >= peak height -
    blocks_n, as well as the peak header hash.
    """
    res = await self.db.execute("SELECT * from block_records WHERE is_peak = 1")
    peak_row = await res.fetchone()
    await res.close()
    if peak_row is None:
        return {}, None
    # peak_row[0] is the hex header hash, peak_row[2] is the peak height.
    formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {peak_row[2] - blocks_n}"
    cursor = await self.db.execute(formatted_str)
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for row in rows:
        # Fix: wrap in bytes32 so the keys and the returned peak match the
        # declared Tuple[Dict[bytes32, ...], Optional[bytes32]] annotation
        # (previously plain bytes were returned).
        header_hash = bytes32(bytes.fromhex(row[0]))
        ret[header_hash] = BlockRecord.from_bytes(row[1])
    return ret, bytes32(bytes.fromhex(peak_row[0]))
async def _validate_weight_proof_inner(
    self, weight_proof: WeightProof, skip_segment_validation: bool
) -> Tuple[bool, uint32, List[SubEpochSummary], List[BlockRecord]]:
    # Validates a weight proof end-to-end: sub-epoch summaries, sub-epoch
    # sampling, (optionally) segment/VDF proofs, and the recent chain data.
    # Returns (valid, fork_point_height, summaries, recent BlockRecords);
    # any failure returns (False, 0, [], []).
    # NOTE(review): the `if` below is unreachable after the assert in normal
    # runs; under `python -O` the assert is stripped and the `if` is the
    # effective guard.
    assert len(weight_proof.sub_epochs) > 0
    if len(weight_proof.sub_epochs) == 0:
        return False, uint32(0), [], []

    peak_height = weight_proof.recent_chain_data[
        -1].reward_chain_block.height
    log.info(f"validate weight proof peak height {peak_height}")
    summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(
        self._constants, weight_proof)
    if summaries is None:
        log.error("weight proof failed sub epoch data validation")
        return False, uint32(0), [], []

    # Deterministic RNG seeded from the second-to-last summary hash so the
    # verifier samples the same sub-epochs the prover committed to.
    seed = summaries[-2].get_hash()
    rng = random.Random(seed)
    if not validate_sub_epoch_sampling(rng, sub_epoch_weight_list, weight_proof):
        log.error("failed weight proof sub epoch sample validation")
        return False, uint32(0), [], []

    constants, summary_bytes, wp_segment_bytes, wp_recent_chain_bytes = vars_to_bytes(
        self._constants, summaries, weight_proof)

    vdf_tasks: List[asyncio.Future] = []
    # Recent-chain validation runs in the executor concurrently with the
    # CPU-heavy VDF batches started below.
    recent_blocks_validation_task: asyncio.Future = asyncio.get_running_loop(
    ).run_in_executor(
        self._executor,
        _validate_recent_blocks_and_get_records,
        constants,
        wp_recent_chain_bytes,
        summary_bytes,
        pathlib.Path(self._executor_shutdown_tempfile.name),
    )
    try:
        if not skip_segment_validation:
            segments_validated, vdfs_to_validate = _validate_sub_epoch_segments(
                constants, rng, wp_segment_bytes, summary_bytes)
            if not segments_validated:
                return False, uint32(0), [], []

            # Fan VDF verification out over the worker pool in chunks;
            # everything crossing the process boundary is serialized to bytes.
            vdf_chunks = chunks(vdfs_to_validate, self._num_processes)
            for chunk in vdf_chunks:
                byte_chunks = []
                for vdf_proof, classgroup, vdf_info in chunk:
                    byte_chunks.append(
                        (bytes(vdf_proof), bytes(classgroup), bytes(vdf_info)))

                vdf_task: asyncio.Future = asyncio.get_running_loop(
                ).run_in_executor(
                    self._executor,
                    _validate_vdf_batch,
                    constants,
                    byte_chunks,
                    pathlib.Path(self._executor_shutdown_tempfile.name),
                )
                vdf_tasks.append(vdf_task)

            for vdf_task in vdf_tasks:
                validated = await vdf_task
                if not validated:
                    return False, uint32(0), [], []

        valid_recent_blocks, records_bytes = await recent_blocks_validation_task
    finally:
        # Cancel anything still outstanding on early return or error
        # (cancel() is a no-op for futures that already completed).
        recent_blocks_validation_task.cancel()
        for vdf_task in vdf_tasks:
            vdf_task.cancel()

    if not valid_recent_blocks:
        log.error("failed validating weight proof recent blocks")
        # Verify the data
        return False, uint32(0), [], []

    records = [BlockRecord.from_bytes(b) for b in records_bytes]

    # TODO fix find fork point
    return True, uint32(0), summaries, records
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
) -> List[bytes]:
    """
    Validates a batch of either full blocks or header blocks (exactly one of
    full_blocks_pickled / header_blocks_pickled may be non-None) and returns
    one serialized PreValidationResult per block, in input order.
    """
    # Rebuild the BlockRecord cache from its pickled form.
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        # Bug fix: this was `assert ValueError(...)`, which always passes
        # because an exception instance is truthy. Raise the error instead.
        raise ValueError("Only one should be passed here")
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                # Prefer a pre-computed NPC result if the caller supplied one.
                if block.height in npc_results:
                    npc_result = NPCResult.from_bytes(
                        npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(
                            npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []
                # Otherwise run the generator to derive removals/additions.
                if block.transactions_generator is not None and npc_result is None:
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(
                        prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM,
                            block.transactions_info.cost), True)
                    removals, tx_additions = tx_removals_and_additions(
                        npc_result.npc_list)
                header_block = get_block_header(block, tx_additions, removals)
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(
                    PreValidationResult(error_int, required_iters, npc_result))
            except Exception:
                # One bad block must not abort the batch; record UNKNOWN.
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(
                    PreValidationResult(error_int, required_iters, None))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_signatures: bool,
) -> List[bytes]:
    """
    Validates a batch of either full blocks or header blocks (exactly one of
    full_blocks_pickled / header_blocks_pickled may be non-None), optionally
    verifying aggregate signatures, and returns one serialized
    PreValidationResult per block, in input order.
    """
    # Rebuild the BlockRecord cache from its pickled form.
    blocks: Dict[bytes, BlockRecord] = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        # Bug fix: this was `assert ValueError(...)`, which always passes
        # because an exception instance is truthy. Raise the error instead.
        raise ValueError("Only one should be passed here")

    # In this case, we are validating full blocks, not headers
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                # Prefer a pre-computed NPC result if the caller supplied one.
                if block.height in npc_results:
                    npc_result = NPCResult.from_bytes(
                        npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(
                            npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []
                # Otherwise run the generator to derive removals/additions.
                if block.transactions_generator is not None and npc_result is None:
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(
                        prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM,
                            block.transactions_info.cost),
                        cost_per_byte=constants.COST_PER_BYTE,
                        mempool_mode=False,
                        height=block.height,
                    )
                    removals, tx_additions = tx_removals_and_additions(
                        npc_result.npc_list)
                # CLVM failure: record it and skip header/signature validation.
                if npc_result is not None and npc_result.error is not None:
                    results.append(
                        PreValidationResult(uint16(npc_result.error), None,
                                            npc_result, False))
                    continue

                header_block = get_block_header(block, tx_additions, removals)
                # TODO: address hint error and remove ignore
                #       error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                #       "Dict[bytes32, BlockRecord]"  [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)

                successfully_validated_signatures = False
                # If we failed CLVM, no need to validate signature, the block is already invalid
                if error_int is None:

                    # If this is False, it means either we don't have a signature (not a tx block) or we have an invalid
                    # signature (which also puts in an error) or we didn't validate the signature because we want to
                    # validate it later. receive_block will attempt to validate the signature later.
                    if validate_signatures:
                        if npc_result is not None and block.transactions_info is not None:
                            pairs_pks, pairs_msgs = pkm_pairs(
                                npc_result.npc_list,
                                constants.AGG_SIG_ME_ADDITIONAL_DATA)
                            pks_objects: List[G1Element] = [
                                G1Element.from_bytes(pk) for pk in pairs_pks
                            ]
                            if not AugSchemeMPL.aggregate_verify(
                                    pks_objects, pairs_msgs,
                                    block.transactions_info.aggregated_signature):
                                error_int = uint16(
                                    Err.BAD_AGGREGATE_SIGNATURE.value)
                            else:
                                successfully_validated_signatures = True

                results.append(
                    PreValidationResult(error_int, required_iters, npc_result,
                                        successfully_validated_signatures))
            except Exception:
                # One bad block must not abort the batch; record UNKNOWN.
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None,
                                        False))
    # In this case, we are validating header blocks
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                # TODO: address hint error and remove ignore
                #       error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                #       "Dict[bytes32, BlockRecord]"  [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(
                    PreValidationResult(error_int, required_iters, None, False))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None,
                                        False))
    return [bytes(r) for r in results]