async def get_block_records_close_to_peak(
    self, blocks_n: int
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Return all block records with height >= (peak height - blocks_n), plus the
    header hash of the peak. If no peak is stored, returns ({}, None).
    """
    res = await self.db.execute("SELECT header_hash, height from block_records WHERE is_peak = 1")
    row = await res.fetchone()
    await res.close()
    if row is None:
        # No peak recorded yet (fresh database).
        return {}, None
    header_hash_bytes, peak_height = row
    peak: bytes32 = bytes32(bytes.fromhex(header_hash_bytes))
    # Parameter binding instead of f-string interpolation: lets SQLite cache the
    # prepared statement and avoids any injection-shaped string building.
    cursor = await self.db.execute(
        "SELECT header_hash, block from block_records WHERE height >= ?",
        (peak_height - blocks_n,),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for header_hash_hex, block_record_bytes in rows:
        # Wrap in bytes32 so the keys actually match the declared Dict[bytes32, ...]
        # (the original stored plain bytes here while the peak was bytes32).
        ret[bytes32(bytes.fromhex(header_hash_hex))] = BlockRecord.from_bytes(block_record_bytes)
    return ret, peak
async def get_block_record_by_height(self, height) -> Optional[BlockRecord]:
    """
    Fetch the block record at the given height over RPC.
    Returns None if the RPC call itself fails for any reason.
    """
    request_body = {"height": height}
    try:
        response = await self.fetch("get_block_record_by_height", request_body)
    except Exception:
        # Best-effort RPC client convention: swallow transport errors.
        return None
    return BlockRecord.from_json_dict(response["block_record"])
async def get_block_record(self, header_hash) -> Optional[BlockRecord]:
    """
    Fetch a block record by header hash over RPC.
    Returns None when the record is absent or the RPC call fails.
    """
    try:
        response = await self.fetch("get_block_record", {"header_hash": header_hash.hex()})
        record_json = response["block_record"]
        if record_json is None:
            # Node answered, but has no record for this hash.
            return None
    except Exception:
        return None
    return BlockRecord.from_json_dict(record_json)
async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
    """
    Look up a single block record in the database by header hash.
    Returns None if no row matches.
    """
    cursor = await self.db.execute(
        "SELECT block from block_records WHERE header_hash=?",
        (header_hash.hex(),),
    )
    row = await cursor.fetchone()
    await cursor.close()
    if row is None:
        return None
    return BlockRecord.from_bytes(row[0])
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    header_blocks_pickled: List[bytes],
    transaction_generators: List[Optional[bytes]],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_transactions: bool,
) -> List[bytes]:
    """
    Validate a batch of serialized header blocks (run in a worker process, so
    everything comes in/out as bytes). For each header block, runs consensus
    header validation and, outside mainnet, optionally computes the CLVM cost
    of its transaction generator. Returns one serialized PreValidationResult
    per input header block; any unexpected exception yields an UNKNOWN error
    result instead of crashing the batch.
    """
    assert len(header_blocks_pickled) == len(transaction_generators)
    # Rehydrate the block-record cache passed from the parent process.
    blocks = {key: BlockRecord.from_bytes(value) for key, value in blocks_pickled.items()}
    constants: ConsensusConstants = dataclass_from_dict(ConsensusConstants, constants_dict)
    results: List[PreValidationResult] = []
    for idx, hb_bytes in enumerate(header_blocks_pickled):
        try:
            header_block: HeaderBlock = HeaderBlock.from_bytes(hb_bytes)
            generator: Optional[bytes] = transaction_generators[idx]
            required_iters, error = validate_finished_header_block(
                constants,
                BlockCache(blocks),
                header_block,
                check_filter,
                expected_difficulty[idx],
                expected_sub_slot_iters[idx],
            )
            error_int: Optional[uint16] = None
            if error is not None:
                error_int = uint16(error.code.value)
            cost_result: Optional[CostResult] = None
            # Generator cost is only computed off-mainnet, and only when the
            # header validated and a generator is present.
            if constants_dict["NETWORK_TYPE"] != NetworkType.MAINNET.value:
                if not error and generator is not None and validate_transactions:
                    cost_result = calculate_cost_of_program(
                        SerializedProgram.from_bytes(generator), constants.CLVM_COST_RATIO_CONSTANT
                    )
            results.append(PreValidationResult(error_int, required_iters, cost_result))
        except Exception:
            error_stack = traceback.format_exc()
            log.error(f"Exception: {error_stack}")
            results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
async def get_block_records(
    self,
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Returns a dictionary with all blocks, as well as the header hash of the
    peak, if present.
    """
    # Select named columns instead of `SELECT *` with positional indexing
    # (row[3]/row[5]); the old form silently breaks if the table's column
    # order ever changes.
    cursor = await self.db.execute("SELECT header_hash, block, is_peak from block_records")
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    peak: Optional[bytes32] = None
    for header_hash_hex, block_bytes, is_peak in rows:
        # Cast to bytes32 so keys match the declared Dict[bytes32, ...] type.
        header_hash = bytes32(bytes.fromhex(header_hash_hex))
        ret[header_hash] = BlockRecord.from_bytes(block_bytes)
        if is_peak:
            assert peak is None  # Sanity check, only one peak
            peak = header_hash
    return ret, peak
async def get_block_records_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, BlockRecord]:
    """
    Returns a dictionary with all blocks in range between start and stop
    (inclusive on both ends) if present.
    """
    # Parameter binding instead of f-string interpolation: lets SQLite cache
    # the prepared statement and avoids injection-shaped string building.
    cursor = await self.db.execute(
        "SELECT header_hash, block from block_records WHERE height >= ? and height <= ?",
        (start, stop),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for header_hash_hex, block_bytes in rows:
        # Cast to bytes32 so keys match the declared Dict[bytes32, ...] type.
        ret[bytes32(bytes.fromhex(header_hash_hex))] = BlockRecord.from_bytes(block_bytes)
    return ret
async def get_block_records_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, BlockRecord]:
    """
    Returns a dictionary with all blocks in range between start and stop
    (inclusive on both ends) if present.

    (Docstring fixed: the old one claimed a peak header hash was returned,
    but this method returns only the dictionary.)
    """
    # Parameter binding instead of f-string interpolation: lets SQLite cache
    # the prepared statement and avoids injection-shaped string building.
    cursor = await self.db.execute(
        "SELECT header_hash, block from block_records WHERE height >= ? and height <= ?",
        (start, stop),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for header_hash_hex, block_record_bytes in rows:
        # Cast to bytes32 so keys match the declared Dict[bytes32, ...] type.
        ret[bytes32(bytes.fromhex(header_hash_hex))] = BlockRecord.from_bytes(block_record_bytes)
    return ret
async def get_block_records_close_to_peak(
    self, blocks_n: int
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Returns a dictionary with all blocks that have height >= peak height -
    blocks_n, as well as the peak header hash. If no peak is stored, returns
    ({}, None).
    """
    # Named columns instead of `SELECT *` + positional peak_row[2]/peak_row[0]:
    # the old form silently breaks if the table's column order changes.
    res = await self.db.execute("SELECT header_hash, height from block_records WHERE is_peak = 1")
    peak_row = await res.fetchone()
    await res.close()
    if peak_row is None:
        return {}, None
    peak_hash_hex, peak_height = peak_row
    # Parameter binding instead of f-string interpolation.
    cursor = await self.db.execute(
        "SELECT header_hash, block from block_records WHERE height >= ?",
        (peak_height - blocks_n,),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for header_hash_hex, block_bytes in rows:
        # Cast to bytes32 so keys match the declared Dict[bytes32, ...] type.
        ret[bytes32(bytes.fromhex(header_hash_hex))] = BlockRecord.from_bytes(block_bytes)
    return ret, bytes32(bytes.fromhex(peak_hash_hex))
def header_block_to_sub_block_record(
    constants: ConsensusConstants,
    required_iters: uint64,
    block: Union[FullBlock, HeaderBlock],
    sub_slot_iters: uint64,
    overflow: bool,
    deficit: uint8,
    prev_transaction_block_height: uint32,
    ses: Optional[SubEpochSummary],
) -> BlockRecord:
    """
    Build a BlockRecord from a full or header block plus pre-computed consensus
    values (required_iters, sub_slot_iters, overflow, deficit, the previous
    transaction-block height, and an optional sub-epoch summary).

    Transaction-block-only fields (reward claims, timestamp, fees, previous
    transaction block hash) are None when the block carries no
    transactions_info / foliage_transaction_block.
    """
    # Only transaction blocks carry transactions_info.
    reward_claims_incorporated = (
        block.transactions_info.reward_claims_incorporated if block.transactions_info is not None else None
    )
    cbi = ChallengeBlockInfo(
        block.reward_chain_block.proof_of_space,
        block.reward_chain_block.challenge_chain_sp_vdf,
        block.reward_chain_block.challenge_chain_sp_signature,
        block.reward_chain_block.challenge_chain_ip_vdf,
    )
    # Infused challenge chain output is optional on the reward chain block.
    if block.reward_chain_block.infused_challenge_chain_ip_vdf is not None:
        icc_output: Optional[ClassgroupElement] = block.reward_chain_block.infused_challenge_chain_ip_vdf.output
    else:
        icc_output = None
    if len(block.finished_sub_slots) > 0:
        # Block starts one or more new sub-slots: record their hashes.
        finished_challenge_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.challenge_chain.get_hash() for sub_slot in block.finished_sub_slots
        ]
        finished_reward_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.reward_chain.get_hash() for sub_slot in block.finished_sub_slots
        ]
        finished_infused_challenge_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.infused_challenge_chain.get_hash()
            for sub_slot in block.finished_sub_slots
            if sub_slot.infused_challenge_chain is not None
        ]
    elif block.height == 0:
        # Genesis block: the genesis challenge stands in for the finished slots.
        finished_challenge_slot_hashes = [constants.GENESIS_CHALLENGE]
        finished_reward_slot_hashes = [constants.GENESIS_CHALLENGE]
        finished_infused_challenge_slot_hashes = None
    else:
        # Mid-sub-slot block: no finished slots to record.
        finished_challenge_slot_hashes = None
        finished_reward_slot_hashes = None
        finished_infused_challenge_slot_hashes = None
    prev_transaction_block_hash = (
        block.foliage_transaction_block.prev_transaction_block_hash
        if block.foliage_transaction_block is not None
        else None
    )
    timestamp = block.foliage_transaction_block.timestamp if block.foliage_transaction_block is not None else None
    fees = block.transactions_info.fees if block.transactions_info is not None else None
    # NOTE: BlockRecord's positional argument order is load-bearing; keep as-is.
    return BlockRecord(
        block.header_hash,
        block.prev_header_hash,
        block.height,
        block.weight,
        block.total_iters,
        block.reward_chain_block.signage_point_index,
        block.reward_chain_block.challenge_chain_ip_vdf.output,
        icc_output,
        block.reward_chain_block.get_hash(),
        cbi.get_hash(),
        sub_slot_iters,
        block.foliage.foliage_block_data.pool_target.puzzle_hash,
        block.foliage.foliage_block_data.farmer_reward_puzzle_hash,
        required_iters,
        deficit,
        overflow,
        prev_transaction_block_height,
        timestamp,
        prev_transaction_block_hash,
        fees,
        reward_claims_incorporated,
        finished_challenge_slot_hashes,
        finished_infused_challenge_slot_hashes,
        finished_reward_slot_hashes,
        ses,
    )
async def get_blockchain_state(self) -> Dict:
    """
    Fetch the node's blockchain state over RPC, deserializing the "peak"
    entry (when present) into a BlockRecord in place.
    """
    response = await self.fetch("get_blockchain_state", {})
    state = response["blockchain_state"]
    peak_json = state["peak"]
    if peak_json is not None:
        state["peak"] = BlockRecord.from_json_dict(peak_json)
    return state
def new_peak(
    self,
    peak: BlockRecord,
    sp_sub_slot: Optional[EndOfSubSlotBundle],  # None if not overflow, or in first/second slot
    ip_sub_slot: Optional[EndOfSubSlotBundle],  # None if in first slot
    reorg: bool,
    blocks: BlockchainInterface,
) -> Tuple[Optional[EndOfSubSlotBundle], List[SignagePoint], List[timelord_protocol.NewInfusionPointVDF]]:
    """
    If the peak is an overflow block, must provide two sub-slots: one for the
    current sub-slot and one for the prev sub-slot (since we still might get
    more blocks with an sp in the previous sub-slot).

    Results in either one or two sub-slots in finished_sub_slots.

    Returns any cached end-of-sub-slot bundle, signage points, and infusion
    point VDFs (keyed by the peak's reward_infusion_new_challenge) that become
    valid now that this peak is known.
    """
    assert len(self.finished_sub_slots) >= 1
    if ip_sub_slot is None:
        # We are still in the first sub-slot, no new sub slots yet
        self.initialize_genesis_sub_slot()
    else:
        # This is not the first sub-slot in the chain
        sp_sub_slot_sps: List[Optional[SignagePoint]] = [None] * self.constants.NUM_SPS_SUB_SLOT
        ip_sub_slot_sps: List[Optional[SignagePoint]] = [None] * self.constants.NUM_SPS_SUB_SLOT
        if not reorg:
            # If it's not a reorg, we can keep signage points that we had before, in the cache
            for index, (sub_slot, sps, total_iters) in enumerate(self.finished_sub_slots):
                if sub_slot is None:
                    continue
                if sub_slot == sp_sub_slot:
                    sp_sub_slot_sps = sps
                if sub_slot == ip_sub_slot:
                    ip_sub_slot_sps = sps
        # Rebuild finished_sub_slots from scratch for the new peak.
        self.clear_slots()
        prev_sub_slot_total_iters = peak.sp_sub_slot_total_iters(self.constants)
        if sp_sub_slot is not None or prev_sub_slot_total_iters == 0:
            assert peak.overflow or prev_sub_slot_total_iters
            self.finished_sub_slots.append((sp_sub_slot, sp_sub_slot_sps, prev_sub_slot_total_iters))
        ip_sub_slot_total_iters = peak.ip_sub_slot_total_iters(self.constants)
        self.finished_sub_slots.append((ip_sub_slot, ip_sub_slot_sps, ip_sub_slot_total_iters))
    new_eos: Optional[EndOfSubSlotBundle] = None
    new_sps: List[SignagePoint] = []
    new_ips: List[timelord_protocol.NewInfusionPointVDF] = []
    # Re-try cached future EOS bundles against the new peak; first valid one wins.
    for eos in self.future_eos_cache.get(peak.reward_infusion_new_challenge, []):
        if self.new_finished_sub_slot(eos, blocks, peak) is not None:
            new_eos = eos
            break
    # This cache is not currently being used
    for sp in self.future_sp_cache.get(peak.reward_infusion_new_challenge, []):
        assert sp.cc_vdf is not None
        index = uint8(sp.cc_vdf.number_of_iterations // peak.sub_slot_iters)
        if self.new_signage_point(index, blocks, peak, peak.sub_slot_iters, sp):
            new_sps.append(sp)
    for ip in self.future_ip_cache.get(peak.reward_infusion_new_challenge, []):
        new_ips.append(ip)
    # Drop the cache entries consumed by this peak.
    self.future_eos_cache.pop(peak.reward_infusion_new_challenge, [])
    self.future_sp_cache.pop(peak.reward_infusion_new_challenge, [])
    self.future_ip_cache.pop(peak.reward_infusion_new_challenge, [])
    return new_eos, new_sps, new_ips
def block_to_block_record(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    required_iters: uint64,
    full_block: Optional[Union[FullBlock, HeaderBlock]],
    header_block: Optional[HeaderBlock],
) -> BlockRecord:
    """
    Derive a BlockRecord for a (already validated) block. Exactly one of
    full_block / header_block must be provided. Unlike
    header_block_to_sub_block_record, this computes the consensus-derived
    values itself (sub_slot_iters, overflow, deficit, previous
    transaction-block height, sub-epoch summary) using `blocks` for ancestor
    lookups.
    """
    if full_block is None:
        assert header_block is not None
        block: Union[HeaderBlock, FullBlock] = header_block
    else:
        block = full_block
    if block.height == 0:
        # Genesis: no previous block; starting sub-slot iterations from constants.
        prev_b: Optional[BlockRecord] = None
        sub_slot_iters: uint64 = uint64(constants.SUB_SLOT_ITERS_STARTING)
    else:
        prev_b = blocks.block_record(block.prev_header_hash)
        assert prev_b is not None
        sub_slot_iters = get_next_sub_slot_iters(
            constants,
            blocks,
            prev_b.prev_hash,
            prev_b.height,
            prev_b.sub_slot_iters,
            prev_b.deficit,
            len(block.finished_sub_slots) > 0,
            prev_b.sp_total_iters(constants),
        )
    overflow = is_overflow_block(constants, block.reward_chain_block.signage_point_index)
    deficit = calculate_deficit(
        constants,
        block.height,
        prev_b,
        overflow,
        len(block.finished_sub_slots),
    )
    # Transaction-block-only fields: None for non-transaction blocks.
    prev_transaction_block_hash = (
        block.foliage_transaction_block.prev_transaction_block_hash
        if block.foliage_transaction_block is not None
        else None
    )
    timestamp = block.foliage_transaction_block.timestamp if block.foliage_transaction_block is not None else None
    fees = block.transactions_info.fees if block.transactions_info is not None else None
    reward_claims_incorporated = (
        block.transactions_info.reward_claims_incorporated if block.transactions_info is not None else None
    )
    if len(block.finished_sub_slots) > 0:
        # Block starts one or more new sub-slots: record their hashes.
        finished_challenge_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.challenge_chain.get_hash() for sub_slot in block.finished_sub_slots
        ]
        finished_reward_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.reward_chain.get_hash() for sub_slot in block.finished_sub_slots
        ]
        finished_infused_challenge_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.infused_challenge_chain.get_hash()
            for sub_slot in block.finished_sub_slots
            if sub_slot.infused_challenge_chain is not None
        ]
    elif block.height == 0:
        # Genesis block: the genesis challenge stands in for the finished slots.
        finished_challenge_slot_hashes = [constants.GENESIS_CHALLENGE]
        finished_reward_slot_hashes = [constants.GENESIS_CHALLENGE]
        finished_infused_challenge_slot_hashes = None
    else:
        finished_challenge_slot_hashes = None
        finished_reward_slot_hashes = None
        finished_infused_challenge_slot_hashes = None
    # If a finished sub-slot carries a sub-epoch summary hash, recompute the
    # summary and check it matches (last such hash wins if several are set).
    found_ses_hash: Optional[bytes32] = None
    ses: Optional[SubEpochSummary] = None
    if len(block.finished_sub_slots) > 0:
        for sub_slot in block.finished_sub_slots:
            if sub_slot.challenge_chain.subepoch_summary_hash is not None:
                found_ses_hash = sub_slot.challenge_chain.subepoch_summary_hash
    if found_ses_hash:
        assert prev_b is not None
        assert len(block.finished_sub_slots) > 0
        ses = make_sub_epoch_summary(
            constants,
            blocks,
            block.height,
            blocks.block_record(prev_b.prev_hash),
            block.finished_sub_slots[0].challenge_chain.new_difficulty,
            block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
        )
        assert ses.get_hash() == found_ses_hash
    cbi = ChallengeBlockInfo(
        block.reward_chain_block.proof_of_space,
        block.reward_chain_block.challenge_chain_sp_vdf,
        block.reward_chain_block.challenge_chain_sp_signature,
        block.reward_chain_block.challenge_chain_ip_vdf,
    )
    # Infused challenge chain output is optional on the reward chain block.
    if block.reward_chain_block.infused_challenge_chain_ip_vdf is not None:
        icc_output: Optional[ClassgroupElement] = block.reward_chain_block.infused_challenge_chain_ip_vdf.output
    else:
        icc_output = None
    # Walk back to the most recent ancestor that is a transaction block;
    # 0 if there is none reachable in the cache.
    prev_transaction_block_height = uint32(0)
    curr: Optional[BlockRecord] = blocks.try_block_record(block.prev_header_hash)
    while curr is not None and not curr.is_transaction_block:
        curr = blocks.try_block_record(curr.prev_hash)
    if curr is not None and curr.is_transaction_block:
        prev_transaction_block_height = curr.height
    # NOTE: BlockRecord's positional argument order is load-bearing; keep as-is.
    return BlockRecord(
        block.header_hash,
        block.prev_header_hash,
        block.height,
        block.weight,
        block.total_iters,
        block.reward_chain_block.signage_point_index,
        block.reward_chain_block.challenge_chain_ip_vdf.output,
        icc_output,
        block.reward_chain_block.get_hash(),
        cbi.get_hash(),
        sub_slot_iters,
        block.foliage.foliage_block_data.pool_target.puzzle_hash,
        block.foliage.foliage_block_data.farmer_reward_puzzle_hash,
        required_iters,
        deficit,
        overflow,
        prev_transaction_block_height,
        timestamp,
        prev_transaction_block_hash,
        fees,
        reward_claims_incorporated,
        finished_challenge_slot_hashes,
        finished_infused_challenge_slot_hashes,
        finished_reward_slot_hashes,
        ses,
    )