async def get_filter_additions_removals(
    self, new_block: HeaderBlock, transactions_filter: bytes, fork_point_with_peak: Optional[uint32]
) -> Tuple[List[bytes32], List[bytes32]]:
    """
    Returns a list of our coin ids, and a list of puzzle_hashes that positively match with provided filter.
    """
    # assert new_block.prev_header_hash in self.blockchain.blocks

    tx_filter = PyBIP158([b for b in transactions_filter])

    # Find fork point
    if fork_point_with_peak is not None:
        fork_h: int = fork_point_with_peak
    elif new_block.prev_header_hash != self.constants.GENESIS_CHALLENGE and self.peak is not None:
        # TODO: handle returning of -1
        fork_h = find_fork_point_in_chain(
            self.blockchain,
            self.blockchain.block_record(self.peak.header_hash),
            new_block,
        )
    else:
        fork_h = 0

    # Get all unspent coins
    my_coin_records: Set[WalletCoinRecord] = await self.coin_store.get_unspent_coins_at_height(uint32(fork_h))

    # Filter coins up to and including fork point
    unspent_coin_names: Set[bytes32] = set()
    for coin in my_coin_records:
        if coin.confirmed_block_height <= fork_h:
            unspent_coin_names.add(coin.name())

    # Get all blocks after fork point up to but not including this block
    if new_block.height > 0:
        curr: BlockRecord = self.blockchain.block_record(new_block.prev_hash)
        reorg_blocks: List[HeaderBlockRecord] = []
        while curr.height > fork_h:
            header_block_record = await self.block_store.get_header_block_record(curr.header_hash)
            assert header_block_record is not None
            reorg_blocks.append(header_block_record)
            if curr.height == 0:
                break
            curr = self.blockchain.block_record(curr.prev_hash)
        reorg_blocks.reverse()

        # For each block, process additions to get all Coins, then process removals to get unspent coins
        for reorg_block in reorg_blocks:
            for addition in reorg_block.additions:
                unspent_coin_names.add(addition.name())
            for removal in reorg_block.removals:
                record = await self.puzzle_store.get_derivation_record_for_puzzle_hash(removal.puzzle_hash)
                if record is None:
                    continue
                # The set contains coin names, so remove by name, not by Coin
                unspent_coin_names.remove(removal.name())

    my_puzzle_hashes = self.puzzle_store.all_puzzle_hashes

    removals_of_interest: List[bytes32] = []
    additions_of_interest: List[bytes32] = []

    (
        trade_removals,
        trade_additions,
    ) = await self.trade_manager.get_coins_of_interest()
    for name, trade_coin in trade_removals.items():
        if tx_filter.Match(bytearray(trade_coin.name())):
            removals_of_interest.append(trade_coin.name())

    for name, trade_coin in trade_additions.items():
        if tx_filter.Match(bytearray(trade_coin.puzzle_hash)):
            additions_of_interest.append(trade_coin.puzzle_hash)

    for coin_name in unspent_coin_names:
        if tx_filter.Match(bytearray(coin_name)):
            removals_of_interest.append(coin_name)

    for puzzle_hash in my_puzzle_hashes:
        if tx_filter.Match(bytearray(puzzle_hash)):
            additions_of_interest.append(puzzle_hash)

    return additions_of_interest, removals_of_interest
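
# A minimal round-trip sketch of the filter probing above. It uses PyBIP158 only
# in the two ways this code already uses it: built from a list of 32-byte
# elements (as validate_block_body does further down) and rebuilt from the
# encoded filter bytes (as get_filter_additions_removals does). The hashes are
# made up, and the import paths assume the chia-blockchain package layout.
from chiabip158 import PyBIP158
from chia.util.hash import std_hash

my_puzzle_hash = std_hash(b"my puzzle hash")      # hypothetical wallet puzzle hash
other_element = std_hash(b"someone else's coin")  # an element we don't care about

# Block side: build the filter over all puzzle hashes / coin ids in the block
block_filter = PyBIP158([bytearray(my_puzzle_hash), bytearray(other_element)])
encoded: bytes = bytes(block_filter.GetEncoded())

# Wallet side: rebuild the filter from the encoded bytes and probe it
tx_filter = PyBIP158([b for b in encoded])
assert tx_filter.Match(bytearray(my_puzzle_hash))  # ours: guaranteed positive match
# BIP158 filters are probabilistic: unrelated elements can rarely match too
# (false positives), so a match only means "fetch the full block and check".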
async def _reconsider_peak(
    self, block_record: BlockRecord, genesis: bool, fork_point_with_peak: Optional[uint32]
) -> Optional[uint32]:
    """
    When a new block is added, this is called, to check if the new block is the new peak of the chain.
    This also handles reorgs by reverting blocks which are not in the heaviest chain.
    It returns the height of the fork between the previous chain and the new chain, or returns
    None if there was no update to the heaviest chain.
    """
    peak = self.get_peak()
    if genesis:
        if peak is None:
            block: Optional[HeaderBlockRecord] = await self.block_store.get_header_block_record(
                block_record.header_hash
            )
            assert block is not None
            self.__height_to_hash[uint32(0)] = block.header_hash
            for removed in block.removals:
                self.log.debug(f"Removed: {removed.name()}")
            await self.coins_of_interest_received(block.removals, block.additions, block.height)
            self._peak_height = uint32(0)
            return uint32(0)
        return None

    assert peak is not None
    if block_record.weight > peak.weight:
        # Find the fork. If the block is just being appended, it will return the peak.
        # If no blocks are in common, returns -1, and reverts all blocks
        if fork_point_with_peak is not None:
            fork_h: int = fork_point_with_peak
        else:
            fork_h = find_fork_point_in_chain(self, block_record, peak)

        # Rollback to fork
        self.log.debug(f"fork_h: {fork_h}, SB: {block_record.height}, peak: {peak.height}")
        await self.reorg_rollback(fork_h)

        # Rollback sub_epoch_summaries
        heights_to_delete = []
        for ses_included_height in self.__sub_epoch_summaries.keys():
            if ses_included_height > fork_h:
                heights_to_delete.append(ses_included_height)
        for height in heights_to_delete:
            del self.__sub_epoch_summaries[height]

        # Collect all blocks from fork point to new peak
        blocks_to_add: List[Tuple[HeaderBlockRecord, BlockRecord]] = []
        curr = block_record.header_hash
        while fork_h < 0 or curr != self.height_to_hash(uint32(fork_h)):
            fetched_header_block: Optional[HeaderBlockRecord] = await self.block_store.get_header_block_record(curr)
            fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
            assert fetched_header_block is not None
            assert fetched_block_record is not None
            blocks_to_add.append((fetched_header_block, fetched_block_record))
            if fetched_header_block.height == 0:
                # Doing a full reorg, starting at height 0
                break
            curr = fetched_block_record.prev_hash

        for fetched_header_block, fetched_block_record in reversed(blocks_to_add):
            self.__height_to_hash[fetched_block_record.height] = fetched_block_record.header_hash
            if fetched_block_record.is_transaction_block:
                await self.coins_of_interest_received(
                    fetched_header_block.removals,
                    fetched_header_block.additions,
                    fetched_header_block.height,
                )
            if fetched_block_record.sub_epoch_summary_included is not None:
                self.__sub_epoch_summaries[fetched_block_record.height] = (
                    fetched_block_record.sub_epoch_summary_included
                )

        # Changes the peak to be the new peak
        await self.block_store.set_peak(block_record.header_hash)
        self._peak_height = block_record.height
        # For a full reorg fork_h is -1, so clamp the returned fork height at 0
        return uint32(max(fork_h, 0))

    # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
    return None
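
# A minimal, pure-Python sketch of what find_fork_point_in_chain computes, used
# throughout these methods. Rec is a hypothetical stand-in (not the real
# BlockRecord API): walk the taller chain down to equal heights, then step both
# chains back until the header hashes agree. Returns -1 when the two chains
# share no block at all.
from dataclasses import dataclass
from typing import Dict


@dataclass(frozen=True)
class Rec:  # hypothetical stand-in for BlockRecord
    header_hash: str
    prev_hash: str
    height: int


def fork_point(records: Dict[str, Rec], a: Rec, b: Rec) -> int:
    while a.height > b.height:
        a = records[a.prev_hash]
    while b.height > a.height:
        b = records[b.prev_hash]
    while a.height > 0:
        if a.header_hash == b.header_hash:
            return a.height
        a, b = records[a.prev_hash], records[b.prev_hash]
    # Height 0: either a shared genesis, or two unrelated chains
    return 0 if a.header_hash == b.header_hash else -1


# Two chains sharing blocks g and x1, then diverging at height 2:
g = Rec("g", "", 0)
x1 = Rec("x1", "g", 1)
a2, b2 = Rec("a2", "x1", 2), Rec("b2", "x1", 2)
recs = {r.header_hash: r for r in (g, x1, a2, b2)}
assert fork_point(recs, a2, b2) == 1  # last common height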
async def test_basic_store(self, empty_blockchain, normalized_to_identity: bool = False):
    blockchain = empty_blockchain
    blocks = bt.get_consecutive_blocks(
        10,
        seed=b"1234",
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )

    store = FullNodeStore(test_constants)

    unfinished_blocks = []
    for block in blocks:
        unfinished_blocks.append(
            UnfinishedBlock(
                block.finished_sub_slots,
                block.reward_chain_block.get_unfinished(),
                block.challenge_chain_sp_proof,
                block.reward_chain_sp_proof,
                block.foliage,
                block.foliage_transaction_block,
                block.transactions_info,
                block.transactions_generator,
                [],
            )
        )

    # Add/get candidate block
    assert store.get_candidate_block(unfinished_blocks[0].get_hash()) is None
    for height, unf_block in enumerate(unfinished_blocks):
        store.add_candidate_block(unf_block.get_hash(), uint32(height), unf_block)

    candidate = store.get_candidate_block(unfinished_blocks[4].get_hash())
    assert candidate is not None
    assert candidate[1] == unfinished_blocks[4]
    store.clear_candidate_blocks_below(uint32(8))
    assert store.get_candidate_block(unfinished_blocks[5].get_hash()) is None
    assert store.get_candidate_block(unfinished_blocks[8].get_hash()) is not None

    # Test seen unfinished blocks
    h_hash_1 = bytes32(token_bytes(32))
    assert not store.seen_unfinished_block(h_hash_1)
    assert store.seen_unfinished_block(h_hash_1)
    store.clear_seen_unfinished_blocks()
    assert not store.seen_unfinished_block(h_hash_1)

    # Add/get unfinished block
    for height, unf_block in enumerate(unfinished_blocks):
        assert store.get_unfinished_block(unf_block.partial_hash) is None
        store.add_unfinished_block(
            uint32(height), unf_block, PreValidationResult(None, uint64(123532), None, False)
        )
        assert store.get_unfinished_block(unf_block.partial_hash) == unf_block
        store.remove_unfinished_block(unf_block.partial_hash)
        assert store.get_unfinished_block(unf_block.partial_hash) is None

    blocks = bt.get_consecutive_blocks(
        1,
        skip_slots=5,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
    )
    sub_slots = blocks[0].finished_sub_slots
    assert len(sub_slots) == 5

    assert (
        store.get_finished_sub_slots(
            BlockCache({}),
            None,
            sub_slots[0].challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
        )
        == []
    )

    # Test adding non-connecting sub-slots genesis
    assert store.get_sub_slot(test_constants.GENESIS_CHALLENGE) is None
    assert store.get_sub_slot(sub_slots[0].challenge_chain.get_hash()) is None
    assert store.get_sub_slot(sub_slots[1].challenge_chain.get_hash()) is None
    assert store.new_finished_sub_slot(sub_slots[1], blockchain, None, None) is None
    assert store.new_finished_sub_slot(sub_slots[2], blockchain, None, None) is None

    # Test adding sub-slots after genesis
    assert store.new_finished_sub_slot(sub_slots[0], blockchain, None, None) is not None
    sub_slot = store.get_sub_slot(sub_slots[0].challenge_chain.get_hash())
    assert sub_slot is not None
    assert sub_slot[0] == sub_slots[0]
    assert store.get_sub_slot(sub_slots[1].challenge_chain.get_hash()) is None
    assert store.new_finished_sub_slot(sub_slots[1], blockchain, None, None) is not None
    for i in range(len(sub_slots)):
        assert store.new_finished_sub_slot(sub_slots[i], blockchain, None, None) is not None
        slot_i = store.get_sub_slot(sub_slots[i].challenge_chain.get_hash())
        assert slot_i is not None
        assert slot_i[0] == sub_slots[i]

    assert store.get_finished_sub_slots(BlockCache({}), None, sub_slots[-1].challenge_chain.get_hash()) == sub_slots
    assert store.get_finished_sub_slots(BlockCache({}), None, std_hash(b"not a valid hash")) is None

    assert (
        store.get_finished_sub_slots(BlockCache({}), None, sub_slots[-2].challenge_chain.get_hash())
        == sub_slots[:-1]
    )

    # Test adding genesis peak
    await _validate_and_add_block(blockchain, blocks[0])
    peak = blockchain.get_peak()
    peak_full_block = await blockchain.get_full_peak()
    if peak.overflow:
        store.new_peak(peak, peak_full_block, sub_slots[-2], sub_slots[-1], None, blockchain)
    else:
        store.new_peak(peak, peak_full_block, None, sub_slots[-1], None, blockchain)

    assert store.get_sub_slot(sub_slots[0].challenge_chain.get_hash()) is None
    assert store.get_sub_slot(sub_slots[1].challenge_chain.get_hash()) is None
    assert store.get_sub_slot(sub_slots[2].challenge_chain.get_hash()) is None
    if peak.overflow:
        slot_3 = store.get_sub_slot(sub_slots[3].challenge_chain.get_hash())
        assert slot_3 is not None
        assert slot_3[0] == sub_slots[3]
    else:
        assert store.get_sub_slot(sub_slots[3].challenge_chain.get_hash()) is None

    slot_4 = store.get_sub_slot(sub_slots[4].challenge_chain.get_hash())
    assert slot_4 is not None
    assert slot_4[0] == sub_slots[4]

    assert (
        store.get_finished_sub_slots(
            blockchain,
            peak,
            sub_slots[-1].challenge_chain.get_hash(),
        )
        == []
    )

    # Test adding non genesis peak directly
    blocks = bt.get_consecutive_blocks(
        2,
        skip_slots=2,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    blocks = bt.get_consecutive_blocks(
        3,
        block_list_input=blocks,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    for block in blocks:
        await _validate_and_add_block_no_error(blockchain, block)
        sb = blockchain.block_record(block.header_hash)
        sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
        res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
        assert res.added_eos is None

    # Add reorg blocks
    blocks_reorg = bt.get_consecutive_blocks(
        20,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    for block in blocks_reorg:
        peak = blockchain.get_peak()
        assert peak is not None

        await _validate_and_add_block_no_error(blockchain, block)

        if blockchain.get_peak().header_hash == block.header_hash:
            sb = blockchain.block_record(block.header_hash)
            fork = find_fork_point_in_chain(blockchain, peak, blockchain.block_record(sb.header_hash))
            if fork > 0:
                fork_block = blockchain.height_to_block_record(fork)
            else:
                fork_block = None
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
            res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, fork_block, blockchain)
            assert res.added_eos is None

    # Add slots to the end
    blocks_2 = bt.get_consecutive_blocks(
        1,
        block_list_input=blocks_reorg,
        skip_slots=2,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    for slot in blocks_2[-1].finished_sub_slots:
        store.new_finished_sub_slot(slot, blockchain, blockchain.get_peak(), await blockchain.get_full_peak())

    assert store.get_sub_slot(sub_slots[3].challenge_chain.get_hash()) is None
    assert store.get_sub_slot(sub_slots[4].challenge_chain.get_hash()) is None

    # Test adding signage point
    peak = blockchain.get_peak()
    ss_start_iters = peak.ip_sub_slot_total_iters(test_constants)
    for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
        sp = get_signage_point(
            test_constants,
            blockchain,
            peak,
            ss_start_iters,
            uint8(i),
            [],
            peak.sub_slot_iters,
        )
        assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)

    blocks = blocks_reorg
    while True:
        blocks = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        await _validate_and_add_block(blockchain, blocks[-1])
        if blockchain.get_peak().header_hash == blocks[-1].header_hash:
            sb = blockchain.block_record(blocks[-1].header_hash)
            fork = find_fork_point_in_chain(blockchain, peak, blockchain.block_record(sb.header_hash))
            if fork > 0:
                fork_block = blockchain.height_to_block_record(fork)
            else:
                fork_block = None
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(blocks[-1].header_hash)

            res = store.new_peak(sb, blocks[-1], sp_sub_slot, ip_sub_slot, fork_block, blockchain)
            assert res.added_eos is None
            if sb.overflow and sp_sub_slot is not None:
                assert sp_sub_slot != ip_sub_slot
                break

    peak = blockchain.get_peak()
    assert peak.overflow
    # Overflow peak should result in 2 finished sub slots
    assert len(store.finished_sub_slots) == 2

    # Add slots to the end, except for the last one, which we will use to test invalid SP
    blocks_2 = bt.get_consecutive_blocks(
        1,
        block_list_input=blocks,
        skip_slots=3,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    for slot in blocks_2[-1].finished_sub_slots[:-1]:
        store.new_finished_sub_slot(slot, blockchain, blockchain.get_peak(), await blockchain.get_full_peak())
    finished_sub_slots = blocks_2[-1].finished_sub_slots
    assert len(store.finished_sub_slots) == 4

    # Test adding signage points for overflow blocks (sp_sub_slot)
    ss_start_iters = peak.sp_sub_slot_total_iters(test_constants)
    # for i in range(peak.signage_point_index, test_constants.NUM_SPS_SUB_SLOT):
    #     if i < peak.signage_point_index:
    #         continue
    #     latest = peak
    #     while latest.total_iters > peak.sp_total_iters(test_constants):
    #         latest = blockchain.blocks[latest.prev_hash]
    #     sp = get_signage_point(
    #         test_constants,
    #         blockchain.blocks,
    #         latest,
    #         ss_start_iters,
    #         uint8(i),
    #         [],
    #         peak.sub_slot_iters,
    #     )
    #     assert store.new_signage_point(i, blockchain.blocks, peak, peak.sub_slot_iters, sp)

    # Test adding signage points for overflow blocks (ip_sub_slot)
    for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
        sp = get_signage_point(
            test_constants,
            blockchain,
            peak,
            peak.ip_sub_slot_total_iters(test_constants),
            uint8(i),
            [],
            peak.sub_slot_iters,
        )
        assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)

    # Test adding future signage point, a few slots forward (good)
    saved_sp_hash = None
    for slot_offset in range(1, len(finished_sub_slots)):
        for i in range(
            1,
            test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA,
        ):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                peak.ip_sub_slot_total_iters(test_constants) + slot_offset * peak.sub_slot_iters,
                uint8(i),
                finished_sub_slots[:slot_offset],
                peak.sub_slot_iters,
            )
            assert sp.cc_vdf is not None
            saved_sp_hash = sp.cc_vdf.output.get_hash()
            assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)

    # Test adding future signage point (bad)
    for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
        sp = get_signage_point(
            test_constants,
            blockchain,
            peak,
            peak.ip_sub_slot_total_iters(test_constants) + len(finished_sub_slots) * peak.sub_slot_iters,
            uint8(i),
            finished_sub_slots[: len(finished_sub_slots)],
            peak.sub_slot_iters,
        )
        assert not store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)

    # Test adding past signage point
    sp = SignagePoint(
        blocks[1].reward_chain_block.challenge_chain_sp_vdf,
        blocks[1].challenge_chain_sp_proof,
        blocks[1].reward_chain_block.reward_chain_sp_vdf,
        blocks[1].reward_chain_sp_proof,
    )
    assert not store.new_signage_point(
        blocks[1].reward_chain_block.signage_point_index,
        blockchain,
        peak,
        blockchain.block_record(blocks[1].header_hash).sp_sub_slot_total_iters(test_constants),
        sp,
    )

    # Get signage point by index
    assert (
        store.get_signage_point_by_index(
            finished_sub_slots[0].challenge_chain.get_hash(),
            uint8(4),
            finished_sub_slots[0].reward_chain.get_hash(),
        )
        is not None
    )

    assert (
        store.get_signage_point_by_index(finished_sub_slots[0].challenge_chain.get_hash(), uint8(4), std_hash(b"1"))
        is None
    )

    # Get signage point by hash
    # TODO: address hint error and remove ignore
    #       error: Argument 1 to "get_signage_point" of "FullNodeStore" has incompatible type "Optional[bytes32]";
    #       expected "bytes32"  [arg-type]
    assert store.get_signage_point(saved_sp_hash) is not None  # type: ignore[arg-type]
    assert store.get_signage_point(std_hash(b"2")) is None

    # Test adding signage points before genesis
    store.initialize_genesis_sub_slot()
    assert len(store.finished_sub_slots) == 1
    for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
        sp = get_signage_point(
            test_constants,
            BlockCache({}, {}),
            None,
            uint128(0),
            uint8(i),
            [],
            peak.sub_slot_iters,
        )
        assert store.new_signage_point(uint8(i), blockchain, None, peak.sub_slot_iters, sp)

    blocks_3 = bt.get_consecutive_blocks(
        1,
        skip_slots=2,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    for slot in blocks_3[-1].finished_sub_slots:
        store.new_finished_sub_slot(slot, blockchain, None, None)
    assert len(store.finished_sub_slots) == 3
    finished_sub_slots = blocks_3[-1].finished_sub_slots

    for slot_offset in range(1, len(finished_sub_slots) + 1):
        for i in range(
            1,
            test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA,
        ):
            sp = get_signage_point(
                test_constants,
                BlockCache({}, {}),
                None,
                slot_offset * peak.sub_slot_iters,
                uint8(i),
                finished_sub_slots[:slot_offset],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(uint8(i), blockchain, None, peak.sub_slot_iters, sp)

    # Test adding signage points after genesis
    blocks_4 = bt.get_consecutive_blocks(
        1,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    blocks_5 = bt.get_consecutive_blocks(
        1,
        block_list_input=blocks_4,
        skip_slots=1,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )

    # If this is not the case, fix test to find a block that is
    assert (
        blocks_4[-1].reward_chain_block.signage_point_index
        < test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA
    )
    await _validate_and_add_block(blockchain, blocks_4[-1], expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)

    sb = blockchain.block_record(blocks_4[-1].header_hash)
    store.new_peak(sb, blocks_4[-1], None, None, None, blockchain)
    for i in range(
        sb.signage_point_index + test_constants.NUM_SP_INTERVALS_EXTRA,
        test_constants.NUM_SPS_SUB_SLOT,
    ):
        if is_overflow_block(test_constants, uint8(i)):
            finished_sub_slots = blocks_5[-1].finished_sub_slots
        else:
            finished_sub_slots = []
        sp = get_signage_point(
            test_constants,
            blockchain,
            sb,
            uint128(0),
            uint8(i),
            finished_sub_slots,
            peak.sub_slot_iters,
        )
        assert store.new_signage_point(uint8(i), empty_blockchain, sb, peak.sub_slot_iters, sp)

    # Test future EOS cache
    store.initialize_genesis_sub_slot()
    blocks = bt.get_consecutive_blocks(
        1,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
    )
    await _validate_and_add_block_no_error(blockchain, blocks[-1])
    while True:
        blocks = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        await _validate_and_add_block_no_error(blockchain, blocks[-1])
        sb = blockchain.block_record(blocks[-1].header_hash)
        if sb.first_in_sub_slot:
            break
    assert len(blocks) >= 2
    dependant_sub_slots = blocks[-1].finished_sub_slots
    peak = blockchain.get_peak()
    peak_full_block = await blockchain.get_full_peak()
    for block in blocks[:-2]:
        sb = blockchain.block_record(block.header_hash)
        sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
        peak = sb
        peak_full_block = block
        res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
        assert res.added_eos is None

    assert store.new_finished_sub_slot(dependant_sub_slots[0], blockchain, peak, peak_full_block) is None
    block = blocks[-2]
    sb = blockchain.block_record(block.header_hash)
    sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
    res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
    assert res.added_eos == dependant_sub_slots[0]
    assert res.new_signage_points == res.new_infusion_points == []

    # Test future IP cache
    store.initialize_genesis_sub_slot()
    blocks = bt.get_consecutive_blocks(
        60,
        normalized_to_identity_cc_ip=normalized_to_identity,
        normalized_to_identity_cc_sp=normalized_to_identity,
        normalized_to_identity_cc_eos=normalized_to_identity,
        normalized_to_identity_icc_eos=normalized_to_identity,
    )

    for block in blocks[:5]:
        await _validate_and_add_block_no_error(blockchain, block)
        sb = blockchain.block_record(block.header_hash)
        sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
        res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
        assert res.added_eos is None

    case_0, case_1 = False, False
    for i in range(5, len(blocks) - 1):
        prev_block = blocks[i]
        block = blocks[i + 1]
        new_ip = NewInfusionPointVDF(
            block.reward_chain_block.get_unfinished().get_hash(),
            block.reward_chain_block.challenge_chain_ip_vdf,
            block.challenge_chain_ip_proof,
            block.reward_chain_block.reward_chain_ip_vdf,
            block.reward_chain_ip_proof,
            block.reward_chain_block.infused_challenge_chain_ip_vdf,
            block.infused_challenge_chain_ip_proof,
        )
        store.add_to_future_ip(new_ip)

        await _validate_and_add_block_no_error(blockchain, prev_block)
        sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(prev_block.header_hash)
        sb = blockchain.block_record(prev_block.header_hash)
        res = store.new_peak(sb, prev_block, sp_sub_slot, ip_sub_slot, None, blockchain)
        if len(block.finished_sub_slots) == 0:
            case_0 = True
            assert res.new_infusion_points == [new_ip]
        else:
            case_1 = True
            assert res.new_infusion_points == []
            found_ips: List[timelord_protocol.NewInfusionPointVDF] = []
            for ss in block.finished_sub_slots:
                ipvdf = store.new_finished_sub_slot(ss, blockchain, sb, prev_block)
                assert ipvdf is not None
                found_ips += ipvdf
            assert found_ips == [new_ip]

    # If flaky, increase the number of blocks created
    assert case_0 and case_1

    # Try to get two blocks in the same slot, such that we have
    # SP, B2 SP .... SP B1
    #     i2 .........  i1
    # Then do a reorg up to B2, removing all signage points after B2, but not before
    log.warning(f"Adding blocks up to {blocks[-1]}")
    for block in blocks:
        await _validate_and_add_block_no_error(blockchain, block)

    log.warning("Starting loop")
    while True:
        log.warning("Looping")
        blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
        await _validate_and_add_block_no_error(blockchain, blocks[-1])
        peak = blockchain.get_peak()
        sub_slots = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash)
        store.new_peak(peak, blocks[-1], sub_slots[0], sub_slots[1], None, blockchain)

        blocks = bt.get_consecutive_blocks(2, block_list_input=blocks, guarantee_transaction_block=True)

        i3 = blocks[-3].reward_chain_block.signage_point_index
        i2 = blocks[-2].reward_chain_block.signage_point_index
        i1 = blocks[-1].reward_chain_block.signage_point_index
        if (
            len(blocks[-2].finished_sub_slots) == len(blocks[-1].finished_sub_slots) == 0
            and not is_overflow_block(test_constants, signage_point_index=i2)
            and not is_overflow_block(test_constants, signage_point_index=i1)
            and i2 > i3 + 3
            and i1 > (i2 + 3)
        ):
            # We hit all the conditions that we want
            all_sps: List[Optional[SignagePoint]] = [None] * test_constants.NUM_SPS_SUB_SLOT

            def assert_sp_none(sp_index: int, is_none: bool):
                sp_to_check: Optional[SignagePoint] = all_sps[sp_index]
                assert sp_to_check is not None
                assert sp_to_check.cc_vdf is not None
                fetched = store.get_signage_point(sp_to_check.cc_vdf.output.get_hash())
                assert (fetched is None) == is_none
                if fetched is not None:
                    assert fetched == sp_to_check

            for i in range(i3 + 1, test_constants.NUM_SPS_SUB_SLOT - 3):
                finished_sub_slots = []
                sp = get_signage_point(
                    test_constants,
                    blockchain,
                    peak,
                    uint128(peak.ip_sub_slot_total_iters(bt.constants)),
                    uint8(i),
                    finished_sub_slots,
                    peak.sub_slot_iters,
                )
                all_sps[i] = sp
                assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)

            # Adding a new peak clears all SPs after that peak
            await _validate_and_add_block_no_error(blockchain, blocks[-2])
            peak = blockchain.get_peak()
            sub_slots = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash)
            store.new_peak(peak, blocks[-2], sub_slots[0], sub_slots[1], None, blockchain)

            assert_sp_none(i2, False)
            assert_sp_none(i2 + 1, False)
            assert_sp_none(i1, True)
            assert_sp_none(i1 + 1, True)
            assert_sp_none(i1 + 4, True)

            for i in range(i2, test_constants.NUM_SPS_SUB_SLOT):
                if is_overflow_block(test_constants, uint8(i)):
                    blocks_alt = bt.get_consecutive_blocks(1, block_list_input=blocks[:-1], skip_slots=1)
                    finished_sub_slots = blocks_alt[-1].finished_sub_slots
                else:
                    finished_sub_slots = []
                sp = get_signage_point(
                    test_constants,
                    blockchain,
                    peak,
                    uint128(peak.ip_sub_slot_total_iters(bt.constants)),
                    uint8(i),
                    finished_sub_slots,
                    peak.sub_slot_iters,
                )
                all_sps[i] = sp
                assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)

            assert_sp_none(i2, False)
            assert_sp_none(i2 + 1, False)
            assert_sp_none(i1, False)
            assert_sp_none(i1 + 1, False)
            assert_sp_none(i1 + 4, False)

            await _validate_and_add_block_no_error(blockchain, blocks[-1])
            peak = blockchain.get_peak()
            sub_slots = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash)

            # Do a reorg, which should remove everything after B2
            store.new_peak(
                peak,
                blocks[-1],
                sub_slots[0],
                sub_slots[1],
                (await blockchain.get_block_records_at([blocks[-2].height]))[0],
                blockchain,
            )

            assert_sp_none(i2, False)
            assert_sp_none(i2 + 1, False)
            assert_sp_none(i1, True)
            assert_sp_none(i1 + 1, True)
            assert_sp_none(i1 + 4, True)
            break
        else:
            for block in blocks[-2:]:
                await _validate_and_add_block_no_error(blockchain, block)
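
# A toy sketch of the seen_unfinished_block contract the asserts near the top of
# the test pin down (hypothetical class, not the real FullNodeStore): the first
# call for a hash records it and returns False, repeat calls return True until
# the seen-set is cleared.
from typing import Set


class SeenUnfinished:
    def __init__(self) -> None:
        self._seen: Set[bytes] = set()

    def seen_unfinished_block(self, h: bytes) -> bool:
        if h in self._seen:
            return True
        self._seen.add(h)
        return False

    def clear_seen_unfinished_blocks(self) -> None:
        self._seen.clear()


toy_store = SeenUnfinished()
h = b"\x01" * 32
assert not toy_store.seen_unfinished_block(h)  # first sighting: not seen yet
assert toy_store.seen_unfinished_block(h)      # second sighting: deduplicated
toy_store.clear_seen_unfinished_blocks()
assert not toy_store.seen_unfinished_block(h)  # cleared: counts as new again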
async def get_block_generator(
    self, block: Union[FullBlock, UnfinishedBlock], additional_blocks=None
) -> Optional[BlockGenerator]:
    if additional_blocks is None:
        additional_blocks = {}
    ref_list = block.transactions_generator_ref_list
    if block.transactions_generator is None:
        assert len(ref_list) == 0
        return None
    if len(ref_list) == 0:
        return BlockGenerator(block.transactions_generator, [])

    result: List[GeneratorArg] = []
    previous_block_hash = block.prev_header_hash
    if (
        self.try_block_record(previous_block_hash)
        and self.height_to_hash(self.block_record(previous_block_hash).height) == previous_block_hash
    ):
        # We are not in a reorg, no need to look up alternate header hashes (we can get them from height_to_hash)
        for ref_height in block.transactions_generator_ref_list:
            header_hash = self.height_to_hash(ref_height)
            ref_block = await self.get_full_block(header_hash)
            assert ref_block is not None
            if ref_block.transactions_generator is None:
                raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
            result.append(GeneratorArg(ref_block.height, ref_block.transactions_generator))
    else:
        # First tries to find the blocks in additional_blocks
        reorg_chain: Dict[uint32, FullBlock] = {}
        curr: Union[FullBlock, UnfinishedBlock] = block
        additional_height_dict = {}
        while curr.prev_header_hash in additional_blocks:
            prev: FullBlock = additional_blocks[curr.prev_header_hash]
            additional_height_dict[prev.height] = prev
            if isinstance(curr, FullBlock):
                assert curr.height == prev.height + 1
            reorg_chain[prev.height] = prev
            curr = prev

        peak: Optional[BlockRecord] = self.get_peak()
        if self.contains_block(curr.prev_header_hash) and peak is not None:
            # Then we look up blocks up to fork point one at a time, backtracking
            previous_block_hash = curr.prev_header_hash
            prev_block_record = await self.block_store.get_block_record(previous_block_hash)
            prev_block = await self.block_store.get_full_block(previous_block_hash)
            assert prev_block is not None
            assert prev_block_record is not None
            fork = find_fork_point_in_chain(self, peak, prev_block_record)
            curr_2: Optional[FullBlock] = prev_block
            assert curr_2 is not None and isinstance(curr_2, FullBlock)
            reorg_chain[curr_2.height] = curr_2
            while curr_2.height > fork and curr_2.height > 0:
                curr_2 = await self.block_store.get_full_block(curr_2.prev_header_hash)
                assert curr_2 is not None
                reorg_chain[curr_2.height] = curr_2

        for ref_height in block.transactions_generator_ref_list:
            if ref_height in reorg_chain:
                ref_block = reorg_chain[ref_height]
                assert ref_block is not None
                if ref_block.transactions_generator is None:
                    raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
                result.append(GeneratorArg(ref_block.height, ref_block.transactions_generator))
            else:
                if ref_height in additional_height_dict:
                    ref_block = additional_height_dict[ref_height]
                else:
                    header_hash = self.height_to_hash(ref_height)
                    ref_block = await self.get_full_block(header_hash)
                assert ref_block is not None
                if ref_block.transactions_generator is None:
                    raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
                result.append(GeneratorArg(ref_block.height, ref_block.transactions_generator))
    assert len(result) == len(ref_list)
    return BlockGenerator(block.transactions_generator, result)
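
# A toy version of the happy path above (no reorg), using plain dicts in place
# of the blockchain interfaces: each ref height must resolve to an earlier block
# that actually has a generator, and the resolved refs are returned alongside
# this block's own generator. All names here are hypothetical.
from typing import Dict, List, Optional, Tuple


def resolve_generator_refs(
    generators_by_height: Dict[int, Optional[bytes]],  # height -> generator (None if block had none)
    own_generator: Optional[bytes],
    ref_list: List[int],
) -> Optional[Tuple[bytes, List[bytes]]]:
    if own_generator is None:
        # A block without a generator must not reference other generators
        assert len(ref_list) == 0
        return None
    refs: List[bytes] = []
    for ref_height in ref_list:
        ref_gen = generators_by_height[ref_height]
        if ref_gen is None:
            raise ValueError("GENERATOR_REF_HAS_NO_GENERATOR")
        refs.append(ref_gen)
    return own_generator, refs


chain = {0: None, 1: b"gen1", 2: None, 3: b"gen3"}
assert resolve_generator_refs(chain, b"gen5", [1, 3]) == (b"gen5", [b"gen1", b"gen3"])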
async def receive_block(self, block: HeaderBlock) -> Tuple[ReceiveBlockResult, Optional[Err]]:
    if self.contains_block(block.header_hash):
        return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None
    if not self.contains_block(block.prev_header_hash) and block.height > 0:
        return ReceiveBlockResult.DISCONNECTED_BLOCK, None
    if (
        len(block.finished_sub_slots) > 0
        and block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None
    ):
        assert block.finished_sub_slots[0].challenge_chain.new_difficulty is not None  # They both change together
        sub_slot_iters: uint64 = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters
        difficulty: uint64 = block.finished_sub_slots[0].challenge_chain.new_difficulty
    else:
        sub_slot_iters = self._sub_slot_iters
        difficulty = self._difficulty

    required_iters, error = validate_finished_header_block(
        self.constants, self, block, False, difficulty, sub_slot_iters, False
    )
    if error is not None:
        return ReceiveBlockResult.INVALID_BLOCK, error.code
    if required_iters is None:
        return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_POSPACE

    block_record: BlockRecord = block_to_block_record(
        self.constants, self, required_iters, None, block, sub_slot_iters
    )
    self.add_block_record(block_record)

    if self._peak is None:
        if block_record.is_transaction_block:
            latest_timestamp = block_record.timestamp
        else:
            latest_timestamp = None
        self._height_to_hash[block_record.height] = block_record.header_hash
        await self.set_peak_block(block, latest_timestamp)
        return ReceiveBlockResult.NEW_PEAK, None
    elif block_record.weight > self._peak.weight:
        if block_record.prev_hash == self._peak.header_hash:
            fork_height: int = self._peak.height
        else:
            fork_height = find_fork_point_in_chain(self, block_record, self._peak)
        await self._rollback_to_height(fork_height)
        curr_record: BlockRecord = block_record
        latest_timestamp = self._latest_timestamp
        while curr_record.height > fork_height:
            self._height_to_hash[curr_record.height] = curr_record.header_hash
            if curr_record.timestamp is not None and curr_record.timestamp > latest_timestamp:
                latest_timestamp = curr_record.timestamp
            if curr_record.height == 0:
                break
            curr_record = self.block_record(curr_record.prev_hash)
        self._sub_slot_iters = block_record.sub_slot_iters
        self._difficulty = uint64(block_record.weight - self.block_record(block_record.prev_hash).weight)
        await self.set_peak_block(block, latest_timestamp)
        self.clean_block_records()
        return ReceiveBlockResult.NEW_PEAK, None
    return ReceiveBlockResult.ADDED_AS_ORPHAN, None
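
# A tiny illustration of the difficulty update above (hypothetical numbers):
# weight is cumulative across the chain, so the current difficulty is recovered
# as the weight delta between a block and its predecessor, exactly as
# receive_block does with block_record.weight - prev.weight.
weights = [0, 10, 20, 35]  # cumulative chain weights, genesis first
difficulties = [weights[i] - weights[i - 1] for i in range(1, len(weights))]
assert difficulties == [10, 10, 15]  # the last block was farmed at difficulty 15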
async def validate_block_body(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    block_store: BlockStore,
    coin_store: CoinStore,
    peak: Optional[BlockRecord],
    block: Union[FullBlock, UnfinishedBlock],
    height: uint32,
    npc_result: Optional[NPCResult],
    fork_point_with_peak: Optional[uint32],
    get_block_generator: Callable,
) -> Tuple[Optional[Err], Optional[NPCResult]]:
    """
    This assumes the header block has been completely validated.
    Validates the transactions and body of the block.
    Returns None for the first value if everything validates correctly, or an Err if something does not validate.
    For the second value, returns a CostResult if validation succeeded, and there are transactions.
    """
    if isinstance(block, FullBlock):
        assert height == block.height
    prev_transaction_block_height: uint32 = uint32(0)

    # 1. For non transaction blocks, foliage block, transaction filter, transactions info, and generator must be
    # empty. If it is a block but not a transaction block, there is no body to validate. Check that all fields are
    # None
    if block.foliage.foliage_transaction_block_hash is None:
        if (
            block.foliage_transaction_block is not None
            or block.transactions_info is not None
            or block.transactions_generator is not None
        ):
            return Err.NOT_BLOCK_BUT_HAS_DATA, None
        return None, None  # This means the block is valid

    # 2. For blocks, foliage block, transaction filter, transactions info must not be empty
    if (
        block.foliage_transaction_block is None
        or block.foliage_transaction_block.filter_hash is None
        or block.transactions_info is None
    ):
        return Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA, None

    # keeps track of the reward coins that need to be incorporated
    expected_reward_coins: Set[Coin] = set()

    # 3. The transaction info hash in the Foliage block must match the transaction info
    if block.foliage_transaction_block.transactions_info_hash != std_hash(block.transactions_info):
        return Err.INVALID_TRANSACTIONS_INFO_HASH, None

    # 4. The foliage block hash in the foliage block must match the foliage block
    if block.foliage.foliage_transaction_block_hash != std_hash(block.foliage_transaction_block):
        return Err.INVALID_FOLIAGE_BLOCK_HASH, None

    # 7. The reward claims must be valid for the previous blocks, and current block fees
    if height > 0:
        # Add reward claims for all blocks from the prev prev block, until the prev block (including the latter)
        prev_transaction_block = blocks.block_record(block.foliage_transaction_block.prev_transaction_block_hash)
        prev_transaction_block_height = prev_transaction_block.height
        assert prev_transaction_block.fees is not None
        pool_coin = create_pool_coin(
            prev_transaction_block_height,
            prev_transaction_block.pool_puzzle_hash,
            calculate_pool_reward(prev_transaction_block.height),
            constants.GENESIS_CHALLENGE,
        )
        farmer_coin = create_farmer_coin(
            prev_transaction_block_height,
            prev_transaction_block.farmer_puzzle_hash,
            uint64(calculate_base_farmer_reward(prev_transaction_block.height) + prev_transaction_block.fees),
            constants.GENESIS_CHALLENGE,
        )
        # Adds the previous block
        expected_reward_coins.add(pool_coin)
        expected_reward_coins.add(farmer_coin)

        # For the second block in the chain, don't go back further
        if prev_transaction_block.height > 0:
            curr_b = blocks.block_record(prev_transaction_block.prev_hash)
            while not curr_b.is_transaction_block:
                expected_reward_coins.add(
                    create_pool_coin(
                        curr_b.height,
                        curr_b.pool_puzzle_hash,
                        calculate_pool_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    )
                )
                expected_reward_coins.add(
                    create_farmer_coin(
                        curr_b.height,
                        curr_b.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    )
                )
                curr_b = blocks.block_record(curr_b.prev_hash)

    if set(block.transactions_info.reward_claims_incorporated) != expected_reward_coins:
        return Err.INVALID_REWARD_COINS, None

    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    coin_announcement_names: Set[bytes32] = set()
    puzzle_announcement_names: Set[bytes32] = set()
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)

    if height <= constants.INITIAL_FREEZE_PERIOD and block.transactions_generator is not None:
        return Err.INITIAL_TRANSACTION_FREEZE, None

    if height > constants.INITIAL_FREEZE_PERIOD and constants.NETWORK_TYPE == NetworkType.MAINNET:
        return Err.INITIAL_TRANSACTION_FREEZE, None
    else:
        # 6a. The generator root must be the hash of the serialized bytes of
        #     the generator for this block (or zeroes if no generator)
        if block.transactions_generator is not None:
            if std_hash(bytes(block.transactions_generator)) != block.transactions_info.generator_root:
                return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT, None
        else:
            if block.transactions_info.generator_root != bytes([0] * 32):
                return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT, None

        # 6b. The generator_ref_list must be the hash of the serialized bytes of
        #     the generator ref list for this block (or 'one' bytes [0x01] if no generator)
        # 6c. The generator ref list length must be less than or equal to MAX_GENERATOR_REF_LIST_SIZE entries
        if block.transactions_generator_ref_list in (None, []):
            if block.transactions_info.generator_refs_root != bytes([1] * 32):
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
        else:
            # If we have a generator reference list, we must have a generator
            if block.transactions_generator is None:
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None

            # The generator_refs_root must be the hash of the concatenation of the List[uint32]
            generator_refs_hash = std_hash(b"".join([bytes(i) for i in block.transactions_generator_ref_list]))
            if block.transactions_info.generator_refs_root != generator_refs_hash:
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
            if len(block.transactions_generator_ref_list) > constants.MAX_GENERATOR_REF_LIST_SIZE:
                return Err.PRE_SOFT_FORK_TOO_MANY_GENERATOR_REFS, None

        if block.transactions_generator is not None:
            # Get List of names removed, puzzles hashes for removed coins and conditions created
            assert npc_result is not None
            cost = calculate_cost_of_program(block.transactions_generator, npc_result, constants.COST_PER_BYTE)
            npc_list = npc_result.npc_list

            # 8. Check that cost <= MAX_BLOCK_COST_CLVM
            log.warning(f"Cost: {cost} max: {constants.MAX_BLOCK_COST_CLVM}")
            if cost > constants.MAX_BLOCK_COST_CLVM:
                return Err.BLOCK_COST_EXCEEDS_MAX, None
            if npc_result.error is not None:
                return Err.GENERATOR_RUNTIME_ERROR, None

            for npc in npc_list:
                removals.append(npc.coin_name)
                removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

            additions = additions_for_npc(npc_list)
            coin_announcement_names = coin_announcements_names_for_npc(npc_list)
            puzzle_announcement_names = puzzle_announcements_names_for_npc(npc_list)
        else:
            assert npc_result is None

        # 9. Check that the correct cost is in the transactions info
        if block.transactions_info.cost != cost:
            return Err.INVALID_BLOCK_COST, None

        additions_dic: Dict[bytes32, Coin] = {}
        # 10. Check additions for max coin amount
        # Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer
        for coin in additions + coinbase_additions:
            additions_dic[coin.name()] = coin
            if coin.amount > constants.MAX_COIN_AMOUNT:
                return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 11. Validate addition and removal roots
        root_error = validate_block_merkle_roots(
            block.foliage_transaction_block.additions_root,
            block.foliage_transaction_block.removals_root,
            additions + coinbase_additions,
            removals,
        )
        if root_error:
            return root_error, None

        # 12. The additions and removals must result in the correct filter
        byte_array_tx: List[bytes32] = []

        for coin in additions + coinbase_additions:
            byte_array_tx.append(bytearray(coin.puzzle_hash))
        for coin_name in removals:
            byte_array_tx.append(bytearray(coin_name))

        bip158: PyBIP158 = PyBIP158(byte_array_tx)
        encoded_filter = bytes(bip158.GetEncoded())
        filter_hash = std_hash(encoded_filter)

        if filter_hash != block.foliage_transaction_block.filter_hash:
            return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

        # 13. Check for duplicate outputs in additions
        addition_counter = collections.Counter(_.name() for _ in additions + coinbase_additions)
        for k, v in addition_counter.items():
            if v > 1:
                return Err.DUPLICATE_OUTPUT, None

        # 14. Check for duplicate spends inside block
        removal_counter = collections.Counter(removals)
        for k, v in removal_counter.items():
            if v > 1:
                return Err.DOUBLE_SPEND, None

        # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block)
        if peak is None or height == 0:
            fork_h: int = -1
        elif fork_point_with_peak is not None:
            fork_h = fork_point_with_peak
        else:
            fork_h = find_fork_point_in_chain(blocks, peak, blocks.block_record(block.prev_header_hash))

        if fork_h == -1:
            coin_store_reorg_height = -1
        else:
            last_block_in_common = await blocks.get_block_record_from_db(blocks.height_to_hash(uint32(fork_h)))
            assert last_block_in_common is not None
            coin_store_reorg_height = last_block_in_common.height

        # Get additions and removals since (after) fork_h but not including this block
        additions_since_fork: Dict[bytes32, Tuple[Coin, uint32]] = {}
        removals_since_fork: Set[bytes32] = set()
        coinbases_since_fork: Dict[bytes32, uint32] = {}

        if height > 0:
            prev_block: Optional[FullBlock] = await block_store.get_full_block(block.prev_header_hash)
            reorg_blocks: Dict[int, FullBlock] = {}
            curr: Optional[FullBlock] = prev_block
            assert curr is not None
            reorg_blocks[curr.height] = curr
            while curr.height > fork_h:
                if curr.height == 0:
                    break
                curr = await block_store.get_full_block(curr.prev_header_hash)
                assert curr is not None
                reorg_blocks[curr.height] = curr

            curr = prev_block
            assert curr is not None
            while curr.height > fork_h:
                # Coin store doesn't contain coins from fork, we have to run generator for each block in fork
                if curr.transactions_generator is not None:
                    curr_block_generator: Optional[BlockGenerator] = await get_block_generator(curr)
                    assert curr_block_generator is not None
                    npc_result = get_name_puzzle_conditions(curr_block_generator, False)
                    removals_in_curr, additions_in_curr = tx_removals_and_additions(npc_result.npc_list)
                else:
                    removals_in_curr = []
                    additions_in_curr = []

                for c_name in removals_in_curr:
                    removals_since_fork.add(c_name)
                for c in additions_in_curr:
                    additions_since_fork[c.name()] = (c, curr.height)

                for coinbase_coin in curr.get_included_reward_coins():
                    additions_since_fork[coinbase_coin.name()] = (coinbase_coin, curr.height)
                    coinbases_since_fork[coinbase_coin.name()] = curr.height
                if curr.height == 0:
                    break
                curr = reorg_blocks[curr.height - 1]
                assert curr is not None

        removal_coin_records: Dict[bytes32, CoinRecord] = {}
        for rem in removals:
            if rem in additions_dic:
                # Ephemeral coin
                rem_coin: Coin = additions_dic[rem]
                new_unspent: CoinRecord = CoinRecord(
                    rem_coin,
                    height,
                    uint32(0),
                    False,
                    (rem in coinbases_since_fork),
                    block.foliage_transaction_block.timestamp,
                )
                removal_coin_records[new_unspent.name] = new_unspent
            else:
                unspent = await coin_store.get_coin_record(rem)
                if unspent is not None and unspent.confirmed_block_index <= coin_store_reorg_height:
                    # Spending something in the current chain, confirmed before fork
                    # (We ignore all coins confirmed after fork)
                    if unspent.spent == 1 and unspent.spent_block_index <= coin_store_reorg_height:
                        # Check for coins spent in an ancestor block
                        return Err.DOUBLE_SPEND, None
                    removal_coin_records[unspent.name] = unspent
                else:
                    # This coin is not in the current heaviest chain, so it must be in the fork
                    if rem not in additions_since_fork:
                        # Check for spending a coin that does not exist in this fork
                        # TODO: fix this, there is a consensus bug here
                        return Err.UNKNOWN_UNSPENT, None
                    new_coin, confirmed_height = additions_since_fork[rem]
                    new_coin_record: CoinRecord = CoinRecord(
                        new_coin,
                        confirmed_height,
                        uint32(0),
                        False,
                        (rem in coinbases_since_fork),
                        block.foliage_transaction_block.timestamp,
                    )
                    removal_coin_records[new_coin_record.name] = new_coin_record

                # This check applies to both coins created before fork (pulled from coin_store),
                # and coins created after fork (additions_since_fork)
                if rem in removals_since_fork:
                    # This coin was spent in the fork
                    return Err.DOUBLE_SPEND, None

        removed = 0
        for unspent in removal_coin_records.values():
            removed += unspent.coin.amount

        added = 0
        for coin in additions:
            added += coin.amount

        # 16. Check that the total coin amount for added is <= removed
        if removed < added:
            return Err.MINTING_COIN, None

        fees = removed - added
        assert_fee_sum: uint64 = uint64(0)

        for npc in npc_list:
            if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
                fee_list: List[ConditionWithArgs] = npc.condition_dict[ConditionOpcode.RESERVE_FEE]
                for cvp in fee_list:
                    fee = int_from_bytes(cvp.vars[0])
                    assert_fee_sum = assert_fee_sum + fee

        # 17. Check that the assert fee sum <= fees
        if fees < assert_fee_sum:
            return Err.RESERVE_FEE_CONDITION_FAILED, None

        # 18. Check that the assert fee amount < maximum coin amount
        if fees > constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 19. Check that the computed fees are equal to the fees in the block header
        if block.transactions_info.fees != fees:
            return Err.INVALID_BLOCK_FEE_AMOUNT, None

        # 20. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
        for unspent in removal_coin_records.values():
            if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
                return Err.WRONG_PUZZLE_HASH, None

        # 21. Verify conditions
        # create hash_key list for aggsig check
        pairs_pks = []
        pairs_msgs = []
        for npc in npc_list:
            assert height is not None
            unspent = removal_coin_records[npc.coin_name]
            error = mempool_check_conditions_dict(
                unspent,
                coin_announcement_names,
                puzzle_announcement_names,
                npc.condition_dict,
                prev_transaction_block_height,
                block.foliage_transaction_block.timestamp,
            )
            if error:
                return error, None
            for pk, m in pkm_pairs_for_conditions_dict(
                npc.condition_dict, npc.coin_name, constants.AGG_SIG_ME_ADDITIONAL_DATA
            ):
                pairs_pks.append(pk)
                pairs_msgs.append(m)

        # 22. Verify aggregated signature
        # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
        if not block.transactions_info.aggregated_signature:
            return Err.BAD_AGGREGATE_SIGNATURE, None

        # noinspection PyTypeChecker
        if not AugSchemeMPL.aggregate_verify(pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature):
            return Err.BAD_AGGREGATE_SIGNATURE, None

        return None, npc_result
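
# A toy classifier for step 15 above (hypothetical, pure Python): every removal
# must come from this block itself (ephemeral), from the coin store on the
# common chain, or from a block created after the fork point; anything else is
# unknown, and anything already spent in the fork is a double spend.
from typing import Set


def classify_removal(
    rem: bytes,
    additions_this_block: Set[bytes],  # additions_dic
    unspent_in_store: Set[bytes],      # coin_store, confirmed before fork and unspent
    additions_since_fork: Set[bytes],
    removals_since_fork: Set[bytes],
) -> str:
    if rem in additions_this_block:
        return "ephemeral"             # created and spent in the same block
    if rem in removals_since_fork:
        return "DOUBLE_SPEND"          # already spent in the fork
    if rem in unspent_in_store:
        return "from_coin_store"
    if rem not in additions_since_fork:
        return "UNKNOWN_UNSPENT"
    return "from_fork"


assert classify_removal(b"a", {b"a"}, set(), set(), set()) == "ephemeral"
assert classify_removal(b"b", set(), {b"b"}, set(), set()) == "from_coin_store"
assert classify_removal(b"c", set(), set(), {b"c"}, set()) == "from_fork"
assert classify_removal(b"d", set(), set(), set(), set()) == "UNKNOWN_UNSPENT"
assert classify_removal(b"e", set(), set(), {b"e"}, {b"e"}) == "DOUBLE_SPEND"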
async def _reconsider_peak(
    self,
    block_record: BlockRecord,
    genesis: bool,
    fork_point_with_peak: Optional[uint32],
    npc_result: Optional[NPCResult],
) -> Tuple[Optional[uint32], Optional[uint32], List[BlockRecord]]:
    """
    When a new block is added, this is called, to check if the new block is the new peak of the chain.
    This also handles reorgs by reverting blocks which are not in the heaviest chain.
    It returns the height of the fork between the previous chain and the new chain, the new peak height,
    and the block records added, or (None, None, []) if there was no update to the heaviest chain.
    """
    peak = self.get_peak()
    if genesis:
        if peak is None:
            block: Optional[FullBlock] = await self.block_store.get_full_block(block_record.header_hash)
            assert block is not None

            if npc_result is not None:
                tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
            else:
                tx_removals, tx_additions = [], []
            await self.coin_store.new_block(block, tx_additions, tx_removals)
            await self.block_store.set_peak(block.header_hash)
            return uint32(0), uint32(0), [block_record]
        return None, None, []

    assert peak is not None
    if block_record.weight > peak.weight:
        # Find the fork. If the block is just being appended, it will return the peak.
        # If no blocks are in common, returns -1, and reverts all blocks
        if block_record.prev_hash == peak.header_hash:
            fork_height: int = peak.height
        elif fork_point_with_peak is not None:
            fork_height = fork_point_with_peak
        else:
            fork_height = find_fork_point_in_chain(self, block_record, peak)

        if block_record.prev_hash != peak.header_hash:
            # This is a reorg, so roll the coin store back to the fork point
            await self.coin_store.rollback_to_block(fork_height)

        # Rollback sub_epoch_summaries
        heights_to_delete = []
        for ses_included_height in self.__sub_epoch_summaries.keys():
            if ses_included_height > fork_height:
                heights_to_delete.append(ses_included_height)
        for height in heights_to_delete:
            log.info(f"delete ses at height {height}")
            del self.__sub_epoch_summaries[height]

        # Collect all blocks from fork point to new peak
        blocks_to_add: List[Tuple[FullBlock, BlockRecord]] = []
        curr = block_record.header_hash

        while fork_height < 0 or curr != self.height_to_hash(uint32(fork_height)):
            fetched_full_block: Optional[FullBlock] = await self.block_store.get_full_block(curr)
            fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
            assert fetched_full_block is not None
            assert fetched_block_record is not None
            blocks_to_add.append((fetched_full_block, fetched_block_record))
            if fetched_full_block.height == 0:
                # Doing a full reorg, starting at height 0
                break
            curr = fetched_block_record.prev_hash

        records_to_add = []
        for fetched_full_block, fetched_block_record in reversed(blocks_to_add):
            records_to_add.append(fetched_block_record)
            if fetched_block_record.is_transaction_block:
                if fetched_block_record.header_hash == block_record.header_hash:
                    tx_removals, tx_additions = await self.get_tx_removals_and_additions(
                        fetched_full_block, npc_result
                    )
                else:
                    tx_removals, tx_additions = await self.get_tx_removals_and_additions(fetched_full_block, None)
                await self.coin_store.new_block(fetched_full_block, tx_additions, tx_removals)

        # Changes the peak to be the new peak
        await self.block_store.set_peak(block_record.header_hash)
        return uint32(max(fork_height, 0)), block_record.height, records_to_add

    # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
    return None, None, []
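
# A toy of the reorg sequence above (hypothetical, dict-based): roll the coin
# set back to the fork height, then re-apply the additions/removals of each
# block from fork_height + 1 up to the new peak, in ascending order.
from typing import Dict, List, Tuple

CoinSet = Dict[bytes, int]  # coin id -> confirmed height
Block = Tuple[int, List[bytes], List[bytes]]  # (height, additions, removals)


def rollback_to_block(coins: CoinSet, fork_height: int) -> None:
    # Drop every coin confirmed after the fork point. (The real coin store also
    # un-spends coins that were spent after it; omitted here for brevity.)
    for coin_id in [c for c, h in coins.items() if h > fork_height]:
        del coins[coin_id]


def apply_chain(coins: CoinSet, new_chain: List[Block]) -> None:
    for height, additions, removals in new_chain:  # ascending heights
        for coin_id in additions:
            coins[coin_id] = height
        for coin_id in removals:
            del coins[coin_id]


coins: CoinSet = {b"g": 0, b"old1": 1, b"old2": 2}
rollback_to_block(coins, 0)  # reorg: fork at height 0
apply_chain(coins, [(1, [b"new1"], []), (2, [b"new2"], [b"new1"])])
assert coins == {b"g": 0, b"new2": 2}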
async def validate_block_body( constants: ConsensusConstants, blocks: BlockchainInterface, block_store: BlockStore, coin_store: CoinStore, peak: Optional[BlockRecord], block: Union[FullBlock, UnfinishedBlock], height: uint32, npc_result: Optional[NPCResult], fork_point_with_peak: Optional[uint32], get_block_generator: Callable, validate_signature=True, ) -> Tuple[Optional[Err], Optional[NPCResult]]: """ This assumes the header block has been completely validated. Validates the transactions and body of the block. Returns None for the first value if everything validates correctly, or an Err if something does not validate. For the second value, returns a CostResult only if validation succeeded, and there are transactions. In other cases it returns None. The NPC result is the result of running the generator with the previous generators refs. It is only present for transaction blocks which have spent coins. """ if isinstance(block, FullBlock): assert height == block.height prev_transaction_block_height: uint32 = uint32(0) # 1. For non transaction-blocs: foliage block, transaction filter, transactions info, and generator must # be empty. If it is a block but not a transaction block, there is no body to validate. Check that all fields are # None if block.foliage.foliage_transaction_block_hash is None: if (block.foliage_transaction_block is not None or block.transactions_info is not None or block.transactions_generator is not None): return Err.NOT_BLOCK_BUT_HAS_DATA, None prev_tb: BlockRecord = blocks.block_record(block.prev_header_hash) while not prev_tb.is_transaction_block: prev_tb = blocks.block_record(prev_tb.prev_hash) assert prev_tb.timestamp is not None if len(block.transactions_generator_ref_list) > 0: return Err.NOT_BLOCK_BUT_HAS_DATA, None return None, None # This means the block is valid # All checks below this point correspond to transaction blocks # 2. For blocks, foliage block, transactions info must not be empty if block.foliage_transaction_block is None or block.transactions_info is None: return Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA, None assert block.foliage_transaction_block is not None # keeps track of the reward coins that need to be incorporated expected_reward_coins: Set[Coin] = set() # 3. The transaction info hash in the Foliage block must match the transaction info if block.foliage_transaction_block.transactions_info_hash != std_hash( block.transactions_info): return Err.INVALID_TRANSACTIONS_INFO_HASH, None # 4. The foliage block hash in the foliage block must match the foliage block if block.foliage.foliage_transaction_block_hash != std_hash( block.foliage_transaction_block): return Err.INVALID_FOLIAGE_BLOCK_HASH, None # 5. 
The reward claims must be valid for the previous blocks, and current block fees # If height == 0, expected_reward_coins will be left empty if height > 0: # Add reward claims for all blocks from the prev prev block, until the prev block (including the latter) prev_transaction_block = blocks.block_record( block.foliage_transaction_block.prev_transaction_block_hash) prev_transaction_block_height = prev_transaction_block.height assert prev_transaction_block.fees is not None pool_coin = create_pool_coin( prev_transaction_block_height, prev_transaction_block.pool_puzzle_hash, calculate_pool_reward(prev_transaction_block.height), constants.GENESIS_CHALLENGE, ) farmer_coin = create_farmer_coin( prev_transaction_block_height, prev_transaction_block.farmer_puzzle_hash, uint64( calculate_base_farmer_reward(prev_transaction_block.height) + prev_transaction_block.fees), constants.GENESIS_CHALLENGE, ) # Adds the previous block expected_reward_coins.add(pool_coin) expected_reward_coins.add(farmer_coin) # For the second block in the chain, don't go back further if prev_transaction_block.height > 0: curr_b = blocks.block_record(prev_transaction_block.prev_hash) while not curr_b.is_transaction_block: expected_reward_coins.add( create_pool_coin( curr_b.height, curr_b.pool_puzzle_hash, calculate_pool_reward(curr_b.height), constants.GENESIS_CHALLENGE, )) expected_reward_coins.add( create_farmer_coin( curr_b.height, curr_b.farmer_puzzle_hash, calculate_base_farmer_reward(curr_b.height), constants.GENESIS_CHALLENGE, )) curr_b = blocks.block_record(curr_b.prev_hash) if set(block.transactions_info.reward_claims_incorporated ) != expected_reward_coins: return Err.INVALID_REWARD_COINS, None if len(block.transactions_info.reward_claims_incorporated) != len( expected_reward_coins): return Err.INVALID_REWARD_COINS, None removals: List[bytes32] = [] coinbase_additions: List[Coin] = list(expected_reward_coins) additions: List[Coin] = [] npc_list: List[NPC] = [] removals_puzzle_dic: Dict[bytes32, bytes32] = {} cost: uint64 = uint64(0) # In header validation we check that timestamp is not more that 10 minutes into the future # 6. No transactions before INITIAL_TRANSACTION_FREEZE timestamp # (this test has been removed) # 7a. The generator root must be the hash of the serialized bytes of # the generator for this block (or zeroes if no generator) if block.transactions_generator is not None: if std_hash(bytes(block.transactions_generator) ) != block.transactions_info.generator_root: return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None else: if block.transactions_info.generator_root != bytes([0] * 32): return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None # 8a. The generator_ref_list must be the hash of the serialized bytes of # the generator ref list for this block (or 'one' bytes [0x01] if no generator) # 8b. The generator ref list length must be less than or equal to MAX_GENERATOR_REF_LIST_SIZE entries # 8c. 
    # 8c. The generator ref list must not point to a height >= this block's height
    if block.transactions_generator_ref_list in (None, []):
        if block.transactions_info.generator_refs_root != bytes([1] * 32):
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
    else:
        # If we have a generator reference list, we must have a generator
        if block.transactions_generator is None:
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None

        # The generator_refs_root must be the hash of the concatenation of the List[uint32]
        generator_refs_hash = std_hash(
            b"".join([bytes(i) for i in block.transactions_generator_ref_list])
        )
        if block.transactions_info.generator_refs_root != generator_refs_hash:
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
        if len(block.transactions_generator_ref_list) > constants.MAX_GENERATOR_REF_LIST_SIZE:
            return Err.TOO_MANY_GENERATOR_REFS, None
        if any(index >= height for index in block.transactions_generator_ref_list):
            return Err.FUTURE_GENERATOR_REFS, None

    if block.transactions_generator is not None:
        # Get the list of names removed, puzzle hashes for removed coins, and conditions created
        assert npc_result is not None
        cost = calculate_cost_of_program(block.transactions_generator, npc_result, constants.COST_PER_BYTE)
        npc_list = npc_result.npc_list

        # 7. Check that cost <= MAX_BLOCK_COST_CLVM
        log.debug(
            f"Cost: {cost} max: {constants.MAX_BLOCK_COST_CLVM} "
            f"percent full: {round(100 * (cost / constants.MAX_BLOCK_COST_CLVM), 2)}%"
        )
        if cost > constants.MAX_BLOCK_COST_CLVM:
            return Err.BLOCK_COST_EXCEEDS_MAX, None

        # 8. The CLVM program must not return any errors
        if npc_result.error is not None:
            return Err(npc_result.error), None

        for npc in npc_list:
            removals.append(npc.coin_name)
            removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash
        additions = additions_for_npc(npc_list)
    else:
        assert npc_result is None

    # 9. Check that the correct cost is in the transactions info
    if block.transactions_info.cost != cost:
        return Err.INVALID_BLOCK_COST, None

    additions_dic: Dict[bytes32, Coin] = {}
    # 10. Check additions for max coin amount
    # Be careful to check for 64-bit overflows in other languages. This is the max 64-bit
    # unsigned integer. We will not even reach here because Coins do type checking (uint64).
    for coin in additions + coinbase_additions:
        additions_dic[coin.name()] = coin
        if coin.amount < 0:
            return Err.COIN_AMOUNT_NEGATIVE, None
        if coin.amount > constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

    # 11. Validate addition and removal merkle roots
    root_error = validate_block_merkle_roots(
        block.foliage_transaction_block.additions_root,
        block.foliage_transaction_block.removals_root,
        additions + coinbase_additions,
        removals,
    )
    if root_error:
        return root_error, None

    # 12. The additions and removals must result in the correct filter
    byte_array_tx: List[bytes32] = []

    for coin in additions + coinbase_additions:
        # TODO: address hint error and remove ignore
        #       error: Argument 1 to "append" of "list" has incompatible type "bytearray";
        #       expected "bytes32"  [arg-type]
        byte_array_tx.append(bytearray(coin.puzzle_hash))  # type: ignore[arg-type]
    for coin_name in removals:
        # TODO: address hint error and remove ignore
        #       error: Argument 1 to "append" of "list" has incompatible type "bytearray";
        #       expected "bytes32"  [arg-type]
        byte_array_tx.append(bytearray(coin_name))  # type: ignore[arg-type]

    bip158: PyBIP158 = PyBIP158(byte_array_tx)
    encoded_filter = bytes(bip158.GetEncoded())
    filter_hash = std_hash(encoded_filter)

    if filter_hash != block.foliage_transaction_block.filter_hash:
        return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

    # 13. Check for duplicate outputs in additions
    addition_counter = collections.Counter(_.name() for _ in additions + coinbase_additions)
    for k, v in addition_counter.items():
        if v > 1:
            return Err.DUPLICATE_OUTPUT, None

    # 14. Check for duplicate spends inside the block
    removal_counter = collections.Counter(removals)
    for k, v in removal_counter.items():
        if v > 1:
            return Err.DOUBLE_SPEND, None

    # 15. Check if removals exist and were not previously spent (unspent_db + diff_store + this_block)
    # The fork point is the last block in common between the peak chain and the chain of `block`
    if peak is None or height == 0:
        fork_h: int = -1
    elif fork_point_with_peak is not None:
        fork_h = fork_point_with_peak
    else:
        fork_h = find_fork_point_in_chain(blocks, peak, blocks.block_record(block.prev_header_hash))

    # Get additions and removals since (after) fork_h but not including this block.
    # The values include: the coin that was added, the height of the block in which it was
    # confirmed, and the timestamp of the block in which it was confirmed.
    additions_since_fork: Dict[bytes32, Tuple[Coin, uint32, uint64]] = {}  # This includes coinbase additions
    removals_since_fork: Set[bytes32] = set()

    # For height 0, there are no additions and removals before this block, so we can skip
    if height > 0:
        # First, get all the blocks in the fork > fork_h, < block.height
        prev_block: Optional[FullBlock] = await block_store.get_full_block(block.prev_header_hash)
        reorg_blocks: Dict[uint32, FullBlock] = {}
        curr: Optional[FullBlock] = prev_block
        assert curr is not None
        while curr.height > fork_h:
            if curr.height == 0:
                break
            curr = await block_store.get_full_block(curr.prev_header_hash)
            assert curr is not None
            reorg_blocks[curr.height] = curr
        if fork_h != -1:
            assert len(reorg_blocks) == height - fork_h - 1

        curr = prev_block
        assert curr is not None
        while curr.height > fork_h:
            # The coin store doesn't contain coins from the fork, so we have to run the
            # generator for each block in the fork
            if curr.transactions_generator is not None:
                # These blocks are in the past and therefore assumed to be valid, so
                # get_block_generator won't raise
                curr_block_generator: Optional[BlockGenerator] = await get_block_generator(curr)
                assert curr_block_generator is not None and curr.transactions_info is not None
                curr_npc_result = get_name_puzzle_conditions(
                    curr_block_generator,
                    min(constants.MAX_BLOCK_COST_CLVM, curr.transactions_info.cost),
                    cost_per_byte=constants.COST_PER_BYTE,
                    mempool_mode=False,
                )
                removals_in_curr, additions_in_curr = tx_removals_and_additions(curr_npc_result.npc_list)
            else:
                removals_in_curr = []
                additions_in_curr = []

            for c_name in removals_in_curr:
                assert c_name not in removals_since_fork
                removals_since_fork.add(c_name)
            for c in additions_in_curr:
                assert c.name() not in additions_since_fork
                assert curr.foliage_transaction_block is not None
                additions_since_fork[c.name()] = (c, curr.height, curr.foliage_transaction_block.timestamp)

            for coinbase_coin in curr.get_included_reward_coins():
                assert coinbase_coin.name() not in additions_since_fork
                assert curr.foliage_transaction_block is not None
                additions_since_fork[coinbase_coin.name()] = (
                    coinbase_coin,
                    curr.height,
                    curr.foliage_transaction_block.timestamp,
                )
            if curr.height == 0:
                break
            curr = reorg_blocks[curr.height - 1]
            assert curr is not None

    removal_coin_records: Dict[bytes32, CoinRecord] = {}
    for rem in removals:
        if rem in additions_dic:
            # Ephemeral coin: created and spent within this same block
            rem_coin: Coin = additions_dic[rem]
            new_unspent: CoinRecord = CoinRecord(
                rem_coin,
                height,
                height,
                False,
                block.foliage_transaction_block.timestamp,
            )
            removal_coin_records[new_unspent.name] = new_unspent
        else:
            unspent = await coin_store.get_coin_record(rem)
            if unspent is not None and unspent.confirmed_block_index <= fork_h:
                # Spending something in the current chain, confirmed before the fork
                # (we ignore all coins confirmed after the fork)
                if unspent.spent == 1 and unspent.spent_block_index <= fork_h:
                    # Check for coins spent in an ancestor block
                    return Err.DOUBLE_SPEND, None
                removal_coin_records[unspent.name] = unspent
            else:
                # This coin is not in the current heaviest chain, so it must be in the fork
                if rem not in additions_since_fork:
                    # Check for spending a coin that does not exist in this fork
                    log.error(f"Err.UNKNOWN_UNSPENT: COIN ID: {rem} NPC RESULT: {npc_result}")
                    return Err.UNKNOWN_UNSPENT, None
                new_coin, confirmed_height, confirmed_timestamp = additions_since_fork[rem]
                new_coin_record: CoinRecord = CoinRecord(
                    new_coin,
                    confirmed_height,
                    uint32(0),
                    False,
                    confirmed_timestamp,
                )
                removal_coin_records[new_coin_record.name] = new_coin_record

        # This check applies to both coins created before the fork (pulled from coin_store),
        # and coins created after the fork (additions_since_fork)
        if rem in removals_since_fork:
            # This coin was spent in the fork
            return Err.DOUBLE_SPEND_IN_FORK, None

    removed = 0
    for unspent in removal_coin_records.values():
        removed += unspent.coin.amount

    added = 0
    for coin in additions:
        added += coin.amount

    # 16. Check that the total coin amount for added is <= removed
    if removed < added:
        return Err.MINTING_COIN, None

    fees = removed - added
    assert fees >= 0

    # 17. Check that the assert fee sum <= fees, and that each reserved fee is non-negative
    assert_fee_sum: uint128 = uint128(0)
    for npc in npc_list:
        if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
            fee_list: List[ConditionWithArgs] = npc.condition_dict[ConditionOpcode.RESERVE_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                if fee < 0:
                    return Err.RESERVE_FEE_CONDITION_FAILED, None
                assert_fee_sum = uint128(assert_fee_sum + fee)
    if fees < assert_fee_sum:
        return Err.RESERVE_FEE_CONDITION_FAILED, None

    # 18. Check that the fee amount + farmer reward < maximum coin amount
    if fees + calculate_base_farmer_reward(height) > constants.MAX_COIN_AMOUNT:
        return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

    # 19. Check that the computed fees are equal to the fees in the block header
    if block.transactions_info.fees != fees:
        return Err.INVALID_BLOCK_FEE_AMOUNT, None

    # 20. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
    for unspent in removal_coin_records.values():
        if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
            return Err.WRONG_PUZZLE_HASH, None

    # 21. Verify conditions
    for npc in npc_list:
        assert height is not None
        unspent = removal_coin_records[npc.coin_name]
        error = mempool_check_conditions_dict(
            unspent,
            npc.condition_dict,
            prev_transaction_block_height,
            block.foliage_transaction_block.timestamp,
        )
        if error:
            return error, None

    # Create the (public key, message) pairs for the aggregated signature check
    pairs_pks, pairs_msgs = pkm_pairs(npc_list, constants.AGG_SIG_ME_ADDITIONAL_DATA)

    # 22. Verify aggregated signature
    # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
    if not block.transactions_info.aggregated_signature:
        return Err.BAD_AGGREGATE_SIGNATURE, None

    # The pairing cache is not useful while syncing, as each pairing is seen only once,
    # so the extra effort of populating it is not justified. However, we force caching
    # of pairings just for unfinished blocks, as the cache is likely to be useful when
    # validating the corresponding finished blocks later.
    if validate_signature:
        force_cache: bool = isinstance(block, UnfinishedBlock)
        if not cached_bls.aggregate_verify(
            pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature, force_cache
        ):
            return Err.BAD_AGGREGATE_SIGNATURE, None

    return None, npc_result
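
# A minimal, self-contained sketch (not part of the validation code above) of the BIP158
# round trip behind check 12, using only PyBIP158 calls that appear in this file: build
# the filter from bytearrays, encode it, re-create it from the encoded bytes, and Match.
# The 32-byte values are made up for illustration.
from chiabip158 import PyBIP158

from chia.util.hash import std_hash


def sketch_bip158_round_trip() -> None:
    added_puzzle_hash = bytes([1] * 32)  # stands in for an added coin's puzzle hash
    removed_coin_name = bytes([2] * 32)  # stands in for a spent coin's name

    # The block builder inserts one element per addition puzzle hash and per removal name
    tx_filter = PyBIP158([bytearray(added_puzzle_hash), bytearray(removed_coin_name)])
    encoded_filter = bytes(tx_filter.GetEncoded())

    # Check 12 compares the hash of these encoded bytes against the foliage transaction block
    filter_hash = std_hash(encoded_filter)
    assert len(filter_hash) == 32

    # A wallet re-creates the filter from the encoded bytes and probes it for items of
    # interest. BIP158 allows false positives, but a non-match is always correct.
    received = PyBIP158([b for b in encoded_filter])
    assert received.Match(bytearray(added_puzzle_hash))
    assert received.Match(bytearray(removed_coin_name))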

async def get_block_generator(
    self, block: Union[FullBlock, UnfinishedBlock], additional_blocks=None
) -> Optional[BlockGenerator]:
    if additional_blocks is None:
        additional_blocks = {}
    ref_list = block.transactions_generator_ref_list
    if block.transactions_generator is None:
        assert len(ref_list) == 0
        return None
    if len(ref_list) == 0:
        return BlockGenerator(block.transactions_generator, [], [])

    result: List[SerializedProgram] = []
    previous_block_hash = block.prev_header_hash
    if (
        self.try_block_record(previous_block_hash)
        and self.height_to_hash(self.block_record(previous_block_hash).height) == previous_block_hash
    ):
        # We are not in a reorg, no need to look up alternate header hashes
        # (we can get them from height_to_hash)
        for ref_height in block.transactions_generator_ref_list:
            header_hash = self.height_to_hash(ref_height)

            # If ref_height is invalid, this block should have failed with
            # FUTURE_GENERATOR_REFS before getting here
            assert header_hash is not None

            ref_block = await self.block_store.get_full_block(header_hash)
            assert ref_block is not None
            if ref_block.transactions_generator is None:
                raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
            result.append(ref_block.transactions_generator)
    else:
        # First, try to find the blocks in additional_blocks
        reorg_chain: Dict[uint32, FullBlock] = {}
        curr: Union[FullBlock, UnfinishedBlock] = block
        additional_height_dict = {}
        while curr.prev_header_hash in additional_blocks:
            prev: FullBlock = additional_blocks[curr.prev_header_hash]
            additional_height_dict[prev.height] = prev
            if isinstance(curr, FullBlock):
                assert curr.height == prev.height + 1
            reorg_chain[prev.height] = prev
            curr = prev

        peak: Optional[BlockRecord] = self.get_peak()
        if self.contains_block(curr.prev_header_hash) and peak is not None:
            # Then look up blocks back to the fork point, one at a time, backtracking
            previous_block_hash = curr.prev_header_hash
            prev_block_record = await self.block_store.get_block_record(previous_block_hash)
            prev_block = await self.block_store.get_full_block(previous_block_hash)
            assert prev_block is not None
            assert prev_block_record is not None
            fork = find_fork_point_in_chain(self, peak, prev_block_record)
            curr_2: Optional[FullBlock] = prev_block
            assert curr_2 is not None and isinstance(curr_2, FullBlock)
            reorg_chain[curr_2.height] = curr_2
            while curr_2.height > fork and curr_2.height > 0:
                curr_2 = await self.block_store.get_full_block(curr_2.prev_header_hash)
                assert curr_2 is not None
                reorg_chain[curr_2.height] = curr_2

        for ref_height in block.transactions_generator_ref_list:
            if ref_height in reorg_chain:
                ref_block = reorg_chain[ref_height]
                assert ref_block is not None
                if ref_block.transactions_generator is None:
                    raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
                result.append(ref_block.transactions_generator)
            else:
                if ref_height in additional_height_dict:
                    ref_block = additional_height_dict[ref_height]
                else:
                    header_hash = self.height_to_hash(ref_height)
                    # TODO: address hint error and remove ignore
                    #       error: Argument 1 to "get_full_block" of "Blockchain" has incompatible
                    #       type "Optional[bytes32]"; expected "bytes32"  [arg-type]
                    ref_block = await self.get_full_block(header_hash)  # type: ignore[arg-type]
                assert ref_block is not None
                if ref_block.transactions_generator is None:
                    raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
                result.append(ref_block.transactions_generator)

    assert len(result) == len(ref_list)
    return BlockGenerator(block.transactions_generator, result, [])
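
# For reference, a toy sketch of the fork-point search used by get_block_generator above
# and the _reconsider_peak variants below. The real find_fork_point_in_chain operates on
# BlockRecords through a BlockchainInterface; this simplified version assumes plain
# objects with header_hash/prev_hash/height and an in-memory lookup, purely to illustrate
# the -1 convention for "no blocks in common".
from typing import Dict


class ToyBlock:
    def __init__(self, header_hash: bytes, prev_hash: bytes, height: int) -> None:
        self.header_hash = header_hash
        self.prev_hash = prev_hash
        self.height = height


def toy_find_fork_point(blocks: Dict[bytes, ToyBlock], b1: ToyBlock, b2: ToyBlock) -> int:
    # Walk the higher chain down until both cursors are at the same height
    while b1.height > b2.height:
        b1 = blocks[b1.prev_hash]
    while b2.height > b1.height:
        b2 = blocks[b2.prev_hash]
    # Step both chains down together until the header hashes agree
    while b1.height > 0:
        if b1.header_hash == b2.header_hash:
            return b1.height
        b1 = blocks[b1.prev_hash]
        b2 = blocks[b2.prev_hash]
    # Either a shared genesis (fork point 0) or no block in common (-1)
    return 0 if b1.header_hash == b2.header_hash else -1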

async def _reconsider_peak(
    self,
    block_record: BlockRecord,
    genesis: bool,
    fork_point_with_peak: Optional[uint32],
    npc_result: Optional[NPCResult],
) -> Tuple[
    Optional[uint32],
    Optional[uint32],
    List[BlockRecord],
    Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]],
]:
    """
    When a new block is added, this is called, to check if the new block is the new peak of the chain.
    This also handles reorgs by reverting blocks which are not in the heaviest chain.
    It returns the height of the fork between the previous chain and the new chain, or returns
    None if there was no update to the heaviest chain.
    """
    peak = self.get_peak()
    latest_coin_state: Dict[bytes32, CoinRecord] = {}
    hint_coin_state: Dict[bytes, Dict[bytes32, CoinRecord]] = {}

    if genesis:
        if peak is None:
            block: Optional[FullBlock] = await self.block_store.get_full_block(block_record.header_hash)
            assert block is not None

            if npc_result is not None:
                tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
            else:
                tx_removals, tx_additions = [], []
            if block.is_transaction_block():
                assert block.foliage_transaction_block is not None
                added = await self.coin_store.new_block(
                    block.height,
                    block.foliage_transaction_block.timestamp,
                    block.get_included_reward_coins(),
                    tx_additions,
                    tx_removals,
                )
            else:
                added = []
            await self.block_store.set_in_chain([(block_record.header_hash,)])
            await self.block_store.set_peak(block_record.header_hash)
            return uint32(0), uint32(0), [block_record], (added, {})
        return None, None, [], ([], {})

    assert peak is not None
    if block_record.weight > peak.weight:
        # Find the fork. If the block is just being appended, it will return the peak.
        # If no blocks are in common, returns -1, and we revert all blocks.
        if block_record.prev_hash == peak.header_hash:
            fork_height: int = peak.height
        elif fork_point_with_peak is not None:
            fork_height = fork_point_with_peak
        else:
            fork_height = find_fork_point_in_chain(self, block_record, peak)

        if block_record.prev_hash != peak.header_hash:
            roll_changes: List[CoinRecord] = await self.coin_store.rollback_to_block(fork_height)
            for coin_record in roll_changes:
                latest_coin_state[coin_record.name] = coin_record

        # Rollback sub_epoch_summaries
        self.__height_map.rollback(fork_height)
        await self.block_store.rollback(fork_height)

        # Collect all blocks from the fork point to the new peak
        blocks_to_add: List[Tuple[FullBlock, BlockRecord]] = []
        curr = block_record.header_hash
        while fork_height < 0 or curr != self.height_to_hash(uint32(fork_height)):
            fetched_full_block: Optional[FullBlock] = await self.block_store.get_full_block(curr)
            fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
            assert fetched_full_block is not None
            assert fetched_block_record is not None
            blocks_to_add.append((fetched_full_block, fetched_block_record))
            if fetched_full_block.height == 0:
                # Doing a full reorg, starting at height 0
                break
            curr = fetched_block_record.prev_hash

        records_to_add = []
        for fetched_full_block, fetched_block_record in reversed(blocks_to_add):
            records_to_add.append(fetched_block_record)
            if fetched_full_block.is_transaction_block():
                if fetched_block_record.header_hash == block_record.header_hash:
                    tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions(
                        fetched_full_block, npc_result
                    )
                else:
                    tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions(
                        fetched_full_block, None
                    )
                assert fetched_full_block.foliage_transaction_block is not None
                added_rec = await self.coin_store.new_block(
                    fetched_full_block.height,
                    fetched_full_block.foliage_transaction_block.timestamp,
                    fetched_full_block.get_included_reward_coins(),
                    tx_additions,
                    tx_removals,
                )
                removed_rec: List[Optional[CoinRecord]] = [
                    await self.coin_store.get_coin_record(name) for name in tx_removals
                ]

                # Set additions first, then removals, in order to handle ephemeral coin state.
                # Adding in height order is also required.
                record: Optional[CoinRecord]
                for record in added_rec:
                    assert record
                    latest_coin_state[record.name] = record
                for record in removed_rec:
                    assert record
                    latest_coin_state[record.name] = record

                if npc_res is not None:
                    hint_list: List[Tuple[bytes32, bytes]] = self.get_hint_list(npc_res)
                    await self.hint_store.add_hints(hint_list)
                    # There can be multiple coins for the same hint
                    for coin_id, hint in hint_list:
                        key = hint
                        if key not in hint_coin_state:
                            hint_coin_state[key] = {}
                        hint_coin_state[key][coin_id] = latest_coin_state[coin_id]

        await self.block_store.set_in_chain([(br.header_hash,) for br in records_to_add])

        # Changes the peak to be the new peak
        await self.block_store.set_peak(block_record.header_hash)
        return (
            uint32(max(fork_height, 0)),
            block_record.height,
            records_to_add,
            (list(latest_coin_state.values()), hint_coin_state),
        )

    # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
    return None, None, [], ([], {})
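
# A hedged sketch of what the tx_removals_and_additions() helper used above computes from
# an NPC list: each NPC's coin_name is a removal, and each CREATE_COIN condition yields an
# addition whose parent is the spent coin. This mirrors how npc.coin_name, condition_dict,
# and int_from_bytes are used elsewhere in this file; it is an illustration, not the
# canonical helper.
from typing import List, Tuple

from clvm.casts import int_from_bytes

from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.condition_opcodes import ConditionOpcode
from chia.util.ints import uint64


def sketch_tx_removals_and_additions(npc_list) -> Tuple[List[bytes32], List[Coin]]:
    removals: List[bytes32] = []
    additions: List[Coin] = []
    for npc in npc_list:
        # The coin being spent is always a removal
        removals.append(npc.coin_name)
        # Every CREATE_COIN condition describes a new child coin
        for cvp in npc.condition_dict.get(ConditionOpcode.CREATE_COIN, []):
            puzzle_hash = bytes32(cvp.vars[0])
            amount = uint64(int_from_bytes(cvp.vars[1]))
            additions.append(Coin(npc.coin_name, puzzle_hash, amount))
    return removals, additions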

async def _reconsider_peak(
    self,
    block_record: BlockRecord,
    genesis: bool,
    fork_point_with_peak: Optional[uint32],
    additional_coin_spends_from_wallet: Optional[List[CoinSpend]],
    heights_changed: Set[Tuple[uint32, Optional[bytes32]]],
) -> Tuple[Optional[uint32], List[BlockRecord]]:
    """
    When a new block is added, this is called, to check if the new block is the new peak of the chain.
    This also handles reorgs by reverting blocks which are not in the heaviest chain.
    It returns the height of the fork between the previous chain and the new chain, or returns
    None if there was no update to the heaviest chain.
    """
    peak = self.get_peak()
    if genesis:
        if peak is None:
            block: Optional[HeaderBlockRecord] = await self.block_store.get_header_block_record(
                block_record.header_hash
            )
            assert block is not None
            replaced = None
            if uint32(0) in self.__height_to_hash:
                replaced = self.__height_to_hash[uint32(0)]
            self.__height_to_hash[uint32(0)] = block.header_hash
            heights_changed.add((uint32(0), replaced))
            assert len(block.additions) == 0 and len(block.removals) == 0
            await self.new_transaction_block_callback(block.removals, block.additions, block_record, [])
            self._peak_height = uint32(0)
            return uint32(0), [block_record]
        return None, []

    assert peak is not None
    if block_record.weight > peak.weight:
        # Find the fork. If the block is just being appended, it will return the peak.
        # If no blocks are in common, returns -1, and we revert all blocks.
        if fork_point_with_peak is not None:
            fork_h: int = fork_point_with_peak
        else:
            fork_h = find_fork_point_in_chain(self, block_record, peak)

        # Rollback to fork
        self.log.debug(f"fork_h: {fork_h}, SB: {block_record.height}, peak: {peak.height}")
        if block_record.prev_hash != peak.header_hash:
            await self.reorg_rollback(fork_h)

        # Rollback sub_epoch_summaries
        heights_to_delete = []
        for ses_included_height in self.__sub_epoch_summaries.keys():
            if ses_included_height > fork_h:
                heights_to_delete.append(ses_included_height)
        for height in heights_to_delete:
            del self.__sub_epoch_summaries[height]

        # Collect all blocks from the fork point to the new peak
        blocks_to_add: List[Tuple[HeaderBlockRecord, BlockRecord, List[CoinSpend]]] = []
        curr = block_record.header_hash
        while fork_h < 0 or curr != self.height_to_hash(uint32(fork_h)):
            fetched_header_block: Optional[HeaderBlockRecord] = await self.block_store.get_header_block_record(curr)
            fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
            if curr == block_record.header_hash:
                additional_coin_spends = additional_coin_spends_from_wallet
            else:
                additional_coin_spends = await self.block_store.get_additional_coin_spends(curr)
            if additional_coin_spends is None:
                additional_coin_spends = []
            assert fetched_header_block is not None
            assert fetched_block_record is not None
            blocks_to_add.append((fetched_header_block, fetched_block_record, additional_coin_spends))
            if fetched_header_block.height == 0:
                # Doing a full reorg, starting at height 0
                break
            curr = fetched_block_record.prev_hash

        records_to_add: List[BlockRecord] = []
        for fetched_header_block, fetched_block_record, additional_coin_spends in reversed(blocks_to_add):
            replaced = None
            if fetched_block_record.height in self.__height_to_hash:
                replaced = self.__height_to_hash[fetched_block_record.height]
            self.__height_to_hash[fetched_block_record.height] = fetched_block_record.header_hash
            heights_changed.add((fetched_block_record.height, replaced))
            records_to_add.append(fetched_block_record)
            if fetched_block_record.is_transaction_block:
                await self.new_transaction_block_callback(
                    fetched_header_block.removals,
                    fetched_header_block.additions,
                    fetched_block_record,
                    additional_coin_spends,
                )

        # Changes the peak to be the new peak
        await self.block_store.set_peak(block_record.header_hash)
        self._peak_height = block_record.height
        if fork_h < 0:
            return None, records_to_add
        return uint32(fork_h), records_to_add

    # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
    return None, []
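
# The wallet variant above logs every __height_to_hash mutation into heights_changed as
# (height, previous_hash_or_None) pairs. A hedged sketch of how a caller could use that
# log to undo a failed peak switch; this undo helper is hypothetical and not part of the
# original code.
from typing import Dict, Optional, Set, Tuple

from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint32


def undo_height_map_changes(
    height_to_hash: Dict[uint32, bytes32],
    heights_changed: Set[Tuple[uint32, Optional[bytes32]]],
) -> None:
    for height, replaced in heights_changed:
        if replaced is None:
            # The height had no entry before the switch; drop the one that was added
            height_to_hash.pop(height, None)
        else:
            # Restore the hash that was overwritten
            height_to_hash[height] = replaced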

async def _reconsider_peak(
    self, block_record: BlockRecord, genesis: bool, fork_point_with_peak: Optional[uint32]
) -> Optional[uint32]:
    """
    When a new block is added, this is called, to check if the new block is the new peak of the chain.
    This also handles reorgs by reverting blocks which are not in the heaviest chain.
    It returns the height of the fork between the previous chain and the new chain, or returns
    None if there was no update to the heaviest chain.
    """
    peak = self.get_peak()
    if genesis:
        if peak is None:
            block: Optional[FullBlock] = await self.block_store.get_full_block(block_record.header_hash)
            assert block is not None

            # Begins a transaction, because we want to ensure that the coin store and
            # block store are only updated in sync.
            await self.block_store.begin_transaction()
            try:
                await self.coin_store.new_block(block)
                self.__height_to_hash[uint32(0)] = block.header_hash
                self._peak_height = uint32(0)
                await self.block_store.set_peak(block.header_hash)
                await self.block_store.commit_transaction()
            except Exception:
                await self.block_store.rollback_transaction()
                raise
            return uint32(0)
        return None

    assert peak is not None
    if block_record.weight > peak.weight:
        # Find the fork. If the block is just being appended, it will return the peak.
        # If no blocks are in common, returns -1, and we revert all blocks.
        if fork_point_with_peak is not None:
            fork_height: int = fork_point_with_peak
        else:
            fork_height = find_fork_point_in_chain(self, block_record, peak)

        # Begins a transaction, because we want to ensure that the coin store and
        # block store are only updated in sync.
        await self.block_store.begin_transaction()
        try:
            # Rollback to fork
            await self.coin_store.rollback_to_block(fork_height)

            # Rollback sub_epoch_summaries
            heights_to_delete = []
            for ses_included_height in self.__sub_epoch_summaries.keys():
                if ses_included_height > fork_height:
                    heights_to_delete.append(ses_included_height)
            for height in heights_to_delete:
                log.info(f"delete ses at height {height}")
                del self.__sub_epoch_summaries[height]

            if len(heights_to_delete) > 0:
                # Remove segments from the previous fork
                log.info(f"remove segments for se above {fork_height}")
                await self.block_store.delete_sub_epoch_challenge_segments(uint32(fork_height))

            # Collect all blocks from the fork point to the new peak
            blocks_to_add: List[Tuple[FullBlock, BlockRecord]] = []
            curr = block_record.header_hash
            while fork_height < 0 or curr != self.height_to_hash(uint32(fork_height)):
                fetched_full_block: Optional[FullBlock] = await self.block_store.get_full_block(curr)
                fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
                assert fetched_full_block is not None
                assert fetched_block_record is not None
                blocks_to_add.append((fetched_full_block, fetched_block_record))
                if fetched_full_block.height == 0:
                    # Doing a full reorg, starting at height 0
                    break
                curr = fetched_block_record.prev_hash

            for fetched_full_block, fetched_block_record in reversed(blocks_to_add):
                self.__height_to_hash[fetched_block_record.height] = fetched_block_record.header_hash
                if fetched_block_record.is_transaction_block:
                    await self.coin_store.new_block(fetched_full_block)
                if fetched_block_record.sub_epoch_summary_included is not None:
                    self.__sub_epoch_summaries[
                        fetched_block_record.height
                    ] = fetched_block_record.sub_epoch_summary_included

            # Changes the peak to be the new peak
            await self.block_store.set_peak(block_record.header_hash)
            self._peak_height = block_record.height
            await self.block_store.commit_transaction()
        except Exception:
            await self.block_store.rollback_transaction()
            raise
        return uint32(max(fork_height, 0))

    # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
    return None
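
# The begin/commit/rollback choreography above is repeated for both the genesis path and
# the reorg path. As a design note, a minimal sketch of how the same guarantee could be
# expressed once with an async context manager; this wrapper is illustrative and not part
# of the codebase, and assumes only the three transaction methods used above.
from contextlib import asynccontextmanager


@asynccontextmanager
async def atomic_block_store(block_store):
    await block_store.begin_transaction()
    try:
        yield block_store
    except Exception:
        await block_store.rollback_transaction()
        raise
    else:
        await block_store.commit_transaction()


# Usage would replace each try/except above with, e.g.:
#
#     async with atomic_block_store(self.block_store):
#         await self.coin_store.rollback_to_block(fork_height)
#         ...
#         await self.block_store.set_peak(block_record.header_hash)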