def test_cached_bls(self):
    n_keys = 10
    seed = b"a" * 31
    sks = [AugSchemeMPL.key_gen(seed + bytes([i])) for i in range(n_keys)]
    pks = [bytes(sk.get_g1()) for sk in sks]

    msgs = [("msg-%d" % (i,)).encode() for i in range(n_keys)]
    sigs = [AugSchemeMPL.sign(sk, msg) for sk, msg in zip(sks, msgs)]
    agg_sig = AugSchemeMPL.aggregate(sigs)

    pks_half = pks[: n_keys // 2]
    msgs_half = msgs[: n_keys // 2]
    sigs_half = sigs[: n_keys // 2]
    agg_sig_half = AugSchemeMPL.aggregate(sigs_half)

    assert AugSchemeMPL.aggregate_verify([G1Element.from_bytes(pk) for pk in pks], msgs, agg_sig)

    # Verify with empty cache and populate it
    assert cached_bls.aggregate_verify(pks_half, msgs_half, agg_sig_half, True)
    # Verify with partial cache hit
    assert cached_bls.aggregate_verify(pks, msgs, agg_sig, True)
    # Verify with full cache hit
    assert cached_bls.aggregate_verify(pks, msgs, agg_sig)

    # Use a small cache which can not accommodate all pairings
    local_cache = LRUCache(n_keys // 2)
    # Verify signatures and cache pairings one at a time
    for pk, msg, sig in zip(pks_half, msgs_half, sigs_half):
        assert cached_bls.aggregate_verify([pk], [msg], sig, True, local_cache)
    # Verify the same messages with aggregated signature (full cache hit)
    assert cached_bls.aggregate_verify(pks_half, msgs_half, agg_sig_half, False, local_cache)
    # Verify more messages (partial cache hit)
    assert cached_bls.aggregate_verify(pks, msgs, agg_sig, False, local_cache)
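
# Conceptual sketch only (not the real cached_bls internals): the test above relies on a pairing
# cache so that repeated (pk, msg) pairs get cheaper on later verifications. The real cache stores
# GT pairing elements keyed by pk||msg so even partially overlapping aggregates benefit; the toy
# below only memoizes boolean results of single-signature verification, keyed by pk||msg||sig,
# to illustrate the LRU-memoization pattern. The blspy AugSchemeMPL.verify call is the only
# assumed external API; class and function names here are hypothetical.
import hashlib
from collections import OrderedDict
from typing import Optional

from blspy import AugSchemeMPL, G1Element, G2Element


class TinyLRU:
    def __init__(self, capacity: int) -> None:
        self.capacity = capacity
        self.entries: "OrderedDict[bytes, bool]" = OrderedDict()

    def get(self, key: bytes) -> Optional[bool]:
        if key not in self.entries:
            return None
        self.entries.move_to_end(key)  # mark as most recently used
        return self.entries[key]

    def put(self, key: bytes, value: bool) -> None:
        self.entries[key] = value
        self.entries.move_to_end(key)
        if len(self.entries) > self.capacity:
            self.entries.popitem(last=False)  # evict least recently used


def verify_one_cached(pk: G1Element, msg: bytes, sig: G2Element, cache: TinyLRU) -> bool:
    key = hashlib.sha256(bytes(pk) + msg + bytes(sig)).digest()
    hit = cache.get(key)
    if hit is not None:
        return hit  # cache hit: skip the expensive verification entirely
    ok = AugSchemeMPL.verify(pk, msg, sig)
    cache.put(key, ok)
    return ok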
def validate_clvm_and_signature(
    spend_bundle_bytes: bytes, max_cost: int, cost_per_byte: int, additional_data: bytes
) -> Tuple[Optional[Err], bytes, Dict[bytes, bytes]]:
    """
    Validates CLVM and aggregate signature for a spend bundle. This is meant to be called under a
    ProcessPoolExecutor in order to validate the heavy parts of a transaction in a different process.
    Returns an optional error, the NPCResult and a cache of the new pairings validated (if not error)
    """
    try:
        bundle: SpendBundle = SpendBundle.from_bytes(spend_bundle_bytes)
        program = simple_solution_generator(bundle)
        # npc contains names of the coins removed, puzzle_hashes and their spend conditions
        result: NPCResult = get_name_puzzle_conditions(
            program, max_cost, cost_per_byte=cost_per_byte, mempool_mode=True
        )

        if result.error is not None:
            return Err(result.error), b"", {}

        pks: List[G1Element] = []
        msgs: List[bytes32] = []
        # TODO: address hint error and remove ignore
        #       error: Incompatible types in assignment (expression has type "List[bytes]", variable has type
        #       "List[bytes32]")  [assignment]
        pks, msgs = pkm_pairs(result.npc_list, additional_data)  # type: ignore[assignment]

        # Verify aggregated signature
        cache: LRUCache = LRUCache(10000)
        if not cached_bls.aggregate_verify(pks, msgs, bundle.aggregated_signature, True, cache):
            return Err.BAD_AGGREGATE_SIGNATURE, b"", {}
        new_cache_entries: Dict[bytes, bytes] = {}
        for k, v in cache.cache.items():
            new_cache_entries[k] = bytes(v)
    except ValidationError as e:
        return e.code, b"", {}
    except Exception:
        return Err.UNKNOWN, b"", {}

    return None, bytes(result), new_cache_entries
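
# As the docstring notes, validate_clvm_and_signature is designed to run in a worker process, so it
# takes and returns only picklable values (bytes, ints, dicts of bytes). The wrapper below is a
# minimal sketch of how a caller might dispatch it, assuming an asyncio context and a
# ProcessPoolExecutor owned by the caller; the wrapper name and wiring are illustrative, not the
# actual full-node code. The returned new_cache_entries dict is presumably folded back into the
# caller's long-lived pairing cache afterwards.
import asyncio
from concurrent.futures import ProcessPoolExecutor
from typing import Dict, Optional, Tuple


async def validate_in_worker(
    pool: ProcessPoolExecutor,
    spend_bundle_bytes: bytes,
    max_cost: int,
    cost_per_byte: int,
    additional_data: bytes,
) -> Tuple[Optional["Err"], bytes, Dict[bytes, bytes]]:
    loop = asyncio.get_running_loop()
    # Only plain bytes/ints cross the process boundary, which is why the spend bundle
    # is handed over pre-serialized rather than as a SpendBundle object.
    return await loop.run_in_executor(
        pool,
        validate_clvm_and_signature,
        spend_bundle_bytes,
        max_cost,
        cost_per_byte,
        additional_data,
    )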
async def validate_block_body(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    block_store: BlockStore,
    coin_store: CoinStore,
    peak: Optional[BlockRecord],
    block: Union[FullBlock, UnfinishedBlock],
    height: uint32,
    npc_result: Optional[NPCResult],
    fork_point_with_peak: Optional[uint32],
    get_block_generator: Callable,
    validate_signature=True,
) -> Tuple[Optional[Err], Optional[NPCResult]]:
    """
    This assumes the header block has been completely validated.
    Validates the transactions and body of the block. Returns None for the first value if everything
    validates correctly, or an Err if something does not validate. For the second value, returns an NPCResult
    only if validation succeeded, and there are transactions. In other cases it returns None. The NPC result is
    the result of running the generator with the previous generators refs. It is only present for transaction
    blocks which have spent coins.
    """
    if isinstance(block, FullBlock):
        assert height == block.height
    prev_transaction_block_height: uint32 = uint32(0)

    # 1. For non-transaction blocks: foliage block, transaction filter, transactions info, and generator must
    #    be empty. If it is a block but not a transaction block, there is no body to validate. Check that all
    #    fields are None
    if block.foliage.foliage_transaction_block_hash is None:
        if (
            block.foliage_transaction_block is not None
            or block.transactions_info is not None
            or block.transactions_generator is not None
        ):
            return Err.NOT_BLOCK_BUT_HAS_DATA, None

        prev_tb: BlockRecord = blocks.block_record(block.prev_header_hash)
        while not prev_tb.is_transaction_block:
            prev_tb = blocks.block_record(prev_tb.prev_hash)
        assert prev_tb.timestamp is not None
        if len(block.transactions_generator_ref_list) > 0:
            return Err.NOT_BLOCK_BUT_HAS_DATA, None

        return None, None  # This means the block is valid

    # All checks below this point correspond to transaction blocks
    # 2. For blocks, foliage block, transactions info must not be empty
    if block.foliage_transaction_block is None or block.transactions_info is None:
        return Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA, None
    assert block.foliage_transaction_block is not None

    # keeps track of the reward coins that need to be incorporated
    expected_reward_coins: Set[Coin] = set()

    # 3. The transaction info hash in the Foliage block must match the transaction info
    if block.foliage_transaction_block.transactions_info_hash != std_hash(block.transactions_info):
        return Err.INVALID_TRANSACTIONS_INFO_HASH, None

    # 4. The foliage block hash in the foliage block must match the foliage block
    if block.foliage.foliage_transaction_block_hash != std_hash(block.foliage_transaction_block):
        return Err.INVALID_FOLIAGE_BLOCK_HASH, None

    # 5. The reward claims must be valid for the previous blocks, and current block fees
    # If height == 0, expected_reward_coins will be left empty
    if height > 0:
        # Add reward claims for all blocks from the prev prev block, until the prev block (including the latter)
        prev_transaction_block = blocks.block_record(block.foliage_transaction_block.prev_transaction_block_hash)
        prev_transaction_block_height = prev_transaction_block.height
        assert prev_transaction_block.fees is not None
        pool_coin = create_pool_coin(
            prev_transaction_block_height,
            prev_transaction_block.pool_puzzle_hash,
            calculate_pool_reward(prev_transaction_block.height),
            constants.GENESIS_CHALLENGE,
        )
        farmer_coin = create_farmer_coin(
            prev_transaction_block_height,
            prev_transaction_block.farmer_puzzle_hash,
            uint64(calculate_base_farmer_reward(prev_transaction_block.height) + prev_transaction_block.fees),
            constants.GENESIS_CHALLENGE,
        )
        # Adds the previous block
        expected_reward_coins.add(pool_coin)
        expected_reward_coins.add(farmer_coin)

        # For the second block in the chain, don't go back further
        if prev_transaction_block.height > 0:
            curr_b = blocks.block_record(prev_transaction_block.prev_hash)
            while not curr_b.is_transaction_block:
                expected_reward_coins.add(
                    create_pool_coin(
                        curr_b.height,
                        curr_b.pool_puzzle_hash,
                        calculate_pool_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    )
                )
                expected_reward_coins.add(
                    create_farmer_coin(
                        curr_b.height,
                        curr_b.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    )
                )
                curr_b = blocks.block_record(curr_b.prev_hash)

    if set(block.transactions_info.reward_claims_incorporated) != expected_reward_coins:
        return Err.INVALID_REWARD_COINS, None
    if len(block.transactions_info.reward_claims_incorporated) != len(expected_reward_coins):
        return Err.INVALID_REWARD_COINS, None

    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)

    # In header validation we check that timestamp is not more than 10 minutes into the future
    # 6. No transactions before INITIAL_TRANSACTION_FREEZE timestamp
    # (this test has been removed)

    # 7a. The generator root must be the hash of the serialized bytes of
    #     the generator for this block (or zeroes if no generator)
    if block.transactions_generator is not None:
        if std_hash(bytes(block.transactions_generator)) != block.transactions_info.generator_root:
            return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None
    else:
        if block.transactions_info.generator_root != bytes([0] * 32):
            return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None

    # 8a. The generator_ref_list must be the hash of the serialized bytes of
    #     the generator ref list for this block (or 'one' bytes [0x01] if no generator)
    # 8b. The generator ref list length must be less than or equal to MAX_GENERATOR_REF_LIST_SIZE entries
    # 8c. The generator ref list must not point to a height >= this block's height
    if block.transactions_generator_ref_list in (None, []):
        if block.transactions_info.generator_refs_root != bytes([1] * 32):
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
    else:
        # If we have a generator reference list, we must have a generator
        if block.transactions_generator is None:
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None

        # The generator_refs_root must be the hash of the concatenation of the List[uint32]
        generator_refs_hash = std_hash(b"".join([bytes(i) for i in block.transactions_generator_ref_list]))
        if block.transactions_info.generator_refs_root != generator_refs_hash:
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
        if len(block.transactions_generator_ref_list) > constants.MAX_GENERATOR_REF_LIST_SIZE:
            return Err.TOO_MANY_GENERATOR_REFS, None

    if any([index >= height for index in block.transactions_generator_ref_list]):
        return Err.FUTURE_GENERATOR_REFS, None

    if block.transactions_generator is not None:
        # Get List of names removed, puzzles hashes for removed coins and conditions created
        assert npc_result is not None
        cost = calculate_cost_of_program(block.transactions_generator, npc_result, constants.COST_PER_BYTE)
        npc_list = npc_result.npc_list

        # 7. Check that cost <= MAX_BLOCK_COST_CLVM
        log.debug(
            f"Cost: {cost} max: {constants.MAX_BLOCK_COST_CLVM} "
            f"percent full: {round(100 * (cost / constants.MAX_BLOCK_COST_CLVM), 2)}%"
        )
        if cost > constants.MAX_BLOCK_COST_CLVM:
            return Err.BLOCK_COST_EXCEEDS_MAX, None

        # 8. The CLVM program must not return any errors
        if npc_result.error is not None:
            return Err(npc_result.error), None

        for npc in npc_list:
            removals.append(npc.coin_name)
            removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash
        additions = additions_for_npc(npc_list)
    else:
        assert npc_result is None

    # 9. Check that the correct cost is in the transactions info
    if block.transactions_info.cost != cost:
        return Err.INVALID_BLOCK_COST, None

    additions_dic: Dict[bytes32, Coin] = {}
    # 10. Check additions for max coin amount
    #     Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer
    #     We will not even reach here because Coins do type checking (uint64)
    for coin in additions + coinbase_additions:
        additions_dic[coin.name()] = coin
        if coin.amount < 0:
            return Err.COIN_AMOUNT_NEGATIVE, None

        if coin.amount > constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

    # 11. Validate addition and removal roots
    root_error = validate_block_merkle_roots(
        block.foliage_transaction_block.additions_root,
        block.foliage_transaction_block.removals_root,
        additions + coinbase_additions,
        removals,
    )
    if root_error:
        return root_error, None

    # 12. The additions and removals must result in the correct filter
    byte_array_tx: List[bytes32] = []

    for coin in additions + coinbase_additions:
        # TODO: address hint error and remove ignore
        #       error: Argument 1 to "append" of "list" has incompatible type "bytearray"; expected "bytes32"
        #       [arg-type]
        byte_array_tx.append(bytearray(coin.puzzle_hash))  # type: ignore[arg-type]
    for coin_name in removals:
        # TODO: address hint error and remove ignore
        #       error: Argument 1 to "append" of "list" has incompatible type "bytearray"; expected "bytes32"
        #       [arg-type]
        byte_array_tx.append(bytearray(coin_name))  # type: ignore[arg-type]

    bip158: PyBIP158 = PyBIP158(byte_array_tx)
    encoded_filter = bytes(bip158.GetEncoded())
    filter_hash = std_hash(encoded_filter)

    if filter_hash != block.foliage_transaction_block.filter_hash:
        return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

    # 13. Check for duplicate outputs in additions
    addition_counter = collections.Counter(_.name() for _ in additions + coinbase_additions)
    for k, v in addition_counter.items():
        if v > 1:
            return Err.DUPLICATE_OUTPUT, None

    # 14. Check for duplicate spends inside block
    removal_counter = collections.Counter(removals)
    for k, v in removal_counter.items():
        if v > 1:
            return Err.DOUBLE_SPEND, None

    # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block)
    # The fork point is the last block in common between the peak chain and the chain of `block`
    if peak is None or height == 0:
        fork_h: int = -1
    elif fork_point_with_peak is not None:
        fork_h = fork_point_with_peak
    else:
        fork_h = find_fork_point_in_chain(blocks, peak, blocks.block_record(block.prev_header_hash))

    # Get additions and removals since (after) fork_h but not including this block
    # The values include: the coin that was added, the height of the block in which it was confirmed, and the
    # timestamp of the block in which it was confirmed
    additions_since_fork: Dict[bytes32, Tuple[Coin, uint32, uint64]] = {}  # This includes coinbase additions
    removals_since_fork: Set[bytes32] = set()

    # For height 0, there are no additions and removals before this block, so we can skip
    if height > 0:
        # First, get all the blocks in the fork > fork_h, < block.height
        prev_block: Optional[FullBlock] = await block_store.get_full_block(block.prev_header_hash)
        reorg_blocks: Dict[uint32, FullBlock] = {}
        curr: Optional[FullBlock] = prev_block
        assert curr is not None
        while curr.height > fork_h:
            if curr.height == 0:
                break
            curr = await block_store.get_full_block(curr.prev_header_hash)
            assert curr is not None
            reorg_blocks[curr.height] = curr
        if fork_h != -1:
            assert len(reorg_blocks) == height - fork_h - 1

        curr = prev_block
        assert curr is not None
        while curr.height > fork_h:
            # Coin store doesn't contain coins from fork, we have to run generator for each block in fork
            if curr.transactions_generator is not None:
                # These blocks are in the past and therefore assumed to be valid, so get_block_generator won't raise
                curr_block_generator: Optional[BlockGenerator] = await get_block_generator(curr)
                assert curr_block_generator is not None and curr.transactions_info is not None
                curr_npc_result = get_name_puzzle_conditions(
                    curr_block_generator,
                    min(constants.MAX_BLOCK_COST_CLVM, curr.transactions_info.cost),
                    cost_per_byte=constants.COST_PER_BYTE,
                    mempool_mode=False,
                )
                removals_in_curr, additions_in_curr = tx_removals_and_additions(curr_npc_result.npc_list)
            else:
                removals_in_curr = []
                additions_in_curr = []

            for c_name in removals_in_curr:
                assert c_name not in removals_since_fork
                removals_since_fork.add(c_name)
            for c in additions_in_curr:
                assert c.name() not in additions_since_fork
                assert curr.foliage_transaction_block is not None
                additions_since_fork[c.name()] = (c, curr.height, curr.foliage_transaction_block.timestamp)

            for coinbase_coin in curr.get_included_reward_coins():
                assert coinbase_coin.name() not in additions_since_fork
                assert curr.foliage_transaction_block is not None
                additions_since_fork[coinbase_coin.name()] = (
                    coinbase_coin,
                    curr.height,
                    curr.foliage_transaction_block.timestamp,
                )
            if curr.height == 0:
                break
            curr = reorg_blocks[curr.height - 1]
            assert curr is not None

    removal_coin_records: Dict[bytes32, CoinRecord] = {}
    for rem in removals:
        if rem in additions_dic:
            # Ephemeral coin
            rem_coin: Coin = additions_dic[rem]
            new_unspent: CoinRecord = CoinRecord(
                rem_coin,
                height,
                height,
                False,
                block.foliage_transaction_block.timestamp,
            )
            removal_coin_records[new_unspent.name] = new_unspent
        else:
            unspent = await coin_store.get_coin_record(rem)
            if unspent is not None and unspent.confirmed_block_index <= fork_h:
                # Spending something in the current chain, confirmed before fork
                # (We ignore all coins confirmed after fork)
                if unspent.spent == 1 and unspent.spent_block_index <= fork_h:
                    # Check for coins spent in an ancestor block
                    return Err.DOUBLE_SPEND, None
                removal_coin_records[unspent.name] = unspent
            else:
                # This coin is not in the current heaviest chain, so it must be in the fork
                if rem not in additions_since_fork:
                    # Check for spending a coin that does not exist in this fork
                    log.error(f"Err.UNKNOWN_UNSPENT: COIN ID: {rem} NPC RESULT: {npc_result}")
                    return Err.UNKNOWN_UNSPENT, None
                new_coin, confirmed_height, confirmed_timestamp = additions_since_fork[rem]
                new_coin_record: CoinRecord = CoinRecord(
                    new_coin,
                    confirmed_height,
                    uint32(0),
                    False,
                    confirmed_timestamp,
                )
                removal_coin_records[new_coin_record.name] = new_coin_record

        # This check applies to both coins created before fork (pulled from coin_store),
        # and coins created after fork (additions_since_fork)
        if rem in removals_since_fork:
            # This coin was spent in the fork
            return Err.DOUBLE_SPEND_IN_FORK, None

    removed = 0
    for unspent in removal_coin_records.values():
        removed += unspent.coin.amount

    added = 0
    for coin in additions:
        added += coin.amount

    # 16. Check that the total coin amount for added is <= removed
    if removed < added:
        return Err.MINTING_COIN, None

    fees = removed - added
    assert fees >= 0

    assert_fee_sum: uint128 = uint128(0)
    for npc in npc_list:
        if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
            fee_list: List[ConditionWithArgs] = npc.condition_dict[ConditionOpcode.RESERVE_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                if fee < 0:
                    return Err.RESERVE_FEE_CONDITION_FAILED, None
                assert_fee_sum = uint128(assert_fee_sum + fee)

    # 17. Check that the assert fee sum <= fees, and that each reserved fee is non-negative
    if fees < assert_fee_sum:
        return Err.RESERVE_FEE_CONDITION_FAILED, None

    # 18. Check that the fee amount + farmer reward < maximum coin amount
    if fees + calculate_base_farmer_reward(height) > constants.MAX_COIN_AMOUNT:
        return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

    # 19. Check that the computed fees are equal to the fees in the block header
    if block.transactions_info.fees != fees:
        return Err.INVALID_BLOCK_FEE_AMOUNT, None

    # 20. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
    for unspent in removal_coin_records.values():
        if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
            return Err.WRONG_PUZZLE_HASH, None

    # 21. Verify conditions
    for npc in npc_list:
        assert height is not None
        unspent = removal_coin_records[npc.coin_name]
        error = mempool_check_conditions_dict(
            unspent,
            npc.condition_dict,
            prev_transaction_block_height,
            block.foliage_transaction_block.timestamp,
        )

        if error:
            return error, None

    # create hash_key list for aggsig check
    pairs_pks, pairs_msgs = pkm_pairs(npc_list, constants.AGG_SIG_ME_ADDITIONAL_DATA)

    # 22. Verify aggregated signature
    # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
    if not block.transactions_info.aggregated_signature:
        return Err.BAD_AGGREGATE_SIGNATURE, None

    # The pairing cache is not useful while syncing as each pairing is seen
    # only once, so the extra effort of populating it is not justified.
    # However, we force caching of pairings just for unfinished blocks
    # as the cache is likely to be useful when validating the corresponding
    # finished blocks later.
    if validate_signature:
        force_cache: bool = isinstance(block, UnfinishedBlock)
        if not cached_bls.aggregate_verify(
            pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature, force_cache
        ):
            return Err.BAD_AGGREGATE_SIGNATURE, None

    return None, npc_result
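
# For context on what pkm_pairs feeds into the aggregate check in step 22: under Chia's consensus
# rules of this era, an AGG_SIG_UNSAFE condition signs its message as-is, while AGG_SIG_ME binds
# the message to the spending coin's id and the network's AGG_SIG_ME_ADDITIONAL_DATA. The sketch
# below is a hedged, simplified restatement of that rule; the real pkm_pairs walks NPC condition
# dictionaries keyed by ConditionOpcode, and the plain (pk, msg) tuple lists here are hypothetical.
from typing import List, Tuple


def sketch_pkm_pairs(
    agg_sig_unsafe: List[Tuple[bytes, bytes]],
    agg_sig_me: List[Tuple[bytes, bytes]],
    coin_name: bytes,
    additional_data: bytes,
) -> Tuple[List[bytes], List[bytes]]:
    pks: List[bytes] = []
    msgs: List[bytes] = []
    for pk, msg in agg_sig_unsafe:
        pks.append(pk)
        msgs.append(msg)  # signed exactly as given
    for pk, msg in agg_sig_me:
        pks.append(pk)
        # AGG_SIG_ME commits to the spending coin id and the network constant,
        # so a signature cannot be replayed for a different coin or network.
        msgs.append(msg + coin_name + additional_data)
    return pks, msgs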
async def add_spendbundle(
    self,
    new_spend: SpendBundle,
    npc_result: NPCResult,
    spend_name: bytes32,
    validate_signature=True,
    program: Optional[SerializedProgram] = None,
) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
    """
    Tries to add spend bundle to the mempool
    Returns the cost (if SUCCESS), the result (MempoolInclusion status), and an optional error
    """
    start_time = time.time()
    if self.peak is None:
        return None, MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED

    npc_list = npc_result.npc_list
    if program is None:
        program = simple_solution_generator(new_spend).program
    cost = calculate_cost_of_program(program, npc_result, self.constants.COST_PER_BYTE)

    log.debug(f"Cost: {cost}")
    if cost > int(self.limit_factor * self.constants.MAX_BLOCK_COST_CLVM):
        # we shouldn't ever end up here, since the cost is limited when we
        # execute the CLVM program.
        return None, MempoolInclusionStatus.FAILED, Err.BLOCK_COST_EXCEEDS_MAX

    if npc_result.error is not None:
        return None, MempoolInclusionStatus.FAILED, Err(npc_result.error)
    # build removal list
    removal_names: List[bytes32] = [npc.coin_name for npc in npc_list]
    additions = additions_for_npc(npc_list)

    additions_dict: Dict[bytes32, Coin] = {}
    for add in additions:
        additions_dict[add.name()] = add

    addition_amount = uint64(0)
    # Check additions for max coin amount
    for coin in additions:
        if coin.amount < 0:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.COIN_AMOUNT_NEGATIVE,
            )
        if coin.amount > self.constants.MAX_COIN_AMOUNT:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
            )
        addition_amount = uint64(addition_amount + coin.amount)
    # Check for duplicate outputs
    addition_counter = collections.Counter(_.name() for _ in additions)
    for k, v in addition_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT
    # Check for duplicate inputs
    removal_counter = collections.Counter(name for name in removal_names)
    for k, v in removal_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND
    # Skip if already added
    if spend_name in self.mempool.spends:
        return uint64(cost), MempoolInclusionStatus.SUCCESS, None

    removal_record_dict: Dict[bytes32, CoinRecord] = {}
    removal_coin_dict: Dict[bytes32, Coin] = {}
    removal_amount = uint64(0)
    for name in removal_names:
        removal_record = await self.coin_store.get_coin_record(name)
        if removal_record is None and name not in additions_dict:
            return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN_UNSPENT
        elif name in additions_dict:
            removal_coin = additions_dict[name]
            # TODO(straya): what timestamp to use here?
            assert self.peak.timestamp is not None
            removal_record = CoinRecord(
                removal_coin,
                uint32(self.peak.height + 1),  # In mempool, so will be included in next height
                uint32(0),
                False,
                False,
                uint64(self.peak.timestamp + 1),
            )

        assert removal_record is not None
        removal_amount = uint64(removal_amount + removal_record.coin.amount)
        removal_record_dict[name] = removal_record
        removal_coin_dict[name] = removal_record.coin

    removals: List[Coin] = [coin for coin in removal_coin_dict.values()]

    if addition_amount > removal_amount:
        print(addition_amount, removal_amount)
        return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN

    fees = uint64(removal_amount - addition_amount)
    assert_fee_sum: uint64 = uint64(0)

    for npc in npc_list:
        if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
            fee_list: List[ConditionWithArgs] = npc.condition_dict[ConditionOpcode.RESERVE_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                if fee < 0:
                    return None, MempoolInclusionStatus.FAILED, Err.RESERVE_FEE_CONDITION_FAILED
                assert_fee_sum = assert_fee_sum + fee
    if fees < assert_fee_sum:
        return (
            None,
            MempoolInclusionStatus.FAILED,
            Err.RESERVE_FEE_CONDITION_FAILED,
        )

    if cost == 0:
        return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

    fees_per_cost: float = fees / cost
    # If pool is at capacity check the fee, if not then accept even without the fee
    if self.mempool.at_full_capacity(cost):
        if fees_per_cost < self.nonzero_fee_minimum_fpc:
            return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_TOO_CLOSE_TO_ZERO
        if fees_per_cost <= self.mempool.get_min_fee_rate(cost):
            return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
    # Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
    # Use this information later when constructing a block
    fail_reason, conflicts = await self.check_removals(removal_record_dict)
    # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
    tmp_error: Optional[Err] = None
    conflicting_pool_items: Dict[bytes32, MempoolItem] = {}
    if fail_reason is Err.MEMPOOL_CONFLICT:
        for conflicting in conflicts:
            sb: MempoolItem = self.mempool.removals[conflicting.name()]
            conflicting_pool_items[sb.name] = sb
        if not self.can_replace(conflicting_pool_items, removal_record_dict, fees, fees_per_cost):
            potential = MempoolItem(
                new_spend, uint64(fees), npc_result, cost, spend_name, additions, removals, program
            )
            self.potential_cache.add(potential)
            return (
                uint64(cost),
                MempoolInclusionStatus.PENDING,
                Err.MEMPOOL_CONFLICT,
            )
    elif fail_reason:
        return None, MempoolInclusionStatus.FAILED, fail_reason

    if tmp_error:
        return None, MempoolInclusionStatus.FAILED, tmp_error

    # Verify conditions, create hash_key list for aggsig check
    pks: List[G1Element] = []
    msgs: List[bytes32] = []
    error: Optional[Err] = None
    coin_announcements_in_spend: Set[bytes32] = coin_announcements_names_for_npc(npc_list)
    puzzle_announcements_in_spend: Set[bytes32] = puzzle_announcements_names_for_npc(npc_list)
    for npc in npc_list:
        coin_record: CoinRecord = removal_record_dict[npc.coin_name]
        # Check that the revealed removal puzzles actually match the puzzle hash
        if npc.puzzle_hash != coin_record.coin.puzzle_hash:
            log.warning("Mempool rejecting transaction because of wrong puzzle_hash")
            log.warning(f"{npc.puzzle_hash} != {coin_record.coin.puzzle_hash}")
            return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH

        chialisp_height = (
            self.peak.prev_transaction_block_height if not self.peak.is_transaction_block else self.peak.height
        )
        assert self.peak.timestamp is not None
        error = mempool_check_conditions_dict(
            coin_record,
            coin_announcements_in_spend,
            puzzle_announcements_in_spend,
            npc.condition_dict,
            uint32(chialisp_height),
            self.peak.timestamp,
        )

        if error:
            if error is Err.ASSERT_HEIGHT_ABSOLUTE_FAILED or error is Err.ASSERT_HEIGHT_RELATIVE_FAILED:
                potential = MempoolItem(
                    new_spend, uint64(fees), npc_result, cost, spend_name, additions, removals, program
                )
                self.potential_cache.add(potential)
                return uint64(cost), MempoolInclusionStatus.PENDING, error
            break

        if validate_signature:
            for pk, message in pkm_pairs_for_conditions_dict(
                npc.condition_dict, npc.coin_name, self.constants.AGG_SIG_ME_ADDITIONAL_DATA
            ):
                pks.append(pk)
                msgs.append(message)
    if error:
        return None, MempoolInclusionStatus.FAILED, error

    if validate_signature:
        # Verify aggregated signature
        if not cached_bls.aggregate_verify(pks, msgs, new_spend.aggregated_signature, True):
            log.warning(f"Aggsig validation error {pks} {msgs} {new_spend}")
            return None, MempoolInclusionStatus.FAILED, Err.BAD_AGGREGATE_SIGNATURE
    # Remove all conflicting Coins and SpendBundles
    if fail_reason:
        mempool_item: MempoolItem
        for mempool_item in conflicting_pool_items.values():
            self.mempool.remove_from_pool(mempool_item)

    new_item = MempoolItem(new_spend, uint64(fees), npc_result, cost, spend_name, additions, removals, program)
    self.mempool.add_to_pool(new_item)
    log.info(
        f"add_spendbundle took {time.time() - start_time} seconds, cost {cost} "
        f"({round(100.0 * cost / self.constants.MAX_BLOCK_COST_CLVM, 3)}%)"
    )
    return uint64(cost), MempoolInclusionStatus.SUCCESS, None
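
# Small worked example of the fee-rate gate used when the mempool is at full capacity. All numbers
# and thresholds below are made up for illustration; they are not the node's actual configuration
# values, and the real checks live in add_spendbundle above.
fees = 50_000                    # mojos paid by the bundle
cost = 10_000_000                # CLVM cost of the bundle
fees_per_cost = fees / cost      # 0.005 mojos per unit of cost

nonzero_fee_minimum_fpc = 0.001  # assumed floor, not the real default
current_min_fee_rate = 0.002     # assumed value of mempool.get_min_fee_rate(cost)

# Mirrors the logic above: when the pool is full, reject anything below the nonzero-fee floor
# or not strictly above the mempool's current minimum fee rate.
accepted_when_full = fees_per_cost >= nonzero_fee_minimum_fpc and fees_per_cost > current_min_fee_rate
print(accepted_when_full)  # True for these numbers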