async def sign_coin_solutions(
    coin_solutions: List[CoinSolution],
    secret_key_for_public_key_f: Callable[[bytes], Optional[PrivateKey]],
) -> SpendBundle:
    """Sign every AGG_SIG condition produced by the given coin solutions.

    Runs each puzzle with its solution, collects the (public key, message)
    pairs its conditions demand, signs each pair with the private key returned
    by ``secret_key_for_public_key_f``, and returns a ``SpendBundle`` carrying
    one aggregate signature over all pairs.

    Raises:
        ValueError: if condition extraction fails for a coin solution, or no
            secret key is known for one of the public keys.
    """
    signatures = []
    pk_list = []
    msg_list = []
    for coin_solution in coin_solutions:
        # Get AGG_SIG conditions
        err, conditions_dict, cost = conditions_dict_for_solution(coin_solution.puzzle_reveal, coin_solution.solution)
        if err or conditions_dict is None:
            error_msg = f"Sign transaction failed, con:{conditions_dict}, error: {err}"
            raise ValueError(error_msg)
        # Create one signature per (public key, message) pair.
        # (The key was previously bound to `_`, the conventional throwaway
        # name, despite being used heavily below — renamed to `pk`.)
        for pk, msg in pkm_pairs_for_conditions_dict(conditions_dict, bytes(coin_solution.coin.name())):
            pk_list.append(pk)
            msg_list.append(msg)
            secret_key = secret_key_for_public_key_f(pk)
            if secret_key is None:
                e_msg = f"no secret key for {pk}"
                raise ValueError(e_msg)
            assert bytes(secret_key.get_g1()) == bytes(pk)
            signature = AugSchemeMPL.sign(secret_key, msg)
            assert AugSchemeMPL.verify(pk, msg, signature)
            signatures.append(signature)
    # Aggregate signatures
    aggsig = AugSchemeMPL.aggregate(signatures)
    assert AugSchemeMPL.aggregate_verify(pk_list, msg_list, aggsig)
    return SpendBundle(coin_solutions, aggsig)
def test_cached_bls(self):
    """Exercise cached_bls.aggregate_verify through cold-, warm-, and bounded-cache paths."""
    num_keys = 10
    half = num_keys // 2
    base_seed = b"a" * 31
    secret_keys = [AugSchemeMPL.key_gen(base_seed + bytes([i])) for i in range(num_keys)]
    pub_keys = [bytes(sk.get_g1()) for sk in secret_keys]
    messages = [f"msg-{i}".encode() for i in range(num_keys)]
    signatures = [AugSchemeMPL.sign(sk, msg) for sk, msg in zip(secret_keys, messages)]
    agg_sig = AugSchemeMPL.aggregate(signatures)

    pub_keys_half = pub_keys[:half]
    messages_half = messages[:half]
    signatures_half = signatures[:half]
    agg_sig_half = AugSchemeMPL.aggregate(signatures_half)

    # Sanity check via the uncached reference implementation.
    assert AugSchemeMPL.aggregate_verify([G1Element.from_bytes(pk) for pk in pub_keys], messages, agg_sig)

    # Cold cache: verify and populate it.
    assert cached_bls.aggregate_verify(pub_keys_half, messages_half, agg_sig_half, True)
    # Partial cache hit.
    assert cached_bls.aggregate_verify(pub_keys, messages, agg_sig, True)
    # Full cache hit.
    assert cached_bls.aggregate_verify(pub_keys, messages, agg_sig)

    # A cache too small to hold every pairing.
    small_cache = LRUCache(half)
    # Verify signatures one at a time, caching each pairing.
    for pub_key, message, signature in zip(pub_keys_half, messages_half, signatures_half):
        assert cached_bls.aggregate_verify([pub_key], [message], signature, True, small_cache)
    # Same messages under the aggregated signature: full cache hit.
    assert cached_bls.aggregate_verify(pub_keys_half, messages_half, agg_sig_half, False, small_cache)
    # More messages than cached: partial cache hit.
    assert cached_bls.aggregate_verify(pub_keys, messages, agg_sig, False, small_cache)
async def sign_coin_spends(
    coin_spends: List[CoinSpend],
    secret_key_for_public_key_f: Any,  # Potentially awaitable function from G1Element => Optional[PrivateKey]
    additional_data: bytes,
    max_cost: int,
) -> SpendBundle:
    """
    Sign every AGG_SIG_ME condition produced by the given coin spends and
    return the spends bundled with a single aggregate signature.

    Each puzzle is run with its solution and the resulting conditions are
    scanned for (public key, message) pairs; for every pair, the
    secret_key_for_public_key_f callback (plain or awaitable) must supply the
    matching PrivateKey.

    It's important to note that, as mentioned in the documentation about the
    standard spend, the public key presented to secret_key_for_public_key_f
    may be a derived key, so the callback must be prepared to do the key
    derivations required by the coin types it is allowed to spend (at least
    the derivation of the standard spend as done by
    calculate_synthetic_secret_key with DEFAULT_PUZZLE_HASH). If a coin
    performed a different key derivation, the pk presented here would be
    similarly alien and would need to be tried against the first-stage derived
    keys (those returned by master_sk_to_wallet_sk from the ['sk'] member of
    wallet rpc's get_private_key method).

    Raises ValueError when condition extraction fails or a key is missing.
    """
    collected_sigs: List[blspy.G2Element] = []
    seen_pks: List[blspy.G1Element] = []
    seen_msgs: List[bytes] = []
    for spend in coin_spends:
        # Run the puzzle to extract its AGG_SIG conditions.
        err, conditions_dict, cost = conditions_dict_for_solution(spend.puzzle_reveal, spend.solution, max_cost)
        if err or conditions_dict is None:
            error_msg = f"Sign transaction failed, con:{conditions_dict}, error: {err}"
            raise ValueError(error_msg)

        # Sign every (public key, message) pair the conditions demand.
        for pk_bytes, msg in pkm_pairs_for_conditions_dict(conditions_dict, spend.coin.name(), additional_data):
            pk = blspy.G1Element.from_bytes(pk_bytes)
            seen_pks.append(pk)
            seen_msgs.append(msg)
            if inspect.iscoroutinefunction(secret_key_for_public_key_f):
                secret_key = await secret_key_for_public_key_f(pk)
            else:
                secret_key = secret_key_for_public_key_f(pk)
            if secret_key is None:
                e_msg = f"no secret key for {pk}"
                raise ValueError(e_msg)
            assert bytes(secret_key.get_g1()) == bytes(pk)
            sig = AugSchemeMPL.sign(secret_key, msg)
            assert AugSchemeMPL.verify(pk, msg, sig)
            collected_sigs.append(sig)

    # One aggregate signature covers every pair collected above.
    aggsig = AugSchemeMPL.aggregate(collected_sigs)
    assert AugSchemeMPL.aggregate_verify(seen_pks, seen_msgs, aggsig)
    return SpendBundle(coin_spends, aggsig)
def aggregate_verify(pks: List[G1Element], msgs: List[bytes], sig: G2Element, force_cache: bool = False, cache: LRUCache = LOCAL_CACHE):
    """Verify an aggregate signature, reusing cached pairings when possible.

    Falls back to plain AugSchemeMPL verification when no cached pairings are
    usable; otherwise multiplies the cached pairings together and compares the
    product against the pairing of the signature with the G1 generator.
    """
    cached_pairings: List[GTElement] = get_pairings(cache, pks, msgs, force_cache)
    if not cached_pairings:
        # Nothing usable in the cache — do a straight aggregate verification.
        return AugSchemeMPL.aggregate_verify(pks, msgs, sig)
    product: GTElement = functools.reduce(GTElement.__mul__, cached_pairings)
    return product == sig.pair(G1Element.generator())
def batch_verification():
    """Benchmark public-key/signature deserialization, aggregation, and batch verification.

    Generates num_iters keypairs and per-index signatures, then times each
    phase via the startStopwatch/endStopwatch helpers. Exits the process with
    status 1 if the final aggregate verification fails.

    Fixes: removed stray C-style trailing semicolons and renamed camelCase
    locals (numIters, aggSig) to PEP 8 snake_case — no behavior change.
    """
    num_iters = 100000
    sig_bytes = []
    pk_bytes = []
    ms = []
    for i in range(num_iters):
        message = b"%d" % i
        sk: PrivateKey = AugSchemeMPL.key_gen(secrets.token_bytes(32))
        pk: G1Element = sk.get_g1()
        sig: G2Element = AugSchemeMPL.sign(sk, message)
        sig_bytes.append(bytes(sig))
        pk_bytes.append(bytes(pk))
        ms.append(message)

    # Deserializing a G1Element validates it is on the curve / in the subgroup.
    pks = []
    start = startStopwatch()
    for pk in pk_bytes:
        pks.append(G1Element.from_bytes(pk))
    endStopwatch("Public key validation", start, num_iters)

    sigs = []
    start = startStopwatch()
    for sig in sig_bytes:
        sigs.append(G2Element.from_bytes(sig))
    endStopwatch("Signature validation", start, num_iters)

    start = startStopwatch()
    agg_sig = AugSchemeMPL.aggregate(sigs)
    endStopwatch("Aggregation", start, num_iters)

    start = startStopwatch()
    ok = AugSchemeMPL.aggregate_verify(pks, ms, agg_sig)
    endStopwatch("Batch verification", start, num_iters)
    if not ok:
        print("aggregate_verification failed!")
        sys.exit(1)
async def sign_coin_solutions(
    coin_solutions: List[CoinSolution],
    secret_key_for_public_key_f: Any,  # Potentially awaitable function from G1Element => Optional[PrivateKey]
    additional_data: bytes,
    max_cost: int,
) -> SpendBundle:
    """Sign all AGG_SIG conditions in the given coin solutions and bundle them.

    For each coin solution the puzzle is run to collect its (public key,
    message) pairs; secret_key_for_public_key_f (sync or async) supplies the
    matching private key for each pair. Raises ValueError when conditions
    cannot be extracted or a key is missing.
    """
    all_sigs: List[blspy.G2Element] = []
    used_pks: List[blspy.G1Element] = []
    used_msgs: List[bytes] = []
    for coin_sol in coin_solutions:
        # Extract the AGG_SIG conditions by running the puzzle.
        err, conditions_dict, cost = conditions_dict_for_solution(coin_sol.puzzle_reveal, coin_sol.solution, max_cost)
        if err or conditions_dict is None:
            error_msg = f"Sign transaction failed, con:{conditions_dict}, error: {err}"
            raise ValueError(error_msg)

        # Produce one signature per (pk, msg) pair.
        for pk, msg in pkm_pairs_for_conditions_dict(conditions_dict, bytes(coin_sol.coin.name()), additional_data):
            used_pks.append(pk)
            used_msgs.append(msg)
            if inspect.iscoroutinefunction(secret_key_for_public_key_f):
                secret_key = await secret_key_for_public_key_f(pk)
            else:
                secret_key = secret_key_for_public_key_f(pk)
            if secret_key is None:
                e_msg = f"no secret key for {pk}"
                raise ValueError(e_msg)
            assert bytes(secret_key.get_g1()) == bytes(pk)
            sig = AugSchemeMPL.sign(secret_key, msg)
            assert AugSchemeMPL.verify(pk, msg, sig)
            all_sigs.append(sig)

    # Aggregate and double-check before returning the bundle.
    aggsig = AugSchemeMPL.aggregate(all_sigs)
    assert AugSchemeMPL.aggregate_verify(used_pks, used_msgs, aggsig)
    return SpendBundle(coin_solutions, aggsig)
async def validate_block_body(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    block_store: BlockStore,
    coin_store: CoinStore,
    peak: Optional[BlockRecord],
    block: Union[FullBlock, UnfinishedBlock],
    height: uint32,
    npc_result: Optional[NPCResult],
    fork_point_with_peak: Optional[uint32],
    get_block_generator: Callable,
) -> Tuple[Optional[Err], Optional[NPCResult]],:
    """
    This assumes the header block has been completely validated.
    Validates the transactions and body of the block. Returns None for the first value if everything
    validates correctly, or an Err if something does not validate. For the second value, returns a CostResult
    if validation succeeded, and there are transactions
    """
    if isinstance(block, FullBlock):
        assert height == block.height
    prev_transaction_block_height: uint32 = uint32(0)

    # 1. For non block blocks, foliage block, transaction filter, transactions info, and generator must be empty
    # If it is a block but not a transaction block, there is no body to validate. Check that all fields are None
    if block.foliage.foliage_transaction_block_hash is None:
        if (
            block.foliage_transaction_block is not None
            or block.transactions_info is not None
            or block.transactions_generator is not None
        ):
            return Err.NOT_BLOCK_BUT_HAS_DATA, None
        return None, None  # This means the block is valid

    # 2. For blocks, foliage block, transaction filter, transactions info must not be empty
    if (
        block.foliage_transaction_block is None
        or block.foliage_transaction_block.filter_hash is None
        or block.transactions_info is None
    ):
        return Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA, None

    # keeps track of the reward coins that need to be incorporated
    expected_reward_coins: Set[Coin] = set()

    # 3. The transaction info hash in the Foliage block must match the transaction info
    if block.foliage_transaction_block.transactions_info_hash != std_hash(block.transactions_info):
        return Err.INVALID_TRANSACTIONS_INFO_HASH, None

    # 4. The foliage block hash in the foliage block must match the foliage block
    if block.foliage.foliage_transaction_block_hash != std_hash(block.foliage_transaction_block):
        return Err.INVALID_FOLIAGE_BLOCK_HASH, None

    # 7. The reward claims must be valid for the previous blocks, and current block fees
    if height > 0:
        # Add reward claims for all blocks from the prev prev block, until the prev block (including the latter)
        prev_transaction_block = blocks.block_record(block.foliage_transaction_block.prev_transaction_block_hash)
        prev_transaction_block_height = prev_transaction_block.height
        assert prev_transaction_block.fees is not None
        pool_coin = create_pool_coin(
            prev_transaction_block_height,
            prev_transaction_block.pool_puzzle_hash,
            calculate_pool_reward(prev_transaction_block.height),
            constants.GENESIS_CHALLENGE,
        )
        # Farmer also collects the fees of the previous transaction block.
        farmer_coin = create_farmer_coin(
            prev_transaction_block_height,
            prev_transaction_block.farmer_puzzle_hash,
            uint64(calculate_base_farmer_reward(prev_transaction_block.height) + prev_transaction_block.fees),
            constants.GENESIS_CHALLENGE,
        )
        # Adds the previous block
        expected_reward_coins.add(pool_coin)
        expected_reward_coins.add(farmer_coin)

        # For the second block in the chain, don't go back further
        if prev_transaction_block.height > 0:
            # Walk back over non-transaction blocks, adding their reward coins
            # until the previous transaction block is reached.
            curr_b = blocks.block_record(prev_transaction_block.prev_hash)
            while not curr_b.is_transaction_block:
                expected_reward_coins.add(
                    create_pool_coin(
                        curr_b.height,
                        curr_b.pool_puzzle_hash,
                        calculate_pool_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    )
                )
                expected_reward_coins.add(
                    create_farmer_coin(
                        curr_b.height,
                        curr_b.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    )
                )
                curr_b = blocks.block_record(curr_b.prev_hash)

    if set(block.transactions_info.reward_claims_incorporated) != expected_reward_coins:
        return Err.INVALID_REWARD_COINS, None

    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    coin_announcement_names: Set[bytes32] = set()
    puzzle_announcement_names: Set[bytes32] = set()
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)

    # 5. Transactions are frozen before INITIAL_FREEZE_PERIOD (and on mainnet).
    if height <= constants.INITIAL_FREEZE_PERIOD and block.transactions_generator is not None:
        return Err.INITIAL_TRANSACTION_FREEZE, None

    if height > constants.INITIAL_FREEZE_PERIOD and constants.NETWORK_TYPE == NetworkType.MAINNET:
        return Err.INITIAL_TRANSACTION_FREEZE, None
    else:
        # 6a. The generator root must be the hash of the serialized bytes of
        #     the generator for this block (or zeroes if no generator)
        if block.transactions_generator is not None:
            if std_hash(bytes(block.transactions_generator)) != block.transactions_info.generator_root:
                return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT, None
        else:
            if block.transactions_info.generator_root != bytes([0] * 32):
                return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT, None

        # 6b. The generator_ref_list must be the hash of the serialized bytes of
        #     the generator ref list for this block (or 'one' bytes [0x01] if no generator)
        # 6c. The generator ref list length must be less than or equal to MAX_GENERATOR_REF_LIST_SIZE entries
        if block.transactions_generator_ref_list in (None, []):
            if block.transactions_info.generator_refs_root != bytes([1] * 32):
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
        else:
            # If we have a generator reference list, we must have a generator
            if block.transactions_generator is None:
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None

            # The generator_refs_root must be the hash of the concatenation of the List[uint32]
            generator_refs_hash = std_hash(b"".join([bytes(i) for i in block.transactions_generator_ref_list]))
            if block.transactions_info.generator_refs_root != generator_refs_hash:
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
            if len(block.transactions_generator_ref_list) > constants.MAX_GENERATOR_REF_LIST_SIZE:
                return Err.PRE_SOFT_FORK_TOO_MANY_GENERATOR_REFS, None

        if block.transactions_generator is not None:
            # Get List of names removed, puzzles hashes for removed coins and conditions created
            assert npc_result is not None
            cost = calculate_cost_of_program(block.transactions_generator, npc_result, constants.COST_PER_BYTE)
            npc_list = npc_result.npc_list

            # 8. Check that cost <= MAX_BLOCK_COST_CLVM
            log.warning(f"Cost: {cost} max: {constants.MAX_BLOCK_COST_CLVM}")
            if cost > constants.MAX_BLOCK_COST_CLVM:
                return Err.BLOCK_COST_EXCEEDS_MAX, None
            if npc_result.error is not None:
                return Err.GENERATOR_RUNTIME_ERROR, None

            for npc in npc_list:
                removals.append(npc.coin_name)
                removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

            additions = additions_for_npc(npc_list)
            coin_announcement_names = coin_announcements_names_for_npc(npc_list)
            puzzle_announcement_names = puzzle_announcements_names_for_npc(npc_list)
        else:
            assert npc_result is None

        # 9. Check that the correct cost is in the transactions info
        if block.transactions_info.cost != cost:
            return Err.INVALID_BLOCK_COST, None

        additions_dic: Dict[bytes32, Coin] = {}
        # 10. Check additions for max coin amount
        # Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer
        for coin in additions + coinbase_additions:
            additions_dic[coin.name()] = coin
            if coin.amount > constants.MAX_COIN_AMOUNT:
                return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 11. Validate addition and removal roots
        root_error = validate_block_merkle_roots(
            block.foliage_transaction_block.additions_root,
            block.foliage_transaction_block.removals_root,
            additions + coinbase_additions,
            removals,
        )
        if root_error:
            return root_error, None

        # 12. The additions and removals must result in the correct filter
        byte_array_tx: List[bytes32] = []

        for coin in additions + coinbase_additions:
            byte_array_tx.append(bytearray(coin.puzzle_hash))
        for coin_name in removals:
            byte_array_tx.append(bytearray(coin_name))

        bip158: PyBIP158 = PyBIP158(byte_array_tx)
        encoded_filter = bytes(bip158.GetEncoded())
        filter_hash = std_hash(encoded_filter)

        if filter_hash != block.foliage_transaction_block.filter_hash:
            return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

        # 13. Check for duplicate outputs in additions
        addition_counter = collections.Counter(_.name() for _ in additions + coinbase_additions)
        for k, v in addition_counter.items():
            if v > 1:
                return Err.DUPLICATE_OUTPUT, None

        # 14. Check for duplicate spends inside block
        removal_counter = collections.Counter(removals)
        for k, v in removal_counter.items():
            if v > 1:
                return Err.DOUBLE_SPEND, None

        # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block)
        # fork_h is the height of the last block this chain has in common with the peak (-1 = none).
        if peak is None or height == 0:
            fork_h: int = -1
        elif fork_point_with_peak is not None:
            fork_h = fork_point_with_peak
        else:
            fork_h = find_fork_point_in_chain(blocks, peak, blocks.block_record(block.prev_header_hash))

        if fork_h == -1:
            coin_store_reorg_height = -1
        else:
            last_block_in_common = await blocks.get_block_record_from_db(blocks.height_to_hash(uint32(fork_h)))
            assert last_block_in_common is not None
            coin_store_reorg_height = last_block_in_common.height

        # Get additions and removals since (after) fork_h but not including this block
        additions_since_fork: Dict[bytes32, Tuple[Coin, uint32]] = {}
        removals_since_fork: Set[bytes32] = set()
        coinbases_since_fork: Dict[bytes32, uint32] = {}

        if height > 0:
            # First pass: cache every full block between the fork point and the
            # previous block so the second pass can walk forward cheaply.
            prev_block: Optional[FullBlock] = await block_store.get_full_block(block.prev_header_hash)
            reorg_blocks: Dict[int, FullBlock] = {}
            curr: Optional[FullBlock] = prev_block
            assert curr is not None
            reorg_blocks[curr.height] = curr
            while curr.height > fork_h:
                if curr.height == 0:
                    break
                curr = await block_store.get_full_block(curr.prev_header_hash)
                assert curr is not None
                reorg_blocks[curr.height] = curr

            # Second pass: collect removals/additions/coinbases made in the fork.
            curr = prev_block
            assert curr is not None
            while curr.height > fork_h:
                # Coin store doesn't contain coins from fork, we have to run generator for each block in fork
                if curr.transactions_generator is not None:
                    curr_block_generator: Optional[BlockGenerator] = await get_block_generator(curr)
                    assert curr_block_generator is not None
                    # NOTE(review): this reassigns the `npc_result` parameter, which is
                    # also returned at the end of this function — confirm intended.
                    npc_result = get_name_puzzle_conditions(curr_block_generator, False)
                    removals_in_curr, additions_in_curr = tx_removals_and_additions(npc_result.npc_list)
                else:
                    removals_in_curr = []
                    additions_in_curr = []

                for c_name in removals_in_curr:
                    removals_since_fork.add(c_name)
                for c in additions_in_curr:
                    additions_since_fork[c.name()] = (c, curr.height)

                for coinbase_coin in curr.get_included_reward_coins():
                    additions_since_fork[coinbase_coin.name()] = (coinbase_coin, curr.height)
                    coinbases_since_fork[coinbase_coin.name()] = curr.height
                if curr.height == 0:
                    break
                curr = reorg_blocks[curr.height - 1]
                assert curr is not None

        removal_coin_records: Dict[bytes32, CoinRecord] = {}
        for rem in removals:
            if rem in additions_dic:
                # Ephemeral coin: created and spent inside this very block.
                rem_coin: Coin = additions_dic[rem]
                new_unspent: CoinRecord = CoinRecord(
                    rem_coin,
                    height,
                    uint32(0),
                    False,
                    (rem in coinbases_since_fork),
                    block.foliage_transaction_block.timestamp,
                )
                removal_coin_records[new_unspent.name] = new_unspent
            else:
                unspent = await coin_store.get_coin_record(rem)
                if unspent is not None and unspent.confirmed_block_index <= coin_store_reorg_height:
                    # Spending something in the current chain, confirmed before fork
                    # (We ignore all coins confirmed after fork)
                    if unspent.spent == 1 and unspent.spent_block_index <= coin_store_reorg_height:
                        # Check for coins spent in an ancestor block
                        return Err.DOUBLE_SPEND, None
                    removal_coin_records[unspent.name] = unspent
                else:
                    # This coin is not in the current heaviest chain, so it must be in the fork
                    if rem not in additions_since_fork:
                        # Check for spending a coin that does not exist in this fork
                        # TODO: fix this, there is a consensus bug here
                        return Err.UNKNOWN_UNSPENT, None
                    new_coin, confirmed_height = additions_since_fork[rem]
                    new_coin_record: CoinRecord = CoinRecord(
                        new_coin,
                        confirmed_height,
                        uint32(0),
                        False,
                        (rem in coinbases_since_fork),
                        block.foliage_transaction_block.timestamp,
                    )
                    removal_coin_records[new_coin_record.name] = new_coin_record

                # This check applies to both coins created before fork (pulled from coin_store),
                # and coins created after fork (additions_since_fork)
                if rem in removals_since_fork:
                    # This coin was spent in the fork
                    return Err.DOUBLE_SPEND, None

        # Sum spent vs created amounts; the difference (if any) is fees.
        removed = 0
        for unspent in removal_coin_records.values():
            removed += unspent.coin.amount
        added = 0
        for coin in additions:
            added += coin.amount

        # 16. Check that the total coin amount for added is <= removed
        if removed < added:
            return Err.MINTING_COIN, None

        fees = removed - added
        assert_fee_sum: uint64 = uint64(0)
        for npc in npc_list:
            if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
                fee_list: List[ConditionWithArgs] = npc.condition_dict[ConditionOpcode.RESERVE_FEE]
                for cvp in fee_list:
                    fee = int_from_bytes(cvp.vars[0])
                    assert_fee_sum = assert_fee_sum + fee

        # 17. Check that the assert fee sum <= fees
        if fees < assert_fee_sum:
            return Err.RESERVE_FEE_CONDITION_FAILED, None

        # 18. Check that the assert fee amount < maximum coin amount
        if fees > constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 19. Check that the computed fees are equal to the fees in the block header
        if block.transactions_info.fees != fees:
            return Err.INVALID_BLOCK_FEE_AMOUNT, None

        # 20. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
        for unspent in removal_coin_records.values():
            if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
                return Err.WRONG_PUZZLE_HASH, None

        # 21. Verify conditions
        # create hash_key list for aggsig check
        pairs_pks = []
        pairs_msgs = []
        for npc in npc_list:
            assert height is not None
            unspent = removal_coin_records[npc.coin_name]
            error = mempool_check_conditions_dict(
                unspent,
                coin_announcement_names,
                puzzle_announcement_names,
                npc.condition_dict,
                prev_transaction_block_height,
                block.foliage_transaction_block.timestamp,
            )
            if error:
                return error, None
            for pk, m in pkm_pairs_for_conditions_dict(
                npc.condition_dict, npc.coin_name, constants.AGG_SIG_ME_ADDITIONAL_DATA
            ):
                pairs_pks.append(pk)
                pairs_msgs.append(m)

        # 22. Verify aggregated signature
        # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
        if not block.transactions_info.aggregated_signature:
            return Err.BAD_AGGREGATE_SIGNATURE, None
        # noinspection PyTypeChecker
        if not AugSchemeMPL.aggregate_verify(pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature):
            return Err.BAD_AGGREGATE_SIGNATURE, None

        return None, npc_result
def test_readme():
    """Walk through the blspy README examples: basic sign/verify,
    (de)serialization, aggregation, the PoP scheme, and HD key derivation."""
    seed: bytes = bytes([
        0, 50, 6, 244, 24, 199, 1, 25, 52, 88, 192, 19, 18, 12, 89, 6,
        220, 18, 102, 58, 209, 82, 12, 62, 89, 110, 182, 9, 44, 20, 254, 22,
    ])
    sk: PrivateKey = AugSchemeMPL.key_gen(seed)
    pk: G1Element = sk.get_g1()

    # Basic sign and verify.
    message: bytes = bytes([1, 2, 3, 4, 5])
    signature: G2Element = AugSchemeMPL.sign(sk, message)
    ok: bool = AugSchemeMPL.verify(pk, message, signature)
    assert ok

    # Serialization round-trip.
    sk_bytes: bytes = bytes(sk)  # 32 bytes
    pk_bytes: bytes = bytes(pk)  # 48 bytes
    signature_bytes: bytes = bytes(signature)  # 96 bytes
    print(sk_bytes.hex(), pk_bytes.hex(), signature_bytes.hex())
    sk = PrivateKey.from_bytes(sk_bytes)
    pk = G1Element.from_bytes(pk_bytes)
    signature = G2Element.from_bytes(signature_bytes)

    # Aggregate signatures over two different messages.
    # (Each key_gen reuses `seed` with only the first byte changed.)
    seed = bytes([1]) + seed[1:]
    sk1: PrivateKey = AugSchemeMPL.key_gen(seed)
    seed = bytes([2]) + seed[1:]
    sk2: PrivateKey = AugSchemeMPL.key_gen(seed)
    message2: bytes = bytes([1, 2, 3, 4, 5, 6, 7])
    pk1: G1Element = sk1.get_g1()
    sig1: G2Element = AugSchemeMPL.sign(sk1, message)
    pk2: G1Element = sk2.get_g1()
    sig2: G2Element = AugSchemeMPL.sign(sk2, message2)
    agg_sig: G2Element = AugSchemeMPL.aggregate([sig1, sig2])
    ok = AugSchemeMPL.aggregate_verify([pk1, pk2], [message, message2], agg_sig)
    assert ok

    # Aggregates of aggregates also verify.
    seed = bytes([3]) + seed[1:]
    sk3: PrivateKey = AugSchemeMPL.key_gen(seed)
    pk3: G1Element = sk3.get_g1()
    message3: bytes = bytes([100, 2, 254, 88, 90, 45, 23])
    sig3: G2Element = AugSchemeMPL.sign(sk3, message3)
    agg_sig_final: G2Element = AugSchemeMPL.aggregate([agg_sig, sig3])
    ok = AugSchemeMPL.aggregate_verify([pk1, pk2, pk3], [message, message2, message3], agg_sig_final)
    assert ok

    # Proof-of-possession scheme: every signer signs the same message.
    pop_sig1: G2Element = PopSchemeMPL.sign(sk1, message)
    pop_sig2: G2Element = PopSchemeMPL.sign(sk2, message)
    pop_sig3: G2Element = PopSchemeMPL.sign(sk3, message)
    pop1: G2Element = PopSchemeMPL.pop_prove(sk1)
    pop2: G2Element = PopSchemeMPL.pop_prove(sk2)
    pop3: G2Element = PopSchemeMPL.pop_prove(sk3)
    ok = PopSchemeMPL.pop_verify(pk1, pop1)
    assert ok
    ok = PopSchemeMPL.pop_verify(pk2, pop2)
    assert ok
    ok = PopSchemeMPL.pop_verify(pk3, pop3)
    assert ok
    pop_sig_agg: G2Element = PopSchemeMPL.aggregate([pop_sig1, pop_sig2, pop_sig3])
    ok = PopSchemeMPL.fast_aggregate_verify([pk1, pk2, pk3], message, pop_sig_agg)
    assert ok
    # Public keys (and secret keys) aggregate too under the PoP scheme.
    pop_agg_pk: G1Element = pk1 + pk2 + pk3
    ok = PopSchemeMPL.verify(pop_agg_pk, message, pop_sig_agg)
    assert ok
    pop_agg_sk: PrivateKey = PrivateKey.aggregate([sk1, sk2, sk3])
    ok = PopSchemeMPL.sign(pop_agg_sk, message) == pop_sig_agg
    assert ok

    # Hardened vs unhardened child key derivation.
    master_sk: PrivateKey = AugSchemeMPL.key_gen(seed)
    child: PrivateKey = AugSchemeMPL.derive_child_sk(master_sk, 152)
    grandchild: PrivateKey = AugSchemeMPL.derive_child_sk(child, 952)
    master_pk: G1Element = master_sk.get_g1()
    child_u: PrivateKey = AugSchemeMPL.derive_child_sk_unhardened(master_sk, 22)
    grandchild_u: PrivateKey = AugSchemeMPL.derive_child_sk_unhardened(child_u, 0)
    child_u_pk: G1Element = AugSchemeMPL.derive_child_pk_unhardened(master_pk, 22)
    grandchild_u_pk: G1Element = AugSchemeMPL.derive_child_pk_unhardened(child_u_pk, 0)
    # Public-key-only unhardened derivation must match the secret-key path.
    ok = grandchild_u_pk == grandchild_u.get_g1()
    assert ok
async def process_partial(
    self,
    partial: SubmitPartial,
    time_received_partial: uint64,
    balance: uint64,
    curr_difficulty: uint64,
) -> Dict:
    """Validate a farmer's submitted partial and queue it for point accounting.

    Checks, in order: the suggested difficulty, the aggregate signature over
    the rewards target and payload hash, the p2 singleton puzzle hash, that
    the signage point / EOS is known and not reverted, submission latency,
    the proof of space itself, and that its required iterations meet this
    pool's limit. Returns a dict that always carries "points_balance" and a
    difficulty entry; failures additionally carry "error_code" and
    "error_message".
    """
    if partial.payload.suggested_difficulty < self.min_difficulty:
        return {
            "error_code": PoolErr.INVALID_DIFFICULTY.value,
            "error_message": f"Invalid difficulty {partial.payload.suggested_difficulty}. minimum: {self.min_difficulty} ",
            "points_balance": balance,
            "curr_difficulty": curr_difficulty,
        }

    # Validate signatures: the owner key covers the rewards target and the
    # plot key covers the payload hash, in a single aggregate signature.
    pk1: G1Element = partial.payload.owner_public_key
    m1: bytes = partial.payload.rewards_target
    pk2: G1Element = partial.payload.proof_of_space.plot_public_key
    m2: bytes = partial.payload.get_hash()
    valid_sig = AugSchemeMPL.aggregate_verify([pk1, pk2], [m1, m2], partial.rewards_and_partial_aggregate_signature)
    if not valid_sig:
        return {
            "error_code": PoolErr.INVALID_SIGNATURE.value,
            "error_message": f"The aggregate signature is invalid {partial.rewards_and_partial_aggregate_signature}",
            "points_balance": balance,
            # NOTE(review): this branch (and some below) use the key "difficulty"
            # while others use "curr_difficulty" — confirm which clients expect.
            "difficulty": curr_difficulty,
        }

    if partial.payload.proof_of_space.pool_contract_puzzle_hash != await self.calculate_p2_singleton_ph(partial):
        return {
            "error_code": PoolErr.INVALID_P2_SINGLETON_PUZZLE_HASH.value,
            # Fixed: the message was truncated ("The puzzl h ...") and interpolated
            # the aggregate signature instead of the offending puzzle hash.
            "error_message": f"Invalid pool contract puzzle hash {partial.payload.proof_of_space.pool_contract_puzzle_hash}",
            "points_balance": balance,
            "difficulty": curr_difficulty,
        }

    # Look up the signage point (or end-of-sub-slot) this partial refers to.
    if partial.payload.end_of_sub_slot:
        response = await self.node_rpc_client.get_recent_signage_point_or_eos(None, partial.payload.sp_hash)
    else:
        response = await self.node_rpc_client.get_recent_signage_point_or_eos(partial.payload.sp_hash, None)
    if response is None or response["reverted"]:
        return {
            "error_code": PoolErr.NOT_FOUND.value,
            "error_message": f"Did not find signage point or EOS {partial.payload.sp_hash}, {response}",
            "points_balance": balance,
            "difficulty": curr_difficulty,
        }
    node_time_received_sp = response["time_received"]
    signage_point: Optional[SignagePoint] = response.get("signage_point", None)
    end_of_sub_slot: Optional[EndOfSubSlotBundle] = response.get("eos", None)

    # Reject partials that arrive too long after the node saw the signage point.
    if time_received_partial - node_time_received_sp > self.partial_time_limit:
        return {
            "error_code": PoolErr.TOO_LATE.value,
            "error_message": f"Received partial in {time_received_partial - node_time_received_sp}. "
            f"Make sure your proof of space lookups are fast, and network connectivity is good. Response "
            f"must happen in less than {self.partial_time_limit} seconds. NAS or networking farming can be an "
            f"issue",
            "points_balance": balance,
            "curr_difficulty": curr_difficulty,
        }

    # Validate the proof
    if signage_point is not None:
        challenge_hash: bytes32 = signage_point.cc_vdf.challenge
    else:
        # NOTE(review): assumes end_of_sub_slot is present whenever signage_point
        # is absent — confirm the RPC guarantees one of the two.
        challenge_hash = end_of_sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf.get_hash()
    quality_string: Optional[bytes32] = partial.payload.proof_of_space.verify_and_get_quality_string(
        self.constants, challenge_hash, partial.payload.sp_hash
    )
    if quality_string is None:
        return {
            "error_code": PoolErr.INVALID_PROOF.value,
            "error_message": f"Invalid proof of space {partial.payload.sp_hash}",
            "points_balance": balance,
            "curr_difficulty": curr_difficulty,
        }

    required_iters: uint64 = calculate_iterations_quality(
        self.constants.DIFFICULTY_CONSTANT_FACTOR,
        quality_string,
        partial.payload.proof_of_space.size,
        curr_difficulty,
        partial.payload.sp_hash,
    )
    if required_iters >= self.iters_limit:
        return {
            "error_code": PoolErr.PROOF_NOT_GOOD_ENOUGH.value,
            "error_message": f"Proof of space has required iters {required_iters}, too high for difficulty "
            f"{curr_difficulty}",
            "points_balance": balance,
            "curr_difficulty": curr_difficulty,
        }

    # All checks passed: queue the partial for asynchronous point accounting.
    await self.pending_point_partials.put((partial, time_received_partial, curr_difficulty))
    return {"points_balance": balance, "curr_difficulty": curr_difficulty}
def test_aggregate_verify_zero_items():
    """The identity G2 element must verify as an aggregate over zero (pk, msg) pairs."""
    empty_signature = G2Element()
    assert AugSchemeMPL.aggregate_verify([], [], empty_signature)
def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA) -> None:
    """
    Print a lot of useful information about a `SpendBundle` that might help with debugging
    its clvm.

    For each coin spend: checks the puzzle reveal against the coin's puzzle hash,
    runs the puzzle, and prints its conditions while collecting (pk, msg) pairs
    and coin/puzzle announcements. Then summarizes spent/created/ephemeral coins,
    announcement symmetric differences, and whether the aggregate signature
    verifies. Output goes to stdout; nothing is returned.
    """
    pks = []
    msgs = []
    created_coin_announcements: List[List[bytes]] = []
    asserted_coin_announcements = []
    created_puzzle_announcements: List[List[bytes]] = []
    asserted_puzzle_announcements = []
    print("=" * 80)
    for coin_spend in spend_bundle.coin_spends:
        coin = coin_spend.coin
        puzzle_reveal = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
        solution = Program.from_bytes(bytes(coin_spend.solution))
        coin_name = coin.name()

        # The revealed puzzle must hash to the coin's puzzle hash, or the spend is bogus.
        if puzzle_reveal.get_tree_hash() != coin_spend.coin.puzzle_hash:
            print("*** BAD PUZZLE REVEAL")
            print(f"{puzzle_reveal.get_tree_hash().hex()} vs {coin_spend.coin.puzzle_hash.hex()}")
            print("*" * 80)
            continue

        print(f"consuming coin {dump_coin(coin)}")
        print(f" with id {coin_name}")
        print()
        # A copy-pasteable brun command for re-running this spend by hand.
        print(f"\nbrun -y main.sym '{bu_disassemble(puzzle_reveal)}' '{bu_disassemble(solution)}'")
        error, conditions, cost = conditions_dict_for_solution(puzzle_reveal, solution, INFINITE_COST)
        if error:
            print(f"*** error {error}")
        elif conditions is not None:
            # Collect the (public key, message) pairs this spend requires.
            for pk_bytes, m in pkm_pairs_for_conditions_dict(conditions, coin_name, agg_sig_additional_data):
                pks.append(G1Element.from_bytes(pk_bytes))
                msgs.append(m)
            print()
            cost, r = puzzle_reveal.run_with_cost(INFINITE_COST, solution)  # type: ignore
            print(disassemble(r))
            print()
            if conditions and len(conditions) > 0:
                print("grouped conditions:")
                for condition_programs in conditions.values():
                    print()
                    for c in condition_programs:
                        if len(c.vars) == 1:
                            as_prog = Program.to([c.opcode, c.vars[0]])
                        if len(c.vars) == 2:
                            as_prog = Program.to([c.opcode, c.vars[0], c.vars[1]])
                        print(f" {disassemble(as_prog)}")
                # Created announcements keep their source (coin id / puzzle hash)
                # prepended; asserted ones are recorded as hex digests.
                created_coin_announcements.extend(
                    [coin_name] + _.vars for _ in conditions.get(ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, [])
                )
                asserted_coin_announcements.extend(
                    [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [])]
                )
                created_puzzle_announcements.extend(
                    [puzzle_reveal.get_tree_hash()] + _.vars
                    for _ in conditions.get(ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, [])
                )
                asserted_puzzle_announcements.extend(
                    [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [])]
                )
                print()
            else:
                print("(no output conditions generated)")
        print()
        print("-------")

    # Ephemeral coins are created and spent within the same bundle.
    created = set(spend_bundle.additions())
    spent = set(spend_bundle.removals())
    zero_coin_set = set(coin.name() for coin in created if coin.amount == 0)
    ephemeral = created.intersection(spent)
    created.difference_update(ephemeral)
    spent.difference_update(ephemeral)
    print()
    print("spent coins")
    for coin in sorted(spent, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => spent coin id {coin.name()}")
    print()
    print("created coins")
    for coin in sorted(created, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => created coin id {coin.name()}")
    if ephemeral:
        print()
        print("ephemeral coins")
        for coin in sorted(ephemeral, key=lambda _: _.name()):
            print(f" {dump_coin(coin)}")
            print(f" => created coin id {coin.name()}")

    # Pair each created announcement with the digest that assertions refer to.
    created_coin_announcement_pairs = [(_, std_hash(b"".join(_)).hex()) for _ in created_coin_announcements]
    if created_coin_announcement_pairs:
        print("created coin announcements")
        for announcement, hashed in sorted(created_coin_announcement_pairs, key=lambda _: _[-1]):
            as_hex = [f"0x{_.hex()}" for _ in announcement]
            print(f" {as_hex} =>\n {hashed}")

    # Symmetric difference: announcements created-but-not-asserted or vice versa.
    eor_coin_announcements = sorted(
        set(_[-1] for _ in created_coin_announcement_pairs) ^ set(asserted_coin_announcements)
    )

    created_puzzle_announcement_pairs = [(_, std_hash(b"".join(_)).hex()) for _ in created_puzzle_announcements]
    if created_puzzle_announcements:
        print("created puzzle announcements")
        for announcement, hashed in sorted(created_puzzle_announcement_pairs, key=lambda _: _[-1]):
            as_hex = [f"0x{_.hex()}" for _ in announcement]
            print(f" {as_hex} =>\n {hashed}")

    eor_puzzle_announcements = sorted(
        set(_[-1] for _ in created_puzzle_announcement_pairs) ^ set(asserted_puzzle_announcements)
    )

    print()
    print()
    print(f"zero_coin_set = {sorted(zero_coin_set)}")
    print()
    if created_coin_announcement_pairs or asserted_coin_announcements:
        print(f"created coin announcements = {sorted([_[-1] for _ in created_coin_announcement_pairs])}")
        print()
        print(f"asserted coin announcements = {sorted(asserted_coin_announcements)}")
        print()
        print(f"symdiff of coin announcements = {sorted(eor_coin_announcements)}")
        print()
    if created_puzzle_announcement_pairs or asserted_puzzle_announcements:
        print(f"created puzzle announcements = {sorted([_[-1] for _ in created_puzzle_announcement_pairs])}")
        print()
        print(f"asserted puzzle announcements = {sorted(asserted_puzzle_announcements)}")
        print()
        print(f"symdiff of puzzle announcements = {sorted(eor_puzzle_announcements)}")
        print()
    print()
    print("=" * 80)
    print()
    # Final check: does the bundle's aggregate signature cover all collected pairs?
    validates = AugSchemeMPL.aggregate_verify(pks, msgs, spend_bundle.aggregated_signature)
    print(f"aggregated signature check pass: {validates}")
    print(f"pks: {pks}")
    print(f"msgs: {[msg.hex() for msg in msgs]}")
    # Message layout breakdown: trailing 64 hex chars are additional data,
    # the 64 before that the coin id, the remainder the raw message data.
    print(f" msg_data: {[msg.hex()[:-128] for msg in msgs]}")
    print(f" coin_ids: {[msg.hex()[-128:-64] for msg in msgs]}")
    print(f" add_data: {[msg.hex()[-64:] for msg in msgs]}")
    print(f"signature: {spend_bundle.aggregated_signature}")
def debug_spend_bundle(spend_bundle: SpendBundle) -> None:
    """
    Print a lot of useful information about a `SpendBundle` that might help with debugging its clvm.

    Dumps, per coin solution: the coin, a reproducible `brun` command line, the run
    output, and the grouped conditions; then summarizes spent/created/ephemeral coins,
    coin announcements, and finally checks the aggregated BLS signature.
    Purely diagnostic: writes to stdout only, returns nothing.
    """
    pks = []
    msgs = []
    created_announcements: List[List[bytes]] = []
    asserted_announcements = []
    print("=" * 80)
    for coin_solution in spend_bundle.coin_solutions:
        coin = coin_solution.coin
        puzzle_reveal = coin_solution.puzzle_reveal
        solution = coin_solution.solution
        coin_name = coin.name()
        print(f"consuming coin {dump_coin(coin)}")
        print(f" with id {coin_name}")
        print()
        print(
            f"\nbrun -y main.sym '{bu_disassemble(puzzle_reveal)}' '{bu_disassemble(solution)}'"
        )
        error, conditions, cost = conditions_dict_for_solution(
            puzzle_reveal, solution, INFINITE_COST)
        if error:
            print(f"*** error {error}")
        elif conditions is not None:
            # bytes([3] * 32) stands in for the genesis challenge / additional data
            # used when deriving AGG_SIG messages — presumably a debug placeholder;
            # TODO confirm against the caller's network constants.
            for pk, m in pkm_pairs_for_conditions_dict(conditions, coin_name,
                                                       bytes([3] * 32)):
                pks.append(pk)
                msgs.append(m)
            print()
            r = puzzle_reveal.run(solution)
            print(disassemble(r))
            print()
            if conditions and len(conditions) > 0:
                print("grouped conditions:")
                for condition_programs in conditions.values():
                    print()
                    for c in condition_programs:
                        # FIX: the previous version only assigned `as_prog` for
                        # exactly 1 or 2 vars, raising UnboundLocalError (or
                        # printing a stale value) for any other arity. This form
                        # is identical for 1/2 vars and defined for all arities.
                        as_prog = Program.to([c.opcode] + list(c.vars))
                        print(f" {disassemble(as_prog)}")
                created_announcements.extend(
                    [coin_name] + _.vars for _ in conditions.get(
                        ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, []))
                asserted_announcements.extend([
                    _.vars[0].hex() for _ in conditions.get(
                        ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [])
                ])
                print()
            else:
                print("(no output conditions generated)")
        print()
        print("-------")

    created = set(spend_bundle.additions())
    spent = set(spend_bundle.removals())
    zero_coin_set = set(coin.name() for coin in created if coin.amount == 0)
    # Coins both created and spent inside the same bundle are "ephemeral";
    # report them separately from genuinely spent/created coins.
    ephemeral = created.intersection(spent)
    created.difference_update(ephemeral)
    spent.difference_update(ephemeral)
    print()
    print("spent coins")
    for coin in sorted(spent, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => spent coin id {coin.name()}")
    print()
    print("created coins")
    for coin in sorted(created, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => created coin id {coin.name()}")
    if ephemeral:
        print()
        print("ephemeral coins")
        for coin in sorted(ephemeral, key=lambda _: _.name()):
            print(f" {dump_coin(coin)}")
            print(f" => created coin id {coin.name()}")

    # Announcement id = sha256(coin_id || message parts), matching what
    # ASSERT_COIN_ANNOUNCEMENT consumes.
    created_announcement_pairs = [(_, std_hash(b"".join(_)).hex())
                                  for _ in created_announcements]
    if created_announcement_pairs:
        print("created announcements")
        for announcement, hashed in sorted(created_announcement_pairs,
                                           key=lambda _: _[-1]):
            as_hex = [f"0x{_.hex()}" for _ in announcement]
            print(f" {as_hex} =>\n {hashed}")
    # Symmetric difference: announcements created-but-not-asserted or
    # asserted-but-not-created — the usual source of ASSERT failures.
    eor_announcements = sorted(
        set(_[-1] for _ in created_announcement_pairs)
        ^ set(asserted_announcements))
    print()
    print()
    print(f"zero_coin_set = {sorted(zero_coin_set)}")
    print()
    print(
        f"created announcements = {sorted([_[-1] for _ in created_announcement_pairs])}"
    )
    print()
    print(f"asserted announcements = {sorted(asserted_announcements)}")
    print()
    print(f"symdiff of announcements = {sorted(eor_announcements)}")
    print()
    print()
    print("=" * 80)
    print()
    validates = AugSchemeMPL.aggregate_verify(
        pks, msgs, spend_bundle.aggregated_signature)
    print(f"aggregated signature check pass: {validates}")
def validate_spendbundle(
    new_spend: SpendBundle,
    mempool_removals: List[Coin],
    current_coin_records: List[CoinRecord],
    block_height: uint32,
    validate_signature=True
) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
    """
    Run mempool-style validation on `new_spend` against a snapshot of coin records.

    Returns a (cost, status, error) triple: cost is set when the bundle was fully
    costed, status is SUCCESS/PENDING/FAILED, and error names the failure (or None).
    `validate_signature=False` skips the aggregate BLS signature check.
    """
    cost_result = CostResult.from_bytes(validate_transaction(bytes(new_spend)))
    npc_list = cost_result.npc_list
    cost = cost_result.cost
    if cost > DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM:
        return None, MempoolInclusionStatus.FAILED, Err.BLOCK_COST_EXCEEDS_MAX
    if cost_result.error is not None:
        return None, MempoolInclusionStatus.FAILED, Err(cost_result.error)

    removal_names: List[bytes32] = new_spend.removal_names()
    additions = additions_for_npc(npc_list)
    additions_dict: Dict[bytes32, Coin] = {}
    for add in additions:
        additions_dict[add.name()] = add

    addition_amount = uint64(0)
    # Check additions for max coin amount
    for coin in additions:
        if coin.amount > DEFAULT_CONSTANTS.MAX_COIN_AMOUNT:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
            )
        addition_amount = uint64(addition_amount + coin.amount)

    # Check for duplicate outputs
    addition_counter = collections.Counter(_.name() for _ in additions)
    for k, v in addition_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT

    # Check for duplicate inputs
    removal_counter = collections.Counter(name for name in removal_names)
    for k, v in removal_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND

    removal_record_dict: Dict[bytes32, CoinRecord] = {}
    removal_coin_dict: Dict[bytes32, Coin] = {}
    unknown_unspent_error: bool = False
    removal_amount = uint64(0)
    # FIX(perf): build a name->record index once instead of a linear
    # filter() scan over current_coin_records for every removal (was O(n*m)).
    records_by_name: Dict[bytes32, CoinRecord] = {
        record.coin.name(): record for record in current_coin_records
    }
    for name in removal_names:
        removal_record = records_by_name.get(name)
        if removal_record is None and name not in additions_dict:
            unknown_unspent_error = True
            break
        elif name in additions_dict:
            # Ephemeral coin: created and spent inside this same bundle.
            removal_coin = additions_dict[name]
            # TODO(straya): what timestamp to use here?
            # FIX: previous code read `self.peak.height` but this is a free
            # function with no `self` -> NameError on this path. `block_height`
            # is assumed to be the height this bundle would be included at —
            # TODO confirm against callers.
            removal_record = CoinRecord(
                removal_coin,
                uint32(block_height),
                uint32(0),
                False,
                False,
                uint64(int(time.time())),
            )
        assert removal_record is not None
        removal_amount = uint64(removal_amount + removal_record.coin.amount)
        removal_record_dict[name] = removal_record
        removal_coin_dict[name] = removal_record.coin

    if unknown_unspent_error:
        return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN_UNSPENT

    if addition_amount > removal_amount:
        # A bundle may never create more value than it consumes.
        return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN
    fees = removal_amount - addition_amount

    # RESERVE_FEE conditions demand a minimum fee; sum and enforce them.
    assert_fee_sum: uint64 = uint64(0)
    for npc in npc_list:
        if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
            fee_list: List[ConditionVarPair] = npc.condition_dict[
                ConditionOpcode.RESERVE_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                assert_fee_sum = assert_fee_sum + fee
    if fees < assert_fee_sum:
        return (
            None,
            MempoolInclusionStatus.FAILED,
            Err.RESERVE_FEE_CONDITION_FAILED,
        )
    if cost == 0:
        return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

    # Use this information later when constructing a block
    fail_reason, conflicts = check_removals(mempool_removals,
                                            removal_record_dict)
    # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
    tmp_error: Optional[Err] = None
    conflicting_pool_items: Dict[bytes32, Coin] = {}
    if fail_reason is Err.MEMPOOL_CONFLICT:
        for conflicting in conflicts:
            conflicting_pool_items[conflicting.name()] = conflicting
        # FIX: previous code concatenated a str with a dict (TypeError);
        # format the dict into the message instead.
        print(
            f"The following items conflict with current mempool items: {conflicting_pool_items}"
        )
        print(
            "This fails in the simulation, but the bigger fee_per_cost likely wins on the network"
        )
        return (
            uint64(cost),
            MempoolInclusionStatus.FAILED,
            Err.MEMPOOL_CONFLICT,
        )
    elif fail_reason:
        return None, MempoolInclusionStatus.FAILED, fail_reason
    if tmp_error:
        return None, MempoolInclusionStatus.FAILED, tmp_error

    # Verify conditions, create hash_key list for aggsig check
    pks: List[G1Element] = []
    msgs: List[bytes32] = []
    error: Optional[Err] = None
    for npc in npc_list:
        coin_record: CoinRecord = removal_record_dict[npc.coin_name]
        # Check that the revealed removal puzzles actually match the puzzle hash
        if npc.puzzle_hash != coin_record.coin.puzzle_hash:
            return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH
        # Conditions are evaluated against the previous height.
        chialisp_height = block_height - 1
        error = mempool_check_conditions_dict(coin_record, new_spend,
                                              npc.condition_dict,
                                              uint32(chialisp_height))
        if error:
            # Height-based failures are transient: the bundle may become valid later.
            if error is Err.ASSERT_HEIGHT_NOW_EXCEEDS_FAILED or error is Err.ASSERT_HEIGHT_AGE_EXCEEDS_FAILED:
                return uint64(cost), MempoolInclusionStatus.PENDING, error
            break
        if validate_signature:
            for pk, message in pkm_pairs_for_conditions_dict(
                    npc.condition_dict, npc.coin_name):
                pks.append(pk)
                msgs.append(message)
    if error:
        return None, MempoolInclusionStatus.FAILED, error

    if validate_signature:
        # Verify aggregated signature
        if not AugSchemeMPL.aggregate_verify(pks, msgs,
                                             new_spend.aggregated_signature):
            return None, MempoolInclusionStatus.FAILED, Err.BAD_AGGREGATE_SIGNATURE

    return uint64(cost), MempoolInclusionStatus.SUCCESS, None
async def add_spendbundle(
    self,
    new_spend: SpendBundle,
    cached_result: Optional[CostResult] = None
) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
    """
    Try to admit `new_spend` into the mempool.

    Returns a (cost, status, error) triple:
      - cost: the CLVM cost of the bundle when it could be computed, else None
      - status: SUCCESS (added or already present), PENDING (kept as a
        potential transaction, e.g. height assertions not yet met or a
        conflicting mempool item pays a better fee rate), or FAILED
      - error: the specific Err on PENDING/FAILED, None on SUCCESS

    `cached_result` lets a caller that already costed the bundle skip the
    (expensive) CLVM evaluation.
    """
    if self.peak is None:
        return None, MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED
    # Remember the bundle id so we don't re-request it from peers.
    self.seen_bundle_hashes[new_spend.name()] = new_spend.name()
    self.maybe_pop_seen()
    if cached_result is None:
        # Calculate the cost and fees
        program = best_solution_program(new_spend)
        # npc contains names of the coins removed, puzzle_hashes and their spend conditions
        cached_result = calculate_cost_of_program(
            program, self.constants.CLVM_COST_RATIO_CONSTANT, True)
    npc_list = cached_result.npc_list
    cost = cached_result.cost
    if cached_result.error is not None:
        return None, MempoolInclusionStatus.FAILED, Err(
            cached_result.error)
    # build removal list
    removal_names: List[bytes32] = new_spend.removal_names()
    additions = new_spend.additions()
    additions_dict: Dict[bytes32, Coin] = {}
    for add in additions:
        additions_dict[add.name()] = add

    addition_amount = uint64(0)
    # Check additions for max coin amount
    for coin in additions:
        if coin.amount >= self.constants.MAX_COIN_AMOUNT:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
            )
        addition_amount = uint64(addition_amount + coin.amount)
    # Check for duplicate outputs
    addition_counter = collections.Counter(_.name() for _ in additions)
    for k, v in addition_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT
    # Check for duplicate inputs
    removal_counter = collections.Counter(name for name in removal_names)
    for k, v in removal_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND
    # Skip if already added
    if new_spend.name() in self.mempool.spends:
        return uint64(cost), MempoolInclusionStatus.SUCCESS, None

    removal_record_dict: Dict[bytes32, CoinRecord] = {}
    removal_coin_dict: Dict[bytes32, Coin] = {}
    unknown_unspent_error: bool = False
    removal_amount = uint64(0)
    for name in removal_names:
        removal_record = await self.coin_store.get_coin_record(name)
        if removal_record is None and name not in additions_dict:
            # Spending a coin neither in the store nor created by this bundle.
            unknown_unspent_error = True
            break
        elif name in additions_dict:
            # Ephemeral coin: created and spent within this same bundle, so
            # synthesize a record for it as if confirmed at the next height.
            removal_coin = additions_dict[name]
            # TODO(straya): what timestamp to use here?
            removal_record = CoinRecord(
                removal_coin,
                uint32(self.peak.height + 1),
                uint32(0),
                False,
                False,
                uint64(int(time.time())),
            )

        assert removal_record is not None
        removal_amount = uint64(removal_amount +
                                removal_record.coin.amount)
        removal_record_dict[name] = removal_record
        removal_coin_dict[name] = removal_record.coin

    if unknown_unspent_error:
        return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN_UNSPENT

    if addition_amount > removal_amount:
        # NOTE(review): leftover debug print — consider logging instead.
        print(addition_amount, removal_amount)
        return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN

    fees = removal_amount - addition_amount
    # Sum all ASSERT_FEE demands; the actual fee must cover them.
    assert_fee_sum: uint64 = uint64(0)

    for npc in npc_list:
        if ConditionOpcode.ASSERT_FEE in npc.condition_dict:
            fee_list: List[ConditionVarPair] = npc.condition_dict[
                ConditionOpcode.ASSERT_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                assert_fee_sum = assert_fee_sum + fee

    if fees < assert_fee_sum:
        return (
            None,
            MempoolInclusionStatus.FAILED,
            Err.ASSERT_FEE_CONDITION_FAILED,
        )

    if cost == 0:
        return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

    fees_per_cost: float = fees / cost
    # If pool is at capacity check the fee, if not then accept even without the fee
    if self.mempool.at_full_capacity():
        if fees == 0:
            return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
        if fees_per_cost < self.mempool.get_min_fee_rate():
            return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE

    # Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
    # Use this information later when constructing a block
    fail_reason, conflicts = await self.check_removals(removal_record_dict)
    # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
    tmp_error: Optional[Err] = None
    conflicting_pool_items: Dict[bytes32, MempoolItem] = {}
    if fail_reason is Err.MEMPOOL_CONFLICT:
        for conflicting in conflicts:
            sb: MempoolItem = self.mempool.removals[conflicting.name()]
            conflicting_pool_items[sb.name] = sb
        for item in conflicting_pool_items.values():
            # An existing item pays at least as well: park this bundle as a
            # potential replacement and report PENDING.
            if item.fee_per_cost >= fees_per_cost:
                self.add_to_potential_tx_set(new_spend, cached_result)
                return (
                    uint64(cost),
                    MempoolInclusionStatus.PENDING,
                    Err.MEMPOOL_CONFLICT,
                )
    elif fail_reason:
        return None, MempoolInclusionStatus.FAILED, fail_reason

    if tmp_error:
        return None, MempoolInclusionStatus.FAILED, tmp_error

    # Verify conditions, create hash_key list for aggsig check
    pks: List[G1Element] = []
    msgs: List[bytes32] = []
    error: Optional[Err] = None
    for npc in npc_list:
        coin_record: CoinRecord = removal_record_dict[npc.coin_name]
        # Check that the revealed removal puzzles actually match the puzzle hash
        if npc.puzzle_hash != coin_record.coin.puzzle_hash:
            log.warning(
                "Mempool rejecting transaction because of wrong puzzle_hash"
            )
            log.warning(
                f"{npc.puzzle_hash} != {coin_record.coin.puzzle_hash}")
            return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH

        error = mempool_check_conditions_dict(coin_record, new_spend,
                                              npc.condition_dict,
                                              uint32(self.peak.height + 1))

        if error:
            # Height/age assertions may pass later, so keep the bundle around.
            if error is Err.ASSERT_BLOCK_INDEX_EXCEEDS_FAILED or error is Err.ASSERT_BLOCK_AGE_EXCEEDS_FAILED:
                self.add_to_potential_tx_set(new_spend, cached_result)
                return uint64(cost), MempoolInclusionStatus.PENDING, error
            break

        for pk, message in pkm_pairs_for_conditions_dict(
                npc.condition_dict, npc.coin_name):
            pks.append(pk)
            msgs.append(message)
    if error:
        return None, MempoolInclusionStatus.FAILED, error

    # Verify aggregated signature
    if len(pks) == 0 and len(msgs) == 0:
        # No AGG_SIG conditions at all: the signature must be the identity.
        validates = new_spend.aggregated_signature == G2Element.infinity()
    else:
        validates = AugSchemeMPL.aggregate_verify(
            pks, msgs, new_spend.aggregated_signature)
    if not validates:
        log.warning(f"Aggsig validation error {pks} {msgs} {new_spend}")
        return None, MempoolInclusionStatus.FAILED, Err.BAD_AGGREGATE_SIGNATURE

    # Remove all conflicting Coins and SpendBundles
    if fail_reason:
        mempool_item: MempoolItem
        for mempool_item in conflicting_pool_items.values():
            self.mempool.remove_spend(mempool_item)

    new_item = MempoolItem(new_spend, fees_per_cost, uint64(fees),
                           cached_result)
    self.mempool.add_to_pool(new_item, additions, removal_coin_dict)

    return uint64(cost), MempoolInclusionStatus.SUCCESS, None
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_signatures: bool,
) -> List[bytes]:
    """
    Pre-validate a batch of serialized blocks (worker-process entry point).

    Exactly one of `full_blocks_pickled` / `header_blocks_pickled` must be
    provided; all arguments are plain bytes/dicts so the call can cross a
    process boundary. Returns one serialized `PreValidationResult` per input
    block, in order. Exceptions per block are caught and reported as
    Err.UNKNOWN results rather than aborting the batch.

    Raises:
        ValueError: if both full blocks and header blocks are passed.
    """
    blocks: Dict[bytes, BlockRecord] = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        # FIX: this was `assert ValueError(...)`, which asserts a truthy
        # exception object and therefore never fires; raise it instead.
        raise ValueError("Only one should be passed here")

    # In this case, we are validating full blocks, not headers
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                if block.height in npc_results:
                    # Caller already ran the generator; reuse its result.
                    npc_result = NPCResult.from_bytes(
                        npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(
                            npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []

                if block.transactions_generator is not None and npc_result is None:
                    # No cached result: run the generator ourselves, using the
                    # previous-generator bytes supplied for this index.
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(
                        prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM,
                            block.transactions_info.cost),
                        cost_per_byte=constants.COST_PER_BYTE,
                        mempool_mode=False,
                        height=block.height,
                    )
                    removals, tx_additions = tx_removals_and_additions(
                        npc_result.npc_list)

                if npc_result is not None and npc_result.error is not None:
                    # CLVM failed: record the error and skip header validation.
                    results.append(
                        PreValidationResult(uint16(npc_result.error), None,
                                            npc_result, False))
                    continue

                header_block = get_block_header(block, tx_additions, removals)
                # TODO: address hint error and remove ignore
                #       error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                #       "Dict[bytes32, BlockRecord]"  [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)

                successfully_validated_signatures = False
                # If we failed CLVM, no need to validate signature, the block is already invalid
                if error_int is None:
                    # If this is False, it means either we don't have a signature (not a tx block) or we have an invalid
                    # signature (which also puts in an error) or we didn't validate the signature because we want to
                    # validate it later. receive_block will attempt to validate the signature later.
                    if validate_signatures:
                        if npc_result is not None and block.transactions_info is not None:
                            pairs_pks, pairs_msgs = pkm_pairs(
                                npc_result.npc_list,
                                constants.AGG_SIG_ME_ADDITIONAL_DATA)
                            pks_objects: List[G1Element] = [
                                G1Element.from_bytes(pk) for pk in pairs_pks
                            ]
                            if not AugSchemeMPL.aggregate_verify(
                                    pks_objects, pairs_msgs,
                                    block.transactions_info.aggregated_signature):
                                error_int = uint16(
                                    Err.BAD_AGGREGATE_SIGNATURE.value)
                            else:
                                successfully_validated_signatures = True

                results.append(
                    PreValidationResult(error_int, required_iters, npc_result,
                                        successfully_validated_signatures))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None,
                                        False))
    # In this case, we are validating header blocks
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                # TODO: address hint error and remove ignore
                #       error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                #       "Dict[bytes32, BlockRecord]"  [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(
                    PreValidationResult(error_int, required_iters, None,
                                        False))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None,
                                        False))
    return [bytes(r) for r in results]
async def _validate_transactions(self, block: FullBlock,
                                 fee_base: uint64) -> Optional[Err]:
    """
    Validate the transaction body of `block` against the current chain state.

    Checks generator cost, the addition/removal merkle roots, the transaction
    filter, duplicate outputs/spends, double spends across the fork point,
    coinbase freeze, fees, puzzle-hash reveals, per-coin conditions, and the
    aggregated BLS signature (which also covers the pool target signature).

    Returns None when everything validates, otherwise the first Err found.
    `fee_base` is the base reward added to transaction fees when checking the
    block's declared total_transaction_fees.
    """
    # TODO(straya): review, further test the code, and number all the validation steps

    # 1. Check that transactions generator is present
    if not block.transactions_generator:
        return Err.UNKNOWN
    # Get List of names removed, puzzles hashes for removed coins and conditions created
    error, npc_list, cost = calculate_cost_of_program(
        block.transactions_generator, self.constants.CLVM_COST_RATIO_CONSTANT)

    # 2. Check that cost <= MAX_BLOCK_COST_CLVM
    if cost > self.constants.MAX_BLOCK_COST_CLVM:
        return Err.BLOCK_COST_EXCEEDS_MAX
    if error:
        return error

    prev_header: Header
    if block.prev_header_hash in self.headers:
        prev_header = self.headers[block.prev_header_hash]
    else:
        return Err.EXTENDS_UNKNOWN_BLOCK

    removals: List[bytes32] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    for npc in npc_list:
        removals.append(npc.coin_name)
        removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

    additions: List[Coin] = additions_for_npc(npc_list)
    additions_dic: Dict[bytes32, Coin] = {}
    # Check additions for max coin amount
    for coin in additions:
        additions_dic[coin.name()] = coin
        if coin.amount >= self.constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM

    # Validate addition and removal roots
    root_error = self._validate_merkle_root(block, additions, removals)
    if root_error:
        return root_error

    # Validate filter: rebuild the BIP158 filter from this block's coins and
    # compare its hash with the one committed in the header.
    # (FIX: annotation was List[bytes32] but the list holds bytearray values.)
    byte_array_tx: List[bytearray] = []
    for coin in additions:
        byte_array_tx.append(bytearray(coin.puzzle_hash))
    for coin_name in removals:
        byte_array_tx.append(bytearray(coin_name))
    byte_array_tx.append(
        bytearray(block.header.data.farmer_rewards_puzzle_hash))
    byte_array_tx.append(
        bytearray(block.header.data.pool_target.puzzle_hash))
    bip158: PyBIP158 = PyBIP158(byte_array_tx)
    encoded_filter = bytes(bip158.GetEncoded())
    filter_hash = std_hash(encoded_filter)

    if filter_hash != block.header.data.filter_hash:
        return Err.INVALID_TRANSACTIONS_FILTER_HASH

    # Watch out for duplicate outputs
    addition_counter = collections.Counter(_.name() for _ in additions)
    for k, v in addition_counter.items():
        if v > 1:
            return Err.DUPLICATE_OUTPUT

    # Check for duplicate spends inside block
    removal_counter = collections.Counter(removals)
    for k, v in removal_counter.items():
        if v > 1:
            return Err.DOUBLE_SPEND

    # Check if removals exist and were not previously spend. (unspent_db + diff_store + this_block)
    fork_h = find_fork_point_in_chain(self.headers, self.lca_block,
                                      block.header)

    # Get additions and removals since (after) fork_h but not including this block
    additions_since_fork: Dict[bytes32, Tuple[Coin, uint32]] = {}
    removals_since_fork: Set[bytes32] = set()
    coinbases_since_fork: Dict[bytes32, uint32] = {}
    curr: Optional[FullBlock] = await self.block_store.get_block(
        block.prev_header_hash)
    assert curr is not None
    while curr.height > fork_h:
        removals_in_curr, additions_in_curr = await curr.tx_removals_and_additions(
        )
        for c_name in removals_in_curr:
            removals_since_fork.add(c_name)
        for c in additions_in_curr:
            additions_since_fork[c.name()] = (c, curr.height)
        # Reward coins (coinbase + fees) also count as additions in the fork.
        coinbase_coin = curr.get_coinbase()
        fees_coin = curr.get_fees_coin()
        additions_since_fork[coinbase_coin.name()] = (
            coinbase_coin,
            curr.height,
        )
        additions_since_fork[fees_coin.name()] = (
            fees_coin,
            curr.height,
        )
        coinbases_since_fork[coinbase_coin.name()] = curr.height
        coinbases_since_fork[fees_coin.name()] = curr.height
        curr = await self.block_store.get_block(curr.prev_header_hash)
        assert curr is not None

    removal_coin_records: Dict[bytes32, CoinRecord] = {}
    for rem in removals:
        if rem in additions_dic:
            # Ephemeral coin: created and spent inside this very block.
            rem_coin: Coin = additions_dic[rem]
            new_unspent: CoinRecord = CoinRecord(rem_coin, block.height,
                                                 uint32(0), False, False)
            removal_coin_records[new_unspent.name] = new_unspent
        else:
            assert prev_header is not None
            unspent = await self.coin_store.get_coin_record(rem, prev_header)
            if unspent is not None and unspent.confirmed_block_index <= fork_h:
                # Spending something in the current chain, confirmed before fork
                # (We ignore all coins confirmed after fork)
                if unspent.spent == 1 and unspent.spent_block_index <= fork_h:
                    # Spend in an ancestor block, so this is a double spend
                    return Err.DOUBLE_SPEND
                # If it's a coinbase, check that it's not frozen
                if unspent.coinbase == 1:
                    if (block.height < unspent.confirmed_block_index +
                            self.coinbase_freeze):
                        return Err.COINBASE_NOT_YET_SPENDABLE
                removal_coin_records[unspent.name] = unspent
            else:
                # This coin is not in the current heaviest chain, so it must be in the fork
                if rem not in additions_since_fork:
                    # This coin does not exist in the fork
                    # TODO: fix this, there is a consensus bug here
                    return Err.UNKNOWN_UNSPENT
                if rem in coinbases_since_fork:
                    # This coin is a coinbase coin
                    if (block.height <
                            coinbases_since_fork[rem] + self.coinbase_freeze):
                        return Err.COINBASE_NOT_YET_SPENDABLE
                new_coin, confirmed_height = additions_since_fork[rem]
                new_coin_record: CoinRecord = CoinRecord(
                    new_coin,
                    confirmed_height,
                    uint32(0),
                    False,
                    (rem in coinbases_since_fork),
                )
                removal_coin_records[
                    new_coin_record.name] = new_coin_record
            # This check applies to both coins created before fork (pulled from coin_store),
            # and coins created after fork (additions_since_fork)
            if rem in removals_since_fork:
                # This coin was spent in the fork
                return Err.DOUBLE_SPEND

    # Check fees
    removed = 0
    for unspent in removal_coin_records.values():
        removed += unspent.coin.amount
    added = 0
    for coin in additions:
        added += coin.amount
    if removed < added:
        return Err.MINTING_COIN
    fees = removed - added
    assert_fee_sum: uint64 = uint64(0)
    for npc in npc_list:
        if ConditionOpcode.ASSERT_FEE in npc.condition_dict:
            fee_list: List[ConditionVarPair] = npc.condition_dict[
                ConditionOpcode.ASSERT_FEE]
            for cvp in fee_list:
                # FIX: was `cvp.var1`; every other fee loop in this file reads
                # `cvp.vars[0]` — `var1` would fail at runtime.
                fee = int_from_bytes(cvp.vars[0])
                assert_fee_sum = assert_fee_sum + fee
    if fees < assert_fee_sum:
        return Err.ASSERT_FEE_CONDITION_FAILED

    # Check coinbase reward
    if fees + fee_base != block.header.data.total_transaction_fees:
        return Err.BAD_COINBASE_REWARD

    # Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
    for unspent in removal_coin_records.values():
        if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
            return Err.WRONG_PUZZLE_HASH

    # Verify conditions, create hash_key list for aggsig check
    pool_target_m = bytes(block.header.data.pool_target)
    # The pool signature on the pool target is checked here as well, since the pool signature is
    # aggregated along with the transaction signatures
    pairs_pks = [block.proof_of_space.pool_public_key]
    pairs_msgs = [pool_target_m]
    for npc in npc_list:
        unspent = removal_coin_records[npc.coin_name]
        error = blockchain_check_conditions_dict(
            unspent,
            removal_coin_records,
            npc.condition_dict,
            block.header,
        )
        if error:
            return error
        for pk, m in pkm_pairs_for_conditions_dict(npc.condition_dict,
                                                   npc.coin_name):
            pairs_pks.append(pk)
            pairs_msgs.append(m)

    # Verify aggregated signature
    # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
    if not block.header.data.aggregated_signature:
        return Err.BAD_AGGREGATE_SIGNATURE
    validates = AugSchemeMPL.aggregate_verify(
        pairs_pks, pairs_msgs, block.header.data.aggregated_signature)
    if not validates:
        return Err.BAD_AGGREGATE_SIGNATURE

    return None
async def validate_block_body(
    constants: ConsensusConstants,
    sub_blocks: Dict[bytes32, SubBlockRecord],
    sub_height_to_hash: Dict[uint32, bytes32],
    block_store: BlockStore,
    coin_store: CoinStore,
    peak: Optional[SubBlockRecord],
    block: Union[FullBlock, UnfinishedBlock],
    sub_height: uint32,
    height: Optional[uint32],
    cached_cost_result: Optional[CostResult] = None,
) -> Optional[Err]:
    """
    This assumes the header block has been completely validated.
    Validates the transactions and body of the block. Returns None if everything
    validates correctly, or an Err if something does not validate.

    Args:
        constants: consensus constants (cost limits, coin amount limits, etc.).
        sub_blocks: sub-block records by header hash (the current chain view).
        sub_height_to_hash: mapping of sub-heights to header hashes on the heaviest chain.
        block_store: store used to walk full blocks back to the fork point.
        coin_store: store of confirmed/spent coin records on the heaviest chain.
        peak: current peak sub-block, or None when the chain is empty.
        block: the block (or unfinished block) whose body is being validated.
        sub_height: expected sub-block height of `block`.
        height: expected (transaction-)block height, or None for non-blocks.
        cached_cost_result: optional precomputed CLVM cost/NPC result, to avoid
            re-running the generator.
    """
    if isinstance(block, FullBlock):
        assert sub_height == block.sub_block_height
        if height is not None:
            assert height == block.height
            assert block.is_block()
        else:
            assert not block.is_block()

    # 1. For non block sub-blocks, foliage block, transaction filter, transactions info, and generator must be empty
    # If it is a sub block but not a block, there is no body to validate. Check that all fields are None
    if block.foliage_sub_block.foliage_block_hash is None:
        if (
            block.foliage_block is not None
            or block.transactions_info is not None
            or block.transactions_generator is not None
        ):
            return Err.NOT_BLOCK_BUT_HAS_DATA
        return None  # This means the sub-block is valid

    # 2. For blocks, foliage block, transaction filter, transactions info must not be empty
    if block.foliage_block is None or block.foliage_block.filter_hash is None or block.transactions_info is None:
        return Err.IS_BLOCK_BUT_NO_DATA

    # keeps track of the reward coins that need to be incorporated
    expected_reward_coins: Set[Coin] = set()

    # 3. The transaction info hash in the Foliage block must match the transaction info
    if block.foliage_block.transactions_info_hash != std_hash(block.transactions_info):
        return Err.INVALID_TRANSACTIONS_INFO_HASH

    # 4. The foliage block hash in the foliage sub block must match the foliage block
    if block.foliage_sub_block.foliage_block_hash != std_hash(block.foliage_block):
        return Err.INVALID_FOLIAGE_BLOCK_HASH

    # 5. The prev generators root must be valid
    # TODO(straya): implement prev generators

    # 6. The generator root must be the tree-hash of the generator (or zeroes if no generator)
    if block.transactions_generator is not None:
        if block.transactions_generator.get_tree_hash() != block.transactions_info.generator_root:
            return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT
    else:
        if block.transactions_info.generator_root != bytes([0] * 32):
            return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT

    # 7. The reward claims must be valid for the previous sub-blocks, and current block fees
    if sub_height > 0:
        # Add reward claims for all sub-blocks from the prev prev block, until the prev block (including the latter)
        prev_block = sub_blocks[block.foliage_block.prev_block_hash]
        assert prev_block.fees is not None
        pool_coin = create_pool_coin(
            prev_block.sub_block_height,
            prev_block.pool_puzzle_hash,
            calculate_pool_reward(prev_block.height),
        )
        # Farmer reward additionally includes the fees of the previous block
        farmer_coin = create_farmer_coin(
            prev_block.sub_block_height,
            prev_block.farmer_puzzle_hash,
            uint64(calculate_base_farmer_reward(prev_block.height) + prev_block.fees),
        )
        # Adds the previous block
        expected_reward_coins.add(pool_coin)
        expected_reward_coins.add(farmer_coin)

        # For the second block in the chain, don't go back further
        if prev_block.sub_block_height > 0:
            # Walk back through non-block sub-blocks, each contributes pool + farmer coins
            curr_sb = sub_blocks[prev_block.prev_hash]
            curr_height = curr_sb.height
            while not curr_sb.is_block:
                expected_reward_coins.add(
                    create_pool_coin(
                        curr_sb.sub_block_height,
                        curr_sb.pool_puzzle_hash,
                        calculate_pool_reward(curr_height),
                    )
                )
                expected_reward_coins.add(
                    create_farmer_coin(
                        curr_sb.sub_block_height,
                        curr_sb.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr_height),
                    )
                )
                curr_sb = sub_blocks[curr_sb.prev_hash]

    if set(block.transactions_info.reward_claims_incorporated) != expected_reward_coins:
        return Err.INVALID_REWARD_COINS

    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)

    if block.transactions_generator is not None:
        # Get List of names removed, puzzles hashes for removed coins and conditions crated
        if cached_cost_result is not None:
            result: CostResult = cached_cost_result
        else:
            result = calculate_cost_of_program(block.transactions_generator, constants.CLVM_COST_RATIO_CONSTANT)
        cost = result.cost
        npc_list = result.npc_list

        # 8. Check that cost <= MAX_BLOCK_COST_CLVM
        if cost > constants.MAX_BLOCK_COST_CLVM:
            return Err.BLOCK_COST_EXCEEDS_MAX
        if result.error is not None:
            return Err(result.error)

        for npc in npc_list:
            removals.append(npc.coin_name)
            removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

        additions = additions_for_npc(npc_list)

    # 9. Check that the correct cost is in the transactions info
    if block.transactions_info.cost != cost:
        return Err.INVALID_BLOCK_COST

    additions_dic: Dict[bytes32, Coin] = {}
    # 10. Check additions for max coin amount
    for coin in additions + coinbase_additions:
        additions_dic[coin.name()] = coin
        if coin.amount >= constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM

    # 11. Validate addition and removal roots
    root_error = validate_block_merkle_roots(
        block.foliage_block.additions_root,
        block.foliage_block.removals_root,
        additions + coinbase_additions,
        removals,
    )
    if root_error:
        return root_error

    # 12. The additions and removals must result in the correct filter
    byte_array_tx: List[bytes32] = []
    for coin in additions + coinbase_additions:
        byte_array_tx.append(bytearray(coin.puzzle_hash))
    for coin_name in removals:
        byte_array_tx.append(bytearray(coin_name))

    bip158: PyBIP158 = PyBIP158(byte_array_tx)
    encoded_filter = bytes(bip158.GetEncoded())
    filter_hash = std_hash(encoded_filter)

    if filter_hash != block.foliage_block.filter_hash:
        return Err.INVALID_TRANSACTIONS_FILTER_HASH

    # 13. Check for duplicate outputs in additions
    addition_counter = collections.Counter(_.name() for _ in additions + coinbase_additions)
    for k, v in addition_counter.items():
        if v > 1:
            return Err.DUPLICATE_OUTPUT

    # 14. Check for duplicate spends inside block
    removal_counter = collections.Counter(removals)
    for k, v in removal_counter.items():
        if v > 1:
            return Err.DOUBLE_SPEND

    # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block)
    if peak is None or sub_height == 0:
        fork_sub_h: int = -1
    else:
        fork_sub_h = find_fork_point_in_chain(sub_blocks, peak, sub_blocks[block.prev_header_hash])

    if fork_sub_h == -1:
        coin_store_reorg_height = -1
    else:
        # coin_store heights are (transaction-)block heights, so translate the fork
        # sub-height to the height of the last block in common
        last_sb_in_common = sub_blocks[sub_height_to_hash[uint32(fork_sub_h)]]
        if last_sb_in_common.is_block:
            coin_store_reorg_height = last_sb_in_common.height
        else:
            coin_store_reorg_height = last_sb_in_common.height - 1

    # Get additions and removals since (after) fork_h but not including this block
    additions_since_fork: Dict[bytes32, Tuple[Coin, uint32]] = {}
    removals_since_fork: Set[bytes32] = set()
    coinbases_since_fork: Dict[bytes32, uint32] = {}
    if sub_height > 0:
        # Walk ancestors from the parent down to (but not including) the fork point
        curr: Optional[FullBlock] = await block_store.get_full_block(block.prev_header_hash)
        assert curr is not None
        while curr.sub_block_height > fork_sub_h:
            removals_in_curr, additions_in_curr = curr.tx_removals_and_additions()
            for c_name in removals_in_curr:
                removals_since_fork.add(c_name)
            for c in additions_in_curr:
                additions_since_fork[c.name()] = (c, curr.sub_block_height)
            for coinbase_coin in curr.get_included_reward_coins():
                additions_since_fork[coinbase_coin.name()] = (coinbase_coin, curr.sub_block_height)
                coinbases_since_fork[coinbase_coin.name()] = curr.sub_block_height
            if curr.sub_block_height == 0:
                break
            curr = await block_store.get_full_block(curr.prev_header_hash)
            assert curr is not None

    removal_coin_records: Dict[bytes32, CoinRecord] = {}
    for rem in removals:
        if rem in additions_dic:
            # Ephemeral coin: created and spent within this same block
            rem_coin: Coin = additions_dic[rem]
            new_unspent: CoinRecord = CoinRecord(
                rem_coin,
                sub_height,
                uint32(0),
                False,
                False,
                block.foliage_block.timestamp,
            )
            removal_coin_records[new_unspent.name] = new_unspent
        else:
            unspent = await coin_store.get_coin_record(rem)
            if unspent is not None and unspent.confirmed_block_index <= coin_store_reorg_height:
                # Spending something in the current chain, confirmed before fork
                # (We ignore all coins confirmed after fork)
                if unspent.spent == 1 and unspent.spent_block_index <= coin_store_reorg_height:
                    # Check for coins spent in an ancestor block
                    return Err.DOUBLE_SPEND
                removal_coin_records[unspent.name] = unspent
            else:
                # This coin is not in the current heaviest chain, so it must be in the fork
                if rem not in additions_since_fork:
                    # Check for spending a coin that does not exist in this fork
                    # TODO: fix this, there is a consensus bug here
                    return Err.UNKNOWN_UNSPENT
                new_coin, confirmed_height = additions_since_fork[rem]
                new_coin_record: CoinRecord = CoinRecord(
                    new_coin,
                    confirmed_height,
                    uint32(0),
                    False,
                    (rem in coinbases_since_fork),
                    block.foliage_block.timestamp,
                )
                removal_coin_records[new_coin_record.name] = new_coin_record

        # This check applies to both coins created before fork (pulled from coin_store),
        # and coins created after fork (additions_since_fork)
        if rem in removals_since_fork:
            # This coin was spent in the fork
            return Err.DOUBLE_SPEND

    removed = 0
    for unspent in removal_coin_records.values():
        removed += unspent.coin.amount

    added = 0
    for coin in additions:
        added += coin.amount

    # 16. Check that the total coin amount for added is <= removed
    if removed < added:
        return Err.MINTING_COIN

    fees = removed - added
    assert_fee_sum: uint64 = uint64(0)

    # Sum all ASSERT_FEE conditions across the block's spends
    for npc in npc_list:
        if ConditionOpcode.ASSERT_FEE in npc.condition_dict:
            fee_list: List[ConditionVarPair] = npc.condition_dict[ConditionOpcode.ASSERT_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                assert_fee_sum = assert_fee_sum + fee

    # 17. Check that the assert fee sum <= fees
    if fees < assert_fee_sum:
        return Err.ASSERT_FEE_CONDITION_FAILED

    # 18. Check that the computed fees are equal to the fees in the block header
    if block.transactions_info.fees != fees:
        return Err.INVALID_BLOCK_FEE_AMOUNT

    # 19. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
    for unspent in removal_coin_records.values():
        if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
            return Err.WRONG_PUZZLE_HASH

    # 20. Verify conditions
    # create hash_key list for aggsig check
    pairs_pks = []
    pairs_msgs = []
    for npc in npc_list:
        assert height is not None
        unspent = removal_coin_records[npc.coin_name]
        error = blockchain_check_conditions_dict(
            unspent,
            removal_coin_records,
            npc.condition_dict,
            height,
            block.foliage_block.timestamp,
        )
        if error:
            return error
        for pk, m in pkm_pairs_for_conditions_dict(npc.condition_dict, npc.coin_name):
            pairs_pks.append(pk)
            pairs_msgs.append(m)

    # 21. Verify aggregated signature
    # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
    if not block.transactions_info.aggregated_signature:
        return Err.BAD_AGGREGATE_SIGNATURE

    if len(pairs_pks) == 0:
        # With no (pk, msg) pairs, the only valid aggregate signature is infinity
        if len(pairs_msgs) != 0 or block.transactions_info.aggregated_signature != G2Element.infinity():
            return Err.BAD_AGGREGATE_SIGNATURE
    else:
        # noinspection PyTypeChecker
        validates = AugSchemeMPL.aggregate_verify(
            pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature)
        if not validates:
            return Err.BAD_AGGREGATE_SIGNATURE

    return None
def debug_spend_bundle(spend_bundle: SpendBundle) -> None:
    """
    Print a lot of useful information about a `SpendBundle` that might help with debugging
    its clvm.

    For each coin solution this prints the puzzle/solution disassembly, the generated
    conditions, and collects (pk, msg) pairs; at the end it summarizes spent/created/
    ephemeral coins, compares the zero-amount coins against the ASSERT_COIN_CONSUMED
    set, and checks the aggregated signature.
    """
    assert_consumed_set = set()
    pks = []
    msgs = []

    print("=" * 80)
    for coin_solution in spend_bundle.coin_solutions:
        coin, solution_pair = coin_solution.coin, Program.to(coin_solution.solution)
        puzzle_reveal = solution_pair.first()
        solution = solution_pair.rest().first()

        print(f"consuming coin {dump_coin(coin)}")
        print(f" with id {coin.name()}")
        print()
        print(f"\nbrun -y main.sym '{bu_disassemble(puzzle_reveal)}' '{bu_disassemble(solution)}'")
        error, conditions, cost = conditions_dict_for_solution(Program.to([puzzle_reveal, solution]))
        if error:
            print(f"*** error {error}")
        elif conditions is not None:
            for pk, m in pkm_pairs_for_conditions_dict(conditions, coin.name()):
                pks.append(pk)
                msgs.append(m)
            print()
            r = puzzle_reveal.run(solution)
            print(disassemble(r))
            print()
            if conditions and len(conditions) > 0:
                print("grouped conditions:")
                for condition_programs in conditions.values():
                    print()
                    for c in condition_programs:
                        as_prog = Program.to([c.opcode, c.vars[0], c.vars[1]])
                        print(f" {disassemble(as_prog)}")
                print()
                # BUG FIX: this previously read `c.vars[0]`, where `c` was the stale loop
                # variable left over from the disassembly loop above, so the consumed-coin
                # id of the *last printed condition* was added for every ASSERT_COIN_CONSUMED
                # entry. Use the current condition instead.
                for consumed_condition in conditions.get(ConditionOpcode.ASSERT_COIN_CONSUMED, []):
                    assert_consumed_set.add(bytes32(consumed_condition.vars[0]))
            else:
                print("(no output conditions generated)")
        print()
        print("-------")

    created = set(spend_bundle.additions())
    spent = set(spend_bundle.removals())
    zero_coin_set = set(coin.name() for coin in created if coin.amount == 0)

    # Coins both created and spent within the bundle are ephemeral
    ephemeral = created.intersection(spent)
    created.difference_update(ephemeral)
    spent.difference_update(ephemeral)
    print()
    print("spent coins")
    for coin in sorted(spent, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => spent coin id {coin.name()}")
    print()
    print("created coins")
    for coin in sorted(created, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => created coin id {coin.name()}")
    if ephemeral:
        print()
        print("ephemeral coins")
        for coin in sorted(ephemeral, key=lambda _: _.name()):
            print(f" {dump_coin(coin)}")
            print(f" => created coin id {coin.name()}")
    print()
    print(f"assert_consumed_set = {sorted(assert_consumed_set)}")
    print()
    print(f"zero_coin_set = {sorted(zero_coin_set)}")
    print()
    set_difference = zero_coin_set ^ assert_consumed_set
    print(f"zero_coin_set ^ assert_consumed_set = {sorted(set_difference)}")
    if len(set_difference):
        print("not all zero coins asserted consumed or vice versa")
    print()
    print("=" * 80)
    print()
    if len(msgs) > 0:
        validates = AugSchemeMPL.aggregate_verify(pks, msgs, spend_bundle.aggregated_signature)
        print(f"aggregated signature check pass: {validates}")
async def add_spendbundle(
    self,
    new_spend: SpendBundle,
    cost_result: CostResult,
    spend_name: bytes32,
    validate_signature=True,
) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
    """
    Tries to add a spend bundle to the mempool.

    Returns a tuple of (cost, status, error):
      - (cost, SUCCESS, None) when the bundle is accepted (or already present),
      - (cost, PENDING, err) when it is queued as a potential transaction
        (mempool conflict it cannot replace, or a not-yet-valid height assertion),
      - (None, FAILED, err) on rejection.
    """
    start_time = time.time()
    if self.peak is None:
        return None, MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED

    npc_list = cost_result.npc_list
    cost = cost_result.cost

    log.debug(f"Cost: {cost}")
    if cost > self.constants.MAX_BLOCK_COST_CLVM:
        return None, MempoolInclusionStatus.FAILED, Err.BLOCK_COST_EXCEEDS_MAX

    if cost_result.error is not None:
        return None, MempoolInclusionStatus.FAILED, Err(cost_result.error)

    # build removal list
    removal_names: List[bytes32] = new_spend.removal_names()

    additions = additions_for_npc(npc_list)

    additions_dict: Dict[bytes32, Coin] = {}
    for add in additions:
        additions_dict[add.name()] = add

    addition_amount = uint64(0)
    # Check additions for max coin amount
    for coin in additions:
        if coin.amount > self.constants.MAX_COIN_AMOUNT:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
            )
        addition_amount = uint64(addition_amount + coin.amount)

    # Check for duplicate outputs
    addition_counter = collections.Counter(_.name() for _ in additions)
    for k, v in addition_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT

    # Check for duplicate inputs
    removal_counter = collections.Counter(removal_names)
    for k, v in removal_counter.items():
        if v > 1:
            return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND

    # Skip if already added
    if spend_name in self.mempool.spends:
        return uint64(cost), MempoolInclusionStatus.SUCCESS, None

    removal_record_dict: Dict[bytes32, CoinRecord] = {}
    removal_coin_dict: Dict[bytes32, Coin] = {}
    unknown_unspent_error: bool = False
    removal_amount = uint64(0)
    for name in removal_names:
        removal_record = await self.coin_store.get_coin_record(name)
        if removal_record is None and name not in additions_dict:
            unknown_unspent_error = True
            break
        elif name in additions_dict:
            # Ephemeral coin: created by this same bundle, so synthesize a record
            removal_coin = additions_dict[name]
            # TODO(straya): what timestamp to use here?
            removal_record = CoinRecord(
                removal_coin,
                uint32(self.peak.height + 1),  # In mempool, so will be included in next height
                uint32(0),
                False,
                False,
                uint64(int(time.time())),
            )

        assert removal_record is not None
        removal_amount = uint64(removal_amount + removal_record.coin.amount)
        removal_record_dict[name] = removal_record
        removal_coin_dict[name] = removal_record.coin

    if unknown_unspent_error:
        return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN_UNSPENT

    if addition_amount > removal_amount:
        # BUG FIX: was a stray `print(...)` to stdout; use the module logger instead
        log.debug(f"addition_amount {addition_amount}, removal_amount {removal_amount}")
        return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN

    fees = removal_amount - addition_amount
    assert_fee_sum: uint64 = uint64(0)

    # Sum all RESERVE_FEE conditions across the bundle's spends
    for npc in npc_list:
        if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
            fee_list: List[ConditionWithArgs] = npc.condition_dict[ConditionOpcode.RESERVE_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                assert_fee_sum = assert_fee_sum + fee

    if fees < assert_fee_sum:
        return (
            None,
            MempoolInclusionStatus.FAILED,
            Err.RESERVE_FEE_CONDITION_FAILED,
        )

    if cost == 0:
        return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

    fees_per_cost: float = fees / cost

    # If pool is at capacity check the fee, if not then accept even without the fee
    if self.mempool.at_full_capacity(cost):
        if fees == 0:
            return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
        if fees_per_cost <= self.mempool.get_min_fee_rate(cost):
            return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE

    # Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
    # Use this information later when constructing a block
    fail_reason, conflicts = await self.check_removals(removal_record_dict)

    # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
    conflicting_pool_items: Dict[bytes32, MempoolItem] = {}
    if fail_reason is Err.MEMPOOL_CONFLICT:
        for conflicting in conflicts:
            sb: MempoolItem = self.mempool.removals[conflicting.name()]
            conflicting_pool_items[sb.name] = sb
        if not self.can_replace(conflicting_pool_items, removal_record_dict, fees, fees_per_cost):
            self.add_to_potential_tx_set(new_spend, spend_name, cost_result)
            return (
                uint64(cost),
                MempoolInclusionStatus.PENDING,
                Err.MEMPOOL_CONFLICT,
            )
    elif fail_reason:
        return None, MempoolInclusionStatus.FAILED, fail_reason

    # Verify conditions, create hash_key list for aggsig check
    pks: List[G1Element] = []
    msgs: List[bytes32] = []
    error: Optional[Err] = None
    coin_announcements_in_spend: Set[bytes32] = coin_announcements_names_for_npc(npc_list)
    puzzle_announcements_in_spend: Set[bytes32] = puzzle_announcements_names_for_npc(npc_list)
    for npc in npc_list:
        coin_record: CoinRecord = removal_record_dict[npc.coin_name]
        # Check that the revealed removal puzzles actually match the puzzle hash
        if npc.puzzle_hash != coin_record.coin.puzzle_hash:
            log.warning("Mempool rejecting transaction because of wrong puzzle_hash")
            log.warning(f"{npc.puzzle_hash} != {coin_record.coin.puzzle_hash}")
            return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH

        chialisp_height = (
            self.peak.prev_transaction_block_height if not self.peak.is_transaction_block else self.peak.height
        )
        error = mempool_check_conditions_dict(
            coin_record,
            coin_announcements_in_spend,
            puzzle_announcements_in_spend,
            npc.condition_dict,
            uint32(chialisp_height),
        )

        if error:
            # Height assertions may become valid later, so keep the bundle as potential
            if error is Err.ASSERT_HEIGHT_ABSOLUTE_FAILED or error is Err.ASSERT_HEIGHT_RELATIVE_FAILED:
                self.add_to_potential_tx_set(new_spend, spend_name, cost_result)
                return uint64(cost), MempoolInclusionStatus.PENDING, error
            break

        if validate_signature:
            for pk, message in pkm_pairs_for_conditions_dict(
                    npc.condition_dict, npc.coin_name, self.constants.AGG_SIG_ME_ADDITIONAL_DATA):
                pks.append(pk)
                msgs.append(message)
    if error:
        return None, MempoolInclusionStatus.FAILED, error

    if validate_signature:
        # Verify aggregated signature
        if not AugSchemeMPL.aggregate_verify(pks, msgs, new_spend.aggregated_signature):
            log.warning(f"Aggsig validation error {pks} {msgs} {new_spend}")
            return None, MempoolInclusionStatus.FAILED, Err.BAD_AGGREGATE_SIGNATURE

    # Remove all conflicting Coins and SpendBundles
    if fail_reason:
        mempool_item: MempoolItem
        for mempool_item in conflicting_pool_items.values():
            self.mempool.remove_from_pool(mempool_item)

    removals: List[Coin] = list(removal_coin_dict.values())
    new_item = MempoolItem(new_spend, uint64(fees), cost_result, spend_name, additions, removals)
    self.mempool.add_to_pool(new_item, additions, removal_coin_dict)
    log.info(f"add_spendbundle took {time.time() - start_time} seconds")
    return uint64(cost), MempoolInclusionStatus.SUCCESS, None
async def process_partial(
    self,
    partial: PostPartialRequest,
    farmer_record: FarmerRecord,
    time_received_partial: uint64,
) -> Dict:
    """
    Validate and record a farmer's partial proof.

    Checks the aggregate signature (plot key + authentication key over the payload
    hash), fetches the referenced signage point or end-of-sub-slot (with one retry),
    enforces the partial time limit, verifies the proof of space quality, and queues
    the partial for point accounting. Finally, under the store lock, re-reads the
    farmer record and possibly adjusts its difficulty.

    Returns an error dict on failure, or a PostPartialResponse JSON dict with the
    (possibly updated) current difficulty.
    """
    # Validate signatures
    message: bytes32 = partial.payload.get_hash()
    pk1: G1Element = partial.payload.proof_of_space.plot_public_key
    pk2: G1Element = farmer_record.authentication_public_key
    # Both keys sign the same payload hash; verified as one aggregate signature
    valid_sig = AugSchemeMPL.aggregate_verify([pk1, pk2], [message, message], partial.aggregate_signature)
    if not valid_sig:
        return error_dict(
            PoolErrorCode.INVALID_SIGNATURE,
            f"The aggregate signature is invalid {partial.aggregate_signature}",
        )

    # TODO (chia-dev): Check DB p2_singleton_puzzle_hash and compare
    # if partial.payload.proof_of_space.pool_contract_puzzle_hash != p2_singleton_puzzle_hash:
    #     return error_dict(
    #         PoolErrorCode.INVALID_P2_SINGLETON_PUZZLE_HASH,
    #         f"Invalid plot pool contract puzzle hash {partial.payload.proof_of_space.pool_contract_puzzle_hash}"
    #     )

    async def get_signage_point_or_eos():
        # sp_hash is interpreted as an EOS challenge hash or a signage-point hash
        # depending on the payload's end_of_sub_slot flag
        if partial.payload.end_of_sub_slot:
            return await self.node_rpc_client.get_recent_signage_point_or_eos(None, partial.payload.sp_hash)
        else:
            return await self.node_rpc_client.get_recent_signage_point_or_eos(partial.payload.sp_hash, None)

    response = await get_signage_point_or_eos()
    if response is None:
        # Try again after 10 seconds in case we just didn't yet receive the signage point
        await asyncio.sleep(10)
        response = await get_signage_point_or_eos()

    if response is None or response["reverted"]:
        return error_dict(
            PoolErrorCode.NOT_FOUND,
            f"Did not find signage point or EOS {partial.payload.sp_hash}, {response}")
    node_time_received_sp = response["time_received"]

    signage_point: Optional[SignagePoint] = response.get("signage_point", None)
    end_of_sub_slot: Optional[EndOfSubSlotBundle] = response.get("eos", None)

    # Reject partials that arrived too long after the node saw the signage point
    if time_received_partial - node_time_received_sp > self.partial_time_limit:
        return error_dict(
            PoolErrorCode.TOO_LATE,
            f"Received partial in {time_received_partial - node_time_received_sp}. "
            f"Make sure your proof of space lookups are fast, and network connectivity is good."
            f"Response must happen in less than {self.partial_time_limit} seconds. NAS or network"
            f" farming can be an issue",
        )

    # Validate the proof
    if signage_point is not None:
        challenge_hash: bytes32 = signage_point.cc_vdf.challenge
    else:
        # NOTE(review): assumes the RPC returns a non-None "eos" whenever
        # "signage_point" is absent — would raise AttributeError otherwise; confirm
        challenge_hash = end_of_sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf.get_hash()

    quality_string: Optional[bytes32] = partial.payload.proof_of_space.verify_and_get_quality_string(
        self.constants, challenge_hash, partial.payload.sp_hash)
    if quality_string is None:
        return error_dict(
            PoolErrorCode.INVALID_PROOF,
            f"Invalid proof of space {partial.payload.sp_hash}")

    current_difficulty = farmer_record.difficulty
    required_iters: uint64 = calculate_iterations_quality(
        self.constants.DIFFICULTY_CONSTANT_FACTOR,
        quality_string,
        partial.payload.proof_of_space.size,
        current_difficulty,
        partial.payload.sp_hash,
    )

    # Proofs that are too slow for the farmer's current difficulty are rejected
    if required_iters >= self.iters_limit:
        return error_dict(
            PoolErrorCode.PROOF_NOT_GOOD_ENOUGH,
            f"Proof of space has required iters {required_iters}, too high for difficulty "
            f"{current_difficulty}",
        )

    await self.pending_point_partials.put((partial, time_received_partial, current_difficulty))

    async with self.store.lock:
        # Obtains the new record in case we just updated difficulty
        # NOTE(review): this re-annotation shadows the `farmer_record` parameter
        farmer_record: Optional[FarmerRecord] = await self.store.get_farmer_record(partial.payload.launcher_id)
        if farmer_record is not None:
            current_difficulty = farmer_record.difficulty
            # Decide whether to update the difficulty
            recent_partials = await self.store.get_recent_partials(
                partial.payload.launcher_id, self.number_of_partials_target)
            # Only update the difficulty if we meet certain conditions
            new_difficulty: uint64 = get_new_difficulty(
                recent_partials,
                int(self.number_of_partials_target),
                int(self.time_target),
                current_difficulty,
                time_received_partial,
                self.min_difficulty,
            )

            if current_difficulty != new_difficulty:
                await self.store.update_difficulty(
                    partial.payload.launcher_id, new_difficulty)

    return PostPartialResponse(current_difficulty).to_json_dict()