Example #1
def _run_generator(
    constants_dict: bytes,
    unfinished_block_bytes: bytes,
    block_generator_bytes: bytes,
) -> Tuple[Optional[Err], Optional[bytes]]:
    """
    Runs the CLVM generator from bytes inputs. This is meant to be called under a ProcessPoolExecutor, in order to
    validate the heavy parts of a block (clvm program) in a different process.
    """
    try:
        constants: ConsensusConstants = dataclass_from_dict(
            ConsensusConstants, constants_dict)
        unfinished_block: UnfinishedBlock = UnfinishedBlock.from_bytes(
            unfinished_block_bytes)
        assert unfinished_block.transactions_info is not None
        block_generator: BlockGenerator = BlockGenerator.from_bytes(
            block_generator_bytes)
        assert block_generator.program == unfinished_block.transactions_generator
        npc_result: NPCResult = get_name_puzzle_conditions(
            block_generator,
            min(constants.MAX_BLOCK_COST_CLVM,
                unfinished_block.transactions_info.cost),
            cost_per_byte=constants.COST_PER_BYTE,
            mempool_mode=False,
        )
        if npc_result.error is not None:
            return Err(npc_result.error), None
    except ValidationError as e:
        return e.code, None
    except Exception:
        return Err.UNKNOWN, None

    return None, bytes(npc_result)
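
# --- Usage sketch (not from the original source) ---
# A minimal, hedged example of how _run_generator might be handed to a
# ProcessPoolExecutor from async code. The argument names and the use of
# dataclasses.asdict for the constants are assumptions for illustration; the
# real caller may serialize ConsensusConstants differently.
async def _example_run_generator_in_executor(executor, constants, unfinished_block, block_generator):
    import asyncio
    import dataclasses

    loop = asyncio.get_running_loop()
    err, npc_bytes = await loop.run_in_executor(
        executor,                          # concurrent.futures.ProcessPoolExecutor
        _run_generator,
        dataclasses.asdict(constants),     # plain dict so it can be pickled into the child process
        bytes(unfinished_block),
        bytes(block_generator),
    )
    if err is not None or npc_bytes is None:
        return err, None
    # NPCResult is streamable in this codebase, so it round-trips through bytes.
    return None, NPCResult.from_bytes(npc_bytes)
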
def validate_clvm_and_signature(
        spend_bundle_bytes: bytes, max_cost: int, cost_per_byte: int,
        additional_data: bytes
) -> Tuple[Optional[Err], bytes, Dict[bytes, bytes]]:
    """
    Validates CLVM and aggregate signature for a spendbundle. This is meant to be called under a ProcessPoolExecutor
    in order to validate the heavy parts of a transaction in a different process. Returns an optional error,
    the serialized NPCResult, and a cache of the newly validated pairings (if there is no error)
    """
    try:
        bundle: SpendBundle = SpendBundle.from_bytes(spend_bundle_bytes)
        program = simple_solution_generator(bundle)
        # npc contains names of the coins removed, puzzle_hashes and their spend conditions
        result: NPCResult = get_name_puzzle_conditions(
            program, max_cost, cost_per_byte=cost_per_byte, mempool_mode=True)

        if result.error is not None:
            return Err(result.error), b"", {}

        pks: List[G1Element] = []
        msgs: List[bytes32] = []
        # TODO: address hint error and remove ignore
        #       error: Incompatible types in assignment (expression has type "List[bytes]", variable has type
        #       "List[bytes32]")  [assignment]
        pks, msgs = pkm_pairs(result.npc_list,
                              additional_data)  # type: ignore[assignment]

        # Verify aggregated signature
        cache: LRUCache = LRUCache(10000)
        if not cached_bls.aggregate_verify(
                pks, msgs, bundle.aggregated_signature, True, cache):
            return Err.BAD_AGGREGATE_SIGNATURE, b"", {}
        new_cache_entries: Dict[bytes, bytes] = {}
        for k, v in cache.cache.items():
            new_cache_entries[k] = bytes(v)
    except ValidationError as e:
        return e.code, b"", {}
    except Exception:
        return Err.UNKNOWN, b"", {}

    return None, bytes(result), new_cache_entries
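
# --- Usage sketch (not from the original source) ---
# A hedged example of consuming the tuple returned by validate_clvm_and_signature.
# The helper name and control flow are illustrative assumptions; only the
# (error, npc_bytes, new_pairings) contract comes from the function above.
def _example_decode_validation_result(spend_bundle_bytes: bytes, max_cost: int,
                                      cost_per_byte: int, additional_data: bytes):
    err, npc_bytes, new_pairings = validate_clvm_and_signature(
        spend_bundle_bytes, max_cost, cost_per_byte, additional_data
    )
    if err is not None:
        # On failure only the error code is meaningful; npc_bytes is empty.
        return err, None, {}
    npc_result = NPCResult.from_bytes(npc_bytes)
    # new_pairings maps cache keys to serialized pairings; the caller can merge
    # them into its long-lived BLS pairing cache to speed up later verifications.
    return None, npc_result, new_pairings
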
Example #3
    async def fetch_blocks_and_validate(
        self,
        peer: WSChiaConnection,
        height_start: uint32,
        height_end: uint32,
        fork_point_with_peak: Optional[uint32],
    ) -> Tuple[bool, bool]:
        """
        Returns whether the blocks were validated, and whether the peak was advanced
        """
        if self.wallet_state_manager is None:
            return False, False

        self.log.info(f"Requesting blocks {height_start}-{height_end}")
        request = RequestHeaderBlocks(uint32(height_start), uint32(height_end))
        res: Optional[RespondHeaderBlocks] = await peer.request_header_blocks(request)
        if res is None or not isinstance(res, RespondHeaderBlocks):
            raise ValueError("Peer returned no response")
        header_blocks: List[HeaderBlock] = res.header_blocks
        advanced_peak = False
        if header_blocks is None:
            raise ValueError(f"No response from peer {peer}")
        if (
            self.full_node_peer is not None
            and peer.peer_host == self.full_node_peer.host
            or peer.peer_host == "127.0.0.1"
        ):
            trusted = True
            pre_validation_results: Optional[List[PreValidationResult]] = None
        else:
            trusted = False
            pre_validation_results = await self.wallet_state_manager.blockchain.pre_validate_blocks_multiprocessing(
                header_blocks
            )
            if pre_validation_results is None:
                return False, advanced_peak
            assert len(header_blocks) == len(pre_validation_results)

        for i in range(len(header_blocks)):
            header_block = header_blocks[i]
            if not trusted and pre_validation_results is not None and pre_validation_results[i].error is not None:
                raise ValidationError(Err(pre_validation_results[i].error))

            fork_point_with_old_peak = None if advanced_peak else fork_point_with_peak
            if header_block.is_transaction_block:
                # Find additions and removals
                (additions, removals,) = await self.wallet_state_manager.get_filter_additions_removals(
                    header_block, header_block.transactions_filter, fork_point_with_old_peak
                )

                # Get Additions
                added_coins = await self.get_additions(peer, header_block, additions)
                if added_coins is None:
                    raise ValueError("Failed to fetch additions")

                # Get removals
                removed_coins = await self.get_removals(peer, header_block, added_coins, removals)
                if removed_coins is None:
                    raise ValueError("Failed to fetch removals")

                header_block_record = HeaderBlockRecord(header_block, added_coins, removed_coins)
            else:
                header_block_record = HeaderBlockRecord(header_block, [], [])
            start_t = time.time()
            if trusted:
                (result, error, fork_h,) = await self.wallet_state_manager.blockchain.receive_block(
                    header_block_record, None, trusted, fork_point_with_old_peak
                )
            else:
                assert pre_validation_results is not None
                (result, error, fork_h,) = await self.wallet_state_manager.blockchain.receive_block(
                    header_block_record, pre_validation_results[i], trusted, fork_point_with_old_peak
                )
            self.log.debug(
                f"Time taken to validate {header_block.height} with fork "
                f"{fork_point_with_old_peak}: {time.time() - start_t}"
            )
            if result == ReceiveBlockResult.NEW_PEAK:
                advanced_peak = True
                self.wallet_state_manager.state_changed("new_block")
            elif result == ReceiveBlockResult.INVALID_BLOCK:
                raise ValueError("Value error peer sent us invalid block")
        if advanced_peak:
            await self.wallet_state_manager.create_more_puzzle_hashes()
        return True, advanced_peak
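
    # --- Usage sketch (not from the original source) ---
    # A hedged illustration of calling fetch_blocks_and_validate in fixed-size
    # batches during a sync. The method name, batch_size, and the height
    # bookkeeping are assumptions for illustration only.
    async def _example_sync_in_batches(self, peer: WSChiaConnection, target_height: uint32,
                                       batch_size: int = 32) -> bool:
        advanced_any = False
        for start in range(0, int(target_height) + 1, batch_size):
            end = min(start + batch_size - 1, int(target_height))
            ok, advanced = await self.fetch_blocks_and_validate(
                peer, uint32(start), uint32(end), fork_point_with_peak=None
            )
            if not ok:
                break
            advanced_any = advanced_any or advanced
        return advanced_any
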
    async def add_spendbundle(
        self,
        new_spend: SpendBundle,
        cost_result: CostResult,
        spend_name: bytes32,
        validate_signature=True,
    ) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
        """
        Tries to add the spend bundle to the mempool.
        Returns the cost and inclusion status if it is added (or already present), and an error if it fails.
        """
        start_time = time.time()
        if self.peak is None:
            return None, MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED

        npc_list = cost_result.npc_list
        cost = cost_result.cost

        log.debug(f"Cost: {cost}")

        if cost > self.constants.MAX_BLOCK_COST_CLVM:
            return None, MempoolInclusionStatus.FAILED, Err.BLOCK_COST_EXCEEDS_MAX

        if cost_result.error is not None:
            return None, MempoolInclusionStatus.FAILED, Err(cost_result.error)
        # build removal list
        removal_names: List[bytes32] = new_spend.removal_names()

        additions = additions_for_npc(npc_list)

        additions_dict: Dict[bytes32, Coin] = {}
        for add in additions:
            additions_dict[add.name()] = add

        addition_amount = uint64(0)
        # Check additions for max coin amount
        for coin in additions:
            if coin.amount > self.constants.MAX_COIN_AMOUNT:
                return (
                    None,
                    MempoolInclusionStatus.FAILED,
                    Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
                )
            addition_amount = uint64(addition_amount + coin.amount)
        # Check for duplicate outputs
        addition_counter = collections.Counter(_.name() for _ in additions)
        for k, v in addition_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT
        # Check for duplicate inputs
        removal_counter = collections.Counter(name for name in removal_names)
        for k, v in removal_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND
        # Skip if already added
        if spend_name in self.mempool.spends:
            return uint64(cost), MempoolInclusionStatus.SUCCESS, None

        removal_record_dict: Dict[bytes32, CoinRecord] = {}
        removal_coin_dict: Dict[bytes32, Coin] = {}
        unknown_unspent_error: bool = False
        removal_amount = uint64(0)
        for name in removal_names:
            removal_record = await self.coin_store.get_coin_record(name)
            if removal_record is None and name not in additions_dict:
                unknown_unspent_error = True
                break
            elif name in additions_dict:
                removal_coin = additions_dict[name]
                # TODO(straya): what timestamp to use here?
                removal_record = CoinRecord(
                    removal_coin,
                    uint32(
                        self.peak.height +
                        1),  # In mempool, so will be included in next height
                    uint32(0),
                    False,
                    False,
                    uint64(int(time.time())),
                )

            assert removal_record is not None
            removal_amount = uint64(removal_amount +
                                    removal_record.coin.amount)
            removal_record_dict[name] = removal_record
            removal_coin_dict[name] = removal_record.coin
        if unknown_unspent_error:
            return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN_UNSPENT

        if addition_amount > removal_amount:
            log.debug(f"Addition amount {addition_amount} exceeds removal amount {removal_amount}")
            return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN

        fees = removal_amount - addition_amount
        assert_fee_sum: uint64 = uint64(0)

        for npc in npc_list:
            if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
                fee_list: List[ConditionWithArgs] = npc.condition_dict[
                    ConditionOpcode.RESERVE_FEE]
                for cvp in fee_list:
                    fee = int_from_bytes(cvp.vars[0])
                    assert_fee_sum = assert_fee_sum + fee
        if fees < assert_fee_sum:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.RESERVE_FEE_CONDITION_FAILED,
            )

        if cost == 0:
            return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

        fees_per_cost: float = fees / cost
        # If pool is at capacity check the fee, if not then accept even without the fee
        if self.mempool.at_full_capacity(cost):
            if fees == 0:
                return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
            if fees_per_cost <= self.mempool.get_min_fee_rate(cost):
                return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
        # Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
        # Use this information later when constructing a block
        fail_reason, conflicts = await self.check_removals(removal_record_dict)
        # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
        tmp_error: Optional[Err] = None
        conflicting_pool_items: Dict[bytes32, MempoolItem] = {}
        if fail_reason is Err.MEMPOOL_CONFLICT:
            for conflicting in conflicts:
                sb: MempoolItem = self.mempool.removals[conflicting.name()]
                conflicting_pool_items[sb.name] = sb
            if not self.can_replace(conflicting_pool_items,
                                    removal_record_dict, fees, fees_per_cost):
                self.add_to_potential_tx_set(new_spend, spend_name,
                                             cost_result)
                return (
                    uint64(cost),
                    MempoolInclusionStatus.PENDING,
                    Err.MEMPOOL_CONFLICT,
                )

        elif fail_reason:
            return None, MempoolInclusionStatus.FAILED, fail_reason

        if tmp_error:
            return None, MempoolInclusionStatus.FAILED, tmp_error

        # Verify conditions, create hash_key list for aggsig check
        pks: List[G1Element] = []
        msgs: List[bytes32] = []
        error: Optional[Err] = None
        coin_announcements_in_spend: Set[
            bytes32] = coin_announcements_names_for_npc(npc_list)
        puzzle_announcements_in_spend: Set[
            bytes32] = puzzle_announcements_names_for_npc(npc_list)
        for npc in npc_list:
            coin_record: CoinRecord = removal_record_dict[npc.coin_name]
            # Check that the revealed removal puzzles actually match the puzzle hash
            if npc.puzzle_hash != coin_record.coin.puzzle_hash:
                log.warning(
                    "Mempool rejecting transaction because of wrong puzzle_hash"
                )
                log.warning(
                    f"{npc.puzzle_hash} != {coin_record.coin.puzzle_hash}")
                return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH

            chialisp_height = (self.peak.prev_transaction_block_height
                               if not self.peak.is_transaction_block else
                               self.peak.height)
            error = mempool_check_conditions_dict(
                coin_record,
                coin_announcements_in_spend,
                puzzle_announcements_in_spend,
                npc.condition_dict,
                uint32(chialisp_height),
            )

            if error:
                if error is Err.ASSERT_HEIGHT_ABSOLUTE_FAILED or error is Err.ASSERT_HEIGHT_RELATIVE_FAILED:
                    self.add_to_potential_tx_set(new_spend, spend_name,
                                                 cost_result)
                    return uint64(cost), MempoolInclusionStatus.PENDING, error
                break

            if validate_signature:
                for pk, message in pkm_pairs_for_conditions_dict(
                        npc.condition_dict, npc.coin_name,
                        self.constants.AGG_SIG_ME_ADDITIONAL_DATA):
                    pks.append(pk)
                    msgs.append(message)
        if error:
            return None, MempoolInclusionStatus.FAILED, error

        if validate_signature:
            # Verify aggregated signature
            if not AugSchemeMPL.aggregate_verify(
                    pks, msgs, new_spend.aggregated_signature):
                log.warning(
                    f"Aggsig validation error {pks} {msgs} {new_spend}")
                return None, MempoolInclusionStatus.FAILED, Err.BAD_AGGREGATE_SIGNATURE
        # Remove all conflicting Coins and SpendBundles
        if fail_reason:
            mempool_item: MempoolItem
            for mempool_item in conflicting_pool_items.values():
                self.mempool.remove_from_pool(mempool_item)

        removals: List[Coin] = [coin for coin in removal_coin_dict.values()]
        new_item = MempoolItem(new_spend, uint64(fees), cost_result,
                               spend_name, additions, removals)
        self.mempool.add_to_pool(new_item, additions, removal_coin_dict)
        log.info(f"add_spendbundle took {time.time() - start_time} seconds")
        return uint64(cost), MempoolInclusionStatus.SUCCESS, None
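
    # --- Usage sketch (not from the original source) ---
    # A hedged example of interpreting the (cost, status, error) tuple returned by
    # add_spendbundle. The method name and the caller-supplied arguments are
    # illustrative assumptions.
    async def _example_handle_add_spendbundle(self, new_spend: SpendBundle,
                                              cost_result: CostResult, spend_name: bytes32):
        cost, status, error = await self.add_spendbundle(new_spend, cost_result, spend_name)
        if status == MempoolInclusionStatus.SUCCESS:
            return cost  # included in (or already present in) the mempool
        if status == MempoolInclusionStatus.PENDING:
            return None  # kept aside, e.g. a mempool conflict or a not-yet-valid height condition
        raise ValueError(f"Spend bundle {spend_name} rejected: {error}")
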
    async def receive_block(
        self,
        header_block_record: HeaderBlockRecord,
        pre_validation_result: Optional[PreValidationResult] = None,
        trusted: bool = False,
        fork_point_with_peak: Optional[uint32] = None,
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]:
        """
        Adds a new block into the blockchain, if it's valid and connected to the current
        blockchain, regardless of whether it is the child of a head, or another block.
        Returns the ReceiveBlockResult, an error if the block is
        invalid, and the fork height in the case of a new peak.
        """

        block = header_block_record.header
        genesis: bool = block.height == 0

        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (
                ReceiveBlockResult.DISCONNECTED_BLOCK,
                Err.INVALID_PREV_BLOCK_HASH,
                None,
            )

        if block.height == 0:
            prev_b: Optional[BlockRecord] = None
        else:
            prev_b = self.block_record(block.prev_header_hash)
        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            self.constants,
            len(block.finished_sub_slots) > 0, prev_b, self)

        if trusted is False and pre_validation_result is None:
            required_iters, error = validate_finished_header_block(
                self.constants, self, block, False, difficulty, sub_slot_iters)
        elif trusted:
            unfinished_header_block = UnfinishedHeaderBlock(
                block.finished_sub_slots,
                block.reward_chain_block.get_unfinished(),
                block.challenge_chain_sp_proof,
                block.reward_chain_sp_proof,
                block.foliage,
                block.foliage_transaction_block,
                block.transactions_filter,
            )

            required_iters, val_error = validate_unfinished_header_block(
                self.constants, self, unfinished_header_block, False,
                difficulty, sub_slot_iters, False, True)
            error = ValidationError(
                Err(val_error)) if val_error is not None else None
        else:
            assert pre_validation_result is not None
            required_iters = pre_validation_result.required_iters
            error = (ValidationError(Err(pre_validation_result.error))
                     if pre_validation_result.error is not None else None)

        if error is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error.code, None
        assert required_iters is not None

        block_record = block_to_block_record(
            self.constants,
            self,
            required_iters,
            None,
            block,
        )

        # Always add the block to the database
        async with self.block_store.db_wrapper.lock:
            try:
                await self.block_store.db_wrapper.begin_transaction()
                await self.block_store.add_block_record(
                    header_block_record, block_record)
                self.add_block_record(block_record)
                self.clean_block_record(block_record.height -
                                        self.constants.BLOCKS_CACHE_SIZE)

                fork_height: Optional[uint32] = await self._reconsider_peak(
                    block_record, genesis, fork_point_with_peak)
                await self.block_store.db_wrapper.commit_transaction()
            except BaseException as e:
                self.log.error(f"Error during db transaction: {e}")
                await self.block_store.db_wrapper.rollback_transaction()
                raise
        if fork_height is not None:
            self.log.info(
                f"💰 Updated wallet peak to height {block_record.height}, weight {block_record.weight}, "
            )
            return ReceiveBlockResult.NEW_PEAK, None, fork_height
        else:
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
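
    # --- Usage sketch (not from the original source) ---
    # A hedged example of mapping the ReceiveBlockResult returned above to
    # wallet-side actions. The method name and logging are illustrative assumptions.
    async def _example_apply_received_block(self, header_block_record: HeaderBlockRecord):
        result, error, fork_height = await self.receive_block(header_block_record)
        if result == ReceiveBlockResult.NEW_PEAK:
            self.log.info(f"New peak at height {header_block_record.header.height}, fork height {fork_height}")
        elif result == ReceiveBlockResult.INVALID_BLOCK:
            assert error is not None
            raise ValidationError(error)
        # ALREADY_HAVE_BLOCK, ADDED_AS_ORPHAN and DISCONNECTED_BLOCK require no action here.
        return result
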
Example #6
async def validate_block_body(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    block_store: BlockStore,
    coin_store: CoinStore,
    peak: Optional[BlockRecord],
    block: Union[FullBlock, UnfinishedBlock],
    height: uint32,
    npc_result: Optional[NPCResult],
    fork_point_with_peak: Optional[uint32],
    get_block_generator: Callable,
    validate_signature=True,
) -> Tuple[Optional[Err], Optional[NPCResult]]:
    """
    This assumes the header block has been completely validated.
    Validates the transactions and body of the block. Returns None for the first value if everything
    validates correctly, or an Err if something does not validate. For the second value, returns an NPCResult
    only if validation succeeded and there are transactions. In other cases it returns None. The NPC result is
    the result of running the generator with the previous generators refs. It is only present for transaction
    blocks which have spent coins.
    """
    if isinstance(block, FullBlock):
        assert height == block.height
    prev_transaction_block_height: uint32 = uint32(0)

    # 1. For non-transaction blocks: foliage block, transaction filter, transactions info, and generator must
    # be empty. If it is a block but not a transaction block, there is no body to validate. Check that all fields are
    # None
    if block.foliage.foliage_transaction_block_hash is None:
        if (block.foliage_transaction_block is not None
                or block.transactions_info is not None
                or block.transactions_generator is not None):
            return Err.NOT_BLOCK_BUT_HAS_DATA, None

        prev_tb: BlockRecord = blocks.block_record(block.prev_header_hash)
        while not prev_tb.is_transaction_block:
            prev_tb = blocks.block_record(prev_tb.prev_hash)
        assert prev_tb.timestamp is not None
        if len(block.transactions_generator_ref_list) > 0:
            return Err.NOT_BLOCK_BUT_HAS_DATA, None

        return None, None  # This means the block is valid

    # All checks below this point correspond to transaction blocks
    # 2. For transaction blocks, foliage block and transactions info must not be empty
    if block.foliage_transaction_block is None or block.transactions_info is None:
        return Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA, None
    assert block.foliage_transaction_block is not None

    # keeps track of the reward coins that need to be incorporated
    expected_reward_coins: Set[Coin] = set()

    # 3. The transaction info hash in the Foliage block must match the transaction info
    if block.foliage_transaction_block.transactions_info_hash != std_hash(
            block.transactions_info):
        return Err.INVALID_TRANSACTIONS_INFO_HASH, None

    # 4. The foliage block hash in the foliage block must match the foliage block
    if block.foliage.foliage_transaction_block_hash != std_hash(
            block.foliage_transaction_block):
        return Err.INVALID_FOLIAGE_BLOCK_HASH, None

    # 5. The reward claims must be valid for the previous blocks, and current block fees
    # If height == 0, expected_reward_coins will be left empty
    if height > 0:
        # Add reward claims for all blocks from the prev prev block, until the prev block (including the latter)
        prev_transaction_block = blocks.block_record(
            block.foliage_transaction_block.prev_transaction_block_hash)
        prev_transaction_block_height = prev_transaction_block.height
        assert prev_transaction_block.fees is not None
        pool_coin = create_pool_coin(
            prev_transaction_block_height,
            prev_transaction_block.pool_puzzle_hash,
            calculate_pool_reward(prev_transaction_block.height),
            constants.GENESIS_CHALLENGE,
        )
        farmer_coin = create_farmer_coin(
            prev_transaction_block_height,
            prev_transaction_block.farmer_puzzle_hash,
            uint64(
                calculate_base_farmer_reward(prev_transaction_block.height) +
                prev_transaction_block.fees),
            constants.GENESIS_CHALLENGE,
        )
        # Adds the previous block
        expected_reward_coins.add(pool_coin)
        expected_reward_coins.add(farmer_coin)

        # For the second block in the chain, don't go back further
        if prev_transaction_block.height > 0:
            curr_b = blocks.block_record(prev_transaction_block.prev_hash)
            while not curr_b.is_transaction_block:
                expected_reward_coins.add(
                    create_pool_coin(
                        curr_b.height,
                        curr_b.pool_puzzle_hash,
                        calculate_pool_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    ))
                expected_reward_coins.add(
                    create_farmer_coin(
                        curr_b.height,
                        curr_b.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    ))
                curr_b = blocks.block_record(curr_b.prev_hash)

    if set(block.transactions_info.reward_claims_incorporated
           ) != expected_reward_coins:
        return Err.INVALID_REWARD_COINS, None

    if len(block.transactions_info.reward_claims_incorporated) != len(
            expected_reward_coins):
        return Err.INVALID_REWARD_COINS, None

    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)

    # In header validation we check that the timestamp is not more than 10 minutes into the future
    # 6. No transactions before INITIAL_TRANSACTION_FREEZE timestamp
    # (this test has been removed)

    # 7a. The generator root must be the hash of the serialized bytes of
    #     the generator for this block (or zeroes if no generator)
    if block.transactions_generator is not None:
        if std_hash(bytes(block.transactions_generator)
                    ) != block.transactions_info.generator_root:
            return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None
    else:
        if block.transactions_info.generator_root != bytes([0] * 32):
            return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None

    # 8a. The generator_ref_list must be the hash of the serialized bytes of
    #     the generator ref list for this block (or 32 bytes of 0x01 if there are no refs)
    # 8b. The generator ref list length must be less than or equal to MAX_GENERATOR_REF_LIST_SIZE entries
    # 8c. The generator ref list must not point to a height >= this block's height
    if block.transactions_generator_ref_list in (None, []):
        if block.transactions_info.generator_refs_root != bytes([1] * 32):
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
    else:
        # If we have a generator reference list, we must have a generator
        if block.transactions_generator is None:
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None

        # The generator_refs_root must be the hash of the concatenation of the List[uint32]
        generator_refs_hash = std_hash(b"".join(
            [bytes(i) for i in block.transactions_generator_ref_list]))
        if block.transactions_info.generator_refs_root != generator_refs_hash:
            return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
        if len(block.transactions_generator_ref_list
               ) > constants.MAX_GENERATOR_REF_LIST_SIZE:
            return Err.TOO_MANY_GENERATOR_REFS, None
        if any([
                index >= height
                for index in block.transactions_generator_ref_list
        ]):
            return Err.FUTURE_GENERATOR_REFS, None

    if block.transactions_generator is not None:
        # Get list of names removed, puzzle hashes for removed coins, and conditions created

        assert npc_result is not None
        cost = calculate_cost_of_program(block.transactions_generator,
                                         npc_result, constants.COST_PER_BYTE)
        npc_list = npc_result.npc_list

        # 7. Check that cost <= MAX_BLOCK_COST_CLVM
        log.debug(
            f"Cost: {cost} max: {constants.MAX_BLOCK_COST_CLVM} "
            f"percent full: {round(100 * (cost / constants.MAX_BLOCK_COST_CLVM), 2)}%"
        )
        if cost > constants.MAX_BLOCK_COST_CLVM:
            return Err.BLOCK_COST_EXCEEDS_MAX, None

        # 8. The CLVM program must not return any errors
        if npc_result.error is not None:
            return Err(npc_result.error), None

        for npc in npc_list:
            removals.append(npc.coin_name)
            removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

        additions = additions_for_npc(npc_list)
    else:
        assert npc_result is None

    # 9. Check that the correct cost is in the transactions info
    if block.transactions_info.cost != cost:
        return Err.INVALID_BLOCK_COST, None

    additions_dic: Dict[bytes32, Coin] = {}
    # 10. Check additions for max coin amount
    # Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer
    # We will not even reach here because Coins do type checking (uint64)
    for coin in additions + coinbase_additions:
        additions_dic[coin.name()] = coin
        if coin.amount < 0:
            return Err.COIN_AMOUNT_NEGATIVE, None

        if coin.amount > constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

    # 11. Validate addition and removal roots
    root_error = validate_block_merkle_roots(
        block.foliage_transaction_block.additions_root,
        block.foliage_transaction_block.removals_root,
        additions + coinbase_additions,
        removals,
    )
    if root_error:
        return root_error, None

    # 12. The additions and removals must result in the correct filter
    byte_array_tx: List[bytes32] = []

    for coin in additions + coinbase_additions:
        # TODO: address hint error and remove ignore
        #       error: Argument 1 to "append" of "list" has incompatible type "bytearray"; expected "bytes32"
        #       [arg-type]
        byte_array_tx.append(bytearray(
            coin.puzzle_hash))  # type: ignore[arg-type]
    for coin_name in removals:
        # TODO: address hint error and remove ignore
        #       error: Argument 1 to "append" of "list" has incompatible type "bytearray"; expected "bytes32"
        #       [arg-type]
        byte_array_tx.append(bytearray(coin_name))  # type: ignore[arg-type]

    bip158: PyBIP158 = PyBIP158(byte_array_tx)
    encoded_filter = bytes(bip158.GetEncoded())
    filter_hash = std_hash(encoded_filter)

    if filter_hash != block.foliage_transaction_block.filter_hash:
        return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

    # 13. Check for duplicate outputs in additions
    addition_counter = collections.Counter(
        _.name() for _ in additions + coinbase_additions)
    for k, v in addition_counter.items():
        if v > 1:
            return Err.DUPLICATE_OUTPUT, None

    # 14. Check for duplicate spends inside block
    removal_counter = collections.Counter(removals)
    for k, v in removal_counter.items():
        if v > 1:
            return Err.DOUBLE_SPEND, None

    # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block)
    # The fork point is the last block in common between the peak chain and the chain of `block`
    if peak is None or height == 0:
        fork_h: int = -1
    elif fork_point_with_peak is not None:
        fork_h = fork_point_with_peak
    else:
        fork_h = find_fork_point_in_chain(
            blocks, peak, blocks.block_record(block.prev_header_hash))

    # Get additions and removals since (after) fork_h but not including this block
    # The values include: the coin that was added, the height of the block in which it was confirmed, and the
    # timestamp of the block in which it was confirmed
    additions_since_fork: Dict[bytes32, Tuple[Coin, uint32, uint64]] = {
    }  # This includes coinbase additions
    removals_since_fork: Set[bytes32] = set()

    # For height 0, there are no additions and removals before this block, so we can skip
    if height > 0:
        # First, get all the blocks in the fork > fork_h, < block.height
        prev_block: Optional[FullBlock] = await block_store.get_full_block(
            block.prev_header_hash)
        reorg_blocks: Dict[uint32, FullBlock] = {}
        curr: Optional[FullBlock] = prev_block
        assert curr is not None
        while curr.height > fork_h:
            if curr.height == 0:
                break
            curr = await block_store.get_full_block(curr.prev_header_hash)
            assert curr is not None
            reorg_blocks[curr.height] = curr
        if fork_h != -1:
            assert len(reorg_blocks) == height - fork_h - 1

        curr = prev_block
        assert curr is not None
        while curr.height > fork_h:
            # Coin store doesn't contain coins from fork, we have to run generator for each block in fork
            if curr.transactions_generator is not None:
                # These blocks are in the past and therefore assumed to be valid, so get_block_generator won't raise
                curr_block_generator: Optional[
                    BlockGenerator] = await get_block_generator(curr)
                assert curr_block_generator is not None and curr.transactions_info is not None
                curr_npc_result = get_name_puzzle_conditions(
                    curr_block_generator,
                    min(constants.MAX_BLOCK_COST_CLVM,
                        curr.transactions_info.cost),
                    cost_per_byte=constants.COST_PER_BYTE,
                    mempool_mode=False,
                )
                removals_in_curr, additions_in_curr = tx_removals_and_additions(
                    curr_npc_result.npc_list)
            else:
                removals_in_curr = []
                additions_in_curr = []

            for c_name in removals_in_curr:
                assert c_name not in removals_since_fork
                removals_since_fork.add(c_name)
            for c in additions_in_curr:
                assert c.name() not in additions_since_fork
                assert curr.foliage_transaction_block is not None
                additions_since_fork[c.name()] = (
                    c, curr.height, curr.foliage_transaction_block.timestamp)

            for coinbase_coin in curr.get_included_reward_coins():
                assert coinbase_coin.name() not in additions_since_fork
                assert curr.foliage_transaction_block is not None
                additions_since_fork[coinbase_coin.name()] = (
                    coinbase_coin,
                    curr.height,
                    curr.foliage_transaction_block.timestamp,
                )
            if curr.height == 0:
                break
            curr = reorg_blocks[curr.height - 1]
            assert curr is not None

    removal_coin_records: Dict[bytes32, CoinRecord] = {}
    for rem in removals:
        if rem in additions_dic:
            # Ephemeral coin
            rem_coin: Coin = additions_dic[rem]
            new_unspent: CoinRecord = CoinRecord(
                rem_coin,
                height,
                height,
                False,
                block.foliage_transaction_block.timestamp,
            )
            removal_coin_records[new_unspent.name] = new_unspent
        else:
            unspent = await coin_store.get_coin_record(rem)
            if unspent is not None and unspent.confirmed_block_index <= fork_h:
                # Spending something in the current chain, confirmed before fork
                # (We ignore all coins confirmed after fork)
                if unspent.spent == 1 and unspent.spent_block_index <= fork_h:
                    # Check for coins spent in an ancestor block
                    return Err.DOUBLE_SPEND, None
                removal_coin_records[unspent.name] = unspent
            else:
                # This coin is not in the current heaviest chain, so it must be in the fork
                if rem not in additions_since_fork:
                    # Check for spending a coin that does not exist in this fork
                    log.error(
                        f"Err.UNKNOWN_UNSPENT: COIN ID: {rem} NPC RESULT: {npc_result}"
                    )
                    return Err.UNKNOWN_UNSPENT, None
                new_coin, confirmed_height, confirmed_timestamp = additions_since_fork[
                    rem]
                new_coin_record: CoinRecord = CoinRecord(
                    new_coin,
                    confirmed_height,
                    uint32(0),
                    False,
                    confirmed_timestamp,
                )
                removal_coin_records[new_coin_record.name] = new_coin_record

            # This check applies to both coins created before fork (pulled from coin_store),
            # and coins created after fork (additions_since_fork)
            if rem in removals_since_fork:
                # This coin was spent in the fork
                return Err.DOUBLE_SPEND_IN_FORK, None

    removed = 0
    for unspent in removal_coin_records.values():
        removed += unspent.coin.amount

    added = 0
    for coin in additions:
        added += coin.amount

    # 16. Check that the total coin amount for added is <= removed
    if removed < added:
        return Err.MINTING_COIN, None

    fees = removed - added
    assert fees >= 0
    assert_fee_sum: uint128 = uint128(0)

    for npc in npc_list:
        if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
            fee_list: List[ConditionWithArgs] = npc.condition_dict[
                ConditionOpcode.RESERVE_FEE]
            for cvp in fee_list:
                fee = int_from_bytes(cvp.vars[0])
                if fee < 0:
                    return Err.RESERVE_FEE_CONDITION_FAILED, None
                assert_fee_sum = uint128(assert_fee_sum + fee)

    # 17. Check that the assert fee sum <= fees, and that each reserved fee is non-negative
    if fees < assert_fee_sum:
        return Err.RESERVE_FEE_CONDITION_FAILED, None

    # 18. Check that the fee amount + farmer reward < maximum coin amount
    if fees + calculate_base_farmer_reward(height) > constants.MAX_COIN_AMOUNT:
        return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

    # 19. Check that the computed fees are equal to the fees in the block header
    if block.transactions_info.fees != fees:
        return Err.INVALID_BLOCK_FEE_AMOUNT, None

    # 20. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
    for unspent in removal_coin_records.values():
        if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
            return Err.WRONG_PUZZLE_HASH, None

    # 21. Verify conditions
    for npc in npc_list:
        assert height is not None
        unspent = removal_coin_records[npc.coin_name]
        error = mempool_check_conditions_dict(
            unspent,
            npc.condition_dict,
            prev_transaction_block_height,
            block.foliage_transaction_block.timestamp,
        )
        if error:
            return error, None

    # create hash_key list for aggsig check
    pairs_pks, pairs_msgs = pkm_pairs(npc_list,
                                      constants.AGG_SIG_ME_ADDITIONAL_DATA)

    # 22. Verify aggregated signature
    # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
    if not block.transactions_info.aggregated_signature:
        return Err.BAD_AGGREGATE_SIGNATURE, None

    # The pairing cache is not useful while syncing as each pairing is seen
    # only once, so the extra effort of populating it is not justified.
    # However, we force caching of pairings just for unfinished blocks
    # as the cache is likely to be useful when validating the corresponding
    # finished blocks later.
    if validate_signature:
        force_cache: bool = isinstance(block, UnfinishedBlock)
        if not cached_bls.aggregate_verify(
                pairs_pks, pairs_msgs,
                block.transactions_info.aggregated_signature, force_cache):
            return Err.BAD_AGGREGATE_SIGNATURE, None

    return None, npc_result
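
# --- Usage sketch (not from the original source) ---
# A hedged example of consuming the (error, npc_result) pair returned by
# validate_block_body for a FullBlock. The wrapper name and the way the
# arguments are obtained are assumptions for illustration only.
async def _example_validate_body(constants, blocks, block_store, coin_store, peak,
                                 block, npc_result, get_block_generator):
    error, validated_npc = await validate_block_body(
        constants,
        blocks,
        block_store,
        coin_store,
        peak,
        block,
        block.height,                 # assumes block is a FullBlock
        npc_result,
        fork_point_with_peak=None,    # let the function recompute the fork point
        get_block_generator=get_block_generator,
    )
    if error is not None:
        return error
    # validated_npc is non-None only for transaction blocks that spend coins;
    # callers may cache it to avoid re-running the generator later.
    return None
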
Example #7
async def validate_block_body(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    block_store: BlockStore,
    coin_store: CoinStore,
    peak: Optional[BlockRecord],
    block: Union[FullBlock, UnfinishedBlock],
    height: uint32,
    cached_cost_result: Optional[CostResult] = None,
    fork_point_with_peak: Optional[uint32] = None,
) -> Tuple[Optional[Err], Optional[CostResult]]:
    """
    This assumes the header block has been completely validated.
    Validates the transactions and body of the block. Returns None for the first value if everything
    validates correctly, or an Err if something does not validate. For the second value, returns a CostResult
    if validation succeeded, and there are transactions
    """
    if isinstance(block, FullBlock):
        assert height == block.height
    prev_transaction_block_height: uint32 = uint32(0)

    # 1. For non-transaction blocks: foliage block, transaction filter, transactions info, and generator must be empty
    # If it is a block but not a transaction block, there is no body to validate. Check that all fields are None
    if block.foliage.foliage_transaction_block_hash is None:
        if (block.foliage_transaction_block is not None
                or block.transactions_info is not None
                or block.transactions_generator is not None):
            return Err.NOT_BLOCK_BUT_HAS_DATA, None
        return None, None  # This means the block is valid

    # 2. For transaction blocks: foliage block, transaction filter, and transactions info must not be empty
    if (block.foliage_transaction_block is None
            or block.foliage_transaction_block.filter_hash is None
            or block.transactions_info is None):
        return Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA, None

    # keeps track of the reward coins that need to be incorporated
    expected_reward_coins: Set[Coin] = set()

    # 3. The transaction info hash in the Foliage block must match the transaction info
    if block.foliage_transaction_block.transactions_info_hash != std_hash(
            block.transactions_info):
        return Err.INVALID_TRANSACTIONS_INFO_HASH, None

    # 4. The foliage block hash in the foliage block must match the foliage block
    if block.foliage.foliage_transaction_block_hash != std_hash(
            block.foliage_transaction_block):
        return Err.INVALID_FOLIAGE_BLOCK_HASH, None

    # 5. The prev generators root must be valid
    # TODO(straya): implement prev generators

    # 7. The reward claims must be valid for the previous blocks, and current block fees
    if height > 0:
        # Add reward claims for all blocks from the prev prev block, until the prev block (including the latter)
        prev_transaction_block = blocks.block_record(
            block.foliage_transaction_block.prev_transaction_block_hash)
        prev_transaction_block_height = prev_transaction_block.height
        assert prev_transaction_block.fees is not None
        pool_coin = create_pool_coin(
            prev_transaction_block_height,
            prev_transaction_block.pool_puzzle_hash,
            calculate_pool_reward(prev_transaction_block.height),
            constants.GENESIS_CHALLENGE,
        )
        farmer_coin = create_farmer_coin(
            prev_transaction_block_height,
            prev_transaction_block.farmer_puzzle_hash,
            uint64(
                calculate_base_farmer_reward(prev_transaction_block.height) +
                prev_transaction_block.fees),
            constants.GENESIS_CHALLENGE,
        )
        # Adds the previous block
        expected_reward_coins.add(pool_coin)
        expected_reward_coins.add(farmer_coin)

        # For the second block in the chain, don't go back further
        if prev_transaction_block.height > 0:
            curr_b = blocks.block_record(prev_transaction_block.prev_hash)
            while not curr_b.is_transaction_block:
                expected_reward_coins.add(
                    create_pool_coin(
                        curr_b.height,
                        curr_b.pool_puzzle_hash,
                        calculate_pool_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    ))
                expected_reward_coins.add(
                    create_farmer_coin(
                        curr_b.height,
                        curr_b.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    ))
                curr_b = blocks.block_record(curr_b.prev_hash)

    if set(block.transactions_info.reward_claims_incorporated
           ) != expected_reward_coins:
        return Err.INVALID_REWARD_COINS, None

    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    coin_announcement_names: Set[bytes32] = set()
    puzzle_announcement_names: Set[bytes32] = set()
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)

    if height <= constants.INITIAL_FREEZE_PERIOD and block.transactions_generator is not None:
        return Err.INITIAL_TRANSACTION_FREEZE, None

    if height > constants.INITIAL_FREEZE_PERIOD and constants.NETWORK_TYPE == NetworkType.MAINNET:
        return Err.INITIAL_TRANSACTION_FREEZE, None
    else:
        # 6. The generator root must be the tree-hash of the generator (or zeroes if no generator)
        if block.transactions_generator is not None:
            if block.transactions_generator.get_tree_hash(
            ) != block.transactions_info.generator_root:
                return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT, None
        else:
            if block.transactions_info.generator_root != bytes([0] * 32):
                return Err.INVALID_TRANSACTIONS_GENERATOR_ROOT, None

        if block.transactions_generator_ref_list is not None:
            if len(bytes(block.transactions_generator_ref_list)
                   ) > constants.MAX_GENERATOR_REF_LIST_SIZE:
                return Err.PRE_SOFT_FORK_MAX_GENERATOR_REF_LIST_SIZE, None

        if block.transactions_generator is not None:
            # Get list of names removed, puzzle hashes for removed coins, and conditions created
            if cached_cost_result is not None:
                result: Optional[CostResult] = cached_cost_result
            else:
                result = calculate_cost_of_program(
                    block.transactions_generator,
                    constants.CLVM_COST_RATIO_CONSTANT)
            assert result is not None
            cost = result.cost
            npc_list = result.npc_list

            # 8. Check that cost <= MAX_BLOCK_COST_CLVM
            if cost > constants.MAX_BLOCK_COST_CLVM:
                return Err.BLOCK_COST_EXCEEDS_MAX, None
            if result.error is not None:
                return Err(result.error), None

            for npc in npc_list:
                removals.append(npc.coin_name)
                removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

            additions = additions_for_npc(npc_list)
            coin_announcement_names = coin_announcements_names_for_npc(
                npc_list)
            puzzle_announcement_names = puzzle_announcements_names_for_npc(
                npc_list)
        else:
            result = None

        # 9. Check that the correct cost is in the transactions info
        if block.transactions_info.cost != cost:
            return Err.INVALID_BLOCK_COST, None

        additions_dic: Dict[bytes32, Coin] = {}
        # 10. Check additions for max coin amount
        # Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer
        for coin in additions + coinbase_additions:
            additions_dic[coin.name()] = coin
            if coin.amount > constants.MAX_COIN_AMOUNT:
                return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 11. Validate addition and removal roots
        root_error = validate_block_merkle_roots(
            block.foliage_transaction_block.additions_root,
            block.foliage_transaction_block.removals_root,
            additions + coinbase_additions,
            removals,
        )
        if root_error:
            return root_error, None

        # 12. The additions and removals must result in the correct filter
        byte_array_tx: List[bytes32] = []

        for coin in additions + coinbase_additions:
            byte_array_tx.append(bytearray(coin.puzzle_hash))
        for coin_name in removals:
            byte_array_tx.append(bytearray(coin_name))

        bip158: PyBIP158 = PyBIP158(byte_array_tx)
        encoded_filter = bytes(bip158.GetEncoded())
        filter_hash = std_hash(encoded_filter)

        if filter_hash != block.foliage_transaction_block.filter_hash:
            return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

        # 13. Check for duplicate outputs in additions
        addition_counter = collections.Counter(
            _.name() for _ in additions + coinbase_additions)
        for k, v in addition_counter.items():
            if v > 1:
                return Err.DUPLICATE_OUTPUT, None

        # 14. Check for duplicate spends inside block
        removal_counter = collections.Counter(removals)
        for k, v in removal_counter.items():
            if v > 1:
                return Err.DOUBLE_SPEND, None

        # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block)
        if peak is None or height == 0:
            fork_h: int = -1
        elif fork_point_with_peak is not None:
            fork_h = fork_point_with_peak
        else:
            fork_h = find_fork_point_in_chain(
                blocks, peak, blocks.block_record(block.prev_header_hash))

        if fork_h == -1:
            coin_store_reorg_height = -1
        else:
            last_block_in_common = await blocks.get_block_record_from_db(
                blocks.height_to_hash(uint32(fork_h)))
            assert last_block_in_common is not None
            coin_store_reorg_height = last_block_in_common.height

        # Get additions and removals since (after) fork_h but not including this block
        additions_since_fork: Dict[bytes32, Tuple[Coin, uint32]] = {}
        removals_since_fork: Set[bytes32] = set()
        coinbases_since_fork: Dict[bytes32, uint32] = {}

        if height > 0:
            curr: Optional[FullBlock] = await block_store.get_full_block(
                block.prev_header_hash)
            assert curr is not None

            while curr.height > fork_h:
                removals_in_curr, additions_in_curr = curr.tx_removals_and_additions(
                )
                for c_name in removals_in_curr:
                    removals_since_fork.add(c_name)
                for c in additions_in_curr:
                    additions_since_fork[c.name()] = (c, curr.height)

                for coinbase_coin in curr.get_included_reward_coins():
                    additions_since_fork[coinbase_coin.name()] = (
                        coinbase_coin, curr.height)
                    coinbases_since_fork[coinbase_coin.name()] = curr.height
                if curr.height == 0:
                    break
                curr = await block_store.get_full_block(curr.prev_header_hash)
                assert curr is not None

        removal_coin_records: Dict[bytes32, CoinRecord] = {}
        for rem in removals:
            if rem in additions_dic:
                # Ephemeral coin
                rem_coin: Coin = additions_dic[rem]
                new_unspent: CoinRecord = CoinRecord(
                    rem_coin,
                    height,
                    uint32(0),
                    False,
                    (rem in coinbases_since_fork),
                    block.foliage_transaction_block.timestamp,
                )
                removal_coin_records[new_unspent.name] = new_unspent
            else:
                unspent = await coin_store.get_coin_record(rem)
                if unspent is not None and unspent.confirmed_block_index <= coin_store_reorg_height:
                    # Spending something in the current chain, confirmed before fork
                    # (We ignore all coins confirmed after fork)
                    if unspent.spent == 1 and unspent.spent_block_index <= coin_store_reorg_height:
                        # Check for coins spent in an ancestor block
                        return Err.DOUBLE_SPEND, None
                    removal_coin_records[unspent.name] = unspent
                else:
                    # This coin is not in the current heaviest chain, so it must be in the fork
                    if rem not in additions_since_fork:
                        # Check for spending a coin that does not exist in this fork
                        # TODO: fix this, there is a consensus bug here
                        return Err.UNKNOWN_UNSPENT, None
                    new_coin, confirmed_height = additions_since_fork[rem]
                    new_coin_record: CoinRecord = CoinRecord(
                        new_coin,
                        confirmed_height,
                        uint32(0),
                        False,
                        (rem in coinbases_since_fork),
                        block.foliage_transaction_block.timestamp,
                    )
                    removal_coin_records[
                        new_coin_record.name] = new_coin_record

                # This check applies to both coins created before fork (pulled from coin_store),
                # and coins created after fork (additions_since_fork)
                if rem in removals_since_fork:
                    # This coin was spent in the fork
                    return Err.DOUBLE_SPEND, None

        removed = 0
        for unspent in removal_coin_records.values():
            removed += unspent.coin.amount

        added = 0
        for coin in additions:
            added += coin.amount

        # 16. Check that the total coin amount for added is <= removed
        if removed < added:
            return Err.MINTING_COIN, None

        fees = removed - added
        assert_fee_sum: uint64 = uint64(0)

        for npc in npc_list:
            if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
                fee_list: List[ConditionWithArgs] = npc.condition_dict[
                    ConditionOpcode.RESERVE_FEE]
                for cvp in fee_list:
                    fee = int_from_bytes(cvp.vars[0])
                    assert_fee_sum = assert_fee_sum + fee

        # 17. Check that the assert fee sum <= fees
        if fees < assert_fee_sum:
            return Err.RESERVE_FEE_CONDITION_FAILED, None

        # 18. Check that the assert fee amount < maximum coin amount
        if fees > constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 19. Check that the computed fees are equal to the fees in the block header
        if block.transactions_info.fees != fees:
            return Err.INVALID_BLOCK_FEE_AMOUNT, None

        # 20. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
        for unspent in removal_coin_records.values():
            if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
                return Err.WRONG_PUZZLE_HASH, None

        # 21. Verify conditions
        # create hash_key list for aggsig check
        pairs_pks = []
        pairs_msgs = []
        for npc in npc_list:
            assert height is not None
            unspent = removal_coin_records[npc.coin_name]
            error = blockchain_check_conditions_dict(
                unspent,
                coin_announcement_names,
                puzzle_announcement_names,
                npc.condition_dict,
                prev_transaction_block_height,
                block.foliage_transaction_block.timestamp,
            )
            if error:
                return error, None
            for pk, m in pkm_pairs_for_conditions_dict(
                    npc.condition_dict, npc.coin_name,
                    constants.AGG_SIG_ME_ADDITIONAL_DATA):
                pairs_pks.append(pk)
                pairs_msgs.append(m)

        # 22. Verify aggregated signature
        # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
        if not block.transactions_info.aggregated_signature:
            return Err.BAD_AGGREGATE_SIGNATURE, None

        # noinspection PyTypeChecker
        if not AugSchemeMPL.aggregate_verify(
                pairs_pks, pairs_msgs,
                block.transactions_info.aggregated_signature):
            return Err.BAD_AGGREGATE_SIGNATURE, None

        return None, result
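The fee checks in steps 16-19 above reduce to integer arithmetic over the amounts spent and created. A minimal standalone sketch of that arithmetic (a hypothetical helper for illustration only, not part of the validator; error names are returned as plain strings):
from typing import Iterable, Optional


def check_fee_invariants(
    removed_amounts: Iterable[int],
    added_amounts: Iterable[int],
    reserve_fee_total: int,
    max_coin_amount: int,
) -> Optional[str]:
    removed = sum(removed_amounts)
    added = sum(added_amounts)
    if removed < added:
        return "MINTING_COIN"  # a transaction block may not create value
    fees = removed - added
    if fees < reserve_fee_total:
        return "RESERVE_FEE_CONDITION_FAILED"  # RESERVE_FEE conditions must be covered by the fees
    if fees > max_coin_amount:
        return "COIN_AMOUNT_EXCEEDS_MAXIMUM"
    # The validator additionally requires fees == block.transactions_info.fees
    return None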
Example #8
0
    async def receive_block(
        self,
        block: FullBlock,
        pre_validation_result: PreValidationResult,
        fork_point_with_peak: Optional[uint32] = None,
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32], Tuple[
            List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]], ]:
        """
        This method must be called under the blockchain lock
        Adds a new block into the blockchain, if it's valid and connected to the current
        blockchain, regardless of whether it is the child of a head, or another block.
        Returns an error if the block is invalid. Also returns the fork height,
        in the case of a new peak.

        Args:
            block: The FullBlock to be validated.
            pre_validation_result: A result of successful pre validation
            fork_point_with_peak: The fork point, for efficiency reasons, if None, it will be recomputed

        Returns:
            The result of adding the block to the blockchain (NEW_PEAK, ADDED_AS_ORPHAN, INVALID_BLOCK,
                DISCONNECTED_BLOCK, ALREADY_HAVE_BLOCK)
            An optional error if the result is not NEW_PEAK or ADDED_AS_ORPHAN
            A fork point if the result is NEW_PEAK
            A list of changes to the coin store, and changes to hints, if the result is NEW_PEAK
        """

        genesis: bool = block.height == 0
        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None, ([], {})

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (ReceiveBlockResult.DISCONNECTED_BLOCK,
                    Err.INVALID_PREV_BLOCK_HASH, None, ([], {}))

        if not genesis and (self.block_record(block.prev_header_hash).height +
                            1) != block.height:
            return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None, (
                [], {})

        npc_result: Optional[NPCResult] = pre_validation_result.npc_result
        required_iters = pre_validation_result.required_iters
        if pre_validation_result.error is not None:
            return ReceiveBlockResult.INVALID_BLOCK, Err(
                pre_validation_result.error), None, ([], {})
        assert required_iters is not None

        error_code, _ = await validate_block_body(
            self.constants,
            self,
            self.block_store,
            self.coin_store,
            self.get_peak(),
            block,
            block.height,
            npc_result,
            fork_point_with_peak,
            self.get_block_generator,
            # If we did not already validate the signature, validate it now
            validate_signature=not pre_validation_result.validated_signature,
        )
        if error_code is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error_code, None, ([], {})

        block_record = block_to_block_record(
            self.constants,
            self,
            required_iters,
            block,
            None,
        )
        # Always add the block to the database
        async with self.block_store.db_wrapper.lock:
            try:
                header_hash: bytes32 = block.header_hash
                # Perform the DB operations to update the state, and rollback if something goes wrong
                await self.block_store.db_wrapper.begin_transaction()
                await self.block_store.add_full_block(header_hash, block,
                                                      block_record, False)
                fork_height, peak_height, records, (
                    coin_record_change,
                    hint_changes) = await self._reconsider_peak(
                        block_record, genesis, fork_point_with_peak,
                        npc_result)
                await self.block_store.db_wrapper.commit_transaction()

                # Then update the memory cache. It is important that this task is not cancelled and does not throw
                self.add_block_record(block_record)
                for fetched_block_record in records:
                    self.__height_map.update_height(
                        fetched_block_record.height,
                        fetched_block_record.header_hash,
                        fetched_block_record.sub_epoch_summary_included,
                    )
                if peak_height is not None:
                    self._peak_height = peak_height
                    await self.__height_map.maybe_flush()
            except BaseException as e:
                self.block_store.rollback_cache_block(header_hash)
                await self.block_store.db_wrapper.rollback_transaction()
                log.error(
                    f"Error while adding block {block.header_hash} height {block.height},"
                    f" rolling back: {traceback.format_exc()} {e}")
                raise

        if fork_height is not None:
            # new coin records added
            assert coin_record_change is not None
            return ReceiveBlockResult.NEW_PEAK, None, fork_height, (
                coin_record_change, hint_changes)
        else:
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None, ([], {})
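A hypothetical caller sketch showing how the four-tuple returned by receive_block above is typically unpacked; blockchain, block and pre_validation_result are assumed to already exist in scope, and ReceiveBlockResult is the same enum used above:
async def add_block_and_report(blockchain, block, pre_validation_result):
    # Per the docstring above, this must be awaited while holding the blockchain lock.
    result, error, fork_height, (coin_changes, hint_changes) = await blockchain.receive_block(
        block, pre_validation_result
    )
    if result == ReceiveBlockResult.NEW_PEAK:
        # The fork height and the coin/hint changes are only meaningful for a new peak
        return fork_height, coin_changes, hint_changes
    if result == ReceiveBlockResult.INVALID_BLOCK:
        raise ValueError(f"Block rejected: {error}")
    # ALREADY_HAVE_BLOCK, DISCONNECTED_BLOCK or ADDED_AS_ORPHAN: nothing to apply
    return None, [], {}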
Example #9
0
    async def receive_block(
        self,
        header_block_record: HeaderBlockRecord,
        pre_validation_result: Optional[PreValidationResult] = None,
        trusted: bool = False,
        fork_point_with_peak: Optional[uint32] = None,
        additional_coin_spends: Optional[List[CoinSpend]] = None,
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]:
        """
        Adds a new block into the blockchain, if it's valid and connected to the current
        blockchain, regardless of whether it is the child of a head, or another block.
        Returns the result of adding the block, an error if the block is invalid,
        and the fork height in the case of a new peak.
        """

        if additional_coin_spends is None:
            additional_coin_spends = []
        block = header_block_record.header
        genesis: bool = block.height == 0

        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (
                ReceiveBlockResult.DISCONNECTED_BLOCK,
                Err.INVALID_PREV_BLOCK_HASH,
                None,
            )

        if block.height == 0:
            prev_b: Optional[BlockRecord] = None
        else:
            prev_b = self.block_record(block.prev_header_hash)
        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            self.constants, len(block.finished_sub_slots) > 0, prev_b, self
        )

        if trusted is False and pre_validation_result is None:
            required_iters, error = validate_finished_header_block(
                self.constants, self, block, False, difficulty, sub_slot_iters
            )
        elif trusted:
            unfinished_header_block = UnfinishedHeaderBlock(
                block.finished_sub_slots,
                block.reward_chain_block.get_unfinished(),
                block.challenge_chain_sp_proof,
                block.reward_chain_sp_proof,
                block.foliage,
                block.foliage_transaction_block,
                block.transactions_filter,
            )

            required_iters, val_error = validate_unfinished_header_block(
                self.constants, self, unfinished_header_block, False, difficulty, sub_slot_iters, False, True
            )
            error = val_error
        else:
            assert pre_validation_result is not None
            required_iters = pre_validation_result.required_iters
            error = (
                ValidationError(Err(pre_validation_result.error)) if pre_validation_result.error is not None else None
            )

        if error is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error.code, None
        assert required_iters is not None

        block_record = block_to_block_record(
            self.constants,
            self,
            required_iters,
            None,
            block,
        )
        heights_changed: Set[Tuple[uint32, Optional[bytes32]]] = set()
        # Always add the block to the database
        async with self.wallet_state_manager_lock:
            async with self.block_store.db_wrapper.lock:
                try:
                    await self.block_store.db_wrapper.begin_transaction()
                    await self.block_store.add_block_record(header_block_record, block_record, additional_coin_spends)
                    self.add_block_record(block_record)
                    self.clean_block_record(block_record.height - self.constants.BLOCKS_CACHE_SIZE)
                    fork_height, records_to_add = await self._reconsider_peak(
                        block_record, genesis, fork_point_with_peak, additional_coin_spends, heights_changed
                    )
                    for record in records_to_add:
                        if record.sub_epoch_summary_included is not None:
                            self.__sub_epoch_summaries[record.height] = record.sub_epoch_summary_included
                    await self.block_store.db_wrapper.commit_transaction()
                except BaseException as e:
                    self.log.error(f"Error during db transaction: {e}")
                    if self.block_store.db_wrapper.db._connection is not None:
                        await self.block_store.db_wrapper.rollback_transaction()
                        self.remove_block_record(block_record.header_hash)
                        self.block_store.rollback_cache_block(block_record.header_hash)
                        await self.coin_store.rebuild_wallet_cache()
                        await self.tx_store.rebuild_tx_cache()
                        await self.pool_store.rebuild_cache()
                        for height, replaced in heights_changed:
                            # If it was replaced change back to the previous value otherwise pop the change
                            if replaced is not None:
                                self.__height_to_hash[height] = replaced
                            else:
                                self.__height_to_hash.pop(height)
                    raise
            if fork_height is not None:
                self.log.info(f"💰 Updated wallet peak to height {block_record.height}, weight {block_record.weight}")
                return ReceiveBlockResult.NEW_PEAK, None, fork_height
            else:
                return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
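The wallet variant above follows the same begin/commit/rollback discipline as the full-node version, but it must also revert its in-memory caches by hand when the transaction fails. A generic sketch of that pattern, with a hypothetical store object standing in for block_store.db_wrapper:
async def apply_atomically(store, apply_changes, undo_memory_changes):
    # `store` is assumed to expose an asyncio lock plus begin/commit/rollback coroutines.
    async with store.lock:
        try:
            await store.begin_transaction()
            result = await apply_changes()  # DB writes and in-memory cache updates
            await store.commit_transaction()
            return result
        except BaseException:
            await store.rollback_transaction()  # the DB rolls back the writes...
            undo_memory_changes()  # ...but in-memory state must be reverted explicitly
            raise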
async def _validate_and_add_block(
    blockchain: Blockchain,
    block: FullBlock,
    expected_result: Optional[ReceiveBlockResult] = None,
    expected_error: Optional[Err] = None,
    skip_prevalidation: bool = False,
) -> None:
    # Tries to validate and add the block, checking that no errors occur in the process and that the
    # block becomes the new peak.
    # If expected_result is not None, that result will be enforced.
    # If expected_error is not None, that error will be enforced, and receive_block must return
    # ReceiveBlockResult.INVALID_BLOCK.
    # If expected_result == INVALID_BLOCK but expected_error is None, any error is allowed.

    await check_block_store_invariant(blockchain)
    if skip_prevalidation:
        results = PreValidationResult(None, uint64(1), None, False)
    else:
        # Do not change this, validate_signatures must be False
        pre_validation_results: List[
            PreValidationResult] = await blockchain.pre_validate_blocks_multiprocessing(
                [block], {}, validate_signatures=False)
        assert pre_validation_results is not None
        results = pre_validation_results[0]
    if results.error is not None:
        if expected_result == ReceiveBlockResult.INVALID_BLOCK and expected_error is None:
            # We expected an error but didn't specify which one
            await check_block_store_invariant(blockchain)
            return None
        if expected_error is None:
            # We did not expect an error
            raise AssertionError(Err(results.error))
        elif Err(results.error) != expected_error:
            # We expected an error but a different one
            raise AssertionError(
                f"Expected {expected_error} but got {Err(results.error)}")
        await check_block_store_invariant(blockchain)
        return None

    result, err, _, _ = await blockchain.receive_block(block, results)
    await check_block_store_invariant(blockchain)

    if expected_error is None and expected_result != ReceiveBlockResult.INVALID_BLOCK:
        # No error was expected here, so make sure we did not get one
        if err is not None:
            # Got an unexpected error
            raise AssertionError(err)
    else:
        # Here we will enforce checking of the exact error
        if err != expected_error:
            # Did not get the right error, or did not get an error
            raise AssertionError(f"Expected {expected_error} but got {err}")

    if expected_result is not None and expected_result != result:
        raise AssertionError(f"Expected {expected_result} but got {result}")
    elif expected_result is None:
        # If we expected an error assume that expected_result = INVALID_BLOCK
        if expected_error is not None and result != ReceiveBlockResult.INVALID_BLOCK:
            raise AssertionError(
                f"Block should be invalid, but received: {result}")
        # Otherwise, assume that expected_result = NEW_PEAK
        if expected_error is None and result != ReceiveBlockResult.NEW_PEAK:
            raise AssertionError(f"Block was not added: {result}")
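A hypothetical pytest-style use of the helper above; empty_blockchain and bt (a block-tools fixture producing consecutive FullBlocks) are assumed fixtures that are not defined here:
import pytest


@pytest.mark.asyncio
async def test_add_block_then_duplicate(empty_blockchain, bt):
    blocks = bt.get_consecutive_blocks(1)
    # A fresh, valid block should become the new peak (the helper's default expectation).
    await _validate_and_add_block(empty_blockchain, blocks[0])
    # Re-adding the same block should report ALREADY_HAVE_BLOCK, and no error is enforced.
    await _validate_and_add_block(
        empty_blockchain, blocks[0], expected_result=ReceiveBlockResult.ALREADY_HAVE_BLOCK
    )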