Example #1
def iters_from_block(
    constants,
    reward_chain_block: Union[RewardChainBlock, RewardChainBlockUnfinished],
    sub_slot_iters: uint64,
    difficulty: uint64,
) -> Tuple[uint64, uint64]:
    if reward_chain_block.challenge_chain_sp_vdf is None:
        assert reward_chain_block.signage_point_index == 0
        cc_sp: bytes32 = reward_chain_block.pos_ss_cc_challenge_hash
    else:
        cc_sp = reward_chain_block.challenge_chain_sp_vdf.output.get_hash()

    quality_string: Optional[bytes32] = reward_chain_block.proof_of_space.verify_and_get_quality_string(
        constants,
        reward_chain_block.pos_ss_cc_challenge_hash,
        cc_sp,
    )
    assert quality_string is not None

    required_iters: uint64 = calculate_iterations_quality(
        constants.DIFFICULTY_CONSTANT_FACTOR,
        quality_string,
        reward_chain_block.proof_of_space.size,
        difficulty,
        cc_sp,
    )
    return (
        calculate_sp_iters(constants, sub_slot_iters, reward_chain_block.signage_point_index),
        calculate_ip_iters(
            constants,
            sub_slot_iters,
            reward_chain_block.signage_point_index,
            required_iters,
        ),
    )
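
The examples on this page all revolve around calculate_iterations_quality. Newer call sites, like the one above, pass five arguments: a difficulty constant factor, the proof-of-space quality string, the plot size k, the current difficulty, and the challenge-chain signage-point hash; older snippets further down pass four (quality string, plot size, difficulty, and a minimum-iterations value). As a rough mental model, the sketch below shows one plausible way the five-argument form could turn its inputs into required iterations. The hashing step, the plot-size term, and the rounding are assumptions for illustration, not the project's actual implementation.

from hashlib import sha256


def calculate_iterations_quality_sketch(
    difficulty_constant_factor: int,
    quality_string: bytes,
    size: int,
    difficulty: int,
    cc_sp_output_hash: bytes,
) -> int:
    # Mix the quality with the signage point so the result depends on both (assumed).
    sp_quality = sha256(quality_string + cc_sp_output_hash).digest()
    # Larger plots should need proportionally fewer iterations, so divide by an
    # assumed expected-plot-size term that grows roughly like (2k + 1) * 2^(k - 1).
    expected_plot_size = ((2 * size) + 1) * (2 ** (size - 1))
    iters = (
        int(difficulty)
        * int(difficulty_constant_factor)
        * int.from_bytes(sp_quality, "big")
        // ((2 ** 256) * expected_plot_size)
    )
    return max(iters, 1)

The property every example depends on is that lower is better: a proof or block only qualifies when the returned iteration count is small enough relative to a signage-point interval or a configured threshold.
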
Example #2
    async def challenge_response(
        self, challenge_response: harvester_protocol.ChallengeResponse
    ):
        """
        This is a response from the harvester, for a NewChallenge. Here we check if the proof
        of space is sufficiently good, and if so, we ask for the whole proof.
        """

        if challenge_response.quality_string in self.harvester_responses_challenge:
            log.warning(
                f"Have already seen quality string {challenge_response.quality_string}"
            )
            return
        weight: uint128 = self.challenge_to_weight[challenge_response.challenge_hash]
        height: uint32 = self.challenge_to_height[challenge_response.challenge_hash]
        difficulty: uint64 = uint64(0)
        for posf in self.challenges[weight]:
            if posf.challenge_hash == challenge_response.challenge_hash:
                difficulty = posf.difficulty
        if difficulty == 0:
            raise RuntimeError("Did not find challenge")

        estimate_min = (self.proof_of_time_estimate_ips *
                        self.constants["BLOCK_TIME_TARGET"] /
                        self.constants["MIN_ITERS_PROPORTION"])
        number_iters: uint64 = calculate_iterations_quality(
            challenge_response.quality_string,
            challenge_response.plot_size,
            difficulty,
            estimate_min,
        )
        if height < 1000:  # As the difficulty adjusts, don't fetch all qualities
            if challenge_response.challenge_hash not in self.challenge_to_best_iters:
                self.challenge_to_best_iters[challenge_response.challenge_hash] = number_iters
            elif number_iters < self.challenge_to_best_iters[challenge_response.challenge_hash]:
                self.challenge_to_best_iters[challenge_response.challenge_hash] = number_iters
            else:
                return
        estimate_secs: float = number_iters / self.proof_of_time_estimate_ips

        log.info(
            f"Estimate: {estimate_secs}, rate: {self.proof_of_time_estimate_ips}"
        )
        if (estimate_secs < self.config["pool_share_threshold"]
                or estimate_secs < self.config["propagate_threshold"]):
            self.harvester_responses_challenge[challenge_response.quality_string] = (
                challenge_response.challenge_hash
            )
            request = harvester_protocol.RequestProofOfSpace(challenge_response.quality_string)

            yield OutboundMessage(
                NodeType.HARVESTER,
                Message("request_proof_of_space", request),
                Delivery.RESPOND,
            )
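
The farmer method above (and Example #8 further down) uses the older four-argument form, calculate_iterations_quality(quality_string, plot_size, difficulty, estimate_min), and then converts the result into an estimated proof-of-time duration. The snippet below replays that conversion with made-up numbers; the config keys and magnitudes are hypothetical and only show how the pool_share_threshold / propagate_threshold comparison works.

# Hypothetical values for illustration only; a real node takes these from its
# config and from the measured proof-of-time speed.
proof_of_time_estimate_ips = 4000  # VDF iterations per second
config = {"pool_share_threshold": 60.0, "propagate_threshold": 30.0}

number_iters = 150_000  # pretend calculate_iterations_quality returned this
estimate_secs = number_iters / proof_of_time_estimate_ips

# The farmer only asks the harvester for the full proof when the estimated time
# to finish the proof of time is under at least one of the thresholds.
if estimate_secs < config["pool_share_threshold"] or estimate_secs < config["propagate_threshold"]:
    print(f"worth fetching: estimated {estimate_secs:.1f}s")
else:
    print(f"too slow: estimated {estimate_secs:.1f}s")
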
Example #3
        def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
            # Uses the DiskProver object to lookup qualities. This is a blocking call,
            # so it should be run in a thread pool.
            try:
                sp_challenge_hash = ProofOfSpace.calculate_pos_challenge(
                    plot_info.prover.get_id(),
                    new_challenge.challenge_hash,
                    new_challenge.sp_hash,
                )
                try:
                    quality_strings = plot_info.prover.get_qualities_for_challenge(sp_challenge_hash)
                except Exception as e:
                    self.harvester.log.error(f"Error using prover object {e}")
                    return []

                responses: List[Tuple[bytes32, ProofOfSpace]] = []
                if quality_strings is not None:
                    # Found proofs of space (on average 1 is expected per plot)
                    for index, quality_str in enumerate(quality_strings):
                        required_iters: uint64 = calculate_iterations_quality(
                            self.harvester.constants.DIFFICULTY_CONSTANT_FACTOR,
                            quality_str,
                            plot_info.prover.get_size(),
                            new_challenge.difficulty,
                            new_challenge.sp_hash,
                        )
                        sp_interval_iters = calculate_sp_interval_iters(
                            self.harvester.constants, new_challenge.sub_slot_iters
                        )
                        if required_iters < sp_interval_iters:
                            # Found a very good proof of space! will fetch the whole proof from disk,
                            # then send to farmer
                            try:
                                proof_xs = plot_info.prover.get_full_proof(sp_challenge_hash, index)
                            except RuntimeError:
                                self.harvester.log.error(f"Exception fetching full proof for {filename}")
                                continue

                            plot_public_key = ProofOfSpace.generate_plot_public_key(
                                plot_info.local_sk.get_g1(), plot_info.farmer_public_key
                            )
                            responses.append(
                                (
                                    quality_str,
                                    ProofOfSpace(
                                        sp_challenge_hash,
                                        plot_info.pool_public_key,
                                        plot_info.pool_contract_puzzle_hash,
                                        plot_public_key,
                                        uint8(plot_info.prover.get_size()),
                                        proof_xs,
                                    ),
                                )
                            )
                return responses
            except Exception as e:
                self.harvester.log.error(f"Unknown error: {e}")
                return []
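
In the lookup above, a quality only leads to a full-proof fetch when required_iters < sp_interval_iters. A minimal sketch of what calculate_sp_interval_iters might compute is shown below; dividing the sub-slot iterations evenly among the signage points matches how the value is used here, but both that division and the NUM_SPS_SUB_SLOT constant are assumptions.

NUM_SPS_SUB_SLOT = 64  # placeholder constant for this sketch


def calculate_sp_interval_iters_sketch(sub_slot_iters: int) -> int:
    # One signage-point interval is assumed to be an equal slice of the sub-slot.
    return sub_slot_iters // NUM_SPS_SUB_SLOT


# A proof is only worth sending to the farmer if its required iterations fit
# inside one interval, mirroring the `required_iters < sp_interval_iters` check.
sub_slot_iters = 100_000_000
required_iters = 1_200_000
print(required_iters < calculate_sp_interval_iters_sketch(sub_slot_iters))  # True
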
Example #4
    def test_win_percentage(self):
        """
        Tests that the percentage of blocks won is proportional to the space of each farmer,
        with the assumption that all farmers have access to the same VDF speed.
        """
        farmer_ks = {
            uint8(32): 100,
            uint8(33): 100,
            uint8(34): 100,
            uint8(35): 100,
            uint8(36): 100,
        }
        farmer_space = {
            k: _expected_plot_size(uint8(k)) * count
            for k, count in farmer_ks.items()
        }
        total_space = sum(farmer_space.values())
        percentage_space = {
            k: float(sp / total_space)
            for k, sp in farmer_space.items()
        }
        wins = {k: 0 for k in farmer_ks.keys()}
        total_slots = 50
        num_sps = 16
        sp_interval_iters = uint64(100000000 // 32)
        difficulty = uint64(500000000000)

        for slot_index in range(total_slots):
            total_wins_in_slot = 0
            for sp_index in range(num_sps):
                sp_hash = std_hash(
                    slot_index.to_bytes(4, "big") +
                    sp_index.to_bytes(4, "big"))
                for k, count in farmer_ks.items():
                    for farmer_index in range(count):
                        quality = std_hash(
                            slot_index.to_bytes(4, "big") +
                            k.to_bytes(1, "big") + bytes(farmer_index))
                        required_iters = calculate_iterations_quality(
                            2**25, quality, k, difficulty, sp_hash)
                        if required_iters < sp_interval_iters:
                            wins[k] += 1
                            total_wins_in_slot += 1

        win_percentage = {
            k: wins[k] / sum(wins.values())
            for k in farmer_ks.keys()
        }
        for k in farmer_ks.keys():
            # Win rate is proportional to percentage of space
            assert abs(win_percentage[k] - percentage_space[k]) < 0.01
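
The proportionality test above weighs each farmer by _expected_plot_size(k) * count. If that helper is not at hand, the sketch below reproduces a commonly quoted approximation for the size of a k plot (an assumption here, not necessarily the project's exact helper); it is enough to see why doubling space should roughly double the win rate.

def expected_plot_size_sketch(k: int) -> int:
    # Approximate size of a plot with parameter k: on the order of 2^(k - 1)
    # entries, each costing roughly 2k + 1 bits.
    return ((2 * k) + 1) * (2 ** (k - 1))


sizes = {k: expected_plot_size_sketch(k) for k in range(32, 37)}
total = sum(sizes.values())
for k, size in sizes.items():
    # Expected share of wins for each k, all else (plot count, VDF speed) equal.
    print(k, round(size / total, 3))
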
Example #5
async def load_blocks_dont_validate(
    blocks,
) -> Tuple[
    Dict[bytes32, HeaderBlock], Dict[uint32, bytes32], Dict[bytes32, BlockRecord], Dict[bytes32, SubEpochSummary]
]:
    header_cache: Dict[bytes32, HeaderBlock] = {}
    height_to_hash: Dict[uint32, bytes32] = {}
    sub_blocks: Dict[bytes32, BlockRecord] = {}
    sub_epoch_summaries: Dict[bytes32, SubEpochSummary] = {}
    prev_block = None
    difficulty = test_constants.DIFFICULTY_STARTING
    block: FullBlock
    for block in blocks:
        if block.height > 0:
            assert prev_block is not None
            difficulty = block.reward_chain_block.weight - prev_block.weight

        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            assert block.reward_chain_block.signage_point_index == 0
            cc_sp: bytes32 = block.reward_chain_block.pos_ss_cc_challenge_hash
        else:
            cc_sp = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()

        quality_string: Optional[bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
            test_constants,
            block.reward_chain_block.pos_ss_cc_challenge_hash,
            cc_sp,
        )
        assert quality_string is not None

        required_iters: uint64 = calculate_iterations_quality(
            test_constants.DIFFICULTY_CONSTANT_FACTOR,
            quality_string,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp,
        )

        sub_block = block_to_block_record(
            test_constants, BlockCache(sub_blocks, height_to_hash),
            required_iters, block, None)
        sub_blocks[block.header_hash] = sub_block
        height_to_hash[block.height] = block.header_hash
        header_cache[block.header_hash] = block.get_block_header()
        if sub_block.sub_epoch_summary_included is not None:
            sub_epoch_summaries[block.height] = sub_block.sub_epoch_summary_included
        prev_block = block
    return header_cache, height_to_hash, sub_blocks, sub_epoch_summaries
Example #6
    def get_pospaces_for_challenge(
        self,
        constants: ConsensusConstants,
        challenge_hash: bytes32,
        signage_point: bytes32,
        seed: bytes,
        difficulty: uint64,
        sub_slot_iters: uint64,
    ) -> List[Tuple[uint64, ProofOfSpace]]:
        found_proofs: List[Tuple[uint64, ProofOfSpace]] = []
        plots: List[PlotInfo] = [
            plot_info for _, plot_info in sorted(list(self.plots.items()), key=lambda x: str(x[0]))
        ]
        random.seed(seed)
        for plot_info in plots:
            plot_id = plot_info.prover.get_id()
            if ProofOfSpace.passes_plot_filter(constants, plot_id, challenge_hash, signage_point):
                new_challenge: bytes32 = ProofOfSpace.calculate_pos_challenge(plot_id, challenge_hash, signage_point)
                qualities = plot_info.prover.get_qualities_for_challenge(new_challenge)

                for proof_index, quality_str in enumerate(qualities):

                    required_iters = calculate_iterations_quality(
                        quality_str,
                        plot_info.prover.get_size(),
                        difficulty,
                        signage_point,
                    )
                    if required_iters < calculate_sp_interval_iters(constants, sub_slot_iters):
                        proof_xs: bytes = plot_info.prover.get_full_proof(new_challenge, proof_index)
                        plot_pk = ProofOfSpace.generate_plot_public_key(
                            plot_info.local_sk.get_g1(),
                            plot_info.farmer_public_key,
                        )
                        proof_of_space: ProofOfSpace = ProofOfSpace(
                            new_challenge,
                            plot_info.pool_public_key,
                            None,
                            plot_pk,
                            plot_info.prover.get_size(),
                            proof_xs,
                        )
                        found_proofs.append((required_iters, proof_of_space))
        random_sample = found_proofs
        if len(found_proofs) >= 1:
            if random.random() < 0.1:
                # Removes some proofs of space to create "random" chains, based on the seed
                random_sample = random.sample(found_proofs, len(found_proofs) - 1)
        return random_sample
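
The simulation helper above only queries a plot after ProofOfSpace.passes_plot_filter lets it through, which keeps disk lookups rare. The sketch below shows the general shape of such a filter: hash the plot id together with the challenge and signage point and require a fixed number of leading zero bits. The exact concatenation order and the 9-bit threshold are assumptions used for illustration.

from hashlib import sha256

NUMBER_ZERO_BITS_PLOT_FILTER = 9  # placeholder for this sketch


def passes_plot_filter_sketch(plot_id: bytes, challenge_hash: bytes, signage_point: bytes) -> bool:
    # A plot is eligible for this signage point only if the combined hash starts
    # with enough zero bits; with 9 bits, about 1 in 512 plots passes on average.
    digest = sha256(plot_id + challenge_hash + signage_point).digest()
    return int.from_bytes(digest, "big") >> (256 - NUMBER_ZERO_BITS_PLOT_FILTER) == 0
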
Example #7
    def test_win_percentage(self):
        """
        Tests that the percentage of blocks won is proportional to the space of each farmer,
        with the assumption that all farmers have access to the same VDF speed.
        """
        farmer_ks = [
            uint8(34),
            uint8(35),
            uint8(36),
            uint8(37),
            uint8(38),
            uint8(39),
            uint8(39),
            uint8(39),
            uint8(39),
            uint8(39),
            uint8(40),
            uint8(41),
        ]
        farmer_space = [_expected_plot_size(uint8(k)) for k in farmer_ks]
        total_space = sum(farmer_space)
        percentage_space = [float(sp / total_space) for sp in farmer_space]
        wins = [0 for _ in range(len(farmer_ks))]
        total_blocks = 5000

        for b_index in range(total_blocks):
            qualities = [
                std_hash(b_index.to_bytes(32, "big") + bytes(farmer_index))
                for farmer_index in range(len(farmer_ks))
            ]
            iters = [
                calculate_iterations_quality(
                    qualities[i],
                    farmer_ks[i],
                    uint64(50000000),
                    uint64(5000 * 30),
                ) for i in range(len(qualities))
            ]
            wins[iters.index(min(iters))] += 1

        win_percentage = [
            wins[w] / total_blocks for w in range(len(farmer_ks))
        ]
        for i in range(len(percentage_space)):
            # Win rate is proportional to percentage of space
            assert abs(win_percentage[i] - percentage_space[i]) < 0.01
Example #8
    async def _get_required_iters(self, challenge_hash: bytes32,
                                  quality_string: bytes32, plot_size: uint8):
        weight: uint128 = self.challenge_to_weight[challenge_hash]
        difficulty: uint64 = uint64(0)
        for posf in self.challenges[weight]:
            if posf.challenge_hash == challenge_hash:
                difficulty = posf.difficulty
        if difficulty == 0:
            raise RuntimeError("Did not find challenge")

        estimate_min = (self.proof_of_time_estimate_ips *
                        self.constants["BLOCK_TIME_TARGET"] /
                        self.constants["MIN_ITERS_PROPORTION"])
        number_iters: uint64 = calculate_iterations_quality(
            quality_string,
            plot_size,
            difficulty,
            estimate_min,
        )
        return number_iters
Example #9
    def __validate_pospace(
        self,
        segment: SubEpochChallengeSegment,
        idx: int,
        curr_diff: uint64,
        prev_cc_sub_slot: Optional[bytes32],
    ) -> Optional[uint64]:

        # find challenge block sub slot
        challenge_sub_slot: SubSlotData = segment.sub_slots[idx]

        if prev_cc_sub_slot is None:
            # genesis
            cc_sp_hash: bytes32 = self.constants.GENESIS_CHALLENGE
            challenge = self.constants.GENESIS_CHALLENGE
        else:
            challenge = prev_cc_sub_slot
            if challenge_sub_slot.cc_sp_vdf_info is None:
                cc_sp_hash = prev_cc_sub_slot
            else:
                cc_sp_hash = challenge_sub_slot.cc_sp_vdf_info.output.get_hash()

        # validate proof of space
        assert challenge_sub_slot.proof_of_space is not None
        q_str = challenge_sub_slot.proof_of_space.verify_and_get_quality_string(
            self.constants,
            challenge,
            cc_sp_hash,
        )
        if q_str is None:
            self.log.error("could not verify proof of space")
            return None
        return calculate_iterations_quality(
            self.constants.DIFFICULTY_CONSTANT_FACTOR,
            q_str,
            challenge_sub_slot.proof_of_space.size,
            curr_diff,
            cc_sp_hash,
        )
Example #10
def load_block_list(
    block_list: List[FullBlock], constants
) -> Tuple[Dict[uint32, bytes32], uint64, Dict[uint32, SubBlockRecord]]:
    difficulty = 0
    height_to_hash: Dict[uint32, bytes32] = {}
    sub_blocks: Dict[uint32, SubBlockRecord] = {}
    for full_block in block_list:
        if full_block.sub_block_height == 0:
            difficulty = uint64(constants.DIFFICULTY_STARTING)
        else:
            difficulty = full_block.weight - block_list[full_block.sub_block_height - 1].weight
        if full_block.reward_chain_sub_block.signage_point_index == 0:
            challenge = full_block.reward_chain_sub_block.pos_ss_cc_challenge_hash
            sp_hash = challenge
        else:
            assert full_block.reward_chain_sub_block.challenge_chain_sp_vdf is not None
            challenge = full_block.reward_chain_sub_block.challenge_chain_sp_vdf.challenge
            sp_hash = full_block.reward_chain_sub_block.challenge_chain_sp_vdf.output.get_hash()
        quality_str = full_block.reward_chain_sub_block.proof_of_space.verify_and_get_quality_string(
            constants, challenge, sp_hash
        )
        required_iters: uint64 = calculate_iterations_quality(
            quality_str,
            full_block.reward_chain_sub_block.proof_of_space.size,
            difficulty,
            sp_hash,
        )

        sub_blocks[full_block.header_hash] = block_to_sub_block_record(
            constants,
            sub_blocks,
            height_to_hash,
            required_iters,
            full_block,
            None,
        )
        height_to_hash[uint32(full_block.sub_block_height)] = full_block.header_hash
    return height_to_hash, uint64(difficulty), sub_blocks
Example #11
async def validate_unfinished_block_header(
    constants: Dict,
    headers: Dict[bytes32, Header],
    height_to_hash: Dict[uint32, bytes32],
    block_header: Header,
    proof_of_space: ProofOfSpace,
    prev_header_block: Optional[HeaderBlock],
    pre_validated: bool = False,
    pos_quality_string: Optional[bytes32] = None,
) -> Tuple[Optional[Err], Optional[uint64]]:
    """
    Block validation algorithm. Returns the number of VDF iterations that this block's
    proof of time must have, if the candidate block is fully valid (except for proof of
    time). The same as validate_block, but without proof of time and challenge validation.
    If the block is invalid, an error code is returned.

    Does NOT validate transactions and fees.
    """
    if not pre_validated:
        # 1. The hash of the proof of space must match header_data.proof_of_space_hash
        if proof_of_space.get_hash() != block_header.data.proof_of_space_hash:
            return (Err.INVALID_POSPACE_HASH, None)

        # 2. The coinbase signature must be valid, according to the pool public key
        pair = block_header.data.coinbase_signature.PkMessagePair(
            proof_of_space.pool_pubkey,
            block_header.data.coinbase.name(),
        )

        if not block_header.data.coinbase_signature.validate([pair]):
            return (Err.INVALID_COINBASE_SIGNATURE, None)

        # 3. Check harvester signature of header data is valid based on harvester key
        if not block_header.harvester_signature.verify(
            [blspy.Util.hash256(block_header.data.get_hash())],
            [proof_of_space.plot_pubkey],
        ):
            return (Err.INVALID_HARVESTER_SIGNATURE, None)

    # 4. If not genesis, the previous block must exist
    if prev_header_block is not None and block_header.prev_header_hash not in headers:
        return (Err.DOES_NOT_EXTEND, None)

    # 5. If not genesis, the timestamp must be >= the average timestamp of last 11 blocks
    # and less than 2 hours in the future (if block height < 11, average all previous blocks).
    # The average is the sum, integer-divided by the number of timestamps
    if prev_header_block is not None:
        last_timestamps: List[uint64] = []
        curr = prev_header_block.header
        while len(last_timestamps) < constants["NUMBER_OF_TIMESTAMPS"]:
            last_timestamps.append(curr.data.timestamp)
            fetched = headers.get(curr.prev_header_hash, None)
            if not fetched:
                break
            curr = fetched
        if len(last_timestamps) != constants["NUMBER_OF_TIMESTAMPS"]:
            # For blocks 1 to 10, average timestamps of all previous blocks
            assert curr.height == 0
        prev_time: uint64 = uint64(
            int(sum(last_timestamps) // len(last_timestamps)))
        if block_header.data.timestamp < prev_time:
            return (Err.TIMESTAMP_TOO_FAR_IN_PAST, None)
        if block_header.data.timestamp > time.time() + constants["MAX_FUTURE_TIME"]:
            return (Err.TIMESTAMP_TOO_FAR_IN_FUTURE, None)

    # 7. Extension data must be valid, if any is present

    # Compute challenge of parent
    challenge_hash: bytes32
    if prev_header_block is not None:
        challenge: Challenge = prev_header_block.challenge
        challenge_hash = challenge.get_hash()
        # 8. Check challenge hash of prev is the same as in pos
        if challenge_hash != proof_of_space.challenge_hash:
            return (Err.INVALID_POSPACE_CHALLENGE, None)

    # 10. The proof of space must be valid on the challenge
    if pos_quality_string is None:
        pos_quality_string = proof_of_space.verify_and_get_quality_string()
        if not pos_quality_string:
            return (Err.INVALID_POSPACE, None)

    if prev_header_block is not None:
        # 11. If not genesis, the height on the previous block must be one less than on this block
        if block_header.height != prev_header_block.height + 1:
            return (Err.INVALID_HEIGHT, None)
    else:
        # 12. If genesis, the height must be 0
        if block_header.height != 0:
            return (Err.INVALID_HEIGHT, None)

    # 13. The coinbase reward must match the block schedule
    coinbase_reward = calculate_block_reward(block_header.height)
    if coinbase_reward != block_header.data.coinbase.amount:
        return (Err.INVALID_COINBASE_AMOUNT, None)

    # 13b. The coinbase parent id must be the height
    if block_header.data.coinbase.parent_coin_info != block_header.height.to_bytes(
            32, "big"):
        return (Err.INVALID_COINBASE_PARENT, None)

    # 13c. The fees coin parent id must be hash(hash(height))
    if block_header.data.fees_coin.parent_coin_info != std_hash(
            std_hash(uint32(block_header.height))):
        return (Err.INVALID_FEES_COIN_PARENT, None)

    difficulty: uint64
    if prev_header_block is not None:
        difficulty = get_next_difficulty(constants, headers, height_to_hash,
                                         prev_header_block.header)
        min_iters = get_next_min_iters(constants, headers, height_to_hash,
                                       prev_header_block)
    else:
        difficulty = uint64(constants["DIFFICULTY_STARTING"])
        min_iters = uint64(constants["MIN_ITERS_STARTING"])

    number_of_iters: uint64 = calculate_iterations_quality(
        pos_quality_string,
        proof_of_space.size,
        difficulty,
        min_iters,
    )

    assert count_significant_bits(difficulty) <= constants["SIGNIFICANT_BITS"]
    assert count_significant_bits(min_iters) <= constants["SIGNIFICANT_BITS"]

    if prev_header_block is not None:
        # 17. If not genesis, the total weight must be the parent weight + difficulty
        if block_header.weight != prev_header_block.weight + difficulty:
            return (Err.INVALID_WEIGHT, None)

        # 18. If not genesis, the total iters must be parent iters + number_iters
        if (block_header.data.total_iters !=
                prev_header_block.header.data.total_iters + number_of_iters):
            return (Err.INVALID_TOTAL_ITERS, None)
    else:
        # 19. If genesis, the total weight must be starting difficulty
        if block_header.weight != difficulty:
            return (Err.INVALID_WEIGHT, None)

        # 20. If genesis, the total iters must be number iters
        if block_header.data.total_iters != number_of_iters:
            return (Err.INVALID_TOTAL_ITERS, None)

    return (None, number_of_iters)
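
The validator above reports problems through its return value rather than by raising, so callers branch on the error slot of the (error, iterations) pair. A hedged usage sketch follows; the helper name and all of its arguments are assumed to come from the surrounding node state.

async def check_candidate_block(constants, headers, height_to_hash, candidate_header,
                                candidate_proof_of_space, prev_header_block):
    # Sketch only: consume the (error, iterations) pair returned by the validator.
    error, number_of_iters = await validate_unfinished_block_header(
        constants,
        headers,
        height_to_hash,
        candidate_header,
        candidate_proof_of_space,
        prev_header_block,
    )
    if error is not None:
        return None  # rejected: `error` is the Err code explaining why
    return number_of_iters  # the proof of time must run exactly this many iterations
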
Example #12
    def validate_select_proofs(
        self,
        all_proof_hashes: List[Tuple[bytes32, Optional[Tuple[uint64, uint64]]]],
        heights: List[uint32],
        cached_blocks: Dict[bytes32, Tuple[BlockRecord, HeaderBlock, Optional[bytes]]],
        potential_header_hashes: Dict[uint32, bytes32],
    ) -> bool:
        """
        Given a full list of proof hashes (hash of pospace and time, along with difficulty resets), this function
        checks that the proofs at the passed-in heights are correct. This is used to validate the weight of a chain,
        by probabilistically sampling a few blocks and only validating these. Cached blocks and potential header hashes
        contain the actual data for the header blocks to validate. This method also requires the previous block for
        each height to be present, to ensure an attacker can't grind on the challenge hash.
        """

        for height in heights:
            prev_height = uint32(height - 1)
            # Get previous header block
            prev_hh = potential_header_hashes[prev_height]
            _, prev_header_block, _ = cached_blocks[prev_hh]

            # Validate proof hash of previous header block
            if (std_hash(prev_header_block.proof_of_space.get_hash() +
                         prev_header_block.proof_of_time.output.get_hash()) !=
                    all_proof_hashes[prev_height][0]):
                return False

            # Calculate challenge hash (with difficulty)
            if (prev_header_block.challenge.prev_challenge_hash !=
                    prev_header_block.proof_of_space.challenge_hash):
                return False
            if (prev_header_block.challenge.prev_challenge_hash !=
                    prev_header_block.proof_of_time.challenge_hash):
                return False
            if (prev_header_block.challenge.proofs_hash !=
                    all_proof_hashes[prev_height][0]):
                return False
            if (height % self.constants["DIFFICULTY_EPOCH"] ==
                    self.constants["DIFFICULTY_DELAY"]):
                diff_change = all_proof_hashes[height][1]
                assert diff_change is not None
                if prev_header_block.challenge.new_work_difficulty != diff_change:
                    return False
            else:
                if prev_header_block.challenge.new_work_difficulty is not None:
                    return False
            challenge_hash = prev_header_block.challenge.get_hash()

            # Get header block
            hh = potential_header_hashes[height]
            _, header_block, _ = cached_blocks[hh]

            # Validate challenge hash is == pospace challenge hash
            if challenge_hash != header_block.proof_of_space.challenge_hash:
                return False
            # Validate challenge hash is == potime challenge hash
            if challenge_hash != header_block.proof_of_time.challenge_hash:
                return False
            # Validate proof hash
            if (std_hash(header_block.proof_of_space.get_hash() +
                         header_block.proof_of_time.output.get_hash()) !=
                    all_proof_hashes[height][0]):
                return False

            # Get difficulty
            if (height % self.constants["DIFFICULTY_EPOCH"] <
                    self.constants["DIFFICULTY_DELAY"]):
                diff_height = (height -
                               (height % self.constants["DIFFICULTY_EPOCH"]) -
                               (self.constants["DIFFICULTY_EPOCH"] -
                                self.constants["DIFFICULTY_DELAY"]))
            else:
                diff_height = (height -
                               (height % self.constants["DIFFICULTY_EPOCH"]) +
                               self.constants["DIFFICULTY_DELAY"])

            difficulty = all_proof_hashes[diff_height][1]
            assert difficulty is not None

            # Validate pospace to get iters
            quality_str = header_block.proof_of_space.verify_and_get_quality_string()
            assert quality_str is not None

            if (height < self.constants["DIFFICULTY_EPOCH"] +
                    self.constants["DIFFICULTY_DELAY"]):
                min_iters = self.constants["MIN_ITERS_STARTING"]
            else:
                if (height % self.constants["DIFFICULTY_EPOCH"] <
                        self.constants["DIFFICULTY_DELAY"]):
                    height2 = (height -
                               (height % self.constants["DIFFICULTY_EPOCH"]) -
                               self.constants["DIFFICULTY_EPOCH"] - 1)
                else:
                    height2 = height - (height %
                                        self.constants["DIFFICULTY_EPOCH"]) - 1

                height1 = height2 - self.constants["DIFFICULTY_EPOCH"]
                if height1 == -1:
                    iters1 = uint64(0)
                else:
                    iters1 = all_proof_hashes[height1][2]
                    assert iters1 is not None
                iters2 = all_proof_hashes[height2][2]
                assert iters2 is not None

                min_iters = uint64((iters2 - iters1) //
                                   (self.constants["DIFFICULTY_EPOCH"] *
                                    self.constants["MIN_ITERS_PROPORTION"]))

            number_of_iters: uint64 = calculate_iterations_quality(
                quality_str,
                header_block.proof_of_space.size,
                difficulty,
                min_iters,
            )

            # Validate potime
            if number_of_iters != header_block.proof_of_time.number_of_iterations:
                return False

            if not header_block.proof_of_time.is_valid(
                    self.constants["DISCRIMINANT_SIZE_BITS"]):
                return False

        return True
Example #13
def validate_unfinished_header_block(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    header_block: UnfinishedHeaderBlock,
    check_filter: bool,
    expected_difficulty: uint64,
    expected_sub_slot_iters: uint64,
    skip_overflow_last_ss_validation: bool = False,
    skip_vdf_is_valid: bool = False,
) -> Tuple[Optional[uint64], Optional[ValidationError]]:
    """
    Validates an unfinished header block. This is a block without the infusion VDFs (unfinished)
    and without transactions and transaction info (header). Returns (required_iters, error).

    This method is meant to validate only the unfinished part of the block. However, finished_sub_slots
    refers to all sub-slots that were finished from the previous block's infusion point, up to this block's
    infusion point. Therefore, in the case where this is an overflow block and the last sub-slot is not yet
    released, header_block.finished_sub_slots will be missing one sub-slot. In this case,
    skip_overflow_last_ss_validation must be set to True. This will skip validation of end of slots, sub-epochs,
    and lead to other small tweaks in validation.
    """
    # 1. Check that the previous block exists in the blockchain, or that it is correct

    prev_b = blocks.try_block_record(header_block.prev_header_hash)
    genesis_block = prev_b is None
    if genesis_block and header_block.prev_header_hash != constants.GENESIS_CHALLENGE:
        return None, ValidationError(Err.INVALID_PREV_BLOCK_HASH)

    overflow = is_overflow_block(
        constants, header_block.reward_chain_block.signage_point_index)
    if skip_overflow_last_ss_validation and overflow:
        if final_eos_is_already_included(header_block, blocks,
                                         expected_sub_slot_iters):
            skip_overflow_last_ss_validation = False
            finished_sub_slots_since_prev = len(
                header_block.finished_sub_slots)
        else:
            finished_sub_slots_since_prev = len(
                header_block.finished_sub_slots) + 1
    else:
        finished_sub_slots_since_prev = len(header_block.finished_sub_slots)

    new_sub_slot: bool = finished_sub_slots_since_prev > 0

    can_finish_se: bool = False
    can_finish_epoch: bool = False
    if genesis_block:
        height: uint32 = uint32(0)
        assert expected_difficulty == constants.DIFFICULTY_STARTING
        assert expected_sub_slot_iters == constants.SUB_SLOT_ITERS_STARTING
    else:
        assert prev_b is not None
        height = uint32(prev_b.height + 1)
        if prev_b.sub_epoch_summary_included is not None:
            can_finish_se, can_finish_epoch = False, False
        else:
            if new_sub_slot:
                can_finish_se, can_finish_epoch = can_finish_sub_and_full_epoch(
                    constants,
                    prev_b.height,
                    prev_b.deficit,
                    blocks,
                    prev_b.prev_hash,
                    False,
                )
            else:
                can_finish_se = False
                can_finish_epoch = False

    # 2. Check finished slots that have been crossed since prev_b
    ses_hash: Optional[bytes32] = None
    if new_sub_slot and not skip_overflow_last_ss_validation:
        # Finished a slot(s) since previous block. The first sub-slot must have at least one block, and all
        # subsequent sub-slots must be empty
        for finished_sub_slot_n, sub_slot in enumerate(
                header_block.finished_sub_slots):
            # Start of slot challenge is fetched from SP
            challenge_hash: bytes32 = sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf.challenge

            if finished_sub_slot_n == 0:
                if genesis_block:
                    # 2a. check sub-slot challenge hash for genesis block
                    if challenge_hash != constants.GENESIS_CHALLENGE:
                        return None, ValidationError(
                            Err.INVALID_PREV_CHALLENGE_SLOT_HASH)
                else:
                    assert prev_b is not None
                    curr: BlockRecord = prev_b
                    while not curr.first_in_sub_slot:
                        curr = blocks.block_record(curr.prev_hash)
                    assert curr.finished_challenge_slot_hashes is not None

                    # 2b. check sub-slot challenge hash for non-genesis block
                    if curr.finished_challenge_slot_hashes[-1] != challenge_hash:
                        print(curr.finished_challenge_slot_hashes[-1], challenge_hash)
                        return None, ValidationError(
                            Err.INVALID_PREV_CHALLENGE_SLOT_HASH)
            else:
                # 2c. check sub-slot challenge hash for empty slot
                if (header_block.finished_sub_slots[finished_sub_slot_n - 1].challenge_chain.get_hash()
                        != challenge_hash):
                    return None, ValidationError(
                        Err.INVALID_PREV_CHALLENGE_SLOT_HASH)

            if genesis_block:
                # 2d. Validate that genesis block has no ICC
                if sub_slot.infused_challenge_chain is not None:
                    return None, ValidationError(Err.SHOULD_NOT_HAVE_ICC)
            else:
                assert prev_b is not None
                icc_iters_committed: Optional[uint64] = None
                icc_iters_proof: Optional[uint64] = None
                icc_challenge_hash: Optional[bytes32] = None
                icc_vdf_input = None
                if prev_b.deficit < constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                    # There should be no ICC chain if the last block's deficit is 16
                    # Prev sb's deficit is 0, 1, 2, 3, or 4
                    if finished_sub_slot_n == 0:
                        # This is the first sub slot after the last sb, which must have deficit 1-4, and thus an ICC
                        curr = prev_b
                        while not curr.is_challenge_block(constants) and not curr.first_in_sub_slot:
                            curr = blocks.block_record(curr.prev_hash)
                        if curr.is_challenge_block(constants):
                            icc_challenge_hash = curr.challenge_block_info_hash
                            icc_iters_committed = uint64(
                                prev_b.sub_slot_iters -
                                curr.ip_iters(constants))
                        else:
                            assert curr.finished_infused_challenge_slot_hashes is not None
                            icc_challenge_hash = curr.finished_infused_challenge_slot_hashes[-1]
                            icc_iters_committed = prev_b.sub_slot_iters
                        icc_iters_proof = uint64(prev_b.sub_slot_iters -
                                                 prev_b.ip_iters(constants))
                        if prev_b.is_challenge_block(constants):
                            icc_vdf_input = ClassgroupElement.get_default_element()
                        else:
                            icc_vdf_input = prev_b.infused_challenge_vdf_output
                    else:
                        # This is not the first sub slot after the last block, so we might not have an ICC
                        if (header_block.finished_sub_slots[finished_sub_slot_n - 1].reward_chain.deficit
                                < constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK):
                            finished_ss = header_block.finished_sub_slots[finished_sub_slot_n - 1]
                            assert finished_ss.infused_challenge_chain is not None

                            # Only set the ICC if the previous sub slot's deficit is 4 or less
                            icc_challenge_hash = finished_ss.infused_challenge_chain.get_hash()
                            icc_iters_committed = prev_b.sub_slot_iters
                            icc_iters_proof = icc_iters_committed
                            icc_vdf_input = ClassgroupElement.get_default_element()

                # 2e. Validate that there is no ICC iff icc_challenge_hash is None
                assert (sub_slot.infused_challenge_chain is None) == (icc_challenge_hash is None)
                if sub_slot.infused_challenge_chain is not None:
                    assert icc_vdf_input is not None
                    assert icc_iters_proof is not None
                    assert icc_challenge_hash is not None
                    assert sub_slot.proofs.infused_challenge_chain_slot_proof is not None
                    # 2f. Check infused challenge chain sub-slot VDF
                    # Only validate from prev_b to optimize
                    target_vdf_info = VDFInfo(
                        icc_challenge_hash,
                        icc_iters_proof,
                        sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf.output,
                    )
                    if sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf != dataclasses.replace(
                            target_vdf_info,
                            number_of_iterations=icc_iters_committed,
                    ):
                        return None, ValidationError(Err.INVALID_ICC_EOS_VDF)
                    if not skip_vdf_is_valid and not sub_slot.proofs.infused_challenge_chain_slot_proof.is_valid(
                            constants, icc_vdf_input, target_vdf_info, None):
                        return None, ValidationError(Err.INVALID_ICC_EOS_VDF)

                    if sub_slot.reward_chain.deficit == constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                        # 2g. Check infused challenge sub-slot hash in challenge chain, deficit 16
                        if (sub_slot.infused_challenge_chain.get_hash() !=
                                sub_slot.challenge_chain.infused_challenge_chain_sub_slot_hash):
                            return None, ValidationError(
                                Err.INVALID_ICC_HASH_CC)
                    else:
                        # 2h. Check infused challenge sub-slot hash not included for other deficits
                        if sub_slot.challenge_chain.infused_challenge_chain_sub_slot_hash is not None:
                            return None, ValidationError(
                                Err.INVALID_ICC_HASH_CC)

                    # 2i. Check infused challenge sub-slot hash in reward sub-slot
                    if (sub_slot.infused_challenge_chain.get_hash() !=
                            sub_slot.reward_chain.infused_challenge_chain_sub_slot_hash):
                        return None, ValidationError(Err.INVALID_ICC_HASH_RC)
                else:
                    # 2j. If no icc, check that the cc doesn't include it
                    if sub_slot.challenge_chain.infused_challenge_chain_sub_slot_hash is not None:
                        return None, ValidationError(Err.INVALID_ICC_HASH_CC)

                    # 2k. If no icc, check that the rc doesn't include it
                    if sub_slot.reward_chain.infused_challenge_chain_sub_slot_hash is not None:
                        return None, ValidationError(Err.INVALID_ICC_HASH_RC)

            if sub_slot.challenge_chain.subepoch_summary_hash is not None:
                assert ses_hash is None  # Only one of the slots can have it
                ses_hash = sub_slot.challenge_chain.subepoch_summary_hash

            # 2l. check sub-epoch summary hash is None for empty slots
            if finished_sub_slot_n != 0:
                if sub_slot.challenge_chain.subepoch_summary_hash is not None:
                    return None, ValidationError(
                        Err.INVALID_SUB_EPOCH_SUMMARY_HASH)

            if can_finish_epoch and sub_slot.challenge_chain.subepoch_summary_hash is not None:
                # 2m. Check new difficulty and ssi
                if sub_slot.challenge_chain.new_sub_slot_iters != expected_sub_slot_iters:
                    return None, ValidationError(
                        Err.INVALID_NEW_SUB_SLOT_ITERS)
                if sub_slot.challenge_chain.new_difficulty != expected_difficulty:
                    return None, ValidationError(Err.INVALID_NEW_DIFFICULTY)
            else:
                # 2n. Check new difficulty and ssi are None if we don't finish epoch
                if sub_slot.challenge_chain.new_sub_slot_iters is not None:
                    return None, ValidationError(
                        Err.INVALID_NEW_SUB_SLOT_ITERS)
                if sub_slot.challenge_chain.new_difficulty is not None:
                    return None, ValidationError(Err.INVALID_NEW_DIFFICULTY)

            # 2o. Check challenge sub-slot hash in reward sub-slot
            if sub_slot.challenge_chain.get_hash() != sub_slot.reward_chain.challenge_chain_sub_slot_hash:
                return (
                    None,
                    ValidationError(
                        Err.INVALID_CHALLENGE_SLOT_HASH_RC,
                        "sub-slot hash in reward sub-slot mismatch",
                    ),
                )

            eos_vdf_iters: uint64 = expected_sub_slot_iters
            cc_start_element: ClassgroupElement = ClassgroupElement.get_default_element()
            cc_eos_vdf_challenge: bytes32 = challenge_hash
            if genesis_block:
                if finished_sub_slot_n == 0:
                    # First block, one empty slot. prior_point is the initial challenge
                    rc_eos_vdf_challenge: bytes32 = constants.GENESIS_CHALLENGE
                    cc_eos_vdf_challenge = constants.GENESIS_CHALLENGE
                else:
                    # First block, but have at least two empty slots
                    rc_eos_vdf_challenge = header_block.finished_sub_slots[
                        finished_sub_slot_n - 1].reward_chain.get_hash()
            else:
                assert prev_b is not None
                if finished_sub_slot_n == 0:
                    # No empty slots, so the starting point of VDF is the last reward block. Uses
                    # the same IPS as the previous block, since it's the same slot
                    rc_eos_vdf_challenge = prev_b.reward_infusion_new_challenge
                    eos_vdf_iters = uint64(prev_b.sub_slot_iters -
                                           prev_b.ip_iters(constants))
                    cc_start_element = prev_b.challenge_vdf_output
                else:
                    # At least one empty slot, so use previous slot hash. IPS might change because it's a new slot
                    rc_eos_vdf_challenge = header_block.finished_sub_slots[
                        finished_sub_slot_n - 1].reward_chain.get_hash()

            # 2p. Check end of reward slot VDF
            target_vdf_info = VDFInfo(
                rc_eos_vdf_challenge,
                eos_vdf_iters,
                sub_slot.reward_chain.end_of_slot_vdf.output,
            )
            if not skip_vdf_is_valid and not sub_slot.proofs.reward_chain_slot_proof.is_valid(
                    constants,
                    ClassgroupElement.get_default_element(),
                    sub_slot.reward_chain.end_of_slot_vdf,
                    target_vdf_info,
            ):
                return None, ValidationError(Err.INVALID_RC_EOS_VDF)

            # 2q. Check challenge chain sub-slot VDF
            partial_cc_vdf_info = VDFInfo(
                cc_eos_vdf_challenge,
                eos_vdf_iters,
                sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf.output,
            )
            if genesis_block:
                cc_eos_vdf_info_iters = constants.SUB_SLOT_ITERS_STARTING
            else:
                assert prev_b is not None
                if finished_sub_slot_n == 0:
                    cc_eos_vdf_info_iters = prev_b.sub_slot_iters
                else:
                    cc_eos_vdf_info_iters = expected_sub_slot_iters
            # Check that the modified data is correct
            if sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf != dataclasses.replace(
                    partial_cc_vdf_info,
                    number_of_iterations=cc_eos_vdf_info_iters,
            ):
                return None, ValidationError(
                    Err.INVALID_CC_EOS_VDF,
                    "wrong challenge chain end of slot vdf")

            # Pass in None for target info since we are only checking the proof from the temporary point,
            # but the challenge_chain_end_of_slot_vdf actually starts from the start of slot (for light clients)
            if not skip_vdf_is_valid and not sub_slot.proofs.challenge_chain_slot_proof.is_valid(
                    constants, cc_start_element, partial_cc_vdf_info, None):
                return None, ValidationError(Err.INVALID_CC_EOS_VDF)

            if genesis_block:
                # 2r. Check deficit (MIN_SUB.. deficit edge case for genesis block)
                if sub_slot.reward_chain.deficit != constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                    return (
                        None,
                        ValidationError(
                            Err.INVALID_DEFICIT,
                            f"genesis, expected deficit {constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK}",
                        ),
                    )
            else:
                assert prev_b is not None
                if prev_b.deficit == 0:
                    # 2s. If prev sb had deficit 0, resets deficit to MIN_BLOCKS_PER_CHALLENGE_BLOCK
                    if sub_slot.reward_chain.deficit != constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                        log.error(constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK)
                        return (
                            None,
                            ValidationError(
                                Err.INVALID_DEFICIT,
                                f"expected deficit {constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK}, saw "
                                f"{sub_slot.reward_chain.deficit}",
                            ),
                        )
                else:
                    # 2t. Otherwise, deficit stays the same at the slot ends, cannot reset until 0
                    if sub_slot.reward_chain.deficit != prev_b.deficit:
                        return None, ValidationError(
                            Err.INVALID_DEFICIT,
                            "deficit is wrong at slot end")

        # 3. Check sub-epoch summary
        # Note that the subepoch summary is the summary of the previous subepoch (not the one that just finished)
        if not skip_overflow_last_ss_validation:
            if ses_hash is not None:
                # 3a. Check that genesis block does not have sub-epoch summary
                if genesis_block:
                    return (
                        None,
                        ValidationError(
                            Err.INVALID_SUB_EPOCH_SUMMARY_HASH,
                            "genesis with sub-epoch-summary hash",
                        ),
                    )
                assert prev_b is not None

                # 3b. Check that we finished a slot and we finished a sub-epoch
                if not new_sub_slot or not can_finish_se:
                    return (
                        None,
                        ValidationError(
                            Err.INVALID_SUB_EPOCH_SUMMARY_HASH,
                            f"new sub-slot: {new_sub_slot} finishes sub-epoch {can_finish_se}",
                        ),
                    )

                # 3c. Check the actual sub-epoch is correct
                expected_sub_epoch_summary = make_sub_epoch_summary(
                    constants,
                    blocks,
                    height,
                    blocks.block_record(prev_b.prev_hash),
                    expected_difficulty if can_finish_epoch else None,
                    expected_sub_slot_iters if can_finish_epoch else None,
                )
                expected_hash = expected_sub_epoch_summary.get_hash()
                if expected_hash != ses_hash:
                    log.error(f"{expected_sub_epoch_summary}")
                    return (
                        None,
                        ValidationError(
                            Err.INVALID_SUB_EPOCH_SUMMARY,
                            f"expected ses hash: {expected_hash} got {ses_hash} ",
                        ),
                    )
            elif new_sub_slot and not genesis_block:
                # 3d. Check that we don't have to include a sub-epoch summary
                if can_finish_se or can_finish_epoch:
                    return (
                        None,
                        ValidationError(
                            Err.INVALID_SUB_EPOCH_SUMMARY,
                            "block finishes sub-epoch but ses-hash is None",
                        ),
                    )

    # 4. Check if the number of blocks is less than the max
    if not new_sub_slot and not genesis_block:
        assert prev_b is not None
        num_blocks = 2  # This includes the current block and the prev block
        curr = prev_b
        while not curr.first_in_sub_slot:
            num_blocks += 1
            curr = blocks.block_record(curr.prev_hash)
        if num_blocks > constants.MAX_SUB_SLOT_BLOCKS:
            return None, ValidationError(Err.TOO_MANY_BLOCKS)

    # If block state is correct, we should always find a challenge here
    # This computes what the challenge should be for this block

    challenge = get_block_challenge(
        constants,
        header_block,
        blocks,
        genesis_block,
        overflow,
        skip_overflow_last_ss_validation,
    )

    # 5a. Check that the challenge matches the proof of space challenge
    if challenge != header_block.reward_chain_block.pos_ss_cc_challenge_hash:
        log.error(f"Finished slots: {header_block.finished_sub_slots}")
        log.error(
            f"Data: {genesis_block} {overflow} {skip_overflow_last_ss_validation} {header_block.total_iters} "
            f"{header_block.reward_chain_block.signage_point_index}"
            f"Prev: {prev_b}")
        log.error(
            f"Challenge {challenge} provided {header_block.reward_chain_block.pos_ss_cc_challenge_hash}"
        )
        return None, ValidationError(Err.INVALID_CC_CHALLENGE)

    # 5b. Check proof of space
    if header_block.reward_chain_block.challenge_chain_sp_vdf is None:
        # Edge case of first sp (start of slot), where sp_iters == 0
        cc_sp_hash: bytes32 = challenge
    else:
        cc_sp_hash = header_block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()

    q_str: Optional[bytes32] = header_block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
        constants, challenge, cc_sp_hash
    )
    if q_str is None:
        return None, ValidationError(Err.INVALID_POSPACE)

    # 6. check signage point index
    # no need to check negative values as this is uint 8
    if header_block.reward_chain_block.signage_point_index >= constants.NUM_SPS_SUB_SLOT:
        return None, ValidationError(Err.INVALID_SP_INDEX)

    # Note that required iters might be from the previous slot (if we are in an overflow block)
    required_iters: uint64 = calculate_iterations_quality(
        constants.DIFFICULTY_CONSTANT_FACTOR,
        q_str,
        header_block.reward_chain_block.proof_of_space.size,
        expected_difficulty,
        cc_sp_hash,
    )

    # 7. check signage point index
    # no need to check negative values as this is uint8. (Assumes types are checked)
    if header_block.reward_chain_block.signage_point_index >= constants.NUM_SPS_SUB_SLOT:
        return None, ValidationError(Err.INVALID_SP_INDEX)

    # 8a. check signage point index 0 has no cc sp
    if (header_block.reward_chain_block.signage_point_index == 0) != (
            header_block.reward_chain_block.challenge_chain_sp_vdf is None):
        return None, ValidationError(Err.INVALID_SP_INDEX)

    # 8b. check signage point index 0 has no rc sp
    if (header_block.reward_chain_block.signage_point_index == 0) != (
            header_block.reward_chain_block.reward_chain_sp_vdf is None):
        return None, ValidationError(Err.INVALID_SP_INDEX)

    sp_iters: uint64 = calculate_sp_iters(
        constants,
        expected_sub_slot_iters,
        header_block.reward_chain_block.signage_point_index,
    )

    ip_iters: uint64 = calculate_ip_iters(
        constants,
        expected_sub_slot_iters,
        header_block.reward_chain_block.signage_point_index,
        required_iters,
    )
    if header_block.reward_chain_block.challenge_chain_sp_vdf is None:
        # Blocks with very low required iters are not overflow blocks
        assert not overflow

    # 9. Check that there are no overflow blocks in the first sub-slot of a new epoch
    # (they are allowed in the second sub-slot)
    if overflow and can_finish_epoch:
        if finished_sub_slots_since_prev < 2:
            return None, ValidationError(
                Err.NO_OVERFLOWS_IN_FIRST_SUB_SLOT_NEW_EPOCH)

    # 10. Check total iters
    if genesis_block:
        total_iters: uint128 = uint128(expected_sub_slot_iters *
                                       finished_sub_slots_since_prev)
    else:
        assert prev_b is not None
        if new_sub_slot:
            total_iters = prev_b.total_iters
            # Add the rest of the slot of prev_b
            total_iters = uint128(total_iters + prev_b.sub_slot_iters -
                                  prev_b.ip_iters(constants))
            # Add other empty slots
            total_iters = uint128(total_iters +
                                  (expected_sub_slot_iters *
                                   (finished_sub_slots_since_prev - 1)))
        else:
            # Slot iters is guaranteed to be the same for header_block and prev_b
            # This takes the beginning of the slot, and adds ip_iters
            total_iters = uint128(prev_b.total_iters -
                                  prev_b.ip_iters(constants))
    total_iters = uint128(total_iters + ip_iters)
    if total_iters != header_block.reward_chain_block.total_iters:
        return (
            None,
            ValidationError(
                Err.INVALID_TOTAL_ITERS,
                f"expected {total_iters} got {header_block.reward_chain_block.total_iters}",
            ),
        )

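    # Total iters at this block's signage point: from the infusion total_iters, step back ip_iters to the
    # start of the sub-slot and forward sp_iters; overflow blocks subtract one full sub-slot, since their
    # signage point lies in the previous sub-slot.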
    sp_total_iters: uint128 = uint128(total_iters - ip_iters + sp_iters - (
        expected_sub_slot_iters if overflow else 0))
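    # When validation of the last (overflow) sub-slot is skipped, append a dummy end-of-sub-slot bundle with
    # zeroed VDF info so get_signage_point_vdf_info still sees the expected number of finished sub-slots.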
    if overflow and skip_overflow_last_ss_validation:
        dummy_vdf_info = VDFInfo(
            bytes32([0] * 32),
            uint64(1),
            ClassgroupElement.get_default_element(),
        )
        dummy_sub_slot = EndOfSubSlotBundle(
            ChallengeChainSubSlot(dummy_vdf_info, None, None, None, None),
            None,
            RewardChainSubSlot(dummy_vdf_info, bytes32([0] * 32), None,
                               uint8(0)),
            SubSlotProofs(VDFProof(uint8(0), b""), None,
                          VDFProof(uint8(0), b"")),
        )
        sub_slots_to_pass_in = header_block.finished_sub_slots + [
            dummy_sub_slot
        ]
    else:
        sub_slots_to_pass_in = header_block.finished_sub_slots
    (
        cc_vdf_challenge,
        rc_vdf_challenge,
        cc_vdf_input,
        rc_vdf_input,
        cc_vdf_iters,
        rc_vdf_iters,
    ) = get_signage_point_vdf_info(
        constants,
        sub_slots_to_pass_in,
        overflow,
        prev_b,
        blocks,
        sp_total_iters,
        sp_iters,
    )

    # 11. Check reward chain sp proof
    if sp_iters != 0:
        assert (header_block.reward_chain_block.reward_chain_sp_vdf is not None
                and header_block.reward_chain_sp_proof is not None)
        target_vdf_info = VDFInfo(
            rc_vdf_challenge,
            rc_vdf_iters,
            header_block.reward_chain_block.reward_chain_sp_vdf.output,
        )
        if not skip_vdf_is_valid and not header_block.reward_chain_sp_proof.is_valid(
                constants,
                rc_vdf_input,
                header_block.reward_chain_block.reward_chain_sp_vdf,
                target_vdf_info,
        ):
            return None, ValidationError(Err.INVALID_RC_SP_VDF)
        rc_sp_hash = header_block.reward_chain_block.reward_chain_sp_vdf.output.get_hash(
        )
    else:
        # Edge case of first sp (start of slot), where sp_iters == 0
        assert overflow is not None
        if header_block.reward_chain_block.reward_chain_sp_vdf is not None:
            return None, ValidationError(Err.INVALID_RC_SP_VDF)
        if new_sub_slot:
            rc_sp_hash = header_block.finished_sub_slots[
                -1].reward_chain.get_hash()
        else:
            if genesis_block:
                rc_sp_hash = constants.GENESIS_CHALLENGE
            else:
                assert prev_b is not None
                curr = prev_b
                while not curr.first_in_sub_slot:
                    curr = blocks.block_record(curr.prev_hash)
                assert curr.finished_reward_slot_hashes is not None
                rc_sp_hash = curr.finished_reward_slot_hashes[-1]

    # 12. Check reward chain sp signature
    if not AugSchemeMPL.verify(
            header_block.reward_chain_block.proof_of_space.plot_public_key,
            rc_sp_hash,
            header_block.reward_chain_block.reward_chain_sp_signature,
    ):
        return None, ValidationError(Err.INVALID_RC_SIGNATURE)

    # 13. Check cc sp vdf
    if sp_iters != 0:
        assert header_block.reward_chain_block.challenge_chain_sp_vdf is not None
        assert header_block.challenge_chain_sp_proof is not None
        target_vdf_info = VDFInfo(
            cc_vdf_challenge,
            cc_vdf_iters,
            header_block.reward_chain_block.challenge_chain_sp_vdf.output,
        )

        if header_block.reward_chain_block.challenge_chain_sp_vdf != dataclasses.replace(
                target_vdf_info,
                number_of_iterations=sp_iters,
        ):
            return None, ValidationError(Err.INVALID_CC_SP_VDF)
        if not skip_vdf_is_valid and not header_block.challenge_chain_sp_proof.is_valid(
                constants, cc_vdf_input, target_vdf_info, None):
            return None, ValidationError(Err.INVALID_CC_SP_VDF)
    else:
        assert overflow is not None
        if header_block.reward_chain_block.challenge_chain_sp_vdf is not None:
            return None, ValidationError(Err.INVALID_CC_SP_VDF)

    # 14. Check cc sp sig
    if not AugSchemeMPL.verify(
            header_block.reward_chain_block.proof_of_space.plot_public_key,
            cc_sp_hash,
            header_block.reward_chain_block.challenge_chain_sp_signature,
    ):
        return None, ValidationError(Err.INVALID_CC_SIGNATURE,
                                     "invalid cc sp sig")

    # 15. Check is_transaction_block
    if genesis_block:
        if header_block.foliage.foliage_transaction_block_hash is None:
            return None, ValidationError(Err.INVALID_IS_TRANSACTION_BLOCK,
                                         "invalid genesis")
    else:
        assert prev_b is not None
        # Finds the previous block
        curr = prev_b
        while not curr.is_transaction_block:
            curr = blocks.block_record(curr.prev_hash)

        # The first block whose signage point total iters exceed the last tx block's infusion iters is a tx block
        if overflow:
            our_sp_total_iters: uint128 = uint128(total_iters - ip_iters +
                                                  sp_iters -
                                                  expected_sub_slot_iters)
        else:
            our_sp_total_iters = uint128(total_iters - ip_iters + sp_iters)
        if (our_sp_total_iters > curr.total_iters) != (
                header_block.foliage.foliage_transaction_block_hash
                is not None):
            return None, ValidationError(Err.INVALID_IS_TRANSACTION_BLOCK)
        if (our_sp_total_iters > curr.total_iters) != (
                header_block.foliage.foliage_transaction_block_signature
                is not None):
            return None, ValidationError(Err.INVALID_IS_TRANSACTION_BLOCK)

    # 16. Check foliage block signature by plot key
    if not AugSchemeMPL.verify(
            header_block.reward_chain_block.proof_of_space.plot_public_key,
            header_block.foliage.foliage_block_data.get_hash(),
            header_block.foliage.foliage_block_data_signature,
    ):
        return None, ValidationError(Err.INVALID_PLOT_SIGNATURE)

    # 17. Check foliage transaction block signature by plot key
    if header_block.foliage.foliage_transaction_block_hash is not None:
        if not AugSchemeMPL.verify(
                header_block.reward_chain_block.proof_of_space.plot_public_key,
                header_block.foliage.foliage_transaction_block_hash,
                header_block.foliage.foliage_transaction_block_signature,
        ):
            return None, ValidationError(Err.INVALID_PLOT_SIGNATURE)

    # 18. Check unfinished reward chain block hash
    if (header_block.reward_chain_block.get_hash() != header_block.foliage.
            foliage_block_data.unfinished_reward_block_hash):
        return None, ValidationError(Err.INVALID_URSB_HASH)

    # 19. Check pool target max height
    if (header_block.foliage.foliage_block_data.pool_target.max_height != 0
            and header_block.foliage.foliage_block_data.pool_target.max_height
            < height):
        return None, ValidationError(Err.OLD_POOL_TARGET)

    # 20a. Check pre-farm puzzle hashes for genesis block.
    if genesis_block:
        if (header_block.foliage.foliage_block_data.pool_target.puzzle_hash !=
                constants.GENESIS_PRE_FARM_POOL_PUZZLE_HASH):
            log.error(
                f"Pool target {header_block.foliage.foliage_block_data.pool_target} hb {header_block}"
            )
            return None, ValidationError(Err.INVALID_PREFARM)
        if (header_block.foliage.foliage_block_data.farmer_reward_puzzle_hash
                != constants.GENESIS_PRE_FARM_FARMER_PUZZLE_HASH):
            return None, ValidationError(Err.INVALID_PREFARM)
    else:
        # 20b. If pospace has a pool pk, check pool target signature. Should not check this for genesis block.
        if header_block.reward_chain_block.proof_of_space.pool_public_key is not None:
            assert header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash is None
            if not AugSchemeMPL.verify(
                    header_block.reward_chain_block.proof_of_space.
                    pool_public_key,
                    bytes(header_block.foliage.foliage_block_data.pool_target),
                    header_block.foliage.foliage_block_data.pool_signature,
            ):
                return None, ValidationError(Err.INVALID_POOL_SIGNATURE)
        else:
            # 20c. Otherwise, the plot is associated with a contract puzzle hash, not a public key
            assert header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash is not None
            if (header_block.foliage.foliage_block_data.pool_target.puzzle_hash
                    != header_block.reward_chain_block.proof_of_space.
                    pool_contract_puzzle_hash):
                return None, ValidationError(Err.INVALID_POOL_TARGET)

    # 21. Check extension data if applicable. None for mainnet.
    # 22. Check if foliage block is present
    if (header_block.foliage.foliage_transaction_block_hash
            is not None) != (header_block.foliage_transaction_block
                             is not None):
        return None, ValidationError(Err.INVALID_FOLIAGE_BLOCK_PRESENCE)

    if (header_block.foliage.foliage_transaction_block_signature
            is not None) != (header_block.foliage_transaction_block
                             is not None):
        return None, ValidationError(Err.INVALID_FOLIAGE_BLOCK_PRESENCE)

    if header_block.foliage_transaction_block is not None:
        # 23. Check foliage block hash
        if header_block.foliage_transaction_block.get_hash(
        ) != header_block.foliage.foliage_transaction_block_hash:
            return None, ValidationError(Err.INVALID_FOLIAGE_BLOCK_HASH)

        if genesis_block:
            # 24a. Check prev block hash for genesis
            if header_block.foliage_transaction_block.prev_transaction_block_hash != constants.GENESIS_CHALLENGE:
                return None, ValidationError(Err.INVALID_PREV_BLOCK_HASH)
        else:
            assert prev_b is not None
            # 24b. Check prev block hash for non-genesis
            curr_b: BlockRecord = prev_b
            while not curr_b.is_transaction_block:
                curr_b = blocks.block_record(curr_b.prev_hash)
            if not header_block.foliage_transaction_block.prev_transaction_block_hash == curr_b.header_hash:
                log.error(
                    f"Prev BH: {header_block.foliage_transaction_block.prev_transaction_block_hash} "
                    f"{curr_b.header_hash} curr sb: {curr_b}")
                return None, ValidationError(Err.INVALID_PREV_BLOCK_HASH)

        # 25. The filter hash in the Foliage Block must be the hash of the filter
        if check_filter:
            if header_block.foliage_transaction_block.filter_hash != std_hash(
                    header_block.transactions_filter):
                return None, ValidationError(
                    Err.INVALID_TRANSACTIONS_FILTER_HASH)

        # 26. The timestamp in Foliage Block must comply with the timestamp rules
        if prev_b is not None:
            last_timestamps: List[uint64] = []
            curr_b = blocks.block_record(
                header_block.foliage_transaction_block.
                prev_transaction_block_hash)
            assert curr_b.timestamp is not None
            while len(last_timestamps) < constants.NUMBER_OF_TIMESTAMPS:
                last_timestamps.append(curr_b.timestamp)
                fetched: Optional[BlockRecord] = blocks.try_block_record(
                    curr_b.prev_transaction_block_hash)
                if not fetched:
                    break
                curr_b = fetched
            if len(last_timestamps) != constants.NUMBER_OF_TIMESTAMPS:
                # For blocks 1 to 10, average timestamps of all previous blocks
                assert curr_b.height == 0
            prev_time: uint64 = uint64(
                int(sum(last_timestamps) // len(last_timestamps)))
            if header_block.foliage_transaction_block.timestamp <= prev_time:
                return None, ValidationError(Err.TIMESTAMP_TOO_FAR_IN_PAST)
            if header_block.foliage_transaction_block.timestamp > int(
                    time.time() + constants.MAX_FUTURE_TIME):
                return None, ValidationError(Err.TIMESTAMP_TOO_FAR_IN_FUTURE)

    return required_iters, None  # Valid unfinished header block
Example #14
0
    async def respond_proof_of_space(
        self, response: harvester_protocol.RespondProofOfSpace
    ):
        """
        This is a response from the harvester with a proof of space. We check its validity,
        and request a pool partial, a header signature, or both, if the proof is good enough.
        """

        challenge_hash: bytes32 = response.proof.challenge_hash
        challenge_weight: uint128 = self.challenge_to_weight[challenge_hash]
        difficulty: uint64 = uint64(0)
        for posf in self.challenges[challenge_weight]:
            if posf.challenge_hash == challenge_hash:
                difficulty = posf.difficulty
        if difficulty == 0:
            raise RuntimeError("Did not find challenge")

        computed_quality_string = response.proof.verify_and_get_quality_string(
            self.constants.NUMBER_ZERO_BITS_CHALLENGE_SIG
        )
        if computed_quality_string is None:
            raise RuntimeError("Invalid proof of space")

        self.harvester_responses_proofs[
            (response.proof.challenge_hash, response.plot_id, response.response_number)
        ] = response.proof
        self.harvester_responses_proof_hash_to_info[response.proof.get_hash()] = (
            response.proof.challenge_hash,
            response.plot_id,
            response.response_number,
        )

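        # Minimum-iterations estimate: the timelord's estimated iterations per second times the target block
        # time, scaled down by MIN_ITERS_PROPORTION.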
        estimate_min = (
            self.proof_of_time_estimate_ips
            * self.constants.BLOCK_TIME_TARGET
            / self.constants.MIN_ITERS_PROPORTION
        )
        estimate_min = uint64(int(estimate_min))
        number_iters: uint64 = calculate_iterations_quality(
            computed_quality_string,
            response.proof.size,
            difficulty,
            estimate_min,
        )
        estimate_secs: float = number_iters / self.proof_of_time_estimate_ips
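        # estimate_secs is how long the timelord would need to reach number_iters at the estimated ips; it is
        # compared against the farmer's thresholds below to decide whether to pool-share (not yet implemented
        # here) and/or propagate the proof to the full node.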

        if estimate_secs < self.config["pool_share_threshold"]:
            # TODO: implement pooling
            pass
        if estimate_secs < self.config["propagate_threshold"]:
            pool_pk = bytes(response.proof.pool_public_key)
            if pool_pk not in self.pool_sks_map:
                log.error(
                    f"Don't have the private key for the pool key used by harvester: {pool_pk.hex()}"
                )
                return
            pool_target: PoolTarget = PoolTarget(self.pool_target, uint32(0))
            pool_target_signature: G2Element = AugSchemeMPL.sign(
                self.pool_sks_map[pool_pk], bytes(pool_target)
            )

            request2 = farmer_protocol.RequestHeaderHash(
                challenge_hash,
                response.proof,
                pool_target,
                pool_target_signature,
                self.wallet_target,
            )

            yield OutboundMessage(
                NodeType.FULL_NODE,
                Message("request_header_hash", request2),
                Delivery.BROADCAST,
            )
async def validate_unfinished_block_header(
    constants: ConsensusConstants,
    headers: Dict[bytes32, Header],
    height_to_hash: Dict[uint32, bytes32],
    block_header: Header,
    proof_of_space: ProofOfSpace,
    prev_header_block: Optional[HeaderBlock],
    pre_validated: bool = False,
    pos_quality_string: Optional[bytes32] = None,
) -> Tuple[Optional[Err], Optional[uint64]]:
    """
    Block validation algorithm. Returns the number of VDF iterations that this block's
    proof of time must have, if the candidate block is fully valid (except for proof of
    time). The same as validate_block, but without proof of time and challenge validation.
    If the block is invalid, an error code is returned.

    Does NOT validate transactions and fees.
    """
    if not pre_validated:
        # 1. The hash of the proof of space must match header_data.proof_of_space_hash
        if proof_of_space.get_hash() != block_header.data.proof_of_space_hash:
            return (Err.INVALID_POSPACE_HASH, None)

        # 2. The coinbase signature must be valid, according to the pool public key
        # TODO: change numbers

        # 3. Check harvester signature of header data is valid based on harvester key
        validates = blspy.AugSchemeMPL.verify(
            proof_of_space.plot_public_key,
            block_header.data.get_hash(),
            block_header.plot_signature,
        )
        if not validates:
            return (Err.INVALID_PLOT_SIGNATURE, None)

    # 4. If not genesis, the previous block must exist
    if prev_header_block is not None and block_header.prev_header_hash not in headers:
        return (Err.DOES_NOT_EXTEND, None)

    # 5. If not genesis, the timestamp must be >= the average timestamp of last 11 blocks
    # and less than 2 hours in the future (if block height < 11, average all previous blocks).
    # The average is the sum, integer-divided by the number of timestamps
    if prev_header_block is not None:
        last_timestamps: List[uint64] = []
        curr = prev_header_block.header
        while len(last_timestamps) < constants.NUMBER_OF_TIMESTAMPS:
            last_timestamps.append(curr.data.timestamp)
            fetched = headers.get(curr.prev_header_hash, None)
            if not fetched:
                break
            curr = fetched
        if len(last_timestamps) != constants.NUMBER_OF_TIMESTAMPS:
            # For blocks 1 to 10, average timestamps of all previous blocks
            assert curr.height == 0
        prev_time: uint64 = uint64(
            int(sum(last_timestamps) // len(last_timestamps)))
        if block_header.data.timestamp < prev_time:
            return (Err.TIMESTAMP_TOO_FAR_IN_PAST, None)
        if block_header.data.timestamp > time.time(
        ) + constants.MAX_FUTURE_TIME:
            return (Err.TIMESTAMP_TOO_FAR_IN_FUTURE, None)

    # 7. Extension data must be valid, if any is present

    # Compute challenge of parent
    challenge_hash: bytes32
    if prev_header_block is not None:
        challenge: Challenge = prev_header_block.challenge
        challenge_hash = challenge.get_hash()
        # 8. Check challenge hash of prev is the same as in pos
        if challenge_hash != proof_of_space.challenge_hash:
            return (Err.INVALID_POSPACE_CHALLENGE, None)

    # 10. The proof of space must be valid on the challenge
    if pos_quality_string is None:
        pos_quality_string = proof_of_space.verify_and_get_quality_string(
            constants.NUMBER_ZERO_BITS_CHALLENGE_SIG)
        if not pos_quality_string:
            return (Err.INVALID_POSPACE, None)

    if prev_header_block is not None:
        # 11. If not genesis, the height on the previous block must be one less than on this block
        if block_header.height != prev_header_block.height + 1:
            return (Err.INVALID_HEIGHT, None)
    else:
        # 12. If genesis, the height must be 0
        if block_header.height != 0:
            return (Err.INVALID_HEIGHT, None)

    # 13. The pool max height must be valid
    if (block_header.data.pool_target.max_height != 0 and
            block_header.data.pool_target.max_height < block_header.height):
        return (Err.INVALID_POOL_TARGET, None)

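    # Difficulty and min_iters come from the previous block, or from the starting constants for genesis.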
    difficulty: uint64
    if prev_header_block is not None:
        difficulty = get_next_difficulty(constants, headers, height_to_hash,
                                         prev_header_block.header)
        min_iters = get_next_min_iters(constants, headers, height_to_hash,
                                       prev_header_block)
    else:
        difficulty = uint64(constants.DIFFICULTY_STARTING)
        min_iters = uint64(constants.MIN_ITERS_STARTING)

    number_of_iters: uint64 = calculate_iterations_quality(
        pos_quality_string,
        proof_of_space.size,
        difficulty,
        min_iters,
    )

    assert count_significant_bits(difficulty) <= constants.SIGNIFICANT_BITS
    assert count_significant_bits(min_iters) <= constants.SIGNIFICANT_BITS

    if prev_header_block is not None:
        # 17. If not genesis, the total weight must be the parent weight + difficulty
        if block_header.weight != prev_header_block.weight + difficulty:
            return (Err.INVALID_WEIGHT, None)

        # 18. If not genesis, the total iters must be parent iters + number_iters
        if (block_header.data.total_iters !=
                prev_header_block.header.data.total_iters + number_of_iters):
            return (Err.INVALID_TOTAL_ITERS, None)
    else:
        # 19. If genesis, the total weight must be starting difficulty
        if block_header.weight != difficulty:
            return (Err.INVALID_WEIGHT, None)

        # 20. If genesis, the total iters must be number iters
        if block_header.data.total_iters != number_of_iters:
            return (Err.INVALID_TOTAL_ITERS, None)

    return (None, number_of_iters)
Example #16
0
    async def validate_block(
        self,
        block: FullBlock,
        genesis: bool = False,
        pre_validated: bool = False,
        pos_quality: Optional[bytes32] = None,
    ) -> bool:
        """
        Block validation algorithm. Returns true iff the candidate block is fully valid,
        and extends something in the blockchain.
        """
        # 1. Validate unfinished block (check the rest of the conditions)
        if not (
            await self.validate_unfinished_block(
                block, genesis, pre_validated, pos_quality
            )
        ):
            return False

        difficulty: uint64
        ips: uint64
        if not genesis:
            difficulty = self.get_next_difficulty(block.prev_header_hash)
            ips = self.get_next_ips(block.prev_header_hash)
        else:
            difficulty = uint64(self.constants["DIFFICULTY_STARTING"])
            ips = uint64(self.constants["VDF_IPS_STARTING"])

        # 2. Check proof of space hash
        if not pre_validated:
            if not block.header_block.challenge or not block.header_block.proof_of_time:
                return False
            if (
                block.header_block.proof_of_space.get_hash()
                != block.header_block.challenge.proof_of_space_hash
            ):
                return False

        # 3. Check number of iterations on PoT is correct, based on prev block and PoS
        if pos_quality is None:
            pos_quality = block.header_block.proof_of_space.verify_and_get_quality()

        if pos_quality is None:
            return False

        number_of_iters: uint64 = calculate_iterations_quality(
            pos_quality,
            block.header_block.proof_of_space.size,
            difficulty,
            ips,
            self.constants["MIN_BLOCK_TIME"],
        )

        if block.header_block.proof_of_time is None:
            return False

        if number_of_iters != block.header_block.proof_of_time.number_of_iterations:
            return False

        # 4. Check PoT
        if not pre_validated:
            if not block.header_block.proof_of_time.is_valid(
                self.constants["DISCRIMINANT_SIZE_BITS"]
            ):
                return False

        if block.header_block.challenge is None:
            return False

        if block.body.coinbase.height != block.header_block.challenge.height:
            return False

        if not genesis:
            prev_block: Optional[HeaderBlock] = self.header_blocks.get(
                block.prev_header_hash, None
            )
            if not prev_block or not prev_block.challenge:
                return False

            # 5. and check if PoT.challenge_hash matches
            if (
                block.header_block.proof_of_time.challenge_hash
                != prev_block.challenge.get_hash()
            ):
                return False

            # 6a. Check challenge height = parent height + 1
            if block.header_block.challenge.height != prev_block.challenge.height + 1:
                return False

            # 7a. Check challenge total_weight = parent total_weight + difficulty
            if (
                block.header_block.challenge.total_weight
                != prev_block.challenge.total_weight + difficulty
            ):
                return False

            # 8a. Check challenge total_iters = parent total_iters + number_iters
            if (
                block.header_block.challenge.total_iters
                != prev_block.challenge.total_iters + number_of_iters
            ):
                return False
        else:
            # 6b. Check challenge height = parent height + 1
            if block.header_block.challenge.height != 0:
                return False

            # 7b. Check challenge total_weight = parent total_weight + difficulty
            if block.header_block.challenge.total_weight != difficulty:
                return False

            # 8b. Check challenge total_iters = parent total_iters + number_iters
            if block.header_block.challenge.total_iters != number_of_iters:
                return False

        return True
Example #17
0
    async def respond_proof_of_space(
            self, response: harvester_protocol.RespondProofOfSpace):
        """
        This is a response from the harvester with a proof of space. We check its validity,
        and request a pool partial, a header signature, or both, if the proof is good enough.
        """

        if response.proof.pool_pubkey not in self.pool_public_keys:
            raise RuntimeError("Pool pubkey not in list of approved keys")

        challenge_hash: bytes32 = self.harvester_responses_challenge[
            response.quality_string]
        challenge_weight: uint128 = self.challenge_to_weight[challenge_hash]
        challenge_height: uint32 = self.challenge_to_height[challenge_hash]
        new_proof_height: uint32 = uint32(challenge_height + 1)
        difficulty: uint64 = uint64(0)
        for posf in self.challenges[challenge_weight]:
            if posf.challenge_hash == challenge_hash:
                difficulty = posf.difficulty
        if difficulty == 0:
            raise RuntimeError("Did not find challenge")

        computed_quality_string = response.proof.verify_and_get_quality_string(
        )
        if response.quality_string != computed_quality_string:
            raise RuntimeError("Invalid quality for proof of space")

        self.harvester_responses_proofs[
            response.quality_string] = response.proof
        self.harvester_responses_proof_hash_to_qual[
            response.proof.get_hash()] = response.quality_string

        estimate_min = (self.proof_of_time_estimate_ips *
                        self.constants["BLOCK_TIME_TARGET"] /
                        self.constants["MIN_ITERS_PROPORTION"])
        number_iters: uint64 = calculate_iterations_quality(
            computed_quality_string,
            response.proof.size,
            difficulty,
            estimate_min,
        )
        estimate_secs: float = number_iters / self.proof_of_time_estimate_ips

        if estimate_secs < self.config["pool_share_threshold"]:
            request1 = harvester_protocol.RequestPartialProof(
                response.quality_string,
                self.wallet_target,
            )
            yield OutboundMessage(
                NodeType.HARVESTER,
                Message("request_partial_proof", request1),
                Delivery.RESPOND,
            )
        if estimate_secs < self.config["propagate_threshold"]:
            pool_pk = bytes(response.proof.pool_pubkey)
            if pool_pk not in self.pool_sks_map:
                log.error(
                    f"Don't have the private key for the pool key used by harvester: {pool_pk.hex()}"
                )
                return
            sk = self.pool_sks_map[pool_pk]
            coinbase_reward = uint64(
                calculate_block_reward(uint32(new_proof_height)))

            coinbase, signature = create_coinbase_coin_and_signature(
                new_proof_height,
                self.pool_target,
                coinbase_reward,
                sk,
            )

            request2 = farmer_protocol.RequestHeaderHash(
                challenge_hash,
                coinbase,
                signature,
                self.wallet_target,
                response.proof,
            )

            yield OutboundMessage(
                NodeType.FULL_NODE,
                Message("request_header_hash", request2),
                Delivery.BROADCAST,
            )
Example #18
0
    async def respond_proof_of_space(
            self, response: harvester_protocol.RespondProofOfSpace):
        """
        This is a response from the harvester with a proof of space. We check its validity,
        and request a pool partial, a header signature, or both, if the proof is good enough.
        """

        pool_sks: List[PrivateKey] = [
            PrivateKey.from_bytes(bytes.fromhex(ce))
            for ce in self.key_config["pool_sks"]
        ]
        assert response.proof.pool_pubkey in [
            sk.get_public_key() for sk in pool_sks
        ]

        challenge_hash: bytes32 = self.harvester_responses_challenge[
            response.quality]
        challenge_weight: uint64 = self.challenge_to_weight[challenge_hash]
        challenge_height: uint32 = self.challenge_to_height[challenge_hash]
        new_proof_height: uint32 = uint32(challenge_height + 1)
        difficulty: uint64 = uint64(0)
        for posf in self.challenges[challenge_weight]:
            if posf.challenge_hash == challenge_hash:
                difficulty = posf.difficulty
        if difficulty == 0:
            raise RuntimeError("Did not find challenge")

        computed_quality = response.proof.verify_and_get_quality()
        assert response.quality == computed_quality

        self.harvester_responses_proofs[response.quality] = response.proof
        self.harvester_responses_proof_hash_to_qual[
            response.proof.get_hash()] = response.quality

        number_iters: uint64 = calculate_iterations_quality(
            computed_quality,
            response.proof.size,
            difficulty,
            self.proof_of_time_estimate_ips,
            constants["MIN_BLOCK_TIME"],
        )
        estimate_secs: float = number_iters / self.proof_of_time_estimate_ips

        if estimate_secs < self.config["pool_share_threshold"]:
            request1 = harvester_protocol.RequestPartialProof(
                response.quality,
                sha256(bytes.fromhex(
                    self.key_config["farmer_target"])).digest(),
            )
            yield OutboundMessage(
                NodeType.HARVESTER,
                Message("request_partial_proof", request1),
                Delivery.RESPOND,
            )
        if estimate_secs < self.config["propagate_threshold"]:
            if new_proof_height not in self.coinbase_rewards:
                log.error(
                    f"Don't have coinbase transaction for height {new_proof_height}, cannot submit PoS"
                )
                return

            coinbase, signature = self.coinbase_rewards[new_proof_height]
            request2 = farmer_protocol.RequestHeaderHash(
                challenge_hash,
                coinbase,
                signature,
                bytes.fromhex(self.key_config["farmer_target"]),
                response.proof,
            )

            yield OutboundMessage(
                NodeType.FULL_NODE,
                Message("request_header_hash", request2),
                Delivery.BROADCAST,
            )
    async def new_proof_of_space(
            self, new_proof_of_space: harvester_protocol.NewProofOfSpace,
            peer: ws.WSChiaConnection):
        """
        This is a response from the harvester with a proof of space for a signage point. Here we check that
        the proof is valid and sufficiently good, and if so, we request the signatures for this PoSpace.
        """
        if new_proof_of_space.sp_hash not in self.farmer.number_of_responses:
            self.farmer.number_of_responses[new_proof_of_space.sp_hash] = 0
            self.farmer.cache_add_time[new_proof_of_space.sp_hash] = uint64(
                int(time.time()))

        self.farmer.state_changed("proof", {"proof": new_proof_of_space})
        max_pos_per_sp = 5
        if self.farmer.number_of_responses[
                new_proof_of_space.sp_hash] > max_pos_per_sp:
            self.farmer.log.warning(
                f"Surpassed {max_pos_per_sp} PoSpace for one SP, no longer submitting PoSpace for signage point "
                f"{new_proof_of_space.sp_hash}")
            return

        if new_proof_of_space.sp_hash not in self.farmer.sps:
            self.farmer.log.warning(
                f"Received response for a signage point that we do not have {new_proof_of_space.sp_hash}"
            )
            return

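        # Multiple signage points can be stored under the same sp_hash; verify the proof and compute the
        # required iterations against each of them.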
        sps = self.farmer.sps[new_proof_of_space.sp_hash]
        for sp in sps:
            computed_quality_string = new_proof_of_space.proof.verify_and_get_quality_string(
                self.farmer.constants,
                new_proof_of_space.challenge_hash,
                new_proof_of_space.sp_hash,
            )
            if computed_quality_string is None:
                self.farmer.log.error(
                    f"Invalid proof of space {new_proof_of_space.proof}")
                return

            self.farmer.number_of_responses[new_proof_of_space.sp_hash] += 1

            required_iters: uint64 = calculate_iterations_quality(
                computed_quality_string,
                new_proof_of_space.proof.size,
                sp.difficulty,
                new_proof_of_space.sp_hash,
            )
            # Double check that the iters are good
            assert required_iters < calculate_sp_interval_iters(
                self.farmer.constants, sp.sub_slot_iters)

            self.farmer.state_changed("proof", {"proof": new_proof_of_space})

            # Proceed at getting the signatures for this PoSpace
            request = harvester_protocol.RequestSignatures(
                new_proof_of_space.plot_identifier,
                new_proof_of_space.challenge_hash,
                new_proof_of_space.sp_hash,
                [sp.challenge_chain_sp, sp.reward_chain_sp],
            )

            if new_proof_of_space.sp_hash not in self.farmer.proofs_of_space:
                self.farmer.proofs_of_space[new_proof_of_space.sp_hash] = [(
                    new_proof_of_space.plot_identifier,
                    new_proof_of_space.proof,
                )]
            else:
                self.farmer.proofs_of_space[new_proof_of_space.sp_hash].append(
                    (
                        new_proof_of_space.plot_identifier,
                        new_proof_of_space.proof,
                    ))
            self.farmer.cache_add_time[new_proof_of_space.sp_hash] = uint64(
                int(time.time()))
            self.farmer.quality_str_to_identifiers[computed_quality_string] = (
                new_proof_of_space.plot_identifier,
                new_proof_of_space.challenge_hash,
                new_proof_of_space.sp_hash,
                peer.peer_node_id,
            )
            self.farmer.cache_add_time[computed_quality_string] = uint64(
                int(time.time()))

            return Message("request_signatures", request)
async def pre_validate_blocks_multiprocessing(
    constants: ConsensusConstants,
    constants_json: Dict,
    block_records: BlockchainInterface,
    blocks: Sequence[Union[FullBlock, HeaderBlock]],
    pool: ProcessPoolExecutor,
    validate_transactions: bool,
    check_filter: bool,
) -> Optional[List[PreValidationResult]]:
    """
    This method must be called under the blockchain lock.
    If all the full blocks pass pre-validation (only the headers are validated), returns the list of required iters.
    If any validation issue occurs, returns None or results carrying the error.

    Args:
        check_filter:
        validate_transactions:
        constants_json:
        pool:
        constants:
        block_records:
        blocks: list of full blocks to validate (must be connected to current chain)
    """
    batch_size = 4
    prev_b: Optional[BlockRecord] = None
    # Collects all the recent blocks (up to the previous sub-epoch)
    recent_blocks: Dict[bytes32, BlockRecord] = {}
    recent_blocks_compressed: Dict[bytes32, BlockRecord] = {}
    num_sub_slots_found = 0
    num_blocks_seen = 0
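    # Walk back from the first block's parent, collecting block records until the last sub-epoch summary,
    # enough timestamps, and enough finished sub-slots have been seen; these are needed by the workers to
    # validate the headers.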
    if blocks[0].height > 0:
        if not block_records.contains_block(blocks[0].prev_header_hash):
            return [
                PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value),
                                    None, None)
            ]
        curr = block_records.block_record(blocks[0].prev_header_hash)
        num_sub_slots_to_look_for = 3 if curr.overflow else 2
        while (curr.sub_epoch_summary_included is None
               or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS
               or num_sub_slots_found < num_sub_slots_to_look_for
               ) and curr.height > 0:
            if num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS or num_sub_slots_found < num_sub_slots_to_look_for:
                recent_blocks_compressed[curr.header_hash] = curr

            if curr.first_in_sub_slot:
                assert curr.finished_challenge_slot_hashes is not None
                num_sub_slots_found += len(curr.finished_challenge_slot_hashes)
            recent_blocks[curr.header_hash] = curr
            if curr.is_transaction_block:
                num_blocks_seen += 1
            curr = block_records.block_record(curr.prev_hash)
        recent_blocks[curr.header_hash] = curr
        recent_blocks_compressed[curr.header_hash] = curr
    block_record_was_present = []
    for block in blocks:
        block_record_was_present.append(
            block_records.contains_block(block.header_hash))

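    # First pass (in this process): compute the expected difficulty / sub-slot iters and required_iters for
    # each block, temporarily adding its block record so later blocks in the batch can reference it.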
    diff_ssis: List[Tuple[uint64, uint64]] = []
    for block in blocks:
        if block.height != 0 and prev_b is None:
            prev_b = block_records.block_record(block.prev_header_hash)
        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            constants,
            len(block.finished_sub_slots) > 0, prev_b, block_records)

        if block.reward_chain_block.signage_point_index >= constants.NUM_SPS_SUB_SLOT:
            log.warning(f"Block: {block.reward_chain_block}")
        overflow = is_overflow_block(
            constants, block.reward_chain_block.signage_point_index)
        challenge = get_block_challenge(constants, block,
                                        BlockCache(recent_blocks),
                                        prev_b is None, overflow, False)
        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            cc_sp_hash: bytes32 = challenge
        else:
            cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash(
            )
        q_str: Optional[
            bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
                constants, challenge, cc_sp_hash)
        if q_str is None:
            for i, block_i in enumerate(blocks):
                if not block_record_was_present[
                        i] and block_records.contains_block(
                            block_i.header_hash):
                    block_records.remove_block_record(block_i.header_hash)
            return None

        required_iters: uint64 = calculate_iterations_quality(
            constants.DIFFICULTY_CONSTANT_FACTOR,
            q_str,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp_hash,
        )

        block_rec = block_to_block_record(
            constants,
            block_records,
            required_iters,
            block,
            None,
        )
        recent_blocks[block_rec.header_hash] = block_rec
        recent_blocks_compressed[block_rec.header_hash] = block_rec
        block_records.add_block_record(
            block_rec)  # Temporarily add block to dict
        prev_b = block_rec
        diff_ssis.append((difficulty, sub_slot_iters))

    for i, block in enumerate(blocks):
        if not block_record_was_present[i]:
            block_records.remove_block_record(block.header_hash)

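    # Serialize the collected block records so they can be shipped to the worker processes.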
    recent_sb_compressed_pickled = {
        bytes(k): bytes(v)
        for k, v in recent_blocks_compressed.items()
    }

    futures = []
    # Pool of workers to validate blocks concurrently
    for i in range(0, len(blocks), batch_size):
        end_i = min(i + batch_size, len(blocks))
        blocks_to_validate = blocks[i:end_i]
        if any([
                len(block.finished_sub_slots) > 0
                for block in blocks_to_validate
        ]):
            final_pickled = {
                bytes(k): bytes(v)
                for k, v in recent_blocks.items()
            }
        else:
            final_pickled = recent_sb_compressed_pickled
        hb_pickled: List[bytes] = []
        generators: List[Optional[bytes]] = []
        for block in blocks_to_validate:
            if isinstance(block, FullBlock):
                hb_pickled.append(bytes(block.get_block_header()))
                generators.append(
                    bytes(block.transactions_generator) if block.
                    transactions_generator is not None else None)
            else:
                hb_pickled.append(bytes(block))
                generators.append(None)

        futures.append(asyncio.get_running_loop().run_in_executor(
            pool,
            batch_pre_validate_blocks,
            constants_json,
            final_pickled,
            hb_pickled,
            generators,
            check_filter,
            [diff_ssis[j][0] for j in range(i, end_i)],
            [diff_ssis[j][1] for j in range(i, end_i)],
            validate_transactions,
        ))
    # Collect all results into one flat list
    return [
        PreValidationResult.from_bytes(result)
        for batch_result in (await asyncio.gather(*futures))
        for result in batch_result
    ]
    async def validate_header_block(self, br: BlockRecord,
                                    header_block: HeaderBlock) -> bool:
        """
        Fully validates a header block. This requires the ancestors to be present in the blockchain.
        This method also validates that the header block is consistent with the block record.
        """
        # POS challenge hash == POT challenge hash == Challenge prev challenge hash
        if (header_block.proof_of_space.challenge_hash !=
                header_block.proof_of_time.challenge_hash):
            return False
        if (header_block.proof_of_space.challenge_hash !=
                header_block.challenge.prev_challenge_hash):
            return False

        if br.height > 0:
            prev_br = self.block_records[br.prev_header_hash]
            # If prev header block, check prev header block hash matches
            if prev_br.new_challenge_hash is not None:
                if (header_block.proof_of_space.challenge_hash !=
                        prev_br.new_challenge_hash):
                    return False

        # Validate PoS and get quality
        quality_str: Optional[
            bytes32] = header_block.proof_of_space.verify_and_get_quality_string(
            )
        if quality_str is None:
            return False

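        # Derive this block's difficulty from the weight delta to its parent: outside the scheduled reset
        # point it must equal the previous delta, and at the reset point it only has to stay within the
        # allowed DIFFICULTY_FACTOR range.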
        difficulty: uint64
        min_iters: uint64 = self.get_min_iters(br)
        prev_block: Optional[BlockRecord]
        if (br.height % self.constants["DIFFICULTY_EPOCH"] !=
                self.constants["DIFFICULTY_DELAY"]):
            # Only allow difficulty changes once per epoch
            if br.height > 1:
                prev_block = self.block_records[br.prev_header_hash]
                assert prev_block is not None
                prev_prev_block = self.block_records[
                    prev_block.prev_header_hash]
                assert prev_prev_block is not None
                difficulty = uint64(br.weight - prev_block.weight)
                assert difficulty == prev_block.weight - prev_prev_block.weight
            elif br.height == 1:
                prev_block = self.block_records[br.prev_header_hash]
                assert prev_block is not None
                difficulty = uint64(br.weight - prev_block.weight)
                assert difficulty == prev_block.weight
            else:
                difficulty = uint64(br.weight)
                assert difficulty == self.constants["DIFFICULTY_STARTING"]
        else:
            # This is a difficulty change, so check whether it's within the allowed range.
            # (But don't check whether it's the right amount).
            prev_block = self.block_records[br.prev_header_hash]
            assert prev_block is not None
            prev_prev_block = self.block_records[prev_block.prev_header_hash]
            assert prev_prev_block is not None
            difficulty = uint64(br.weight - prev_block.weight)
            prev_difficulty = uint64(prev_block.weight -
                                     prev_prev_block.weight)

            # Ensures the challenge for this block is valid (contains correct diff reset)
            if prev_block.header_hash in self.difficulty_resets_prev:
                if self.difficulty_resets_prev[
                        prev_block.header_hash] != difficulty:
                    return False

            max_diff = uint64(
                truncate_to_significant_bits(
                    prev_difficulty * self.constants["DIFFICULTY_FACTOR"],
                    self.constants["SIGNIFICANT_BITS"],
                ))
            min_diff = uint64(
                truncate_to_significant_bits(
                    prev_difficulty // self.constants["DIFFICULTY_FACTOR"],
                    self.constants["SIGNIFICANT_BITS"],
                ))

            if difficulty < min_diff or difficulty > max_diff:
                return False

        number_of_iters: uint64 = calculate_iterations_quality(
            quality_str,
            header_block.proof_of_space.size,
            difficulty,
            min_iters,
        )

        if header_block.proof_of_time is None:
            return False

        if number_of_iters != header_block.proof_of_time.number_of_iterations:
            return False

        # Check PoT
        if not header_block.proof_of_time.is_valid(
                self.constants["DISCRIMINANT_SIZE_BITS"]):
            return False

        # Validate challenge
        proofs_hash = std_hash(header_block.proof_of_space.get_hash() +
                               header_block.proof_of_time.output.get_hash())
        if proofs_hash != header_block.challenge.proofs_hash:
            return False
        # Note that we are not validating the work difficulty reset (since we don't know the
        # next block yet. When we process the next block, we will check that it matches).

        # Validate header:
        if header_block.header.header_hash != br.header_hash:
            return False
        if header_block.header.prev_header_hash != br.prev_header_hash:
            return False
        if header_block.height != br.height:
            return False
        if header_block.weight != br.weight:
            return False
        if br.height > 0:
            assert prev_block is not None
            if prev_block.weight + difficulty != br.weight:
                return False
            if prev_block.total_iters is not None and br.total_iters is not None:
                if prev_block.total_iters + number_of_iters != br.total_iters:
                    return False
            if prev_block.height + 1 != br.height:
                return False
        else:
            if br.weight != difficulty:
                return False
            if br.total_iters != number_of_iters:
                return False

        # Check that block is not far in the future
        if (header_block.header.data.timestamp >
                time.time() + self.constants["MAX_FUTURE_TIME"]):
            return False

        # Check header pos hash
        if (header_block.proof_of_space.get_hash() !=
                header_block.header.data.proof_of_space_hash):
            return False

        # Check coinbase sig
        pair = header_block.header.data.coinbase_signature.PkMessagePair(
            header_block.proof_of_space.pool_pubkey,
            header_block.header.data.coinbase.name(),
        )

        if not header_block.header.data.coinbase_signature.validate([pair]):
            return False

        # Check coinbase and fees amount
        coinbase_reward = calculate_block_reward(br.height)
        if coinbase_reward != header_block.header.data.coinbase.amount:
            return False
        return True