Example 1
async def load_blocks_dont_validate(
    blocks,
) -> Tuple[
    Dict[bytes32, HeaderBlock],
    Dict[uint32, bytes32],
    Dict[bytes32, BlockRecord],
    Dict[bytes32, SubEpochSummary],
]:
    header_cache: Dict[bytes32, HeaderBlock] = {}
    height_to_hash: Dict[uint32, bytes32] = {}
    sub_blocks: Dict[bytes32, BlockRecord] = {}
    sub_epoch_summaries: Dict[bytes32, SubEpochSummary] = {}
    prev_block = None
    difficulty = test_constants.DIFFICULTY_STARTING
    block: FullBlock
    for block in blocks:
        if block.height > 0:
            assert prev_block is not None
            difficulty = block.reward_chain_block.weight - prev_block.weight

        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            assert block.reward_chain_block.signage_point_index == 0
            cc_sp: bytes32 = block.reward_chain_block.pos_ss_cc_challenge_hash
        else:
            cc_sp = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()

        quality_string: Optional[bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
            test_constants,
            block.reward_chain_block.pos_ss_cc_challenge_hash,
            cc_sp,
        )
        assert quality_string is not None

        required_iters: uint64 = calculate_iterations_quality(
            test_constants.DIFFICULTY_CONSTANT_FACTOR,
            quality_string,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp,
        )

        sub_block = block_to_block_record(
            test_constants,
            BlockCache(sub_blocks, height_to_hash),
            required_iters,
            block,
            None,
        )
        sub_blocks[block.header_hash] = sub_block
        height_to_hash[block.height] = block.header_hash
        header_cache[block.header_hash] = block.get_block_header()
        if sub_block.sub_epoch_summary_included is not None:
            sub_epoch_summaries[block.height] = sub_block.sub_epoch_summary_included
        prev_block = block
    return header_cache, height_to_hash, sub_blocks, sub_epoch_summaries
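For context, a minimal usage sketch of the helper above, assuming a pytest-asyncio test and a fixture such as default_400_blocks that yields a connected list of FullBlocks (the fixture name is an assumption; any connected chain of blocks works):

import pytest

@pytest.mark.asyncio
async def test_load_blocks_dont_validate(default_400_blocks):
    # `default_400_blocks` is assumed to be a fixture yielding connected FullBlocks.
    blocks = default_400_blocks
    header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
    # Every block ends up indexed both by header hash and by height.
    assert len(header_cache) == len(blocks)
    assert all(height_to_hash[b.height] == b.header_hash for b in blocks)
    assert set(sub_blocks) == set(header_cache)
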
Example 2
async def pre_validate_blocks_multiprocessing(
    constants: ConsensusConstants,
    constants_json: Dict,
    block_records: BlockchainInterface,
    blocks: Sequence[Union[FullBlock, HeaderBlock]],
    pool: ProcessPoolExecutor,
    validate_transactions: bool,
    check_filter: bool,
) -> Optional[List[PreValidationResult]]:
    """
    This method must be called under the blockchain lock
    If all the full blocks pass pre-validation, (only validates header), returns the list of required iters.
    if any validation issue occurs, returns False.

    Args:
        check_filter:
        validate_transactions:
        constants_json:
        pool:
        constants:
        block_records:
        blocks: list of full blocks to validate (must be connected to current chain)
    """
    batch_size = 4
    prev_b: Optional[BlockRecord] = None
    # Collects all the recent blocks (up to the previous sub-epoch)
    recent_blocks: Dict[bytes32, BlockRecord] = {}
    recent_blocks_compressed: Dict[bytes32, BlockRecord] = {}
    num_sub_slots_found = 0
    num_blocks_seen = 0
    if blocks[0].height > 0:
        if not block_records.contains_block(blocks[0].prev_header_hash):
            return [PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value), None, None)]
        curr = block_records.block_record(blocks[0].prev_header_hash)
        num_sub_slots_to_look_for = 3 if curr.overflow else 2
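        # Walk back through ancestors until we reach a block that includes a sub-epoch summary,
        # have seen enough transaction blocks for timestamp checks, and have found enough finished sub-slots.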
        while (curr.sub_epoch_summary_included is None
               or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS
               or num_sub_slots_found < num_sub_slots_to_look_for
               ) and curr.height > 0:
            if num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS or num_sub_slots_found < num_sub_slots_to_look_for:
                recent_blocks_compressed[curr.header_hash] = curr

            if curr.first_in_sub_slot:
                assert curr.finished_challenge_slot_hashes is not None
                num_sub_slots_found += len(curr.finished_challenge_slot_hashes)
            recent_blocks[curr.header_hash] = curr
            if curr.is_transaction_block:
                num_blocks_seen += 1
            curr = block_records.block_record(curr.prev_hash)
        recent_blocks[curr.header_hash] = curr
        recent_blocks_compressed[curr.header_hash] = curr
    block_record_was_present = [block_records.contains_block(block.header_hash) for block in blocks]

    diff_ssis: List[Tuple[uint64, uint64]] = []
    for block in blocks:
        if block.height != 0 and prev_b is None:
            prev_b = block_records.block_record(block.prev_header_hash)
        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            constants,
            len(block.finished_sub_slots) > 0, prev_b, block_records)

        if block.reward_chain_block.signage_point_index >= constants.NUM_SPS_SUB_SLOT:
            log.warning(f"Block: {block.reward_chain_block}")
        overflow = is_overflow_block(
            constants, block.reward_chain_block.signage_point_index)
        challenge = get_block_challenge(constants, block,
                                        BlockCache(recent_blocks),
                                        prev_b is None, overflow, False)
        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            cc_sp_hash: bytes32 = challenge
        else:
            cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
        q_str: Optional[bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
            constants, challenge, cc_sp_hash
        )
        if q_str is None:
            for i, block_i in enumerate(blocks):
                if not block_record_was_present[i] and block_records.contains_block(block_i.header_hash):
                    block_records.remove_block_record(block_i.header_hash)
            return None

        required_iters: uint64 = calculate_iterations_quality(
            constants.DIFFICULTY_CONSTANT_FACTOR,
            q_str,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp_hash,
        )

        block_rec = block_to_block_record(
            constants,
            block_records,
            required_iters,
            block,
            None,
        )
        recent_blocks[block_rec.header_hash] = block_rec
        recent_blocks_compressed[block_rec.header_hash] = block_rec
        block_records.add_block_record(block_rec)  # Temporarily add block to dict
        prev_b = block_rec
        diff_ssis.append((difficulty, sub_slot_iters))

    for i, block in enumerate(blocks):
        if not block_record_was_present[i]:
            block_records.remove_block_record(block.header_hash)

    recent_sb_compressed_pickled = {
        bytes(k): bytes(v)
        for k, v in recent_blocks_compressed.items()
    }

    futures = []
    # Pool of workers to validate blocks concurrently
    for i in range(0, len(blocks), batch_size):
        end_i = min(i + batch_size, len(blocks))
        blocks_to_validate = blocks[i:end_i]
        if any(len(block.finished_sub_slots) > 0 for block in blocks_to_validate):
            final_pickled = {bytes(k): bytes(v) for k, v in recent_blocks.items()}
        else:
            final_pickled = recent_sb_compressed_pickled
        hb_pickled: List[bytes] = []
        generators: List[Optional[bytes]] = []
        for block in blocks_to_validate:
            if isinstance(block, FullBlock):
                hb_pickled.append(bytes(block.get_block_header()))
                generators.append(
                    bytes(block.transactions_generator) if block.transactions_generator is not None else None
                )
            else:
                hb_pickled.append(bytes(block))
                generators.append(None)

        futures.append(asyncio.get_running_loop().run_in_executor(
            pool,
            batch_pre_validate_blocks,
            constants_json,
            final_pickled,
            hb_pickled,
            generators,
            check_filter,
            [diff_ssis[j][0] for j in range(i, end_i)],
            [diff_ssis[j][1] for j in range(i, end_i)],
            validate_transactions,
        ))
    # Collect all results into one flat list
    return [
        PreValidationResult.from_bytes(result)
        for batch_result in (await asyncio.gather(*futures))
        for result in batch_result
    ]
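A hedged sketch of how a caller might consume these results: hold the blockchain lock, pre-validate a batch, and then feed each block together with its PreValidationResult into receive_block. The names blockchain, full_blocks, and executor, and the lock and constants_json attributes on the blockchain object, are assumptions for illustration:

async def pre_validate_and_add(blockchain, full_blocks, executor):
    # Assumes `blockchain` implements BlockchainInterface and exposes an asyncio lock.
    async with blockchain.lock:
        results = await pre_validate_blocks_multiprocessing(
            blockchain.constants,
            blockchain.constants_json,
            blockchain,
            full_blocks,
            executor,
            validate_transactions=True,
            check_filter=True,
        )
        if results is None:
            raise ValueError("proof of space validation failed")
        for full_block, result in zip(full_blocks, results):
            if result.error is not None:
                raise ValueError(f"pre-validation failed for {full_block.header_hash}: {Err(result.error)}")
            await blockchain.receive_block(full_block, result)
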
Example 3
    async def receive_block(
        self,
        block: FullBlock,
        pre_validation_result: Optional[PreValidationResult] = None,
        fork_point_with_peak: Optional[uint32] = None,
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]:
        """
        This method must be called under the blockchain lock
        Adds a new block into the blockchain, if it's valid and connected to the current
        blockchain, regardless of whether it is the child of a head, or another block.
        Returns a header if block is added to head. Returns an error if the block is
        invalid. Also returns the fork height, in the case of a new peak.
        """
        genesis: bool = block.height == 0

        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (
                ReceiveBlockResult.DISCONNECTED_BLOCK,
                Err.INVALID_PREV_BLOCK_HASH,
                None,
            )

        if pre_validation_result is None:
            if block.height == 0:
                prev_b: Optional[BlockRecord] = None
            else:
                prev_b = self.block_record(block.prev_header_hash)
            sub_slot_iters, difficulty = get_sub_slot_iters_and_difficulty(
                self.constants, block, prev_b, self)
            required_iters, error = validate_finished_header_block(
                self.constants,
                self,
                block.get_block_header(),
                False,
                difficulty,
                sub_slot_iters,
            )

            if error is not None:
                return ReceiveBlockResult.INVALID_BLOCK, error.code, None
        else:
            required_iters = pre_validation_result.required_iters
            assert pre_validation_result.error is None
        assert required_iters is not None
        error_code, _ = await validate_block_body(
            self.constants,
            self,
            self.block_store,
            self.coin_store,
            self.get_peak(),
            block,
            block.height,
            pre_validation_result.cost_result
            if pre_validation_result is not None else None,
            fork_point_with_peak,
        )
        if error_code is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error_code, None

        block_record = block_to_block_record(
            self.constants,
            self,
            required_iters,
            block,
            None,
        )
        # Always add the block to the database
        await self.block_store.add_full_block(block, block_record)

        self.add_block_record(block_record)

        fork_height: Optional[uint32] = await self._reconsider_peak(
            block_record, genesis, fork_point_with_peak)

        if fork_height is not None:
            return ReceiveBlockResult.NEW_PEAK, None, fork_height
        else:
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
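For context, a short sketch of how a caller might dispatch on the three returned values; blockchain, block, and log are illustrative names, and the ReceiveBlockResult members used are the ones appearing in the method above:

async def handle_new_block(blockchain, block, pre_validation_result):
    result, error, fork_height = await blockchain.receive_block(block, pre_validation_result)
    if result == ReceiveBlockResult.NEW_PEAK:
        log.info(f"New peak at height {block.height}, fork height {fork_height}")
    elif result == ReceiveBlockResult.ADDED_AS_ORPHAN:
        log.info("Block stored, but it did not extend the heaviest chain")
    elif result == ReceiveBlockResult.INVALID_BLOCK:
        log.warning(f"Rejected invalid block: {error}")
    else:
        # ALREADY_HAVE_BLOCK or DISCONNECTED_BLOCK
        log.debug(f"Block not added: {result}")
    return result
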
Example 4
    async def receive_block(
        self,
        header_block_record: HeaderBlockRecord,
        pre_validation_result: Optional[PreValidationResult] = None,
        trusted: bool = False,
        fork_point_with_peak: Optional[uint32] = None,
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]:
        """
        Adds a new block into the blockchain, if it's valid and connected to the current
        blockchain, regardless of whether it is the child of a head, or another block.
        Returns a header if block is added to head. Returns an error if the block is
        invalid. Also returns the fork height, in the case of a new peak.
        """
        block = header_block_record.header
        genesis: bool = block.height == 0

        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (
                ReceiveBlockResult.DISCONNECTED_BLOCK,
                Err.INVALID_PREV_BLOCK_HASH,
                None,
            )

        if block.height == 0:
            prev_b: Optional[BlockRecord] = None
        else:
            prev_b = self.block_record(block.prev_header_hash)
        sub_slot_iters, difficulty = get_sub_slot_iters_and_difficulty(
            self.constants, block, prev_b, self)

        if trusted is False and pre_validation_result is None:
            required_iters, error = validate_finished_header_block(
                self.constants, self, block, False, difficulty, sub_slot_iters)
        elif trusted:
            unfinished_header_block = UnfinishedHeaderBlock(
                block.finished_sub_slots,
                block.reward_chain_block.get_unfinished(),
                block.challenge_chain_sp_proof,
                block.reward_chain_sp_proof,
                block.foliage,
                block.foliage_transaction_block,
                block.transactions_filter,
            )

            required_iters, val_error = validate_unfinished_header_block(
                self.constants, self, unfinished_header_block, False,
                difficulty, sub_slot_iters, False, True)
            error = ValidationError(Err(val_error)) if val_error is not None else None
        else:
            assert pre_validation_result is not None
            required_iters = pre_validation_result.required_iters
            error = (ValidationError(Err(pre_validation_result.error))
                     if pre_validation_result.error is not None else None)

        if error is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error.code, None
        assert required_iters is not None

        block_record = block_to_block_record(
            self.constants,
            self,
            required_iters,
            None,
            block,
        )

        # Always add the block to the database
        await self.block_store.add_block_record(header_block_record, block_record)
        self.add_block_record(block_record)
        self.clean_block_record(block_record.height - self.constants.BLOCKS_CACHE_SIZE)

        fork_height: Optional[uint32] = await self._reconsider_peak(
            block_record, genesis, fork_point_with_peak)
        if fork_height is not None:
            self.log.info(
                f"💰 Updated wallet peak to height {block_record.height}, weight {block_record.weight}"
            )
            return ReceiveBlockResult.NEW_PEAK, None, fork_height
        else:
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
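A hedged sketch of how a wallet sync loop might call this variant, passing trusted=True for trusted peers so the cheaper unfinished-header validation path is taken; wallet_blockchain, header_block_record, and peer_is_trusted are illustrative names:

async def add_wallet_block(wallet_blockchain, header_block_record, pre_validation_result, peer_is_trusted):
    # When the peer is trusted, the method above validates only the unfinished header block.
    result, error, fork_height = await wallet_blockchain.receive_block(
        header_block_record,
        pre_validation_result,
        trusted=peer_is_trusted,
    )
    if result == ReceiveBlockResult.INVALID_BLOCK:
        raise ValueError(f"Invalid block from peer: {error}")
    return result, fork_height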