Example #1
 async def test_weight_proof_extend_new_ses(self, default_1000_blocks):
     blocks = default_1000_blocks
     header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(
         blocks)
     # delete last summary
     last_ses_height = sorted(summaries.keys())[-1]
     last_ses = summaries[last_ses_height]
     del summaries[last_ses_height]
     wpf_synced = WeightProofHandler(
         test_constants,
         BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
     wp = await wpf_synced.get_proof_of_weight(blocks[last_ses_height -
                                                      10].header_hash)
     assert wp is not None
     wpf_not_synced = WeightProofHandler(
         test_constants,
         BlockCache(sub_blocks, header_cache, height_to_hash, {}))
     valid, fork_point = await wpf_not_synced.validate_weight_proof(wp)
     assert valid
     assert fork_point == 0
     # extend proof with 100 blocks
     summaries[last_ses_height] = last_ses
     wpf = WeightProofHandler(
         test_constants,
         BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
     new_wp = await wpf._create_proof_of_weight(blocks[-1].header_hash)
     valid, fork_point = await wpf.validate_weight_proof(new_wp)
     assert valid
     assert fork_point != 0
Example #2
 async def test_weight_proof_extend_multiple_ses(self, default_1000_blocks):
     blocks = default_1000_blocks
     header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(
         blocks)
     last_ses_height = sorted(summaries.keys())[-1]
     last_ses = summaries[last_ses_height]
     before_last_ses_height = sorted(summaries.keys())[-2]
     before_last_ses = summaries[before_last_ses_height]
     wpf = WeightProofHandler(
         test_constants,
         BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
     wpf_verify = WeightProofHandler(
         test_constants,
         BlockCache(sub_blocks, header_cache, height_to_hash, {}))
     for x in range(10, -1, -1):
         wp = await wpf.get_proof_of_weight(blocks[before_last_ses_height -
                                                   x].header_hash)
         assert wp is not None
         valid, fork_point = await wpf_verify.validate_weight_proof(wp)
         assert valid
         assert fork_point == 0
     # extend proof with 100 blocks
     summaries[last_ses_height] = last_ses
     summaries[before_last_ses_height] = before_last_ses
     wpf = WeightProofHandler(
         test_constants,
         BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
     new_wp = await wpf._create_proof_of_weight(blocks[-1].header_hash)
     valid, fork_point = await wpf.validate_weight_proof(new_wp)
     assert valid
     assert fork_point != 0
Example #3
    async def test_weight_proof1000(self, default_1000_blocks):
        blocks = default_1000_blocks
        header_cache, height_to_hash, block_records, summaries = await load_blocks_dont_validate(blocks)
        wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, summaries))
        wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
        assert wp is not None
        wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, {}))
        valid, fork_point = await wpf.validate_weight_proof(wp)

        assert valid
        assert fork_point == 0
Example #4
    async def test_weight_proof_from_database(self):
        connection = await aiosqlite.connect("path to db")
        block_store: BlockStore = await BlockStore.create(connection)
        blocks, peak = await block_store.get_block_records()
        if len(blocks) == 0:
            return None, None

        assert peak is not None
        peak_height = blocks[peak].height
        headers = await block_store.get_header_blocks_in_range(0, peak_height)
        sub_height_to_hash = {}
        sub_epoch_summaries = {}
        # peak_header = await block_store.get_full_blocks_at([peak_height])

        # Sets the other state variables (peak_height and height_to_hash)
        curr: BlockRecord = blocks[peak]
        while True:
            sub_height_to_hash[curr.height] = curr.header_hash
            if curr.sub_epoch_summary_included is not None:
                sub_epoch_summaries[
                    curr.height] = curr.sub_epoch_summary_included
            if curr.height == 0:
                break
            curr = blocks[curr.prev_hash]
        assert len(sub_height_to_hash) == peak_height + 1
        block_cache = BlockCache(blocks, headers, sub_height_to_hash,
                                 sub_epoch_summaries)
        wpf = WeightProofHandler(DEFAULT_CONSTANTS, block_cache)
        wp = await wpf._create_proof_of_weight(sub_height_to_hash[peak_height -
                                                                  50])
        valid, fork_point = wpf.validate_weight_proof_single_proc(wp)

        await connection.close()
        assert valid
        print(f"size of proof is {get_size(wp)}")
Example #5
 async def test_weight_proof_bad_peak_hash(self, default_1000_blocks):
     blocks = default_1000_blocks
     header_cache, height_to_hash, block_records, summaries = await load_blocks_dont_validate(blocks)
     wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, summaries))
     wpf.log.setLevel(logging.INFO)
     wp = await wpf.get_proof_of_weight(b"sadgfhjhgdgsfadfgh")
     assert wp is None
Example #6
 async def test_weight_proof_extend_no_ses(self, default_1000_blocks):
     blocks = default_1000_blocks
     header_cache, height_to_hash, block_records, summaries = await load_blocks_dont_validate(blocks)
     last_ses_height = sorted(summaries.keys())[-1]
     wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, summaries))
     wp = await wpf.get_proof_of_weight(blocks[last_ses_height].header_hash)
     assert wp is not None
     # todo for each sampled sub epoch, validate number of segments
     wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, {}))
     valid, fork_point = await wpf.validate_weight_proof(wp)
     assert valid
     assert fork_point == 0
     # extend proof with 100 blocks
     new_wp = await wpf._extend_proof_of_weight(wp, block_records[blocks[-1].header_hash])
     valid, fork_point = await wpf.validate_weight_proof(new_wp)
     assert valid
     assert fork_point == 0
Example #7
 async def test_weight_proof_from_genesis(self, default_400_blocks):
     blocks = default_400_blocks
     header_cache, height_to_hash, block_records, summaries = await load_blocks_dont_validate(blocks)
     wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, summaries))
     wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
     assert wp is not None
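     # request the proof a second time; the handler should again return a
     # non-None proof (the repeat call may be served from its internal cache)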
     wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
     assert wp is not None
Example #8
    async def test_weight_proof1000_pre_genesis_empty_slots(
            self, pre_genesis_empty_slots_1000_blocks):
        blocks = pre_genesis_empty_slots_1000_blocks
        header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(
            blocks)
        wpf = WeightProofHandler(
            test_constants,
            BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
        wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
        assert wp is not None
        wpf = WeightProofHandler(
            test_constants,
            BlockCache(sub_blocks, header_cache, height_to_hash, {}))
        valid, fork_point = wpf.validate_weight_proof_single_proc(wp)

        assert valid
        assert fork_point == 0
Example #9
 async def test_weight_proof_summaries_1000_blocks(self, default_1000_blocks):
     blocks = default_1000_blocks
     header_cache, height_to_hash, block_records, summaries = await load_blocks_dont_validate(blocks)
     wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, summaries))
     wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
     summaries, sub_epoch_data_weight = _map_summaries(
         wpf.constants.SUB_EPOCH_BLOCKS,
         wpf.constants.GENESIS_CHALLENGE,
         wp.sub_epochs,
         wpf.constants.DIFFICULTY_STARTING,
     )
     assert wpf._validate_summaries_weight(sub_epoch_data_weight, summaries, wp)
Example #10
    async def test_weight_proof1000_partial_blocks_compact(
            self, default_10000_blocks_compact):
        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            100,
            block_list_input=default_10000_blocks_compact,
            seed=b"asdfghjkl",
            normalized_to_identity=False,
        )
        header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(
            blocks)
        wpf = WeightProofHandler(
            test_constants,
            BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
        wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
        assert wp is not None
        wpf = WeightProofHandler(
            test_constants,
            BlockCache(sub_blocks, header_cache, height_to_hash, {}))
        valid, fork_point = wpf.validate_weight_proof_single_proc(wp)

        assert valid
        assert fork_point == 0
Example #11
async def load_blocks_dont_validate(
    blocks,
) -> Tuple[Dict[bytes32, HeaderBlock], Dict[uint32, bytes32],
           Dict[bytes32, BlockRecord], Dict[uint32, SubEpochSummary]]:
    header_cache: Dict[bytes32, HeaderBlock] = {}
    height_to_hash: Dict[uint32, bytes32] = {}
    sub_blocks: Dict[bytes32, BlockRecord] = {}
    sub_epoch_summaries: Dict[uint32, SubEpochSummary] = {}
    prev_block = None
    difficulty = test_constants.DIFFICULTY_STARTING
    block: FullBlock
    for block in blocks:
        if block.height > 0:
            assert prev_block is not None
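            # difficulty is recovered as the weight delta between consecutive blocks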
            difficulty = block.reward_chain_block.weight - prev_block.weight

        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            assert block.reward_chain_block.signage_point_index == 0
            cc_sp: bytes32 = block.reward_chain_block.pos_ss_cc_challenge_hash
        else:
            cc_sp = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash(
            )

        quality_string: Optional[
            bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
                test_constants,
                block.reward_chain_block.pos_ss_cc_challenge_hash,
                cc_sp,
            )
        assert quality_string is not None

        required_iters: uint64 = calculate_iterations_quality(
            test_constants.DIFFICULTY_CONSTANT_FACTOR,
            quality_string,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp,
        )

        sub_block = block_to_block_record(
            test_constants, BlockCache(sub_blocks, height_to_hash),
            required_iters, block, None)
        sub_blocks[block.header_hash] = sub_block
        height_to_hash[block.height] = block.header_hash
        header_cache[block.header_hash] = block.get_block_header()
        if sub_block.sub_epoch_summary_included is not None:
            sub_epoch_summaries[
                block.height] = sub_block.sub_epoch_summary_included
        prev_block = block
    return header_cache, height_to_hash, sub_blocks, sub_epoch_summaries
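
A minimal usage sketch (not part of the test suite; `blocks` is a placeholder for any list of FullBlocks, such as the default_1000_blocks fixture): the four maps returned by load_blocks_dont_validate plug directly into a BlockCache, which is what every WeightProofHandler above consumes.

    header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
    wpf = WeightProofHandler(
        test_constants,
        BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
    wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)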
Example #12
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    header_blocks_pickled: List[bytes],
    transaction_generators: List[Optional[bytes]],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_transactions: bool,
) -> List[bytes]:
    assert len(header_blocks_pickled) == len(transaction_generators)
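    # Rehydrate the BlockRecord cache from its pickled byte form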
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    for i in range(len(header_blocks_pickled)):
        try:
            header_block: HeaderBlock = HeaderBlock.from_bytes(
                header_blocks_pickled[i])
            generator: Optional[bytes] = transaction_generators[i]
            required_iters, error = validate_finished_header_block(
                constants,
                BlockCache(blocks),
                header_block,
                check_filter,
                expected_difficulty[i],
                expected_sub_slot_iters[i],
            )
            cost_result: Optional[CostResult] = None
            error_int: Optional[uint16] = None
            if error is not None:
                error_int = uint16(error.code.value)
            if constants_dict["NETWORK_TYPE"] == NetworkType.MAINNET.value:
                cost_result = None
            else:
                if not error and generator is not None and validate_transactions:
                    cost_result = calculate_cost_of_program(
                        SerializedProgram.from_bytes(generator),
                        constants.CLVM_COST_RATIO_CONSTANT)
            results.append(
                PreValidationResult(error_int, required_iters, cost_result))
        except Exception:
            error_stack = traceback.format_exc()
            log.error(f"Exception: {error_stack}")
            results.append(
                PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
Example #13
 async def test_check_num_of_samples(self, default_10000_blocks):
     blocks = default_10000_blocks
     header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(
         blocks)
     wpf = WeightProofHandler(
         test_constants,
         BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
     wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
     curr = -1
     samples = 0
     for sub_epoch_segment in wp.sub_epoch_segments:
         if sub_epoch_segment.sub_epoch_n > curr:
             curr = sub_epoch_segment.sub_epoch_n
             samples += 1
     assert samples <= wpf.MAX_SAMPLES
Example #14
    async def test_weight_proof_validate_segment(self, default_400_blocks):
        blocks = default_400_blocks
        header_cache, height_to_hash, block_records, summaries = await load_blocks_dont_validate(blocks)
        wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, summaries))
        summaries_list: List[SubEpochSummary] = []
        for key in sorted(summaries.keys()):
            summaries_list.append(summaries[key])

        wp = await wpf._create_proof_of_weight(blocks[-1].header_hash)

        res, _, _, _, _ = wpf._validate_segment_slots(
            wp.sub_epoch_segments[0],
            test_constants.SUB_SLOT_ITERS_STARTING,
            test_constants.DIFFICULTY_STARTING,
            None,
        )

        assert res
Example #15
async def _test_map_summaries(blocks, header_cache, height_to_hash, block_records, summaries):
    curr = block_records[blocks[-1].header_hash]
    orig_summaries: Dict[int, SubEpochSummary] = {}
    while curr.height > 0:
        if curr.sub_epoch_summary_included is not None:
            orig_summaries[curr.height] = curr.sub_epoch_summary_included
        # walk back to the previous block
        curr = block_records[curr.prev_hash]

    wpf = WeightProofHandler(test_constants, BlockCache(block_records, header_cache, height_to_hash, summaries))

    wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
    assert wp is not None
    # sub epoch summaries validate hashes
    summaries, sub_epoch_data_weight = _map_summaries(
        test_constants.SUB_EPOCH_BLOCKS,
        test_constants.GENESIS_CHALLENGE,
        wp.sub_epochs,
        test_constants.DIFFICULTY_STARTING,
    )
    assert len(summaries) == len(orig_summaries)
Example #16
    async def test_weight_proof_from_database(self):
        connection = await aiosqlite.connect("path to db")
        block_store: BlockStore = await BlockStore.create(connection)
        sub_blocks, _ = await block_store.get_block_records()
        headers = await block_store.get_header_blocks_in_range(0, 100225)
        sub_height_to_hash = {}
        sub_epoch_summaries = {}
        peak = await block_store.get_full_blocks_at([100225])
        if len(sub_blocks) == 0:
            return None, None

        assert peak is not None
        peak_height = sub_blocks[peak[0].header_hash].height

        # Sets the other state variables (peak_height and height_to_hash)
        curr: BlockRecord = sub_blocks[peak[0].header_hash]
        while True:
            sub_height_to_hash[curr.height] = curr.header_hash
            if curr.sub_epoch_summary_included is not None:
                sub_epoch_summaries[
                    curr.height] = curr.sub_epoch_summary_included
            if curr.height == 0:
                break
            curr = sub_blocks[curr.prev_hash]
        assert len(sub_height_to_hash) == peak_height + 1
        block_cache = BlockCache(sub_blocks, headers, sub_height_to_hash,
                                 sub_epoch_summaries)
        wpf = WeightProofHandler(DEFAULT_CONSTANTS, block_cache)
        await wpf._create_proof_of_weight(sub_height_to_hash[peak_height - 1])
        wp = await wpf._create_proof_of_weight(sub_height_to_hash[peak_height -
                                                                  1])
        valid, fork_point = await wpf.validate_weight_proof(wp)

        await connection.close()
        assert valid
        with open("wp.txt", "a") as f:
            f.write(f"{wp}")
Example #17
async def pre_validate_blocks_multiprocessing(
    constants: ConsensusConstants,
    constants_json: Dict,
    block_records: BlockchainInterface,
    blocks: Sequence[Union[FullBlock, HeaderBlock]],
    pool: ProcessPoolExecutor,
    validate_transactions: bool,
    check_filter: bool,
) -> Optional[List[PreValidationResult]]:
    """
    This method must be called under the blockchain lock.
    If all the full blocks pass pre-validation (only the headers are validated), returns a list of
    PreValidationResults containing the required iters for each block; if a fatal error occurs, returns None.

    Args:
        check_filter: whether to validate each block's transaction filter
        validate_transactions: whether to compute the cost of each transaction generator
        constants_json: consensus constants serialized as a dict for the worker processes
        pool: process pool used to pre-validate batches of blocks concurrently
        constants: consensus constants
        block_records: blockchain interface used to look up previous block records
        blocks: list of full blocks to validate (must be connected to current chain)
    """
    batch_size = 4
    prev_b: Optional[BlockRecord] = None
    # Collects all the recent blocks (up to the previous sub-epoch)
    recent_blocks: Dict[bytes32, BlockRecord] = {}
    recent_blocks_compressed: Dict[bytes32, BlockRecord] = {}
    num_sub_slots_found = 0
    num_blocks_seen = 0
    if blocks[0].height > 0:
        if not block_records.contains_block(blocks[0].prev_header_hash):
            return [
                PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value),
                                    None, None)
            ]
        curr = block_records.block_record(blocks[0].prev_header_hash)
        num_sub_slots_to_look_for = 3 if curr.overflow else 2
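        # Walk back until a sub-epoch summary has been passed and enough
        # timestamps and finished sub-slots have been collected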
        while (curr.sub_epoch_summary_included is None
               or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS
               or num_sub_slots_found < num_sub_slots_to_look_for
               ) and curr.height > 0:
            if num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS or num_sub_slots_found < num_sub_slots_to_look_for:
                recent_blocks_compressed[curr.header_hash] = curr

            if curr.first_in_sub_slot:
                assert curr.finished_challenge_slot_hashes is not None
                num_sub_slots_found += len(curr.finished_challenge_slot_hashes)
            recent_blocks[curr.header_hash] = curr
            if curr.is_transaction_block:
                num_blocks_seen += 1
            curr = block_records.block_record(curr.prev_hash)
        recent_blocks[curr.header_hash] = curr
        recent_blocks_compressed[curr.header_hash] = curr
    block_record_was_present = []
    for block in blocks:
        block_record_was_present.append(
            block_records.contains_block(block.header_hash))

    diff_ssis: List[Tuple[uint64, uint64]] = []
    for block in blocks:
        if block.height != 0 and prev_b is None:
            prev_b = block_records.block_record(block.prev_header_hash)
        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            constants,
            len(block.finished_sub_slots) > 0, prev_b, block_records)

        if block.reward_chain_block.signage_point_index >= constants.NUM_SPS_SUB_SLOT:
            log.warning(f"Block: {block.reward_chain_block}")
        overflow = is_overflow_block(
            constants, block.reward_chain_block.signage_point_index)
        challenge = get_block_challenge(constants, block,
                                        BlockCache(recent_blocks),
                                        prev_b is None, overflow, False)
        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            cc_sp_hash: bytes32 = challenge
        else:
            cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash(
            )
        q_str: Optional[
            bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
                constants, challenge, cc_sp_hash)
        if q_str is None:
            for i, block_i in enumerate(blocks):
                if not block_record_was_present[
                        i] and block_records.contains_block(
                            block_i.header_hash):
                    block_records.remove_block_record(block_i.header_hash)
            return None

        required_iters: uint64 = calculate_iterations_quality(
            constants.DIFFICULTY_CONSTANT_FACTOR,
            q_str,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp_hash,
        )

        block_rec = block_to_block_record(
            constants,
            block_records,
            required_iters,
            block,
            None,
        )
        recent_blocks[block_rec.header_hash] = block_rec
        recent_blocks_compressed[block_rec.header_hash] = block_rec
        block_records.add_block_record(
            block_rec)  # Temporarily add block to dict
        prev_b = block_rec
        diff_ssis.append((difficulty, sub_slot_iters))

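    # Drop the temporarily-added block records so block_records is left as the caller provided it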
    for i, block in enumerate(blocks):
        if not block_record_was_present[i]:
            block_records.remove_block_record(block.header_hash)

    recent_sb_compressed_pickled = {
        bytes(k): bytes(v)
        for k, v in recent_blocks_compressed.items()
    }

    futures = []
    # Pool of workers to validate blocks concurrently
    for i in range(0, len(blocks), batch_size):
        end_i = min(i + batch_size, len(blocks))
        blocks_to_validate = blocks[i:end_i]
        if any([
                len(block.finished_sub_slots) > 0
                for block in blocks_to_validate
        ]):
            final_pickled = {
                bytes(k): bytes(v)
                for k, v in recent_blocks.items()
            }
        else:
            final_pickled = recent_sb_compressed_pickled
        hb_pickled: List[bytes] = []
        generators: List[Optional[bytes]] = []
        for block in blocks_to_validate:
            if isinstance(block, FullBlock):
                hb_pickled.append(bytes(block.get_block_header()))
                generators.append(
                    bytes(block.transactions_generator) if block.
                    transactions_generator is not None else None)
            else:
                hb_pickled.append(bytes(block))
                generators.append(None)

        futures.append(asyncio.get_running_loop().run_in_executor(
            pool,
            batch_pre_validate_blocks,
            constants_json,
            final_pickled,
            hb_pickled,
            generators,
            check_filter,
            [diff_ssis[j][0] for j in range(i, end_i)],
            [diff_ssis[j][1] for j in range(i, end_i)],
            validate_transactions,
        ))
    # Collect all results into one flat list
    return [
        PreValidationResult.from_bytes(result)
        for batch_result in (await asyncio.gather(*futures))
        for result in batch_result
    ]
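
A hedged usage sketch for the entry point above; `blockchain` and `full_blocks` are placeholder names for a BlockchainInterface and a connected sequence of FullBlocks, and are not defined in these examples.

    # a sketch, assuming concurrent.futures.ProcessPoolExecutor is imported
    pool = ProcessPoolExecutor(max_workers=4)
    results = await pre_validate_blocks_multiprocessing(
        constants, constants_json, blockchain, full_blocks, pool,
        validate_transactions=True, check_filter=True)
    if results is None:
        raise RuntimeError("a block failed proof-of-space verification")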
Example #18
    async def test_basic_store(self,
                               empty_blockchain,
                               normalized_to_identity: bool = False):
        blockchain = empty_blockchain
        blocks = bt.get_consecutive_blocks(
            10, seed=b"1234", normalized_to_identity=normalized_to_identity)

        store = await FullNodeStore.create(test_constants)

        unfinished_blocks = []
        for block in blocks:
            unfinished_blocks.append(
                UnfinishedBlock(
                    block.finished_sub_slots,
                    block.reward_chain_block.get_unfinished(),
                    block.challenge_chain_sp_proof,
                    block.reward_chain_sp_proof,
                    block.foliage,
                    block.foliage_transaction_block,
                    block.transactions_info,
                    block.transactions_generator,
                ))

        # Add/get candidate block
        assert store.get_candidate_block(
            unfinished_blocks[0].get_hash()) is None
        for height, unf_block in enumerate(unfinished_blocks):
            store.add_candidate_block(unf_block.get_hash(), height, unf_block)

        assert store.get_candidate_block(
            unfinished_blocks[4].get_hash()) == unfinished_blocks[4]
        store.clear_candidate_blocks_below(uint32(8))
        assert store.get_candidate_block(
            unfinished_blocks[5].get_hash()) is None
        assert store.get_candidate_block(
            unfinished_blocks[8].get_hash()) is not None

        # Test seen unfinished blocks
        h_hash_1 = bytes32(token_bytes(32))
        assert not store.seen_unfinished_block(h_hash_1)
        assert store.seen_unfinished_block(h_hash_1)
        store.clear_seen_unfinished_blocks()
        assert not store.seen_unfinished_block(h_hash_1)

        # Add/get unfinished block
        for height, unf_block in enumerate(unfinished_blocks):
            assert store.get_unfinished_block(unf_block.partial_hash) is None
            store.add_unfinished_block(
                height, unf_block,
                PreValidationResult(None, uint64(123532), None))
            assert store.get_unfinished_block(
                unf_block.partial_hash) == unf_block
            store.remove_unfinished_block(unf_block.partial_hash)
            assert store.get_unfinished_block(unf_block.partial_hash) is None

        blocks = bt.get_consecutive_blocks(
            1, skip_slots=5, normalized_to_identity=normalized_to_identity)
        sub_slots = blocks[0].finished_sub_slots
        assert len(sub_slots) == 5

        assert (store.get_finished_sub_slots(
            None,
            {},
            sub_slots[0].challenge_chain.challenge_chain_end_of_slot_vdf.
            challenge,
            False,
        ) == [])
        # Test adding non-connecting sub-slots at genesis
        assert store.get_sub_slot(test_constants.GENESIS_CHALLENGE) is None
        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.new_finished_sub_slot(sub_slots[1], {}, None) is None
        assert store.new_finished_sub_slot(sub_slots[2], {}, None) is None

        # Test adding sub-slots after genesis
        assert store.new_finished_sub_slot(sub_slots[0], {}, None) is not None
        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash())[0] == sub_slots[0]
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.new_finished_sub_slot(sub_slots[1], {}, None) is not None
        for i in range(len(sub_slots)):
            assert store.new_finished_sub_slot(sub_slots[i], {},
                                               None) is not None
            assert store.get_sub_slot(
                sub_slots[i].challenge_chain.get_hash())[0] == sub_slots[i]

        assert store.get_finished_sub_slots(
            None, {}, sub_slots[-1].challenge_chain.get_hash(),
            False) == sub_slots
        with raises(ValueError):
            store.get_finished_sub_slots(
                None, {}, sub_slots[-1].challenge_chain.get_hash(), True)

        assert store.get_finished_sub_slots(
            None, {}, sub_slots[-2].challenge_chain.get_hash(),
            False) == sub_slots[:-1]

        # Test adding genesis peak
        await blockchain.receive_block(blocks[0])
        peak = blockchain.get_peak()
        if peak.overflow:
            store.new_peak(peak, sub_slots[-2], sub_slots[-1], False, {})
        else:
            store.new_peak(peak, None, sub_slots[-1], False, {})

        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[2].challenge_chain.get_hash()) is None
        if peak.overflow:
            assert store.get_sub_slot(
                sub_slots[3].challenge_chain.get_hash())[0] == sub_slots[3]
        else:
            assert store.get_sub_slot(
                sub_slots[3].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[4].challenge_chain.get_hash())[0] == sub_slots[4]

        assert (store.get_finished_sub_slots(
            peak,
            blockchain,
            sub_slots[-1].challenge_chain.get_hash(),
            False,
        ) == [])

        # Test adding a non-genesis peak directly
        blocks = bt.get_consecutive_blocks(
            2, skip_slots=2, normalized_to_identity=normalized_to_identity)
        blocks = bt.get_consecutive_blocks(
            3,
            block_list_input=blocks,
            normalized_to_identity=normalized_to_identity)
        for block in blocks:
            await blockchain.receive_block(block)
            sb = blockchain.block_record(block.header_hash)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            res = store.new_peak(sb, sp_sub_slot, ip_sub_slot, False,
                                 blockchain)
            assert res[0] is None

        # Add reorg blocks
        blocks_reorg = bt.get_consecutive_blocks(
            20, normalized_to_identity=normalized_to_identity)
        for block in blocks_reorg:
            res, _, _ = await blockchain.receive_block(block)
            if res == ReceiveBlockResult.NEW_PEAK:
                sb = blockchain.block_record(block.header_hash)
                sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                    block.header_hash)
                res = store.new_peak(sb, sp_sub_slot, ip_sub_slot, True,
                                     blockchain)
                assert res[0] is None

        # Add slots to the end
        blocks_2 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks_reorg,
            skip_slots=2,
            normalized_to_identity=normalized_to_identity)
        for slot in blocks_2[-1].finished_sub_slots:
            store.new_finished_sub_slot(slot, blockchain,
                                        blockchain.get_peak())

        assert store.get_sub_slot(
            sub_slots[3].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[4].challenge_chain.get_hash()) is None

        # Test adding signage point
        peak = blockchain.get_peak()
        ss_start_iters = peak.ip_sub_slot_total_iters(test_constants)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                ss_start_iters,
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, blockchain, peak,
                                           peak.sub_slot_iters, sp)

        blocks = blocks_reorg
        while True:
            blocks = bt.get_consecutive_blocks(
                1,
                block_list_input=blocks,
                normalized_to_identity=normalized_to_identity)
            res, _, _ = await blockchain.receive_block(blocks[-1])
            if res == ReceiveBlockResult.NEW_PEAK:
                sb = blockchain.block_record(blocks[-1].header_hash)
                sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                    blocks[-1].header_hash)

                res = store.new_peak(sb, sp_sub_slot, ip_sub_slot, True,
                                     blockchain)
                assert res[0] is None
                if sb.overflow and sp_sub_slot is not None:
                    assert sp_sub_slot != ip_sub_slot
                    break

        peak = blockchain.get_peak()
        assert peak.overflow
        # Overflow peak should result in 2 finished sub slots
        assert len(store.finished_sub_slots) == 2

        # Add slots to the end, except for the last one, which we will use to test invalid SP
        blocks_2 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            skip_slots=3,
            normalized_to_identity=normalized_to_identity)
        for slot in blocks_2[-1].finished_sub_slots[:-1]:
            store.new_finished_sub_slot(slot, blockchain,
                                        blockchain.get_peak())
        finished_sub_slots = blocks_2[-1].finished_sub_slots
        assert len(store.finished_sub_slots) == 4

        # Test adding signage points for overflow blocks (sp_sub_slot)
        ss_start_iters = peak.sp_sub_slot_total_iters(test_constants)
        # for i in range(peak.signage_point_index, test_constants.NUM_SPS_SUB_SLOT):
        #     if i < peak.signage_point_index:
        #         continue
        #     latest = peak
        #     while latest.total_iters > peak.sp_total_iters(test_constants):
        #         latest = blockchain.blocks[latest.prev_hash]
        #     sp = get_signage_point(
        #         test_constants,
        #         blockchain.blocks,
        #         latest,
        #         ss_start_iters,
        #         uint8(i),
        #         [],
        #         peak.sub_slot_iters,
        #     )
        #     assert store.new_signage_point(i, blockchain.blocks, peak, peak.sub_slot_iters, sp)

        # Test adding signage points for overflow blocks (ip_sub_slot)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                peak.ip_sub_slot_total_iters(test_constants),
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, blockchain, peak,
                                           peak.sub_slot_iters, sp)

        # Test adding future signage point, a few slots forward (good)
        saved_sp_hash = None
        for slot_offset in range(1, len(finished_sub_slots)):
            for i in range(
                    1,
                    test_constants.NUM_SPS_SUB_SLOT -
                    test_constants.NUM_SP_INTERVALS_EXTRA,
            ):
                sp = get_signage_point(
                    test_constants,
                    blockchain,
                    peak,
                    peak.ip_sub_slot_total_iters(test_constants) +
                    slot_offset * peak.sub_slot_iters,
                    uint8(i),
                    finished_sub_slots[:slot_offset],
                    peak.sub_slot_iters,
                )
                assert sp.cc_vdf is not None
                saved_sp_hash = sp.cc_vdf.output.get_hash()
                assert store.new_signage_point(i, blockchain, peak,
                                               peak.sub_slot_iters, sp)

        # Test adding future signage point (bad)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                peak.ip_sub_slot_total_iters(test_constants) +
                len(finished_sub_slots) * peak.sub_slot_iters,
                uint8(i),
                finished_sub_slots[:len(finished_sub_slots)],
                peak.sub_slot_iters,
            )
            assert not store.new_signage_point(i, blockchain, peak,
                                               peak.sub_slot_iters, sp)

        # Test adding past signage point
        sp = SignagePoint(
            blocks[1].reward_chain_block.challenge_chain_sp_vdf,
            blocks[1].challenge_chain_sp_proof,
            blocks[1].reward_chain_block.reward_chain_sp_vdf,
            blocks[1].reward_chain_sp_proof,
        )
        assert not store.new_signage_point(
            blocks[1].reward_chain_block.signage_point_index,
            {},
            peak,
            blockchain.block_record(
                blocks[1].header_hash).sp_sub_slot_total_iters(test_constants),
            sp,
        )

        # Get signage point by index
        assert (store.get_signage_point_by_index(
            finished_sub_slots[0].challenge_chain.get_hash(),
            4,
            finished_sub_slots[0].reward_chain.get_hash(),
        ) is not None)

        assert (store.get_signage_point_by_index(
            finished_sub_slots[0].challenge_chain.get_hash(), 4,
            std_hash(b"1")) is None)

        # Get signage point by hash
        assert store.get_signage_point(saved_sp_hash) is not None
        assert store.get_signage_point(std_hash(b"2")) is None

        # Test adding signage points before genesis
        store.initialize_genesis_sub_slot()
        assert len(store.finished_sub_slots) == 1
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                BlockCache({}, {}),
                None,
                uint128(0),
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, {}, None, peak.sub_slot_iters,
                                           sp)

        blocks_3 = bt.get_consecutive_blocks(
            1, skip_slots=2, normalized_to_identity=normalized_to_identity)
        for slot in blocks_3[-1].finished_sub_slots:
            store.new_finished_sub_slot(slot, {}, None)
        assert len(store.finished_sub_slots) == 3
        finished_sub_slots = blocks_3[-1].finished_sub_slots

        for slot_offset in range(1, len(finished_sub_slots) + 1):
            for i in range(
                    1,
                    test_constants.NUM_SPS_SUB_SLOT -
                    test_constants.NUM_SP_INTERVALS_EXTRA,
            ):
                sp = get_signage_point(
                    test_constants,
                    BlockCache({}, {}),
                    None,
                    slot_offset * peak.sub_slot_iters,
                    uint8(i),
                    finished_sub_slots[:slot_offset],
                    peak.sub_slot_iters,
                )
                assert store.new_signage_point(i, {}, None,
                                               peak.sub_slot_iters, sp)

        # Test adding signage points after genesis
        blocks_4 = bt.get_consecutive_blocks(
            1, normalized_to_identity=normalized_to_identity)
        blocks_5 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks_4,
            skip_slots=1,
            normalized_to_identity=normalized_to_identity)

        # If this is not the case, fix test to find a block that is
        assert (blocks_4[-1].reward_chain_block.signage_point_index <
                test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA)
        await blockchain.receive_block(blocks_4[-1])
        sb = blockchain.block_record(blocks_4[-1].header_hash)
        store.new_peak(sb, None, None, False, blockchain)
        for i in range(
                sb.signage_point_index + test_constants.NUM_SP_INTERVALS_EXTRA,
                test_constants.NUM_SPS_SUB_SLOT,
        ):
            if is_overflow_block(test_constants, uint8(i)):
                finished_sub_slots = blocks_5[-1].finished_sub_slots
            else:
                finished_sub_slots = []

            sp = get_signage_point(
                test_constants,
                blockchain,
                sb,
                uint128(0),
                uint8(i),
                finished_sub_slots,
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, empty_blockchain, sb,
                                           peak.sub_slot_iters, sp)

        # Test future EOS cache
        store.initialize_genesis_sub_slot()
        blocks = bt.get_consecutive_blocks(
            1, normalized_to_identity=normalized_to_identity)
        await blockchain.receive_block(blocks[-1])
        while True:
            blocks = bt.get_consecutive_blocks(
                1,
                block_list_input=blocks,
                normalized_to_identity=normalized_to_identity)
            await blockchain.receive_block(blocks[-1])
            sb = blockchain.block_record(blocks[-1].header_hash)
            if sb.first_in_sub_slot:
                break
        assert len(blocks) >= 3
        dependant_sub_slots = blocks[-1].finished_sub_slots
        for block in blocks[:-2]:
            sb = blockchain.block_record(block.header_hash)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            peak = sb
            res = store.new_peak(sb, sp_sub_slot, ip_sub_slot, False,
                                 blockchain)
            assert res[0] is None

        assert store.new_finished_sub_slot(dependant_sub_slots[0], blockchain,
                                           peak) is None
        block = blocks[-2]
        sb = blockchain.block_record(block.header_hash)
        sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
            block.header_hash)
        res = store.new_peak(sb, sp_sub_slot, ip_sub_slot, False, blockchain)
        assert res[0] == dependant_sub_slots[0]
        assert res[1] == res[2] == []

        # Test future IP cache
        store.initialize_genesis_sub_slot()
        blocks = bt.get_consecutive_blocks(
            60, normalized_to_identity=normalized_to_identity)

        for block in blocks[:5]:
            await blockchain.receive_block(block)
            sb = blockchain.block_record(block.header_hash)

            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            res = store.new_peak(sb, sp_sub_slot, ip_sub_slot, False,
                                 blockchain)
            assert res[0] is None

        case_0, case_1 = False, False
        for i in range(5, len(blocks) - 1):
            prev_block = blocks[i]
            block = blocks[i + 1]
            new_ip = NewInfusionPointVDF(
                block.reward_chain_block.get_unfinished().get_hash(),
                block.reward_chain_block.challenge_chain_ip_vdf,
                block.challenge_chain_ip_proof,
                block.reward_chain_block.reward_chain_ip_vdf,
                block.reward_chain_ip_proof,
                block.reward_chain_block.infused_challenge_chain_ip_vdf,
                block.infused_challenge_chain_ip_proof,
            )
            store.add_to_future_ip(new_ip)

            await blockchain.receive_block(prev_block)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                prev_block.header_hash)
            sb = blockchain.block_record(prev_block.header_hash)
            res = store.new_peak(sb, sp_sub_slot, ip_sub_slot, False,
                                 blockchain)
            if len(block.finished_sub_slots) == 0:
                case_0 = True
                assert res[2] == [new_ip]
            else:
                case_1 = True
                assert res[2] == []
                found_ips = []
                for ss in block.finished_sub_slots:
                    found_ips += store.new_finished_sub_slot(
                        ss, blockchain, sb)
                assert found_ips == [new_ip]

        # If flaky, increase the number of blocks created
        assert case_0 and case_1
Example #19
    async def test_weight_proof_edge_cases(self, default_400_blocks):
        blocks: List[FullBlock] = default_400_blocks

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=2)

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=1)

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=2)

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=4,
            normalized_to_identity_cc_eos=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            10,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=4,
            normalized_to_identity_icc_eos=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            10,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=4,
            normalized_to_identity_cc_ip=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            10,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=4,
            normalized_to_identity_cc_sp=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
            skip_slots=4)

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            10,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=True,
        )

        blocks: List[FullBlock] = bt.get_consecutive_blocks(
            300,
            block_list_input=blocks,
            seed=b"asdfghjkl",
            force_overflow=False,
        )

        header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(
            blocks)
        wpf = WeightProofHandler(
            test_constants,
            BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
        wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
        assert wp is not None
        wpf = WeightProofHandler(
            test_constants,
            BlockCache(sub_blocks, header_cache, height_to_hash, {}))
        valid, fork_point = wpf.validate_weight_proof_single_proc(wp)

        assert valid
        assert fork_point == 0