Example 1
    def _can_infuse_unfinished_block(self, block: timelord_protocol.NewUnfinishedBlockTimelord) -> Optional[uint64]:
        assert self.last_state is not None
        sub_slot_iters = self.last_state.get_sub_slot_iters()
        difficulty = self.last_state.get_difficulty()
        ip_iters = self.last_state.get_last_ip()
        rc_block = block.reward_chain_block
        try:
            block_sp_iters, block_ip_iters = iters_from_block(
                self.constants,
                rc_block,
                sub_slot_iters,
                difficulty,
            )
        except Exception as e:
            log.warning(f"Received invalid unfinished block: {e}.")
            return None
        block_sp_total_iters = self.last_state.total_iters - ip_iters + block_sp_iters
        if is_overflow_block(self.constants, block.reward_chain_block.signage_point_index):
            block_sp_total_iters -= self.last_state.get_sub_slot_iters()
        found_index = -1
        for index, (rc, total_iters) in enumerate(self.last_state.reward_challenge_cache):
            if rc == block.rc_prev:
                found_index = index
                break
        if found_index == -1:
            log.warning(f"Will not infuse {block.rc_prev} because its reward chain challenge is not in the chain")
            return None

        new_block_iters = uint64(block_ip_iters - ip_iters)
        if len(self.last_state.reward_challenge_cache) > found_index + 1:
            if self.last_state.reward_challenge_cache[found_index + 1][1] < block_sp_total_iters:
                log.warning(
                    f"Will not infuse unfinished block {block.rc_prev} sp total iters {block_sp_total_iters}, "
                    f"because there is another infusion before its SP"
                )
                return None
            if self.last_state.reward_challenge_cache[found_index][1] > block_sp_total_iters:
                if not is_overflow_block(self.constants, block.reward_chain_block.signage_point_index):
                    log.error(
                        f"Will not infuse unfinished block {block.rc_prev}, sp total iters: {block_sp_total_iters}, "
                        f"because its iters are too low"
                    )
                return None

        if new_block_iters > 0:
            return new_block_iters
        return None
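The overflow adjustment is the subtle part of this check: for an overflow block the signage point belongs to the previous sub-slot, so one full sub-slot of iterations is subtracted before the value is compared against the reward-challenge cache. A minimal, self-contained sketch of that arithmetic follows; the constants and iteration counts are made up purely for illustration and are not taken from any real network.

# Hypothetical numbers, chosen only to illustrate the adjustment made above.
SUB_SLOT_ITERS = 1_000_000          # assumed sub-slot size
last_state_total_iters = 5_250_000  # total iters at the timelord's current tip
ip_iters = 250_000                  # iters of the current tip inside its sub-slot
block_sp_iters = 120_000            # signage-point iters of the incoming block

# Base case: the SP sits in the current sub-slot.
block_sp_total_iters = last_state_total_iters - ip_iters + block_sp_iters

# Overflow case: the SP actually belongs to the previous sub-slot, so one full
# sub-slot is subtracted, mirroring the is_overflow_block branch in the method.
is_overflow = True
if is_overflow:
    block_sp_total_iters -= SUB_SLOT_ITERS

print(block_sp_total_iters)  # 4_120_000 with these made-up inputs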
Example 2
    def test_is_overflow_block(self):
        assert not is_overflow_block(test_constants, uint8(27))
        assert not is_overflow_block(test_constants, uint8(28))
        assert is_overflow_block(test_constants, uint8(29))
        assert is_overflow_block(test_constants, uint8(30))
        assert is_overflow_block(test_constants, uint8(31))
        with raises(ValueError):
            assert is_overflow_block(test_constants, uint8(32))
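For reference, a behaviour consistent with these assertions (28 not overflow, 29 through 31 overflow, 32 raising) would look roughly like the sketch below. It is an assumption reconstructed from the test, using NUM_SPS_SUB_SLOT = 32 and NUM_SP_INTERVALS_EXTRA = 3 as hypothetical values; the actual constants and implementation live in the chia codebase.

from dataclasses import dataclass

@dataclass(frozen=True)
class Constants:
    # Hypothetical values consistent with the assertions above.
    NUM_SPS_SUB_SLOT: int = 32
    NUM_SP_INTERVALS_EXTRA: int = 3

def is_overflow_block_sketch(constants: Constants, signage_point_index: int) -> bool:
    # Indices past the end of the sub-slot are invalid.
    if signage_point_index >= constants.NUM_SPS_SUB_SLOT:
        raise ValueError("SP index too high")
    # The last NUM_SP_INTERVALS_EXTRA signage points overflow into the next sub-slot.
    return signage_point_index >= constants.NUM_SPS_SUB_SLOT - constants.NUM_SP_INTERVALS_EXTRA

assert not is_overflow_block_sketch(Constants(), 28)
assert is_overflow_block_sketch(Constants(), 29)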
Example 3
    async def test_basic_store(self,
                               empty_blockchain,
                               normalized_to_identity: bool = False):
        blockchain = empty_blockchain
        blocks = bt.get_consecutive_blocks(
            10,
            seed=b"1234",
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )

        store = FullNodeStore(test_constants)

        unfinished_blocks = []
        for block in blocks:
            unfinished_blocks.append(
                UnfinishedBlock(
                    block.finished_sub_slots,
                    block.reward_chain_block.get_unfinished(),
                    block.challenge_chain_sp_proof,
                    block.reward_chain_sp_proof,
                    block.foliage,
                    block.foliage_transaction_block,
                    block.transactions_info,
                    block.transactions_generator,
                    [],
                ))

        # Add/get candidate block
        assert store.get_candidate_block(
            unfinished_blocks[0].get_hash()) is None
        for height, unf_block in enumerate(unfinished_blocks):
            store.add_candidate_block(unf_block.get_hash(), uint32(height),
                                      unf_block)

        candidate = store.get_candidate_block(unfinished_blocks[4].get_hash())
        assert candidate is not None
        assert candidate[1] == unfinished_blocks[4]
        store.clear_candidate_blocks_below(uint32(8))
        assert store.get_candidate_block(
            unfinished_blocks[5].get_hash()) is None
        assert store.get_candidate_block(
            unfinished_blocks[8].get_hash()) is not None

        # Test seen unfinished blocks
        h_hash_1 = bytes32(token_bytes(32))
        assert not store.seen_unfinished_block(h_hash_1)
        assert store.seen_unfinished_block(h_hash_1)
        store.clear_seen_unfinished_blocks()
        assert not store.seen_unfinished_block(h_hash_1)

        # Add/get unfinished block
        for height, unf_block in enumerate(unfinished_blocks):
            assert store.get_unfinished_block(unf_block.partial_hash) is None
            store.add_unfinished_block(
                uint32(height), unf_block,
                PreValidationResult(None, uint64(123532), None, False))
            assert store.get_unfinished_block(
                unf_block.partial_hash) == unf_block
            store.remove_unfinished_block(unf_block.partial_hash)
            assert store.get_unfinished_block(unf_block.partial_hash) is None

        blocks = bt.get_consecutive_blocks(
            1,
            skip_slots=5,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
        )
        sub_slots = blocks[0].finished_sub_slots
        assert len(sub_slots) == 5

        assert (store.get_finished_sub_slots(
            BlockCache({}),
            None,
            sub_slots[0].challenge_chain.challenge_chain_end_of_slot_vdf.
            challenge,
        ) == [])
        # Test adding non-connecting sub-slots genesis
        assert store.get_sub_slot(test_constants.GENESIS_CHALLENGE) is None
        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.new_finished_sub_slot(sub_slots[1], blockchain, None,
                                           None) is None
        assert store.new_finished_sub_slot(sub_slots[2], blockchain, None,
                                           None) is None

        # Test adding sub-slots after genesis
        assert store.new_finished_sub_slot(sub_slots[0], blockchain, None,
                                           None) is not None
        sub_slot = store.get_sub_slot(sub_slots[0].challenge_chain.get_hash())
        assert sub_slot is not None
        assert sub_slot[0] == sub_slots[0]
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.new_finished_sub_slot(sub_slots[1], blockchain, None,
                                           None) is not None
        for i in range(len(sub_slots)):
            assert store.new_finished_sub_slot(sub_slots[i], blockchain, None,
                                               None) is not None
            slot_i = store.get_sub_slot(
                sub_slots[i].challenge_chain.get_hash())
            assert slot_i is not None
            assert slot_i[0] == sub_slots[i]

        assert store.get_finished_sub_slots(
            BlockCache({}), None,
            sub_slots[-1].challenge_chain.get_hash()) == sub_slots
        assert store.get_finished_sub_slots(BlockCache(
            {}), None, std_hash(b"not a valid hash")) is None

        assert (store.get_finished_sub_slots(
            BlockCache({}), None,
            sub_slots[-2].challenge_chain.get_hash()) == sub_slots[:-1])

        # Test adding genesis peak
        await _validate_and_add_block(blockchain, blocks[0])
        peak = blockchain.get_peak()
        peak_full_block = await blockchain.get_full_peak()
        if peak.overflow:
            store.new_peak(peak, peak_full_block, sub_slots[-2], sub_slots[-1],
                           None, blockchain)
        else:
            store.new_peak(peak, peak_full_block, None, sub_slots[-1], None,
                           blockchain)

        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[2].challenge_chain.get_hash()) is None
        if peak.overflow:
            slot_3 = store.get_sub_slot(
                sub_slots[3].challenge_chain.get_hash())
            assert slot_3 is not None
            assert slot_3[0] == sub_slots[3]
        else:
            assert store.get_sub_slot(
                sub_slots[3].challenge_chain.get_hash()) is None

        slot_4 = store.get_sub_slot(sub_slots[4].challenge_chain.get_hash())
        assert slot_4 is not None
        assert slot_4[0] == sub_slots[4]

        assert (store.get_finished_sub_slots(
            blockchain,
            peak,
            sub_slots[-1].challenge_chain.get_hash(),
        ) == [])

        # Test adding non genesis peak directly
        blocks = bt.get_consecutive_blocks(
            2,
            skip_slots=2,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        blocks = bt.get_consecutive_blocks(
            3,
            block_list_input=blocks,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for block in blocks:
            await _validate_and_add_block_no_error(blockchain, block)
            sb = blockchain.block_record(block.header_hash)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None,
                                 blockchain)
            assert res.added_eos is None

        # Add reorg blocks
        blocks_reorg = bt.get_consecutive_blocks(
            20,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for block in blocks_reorg:
            peak = blockchain.get_peak()
            assert peak is not None

            await _validate_and_add_block_no_error(blockchain, block)

            if blockchain.get_peak().header_hash == block.header_hash:
                sb = blockchain.block_record(block.header_hash)
                fork = find_fork_point_in_chain(
                    blockchain, peak, blockchain.block_record(sb.header_hash))
                if fork > 0:
                    fork_block = blockchain.height_to_block_record(fork)
                else:
                    fork_block = None
                sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                    block.header_hash)
                res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot,
                                     fork_block, blockchain)
                assert res.added_eos is None

        # Add slots to the end
        blocks_2 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks_reorg,
            skip_slots=2,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for slot in blocks_2[-1].finished_sub_slots:
            store.new_finished_sub_slot(slot, blockchain,
                                        blockchain.get_peak(), await
                                        blockchain.get_full_peak())

        assert store.get_sub_slot(
            sub_slots[3].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[4].challenge_chain.get_hash()) is None

        # Test adding signage point
        peak = blockchain.get_peak()
        ss_start_iters = peak.ip_sub_slot_total_iters(test_constants)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                ss_start_iters,
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(uint8(i), blockchain, peak,
                                           peak.sub_slot_iters, sp)

        blocks = blocks_reorg
        while True:
            blocks = bt.get_consecutive_blocks(
                1,
                block_list_input=blocks,
                normalized_to_identity_cc_eos=normalized_to_identity,
                normalized_to_identity_icc_eos=normalized_to_identity,
                normalized_to_identity_cc_ip=normalized_to_identity,
                normalized_to_identity_cc_sp=normalized_to_identity,
            )
            await _validate_and_add_block(blockchain, blocks[-1])
            if blockchain.get_peak().header_hash == blocks[-1].header_hash:
                sb = blockchain.block_record(blocks[-1].header_hash)
                fork = find_fork_point_in_chain(
                    blockchain, peak, blockchain.block_record(sb.header_hash))
                if fork > 0:
                    fork_block = blockchain.height_to_block_record(fork)
                else:
                    fork_block = None
                sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                    blocks[-1].header_hash)

                res = store.new_peak(sb, blocks[-1], sp_sub_slot, ip_sub_slot,
                                     fork_block, blockchain)
                assert res.added_eos is None
                if sb.overflow and sp_sub_slot is not None:
                    assert sp_sub_slot != ip_sub_slot
                    break

        peak = blockchain.get_peak()
        assert peak.overflow
        # Overflow peak should result in 2 finished sub slots
        assert len(store.finished_sub_slots) == 2

        # Add slots to the end, except for the last one, which we will use to test invalid SP
        blocks_2 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            skip_slots=3,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for slot in blocks_2[-1].finished_sub_slots[:-1]:
            store.new_finished_sub_slot(slot, blockchain,
                                        blockchain.get_peak(), await
                                        blockchain.get_full_peak())
        finished_sub_slots = blocks_2[-1].finished_sub_slots
        assert len(store.finished_sub_slots) == 4

        # Test adding signage points for overflow blocks (sp_sub_slot)
        ss_start_iters = peak.sp_sub_slot_total_iters(test_constants)
        # for i in range(peak.signage_point_index, test_constants.NUM_SPS_SUB_SLOT):
        #     if i < peak.signage_point_index:
        #         continue
        #     latest = peak
        #     while latest.total_iters > peak.sp_total_iters(test_constants):
        #         latest = blockchain.blocks[latest.prev_hash]
        #     sp = get_signage_point(
        #         test_constants,
        #         blockchain.blocks,
        #         latest,
        #         ss_start_iters,
        #         uint8(i),
        #         [],
        #         peak.sub_slot_iters,
        #     )
        #     assert store.new_signage_point(i, blockchain.blocks, peak, peak.sub_slot_iters, sp)

        # Test adding signage points for overflow blocks (ip_sub_slot)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                peak.ip_sub_slot_total_iters(test_constants),
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(uint8(i), blockchain, peak,
                                           peak.sub_slot_iters, sp)

        # Test adding future signage point, a few slots forward (good)
        saved_sp_hash = None
        for slot_offset in range(1, len(finished_sub_slots)):
            for i in range(
                    1,
                    test_constants.NUM_SPS_SUB_SLOT -
                    test_constants.NUM_SP_INTERVALS_EXTRA,
            ):
                sp = get_signage_point(
                    test_constants,
                    blockchain,
                    peak,
                    peak.ip_sub_slot_total_iters(test_constants) +
                    slot_offset * peak.sub_slot_iters,
                    uint8(i),
                    finished_sub_slots[:slot_offset],
                    peak.sub_slot_iters,
                )
                assert sp.cc_vdf is not None
                saved_sp_hash = sp.cc_vdf.output.get_hash()
                assert store.new_signage_point(uint8(i), blockchain, peak,
                                               peak.sub_slot_iters, sp)

        # Test adding future signage point (bad)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                peak.ip_sub_slot_total_iters(test_constants) +
                len(finished_sub_slots) * peak.sub_slot_iters,
                uint8(i),
                finished_sub_slots[:len(finished_sub_slots)],
                peak.sub_slot_iters,
            )
            assert not store.new_signage_point(uint8(i), blockchain, peak,
                                               peak.sub_slot_iters, sp)

        # Test adding past signage point
        sp = SignagePoint(
            blocks[1].reward_chain_block.challenge_chain_sp_vdf,
            blocks[1].challenge_chain_sp_proof,
            blocks[1].reward_chain_block.reward_chain_sp_vdf,
            blocks[1].reward_chain_sp_proof,
        )
        assert not store.new_signage_point(
            blocks[1].reward_chain_block.signage_point_index,
            blockchain,
            peak,
            blockchain.block_record(
                blocks[1].header_hash).sp_sub_slot_total_iters(test_constants),
            sp,
        )

        # Get signage point by index
        assert (store.get_signage_point_by_index(
            finished_sub_slots[0].challenge_chain.get_hash(),
            uint8(4),
            finished_sub_slots[0].reward_chain.get_hash(),
        ) is not None)

        assert (store.get_signage_point_by_index(
            finished_sub_slots[0].challenge_chain.get_hash(), uint8(4),
            std_hash(b"1")) is None)

        # Get signage point by hash
        # TODO: address hint error and remove ignore
        #       error: Argument 1 to "get_signage_point" of "FullNodeStore" has incompatible type "Optional[bytes32]";
        #       expected "bytes32"  [arg-type]
        assert store.get_signage_point(
            saved_sp_hash) is not None  # type: ignore[arg-type]
        assert store.get_signage_point(std_hash(b"2")) is None

        # Test adding signage points before genesis
        store.initialize_genesis_sub_slot()
        assert len(store.finished_sub_slots) == 1
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                BlockCache({}, {}),
                None,
                uint128(0),
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(uint8(i), blockchain, None,
                                           peak.sub_slot_iters, sp)

        blocks_3 = bt.get_consecutive_blocks(
            1,
            skip_slots=2,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for slot in blocks_3[-1].finished_sub_slots:
            store.new_finished_sub_slot(slot, blockchain, None, None)
        assert len(store.finished_sub_slots) == 3
        finished_sub_slots = blocks_3[-1].finished_sub_slots

        for slot_offset in range(1, len(finished_sub_slots) + 1):
            for i in range(
                    1,
                    test_constants.NUM_SPS_SUB_SLOT -
                    test_constants.NUM_SP_INTERVALS_EXTRA,
            ):
                sp = get_signage_point(
                    test_constants,
                    BlockCache({}, {}),
                    None,
                    slot_offset * peak.sub_slot_iters,
                    uint8(i),
                    finished_sub_slots[:slot_offset],
                    peak.sub_slot_iters,
                )
                assert store.new_signage_point(uint8(i), blockchain, None,
                                               peak.sub_slot_iters, sp)

        # Test adding signage points after genesis
        blocks_4 = bt.get_consecutive_blocks(
            1,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        blocks_5 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks_4,
            skip_slots=1,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )

        # If this is not the case, fix test to find a block that is
        assert (blocks_4[-1].reward_chain_block.signage_point_index <
                test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA)
        await _validate_and_add_block(
            blockchain,
            blocks_4[-1],
            expected_result=ReceiveBlockResult.ADDED_AS_ORPHAN)

        sb = blockchain.block_record(blocks_4[-1].header_hash)
        store.new_peak(sb, blocks_4[-1], None, None, None, blockchain)
        for i in range(
                sb.signage_point_index + test_constants.NUM_SP_INTERVALS_EXTRA,
                test_constants.NUM_SPS_SUB_SLOT,
        ):
            if is_overflow_block(test_constants, uint8(i)):
                finished_sub_slots = blocks_5[-1].finished_sub_slots
            else:
                finished_sub_slots = []

            sp = get_signage_point(
                test_constants,
                blockchain,
                sb,
                uint128(0),
                uint8(i),
                finished_sub_slots,
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(uint8(i), empty_blockchain, sb,
                                           peak.sub_slot_iters, sp)

        # Test future EOS cache
        store.initialize_genesis_sub_slot()
        blocks = bt.get_consecutive_blocks(
            1,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        await _validate_and_add_block_no_error(blockchain, blocks[-1])
        while True:
            blocks = bt.get_consecutive_blocks(
                1,
                block_list_input=blocks,
                normalized_to_identity_cc_eos=normalized_to_identity,
                normalized_to_identity_icc_eos=normalized_to_identity,
                normalized_to_identity_cc_ip=normalized_to_identity,
                normalized_to_identity_cc_sp=normalized_to_identity,
            )
            await _validate_and_add_block_no_error(blockchain, blocks[-1])
            sb = blockchain.block_record(blocks[-1].header_hash)
            if sb.first_in_sub_slot:
                break
        assert len(blocks) >= 2
        dependant_sub_slots = blocks[-1].finished_sub_slots
        peak = blockchain.get_peak()
        peak_full_block = await blockchain.get_full_peak()
        for block in blocks[:-2]:
            sb = blockchain.block_record(block.header_hash)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            peak = sb
            peak_full_block = block
            res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None,
                                 blockchain)
            assert res.added_eos is None

        assert store.new_finished_sub_slot(dependant_sub_slots[0], blockchain,
                                           peak, peak_full_block) is None
        block = blocks[-2]
        sb = blockchain.block_record(block.header_hash)
        sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
            block.header_hash)
        res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None,
                             blockchain)
        assert res.added_eos == dependant_sub_slots[0]
        assert res.new_signage_points == res.new_infusion_points == []

        # Test future IP cache
        store.initialize_genesis_sub_slot()
        blocks = bt.get_consecutive_blocks(
            60,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
        )

        for block in blocks[:5]:
            await _validate_and_add_block_no_error(blockchain, block)
            sb = blockchain.block_record(block.header_hash)

            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None,
                                 blockchain)
            assert res.added_eos is None

        case_0, case_1 = False, False
        for i in range(5, len(blocks) - 1):
            prev_block = blocks[i]
            block = blocks[i + 1]
            new_ip = NewInfusionPointVDF(
                block.reward_chain_block.get_unfinished().get_hash(),
                block.reward_chain_block.challenge_chain_ip_vdf,
                block.challenge_chain_ip_proof,
                block.reward_chain_block.reward_chain_ip_vdf,
                block.reward_chain_ip_proof,
                block.reward_chain_block.infused_challenge_chain_ip_vdf,
                block.infused_challenge_chain_ip_proof,
            )
            store.add_to_future_ip(new_ip)

            await _validate_and_add_block_no_error(blockchain, prev_block)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                prev_block.header_hash)
            sb = blockchain.block_record(prev_block.header_hash)
            res = store.new_peak(sb, prev_block, sp_sub_slot, ip_sub_slot,
                                 None, blockchain)
            if len(block.finished_sub_slots) == 0:
                case_0 = True
                assert res.new_infusion_points == [new_ip]
            else:
                case_1 = True
                assert res.new_infusion_points == []
                found_ips: List[timelord_protocol.NewInfusionPointVDF] = []
                for ss in block.finished_sub_slots:
                    ipvdf = store.new_finished_sub_slot(
                        ss, blockchain, sb, prev_block)
                    assert ipvdf is not None
                    found_ips += ipvdf
                assert found_ips == [new_ip]

        # If flaky, increase the number of blocks created
        assert case_0 and case_1

        # Try to get two blocks in the same slot, such that we have
        # SP, B2 SP .... SP B1
        #     i2 .........  i1
        # Then do a reorg up to B2, removing all signage points after B2, but not before
        log.warning(f"Adding blocks up to {blocks[-1]}")
        for block in blocks:
            await _validate_and_add_block_no_error(blockchain, block)

        log.warning(f"Starting loop")
        while True:
            log.warning("Looping")
            blocks = bt.get_consecutive_blocks(1,
                                               block_list_input=blocks,
                                               skip_slots=1)
            await _validate_and_add_block_no_error(blockchain, blocks[-1])
            peak = blockchain.get_peak()
            sub_slots = await blockchain.get_sp_and_ip_sub_slots(
                peak.header_hash)
            store.new_peak(peak, blocks[-1], sub_slots[0], sub_slots[1], None,
                           blockchain)

            blocks = bt.get_consecutive_blocks(
                2, block_list_input=blocks, guarantee_transaction_block=True)

            i3 = blocks[-3].reward_chain_block.signage_point_index
            i2 = blocks[-2].reward_chain_block.signage_point_index
            i1 = blocks[-1].reward_chain_block.signage_point_index
            if (len(blocks[-2].finished_sub_slots) == len(
                    blocks[-1].finished_sub_slots) == 0
                    and not is_overflow_block(test_constants,
                                              signage_point_index=i2)
                    and not is_overflow_block(test_constants,
                                              signage_point_index=i1)
                    and i2 > i3 + 3 and i1 > (i2 + 3)):
                # We hit all the conditions that we want
                all_sps: List[Optional[SignagePoint]] = [
                    None
                ] * test_constants.NUM_SPS_SUB_SLOT

                def assert_sp_none(sp_index: int, is_none: bool):
                    sp_to_check: Optional[SignagePoint] = all_sps[sp_index]
                    assert sp_to_check is not None
                    assert sp_to_check.cc_vdf is not None
                    fetched = store.get_signage_point(
                        sp_to_check.cc_vdf.output.get_hash())
                    assert (fetched is None) == is_none
                    if fetched is not None:
                        assert fetched == sp_to_check

                for i in range(i3 + 1, test_constants.NUM_SPS_SUB_SLOT - 3):
                    finished_sub_slots = []
                    sp = get_signage_point(
                        test_constants,
                        blockchain,
                        peak,
                        uint128(peak.ip_sub_slot_total_iters(bt.constants)),
                        uint8(i),
                        finished_sub_slots,
                        peak.sub_slot_iters,
                    )
                    all_sps[i] = sp
                    assert store.new_signage_point(uint8(i), blockchain, peak,
                                                   peak.sub_slot_iters, sp)

                # Adding a new peak clears all SPs after that peak
                await _validate_and_add_block_no_error(blockchain, blocks[-2])
                peak = blockchain.get_peak()
                sub_slots = await blockchain.get_sp_and_ip_sub_slots(
                    peak.header_hash)
                store.new_peak(peak, blocks[-2], sub_slots[0], sub_slots[1],
                               None, blockchain)

                assert_sp_none(i2, False)
                assert_sp_none(i2 + 1, False)
                assert_sp_none(i1, True)
                assert_sp_none(i1 + 1, True)
                assert_sp_none(i1 + 4, True)

                for i in range(i2, test_constants.NUM_SPS_SUB_SLOT):
                    if is_overflow_block(test_constants, uint8(i)):
                        blocks_alt = bt.get_consecutive_blocks(
                            1, block_list_input=blocks[:-1], skip_slots=1)
                        finished_sub_slots = blocks_alt[-1].finished_sub_slots
                    else:
                        finished_sub_slots = []
                    sp = get_signage_point(
                        test_constants,
                        blockchain,
                        peak,
                        uint128(peak.ip_sub_slot_total_iters(bt.constants)),
                        uint8(i),
                        finished_sub_slots,
                        peak.sub_slot_iters,
                    )
                    all_sps[i] = sp
                    assert store.new_signage_point(uint8(i), blockchain, peak,
                                                   peak.sub_slot_iters, sp)

                assert_sp_none(i2, False)
                assert_sp_none(i2 + 1, False)
                assert_sp_none(i1, False)
                assert_sp_none(i1 + 1, False)
                assert_sp_none(i1 + 4, False)

                await _validate_and_add_block_no_error(blockchain, blocks[-1])
                peak = blockchain.get_peak()
                sub_slots = await blockchain.get_sp_and_ip_sub_slots(
                    peak.header_hash)

                # Do a reorg, which should remove everything after B2
                store.new_peak(
                    peak,
                    blocks[-1],
                    sub_slots[0],
                    sub_slots[1],
                    (await
                     blockchain.get_block_records_at([blocks[-2].height]))[0],
                    blockchain,
                )

                assert_sp_none(i2, False)
                assert_sp_none(i2 + 1, False)
                assert_sp_none(i1, True)
                assert_sp_none(i1 + 1, True)
                assert_sp_none(i1 + 4, True)
                break
            else:
                for block in blocks[-2:]:
                    await _validate_and_add_block_no_error(blockchain, block)
    async def test_basic_store(self,
                               empty_blockchain,
                               normalized_to_identity: bool = False):
        blockchain = empty_blockchain
        blocks = bt.get_consecutive_blocks(
            10,
            seed=b"1234",
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )

        store = await FullNodeStore.create(test_constants)

        unfinished_blocks = []
        for block in blocks:
            unfinished_blocks.append(
                UnfinishedBlock(
                    block.finished_sub_slots,
                    block.reward_chain_block.get_unfinished(),
                    block.challenge_chain_sp_proof,
                    block.reward_chain_sp_proof,
                    block.foliage,
                    block.foliage_transaction_block,
                    block.transactions_info,
                    block.transactions_generator,
                    [],
                ))

        # Add/get candidate block
        assert store.get_candidate_block(
            unfinished_blocks[0].get_hash()) is None
        for height, unf_block in enumerate(unfinished_blocks):
            store.add_candidate_block(unf_block.get_hash(), height, unf_block)

        assert store.get_candidate_block(
            unfinished_blocks[4].get_hash()) == unfinished_blocks[4]
        store.clear_candidate_blocks_below(uint32(8))
        assert store.get_candidate_block(
            unfinished_blocks[5].get_hash()) is None
        assert store.get_candidate_block(
            unfinished_blocks[8].get_hash()) is not None

        # Test seen unfinished blocks
        h_hash_1 = bytes32(token_bytes(32))
        assert not store.seen_unfinished_block(h_hash_1)
        assert store.seen_unfinished_block(h_hash_1)
        store.clear_seen_unfinished_blocks()
        assert not store.seen_unfinished_block(h_hash_1)

        # Add/get unfinished block
        for height, unf_block in enumerate(unfinished_blocks):
            assert store.get_unfinished_block(unf_block.partial_hash) is None
            store.add_unfinished_block(
                height, unf_block,
                PreValidationResult(None, uint64(123532), None))
            assert store.get_unfinished_block(
                unf_block.partial_hash) == unf_block
            store.remove_unfinished_block(unf_block.partial_hash)
            assert store.get_unfinished_block(unf_block.partial_hash) is None

        blocks = bt.get_consecutive_blocks(
            1,
            skip_slots=5,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
        )
        sub_slots = blocks[0].finished_sub_slots
        assert len(sub_slots) == 5

        assert (store.get_finished_sub_slots(
            BlockCache({}),
            None,
            sub_slots[0].challenge_chain.challenge_chain_end_of_slot_vdf.
            challenge,
        ) == [])
        # Test adding non-connecting sub-slots genesis
        assert store.get_sub_slot(test_constants.GENESIS_CHALLENGE) is None
        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.new_finished_sub_slot(sub_slots[1], {}, None,
                                           None) is None
        assert store.new_finished_sub_slot(sub_slots[2], {}, None,
                                           None) is None

        # Test adding sub-slots after genesis
        assert store.new_finished_sub_slot(sub_slots[0], {}, None,
                                           None) is not None
        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash())[0] == sub_slots[0]
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.new_finished_sub_slot(sub_slots[1], {}, None,
                                           None) is not None
        for i in range(len(sub_slots)):
            assert store.new_finished_sub_slot(sub_slots[i], {}, None,
                                               None) is not None
            assert store.get_sub_slot(
                sub_slots[i].challenge_chain.get_hash())[0] == sub_slots[i]

        assert store.get_finished_sub_slots(
            BlockCache({}), None,
            sub_slots[-1].challenge_chain.get_hash()) == sub_slots
        assert store.get_finished_sub_slots(BlockCache(
            {}), None, std_hash(b"not a valid hash")) is None

        assert (store.get_finished_sub_slots(
            BlockCache({}), None,
            sub_slots[-2].challenge_chain.get_hash()) == sub_slots[:-1])

        # Test adding genesis peak
        await blockchain.receive_block(blocks[0])
        peak = blockchain.get_peak()
        peak_full_block = await blockchain.get_full_peak()
        if peak.overflow:
            store.new_peak(peak, peak_full_block, sub_slots[-2], sub_slots[-1],
                           False, {})
        else:
            store.new_peak(peak, peak_full_block, None, sub_slots[-1], False,
                           {})

        assert store.get_sub_slot(
            sub_slots[0].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[1].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[2].challenge_chain.get_hash()) is None
        if peak.overflow:
            assert store.get_sub_slot(
                sub_slots[3].challenge_chain.get_hash())[0] == sub_slots[3]
        else:
            assert store.get_sub_slot(
                sub_slots[3].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[4].challenge_chain.get_hash())[0] == sub_slots[4]

        assert (store.get_finished_sub_slots(
            blockchain,
            peak,
            sub_slots[-1].challenge_chain.get_hash(),
        ) == [])

        # Test adding non genesis peak directly
        blocks = bt.get_consecutive_blocks(
            2,
            skip_slots=2,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        blocks = bt.get_consecutive_blocks(
            3,
            block_list_input=blocks,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for block in blocks:
            await blockchain.receive_block(block)
            sb = blockchain.block_record(block.header_hash)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, False,
                                 blockchain)
            assert res[0] is None

        # Add reorg blocks
        blocks_reorg = bt.get_consecutive_blocks(
            20,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for block in blocks_reorg:
            res, _, _ = await blockchain.receive_block(block)
            if res == ReceiveBlockResult.NEW_PEAK:
                sb = blockchain.block_record(block.header_hash)
                sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                    block.header_hash)
                res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, True,
                                     blockchain)
                assert res[0] is None

        # Add slots to the end
        blocks_2 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks_reorg,
            skip_slots=2,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for slot in blocks_2[-1].finished_sub_slots:
            store.new_finished_sub_slot(slot, blockchain,
                                        blockchain.get_peak(), await
                                        blockchain.get_full_peak())

        assert store.get_sub_slot(
            sub_slots[3].challenge_chain.get_hash()) is None
        assert store.get_sub_slot(
            sub_slots[4].challenge_chain.get_hash()) is None

        # Test adding signage point
        peak = blockchain.get_peak()
        ss_start_iters = peak.ip_sub_slot_total_iters(test_constants)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                ss_start_iters,
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, blockchain, peak,
                                           peak.sub_slot_iters, sp)

        blocks = blocks_reorg
        while True:
            blocks = bt.get_consecutive_blocks(
                1,
                block_list_input=blocks,
                normalized_to_identity_cc_eos=normalized_to_identity,
                normalized_to_identity_icc_eos=normalized_to_identity,
                normalized_to_identity_cc_ip=normalized_to_identity,
                normalized_to_identity_cc_sp=normalized_to_identity,
            )
            res, _, _ = await blockchain.receive_block(blocks[-1])
            if res == ReceiveBlockResult.NEW_PEAK:
                sb = blockchain.block_record(blocks[-1].header_hash)
                sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                    blocks[-1].header_hash)

                res = store.new_peak(sb, blocks[-1], sp_sub_slot, ip_sub_slot,
                                     True, blockchain)
                assert res[0] is None
                if sb.overflow and sp_sub_slot is not None:
                    assert sp_sub_slot != ip_sub_slot
                    break

        peak = blockchain.get_peak()
        assert peak.overflow
        # Overflow peak should result in 2 finished sub slots
        assert len(store.finished_sub_slots) == 2

        # Add slots to the end, except for the last one, which we will use to test invalid SP
        blocks_2 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks,
            skip_slots=3,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for slot in blocks_2[-1].finished_sub_slots[:-1]:
            store.new_finished_sub_slot(slot, blockchain,
                                        blockchain.get_peak(), await
                                        blockchain.get_full_peak())
        finished_sub_slots = blocks_2[-1].finished_sub_slots
        assert len(store.finished_sub_slots) == 4

        # Test adding signage points for overflow blocks (sp_sub_slot)
        ss_start_iters = peak.sp_sub_slot_total_iters(test_constants)
        # for i in range(peak.signage_point_index, test_constants.NUM_SPS_SUB_SLOT):
        #     if i < peak.signage_point_index:
        #         continue
        #     latest = peak
        #     while latest.total_iters > peak.sp_total_iters(test_constants):
        #         latest = blockchain.blocks[latest.prev_hash]
        #     sp = get_signage_point(
        #         test_constants,
        #         blockchain.blocks,
        #         latest,
        #         ss_start_iters,
        #         uint8(i),
        #         [],
        #         peak.sub_slot_iters,
        #     )
        #     assert store.new_signage_point(i, blockchain.blocks, peak, peak.sub_slot_iters, sp)

        # Test adding signage points for overflow blocks (ip_sub_slot)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                peak.ip_sub_slot_total_iters(test_constants),
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, blockchain, peak,
                                           peak.sub_slot_iters, sp)

        # Test adding future signage point, a few slots forward (good)
        saved_sp_hash = None
        for slot_offset in range(1, len(finished_sub_slots)):
            for i in range(
                    1,
                    test_constants.NUM_SPS_SUB_SLOT -
                    test_constants.NUM_SP_INTERVALS_EXTRA,
            ):
                sp = get_signage_point(
                    test_constants,
                    blockchain,
                    peak,
                    peak.ip_sub_slot_total_iters(test_constants) +
                    slot_offset * peak.sub_slot_iters,
                    uint8(i),
                    finished_sub_slots[:slot_offset],
                    peak.sub_slot_iters,
                )
                assert sp.cc_vdf is not None
                saved_sp_hash = sp.cc_vdf.output.get_hash()
                assert store.new_signage_point(i, blockchain, peak,
                                               peak.sub_slot_iters, sp)

        # Test adding future signage point (bad)
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                blockchain,
                peak,
                peak.ip_sub_slot_total_iters(test_constants) +
                len(finished_sub_slots) * peak.sub_slot_iters,
                uint8(i),
                finished_sub_slots[:len(finished_sub_slots)],
                peak.sub_slot_iters,
            )
            assert not store.new_signage_point(i, blockchain, peak,
                                               peak.sub_slot_iters, sp)

        # Test adding past signage point
        sp = SignagePoint(
            blocks[1].reward_chain_block.challenge_chain_sp_vdf,
            blocks[1].challenge_chain_sp_proof,
            blocks[1].reward_chain_block.reward_chain_sp_vdf,
            blocks[1].reward_chain_sp_proof,
        )
        assert not store.new_signage_point(
            blocks[1].reward_chain_block.signage_point_index,
            {},
            peak,
            blockchain.block_record(
                blocks[1].header_hash).sp_sub_slot_total_iters(test_constants),
            sp,
        )

        # Get signage point by index
        assert (store.get_signage_point_by_index(
            finished_sub_slots[0].challenge_chain.get_hash(),
            4,
            finished_sub_slots[0].reward_chain.get_hash(),
        ) is not None)

        assert (store.get_signage_point_by_index(
            finished_sub_slots[0].challenge_chain.get_hash(), 4,
            std_hash(b"1")) is None)

        # Get signage point by hash
        assert store.get_signage_point(saved_sp_hash) is not None
        assert store.get_signage_point(std_hash(b"2")) is None

        # Test adding signage points before genesis
        store.initialize_genesis_sub_slot()
        assert len(store.finished_sub_slots) == 1
        for i in range(
                1, test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA):
            sp = get_signage_point(
                test_constants,
                BlockCache({}, {}),
                None,
                uint128(0),
                uint8(i),
                [],
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, {}, None, peak.sub_slot_iters,
                                           sp)

        blocks_3 = bt.get_consecutive_blocks(
            1,
            skip_slots=2,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        for slot in blocks_3[-1].finished_sub_slots:
            store.new_finished_sub_slot(slot, {}, None, None)
        assert len(store.finished_sub_slots) == 3
        finished_sub_slots = blocks_3[-1].finished_sub_slots

        for slot_offset in range(1, len(finished_sub_slots) + 1):
            for i in range(
                    1,
                    test_constants.NUM_SPS_SUB_SLOT -
                    test_constants.NUM_SP_INTERVALS_EXTRA,
            ):
                sp = get_signage_point(
                    test_constants,
                    BlockCache({}, {}),
                    None,
                    slot_offset * peak.sub_slot_iters,
                    uint8(i),
                    finished_sub_slots[:slot_offset],
                    peak.sub_slot_iters,
                )
                assert store.new_signage_point(i, {}, None,
                                               peak.sub_slot_iters, sp)

        # Test adding signage points after genesis
        blocks_4 = bt.get_consecutive_blocks(
            1,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        blocks_5 = bt.get_consecutive_blocks(
            1,
            block_list_input=blocks_4,
            skip_slots=1,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )

        # If this is not the case, fix test to find a block that is
        assert (blocks_4[-1].reward_chain_block.signage_point_index <
                test_constants.NUM_SPS_SUB_SLOT -
                test_constants.NUM_SP_INTERVALS_EXTRA)
        await blockchain.receive_block(blocks_4[-1])
        sb = blockchain.block_record(blocks_4[-1].header_hash)
        store.new_peak(sb, blocks_4[-1], None, None, False, blockchain)
        for i in range(
                sb.signage_point_index + test_constants.NUM_SP_INTERVALS_EXTRA,
                test_constants.NUM_SPS_SUB_SLOT,
        ):
            if is_overflow_block(test_constants, uint8(i)):
                finished_sub_slots = blocks_5[-1].finished_sub_slots
            else:
                finished_sub_slots = []

            sp = get_signage_point(
                test_constants,
                blockchain,
                sb,
                uint128(0),
                uint8(i),
                finished_sub_slots,
                peak.sub_slot_iters,
            )
            assert store.new_signage_point(i, empty_blockchain, sb,
                                           peak.sub_slot_iters, sp)

        # Test future EOS cache
        store.initialize_genesis_sub_slot()
        blocks = bt.get_consecutive_blocks(
            1,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
        )
        await blockchain.receive_block(blocks[-1])
        while True:
            blocks = bt.get_consecutive_blocks(
                1,
                block_list_input=blocks,
                normalized_to_identity_cc_eos=normalized_to_identity,
                normalized_to_identity_icc_eos=normalized_to_identity,
                normalized_to_identity_cc_ip=normalized_to_identity,
                normalized_to_identity_cc_sp=normalized_to_identity,
            )
            await blockchain.receive_block(blocks[-1])
            sb = blockchain.block_record(blocks[-1].header_hash)
            if sb.first_in_sub_slot:
                break
        assert len(blocks) >= 2
        dependant_sub_slots = blocks[-1].finished_sub_slots
        peak = blockchain.get_peak()
        peak_full_block = await blockchain.get_full_peak()
        for block in blocks[:-2]:
            sb = blockchain.block_record(block.header_hash)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            peak = sb
            peak_full_block = block
            res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, False,
                                 blockchain)
            assert res[0] is None

        assert store.new_finished_sub_slot(dependant_sub_slots[0], blockchain,
                                           peak, peak_full_block) is None
        block = blocks[-2]
        sb = blockchain.block_record(block.header_hash)
        sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
            block.header_hash)
        res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, False,
                             blockchain)
        assert res[0] == dependant_sub_slots[0]
        assert res[1] == res[2] == []

        # Test future IP cache
        store.initialize_genesis_sub_slot()
        blocks = bt.get_consecutive_blocks(
            60,
            normalized_to_identity_cc_ip=normalized_to_identity,
            normalized_to_identity_cc_sp=normalized_to_identity,
            normalized_to_identity_cc_eos=normalized_to_identity,
            normalized_to_identity_icc_eos=normalized_to_identity,
        )

        for block in blocks[:5]:
            await blockchain.receive_block(block)
            sb = blockchain.block_record(block.header_hash)

            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                block.header_hash)
            res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, False,
                                 blockchain)
            assert res[0] is None

        case_0, case_1 = False, False
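        # case_0: the next block has no finished sub-slots, so new_peak returns the cached future
        # IP directly; case_1: the block starts new sub-slots, so the IP only surfaces once those
        # sub-slots are passed to new_finished_sub_slot.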
        for i in range(5, len(blocks) - 1):
            prev_block = blocks[i]
            block = blocks[i + 1]
            new_ip = NewInfusionPointVDF(
                block.reward_chain_block.get_unfinished().get_hash(),
                block.reward_chain_block.challenge_chain_ip_vdf,
                block.challenge_chain_ip_proof,
                block.reward_chain_block.reward_chain_ip_vdf,
                block.reward_chain_ip_proof,
                block.reward_chain_block.infused_challenge_chain_ip_vdf,
                block.infused_challenge_chain_ip_proof,
            )
            store.add_to_future_ip(new_ip)

            await blockchain.receive_block(prev_block)
            sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(
                prev_block.header_hash)
            sb = blockchain.block_record(prev_block.header_hash)
            res = store.new_peak(sb, prev_block, sp_sub_slot, ip_sub_slot,
                                 False, blockchain)
            if len(block.finished_sub_slots) == 0:
                case_0 = True
                assert res[2] == [new_ip]
            else:
                case_1 = True
                assert res[2] == []
                found_ips = []
                for ss in block.finished_sub_slots:
                    found_ips += store.new_finished_sub_slot(
                        ss, blockchain, sb, prev_block)
                assert found_ips == [new_ip]

        # If flaky, increase the number of blocks created
        assert case_0 and case_1
Esempio n. 5
0
    async def _check_for_new_ip(self, iter_to_look_for: uint64):
        if len(self.unfinished_blocks) == 0:
            return None
        infusion_iters = [
            iteration for iteration, t in self.iteration_to_proof_type.items() if t == IterationType.INFUSION_POINT
        ]
        for iteration in infusion_iters:
            if iteration != iter_to_look_for:
                continue
            proofs_with_iter = [
                (chain, info, proof)
                for chain, info, proof, label in self.proofs_finished
                if info.number_of_iterations == iteration and label == self.num_resets
            ]
            if self.last_state.get_challenge(Chain.INFUSED_CHALLENGE_CHAIN) is not None:
                chain_count = 3
            else:
                chain_count = 2
            if len(proofs_with_iter) == chain_count:
                block = None
                ip_iters = None
                for unfinished_block in self.unfinished_blocks:
                    try:
                        _, ip_iters = iters_from_block(
                            self.constants,
                            unfinished_block.reward_chain_block,
                            self.last_state.get_sub_slot_iters(),
                            self.last_state.get_difficulty(),
                        )
                    except Exception as e:
                        log.error(f"Error {e}")
                        continue
                    if ip_iters - self.last_state.get_last_ip() == iteration:
                        block = unfinished_block
                        break
                assert ip_iters is not None
                if block is not None:
                    ip_total_iters = self.last_state.get_total_iters() + iteration
                    challenge = block.reward_chain_block.get_hash()
                    icc_info: Optional[VDFInfo] = None
                    icc_proof: Optional[VDFProof] = None
                    cc_info: Optional[VDFInfo] = None
                    cc_proof: Optional[VDFProof] = None
                    rc_info: Optional[VDFInfo] = None
                    rc_proof: Optional[VDFProof] = None
                    for chain, info, proof in proofs_with_iter:
                        if chain == Chain.CHALLENGE_CHAIN:
                            cc_info = info
                            cc_proof = proof
                        if chain == Chain.REWARD_CHAIN:
                            rc_info = info
                            rc_proof = proof
                        if chain == Chain.INFUSED_CHALLENGE_CHAIN:
                            icc_info = info
                            icc_proof = proof
                    if cc_info is None or cc_proof is None or rc_info is None or rc_proof is None:
                        log.error(f"Insufficient VDF proofs for infusion point ch: {challenge} iterations:{iteration}")
                        return None

                    rc_challenge = self.last_state.get_challenge(Chain.REWARD_CHAIN)
                    if rc_info.challenge != rc_challenge:
                        assert rc_challenge is not None
                        log.warning(
                            f"Do not have the correct challenge {rc_challenge.hex()}; "
                            f"have {rc_info.challenge}, partial hash {block.reward_chain_block.get_hash()}"
                        )
                        # This proof is on an outdated challenge, so don't use it
                        continue

                    self.iters_finished.add(iter_to_look_for)
                    self.last_active_time = time.time()
                    log.debug(f"Generated infusion point for challenge: {challenge} iterations: {iteration}.")

                    overflow = is_overflow_block(self.constants, block.reward_chain_block.signage_point_index)

                    if not self.last_state.can_infuse_block(overflow):
                        log.warning("Too many blocks, or overflow in new epoch, cannot infuse, discarding")
                        return None

                    cc_info = dataclasses.replace(cc_info, number_of_iterations=ip_iters)
                    response = timelord_protocol.NewInfusionPointVDF(
                        challenge,
                        cc_info,
                        cc_proof,
                        rc_info,
                        rc_proof,
                        icc_info,
                        icc_proof,
                    )
                    msg = make_msg(ProtocolMessageTypes.new_infusion_point_vdf, response)
                    if self.server is not None:
                        await self.server.send_to_all([msg], NodeType.FULL_NODE)

                    self.proofs_finished = self._clear_proof_list(iteration)

                    if (
                        self.last_state.get_last_block_total_iters() is None
                        and not self.last_state.state_type == StateType.FIRST_SUB_SLOT
                    ):
                        # We don't know when the last block was, so we can't make peaks
                        return None

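                    # The SP's total iters is recovered from the infusion point: subtract the
                    # block's ip_iters to return to the start of the sub-slot, add its sp_iters,
                    # and for overflow blocks subtract a full sub-slot, since their signage point
                    # lies in the previous sub-slot.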
                    sp_total_iters = (
                        ip_total_iters
                        - ip_iters
                        + calculate_sp_iters(
                            self.constants,
                            block.sub_slot_iters,
                            block.reward_chain_block.signage_point_index,
                        )
                        - (block.sub_slot_iters if overflow else 0)
                    )
                    if self.last_state.state_type == StateType.FIRST_SUB_SLOT:
                        is_transaction_block = True
                        height: uint32 = uint32(0)
                    else:
                        last_block_ti = self.last_state.get_last_block_total_iters()
                        assert last_block_ti is not None
                        is_transaction_block = last_block_ti < sp_total_iters
                        height = uint32(self.last_state.get_height() + 1)

                    if height < 5:
                        # Don't directly update our state for the first few blocks, because we cannot validate
                        # whether the pre-farm is correct
                        return None

                    new_reward_chain_block = RewardChainBlock(
                        uint128(self.last_state.get_weight() + block.difficulty),
                        height,
                        uint128(ip_total_iters),
                        block.reward_chain_block.signage_point_index,
                        block.reward_chain_block.pos_ss_cc_challenge_hash,
                        block.reward_chain_block.proof_of_space,
                        block.reward_chain_block.challenge_chain_sp_vdf,
                        block.reward_chain_block.challenge_chain_sp_signature,
                        cc_info,
                        block.reward_chain_block.reward_chain_sp_vdf,
                        block.reward_chain_block.reward_chain_sp_signature,
                        rc_info,
                        icc_info,
                        is_transaction_block,
                    )
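                    # Work out the new deficit for this peak: genesis starts at
                    # MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1; an overflow block arriving while the
                    # deficit is still at its maximum only lowers it when it is the first infusion
                    # of a sub-slot whose end has no infused challenge chain; otherwise the deficit
                    # simply counts down towards zero.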
                    if self.last_state.state_type == StateType.FIRST_SUB_SLOT:
                        # Genesis
                        new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1
                    elif overflow and self.last_state.deficit == self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                        if self.last_state.peak is not None:
                            assert self.last_state.subslot_end is None
                            # This means the previous block is also an overflow block, and did not manage
                            # to lower the deficit, therefore we cannot lower it either. (new slot)
                            new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
                        else:
                            # This means we are the first infusion in this sub-slot. This may be a new slot or not.
                            assert self.last_state.subslot_end is not None
                            if self.last_state.subslot_end.infused_challenge_chain is None:
                                # There is no ICC, which means we are not finishing a slot. We can reduce the deficit.
                                new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1
                            else:
                                # There is an ICC, which means we are finishing a slot. Different slot, so can't change
                                # the deficit
                                new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
                    else:
                        new_deficit = max(self.last_state.deficit - 1, 0)

                    if new_deficit == self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1:
                        last_csb_or_eos = ip_total_iters
                    else:
                        last_csb_or_eos = self.last_state.last_challenge_sb_or_eos_total_iters

                    if self.last_state.just_infused_sub_epoch_summary():
                        new_sub_epoch_summary = None
                        passed_ses_height_but_not_yet_included = False
                    else:
                        new_sub_epoch_summary = block.sub_epoch_summary
                        if new_reward_chain_block.height % self.constants.SUB_EPOCH_BLOCKS == 0:
                            passed_ses_height_but_not_yet_included = True
                        else:
                            passed_ses_height_but_not_yet_included = (
                                self.last_state.get_passed_ses_height_but_not_yet_included()
                            )

                    self.new_peak = timelord_protocol.NewPeakTimelord(
                        new_reward_chain_block,
                        block.difficulty,
                        uint8(new_deficit),
                        block.sub_slot_iters,
                        new_sub_epoch_summary,
                        self.last_state.reward_challenge_cache,
                        uint128(last_csb_or_eos),
                        passed_ses_height_but_not_yet_included,
                    )

                    await self._handle_new_peak()
                    # Break so we alternate between checking SP and IP
                    break
Esempio n. 6
0
async def pre_validate_blocks_multiprocessing(
    constants: ConsensusConstants,
    constants_json: Dict,
    block_records: BlockchainInterface,
    blocks: Sequence[Union[FullBlock, HeaderBlock]],
    pool: ProcessPoolExecutor,
    check_filter: bool,
    npc_results: Dict[uint32, NPCResult],
    get_block_generator: Optional[Callable],
    batch_size: int,
    wp_summaries: Optional[List[SubEpochSummary]] = None,
) -> Optional[List[PreValidationResult]]:
    """
    This method must be called under the blockchain lock
    If all the full blocks pass pre-validation, (only validates header), returns the list of required iters.
    if any validation issue occurs, returns False.

    Args:
        check_filter:
        constants_json:
        pool:
        constants:
        block_records:
        blocks: list of full blocks to validate (must be connected to current chain)
        npc_results
        get_block_generator
    """
    prev_b: Optional[BlockRecord] = None
    # Collects all the recent blocks (up to the previous sub-epoch)
    recent_blocks: Dict[bytes32, BlockRecord] = {}
    recent_blocks_compressed: Dict[bytes32, BlockRecord] = {}
    num_sub_slots_found = 0
    num_blocks_seen = 0
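    # Walk back from the parent of the first block, collecting the block records the workers will
    # need: recent_blocks goes back to the previous sub-epoch summary, while
    # recent_blocks_compressed only keeps enough records to cover NUMBER_OF_TIMESTAMPS transaction
    # blocks and the required number of finished sub-slots.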
    if blocks[0].height > 0:
        if not block_records.contains_block(blocks[0].prev_header_hash):
            return [PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value), None, None)]
        curr = block_records.block_record(blocks[0].prev_header_hash)
        num_sub_slots_to_look_for = 3 if curr.overflow else 2
        while (
            curr.sub_epoch_summary_included is None
            or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS
            or num_sub_slots_found < num_sub_slots_to_look_for
        ) and curr.height > 0:
            if num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS or num_sub_slots_found < num_sub_slots_to_look_for:
                recent_blocks_compressed[curr.header_hash] = curr

            if curr.first_in_sub_slot:
                assert curr.finished_challenge_slot_hashes is not None
                num_sub_slots_found += len(curr.finished_challenge_slot_hashes)
            recent_blocks[curr.header_hash] = curr
            if curr.is_transaction_block:
                num_blocks_seen += 1
            curr = block_records.block_record(curr.prev_hash)
        recent_blocks[curr.header_hash] = curr
        recent_blocks_compressed[curr.header_hash] = curr
    block_record_was_present = []
    for block in blocks:
        block_record_was_present.append(block_records.contains_block(block.header_hash))

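    # First pass (in this process): derive each block's difficulty and sub-slot iters, verify its
    # proof of space, and temporarily add its block record so later blocks in the batch can
    # reference it as a previous block.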
    diff_ssis: List[Tuple[uint64, uint64]] = []
    for block in blocks:
        if block.height != 0:
            assert block_records.contains_block(block.prev_header_hash)
            if prev_b is None:
                prev_b = block_records.block_record(block.prev_header_hash)

        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            constants, len(block.finished_sub_slots) > 0, prev_b, block_records
        )

        overflow = is_overflow_block(
            constants, block.reward_chain_block.signage_point_index)
        challenge = get_block_challenge(constants, block,
                                        BlockCache(recent_blocks),
                                        prev_b is None, overflow, False)
        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            cc_sp_hash: bytes32 = challenge
        else:
            cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
        q_str: Optional[bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
            constants, challenge, cc_sp_hash
        )
        if q_str is None:
            for i, block_i in enumerate(blocks):
                if not block_record_was_present[i] and block_records.contains_block(block_i.header_hash):
                    block_records.remove_block_record(block_i.header_hash)
            return None

        required_iters: uint64 = calculate_iterations_quality(
            constants.DIFFICULTY_CONSTANT_FACTOR,
            q_str,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp_hash,
        )

        block_rec = block_to_block_record(
            constants,
            block_records,
            required_iters,
            block,
            None,
        )

        if block_rec.sub_epoch_summary_included is not None and wp_summaries is not None:
            idx = int(block.height / constants.SUB_EPOCH_BLOCKS) - 1
            next_ses = wp_summaries[idx]
            if block_rec.sub_epoch_summary_included.get_hash() != next_ses.get_hash():
                log.error("sub_epoch_summary does not match wp sub_epoch_summary list")
                return None
        # Make sure not to override the valid blocks already in block_records
        if not block_records.contains_block(block_rec.header_hash):
            block_records.add_block_record(block_rec)  # Temporarily add block to dict
            recent_blocks[block_rec.header_hash] = block_rec
            recent_blocks_compressed[block_rec.header_hash] = block_rec
        else:
            recent_blocks[block_rec.header_hash] = block_records.block_record(block_rec.header_hash)
            recent_blocks_compressed[block_rec.header_hash] = block_records.block_record(block_rec.header_hash)
        prev_b = block_rec
        diff_ssis.append((difficulty, sub_slot_iters))

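    # Remove the temporarily added block records and index the batch by header hash so generator
    # references within the batch can be resolved.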
    block_dict: Dict[bytes32, Union[FullBlock, HeaderBlock]] = {}
    for i, block in enumerate(blocks):
        block_dict[block.header_hash] = block
        if not block_record_was_present[i]:
            block_records.remove_block_record(block.header_hash)

    recent_sb_compressed_pickled = {
        bytes(k): bytes(v)
        for k, v in recent_blocks_compressed.items()
    }
    npc_results_pickled = {}
    for k, v in npc_results.items():
        npc_results_pickled[k] = bytes(v)
    futures = []
    # Pool of workers to validate blocks concurrently
    for i in range(0, len(blocks), batch_size):
        end_i = min(i + batch_size, len(blocks))
        blocks_to_validate = blocks[i:end_i]
        if any(len(block.finished_sub_slots) > 0 for block in blocks_to_validate):
            final_pickled = {bytes(k): bytes(v) for k, v in recent_blocks.items()}
        else:
            final_pickled = recent_sb_compressed_pickled
        b_pickled: Optional[List[bytes]] = None
        hb_pickled: Optional[List[bytes]] = None
        previous_generators: List[Optional[bytes]] = []
        for block in blocks_to_validate:
            # We ONLY add blocks which are in the past, based on header hashes (which are validated later) to the
            # prev blocks dict. This is important since these blocks are assumed to be valid and are used as previous
            # generator references
            prev_blocks_dict: Dict[uint32, Union[FullBlock, HeaderBlock]] = {}
            curr_b: Union[FullBlock, HeaderBlock] = block

            while curr_b.prev_header_hash in block_dict:
                curr_b = block_dict[curr_b.prev_header_hash]
                prev_blocks_dict[curr_b.header_hash] = curr_b

            if isinstance(block, FullBlock):
                assert get_block_generator is not None
                if b_pickled is None:
                    b_pickled = []
                b_pickled.append(bytes(block))
                try:
                    block_generator: Optional[BlockGenerator] = await get_block_generator(block, prev_blocks_dict)
                except ValueError:
                    return None
                if block_generator is not None:
                    previous_generators.append(bytes(block_generator))
                else:
                    previous_generators.append(None)
            else:
                if hb_pickled is None:
                    hb_pickled = []
                hb_pickled.append(bytes(block))

        futures.append(asyncio.get_running_loop().run_in_executor(
            pool,
            batch_pre_validate_blocks,
            constants_json,
            final_pickled,
            b_pickled,
            hb_pickled,
            previous_generators,
            npc_results_pickled,
            check_filter,
            [diff_ssis[j][0] for j in range(i, end_i)],
            [diff_ssis[j][1] for j in range(i, end_i)],
        ))
    # Collect all results into one flat list
    return [
        PreValidationResult.from_bytes(result)
        for batch_result in (await asyncio.gather(*futures))
        for result in batch_result
    ]
Esempio n. 7
0
    async def test1(self, two_nodes):
        num_blocks = 5
        test_rpc_port = uint16(21522)
        nodes, _ = two_nodes
        full_node_api_1, full_node_api_2 = nodes
        server_1 = full_node_api_1.full_node.server
        server_2 = full_node_api_2.full_node.server

        def stop_node_cb():
            full_node_api_1._close()
            server_1.close_all()

        full_node_rpc_api = FullNodeRpcApi(full_node_api_1.full_node)

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        rpc_cleanup = await start_rpc_server(
            full_node_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FullNodeRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
            state = await client.get_blockchain_state()
            assert state["peak"] is None
            assert not state["sync"]["sync_mode"]
            assert state["difficulty"] > 0
            assert state["sub_slot_iters"] > 0

            blocks = bt.get_consecutive_blocks(num_blocks)
            blocks = bt.get_consecutive_blocks(num_blocks, block_list_input=blocks, guarantee_transaction_block=True)

            assert len(await client.get_unfinished_block_headers()) == 0
            assert len((await client.get_block_records(0, 100))) == 0
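            # Feed each block in as an unfinished block first, then as a full block, so the RPC can
            # report unfinished headers; overflow blocks drop their last finished sub-slot when the
            # unfinished block is built.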
            for block in blocks:
                if is_overflow_block(test_constants, block.reward_chain_block.signage_point_index):
                    finished_ss = block.finished_sub_slots[:-1]
                else:
                    finished_ss = block.finished_sub_slots

                unf = UnfinishedBlock(
                    finished_ss,
                    block.reward_chain_block.get_unfinished(),
                    block.challenge_chain_sp_proof,
                    block.reward_chain_sp_proof,
                    block.foliage,
                    block.foliage_transaction_block,
                    block.transactions_info,
                    block.transactions_generator,
                    [],
                )
                await full_node_api_1.full_node.respond_unfinished_block(
                    full_node_protocol.RespondUnfinishedBlock(unf), None
                )
                await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block), None)

            assert len(await client.get_unfinished_block_headers()) > 0
            assert len(await client.get_all_block(0, 2)) == 2
            state = await client.get_blockchain_state()

            block = await client.get_block(state["peak"].header_hash)
            assert block == blocks[-1]
            assert (await client.get_block(bytes([1] * 32))) is None

            assert (await client.get_block_record_by_height(2)).header_hash == blocks[2].header_hash

            assert len((await client.get_block_records(0, 100))) == num_blocks * 2

            assert (await client.get_block_record_by_height(100)) is None

            ph = list(blocks[-1].get_included_reward_coins())[0].puzzle_hash
            coins = await client.get_coin_records_by_puzzle_hash(ph)
            print(coins)
            assert len(coins) >= 1

            pid = list(blocks[-1].get_included_reward_coins())[0].parent_coin_info
            pid_2 = list(blocks[-1].get_included_reward_coins())[1].parent_coin_info
            coins = await client.get_coin_records_by_parent_ids([pid, pid_2])
            print(coins)
            assert len(coins) == 2

            additions, removals = await client.get_additions_and_removals(blocks[-1].header_hash)
            assert len(additions) >= 2 and len(removals) == 0

            wallet = WalletTool(full_node_api_1.full_node.constants)
            wallet_receiver = WalletTool(full_node_api_1.full_node.constants, AugSchemeMPL.key_gen(std_hash(b"123123")))
            ph = wallet.get_new_puzzlehash()
            ph_2 = wallet.get_new_puzzlehash()
            ph_receiver = wallet_receiver.get_new_puzzlehash()

            assert len(await client.get_coin_records_by_puzzle_hash(ph)) == 0
            assert len(await client.get_coin_records_by_puzzle_hash(ph_receiver)) == 0
            blocks = bt.get_consecutive_blocks(
                2,
                block_list_input=blocks,
                guarantee_transaction_block=True,
                farmer_reward_puzzle_hash=ph,
                pool_reward_puzzle_hash=ph,
            )
            for block in blocks[-2:]:
                await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
            assert len(await client.get_coin_records_by_puzzle_hash(ph)) == 2
            assert len(await client.get_coin_records_by_puzzle_hash(ph_receiver)) == 0

            coin_to_spend = list(blocks[-1].get_included_reward_coins())[0]

            spend_bundle = wallet.generate_signed_transaction(coin_to_spend.amount, ph_receiver, coin_to_spend)

            assert len(await client.get_all_mempool_items()) == 0
            assert len(await client.get_all_mempool_tx_ids()) == 0
            assert (await client.get_mempool_item_by_tx_id(spend_bundle.name())) is None

            await client.push_tx(spend_bundle)
            coin = spend_bundle.additions()[0]

            assert len(await client.get_all_mempool_items()) == 1
            assert len(await client.get_all_mempool_tx_ids()) == 1
            assert (
                SpendBundle.from_json_dict(list((await client.get_all_mempool_items()).values())[0]["spend_bundle"])
                == spend_bundle
            )
            assert (await client.get_all_mempool_tx_ids())[0] == spend_bundle.name()
            assert (
                SpendBundle.from_json_dict(
                    (await client.get_mempool_item_by_tx_id(spend_bundle.name()))["spend_bundle"]
                )
                == spend_bundle
            )
            assert (await client.get_coin_record_by_name(coin.name())) is None

            await full_node_api_1.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))

            assert (await client.get_coin_record_by_name(coin.name())).coin == coin

            assert len(await client.get_coin_records_by_puzzle_hash(ph_receiver)) == 1
            assert len(list(filter(lambda cr: not cr.spent, (await client.get_coin_records_by_puzzle_hash(ph))))) == 3
            assert len(await client.get_coin_records_by_puzzle_hashes([ph_receiver, ph])) == 5
            assert len(await client.get_coin_records_by_puzzle_hash(ph, False)) == 3
            assert len(await client.get_coin_records_by_puzzle_hash(ph, True)) == 4

            assert len(await client.get_coin_records_by_puzzle_hash(ph, True, 0, 100)) == 4
            assert len(await client.get_coin_records_by_puzzle_hash(ph, True, 50, 100)) == 0
            assert len(await client.get_coin_records_by_puzzle_hash(ph, True, 0, blocks[-1].height + 1)) == 2
            assert len(await client.get_coin_records_by_puzzle_hash(ph, True, 0, 1)) == 0

            assert len(await client.get_connections()) == 0

            await client.open_connection(self_hostname, server_2._port)

            async def num_connections():
                return len(await client.get_connections())

            await time_out_assert(10, num_connections, 1)
            connections = await client.get_connections()
            assert NodeType(connections[0]["type"]) == NodeType.FULL_NODE.value
            assert len(await client.get_connections(NodeType.FULL_NODE)) == 1
            assert len(await client.get_connections(NodeType.FARMER)) == 0
            await client.close_connection(connections[0]["node_id"])
            await time_out_assert(10, num_connections, 0)
        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            await client.await_closed()
            await rpc_cleanup()
Esempio n. 8
0
def block_to_block_record(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    required_iters: uint64,
    full_block: Optional[Union[FullBlock, HeaderBlock]],
    header_block: Optional[HeaderBlock],
):

    if full_block is None:
        assert header_block is not None
        block: Union[HeaderBlock, FullBlock] = header_block
    else:
        block = full_block
    prev_b = blocks.try_block_record(block.prev_header_hash)
    if block.height > 0:
        assert prev_b is not None
    sub_slot_iters, _ = get_next_sub_slot_iters_and_difficulty(
        constants, len(block.finished_sub_slots) > 0, prev_b, blocks
    )
    overflow = is_overflow_block(constants, block.reward_chain_block.signage_point_index)
    deficit = calculate_deficit(
        constants,
        block.height,
        prev_b,
        overflow,
        len(block.finished_sub_slots),
    )

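    # If any finished sub-slot carries a sub-epoch summary hash, recompute the summary and check
    # that it matches.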
    found_ses_hash: Optional[bytes32] = None
    ses: Optional[SubEpochSummary] = None
    if len(block.finished_sub_slots) > 0:
        for sub_slot in block.finished_sub_slots:
            if sub_slot.challenge_chain.subepoch_summary_hash is not None:
                found_ses_hash = sub_slot.challenge_chain.subepoch_summary_hash
    if found_ses_hash:
        assert prev_b is not None
        assert len(block.finished_sub_slots) > 0
        ses = make_sub_epoch_summary(
            constants,
            blocks,
            block.height,
            blocks.block_record(prev_b.prev_hash),
            block.finished_sub_slots[0].challenge_chain.new_difficulty,
            block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
        )
        assert ses.get_hash() == found_ses_hash

    prev_transaction_block_height = uint32(0)
    curr: Optional[BlockRecord] = blocks.try_block_record(block.prev_header_hash)
    while curr is not None and not curr.is_transaction_block:
        curr = blocks.try_block_record(curr.prev_hash)

    if curr is not None and curr.is_transaction_block:
        prev_transaction_block_height = curr.height

    return header_block_to_sub_block_record(
        constants,
        required_iters,
        block,
        sub_slot_iters,
        overflow,
        deficit,
        prev_transaction_block_height,
        ses,
    )
Esempio n. 9
0
def next_sub_epoch_summary(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    required_iters: uint64,
    block: Union[UnfinishedBlock, FullBlock],
    can_finish_soon: bool = False,
) -> Optional[SubEpochSummary]:
    """
    Returns the sub-epoch summary that can be included in the block after `block`, if one should be included.
    `block` must be eligible to be the last block in the epoch; if not, returns None. Assumes that there is a
    new slot ending after `block`.

    Args:
        constants: consensus constants being used for this chain
        blocks: interface to the cached block records
        required_iters: required iters of the proof of space in block
        block: the (potentially) last block in the new epoch
        can_finish_soon: this is useful when sending SES to timelords. We might not be able to finish it, but we will
            soon (within MAX_SUB_SLOT_BLOCKS)

    Returns:
        object: the new sub-epoch summary
    """
    signage_point_index = block.reward_chain_block.signage_point_index
    prev_b: Optional[BlockRecord] = blocks.try_block_record(
        block.prev_header_hash)
    if prev_b is None or prev_b.height == 0:
        return None

    if len(block.finished_sub_slots) > 0 and block.finished_sub_slots[0].challenge_chain.new_difficulty is not None:
        # We just included a sub-epoch summary
        return None

    assert prev_b is not None
    # This is the sub-slot iters (ssi) of the current block
    sub_slot_iters = get_next_sub_slot_iters_and_difficulty(
        constants, len(block.finished_sub_slots) > 0, prev_b, blocks
    )[0]
    overflow = is_overflow_block(constants, signage_point_index)

    if (
        len(block.finished_sub_slots) > 0
        and block.finished_sub_slots[0].challenge_chain.subepoch_summary_hash is not None
    ):
        return None

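    # When preparing a summary ahead of time for timelords, optimistically assume the deficit will
    # reach zero and only check the epoch boundary heights; otherwise compute the real deficit and
    # ask can_finish_sub_and_full_epoch.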
    if can_finish_soon:
        deficit: uint8 = uint8(0)  # Assume that our deficit will go to zero soon
        can_finish_se = True
        if height_can_be_first_in_epoch(constants, uint32(prev_b.height + 2)):
            can_finish_epoch = True
            if (prev_b.height + 2) % constants.SUB_EPOCH_BLOCKS > 1:
                curr: BlockRecord = prev_b
                while curr.height % constants.SUB_EPOCH_BLOCKS > 0:
                    if (
                        curr.sub_epoch_summary_included is not None
                        and curr.sub_epoch_summary_included.new_difficulty is not None
                    ):
                        can_finish_epoch = False
                    curr = blocks.block_record(curr.prev_hash)

                if (
                    curr.sub_epoch_summary_included is not None
                    and curr.sub_epoch_summary_included.new_difficulty is not None
                ):
                    can_finish_epoch = False
        elif height_can_be_first_in_epoch(constants, uint32(prev_b.height + constants.MAX_SUB_SLOT_BLOCKS + 2)):
            can_finish_epoch = True
        else:
            can_finish_epoch = False
    else:
        deficit = calculate_deficit(
            constants,
            uint32(prev_b.height + 1),
            prev_b,
            overflow,
            len(block.finished_sub_slots),
        )
        can_finish_se, can_finish_epoch = can_finish_sub_and_full_epoch(
            constants,
            blocks,
            uint32(prev_b.height + 1),
            prev_b.header_hash if prev_b is not None else None,
            deficit,
            False,
        )

    # can't finish se, no summary
    if not can_finish_se:
        return None

    next_difficulty = None
    next_sub_slot_iters = None

    # If the epoch can also finish, compute the new difficulty and sub-slot iters
    if can_finish_epoch:
        sp_iters = calculate_sp_iters(constants, sub_slot_iters,
                                      signage_point_index)
        ip_iters = calculate_ip_iters(constants, sub_slot_iters,
                                      signage_point_index, required_iters)

        next_difficulty = _get_next_difficulty(
            constants,
            blocks,
            block.prev_header_hash,
            uint32(prev_b.height + 1),
            uint64(prev_b.weight -
                   blocks.block_record(prev_b.prev_hash).weight),
            deficit,
            False,  # Already checked above
            True,
            uint128(block.total_iters - ip_iters + sp_iters -
                    (sub_slot_iters if overflow else 0)),
            True,
        )
        next_sub_slot_iters = _get_next_sub_slot_iters(
            constants,
            blocks,
            block.prev_header_hash,
            uint32(prev_b.height + 1),
            sub_slot_iters,
            deficit,
            False,  # Already checked above
            True,
            uint128(block.total_iters - ip_iters + sp_iters -
                    (sub_slot_iters if overflow else 0)),
            True,
        )

    return make_sub_epoch_summary(
        constants,
        blocks,
        uint32(prev_b.height + 2),
        prev_b,
        next_difficulty,
        next_sub_slot_iters,
    )
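
# A minimal sketch (not from any of the example projects above): how the signage-point /
# infusion-point iteration arithmetic used throughout these examples fits together. The helpers are
# the ones the examples import from chia.consensus.pot_iterations; the function name and the loose
# parameters here are illustrative assumptions.
from chia.consensus.pot_iterations import calculate_ip_iters, calculate_sp_iters, is_overflow_block


def sp_total_iters_sketch(constants, sub_slot_iters, signage_point_index, required_iters, ip_total_iters):
    # Iterations from the start of the sub-slot to the signage point and to the infusion point.
    sp_iters = calculate_sp_iters(constants, sub_slot_iters, signage_point_index)
    ip_iters = calculate_ip_iters(constants, sub_slot_iters, signage_point_index, required_iters)
    # Overflow blocks have their signage point in the previous sub-slot, so a full sub-slot is
    # subtracted, mirroring the sp_total_iters computation in Esempio n. 5 above.
    overflow = is_overflow_block(constants, signage_point_index)
    return ip_total_iters - ip_iters + sp_iters - (sub_slot_iters if overflow else 0)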