Example #1
    async def test_block_ses_mismatch(self, two_nodes, default_1000_blocks):
        full_node_1, full_node_2, server_1, server_2 = two_nodes
        blocks = default_1000_blocks

        for block in blocks[:501]:
            await full_node_1.full_node.respond_block(
                full_node_protocol.RespondBlock(block))

        peak1 = full_node_1.full_node.blockchain.get_peak()
        full_node_2.full_node.sync_store.set_long_sync(True)
        await server_2.start_client(
            PeerInfo(self_hostname, uint16(server_1._port)),
            full_node_2.full_node.on_connect)
        wp = await full_node_1.full_node.weight_proof_handler.get_proof_of_weight(
            peak1.header_hash)
        summaries1, _ = _validate_sub_epoch_summaries(
            full_node_1.full_node.weight_proof_handler.constants, wp)
        summaries2 = summaries1.copy()
        s = summaries1[1]
        # alter the summary so the check fails at the second sub-epoch
        summaries2[1] = SubEpochSummary(
            s.prev_subepoch_summary_hash,
            s.reward_chain_hash,
            s.num_blocks_overflow,
            s.new_difficulty * 2,
            s.new_sub_slot_iters * 2,
        )
        await full_node_2.full_node.sync_from_fork_point(
            0, 500, peak1.header_hash, summaries2)
        log.info(
            f"full node height {full_node_2.full_node.blockchain.get_peak().height}"
        )
        assert node_height_between(full_node_2, 320, 400)
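The tampering above works because a SubEpochSummary is hashed over all of its fields, so changing the difficulty or sub-slot iters yields a different summary hash and the syncing node stops around the mismatching sub-epoch. A minimal illustration (not from the source; bytes32, uint8, and uint64 imports are assumed, as in the surrounding tests):

s1 = SubEpochSummary(bytes32([0] * 32), bytes32([1] * 32), uint8(0), None, None)
s2 = SubEpochSummary(bytes32([0] * 32), bytes32([1] * 32), uint8(0), uint64(2), uint64(2))
assert s1.get_hash() != s2.get_hash()  # any field change alters the summary hash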
def make_sub_epoch_summary(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    blocks_included_height: uint32,
    prev_prev_block: BlockRecord,
    new_difficulty: Optional[uint64],
    new_sub_slot_iters: Optional[uint64],
) -> SubEpochSummary:
    """
    Creates a sub-epoch-summary object, assuming that the first block in the new sub-epoch is at height
    "blocks_included_height". Prev_prev_b is the second to last block in the previous sub-epoch. On a new epoch,
    new_difficulty and new_sub_slot_iters are also added.

    Args:
        constants: consensus constants being used for this chain
        blocks: blockchain interface mapping header hashes to the block records of all included blocks
        blocks_included_height: block height at which the SES will be included
        prev_prev_block: second-to-last block in the previous sub-epoch
        new_difficulty: difficulty in new epoch
        new_sub_slot_iters: sub slot iters in new epoch
    """
    assert prev_prev_block.height == blocks_included_height - 2
    # First sub_epoch
    # This is not technically correct, because more blocks than 2*MAX_SUB_SLOT_BLOCKS can potentially be included,
    # but assuming fewer than 128 overflow blocks get infused in the first 2 slots, it is not an issue
    if (blocks_included_height +
            constants.MAX_SUB_SLOT_BLOCKS) // constants.SUB_EPOCH_BLOCKS <= 1:
        return SubEpochSummary(
            constants.GENESIS_CHALLENGE,
            constants.GENESIS_CHALLENGE,
            uint8(0),
            None,
            None,
        )
    curr: BlockRecord = prev_prev_block
    while curr.sub_epoch_summary_included is None:
        curr = blocks.block_record(curr.prev_hash)
    assert curr is not None
    assert curr.finished_reward_slot_hashes is not None
    prev_ses = curr.sub_epoch_summary_included.get_hash()
    return SubEpochSummary(
        prev_ses,
        curr.finished_reward_slot_hashes[-1],
        uint8(curr.height % constants.SUB_EPOCH_BLOCKS),
        new_difficulty,
        new_sub_slot_iters,
    )
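The "first sub-epoch" branch above depends only on simple integer arithmetic. A standalone sketch of that check, assuming mainnet-like constants (SUB_EPOCH_BLOCKS = 384, MAX_SUB_SLOT_BLOCKS = 128); it illustrates only the arithmetic, not the consensus rules:

def in_first_sub_epoch(blocks_included_height: int,
                       sub_epoch_blocks: int = 384,
                       max_sub_slot_blocks: int = 128) -> bool:
    # Mirrors: (blocks_included_height + MAX_SUB_SLOT_BLOCKS) // SUB_EPOCH_BLOCKS <= 1
    return (blocks_included_height + max_sub_slot_blocks) // sub_epoch_blocks <= 1

assert in_first_sub_epoch(639) is True   # (639 + 128) // 384 == 1
assert in_first_sub_epoch(640) is False  # (640 + 128) // 384 == 2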
    async def get_peak_height_dicts(
            self
    ) -> Tuple[Dict[uint32, bytes32], Dict[uint32, SubEpochSummary]]:
        """
        Returns two dictionaries covering the main chain up to the peak: one mapping
        height to header hash, and one mapping height to the SubEpochSummary included
        at that height, if any. Both are empty when there is no peak.
        """

        res = await self.db.execute(
            "SELECT * from block_records WHERE is_peak = 1")
        row = await res.fetchone()
        await res.close()
        if row is None:
            return {}, {}

        peak: bytes32 = bytes.fromhex(row[0])
        cursor = await self.db.execute(
            "SELECT header_hash,prev_hash,height,sub_epoch_summary from block_records"
        )
        rows = await cursor.fetchall()
        await cursor.close()
        hash_to_prev_hash: Dict[bytes32, bytes32] = {}
        hash_to_height: Dict[bytes32, uint32] = {}
        hash_to_summary: Dict[bytes32, SubEpochSummary] = {}

        for row in rows:
            hash_to_prev_hash[bytes.fromhex(row[0])] = bytes.fromhex(row[1])
            hash_to_height[bytes.fromhex(row[0])] = row[2]
            if row[3] is not None:
                hash_to_summary[bytes.fromhex(
                    row[0])] = SubEpochSummary.from_bytes(row[3])

        height_to_hash: Dict[uint32, bytes32] = {}
        sub_epoch_summaries: Dict[uint32, SubEpochSummary] = {}

        curr_header_hash = peak
        curr_height = hash_to_height[curr_header_hash]
        while True:
            height_to_hash[curr_height] = curr_header_hash
            if curr_header_hash in hash_to_summary:
                sub_epoch_summaries[curr_height] = hash_to_summary[
                    curr_header_hash]
            if curr_height == 0:
                break
            curr_header_hash = hash_to_prev_hash[curr_header_hash]
            curr_height = hash_to_height[curr_header_hash]
        return height_to_hash, sub_epoch_summaries
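A hypothetical consumer of this method (names assumed, not from the source; List and uint32 imports as in the surrounding code): fetch both mappings and list the heights at which a sub-epoch summary was included along the main chain.

async def summary_heights(block_store) -> List[uint32]:
    # block_store is assumed to expose get_peak_height_dicts() as defined above
    _, sub_epoch_summaries = await block_store.get_peak_height_dicts()
    return sorted(sub_epoch_summaries.keys())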
Example #4
def gen_ses(height: int) -> SubEpochSummary:
    prev_ses = gen_block_hash(height + 0xFA0000)
    reward_chain_hash = gen_block_hash(height + 0xFC0000)
    return SubEpochSummary(prev_ses, reward_chain_hash, uint8(0), None, None)
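Usage sketch (hypothetical): build a few deterministic summaries as test fixtures. gen_block_hash is assumed to be the deterministic helper defined alongside gen_ses, returning a bytes32 derived from its integer argument.

test_summaries = {height: gen_ses(height) for height in (0, 384, 768)}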
Example #5
async def run_add_block_benchmark(version: int):

    verbose: bool = "--verbose" in sys.argv
    db_wrapper: DBWrapper = await setup_db("block-store-benchmark.db", version)

    # keep track of benchmark total time
    all_test_time = 0.0

    prev_block = bytes32([0] * 32)
    prev_ses_hash = bytes32([0] * 32)

    header_hashes = []

    try:
        block_store = await BlockStore.create(db_wrapper)

        block_height = 1
        timestamp = uint64(1631794488)
        weight = uint128(10)
        iters = uint128(123456)
        sp_index = uint8(0)
        deficit = uint8(0)
        sub_slot_iters = uint64(10)
        required_iters = uint64(100)
        transaction_block_counter = 0
        prev_transaction_block = bytes32([0] * 32)
        prev_transaction_height = uint32(0)
        total_time = 0.0
        ses_counter = 0

        if verbose:
            print("profiling add_full_block", end="")

        for height in range(block_height, block_height + NUM_ITERS):

            is_transaction = transaction_block_counter == 0
            fees = uint64(random.randint(0, 150000))
            farmer_coin, pool_coin = rewards(uint32(height))
            reward_claims_incorporated = [farmer_coin, pool_coin]

            # TODO: increase fidelity by setting these as well
            finished_challenge_slot_hashes = None
            finished_infused_challenge_slot_hashes = None
            finished_reward_slot_hashes = None

            sub_epoch_summary_included = None
            if ses_counter == 0:
                sub_epoch_summary_included = SubEpochSummary(
                    prev_ses_hash,
                    rand_hash(),
                    uint8(random.randint(0,
                                         255)),  # num_blocks_overflow: uint8
                    None,  # new_difficulty: Optional[uint64]
                    None,  # new_sub_slot_iters: Optional[uint64]
                )

            has_pool_pk = random.randint(0, 1)

            proof_of_space = ProofOfSpace(
                rand_hash(),  # challenge
                rand_g1() if has_pool_pk else None,
                rand_hash() if not has_pool_pk else None,
                rand_g1(),  # plot_public_key
                uint8(32),
                rand_bytes(8 * 32),
            )

            reward_chain_block = RewardChainBlock(
                weight,
                uint32(height),
                iters,
                sp_index,
                rand_hash(),  # pos_ss_cc_challenge_hash
                proof_of_space,
                None if sp_index == 0 else rand_vdf(),
                rand_g2(),  # challenge_chain_sp_signature
                rand_vdf(),  # challenge_chain_ip_vdf
                rand_vdf() if sp_index != 0 else None,  # reward_chain_sp_vdf
                rand_g2(),  # reward_chain_sp_signature
                rand_vdf(),  # reward_chain_ip_vdf
                rand_vdf() if deficit < 16 else None,
                is_transaction,
            )

            pool_target = PoolTarget(
                rand_hash(),  # puzzle_hash
                uint32(0),  # max_height
            )

            foliage_block_data = FoliageBlockData(
                rand_hash(),  # unfinished_reward_block_hash
                pool_target,
                rand_g2() if has_pool_pk else None,  # pool_signature
                rand_hash(),  # farmer_reward_puzzle_hash
                bytes32([0] * 32),  # extension_data
            )

            foliage = Foliage(
                prev_block,
                rand_hash(),  # reward_block_hash
                foliage_block_data,
                rand_g2(),  # foliage_block_data_signature
                rand_hash()
                if is_transaction else None,  # foliage_transaction_block_hash
                rand_g2() if is_transaction else
                None,  # foliage_transaction_block_signature
            )

            foliage_transaction_block = (
                None if not is_transaction else FoliageTransactionBlock(
                    prev_transaction_block,
                    timestamp,
                    rand_hash(),  # filter_hash
                    rand_hash(),  # additions_root
                    rand_hash(),  # removals_root
                    rand_hash(),  # transactions_info_hash
                ))

            transactions_info = (
                None if not is_transaction else TransactionsInfo(
                    rand_hash(),  # generator_root
                    rand_hash(),  # generator_refs_root
                    rand_g2(),  # aggregated_signature
                    fees,
                    uint64(random.randint(0, 12000000000)),  # cost
                    reward_claims_incorporated,
                ))

            full_block = FullBlock(
                [],  # finished_sub_slots
                reward_chain_block,
                rand_vdf_proof()
                if sp_index > 0 else None,  # challenge_chain_sp_proof
                rand_vdf_proof(),  # challenge_chain_ip_proof
                rand_vdf_proof()
                if sp_index > 0 else None,  # reward_chain_sp_proof
                rand_vdf_proof(),  # reward_chain_ip_proof
                rand_vdf_proof()
                if deficit < 4 else None,  # infused_challenge_chain_ip_proof
                foliage,
                foliage_transaction_block,
                transactions_info,
                None if is_transaction else SerializedProgram.from_bytes(
                    clvm_generator),  # transactions_generator
                [],  # transactions_generator_ref_list
            )

            header_hash = full_block.header_hash

            record = BlockRecord(
                header_hash,
                prev_block,
                uint32(height),
                weight,
                iters,
                sp_index,
                rand_class_group_element(),
                None if deficit > 3 else rand_class_group_element(),
                rand_hash(),  # reward_infusion_new_challenge
                rand_hash(),  # challenge_block_info_hash
                sub_slot_iters,
                rand_hash(),  # pool_puzzle_hash
                rand_hash(),  # farmer_puzzle_hash
                required_iters,
                deficit,
                deficit == 16,
                prev_transaction_height,
                timestamp if is_transaction else None,
                prev_transaction_block
                if prev_transaction_block != bytes32([0] * 32) else None,
                None if fees == 0 else fees,
                reward_claims_incorporated,
                finished_challenge_slot_hashes,
                finished_infused_challenge_slot_hashes,
                finished_reward_slot_hashes,
                sub_epoch_summary_included,
            )

            start = time()
            await block_store.add_full_block(header_hash, full_block, record,
                                             False)
            await block_store.set_in_chain([(header_hash, )])
            header_hashes.append(header_hash)
            await block_store.set_peak(header_hash)
            await db_wrapper.db.commit()

            stop = time()
            total_time += stop - start

            # 19 seconds per block
            timestamp = uint64(timestamp + 19)
            weight = uint128(weight + 10)
            iters = uint128(iters + 123456)
            sp_index = uint8((sp_index + 1) % 64)
            deficit = uint8((deficit + 3) % 17)
            ses_counter = (ses_counter + 1) % 384
            prev_block = header_hash

            # every 33 blocks is a transaction block
            transaction_block_counter = (transaction_block_counter + 1) % 33

            if is_transaction:
                prev_transaction_block = header_hash
                prev_transaction_height = uint32(height)

            if ses_counter == 0:
                prev_ses_hash = header_hash

            if verbose:
                print(".", end="")
                sys.stdout.flush()
        block_height += NUM_ITERS

        if verbose:
            print("")
        print(f"{total_time:0.4f}s, add_full_block")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_block")

        random.shuffle(header_hashes)
        start = time()
        for h in header_hashes:
            block = await block_store.get_full_block(h)
            assert block.header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_block")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_block_bytes")

        start = time()
        for h in header_hashes:
            block = await block_store.get_full_block_bytes(h)
            assert len(block) > 0

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_block_bytes")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_blocks_at")

        start = time()
        for h in range(1, block_height):
            blocks = await block_store.get_full_blocks_at([h])
            assert len(blocks) == 1
            assert blocks[0].height == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_blocks_at")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_by_hash")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_block_records_by_hash([h])
            assert len(blocks) == 1
            assert blocks[0].header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_by_hash")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_blocks_by_hash")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_blocks_by_hash([h])
            assert len(blocks) == 1
            assert blocks[0].header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_blocks_by_hash")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_record")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_block_record(h)
            assert blocks.header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_record")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_in_range")

        start = time()
        for i in range(100):
            h = random.randint(1, block_height - 100)
            blocks = await block_store.get_block_records_in_range(h, h + 99)
            assert len(blocks) == 100

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_in_range")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_close_to_peak")

        start = time()
        blocks, peak = await block_store.get_block_records_close_to_peak(99)
        assert len(blocks) == 100

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_close_to_peak")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling is_fully_compactified")

        start = time()
        for h in header_hashes:
            compactified = await block_store.is_fully_compactified(h)
            assert compactified is False

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_record")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_random_not_compactified")

        start = time()
        for i in range(1, 5000):
            blocks = await block_store.get_random_not_compactified(100)
            assert len(blocks) == 100
        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_random_not_compactified")
        all_test_time += total_time

        print(f"all tests completed in {all_test_time:0.4f}s")

        db_size = os.path.getsize(Path("block-store-benchmark.db"))
        print(f"database size: {db_size/1000000:.3f} MB")

    finally:
        await db_wrapper.db.close()
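A minimal way to drive this benchmark (a sketch, not from the source; the version argument selects the DB schema that setup_db creates, assumed here to be 2):

if __name__ == "__main__":
    import asyncio
    asyncio.run(run_add_block_benchmark(2))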
    True,
)

error_response = pool_protocol.ErrorResponse(
    uint16(47018),
    "err",
)

### TIMELORD PROTOCOL
sub_epoch_summary = SubEpochSummary(
    bytes32(
        bytes.fromhex(
            "2d0550de416467e7b57e56e962c712b79bee29cae29c73cc908da5978fc9789e")
    ),
    bytes32(
        bytes.fromhex(
            "3d29f5a3fe067ce7edea76c9cebaf3a3afdebc0eb9fbd530f807f1a28ed2df6d")
    ),
    uint8(4),
    uint64(14666749803532899046),
    uint64(10901191956946573440),
)

new_peak_timelord = timelord_protocol.NewPeakTimelord(
    reward_chain_block,
    uint64(7661623532867338566),
    uint8(202),
    uint64(16623089924886538940),
    sub_epoch_summary,
    [(
        bytes32(
Example #7
    def get_ses(self, height: uint32) -> SubEpochSummary:
        return SubEpochSummary.from_bytes(self.__sub_epoch_summaries[height])
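A hypothetical companion setter (not from the source), assuming the backing map stores summaries serialized to bytes, as the getter implies:

    def set_ses(self, height: uint32, ses: SubEpochSummary) -> None:
        # store the streamable summary in serialized form, mirroring get_ses
        self.__sub_epoch_summaries[height] = bytes(ses)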