Example no. 1
    async def get_block_records_close_to_peak(
        self, blocks_n: int
    ) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
        """
        Returns a dictionary with all blocks that have height >= peak height - blocks_n, as well as the
        peak header hash.
        """

        peak = await self.get_peak()
        if peak is None:
            return {}, None

        ret: Dict[bytes32, BlockRecord] = {}
        if self.db_wrapper.db_version == 2:

            async with self.db.execute(
                "SELECT header_hash, block_record FROM full_blocks WHERE height >= ?",
                (peak[1] - blocks_n,),
            ) as cursor:
                for row in await cursor.fetchall():
                    header_hash = bytes32(row[0])
                    ret[header_hash] = BlockRecord.from_bytes(row[1])

        else:
            formatted_str = f"SELECT header_hash, block  from block_records WHERE height >= {peak[1] - blocks_n}"
            async with self.db.execute(formatted_str) as cursor:
                for row in await cursor.fetchall():
                    header_hash = bytes32(self.maybe_from_hex(row[0]))
                    ret[header_hash] = BlockRecord.from_bytes(row[1])

        return ret, peak[0]
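The v1 branch above decodes keys through self.maybe_from_hex, which is not shown in this example. A minimal sketch of what such a helper might look like, assuming the v2 schema stores header hashes as raw bytes while the v1 schema stores them as hex strings (the real method's exact signature may differ):

from typing import Union

def maybe_from_hex(field: Union[str, bytes], db_version: int) -> bytes:
    # Sketch only: v2 rows are assumed to hold raw bytes, v1 rows hex text.
    if db_version == 2:
        return bytes(field)
    return bytes.fromhex(field)

assert maybe_from_hex("00ff", 1) == b"\x00\xff"
assert maybe_from_hex(b"\x00\xff", 2) == b"\x00\xff"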
Example no. 2
    async def get_block_records_by_hash(self, header_hashes: List[bytes32]) -> List[BlockRecord]:
        """
        Returns a list of Block Records, ordered by the same order in which header_hashes are passed in.
        Throws an exception if the blocks are not present
        """
        if len(header_hashes) == 0:
            return []

        all_blocks: Dict[bytes32, BlockRecord] = {}
        if self.db_wrapper.db_version == 2:
            async with self.db.execute(
                "SELECT header_hash,block_record FROM full_blocks "
                f'WHERE header_hash in ({"?," * (len(header_hashes) - 1)}?)',
                tuple(header_hashes),
            ) as cursor:
                for row in await cursor.fetchall():
                    header_hash = bytes32(row[0])
                    all_blocks[header_hash] = BlockRecord.from_bytes(row[1])
        else:
            formatted_str = f'SELECT block from block_records WHERE header_hash in ({"?," * (len(header_hashes) - 1)}?)'
            async with self.db.execute(formatted_str, tuple([hh.hex() for hh in header_hashes])) as cursor:
                for row in await cursor.fetchall():
                    block_rec: BlockRecord = BlockRecord.from_bytes(row[0])
                    all_blocks[block_rec.header_hash] = block_rec

        ret: List[BlockRecord] = []
        for hh in header_hashes:
            if hh not in all_blocks:
                raise ValueError(f"Header hash {hh} not in the blockchain")
            ret.append(all_blocks[hh])
        return ret
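The '"?," * (len(header_hashes) - 1)' expression builds an "(?,?,...,?)" placeholder list so the hashes are bound as query parameters rather than interpolated into the SQL string. A self-contained sketch of the same idiom using the standard-library sqlite3 module:

import sqlite3

keys = [b"\x01" * 32, b"\x02" * 32, b"\x03" * 32]
# One "?" per key: "?,?,?" for three keys; the values travel separately.
placeholders = "?," * (len(keys) - 1) + "?"
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE blocks(header_hash BLOB PRIMARY KEY)")
conn.executemany("INSERT INTO blocks VALUES (?)", [(k,) for k in keys])
rows = conn.execute(
    f"SELECT header_hash FROM blocks WHERE header_hash IN ({placeholders})", keys
).fetchall()
assert {row[0] for row in rows} == set(keys)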
Example no. 3
    async def get_block_records_in_range(
        self,
        start: int,
        stop: int,
    ) -> Dict[bytes32, BlockRecord]:
        """
        Returns a dictionary with all blocks in range between start and stop
        if present.
        """

        ret: Dict[bytes32, BlockRecord] = {}
        if self.db_wrapper.db_version == 2:

            async with self.db.execute(
                "SELECT header_hash, block_record FROM full_blocks WHERE height >= ? AND height <= ?",
                (start, stop),
            ) as cursor:
                for row in await cursor.fetchall():
                    header_hash = bytes32(row[0])
                    ret[header_hash] = BlockRecord.from_bytes(row[1])

        else:

            formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {start} and height <= {stop}"

            async with self.db.execute(formatted_str) as cursor:
                for row in await cursor.fetchall():
                    header_hash = bytes32(self.maybe_from_hex(row[0]))
                    ret[header_hash] = BlockRecord.from_bytes(row[1])

        return ret
Example no. 4
    async def get_block_records_close_to_peak(
            self, blocks_n: int
    ) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
        """
        Returns a dictionary with all blocks that have height >= peak height - blocks_n, as well as the
        peak header hash.
        """

        res = await self.db.execute(
            "SELECT header_hash, height from block_records WHERE is_peak = 1")
        row = await res.fetchone()
        await res.close()
        if row is None:
            return {}, None
        header_hash_bytes, peak_height = row
        peak: bytes32 = bytes32(bytes.fromhex(header_hash_bytes))

        formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {peak_height - blocks_n}"
        cursor = await self.db.execute(formatted_str)
        rows = await cursor.fetchall()
        await cursor.close()
        ret: Dict[bytes32, BlockRecord] = {}
        for row in rows:
            header_hash_bytes, block_record_bytes = row
            header_hash = bytes32(bytes.fromhex(header_hash_bytes))
            ret[header_hash] = BlockRecord.from_bytes(block_record_bytes)
        return ret, peak
Example no. 5
 async def get_block_record_by_height(self,
                                      height: int) -> Optional[BlockRecord]:
     try:
         response = await self.fetch("get_block_record_by_height",
                                     {"height": height})
     except Exception:
         return None
     return BlockRecord.from_json_dict(response["block_record"])

 async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
     try:
         response = await self.fetch("get_block_record", {"header_hash": header_hash.hex()})
         if response["block_record"] is None:
             return None
     except Exception:
         return None
     return BlockRecord.from_json_dict(response["block_record"])
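Both client methods map any transport failure to None instead of letting it propagate. A minimal, self-contained sketch of that pattern; fake_fetch here is a hypothetical stand-in for the client's fetch method:

import asyncio
from typing import Any, Dict, Optional

async def fake_fetch(path: str, request: Dict[str, Any]) -> Dict[str, Any]:
    # Hypothetical transport: fails for negative heights.
    if request.get("height", 0) < 0:
        raise ValueError("bad request")
    return {"block_record": {"height": request["height"]}}

async def get_height(height: int) -> Optional[int]:
    try:
        response = await fake_fetch("get_block_record_by_height", {"height": height})
    except Exception:
        return None  # swallow transport errors, as the methods above do
    return response["block_record"]["height"]

print(asyncio.run(get_height(5)), asyncio.run(get_height(-1)))  # 5 None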
Example no. 7
 async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
     cursor = await self.db.execute(
         "SELECT block from block_records WHERE header_hash=?",
         (header_hash.hex(),),
     )
     row = await cursor.fetchone()
     await cursor.close()
     if row is not None:
         return BlockRecord.from_bytes(row[0])
     return None
Example no. 8
 async def get_block_records_at(self, heights: List[uint32]) -> List[BlockRecord]:
     if len(heights) == 0:
         return []
     heights_db = tuple(heights)
     formatted_str = (
         f'SELECT block from block_records WHERE height in ({"?," * (len(heights_db) - 1)}?) ORDER BY height ASC;'
     )
     cursor = await self.db.execute(formatted_str, heights_db)
     rows = await cursor.fetchall()
     await cursor.close()
     return [BlockRecord.from_bytes(row[0]) for row in rows]
Example no. 9
    async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:

        if self.db_wrapper.db_version == 2:

            async with self.db.execute(
                "SELECT block_record FROM full_blocks WHERE header_hash=?",
                (header_hash,),
            ) as cursor:
                row = await cursor.fetchone()
            if row is not None:
                return BlockRecord.from_bytes(row[0])

        else:
            async with self.db.execute(
                "SELECT block from block_records WHERE header_hash=?",
                (header_hash.hex(),),
            ) as cursor:
                row = await cursor.fetchone()
            if row is not None:
                return BlockRecord.from_bytes(row[0])
        return None
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    header_blocks_pickled: List[bytes],
    transaction_generators: List[Optional[bytes]],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_transactions: bool,
) -> List[bytes]:
    assert len(header_blocks_pickled) == len(transaction_generators)
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    for i in range(len(header_blocks_pickled)):
        try:
            header_block: HeaderBlock = HeaderBlock.from_bytes(
                header_blocks_pickled[i])
            generator: Optional[bytes] = transaction_generators[i]
            required_iters, error = validate_finished_header_block(
                constants,
                BlockCache(blocks),
                header_block,
                check_filter,
                expected_difficulty[i],
                expected_sub_slot_iters[i],
            )
            cost_result: Optional[CostResult] = None
            error_int: Optional[uint16] = None
            if error is not None:
                error_int = uint16(error.code.value)
            if constants_dict["NETWORK_TYPE"] == NetworkType.MAINNET.value:
                cost_result = None
            else:
                if not error and generator is not None and validate_transactions:
                    cost_result = calculate_cost_of_program(
                        SerializedProgram.from_bytes(generator),
                        constants.CLVM_COST_RATIO_CONSTANT)
            results.append(
                PreValidationResult(error_int, required_iters, cost_result))
        except Exception:
            error_stack = traceback.format_exc()
            log.error(f"Exception: {error_stack}")
            results.append(
                PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
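batch_pre_validate_blocks receives pickled inputs and returns its results as bytes, which keeps it usable from a process pool, where arguments and return values must cross a process boundary. A self-contained sketch of that shape, with a trivial stand-in for the real PreValidationResult serialization:

from concurrent.futures import ProcessPoolExecutor
from typing import List

def worker(payloads: List[bytes]) -> List[bytes]:
    # Stand-in for per-block validation: results go back as plain bytes
    # so they can be pickled cheaply to the parent process.
    return [p.upper() for p in payloads]

if __name__ == "__main__":
    with ProcessPoolExecutor() as pool:
        results = pool.submit(worker, [b"abc", b"xyz"]).result()
    print(results)  # [b'ABC', b'XYZ']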
Example no. 11
 async def get_block_records(
     self, ) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
     """
     Returns a dictionary with all blocks, as well as the header hash of the peak,
     if present.
     """
     cursor = await self.db.execute("SELECT * from block_records")
     rows = await cursor.fetchall()
     await cursor.close()
     ret: Dict[bytes32, BlockRecord] = {}
     peak: Optional[bytes32] = None
     for row in rows:
          header_hash = bytes32(bytes.fromhex(row[0]))
         ret[header_hash] = BlockRecord.from_bytes(row[3])
         if row[5]:
             assert peak is None  # Sanity check, only one peak
             peak = header_hash
     return ret, peak
Example no. 12
    async def get_block_records_in_range(
        self,
        start: int,
        stop: int,
    ) -> Dict[bytes32, BlockRecord]:
        """
        Returns a dictionary with all blocks in range between start and stop
        if present.
        """

        formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {start} and height <= {stop}"

        cursor = await self.db.execute(formatted_str)
        rows = await cursor.fetchall()
        await cursor.close()
        ret: Dict[bytes32, BlockRecord] = {}
        for row in rows:
            header_hash = bytes32(bytes.fromhex(row[0]))
            ret[header_hash] = BlockRecord.from_bytes(row[1])

        return ret
Example no. 13
    async def get_block_records_in_range(
        self,
        start: int,
        stop: int,
    ) -> Dict[bytes32, BlockRecord]:
        """
        Returns a dictionary with all blocks in range between start and stop
        if present.
        """

        formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {start} and height <= {stop}"

        cursor = await self.db.execute(formatted_str)
        rows = await cursor.fetchall()
        await cursor.close()
        ret: Dict[bytes32, BlockRecord] = {}
        for row in rows:
            header_hash_bytes, block_record_bytes = row
            header_hash = bytes32(bytes.fromhex(header_hash_bytes))
            ret[header_hash] = BlockRecord.from_bytes(block_record_bytes)

        return ret
Example no. 14
    async def get_block_records_by_hash(self, header_hashes: List[bytes32]) -> List[BlockRecord]:
        """
        Returns a list of Block Records, ordered by the same order in which header_hashes are passed in.
        Throws an exception if the blocks are not present
        """
        if len(header_hashes) == 0:
            return []

        header_hashes_db = tuple([hh.hex() for hh in header_hashes])
        formatted_str = f'SELECT block from block_records WHERE header_hash in ({"?," * (len(header_hashes_db) - 1)}?)'
        cursor = await self.db.execute(formatted_str, header_hashes_db)
        rows = await cursor.fetchall()
        await cursor.close()
        all_blocks: Dict[bytes32, BlockRecord] = {}
        for row in rows:
            block_rec: BlockRecord = BlockRecord.from_bytes(row[0])
            all_blocks[block_rec.header_hash] = block_rec
        ret: List[BlockRecord] = []
        for hh in header_hashes:
            if hh not in all_blocks:
                raise ValueError(f"Header hash {hh} not in the blockchain")
            ret.append(all_blocks[hh])
        return ret
Example no. 15
    async def get_block_records_close_to_peak(
        self, blocks_n: int
    ) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
        """
        Returns a dictionary with all blocks that have height >= peak height - blocks_n, as well as the
        peak header hash.
        """

        res = await self.db.execute("SELECT * from block_records WHERE is_peak = 1")
        peak_row = await res.fetchone()
        await res.close()
        if peak_row is None:
            return {}, None

        formatted_str = f"SELECT header_hash, block  from block_records WHERE height >= {peak_row[2] - blocks_n}"
        cursor = await self.db.execute(formatted_str)
        rows = await cursor.fetchall()
        await cursor.close()
        ret: Dict[bytes32, BlockRecord] = {}
        for row in rows:
            header_hash = bytes32(bytes.fromhex(row[0]))
            ret[header_hash] = BlockRecord.from_bytes(row[1])
        return ret, bytes32(bytes.fromhex(peak_row[0]))
Example no. 16
async def run_add_block_benchmark(version: int):

    verbose: bool = "--verbose" in sys.argv
    db_wrapper: DBWrapper = await setup_db("block-store-benchmark.db", version)

    # keep track of benchmark total time
    all_test_time = 0.0

    prev_block = bytes32([0] * 32)
    prev_ses_hash = bytes32([0] * 32)

    header_hashes = []

    try:
        block_store = await BlockStore.create(db_wrapper)

        block_height = 1
        timestamp = uint64(1631794488)
        weight = uint128(10)
        iters = uint128(123456)
        sp_index = uint8(0)
        deficit = uint8(0)
        sub_slot_iters = uint64(10)
        required_iters = uint64(100)
        transaction_block_counter = 0
        prev_transaction_block = bytes32([0] * 32)
        prev_transaction_height = uint32(0)
        total_time = 0.0
        ses_counter = 0

        if verbose:
            print("profiling add_full_block", end="")

        for height in range(block_height, block_height + NUM_ITERS):

            is_transaction = transaction_block_counter == 0
            fees = uint64(random.randint(0, 150000))
            farmer_coin, pool_coin = rewards(uint32(height))
            reward_claims_incorporated = [farmer_coin, pool_coin]

            # TODO: increase fidelity by setting these as well
            finished_challenge_slot_hashes = None
            finished_infused_challenge_slot_hashes = None
            finished_reward_slot_hashes = None

            sub_epoch_summary_included = None
            if ses_counter == 0:
                sub_epoch_summary_included = SubEpochSummary(
                    prev_ses_hash,
                    rand_hash(),
                    uint8(random.randint(0,
                                         255)),  # num_blocks_overflow: uint8
                    None,  # new_difficulty: Optional[uint64]
                    None,  # new_sub_slot_iters: Optional[uint64]
                )

            has_pool_pk = random.randint(0, 1)

            proof_of_space = ProofOfSpace(
                rand_hash(),  # challenge
                rand_g1() if has_pool_pk else None,
                rand_hash() if not has_pool_pk else None,
                rand_g1(),  # plot_public_key
                uint8(32),
                rand_bytes(8 * 32),
            )

            reward_chain_block = RewardChainBlock(
                weight,
                uint32(height),
                iters,
                sp_index,
                rand_hash(),  # pos_ss_cc_challenge_hash
                proof_of_space,
                None if sp_index == 0 else rand_vdf(),
                rand_g2(),  # challenge_chain_sp_signature
                rand_vdf(),  # challenge_chain_ip_vdf
                rand_vdf() if sp_index != 0 else None,  # reward_chain_sp_vdf
                rand_g2(),  # reward_chain_sp_signature
                rand_vdf(),  # reward_chain_ip_vdf
                rand_vdf() if deficit < 16 else None,
                is_transaction,
            )

            pool_target = PoolTarget(
                rand_hash(),  # puzzle_hash
                uint32(0),  # max_height
            )

            foliage_block_data = FoliageBlockData(
                rand_hash(),  # unfinished_reward_block_hash
                pool_target,
                rand_g2() if has_pool_pk else None,  # pool_signature
                rand_hash(),  # farmer_reward_puzzle_hash
                bytes32([0] * 32),  # extension_data
            )

            foliage = Foliage(
                prev_block,
                rand_hash(),  # reward_block_hash
                foliage_block_data,
                rand_g2(),  # foliage_block_data_signature
                rand_hash()
                if is_transaction else None,  # foliage_transaction_block_hash
                rand_g2() if is_transaction else
                None,  # foliage_transaction_block_signature
            )

            foliage_transaction_block = (
                None if not is_transaction else FoliageTransactionBlock(
                    prev_transaction_block,
                    timestamp,
                    rand_hash(),  # filter_hash
                    rand_hash(),  # additions_root
                    rand_hash(),  # removals_root
                    rand_hash(),  # transactions_info_hash
                ))

            transactions_info = (
                None if not is_transaction else TransactionsInfo(
                    rand_hash(),  # generator_root
                    rand_hash(),  # generator_refs_root
                    rand_g2(),  # aggregated_signature
                    fees,
                    uint64(random.randint(0, 12000000000)),  # cost
                    reward_claims_incorporated,
                ))

            full_block = FullBlock(
                [],  # finished_sub_slots
                reward_chain_block,
                rand_vdf_proof()
                if sp_index > 0 else None,  # challenge_chain_sp_proof
                rand_vdf_proof(),  # challenge_chain_ip_proof
                rand_vdf_proof()
                if sp_index > 0 else None,  # reward_chain_sp_proof
                rand_vdf_proof(),  # reward_chain_ip_proof
                rand_vdf_proof()
                if deficit < 4 else None,  # infused_challenge_chain_ip_proof
                foliage,
                foliage_transaction_block,
                transactions_info,
                None if not is_transaction else SerializedProgram.from_bytes(
                    clvm_generator),  # transactions_generator
                [],  # transactions_generator_ref_list
            )

            header_hash = full_block.header_hash

            record = BlockRecord(
                header_hash,
                prev_block,
                uint32(height),
                weight,
                iters,
                sp_index,
                rand_class_group_element(),
                None if deficit > 3 else rand_class_group_element(),
                rand_hash(),  # reward_infusion_new_challenge
                rand_hash(),  # challenge_block_info_hash
                sub_slot_iters,
                rand_hash(),  # pool_puzzle_hash
                rand_hash(),  # farmer_puzzle_hash
                required_iters,
                deficit,
                deficit == 16,  # overflow
                prev_transaction_height,
                timestamp if is_transaction else None,
                prev_transaction_block
                if prev_transaction_block != bytes32([0] * 32) else None,
                None if fees == 0 else fees,
                reward_claims_incorporated,
                finished_challenge_slot_hashes,
                finished_infused_challenge_slot_hashes,
                finished_reward_slot_hashes,
                sub_epoch_summary_included,
            )

            start = time()
            await block_store.add_full_block(header_hash, full_block, record,
                                             False)
            await block_store.set_in_chain([(header_hash, )])
            header_hashes.append(header_hash)
            await block_store.set_peak(header_hash)
            await db_wrapper.db.commit()

            stop = time()
            total_time += stop - start

            # 19 seconds per block
            timestamp = uint64(timestamp + 19)
            weight = uint128(weight + 10)
            iters = uint128(iters + 123456)
            sp_index = uint8((sp_index + 1) % 64)
            deficit = uint8((deficit + 3) % 17)
            ses_counter = (ses_counter + 1) % 384
            prev_block = header_hash

            # every 33 blocks is a transaction block
            transaction_block_counter = (transaction_block_counter + 1) % 33

            if is_transaction:
                prev_transaction_block = header_hash
                prev_transaction_height = uint32(height)

            if ses_counter == 0:
                prev_ses_hash = header_hash

            if verbose:
                print(".", end="")
                sys.stdout.flush()
        block_height += NUM_ITERS

        if verbose:
            print("")
        print(f"{total_time:0.4f}s, add_full_block")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_block")

        random.shuffle(header_hashes)
        start = time()
        for h in header_hashes:
            block = await block_store.get_full_block(h)
            assert block.header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_block")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_block_bytes")

        start = time()
        for h in header_hashes:
            block = await block_store.get_full_block_bytes(h)
            assert len(block) > 0

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_block_bytes")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_blocks_at")

        start = time()
        for h in range(1, block_height):
            blocks = await block_store.get_full_blocks_at([h])
            assert len(blocks) == 1
            assert blocks[0].height == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_blocks_at")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_by_hash")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_block_records_by_hash([h])
            assert len(blocks) == 1
            assert blocks[0].header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_by_hash")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_blocks_by_hash")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_blocks_by_hash([h])
            assert len(blocks) == 1
            assert blocks[0].header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_blocks_by_hash")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_record")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_block_record(h)
            assert blocks.header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_record")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_in_range")

        start = time()
        for i in range(100):
            h = random.randint(1, block_height - 100)
            blocks = await block_store.get_block_records_in_range(h, h + 99)
            assert len(blocks) == 100

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_in_range")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_close_to_peak")

        start = time()
        blocks, peak = await block_store.get_block_records_close_to_peak(99)
        assert len(blocks) == 100

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_close_to_peak")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling is_fully_compactified")

        start = time()
        for h in header_hashes:
            compactified = await block_store.is_fully_compactified(h)
            assert compactified is False

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_record")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_random_not_compactified")

        start = time()
        for i in range(1, 5000):
            blocks = await block_store.get_random_not_compactified(100)
            assert len(blocks) == 100
        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_random_not_compactified")
        all_test_time += total_time

        print(f"all tests completed in {all_test_time:0.4f}s")

        db_size = os.path.getsize(Path("block-store-benchmark.db"))
        print(f"database size: {db_size/1000000:.3f} MB")

    finally:
        await db_wrapper.db.close()
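Every phase of the benchmark follows the same shape: reset total_time, wrap the awaited calls between two time() reads, print the per-phase total, and accumulate it into all_test_time. The idiom in isolation, as a minimal runnable sketch:

import asyncio
from time import time

async def main() -> None:
    all_test_time = 0.0

    total_time = 0.0
    start = time()
    await asyncio.sleep(0.01)  # stand-in for the profiled block-store calls
    total_time += time() - start

    print(f"{total_time:0.4f}s, sleep")
    all_test_time += total_time
    print(f"all tests completed in {all_test_time:0.4f}s")

asyncio.run(main())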
Example no. 17
def header_block_to_sub_block_record(
    constants: ConsensusConstants,
    required_iters: uint64,
    block: Union[FullBlock, HeaderBlock],
    sub_slot_iters: uint64,
    overflow: bool,
    deficit: uint8,
    prev_transaction_block_height: uint32,
    ses: Optional[SubEpochSummary],
) -> BlockRecord:

    reward_claims_incorporated = (
        block.transactions_info.reward_claims_incorporated if block.transactions_info is not None else None
    )

    cbi = ChallengeBlockInfo(
        block.reward_chain_block.proof_of_space,
        block.reward_chain_block.challenge_chain_sp_vdf,
        block.reward_chain_block.challenge_chain_sp_signature,
        block.reward_chain_block.challenge_chain_ip_vdf,
    )

    if block.reward_chain_block.infused_challenge_chain_ip_vdf is not None:
        icc_output: Optional[ClassgroupElement] = block.reward_chain_block.infused_challenge_chain_ip_vdf.output
    else:
        icc_output = None

    if len(block.finished_sub_slots) > 0:
        finished_challenge_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.challenge_chain.get_hash() for sub_slot in block.finished_sub_slots
        ]
        finished_reward_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.reward_chain.get_hash() for sub_slot in block.finished_sub_slots
        ]
        finished_infused_challenge_slot_hashes: Optional[List[bytes32]] = [
            sub_slot.infused_challenge_chain.get_hash()
            for sub_slot in block.finished_sub_slots
            if sub_slot.infused_challenge_chain is not None
        ]
    elif block.height == 0:
        finished_challenge_slot_hashes = [constants.GENESIS_CHALLENGE]
        finished_reward_slot_hashes = [constants.GENESIS_CHALLENGE]
        finished_infused_challenge_slot_hashes = None
    else:
        finished_challenge_slot_hashes = None
        finished_reward_slot_hashes = None
        finished_infused_challenge_slot_hashes = None
    prev_transaction_block_hash = (
        block.foliage_transaction_block.prev_transaction_block_hash
        if block.foliage_transaction_block is not None
        else None
    )
    timestamp = block.foliage_transaction_block.timestamp if block.foliage_transaction_block is not None else None
    fees = block.transactions_info.fees if block.transactions_info is not None else None

    return BlockRecord(
        block.header_hash,
        block.prev_header_hash,
        block.height,
        block.weight,
        block.total_iters,
        block.reward_chain_block.signage_point_index,
        block.reward_chain_block.challenge_chain_ip_vdf.output,
        icc_output,
        block.reward_chain_block.get_hash(),
        cbi.get_hash(),
        sub_slot_iters,
        block.foliage.foliage_block_data.pool_target.puzzle_hash,
        block.foliage.foliage_block_data.farmer_reward_puzzle_hash,
        required_iters,
        deficit,
        overflow,
        prev_transaction_block_height,
        timestamp,
        prev_transaction_block_hash,
        fees,
        reward_claims_incorporated,
        finished_challenge_slot_hashes,
        finished_infused_challenge_slot_hashes,
        finished_reward_slot_hashes,
        ses,
    )
Example no. 18
 async def get_blockchain_state(self) -> Dict:
     response = await self.fetch("get_blockchain_state", {})
     if response["blockchain_state"]["peak"] is not None:
         response["blockchain_state"]["peak"] = BlockRecord.from_json_dict(
             response["blockchain_state"]["peak"])
     return response["blockchain_state"]
Example no. 19
    def new_peak(
        self,
        peak: BlockRecord,
        peak_full_block: FullBlock,
        sp_sub_slot: Optional[
            EndOfSubSlotBundle],  # None if not overflow, or in first/second slot
        ip_sub_slot: Optional[EndOfSubSlotBundle],  # None if in first slot
        reorg: bool,
        blocks: BlockchainInterface,
    ) -> Tuple[Optional[EndOfSubSlotBundle], List[Tuple[uint8, SignagePoint]],
               List[timelord_protocol.NewInfusionPointVDF]]:
        """
        If the peak is an overflow block, must provide two sub-slots: one for the current sub-slot and one for
        the prev sub-slot (since we still might get more blocks with an sp in the previous sub-slot)

        Results in either one or two sub-slots in finished_sub_slots.
        """
        assert len(self.finished_sub_slots) >= 1

        if ip_sub_slot is None:
            # We are still in the first sub-slot, no new sub-slots yet
            self.initialize_genesis_sub_slot()
        else:
            # This is not the first sub-slot in the chain
            sp_sub_slot_sps: List[Optional[SignagePoint]] = [
                None
            ] * self.constants.NUM_SPS_SUB_SLOT
            ip_sub_slot_sps: List[Optional[SignagePoint]] = [
                None
            ] * self.constants.NUM_SPS_SUB_SLOT
            if not reorg:
                # If it's not a reorg, we can keep signage points that we had before, in the cache
                for index, (sub_slot, sps,
                            total_iters) in enumerate(self.finished_sub_slots):
                    if sub_slot is None:
                        continue

                    if sub_slot == sp_sub_slot:
                        sp_sub_slot_sps = sps
                    if sub_slot == ip_sub_slot:
                        ip_sub_slot_sps = sps

            self.clear_slots()

            prev_sub_slot_total_iters = peak.sp_sub_slot_total_iters(
                self.constants)
            if sp_sub_slot is not None or prev_sub_slot_total_iters == 0:
                assert peak.overflow or prev_sub_slot_total_iters
                self.finished_sub_slots.append(
                    (sp_sub_slot, sp_sub_slot_sps, prev_sub_slot_total_iters))

            ip_sub_slot_total_iters = peak.ip_sub_slot_total_iters(
                self.constants)
            self.finished_sub_slots.append(
                (ip_sub_slot, ip_sub_slot_sps, ip_sub_slot_total_iters))

        new_eos: Optional[EndOfSubSlotBundle] = None
        new_sps: List[Tuple[uint8, SignagePoint]] = []
        new_ips: List[timelord_protocol.NewInfusionPointVDF] = []

        future_eos: List[EndOfSubSlotBundle] = self.future_eos_cache.get(
            peak.reward_infusion_new_challenge, []).copy()
        for eos in future_eos:
            if self.new_finished_sub_slot(eos, blocks, peak,
                                          peak_full_block) is not None:
                new_eos = eos
                break

        future_sps: List[Tuple[uint8, SignagePoint]] = self.future_sp_cache.get(
            peak.reward_infusion_new_challenge, []).copy()
        for index, sp in future_sps:
            assert sp.cc_vdf is not None
            if self.new_signage_point(index, blocks, peak, peak.sub_slot_iters,
                                      sp):
                new_sps.append((index, sp))

        for ip in self.future_ip_cache.get(peak.reward_infusion_new_challenge,
                                           []):
            new_ips.append(ip)

        self.future_eos_cache.pop(peak.reward_infusion_new_challenge, [])
        self.future_sp_cache.pop(peak.reward_infusion_new_challenge, [])
        self.future_ip_cache.pop(peak.reward_infusion_new_challenge, [])

        return new_eos, new_sps, new_ips
    async def _validate_weight_proof_inner(
        self, weight_proof: WeightProof, skip_segment_validation: bool
    ) -> Tuple[bool, uint32, List[SubEpochSummary], List[BlockRecord]]:
        if len(weight_proof.sub_epochs) == 0:
            return False, uint32(0), [], []

        peak_height = weight_proof.recent_chain_data[
            -1].reward_chain_block.height
        log.info(f"validate weight proof peak height {peak_height}")

        summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(
            self._constants, weight_proof)
        if summaries is None:
            log.error("weight proof failed sub epoch data validation")
            return False, uint32(0), [], []

        seed = summaries[-2].get_hash()
        rng = random.Random(seed)
        if not validate_sub_epoch_sampling(rng, sub_epoch_weight_list,
                                           weight_proof):
            log.error("failed weight proof sub epoch sample validation")
            return False, uint32(0), [], []

        constants, summary_bytes, wp_segment_bytes, wp_recent_chain_bytes = vars_to_bytes(
            self._constants, summaries, weight_proof)

        vdf_tasks: List[asyncio.Future] = []
        loop = asyncio.get_running_loop()
        recent_blocks_validation_task: asyncio.Future = loop.run_in_executor(
            self._executor,
            _validate_recent_blocks_and_get_records,
            constants,
            wp_recent_chain_bytes,
            summary_bytes,
            pathlib.Path(self._executor_shutdown_tempfile.name),
        )
        try:
            if not skip_segment_validation:
                segments_validated, vdfs_to_validate = _validate_sub_epoch_segments(
                    constants, rng, wp_segment_bytes, summary_bytes)

                if not segments_validated:
                    return False, uint32(0), [], []

                vdf_chunks = chunks(vdfs_to_validate, self._num_processes)
                for chunk in vdf_chunks:
                    byte_chunks = []
                    for vdf_proof, classgroup, vdf_info in chunk:
                        byte_chunks.append(
                            (bytes(vdf_proof), bytes(classgroup),
                             bytes(vdf_info)))

                    vdf_task: asyncio.Future = loop.run_in_executor(
                        self._executor,
                        _validate_vdf_batch,
                        constants,
                        byte_chunks,
                        pathlib.Path(self._executor_shutdown_tempfile.name),
                    )
                    vdf_tasks.append(vdf_task)

                for vdf_task in vdf_tasks:
                    validated = await vdf_task
                    if not validated:
                        return False, uint32(0), [], []

            valid_recent_blocks, records_bytes = await recent_blocks_validation_task
        finally:
            recent_blocks_validation_task.cancel()
            for vdf_task in vdf_tasks:
                vdf_task.cancel()

        if not valid_recent_blocks:
            log.error("failed validating weight proof recent blocks")
            return False, uint32(0), [], []

        records = [BlockRecord.from_bytes(b) for b in records_bytes]

        # TODO fix find fork point
        return True, uint32(0), summaries, records
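_validate_weight_proof_inner offloads CPU-heavy validation with run_in_executor and cancels whatever is still outstanding in a finally block, so an early failure does not leave work running. A self-contained sketch of that structure (the executor and the work function are stand-ins, not the real validation):

import asyncio
from concurrent.futures import ProcessPoolExecutor

def cpu_bound(n: int) -> int:
    return sum(range(n))

async def main() -> None:
    with ProcessPoolExecutor() as executor:
        loop = asyncio.get_running_loop()
        tasks = [loop.run_in_executor(executor, cpu_bound, n) for n in (10**5, 10**6)]
        try:
            print([await t for t in tasks])
        finally:
            for t in tasks:
                t.cancel()  # no-op for finished futures, mirrors the cleanup above

if __name__ == "__main__":
    asyncio.run(main())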
Example no. 21
    def new_peak(
        self,
        peak: BlockRecord,
        peak_full_block: FullBlock,
        sp_sub_slot: Optional[
            EndOfSubSlotBundle],  # None if not overflow, or in first/second slot
        ip_sub_slot: Optional[EndOfSubSlotBundle],  # None if in first slot
        fork_block: Optional[BlockRecord],
        blocks: BlockchainInterface,
    ) -> Tuple[Optional[EndOfSubSlotBundle], List[Tuple[uint8, SignagePoint]],
               List[timelord_protocol.NewInfusionPointVDF]]:
        """
        If the peak is an overflow block, must provide two sub-slots: one for the current sub-slot and one for
        the prev sub-slot (since we still might get more blocks with an sp in the previous sub-slot)

        Results in either one or two sub-slots in finished_sub_slots.
        """
        assert len(self.finished_sub_slots) >= 1

        if ip_sub_slot is None:
            # We are still in the first sub-slot, no new sub-slots yet
            self.initialize_genesis_sub_slot()
        else:
            # This is not the first sub-slot in the chain
            sp_sub_slot_sps: List[Optional[SignagePoint]] = [
                None
            ] * self.constants.NUM_SPS_SUB_SLOT
            ip_sub_slot_sps: List[Optional[SignagePoint]] = [
                None
            ] * self.constants.NUM_SPS_SUB_SLOT

            if fork_block is not None and fork_block.sub_slot_iters != peak.sub_slot_iters:
                # If there was a reorg and a difficulty adjustment, just clear all the slots
                self.clear_slots()
            else:
                interval_iters = calculate_sp_interval_iters(
                    self.constants, peak.sub_slot_iters)
                # If it's not a reorg, or there is a reorg on the same difficulty, we can keep signage points
                # that we had before, in the cache
                for index, (sub_slot, sps,
                            total_iters) in enumerate(self.finished_sub_slots):
                    if sub_slot is None:
                        continue

                    if fork_block is None:
                        # If this is not a reorg, we still want to remove signage points after the new peak
                        fork_block = peak
                    # index 0 is the end of sub slot
                    replaced_sps: List[Optional[SignagePoint]] = []
                    for i, sp in enumerate(sps):
                        if (total_iters +
                                i * interval_iters) < fork_block.total_iters:
                            # Sps before the fork point are still valid
                            replaced_sps.append(sp)
                        else:
                            if sp is not None:
                                log.debug(
                                    f"Reverting {i} {(total_iters + i * interval_iters)} {fork_block.total_iters}"
                                )
                            # Sps after the fork point should be removed
                            replaced_sps.append(None)
                    assert len(sps) == len(replaced_sps)

                    if sub_slot == sp_sub_slot:
                        sp_sub_slot_sps = replaced_sps
                    if sub_slot == ip_sub_slot:
                        ip_sub_slot_sps = replaced_sps

            self.clear_slots()

            prev_sub_slot_total_iters = peak.sp_sub_slot_total_iters(
                self.constants)
            if sp_sub_slot is not None or prev_sub_slot_total_iters == 0:
                assert peak.overflow or prev_sub_slot_total_iters
                self.finished_sub_slots.append(
                    (sp_sub_slot, sp_sub_slot_sps, prev_sub_slot_total_iters))

            ip_sub_slot_total_iters = peak.ip_sub_slot_total_iters(
                self.constants)
            self.finished_sub_slots.append(
                (ip_sub_slot, ip_sub_slot_sps, ip_sub_slot_total_iters))

        new_eos: Optional[EndOfSubSlotBundle] = None
        new_sps: List[Tuple[uint8, SignagePoint]] = []
        new_ips: List[timelord_protocol.NewInfusionPointVDF] = []

        future_eos: List[EndOfSubSlotBundle] = self.future_eos_cache.get(
            peak.reward_infusion_new_challenge, []).copy()
        for eos in future_eos:
            if self.new_finished_sub_slot(eos, blocks, peak,
                                          peak_full_block) is not None:
                new_eos = eos
                break

        future_sps: List[Tuple[uint8, SignagePoint]] = self.future_sp_cache.get(
            peak.reward_infusion_new_challenge, []).copy()
        for index, sp in future_sps:
            assert sp.cc_vdf is not None
            if self.new_signage_point(index, blocks, peak, peak.sub_slot_iters,
                                      sp):
                new_sps.append((index, sp))

        for ip in self.future_ip_cache.get(peak.reward_infusion_new_challenge,
                                           []):
            new_ips.append(ip)

        self.future_eos_cache.pop(peak.reward_infusion_new_challenge, [])
        self.future_sp_cache.pop(peak.reward_infusion_new_challenge, [])
        self.future_ip_cache.pop(peak.reward_infusion_new_challenge, [])

        for eos_op, _, _ in self.finished_sub_slots:
            if eos_op is not None:
                self.recent_eos.put(eos_op.challenge_chain.get_hash(),
                                    (eos_op, time.time()))

        return new_eos, new_sps, new_ips
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
) -> List[bytes]:
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        assert ValueError("Only one should be passed here")
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                if block.height in npc_results:
                    npc_result = NPCResult.from_bytes(
                        npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(
                            npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []

                if block.transactions_generator is not None and npc_result is None:
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(
                        prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM,
                            block.transactions_info.cost), True)
                    removals, tx_additions = tx_removals_and_additions(
                        npc_result.npc_list)

                header_block = get_block_header(block, tx_additions, removals)
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)

                results.append(
                    PreValidationResult(error_int, required_iters, npc_result))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(
                    PreValidationResult(error_int, required_iters, None))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
    validate_signatures: bool,
) -> List[bytes]:
    blocks: Dict[bytes, BlockRecord] = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        assert ValueError("Only one should be passed here")

    # In this case, we are validating full blocks, not headers
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                if block.height in npc_results:
                    npc_result = NPCResult.from_bytes(
                        npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(
                            npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []

                if block.transactions_generator is not None and npc_result is None:
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(
                        prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM,
                            block.transactions_info.cost),
                        cost_per_byte=constants.COST_PER_BYTE,
                        mempool_mode=False,
                        height=block.height,
                    )
                    removals, tx_additions = tx_removals_and_additions(
                        npc_result.npc_list)
                if npc_result is not None and npc_result.error is not None:
                    results.append(
                        PreValidationResult(uint16(npc_result.error), None,
                                            npc_result, False))
                    continue

                header_block = get_block_header(block, tx_additions, removals)
                # TODO: address hint error and remove ignore
                #       error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                #       "Dict[bytes32, BlockRecord]"  [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)

                successfully_validated_signatures = False
                # If we failed CLVM, no need to validate signature, the block is already invalid
                if error_int is None:

                    # If this is False, it means either we don't have a signature (not a tx block) or we have an invalid
                    # signature (which also puts in an error) or we didn't validate the signature because we want to
                    # validate it later. receive_block will attempt to validate the signature later.
                    if validate_signatures:
                        if npc_result is not None and block.transactions_info is not None:
                            pairs_pks, pairs_msgs = pkm_pairs(
                                npc_result.npc_list,
                                constants.AGG_SIG_ME_ADDITIONAL_DATA)
                            pks_objects: List[G1Element] = [
                                G1Element.from_bytes(pk) for pk in pairs_pks
                            ]
                            if not AugSchemeMPL.aggregate_verify(
                                    pks_objects, pairs_msgs, block.
                                    transactions_info.aggregated_signature):
                                error_int = uint16(
                                    Err.BAD_AGGREGATE_SIGNATURE.value)
                            else:
                                successfully_validated_signatures = True

                results.append(
                    PreValidationResult(error_int, required_iters, npc_result,
                                        successfully_validated_signatures))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None,
                                        False))
    # In this case, we are validating header blocks
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                # TODO: address hint error and remove ignore
                #       error: Argument 1 to "BlockCache" has incompatible type "Dict[bytes, BlockRecord]"; expected
                #       "Dict[bytes32, BlockRecord]"  [arg-type]
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),  # type: ignore[arg-type]
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(
                    PreValidationResult(error_int, required_iters, None,
                                        False))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None,
                                        False))
    return [bytes(r) for r in results]