Example No. 1
    async def add_full_block(self, block: FullBlock,
                             block_record: BlockRecord) -> None:
        cached = self.block_cache.get(block.header_hash)
        if cached is not None:
            # Writes to the DB can fail, so clear the cached entry here to avoid a
            # potential inconsistency; entries are only added to the cache on reads.
            self.block_cache.put(block.header_hash, None)
        cursor_1 = await self.db.execute(
            "INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?)",
            (
                block.header_hash.hex(),
                block.height,
                int(block.is_transaction_block()),
                int(block.is_fully_compactified()),
                bytes(block),
            ),
        )

        await cursor_1.close()

        cursor_2 = await self.db.execute(
            "INSERT OR REPLACE INTO block_records VALUES(?, ?, ?, ?,?, ?, ?)",
            (
                block.header_hash.hex(),
                block.prev_header_hash.hex(),
                block.height,
                bytes(block_record),
                None if block_record.sub_epoch_summary_included is None else
                bytes(block_record.sub_epoch_summary_included),
                False,
                block.is_transaction_block(),
            ),
        )
        await cursor_2.close()
Example No. 2
def get_block_header(block: FullBlock, tx_addition_coins: List[Coin],
                     removals_names: List[bytes32]) -> HeaderBlock:
    # Create filter
    byte_array_tx: List[bytearray] = []
    addition_coins = tx_addition_coins + list(
        block.get_included_reward_coins())
    if block.is_transaction_block():
        for coin in addition_coins:
            byte_array_tx.append(bytearray(coin.puzzle_hash))
        for name in removals_names:
            byte_array_tx.append(bytearray(name))

    bip158: PyBIP158 = PyBIP158(byte_array_tx)
    encoded_filter: bytes = bytes(bip158.GetEncoded())

    return HeaderBlock(
        block.finished_sub_slots,
        block.reward_chain_block,
        block.challenge_chain_sp_proof,
        block.challenge_chain_ip_proof,
        block.reward_chain_sp_proof,
        block.reward_chain_ip_proof,
        block.infused_challenge_chain_ip_proof,
        block.foliage,
        block.foliage_transaction_block,
        encoded_filter,
        block.transactions_info,
    )
Example No. 3
def run_and_get_removals_and_additions(
        block: FullBlock,
        max_cost: int,
        cost_per_byte: int,
        rust_checker: bool,
        safe_mode=False) -> Tuple[List[bytes32], List[Coin]]:
    removals: List[bytes32] = []
    additions: List[Coin] = []

    assert len(block.transactions_generator_ref_list) == 0
    if not block.is_transaction_block():
        return [], []

    if block.transactions_generator is not None:
        npc_result = get_name_puzzle_conditions(
            BlockGenerator(block.transactions_generator, []),
            max_cost,
            cost_per_byte=cost_per_byte,
            safe_mode=safe_mode,
            rust_checker=rust_checker,
        )
        # build removals list
        for npc in npc_result.npc_list:
            removals.append(npc.coin_name)
        additions.extend(additions_for_npc(npc_result.npc_list))

    rewards = block.get_included_reward_coins()
    additions.extend(rewards)
    return removals, additions
Example No. 4
def get_block_header(block: FullBlock, tx_addition_coins: List[Coin],
                     removals_names: List[bytes32]) -> HeaderBlock:
    # Create filter
    byte_array_tx: List[bytes32] = []
    addition_coins = tx_addition_coins + list(
        block.get_included_reward_coins())
    if block.is_transaction_block():
        for coin in addition_coins:
            # TODO: address hint error and remove ignore
            #       error: Argument 1 to "append" of "list" has incompatible type "bytearray"; expected "bytes32"
            #       [arg-type]
            byte_array_tx.append(bytearray(
                coin.puzzle_hash))  # type: ignore[arg-type]
        for name in removals_names:
            # TODO: address hint error and remove ignore
            #       error: Argument 1 to "append" of "list" has incompatible type "bytearray"; expected "bytes32"
            #       [arg-type]
            byte_array_tx.append(bytearray(name))  # type: ignore[arg-type]

    bip158: PyBIP158 = PyBIP158(byte_array_tx)
    encoded_filter: bytes = bytes(bip158.GetEncoded())

    return HeaderBlock(
        block.finished_sub_slots,
        block.reward_chain_block,
        block.challenge_chain_sp_proof,
        block.challenge_chain_ip_proof,
        block.reward_chain_sp_proof,
        block.reward_chain_ip_proof,
        block.infused_challenge_chain_ip_proof,
        block.foliage,
        block.foliage_transaction_block,
        encoded_filter,
        block.transactions_info,
    )
Example No. 5
    async def add_full_block(self, block: FullBlock,
                             block_record: BlockRecord) -> None:
        self.block_cache.put(block.header_hash, block)
        cursor_1 = await self.db.execute(
            "INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?)",
            (
                block.header_hash.hex(),
                block.height,
                int(block.is_transaction_block()),
                int(block.is_fully_compactified()),
                bytes(block),
            ),
        )

        await cursor_1.close()

        cursor_2 = await self.db.execute(
            "INSERT OR REPLACE INTO block_records VALUES(?, ?, ?, ?,?, ?, ?)",
            (
                block.header_hash.hex(),
                block.prev_header_hash.hex(),
                block.height,
                bytes(block_record),
                None if block_record.sub_epoch_summary_included is None else
                bytes(block_record.sub_epoch_summary_included),
                False,
                block.is_transaction_block(),
            ),
        )
        await cursor_2.close()
        await self.db.commit()
Example No. 6
 async def get_removals_and_additions(self, block: FullBlock) -> Tuple[List[bytes32], List[Coin]]:
     if block.is_transaction_block():
         if block.transactions_generator is not None:
             block_generator: Optional[BlockGenerator] = await self.get_block_generator(block)
             assert block_generator is not None
             npc_result = get_name_puzzle_conditions(block_generator, self.constants.MAX_BLOCK_COST_CLVM, False)
             removals, additions = block_removals_and_additions(block, npc_result.npc_list)
             return removals, additions
         else:
             return [], list(block.get_included_reward_coins())
     else:
         return [], []
Example No. 7
    async def add_full_block(
        self, header_hash: bytes32, block: FullBlock, block_record: BlockRecord, in_main_chain: bool
    ) -> None:
        self.block_cache.put(header_hash, block)

        if self.db_wrapper.db_version == 2:

            ses: Optional[bytes] = (
                None
                if block_record.sub_epoch_summary_included is None
                else bytes(block_record.sub_epoch_summary_included)
            )

            await self.db.execute(
                "INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?, ?, ?, ?)",
                (
                    header_hash,
                    block.prev_header_hash,
                    block.height,
                    ses,
                    int(block.is_fully_compactified()),
                    in_main_chain,  # in_main_chain
                    self.compress(block),
                    bytes(block_record),
                ),
            )

        else:
            await self.db.execute(
                "INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?)",
                (
                    header_hash.hex(),
                    block.height,
                    int(block.is_transaction_block()),
                    int(block.is_fully_compactified()),
                    bytes(block),
                ),
            )

            await self.db.execute(
                "INSERT OR REPLACE INTO block_records VALUES(?, ?, ?, ?,?, ?, ?)",
                (
                    header_hash.hex(),
                    block.prev_header_hash.hex(),
                    block.height,
                    bytes(block_record),
                    None
                    if block_record.sub_epoch_summary_included is None
                    else bytes(block_record.sub_epoch_summary_included),
                    False,
                    block.is_transaction_block(),
                ),
            )
Example No. 8
 async def get_block(self, header_hash) -> Optional[FullBlock]:
     try:
         response = await self.fetch("get_block",
                                     {"header_hash": header_hash.hex()})
     except Exception:
         return None
     return FullBlock.from_json_dict(response["block"])
Example No. 9
    async def get_blocks_by_hash(
            self, header_hashes: List[bytes32]) -> List[FullBlock]:
        """
        Returns a list of FullBlocks, in the same order in which the header_hashes are passed in.
        Throws an exception if any of the blocks are not present.
        """

        if len(header_hashes) == 0:
            return []

        header_hashes_db = tuple([hh.hex() for hh in header_hashes])
        formatted_str = f'SELECT block from full_blocks WHERE header_hash in ({"?," * (len(header_hashes_db) - 1)}?)'
        cursor = await self.db.execute(formatted_str, header_hashes_db)
        rows = await cursor.fetchall()
        await cursor.close()
        all_blocks: Dict[bytes32, FullBlock] = {}
        for row in rows:
            full_block: FullBlock = FullBlock.from_bytes(row[0])
            all_blocks[full_block.header_hash] = full_block
        ret: List[FullBlock] = []
        for hh in header_hashes:
            if hh not in all_blocks:
                raise ValueError(f"Header hash {hh} not in the blockchain")
            ret.append(all_blocks[hh])
        return ret
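
A hedged usage sketch based only on the docstring above; block_store, h1 and h2 are illustrative names (not part of the original listing), and the calls are assumed to run inside an async function:

    blocks = await block_store.get_blocks_by_hash([h1, h2])
    # Results come back in the same order as the requested hashes.
    assert [b.header_hash for b in blocks] == [h1, h2]
    # Requesting a header hash that is not stored raises ValueError.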
Example No. 10
 async def get_tx_removals_and_additions(
     self,
     block: FullBlock,
     npc_result: Optional[NPCResult] = None
 ) -> Tuple[List[bytes32], List[Coin]]:
     if block.is_transaction_block():
         if block.transactions_generator is not None:
             if npc_result is None:
                 block_generator: Optional[
                     BlockGenerator] = await self.get_block_generator(block)
                 assert block_generator is not None
                 npc_result = get_name_puzzle_conditions(
                     block_generator,
                     self.constants.MAX_BLOCK_COST_CLVM,
                     cost_per_byte=self.constants.COST_PER_BYTE,
                     safe_mode=False,
                     rust_checker=block.height >
                     self.constants.RUST_CONDITION_CHECKER,
                 )
             tx_removals, tx_additions = tx_removals_and_additions(
                 npc_result.npc_list)
             return tx_removals, tx_additions
         else:
             return [], []
     else:
         return [], []
Example No. 11
 async def get_all_block(self, start: uint32,
                         end: uint32) -> List[FullBlock]:
     response = await self.fetch("get_blocks", {
         "start": start,
         "end": end,
         "exclude_header_hash": True
     })
     return [FullBlock.from_json_dict(r) for r in response["blocks"]]
Example No. 12
    async def new_block(self, block: FullBlock, tx_additions: List[Coin],
                        tx_removals: List[bytes32]):
        """
        Only called for transaction blocks (which therefore have rewards and transactions)
        """
        if block.is_transaction_block() is False:
            return
        assert block.foliage_transaction_block is not None

        for coin in tx_additions:
            record: CoinRecord = CoinRecord(
                coin,
                block.height,
                uint32(0),
                False,
                False,
                block.foliage_transaction_block.timestamp,
            )
            await self._add_coin_record(record, False)

        included_reward_coins = block.get_included_reward_coins()
        if block.height == 0:
            assert len(included_reward_coins) == 0
        else:
            assert len(included_reward_coins) >= 2

        for coin in included_reward_coins:
            reward_coin_r: CoinRecord = CoinRecord(
                coin,
                block.height,
                uint32(0),
                False,
                True,
                block.foliage_transaction_block.timestamp,
            )
            await self._add_coin_record(reward_coin_r, False)

        total_amount_spent: int = 0
        for coin_name in tx_removals:
            total_amount_spent += await self._set_spent(
                coin_name, block.height)

        # Sanity check, already checked in block_body_validation
        assert sum([a.amount for a in tx_additions]) <= total_amount_spent
Example No. 13
    async def get_full_blocks_at(self, heights: List[uint32]) -> List[FullBlock]:
        if len(heights) == 0:
            return []

        heights_db = tuple(heights)
        formatted_str = f'SELECT block from full_blocks WHERE height in ({"?," * (len(heights_db) - 1)}?)'
        cursor = await self.db.execute(formatted_str, heights_db)
        rows = await cursor.fetchall()
        await cursor.close()
        return [FullBlock.from_bytes(row[0]) for row in rows]
Example No. 14
 async def get_full_block(self, header_hash: bytes32) -> Optional[FullBlock]:
     cached = self.block_cache.get(header_hash)
     if cached is not None:
         return cached
     cursor = await self.db.execute("SELECT block from full_blocks WHERE header_hash=?", (header_hash.hex(),))
     row = await cursor.fetchone()
     await cursor.close()
     if row is not None:
         return FullBlock.from_bytes(row[0])
     return None
Example No. 15
    async def new_block(self, block: FullBlock, additions: List[Coin],
                        removals: List[bytes32]):
        """
        Only called for transaction blocks (which therefore have rewards and transactions)
        """
        if block.is_transaction_block() is False:
            return
        assert block.foliage_transaction_block is not None

        for coin in additions:
            record: CoinRecord = CoinRecord(
                coin,
                block.height,
                uint32(0),
                False,
                False,
                block.foliage_transaction_block.timestamp,
            )
            await self._add_coin_record(record)

        included_reward_coins = block.get_included_reward_coins()
        if block.height == 0:
            assert len(included_reward_coins) == 0
        else:
            assert len(included_reward_coins) >= 2

        for coin in included_reward_coins:
            reward_coin_r: CoinRecord = CoinRecord(
                coin,
                block.height,
                uint32(0),
                False,
                True,
                block.foliage_transaction_block.timestamp,
            )
            await self._add_coin_record(reward_coin_r)

        for coin_name in removals:
            await self._set_spent(coin_name, block.height)
Example No. 16
 async def get_blocks(self,
                      start: int,
                      end: int,
                      exclude_reorged: bool = False) -> List[FullBlock]:
     response = await self.fetch(
         "get_blocks", {
             "start": start,
             "end": end,
             "exclude_header_hash": True,
             "exclude_reorged": exclude_reorged
         })
     return [
         FullBlock.from_json_dict(block) for block in response["blocks"]
     ]
Example No. 17
 async def get_full_block(self, header_hash: bytes32) -> Optional[FullBlock]:
     cached = self.block_cache.get(header_hash)
     if cached is not None:
         log.debug(f"cache hit for block {header_hash.hex()}")
         return cached
     log.debug(f"cache miss for block {header_hash.hex()}")
     cursor = await self.db.execute("SELECT block from full_blocks WHERE header_hash=?", (header_hash.hex(),))
     row = await cursor.fetchone()
     await cursor.close()
     if row is not None:
         block = FullBlock.from_bytes(row[0])
         self.block_cache.put(header_hash, block)
         return block
     return None
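
The version above also fills the cache on a miss, making get_full_block a read-through cache. A minimal usage sketch under the assumption of an initialized block_store (illustrative name, not part of the original listing), run inside an async function:

    block = await block_store.get_full_block(header_hash)  # cache miss: read from DB, then cached
    again = await block_store.get_full_block(header_hash)  # cache hit: served from block_cache
    assert again == block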
Example No. 18
 async def get_tx_removals_and_additions(
     self, block: FullBlock, npc_result: Optional[NPCResult] = None
 ) -> Tuple[List[bytes32], List[Coin]]:
     if block.is_transaction_block():
         if block.transactions_generator is not None:
             if npc_result is None:
                 block_generator: Optional[BlockGenerator] = await self.get_block_generator(block)
                 assert block_generator is not None
                 npc_result = get_name_puzzle_conditions(block_generator, self.constants.MAX_BLOCK_COST_CLVM, False)
             tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
             return tx_removals, tx_additions
         else:
             return [], []
     else:
         return [], []
Example No. 19
def get_future_reward_coins(block: FullBlock) -> Tuple[Coin, Coin]:
    pool_amount = calculate_pool_reward(block.height)
    farmer_amount = calculate_base_farmer_reward(block.height)
    if block.is_transaction_block():
        assert block.transactions_info is not None
        farmer_amount = uint64(farmer_amount + block.transactions_info.fees)
    pool_coin: Coin = create_pool_coin(
        block.height, block.foliage.foliage_block_data.pool_target.puzzle_hash,
        pool_amount, constants.GENESIS_CHALLENGE)
    farmer_coin: Coin = create_farmer_coin(
        block.height,
        block.foliage.foliage_block_data.farmer_reward_puzzle_hash,
        farmer_amount,
        constants.GENESIS_CHALLENGE,
    )
    return pool_coin, farmer_coin
Example No. 20
    async def get_header_blocks_in_range(
        self,
        start: int,
        stop: int,
    ) -> Dict[bytes32, HeaderBlock]:

        formatted_str = f"SELECT header_hash,block from full_blocks WHERE height >= {start} and height <= {stop}"

        cursor = await self.db.execute(formatted_str)
        rows = await cursor.fetchall()
        await cursor.close()
        ret: Dict[bytes32, HeaderBlock] = {}
        for row in rows:
            # Ugly hack, until full_block.get_block_header is rewritten as part of generator runner change
            await asyncio.sleep(0.001)
            header_hash = bytes.fromhex(row[0])
            full_block: FullBlock = FullBlock.from_bytes(row[1])
            ret[header_hash] = full_block.get_block_header()

        return ret
Example No. 21
def persistent_blocks(
    num_of_blocks: int,
    db_name: str,
    seed: bytes = b"",
    empty_sub_slots=0,
    normalized_to_identity_cc_eos: bool = False,
    normalized_to_identity_icc_eos: bool = False,
    normalized_to_identity_cc_sp: bool = False,
    normalized_to_identity_cc_ip: bool = False,
):
    # Try loading from disk; if that fails, create a new blocks.db file.
    # TODO hash fixtures.py and blocktool.py, add to path, delete if the files changed
    block_path_dir = Path("~/.chia/blocks").expanduser()
    file_path = Path(f"~/.chia/blocks/{db_name}").expanduser()
    if not path.exists(block_path_dir):
        mkdir(block_path_dir.parent)
        mkdir(block_path_dir)

    if file_path.exists():
        try:
            bytes_list = file_path.read_bytes()
            block_bytes_list: List[bytes] = pickle.loads(bytes_list)
            blocks: List[FullBlock] = []
            for block_bytes in block_bytes_list:
                blocks.append(FullBlock.from_bytes(block_bytes))
            if len(blocks) == num_of_blocks:
                print(f"\n loaded {file_path} with {len(blocks)} blocks")
                return blocks
        except EOFError:
            print("\n error reading db file")

    return new_test_db(
        file_path,
        num_of_blocks,
        seed,
        empty_sub_slots,
        normalized_to_identity_cc_eos,
        normalized_to_identity_icc_eos,
        normalized_to_identity_cc_sp,
        normalized_to_identity_cc_ip,
    )
Example No. 22
def block_removals_and_additions(
        block: FullBlock,
        npc_list: List[NPC]) -> Tuple[List[bytes32], List[Coin]]:
    """
    Returns all coins added and removed in block, including farmer and pool reward.
    """

    removals: List[bytes32] = []
    additions: List[Coin] = []

    # build removals list
    if npc_list is None:
        return [], []
    for npc in npc_list:
        removals.append(npc.coin_name)

    additions.extend(additions_for_npc(npc_list))

    rewards = block.get_included_reward_coins()
    additions.extend(rewards)
    return removals, additions
Example No. 23
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
) -> List[bytes]:
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(
        ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        raise ValueError("Only one should be passed here")
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                if block.height in npc_results:
                    npc_result = NPCResult.from_bytes(
                        npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(
                            npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []

                if block.transactions_generator is not None and npc_result is None:
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(
                        prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM,
                            block.transactions_info.cost), True)
                    removals, tx_additions = tx_removals_and_additions(
                        npc_result.npc_list)

                header_block = get_block_header(block, tx_additions, removals)
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)

                results.append(
                    PreValidationResult(error_int, required_iters, npc_result))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(
                    PreValidationResult(error_int, required_iters, None))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(
                    PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
Example No. 24
 def maybe_decompress(self, block_bytes: bytes) -> FullBlock:
     if self.db_wrapper.db_version == 2:
         return FullBlock.from_bytes(zstd.decompress(block_bytes))
     else:
         return FullBlock.from_bytes(block_bytes)
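
maybe_decompress implies that a version-2 database stores zstd-compressed block bytes (compare the self.compress call in Example No. 7). A minimal sketch of what the matching compression helper could look like under that assumption; it is not taken from the listing:

 def compress(self, block: FullBlock) -> bytes:
     # Assumption: the v2 schema stores the serialized block compressed with zstd.
     return zstd.compress(bytes(block))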
Example No. 25
    def test_json(self):
        block = bt.create_genesis_block(test_constants, bytes([0] * 32), b"0")

        dict_block = block.to_json_dict()
        assert FullBlock.from_json_dict(dict_block) == block
Example No. 26
async def run_add_block_benchmark(version: int):

    verbose: bool = "--verbose" in sys.argv
    db_wrapper: DBWrapper = await setup_db("block-store-benchmark.db", version)

    # keep track of benchmark total time
    all_test_time = 0.0

    prev_block = bytes32([0] * 32)
    prev_ses_hash = bytes32([0] * 32)

    header_hashes = []

    try:
        block_store = await BlockStore.create(db_wrapper)

        block_height = 1
        timestamp = uint64(1631794488)
        weight = uint128(10)
        iters = uint128(123456)
        sp_index = uint8(0)
        deficit = uint8(0)
        sub_slot_iters = uint64(10)
        required_iters = uint64(100)
        transaction_block_counter = 0
        prev_transaction_block = bytes32([0] * 32)
        prev_transaction_height = uint32(0)
        total_time = 0.0
        ses_counter = 0

        if verbose:
            print("profiling add_full_block", end="")

        for height in range(block_height, block_height + NUM_ITERS):

            is_transaction = transaction_block_counter == 0
            fees = uint64(random.randint(0, 150000))
            farmer_coin, pool_coin = rewards(uint32(height))
            reward_claims_incorporated = [farmer_coin, pool_coin]

            # TODO: increase fidelity by setting these as well
            finished_challenge_slot_hashes = None
            finished_infused_challenge_slot_hashes = None
            finished_reward_slot_hashes = None

            sub_epoch_summary_included = None
            if ses_counter == 0:
                sub_epoch_summary_included = SubEpochSummary(
                    prev_ses_hash,
                    rand_hash(),
                    uint8(random.randint(0,
                                         255)),  # num_blocks_overflow: uint8
                    None,  # new_difficulty: Optional[uint64]
                    None,  # new_sub_slot_iters: Optional[uint64]
                )

            has_pool_pk = random.randint(0, 1)

            proof_of_space = ProofOfSpace(
                rand_hash(),  # challenge
                rand_g1() if has_pool_pk else None,
                rand_hash() if not has_pool_pk else None,
                rand_g1(),  # plot_public_key
                uint8(32),
                rand_bytes(8 * 32),
            )

            reward_chain_block = RewardChainBlock(
                weight,
                uint32(height),
                iters,
                sp_index,
                rand_hash(),  # pos_ss_cc_challenge_hash
                proof_of_space,
                None if sp_index == 0 else rand_vdf(),
                rand_g2(),  # challenge_chain_sp_signature
                rand_vdf(),  # challenge_chain_ip_vdf
                rand_vdf() if sp_index != 0 else None,  # reward_chain_sp_vdf
                rand_g2(),  # reward_chain_sp_signature
                rand_vdf(),  # reward_chain_ip_vdf
                rand_vdf() if deficit < 16 else None,
                is_transaction,
            )

            pool_target = PoolTarget(
                rand_hash(),  # puzzle_hash
                uint32(0),  # max_height
            )

            foliage_block_data = FoliageBlockData(
                rand_hash(),  # unfinished_reward_block_hash
                pool_target,
                rand_g2() if has_pool_pk else None,  # pool_signature
                rand_hash(),  # farmer_reward_puzzle_hash
                bytes32([0] * 32),  # extension_data
            )

            foliage = Foliage(
                prev_block,
                rand_hash(),  # reward_block_hash
                foliage_block_data,
                rand_g2(),  # foliage_block_data_signature
                rand_hash()
                if is_transaction else None,  # foliage_transaction_block_hash
                rand_g2() if is_transaction else
                None,  # foliage_transaction_block_signature
            )

            foliage_transaction_block = (
                None if not is_transaction else FoliageTransactionBlock(
                    prev_transaction_block,
                    timestamp,
                    rand_hash(),  # filter_hash
                    rand_hash(),  # additions_root
                    rand_hash(),  # removals_root
                    rand_hash(),  # transactions_info_hash
                ))

            transactions_info = (
                None if not is_transaction else TransactionsInfo(
                    rand_hash(),  # generator_root
                    rand_hash(),  # generator_refs_root
                    rand_g2(),  # aggregated_signature
                    fees,
                    uint64(random.randint(0, 12000000000)),  # cost
                    reward_claims_incorporated,
                ))

            full_block = FullBlock(
                [],  # finished_sub_slots
                reward_chain_block,
                rand_vdf_proof()
                if sp_index > 0 else None,  # challenge_chain_sp_proof
                rand_vdf_proof(),  # challenge_chain_ip_proof
                rand_vdf_proof()
                if sp_index > 0 else None,  # reward_chain_sp_proof
                rand_vdf_proof(),  # reward_chain_ip_proof
                rand_vdf_proof()
                if deficit < 4 else None,  # infused_challenge_chain_ip_proof
                foliage,
                foliage_transaction_block,
                transactions_info,
                None if is_transaction else SerializedProgram.from_bytes(
                    clvm_generator),  # transactions_generator
                [],  # transactions_generator_ref_list
            )

            header_hash = full_block.header_hash

            record = BlockRecord(
                header_hash,
                prev_block,
                uint32(height),
                weight,
                iters,
                sp_index,
                rand_class_group_element(),
                None if deficit > 3 else rand_class_group_element(),
                rand_hash(),  # reward_infusion_new_challenge
                rand_hash(),  # challenge_block_info_hash
                sub_slot_iters,
                rand_hash(),  # pool_puzzle_hash
                rand_hash(),  # farmer_puzzle_hash
                required_iters,
                deficit,
                deficit == 16,
                prev_transaction_height,
                timestamp if is_transaction else None,
                prev_transaction_block
                if prev_transaction_block != bytes32([0] * 32) else None,
                None if fees == 0 else fees,
                reward_claims_incorporated,
                finished_challenge_slot_hashes,
                finished_infused_challenge_slot_hashes,
                finished_reward_slot_hashes,
                sub_epoch_summary_included,
            )

            start = time()
            await block_store.add_full_block(header_hash, full_block, record,
                                             False)
            await block_store.set_in_chain([(header_hash, )])
            header_hashes.append(header_hash)
            await block_store.set_peak(header_hash)
            await db_wrapper.db.commit()

            stop = time()
            total_time += stop - start

            # 19 seconds per block
            timestamp = uint64(timestamp + 19)
            weight = uint128(weight + 10)
            iters = uint128(iters + 123456)
            sp_index = uint8((sp_index + 1) % 64)
            deficit = uint8((deficit + 3) % 17)
            ses_counter = (ses_counter + 1) % 384
            prev_block = header_hash

            # every 33 blocks is a transaction block
            transaction_block_counter = (transaction_block_counter + 1) % 33

            if is_transaction:
                prev_transaction_block = header_hash
                prev_transaction_height = uint32(height)

            if ses_counter == 0:
                prev_ses_hash = header_hash

            if verbose:
                print(".", end="")
                sys.stdout.flush()
        block_height += NUM_ITERS

        if verbose:
            print("")
        print(f"{total_time:0.4f}s, add_full_block")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_block")

        random.shuffle(header_hashes)
        start = time()
        for h in header_hashes:
            block = await block_store.get_full_block(h)
            assert block.header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_block")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_block_bytes")

        start = time()
        for h in header_hashes:
            block = await block_store.get_full_block_bytes(h)
            assert len(block) > 0

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_block_bytes")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_full_blocks_at")

        start = time()
        for h in range(1, block_height):
            blocks = await block_store.get_full_blocks_at([h])
            assert len(blocks) == 1
            assert blocks[0].height == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_full_blocks_at")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_by_hash")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_block_records_by_hash([h])
            assert len(blocks) == 1
            assert blocks[0].header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_by_hash")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_blocks_by_hash")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_blocks_by_hash([h])
            assert len(blocks) == 1
            assert blocks[0].header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_blocks_by_hash")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_record")

        start = time()
        for h in header_hashes:
            blocks = await block_store.get_block_record(h)
            assert blocks.header_hash == h

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_record")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_in_range")

        start = time()
        for i in range(100):
            h = random.randint(1, block_height - 100)
            blocks = await block_store.get_block_records_in_range(h, h + 99)
            assert len(blocks) == 100

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_in_range")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_block_records_close_to_peak")

        start = time()
        blocks, peak = await block_store.get_block_records_close_to_peak(99)
        assert len(blocks) == 100

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_records_close_to_peak")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling is_fully_compactified")

        start = time()
        for h in header_hashes:
            compactified = await block_store.is_fully_compactified(h)
            assert compactified is False

        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_block_record")
        all_test_time += total_time

        total_time = 0.0
        if verbose:
            print("profiling get_random_not_compactified")

        start = time()
        for i in range(1, 5000):
            blocks = await block_store.get_random_not_compactified(100)
            assert len(blocks) == 100
        stop = time()
        total_time += stop - start

        print(f"{total_time:0.4f}s, get_random_not_compactified")
        all_test_time += total_time

        print(f"all tests completed in {all_test_time:0.4f}s")

        db_size = os.path.getsize(Path("block-store-benchmark.db"))
        print(f"database size: {db_size/1000000:.3f} MB")

    finally:
        await db_wrapper.db.close()
Example No. 27
    async def receive_block(
        self,
        block: FullBlock,
        pre_validation_result: Optional[PreValidationResult] = None,
        fork_point_with_peak: Optional[uint32] = None,
        summaries_to_check: Optional[List[SubEpochSummary]] = None,  # passed only on long sync
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]:
        """
        This method must be called under the blockchain lock
        Adds a new block into the blockchain, if it's valid and connected to the current
        blockchain, regardless of whether it is the child of a head, or another block.
        Returns a header if block is added to head. Returns an error if the block is
        invalid. Also returns the fork height, in the case of a new peak.
        """
        genesis: bool = block.height == 0

        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (
                ReceiveBlockResult.DISCONNECTED_BLOCK,
                Err.INVALID_PREV_BLOCK_HASH,
                None,
            )

        if not genesis and (self.block_record(block.prev_header_hash).height + 1) != block.height:
            return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None

        npc_result: Optional[NPCResult] = None
        if pre_validation_result is None:
            if block.height == 0:
                prev_b: Optional[BlockRecord] = None
            else:
                prev_b = self.block_record(block.prev_header_hash)
            sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
                self.constants, len(block.finished_sub_slots) > 0, prev_b, self
            )

            if block.is_transaction_block():
                if block.transactions_generator is not None:
                    try:
                        block_generator: Optional[BlockGenerator] = await self.get_block_generator(block)
                    except ValueError:
                        return ReceiveBlockResult.INVALID_BLOCK, Err.GENERATOR_REF_HAS_NO_GENERATOR, None
                    assert block_generator is not None and block.transactions_info is not None
                    npc_result = get_name_puzzle_conditions(
                        block_generator, min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost), False
                    )
                    removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
                else:
                    removals, tx_additions = [], []
                header_block = get_block_header(block, tx_additions, removals)
            else:
                npc_result = None
                header_block = get_block_header(block, [], [])

            required_iters, error = validate_finished_header_block(
                self.constants,
                self,
                header_block,
                False,
                difficulty,
                sub_slot_iters,
            )

            if error is not None:
                return ReceiveBlockResult.INVALID_BLOCK, error.code, None
        else:
            npc_result = pre_validation_result.npc_result
            required_iters = pre_validation_result.required_iters
            assert pre_validation_result.error is None
        assert required_iters is not None
        error_code, _ = await validate_block_body(
            self.constants,
            self,
            self.block_store,
            self.coin_store,
            self.get_peak(),
            block,
            block.height,
            npc_result,
            fork_point_with_peak,
            self.get_block_generator,
        )
        if error_code is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error_code, None

        block_record = block_to_block_record(
            self.constants,
            self,
            required_iters,
            block,
            None,
        )
        # Always add the block to the database
        async with self.block_store.db_wrapper.lock:
            try:
                # Perform the DB operations to update the state, and rollback if something goes wrong
                await self.block_store.db_wrapper.begin_transaction()
                await self.block_store.add_full_block(block, block_record)
                fork_height, peak_height, records = await self._reconsider_peak(
                    block_record, genesis, fork_point_with_peak, npc_result
                )
                await self.block_store.db_wrapper.commit_transaction()

                # Then update the memory cache. It is important that this task is not cancelled and does not throw
                self.add_block_record(block_record)
                for fetched_block_record in records:
                    self.__height_to_hash[fetched_block_record.height] = fetched_block_record.header_hash
                    if fetched_block_record.sub_epoch_summary_included is not None:
                        self.__sub_epoch_summaries[
                            fetched_block_record.height
                        ] = fetched_block_record.sub_epoch_summary_included
                if peak_height is not None:
                    self._peak_height = peak_height
                self.block_store.cache_block(block)
            except BaseException:
                await self.block_store.db_wrapper.rollback_transaction()
                raise
        if fork_height is not None:
            return ReceiveBlockResult.NEW_PEAK, None, fork_height
        else:
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
Example No. 28
    async def receive_block(
        self,
        block: FullBlock,
        pre_validation_result: Optional[PreValidationResult] = None,
        fork_point_with_peak: Optional[uint32] = None,
    ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]:
        """
        This method must be called under the blockchain lock
        Adds a new block into the blockchain, if it's valid and connected to the current
        blockchain, regardless of whether it is the child of a head, or another block.
        Returns a header if block is added to head. Returns an error if the block is
        invalid. Also returns the fork height, in the case of a new peak.
        """
        genesis: bool = block.height == 0

        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None

        if not self.contains_block(block.prev_header_hash) and not genesis:
            return (
                ReceiveBlockResult.DISCONNECTED_BLOCK,
                Err.INVALID_PREV_BLOCK_HASH,
                None,
            )

        if not genesis and (self.block_record(block.prev_header_hash).height + 1) != block.height:
            return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None

        if pre_validation_result is None:
            if block.height == 0:
                prev_b: Optional[BlockRecord] = None
            else:
                prev_b = self.block_record(block.prev_header_hash)
            sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
                self.constants, len(block.finished_sub_slots) > 0, prev_b, self
            )
            required_iters, error = validate_finished_header_block(
                self.constants,
                self,
                block.get_block_header(),
                False,
                difficulty,
                sub_slot_iters,
            )

            if error is not None:
                return ReceiveBlockResult.INVALID_BLOCK, error.code, None
        else:
            required_iters = pre_validation_result.required_iters
            assert pre_validation_result.error is None
        assert required_iters is not None
        error_code, _ = await validate_block_body(
            self.constants,
            self,
            self.block_store,
            self.coin_store,
            self.get_peak(),
            block,
            block.height,
            pre_validation_result.cost_result if pre_validation_result is not None else None,
            fork_point_with_peak,
        )
        if error_code is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error_code, None

        block_record = block_to_block_record(
            self.constants,
            self,
            required_iters,
            block,
            None,
        )
        # Always add the block to the database
        async with self.block_store.db_wrapper.lock:
            try:
                await self.block_store.db_wrapper.begin_transaction()
                await self.block_store.add_full_block(block, block_record)
                fork_height, peak_height, records = await self._reconsider_peak(
                    block_record, genesis, fork_point_with_peak
                )
                await self.block_store.db_wrapper.commit_transaction()
                self.add_block_record(block_record)
                for fetched_block_record in records:
                    self.__height_to_hash[fetched_block_record.height] = fetched_block_record.header_hash
                    if fetched_block_record.sub_epoch_summary_included is not None:
                        self.__sub_epoch_summaries[
                            fetched_block_record.height
                        ] = fetched_block_record.sub_epoch_summary_included
                if peak_height is not None:
                    self._peak_height = peak_height
            except BaseException:
                await self.block_store.db_wrapper.rollback_transaction()
                raise
        if fork_height is not None:
            return ReceiveBlockResult.NEW_PEAK, None, fork_height
        else:
            return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None
Example No. 29
    ConditionOpcode.ASSERT_SECONDS_ABSOLUTE[0]: 13,
    ConditionOpcode.ASSERT_HEIGHT_RELATIVE[0]: 14,
    ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE[0]: 15,
}

c = sqlite3.connect(sys.argv[1])

rows = c.execute(
    "SELECT header_hash, height, block FROM full_blocks ORDER BY height")

height_to_hash: List[bytes] = []

for r in rows:
    hh = bytes.fromhex(r[0])
    height = r[1]
    block = FullBlock.from_bytes(r[2])

    if len(height_to_hash) <= height:
        assert len(height_to_hash) == height
        height_to_hash.append(hh)
    else:
        height_to_hash[height] = hh

    if height > 0:
        prev_hh = block.prev_header_hash
        h = height - 1
        while height_to_hash[h] != prev_hh:
            height_to_hash[h] = prev_hh
            ref = c.execute(
                "SELECT block FROM full_blocks WHERE header_hash=?",
                (prev_hh.hex(), ))
Example No. 30
def unfinished_block_to_full_block(
    unfinished_block: UnfinishedBlock,
    cc_ip_vdf: VDFInfo,
    cc_ip_proof: VDFProof,
    rc_ip_vdf: VDFInfo,
    rc_ip_proof: VDFProof,
    icc_ip_vdf: Optional[VDFInfo],
    icc_ip_proof: Optional[VDFProof],
    finished_sub_slots: List[EndOfSubSlotBundle],
    prev_block: Optional[BlockRecord],
    blocks: BlockchainInterface,
    total_iters_sp: uint128,
    difficulty: uint64,
) -> FullBlock:
    """
    Converts an unfinished block to a finished block. Includes all the infusion point VDFs as well as tweaking
    other properties (height, weight, sub-slots, etc)

    Args:
        unfinished_block: the unfinished block to finish
        cc_ip_vdf: the challenge chain vdf info at the infusion point
        cc_ip_proof: the challenge chain proof
        rc_ip_vdf: the reward chain vdf info at the infusion point
        rc_ip_proof: the reward chain proof
        icc_ip_vdf: the infused challenge chain vdf info at the infusion point
        icc_ip_proof: the infused challenge chain proof
        finished_sub_slots: finished sub slots from the prev block to the infusion point
        prev_block: prev block from the infusion point
        blocks: dictionary from header hash to SBR of all included SBR
        total_iters_sp: total iters at the signage point
        difficulty: difficulty at the infusion point

    """
    # Replace things that need to be replaced, since foliage blocks did not necessarily have the latest information
    if prev_block is None:
        is_transaction_block = True
        new_weight = uint128(difficulty)
        new_height = uint32(0)
        new_foliage = unfinished_block.foliage
        new_foliage_transaction_block = unfinished_block.foliage_transaction_block
        new_tx_info = unfinished_block.transactions_info
        new_generator = unfinished_block.transactions_generator
        new_generator_ref_list = unfinished_block.transactions_generator_ref_list
    else:
        is_transaction_block, _ = get_prev_transaction_block(prev_block, blocks, total_iters_sp)
        new_weight = uint128(prev_block.weight + difficulty)
        new_height = uint32(prev_block.height + 1)
        if is_transaction_block:
            new_fbh = unfinished_block.foliage.foliage_transaction_block_hash
            new_fbs = unfinished_block.foliage.foliage_transaction_block_signature
            new_foliage_transaction_block = unfinished_block.foliage_transaction_block
            new_tx_info = unfinished_block.transactions_info
            new_generator = unfinished_block.transactions_generator
            new_generator_ref_list = unfinished_block.transactions_generator_ref_list
        else:
            new_fbh = None
            new_fbs = None
            new_foliage_transaction_block = None
            new_tx_info = None
            new_generator = None
            new_generator_ref_list = []
        assert (new_fbh is None) == (new_fbs is None)
        new_foliage = replace(
            unfinished_block.foliage,
            prev_block_hash=prev_block.header_hash,
            foliage_transaction_block_hash=new_fbh,
            foliage_transaction_block_signature=new_fbs,
        )
    ret = FullBlock(
        finished_sub_slots,
        RewardChainBlock(
            new_weight,
            new_height,
            unfinished_block.reward_chain_block.total_iters,
            unfinished_block.reward_chain_block.signage_point_index,
            unfinished_block.reward_chain_block.pos_ss_cc_challenge_hash,
            unfinished_block.reward_chain_block.proof_of_space,
            unfinished_block.reward_chain_block.challenge_chain_sp_vdf,
            unfinished_block.reward_chain_block.challenge_chain_sp_signature,
            cc_ip_vdf,
            unfinished_block.reward_chain_block.reward_chain_sp_vdf,
            unfinished_block.reward_chain_block.reward_chain_sp_signature,
            rc_ip_vdf,
            icc_ip_vdf,
            is_transaction_block,
        ),
        unfinished_block.challenge_chain_sp_proof,
        cc_ip_proof,
        unfinished_block.reward_chain_sp_proof,
        rc_ip_proof,
        icc_ip_proof,
        new_foliage,
        new_foliage_transaction_block,
        new_tx_info,
        new_generator,
        new_generator_ref_list,
    )
    return recursive_replace(
        ret,
        "foliage.reward_block_hash",
        ret.reward_chain_block.get_hash(),
    )