Example #1
    async def coins_of_interest_added(self, coins: List[Coin],
                                      height: uint32) -> List[Coin]:
        (
            trade_removals,
            trade_additions,
        ) = await self.trade_manager.get_coins_of_interest()
        trade_adds: List[Coin] = []
        block: Optional[BlockRecord] = await self.blockchain.get_block_record_from_db(
            self.blockchain.height_to_hash(height))
        assert block is not None

        pool_rewards = set()
        farmer_rewards = set()

        prev = await self.blockchain.get_block_record_from_db(block.prev_hash)
        # [block 1] [block 2] [tx block 3] [block 4] [block 5] [tx block 6]
        # [tx block 6] will contain rewards for [block 1] [block 2] [tx block 3]
        while prev is not None:
            # step 1: walk back to the most recent transaction block
            if prev.is_transaction_block:
                break
            prev = await self.blockchain.get_block_record_from_db(
                prev.prev_hash)

        if prev is not None:
            # include last block
            pool_parent = pool_parent_id(uint32(prev.height),
                                         self.constants.GENESIS_CHALLENGE)
            farmer_parent = farmer_parent_id(uint32(prev.height),
                                             self.constants.GENESIS_CHALLENGE)
            pool_rewards.add(pool_parent)
            farmer_rewards.add(farmer_parent)
            prev = await self.blockchain.get_block_record_from_db(
                prev.prev_hash)

        while prev is not None:
            # step 2: walk further back, collecting reward parent ids, until the previous transaction block (inclusive)
            pool_parent = pool_parent_id(uint32(prev.height),
                                         self.constants.GENESIS_CHALLENGE)
            farmer_parent = farmer_parent_id(uint32(prev.height),
                                             self.constants.GENESIS_CHALLENGE)
            pool_rewards.add(pool_parent)
            farmer_rewards.add(farmer_parent)
            if prev.is_transaction_block:
                break
            prev = await self.blockchain.get_block_record_from_db(
                prev.prev_hash)

        for coin in coins:
            if coin.name() in trade_additions:
                trade_adds.append(coin)

            is_coinbase = False
            is_fee_reward = False
            if coin.parent_coin_info in pool_rewards:
                is_coinbase = True
            if coin.parent_coin_info in farmer_rewards:
                is_fee_reward = True

            info = await self.puzzle_store.wallet_info_for_puzzle_hash(
                coin.puzzle_hash)
            if info is not None:
                wallet_id, wallet_type = info
                await self.coin_added(coin, is_coinbase, is_fee_reward,
                                      uint32(wallet_id), wallet_type, height)

        return trade_adds
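A note on the reward lookup above: pool_parent_id and farmer_parent_id make reward coins recognizable without a database query, because a reward coin's parent id is a pure function of the block height and the genesis challenge. A minimal sketch, assuming the split-challenge-plus-big-endian-height scheme used by chia.consensus.coinbase:

def pool_parent_id(block_height: int, genesis_challenge: bytes) -> bytes:
    # First half of the genesis challenge, then the height as 16 big-endian bytes
    return genesis_challenge[:16] + block_height.to_bytes(16, "big")

def farmer_parent_id(block_height: int, genesis_challenge: bytes) -> bytes:
    # Second half of the genesis challenge, same height encoding
    return genesis_challenge[16:] + block_height.to_bytes(16, "big")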
Example #2
    async def get_blockchain_state(self, _request: Dict):
        """
        Returns a summary of the node's view of the blockchain.
        """
        if self.service.initialized is False:
            res: Dict = {
                "blockchain_state": {
                    "peak": None,
                    "genesis_challenge_initialized": self.service.initialized,
                    "sync": {
                        "sync_mode": False,
                        "synced": False,
                        "sync_tip_height": 0,
                        "sync_progress_height": 0,
                    },
                    "difficulty": 0,
                    "sub_slot_iters": 0,
                    "space": 0,
                    "mempool_size": 0,
                },
            }
            return res
        peak: Optional[BlockRecord] = self.service.blockchain.get_peak()

        if peak is not None and peak.height > 0:
            difficulty = uint64(
                peak.weight -
                self.service.blockchain.block_record(peak.prev_hash).weight)
            sub_slot_iters = peak.sub_slot_iters
        else:
            difficulty = self.service.constants.DIFFICULTY_STARTING
            sub_slot_iters = self.service.constants.SUB_SLOT_ITERS_STARTING

        sync_mode: bool = (self.service.sync_store.get_sync_mode()
                           or self.service.sync_store.get_long_sync())

        sync_tip_height: Optional[uint32] = uint32(0)
        if sync_mode:
            target_height = self.service.sync_store.get_sync_target_height()
            if target_height is not None:
                sync_tip_height = target_height
            if peak is not None:
                sync_progress_height: uint32 = peak.height
                # Don't display that we're syncing towards height 0; instead show
                # 'Syncing height/height' until sync_store retrieves the correct target.
                if sync_tip_height == uint32(0):
                    sync_tip_height = peak.height
            else:
                sync_progress_height = uint32(0)
        else:
            sync_progress_height = uint32(0)

        if peak is not None and peak.height > 1:
            newer_block_hex = peak.header_hash.hex()
            # Average over the last day
            header_hash = self.service.blockchain.height_to_hash(
                uint32(max(1, peak.height - 4608)))
            assert header_hash is not None
            older_block_hex = header_hash.hex()
            space = await self.get_network_space({
                "newer_block_header_hash": newer_block_hex,
                "older_block_header_hash": older_block_hex,
            })
        else:
            space = {"space": uint128(0)}

        if self.service.mempool_manager is not None:
            mempool_size = len(self.service.mempool_manager.mempool.spends)
        else:
            mempool_size = 0
        if self.service.server is not None:
            is_connected = len(
                self.service.server.get_full_node_connections()) > 0
        else:
            is_connected = False
        synced = await self.service.synced() and is_connected

        assert space is not None
        response: Dict = {
            "blockchain_state": {
                "peak": peak,
                "genesis_challenge_initialized": self.service.initialized,
                "sync": {
                    "sync_mode": sync_mode,
                    "synced": synced,
                    "sync_tip_height": sync_tip_height,
                    "sync_progress_height": sync_progress_height,
                },
                "difficulty": difficulty,
                "sub_slot_iters": sub_slot_iters,
                "space": space["space"],
                "mempool_size": mempool_size,
            },
        }
        self.cached_blockchain_state = dict(response["blockchain_state"])
        return response
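A caller only depends on the shape of the nested dict built above. A hypothetical consumer (rpc_client stands in for any object exposing this method):

async def print_sync_status(rpc_client) -> None:
    state = (await rpc_client.get_blockchain_state({}))["blockchain_state"]
    sync = state["sync"]
    if sync["sync_mode"]:
        print(f"Syncing {sync['sync_progress_height']}/{sync['sync_tip_height']}")
    else:
        print("Synced" if sync["synced"] else "Not synced")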
Example #3
    async def add_spendbundle(
        self,
        new_spend: SpendBundle,
        npc_result: NPCResult,
        spend_name: bytes32,
        validate_signature=True,
        program: Optional[SerializedProgram] = None,
    ) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
        """
        Tries to add spend bundle to the mempool
        Returns the cost (if SUCCESS), the result (MempoolInclusion status), and an optional error
        """
        start_time = time.time()
        if self.peak is None:
            return None, MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED

        npc_list = npc_result.npc_list
        if program is None:
            program = simple_solution_generator(new_spend).program
        cost = calculate_cost_of_program(program, npc_result,
                                         self.constants.COST_PER_BYTE)

        log.debug(f"Cost: {cost}")

        if cost > int(self.limit_factor * self.constants.MAX_BLOCK_COST_CLVM):
            return None, MempoolInclusionStatus.FAILED, Err.BLOCK_COST_EXCEEDS_MAX

        if npc_result.error is not None:
            return None, MempoolInclusionStatus.FAILED, Err(npc_result.error)
        # build removal list
        removal_names: List[bytes32] = [npc.coin_name for npc in npc_list]

        additions = additions_for_npc(npc_list)

        additions_dict: Dict[bytes32, Coin] = {}
        for add in additions:
            additions_dict[add.name()] = add

        addition_amount = uint64(0)
        # Check additions for max coin amount
        for coin in additions:
            if coin.amount < 0:
                return (
                    None,
                    MempoolInclusionStatus.FAILED,
                    Err.COIN_AMOUNT_NEGATIVE,
                )
            if coin.amount > self.constants.MAX_COIN_AMOUNT:
                return (
                    None,
                    MempoolInclusionStatus.FAILED,
                    Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
                )
            addition_amount = uint64(addition_amount + coin.amount)
        # Check for duplicate outputs
        addition_counter = collections.Counter(_.name() for _ in additions)
        for k, v in addition_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT
        # Check for duplicate inputs
        removal_counter = collections.Counter(removal_names)
        for k, v in removal_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND
        # Skip if already added
        if spend_name in self.mempool.spends:
            return uint64(cost), MempoolInclusionStatus.SUCCESS, None

        removal_record_dict: Dict[bytes32, CoinRecord] = {}
        removal_coin_dict: Dict[bytes32, Coin] = {}
        removal_amount = uint64(0)
        for name in removal_names:
            removal_record = await self.coin_store.get_coin_record(name)
            if removal_record is None and name not in additions_dict:
                return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN_UNSPENT
            elif name in additions_dict:
                removal_coin = additions_dict[name]
                # TODO(straya): what timestamp to use here?
                assert self.peak.timestamp is not None
                removal_record = CoinRecord(
                    removal_coin,
                    uint32(
                        self.peak.height +
                        1),  # In mempool, so will be included in next height
                    uint32(0),
                    False,
                    False,
                    uint64(self.peak.timestamp + 1),
                )

            assert removal_record is not None
            removal_amount = uint64(removal_amount +
                                    removal_record.coin.amount)
            removal_record_dict[name] = removal_record
            removal_coin_dict[name] = removal_record.coin

        removals: List[Coin] = [coin for coin in removal_coin_dict.values()]

        if addition_amount > removal_amount:
            log.debug(f"Minting check failed: additions {addition_amount} > removals {removal_amount}")
            return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN

        fees = uint64(removal_amount - addition_amount)
        assert_fee_sum: uint64 = uint64(0)

        for npc in npc_list:
            if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
                fee_list: List[ConditionWithArgs] = npc.condition_dict[
                    ConditionOpcode.RESERVE_FEE]
                for cvp in fee_list:
                    fee = int_from_bytes(cvp.vars[0])
                    if fee < 0:
                        return None, MempoolInclusionStatus.FAILED, Err.RESERVE_FEE_CONDITION_FAILED
                    assert_fee_sum = assert_fee_sum + fee
        if fees < assert_fee_sum:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.RESERVE_FEE_CONDITION_FAILED,
            )

        if cost == 0:
            return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

        fees_per_cost: float = fees / cost
        # If pool is at capacity check the fee, if not then accept even without the fee
        if self.mempool.at_full_capacity(cost):
            if fees_per_cost < self.nonzero_fee_minimum_fpc:
                return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_TOO_CLOSE_TO_ZERO
            if fees_per_cost <= self.mempool.get_min_fee_rate(cost):
                return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
        # Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
        # Use this information later when constructing a block
        fail_reason, conflicts = await self.check_removals(removal_record_dict)
        # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
        conflicting_pool_items: Dict[bytes32, MempoolItem] = {}
        if fail_reason is Err.MEMPOOL_CONFLICT:
            for conflicting in conflicts:
                sb: MempoolItem = self.mempool.removals[conflicting.name()]
                conflicting_pool_items[sb.name] = sb
            if not self.can_replace(conflicting_pool_items,
                                    removal_record_dict, fees, fees_per_cost):
                potential = MempoolItem(new_spend, uint64(fees), npc_result,
                                        cost, spend_name, additions, removals,
                                        program)
                self.add_to_potential_tx_set(potential)
                return (
                    uint64(cost),
                    MempoolInclusionStatus.PENDING,
                    Err.MEMPOOL_CONFLICT,
                )

        elif fail_reason:
            return None, MempoolInclusionStatus.FAILED, fail_reason

        # Verify conditions, create hash_key list for aggsig check
        pks: List[G1Element] = []
        msgs: List[bytes32] = []
        error: Optional[Err] = None
        coin_announcements_in_spend: Set[bytes32] = coin_announcements_names_for_npc(npc_list)
        puzzle_announcements_in_spend: Set[bytes32] = puzzle_announcements_names_for_npc(npc_list)
        for npc in npc_list:
            coin_record: CoinRecord = removal_record_dict[npc.coin_name]
            # Check that the revealed removal puzzles actually match the puzzle hash
            if npc.puzzle_hash != coin_record.coin.puzzle_hash:
                log.warning(
                    "Mempool rejecting transaction because of wrong puzzle_hash"
                )
                log.warning(
                    f"{npc.puzzle_hash} != {coin_record.coin.puzzle_hash}")
                return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH

            chialisp_height = (self.peak.prev_transaction_block_height
                               if not self.peak.is_transaction_block else
                               self.peak.height)
            assert self.peak.timestamp is not None
            error = mempool_check_conditions_dict(
                coin_record,
                coin_announcements_in_spend,
                puzzle_announcements_in_spend,
                npc.condition_dict,
                uint32(chialisp_height),
                self.peak.timestamp,
            )

            if error:
                if error is Err.ASSERT_HEIGHT_ABSOLUTE_FAILED or error is Err.ASSERT_HEIGHT_RELATIVE_FAILED:
                    potential = MempoolItem(new_spend, uint64(fees),
                                            npc_result, cost, spend_name,
                                            additions, removals, program)
                    self.add_to_potential_tx_set(potential)
                    return uint64(cost), MempoolInclusionStatus.PENDING, error
                break

            if validate_signature:
                for pk, message in pkm_pairs_for_conditions_dict(
                        npc.condition_dict, npc.coin_name,
                        self.constants.AGG_SIG_ME_ADDITIONAL_DATA):
                    pks.append(pk)
                    msgs.append(message)
        if error:
            return None, MempoolInclusionStatus.FAILED, error

        if validate_signature:
            # Verify aggregated signature
            if not AugSchemeMPL.aggregate_verify(
                    pks, msgs, new_spend.aggregated_signature):
                log.warning(
                    f"Aggsig validation error {pks} {msgs} {new_spend}")
                return None, MempoolInclusionStatus.FAILED, Err.BAD_AGGREGATE_SIGNATURE
        # Remove all conflicting Coins and SpendBundles
        if fail_reason:
            mempool_item: MempoolItem
            for mempool_item in conflicting_pool_items.values():
                self.mempool.remove_from_pool(mempool_item)

        new_item = MempoolItem(new_spend, uint64(fees), npc_result, cost,
                               spend_name, additions, removals, program)
        self.mempool.add_to_pool(new_item, additions, removal_coin_dict)
        log.info(f"add_spendbundle took {time.time() - start_time} seconds")
        return uint64(cost), MempoolInclusionStatus.SUCCESS, None
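The admission arithmetic above reduces to two invariants: a bundle may not mint value, and at capacity its fee rate must clear the mempool's floor. A simplified standalone sketch (ignoring RESERVE_FEE conditions and the replace-by-fee path):

def passes_fee_checks(addition_amount: int, removal_amount: int, cost: int,
                      min_fee_rate: float, at_capacity: bool) -> bool:
    if addition_amount > removal_amount:
        return False  # would mint coins
    if cost <= 0:
        return False
    fees_per_cost = (removal_amount - addition_amount) / cost
    return (not at_capacity) or fees_per_cost > min_fee_rate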
Example #4
def unfinished_block_to_full_block(
    unfinished_block: UnfinishedBlock,
    cc_ip_vdf: VDFInfo,
    cc_ip_proof: VDFProof,
    rc_ip_vdf: VDFInfo,
    rc_ip_proof: VDFProof,
    icc_ip_vdf: Optional[VDFInfo],
    icc_ip_proof: Optional[VDFProof],
    finished_sub_slots: List[EndOfSubSlotBundle],
    prev_block: Optional[BlockRecord],
    blocks: BlockchainInterface,
    total_iters_sp: uint128,
    difficulty: uint64,
) -> FullBlock:
    """
    Converts an unfinished block to a finished block: fills in the infusion point VDFs and updates
    other properties (height, weight, sub-slots, etc.)

    Args:
        unfinished_block: the unfinished block to finish
        cc_ip_vdf: the challenge chain vdf info at the infusion point
        cc_ip_proof: the challenge chain proof
        rc_ip_vdf: the reward chain vdf info at the infusion point
        rc_ip_proof: the reward chain proof
        icc_ip_vdf: the infused challenge chain vdf info at the infusion point
        icc_ip_proof: the infused challenge chain proof
        finished_sub_slots: finished sub slots from the prev block to the infusion point
        prev_block: prev block from the infusion point
        blocks: interface for looking up the sub-block records (SBRs) of all included blocks, by header hash
        total_iters_sp: total iters at the signage point
        difficulty: difficulty at the infusion point

    """
    # Replace things that need to be replaced, since foliage blocks did not necessarily have the latest information
    if prev_block is None:
        is_transaction_block = True
        new_weight = uint128(difficulty)
        new_height = uint32(0)
        new_foliage = unfinished_block.foliage
        new_foliage_transaction_block = unfinished_block.foliage_transaction_block
        new_tx_info = unfinished_block.transactions_info
        new_generator = unfinished_block.transactions_generator
    else:
        is_transaction_block, _ = get_prev_transaction_block(prev_block, blocks, total_iters_sp)
        new_weight = uint128(prev_block.weight + difficulty)
        new_height = uint32(prev_block.height + 1)
        if is_transaction_block:
            new_fbh = unfinished_block.foliage.foliage_transaction_block_hash
            new_fbs = unfinished_block.foliage.foliage_transaction_block_signature
            new_foliage_transaction_block = unfinished_block.foliage_transaction_block
            new_tx_info = unfinished_block.transactions_info
            new_generator = unfinished_block.transactions_generator
        else:
            new_fbh = None
            new_fbs = None
            new_foliage_transaction_block = None
            new_tx_info = None
            new_generator = None
        assert (new_fbh is None) == (new_fbs is None)
        new_foliage = replace(
            unfinished_block.foliage,
            prev_block_hash=prev_block.header_hash,
            foliage_transaction_block_hash=new_fbh,
            foliage_transaction_block_signature=new_fbs,
        )
    ret = FullBlock(
        finished_sub_slots,
        RewardChainBlock(
            new_weight,
            new_height,
            unfinished_block.reward_chain_block.total_iters,
            unfinished_block.reward_chain_block.signage_point_index,
            unfinished_block.reward_chain_block.pos_ss_cc_challenge_hash,
            unfinished_block.reward_chain_block.proof_of_space,
            unfinished_block.reward_chain_block.challenge_chain_sp_vdf,
            unfinished_block.reward_chain_block.challenge_chain_sp_signature,
            cc_ip_vdf,
            unfinished_block.reward_chain_block.reward_chain_sp_vdf,
            unfinished_block.reward_chain_block.reward_chain_sp_signature,
            rc_ip_vdf,
            icc_ip_vdf,
            is_transaction_block,
        ),
        unfinished_block.challenge_chain_sp_proof,
        cc_ip_proof,
        unfinished_block.reward_chain_sp_proof,
        rc_ip_proof,
        icc_ip_proof,
        new_foliage,
        new_foliage_transaction_block,
        new_tx_info,
        new_generator,
        [],
    )
    return recursive_replace(
        ret,
        "foliage.reward_block_hash",
        ret.reward_chain_block.get_hash(),
    )
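Because FullBlock is immutable, the final reward_block_hash is patched in with recursive_replace instead of assignment. The same pattern with standard frozen dataclasses (Foliage and Block below are simplified stand-ins, not the real chia types):

from dataclasses import dataclass, replace

@dataclass(frozen=True)
class Foliage:
    reward_block_hash: bytes

@dataclass(frozen=True)
class Block:
    foliage: Foliage

def set_reward_hash(block: Block, new_hash: bytes) -> Block:
    # Rebuild the outer object around a rebuilt inner one; nothing is mutated
    return replace(block, foliage=replace(block.foliage, reward_block_hash=new_hash))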
Example #5
 def pass_blocks(self, blocks: uint32):
     self.block_height = uint32(self.block_height + blocks)
Example #6
if __name__ == "__main__":
    """
    Naive way to calculate the cost ratio between vBytes and CLVM cost units.
    AggSig has an assigned cost of 20 vBytes; a simple CLVM program is benchmarked against it.
    """
    wallet_tool = WalletTool()
    benchmark_all_operators()
    secret_key: PrivateKey = AugSchemeMPL.key_gen(bytes([2] * 32))
    puzzles = []
    solutions = []
    private_keys = []
    public_keys = []

    for i in range(0, 1000):
        private_key: PrivateKey = master_sk_to_wallet_sk(secret_key, uint32(i))
        public_key = private_key.get_g1()
        solution = wallet_tool.make_solution({
            ConditionOpcode.ASSERT_MY_COIN_ID: [
                ConditionVarPair(ConditionOpcode.ASSERT_MY_COIN_ID,
                                 [token_bytes()])
            ]
        })
        puzzle = puzzle_for_pk(bytes(public_key))
        puzzles.append(puzzle)
        solutions.append(solution)
        private_keys.append(private_key)
        public_keys.append(public_key)

    # Run Puzzle 1000 times
    puzzle_start = time.time()
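The excerpt cuts off just as the timing begins; the measurement it sets up is a plain wall-clock loop over the prepared puzzle/solution pairs. A generic sketch of that harness (run_puzzle is a hypothetical executor, not a chia API):

import time

def benchmark(run_puzzle, puzzles, solutions) -> float:
    start = time.time()
    for puzzle, solution in zip(puzzles, solutions):
        run_puzzle(puzzle, solution)  # hypothetical: execute one pair
    return time.time() - start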
Example #7
def block_generator() -> BlockGenerator:
    generator_args = [
        GeneratorArg(uint32(0), to_sp(FIRST_GENERATOR)),
        GeneratorArg(uint32(1), to_sp(SECOND_GENERATOR))
    ]
    return BlockGenerator(to_sp(COMPILED_GENERATOR_CODE), generator_args)
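Each GeneratorArg pairs a block height with the serialized generator found at that height, which is how the compiled generator dereferences prior generators. A hypothetical check of the fixture (assuming the field names generator_args and block_height):

bg = block_generator()
assert [arg.block_height for arg in bg.generator_args] == [uint32(0), uint32(1)]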
Example #8
    async def fetch_blocks_and_validate(
        self,
        peer: WSChiaConnection,
        height_start: uint32,
        height_end: uint32,
        fork_point_with_peak: Optional[uint32],
    ) -> Tuple[bool, bool]:
        """
        Returns whether the blocks validated, and whether the peak was advanced

        """
        if self.wallet_state_manager is None:
            return False, False

        self.log.info(f"Requesting blocks {height_start}-{height_end}")
        request = RequestHeaderBlocks(uint32(height_start), uint32(height_end))
        res: Optional[RespondHeaderBlocks] = await peer.request_header_blocks(
            request)
        if res is None or not isinstance(res, RespondHeaderBlocks):
            raise ValueError("Peer returned no response")
        header_blocks: List[HeaderBlock] = res.header_blocks
        advanced_peak = False
        if ((self.full_node_peer is not None
             and peer.peer_host == self.full_node_peer.host)
                or peer.peer_host == "127.0.0.1"):
            trusted = True
            pre_validation_results: Optional[List[PreValidationResult]] = None
        else:
            trusted = False
            pre_validation_results = await self.wallet_state_manager.blockchain.pre_validate_blocks_multiprocessing(
                header_blocks)
            if pre_validation_results is None:
                return False, advanced_peak
            assert len(header_blocks) == len(pre_validation_results)

        for i in range(len(header_blocks)):
            header_block = header_blocks[i]
            if not trusted and pre_validation_results is not None and pre_validation_results[
                    i].error is not None:
                raise ValidationError(Err(pre_validation_results[i].error))

            fork_point_with_old_peak = None if advanced_peak else fork_point_with_peak
            if header_block.is_transaction_block:
                # Find additions and removals
                (
                    additions,
                    removals,
                ) = await self.wallet_state_manager.get_filter_additions_removals(
                    header_block, header_block.transactions_filter,
                    fork_point_with_old_peak)

                # Get Additions
                added_coins = await self.get_additions(peer, header_block,
                                                       additions)
                if added_coins is None:
                    raise ValueError("Failed to fetch additions")

                # Get removals
                removed_coins = await self.get_removals(
                    peer, header_block, added_coins, removals)
                if removed_coins is None:
                    raise ValueError("Failed to fetch removals")

                header_block_record = HeaderBlockRecord(
                    header_block, added_coins, removed_coins)
            else:
                header_block_record = HeaderBlockRecord(header_block, [], [])
            start_t = time.time()
            if trusted:
                (
                    result,
                    error,
                    fork_h,
                ) = await self.wallet_state_manager.blockchain.receive_block(
                    header_block_record, None, trusted,
                    fork_point_with_old_peak)
            else:
                assert pre_validation_results is not None
                (
                    result,
                    error,
                    fork_h,
                ) = await self.wallet_state_manager.blockchain.receive_block(
                    header_block_record, pre_validation_results[i], trusted,
                    fork_point_with_old_peak)
            self.log.debug(
                f"Time taken to validate {header_block.height} with fork "
                f"{fork_point_with_old_peak}: {time.time() - start_t}")
            if result == ReceiveBlockResult.NEW_PEAK:
                advanced_peak = True
                self.wallet_state_manager.state_changed("new_block")
            elif result == ReceiveBlockResult.INVALID_BLOCK:
                raise ValueError("Value error peer sent us invalid block")
        if advanced_peak:
            await self.wallet_state_manager.create_more_puzzle_hashes()
        return True, advanced_peak
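The trusted/untrusted split is the whole validation policy here: trusted peers skip multiprocess pre-validation entirely. The host check, isolated (mirroring the condition above, where `and` binds tighter than `or`):

from typing import Optional

def is_trusted_host(peer_host: str, full_node_host: Optional[str]) -> bool:
    return peer_host == "127.0.0.1" or (
        full_node_host is not None and peer_host == full_node_host)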
Example #9
    async def test_create_multiple_pool_wallets(self, one_wallet_node_and_rpc):
        client, wallet_node_0, full_node_api = one_wallet_node_and_rpc
        wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
        our_ph_1 = await wallet_0.get_new_puzzlehash()
        our_ph_2 = await wallet_0.get_new_puzzlehash()
        summaries_response = await client.get_wallets()
        for summary in summaries_response:
            if WalletType(int(summary["type"])) == WalletType.POOLING_WALLET:
                assert False, "no pooling wallet should exist yet"

        creation_tx: TransactionRecord = await client.create_new_pool_wallet(
            our_ph_1, "", 0, "localhost:5000", "new", "SELF_POOLING")
        creation_tx_2: TransactionRecord = await client.create_new_pool_wallet(
            our_ph_1, "localhost", 12, "localhost:5000", "new",
            "FARMING_TO_POOL")

        await time_out_assert(
            10,
            full_node_api.full_node.mempool_manager.get_spendbundle,
            creation_tx.spend_bundle,
            creation_tx.name,
        )
        await time_out_assert(
            10,
            full_node_api.full_node.mempool_manager.get_spendbundle,
            creation_tx_2.spend_bundle,
            creation_tx_2.name,
        )

        await self.farm_blocks(full_node_api, our_ph_2, 6)
        assert full_node_api.full_node.mempool_manager.get_spendbundle(
            creation_tx.name) is None
        assert full_node_api.full_node.mempool_manager.get_spendbundle(
            creation_tx_2.name) is None

        await asyncio.sleep(3)
        status_2: PoolWalletInfo = (await client.pw_status(2))[0]
        status_3: PoolWalletInfo = (await client.pw_status(3))[0]

        if status_2.current.state == PoolSingletonState.SELF_POOLING.value:
            assert status_3.current.state == PoolSingletonState.FARMING_TO_POOL.value
        else:
            assert status_2.current.state == PoolSingletonState.FARMING_TO_POOL.value
            assert status_3.current.state == PoolSingletonState.SELF_POOLING.value

        full_config: Dict = load_config(
            wallet_0.wallet_state_manager.root_path, "config.yaml")
        pool_list: List[Dict] = full_config["pool"]["pool_list"]
        assert len(pool_list) == 2

        p2_singleton_ph_2: bytes32 = status_2.p2_singleton_puzzle_hash
        p2_singleton_ph_3: bytes32 = status_3.p2_singleton_puzzle_hash
        assert (await wallet_node_0.wallet_state_manager.interested_store.
                get_interested_puzzle_hash_wallet_id(p2_singleton_ph_2)
                ) is not None
        assert (await wallet_node_0.wallet_state_manager.interested_store.
                get_interested_puzzle_hash_wallet_id(p2_singleton_ph_3)
                ) is not None
        assert len(await wallet_node_0.wallet_state_manager.tx_store.
                   get_unconfirmed_for_wallet(2)) == 0
        assert len(await wallet_node_0.wallet_state_manager.tx_store.
                   get_unconfirmed_for_wallet(3)) == 0
        # Doing a reorg reverts and removes the pool wallets
        await full_node_api.reorg_from_index_to_new_index(
            ReorgProtocol(uint32(0), uint32(20), our_ph_2))
        await asyncio.sleep(5)
        summaries_response = await client.get_wallets()
        assert len(summaries_response) == 1

        with pytest.raises(ValueError):
            await client.pw_status(2)
        with pytest.raises(ValueError):
            await client.pw_status(3)
        # It also removed the interested puzzle hashes, so we can recreate the pool wallet with another wallet_id later
        assert (
            await wallet_node_0.wallet_state_manager.interested_store.
            get_interested_puzzle_hash_wallet_id(p2_singleton_ph_2)) is None
        assert (
            await wallet_node_0.wallet_state_manager.interested_store.
            get_interested_puzzle_hash_wallet_id(p2_singleton_ph_3)) is None
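time_out_assert above polls a callable until it returns the expected value or the timeout expires. A rough sketch of those semantics (not the real helper from chia's test utilities):

import asyncio
import time

async def time_out_assert_sketch(timeout: float, fn, expected, *args, interval: float = 0.5):
    deadline = time.monotonic() + timeout
    while True:
        result = fn(*args)
        if asyncio.iscoroutine(result):
            result = await result
        if result == expected:
            return
        assert time.monotonic() < deadline, f"timed out waiting for {expected!r}"
        await asyncio.sleep(interval)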
Example #10
    async def new_peak_wallet(self, peak: wallet_protocol.NewPeakWallet,
                              peer: WSChiaConnection):
        if self.wallet_state_manager is None:
            return None

        curr_peak = self.wallet_state_manager.blockchain.get_peak()
        if curr_peak is not None and curr_peak.weight >= peak.weight:
            return None
        if self.new_peak_lock is None:
            self.new_peak_lock = asyncio.Lock()
        async with self.new_peak_lock:
            request = wallet_protocol.RequestBlockHeader(peak.height)
            response: Optional[RespondBlockHeader] = await peer.request_block_header(request)

            if response is None or not isinstance(
                    response,
                    RespondBlockHeader) or response.header_block is None:
                return None

            header_block = response.header_block

            if (curr_peak is None and header_block.height <
                    self.constants.WEIGHT_PROOF_RECENT_BLOCKS) or (
                        curr_peak is not None
                        and curr_peak.height > header_block.height - 200):
                top = header_block
                blocks = [top]
                # Fetch blocks backwards until we hit the one that we have,
                # then complete them with additions / removals going forward
                while not self.wallet_state_manager.blockchain.contains_block(
                        top.prev_header_hash) and top.height > 0:
                    request_prev = wallet_protocol.RequestBlockHeader(
                        top.height - 1)
                    response_prev: Optional[RespondBlockHeader] = await peer.request_block_header(
                        request_prev)
                    if response_prev is None:
                        return None
                    if not isinstance(response_prev, RespondBlockHeader):
                        return None
                    prev_head = response_prev.header_block
                    blocks.append(prev_head)
                    top = prev_head
                blocks.reverse()
                await self.complete_blocks(blocks, peer)
                await self.wallet_state_manager.create_more_puzzle_hashes()
            elif header_block.height >= self.constants.WEIGHT_PROOF_RECENT_BLOCKS:
                # Request weight proof
                # Sync if PoW validates
                if self.wallet_state_manager.sync_mode:
                    return None
                weight_request = RequestProofOfWeight(header_block.height,
                                                      header_block.header_hash)
                weight_proof_response: Optional[RespondProofOfWeight] = await peer.request_proof_of_weight(
                    weight_request, timeout=360)
                if weight_proof_response is None:
                    return None
                weight_proof = weight_proof_response.wp
                if self.wallet_state_manager is None:
                    return None
                if self.server is not None and self.server.is_trusted_peer(
                        peer, self.config["trusted_peers"]):
                    valid, fork_point = self.wallet_state_manager.weight_proof_handler.get_fork_point_no_validations(
                        weight_proof)
                else:
                    valid, fork_point, _ = await self.wallet_state_manager.weight_proof_handler.validate_weight_proof(
                        weight_proof)
                    if not valid:
                        self.log.error(
                            f"Invalid weight proof: num epochs {len(weight_proof.sub_epochs)},"
                            f" num recent blocks {len(weight_proof.recent_chain_data)}")
                        self.log.debug(f"{weight_proof}")
                        return None
                self.log.info(f"Validated, fork point is {fork_point}")
                self.wallet_state_manager.sync_store.add_potential_fork_point(
                    header_block.header_hash, uint32(fork_point))
                self.wallet_state_manager.sync_store.add_potential_peak(
                    header_block)
                self.start_sync()
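The branch structure encodes the sync policy: a short gap is backfilled header by header, a long one triggers a weight-proof sync. Condensed into one function (the 200-block margin and the recent-blocks constant mirror the code above; the fall-through case where neither branch applies is omitted there too):

from typing import Optional

def choose_sync_strategy(curr_height: Optional[int], new_height: int, recent_blocks: int) -> str:
    if curr_height is None:
        return "backfill" if new_height < recent_blocks else "weight_proof"
    return "backfill" if curr_height > new_height - 200 else "weight_proof"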
Example #11
    async def _sync(self) -> None:
        """
        Wallet has fallen far behind (or is starting up for the first time), and must be synced
        up to the LCA of the blockchain.
        """
        if self.wallet_state_manager is None or self.backup_initialized is False or self.server is None:
            return None

        highest_weight: uint128 = uint128(0)
        peak_height: uint32 = uint32(0)
        peak: Optional[HeaderBlock] = None
        potential_peaks: List[Tuple[bytes32, HeaderBlock]] = (
            self.wallet_state_manager.sync_store.get_potential_peaks_tuples())

        self.log.info(f"Have collected {len(potential_peaks)} potential peaks")

        for header_hash, potential_peak_block in potential_peaks:
            if potential_peak_block.weight > highest_weight:
                highest_weight = potential_peak_block.weight
                peak_height = potential_peak_block.height
                peak = potential_peak_block

        if peak_height == 0:
            return None

        if self.wallet_state_manager.peak is not None and highest_weight <= self.wallet_state_manager.peak.weight:
            self.log.info("Not performing sync, already caught up.")
            return None

        peers: List[WSChiaConnection] = self.server.get_full_node_connections()
        if len(peers) == 0:
            self.log.info("No peers to sync to")
            return None

        async with self.wallet_state_manager.blockchain.lock:
            fork_height = None
            if peak is not None:
                fork_height = self.wallet_state_manager.sync_store.get_potential_fork_point(
                    peak.header_hash)
                our_peak_height = self.wallet_state_manager.blockchain.get_peak_height()
                ses_heights = self.wallet_state_manager.blockchain.get_ses_heights()
                if len(ses_heights) > 2 and our_peak_height is not None:
                    ses_heights.sort()
                    max_fork_ses_height = ses_heights[-3]
                    # This is the fork point in the SES, in case the fork was not detected
                    if fork_height == max_fork_ses_height:
                        peers = self.server.get_full_node_connections()
                        for peer in peers:
                            # Grab a block at peak + 1 and check if fork point is actually our current height
                            potential_height = uint32(our_peak_height + 1)
                            block_response: Optional[Any] = await peer.request_header_blocks(
                                wallet_protocol.RequestHeaderBlocks(
                                    potential_height, potential_height))
                            if block_response is not None and isinstance(
                                    block_response,
                                    wallet_protocol.RespondHeaderBlocks):
                                our_peak = self.wallet_state_manager.blockchain.get_peak(
                                )
                                if (our_peak is not None and block_response.
                                        header_blocks[0].prev_header_hash
                                        == our_peak.header_hash):
                                    fork_height = our_peak_height
                                break
            if fork_height is None:
                fork_height = uint32(0)
            await self.wallet_state_manager.blockchain.warmup(fork_height)
            batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
            advanced_peak = False
            for i in range(max(0, fork_height - 1), peak_height, batch_size):
                start_height = i
                end_height = min(peak_height, start_height + batch_size)
                peers = self.server.get_full_node_connections()
                added = False
                for peer in peers:
                    try:
                        added, advanced_peak = await self.fetch_blocks_and_validate(
                            peer, uint32(start_height), uint32(end_height),
                            None if advanced_peak else fork_height)
                        if added:
                            break
                    except Exception as e:
                        await peer.close()
                        exc = traceback.format_exc()
                        self.log.error(
                            f"Error while trying to fetch from peer: {e} {exc}")
                if not added:
                    raise RuntimeError(
                        f"Was not able to add blocks {start_height}-{end_height}"
                    )

                peak = self.wallet_state_manager.blockchain.get_peak()
                assert peak is not None
                self.wallet_state_manager.blockchain.clean_block_record(
                    min(
                        end_height - self.constants.BLOCKS_CACHE_SIZE,
                        peak.height - self.constants.BLOCKS_CACHE_SIZE,
                    ))
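The batching loop is worth seeing in isolation: it starts one block before the fork point (to re-anchor) and clamps the final batch at the peak. A small demonstration of the ranges it produces:

def batch_ranges(fork_height: int, peak_height: int, batch_size: int):
    for start in range(max(0, fork_height - 1), peak_height, batch_size):
        yield start, min(peak_height, start + batch_size)

# e.g. list(batch_ranges(0, 10, 4)) == [(0, 4), (4, 8), (8, 10)]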
Example #12
def check_keys(new_root: Path, keychain: Optional[Keychain] = None) -> None:
    if keychain is None:
        keychain = Keychain()
    all_sks = keychain.get_all_private_keys()
    if len(all_sks) == 0:
        print(
            "No keys are present in the keychain. Generate them with 'chia keys generate'"
        )
        return None

    config: Dict = load_config(new_root, "config.yaml")
    pool_child_pubkeys = [
        master_sk_to_pool_sk(sk).get_g1() for sk, _ in all_sks
    ]
    all_targets = []
    stop_searching_for_farmer = "xch_target_address" not in config["farmer"]
    stop_searching_for_pool = "xch_target_address" not in config["pool"]
    number_of_ph_to_search = 500
    selected = config["selected_network"]
    prefix = config["network_overrides"]["config"][selected]["address_prefix"]
    for i in range(number_of_ph_to_search):
        if stop_searching_for_farmer and stop_searching_for_pool and i > 0:
            break
        for sk, _ in all_sks:
            all_targets.append(
                encode_puzzle_hash(
                    create_puzzlehash_for_pk(
                        master_sk_to_wallet_sk(sk, uint32(i)).get_g1()),
                    prefix))
            if all_targets[-1] == config["farmer"].get("xch_target_address"):
                stop_searching_for_farmer = True
            if all_targets[-1] == config["pool"].get("xch_target_address"):
                stop_searching_for_pool = True

    # Set the destinations
    if "xch_target_address" not in config["farmer"]:
        print(
            f"Setting the xch destination address for the farmer coinbase and fee reward to {all_targets[0]}"
        )
        config["farmer"]["xch_target_address"] = all_targets[0]
    elif config["farmer"]["xch_target_address"] not in all_targets:
        print(
            f"WARNING: using a farmer address which we don't have the private"
            f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
            f"{config['farmer']['xch_target_address']} with {all_targets[0]}")

    if "pool" not in config:
        config["pool"] = {}
    if "xch_target_address" not in config["pool"]:
        print(
            f"Setting the xch destination address for the pool coinbase reward to {all_targets[0]}"
        )
        config["pool"]["xch_target_address"] = all_targets[0]
    elif config["pool"]["xch_target_address"] not in all_targets:
        print(
            f"WARNING: using a pool address which we don't have the private"
            f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
            f"{config['pool']['xch_target_address']} with {all_targets[0]}")

    # Set the pool pks in the farmer
    pool_pubkeys_hex = set(bytes(pk).hex() for pk in pool_child_pubkeys)
    if "pool_public_keys" in config["farmer"]:
        for pk_hex in config["farmer"]["pool_public_keys"]:
            # Add original ones in config
            pool_pubkeys_hex.add(pk_hex)

    config["farmer"]["pool_public_keys"] = pool_pubkeys_hex
    save_config(new_root, "config.yaml", config)
Example #13
    async def get_filter_additions_removals(
        self, new_block: HeaderBlock, transactions_filter: bytes,
        fork_point_with_peak: Optional[uint32]
    ) -> Tuple[List[bytes32], List[bytes32]]:
        """ Returns a list of our coin ids, and a list of puzzle_hashes that positively match with provided filter. """
        # assert new_block.prev_header_hash in self.blockchain.blocks

        tx_filter = PyBIP158([b for b in transactions_filter])

        # Find fork point
        if fork_point_with_peak is not None:
            fork_h: int = fork_point_with_peak
        elif new_block.prev_header_hash != self.constants.GENESIS_CHALLENGE and self.peak is not None:
            # TODO: handle returning of -1
            fork_h = find_fork_point_in_chain(
                self.blockchain,
                self.blockchain.block_record(self.peak.header_hash),
                new_block,
            )
        else:
            fork_h = 0

        # Get all unspent coins
        my_coin_records: Set[WalletCoinRecord] = await self.coin_store.get_unspent_coins_at_height(
            uint32(fork_h))

        # Filter coins up to and including fork point
        unspent_coin_names: Set[bytes32] = set()
        for coin in my_coin_records:
            if coin.confirmed_block_height <= fork_h:
                unspent_coin_names.add(coin.name())

        # # Get all blocks after fork point up to but not including this block
        # curr: BlockRecord = self.blockchain.blocks[new_block.prev_header_hash]
        # reorg_blocks: List[HeaderBlockRecord] = []
        # while curr.height > fork_h:
        #     header_block_record = await self.block_store.get_header_block_record(
        #         curr.header_hash
        #     )
        #     reorg_blocks.append(header_block_record)
        #     curr = self.blockchain.blocks[curr.prev_header_hash]
        # reorg_blocks.reverse()

        # For each block, process additions to get all Coins, then process removals to get unspent coins
        # for reorg_block in reorg_blocks:
        #     for addition in reorg_block.additions:
        #         unspent_coin_names.add(addition.name())
        #     for removal in reorg_block.removals:
        #         record = await self.puzzle_store.get_derivation_record_for_puzzle_hash(
        #             removal.puzzle_hash
        #         )
        #         if record is None:
        #             continue
        #         unspent_coin_names.remove(removal)

        my_puzzle_hashes = self.puzzle_store.all_puzzle_hashes

        removals_of_interest: List[bytes32] = []
        additions_of_interest: List[bytes32] = []

        (
            trade_removals,
            trade_additions,
        ) = await self.trade_manager.get_coins_of_interest()
        for name, trade_coin in trade_removals.items():
            if tx_filter.Match(bytearray(trade_coin.name())):
                removals_of_interest.append(trade_coin.name())

        for name, trade_coin in trade_additions.items():
            if tx_filter.Match(bytearray(trade_coin.puzzle_hash)):
                additions_of_interest.append(trade_coin.puzzle_hash)

        for coin_name in unspent_coin_names:
            if tx_filter.Match(bytearray(coin_name)):
                removals_of_interest.append(coin_name)

        for puzzle_hash in my_puzzle_hashes:
            if tx_filter.Match(bytearray(puzzle_hash)):
                additions_of_interest.append(puzzle_hash)

        return additions_of_interest, removals_of_interest
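The asymmetry in the matching above is the important detail: removals are matched by coin id, additions by puzzle hash, since the block filter is built over both kinds of element. Condensed (match is any callable with the semantics of tx_filter.Match, used here as a hypothetical stand-in):

def split_matches(match, coin_ids, puzzle_hashes):
    removals = [c for c in coin_ids if match(bytearray(c))]
    additions = [p for p in puzzle_hashes if match(bytearray(p))]
    return additions, removals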
Example #14
    async def coin_added(
        self,
        coin: Coin,
        coinbase: bool,
        fee_reward: bool,
        wallet_id: uint32,
        wallet_type: WalletType,
        height: uint32,
    ):
        """
        Adding coin to DB
        """
        self.log.info(f"Adding coin: {coin} at {height}")
        farm_reward = False
        if coinbase or fee_reward:
            farm_reward = True
            now = uint64(int(time.time()))
            if coinbase:
                tx_type: int = TransactionType.COINBASE_REWARD.value
            else:
                tx_type = TransactionType.FEE_REWARD.value
            tx_record = TransactionRecord(
                confirmed_at_height=uint32(height),
                created_at_time=now,
                to_puzzle_hash=coin.puzzle_hash,
                amount=coin.amount,
                fee_amount=uint64(0),
                confirmed=True,
                sent=uint32(0),
                spend_bundle=None,
                additions=[coin],
                removals=[],
                wallet_id=wallet_id,
                sent_to=[],
                trade_id=None,
                type=uint32(tx_type),
                name=coin.name(),
            )
            await self.tx_store.add_transaction_record(tx_record)
        else:
            records = await self.tx_store.tx_with_addition_coin(
                coin.name(), wallet_id)

            if len(records) > 0:
                # This is the change from this transaction
                for record in records:
                    if record.confirmed is False:
                        await self.tx_store.set_confirmed(record.name, height)
            else:
                now = uint64(int(time.time()))
                tx_record = TransactionRecord(
                    confirmed_at_height=uint32(height),
                    created_at_time=now,
                    to_puzzle_hash=coin.puzzle_hash,
                    amount=coin.amount,
                    fee_amount=uint64(0),
                    confirmed=True,
                    sent=uint32(0),
                    spend_bundle=None,
                    additions=[coin],
                    removals=[],
                    wallet_id=wallet_id,
                    sent_to=[],
                    trade_id=None,
                    type=uint32(TransactionType.INCOMING_TX.value),
                    name=coin.name(),
                )
                if coin.amount > 0:
                    await self.tx_store.add_transaction_record(tx_record)

        coin_record: WalletCoinRecord = WalletCoinRecord(
            coin, height, uint32(0), False, farm_reward, wallet_type,
            wallet_id)
        await self.coin_store.add_coin_record(coin_record)

        if wallet_type == WalletType.COLOURED_COIN or wallet_type == WalletType.DISTRIBUTED_ID:
            wallet = self.wallets[wallet_id]
            header_hash: bytes32 = self.blockchain.height_to_hash(height)
            block: Optional[HeaderBlockRecord] = await self.block_store.get_header_block_record(
                header_hash)
            assert block is not None
            assert block.removals is not None
            await wallet.coin_added(coin, header_hash, block.removals, height)

        self.state_changed("coin_added", wallet_id)
Example #15
async def validate_block_body(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    block_store: BlockStore,
    coin_store: CoinStore,
    peak: Optional[BlockRecord],
    block: Union[FullBlock, UnfinishedBlock],
    height: uint32,
    npc_result: Optional[NPCResult],
    fork_point_with_peak: Optional[uint32],
    get_block_generator: Callable,
) -> Tuple[Optional[Err], Optional[NPCResult]]:
    """
    This assumes the header block has been completely validated.
    Validates the transactions and body of the block. Returns None for the first value if everything
    validates correctly, or an Err if something does not validate. For the second value, returns an NPCResult
    only if validation succeeded, and there are transactions. In other cases it returns None. The NPC result is
    the result of running the generator with the previous generators refs. It is only present for transaction
    blocks which have spent coins.
    """
    if isinstance(block, FullBlock):
        assert height == block.height
    prev_transaction_block_height: uint32 = uint32(0)

    # 1. For non-transaction blocks: foliage block, transaction filter, transactions info, and generator must
    # be empty. If it is a block but not a transaction block, there is no body to validate. Check that all
    # fields are None.
    if block.foliage.foliage_transaction_block_hash is None:
        if (block.foliage_transaction_block is not None
                or block.transactions_info is not None
                or block.transactions_generator is not None):
            return Err.NOT_BLOCK_BUT_HAS_DATA, None

        prev_tb: BlockRecord = blocks.block_record(block.prev_header_hash)
        while not prev_tb.is_transaction_block:
            prev_tb = blocks.block_record(prev_tb.prev_hash)
        assert prev_tb.timestamp is not None
        if (prev_tb.timestamp > constants.INITIAL_FREEZE_END_TIMESTAMP
                and len(block.transactions_generator_ref_list) > 0):
            return Err.NOT_BLOCK_BUT_HAS_DATA, None

        return None, None  # This means the block is valid

    # All checks below this point correspond to transaction blocks
    # 2. For blocks, foliage block, transactions info must not be empty
    if block.foliage_transaction_block is None or block.transactions_info is None:
        return Err.IS_TRANSACTION_BLOCK_BUT_NO_DATA, None
    assert block.foliage_transaction_block is not None

    # keeps track of the reward coins that need to be incorporated
    expected_reward_coins: Set[Coin] = set()

    # 3. The transaction info hash in the Foliage block must match the transaction info
    if block.foliage_transaction_block.transactions_info_hash != std_hash(
            block.transactions_info):
        return Err.INVALID_TRANSACTIONS_INFO_HASH, None

    # 4. The foliage block hash in the foliage block must match the foliage block
    if block.foliage.foliage_transaction_block_hash != std_hash(
            block.foliage_transaction_block):
        return Err.INVALID_FOLIAGE_BLOCK_HASH, None

    # 5. The reward claims must be valid for the previous blocks, and current block fees
    # If height == 0, expected_reward_coins will be left empty
    if height > 0:
        # Add reward claims for all blocks from the prev prev block, until the prev block (including the latter)
        prev_transaction_block = blocks.block_record(
            block.foliage_transaction_block.prev_transaction_block_hash)
        prev_transaction_block_height = prev_transaction_block.height
        assert prev_transaction_block.fees is not None
        pool_coin = create_pool_coin(
            prev_transaction_block_height,
            prev_transaction_block.pool_puzzle_hash,
            calculate_pool_reward(prev_transaction_block.height),
            constants.GENESIS_CHALLENGE,
        )
        farmer_coin = create_farmer_coin(
            prev_transaction_block_height,
            prev_transaction_block.farmer_puzzle_hash,
            uint64(
                calculate_base_farmer_reward(prev_transaction_block.height) +
                prev_transaction_block.fees),
            constants.GENESIS_CHALLENGE,
        )
        # Adds the previous block
        expected_reward_coins.add(pool_coin)
        expected_reward_coins.add(farmer_coin)

        # For the second block in the chain, don't go back further
        if prev_transaction_block.height > 0:
            curr_b = blocks.block_record(prev_transaction_block.prev_hash)
            while not curr_b.is_transaction_block:
                expected_reward_coins.add(
                    create_pool_coin(
                        curr_b.height,
                        curr_b.pool_puzzle_hash,
                        calculate_pool_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    ))
                expected_reward_coins.add(
                    create_farmer_coin(
                        curr_b.height,
                        curr_b.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr_b.height),
                        constants.GENESIS_CHALLENGE,
                    ))
                curr_b = blocks.block_record(curr_b.prev_hash)

    if set(block.transactions_info.reward_claims_incorporated
           ) != expected_reward_coins:
        return Err.INVALID_REWARD_COINS, None

    if block.foliage_transaction_block.timestamp > constants.INITIAL_FREEZE_END_TIMESTAMP:
        if len(block.transactions_info.reward_claims_incorporated) != len(
                expected_reward_coins):
            # No duplicates, after transaction freeze period. Duplicates cause no issues because we filter them out
            # anyway.
            return Err.INVALID_REWARD_COINS, None

    removals: List[bytes32] = []
    coinbase_additions: List[Coin] = list(expected_reward_coins)
    additions: List[Coin] = []
    coin_announcement_names: Set[bytes32] = set()
    puzzle_announcement_names: Set[bytes32] = set()
    npc_list: List[NPC] = []
    removals_puzzle_dic: Dict[bytes32, bytes32] = {}
    cost: uint64 = uint64(0)

    # We check in header validation that the timestamp is not more than 10 minutes into the future
    if (block.foliage_transaction_block.timestamp <=
            constants.INITIAL_FREEZE_END_TIMESTAMP
            and block.transactions_generator is not None):
        # 6. No transactions before INITIAL_TRANSACTION_FREEZE timestamp
        return Err.INITIAL_TRANSACTION_FREEZE, None
    else:
        # 7a. The generator root must be the hash of the serialized bytes of
        #     the generator for this block (or zeroes if no generator)
        if block.transactions_generator is not None:
            if std_hash(bytes(block.transactions_generator)
                        ) != block.transactions_info.generator_root:
                return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None
        else:
            if block.transactions_info.generator_root != bytes([0] * 32):
                return Err.INVALID_TRANSACTIONS_GENERATOR_HASH, None

        # 8a. The generator_ref_list must be the hash of the serialized bytes of
        #     the generator ref list for this block (or 'one' bytes [0x01] if no generator)
        # 8b. The generator ref list length must be less than or equal to MAX_GENERATOR_REF_LIST_SIZE entries
        # 8c. The generator ref list must not point to a height >= this block's height
        if block.transactions_generator_ref_list in (None, []):
            if block.transactions_info.generator_refs_root != bytes([1] * 32):
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
        else:
            # If we have a generator reference list, we must have a generator
            if block.transactions_generator is None:
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None

            # The generator_refs_root must be the hash of the concatenation of the List[uint32]
            generator_refs_hash = std_hash(b"".join(
                [bytes(i) for i in block.transactions_generator_ref_list]))
            if block.transactions_info.generator_refs_root != generator_refs_hash:
                return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None
            if len(block.transactions_generator_ref_list
                   ) > constants.MAX_GENERATOR_REF_LIST_SIZE:
                return Err.TOO_MANY_GENERATOR_REFS, None
            if any([
                    index >= height
                    for index in block.transactions_generator_ref_list
            ]):
                return Err.FUTURE_GENERATOR_REFS, None

        if block.transactions_generator is not None:
            # Get List of names removed, puzzles hashes for removed coins and conditions created

            assert npc_result is not None
            cost = calculate_cost_of_program(block.transactions_generator,
                                             npc_result,
                                             constants.COST_PER_BYTE)
            npc_list = npc_result.npc_list

            # 7. Check that cost <= MAX_BLOCK_COST_CLVM
            log.debug(
                f"Cost: {cost} max: {constants.MAX_BLOCK_COST_CLVM} "
                f"percent full: {round(100 * (cost / constants.MAX_BLOCK_COST_CLVM), 2)}%"
            )
            if cost > constants.MAX_BLOCK_COST_CLVM:
                return Err.BLOCK_COST_EXCEEDS_MAX, None

            # 8. The CLVM program must not return any errors
            if npc_result.error is not None:
                return Err(npc_result.error), None

            for npc in npc_list:
                removals.append(npc.coin_name)
                removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash

            additions = additions_for_npc(npc_list)
            coin_announcement_names = coin_announcements_names_for_npc(
                npc_list)
            puzzle_announcement_names = puzzle_announcements_names_for_npc(
                npc_list)
        else:
            assert npc_result is None

        # 9. Check that the correct cost is in the transactions info
        if block.transactions_info.cost != cost:
            return Err.INVALID_BLOCK_COST, None

        additions_dic: Dict[bytes32, Coin] = {}
        # 10. Check additions for the max coin amount
        # Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer.
        # In practice the negative branch is unreachable here, because Coin amounts are type-checked as uint64.
        for coin in additions + coinbase_additions:
            additions_dic[coin.name()] = coin
            if coin.amount < 0:
                return Err.COIN_AMOUNT_NEGATIVE, None

            if coin.amount > constants.MAX_COIN_AMOUNT:
                return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 11. Validate addition and removal roots
        root_error = validate_block_merkle_roots(
            block.foliage_transaction_block.additions_root,
            block.foliage_transaction_block.removals_root,
            additions + coinbase_additions,
            removals,
        )
        if root_error:
            return root_error, None

        # 12. The additions and removals must result in the correct filter
        byte_array_tx: List[bytearray] = []

        for coin in additions + coinbase_additions:
            byte_array_tx.append(bytearray(coin.puzzle_hash))
        for coin_name in removals:
            byte_array_tx.append(bytearray(coin_name))

        bip158: PyBIP158 = PyBIP158(byte_array_tx)
        encoded_filter = bytes(bip158.GetEncoded())
        filter_hash = std_hash(encoded_filter)

        if filter_hash != block.foliage_transaction_block.filter_hash:
            return Err.INVALID_TRANSACTIONS_FILTER_HASH, None

        # 13. Check for duplicate outputs in additions
        addition_counter = collections.Counter(
            _.name() for _ in additions + coinbase_additions)
        for k, v in addition_counter.items():
            if v > 1:
                return Err.DUPLICATE_OUTPUT, None

        # 14. Check for duplicate spends inside block
        removal_counter = collections.Counter(removals)
        for k, v in removal_counter.items():
            if v > 1:
                return Err.DOUBLE_SPEND, None

        # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block)
        # The fork point is the last block in common between the peak chain and the chain of `block`
        if peak is None or height == 0:
            fork_h: int = -1
        elif fork_point_with_peak is not None:
            fork_h = fork_point_with_peak
        else:
            fork_h = find_fork_point_in_chain(
                blocks, peak, blocks.block_record(block.prev_header_hash))

        # Get additions and removals since (after) fork_h but not including this block
        # The values include: the coin that was added, the height of the block in which it was confirmed, and the
        # timestamp of the block in which it was confirmed
        # This includes coinbase additions
        additions_since_fork: Dict[bytes32, Tuple[Coin, uint32, uint64]] = {}
        removals_since_fork: Set[bytes32] = set()

        # For height 0, there are no additions and removals before this block, so we can skip
        if height > 0:
            # First, get all the blocks in the fork > fork_h, < block.height
            prev_block: Optional[FullBlock] = await block_store.get_full_block(
                block.prev_header_hash)
            reorg_blocks: Dict[uint32, FullBlock] = {}
            curr: Optional[FullBlock] = prev_block
            assert curr is not None
            while curr.height > fork_h:
                if curr.height == 0:
                    break
                curr = await block_store.get_full_block(curr.prev_header_hash)
                assert curr is not None
                reorg_blocks[curr.height] = curr
            if fork_h != -1:
                assert len(reorg_blocks) == height - fork_h - 1

            curr = prev_block
            assert curr is not None
            while curr.height > fork_h:
                # Coin store doesn't contain coins from fork, we have to run generator for each block in fork
                if curr.transactions_generator is not None:
                    # These blocks are in the past and therefore assumed to be valid, so get_block_generator won't raise
                    curr_block_generator: Optional[
                        BlockGenerator] = await get_block_generator(curr)
                    assert curr_block_generator is not None and curr.transactions_info is not None
                    curr_npc_result = get_name_puzzle_conditions(
                        curr_block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM,
                            curr.transactions_info.cost), False)
                    removals_in_curr, additions_in_curr = tx_removals_and_additions(
                        curr_npc_result.npc_list)
                else:
                    removals_in_curr = []
                    additions_in_curr = []

                for c_name in removals_in_curr:
                    assert c_name not in removals_since_fork
                    removals_since_fork.add(c_name)
                for c in additions_in_curr:
                    assert c.name() not in additions_since_fork
                    assert curr.foliage_transaction_block is not None
                    additions_since_fork[c.name()] = (
                        c, curr.height,
                        curr.foliage_transaction_block.timestamp)

                for coinbase_coin in curr.get_included_reward_coins():
                    assert coinbase_coin.name() not in additions_since_fork
                    assert curr.foliage_transaction_block is not None
                    additions_since_fork[coinbase_coin.name()] = (
                        coinbase_coin,
                        curr.height,
                        curr.foliage_transaction_block.timestamp,
                    )
                if curr.height == 0:
                    break
                curr = reorg_blocks[curr.height - 1]
                assert curr is not None

        removal_coin_records: Dict[bytes32, CoinRecord] = {}
        for rem in removals:
            if rem in additions_dic:
                # Ephemeral coin
                rem_coin: Coin = additions_dic[rem]
                new_unspent: CoinRecord = CoinRecord(
                    rem_coin,
                    height,
                    height,
                    True,
                    False,
                    block.foliage_transaction_block.timestamp,
                )
                removal_coin_records[new_unspent.name] = new_unspent
            else:
                unspent = await coin_store.get_coin_record(rem)
                if unspent is not None and unspent.confirmed_block_index <= fork_h:
                    # Spending something in the current chain, confirmed before fork
                    # (We ignore all coins confirmed after fork)
                    if unspent.spent == 1 and unspent.spent_block_index <= fork_h:
                        # Check for coins spent in an ancestor block
                        return Err.DOUBLE_SPEND, None
                    removal_coin_records[unspent.name] = unspent
                else:
                    # This coin is not in the current heaviest chain, so it must be in the fork
                    if rem not in additions_since_fork:
                        # Check for spending a coin that does not exist in this fork
                        return Err.UNKNOWN_UNSPENT, None
                    new_coin, confirmed_height, confirmed_timestamp = additions_since_fork[
                        rem]
                    new_coin_record: CoinRecord = CoinRecord(
                        new_coin,
                        confirmed_height,
                        uint32(0),
                        False,
                        False,
                        confirmed_timestamp,
                    )
                    removal_coin_records[
                        new_coin_record.name] = new_coin_record

                # This check applies to both coins created before fork (pulled from coin_store),
                # and coins created after fork (additions_since_fork)
                if rem in removals_since_fork:
                    # This coin was spent in the fork
                    return Err.DOUBLE_SPEND_IN_FORK, None

        removed = 0
        for unspent in removal_coin_records.values():
            removed += unspent.coin.amount

        added = 0
        for coin in additions:
            added += coin.amount

        # 16. Check that the total coin amount for added is <= removed
        if removed < added:
            return Err.MINTING_COIN, None

        fees = removed - added
        assert fees >= 0
        assert_fee_sum: uint128 = uint128(0)

        for npc in npc_list:
            if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
                fee_list: List[ConditionWithArgs] = npc.condition_dict[
                    ConditionOpcode.RESERVE_FEE]
                for cvp in fee_list:
                    fee = int_from_bytes(cvp.vars[0])
                    if fee < 0:
                        return Err.RESERVE_FEE_CONDITION_FAILED, None
                    assert_fee_sum = uint128(assert_fee_sum + fee)

        # 17. Check that the assert fee sum <= fees, and that each reserved fee is non-negative
        if fees < assert_fee_sum:
            return Err.RESERVE_FEE_CONDITION_FAILED, None

        # 18. Check that the fee amount + farmer reward does not exceed the maximum coin amount
        if fees + calculate_base_farmer_reward(
                height) > constants.MAX_COIN_AMOUNT:
            return Err.COIN_AMOUNT_EXCEEDS_MAXIMUM, None

        # 19. Check that the computed fees are equal to the fees in the block header
        if block.transactions_info.fees != fees:
            return Err.INVALID_BLOCK_FEE_AMOUNT, None

        # 20. Verify that removed coin puzzle_hashes match with calculated puzzle_hashes
        for unspent in removal_coin_records.values():
            if unspent.coin.puzzle_hash != removals_puzzle_dic[unspent.name]:
                return Err.WRONG_PUZZLE_HASH, None

        # 21. Verify conditions
        # create hash_key list for aggsig check
        pairs_pks: List[G1Element] = []
        pairs_msgs: List[bytes] = []
        for npc in npc_list:
            assert height is not None
            unspent = removal_coin_records[npc.coin_name]
            error = mempool_check_conditions_dict(
                unspent,
                coin_announcement_names,
                puzzle_announcement_names,
                npc.condition_dict,
                prev_transaction_block_height,
                block.foliage_transaction_block.timestamp,
            )
            if error:
                return error, None
            for pk, m in pkm_pairs_for_conditions_dict(
                    npc.condition_dict, npc.coin_name,
                    constants.AGG_SIG_ME_ADDITIONAL_DATA):
                pairs_pks.append(pk)
                pairs_msgs.append(m)

        # 22. Verify aggregated signature
        # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
        if not block.transactions_info.aggregated_signature:
            return Err.BAD_AGGREGATE_SIGNATURE, None

        # noinspection PyTypeChecker
        if not AugSchemeMPL.aggregate_verify(
                pairs_pks, pairs_msgs,
                block.transactions_info.aggregated_signature):
            return Err.BAD_AGGREGATE_SIGNATURE, None

        return None, npc_result
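
A minimal, self-contained sketch of checks 13, 14 and 16 above (duplicate outputs, double spends, and the no-minting rule). StubCoin and check_body_invariants are illustrative stand-ins for chia's Coin type and the validation logic, not chia APIs.

import collections
from dataclasses import dataclass
from typing import List


@dataclass(frozen=True)
class StubCoin:
    name: bytes  # stand-in for Coin.name()
    amount: int


def check_body_invariants(additions: List[StubCoin], removals: List[StubCoin]) -> str:
    # 13. No two additions may share a coin id
    if any(n > 1 for n in collections.Counter(c.name for c in additions).values()):
        return "DUPLICATE_OUTPUT"
    # 14. No coin may be spent twice inside the same block
    if any(n > 1 for n in collections.Counter(c.name for c in removals).values()):
        return "DOUBLE_SPEND"
    # 16. Total value added must not exceed total value removed (no minting)
    if sum(c.amount for c in additions) > sum(c.amount for c in removals):
        return "MINTING_COIN"
    return "OK"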
Example #16
    def test_recursive_types(self):
        coin: Optional[Coin] = None
        l1 = [(bytes32([2] * 32), coin)]
        rr = RespondRemovals(uint32(1), bytes32([1] * 32), l1, None)
        RespondRemovals(rr.height, rr.header_hash, rr.coins, rr.proofs)
Example #17
    async def crawl(self):
        try:
            self.connection = await aiosqlite.connect(self.db_path)
            self.crawl_store = await CrawlStore.create(self.connection)
            self.log.info("Started")
            t_start = time.time()
            total_nodes = 0
            self.seen_nodes = set()
            tried_nodes = set()
            for peer in self.bootstrap_peers:
                new_peer = PeerRecord(
                    peer,
                    peer,
                    self.other_peers_port,
                    False,
                    0,
                    0,
                    0,
                    uint64(int(time.time())),
                    uint64(0),
                    "undefined",
                    uint64(0),
                    tls_version="unknown",
                )
                new_peer_reliability = PeerReliability(peer)
                self.crawl_store.maybe_add_peer(new_peer, new_peer_reliability)

            self.host_to_version, self.handshake_time = self.crawl_store.load_host_to_version()
            self.best_timestamp_per_peer = self.crawl_store.load_best_peer_reliability()
            while True:
                self.with_peak = set()
                peers_to_crawl = await self.crawl_store.get_peers_to_crawl(
                    25000, 250000)
                tasks = set()
                for peer in peers_to_crawl:
                    if peer.port == self.other_peers_port:
                        total_nodes += 1
                        if peer.ip_address not in tried_nodes:
                            tried_nodes.add(peer.ip_address)
                        task = asyncio.create_task(self.connect_task(peer))
                        tasks.add(task)
                        if len(tasks) >= 250:
                            await asyncio.wait(
                                tasks, return_when=asyncio.FIRST_COMPLETED)
                        tasks = set(filter(lambda t: not t.done(), tasks))

                if len(tasks) > 0:
                    await asyncio.wait(tasks, timeout=30)

                for response in self.peers_retrieved:
                    for response_peer in response.peer_list:
                        if response_peer.host not in self.best_timestamp_per_peer:
                            self.best_timestamp_per_peer[
                                response_peer.host] = response_peer.timestamp
                        self.best_timestamp_per_peer[response_peer.host] = max(
                            self.best_timestamp_per_peer[response_peer.host],
                            response_peer.timestamp)
                        if (response_peer.host not in self.seen_nodes
                                and response_peer.timestamp >
                                time.time() - 5 * 24 * 3600):
                            self.seen_nodes.add(response_peer.host)
                            new_peer = PeerRecord(
                                response_peer.host,
                                response_peer.host,
                                uint32(response_peer.port),
                                False,
                                uint64(0),
                                uint32(0),
                                uint64(0),
                                uint64(int(time.time())),
                                uint64(response_peer.timestamp),
                                "undefined",
                                uint64(0),
                                tls_version="unknown",
                            )
                            new_peer_reliability = PeerReliability(
                                response_peer.host)
                            if self.crawl_store is not None:
                                self.crawl_store.maybe_add_peer(
                                    new_peer, new_peer_reliability)
                        await self.crawl_store.update_best_timestamp(
                            response_peer.host,
                            self.best_timestamp_per_peer[response_peer.host],
                        )
                for host, version in self.version_cache:
                    self.handshake_time[host] = int(time.time())
                    self.host_to_version[host] = version
                    await self.crawl_store.update_version(
                        host, version, int(time.time()))

                to_remove = set()
                now = int(time.time())
                for host in self.host_to_version.keys():
                    active = True
                    if host not in self.handshake_time:
                        active = False
                    elif self.handshake_time[host] < now - 5 * 24 * 3600:
                        active = False
                    if not active:
                        to_remove.add(host)

                self.host_to_version = {
                    host: version
                    for host, version in self.host_to_version.items()
                    if host not in to_remove
                }
                self.best_timestamp_per_peer = {
                    host: timestamp
                    for host, timestamp in
                    self.best_timestamp_per_peer.items()
                    if timestamp >= now - 5 * 24 * 3600
                }
                versions = {}
                for host, version in self.host_to_version.items():
                    if version not in versions:
                        versions[version] = 0
                    versions[version] += 1
                self.version_cache = []
                self.peers_retrieved = []

                self.server.banned_peers = {}
                if len(peers_to_crawl) == 0:
                    continue

                # Try up to 5 times to write to the DB in case there is a lock that causes a timeout
                for _ in range(5):
                    try:
                        await self.crawl_store.load_to_db()
                        await self.crawl_store.load_reliable_peers_to_db()
                    except Exception as e:
                        self.log.error(f"Exception while saving to DB: {e}.")
                        self.log.error("Waiting 5 seconds before retry...")
                        await asyncio.sleep(5)
                        continue
                    break
                total_records = self.crawl_store.get_total_records()
                ipv6_count = self.crawl_store.get_ipv6_peers()
                self.log.error("***")
                self.log.error("Finished batch:")
                self.log.error(f"Total IPs stored in DB: {total_records}.")
                self.log.error(
                    f"Total IPV6 addresses stored in DB: {ipv6_count}")
                self.log.error(
                    f"Total connections attempted since crawler started: {total_nodes}."
                )
                self.log.error(
                    f"Total unique nodes attempted since crawler started: {len(tried_nodes)}."
                )
                t_now = time.time()
                t_delta = int(t_now - t_start)
                if t_delta > 0:
                    self.log.error(
                        f"Avg connections per second: {total_nodes // t_delta}."
                    )
                # Periodically print detailed stats.
                reliable_peers = self.crawl_store.get_reliable_peers()
                self.log.error(
                    f"High quality reachable nodes, used by DNS introducer in replies: {reliable_peers}"
                )
                banned_peers = self.crawl_store.get_banned_peers()
                ignored_peers = self.crawl_store.get_ignored_peers()
                available_peers = len(self.host_to_version)
                addresses_count = len(self.best_timestamp_per_peer)
                total_records = self.crawl_store.get_total_records()
                ipv6_addresses_count = 0
                for host in self.best_timestamp_per_peer.keys():
                    try:
                        _ = ipaddress.IPv6Address(host)
                        ipv6_addresses_count += 1
                    except ValueError:
                        continue
                self.log.error(
                    "IPv4 addresses gossiped with timestamp in the last 5 days with respond_peers messages: "
                    f"{addresses_count - ipv6_addresses_count}.")
                self.log.error(
                    "IPv6 addresses gossiped with timestamp in the last 5 days with respond_peers messages: "
                    f"{ipv6_addresses_count}.")
                ipv6_available_peers = 0
                for host in self.host_to_version.keys():
                    try:
                        _ = ipaddress.IPv6Address(host)
                        ipv6_available_peers += 1
                    except ValueError:
                        continue
                self.log.error(
                    f"Total IPv4 nodes reachable in the last 5 days: {available_peers - ipv6_available_peers}."
                )
                self.log.error(
                    f"Total IPv6 nodes reachable in the last 5 days: {ipv6_available_peers}."
                )
                self.log.error(
                    "Version distribution among reachable in the last 5 days (at least 100 nodes):"
                )
                if "minimum_version_count" in self.config and self.config[
                        "minimum_version_count"] > 0:
                    minimum_version_count = self.config[
                        "minimum_version_count"]
                else:
                    minimum_version_count = 100
                for version, count in sorted(versions.items(),
                                             key=lambda kv: kv[1],
                                             reverse=True):
                    if count >= minimum_version_count:
                        self.log.error(f"Version: {version} - Count: {count}")
                self.log.error(f"Banned addresses in the DB: {banned_peers}")
                self.log.error(
                    f"Temporary ignored addresses in the DB: {ignored_peers}")
                self.log.error(
                    "Peers to crawl from in the next batch (total IPs - ignored - banned): "
                    f"{total_records - banned_peers - ignored_peers}")
                self.log.error("***")
        except Exception as e:
            self.log.error(
                f"Exception: {e}. Traceback: {traceback.format_exc()}.")
Example #18
    async def _reconsider_peak(
        self,
        block_record: BlockRecord,
        genesis: bool,
        fork_point_with_peak: Optional[uint32],
        npc_result: Optional[NPCResult],
    ) -> Tuple[Optional[uint32], Optional[uint32], List[BlockRecord]]:
        """
        When a new block is added, this is called, to check if the new block is the new peak of the chain.
        This also handles reorgs by reverting blocks which are not in the heaviest chain.
        It returns the height of the fork between the previous chain and the new chain, or returns
        None if there was no update to the heaviest chain.
        """
        peak = self.get_peak()
        if genesis:
            if peak is None:
                block: Optional[
                    FullBlock] = await self.block_store.get_full_block(
                        block_record.header_hash)
                assert block is not None

                if npc_result is not None:
                    tx_removals, tx_additions = tx_removals_and_additions(
                        npc_result.npc_list)
                else:
                    tx_removals, tx_additions = [], []
                await self.coin_store.new_block(block, tx_additions,
                                                tx_removals)
                await self.block_store.set_peak(block_record.header_hash)
                return uint32(0), uint32(0), [block_record]
            return None, None, []

        assert peak is not None
        if block_record.weight > peak.weight:
            # Find the fork. if the block is just being appended, it will return the peak
            # If no blocks in common, returns -1, and reverts all blocks
            if block_record.prev_hash == peak.header_hash:
                fork_height: int = peak.height
            elif fork_point_with_peak is not None:
                fork_height = fork_point_with_peak
            else:
                fork_height = find_fork_point_in_chain(self, block_record,
                                                       peak)

            if block_record.prev_hash != peak.header_hash:
                await self.coin_store.rollback_to_block(fork_height)
            # Rollback sub_epoch_summaries
            heights_to_delete = []
            for ses_included_height in self.__sub_epoch_summaries.keys():
                if ses_included_height > fork_height:
                    heights_to_delete.append(ses_included_height)
            for height in heights_to_delete:
                log.info(f"delete ses at height {height}")
                del self.__sub_epoch_summaries[height]

            # Collect all blocks from fork point to new peak
            blocks_to_add: List[Tuple[FullBlock, BlockRecord]] = []
            curr = block_record.header_hash

            while fork_height < 0 or curr != self.height_to_hash(uint32(fork_height)):
                fetched_full_block: Optional[FullBlock] = await self.block_store.get_full_block(curr)
                fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
                assert fetched_full_block is not None
                assert fetched_block_record is not None
                blocks_to_add.append(
                    (fetched_full_block, fetched_block_record))
                if fetched_full_block.height == 0:
                    # Doing a full reorg, starting at height 0
                    break
                curr = fetched_block_record.prev_hash

            records_to_add = []
            for fetched_full_block, fetched_block_record in reversed(
                    blocks_to_add):
                records_to_add.append(fetched_block_record)
                if fetched_block_record.is_transaction_block:
                    if fetched_block_record.header_hash == block_record.header_hash:
                        tx_removals, tx_additions = await self.get_tx_removals_and_additions(
                            fetched_full_block, npc_result)
                    else:
                        tx_removals, tx_additions = await self.get_tx_removals_and_additions(
                            fetched_full_block, None)
                    await self.coin_store.new_block(fetched_full_block,
                                                    tx_additions, tx_removals)

            # Changes the peak to be the new peak
            await self.block_store.set_peak(block_record.header_hash)
            return uint32(max(fork_height,
                              0)), block_record.height, records_to_add

        # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
        return None, None, []
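
The reorg handling above walks header hashes from the new tip back to the fork height, then replays the collected blocks oldest-first. A toy version of that walk, using plain dicts in place of the block store (all names here are illustrative):

from typing import Dict, List


def blocks_to_replay(prev_hash: Dict[str, str], height_of: Dict[str, int],
                     new_tip: str, fork_height: int) -> List[str]:
    chain: List[str] = []
    curr = new_tip
    while height_of[curr] > fork_height:
        chain.append(curr)
        if height_of[curr] == 0:
            break  # full reorg down to genesis
        curr = prev_hash[curr]
    return list(reversed(chain))  # replay oldest-first, as records_to_add is built


# Example: chain a -> b -> c, forking at height 0, so b and c are replayed
print(blocks_to_replay({"c": "b", "b": "a"}, {"a": 0, "b": 1, "c": 2}, "c", 0))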
Example #19
    async def _reconsider_peak(
            self, block_record: BlockRecord, genesis: bool,
            fork_point_with_peak: Optional[uint32]) -> Optional[uint32]:
        """
        When a new block is added, this is called, to check if the new block is the new peak of the chain.
        This also handles reorgs by reverting blocks which are not in the heaviest chain.
        It returns the height of the fork between the previous chain and the new chain, or returns
        None if there was no update to the heaviest chain.
        """
        peak = self.get_peak()
        if genesis:
            if peak is None:
                block: Optional[
                    HeaderBlockRecord] = await self.block_store.get_header_block_record(
                        block_record.header_hash)
                assert block is not None
                self.__height_to_hash[uint32(0)] = block.header_hash
                for removed in block.removals:
                    self.log.debug(f"Removed: {removed.name()}")
                await self.coins_of_interest_received(block.removals,
                                                      block.additions,
                                                      block.height)
                self._peak_height = uint32(0)
                return uint32(0)
            return None

        assert peak is not None
        if block_record.weight > peak.weight:
            # Find the fork. if the block is just being appended, it will return the peak
            # If no blocks in common, returns -1, and reverts all blocks
            if fork_point_with_peak is not None:
                fork_h: int = fork_point_with_peak
            else:
                fork_h = find_fork_point_in_chain(self, block_record, peak)

            # Rollback to fork
            self.log.debug(
                f"fork_h: {fork_h}, SB: {block_record.height}, peak: {peak.height}"
            )
            await self.reorg_rollback(fork_h)

            # Rollback sub_epoch_summaries
            heights_to_delete = []
            for ses_included_height in self.__sub_epoch_summaries.keys():
                if ses_included_height > fork_h:
                    heights_to_delete.append(ses_included_height)
            for height in heights_to_delete:
                del self.__sub_epoch_summaries[height]

            # Collect all blocks from fork point to new peak
            blocks_to_add: List[Tuple[HeaderBlockRecord, BlockRecord]] = []
            curr = block_record.header_hash
            while fork_h < 0 or curr != self.height_to_hash(uint32(fork_h)):
                fetched_header_block: Optional[HeaderBlockRecord] = await self.block_store.get_header_block_record(curr)
                fetched_block_record: Optional[BlockRecord] = await self.block_store.get_block_record(curr)
                assert fetched_header_block is not None
                assert fetched_block_record is not None
                blocks_to_add.append(
                    (fetched_header_block, fetched_block_record))
                if fetched_header_block.height == 0:
                    # Doing a full reorg, starting at height 0
                    break
                curr = fetched_block_record.prev_hash

            for fetched_header_block, fetched_block_record in reversed(
                    blocks_to_add):
                self.__height_to_hash[
                    fetched_block_record.
                    height] = fetched_block_record.header_hash
                if fetched_block_record.is_transaction_block:
                    await self.coins_of_interest_received(
                        fetched_header_block.removals,
                        fetched_header_block.additions,
                        fetched_header_block.height,
                    )
                if fetched_block_record.sub_epoch_summary_included is not None:
                    self.__sub_epoch_summaries[
                        fetched_block_record.
                        height] = fetched_block_record.sub_epoch_summary_included

            # Changes the peak to be the new peak
            await self.block_store.set_peak(block_record.header_hash)
            self._peak_height = block_record.height
            return uint32(max(fork_h, 0))

        # This is not a heavier block than the heaviest we have seen, so we don't change the coin set
        return None
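
Both versions of _reconsider_peak roll back sub-epoch summaries the same way: every summary recorded above the fork height is deleted before the new chain's summaries are re-added. A small sketch of that step:

from typing import Any, Dict


def rollback_summaries(summaries: Dict[int, Any], fork_h: int) -> None:
    # Collect heights first, then delete, so the dict is not mutated while iterating
    for height in [h for h in summaries if h > fork_h]:
        del summaries[height]


ses = {100: "ses_a", 200: "ses_b", 300: "ses_c"}
rollback_summaries(ses, 150)
assert ses == {100: "ses_a"}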
Example #20
    async def validate_unfinished_block(
            self,
            block: UnfinishedBlock,
            skip_overflow_ss_validation: bool = True) -> PreValidationResult:
        if (not self.contains_block(block.prev_header_hash)
                and not block.prev_header_hash
                == self.constants.GENESIS_CHALLENGE):
            return PreValidationResult(
                uint16(Err.INVALID_PREV_BLOCK_HASH.value), None, None)

        unfinished_header_block = UnfinishedHeaderBlock(
            block.finished_sub_slots,
            block.reward_chain_block,
            block.challenge_chain_sp_proof,
            block.reward_chain_sp_proof,
            block.foliage,
            block.foliage_transaction_block,
            b"",
        )
        prev_b = self.try_block_record(
            unfinished_header_block.prev_header_hash)
        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            self.constants,
            len(unfinished_header_block.finished_sub_slots) > 0, prev_b, self)
        required_iters, error = validate_unfinished_header_block(
            self.constants,
            self,
            unfinished_header_block,
            False,
            difficulty,
            sub_slot_iters,
            skip_overflow_ss_validation,
        )

        if error is not None:
            return PreValidationResult(uint16(error.code.value), None, None)

        prev_height = (-1 if block.prev_header_hash
                       == self.constants.GENESIS_CHALLENGE else
                       self.block_record(block.prev_header_hash).height)

        npc_result = None
        if block.transactions_generator is not None:
            assert block.transactions_info is not None
            try:
                block_generator: Optional[
                    BlockGenerator] = await self.get_block_generator(block)
            except ValueError:
                return PreValidationResult(
                    uint16(Err.GENERATOR_REF_HAS_NO_GENERATOR.value), None,
                    None)
            if block_generator is None:
                return PreValidationResult(
                    uint16(Err.GENERATOR_REF_HAS_NO_GENERATOR.value), None,
                    None)
            npc_result = get_name_puzzle_conditions(
                block_generator,
                min(self.constants.MAX_BLOCK_COST_CLVM,
                    block.transactions_info.cost),
                cost_per_byte=self.constants.COST_PER_BYTE,
                safe_mode=False,
            )
        error_code, cost_result = await validate_block_body(
            self.constants,
            self,
            self.block_store,
            self.coin_store,
            self.get_peak(),
            block,
            uint32(prev_height + 1),
            npc_result,
            None,
            self.get_block_generator,
        )

        if error_code is not None:
            return PreValidationResult(uint16(error_code.value), None, None)

        return PreValidationResult(None, required_iters, cost_result)
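
The height passed to validate_block_body above is derived from the parent: a block whose prev_header_hash is the genesis challenge has prev_height -1 and therefore gets height 0. A toy version of that derivation; the constant and lookup dict are stand-ins, not chia APIs.

from typing import Dict

GENESIS_CHALLENGE = b"\x00" * 32  # stand-in for constants.GENESIS_CHALLENGE


def next_height(prev_header_hash: bytes, height_of: Dict[bytes, int]) -> int:
    prev_height = -1 if prev_header_hash == GENESIS_CHALLENGE else height_of[prev_header_hash]
    return prev_height + 1


assert next_height(GENESIS_CHALLENGE, {}) == 0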
Example #21
def create_foliage(
    constants: ConsensusConstants,
    reward_block_unfinished: RewardChainBlockUnfinished,
    spend_bundle: Optional[SpendBundle],
    additions: List[Coin],
    removals: List[Coin],
    prev_block: Optional[BlockRecord],
    blocks: BlockchainInterface,
    total_iters_sp: uint128,
    timestamp: uint64,
    farmer_reward_puzzlehash: bytes32,
    pool_target: PoolTarget,
    get_plot_signature: Callable[[bytes32, G1Element], G2Element],
    get_pool_signature: Callable[[PoolTarget, Optional[G1Element]], Optional[G2Element]],
    seed: bytes = b"",
) -> Tuple[Foliage, Optional[FoliageTransactionBlock], Optional[TransactionsInfo], Optional[SerializedProgram]]:
    """
    Creates a foliage for a given reward chain block. This may or may not be a tx block. In the case of a tx block,
    the return values are not None. This is called at the signage point, so some of this information may be
    tweaked at the infusion point.

    Args:
        constants: consensus constants being used for this chain
        reward_block_unfinished: the reward block to look at, potentially at the signage point
        spend_bundle: the spend bundle including all transactions
        additions: the coins added by the spend bundle
        removals: the coins spent by the spend bundle
        prev_block: the previous block at the signage point
        blocks: dict from header hash to blocks, of all ancestor blocks
        total_iters_sp: total iters at the signage point
        timestamp: timestamp to put into the foliage block
        farmer_reward_puzzlehash: where to pay out farming reward
        pool_target: where to pay out pool reward
        get_plot_signature: retrieve the signature corresponding to the plot public key
        get_pool_signature: retrieve the signature corresponding to the pool public key
        seed: seed to randomize block

    """

    if prev_block is not None:
        res = get_prev_transaction_block(prev_block, blocks, total_iters_sp)
        is_transaction_block: bool = res[0]
        prev_transaction_block: Optional[BlockRecord] = res[1]
    else:
        # Genesis is a transaction block
        prev_transaction_block = None
        is_transaction_block = True

    random.seed(seed)
    # Use the extension data to create different blocks based on header hash
    extension_data: bytes32 = random.randint(0, 100000000).to_bytes(32, "big")
    if prev_block is None:
        height: uint32 = uint32(0)
    else:
        height = uint32(prev_block.height + 1)

    # Create filter
    byte_array_tx: List[bytearray] = []
    tx_additions: List[Coin] = []
    tx_removals: List[bytes32] = []

    pool_target_signature: Optional[G2Element] = get_pool_signature(
        pool_target, reward_block_unfinished.proof_of_space.pool_public_key
    )

    foliage_data = FoliageBlockData(
        reward_block_unfinished.get_hash(),
        pool_target,
        pool_target_signature,
        farmer_reward_puzzlehash,
        extension_data,
    )

    foliage_block_data_signature: G2Element = get_plot_signature(
        foliage_data.get_hash(),
        reward_block_unfinished.proof_of_space.plot_public_key,
    )

    prev_block_hash: bytes32 = constants.GENESIS_CHALLENGE
    if height != 0:
        assert prev_block is not None
        prev_block_hash = prev_block.header_hash

    solution_program: Optional[SerializedProgram] = None
    generator_block_heights_list: List[uint32] = []

    if is_transaction_block:
        aggregate_sig: G2Element = G2Element()
        cost = uint64(0)

        if spend_bundle is not None:
            solution_program = best_solution_program(spend_bundle)
            aggregate_sig = spend_bundle.aggregated_signature

        # Calculate the cost of transactions
        if solution_program is not None:
            result: CostResult = calculate_cost_of_program(solution_program, constants.CLVM_COST_RATIO_CONSTANT)
            cost = result.cost
            removal_amount = 0
            addition_amount = 0
            for coin in removals:
                removal_amount += coin.amount
            for coin in additions:
                addition_amount += coin.amount
            spend_bundle_fees = removal_amount - addition_amount
        else:
            spend_bundle_fees = 0

        reward_claims_incorporated = []
        if height > 0:
            assert prev_transaction_block is not None
            assert prev_block is not None
            curr: BlockRecord = prev_block
            while not curr.is_transaction_block:
                curr = blocks.block_record(curr.prev_hash)

            assert curr.fees is not None
            pool_coin = create_pool_coin(
                curr.height, curr.pool_puzzle_hash, calculate_pool_reward(curr.height), constants.GENESIS_CHALLENGE
            )

            farmer_coin = create_farmer_coin(
                curr.height,
                curr.farmer_puzzle_hash,
                uint64(calculate_base_farmer_reward(curr.height) + curr.fees),
                constants.GENESIS_CHALLENGE,
            )
            assert curr.header_hash == prev_transaction_block.header_hash
            reward_claims_incorporated += [pool_coin, farmer_coin]

            if curr.height > 0:
                curr = blocks.block_record(curr.prev_hash)
                # Prev block is not genesis
                while not curr.is_transaction_block:
                    pool_coin = create_pool_coin(
                        curr.height,
                        curr.pool_puzzle_hash,
                        calculate_pool_reward(curr.height),
                        constants.GENESIS_CHALLENGE,
                    )
                    farmer_coin = create_farmer_coin(
                        curr.height,
                        curr.farmer_puzzle_hash,
                        calculate_base_farmer_reward(curr.height),
                        constants.GENESIS_CHALLENGE,
                    )
                    reward_claims_incorporated += [pool_coin, farmer_coin]
                    curr = blocks.block_record(curr.prev_hash)
        additions.extend(reward_claims_incorporated.copy())
        for coin in additions:
            tx_additions.append(coin)
            byte_array_tx.append(bytearray(coin.puzzle_hash))
        for coin in removals:
            tx_removals.append(coin.name())
            byte_array_tx.append(bytearray(coin.name()))

        bip158: PyBIP158 = PyBIP158(byte_array_tx)
        encoded = bytes(bip158.GetEncoded())

        removal_merkle_set = MerkleSet()
        addition_merkle_set = MerkleSet()

        # Create removal Merkle set
        for coin_name in tx_removals:
            removal_merkle_set.add_already_hashed(coin_name)

        # Create addition Merkle set
        puzzlehash_coin_map: Dict[bytes32, List[Coin]] = {}

        for coin in tx_additions:
            if coin.puzzle_hash in puzzlehash_coin_map:
                puzzlehash_coin_map[coin.puzzle_hash].append(coin)
            else:
                puzzlehash_coin_map[coin.puzzle_hash] = [coin]

        # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
        for puzzle, coins in puzzlehash_coin_map.items():
            addition_merkle_set.add_already_hashed(puzzle)
            addition_merkle_set.add_already_hashed(hash_coin_list(coins))

        additions_root = addition_merkle_set.get_root()
        removals_root = removal_merkle_set.get_root()

        generator_hash = bytes32([0] * 32)
        if solution_program is not None:
            generator_hash = std_hash(solution_program)

        generator_refs_hash = bytes32([1] * 32)
        if generator_block_heights_list not in (None, []):
            generator_ref_list_bytes = b"".join(
                [(i).to_bytes(4, byteorder="big") for i in generator_block_heights_list]
            )
            generator_refs_hash = std_hash(generator_ref_list_bytes)

        filter_hash: bytes32 = std_hash(encoded)

        transactions_info: Optional[TransactionsInfo] = TransactionsInfo(
            generator_hash,
            generator_refs_hash,
            aggregate_sig,
            uint64(spend_bundle_fees),
            cost,
            reward_claims_incorporated,
        )
        if prev_transaction_block is None:
            prev_transaction_block_hash: bytes32 = constants.GENESIS_CHALLENGE
        else:
            prev_transaction_block_hash = prev_transaction_block.header_hash

        assert transactions_info is not None
        foliage_transaction_block: Optional[FoliageTransactionBlock] = FoliageTransactionBlock(
            prev_transaction_block_hash,
            timestamp,
            filter_hash,
            additions_root,
            removals_root,
            transactions_info.get_hash(),
        )
        assert foliage_transaction_block is not None

        foliage_transaction_block_hash: Optional[bytes32] = foliage_transaction_block.get_hash()
        foliage_transaction_block_signature: Optional[G2Element] = get_plot_signature(
            foliage_transaction_block_hash, reward_block_unfinished.proof_of_space.plot_public_key
        )
        assert foliage_transaction_block_signature is not None
    else:
        foliage_transaction_block_hash = None
        foliage_transaction_block_signature = None
        foliage_transaction_block = None
        transactions_info = None
    assert (foliage_transaction_block_hash is None) == (foliage_transaction_block_signature is None)

    foliage = Foliage(
        prev_block_hash,
        reward_block_unfinished.get_hash(),
        foliage_data,
        foliage_block_data_signature,
        foliage_transaction_block_hash,
        foliage_transaction_block_signature,
    )

    return foliage, foliage_transaction_block, transactions_info, solution_program
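
The addition Merkle set above commits to each puzzle hash together with a hash of all coins sharing it. A compact sketch of that grouping, with sha256 standing in for hash_coin_list and (puzzle_hash, coin_id) pairs standing in for Coin objects:

import hashlib
from collections import defaultdict
from typing import Dict, List, Tuple


def addition_merkle_leaves(additions: List[Tuple[bytes, bytes]]) -> List[bytes]:
    by_puzzle: Dict[bytes, List[bytes]] = defaultdict(list)
    for puzzle_hash, coin_id in additions:
        by_puzzle[puzzle_hash].append(coin_id)
    leaves: List[bytes] = []
    for puzzle_hash, coin_ids in by_puzzle.items():
        leaves.append(puzzle_hash)
        # Stand-in for hash_coin_list: hash the concatenated coin ids
        leaves.append(hashlib.sha256(b"".join(coin_ids)).digest())
    return leaves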
Example #22
DECOMPRESS_CSE_WITH_PREFIX = load_clvm(
    "decompress_coin_spend_entry_with_prefix.clvm",
    package_or_requirement="chia.wallet.puzzles")
DECOMPRESS_BLOCK = load_clvm("block_program_zero.clvm",
                             package_or_requirement="chia.wallet.puzzles")
TEST_MULTIPLE = load_clvm("test_multiple_generator_input_arguments.clvm",
                          package_or_requirement="chia.wallet.puzzles")

Nil = Program.from_bytes(b"\x80")

original_generator = hexstr_to_bytes(
    "ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff8080808080"
)  # noqa

gen1 = b"aaaaaaaaaa" + original_generator
gen2 = b"bb" + original_generator
FAKE_BLOCK_HEIGHT1 = uint32(100)
FAKE_BLOCK_HEIGHT2 = uint32(200)


@dataclass(frozen=True)
class MultipleCompressorArg:
    arg: List[CompressorArg]
    split_offset: int


def create_multiple_ref_generator(args: MultipleCompressorArg,
                                  spend_bundle: SpendBundle) -> BlockGenerator:
    """
    Decompress a transaction by referencing bytes from multiple input generator references
    """
    compressed_cse_list = compressed_coin_spend_entry_list(spend_bundle)
Example #23
    async def farm_block(self, puzzle_hash: bytes32 = bytes32(b"0" * 32)):
        # Fees get calculated
        fees = uint64(0)
        if self.mempool_manager.mempool.spends:
            for _, item in self.mempool_manager.mempool.spends.items():
                fees = uint64(fees + item.spend_bundle.fees())

        # Rewards get created
        next_block_height: uint32 = uint32(self.block_height + 1) if len(self.block_records) > 0 else self.block_height
        pool_coin: Coin = create_pool_coin(
            next_block_height,
            puzzle_hash,
            calculate_pool_reward(next_block_height),
            self.defaults.GENESIS_CHALLENGE,
        )
        farmer_coin: Coin = create_farmer_coin(
            next_block_height,
            puzzle_hash,
            uint64(calculate_base_farmer_reward(next_block_height) + fees),
            self.defaults.GENESIS_CHALLENGE,
        )
        await self.mempool_manager.coin_store._add_coin_records(
            [self.new_coin_record(pool_coin, True), self.new_coin_record(farmer_coin, True)]
        )

        # Coin store gets updated
        generator_bundle: Optional[SpendBundle] = None
        return_additions: List[Coin] = []
        return_removals: List[Coin] = []
        if (len(self.block_records) > 0) and (self.mempool_manager.mempool.spends):
            peak = self.mempool_manager.peak
            if peak is not None:
                result = await self.mempool_manager.create_bundle_from_mempool(peak.header_hash)

                if result is not None:
                    bundle, additions, removals = result
                    generator_bundle = bundle
                    return_additions = additions
                    return_removals = removals

                    await self.mempool_manager.coin_store._add_coin_records(
                        [self.new_coin_record(addition) for addition in additions]
                    )
                    await self.mempool_manager.coin_store._set_spent(
                        [r.name() for r in removals], uint32(self.block_height + 1)
                    )

        # SimBlockRecord is created
        generator: Optional[BlockGenerator] = await self.generate_transaction_generator(generator_bundle)
        self.block_records.append(
            SimBlockRecord(
                [pool_coin, farmer_coin],
                next_block_height,
                self.timestamp,
            )
        )
        self.blocks.append(SimFullBlock(generator, next_block_height))

        # block_height is incremented
        self.block_height = next_block_height

        # mempool is reset
        await self.new_peak()

        # return some debugging data
        return return_additions, return_removals
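
farm_block above mints the two reward coins for the next height. In chia the base block reward is split 7/8 to the pool and 1/8 plus all transaction fees to the farmer; a hedged sketch of that split (amounts in mojos, halving schedule omitted):

MOJOS_PER_XCH = 1_000_000_000_000


def pool_reward(base_reward: int) -> int:
    return (base_reward * 7) // 8


def farmer_reward(base_reward: int, fees: int) -> int:
    return base_reward // 8 + fees


base = 2 * MOJOS_PER_XCH  # initial base reward of 2 XCH
assert pool_reward(base) + farmer_reward(base, 0) == base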
Example #24
    async def test_basic_reorg(self, cache_size: uint32, tmp_dir, db_version):
        async with DBConnection(db_version) as db_wrapper:
            initial_block_count = 30
            reorg_length = 15
            blocks = bt.get_consecutive_blocks(initial_block_count)
            coin_store = await CoinStore.create(db_wrapper,
                                                cache_size=uint32(cache_size))
            store = await BlockStore.create(db_wrapper)
            hint_store = await HintStore.create(db_wrapper)
            b: Blockchain = await Blockchain.create(coin_store, store,
                                                    test_constants, hint_store,
                                                    tmp_dir)
            try:

                records: List[Optional[CoinRecord]] = []

                for block in blocks:
                    await b.receive_block(block)
                peak = b.get_peak()
                assert peak is not None
                assert peak.height == initial_block_count - 1

                for block in blocks:
                    if block.is_transaction_block():
                        coins = block.get_included_reward_coins()
                        records = [
                            await coin_store.get_coin_record(coin.name())
                            for coin in coins
                        ]
                        for record in records:
                            assert record is not None
                            assert not record.spent
                            assert record.confirmed_block_index == block.height
                            assert record.spent_block_index == 0

                blocks_reorg_chain = bt.get_consecutive_blocks(
                    reorg_length, blocks[:initial_block_count - 10], seed=b"2")

                for reorg_block in blocks_reorg_chain:
                    result, error_code, _, _ = await b.receive_block(
                        reorg_block)
                    print(
                        f"Height {reorg_block.height} {initial_block_count - 10} result {result}"
                    )
                    if reorg_block.height < initial_block_count - 10:
                        assert result == ReceiveBlockResult.ALREADY_HAVE_BLOCK
                    elif reorg_block.height < initial_block_count - 1:
                        assert result == ReceiveBlockResult.ADDED_AS_ORPHAN
                    elif reorg_block.height >= initial_block_count:
                        assert result == ReceiveBlockResult.NEW_PEAK
                        if reorg_block.is_transaction_block():
                            coins = reorg_block.get_included_reward_coins()
                            records = [
                                await coin_store.get_coin_record(coin.name())
                                for coin in coins
                            ]
                            for record in records:
                                assert record is not None
                                assert not record.spent
                                assert record.confirmed_block_index == reorg_block.height
                                assert record.spent_block_index == 0
                    assert error_code is None
                peak = b.get_peak()
                assert peak is not None
                assert peak.height == initial_block_count - 10 + reorg_length - 1
            finally:
                b.shut_down()
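
A quick sanity check of the height arithmetic behind the final assertion, using the constants from this test (illustrative only, not part of the test itself):

# The reorg chain shares blocks[:initial_block_count - 10], i.e. heights 0..19,
# then adds reorg_length new blocks on top of the shared prefix.
initial_block_count = 30
reorg_length = 15
last_shared_height = initial_block_count - 10 - 1      # height 19
expected_peak = last_shared_height + reorg_length      # 19 + 15 == 34
assert expected_peak == initial_block_count - 10 + reorg_length - 1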
Example No. 25
    async def add_spendbundle(
        self,
        new_spend: SpendBundle,
        npc_result: NPCResult,
        spend_name: bytes32,
        program: Optional[SerializedProgram] = None,
    ) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
        """
        Tries to add spend bundle to the mempool
        Returns the cost (if SUCCESS), the result (MempoolInclusion status), and an optional error
        """
        start_time = time.time()
        if self.peak is None:
            return None, MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED

        npc_list = npc_result.npc_list
        assert npc_result.error is None
        if program is None:
            program = simple_solution_generator(new_spend).program
        cost = npc_result.cost

        log.debug(f"Cost: {cost}")

        if cost > int(self.limit_factor * self.constants.MAX_BLOCK_COST_CLVM):
            # we shouldn't ever end up here, since the cost is limited when we
            # execute the CLVM program.
            return None, MempoolInclusionStatus.FAILED, Err.BLOCK_COST_EXCEEDS_MAX

        # build removal list
        removal_names: List[bytes32] = [npc.coin_name for npc in npc_list]
        if set(removal_names) != {s.name() for s in new_spend.removals()}:
            return None, MempoolInclusionStatus.FAILED, Err.INVALID_SPEND_BUNDLE

        additions = additions_for_npc(npc_list)

        additions_dict: Dict[bytes32, Coin] = {}
        for add in additions:
            additions_dict[add.name()] = add

        addition_amount = uint64(0)
        # Check additions for max coin amount
        for coin in additions:
            if coin.amount < 0:
                return (
                    None,
                    MempoolInclusionStatus.FAILED,
                    Err.COIN_AMOUNT_NEGATIVE,
                )
            if coin.amount > self.constants.MAX_COIN_AMOUNT:
                return (
                    None,
                    MempoolInclusionStatus.FAILED,
                    Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
                )
            addition_amount = uint64(addition_amount + coin.amount)
        # Check for duplicate outputs
        addition_counter = collections.Counter(a.name() for a in additions)
        for k, v in addition_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT
        # Check for duplicate inputs
        removal_counter = collections.Counter(removal_names)
        for k, v in removal_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND
        # Skip if already added
        if spend_name in self.mempool.spends:
            return uint64(cost), MempoolInclusionStatus.SUCCESS, None

        removal_record_dict: Dict[bytes32, CoinRecord] = {}
        removal_coin_dict: Dict[bytes32, Coin] = {}
        removal_amount = uint64(0)
        for name in removal_names:
            removal_record = await self.coin_store.get_coin_record(name)
            if removal_record is None and name not in additions_dict:
                return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN_UNSPENT
            elif name in additions_dict:
                removal_coin = additions_dict[name]
                # The timestamp and block-height of this coin being spent needs
                # to be consistent with what we use to check time-lock
                # conditions (below). All spends (including ephemeral coins) are
                # spent simultaneously. Ephemeral coins with an
                # ASSERT_SECONDS_RELATIVE 0 condition are still OK to spend in
                # the same block.
                assert self.peak.timestamp is not None
                removal_record = CoinRecord(
                    removal_coin,
                    uint32(
                        self.peak.height +
                        1),  # In mempool, so will be included in next height
                    uint32(0),
                    False,
                    self.peak.timestamp,
                )

            assert removal_record is not None
            removal_amount = uint64(removal_amount +
                                    removal_record.coin.amount)
            removal_record_dict[name] = removal_record
            removal_coin_dict[name] = removal_record.coin

        removals: List[Coin] = list(removal_coin_dict.values())

        if addition_amount > removal_amount:
            return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN

        fees = uint64(removal_amount - addition_amount)
        assert_fee_sum: uint64 = uint64(0)

        for npc in npc_list:
            if ConditionOpcode.RESERVE_FEE in npc.condition_dict:
                fee_list: List[ConditionWithArgs] = npc.condition_dict[
                    ConditionOpcode.RESERVE_FEE]
                for cvp in fee_list:
                    fee = int_from_bytes(cvp.vars[0])
                    if fee < 0:
                        return None, MempoolInclusionStatus.FAILED, Err.RESERVE_FEE_CONDITION_FAILED
                    assert_fee_sum = assert_fee_sum + fee
        if fees < assert_fee_sum:
            return (
                None,
                MempoolInclusionStatus.FAILED,
                Err.RESERVE_FEE_CONDITION_FAILED,
            )

        if cost == 0:
            return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

        fees_per_cost: float = fees / cost
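        # Worked example with illustrative numbers: removals totalling
        # 1_000_000_000 mojos against additions totalling 999_999_000 mojos
        # leave fees = 1_000 mojos; at cost 10_000_000 that is a rate of
        # 0.0001 mojos per unit of cost.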
        # If the mempool is at capacity, check the fee; otherwise accept even without a fee
        if self.mempool.at_full_capacity(cost):
            if fees_per_cost < self.nonzero_fee_minimum_fpc:
                return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_TOO_CLOSE_TO_ZERO
            if fees_per_cost <= self.mempool.get_min_fee_rate(cost):
                return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
        # Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
        # Use this information later when constructing a block
        fail_reason, conflicts = await self.check_removals(removal_record_dict)
        # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
        conflicting_pool_items: Dict[bytes32, MempoolItem] = {}
        if fail_reason is Err.MEMPOOL_CONFLICT:
            for conflicting in conflicts:
                sb: MempoolItem = self.mempool.removals[conflicting.name()]
                conflicting_pool_items[sb.name] = sb
            if not self.can_replace(conflicting_pool_items,
                                    removal_record_dict, fees, fees_per_cost):
                potential = MempoolItem(new_spend, uint64(fees), npc_result,
                                        cost, spend_name, additions, removals,
                                        program)
                self.potential_cache.add(potential)
                return (
                    uint64(cost),
                    MempoolInclusionStatus.PENDING,
                    Err.MEMPOOL_CONFLICT,
                )

        elif fail_reason:
            return None, MempoolInclusionStatus.FAILED, fail_reason

        # Verify conditions, create hash_key list for aggsig check
        error: Optional[Err] = None
        for npc in npc_list:
            coin_record: CoinRecord = removal_record_dict[npc.coin_name]
            # Check that the revealed removal puzzles actually match the puzzle hash
            if npc.puzzle_hash != coin_record.coin.puzzle_hash:
                log.warning(
                    "Mempool rejecting transaction because of wrong puzzle_hash"
                )
                log.warning(
                    f"{npc.puzzle_hash} != {coin_record.coin.puzzle_hash}")
                return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH

            chialisp_height = (self.peak.prev_transaction_block_height
                               if not self.peak.is_transaction_block else
                               self.peak.height)
            assert self.peak.timestamp is not None
            error = mempool_check_conditions_dict(
                coin_record,
                npc.condition_dict,
                uint32(chialisp_height),
                self.peak.timestamp,
            )

            if error:
                if error is Err.ASSERT_HEIGHT_ABSOLUTE_FAILED or error is Err.ASSERT_HEIGHT_RELATIVE_FAILED:
                    potential = MempoolItem(new_spend, uint64(fees),
                                            npc_result, cost, spend_name,
                                            additions, removals, program)
                    self.potential_cache.add(potential)
                    return uint64(cost), MempoolInclusionStatus.PENDING, error
                break

        if error:
            return None, MempoolInclusionStatus.FAILED, error

        # Remove all conflicting Coins and SpendBundles
        if fail_reason:
            mempool_item: MempoolItem
            for mempool_item in conflicting_pool_items.values():
                self.mempool.remove_from_pool(mempool_item)

        new_item = MempoolItem(new_spend, uint64(fees), npc_result, cost,
                               spend_name, additions, removals, program)
        self.mempool.add_to_pool(new_item)
        now = time.time()
        log.log(
            logging.DEBUG,
            f"add_spendbundle {spend_name} took {now - start_time:0.2f} seconds. "
            f"Cost: {cost} ({round(100.0 * cost/self.constants.MAX_BLOCK_COST_CLVM, 3)}% of max block cost)",
        )

        return uint64(cost), MempoolInclusionStatus.SUCCESS, None
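
A minimal sketch of driving add_spendbundle from a caller, assuming an initialized mempool_manager; compute_npc_result is a hypothetical stand-in for however the NPCResult is produced in your setup:

async def try_add(mempool_manager, spend_bundle: SpendBundle) -> bool:
    # compute_npc_result is an assumed helper, not part of the code above
    npc_result: NPCResult = await compute_npc_result(spend_bundle)
    cost, status, error = await mempool_manager.add_spendbundle(
        spend_bundle, npc_result, spend_bundle.name())
    if status == MempoolInclusionStatus.SUCCESS:
        log.info(f"accepted, cost {cost}")
        return True
    log.warning(f"rejected ({status}): {error}")
    return False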
Example No. 26
    async def _create_offer_for_ids(
        self, offer: Dict[int, int]
    ) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
        """
        Offer is dictionary of wallet ids and amount
        """
        spend_bundle = None
        try:
            for id, amount in offer.items():
                wallet_id = uint32(int(id))
                wallet = self.wallet_state_manager.wallets[wallet_id]
                if isinstance(wallet, CCWallet):
                    balance = await wallet.get_confirmed_balance()
                    if balance < abs(amount) and amount < 0:
                        raise Exception(
                            f"insufficient funds in wallet {wallet_id}")
                    if amount > 0:
                        if spend_bundle is None:
                            to_exclude: List[Coin] = []
                        else:
                            to_exclude = spend_bundle.removals()
                        zero_spend_bundle: SpendBundle = await wallet.generate_zero_val_coin(
                            False, to_exclude)

                        if spend_bundle is None:
                            spend_bundle = zero_spend_bundle
                        else:
                            spend_bundle = SpendBundle.aggregate(
                                [spend_bundle, zero_spend_bundle])

                        additions = zero_spend_bundle.additions()
                        removals = zero_spend_bundle.removals()
                        zero_val_coin: Optional[Coin] = None
                        for add in additions:
                            if add not in removals and add.amount == 0:
                                zero_val_coin = add
                        new_spend_bundle = await wallet.create_spend_bundle_relative_amount(
                            amount, zero_val_coin)
                    else:
                        new_spend_bundle = await wallet.create_spend_bundle_relative_amount(
                            amount)
                elif isinstance(wallet, Wallet):
                    if spend_bundle is None:
                        to_exclude = []
                    else:
                        to_exclude = spend_bundle.removals()
                    new_spend_bundle = await wallet.create_spend_bundle_relative_chia(
                        amount, to_exclude)
                else:
                    return False, None, "unsupported wallet type"
                if new_spend_bundle is None or new_spend_bundle.removals(
                ) == []:
                    raise Exception(f"Wallet {id} was unable to create offer.")
                if spend_bundle is None:
                    spend_bundle = new_spend_bundle
                else:
                    spend_bundle = SpendBundle.aggregate(
                        [spend_bundle, new_spend_bundle])

            if spend_bundle is None:
                return False, None, None

            now = uint64(int(time.time()))
            trade_offer: TradeRecord = TradeRecord(
                confirmed_at_index=uint32(0),
                accepted_at_time=None,
                created_at_time=now,
                my_offer=True,
                sent=uint32(0),
                spend_bundle=spend_bundle,
                tx_spend_bundle=None,
                additions=spend_bundle.additions(),
                removals=spend_bundle.removals(),
                trade_id=std_hash(spend_bundle.name() + bytes(now)),
                status=uint32(TradeStatus.PENDING_ACCEPT.value),
                sent_to=[],
            )
            return True, trade_offer, None
        except Exception as e:
            tb = traceback.format_exc()
            self.log.error(f"Error with creating trade offer: {type(e)}{tb}")
            return False, None, str(e)
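
A hedged example of the offer dictionary this method expects; the wallet IDs, amounts, and the trade_manager instance are illustrative assumptions:

# Wallet 1 gives away 100 units; wallet 2 should receive 30 in return.
offer = {1: -100, 2: 30}
success, trade_record, error = await trade_manager._create_offer_for_ids(offer)
if not success:
    print(f"offer creation failed: {error}")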
Example No. 27
    async def new_signage_point_harvester(
            self, new_challenge: harvester_protocol.NewSignagePointHarvester,
            peer: WSChiaConnection):
        """
        The harvester receives a new signage point from the farmer, this happens at the start of each slot.
        The harvester does a few things:
        1. The harvester applies the plot filter for each of the plots, to select the proportion which are eligible
        for this signage point and challenge.
        2. The harvester gets the qualities for each plot. This is approximately 7 reads per plot which qualifies.
        Note that each plot may have 0, 1, 2, etc qualities for that challenge: but on average it will have 1.
        3. Checks the required_iters for each quality and the given signage point, to see which are eligible for
        inclusion (required_iters < sp_interval_iters).
        4. Looks up the full proof of space in the plot for each quality, approximately 64 reads per quality
        5. Returns the proof of space to the farmer
        """
        if len(self.harvester.pool_public_keys) == 0 or len(
                self.harvester.farmer_public_keys) == 0:
            # This means that we have not received the handshake yet
            return None

        start = time.time()
        assert len(new_challenge.challenge_hash) == 32

        # Refresh plots to see if there are any new ones
        if start - self.harvester.last_load_time > self.harvester.plot_load_frequency:
            await self.harvester.refresh_plots()
            self.harvester.last_load_time = time.time()

        loop = asyncio.get_running_loop()

        def blocking_lookup(
                filename: Path,
                plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
            # Uses the DiskProver object to lookup qualities. This is a blocking call,
            # so it should be run in a thread pool.
            try:
                plot_id = plot_info.prover.get_id()
                sp_challenge_hash = ProofOfSpace.calculate_pos_challenge(
                    plot_id,
                    new_challenge.challenge_hash,
                    new_challenge.sp_hash,
                )
                try:
                    quality_strings = plot_info.prover.get_qualities_for_challenge(
                        sp_challenge_hash)
                except Exception as e:
                    self.harvester.log.error(f"Error using prover object {e}")
                    self.harvester.log.error(
                        f"File: {filename} Plot ID: {plot_id.hex()}, "
                        f"challenge: {sp_challenge_hash}, plot_info: {plot_info}"
                    )
                    return []

                responses: List[Tuple[bytes32, ProofOfSpace]] = []
                if quality_strings is not None:
                    # Found proofs of space (on average 1 is expected per plot)
                    for index, quality_str in enumerate(quality_strings):
                        required_iters: uint64 = calculate_iterations_quality(
                            self.harvester.constants.
                            DIFFICULTY_CONSTANT_FACTOR,
                            quality_str,
                            plot_info.prover.get_size(),
                            new_challenge.difficulty,
                            new_challenge.sp_hash,
                        )
                        sp_interval_iters = calculate_sp_interval_iters(
                            self.harvester.constants,
                            new_challenge.sub_slot_iters)
                        if required_iters < sp_interval_iters:
                            # Found a good proof of space. Fetch the whole proof from disk,
                            # then send it to the farmer.
                            try:
                                proof_xs = plot_info.prover.get_full_proof(
                                    sp_challenge_hash, index)
                            except Exception as e:
                                self.harvester.log.error(
                                    f"Exception fetching full proof for {filename}. {e}"
                                )
                                self.harvester.log.error(
                                    f"File: {filename} Plot ID: {plot_id.hex()}, challenge: {sp_challenge_hash}, "
                                    f"plot_info: {plot_info}")
                                continue

                            # Look up local_sk from plot to save locked memory
                            (
                                pool_public_key_or_puzzle_hash,
                                farmer_public_key,
                                local_master_sk,
                            ) = parse_plot_info(plot_info.prover.get_memo())
                            local_sk = master_sk_to_local_sk(local_master_sk)
                            plot_public_key = ProofOfSpace.generate_plot_public_key(
                                local_sk.get_g1(), farmer_public_key)
                            responses.append((
                                quality_str,
                                ProofOfSpace(
                                    sp_challenge_hash,
                                    plot_info.pool_public_key,
                                    plot_info.pool_contract_puzzle_hash,
                                    plot_public_key,
                                    uint8(plot_info.prover.get_size()),
                                    proof_xs,
                                ),
                            ))
                return responses
            except Exception as e:
                self.harvester.log.error(f"Unknown error: {e}")
                return []

        async def lookup_challenge(
            filename: Path, plot_info: PlotInfo
        ) -> Tuple[Path, List[harvester_protocol.NewProofOfSpace]]:
            # Executes a DiskProverLookup in a thread pool, and returns responses
            all_responses: List[harvester_protocol.NewProofOfSpace] = []
            if self.harvester._is_shutdown:
                return filename, []
            proofs_of_space_and_q: List[Tuple[
                bytes32, ProofOfSpace]] = await loop.run_in_executor(
                    self.harvester.executor, blocking_lookup, filename,
                    plot_info)
            for quality_str, proof_of_space in proofs_of_space_and_q:
                all_responses.append(
                    harvester_protocol.NewProofOfSpace(
                        new_challenge.challenge_hash,
                        new_challenge.sp_hash,
                        quality_str.hex() + str(filename.resolve()),
                        proof_of_space,
                        new_challenge.signage_point_index,
                    ))
            return filename, all_responses

        awaitables = []
        passed = 0
        total = 0
        for try_plot_filename, try_plot_info in self.harvester.provers.items():
            try:
                if try_plot_filename.exists():
                    # Passes the plot filter (does not check sp filter yet though, since we have not reached sp)
                    # This is being executed at the beginning of the slot
                    total += 1
                    if ProofOfSpace.passes_plot_filter(
                            self.harvester.constants,
                            try_plot_info.prover.get_id(),
                            new_challenge.challenge_hash,
                            new_challenge.sp_hash,
                    ):
                        passed += 1
                        awaitables.append(
                            lookup_challenge(try_plot_filename, try_plot_info))
            except Exception as e:
                self.harvester.log.error(
                    f"Error plot file {try_plot_filename} may no longer exist {e}"
                )

        # Concurrently executes all lookups on disk, to take advantage of multiple disk parallelism
        total_proofs_found = 0
        for filename_sublist_awaitable in asyncio.as_completed(awaitables):
            filename, sublist = await filename_sublist_awaitable
            time_taken = time.time() - start
            if time_taken > 5:
                self.harvester.log.warning(
                    f"Looking up qualities on {filename} took: {time_taken}. This should be below 5 seconds "
                    f"to minimize risk of losing rewards.")
            else:
                pass
                # If you want additional logs, uncomment the following line
                # self.harvester.log.debug(f"Looking up qualities on {filename} took: {time_taken}")
            for response in sublist:
                total_proofs_found += 1
                msg = make_msg(ProtocolMessageTypes.new_proof_of_space,
                               response)
                await peer.send_message(msg)

        now = uint64(int(time.time()))
        farming_info = FarmingInfo(
            new_challenge.challenge_hash,
            new_challenge.sp_hash,
            now,
            uint32(passed),
            uint32(total_proofs_found),
            uint32(total),
        )
        pass_msg = make_msg(ProtocolMessageTypes.farming_info, farming_info)
        await peer.send_message(pass_msg)
        self.harvester.log.info(
            f"{len(awaitables)} plots were eligible for farming {new_challenge.challenge_hash.hex()[:10]}..."
            f" Found {total_proofs_found} proofs. Time: {time.time() - start:.5f} s. "
            f"Total {len(self.harvester.provers)} plots")
Example No. 28
    async def respond_to_offer(
            self, file_path: Path
    ) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
        has_wallets = await self.maybe_create_wallets_for_offer(file_path)
        if not has_wallets:
            return False, None, "Unknown Error"
        trade_offer = None
        try:
            trade_offer_hex = file_path.read_text()
            trade_offer = TradeRecord.from_bytes(
                hexstr_to_bytes(trade_offer_hex))
        except Exception as e:
            return False, None, f"Error: {e}"
        # The except clause above returns on failure, so trade_offer is set here
        offer_spend_bundle: SpendBundle = trade_offer.spend_bundle

        coinsols = []  # list of CoinSolutions
        cc_coinsol_outamounts: Dict[bytes32, List[Tuple[Any, int]]] = dict()
        aggsig = offer_spend_bundle.aggregated_signature
        cc_discrepancies: Dict[bytes32, int] = dict()
        chia_discrepancy = None
        wallets: Dict[bytes32, Any] = dict()  # colour to wallet dict

        for coinsol in offer_spend_bundle.coin_solutions:
            puzzle: Program = coinsol.puzzle_reveal
            solution: Program = coinsol.solution

            # work out the deficits between coin amount and expected output for each
            r = cc_utils.uncurry_cc(puzzle)
            if r:
                # Calculate output amounts
                mod_hash, genesis_checker, inner_puzzle = r
                colour = bytes(genesis_checker).hex()
                if colour not in wallets:
                    wallets[
                        colour] = await self.wallet_state_manager.get_wallet_for_colour(
                            colour)
                unspent = await self.wallet_state_manager.get_spendable_coins_for_wallet(
                    wallets[colour].id())
                if coinsol.coin in [record.coin for record in unspent]:
                    return False, None, "can't respond to own offer"

                innersol = solution.first()

                total = get_output_amount_for_puzzle_and_solution(
                    inner_puzzle, innersol)
                if colour in cc_discrepancies:
                    cc_discrepancies[colour] += coinsol.coin.amount - total
                else:
                    cc_discrepancies[colour] = coinsol.coin.amount - total
                # Store coinsol and output amount for later
                if colour in cc_coinsol_outamounts:
                    cc_coinsol_outamounts[colour].append((coinsol, total))
                else:
                    cc_coinsol_outamounts[colour] = [(coinsol, total)]

            else:
                # standard chia coin
                unspent = await self.wallet_state_manager.get_spendable_coins_for_wallet(
                    1)
                if coinsol.coin in [record.coin for record in unspent]:
                    return False, None, "can't respond to own offer"
                if chia_discrepancy is None:
                    chia_discrepancy = get_output_discrepancy_for_puzzle_and_solution(
                        coinsol.coin, puzzle, solution)
                else:
                    chia_discrepancy += get_output_discrepancy_for_puzzle_and_solution(
                        coinsol.coin, puzzle, solution)
                coinsols.append(coinsol)

        chia_spend_bundle: Optional[SpendBundle] = None
        if chia_discrepancy is not None:
            chia_spend_bundle = await self.wallet_state_manager.main_wallet.create_spend_bundle_relative_chia(
                chia_discrepancy, [])
            if chia_spend_bundle is not None:
                for coinsol in coinsols:
                    chia_spend_bundle.coin_solutions.append(coinsol)

        zero_spend_list: List[SpendBundle] = []
        spend_bundle = None
        # create coloured coin
        self.log.info(cc_discrepancies)
        for colour in cc_discrepancies.keys():
            if cc_discrepancies[colour] < 0:
                my_cc_spends = await wallets[colour].select_coins(
                    abs(cc_discrepancies[colour]))
            else:
                if chia_spend_bundle is None:
                    to_exclude: List = []
                else:
                    to_exclude = chia_spend_bundle.removals()
                my_cc_spends = await wallets[colour].select_coins(0)
                if not my_cc_spends:
                    zero_spend_bundle: SpendBundle = await wallets[
                        colour].generate_zero_val_coin(False, to_exclude)
                    if zero_spend_bundle is None:
                        return (
                            False,
                            None,
                            "Unable to generate zero value coin. Confirm that you have chia available",
                        )
                    zero_spend_list.append(zero_spend_bundle)

                    additions = zero_spend_bundle.additions()
                    removals = zero_spend_bundle.removals()
                    my_cc_spends = set()
                    for add in additions:
                        if add not in removals and add.amount == 0:
                            my_cc_spends.add(add)

            if not my_cc_spends:
                return False, None, "insufficient funds"

            # Create SpendableCC list and innersol_list with both my coins and the offered coins
            # Firstly get the output coin
            my_output_coin = my_cc_spends.pop()
            spendable_cc_list = []
            innersol_list = []
            genesis_id = genesis_coin_id_for_genesis_coin_checker(
                Program.from_bytes(bytes.fromhex(colour)))
            # Make the rest of the coins assert the output coin is consumed
            for coloured_coin in my_cc_spends:
                inner_solution = self.wallet_state_manager.main_wallet.make_solution(
                    consumed=[my_output_coin.name()])
                inner_puzzle = await self.get_inner_puzzle_for_puzzle_hash(
                    coloured_coin.puzzle_hash)
                assert inner_puzzle is not None

                sigs = await wallets[colour].get_sigs(inner_puzzle,
                                                      inner_solution,
                                                      coloured_coin.name())
                sigs.append(aggsig)
                aggsig = AugSchemeMPL.aggregate(sigs)

                lineage_proof = await wallets[
                    colour].get_lineage_proof_for_coin(coloured_coin)
                spendable_cc_list.append(
                    SpendableCC(coloured_coin, genesis_id, inner_puzzle,
                                lineage_proof))
                innersol_list.append(inner_solution)

            # Create SpendableCC for each of the coloured coins received
            for cc_coinsol_out in cc_coinsol_outamounts[colour]:
                cc_coinsol = cc_coinsol_out[0]
                puzzle = cc_coinsol.puzzle_reveal
                solution = cc_coinsol.solution

                r = uncurry_cc(puzzle)
                if r:
                    mod_hash, genesis_coin_checker, inner_puzzle = r
                    inner_solution = solution.first()
                    lineage_proof = solution.rest().rest().first()
                    spendable_cc_list.append(
                        SpendableCC(cc_coinsol.coin, genesis_id, inner_puzzle,
                                    lineage_proof))
                    innersol_list.append(inner_solution)

            # Finish the output coin SpendableCC with new information
            newinnerpuzhash = await wallets[colour].get_new_inner_hash()
            outputamount = sum([
                c.amount for c in my_cc_spends
            ]) + cc_discrepancies[colour] + my_output_coin.amount
            inner_solution = self.wallet_state_manager.main_wallet.make_solution(
                primaries=[{
                    "puzzlehash": newinnerpuzhash,
                    "amount": outputamount
                }])
            inner_puzzle = await self.get_inner_puzzle_for_puzzle_hash(
                my_output_coin.puzzle_hash)
            assert inner_puzzle is not None

            lineage_proof = await wallets[colour].get_lineage_proof_for_coin(
                my_output_coin)
            spendable_cc_list.append(
                SpendableCC(my_output_coin, genesis_id, inner_puzzle,
                            lineage_proof))
            innersol_list.append(inner_solution)

            sigs = await wallets[colour].get_sigs(inner_puzzle, inner_solution,
                                                  my_output_coin.name())
            sigs.append(aggsig)
            aggsig = AugSchemeMPL.aggregate(sigs)
            if spend_bundle is None:
                spend_bundle = spend_bundle_for_spendable_ccs(
                    CC_MOD,
                    Program.from_bytes(bytes.fromhex(colour)),
                    spendable_cc_list,
                    innersol_list,
                    [aggsig],
                )
            else:
                new_spend_bundle = spend_bundle_for_spendable_ccs(
                    CC_MOD,
                    Program.from_bytes(bytes.fromhex(colour)),
                    spendable_cc_list,
                    innersol_list,
                    [aggsig],
                )
                spend_bundle = SpendBundle.aggregate(
                    [spend_bundle, new_spend_bundle])
            # reset sigs and aggsig so that they aren't included next time around
            sigs = []
            aggsig = AugSchemeMPL.aggregate(sigs)
        my_tx_records = []
        if len(zero_spend_list) > 0 and spend_bundle is not None:
            zero_spend_list.append(spend_bundle)
            spend_bundle = SpendBundle.aggregate(zero_spend_list)

        if spend_bundle is None:
            return False, None, "spend_bundle missing"

        # Add transaction history for this trade
        now = uint64(int(time.time()))
        if chia_spend_bundle is not None:
            spend_bundle = SpendBundle.aggregate(
                [spend_bundle, chia_spend_bundle])
            # debug_spend_bundle(spend_bundle)
            if chia_discrepancy < 0:
                tx_record = TransactionRecord(
                    confirmed_at_height=uint32(0),
                    created_at_time=now,
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(chia_discrepancy)),
                    fee_amount=uint64(0),
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=chia_spend_bundle,
                    additions=chia_spend_bundle.additions(),
                    removals=chia_spend_bundle.removals(),
                    wallet_id=uint32(1),
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                    type=uint32(TransactionType.OUTGOING_TRADE.value),
                    name=chia_spend_bundle.name(),
                )
            else:
                tx_record = TransactionRecord(
                    confirmed_at_height=uint32(0),
                    created_at_time=uint64(int(time.time())),
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(chia_discrepancy)),
                    fee_amount=uint64(0),
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=chia_spend_bundle,
                    additions=chia_spend_bundle.additions(),
                    removals=chia_spend_bundle.removals(),
                    wallet_id=uint32(1),
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                    type=uint32(TransactionType.INCOMING_TRADE.value),
                    name=chia_spend_bundle.name(),
                )
            my_tx_records.append(tx_record)

        for colour, amount in cc_discrepancies.items():
            wallet = wallets[colour]
            if chia_discrepancy is not None and chia_discrepancy > 0:
                tx_record = TransactionRecord(
                    confirmed_at_height=uint32(0),
                    created_at_time=uint64(int(time.time())),
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(amount)),
                    fee_amount=uint64(0),
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=spend_bundle,
                    additions=spend_bundle.additions(),
                    removals=spend_bundle.removals(),
                    wallet_id=wallet.id(),
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                    type=uint32(TransactionType.OUTGOING_TRADE.value),
                    name=spend_bundle.name(),
                )
            else:
                tx_record = TransactionRecord(
                    confirmed_at_height=uint32(0),
                    created_at_time=uint64(int(time.time())),
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(amount)),
                    fee_amount=uint64(0),
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=spend_bundle,
                    additions=spend_bundle.additions(),
                    removals=spend_bundle.removals(),
                    wallet_id=wallet.id(),
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                    type=uint32(TransactionType.INCOMING_TRADE.value),
                    name=token_bytes(),
                )
            my_tx_records.append(tx_record)

        tx_record = TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=token_bytes(),
            amount=uint64(0),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=uint32(0),
            sent_to=[],
            trade_id=std_hash(spend_bundle.name() + bytes(now)),
            type=uint32(TransactionType.OUTGOING_TRADE.value),
            name=spend_bundle.name(),
        )

        now = uint64(int(time.time()))
        trade_record: TradeRecord = TradeRecord(
            confirmed_at_index=uint32(0),
            accepted_at_time=now,
            created_at_time=now,
            my_offer=False,
            sent=uint32(0),
            spend_bundle=offer_spend_bundle,
            tx_spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            trade_id=std_hash(spend_bundle.name() + bytes(now)),
            status=uint32(TradeStatus.PENDING_CONFIRM.value),
            sent_to=[],
        )

        await self.save_trade(trade_record)
        await self.wallet_state_manager.add_pending_transaction(tx_record)
        for tx in my_tx_records:
            await self.wallet_state_manager.add_transaction(tx)

        return True, trade_record, None
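
A sketch of a caller responding to a serialized offer on disk, assuming a trade_manager exposing this method; the file name is illustrative:

from pathlib import Path

success, trade_record, error = await trade_manager.respond_to_offer(Path("received.offer"))
if success:
    print(f"trade pending confirmation: {trade_record.trade_id.hex()}")
else:
    print(f"could not respond: {error}")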
Example No. 29
    def validate_spend_bundle(
        self,
        spend_bundle: SpendBundle,
        now: CoinTimestamp,
        max_cost: int,
    ) -> int:
        # this should use blockchain consensus code

        coin_announcements: Set[bytes32] = set()
        puzzle_announcements: Set[bytes32] = set()

        conditions_dicts = []
        for coin_spend in spend_bundle.coin_spends:
            assert isinstance(coin_spend.coin, Coin)
            err, conditions_dict, cost = conditions_dict_for_solution(
                coin_spend.puzzle_reveal, coin_spend.solution, max_cost)
            if conditions_dict is None:
                raise BadSpendBundleError(f"clvm validation failure {err}")
            conditions_dicts.append(conditions_dict)
            coin_announcements.update(
                coin_announcement_names_for_conditions_dict(
                    conditions_dict,
                    coin_spend.coin,
                ))
            puzzle_announcements.update(
                puzzle_announcement_names_for_conditions_dict(
                    conditions_dict,
                    coin_spend.coin,
                ))

        ephemeral_db = dict(self._db)
        for coin in spend_bundle.additions():
            name = coin.name()
            ephemeral_db[name] = CoinRecord(
                coin,
                uint32(now.height),
                uint32(0),
                False,
                False,
                uint64(now.seconds),
            )

        for coin_spend, conditions_dict in zip(spend_bundle.coin_spends,
                                               conditions_dicts):  # noqa
            prev_transaction_block_height = now.height
            timestamp = now.seconds
            coin_record = ephemeral_db.get(coin_spend.coin.name())
            if coin_record is None:
                raise BadSpendBundleError(
                    f"coin not found for id 0x{coin_spend.coin.name().hex()}"
                )  # noqa
            err = mempool_check_conditions_dict(
                coin_record,
                coin_announcements,
                puzzle_announcements,
                conditions_dict,
                uint32(prev_transaction_block_height),
                uint64(timestamp),
            )
            # Raise inside the loop so a failure in any spend is reported,
            # not just the last one
            if err is not None:
                raise BadSpendBundleError(f"condition validation failure {err}")

        return 0
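
A hedged usage sketch of validate_spend_bundle against the simulated coin store; coin_store, bundle, and the timestamp values are assumptions:

now = CoinTimestamp(10_000, 5)  # (seconds, height), illustrative values
try:
    coin_store.validate_spend_bundle(bundle, now, max_cost=11_000_000_000)
except BadSpendBundleError as e:
    print(f"invalid spend bundle: {e}")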
Example No. 30
    async def create_more_puzzle_hashes(self, from_zero: bool = False):
        """
        For all wallets in the user store, generates the first few puzzle hashes so
        that we can restore the wallet from only the private keys.
        """
        targets = list(self.wallets.keys())

        unused: Optional[
            uint32] = await self.puzzle_store.get_unused_derivation_path()
        if unused is None:
            # This handles the case where the database has entries but they have all been used
            unused = await self.puzzle_store.get_last_derivation_path()
            if unused is None:
                # This handles the case where the database is empty
                unused = uint32(0)

        if self.new_wallet:
            to_generate = self.config["initial_num_public_keys_new_wallet"]
        else:
            to_generate = self.config["initial_num_public_keys"]

        for wallet_id in targets:
            target_wallet = self.wallets[wallet_id]

            last: Optional[
                uint32] = await self.puzzle_store.get_last_derivation_path_for_wallet(
                    wallet_id)

            start_index = 0
            derivation_paths: List[DerivationRecord] = []

            if last is not None:
                start_index = last + 1

            # If the key was replaced (from_zero=True), we should generate the puzzle hashes for the new key
            if from_zero:
                start_index = 0

            for index in range(start_index, unused + to_generate):
                if WalletType(target_wallet.type()) == WalletType.RATE_LIMITED:
                    if target_wallet.rl_info.initialized is False:
                        break
                    wallet_type = target_wallet.rl_info.type
                    if wallet_type == "user":
                        rl_pubkey = G1Element.from_bytes(
                            target_wallet.rl_info.user_pubkey)
                    else:
                        rl_pubkey = G1Element.from_bytes(
                            target_wallet.rl_info.admin_pubkey)
                    rl_puzzle: Program = target_wallet.puzzle_for_pk(rl_pubkey)
                    puzzle_hash: bytes32 = rl_puzzle.get_tree_hash()

                    rl_index = self.get_derivation_index(rl_pubkey)
                    if rl_index == -1:
                        break

                    derivation_paths.append(
                        DerivationRecord(
                            uint32(rl_index),
                            puzzle_hash,
                            rl_pubkey,
                            target_wallet.type(),
                            uint32(target_wallet.id()),
                        ))
                    break

                pubkey: G1Element = self.get_public_key(uint32(index))
                puzzle: Program = target_wallet.puzzle_for_pk(bytes(pubkey))
                if puzzle is None:
                    self.log.warning(
                        f"Unable to create puzzles with wallet {target_wallet}"
                    )
                    break
                puzzlehash: bytes32 = puzzle.get_tree_hash()
                self.log.info(
                    f"Puzzle at index {index} wallet ID {wallet_id} puzzle hash {puzzlehash.hex()}"
                )
                derivation_paths.append(
                    DerivationRecord(
                        uint32(index),
                        puzzlehash,
                        pubkey,
                        target_wallet.type(),
                        uint32(target_wallet.id()),
                    ))

            await self.puzzle_store.add_derivation_paths(derivation_paths)
        if unused > 0:
            await self.puzzle_store.set_used_up_to(uint32(unused - 1))
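
A worked example of the derivation-index window the loop above walks, with illustrative numbers:

# Suppose the wallet's last derivation path is 40, the first unused path is 35,
# and the config asks for 100 keys: indices 41..134 get generated.
last, unused, to_generate = 40, 35, 100
start_index = last + 1
new_indices = range(start_index, unused + to_generate)
assert min(new_indices) == 41 and max(new_indices) == 134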