Example #1
    def __init__(
        self,
        root_path: Path,
        farmer_config: Dict,
        pool_config: Dict,
        keychain: Keychain,
        consensus_constants: ConsensusConstants,
    ):
        self._root_path = root_path
        self.config = farmer_config
        # Keep track of all sps, keyed on challenge chain signage point hash
        self.sps: Dict[bytes32, List[farmer_protocol.NewSignagePoint]] = {}

        # Keep track of harvester plot identifier (str), target sp index, and PoSpace for each challenge
        self.proofs_of_space: Dict[bytes32, List[Tuple[str, ProofOfSpace]]] = {}

        # Quality string to plot identifier and challenge_hash, for use with harvester.RequestSignatures
        self.quality_str_to_identifiers: Dict[bytes32, Tuple[str, bytes32, bytes32, bytes32]] = {}

        # number of responses to each signage point
        self.number_of_responses: Dict[bytes32, int] = {}

        # A dictionary of keys to time added. These keys refer to keys in the above 4 dictionaries. This is used
        # to periodically clear the memory
        self.cache_add_time: Dict[bytes32, uint64] = {}

        self.cache_clear_task: asyncio.Task
        self.constants = consensus_constants
        self._shut_down = False
        self.server: Any = None
        self.keychain = keychain
        self.state_changed_callback: Optional[Callable] = None
        self.log = log
        all_sks = self.keychain.get_all_private_keys()
        self._private_keys = [master_sk_to_farmer_sk(sk) for sk, _ in all_sks] + [
            master_sk_to_pool_sk(sk) for sk, _ in all_sks
        ]

        if len(self.get_public_keys()) == 0:
            error_str = "No keys exist. Please run 'chia keys generate' or open the UI."
            raise RuntimeError(error_str)

        # This is the farmer configuration
        self.farmer_target_encoded = self.config["xch_target_address"]
        self.farmer_target = decode_puzzle_hash(self.farmer_target_encoded)

        self.pool_public_keys = [G1Element.from_bytes(bytes.fromhex(pk)) for pk in self.config["pool_public_keys"]]

        # This is the pool configuration, which should be moved out to the pool once it exists
        self.pool_target_encoded = pool_config["xch_target_address"]
        self.pool_target = decode_puzzle_hash(self.pool_target_encoded)
        self.pool_sks_map: Dict = {}
        for key in self.get_private_keys():
            self.pool_sks_map[bytes(key.get_g1())] = key

        assert len(self.farmer_target) == 32
        assert len(self.pool_target) == 32
        if len(self.pool_sks_map) == 0:
            error_str = "No keys exist. Please run 'chia keys generate' or open the UI."
            raise RuntimeError(error_str)
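The examples in this collection revolve around converting bech32m addresses to raw 32-byte puzzle hashes and back. A minimal round-trip sketch, using the ph1 address/hash pair that a later prefarm example in this collection asserts, and assuming only the chia.util.bech32m helpers imported in Example #23:

from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash

# Address/puzzle-hash pair taken from the prefarm examples below (ph1).
address = "xch1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6"
ph = decode_puzzle_hash(address)  # 32-byte puzzle hash
assert len(ph) == 32
assert ph.hex() == "1b7ab2079fa635554ad9bd4812c622e46ee3b1875a7813afba127bb0cc9794f9"
assert encode_puzzle_hash(ph, "xch") == address  # round-trip back to the address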
Example #2
def solution():
    primaries = []
    primaries.append({
        "puzzlehash": decode_puzzle_hash("txch1wc3xkqtf9a8u3cqkyeftnwz5rkqvvxpy2hyd0ctxe2p2d63mmurs95szsd"),
        "amount": uint64(1 * 10**10),
    })
    primaries.append({
        "puzzlehash": decode_puzzle_hash("txch1uagpz8ma9g4ee2hhncjw5pscly2dpr5phve25d5gluarfwaxze3q2n8mfn"),
        "amount": uint64(3 * 10**10),
    })

    first_spend: bool = True
    if first_spend:
        solution: Program = Wallet().make_solution(primaries=primaries)
        first_spend = False
    else:
        solution = Wallet().make_solution()
    s = "ff80ffff01ffff33ffa076226b01692f4fc8e0162652b9b8541d80c6182455c8d7e166ca82a6ea3bdf07ff8502540be40080ffff33ffa0e750111f7d2a2b9caaf79e24ea0618f914d08e81bb32aa3688ff3a34bba61662ff8506fc23ac008080ff8080"
    return str(solution) == s
Example #3
async def change_payout_instructions(launcher_id: str, address: str) -> None:
    new_pool_configs: List[PoolWalletConfig] = []
    id_found = False
    # decode_puzzle_hash raises ValueError on a malformed address rather than returning a
    # falsy value, so catch it here to keep the "Invalid Address" branch below reachable.
    try:
        decoded_address = decode_puzzle_hash(address)
    except ValueError:
        decoded_address = None
    if decoded_address:
        old_configs: List[PoolWalletConfig] = load_pool_config(
            DEFAULT_ROOT_PATH)
        for pool_config in old_configs:
            if pool_config.launcher_id == hexstr_to_bytes(launcher_id):
                id_found = True
                pool_config = replace(
                    pool_config,
                    payout_instructions=decode_puzzle_hash(address).hex())
            new_pool_configs.append(pool_config)
        if id_found:
            print(f"Launcher Id: {launcher_id} Found, Updating Config.")
            await update_pool_config(DEFAULT_ROOT_PATH, new_pool_configs)
            print(
                f"Payout Instructions for launcher id: {launcher_id} successfully updated to: {address}."
            )
            print(
                f"You will need to change the payout instructions on every device you use to: {address}."
            )
        else:
            print(f"Launcher Id: {launcher_id} Not found.")
    else:
        print(f"Invalid Address: {address}")
Example #4
async def main() -> None:
    rpc_port: uint16 = uint16(8555)
    self_hostname = "localhost"
    path = DEFAULT_ROOT_PATH
    config = load_config(path, "config.yaml")
    client = await FullNodeRpcClient.create(self_hostname, rpc_port, path,
                                            config)
    try:
        farmer_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[1]
        pool_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[0]

        pool_amounts = int(calculate_pool_reward(uint32(0)) / 2)
        farmer_amounts = int(calculate_base_farmer_reward(uint32(0)) / 2)
        print(farmer_prefarm.amount, farmer_amounts)
        assert farmer_amounts == farmer_prefarm.amount // 2
        assert pool_amounts == pool_prefarm.amount // 2
        address1 = "xch1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6"  # Key 1
        address2 = "xch1duvy5ur5eyj7lp5geetfg84cj2d7xgpxt7pya3lr2y6ke3696w9qvda66e"  # Key 2

        ph1 = decode_puzzle_hash(address1)
        ph2 = decode_puzzle_hash(address2)

        p_farmer_2 = Program.to(
            binutils.assemble(
                f"(q . ((51 0x{ph1.hex()} {farmer_amounts}) (51 0x{ph2.hex()} {farmer_amounts})))"
            ))
        p_pool_2 = Program.to(
            binutils.assemble(
                f"(q . ((51 0x{ph1.hex()} {pool_amounts}) (51 0x{ph2.hex()} {pool_amounts})))"
            ))

        p_solution = Program.to(binutils.assemble("()"))

        sb_farmer = SpendBundle(
            [CoinSolution(farmer_prefarm, p_farmer_2, p_solution)],
            G2Element())
        sb_pool = SpendBundle(
            [CoinSolution(pool_prefarm, p_pool_2, p_solution)], G2Element())

        print(sb_pool, sb_farmer)
        res = await client.push_tx(sb_farmer)
        # res = await client.push_tx(sb_pool)

        print(res)
        up = await client.get_coin_records_by_puzzle_hash(
            farmer_prefarm.puzzle_hash, True)
        uf = await client.get_coin_records_by_puzzle_hash(
            pool_prefarm.puzzle_hash, True)
        print(up)
        print(uf)
    finally:
        client.close()
Example #5
 def set_reward_targets(self, farmer_target_encoded: Optional[str], pool_target_encoded: Optional[str]):
     config = load_config(self._root_path, "config.yaml")
     if farmer_target_encoded is not None:
         self.farmer_target_encoded = farmer_target_encoded
         self.farmer_target = decode_puzzle_hash(farmer_target_encoded)
         config["farmer"]["xch_target_address"] = farmer_target_encoded
     if pool_target_encoded is not None:
         self.pool_target_encoded = pool_target_encoded
         self.pool_target = decode_puzzle_hash(pool_target_encoded)
         config["pool"]["xch_target_address"] = pool_target_encoded
     save_config(self._root_path, "config.yaml", config)
Example #6
async def test_farmer_reward_target_endpoints(environment):
    (
        farmer_service,
        farmer_rpc_api,
        farmer_rpc_client,
        harvester_service,
        harvester_rpc_api,
        harvester_rpc_client,
    ) = environment
    farmer_api = farmer_service._api

    targets_1 = await farmer_rpc_client.get_reward_targets(False)
    assert "have_pool_sk" not in targets_1
    assert "have_farmer_sk" not in targets_1
    targets_2 = await farmer_rpc_client.get_reward_targets(True)
    assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

    new_ph: bytes32 = create_puzzlehash_for_pk(
        master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1())
    new_ph_2: bytes32 = create_puzzlehash_for_pk(
        master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1())

    await farmer_rpc_client.set_reward_targets(
        encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch"))
    targets_3 = await farmer_rpc_client.get_reward_targets(True)
    assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
    assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
    assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

    new_ph_3: bytes32 = create_puzzlehash_for_pk(
        master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1())
    await farmer_rpc_client.set_reward_targets(
        None, encode_puzzle_hash(new_ph_3, "xch"))
    targets_4 = await farmer_rpc_client.get_reward_targets(True)
    assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
    assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
    assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"]

    root_path = farmer_api.farmer._root_path
    config = load_config(root_path, "config.yaml")
    assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(
        new_ph, "xch")
    assert config["pool"]["xch_target_address"] == encode_puzzle_hash(
        new_ph_3, "xch")

    new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
    added_char = new_ph_3_encoded + "a"
    with pytest.raises(ValueError):
        await farmer_rpc_client.set_reward_targets(None, added_char)

    replaced_char = new_ph_3_encoded[0:-1] + "a"
    with pytest.raises(ValueError):
        await farmer_rpc_client.set_reward_targets(None, replaced_char)
Example #7
    async def get_transactions(
        self,
        wallet_id: str,
        start: int = None,
        end: int = None,
        sort_key: SortKey = None,
        reverse: bool = False,
    ) -> List[TransactionRecord]:
        request: Dict[str, Any] = {"wallet_id": wallet_id}

        if start is not None:
            request["start"] = start
        if end is not None:
            request["end"] = end
        if sort_key is not None:
            request["sort_key"] = sort_key.name
        request["reverse"] = reverse

        res = await self.fetch(
            "get_transactions",
            request,
        )
        reverted_tx: List[TransactionRecord] = []
        for modified_tx in res["transactions"]:
            # Server returns address instead of ph, but TransactionRecord requires ph
            modified_tx["to_puzzle_hash"] = decode_puzzle_hash(
                modified_tx["to_address"]).hex()
            del modified_tx["to_address"]
            reverted_tx.append(TransactionRecord.from_json_dict(modified_tx))
        return reverted_tx
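A hypothetical call against the client method above; `show_recent_transactions` and its `client` parameter are illustrative names, with `client` assumed to be an instance of the wallet RPC client this method belongs to:

async def show_recent_transactions(client) -> None:
    # Fetch the first 50 transactions of wallet "1", newest first (sketch only).
    txs = await client.get_transactions("1", start=0, end=50, reverse=True)
    for tx in txs:
        print(tx.name, tx.to_puzzle_hash, tx.amount)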
Example #8
    async def farm_block(self, request):
        raw_puzzle_hash = decode_puzzle_hash(request["address"])
        request = FarmNewBlockProtocol(raw_puzzle_hash)
        msg = make_msg(ProtocolMessageTypes.farm_new_block, request)

        await self.service.server.send_to_all([msg], NodeType.FULL_NODE)
        return {}
Example #9
    async def send_transaction(self, request):
        assert self.service.wallet_state_manager is not None

        if await self.service.wallet_state_manager.synced() is False:
            raise ValueError("Wallet needs to be fully synced before sending transactions")

        if (
            self.service.wallet_state_manager.blockchain.get_peak_height()
            < self.service.constants.INITIAL_FREEZE_PERIOD
        ):
            raise ValueError(f"No transactions before block height: {self.service.constants.INITIAL_FREEZE_PERIOD}")

        if self.service.constants.NETWORK_TYPE is NetworkType.MAINNET:
            raise ValueError("Sending transactions not supported, please update your client.")

        wallet_id = int(request["wallet_id"])
        wallet = self.service.wallet_state_manager.wallets[wallet_id]

        if not isinstance(request["amount"], int) or not isinstance(request.get("fee", 0), int):
            raise ValueError("An integer amount or fee is required (too many decimals)")
        amount: uint64 = uint64(request["amount"])
        puzzle_hash: bytes32 = decode_puzzle_hash(request["address"])
        if "fee" in request:
            fee = uint64(request["fee"])
        else:
            fee = uint64(0)
        tx: TransactionRecord = await wallet.generate_signed_transaction(amount, puzzle_hash, fee)

        await wallet.push_transaction(tx)

        # Transaction may not have been included in the mempool yet. Use get_transaction to check.
        return {
            "transaction": tx,
            "transaction_id": tx.name,
        }
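For reference, a request payload that would satisfy the handler above might look like the following; the values are placeholders (the address is reused from another example in this collection), and amounts are integers in mojos:

request = {
    "wallet_id": 1,
    "amount": 10_000_000_000,  # must be an int (no decimals)
    "fee": 0,                  # optional; treated as 0 when omitted
    "address": "xch1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6",
}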
Example #10
    async def setup_keys(self):
        self.all_root_sks: List[PrivateKey] = [
            sk for sk, _ in await self.get_all_private_keys()
        ]
        self._private_keys = [
            master_sk_to_farmer_sk(sk) for sk in self.all_root_sks
        ] + [master_sk_to_pool_sk(sk) for sk in self.all_root_sks]

        if len(self.get_public_keys()) == 0:
            error_str = "No keys exist. Please run 'chia keys generate' or open the UI."
            raise RuntimeError(error_str)

        # This is the farmer configuration
        self.farmer_target_encoded = self.config["xch_target_address"]
        self.farmer_target = decode_puzzle_hash(self.farmer_target_encoded)

        self.pool_public_keys = [
            G1Element.from_bytes(bytes.fromhex(pk))
            for pk in self.config["pool_public_keys"]
        ]

        # This is the self pooling configuration, which is only used for original self-pooled plots
        self.pool_target_encoded = self.pool_config["xch_target_address"]
        self.pool_target = decode_puzzle_hash(self.pool_target_encoded)
        self.pool_sks_map: Dict = {}
        for key in self.get_private_keys():
            self.pool_sks_map[bytes(key.get_g1())] = key

        assert len(self.farmer_target) == 32
        assert len(self.pool_target) == 32
        if len(self.pool_sks_map) == 0:
            error_str = "No keys exist. Please run 'chia keys generate' or open the UI."
            raise RuntimeError(error_str)

        # The variables below are for use with an actual pool

        # From p2_singleton_puzzle_hash to pool state dict
        self.pool_state: Dict[bytes32, Dict] = {}

        # From public key bytes to PrivateKey
        self.authentication_keys: Dict[bytes, PrivateKey] = {}

        # Last time we updated pool_state based on the config file
        self.last_config_access_time: uint64 = uint64(0)

        self.harvester_cache: Dict[str, Dict[str, HarvesterCacheEntry]] = {}
Example #11
 async def validate_payout_instructions(
         self, payout_instructions: str) -> Optional[str]:
     """
     Returns the puzzle hash as a hex string from the payout instructions (puzzle hash hex or bech32m address) if it's encoded
     correctly, otherwise returns None.
     """
     try:
         if len(decode_puzzle_hash(payout_instructions)) == 32:
             return decode_puzzle_hash(payout_instructions).hex()
     except ValueError:
         # Not a Chia address
         pass
     try:
         if len(hexstr_to_bytes(payout_instructions)) == 32:
             return payout_instructions
     except ValueError:
         # Not a puzzle hash
         pass
     return None
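A rough sketch of the two accepted input forms; `check_payout_instruction_forms` and its `pool` parameter are illustrative names, with `pool` assumed to be an instance of the surrounding class. The address/hash pair is the ph1 pair asserted in the prefarm examples of this collection:

async def check_payout_instruction_forms(pool) -> None:
    as_address = "xch1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6"
    as_hex = "1b7ab2079fa635554ad9bd4812c622e46ee3b1875a7813afba127bb0cc9794f9"
    assert await pool.validate_payout_instructions(as_address) == as_hex  # bech32m address branch
    assert await pool.validate_payout_instructions(as_hex) == as_hex  # raw puzzle hash hex branch
    assert await pool.validate_payout_instructions("not-a-puzzle-hash") is None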
Example #12
    def __init__(self, private_key: PrivateKey, config: Dict,
                 constants: ConsensusConstants):
        self.log = logging.getLogger(__name__)
        self.private_key = private_key
        self.public_key: G1Element = private_key.get_g1()
        self.config = config
        self.constants = constants
        self.node_rpc_client = None

        self.store: Optional[PoolStore] = None

        self.pool_fee = 0.01

        # This number should be held constant and be consistent for every pool in the network
        self.iters_limit = 734000000

        # This number should not be changed, since users will put this into their singletons
        self.relative_lock_height = uint32(100)

        # TODO: potentially tweak these numbers for security and performance
        self.pool_url = "https://myreferencepool.com"
        self.min_difficulty = uint64(
            100)  # 100 difficulty is about 1 proof a day per plot
        self.default_difficulty: uint64 = uint64(100)
        self.max_difficulty = uint64(1000)

        # TODO: store this information in a persistent DB
        self.account_points: Dict[bytes, int] = {
        }  # Points are added by submitting partials
        self.account_rewards_targets: Dict[bytes, bytes] = {}

        self.pending_point_partials: Optional[asyncio.Queue] = None
        self.recent_points_added: LRUCache = LRUCache(20000)

        # This is where the block rewards will get paid out to. The pool needs to support this address forever,
        # since the farmers will encode it into their singleton on the blockchain.
        self.default_pool_puzzle_hash: bytes32 = decode_puzzle_hash(
            "xch12ma5m7sezasgh95wkyr8470ngryec27jxcvxcmsmc4ghy7c4njssnn623q")

        # We need to check for slow farmers. If farmers cannot submit proofs in time, they won't be able to win
        # any rewards either. This number can be tweaked to be more or less strict. More strict ensures everyone
        # gets high rewards, but it might cause some of the slower farmers to not be able to participate in the pool.
        self.partial_time_limit: int = 25

        # There is always a risk of a reorg, in which case we cannot reward farmers that submitted partials in that
        # reorg. That is why we have a time delay before changing any account points.
        self.partial_confirmation_delay: int = 300

        self.full_node_client: Optional[FullNodeRpcClient] = None
        self.confirm_partials_loop_task: Optional[asyncio.Task] = None
        self.difficulty_change_time: Dict[bytes32, uint64] = {}

        self.scan_p2_singleton_puzzle_hashes: Set[bytes32] = set()
        self.blockchain_state = {"peak": None}
Example #13
 async def get_transactions(
     self,
     wallet_id: str,
 ) -> List[TransactionRecord]:
     res = await self.fetch(
         "get_transactions",
         {"wallet_id": wallet_id},
     )
     reverted_tx: List[TransactionRecord] = []
     for modified_tx in res["transactions"]:
         # Server returns address instead of ph, but TransactionRecord requires ph
         modified_tx["to_puzzle_hash"] = decode_puzzle_hash(modified_tx["to_address"]).hex()
         del modified_tx["to_address"]
         reverted_tx.append(TransactionRecord.from_json_dict(modified_tx))
     return reverted_tx
Example #14
def tx(info):
    j = json.dumps(info)
    m: Dict = json.loads(j)  # parse the JSON round-trip back into a dict instead of using eval()
    inputs: List = m.get("inputs")
    outputs: List = m.get("outputs")

    primaries = []
    for o in outputs:
        output: Dict = o
        address: str = output.get("address")
        value: float = output.get("value")
        primaries.append({
            "puzzlehash": decode_puzzle_hash(address),
            "amount": value
        })

    spends: List[CoinSolution] = []
    pks: List[str] = []
    first_spend = True
    for i in inputs:
        input: Dict = i
        pk: str = input.get("pk")
        pks.append(pk)
        txid: Dict = json.loads(input.get("txId"))  # txId is assumed to be a JSON-encoded coin description
        parentCoinInfo = txid.get("parentCoinInfo")
        puzzleHash = txid.get("puzzleHash")
        amount = txid.get("amount")

        pa = bytes32(bytes.fromhex(parentCoinInfo[2:]))
        pu = bytes32(bytes.fromhex(puzzleHash[2:]))
        a = uint64(amount)
        coin: Coin = Coin(pa, pu, a)
        child_sk: PrivateKey = PrivateKey.from_bytes(bytes.fromhex(pk))
        child_public_key = child_sk.get_g1()
        puzzle = puzzle_for_pk(child_public_key)

        if first_spend:
            solution: Program = Wallet().make_solution(primaries=primaries)
            first_spend = False
        else:
            solution = Wallet().make_solution()
        spends.append(CoinSolution(coin, puzzle, solution))

    spend_bundle: SpendBundle = SpendBundle(spends, G2Element())
    # return json.dumps(spend_bundle.to_json_dict())
    return sign_tx(pks, spend_bundle)
Example #15
 def from_json_dict_convenience(cls, modified_tx_input: Dict):
     modified_tx = modified_tx_input.copy()
     if "to_address" in modified_tx:
         modified_tx["to_puzzle_hash"] = decode_puzzle_hash(
             modified_tx["to_address"]).hex()
     if "to_address" in modified_tx:
         del modified_tx["to_address"]
     # Converts memos from a flat dict into a nested list
     memos_dict: Dict[str, List[str]] = {}
     memos_list: List = []
     if "memos" in modified_tx:
         for coin_id, memo in modified_tx["memos"].items():
             if coin_id not in memos_dict:
                 memos_dict[coin_id] = []
             memos_dict[coin_id].append(memo)
     for coin_id, memos in memos_dict.items():
         memos_list.append((coin_id, memos))
     modified_tx["memos"] = memos_list
     return cls.from_json_dict(modified_tx)
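To make the memo reshaping above concrete: a flat dict keyed by coin id is regrouped into a list of (coin_id, memos) pairs. The ids and memo strings below are made-up placeholders:

# Hypothetical flat form as received in the JSON dict (placeholder values).
flat = {"memos": {"coin_id_1": "memo_a", "coin_id_2": "memo_b"}}
# After the loop above, modified_tx["memos"] would be the nested form:
# [("coin_id_1", ["memo_a"]), ("coin_id_2", ["memo_b"])]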
Example #16
    async def cc_spend(self, request):
        assert self.service.wallet_state_manager is not None
        wallet_id = int(request["wallet_id"])
        wallet: CCWallet = self.service.wallet_state_manager.wallets[wallet_id]
        puzzle_hash: bytes32 = decode_puzzle_hash(request["inner_address"])

        if not isinstance(request["amount"], int) or not isinstance(request.get("fee", 0), int):
            raise ValueError("An integer amount or fee is required (too many decimals)")
        amount: uint64 = uint64(request["amount"])
        if "fee" in request:
            fee = uint64(request["fee"])
        else:
            fee = uint64(0)

        tx: TransactionRecord = await wallet.generate_signed_transaction([amount], [puzzle_hash], fee)
        await wallet.wallet_state_manager.add_pending_transaction(tx)

        return {
            "transaction": tx,
            "transaction_id": tx.name,
        }
Example #17
    async def send_transaction(self, request):
        assert self.service.wallet_state_manager is not None

        if await self.service.wallet_state_manager.synced() is False:
            raise ValueError(
                "Wallet needs to be fully synced before sending transactions")

        if int(time.time()) < self.service.constants.INITIAL_FREEZE_END_TIMESTAMP:
            end_date = datetime.fromtimestamp(
                float(self.service.constants.INITIAL_FREEZE_END_TIMESTAMP))
            raise ValueError(f"No transactions before: {end_date}")

        wallet_id = int(request["wallet_id"])
        wallet = self.service.wallet_state_manager.wallets[wallet_id]

        if not isinstance(request["amount"], int) or not isinstance(
                request["fee"], int):
            raise ValueError(
                "An integer amount or fee is required (too many decimals)")
        amount: uint64 = uint64(request["amount"])
        puzzle_hash: bytes32 = decode_puzzle_hash(request["address"])
        if "fee" in request:
            fee = uint64(request["fee"])
        else:
            fee = uint64(0)
        async with self.service.wallet_state_manager.lock:
            tx: TransactionRecord = await wallet.generate_signed_transaction(
                amount, puzzle_hash, fee)
            await wallet.push_transaction(tx)

        # Transaction may not have been included in the mempool yet. Use get_transaction to check.
        return {
            "transaction": tx,
            "transaction_id": tx.name,
        }
Example #18
    def __init__(self, private_key: PrivateKey, config: Dict,
                 constants: ConsensusConstants):
        self.follow_singleton_tasks: Dict[bytes32, asyncio.Task] = {}
        self.log = logging
        # If you want to log to a file: use filename='example.log', encoding='utf-8'
        self.log.basicConfig(level=logging.INFO)

        # We load our configurations from here
        with open(os.getcwd() + "/config.yaml") as f:
            pool_config: Dict = yaml.safe_load(f)

        # Set our pool info here
        self.info_default_res = pool_config["pool_info"]["default_res"]
        self.info_name = pool_config["pool_info"]["name"]
        self.info_logo_url = pool_config["pool_info"]["logo_url"]
        self.info_description = pool_config["pool_info"]["description"]
        self.welcome_message = pool_config["welcome_message"]

        self.private_key = private_key
        self.public_key: G1Element = private_key.get_g1()
        self.config = config
        self.constants = constants
        self.node_rpc_client = None
        self.wallet_rpc_client = None

        self.store: Optional[PoolStore] = None

        self.pool_fee = pool_config["pool_fee"]

        # This number should be held constant and be consistent for every pool in the network. DO NOT CHANGE
        self.iters_limit = self.constants.POOL_SUB_SLOT_ITERS // 64

        # This number should not be changed, since users will put this into their singletons
        self.relative_lock_height = uint32(100)

        # TODO(pool): potentially tweak these numbers for security and performance
        # This is what the user enters into the input field. This exact value will be stored on the blockchain
        self.pool_url = pool_config["pool_url"]
        self.min_difficulty = uint64(
            pool_config["min_difficulty"]
        )  # 10 difficulty is about 1 proof a day per plot
        self.default_difficulty: uint64 = uint64(
            pool_config["default_difficulty"])

        self.pending_point_partials: Optional[asyncio.Queue] = None
        self.recent_points_added: LRUCache = LRUCache(20000)

        # The time in minutes for an authentication token to be valid. See "Farmer authentication" in SPECIFICATION.md
        self.authentication_token_timeout: uint8 = pool_config[
            "authentication_token_timeout"]

        # This is where the block rewards will get paid out to. The pool needs to support this address forever,
        # since the farmers will encode it into their singleton on the blockchain. WARNING: the default pool code
        # completely spends this wallet and distributes it to users, so don't put any additional funds in here
        # that you do not want to distribute. Even if the funds are in a different address than this one, they WILL
        # be spent by this code! So only put funds that you want to distribute to pool members here.

        # Using 2164248527
        self.default_target_puzzle_hash: bytes32 = bytes32(
            decode_puzzle_hash(pool_config["default_target_address"]))

        # The pool fees will be sent to this address. This MUST be on a different key than the target_puzzle_hash,
        # otherwise, the fees will be sent to the users. Using 690783650
        self.pool_fee_puzzle_hash: bytes32 = bytes32(
            decode_puzzle_hash(pool_config["pool_fee_address"]))

        # This is the wallet fingerprint and ID for the wallet spending the funds from `self.default_target_puzzle_hash`
        self.wallet_fingerprint = pool_config["wallet_fingerprint"]
        self.wallet_id = pool_config["wallet_id"]

        # We need to check for slow farmers. If farmers cannot submit proofs in time, they won't be able to win
        # any rewards either. This number can be tweaked to be more or less strict. More strict ensures everyone
        # gets high rewards, but it might cause some of the slower farmers to not be able to participate in the pool.
        self.partial_time_limit: int = pool_config["partial_time_limit"]

        # There is always a risk of a reorg, in which case we cannot reward farmers that submitted partials in that
        # reorg. That is why we have a time delay before changing any account points.
        self.partial_confirmation_delay: int = pool_config[
            "partial_confirmation_delay"]

        # These are the phs that we want to look for on chain, that we can claim to our pool
        self.scan_p2_singleton_puzzle_hashes: Set[bytes32] = set()

        # Don't scan anything before this height, for efficiency (for example pool start date)
        self.scan_start_height: uint32 = uint32(
            pool_config["scan_start_height"])

        # Interval for scanning and collecting the pool rewards
        self.collect_pool_rewards_interval = pool_config[
            "collect_pool_rewards_interval"]

        # After this many confirmations, a transaction is considered final and irreversible
        self.confirmation_security_threshold = pool_config[
            "confirmation_security_threshold"]

        # Interval for making payout transactions to farmers
        self.payment_interval = pool_config["payment_interval"]

        # We will not make transactions with more targets than this, to ensure our transaction gets into the blockchain
        # faster.
        self.max_additions_per_transaction = pool_config[
            "max_additions_per_transaction"]

        # This is the list of payments that we have not sent yet, to farmers
        self.pending_payments: Optional[asyncio.Queue] = None

        # Keeps track of the latest state of our node
        self.blockchain_state = {"peak": None}

        # Whether or not the wallet is synced (required to make payments)
        self.wallet_synced = False

        # We target this many partials over this number of seconds. We adjust after receiving this many partials.
        self.number_of_partials_target: int = pool_config[
            "number_of_partials_target"]
        self.time_target: int = pool_config["time_target"]

        # Tasks (infinite While loops) for different purposes
        self.confirm_partials_loop_task: Optional[asyncio.Task] = None
        self.collect_pool_rewards_loop_task: Optional[asyncio.Task] = None
        self.create_payment_loop_task: Optional[asyncio.Task] = None
        self.submit_payment_loop_task: Optional[asyncio.Task] = None
        self.get_peak_loop_task: Optional[asyncio.Task] = None

        self.node_rpc_client: Optional[FullNodeRpcClient] = None
        self.wallet_rpc_client: Optional[WalletRpcClient] = None
Example #19
def encode_cmd(address):
    print(decode_puzzle_hash(address).hex())
Example #20
async def main() -> None:
    rpc_port: uint16 = uint16(8555)
    self_hostname = "localhost"
    path = DEFAULT_ROOT_PATH
    config = load_config(path, "config.yaml")
    client = await FullNodeRpcClient.create(self_hostname, rpc_port, path,
                                            config)
    try:
        farmer_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[1]
        pool_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[0]

        pool_amounts = int(calculate_pool_reward(uint32(0)) / 2)
        farmer_amounts = int(calculate_base_farmer_reward(uint32(0)) / 2)
        print(farmer_prefarm.amount, farmer_amounts)
        assert farmer_amounts == farmer_prefarm.amount // 2
        assert pool_amounts == pool_prefarm.amount // 2
        address1 = "xch1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6"  # Key 1
        address2 = "xch1duvy5ur5eyj7lp5geetfg84cj2d7xgpxt7pya3lr2y6ke3696w9qvda66e"  # Key 2

        ph1 = decode_puzzle_hash(address1)
        ph2 = decode_puzzle_hash(address2)

        p_farmer_2 = Program.to(
            binutils.assemble(
                f"(q . ((51 0x{ph1.hex()} {farmer_amounts}) (51 0x{ph2.hex()} {farmer_amounts})))"
            ))
        p_pool_2 = Program.to(
            binutils.assemble(
                f"(q . ((51 0x{ph1.hex()} {pool_amounts}) (51 0x{ph2.hex()} {pool_amounts})))"
            ))

        print(f"Ph1: {ph1.hex()}")
        print(f"Ph2: {ph2.hex()}")
        assert ph1.hex() == "1b7ab2079fa635554ad9bd4812c622e46ee3b1875a7813afba127bb0cc9794f9"
        assert ph2.hex() == "6f184a7074c925ef8688ce56941eb8929be320265f824ec7e351356cc745d38a"

        p_solution = Program.to(binutils.assemble("()"))

        sb_farmer = SpendBundle(
            [CoinSolution(farmer_prefarm, p_farmer_2, p_solution)],
            G2Element())
        sb_pool = SpendBundle(
            [CoinSolution(pool_prefarm, p_pool_2, p_solution)], G2Element())

        print("\n\n\nConditions")
        print_conditions(sb_pool)
        print("\n\n\n")
        print("Farmer to spend")
        print(sb_pool)
        print(sb_farmer)
        print("\n\n\n")
        # res = await client.push_tx(sb_farmer)
        # res = await client.push_tx(sb_pool)

        # print(res)
        up = await client.get_coin_records_by_puzzle_hash(
            farmer_prefarm.puzzle_hash, True)
        uf = await client.get_coin_records_by_puzzle_hash(
            pool_prefarm.puzzle_hash, True)
        print(up)
        print(uf)
    finally:
        client.close()
Example #21
 def pool_contract_puzzle_hash(self) -> Optional[bytes32]:
     if self.pool_contract_address is not None:
         return decode_puzzle_hash(self.pool_contract_address)
     return None
Example #22
    def __init__(self, private_key: PrivateKey, config: Dict,
                 constants: ConsensusConstants):
        self.log = logging
        # If you want to log to a file: use filename='example.log', encoding='utf-8'
        self.log.basicConfig(level=logging.INFO)

        self.private_key = private_key
        self.public_key: G1Element = private_key.get_g1()
        self.config = config
        self.constants = constants
        self.node_rpc_client = None
        self.wallet_rpc_client = None

        self.store: Optional[PoolStore] = None

        self.pool_fee = 0.01

        # This number should be held constant and be consistent for every pool in the network. DO NOT CHANGE
        self.iters_limit = self.constants.POOL_SUB_SLOT_ITERS // 64

        # This number should not be changed, since users will put this into their singletons
        self.relative_lock_height = uint32(100)

        # TODO(pool): potentially tweak these numbers for security and performance
        self.pool_url = "https://myreferencepool.com"
        self.min_difficulty = uint64(
            10)  # 10 difficulty is about 1 proof a day per plot
        self.default_difficulty: uint64 = uint64(10)

        self.pending_point_partials: Optional[asyncio.Queue] = None
        self.recent_points_added: LRUCache = LRUCache(20000)

        # This is where the block rewards will get paid out to. The pool needs to support this address forever,
        # since the farmers will encode it into their singleton on the blockchain.

        self.default_pool_puzzle_hash: bytes32 = bytes32(
            decode_puzzle_hash(
                "xch12ma5m7sezasgh95wkyr8470ngryec27jxcvxcmsmc4ghy7c4njssnn623q"
            ))

        # The pool fees will be sent to this address
        self.pool_fee_puzzle_hash: bytes32 = bytes32(
            decode_puzzle_hash(
                "txch1h8ggpvqzhrquuchquk7s970cy0m0e0yxd4hxqwzqkpzxk9jx9nzqmd67ux"
            ))

        # This is the wallet fingerprint and ID for the wallet spending the funds from `self.default_pool_puzzle_hash`
        self.wallet_fingerprint = 2938470744
        self.wallet_id = "1"

        # We need to check for slow farmers. If farmers cannot submit proofs in time, they won't be able to win
        # any rewards either. This number can be tweaked to be more or less strict. More strict ensures everyone
        # gets high rewards, but it might cause some of the slower farmers to not be able to participate in the pool.
        self.partial_time_limit: int = 25

        # There is always a risk of a reorg, in which case we cannot reward farmers that submitted partials in that
        # reorg. That is why we have a time delay before changing any account points.
        self.partial_confirmation_delay: int = 30

        # Keeps track of when each farmer last changed their difficulty, to rate limit how often they can change it
        # This helps when the farmer is farming from two machines at the same time (with conflicting difficulties)
        self.difficulty_change_time: Dict[bytes32, uint64] = {}

        # These are the phs that we want to look for on chain, that we can claim to our pool
        self.scan_p2_singleton_puzzle_hashes: Set[bytes32] = set()

        # Don't scan anything before this height, for efficiency (for example pool start date)
        self.scan_start_height: uint32 = uint32(1000)

        # Interval for scanning and collecting the pool rewards
        self.collect_pool_rewards_interval = 600

        # After this many confirmations, a transaction is considered final and irreversible
        self.confirmation_security_threshold = 6

        # Interval for making payout transactions to farmers
        self.payment_interval = 600

        # We will not make transactions with more targets than this, to ensure our transaction gets into the blockchain
        # faster.
        self.max_additions_per_transaction = 400

        # This is the list of payments that we have not sent yet, to farmers
        self.pending_payments: Optional[asyncio.Queue] = None

        # Keeps track of the latest state of our node
        self.blockchain_state = {"peak": None}

        # Whether or not the wallet is synced (required to make payments)
        self.wallet_synced = False

        # Tasks (infinite While loops) for different purposes
        self.confirm_partials_loop_task: Optional[asyncio.Task] = None
        self.collect_pool_rewards_loop_task: Optional[asyncio.Task] = None
        self.create_payment_loop_task: Optional[asyncio.Task] = None
        self.submit_payment_loop_task: Optional[asyncio.Task] = None
        self.get_peak_loop_task: Optional[asyncio.Task] = None

        self.node_rpc_client: Optional[FullNodeRpcClient] = None
        self.wallet_rpc_client: Optional[WalletRpcClient] = None
Example #23
from clvm.casts import int_from_bytes
from clvm_tools import binutils

from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from chia.types.blockchain_format.program import Program
from chia.types.condition_opcodes import ConditionOpcode
from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
from chia.util.condition_tools import parse_sexp_to_conditions
from chia.util.ints import uint32

address1 = "txch15gx26ndmacfaqlq8m0yajeggzceu7cvmaz4df0hahkukes695rss6lej7h"  # Gene wallet (m/12381/8444/2/42):
address2 = "txch1c2cguswhvmdyz9hr3q6hak2h6p9dw4rz82g4707k2xy2sarv705qcce4pn"  # Mariano address (m/12381/8444/2/0)

ph1 = decode_puzzle_hash(address1)
ph2 = decode_puzzle_hash(address2)

pool_amounts = int(calculate_pool_reward(uint32(0)) / 2)
farmer_amounts = int(calculate_base_farmer_reward(uint32(0)) / 2)

assert pool_amounts * 2 == calculate_pool_reward(uint32(0))
assert farmer_amounts * 2 == calculate_base_farmer_reward(uint32(0))


def make_puzzle(amount: int) -> int:
    puzzle = f"(q . ((51 0x{ph1.hex()} {amount}) (51 0x{ph2.hex()} {amount})))"
    # print(puzzle)

    puzzle_prog = Program.to(binutils.assemble(puzzle))
    print("Program: ", puzzle_prog)
    puzzle_hash = puzzle_prog.get_tree_hash()
Example #24
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname, test_rpc_port_2, bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(
                std_hash(b"1"), std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(2)
            )
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk, AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            # Test farmer get_harvesters
            async def test_get_harvesters():
                farmer_res = await client.get_harvesters()
                if len(list(farmer_res["harvesters"])) != 1:
                    return False
                if len(list(farmer_res["harvesters"][0]["plots"])) != num_plots:
                    return False
                return True

            await time_out_assert(30, test_get_harvesters)

            expected_result: PlotRefreshResult = PlotRefreshResult()

            def test_refresh_callback(refresh_result: PlotRefreshResult):
                assert refresh_result.loaded_plots == expected_result.loaded_plots
                assert refresh_result.removed_plots == expected_result.removed_plots
                assert refresh_result.processed_files == expected_result.processed_files
                assert refresh_result.remaining_files == expected_result.remaining_files

            harvester.plot_manager.set_refresh_callback(test_refresh_callback)

            async def test_case(
                trigger, expect_loaded, expect_removed, expect_processed, expected_directories, expect_total_plots
            ):
                expected_result.loaded_plots = expect_loaded
                expected_result.removed_plots = expect_removed
                expected_result.processed_files = expect_processed
                await trigger
                harvester.plot_manager.trigger_refresh()
                assert len(await client_2.get_plot_directories()) == expected_directories
                await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
                result = await client_2.get_plots()
                assert len(result["plots"]) == expect_total_plots
                assert len(harvester.plot_manager.cache) == expect_total_plots
                assert len(harvester.plot_manager.failed_to_open_filenames) == 0

            # Add plot_dir with two new plots
            await test_case(
                client_2.add_plot_directory(str(plot_dir)),
                expect_loaded=2,
                expect_removed=0,
                expect_processed=2,
                expected_directories=2,
                expect_total_plots=num_plots + 2,
            )
            # Add plot_dir_sub with one duplicate
            await test_case(
                client_2.add_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=0,
                expect_processed=1,
                expected_directories=3,
                expect_total_plots=num_plots + 2,
            )
            # Delete one plot
            await test_case(
                client_2.delete_plot(str(plot_dir / filename)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove directory with the duplicate
            await test_case(
                client_2.remove_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=2,
                expect_total_plots=num_plots + 1,
            )
            # Re-add the directory with the duplicate for other tests
            await test_case(
                client_2.add_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=0,
                expect_processed=1,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove the directory which has the duplicated plot loaded. This removes the duplicated plot from plot_dir
            # and in the same run loads the plot from plot_dir_sub, which is no longer seen as a duplicate.
            await test_case(
                client_2.remove_plot_directory(str(plot_dir)),
                expect_loaded=1,
                expect_removed=1,
                expect_processed=1,
                expected_directories=2,
                expect_total_plots=num_plots + 1,
            )
            # Re-add the directory; now the plot seen as a duplicate is from plot_dir, not from plot_dir_sub as before
            await test_case(
                client_2.add_plot_directory(str(plot_dir)),
                expect_loaded=0,
                expect_removed=0,
                expect_processed=1,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove the duplicated plot
            await test_case(
                client_2.delete_plot(str(plot_dir / filename_2)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove the directory with the loaded plot, which is no longer a duplicate
            await test_case(
                client_2.remove_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=2,
                expect_total_plots=num_plots,
            )
            # Remove the directory which contains all other plots
            await test_case(
                client_2.remove_plot_directory(str(get_plot_dir())),
                expect_loaded=0,
                expect_removed=20,
                expect_processed=0,
                expected_directories=1,
                expect_total_plots=0,
            )
            # Recover the plots to test caching
            # First make sure cache gets written if required and new plots are loaded
            await test_case(
                client_2.add_plot_directory(str(get_plot_dir())),
                expect_loaded=20,
                expect_removed=0,
                expect_processed=20,
                expected_directories=2,
                expect_total_plots=20,
            )
            assert harvester.plot_manager.cache.path().exists()
            unlink(harvester.plot_manager.cache.path())
            # Should not write the cache again on shutdown because it didn't change
            assert not harvester.plot_manager.cache.path().exists()
            harvester.plot_manager.stop_refreshing()
            assert not harvester.plot_manager.cache.path().exists()
            # Manually trigger `save_cache` and make sure it creates a new cache file
            harvester.plot_manager.cache.save()
            assert harvester.plot_manager.cache.path().exists()

            expected_result.loaded_plots = 20
            expected_result.removed_plots = 0
            expected_result.processed_files = 20
            expected_result.remaining_files = 0
            plot_manager: PlotManager = PlotManager(harvester.root_path, test_refresh_callback)
            plot_manager.start_refreshing()
            assert len(harvester.plot_manager.cache) == len(plot_manager.cache)
            await time_out_assert(5, plot_manager.needs_refresh, value=False)
            for path, plot_info in harvester.plot_manager.plots.items():
                assert path in plot_manager.plots
                assert plot_manager.plots[path].prover.get_filename() == plot_info.prover.get_filename()
                assert plot_manager.plots[path].prover.get_id() == plot_info.prover.get_id()
                assert plot_manager.plots[path].prover.get_memo() == plot_info.prover.get_memo()
                assert plot_manager.plots[path].prover.get_size() == plot_info.prover.get_size()
                assert plot_manager.plots[path].pool_public_key == plot_info.pool_public_key
                assert plot_manager.plots[path].pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash
                assert plot_manager.plots[path].plot_public_key == plot_info.plot_public_key
                assert plot_manager.plots[path].file_size == plot_info.file_size
                assert plot_manager.plots[path].time_modified == plot_info.time_modified

            assert harvester.plot_manager.plot_filename_paths == plot_manager.plot_filename_paths
            assert harvester.plot_manager.failed_to_open_filenames == plot_manager.failed_to_open_filenames
            assert harvester.plot_manager.no_key_filenames == plot_manager.no_key_filenames
            plot_manager.stop_refreshing()
            # Modify the content of the plot_manager.dat
            with open(harvester.plot_manager.cache.path(), "r+b") as file:
                file.write(b"\xff\xff")  # Sets Cache.version to 65535
            # Make sure it just loads the plots normally if it fails to load the cache
            plot_manager = PlotManager(harvester.root_path, test_refresh_callback)
            plot_manager.cache.load()
            assert len(plot_manager.cache) == 0
            plot_manager.set_public_keys(
                harvester.plot_manager.farmer_public_keys, harvester.plot_manager.pool_public_keys
            )
            expected_result.loaded_plots = 20
            expected_result.removed_plots = 0
            expected_result.processed_files = 20
            expected_result.remaining_files = 0
            plot_manager.start_refreshing()
            await time_out_assert(5, plot_manager.needs_refresh, value=False)
            assert len(plot_manager.plots) == len(harvester.plot_manager.plots)
            plot_manager.stop_refreshing()

            # Test re-trying if processing a plot failed
            # First save the plot
            retry_test_plot = Path(plot_dir_sub / filename_2).resolve()
            retry_test_plot_save = Path(plot_dir_sub / "save").resolve()
            copy(retry_test_plot, retry_test_plot_save)
            # Invalidate the plot
            with open(plot_dir_sub / filename_2, "r+b") as file:
                file.write(bytes(100))
            # Add it and validate it fails to load
            await harvester.add_plot_directory(str(plot_dir_sub))
            expected_result.loaded_plots = 0
            expected_result.removed_plots = 0
            expected_result.processed_files = 1
            expected_result.remaining_files = 0
            harvester.plot_manager.start_refreshing()
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
            # Make sure the file stays in `failed_to_open_filenames` and doesn't get loaded or processed in the next
            # update round
            expected_result.loaded_plots = 0
            expected_result.processed_files = 0
            harvester.plot_manager.trigger_refresh()
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
            # Now decrease the re-try timeout, restore the valid plot file and make sure it properly loads now
            harvester.plot_manager.refresh_parameter.retry_invalid_seconds = 0
            move(retry_test_plot_save, retry_test_plot)
            expected_result.loaded_plots = 1
            expected_result.processed_files = 1
            harvester.plot_manager.trigger_refresh()
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            assert retry_test_plot not in harvester.plot_manager.failed_to_open_filenames

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1()
            )

            await client.set_reward_targets(encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1()
            )
            await client.set_reward_targets(None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

            assert len((await client.get_pool_state())["pool_state"]) == 0
            all_sks = farmer_api.farmer.local_keychain.get_all_private_keys()
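            # Derive a pooling authentication secret key from the first master key in the keychain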
            auth_sk = master_sk_to_pooling_authentication_sk(all_sks[0][0], 2, 1)
            pool_list = [
                {
                    "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                    "authentication_public_key": bytes(auth_sk.get_g1()).hex(),
                    "owner_public_key": "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",
                    "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                    "pool_url": "localhost",
                    "p2_singleton_puzzle_hash": "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                    "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
                }
            ]
            config["pool"]["pool_list"] = pool_list
            save_config(root_path, "config.yaml", config)
            await farmer_api.farmer.update_pool_state()

            pool_state = (await client.get_pool_state())["pool_state"]
            assert len(pool_state) == 1
            assert (
                pool_state[0]["pool_config"]["payout_instructions"]
                == "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
            )
            await client.set_payout_instructions(hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]), "1234vy")
            await farmer_api.farmer.update_pool_state()
            pool_state = (await client.get_pool_state())["pool_state"]
            assert pool_state[0]["pool_config"]["payout_instructions"] == "1234vy"

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester_service, farmer_service = simulation
        harvester = harvester_service._node
        farmer_api = farmer_service._api

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port,
                                                  bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname,
                                                       test_rpc_port_2,
                                                       bt.root_path, config)

            await validate_get_routes(client, farmer_rpc_api)
            await validate_get_routes(client_2, harvester_rpc_api)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
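            # The new plots live in directories that have not been added to the harvester, so the count is unchanged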
            assert len(res_2["plots"]) == num_plots

            # Reset the cache and force it to update every second so the farmer gets the most recent data
            update_interval_before = farmer_api.farmer.update_harvester_cache_interval
            farmer_api.farmer.update_harvester_cache_interval = 1
            farmer_api.farmer.harvester_cache = {}

            # Test farmer get_harvesters
            async def test_get_harvesters():
                harvester.plot_manager.trigger_refresh()
                await time_out_assert(5,
                                      harvester.plot_manager.needs_refresh,
                                      value=False)
                farmer_res = await client.get_harvesters()
                if len(list(farmer_res["harvesters"])) != 1:
                    log.error(
                        f"test_get_harvesters: invalid harvesters {list(farmer_res['harvesters'])}"
                    )
                    return False
                if len(list(
                        farmer_res["harvesters"][0]["plots"])) != num_plots:
                    log.error(
                        f"test_get_harvesters: invalid plots {list(farmer_res['harvesters'])}"
                    )
                    return False
                return True

            await time_out_assert_custom_interval(30, 1, test_get_harvesters)

            # Reset cache and reset update interval to avoid hitting the rate limit
            farmer_api.farmer.update_harvester_cache_interval = update_interval_before
            farmer_api.farmer.harvester_cache = {}

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.farmer_master_sk,
                                       uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(472)).get_g1())

            await client.set_reward_targets(
                encode_puzzle_hash(new_ph, "xch"),
                encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(1888)).get_g1())
            await client.set_reward_targets(
                None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"][
                "xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(
                new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

            assert len((await client.get_pool_state())["pool_state"]) == 0
            all_sks = farmer_api.farmer.local_keychain.get_all_private_keys()
            auth_sk = master_sk_to_pooling_authentication_sk(
                all_sks[0][0], 2, 1)
            pool_list = [{
                "launcher_id":
                "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                "authentication_public_key":
                bytes(auth_sk.get_g1()).hex(),
                "owner_public_key":
                "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",  # noqa
                "payout_instructions":
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                "pool_url":
                "localhost",
                "p2_singleton_puzzle_hash":
                "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                "target_puzzle_hash":
                "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
            }]
            config["pool"]["pool_list"] = pool_list
            save_config(root_path, "config.yaml", config)
            await farmer_api.farmer.update_pool_state()

            pool_state = (await client.get_pool_state())["pool_state"]
            assert len(pool_state) == 1
            assert (
                pool_state[0]["pool_config"]["payout_instructions"] ==
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
            )
            await client.set_payout_instructions(
                hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]),
                "1234vy")
            await farmer_api.farmer.update_pool_state()
            pool_state = (await client.get_pool_state())["pool_state"]
            assert pool_state[0]["pool_config"][
                "payout_instructions"] == "1234vy"

            now = time.time()
            # Large arbitrary point values, chosen so they are unlikely to collide accidentally.
            before_24h = (now - (25 * 60 * 60), 29984713)
            since_24h = (now - (23 * 60 * 60), 93049817)
            for p2_singleton_puzzle_hash, pool_dict in farmer_api.farmer.pool_state.items():
                for key in ["points_found_24h", "points_acknowledged_24h"]:
                    pool_dict[key].insert(0, since_24h)
                    pool_dict[key].insert(0, before_24h)

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)
            client_pool_state = await client.get_pool_state()
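            # The entry older than 24 hours should have been pruned, so since_24h is now the first element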
            for pool_dict in client_pool_state["pool_state"]:
                for key in ["points_found_24h", "points_acknowledged_24h"]:
                    assert pool_dict[key][0] == list(since_24h)

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
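
The reward-target round trips and the pytest.raises(ValueError) checks above boil down to encode_puzzle_hash and decode_puzzle_hash being exact inverses over bech32m addresses. A minimal sketch of that behaviour, assuming the chia.util.bech32m import path used by chia-blockchain:

# Sketch only; the import path below is an assumption, not taken from the example above.
from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash

puzzle_hash = bytes(32)  # any 32-byte puzzle hash
address = encode_puzzle_hash(puzzle_hash, "xch")
assert decode_puzzle_hash(address) == puzzle_hash  # exact round trip

# Appending or replacing a character breaks the bech32m checksum, which is why
# set_reward_targets with a corrupted address raises ValueError in the tests above.
try:
    decode_puzzle_hash(address + "a")
except ValueError:
    pass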
Example #26
0
def create_plots(args,
                 root_path,
                 use_datetime=True,
                 test_private_keys: Optional[List] = None):
    config_filename = config_path_for_filename(root_path, "config.yaml")
    config = load_config(root_path, config_filename)

    if args.tmp2_dir is None:
        args.tmp2_dir = args.tmp_dir

    farmer_public_key: G1Element
    if args.farmer_public_key is not None:
        farmer_public_key = G1Element.from_bytes(
            bytes.fromhex(args.farmer_public_key))
    else:
        farmer_public_key = get_farmer_public_key(args.alt_fingerprint)

    pool_public_key: Optional[G1Element] = None
    pool_contract_puzzle_hash: Optional[bytes32] = None
    if args.pool_public_key is not None:
        if args.pool_contract_address is not None:
            raise RuntimeError(
                "Choose one of pool_contract_address and pool_public_key")
        pool_public_key = G1Element.from_bytes(
            bytes.fromhex(args.pool_public_key))
    else:
        if args.pool_contract_address is None:
            # If nothing is set, farms to the provided key (or the first key)
            pool_public_key = get_pool_public_key(args.alt_fingerprint)
        else:
            # If the pool contract puzzle hash is set, use that
            pool_contract_puzzle_hash = decode_puzzle_hash(
                args.pool_contract_address)

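    # Exactly one of pool_public_key / pool_contract_puzzle_hash is set at this point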
    assert (pool_public_key is None) != (pool_contract_puzzle_hash is None)
    num = args.num

    if args.size < config["min_mainnet_k_size"] and test_private_keys is None:
        log.warning(
            f"Creating plots with size k={args.size}, which is less than the minimum required for mainnet"
        )
    if args.size < 22:
        log.warning("k under 22 is not supported. Increasing k to 22")
        args.size = 22

    if pool_public_key is not None:
        log.info(
            f"Creating {num} plots of size {args.size}, pool public key:  "
            f"{bytes(pool_public_key).hex()} farmer public key: {bytes(farmer_public_key).hex()}"
        )
    else:
        assert pool_contract_puzzle_hash is not None
        log.info(
            f"Creating {num} plots of size {args.size}, pool contract address:  "
            f"{args.pool_contract_address} farmer public key: {bytes(farmer_public_key).hex()}"
        )

    tmp_dir_created = False
    if not args.tmp_dir.exists():
        mkdir(args.tmp_dir)
        tmp_dir_created = True

    tmp2_dir_created = False
    if not args.tmp2_dir.exists():
        mkdir(args.tmp2_dir)
        tmp2_dir_created = True

    mkdir(args.final_dir)

    finished_filenames = []
    for i in range(num):
        # Generate a random master secret key
        if test_private_keys is not None:
            assert len(test_private_keys) == num
            sk: PrivateKey = test_private_keys[i]
        else:
            sk = AugSchemeMPL.key_gen(token_bytes(32))

        # The plot public key is the combination of the harvester and farmer keys
        # New plots will also include a taproot of the keys, for extensibility
        include_taproot: bool = pool_contract_puzzle_hash is not None
        plot_public_key = ProofOfSpace.generate_plot_public_key(
            master_sk_to_local_sk(sk).get_g1(), farmer_public_key,
            include_taproot)

        # The plot id is based on the harvester, farmer, and pool keys
        if pool_public_key is not None:
            plot_id: bytes32 = ProofOfSpace.calculate_plot_id_pk(
                pool_public_key, plot_public_key)
            plot_memo: bytes32 = stream_plot_info_pk(pool_public_key,
                                                     farmer_public_key, sk)
        else:
            assert pool_contract_puzzle_hash is not None
            plot_id = ProofOfSpace.calculate_plot_id_ph(
                pool_contract_puzzle_hash, plot_public_key)
            plot_memo = stream_plot_info_ph(pool_contract_puzzle_hash,
                                            farmer_public_key, sk)

        if args.plotid is not None:
            log.info(f"Debug plot ID: {args.plotid}")
            plot_id = bytes32(bytes.fromhex(args.plotid))

        if args.memo is not None:
            log.info(f"Debug memo: {args.memo}")
            plot_memo = bytes.fromhex(args.memo)

        # Log the plot memo for dev debugging
        plot_memo_str: str = plot_memo.hex()
        log.info(f"Memo: {plot_memo_str}")

        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        if use_datetime:
            filename: str = f"plot-k{args.size}-{dt_string}-{plot_id}.plot"
        else:
            filename = f"plot-k{args.size}-{plot_id}.plot"
        full_path: Path = args.final_dir / filename

        resolved_final_dir: str = str(Path(args.final_dir).resolve())
        plot_directories_list: str = config["harvester"]["plot_directories"]

        if args.exclude_final_dir:
            log.info(
                f"NOT adding directory {resolved_final_dir} to harvester for farming"
            )
            if resolved_final_dir in plot_directories_list:
                log.warning(
                    f"Directory {resolved_final_dir} already exists for harvester, please remove it manually"
                )
        else:
            if resolved_final_dir not in plot_directories_list:
                # Adds the directory to the plot directories if it is not present
                log.info(
                    f"Adding directory {resolved_final_dir} to harvester for farming"
                )
                config = add_plot_directory(resolved_final_dir, root_path)

        if not full_path.exists():
            log.info(f"Starting plot {i + 1}/{num}")
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                plot_memo,
                plot_id,
                args.buffer,
                args.buckets,
                args.stripe_size,
                args.num_threads,
                args.nobitfield,
            )
            finished_filenames.append(filename)
        else:
            log.info(f"Plot {filename} already exists")

    log.info("Summary:")

    if tmp_dir_created:
        try:
            args.tmp_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
            )

    if tmp2_dir_created:
        try:
            args.tmp2_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
            )

    log.info(f"Created a total of {len(finished_filenames)} new plots")
    for filename in finished_filenames:
        log.info(filename)
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port,
                                                  bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname,
                                                       test_rpc_port_2,
                                                       bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            assert len(await client_2.get_plot_directories()) == 1

            await client_2.add_plot_directory(str(plot_dir))
            await client_2.add_plot_directory(str(plot_dir_sub))

            assert len(await client_2.get_plot_directories()) == 3

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 2

            await client_2.delete_plot(str(plot_dir / filename))
            await client_2.delete_plot(str(plot_dir / filename_2))
            await client_2.refresh_plots()
            res_3 = await client_2.get_plots()

            assert len(res_3["plots"]) == num_plots + 1

            await client_2.remove_plot_directory(str(plot_dir))
            assert len(await client_2.get_plot_directories()) == 2

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.farmer_master_sk,
                                       uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(472)).get_g1())

            await client.set_reward_targets(
                encode_puzzle_hash(new_ph, "xch"),
                encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(1888)).get_g1())
            await client.set_reward_targets(
                None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"][
                "xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(
                new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port,
                                                  bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname,
                                                       test_rpc_port_2,
                                                       bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            # Test farmer get_plots
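            # The farmer keys its get_plots response by harvester, so take the first harvester's plot list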
            farmer_res = await client.get_plots()
            assert len(list(farmer_res.values())[0]["plots"]) == num_plots

            assert len(await client_2.get_plot_directories()) == 1

            await client_2.add_plot_directory(str(plot_dir))
            await client_2.add_plot_directory(str(plot_dir_sub))

            assert len(await client_2.get_plot_directories()) == 3

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 2

            await client_2.delete_plot(str(plot_dir / filename))
            await client_2.delete_plot(str(plot_dir / filename_2))
            await client_2.refresh_plots()
            res_3 = await client_2.get_plots()

            assert len(res_3["plots"]) == num_plots + 1

            await client_2.remove_plot_directory(str(plot_dir))
            assert len(await client_2.get_plot_directories()) == 2

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.farmer_master_sk,
                                       uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(472)).get_g1())

            await client.set_reward_targets(
                encode_puzzle_hash(new_ph, "xch"),
                encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(1888)).get_g1())
            await client.set_reward_targets(
                None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"][
                "xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(
                new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

            assert len((await client.get_pool_state())["pool_state"]) == 0
            all_sks = farmer_api.farmer.keychain.get_all_private_keys()
            auth_sk = master_sk_to_pooling_authentication_sk(
                all_sks[0][0], 2, 1)
            pool_list = [{
                "launcher_id":
                "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                "authentication_public_key":
                bytes(auth_sk.get_g1()).hex(),
                "owner_public_key":
                "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",
                "payout_instructions":
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                "pool_url":
                "localhost",
                "p2_singleton_puzzle_hash":
                "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                "target_puzzle_hash":
                "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
            }]
            config["pool"]["pool_list"] = pool_list
            save_config(root_path, "config.yaml", config)
            await farmer_api.farmer.update_pool_state()

            pool_state = (await client.get_pool_state())["pool_state"]
            assert len(pool_state) == 1
            assert (
                pool_state[0]["pool_config"]["payout_instructions"] ==
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
            )
            await client.set_payout_instructions(
                hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]),
                "1234vy")

            pool_state = (await client.get_pool_state())["pool_state"]
            assert pool_state[0]["pool_config"][
                "payout_instructions"] == "1234vy"

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()