Code Example #1: request_signatures (harvester)
    async def request_signatures(
            self, request: harvester_protocol.RequestSignatures):
        """
        The farmer requests a signature on the header hash, for one of the proofs that we found.
        A signature is created on the header hash using the harvester private key. This can also
        be used for pooling.
        """
        plot_filename = Path(request.plot_identifier[64:]).resolve()
        try:
            plot_info = self.harvester.provers[plot_filename]
        except KeyError:
            self.harvester.log.warning(
                f"KeyError plot {plot_filename} does not exist.")
            return

        # Look up local_sk from plot to save locked memory
        (
            pool_public_key_or_puzzle_hash,
            farmer_public_key,
            local_master_sk,
        ) = parse_plot_info(plot_info.prover.get_memo())
        local_sk = master_sk_to_local_sk(local_master_sk)

        agg_pk = ProofOfSpace.generate_plot_public_key(local_sk.get_g1(),
                                                       farmer_public_key)

        # This is only a partial signature. When combined with the farmer's half, it will
        # form a complete PrependSignature.
        message_signatures: List[Tuple[bytes32, G2Element]] = []
        for message in request.messages:
            signature: G2Element = AugSchemeMPL.sign(local_sk, message, agg_pk)
            message_signatures.append((message, signature))

        response: harvester_protocol.RespondSignatures = harvester_protocol.RespondSignatures(
            request.plot_identifier,
            request.challenge_hash,
            request.sp_hash,
            local_sk.get_g1(),
            farmer_public_key,
            message_signatures,
        )

        return make_msg(ProtocolMessageTypes.respond_signatures, response)
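The handler above returns only the harvester's half of each signature. The sketch below shows, assuming the farmer signs the same message with the same aggregate plot public key via the three-argument AugSchemeMPL.sign used above, how the two halves could be combined; the function and its arguments are illustrative, not the farmer's actual code.

from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey

def complete_plot_signature(
    farmer_sk: PrivateKey,
    agg_pk: G1Element,          # the same aggregate plot public key used by the harvester
    message: bytes,             # the message the harvester signed (e.g. an sp_hash)
    harvester_sig: G2Element,   # the partial signature from RespondSignatures
) -> G2Element:
    # The farmer signs the same message, augmented with the shared aggregate key.
    farmer_sig: G2Element = AugSchemeMPL.sign(farmer_sk, message, agg_pk)
    # Aggregating both halves yields the complete signature the comment above refers to.
    return AugSchemeMPL.aggregate([harvester_sig, farmer_sig])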
Code Example #2: load_plots
def load_plots(
    provers: Dict[Path, PlotInfo],
    failed_to_open_filenames: Dict[Path, int],
    farmer_public_keys: Optional[List[G1Element]],
    pool_public_keys: Optional[List[G1Element]],
    match_str: Optional[str],
    root_path: Path,
    open_no_key_filenames=False,
) -> Tuple[bool, Dict[Path, PlotInfo], Dict[Path, int], Set[Path]]:
    start_time = time.time()
    config_file = load_config(root_path, "config.yaml", "harvester")
    changed = False
    no_key_filenames: Set[Path] = set()
    log.info(f'Searching directories {config_file["plot_directories"]}')

    plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(config_file)
    all_filenames: List[Path] = []
    for paths in plot_filenames.values():
        all_filenames += paths
    total_size = 0
    new_provers: Dict[Path, PlotInfo] = {}

    if match_str is not None:
        log.info(
            f'Only loading plots that contain "{match_str}" in the file or directory name'
        )

    for filename in all_filenames:
        filename_str = str(filename)
        if match_str is not None and match_str not in filename_str:
            continue
        if filename.exists():
            if filename in failed_to_open_filenames and (
                    time.time() - failed_to_open_filenames[filename]) < 1200:
                # Try once every 20 minutes to open the file
                continue
            if filename in provers:
                stat_info = filename.stat()
                if stat_info.st_mtime == provers[filename].time_modified:
                    total_size += stat_info.st_size
                    new_provers[filename] = provers[filename]
                    continue
            try:
                prover = DiskProver(str(filename))
                expected_size = _expected_plot_size(
                    prover.get_size()) * UI_ACTUAL_SPACE_CONSTANT_FACTOR / 2.0
                stat_info = filename.stat()

                # TODO: consider checking if the file was just written to (which would mean that the file is still
                # being copied). A segfault might happen in this edge case.

                if prover.get_size() >= 30 and stat_info.st_size < 0.98 * expected_size:
                    log.warning(
                        f"Not farming plot {filename}. Size is {stat_info.st_size / (1024**3)} GiB, but expected"
                        f" at least: {expected_size / (1024 ** 3)} GiB. We assume the file is being copied."
                    )
                    continue

                (
                    pool_public_key,
                    farmer_public_key,
                    local_master_sk,
                ) = parse_plot_info(prover.get_memo())
                # Only use plots that have the correct keys associated with them
                if farmer_public_keys is not None and farmer_public_key not in farmer_public_keys:
                    log.warning(
                        f"Plot {filename} has a farmer public key that is not in the farmer's pk list."
                    )
                    no_key_filenames.add(filename)
                    if not open_no_key_filenames:
                        continue

                if pool_public_keys is not None and pool_public_key not in pool_public_keys:
                    log.warning(
                        f"Plot {filename} has a pool public key that is not in the farmer's pool pk list."
                    )
                    no_key_filenames.add(filename)
                    if not open_no_key_filenames:
                        continue

                stat_info = filename.stat()
                local_sk = master_sk_to_local_sk(local_master_sk)
                plot_public_key: G1Element = ProofOfSpace.generate_plot_public_key(
                    local_sk.get_g1(), farmer_public_key)
                new_provers[filename] = PlotInfo(
                    prover,
                    pool_public_key,
                    farmer_public_key,
                    plot_public_key,
                    local_sk,
                    stat_info.st_size,
                    stat_info.st_mtime,
                )
                total_size += stat_info.st_size
                changed = True
            except Exception as e:
                tb = traceback.format_exc()
                log.error(f"Failed to open file {filename}. {e} {tb}")
                failed_to_open_filenames[filename] = int(time.time())
                continue
            log.info(
                f"Found plot {filename} of size {new_provers[filename].prover.get_size()}"
            )

    log.info(
        f"Loaded a total of {len(new_provers)} plots of size {total_size / (1024 ** 4)} TiB, in"
        f" {time.time()-start_time} seconds")
    return changed, new_provers, failed_to_open_filenames, no_key_filenames
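A minimal usage sketch for load_plots as defined above, assuming it is called periodically so the 20-minute retry window stored in failed_to_open_filenames takes effect. The root path, disabled key filtering, and 120-second interval are illustrative choices, not taken from the source.

import time
from pathlib import Path
from typing import Dict

provers: Dict[Path, PlotInfo] = {}
failed_to_open: Dict[Path, int] = {}
root_path = Path("~/.chia/mainnet").expanduser()  # assumed root path

while True:
    changed, provers, failed_to_open, no_key_filenames = load_plots(
        provers,
        failed_to_open,
        farmer_public_keys=None,   # None disables the farmer key filter
        pool_public_keys=None,     # None disables the pool key filter
        match_str=None,
        root_path=root_path,
    )
    if changed:
        log.info(f"Plot set changed; now tracking {len(provers)} plots")
    time.sleep(120)  # illustrative re-scan interval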
Code Example #3: create_plots
def create_plots(args,
                 root_path,
                 use_datetime=True,
                 test_private_keys: Optional[List] = None):
    config_filename = config_path_for_filename(root_path, "config.yaml")
    config = load_config(root_path, config_filename)

    if args.tmp2_dir is None:
        args.tmp2_dir = args.tmp_dir

    farmer_public_key: G1Element
    if args.farmer_public_key is not None:
        farmer_public_key = G1Element.from_bytes(
            bytes.fromhex(args.farmer_public_key))
    else:
        farmer_public_key = get_farmer_public_key(args.alt_fingerprint)

    pool_public_key: G1Element
    if args.pool_public_key is not None:
        pool_public_key = G1Element.from_bytes(
            bytes.fromhex(args.pool_public_key))
    else:
        pool_public_key = get_pool_public_key(args.alt_fingerprint)
    if args.num is not None:
        num = args.num
    else:
        num = 1

    if args.size < config["min_mainnet_k_size"] and test_private_keys is None:
        log.warning(
            f"Creating plots with size k={args.size}, which is less than the minimum required for mainnet"
        )
    if args.size < 22:
        log.warning("k under 22 is not supported. Increasing k to 22")
        args.size = 22
    log.info(
        f"Creating {num} plots of size {args.size}, pool public key:  "
        f"{bytes(pool_public_key).hex()} farmer public key: {bytes(farmer_public_key).hex()}"
    )

    tmp_dir_created = False
    if not args.tmp_dir.exists():
        mkdir(args.tmp_dir)
        tmp_dir_created = True

    tmp2_dir_created = False
    if not args.tmp2_dir.exists():
        mkdir(args.tmp2_dir)
        tmp2_dir_created = True

    mkdir(args.final_dir)

    finished_filenames = []
    for i in range(num):
        # Generate a random master secret key
        if test_private_keys is not None:
            assert len(test_private_keys) == num
            sk: PrivateKey = test_private_keys[i]
        else:
            sk = AugSchemeMPL.key_gen(token_bytes(32))

        # The plot public key is the combination of the harvester and farmer keys
        plot_public_key = ProofOfSpace.generate_plot_public_key(
            master_sk_to_local_sk(sk).get_g1(), farmer_public_key)

        # The plot id is based on the harvester, farmer, and pool keys
        plot_id: bytes32 = ProofOfSpace.calculate_plot_id_pk(
            pool_public_key, plot_public_key)
        if args.plotid is not None:
            log.info(f"Debug plot ID: {args.plotid}")
            plot_id = bytes32(bytes.fromhex(args.plotid))

        plot_memo: bytes32 = stream_plot_info(pool_public_key,
                                              farmer_public_key, sk)
        if args.memo is not None:
            log.info(f"Debug memo: {args.memo}")
            plot_memo = bytes.fromhex(args.memo)

        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        if use_datetime:
            filename: str = f"plot-k{args.size}-{dt_string}-{plot_id}.plot"
        else:
            filename = f"plot-k{args.size}-{plot_id}.plot"
        full_path: Path = args.final_dir / filename

        resolved_final_dir: str = str(Path(args.final_dir).resolve())
        plot_directories_list: str = config["harvester"]["plot_directories"]

        if args.exclude_final_dir:
            log.info(
                f"NOT adding directory {resolved_final_dir} to harvester for farming"
            )
            if resolved_final_dir in plot_directories_list:
                log.warning(
                    f"Directory {resolved_final_dir} already exists for harvester, please remove it manually"
                )
        else:
            if resolved_final_dir not in plot_directories_list:
                # Adds the directory to the plot directories if it is not present
                log.info(
                    f"Adding directory {resolved_final_dir} to harvester for farming"
                )
                config = add_plot_directory(resolved_final_dir, root_path)

        if not full_path.exists():
            log.info(f"Starting plot {i + 1}/{num}")
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                plot_memo,
                plot_id,
                args.buffer,
                args.buckets,
                args.stripe_size,
                args.num_threads,
                args.nobitfield,
            )
            finished_filenames.append(filename)
        else:
            log.info(f"Plot {filename} already exists")

    log.info("Summary:")

    if tmp_dir_created:
        try:
            args.tmp_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
            )

    if tmp2_dir_created:
        try:
            args.tmp2_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
            )

    log.info(f"Created a total of {len(finished_filenames)} new plots")
    for filename in finished_filenames:
        log.info(filename)
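create_plots reads all of its options from the args object. The sketch below collects the attributes the function above actually accesses into an argparse.Namespace; the values shown are illustrative, not the CLI's real defaults.

from argparse import Namespace
from pathlib import Path

args = Namespace(
    size=32,                      # k size
    num=1,                        # number of plots to create
    tmp_dir=Path("/mnt/tmp"),     # primary temporary directory (illustrative)
    tmp2_dir=None,                # falls back to tmp_dir when None
    final_dir=Path("/mnt/plots"),
    farmer_public_key=None,       # hex string, or None to derive from the keychain
    pool_public_key=None,
    alt_fingerprint=None,
    plotid=None,                  # debug override for the plot id
    memo=None,                    # debug override for the plot memo
    exclude_final_dir=False,
    buffer=3389,
    buckets=128,
    stripe_size=65536,
    num_threads=2,
    nobitfield=False,
)

create_plots(args, root_path)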
Code Example #4: blocking_lookup (proof-of-space lookup)
        def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
            # Uses the DiskProver object to lookup qualities. This is a blocking call,
            # so it should be run in a thread pool.
            try:
                sp_challenge_hash = ProofOfSpace.calculate_pos_challenge(
                    plot_info.prover.get_id(),
                    new_challenge.challenge_hash,
                    new_challenge.sp_hash,
                )
                try:
                    quality_strings = plot_info.prover.get_qualities_for_challenge(sp_challenge_hash)
                except Exception as e:
                    self.harvester.log.error(f"Error using prover object {e}")
                    return []

                responses: List[Tuple[bytes32, ProofOfSpace]] = []
                if quality_strings is not None:
                    # Found proofs of space (on average 1 is expected per plot)
                    for index, quality_str in enumerate(quality_strings):
                        required_iters: uint64 = calculate_iterations_quality(
                            self.harvester.constants.DIFFICULTY_CONSTANT_FACTOR,
                            quality_str,
                            plot_info.prover.get_size(),
                            new_challenge.difficulty,
                            new_challenge.sp_hash,
                        )
                        sp_interval_iters = calculate_sp_interval_iters(
                            self.harvester.constants, new_challenge.sub_slot_iters
                        )
                        if required_iters < sp_interval_iters:
                            # Found a very good proof of space! Will fetch the whole proof from disk,
                            # then send it to the farmer
                            try:
                                proof_xs = plot_info.prover.get_full_proof(sp_challenge_hash, index)
                            except RuntimeError:
                                self.harvester.log.error(f"Exception fetching full proof for {filename}")
                                continue

                            # Look up local_sk from plot to save locked memory
                            (
                                pool_public_key_or_puzzle_hash,
                                farmer_public_key,
                                local_master_sk,
                            ) = parse_plot_info(plot_info.prover.get_memo())
                            local_sk = master_sk_to_local_sk(local_master_sk)
                            plot_public_key = ProofOfSpace.generate_plot_public_key(
                                local_sk.get_g1(), farmer_public_key
                            )
                            responses.append(
                                (
                                    quality_str,
                                    ProofOfSpace(
                                        sp_challenge_hash,
                                        plot_info.pool_public_key,
                                        plot_info.pool_contract_puzzle_hash,
                                        plot_public_key,
                                        uint8(plot_info.prover.get_size()),
                                        proof_xs,
                                    ),
                                )
                            )
                return responses
            except Exception as e:
                self.harvester.log.error(f"Unknown error: {e}")
                return []
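As the comment at the top of blocking_lookup notes, the DiskProver calls block, so they should not run on the event loop. A minimal sketch of dispatching the lookup to a thread pool with asyncio is shown below; the executor and the wrapper coroutine are illustrative, not the harvester's actual code.

import asyncio
import concurrent.futures

# Illustrative pool size; in practice it would be tuned to the number of disks.
executor = concurrent.futures.ThreadPoolExecutor(max_workers=4)

async def lookup_challenge(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
    # Run the blocking DiskProver lookup in the thread pool, keeping the event loop free.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(executor, blocking_lookup, filename, plot_info)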
Code Example #5: load_plots (alternate version)
def load_plots(
    provers: Dict[Path, PlotInfo],
    failed_to_open_filenames: Set[Path],
    farmer_public_keys: Optional[List[G1Element]],
    pool_public_keys: Optional[List[G1Element]],
    root_path: Path,
    open_no_key_filenames=False,
) -> Tuple[bool, Dict[Path, PlotInfo], Set[Path], Set[Path]]:
    config_file = load_config(root_path, "config.yaml", "harvester")
    changed = False
    no_key_filenames: Set[Path] = set()
    log.info(f'Searching directories {config_file["plot_directories"]}')

    plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(config_file)
    all_filenames: List[Path] = []
    for paths in plot_filenames.values():
        all_filenames += paths
    total_size = 0

    for filename in all_filenames:
        if filename in provers:
            stat_info = filename.stat()
            if stat_info.st_mtime == provers[filename].time_modified:
                total_size += stat_info.st_size
                continue
        if filename in failed_to_open_filenames:
            continue
        if filename.exists():
            try:
                prover = DiskProver(str(filename))
                (
                    pool_public_key,
                    farmer_public_key,
                    local_master_sk,
                ) = parse_plot_info(prover.get_memo())
                # Only use plots that have the correct keys associated with them
                if (farmer_public_keys is not None
                        and farmer_public_key not in farmer_public_keys):
                    log.warning(
                        f"Plot {filename} has a farmer public key that is not in the farmer's pk list."
                    )
                    no_key_filenames.add(filename)
                    if not open_no_key_filenames:
                        continue

                if (pool_public_keys is not None
                        and pool_public_key not in pool_public_keys):
                    log.warning(
                        f"Plot {filename} has a pool public key that is not in the farmer's pool pk list."
                    )
                    no_key_filenames.add(filename)
                    if not open_no_key_filenames:
                        continue

                stat_info = filename.stat()
                local_sk = master_sk_to_local_sk(local_master_sk)
                plot_public_key: G1Element = ProofOfSpace.generate_plot_public_key(
                    local_sk.get_g1(), farmer_public_key)
                provers[filename] = PlotInfo(
                    prover,
                    pool_public_key,
                    farmer_public_key,
                    plot_public_key,
                    local_sk,
                    stat_info.st_size,
                    stat_info.st_mtime,
                )
                total_size += stat_info.st_size
                changed = True
            except Exception as e:
                tb = traceback.format_exc()
                log.error(f"Failed to open file {filename}. {e} {tb}")
                failed_to_open_filenames.add(filename)
                continue
            log.info(
                f"Found plot {filename} of size {provers[filename].prover.get_size()}"
            )

    log.info(
        f"Loaded a total of {len(provers)} plots of size {total_size / (1024 ** 4)} TB"
    )
    return (changed, provers, failed_to_open_filenames, no_key_filenames)
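parse_plot_info, used above to recover the keys from the plot memo, is not one of the listed examples. The sketch below is a guess at the layout it decodes, based on stream_plot_info in the create_plots examples and assuming blspy's 48-byte G1Element and 32-byte PrivateKey serializations; plots with a pool contract puzzle hash (see pool_contract_puzzle_hash in example #4) would use a different layout.

from typing import Tuple
from blspy import G1Element, PrivateKey

def parse_plot_info_sketch(memo: bytes) -> Tuple[G1Element, G1Element, PrivateKey]:
    # Assumed layout: pool_pk (48 bytes) || farmer_pk (48 bytes) || local_master_sk (32 bytes)
    assert len(memo) == 48 + 48 + 32
    pool_public_key = G1Element.from_bytes(memo[0:48])
    farmer_public_key = G1Element.from_bytes(memo[48:96])
    local_master_sk = PrivateKey.from_bytes(memo[96:128])
    return pool_public_key, farmer_public_key, local_master_sk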
Code Example #6: create_plots (alternate version)
def create_plots(args,
                 root_path,
                 use_datetime=True,
                 test_private_keys: Optional[List] = None):
    config_filename = config_path_for_filename(root_path, "config.yaml")

    if args.tmp2_dir is None:
        args.tmp2_dir = args.final_dir

    farmer_public_key: G1Element
    if args.farmer_public_key is not None:
        farmer_public_key = G1Element.from_bytes(
            bytes.fromhex(args.farmer_public_key))
    else:
        farmer_public_key = get_default_farmer_public_key()

    pool_public_key: G1Element
    if args.pool_public_key is not None:
        pool_public_key = G1Element.from_bytes(
            bytes.fromhex(args.pool_public_key))
    else:
        pool_public_key = get_default_pool_public_key()
    if args.num is not None:
        num = args.num
    else:
        num = 1
    log.info(
        f"Creating {num} plots of size {args.size}, pool public key:  "
        f"{bytes(pool_public_key).hex()} farmer public key: {bytes(farmer_public_key).hex()}"
    )

    tmp_dir_created = False
    if not args.tmp_dir.exists():
        mkdir(args.tmp_dir)
        tmp_dir_created = True

    tmp2_dir_created = False
    if not args.tmp2_dir.exists():
        mkdir(args.tmp2_dir)
        tmp2_dir_created = True

    mkdir(args.final_dir)

    finished_filenames = []
    config = load_config(root_path, config_filename)
    plot_filenames = get_plot_filenames(config["harvester"])
    for i in range(num):
        # Generate a random master secret key
        if test_private_keys is not None:
            assert len(test_private_keys) == num
            sk: PrivateKey = test_private_keys[i]
        else:
            sk = AugSchemeMPL.key_gen(token_bytes(32))

        # The plot public key is the combination of the harvester and farmer keys
        plot_public_key = ProofOfSpace.generate_plot_public_key(
            master_sk_to_local_sk(sk).get_g1(), farmer_public_key)

        # The plot id is based on the harvester, farmer, and pool keys
        plot_id: bytes32 = ProofOfSpace.calculate_plot_id(
            pool_public_key, plot_public_key)
        if args.plotid is not None:
            log.info(f"Debug plot ID: {args.plotid}")
            plot_id = bytes32(bytes.fromhex(args.plotid))

        plot_memo: bytes32 = stream_plot_info(pool_public_key,
                                              farmer_public_key, sk)
        if args.memo is not None:
            log.info(f"Debug memo: {args.memo}")
            plot_memo = bytes.fromhex(args.memo)

        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        if use_datetime:
            filename: str = f"plot-k{args.size}-{dt_string}-{plot_id}.plot"
        else:
            filename = f"plot-k{args.size}-{plot_id}.plot"
        full_path: Path = args.final_dir / filename

        if args.final_dir.resolve() not in plot_filenames:
            if (str(args.final_dir.resolve())
                    not in config["harvester"]["plot_directories"]):
                # Adds the directory to the plot directories if it is not present
                config = add_plot_directory(str(args.final_dir.resolve()),
                                            root_path)

        if not full_path.exists():
            log.info(f"Starting plot {i + 1}/{num}")
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                plot_memo,
                plot_id,
                args.buffer,
                args.buckets,
                args.stripe_size,
                args.num_threads,
            )
            finished_filenames.append(filename)
        else:
            log.info(f"Plot {filename} already exists")

    log.info("Summary:")

    if tmp_dir_created:
        try:
            args.tmp_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
            )

    if tmp2_dir_created:
        try:
            args.tmp2_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
            )

    log.info(f"Created a total of {len(finished_filenames)} new plots")
    for filename in finished_filenames:
        log.info(filename)
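The filenames written above follow plot-k{size}-{datetime}-{plot_id}.plot, and example #7 expects the 64-character plot ID as the suffix. The helper below is an illustrative way to parse that format back out; it is not part of the source.

import re
from typing import Optional, Tuple

# Mirrors f"plot-k{args.size}-{dt_string}-{plot_id}.plot" as built above.
PLOT_FILENAME_RE = re.compile(
    r"^plot-k(?P<k>\d+)-(?P<dt>\d{4}-\d{2}-\d{2}-\d{2}-\d{2})-(?P<plot_id>[0-9a-f]{64})\.plot$"
)

def parse_plot_filename(name: str) -> Optional[Tuple[int, str, bytes]]:
    m = PLOT_FILENAME_RE.match(name)
    if m is None:
        return None  # not a datetime-style plot filename
    return int(m.group("k")), m.group("dt"), bytes.fromhex(m.group("plot_id"))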
Code Example #7: check_plots
def check_plots(root_path, num, challenge_start, grep_string, list_duplicates,
                debug_show_memo):
    config = load_config(root_path, "config.yaml")
    if num is not None:
        if num == 0:
            log.warning("Not opening plot files")
        else:
            if num < 5:
                log.warning(
                    f"{num} challenges is too low, setting it to the minimum of 5"
                )
                num = 5
            if num < 30:
                log.warning(
                    "Use 30 challenges (our default) for balance of speed and accurate results"
                )
    else:
        num = 30

    if challenge_start is not None:
        num_start = challenge_start
        num_end = num_start + num
    else:
        num_start = 0
        num_end = num
    challenges = num_end - num_start

    if grep_string is not None:
        match_str = grep_string
    else:
        match_str = None
    if list_duplicates:
        log.warning("Checking for duplicate Plot IDs")
        log.info("Plot filenames expected to end with -[64 char plot ID].plot")

    show_memo: bool = debug_show_memo

    if list_duplicates:
        plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(
            config["harvester"])
        all_filenames: List[Path] = []
        for paths in plot_filenames.values():
            all_filenames += paths
        find_duplicate_plot_IDs(all_filenames)

    if num == 0:
        return

    v = Verifier()
    log.info("Loading plots in config.yaml using plot_tools loading code\n")
    kc: Keychain = Keychain()
    pks = [
        master_sk_to_farmer_sk(sk).get_g1()
        for sk, _ in kc.get_all_private_keys()
    ]
    pool_public_keys = [
        G1Element.from_bytes(bytes.fromhex(pk))
        for pk in config["farmer"]["pool_public_keys"]
    ]
    _, provers, failed_to_open_filenames, no_key_filenames = load_plots(
        {},
        {},
        pks,
        pool_public_keys,
        match_str,
        show_memo,
        root_path,
        open_no_key_filenames=True,
    )
    if len(provers) > 0:
        log.info("")
        log.info("")
        log.info(f"Starting to test each plot with {num} challenges each\n")
    total_good_plots: Counter = Counter()
    total_bad_plots = 0
    total_size = 0
    bad_plots_list: List[Path] = []

    for plot_path, plot_info in provers.items():
        pr = plot_info.prover
        log.info(f"Testing plot {plot_path} k={pr.get_size()}")
        log.info(f"\tPool public key: {plot_info.pool_public_key}")

        # Look up local_sk from plot to save locked memory
        (
            pool_public_key_or_puzzle_hash,
            farmer_public_key,
            local_master_sk,
        ) = parse_plot_info(pr.get_memo())
        local_sk = master_sk_to_local_sk(local_master_sk)
        log.info(f"\tFarmer public key: {farmer_public_key}")
        log.info(f"\tLocal sk: {local_sk}")
        total_proofs = 0
        caught_exception: bool = False
        for i in range(num_start, num_end):
            challenge = std_hash(i.to_bytes(32, "big"))
            # Some plot errors cause get_qualities_for_challenge to throw a RuntimeError
            try:
                for index, quality_str in enumerate(
                        pr.get_qualities_for_challenge(challenge)):
                    # Other plot errors cause get_full_proof or validate_proof to throw an AssertionError
                    try:
                        proof = pr.get_full_proof(challenge, index)
                        total_proofs += 1
                        ver_quality_str = v.validate_proof(
                            pr.get_id(), pr.get_size(), challenge, proof)
                        assert quality_str == ver_quality_str
                    except AssertionError as e:
                        log.error(
                            f"{type(e)}: {e} error in proving/verifying for plot {plot_path}"
                        )
                        caught_exception = True
            except BaseException as e:
                if isinstance(e, KeyboardInterrupt):
                    log.warning("Interrupted, closing")
                    return
                log.error(
                    f"{type(e)}: {e} error in getting challenge qualities for plot {plot_path}"
                )
                caught_exception = True
            if caught_exception is True:
                break
        if total_proofs > 0 and caught_exception is False:
            log.info(
                f"\tProofs {total_proofs} / {challenges}, {round(total_proofs/float(challenges), 4)}"
            )
            total_good_plots[pr.get_size()] += 1
            total_size += plot_path.stat().st_size
        else:
            total_bad_plots += 1
            log.error(
                f"\tProofs {total_proofs} / {challenges}, {round(total_proofs/float(challenges), 4)}"
            )
            bad_plots_list.append(plot_path)
    log.info("")
    log.info("")
    log.info("Summary")
    total_plots: int = sum(list(total_good_plots.values()))
    log.info(
        f"Found {total_plots} valid plots, total size {total_size / (1024 * 1024 * 1024 * 1024):.5f} TiB"
    )
    for (k, count) in sorted(dict(total_good_plots).items()):
        log.info(f"{count} plots of size {k}")
    grand_total_bad = total_bad_plots + len(failed_to_open_filenames)
    if grand_total_bad > 0:
        log.warning(f"{grand_total_bad} invalid plots found:")
        for bad_plot_path in bad_plots_list:
            log.warning(f"{bad_plot_path}")
    if len(no_key_filenames) > 0:
        log.warning(
            f"There are {len(no_key_filenames)} plots with a farmer or pool public key that "
            f"is not on this machine. The farmer private key must be in the keychain in order to "
            f"farm them, use 'chia keys' to transfer keys. The pool public keys must be in the config.yaml"
        )
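find_duplicate_plot_IDs is called above but not shown in these examples. The sketch below is one plausible implementation that groups files by the 64-character plot ID suffix mentioned in the log message; it is an assumption, not the actual helper.

from collections import defaultdict
from pathlib import Path
from typing import Dict, List

def find_duplicate_plot_ids_sketch(all_filenames: List[Path]) -> None:
    by_plot_id: Dict[str, List[Path]] = defaultdict(list)
    for path in all_filenames:
        # Filenames are expected to end with -[64 char plot ID].plot (see the log above).
        by_plot_id[path.stem[-64:]].append(path)
    for plot_id, paths in by_plot_id.items():
        if len(paths) > 1:
            log.warning(f"Duplicate plot ID {plot_id} found in: {paths}")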