Example #1
    def test_k_21(self):
        challenge: bytes = bytes([i for i in range(0, 32)])

        plot_seed: bytes = bytes([5, 104, 52, 4, 51, 55, 23, 84, 91, 10, 111, 12, 13,
                                  222, 151, 16, 228, 211, 254, 45, 92, 198, 204, 10, 9,
                                  10, 11, 129, 139, 171, 15, 23])

        pl = DiskPlotter()
        pl.create_plot_disk(".", ".", ".", "myplot.dat", 21, bytes([1, 2, 3, 4, 5]), plot_seed, 2*1024)
        pl = None

        pr = DiskProver(str(Path("myplot.dat")))

        total_proofs: int = 0
        iterations: int = 5000

        v = Verifier()
        for i in range(iterations):
            if i % 100 == 0:
                print(i)
            challenge = sha256(i.to_bytes(4, "big")).digest()
            for index, quality in enumerate(pr.get_qualities_for_challenge(challenge)):
                proof = pr.get_full_proof(challenge, index)
                assert len(proof) == 8*pr.get_size()
                computed_quality = v.validate_proof(plot_seed, pr.get_size(), challenge, proof)
                assert computed_quality == quality
                total_proofs += 1

        print(f"total proofs {total_proofs} out of {iterations}\
            {total_proofs / iterations}")
        assert total_proofs == 4647
        pr = None
        Path("myplot.dat").unlink()
Example #2
 def __init__(self):
     plot_seeds: List[bytes32] = [
         ProofOfSpace.calculate_plot_seed(pool_pk, plot_pk)
         for plot_pk in plot_pks
     ]
     self.plot_dir = os.path.join("tests", "plots")
     self.filenames: List[str] = [
         "genesis-plots-" + str(k) +
         sha256(int.to_bytes(i, 4, "big")).digest().hex() + ".dat"
         for i in range(num_plots)
     ]
     done_filenames = set()
     try:
         for pn, filename in enumerate(self.filenames):
             if not os.path.exists(os.path.join(self.plot_dir, filename)):
                 plotter = DiskPlotter()
                 plotter.create_plot_disk(
                     self.plot_dir,
                     self.plot_dir,
                     filename,
                     k,
                     b"genesis",
                     plot_seeds[pn],
                 )
                 done_filenames.add(filename)
     except KeyboardInterrupt:
         for filename in self.filenames:
             if filename not in done_filenames and os.path.exists(
                     os.path.join(self.plot_dir, filename)):
                 os.remove(os.path.join(self.plot_dir, filename))
         sys.exit(1)
Example #3
    def test_faulty_plot_doesnt_crash(self):
        if Path("myplot.dat").exists():
            Path("myplot.dat").unlink()
        if Path("myplotbad.dat").exists():
            Path("myplotbad.dat").unlink()

        plot_id: bytes = bytes([i for i in range(32, 64)])
        pl = DiskPlotter()
        pl.create_plot_disk(
            ".",
            ".",
            ".",
            "myplot.dat",
            21,
            bytes([1, 2, 3, 4, 5]),
            plot_id,
            300,
            32,
            8192,
            8,
            False,
        )
        f = open("myplot.dat", "rb")
        all_data = bytearray(f.read())
        f.close()
        assert len(all_data) > 20000000
        all_data_bad = all_data[:20000000] + bytearray(
            token_bytes(10000)) + all_data[20100000:]
        f_bad = open("myplotbad.dat", "wb")
        f_bad.write(all_data_bad)
        f_bad.close()

        pr = DiskProver(str(Path("myplotbad.dat")))

        iterations: int = 50000
        v = Verifier()
        successes = 0
        failures = 0
        for i in range(iterations):
            if i % 100 == 0:
                print(i)
            challenge = sha256(i.to_bytes(4, "big")).digest()
            try:
                for index, quality in enumerate(
                        pr.get_qualities_for_challenge(challenge)):
                    proof = pr.get_full_proof(challenge, index)
                    computed_quality = v.validate_proof(
                        plot_id, pr.get_size(), challenge, proof)
                    if computed_quality == quality:
                        successes += 1
                    else:
                        print("Did not validate")
                        failures += 1
            except Exception as e:
                print(f"Exception: {e}")
                failures += 1
        print(f"Successes: {successes}")
        print(f"Failures: {failures}")
Example #4
def create_plots(args,
                 root_path,
                 use_datetime=True,
                 test_private_keys: Optional[List] = None):
    config_filename = config_path_for_filename(root_path, "config.yaml")
    config = load_config(root_path, config_filename)

    if args.tmp2_dir is None:
        args.tmp2_dir = args.tmp_dir

    farmer_public_key: G1Element
    if args.farmer_public_key is not None:
        farmer_public_key = G1Element.from_bytes(
            bytes.fromhex(args.farmer_public_key))
    else:
        farmer_public_key = get_farmer_public_key(args.alt_fingerprint)

    pool_public_key: G1Element
    if args.pool_public_key is not None:
        pool_public_key = G1Element.from_bytes(
            bytes.fromhex(args.pool_public_key))
    else:
        pool_public_key = get_pool_public_key(args.alt_fingerprint)
    if args.num is not None:
        num = args.num
    else:
        num = 1

    if args.size < config["min_mainnet_k_size"] and test_private_keys is None:
        log.warning(
            f"Creating plots with size k={args.size}, which is less than the minimum required for mainnet"
        )
    if args.size < 22:
        log.warning("k under 22 is not supported. Increasing k to 22")
        args.size = 22
    log.info(
        f"Creating {num} plots of size {args.size}, pool public key:  "
        f"{bytes(pool_public_key).hex()} farmer public key: {bytes(farmer_public_key).hex()}"
    )

    tmp_dir_created = False
    if not args.tmp_dir.exists():
        mkdir(args.tmp_dir)
        tmp_dir_created = True

    tmp2_dir_created = False
    if not args.tmp2_dir.exists():
        mkdir(args.tmp2_dir)
        tmp2_dir_created = True

    mkdir(args.final_dir)

    finished_filenames = []
    for i in range(num):
        # Generate a random master secret key
        if test_private_keys is not None:
            assert len(test_private_keys) == num
            sk: PrivateKey = test_private_keys[i]
        else:
            sk = AugSchemeMPL.key_gen(token_bytes(32))

        # The plot public key is the combination of the harvester and farmer keys
        plot_public_key = ProofOfSpace.generate_plot_public_key(
            master_sk_to_local_sk(sk).get_g1(), farmer_public_key)

        # The plot id is based on the harvester, farmer, and pool keys
        plot_id: bytes32 = ProofOfSpace.calculate_plot_id_pk(
            pool_public_key, plot_public_key)
        if args.plotid is not None:
            log.info(f"Debug plot ID: {args.plotid}")
            plot_id = bytes32(bytes.fromhex(args.plotid))

        plot_memo: bytes32 = stream_plot_info(pool_public_key,
                                              farmer_public_key, sk)
        if args.memo is not None:
            log.info(f"Debug memo: {args.memo}")
            plot_memo = bytes.fromhex(args.memo)

        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        if use_datetime:
            filename: str = f"plot-k{args.size}-{dt_string}-{plot_id}.plot"
        else:
            filename = f"plot-k{args.size}-{plot_id}.plot"
        full_path: Path = args.final_dir / filename

        resolved_final_dir: str = str(Path(args.final_dir).resolve())
        plot_directories_list: str = config["harvester"]["plot_directories"]

        if args.exclude_final_dir:
            log.info(
                f"NOT adding directory {resolved_final_dir} to harvester for farming"
            )
            if resolved_final_dir in plot_directories_list:
                log.warning(
                    f"Directory {resolved_final_dir} already exists for harvester, please remove it manually"
                )
        else:
            if resolved_final_dir not in plot_directories_list:
                # Adds the directory to the plot directories if it is not present
                log.info(
                    f"Adding directory {resolved_final_dir} to harvester for farming"
                )
                config = add_plot_directory(resolved_final_dir, root_path)

        if not full_path.exists():
            log.info(f"Starting plot {i + 1}/{num}")
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                plot_memo,
                plot_id,
                args.buffer,
                args.buckets,
                args.stripe_size,
                args.num_threads,
                args.nobitfield,
            )
            finished_filenames.append(filename)
        else:
            log.info(f"Plot {filename} already exists")

    log.info("Summary:")

    if tmp_dir_created:
        try:
            args.tmp_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
            )

    if tmp2_dir_created:
        try:
            args.tmp2_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
            )

    log.info(f"Created a total of {len(finished_filenames)} new plots")
    for filename in finished_filenames:
        log.info(filename)
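
create_plots reads everything from an args object, so outside chia's own CLI it can be driven with a plain namespace carrying the attributes the function touches. A sketch under that assumption, with field names taken from the body above and purely illustrative values:

from pathlib import Path
from types import SimpleNamespace

args = SimpleNamespace(
    size=25,                 # k
    num=1,                   # number of plots to create
    buffer=2048,             # plotting memory budget
    buckets=128,
    stripe_size=65536,
    num_threads=2,
    nobitfield=False,
    tmp_dir=Path("/tmp/plot-tmp"),
    tmp2_dir=None,           # falls back to tmp_dir
    final_dir=Path("/plots"),
    exclude_final_dir=False,
    plotid=None,             # hex string to force a debug plot id
    memo=None,               # hex string to force a debug memo
    farmer_public_key=None,  # hex-encoded G1Element; otherwise derived via alt_fingerprint
    pool_public_key=None,
    alt_fingerprint=None,
)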
Example #5
def main():
    """
    Script for creating plots and adding them to the plot config file.
    """

    parser = argparse.ArgumentParser(description="Chia plotting script.")
    parser.add_argument("-k", "--size", help="Plot size", type=int, default=20)
    parser.add_argument("-n",
                        "--num_plots",
                        help="Number of plots",
                        type=int,
                        default=10)
    parser.add_argument("-p",
                        "--pool_pub_key",
                        help="Hex public key of pool",
                        type=str,
                        default="")
    parser.add_argument(
        "-t",
        "--tmp_dir",
        help="Temporary directory for plotting files (relative or absolute)",
        type=str,
        default="./plots",
    )
    parser.add_argument(
        "-d",
        "--final_dir",
        help="Final directory for plots (relative or absolute)",
        type=str,
        default="./plots",
    )

    # We need the keys file to access the pool keys (if they exist) and the sk_seed.
    args = parser.parse_args()
    if not os.path.isfile(key_config_filename):
        raise RuntimeError(
            "Keys not generated. Run python3 ./scripts/regenerate_keys.py.")

    # The seed is what will be used to generate a private key for each plot
    key_config = safe_load(open(key_config_filename, "r"))
    sk_seed: bytes = bytes.fromhex(key_config["sk_seed"])

    pool_pk: PublicKey
    if len(args.pool_pub_key) > 0:
        # Use the provided pool public key, useful for using an external pool
        pool_pk = PublicKey.from_bytes(bytes.fromhex(args.pool_pub_key))
    else:
        # Use the pool public key from the config, useful for solo farming
        pool_sk = PrivateKey.from_bytes(
            bytes.fromhex(key_config["pool_sks"][0]))
        pool_pk = pool_sk.get_public_key()

    print(
        f"Creating {args.num_plots} plots of size {args.size}, sk_seed {sk_seed.hex()} ppk {pool_pk}"
    )

    for i in range(args.num_plots):
        # Generate a sk based on the seed, plot size (k), and index
        sk: PrivateKey = PrivateKey.from_seed(sk_seed +
                                              args.size.to_bytes(1, "big") +
                                              i.to_bytes(4, "big"))

        # The plot seed is based on the pool and plot pks
        plot_seed: bytes32 = ProofOfSpace.calculate_plot_seed(
            pool_pk, sk.get_public_key())
        filename: str = f"plot-{i}-{args.size}-{plot_seed}.dat"
        full_path: str = os.path.join(args.final_dir, filename)
        if os.path.isfile(full_path):
            print(f"Plot {filename} already exists")
        else:
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(args.tmp_dir, args.final_dir, filename,
                                     args.size, bytes([]), plot_seed)

        # Updates the config if necessary.
        if os.path.isfile(plot_config_filename):
            plot_config = safe_load(open(plot_config_filename, "r"))
        else:
            plot_config = {"plots": {}}
        plot_config_plots_new = deepcopy(plot_config["plots"])
        if full_path not in plot_config_plots_new:
            plot_config_plots_new[full_path] = {
                "sk": bytes(sk).hex(),
                "pool_pk": bytes(pool_pk).hex(),
            }
        plot_config["plots"].update(plot_config_plots_new)

        # Dumps the new config to disk.
        with open(plot_config_filename, "w") as f:
            safe_dump(plot_config, f)
Example #6
    def test_k_21(self):
        challenge: bytes = bytes([i for i in range(0, 32)])

        plot_seed: bytes = bytes([5, 104, 52, 4, 51, 55, 23, 84, 91, 10, 111, 12, 13,
                                  222, 151, 16, 228, 211, 254, 45, 92, 198, 204, 10, 9,
                                  10, 11, 129, 139, 171, 15, 23])

        pl = DiskPlotter()
        pl.create_plot_disk(
            ".", ".", ".", "myplot.dat", 21, bytes([1, 2, 3, 4, 5]), plot_seed, 300, 32, 8192, 8
        )
        pl = None

        pr = DiskProver(str(Path("myplot.dat")))

        total_proofs: int = 0
        iterations: int = 5000

        v = Verifier()
        for i in range(iterations):
            if i % 100 == 0:
                print(i)
            challenge = sha256(i.to_bytes(4, "big")).digest()
            for index, quality in enumerate(pr.get_qualities_for_challenge(challenge)):
                proof = pr.get_full_proof(challenge, index)
                assert len(proof) == 8 * pr.get_size()
                computed_quality = v.validate_proof(
                    plot_seed, pr.get_size(), challenge, proof
                )
                assert computed_quality == quality
                total_proofs += 1

        print(
            f"total proofs {total_proofs} out of {iterations}\
            {total_proofs / iterations}"
        )
        assert total_proofs > 4000
        assert total_proofs < 6000
        pr = None
        sha256_plot_hash = sha256()
        with open("myplot.dat", "rb") as f:
            # Read and update hash string value in blocks of 4K
            for byte_block in iter(lambda: f.read(4096), b""):
                sha256_plot_hash.update(byte_block)
            plot_hash = str(sha256_plot_hash.hexdigest())
        assert (
            plot_hash
            == "80e32f560f3a4347760d6baae8d16fbaf484948088bff05c51bdcc24b7bc40d9"
        )
        print(f"\nPlotfile asserted sha256: {plot_hash}\n")
        Path("myplot.dat").unlink()
Example #7
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname, test_rpc_port_2, bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(
                std_hash(b"1"), std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(2)
            )
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk, AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            # Test farmer get_harvesters
            async def test_get_harvesters():
                farmer_res = await client.get_harvesters()
                if len(list(farmer_res["harvesters"])) != 1:
                    return False
                if len(list(farmer_res["harvesters"][0]["plots"])) != num_plots:
                    return False
                return True

            await time_out_assert(30, test_get_harvesters)

            expected_result: PlotRefreshResult = PlotRefreshResult()

            def test_refresh_callback(refresh_result: PlotRefreshResult):
                assert refresh_result.loaded_plots == expected_result.loaded_plots
                assert refresh_result.removed_plots == expected_result.removed_plots
                assert refresh_result.processed_files == expected_result.processed_files
                assert refresh_result.remaining_files == expected_result.remaining_files

            harvester.plot_manager.set_refresh_callback(test_refresh_callback)

            async def test_case(
                trigger, expect_loaded, expect_removed, expect_processed, expected_directories, expect_total_plots
            ):
                expected_result.loaded_plots = expect_loaded
                expected_result.removed_plots = expect_removed
                expected_result.processed_files = expect_processed
                await trigger
                harvester.plot_manager.trigger_refresh()
                assert len(await client_2.get_plot_directories()) == expected_directories
                await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
                result = await client_2.get_plots()
                assert len(result["plots"]) == expect_total_plots
                assert len(harvester.plot_manager.cache) == expect_total_plots
                assert len(harvester.plot_manager.failed_to_open_filenames) == 0

            # Add plot_dir with two new plots
            await test_case(
                client_2.add_plot_directory(str(plot_dir)),
                expect_loaded=2,
                expect_removed=0,
                expect_processed=2,
                expected_directories=2,
                expect_total_plots=num_plots + 2,
            )
            # Add plot_dir_sub with one duplicate
            await test_case(
                client_2.add_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=0,
                expect_processed=1,
                expected_directories=3,
                expect_total_plots=num_plots + 2,
            )
            # Delete one plot
            await test_case(
                client_2.delete_plot(str(plot_dir / filename)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove directory with the duplicate
            await test_case(
                client_2.remove_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=2,
                expect_total_plots=num_plots + 1,
            )
            # Re-add the directory with the duplicate for other tests
            await test_case(
                client_2.add_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=0,
                expect_processed=1,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove the directory which has the duplicated plot loaded. This removes the duplicated plot from plot_dir
            # and in the same run loads the plot from plot_dir_sub, which is no longer seen as a duplicate.
            await test_case(
                client_2.remove_plot_directory(str(plot_dir)),
                expect_loaded=1,
                expect_removed=1,
                expect_processed=1,
                expected_directories=2,
                expect_total_plots=num_plots + 1,
            )
            # Re-add the directory; now the plot seen as a duplicate is the one in plot_dir, not in plot_dir_sub as before
            await test_case(
                client_2.add_plot_directory(str(plot_dir)),
                expect_loaded=0,
                expect_removed=0,
                expect_processed=1,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove the duplicated plot
            await test_case(
                client_2.delete_plot(str(plot_dir / filename_2)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=3,
                expect_total_plots=num_plots + 1,
            )
            # Remove the directory with the loaded plot which is no longer a duplicate
            await test_case(
                client_2.remove_plot_directory(str(plot_dir_sub)),
                expect_loaded=0,
                expect_removed=1,
                expect_processed=0,
                expected_directories=2,
                expect_total_plots=num_plots,
            )
            # Remove the directory which contains all other plots
            await test_case(
                client_2.remove_plot_directory(str(get_plot_dir())),
                expect_loaded=0,
                expect_removed=20,
                expect_processed=0,
                expected_directories=1,
                expect_total_plots=0,
            )
            # Recover the plots to test caching
            # First make sure cache gets written if required and new plots are loaded
            await test_case(
                client_2.add_plot_directory(str(get_plot_dir())),
                expect_loaded=20,
                expect_removed=0,
                expect_processed=20,
                expected_directories=2,
                expect_total_plots=20,
            )
            assert harvester.plot_manager.cache.path().exists()
            unlink(harvester.plot_manager.cache.path())
            # Should not write the cache again on shutdown because it didn't change
            assert not harvester.plot_manager.cache.path().exists()
            harvester.plot_manager.stop_refreshing()
            assert not harvester.plot_manager.cache.path().exists()
            # Manually trigger `save_cache` and make sure it creates a new cache file
            harvester.plot_manager.cache.save()
            assert harvester.plot_manager.cache.path().exists()

            expected_result.loaded_plots = 20
            expected_result.removed_plots = 0
            expected_result.processed_files = 20
            expected_result.remaining_files = 0
            plot_manager: PlotManager = PlotManager(harvester.root_path, test_refresh_callback)
            plot_manager.start_refreshing()
            assert len(harvester.plot_manager.cache) == len(plot_manager.cache)
            await time_out_assert(5, plot_manager.needs_refresh, value=False)
            for path, plot_info in harvester.plot_manager.plots.items():
                assert path in plot_manager.plots
                assert plot_manager.plots[path].prover.get_filename() == plot_info.prover.get_filename()
                assert plot_manager.plots[path].prover.get_id() == plot_info.prover.get_id()
                assert plot_manager.plots[path].prover.get_memo() == plot_info.prover.get_memo()
                assert plot_manager.plots[path].prover.get_size() == plot_info.prover.get_size()
                assert plot_manager.plots[path].pool_public_key == plot_info.pool_public_key
                assert plot_manager.plots[path].pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash
                assert plot_manager.plots[path].plot_public_key == plot_info.plot_public_key
                assert plot_manager.plots[path].file_size == plot_info.file_size
                assert plot_manager.plots[path].time_modified == plot_info.time_modified

            assert harvester.plot_manager.plot_filename_paths == plot_manager.plot_filename_paths
            assert harvester.plot_manager.failed_to_open_filenames == plot_manager.failed_to_open_filenames
            assert harvester.plot_manager.no_key_filenames == plot_manager.no_key_filenames
            plot_manager.stop_refreshing()
            # Modify the content of the plot_manager.dat
            with open(harvester.plot_manager.cache.path(), "r+b") as file:
                file.write(b"\xff\xff")  # Sets Cache.version to 65535
            # Make sure it just loads the plots normally if it fails to load the cache
            plot_manager = PlotManager(harvester.root_path, test_refresh_callback)
            plot_manager.cache.load()
            assert len(plot_manager.cache) == 0
            plot_manager.set_public_keys(
                harvester.plot_manager.farmer_public_keys, harvester.plot_manager.pool_public_keys
            )
            expected_result.loaded_plots = 20
            expected_result.removed_plots = 0
            expected_result.processed_files = 20
            expected_result.remaining_files = 0
            plot_manager.start_refreshing()
            await time_out_assert(5, plot_manager.needs_refresh, value=False)
            assert len(plot_manager.plots) == len(harvester.plot_manager.plots)
            plot_manager.stop_refreshing()

            # Test re-trying if processing a plot failed
            # First save the plot
            retry_test_plot = Path(plot_dir_sub / filename_2).resolve()
            retry_test_plot_save = Path(plot_dir_sub / "save").resolve()
            copy(retry_test_plot, retry_test_plot_save)
            # Invalidate the plot
            with open(plot_dir_sub / filename_2, "r+b") as file:
                file.write(bytes(100))
            # Add it and validate it fails to load
            await harvester.add_plot_directory(str(plot_dir_sub))
            expected_result.loaded_plots = 0
            expected_result.removed_plots = 0
            expected_result.processed_files = 1
            expected_result.remaining_files = 0
            harvester.plot_manager.start_refreshing()
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
            # Make sure the file stays in `failed_to_open_filenames` and doesn't get loaded or processed in the next
            # update round
            expected_result.loaded_plots = 0
            expected_result.processed_files = 0
            harvester.plot_manager.trigger_refresh()
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
            # Now decrease the re-try timeout, restore the valid plot file and make sure it properly loads now
            harvester.plot_manager.refresh_parameter.retry_invalid_seconds = 0
            move(retry_test_plot_save, retry_test_plot)
            expected_result.loaded_plots = 1
            expected_result.processed_files = 1
            harvester.plot_manager.trigger_refresh()
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            assert retry_test_plot not in harvester.plot_manager.failed_to_open_filenames

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1()
            )

            await client.set_reward_targets(encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1()
            )
            await client.set_reward_targets(None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

            assert len((await client.get_pool_state())["pool_state"]) == 0
            all_sks = farmer_api.farmer.local_keychain.get_all_private_keys()
            auth_sk = master_sk_to_pooling_authentication_sk(all_sks[0][0], 2, 1)
            pool_list = [
                {
                    "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                    "authentication_public_key": bytes(auth_sk.get_g1()).hex(),
                    "owner_public_key": "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",
                    "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                    "pool_url": "localhost",
                    "p2_singleton_puzzle_hash": "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                    "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
                }
            ]
            config["pool"]["pool_list"] = pool_list
            save_config(root_path, "config.yaml", config)
            await farmer_api.farmer.update_pool_state()

            pool_state = (await client.get_pool_state())["pool_state"]
            assert len(pool_state) == 1
            assert (
                pool_state[0]["pool_config"]["payout_instructions"]
                == "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
            )
            await client.set_payout_instructions(hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]), "1234vy")
            await farmer_api.farmer.update_pool_state()
            pool_state = (await client.get_pool_state())["pool_state"]
            assert pool_state[0]["pool_config"]["payout_instructions"] == "1234vy"

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
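
The refresh callback in this test reads four counters off each PlotRefreshResult it receives. A minimal stand-in with just those fields, purely for illustration (the real class lives in chia's plotting code and may carry more), would be:

from dataclasses import dataclass

@dataclass
class PlotRefreshResult:
    loaded_plots: int = 0
    removed_plots: int = 0
    processed_files: int = 0
    remaining_files: int = 0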
Example #8
async def create_plots(
    args,
    keys: PlotKeys,
    root_path,
    use_datetime=True,
    test_private_keys: Optional[List] = None
) -> Tuple[Dict[bytes32, Path], Dict[bytes32, Path]]:

    config_filename = config_path_for_filename(root_path, "config.yaml")
    config = load_config(root_path, config_filename)

    if args.tmp2_dir is None:
        args.tmp2_dir = args.tmp_dir

    assert (keys.pool_public_key is None) != (keys.pool_contract_puzzle_hash is None)
    num = args.num

    if args.size < config["min_mainnet_k_size"] and test_private_keys is None:
        log.warning(
            f"Creating plots with size k={args.size}, which is less than the minimum required for mainnet"
        )
    if args.size < 22:
        log.warning("k under 22 is not supported. Increasing k to 22")
        args.size = 22

    if keys.pool_public_key is not None:
        log.info(
            f"Creating {num} plots of size {args.size}, pool public key:  "
            f"{bytes(keys.pool_public_key).hex()} farmer public key: {bytes(keys.farmer_public_key).hex()}"
        )
    else:
        assert keys.pool_contract_puzzle_hash is not None
        log.info(
            f"Creating {num} plots of size {args.size}, pool contract address:  "
            f"{keys.pool_contract_address} farmer public key: {bytes(keys.farmer_public_key).hex()}"
        )

    tmp_dir_created = False
    if not args.tmp_dir.exists():
        mkdir(args.tmp_dir)
        tmp_dir_created = True

    tmp2_dir_created = False
    if not args.tmp2_dir.exists():
        mkdir(args.tmp2_dir)
        tmp2_dir_created = True

    mkdir(args.final_dir)

    created_plots: Dict[bytes32, Path] = {}
    existing_plots: Dict[bytes32, Path] = {}
    for i in range(num):
        # Generate a random master secret key
        if test_private_keys is not None:
            assert len(test_private_keys) == num
            sk: PrivateKey = test_private_keys[i]
        else:
            sk = AugSchemeMPL.key_gen(token_bytes(32))

        # The plot public key is the combination of the harvester and farmer keys
        # New plots will also include a taproot of the keys, for extensibility
        include_taproot: bool = keys.pool_contract_puzzle_hash is not None
        plot_public_key = ProofOfSpace.generate_plot_public_key(
            master_sk_to_local_sk(sk).get_g1(), keys.farmer_public_key,
            include_taproot)

        # The plot id is based on the harvester, farmer, and pool keys
        if keys.pool_public_key is not None:
            plot_id: bytes32 = ProofOfSpace.calculate_plot_id_pk(
                keys.pool_public_key, plot_public_key)
            plot_memo: bytes32 = stream_plot_info_pk(keys.pool_public_key,
                                                     keys.farmer_public_key,
                                                     sk)
        else:
            assert keys.pool_contract_puzzle_hash is not None
            plot_id = ProofOfSpace.calculate_plot_id_ph(
                keys.pool_contract_puzzle_hash, plot_public_key)
            plot_memo = stream_plot_info_ph(keys.pool_contract_puzzle_hash,
                                            keys.farmer_public_key, sk)

        if args.plotid is not None:
            log.info(f"Debug plot ID: {args.plotid}")
            plot_id = bytes32(bytes.fromhex(args.plotid))

        if args.memo is not None:
            log.info(f"Debug memo: {args.memo}")
            plot_memo = bytes32.fromhex(args.memo)

        # Log the plot memo, which is useful for dev debugging
        plot_memo_str: str = plot_memo.hex()
        log.info(f"Memo: {plot_memo_str}")

        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        if use_datetime:
            filename: str = f"plot-k{args.size}-{dt_string}-{plot_id}.plot"
        else:
            filename = f"plot-k{args.size}-{plot_id}.plot"
        full_path: Path = args.final_dir / filename

        resolved_final_dir: str = str(Path(args.final_dir).resolve())
        plot_directories_list: str = config["harvester"]["plot_directories"]

        if args.exclude_final_dir:
            log.info(
                f"NOT adding directory {resolved_final_dir} to harvester for farming"
            )
            if resolved_final_dir in plot_directories_list:
                log.warning(
                    f"Directory {resolved_final_dir} already exists for harvester, please remove it manually"
                )
        else:
            if resolved_final_dir not in plot_directories_list:
                # Adds the directory to the plot directories if it is not present
                log.info(
                    f"Adding directory {resolved_final_dir} to harvester for farming"
                )
                config = add_plot_directory(root_path, resolved_final_dir)

        if not full_path.exists():
            log.info(f"Starting plot {i + 1}/{num}")
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                plot_memo,
                plot_id,
                args.buffer,
                args.buckets,
                args.stripe_size,
                args.num_threads,
                args.nobitfield,
            )
            created_plots[plot_id] = full_path
        else:
            log.info(f"Plot {filename} already exists")
            existing_plots[plot_id] = full_path

    log.info("Summary:")

    if tmp_dir_created:
        try:
            args.tmp_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
            )

    if tmp2_dir_created:
        try:
            args.tmp2_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
            )

    log.info(f"Created a total of {len(created_plots)} new plots")
    for created_path in created_plots.values():
        log.info(created_path.name)

    return created_plots, existing_plots
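
Unlike the synchronous variants elsewhere on this page, this version is a coroutine and returns its results instead of only logging them. A hypothetical call site, assuming args, keys (a PlotKeys) and root_path have been built by the caller:

async def plot_and_report(args, keys, root_path):
    # created/existing map plot_id (bytes32) to the final plot Path
    created, existing = await create_plots(args, keys, root_path)
    for plot_id, path in created.items():
        print(f"new plot {plot_id.hex()} -> {path}")
    print(f"{len(existing)} plots already existed")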
Example #9
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port,
                                                  bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname,
                                                       test_rpc_port_2,
                                                       bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            assert len(await client_2.get_plot_directories()) == 1

            await client_2.add_plot_directory(str(plot_dir))
            await client_2.add_plot_directory(str(plot_dir_sub))

            assert len(await client_2.get_plot_directories()) == 3

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 2

            await client_2.delete_plot(str(plot_dir / filename))
            await client_2.delete_plot(str(plot_dir / filename_2))
            await client_2.refresh_plots()
            res_3 = await client_2.get_plots()

            assert len(res_3["plots"]) == num_plots + 1

            await client_2.remove_plot_directory(str(plot_dir))
            assert len(await client_2.get_plot_directories()) == 2

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.farmer_master_sk,
                                       uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(472)).get_g1())

            await client.set_reward_targets(
                encode_puzzle_hash(new_ph, "xch"),
                encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(1888)).get_g1())
            await client.set_reward_targets(
                None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"][
                "xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(
                new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
Example #10
def create_plots(args,
                 root_path,
                 use_datetime=True,
                 test_private_keys: Optional[List] = None):
    config_filename = config_path_for_filename(root_path, "config.yaml")

    if args.tmp2_dir is None:
        args.tmp2_dir = args.final_dir

    farmer_public_key: G1Element
    if args.farmer_public_key is not None:
        farmer_public_key = G1Element.from_bytes(
            bytes.fromhex(args.farmer_public_key))
    else:
        farmer_public_key = get_default_farmer_public_key()

    pool_public_key: G1Element
    if args.pool_public_key is not None:
        pool_public_key = G1Element.from_bytes(
            bytes.fromhex(args.pool_public_key))
    else:
        pool_public_key = get_default_pool_public_key()
    if args.num is not None:
        num = args.num
    else:
        num = 1
    log.info(
        f"Creating {num} plots of size {args.size}, pool public key:  "
        f"{bytes(pool_public_key).hex()} farmer public key: {bytes(farmer_public_key).hex()}"
    )

    tmp_dir_created = False
    if not args.tmp_dir.exists():
        mkdir(args.tmp_dir)
        tmp_dir_created = True

    tmp2_dir_created = False
    if not args.tmp2_dir.exists():
        mkdir(args.tmp2_dir)
        tmp2_dir_created = True

    mkdir(args.final_dir)

    finished_filenames = []
    config = load_config(root_path, config_filename)
    plot_filenames = get_plot_filenames(config["harvester"])
    for i in range(num):
        # Generate a random master secret key
        if test_private_keys is not None:
            assert len(test_private_keys) == num
            sk: PrivateKey = test_private_keys[i]
        else:
            sk = AugSchemeMPL.key_gen(token_bytes(32))

        # The plot public key is the combination of the harvester and farmer keys
        plot_public_key = ProofOfSpace.generate_plot_public_key(
            master_sk_to_local_sk(sk).get_g1(), farmer_public_key)

        # The plot id is based on the harvester, farmer, and pool keys
        plot_id: bytes32 = ProofOfSpace.calculate_plot_id(
            pool_public_key, plot_public_key)
        if args.plotid is not None:
            log.info(f"Debug plot ID: {args.plotid}")
            plot_id = bytes32(bytes.fromhex(args.plotid))

        plot_memo: bytes32 = stream_plot_info(pool_public_key,
                                              farmer_public_key, sk)
        if args.memo is not None:
            log.info(f"Debug memo: {args.memo}")
            plot_memo = bytes.fromhex(args.memo)

        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        if use_datetime:
            filename: str = f"plot-k{args.size}-{dt_string}-{plot_id}.plot"
        else:
            filename = f"plot-k{args.size}-{plot_id}.plot"
        full_path: Path = args.final_dir / filename

        if args.final_dir.resolve() not in plot_filenames:
            if (str(args.final_dir.resolve())
                    not in config["harvester"]["plot_directories"]):
                # Adds the directory to the plot directories if it is not present
                config = add_plot_directory(str(args.final_dir.resolve()),
                                            root_path)

        if not full_path.exists():
            log.info(f"Starting plot {i + 1}/{num}")
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                plot_memo,
                plot_id,
                args.buffer,
                args.buckets,
                args.stripe_size,
                args.num_threads,
            )
            finished_filenames.append(filename)
        else:
            log.info(f"Plot {filename} already exists")

    log.info("Summary:")

    if tmp_dir_created:
        try:
            args.tmp_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
            )

    if tmp2_dir_created:
        try:
            args.tmp2_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
            )

    log.info(f"Created a total of {len(finished_filenames)} new plots")
    for filename in finished_filenames:
        log.info(filename)
Example #11
    def __init__(
        self,
        root_path: Path = TEST_ROOT_PATH,
        real_plots: bool = False,
    ):
        create_default_chia_config(root_path)
        initialize_ssl(root_path)
        self.root_path = root_path
        self.n_wesolowski = uint8(0)
        self.real_plots = real_plots

        if not real_plots:
            # No real plots supplied, so we will use the small test plots
            self.use_any_pos = True
            self.plot_config: Dict = {"plots": {}}
            # Can't go much lower than 19, since plots start having no solutions
            k: uint8 = uint8(19)
            # Uses many plots for testing, in order to guarantee proofs of space at every height
            num_plots = 40
            # Use the empty string as the seed for the private key

            self.keychain = Keychain("testing", True)
            self.keychain.delete_all_keys()
            self.keychain.add_private_key_seed(b"block_tools")
            pool_sk: PrivateKey = self.keychain.get_all_private_keys()[0][0].get_private_key()
            pool_pk: PublicKey = pool_sk.get_public_key()

            plot_sks: List[PrivateKey] = [
                PrivateKey.from_seed(pn.to_bytes(4, "big"))
                for pn in range(num_plots)
            ]
            plot_pks: List[PublicKey] = [
                sk.get_public_key() for sk in plot_sks
            ]

            plot_seeds: List[bytes32] = [
                ProofOfSpace.calculate_plot_seed(pool_pk, plot_pk)
                for plot_pk in plot_pks
            ]
            plot_dir = get_plot_dir(root_path)
            mkdir(plot_dir)
            filenames: List[str] = [
                f"genesis-plots-{k}{std_hash(int.to_bytes(i, 4, 'big')).hex()}.dat"
                for i in range(num_plots)
            ]
            done_filenames = set()
            temp_dir = plot_dir / "plot.tmp"
            mkdir(temp_dir)
            try:
                for pn, filename in enumerate(filenames):
                    if not (plot_dir / filename).exists():
                        plotter = DiskPlotter()
                        plotter.create_plot_disk(
                            str(plot_dir),
                            str(plot_dir),
                            str(plot_dir),
                            filename,
                            k,
                            b"genesis",
                            plot_seeds[pn],
                        )
                        done_filenames.add(filename)
                    self.plot_config["plots"][str(plot_dir / filename)] = {
                        "pool_pk": bytes(pool_pk).hex(),
                        "sk": bytes(plot_sks[pn]).hex(),
                        "pool_sk": bytes(pool_sk).hex(),
                    }
                save_config(self.root_path, "plots.yaml", self.plot_config)

            except KeyboardInterrupt:
                for filename in filenames:
                    if (filename not in done_filenames
                            and (plot_dir / filename).exists()):
                        (plot_dir / filename).unlink()
                sys.exit(1)
        else:
            try:
                plot_config = load_config(DEFAULT_ROOT_PATH, "plots.yaml")
                normal_config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
            except FileNotFoundError:
                raise RuntimeError(
                    "Plots not generated. Run chia-create-plots")
            self.keychain = Keychain(testing=False)
            private_keys: List[PrivateKey] = [
                k.get_private_key()
                for (k, _) in self.keychain.get_all_private_keys()
            ]
            pool_pubkeys: List[PublicKey] = [
                sk.get_public_key() for sk in private_keys
            ]
            if len(private_keys) == 0:
                raise RuntimeError(
                    "Keys not generated. Run `chia generate keys`")

            self.prover_dict, _, _ = load_plots(normal_config["harvester"],
                                                plot_config, pool_pubkeys,
                                                DEFAULT_ROOT_PATH)

            new_plot_config: Dict = {"plots": {}}
            for key, value in plot_config["plots"].items():
                for sk in private_keys:
                    if (bytes(sk.get_public_key()).hex() == value["pool_pk"]
                            and key in self.prover_dict):
                        new_plot_config["plots"][key] = value
                        new_plot_config["plots"][key]["pool_sk"] = bytes(
                            sk).hex()

            self.plot_config = new_plot_config
            self.use_any_pos = False
            a = self.plot_config["plots"]
            print(f"Using {len(a)} reals plots to initialize block_tools")

        private_key = self.keychain.get_all_private_keys()[0][0]
        self.fee_target = create_puzzlehash_for_pk(
            BLSPublicKey(bytes(private_key.public_child(1).get_public_key())))
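
The plot_config saved above is written to plots.yaml with one entry per plot path. A hypothetical excerpt of the resulting structure, shown as a Python dict with shortened placeholder hex strings:

plot_config = {
    "plots": {
        "/path/to/plots/genesis-plots-19<hash>.dat": {
            "pool_pk": "86c1...",  # hex of the shared pool public key
            "sk": "0f2a...",       # hex of this plot's private key
            "pool_sk": "1b3c...",  # hex of the pool private key
        },
    },
}
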
Example #12
from chiapos import DiskProver, DiskPlotter, Verifier
from hashlib import sha256
import secrets
import os

challenge: bytes = bytes([i for i in range(0, 32)])

plot_id: bytes = bytes([
    5, 104, 52, 4, 51, 55, 23, 84, 91, 10, 111, 12, 13, 222, 151, 16, 228, 211,
    254, 45, 92, 198, 204, 10, 9, 10, 11, 129, 139, 171, 15, 23
])
filename = "./myplot.dat"
pl = DiskPlotter()
pl.create_plot_disk(filename, 21, bytes([1, 2, 3, 4, 5]), plot_id)
pr = DiskProver(filename)

total_proofs: int = 0
iterations: int = 5000

v = Verifier()
for i in range(iterations):
    challenge = sha256(i.to_bytes(4, "big")).digest()
    for index, quality in enumerate(pr.get_qualities_for_challenge(challenge)):
        proof = pr.get_full_proof(challenge, index)
        total_proofs += 1
        ver_quality = v.validate_proof(plot_id, 21, challenge, proof)
        assert (quality == ver_quality)

os.remove(filename)

print(f"total proofs {total_proofs} out of {iterations}\
Example #13
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = load_config(bt.root_path, "config.yaml")
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create("localhost", test_rpc_port)
            client_2 = await HarvesterRpcClient.create("localhost",
                                                       test_rpc_port_2)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(5, have_connections, True)

            await client.get_latest_challenges()

            async def have_challenges():
                return len(await client.get_latest_challenges()) > 0

            await time_out_assert(5, have_challenges, True)

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)
            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info(bt.pool_pk, bt.farmer_pk,
                                 AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            print(await client_2.get_plot_directories())
            assert len(await client_2.get_plot_directories()) == 1

            await client_2.add_plot_directory(str(plot_dir))

            assert len(await client_2.get_plot_directories()) == 2

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 1

            await client_2.delete_plot(str(plot_dir / filename))
            res_3 = await client_2.get_plots()
            assert len(res_3["plots"]) == num_plots

            await client_2.remove_plot_directory(str(plot_dir))
            print(await client_2.get_plot_directories())
            assert len(await client_2.get_plot_directories()) == 1

        except AssertionError:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
            raise

        client.close()
        client_2.close()
        await client.await_closed()
        await client_2.await_closed()
        await rpc_cleanup()
        await rpc_cleanup_2()
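
The shutdown sequence (close the clients, await their closure, run the RPC cleanups) is duplicated in the except branch and the happy path. A hedged sketch of a helper that could consolidate it; the name is illustrative:

async def shutdown_rpc(clients, cleanups):
    # Close every RPC client, wait for the connections to drain,
    # then run the server cleanup callbacks returned by start_rpc_server.
    for client in clients:
        client.close()
    for client in clients:
        await client.await_closed()
    for cleanup in cleanups:
        await cleanup()

# e.g. await shutdown_rpc([client, client_2], [rpc_cleanup, rpc_cleanup_2])
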
Example #14
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port,
                                                  bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname,
                                                       test_rpc_port_2,
                                                       bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)
            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info(bt.pool_pk, bt.farmer_pk,
                                 AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            assert len(await client_2.get_plot_directories()) == 1

            await client_2.add_plot_directory(str(plot_dir))

            assert len(await client_2.get_plot_directories()) == 2

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 1

            await client_2.delete_plot(str(plot_dir / filename))
            res_3 = await client_2.get_plots()
            assert len(res_3["plots"]) == num_plots

            await client_2.remove_plot_directory(str(plot_dir))
            assert len(await client_2.get_plot_directories()) == 1

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
Example #15
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        full_node_1, _, harvester, farmer, _, _, _ = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        rpc_cleanup = await start_farmer_rpc_server(farmer, stop_node_cb,
                                                    test_rpc_port)
        rpc_cleanup_2 = await start_harvester_rpc_server(
            harvester, stop_node_cb_2, test_rpc_port_2)

        try:
            client = await FarmerRpcClient.create(test_rpc_port)
            client_2 = await HarvesterRpcClient.create(test_rpc_port_2)

            await asyncio.sleep(3)
            assert len(await client.get_connections()) == 2

            challenges = await client.get_latest_challenges()
            assert len(challenges) > 0

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir()
            plotter = DiskPlotter()
            pool_pk = harvester.pool_pubkeys[0]
            plot_sk = PrivateKey.from_seed(b"Farmer harvester rpc test seed")
            plot_seed = ProofOfSpace.calculate_plot_seed(
                pool_pk, plot_sk.get_public_key())
            filename = "test_farmer_harvester_rpc_plot.dat"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                b"genesis",
                plot_seed,
                2 * 1024,
            )
            await client_2.add_plot(str(plot_dir / filename), plot_sk)

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 1

            await client_2.delete_plot(str(plot_dir / filename))
            res_3 = await client_2.get_plots()
            assert len(res_3["plots"]) == num_plots

            filename = "test_farmer_harvester_rpc_plot_2.dat"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                b"genesis",
                plot_seed,
                2 * 1024,
            )
            await client_2.add_plot(str(plot_dir / filename), plot_sk, pool_pk)
            assert len((await client_2.get_plots())["plots"]) == num_plots + 1
            await client_2.delete_plot(str(plot_dir / filename))
            assert len((await client_2.get_plots())["plots"]) == num_plots

        except AssertionError:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
            raise

        client.close()
        client_2.close()
        await client.await_closed()
        await client_2.await_closed()
        await rpc_cleanup()
        await rpc_cleanup_2()
    def __init__(
        self, root_path: Path = TEST_ROOT_PATH, real_plots: bool = False,
    ):
        create_default_chia_config(root_path)
        initialize_ssl(root_path)
        self.root_path = root_path
        self.wallet_sk: PrivateKey = PrivateKey.from_seed(b"coinbase")
        self.coinbase_target = std_hash(bytes(self.wallet_sk.get_public_key()))
        self.fee_target = std_hash(bytes(self.wallet_sk.get_public_key()))
        self.n_wesolowski = uint8(0)

        if not real_plots:
            # No real plots supplied, so we will use the small test plots
            self.use_any_pos = True
            self.plot_config: Dict = {"plots": {}}
            # Can't go much lower than 19, since plots start having no solutions
            k: uint8 = uint8(19)
            # Uses many plots for testing, in order to guarantee proofs of space at every height
            num_plots = 40
            # Use the empty string as the seed for the private key
            pool_sk: PrivateKey = PrivateKey.from_seed(b"")
            pool_pk: PublicKey = pool_sk.get_public_key()
            plot_sks: List[PrivateKey] = [
                PrivateKey.from_seed(pn.to_bytes(4, "big")) for pn in range(num_plots)
            ]
            plot_pks: List[PublicKey] = [sk.get_public_key() for sk in plot_sks]

            plot_seeds: List[bytes32] = [
                ProofOfSpace.calculate_plot_seed(pool_pk, plot_pk)
                for plot_pk in plot_pks
            ]
            plot_dir = root_path / "plots"
            mkdir(plot_dir)
            filenames: List[str] = [
                f"genesis-plots-{k}{std_hash(int.to_bytes(i, 4, 'big')).hex()}.dat"
                for i in range(num_plots)
            ]
            done_filenames = set()
            temp_dir = plot_dir / "plot.tmp"
            mkdir(temp_dir)
            try:
                for pn, filename in enumerate(filenames):
                    if not (plot_dir / filename).exists():
                        plotter = DiskPlotter()
                        plotter.create_plot_disk(
                            str(plot_dir),
                            str(plot_dir),
                            str(plot_dir),
                            filename,
                            k,
                            b"genesis",
                            plot_seeds[pn],
                        )
                        done_filenames.add(filename)
                    self.plot_config["plots"][str(plot_dir / filename)] = {
                        "pool_pk": bytes(pool_pk).hex(),
                        "sk": bytes(plot_sks[pn]).hex(),
                        "pool_sk": bytes(pool_sk).hex(),
                    }
            except KeyboardInterrupt:
                for filename in filenames:
                    if (
                        filename not in done_filenames
                        and (plot_dir / filename).exists()
                    ):
                        (plot_dir / filename).unlink()
                sys.exit(1)
        else:
            # Real plots supplied, so we will use these instead of the test plots
            config = load_config_cli(root_path, "config.yaml", "harvester")
            try:
                key_config = load_config(root_path, "keys.yaml")
            except FileNotFoundError:
                raise RuntimeError("Keys not generated. Run `chia generate keys`")
            try:
                plot_config = load_config(root_path, "plots.yaml")
            except FileNotFoundError:
                raise RuntimeError("Plots not generated. Run chia-create-plots")

            pool_sks: List[PrivateKey] = [
                PrivateKey.from_bytes(bytes.fromhex(ce))
                for ce in key_config["pool_sks"]
            ]

            for key, value in plot_config["plots"].items():
                for pool_sk in pool_sks:
                    if bytes(pool_sk.get_public_key()).hex() == value["pool_pk"]:
                        plot_config["plots"][key]["pool_sk"] = bytes(pool_sk).hex()

            self.plot_config = plot_config
            self.use_any_pos = False
Example #17
def main():
    """
    Script for creating plots and adding them to the plot config file.
    """
    root_path = DEFAULT_ROOT_PATH
    plot_config_filename = config_path_for_filename(root_path, "plots.yaml")

    parser = argparse.ArgumentParser(description="Chia plotting script.")
    parser.add_argument("-k", "--size", help="Plot size", type=int, default=26)
    parser.add_argument(
        "-n", "--num_plots", help="Number of plots", type=int, default=1
    )
    parser.add_argument(
        "-i", "--index", help="First plot index", type=int, default=None
    )
    parser.add_argument(
        "-p", "--pool_pub_key", help="Hex public key of pool", type=str, default=""
    )
    parser.add_argument(
        "-s", "--sk_seed", help="Secret key seed in hex", type=str, default=None
    )
    parser.add_argument(
        "-t",
        "--tmp_dir",
        help="Temporary directory for plotting files",
        type=Path,
        default=Path("."),
    )
    parser.add_argument(
        "-2",
        "--tmp2_dir",
        help="Second temporary directory for plotting files",
        type=Path,
        default=Path("."),
    )
    new_plots_root = path_from_root(
        root_path,
        load_config(root_path, "config.yaml")
        .get("harvester", {})
        .get("new_plot_root", "plots"),
    )
    parser.add_argument(
        "-d",
        "--final_dir",
        help="Final directory for plots (relative or absolute)",
        type=Path,
        default=new_plots_root,
    )

    args = parser.parse_args()

    if args.sk_seed is None and args.index is not None:
        log(
            f"You have specified the -i (index) argument without the -s (sk_seed) argument."
            f" The program has changes, so that the sk_seed is now generated randomly, so -i is no longer necessary."
            f" Please run the program without -i."
        )
        quit()

    if args.index is None:
        args.index = 0

    # The seed is what will be used to generate a private key for each plot
    if args.sk_seed is not None:
        sk_seed: bytes = bytes.fromhex(args.sk_seed)
        log(f"Using the provided sk_seed {sk_seed.hex()}.")
    else:
        sk_seed = token_bytes(32)
        log(
            f"Using sk_seed {sk_seed.hex()}. Note that sk seed is now generated randomly, as opposed "
            f"to from keys.yaml. If you want to use a specific seed, use the -s argument."
        )

    pool_pk: PublicKey
    if len(args.pool_pub_key) > 0:
        # Use the provided pool public key, useful for using an external pool
        pool_pk = PublicKey.from_bytes(bytes.fromhex(args.pool_pub_key))
    else:
        # Use the pool public key from the config, useful for solo farming
        keychain = Keychain()
        all_public_keys = keychain.get_all_public_keys()
        if len(all_public_keys) == 0:
            raise RuntimeError(
                "There are no private keys in the keychain, so we cannot create a plot. "
                "Please generate keys using 'chia keys generate' or pass in a pool pk with -p"
            )
        pool_pk = all_public_keys[0].get_public_key()

    log(
        f"Creating {args.num_plots} plots, from index {args.index} to "
        f"{args.index + args.num_plots - 1}, of size {args.size}, sk_seed {sk_seed.hex()} ppk {pool_pk}"
    )

    mkdir(args.tmp_dir)
    mkdir(args.tmp2_dir)
    mkdir(args.final_dir)
    finished_filenames = []
    for i in range(args.index, args.index + args.num_plots):
        # Generate a sk based on the seed, plot size (k), and index
        sk: PrivateKey = PrivateKey.from_seed(
            sk_seed + args.size.to_bytes(1, "big") + i.to_bytes(4, "big")
        )

        # The plot seed is based on the pool and plot pks
        plot_seed: bytes32 = ProofOfSpace.calculate_plot_seed(
            pool_pk, sk.get_public_key()
        )
        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        filename: str = f"plot-k{args.size}-{dt_string}-{plot_seed}.dat"
        full_path: Path = args.final_dir / filename

        plot_config = load_config(root_path, plot_config_filename)
        plot_config_plots_new = deepcopy(plot_config.get("plots", []))
        filenames = [Path(k).name for k in plot_config_plots_new.keys()]
        already_in_config = any(plot_seed.hex() in fname for fname in filenames)
        if already_in_config:
            log(f"Plot {filename} already exists (in config)")
            continue

        if not full_path.exists():
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                bytes([]),
                plot_seed,
            )
            finished_filenames.append(filename)
        else:
            log(f"Plot {filename} already exists")

        # Updates the config if necessary.
        plot_config = load_config(root_path, plot_config_filename)
        plot_config_plots_new = deepcopy(plot_config.get("plots", []))
        plot_config_plots_new[str(full_path)] = {
            "sk": bytes(sk).hex(),
            "pool_pk": bytes(pool_pk).hex(),
        }
        plot_config["plots"].update(plot_config_plots_new)

        # Dumps the new config to disk.
        save_config(root_path, plot_config_filename, plot_config)
    log("")
    log("Summary:")
    try:
        args.tmp_dir.rmdir()
    except Exception:
        log(
            f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
        )
    try:
        args.tmp2_dir.rmdir()
    except Exception:
        log(
            f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
        )
    log(f"Created a total of {len(finished_filenames)} new plots")
    for filename in finished_filenames:
        log(filename)
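
The per-plot key derivation in the loop above is deterministic, so it can be factored into a standalone helper. A minimal sketch, assuming the same PrivateKey and ProofOfSpace imports as the script:

def derive_plot_keys(sk_seed: bytes, size: int, index: int, pool_pk):
    # The plot secret key is derived from the seed, the plot size (k) and the
    # plot index; the plot seed then commits to the pool and plot public keys.
    sk = PrivateKey.from_seed(sk_seed + size.to_bytes(1, "big") + index.to_bytes(4, "big"))
    plot_seed = ProofOfSpace.calculate_plot_seed(pool_pk, sk.get_public_key())
    return sk, plot_seed
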
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port,
                                                  bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname,
                                                       test_rpc_port_2,
                                                       bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            # Test farmer get_plots
            farmer_res = await client.get_plots()
            assert len(list(farmer_res.values())[0]["plots"]) == num_plots

            assert len(await client_2.get_plot_directories()) == 1

            await client_2.add_plot_directory(str(plot_dir))
            await client_2.add_plot_directory(str(plot_dir_sub))

            assert len(await client_2.get_plot_directories()) == 3

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 2

            await client_2.delete_plot(str(plot_dir / filename))
            await client_2.delete_plot(str(plot_dir / filename_2))
            await client_2.refresh_plots()
            res_3 = await client_2.get_plots()

            assert len(res_3["plots"]) == num_plots + 1

            await client_2.remove_plot_directory(str(plot_dir))
            assert len(await client_2.get_plot_directories()) == 2

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.farmer_master_sk,
                                       uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(472)).get_g1())

            await client.set_reward_targets(
                encode_puzzle_hash(new_ph, "xch"),
                encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(1888)).get_g1())
            await client.set_reward_targets(
                None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"][
                "xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(
                new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

            assert len((await client.get_pool_state())["pool_state"]) == 0
            all_sks = farmer_api.farmer.keychain.get_all_private_keys()
            auth_sk = master_sk_to_pooling_authentication_sk(
                all_sks[0][0], 2, 1)
            pool_list = [{
                "launcher_id":
                "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                "authentication_public_key":
                bytes(auth_sk.get_g1()).hex(),
                "owner_public_key":
                "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",
                "payout_instructions":
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                "pool_url":
                "localhost",
                "p2_singleton_puzzle_hash":
                "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                "target_puzzle_hash":
                "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
            }]
            config["pool"]["pool_list"] = pool_list
            save_config(root_path, "config.yaml", config)
            await farmer_api.farmer.update_pool_state()

            pool_state = (await client.get_pool_state())["pool_state"]
            assert len(pool_state) == 1
            assert (
                pool_state[0]["pool_config"]["payout_instructions"] ==
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
            )
            await client.set_payout_instructions(
                hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]),
                "1234vy")

            pool_state = (await client.get_pool_state())["pool_state"]
            assert pool_state[0]["pool_config"][
                "payout_instructions"] == "1234vy"

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
def main():
    """
    Script for creating plots and adding them to the plot config file.
    """
    root_path = DEFAULT_ROOT_PATH
    plot_config_filename = config_path_for_filename(root_path, "plots.yaml")
    key_config_filename = config_path_for_filename(root_path, "keys.yaml")

    parser = argparse.ArgumentParser(description="Chia plotting script.")
    parser.add_argument("-k", "--size", help="Plot size", type=int, default=20)
    parser.add_argument("-n",
                        "--num_plots",
                        help="Number of plots",
                        type=int,
                        default=10)
    parser.add_argument("-i",
                        "--index",
                        help="First plot index",
                        type=int,
                        default=0)
    parser.add_argument("-p",
                        "--pool_pub_key",
                        help="Hex public key of pool",
                        type=str,
                        default="")
    parser.add_argument(
        "-t",
        "--tmp_dir",
        help=
        "Temporary directory for plotting files (relative to final directory)",
        type=Path,
        default=Path("./plots.tmp"),
    )

    new_plots_root = path_from_root(
        root_path,
        load_config(root_path,
                    "config.yaml").get("harvester",
                                       {}).get("new_plot_root", "plots"),
    )
    parser.add_argument(
        "-d",
        "--final_dir",
        help="Final directory for plots (relative or absolute)",
        type=Path,
        default=new_plots_root,
    )

    # We need the keys file to access the pool keys (if they exist) and the sk_seed.
    args = parser.parse_args()
    if not key_config_filename.exists():
        raise RuntimeError("Keys not generated. Run chia-generate-keys")

    # The seed is what will be used to generate a private key for each plot
    key_config = load_config(root_path, key_config_filename)
    sk_seed: bytes = bytes.fromhex(key_config["sk_seed"])

    pool_pk: PublicKey
    if len(args.pool_pub_key) > 0:
        # Use the provided pool public key, useful for using an external pool
        pool_pk = PublicKey.from_bytes(bytes.fromhex(args.pool_pub_key))
    else:
        # Use the pool public key from the config, useful for solo farming
        pool_sk = PrivateKey.from_bytes(
            bytes.fromhex(key_config["pool_sks"][0]))
        pool_pk = pool_sk.get_public_key()

    print(
        f"Creating {args.num_plots} plots, from index {args.index} to "
        f"{args.index + args.num_plots - 1}, of size {args.size}, sk_seed {sk_seed.hex()} ppk {pool_pk}"
    )

    tmp_dir = args.final_dir / args.tmp_dir
    mkdir(tmp_dir)
    mkdir(args.final_dir)
    for i in range(args.index, args.index + args.num_plots):
        # Generate a sk based on the seed, plot size (k), and index
        sk: PrivateKey = PrivateKey.from_seed(sk_seed +
                                              args.size.to_bytes(1, "big") +
                                              i.to_bytes(4, "big"))

        # The plot seed is based on the pool and plot pks
        plot_seed: bytes32 = ProofOfSpace.calculate_plot_seed(
            pool_pk, sk.get_public_key())
        filename: str = f"plot-{i}-{args.size}-{plot_seed}.dat"
        full_path: Path = args.final_dir / filename
        if not full_path.exists():
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(tmp_dir),
                str(args.final_dir),
                filename,
                args.size,
                bytes([]),
                plot_seed,
            )
        else:
            print(f"Plot {filename} already exists")

        # Updates the config if necessary.
        plot_config = load_config(root_path, plot_config_filename)
        plot_config_plots_new = deepcopy(plot_config.get("plots", []))
        relative_path = make_path_relative(full_path, root_path)
        if (relative_path not in plot_config_plots_new
                and full_path not in plot_config_plots_new):
            plot_config_plots_new[str(full_path)] = {
                "sk": bytes(sk).hex(),
                "pool_pk": bytes(pool_pk).hex(),
            }
        plot_config["plots"].update(plot_config_plots_new)

        # Dumps the new config to disk.
        save_config(root_path, plot_config_filename, plot_config)
    try:
        tmp_dir.rmdir()
    except Exception:
        print(f"warning: couldn't delete {tmp_dir}")
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester_service, farmer_service = simulation
        harvester = harvester_service._node
        farmer_api = farmer_service._api

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port,
                                                  bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname,
                                                       test_rpc_port_2,
                                                       bt.root_path, config)

            await validate_get_routes(client, farmer_rpc_api)
            await validate_get_routes(client_2, harvester_rpc_api)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk,
                                    AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            # Reset the cache and force it to update every second so the farmer gets the most recent data
            update_interval_before = farmer_api.farmer.update_harvester_cache_interval
            farmer_api.farmer.update_harvester_cache_interval = 1
            farmer_api.farmer.harvester_cache = {}

            # Test farmer get_harvesters
            async def test_get_harvesters():
                harvester.plot_manager.trigger_refresh()
                await time_out_assert(5,
                                      harvester.plot_manager.needs_refresh,
                                      value=False)
                farmer_res = await client.get_harvesters()
                if len(list(farmer_res["harvesters"])) != 1:
                    log.error(
                        f"test_get_harvesters: invalid harvesters {list(farmer_res['harvesters'])}"
                    )
                    return False
                if len(list(
                        farmer_res["harvesters"][0]["plots"])) != num_plots:
                    log.error(
                        f"test_get_harvesters: invalid plots {list(farmer_res['harvesters'])}"
                    )
                    return False
                return True

            await time_out_assert_custom_interval(30, 1, test_get_harvesters)

            # Reset cache and reset update interval to avoid hitting the rate limit
            farmer_api.farmer.update_harvester_cache_interval = update_interval_before
            farmer_api.farmer.harvester_cache = {}

            targets_1 = await client.get_reward_targets(False)
            assert "have_pool_sk" not in targets_1
            assert "have_farmer_sk" not in targets_1
            targets_2 = await client.get_reward_targets(True)
            assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

            new_ph: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.farmer_master_sk,
                                       uint32(10)).get_g1())
            new_ph_2: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(472)).get_g1())

            await client.set_reward_targets(
                encode_puzzle_hash(new_ph, "xch"),
                encode_puzzle_hash(new_ph_2, "xch"))
            targets_3 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
            assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

            new_ph_3: bytes32 = create_puzzlehash_for_pk(
                master_sk_to_wallet_sk(bt.pool_master_sk,
                                       uint32(1888)).get_g1())
            await client.set_reward_targets(
                None, encode_puzzle_hash(new_ph_3, "xch"))
            targets_4 = await client.get_reward_targets(True)
            assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
            assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
            assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"]

            root_path = farmer_api.farmer._root_path
            config = load_config(root_path, "config.yaml")
            assert config["farmer"][
                "xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
            assert config["pool"]["xch_target_address"] == encode_puzzle_hash(
                new_ph_3, "xch")

            new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
            added_char = new_ph_3_encoded + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, added_char)

            replaced_char = new_ph_3_encoded[0:-1] + "a"
            with pytest.raises(ValueError):
                await client.set_reward_targets(None, replaced_char)

            assert len((await client.get_pool_state())["pool_state"]) == 0
            all_sks = farmer_api.farmer.local_keychain.get_all_private_keys()
            auth_sk = master_sk_to_pooling_authentication_sk(
                all_sks[0][0], 2, 1)
            pool_list = [{
                "launcher_id":
                "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                "authentication_public_key":
                bytes(auth_sk.get_g1()).hex(),
                "owner_public_key":
                "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",  # noqa
                "payout_instructions":
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                "pool_url":
                "localhost",
                "p2_singleton_puzzle_hash":
                "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                "target_puzzle_hash":
                "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
            }]
            config["pool"]["pool_list"] = pool_list
            save_config(root_path, "config.yaml", config)
            await farmer_api.farmer.update_pool_state()

            pool_state = (await client.get_pool_state())["pool_state"]
            assert len(pool_state) == 1
            assert (
                pool_state[0]["pool_config"]["payout_instructions"] ==
                "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
            )
            await client.set_payout_instructions(
                hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]),
                "1234vy")
            await farmer_api.farmer.update_pool_state()
            pool_state = (await client.get_pool_state())["pool_state"]
            assert pool_state[0]["pool_config"][
                "payout_instructions"] == "1234vy"

            now = time.time()
            # Arbitrary large values, chosen so they are unlikely to collide accidentally.
            before_24h = (now - (25 * 60 * 60), 29984713)
            since_24h = (now - (23 * 60 * 60), 93049817)
            for p2_singleton_puzzle_hash, pool_dict in farmer_api.farmer.pool_state.items(
            ):
                for key in ["points_found_24h", "points_acknowledged_24h"]:
                    pool_dict[key].insert(0, since_24h)
                    pool_dict[key].insert(0, before_24h)

            sp = farmer_protocol.NewSignagePoint(std_hash(b"1"),
                                                 std_hash(b"2"),
                                                 std_hash(b"3"), uint64(1),
                                                 uint64(1000000), uint8(2))
            await farmer_api.new_signage_point(sp)
            client_pool_state = await client.get_pool_state()
            for pool_dict in client_pool_state["pool_state"]:
                for key in ["points_found_24h", "points_acknowledged_24h"]:
                    assert pool_dict[key][0] == list(since_24h)

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
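
The assertions on points_found_24h/points_acknowledged_24h above imply a 24-hour sliding window: the entry older than 24 hours disappears after the new signage point, while the recent one stays. A generic, hedged sketch of that kind of windowing (not the farmer's actual implementation):

import time

def prune_points(points, window_seconds=24 * 60 * 60):
    # Keep only (timestamp, value) pairs whose timestamp is inside the window.
    cutoff = time.time() - window_seconds
    return [(ts, value) for (ts, value) in points if ts >= cutoff]
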