def patch_default_seeder_config(root_path: Path, filename="config.yaml") -> None:
    """
    Check whether the ``seeder:`` section exists in config.yaml; if not, append
    the default seeder settings from the packaged initial-config.yaml.

    NOTE(review): the ``filename`` parameter is accepted but never used — the
    function always reads/writes "config.yaml". Kept for interface
    compatibility; confirm with callers before wiring it through.
    """
    # BUG FIX: the original loaded config.yaml a second time after the
    # existence check; one load is sufficient (and avoids a redundant read).
    config = load_config(root_path, "config.yaml")
    if "seeder" in config:
        print("Chia Seeder section exists in config. No action required.")
        return

    print("Chia Seeder section does not exist in config. Patching...")
    # The following ignores root_path when the second param is absolute, which this will be
    seeder_config = load_config(root_path, pkg_resources.resource_filename("chia.util", "initial-config.yaml"))

    # Patch in the values with anchors, since pyyaml tends to change
    # the anchors to things like id001, etc
    config["seeder"] = seeder_config["seeder"]
    config["seeder"]["network_overrides"] = config["network_overrides"]
    config["seeder"]["selected_network"] = config["selected_network"]
    config["seeder"]["logging"] = config["logging"]

    # When running as crawler, we default to a much lower client timeout
    config["full_node"]["peer_connect_timeout"] = 2

    save_config(root_path, "config.yaml", config)
def migrate_from(
    old_root: Path,
    new_root: Path,
    manifest: List[str],
    do_not_migrate_settings: List[str],
):
    """
    Copy all the files in "manifest" to the new config directory, then merge
    any newly-introduced default settings into the migrated config.yaml
    (skipping the keys listed in ``do_not_migrate_settings``).

    Returns 1 on success (or no-op same-path), 0 when ``old_root`` is absent.
    """
    if old_root == new_root:
        print("same as new path, exiting")
        return 1
    if not old_root.is_dir():
        print(f"{old_root} not found - this is ok if you did not install this version")
        return 0
    print(f"\n{old_root} found")
    print(f"Copying files from {old_root} to {new_root}\n")

    # Replicate each manifest entry from the old root into the new root.
    for entry in manifest:
        copy_files_rec(old_root / entry, new_root / entry)

    # Merge new default keys into the migrated config, honoring the skip list.
    config: Dict = load_config(new_root, "config.yaml")
    default_config: Dict = yaml.safe_load(initial_config_file("config.yaml"))
    keep_keys = unflatten_properties({key: "" for key in do_not_migrate_settings})
    dict_add_new_default(config, default_config, keep_keys)

    save_config(new_root, "config.yaml", config)
    create_all_ssl(new_root)
    return 1
async def update_pool_config(root_path: Path, pool_config_list: List[PoolWalletConfig]):
    """Serialize ``pool_config_list`` and persist it as pool.pool_list in config.yaml."""
    full_config = load_config(root_path, "config.yaml")
    serialized_entries = [entry.to_json_dict() for entry in pool_config_list]
    full_config["pool"]["pool_list"] = serialized_entries
    save_config(root_path, "config.yaml", full_config)
def test_multiple_writers(self, root_path_populated_with_config, default_config_dict):
    """
    Test whether multiple readers/writers encounter data corruption. When using
    non-atomic operations to write to the config, partial/incomplete writes can
    cause readers to yield bad/corrupt data. Access to config.yaml isn't
    currently synchronized, so the best we can currently hope for is that the
    file contents are written-to as a whole.
    """
    # Artifically inflate the size of the default config. This is done to (hopefully) force
    # save_config() to require multiple writes. When save_config() was using shutil.move()
    # multiple writes were observed, leading to read failures when data was partially written.
    default_config_dict["xyz"] = "x" * 32768
    root_path: Path = root_path_populated_with_config
    save_config(root_path=root_path, filename="config.yaml", config_data=default_config_dict)
    num_workers: int = 30
    worker_args = [(root_path, default_config_dict) for _ in range(num_workers)]
    # Spin-off several processes (not threads) to read and write config data. If any
    # read failures are detected, the failing process will assert.
    with Pool(processes=num_workers) as pool:
        async_result = pool.starmap_async(run_reader_and_writer_tasks, worker_args)
        try:
            async_result.get(timeout=60)
        except TimeoutError:
            pytest.skip("Timed out waiting for reader/writer processes to complete")
def add_plot_directory(str_path: str, root_path: Path) -> Dict:
    """
    Add ``str_path`` (resolved to an absolute path) to the harvester's
    plot_directories in config.yaml, if it is not already present, then save
    the config.

    Returns the (possibly updated) config dict.
    """
    config = load_config(root_path, "config.yaml")
    # Resolve once; the original resolved the same path twice (membership test
    # and append), doing redundant filesystem work.
    resolved = str(Path(str_path).resolve())
    if resolved not in config["harvester"]["plot_directories"]:
        config["harvester"]["plot_directories"].append(resolved)
    save_config(root_path, "config.yaml", config)
    return config
def write_config(root_path: Path, config: Dict):
    """
    Wait for a random amount of time and write out the config data. With a
    large config, we expect save_config() to require multiple writes.
    """
    jitter = random.random()
    sleep(jitter)
    save_config(root_path=root_path, filename="config.yaml", config_data=config)
def add_plot_directory(root_path: Path, str_path: str) -> Dict:
    """
    Add ``str_path`` (resolved to an absolute path) to the harvester's
    plot_directories in config.yaml if it is not already among the configured
    plot directories, then save the config.

    Returns the updated config dict.
    """
    log.debug(f"add_plot_directory {str_path}")
    config = load_config(root_path, "config.yaml")
    # Resolve once; the original resolved the same path twice.
    resolved = str(Path(str_path).resolve())
    if resolved not in get_plot_directories(root_path, config):
        config["harvester"]["plot_directories"].append(resolved)
    save_config(root_path, "config.yaml", config)
    return config
def set_reward_targets(self, farmer_target_encoded: Optional[str], pool_target_encoded: Optional[str]):
    """
    Update the farmer and/or pool reward target addresses, both on this object
    and persisted in config.yaml. Passing ``None`` for either argument leaves
    that target unchanged.
    """
    config = load_config(self._root_path, "config.yaml")
    # Apply the same update pattern to both targets: keep the decoded puzzle
    # hash in sync with the bech32-encoded address, and mirror into the config.
    for encoded, section in ((farmer_target_encoded, "farmer"), (pool_target_encoded, "pool")):
        if encoded is None:
            continue
        setattr(self, f"{section}_target_encoded", encoded)
        setattr(self, f"{section}_target", decode_puzzle_hash(encoded))
        config[section]["xch_target_address"] = encoded
    save_config(self._root_path, "config.yaml", config)
def remove_plot_directory(str_path: str, root_path: Path) -> None:
    """
    Remove ``str_path`` from the harvester's plot_directories in config.yaml,
    matching either the exact string or the fully-resolved path, then save.
    """
    config = load_config(root_path, "config.yaml")
    directories: List[str] = config["harvester"]["plot_directories"]
    # Drop an entry that matches the given string exactly.
    if str_path in directories:
        directories.remove(str_path)
    # Drop an entry that matches after resolving to a full path.
    resolved_dirs = [Path(entry).resolve() for entry in directories]
    target = Path(str_path).resolve()
    if target in resolved_dirs:
        resolved_dirs.remove(target)
    config["harvester"]["plot_directories"] = [str(entry) for entry in resolved_dirs]
    save_config(root_path, "config.yaml", config)
async def set_payout_instructions(self, launcher_id: bytes32, payout_instructions: str):
    """
    Persist new payout instructions for the pool identified by ``launcher_id``
    in config.yaml and schedule an immediate farmer update. Logs a warning if
    no tracked pool configuration matches the launcher id.
    """
    for p2_singleton_puzzle_hash, pool_state_dict in self.pool_state.items():
        if launcher_id != pool_state_dict["pool_config"].launcher_id:
            continue
        config = load_config(self._root_path, "config.yaml")
        updated_list = []
        for entry in config["pool"]["pool_list"]:
            if hexstr_to_bytes(entry["launcher_id"]) == bytes(launcher_id):
                entry["payout_instructions"] = payout_instructions
            updated_list.append(entry)
        config["pool"]["pool_list"] = updated_list
        save_config(self._root_path, "config.yaml", config)
        # Force a GET /farmer which triggers the PUT /farmer if it detects the changed instructions
        pool_state_dict["next_farmer_update"] = 0
        return
    self.log.warning(f"Launcher id: {launcher_id} not found")
def test_save_config(self, root_path_populated_with_config, default_config_dict):
    """
    Test modifying the config and saving it to disk. The modified value(s)
    should be present after calling load_config().
    """
    root_path: Path = root_path_populated_with_config
    modified: Dict = copy.deepcopy(default_config_dict)
    # When: modifying the config
    modified["harvester"]["farmer_peer"]["host"] = "oldmacdonald.eie.io"
    # Sanity check that we didn't modify the default config
    assert modified["harvester"]["farmer_peer"]["host"] != default_config_dict["harvester"]["farmer_peer"]["host"]
    # When: saving the modified config
    save_config(root_path=root_path, filename="config.yaml", config_data=modified)
    # Expect: modifications should be preserved in the config read from disk
    loaded: Dict = load_config(root_path=root_path, filename="config.yaml")
    assert loaded["harvester"]["farmer_peer"]["host"] == "oldmacdonald.eie.io"
async def set_payout_instructions(self, launcher_id: bytes32, payout_instructions: str):
    """
    Persist new payout instructions for the pool identified by ``launcher_id``
    in config.yaml, then refresh the local pool state. Logs a warning when the
    launcher id is not among the tracked pools.
    """
    for p2_singleton_puzzle_hash, pool_state_dict in self.pool_state.items():
        if launcher_id != pool_state_dict["pool_config"].launcher_id:
            continue
        config = load_config(self._root_path, "config.yaml")
        updated_list = []
        for entry in config["pool"]["pool_list"]:
            if bytes.fromhex(entry["launcher_id"]) == bytes(launcher_id):
                entry["payout_instructions"] = payout_instructions
            updated_list.append(entry)
        config["pool"]["pool_list"] = updated_list
        save_config(self._root_path, "config.yaml", config)
        await self.update_pool_state()
        return
    self.log.warning(f"Launcher id: {launcher_id} not found")
def test_pool_config():
    """
    Round-trip a PoolWalletConfig through from_json_dict/to_json_dict and
    verify the serialized form placed in a config equals the original dict.
    """
    test_root = Path("/tmp")
    test_path = Path("/tmp/config")
    eg_config = test_path / "config.yaml"
    to_config = test_path / "test_pool_config.yaml"

    create_default_chia_config(test_root, ["config.yaml"])
    assert eg_config.exists()
    eg_config.rename(to_config)

    auth_sk: PrivateKey = AugSchemeMPL.key_gen(b"1" * 32)
    d = {
        "authentication_public_key": bytes(auth_sk.get_g1()).hex(),
        "owner_public_key": "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",
        "p2_singleton_puzzle_hash": "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824",
        "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
        "pool_url": "localhost",
        "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
        "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
    }

    pwc = PoolWalletConfig.from_json_dict(d)

    # BUG FIX: the original used dict.copy(), which is shallow — config_a["wallet"]
    # and config_b["wallet"] were the SAME nested dict, so the second pool_list
    # assignment overwrote the first and the final assertion compared an object
    # to itself (always True). Load two independent configs so the round-trip
    # comparison is actually meaningful.
    config_a = load_config(test_root, "test_pool_config.yaml")
    config_b = load_config(test_root, "test_pool_config.yaml")
    config_a["wallet"]["pool_list"] = [d]
    config_b["wallet"]["pool_list"] = [pwc.to_json_dict()]
    print(config_a["wallet"]["pool_list"])
    save_config(test_root, "test_pool_config_a.yaml", config_a)
    save_config(test_root, "test_pool_config_b.yaml", config_b)
    assert config_a == config_b
def db_upgrade_func(
    root_path: Path,
    in_db_path: Optional[Path] = None,
    out_db_path: Optional[Path] = None,
    no_update_config: bool = False,
):
    """
    Convert a v1 blockchain database to v2, deriving missing input/output
    paths from config.yaml's full_node section. config.yaml's database_path
    is rewritten to the v2 name only when BOTH paths were derived from the
    config and updating was not explicitly disabled. The original database
    file is never modified or removed.
    """
    # Only rewrite config.yaml when both paths come from the config itself.
    update_config: bool = in_db_path is None and out_db_path is None and not no_update_config

    selected_network: str
    db_pattern: str
    if in_db_path is None or out_db_path is None:
        node_config = load_config(root_path, "config.yaml")["full_node"]
        selected_network = node_config["selected_network"]
        db_pattern = node_config["database_path"]

    if in_db_path is None:
        in_db_path = path_from_root(root_path, db_pattern.replace("CHALLENGE", selected_network))
    if out_db_path is None:
        out_name = db_pattern.replace("CHALLENGE", selected_network).replace("_v1_", "_v2_")
        out_db_path = path_from_root(root_path, out_name)
        mkdir(out_db_path.parent)

    asyncio.run(convert_v1_to_v2(in_db_path, out_db_path))

    if update_config:
        print("updating config.yaml")
        full_config = load_config(root_path, "config.yaml")
        new_db_path = db_pattern.replace("_v1_", "_v2_")
        full_config["full_node"]["database_path"] = new_db_path
        print(f"database_path: {new_db_path}")
        save_config(root_path, "config.yaml", full_config)

    print(f"\n\nLEAVING PREVIOUS DB FILE UNTOUCHED {in_db_path}\n")
async def test_farmer_get_pool_state(environment):
    """
    Exercise the farmer's get_pool_state RPC: starts empty, reflects a pool
    added via config.yaml, reflects payout-instruction changes made over RPC,
    and keeps only recent (within 24h) point entries at the front of the
    points lists after a new signage point is processed.
    """
    # Unpack the fixture: services plus their RPC APIs and clients.
    (
        farmer_service,
        farmer_rpc_api,
        farmer_rpc_client,
        harvester_service,
        harvester_rpc_api,
        harvester_rpc_client,
    ) = environment
    farmer_api = farmer_service._api

    # Initially no pools are configured.
    assert len((await farmer_rpc_client.get_pool_state())["pool_state"]) == 0

    pool_list = [{
        "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
        "owner_public_key": "aa11e92274c0f6a2449fd0c7cfab4a38f943289dbe2214c808b36390c34eacfaa1d4c8f3c6ec582ac502ff32228679a0",  # noqa
        "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
        "pool_url": "localhost",
        "p2_singleton_puzzle_hash": "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
        "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
    }]

    # Write the pool into config.yaml and have the farmer pick it up.
    root_path = farmer_api.farmer._root_path
    config = load_config(root_path, "config.yaml")
    config["pool"]["pool_list"] = pool_list
    save_config(root_path, "config.yaml", config)
    await farmer_api.farmer.update_pool_state()

    pool_state = (await farmer_rpc_client.get_pool_state())["pool_state"]
    assert len(pool_state) == 1
    assert (pool_state[0]["pool_config"]["payout_instructions"] ==
            "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8")
    # Changing payout instructions over RPC should be visible after a refresh.
    await farmer_rpc_client.set_payout_instructions(
        hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]), "1234vy")
    await farmer_api.farmer.update_pool_state()
    pool_state = (await farmer_rpc_client.get_pool_state())["pool_state"]
    assert pool_state[0]["pool_config"]["payout_instructions"] == "1234vy"

    now = time.time()
    # Big arbitrary numbers used to be unlikely to accidentally collide.
    # One entry just outside the 24h window, one just inside it.
    before_24h = (now - (25 * 60 * 60), 29984713)
    since_24h = (now - (23 * 60 * 60), 93049817)
    for p2_singleton_puzzle_hash, pool_dict in farmer_api.farmer.pool_state.items():
        for key in ["points_found_24h", "points_acknowledged_24h"]:
            # before_24h ends up at index 0 (inserted last), since_24h at index 1.
            pool_dict[key].insert(0, since_24h)
            pool_dict[key].insert(0, before_24h)

    sp = farmer_protocol.NewSignagePoint(std_hash(b"1"), std_hash(b"2"), std_hash(b"3"),
                                         uint64(1), uint64(1000000), uint8(2))
    await farmer_api.new_signage_point(sp)

    client_pool_state = await farmer_rpc_client.get_pool_state()
    # The pre-24h entry is no longer at the front — only the recent one remains
    # at index 0 (tuples come back as lists over the RPC JSON boundary).
    for pool_dict in client_pool_state["pool_state"]:
        for key in ["points_found_24h", "points_acknowledged_24h"]:
            assert pool_dict[key][0] == list(since_24h)
def configure(
    root_path: Path,
    set_farmer_peer: str,
    set_node_introducer: str,
    set_fullnode_port: str,
    set_harvester_port: str,
    set_log_level: str,
    enable_upnp: str,
    set_outbound_peer_count: str,
    set_peer_count: str,
    testnet: str,
):
    """
    Apply CLI-driven changes to config.yaml. Each non-empty/non-None argument
    updates the corresponding setting; the config is saved only when at least
    one change was made. Returns 0.
    """
    # BUG FIX: the config was loaded from DEFAULT_ROOT_PATH but saved to
    # root_path. Load and save must use the same root so the changes are
    # applied to the config file that is actually written back.
    config: Dict = load_config(root_path, "config.yaml")
    change_made = False
    if set_node_introducer:
        try:
            if set_node_introducer.index(":"):
                host, port = (
                    ":".join(set_node_introducer.split(":")[:-1]),
                    set_node_introducer.split(":")[-1],
                )
                config["full_node"]["introducer_peer"]["host"] = host
                config["full_node"]["introducer_peer"]["port"] = int(port)
                config["introducer"]["port"] = int(port)
                print("Node introducer updated")
                change_made = True
        except ValueError:
            print("Node introducer address must be in format [IP:Port]")
    if set_farmer_peer:
        try:
            if set_farmer_peer.index(":"):
                host, port = (
                    ":".join(set_farmer_peer.split(":")[:-1]),
                    set_farmer_peer.split(":")[-1],
                )
                config["full_node"]["farmer_peer"]["host"] = host
                config["full_node"]["farmer_peer"]["port"] = int(port)
                config["harvester"]["farmer_peer"]["host"] = host
                config["harvester"]["farmer_peer"]["port"] = int(port)
                print("Farmer peer updated, make sure your harvester has the proper cert installed")
                change_made = True
        except ValueError:
            print("Farmer address must be in format [IP:Port]")
    if set_fullnode_port:
        _set_full_node_port(config, int(set_fullnode_port))
        print("Default full node port updated")
        change_made = True
    if set_harvester_port:
        config["harvester"]["port"] = int(set_harvester_port)
        config["farmer"]["harvester_peer"]["port"] = int(set_harvester_port)
        print("Default harvester port updated")
        change_made = True
    if set_log_level:
        levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]
        if set_log_level in levels:
            config["logging"]["log_level"] = set_log_level
            print(f"Logging level updated. Check {DEFAULT_ROOT_PATH}/log/debug.log")
            change_made = True
        else:
            print(f"Logging level not updated. Use one of: {levels}")
    if enable_upnp is not None:
        config["full_node"]["enable_upnp"] = str2bool(enable_upnp)
        if str2bool(enable_upnp):
            print("uPnP enabled")
        else:
            print("uPnP disabled")
        change_made = True
    if set_outbound_peer_count is not None:
        config["full_node"]["target_outbound_peer_count"] = int(set_outbound_peer_count)
        print("Target outbound peer count updated")
        change_made = True
    if set_peer_count is not None:
        config["full_node"]["target_peer_count"] = int(set_peer_count)
        print("Target peer count updated")
        change_made = True
    if testnet is not None:
        # The testnet/mainnet branches were near-duplicates; the shared logic
        # now lives in _set_network (behavior unchanged).
        if testnet == "true" or testnet == "t":
            print("Setting Testnet")
            _set_network(
                config, "58444", "beta1_introducer.chia.net", "dns-introducer-testnet7.chia.net", "testnet7"
            )
            print("Default full node port, introducer and network setting updated")
            change_made = True
        elif testnet == "false" or testnet == "f":
            print("Setting Mainnet")
            _set_network(config, "8444", "introducer.chia.net", "dns-introducer.chia.net", "mainnet")
            print("Default full node port, introducer and network setting updated")
            change_made = True
        else:
            print("Please choose True or False")
    if change_made:
        print("Restart any running chia services for changes to take effect")
        save_config(root_path, "config.yaml", config)
    return 0


def _set_full_node_port(config: Dict, port: int) -> None:
    """Point every service that dials the full node (and the introducer) at ``port``."""
    config["full_node"]["port"] = port
    config["full_node"]["introducer_peer"]["port"] = port
    config["farmer"]["full_node_peer"]["port"] = port
    config["timelord"]["full_node_peer"]["port"] = port
    config["wallet"]["full_node_peer"]["port"] = port
    config["wallet"]["introducer_peer"]["port"] = port
    config["introducer"]["port"] = port


def _set_network(config: Dict, port: str, introducer: str, dns_introducer: str, net: str) -> None:
    """Switch ports, introducer hosts, and every service's selected_network to ``net``."""
    _set_full_node_port(config, int(port))
    config["full_node"]["introducer_peer"]["host"] = introducer
    config["full_node"]["dns_servers"] = [dns_introducer]
    config["selected_network"] = net
    for section in ("harvester", "pool", "farmer", "timelord", "full_node", "ui", "introducer", "wallet"):
        config[section]["selected_network"] = net
def check_keys(new_root: Path) -> None:
    """
    Validate/initialize the reward-address settings in config.yaml against the
    keys in the keychain: sets farmer/pool xch_target_address when missing,
    warns when a configured address is not derivable from the first 500 wallet
    keys, and merges the pool public keys into the farmer section.
    """
    keychain: Keychain = Keychain()
    all_sks = keychain.get_all_private_keys()
    if len(all_sks) == 0:
        print("No keys are present in the keychain. Generate them with 'chia keys generate'")
        return
    config: Dict = load_config(new_root, "config.yaml")
    # One pool child pubkey per master key in the keychain.
    pool_child_pubkeys = [master_sk_to_pool_sk(sk).get_g1() for sk, _ in all_sks]
    all_targets = []
    # If an address is not configured yet there is nothing to search for —
    # but we still derive at least one batch (i == 0) so all_targets[0] exists
    # for the "set the destination" step below.
    stop_searching_for_farmer = "xch_target_address" not in config["farmer"]
    stop_searching_for_pool = "xch_target_address" not in config["pool"]
    number_of_ph_to_search = 500
    selected = config["selected_network"]
    prefix = config["network_overrides"]["config"][selected]["address_prefix"]
    for i in range(number_of_ph_to_search):
        if stop_searching_for_farmer and stop_searching_for_pool and i > 0:
            break
        for sk, _ in all_sks:
            # Derive the i-th wallet address for this key and check whether it
            # matches either configured target address.
            all_targets.append(
                encode_puzzle_hash(
                    create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1()), prefix
                )
            )
            if all_targets[-1] == config["farmer"].get("xch_target_address"):
                stop_searching_for_farmer = True
            if all_targets[-1] == config["pool"].get("xch_target_address"):
                stop_searching_for_pool = True

    # Set the destinations
    if "xch_target_address" not in config["farmer"]:
        print(f"Setting the xch destination address for coinbase fees reward to {all_targets[0]}")
        config["farmer"]["xch_target_address"] = all_targets[0]
    elif config["farmer"]["xch_target_address"] not in all_targets:
        print(
            f"WARNING: using a farmer address which we don't have the private"
            f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
            f"{config['farmer']['xch_target_address']} with {all_targets[0]}"
        )

    if "pool" not in config:
        config["pool"] = {}
    if "xch_target_address" not in config["pool"]:
        print(f"Setting the xch destination address for coinbase reward to {all_targets[0]}")
        config["pool"]["xch_target_address"] = all_targets[0]
    elif config["pool"]["xch_target_address"] not in all_targets:
        print(
            f"WARNING: using a pool address which we don't have the private"
            f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding "
            f"{config['pool']['xch_target_address']} with {all_targets[0]}"
        )

    # Set the pool pks in the farmer
    # NOTE(review): this stores a *set* into the config; save_config presumably
    # serializes it acceptably — confirm the YAML round-trip before changing.
    pool_pubkeys_hex = set(bytes(pk).hex() for pk in pool_child_pubkeys)
    if "pool_public_keys" in config["farmer"]:
        for pk_hex in config["farmer"]["pool_public_keys"]:
            # Add original ones in config
            pool_pubkeys_hex.add(pk_hex)
    config["farmer"]["pool_public_keys"] = pool_pubkeys_hex
    save_config(new_root, "config.yaml", config)
async def test_wallet_make_transaction(self, two_wallet_nodes):
    """
    End-to-end wallet RPC test: farms funds, sends basic and multi-output
    transactions (with fees and selected coins), verifies balances, pagination
    and sorting of transactions, and exercises key management (add/delete keys,
    delete-key safety checks against farmer/pool reward addresses).
    """
    test_rpc_port = uint16(21529)
    test_rpc_port_node = uint16(21530)
    num_blocks = 5
    full_nodes, wallets = two_wallet_nodes
    full_node_api = full_nodes[0]
    full_node_server = full_node_api.full_node.server
    wallet_node, server_2 = wallets[0]
    wallet_node_2, server_3 = wallets[1]
    wallet = wallet_node.wallet_state_manager.main_wallet
    wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    ph_2 = await wallet_2.get_new_puzzlehash()
    await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Farm some blocks to the first wallet so it has spendable funds.
    for i in range(0, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    # Rewards for blocks 1..num_blocks-1 are spendable now; the last block's
    # rewards become available once one more block is farmed ("eventually").
    initial_funds = sum([
        calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
        for i in range(1, num_blocks)
    ])
    initial_funds_eventually = sum([
        calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
        for i in range(1, num_blocks + 1)
    ])
    wallet_rpc_api = WalletRpcApi(wallet_node)
    config = bt.config
    hostname = config["self_hostname"]
    daemon_port = config["daemon_port"]

    def stop_node_cb():
        pass

    # Start RPC servers for both the full node and the wallet.
    full_node_rpc_api = FullNodeRpcApi(full_node_api.full_node)
    rpc_cleanup_node = await start_rpc_server(
        full_node_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port_node,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_cleanup = await start_rpc_server(
        wallet_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    await time_out_assert(5, wallet.get_confirmed_balance, initial_funds)
    await time_out_assert(5, wallet.get_unconfirmed_balance, initial_funds)
    client = await WalletRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
    await validate_get_routes(client, wallet_rpc_api)
    client_node = await FullNodeRpcClient.create(self_hostname, test_rpc_port_node, bt.root_path, config)
    try:
        addr = encode_puzzle_hash(
            await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(), "xch"
        )
        tx_amount = 15600000
        # Sending more than the balance must be rejected.
        try:
            await client.send_transaction("1", 100000000000000001, addr)
            raise Exception("Should not create high value tx")
        except ValueError:
            pass
        # Tests sending a basic transaction
        tx = await client.send_transaction("1", tx_amount, addr)
        transaction_id = tx.name

        async def tx_in_mempool():
            tx = await client.get_transaction("1", transaction_id)
            return tx.is_in_mempool()

        await time_out_assert(5, tx_in_mempool, True)
        await time_out_assert(5, wallet.get_unconfirmed_balance, initial_funds - tx_amount)
        # Unconfirmed balance reflects the pending spend; confirmed does not yet.
        assert (await client.get_wallet_balance("1"))["unconfirmed_wallet_balance"] == initial_funds - tx_amount
        assert (await client.get_wallet_balance("1"))["confirmed_wallet_balance"] == initial_funds
        for i in range(0, 5):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))

        async def eventual_balance():
            return (await client.get_wallet_balance("1"))["confirmed_wallet_balance"]

        await time_out_assert(5, eventual_balance, initial_funds_eventually - tx_amount)

        # Tests offline signing
        ph_3 = await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash()
        ph_4 = await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash()
        ph_5 = await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash()

        # Test basic transaction to one output
        signed_tx_amount = 888000
        tx_res: TransactionRecord = await client.create_signed_transaction(
            [{"amount": signed_tx_amount, "puzzle_hash": ph_3}]
        )
        assert tx_res.fee_amount == 0
        assert tx_res.amount == signed_tx_amount
        assert len(tx_res.additions) == 2  # The output and the change
        assert any([addition.amount == signed_tx_amount for addition in tx_res.additions])

        push_res = await client_node.push_tx(tx_res.spend_bundle)
        assert push_res["success"]
        assert (
            await client.get_wallet_balance("1")
        )["confirmed_wallet_balance"] == initial_funds_eventually - tx_amount
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        await time_out_assert(5, eventual_balance, initial_funds_eventually - tx_amount - signed_tx_amount)

        # Test transaction to two outputs, from a specified coin, with a fee
        # (the change coin from the previous transaction is the one spent here).
        coin_to_spend = None
        for addition in tx_res.additions:
            if addition.amount != signed_tx_amount:
                coin_to_spend = addition
        assert coin_to_spend is not None

        tx_res = await client.create_signed_transaction(
            [{"amount": 444, "puzzle_hash": ph_4}, {"amount": 999, "puzzle_hash": ph_5}],
            coins=[coin_to_spend],
            fee=100,
        )
        assert tx_res.fee_amount == 100
        assert tx_res.amount == 444 + 999
        assert len(tx_res.additions) == 3  # The outputs and the change
        assert any([addition.amount == 444 for addition in tx_res.additions])
        assert any([addition.amount == 999 for addition in tx_res.additions])
        # removals - additions == the fee
        assert sum([rem.amount for rem in tx_res.removals]) - sum([ad.amount for ad in tx_res.additions]) == 100

        push_res = await client_node.push_tx(tx_res.spend_bundle)
        assert push_res["success"]
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        new_balance = initial_funds_eventually - tx_amount - signed_tx_amount - 444 - 999 - 100
        await time_out_assert(5, eventual_balance, new_balance)

        send_tx_res: TransactionRecord = await client.send_transaction_multi(
            "1",
            [{"amount": 555, "puzzle_hash": ph_4}, {"amount": 666, "puzzle_hash": ph_5}],
            fee=200,
        )
        assert send_tx_res is not None
        assert send_tx_res.fee_amount == 200
        assert send_tx_res.amount == 555 + 666
        assert len(send_tx_res.additions) == 3  # The outputs and the change
        assert any([addition.amount == 555 for addition in send_tx_res.additions])
        assert any([addition.amount == 666 for addition in send_tx_res.additions])
        assert (
            sum([rem.amount for rem in send_tx_res.removals])
            - sum([ad.amount for ad in send_tx_res.additions]) == 200
        )

        await asyncio.sleep(3)
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        new_balance = new_balance - 555 - 666 - 200
        await time_out_assert(5, eventual_balance, new_balance)
        address = await client.get_next_address("1", True)
        assert len(address) > 10
        transactions = await client.get_transactions("1")
        assert len(transactions) > 1
        all_transactions = await client.get_transactions("1")
        # Test transaction pagination
        some_transactions = await client.get_transactions("1", 0, 5)
        some_transactions_2 = await client.get_transactions("1", 5, 10)
        assert some_transactions == all_transactions[0:5]
        assert some_transactions_2 == all_transactions[5:10]

        # Testing sorts
        # Test the default sort (CONFIRMED_AT_HEIGHT)
        assert all_transactions == sorted(all_transactions, key=attrgetter("confirmed_at_height"))
        all_transactions = await client.get_transactions("1", reverse=True)
        assert all_transactions == sorted(
            all_transactions, key=attrgetter("confirmed_at_height"), reverse=True
        )

        # Test RELEVANCE
        await client.send_transaction("1", 1, encode_puzzle_hash(ph_2, "xch"))  # Create a pending tx
        all_transactions = await client.get_transactions("1", sort_key=SortKey.RELEVANCE)
        # RELEVANCE == unconfirmed first, then by height desc, then time desc.
        sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time"), reverse=True)
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height"), reverse=True)
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed"))
        assert all_transactions == sorted_transactions

        all_transactions = await client.get_transactions("1", sort_key=SortKey.RELEVANCE, reverse=True)
        sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time"))
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height"))
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed"), reverse=True)
        assert all_transactions == sorted_transactions

        pks = await client.get_public_keys()
        assert len(pks) == 1

        assert (await client.get_height_info()) > 0

        created_tx = await client.send_transaction("1", tx_amount, addr)

        async def tx_in_mempool_2():
            tx = await client.get_transaction("1", created_tx.name)
            return tx.is_in_mempool()

        # Deleting unconfirmed transactions clears both pending txs.
        await time_out_assert(5, tx_in_mempool_2, True)
        assert len(await wallet.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(1)) == 2
        await client.delete_unconfirmed_transactions("1")
        assert len(await wallet.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(1)) == 0

        sk_dict = await client.get_private_key(pks[0])
        assert sk_dict["fingerprint"] == pks[0]
        assert sk_dict["sk"] is not None
        assert sk_dict["pk"] is not None
        assert sk_dict["seed"] is not None

        mnemonic = await client.generate_mnemonic()
        assert len(mnemonic) == 24

        await client.add_key(mnemonic)

        pks = await client.get_public_keys()
        assert len(pks) == 2

        await client.log_in_and_skip(pks[1])
        sk_dict = await client.get_private_key(pks[1])
        assert sk_dict["fingerprint"] == pks[1]

        fingerprint = await client.get_logged_in_fingerprint()
        assert fingerprint == pks[1]

        # Add in reward addresses into farmer and pool for testing delete key checks
        # set farmer to first private key
        sk = await wallet_node.get_key_for_fingerprint(pks[0])
        test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(0)).get_g1())
        test_config = load_config(wallet_node.root_path, "config.yaml")
        test_config["farmer"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
        # set pool to second private key
        sk = await wallet_node.get_key_for_fingerprint(pks[1])
        test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(0)).get_g1())
        test_config["pool"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
        save_config(wallet_node.root_path, "config.yaml", test_config)

        # Check first key
        sk_dict = await client.check_delete_key(pks[0])
        assert sk_dict["fingerprint"] == pks[0]
        assert sk_dict["used_for_farmer_rewards"] is True
        assert sk_dict["used_for_pool_rewards"] is False

        # Check second key
        sk_dict = await client.check_delete_key(pks[1])
        assert sk_dict["fingerprint"] == pks[1]
        assert sk_dict["used_for_farmer_rewards"] is False
        assert sk_dict["used_for_pool_rewards"] is True

        # Check unknown key
        sk_dict = await client.check_delete_key(123456)
        assert sk_dict["fingerprint"] == 123456
        assert sk_dict["used_for_farmer_rewards"] is False
        assert sk_dict["used_for_pool_rewards"] is False

        await client.delete_key(pks[0])
        await client.log_in_and_skip(pks[1])
        assert len(await client.get_public_keys()) == 1

        assert not (await client.get_sync_status())

        # The freshly-added key's wallet has no funds.
        wallets = await client.get_wallets()
        assert len(wallets) == 1
        balance = await client.get_wallet_balance(wallets[0]["id"])
        assert balance["unconfirmed_wallet_balance"] == 0

        test_wallet_backup_path = Path("test_wallet_backup_file")
        await client.create_backup(test_wallet_backup_path)
        assert test_wallet_backup_path.exists()
        test_wallet_backup_path.unlink()

        try:
            await client.send_transaction(wallets[0]["id"], 100, addr)
            raise Exception("Should not create tx if no balance")
        except ValueError:
            pass

        await client.delete_all_keys()
        assert len(await client.get_public_keys()) == 0
    finally:
        # Checks that the RPC manages to stop the node
        client.close()
        client_node.close()
        await client.await_closed()
        await client_node.await_closed()
        await rpc_cleanup()
        await rpc_cleanup_node()
async def test1(self, simulation):
    """
    End-to-end test of the farmer and harvester RPC APIs.

    Starts an RPC server for each service, then exercises (in order):
    signage points, plot listing, plot creation (including a deliberate
    duplicate plot in a sub-directory), directory add/remove bookkeeping,
    the plot cache (write / corrupt / reload), re-try of invalid plots,
    reward targets, and pool-state configuration.

    NOTE(review): the steps are strictly order-dependent — each `test_case`
    call mutates harvester state that later assertions rely on.
    """
    test_rpc_port = uint16(21522)
    test_rpc_port_2 = uint16(21523)
    harvester, farmer_api = simulation

    def stop_node_cb():
        pass

    def stop_node_cb_2():
        pass

    config = bt.config
    hostname = config["self_hostname"]
    daemon_port = config["daemon_port"]

    farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
    harvester_rpc_api = HarvesterRpcApi(harvester)

    # One RPC server per service; the returned callables shut them down.
    rpc_cleanup = await start_rpc_server(
        farmer_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_cleanup_2 = await start_rpc_server(
        harvester_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port_2,
        stop_node_cb_2,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )

    try:
        client = await FarmerRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
        client_2 = await HarvesterRpcClient.create(self_hostname, test_rpc_port_2, bt.root_path, config)

        async def have_connections():
            return len(await client.get_connections()) > 0

        await time_out_assert(15, have_connections, True)

        # No signage points exist until the farmer receives one.
        assert (await client.get_signage_point(std_hash(b"2"))) is None
        assert len(await client.get_signage_points()) == 0

        async def have_signage_points():
            return len(await client.get_signage_points()) > 0

        sp = farmer_protocol.NewSignagePoint(
            std_hash(b"1"), std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(2)
        )
        await farmer_api.new_signage_point(sp)

        await time_out_assert(5, have_signage_points, True)
        assert (await client.get_signage_point(std_hash(b"2"))) is not None

        async def have_plots():
            return len((await client_2.get_plots())["plots"]) > 0

        await time_out_assert(5, have_plots, True)

        res = await client_2.get_plots()
        num_plots = len(res["plots"])
        assert num_plots > 0

        # Create extra plots in a sub-directory (and a duplicate plot in a
        # sub-sub-directory) to exercise the directory bookkeeping below.
        plot_dir = get_plot_dir() / "subdir"
        plot_dir.mkdir(parents=True, exist_ok=True)
        plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
        plot_dir_sub.mkdir(parents=True, exist_ok=True)
        plotter = DiskPlotter()
        filename = "test_farmer_harvester_rpc_plot.plot"
        filename_2 = "test_farmer_harvester_rpc_plot2.plot"
        plotter.create_plot_disk(
            str(plot_dir),
            str(plot_dir),
            str(plot_dir),
            filename,
            18,
            stream_plot_info_pk(bt.pool_pk, bt.farmer_pk, AugSchemeMPL.key_gen(bytes([4] * 32))),
            token_bytes(32),
            128,
            0,
            2000,
            0,
            False,
        )

        # Making a plot with a puzzle hash encoded into it instead of pk
        plot_id_2 = token_bytes(32)
        plotter.create_plot_disk(
            str(plot_dir),
            str(plot_dir),
            str(plot_dir),
            filename_2,
            18,
            stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
            plot_id_2,
            128,
            0,
            2000,
            0,
            False,
        )

        # Making the same plot, in a different dir. This should not be farmed
        plotter.create_plot_disk(
            str(plot_dir_sub),
            str(plot_dir_sub),
            str(plot_dir_sub),
            filename_2,
            18,
            stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
            plot_id_2,
            128,
            0,
            2000,
            0,
            False,
        )

        # The new directories are not registered yet, so the count is unchanged.
        res_2 = await client_2.get_plots()
        assert len(res_2["plots"]) == num_plots

        # Test farmer get_harvesters
        async def test_get_harvesters():
            farmer_res = await client.get_harvesters()
            if len(list(farmer_res["harvesters"])) != 1:
                return False
            if len(list(farmer_res["harvesters"][0]["plots"])) != num_plots:
                return False
            return True

        await time_out_assert(30, test_get_harvesters)

        # Shared mutable expectation; the refresh callback below asserts
        # against whatever values are currently stored here.
        expected_result: PlotRefreshResult = PlotRefreshResult()

        def test_refresh_callback(refresh_result: PlotRefreshResult):
            assert refresh_result.loaded_plots == expected_result.loaded_plots
            assert refresh_result.removed_plots == expected_result.removed_plots
            assert refresh_result.processed_files == expected_result.processed_files
            assert refresh_result.remaining_files == expected_result.remaining_files

        harvester.plot_manager.set_refresh_callback(test_refresh_callback)

        async def test_case(
            trigger, expect_loaded, expect_removed, expect_processed, expected_directories, expect_total_plots
        ):
            # Set the expectations first so the refresh callback sees them,
            # then run the trigger (an RPC coroutine) and force a refresh.
            expected_result.loaded_plots = expect_loaded
            expected_result.removed_plots = expect_removed
            expected_result.processed_files = expect_processed
            await trigger
            harvester.plot_manager.trigger_refresh()
            assert len(await client_2.get_plot_directories()) == expected_directories
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            result = await client_2.get_plots()
            assert len(result["plots"]) == expect_total_plots
            assert len(harvester.plot_manager.cache) == expect_total_plots
            assert len(harvester.plot_manager.failed_to_open_filenames) == 0

        # Add plot_dir with two new plots
        await test_case(
            client_2.add_plot_directory(str(plot_dir)),
            expect_loaded=2,
            expect_removed=0,
            expect_processed=2,
            expected_directories=2,
            expect_total_plots=num_plots + 2,
        )
        # Add plot_dir_sub with one duplicate
        await test_case(
            client_2.add_plot_directory(str(plot_dir_sub)),
            expect_loaded=0,
            expect_removed=0,
            expect_processed=1,
            expected_directories=3,
            expect_total_plots=num_plots + 2,
        )
        # Delete one plot
        await test_case(
            client_2.delete_plot(str(plot_dir / filename)),
            expect_loaded=0,
            expect_removed=1,
            expect_processed=0,
            expected_directories=3,
            expect_total_plots=num_plots + 1,
        )
        # Remove directory with the duplicate
        await test_case(
            client_2.remove_plot_directory(str(plot_dir_sub)),
            expect_loaded=0,
            expect_removed=1,
            expect_processed=0,
            expected_directories=2,
            expect_total_plots=num_plots + 1,
        )
        # Re-add the directory with the duplicate for other tests
        await test_case(
            client_2.add_plot_directory(str(plot_dir_sub)),
            expect_loaded=0,
            expect_removed=0,
            expect_processed=1,
            expected_directories=3,
            expect_total_plots=num_plots + 1,
        )
        # Remove the directory which has the duplicated plot loaded. This removes the duplicated plot from plot_dir
        # and in the same run loads the plot from plot_dir_sub which is not longer seen as duplicate.
        await test_case(
            client_2.remove_plot_directory(str(plot_dir)),
            expect_loaded=1,
            expect_removed=1,
            expect_processed=1,
            expected_directories=2,
            expect_total_plots=num_plots + 1,
        )
        # Re-add the directory now the plot seen as duplicate is from plot_dir, not from plot_dir_sub like before
        await test_case(
            client_2.add_plot_directory(str(plot_dir)),
            expect_loaded=0,
            expect_removed=0,
            expect_processed=1,
            expected_directories=3,
            expect_total_plots=num_plots + 1,
        )
        # Remove the duplicated plot
        await test_case(
            client_2.delete_plot(str(plot_dir / filename_2)),
            expect_loaded=0,
            expect_removed=1,
            expect_processed=0,
            expected_directories=3,
            expect_total_plots=num_plots + 1,
        )
        # Remove the directory with the loaded plot which is not longer a duplicate
        await test_case(
            client_2.remove_plot_directory(str(plot_dir_sub)),
            expect_loaded=0,
            expect_removed=1,
            expect_processed=0,
            expected_directories=2,
            expect_total_plots=num_plots,
        )
        # Remove the directory which contains all other plots
        await test_case(
            client_2.remove_plot_directory(str(get_plot_dir())),
            expect_loaded=0,
            expect_removed=20,
            expect_processed=0,
            expected_directories=1,
            expect_total_plots=0,
        )
        # Recover the plots to test caching
        # First make sure cache gets written if required and new plots are loaded
        await test_case(
            client_2.add_plot_directory(str(get_plot_dir())),
            expect_loaded=20,
            expect_removed=0,
            expect_processed=20,
            expected_directories=2,
            expect_total_plots=20,
        )
        assert harvester.plot_manager.cache.path().exists()
        unlink(harvester.plot_manager.cache.path())
        # Should not write the cache again on shutdown because it didn't change
        assert not harvester.plot_manager.cache.path().exists()
        harvester.plot_manager.stop_refreshing()
        assert not harvester.plot_manager.cache.path().exists()
        # Manually trigger `save_cache` and make sure it creates a new cache file
        harvester.plot_manager.cache.save()
        assert harvester.plot_manager.cache.path().exists()

        # A fresh PlotManager started against the same root should load the
        # identical plot set from the cache file just written.
        expected_result.loaded_plots = 20
        expected_result.removed_plots = 0
        expected_result.processed_files = 20
        expected_result.remaining_files = 0
        plot_manager: PlotManager = PlotManager(harvester.root_path, test_refresh_callback)
        plot_manager.start_refreshing()
        assert len(harvester.plot_manager.cache) == len(plot_manager.cache)
        await time_out_assert(5, plot_manager.needs_refresh, value=False)
        for path, plot_info in harvester.plot_manager.plots.items():
            assert path in plot_manager.plots
            assert plot_manager.plots[path].prover.get_filename() == plot_info.prover.get_filename()
            assert plot_manager.plots[path].prover.get_id() == plot_info.prover.get_id()
            assert plot_manager.plots[path].prover.get_memo() == plot_info.prover.get_memo()
            assert plot_manager.plots[path].prover.get_size() == plot_info.prover.get_size()
            assert plot_manager.plots[path].pool_public_key == plot_info.pool_public_key
            assert plot_manager.plots[path].pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash
            assert plot_manager.plots[path].plot_public_key == plot_info.plot_public_key
            assert plot_manager.plots[path].file_size == plot_info.file_size
            assert plot_manager.plots[path].time_modified == plot_info.time_modified
        assert harvester.plot_manager.plot_filename_paths == plot_manager.plot_filename_paths
        assert harvester.plot_manager.failed_to_open_filenames == plot_manager.failed_to_open_filenames
        assert harvester.plot_manager.no_key_filenames == plot_manager.no_key_filenames
        plot_manager.stop_refreshing()
        # Modify the content of the plot_manager.dat
        with open(harvester.plot_manager.cache.path(), "r+b") as file:
            file.write(b"\xff\xff")  # Sets Cache.version to 65535
        # Make sure it just loads the plots normally if it fails to load the cache
        plot_manager = PlotManager(harvester.root_path, test_refresh_callback)
        plot_manager.cache.load()
        assert len(plot_manager.cache) == 0
        plot_manager.set_public_keys(
            harvester.plot_manager.farmer_public_keys, harvester.plot_manager.pool_public_keys
        )
        expected_result.loaded_plots = 20
        expected_result.removed_plots = 0
        expected_result.processed_files = 20
        expected_result.remaining_files = 0
        plot_manager.start_refreshing()
        await time_out_assert(5, plot_manager.needs_refresh, value=False)
        assert len(plot_manager.plots) == len(harvester.plot_manager.plots)
        plot_manager.stop_refreshing()

        # Test re-trying if processing a plot failed
        # First save the plot
        retry_test_plot = Path(plot_dir_sub / filename_2).resolve()
        retry_test_plot_save = Path(plot_dir_sub / "save").resolve()
        copy(retry_test_plot, retry_test_plot_save)
        # Invalidate the plot
        with open(plot_dir_sub / filename_2, "r+b") as file:
            file.write(bytes(100))
        # Add it and validate it fails to load
        await harvester.add_plot_directory(str(plot_dir_sub))
        expected_result.loaded_plots = 0
        expected_result.removed_plots = 0
        expected_result.processed_files = 1
        expected_result.remaining_files = 0
        harvester.plot_manager.start_refreshing()
        await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
        assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
        # Make sure the file stays in `failed_to_open_filenames` and doesn't get loaded or processed in the next
        # update round
        expected_result.loaded_plots = 0
        expected_result.processed_files = 0
        harvester.plot_manager.trigger_refresh()
        await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
        assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
        # Now decrease the re-try timeout, restore the valid plot file and make sure it properly loads now
        harvester.plot_manager.refresh_parameter.retry_invalid_seconds = 0
        move(retry_test_plot_save, retry_test_plot)
        expected_result.loaded_plots = 1
        expected_result.processed_files = 1
        harvester.plot_manager.trigger_refresh()
        await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
        assert retry_test_plot not in harvester.plot_manager.failed_to_open_filenames

        # Reward targets: the *_sk flags are only included when requested.
        targets_1 = await client.get_reward_targets(False)
        assert "have_pool_sk" not in targets_1
        assert "have_farmer_sk" not in targets_1
        targets_2 = await client.get_reward_targets(True)
        assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

        new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1())
        new_ph_2: bytes32 = create_puzzlehash_for_pk(
            master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1()
        )
        await client.set_reward_targets(encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch"))
        targets_3 = await client.get_reward_targets(True)
        assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
        assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
        assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

        # Setting only the pool target (farmer target passed as None) keeps
        # the previous farmer target.
        new_ph_3: bytes32 = create_puzzlehash_for_pk(
            master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1()
        )
        await client.set_reward_targets(None, encode_puzzle_hash(new_ph_3, "xch"))
        targets_4 = await client.get_reward_targets(True)
        assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
        assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
        assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"]

        # The new targets must have been persisted to config.yaml.
        root_path = farmer_api.farmer._root_path
        config = load_config(root_path, "config.yaml")
        assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
        assert config["pool"]["xch_target_address"] == encode_puzzle_hash(new_ph_3, "xch")

        # Malformed addresses (extra or corrupted character) must be rejected.
        new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
        added_char = new_ph_3_encoded + "a"
        with pytest.raises(ValueError):
            await client.set_reward_targets(None, added_char)
        replaced_char = new_ph_3_encoded[0:-1] + "a"
        with pytest.raises(ValueError):
            await client.set_reward_targets(None, replaced_char)

        # Pool state: inject a pool_list into config and verify the farmer
        # picks it up on update_pool_state().
        assert len((await client.get_pool_state())["pool_state"]) == 0
        all_sks = farmer_api.farmer.local_keychain.get_all_private_keys()
        auth_sk = master_sk_to_pooling_authentication_sk(all_sks[0][0], 2, 1)
        pool_list = [
            {
                "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                "authentication_public_key": bytes(auth_sk.get_g1()).hex(),
                "owner_public_key": "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",
                "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                "pool_url": "localhost",
                "p2_singleton_puzzle_hash": "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
            }
        ]
        config["pool"]["pool_list"] = pool_list
        save_config(root_path, "config.yaml", config)
        await farmer_api.farmer.update_pool_state()

        pool_state = (await client.get_pool_state())["pool_state"]
        assert len(pool_state) == 1
        assert (
            pool_state[0]["pool_config"]["payout_instructions"]
            == "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
        )
        await client.set_payout_instructions(hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]), "1234vy")
        await farmer_api.farmer.update_pool_state()
        pool_state = (await client.get_pool_state())["pool_state"]
        assert pool_state[0]["pool_config"]["payout_instructions"] == "1234vy"
    finally:
        # Checks that the RPC manages to stop the node
        client.close()
        client_2.close()
        await client.await_closed()
        await client_2.await_closed()
        await rpc_cleanup()
        await rpc_cleanup_2()
async def test_wallet_rpc(self, two_wallet_nodes, trusted):
    """
    End-to-end test of the wallet RPC API against a running full node.

    Covers, in order: basic transactions and memos, offline signing with
    coin/puzzle announcements, multi-output transactions with fees,
    transaction pagination and sorting, CAT wallets, offers (create / take /
    cancel / sort / paginate), and key management (add / check / delete).

    Fix applied: `time_out_assert(15, is_trade_confirmed, ...)` was missing
    its `await`, so the trade-confirmation check never actually ran (the
    coroutine was created but never executed).

    NOTE(review): steps are strictly order-dependent; balances are threaded
    through the whole test via `new_balance` and friends.
    """
    test_rpc_port = uint16(21529)
    test_rpc_port_2 = uint16(21536)
    test_rpc_port_node = uint16(21530)
    num_blocks = 5
    full_nodes, wallets = two_wallet_nodes
    full_node_api = full_nodes[0]
    full_node_server = full_node_api.full_node.server
    wallet_node, server_2 = wallets[0]
    wallet_node_2, server_3 = wallets[1]
    wallet = wallet_node.wallet_state_manager.main_wallet
    wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    ph_2 = await wallet_2.get_new_puzzlehash()

    await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_3.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)

    # The `trusted` parameter toggles whether the wallets treat the full
    # node as a trusted peer (exercises both sync paths).
    if trusted:
        wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
        wallet_node_2.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
    else:
        wallet_node.config["trusted_peers"] = {}
        wallet_node_2.config["trusted_peers"] = {}

    for i in range(0, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))

    # Rewards from the last farmed block are not yet spendable; hence two
    # different totals.
    initial_funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
    )
    initial_funds_eventually = sum(
        [
            calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
            for i in range(1, num_blocks + 1)
        ]
    )

    wallet_rpc_api = WalletRpcApi(wallet_node)
    wallet_rpc_api_2 = WalletRpcApi(wallet_node_2)
    config = bt.config
    hostname = config["self_hostname"]
    daemon_port = config["daemon_port"]

    def stop_node_cb():
        pass

    full_node_rpc_api = FullNodeRpcApi(full_node_api.full_node)

    rpc_cleanup_node = await start_rpc_server(
        full_node_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port_node,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_cleanup = await start_rpc_server(
        wallet_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_cleanup_2 = await start_rpc_server(
        wallet_rpc_api_2,
        hostname,
        daemon_port,
        test_rpc_port_2,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )

    await time_out_assert(5, wallet.get_confirmed_balance, initial_funds)
    await time_out_assert(5, wallet.get_unconfirmed_balance, initial_funds)

    client = await WalletRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
    client_2 = await WalletRpcClient.create(self_hostname, test_rpc_port_2, bt.root_path, config)
    client_node = await FullNodeRpcClient.create(self_hostname, test_rpc_port_node, bt.root_path, config)
    try:
        await time_out_assert(5, client.get_synced)
        addr = encode_puzzle_hash(await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(), "xch")
        tx_amount = 15600000
        try:
            await client.send_transaction("1", 100000000000000001, addr)
            raise Exception("Should not create high value tx")
        except ValueError:
            pass

        # Tests sending a basic transaction
        tx = await client.send_transaction("1", tx_amount, addr, memos=["this is a basic tx"])
        transaction_id = tx.name

        async def tx_in_mempool():
            tx = await client.get_transaction("1", transaction_id)
            return tx.is_in_mempool()

        await time_out_assert(5, tx_in_mempool, True)
        await time_out_assert(5, wallet.get_unconfirmed_balance, initial_funds - tx_amount)
        assert (await client.get_wallet_balance("1"))["unconfirmed_wallet_balance"] == initial_funds - tx_amount
        assert (await client.get_wallet_balance("1"))["confirmed_wallet_balance"] == initial_funds

        for i in range(0, 5):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))

        async def eventual_balance():
            return (await client.get_wallet_balance("1"))["confirmed_wallet_balance"]

        async def eventual_balance_det(c, wallet_id: str):
            return (await c.get_wallet_balance(wallet_id))["confirmed_wallet_balance"]

        # Checks that the memo can be retrieved
        tx_confirmed = await client.get_transaction("1", transaction_id)
        assert tx_confirmed.confirmed
        assert len(tx_confirmed.get_memos()) == 1
        assert [b"this is a basic tx"] in tx_confirmed.get_memos().values()
        assert list(tx_confirmed.get_memos().keys())[0] in [a.name() for a in tx.spend_bundle.additions()]

        await time_out_assert(5, eventual_balance, initial_funds_eventually - tx_amount)

        # Tests offline signing
        ph_3 = await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash()
        ph_4 = await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash()
        ph_5 = await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash()

        # Test basic transaction to one output and coin announcement
        signed_tx_amount = 888000
        tx_coin_announcements = [
            Announcement(
                std_hash(b"coin_id_1"),
                std_hash(b"message"),
                b"\xca",
            ),
            Announcement(
                std_hash(b"coin_id_2"),
                bytes(Program.to("a string")),
            ),
        ]
        tx_res: TransactionRecord = await client.create_signed_transaction(
            [{"amount": signed_tx_amount, "puzzle_hash": ph_3}], coin_announcements=tx_coin_announcements
        )
        assert tx_res.fee_amount == 0
        assert tx_res.amount == signed_tx_amount
        assert len(tx_res.additions) == 2  # The output and the change
        assert any([addition.amount == signed_tx_amount for addition in tx_res.additions])
        # check error for a ASSERT_ANNOUNCE_CONSUMED_FAILED and if the error is not there throw a value error
        try:
            push_res = await client_node.push_tx(tx_res.spend_bundle)
        except ValueError as error:
            error_string = error.args[0]["error"]  # noqa:  # pylint: disable=E1126
            if error_string.find("ASSERT_ANNOUNCE_CONSUMED_FAILED") == -1:
                raise ValueError from error

        # # Test basic transaction to one output and puzzle announcement
        signed_tx_amount = 888000
        tx_puzzle_announcements = [
            Announcement(
                std_hash(b"puzzle_hash_1"),
                b"message",
                b"\xca",
            ),
            Announcement(
                std_hash(b"puzzle_hash_2"),
                bytes(Program.to("a string")),
            ),
        ]
        tx_res: TransactionRecord = await client.create_signed_transaction(
            [{"amount": signed_tx_amount, "puzzle_hash": ph_3}], puzzle_announcements=tx_puzzle_announcements
        )
        assert tx_res.fee_amount == 0
        assert tx_res.amount == signed_tx_amount
        assert len(tx_res.additions) == 2  # The output and the change
        assert any([addition.amount == signed_tx_amount for addition in tx_res.additions])
        # check error for a ASSERT_ANNOUNCE_CONSUMED_FAILED and if the error is not there throw a value error
        try:
            push_res = await client_node.push_tx(tx_res.spend_bundle)
        except ValueError as error:
            error_string = error.args[0]["error"]  # noqa:  # pylint: disable=E1126
            if error_string.find("ASSERT_ANNOUNCE_CONSUMED_FAILED") == -1:
                raise ValueError from error

        # Test basic transaction to one output
        signed_tx_amount = 888000
        tx_res: TransactionRecord = await client.create_signed_transaction(
            [{"amount": signed_tx_amount, "puzzle_hash": ph_3, "memos": ["My memo"]}]
        )
        assert tx_res.fee_amount == 0
        assert tx_res.amount == signed_tx_amount
        assert len(tx_res.additions) == 2  # The output and the change
        assert any([addition.amount == signed_tx_amount for addition in tx_res.additions])

        push_res = await client.push_tx(tx_res.spend_bundle)
        assert push_res["success"]
        assert (await client.get_wallet_balance("1"))[
            "confirmed_wallet_balance"
        ] == initial_funds_eventually - tx_amount

        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        await time_out_assert(5, eventual_balance, initial_funds_eventually - tx_amount - signed_tx_amount)

        # Test transaction to two outputs, from a specified coin, with a fee
        coin_to_spend = None
        for addition in tx_res.additions:
            if addition.amount != signed_tx_amount:
                coin_to_spend = addition
        assert coin_to_spend is not None

        tx_res = await client.create_signed_transaction(
            [{"amount": 444, "puzzle_hash": ph_4, "memos": ["hhh"]}, {"amount": 999, "puzzle_hash": ph_5}],
            coins=[coin_to_spend],
            fee=100,
        )
        assert tx_res.fee_amount == 100
        assert tx_res.amount == 444 + 999
        assert len(tx_res.additions) == 3  # The outputs and the change
        assert any([addition.amount == 444 for addition in tx_res.additions])
        assert any([addition.amount == 999 for addition in tx_res.additions])
        assert sum([rem.amount for rem in tx_res.removals]) - sum([ad.amount for ad in tx_res.additions]) == 100

        push_res = await client_node.push_tx(tx_res.spend_bundle)
        assert push_res["success"]
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        # Verify the memo of the 444-mojo output is recoverable on-chain.
        found: bool = False
        for addition in tx_res.spend_bundle.additions():
            if addition.amount == 444:
                cr: Optional[CoinRecord] = await client_node.get_coin_record_by_name(addition.name())
                assert cr is not None
                spend: CoinSpend = await client_node.get_puzzle_and_solution(
                    addition.parent_coin_info, cr.confirmed_block_index
                )
                sb: SpendBundle = SpendBundle([spend], G2Element())
                assert compute_memos(sb) == {addition.name(): [b"hhh"]}
                found = True
        assert found

        new_balance = initial_funds_eventually - tx_amount - signed_tx_amount - 444 - 999 - 100
        await time_out_assert(5, eventual_balance, new_balance)

        send_tx_res: TransactionRecord = await client.send_transaction_multi(
            "1",
            [
                {"amount": 555, "puzzle_hash": ph_4, "memos": ["FiMemo"]},
                {"amount": 666, "puzzle_hash": ph_5, "memos": ["SeMemo"]},
            ],
            fee=200,
        )
        assert send_tx_res is not None
        assert send_tx_res.fee_amount == 200
        assert send_tx_res.amount == 555 + 666
        assert len(send_tx_res.additions) == 3  # The outputs and the change
        assert any([addition.amount == 555 for addition in send_tx_res.additions])
        assert any([addition.amount == 666 for addition in send_tx_res.additions])
        assert (
            sum([rem.amount for rem in send_tx_res.removals]) - sum([ad.amount for ad in send_tx_res.additions])
            == 200
        )

        await asyncio.sleep(3)
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        new_balance = new_balance - 555 - 666 - 200
        await time_out_assert(5, eventual_balance, new_balance)

        address = await client.get_next_address("1", True)
        assert len(address) > 10

        transactions = await client.get_transactions("1")
        assert len(transactions) > 1

        all_transactions = await client.get_transactions("1")
        # Test transaction pagination
        some_transactions = await client.get_transactions("1", 0, 5)
        some_transactions_2 = await client.get_transactions("1", 5, 10)
        assert some_transactions == all_transactions[0:5]
        assert some_transactions_2 == all_transactions[5:10]

        # Testing sorts
        # Test the default sort (CONFIRMED_AT_HEIGHT)
        assert all_transactions == sorted(all_transactions, key=attrgetter("confirmed_at_height"))
        all_transactions = await client.get_transactions("1", reverse=True)
        assert all_transactions == sorted(all_transactions, key=attrgetter("confirmed_at_height"), reverse=True)

        # Test RELEVANCE
        await client.send_transaction("1", 1, encode_puzzle_hash(ph_2, "xch"))  # Create a pending tx
        all_transactions = await client.get_transactions("1", sort_key=SortKey.RELEVANCE)
        sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time"), reverse=True)
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height"), reverse=True)
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed"))
        assert all_transactions == sorted_transactions

        all_transactions = await client.get_transactions("1", sort_key=SortKey.RELEVANCE, reverse=True)
        sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time"))
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height"))
        sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed"), reverse=True)
        assert all_transactions == sorted_transactions

        # Checks that the memo can be retrieved
        tx_confirmed = await client.get_transaction("1", send_tx_res.name)
        assert tx_confirmed.confirmed
        if isinstance(tx_confirmed, SpendBundle):
            memos = compute_memos(tx_confirmed)
        else:
            memos = tx_confirmed.get_memos()
        assert len(memos) == 2
        print(memos)
        assert [b"FiMemo"] in memos.values()
        assert [b"SeMemo"] in memos.values()
        assert list(memos.keys())[0] in [a.name() for a in send_tx_res.spend_bundle.additions()]
        assert list(memos.keys())[1] in [a.name() for a in send_tx_res.spend_bundle.additions()]

        ##############
        # CATS       #
        ##############

        # Creates a wallet and a CAT with 20 mojos
        res = await client.create_new_cat_and_wallet(20)
        assert res["success"]
        cat_0_id = res["wallet_id"]
        asset_id = bytes.fromhex(res["asset_id"])
        assert len(asset_id) > 0

        bal_0 = await client.get_wallet_balance(cat_0_id)
        assert bal_0["confirmed_wallet_balance"] == 0
        assert bal_0["pending_coin_removal_count"] == 1
        col = await client.get_cat_asset_id(cat_0_id)
        assert col == asset_id
        assert (await client.get_cat_name(cat_0_id)) == "CAT Wallet"
        await client.set_cat_name(cat_0_id, "My cat")
        assert (await client.get_cat_name(cat_0_id)) == "My cat"
        wid, name = await client.cat_asset_id_to_name(col)
        assert wid == cat_0_id
        assert name == "My cat"
        should_be_none = await client.cat_asset_id_to_name(bytes([0] * 32))
        assert should_be_none is None
        verified_asset_id = next(iter(DEFAULT_CATS.items()))[1]["asset_id"]
        should_be_none, name = await client.cat_asset_id_to_name(bytes.fromhex(verified_asset_id))
        assert should_be_none is None
        assert name == next(iter(DEFAULT_CATS.items()))[1]["name"]

        await asyncio.sleep(1)
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        await time_out_assert(10, eventual_balance_det, 20, client, cat_0_id)
        bal_0 = await client.get_wallet_balance(cat_0_id)
        assert bal_0["pending_coin_removal_count"] == 0
        assert bal_0["unspent_coin_count"] == 1

        # Creates a second wallet with the same CAT
        res = await client_2.create_wallet_for_existing_cat(asset_id)
        assert res["success"]
        cat_1_id = res["wallet_id"]
        colour_1 = bytes.fromhex(res["asset_id"])
        assert colour_1 == asset_id

        await asyncio.sleep(1)
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        bal_1 = await client_2.get_wallet_balance(cat_1_id)
        assert bal_1["confirmed_wallet_balance"] == 0

        addr_0 = await client.get_next_address(cat_0_id, False)
        addr_1 = await client_2.get_next_address(cat_1_id, False)
        assert addr_0 != addr_1

        await client.cat_spend(cat_0_id, 4, addr_1, 0, ["the cat memo"])

        await asyncio.sleep(1)
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        await time_out_assert(10, eventual_balance_det, 16, client, cat_0_id)
        await time_out_assert(10, eventual_balance_det, 4, client_2, cat_1_id)

        ##########
        # Offers #
        ##########

        # Create an offer of 5 chia for one CAT
        offer, trade_record = await client.create_offer_for_ids({uint32(1): -5, cat_0_id: 1}, validate_only=True)
        all_offers = await client.get_all_offers()
        assert len(all_offers) == 0
        assert offer is None

        offer, trade_record = await client.create_offer_for_ids({uint32(1): -5, cat_0_id: 1}, fee=uint64(1))

        summary = await client.get_offer_summary(offer)
        assert summary == {"offered": {"xch": 5}, "requested": {col.hex(): 1}}

        assert await client.check_offer_validity(offer)

        all_offers = await client.get_all_offers(file_contents=True)
        assert len(all_offers) == 1
        assert TradeStatus(all_offers[0].status) == TradeStatus.PENDING_ACCEPT
        assert all_offers[0].offer == bytes(offer)

        trade_record = await client_2.take_offer(offer, fee=uint64(1))
        assert TradeStatus(trade_record.status) == TradeStatus.PENDING_CONFIRM

        await client.cancel_offer(offer.name(), secure=False)

        trade_record = await client.get_offer(offer.name(), file_contents=True)
        assert trade_record.offer == bytes(offer)
        assert TradeStatus(trade_record.status) == TradeStatus.CANCELLED

        await client.cancel_offer(offer.name(), fee=uint64(1), secure=True)

        trade_record = await client.get_offer(offer.name())
        assert TradeStatus(trade_record.status) == TradeStatus.PENDING_CANCEL

        new_offer, new_trade_record = await client.create_offer_for_ids({uint32(1): -5, cat_0_id: 1}, fee=uint64(1))
        all_offers = await client.get_all_offers()
        assert len(all_offers) == 2

        await asyncio.sleep(1)
        for i in range(0, 5):
            await client.farm_block(encode_puzzle_hash(ph_2, "xch"))
            await asyncio.sleep(0.5)

        async def is_trade_confirmed(client, trade) -> bool:
            trade_record = await client.get_offer(trade.name())
            return TradeStatus(trade_record.status) == TradeStatus.CONFIRMED

        # BUGFIX: this call was previously missing `await`, so the coroutine
        # was created but never run and the confirmation was never checked.
        await time_out_assert(15, is_trade_confirmed, True, client, offer)

        # Test trade sorting
        def only_ids(trades):
            return [t.trade_id for t in trades]

        trade_record = await client.get_offer(offer.name())
        all_offers = await client.get_all_offers(include_completed=True)  # confirmed at index descending
        assert len(all_offers) == 2
        assert only_ids(all_offers) == only_ids([trade_record, new_trade_record])
        all_offers = await client.get_all_offers(
            include_completed=True, reverse=True
        )  # confirmed at index ascending
        assert only_ids(all_offers) == only_ids([new_trade_record, trade_record])
        all_offers = await client.get_all_offers(include_completed=True, sort_key="RELEVANCE")  # most relevant
        assert only_ids(all_offers) == only_ids([new_trade_record, trade_record])
        all_offers = await client.get_all_offers(
            include_completed=True, sort_key="RELEVANCE", reverse=True
        )  # least relevant
        assert only_ids(all_offers) == only_ids([trade_record, new_trade_record])
        # Test pagination
        all_offers = await client.get_all_offers(include_completed=True, start=0, end=1)
        assert len(all_offers) == 1
        all_offers = await client.get_all_offers(include_completed=True, start=50)
        assert len(all_offers) == 0
        all_offers = await client.get_all_offers(include_completed=True, start=0, end=50)
        assert len(all_offers) == 2

        # Keys and addresses
        address = await client.get_next_address("1", True)
        assert len(address) > 10

        all_transactions = await client.get_transactions("1")
        some_transactions = await client.get_transactions("1", 0, 5)
        some_transactions_2 = await client.get_transactions("1", 5, 10)
        assert len(all_transactions) > 1
        assert some_transactions == all_transactions[0:5]
        assert some_transactions_2 == all_transactions[5:10]

        transaction_count = await client.get_transaction_count("1")
        assert transaction_count == len(all_transactions)

        pks = await client.get_public_keys()
        assert len(pks) == 1

        assert (await client.get_height_info()) > 0

        created_tx = await client.send_transaction("1", tx_amount, addr)

        async def tx_in_mempool_2():
            tx = await client.get_transaction("1", created_tx.name)
            return tx.is_in_mempool()

        await time_out_assert(5, tx_in_mempool_2, True)
        assert len(await wallet.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(1)) == 1
        await client.delete_unconfirmed_transactions("1")
        assert len(await wallet.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(1)) == 0

        sk_dict = await client.get_private_key(pks[0])
        assert sk_dict["fingerprint"] == pks[0]
        assert sk_dict["sk"] is not None
        assert sk_dict["pk"] is not None
        assert sk_dict["seed"] is not None

        mnemonic = await client.generate_mnemonic()
        assert len(mnemonic) == 24

        await client.add_key(mnemonic)

        pks = await client.get_public_keys()
        assert len(pks) == 2

        await client.log_in_and_skip(pks[1])
        sk_dict = await client.get_private_key(pks[1])
        assert sk_dict["fingerprint"] == pks[1]

        # Add in reward addresses into farmer and pool for testing delete key checks
        # set farmer to first private key
        sk = await wallet_node.get_key_for_fingerprint(pks[0])
        test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(0)).get_g1())
        test_config = load_config(wallet_node.root_path, "config.yaml")
        test_config["farmer"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
        # set pool to second private key
        sk = await wallet_node.get_key_for_fingerprint(pks[1])
        test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(0)).get_g1())
        test_config["pool"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
        save_config(wallet_node.root_path, "config.yaml", test_config)

        # Check first key
        sk_dict = await client.check_delete_key(pks[0])
        assert sk_dict["fingerprint"] == pks[0]
        assert sk_dict["used_for_farmer_rewards"] is True
        assert sk_dict["used_for_pool_rewards"] is False

        # Check second key
        sk_dict = await client.check_delete_key(pks[1])
        assert sk_dict["fingerprint"] == pks[1]
        assert sk_dict["used_for_farmer_rewards"] is False
        assert sk_dict["used_for_pool_rewards"] is True

        # Check unknown key
        sk_dict = await client.check_delete_key(123456)
        assert sk_dict["fingerprint"] == 123456
        assert sk_dict["used_for_farmer_rewards"] is False
        assert sk_dict["used_for_pool_rewards"] is False

        await client.delete_key(pks[0])
        await client.log_in_and_skip(pks[1])
        assert len(await client.get_public_keys()) == 1

        assert not (await client.get_sync_status())

        wallets = await client.get_wallets()
        assert len(wallets) == 1
        balance = await client.get_wallet_balance(wallets[0]["id"])
        assert balance["unconfirmed_wallet_balance"] == 0

        try:
            await client.send_transaction(wallets[0]["id"], 100, addr)
            raise Exception("Should not create tx if no balance")
        except ValueError:
            pass
        # Delete all keys
        await client.delete_all_keys()
        assert len(await client.get_public_keys()) == 0
    finally:
        # Checks that the RPC manages to stop the node
        client.close()
        client_2.close()
        client_node.close()
        await client.await_closed()
        await client_2.await_closed()
        await client_node.await_closed()
        await rpc_cleanup()
        await rpc_cleanup_2()
        await rpc_cleanup_node()
def chia_init(
    root_path: Path,
    *,
    should_check_keys: bool = True,
    fix_ssl_permissions: bool = False,
    testnet: bool = False,
    v1_db: bool = False,
):
    """
    Standard first run initialization or migration steps. Handles config creation,
    generation of SSL certs, and setting target addresses (via check_keys).

    should_check_keys can be set to False to avoid blocking when accessing a passphrase
    protected Keychain. When launching the daemon from the GUI, we want the GUI to
    handle unlocking the keychain.

    Returns -1 when root_path already holds a config (no migration performed),
    0 after a fresh initialization.
    """
    if os.environ.get("CHIA_ROOT", None) is not None:
        print(
            f"warning, your CHIA_ROOT is set to {os.environ['CHIA_ROOT']}. "
            f"Please unset the environment variable and run chia init again\n"
            f"or manually migrate config.yaml"
        )

    print(f"Chia directory {root_path}")
    if root_path.is_dir() and Path(root_path / "config" / "config.yaml").exists():
        # This is reached if CHIA_ROOT is set, or if user has run chia init twice
        # before a new update.
        if testnet:
            configure(root_path, "", "", "", "", "", "", "", "", testnet="true", peer_connect_timeout="")
        if fix_ssl_permissions:
            fix_ssl(root_path)
        if should_check_keys:
            check_keys(root_path)
        print(f"{root_path} already exists, no migration action taken")
        return -1

    create_default_chia_config(root_path)
    if testnet:
        configure(root_path, "", "", "", "", "", "", "", "", testnet="true", peer_connect_timeout="")
    create_all_ssl(root_path)
    if fix_ssl_permissions:
        fix_ssl(root_path)
    if should_check_keys:
        check_keys(root_path)

    config: Dict
    if v1_db:
        config = load_config(root_path, "config.yaml")
        # database_path is nested under the "full_node" section -- the write-back
        # below targets config["full_node"]["database_path"], so reading it from
        # the top level of the config raised KeyError.
        db_pattern = config["full_node"]["database_path"]
        new_db_path = db_pattern.replace("_v2_", "_v1_")
        config["full_node"]["database_path"] = new_db_path
        save_config(root_path, "config.yaml", config)
    else:
        config = load_config(root_path, "config.yaml")["full_node"]
        db_path_replaced: str = config["database_path"].replace("CHALLENGE", config["selected_network"])
        db_path = path_from_root(root_path, db_path_replaced)
        mkdir(db_path.parent)
        import sqlite3

        # Pre-create an empty v2 database with its version marker so the node
        # finds a consistent schema version on first start.
        with sqlite3.connect(db_path) as connection:
            connection.execute("CREATE TABLE database_version(version int)")
            connection.execute("INSERT INTO database_version VALUES (2)")
            connection.commit()

    print("")
    print("To see your keys, run 'chia keys show --show-mnemonic-seed'")

    return 0
def configure(
    root_path: Path,
    set_farmer_peer: str,
    set_node_introducer: str,
    set_fullnode_port: str,
    set_log_level: str,
    enable_upnp: str,
):
    """
    Apply command-line overrides to config.yaml under root_path and save it.

    Empty-string arguments mean "leave that setting untouched"; enable_upnp uses
    None for that purpose so an explicit "false" can still be applied. Prints a
    status line for every change made and returns 0.
    """
    # Load from root_path (not DEFAULT_ROOT_PATH) so we read the same file we
    # save below -- reading one root and writing another could silently clobber
    # unrelated settings in the target root.
    config: Dict = load_config(root_path, "config.yaml")
    change_made = False
    if set_node_introducer:
        try:
            # rsplit raises ValueError when no ":" is present; int() raises on a
            # non-numeric port.  Both fall through to the error message below.
            host, port = set_node_introducer.rsplit(":", 1)
            if not host:
                raise ValueError("missing host")
            # Validate the port before mutating config so a bad value cannot
            # leave a half-applied host/port pair behind.
            port_int = int(port)
            config["full_node"]["introducer_peer"]["host"] = host
            config["full_node"]["introducer_peer"]["port"] = port_int
            config["introducer"]["port"] = port_int
            print("Node introducer updated")
            change_made = True
        except ValueError:
            print("Node introducer address must be in format [IP:Port]")
    if set_farmer_peer:
        try:
            host, port = set_farmer_peer.rsplit(":", 1)
            if not host:
                raise ValueError("missing host")
            port_int = int(port)
            config["full_node"]["farmer_peer"]["host"] = host
            config["full_node"]["farmer_peer"]["port"] = port_int
            config["harvester"]["farmer_peer"]["host"] = host
            config["harvester"]["farmer_peer"]["port"] = port_int
            print("Farmer peer updated, make sure your harvester has the proper cert installed")
            change_made = True
        except ValueError:
            print("Farmer address must be in format [IP:Port]")
    if set_fullnode_port:
        # The full node port is referenced by every service that dials it.
        port_int = int(set_fullnode_port)
        config["full_node"]["port"] = port_int
        config["full_node"]["introducer_peer"]["port"] = port_int
        config["farmer"]["full_node_peer"]["port"] = port_int
        config["timelord"]["full_node_peer"]["port"] = port_int
        config["wallet"]["full_node_peer"]["port"] = port_int
        config["wallet"]["introducer_peer"]["port"] = port_int
        config["introducer"]["port"] = port_int
        print("Default full node port updated")
        change_made = True
    if set_log_level:
        levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]
        if set_log_level in levels:
            config["logging"]["log_level"] = set_log_level
            print(f"Logging level updated. Check {DEFAULT_ROOT_PATH}/log/debug.log")
            change_made = True
        else:
            print(f"Logging level not updated. Use one of: {levels}")
    if enable_upnp is not None:
        config["full_node"]["enable_upnp"] = str2bool(enable_upnp)
        if str2bool(enable_upnp):
            print("uPnP enabled")
        else:
            print("uPnP disabled")
        change_made = True
    if change_made:
        print("Restart any running chia services for changes to take effect")
        save_config(root_path, "config.yaml", config)
    return 0
async def test1(self, simulation):
    """
    End-to-end exercise of the farmer and harvester RPC servers.

    Starts an RPC server for each service, then walks through: signage-point
    queries, plot creation/listing/deletion, plot-directory management, reward
    target updates (with config.yaml verification and invalid-address checks),
    and pool-state / payout-instruction handling.  NOTE(review): relies on
    module-level fixtures (bt, self_hostname, get_plot_dir) defined elsewhere
    in this file.
    """
    test_rpc_port = uint16(21522)
    test_rpc_port_2 = uint16(21523)
    harvester, farmer_api = simulation

    # Dummy shutdown callbacks -- the test stops services itself in `finally`.
    def stop_node_cb():
        pass

    def stop_node_cb_2():
        pass

    config = bt.config
    hostname = config["self_hostname"]
    daemon_port = config["daemon_port"]

    farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
    harvester_rpc_api = HarvesterRpcApi(harvester)

    rpc_cleanup = await start_rpc_server(
        farmer_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_cleanup_2 = await start_rpc_server(
        harvester_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port_2,
        stop_node_cb_2,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )

    try:
        client = await FarmerRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
        client_2 = await HarvesterRpcClient.create(self_hostname, test_rpc_port_2, bt.root_path, config)

        async def have_connections():
            return len(await client.get_connections()) > 0

        await time_out_assert(15, have_connections, True)

        # No signage points exist until one is injected below.
        assert (await client.get_signage_point(std_hash(b"2"))) is None
        assert len(await client.get_signage_points()) == 0

        async def have_signage_points():
            return len(await client.get_signage_points()) > 0

        sp = farmer_protocol.NewSignagePoint(
            std_hash(b"1"), std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(2)
        )
        await farmer_api.new_signage_point(sp)
        await time_out_assert(5, have_signage_points, True)
        assert (await client.get_signage_point(std_hash(b"2"))) is not None

        async def have_plots():
            return len((await client_2.get_plots())["plots"]) > 0

        await time_out_assert(5, have_plots, True)

        res = await client_2.get_plots()
        num_plots = len(res["plots"])
        assert num_plots > 0
        # Create plots outside the tracked directories so the counts below are
        # only affected once the directories are explicitly added.
        plot_dir = get_plot_dir() / "subdir"
        plot_dir.mkdir(parents=True, exist_ok=True)
        plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
        plot_dir_sub.mkdir(parents=True, exist_ok=True)
        plotter = DiskPlotter()
        filename = "test_farmer_harvester_rpc_plot.plot"
        filename_2 = "test_farmer_harvester_rpc_plot2.plot"
        plotter.create_plot_disk(
            str(plot_dir),
            str(plot_dir),
            str(plot_dir),
            filename,
            18,
            stream_plot_info_pk(bt.pool_pk, bt.farmer_pk, AugSchemeMPL.key_gen(bytes([4] * 32))),
            token_bytes(32),
            128,
            0,
            2000,
            0,
            False,
        )

        # Making a plot with a puzzle hash encoded into it instead of pk
        plot_id_2 = token_bytes(32)
        plotter.create_plot_disk(
            str(plot_dir),
            str(plot_dir),
            str(plot_dir),
            filename_2,
            18,
            stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
            plot_id_2,
            128,
            0,
            2000,
            0,
            False,
        )

        # Making the same plot, in a different dir. This should not be farmed
        plotter.create_plot_disk(
            str(plot_dir_sub),
            str(plot_dir_sub),
            str(plot_dir_sub),
            filename_2,
            18,
            stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
            plot_id_2,
            128,
            0,
            2000,
            0,
            False,
        )

        # Untracked directories: the plot count must be unchanged.
        res_2 = await client_2.get_plots()
        assert len(res_2["plots"]) == num_plots

        # Test farmer get_plots
        farmer_res = await client.get_plots()
        assert len(list(farmer_res.values())[0]["plots"]) == num_plots

        assert len(await client_2.get_plot_directories()) == 1

        await client_2.add_plot_directory(str(plot_dir))
        await client_2.add_plot_directory(str(plot_dir_sub))

        assert len(await client_2.get_plot_directories()) == 3

        # Duplicate plot_id in the subdir is not farmed, so only +2 (not +3).
        res_2 = await client_2.get_plots()
        assert len(res_2["plots"]) == num_plots + 2

        await client_2.delete_plot(str(plot_dir / filename))
        await client_2.delete_plot(str(plot_dir / filename_2))
        await client_2.refresh_plots()

        # After deletion the previously-shadowed duplicate in subsubdir counts.
        res_3 = await client_2.get_plots()
        assert len(res_3["plots"]) == num_plots + 1

        await client_2.remove_plot_directory(str(plot_dir))
        assert len(await client_2.get_plot_directories()) == 2

        # Reward targets: search_for_private_key=False omits the sk flags.
        targets_1 = await client.get_reward_targets(False)
        assert "have_pool_sk" not in targets_1
        assert "have_farmer_sk" not in targets_1

        targets_2 = await client.get_reward_targets(True)
        assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

        new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1())
        new_ph_2: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1())

        await client.set_reward_targets(encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch"))
        targets_3 = await client.get_reward_targets(True)
        assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
        assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
        assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

        # Index 1888 is beyond the derivation search range, so no pool sk match.
        new_ph_3: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1())
        await client.set_reward_targets(None, encode_puzzle_hash(new_ph_3, "xch"))
        targets_4 = await client.get_reward_targets(True)
        assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
        assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
        assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"]

        # The RPC must have persisted the new targets into config.yaml.
        root_path = farmer_api.farmer._root_path
        config = load_config(root_path, "config.yaml")
        assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
        assert config["pool"]["xch_target_address"] == encode_puzzle_hash(new_ph_3, "xch")

        # Malformed bech32 addresses must be rejected.
        new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
        added_char = new_ph_3_encoded + "a"
        with pytest.raises(ValueError):
            await client.set_reward_targets(None, added_char)
        replaced_char = new_ph_3_encoded[0:-1] + "a"
        with pytest.raises(ValueError):
            await client.set_reward_targets(None, replaced_char)

        # Pool state: inject one pool config and check it round-trips.
        assert len((await client.get_pool_state())["pool_state"]) == 0
        all_sks = farmer_api.farmer.keychain.get_all_private_keys()
        auth_sk = master_sk_to_pooling_authentication_sk(all_sks[0][0], 2, 1)
        pool_list = [
            {
                "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                "authentication_public_key": bytes(auth_sk.get_g1()).hex(),
                "owner_public_key": "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",
                "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                "pool_url": "localhost",
                "p2_singleton_puzzle_hash": "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
            }
        ]
        config["pool"]["pool_list"] = pool_list
        save_config(root_path, "config.yaml", config)
        await farmer_api.farmer.update_pool_state()

        pool_state = (await client.get_pool_state())["pool_state"]
        assert len(pool_state) == 1
        assert (
            pool_state[0]["pool_config"]["payout_instructions"]
            == "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
        )
        await client.set_payout_instructions(hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]), "1234vy")
        pool_state = (await client.get_pool_state())["pool_state"]
        assert pool_state[0]["pool_config"]["payout_instructions"] == "1234vy"
    finally:
        # Checks that the RPC manages to stop the node
        client.close()
        client_2.close()
        await client.await_closed()
        await client_2.await_closed()
        await rpc_cleanup()
        await rpc_cleanup_2()
def configure(
    root_path: Path,
    testnet: str,
    crawler_db_path: str,
    minimum_version_count: int,
    domain_name: str,
    nameserver: str,
):
    """
    Apply Chia Seeder command-line overrides to config.yaml and save it.

    A None argument means "leave that setting untouched".  testnet accepts
    "true"/"t" or "false"/"f"; any other value prints a usage hint and leaves
    the network settings alone.  Returns 0; the config is only written back
    when at least one change was made.
    """
    # Run the parent config, in case anything there (testnet) needs to be run, THEN load the config for local changes
    chia_configure.configure(root_path, "", "", "", "", "", "", "", "", testnet, "")

    # Load from root_path (not DEFAULT_ROOT_PATH) so we read the same file we
    # save below -- mixing roots could clobber unrelated settings.
    config: Dict = load_config(root_path, "config.yaml")
    change_made = False

    if testnet is not None:
        # Both networks set the same four seeder keys; only the values differ,
        # so resolve the values first and write them once.
        port = None
        if testnet == "true" or testnet == "t":
            print("Updating Chia Seeder to testnet settings")
            port = 58444
            network = "testnet7"
            bootstrap = ["testnet-node.chia.net"]
        elif testnet == "false" or testnet == "f":
            print("Updating Chia Seeder to mainnet settings")
            port = 8444
            network = "mainnet"
            bootstrap = ["node.chia.net"]
        else:
            print("Please choose True or False")
        if port is not None:
            config["seeder"]["port"] = port
            config["seeder"]["other_peers_port"] = port
            config["seeder"]["selected_network"] = network
            config["seeder"]["bootstrap_peers"] = bootstrap
            change_made = True

    if crawler_db_path is not None:
        config["seeder"]["crawler_db_path"] = crawler_db_path
        change_made = True

    if minimum_version_count is not None:
        config["seeder"]["minimum_version_count"] = minimum_version_count
        change_made = True

    if domain_name is not None:
        config["seeder"]["domain_name"] = domain_name
        change_made = True

    if nameserver is not None:
        config["seeder"]["nameserver"] = nameserver
        change_made = True

    if change_made:
        print("Restart any running Chia Seeder services for changes to take effect")
        save_config(root_path, "config.yaml", config)
    return 0
async def test1(self, simulation):
    """
    End-to-end exercise of the farmer and harvester RPC servers (newer
    service-based fixture variant).

    Covers: route validation, signage points, plot creation, the farmer's
    get_harvesters cache, reward-target updates with config.yaml verification,
    pool-state / payout-instruction handling, and 24h point-window trimming.
    NOTE(review): relies on module-level fixtures (bt, self_hostname,
    get_plot_dir, log) defined elsewhere in this file.
    """
    test_rpc_port = uint16(21522)
    test_rpc_port_2 = uint16(21523)
    harvester_service, farmer_service = simulation
    harvester = harvester_service._node
    farmer_api = farmer_service._api

    # Dummy shutdown callbacks -- the test stops services itself in `finally`.
    def stop_node_cb():
        pass

    def stop_node_cb_2():
        pass

    config = bt.config
    hostname = config["self_hostname"]
    daemon_port = config["daemon_port"]

    farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
    harvester_rpc_api = HarvesterRpcApi(harvester)

    rpc_cleanup = await start_rpc_server(
        farmer_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port,
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_cleanup_2 = await start_rpc_server(
        harvester_rpc_api,
        hostname,
        daemon_port,
        test_rpc_port_2,
        stop_node_cb_2,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )

    try:
        client = await FarmerRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
        client_2 = await HarvesterRpcClient.create(self_hostname, test_rpc_port_2, bt.root_path, config)
        # Every registered RPC route must be reachable through the client.
        await validate_get_routes(client, farmer_rpc_api)
        await validate_get_routes(client_2, harvester_rpc_api)

        async def have_connections():
            return len(await client.get_connections()) > 0

        await time_out_assert(15, have_connections, True)

        # No signage points exist until one is injected below.
        assert (await client.get_signage_point(std_hash(b"2"))) is None
        assert len(await client.get_signage_points()) == 0

        async def have_signage_points():
            return len(await client.get_signage_points()) > 0

        sp = farmer_protocol.NewSignagePoint(
            std_hash(b"1"), std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(2)
        )
        await farmer_api.new_signage_point(sp)
        await time_out_assert(5, have_signage_points, True)
        assert (await client.get_signage_point(std_hash(b"2"))) is not None

        async def have_plots():
            return len((await client_2.get_plots())["plots"]) > 0

        await time_out_assert(5, have_plots, True)

        res = await client_2.get_plots()
        num_plots = len(res["plots"])
        assert num_plots > 0
        # Create plots outside the tracked directories so the counts below are
        # only affected once the directories are explicitly added.
        plot_dir = get_plot_dir() / "subdir"
        plot_dir.mkdir(parents=True, exist_ok=True)
        plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
        plot_dir_sub.mkdir(parents=True, exist_ok=True)
        plotter = DiskPlotter()
        filename = "test_farmer_harvester_rpc_plot.plot"
        filename_2 = "test_farmer_harvester_rpc_plot2.plot"
        plotter.create_plot_disk(
            str(plot_dir),
            str(plot_dir),
            str(plot_dir),
            filename,
            18,
            stream_plot_info_pk(bt.pool_pk, bt.farmer_pk, AugSchemeMPL.key_gen(bytes([4] * 32))),
            token_bytes(32),
            128,
            0,
            2000,
            0,
            False,
        )

        # Making a plot with a puzzle hash encoded into it instead of pk
        plot_id_2 = token_bytes(32)
        plotter.create_plot_disk(
            str(plot_dir),
            str(plot_dir),
            str(plot_dir),
            filename_2,
            18,
            stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
            plot_id_2,
            128,
            0,
            2000,
            0,
            False,
        )

        # Making the same plot, in a different dir. This should not be farmed
        plotter.create_plot_disk(
            str(plot_dir_sub),
            str(plot_dir_sub),
            str(plot_dir_sub),
            filename_2,
            18,
            stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
            plot_id_2,
            128,
            0,
            2000,
            0,
            False,
        )

        # Untracked directories: the plot count must be unchanged.
        res_2 = await client_2.get_plots()
        assert len(res_2["plots"]) == num_plots

        # Reset cache and force updates cache every second to make sure the farmer gets the most recent data
        update_interval_before = farmer_api.farmer.update_harvester_cache_interval
        farmer_api.farmer.update_harvester_cache_interval = 1
        farmer_api.farmer.harvester_cache = {}

        # Test farmer get_harvesters
        async def test_get_harvesters():
            harvester.plot_manager.trigger_refresh()
            await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
            farmer_res = await client.get_harvesters()
            if len(list(farmer_res["harvesters"])) != 1:
                log.error(f"test_get_harvesters: invalid harvesters {list(farmer_res['harvesters'])}")
                return False
            if len(list(farmer_res["harvesters"][0]["plots"])) != num_plots:
                log.error(f"test_get_harvesters: invalid plots {list(farmer_res['harvesters'])}")
                return False
            return True

        await time_out_assert_custom_interval(30, 1, test_get_harvesters)

        # Reset cache and reset update interval to avoid hitting the rate limit
        farmer_api.farmer.update_harvester_cache_interval = update_interval_before
        farmer_api.farmer.harvester_cache = {}

        # Reward targets: search_for_private_key=False omits the sk flags.
        targets_1 = await client.get_reward_targets(False)
        assert "have_pool_sk" not in targets_1
        assert "have_farmer_sk" not in targets_1

        targets_2 = await client.get_reward_targets(True)
        assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]

        new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1())
        new_ph_2: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1())

        await client.set_reward_targets(encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch"))
        targets_3 = await client.get_reward_targets(True)
        assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
        assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
        assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]

        # Index 1888 is beyond the derivation search range, so no pool sk match.
        new_ph_3: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1())
        await client.set_reward_targets(None, encode_puzzle_hash(new_ph_3, "xch"))
        targets_4 = await client.get_reward_targets(True)
        assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
        assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
        assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"]

        # The RPC must have persisted the new targets into config.yaml.
        root_path = farmer_api.farmer._root_path
        config = load_config(root_path, "config.yaml")
        assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
        assert config["pool"]["xch_target_address"] == encode_puzzle_hash(new_ph_3, "xch")

        # Malformed bech32 addresses must be rejected.
        new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
        added_char = new_ph_3_encoded + "a"
        with pytest.raises(ValueError):
            await client.set_reward_targets(None, added_char)
        replaced_char = new_ph_3_encoded[0:-1] + "a"
        with pytest.raises(ValueError):
            await client.set_reward_targets(None, replaced_char)

        # Pool state: inject one pool config and check it round-trips.
        assert len((await client.get_pool_state())["pool_state"]) == 0
        all_sks = farmer_api.farmer.local_keychain.get_all_private_keys()
        auth_sk = master_sk_to_pooling_authentication_sk(all_sks[0][0], 2, 1)
        pool_list = [
            {
                "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa",
                "authentication_public_key": bytes(auth_sk.get_g1()).hex(),
                "owner_public_key": "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",  # noqa
                "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8",
                "pool_url": "localhost",
                "p2_singleton_puzzle_hash": "16e4bac26558d315cded63d4c5860e98deb447cc59146dd4de06ce7394b14f17",
                "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58",
            }
        ]
        config["pool"]["pool_list"] = pool_list
        save_config(root_path, "config.yaml", config)
        await farmer_api.farmer.update_pool_state()

        pool_state = (await client.get_pool_state())["pool_state"]
        assert len(pool_state) == 1
        assert (
            pool_state[0]["pool_config"]["payout_instructions"]
            == "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8"
        )
        await client.set_payout_instructions(hexstr_to_bytes(pool_state[0]["pool_config"]["launcher_id"]), "1234vy")
        await farmer_api.farmer.update_pool_state()
        pool_state = (await client.get_pool_state())["pool_state"]
        assert pool_state[0]["pool_config"]["payout_instructions"] == "1234vy"

        now = time.time()
        # Big arbitrary numbers used to be unlikely to accidentally collide.
        before_24h = (now - (25 * 60 * 60), 29984713)
        since_24h = (now - (23 * 60 * 60), 93049817)
        # Seed one stale (>24h) and one fresh entry into every tracked window.
        for p2_singleton_puzzle_hash, pool_dict in farmer_api.farmer.pool_state.items():
            for key in ["points_found_24h", "points_acknowledged_24h"]:
                pool_dict[key].insert(0, since_24h)
                pool_dict[key].insert(0, before_24h)

        sp = farmer_protocol.NewSignagePoint(
            std_hash(b"1"), std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(2)
        )
        await farmer_api.new_signage_point(sp)

        # Only the fresh (<24h) entry should survive the window trim.
        client_pool_state = await client.get_pool_state()
        for pool_dict in client_pool_state["pool_state"]:
            for key in ["points_found_24h", "points_acknowledged_24h"]:
                assert pool_dict[key][0] == list(since_24h)
    finally:
        # Checks that the RPC manages to stop the node
        client.close()
        client_2.close()
        await client.await_closed()
        await client_2.await_closed()
        await rpc_cleanup()
        await rpc_cleanup_2()