async def setup_two_nodes(consensus_constants: ConsensusConstants):
    """
    Setup and teardown of two full nodes, with blockchains and separate DBs.

    Yields (node_1, node_2, server_1, server_2); tears both nodes down after
    the consumer resumes the generator.
    """
    with TempKeyring() as keychain1, TempKeyring() as keychain2:
        # Each node gets its own keychain-backed block tools instance
        bt_one = await create_block_tools_async(constants=test_constants, keychain=keychain1)
        bt_two = await create_block_tools_async(constants=test_constants, keychain=keychain2)

        node_iters = [
            setup_full_node(consensus_constants, "blockchain_test.db", 21234, bt_one, simulator=False),
            setup_full_node(consensus_constants, "blockchain_test_2.db", 21235, bt_two, simulator=False),
        ]

        fn1 = await node_iters[0].__anext__()
        fn2 = await node_iters[1].__anext__()

        yield fn1, fn2, fn1.full_node.server, fn2.full_node.server

        await _teardown_nodes(node_iters)
async def setup_full_system(
    consensus_constants: ConsensusConstants, b_tools=None, b_tools_1=None, connect_to_daemon=False
):
    """
    Setup and teardown of a full system: introducer, harvester, farmer, two
    timelords (one sanitizing) with their VDF clients, and two full nodes.

    Yields a 10-tuple:
    (node_api_1, node_api_2, harvester, farmer, introducer, timelord,
     vdf_clients, vdf_sanitizer, sanitizer, node_1_server)

    b_tools / b_tools_1 may be supplied by the caller; otherwise fresh block
    tools are created against two temporary keychains.
    """
    with TempKeyring() as keychain1, TempKeyring() as keychain2:
        if b_tools is None:
            b_tools = await create_block_tools_async(constants=test_constants, keychain=keychain1)
        if b_tools_1 is None:
            b_tools_1 = await create_block_tools_async(constants=test_constants, keychain=keychain2)
        # NOTE: ports are hard-coded and must agree pairwise below
        # (e.g. farmer on 21235 matches the harvester's peer port 21235,
        # timelord on 21237 matches node 1's port 21237).
        node_iters = [
            setup_introducer(21233),
            setup_harvester(21234, 21235, consensus_constants, b_tools),
            setup_farmer(21235, consensus_constants, b_tools, uint16(21237)),
            setup_vdf_clients(8000),
            setup_timelord(21236, 21237, False, consensus_constants, b_tools),
            setup_full_node(
                consensus_constants, "blockchain_test.db", 21237, b_tools, 21233, False, 10, True, connect_to_daemon
            ),
            setup_full_node(
                consensus_constants, "blockchain_test_2.db", 21238, b_tools_1, 21233, False, 10, True, connect_to_daemon
            ),
            setup_vdf_client(7999),
            # sanitizing timelord (sanitizer=True) paired with node 2
            setup_timelord(21239, 21238, True, consensus_constants, b_tools_1),
        ]

        # Services are started strictly in order by advancing each generator once.
        introducer, introducer_server = await node_iters[0].__anext__()
        harvester, harvester_server = await node_iters[1].__anext__()
        farmer, farmer_server = await node_iters[2].__anext__()

        async def num_connections():
            # Number of peers the harvester currently sees (the farmer)
            count = len(harvester.server.all_connections.items())
            return count

        # Wait until the harvester has connected to the farmer before
        # bringing up the remaining services.
        await time_out_assert_custom_interval(10, 3, num_connections, 1)

        vdf_clients = await node_iters[3].__anext__()
        timelord, timelord_server = await node_iters[4].__anext__()
        node_api_1 = await node_iters[5].__anext__()
        node_api_2 = await node_iters[6].__anext__()
        vdf_sanitizer = await node_iters[7].__anext__()
        sanitizer, sanitizer_server = await node_iters[8].__anext__()

        yield (
            node_api_1,
            node_api_2,
            harvester,
            farmer,
            introducer,
            timelord,
            vdf_clients,
            vdf_sanitizer,
            sanitizer,
            node_api_1.full_node.server,
        )

        await _teardown_nodes(node_iters)
async def setup_simulators_and_wallets(
    simulator_count: int,
    wallet_count: int,
    dic: Dict,
    starting_height=None,
    key_seed=None,
    starting_port=50000,
    initial_num_public_keys=5,
):
    """
    Setup and teardown of `simulator_count` full-node simulators and
    `wallet_count` wallet nodes.

    Simulators listen on starting_port + i; wallets on starting_port + 5000 + i.
    Yields (simulators, wallets); all nodes are torn down afterwards.
    """
    with TempKeyring() as keychain1, TempKeyring() as keychain2:
        consensus_constants = constants_for_dic(dic)
        simulators: List[FullNodeAPI] = []
        wallets = []
        node_iters = []

        for i in range(simulator_count):
            sim_port = starting_port + i
            # block tools modifies constants, so build a fresh instance per node
            tools = await create_block_tools_async(consensus_constants, const_dict=dic, keychain=keychain1)
            sim_iter = setup_full_node(
                tools.constants,
                f"blockchain_test_{sim_port}.db",
                sim_port,
                tools,
                simulator=True,
            )
            simulators.append(await sim_iter.__anext__())
            node_iters.append(sim_iter)

        for i in range(wallet_count):
            # Derive a distinct deterministic seed per wallet unless one was given
            seed = std_hash(uint32(i)) if key_seed is None else key_seed
            wallet_port = starting_port + 5000 + i
            # block tools modifies constants, so build a fresh instance per node
            tools = await create_block_tools_async(consensus_constants, const_dict=dic, keychain=keychain2)
            wlt_iter = setup_wallet_node(
                wallet_port,
                tools.constants,
                tools,
                None,
                key_seed=seed,
                starting_height=starting_height,
                initial_num_public_keys=initial_num_public_keys,
            )
            wallets.append(await wlt_iter.__anext__())
            node_iters.append(wlt_iter)

        yield simulators, wallets

        await _teardown_nodes(node_iters)
def main() -> None:
    """Run the full-node simulator service against a temporary keychain."""
    # The temp keychain is deleted automatically when the context exits
    with TempKeyring() as keychain:
        # With -D, connect to the keychain via the daemon instead of using a
        # local keychain
        if "-D" in sys.argv:
            keychain = None
            # Remove -D so it doesn't conflict with load_config_cli's argparse usage
            sys.argv.remove("-D")

        config = load_config_cli(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)

        # Redirect the node at the simulator-specific paths and test network
        config["database_path"] = config["simulator_database_path"]
        config["peer_db_path"] = config["simulator_peer_db_path"]
        config["introducer_peer"]["host"] = "127.0.0.1"
        config["introducer_peer"]["port"] = 58555
        config["selected_network"] = "testnet0"
        config["simulation"] = True

        kwargs = service_kwargs_for_full_node_simulator(
            DEFAULT_ROOT_PATH,
            config,
            create_block_tools(test_constants, root_path=DEFAULT_ROOT_PATH, keychain=keychain),
        )
        return run_service(**kwargs)
async def setup_n_nodes(consensus_constants: ConsensusConstants, n: int):
    """
    Setup and teardown of n full nodes, with blockchains and separate DBs.

    Ports start at 21244; each node gets its own temp keyring, which is
    cleaned up after the nodes are torn down.
    """
    first_port = 21244
    node_iters = []
    temp_keyrings = []
    for i in range(n):
        keyring = TempKeyring()
        temp_keyrings.append(keyring)
        tools = await create_block_tools_async(constants=test_constants, keychain=keyring.get_keychain())
        node_iters.append(
            setup_full_node(
                consensus_constants,
                f"blockchain_test_{i}.db",
                first_port + i,
                tools,
                simulator=False,
            )
        )

    nodes = [await it.__anext__() for it in node_iters]

    yield nodes

    await _teardown_nodes(node_iters)
    # Keyrings outlive the nodes; remove their on-disk state last
    for keyring in temp_keyrings:
        keyring.cleanup()
async def setup_node_and_wallet(consensus_constants: ConsensusConstants, starting_height=None, key_seed=None):
    """
    Setup and teardown of one full node and one wallet node sharing block tools.

    Yields (full_node_api, wallet, full_node_server, wallet_server).
    """
    with TempKeyring() as keychain:
        btools = await create_block_tools_async(constants=test_constants, keychain=keychain)
        node_iter = setup_full_node(consensus_constants, "blockchain_test.db", 21234, btools, simulator=False)
        wallet_iter = setup_wallet_node(
            21235, consensus_constants, btools, None, starting_height=starting_height, key_seed=key_seed
        )
        node_iters = [node_iter, wallet_iter]

        full_node_api = await node_iter.__anext__()
        wallet, wallet_server = await wallet_iter.__anext__()

        yield full_node_api, wallet, full_node_api.full_node.server, wallet_server

        await _teardown_nodes(node_iters)
async def extra_node(self):
    """Fixture: yield one extra full node on port 21240 with its own temp keychain."""
    with TempKeyring() as keychain:
        b_tools = await create_block_tools_async(constants=test_constants_modified, keychain=keychain)
        node_iter = setup_full_node(test_constants_modified, "blockchain_test_3.db", 21240, b_tools)
        async for node in node_iter:
            yield node
def dummy_set_passphrase(service, user, passphrase, keyring_path, index, num_workers):
    """
    Worker-process body: set a passphrase on a shared keyring and verify it
    reads back, coordinating with sibling workers via ready/start/finished
    marker files under the keyring's parent directory.

    num_workers is unused here — presumably kept for pool-invocation signature
    compatibility; TODO confirm against the caller.
    """
    with TempKeyring(existing_keyring_path=keyring_path, delete_on_cleanup=False):
        if platform == "linux" or platform == "win32" or platform == "cygwin":
            # FileKeyring's setup_keyring_file_watcher needs to be called explicitly here,
            # otherwise file events won't be detected in the child process
            KeyringWrapper.get_shared_instance().keyring.setup_keyring_file_watcher()

        # Write out a file indicating this process is ready to begin
        ready_file_path: Path = Path(keyring_path).parent / "ready" / f"{index}.ready"
        with open(ready_file_path, "w") as f:
            f.write(f"{os.getpid()}\n")

        # Wait up to 30 seconds for all processes to indicate readiness
        # (120 attempts x 0.25s polling interval)
        start_file_path: Path = Path(ready_file_path.parent) / "start"
        remaining_attempts = 120
        while remaining_attempts > 0:
            if start_file_path.exists():
                break
            else:
                sleep(0.25)
                remaining_attempts -= 1

        # NOTE(review): this assert can never fail — remaining_attempts is 0 on
        # timeout — so a missing start file is not actually detected here
        assert remaining_attempts >= 0

        KeyringWrapper.get_shared_instance().set_passphrase(service=service, user=user, passphrase=passphrase)

        found_passphrase = KeyringWrapper.get_shared_instance().get_passphrase(service, user)
        if found_passphrase != passphrase:
            log.error(
                f"[pid:{os.getpid()}] error: didn't get expected passphrase: "
                f"get_passphrase: {found_passphrase}"  # lgtm [py/clear-text-logging-sensitive-data]
                f", expected: {passphrase}"  # lgtm [py/clear-text-logging-sensitive-data]
            )

        # Write out a file indicating this process has completed its work
        finished_file_path: Path = Path(keyring_path).parent / "finished" / f"{index}.finished"
        with open(finished_file_path, "w") as f:
            f.write(f"{os.getpid()}\n")

        assert found_passphrase == passphrase
def dummy_set_passphrase(service, user, passphrase, keyring_path):
    """
    Worker-process body: write a passphrase to a shared keyring, pause a
    random interval, then verify the same passphrase reads back.
    """
    with TempKeyring(existing_keyring_path=keyring_path, delete_on_cleanup=False):
        wrapper = KeyringWrapper.get_shared_instance()
        if platform == "linux":
            # FileKeyring's setup_keyring_file_watcher needs to be called explicitly here,
            # otherwise file events won't be detected in the child process
            wrapper.keyring.setup_keyring_file_watcher()

        wrapper.set_passphrase(service=service, user=user, passphrase=passphrase)

        # Wait a short while between writing and reading. Without proper locking, this helps ensure
        # the concurrent processes get into a bad state
        sleep(random.random() * 10 % 3)

        found_passphrase = wrapper.get_passphrase(service, user)
        if found_passphrase != passphrase:
            log.error(
                f"[pid:{os.getpid()}] error: didn't get expected passphrase: "
                f"get_passphrase: {found_passphrase}"  # lgtm [py/clear-text-logging-sensitive-data]
                f", expected: {passphrase}"  # lgtm [py/clear-text-logging-sensitive-data]
            )
        assert found_passphrase == passphrase
if __name__ == "__main__":
    from tests.block_tools import create_block_tools, test_constants
    from tests.util.keyring import TempKeyring

    from chia.util.default_root import DEFAULT_ROOT_PATH

    # Generate and print a genesis block using a throwaway keychain;
    # the temp keychain is deleted when this block exits
    with TempKeyring() as keychain:
        # TODO: mariano: fix this with new consensus
        bt = create_block_tools(root_path=DEFAULT_ROOT_PATH, keychain=keychain)
        # TODO: address hint error and remove ignore
        # error: Argument 2 to "create_genesis_block" of "BlockTools" has incompatible type "bytes"; expected
        # "bytes32"  [arg-type]
        new_genesis_block = bt.create_genesis_block(test_constants, b"0")  # type: ignore[arg-type]

        print(bytes(new_genesis_block))
from chia.util.ints import uint8, uint32, uint64, uint128
from tests.blockchain.blockchain_test_utils import (
    _validate_and_add_block,
    _validate_and_add_block_multi_result,
    _validate_and_add_block_no_error,
)
from tests.setup_nodes import test_constants as test_constants_original
from tests.util.blockchain import create_blockchain
from tests.util.keyring import TempKeyring


def cleanup_keyring(keyring: TempKeyring):
    # atexit callback: remove the temp keyring's on-disk state
    keyring.cleanup()


# Module-level temp keyring shared by these tests; registered with atexit
# because it outlives any single test
temp_keyring = TempKeyring()
keychain = temp_keyring.get_keychain()
atexit.register(cleanup_keyring, temp_keyring)  # Attempt to cleanup the temp keychain

# Override constants — presumably shrunk so test chains build quickly; TODO confirm
test_constants = test_constants_original.replace(
    **{"DISCRIMINANT_SIZE_BITS": 32, "SUB_SLOT_ITERS_STARTING": 2**12}
)
bt = create_block_tools(constants=test_constants, keychain=keychain)


@pytest.fixture(scope="session")
def event_loop():
    # Session-scoped event loop so async fixtures/tests share one loop
    loop = asyncio.get_event_loop()
    yield loop
async def setup_wallet_node(
    port,
    consensus_constants: ConsensusConstants,
    local_bt,
    full_node_port=None,
    introducer_port=None,
    key_seed=None,
    starting_height=None,
    initial_num_public_keys=5,
):
    """
    Setup and teardown of a single wallet node service.

    Yields (wallet_node, wallet_server); on resume, stops the service,
    removes the wallet DB file, and deletes all keys from the temp keychain.
    """
    with TempKeyring() as keychain:
        # NOTE(review): config and db_path come from the module-level `bt`,
        # while service kwargs use `local_bt.root_path` below — confirm this
        # mix is intentional
        config = bt.config["wallet"]
        config["port"] = port
        config["rpc_port"] = port + 1000
        if starting_height is not None:
            config["starting_height"] = starting_height
        config["initial_num_public_keys"] = initial_num_public_keys

        # Default to random entropy unless the caller pinned a seed
        entropy = token_bytes(32)
        if key_seed is None:
            key_seed = entropy
        keychain.add_private_key(bytes_to_mnemonic(key_seed), "")
        first_pk = keychain.get_first_public_key()
        assert first_pk is not None
        db_path_key_suffix = str(first_pk.get_fingerprint())
        # DB filename embeds the port and the key fingerprint
        db_name = f"test-wallet-db-{port}-KEY.sqlite"
        db_path_replaced: str = db_name.replace("KEY", db_path_key_suffix)
        db_path = bt.root_path / db_path_replaced
        # Start from a clean DB if one was left behind
        if db_path.exists():
            db_path.unlink()
        # NOTE(review): database_path is set to the bare db_name, not the
        # resolved db_path — verify the service resolves it against the same root
        config["database_path"] = str(db_name)
        config["testing"] = True

        config["introducer_peer"]["host"] = self_hostname
        if introducer_port is not None:
            config["introducer_peer"]["port"] = introducer_port
            config["peer_connect_interval"] = 10
        else:
            config["introducer_peer"] = None

        if full_node_port is not None:
            config["full_node_peer"] = {}
            config["full_node_peer"]["host"] = self_hostname
            config["full_node_peer"]["port"] = full_node_port
        else:
            del config["full_node_peer"]

        kwargs = service_kwargs_for_wallet(local_bt.root_path, config, consensus_constants, keychain)
        kwargs.update(
            parse_cli_args=False,
            connect_to_daemon=False,
        )

        service = Service(**kwargs)

        await service.start(new_wallet=True)

        yield service._node, service._node.server

        # Teardown: stop the service and clean up on-disk/keychain state
        service.stop()
        await service.wait_closed()
        if db_path.exists():
            db_path.unlink()
        keychain.delete_all_keys()
async def get_temp_keyring(self):
    """Fixture: yield a keychain backed by a throwaway keyring, removed on exit."""
    with TempKeyring() as keychain:
        yield keychain
from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval
from tests.util.keyring import TempKeyring
import asyncio
import atexit
import json

import aiohttp
import pytest


def cleanup_keyring(keyring: TempKeyring):
    # atexit callback: remove the temp keyring's on-disk state
    keyring.cleanup()


# Module-level temp keyrings shared by these tests; registered with atexit
# because they outlive any single test
temp_keyring1 = TempKeyring()
temp_keyring2 = TempKeyring()
atexit.register(cleanup_keyring, temp_keyring1)
atexit.register(cleanup_keyring, temp_keyring2)
b_tools = create_block_tools(constants=test_constants_modified, keychain=temp_keyring1.get_keychain())
b_tools_1 = create_block_tools(constants=test_constants_modified, keychain=temp_keyring2.get_keychain())

# Point the first block tools instance at a dedicated daemon port
new_config = b_tools._config
new_config["daemon_port"] = 55401
b_tools.change_config(new_config)


class TestDaemon:
    @pytest.fixture(scope="function")
    async def get_daemon(self):
        # Yield the daemon started against b_tools; teardown happens when
        # setup_daemon's generator is exhausted
        async for _ in setup_daemon(btools=b_tools):
            yield _
def empty_keyring(self):
    """Fixture: yield a keychain backed by a fresh temp keyring; reset the shared wrapper on teardown."""
    with TempKeyring(user="******", service="chia-user-chia-1.8") as keychain:
        yield keychain
        KeyringWrapper.cleanup_shared_instance()