def test_pickling_error():
    """Ensure sending an object the default serializer cannot handle raises an error."""
    client_queue, _server_queue = make_queue_pairs('localhost')

    # Without pickling enabled, a plain ``Test`` instance is not JSON-serializable,
    # so pushing it onto the queue must fail loudly
    with pytest.raises(TypeError):
        client_queue.send_inputs(Test())
def test_kwargs():
    """Verify keyword arguments travel with the positional inputs to the server."""
    client_queue, server_queue = make_queue_pairs('localhost')

    # Send one positional argument plus a keyword dictionary
    client_queue.send_inputs(1, input_kwargs={'hello': 'world'})

    # The received task should carry both unchanged
    _topic, received = server_queue.get_task()
    assert received.args == (1,)
    assert received.kwargs == {'hello': 'world'}
def test_filtering():
    """Check that results can be routed and retrieved by topic."""
    client_queue, server_queue = make_queue_pairs('localhost', clean_slate=True,
                                                  topics=['priority'])

    # Push a task tagged "priority" and run it through the method server
    client_queue.send_inputs("hello", topic="priority")
    received_topic, received_task = server_queue.get_task()
    assert received_topic == "priority"
    received_task.set_result(1)
    server_queue.send_result(received_task, received_topic)

    # Pulling from the "default" topic should time out empty
    assert client_queue.get_result(timeout=1, topic='default') is None

    # Pulling from the matching topic should yield the result
    assert client_queue.get_result(topic='priority') is not None

    # Pulling with no topic specified should also find a re-sent result
    server_queue.send_result(received_task, received_topic)
    assert client_queue.get_result() is not None
def test_methods():
    """Confirm a requested method name arrives alongside the inputs."""
    client_queue, server_queue = make_queue_pairs('localhost')

    # Ask for a specific method by name when submitting the inputs
    client_queue.send_inputs(1, method='test')

    # The task delivered to the server should record the method and inputs
    _topic, received = server_queue.get_task()
    assert received.args == (1,)
    assert received.method == 'test'
    assert received.kwargs == {}
def test_pickling():
    """Verify that enabling pickling lets non-JSON-able objects round-trip."""
    client_queue, server_queue = make_queue_pairs('localhost', use_pickle=True)

    # With pickling on, a ``Test`` instance is accepted and delivered intact
    client_queue.send_inputs(Test())
    _topic, received = server_queue.get_task()
    assert received.args[0].x is None

    # Send back a result object that also requires pickling
    payload = Test()
    payload.x = 1  # Set the value
    received.set_result(payload)
    server_queue.send_result(received)

    # The client sees the original input unchanged and the new result value
    round_trip = client_queue.get_result()
    assert round_trip.args[0].x is None
    assert round_trip.value.x == 1
handlers=[ logging.FileHandler(os.path.join(out_dir, 'runtime.log')), logging.StreamHandler(sys.stdout) ]) # Write the configuration config.run_dir = os.path.join(out_dir, 'run-info') parsl.load(config) # Save Parsl configuration with open(os.path.join(out_dir, 'parsl_config.txt'), 'w') as fp: print(str(config), file=fp) # Connect to the redis server client_queues, server_queues = make_queue_pairs(args.redishost, args.redisport, use_pickle=True) # Apply wrappers to function to affix static settings my_generate_molecules = partial(generate_molecules, episodes=args.rl_episodes) my_generate_molecules = update_wrapper(my_generate_molecules, generate_molecules) # Create the method server and task generator doer = ParslMethodServer([ my_generate_molecules, compute_score, compute_atomization_energy, compute_reference_energy ], server_queues, default_executors=['htex'])
xtb_per_node=args.qc_parallelism, ml_tasks_per_node=1) else: # ML nodes: N for updating models, 1 for MolDQN, 1 for inference runs config = theta_nwchem_config(os.path.join(out_dir, 'run-info'), nodes_per_nwchem=args.qc_parallelism, ml_prefetch=args.ml_prefetch) # Save Parsl configuration with open(os.path.join(out_dir, 'parsl_config.txt'), 'w') as fp: print(str(config), file=fp) # Connect to the redis server client_queues, server_queues = make_queue_pairs( args.redishost, args.redisport, serialization_method="pickle", topics=['simulate', 'screen'], keep_inputs=False) # Apply wrappers to functions to affix static settings # Update wrapper changes the __name__ field, which is used by the Method Server # TODO (wardlt): Have users set the method name explicitly my_compute_atomization = partial(compute_atomization_energy, compute_hessian=args.qc_spec != "xtb", qc_config=qc_spec, reference_energies=ref_energies, compute_config=compute_config, code=code) my_compute_atomization = update_wrapper(my_compute_atomization, compute_atomization_energy)
args.ps_globus_config) ps.store.init_store(ps.store.STORES.GLOBUS, name='globus', endpoints=endpoints) ps_names = { 'simulate': args.simulate_ps_backend, 'infer': args.infer_ps_backend, 'train': args.train_ps_backend } # Connect to the redis server client_queues, server_queues = make_queue_pairs( args.redishost, name=start_time.strftime("%d%b%y-%H%M%S"), port=args.redisport, topics=['simulate', 'infer', 'train'], serialization_method='pickle', keep_inputs=True, proxystore_name=ps_names, proxystore_threshold=args.ps_threshold) # Apply wrappers to functions to affix static settings # Update wrapper changes the __name__ field, which is used by the Method Server my_evaluate_mpnn = partial(evaluate_mpnn, batch_size=128, cache=True) my_evaluate_mpnn = update_wrapper(my_evaluate_mpnn, evaluate_mpnn) my_update_mpnn = partial(update_mpnn, num_epochs=args.num_epochs, learning_rate=args.learning_rate, bootstrap=True, timeout=2700)
handlers=[ logging.FileHandler(os.path.join(out_dir, 'runtime.log')), logging.StreamHandler(sys.stdout) ]) # Write the configuration config.run_dir = os.path.join(out_dir, 'run-info') parsl.load(config) # Save Parsl configuration with open(os.path.join(out_dir, 'parsl_config.txt'), 'w') as fp: print(str(config), file=fp) # Connect to the redis server client_queues, server_queues = make_queue_pairs(args.redishost, args.redisport, use_pickle=True, topics=['simulator', 'ML']) # Apply wrappers to functions to affix static settings # Update wrapper changes the __name__ field, which is used by the Method Server # TODO (wardlt): Have users set the method name explicitly my_generate_molecules = partial(generate_molecules, episodes=args.rl_episodes) my_generate_molecules = update_wrapper(my_generate_molecules, generate_molecules) my_compute_atomization = partial(compute_atomization_energy, qc_config=qc_spec, reference_energies=ref_energies, compute_config=compute_config, code=code)
# Make Parsl engine
config, n_slots = parsl_config(args.compute)

# Configure the ProxyStore backend used to ship large objects between workers
if args.proxy_store == 'file':
    ps_file_dir = out_path / 'file-store'
    ps_file_dir.mkdir(exist_ok=True)
    store = ps.store.init_store(ps.store.STORES.FILE, name='file',
                                store_dir=str(ps_file_dir))
elif args.proxy_store == 'redis':
    store = ps.store.init_store(ps.store.STORES.REDIS, name='redis',
                                hostname=args.redishost, port=args.redisport)
else:
    # Fix: the placeholder was never filled, so the error hid the bad value
    raise ValueError('ProxyStore config not recognized: {}'.format(args.proxy_store))

# Make the task queues and task server
client_q, server_q = make_queue_pairs(args.redishost, args.redisport, name='jscreen',
                                      keep_inputs=False, serialization_method='pickle',
                                      proxystore_threshold=1000, proxystore_name=store.name)
task_server = ParslTaskServer([screen_fun], server_q, config)

# Make the thinker
thinker = ScreenEngine(client_q, store, search_paths, out_path, n_slots,
                       args.molecules_per_chunk)

# Run the program
try:
    task_server.start()
    thinker.run()
finally:
    # Always shut down cleanly: stop the task server, then remove the proxy store
    client_q.send_kill_signal()
    task_server.join()
    store.cleanup()
def server_and_queue() -> Tuple[ParslMethodServer, ClientQueues]:
    """Build a method server wrapping ``f`` together with its paired client queue."""
    client_queue, server_queue = make_queue_pairs('localhost', clean_slate=True)
    method_server = ParslMethodServer([f], server_queue)
    return method_server, client_queue