def start_daemon():
    """Start a daemon runner for the currently configured profile."""
    client = DaemonClient()
    configure_logging(daemon=True, daemon_log_file=client.daemon_log_file)

    # The daemon runner polls instead of listening on RabbitMQ for submissions.
    daemon_runner = DaemonRunner(rmq_config=get_rmq_config(), rmq_submit=False)

    def handle_shutdown(signum, frame):
        # Close the runner gracefully when the daemon is asked to stop.
        logger.info('Received signal to shut down the daemon runner')
        daemon_runner.close()

    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, handle_shutdown)

    logger.info('Starting a daemon runner')

    set_runner(daemon_runner)
    tick_legacy_workflows(daemon_runner)

    try:
        daemon_runner.start()
    except SystemError as exception:
        # SystemError is how the runner signals it was asked to terminate.
        logger.info('Received a SystemError: {}'.format(exception))
        daemon_runner.close()

    logger.info('Daemon runner stopped')
def create_test_runner(with_communicator=False):
    """Create a test Runner, register it as the global runner and return it.

    :param with_communicator: if True, configure the runner with an RMQ
        communicator using a unique, test-specific queue prefix
    :return: the newly created runner
    """
    queue_prefix = 'aiidatest-{}'.format(uuid.uuid4())
    rmq_config = work.rmq.get_rmq_config(queue_prefix) if with_communicator else None
    test_runner = work.Runner(poll_interval=0., rmq_config=rmq_config, enable_persistence=False)
    work.set_runner(test_runner)
    return test_runner
def test_base(fresh_aiida_env, vasp_params, potentials, vasp_kpoints, vasp_structure, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code.

    Runs the 'vasp.base' workchain against the mock VASP executable and only
    checks that the expected output links are present.
    """
    from aiida.orm import WorkflowFactory, Code
    from aiida import work

    # Run locally without RabbitMQ; persistence is required by the workchain.
    runner = work.Runner(poll_interval=0., rmq_config=None, enable_persistence=True)
    work.set_runner(runner)

    base_wf_proc = WorkflowFactory('vasp.base')

    mock_vasp.store()
    create_authinfo(computer=mock_vasp.get_computer()).store()

    kpoints, _ = vasp_kpoints

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = vasp_structure
    inputs.incar = vasp_params
    inputs.kpoints = kpoints
    inputs.potcar_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potcar_mapping = get_data_node('parameter', dict=POTCAR_MAP)
    inputs.options = get_data_node(
        'parameter',
        dict={
            'queue_name': 'None',
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.settings = get_data_node(
        'parameter', dict={'parser_settings': {'add_structure': False, 'should_parse_CONTCAR': False}})

    results = work.run(base_wf_proc, **inputs)

    assert 'retrieved' in results
    assert 'output_parameters' in results
    assert 'remote_folder' in results
def test_relax_wf(fresh_aiida_env, vasp_params, potentials, mock_vasp):
    """Test submitting only, not correctness, with mocked vasp code.

    Runs the 'vasp.relax' workchain against the mock VASP executable using
    inputs parsed from reference POSCAR / KPOINTS / INCAR files and only
    checks that the relaxed structure output link is present.
    """
    from aiida.orm import WorkflowFactory, Code
    from aiida import work

    # Run locally without RabbitMQ; persistence is required by the workchain.
    runner = work.Runner(poll_interval=0., rmq_config=None, enable_persistence=True)
    work.set_runner(runner)

    base_wf_proc = WorkflowFactory('vasp.relax')

    mock_vasp.store()
    create_authinfo(computer=mock_vasp.get_computer()).store()

    structure = PoscarParser(
        file_path=data_path('test_relax_wf', 'inp', 'POSCAR')).get_quantity(
            'poscar-structure', {})['poscar-structure']
    kpoints = KpParser(
        file_path=data_path('test_relax_wf', 'inp', 'KPOINTS')).get_quantity(
            'kpoints-kpoints', {})['kpoints-kpoints']
    incar_add = IncarParser(
        file_path=data_path('test_relax_wf', 'inp', 'INCAR')).get_quantity(
            'incar', {})['incar'].get_dict()
    # ISIF / IBRION are controlled by the relax workchain itself, so strip them.
    incar_add = {
        k: v
        for k, v in incar_add.items() if k not in ['isif', 'ibrion']
    }
    incar_add['system'] = 'test-case:test_relax_wf'

    restart_clean_workdir = get_data_node('bool', False)
    restart_clean_workdir.store()

    inputs = AttributeDict()
    inputs.code = Code.get_from_string('mock-vasp@localhost')
    inputs.structure = structure
    inputs.incar_add = get_data_node('parameter', dict=incar_add)
    inputs.kpoints = AttributeDict()
    inputs.kpoints.mesh = kpoints
    inputs.potcar_family = get_data_node('str', POTCAR_FAMILY_NAME)
    inputs.potcar_mapping = get_data_node('parameter', dict=POTCAR_MAP)
    inputs.options = get_data_node(
        'parameter',
        dict={
            'queue_name': 'None',
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        })
    inputs.max_iterations = get_data_node('int', 1)
    inputs.convergence = AttributeDict()
    inputs.convergence.shape = AttributeDict()
    inputs.convergence.on = get_data_node('bool', True)
    inputs.convergence.positions = get_data_node('float', 0.1)
    inputs.restart = AttributeDict()
    inputs.restart.clean_workdir = restart_clean_workdir
    inputs.relax = AttributeDict()

    results = work.run(base_wf_proc, **inputs)

    assert 'relaxed_structure' in results
def tearDown(self):
    """Unregister the global runner and verify no process is left current."""
    super(TestImmutableInputWorkchain, self).tearDown()
    # Clear the globally registered runner so the next test starts clean.
    work.set_runner(None)
    # No process should remain as the current one after teardown.
    self.assertIsNone(Process.current())
def tearDown(self):
    """Unregister the global runner and verify no process is left current."""
    super(TestWorkChainAbortChildren, self).tearDown()
    # Clear the globally registered runner so the next test starts clean.
    work.set_runner(None)
    # No process should remain as the current one after teardown.
    self.assertIsNone(Process.current())
def tearDown(self):
    """Unregister and close the test runner; verify no process is left current."""
    super(TestWorkchainWithOldWorkflows, self).tearDown()
    # Clear the globally registered runner so the next test starts clean.
    work.set_runner(None)
    # Release the runner's resources before dropping the reference.
    self.runner.close()
    self.runner = None
    # No process should remain as the current one after teardown.
    self.assertIsNone(Process.current())
def tearDown(self):
    """Unregister and close the test runner; verify no process is left current."""
    super(TestWorkChainExpose, self).tearDown()
    # Clear the globally registered runner so the next test starts clean.
    work.set_runner(None)
    # Release the runner's resources before dropping the reference.
    self.runner.close()
    self.runner = None
    # No process should remain as the current one after teardown.
    self.assertIsNone(Process.current())
def tearDown(self):
    """Unregister the global runner and verify no process is left current."""
    super(TestSerializeWorkChain, self).tearDown()
    # Clear the globally registered runner so the next test starts clean.
    work.set_runner(None)
    # No process should remain as the current one after teardown.
    self.assertIsNone(Process.current())
def tearDown(self):
    """Verify no process is left current, then close and unregister the runner."""
    super(TestJobProcess, self).tearDown()
    # No process should remain as the current one after teardown.
    self.assertIsNone(Process.current())
    # Release the runner's resources before dropping the reference.
    self.runner.close()
    self.runner = None
    # Clear the globally registered runner so the next test starts clean.
    work.set_runner(None)
def setUp(self):
    """Build the full test fixture for yambo calculations.

    Creates (in order) a local test computer, mock yambo/p2y codes, a fake
    finished PW parent calculation with its remote folder and output
    parameters, a yambo calculation in PARSING state, plus reusable input
    nodes (structure, parameters, settings, scheduler option sets) and a
    remote folder, finally marking the yambo calculation FINISHED.
    The store/link/_set_state ordering below matters: links can only be
    added in the states used here, so do not reorder.
    """
    from aiida import work
    from aiida.orm.code import Code
    from aiida.orm.nodes.parameter import Dict
    from aiida.orm.nodes.structure import StructureData
    from aiida.orm.nodes.remote import RemoteData
    from ase.spacegroup import crystal
    from aiida_quantumespresso.calculations.pw import PwCalculation
    from aiida_yambo.calculations.gw import YamboCalculation
    from aiida.common.links import LinkType
    from aiida.orm.computer import Computer as AiidaOrmComputer
    from aiida.common.datastructures import calc_states
    from aiida.plugins.utils import DataFactory

    # Local runner, no RabbitMQ, no persistence needed for these tests.
    runner = work.Runner(poll_interval=0., rmq_config=None, enable_persistence=None)
    work.set_runner(runner)

    # Test computer: local transport, direct scheduler, scratch under /tmp.
    self.computer = AiidaOrmComputer(name="testcase")
    # conf_attrs hostname, description, enabled_state, transport_type, scheduler_type, workdir
    # mpirun_command , default_mpiprocs_per_machine,
    self.computer._set_hostname_string("localhost")
    self.computer._set_enabled_state_string('True')
    self.computer._set_transport_type_string("local")
    self.computer._set_scheduler_type_string("direct")
    self.computer._set_workdir_string("/tmp/testcase/{username}/base")
    self.computer.store()
    create_authinfo(computer=self.computer).store()

    # Register the mock executables (found on PATH) as AiiDA codes.
    self.code_yambo = Code()
    self.code_yambo.label = "yambo"
    os_env = os.environ.copy()
    yambo_path = subprocess.check_output(['which', 'mock_yambo'], env=os_env).strip()
    self.code_yambo.set_remote_computer_exec((self.computer, yambo_path))
    self.code_yambo.set_input_plugin_name('yambo.yambo')

    self.code_p2y = Code()
    self.code_p2y.label = "p2y"
    p2y_path = subprocess.check_output(['which', 'mock_p2y'], env=os_env).strip()
    self.code_p2y.set_remote_computer_exec((self.computer, p2y_path))
    self.code_p2y.set_input_plugin_name('yambo.yambo')

    self.code_yambo.store()
    self.code_p2y.store()

    # Fake parent PW calculation whose outputs the yambo calculation consumes.
    self.calc_pw = PwCalculation()
    self.calc_pw.set_computer(self.computer)
    self.calc_pw.set_resources({
        "num_machines": 1,
        "num_mpiprocs_per_machine": 16,
        'default_mpiprocs_per_machine': 16
    })
    # NOTE: shadows the StructureData imported above with the factory class.
    StructureData = DataFactory('structure')
    cell = [[15.8753100000, 0.0000000000, 0.0000000000],
            [0.0000000000, 15.8753100000, 0.0000000000],
            [0.0000000000, 0.0000000000, 2.4696584760]]
    s = StructureData(cell=cell)
    self.calc_pw.use_structure(s)
    print((self.calc_pw.store_all(), " pw calc"))

    # Attach a remote folder and output parameters to the PW calc; the
    # PARSING state is required before CREATE links can be added.
    pw_remote_folder = RemoteData(computer=self.computer, remote_path="/tmp/testcase/work/calcPW")
    print((pw_remote_folder.store(), "pw remote data"))
    self.calc_pw._set_state(calc_states.PARSING)
    pw_remote_folder.add_link_from(self.calc_pw, label='remote_folder', link_type=LinkType.CREATE)
    outputs = Dict(
        dict={
            "lsda": False,
            "number_of_bands": 80,
            "number_of_electrons": 8.0,
            "number_of_k_points": 147,
            "non_colinear_calculation": False
        })
    outputs.store()
    outputs.add_link_from(self.calc_pw, label='output_parameters', link_type=LinkType.CREATE)

    # The yambo calculation under test, in the p2y-initialise configuration.
    self.calc = YamboCalculation()
    self.calc.set_computer(self.computer)
    self.calc.use_code(self.code_p2y)
    p2y_settings = {
        u'ADDITIONAL_RETRIEVE_LIST': [u'r-*', u'o-*', u'l-*', u'l_*', u'LOG/l-*_CPU_1'],
        u'INITIALISE': True
    }
    # kept for reference alongside p2y_settings; not attached to self.calc
    yambo_settings = {
        u'ADDITIONAL_RETRIEVE_LIST': [u'r-*', u'o-*', u'l-*', u'l_*', u'LOG/l-*_CPU_1']
    }
    self.calc.use_settings(Dict(dict=p2y_settings))
    self.calc.set_resources({
        "num_machines": 1,
        "num_mpiprocs_per_machine": 16,
        'default_mpiprocs_per_machine': 16
    })
    self.calc.use_parent_calculation(self.calc_pw)
    print((self.calc.store_all(), " yambo calc"))
    self.calc._set_state(calc_states.PARSING)

    # A silicon structure built via ASE, reusable by individual tests.
    a = 5.388
    cell = crystal('Si', [(0, 0, 0)], spacegroup=227, cellpar=[a, a, a, 90, 90, 90], primitive_cell=True)
    self.struc = StructureData(ase=cell)
    self.struc.store()

    # Reference yambo GW input parameters.
    self.parameters = Dict(
        dict={
            "BndsRnXp": [1.0, 48.0],
            "Chimod": "Hartree",
            "DysSolver": "n",
            "FFTGvecs": 25,
            "FFTGvecs_units": "Ry",
            "GbndRnge": [1.0, 48.0],
            "HF_and_locXC": True,
            "LongDrXp": [1.0, 0.0, 0.0],
            "NGsBlkXp": 2,
            "NGsBlkXp_units": "Ry",
            "QPkrange": [[1, 145, 3, 5]],
            "SE_CPU": "1 2 4",
            "SE_ROLEs": "q qp b",
            "X_all_q_CPU": "1 1 4 2",
            "X_all_q_ROLEs": "q k c v",
            "em1d": True,
            "gw0": True,
            "ppa": True,
            "rim_cut": True
        })

    # Settings nodes for the yambo and p2y steps (p2y adds INITIALISE).
    self.yambo_settings = Dict(
        dict={
            "ADDITIONAL_RETRIEVE_LIST": [
                "r-*", "o-*", "l-*", "l_*", "LOG/l-*_CPU_1",
                "aiida/ndb.QP", "aiida/ndb.HF_and_locXC"
            ]
        })
    self.p2y_settings = Dict(
        dict={
            "ADDITIONAL_RETRIEVE_LIST": [
                'r-*', 'o-*', 'l-*', 'l_*', 'LOG/l-*_CPU_1',
                'aiida/ndb.QP', 'aiida/ndb.HF_and_locXC'
            ],
            'INITIALISE': True
        })

    # Scheduler option sets for the two calculation flavours.
    self.yambo_calc_set = Dict(
        dict={
            'resources': {
                "num_machines": 1,
                "num_mpiprocs_per_machine": 16
            },
            'max_wallclock_seconds': 60 * 29,
            'max_memory_kb': 1 * 88 * 1000000,
            "queue_name": "s3parvc3",
            #'custom_scheduler_commands': u"#PBS -A Pra14_3622" ,
            'environment_variables': {
                "OMP_NUM_THREADS": "1"
            }
        })
    self.p2y_calc_set = Dict(
        dict={
            'resources': {
                "num_machines": 1,
                "num_mpiprocs_per_machine": 2
            },
            'max_wallclock_seconds': 60 * 2,
            'max_memory_kb': 1 * 10 * 1000000,
            "queue_name": "s3parvc3",
            # 'custom_scheduler_commands': u"#PBS -A Pra14_3622" ,
            'environment_variables': {
                "OMP_NUM_THREADS": "2"
            }
        })

    # Remote folder output for the yambo calc, then mark it FINISHED.
    self.remote_folder = RemoteData(computer=self.computer, remote_path="/tmp/testcase/work/calcX")
    self.remote_folder.store()
    self.remote_folder.add_link_from(self.calc, label='remote_folder', link_type=LinkType.CREATE)
    self.calc._set_state(calc_states.FINISHED)