def test_store_indirect(): """Test the `BasisData.store` method when called indirectly because its is an input.""" basis = BasisData(io.BytesIO(b'basis')) basis.element = 'Ar' node = CalcJobNode() node.add_incoming(basis, link_type=LinkType.INPUT_CALC, link_label='basis') node.store_all()
def test_store_indirect(): """Test the `PseudoPotentialData.store` method when called indirectly because its is an input.""" pseudo = PseudoPotentialData(io.BytesIO(b'pseudo')) pseudo.element = 'Ar' node = CalcJobNode() node.add_incoming(pseudo, link_type=LinkType.INPUT_CALC, link_label='pseudo') node.store_all()
def test_process_show(self):
    """Test `verdi process show`."""
    workchain_one = WorkChainNode()
    workchain_two = WorkChainNode()
    workchains = [workchain_one, workchain_two]

    workchain_two.set_attribute('process_label', 'workchain_one_caller')
    workchain_two.store()
    workchain_one.add_incoming(workchain_two, link_type=LinkType.CALL_WORK, link_label='called')
    workchain_one.store()

    calcjob_one = CalcJobNode()
    calcjob_two = CalcJobNode()
    calcjob_one.set_attribute('process_label', 'process_label_one')
    calcjob_two.set_attribute('process_label', 'process_label_two')
    calcjob_one.add_incoming(workchain_one, link_type=LinkType.CALL_CALC, link_label='one')
    calcjob_two.add_incoming(workchain_one, link_type=LinkType.CALL_CALC, link_label='two')
    calcjob_one.store()
    calcjob_two.store()

    # Running without identifiers should not except and not print anything
    options = []
    result = self.cli_runner.invoke(cmd_process.process_show, options)
    self.assertIsNone(result.exception, result.output)
    self.assertEqual(len(get_result_lines(result)), 0)

    # Giving a single identifier should print a non-empty string message
    options = [str(workchain_one.pk)]
    result = self.cli_runner.invoke(cmd_process.process_show, options)
    lines = get_result_lines(result)
    self.assertClickResultNoException(result)
    self.assertTrue(len(lines) > 0)
    self.assertIn('workchain_one_caller', result.output)
    self.assertIn('process_label_one', lines[-2])
    self.assertIn('process_label_two', lines[-1])

    # Giving multiple identifiers should print a non-empty string message
    options = [str(node.pk) for node in workchains]
    result = self.cli_runner.invoke(cmd_process.process_show, options)
    self.assertIsNone(result.exception, result.output)
    self.assertTrue(len(get_result_lines(result)) > 0)
def _inner(file_path, input_settings=None):
    """Create a stored VASP `CalcJobNode` with a `retrieved` folder populated from `file_path`."""
    # Create a test computer
    computer = localhost
    process_type = 'aiida.calculations:{}'.format('vasp.vasp')

    node = CalcJobNode(computer=computer, process_type=process_type)
    node.set_attribute('input_filename', 'INCAR')
    node.set_attribute('output_filename', 'OUTCAR')
    # node.set_attribute('error_filename', 'aiida.err')
    node.set_attribute('scheduler_stderr', '_scheduler-stderr.txt')
    node.set_attribute('scheduler_stdout', '_scheduler-stdout.txt')
    node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    node.set_option('max_wallclock_seconds', 1800)

    if input_settings is None:
        input_settings = {}
    settings = Dict(dict=input_settings)
    node.add_incoming(settings, link_type=LinkType.INPUT_CALC, link_label='settings')
    settings.store()
    node.store()

    # Create a `FolderData` that will represent the `retrieved` folder. Store the test
    # output fixture in there and link it.
    retrieved = FolderData()
    retrieved.put_object_from_tree(file_path)
    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
    retrieved.store()

    return node
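# A minimal usage sketch for the `_inner` factory above, assuming it is returned by a pytest
# fixture named `vasp_calc_and_retrieved` and that `outcar_dir` is a hypothetical path to a
# folder containing VASP output files; both names are illustrative, not part of the original code.
def test_retrieved_is_linked(vasp_calc_and_retrieved, outcar_dir):
    node = vasp_calc_and_retrieved(str(outcar_dir))
    # The factory stored the node and attached the `retrieved` folder via a CREATE link.
    retrieved = node.get_outgoing(link_label_filter='retrieved').one().node
    assert node.get_attribute('output_filename') == 'OUTCAR'
    assert retrieved.is_stored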
def _generate_remote_data(computer, remote_path, entry_point_name=None, extras_root=None):
    """Return a `RemoteData` pointing to `remote_path` on `computer`, optionally linked to a creator `CalcJobNode`."""
    from aiida.common.links import LinkType
    from aiida.orm import CalcJobNode, RemoteData, Dict
    from aiida.plugins.entry_point import format_entry_point_string

    if extras_root is None:
        extras_root = []

    entry_point = format_entry_point_string('aiida.calculations', entry_point_name)

    remote = RemoteData(remote_path=remote_path)
    remote.computer = computer

    if entry_point_name is not None:
        creator = CalcJobNode(computer=computer, process_type=entry_point)
        creator.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
        remote.add_incoming(creator, link_type=LinkType.CREATE, link_label='remote_folder')

        # Link any additional input nodes to the creator; plain dicts are wrapped in `Dict` nodes.
        for extra in extras_root:
            to_link = extra[0]
            if isinstance(to_link, dict):
                to_link = Dict(dict=to_link)
            to_link.store()
            creator.add_incoming(to_link, link_type=LinkType.INPUT_CALC, link_label=extra[1])

        creator.store()

    return remote
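# A minimal usage sketch of the fixture above, assuming it is exposed to tests as a factory
# named `generate_remote_data` together with a configured `fixture_localhost` computer; the
# entry point name and the use of pytest's `tmp_path` are illustrative assumptions.
def test_remote_data_has_creator(fixture_localhost, generate_remote_data, tmp_path):
    remote = generate_remote_data(fixture_localhost, str(tmp_path), entry_point_name='quantumespresso.pw')
    remote.store()
    # The stored CREATE link lets us traverse back from the remote folder to its creator.
    creator = remote.get_incoming(link_label_filter='remote_folder').one().node
    assert creator.process_type == 'aiida.calculations:quantumespresso.pw'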
def generate_calcjob_node(
    self,
    entry_point_name,
    retrieved=None,
    computer_name="localhost",
    options=None,
    mark_completed=False,
    remote_path=None,
    input_nodes=None,
):
    """Fixture to generate a mock `CalcJobNode` for testing parsers.

    Parameters
    ----------
    entry_point_name : str
        entry point name of the calculation class
    retrieved : aiida.orm.FolderData
        containing the file(s) to be parsed
    computer_name : str
        used to get or create a ``Computer``, by default 'localhost'
    options : None or dict
        any additional metadata options to set on the node
    mark_completed : bool
        if True, set the process state to finished and the exit status to 0
    remote_path : str
        path to a folder on the computer
    input_nodes : dict
        mapping of link label to node

    Returns
    -------
    aiida.orm.CalcJobNode
        instance with the `retrieved` node linked as outgoing

    """
    from aiida.common.links import LinkType
    from aiida.engine import ExitCode, ProcessState
    from aiida.orm import CalcJobNode, Node, RemoteData
    from aiida.plugins.entry_point import format_entry_point_string

    process = self.get_calc_cls(entry_point_name)
    computer = self.get_or_create_computer(computer_name)
    entry_point = format_entry_point_string("aiida.calculations", entry_point_name)

    calc_node = CalcJobNode(computer=computer, process_type=entry_point)

    # Apply the plugin's default metadata options, then the basic resources.
    calc_node.set_options({
        k: v.default() if callable(v.default) else v.default
        for k, v in process.spec_options.items()
        if v.has_default()
    })
    calc_node.set_option("resources", {"num_machines": 1, "num_mpiprocs_per_machine": 1})
    calc_node.set_option("max_wallclock_seconds", 1800)

    if options:
        calc_node.set_options(options)

    if mark_completed:
        calc_node.set_process_state(ProcessState.FINISHED)
        calc_node.set_exit_status(ExitCode().status)

    if input_nodes is not None:
        for label, in_node in input_nodes.items():
            # A plain node is linked directly; a dict of nodes is linked as a namespace `label__sublabel`.
            in_node_map = in_node
            if isinstance(in_node, Node):
                in_node_map = {None: in_node_map}
            for sublabel, sub_node in in_node_map.items():
                sub_node.store()
                link_label = label if sublabel is None else "{}__{}".format(label, sublabel)
                calc_node.add_incoming(sub_node, link_type=LinkType.INPUT_CALC, link_label=link_label)

    calc_node.store()

    if retrieved is not None:
        retrieved.add_incoming(calc_node, link_type=LinkType.CREATE, link_label="retrieved")
        retrieved.store()

    if remote_path is not None:
        remote = RemoteData(remote_path=remote_path, computer=computer)
        remote.add_incoming(calc_node, link_type=LinkType.CREATE, link_label="remote_folder")
        remote.store()

    return calc_node
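# A minimal usage sketch for the helper above, written as a sibling test method; it assumes
# aiida-core's `arithmetic.add` calculation entry point is registered and mirrors the
# `put_object_from_filelike` pattern used elsewhere in this file. The file name, contents and
# link labels are illustrative assumptions, not part of the fixture itself.
def test_inputs_are_linked(self):
    import io

    from aiida.orm import FolderData, Int

    retrieved = FolderData()
    retrieved.put_object_from_filelike(io.BytesIO(b"5\n"), "aiida.out")
    calc_node = self.generate_calcjob_node(
        "arithmetic.add",
        retrieved=retrieved,
        mark_completed=True,
        input_nodes={"x": Int(2), "y": Int(3)},
    )
    # The inputs were stored and linked with INPUT_CALC links under their labels.
    assert calc_node.get_incoming(link_label_filter="x").one().node.value == 2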
def _generate_calc_job_node(
    entry_point_name,
    results_folder,
    inputs=None,
    computer=None,
    outputs=None,
    outfile_override=None,
):
    """Generate a `CalcJobNode` with a fake `retrieved` node populated from the tests/data folder."""
    calc_class = CalculationFactory(entry_point_name)
    entry_point = format_entry_point_string('aiida.calculations', entry_point_name)
    builder = calc_class.get_builder()

    if not computer:
        computer = db_test_app.localhost
    node = CalcJobNode(computer=computer, process_type=entry_point)

    # Monkeypatch the inputs
    if inputs is not None:
        inputs = AttributeDict(inputs)
        node.__dict__['inputs'] = inputs
        # Add direct inputs; pseudos are omitted
        for k, v in inputs.items():
            if isinstance(v, Node):
                if not v.is_stored:
                    v.store()
                node.add_incoming(v, link_type=LinkType.INPUT_CALC, link_label=k)

    options = builder.metadata.options
    options.update(inputs.metadata.options)
    node.set_attribute('input_filename', options.input_filename)
    node.set_attribute('seedname', options.seedname)
    node.set_attribute('output_filename', options.output_filename)
    node.set_attribute('error_filename', 'aiida.err')
    node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    node.set_option('max_wallclock_seconds', 1800)
    node.store()

    filepath = this_folder.parent / 'data' / results_folder
    retrieved = FolderData()
    retrieved.put_object_from_tree(str(filepath.resolve()))

    # Apply overriding output files
    if outfile_override is not None:
        for key, content in outfile_override.items():
            if content is None:
                retrieved.delete_object(key)
                continue
            buf = BytesIO(content.encode())
            retrieved.put_object_from_filelike(buf, key)

    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
    retrieved.store()

    if outputs is not None:
        for label, out_node in outputs.items():
            out_node.add_incoming(node, link_type=LinkType.CREATE, link_label=label)
            if not out_node.is_stored:
                out_node.store()

    return node
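# A minimal usage sketch of the fixture above. The entry point name 'myplugin.calc', the
# 'example_run' data folder and the overridden file names are hypothetical placeholders, and
# the plugin is assumed to define `input_filename`, `seedname` and `output_filename` options.
def test_outfile_override():
    node = _generate_calc_job_node(
        'myplugin.calc',
        'example_run',
        inputs={'metadata': {'options': {}}},
        outfile_override={'extra.log': 'hello world'},
    )
    # The overridden file should appear in the retrieved folder alongside the copied test data.
    retrieved = node.get_outgoing(link_label_filter='retrieved').one().node
    assert 'extra.log' in retrieved.list_object_names()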