def test_builder_restart_work_chain(self):
    """Verify that nested namespaces imploded into flat link labels can be reconstructed into nested namespaces."""
    caller = orm.WorkChainNode().store()
    node = orm.WorkChainNode(process_type=ExampleWorkChain.build_process_type())

    # Attach the inputs under their flat (double-underscore separated) link labels.
    flat_inputs = (
        (self.inputs['dynamic']['namespace']['alp'], 'dynamic__namespace__alp'),
        (self.inputs['name']['spaced'], 'name__spaced'),
        (self.inputs['name_spaced'], 'name_spaced'),
        (self.inputs['boolean'], 'boolean'),
        (orm.Int(DEFAULT_INT).store(), 'default'),
    )
    for source, label in flat_inputs:
        node.add_incoming(source, LinkType.INPUT_WORK, label)

    node.add_incoming(caller, link_type=LinkType.CALL_WORK, link_label='CALL_WORK')
    node.store()

    builder = node.get_builder_restart()

    # The flat labels should have been exploded back into nested namespaces.
    self.assertIn('dynamic', builder)
    self.assertIn('namespace', builder.dynamic)
    self.assertIn('alp', builder.dynamic.namespace)
    self.assertIn('name', builder)
    self.assertIn('spaced', builder.name)
    self.assertIn('name_spaced', builder)
    self.assertIn('boolean', builder)
    self.assertIn('default', builder)

    # And the reconstructed values should match the original inputs.
    self.assertEqual(builder.dynamic.namespace['alp'], self.inputs['dynamic']['namespace']['alp'])
    self.assertEqual(builder.name.spaced, self.inputs['name']['spaced'])
    self.assertEqual(builder.name_spaced, self.inputs['name_spaced'])
    self.assertEqual(builder.boolean, self.inputs['boolean'])
    self.assertEqual(builder.default, orm.Int(DEFAULT_INT))
def test_workcalculation(self, temp_dir):
    """Test simple master/slave WorkChainNodes"""
    from aiida.common.links import LinkType

    master = orm.WorkChainNode()
    slave = orm.WorkChainNode()

    input_1 = orm.Int(3).store()
    input_2 = orm.Int(5).store()
    output_1 = orm.Int(2).store()

    # Wire the provenance: master takes input_1, calls the slave (which takes
    # input_2), and returns output_1.
    master.add_incoming(input_1, LinkType.INPUT_WORK, 'input_1')
    slave.add_incoming(master, LinkType.CALL_WORK, 'CALL')
    slave.add_incoming(input_2, LinkType.INPUT_WORK, 'input_2')
    master.store()
    slave.store()
    output_1.add_incoming(master, LinkType.RETURN, 'RETURN')
    master.seal()
    slave.seal()

    expected = [(output_1.uuid, output_1.value)]

    archive = os.path.join(temp_dir, 'export1.tar.gz')
    export([output_1], outfile=archive, silent=True)

    # Wipe the database and re-import the archive; the exported node must
    # come back with the same value.
    self.clean_db()
    self.insert_data()
    import_data(archive, silent=True)

    for uuid, value in expected:
        self.assertEqual(orm.load_node(uuid).value, value)
def _generate_wc_job_node(entry_point_name, computer, inputs=None):
    """Fixture to generate a mock `WorkChainNode` for testing parsers.

    :param entry_point_name: entry point name of the workchain class
    :param computer: a `Computer` instance
    :param inputs: optional nodes to add as input links to the generated `WorkChainNode`
    :return: stored `WorkChainNode` instance with attached inputs if `inputs` is defined
    """
    from aiida import orm
    from aiida.common import LinkType
    from aiida.plugins.entry_point import format_entry_point_string

    process_type = format_entry_point_string('aiida.workflows', entry_point_name)
    node = orm.WorkChainNode(computer=computer, process_type=process_type)

    if inputs:
        # Inputs must be stored before they can be linked to the process node.
        for link_label, input_node in flatten_inputs(inputs):
            input_node.store()
            node.add_incoming(input_node, link_type=LinkType.INPUT_WORK, link_label=link_label)

    node.store()
    return node
def populate_restapi_database(clear_database_before_test):
    """Populates the database with a considerable set of nodes to test the restAPI"""
    # pylint: disable=unused-argument
    from aiida import orm

    # Keep a handle on one structure so a CifData can be derived from it below.
    struct_forcif = orm.StructureData().store()

    # Plain data nodes of assorted types.
    orm.StructureData().store()
    orm.StructureData().store()
    orm.Dict().store()
    orm.Dict().store()
    orm.CifData(ase=struct_forcif.get_ase()).store()
    orm.KpointsData().store()
    orm.FolderData().store()

    # Process nodes of every flavour (duplicates are intentional).
    for process_cls in (
        orm.CalcFunctionNode,
        orm.CalcJobNode,
        orm.CalcJobNode,
        orm.WorkFunctionNode,
        orm.WorkFunctionNode,
        orm.WorkChainNode,
    ):
        process_cls().store()
def _generate_work_chain_node(entry_point_name, computer, test_name=None, inputs=None, attributes=None):
    """Fixture to generate a mock `WorkChainNode` for testing parsers.

    :param entry_point_name: entry point name of the workchain class
    :param computer: a `Computer` instance
    :param test_name: relative path of directory with test output files in the `fixtures/{entry_point_name}` folder
    :param inputs: optional nodes to add as input links to the generated `WorkChainNode`
    :param attributes: optional attributes to set on the node
    :return: `WorkChainNode` instance, with an attached `FolderData` as the `retrieved` node when `test_name` is given
    """
    from aiida import orm
    from aiida.common import LinkType
    from aiida.plugins.entry_point import format_entry_point_string

    process_type = format_entry_point_string('aiida.workchains', entry_point_name)
    node = orm.WorkChainNode(computer=computer, process_type=process_type)

    if attributes:
        node.set_attribute_many(attributes)

    if inputs:
        # Inputs must be stored before they can be linked to the process node.
        for link_label, input_node in flatten_inputs(inputs):
            input_node.store()
            node.add_incoming(input_node, link_type=LinkType.INPUT_WORK, link_label=link_label)

    if test_name is not None:
        # Attach the fixture output directory as the `retrieved` output and a
        # dummy `remote_folder`; the entry point's `quantumespresso.` prefix is
        # stripped to locate the fixture folder.
        basepath = os.path.dirname(os.path.abspath(__file__))
        fixture_dir = entry_point_name[len('quantumespresso.'):]
        filepath = os.path.join(basepath, 'parsers', 'fixtures', fixture_dir, test_name)

        retrieved = orm.FolderData()
        retrieved.put_object_from_tree(filepath)
        retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
        retrieved.store()

        remote_folder = orm.RemoteData(computer=computer, remote_path='/tmp')
        remote_folder.add_incoming(node, link_type=LinkType.CREATE, link_label='remote_folder')
        remote_folder.store()

    # NOTE(review): unlike `_generate_wc_job_node`, the node itself is returned
    # unstored here — presumably so callers can add further links; confirm.
    return node
def create_provenance(self):
    """Create an example provenance graph and return its nodes.

    Builds a small graph: a running workchain ``wc1`` that takes two Dict
    inputs, calls a finished `CalcJobNode` (``calc1`` -> ``rd1``) and a failed
    `CalcFunctionNode` (``calcf1`` -> ``pd3``, ``fd1``), with ``pd3``/``fd1``
    also returned by ``wc1``.

    :return: `AttributeDict` mapping node labels to the created nodes
    """
    # Two stored Dict inputs, consumed by both the workchain and calc1.
    pd0 = orm.Dict()
    pd0.label = 'pd0'
    pd0.store()
    pd1 = orm.Dict()
    pd1.label = 'pd1'
    pd1.store()

    # Top-level workchain, deliberately left in the RUNNING state (not sealed).
    wc1 = orm.WorkChainNode()
    wc1.set_process_state(ProcessState.RUNNING)
    wc1.add_incoming(pd0, link_type=LinkType.INPUT_WORK, link_label='input1')
    wc1.add_incoming(pd1, link_type=LinkType.INPUT_WORK, link_label='input2')
    wc1.store()

    # A finished, successful calculation job called by the workchain.
    calc1 = orm.CalcJobNode()
    calc1.computer = self.computer
    calc1.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    calc1.label = 'calc1'
    calc1.set_process_state(ProcessState.FINISHED)
    calc1.set_exit_status(0)
    calc1.add_incoming(pd0, link_type=LinkType.INPUT_CALC, link_label='input1')
    calc1.add_incoming(pd1, link_type=LinkType.INPUT_CALC, link_label='input2')
    calc1.add_incoming(wc1, link_type=LinkType.CALL_CALC, link_label='call1')
    calc1.store()

    # RemoteData output created by calc1; CREATE link added after storing.
    rd1 = orm.RemoteData()
    rd1.label = 'rd1'
    rd1.set_remote_path('/x/y.py')
    rd1.computer = self.computer
    rd1.store()
    rd1.add_incoming(calc1, link_type=LinkType.CREATE, link_label='output')

    pd2 = orm.Dict()
    pd2.label = 'pd2'
    pd2.store()

    # A finished-but-failed (exit status 200) calcfunction, also called by wc1.
    calcf1 = orm.CalcFunctionNode()
    calcf1.label = 'calcf1'
    calcf1.set_process_state(ProcessState.FINISHED)
    calcf1.set_exit_status(200)
    calcf1.add_incoming(rd1, link_type=LinkType.INPUT_CALC, link_label='input1')
    calcf1.add_incoming(pd2, link_type=LinkType.INPUT_CALC, link_label='input2')
    calcf1.add_incoming(wc1, link_type=LinkType.CALL_CALC, link_label='call2')
    calcf1.store()

    # Outputs of calcf1; CREATE links are added before storing here, in
    # contrast to rd1 above.
    pd3 = orm.Dict()
    pd3.label = 'pd3'
    fd1 = orm.FolderData()
    fd1.label = 'fd1'
    pd3.add_incoming(calcf1, link_type=LinkType.CREATE, link_label='output1')
    pd3.store()
    fd1.add_incoming(calcf1, link_type=LinkType.CREATE, link_label='output2')
    fd1.store()

    # The calcfunction's outputs are also RETURNed by the calling workchain.
    pd3.add_incoming(wc1, link_type=LinkType.RETURN, link_label='output1')
    fd1.add_incoming(wc1, link_type=LinkType.RETURN, link_label='output2')

    return AttributeDict({
        'pd0': pd0,
        'pd1': pd1,
        'calc1': calc1,
        'rd1': rd1,
        'pd2': pd2,
        'calcf1': calcf1,
        'pd3': pd3,
        'fd1': fd1,
        'wc1': wc1
    })