def test_input_and_create_links(self, temp_dir):
    """Verify INPUT_CALC and CREATE links survive an export/import round trip.

    Builds a minimal provenance graph (input -> calculation -> output),
    exports the output node, wipes the database, re-imports, and checks
    that the set of links before and after is identical.
    """
    calculation = orm.CalculationNode()
    data_in = orm.Int(1).store()
    data_out = orm.Int(2).store()

    calculation.add_incoming(data_in, LinkType.INPUT_CALC, 'input')
    calculation.store()
    data_out.add_incoming(calculation, LinkType.CREATE, 'output')
    calculation.seal()

    links_before = get_all_node_links()

    archive_path = os.path.join(temp_dir, 'export.aiida')
    export([data_out], filename=archive_path)

    self.clean_db()

    import_data(archive_path)
    links_after = get_all_node_links()

    # Compare as sets of tuples; link ordering is not guaranteed
    self.assertSetEqual(
        {tuple(link) for link in links_before},
        {tuple(link) for link in links_after},
    )
def test_input_code(self, temp_dir):
    """Check that exporting a calculation also exports its input code.

    After import, the code node must be retrievable by UUID with its
    original label, and the single INPUT_CALC link from the code to the
    calculation must be back in place.
    """
    code_label = 'test_code1'

    code = orm.Code()
    code.set_remote_computer_exec((self.computer, '/bin/true'))
    code.label = code_label
    code.store()
    code_uuid = code.uuid

    calc = orm.CalcJobNode()
    calc.computer = self.computer
    calc.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    calc.add_incoming(code, LinkType.INPUT_CALC, 'code')
    calc.store()
    calc.seal()

    links_count = 1

    links_before = get_all_node_links()
    archive_path = os.path.join(temp_dir, 'export.aiida')
    export([calc], filename=archive_path)

    self.clean_db()
    import_data(archive_path)

    # Check that the code node is there
    self.assertEqual(orm.load_node(code_uuid).label, code_label)

    # Check that the link is in place
    links_after = get_all_node_links()
    self.assertListEqual(sorted(links_before), sorted(links_after))
    self.assertEqual(
        len(links_before), links_count,
        'Expected to find only one link from code to '
        'the calculation node before export. {} found.'.format(len(links_before)))
    self.assertEqual(
        len(links_after), links_count,
        'Expected to find only one link from code to '
        'the calculation node after import. {} found.'.format(len(links_after)))
def test_high_level_workflow_links(self, temp_dir):
    """Round-trip all link types between high-level process node flavours.

    For every pairing of calculation node classes (CalcJobNode,
    CalcFunctionNode) and workflow node classes (WorkChainNode,
    WorkFunctionNode), build a complex graph and verify that all
    INPUT_CALC, INPUT_WORK, CALL_CALC, CALL_WORK, CREATE and RETURN
    links are recreated on import.
    """
    calc_type_pairs = [
        ['CalcJobNode', 'CalcJobNode'],
        ['CalcJobNode', 'CalcFunctionNode'],
        ['CalcFunctionNode', 'CalcJobNode'],
        ['CalcFunctionNode', 'CalcFunctionNode'],
    ]
    work_type_pairs = [
        ['WorkChainNode', 'WorkChainNode'],
        ['WorkChainNode', 'WorkFunctionNode'],
        ['WorkFunctionNode', 'WorkChainNode'],
        ['WorkFunctionNode', 'WorkFunctionNode'],
    ]

    link_type_values = (
        LinkType.INPUT_CALC.value,
        LinkType.INPUT_WORK.value,
        LinkType.CREATE.value,
        LinkType.RETURN.value,
        LinkType.CALL_CALC.value,
        LinkType.CALL_WORK.value,
    )

    for calcs in calc_type_pairs:
        for works in work_type_pairs:
            self.reset_database()

            graph_nodes, _ = self.construct_complex_graph(calc_nodes=calcs, work_nodes=works)

            # Getting the input, create, return and call links
            builder = orm.QueryBuilder()
            builder.append(orm.Node, project='uuid')
            builder.append(
                orm.Node,
                project='uuid',
                edge_project=['label', 'type'],
                edge_filters={'type': {'in': link_type_values}},
            )

            self.assertEqual(
                builder.count(), 13,
                msg='Failed with c1={}, c2={}, w1={}, w2={}'.format(calcs[0], calcs[1], works[0], works[1])
            )

            links_before = builder.all()

            archive_path = os.path.join(temp_dir, 'export.tar.gz')
            export(graph_nodes, outfile=archive_path, silent=True, overwrite=True)

            self.reset_database()

            import_data(archive_path, silent=True)
            links_after = get_all_node_links()

            self.assertSetEqual(
                {tuple(link) for link in links_before},
                {tuple(link) for link in links_after},
                msg='Failed with c1={}, c2={}, w1={}, w2={}'.format(calcs[0], calcs[1], works[0], works[1])
            )
def test_double_return_links_for_workflows(self, temp_dir):
    """Check that two RETURN links into the same node survive export/import.

    A Data node receives RETURN links from two different workflow nodes;
    after a full export/import cycle both nodes and both links must be
    present and unchanged.
    """
    work1 = orm.WorkflowNode()
    work2 = orm.WorkflowNode().store()
    data_in = orm.Int(1).store()
    data_out = orm.Int(2).store()

    work1.add_incoming(data_in, LinkType.INPUT_WORK, 'input_i1')
    work1.add_incoming(work2, LinkType.CALL_WORK, 'call')
    work1.store()
    data_out.add_incoming(work1, LinkType.RETURN, 'return1')
    data_out.add_incoming(work2, LinkType.RETURN, 'return2')
    links_count = 4

    work1.seal()
    work2.seal()

    uuids_wanted = {node.uuid for node in (work1, data_out, data_in, work2)}
    links_wanted = get_all_node_links()

    archive_path = os.path.join(temp_dir, 'export.aiida')
    export([data_out, work1, work2, data_in], filename=archive_path, silent=True)

    self.reset_database()
    import_data(archive_path, silent=True)

    uuids_in_db = [
        str(uuid) for [uuid] in orm.QueryBuilder().append(orm.Node, project='uuid').all()
    ]
    self.assertListEqual(sorted(uuids_wanted), sorted(uuids_in_db))

    links_in_db = get_all_node_links()
    self.assertListEqual(sorted(links_wanted), sorted(links_in_db))

    # Assert number of links, checking both RETURN links are included
    self.assertEqual(len(links_wanted), links_count)  # Before export
    self.assertEqual(len(links_in_db), links_count)  # After import
def test_complex_workflow_graph_links(self, temp_dir):
    """Export/import a complex workflow graph and compare the link sets.

    Verifies that INPUT, CREATE, RETURN and CALL links connecting Data
    nodes, CalcJobNodes and workflow nodes are all preserved by an
    export/import round trip.
    """
    graph_nodes, _ = self.construct_complex_graph()

    # Collect every provenance link present before exporting
    builder = orm.QueryBuilder()
    builder.append(orm.Node, project='uuid')
    builder.append(
        orm.Node,
        project='uuid',
        edge_project=['label', 'type'],
        edge_filters={
            'type': {
                'in': (
                    LinkType.INPUT_CALC.value,
                    LinkType.INPUT_WORK.value,
                    LinkType.CREATE.value,
                    LinkType.RETURN.value,
                    LinkType.CALL_CALC.value,
                    LinkType.CALL_WORK.value,
                )
            }
        },
    )
    links_before = builder.all()

    archive_path = os.path.join(temp_dir, 'export.tar.gz')
    export(graph_nodes, outfile=archive_path, silent=True)

    self.reset_database()

    import_data(archive_path, silent=True)
    links_after = get_all_node_links()

    self.assertSetEqual(
        {tuple(link) for link in links_before},
        {tuple(link) for link in links_after},
    )
def test_multiple_post_return_links(self, temp_dir):  # pylint: disable=too-many-locals
    """Check extra RETURN links can be added to existing Nodes, when label is not unique.

    Exports the same Data node twice — once without and once with backward
    RETURN traversal — and checks that the first archive restores only the
    CREATE link (no workflow), while the second restores the workflow and
    both links.
    """
    data = orm.Int(1).store()
    calc = orm.CalculationNode().store()
    work = orm.WorkflowNode().store()
    link_label = 'output_data'

    data.add_incoming(calc, LinkType.CREATE, link_label)
    data.add_incoming(work, LinkType.RETURN, link_label)

    calc.seal()
    work.seal()

    data_uuid = data.uuid
    calc_uuid = calc.uuid
    work_uuid = work.uuid
    before_links = get_all_node_links()

    data_provenance = os.path.join(temp_dir, 'data.aiida')
    all_provenance = os.path.join(temp_dir, 'all.aiida')

    export([data], filename=data_provenance, return_backward=False)
    export([data], filename=all_provenance, return_backward=True)

    self.clean_db()

    # import data provenance
    import_data(data_provenance)

    no_of_work = orm.QueryBuilder().append(orm.WorkflowNode).count()
    self.assertEqual(
        no_of_work,
        0,
        msg=f'{no_of_work} WorkflowNode(s) was/were found, however, none should be present'
    )

    nodes = orm.QueryBuilder().append(orm.Node, project='uuid')
    # BUG FIX: the failure message previously interpolated `no_of_work`
    # (the workflow count) instead of the actual number of nodes found.
    no_of_nodes = nodes.count()
    self.assertEqual(
        no_of_nodes,
        2,
        msg=f'{no_of_nodes} Node(s) was/were found, however, exactly two should be present'
    )
    for node in nodes.iterall():
        self.assertIn(node[0], [data_uuid, calc_uuid])

    links = get_all_node_links()
    self.assertEqual(
        len(links), 1,
        msg='Only a single Link (from Calc. to Data) is expected, '
        'instead {} were found (in, out, label, type): {}'.format(len(links), links))

    for from_uuid, to_uuid, found_label, found_type in links:
        self.assertEqual(from_uuid, calc_uuid)
        self.assertEqual(to_uuid, data_uuid)
        self.assertEqual(found_label, link_label)
        self.assertEqual(found_type, LinkType.CREATE.value)

    # import data+logic provenance
    import_data(all_provenance)

    no_of_work = orm.QueryBuilder().append(orm.WorkflowNode).count()
    self.assertEqual(
        no_of_work,
        1,
        msg=f'{no_of_work} WorkflowNode(s) was/were found, however, exactly one should be present'
    )

    nodes = orm.QueryBuilder().append(orm.Node, project='uuid')
    # BUG FIX: same wrong-variable message as above — report the node count.
    no_of_nodes = nodes.count()
    self.assertEqual(
        no_of_nodes,
        3,
        msg=f'{no_of_nodes} Node(s) was/were found, however, exactly three should be present'
    )
    for node in nodes.iterall():
        self.assertIn(node[0], [data_uuid, calc_uuid, work_uuid])

    links = get_all_node_links()
    self.assertEqual(
        len(links),
        2,
        msg=f'Exactly two Links are expected, instead {len(links)} were found (in, out, label, type): {links}'
    )
    self.assertListEqual(sorted(links), sorted(before_links))