def test_remove_nodes_bulk():
    """Test node removal with `skip_orm=True`."""
    group = orm.Group(label='test_removing_nodes').store().backend_entity
    node_01 = orm.Data().store().backend_entity
    node_02 = orm.Data().store().backend_entity
    node_03 = orm.Data().store().backend_entity
    node_04 = orm.Data().store().backend_entity
    nodes = [node_01, node_02, node_03]

    group.add_nodes(nodes)
    assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes)

    # Remove a node that is not in the group: nothing should happen
    group.remove_nodes([node_04], skip_orm=True)
    assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes)

    # Remove one Node
    nodes.remove(node_03)
    group.remove_nodes([node_03], skip_orm=True)
    assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes)

    # Remove a list of Nodes and check
    nodes.remove(node_01)
    nodes.remove(node_02)
    group.remove_nodes([node_01, node_02], skip_orm=True)
    assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes)

def test_cls_label_clashes(clear_database_before_test):
    """Test behaviour when multiple group classes have the same label."""
    group_01, _ = orm.Group.objects.get_or_create('a')
    node_01 = orm.Data().store()
    group_01.add_nodes(node_01)

    group_02, _ = orm.UpfFamily.objects.get_or_create('a')
    node_02 = orm.Data().store()
    group_02.add_nodes(node_02)

    # Requests for non-existing groups should return `None`
    assert GroupPath('b').get_group() is None

    assert GroupPath('a').group_ids == [group_01.pk]
    assert GroupPath('a').get_group().pk == group_01.pk
    expected = [('a', node_01.pk)]
    assert [(r.group_path.path, r.node.pk) for r in GroupPath('a').walk_nodes()] == expected

    assert GroupPath('a', cls=orm.UpfFamily).group_ids == [group_02.pk]
    assert GroupPath('a', cls=orm.UpfFamily).get_group().pk == group_02.pk
    expected = [('a', node_02.pk)]
    assert [(r.group_path.path, r.node.pk) for r in GroupPath('a', cls=orm.UpfFamily).walk_nodes()] == expected

def test_remove_nodes(self):
    """Test node removal."""
    node_01 = orm.Data().store()
    node_02 = orm.Data().store()
    node_03 = orm.Data().store()
    node_04 = orm.Data().store()
    nodes = [node_01, node_02, node_03]
    group = orm.Group(label='test_remove_nodes').store()

    # Add initial nodes
    group.add_nodes(nodes)
    self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes))

    # Remove a node that is not in the group: nothing should happen
    group.remove_nodes(node_04)
    self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes))

    # Remove one orm.Node
    nodes.remove(node_03)
    group.remove_nodes(node_03)
    self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes))

    # Remove a list of Nodes and check
    nodes.remove(node_01)
    nodes.remove(node_02)
    group.remove_nodes([node_01, node_02])
    self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes))

def test_creation(self):
    """Test the creation of Groups."""
    node = orm.Data()
    stored_node = orm.Data().store()

    group = orm.Group(label='testgroup')

    with self.assertRaises(exceptions.ModificationNotAllowed):
        # group unstored
        group.add_nodes(node)

    with self.assertRaises(exceptions.ModificationNotAllowed):
        # group unstored
        group.add_nodes(stored_node)

    group.store()

    with self.assertRaises(ValueError):
        # node unstored
        group.add_nodes(node)

    group.add_nodes(stored_node)

    nodes = list(group.nodes)
    self.assertEqual(len(nodes), 1)
    self.assertEqual(nodes[0].pk, stored_node.pk)

def test_node_iterator(self):
    """Test the indexing and slicing functionality of the node iterator."""
    node_00 = orm.Data().store()
    node_01 = orm.Data().store()
    node_02 = orm.Data().store()
    node_03 = orm.Data().store()
    nodes = [node_00, node_01, node_02, node_03]

    group = orm.Group(label='label', description='description').store()
    group.add_nodes(nodes)

    # Indexing
    node_indexed = group.nodes[0]
    self.assertTrue(isinstance(node_indexed, orm.Data))
    self.assertIn(node_indexed.uuid, [node.uuid for node in nodes])

    # Slicing
    nodes_sliced = group.nodes[1:3]
    self.assertTrue(isinstance(nodes_sliced, list))
    self.assertEqual(len(nodes_sliced), 2)
    self.assertTrue(all(isinstance(node, orm.Data) for node in nodes_sliced))
    self.assertTrue(all(node.uuid in set(node.uuid for node in nodes) for node in nodes_sliced))

def test_serialize_round_trip(self):
    """Test the serialization of a dictionary with Nodes in various data structures.

    Also make sure that the serialized data is JSON-serializable.
    """
    node_a = orm.Data().store()
    node_b = orm.Data().store()

    data = {
        'test': 1,
        'list': [1, 2, 3, node_a],
        'dict': {
            ('Si',): node_b,
            'foo': 'bar'
        },
        'baz': 'aar'
    }

    serialized_data = serialize.serialize(data)
    deserialized_data = serialize.deserialize(serialized_data)

    # For now manual element-for-element comparison until we come up with a general
    # purpose function that can equate two node instances properly
    self.assertEqual(data['test'], deserialized_data['test'])
    self.assertEqual(data['baz'], deserialized_data['baz'])
    self.assertEqual(data['list'][:3], deserialized_data['list'][:3])
    self.assertEqual(data['list'][3].uuid, deserialized_data['list'][3].uuid)
    self.assertEqual(data['dict'][('Si',)].uuid, deserialized_data['dict'][('Si',)].uuid)

def test_show_limit(self):
    """Test the `--limit` option of the `verdi group show` command."""
    label = 'test_group_limit'
    nodes = [orm.Data().store(), orm.Data().store()]
    group = orm.Group(label=label).store()
    group.add_nodes(nodes)

    # The default should include all nodes in the output
    result = self.cli_runner.invoke(cmd_group.group_show, [label])
    self.assertClickResultNoException(result)

    for node in nodes:
        self.assertIn(str(node.pk), result.output)

    # Repeat the test with `--limit 1`, also using the `--raw` option so that only the nodes are displayed
    result = self.cli_runner.invoke(cmd_group.group_show, [label, '--limit', '1', '--raw'])
    self.assertClickResultNoException(result)

    # The current `verdi group show` does not support ordering, so we cannot rely on it to check which of the
    # two nodes is shown; simply verify that a single line is printed and that it contains one of the two pks
    self.assertEqual(len(result.output.strip().split('\n')), 1)
    self.assertTrue(str(nodes[0].pk) in result.output or str(nodes[1].pk) in result.output)

def test_link_manager_with_nested_namespaces(clear_database_before_test):
    """Test the ``LinkManager`` works with nested namespaces."""
    inp1 = orm.Data()
    inp1.store()

    calc = orm.CalculationNode()
    calc.add_incoming(inp1, link_type=LinkType.INPUT_CALC, link_label='nested__sub__namespace')
    calc.store()

    # Attach outputs
    out1 = orm.Data()
    out1.add_incoming(calc, link_type=LinkType.CREATE, link_label='nested__sub__namespace')
    out1.store()

    # Check that the recommended way of dereferencing works
    assert calc.inputs.nested.sub.namespace.uuid == inp1.uuid
    assert calc.outputs.nested.sub.namespace.uuid == out1.uuid

    # Nested namespaces are returned as an ``AttributeDict`` instance
    assert isinstance(calc.outputs.nested.sub, AttributeDict)

    # Check that the legacy way still works
    with pytest.warns(Warning):
        assert calc.inputs.nested__sub__namespace.uuid == inp1.uuid
        assert calc.outputs.nested__sub__namespace.uuid == out1.uuid

    # Must raise an AttributeError, otherwise tab completion will not work
    with pytest.raises(AttributeError):
        _ = calc.outputs.nested.not_existent

    # Must raise a KeyError
    with pytest.raises(KeyError):
        _ = calc.outputs.nested['not_existent']

def test_add_nodes(clear_db, get_upf_data):
    """Test the `SsspFamily.add_nodes` method."""
    upf_he = get_upf_data(element='He').store()
    upf_ne = get_upf_data(element='Ne').store()
    upf_ar = get_upf_data(element='Ar').store()
    family = SsspFamily(label='SSSP').store()

    with pytest.raises(TypeError):
        family.add_nodes(orm.Data().store())

    with pytest.raises(TypeError):
        family.add_nodes([orm.Data().store(), orm.Data().store()])

    with pytest.raises(TypeError):
        family.add_nodes([upf_ar, orm.Data().store()])

    assert family.count() == 0

    family.add_nodes(upf_he)
    assert family.count() == 1

    # Check that adding a duplicate element raises, and that no extra nodes have been added.
    with pytest.raises(ValueError):
        family.add_nodes([upf_ar, upf_he, upf_ne])
    assert family.count() == 1

    family.add_nodes([upf_ar, upf_ne])
    assert family.count() == 3

def test_count(self):
    """Test the `count` method."""
    node_00 = orm.Data().store()
    node_01 = orm.Data().store()
    nodes = [node_00, node_01]

    group = orm.Group(label='label', description='description').store()
    group.add_nodes(nodes)

    self.assertEqual(group.count(), len(nodes))

def test_query(backend):
    """Test if queries are working."""
    from aiida.common.exceptions import NotExistent, MultipleObjectsError

    default_user = backend.users.create('*****@*****.**')

    g_1 = backend.groups.create(label='testquery1', user=default_user).store()
    g_2 = backend.groups.create(label='testquery2', user=default_user).store()

    n_1 = orm.Data().store().backend_entity
    n_2 = orm.Data().store().backend_entity
    n_3 = orm.Data().store().backend_entity
    n_4 = orm.Data().store().backend_entity

    g_1.add_nodes([n_1, n_2])
    g_2.add_nodes([n_1, n_3])

    newuser = backend.users.create(email='*****@*****.**')
    g_3 = backend.groups.create(label='testquery3', user=newuser).store()

    # I should find it
    g_1copy = backend.groups.get(uuid=g_1.uuid)
    assert g_1.pk == g_1copy.pk

    # Try queries
    res = backend.groups.query(nodes=n_4)
    assert [_.pk for _ in res] == []

    res = backend.groups.query(nodes=n_1)
    assert [_.pk for _ in res] == [_.pk for _ in [g_1, g_2]]

    res = backend.groups.query(nodes=n_2)
    assert [_.pk for _ in res] == [_.pk for _ in [g_1]]

    # I try to use 'get' with zero or multiple results
    with pytest.raises(NotExistent):
        backend.groups.get(nodes=n_4)
    with pytest.raises(MultipleObjectsError):
        backend.groups.get(nodes=n_1)

    assert backend.groups.get(nodes=n_2).pk == g_1.pk

    # Query by user
    res = backend.groups.query(user=newuser)
    assert set(_.pk for _ in res) == set(_.pk for _ in [g_3])

    # Same query, but using a string (the username=email) instead of a DbUser object
    res = backend.groups.query(user=newuser)
    assert set(_.pk for _ in res) == set(_.pk for _ in [g_3])

    res = backend.groups.query(user=default_user)
    assert set(_.pk for _ in res) == set(_.pk for _ in [g_1, g_2])

def _create_branchy_graph():
    """Create a basic branchy graph which has two concatenated calculations:

    * calc_1 takes data_0 as an input and returns data_1 and data_o.
    * calc_2 takes data_1 and data_i as inputs and returns data_2.

    This graph looks like this::

                       +--------+                    +--------+
                       | data_o |                    | data_i |
                       +--------+                    +--------+
                            ^                             |
                            |                             v
        +--------+     +--------+     +--------+     +--------+     +--------+
        | data_0 | --> | calc_1 | --> | data_1 | --> | calc_2 | --> | data_2 |
        +--------+     +--------+     +--------+     +--------+     +--------+
    """
    data_0 = orm.Data().store()

    calc_1 = orm.CalculationNode()
    calc_1.add_incoming(data_0, link_type=LinkType.INPUT_CALC, link_label='inpcalc_data_0')
    calc_1.store()

    data_1 = orm.Data()
    data_o = orm.Data()
    data_1.add_incoming(calc_1, link_type=LinkType.CREATE, link_label='create_data_1')
    data_o.add_incoming(calc_1, link_type=LinkType.CREATE, link_label='create_data_o')
    data_1.store()
    data_o.store()

    data_i = orm.Data().store()

    calc_2 = orm.CalculationNode()
    calc_2.add_incoming(data_1, link_type=LinkType.INPUT_CALC, link_label='inpcalc_data_1')
    calc_2.add_incoming(data_i, link_type=LinkType.INPUT_CALC, link_label='inpcalc_data_i')
    calc_2.store()

    data_2 = orm.Data()
    data_2.add_incoming(calc_2, link_type=LinkType.CREATE, link_label='create_data_2')
    data_2.store()

    output_dict = {
        'data_i': data_i,
        'data_0': data_0,
        'data_1': data_1,
        'data_2': data_2,
        'data_o': data_o,
        'calc_1': calc_1,
        'calc_2': calc_2,
    }

    return output_dict

def test_clear(self):
    """Test the `clear` method to remove all nodes."""
    node_01 = orm.Data().store()
    node_02 = orm.Data().store()
    node_03 = orm.Data().store()
    nodes = [node_01, node_02, node_03]
    group = orm.Group(label='test_clear_nodes').store()

    # Add initial nodes
    group.add_nodes(nodes)
    self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes))

    group.clear()
    self.assertEqual(list(group.nodes), [])

def setUpClass(cls):
    super().setUpClass()
    from aiida import orm

    cls.computer = orm.Computer(
        label='comp',
        hostname='localhost',
        transport_type='local',
        scheduler_type='direct',
        workdir='/tmp/aiida'
    ).store()

    cls.code = orm.Code(remote_computer_exec=(cls.computer, '/bin/true')).store()
    cls.group = orm.Group(label='test_group').store()
    cls.node = orm.Data().store()

    # Some of the export tests write in the current directory,
    # make sure it is writeable and we don't pollute the current one
    cls.old_cwd = os.getcwd()
    cls.cwd = tempfile.mkdtemp(__name__)
    os.chdir(cls.cwd)

    # Utility helper
    cls.fixture_archive = 'export/migrate'
    cls.newest_archive = f'export_v{EXPORT_VERSION}_simple.aiida'
    cls.penultimate_archive = 'export_v0.6_simple.aiida'

def test_node_show(self):
    """Test `verdi node show`."""
    node = orm.Data().store()
    node.label = 'SOMELABEL'
    options = [str(node.pk)]
    result = self.cli_runner.invoke(cmd_node.node_show, options)
    self.assertClickResultNoException(result)

    # Let's check some content in the output. At least the UUID and the label should be in there
    self.assertIn(node.label, result.output)
    self.assertIn(node.uuid, result.output)

    # Let's now test the '--print-groups' option
    options.append('--print-groups')
    result = self.cli_runner.invoke(cmd_node.node_show, options)
    self.assertClickResultNoException(result)
    # I don't check the list of groups - it might be in an autogroup

    # Let's create a group and put the node in there
    group_name = 'SOMEGROUPNAME'
    group = orm.Group(group_name).store()
    group.add_nodes(node)

    result = self.cli_runner.invoke(cmd_node.node_show, options)
    self.assertClickResultNoException(result)

    # Now the group should be in there
    self.assertIn(group_name, result.output)

def setUpClass(cls, *args, **kwargs):
    super(TestVerdiNode, cls).setUpClass(*args, **kwargs)

    node = orm.Data()

    cls.ATTR_KEY_ONE = 'a'
    cls.ATTR_VAL_ONE = '1'
    cls.ATTR_KEY_TWO = 'b'
    cls.ATTR_VAL_TWO = 'test'

    node.set_attribute_many({cls.ATTR_KEY_ONE: cls.ATTR_VAL_ONE, cls.ATTR_KEY_TWO: cls.ATTR_VAL_TWO})

    cls.EXTRA_KEY_ONE = 'x'
    cls.EXTRA_VAL_ONE = '2'
    cls.EXTRA_KEY_TWO = 'y'
    cls.EXTRA_VAL_TWO = 'other'

    node.set_extra_many({cls.EXTRA_KEY_ONE: cls.EXTRA_VAL_ONE, cls.EXTRA_KEY_TWO: cls.EXTRA_VAL_TWO})

    node.store()
    cls.node = node

def test_parser_get_outputs_for_parsing(self):
    """Make sure that the `get_outputs_for_parsing` method returns the correct output nodes."""
    ArithmeticAddCalculation.define = CustomCalcJob.define
    node = orm.CalcJobNode(computer=self.computer, process_type=CustomCalcJob.build_process_type())
    node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
    node.set_option('max_wallclock_seconds', 1800)
    node.store()

    retrieved = orm.FolderData().store()
    retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')

    output = orm.Data().store()
    output.add_incoming(node, link_type=LinkType.CREATE, link_label='output')

    parser = ArithmeticAddParser(node)
    outputs_for_parsing = parser.get_outputs_for_parsing()
    self.assertIn('retrieved', outputs_for_parsing)
    self.assertEqual(outputs_for_parsing['retrieved'].uuid, retrieved.uuid)
    self.assertIn('output', outputs_for_parsing)
    self.assertEqual(outputs_for_parsing['output'].uuid, output.uuid)

def test_basics(self):
    """Test the correct translation of the `--force` and `--verbose` options.

    This just checks that the calls do not raise and that, in all cases with the force flag, there are no messages.
    """
    from aiida.common.exceptions import NotExistent

    newnode = orm.Data().store()
    newnodepk = newnode.pk

    options_list = []
    options_list.append(['--create-forward'])
    options_list.append(['--call-calc-forward'])
    options_list.append(['--call-work-forward'])
    options_list.append(['--force'])
    options_list.append(['--verbose'])
    options_list.append(['--verbose', '--force'])

    for options in options_list:
        run_options = [str(newnodepk)]
        run_options.append('--dry-run')
        for an_option in options:
            run_options.append(an_option)
        result = self.cli_runner.invoke(cmd_node.node_delete, run_options)
        self.assertClickResultNoException(result)

    # Delete the created node
    run_options = [str(newnodepk)]
    run_options.append('--force')
    result = self.cli_runner.invoke(cmd_node.node_delete, run_options)
    self.assertClickResultNoException(result)

    with self.assertRaises(NotExistent):
        orm.load_node(newnodepk)

def test_first_multiple_projections(self):
    """Test `first()` returns correct types and numbers for multiple projections."""
    orm.Data().store()
    orm.Data().store()

    result = orm.QueryBuilder().append(
        orm.User, tag='user', project=['email']
    ).append(
        orm.Data, with_user='user', project=['*']
    ).first()

    self.assertEqual(type(result), list)
    self.assertEqual(len(result), 2)
    self.assertIsInstance(result[0], six.string_types)
    self.assertIsInstance(result[1], orm.Data)

def test_description(self):
    """Test the update of the description both for stored and unstored groups."""
    node = orm.Data().store()

    group_01 = orm.Group(label='testgroupdescription1', description='group_01').store()
    group_01.add_nodes(node)

    group_02 = orm.Group(label='testgroupdescription2', description='group_02')

    # Preliminary checks
    self.assertTrue(group_01.is_stored)
    self.assertFalse(group_02.is_stored)
    self.assertEqual(group_01.description, 'group_01')
    self.assertEqual(group_02.description, 'group_02')

    # Change
    group_01.description = 'new1'
    group_02.description = 'new2'

    # Test that the groups remained in their proper stored state and that
    # the description was updated
    self.assertTrue(group_01.is_stored)
    self.assertFalse(group_02.is_stored)
    self.assertEqual(group_01.description, 'new1')
    self.assertEqual(group_02.description, 'new2')

    # Store group_02 and check that the description is OK
    group_02.store()
    self.assertTrue(group_02.is_stored)
    self.assertEqual(group_02.description, 'new2')

def setUpClass(cls, *args, **kwargs):
    super().setUpClass(*args, **kwargs)
    node = orm.Data()

    cls.ATTR_KEY_ONE = 'a'
    cls.ATTR_VAL_ONE = '1'
    cls.ATTR_KEY_TWO = 'b'
    cls.ATTR_VAL_TWO = 'test'

    node.set_attribute_many({cls.ATTR_KEY_ONE: cls.ATTR_VAL_ONE, cls.ATTR_KEY_TWO: cls.ATTR_VAL_TWO})

    cls.EXTRA_KEY_ONE = 'x'
    cls.EXTRA_VAL_ONE = '2'
    cls.EXTRA_KEY_TWO = 'y'
    cls.EXTRA_VAL_TWO = 'other'

    node.set_extra_many({cls.EXTRA_KEY_ONE: cls.EXTRA_VAL_ONE, cls.EXTRA_KEY_TWO: cls.EXTRA_VAL_TWO})

    node.store()
    cls.node = node

    # Set up a FolderData for the node repo cp tests.
    folder_node = orm.FolderData()
    cls.content_file1 = 'nobody expects'
    cls.content_file2 = 'the minister of silly walks'
    cls.key_file1 = 'some/nested/folder/filename.txt'
    cls.key_file2 = 'some_other_file.txt'
    folder_node.put_object_from_filelike(io.StringIO(cls.content_file1), cls.key_file1)
    folder_node.put_object_from_filelike(io.StringIO(cls.content_file2), cls.key_file2)
    folder_node.store()
    cls.folder_node = folder_node

def test_add_nodes(self):
    """Test different ways of adding nodes."""
    node_01 = orm.Data().store()
    node_02 = orm.Data().store()
    node_03 = orm.Data().store()
    nodes = [node_01, node_02, node_03]
    group = orm.Group(label='test_adding_nodes').store()

    # Single node
    group.add_nodes(node_01)
    # List of nodes
    group.add_nodes([node_02, node_03])

    # Check
    self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes))

    # Try to add a node that is already present: there should be no problem
    group.add_nodes(node_01)
    self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes))

def setUpClass(cls, *args, **kwargs):
    """Only run to prepare an export file"""
    super().setUpClass()

    data = orm.Data()
    data.label = 'my_test_data_node'
    data.store()
    data.set_extra_many({'b': 2, 'c': 3})

    cls.tmp_folder = tempfile.mkdtemp()
    cls.export_file = os.path.join(cls.tmp_folder, 'export.aiida')
    export([data], outfile=cls.export_file, silent=True)

def test_objects_find(self):
    """Put logs in and find them."""
    node = orm.Data().store()
    for _ in range(10):
        record = self.log_record
        record['dbnode_id'] = node.id
        Log(**record)

    entries = Log.objects.all()
    self.assertEqual(10, len(entries))
    self.assertIsInstance(entries[0], Log)

def test_find_limit(self):
    """Test the limit option of log.find."""
    node = orm.Data().store()
    limit = 2
    for _ in range(limit * 2):
        self.log_record['dbnode_id'] = node.id
        Log(**self.log_record)
    entries = Log.objects.find(limit=limit)
    self.assertEqual(len(entries), limit)

def test_add_nodes_skip_orm():
    """Test the `SqlaGroup.add_nodes` method with the `skip_orm=True` flag."""
    group = orm.Group(label='test_adding_nodes').store().backend_entity

    node_01 = orm.Data().store().backend_entity
    node_02 = orm.Data().store().backend_entity
    node_03 = orm.Data().store().backend_entity
    node_04 = orm.Data().store().backend_entity
    node_05 = orm.Data().store().backend_entity
    nodes = [node_01, node_02, node_03, node_04, node_05]

    group.add_nodes([node_01], skip_orm=True)
    group.add_nodes([node_02, node_03], skip_orm=True)
    group.add_nodes((node_04, node_05), skip_orm=True)

    assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes)

    # Try to add a node that is already present: there should be no problem
    group.add_nodes([node_01], skip_orm=True)
    assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes)

def test_add_nodes_skip_orm_batch():
    """Test the `SqlaGroup.add_nodes` method with the `skip_orm=True` flag and batches."""
    nodes = [orm.Data().store().backend_entity for _ in range(100)]

    # Add the nodes to groups using different batch sizes and check at the end that all nodes were added correctly
    batch_sizes = (1, 3, 10, 1000)
    for batch_size in batch_sizes:
        group = orm.Group(label='test_batches_' + str(batch_size)).store()
        group.backend_entity.add_nodes(nodes, skip_orm=True, batch_size=batch_size)
        assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes)

def test_walk_nodes(clear_database_before_test):
    """Test the ``GroupPath.walk_nodes()`` function."""
    group, _ = orm.Group.objects.get_or_create('a')
    node = orm.Data()
    node.set_attribute_many({'i': 1, 'j': 2})
    node.store()
    group.add_nodes(node)
    group_path = GroupPath()
    assert [(r.group_path.path, r.node.attributes) for r in group_path.walk_nodes()] == [('a', {'i': 1, 'j': 2})]

def test_query_with_group():
    """Test that a group can be queried through one of the nodes it contains."""
    group = orm.Group(label='group').store()
    data = orm.Data().store()

    group.add_nodes([data])

    builder = orm.QueryBuilder().append(orm.Data, filters={'id': data.pk}, tag='data').append(orm.Group, with_node='data')
    loaded = builder.one()[0]

    assert loaded.pk == group.pk

def test_calc_and_data_nodes_with_comments(self, temp_dir):
    """Test that comments for a CalculationNode and a Data node are correctly exported and imported."""
    # Create user, nodes, and comments
    user = orm.User.objects.get_default()

    calc_node = orm.CalculationNode().store()
    calc_node.seal()
    data_node = orm.Data().store()

    comment_one = orm.Comment(calc_node, user, self.comments[0]).store()
    comment_two = orm.Comment(calc_node, user, self.comments[1]).store()

    comment_three = orm.Comment(data_node, user, self.comments[2]).store()
    comment_four = orm.Comment(data_node, user, self.comments[3]).store()

    # Get values prior to export
    calc_uuid = calc_node.uuid
    data_uuid = data_node.uuid
    calc_comments_uuid = [c.uuid for c in [comment_one, comment_two]]
    data_comments_uuid = [c.uuid for c in [comment_three, comment_four]]

    # Export nodes
    export_file = os.path.join(temp_dir, 'export.tar.gz')
    export([calc_node, data_node], outfile=export_file, silent=True)

    # Clean database and reimport exported file
    self.reset_database()
    import_data(export_file, silent=True)

    # Get nodes and comments
    builder = orm.QueryBuilder()
    builder.append(orm.Node, tag='node', project=['uuid'])
    builder.append(orm.Comment, with_node='node', project=['uuid'])
    nodes_and_comments = builder.all()

    self.assertEqual(len(nodes_and_comments), len(self.comments))
    for entry in nodes_and_comments:
        self.assertEqual(len(entry), 2)  # 1 Node + 1 Comment

        import_node_uuid = str(entry[0])
        import_comment_uuid = str(entry[1])

        self.assertIn(import_node_uuid, [calc_uuid, data_uuid])
        if import_node_uuid == calc_uuid:
            # Calc node comments
            self.assertIn(import_comment_uuid, calc_comments_uuid)
        else:
            # Data node comments
            self.assertIn(import_comment_uuid, data_comments_uuid)