Example #1
    def test_get_node_info_multiple_call_links(self):
        """Test the `get_node_info` utility.

        Regression test for #2868:
            Verify that all `CALL` links are included in the formatted string even if link labels are identical.
        """
        from aiida.cmdline.utils.common import get_node_info

        workflow = orm.WorkflowNode().store()
        node_one = orm.CalculationNode()
        node_two = orm.CalculationNode()

        node_one.add_incoming(workflow,
                              link_type=LinkType.CALL_CALC,
                              link_label='CALL_IDENTICAL')
        node_two.add_incoming(workflow,
                              link_type=LinkType.CALL_CALC,
                              link_label='CALL_IDENTICAL')
        node_one.store()
        node_two.store()

        node_info = get_node_info(workflow)
        self.assertIn('CALL_IDENTICAL', node_info)
        self.assertIn(str(node_one.pk), node_info)
        self.assertIn(str(node_two.pk), node_info)
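
Aside (a hedged sketch, not part of the original test): `get_node_info` produces the overview string that `verdi node show` displays, so the regression can also be checked interactively by printing it.

    # Assumes `workflow` built as in the test above and a loaded AiiDA profile.
    print(get_node_info(workflow))  # both called PKs should appear, despite the identical labels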
Example #2
    def test_delete(self):
        """Test `verdi group delete` command."""
        orm.Group(label='group_test_delete_01').store()
        orm.Group(label='group_test_delete_02').store()

        result = self.cli_runner.invoke(cmd_group.group_delete,
                                        ['--force', 'group_test_delete_01'])
        self.assertClickResultNoException(result)

        # Verify that removed group is not present in list
        result = self.cli_runner.invoke(cmd_group.group_list)
        self.assertClickResultNoException(result)
        self.assertNotIn('group_test_delete_01', result.output)

        node_01 = orm.CalculationNode().store()
        node_02 = orm.CalculationNode().store()

        # Add some nodes and then use `verdi group delete` to delete a group that contains nodes
        group = orm.load_group(label='group_test_delete_02')
        group.add_nodes([node_01, node_02])
        self.assertEqual(group.count(), 2)

        result = self.cli_runner.invoke(cmd_group.group_delete,
                                        ['--force', 'group_test_delete_02'])
        self.assertClickResultNoException(result)

        with self.assertRaises(exceptions.NotExistent):
            orm.load_group(label='group_test_delete_02')
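
Aside: a hedged ORM-level equivalent of the CLI flow above, assuming a loaded profile (`orm.Group.objects.delete` takes the group's pk; this is roughly what `verdi group delete --force` does under the hood):

    group = orm.Group(label='group_test_delete_03').store()
    orm.Group.objects.delete(group.pk)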
Example #3
    def test_delete(self):
        """Test `verdi group delete` command."""
        orm.Group(label='group_test_delete_01').store()
        orm.Group(label='group_test_delete_02').store()

        result = self.cli_runner.invoke(cmd_group.group_delete,
                                        ['--force', 'group_test_delete_01'])
        self.assertClickResultNoException(result)

        # Verify that removed group is not present in list
        result = self.cli_runner.invoke(cmd_group.group_list)
        self.assertClickResultNoException(result)
        self.assertNotIn('group_test_delete_01', result.output)

        node_01 = orm.CalculationNode().store()
        node_02 = orm.CalculationNode().store()

        # Add some nodes and then use `verdi group delete --clear` to delete a group even when it contains nodes
        group = orm.load_group(label='group_test_delete_02')
        group.add_nodes([node_01, node_02])
        self.assertEqual(group.count(), 2)

        # Calling delete on a group without the `--clear` option should raise
        result = self.cli_runner.invoke(cmd_group.group_delete,
                                        ['--force', 'group_test_delete_02'])
        self.assertIsNotNone(result.exception, result.output)

        # With `--clear` option should delete group and nodes
        result = self.cli_runner.invoke(
            cmd_group.group_delete,
            ['--force', '--clear', 'group_test_delete_02'])
        self.assertClickResultNoException(result)

        with self.assertRaises(exceptions.NotExistent):
            group = orm.load_group(label='group_test_delete_02')
Example #4
    def test_delete(self):
        """Test `verdi group delete` command."""
        orm.Group(label='group_test_delete_01').store()
        orm.Group(label='group_test_delete_02').store()
        orm.Group(label='group_test_delete_03').store()

        # A dry run should not actually delete the group
        result = self.cli_runner.invoke(cmd_group.group_delete,
                                        ['--dry-run', 'group_test_delete_01'])
        self.assertClickResultNoException(result)
        orm.load_group(label='group_test_delete_01')

        result = self.cli_runner.invoke(cmd_group.group_delete,
                                        ['--force', 'group_test_delete_01'])
        self.assertClickResultNoException(result)

        # Verify that removed group is not present in list
        result = self.cli_runner.invoke(cmd_group.group_list)
        self.assertClickResultNoException(result)
        self.assertNotIn('group_test_delete_01', result.output)

        node_01 = orm.CalculationNode().store()
        node_02 = orm.CalculationNode().store()
        node_pks = {node_01.pk, node_02.pk}

        # Add some nodes and then use `verdi group delete` to delete a group that contains nodes
        group = orm.load_group(label='group_test_delete_02')
        group.add_nodes([node_01, node_02])
        self.assertEqual(group.count(), 2)

        result = self.cli_runner.invoke(cmd_group.group_delete,
                                        ['--force', 'group_test_delete_02'])

        with self.assertRaises(exceptions.NotExistent):
            orm.load_group(label='group_test_delete_02')

        # check nodes still exist
        for pk in node_pks:
            orm.load_node(pk)

        # delete the group and the nodes it contains
        group = orm.load_group(label='group_test_delete_03')
        group.add_nodes([node_01, node_02])
        result = self.cli_runner.invoke(
            cmd_group.group_delete,
            ['--force', '--delete-nodes', 'group_test_delete_03'])
        self.assertClickResultNoException(result)

        # check group and nodes no longer exist
        with self.assertRaises(exceptions.NotExistent):
            orm.load_group(label='group_test_delete_03')
        for pk in node_pks:
            with self.assertRaises(exceptions.NotExistent):
                orm.load_node(pk)
Example #5
    def _create_branchy_graph():
        """
        Creates a basic branchy graph which has two concatenated calculations:

        * calc_1 takes data_0 as an input and returns data_1 and data_o.
        * calc_2 takes data_1 and data_i as inputs and returns data_2.

        This graph looks like this::

                           +--------+                    +--------+
                           | data_o |                    | data_i |
                           +--------+                    +--------+
                               ^                             |
                               |                             v
            +--------+     +--------+     +--------+     +--------+     +--------+
            | data_0 | --> | calc_1 | --> | data_1 | --> | calc_2 | --> | data_2 |
            +--------+     +--------+     +--------+     +--------+     +--------+
        """
        data_0 = orm.Data().store()
        calc_1 = orm.CalculationNode()
        calc_1.add_incoming(data_0, link_type=LinkType.INPUT_CALC, link_label='inpcalc_data_0')
        calc_1.store()

        data_1 = orm.Data()
        data_o = orm.Data()
        data_1.add_incoming(calc_1, link_type=LinkType.CREATE, link_label='create_data_1')
        data_o.add_incoming(calc_1, link_type=LinkType.CREATE, link_label='create_data_o')
        data_1.store()
        data_o.store()

        data_i = orm.Data().store()
        calc_2 = orm.CalculationNode()
        calc_2.add_incoming(data_1, link_type=LinkType.INPUT_CALC, link_label='inpcalc_data_1')
        calc_2.add_incoming(data_i, link_type=LinkType.INPUT_CALC, link_label='inpcalc_data_i')
        calc_2.store()

        data_2 = orm.Data()
        data_2.add_incoming(calc_2, link_type=LinkType.CREATE, link_label='create_data_2')
        data_2.store()

        output_dict = {
            'data_i': data_i,
            'data_0': data_0,
            'data_1': data_1,
            'data_2': data_2,
            'data_o': data_o,
            'calc_1': calc_1,
            'calc_2': calc_2,
        }
        return output_dict
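
A minimal usage sketch for the fixture above (an illustration, assuming the same `orm`/`LinkType` imports and a loaded profile):

    nodes = _create_branchy_graph()
    # data_1 sits between the two calculations: an output of calc_1, an input of calc_2
    assert nodes['calc_1'].outputs.create_data_1.uuid == nodes['data_1'].uuid
    assert nodes['calc_2'].inputs.inpcalc_data_1.uuid == nodes['data_1'].uuid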
Example #6
    def test_exclude_logs_flag(self, temp_dir):
        """Test that the `include_logs` argument for `export` works."""
        log_msg = 'Testing logging of critical failure'

        # Create node
        calc = orm.CalculationNode()
        calc.store()
        calc.seal()

        # Create log message
        calc.logger.critical(log_msg)

        # Save uuids prior to export
        calc_uuid = calc.uuid

        # Export, excluding logs
        export_file = os.path.join(temp_dir, 'export.tar.gz')
        export([calc], outfile=export_file, silent=True, include_logs=False)

        # Clean database and reimport exported data
        self.reset_database()
        import_data(export_file, silent=True)

        # Finding all the log messages
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid']).all()
        import_logs = orm.QueryBuilder().append(orm.Log,
                                                project=['uuid']).all()

        # There should be exactly: 1 orm.CalculationNode, 0 Logs
        self.assertEqual(len(import_calcs), 1)
        self.assertEqual(len(import_logs), 0)

        # Check it's the correct node
        self.assertEqual(str(import_calcs[0][0]), calc_uuid)
Example #7
    def test_input_and_create_links(self, temp_dir):
        """
        Simple test that will verify that INPUT and CREATE links are properly exported and
        correctly recreated upon import.
        """
        node_work = orm.CalculationNode()
        node_input = orm.Int(1).store()
        node_output = orm.Int(2).store()

        node_work.add_incoming(node_input, LinkType.INPUT_CALC, 'input')
        node_work.store()
        node_output.add_incoming(node_work, LinkType.CREATE, 'output')

        node_work.seal()

        export_links = get_all_node_links()
        export_file = os.path.join(temp_dir, 'export.aiida')
        export([node_output], filename=export_file)

        self.clean_db()

        import_data(export_file)
        import_links = get_all_node_links()

        export_set = [tuple(_) for _ in export_links]
        import_set = [tuple(_) for _ in import_links]

        self.assertSetEqual(set(export_set), set(import_set))
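
The `get_all_node_links` helper is not shown in this example; a plausible QueryBuilder-based sketch (an assumption, not the actual helper) that returns rows of `(input_uuid, output_uuid, label, type)`:

    def get_all_node_links():
        """Return every link in the database as (input_uuid, output_uuid, label, type) rows."""
        builder = orm.QueryBuilder()
        builder.append(orm.Node, project='uuid', tag='input')
        builder.append(orm.Node, project='uuid', with_incoming='input', edge_project=['label', 'type'])
        return builder.all()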
Example #8
def test_link_manager_with_nested_namespaces(clear_database_before_test):
    """Test the ``LinkManager`` works with nested namespaces."""
    inp1 = orm.Data()
    inp1.store()

    calc = orm.CalculationNode()
    calc.add_incoming(inp1, link_type=LinkType.INPUT_CALC, link_label='nested__sub__namespace')
    calc.store()

    # Attach outputs
    out1 = orm.Data()
    out1.add_incoming(calc, link_type=LinkType.CREATE, link_label='nested__sub__namespace')
    out1.store()

    # Check that the recommended way of dereferencing works
    assert calc.inputs.nested.sub.namespace.uuid == inp1.uuid
    assert calc.outputs.nested.sub.namespace.uuid == out1.uuid

    # Intermediate namespaces return an ``AttributeDict`` instance
    assert isinstance(calc.outputs.nested.sub, AttributeDict)

    # Check the legacy way still works
    with pytest.warns(Warning):
        assert calc.inputs.nested__sub__namespace.uuid == inp1.uuid
        assert calc.outputs.nested__sub__namespace.uuid == out1.uuid

    # Must raise an AttributeError, otherwise tab completion will not work
    with pytest.raises(AttributeError):
        _ = calc.outputs.nested.not_existent

    # Must raise a KeyError
    with pytest.raises(KeyError):
        _ = calc.outputs.nested['not_existent']
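
Aside: the double underscore is the namespace separator, so the flat link label stored in the database is still `nested__sub__namespace`. A hedged check using the same objects as above:

    assert 'nested__sub__namespace' in calc.get_outgoing(link_type=LinkType.CREATE).all_link_labels()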
Example #9
    def test_missing_node_repo_folder_export(self, temp_dir):
        """
        Make sure `~aiida.tools.importexport.common.exceptions.ArchiveExportError` is raised during export when missing
        Node repository folder.
        Create and store a new Node and manually remove its repository folder.
        Attempt to export it and make sure `~aiida.tools.importexport.common.exceptions.ArchiveExportError` is raised,
        due to the missing folder.
        """
        node = orm.CalculationNode().store()
        node.seal()
        node_uuid = node.uuid

        node_repo = RepositoryFolder(section=Repository._section_name, uuid=node_uuid)  # pylint: disable=protected-access
        self.assertTrue(
            node_repo.exists(), msg='Newly created and stored Node should have had an existing repository folder'
        )

        # Removing the Node's local repository folder
        shutil.rmtree(node_repo.abspath, ignore_errors=True)
        self.assertFalse(
            node_repo.exists(), msg='Newly created and stored Node should have had its repository folder removed'
        )

        # Try to export, check it raises and check the raise message
        filename = os.path.join(temp_dir, 'export.aiida')
        with self.assertRaises(exceptions.ArchiveExportError) as exc:
            export([node], filename=filename)

        self.assertIn(f'Unable to find the repository folder for Node with UUID={node_uuid}', str(exc.exception))
        self.assertFalse(os.path.exists(filename), msg='The archive file should not exist')
Example #10
    def test_critical_log_msg_and_metadata(self, temp_dir):
        """ Testing logging of critical message """
        message = 'Testing logging of critical failure'
        calc = orm.CalculationNode()

        # Firing a log for an unstored node should not end up in the database
        calc.logger.critical(message)
        # There should be no log messages for the unstored object
        self.assertEqual(len(orm.Log.objects.all()), 0)

        # After storing the node, logs above log level should be stored
        calc.store()
        calc.seal()
        calc.logger.critical(message)

        # Store Log metadata
        log_metadata = orm.Log.objects.get(dbnode_id=calc.id).metadata

        export_file = os.path.join(temp_dir, 'export.tar.gz')
        export([calc], outfile=export_file, silent=True)

        self.reset_database()

        import_data(export_file, silent=True)

        # Finding all the log messages
        logs = orm.Log.objects.all()

        self.assertEqual(len(logs), 1)
        self.assertEqual(logs[0].message, message)
        self.assertEqual(logs[0].metadata, log_metadata)
Example #11
    def test_missing_node_repo_folder_import(self, temp_dir):
        """
        Make sure `~aiida.tools.importexport.common.exceptions.CorruptArchive` is raised during import when missing
        Node repository folder.
        Create and export a Node and manually remove its repository folder in the export file.
        Attempt to import it and make sure `~aiida.tools.importexport.common.exceptions.CorruptArchive` is raised,
        due to the missing folder.
        """
        import tarfile

        from aiida.common.folders import SandboxFolder
        from aiida.tools.importexport.common.archive import extract_tar
        from aiida.tools.importexport.common.config import NODES_EXPORT_SUBFOLDER
        from aiida.tools.importexport.common.utils import export_shard_uuid

        node = orm.CalculationNode().store()
        node.seal()
        node_uuid = node.uuid

        node_repo = RepositoryFolder(section=Repository._section_name, uuid=node_uuid)  # pylint: disable=protected-access
        self.assertTrue(
            node_repo.exists(), msg='Newly created and stored Node should have had an existing repository folder'
        )

        # Export and reset db
        filename = os.path.join(temp_dir, 'export.aiida')
        export([node], filename=filename, file_format='tar.gz', silent=True)
        self.reset_database()

        # Untar export file, remove repository folder, re-tar
        node_shard_uuid = export_shard_uuid(node_uuid)
        node_top_folder = node_shard_uuid.split('/')[0]
        with SandboxFolder() as folder:
            extract_tar(filename, folder, silent=True, nodes_export_subfolder=NODES_EXPORT_SUBFOLDER)
            node_folder = folder.get_subfolder(os.path.join(NODES_EXPORT_SUBFOLDER, node_shard_uuid))
            self.assertTrue(
                node_folder.exists(), msg="The Node's repository folder should still exist in the export file"
            )

            # Removing the Node's repository folder from the export file
            shutil.rmtree(
                folder.get_subfolder(os.path.join(NODES_EXPORT_SUBFOLDER, node_top_folder)).abspath, ignore_errors=True
            )
            self.assertFalse(
                node_folder.exists(),
                msg="The Node's repository folder should now have been removed in the export file"
            )

            filename_corrupt = os.path.join(temp_dir, 'export_corrupt.aiida')
            with tarfile.open(filename_corrupt, 'w:gz', format=tarfile.PAX_FORMAT, dereference=True) as tar:
                tar.add(folder.abspath, arcname='')

        # Try to import, check it raises and check the raise message
        with self.assertRaises(exceptions.CorruptArchive) as exc:
            import_data(filename_corrupt, silent=True)

        self.assertIn(
            'Unable to find the repository folder for Node with UUID={}'.format(node_uuid), str(exc.exception)
        )
Example #12
    def test_dangling_link_to_existing_db_node(self, temp_dir):
        """A dangling link that references a Node that is not included in the archive should `not` be importable"""
        struct = orm.StructureData()
        struct.store()
        struct_uuid = struct.uuid

        calc = orm.CalculationNode()
        calc.add_incoming(struct, LinkType.INPUT_CALC, 'input')
        calc.store()
        calc.seal()
        calc_uuid = calc.uuid

        filename = os.path.join(temp_dir, 'export.aiida')
        export([struct], filename=filename, file_format='tar.gz')

        unpack = SandboxFolder()
        with tarfile.open(filename, 'r:gz', format=tarfile.PAX_FORMAT) as tar:
            tar.extractall(unpack.abspath)

        with open(unpack.get_abs_path('data.json'), 'r',
                  encoding='utf8') as fhandle:
            data = json.load(fhandle)
        data['links_uuid'].append({
            'output': calc.uuid,
            'input': struct.uuid,
            'label': 'input',
            'type': LinkType.INPUT_CALC.value
        })

        with open(unpack.get_abs_path('data.json'), 'w', encoding='utf8') as fhandle:
            json.dump(data, fhandle)

        with tarfile.open(filename, 'w:gz', format=tarfile.PAX_FORMAT) as tar:
            tar.add(unpack.abspath, arcname='')

        # Make sure the CalculationNode is still in the database
        builder = orm.QueryBuilder().append(orm.CalculationNode,
                                            project='uuid')
        self.assertEqual(
            builder.count(),
            1,
            msg=
            f'There should be a single CalculationNode, instead {builder.count()} has been found'
        )
        self.assertEqual(builder.all()[0][0], calc_uuid)

        with self.assertRaises(DanglingLinkError):
            import_data(filename)

        # Using the flag `ignore_unknown_nodes` should import it without problems
        import_data(filename, ignore_unknown_nodes=True)
        builder = orm.QueryBuilder().append(orm.StructureData, project='uuid')
        self.assertEqual(
            builder.count(),
            1,
            msg=
            f'There should be a single StructureData, instead {builder.count()} has been found'
        )
        self.assertEqual(builder.all()[0][0], struct_uuid)
Example #13
    def test_mtime_of_imported_comments(self, temp_dir):
        """
        Test mtime does not change for imported comments
        This is related to correct usage of `comment_mode` when importing.
        """
        # Get user
        user = orm.User.objects.get_default()

        comment_content = 'You get what you give'

        # Create node
        calc = orm.CalculationNode().store()
        calc.seal()

        # Create comment
        orm.Comment(calc, user, comment_content).store()
        calc.store()

        # Save UUIDs and mtime
        calc_uuid = calc.uuid
        builder = orm.QueryBuilder().append(orm.Comment, project=['uuid', 'mtime']).all()
        comment_uuid = str(builder[0][0])
        comment_mtime = builder[0][1]

        builder = orm.QueryBuilder().append(orm.CalculationNode, project=['uuid', 'mtime']).all()
        calc_uuid = str(builder[0][0])
        calc_mtime = builder[0][1]

        # Export, reset database and reimport
        export_file = os.path.join(temp_dir, 'export.aiida')
        export([calc], filename=export_file, silent=True)
        self.reset_database()
        import_data(export_file, silent=True)

        # Retrieve node and comment
        builder = orm.QueryBuilder().append(orm.CalculationNode, tag='calc', project=['uuid', 'mtime'])
        builder.append(orm.Comment, with_node='calc', project=['uuid', 'mtime'])

        import_entities = builder.all()[0]

        self.assertEqual(len(import_entities), 4)  # Check we have the correct number of returned values

        import_calc_uuid = str(import_entities[0])
        import_calc_mtime = import_entities[1]
        import_comment_uuid = str(import_entities[2])
        import_comment_mtime = import_entities[3]

        # Check we have the correct UUIDs
        self.assertEqual(import_calc_uuid, calc_uuid)
        self.assertEqual(import_comment_uuid, comment_uuid)

        # Make sure the mtime is the same after import as it was before export
        self.assertEqual(import_comment_mtime, comment_mtime)
        self.assertEqual(import_calc_mtime, calc_mtime)
Example #14
    def test_add_remove_nodes(self):
        """Test `verdi group remove-nodes` command."""
        node_01 = orm.CalculationNode().store()
        node_02 = orm.CalculationNode().store()
        node_03 = orm.CalculationNode().store()

        result = self.cli_runner.invoke(
            cmd_group.group_add_nodes,
            ['--force', '--group=dummygroup1', node_01.uuid])
        self.assertClickResultNoException(result)

        # Check that the node was added to the group using the `group show` command
        result = self.cli_runner.invoke(cmd_group.group_show, ['dummygroup1'])
        self.assertClickResultNoException(result)
        self.assertIn('CalculationNode', result.output)
        self.assertIn(str(node_01.pk), result.output)

        # Remove same node
        result = self.cli_runner.invoke(
            cmd_group.group_remove_nodes,
            ['--force', '--group=dummygroup1', node_01.uuid])
        self.assertIsNone(result.exception, result.output)

        # Check that the node was removed from the group using the `group show` command
        result = self.cli_runner.invoke(cmd_group.group_show,
                                        ['-r', 'dummygroup1'])
        self.assertClickResultNoException(result)
        self.assertNotIn('CalculationNode', result.output)
        self.assertNotIn(str(node_01.pk), result.output)

        # Add all three nodes and then use `verdi group remove-nodes --clear` to remove them all
        group = orm.load_group(label='dummygroup1')
        group.add_nodes([node_01, node_02, node_03])
        self.assertEqual(group.count(), 3)

        result = self.cli_runner.invoke(
            cmd_group.group_remove_nodes,
            ['--force', '--clear', '--group=dummygroup1'])
        self.assertClickResultNoException(result)
        self.assertEqual(group.count(), 0)
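
Aside: a hedged sketch of the ORM-level operations these commands wrap (`Group.clear()` removes all nodes at once, mirroring `--clear`):

    group = orm.load_group(label='dummygroup1')
    group.add_nodes([node_01])
    group.remove_nodes([node_01])
    group.clear()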
Example #15
    def test_copy_existing_group(self):
        """Test user is prompted to continue if destination group exists and is not empty"""
        source_label = 'source_copy_existing_group'
        dest_label = 'dest_copy_existing_group'

        # Create source group with nodes
        calc_s1 = orm.CalculationNode().store()
        calc_s2 = orm.CalculationNode().store()
        nodes_source_group = {str(node.uuid) for node in [calc_s1, calc_s2]}
        source_group = orm.Group(label=source_label).store()
        source_group.add_nodes([calc_s1, calc_s2])

        # Copy using `verdi group copy` - making sure all is successful
        options = [source_label, dest_label]
        result = self.cli_runner.invoke(cmd_group.group_copy, options)
        self.assertClickResultNoException(result)
        self.assertIn(
            'Success: Nodes copied from group<{}> to group<{}>'.format(
                source_label, dest_label), result.output, result.exception)

        # Check destination group exists with source group's nodes
        dest_group = orm.load_group(label=dest_label)
        self.assertEqual(dest_group.count(), 2)
        nodes_dest_group = {str(node.uuid) for node in dest_group.nodes}
        self.assertSetEqual(nodes_source_group, nodes_dest_group)

        # Copy again, making sure an abort error is raised, since no user input can be made and default is abort
        result = self.cli_runner.invoke(cmd_group.group_copy, options)
        self.assertIsNotNone(result.exception, result.output)
        self.assertIn(
            'Warning: Destination group<{}> already exists and is not empty.'.
            format(dest_label), result.output, result.exception)

        # Check destination group is unchanged
        dest_group = orm.load_group(label=dest_label)
        self.assertEqual(dest_group.count(), 2)
        nodes_dest_group = {str(node.uuid) for node in dest_group.nodes}
        self.assertSetEqual(nodes_source_group, nodes_dest_group)
Example #16
    def test_calc_and_data_nodes_with_comments(self, temp_dir):
        """ Test comments for CalculatioNode and Data node are correctly ex-/imported """
        # Create user, nodes, and comments
        user = orm.User.objects.get_default()

        calc_node = orm.CalculationNode().store()
        calc_node.seal()
        data_node = orm.Data().store()

        comment_one = orm.Comment(calc_node, user, self.comments[0]).store()
        comment_two = orm.Comment(calc_node, user, self.comments[1]).store()

        comment_three = orm.Comment(data_node, user, self.comments[2]).store()
        comment_four = orm.Comment(data_node, user, self.comments[3]).store()

        # Get values prior to export
        calc_uuid = calc_node.uuid
        data_uuid = data_node.uuid
        calc_comments_uuid = [c.uuid for c in [comment_one, comment_two]]
        data_comments_uuid = [c.uuid for c in [comment_three, comment_four]]

        # Export nodes
        export_file = os.path.join(temp_dir, 'export.tar.gz')
        export([calc_node, data_node], outfile=export_file, silent=True)

        # Clean database and reimport exported file
        self.reset_database()
        import_data(export_file, silent=True)

        # Get nodes and comments
        builder = orm.QueryBuilder()
        builder.append(orm.Node, tag='node', project=['uuid'])
        builder.append(orm.Comment, with_node='node', project=['uuid'])
        nodes_and_comments = builder.all()

        self.assertEqual(len(nodes_and_comments), len(self.comments))
        for entry in nodes_and_comments:
            self.assertEqual(len(entry), 2)  # 1 Node + 1 Comment

            import_node_uuid = str(entry[0])
            import_comment_uuid = str(entry[1])

            self.assertIn(import_node_uuid, [calc_uuid, data_uuid])
            if import_node_uuid == calc_uuid:
                # Calc node comments
                self.assertIn(import_comment_uuid, calc_comments_uuid)
            else:
                # Data node comments
                self.assertIn(import_comment_uuid, data_comments_uuid)
Example #17
    def test_get_node_summary(self):
        """Test the `get_node_summary` utility."""
        from aiida.cmdline.utils.common import get_node_summary

        computer_label = self.computer.name  # pylint: disable=no-member

        code = orm.Code(
            input_plugin_name='arithmetic.add',
            remote_computer_exec=[self.computer, '/remote/abs/path'],
        )
        code.store()

        node = orm.CalculationNode()
        node.computer = self.computer
        node.add_incoming(code,
                          link_type=LinkType.INPUT_CALC,
                          link_label='code')
        node.store()

        summary = get_node_summary(node)
        self.assertIn(node.uuid, summary)
        self.assertIn(computer_label, summary)
Example #18
    def test_create_log_unserializable_metadata(self):
        """Test that unserializable data will be removed before reaching the database causing an error."""
        import functools

        def unbound_method(argument):
            return argument

        partial = functools.partial(unbound_method, 'argument')

        node = orm.CalculationNode().store()

        # An unbound method in the `args` of the metadata
        node.logger.error('problem occurred in method %s', unbound_method)

        # A partial in the `args` of the metadata
        node.logger.error('problem occurred in partial %s', partial)

        # An exception which will include an `exc_info` object
        try:
            raise ValueError
        except ValueError:
            node.logger.exception('caught an exception')

        self.assertEqual(len(Log.objects.all()), 3)
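
A hedged follow-up check one could append to this test: after sanitization, the stored metadata must be JSON-serializable (assumes the same `Log` import as the test above):

    import json

    for log in Log.objects.all():
        json.dumps(log.metadata)  # must not raise TypeError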
Example #19
    def test_db_log_handler(self):
        """
        Verify that the db log handler is attached correctly
        by firing a log message through the regular logging module
        attached to a calculation node
        """
        from aiida.orm.logs import OrderSpecifier, ASCENDING

        message = 'Testing logging of critical failure'
        node = orm.CalculationNode()

        # Firing a log for an unstored node should not end up in the database
        node.logger.critical(message)

        logs = Log.objects.find()

        self.assertEqual(len(logs), 0)

        # After storing the node, logs above log level should be stored
        node.store()
        node.logger.critical(message)
        logs = Log.objects.find()

        self.assertEqual(len(logs), 1)
        self.assertEqual(logs[0].message, message)

        # Launching a second log message ensuring that both messages are correctly stored
        message2 = message + ' - Second message'
        node.logger.critical(message2)

        order_by = [OrderSpecifier('time', ASCENDING)]
        logs = Log.objects.find(order_by=order_by)

        self.assertEqual(len(logs), 2)
        self.assertEqual(logs[0].message, message)
        self.assertEqual(logs[1].message, message2)
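
Aside: `Log.objects.find` also accepts `filters`, e.g. to restrict the query to the logs of a single node (a hedged sketch using the `node` from the test above):

    logs_for_node = Log.objects.find(filters={'dbnode_id': node.pk})
    assert len(logs_for_node) == 2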
Example #20
    def test_reimport_of_comments_for_single_node(self, temp_dir):
        """
        When a node with comments already exists in the DB, and more comments are
        imported for the same node (same UUID), test that only new comment entries
        are added.

        Part I:
        Create CalculationNode and 1 Comment for it.
        Export CalculationNode with its 1 Comment to export file #1 "EXISTING database".
        Add 3 Comments to CalculationNode.
        Export CalculationNode with its 4 Comments to export file #2 "FULL database".
        Reset database.

        Part II:
        Reimport export file #1 "EXISTING database".
        Add 3 Comments to CalculationNode.
        Export CalculationNode with its 4 Comments to export file #3 "NEW database".
        Reset database.

        Part III:
        Reimport export file #1 "EXISTING database" (1 CalculationNode, 1 Comment).
        Import export file #2 "FULL database" (1 CalculationNode, 4 Comments).
        Check the database EXACTLY contains 1 CalculationNode and 4 Comments,
        with matching UUIDS all the way through all previous Parts.

        Part IV:
        Import export file #3 "NEW database" (1 CalculationNode, 4 Comments).
        Check the database EXACTLY contains 1 CalculationNode and 7 Comments,
        with matching UUIDS all the way through all previous Parts.
        NB! There should now be 7 Comments in the database, 6 of which are
        identical in pairs, except for their UUIDs.
        """
        export_filenames = {
            'EXISTING': 'export_EXISTING_db.tar.gz',
            'FULL': 'export_FULL_db.tar.gz',
            'NEW': 'export_NEW_db.tar.gz'
        }

        # Get user
        # Will have to do this again after resetting the DB
        user = orm.User.objects.get_default()

        ## Part I
        # Create node and save UUID
        calc = orm.CalculationNode()
        calc.store()
        calc.seal()
        calc_uuid = calc.uuid

        # Create first comment
        orm.Comment(calc, user, self.comments[0]).store()

        # There should be exactly: 1 CalculationNode, 1 Comment
        export_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        export_comments = orm.QueryBuilder().append(orm.Comment,
                                                    project=['uuid'])
        self.assertEqual(export_calcs.count(), 1)
        self.assertEqual(export_comments.count(), 1)

        # Save Comment UUID before export
        existing_comment_uuids = [str(export_comments.all()[0][0])]

        # Export "EXISTING" DB
        export_file_existing = os.path.join(temp_dir,
                                            export_filenames['EXISTING'])
        export([calc], outfile=export_file_existing, silent=True)

        # Add remaining Comments
        for comment in self.comments[1:]:
            orm.Comment(calc, user, comment).store()

        # There should be exactly: 1 CalculationNode, 4 Comments (len(self.comments))
        export_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        export_comments = orm.QueryBuilder().append(orm.Comment,
                                                    project=['uuid'])
        self.assertEqual(export_calcs.count(), 1)
        self.assertEqual(export_comments.count(), len(self.comments))

        # Save Comment UUIDs before export, there should be 4 UUIDs in total (len(self.comments))
        full_comment_uuids = set(existing_comment_uuids)
        for comment_uuid in export_comments.all():
            full_comment_uuids.add(str(comment_uuid[0]))
        self.assertEqual(len(full_comment_uuids), len(self.comments))

        # Export "FULL" DB
        export_file_full = os.path.join(temp_dir, export_filenames['FULL'])
        export([calc], outfile=export_file_full, silent=True)

        # Clean database
        self.reset_database()

        ## Part II
        # Reimport "EXISTING" DB
        import_data(export_file_existing, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 1 Comment
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_comments = orm.QueryBuilder().append(orm.Comment,
                                                    project=['uuid'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_comments.count(), 1)
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        self.assertIn(str(import_comments.all()[0][0]), existing_comment_uuids)

        # Add remaining Comments (again)
        calc = orm.load_node(
            import_calcs.all()[0][0])  # Reload CalculationNode
        user = orm.User.objects.get_default()  # Get user - again
        for comment in self.comments[1:]:
            orm.Comment(calc, user, comment).store()

        # There should be exactly: 1 CalculationNode, 4 Comments (len(self.comments))
        export_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        export_comments = orm.QueryBuilder().append(orm.Comment,
                                                    project=['uuid'])
        self.assertEqual(export_calcs.count(), 1)
        self.assertEqual(export_comments.count(), len(self.comments))

        # Save Comment UUIDs before export, there should be 4 UUIDs in total (len(self.comments))
        new_comment_uuids = set(existing_comment_uuids)
        for comment_uuid in export_comments.all():
            new_comment_uuids.add(str(comment_uuid[0]))
        self.assertEqual(len(new_comment_uuids), len(self.comments))

        # Export "NEW" DB
        export_file_new = os.path.join(temp_dir, export_filenames['NEW'])
        export([calc], outfile=export_file_new, silent=True)

        # Clean database
        self.reset_database()

        ## Part III
        # Reimport "EXISTING" DB
        import_data(export_file_existing, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 1 Comment
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_comments = orm.QueryBuilder().append(orm.Comment,
                                                    project=['uuid'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_comments.count(), 1)
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        self.assertIn(str(import_comments.all()[0][0]), existing_comment_uuids)

        # Import "FULL" DB
        import_data(export_file_full, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 4 Comments (len(self.comments))
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_comments = orm.QueryBuilder().append(orm.Comment,
                                                    project=['uuid'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_comments.count(), len(self.comments))
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        for comment in import_comments.all():
            comment_uuid = str(comment[0])
            self.assertIn(comment_uuid, full_comment_uuids)

        ## Part IV
        # Import "NEW" DB
        import_data(export_file_new, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 7 Comments (original 1 + 2 x 3 added Comments)
        # 6 of the comments are identical in pairs, except for their UUIDs.
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_comments = orm.QueryBuilder().append(
            orm.Comment, project=['uuid', 'content'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_comments.count(), 7)
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        total_comment_uuids = full_comment_uuids.copy()
        total_comment_uuids.update(new_comment_uuids)
        for comment in import_comments.all():
            comment_uuid = str(comment[0])
            comment_content = str(comment[1])
            self.assertIn(comment_uuid, total_comment_uuids)
            self.assertIn(comment_content, self.comments)
Example #21
    def test_reexport(self, temp_dir):
        """
        Export something, import and re-export and check if everything is valid.
        The exported graph is rather simple::

            ___       ___          ___
           |   | INP |   | CREATE |   |
           | p | --> | c | -----> | a |
           |___|     |___|        |___|

        """
        import numpy as np
        import string
        import random
        from datetime import datetime

        from aiida.common.hashing import make_hash

        def get_hash_from_db_content(grouplabel):
            """Helper function to get hash"""
            builder = orm.QueryBuilder()
            builder.append(orm.Dict, tag='param', project='*')
            builder.append(orm.CalculationNode,
                           tag='calc',
                           project='*',
                           edge_tag='p2c',
                           edge_project=('label', 'type'))
            builder.append(orm.ArrayData,
                           tag='array',
                           project='*',
                           edge_tag='c2a',
                           edge_project=('label', 'type'))
            builder.append(orm.Group,
                           filters={'label': grouplabel},
                           project='*',
                           tag='group',
                           with_node='array')
            # I want the query to contain something!
            self.assertTrue(builder.count() > 0)
            # The hash is given from the preservable entries in an export-import cycle,
            # uuids, attributes, labels, descriptions, arrays, link-labels, link-types:
            hash_ = make_hash([(
                item['param']['*'].attributes,
                item['param']['*'].uuid,
                item['param']['*'].label,
                item['param']['*'].description,
                item['calc']['*'].uuid,
                item['calc']['*'].attributes,
                item['array']['*'].attributes,
                [
                    item['array']['*'].get_array(name).tolist()
                    for name in item['array']['*'].get_arraynames()
                ],
                item['array']['*'].uuid,
                item['group']['*'].uuid,
                item['group']['*'].label,
                item['p2c']['label'],
                item['p2c']['type'],
                item['c2a']['label'],
                item['c2a']['type'],
                item['group']['*'].label,
            ) for item in builder.dict()])
            return hash_

        # Parameters for generating random test content
        chars = string.ascii_uppercase + string.digits
        size = 10
        grouplabel = 'test-group'

        nparr = np.random.random((4, 3, 2))  # pylint: disable=no-member
        trial_dict = {}
        # give some integers:
        trial_dict.update({str(k): np.random.randint(100) for k in range(10)})
        # give some floats:
        trial_dict.update({str(k): np.random.random() for k in range(10, 20)})  # pylint: disable=no-member
        # give some booleans:
        trial_dict.update(
            {str(k): bool(np.random.randint(2))
             for k in range(20, 30)})
        # give some text:
        trial_dict.update({
            str(k): ''.join(random.choice(chars) for _ in range(size))
            for k in range(30, 40)
        })

        param = orm.Dict(dict=trial_dict)
        param.label = str(datetime.now())
        param.description = 'd_' + str(datetime.now())
        param.store()
        calc = orm.CalculationNode()
        # also set the trial dict as attributes, but randomize the keys
        for key, value in trial_dict.items():
            calc.set_attribute(str(int(key) + np.random.randint(10)), value)
        array = orm.ArrayData()
        array.set_array('array', nparr)
        array.store()
        # LINKS
        # the calculation has input the parameters-instance
        calc.add_incoming(param,
                          link_type=LinkType.INPUT_CALC,
                          link_label='input_parameters')
        calc.store()
        # I want the array to be an output of the calculation
        array.add_incoming(calc,
                           link_type=LinkType.CREATE,
                           link_label='output_array')
        group = orm.Group(label=grouplabel)
        group.store()
        group.add_nodes(array)

        calc.seal()

        hash_from_dbcontent = get_hash_from_db_content(grouplabel)

        # I export and reimport 3 times in a row:
        for i in range(3):
            # Always new filename:
            filename = os.path.join(temp_dir, 'export-{}.aiida'.format(i))
            # Loading the group from the string
            group = orm.Group.get(label=grouplabel)
            # exporting based on all members of the group
            # this also checks if group memberships are preserved!
            export([group] + list(group.nodes), filename=filename, silent=True)
            # cleaning the DB!
            self.clean_db()
            self.create_user()
            # reimporting the data from the file
            import_data(filename, silent=True, ignore_unknown_nodes=True)
            # creating the hash from db content
            new_hash = get_hash_from_db_content(grouplabel)
            # I check for equality against the first hash created, which implies that hashes
            # are equal in all iterations of this process
            self.assertEqual(hash_from_dbcontent, new_hash)
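
Aside on `make_hash`: it deterministically digests nested Python structures (dicts, lists, scalars), which is what makes the export/import comparison above possible. A minimal sketch:

    from aiida.common.hashing import make_hash

    assert make_hash([1, 'a', {'b': 2.0}]) == make_hash([1, 'a', {'b': 2.0}])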
Example #22
def test_link_manager(clear_database_before_test):
    """Test the LinkManager via .inputs and .outputs from a ProcessNode."""
    # First create a calculation with two inputs and two outputs
    # (a third data node is stored but never linked)

    # Create inputs
    inp1 = orm.Data()
    inp1.store()
    inp2 = orm.Data()
    inp2.store()
    inp3 = orm.Data()
    inp3.store()

    # Create calc with inputs
    calc = orm.CalculationNode()
    calc.add_incoming(inp1, link_type=LinkType.INPUT_CALC, link_label='inp1label')
    calc.add_incoming(inp2, link_type=LinkType.INPUT_CALC, link_label='inp2label')
    calc.store()

    # Attach outputs
    out1 = orm.Data()
    out2 = orm.Data()
    out1.add_incoming(calc, link_type=LinkType.CREATE, link_label='out1label')
    out1.store()
    out2.add_incoming(calc, link_type=LinkType.CREATE, link_label='out2label')
    out2.store()

    expected_inputs = {'inp1label': inp1.uuid, 'inp2label': inp2.uuid}
    expected_outputs = {'out1label': out1.uuid, 'out2label': out2.uuid}

    #### Check the 'inputs' manager ###
    # Check that dir() returns all keys and nothing else, important
    # for tab completion (we skip anything that starts with an underscore)
    assert len([key for key in dir(calc.inputs) if not key.startswith('_')]) == len(expected_inputs)
    assert set(key for key in dir(calc.inputs) if not key.startswith('_')) == set(expected_inputs)
    # Check that it works also as an iterator
    assert len(list(calc.inputs)) == len(expected_inputs)
    assert set(calc.inputs) == set(expected_inputs)

    for key, val in expected_inputs.items():
        # calc.inputs.a.uuid == ..., ...
        assert getattr(calc.inputs, key).uuid == val
        # calc.inputs['a'].uuid == ..., ...
        assert calc.inputs[key].uuid == val

    # I check the attribute fetching directly
    assert calc.inputs.inp1label.uuid == expected_inputs['inp1label']

    ## Check for not-existing links
    # - Must raise an AttributeError, otherwise tab completion will not work
    # - Actually raises a NotExistentAttributeError
    # - NotExistentAttributeError should also be caught by NotExistent,
    #   for backwards-compatibility for AiiDA 1.0, 1.1, 1.2
    for exception in [AttributeError, NotExistent, NotExistentAttributeError]:
        with pytest.raises(exception):
            getattr(calc.inputs, 'NotExistentLabel')

    # - Must raise a KeyError to behave like a dictionary
    # - Actually raises a NotExistentKeyError
    # - NotExistentKeyError should also be caught by NotExistent,
    #   for backwards-compatibility for AiiDA 1.0, 1.1, 1.2
    for exception in [KeyError, NotExistent, NotExistentKeyError]:
        with pytest.raises(exception):
            _ = calc.inputs['NotExistentLabel']

    #### Check the 'outputs' manager ###
    # Check that dir() returns all keys and nothing else, important
    # for tab completion (we skip anything that starts with an underscore)
    assert len([key for key in dir(calc.outputs) if not key.startswith('_')]) == len(expected_outputs)
    assert set(key for key in dir(calc.outputs) if not key.startswith('_')) == set(expected_outputs)
    # Check that it works also as an iterator
    assert len(list(calc.outputs)) == len(expected_outputs)
    assert set(calc.outputs) == set(expected_outputs)

    for key, val in expected_outputs.items():
        # calc.outputs.a.uuid == ..., ...
        assert getattr(calc.outputs, key).uuid == val
        # calc.outputs['a'].uuid == ..., ...
        assert calc.outputs[key].uuid == val

    # I check the attribute fetching directly
    assert calc.outputs.out1label.uuid == expected_outputs['out1label']

    # Must raise an AttributeError, otherwise tab completion will not work
    with pytest.raises(AttributeError):
        getattr(calc.outputs, 'NotExistentLabel')

    # Must raise a KeyError
    with pytest.raises(KeyError):
        _ = calc.outputs['NotExistentLabel']
Example #23
    def test_cycle_structure_data(self):
        """
        Create an export with some orm.CalculationNode and Data nodes and import it after having
        cleaned the database. Verify that the nodes and their attributes are restored
        properly after importing the created export archive
        """
        from aiida.common.links import LinkType

        test_label = 'Test structure'
        test_cell = [[8.34, 0.0, 0.0],
                     [0.298041701839357, 8.53479766274308, 0.0],
                     [0.842650688117053, 0.47118495164127, 10.6965192730702]]
        test_kinds = [{
            'symbols': ['Fe'],
            'weights': [1.0],
            'mass': 55.845,
            'name': 'Fe'
        }, {
            'symbols': ['S'],
            'weights': [1.0],
            'mass': 32.065,
            'name': 'S'
        }]

        structure = orm.StructureData(cell=test_cell)
        structure.append_atom(symbols=['Fe'], position=[0, 0, 0])
        structure.append_atom(symbols=['S'], position=[2, 2, 2])
        structure.label = test_label
        structure.store()

        parent_process = orm.CalculationNode()
        parent_process.set_attribute('key', 'value')
        parent_process.store()
        child_calculation = orm.CalculationNode()
        child_calculation.set_attribute('key', 'value')
        remote_folder = orm.RemoteData(computer=self.computer,
                                       remote_path='/').store()

        remote_folder.add_incoming(parent_process,
                                   link_type=LinkType.CREATE,
                                   link_label='link')
        child_calculation.add_incoming(remote_folder,
                                       link_type=LinkType.INPUT_CALC,
                                       link_label='link')
        child_calculation.store()
        structure.add_incoming(child_calculation,
                               link_type=LinkType.CREATE,
                               link_label='link')

        parent_process.seal()
        child_calculation.seal()

        with tempfile.NamedTemporaryFile() as handle:

            nodes = [
                structure, child_calculation, parent_process, remote_folder
            ]
            export(nodes, outfile=handle.name, overwrite=True, silent=True)

            # Check that we have the expected number of nodes in the database
            self.assertEqual(orm.QueryBuilder().append(orm.Node).count(),
                             len(nodes))

            # Clean the database and verify there are no nodes left
            self.clean_db()
            self.create_user()
            self.assertEqual(orm.QueryBuilder().append(orm.Node).count(), 0)

            # After importing we should have the original number of nodes again
            import_data(handle.name, silent=True)
            self.assertEqual(orm.QueryBuilder().append(orm.Node).count(),
                             len(nodes))

            # Verify that orm.CalculationNodes have non-empty attribute dictionaries
            builder = orm.QueryBuilder().append(orm.CalculationNode)
            for [calculation] in builder.iterall():
                self.assertIsInstance(calculation.attributes, dict)
                self.assertNotEqual(len(calculation.attributes), 0)

            # Verify that the structure data maintained its label, cell and kinds
            builder = orm.QueryBuilder().append(orm.StructureData)
            for [structure] in builder.iterall():
                self.assertEqual(structure.label, test_label)
                # Check that they are almost the same, within numerical precision
                self.assertTrue(
                    np.abs(np.array(structure.cell) -
                           np.array(test_cell)).max() < 1.e-12)

            builder = orm.QueryBuilder().append(orm.StructureData,
                                                project=['attributes.kinds'])
            for [kinds] in builder.iterall():
                self.assertEqual(len(kinds), 2)
                for kind in kinds:
                    self.assertIn(kind, test_kinds)

            # Check that there is a StructureData that is an output of a orm.CalculationNode
            builder = orm.QueryBuilder()
            builder.append(orm.CalculationNode,
                           project=['uuid'],
                           tag='calculation')
            builder.append(orm.StructureData, with_incoming='calculation')
            self.assertGreater(len(builder.all()), 0)

            # Check that there is a RemoteData that is a child and parent of a orm.CalculationNode
            builder = orm.QueryBuilder()
            builder.append(orm.CalculationNode, tag='parent')
            builder.append(orm.RemoteData,
                           project=['uuid'],
                           with_incoming='parent',
                           tag='remote')
            builder.append(orm.CalculationNode, with_incoming='remote')
            self.assertGreater(len(builder.all()), 0)
Example #24
    def _create_basic_graph():
        """
        Creates a basic graph which has one parent workflow (work_2) that calls
        a child workflow (work_1) which calls a calculation function (calc_0).
        There is one input (data_i) and one output (data_o). It has at least one
        link of each class:

        * CALL_WORK from work_2 to work_1.
        * CALL_CALC from work_1 to calc_0.
        * INPUT_CALC from data_i to calc_0 and CREATE from calc_0 to data_o.
        * INPUT_WORK from data_i to work_1 and RETURN from work_1 to data_o.
        * INPUT_WORK from data_i to work_2 and RETURN from work_2 to data_o.

        This graph looks like this::

                     input_work      +--------+       return
               +-------------------> | work_2 | --------------------+
               |                     +--------+                     |
               |                         |                          |
               |                         | call_work                |
               |                         |                          |
               |                         v                          |
               |       input_work    +--------+      return         |
               |  +----------------> | work_1 | -----------------+  |
               |  |                  +--------+                  |  |
               |  |                      |                       |  |
               |  |                      | call_calc             |  |
               |  |                      |                       |  |
               |  |                      v                       v  v
            +--------+  input_calc   +--------+    create     +--------+
            | data_i | ------------> | calc_0 | ------------> | data_o |
            +--------+               +--------+               +--------+
        """
        data_i = orm.Data().store()

        work_2 = orm.WorkflowNode()
        work_2.add_incoming(data_i, link_type=LinkType.INPUT_WORK, link_label='inpwork2')
        work_2.store()

        work_1 = orm.WorkflowNode()
        work_1.add_incoming(data_i, link_type=LinkType.INPUT_WORK, link_label='inpwork1')
        work_1.add_incoming(work_2, link_type=LinkType.CALL_WORK, link_label='callwork')
        work_1.store()

        calc_0 = orm.CalculationNode()
        calc_0.add_incoming(data_i, link_type=LinkType.INPUT_CALC, link_label='inpcalc0')
        calc_0.add_incoming(work_1, link_type=LinkType.CALL_CALC, link_label='callcalc')
        calc_0.store()

        data_o = orm.Data()
        data_o.add_incoming(calc_0, link_type=LinkType.CREATE, link_label='create0')
        data_o.store()
        # RETURN links may only be attached to stored nodes, hence the ordering:
        # store data_o first, then add the RETURN links from both workflows.
        data_o.add_incoming(work_2, link_type=LinkType.RETURN, link_label='return2')
        data_o.add_incoming(work_1, link_type=LinkType.RETURN, link_label='return1')

        output_dict = {
            'data_i': data_i,
            'data_o': data_o,
            'calc_0': calc_0,
            'work_1': work_1,
            'work_2': work_2,
        }
        return output_dict
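
A minimal usage sketch for the helper above (hypothetical calling code): walk from the top-level workflow down to the calculation through the two CALL links and check that exactly calc_0 is found.

    graph = _create_basic_graph()

    builder = orm.QueryBuilder()
    builder.append(orm.WorkflowNode, filters={'id': graph['work_2'].pk}, tag='top')
    builder.append(orm.WorkflowNode, with_incoming='top', tag='sub')  # follows CALL_WORK
    builder.append(orm.CalculationNode, with_incoming='sub', project=['uuid'])  # follows CALL_CALC
    assert builder.one()[0] == graph['calc_0'].uuid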
Example #25
    def test_multiple_post_return_links(self, temp_dir):  # pylint: disable=too-many-locals
        """Check extra RETURN links can be added to existing Nodes, when label is not unique"""
        data = orm.Int(1).store()
        calc = orm.CalculationNode().store()
        work = orm.WorkflowNode().store()
        link_label = 'output_data'

        data.add_incoming(calc, LinkType.CREATE, link_label)
        data.add_incoming(work, LinkType.RETURN, link_label)

        calc.seal()
        work.seal()

        data_uuid = data.uuid
        calc_uuid = calc.uuid
        work_uuid = work.uuid
        before_links = get_all_node_links()

        data_provenance = os.path.join(temp_dir, 'data.aiida')
        all_provenance = os.path.join(temp_dir, 'all.aiida')

        export([data], filename=data_provenance, return_backward=False)
        export([data], filename=all_provenance, return_backward=True)

        self.clean_db()

        # import data provenance
        import_data(data_provenance)

        no_of_work = orm.QueryBuilder().append(orm.WorkflowNode).count()
        self.assertEqual(
            no_of_work,
            0,
            msg=
            f'{no_of_work} WorkflowNode(s) was/were found, however, none should be present'
        )

        nodes = orm.QueryBuilder().append(orm.Node, project='uuid')
        self.assertEqual(
            nodes.count(),
            2,
            msg=
            f'{nodes.count()} Node(s) was/were found, however, exactly two should be present'
        )
        for node in nodes.iterall():
            self.assertIn(node[0], [data_uuid, calc_uuid])

        links = get_all_node_links()
        self.assertEqual(
            len(links),
            1,
            msg='Only a single Link (from Calc. to Data) is expected, '
            'instead {} were found (in, out, label, type): {}'.format(
                len(links), links))
        for from_uuid, to_uuid, found_label, found_type in links:
            self.assertEqual(from_uuid, calc_uuid)
            self.assertEqual(to_uuid, data_uuid)
            self.assertEqual(found_label, link_label)
            self.assertEqual(found_type, LinkType.CREATE.value)

        # import data+logic provenance
        import_data(all_provenance)

        no_of_work = orm.QueryBuilder().append(orm.WorkflowNode).count()
        self.assertEqual(
            no_of_work,
            1,
            msg=
            f'{no_of_work} WorkflowNode(s) was/were found, however, exactly one should be present'
        )

        nodes = orm.QueryBuilder().append(orm.Node, project='uuid')
        self.assertEqual(
            nodes.count(),
            3,
            msg=
            f'{nodes.count()} Node(s) was/were found, however, exactly three should be present'
        )
        for node in nodes.iterall():
            self.assertIn(node[0], [data_uuid, calc_uuid, work_uuid])

        links = get_all_node_links()
        self.assertEqual(
            len(links),
            2,
            msg=
            f'Exactly two Links are expected, instead {len(links)} were found (in, out, label, type): {links}'
        )
        self.assertListEqual(sorted(links), sorted(before_links))
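
get_all_node_links is a helper from the test utilities; a rough stand-in (an assumption, not the actual implementation) can be written with a single edge-projecting query:

    def get_all_node_links():
        """Return every link as an (input_uuid, output_uuid, label, type) row."""
        builder = orm.QueryBuilder()
        builder.append(orm.Node, project='uuid', tag='input')
        builder.append(orm.Node, project='uuid', with_incoming='input',
                       edge_project=['label', 'type'])
        return builder.all()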
Example #26
    def test_reimport_of_logs_for_single_node(self, temp_dir):
        """
        When a node with logs already exist in the DB, and more logs are imported
        for the same node (same UUID), test that only new log-entries are added.

        Part I:
        Create CalculationNode and 1 Log for it.
        Export CalculationNode with its 1 Log to export file #1 "EXISTING database".
        Add 2 Logs to CalculationNode.
        Export CalculationNode with its 3 Logs to export file #2 "FULL database".
        Reset database.

        Part II:
        Reimport export file #1 "EXISTING database".
        Add 2 Logs to CalculationNode (different UUID than for "FULL database").
        Export CalculationNode with its 3 Logs to export file #3 "NEW database".
        Reset database.

        Part III:
        Reimport export file #1 "EXISTING database" (1 CalculationNode, 1 Log).
        Import export file #2 "FULL database" (1 CalculationNode, 3 Logs).
        Check that the database contains EXACTLY 1 CalculationNode and 3 Logs,
        with UUIDs matching all the way through all previous Parts.

        Part IV:
        Import export file #3 "NEW database" (1 CalculationNode, 3 Logs).
        Check that the database contains EXACTLY 1 CalculationNode and 5 Logs,
        with UUIDs matching all the way through all previous Parts.
        NB! There should now be 5 Logs in the database, 4 of which are identical
        in pairs except for their UUIDs.
        """
        export_filenames = {
            'EXISTING': 'export_EXISTING_db.tar.gz',
            'FULL': 'export_FULL_db.tar.gz',
            'NEW': 'export_NEW_db.tar.gz'
        }

        log_msgs = [
            'Life is like riding a bicycle.', 'To keep your balance,',
            'you must keep moving.'
        ]

        ## Part I
        # Create node and save UUID
        calc = orm.CalculationNode()
        calc.store()
        calc.seal()
        calc_uuid = calc.uuid

        # Create first log message; records emitted via the node's logger at or
        # above the database log level are persisted as orm.Log entries.
        calc.logger.critical(log_msgs[0])

        # There should be exactly: 1 CalculationNode, 1 Log
        export_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        export_logs = orm.QueryBuilder().append(orm.Log, project=['uuid'])
        self.assertEqual(export_calcs.count(), 1)
        self.assertEqual(export_logs.count(), 1)

        # Save Log UUID before export
        existing_log_uuids = [str(export_logs.all()[0][0])]

        # Export "EXISTING" DB
        export_file_existing = os.path.join(temp_dir,
                                            export_filenames['EXISTING'])
        export([calc], outfile=export_file_existing, silent=True)

        # Add remaining Log messages
        for log_msg in log_msgs[1:]:
            calc.logger.critical(log_msg)

        # There should be exactly: 1 CalculationNode, 3 Logs (len(log_msgs))
        export_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        export_logs = orm.QueryBuilder().append(orm.Log, project=['uuid'])
        self.assertEqual(export_calcs.count(), 1)
        self.assertEqual(export_logs.count(), len(log_msgs))

        # Save Log UUIDs before export, there should be 3 UUIDs in total (len(log_msgs))
        full_log_uuids = set(existing_log_uuids)
        for log_uuid in export_logs.all():
            full_log_uuids.add(str(log_uuid[0]))
        self.assertEqual(len(full_log_uuids), len(log_msgs))

        # Export "FULL" DB
        export_file_full = os.path.join(temp_dir, export_filenames['FULL'])
        export([calc], outfile=export_file_full, silent=True)

        # Clean database
        self.reset_database()

        ## Part II
        # Reimport "EXISTING" DB
        import_data(export_file_existing, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 1 Log
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_logs = orm.QueryBuilder().append(orm.Log, project=['uuid'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_logs.count(), 1)
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        self.assertIn(str(import_logs.all()[0][0]), existing_log_uuids)

        # Add remaining Log messages (again)
        calc = orm.load_node(import_calcs.all()[0][0])
        for log_msg in log_msgs[1:]:
            calc.logger.critical(log_msg)

        # There should be exactly: 1 CalculationNode, 3 Logs (len(log_msgs))
        export_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        export_logs = orm.QueryBuilder().append(orm.Log, project=['uuid'])
        self.assertEqual(export_calcs.count(), 1)
        self.assertEqual(export_logs.count(), len(log_msgs))

        # Save Log UUIDs before export, there should be 3 UUIDs in total (len(log_msgs))
        new_log_uuids = set(existing_log_uuids)
        for log_uuid in export_logs.all():
            new_log_uuids.add(str(log_uuid[0]))
        self.assertEqual(len(new_log_uuids), len(log_msgs))

        # Export "NEW" DB
        export_file_new = os.path.join(temp_dir, export_filenames['NEW'])
        export([calc], outfile=export_file_new, silent=True)

        # Clean database
        self.reset_database()

        ## Part III
        # Reimport "EXISTING" DB
        import_data(export_file_existing, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 1 Log
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_logs = orm.QueryBuilder().append(orm.Log, project=['uuid'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_logs.count(), 1)
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        self.assertIn(str(import_logs.all()[0][0]), existing_log_uuids)

        # Import "FULL" DB
        import_data(export_file_full, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 3 Logs (len(log_msgs))
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_logs = orm.QueryBuilder().append(orm.Log, project=['uuid'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_logs.count(), len(log_msgs))
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        for log in import_logs.all():
            log_uuid = str(log[0])
            self.assertIn(log_uuid, full_log_uuids)

        ## Part IV
        # Import "NEW" DB
        import_data(export_file_new, silent=True)

        # Check the database is correctly imported.
        # There should be exactly: 1 CalculationNode, 5 Logs
        # 4 of the logs are identical in pairs, except for the UUID.
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid'])
        import_logs = orm.QueryBuilder().append(orm.Log,
                                                project=['uuid', 'message'])
        self.assertEqual(import_calcs.count(), 1)
        self.assertEqual(import_logs.count(), 5)
        # Furthermore, the UUIDs should be the same
        self.assertEqual(str(import_calcs.all()[0][0]), calc_uuid)
        total_log_uuids = full_log_uuids.copy()
        total_log_uuids.update(new_log_uuids)
        for log in import_logs.all():
            log_uuid = str(log[0])
            log_message = str(log[1])
            self.assertIn(log_uuid, total_log_uuids)
            self.assertIn(log_message, log_msgs)
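
For spot checks like the ones above, the logs attached to a single node can also be fetched without building a query by hand; a minimal sketch, assuming the Log.objects.get_logs_for collection method of the aiida-core 1.x API:

    calc = orm.load_node(calc_uuid)
    for log in orm.Log.objects.get_logs_for(calc):
        # Each entry carries its own UUID and the original message.
        print(log.uuid, log.message)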
Example #27
    def test_multiple_imports_for_single_node(self, temp_dir):
        """Test multiple imports for single node with different logs are imported correctly"""
        log_msgs = [
            'Life is like riding a bicycle.', 'To keep your balance,',
            'you must keep moving.'
        ]

        # Create Node and initial log message and save UUIDs prior to export
        node = orm.CalculationNode().store()
        node.seal()
        node.logger.critical(log_msgs[0])
        node_uuid = node.uuid
        log_uuid_existing = orm.QueryBuilder().append(orm.Log,
                                                      project=['uuid']).all()
        log_uuid_existing = str(log_uuid_existing[0][0])

        # Export as "EXISTING" DB
        export_file_existing = os.path.join(temp_dir, 'export_EXISTING.tar.gz')
        export([node], outfile=export_file_existing, silent=True)

        # Add 2 more Logs and save UUIDs for all three Logs prior to export
        node.logger.critical(log_msgs[1])
        node.logger.critical(log_msgs[2])
        log_uuids_full = orm.QueryBuilder().append(orm.Log,
                                                   project=['uuid']).all()
        log_uuids_full = [str(log[0]) for log in log_uuids_full]

        # Export as "FULL" DB
        export_file_full = os.path.join(temp_dir, 'export_FULL.tar.gz')
        export([node], outfile=export_file_full, silent=True)

        # Clean database and reimport "EXISTING" DB
        self.reset_database()
        import_data(export_file_existing, silent=True)

        # Check correct import
        builder = orm.QueryBuilder().append(orm.Node,
                                            tag='node',
                                            project=['uuid'])
        builder.append(orm.Log, with_node='node', project=['uuid', 'message'])
        builder = builder.all()

        self.assertEqual(len(builder), 1)  # There is 1 Log in "EXISTING" DB

        imported_node_uuid = builder[0][0]
        self.assertEqual(imported_node_uuid, node_uuid)

        imported_log_uuid = builder[0][1]
        imported_log_message = builder[0][2]
        self.assertEqual(imported_log_uuid, log_uuid_existing)
        self.assertEqual(imported_log_message, log_msgs[0])

        # Import "FULL" DB
        import_data(export_file_full, silent=True)

        # Since the UUID of the node is identical to that of the node already in
        # the DB, the Logs should be added to the existing node, while the single
        # Log already present is not duplicated.
        # Check this by retrieving all Logs for the node.
        builder = orm.QueryBuilder().append(orm.Node,
                                            tag='node',
                                            project=['uuid'])
        builder.append(orm.Log, with_node='node', project=['uuid', 'message'])
        builder = builder.all()

        self.assertEqual(len(builder),
                         len(log_msgs))  # There should now be 3 Logs

        imported_node_uuid = builder[0][0]
        self.assertEqual(imported_node_uuid, node_uuid)
        for log in builder:
            imported_log_uuid = log[1]
            imported_log_content = log[2]

            self.assertIn(imported_log_uuid, log_uuids_full)
            self.assertIn(imported_log_content, log_msgs)
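
The deduplication can also be asserted on the messages themselves; a short sketch (assuming the state reached at the end of the test above) checking that every message appears exactly once:

    messages = sorted(log[2] for log in builder)
    assert messages == sorted(log_msgs)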
Example #28
    def test_export_of_imported_logs(self, temp_dir):
        """Test export of imported Log"""
        log_msg = 'Testing export of imported log'

        # Create node
        calc = orm.CalculationNode()
        calc.store()
        calc.seal()

        # Create log message
        calc.logger.critical(log_msg)

        # Save uuids prior to export
        calc_uuid = calc.uuid
        log_uuid = orm.QueryBuilder().append(orm.Log, project=['uuid']).all()
        log_uuid = str(log_uuid[0][0])

        # Export
        export_file = os.path.join(temp_dir, 'export.tar.gz')
        export([calc], outfile=export_file, silent=True)

        # Clean database and reimport exported data
        self.reset_database()
        import_data(export_file, silent=True)

        # Finding all the log messages
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid']).all()
        import_logs = orm.QueryBuilder().append(orm.Log,
                                                project=['uuid']).all()

        # There should be exactly: 1 CalculationNode, 1 Log
        self.assertEqual(len(import_calcs), 1)
        self.assertEqual(len(import_logs), 1)

        # Check the UUIDs are the same
        self.assertEqual(str(import_calcs[0][0]), calc_uuid)
        self.assertEqual(str(import_logs[0][0]), log_uuid)

        # Re-export
        calc = orm.load_node(import_calcs[0][0])
        re_export_file = os.path.join(temp_dir, 're_export.tar.gz')
        export([calc], outfile=re_export_file, silent=True)

        # Clean database and reimport exported data
        self.reset_database()
        import_data(re_export_file, silent=True)

        # Finding all the log messages
        import_calcs = orm.QueryBuilder().append(orm.CalculationNode,
                                                 project=['uuid']).all()
        import_logs = orm.QueryBuilder().append(orm.Log,
                                                project=['uuid']).all()

        # There should be exactly: 1 CalculationNode, 1 Log
        self.assertEqual(len(import_calcs), 1)
        self.assertEqual(len(import_logs), 1)

        # Check the UUIDs are the same
        self.assertEqual(str(import_calcs[0][0]), calc_uuid)
        self.assertEqual(str(import_logs[0][0]), log_uuid)
Example #29
    def test_comment_querybuilder(self):
        # pylint: disable=too-many-locals
        """Test querying for comments by joining on nodes in the QueryBuilder."""
        user_one = self.user
        user_two = orm.User(email='user_two@example.com').store()  # placeholder address

        node_one = orm.Data().store()
        comment_one = Comment(node_one, user_one, 'comment_one').store()

        node_two = orm.Data().store()
        comment_two = Comment(node_two, user_one, 'comment_two').store()
        comment_three = Comment(node_two, user_one, 'comment_three').store()

        node_three = orm.CalculationNode().store()
        comment_four = Comment(node_three, user_two,
                               'new_user_comment').store()

        node_four = orm.CalculationNode().store()
        comment_five = Comment(node_four, user_one, 'user one comment').store()
        comment_six = Comment(node_four, user_two, 'user two comment').store()

        # Retrieve a node by joining on a specific comment
        builder = orm.QueryBuilder()
        builder.append(Comment, tag='comment', filters={'id': comment_one.id})
        builder.append(orm.Node, with_comment='comment', project=['uuid'])
        nodes = builder.all()

        self.assertEqual(len(nodes), 1)
        for node in nodes:
            self.assertIn(str(node[0]), [node_one.uuid])

        # Retrieve a comment by joining on a specific node
        builder = orm.QueryBuilder()
        builder.append(orm.Node, tag='node', filters={'id': node_two.id})
        builder.append(Comment, with_node='node', project=['uuid'])
        comments = builder.all()

        self.assertEqual(len(comments), 2)
        for comment in comments:
            self.assertIn(str(comment[0]),
                          [comment_two.uuid, comment_three.uuid])

        # Retrieve a user by joining on a specific comment
        builder = orm.QueryBuilder()
        builder.append(Comment, tag='comment', filters={'id': comment_four.id})
        builder.append(orm.User, with_comment='comment', project=['email'])
        users = builder.all()

        self.assertEqual(len(users), 1)
        for user in users:
            self.assertEqual(str(user[0]), user_two.email)

        # Retrieve a comment by joining on a specific user
        builder = orm.QueryBuilder()
        builder.append(orm.User, tag='user', filters={'email': user_one.email})
        builder.append(Comment, with_user='user', project=['uuid'])
        comments = builder.all()

        self.assertEqual(len(comments), 5)
        for comment in comments:
            self.assertIn(str(comment[0]), [
                self.comment.uuid, comment_one.uuid, comment_two.uuid,
                comment_three.uuid, comment_five.uuid
            ])

        # Retrieve users from comments of a single node by joining specific node
        builder = orm.QueryBuilder()
        builder.append(orm.Node, tag='node', filters={'id': node_four.id})
        builder.append(Comment,
                       tag='comments',
                       with_node='node',
                       project=['uuid'])
        builder.append(orm.User, with_comment='comments', project=['email'])
        comments_and_users = builder.all()

        self.assertEqual(len(comments_and_users), 2)
        for entry in comments_and_users:
            self.assertEqual(len(entry), 2)

            comment_uuid = str(entry[0])
            user_email = str(entry[1])

            self.assertIn(comment_uuid, [comment_five.uuid, comment_six.uuid])
            self.assertIn(user_email, [user_one.email, user_two.email])
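
The same joins compose with filters in either direction; a small sketch (reusing the entities created above) retrieving every node that user_two commented on:

    builder = orm.QueryBuilder()
    builder.append(orm.User, filters={'email': user_two.email}, tag='user')
    builder.append(Comment, with_user='user', tag='comment')
    builder.append(orm.Node, with_comment='comment', project=['uuid'])
    # Expect node_three and node_four, the two nodes user_two commented on.
    commented_uuids = [str(entry[0]) for entry in builder.all()]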
Example #30
    def test_group_name_and_type_change(self, temp_dir):
        """ Group's name and type columns have changed
        Change for columns:
        “name”            --> “label”
        "type"            --> "type_string"
        Furthermore, type_strings have been updated to:
        ""                --> "user"
        "data.upf.family" --> "data.upf"
        "aiida.import"    --> "auto.import"
        "autogroup.run"   --> "auto.run"

        The new columns are called on group instances, and will fail if not present.
        A user created Group is validated to have the "user" value as a type_string.
        A UPF file is created and imported/uploaded as a UPF family,
        in order to create a Group with type_string="data.upf".
        Any import will create a Group with type_string "auto.import", which is checked.
        The type_string="auto.run" is not directly checked, but if the three checks
        above succeed, it is understood that "auto.run" is also correctly ex-/imported
        as the type_string content for the relevant Groups.
        """
        from aiida.orm.nodes.data.upf import upload_upf_family
        # To be saved
        groups_label = ['Users', 'UpfData']
        upf_filename = 'Al.test_file.UPF'
        # regular upf file version 2 header
        upf_contents = '\n'.join([
            "<UPF version=\"2.0.1\">",
            'Human readable section is completely irrelevant for parsing!',
            '<PP_HEADER',
            'contents before element tag',
            "element=\"Al\"",
            'contents following element tag',
            '>',
        ])
        path_to_upf = os.path.join(temp_dir, upf_filename)
        with open(path_to_upf, 'w') as upf_file:
            upf_file.write(upf_contents)

        # Create Groups
        node1 = orm.CalculationNode().store()
        node2 = orm.CalculationNode().store()
        node1.seal()
        node2.seal()
        group_user = orm.Group(label=groups_label[0]).store()
        group_user.add_nodes([node1, node2])

        upload_upf_family(temp_dir, groups_label[1], '')
        group_upf = orm.load_group(groups_label[1])

        # Save uuids and type
        groups_uuid = [str(g.uuid) for g in [group_user, group_upf]]
        groups_type_string = [g.type_string for g in [group_user, group_upf]]

        # Assert correct type strings exists prior to export
        self.assertListEqual(groups_type_string, ['core', 'core.upf'])

        # Export node
        filename = os.path.join(temp_dir, 'export.aiida')
        export([group_user, group_upf], filename=filename)

        # Clean the database and reimport
        self.clean_db()
        import_data(filename)

        # Retrieve Groups and make sure exactly 3 are retrieved (including the "import group")
        builder = orm.QueryBuilder()
        builder.append(orm.Group, project=['uuid'])
        imported_groups = builder.all()

        self.assertEqual(builder.count(), 3)

        # Check uuids are the same after import
        imported_groups_uuid = [str(g[0]) for g in imported_groups]

        # We do not know the "import group"'s uuid, so go through known uuids
        for group_uuid in groups_uuid:
            self.assertIn(group_uuid, imported_groups_uuid)

            # Pop known uuid from imported_groups_uuid, eventually leaving
            # only the "import group"
            imported_groups_uuid.remove(group_uuid)

            # Load group
            imported_group = orm.load_group(group_uuid)

            # Check whether types are correctly imported
            self.assertIn(imported_group.type_string, groups_type_string)

            # Assert labels are imported correctly
            self.assertIn(imported_group.label, groups_label)

        # Check type_string content of "import group"
        import_group = orm.load_group(imported_groups_uuid[0])
        self.assertEqual(import_group.type_string, 'core.import')
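
As a closing sketch (assuming the database state left by the import above), groups of a given kind can be filtered on the migrated column directly:

    builder = orm.QueryBuilder()
    builder.append(orm.Group, filters={'type_string': 'core.import'},
                   project=['label', 'uuid'])
    import_groups = builder.all()  # the auto-generated import group(s)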