Example #1
0
def test_validate_transfer_inputs(aiida_localhost, tmp_path, temp_dir):
    """Test the `TransferCalculation` validators.

    Builds an input dictionary whose `source_nodes` live on a different computer
    than the one requested in the metadata and whose instruction lists reference
    non-existent node labels, then checks that `validate_transfer_inputs` reports
    every expected problem. Finally checks `check_node_type` directly for the
    missing-node and wrong-type cases.
    """
    from aiida.orm import Computer
    from aiida.calculations.transfer import check_node_type, validate_transfer_inputs

    # Extra computer so that the RemoteData input points to a *different*
    # computer than the one requested in the calculation metadata.
    fake_localhost = Computer(
        label='localhost-fake',
        description='extra localhost computer set up by test',
        hostname='localhost-fake',
        workdir=temp_dir,
        transport_type='local',
        scheduler_type='direct'
    )
    fake_localhost.store()
    fake_localhost.set_minimum_job_poll_interval(0.)
    fake_localhost.configure()

    inputs = {
        'source_nodes': {
            'unused_node': orm.RemoteData(computer=aiida_localhost, remote_path=str(tmp_path)),
        },
        'instructions':
        orm.Dict(
            dict={
                'local_files': [('inexistent_node', None, None)],
                'remote_files': [('inexistent_node', None, None)],
                'symlink_files': [('inexistent_node', None, None)],
            }
        ),
        'metadata': {
            'computer': fake_localhost
        },
    }

    expected_list = [
        (
            f' > remote node `unused_node` points to computer `{aiida_localhost}`, '
            f'not the one being used (`{fake_localhost}`)'
        ),
        check_node_type('local_files', 'inexistent_node', None, orm.FolderData),
        check_node_type('remote_files', 'inexistent_node', None, orm.RemoteData),
        check_node_type('symlink_files', 'inexistent_node', None, orm.RemoteData),
        ' > node `unused_node` provided as inputs is not being used',
    ]

    # The validator prefixes the report with a blank line and terminates every
    # entry with a newline; join builds the same string as the original
    # one-entry-at-a-time concatenation loop.
    expected = '\n\n' + '\n'.join(expected_list) + '\n'

    result = validate_transfer_inputs(inputs, None)
    assert result == expected

    result = check_node_type('list_name', 'node_label', None, orm.RemoteData)
    expected = ' > node `node_label` requested on list `list_name` not found among inputs'
    assert result == expected

    result = check_node_type('list_name', 'node_label', orm.FolderData(), orm.RemoteData)
    expected_type = orm.RemoteData.class_node_type
    expected = f' > node `node_label`, requested on list `list_name` should be of type `{expected_type}`'
    assert result == expected
Example #2
0
    def _generate_work_chain_node(entry_point_name,
                                  computer,
                                  test_name=None,
                                  inputs=None,
                                  attributes=None):
        """Fixture to generate a mock `WorkChainNode` for testing parsers.

        :param entry_point_name: entry point name of the calculation class
        :param computer: a `Computer` instance
        :param test_name: relative path of directory with test output files in the `fixtures/{entry_point_name}` folder.
        :param inputs: any optional nodes to add as input links to the current `WorkChainNode`
        :param attributes: any optional attributes to set on the node
        :return: `WorkChainNode` instance with an attached `FolderData` as the `retrieved` node
        """
        from aiida import orm
        from aiida.common import LinkType
        from aiida.plugins.entry_point import format_entry_point_string

        # Build the full entry point string, e.g. `aiida.workchains:<entry_point_name>`.
        entry_point = format_entry_point_string('aiida.workchains',
                                                entry_point_name)

        node = orm.WorkChainNode(computer=computer, process_type=entry_point)

        if attributes:
            node.set_attribute_many(attributes)

        if inputs:
            # `flatten_inputs` (defined elsewhere in this fixture module) yields
            # flat (link_label, node) pairs from the possibly nested input dict.
            for link_label, input_node in flatten_inputs(inputs):
                input_node.store()
                node.add_incoming(input_node,
                                  link_type=LinkType.INPUT_WORK,
                                  link_label=link_label)

        if test_name is not None:
            # Attach the test fixture files as the `retrieved` output of the node.
            # NOTE(review): relies on a module-level `import os`; the entry point
            # name is assumed to start with the `quantumespresso.` prefix.
            basepath = os.path.dirname(os.path.abspath(__file__))
            filepath = os.path.join(basepath, 'parsers', 'fixtures',
                                    entry_point_name[len('quantumespresso.'):],
                                    test_name)

            retrieved = orm.FolderData()
            retrieved.put_object_from_tree(filepath)
            retrieved.add_incoming(node,
                                   link_type=LinkType.CREATE,
                                   link_label='retrieved')
            retrieved.store()

            # Also attach a fake `remote_folder` output alongside `retrieved`.
            remote_folder = orm.RemoteData(computer=computer,
                                           remote_path='/tmp')
            remote_folder.add_incoming(node,
                                       link_type=LinkType.CREATE,
                                       link_label='remote_folder')
            remote_folder.store()

        return node
Example #3
0
    def _generate_calc_job_node(entry_point_name, computer, test_name=None, inputs=None, attributes=None):
        """Fixture to generate a mock `CalcJobNode` for testing parsers.

        :param entry_point_name: entry point name of the calculation class
        :param computer: a `Computer` instance
        :param test_name: relative path of directory with test output files in the `fixtures/{entry_point_name}` folder.
        :param inputs: any optional nodes to add as input links to the current `CalcJobNode`
        :param attributes: any optional attributes to set on the node
        :return: `CalcJobNode` instance with an attached `FolderData` as the `retrieved` node
        """
        import os
        from aiida import orm
        from aiida.common import LinkType
        from aiida.plugins.entry_point import format_entry_point_string

        # Build the full entry point string, e.g. `aiida.calculations:<entry_point_name>`.
        entry_point = format_entry_point_string('aiida.calculations', entry_point_name)

        # Default filenames and scheduler options for the mocked calculation.
        node = orm.CalcJobNode(computer=computer, process_type=entry_point)
        node.set_attribute('input_filename', 'aiida.in')
        node.set_attribute('output_filename', 'aiida.out')
        node.set_attribute('error_filename', 'aiida.err')
        node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
        node.set_option('max_wallclock_seconds', 1800)

        if attributes:
            node.set_attributes(attributes)

        if inputs:
            # `flatten_inputs` (defined elsewhere in this fixture module) yields
            # flat (link_label, node) pairs from the possibly nested input dict.
            for link_label, input_node in flatten_inputs(inputs):
                input_node.store()
                node.add_incoming(input_node, link_type=LinkType.INPUT_CALC, link_label=link_label)

        node.store()

        if test_name is not None:
            # Attach the test fixture files as the `retrieved` output of the node.
            # NOTE(review): the entry point name is assumed to start with the
            # `quantumespresso.` prefix — confirm before reusing for other plugins.
            basepath = os.path.dirname(os.path.abspath(__file__))
            filepath = os.path.join(basepath, 'parsers', 'fixtures', entry_point_name[len('quantumespresso.'):], test_name)

            retrieved = orm.FolderData()
            retrieved.put_object_from_tree(filepath)
            retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
            retrieved.store()

            # Also attach a fake `remote_folder` output alongside `retrieved`.
            remote_folder = orm.RemoteData(computer=computer, remote_path='/tmp')
            remote_folder.add_incoming(node, link_type=LinkType.CREATE, link_label='remote_folder')
            remote_folder.store()

        return node
Example #4
0
def test_integration_transfer(aiida_localhost, tmp_path):
    """Test a default `TransferCalculation`.

    One file is transferred from a local `FolderData` source and one from a
    `RemoteData` source; both must end up in the retrieved folder and in the
    remote working directory with their original content.
    """
    from aiida.calculations.transfer import TransferCalculation
    from aiida.engine import run

    # Local source: the FolderData snapshot must be taken *before* the remote
    # file is written, so it only contains `file_local.txt`.
    content_local = 'Content of local file'
    (tmp_path / 'file_local.txt').write_text(content_local)
    srcnode_local = orm.FolderData(tree=str(tmp_path))

    # Remote source: points at the live directory, now holding both files.
    content_remote = 'Content of remote file'
    (tmp_path / 'file_remote.txt').write_text(content_remote)
    srcnode_remote = orm.RemoteData(computer=aiida_localhost, remote_path=str(tmp_path))

    source_nodes = {
        'source_local': srcnode_local,
        'source_remote': srcnode_remote,
    }
    instructions = orm.Dict(
        dict={
            'retrieve_files': True,
            'local_files': [('source_local', 'file_local.txt', 'file_local.txt')],
            'remote_files': [('source_remote', 'file_remote.txt', 'file_remote.txt')],
        }
    )
    inputs = {'instructions': instructions, 'source_nodes': source_nodes, 'metadata': {'computer': aiida_localhost}}

    output_nodes = run(TransferCalculation, **inputs)

    remotedir = output_nodes['remote_folder']
    retrieved = output_nodes['retrieved']

    # Check the retrieved folder: both files present with original content.
    assert sorted(retrieved.list_object_names()) == sorted(['file_local.txt', 'file_remote.txt'])
    assert retrieved.get_object_content('file_local.txt') == content_local
    assert retrieved.get_object_content('file_remote.txt') == content_remote

    # Check the remote folder: fetch each file back and compare its content.
    remote_names = remotedir.listdir()
    assert 'file_local.txt' in remote_names
    assert 'file_remote.txt' in remote_names
    checks = (
        ('file_local.txt', 'retrieved_local.txt', content_local),
        ('file_remote.txt', 'retrieved_remote.txt', content_remote),
    )
    for source_name, fetched_name, expected_content in checks:
        destination = tmp_path / fetched_name
        remotedir.getfile(relpath=source_name, destpath=str(destination))
        assert destination.read_text() == expected_content
Example #5
0
def test_get_transfer(fixture_sandbox, aiida_localhost, generate_calc_job, tmp_path):
    """Test a default `TransferCalculation`.

    Checks that `symlink_files` instructions populate `remote_symlink_list`
    while `remote_files` instructions populate `remote_copy_list`, and that in
    both cases the destination paths end up in `retrieve_list`.
    """
    # Source layout: tmp_path/file1.txt and tmp_path/folder/file2.txt.
    file1 = tmp_path / 'file1.txt'
    file1.write_text('file 1 content')
    folder = tmp_path / 'folder'
    folder.mkdir()
    file2 = folder / 'file2.txt'
    file2.write_text('file 2 content')
    data_source = orm.RemoteData(computer=aiida_localhost, remote_path=str(tmp_path))

    entry_point_name = 'core.transfer'
    list_of_files = [
        ('data_source', 'file1.txt', 'folder/file1.txt'),
        ('data_source', 'folder/file2.txt', 'file2.txt'),
    ]
    list_of_nodes = {'data_source': data_source}
    instructions = orm.Dict(dict={'retrieve_files': True, 'symlink_files': list_of_files})
    inputs = {'instructions': instructions, 'source_nodes': list_of_nodes, 'metadata': {'computer': aiida_localhost}}

    # Generate calc_info and verify basics: nothing to run, submission skipped.
    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)
    assert isinstance(calc_info, datastructures.CalcInfo)
    assert isinstance(calc_info.codes_info, list)
    assert len(calc_info.codes_info) == 0
    assert calc_info.skip_submit

    # Check that the lists were set correctly.
    copy_list = [
        (aiida_localhost.uuid, os.path.join(data_source.get_remote_path(), 'file1.txt'), 'folder/file1.txt'),
        (aiida_localhost.uuid, os.path.join(data_source.get_remote_path(), 'folder/file2.txt'), 'file2.txt'),
    ]
    # Entries of the retrieve list are plain strings; the parentheses the
    # original wrapped around each element did not create tuples and only
    # obscured that.
    retrieve_list = ['folder/file1.txt', 'file2.txt']
    assert sorted(calc_info.remote_symlink_list) == sorted(copy_list)
    assert sorted(calc_info.remote_copy_list) == []
    assert sorted(calc_info.local_copy_list) == []
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)

    # Now without symlinks: the same files move to the remote copy list instead.
    instructions = orm.Dict(dict={'retrieve_files': True, 'remote_files': list_of_files})
    inputs = {'instructions': instructions, 'source_nodes': list_of_nodes, 'metadata': {'computer': aiida_localhost}}
    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)
    assert sorted(calc_info.remote_symlink_list) == []
    assert sorted(calc_info.remote_copy_list) == sorted(copy_list)
    assert sorted(calc_info.local_copy_list) == []
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)
def test_runstm_failstm(aiida_profile, generate_workchain_stm,
                        generate_wc_job_node, generate_calc_job_node,
                        fixture_localhost):
    """Check the STM workchain error path when the STM calculation fails.

    A fake finished `siesta.base` workchain is placed in the context so the
    workchain can proceed to `run_stm`. The STM calculation itself is faked by
    a node set to FINISHED without a successful parse, and `run_results` is
    expected to return the `ERROR_STM_PLUGIN` exit code.
    """

    process = generate_workchain_stm()
    process.checks()

    # Fake a successfully finished `siesta.base` workchain (exit status 0).
    ldos_basewc = generate_wc_job_node("siesta.base", fixture_localhost)
    ldos_basewc.set_process_state(ProcessState.FINISHED)
    ldos_basewc.set_exit_status(ExitCode(0).status)
    # Now is_finished_ok, but we still need to fake its outputs: a workchain
    # output must be stored first and linked with a RETURN link.
    remote_folder = orm.RemoteData(computer=fixture_localhost,
                                   remote_path='/tmp')
    remote_folder.store()
    remote_folder.add_incoming(ldos_basewc,
                               link_type=LinkType.RETURN,
                               link_label='remote_folder')
    process.ctx.siesta_ldos = ldos_basewc

    process.run_stm()

    # Fake the STM calculation node that `run_results` will inspect.
    name = 'default'
    entry_point_calc_job = 'siesta.stm'
    inputs = AttributeDict({'spin_option': orm.Str("q")})
    attributes = AttributeDict({
        'input_filename': 'stm.in',
        'output_filename': 'stm.out'
    })
    stm_node = generate_calc_job_node(entry_point_calc_job, fixture_localhost,
                                      name, inputs, attributes)
    stm_node.set_process_state(ProcessState.FINISHED)
    process.ctx.stm_calc = stm_node

    result = process.run_results()

    assert result == SiestaSTMWorkChain.exit_codes.ERROR_STM_PLUGIN
Example #7
0
    def create_provenance(self):
        """Create an example provenance graph and return its nodes.

        Graph layout:

        - ``wc1`` (running) takes ``pd0`` and ``pd1`` as inputs and calls
          ``calc1`` and ``calcf1``.
        - ``calc1`` (finished, exit status 0) takes ``pd0``/``pd1`` as inputs
          and creates the remote data ``rd1``.
        - ``calcf1`` (finished, exit status 200) takes ``rd1``/``pd2`` as
          inputs and creates ``pd3`` and ``fd1``, which ``wc1`` also returns.

        :return: an `AttributeDict` mapping short labels to the created nodes
        """
        pd0 = orm.Dict()
        pd0.label = 'pd0'
        pd0.store()

        pd1 = orm.Dict()
        pd1.label = 'pd1'
        pd1.store()

        # Workchain with the two dicts as inputs; left in the RUNNING state.
        wc1 = orm.WorkChainNode()
        wc1.set_process_state(ProcessState.RUNNING)
        wc1.add_incoming(pd0,
                         link_type=LinkType.INPUT_WORK,
                         link_label='input1')
        wc1.add_incoming(pd1,
                         link_type=LinkType.INPUT_WORK,
                         link_label='input2')
        wc1.store()

        # Calculation job called by wc1, finished successfully.
        calc1 = orm.CalcJobNode()
        calc1.computer = self.computer
        calc1.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        calc1.label = 'calc1'
        calc1.set_process_state(ProcessState.FINISHED)
        calc1.set_exit_status(0)
        calc1.add_incoming(pd0,
                           link_type=LinkType.INPUT_CALC,
                           link_label='input1')
        calc1.add_incoming(pd1,
                           link_type=LinkType.INPUT_CALC,
                           link_label='input2')
        calc1.add_incoming(wc1,
                           link_type=LinkType.CALL_CALC,
                           link_label='call1')
        calc1.store()

        # Remote data created by calc1.
        rd1 = orm.RemoteData()
        rd1.label = 'rd1'
        rd1.set_remote_path('/x/y.py')
        rd1.computer = self.computer
        rd1.store()
        rd1.add_incoming(calc1, link_type=LinkType.CREATE, link_label='output')

        pd2 = orm.Dict()
        pd2.label = 'pd2'
        pd2.store()

        # Calculation function called by wc1, finished with a non-zero exit status.
        calcf1 = orm.CalcFunctionNode()
        calcf1.label = 'calcf1'
        calcf1.set_process_state(ProcessState.FINISHED)
        calcf1.set_exit_status(200)
        calcf1.add_incoming(rd1,
                            link_type=LinkType.INPUT_CALC,
                            link_label='input1')
        calcf1.add_incoming(pd2,
                            link_type=LinkType.INPUT_CALC,
                            link_label='input2')
        calcf1.add_incoming(wc1,
                            link_type=LinkType.CALL_CALC,
                            link_label='call2')
        calcf1.store()

        pd3 = orm.Dict()
        pd3.label = 'pd3'

        fd1 = orm.FolderData()
        fd1.label = 'fd1'

        # Outputs of calcf1: CREATE links must be added before the nodes are stored.
        pd3.add_incoming(calcf1,
                         link_type=LinkType.CREATE,
                         link_label='output1')
        pd3.store()
        fd1.add_incoming(calcf1,
                         link_type=LinkType.CREATE,
                         link_label='output2')
        fd1.store()

        # The same nodes are also returned by the calling workchain.
        pd3.add_incoming(wc1, link_type=LinkType.RETURN, link_label='output1')
        fd1.add_incoming(wc1, link_type=LinkType.RETURN, link_label='output2')

        return AttributeDict({
            'pd0': pd0,
            'pd1': pd1,
            'calc1': calc1,
            'rd1': rd1,
            'pd2': pd2,
            'calcf1': calcf1,
            'pd3': pd3,
            'fd1': fd1,
            'wc1': wc1
        })
    def _generate_calc_job_node(
        entry_point_name='base', computer=None, test_name=None, inputs=None, attributes=None, retrieve_temporary=None
    ):
        """Fixture to generate a mock `CalcJobNode` for testing parsers.

        :param entry_point_name: entry point name of the calculation class
        :param computer: a `Computer` instance
        :param test_name: relative path of directory with test output files in the `fixtures/{entry_point_name}` folder.
        :param inputs: any optional nodes to add as input links to the current `CalcJobNode`
        :param attributes: any optional attributes to set on the node
        :param retrieve_temporary: optional tuple of an absolute filepath of a temporary directory and a list of
            filenames that should be written to this directory, which will serve as the `retrieved_temporary_folder`.
            For now this only works with top-level files and does not support files nested in directories.
        :return: `CalcJobNode` instance with an attached `FolderData` as the `retrieved` node.
        """
        from aiida import orm
        from aiida.common import LinkType
        from aiida.plugins.entry_point import format_entry_point_string

        if computer is None:
            computer = fixture_localhost

        filepath_folder = None

        if test_name is not None:
            # NOTE(review): assumes the entry point name starts with the
            # `quantumespresso.` prefix.
            basepath = os.path.dirname(os.path.abspath(__file__))
            filename = os.path.join(entry_point_name[len('quantumespresso.'):], test_name)
            filepath_folder = os.path.join(basepath, 'parsers', 'fixtures', filename)
            filepath_input = os.path.join(filepath_folder, 'aiida.in')

        entry_point = format_entry_point_string('aiida.calculations', entry_point_name)

        # Default filenames and scheduler options for the mocked calculation.
        node = orm.CalcJobNode(computer=computer, process_type=entry_point)
        node.set_attribute('input_filename', 'aiida.in')
        node.set_attribute('output_filename', 'aiida.out')
        node.set_attribute('error_filename', 'aiida.err')
        node.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1})
        node.set_option('max_wallclock_seconds', 1800)

        if attributes:
            node.set_attribute_many(attributes)

        if filepath_folder:
            # Try to reconstruct the `structure` and `parameters` inputs from the
            # fixture input file; a parsing failure is simply ignored.
            from qe_tools.utils.exceptions import ParsingError
            from aiida_quantumespresso.tools.pwinputparser import PwInputFile
            try:
                parsed_input = PwInputFile(filepath_input)
            except ParsingError:
                pass
            else:
                # Bug fix: `inputs` may be None here, in which case the item
                # assignments below used to raise a TypeError.
                if inputs is None:
                    inputs = {}
                inputs['structure'] = parsed_input.get_structuredata()
                inputs['parameters'] = orm.Dict(dict=parsed_input.namelists)

        if inputs:
            # `metadata.options` entries are applied as options, not linked as nodes.
            metadata = inputs.pop('metadata', {})
            options = metadata.get('options', {})

            for name, option in options.items():
                node.set_option(name, option)

            for link_label, input_node in flatten_inputs(inputs):
                input_node.store()
                node.add_incoming(input_node, link_type=LinkType.INPUT_CALC, link_label=link_label)

        node.store()

        if retrieve_temporary:
            # Copy the requested files into the caller-provided directory, which
            # stands in for the `retrieved_temporary_folder`. (Loop variable
            # renamed so it no longer shadows `filename` computed above.)
            dirpath, filenames = retrieve_temporary
            for temporary_filename in filenames:
                shutil.copy(os.path.join(filepath_folder, temporary_filename), os.path.join(dirpath, temporary_filename))

        if filepath_folder:
            retrieved = orm.FolderData()
            retrieved.put_object_from_tree(filepath_folder)

            # Remove files that are supposed to be only present in the retrieved temporary folder
            if retrieve_temporary:
                for temporary_filename in filenames:
                    retrieved.delete_object(temporary_filename)

            retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved')
            retrieved.store()

            # Also attach a fake `remote_folder` output alongside `retrieved`.
            remote_folder = orm.RemoteData(computer=computer, remote_path='/tmp')
            remote_folder.add_incoming(node, link_type=LinkType.CREATE, link_label='remote_folder')
            remote_folder.store()

        return node
Example #9
0
    def test_complex_graph_import_export(self, temp_dir):
        """
        This test checks that a small and bit complex graph can be correctly
        exported and imported.

        It will create the graph, store it to the database, export it to a file
        and import it. In the end it will check if the initial nodes are present
        at the imported graph.

        :param temp_dir: absolute path of a temporary directory to write the archive to
        """
        from aiida.common.exceptions import NotExistent

        # Graph: calc1 creates rd1; rd1, pd1 and pd2 are inputs of calc2;
        # calc2 creates fd1.
        calc1 = orm.CalcJobNode()
        calc1.computer = self.computer
        calc1.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        calc1.label = 'calc1'
        calc1.store()

        pd1 = orm.Dict()
        pd1.label = 'pd1'
        pd1.store()

        pd2 = orm.Dict()
        pd2.label = 'pd2'
        pd2.store()

        rd1 = orm.RemoteData()
        rd1.label = 'rd1'
        rd1.set_remote_path('/x/y.py')
        rd1.computer = self.computer
        rd1.store()
        rd1.add_incoming(calc1, link_type=LinkType.CREATE, link_label='link')

        calc2 = orm.CalcJobNode()
        calc2.computer = self.computer
        calc2.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        calc2.label = 'calc2'
        calc2.add_incoming(pd1,
                           link_type=LinkType.INPUT_CALC,
                           link_label='link1')
        calc2.add_incoming(pd2,
                           link_type=LinkType.INPUT_CALC,
                           link_label='link2')
        calc2.add_incoming(rd1,
                           link_type=LinkType.INPUT_CALC,
                           link_label='link3')
        calc2.store()

        fd1 = orm.FolderData()
        fd1.label = 'fd1'
        fd1.store()
        fd1.add_incoming(calc2, link_type=LinkType.CREATE, link_label='link')

        calc1.seal()
        calc2.seal()

        # Remember every node so we can verify it survives the round trip.
        node_uuids_labels = {
            calc1.uuid: calc1.label,
            pd1.uuid: pd1.label,
            pd2.uuid: pd2.label,
            rd1.uuid: rd1.label,
            calc2.uuid: calc2.label,
            fd1.uuid: fd1.label
        }

        # Export only the leaf node; its whole provenance is expected in the
        # archive, as checked by the loop below after re-import.
        filename = os.path.join(temp_dir, 'export.aiida')
        export([fd1], filename=filename, silent=True)

        self.clean_db()
        self.create_user()

        import_data(filename, silent=True, ignore_unknown_nodes=True)

        for uuid, label in node_uuids_labels.items():
            try:
                orm.load_node(uuid)
            except NotExistent:
                self.fail(
                    'Node with UUID {} and label {} was not found.'.format(
                        uuid, label))
Example #10
0
    def test_cycle_structure_data(self):
        """
        Create an export with some orm.CalculationNode and Data nodes and import it after having
        cleaned the database. Verify that the nodes and their attributes are restored
        properly after importing the created export archive
        """
        from aiida.common.links import LinkType

        # Reference data used to verify the structure after the round trip.
        test_label = 'Test structure'
        test_cell = [[8.34, 0.0, 0.0],
                     [0.298041701839357, 8.53479766274308, 0.0],
                     [0.842650688117053, 0.47118495164127, 10.6965192730702]]
        test_kinds = [{
            'symbols': ['Fe'],
            'weights': [1.0],
            'mass': 55.845,
            'name': 'Fe'
        }, {
            'symbols': ['S'],
            'weights': [1.0],
            'mass': 32.065,
            'name': 'S'
        }]

        structure = orm.StructureData(cell=test_cell)
        structure.append_atom(symbols=['Fe'], position=[0, 0, 0])
        structure.append_atom(symbols=['S'], position=[2, 2, 2])
        structure.label = test_label
        structure.store()

        # Chain: parent_process -> remote_folder -> child_calculation -> structure,
        # making the RemoteData both an output and an input of a CalculationNode.
        parent_process = orm.CalculationNode()
        parent_process.set_attribute('key', 'value')
        parent_process.store()
        child_calculation = orm.CalculationNode()
        child_calculation.set_attribute('key', 'value')
        remote_folder = orm.RemoteData(computer=self.computer,
                                       remote_path='/').store()

        remote_folder.add_incoming(parent_process,
                                   link_type=LinkType.CREATE,
                                   link_label='link')
        child_calculation.add_incoming(remote_folder,
                                       link_type=LinkType.INPUT_CALC,
                                       link_label='link')
        child_calculation.store()
        structure.add_incoming(child_calculation,
                               link_type=LinkType.CREATE,
                               link_label='link')

        parent_process.seal()
        child_calculation.seal()

        with tempfile.NamedTemporaryFile() as handle:

            nodes = [
                structure, child_calculation, parent_process, remote_folder
            ]
            export(nodes, outfile=handle.name, overwrite=True, silent=True)

            # Check that we have the expected number of nodes in the database
            self.assertEqual(orm.QueryBuilder().append(orm.Node).count(),
                             len(nodes))

            # Clean the database and verify there are no nodes left
            self.clean_db()
            self.create_user()
            self.assertEqual(orm.QueryBuilder().append(orm.Node).count(), 0)

            # After importing we should have the original number of nodes again
            import_data(handle.name, silent=True)
            self.assertEqual(orm.QueryBuilder().append(orm.Node).count(),
                             len(nodes))

            # Verify that orm.CalculationNodes have non-empty attribute dictionaries
            builder = orm.QueryBuilder().append(orm.CalculationNode)
            for [calculation] in builder.iterall():
                self.assertIsInstance(calculation.attributes, dict)
                self.assertNotEqual(len(calculation.attributes), 0)

            # Verify that the structure data maintained its label, cell and kinds
            builder = orm.QueryBuilder().append(orm.StructureData)
            for [structure] in builder.iterall():
                self.assertEqual(structure.label, test_label)
                # Check that they are almost the same, within numerical precision
                self.assertTrue(
                    np.abs(np.array(structure.cell) -
                           np.array(test_cell)).max() < 1.e-12)

            builder = orm.QueryBuilder().append(orm.StructureData,
                                                project=['attributes.kinds'])
            for [kinds] in builder.iterall():
                self.assertEqual(len(kinds), 2)
                for kind in kinds:
                    self.assertIn(kind, test_kinds)

            # Check that there is a StructureData that is an output of a orm.CalculationNode
            builder = orm.QueryBuilder()
            builder.append(orm.CalculationNode,
                           project=['uuid'],
                           tag='calculation')
            builder.append(orm.StructureData, with_incoming='calculation')
            self.assertGreater(len(builder.all()), 0)

            # Check that there is a RemoteData that is a child and parent of a orm.CalculationNode
            builder = orm.QueryBuilder()
            builder.append(orm.CalculationNode, tag='parent')
            builder.append(orm.RemoteData,
                           project=['uuid'],
                           with_incoming='parent',
                           tag='remote')
            builder.append(orm.CalculationNode, with_incoming='remote')
            self.assertGreater(len(builder.all()), 0)
def test_runldos_errorldos(aiida_profile, generate_workchain_stm,
                           generate_psml_data, generate_wc_job_node,
                           fixture_localhost, fixture_code,
                           generate_structure):
    """Check that `run_stm` returns the ERROR_LDOS_WC exit code when the ldos
    sub-workchain did not finish ok."""

    process = generate_workchain_stm()
    process.checks()

    # Fake a successfully finished base siesta workchain in the context,
    # this time with a full set of inputs attached.
    pseudo = generate_psml_data('Si')
    base_inputs = AttributeDict({
        'structure': generate_structure(),
        'code': fixture_code("siesta.siesta"),
        'parameters': orm.Dict(dict={"sm": "sm"}),
        'options': orm.Dict(dict={
            'resources': {
                'num_machines': 1
            },
            'max_wallclock_seconds': 1800,
            'withmpi': False
        }),
        'pseudos': {
            'Si': pseudo,
            'SiDiff': pseudo
        },
    })
    base_wc = generate_wc_job_node("siesta.base", fixture_localhost,
                                   base_inputs)
    base_wc.set_process_state(ProcessState.FINISHED)
    base_wc.set_exit_status(ExitCode(0).status)
    # The workchain is now is_finished_ok, so the next step submits the ldos calc.
    # Its output ports (remote_folder, output_parameters) must be faked too;
    # unlike a CalcJob, workchain outputs are stored nodes linked with "RETURN".
    remote = orm.RemoteData(computer=fixture_localhost, remote_path='/tmp')
    remote.store()
    remote.add_incoming(base_wc,
                        link_type=LinkType.RETURN,
                        link_label='remote_folder')
    out_params = orm.Dict(dict={"E_Fermi": -1})
    out_params.store()
    out_params.add_incoming(base_wc,
                            link_type=LinkType.RETURN,
                            link_label='output_parameters')
    process.ctx.workchain_base = base_wc

    process.run_siesta_with_ldos()

    # Here we might check ToContext(siesta_ldos=running)

    # Fake an ldos workchain that FINISHED but has no exit status set,
    # so it does not appear as is_finished_ok.
    ldos_wc = generate_wc_job_node("siesta.base", fixture_localhost)
    ldos_wc.set_process_state(ProcessState.FINISHED)
    process.ctx.siesta_ldos = ldos_wc

    result = process.run_stm()

    assert result == SiestaSTMWorkChain.exit_codes.ERROR_LDOS_WC
Exemple #12
0
    def _generate_calc_job_node(entry_point_name,
                                computer,
                                test_name=None,
                                inputs=None,
                                attributes=None):
        """Generate a mock `CalcJobNode` for testing parsers.

        :param entry_point_name: entry point name of the calculation class
        :param computer: a `Computer` instance
        :param test_name: relative path of directory
        :param inputs: any optional nodes to add as input links to the current CalcJobNode
        :param attributes: any optional attributes to set on the node
        :return: `CalcJobNode` instance with an attached `FolderData` as the `retrieved` node
        """
        # pylint: disable=too-many-locals
        import os
        from aiida.common import LinkType
        from aiida.plugins.entry_point import format_entry_point_string

        process_type = format_entry_point_string('aiida.calculations',
                                                 entry_point_name)

        node = orm.CalcJobNode(computer=computer, process_type=process_type)

        # Minimal scheduler options needed for a valid CalcJobNode.
        default_options = {
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            },
            'max_wallclock_seconds': 1800,
        }
        for option_name, option_value in default_options.items():
            node.set_option(option_name, option_value)

        if attributes:
            node.set_attribute_many(attributes)

        if inputs:
            # Options passed via metadata override/extend the defaults.
            options = inputs.pop('metadata', {}).get('options', {})
            for option_name, option_value in options.items():
                node.set_option(option_name, option_value)

            # Attach (and store) every input node with an INPUT_CALC link.
            for link_label, source_node in flatten_inputs(inputs):
                source_node.store()
                node.add_incoming(source_node,
                                  link_type=LinkType.INPUT_CALC,
                                  link_label=link_label)

        node.store()

        if test_name is not None:
            # Load the fixture output files as the `retrieved` FolderData.
            base_dir = os.path.dirname(os.path.abspath(__file__))
            fixture_dir = os.path.join(base_dir, 'parsers', 'fixtures',
                                       'catmap', test_name)

            retrieved = orm.FolderData()
            retrieved.put_object_from_tree(fixture_dir)
            retrieved.add_incoming(node,
                                   link_type=LinkType.CREATE,
                                   link_label='retrieved')
            retrieved.store()

            remote = orm.RemoteData(computer=computer, remote_path='/tmp')
            remote.add_incoming(node,
                                link_type=LinkType.CREATE,
                                link_label='remote_folder')
            remote.store()

        return node
Exemple #13
0
    def _generate_calc_job_node(  # pylint: disable=too-many-arguments,too-many-locals
        entry_point_name,
        computer,
        seedname=None,
        test_name=None,
        inputs=None,
        attributes=None,
    ):
        """Fixture to generate a mock `CalcJobNode` for testing parsers.

        :param entry_point_name: entry point name of the calculation class
        :param computer: a `Computer` instance
        :param seedname: base name for the input/output/error files; defaults to 'aiida'
        :param test_name: relative path of directory with test output files in the `fixtures/{entry_point_name}` folder.
        :param inputs: any optional nodes to add as input links to the current CalcJobNode
        :param attributes: any optional attributes to set on the node
        :return: `CalcJobNode` instance with an attached `FolderData` as the `retrieved` node
        """
        from aiida import orm
        from aiida.common import LinkType
        from aiida.plugins.entry_point import format_entry_point_string

        entry_point = format_entry_point_string('aiida.calculations',
                                                entry_point_name)

        # If no seedname is specified, use the default 'aiida'
        evaluated_seedname = seedname or 'aiida'
        node = orm.CalcJobNode(computer=computer, process_type=entry_point)
        # File names are derived from the seedname (<seed>.win/.wout/.werr).
        node.set_attribute('input_filename', f'{evaluated_seedname}.win')
        node.set_attribute('output_filename', f'{evaluated_seedname}.wout')
        node.set_attribute('error_filename', f'{evaluated_seedname}.werr')
        node.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        node.set_option('max_wallclock_seconds', 1800)
        node.set_option('seedname', evaluated_seedname)

        if attributes:
            node.set_attribute_many(attributes)

        if inputs:
            for link_label, input_node in flatten_inputs(inputs):
                input_node.store()
                node.add_incoming(input_node,
                                  link_type=LinkType.INPUT_CALC,
                                  link_label=link_label)

        node.store()

        if test_name is not None:
            # Cast to `str` since `put_object_from_tree` expects a string path.
            filepath = str(shared_datadir / test_name)

            retrieved = orm.FolderData()
            retrieved.put_object_from_tree(filepath)
            retrieved.add_incoming(node,
                                   link_type=LinkType.CREATE,
                                   link_label='retrieved')
            retrieved.store()

            remote_folder = orm.RemoteData(computer=computer,
                                           remote_path='/tmp')
            remote_folder.add_incoming(node,
                                       link_type=LinkType.CREATE,
                                       link_label='remote_folder')
            remote_folder.store()

        return node
Exemple #14
0
def test_restart_wf_submit(
    db_test_app,
    get_structure,
    upload_basis_set_family,
    file_regression,
    data_regression,
):
    """Test restarting from a previous fort.9 file."""
    code = db_test_app.get_or_create_code("crystal17.main")

    # Input parameters for an NiO bulk calculation with AFM spin.
    parameters = {
        "title": "NiO Bulk with AFM spin",
        "scf.single": "UHF",
        "scf.k_points": (8, 8),
        "scf.spinlock.SPINLOCK": (0, 15),
        "scf.numerical.FMIXING": 30,
        "scf.post_scf": ["PPAN"],
    }

    structure = get_structure("NiO_afm")

    kinds = KindData(data={
        "kind_names": ["Ni1", "Ni2", "O"],
        "spin_alpha": [True, False, False],
        "spin_beta": [False, True, False],
    })

    # Run the symmetrisation workflow and take its structure/symmetry outputs.
    sym_node = run_get_node(
        WorkflowFactory("crystal17.sym3d"),
        structure=structure,
        settings=DataFactory("dict")(dict={
            "symprec": 0.01,
            "compute_primitive": True
        }),
    ).node
    outgoing = sym_node.get_outgoing()
    structure = outgoing.get_node_by_label("structure")
    symmetry = outgoing.get_node_by_label("symmetry")

    upload_basis_set_family()

    # Set up the calculation via the process builder.
    process_class = code.get_builder().process_class
    builder = process_class.create_builder(
        parameters,
        structure,
        "sto3g",
        symmetry=symmetry,
        kinds=kinds,
        code=code,
        metadata=db_test_app.get_default_metadata(with_mpi=True),
        unflatten=True,
    )

    with resource_context("crystal", "nio_sto3g_afm_scf_maxcyc") as path:
        # Point the restart folder at the previous calculation's outputs.
        builder.wf_folder = orm.RemoteData(computer=code.computer,
                                           remote_path=str(path))

        process_options = builder.process_class(
            inputs=builder).metadata.options

        with db_test_app.sandbox_folder() as folder:
            calc_info = db_test_app.generate_calcinfo("crystal17.main", folder,
                                                      builder)
            with folder.open(process_options.input_file_name) as handle:
                input_content = handle.read()

    file_regression.check(input_content)
    data_regression.check(sanitize_calc_info(calc_info))