Exemplo n.º 1
0
    def test_create_file_from_filelike_py3(self):
        """Test `aiida.common.folders.Folder.create_file_from_filelike` for python 3.

        A unicode stream may only be written in text mode and a byte stream only in
        binary mode; any mismatch must raise a `TypeError`.
        """
        unicode_string = 'unicode_string'
        byte_string = b'byte_string'

        # Create the temporary directory *before* entering the `try` block: if
        # `mkdtemp` itself fails, `tempdir` would be undefined and the `finally`
        # clause would raise a NameError, masking the original exception.
        tempdir = tempfile.mkdtemp()
        try:
            folder = Folder(tempdir)

            # Matching stream type and file mode should work.
            folder.create_file_from_filelike(six.StringIO(unicode_string),
                                             'random.dat',
                                             mode='w',
                                             encoding='utf-8')
            folder.create_file_from_filelike(six.BytesIO(byte_string),
                                             'random.dat',
                                             mode='wb',
                                             encoding=None)

            # For python three we make no exceptions, if you pass a unicode stream with binary mode, one should expect
            # a TypeError. Same for the inverse case of wanting to write in unicode mode but passing a byte stream
            with self.assertRaises(TypeError):
                folder.create_file_from_filelike(six.StringIO(unicode_string),
                                                 'random.dat',
                                                 mode='wb')

            with self.assertRaises(TypeError):
                folder.create_file_from_filelike(six.BytesIO(byte_string),
                                                 'random.dat',
                                                 mode='w')

        finally:
            shutil.rmtree(tempdir)
Exemplo n.º 2
0
    def test_get_abs_path_without_limit(self):
        """Check that `Folder.get_abs_path` joins the folder path and a relative filename."""
        from aiida.common.folders import Folder

        fd = Folder('/tmp')
        # Should not raise any exception.
        # `assertEquals` is a deprecated alias (removed in Python 3.12); use `assertEqual`.
        self.assertEqual(fd.get_abs_path('test_file.txt'),
                         '/tmp/test_file.txt')
Exemplo n.º 3
0
def _collect_files(base, path=''):
    """
    Recursively collects files from the tree, starting at a given path.

    :param base: absolute base directory of the tree
    :param path: path relative to ``base`` at which to start collecting
    :return: a list of dicts; folder entries carry ``name`` and ``type``,
        file entries additionally carry ``contents``, ``md5`` and ``sha1``
    """
    from aiida.common.folders import Folder
    from aiida.common.utils import md5_file, sha1_file
    import os

    full_path = os.path.join(base, path)
    if os.path.isdir(full_path):
        folder = Folder(full_path)
        files_now = []
        if path != '':
            # Folder entries are reported with a trailing path separator.
            if not path.endswith(os.sep):
                path = "{}{}".format(path, os.sep)
            files_now.append({
                'name': path,
                'type': 'folder',
            })
        for fname in sorted(folder.get_content_list()):
            files_now.extend(_collect_files(base, path=os.path.join(path, fname)))
        return files_now

    with open(full_path) as fhandle:
        return [{
            'name': path,
            'contents': fhandle.read(),
            'md5': md5_file(full_path),
            'sha1': sha1_file(full_path),
            'type': 'file',
        }]
Exemplo n.º 4
0
    def test_create_file_from_filelike(self):
        """Test `aiida.common.folders.Folder.create_file_from_filelike`.

        A text stream requires text mode and a byte stream requires binary mode;
        mixing them must raise a `TypeError`.
        """
        unicode_string = 'unicode_string'
        byte_string = b'byte_string'

        # Create the temporary directory before the `try` block: a failure in
        # `mkdtemp` would otherwise leave `tempdir` undefined and the `finally`
        # clause would raise a NameError masking the real error.
        tempdir = tempfile.mkdtemp()
        try:
            folder = Folder(tempdir)

            folder.create_file_from_filelike(io.StringIO(unicode_string),
                                             'random.dat',
                                             mode='w',
                                             encoding='utf-8')
            folder.create_file_from_filelike(io.BytesIO(byte_string),
                                             'random.dat',
                                             mode='wb',
                                             encoding=None)

            with self.assertRaises(TypeError):
                folder.create_file_from_filelike(io.StringIO(unicode_string),
                                                 'random.dat',
                                                 mode='wb')

            with self.assertRaises(TypeError):
                folder.create_file_from_filelike(io.BytesIO(byte_string),
                                                 'random.dat',
                                                 mode='w')

        finally:
            shutil.rmtree(tempdir)
Exemplo n.º 5
0
    def test_create_file_from_filelike_py2():
        """Test `aiida.common.folders.Folder.create_file_from_filelike` for python 2."""
        unicode_string = u'unicode_string'
        byte_string = 'byte_string'

        # Create the temporary directory before entering the `try` block: if
        # `mkdtemp` fails, `tempdir` would otherwise be undefined and the
        # `finally` clause would raise a NameError masking the real error.
        tempdir = tempfile.mkdtemp()
        try:
            folder = Folder(tempdir)

            # Passing a stream with matching file mode should work ofcourse
            folder.create_file_from_filelike(six.StringIO(unicode_string),
                                             'random.dat',
                                             mode='w',
                                             encoding='utf-8')
            folder.create_file_from_filelike(six.StringIO(byte_string),
                                             'random.dat',
                                             mode='wb',
                                             encoding=None)

            # For python 2 the `create_file_from_filelike` should be able to deal with incoherent arguments, such as
            # the examples below where a unicode string is passed with a binary mode, or a byte stream in unicode mode.
            folder.create_file_from_filelike(six.StringIO(unicode_string),
                                             'random.dat',
                                             mode='wb',
                                             encoding=None)
            folder.create_file_from_filelike(six.StringIO(byte_string),
                                             'random.dat',
                                             mode='w',
                                             encoding='utf-8')

        finally:
            shutil.rmtree(tempdir)
Exemplo n.º 6
0
def test_submit_test_function(clear_database_before_test, sto_calc_inputs):
    """
    Test the ``submit_test`` method
    """
    from aiida_castep.calculations.castep import CastepCalculation, submit_test
    from aiida.common.folders import Folder

    # First, drive submit_test with the process class plus the raw inputs.
    result = submit_test(CastepCalculation, **sto_calc_inputs)
    written = Folder(result[1]).get_content_list()
    for expected in ('aiida.cell', 'aiida.param'):
        assert expected in written

    # The nested input dictionary must be left untouched by the dry run.
    assert sto_calc_inputs['metadata'].get('dry_run') is not True
    assert sto_calc_inputs['metadata'].get('store_provenance') is not False

    # Second, drive submit_test through a process builder.
    builder = CastepCalculation.get_builder()
    builder._data = sto_calc_inputs
    result = submit_test(builder)
    written = Folder(result[1]).get_content_list()
    for expected in ('aiida.cell', 'aiida.param'):
        assert expected in written

    # The builder must also be left untouched.
    assert builder.metadata.get('dry_run') is not True
    assert builder.metadata.get('store_provenance') is not False
Exemplo n.º 7
0
    def prepare_for_submission(self, folder: Folder) -> CalcInfo:
        """Prepare the calculation for submission.

        Writes a shell input script (sleep + echo) and a JSON payload file into
        the sandbox folder, then assembles the `CalcInfo`.

        :param folder: a temporary folder on the local file system.
        :returns: the `CalcInfo` instance
        """
        # Echo "fail" when the `fail_calcjob` option is set so the parser can
        # detect the intended failure; otherwise echo "success".
        echo_value = "fail" if self.node.get_option(
            "fail_calcjob") else "success"
        with folder.open(self.options.input_filename, "w",
                         encoding="utf8") as handle:
            handle.write(f"sleep {self.inputs.time.value}\n")
            handle.write(f'echo "{echo_value}"\n')

        # `json.dump` writes `str`, so the target must be opened in text mode;
        # the original binary mode ("wb") would raise a TypeError at runtime.
        with folder.open(self.options.payload_filename, "w") as handle:
            json.dump(self.inputs.payload.get_dict(), handle)

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdin_name = self.options.input_filename
        codeinfo.stdout_name = self.options.output_filename

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.retrieve_list = [
            self.options.output_filename,
            self.options.payload_filename,
        ]

        return calcinfo
Exemplo n.º 8
0
def _collect_files(base, path=''):
    """
    Recursively collects files from the tree, starting at a given path.

    For the special file ``.aiida/calcinfo.json``, the files referenced by its
    ``local_copy_list`` are appended to the result as well.

    :param base: absolute base directory of the tree
    :param path: path relative to ``base`` at which to start collecting
    :return: a list of dicts sorted by name; folder entries carry ``name`` and
        ``type``, file entries additionally ``contents``, ``md5`` and ``sha1``
    """
    from aiida.common.folders import Folder
    from aiida.common.utils import md5_file, sha1_file
    import json
    import os

    def get_filename(file_dict):
        """Sort key: the relative path of the entry."""
        return file_dict['name']

    full_path = os.path.join(base, path)

    if os.path.isdir(full_path):
        folder = Folder(full_path)
        files_now = []
        if path != '':
            # Folder entries are reported with a trailing path separator.
            if not path.endswith(os.sep):
                path = "{}{}".format(path, os.sep)
            files_now.append({
                'name': path,
                'type': 'folder',
            })
        for fname in folder.get_content_list():
            files_now.extend(_collect_files(base, path=os.path.join(path, fname)))
        return sorted(files_now, key=get_filename)

    if path == '.aiida/calcinfo.json':
        # Read the file once and reuse the raw contents both for the file
        # entry and for JSON parsing (the original opened it twice).
        with open(full_path) as fhandle:
            contents = fhandle.read()
        files = [{
            'name': path,
            'contents': contents,
            'md5': md5_file(full_path),
            'sha1': sha1_file(full_path),
            'type': 'file',
        }]
        calcinfo = json.loads(contents)
        # Also collect the sources named in the local copy list, if any.
        for local_copy in calcinfo.get('local_copy_list', []):
            with open(local_copy[0]) as fhandle:
                files.append({
                    'name': os.path.normpath(local_copy[1]),
                    'contents': fhandle.read(),
                    'md5': md5_file(local_copy[0]),
                    'sha1': sha1_file(local_copy[0]),
                    'type': 'file',
                })
        return files

    with open(full_path) as fhandle:
        return [{
            'name': path,
            'contents': fhandle.read(),
            'md5': md5_file(full_path),
            'sha1': sha1_file(full_path),
            'type': 'file',
        }]
Exemplo n.º 9
0
 def test_get_abs_path_without_limit(self):
     """
     Check that the absolute path function can get an absolute path
     """
     tmp_folder = Folder('/tmp')
     # No exception expected: the relative filename is simply joined on.
     self.assertEqual(tmp_folder.get_abs_path('test_file.txt'),
                      '/tmp/test_file.txt')
Exemplo n.º 10
0
def test_prepare(vasp_calc, vasp_chgcar, vasp_wavecar, vasp_inputs,
                 localhost_dir):
    """Check that preparing creates all necessary files."""
    from aiida.common.folders import Folder
    from aiida_vasp.calcs.vasp import VaspCalculation
    wavecar, _ = vasp_wavecar
    chgcar, _ = vasp_chgcar

    parameters = {
        'gga': 'PE',
        'gga_compat': False,
        'lorbit': 11,
        'sigma': 0.5,
        'magmom': '30 * 2*0.',
        'icharg': 11
    }

    def build_calc():
        """Assemble a calculation from the current parameters plus density/wavefunction inputs."""
        inputs = vasp_inputs(parameters=parameters)
        inputs.charge_density = chgcar
        inputs.wavefunctions = wavecar
        return vasp_calc(inputs=inputs)

    calc = build_calc()
    sandbox = Folder(str(localhost_dir.parent))
    calcinfo = calc.prepare_for_submission(sandbox)
    written = sandbox.get_content_list()

    # All four main VASP input files must have been written.
    for expected in ('INCAR', 'KPOINTS', 'POSCAR', 'POTCAR'):
        assert expected in written

    assert 'EIGENVAL' in calcinfo.retrieve_list
    assert 'DOSCAR' in calcinfo.retrieve_list
    assert 'wannier90*' in calcinfo.retrieve_list

    assert calcinfo.codes_info[0].stdout_name == VaspCalculation._VASP_OUTPUT
    assert calcinfo.codes_info[0].join_files is True

    # Rerun with icharg = 2 and check the WAVECAR is copied locally.
    parameters['icharg'] = 2

    calc = build_calc()
    sandbox = Folder(str(localhost_dir.parent))
    calcinfo = calc.prepare_for_submission(sandbox)

    assert 'WAVECAR' in [entry[1] for entry in calcinfo.local_copy_list]
Exemplo n.º 11
0
def output_test(pk, testname, skip_uuids_from_inputs=None):
    """
    This is the function that should be used to create a new test from an
    existing calculation.

    It is possible to simplify the file removing unwanted nodes.

    :param pk: PK of Calculation, used for test
    :param testname: the name of this test, used to create a new folder.
        The folder name will be of the form test_PLUGIN_TESTNAME,
        with PLUGIN substituted by the plugin name, with dots replaced by
        underscores. Testname can contain only digits, letters and underscores.
    :param skip_uuids_from_inputs: an optional list of UUIDs of input nodes to
        be skipped
    :raises ValueError: if the test name is invalid, the output folder already
        exists, or the calculation has no retrieved output node
    """
    import os
    import json

    from aiida.common.folders import Folder
    from aiida.orm import JobCalculation
    from aiida.orm.utils import load_node
    from aiida.orm.importexport import export_tree

    # Use `None` as the default instead of a mutable `[]`: a shared default
    # list would leak state between calls.
    if skip_uuids_from_inputs is None:
        skip_uuids_from_inputs = []

    c = load_node(pk, parent_class=JobCalculation)
    outfolder = "test_{}_{}".format(c.get_parser_name().replace('.', '_'),
                                    testname)

    if not is_valid_folder_name(outfolder):
        raise ValueError("The testname is invalid; it can contain only "
                         "letters, digits or underscores")

    if os.path.exists(outfolder):
        raise ValueError("Out folder '{}' already exists".format(outfolder))

    # Collect the database nodes of all inputs that were not explicitly skipped.
    inputs = []
    for node in c.get_inputs():
        if node.uuid not in skip_uuids_from_inputs:
            inputs.append(node.dbnode)

    folder = Folder(outfolder)
    to_export = [c.dbnode] + inputs
    try:
        to_export.append(c.out.retrieved.dbnode)
    except AttributeError:
        raise ValueError("No output retrieved node; without it, we cannot "
                         "test the parser!")
    export_tree(to_export,
                folder=folder,
                also_parents=False,
                also_calc_outputs=False)

    # Create an empty checks file
    with open(os.path.join(outfolder, '_aiida_checks.json'), 'w') as f:
        json.dump({}, f)

    # Git does not track empty directories, so drop a placeholder into each.
    for path, dirlist, filelist in os.walk(outfolder):
        if len(dirlist) == 0 and len(filelist) == 0:
            with open("{}/.gitignore".format(path), 'w') as f:
                f.write("# This is a placeholder file, used to make git "
                        "store an empty folder")
                f.flush()
Exemplo n.º 12
0
def get_temp_folder():
    """Return an AiiDA ``Folder`` wrapping a freshly created temporary directory.

    Useful for calculation.submit_test()
    """
    tempdir = tempfile.mkdtemp()
    return Folder(tempdir)
Exemplo n.º 13
0
    def prepare_for_submission(self, folder: Folder) -> CalcInfo:
        """Prepare the calculation for submission.

        Convert the input nodes into the corresponding input files in the format that the code will expect. In addition,
        define and return a `CalcInfo` instance, which is a simple data structure that contains information for the
        engine, for example, on what files to copy to the remote machine, what files to retrieve once it has completed,
        specific scheduler settings and more.

        :param folder: a temporary folder on the local file system.
        :returns: the `CalcInfo` instance
        """
        # The input script is a single shell line echoing the sum of x and y.
        script = 'echo $(({x} + {y}))\n'.format(x=self.inputs.x.value,
                                                y=self.inputs.y.value)
        with folder.open(self.options.input_filename, 'w',
                         encoding='utf8') as handle:
            handle.write(script)

        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdin_name = self.options.input_filename
        codeinfo.stdout_name = self.options.output_filename

        calcinfo = CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.retrieve_list = [self.options.output_filename]

        return calcinfo
Exemplo n.º 14
0
def get_temp_folder():
    """Return an AiiDA ``Folder`` pointing at a newly created temporary directory.

    Useful for calculation.submit_test()
    """
    import tempfile

    from aiida.common.folders import Folder

    tmpdir = tempfile.mkdtemp()
    return Folder(tmpdir)
    def test_empty_repo_folder_export(self, temp_dir):
        """Check a Node's empty repository folder is exported properly.

        A stored `Dict` node's repository folder is emptied by hand, the node is
        exported in three archive formats, and each archive is re-imported into
        a reset database to verify the node round-trips with the same UUID.

        :param temp_dir: path of a temporary directory to hold the archives
        """
        from aiida.common.folders import Folder
        from aiida.tools.importexport.dbexport import export_tree

        node = orm.Dict().store()
        node_uuid = node.uuid

        node_repo = RepositoryFolder(section=Repository._section_name, uuid=node_uuid)  # pylint: disable=protected-access
        self.assertTrue(
            node_repo.exists(), msg='Newly created and stored Node should have had an existing repository folder'
        )
        # Empty the repository folder by deleting every file and subfolder, so
        # the exports below operate on a genuinely empty repository.
        for filename, is_file in node_repo.get_content_list(only_paths=False):
            abspath_filename = os.path.join(node_repo.abspath, filename)
            if is_file:
                os.remove(abspath_filename)
            else:
                shutil.rmtree(abspath_filename, ignore_errors=False)
        self.assertFalse(
            node_repo.get_content_list(),
            msg='Repository folder should be empty, instead the following was found: {}'.format(
                node_repo.get_content_list()
            )
        )

        # Export the node in all three supported archive formats.
        archive_variants = {
            'archive folder': os.path.join(temp_dir, 'export_tree'),
            'tar archive': os.path.join(temp_dir, 'export.tar.gz'),
            'zip archive': os.path.join(temp_dir, 'export.zip')
        }

        export_tree([node], folder=Folder(archive_variants['archive folder']), silent=True)
        export([node], filename=archive_variants['tar archive'], file_format='tar.gz', silent=True)
        export([node], filename=archive_variants['zip archive'], file_format='zip', silent=True)

        # For each archive: reset the database, re-import, and check exactly
        # one Dict node with the original UUID comes back.
        for variant, filename in archive_variants.items():
            self.reset_database()
            node_count = orm.QueryBuilder().append(orm.Dict, project='uuid').count()
            self.assertEqual(node_count, 0, msg='After DB reset {} Dict Nodes was (wrongly) found'.format(node_count))

            import_data(filename, silent=True)
            builder = orm.QueryBuilder().append(orm.Dict, project='uuid')
            imported_node_count = builder.count()
            self.assertEqual(
                imported_node_count,
                1,
                msg='After {} import a single Dict Node should have been found, '
                'instead {} was/were found'.format(variant, imported_node_count)
            )
            imported_node_uuid = builder.all()[0][0]
            self.assertEqual(
                imported_node_uuid,
                node_uuid,
                msg='The wrong UUID was found for the imported {}: '
                '{}. It should have been: {}'.format(variant, imported_node_uuid, node_uuid)
            )
Exemplo n.º 16
0
def test_prepare_for_submission(vasp2w90_calc_and_ref, tmp_path):
    """Test that the lwannier90 flag is written at the prepare for submission step."""
    from aiida.common.folders import Folder

    calc, reference = vasp2w90_calc_and_ref
    calc.prepare_for_submission(Folder(os.fspath(tmp_path)))
    # Write the INCAR to a scratch file and compare it line-by-line with the reference.
    with managed_temp_file() as incar_path:
        calc.write_incar(incar_path)
        with open(incar_path, 'r') as incar_file:
            assert incar_file.readlines() == reference['incar']
Exemplo n.º 17
0
def test_submit_test(clear_database_before_test, sto_calc_inputs):
    """
    Test the ``submit_test`` method
    """
    from aiida.common.folders import Folder
    from aiida_castep.calculations.castep import CastepCalculation

    builder = CastepCalculation.get_builder()
    builder._update(sto_calc_inputs)
    result = CastepCalculation.submit_test(builder)
    # The dry run must have written the two main CASTEP input files.
    written = Folder(result[1]).get_content_list()
    for expected in ('aiida.cell', 'aiida.param'):
        assert expected in written
Exemplo n.º 18
0
def test_incar_validate(vasp_calc, vasp_inputs, localhost_dir):
    """Test that an INCAR containing an invalid tag raises an exception."""
    from aiida.common import InputValidationError
    from aiida.common.folders import Folder

    parameters = {
        'gga': 'PE',
        'smear': 3  # <- Invalid tag
    }
    calc = vasp_calc(inputs=vasp_inputs(parameters=parameters))

    sandbox = Folder(str(localhost_dir.parent))
    with pytest.raises(InputValidationError):
        calc.prepare_for_submission(sandbox)
Exemplo n.º 19
0
    def prepare_for_submission(self, folder: folders.Folder):
        """Create input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        self._validate_inputs()

        dos_filenames = OpenmxCalculation.dos_filenames

        # Symlink the DOS 'val' and 'vec' files from the previous openmx run's
        # remote folder into this calculation's data path instead of copying.
        remote_symlink_list = []
        for key in ('val', 'vec'):
            dos_filename = dos_filenames[key]
            remote_symlink_list.append((
                self.inputs.openmx_output_folder.computer.uuid,
                os.path.join(self.inputs.openmx_output_folder.get_remote_path(),
                             dos_filename),
                os.path.join(self._DATA_PATH, dos_filename),
            ))

        retrieve_list = self._generate_retrieve_list()

        # Render the input file and write it into the sandbox folder.
        input_file_content = self._write_input_file()
        with folder.open(self._INPUT_FILE, 'w') as handle:
            handle.write(input_file_content)

        # Fill out the `CodeInfo`
        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = [dos_filenames['val'], dos_filenames['vec']]
        codeinfo.stdin_name = self._INPUT_FILE
        codeinfo.stdout_name = self._OUTPUT_FILE
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.with_mpi = True

        # Fill out the `CalcInfo`
        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.remote_symlink_list = remote_symlink_list
        calcinfo.retrieve_list = retrieve_list
        calcinfo.retrieve_list.append(self._OUTPUT_FILE)

        return calcinfo
Exemplo n.º 20
0
def test_write_wavecar(localhost_dir, vasp_calc, vasp_inputs, vasp_wavecar):
    """Test that WAVECAR file is written correctly."""
    from aiida.common.folders import Folder

    wavecar, _ = vasp_wavecar
    parameters = {
        'gga': 'PE',
        'gga_compat': False,
        'lorbit': 11,
        'sigma': 0.5,
        'magmom': '30 * 2*0.',
        'istart': 1
    }
    inputs = vasp_inputs(parameters=parameters)
    inputs.wavefunctions = wavecar
    calc = vasp_calc(inputs=inputs)
    sandbox = Folder(str(localhost_dir.parent))
    calcinfo = calc.prepare_for_submission(sandbox)

    # The WAVECAR must appear among the files staged for local copy.
    copied_names = [entry[1] for entry in calcinfo.local_copy_list]
    assert 'WAVECAR' in copied_names
Exemplo n.º 21
0
def test_write_chgcar(localhost_dir, vasp_calc, vasp_inputs, vasp_chgcar):
    """Test that CHGAR file is written correctly."""
    from aiida.common.folders import Folder

    chgcar, _ = vasp_chgcar
    parameters = {
        'gga': 'PE',
        'gga_compat': False,
        'lorbit': 11,
        'sigma': 0.5,
        'magmom': '30 * 2*0.',
        'icharg': 1
    }
    inputs = vasp_inputs(parameters=parameters)
    inputs.charge_density = chgcar
    calc = vasp_calc(inputs=inputs)

    sandbox = Folder(str(localhost_dir.dirpath()))
    calcinfo = calc.prepare_for_submission(sandbox)

    # The CHGCAR must appear among the files staged for local copy.
    copied_names = [entry[1] for entry in calcinfo.local_copy_list]
    assert 'CHGCAR' in copied_names
Exemplo n.º 22
0
    def prepare_for_submission(self, folder: folders.Folder):
        """Create input files from the input nodes passed to this instance of the `CalcJob`.

        Creates the pseudopotential/orbital subfolders, validates the
        parameters against the JSON schema, writes the input file, and
        assembles the `CalcInfo` with the copy and retrieve lists.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # To be filled out below
        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        # Create the subfolders for pseudopotentials and orbitals
        folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)
        folder.get_subfolder(self._ORBITAL_SUBFOLDER, create=True)

        # Get an uppercase-key-only version of the settings dictionary (also check for case-insensitive duplicates)
        if 'settings' in self.inputs:
            settings = uppercase_dict_keys(self.inputs.settings.get_dict(),
                                           dict_name='settings')
        else:
            settings = {}

        # Get an uppercase-key-only verion of the parameters dictionary (also check for case-insensitive duplicates)
        parameters = uppercase_dict_keys(self.inputs.parameters.get_dict(),
                                         dict_name='parameters')

        # No reserved parameter keywords should be provided
        self._check_reserved_keywords(parameters)

        # Load parameter schema
        with open(self._INPUT_SCHEMA, 'r') as stream:
            schema = json.load(stream)

        # Automatically generate input parameters for derived fields, e.g. structure -> Atoms.Unitvectors, etc.
        parameters = self._generate_input_parameters(
            self.inputs.structure, self.inputs.kpoints, parameters,
            self.inputs.pseudos, self.inputs.orbitals,
            self.inputs.orbital_configurations)

        # Get a lowercase-value-only version of the parameters dictionary
        parameters = lowercase_dict_values(parameters)

        # Validate input parameters
        self._validate_inputs(self.inputs.structure, self.inputs.kpoints,
                              parameters, self.inputs.pseudos,
                              self.inputs.orbitals,
                              self.inputs.orbital_configurations, schema)

        # Get input file contents and lists of the pseudopotential and orbital files which need to be copied
        input_file_content = write_input_file(parameters, schema)
        local_copy_pseudo_list, local_copy_orbital_list = self._generate_local_copy_lists(
            self.inputs.pseudos, self.inputs.orbitals)

        local_copy_list += local_copy_pseudo_list
        local_copy_list += local_copy_orbital_list

        # Add output files to retrieve which have been specified to write in the input parameters
        retrieve_list = []
        # Band output is only produced for band-type eigenvalue solver runs with a k-path.
        if parameters.get('BAND_NKPATH', 0) > 0 and parameters.get(
                'SCF_EIGENVALUESOLVER', 'band') == 'band':
            retrieve_list.append(self._DATAFILE_BAND_FILE)
        # Molecular dynamics runs additionally produce the MD trajectory files.
        if parameters.get('MD_TYPE', 'nomd') != 'nomd':
            retrieve_list.append(self._DATAFILE_MD_FILE)
            retrieve_list.append(self._DATAFILE_MD2_FILE)

        # Write input file
        with folder.open(self._INPUT_FILE, 'w') as handle:
            handle.write(input_file_content)

        # Fill out the `CodeInfo`
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.withmpi = True
        codeinfo.cmdline_params = ([self._INPUT_FILE] +
                                   list(settings.pop('CMDLINE', [])))
        codeinfo.stdout_name = self._OUTPUT_FILE

        # Fill out the `CalcInfo`
        calcinfo = datastructures.CalcInfo()
        calcinfo.uuid = str(self.uuid)
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list
        calcinfo.retrieve_list = retrieve_list
        calcinfo.retrieve_list.append(self._OUTPUT_FILE)
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        # TODO: pop parser settings and report remaining unknown settings

        return calcinfo
Exemplo n.º 23
0
    def test_unicode(cls):
        """
        Check that there are no exceptions raised when
        using unicode folders.
        """
        source_dir = tempfile.mkdtemp()
        dest_dir = tempfile.mkdtemp()
        # Create two files with non-ASCII names in the source directory.
        for filename in ('sąžininga', 'žąsis'):
            with io.open(os.path.join(source_dir, filename),
                         'w',
                         encoding='utf8') as fhandle:
                fhandle.write(u'test')

        folder = Folder(dest_dir)
        folder.insert_path(source_dir, 'destination')
        folder.insert_path(source_dir, u'šaltinis')

        # Also exercise a Folder whose own path contains unicode characters.
        folder = Folder(os.path.join(source_dir, u'šaltinis'))
        folder.insert_path(source_dir, 'destination')
        folder.insert_path(dest_dir, u'kitas-šaltinis')
Exemplo n.º 24
0
#~ vc = VaspCalculation()

# INCAR-style parameters for the test calculation. NOTE(review): key casing is
# mixed ('SYSTEM' vs 'ediff'); presumably the plugin normalizes case — confirm.
incar = {
    'SYSTEM': 'TestSystem',
    'ediff': 1E-5,
    'GGA_COMPAT': False,
}
# The structure is read from the POSCAR file given as the first CLI argument.
poscar = pmg.io.vasp.Poscar.from_file(sys.argv[1]).as_dict()
# Deliberately empty potentials/kpoints: per the description below, this
# script exercises the plugin with invalid data.
potcar = {}
kpoints = {}

ParameterData = DataFactory('parameter')
code = Code.get_from_string('vasp')

vc = code.new_calc()

vc.use_settings(ParameterData(dict=incar))
vc.use_structure(ParameterData(dict=poscar))
vc.use_potentials(ParameterData(dict=potcar))
vc.use_kpoints(ParameterData(dict=kpoints))

vc.label = 'Test Vasp Calculation'
vc.description = 'Test Vasp Calculation Plugin (with invalid data)'
vc.set_max_wallclock_seconds(1)  #should not run at all
vc.set_withmpi(True)
vc.set_resources({'num_machines': 2, 'num_mpiprocs_per_machine': 12})
vc.set_queue_name('bogus_queue_to_prevent_accidental_queuing')

# Dry-run submission into the folder given as the second CLI argument.
fldr = Folder(sys.argv[2])
vc.submit_test(fldr)
Exemplo n.º 25
0
    def test_unicode(self):
        """
        Check that there are no exceptions raised when
        using unicode folders.
        """
        from aiida.common.folders import Folder
        import os
        import tempfile

        source_dir = tempfile.mkdtemp()
        dest_dir = tempfile.mkdtemp()
        # Create two files with non-ASCII names in the source directory.
        for filename in ("sąžininga", "žąsis"):
            with open(os.path.join(source_dir, filename), 'w') as fhandle:
                fhandle.write("test")

        folder = Folder(dest_dir)
        folder.insert_path(source_dir, "destination")
        folder.insert_path(source_dir, u"šaltinis")

        # Also exercise a Folder whose own path contains unicode characters.
        folder = Folder(os.path.join(source_dir, u"šaltinis"))
        folder.insert_path(source_dir, "destination")
        folder.insert_path(dest_dir, u"kitas-šaltinis")
Exemplo n.º 26
0
    def presubmit(self, folder: Folder) -> CalcInfo:
        """Prepares the calculation folder with all inputs, ready to be copied to the cluster.

        Writes the scheduler submit script and the ``.aiida`` bookkeeping files into
        ``folder``, and fills in the retrieve lists and job template derived from the
        node's options and the plugin's ``prepare_for_submission``.

        :param folder: a SandboxFolder that can be used to write calculation input files and the scheduling script.

        :return calcinfo: the CalcInfo object containing the information needed by the daemon to handle operations.

        """
        # pylint: disable=too-many-locals,too-many-statements,too-many-branches
        from aiida.common.exceptions import PluginInternalError, ValidationError, InvalidOperation, InputValidationError
        from aiida.common import json
        from aiida.common.utils import validate_list_of_string_tuples
        from aiida.common.datastructures import CodeInfo, CodeRunMode
        from aiida.orm import load_node, Code, Computer
        from aiida.plugins import DataFactory
        from aiida.schedulers.datastructures import JobTemplate

        computer = self.node.computer
        inputs = self.node.get_incoming(link_type=LinkType.INPUT_CALC)

        # A dry run may operate on an unstored node; otherwise cached links are an error.
        if not self.inputs.metadata.dry_run and self.node.has_cached_links(
        ):  # type: ignore[union-attr]
            raise InvalidOperation(
                'calculation node has unstored links in cache')

        codes = [_ for _ in inputs.all_nodes() if isinstance(_, Code)]

        # Validate that every input code can run on the target computer and that no
        # plugin-created file shadows a local executable.
        for code in codes:
            if not code.can_run_on(computer):
                raise InputValidationError(
                    'The selected code {} for calculation {} cannot run on computer {}'
                    .format(code.pk, self.node.pk, computer.label))

            if code.is_local() and code.get_local_executable(
            ) in folder.get_content_list():
                raise PluginInternalError(
                    f'The plugin created a file {code.get_local_executable()} that is also the executable name!'
                )

        calc_info = self.prepare_for_submission(folder)
        calc_info.uuid = str(self.node.uuid)
        scheduler = computer.get_scheduler()

        # I create the job template to pass to the scheduler
        job_tmpl = JobTemplate()
        job_tmpl.shebang = computer.get_shebang()
        job_tmpl.submit_as_hold = False
        job_tmpl.rerunnable = False
        job_tmpl.job_environment = {}
        # 'email', 'email_on_started', 'email_on_terminated',
        job_tmpl.job_name = f'aiida-{self.node.pk}'
        job_tmpl.sched_output_path = self.options.scheduler_stdout
        if self.options.scheduler_stderr == self.options.scheduler_stdout:
            job_tmpl.sched_join_files = True
        else:
            job_tmpl.sched_error_path = self.options.scheduler_stderr
            job_tmpl.sched_join_files = False

        # Set retrieve path, add also scheduler STDOUT and STDERR
        retrieve_list = calc_info.retrieve_list or []
        if (job_tmpl.sched_output_path is not None
                and job_tmpl.sched_output_path not in retrieve_list):
            retrieve_list.append(job_tmpl.sched_output_path)
        if not job_tmpl.sched_join_files:
            if (job_tmpl.sched_error_path is not None
                    and job_tmpl.sched_error_path not in retrieve_list):
                retrieve_list.append(job_tmpl.sched_error_path)
        retrieve_list.extend(
            self.node.get_option('additional_retrieve_list') or [])
        self.node.set_retrieve_list(retrieve_list)

        retrieve_singlefile_list = calc_info.retrieve_singlefile_list or []
        # a validation on the subclasses of retrieve_singlefile_list
        for _, subclassname, _ in retrieve_singlefile_list:
            file_sub_class = DataFactory(subclassname)
            if not issubclass(file_sub_class, orm.SinglefileData):
                raise PluginInternalError(
                    '[presubmission of calc {}] retrieve_singlefile_list subclass problem: {} is '
                    'not subclass of SinglefileData'.format(
                        self.node.pk, file_sub_class.__name__))
        if retrieve_singlefile_list:
            self.node.set_retrieve_singlefile_list(retrieve_singlefile_list)

        # Handle the retrieve_temporary_list
        retrieve_temporary_list = calc_info.retrieve_temporary_list or []
        self.node.set_retrieve_temporary_list(retrieve_temporary_list)

        # the if is done so that if the method returns None, this is
        # not added. This has two advantages:
        # - it does not add too many \n\n if most of the prepend_text are empty
        # - most importantly, skips the cases in which one of the methods
        #   would return None, in which case the join method would raise
        #   an exception
        prepend_texts = [computer.get_prepend_text()] + \
            [code.get_prepend_text() for code in codes] + \
            [calc_info.prepend_text, self.node.get_option('prepend_text')]
        job_tmpl.prepend_text = '\n\n'.join(prepend_text
                                            for prepend_text in prepend_texts
                                            if prepend_text)

        append_texts = [self.node.get_option('append_text'), calc_info.append_text] + \
            [code.get_append_text() for code in codes] + \
            [computer.get_append_text()]
        job_tmpl.append_text = '\n\n'.join(append_text
                                           for append_text in append_texts
                                           if append_text)

        # Set resources, also with get_default_mpiprocs_per_machine
        resources = self.node.get_option('resources')
        scheduler.preprocess_resources(
            resources, computer.get_default_mpiprocs_per_machine())
        job_tmpl.job_resource = scheduler.create_job_resource(**resources)

        subst_dict = {
            'tot_num_mpiprocs': job_tmpl.job_resource.get_tot_num_mpiprocs()
        }

        for key, value in job_tmpl.job_resource.items():
            subst_dict[key] = value
        # Substitute the resource placeholders in the configured mpirun command.
        mpi_args = [
            arg.format(**subst_dict) for arg in computer.get_mpirun_command()
        ]
        extra_mpirun_params = self.node.get_option(
            'mpirun_extra_params')  # same for all codes in the same calc

        # set the codes_info
        if not isinstance(calc_info.codes_info, (list, tuple)):
            raise PluginInternalError(
                'codes_info passed to CalcInfo must be a list of CalcInfo objects'
            )

        codes_info = []
        for code_info in calc_info.codes_info:

            if not isinstance(code_info, CodeInfo):
                raise PluginInternalError(
                    'Invalid codes_info, must be a list of CodeInfo objects')

            if code_info.code_uuid is None:
                raise PluginInternalError(
                    'CalcInfo should have the information of the code to be launched'
                )
            this_code = load_node(code_info.code_uuid, sub_classes=(Code, ))

            this_withmpi = code_info.withmpi  # to decide better how to set the default
            if this_withmpi is None:
                if len(calc_info.codes_info) > 1:
                    raise PluginInternalError(
                        'For more than one code, it is necessary to set withmpi in codes_info'
                    )
                else:
                    this_withmpi = self.node.get_option('withmpi')

            if this_withmpi:
                this_argv = (mpi_args + extra_mpirun_params +
                             [this_code.get_execname()] +
                             (code_info.cmdline_params
                              if code_info.cmdline_params is not None else []))
            else:
                this_argv = [this_code.get_execname()
                             ] + (code_info.cmdline_params if
                                  code_info.cmdline_params is not None else [])

            # overwrite the old cmdline_params and add codename and mpirun stuff
            code_info.cmdline_params = this_argv

            codes_info.append(code_info)
        job_tmpl.codes_info = codes_info

        # set the codes execution mode

        if len(codes) > 1:
            try:
                job_tmpl.codes_run_mode = calc_info.codes_run_mode
            except KeyError as exc:
                raise PluginInternalError(
                    'Need to set the order of the code execution (parallel or serial?)'
                ) from exc
        else:
            job_tmpl.codes_run_mode = CodeRunMode.SERIAL
        ########################################################################

        custom_sched_commands = self.node.get_option(
            'custom_scheduler_commands')
        if custom_sched_commands:
            job_tmpl.custom_scheduler_commands = custom_sched_commands

        job_tmpl.import_sys_environment = self.node.get_option(
            'import_sys_environment')

        job_tmpl.job_environment = self.node.get_option(
            'environment_variables')

        # Copy the optional scheduler settings onto the template only when set.
        queue_name = self.node.get_option('queue_name')
        account = self.node.get_option('account')
        qos = self.node.get_option('qos')
        if queue_name is not None:
            job_tmpl.queue_name = queue_name
        if account is not None:
            job_tmpl.account = account
        if qos is not None:
            job_tmpl.qos = qos
        priority = self.node.get_option('priority')
        if priority is not None:
            job_tmpl.priority = priority
        max_memory_kb = self.node.get_option('max_memory_kb')
        if max_memory_kb is not None:
            job_tmpl.max_memory_kb = max_memory_kb
        max_wallclock_seconds = self.node.get_option('max_wallclock_seconds')
        if max_wallclock_seconds is not None:
            job_tmpl.max_wallclock_seconds = max_wallclock_seconds
        # NOTE: a second, verbatim-identical `max_memory_kb` lookup-and-assign
        # used to follow here; it was removed as redundant (same option, same
        # assignment, no intervening mutation).

        # Write the submit script and the job/calc bookkeeping files.
        submit_script_filename = self.node.get_option('submit_script_filename')
        script_content = scheduler.get_submit_script(job_tmpl)
        folder.create_file_from_filelike(io.StringIO(script_content),
                                         submit_script_filename,
                                         'w',
                                         encoding='utf8')

        subfolder = folder.get_subfolder('.aiida', create=True)
        subfolder.create_file_from_filelike(io.StringIO(json.dumps(job_tmpl)),
                                            'job_tmpl.json',
                                            'w',
                                            encoding='utf8')
        subfolder.create_file_from_filelike(io.StringIO(json.dumps(calc_info)),
                                            'calcinfo.json',
                                            'w',
                                            encoding='utf8')

        if calc_info.local_copy_list is None:
            calc_info.local_copy_list = []

        if calc_info.remote_copy_list is None:
            calc_info.remote_copy_list = []

        # Some validation
        this_pk = self.node.pk if self.node.pk is not None else '[UNSTORED]'
        local_copy_list = calc_info.local_copy_list
        try:
            validate_list_of_string_tuples(local_copy_list, tuple_length=3)
        except ValidationError as exception:
            raise PluginInternalError(
                f'[presubmission of calc {this_pk}] local_copy_list format problem: {exception}'
            ) from exception

        remote_copy_list = calc_info.remote_copy_list
        try:
            validate_list_of_string_tuples(remote_copy_list, tuple_length=3)
        except ValidationError as exception:
            raise PluginInternalError(
                f'[presubmission of calc {this_pk}] remote_copy_list format problem: {exception}'
            ) from exception

        for (remote_computer_uuid, _, dest_rel_path) in remote_copy_list:
            try:
                Computer.objects.get(uuid=remote_computer_uuid)  # pylint: disable=unused-variable
            except exceptions.NotExistent as exception:
                raise PluginInternalError(
                    '[presubmission of calc {}] '
                    'The remote copy requires a computer with UUID={}'
                    'but no such computer was found in the '
                    'database'.format(this_pk,
                                      remote_computer_uuid)) from exception
            if os.path.isabs(dest_rel_path):
                raise PluginInternalError(
                    '[presubmission of calc {}] '
                    'The destination path of the remote copy '
                    'is absolute! ({})'.format(this_pk, dest_rel_path))

        return calc_info
Exemplo n.º 27
0
    'generation_style': 'Gamma',
    'kpoints': [[8,8,8]],
    'usershift': [0,0,0],
}

# Build and configure a test VASP calculation from the incar/poscar/potcar/kpoints
# data defined above, then either store-and-submit it or run a submit test.
code = Code.get_from_string('vasp')
calc = code.new_calc()

ParameterData = DataFactory('parameter')
SinglefileData = DataFactory('singlefile')
calc.use_settings(ParameterData(dict=incar))
calc.use_structure(ParameterData(dict=poscar))
#~ calc.use_potentials(ParameterData(dict=potcar))
calc.use_potentials(SinglefileData(file=potcar))
calc.use_kpoints(ParameterData(dict=kpoints))

calc.label = 'Vasp Selfconsistent Test Run'
calc.description = 'Test the Vasp Plugin with a simple selfconsistent run for fcc InAs'
calc.set_max_wallclock_seconds(60)
calc.set_withmpi(False)
calc.set_resources({'num_machines': 1, 'num_mpiprocs_per_machine': 1})
calc.set_queue_name('dphys_compute')

if sys.argv[1] == '--submit':
    calc.store_all()
    # Fixed: the Python 2 `print` statement is a SyntaxError under Python 3.
    print("created calculation; with uuid='{}' and PK={}".format(calc.uuid, calc.pk))
    calc.submit()
if sys.argv[1] == '--test':
    from aiida.common.folders import Folder
    calc.submit_test(Folder(sys.argv[3]))
Exemplo n.º 28
0
def create_builder_from_file(input_folder, input_file_name, code, metadata, pseudo_folder_path=None, use_first=False):
    """Create a populated process builder for a `PwCalculation` from a standard QE input file and pseudo (upf) files

    :param input_folder: the folder containing the input file
    :type input_folder: aiida.common.folders.Folder or str
    :param input_file_name: the name of the input file
    :type input_file_name: str
    :param code: the code associated with the calculation
    :type code: aiida.orm.Code or str
    :param metadata: metadata values for the calculation (e.g. resources)
    :type metadata: dict
    :param pseudo_folder_path: the folder containing the upf files (if None, then input_folder is used)
    :type pseudo_folder_path: aiida.common.folders.Folder or str or None
    :param use_first: passed to UpfData.get_or_create
    :type use_first: bool
    :raises NotImplementedError: if the structure is not ibrav=0
    :return: a builder instance for PwCalculation
    """
    PwCalculation = CalculationFactory('quantumespresso.pw')

    builder = PwCalculation.get_builder()
    builder.metadata = metadata

    # Resolve the code from its label when it was passed as a string.
    builder.code = Code.get_from_string(code) if isinstance(code, six.string_types) else code

    # Parse the QE input file found in the input folder.
    if isinstance(input_folder, six.string_types):
        input_folder = Folder(input_folder)

    with input_folder.open(input_file_name) as handle:
        parsed_file = PwInputFile(handle)

    builder.structure = parsed_file.get_structuredata()
    builder.kpoints = parsed_file.get_kpointsdata()

    if parsed_file.namelists['SYSTEM']['ibrav'] != 0:
        raise NotImplementedError('Found ibrav != 0: `aiida-quantumespresso` currently only supports ibrav = 0.')

    # Strip the namelist items that the plugin doesn't allow or sets later.
    # NOTE: If any of the position or cell units are in alat or crystal
    # units, that will be taken care of by the input parsing tools, and
    # we are safe to fake that they were never there in the first place.
    parameters = copy.deepcopy(parsed_file.namelists)
    for namelist, blocked in PwCalculation._blocked_keywords:  # pylint: disable=protected-access
        namelist_dict = parameters[namelist]
        # Strip digits and parentheses so that e.g. celldm(1) matches the blocked key celldm.
        matching_keys = [key for key in namelist_dict if re.sub('[(0-9)]', '', key) == blocked]
        for key in matching_keys:
            namelist_dict.pop(key, None)
    builder.parameters = Dict(dict=parameters)

    # Get or create a UpfData node for each pseudopotential used in the calculation,
    # reusing a single node for repeated file names.
    if pseudo_folder_path is None:
        pseudo_folder_path = input_folder
    if isinstance(pseudo_folder_path, six.string_types):
        pseudo_folder_path = Folder(pseudo_folder_path)

    species = parsed_file.atomic_species
    upf_nodes = {}
    pseudos = {}
    for kind_name, filename in zip(species['names'], species['pseudo_file_names']):
        if filename not in upf_nodes:
            upf_path = pseudo_folder_path.get_abs_path(filename)
            node, _ = UpfData.get_or_create(upf_path, use_first=use_first, store_upf=False)
            upf_nodes[filename] = node
        pseudos[kind_name] = upf_nodes[filename]
    builder.pseudos = pseudos

    settings = {}
    if parsed_file.k_points['type'] == 'gamma':
        settings['gamma_only'] = True

    # If there are any fixed coordinates (i.e. force modification) present in the input file, specify in settings.
    # Function ``any()`` only works for 1-dimensional lists so we have to call it twice manually.
    fixed_coords = parsed_file.atomic_positions['fixed_coords']
    if any(any(fc_xyz) for fc_xyz in fixed_coords):
        settings['FIXED_COORDS'] = fixed_coords

    if settings:
        builder.settings = settings

    return builder
Exemplo n.º 29
0
    def dryrun_test(cls, inputs, castep_exe='castep.serial', verbose=True):
        """
        Do a dryrun test in a folder with prepared builder or inputs

        :param inputs: a ProcessBuilder, or a dict of inputs passed to ``submit_test``
        :param castep_exe: name or path of the CASTEP executable to invoke
        :param verbose: if True, print progress information
        :return: a ``(folder, dryrun_results)`` tuple, or ``None`` if the CASTEP
            executable cannot be found
        :raises InputValidationError: if the dryrun produced any ``*.err`` file
        """

        if isinstance(inputs, ProcessBuilder):
            res = cls.submit_test(inputs)
        else:
            res = cls.submit_test(cls, **inputs)
        folder = Folder(res[1])
        dry_run_node = res[0]
        seedname = dry_run_node.get_option('seedname')

        def _print(inp):
            # Print only when verbose output was requested.
            if verbose:
                print(inp)

        # Check that the CASTEP executable is available before starting.
        try:
            output = check_output([castep_exe, "-v"], universal_newlines=True)
        except OSError:
            _print("CASTEP executable '{}' is not found".format(castep_exe))
            return None

        # Now start dryrun
        _print("Running with {}".format(
            check_output(["which", castep_exe], universal_newlines=True)))
        _print(output)

        _print("Starting dryrun...")
        call([castep_exe, "--dryrun", seedname], cwd=folder.abspath)

        # Check if any *err files
        contents = folder.get_content_list()
        for fname in contents:
            if fnmatch(fname, "*.err"):
                with folder.open(fname) as fhandle:
                    # Fixed: the message previously read "{}:\fname" which
                    # embedded a literal form-feed escape (\f) plus the text
                    # "name" instead of just the formatted file name.
                    _print("Error found in {}:".format(fname))
                    _print(fhandle.read())
                raise InputValidationError("Error found during dryrun")

        # Gather information from the dryrun file
        dryrun_results = {}
        out_file = seedname + '.castep'
        with folder.open(out_file) as fhandle:
            for line in fhandle:
                mth = re.match(r"\s*k-Points For SCF Sampling:\s+(\d+)\s*",
                               line)
                if mth:
                    dryrun_results["num_kpoints"] = int(mth.group(1))
                    _print("Number of k-points: {}".format(mth.group(1)))
                    mth = None
                    continue
                mth = re.match(
                    r"\| Approx\. total storage required"
                    r" per process\s+([0-9.]+)\sMB\s+([0-9.]+)", line)
                if mth:
                    dryrun_results["memory_MB"] = (float(mth.group(1)))
                    dryrun_results["disk_MB"] = (float(mth.group(2)))
                    _print("RAM: {} MB, DISK: {} MB".format(
                        mth.group(1), mth.group(2)))
                    mth = None
                    continue

        return folder, dryrun_results
Exemplo n.º 30
0
    def test_unicode(self):
        """Check that there are no exceptions raised when using unicode folders."""
        import shutil

        tmpsource = tempfile.mkdtemp()
        tmpdest = tempfile.mkdtemp()
        try:
            # Create two files whose names contain non-ASCII characters.
            with open(os.path.join(tmpsource, 'sąžininga'), 'w',
                      encoding='utf8') as fhandle:
                fhandle.write('test')
            with open(os.path.join(tmpsource, 'žąsis'), 'w',
                      encoding='utf8') as fhandle:
                fhandle.write('test')

            # Insert the source tree under both an ASCII and a unicode name.
            folder = Folder(tmpdest)
            folder.insert_path(tmpsource, 'destination')
            folder.insert_path(tmpsource, 'šaltinis')

            self.assertEqual(sorted(folder.get_content_list()),
                             sorted(['destination', 'šaltinis']))
            self.assertEqual(
                sorted(folder.get_subfolder('destination').get_content_list()),
                sorted(['sąžininga', 'žąsis']))
            self.assertEqual(
                sorted(folder.get_subfolder('šaltinis').get_content_list()),
                sorted(['sąžininga', 'žąsis']))

            # Also exercise a Folder rooted at a unicode path.
            folder = Folder(os.path.join(tmpsource, 'šaltinis'))
            folder.insert_path(tmpdest, 'destination')
            folder.insert_path(tmpdest, 'kitas-šaltinis')
            self.assertEqual(sorted(folder.get_content_list()),
                             sorted(['destination', 'kitas-šaltinis']))
        finally:
            # Fix: the temporary directories were previously leaked.
            shutil.rmtree(tmpsource, ignore_errors=True)
            shutil.rmtree(tmpdest, ignore_errors=True)