Example #1
    def _include_extra_kpoints(self,
                               kpn_node,
                               kpn_name,
                               kpn_settings,
                               report_fn=None):
        """Write extra kpoints to the cell"""

        try:
            mesh, offset = kpn_node.get_kpoints_mesh()
            has_mesh = True
        except AttributeError:
            # Not defined as mesh
            try:
                bs_kpts_list = kpn_node.get_kpoints()
                num_kpoints = len(bs_kpts_list)
                has_mesh = False
                if num_kpoints == 0:
                    raise InputValidationError(
                        "At least one k points must be provided")
            except AttributeError:
                raise InputValidationError(
                    "No valid {}_kpoints have been found from node {}".format(
                        kpn_name.lower(), kpn_node.pk))

            # Do we have weights defined?
            try:
                _, weights = kpn_node.get_kpoints(also_weights=True)
            except AttributeError:
                # If not, fill with fractions
                if kpn_settings['need_weights'] is True:
                    weights = np.ones(num_kpoints, dtype=float) / num_kpoints
                    if report_fn is not None:
                        report_fn(
                            'Warning: filling evenly distributed weights for {}_kpoints'
                            .format(kpn_name))

        # now add to the cell file
        if has_mesh is True:
            mesh_name = "{}_kpoint_mp_grid".format(kpn_name)
            self.cell_file[mesh_name] = "{} {} {}".format(*mesh)
            if offset != [0., 0., 0.]:
                self.cell_file[mesh_name.replace(
                    "grid", "offset")] = "{} {} {}".format(*offset)
        else:
            extra_kpts_lines = []
            if kpn_settings['need_weights'] is True:
                for kpoint, weight in zip(bs_kpts_list, weights):
                    extra_kpts_lines.append(
                        f"{kpoint[0]:18.10f} {kpoint[1]:18.10f} {kpoint[2]:18.10f} {weight:18.14f}"
                    )
            else:
                for kpoint in bs_kpts_list:
                    extra_kpts_lines.append(
                        f"{kpoint[0]:18.10f} {kpoint[1]:18.10f} {kpoint[2]:18.10f}"
                    )
            bname = "{}_kpoint_list".format(kpn_name).upper()
            self.cell_file[bname] = Block(extra_kpts_lines)
Example #2
def _create_win_string(  # pylint: disable=too-many-arguments
    parameters,
    kpoints,
    structure=None,
    kpoint_path=None,
    projections=None,
    random_projections=False,
):
    from aiida.plugins import DataFactory
    from aiida.orm import List

    # prepare the main input text
    input_file_lines = []
    if isinstance(parameters, DataFactory('dict')):
        parameters = parameters.get_dict()
    try:
        parameters.setdefault('mp_grid', kpoints.get_kpoints_mesh()[0])
    except AttributeError:
        pass
    input_file_lines += _format_parameters(parameters)

    block_inputs = {}
    if projections is None:
        # If no projections are specified, random projections are used (dangerous!)
        if random_projections:
            block_inputs['projections'] = ['random']
        else:
            block_inputs['projections'] = []
    elif isinstance(projections, (tuple, list)):
        if random_projections:
            raise InputValidationError(
                'random_projections cannot be True with (tuple, list) projections. '
                'Instead, use the "random" string as the first element of the list.')
        block_inputs['projections'] = projections
    elif isinstance(projections, List):
        if random_projections:
            raise InputValidationError(
                'random_projections cannot be True with List-type projections. '
                'Instead, use the "random" string as the first element of the List.')
        block_inputs['projections'] = projections.get_list()
    else:
        block_inputs['projections'] = _format_all_projections(
            projections, random_projections=random_projections)

    if structure is not None:
        block_inputs['unit_cell_cart'] = _format_unit_cell(structure)
        block_inputs['atoms_cart'] = _format_atoms_cart(structure)
    if kpoints is not None:
        block_inputs['kpoints'] = _format_kpoints(kpoints)
    if kpoint_path is not None:
        block_inputs['kpoint_path'] = _format_kpoint_path(kpoint_path)
    input_file_lines += _format_block_inputs(block_inputs)

    return '\n'.join(input_file_lines) + '\n'
Example #3
    def _handle_retrieved_parent_folder(self, inp, folder):
        """Enable restart from the retrieved folder."""
        if "Restart" not in self.inputs.retrieved_parent_folder._repository.list_object_names(
        ):  # pylint: disable=protected-access
            raise InputValidationError(
                "Restart was requested but the restart "
                "folder was not found in the previos calculation.")

        dest_folder = folder.get_abs_path("RestartInitial")

        # we first copy the whole restart folder
        copytree(
            os.path.join(
                self.inputs.retrieved_parent_folder._repository.
                _get_base_folder().abspath, "Restart"),  # pylint: disable=protected-access
            dest_folder)

        # once this is done, we rename the files to match temperature, pressure and number of unit cells
        for i_system, system_name in enumerate(inp.system_order):
            system = inp.params["System"][system_name]
            current_folder = folder.get_abs_path(
                "RestartInitial/System_{}".format(i_system))
            content = os.listdir(current_folder)
            if len(content) != 1:
                raise InputValidationError(
                    "Restart folder should contain 1 file only, got {}".format(
                        len(content)))
            old_fname = content[0]
            if system["type"] == "Box":
                system_or_box = "Box"
                (n_x, n_y, n_z) = (1, 1, 1)
                if 'ExternalPressure' not in system:
                    system['ExternalPressure'] = 0
            elif system["type"] == "Framework":
                system_or_box = system_name
                try:
                    (n_x, n_y,
                     n_z) = tuple(map(int, system['UnitCells'].split()))
                except KeyError:
                    (n_x, n_y, n_z) = 1, 1, 1

            external_pressure = system[
                'ExternalPressure'] if 'ExternalPressure' in system else 0

            new_fname = "restart_{:s}_{:d}.{:d}.{:d}_{:f}_{:g}".format(
                system_or_box, n_x, n_y, n_z, system['ExternalTemperature'],
                external_pressure)
            os.rename(os.path.join(current_folder, old_fname),
                      os.path.join(current_folder, new_fname))
Example #4
    def prepare_for_submission(self, tempfolder):
        wannier_folder = self.inputs.wannier_folder
        pos_kind = self.inputs.pos_kind.value

        # get the prefix from the *_hr.dat file
        for filename in wannier_folder.list_object_names():
            if filename.endswith('_hr.dat'):
                prefix = filename.rsplit('_hr.dat', 1)[0]
                break
        else:
            raise InputValidationError(
                "'wannier_folder' does not contain a *_hr.dat file.")

        calcinfo, codeinfo = super(ParseCalculation,
                                   self).prepare_for_submission(tempfolder)

        # add Wannier90 output files to local_copy_list
        calcinfo.local_copy_list = [
            (wannier_folder.uuid, filename, filename)
            for filename in wannier_folder.list_object_names()
        ]
        codeinfo.cmdline_params += [
            '-p',
            prefix,
            '--pos-kind',
            pos_kind,
        ]

        return calcinfo
Example #5
    def _create_FORCE_SETS(self, folder):
        if 'force_sets' in self.inputs:
            force_sets = self.inputs.force_sets
        else:
            force_sets = None
        if 'displacement_dataset' in self.inputs.settings.attributes:
            dataset = self.inputs.settings['displacement_dataset']
        elif 'dataset' in self.inputs.settings.attributes:
            dataset = self.inputs.settings['dataset']
        elif ('dataset' in self.inputs and
              'displacements' in self.inputs.dataset.get_arraynames()):
            dataset = {'displacements':
                       self.inputs.dataset.get_array('displacements')}
            if 'forces' in self.inputs.dataset.get_arraynames():
                dataset['forces'] = self.inputs.dataset.get_array('forces')
                if force_sets is not None:
                    force_sets = None
        else:
            dataset = None

        # can work both for type-I and type-II
        force_sets_txt = get_FORCE_SETS_txt(dataset, force_sets=force_sets)
        if force_sets_txt is None:
            msg = ("Displacements or forces were not found.")
            raise InputValidationError(msg)

        with folder.open(
                self._INPUT_FORCE_SETS, 'w', encoding='utf8') as handle:
            handle.write(force_sets_txt)
Example #6
    def run(self):
        import plumpy
        from aiida.engine.processes.calcjobs.tasks import RETRIEVE_COMMAND
        from aiida.common.folders import SandboxFolder

        _ = super(VaspImmigrant, self).run()

        # Make sure the retrieve list is set (done in presubmit so we need to call that also)
        with SandboxFolder() as folder:
            self.presubmit(folder)

        settings = self.inputs.get('settings', None)
        settings = settings.get_dict() if settings else {}
        remote_path = settings.get('import_from_path', None)
        if not remote_path:
            raise InputValidationError(
                'immigrant calculations need an input "settings" containing a key "import_from_path"!'
            )
        self.node.set_remote_workdir(remote_path)
        remotedata = get_data_node('remote',
                                   computer=self.node.computer,
                                   remote_path=remote_path)
        remotedata.add_incoming(self.node,
                                link_type=LinkType.CREATE,
                                link_label='remote_folder')
        remotedata.store()

        return plumpy.Wait(msg='Waiting to retrieve', data=RETRIEVE_COMMAND)
Example #7
    def _prepare_input_files(self, folder):
        basis_dict = self._validate_basis_input(dict(self.inputs))
        params = self.inputs.parameters.get_dict()
        # check if both SPINLOCK and is_magnetic are present; in this case SPINLOCK takes precedence
        if 'spinlock' in params['scf'] and self.inputs.is_magnetic:
            self.logger.warning(
                'Both SPINLOCK and is_magnetic are present, dropping is_magnetic'
            )
            self.inputs.is_magnetic = False
        # create input files: d12, taking into account
        try:
            basis_dict['basis_family'].set_structure(self.inputs.structure)
            if "use_oxistates" in self.inputs:
                basis_dict['basis_family'].set_oxistates(
                    self.inputs.use_oxistates.get_dict())
                self.out('oxidation_states',
                         Dict(dict=self.inputs.use_oxistates.get_dict()))
            elif self.inputs.guess_oxistates:
                oxi_states = guess_oxistates(self.inputs.structure)
                basis_dict['basis_family'].set_oxistates(oxi_states)
                # save oxidation states for future reference
                self.out('oxidation_states', Dict(dict=oxi_states))

            if self.inputs.is_magnetic:
                try:
                    spinlock = guess_spinlock(self.inputs.structure)
                    params['scf']['spinlock'] = {}
                    params['scf']['spinlock']['SPINLOCK'] = [
                        spinlock, int(self.inputs.spinlock_steps)
                    ]
                    # adding SPIN keywords if they're not present
                    if 'single' in params['scf']:
                        params['scf']['single'] = 'UHF'
                    elif 'dft' in params['scf']:
                        params['scf']['dft']['SPIN'] = True
                except NotImplementedError:
                    self.logger.info(
                        "is_magnetic is True for non-magnetic structure")

            d12_file = D12(parameters=self.inputs.parameters.get_dict(),
                           basis=basis_dict['basis_family'])

        except (AttributeError, ValueError, NotImplementedError) as err:
            raise InputValidationError(
                "an input file could not be created from the parameters: {}".
                format(err))
        with open(
                folder.get_abs_path(
                    self.inputs.metadata.options.input_filename), 'w') as f:
            f.write(str(d12_file))

        # create input files: fort.34
        with open(folder.get_abs_path(self._GEOMETRY_FILE_NAME), 'w') as f:
            Fort34(basis=basis_dict['basis_family']).from_aiida(
                self.inputs.structure).write(f)
Example #8
def check_restart(builder, verbose=False):
    """
    Check the RemoteData reference by the builder is satisfied
    :returns: True if OK
    :raises: InputValidationError if error is found
    """
    import os
    from .utils import _lowercase_dict

    def _print(inp):
        if verbose:
            print(inp)

    paramdict = builder[INPUT_LINKNAMES['parameters']].get_dict()['PARAM']
    paramdict = _lowercase_dict(paramdict, "paramdict")
    stemp = paramdict.get("reuse", None)
    if not stemp:
        stemp = paramdict.get("continuation", None)
    if stemp is not None:
        fname = os.path.split(stemp)[-1]
        _print("This calculation requires a restart file: '{}'".format(fname))
    else:
        # No restart file needed
        _print("This calculation does not require a restart file.")
        return True

    # Now check if the remote folder has this file
    remote_data = builder.get(INPUT_LINKNAMES["parent_calc_folder"])
    if not remote_data:
        raise InputValidationError(
            "Restart requires "
            "parent_folder to be specified".format(fname))
    else:
        _print("Checking remote directory")
        folder_list = remote_data.listdir()
        if fname not in folder_list:
            raise InputValidationError(
                "Restart file {}"
                " is not in the remote folder".format(fname))
        else:
            _print("Check finished, restart file '{}' exists.".format(fname))
            return True
Example #9
    def _handle_system_section(self, system_dict, folder):
        """Handle framework(s) and/or box(es)."""
        for name, sparams in system_dict.items():
            if sparams["type"] == "Framework":
                try:
                    self.inputs.framework[name].export(
                        folder.get_abs_path(name + '.cif'), fileformat='cif')
                except KeyError:
                    raise InputValidationError(
                        "You specified '{}' framework in the input dictionary, but did not provide the input "
                        "framework with the same name".format(name))
Example #10
    def prepare_for_retrieval_and_parsing(self, open_transport):
        """
        Tell the daemon that the calculation is computed and ready to be parsed.

        :param open_transport: An open instance of the transport class of the
            calculation's computer. See the tutorial for more information.
        :type open_transport: aiida.transport.plugins.local.LocalTransport
            or aiida.transport.plugins.ssh.SshTransport

        The next time the daemon updates the status of calculations, it will
        see this job is in the 'COMPUTED' state and will retrieve its output
        files and parse the results.

        If the daemon is not currently running, nothing will happen until it is
        started again.

        This method also stores the calculation and all input nodes. It also
        copies the original input file to the calculation's repository folder.

        :raises aiida.common.exceptions.InputValidationError: if
            ``open_transport`` is a different type of transport than the
            computer's.
        :raises aiida.common.exceptions.InvalidOperation: if
            ``open_transport`` is not open.
        """

        # Check that the create_input_nodes method has run successfully.
        if not self.get_attr('input_nodes_created', False):
            raise InvalidOperation(
                'You must run the create_input_nodes method before calling '
                'prepare_for_retrieval_and_parsing!')

        # Check that open_transport is the correct transport type.
        if type(open_transport) is not self.get_computer().get_transport_class(
        ):
            raise InputValidationError(
                'The transport passed as the `open_transport` parameter is '
                'not the same transport type linked to the computer. Please '
                'obtain the correct transport class using the '
                "`get_transport_class` method of the calculation's computer. "
                'See the tutorial for more information.')

        # Check that open_transport is actually open.
        if not open_transport._is_open:
            raise InvalidOperation(
                'The transport passed as the `open_transport` parameter is '
                'not open. Please open the transport using its `open` method, '
                'or execute the call to this method within a `with` statement '
                'context guard. See the tutorial for more information.')

        # Prepare the calculation for retrieval
        self._prepare_for_retrieval(open_transport)
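
As an aside, the check above deliberately compares with `type(...) is ...` rather than `isinstance`, so even a subclass of the computer's transport class is rejected. A minimal standalone illustration of that behaviour, using hypothetical classes rather than AiiDA's real transports:

class BaseTransport:
    pass

class SshLikeTransport(BaseTransport):
    pass

open_transport = SshLikeTransport()
print(type(open_transport) is SshLikeTransport)   # True
print(type(open_transport) is BaseTransport)      # False: the exact-type check rejects subclasses
print(isinstance(open_transport, BaseTransport))  # True: isinstance would have accepted it
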
Example #11
    def prepare_for_submission(self, folder):

        in_dict = self.inputs[INPUT_LINKNAMES['parameters']].get_dict()

        # Check if task is correctly set
        all_tasks = [t.lower() for t in self._acceptable_tasks]
        if in_dict['PARAM']['task'].lower() not in all_tasks:
            raise InputValidationError("Wrong TASK value {}"
                                       " set in PARAM".format(
                                           in_dict['PARAM']['task'].lower()))
        return super(TaskSpecificCalculation,
                     self).prepare_for_submission(folder)
Example #12
    def get_structuredata(self):
        """Return a StructureData object based on the data in the input file.

        All of the names of the ``Kind`` objects composing the ``StructureData`` object will match those
        found in the ``ATOMIC_SPECIES`` block, so the pseudopotentials can be linked to the calculation using the kind
        name for each specific type of atom (in the event that you wish to use different pseudos for two or more of the
        same atom).

        :return: structure data node of the structure defined in the input file.
        :rtype: :class:`~aiida.orm.nodes.data.structure.StructureData`
        """
        from aiida.orm.nodes.data.structure import StructureData, Kind, Site

        valid_elements_regex = re.compile(
            """
            (?P<ele>
    H  | He |
    Li | Be | B  | C  | N  | O  | F  | Ne |
    Na | Mg | Al | Si | P  | S  | Cl | Ar |
    K  | Ca | Sc | Ti | V  | Cr | Mn | Fe | Co | Ni | Cu | Zn | Ga | Ge | As | Se | Br | Kr |
    Rb | Sr | Y  | Zr | Nb | Mo | Tc | Ru | Rh | Pd | Ag | Cd | In | Sn | Sb | Te | I  | Xe |
    Cs | Ba | Hf | Ta | W  | Re | Os | Ir | Pt | Au | Hg | Tl | Pb | Bi | Po | At | Rn |
    Fr | Ra | Rf | Db | Sg | Bh | Hs | Mt |

    La | Ce | Pr | Nd | Pm | Sm | Eu | Gd | Tb | Dy | Ho | Er | Tm | Yb | Lu | # Lanthanides
    Ac | Th | Pa | U  | Np | Pu | Am | Cm | Bk | Cf | Es | Fm | Md | No | Lr | # Actinides
            )
            [^a-z]  # Any specification of an element is followed by some number
                    # or capital letter or special character.
        """, re.X | re.I)

        data = self.get_structure_from_qeinput()
        species = self.atomic_species

        structure = StructureData()
        structure.set_attribute('cell', data['cell'])

        for mass, name, pseudo in zip(species['masses'], species['names'],
                                      species['pseudo_file_names']):
            try:
                symbols = valid_elements_regex.search(pseudo).group(
                    'ele').capitalize()
            except Exception:
                raise InputValidationError(
                    'could not determine element name from pseudo name: {}'.
                    format(pseudo))
            structure.append_kind(Kind(name=name, symbols=symbols, mass=mass))

        for symbol, position in zip(data['atom_names'], data['positions']):
            structure.append_site(Site(kind_name=symbol, position=position))

        return structure
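
For context, the verbose regex above only serves to pull a leading element symbol out of a pseudopotential filename. A much simplified, standalone sketch of the same idea (unlike the full pattern, it does not restrict matches to real element symbols):

import re

# Simplified stand-in for the element regex above: a one- or two-letter symbol
# followed by a character that is not a letter.
ELEMENT_RE = re.compile(r'(?P<ele>[A-Z][a-z]?)[^a-z]', re.IGNORECASE)

for pseudo in ('Fe.pbe-spn-rrkjus_psl.0.2.1.UPF', 'si_lda_v1.uspp.F.UPF'):
    match = ELEMENT_RE.search(pseudo)
    print(match.group('ele').capitalize())  # prints 'Fe', then 'Si'
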
Example #13
    def _validate_basis_input(self, inputdict):
        """Input validation; returns the dict of validated data"""
        validated_dict = {}

        # basis family input
        basis_present = False
        validated_dict['basis_family'] = inputdict.pop('basis_family', None)
        if validated_dict['basis_family'] is not None:
            basis_present = True
            if not isinstance(validated_dict['basis_family'],
                              CrystalBasisFamilyData):
                raise InputValidationError(
                    "basis_family not of type CrystalBasisFamilyData: {}".
                    format(validated_dict['basis_family']))

        basis_inputs = [
            _ for _ in inputdict if _.startswith(self._BASIS_PREFIX)
        ]
        basis_dict = {}

        if (not basis_present) and (not basis_inputs):
            raise InputValidationError(
                'No basis sets specified for calculation!')
        for basis_name in basis_inputs:
            if basis_present:
                raise ValueError(
                    "Either basis or basis family (not both) must be present in calculation inputs"
                )

            _, symbol = basis_name.split('_')
            if symbol not in chemical_symbols:
                raise InputValidationError(
                    'Basis set provided for element not in periodic table: {}'.
                    format(symbol))
            basis = inputdict.pop(basis_name)
            basis_dict[symbol] = basis
            basis_present = True
        validated_dict['basis'] = basis_dict
        return validated_dict
Example #14
    def _prepare_pseudo_potentials(self):
        """
        Prepare the pseudopotential part of the cell file
        """

        # --------- PSEUDOPOTENTIALS --------
        # Check if we are using UPF pseudos
        # Now only support simple elemental pseudopotentials

        species_pot_map = {}
        pseudos = self.inputs.pseudos
        # Make kindname unique
        for kind in self.inputs[in_ln['structure']].kinds:
            symbols = kind.symbols
            # If the site has multiple symbols, add all of them to the list
            mixture = False
            if len(symbols) > 1:
                mixture = True
            for symbol in symbols:
                if symbol == kind.name:
                    pseudo_name = symbol
                else:
                    pseudo_name = symbol + ':' + kind.name

                if not mixture:
                    # The pseudopotential is defined by kind.name
                    ps_node = pseudos[kind.name]
                else:
                    # For a mixture, the pseudopotential is defined as '<kind_name>_<symbol>'
                    ps_node = pseudos[kind.name + '_' + symbol]

                # If we are using OTFG, just add the string property of it
                if isinstance(ps_node, OTFGData):
                    species_pot_map[pseudo_name] = "{:5} {}".format(
                        pseudo_name, ps_node.string)
                else:
                    # If we are dealing with file based pseudopotentials objects
                    # Add the specification to the file
                    try:
                        species_pot_map[pseudo_name] = "{:5} {}".format(
                            pseudo_name, ps_node.filename)
                        # Add to the copy list
                        self.local_copy_list_to_append.add(
                            (ps_node.uuid, ps_node.filename, ps_node.filename))
                    except Exception as error:
                        raise InputValidationError(
                            'Unknown node as pseudo: {}. Exception raised: {}'.
                            format(ps_node, error))

        # Ensure it is a list
        self.cell_file["SPECIES_POT"] = Block(list(species_pot_map.values()))
Example #15
    def _parsed_object(self):
        """
        Return an instance of parsevasp.incar.Incar.

        Corresponds to the stored data in inputs.parameters.

        """

        incar_dict = self._data_obj.get_dict()

        try:
            return Incar(incar_dict=incar_dict, logger=self._logger)
        except SystemExit as error:
            raise InputValidationError(error.args[0])
Example #16
def get_potcar_input(dir_path, structure=None, potential_family=None, potential_mapping=None):
    """Read potentials from a POTCAR file or set it up from a structure."""
    local_potcar = dir_path / 'POTCAR'
    structure = structure or get_poscar_input(dir_path)
    potentials = {}
    if local_potcar.exists():
        potentials = MultiPotcarIo.read(str(local_potcar)).get_potentials_dict(structure)
        potentials = {kind: potentials[kind] for kind in potentials}
    elif potential_family:
        potentials = PotcarData.get_potcars_from_structure(structure, potential_family, mapping=potential_mapping)
    else:
        raise InputValidationError('no POTCAR found in remote folder and potential_family was not passed')

    return potentials
Example #17
def _kind_element_from_kind_section(section):
    """
    Get both kind and chemical symbol from a section, implementing
    the same auto-detection for chemical symbol/element from a KIND parameter
    as CP2K does.
    """
    try:
        kind = section["_"]
    except KeyError:
        raise InputValidationError(
            "No default parameter '_' found in KIND section.")

    try:
        element = section["ELEMENT"]
    except KeyError:
        # if there is no ELEMENT, CP2K automatically guesses it from the KIND, do the same
        match = ELEMENT_MATCH.match(kind)
        try:
            element = match["sym"]
        except TypeError:
            raise InputValidationError(
                f"Unable to figure out atomic symbol from KIND '{kind}'.")

    return kind, element
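
ELEMENT_MATCH itself is not shown in this snippet. As a rough, purely hypothetical stand-in (not the actual pattern used by the plugin), a regex that captures a leading element-like symbol from a KIND label into a 'sym' group could look like this:

import re

# Hypothetical stand-in for ELEMENT_MATCH: one uppercase letter optionally
# followed by up to two lowercase letters at the start of the KIND label.
ELEMENT_MATCH = re.compile(r'(?P<sym>[A-Z][a-z]{0,2})')

for kind in ('O2', 'Fe_up', '123'):
    match = ELEMENT_MATCH.match(kind)
    print(match['sym'] if match else None)  # prints 'O', 'Fe', then None
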
Example #18
def _format_parameter_values(parameters_dict):
    """
    Turn the values of the parameters dictionary into the appropriate string.
    """
    result_dict = {}
    for key, value in parameters_dict.items():
        key = key.lower()
        if key == 'exclude_bands':
            if len(set(value)) < len(value):
                raise InputValidationError(
                    "The 'exclude_bands' input contains duplicate entries.")
            result_dict[key] = list_to_grouped_string(value)
        else:
            result_dict[key] = conv_to_fortran_withlists(value,
                                                         quote_strings=False)
    return result_dict
Example #19
    def prepare_for_submission(self, folder):
        """
        This is the routine to be called when you want to create
        the input files and related stuff with a plugin.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        code = self.inputs.code  # an aiida Code
        parameters = self.inputs.parameters  # an aiida Dict

        input_dict = parameters.get_dict()  # a python dict
        if 'x1' not in input_dict or 'x2' not in input_dict:
            raise InputValidationError(
                'The input parameters node should contain both keys "x1" and "x2", '
                'but it doesn\'t.')

        ##############################
        # END OF INITIAL INPUT CHECK #
        ##############################

        input_filename = self.inputs.metadata.options.input_filename
        output_filename = self.inputs.metadata.options.output_filename

        # write all the input to a file
        with folder.open(input_filename, 'w') as infile:
            json.dump(input_dict, infile)

        # ============================ calcinfo ================================

        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.local_copy_list = []
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = [output_filename]
        calcinfo.retrieve_temporary_list = [[
            'path/hugefiles*[0-9].xml', '.', '1'
        ]]

        codeinfo = CodeInfo()
        codeinfo.cmdline_params = [input_filename, output_filename]
        codeinfo.code_uuid = code.uuid
        calcinfo.codes_info = [codeinfo]

        return calcinfo
Example #20
def _uppercase_dict(d, dict_name):
    from collections import Counter

    if isinstance(d, dict):
        new_dict = dict((str(k).upper(), v) for k, v in d.items())
        if len(new_dict) != len(d):

            num_items = Counter(str(k).upper() for k in d.keys())
            double_keys = ",".join([k for k, v in num_items.items() if v > 1])
            raise InputValidationError(
                "Inside the dictionary '{}' there are the following keys that "
                "are repeated more than once when compared case-insensitively: "
                "{}. "
                "This is not allowed.".format(dict_name, double_keys))
        return new_dict
    else:
        raise TypeError(
            "_uppercase_dict accepts only dictionaries as argument")
Example #21
    def prepare_for_submission(self, folder):
        """Create the input files from the input nodes passed to this instance of the `CalcJob`.

        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # optional settings dictionary (empty if none was provided)
        if 'settings' in self.inputs:
            settings = self.inputs.settings.get_dict()
        else:
            settings = {}

        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid

        # symlinks
        calcinfo.remote_symlink_list = []
        calcinfo.remote_copy_list = []

        comp_uuid = self.inputs.parent_calc_folder.computer.uuid
        remote_path = self.inputs.parent_calc_folder.get_remote_path()
        copy_info = (comp_uuid, remote_path, self._DEFAULT_PARENT_CALC_FLDR_NAME)
        if self.inputs.code.computer.uuid == comp_uuid:
            # if running on the same computer - make a symlink
            calcinfo.remote_symlink_list.append(copy_info)
        else:
            # if not - copy the folder
            calcinfo.remote_copy_list.append(copy_info)

        # create code info
        codeinfo = CodeInfo()
        codeinfo.cmdline_params = ["freeze", '-o', self._DEFAULT_OUTPUT_FILE]
        codeinfo.stdout_name = self._DEFAULT_OUTPUT_FILE
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo.cmdline_params = codeinfo.cmdline_params
        calcinfo.codes_info = [codeinfo]

        calcinfo.retrieve_list = [
            self._DEFAULT_OUTPUT_FILE,
        ]

        # check for left-over settings
        if settings:
            raise InputValidationError("The following keys have been found " +
                                       "in the settings input node {}, ".format(self.pk) +
                                       "but were not understood: " +
                                       ",".join(settings.keys()))

        return calcinfo
Example #22
def _uppercase_dict(in_dict, dict_name):
    """
    Make sure the dictionary's keys are in upper case
    :param dict_name: A string with the name of the dictionary. Only used in
    the error message.
    """
    if isinstance(in_dict, dict):
        new_dict = dict(
            (str(key).upper(), value) for key, value in in_dict.items())
        if len(new_dict) != len(in_dict):
            num_items = Counter(str(key).upper() for key in in_dict.keys())
            double_keys = ",".join(
                [key for key, value in num_items.items() if value > 1])
            raise InputValidationError(
                "Inside the dictionary '{}' there are the following keys that "
                "are repeated more than once when compared case-insensitively: {}. "
                "This is not allowed.".format(dict_name, double_keys))
        return new_dict
    raise TypeError("_uppercase_dict accepts only dictionaries as argument")
Example #23
    def prepare_for_submission(self, folder):
        """
        Create input files.

            :param folder: aiida.common.folders.Folder subclass where
                the plugin should put all its files.
        """
        # create input files: d3
        structure = self.inputs.get('structure', None)
        try:
            d3_content = D3(self.inputs.parameters.get_dict(), structure)
        except (ValueError, NotImplementedError) as err:
            raise InputValidationError(
                "an input file could not be created from the parameters: {}".
                format(err))
        with folder.open(self._INPUT_FILE_NAME, "w") as f:
            d3_content.write(f)

        # create input files: fort.9
        with self.inputs.wavefunction.open(mode="rb") as f:
            folder.create_file_from_filelike(f,
                                             self._WAVEFUNCTION_FILE_NAME,
                                             mode="wb")

        # Prepare CodeInfo object for aiida
        codeinfo = CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.stdin_name = self._INPUT_FILE_NAME
        codeinfo.stdout_name = self._OUTPUT_FILE_NAME
        codeinfo.withmpi = False

        # Prepare CalcInfo object for aiida
        calcinfo = CalcInfo()
        calcinfo.uuid = self.uuid
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = []
        calcinfo.remote_copy_list = []
        calcinfo.retrieve_list = [self._PROPERTIES_FILE_NAME]

        return calcinfo
Example #24
    def _create_additional_files(self, folder):
        self.logger.info("create_additional_files")

        self._internal_retrieve_list = [
            self._OUTPUT_TOTAL_DOS, self._OUTPUT_PROJECTED_DOS,
            self._OUTPUT_THERMAL_PROPERTIES, self._OUTPUT_BAND_STRUCTURE
        ]
        self._calculation_cmd = [['--pdos=auto'], ['-t', '--dos'],
                                 [
                                     '--band=auto', '--band-points=101',
                                     '--band-const-interval'
                                 ]]

        if 'force_sets' in self.inputs:
            force_sets = self.inputs.force_sets
        else:
            force_sets = None
        if 'displacement_dataset' in self.inputs.settings.attributes:
            displacement_dataset = self.inputs.settings['displacement_dataset']
        else:
            displacement_dataset = None

        if force_sets is not None and displacement_dataset is not None:
            force_sets_txt = get_FORCE_SETS_txt(force_sets,
                                                displacement_dataset)
            force_sets_filename = folder.get_abs_path(self._INPUT_FORCE_SETS)
            with open(force_sets_filename, 'w') as infile:
                infile.write(force_sets_txt)
            # First run with --writefc, and with --readfc for remaining runs
            self._additional_cmd_params = [
                ['--readfc'] for i in range(len(self._calculation_cmd) - 1)
            ]
            self._additional_cmd_params.insert(0, ['--writefc'])
            self._internal_retrieve_list.append(self._INOUT_FORCE_CONSTANTS)
        else:
            msg = ("no force_sets nor force_constants are specified for "
                   "this calculation")
            raise InputValidationError(msg)
Example #25
    def combine_dictlists(dict_list1, dict_list2):
        """
        Creates a list of every dict in dict_list1 updated with every
        dict in dict_list2
        """
        out_list = []
        # exception handling for the case of empty dicts
        dict_list1_empty = not any([bool(x) for x in dict_list1])
        dict_list2_empty = not any([bool(x) for x in dict_list2])
        if dict_list1_empty and dict_list2_empty:
            raise InputValidationError('At least one of the dict lists must be non-empty')
        if dict_list1_empty:
            return dict_list2
        if dict_list2_empty:
            return dict_list1

        for dict_1 in dict_list1:
            for dict_2 in dict_list2:
                temp_1 = dict_1.copy()
                temp_2 = dict_2.copy()
                temp_1.update(temp_2)
                out_list.append(temp_1)
        return out_list
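
For illustration, the cartesian merge above can be reduced to a one-line comprehension; this is a minimal sketch of the same logic as a free function, without the empty-list guards:

def combine_dictlists(dict_list1, dict_list2):
    """Merge every dict of the first list with every dict of the second list."""
    return [{**d1, **d2} for d1 in dict_list1 for d2 in dict_list2]

print(combine_dictlists([{'a': 1}, {'a': 2}], [{'b': 3}]))
# [{'a': 1, 'b': 3}, {'a': 2, 'b': 3}]
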
Example #26
    @classmethod
    def _generate_input_files(cls, neb_parameters, settings_dict):
        """Generate the input data for the NEB part of the calculation."""
        # I put the first-level keys as uppercase (i.e., namelist and card names)
        # and the second-level keys as lowercase
        # (deeper levels are unchanged)
        input_params = _uppercase_dict(neb_parameters.get_dict(), dict_name='parameters')
        input_params = {k: _lowercase_dict(v, dict_name=k) for k, v in input_params.items()}

        # Force default values for blocked keywords. NOTE: this is different from PW/CP
        for blocked in cls._blocked_keywords:
            namelist = blocked[0].upper()
            key = blocked[1].lower()
            value = blocked[2]
            if namelist in input_params:
                if key in input_params[namelist]:
                    raise InputValidationError(
                        f"You cannot specify explicitly the '{key}' key in the '{namelist}' namelist."
                    )
            else:
                input_params[namelist] = {}
            input_params[namelist][key] = value

        # Create an empty dictionary for the compulsory namelist 'PATH' if not present
        if 'PATH' not in input_params:
            input_params['PATH'] = {}

        # In case of climbing image, we need the corresponding card
        ci_scheme = input_params['PATH'].get('ci_scheme', 'no-ci').lower()
        climbing_image_list = settings_dict.pop('CLIMBING_IMAGES', None)
        if ci_scheme == 'manual':
            manual_climbing_image = True
            if climbing_image_list is None:
                raise InputValidationError(
                    "'ci_scheme' is {}, but no climbing images were specified for this "
                    'calculation.'.format(ci_scheme)
                )
            if not isinstance(climbing_image_list, list):
                raise InputValidationError('Climbing images should be provided as a list.')
            num_of_images = input_params['PATH'].get('num_of_images', 2)
            if any([(i < 2 or i >= num_of_images) for i in climbing_image_list]):
                raise InputValidationError(
                    'The climbing images should be in the range between the first '
                    'and the last image (excluded).'
                )
            climbing_image_card = 'CLIMBING_IMAGES\n'
            climbing_image_card += ', '.join([str(_) for _ in climbing_image_list]) + '\n'
        else:
            manual_climbing_image = False
            if climbing_image_list is not None:
                raise InputValidationError(f"Climbing images are not accepted when 'ci_scheme' is {ci_scheme}.")

        input_data = '&PATH\n'
        # namelist content; set to {} if not present, so that we leave an empty namelist
        namelist = input_params.pop('PATH', {})
        for key, value in sorted(namelist.items()):
            input_data += convert_input_to_namelist_entry(key, value)
        input_data += '/\n'

        # Write CI cards now
        if manual_climbing_image:
            input_data += climbing_image_card

        if input_params:
            raise InputValidationError(
                'The following namelists are specified in input_params, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(input_params.keys())))
            )

        return input_data
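
The climbing-image check above implies that images are numbered from 1 and that only intermediate images (2 to num_of_images - 1) may climb. A minimal standalone sketch of that validation, using a hypothetical helper name:

def _valid_climbing_images(climbing_image_list, num_of_images):
    """Return True if every requested climbing image is an intermediate image."""
    return all(2 <= i < num_of_images for i in climbing_image_list)

assert _valid_climbing_images([2, 3], 4) is True
assert _valid_climbing_images([1, 4], 4) is False  # the two endpoints are excluded
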
Example #27
    def prepare_for_submission(self, folder):
        """Prepare the calculation job for submission by transforming input nodes into input files.

        In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
        contains lists of files that need to be copied to the remote machine before job submission, as well as file
        lists that are to be retrieved after job completion.

        :param folder: a sandbox folder to temporarily write files on disk.
        :return: :py:class:`~aiida.common.datastructures.CalcInfo` instance.
        """
        # pylint: disable=too-many-branches,too-many-statements
        import numpy as np

        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        # Convert settings dictionary to have uppercase keys, or create an empty one if none was given.
        if 'settings' in self.inputs:
            settings_dict = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
        else:
            settings_dict = {}

        first_structure = self.inputs.first_structure
        last_structure = self.inputs.last_structure

        # Check that the first and last image have the same cell
        if abs(np.array(first_structure.cell) - np.array(last_structure.cell)).max() > 1.e-4:
            raise InputValidationError('Different cell in the first and last image')

        # Check that the first and last image have the same number of sites
        if len(first_structure.sites) != len(last_structure.sites):
            raise InputValidationError('Different number of sites in the first and last image')

        # Check that sites in the initial and final structure have the same kinds
        if first_structure.get_site_kindnames() != last_structure.get_site_kindnames():
            raise InputValidationError(
                'Mismatch between the kind names and/or order between '
                'the first and final image'
            )

        # Check that a pseudo potential was specified for each kind present in the `StructureData`
        # self.inputs.pw.pseudos is a plumpy.utils.AttributesFrozendict
        kindnames = [kind.name for kind in first_structure.kinds]
        if set(kindnames) != set(self.inputs.pw.pseudos.keys()):
            raise InputValidationError(
                'Mismatch between the defined pseudos and the list of kinds of the structure.\nPseudos: {};\n'
                'Kinds: {}'.format(', '.join(list(self.inputs.pw.pseudos.keys())), ', '.join(list(kindnames)))
            )

        ##############################
        # END OF INITIAL INPUT CHECK #
        ##############################

        # Create the subfolder that will contain the pseudopotentials
        folder.get_subfolder(self._PSEUDO_SUBFOLDER, create=True)
        # Create the subfolder for the output data (sometimes Quantum ESPRESSO codes crash if the folder does not exist)
        folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

        # We first prepare the NEB-specific input file.
        neb_input_filecontent = self._generate_input_files(self.inputs.parameters, settings_dict)
        with folder.open(self.inputs.metadata.options.input_filename, 'w') as handle:
            handle.write(neb_input_filecontent)

        # We now generate the PW input files for each input structure
        local_copy_pseudo_list = []
        for i, structure in enumerate([first_structure, last_structure]):
            # We need to pass a copy of the settings_dict for each structure
            this_settings_dict = copy.deepcopy(settings_dict)
            pw_input_filecontent, this_local_copy_pseudo_list = PwCalculation._generate_PWCPinputdata(  # pylint: disable=protected-access
                self.inputs.pw.parameters, this_settings_dict, self.inputs.pw.pseudos, structure, self.inputs.pw.kpoints
            )
            local_copy_pseudo_list += this_local_copy_pseudo_list
            with folder.open(f'pw_{i + 1}.in', 'w') as handle:
                handle.write(pw_input_filecontent)

        # We need to pop the settings that were used in the PW calculations
        for key in list(settings_dict.keys()):
            if key not in list(this_settings_dict.keys()):
                settings_dict.pop(key)

        # We avoid to copy twice the same pseudopotential to the same filename
        local_copy_pseudo_list = set(local_copy_pseudo_list)
        # We check that two different pseudopotentials are not copied
        # with the same name (otherwise the first is overwritten)
        if len({filename for (uuid, filename, local_path) in local_copy_pseudo_list}) < len(local_copy_pseudo_list):
            raise InputValidationError('Same filename for two different pseudopotentials')

        local_copy_list += local_copy_pseudo_list

        # If present, add also the Van der Waals table to the pseudo dir. Note that the name of the table is not checked
        # but should be the one expected by Quantum ESPRESSO.
        vdw_table = self.inputs.get('pw.vdw_table', None)
        if vdw_table:
            local_copy_list.append(
                (vdw_table.uuid, vdw_table.filename, os.path.join(self._PSEUDO_SUBFOLDER, vdw_table.filename))
            )

        # operations for restart
        parent_calc_folder = self.inputs.get('parent_folder', None)
        symlink = settings_dict.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
        if symlink:
            if parent_calc_folder is not None:
                # I put the symlink to the old parent ./out folder
                remote_symlink_list.append((
                    parent_calc_folder.computer.uuid,
                    os.path.join(parent_calc_folder.get_remote_path(), self._OUTPUT_SUBFOLDER,
                                 '*'),  # asterisk: make individual symlinks for each file
                    self._OUTPUT_SUBFOLDER
                ))
                # and to the old parent prefix.path
                remote_symlink_list.append((
                    parent_calc_folder.computer.uuid,
                    os.path.join(parent_calc_folder.get_remote_path(), f'{self._PREFIX}.path'), f'{self._PREFIX}.path'
                ))
        else:
            # copy remote output dir and .path file, if specified
            if parent_calc_folder is not None:
                remote_copy_list.append((
                    parent_calc_folder.computer.uuid,
                    os.path.join(parent_calc_folder.get_remote_path(), self._OUTPUT_SUBFOLDER,
                                 '*'), self._OUTPUT_SUBFOLDER
                ))
                # and copy the old parent prefix.path
                remote_copy_list.append((
                    parent_calc_folder.computer.uuid,
                    os.path.join(parent_calc_folder.get_remote_path(), f'{self._PREFIX}.path'), f'{self._PREFIX}.path'
                ))

        # here we may create an aiida.EXIT file
        create_exit_file = settings_dict.pop('ONLY_INITIALIZATION', False)
        if create_exit_file:
            exit_filename = f'{self._PREFIX}.EXIT'
            with folder.open(exit_filename, 'w') as handle:
                handle.write('\n')

        calcinfo = CalcInfo()
        codeinfo = CodeInfo()

        calcinfo.uuid = self.uuid
        cmdline_params = settings_dict.pop('CMDLINE', [])
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list
        # In neb calculations there is no input read from standard input!!
        codeinfo.cmdline_params = (['-input_images', '2'] + list(cmdline_params))
        codeinfo.stdout_name = self.inputs.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid
        calcinfo.codes_info = [codeinfo]

        # Retrieve the output files and the xml files
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.inputs.metadata.options.output_filename)
        calcinfo.retrieve_list.append((
            os.path.join(self._OUTPUT_SUBFOLDER, self._PREFIX + '_*[0-9]', 'PW.out'),  # source relative path (globbing)
            '.',  # destination relative path
            2  # depth to preserve
        ))

        for xml_filepath in self.xml_filepaths:  # pylint: disable=not-an-iterable
            calcinfo.retrieve_list.append([xml_filepath, '.', 3])

        calcinfo.retrieve_list += settings_dict.pop('ADDITIONAL_RETRIEVE_LIST', [])
        calcinfo.retrieve_list += self._internal_retrieve_list

        # We might still have parser options in the settings dictionary: pop them.
        _pop_parser_options(self, settings_dict)

        if settings_dict:
            unknown_keys = ', '.join(list(settings_dict.keys()))
            raise InputValidationError(f'`settings` contained unexpected keys: {unknown_keys}')

        return calcinfo
Example #28
def validate_basissets(inp, basissets, structure):
    """
    Verify that all referenced basissets are present in the input.
    Currently supports 2 modes: either all of the basissets are explicitly
    listed in a KIND section, or none of them are, at which point they are
    verified against the symbols in the structure.
    """
    # pylint: disable=too-many-branches

    basisset_used = {_identifier(bset): 0 for _, bset in _unpack(basissets)}
    basisset_kw_used = False

    for secpath, section in inp.param_iter(sections=True):
        # ignore non-kind sections
        if secpath[-1].upper() != "KIND":
            continue

        if "BASIS_SET" not in section:
            # ignore kind sections without a BASIS_SET keyword
            continue

        basisset_kw_used = True

        kind = section["_"]
        element = section.get("ELEMENT", kind)

        # the BASIS_SET keyword can be repeated, even for the same type
        bsnames = section["BASIS_SET"]

        # the keyword BASIS_SET can occur multiple times in which case
        # the specified basis sets are merged (given they match the same type)
        if isinstance(bsnames, str):
            bsnames = [bsnames]

        for bsname in bsnames:
            # test for new-style basis set specification
            try:
                bstype, bsname = bsname.split(maxsplit=1)
            except ValueError:
                bstype = "ORB"

            try:
                basisset_used[(element, bsname)] += 1
            except KeyError:
                raise InputValidationError(f"'BASIS_SET {bstype} {bsname}' for element {element} (from kind {kind})"
                                           " not found in basissets input namespace")

    if basisset_kw_used:
        for (sym, name), used in basisset_used.items():
            if not used:
                raise InputValidationError(f"Basis sets provided in calculation for kind {sym} ({name}),"
                                           " but not used in input")
        # if all basissets are referenced in the input, we're done
        return

    if not structure:  # no support for COORD section (yet)
        raise InputValidationError("No explicit structure given and basis sets not referenced in input")

    if isinstance(inp["FORCE_EVAL"], Sequence):
        raise InputValidationError(
            "Automated BASIS_SET keyword creation is not yet supported with multiple FORCE_EVALs."
            " Please explicitly reference a BASIS_SET for each KIND.")

    allowed_labels = structure.get_kind_names() + list(structure.get_symbols_set())

    for label, bset in _unpack(basissets):
        try:
            label, bstype = label.split("_", maxsplit=1)
        except ValueError:
            bstype = "ORB"

        if label not in allowed_labels:
            raise InputValidationError(f"Basis sets provided in calculation for kind {bset.element} ({bset.name}),"
                                       f" with label {label} could not be matched to a kind in the structure")

        if "SUBSYS" not in inp["FORCE_EVAL"]:
            inp["FORCE_EVAL"]["SUBSYS"] = {}

        if "KIND" not in inp["FORCE_EVAL"]["SUBSYS"]:
            inp["FORCE_EVAL"]["SUBSYS"]["KIND"] = []

        kind_sec = next((s for s in inp["FORCE_EVAL"]["SUBSYS"]["KIND"] if s.get("_", "") == label), None)

        if not kind_sec:
            inp["FORCE_EVAL"]["SUBSYS"]["KIND"].append({"_": label})
            kind_sec = inp["FORCE_EVAL"]["SUBSYS"]["KIND"][-1]

        kind_sec["BASIS_SET"] = f"{bstype} {bset.name}"
        if "ELEMENT" not in kind_sec:
            kind_sec["ELEMENT"] = bset.element
Example #29
def validate_pseudos(inp, pseudos, structure):
    """Verify that all referenced pseudos are present in the input"""

    # pylint: disable=too-many-branches

    # there can be only one pseudo per kind, thus, no _unpack
    pseudo_used = {_identifier(pseudo): 0 for _, pseudo in pseudos.items()}
    pseudo_kw_used = False

    for secpath, section in inp.param_iter(sections=True):
        # ignore non-kind sections
        if secpath[-1].upper() != "KIND":
            continue

        kind = section["_"]
        element = section.get("ELEMENT", kind)

        pname = section.get("POTENTIAL", section.get("POT"))

        if pname is None:
            # ignore kind sections without a POTENTIAL keyword (or POT alias)
            continue

        try:
            ptype, pname = pname.split(maxsplit=1)
        except ValueError:
            ptype = "GTH"

        pseudo_kw_used = True

        try:
            pseudo_used[(element, pname)] += 1
        except KeyError:
            raise InputValidationError(f"'POTENTIAL {ptype} {pname}' for element {element} (from kind {kind})"
                                       " not found in pseudos input namespace")

    if pseudo_kw_used:
        for (sym, name), used in pseudo_used.items():
            if not used:
                raise InputValidationError(f"Pseudos provided in calculation for kind {sym} ({name}),"
                                           " but not used in input")
        return

    if not structure:  # no support for COORD section (yet)
        raise InputValidationError("No explicit structure given and pseudos not referenced in input")

    if isinstance(inp["FORCE_EVAL"], Sequence):
        raise InputValidationError(
            "Automated POTENTIAL keyword creation is not yet supported with multiple FORCE_EVALs."
            " Please explicitly reference a POTENTIAL for each KIND.")

    allowed_labels = structure.get_kind_names() + list(structure.get_symbols_set())

    for label, pseudo in pseudos.items():
        if label not in allowed_labels:
            raise InputValidationError(f"Pseudo provided in calculation for kind {pseudo.element} ({pseudo.name}),"
                                       f" with label {label} could not be matched to a kind in the structure")

        if "SUBSYS" not in inp["FORCE_EVAL"]:
            inp["FORCE_EVAL"]["SUBSYS"] = {}

        if "KIND" not in inp["FORCE_EVAL"]["SUBSYS"]:
            inp["FORCE_EVAL"]["SUBSYS"]["KIND"] = []

        kind_sec = next((s for s in inp["FORCE_EVAL"]["SUBSYS"]["KIND"] if s.get("_", "") == label), None)

        if not kind_sec:
            inp["FORCE_EVAL"]["SUBSYS"]["KIND"].append({"_": label})
            kind_sec = inp["FORCE_EVAL"]["SUBSYS"]["KIND"][-1]

        kind_sec["POTENTIAL"] = f"GTH {pseudo.name}"

        if "ELEMENT" not in kind_sec:
            kind_sec["ELEMENT"] = pseudo.element
Example #30
def _get_vasp_builder(structure, settings_dict, pressure=0.0, label=None):
    """
    Generate the input parameters needed to run a calculation for VASP

    :param structure: StructureData object containing the crystal structure
    :param settings_dict: dict containing the code string, options, INCAR
        parameters, potential family/mapping and k-point settings
    :return: a process builder for the VASP workchain
    """

    code_string = settings_dict['code_string']
    VaspWorkflow = WorkflowFactory('vasp.vasp')
    builder = VaspWorkflow.get_builder()
    if label:
        builder.metadata.label = label
    builder.code = Code.get_from_string(code_string)
    builder.structure = structure
    options = Dict(dict=settings_dict['options'])
    builder.options = options
    builder.clean_workdir = Bool(False)

    if 'parser_settings' in settings_dict:
        parser_settings_dict = settings_dict['parser_settings']
    else:
        parser_settings_dict = {}
    if 'add_forces' not in parser_settings_dict:
        parser_settings_dict.update({'add_forces': True})

    builder.settings = DataFactory('dict')(
        dict={
            'parser_settings': parser_settings_dict
        })

    incar = dict(settings_dict['parameters'])
    keys_lower = [key.lower() for key in incar]
    if 'ediff' not in keys_lower:
        incar.update({'EDIFF': 1.0E-8})
    builder.parameters = Dict(dict=incar)
    builder.potential_family = Str(settings_dict['potential_family'])
    builder.potential_mapping = Dict(dict=settings_dict['potential_mapping'])

    kpoints = KpointsData()
    kpoints.set_cell_from_structure(structure)
    if 'kpoints_density' in settings_dict:
        kpoints.set_kpoints_mesh_from_density(settings_dict['kpoints_density'])
    elif 'kpoints_mesh' in settings_dict:
        if 'kpoints_offset' in settings_dict:
            kpoints_offset = settings_dict['kpoints_offset']
        else:
            kpoints_offset = [0.0, 0.0, 0.0]

        kpoints.set_kpoints_mesh(settings_dict['kpoints_mesh'],
                                 offset=kpoints_offset)
    else:
        raise InputValidationError(
            'no kpoint definition in input. '
            'Define either kpoints_density or kpoints_mesh')

    builder.kpoints = kpoints

    return builder