Example #1
    def test_znucl_typat(self):
        """Check the typat/znucl ordering in the Abinit input and the enforce_typat/enforce_znucl overrides."""
        # Site ordering in gan.cif:
        # Ga  Ga1  1  0.33333333333333  0.666666666666667  0.500880  1.0
        # Ga  Ga2  1  0.66666666666667  0.333333333333333  0.000880  1.0
        # N  N3  1  0.333333333333333  0.666666666666667  0.124120  1.0
        # N  N4  1  0.666666666666667  0.333333333333333  0.624120  1.0
        gan = Structure.from_file(os.path.join(PymatgenTest.TEST_FILES_DIR, "abinit", "gan.cif"))

        # Default behaviour: znucl lists atomic numbers in the order new types appear among the sites.
        default_vars = structure_to_abivars(gan)
        default_znucl = default_vars["znucl"]
        self.assertArrayEqual(default_znucl, [31, 7])
        default_typat = default_vars["typat"]
        self.assertArrayEqual(default_typat, [1, 1, 2, 2])

        # Explicit overrides of typat and znucl must be honored verbatim.
        enforce_znucl = [7, 31]
        enforce_typat = [2, 2, 1, 1]
        enforced_vars = structure_to_abivars(gan, enforce_znucl=enforce_znucl, enforce_typat=enforce_typat)
        self.assertArrayEqual(enforced_vars["znucl"], enforce_znucl)
        self.assertArrayEqual(enforced_vars["typat"], enforce_typat)
        # The fractional coordinates are independent of the typat/znucl relabeling.
        self.assertArrayEqual(default_vars["xred"], enforced_vars["xred"])

        assert [s.symbol for s in species_by_znucl(gan)] == ["Ga", "N"]

        # Both labelings must assign the same atomic number to each site.
        for t_default, t_enforced in zip(default_typat, enforce_typat):
            assert default_znucl[t_default - 1] == enforce_znucl[t_enforced - 1]

        # Supplying only one of the two overrides must raise.
        with self.assertRaises(Exception):
            structure_to_abivars(gan, enforce_znucl=enforce_znucl, enforce_typat=None)
Example #2
    def to_string(self,
                  post=None,
                  with_structure=True,
                  with_pseudos=True,
                  exclude=None):
        r"""
        String representation.

        Args:
            post: String appended to each variable name. Usually autodetected
                for multi-dataset inputs; for a single-dataset input file it
                lets us avoid appending "1" to every variable name (Abinit
                would then require ndtset=1, which in turn makes the code add
                _DS1_ to all input/output file names).
            with_structure: False to omit the section with structure variables.
            with_pseudos: False to omit the JSON section with pseudo data.
            exclude: List of variable names to skip.
        """
        out = []
        if self.comment:
            out.append("# " + self.comment.replace("\n", "\n#"))

        post = "" if post is None else post
        skip = set() if exclude is None else set(exclude)

        # Alphabetical order; drop excluded names and unset (None) values.
        selected = sorted(k for k, v in self.items() if k not in skip and v is not None)

        # Collect (name, value) pairs; geometry variables go at the end.
        items = [(k, self[k]) for k in selected]
        if with_structure:
            items += list(aobj.structure_to_abivars(self.structure).items())

        # Render each variable with the optional dataset suffix.
        for name, value in items:
            out.append(str(InputVariable(name + post, value)))

        text = "\n".join(out)
        if not with_pseudos:
            return text

        # Append a commented JSON section describing the pseudopotentials.
        json_lines = ["\n\n\n#<JSON>"]
        json_lines.extend(json.dumps({"pseudos": [p.as_dict() for p in self.pseudos]}, indent=4).splitlines())
        json_lines.append("</JSON>")
        return text + "\n#".join(json_lines)
Example #3
    def _generate_inputdata(self,
                            parameters: orm.Dict,
                            pseudos,
                            structure: orm.StructureData,
                            kpoints: orm.KpointsData) -> ty.Tuple[str, list]:
        """Generate the input file content and list of pseudopotential files to copy.

        :param parameters: input parameters Dict
        :param pseudos: pseudopotential input namespace
        :param structure: input structure
        :param kpoints: input kpoints
        :returns: input file content, pseudopotential copy list
        """
        # abipy has its own subclass of Pymatgen's `Structure`; convert via pymatgen.
        abi_structure = AbiStructure.as_structure(structure.get_pymatgen()).abi_sanitize(primitive=True)

        # Stage every pseudopotential file into the pseudo subfolder.
        local_copy_pseudo_list = []
        for kind in structure.get_kind_names():
            pseudo = pseudos[kind]
            target = f'{self._PSEUDO_SUBFOLDER}{pseudo.filename}'
            local_copy_pseudo_list.append((pseudo.uuid, pseudo.filename, target))

        # Pseudopotentials _must_ be listed in the same order as 'znucl' in the
        # input file, so read 'znucl' as abipy will write it and order the
        # filenames accordingly.
        znucl = structure_to_abivars(abi_structure)['znucl']
        ordered_pseudo_filenames = [pseudos[constants.elements[Z]['symbol']].filename for Z in znucl]
        pseudo_parameters = {
            'pseudos': '"' + ', '.join(ordered_pseudo_filenames) + '"',
            'pp_dirpath': f'"{self._PSEUDO_SUBFOLDER}"'
        }

        # k-points are handed to abipy separately, so pop the related keys
        # out of the main parameter dict.
        input_parameters = parameters.get_dict()
        shiftk = input_parameters.pop('shiftk', [0.0, 0.0, 0.0])
        # NOTE: currently, only k-point meshes are supported, not k-point paths.
        kpoints_mesh = kpoints.get_kpoints_mesh()[0]

        # Use abipy to write the input file. The HGH_TABLE is passed only so
        # abipy does not error; these pseudos are not printed to file.
        abi_input = AbinitInput(
            structure=abi_structure,
            pseudos=HGH_TABLE,
            abi_kwargs={**input_parameters, **pseudo_parameters}
        )
        abi_input.set_kmesh(ngkpt=kpoints_mesh, shiftk=shiftk)

        return abi_input.to_string(with_pseudos=False), local_copy_pseudo_list
Example #4
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # Start from the user-defined input parameters.
        inp = self.inputs.parameters.get_dict()

        # Add the structure-related variables to the dictionary, if a structure was given.
        if 'structure' in self.inputs:
            inp.update(structure_to_abivars(self.inputs.structure.get_pymatgen()))

        lines = [str(InputVariable(inp_var, value)) for inp_var, value in inp.items()]

        with io.open(folder.get_abs_path(self._DEFAULT_INPUT_FILE), mode="w", encoding="utf-8") as fobj:
            # BUG FIX: `writelines(lines)` concatenated all variables without any
            # separator (str(InputVariable) carries no trailing newline — the other
            # renderers in this codebase join with "\n"), producing a malformed file.
            fobj.write("\n".join(lines))

        # Create code info.
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        # Pass the input file path on the command line rather than via standard input.
        codeinfo.cmdline_params = [self.options.input_filename]
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        # Prepare a `CalcInfo` to be returned to the engine.
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.stdin_name = self.options.input_filename
        calcinfo.stdout_name = self.options.output_filename
        # BUG FIX: retrieve_list was assigned twice; the first assignment
        # ([self.metadata.options.output_filename]) was dead code. Keep the effective value.
        calcinfo.retrieve_list = [self._DEFAULT_OUTPUT_FILE, self._DEFAULT_GSR_FILE_NAME]

        return calcinfo
Example #5
    def to_string(self, with_pseudos=True):
        """
        String representation i.e. the input file read by Abinit.

        Args:
            with_pseudos: False if JSON section with pseudo data should not be added.
        """
        if self.ndtset > 1:
            # Multi dataset mode.
            lines = [f"ndtset {int(self.ndtset)}"]

            def has_same_variable(kref, vref, other_inp):
                """True if variable kref is present in other_inp with the same value."""
                if kref not in other_inp:
                    return False
                return np.array_equal(vref, other_inp[kref])

            # Variables common to all datasets are emitted once in the
            # `Global Variables` section and excluded from each dataset's
            # own section via inp.to_string(exclude=...).
            global_vars = set()
            for k0, v0 in self[0].items():
                if all(has_same_variable(k0, v0, self[i]) for i in range(1, self.ndtset)):
                    global_vars.add(k0)

            w = 92
            if global_vars:
                lines.append(w * "#")
                lines.append("### Global Variables.")
                lines.append(w * "#")
                # BUG FIX: sort the set so the output is deterministic
                # (set iteration order is arbitrary across runs/processes).
                for key in sorted(global_vars):
                    lines.append(str(InputVariable(key, self[0][key])))

            has_same_structures = self.has_same_structures
            if has_same_structures:
                # All datasets share one structure: write it once here and
                # disable structure output in the per-dataset sections.
                lines.append(w * "#")
                lines.append("#" + "STRUCTURE".center(w - 1))
                lines.append(w * "#")
                for key, value in aobj.structure_to_abivars(self[0].structure).items():
                    lines.append(str(InputVariable(key, value)))

            for i, inp in enumerate(self):
                header = f"### DATASET {i + 1} ###"
                is_last = i == self.ndtset - 1
                s = inp.to_string(
                    post=str(i + 1),
                    # The JSON pseudo section goes after the last dataset only.
                    with_pseudos=is_last and with_pseudos,
                    with_structure=not has_same_structures,
                    exclude=global_vars,
                )
                if s:
                    ruler = len(header) * "#"
                    s = "\n" + ruler + "\n" + header + "\n" + ruler + "\n" + s + "\n"

                lines.append(s)

            return "\n".join(lines)

        # Single dataset ==> don't append the dataset index to the variables.
        # This trick is needed because Abinit complains if ndtset is not specified
        # and we have variables that end with the dataset index e.g. acell1.
        # We don't want to specify ndtset here since abinit would start adding DS#
        # to the input and output files, complicating the file-location logic.
        return self[0].to_string(with_pseudos=with_pseudos)
Example #6
    def prepare_for_submission(self, folder):
        """
        Create input files.

        :param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files needed by
            the calculation.
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        ### SETUP ###
        local_copy_list = []

        ### INPUT CHECK ###
        # PSEUDOS: every element in the structure needs a pseudo of a supported type.
        for kind in self.inputs.structure.get_kind_names():
            if kind not in self.inputs.pseudos:
                raise ValueError(f'no pseudo available for element {kind}')
            if not isinstance(self.inputs.pseudos[kind], (Psp8Data, JthXmlData)):
                raise ValueError(f'pseudo for element {kind} is not of type Psp8Data or JthXmlData')

        # KPOINTS: the mesh comes from the dedicated kpoints input, not the parameters.
        if 'ngkpt' in self.inputs.parameters.keys():
            raise ValueError('`ngkpt` should not be specified in input parameters')
        if 'kptopt' in self.inputs.parameters.keys():
            raise ValueError('`kptopt` should not be specified in input parameters')

        ### PREPARATION ###
        # PSEUDOS: stage each pseudo file into the pseudo subfolder as <kind>.psp8.
        folder.get_subfolder(self._DEFAULT_PSEUDO_SUBFOLDER, create=True)
        for kind in self.inputs.structure.get_kind_names():
            psp = self.inputs.pseudos[kind]
            local_copy_list.append((psp.uuid, psp.filename, self._DEFAULT_PSEUDO_SUBFOLDER + kind + '.psp8'))

        # KPOINTS
        kpoints_mesh = self.inputs.kpoints.get_kpoints_mesh()[0]

        ### INPUTS ###
        input_parameters = self.inputs.parameters.get_dict()
        shiftk = input_parameters.pop('shiftk', [0.0, 0.0, 0.0])

        # TODO: There must be a better way to do this
        # maybe we can convert the PseudoPotential objects into pymatgen Pseudo objects?
        # 'pseudos' must be listed in the same order as 'znucl' in the input file.
        znucl = structure_to_abivars(self.inputs.structure.get_pymatgen())['znucl']
        pseudo_parameters = {
            'pseudos': '"' + ', '.join([Element.from_Z(Z).symbol + '.psp8' for Z in znucl]) + '"',
            'pp_dirpath': '"' + self._DEFAULT_PSEUDO_SUBFOLDER + '"'
        }

        input_parameters = {**input_parameters, **pseudo_parameters}

        # Give abipy the HGH_TABLE only so it won't error; these pseudos are not printed to file.
        abin = AbinitInput(
            structure=self.inputs.structure.get_pymatgen(),
            pseudos=HGH_TABLE,
            abi_kwargs=input_parameters
        )
        abin.set_kmesh(
            ngkpt=kpoints_mesh,
            shiftk=shiftk
        )

        with io.open(folder.get_abs_path(self._DEFAULT_INPUT_FILE), mode='w', encoding='utf-8') as f:
            f.write(abin.to_string(with_pseudos=False))

        ### CODE ###
        codeinfo = datastructures.CodeInfo()
        codeinfo.code_uuid = self.inputs.code.uuid
        codeinfo.cmdline_params = [self.options.input_filename]
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.withmpi = self.inputs.metadata.options.withmpi

        ### CALC INFO ###
        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.stdin_name = self.options.input_filename
        calcinfo.stdout_name = self.options.output_filename
        # BUG FIX: retrieve_list was assigned twice; the first assignment
        # ([self.metadata.options.output_filename]) was dead code. Keep the effective value.
        calcinfo.retrieve_list = [self._DEFAULT_OUTPUT_FILE, self._DEFAULT_GSR_FILE_NAME, self._DEFAULT_TRAJECT_FILE_NAME]
        calcinfo.remote_symlink_list = []
        calcinfo.remote_copy_list = []
        calcinfo.local_copy_list = local_copy_list
        if 'parent_calc_folder' in self.inputs:
            comp_uuid = self.inputs.parent_calc_folder.computer.uuid
            remote_path = self.inputs.parent_calc_folder.get_remote_path()
            copy_info = (comp_uuid, remote_path, self._DEFAULT_PARENT_CALC_FLDR_NAME)
            # Running on the same computer: symlink instead of copying the folder.
            if self.inputs.code.computer.uuid == comp_uuid:
                calcinfo.remote_symlink_list.append(copy_info)
            else:
                calcinfo.remote_copy_list.append(copy_info)

        return calcinfo