Example 1
    def test_pawxml_pseudos(self):
        """Test O.GGA_PBE-JTH-paw.xml."""
        oxygen = Pseudo.from_file(ref_file("O.GGA_PBE-JTH-paw.xml"))
        assert repr(oxygen)
        assert str(oxygen)
        assert isinstance(oxygen.as_dict(), dict)

        self.assertTrue(oxygen.ispaw)
        self.assertTrue(
            oxygen.symbol == "O"
            and (oxygen.Z, oxygen.core, oxygen.valence) == (8, 2, 6)
            and oxygen.Z_val == 6
        )

        assert oxygen.xc.type == "GGA" and oxygen.xc.name == "PBE"
        assert oxygen.supports_soc
        assert oxygen.md5 is not None
        self.assertAlmostEqual(oxygen.paw_radius, 1.4146523028)

        # Test pickle
        new_objs = self.serialize_with_pickle(oxygen, test_eq=False)
        # Test MSONable
        self.assertMSONable(oxygen)

        for o in new_objs:
            self.assertTrue(o.ispaw)
            self.assertTrue(
                o.symbol == "O" and (o.Z, o.core, o.valence) == (8, 2, 6)
                and o.Z_val == 6
            )

            self.assertAlmostEqual(o.paw_radius, 1.4146523028)
Example 2
def from_dict(cls, d):
    """
    JSON interface used in pymatgen for easier serialization.
    """
    pseudos = [Pseudo.from_file(p["filepath"]) for p in d["pseudos"]]
    return cls(d["structure"],
               pseudos,
               comment=d["comment"],
               abi_args=d["abi_args"])
Example 3
def aiida_psp8_to_abipy_pseudo(aiida_pseudo: Psp8Data,
                               pseudo_dir: str = '') -> Pseudo:
    """Convert an aiida-pseudo Psp8Data into a pymatgen/abipy Pseudo"""
    with tempfile.NamedTemporaryFile('w', encoding='utf-8') as f:
        f.write(aiida_pseudo.get_content())
        f.flush()  # make sure the content is on disk before it is parsed
        abinit_pseudo = Pseudo.from_file(f.name)
    abinit_pseudo.path = os.path.join(pseudo_dir,
                                      aiida_pseudo.attributes['filename'])
    return abinit_pseudo
Example 4
    def __init__(self, ps_name):
        """

        """
        self.param = {}
        self.df_data = {}
        self.results = {}
        self.etotal_data = {}
        self.df_extra = np.inf
        self.ps_name = ps_name
        self.pseudo = Pseudo.from_file(ps_name+'.psp8')
Example 5
    def from_dict(cls, d):
        pseudos = []
        for p in d['pseudos']:
            pseudos.append(Pseudo.from_file(p['filepath']))

        dtsets = d['datasets']
        abiinput = cls(pseudos, ndtset=dtsets[0]['ndtset'], decorators=d["decorators"])

        for n, ds in enumerate(dtsets):
            abiinput.set_vars(dtset=n, **ds)

        return abiinput
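
A hedged sketch of the dict consumed by this from_dict, inferred from the keys accessed above; the values are illustrative assumptions. Each entry of d['datasets'] is re-applied through set_vars(dtset=n, **ds), and the first entry also carries 'ndtset':

# d = {
#     "pseudos":    [{"filepath": "/path/to/14si.pspnc"}],
#     "datasets":   [{"ndtset": 2, "ecut": 8}, {"ngkpt": [4, 4, 4]}, {"ngkpt": [8, 8, 8]}],
#     "decorators": [],
# }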
Example 6
    def set_pseudos_vars(self, pseudos_path):
        # this should be compatible with pseudo dojo versions both prior to and after 0.3
        pseudos_name = []
        pseudos_md5 = []
        for path in pseudos_path:
            pseudo = Pseudo.from_file(path)
            pseudos_name.append(pseudo.basename)
            pseudos_md5.append(pseudo.md5)

        self.pseudos_name = pseudos_name
        self.pseudos_md5 = pseudos_md5
        self.pseudos_path = pseudos_path
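
A short usage sketch (the paths are assumptions): each file is parsed once so that basename and md5 checksum can be stored alongside the paths.

# self.set_pseudos_vars(["/pseudos/14si.pspnc", "/pseudos/8o.pspnc"])
# -> self.pseudos_name == ["14si.pspnc", "8o.pspnc"], plus the matching md5 checksums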
Example 7
def get_pseudos(top):
    """
    Find pseudos within top, return :class:`PseudoTable` object sorted by atomic number Z.
    """
    from monty.os.path import find_exts
    from pymatgen.io.abinit.pseudos import PseudoTable, Pseudo
    exts = ("psp8",)
    pseudos = []
    for p in find_exts(top, exts, exclude_dirs="_*"):
        try:
            pseudos.append(Pseudo.from_file(p))
        except Exception as exc:
            from warnings import warn
            warn("Exception in pseudo %s:\n%s" % (p.filepath, exc))

    return PseudoTable(pseudos).sort_by_z()
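
A minimal usage sketch, assuming a directory tree that contains psp8 files; the attributes printed below (basename, Z, Z_val) are the same ones exercised elsewhere in these examples.

# table = get_pseudos("/path/to/pseudos")   # hypothetical top-level directory
# for pseudo in table:
#     print(pseudo.basename, pseudo.Z, pseudo.Z_val)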
Example 8
def compare_pseudos(filepaths, ecut=30):
    """
    This function receives a list of pseudopotential files, calls
    Abinit to produce the PSPS.nc files, and produces matplotlib plots
    comparing the behaviour of the pseudos in real and in reciprocal space.

    Args:
        filepaths: List of file names.
        ecut: Cutoff energy in Ha for the wavefunctions.
    """
    pseudos = [Pseudo.from_file(path) for path in filepaths]

    psps_files = [p.open_pspsfile(ecut=ecut) for p in pseudos]

    p0 = psps_files[0]
    p0.compare(psps_files[1:])

    for pfile in psps_files:
        pfile.close()
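
A hypothetical invocation (the file names are assumptions); open_pspsfile() runs Abinit to generate the PSPS.nc files, so a working Abinit installation is required.

# compare_pseudos(["Si-low.psp8", "Si-high.psp8"], ecut=40)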
Example 9
    def test_oncvpsp_pseudo_fr(self):
        """
        Test the ONCVPSP Pb pseudo (relativistic version with SO).
        """
        pb = Pseudo.from_file(ref_file("Pb-d-3_r.psp8"))
        repr(pb)
        str(pb)

        # Data persistence
        self.serialize_with_pickle(pb, test_eq=False)
        self.assertMSONable(pb)

        self.assertTrue(pb.symbol == "Pb")
        self.assertEqual(pb.Z, 82.0)
        self.assertEqual(pb.Z_val, 14.0)
        self.assertTrue(pb.isnc)
        self.assertFalse(pb.ispaw)
        self.assertEqual(pb.l_max, 2)
        self.assertEqual(pb.l_local, 4)
        self.assertTrue(pb.supports_soc)
Example 10
    def setUp(self):
        nc_pseudo_fnames = collections.defaultdict(list)
        nc_pseudo_fnames["Si"] = ref_files("14si.pspnc", "14si.4.hgh",
                                           "14-Si.LDA.fhi")

        self.nc_pseudos = collections.defaultdict(list)

        for symbol, fnames in nc_pseudo_fnames.items():
            for fname in fnames:
                root, ext = os.path.splitext(fname)
                pseudo = Pseudo.from_file(fname)
                self.nc_pseudos[symbol].append(pseudo)

                # Save the pseudo as instance attribute whose name
                # is constructed with the rule: symbol_ppformat
                attr_name = symbol + "_" + ext[1:]
                if hasattr(self, attr_name):
                    raise RuntimeError(
                        f"self has already the attribute {attr_name}")

                setattr(self, attr_name, pseudo)
Example 11
def dojopseudo_from_file(filepath):
    """
    Factory function used to construct a :class:`Pseudo` object from file.
    A DojoPseudo has a DojoReport section and this function adds the report
    to the object.

    Args:
        filepath: Path of the pseudopotential file or djrepo file.

    .. note::

        We cannot subclass Pseudo because it's actually the abstract base class
        and Pseudo.from_file is the factory function that returns the concrete subclass.
    """
    filepath = os.path.abspath(filepath)

    dojo_report = None
    if filepath.endswith(".djrepo"):
        dojo_report = DojoReport.from_file(filepath)
        pp_basename = dojo_report["basename"]
        filepath = os.path.join(os.path.dirname(filepath), pp_basename)

    # Init pseudo from file. Return None if parser error.
    pseudo = Pseudo.from_file(filepath)
    if pseudo is None: return pseudo
    #pseudo.__class__.dojo_report = property(lambda self: self.a + 1)

    if dojo_report is not None:
        # We've already read the report.
        pseudo.dojo_report = dojo_report
        return pseudo

    # Read the DojoReport and attach it to the pseudo.
    root, ext = os.path.splitext(filepath)
    djrepo = root + ".djrepo"
    if not os.path.exists(djrepo):
        raise RuntimeError("Cannot find djrepo file at %s" % djrepo)
    pseudo.dojo_report = DojoReport.from_file(djrepo)

    return pseudo
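
Both entry points described in the docstring are handled; a sketch with hypothetical file names:

# p1 = dojopseudo_from_file("Si.psp8")     # report read from the companion Si.djrepo
# p2 = dojopseudo_from_file("Si.djrepo")   # pseudo resolved via the report's "basename" entry
# assert p1.dojo_report is not None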
Example 12
def main():
    try:
        top = sys.argv[1]
    except IndexError:
        print("Usage: extract_djreport TOPDIR")

    # Find all .psp8 files starting from top.
    paths = find_exts(top, ["psp8"], exclude_dirs="_*")
    #print(paths)

    for path in paths:

        try:
            pseudo = Pseudo.from_file(path)
        except Exception as exc:
            print(path, exc)
            raise

        if pseudo is None:
            print("Parser error in %s" % path)
            continue

        report_file = path.replace(".psp8", ".djrepo")
        if os.path.exists(report_file):
            #print("New DOJO file already exists. Ignoring", pseudo.filepath)
            continue

        print("Moving DOJOREPORT to", report_file)

        report = remove_dojo_report(pseudo.filepath)

        # Change md5 and pseudo_type
        report["md5"] = pseudo.compute_md5()
        if report["pseudo_type"] == "norm-conserving":
            report["pseudo_type"] = "NC"

        with open(report_file, "wt") as fh:
            json.dump(report, fh, indent=-1, sort_keys=True)
Example 13
    def test_oncvpsp_pseudo_sr(self):
        """
        Test the ONCVPSP Ge pseudo (scalar relativistic version).
        """
        ger = Pseudo.from_file(ref_file("ge.oncvpsp"))
        assert repr(ger)
        assert str(ger)
        assert isinstance(ger.as_dict(), dict)
        ger.as_tmpfile()

        self.assertTrue(ger.symbol == "Ge")
        self.assertEqual(ger.Z, 32.0)
        self.assertEqual(ger.Z_val, 4.0)
        self.assertTrue(ger.isnc)
        self.assertFalse(ger.ispaw)
        self.assertEqual(ger.l_max, 2)
        self.assertEqual(ger.l_local, 4)
        self.assertEqual(ger.rcore, None)
        assert not ger.supports_soc

        # Data persistence
        self.serialize_with_pickle(ger, test_eq=False)
        self.assertMSONable(ger)
Example 14
    def from_files(cls, density_path, pseudopotential_paths, with_core=True, workdir=None, **kwargs):
        """
        Uses the abinit density files and the bader_ executable from Henkelman et al. to calculate
        the Bader charges of the system. If pseudopotentials are given, the atomic charges will be
        extracted as well. See also :cite:`Henkelman2006`.

        The extraction of the core charges may be a time-consuming calculation, depending on the
        size of the system. A tuning of the parameters may be required (see Density.ae_core_density_on_mesh).

        Args:
            density_path: Path to the abinit density file. Can be a fortran _DEN or a netCDF DEN.nc file.
                In case of a fortran file, cut3d (version >= 8.6.1) is required for the conversion.
            pseudopotential_paths: Dictionary {element: pseudopotential path} for all the elements present
                in the system.
            with_core: Core charges will be extracted from the pseudopotentials with the
                Density.ae_core_density_on_mesh method. Requires pseudopotential_paths.
            workdir: Working directory. If None, a temporary directory is created.
            kwargs: arguments passed to the method ``Density.ae_core_density_on_mesh``

        Returns:
            An instance of :class:`BaderCharges`
        """
        # read the valence density
        # if density is not a netcdf file, convert with cut3d
        if not density_path.endswith('.nc'):
            dff = DensityFortranFile.from_file(density_path)
            density = dff.get_density()
        else:
            density = Density.from_file(density_path)

        structure = density.structure

        atomic_charges = None

        if with_core:
            if not pseudopotential_paths:
                raise ValueError("pseudopotentials should be provided to extract the core densities")

            try:
                from pseudo_dojo.ppcodes.oncvpsp import psp8_get_densities
            except ImportError as exc:
                print("PseudoDojo package required to extract core densities. "
                      "Please install it with `pip install pseudo_dojo`")
                raise exc

            # extract core charge from pseudopotentials on a radial grid in the correct units
            rhoc = {}
            for specie, ppath in pseudopotential_paths.items():
                r = psp8_get_densities(ppath)
                rhoc[specie] = [r.rmesh * bohr_to_angstrom, r.aecore / (4.0 * np.pi) / (bohr_to_angstrom ** 3)]

            # extrapolate the core density on the density grid
            core_density = Density.ae_core_density_on_mesh(density, structure, rhoc, **kwargs)
            density += core_density
            atomic_charges = [s.specie.Z for s in structure]

        elif pseudopotential_paths:
            pseudos = {k: Pseudo.from_file(p) for k, p in pseudopotential_paths.items()}
            atomic_charges = [pseudos[s.specie.name].Z_val for s in structure]

        # Create the working directory if needed; it is required in all branches below.
        workdir = tempfile.mkdtemp() if workdir is None else workdir
        chgcar_path = os.path.join(workdir, 'CHGCAR')
        density.to_chgcar(chgcar_path)

        ba = BaderAnalysis(chgcar_path)

        charges = [ba.get_charge(i) for i in range(len(structure))]

        return cls(charges, structure, atomic_charges)
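
A hypothetical call (paths and element symbols are assumptions); the bader executable must be available, and with_core=True additionally requires the pseudo_dojo package for the core densities.

# charges = BaderCharges.from_files(
#     "run_DEN.nc",
#     pseudopotential_paths={"Si": "Si.psp8", "O": "O.psp8"},
#     with_core=True,
# )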
Example 15
def pseudos(self):
    return [Pseudo.as_pseudo(ref_file("14si.pspnc"))]
Example 16
def oncv_run(options):
    """
    Run oncvpsp, generate djrepo file, plot results. Requires input file.
    """
    # Select calc_type
    calc_type = dict(nor="non-relativistic",
                     sr="scalar-relativistic",
                     fr="fully-relativistic")[options.rel]

    # Build names of psp8 and djson files from input and relativistic mode.
    in_path = options.filename
    root, _ = os.path.splitext(in_path)

    # Enforce convention on output files.
    if options.rel == "nor":
        if not root.endswith("_nor"): root += "_nor"
    elif options.rel == "fr":
        if not root.endswith("_r"):
            root += "_r"
            cprint(
                "FR calculation with input file without `_r` suffix. Will add `_r` to output files",
                "yellow")

    # Build names of output files.
    psp8_path = root + ".psp8"
    djrepo_path = root + ".djrepo"
    out_path = root + ".out"
    if os.path.exists(psp8_path):
        cprint(
            "%s already exists and will be overwritten" %
            os.path.relpath(psp8_path), "yellow")
    if os.path.exists(djrepo_path):
        cprint(
            "%s already exists and will be overwritten" %
            os.path.relpath(djrepo_path), "yellow")
    if os.path.exists(out_path):
        cprint(
            "%s already exists and will be overwritten" %
            os.path.relpath(out_path), "yellow")

    # Build Generator and start generation.
    oncv_ppgen = OncvGenerator.from_file(in_path, calc_type, workdir=None)
    print(oncv_ppgen)
    print(oncv_ppgen.input_str)

    oncv_ppgen.start()
    retcode = oncv_ppgen.wait()

    if oncv_ppgen.status != oncv_ppgen.S_OK:
        cprint("oncvpsp returned %s. Exiting" % retcode, "red")
        return 1

    # Transfer final output file.
    shutil.copy(oncv_ppgen.stdout_path, out_path)

    # Parse the output file
    onc_parser = OncvOutputParser(out_path)
    onc_parser.scan()
    if not onc_parser.run_completed:
        cprint("oncvpsp output is not complete. Exiting", "red")
        return 1

    # Extract the psp8 file from the oncvpsp output and write it to disk.
    s = onc_parser.get_psp8_str()
    with open(psp8_path, "wt") as fh:
        fh.write(s)

    # Write upf if available.
    upf_str = onc_parser.get_upf_str()
    if upf_str is not None:
        with open(psp8_path.replace(".psp8", ".upf"), "wt") as fh:
            fh.write(upf_str)

    pseudo = Pseudo.from_file(psp8_path)
    if pseudo is None:
        cprint("Cannot parse psp8 file: %s" % psp8_path, "red")
        return 1

    # Initialize and write djson file.
    report = DojoReport.empty_from_pseudo(pseudo,
                                          onc_parser.hints,
                                          devel=False)
    report.json_write()

    return 0
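
To summarize the file-naming convention enforced above (the input root "In" is hypothetical):

# fr  run on In.in  ->  In_r.psp8,   In_r.djrepo,   In_r.out    (_r appended if missing)
# nor run on In.in  ->  In_nor.psp8, In_nor.djrepo, In_nor.out  (_nor appended if missing)
# sr  run on In.in  ->  In.psp8,     In.djrepo,     In.out      (root left unchanged)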
Example 17
def oncv_run(options):
    """
    Run oncvpsp, generate djrepo file, plot results. Requires input file.
    """
    # Select calc_type
    calc_type = dict(nor="non-relativistic",
                     sr="scalar-relativistic",
                     fr="fully-relativistic")[options.rel]

    # Build names of psp8 and djson files from input and relativistic mode.
    in_path = options.filename
    root, _ = os.path.splitext(in_path)

    # Enforce convention on output files.
    if options.rel == "nor":
        if not root.endswith("_nor"): root += "_nor"
    elif options.rel == "fr":
        if not root.endswith("_r"):
            root += "_r"
            cprint("FR calculation with input file without `_r` suffix. Will add `_r` to output files", "yellow")

    # Build names of output files.
    psp8_path = root + ".psp8"
    djrepo_path = root + ".djrepo"
    out_path = root + ".out"
    if os.path.exists(psp8_path):
        cprint("%s already exists and will be overwritten" % os.path.relpath(psp8_path), "yellow")
    if os.path.exists(djrepo_path):
        cprint("%s already exists and will be overwritten" % os.path.relpath(djrepo_path), "yellow")
    if os.path.exists(out_path):
        cprint("%s already exists and will be overwritten" % os.path.relpath(out_path), "yellow")

    # Build Generator and start generation.
    oncv_ppgen = OncvGenerator.from_file(in_path, calc_type, workdir=None)
    print(oncv_ppgen)
    print(oncv_ppgen.input_str)

    oncv_ppgen.start()
    retcode = oncv_ppgen.wait()

    if oncv_ppgen.status != oncv_ppgen.S_OK:
       cprint("oncvpsp returned %s. Exiting" % retcode, "red")
       return 1

    # Transfer final output file.
    shutil.copy(oncv_ppgen.stdout_path, out_path)

    # Parse the output file
    onc_parser = OncvOutputParser(out_path)
    onc_parser.scan()
    if not onc_parser.run_completed:
        cprint("oncvpsp output is not complete. Exiting", "red")
        return 1

    # Extract the psp8 file from the oncvpsp output and write it to disk.
    s = onc_parser.get_pseudo_str()
    with open(psp8_path, "wt") as fh:
        fh.write(s)

    pseudo = Pseudo.from_file(psp8_path)
    if pseudo is None:
        cprint("Cannot parse psp8 file: %s" % psp8_path, "red")
        return 1

    # Initialize and write djson file.
    report = DojoReport.empty_from_pseudo(pseudo, onc_parser.hints, devel=False)
    report.json_write()

    return 0
Example 18
    def change_icmod3(self,
                      fcfact_list=(3, 4, 5),
                      rcfact_list=(1.3, 1.35, 1.4, 1.45, 1.5, 1.55)):
        """
        Change the value of fcfact and rcfact in the template. Generate the new pseudos
        and create new directories with the pseudopotentials in the current working directory.

        Returns:
            List of `Pseudo` objects

        Old version with icmod == 1.

        # icmod fcfact
        1 0.085

        New version with icmod == 3.
        # icmod, fcfact (rcfact)
            3    5.0  1.3
        """
        magic = "# icmod fcfact"
        for i, line in enumerate(self.template_lines):
            if line.strip() == magic: break
        else:
            raise ValueError("Cannot find magic line `%s` in template:\n%s" %
                             (magic, "\n".join(self.template_lines)))

        # Extract the parameters from the line.
        pos = i + 1
        line = self.template_lines[pos]

        tokens = line.split()
        icmod = int(tokens[0])

        #if len(tokens) != 3:
        #    raise ValueError("Expecting line with 3 numbers but got:\n%s" % line)
        #icmod, old_fcfact, old_rcfact = int(tokens[0]), float(tokens[1]), float(tokens[2])
        #if icmod != 3:
        #    raise ValueError("Expecting icmod == 3 but got %s" % icmod)

        base_name = os.path.basename(self.filepath).replace(".in", "")
        ppgens = []
        for fcfact, rcfact in product(fcfact_list, rcfact_list):
            new_input = self.template_lines[:]
            new_input[pos] = "%i %s %s\n" % (3, fcfact, rcfact)
            input_str = "".join(new_input)
            #print(input_str)
            ppgen = OncvGenerator(input_str, calc_type=self.calc_type)

            name = base_name + "_fcfact%3.2f_rcfact%3.2f" % (fcfact, rcfact)
            ppgen.name = name
            ppgen.stdin_basename = name + ".in"
            ppgen.stdout_basename = name + ".out"

            # Attach fcfact and rcfact to ppgen
            ppgen.fcfact, ppgen.rcfact = fcfact, rcfact

            if ppgen.start() != 1:
                raise RuntimeError("ppgen.start() failed!")
            ppgens.append(ppgen)

        for ppgen in ppgens:
            retcode = ppgen.wait()
            ppgen.check_status()

        # Ignore errored calculations.
        ok_ppgens = [gen for gen in ppgens if gen.status == gen.S_OK]
        print("%i/%i generations completed with S_OK" %
              (len(ok_ppgens), len(ppgens)))

        ok_pseudos = []
        for ppgen in ok_ppgens:
            # Copy files to dest
            pseudo = ppgen.pseudo
            #dest = os.path.basename(self.filepath) + "_fcfact%3.2f_rcfact%3.2f" % (ppgen.fcfact, ppgen.rcfact)
            dest = os.path.split(self.filepath)[0]
            shutil.copy(os.path.join(ppgen.workdir, ppgen.stdin_basename),
                        dest)
            shutil.copy(os.path.join(ppgen.workdir, ppgen.stdout_basename),
                        dest)

            # Reduce the number of ecuts in the DOJO_REPORT
            # Re-parse the output and use devel=True to overwrite initial psp8 file
            psp8_path = os.path.join(dest, ppgen.name + ".psp8")
            out_path = os.path.join(dest, ppgen.name + ".out")

            parser = OncvOutputParser(out_path)
            parser.scan()

            # Rewrite pseudo file in devel mode.
            with open(psp8_path, "w") as fh:
                fh.write(parser.get_pseudo_str(devel=True))

            # Build new pseudo.
            p = Pseudo.from_file(psp8_path)
            ok_pseudos.append(p)

        return ok_pseudos
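
A hypothetical usage; tuner stands in for whatever object holds the template lines and calc_type:

# ok_pseudos = tuner.change_icmod3(fcfact_list=(4, 5), rcfact_list=(1.35, 1.4))
# Each completed run leaves <template>_fcfact4.00_rcfact1.35.{in,out,psp8} (and so on)
# next to the original template file.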
Example 19
    def check_status(self):
        """Check the status of the run, set and return self.status attribute."""
        if self.status == self.S_OK:
            return self._status

        parser = self.OutputParser(self.stdout_path)

        try:
            parser.scan()
        except parser.Error:
            self._status = self.S_ERROR
            return self._status

        logger.info("run_completed:", parser.run_completed)
        if self.status == self.S_DONE and not parser.run_completed:
            logger.info("Run is not completed!")
            self._status = self.S_ERROR

        if parser.run_completed:
            logger.info("setting status to S_OK")
            self._status = self.S_OK
            #########################################
            # Here we initialize results and plotter.
            #########################################
            if parser.warnings:
                self.errors.extend(parser.warnings)

            try:
                self._results = parser.get_results()
            except parser.Error:
                # File may not be completed.
                time.sleep(2)
                try:
                    self._results = parser.get_results()
                except:
                    raise

            self._plotter = parser.make_plotter()

            # Write Abinit pseudopotential.
            filepath = os.path.join(self.workdir,
                                    parser.atsym + "." + self.format)
            self.filepath = filepath
            #if os.path.exists(filepath):
            #    raise RuntimeError("File %s already exists" % filepath)

            if self.format == 'psp8':

                # Write psp8 file.
                with open(filepath, "wt") as fh:
                    fh.write(parser.get_psp8_str())

                # Add upf string (if present).
                upf_str = parser.get_upf_str()
                if upf_str is not None:
                    with open(filepath.replace(".psp8", ".upf"), "wt") as fh:
                        fh.write(upf_str)

                # Initialize self.pseudo from file.
                self._pseudo = p = Pseudo.from_file(filepath)

                # Add md5 checksum to dojo_report
                if p.has_dojo_report:
                    p.dojo_report["md5"] = p.compute_md5()
                    p.write_dojo_report(report=p.dojo_report)

        if parser.errors:
            logger.warning("setting status to S_ERROR")
            self._status = self.S_ERROR
            self.errors.extend(parser.errors)
            print(self.errors)

        return self._status
Example 20
    def check_status(self):
        """Check the status of the run, set and return self.status attribute."""
        if self.status == self.S_OK:
            return self._status

        parser = self.OutputParser(self.stdout_path)

        try:
            parser.scan()
        except parser.Error:
            self._status = self.S_ERROR
            return self._status

        logger.info("run_completed:", parser.run_completed)
        if self.status == self.S_DONE and not parser.run_completed:
            logger.info("Run is not completed!")
            self._status = self.S_ERROR

        if parser.run_completed:
            logger.info("setting status to S_OK")
            self._status = self.S_OK
            #########################################
            # Here we initialize results and plotter.
            #########################################
            if parser.warnings:
                self.errors.extend(parser.warnings)

            try:
                self._results = parser.get_results()
            except parser.Error:
                # File may not be completed.
                time.sleep(2)
                try:
                    self._results = parser.get_results()
                except:
                    raise

            self._plotter = parser.make_plotter()

            # Write Abinit pseudopotential.
            filepath = os.path.join(self.workdir, parser.atsym + ".psp8")
            #if os.path.exists(filepath):
            #    raise RuntimeError("File %s already exists" % filepath)

            # Initialize self.pseudo from file.
            with open(filepath, "wt") as fh:
                fh.write(parser.get_pseudo_str())

            self._pseudo = p = Pseudo.from_file(filepath)

            # Add md5 checksum to dojo_report
            if p.has_dojo_report:
                p.dojo_report["md5"] = p.compute_md5()
                p.write_dojo_report(report=p.dojo_report)

        if parser.errors:
            logger.warning("setting status to S_ERROR")
            self._status = self.S_ERROR
            self.errors.extend(parser.errors)
            print(self.errors)

        return self._status