Example #1
    def test_check_and_correct(self):
        os.chdir(os.path.join(test_dir, "bad_vasprun"))
        h = VasprunXMLValidator()
        self.assertTrue(h.check())

        # Unconverged still has a valid vasprun.
        os.chdir(os.path.join(test_dir, "unconverged"))
        self.assertFalse(h.check())
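For orientation, the validator can also be used outside a test harness: check() returns True when vasprun.xml is missing or unreadable (i.e. validation failed) and False when it parses. A minimal standalone sketch, with a purely illustrative run-directory name:

import os

from custodian.vasp.validators import VasprunXMLValidator

os.chdir("my_vasp_run")  # hypothetical directory containing a finished VASP run
validator = VasprunXMLValidator()
if validator.check():  # True means vasprun.xml could not be parsed
    print("vasprun.xml failed validation")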
Example #2
    def test_check_and_correct(self):
        os.chdir(os.path.join(test_dir, "bad_vasprun"))
        h = VasprunXMLValidator()
        self.assertTrue(h.check())

        # Unconverged still has a valid vasprun.
        os.chdir(os.path.join(test_dir, "unconverged"))
        shutil.copy("vasprun.xml.electronic", "vasprun.xml")
        self.assertFalse(h.check())
        os.remove("vasprun.xml")
Example #3
    def run_task(self, fw_spec):
        dec = MontyDecoder()
        jobs = dec.process_decoded(self["jobs"])
        fw_env = fw_spec.get("_fw_env", {})
        # Override VASP and gamma VASP commands using fw_env
        if fw_env.get("vasp_cmd"):
            for j in jobs:
                j.vasp_cmd = os.path.expandvars(fw_env["vasp_cmd"])
                logging.info("Vasp command is {}".format(j.vasp_cmd))
        if fw_env.get("gamma_vasp_cmd"):
            for j in jobs:
                j.gamma_vasp_cmd = os.path.expandvars(fw_env["gamma_vasp_cmd"])
                logging.info("Vasp gamma command is {}".format(
                    j.gamma_vasp_cmd))
        # Override custodian scratch dir.
        cust_params = self.get("custodian_params", {})
        if fw_env.get("scratch_root"):
            cust_params["scratch_dir"] = os.path.expandvars(
                fw_env["scratch_root"])

        logging.info("Running with custodian params %s" % cust_params)
        handlers = [
            VaspErrorHandler(),
            MeshSymmetryErrorHandler(),
            UnconvergedErrorHandler(),
            NonConvergingErrorHandler(),
            PotimErrorHandler()
        ]
        validators = [VasprunXMLValidator()]
        c = Custodian(handlers=[h.as_dict() for h in handlers],
                      jobs=jobs,
                      validators=[v.as_dict() for v in validators],
                      **cust_params)
        output = c.run()
        return FWAction(stored_data=output)
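The task above expects self["jobs"] to carry MSON-serialized job dictionaries that MontyDecoder rebuilds into live job objects. A rough sketch of that round trip, assuming custodian's VaspJob (which is MSONable) and a placeholder command:

from monty.json import MontyDecoder
from custodian.vasp.jobs import VaspJob

serialized = [VaspJob(["vasp"]).as_dict()]         # what a fw_spec might carry
jobs = MontyDecoder().process_decoded(serialized)  # back to VaspJob instances
print(type(jobs[0]).__name__)                      # VaspJob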
Example #4
    def test_as_dict(self):
        h = VasprunXMLValidator()
        d = h.as_dict()
        h2 = VasprunXMLValidator.from_dict(d)
        self.assertIsInstance(h2, VasprunXMLValidator)
Example #5
from custodian.vasp.handlers import VaspErrorHandler, UnconvergedErrorHandler, \
    NonConvergingErrorHandler, FrozenJobErrorHandler, StdErrHandler, \
    WalltimeHandler, PositiveEnergyErrorHandler
from custodian.vasp.validators import VaspFilesValidator, VasprunXMLValidator
from custodian.vasp.jobs import VaspJob as cvj  # assumption: cvj is VaspJob, whose classmethod full_opt_run is called below
from custodian import Custodian
import argparse

handlers = [
    VaspErrorHandler(),
    FrozenJobErrorHandler(),
    StdErrHandler(),
    NonConvergingErrorHandler(),
    WalltimeHandler(),
    PositiveEnergyErrorHandler(),
    UnconvergedErrorHandler()
]
validators = [VaspFilesValidator(), VasprunXMLValidator()]


def runvasp(cmd,
            opt=False,
            max_errors=3,
            backup=False,
            auto_gamma=False,
            auto_npar=False,
            ediffg=-.05):
    """
    cmd example:
    cmd=['mpirun', '-np', '32' , '-machinefile', 'hosts','vasp_std']
    """
    if opt:
        jobs = cvj.full_opt_run(cmd,
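The snippet is cut off mid-call, but the docstring shows the intended shape of cmd; a hedged usage sketch with illustrative core counts and hostfile:

cmd = ['mpirun', '-np', '32', '-machinefile', 'hosts', 'vasp_std']
runvasp(cmd, opt=True, max_errors=3)  # takes the full_opt_run branch above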
Example #6
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                DriftErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                AliasingErrorHandler(),
                DriftErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, str):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(
                vasp_cmd,
                auto_npar=auto_npar,
                ediffg=self.get("ediffg"),
                half_kpts_first_relax=self.get("half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "metagga_opt_run":
            jobs = VaspJob.metagga_opt_run(vasp_cmd,
                                           auto_npar=auto_npar,
                                           ediffg=self.get("ediffg"),
                                           half_kpts_first_relax=self.get(
                                               "half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))

        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=9,
                                        half_kpts_first_relax=self.get(
                                            "half_kpts_first_relax",
                                            HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "neb":
            # TODO: @shyuep @HanmeiTang This means that NEB can only be run (i) in reservation mode
            # and (ii) when the queueadapter parameter is overridden and (iii) the queue adapter
            # has a convention for nnodes (with that name). Can't the number of nodes be made a
            # parameter that the user sets differently? e.g., fw_spec["neb_nnodes"] must be set
            # when setting job_type=NEB? Then someone can use this feature in non-reservation
            # mode and without this complication. -computron
            nnodes = int(fw_spec["_queueadapter"]["nnodes"])

            # TODO: @shyuep @HanmeiTang - I am not sure what the code below is doing. It looks like
            # it is trying to override the number of processors. But I tried running the code
            # below after setting "vasp_cmd = 'mpirun -n 16 vasp'" and the code fails.
            # (i) Is this expecting an array vasp_cmd rather than String? If so, that's opposite to
            # the rest of this task's convention and documentation
            # (ii) can we get rid of this hacking in the first place? e.g., allowing the user to
            # separately set the NEB_VASP_CMD as an env_variable and not rewriting the command
            # inside this.
            # -computron

            # Index the tag "-n" or "-np"
            index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
            ppn = int(vasp_cmd[index[0] + 1])
            vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            # Do the same for gamma_vasp_cmd
            if gamma_vasp_cmd:
                index = [i for i, s in enumerate(gamma_vasp_cmd) if '-n' in s]
                ppn = int(gamma_vasp_cmd[index[0] + 1])
                gamma_vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            jobs = [
                VaspNEBJob(vasp_cmd,
                           final=False,
                           auto_npar=auto_npar,
                           gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers

        handler_group = self.get("handler_group", "default")
        if isinstance(handler_group, str):
            handlers = handler_groups[handler_group]
        else:
            handlers = handler_group

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        if job_type == "neb":
            # CI-NEB vasprun.xml is sometimes incomplete and its file structure differs
            validators = []
        else:
            validators = [VasprunXMLValidator(), VaspFilesValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()

        if os.path.exists(zpath("custodian.json")):
            stored_custodian_data = {
                "custodian": loadfn(zpath("custodian.json"))
            }
            return FWAction(stored_data=stored_custodian_data)
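The NEB branch rescales the MPI task count by locating the "-n"/"-np" flag and multiplying the per-node value by the node count taken from the queue adapter. The same string surgery in isolation, with made-up numbers:

nnodes = 4
vasp_cmd = ['mpirun', '-np', '16', 'vasp_std']
index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
ppn = int(vasp_cmd[index[0] + 1])
vasp_cmd[index[0] + 1] = str(nnodes * ppn)
print(vasp_cmd)  # ['mpirun', '-np', '64', 'vasp_std']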
Example #7
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                AliasingErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(vasp_cmd,
                                                 auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=False)
        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=5,
                                        half_kpts_first_relax=False)
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        validators = [VasprunXMLValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()
Example #8
    def run_task(self, fw_spec):

        # write a file containing the formula and task_type for somewhat
        # easier file system browsing
        self._write_formula_file(fw_spec)

        fw_env = fw_spec.get("_fw_env", {})

        if "mpi_cmd" in fw_env:
            mpi_cmd = fw_spec["_fw_env"]["mpi_cmd"]
        elif which("mpirun"):
            mpi_cmd = "mpirun"
        elif which("aprun"):
            mpi_cmd = "aprun"
        else:
            raise ValueError("No MPI command found!")

        # TODO: last two env vars, i.e. SGE and LoadLeveler, are untested
        env_vars = ['PBS_NP', 'SLURM_NTASKS', 'NSLOTS', 'LOADL_TOTAL_TASKS']
        for env_var in env_vars:
            nproc = os.environ.get(env_var, None)
            if nproc is not None: break
        if nproc is None:
            raise ValueError("None of the env vars {} found to set nproc!".format(env_vars))

        v_exe = shlex.split('{} -n {} {}'.format(mpi_cmd, nproc, fw_env.get("vasp_cmd", "vasp")))
        gv_exe = shlex.split('{} -n {} {}'.format(mpi_cmd, nproc, fw_env.get("gvasp_cmd", "gvasp")))

        print("host: {}".format(os.environ['HOSTNAME']))

        for job in self.jobs:
            job.vasp_cmd = v_exe
            job.gamma_vasp_cmd = gv_exe

        incar_errors = check_incar(fw_spec['task_type'])
        if incar_errors:
            raise ValueError("Critical error: INCAR does not pass checks: {}".format(incar_errors))

        logging.basicConfig(level=logging.DEBUG)
        c = Custodian(self.handlers, self.jobs, max_errors=self.max_errors,
                      gzipped_output=False,  # gzip is handled manually below
                      validators=[VasprunXMLValidator()])
        custodian_out = c.run()

        if self.gzip_output:
            for f in os.listdir(os.getcwd()):
                if not f.lower().endswith("gz") and not f.endswith(".OU") and not f.endswith(".ER"):
                    with open(f, 'rb') as f_in, \
                            GzipFile('{}.gz'.format(f), 'wb') as f_out:
                        f_out.writelines(f_in)
                    os.remove(f)

        all_errors = set()
        for run in custodian_out:
            for correction in run['corrections']:
                all_errors.update(correction['errors'])

        stored_data = {'error_list': list(all_errors)}
        update_spec = {'prev_vasp_dir': os.getcwd(),
                       'prev_task_type': fw_spec['task_type'],
                       'mpsnl': fw_spec['mpsnl'],
                       'snlgroup_id': fw_spec['snlgroup_id'],
                       'run_tags': fw_spec['run_tags'],
                       'parameters': fw_spec.get('parameters')}

        return FWAction(stored_data=stored_data, update_spec=update_spec)
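The v_exe/gv_exe assembly above simply formats one string and splits it into an argv list; for illustration, with placeholder values:

import shlex

v_exe = shlex.split('{} -n {} {}'.format('mpirun', 16, 'vasp'))
print(v_exe)  # ['mpirun', '-n', '16', 'vasp']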
Example #9
from pymatgen.io.vasp.inputs import Incar, Poscar, VaspInput, Potcar, Kpoints
import os
import shutil
from custodian.vasp.jobs import VaspJob
from custodian.vasp.handlers import VaspErrorHandler, UnconvergedErrorHandler, \
    MeshSymmetryErrorHandler, NonConvergingErrorHandler, PotimErrorHandler
from custodian.vasp.validators import VasprunXMLValidator
from custodian.custodian import Custodian

inc = Incar.from_file("INCAR")
pot = Potcar.from_file("POTCAR")
pos = Poscar.from_file("POSCAR")
kp = Kpoints.from_file("KPOINTS")
shutil.copy2('/users/knc6/bin/vdw_kernel.bindat', './')
vinput = VaspInput.from_directory(".")
job = VaspJob(['mpirun', '-np', '16', '/users/knc6/VASP/vasp54/src/vasp.5.4.1/bin/vasp_std'],
              final=False, backup=False)
handlers = [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
            NonConvergingErrorHandler(), PotimErrorHandler()]
validators = [VasprunXMLValidator()]
c = Custodian(handlers, [job], max_errors=5, validators=validators)
c.run()