Example #1
    def test_postprocess(self):
        with cd(os.path.join(test_dir, "postprocess")):
            with ScratchDir(".", copy_from_current_on_enter=True):
                shutil.copy("INCAR", "INCAR.backup")

                v = VaspJob("hello",
                            final=False,
                            suffix=".test",
                            copy_magmom=True)
                v.postprocess()
                incar = Incar.from_file("INCAR")
                incar_prev = Incar.from_file("INCAR.test")

                for f in [
                        "INCAR",
                        "KPOINTS",
                        "CONTCAR",
                        "OSZICAR",
                        "OUTCAR",
                        "POSCAR",
                        "vasprun.xml",
                ]:
                    self.assertTrue(os.path.isfile(f"{f}.test"))
                    os.remove(f"{f}.test")
                shutil.move("INCAR.backup", "INCAR")

                self.assertAlmostEqual(incar["MAGMOM"],
                                       [3.007, 1.397, -0.189, -0.189])
                self.assertAlmostEqual(incar_prev["MAGMOM"], [5, -5, 0.6, 0.6])
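With final=False and a non-empty suffix, VaspJob.postprocess() copies (rather than moves) each VASP output file to "<name><suffix>", which is what the loop above asserts; copy_magmom=True additionally writes the final magnetic moments from OUTCAR into the new INCAR, which the MAGMOM assertions check. A minimal sketch outside the unittest harness, assuming a completed VASP run in the current directory and a placeholder command:

from custodian.vasp.jobs import VaspJob

# Copies INCAR -> INCAR.relax1, OUTCAR -> OUTCAR.relax1, etc.
# final=False keeps the originals in place.
job = VaspJob(["vasp_std"], final=False, suffix=".relax1")
job.postprocess()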
Example #2
def get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    handlers = [
        VaspErrorHandler(),
        FrozenJobErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PositiveEnergyErrorHandler()
    ]

    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe)
    elif 'static' in task_type or 'deformed' in task_type:
        jobs = [VaspJob(v_exe)]
    else:
        # non-SCF runs
        jobs = [VaspJob(v_exe)]
        handlers = []

    params = {
        'jobs': [j_decorate(j.as_dict()) for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    }

    return VaspCustodianTask(params)
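j_decorate is not defined in this snippet; it is a helper local to the source project that post-processes each serialized job dict before it is stored in the task parameters. A hypothetical stand-in, consistent only with how it is called (the actual keys it modifies are unknown):

def j_decorate(m_dict):
    # Hypothetical: tweak the serialized VaspJob dict before storage.
    m_dict["auto_npar"] = False  # assumed example modification
    return m_dict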
Example #3
def get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe, gzipped=False)
        handlers = [
            VaspErrorHandler(),
            FrozenJobErrorHandler(),
            MeshSymmetryErrorHandler(),
            NonConvergingErrorHandler()
        ]
    else:
        jobs = [VaspJob(v_exe)]
        handlers = [
            VaspErrorHandler(),
            FrozenJobErrorHandler(),
            MeshSymmetryErrorHandler()
        ]

    params = {
        'jobs': [j_decorate(j.to_dict) for j in jobs],
        'handlers': [h.to_dict for h in handlers],
        'max_errors': 10
    }

    return VaspCustodianTask(params)
Example #4
 def test_setup(self):
     with cd(test_dir):
         with ScratchDir('.', copy_from_current_on_enter=True) as d:
             v = VaspJob("hello")
             v.setup()
             incar = Incar.from_file("INCAR")
             count = multiprocessing.cpu_count()
             if count > 1:
                 self.assertGreater(incar["NPAR"], 1)
Example #5
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                        UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                        PotimErrorHandler(), PositiveEnergyErrorHandler(),
                        FrozenJobErrorHandler()],
            "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                       UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                       PotimErrorHandler(), PositiveEnergyErrorHandler(),
                       FrozenJobErrorHandler(), AliasingErrorHandler()],
            "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(vasp_cmd, auto_npar=auto_npar, ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=False)
        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd, auto_npar=auto_npar, ediffg=self.get("ediffg"),
                                        max_steps=5, half_kpts_first_relax=False)
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        if self.get("max_force_threshold"):
            handlers.append(MaxForceErrorHandler(max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        validators = [VasprunXMLValidator()]

        c = Custodian(handlers, jobs, validators=validators, max_errors=max_errors,
                      scratch_dir=scratch_dir, gzipped_output=gzip_output)

        c.run()
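env_chk comes from atomate: a value written as ">>key<<" is resolved against the worker-specific fw_spec["_fw_env"] dictionary, so the same workflow can target machines with different VASP binaries. A simplified sketch of that behavior (an illustration, not atomate's exact code):

def env_chk_sketch(val, fw_spec, strict=True, default=None):
    # Resolve ">>key<<" placeholders from the per-worker _fw_env mapping.
    if val is None:
        return default
    if isinstance(val, str) and val.startswith(">>") and val.endswith("<<"):
        key = val[2:-2]
        env = fw_spec.get("_fw_env", {})
        if strict and key not in env:
            raise ValueError("env_chk: no _fw_env entry for " + key)
        return env.get(key, default)
    return val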
Example #8
 def test_setup(self):
     with cd(test_dir):
         with ScratchDir('.', copy_from_current_on_enter=True) as d:
             v = VaspJob("hello", auto_npar=True)
             v.setup()
             incar = Incar.from_file("INCAR")
             count = multiprocessing.cpu_count()
             # Need at least 3 CPUs for NPAR to be greater than 1
             if count > 3:
                 self.assertGreater(incar["NPAR"], 1)
Example #9
 def test_setup(self):
     os.chdir(test_dir)
     v = VaspJob("hello")
     v.setup()
     incar = Incar.from_file("INCAR")
     count = multiprocessing.cpu_count()
     if count > 1:
         self.assertGreater(incar["NPAR"], 1)
     shutil.copy("INCAR.orig", "INCAR")
     os.remove("INCAR.orig")
     os.remove("KPOINTS.orig")
     os.remove("POTCAR.orig")
     os.remove("POSCAR.orig")
Example #11
 def run_task(self, fw_spec):
     #workdir=fw_spec['workdir']
     vasp_cmd = fw_spec['vasp_cmd']
     #with cd(workdir):
     incar = Incar.from_file('INCAR')
     kpoints = Kpoints.from_file('KPOINTS')
     poscar = Poscar.from_file('POSCAR')
     potcar = Potcar.from_file('POTCAR')
     try:
         out = Outcar('OUTCAR')  # read from the current working directory
         if len(out.run_stats) != 7:
             raise VaspDoneError()
     except Exception:
         try:
             # promote CONTCAR to POSCAR if a readable CONTCAR exists
             contcar = Structure.from_file('CONTCAR')
             os.rename('CONTCAR', 'POSCAR')
         except Exception:
             pass
         job = VaspJob(vasp_cmd)
         handlers = [VaspErrorHandler(), UnconvergedErrorHandler(),
                     FrozenJobErrorHandler(),
                     NonConvergingErrorHandler(nionic_steps=2, change_algo=True),
                     MeshSymmetryErrorHandler()]
         c = Custodian(handlers, [job], max_errors=10)
         c.run()
     else:
         print('Vasp job was already done well. No need to rerun!')
Example #12
def run_custodian(directory):
    """
    Run VASP under supervision of a custodian in a certain directory.

    Args:
        directory:

    Returns:

    """

    directory = os.path.abspath(directory)
    os.chdir(directory)

    output = os.path.join(directory, "out")
    vasp_cmd = shlex.split(VASP_RUN_COMMAND)

    # Choose not to use certain error messages to be handled
    error_subset = list(VaspErrorHandler.error_msgs.keys())
    error_subset.remove("brmix")
    vasp_handler = VaspErrorHandler(output_filename=output,
                                    errors_subset_to_catch=error_subset)

    quotas_handler = QuotasErrorHandler(output_filename=output)

    handlers = [
        vasp_handler,
        quotas_handler,
        UnconvergedErrorHandler(output_filename=output)
    ]

    jobs = [VaspJob(vasp_cmd=vasp_cmd, output_file=output, stderr_file=output)]

    c = Custodian(handlers, jobs, max_errors=3)
    c.run()
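A usage sketch for the function above (VASP_RUN_COMMAND and QuotasErrorHandler are module-level names from the source project, the latter not part of custodian itself; the path is a placeholder):

run_custodian("/path/to/relaxation")  # runs VASP in that directory under custodian supervision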
Example #13
def get_runs(vasp_command, target=1e-3, max_steps=10, mode="linear"):
    energy = 0
    vinput = VaspInput.from_directory(".")
    kpoints = vinput["KPOINTS"].kpts[0]
    for i in range(max_steps):
        if mode == "linear":
            m = [k * (i + 1) for k in kpoints]
        else:
            m = [k + 1 for k in kpoints]
        if i == 0:
            settings = None
            backup = True
        else:
            backup = False
            v = Vasprun("vasprun.xml")
            e_per_atom = v.final_energy / len(v.final_structure)
            ediff = abs(e_per_atom - energy)
            if ediff < target:
                logging.info("Converged to {} eV/atom!".format(ediff))
                break
            else:
                energy = e_per_atom
                settings = [
                    {"dict": "INCAR",
                     "action": {"_set": {"ISTART": 1}}},
                    {'dict': 'KPOINTS',
                     'action': {'_set': {'kpoints': [m]}}},
                    {"filename": "CONTCAR",
                     "action": {"_file_copy": {"dest": "POSCAR"}}}]
        yield VaspJob(vasp_command, final=False, backup=backup,
                      suffix=".kpoints.{}".format("x".join(map(str, m))),
                      settings_override=settings)
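Custodian accepts a generator of jobs, so get_runs can be passed in directly and the k-point ramp continues until the energy per atom changes by less than target between meshes, or max_steps is exhausted. A usage sketch with a placeholder command:

from custodian.custodian import Custodian

c = Custodian([], get_runs(["vasp_std"], target=1e-3, max_steps=10), max_errors=5)
c.run()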
Example #14
 def run(self, fw_spec):
     # class VaspJob(Job):
     #     """
     #     A basic vasp job. Just runs whatever is in the directory. But conceivably
     #     can be a complex processing of inputs etc. with initialization.
     #     """
     #
     #     def __init__(self, vasp_cmd, output_file="vasp.out", stderr_file="std_err.txt",
     #                  suffix="", final=True, backup=True, auto_npar=True,
     #                  auto_gamma=True, settings_override=None,
     #                  gamma_vasp_cmd=None, copy_magmom=False, auto_continue=False):
     try:
         vasp_cmd = os.environ['VASP_CMD'].split()
     except KeyError:
         raise ValueError('Unable to find vasp command')
     if 'custodian_jobs' in fw_spec:
         jobs = fw_spec['custodian_jobs']
     else:
         jobs = [
             VaspJob(vasp_cmd=vasp_cmd,
                     auto_npar=False,
                     output_file=os.path.join(self.run_dir, 'vasp.out'),
                     stderr_file=os.path.join(self.run_dir, 'std_err.txt'),
                     backup=False,
                     auto_gamma=False)
         ]
     custodian = Custodian(handlers=self.custodian_handlers,
                           jobs=jobs,
                           validators=None,
                           max_errors=10,
                           polling_time_step=10,
                           monitor_freq=30)
     custodian.run()
Example #15
    def test_postprocess(self):
        os.chdir(os.path.join(test_dir, 'postprocess'))
        shutil.copy('INCAR', 'INCAR.backup')

        v = VaspJob("hello", final=False, suffix=".test", copy_magmom=True)
        v.postprocess()
        incar = Incar.from_file("INCAR")
        incar_prev = Incar.from_file("INCAR.test")

        for f in ['INCAR', 'KPOINTS', 'CONTCAR', 'OSZICAR', 'OUTCAR',
                  'POSCAR', 'vasprun.xml']:
            self.assertTrue(os.path.isfile('{}.test'.format(f)))
            os.remove('{}.test'.format(f))
        shutil.move('INCAR.backup', 'INCAR')

        self.assertAlmostEqual(incar['MAGMOM'], [3.007, 1.397, -0.189, -0.189])
        self.assertAlmostEqual(incar_prev["MAGMOM"], [5, -5, 0.6, 0.6])
Example #16
 def run_task(self, fw_spec):
     workdir = fw_spec['workdir']
     vasp_cmd = fw_spec['vasp_cmd']
     os.chdir(workdir)
     jobs = VaspJob.double_relaxation_run(vasp_cmd)
     handlers = [VaspErrorHandler(), UnconvergedErrorHandler(),
                 FrozenJobErrorHandler(),
                 NonConvergingErrorHandler(nionic_steps=5, change_algo=True),
                 MeshSymmetryErrorHandler()]
     c = Custodian(handlers, jobs, max_errors=10)
     c.run()
Example #17
    def test_postprocess(self):
        with cd(os.path.join(test_dir, 'postprocess')):
            with ScratchDir('.', copy_from_current_on_enter=True) as d:
                shutil.copy('INCAR', 'INCAR.backup')

                v = VaspJob("hello", final=False, suffix=".test", copy_magmom=True)
                v.postprocess()
                incar = Incar.from_file("INCAR")
                incar_prev = Incar.from_file("INCAR.test")

                for f in ['INCAR', 'KPOINTS', 'CONTCAR', 'OSZICAR', 'OUTCAR',
                          'POSCAR', 'vasprun.xml']:
                    self.assertTrue(os.path.isfile('{}.test'.format(f)))
                    os.remove('{}.test'.format(f))
                shutil.move('INCAR.backup', 'INCAR')

                self.assertAlmostEqual(incar['MAGMOM'], [3.007, 1.397, -0.189, -0.189])
                self.assertAlmostEqual(incar_prev["MAGMOM"], [5, -5, 0.6, 0.6])
Example #18
def get_runs(vasp_command, target=1e-3, max_steps=10, mode="linear"):
    """
    Generate the runs using a generator until convergence is achieved.
    """
    energy = 0
    vinput = VaspInput.from_directory(".")
    kpoints = vinput["KPOINTS"].kpts[0]
    for i in range(max_steps):
        if mode == "linear":
            m = [k * (i + 1) for k in kpoints]
        else:
            m = [k + 1 for k in kpoints]
        if i == 0:
            settings = None
            backup = True
        else:
            backup = False
            v = Vasprun("vasprun.xml")
            e_per_atom = v.final_energy / len(v.final_structure)
            ediff = abs(e_per_atom - energy)
            if ediff < target:
                logging.info(f"Converged to {ediff} eV/atom!")
                break
            energy = e_per_atom
            settings = [
                {
                    "dict": "INCAR",
                    "action": {
                        "_set": {
                            "ISTART": 1
                        }
                    }
                },
                {
                    "dict": "KPOINTS",
                    "action": {
                        "_set": {
                            "kpoints": [m]
                        }
                    }
                },
                {
                    "filename": "CONTCAR",
                    "action": {
                        "_file_copy": {
                            "dest": "POSCAR"
                        }
                    },
                },
            ]
        yield VaspJob(
            vasp_command,
            final=False,
            backup=backup,
            suffix=f".kpoints.{'x'.join(map(str, m))}",
            settings_override=settings,
        )
Example #19
def _get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe, gzipped=False)
    else:
        jobs = [VaspJob(v_exe)]

    handlers = [VaspErrorHandler(), FrozenJobErrorHandler(), MeshSymmetryErrorHandler()]
    params = {'jobs': [j.to_dict for j in jobs],
              'handlers': [h.to_dict for h in handlers], 'max_errors': 10, 'auto_npar': False, 'auto_gamma': False}

    return VaspCustodianTask(params)
Example #20
def get_runs(args):
    """
    Get the runs.
    """
    vasp_command = args.command.split()
    converged = False
    job_number = 0

    while (not converged) and (job_number < args.max_relax):

        suffix = ".{}{}".format("relax", job_number + 1)

        if job_number == 0:
            backup = True
            # assume the initial guess is poor,
            # start with conjugate gradients
            settings = [{"dict": "INCAR", "action": {"_set": {"IBRION": 2}}}]

        else:
            backup = False
            v = Vasprun("vasprun.xml")

            if len(v.ionic_steps) == 1:
                converged = True

            if job_number < 2 and not converged:

                settings = [
                    {"dict": "INCAR", "action": {"_set": {"ISTART": 1}}},
                    {"file": "CONTCAR", "action": {"_file_copy": {"dest": "POSCAR"}}},
                ]

            # switch to RMM-DIIS once we are near the
            # local minimum (assumed after 2 runs of CG)
            else:
                settings = [
                    {"dict": "INCAR", "action": {"_set": {"ISTART": 1, "IBRION": 1}}},
                    {"file": "CONTCAR", "action": {"_file_copy": {"dest": "POSCAR"}}},
                ]

        job_number += 1
        yield VaspJob(
            vasp_command,
            final=converged,
            backup=backup,
            suffix=suffix,
            settings_override=settings,
        )
Example #21
 def test_setup_run_no_kpts(self):
     # just make sure v.setup() and v.run() exit cleanly when no KPOINTS file is present
     with cd(os.path.join(test_dir, "kspacing")):
         with ScratchDir(".", copy_from_current_on_enter=True):
             v = VaspJob("hello", auto_npar=True)
             v.setup()
             with self.assertRaises(FileNotFoundError):
                 # a FileNotFoundError indicates that v.run() tried to run
                 # subprocess.Popen(cmd, stdout=f_std, stderr=f_err) with
                 # cmd == "hello", so it successfully parsed the input file
                 # directory.
                 v.run()
Example #22
def get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    handlers = [VaspErrorHandler(), FrozenJobErrorHandler(),
                MeshSymmetryErrorHandler(), NonConvergingErrorHandler(), PositiveEnergyErrorHandler()]

    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe, gzipped=False)
    elif 'static' in task_type:
        jobs = [VaspJob(v_exe)]
    else:
        # non-SCF runs
        jobs = [VaspJob(v_exe)]
        handlers = []

    params = {'jobs': [j_decorate(j.as_dict()) for j in jobs],
              'handlers': [h.as_dict() for h in handlers], 'max_errors': 5}

    return VaspCustodianTask(params)
Example #23
    def run_task(self, fw_spec):
        directory = os.path.abspath(self["directory"])
        os.chdir(directory)

        output = os.path.join(directory, "out")
        # TODO Make the output file more general
        vasp_cmd = fw_spec["_fw_env"]["vasp_command"]

        handlers = [
            VaspErrorHandler(output_filename=output),
            UnconvergedErrorHandler(output_filename=output)
        ]

        jobs = [
            VaspJob(vasp_cmd=vasp_cmd,
                    output_file=output,
                    stderr_file=output,
                    auto_npar=False)
        ]

        c = Custodian(handlers, jobs, max_errors=10)
        c.run()
Example #24
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                        NonConvergingErrorHandler(), PotimErrorHandler(),
                        PositiveEnergyErrorHandler(), FrozenJobErrorHandler(), StdErrHandler(),
                        DriftErrorHandler()],
            "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                       NonConvergingErrorHandler(), PotimErrorHandler(),
                       PositiveEnergyErrorHandler(), FrozenJobErrorHandler(),
                       StdErrHandler(), AliasingErrorHandler(), DriftErrorHandler()],
            "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)

        jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        validators = []

        c = Custodian(handlers, jobs, validators=validators, max_errors=max_errors,
                      scratch_dir=scratch_dir, gzipped_output=gzip_output)

        c.run()
Example #25
    def run_task(self, fw_spec):
        directory = self.get("directory", os.getcwd())
        os.chdir(directory)

        stdout_file = self.get("stdout_file", os.path.join(directory, "vasp.out"))
        stderr_file = self.get("stderr_file", os.path.join(directory, "vasp.out"))
        vasp_cmd = fw_spec["_fw_env"]["vasp_cmd"].split(" ")

        default_handlers = [VaspErrorHandler(), UnconvergedErrorHandler()]

        handlers = self.get("handlers", default_handlers)
        for handler in handlers:
            handler.output_filename = stdout_file

        jobs = [VaspJob(vasp_cmd=vasp_cmd,
                        output_file=stdout_file,
                        stderr_file=stderr_file,
                        auto_npar=False)]

        c = Custodian(handlers, jobs, max_errors=10,
                      monitor_freq=self.get("monitor_freq", 30))
        c.run()
Example #26
def runvasp(cmd,
            opt=False,
            max_errors=3,
            backup=False,
            auto_gamma=False,
            auto_npar=False,
            ediffg=-.05):
    """
    cmd example:
    cmd=['mpirun', '-np', '32' , '-machinefile', 'hosts','vasp_std']
    """
    if opt:
        jobs = cvj.full_opt_run(cmd,
                                auto_npar=auto_npar,
                                ediffg=ediffg,
                                backup=backup,
                                auto_gamma=auto_gamma)
    else:
        jobs = [
            cvj(cmd, auto_npar=auto_npar, backup=backup, auto_gamma=auto_gamma)
        ]
    c = Custodian(handlers, jobs, validators=validators, max_errors=max_errors)
    c.run()
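cvj, handlers and validators are module-level names in the source project. A plausible reconstruction consistent with how they are used here (the exact handler and validator lists are assumptions):

from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler
from custodian.vasp.jobs import VaspJob as cvj  # so cvj.full_opt_run is VaspJob.full_opt_run
from custodian.vasp.validators import VasprunXMLValidator

handlers = [VaspErrorHandler()]
validators = [VasprunXMLValidator()]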
Example #27
    def run_task(self, fw_spec):
        directory = os.path.abspath(self["directory"])
        os.chdir(directory)

        stdout_file = self.get("stdout_file",
                               os.path.join(self["directory"], "out"))
        stderr_file = self.get("stderr_file",
                               os.path.join(self["directory"], "out"))
        vasp_cmd = fw_spec["_fw_env"]["vasp_cmd"].split(" ")

        handlers = [
            VaspErrorHandler(output_filename=stdout_file),
            UnconvergedErrorHandler(output_filename=stdout_file)
        ]

        jobs = [
            VaspJob(vasp_cmd=vasp_cmd,
                    output_file=stdout_file,
                    stderr_file=stderr_file,
                    auto_npar=False)
        ]

        c = Custodian(handlers, jobs, max_errors=10)
        c.run()
Example #28
    def run_task(self, fw_spec):
        # Edison setting
        # vasp_cmd = ['aprun', '-n', str(fw_spec["_queueadapter"]["mppwidth"]), fw_spec["_fw_env"]['vasp_cmd']]
        # Vesta setting
        cobalt_partname = os.environ['COBALT_PARTNAME']
        vasp_cmd = [
            'runjob', '-n',
            str(fw_spec["_queueadapter"]["nnodes"]), '--block',
            cobalt_partname, '-p', '1', ":", fw_spec["_fw_env"]['vasp_cmd']
        ]
        job = VaspJob(vasp_cmd=vasp_cmd, auto_gamma=False, auto_npar=False)
        if self["handlers"] == "all":
            hnames = [
                "VaspErrorHandler", "MeshSymmetryErrorHandler",
                "UnconvergedErrorHandler", "NonConvergingErrorHandler",
                "PotimErrorHandler", "WalltimeHandler"
            ]
        else:
            hnames = self["handlers"]
        handlers = [load_class("custodian.vasp.handlers", n)() for n in hnames]
        c = Custodian(handlers, [job], **self.get("custodian_params", {}))
        output = c.run()

        chgcar_dir = os.getcwd()

        MyDB.db_access().connect()
        collection = MyDB.db_access().collection(fw_spec['collection'])
        collection.update(
            {
                "mp-id": fw_spec["mp-id"],
                "pair_index": fw_spec["pair_index"]
            }, {"$set": {
                "chgcar_dir": chgcar_dir
            }})
        MyDB.db_access().close()
        return FWAction(stored_data=output)
Example #29
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object
    The workflow has two steps - a structure relaxation and a static run
    :param structure:
    :return:
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('')
    for j in jobs:  # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [
        VaspErrorHandler(),
        FrozenJobErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler()
    ]
    c_params = {
        'jobs': [j.as_dict() for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    }
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POSCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(
        Firework(tasks,
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx({
        'jobs': [VaspJob('', auto_npar=False).as_dict()],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    })
    fws.append(
        Firework([copytask, setuptask, custodiantask],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
Example #30
from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler, FrozenJobErrorHandler, \
        UnconvergedErrorHandler, MeshSymmetryErrorHandler, MaxForceErrorHandler, \
        PotimErrorHandler, NonConvergingErrorHandler, WalltimeHandler
from custodian.vasp.jobs import VaspJob

vasp_cmd = ['ibrun', '/home1/05018/tg843171/vasp.5.4.4_vtst/bin/vasp_std']
handlers = [FrozenJobErrorHandler(timeout=60)]
jobs = [VaspJob(vasp_cmd, final=True, suffix="", auto_npar=False)]
c = Custodian(handlers, jobs, max_errors=2)
c.run()
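Note that FrozenJobErrorHandler(timeout=60) treats the run as frozen if the output file has not been updated for 60 seconds, far more aggressive than the library default (on the order of hours), and max_errors=2 makes this script give up quickly.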
Example #31
 def test_static(self):
     # Just a basic test of init.
     VaspJob.double_relaxation_run(["vasp"])
Example #32
    def launch_workflow(
        self,
        launchpad_dir="",
        k_product=50,
        job=None,
        user_incar_settings=None,
        potcar_functional="PBE",
        additional_handlers=None,
    ):

        """
            Creates a list of Fireworks. Each Firework represents calculations
            that will be done on a slab system of a compound in a specific
            orientation. Each Firework contains a oriented unit cell relaxation job
            and a WriteSlabVaspInputs which creates os. Firework(s) depending
            on whether or not Termination=True. Vasp outputs from all slab and
            oriented unit cell calculations will then be inserted into a database.
            Args:
                launchpad_dir (str path): The path to my_launchpad.yaml. Defaults to
                    the current working directory containing your runs
                k_product: kpts[0][0]*a. Decide k density without
                    kpoint0, default to 50
                cwd: (str path): The curent working directory. Location of where you
                    want your vasp outputs to be.
                job (VaspJob): The command (cmd) entered into VaspJob object. Default
                    is specifically set for running vasp jobs on Carver at NERSC
                    (use aprun for Hopper or Edison).
                user_incar_settings(dict): A dict specifying additional incar
                    settings, default to None (ediff_per_atom=False)
                potcar_functional (str): default to PBE
        """

        launchpad = LaunchPad.from_file(os.path.join(os.environ["HOME"], launchpad_dir, "my_launchpad.yaml"))
        if self.reset:
            launchpad.reset("", require_password=False)

        # Scratch directory referred to by custodian.
        # May be different on non-NERSC systems.

        if not job:
            job = VaspJob(["mpirun", "-n", "64", "vasp"], auto_npar=False, copy_magmom=True)

        handlers = [
            VaspErrorHandler(),
            NonConvergingErrorHandler(),
            UnconvergedErrorHandler(),
            PotimErrorHandler(),
            PositiveEnergyErrorHandler(),
            FrozenJobErrorHandler(timeout=3600),
        ]
        if additional_handlers:
            handlers.extend(additional_handlers)

        cust_params = {
            "custodian_params": {"scratch_dir": os.path.join("/global/scratch2/sd/", os.environ["USER"])},
            # double_relaxation_run returns a list of jobs rather than a single job
            "jobs": job.double_relaxation_run(job.vasp_cmd, auto_npar=False),
            "handlers": handlers,
            "max_errors": 100,
        }

        fws = []
        for key in self.miller_dict.keys():
            # Enumerate through all compounds in the dictionary,
            # the key is the compositional formula of the compound
            print(key)
            for miller_index in self.miller_dict[key]:
                # Enumerates through all miller indices we
                # want to create slabs of that compound from

                print(str(miller_index))

                max_norm = max(miller_index) if self.max_normal_search else None
                # Whether or not we want to use the
                # max_normal_search algorithm from surface.py
                print "true or false max norm is ", max_norm, self.max_normal_search

                slab = SlabGenerator(
                    self.unit_cells_dict[key][0], miller_index, self.ssize, self.vsize, max_normal_search=max_norm
                )
                oriented_uc = slab.oriented_unit_cell

                if self.fail_safe and len(oriented_uc) > 199:
                    break
                # This method only creates the oriented unit cell, the
                # slabs are created in the WriteSlabVaspInputs task.
                # WriteSlabVaspInputs will create the slabs from
                # the contcar of the oriented unit cell calculation
                handler = []
                tasks = []

                folderbulk = "/%s_%s_k%s_s%sv%s_%s%s%s" % (
                    oriented_uc.composition.reduced_formula,
                    "bulk",
                    k_product,
                    self.ssize,
                    self.vsize,
                    str(miller_index[0]),
                    str(miller_index[1]),
                    str(miller_index[2]),
                )
                cwd = os.getcwd()
                if self.get_bulk_e:
                    tasks.extend(
                        [
                            WriteUCVaspInputs(
                                oriented_ucell=oriented_uc,
                                folder=folderbulk,
                                cwd=cwd,
                                user_incar_settings=user_incar_settings,
                                potcar_functional=potcar_functional,
                                k_product=k_product,
                            ),
                            RunCustodianTask(dir=folderbulk, cwd=cwd, **cust_params),
                            VaspSlabDBInsertTask(
                                struct_type="oriented_unit_cell",
                                loc=folderbulk,
                                cwd=cwd,
                                miller_index=miller_index,
                                **self.vaspdbinsert_params
                            ),
                        ]
                    )

                    # Slab will inherit average final magnetic moment
                    # of the bulk from outcar, will have to generalize
                    # this for systems with different elements later
                    # element = oriented_uc.species[0]
                    # out = Outcar(cwd+folderbulk)
                    # out_mag = out.magnetization
                    # tot_mag = [mag['tot'] for mag in out_mag]
                    # magmom = np.mean(tot_mag)
                    # user_incar_settings['MAGMOM'] = {element: magmom}

                tasks.append(
                    WriteSlabVaspInputs(
                        folder=folderbulk,
                        cwd=cwd,
                        user_incar_settings=user_incar_settings,
                        terminations=self.terminations,
                        custodian_params=cust_params,
                        vaspdbinsert_parameters=self.vaspdbinsert_params,
                        potcar_functional=potcar_functional,
                        k_product=k_product,
                        miller_index=miller_index,
                        min_slab_size=self.ssize,
                        min_vacuum_size=self.vsize,
                        ucell=self.unit_cells_dict[key][0],
                    )
                )

                fw = Firework(tasks, name=folderbulk)

                fws.append(fw)
        wf = Workflow(fws, name="Surface Calculations")
        launchpad.add_wf(wf)
Example #33
 def test_to_from_dict(self):
     v = VaspJob("hello")
     v2 = VaspJob.from_dict(v.as_dict())
     self.assertEqual(type(v2), type(v))
     self.assertEqual(v2.vasp_cmd, "hello")
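Because VaspJob round-trips through as_dict()/from_dict(), the same check works through a JSON file. A small sketch (the file name is a placeholder):

import json

from custodian.vasp.jobs import VaspJob

v = VaspJob("hello")
with open("job.json", "w") as f:
    json.dump(v.as_dict(), f)  # as_dict() returns plain JSON-serializable types
with open("job.json") as f:
    v2 = VaspJob.from_dict(json.load(f))
assert v2.vasp_cmd == "hello"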
Example #34
 def __init__(self, parameters):
     self.update(parameters)
     self.jobs = [VaspJob.from_dict(d) for d in self['jobs']]
     self.handlers = [VaspErrorHandler.from_dict(d)
                      for d in self['handlers']]
     self.max_errors = self.get('max_errors', 1)
Example #35
 def test_continue(self):
     # Test the continuation functionality
     with cd(os.path.join(test_dir, 'postprocess')):
         # Test default functionality
         with ScratchDir('.', copy_from_current_on_enter=True) as d:
             v = VaspJob("hello", auto_continue=True)
             v.setup()
             self.assertTrue(os.path.exists("continue.json"), "continue.json not created")
             v.setup()
             self.assertEqual(Poscar.from_file("CONTCAR").structure,
                              Poscar.from_file("POSCAR").structure)
             self.assertEqual(Incar.from_file('INCAR')['ISTART'], 1)
             v.postprocess()
             self.assertFalse(os.path.exists("continue.json"),
                              "continue.json not deleted after postprocessing")
         # Test explicit action functionality
         with ScratchDir('.', copy_from_current_on_enter=True) as d:
             v = VaspJob("hello", auto_continue=[{"dict": "INCAR",
                                                  "action": {"_set": {"ISTART": 1}}}])
             v.setup()
             v.setup()
             self.assertNotEqual(Poscar.from_file("CONTCAR").structure,
                                 Poscar.from_file("POSCAR").structure)
             self.assertEqual(Incar.from_file('INCAR')['ISTART'], 1)
             v.postprocess()
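The two halves of this test pin down the auto_continue behavior: with auto_continue=True, a second setup() detects the continue.json marker, copies CONTCAR onto POSCAR and sets ISTART=1, and postprocess() removes the marker; with an explicit action list, only the listed actions are applied (here just ISTART=1), so POSCAR is left untouched.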
Example #36
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                DriftErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                AliasingErrorHandler(),
                DriftErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, str):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(
                vasp_cmd,
                auto_npar=auto_npar,
                ediffg=self.get("ediffg"),
                half_kpts_first_relax=self.get("half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "metagga_opt_run":
            jobs = VaspJob.metagga_opt_run(vasp_cmd,
                                           auto_npar=auto_npar,
                                           ediffg=self.get("ediffg"),
                                           half_kpts_first_relax=self.get(
                                               "half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))

        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=9,
                                        half_kpts_first_relax=self.get(
                                            "half_kpts_first_relax",
                                            HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "neb":
            # TODO: @shyuep @HanmeiTang This means that NEB can only be run (i) in reservation mode
            # and (ii) when the queueadapter parameter is overridden and (iii) the queue adapter
            # has a convention for nnodes (with that name). Can't the number of nodes be made a
            # parameter that the user sets differently? e.g., fw_spec["neb_nnodes"] must be set
            # when setting job_type=NEB? Then someone can use this feature in non-reservation
            # mode and without this complication. -computron
            nnodes = int(fw_spec["_queueadapter"]["nnodes"])

            # TODO: @shyuep @HanmeiTang - I am not sure what the code below is doing. It looks like
            # it is trying to override the number of processors. But I tried running the code
            # below after setting "vasp_cmd = 'mpirun -n 16 vasp'" and the code fails.
            # (i) Is this expecting an array vasp_cmd rather than String? If so, that's opposite to
            # the rest of this task's convention and documentation
            # (ii) can we get rid of this hacking in the first place? e.g., allowing the user to
            # separately set the NEB_VASP_CMD as an env_variable and not rewriting the command
            # inside this.
            # -computron

            # Index the tag "-n" or "-np"
            index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
            ppn = int(vasp_cmd[index[0] + 1])
            vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            # Do the same for gamma_vasp_cmd
            if gamma_vasp_cmd:
                index = [i for i, s in enumerate(gamma_vasp_cmd) if '-n' in s]
                ppn = int(gamma_vasp_cmd[index[0] + 1])
                gamma_vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            jobs = [
                VaspNEBJob(vasp_cmd,
                           final=False,
                           auto_npar=auto_npar,
                           gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers

        handler_group = self.get("handler_group", "default")
        if isinstance(handler_group, str):
            handlers = handler_groups[handler_group]
        else:
            handlers = handler_group

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        if job_type == "neb":
            validators = [
            ]  # CINEB vasprun.xml sometimes incomplete, file structure different
        else:
            validators = [VasprunXMLValidator(), VaspFilesValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()

        if os.path.exists(zpath("custodian.json")):
            stored_custodian_data = {
                "custodian": loadfn(zpath("custodian.json"))
            }
            return FWAction(stored_data=stored_custodian_data)
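As a worked example of the NEB command rewrite above: with vasp_cmd = ["mpirun", "-n", "16", "vasp"] and nnodes = 4, the comprehension finds the "-n" token at index 1, ppn becomes 16, and the command is rewritten to mpirun -n 64 vasp.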
Example #37
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object
    The workflow has two steps - a structure relaxation and a static run
    :param structure:
    :return:
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('', gzipped=False)
    for j in jobs:  # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [VaspErrorHandler(), FrozenJobErrorHandler(), MeshSymmetryErrorHandler(),
                NonConvergingErrorHandler()]
    c_params = {'jobs': [j.as_dict() for j in jobs], 'handlers': [h.as_dict() for h in handlers], 'max_errors': 5}
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POSCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(Firework(tasks, spec, name=get_name(structure, spec['task_type']), fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()], spec, name=get_name(structure, spec['task_type']), fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx({'jobs': [VaspJob('', auto_npar=False).as_dict()], 'handlers': [h.as_dict() for h in handlers], 'max_errors': 5})
    fws.append(Firework([copytask, setuptask, custodiantask], spec, name=get_name(structure, spec['task_type']), fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()], spec, name=get_name(structure, spec['task_type']), fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
Example #39
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                        NonConvergingErrorHandler(), PotimErrorHandler(),
                        PositiveEnergyErrorHandler(), FrozenJobErrorHandler(), StdErrHandler(),
                        DriftErrorHandler()],
            "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                       NonConvergingErrorHandler(), PotimErrorHandler(),
                       PositiveEnergyErrorHandler(), FrozenJobErrorHandler(),
                       StdErrHandler(), AliasingErrorHandler(), DriftErrorHandler()],
            "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(vasp_cmd, auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "metagga_opt_run":
            jobs = VaspJob.metagga_opt_run(vasp_cmd, auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))

        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd, auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=9,
                                        half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "neb":
            # TODO: @shyuep @HanmeiTang This means that NEB can only be run (i) in reservation mode
            # and (ii) when the queueadapter parameter is overridden and (iii) the queue adapter
            # has a convention for nnodes (with that name). Can't the number of nodes be made a
            # parameter that the user sets differently? e.g., fw_spec["neb_nnodes"] must be set
            # when setting job_type=NEB? Then someone can use this feature in non-reservation
            # mode and without this complication. -computron
            nnodes = int(fw_spec["_queueadapter"]["nnodes"])

            # TODO: @shyuep @HanmeiTang - I am not sure what the code below is doing. It looks like
            # it is trying to override the number of processors. But I tried running the code
            # below after setting "vasp_cmd = 'mpirun -n 16 vasp'" and the code fails.
            # (i) Is this expecting an array vasp_cmd rather than String? If so, that's opposite to
            # the rest of this task's convention and documentation
            # (ii) can we get rid of this hacking in the first place? e.g., allowing the user to
            # separately set the NEB_VASP_CMD as an env_variable and not rewriting the command
            # inside this.
            # -computron

            # Index the tag "-n" or "-np"
            index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
            ppn = int(vasp_cmd[index[0] + 1])
            vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            # Do the same for gamma_vasp_cmd
            if gamma_vasp_cmd:
                index = [i for i, s in enumerate(gamma_vasp_cmd) if '-n' in s]
                ppn = int(gamma_vasp_cmd[index[0] + 1])
                gamma_vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            jobs = [VaspNEBJob(vasp_cmd, final=False, auto_npar=auto_npar,
                               gamma_vasp_cmd=gamma_vasp_cmd)]
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers

        handler_group = self.get("handler_group", "default")
        if isinstance(handler_group, six.string_types):
            handlers = handler_groups[handler_group]
        else:
            handlers = handler_group

        if self.get("max_force_threshold"):
            handlers.append(MaxForceErrorHandler(max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        if job_type == "neb":
            validators = []  # CINEB vasprun.xml sometimes incomplete, file structure different
        else:
            validators = [VasprunXMLValidator(), VaspFilesValidator()]

        c = Custodian(handlers, jobs, validators=validators, max_errors=max_errors,
                      scratch_dir=scratch_dir, gzipped_output=gzip_output)

        c.run()

        if os.path.exists(zpath("custodian.json")):
            return FWAction(stored_data=loadfn(zpath("custodian.json")))
Example #40
import sys

from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler, \
    UnconvergedErrorHandler, AliasingErrorHandler, FrozenJobErrorHandler, \
    PositiveEnergyErrorHandler, MeshSymmetryErrorHandler
from custodian.vasp.jobs import VaspJob

handlers = [
    VaspErrorHandler(),
    UnconvergedErrorHandler(),
    AliasingErrorHandler(),
    FrozenJobErrorHandler(),
    PositiveEnergyErrorHandler(),
    MeshSymmetryErrorHandler()
]
jobs = [VaspJob(sys.argv[1:])]
c = Custodian(handlers, jobs, max_errors=10)
c.run()
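Invoked as, for example, python run_vasp.py mpirun -np 16 vasp_std (the script name is a placeholder), everything after the script name becomes sys.argv[1:], i.e. the full MPI command handed to VaspJob.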
Example #41
    def test_continue(self):
        # Test the continuation functionality
        with cd(os.path.join(test_dir, 'postprocess')):
            # Test default functionality
            with ScratchDir('.', copy_from_current_on_enter=True) as d:
                v = VaspJob("hello", auto_continue=True)
                v.setup()
                self.assertTrue(os.path.exists("continue.json"),
                                "continue.json not created")
                v.setup()
                self.assertEqual(
                    Poscar.from_file("CONTCAR").structure,
                    Poscar.from_file("POSCAR").structure)
                self.assertEqual(Incar.from_file('INCAR')['ISTART'], 1)
                v.postprocess()
                self.assertFalse(
                    os.path.exists("continue.json"),
                    "continue.json not deleted after postprocessing")
            # Test explicit action functionality
            with ScratchDir('.', copy_from_current_on_enter=True) as d:
                v = VaspJob("hello",
                            auto_continue=[{
                                "dict": "INCAR",
                                "action": {
                                    "_set": {
                                        "ISTART": 1
                                    }
                                }
                            }])
                v.setup()
                v.setup()
                self.assertNotEqual(
                    Poscar.from_file("CONTCAR").structure,
                    Poscar.from_file("POSCAR").structure)
                self.assertEqual(Incar.from_file('INCAR')['ISTART'], 1)
                v.postprocess()
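                # Note: the explicit auto_continue form above takes a list of
                # dict-modder actions (the same schema as settings_override),
                # applied when continuing from an existing continue.json.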
Beispiel #42
0
def get_jobs(args):
    # Returns a generator of jobs. Allows for "infinite" jobs.
    vasp_command = args.command.split()
    # save initial INCAR for rampU runs
    n_ramp_u = args.jobs.count('rampU')
    ramps = 0
    if n_ramp_u:
        incar = Incar.from_file('INCAR')
        ldauu = incar['LDAUU']
        ldauj = incar['LDAUJ']

    njobs = len(args.jobs)
    post_settings = []  # append to this list to have settings applied on the next job
    for i, job in enumerate(args.jobs):
        final = i == njobs - 1
        if any(c.isdigit() for c in job):
            suffix = "." + job
        else:
            suffix = ".{}{}".format(job, i + 1)
        settings = post_settings
        post_settings = []
        backup = i == 0
        copy_magmom = False
        vinput = VaspInput.from_directory(".")
        if i > 0:
            settings.append({
                "file": "CONTCAR",
                "action": {
                    "_file_copy": {
                        "dest": "POSCAR"
                    }
                }
            })

        job_type = job.lower()
        auto_npar = True

        if args.no_auto_npar:
            auto_npar = False

        if job_type.startswith("static_derived"):
            from pymatgen.io.vasp.sets import MPStaticSet
            vis = MPStaticSet.from_prev_calc(".",
                                             user_incar_settings={
                                                 "LWAVE": True,
                                                 "EDIFF": 1e-6
                                             },
                                             ediff_per_atom=False)
            settings.extend([{
                "dict": "INCAR",
                "action": {
                    "_set": dict(vis.incar)
                }
            }, {
                'dict': 'KPOINTS',
                'action': {
                    '_set': vis.kpoints.as_dict()
                }
            }])

        if job_type.startswith("static_dielectric_derived"):
            from pymatgen.io.vasp.sets import MPStaticSet, MPStaticDielectricDFPTVaspInputSet

            # vis = MPStaticSet.from_prev_calc(
            #     ".", user_incar_settings={"EDIFF": 1e-6, "IBRION": 8,
            #                               "LEPSILON": True, 'LREAL':False,
            #                               "LPEAD": True, "ISMEAR": 0,
            #                               "SIGMA": 0.01},
            #     ediff_per_atom=False)
            vis = MPStaticDielectricDFPTVaspInputSet()
            incar = vis.get_incar(vinput["POSCAR"].structure)
            unset = {}
            for k in [
                    "NPAR", "KPOINT_BSE", "LAECHG", "LCHARG", "LVHAR", "NSW"
            ]:
                incar.pop(k, None)
                if k in vinput["INCAR"]:
                    unset[k] = 1
            kpoints = vis.get_kpoints(vinput["POSCAR"].structure)
            settings.extend([{
                "dict": "INCAR",
                "action": {
                    "_set": dict(incar),
                    "_unset": unset
                }
            }, {
                'dict': 'KPOINTS',
                'action': {
                    '_set': kpoints.as_dict()
                }
            }])
            auto_npar = False
        elif job_type.startswith("static"):
            m = [i * args.static_kpoint for i in vinput["KPOINTS"].kpts[0]]
            settings.extend([{
                "dict": "INCAR",
                "action": {
                    "_set": {
                        "NSW": 0
                    }
                }
            }, {
                'dict': 'KPOINTS',
                'action': {
                    '_set': {
                        'kpoints': [m]
                    }
                }
            }])

        elif job_type.startswith("nonscf_derived"):
            from pymatgen.io.vasp.sets import MPNonSCFSet
            vis = MPNonSCFSet.from_prev_calc(
                ".", copy_chgcar=False, user_incar_settings={"LWAVE": True})
            settings.extend([{
                "dict": "INCAR",
                "action": {
                    "_set": dict(vis.incar)
                }
            }, {
                'dict': 'KPOINTS',
                'action': {
                    '_set': vis.kpoints.as_dict()
                }
            }])

        elif job_type.startswith("optics_derived"):
            from pymatgen.io.vasp.sets import MPNonSCFSet
            vis = MPNonSCFSet.from_prev_calc(".",
                                             optics=True,
                                             copy_chgcar=False,
                                             nedos=2001,
                                             mode="uniform",
                                             nbands_factor=5,
                                             user_incar_settings={
                                                 "LWAVE": True,
                                                 "ALGO": "Exact",
                                                 "SIGMA": 0.01,
                                                 "EDIFF": 1e-6
                                             },
                                             ediff_per_atom=False)
            settings.extend([{
                "dict": "INCAR",
                "action": {
                    "_set": dict(vis.incar)
                }
            }, {
                'dict': 'KPOINTS',
                'action': {
                    '_set': vis.kpoints.as_dict()
                }
            }])

        elif job_type.startswith("rampu"):
            f = ramps / (n_ramp_u - 1)
            settings.append({
                "dict": "INCAR",
                "action": {
                    "_set": {
                        "LDAUJ": [j * f for j in ldauj],
                        "LDAUU": [u * f for u in ldauu]
                    }
                }
            })
            copy_magmom = True
            ramps += 1
        elif job_type.startswith("quick_relax") or job_type.startswith(\
                "quickrelax"):
            kpoints = vinput["KPOINTS"]
            incar = vinput["INCAR"]
            structure = vinput["POSCAR"].structure
            if "ISMEAR" in incar:
                post_settings.append({
                    "dict": "INCAR",
                    "action": {
                        "_set": {
                            "ISMEAR": incar["ISMEAR"]
                        }
                    }
                })
            else:
                post_settings.append({
                    "dict": "INCAR",
                    "action": {
                        "_unset": {
                            "ISMEAR": 1
                        }
                    }
                })
            post_settings.append({
                "dict": "KPOINTS",
                "action": {
                    "_set": kpoints.as_dict()
                }
            })
            # lattice vectors shorter than ~9 Å get more than one k-point
            # subdivision (int(18 / l) per axis, floored at 1)
            low_kpoints = Kpoints.gamma_automatic(
                [max(int(18 / l), 1) for l in structure.lattice.abc])
            settings.extend([{
                "dict": "INCAR",
                "action": {
                    "_set": {
                        "ISMEAR": 0
                    }
                }
            }, {
                'dict': 'KPOINTS',
                'action': {
                    '_set': low_kpoints.as_dict()
                }
            }])

            # let VASP determine ENCUT (the default is lower than what is
            # needed for compatibility with other runs, so the original value
            # is restored via post_settings)
            if "ENCUT" in incar:
                post_settings.append({
                    "dict": "INCAR",
                    "action": {
                        "_set": {
                            "ENCUT": incar["ENCUT"]
                        }
                    }
                })
                settings.append({
                    "dict": "INCAR",
                    "action": {
                        "_unset": {
                            "ENCUT": 1
                        }
                    }
                })

        elif job_type.startswith("relax"):
            pass
        elif job_type.startswith("full_relax"):
            for j in VaspJob.full_opt_run(vasp_command):
                yield j
        else:
            print("Unsupported job type: {}".format(job))
            sys.exit(-1)

        if not job_type.startswith("full_relax"):
            yield VaspJob(vasp_command,
                          final=final,
                          suffix=suffix,
                          backup=backup,
                          settings_override=settings,
                          copy_magmom=copy_magmom,
                          auto_npar=auto_npar)
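
# Hypothetical usage sketch (handlers assumed defined elsewhere): Custodian
# consumes the generator lazily, so each yielded job sees the output files of
# the previous one, e.g.:
#     c = Custodian(handlers, get_jobs(args), max_errors=10)
#     c.run()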
Beispiel #43
0
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                AliasingErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(vasp_cmd,
                                                 auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=False)
        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=5,
                                        half_kpts_first_relax=False)
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        validators = [VasprunXMLValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()
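
        # Hypothetical parameterization sketch (assuming this is an
        # atomate-style RunVaspCustodian firetask, with ">>vasp_cmd<<"
        # resolved through env_chk from the worker's env settings):
        #     RunVaspCustodian(vasp_cmd=">>vasp_cmd<<",
        #                      job_type="double_relaxation_run",
        #                      handler_group="strict")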
Beispiel #45
0
    def test_static(self):
        # Just a basic test of init.
        VaspJob.double_relaxation_run(["vasp"])
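        # double_relaxation_run is expected to return a list of two chained
        # VaspJob objects (suffixes ".relax1" and ".relax2"), the second
        # starting from the first run's CONTCAR.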