Example #1
def get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe, gzipped=False)
        handlers = [
            VaspErrorHandler(),
            FrozenJobErrorHandler(),
            MeshSymmetryErrorHandler(),
            NonConvergingErrorHandler()
        ]
    else:
        jobs = [VaspJob(v_exe)]
        handlers = [
            VaspErrorHandler(),
            FrozenJobErrorHandler(),
            MeshSymmetryErrorHandler()
        ]

    params = {
        'jobs': [j_decorate(j.to_dict) for j in jobs],
        'handlers': [h.to_dict for h in handlers],
        'max_errors': 10
    }

    return VaspCustodianTask(params)
Example #2
def get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    handlers = [
        VaspErrorHandler(),
        FrozenJobErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PositiveEnergyErrorHandler()
    ]

    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe)
    elif 'static' in task_type or 'deformed' in task_type:
        jobs = [VaspJob(v_exe)]
    else:
        # non-SCF runs
        jobs = [VaspJob(v_exe)]
        handlers = []

    params = {
        'jobs': [j_decorate(j.as_dict()) for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    }

    return VaspCustodianTask(params)
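Examples #1 and #2 store each job and handler as a dict so that the task can be serialized into a FireWorks spec. The VaspCustodianTask implementation itself is not shown here; the following is only a hedged sketch of how such stored params could be turned back into live objects and executed, assuming the decorated dicts keep the standard MSONable format produced by as_dict():

from custodian import Custodian
from monty.json import MontyDecoder

def run_custodian_from_params(params):
    # rebuild Job and ErrorHandler objects from their serialized dicts
    dec = MontyDecoder()
    jobs = [dec.process_decoded(d) for d in params['jobs']]
    handlers = [dec.process_decoded(d) for d in params['handlers']]
    # run the jobs under custodian with the stored error budget
    c = Custodian(handlers, jobs, max_errors=params['max_errors'])
    return c.run()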
Example #3
 def run_task(self, fw_spec):
     #workdir=fw_spec['workdir']
     vasp_cmd = fw_spec['vasp_cmd']
     #with cd(workdir):
     # read the input files from the current directory to check that they are present and parsable
     incar = Incar.from_file('INCAR')
     kpoints = Kpoints.from_file('KPOINTS')
     poscar = Poscar.from_file('POSCAR')
     potcar = Potcar.from_file('POTCAR')
     try:
         # an OUTCAR with fewer than 7 run_stats entries means the previous run did not finish
         out = Outcar('OUTCAR')
         if len(out.run_stats) != 7:
             raise VaspDoneError()
     except Exception:
         # restart from CONTCAR if one exists, then (re)run VASP under custodian
         try:
             contcar = Structure.from_file('CONTCAR')
             os.rename('CONTCAR', 'POSCAR')
         except Exception:
             pass
         job = VaspJob(vasp_cmd)
         handlers = [
             VaspErrorHandler(),
             UnconvergedErrorHandler(),
             FrozenJobErrorHandler(),
             NonConvergingErrorHandler(nionic_steps=2, change_algo=True),
             MeshSymmetryErrorHandler()
         ]
         c = Custodian(handlers, [job], max_errors=10)
         c.run()
     else:
         print('Vasp job was already done well. No need to rerun!')
Example #4
 def run_task(self, fw_spec):
     workdir = fw_spec['workdir']
     vasp_cmd = fw_spec['vasp_cmd']
     os.chdir(workdir)
     jobs = VaspJob.double_relaxation_run(vasp_cmd)
     handlers = [
         VaspErrorHandler(),
         UnconvergedErrorHandler(),
         FrozenJobErrorHandler(),
         NonConvergingErrorHandler(nionic_steps=5, change_algo=True),
         MeshSymmetryErrorHandler()
     ]
     c = Custodian(handlers, jobs, max_errors=10)
     c.run()
Example #5
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                        NonConvergingErrorHandler(), PotimErrorHandler(),
                        PositiveEnergyErrorHandler(), FrozenJobErrorHandler(), StdErrHandler(),
                        DriftErrorHandler()],
            "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                       NonConvergingErrorHandler(), PotimErrorHandler(),
                       PositiveEnergyErrorHandler(), FrozenJobErrorHandler(),
                       StdErrHandler(), AliasingErrorHandler(), DriftErrorHandler()],
            "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)

        jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        validators = []

        c = Custodian(handlers, jobs, validators=validators, max_errors=max_errors,
                      scratch_dir=scratch_dir, gzipped_output=gzip_output)

        c.run()
Example #6
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from custodian.vasp.jobs import VaspJob as cvj
from custodian.vasp.validators import VaspFilesValidator, VasprunXMLValidator
from custodian.vasp.handlers import (VaspErrorHandler, UnconvergedErrorHandler,
                                     NonConvergingErrorHandler, FrozenJobErrorHandler,
                                     StdErrHandler, WalltimeHandler,
                                     PositiveEnergyErrorHandler)
from custodian import Custodian
import argparse

handlers = [
    VaspErrorHandler(),
    FrozenJobErrorHandler(),
    StdErrHandler(),
    NonConvergingErrorHandler(),
    WalltimeHandler(),
    PositiveEnergyErrorHandler(),
    UnconvergedErrorHandler()
]
validators = [VaspFilesValidator(), VasprunXMLValidator()]


def runvasp(cmd,
            opt=False,
            max_errors=3,
            backup=False,
            auto_gamma=False,
            auto_npar=False,
            ediffg=-.05):
    """
Example #7
 def test_frozen_job(self):
     h = FrozenJobErrorHandler()
     d = h.correct()
     self.assertEqual(d['errors'], ['Frozen job'])
     self.assertEqual(Incar.from_file("INCAR")['ALGO'], "Normal")
Example #8
from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler, FrozenJobErrorHandler, \
        UnconvergedErrorHandler, MeshSymmetryErrorHandler, MaxForceErrorHandler, \
        PotimErrorHandler, NonConvergingErrorHandler, WalltimeHandler
from custodian.vasp.jobs import VaspJob

vasp_cmd = ['ibrun', '/home1/05018/tg843171/vasp.5.4.4_vtst/bin/vasp_std']
handlers = [FrozenJobErrorHandler(timeout=60)]
jobs = [VaspJob(vasp_cmd, final=True, suffix="", auto_npar=False)]
c = Custodian(handlers, jobs, max_errors=2)
c.run()
Example #9
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object
    The workflow has two steps - a structure relaxation and a static run
    :param structure:
    :return:
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('')
    for j in jobs:  # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [
        VaspErrorHandler(),
        FrozenJobErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler()
    ]
    c_params = {
        'jobs': [j.as_dict() for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    }
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POTCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(
        Firework(tasks,
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx({
        'jobs': [VaspJob('', auto_npar=False).as_dict()],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    })
    fws.append(
        Firework([copytask, setuptask, custodiantask],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
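A hedged usage sketch for the workflow builder above (the structure file and launchpad setup are illustrative and assume a standard FireWorks configuration plus a recent pymatgen):

from fireworks import LaunchPad
from pymatgen.core import Structure

struct = Structure.from_file("POSCAR")   # structure to relax
wf = structure_to_wf(struct)             # build the 4-Firework workflow
lp = LaunchPad.auto_load()               # reads my_launchpad.yaml
lp.add_wf(wf)                            # queue it for execution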
Example #11
 def test_frozen_job(self):
     h = FrozenJobErrorHandler()
     d = h.correct()
     self.assertEqual(d["errors"], ["Frozen job"])
     self.assertEqual(Incar.from_file("INCAR")["ALGO"], "Normal")
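The unit tests in Examples #7 and #11 call the handler's correct() method directly. As a minimal hedged sketch of the same check/correct protocol outside a test harness (the output file name and timeout are illustrative):

from custodian.vasp.handlers import FrozenJobErrorHandler

h = FrozenJobErrorHandler(output_filename='vasp.out', timeout=3600)
if h.check():        # True if vasp.out has not been updated within `timeout` seconds
    d = h.correct()  # applies a fix (e.g. switching ALGO) and reports what it did
    print(d['errors'], d['actions'])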
Example #12
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                DriftErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                AliasingErrorHandler(),
                DriftErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, str):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(
                vasp_cmd,
                auto_npar=auto_npar,
                ediffg=self.get("ediffg"),
                half_kpts_first_relax=self.get("half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "metagga_opt_run":
            jobs = VaspJob.metagga_opt_run(vasp_cmd,
                                           auto_npar=auto_npar,
                                           ediffg=self.get("ediffg"),
                                           half_kpts_first_relax=self.get(
                                               "half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))

        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=9,
                                        half_kpts_first_relax=self.get(
                                            "half_kpts_first_relax",
                                            HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "neb":
            # TODO: @shyuep @HanmeiTang This means that NEB can only be run (i) in reservation mode
            # and (ii) when the queueadapter parameter is overridden and (iii) the queue adapter
            # has a convention for nnodes (with that name). Can't the number of nodes be made a
            # parameter that the user sets differently? e.g., fw_spec["neb_nnodes"] must be set
            # when setting job_type=NEB? Then someone can use this feature in non-reservation
            # mode and without this complication. -computron
            nnodes = int(fw_spec["_queueadapter"]["nnodes"])

            # TODO: @shyuep @HanmeiTang - I am not sure what the code below is doing. It looks like
            # it is trying to override the number of processors. But I tried running the code
            # below after setting "vasp_cmd = 'mpirun -n 16 vasp'" and the code fails.
            # (i) Is this expecting an array vasp_cmd rather than String? If so, that's opposite to
            # the rest of this task's convention and documentation
            # (ii) can we get rid of this hacking in the first place? e.g., allowing the user to
            # separately set the NEB_VASP_CMD as an env_variable and not rewriting the command
            # inside this.
            # -computron

            # Index the tag "-n" or "-np"
            index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
            ppn = int(vasp_cmd[index[0] + 1])
            vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            # Do the same for gamma_vasp_cmd
            if gamma_vasp_cmd:
                index = [i for i, s in enumerate(gamma_vasp_cmd) if '-n' in s]
                ppn = int(gamma_vasp_cmd[index[0] + 1])
                gamma_vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            jobs = [
                VaspNEBJob(vasp_cmd,
                           final=False,
                           auto_npar=auto_npar,
                           gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers

        handler_group = self.get("handler_group", "default")
        if isinstance(handler_group, str):
            handlers = handler_groups[handler_group]
        else:
            handlers = handler_group

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        if job_type == "neb":
            # CI-NEB vasprun.xml is sometimes incomplete and its file structure differs,
            # so skip output validation for NEB runs
            validators = []
        else:
            validators = [VasprunXMLValidator(), VaspFilesValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()

        if os.path.exists(zpath("custodian.json")):
            stored_custodian_data = {
                "custodian": loadfn(zpath("custodian.json"))
            }
            return FWAction(stored_data=stored_custodian_data)
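This run_task excerpt accepts handler_group either as the name of a predefined group or as an explicit list of handlers. A hedged usage sketch, assuming the excerpt belongs to a firetask such as atomate's RunVaspCustodian (the class name does not appear in the excerpt itself):

from atomate.vasp.firetasks.run_calc import RunVaspCustodian
from custodian.vasp.handlers import VaspErrorHandler, FrozenJobErrorHandler

# named handler group, resolved via handler_groups inside run_task
t1 = RunVaspCustodian(vasp_cmd=">>vasp_cmd<<", handler_group="md")

# explicit handler list, passed straight through
t2 = RunVaspCustodian(vasp_cmd=">>vasp_cmd<<",
                      handler_group=[VaspErrorHandler(), FrozenJobErrorHandler()],
                      max_errors=5)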
Example #13
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                AliasingErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(vasp_cmd,
                                                 auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=False)
        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=5,
                                        half_kpts_first_relax=False)
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        validators = [VasprunXMLValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()
Example #14
    def launch_workflow(self,
                        launchpad_dir="",
                        k_product=50,
                        job=None,
                        user_incar_settings=None,
                        potcar_functional='PBE',
                        additional_handlers=[]):
        """
            Creates a list of Fireworks. Each Firework represents calculations
            that will be done on a slab system of a compound in a specific
            orientation. Each Firework contains an oriented unit cell relaxation job
            and a WriteSlabVaspInputs task, which creates additional Firework(s) depending
            on whether or not Termination=True. Vasp outputs from all slab and
            oriented unit cell calculations will then be inserted into a database.
            Args:
                launchpad_dir (str path): The path to my_launchpad.yaml. Defaults to
                    the current working directory containing your runs
                k_product: kpts[0][0]*a. Decide k density without
                    kpoint0, default to 50
                cwd (str path): The current working directory. Location of where you
                    want your vasp outputs to be.
                job (VaspJob): The command (cmd) entered into VaspJob object. Default
                    is specifically set for running vasp jobs on Carver at NERSC
                    (use aprun for Hopper or Edison).
                user_incar_settings(dict): A dict specifying additional incar
                    settings, default to None (ediff_per_atom=False)
                potcar_functional (str): default to PBE
        """

        launchpad = LaunchPad.from_file(
            os.path.join(os.environ["HOME"], launchpad_dir,
                         "my_launchpad.yaml"))
        if self.reset:
            launchpad.reset('', require_password=False)

        # Scratch directory referred to by custodian.
        # May be different on non-Nersc systems.

        if not job:
            job = VaspJob(["mpirun", "-n", "64", "vasp"],
                          auto_npar=False,
                          copy_magmom=True)

        handlers = [
            VaspErrorHandler(),
            NonConvergingErrorHandler(),
            UnconvergedErrorHandler(),
            PotimErrorHandler(),
            PositiveEnergyErrorHandler(),
            FrozenJobErrorHandler(timeout=3600)
        ]
        if additional_handlers:
            handlers.extend(additional_handlers)

        cust_params = {
            "custodian_params": {
                "scratch_dir":
                os.path.join("/global/scratch2/sd/", os.environ["USER"])
            },
            "jobs": job.double_relaxation_run(job.vasp_cmd, auto_npar=False),
            "handlers": handlers,
            "max_errors": 100
        }  # will return a list of jobs
        # instead of just being one job

        fws = []
        for key in self.miller_dict.keys():
            # Enumerate through all compounds in the dictionary,
            # the key is the compositional formula of the compound
            print(key)
            for miller_index in self.miller_dict[key]:
                # Enumerates through all miller indices we
                # want to create slabs of that compound from

                print(str(miller_index))

                max_norm = max(
                    miller_index) if self.max_normal_search else None
                # Whether or not we want to use the
                # max_normal_search algorithm from surface.py
                print('true or false max norm is', max_norm, self.max_normal_search)

                slab = SlabGenerator(self.unit_cells_dict[key][0],
                                     miller_index,
                                     self.ssize,
                                     self.vsize,
                                     max_normal_search=max_norm)
                oriented_uc = slab.oriented_unit_cell

                if self.fail_safe and len(oriented_uc) > 199:
                    break
                # This method only creates the oriented unit cell, the
                # slabs are created in the WriteSlabVaspInputs task.
                # WriteSlabVaspInputs will create the slabs from
                # the contcar of the oriented unit cell calculation
                handler = []
                tasks = []

                folderbulk = '/%s_%s_k%s_s%sv%s_%s%s%s' % (
                    oriented_uc.composition.reduced_formula, 'bulk', k_product,
                    self.ssize, self.vsize, str(miller_index[0]),
                    str(miller_index[1]), str(miller_index[2]))
                cwd = os.getcwd()
                if self.get_bulk_e:
                    tasks.extend([
                        WriteUCVaspInputs(
                            oriented_ucell=oriented_uc,
                            folder=folderbulk,
                            cwd=cwd,
                            user_incar_settings=user_incar_settings,
                            potcar_functional=potcar_functional,
                            k_product=k_product),
                        RunCustodianTask(dir=folderbulk,
                                         cwd=cwd,
                                         **cust_params),
                        VaspSlabDBInsertTask(struct_type="oriented_unit_cell",
                                             loc=folderbulk,
                                             cwd=cwd,
                                             miller_index=miller_index,
                                             **self.vaspdbinsert_params)
                    ])

                    # Slab will inherit average final magnetic moment
                    # of the bulk from outcar, will have to generalize
                    # this for systems with different elements later
                    # element = oriented_uc.species[0]
                    # out = Outcar(cwd+folderbulk)
                    # out_mag = out.magnetization
                    # tot_mag = [mag['tot'] for mag in out_mag]
                    # magmom = np.mean(tot_mag)
                    # user_incar_settings['MAGMOM'] = {element: magmom}

                tasks.append(
                    WriteSlabVaspInputs(
                        folder=folderbulk,
                        cwd=cwd,
                        user_incar_settings=user_incar_settings,
                        terminations=self.terminations,
                        custodian_params=cust_params,
                        vaspdbinsert_parameters=self.vaspdbinsert_params,
                        potcar_functional=potcar_functional,
                        k_product=k_product,
                        miller_index=miller_index,
                        min_slab_size=self.ssize,
                        min_vacuum_size=self.vsize,
                        ucell=self.unit_cells_dict[key][0]))

                fw = Firework(tasks, name=folderbulk)

                fws.append(fw)
        wf = Workflow(fws, name='Surface Calculations')
        launchpad.add_wf(wf)
Example #15
def get_aneb_wf(
    structure,
    working_ion,
    insert_coords,
    insert_coords_combinations,
    n_images,
    vasp_input_set=None,
    override_default_vasp_params=None,
    handler_group=None,
    selective_dynamics_scheme="fix_two_atom",
    launch_mode="all",
    vasp_cmd=VASP_CMD,
    db_file=DB_FILE,
    wall_time=None,
    additional_fields=None,
    tags=None,
    powerup_dicts=None,
    name="ApproxNEB",
):
    """
    Workflow for running the "ApproxNEB" algorithm to estimate
    energetic barriers for a working ion in a structure (host)
    between end point positions specified by insert_coords and
    insert_coords_combinations. Note this workflow is only
    intended for the dilute lattice limit (where one working
    ion is in a large supercell structure of the host and
    little volume change upon insertion is expected).
    By default workflow sets appropriate VASP input parameters
    and Custodian handler groups.

    This workflow uses an "approx_neb" collection to organize
    outputs and generate inputs for new VASP calculations for
    easier data management and analysis. An "approx_neb"
    additional field is automatically added to all task docs
    generated to assist record keeping.

    To make modifications to docs generated by this
    workflow, use of the additional_fields and tags arguments
    is recommended to ensure all fireworks, tasks collection
    docs, and approx_neb collection docs are modified.

    Args:
    structure (Structure): structure of empty host
    working_ion: specie of site to insert in structure
        (e.g. "Li").
    insert_coords (1x3 array or list of 1x3 arrays):
        fractional coordinates of site(s) to insert in
        structure (e.g. [[0,0,0], [0,0.25,0], [0.5,0,0]]).
    insert_coords_combinations (list of strings): list of
        strings corresponding to the list index of
        insert_coords to specify which combination
        of end_points to use for path interpolation.
        (e.g. ["0+1", "0+2"])
    n_images (int): number of images
        interpolated between end point structures for
        each path set by insert_coords_combinations
    vasp_input_set (VaspInputSet class): can use to
        define VASP input parameters.
        See pymatgen.io.vasp.sets module for more
        information. MPRelaxSet() and
        override_default_vasp_params are used if
        vasp_input_set = None.
    override_default_vasp_params (dict): if provided,
        vasp_input_set is disregarded and the Vasp Input
        Set is created by passing override_default_vasp_params
        to MPRelaxSet(). Allows for easy modification of
        MPRelaxSet().
        For example, to set ISIF=2 in the INCAR use:
        {"user_incar_settings":{"ISIF":2}}
    handler_group (str or [ErrorHandler]): group of handlers to
        use for RunVaspCustodian firetask. See handler_groups
        dict in the code for the groups and complete list of
        handlers in each group. Alternatively, you can specify a
        list of ErrorHandler objects.
    selective_dynamics_scheme (str): "fix_two_atom"
    launch_mode (str): "all" or "screening"
    vasp_cmd (str): the name of the full executable for running
        VASP.
    db_file (str): path to file containing the database
        credentials.
    wall_time (int): Total walltime in seconds. If this is None and
        the job is running on a PBS system, the handler will attempt to
        determine the walltime from the PBS_WALLTIME environment
        variable. If the wall time cannot be determined or is not
        set, this handler will have no effect.
    additional_fields (dict): specifies more information
        to be stored in the approx_neb collection to
        assist user record keeping.
    tags (list): list of strings to be stored in the
        approx_neb collection under the "tags" field to
        assist user record keeping.
    powerup_dicts (list): additional powerups given to all the dynamically
        created image fireworks
    name (str): name for the workflow returned

    Returns: Workflow
    """
    approx_neb_params = override_default_vasp_params or {
        "user_incar_settings": {
            "EDIFF": 0.0005,
            "EDIFFG": -0.05,
            "IBRION": 1,
            "ISIF": 3,
            "ISMEAR": 0,
            "LDAU": False,
            "NSW": 400,
            "ADDGRID": True,
            "ISYM": 1,
            "NELMIN": 4,
        }
    }

    handler_group = handler_group or [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler(),
        PositiveEnergyErrorHandler(),
        FrozenJobErrorHandler(),
        StdErrHandler(),
        WalltimeHandler(wall_time=wall_time),
    ]

    wf_uuid = str(uuid4())
    additional_fields = deepcopy(additional_fields)

    host_fw = HostFW(
        structure=structure,
        approx_neb_wf_uuid=wf_uuid,
        db_file=db_file,
        vasp_input_set=vasp_input_set,
        vasp_cmd=vasp_cmd,
        override_default_vasp_params=deepcopy(approx_neb_params),
        additional_fields=additional_fields,
        tags=tags,
    )

    # modifies incar settings needed for end point and image structure relaxations
    if "user_incar_settings" not in approx_neb_params.keys():
        approx_neb_params = {"user_incar_settings": {}}
    approx_neb_params["user_incar_settings"]["ISIF"] = 2
    approx_neb_params["user_incar_settings"]["ISYM"] = 0
    approx_neb_params["user_incar_settings"]["LDAU"] = False

    end_point_fws = []
    for n, coord in enumerate(insert_coords):
        end_point_fws.append(
            EndPointFW(
                approx_neb_wf_uuid=wf_uuid,
                insert_specie=working_ion,
                insert_coords=coord,
                end_points_index=n,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                parents=host_fw,
            ))

    evaluate_path_fws = []
    for end_points_combo in insert_coords_combinations:
        if isinstance(end_points_combo, (str)):
            combo = end_points_combo.split("+")
            if len(combo) == 2:
                c = [int(combo[0]), int(combo[-1])]
            else:
                raise ValueError(
                    "string format in insert_coords_combinations is incorrect")

        evaluate_path_fws.append(
            EvaluatePathFW(
                approx_neb_wf_uuid=wf_uuid,
                end_points_combo=end_points_combo,
                mobile_specie=working_ion,
                n_images=n_images,
                selective_dynamics_scheme=selective_dynamics_scheme,
                launch_mode=launch_mode,
                vasp_cmd=vasp_cmd,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                handler_group=handler_group,
                parents=[end_point_fws[c[0]], end_point_fws[c[1]]],
                add_additional_fields=additional_fields,
                add_tags=tags,
            ))

    wf = Workflow([host_fw] + end_point_fws + evaluate_path_fws)

    wf = use_custodian(wf, custodian_params={"handler_group": handler_group})
    if isinstance(tags, (list)):
        wf = add_tags(wf, tags)
    if isinstance(additional_fields, (dict)):
        wf = add_additional_fields_to_taskdocs(wf,
                                               update_dict=additional_fields)
    if powerup_dicts is not None:
        wf = powerup_by_kwargs(wf, powerup_dicts)
        for fw in wf.fws:
            fw.spec["vasp_powerups"] = powerup_dicts
    wf.metadata.update({"approx_neb_wf_uuid": wf_uuid})
    wf.name = name

    return wf
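A hedged usage sketch based only on the signature and docstring above (the host structure, insertion sites, and walltime are illustrative):

from pymatgen.core import Structure

host = Structure.from_file("POSCAR")   # empty host structure
wf = get_aneb_wf(
    structure=host,
    working_ion="Li",
    insert_coords=[[0, 0, 0], [0.5, 0.5, 0.5]],
    insert_coords_combinations=["0+1"],
    n_images=5,
    wall_time=86400,
)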