Example No. 1
def get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe, gzipped=False)
        handlers = [
            VaspErrorHandler(),
            FrozenJobErrorHandler(),
            MeshSymmetryErrorHandler(),
            NonConvergingErrorHandler()
        ]
    else:
        jobs = [VaspJob(v_exe)]
        handlers = [
            VaspErrorHandler(),
            FrozenJobErrorHandler(),
            MeshSymmetryErrorHandler()
        ]

    params = {
        'jobs': [j_decorate(j.as_dict()) for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 10
    }

    return VaspCustodianTask(params)
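
A minimal sketch of driving this factory (the spec dict below is hypothetical; 'VASP_EXE' stays a placeholder that, per the comment above, is substituted with the real executable on the node):

spec = {'task_type': 'GGA optimize structure (2x) example'}  # hypothetical spec
task = get_custodian_task(spec)  # VaspCustodianTask wrapping the jobs and handlers above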
Example No. 2
 def test_mesh_symmetry(self):
     h = MeshSymmetryErrorHandler("vasp.ibzkpt")
     h.check()
     d = h.correct()
     self.assertEqual(d["errors"], ['mesh_symmetry'])
     self.assertEqual(d["actions"],
                      [{'action': {'_set': {'kpoints': [[4, 4, 4]]}},
                        'dict': 'KPOINTS'}])
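
The test exercises the two-step ErrorHandler protocol that Custodian drives internally: check() detects the error from the run's output files, and correct() applies and reports the fix. A minimal sketch of that protocol, using the same file name the test uses:

h = MeshSymmetryErrorHandler("vasp.ibzkpt")  # handler reads this output file
if h.check():        # True when the mesh-symmetry error is present
    d = h.correct()  # applies the fix; returns {'errors': [...], 'actions': [...]}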
Example No. 3
 def test_mesh_symmetry(self):
     h = MeshSymmetryErrorHandler("vasp.ibzkpt")
     h.check()
     d = h.correct()
     self.assertEqual(d["errors"], ["mesh_symmetry"])
     self.assertEqual(
         d["actions"],
         [{
             "action": {
                 "_set": {
                     "kpoints": [[4, 4, 4]]
                 }
             },
             "dict": "KPOINTS"
         }],
     )
Example No. 4
def get_custodian_task(spec):
    task_type = spec['task_type']
    v_exe = 'VASP_EXE'  # will be transformed to vasp executable on the node
    handlers = [
        VaspErrorHandler(),
        FrozenJobErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PositiveEnergyErrorHandler()
    ]

    if 'optimize structure (2x)' in task_type:
        jobs = VaspJob.double_relaxation_run(v_exe)
    elif 'static' in task_type or 'deformed' in task_type:
        jobs = [VaspJob(v_exe)]
    else:
        # non-SCF runs
        jobs = [VaspJob(v_exe)]
        handlers = []

    params = {
        'jobs': [j_decorate(j.as_dict()) for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    }

    return VaspCustodianTask(params)
Example No. 5
 def run_task(self, fw_spec):
     vasp_cmd = fw_spec['vasp_cmd']
     # Parse the input files up front so malformed inputs fail fast.
     incar = Incar.from_file('INCAR')
     kpoints = Kpoints.from_file('KPOINTS')
     poscar = Poscar.from_file('POSCAR')
     potcar = Potcar.from_file('POTCAR')
     try:
         out = Outcar('OUTCAR')
         # A complete OUTCAR has all 7 run_stats entries; fewer means the
         # previous run did not finish, so fall through to the rerun branch.
         if len(out.run_stats) != 7:
             raise VaspDoneError()
     except Exception:
         try:
             # Restart from the last geometry if a readable CONTCAR exists.
             contcar = Structure.from_file('CONTCAR')
             os.rename('CONTCAR', 'POSCAR')
         except Exception:
             pass
         job = VaspJob(vasp_cmd)
         handlers = [
             VaspErrorHandler(),
             UnconvergedErrorHandler(),
             FrozenJobErrorHandler(),
             NonConvergingErrorHandler(nionic_steps=2, change_algo=True),
             MeshSymmetryErrorHandler()
         ]
         c = Custodian(handlers, [job], max_errors=10)
         c.run()
     else:
         print('Vasp job was already done well. No need to rerun!')
Example No. 6
 def run_task(self, fw_spec):
     workdir = fw_spec['workdir']
     vasp_cmd = fw_spec['vasp_cmd']
     os.chdir(workdir)
     jobs = VaspJob.double_relaxation_run(vasp_cmd)
     handlers = [
         VaspErrorHandler(),
         UnconvergedErrorHandler(),
         FrozenJobErrorHandler(),
         NonConvergingErrorHandler(nionic_steps=5, change_algo=True),
         MeshSymmetryErrorHandler()
     ]
     c = Custodian(handlers, jobs, max_errors=10)
     c.run()
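
Note that VaspJob.double_relaxation_run returns two chained jobs, with the second relaxation restarting from the first run's CONTCAR; that is why this task needs none of the manual CONTCAR-to-POSCAR renaming seen in the previous example.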
Example No. 7
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                        NonConvergingErrorHandler(), PotimErrorHandler(),
                        PositiveEnergyErrorHandler(), FrozenJobErrorHandler(), StdErrHandler(),
                        DriftErrorHandler()],
            "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
                       NonConvergingErrorHandler(), PotimErrorHandler(),
                       PositiveEnergyErrorHandler(), FrozenJobErrorHandler(),
                       StdErrHandler(), AliasingErrorHandler(), DriftErrorHandler()],
            "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)

        jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        validators = []

        c = Custodian(handlers, jobs, validators=validators, max_errors=max_errors,
                      scratch_dir=scratch_dir, gzipped_output=gzip_output)

        c.run()
Example No. 8
def do_run(args):
    handlers = [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        UnconvergedErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler()
    ]
    c = Custodian(handlers,
                  get_runs(args),
                  max_errors=10,
                  gzipped_output=args.gzip)
    c.run()
    logging.info("Geometry optimization complete")
Example No. 9
    def run_task(self, fw_spec):
        dec = MontyDecoder()
        jobs = dec.process_decoded(self["jobs"])
        fw_env = fw_spec.get("_fw_env", {})
        # Override VASP and gamma VASP commands using fw_env
        if fw_env.get("vasp_cmd"):
            for j in jobs:
                j.vasp_cmd = os.path.expandvars(fw_env["vasp_cmd"])
                logging.info("Vasp command is {}".format(j.vasp_cmd))
        if fw_env.get("gamma_vasp_cmd"):
            for j in jobs:
                j.gamma_vasp_cmd = os.path.expandvars(fw_env["gamma_vasp_cmd"])
                logging.info("Vasp gamma command is {}".format(
                    j.gamma_vasp_cmd))
        # Override custodian scratch dir.
        cust_params = self.get("custodian_params", {})
        if fw_env.get("scratch_root"):
            cust_params["scratch_dir"] = os.path.expandvars(
                fw_env["scratch_root"])

        logging.info("Running with custodian params %s" % cust_params)
        handlers = [
            VaspErrorHandler(),
            MeshSymmetryErrorHandler(),
            UnconvergedErrorHandler(),
            NonConvergingErrorHandler(),
            PotimErrorHandler()
        ]
        validators = [VasprunXMLValidator()]
        c = Custodian(handlers=handlers,
                      jobs=jobs,
                      validators=validators,
                      **cust_params)
        output = c.run()
        return FWAction(stored_data=output)
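
The jobs here arrive serialized; MontyDecoder rebuilds live VaspJob objects from their dicts at run time. A minimal sketch of the round trip (the construction side below is hypothetical, not shown in this task):

serialized = [VaspJob(["vasp"]).as_dict()]         # job -> JSON-safe dict, e.g. when building the task
jobs = MontyDecoder().process_decoded(serialized)  # dicts -> live VaspJob objects again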
Example No. 10
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object
    The workflow has two steps - a structure relaxation and a static run
    :param structure:
    :return:
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('')
    for j in jobs:  # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [
        VaspErrorHandler(),
        FrozenJobErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler()
    ]
    c_params = {
        'jobs': [j.as_dict() for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    }
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POTCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(
        Firework(tasks,
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx({
        'jobs': [VaspJob('', auto_npar=False).as_dict()],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    })
    fws.append(
        Firework([copytask, setuptask, custodiantask],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
Example No. 11
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                DriftErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                StdErrHandler(),
                AliasingErrorHandler(),
                DriftErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, str):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(
                vasp_cmd,
                auto_npar=auto_npar,
                ediffg=self.get("ediffg"),
                half_kpts_first_relax=self.get("half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "metagga_opt_run":
            jobs = VaspJob.metagga_opt_run(vasp_cmd,
                                           auto_npar=auto_npar,
                                           ediffg=self.get("ediffg"),
                                           half_kpts_first_relax=self.get(
                                               "half_kpts_first_relax",
                                               HALF_KPOINTS_FIRST_RELAX))

        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=9,
                                        half_kpts_first_relax=self.get(
                                            "half_kpts_first_relax",
                                            HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "neb":
            # TODO: @shyuep @HanmeiTang This means that NEB can only be run (i) in reservation mode
            # and (ii) when the queueadapter parameter is overridden and (iii) the queue adapter
            # has a convention for nnodes (with that name). Can't the number of nodes be made a
            # parameter that the user sets differently? e.g., fw_spec["neb_nnodes"] must be set
            # when setting job_type=NEB? Then someone can use this feature in non-reservation
            # mode and without this complication. -computron
            nnodes = int(fw_spec["_queueadapter"]["nnodes"])

            # TODO: @shyuep @HanmeiTang - I am not sure what the code below is doing. It looks like
            # it is trying to override the number of processors. But I tried running the code
            # below after setting "vasp_cmd = 'mpirun -n 16 vasp'" and the code fails.
            # (i) Is this expecting an array vasp_cmd rather than String? If so, that's opposite to
            # the rest of this task's convention and documentation
            # (ii) can we get rid of this hacking in the first place? e.g., allowing the user to
            # separately set the NEB_VASP_CMD as an env_variable and not rewriting the command
            # inside this.
            # -computron

            # Index the tag "-n" or "-np"
            index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
            ppn = int(vasp_cmd[index[0] + 1])
            vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            # Do the same for gamma_vasp_cmd
            if gamma_vasp_cmd:
                index = [i for i, s in enumerate(gamma_vasp_cmd) if '-n' in s]
                ppn = int(gamma_vasp_cmd[index[0] + 1])
                gamma_vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            jobs = [
                VaspNEBJob(vasp_cmd,
                           final=False,
                           auto_npar=auto_npar,
                           gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers

        handler_group = self.get("handler_group", "default")
        if isinstance(handler_group, str):
            handlers = handler_groups[handler_group]
        else:
            handlers = handler_group

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        if job_type == "neb":
            # CI-NEB vasprun.xml is sometimes incomplete, and the file structure differs.
            validators = []
        else:
            validators = [VasprunXMLValidator(), VaspFilesValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()

        if os.path.exists(zpath("custodian.json")):
            stored_custodian_data = {
                "custodian": loadfn(zpath("custodian.json"))
            }
            return FWAction(stored_data=stored_custodian_data)
Example No. 12
    def run_task(self, fw_spec):

        handler_groups = {
            "default": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler()
            ],
            "strict": [
                VaspErrorHandler(),
                MeshSymmetryErrorHandler(),
                UnconvergedErrorHandler(),
                NonConvergingErrorHandler(),
                PotimErrorHandler(),
                PositiveEnergyErrorHandler(),
                FrozenJobErrorHandler(),
                AliasingErrorHandler()
            ],
            "md": [VaspErrorHandler(),
                   NonConvergingErrorHandler()],
            "no_handler": []
        }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"),
                            fw_spec,
                            strict=False,
                            default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"),
                                 fw_spec,
                                 strict=False,
                                 default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [
                VaspJob(vasp_cmd,
                        auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)
            ]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(vasp_cmd,
                                                 auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=False)
        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd,
                                        auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=5,
                                        half_kpts_first_relax=False)
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers
        handlers = handler_groups[self.get("handler_group", "default")]

        if self.get("max_force_threshold"):
            handlers.append(
                MaxForceErrorHandler(
                    max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        validators = [VasprunXMLValidator()]

        c = Custodian(handlers,
                      jobs,
                      validators=validators,
                      max_errors=max_errors,
                      scratch_dir=scratch_dir,
                      gzipped_output=gzip_output)

        c.run()
Example No. 13
import sys

from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler, \
    UnconvergedErrorHandler, AliasingErrorHandler, FrozenJobErrorHandler, \
    PositiveEnergyErrorHandler, MeshSymmetryErrorHandler
from custodian.vasp.jobs import VaspJob

handlers = [
    VaspErrorHandler(),
    UnconvergedErrorHandler(),
    AliasingErrorHandler(),
    FrozenJobErrorHandler(),
    PositiveEnergyErrorHandler(),
    MeshSymmetryErrorHandler()
]
jobs = [VaspJob(sys.argv[1:])]
c = Custodian(handlers, jobs, max_errors=10)
c.run()
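
Because the VaspJob is built from sys.argv[1:], everything after the script name becomes the VASP launch command; assuming the script were saved as run_vasp.py (a hypothetical name), it could be invoked as: python run_vasp.py mpirun -np 16 vasp_std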
Example No. 14
import os
import shutil

from pymatgen.io.vasp.inputs import Incar, Poscar, VaspInput, Potcar, Kpoints
from custodian.vasp.jobs import VaspJob
from custodian.vasp.handlers import VaspErrorHandler, UnconvergedErrorHandler, \
    MeshSymmetryErrorHandler, NonConvergingErrorHandler, PotimErrorHandler
from custodian.vasp.validators import VasprunXMLValidator
from custodian.custodian import Custodian

# Read the VASP inputs (also verifies they parse cleanly).
inc = Incar.from_file("INCAR")
pot = Potcar.from_file("POTCAR")
pos = Poscar.from_file("POSCAR")
kp = Kpoints.from_file("KPOINTS")
shutil.copy2('/users/knc6/bin/vdw_kernel.bindat', './')
vinput = VaspInput.from_directory(".")
job = VaspJob(['mpirun', '-np', '16', '/users/knc6/VASP/vasp54/src/vasp.5.4.1/bin/vasp_std'],
              final=False, backup=False)
handlers = [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
            NonConvergingErrorHandler(), PotimErrorHandler()]
validators = [VasprunXMLValidator()]
c = Custodian(handlers, [job], max_errors=5, validators=validators)
c.run()
Example No. 15
def get_aneb_wf(
    structure,
    working_ion,
    insert_coords,
    insert_coords_combinations,
    n_images,
    vasp_input_set=None,
    override_default_vasp_params=None,
    handler_group=None,
    selective_dynamics_scheme="fix_two_atom",
    launch_mode="all",
    vasp_cmd=VASP_CMD,
    db_file=DB_FILE,
    wall_time=None,
    additional_fields=None,
    tags=None,
    powerup_dicts=None,
    name="ApproxNEB",
):
    """
    Workflow for running the "ApproxNEB" algorithm to estimate
    energetic barriers for a working ion in a structure (host)
    between end point positions specified by insert_coords and
    insert_coords_combinations. Note this workflow is only
    intended for the dilute lattice limit (where one working
    ion is in a large supercell structure of the host and
    little volume change upon insertion is expected).
    By default workflow sets appropriate VASP input parameters
    and Custodian handler groups.

    This workflow uses an "approx_neb" collection to organize
    outputs and generate inputs for new VASP calculations for
    easier data management and analysis. An "approx_neb"
    additional field is automatically added to all task docs
    generated to assist record keeping.

    To make modifications to docs generated by this
    workflow, use of the additional_fields and tags arguments
    is recommended to ensure all fireworks, tasks collection
    docs, and approx_neb collection docs are modified.

    Args:
    structure (Structure): structure of empty host
    working_ion: specie of site to insert in structure
        (e.g. "Li").
    insert_coords (1x3 array or list of 1x3 arrays):
        fractional coordinates of site(s) to insert in
        structure (e.g. [[0,0,0], [0,0.25,0], [0.5,0,0]]).
    insert_coords_combinations (list of strings): list of
        strings corresponding to the list index of
        insert_coords to specify which combination
        of end_points to use for path interpolation.
        (e.g. ["0+1", "0+2"])
    n_images (int): number of images
        interpolated between end point structures for
        each path set by insert_coords_combinations
    vasp_input_set (VaspInputSet class): can use to
        define VASP input parameters.
        See pymatgen.io.vasp.sets module for more
        information. MPRelaxSet() and
        override_default_vasp_params are used if
        vasp_input_set = None.
    override_default_vasp_params (dict): if provided,
        vasp_input_set is disregarded and the Vasp Input
        Set is created by passing override_default_vasp_params
        to MPRelaxSet(). Allows for easy modification of
        MPRelaxSet().
        For example, to set ISIF=2 in the INCAR use:
        {"user_incar_settings":{"ISIF":2}}
    handler_group (str or [ErrorHandler]): group of handlers to
        use for RunVaspCustodian firetask. See handler_groups
        dict in the code for the groups and complete list of
        handlers in each group. Alternatively, you can specify a
        list of ErrorHandler objects.
    selective_dynamics_scheme (str): "fix_two_atom"
    launch_mode (str): "all" or "screening"
    vasp_cmd (str): the name of the full executable for running
        VASP.
    db_file (str): path to file containing the database
        credentials.
    wall_time (int): Total walltime in seconds. If this is None and
        the job is running on a PBS system, the handler will attempt to
        determine the walltime from the PBS_WALLTIME environment
        variable. If the wall time cannot be determined or is not
        set, this handler will have no effect.
    additional_fields (dict): specifies more information
        to be stored in the approx_neb collection to
        assist user record keeping.
    tags (list): list of strings to be stored in the
        approx_neb collection under the "tags" field to
        assist user record keeping.
    powerup_dicts (list): additional powerups given to all the dynamically
        created image fireworks
    name (str): name for the workflow returned

    Returns: Workflow
    """
    approx_neb_params = override_default_vasp_params or {
        "user_incar_settings": {
            "EDIFF": 0.0005,
            "EDIFFG": -0.05,
            "IBRION": 1,
            "ISIF": 3,
            "ISMEAR": 0,
            "LDAU": False,
            "NSW": 400,
            "ADDGRID": True,
            "ISYM": 1,
            "NELMIN": 4,
        }
    }

    handler_group = handler_group or [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler(),
        PositiveEnergyErrorHandler(),
        FrozenJobErrorHandler(),
        StdErrHandler(),
        WalltimeHandler(wall_time=wall_time),
    ]

    wf_uuid = str(uuid4())
    additional_fields = deepcopy(additional_fields)

    host_fw = HostFW(
        structure=structure,
        approx_neb_wf_uuid=wf_uuid,
        db_file=db_file,
        vasp_input_set=vasp_input_set,
        vasp_cmd=vasp_cmd,
        override_default_vasp_params=deepcopy(approx_neb_params),
        additional_fields=additional_fields,
        tags=tags,
    )

    # modify INCAR settings needed for end point and image structure relaxations
    if "user_incar_settings" not in approx_neb_params:
        approx_neb_params["user_incar_settings"] = {}
    approx_neb_params["user_incar_settings"]["ISIF"] = 2
    approx_neb_params["user_incar_settings"]["ISYM"] = 0
    approx_neb_params["user_incar_settings"]["LDAU"] = False

    end_point_fws = []
    for n, coord in enumerate(insert_coords):
        end_point_fws.append(
            EndPointFW(
                approx_neb_wf_uuid=wf_uuid,
                insert_specie=working_ion,
                insert_coords=coord,
                end_points_index=n,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                parents=host_fw,
            ))

    evaluate_path_fws = []
    for end_points_combo in insert_coords_combinations:
        if isinstance(end_points_combo, str):
            combo = end_points_combo.split("+")
            if len(combo) == 2:
                c = [int(combo[0]), int(combo[-1])]
            else:
                raise ValueError(
                    "string format in insert_coords_combinations is incorrect")

        evaluate_path_fws.append(
            EvaluatePathFW(
                approx_neb_wf_uuid=wf_uuid,
                end_points_combo=end_points_combo,
                mobile_specie=working_ion,
                n_images=n_images,
                selective_dynamics_scheme=selective_dynamics_scheme,
                launch_mode=launch_mode,
                vasp_cmd=vasp_cmd,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                handler_group=handler_group,
                parents=[end_point_fws[c[0]], end_point_fws[c[1]]],
                add_additional_fields=additional_fields,
                add_tags=tags,
            ))

    wf = Workflow([host_fw] + end_point_fws + evaluate_path_fws)

    wf = use_custodian(wf, custodian_params={"handler_group": handler_group})
    if isinstance(tags, list):
        wf = add_tags(wf, tags)
    if isinstance(additional_fields, dict):
        wf = add_additional_fields_to_taskdocs(wf,
                                               update_dict=additional_fields)
    if powerup_dicts is not None:
        wf = powerup_by_kwargs(wf, powerup_dicts)
        for fw in wf.fws:
            fw.spec["vasp_powerups"] = powerup_dicts
    wf.metadata.update({"approx_neb_wf_uuid": wf_uuid})
    wf.name = name

    return wf
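
A minimal sketch of calling this workflow factory; every concrete value below (file name, ion, coordinates, image count) is a hypothetical placeholder, not a value from the source:

from pymatgen.core import Structure

host = Structure.from_file("host_POSCAR")    # hypothetical empty host structure
wf = get_aneb_wf(
    structure=host,
    working_ion="Li",
    insert_coords=[[0, 0, 0], [0.5, 0, 0]],  # two candidate insertion sites
    insert_coords_combinations=["0+1"],      # one path between sites 0 and 1
    n_images=5,
)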