Example #1
    def get_wf(
        self, scan=False, perform_bader=True, num_orderings_hard_limit=16, c=None
    ):
        """
        Retrieve the FireWorks workflow.

        Args:
            scan: if True, use the SCAN functional instead of GGA+U, since
                the SCAN functional has been shown to have improved performance
                for magnetic systems in some cases
            perform_bader: if True, make sure the "bader" binary is in your
                path; Bader analysis will be used to calculate atom-projected
                magnetic moments
            num_orderings_hard_limit: will make sure the total number of
                magnetic orderings does not exceed this number even if there
                are extra orderings of equivalent symmetry
            c: additional config dict (as used elsewhere in atomate)

        Returns: FireWorks Workflow

        """

        c = c or {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}

        fws = []
        analysis_parents = []

        # trim total number of orderings (useful in high-throughput context)
        # this is somewhat coarse, better to reduce num_orderings kwarg and/or
        # change enumeration strategies
        ordered_structures = self.ordered_structures
        ordered_structure_origins = self.ordered_structure_origins

        def _add_metadata(structure):
            """
            For book-keeping, store useful metadata with the Structure
            object for later database ingestion including workflow
            version and a UUID for easier querying of all tasks generated
            from the workflow.

            Args:
                structure: Structure

            Returns: TransformedStructure
            """
            # this could be further improved by storing full transformation
            # history, but would require an improved transformation pipeline
            return TransformedStructure(
                structure, other_parameters={"wf_meta": self.wf_meta}
            )

        ordered_structures = [_add_metadata(struct) for struct in ordered_structures]

        if (
            num_orderings_hard_limit
            and len(self.ordered_structures) > num_orderings_hard_limit
        ):
            ordered_structures = self.ordered_structures[0:num_orderings_hard_limit]
            ordered_structure_origins = self.ordered_structure_origins[
                0:num_orderings_hard_limit
            ]
            logger.warning(
                "Number of ordered structures exceeds hard limit, "
                "removing last {} structures.".format(
                    len(self.ordered_structures) - len(ordered_structures)
                )
            )
            # always make sure input structure is included
            if self.input_index and self.input_index > num_orderings_hard_limit:
                ordered_structures.append(self.ordered_structures[self.input_index])
                ordered_structure_origins.append(
                    self.ordered_structure_origins[self.input_index]
                )

        # default incar settings
        user_incar_settings = {"ISYM": 0, "LASPH": True, "EDIFFG": -0.05}
        if scan:
            # currently, using SCAN relaxation as a static calculation also
            # since it is typically high quality enough, but want to make
            # sure we are also writing the AECCAR* files
            user_incar_settings.update({"LAECHG": True})
        user_incar_settings.update(c.get("user_incar_settings", {}))
        c["user_incar_settings"] = user_incar_settings

        for idx, ordered_structure in enumerate(ordered_structures):

            analyzer = CollinearMagneticStructureAnalyzer(ordered_structure)

            name = " ordering {} {} -".format(idx, analyzer.ordering.value)

            if not scan:

                vis = MPRelaxSet(
                    ordered_structure, user_incar_settings=user_incar_settings
                )

                # relax
                fws.append(
                    OptimizeFW(
                        ordered_structure,
                        vasp_input_set=vis,
                        vasp_cmd=c["VASP_CMD"],
                        db_file=c["DB_FILE"],
                        max_force_threshold=0.05,
                        half_kpts_first_relax=False,
                        name=name + " optimize",
                    )
                )

                # static
                fws.append(
                    StaticFW(
                        ordered_structure,
                        vasp_cmd=c["VASP_CMD"],
                        db_file=c["DB_FILE"],
                        name=name + " static",
                        prev_calc_loc=True,
                        parents=fws[-1],
                    )
                )

            else:

                # wf_scan_opt is just a single FireWork so can append it directly
                scan_fws = wf_scan_opt(ordered_structure, c=c).fws
                # change name for consistency with non-SCAN
                new_name = scan_fws[0].name.replace(
                    "structure optimization", name + " optimize"
                )
                scan_fws[0].name = new_name
                scan_fws[0].tasks[-1]["additional_fields"]["task_label"] = new_name
                fws += scan_fws

            analysis_parents.append(fws[-1])

        fw_analysis = Firework(
            MagneticOrderingsToDB(
                db_file=c["DB_FILE"],
                wf_uuid=self.uuid,
                auto_generated=False,
                name="MagneticOrderingsToDB",
                parent_structure=self.sanitized_structure,
                origins=ordered_structure_origins,
                input_index=self.input_index,
                perform_bader=perform_bader,
                scan=scan,
            ),
            name="Magnetic Orderings Analysis",
            parents=analysis_parents,
            spec={"_allow_fizzled_parents": True},
        )
        fws.append(fw_analysis)

        formula = self.sanitized_structure.composition.reduced_formula
        wf_name = "{} - magnetic orderings".format(formula)
        if scan:
            wf_name += " - SCAN"
        wf = Workflow(fws, name=wf_name)

        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

        tag = "magnetic_orderings group: >>{}<<".format(self.uuid)
        wf = add_tags(wf, [tag, ordered_structure_origins])

        return wf
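
A brief usage sketch (not part of the source above): the enclosing class is presumably atomate's MagneticOrderingsWF, and both the import path and the LaunchPad setup below are assumptions.

# Usage sketch (illustrative); assumes atomate and a configured FireWorks LaunchPad.
from fireworks import LaunchPad
from pymatgen.core import Structure
from atomate.vasp.workflows.base.magnetism import MagneticOrderingsWF  # assumed import path

struct = Structure.from_file("POSCAR")  # a collinear magnetic structure
wf = MagneticOrderingsWF(struct).get_wf(
    scan=False, perform_bader=True, num_orderings_hard_limit=16
)
LaunchPad.auto_load().add_wf(wf)
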
Example #2
def get_wf_magnetic_deformation(structure, c=None, vis=None):
    """
    Minimal workflow to obtain magnetic deformation proxy, as
    defined by Bocarsly et al. 2017, doi: 10.1021/acs.chemmater.6b04729

    Args:
        structure: input structure, must be a structure with magnetic
            elements, such that pymatgen will initialize ferromagnetic input
            by default -- see MPRelaxSet.yaml for the list of default elements
        c: Workflow config dict, in the same format
            as in presets/core.py and elsewhere in atomate
        vis: A VaspInputSet to use for the first FW

    Returns: Workflow
    """

    if not structure.is_ordered:
        raise ValueError(
            "Please obtain an ordered approximation of the input structure."
        )

    structure = structure.get_primitive_structure(use_site_props=True)

    # using a uuid for book-keeping,
    # in a similar way to other workflows
    uuid = str(uuid4())

    c_defaults = {"vasp_cmd": VASP_CMD, "db_file": DB_FILE}
    if c:
        c.update(c_defaults)
    else:
        c = c_defaults

    wf = get_wf(structure, "magnetic_deformation.yaml", common_params=c, vis=vis)

    fw_analysis = Firework(
        MagneticDeformationToDB(
            db_file=DB_FILE, wf_uuid=uuid, to_db=c.get("to_db", True)
        ),
        name="MagneticDeformationToDB",
    )

    wf.append_wf(Workflow.from_Firework(fw_analysis), wf.leaf_fw_ids)

    wf = add_common_powerups(wf, c)

    if c.get("ADD_WF_METADATA", ADD_WF_METADATA):
        wf = add_wf_metadata(wf, structure)

    wf = add_additional_fields_to_taskdocs(
        wf,
        {
            "wf_meta": {
                "wf_uuid": uuid,
                "wf_name": "magnetic_deformation",
                "wf_version": __magnetic_deformation_wf_version__,
            }
        },
    )

    return wf
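
A brief usage sketch for the function above; it assumes the module-level names it relies on (VASP_CMD, DB_FILE, get_wf, the powerups) are available, as in atomate.

# Usage sketch (illustrative): call the function defined above on an ordered structure.
from pymatgen.core import Structure

structure = Structure.from_file("POSCAR")  # ordered structure containing magnetic elements
wf = get_wf_magnetic_deformation(structure, c={"ADD_WF_METADATA": False})
print(wf.name, "with", len(wf.fws), "fireworks")
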
Example #3
def get_wf_magnetic_deformation(structure, c=None, vis=None):
    """
    Minimal workflow to obtain magnetic deformation proxy, as
    defined by Bocarsly et al. 2017, doi: 10.1021/acs.chemmater.6b04729

    Args:
        structure: input structure, must be a structure with magnetic
            elements, such that pymatgen will initialize ferromagnetic input
            by default -- see MPRelaxSet.yaml for the list of default elements
        c: Workflow config dict, in the same format
            as in presets/core.py and elsewhere in atomate
        vis: A VaspInputSet to use for the first FW

    Returns: Workflow
    """

    if not structure.is_ordered:
        raise ValueError(
            "Please obtain an ordered approximation of the input structure.")

    structure = structure.get_primitive_structure(use_site_props=True)

    # using a uuid for book-keeping,
    # in a similar way to other workflows
    uuid = str(uuid4())

    c_defaults = {"vasp_cmd": VASP_CMD, "db_file": DB_FILE}
    if c:
        c.update(c_defaults)
    else:
        c = c_defaults

    wf = get_wf(structure,
                "magnetic_deformation.yaml",
                common_params=c,
                vis=vis)

    fw_analysis = Firework(
        MagneticDeformationToDb(db_file=DB_FILE,
                                wf_uuid=uuid,
                                to_db=c.get("to_db", True)),
        name="MagneticDeformationToDb",
    )

    wf.append_wf(Workflow.from_Firework(fw_analysis), wf.leaf_fw_ids)

    wf = add_common_powerups(wf, c)

    if c.get("ADD_WF_METADATA", ADD_WF_METADATA):
        wf = add_wf_metadata(wf, structure)

    wf = add_additional_fields_to_taskdocs(
        wf,
        {
            "wf_meta": {
                "wf_uuid": uuid,
                "wf_name": "magnetic_deformation",
                "wf_version": __magnetic_deformation_wf_version__,
            }
        },
    )

    return wf
Example #4
    def get_wf(self, num_orderings_hard_limit=16, c=None):
        """Retrieve Fireworks workflow.

        c is an optional dictionary that can contain:
        * heisenberg_settings:
            cutoff (float): Starting point for nearest neighbor search.
            tol (float): Tolerance for equivalent NN bonds.
        * mc_settings:
            mc_box_size (float): MC simulation box size in nm.
            equil_timesteps (int): Number of MC equilibration moves.
            mc_timesteps (int): Number of MC moves for averaging.
            avg (bool): Compute only <J>.
        * DB_FILE:
            path to db.json.

        Args:
            num_orderings_hard_limit (int): will make sure total number of
                magnetic orderings does not exceed this number even if there
                are extra orderings of equivalent symmetry
            c (Optional[dict]): additional config dict, as described above

        Returns:
            wf (Workflow): Heisenberg Model + Vampire Monte Carlo.

        TODO:
            * Add static SCAN option (only optimization is available)

        """

        c = c or {"DB_FILE": DB_FILE}

        if "DB_FILE" not in c:
            c["DB_FILE"] = DB_FILE

        heisenberg_settings = c.get("heisenberg_settings", {})

        fws = []

        heisenberg_model_fw = HeisenbergModelFW(
            wf_uuid=self.uuid,
            parent_structure=self.structures[0],
            db_file=c["DB_FILE"],
            heisenberg_settings=heisenberg_settings,
            parents=None,
            structures=self.structures,
            energies=self.energies,
        )

        # Vampire Monte Carlo
        mc_settings = c.get("mc_settings", {})

        vampire_fw = VampireCallerFW(
            wf_uuid=self.uuid,
            parent_structure=self.structures[0],
            parents=[heisenberg_model_fw],
            db_file=c["DB_FILE"],
            mc_settings=mc_settings,
        )

        fws = [heisenberg_model_fw, vampire_fw]

        wf = Workflow(fws)

        # Add metadata
        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})
        formula = self.structures[0].composition.reduced_formula
        wf.name = f"{formula} - Exchange"

        return wf
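
A brief usage sketch: the enclosing class is presumably atomate's ExchangeWF; the constructor argument names below are assumptions, and `orderings`/`energies` stand in for outputs of a prior magnetic-orderings workflow.

# Usage sketch (illustrative only): ExchangeWF and its constructor signature are assumed,
# and `orderings`/`energies` are hypothetical placeholders (ordered magnetic Structures
# and their corresponding energies).
from atomate.vasp.workflows.base.exchange import ExchangeWF  # assumed import path

wf = ExchangeWF(
    magnetic_structures=orderings,  # hypothetical list of ordered Structures (FM first)
    energies=energies,              # hypothetical list of energies, same order
).get_wf(c={"mc_settings": {"avg": True}, "DB_FILE": ">>db_file<<"})
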
Example #5
    def get_wf(self,
               scan=False,
               perform_bader=True,
               num_orderings_hard_limit=16,
               c=None):
        """
        Retrieve the FireWorks workflow.

        Args:
            scan (bool): if True, use the SCAN functional instead of GGA+U,
                since the SCAN functional has been shown to have improved
                performance for magnetic systems in some cases
            perform_bader (bool): if True, make sure the "bader" binary is in
                your path; Bader analysis will be used to calculate
                atom-projected magnetic moments
            num_orderings_hard_limit (int): will make sure total number of
                magnetic orderings does not exceed this number even if there
                are extra orderings of equivalent symmetry
            c (dict): additional config dict (as used elsewhere in atomate)

        Returns: FireWorks Workflow

        """

        c_defaults = {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}
        additional_fields = {"relax": not self.static}
        c = c or {}
        for k, v in c_defaults.items():
            if k not in c:
                c[k] = v

        fws = []
        analysis_parents = []

        # trim total number of orderings (useful in high-throughput context)
        # this is somewhat coarse, better to reduce num_orderings kwarg and/or
        # change enumeration strategies
        ordered_structures = self.ordered_structures
        ordered_structure_origins = self.ordered_structure_origins

        def _add_metadata(structure):
            """
            For book-keeping, store useful metadata with the Structure
            object for later database ingestion including workflow
            version and a UUID for easier querying of all tasks generated
            from the workflow.

            Args:
                structure: Structure

            Returns: TransformedStructure

            """

            # this could be further improved by storing full transformation
            # history, but would require an improved transformation pipeline
            return TransformedStructure(
                structure, other_parameters={"wf_meta": self.wf_meta})

        ordered_structures = [
            _add_metadata(struct) for struct in ordered_structures
        ]

        if (num_orderings_hard_limit
                and len(self.ordered_structures) > num_orderings_hard_limit):
            ordered_structures = self.ordered_structures[
                0:num_orderings_hard_limit]
            ordered_structure_origins = self.ordered_structure_origins[
                0:num_orderings_hard_limit]
            logger.warning("Number of ordered structures exceeds hard limit, "
                           "removing last {} structures.".format(
                               len(self.ordered_structures) -
                               len(ordered_structures)))
            # always make sure input structure is included
            if self.input_index and self.input_index > num_orderings_hard_limit:
                ordered_structures.append(
                    self.ordered_structures[self.input_index])
                ordered_structure_origins.append(
                    self.ordered_structure_origins[self.input_index])

        # default incar settings
        user_incar_settings = {"ISYM": 0, "LASPH": True, "EDIFFG": -0.05}
        if scan:
            # currently, using SCAN relaxation as a static calculation also
            # since it is typically high quality enough, but want to make
            # sure we are also writing the AECCAR* files
            user_incar_settings.update({"LAECHG": True})
        user_incar_settings.update(c.get("user_incar_settings", {}))
        c["user_incar_settings"] = user_incar_settings

        for idx, ordered_structure in enumerate(ordered_structures):

            analyzer = CollinearMagneticStructureAnalyzer(ordered_structure)

            name = f" ordering {idx} {analyzer.ordering.value} -"

            if not scan:

                vis = MPRelaxSet(ordered_structure,
                                 user_incar_settings=user_incar_settings)

                if not self.static:

                    # relax
                    fws.append(
                        OptimizeFW(
                            ordered_structure,
                            vasp_input_set=vis,
                            vasp_cmd=c["VASP_CMD"],
                            db_file=c["DB_FILE"],
                            max_force_threshold=0.05,
                            half_kpts_first_relax=False,
                            name=name + " optimize",
                        ))

                # static
                fws.append(
                    StaticFW(
                        ordered_structure,
                        vasp_cmd=c["VASP_CMD"],
                        db_file=c["DB_FILE"],
                        name=name + " static",
                        prev_calc_loc=True,
                        parents=fws[-1],
                        vasptodb_kwargs={
                            "parse_chgcar": True,
                            "parse_aeccar": True
                        },
                    ))

                if not self.static:
                    # so a failed optimize doesn't crash workflow
                    fws[-1].spec["_allow_fizzled_parents"] = True

            elif scan:

                # wf_scan_opt is just a single FireWork so can append it directly
                scan_fws = wf_scan_opt(ordered_structure, c=c).fws
                # change name for consistency with non-SCAN
                new_name = scan_fws[0].name.replace("structure optimization",
                                                    name + " optimize")
                scan_fws[0].name = new_name
                scan_fws[0].tasks[-1]["additional_fields"][
                    "task_label"] = new_name
                fws += scan_fws

            analysis_parents.append(fws[-1])

        fw_analysis = Firework(
            MagneticOrderingsToDb(
                db_file=c["DB_FILE"],
                wf_uuid=self.uuid,
                parent_structure=self.sanitized_structure,
                origins=ordered_structure_origins,
                input_index=self.input_index,
                perform_bader=perform_bader,
                scan=scan,
                additional_fields=additional_fields,
            ),
            name="Magnetic Orderings Analysis",
            parents=analysis_parents,
            spec={"_allow_fizzled_parents": True},
        )
        fws.append(fw_analysis)

        formula = self.sanitized_structure.composition.reduced_formula
        wf_name = f"{formula} - magnetic orderings"
        if scan:
            wf_name += " - SCAN"
        wf = Workflow(fws, name=wf_name)

        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

        tag = f"magnetic_orderings group: >>{self.uuid}<<"
        wf = add_tags(wf, [tag, ordered_structure_origins])

        return wf
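
A brief usage sketch for this variant: the enclosing class is again presumed to be atomate's MagneticOrderingsWF (an assumption), here passing a config dict whose user_incar_settings are merged on top of the defaults set in the method above.

# Usage sketch (illustrative); import path and class name are assumptions.
from pymatgen.core import Structure
from atomate.vasp.workflows.base.magnetism import MagneticOrderingsWF  # assumed import path

struct = Structure.from_file("POSCAR")
c = {
    "user_incar_settings": {"EDIFFG": -0.02},  # overrides the -0.05 default set above
    "VASP_CMD": ">>vasp_cmd<<",
    "DB_FILE": ">>db_file<<",
}
wf = MagneticOrderingsWF(struct).get_wf(scan=True, c=c)
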
Example #6
def get_wf_hubbard_hund_linresp(structure,
                                user_incar_settings=None,
                                relax_nonmagnetic=True,
                                spin_polarized=True,
                                applied_potential_range=(-0.2, 0.2),
                                num_evals=9,
                                site_indices_perturb=None,
                                species_perturb=None,
                                find_nearest_sites=True,
                                parallel_scheme=0,
                                ediff_tight=None,
                                c=None):
    """
    Compute Hubbard U (and Hund J) on-site interaction values using the GGA+U
    linear response method proposed by Cococcioni et al.
    (DOI: 10.1103/PhysRevB.71.035105)
    and the spin-polarized response formalism developed by Linscott et al.
    (DOI: 10.1103/PhysRevB.98.235157)

    This workflow relies on the constrained on-site potential functional implemented in VASP, 
    with a helpful tutorial found here: 
    https://www.vasp.at/wiki/index.php/Calculate_U_for_LSDA%2BU

    Args:
        structure: input structure
        user_incar_settings: user INCAR settings
        relax_nonmagnetic: Restart magnetic SCF runs from
            non-magnetic calculation, using WAVECAR
        spin_polarized: Perform spin-dependent perturbations
        applied_potential_range: Bounds of applied potential
        num_evals: Number of perturbation evaluations
        site_indices_perturb: (must specify if species_perturb=None)
            List of site indices within the Structure indicating
            perturbation sites
        species_perturb: (must specify if site_indices_perturb=None)
            List of names of species (string) of sites to perturb;
            the first site of that species is selected in the structure
        find_nearest_sites: If set to True and species_perturb != None,
            the closest sites (by the Structure distance matrix) will be
            selected in the response analysis to account for inter-site
            screening effects
        parallel_scheme: 0 - (default) self-consistent (SCF) runs use the
            WAVECAR from the non-self-consistent (NSCF) run at the same
            applied potential; 1 - SCF runs use the WAVECAR from the
            ground-state (V=0) run.
            While reusing the WAVECAR from the NSCF run in the SCF run may
            be more efficient (parallel_scheme: 0), the user may also choose
            to remove the dependency between NSCF and SCF runs
            (parallel_scheme: 1)
        ediff_tight: Final energy convergence tolerance,
            if restarting from a previous run
            (if not specified, will default to the pymatgen default EDIFF)
        c: Workflow config dict, in the same format
            as in presets/core.py and elsewhere in atomate

    Returns: Workflow
    """

    if not structure.is_ordered:
        raise ValueError(
            "Please obtain an ordered approximation of the input structure.")

    if not site_indices_perturb:
        site_indices_perturb = []

    if species_perturb:

        if find_nearest_sites:
            site_indices_perturb = find_closest_sites(structure,
                                                      species_perturb)
        else:
            for specie_u in species_perturb:
                found_specie = False
                for s in range(len(structure)):
                    site = structure[s]
                    if (Element(str(site.specie)) == Element(specie_u)) \
                       and (s not in site_indices_perturb):
                        found_specie = True
                        break
                if not found_specie:
                    raise ValueError("Could not find specie(s) in structure.")
                site_indices_perturb.append(s)

    elif not site_indices_perturb:
        logger.warning("Sites for computing U value are not specified. "
                       "Computing U for first site in structure. ")

    site_indices_perturb = list(tuple(site_indices_perturb))
    num_perturb = len(site_indices_perturb)

    sites_perturb = []
    for site_index_perturb in site_indices_perturb:
        site = structure[site_index_perturb]
        sites_perturb.append(site)

    structure.remove_sites(indices=site_indices_perturb)

    for site in sites_perturb:
        structure.insert(i=0,
                         species=site.specie,
                         coords=site.frac_coords,
                         properties=site.properties)

    # using a uuid for book-keeping,
    # in a similar way to other workflows
    uuid = str(uuid4())

    c_defaults = {"vasp_cmd": VASP_CMD, "db_file": DB_FILE}
    if c:
        c.update(c_defaults)
    else:
        c = c_defaults

    # Calculate groundstate

    # set user_incar_settings
    if not user_incar_settings:
        user_incar_settings = {}

    # setup VASP input sets
    uis_gs, uis_ldau, val_dict, vis_ldau = init_linresp_input_sets(
        user_incar_settings, structure, num_perturb)

    fws = []
    index_fw_gs = [0]

    ediff_default = vis_ldau.incar['EDIFF']
    if not ediff_tight:
        ediff_tight = 0.1 * ediff_default

    append_linresp_ground_state_fws(fws, structure, num_perturb, index_fw_gs,
                                    uis_gs, relax_nonmagnetic, ediff_default,
                                    ediff_tight)

    # generate list of applied on-site potentials in linear response
    applied_potential_value_list = []
    for counter_perturb in range(num_perturb):
        applied_potential_values = np.linspace(applied_potential_range[0],
                                               applied_potential_range[1],
                                               num_evals)
        applied_potential_values = np.around(applied_potential_values,
                                             decimals=9)

        if 0.0 in applied_potential_values:
            applied_potential_values = list(applied_potential_values)
            applied_potential_values.pop(applied_potential_values.index(0.0))
            applied_potential_values = np.array(applied_potential_values)

        applied_potential_value_list.append(applied_potential_values.copy())

    for counter_perturb in range(num_perturb):

        applied_potential_values = applied_potential_value_list[
            counter_perturb]

        for v in applied_potential_values:

            append_linresp_perturb_fws(v, fws, structure, counter_perturb,
                                       num_perturb, index_fw_gs, uis_ldau,
                                       val_dict, spin_polarized,
                                       relax_nonmagnetic, ediff_default,
                                       ediff_tight, parallel_scheme)

    wf = Workflow(fws)

    fw_analysis = Firework(
        HubbardHundLinRespToDb(num_perturb=num_perturb,
                               spin_polarized=spin_polarized,
                               relax_nonmagnetic=relax_nonmagnetic,
                               db_file=DB_FILE,
                               wf_uuid=uuid),
        name="HubbardHundLinRespToDb",
    )

    wf.append_wf(Workflow.from_Firework(fw_analysis), wf.leaf_fw_ids)

    wf = add_common_powerups(wf, c)

    if c.get("ADD_WF_METADATA", ADD_WF_METADATA):
        wf = add_wf_metadata(wf, structure)

    wf = add_additional_fields_to_taskdocs(
        wf,
        {
            "wf_meta": {
                "wf_uuid": uuid,
                "wf_name": "hubbard_hund_linresp",
                "wf_version": __hubbard_hund_linresp_wf_version__,
            }
        },
    )

    return wf
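
A brief usage sketch for the function above; the chosen species and perturbation settings are placeholders for a real transition-metal system.

# Usage sketch (illustrative): call the function defined above.
from pymatgen.core import Structure

structure = Structure.from_file("POSCAR")  # e.g. an ordered transition-metal oxide
wf = get_wf_hubbard_hund_linresp(
    structure,
    species_perturb=["Ni"],                # compute U (and J) for Ni site(s)
    applied_potential_range=(-0.2, 0.2),
    num_evals=9,
    spin_polarized=True,
)
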
Example #7
    def get_wf(self, c=None):
        """
        Get the workflow.

        Returns:
            Workflow

        """

        c = c or {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}
        vasp_cmd = c.get("VASP_CMD", VASP_CMD)
        db_file = c.get("DB_FILE", DB_FILE)

        nsites = len(self.structure.sites)

        vis = MPRelaxSet(self.structure, potcar_functional="PBE_54", force_gamma=True)

        opt_fw = OptimizeFW(
            self.structure,
            vasp_input_set=vis,
            vasp_cmd=c["VASP_CMD"],
            db_file=c["DB_FILE"],
        )

        vis = MPStaticSet(self.structure, potcar_functional="PBE_54", force_gamma=True)

        static_fw = StaticFW(
            self.structure,
            vasp_input_set=vis,
            vasp_cmd=c["VASP_CMD"],
            db_file=c["DB_FILE"],
            parents=[opt_fw],
        )

        # Separate FW for each BZ surface calc
        # Run Z2Pack on unique TRIM planes in the BZ

        surfaces = ["kx_0", "kx_1"]
        equiv_planes = self.get_equiv_planes()

        # Only run calcs on inequivalent BZ surfaces
        if self.symmetry_reduction:
            for add_surface in equiv_planes.keys():
                mark = True
                for surface in surfaces:
                    if surface in equiv_planes[add_surface]:
                        mark = False
                if mark and add_surface not in surfaces:
                    surfaces.append(add_surface)
        else:  # 4 TRIM surfaces define Z2 in 3D
            surfaces = ["kx_1", "ky_1", "kz_0", "kz_1"]

        z2pack_fws = []

        for surface in surfaces:
            z2pack_fw = Z2PackFW(
                parents=[static_fw],
                structure=self.structure,
                surface=surface,
                uuid=self.uuid,
                name="z2pack",
                vasp_cmd=c["VASP_CMD"],
                db_file=c["DB_FILE"],
            )
            z2pack_fws.append(z2pack_fw)

        analysis_fw = InvariantFW(
            parents=z2pack_fws,
            structure=self.structure,
            symmetry_reduction=self.symmetry_reduction,
            equiv_planes=equiv_planes,
            uuid=self.uuid,
            name="invariant",
            db_file=c["DB_FILE"],
        )

        fws = [opt_fw, static_fw] + z2pack_fws + [analysis_fw]

        wf = Workflow(fws)
        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

        # Add vdW corrections if structure is layered
        dim_data = StructureDimensionality(self.structure)

        if np.any(
            [
                dim == 2
                for dim in [dim_data.larsen_dim, dim_data.cheon_dim, dim_data.gorai_dim]
            ]
        ):
            wf = add_modify_incar(
                wf,
                modify_incar_params={
                    "incar_update": {
                        "IVDW": 11,
                        "EDIFFG": 0.005,
                        "IBRION": 2,
                        "NSW": 100,
                    }
                },
                fw_name_constraint="structure optimization",
            )

            wf = add_modify_incar(
                wf,
                modify_incar_params={"incar_update": {"IVDW": 11}},
                fw_name_constraint="static",
            )

            wf = add_modify_incar(
                wf,
                modify_incar_params={"incar_update": {"IVDW": 11}},
                fw_name_constraint="z2pack",
            )

        else:
            wf = add_modify_incar(
                wf,
                modify_incar_params={
                    "incar_update": {"EDIFFG": 0.005, "IBRION": 2, "NSW": 100}
                },
                fw_name_constraint="structure optimization",
            )

        # Helpful vasp settings and no parallelization
        wf = add_modify_incar(
            wf,
            modify_incar_params={
                "incar_update": {
                    "ADDGRID": ".TRUE.",
                    "LASPH": ".TRUE.",
                    "GGA": "PS",
                    "NCORE": 1,
                }
            },
        )

        # Generate inputs for Z2Pack with a static calc
        wf = add_modify_incar(
            wf,
            modify_incar_params={"incar_update": {"PREC": "Accurate"}},
            fw_name_constraint="static",
        )

        wf = add_common_powerups(wf, c)

        wf.name = "{} {}".format(self.structure.composition.reduced_formula, "Z2Pack")

        if c.get("STABILITY_CHECK", STABILITY_CHECK):
            wf = add_stability_check(wf, fw_name_constraint="structure optimization")

        if c.get("ADD_WF_METADATA", ADD_WF_METADATA):
            wf = add_wf_metadata(wf, self.structure)

        tag = "z2pack: {}".format(self.uuid)
        wf = add_tags(wf, [tag])

        return wf
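
The symmetry_reduction branch above schedules a Brillouin-zone surface only when none of its symmetry-equivalent planes is already in the list. A standalone sketch of that selection logic follows; the equiv_planes mapping is invented purely to illustrate what get_equiv_planes() might return.

# Standalone sketch of the surface-selection logic used above (pure Python).
equiv_planes = {"ky_0": ["kx_0"], "ky_1": ["kx_1"], "kz_0": [], "kz_1": ["kz_0"]}

surfaces = ["kx_0", "kx_1"]
for add_surface, equivalents in equiv_planes.items():
    # add a BZ surface only if no already-scheduled surface is equivalent to it
    if add_surface not in surfaces and not any(s in equivalents for s in surfaces):
        surfaces.append(add_surface)

print(surfaces)  # ['kx_0', 'kx_1', 'kz_0']
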
Example #8
def wf_irvsp(structure, magnetic=False, soc=False, v2t=False, c=None):
    """
    Fireworks workflow for running an irvsp calculation.
    Optionally performs a vasp2trace calculation.

    Args:
        structure (Structure): Pymatgen structure object
        magnetic (bool): Whether the calculation is on a magnetic structure
        soc (bool): Spin-orbit coupling included
        v2t (bool): Do a vasp2trace calculation in addition to irvsp.
        c (dict): Workflow config dict (as used elsewhere in atomate).

    Returns:
        Workflow

    """

    c = c or {}
    vasp_cmd = c.get("VASP_CMD", VASP_CMD)
    db_file = c.get("DB_FILE", DB_FILE)

    uuid = str(uuid4())
    wf_meta = {"wf_uuid": uuid, "wf_name": "Irvsp WF"}

    magmoms = None

    if magnetic and "magmom" in structure.site_properties:
        magmoms_orig = structure.site_properties["magmom"]
        magmoms = [str(m) for m in magmoms_orig]
        magmoms = " ".join(magmoms)
    elif magnetic:
        raise RuntimeError(
            "Structure must have magnetic moments in site_properties for magnetic calcualtion!"
        )

    ncoords = 3 * len(structure.sites)

    nbands = 0

    for site in structure.sites:
        nbands += site.species.total_electrons

    trim_kpoints = Kpoints(
        comment="TRIM Points",
        num_kpts=8,
        style=Kpoints.supported_modes.Reciprocal,
        kpts=(
            (0, 0, 0),
            (0.5, 0, 0),
            (0, 0.5, 0),
            (0, 0, 0.5),
            (0.5, 0.5, 0),
            (0, 0.5, 0.5),
            (0.5, 0, 0.5),
            (0.5, 0.5, 0.5),
        ),
        kpts_shift=(0, 0, 0),
        kpts_weights=[1, 1, 1, 1, 1, 1, 1, 1],
        coord_type="Reciprocal",
        labels=["gamma", "x", "y", "z", "s", "t", "u", "r"],
        tet_number=0,
        tet_weight=0,
        tet_connections=None,
    )

    # params dicts for wf
    params = [
        {},  # optimization
        {},  # standardization
        {},  # static
        {
            "input_set_overrides": {
                "other_params": {"user_kpoints_settings": trim_kpoints}
            }
        },  # nscf
        {"wf_uuid": uuid},  # irvsp
    ]

    if magnetic and v2t:
        yaml_spec = "irvsp_v2t_magnetic.yaml"
        params.append({})
    elif v2t:
        yaml_spec = "irvsp_v2t.yaml"
        params.append({})
    else:
        yaml_spec = "irvsp.yaml"

    wf = get_wf(
        structure,
        yaml_spec,
        params=params,
        vis=MPStaticSet(structure, potcar_functional="PBE_54", force_gamma=True),
        common_params={"vasp_cmd": vasp_cmd, "db_file": db_file},
        wf_metadata=wf_meta,
    )

    dim_data = StructureDimensionality(structure)

    if np.any(
        [
            dim == 2
            for dim in [dim_data.larsen_dim, dim_data.cheon_dim, dim_data.gorai_dim]
        ]
    ):
        wf = add_modify_incar(
            wf,
            modify_incar_params={
                "incar_update": {"IVDW": 11, "EDIFFG": 0.005, "IBRION": 2, "NSW": 100}
            },
            fw_name_constraint="structure optimization",
        )
    else:
        wf = add_modify_incar(
            wf,
            modify_incar_params={
                "incar_update": {"EDIFFG": 0.005, "IBRION": 2, "NSW": 100}
            },
            fw_name_constraint="structure optimization",
        )

    wf = add_modify_incar(
        wf,
        modify_incar_params={
            "incar_update": {"ADDGRID": ".TRUE.", "LASPH": ".TRUE.", "GGA": "PS"}
        },
    )

    # Include noncollinear (ncl) magmoms with saxis = (0, 0, 1)
    if magnetic and soc:
        magmoms = []
        for m in magmoms_orig:
            magmoms += [0.0, 0.0, m]
        magmoms = [str(m) for m in magmoms]
        magmoms = " ".join(magmoms)

        wf = add_modify_incar(
            wf,
            modify_incar_params={
                "incar_update": {"ISYM": 2, "MAGMOM": "%s" % magmoms}
            },
        )

    if magnetic and not soc:
        # Include magmoms in every calculation
        wf = add_modify_incar(
            wf,
            modify_incar_params={
                "incar_update": {"ISYM": 2, "MAGMOM": "%s" % magmoms, "ISPIN": 2}
            },
        )

    wf = add_modify_incar(
        wf,
        modify_incar_params={
            "incar_update": {
                "ISYM": 2,
                "LSORBIT": ".TRUE." if soc else ".FALSE.",
                "MAGMOM": "%s" % magmoms
                if magnetic
                else "%i*0.0" % ncoords,
                "ISPIN": 2 if magnetic and not soc else 1,
                "LWAVE": ".TRUE.",
                # "NBANDS": nbands,
            }
        },
        fw_name_constraint="nscf",
    )

    wf = add_common_powerups(wf, c)

    wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": wf_meta},
        task_name_constraint="VaspToDb")

    if c.get("STABILITY_CHECK", STABILITY_CHECK):
        wf = add_stability_check(wf, fw_name_constraint="structure optimization")

    if c.get("ADD_WF_METADATA", ADD_WF_METADATA):
        wf = add_wf_metadata(wf, structure)

    return wf
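
A brief usage sketch for the function above: a non-magnetic run with spin-orbit coupling and an additional vasp2trace calculation.

# Usage sketch (illustrative): call the function defined above.
from pymatgen.core import Structure

structure = Structure.from_file("POSCAR")
wf = wf_irvsp(structure, magnetic=False, soc=True, v2t=True)
print(wf.name)
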
Example #9
def get_aneb_wf(
    structure,
    working_ion,
    insert_coords,
    insert_coords_combinations,
    n_images,
    vasp_input_set=None,
    override_default_vasp_params=None,
    handler_group=None,
    selective_dynamics_scheme="fix_two_atom",
    launch_mode="all",
    vasp_cmd=VASP_CMD,
    db_file=DB_FILE,
    wall_time=None,
    additional_fields=None,
    tags=None,
    powerup_dicts=None,
    name="ApproxNEB",
):
    """
    Workflow for running the "ApproxNEB" algorithm to estimate
    energetic barriers for a working ion in a structure (host)
    between end point positions specified by insert_coords and
    insert_coords_combinations. Note this workflow is only
    intended for the dilute lattice limit (where one working
    ion is in a large supercell structure of the host and
    little volume change upon insertion is expected).
    By default workflow sets appropriate VASP input parameters
    and Custodian handler groups.

    This workflow uses an "approx_neb" collection to organize
    outputs and generate inputs for new VASP calculations for
    easier data management and analysis. An "approx_neb"
    additional field is automatically added to all task docs
    generated to assist record keeping.

    To make modifications to docs generated by this
    workflow, use of the additional_fields and tags arguments
    is recommended to ensure all fireworks, tasks collection
    docs, and approx_neb collection docs are modified.

    Args:
    structure (Structure): structure of empty host
    working_ion: specie of site to insert in structure
        (e.g. "Li").
    insert_coords (1x3 array or list of 1x3 arrays):
        fractional coordinates of site(s) to insert in
        structure (e.g. [[0,0,0], [0,0.25,0], [0.5,0,0]]).
    insert_coords_combinations (list of strings): list of
        strings corresponding to the list index of
        insert_coords to specify which combination
        of end_points to use for path interpolation.
        (e.g. ["0+1", "0+2"])
    n_images (int): number of images
        interpolated between end point structures for
        each path set by insert_coords_combinations
    vasp_input_set (VaspInputSet class): can use to
        define VASP input parameters.
        See pymatgen.io.vasp.sets module for more
        information. MPRelaxSet() and
        override_default_vasp_params are used if
        vasp_input_set = None.
    override_default_vasp_params (dict): if provided,
        vasp_input_set is disregarded and the Vasp Input
        Set is created by passing override_default_vasp_params
        to MPRelaxSet(). Allows for easy modification of
        MPRelaxSet().
        For example, to set ISIF=2 in the INCAR use:
        {"user_incar_settings":{"ISIF":2}}
    handler_group (str or [ErrorHandler]): group of handlers to
        use for RunVaspCustodian firetask. See handler_groups
        dict in the code for the groups and complete list of
        handlers in each group. Alternatively, you can specify a
        list of ErrorHandler objects.
    selective_dynamics_scheme (str): "fix_two_atom"
    launch_mode (str): "all" or "screening"
    vasp_cmd (str): the name of the full executable for running
        VASP.
    db_file (str): path to file containing the database
        credentials.
    wall_time (int): Total walltime in seconds. If this is None and
        the job is running on a PBS system, the handler will attempt to
        determine the walltime from the PBS_WALLTIME environment
        variable. If the wall time cannot be determined or is not
        set, this handler will have no effect.
    additional_fields (dict): specifies more information
        to be stored in the approx_neb collection to
        assist user record keeping.
    tags (list): list of strings to be stored in the
        approx_neb collection under the "tags" field to
        assist user record keeping.
    powerup_dicts (list): additional powerups given to all the dynamically
        created image fireworks
    name (str): name for the workflow returned

    Returns: Workflow
    """
    approx_neb_params = override_default_vasp_params or {
        "user_incar_settings": {
            "EDIFF": 0.0005,
            "EDIFFG": -0.05,
            "IBRION": 1,
            "ISIF": 3,
            "ISMEAR": 0,
            "LDAU": False,
            "NSW": 400,
            "ADDGRID": True,
            "ISYM": 1,
            "NELMIN": 4,
        }
    }

    handler_group = handler_group or [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler(),
        PositiveEnergyErrorHandler(),
        FrozenJobErrorHandler(),
        StdErrHandler(),
        WalltimeHandler(wall_time=wall_time),
    ]

    wf_uuid = str(uuid4())
    additional_fields = deepcopy(additional_fields)

    host_fw = HostFW(
        structure=structure,
        approx_neb_wf_uuid=wf_uuid,
        db_file=db_file,
        vasp_input_set=vasp_input_set,
        vasp_cmd=vasp_cmd,
        override_default_vasp_params=deepcopy(approx_neb_params),
        additional_fields=additional_fields,
        tags=tags,
    )

    # modifies incar settings needed for end point and image structure relaxations
    if "user_incar_settings" not in approx_neb_params.keys():
        approx_neb_params = {"user_incar_settings": {}}
    approx_neb_params["user_incar_settings"]["ISIF"] = 2
    approx_neb_params["user_incar_settings"]["ISYM"] = 0
    approx_neb_params["user_incar_settings"]["LDAU"] = False

    end_point_fws = []
    for n, coord in enumerate(insert_coords):
        end_point_fws.append(
            EndPointFW(
                approx_neb_wf_uuid=wf_uuid,
                insert_specie=working_ion,
                insert_coords=coord,
                end_points_index=n,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                parents=host_fw,
            ))

    evaluate_path_fws = []
    for end_points_combo in insert_coords_combinations:
        if isinstance(end_points_combo, (str)):
            combo = end_points_combo.split("+")
            if len(combo) == 2:
                c = [int(combo[0]), int(combo[-1])]
            else:
                raise ValueError(
                    "string format in insert_coords_combinations is incorrect")

        evaluate_path_fws.append(
            EvaluatePathFW(
                approx_neb_wf_uuid=wf_uuid,
                end_points_combo=end_points_combo,
                mobile_specie=working_ion,
                n_images=n_images,
                selective_dynamics_scheme=selective_dynamics_scheme,
                launch_mode=launch_mode,
                vasp_cmd=vasp_cmd,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                handler_group=handler_group,
                parents=[end_point_fws[c[0]], end_point_fws[c[1]]],
                add_additional_fields=additional_fields,
                add_tags=tags,
            ))

    wf = Workflow([host_fw] + end_point_fws + evaluate_path_fws)

    wf = use_custodian(wf, custodian_params={"handler_group": handler_group})
    if isinstance(tags, (list)):
        wf = add_tags(wf, tags)
    if isinstance(additional_fields, (dict)):
        wf = add_additional_fields_to_taskdocs(wf,
                                               update_dict=additional_fields)
    if powerup_dicts is not None:
        wf = powerup_by_kwargs(wf, powerup_dicts)
        for fw in wf.fws:
            fw.spec["vasp_powerups"] = powerup_dicts
    wf.metadata.update({"approx_neb_wf_uuid": wf_uuid})
    wf.name = name

    return wf
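
A brief usage sketch for the function above: a host structure with two candidate insertion sites and a single interpolation path between them; all values are placeholders.

# Usage sketch (illustrative): call the function defined above.
from pymatgen.core import Structure

host = Structure.from_file("POSCAR")  # empty host structure
wf = get_aneb_wf(
    structure=host,
    working_ion="Li",
    insert_coords=[[0, 0, 0], [0.5, 0.5, 0.5]],
    insert_coords_combinations=["0+1"],
    n_images=5,
    tags=["approx_neb_example"],
)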