Example #1
    def test_add_tags(self):
        my_wf = self._copy_wf(self.bs_wf)
        my_wf.metadata = {"tags": ["a"]}
        my_wf = add_tags(my_wf, ["b", "c"])

        found = 0

        self.assertEqual(my_wf.metadata["tags"], ["a", "b", "c"])
        for fw in my_wf.fws:
            self.assertEqual(fw.spec["tags"], ["b", "c"])
            for t in fw.tasks:
                if 'VaspToDb' in str(t):
                    self.assertEqual(t["additional_fields"]["tags"], ["b", "c"])
                    found += 1
        self.assertEqual(found, 4)

        my_wf = self._copy_wf(self.bsboltz_wf)
        my_wf = add_tags(my_wf, ["foo", "bar"])

        v_found = 0
        b_found = 0

        self.assertEqual(my_wf.metadata["tags"], ["foo", "bar"])
        for fw in my_wf.fws:
            self.assertEqual(fw.spec["tags"], ["foo", "bar"])
            for t in fw.tasks:
                if 'BoltztrapToDb' in str(t):
                    self.assertEqual(t["additional_fields"]["tags"], ["foo", "bar"])
                    b_found += 1
                if 'VaspToDb' in str(t):
                    self.assertEqual(t["additional_fields"]["tags"], ["foo", "bar"])
                    v_found += 1
        self.assertEqual(b_found, 1)
        self.assertEqual(v_found, 4)
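
The assertions above pin down what the add_tags powerup is expected to do: append to the workflow-level metadata, push the tags into every Firework spec, and copy them into the additional_fields of each *ToDb task. A minimal sketch of the same call pattern outside the test harness, assuming atomate's add_tags from atomate.vasp.powerups and FireWorks are installed (the task and tag strings are placeholders):

from fireworks import Firework, ScriptTask, Workflow
from atomate.vasp.powerups import add_tags

fw = Firework(ScriptTask.from_str('echo "demo"'), name="demo")
wf = Workflow([fw], metadata={"tags": ["a"]})

wf = add_tags(wf, ["b", "c"])

print(wf.metadata["tags"])     # ["a", "b", "c"] -- appended to existing workflow metadata
print(wf.fws[0].spec["tags"])  # ["b", "c"]      -- set on every Firework spec
# a VaspToDb-style task in the workflow would also receive the tags in its "additional_fields"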
Example #2
def generate_elastic_workflow(structure, tags=None):
    """
    Generates a standard production workflow.

    Notes:
        Uses a primitive structure transformed into
        the conventional basis (for equivalent deformations).

        Adds the "minimal" category to the minimal portion
        of the workflow necessary to generate the elastic tensor,
        and the "minimal_full_stencil" category to the portion that
        includes all of the strain stencil, but is symmetrically complete.
    """
    if tags is None:
        tags = []
    # transform the structure
    ieee_rot = Tensor.get_ieee_rotation(structure)
    if not SquareTensor(ieee_rot).is_rotation(tol=0.005):
        raise ValueError(
            "Rotation matrix does not satisfy rotation conditions")
    symm_op = SymmOp.from_rotation_and_translation(ieee_rot)
    ieee_structure = structure.copy()
    ieee_structure.apply_operation(symm_op)

    # construct workflow
    wf = wf_elastic_constant(ieee_structure)

    # Set categories, starting with optimization
    opt_fws = get_fws_and_tasks(wf, fw_name_constraint="optimization")
    wf.fws[opt_fws[0][0]].spec['elastic_category'] = "minimal"

    # find minimal set of fireworks using symmetry reduction
    fws_by_strain = {
        Strain(fw.tasks[-1]['pass_dict']['strain']): n
        for n, fw in enumerate(wf.fws) if 'deformation' in fw.name
    }
    unique_tensors = symmetry_reduce(list(fws_by_strain.keys()),
                                     ieee_structure)
    for unique_tensor in unique_tensors:
        fw_index = get_tkd_value(fws_by_strain, unique_tensor)
        if np.isclose(unique_tensor, 0.005).any():
            wf.fws[fw_index].spec['elastic_category'] = "minimal"
        else:
            wf.fws[fw_index].spec['elastic_category'] = "minimal_full_stencil"

    # Add tags
    if tags:
        wf = add_tags(wf, tags)

    wf = add_modify_incar(wf)
    priority = 500 - structure.num_sites
    wf = add_priority(wf, priority)
    for fw in wf.fws:
        if fw.spec.get('elastic_category') == 'minimal':
            fw.spec['_priority'] += 2000
        elif fw.spec.get('elastic_category') == 'minimal_full_stencil':
            fw.spec['_priority'] += 1000
    return wf
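
A brief usage sketch for the function above; the POSCAR path and tag strings are placeholders, and it assumes generate_elastic_workflow plus its pymatgen/atomate dependencies are importable:

from pymatgen.core import Structure

structure = Structure.from_file("POSCAR")  # placeholder input file
wf = generate_elastic_workflow(structure, tags=["elastic", "batch-1"])

# inspect how the category labels and the priority powerup combined
for fw in wf.fws:
    print(fw.name, fw.spec.get("elastic_category"), fw.spec.get("_priority"))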
Example #3
    def test_add_tags(self):
        my_wf = self._copy_wf(self.bs_wf)
        my_wf.metadata = {"tags": ["a"]}
        my_wf = add_tags(my_wf, ["b", "c"])

        found = 0

        self.assertEqual(my_wf.metadata["tags"], ["a", "b", "c"])
        for fw in my_wf.fws:
            print(fw.spec["tags"])
            self.assertEqual(fw.spec["tags"], ["b", "c"])
            for t in fw.tasks:
                if 'VaspToDbTask' in str(t):
                    self.assertEqual(t["additional_fields"]["tags"], ["b", "c"])
                    found += 1

        self.assertEqual(found, 4)
Example #4
    def process_item(self, item):
        """
        Processes items into workflows

        Args:
            item ((dict, list)): pair of doc and task_ids to filter

        Returns:
            Workflow
        """
        wf_input, ids_to_filter = item
        mat_id = wf_input["task_id"]
        if mat_id in ids_to_filter:
            return None
        else:
            structure = Structure.from_dict(wf_input.get('structure'))
            wf = self.wf_function(structure)
            wf = add_tags(wf, [mat_id])
            return wf
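
A hypothetical call to the builder method above; the builder instance, document fields, and task ids are illustrative only:

# assumes `builder` is an instance of the class defining process_item, and
# `structure` is a pymatgen Structure already in hand
doc = {"task_id": "mp-149", "structure": structure.as_dict()}
already_submitted = ["mp-23", "mp-42"]

wf = builder.process_item((doc, already_submitted))
if wf is not None:
    print(wf.metadata.get("tags"))  # includes "mp-149", added via add_tags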
Example #5
def get_wf_ferroelectric(polar_structure,
                         nonpolar_structure,
                         vasp_cmd="vasp",
                         db_file=None,
                         vasp_input_set_polar="MPStaticSet",
                         vasp_input_set_nonpolar="MPStaticSet",
                         relax=False,
                         vasp_relax_input_set_polar=None,
                         vasp_relax_input_set_nonpolar=None,
                         nimages=9,
                         hse=False,
                         add_analysis_task=False,
                         wfid=None,
                         tags=None):
    """
    Returns a workflow to calculate the spontaneous polarization of polar_structure using
    a nonpolar reference phase structure and linear interpolations between the polar and
    nonpolar structure.

    The nonpolar and polar structures must be in the same space group setting and atoms ordered
    such that a linear interpolation can be performed to create intermediate structures along
    the distortion.

    For example, to calculate the polarization of orthorhombic BaTiO3 (space group 38) using
    the cubic structure (space group 221) as the nonpolar reference phase, we must transform
    the cubic to the orthorhombic setting. This can be accomplished using Bilbao Crystallographic
    Server's Structure Relations tool. (http://www.cryst.ehu.es/cryst/rel.html)

    Args:
        polar_structure (Structure): polar structure of candidate ferroelectric
        nonpolar_structure (Structure): nonpolar reference structure in polar setting
        vasp_input_set_polar (DictVaspInputSet): VASP polar input set. Defaults to MPStaticSet.
        vasp_input_set_nonpolar (DictVaspInputSet): VASP nonpolar input set. Defaults to MPStaticSet.
        vasp_relax_input_set_polar (DictVaspInputSet): VASP polar input set. Defaults to MPRelaxSet.
        vasp_relax_input_set_nonpolar (DictVaspInputSet): VASP nonpolar input set. Defaults to MPRelaxSet.
        vasp_cmd (str): command to run
        db_file (str): path to file containing the database credentials.
        nimages (int): Number of interpolations calculated from polar to nonpolar structures, including the nonpolar.
            For example, nimages = 9 will calculate 8 interpolated structures. 8 interpolations + nonpolar = 9.
        add_analysis_task (bool): Analyze polarization and energy trends as part of workflow. Default False.
        wfid (string): Unique workflow id starting with "wfid_". If None this is automatically generated (recommended).
        tags (list of strings): Additional tags to add such as identifiers for structures.

    Returns:
        Workflow
    """
    wf = []

    if wfid is None:
        wfid = 'wfid_' + get_a_unique_id()
    if tags is None:
        tags = []

    if relax:
        polar_relax = OptimizeFW(structure=polar_structure,
                                 name="_polar_relaxation",
                                 vasp_cmd=vasp_cmd,
                                 db_file=db_file,
                                 vasp_input_set=vasp_relax_input_set_polar)
        nonpolar_relax = OptimizeFW(
            structure=nonpolar_structure,
            name="_nonpolar_relaxation",
            vasp_cmd=vasp_cmd,
            db_file=db_file,
            vasp_input_set=vasp_relax_input_set_nonpolar)
        wf.append(polar_relax)
        wf.append(nonpolar_relax)
        parents_polar = polar_relax
        parents_nonpolar = nonpolar_relax
    else:
        parents_polar = None
        parents_nonpolar = None

    # Run polarization calculation on polar structure.
    # Defuse workflow if polar structure is metallic.
    polar = LcalcpolFW(structure=polar_structure,
                       name="_polar_polarization",
                       static_name="_polar_static",
                       parents=parents_polar,
                       vasp_cmd=vasp_cmd,
                       db_file=db_file,
                       vasp_input_set=vasp_input_set_polar)

    # Run polarization calculation on nonpolar structure.
    # Defuse workflow if nonpolar structure is metallic.
    nonpolar = LcalcpolFW(structure=nonpolar_structure,
                          name="_nonpolar_polarization",
                          static_name="_nonpolar_static",
                          parents=parents_nonpolar,
                          vasp_cmd=vasp_cmd,
                          db_file=db_file,
                          vasp_input_set=vasp_input_set_nonpolar)

    # Interpolation polarization
    interpolation = []
    # Interpolations start from one increment after polar and end prior to nonpolar.
    # The Structure.interpolate method adds an additional image for the nonpolar endpoint.
    # Defuse children fireworks if metallic.
    for i in range(1, nimages):
        # nonpolar_structure is being used as a dummy structure.
        # The structure will be replaced by the interpolated structure generated by
        # StaticInterpolatedFW.
        # Defuse workflow if interpolated structure is metallic.
        interpolation.append(
            LcalcpolFW(structure=polar_structure,
                       name="_interpolation_{}_polarization".format(str(i)),
                       static_name="_interpolation_{}_static".format(str(i)),
                       vasp_cmd=vasp_cmd,
                       db_file=db_file,
                       vasp_input_set=vasp_input_set_polar,
                       interpolate=True,
                       start="_polar_static",
                       end="_nonpolar_static",
                       nimages=nimages,
                       this_image=i,
                       parents=[polar, nonpolar]))

    wf.append(polar)
    wf.append(nonpolar)
    wf += interpolation

    # Add FireTask that uses Polarization object to store spontaneous polarization information
    if add_analysis_task:
        fw_analysis = Firework(PolarizationToDb(db_file=db_file),
                               parents=interpolation,
                               name="_polarization_post_processing")
        wf.append(fw_analysis)

    # Run HSE band gap calculation
    if hse:
        # Run HSE calculation at band gap for polar calculation if polar structure is not metallic
        hse = HSEBSFW(structure=polar_structure,
                      parents=polar,
                      name="_polar_hse_gap",
                      vasp_cmd=vasp_cmd,
                      db_file=db_file,
                      calc_loc="_polar_polarization")
        wf.append(hse)

    # Create Workflow task and add tags to workflow
    workflow = Workflow(wf)
    workflow = add_tags(workflow, [wfid] + tags)

    return workflow
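
A usage sketch for get_wf_ferroelectric; the structure files are placeholders and both structures are assumed to already be in the same (polar) setting, as the docstring requires:

from pymatgen.core import Structure

polar = Structure.from_file("BaTiO3_polar.cif")        # placeholder
nonpolar = Structure.from_file("BaTiO3_nonpolar.cif")  # placeholder, polar setting

wf = get_wf_ferroelectric(polar, nonpolar,
                          vasp_cmd=">>vasp_cmd<<",
                          db_file=">>db_file<<",
                          nimages=9,
                          add_analysis_task=True,
                          tags=["ferroelectric_screening"])
# the auto-generated wfid_* string plus the extra tag end up on every firework via add_tags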
Example #6
    def get_wf(
        self, scan=False, perform_bader=True, num_orderings_hard_limit=16, c=None
    ):
        """
        Retrieve the FireWorks workflow.

        Args:
            scan: if True, use the SCAN functional instead of GGA+U, since
                the SCAN functional has been shown to have improved
                performance for magnetic systems in some cases
            perform_bader: if True, make sure the "bader" binary is in your
                path; Bader analysis will be used to calculate atom-projected
                magnetic moments
            num_orderings_hard_limit: will make sure the total number of
                magnetic orderings does not exceed this number even if there
                are extra orderings of equivalent symmetry
            c: additional config dict (as used elsewhere in atomate)

        Returns: FireWorks Workflow

        """

        c = c or {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}

        fws = []
        analysis_parents = []

        # trim total number of orderings (useful in high-throughput context)
        # this is somewhat coarse, better to reduce num_orderings kwarg and/or
        # change enumeration strategies
        ordered_structures = self.ordered_structures
        ordered_structure_origins = self.ordered_structure_origins

        def _add_metadata(structure):
            """
            For book-keeping, store useful metadata with the Structure
            object for later database ingestion including workflow
            version and a UUID for easier querying of all tasks generated
            from the workflow.

            Args:
                structure: Structure

            Returns: TransformedStructure
            """
            # this could be further improved by storing full transformation
            # history, but would require an improved transformation pipeline
            return TransformedStructure(
                structure, other_parameters={"wf_meta": self.wf_meta}
            )

        ordered_structures = [_add_metadata(struct) for struct in ordered_structures]

        if (
            num_orderings_hard_limit
            and len(self.ordered_structures) > num_orderings_hard_limit
        ):
            ordered_structures = self.ordered_structures[0:num_orderings_hard_limit]
            ordered_structure_origins = self.ordered_structure_origins[
                0:num_orderings_hard_limit
            ]
            logger.warning(
                "Number of ordered structures exceeds hard limit, "
                "removing last {} structures.".format(
                    len(self.ordered_structures) - len(ordered_structures)
                )
            )
            # always make sure input structure is included
            if self.input_index and self.input_index > num_orderings_hard_limit:
                ordered_structures.append(self.ordered_structures[self.input_index])
                ordered_structure_origins.append(
                    self.ordered_structure_origins[self.input_index]
                )

        # default incar settings
        user_incar_settings = {"ISYM": 0, "LASPH": True, "EDIFFG": -0.05}
        if scan:
            # currently, using SCAN relaxation as a static calculation also
            # since it is typically high quality enough, but want to make
            # sure we are also writing the AECCAR* files
            user_incar_settings.update({"LAECHG": True})
        user_incar_settings.update(c.get("user_incar_settings", {}))
        c["user_incar_settings"] = user_incar_settings

        for idx, ordered_structure in enumerate(ordered_structures):

            analyzer = CollinearMagneticStructureAnalyzer(ordered_structure)

            name = " ordering {} {} -".format(idx, analyzer.ordering.value)

            if not scan:

                vis = MPRelaxSet(
                    ordered_structure, user_incar_settings=user_incar_settings
                )

                # relax
                fws.append(
                    OptimizeFW(
                        ordered_structure,
                        vasp_input_set=vis,
                        vasp_cmd=c["VASP_CMD"],
                        db_file=c["DB_FILE"],
                        max_force_threshold=0.05,
                        half_kpts_first_relax=False,
                        name=name + " optimize",
                    )
                )

                # static
                fws.append(
                    StaticFW(
                        ordered_structure,
                        vasp_cmd=c["VASP_CMD"],
                        db_file=c["DB_FILE"],
                        name=name + " static",
                        prev_calc_loc=True,
                        parents=fws[-1],
                    )
                )

            else:

                # wf_scan_opt is just a single FireWork so can append it directly
                scan_fws = wf_scan_opt(ordered_structure, c=c).fws
                # change name for consistency with non-SCAN
                new_name = scan_fws[0].name.replace(
                    "structure optimization", name + " optimize"
                )
                scan_fws[0].name = new_name
                scan_fws[0].tasks[-1]["additional_fields"]["task_label"] = new_name
                fws += scan_fws

            analysis_parents.append(fws[-1])

        fw_analysis = Firework(
            MagneticOrderingsToDB(
                db_file=c["DB_FILE"],
                wf_uuid=self.uuid,
                auto_generated=False,
                name="MagneticOrderingsToDB",
                parent_structure=self.sanitized_structure,
                origins=ordered_structure_origins,
                input_index=self.input_index,
                perform_bader=perform_bader,
                scan=scan,
            ),
            name="Magnetic Orderings Analysis",
            parents=analysis_parents,
            spec={"_allow_fizzled_parents": True},
        )
        fws.append(fw_analysis)

        formula = self.sanitized_structure.composition.reduced_formula
        wf_name = "{} - magnetic orderings".format(formula)
        if scan:
            wf_name += " - SCAN"
        wf = Workflow(fws, name=wf_name)

        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

        tag = "magnetic_orderings group: >>{}<<".format(self.uuid)
        wf = add_tags(wf, [tag, ordered_structure_origins])

        return wf
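
A hedged usage sketch; MagneticOrderingsWF is assumed to be the atomate wrapper class that defines this get_wf method, and the input file is a placeholder:

from pymatgen.core import Structure
from atomate.vasp.workflows.base.magnetism import MagneticOrderingsWF  # assumed location

structure = Structure.from_file("POSCAR")  # placeholder
wf = MagneticOrderingsWF(structure).get_wf(scan=False,
                                           perform_bader=True,
                                           num_orderings_hard_limit=8)
# task docs from this run can later be recovered via the
# "magnetic_orderings group: >>uuid<<" tag added above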
Example #7
    def get_wf(self,
               scan=False,
               perform_bader=True,
               num_orderings_hard_limit=16,
               c=None):
        """
        Retrieve the FireWorks workflow.

        Args:
            scan (bool): if True, use the SCAN functional instead of GGA+U,
                since the SCAN functional has been shown to have improved
                performance for magnetic systems in some cases
            perform_bader (bool): if True, make sure the "bader" binary is in
                your path, will use Bader analysis to calculate
                atom-projected magnetic moments
            num_orderings_hard_limit (int): will make sure total number of
                magnetic orderings does not exceed this number even if there
                are extra orderings of equivalent symmetry
            c (dict): additional config dict (as used elsewhere in atomate)

        Returns: FireWorks Workflow

        """

        c_defaults = {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}
        additional_fields = {"relax": not self.static}
        c = c or {}
        for k, v in c_defaults.items():
            if k not in c:
                c[k] = v

        fws = []
        analysis_parents = []

        # trim total number of orderings (useful in high-throughput context)
        # this is somewhat coarse, better to reduce num_orderings kwarg and/or
        # change enumeration strategies
        ordered_structures = self.ordered_structures
        ordered_structure_origins = self.ordered_structure_origins

        def _add_metadata(structure):
            """
            For book-keeping, store useful metadata with the Structure
            object for later database ingestion including workflow
            version and a UUID for easier querying of all tasks generated
            from the workflow.

            Args:
                structure: Structure

            Returns: TransformedStructure

            """

            # this could be further improved by storing full transformation
            # history, but would require an improved transformation pipeline
            return TransformedStructure(
                structure, other_parameters={"wf_meta": self.wf_meta})

        ordered_structures = [
            _add_metadata(struct) for struct in ordered_structures
        ]

        if (num_orderings_hard_limit
                and len(self.ordered_structures) > num_orderings_hard_limit):
            ordered_structures = self.ordered_structures[
                0:num_orderings_hard_limit]
            ordered_structure_origins = self.ordered_structure_origins[
                0:num_orderings_hard_limit]
            logger.warning("Number of ordered structures exceeds hard limit, "
                           "removing last {} structures.".format(
                               len(self.ordered_structures) -
                               len(ordered_structures)))
            # always make sure input structure is included
            if self.input_index and self.input_index > num_orderings_hard_limit:
                ordered_structures.append(
                    self.ordered_structures[self.input_index])
                ordered_structure_origins.append(
                    self.ordered_structure_origins[self.input_index])

        # default incar settings
        user_incar_settings = {"ISYM": 0, "LASPH": True, "EDIFFG": -0.05}
        if scan:
            # currently, using SCAN relaxation as a static calculation also
            # since it is typically high quality enough, but want to make
            # sure we are also writing the AECCAR* files
            user_incar_settings.update({"LAECHG": True})
        user_incar_settings.update(c.get("user_incar_settings", {}))
        c["user_incar_settings"] = user_incar_settings

        for idx, ordered_structure in enumerate(ordered_structures):

            analyzer = CollinearMagneticStructureAnalyzer(ordered_structure)

            name = f" ordering {idx} {analyzer.ordering.value} -"

            if not scan:

                vis = MPRelaxSet(ordered_structure,
                                 user_incar_settings=user_incar_settings)

                if not self.static:

                    # relax
                    fws.append(
                        OptimizeFW(
                            ordered_structure,
                            vasp_input_set=vis,
                            vasp_cmd=c["VASP_CMD"],
                            db_file=c["DB_FILE"],
                            max_force_threshold=0.05,
                            half_kpts_first_relax=False,
                            name=name + " optimize",
                        ))

                # static
                fws.append(
                    StaticFW(
                        ordered_structure,
                        vasp_cmd=c["VASP_CMD"],
                        db_file=c["DB_FILE"],
                        name=name + " static",
                        prev_calc_loc=True,
                        parents=fws[-1],
                        vasptodb_kwargs={
                            "parse_chgcar": True,
                            "parse_aeccar": True
                        },
                    ))

                if not self.static:
                    # so a failed optimize doesn't crash workflow
                    fws[-1].spec["_allow_fizzled_parents"] = True

            elif scan:

                # wf_scan_opt is just a single FireWork so can append it directly
                scan_fws = wf_scan_opt(ordered_structure, c=c).fws
                # change name for consistency with non-SCAN
                new_name = scan_fws[0].name.replace("structure optimization",
                                                    name + " optimize")
                scan_fws[0].name = new_name
                scan_fws[0].tasks[-1]["additional_fields"][
                    "task_label"] = new_name
                fws += scan_fws

            analysis_parents.append(fws[-1])

        fw_analysis = Firework(
            MagneticOrderingsToDb(
                db_file=c["DB_FILE"],
                wf_uuid=self.uuid,
                parent_structure=self.sanitized_structure,
                origins=ordered_structure_origins,
                input_index=self.input_index,
                perform_bader=perform_bader,
                scan=scan,
                additional_fields=additional_fields,
            ),
            name="Magnetic Orderings Analysis",
            parents=analysis_parents,
            spec={"_allow_fizzled_parents": True},
        )
        fws.append(fw_analysis)

        formula = self.sanitized_structure.composition.reduced_formula
        wf_name = f"{formula} - magnetic orderings"
        if scan:
            wf_name += " - SCAN"
        wf = Workflow(fws, name=wf_name)

        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

        tag = f"magnetic_orderings group: >>{self.uuid}<<"
        wf = add_tags(wf, [tag, ordered_structure_origins])

        return wf
Example #8
def optimize_failed_ts(
        db: CatDB,
        lp: LaunchPad,
        query: Optional[Dict] = None,
        num_run: Optional[int] = None,
        allowed_calc_types: Optional[frozenset] = frozenset(["relax_ts", "qst"]),
        allow_failed_calcs: Optional[bool] = False,
        with_critic: Optional[bool] = False,
        qchem_cmd: Optional[str] = ">>qchem_cmd<<",
        max_cores: Optional[Union[str, int]] = ">>max_cores<<",
        multimode: Optional[str] = ">>multimode<<",
        qchem_input_params: Optional[Dict] = None,
        db_file: Optional[str] = ">>db_file<<",
        tags: Optional[Dict] = None
):
    if query is None:
        failed_query = {"completed": False, "run_atomate": {"$ne": True}}
    else:
        failed_query = query
        failed_query["completed"] = False
        failed_query["run_atomate"] = {"$ne": True}

    possible_entries = [e for e in db.database[db.data_collection].find(
        failed_query, {"_id": 0, "rxnid": 1, "calcs": 1}
    )]

    if num_run is not None:
        if num_run < len(possible_entries):
            possible_entries = possible_entries[:num_run]

    for entry in possible_entries:
        rxnid = entry["rxnid"]

        for calc in entry["calcs"]:
            if not calc["success"] and not allow_failed_calcs:
                continue
            name = calc["job_name"]
            if not any([e in name for e in allowed_calc_types]):
                continue

            if "qst" in name:
                if calc["output"].get("frequencies", [None])[0] is None:
                    continue
                elif calc["output"]["frequencies"][0] > 0:
                    continue

            if calc["output"].get("molecule") is None:
                continue
            else:
                molecule = Molecule.from_dict(calc["output"]["molecule"])

                if calc["success"]:
                    calc_status = "success"
                else:
                    calc_status = "failed"

                wf_name = "failed_rxn_{}:{}_{}".format(
                        rxnid, calc_status, name)

                if with_critic:
                    wf = get_wf_FFTSopt_and_critic(molecule, wf_name,
                                                   qchem_input_params=qchem_input_params,
                                                   db_file=db_file)
                    if tags is not None:
                        wf = add_tags(wf, tags)
                    lp.add_wf(wf)

                else:
                    fw = FrequencyFlatteningTransitionStateFW(
                        molecule=molecule,
                        name=wf_name,
                        qchem_cmd=qchem_cmd,
                        multimode=multimode,
                        max_cores=max_cores,
                        qchem_input_params=qchem_input_params,
                        linked=True,
                        freq_before_opt=True,
                        db_file=db_file
                    )
                    wf = Workflow([fw], name=wf_name)
                    if tags is not None:
                        wf = add_tags(wf, tags)
                    lp.add_wf(wf)

        time_now = datetime.datetime.now(datetime.timezone.utc)
        db.database[db.data_collection].update_one({"rxnid": rxnid}, {"$set": {"run_atomate": True,
                                                                               "updated_on": time_now}})
Example #9
def optimize_successful_ts(
        db: CatDB,
        lp: LaunchPad,
        query: Optional[Dict] = None,
        num_run: Optional[int] = None,
        with_critic: Optional[bool] = False,
        qchem_cmd: Optional[str] = ">>qchem_cmd<<",
        max_cores: Optional[Union[str, int]] = ">>max_cores<<",
        multimode: Optional[str] = ">>multimode<<",
        qchem_input_params: Optional[Dict] = None,
        db_file: Optional[str] = ">>db_file<<",
        tags: Optional[Dict] = None
):
    if query is None:
        success_query = {"completed": True, "run_atomate": {"$ne": True}}
    else:
        success_query = query
        success_query["completed"] = True
        success_query["run_atomate"] = {"$ne": True}

    possible_entries = [e for e in db.database[db.data_collection].find(
        success_query, {"_id": 0, "rxnid": 1, "output": 1}
    )]

    if num_run is not None:
        if num_run < len(possible_entries):
            possible_entries = possible_entries[:num_run]

    for entry in possible_entries:
        rxnid = entry["rxnid"]
        entry_names = list(entry["output"]["optimized_structure_energies"].keys())
        ts_structures = [
            Molecule.from_dict(e) for i, e in enumerate(entry["output"]["path_molecules"])
            if "transition_state" in entry_names[i]
        ]

        if with_critic:
            for i, ts in enumerate(ts_structures):
                name = "rxn_{}:ts_{}".format(rxnid, i + 1)
                wf = get_wf_FFTSopt_and_critic(ts, name,
                                               qchem_input_params=qchem_input_params,
                                               db_file=db_file)
                if tags is not None:
                    wf = add_tags(wf, tags)
                lp.add_wf(wf)

        else:
            for i, ts in enumerate(ts_structures):
                name = "rxn_{}:ts_{}".format(rxnid, i + 1)
                fw = FrequencyFlatteningTransitionStateFW(
                    molecule=ts,
                    name=name,
                    qchem_cmd=qchem_cmd,
                    multimode=multimode,
                    max_cores=max_cores,
                    qchem_input_params=qchem_input_params,
                    linked=True,
                    freq_before_opt=True,
                    db_file=db_file
                )
                wf = Workflow([fw], name=name)
                if tags is not None:
                    wf = add_tags(wf, tags)
                lp.add_wf(wf)

        time_now = datetime.datetime.now(datetime.timezone.utc)
        db.database[db.data_collection].update_one({"rxnid": rxnid}, {"$set": {"run_atomate": True,
                                                                               "updated_on": time_now}})
Example #10
    def get_wf(self, c=None):
        """
        Get the workflow.

        Returns:
            Workflow

        """

        c = c or {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}
        vasp_cmd = c.get("VASP_CMD", VASP_CMD)
        db_file = c.get("DB_FILE", DB_FILE)

        nsites = len(self.structure.sites)

        vis = MPRelaxSet(self.structure, potcar_functional="PBE_54", force_gamma=True)

        opt_fw = OptimizeFW(
            self.structure,
            vasp_input_set=vis,
            vasp_cmd=c["VASP_CMD"],
            db_file=c["DB_FILE"],
        )

        vis = MPStaticSet(self.structure, potcar_functional="PBE_54", force_gamma=True)

        static_fw = StaticFW(
            self.structure,
            vasp_input_set=vis,
            vasp_cmd=c["VASP_CMD"],
            db_file=c["DB_FILE"],
            parents=[opt_fw],
        )

        # Separate FW for each BZ surface calc
        # Run Z2Pack on unique TRIM planes in the BZ

        surfaces = ["kx_0", "kx_1"]
        equiv_planes = self.get_equiv_planes()

        # Only run calcs on inequivalent BZ surfaces
        if self.symmetry_reduction:
            for add_surface in equiv_planes.keys():
                mark = True
                for surface in surfaces:
                    if surface in equiv_planes[add_surface]:
                        mark = False
                if mark and add_surface not in surfaces:
                    surfaces.append(add_surface)
        else:  # 4 TRIM surfaces define Z2 in 3D
            surfaces = ["kx_1", "ky_1", "kz_0", "kz_1"]

        z2pack_fws = []

        for surface in surfaces:
            z2pack_fw = Z2PackFW(
                parents=[static_fw],
                structure=self.structure,
                surface=surface,
                uuid=self.uuid,
                name="z2pack",
                vasp_cmd=c["VASP_CMD"],
                db_file=c["DB_FILE"],
            )
            z2pack_fws.append(z2pack_fw)

        analysis_fw = InvariantFW(
            parents=z2pack_fws,
            structure=self.structure,
            symmetry_reduction=self.symmetry_reduction,
            equiv_planes=equiv_planes,
            uuid=self.uuid,
            name="invariant",
            db_file=c["DB_FILE"],
        )

        fws = [opt_fw, static_fw] + z2pack_fws + [analysis_fw]

        wf = Workflow(fws)
        wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

        # Add vdW corrections if structure is layered
        dim_data = StructureDimensionality(self.structure)

        if np.any(
            [
                dim == 2
                for dim in [dim_data.larsen_dim, dim_data.cheon_dim, dim_data.gorai_dim]
            ]
        ):
            wf = add_modify_incar(
                wf,
                modify_incar_params={
                    "incar_update": {
                        "IVDW": 11,
                        "EDIFFG": 0.005,
                        "IBRION": 2,
                        "NSW": 100,
                    }
                },
                fw_name_constraint="structure optimization",
            )

            wf = add_modify_incar(
                wf,
                modify_incar_params={"incar_update": {"IVDW": 11}},
                fw_name_constraint="static",
            )

            wf = add_modify_incar(
                wf,
                modify_incar_params={"incar_update": {"IVDW": 11}},
                fw_name_constraint="z2pack",
            )

        else:
            wf = add_modify_incar(
                wf,
                modify_incar_params={
                    "incar_update": {"EDIFFG": 0.005, "IBRION": 2, "NSW": 100}
                },
                fw_name_constraint="structure optimization",
            )

        # Helpful vasp settings and no parallelization
        wf = add_modify_incar(
            wf,
            modify_incar_params={
                "incar_update": {
                    "ADDGRID": ".TRUE.",
                    "LASPH": ".TRUE.",
                    "GGA": "PS",
                    "NCORE": 1,
                }
            },
        )

        # Generate inputs for Z2Pack with a static calc
        wf = add_modify_incar(
            wf,
            modify_incar_params={"incar_update": {"PREC": "Accurate"}},
            fw_name_constraint="static",
        )

        wf = add_common_powerups(wf, c)

        wf.name = "{} {}".format(self.structure.composition.reduced_formula, "Z2Pack")

        if c.get("STABILITY_CHECK", STABILITY_CHECK):
            wf = add_stability_check(wf, fw_name_constraint="structure optimization")

        if c.get("ADD_WF_METADATA", ADD_WF_METADATA):
            wf = add_wf_metadata(wf, self.structure)

        tag = "z2pack: {}".format(self.uuid)
        wf = add_tags(wf, [tag])

        return wf
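
A hedged sketch; Z2PackWF is a stand-in name for whichever wrapper class defines the get_wf method above, since the class itself is not shown in this snippet:

from pymatgen.core import Structure

structure = Structure.from_file("POSCAR")                 # placeholder
wf_maker = Z2PackWF(structure, symmetry_reduction=True)   # hypothetical constructor
wf = wf_maker.get_wf(c={"VASP_CMD": ">>vasp_cmd<<", "DB_FILE": ">>db_file<<"})
# all task docs from this run can be queried later via the "z2pack: <uuid>" tag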
Example #11
        wf = add_modify_incar(wf,
                              modify_incar_params={
                                  'incar_update': {
                                      'KPAR': 2,
                                      'NCORE': 4,
                                      'NSIM': 8,
                                      'EDIFF': 0.000002,
                                      'LMAXMIX': 6,
                                      'LSCALAPACK': '.FALSE.',
                                      'ALGO': 'All'
                                  }
                              })
        wf = add_modify_incar(wf,
                              modify_incar_params={
                                  'incar_update': {
                                      'AMIX': 0.2,
                                      'AMIX_MAG': 0.8,
                                      'BMIX': 0.00001,
                                      'BMIX_MAG': 0.00001,
                                      'ICHARG': 2,
                                      'EDIFFG': 0.0005 * site_num,
                                      'KPAR': 1
                                  }
                              },
                              fw_name_constraint='structure optimization')
        #           wf = add_stability_check(wf, fw_name_constraint = 'static') #default cutoff 0.1 eV/atom
        wf = add_tags(wf, [comp_tag, struct_tag, poscar_key])

        tilt_count = tilt_count + 1
        lpad.add_wf(wf)
Example #12
def get_aneb_wf(
    structure,
    working_ion,
    insert_coords,
    insert_coords_combinations,
    n_images,
    vasp_input_set=None,
    override_default_vasp_params=None,
    handler_group=None,
    selective_dynamics_scheme="fix_two_atom",
    launch_mode="all",
    vasp_cmd=VASP_CMD,
    db_file=DB_FILE,
    wall_time=None,
    additional_fields=None,
    tags=None,
    powerup_dicts=None,
    name="ApproxNEB",
):
    """
    Workflow for running the "ApproxNEB" algorithm to estimate
    energetic barriers for a working ion in a structure (host)
    between end point positions specified by insert_coords and
    insert_coords_combinations. Note this workflow is only
    intended for the dilute lattice limit (where one working
    ion is in a large supercell structure of the host and
    little volume change upon insertion is expected).
    By default, the workflow sets appropriate VASP input parameters
    and Custodian handler groups.

    This workflow uses an "approx_neb" collection to organize
    outputs and generate inputs for new VASP calculations for
    easier data management and analysis. An "approx_neb"
    additional field is automatically added to all task docs
    generated to assist record keeping.

    To make modifications to docs generated by this
    workflow, use of the additional_fields and tags arguments
    is recommended to ensure all fireworks, tasks collection
    docs, and approx_neb collection docs are modified.

    Args:
    structure (Structure): structure of empty host
    working_ion: specie of site to insert in structure
        (e.g. "Li").
    insert_coords (1x3 array or list of 1x3 arrays):
        fractional coordinates of site(s) to insert in
        structure (e.g. [[0,0,0], [0,0.25,0], [0.5,0,0]]).
    insert_coords_combinations (list of strings): list of
        strings corresponding to the list index of
        insert_coords to specify which combination
        of end_points to use for path interpolation.
        (e.g. ["0+1", "0+2"])
    n_images (int): number of images
        interpolated between end point structures for
        each path set by insert_coords_combinations
    vasp_input_set (VaspInputSet class): can use to
        define VASP input parameters.
        See pymatgen.io.vasp.sets module for more
        information. MPRelaxSet() and
        override_default_vasp_params are used if
        vasp_input_set = None.
    override_default_vasp_params (dict): if provided,
        vasp_input_set is disregarded and the Vasp Input
        Set is created by passing override_default_vasp_params
        to MPRelaxSet(). Allows for easy modification of
        MPRelaxSet().
        For example, to set ISIF=2 in the INCAR use:
        {"user_incar_settings":{"ISIF":2}}
    handler_group (str or [ErrorHandler]): group of handlers to
        use for RunVaspCustodian firetask. See handler_groups
        dict in the code for the groups and complete list of
        handlers in each group. Alternatively, you can specify a
        list of ErrorHandler objects.
    selective_dynamics_scheme (str): "fix_two_atom"
    launch_mode (str): "all" or "screening"
    vasp_cmd (str): the name of the full executable for running
        VASP.
    db_file (str): path to file containing the database
        credentials.
    wall_time (int): Total walltime in seconds. If this is None and
        the job is running on a PBS system, the handler will attempt to
        determine the walltime from the PBS_WALLTIME environment
        variable. If the wall time cannot be determined or is not
        set, this handler will have no effect.
    additional_fields (dict): specifies more information
        to be stored in the approx_neb collection to
        assist user record keeping.
    tags (list): list of strings to be stored in the
        approx_neb collection under the "tags" field to
        assist user record keeping.
    powerup_dicts (list): additional powerups given to all the dynamically
        created image fireworks
    name (str): name for the workflow returned

    Returns: Workflow
    """
    approx_neb_params = override_default_vasp_params or {
        "user_incar_settings": {
            "EDIFF": 0.0005,
            "EDIFFG": -0.05,
            "IBRION": 1,
            "ISIF": 3,
            "ISMEAR": 0,
            "LDAU": False,
            "NSW": 400,
            "ADDGRID": True,
            "ISYM": 1,
            "NELMIN": 4,
        }
    }

    handler_group = handler_group or [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler(),
        PositiveEnergyErrorHandler(),
        FrozenJobErrorHandler(),
        StdErrHandler(),
        WalltimeHandler(wall_time=wall_time),
    ]

    wf_uuid = str(uuid4())
    additional_fields = deepcopy(additional_fields)

    host_fw = HostFW(
        structure=structure,
        approx_neb_wf_uuid=wf_uuid,
        db_file=db_file,
        vasp_input_set=vasp_input_set,
        vasp_cmd=vasp_cmd,
        override_default_vasp_params=deepcopy(approx_neb_params),
        additional_fields=additional_fields,
        tags=tags,
    )

    # modifies incar settings needed for end point and image structure relaxations
    if "user_incar_settings" not in approx_neb_params.keys():
        approx_neb_params = {"user_incar_settings": {}}
    approx_neb_params["user_incar_settings"]["ISIF"] = 2
    approx_neb_params["user_incar_settings"]["ISYM"] = 0
    approx_neb_params["user_incar_settings"]["LDAU"] = False

    end_point_fws = []
    for n, coord in enumerate(insert_coords):
        end_point_fws.append(
            EndPointFW(
                approx_neb_wf_uuid=wf_uuid,
                insert_specie=working_ion,
                insert_coords=coord,
                end_points_index=n,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                parents=host_fw,
            ))

    evaluate_path_fws = []
    for end_points_combo in insert_coords_combinations:
        if isinstance(end_points_combo, (str)):
            combo = end_points_combo.split("+")
            if len(combo) == 2:
                c = [int(combo[0]), int(combo[-1])]
            else:
                raise ValueError(
                    "string format in insert_coords_combinations is incorrect")

        evaluate_path_fws.append(
            EvaluatePathFW(
                approx_neb_wf_uuid=wf_uuid,
                end_points_combo=end_points_combo,
                mobile_specie=working_ion,
                n_images=n_images,
                selective_dynamics_scheme=selective_dynamics_scheme,
                launch_mode=launch_mode,
                vasp_cmd=vasp_cmd,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                handler_group=handler_group,
                parents=[end_point_fws[c[0]], end_point_fws[c[1]]],
                add_additional_fields=additional_fields,
                add_tags=tags,
            ))

    wf = Workflow([host_fw] + end_point_fws + evaluate_path_fws)

    wf = use_custodian(wf, custodian_params={"handler_group": handler_group})
    if isinstance(tags, (list)):
        wf = add_tags(wf, tags)
    if isinstance(additional_fields, (dict)):
        wf = add_additional_fields_to_taskdocs(wf,
                                               update_dict=additional_fields)
    if powerup_dicts is not None:
        wf = powerup_by_kwargs(wf, powerup_dicts)
        for fw in wf.fws:
            fw.spec["vasp_powerups"] = powerup_dicts
    wf.metadata.update({"approx_neb_wf_uuid": wf_uuid})
    wf.name = name

    return wf
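
A usage sketch assembled from the argument examples in the docstring above; the host structure file and the tag/field values are placeholders:

from pymatgen.core import Structure

host = Structure.from_file("host_POSCAR")  # placeholder empty-host structure
wf = get_aneb_wf(
    structure=host,
    working_ion="Li",
    insert_coords=[[0, 0, 0], [0, 0.25, 0], [0.5, 0, 0]],
    insert_coords_combinations=["0+1", "0+2"],
    n_images=5,
    tags=["approx_neb_screening"],
    additional_fields={"project": "example"},
)
# tags and additional_fields propagate to the tasks and approx_neb collections
# via add_tags and add_additional_fields_to_taskdocs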
Example #13
def get_wf_ferroelectric(polar_structure, nonpolar_structure, vasp_cmd="vasp", db_file=None,
                         vasp_input_set_polar="MPStaticSet", vasp_input_set_nonpolar="MPStaticSet",
                         relax=False, vasp_relax_input_set_polar=None, vasp_relax_input_set_nonpolar=None,
                         nimages=9, hse=False, add_analysis_task=False, wfid=None,
                         tags=None):
    """
    Returns a workflow to calculate the spontaneous polarization of polar_structure using
    a nonpolar reference phase structure and linear interpolations between the polar and
    nonpolar structure.

    The nonpolar and polar structures must be in the same space group setting and atoms ordered
    such that a linear interpolation can be performed to create intermediate structures along
    the distortion.

    For example, to calculate the polarization of orthorhombic BaTiO3 (space group 38) using
    the cubic structure (space group 221) as the nonpolar reference phase, we must transform
    the cubic to the orthorhombic setting. This can be accomplished using Bilbao Crystallographic
    Server's Structure Relations tool. (http://www.cryst.ehu.es/cryst/rel.html)

    Args:
        polar_structure (Structure): polar structure of candidate ferroelectric
        nonpolar_structure (Structure): nonpolar reference structure in polar setting
        vasp_input_set_polar (DictVaspInputSet): VASP polar input set. Defaults to MPStaticSet.
        vasp_input_set_nonpolar (DictVaspInputSet): VASP nonpolar input set. Defaults to MPStaticSet.
        vasp_relax_input_set_polar (DictVaspInputSet): VASP polar input set. Defaults to MPRelaxSet.
        vasp_relax_input_set_nonpolar (DictVaspInputSet): VASP nonpolar input set. Defaults to MPRelaxSet.
        vasp_cmd (str): command to run
        db_file (str): path to file containing the database credentials.
        nimages (int): Number of interpolations calculated from polar to nonpolar structures, including the nonpolar.
            For example, nimages = 9 will calculate 8 interpolated structures. 8 interpolations + nonpolar = 9.
        add_analysis_task (bool): Analyze polarization and energy trends as part of workflow. Default False.
        wfid (string): Unique workflow id starting with "wfid_". If None this is automatically generated (recommended).
        tags (list of strings): Additional tags to add such as identifiers for structures.

    Returns:
        Workflow
    """
    wf = []

    if wfid is None:
        wfid = 'wfid_' + get_a_unique_id()
    if tags is None:
        tags = []

    if relax:
        polar_relax = OptimizeFW(structure=polar_structure, name="_polar_relaxation",
                                 vasp_cmd=vasp_cmd, db_file=db_file, vasp_input_set=vasp_relax_input_set_polar)
        nonpolar_relax = OptimizeFW(structure=nonpolar_structure, name="_nonpolar_relaxation",
                                    vasp_cmd=vasp_cmd, db_file=db_file, vasp_input_set=vasp_relax_input_set_nonpolar)
        wf.append(polar_relax)
        wf.append(nonpolar_relax)
        parents_polar = polar_relax
        parents_nonpolar = nonpolar_relax
    else:
        parents_polar = None
        parents_nonpolar = None

    # Run polarization calculation on polar structure.
    # Defuse workflow if polar structure is metallic.
    polar = LcalcpolFW(structure=polar_structure,
                       name="_polar_polarization",
                       static_name="_polar_static",
                       parents=parents_polar,
                       vasp_cmd=vasp_cmd, db_file=db_file,
                       vasp_input_set=vasp_input_set_polar)

    # Run polarization calculation on nonpolar structure.
    # Defuse workflow if nonpolar structure is metallic.
    nonpolar = LcalcpolFW(structure=nonpolar_structure,
                          name="_nonpolar_polarization",
                          static_name="_nonpolar_static",
                          parents=parents_nonpolar,
                          vasp_cmd=vasp_cmd, db_file=db_file,
                          vasp_input_set=vasp_input_set_nonpolar)

    # Interpolation polarization
    interpolation = []
    # Interpolations start from one increment after polar and end prior to nonpolar.
    # The Structure.interpolate method adds an additional image for the nonpolar endpoint.
    # Defuse children fireworks if metallic.
    for i in range(1, nimages):
        # nonpolar_structure is being used as a dummy structure.
        # The structure will be replaced by the interpolated structure generated by
        # StaticInterpolatedFW.
        # Defuse workflow if interpolated structure is metallic.
        interpolation.append(
            LcalcpolFW(structure=polar_structure,
                       name="_interpolation_{}_polarization".format(str(i)),
                       static_name="_interpolation_{}_static".format(str(i)),
                       vasp_cmd=vasp_cmd, db_file=db_file,
                       vasp_input_set=vasp_input_set_polar, interpolate=True,
                       start="_polar_static",
                       end="_nonpolar_static",
                       nimages=nimages, this_image=i, parents=[polar, nonpolar]))

    wf.append(polar)
    wf.append(nonpolar)
    wf += interpolation

    # Add FireTask that uses Polarization object to store spontaneous polarization information
    if add_analysis_task:
        fw_analysis = Firework(PolarizationToDb(db_file=db_file, name="_polarization_post_processing"),
                               parents=interpolation, name="_polarization_post_processing")
        wf.append(fw_analysis)

    # Run HSE band gap calculation
    if hse:
        # Run HSE calculation at band gap for polar calculation if polar structure is not metallic
        hse = HSEBSFW(structure=polar_structure, parents=polar, name="_polar_hse_gap", vasp_cmd=vasp_cmd,
                      db_file=db_file, calc_loc="_polar_polarization")
        wf.append(hse)

    # Create Workflow task and add tags to workflow
    workflow = Workflow(wf)
    workflow = add_tags(workflow, [wfid] + tags)

    return workflow