Code example #1
 def test_powerup_by_kwargs(self):
     my_wf = copy_wf(self.bs_wf)
     my_wf = powerup_by_kwargs(
         my_wf, [{"powerup_name": "add_trackers", "kwargs": {}}]
     )
     my_wf = powerup_by_kwargs(
         my_wf,
         [{"powerup_name": "add_tags", "kwargs": {"tags_list": ["foo", "bar"]}}],
     )
     for fw in my_wf.fws:
         self.assertEqual(len(fw.spec["_trackers"]), 2)
     self.assertEqual(my_wf.metadata["tags"], ["foo", "bar"])
Code example #2
 def test_powerup_by_kwargs(self):
     my_wf = copy_wf(self.bs_wf)
     my_wf = powerup_by_kwargs(
         my_wf,
         [
             {"powerup_name": "add_tags", "kwargs": {"tags_list": ["foo", "bar"]}},
             {
                 "powerup_name": "atomate.common.powerups.add_priority",
                 "kwargs": {"root_priority": 123},
             },
         ],
     )
     self.assertEqual(my_wf.metadata["tags"], ["foo", "bar"])
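The two tests above pass powerup entries either by short name ("add_trackers", "add_tags") or by fully qualified path ("atomate.common.powerups.add_priority"). As a rough illustration of how such entries can be resolved and applied, the sketch below shows a simplified dispatch loop; it is not the actual atomate implementation, the SHORT_NAME_REGISTRY is hypothetical, and the imports assume add_tags and add_priority live in atomate.common.powerups (example #2 confirms this only for add_priority).

from importlib import import_module

from atomate.common.powerups import add_priority, add_tags

# Hypothetical short-name registry standing in for atomate's own lookup of
# powerup functions across its powerup modules.
SHORT_NAME_REGISTRY = {"add_tags": add_tags, "add_priority": add_priority}


def apply_powerups(wf, powerup_dicts):
    """Simplified, powerup_by_kwargs-style dispatch: resolve each name, then call it."""
    for entry in powerup_dicts:
        name = entry["powerup_name"]
        kwargs = entry.get("kwargs", {})
        if "." in name:
            # fully qualified path, e.g. "atomate.common.powerups.add_priority"
            module_path, func_name = name.rsplit(".", 1)
            powerup = getattr(import_module(module_path), func_name)
        else:
            powerup = SHORT_NAME_REGISTRY[name]
        wf = powerup(wf, **kwargs)
    return wf

# usage mirrors the tests above, e.g.:
# wf = apply_powerups(wf, [{"powerup_name": "add_tags", "kwargs": {"tags_list": ["foo"]}}])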
Code example #3
def get_powerup_wf(wf, fw_spec, additional_fields=None):
    """
    Check fw_spec['vasp_powerups'] for powerups and apply them to the workflow,
    then add/overwrite the additional fields in the task documents with the user inputs.
    Args:
        wf: the Workflow to apply the powerups to
        fw_spec: the fw_spec of the current workflow
        additional_fields: the additional fields to be added to the task documents
    Returns:
        Updated workflow
    """
    powerup_list = []
    powerup_list.extend(fw_spec.get("vasp_powerups", []))
    if additional_fields is not None:
        powerup_list.append({
            "powerup_name": "add_additional_fields_to_taskdocs",
            "kwargs": {
                "update_dict": additional_fields
            },
        })
    return powerup_by_kwargs(wf, powerup_list)
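A minimal usage sketch for the get_powerup_wf helper above. The placeholder ScriptTask workflow, the fw_spec contents, and the additional_fields values are illustrative only; in practice wf would be a real VASP workflow.

from fireworks import Firework, ScriptTask, Workflow

# placeholder workflow standing in for a real VASP workflow
wf = Workflow([Firework(ScriptTask.from_str('echo "placeholder"'), name="placeholder")])

# illustrative fw_spec carrying powerups forward, e.g. from a parent firework
fw_spec = {
    "vasp_powerups": [
        {"powerup_name": "add_tags", "kwargs": {"tags_list": ["production"]}},
    ]
}

# additional_fields are appended as an add_additional_fields_to_taskdocs powerup
wf = get_powerup_wf(wf, fw_spec, additional_fields={"project": "ion_migration"})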
Code example #4
File: approx_neb.py  Project: samblau/atomate
def get_aneb_wf(
    structure,
    working_ion,
    insert_coords,
    insert_coords_combinations,
    n_images,
    vasp_input_set=None,
    override_default_vasp_params=None,
    handler_group=None,
    selective_dynamics_scheme="fix_two_atom",
    launch_mode="all",
    vasp_cmd=VASP_CMD,
    db_file=DB_FILE,
    wall_time=None,
    additional_fields=None,
    tags=None,
    powerup_dicts=None,
    name="ApproxNEB",
):
    """
    Workflow for running the "ApproxNEB" algorithm to estimate
    energetic barriers for a working ion in a structure (host)
    between end point positions specified by insert_coords and
    insert_coords_combinations. Note this workflow is only
    intended for the dilute lattice limit (where one working
    ion is in a large supercell structure of the host and
    little volume change upon insertion is expected).
    By default, the workflow sets appropriate VASP input parameters
    and Custodian handler groups.

    This workflow uses an "approx_neb" collection to organize
    outputs and generate inputs for new VASP calculations for
    easier data management and analysis. An "approx_neb"
    additional field is automatically added to all task docs
    generated to assist record keeping.

    To make modifications to docs generated by this
    workflow, use of the additional_fields and tags arguments
    is recommended to ensure all fireworks, tasks collection
    docs, and approx_neb collection docs are modified.

    Args:
    structure (Structure): structure of empty host
    working_ion: specie of site to insert in structure
        (e.g. "Li").
    insert_coords (1x3 array or list of 1x3 arrays):
        fractional coordinates of site(s) to insert in
        structure (e.g. [[0,0,0], [0,0.25,0], [0.5,0,0]]).
    insert_coords_combinations (list of strings): list of
        strings corresponding to the list index of
        insert_coords to specify which combination
        of end_points to use for path interpolation.
        (e.g. ["0+1", "0+2"])
    n_images (int): number of images
        interpolated between end point structures for
        each path set by insert_coords_combinations
    vasp_input_set (VaspInputSet class): can use to
        define VASP input parameters.
        See pymatgen.io.vasp.sets module for more
        information. MPRelaxSet() and
        override_default_vasp_params are used if
        vasp_input_set = None.
    override_default_vasp_params (dict): if provided,
        vasp_input_set is disregarded and the Vasp Input
        Set is created by passing override_default_vasp_params
        to MPRelaxSet(). Allows for easy modification of
        MPRelaxSet().
        For example, to set ISIF=2 in the INCAR use:
        {"user_incar_settings":{"ISIF":2}}
    handler_group (str or [ErrorHandler]): group of handlers to
        use for RunVaspCustodian firetask. See handler_groups
        dict in the code for the groups and complete list of
        handlers in each group. Alternatively, you can specify a
        list of ErrorHandler objects.
    selective_dynamics_scheme (str): "fix_two_atom"
    launch_mode (str): "all" or "screening"
    vasp_cmd (str): the name of the full executable for running
        VASP.
    db_file (str): path to file containing the database
        credentials.
    wall_time (int): Total walltime in seconds. If this is None and
        the job is running on a PBS system, the handler will attempt to
        determine the walltime from the PBS_WALLTIME environment
        variable. If the wall time cannot be determined or is not
        set, this handler will have no effect.
    additional_fields (dict): specifies more information
        to be stored in the approx_neb collection to
        assist user record keeping.
    tags (list): list of strings to be stored in the
        approx_neb collection under the "tags" field to
        assist user record keeping.
    powerup_dicts (list): additional powerups given to all the dynamically
        created image fireworks
    name (str): name for the workflow returned

    Returns: Workflow
    """
    approx_neb_params = override_default_vasp_params or {
        "user_incar_settings": {
            "EDIFF": 0.0005,
            "EDIFFG": -0.05,
            "IBRION": 1,
            "ISIF": 3,
            "ISMEAR": 0,
            "LDAU": False,
            "NSW": 400,
            "ADDGRID": True,
            "ISYM": 1,
            "NELMIN": 4,
        }
    }

    handler_group = handler_group or [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler(),
        PositiveEnergyErrorHandler(),
        FrozenJobErrorHandler(),
        StdErrHandler(),
        WalltimeHandler(wall_time=wall_time),
    ]

    wf_uuid = str(uuid4())
    additional_fields = deepcopy(additional_fields)

    host_fw = HostFW(
        structure=structure,
        approx_neb_wf_uuid=wf_uuid,
        db_file=db_file,
        vasp_input_set=vasp_input_set,
        vasp_cmd=vasp_cmd,
        override_default_vasp_params=deepcopy(approx_neb_params),
        additional_fields=additional_fields,
        tags=tags,
    )

    # modify INCAR settings needed for end point and image structure relaxations,
    # keeping any other override parameters that were passed in
    approx_neb_params.setdefault("user_incar_settings", {})
    approx_neb_params["user_incar_settings"]["ISIF"] = 2
    approx_neb_params["user_incar_settings"]["ISYM"] = 0
    approx_neb_params["user_incar_settings"]["LDAU"] = False

    end_point_fws = []
    for n, coord in enumerate(insert_coords):
        end_point_fws.append(
            EndPointFW(
                approx_neb_wf_uuid=wf_uuid,
                insert_specie=working_ion,
                insert_coords=coord,
                end_points_index=n,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                parents=host_fw,
            ))

    evaluate_path_fws = []
    for end_points_combo in insert_coords_combinations:
        if isinstance(end_points_combo, str):
            combo = end_points_combo.split("+")
            if len(combo) == 2:
                c = [int(combo[0]), int(combo[-1])]
            else:
                raise ValueError(
                    "string format in insert_coords_combinations is incorrect")
        else:
            raise TypeError(
                "insert_coords_combinations must contain strings such as '0+1'")

        evaluate_path_fws.append(
            EvaluatePathFW(
                approx_neb_wf_uuid=wf_uuid,
                end_points_combo=end_points_combo,
                mobile_specie=working_ion,
                n_images=n_images,
                selective_dynamics_scheme=selective_dynamics_scheme,
                launch_mode=launch_mode,
                vasp_cmd=vasp_cmd,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                handler_group=handler_group,
                parents=[end_point_fws[c[0]], end_point_fws[c[1]]],
                add_additional_fields=additional_fields,
                add_tags=tags,
            ))

    wf = Workflow([host_fw] + end_point_fws + evaluate_path_fws)

    wf = use_custodian(wf, custodian_params={"handler_group": handler_group})
    if isinstance(tags, (list)):
        wf = add_tags(wf, tags)
    if isinstance(additional_fields, (dict)):
        wf = add_additional_fields_to_taskdocs(wf,
                                               update_dict=additional_fields)
    if powerup_dicts is not None:
        wf = powerup_by_kwargs(wf, powerup_dicts)
        for fw in wf.fws:
            fw.spec["vasp_powerups"] = powerup_dicts
    wf.metadata.update({"approx_neb_wf_uuid": wf_uuid})
    wf.name = name

    return wf
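A hedged invocation sketch for get_aneb_wf as defined above. The structure file name, coordinates, tag, and powerup entry are placeholder values, not recommendations; powerup_dicts are stored in fw.spec["vasp_powerups"] so the dynamically created image fireworks pick them up.

from pymatgen.core import Structure

# placeholder host structure; in practice, load your relaxed empty host
host = Structure.from_file("host_POSCAR")

wf = get_aneb_wf(
    structure=host,
    working_ion="Li",
    insert_coords=[[0, 0, 0], [0.5, 0.5, 0.5]],
    insert_coords_combinations=["0+1"],
    n_images=5,
    tags=["approx_neb_demo"],
    # propagated to the dynamically created image fireworks via fw.spec["vasp_powerups"]
    powerup_dicts=[
        {"powerup_name": "add_priority", "kwargs": {"root_priority": 10}},
    ],
)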
Code example #5
    def run_task(self, fw_spec):
        # get the database connection
        db_file = env_chk(self["db_file"], fw_spec)
        mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
        mmdb.collection = mmdb.db["approx_neb"]
        wf_uuid = self["approx_neb_wf_uuid"]
        launch_mode = self["launch_mode"]
        images_key = self["images_key"]

        approx_neb_doc = mmdb.collection.find_one({"wf_uuid": wf_uuid}, {"images": 1})
        all_images = approx_neb_doc["images"]

        # get structure_path of desired images and sort into structure_paths
        if images_key and isinstance(all_images, (dict)):
            images = all_images[images_key]
            max_n = len(images)
            if launch_mode == "all":
                structure_paths = [
                    "images." + images_key + "." + str(n) + ".input_structure"
                    for n in range(0, max_n)
                ]
            elif launch_mode == "screening":
                structure_paths = self.get_and_sort_paths(
                    max_n=max_n, images_key=images_key
                )
        elif isinstance(all_images, (dict)):
            structure_paths = dict()
            if launch_mode == "all":
                for key, images in all_images.items():
                    max_n = len(images)
                    structure_paths[key] = [
                        "images." + key + "." + str(n) + ".input_structure"
                        for n in range(0, max_n)
                    ]
            elif launch_mode == "screening":
                for key, images in all_images.items():
                    structure_paths[key] = self.get_and_sort_paths(
                        max_n=len(images), images_key=key
                    )

        # get list of fireworks to launch
        if isinstance(structure_paths, (list)):
            if isinstance(structure_paths[0], (str)):
                relax_image_fws = []
                for path in structure_paths:
                    relax_image_fws.append(self.get_fw(structure_path=path))
            else:
                relax_image_fws = self.get_screening_fws(sorted_paths=structure_paths)
        elif isinstance(structure_paths, (dict)):
            relax_image_fws = []
            if launch_mode == "all":
                for key in structure_paths.keys():
                    for path in structure_paths[key]:
                        relax_image_fws.append(self.get_fw(structure_path=path))
            elif launch_mode == "screening":
                for key in structure_paths.keys():
                    sorted_paths = structure_paths[key]
                    relax_image_fws.extend(
                        self.get_screening_fws(sorted_paths=sorted_paths)
                    )

        # place fws in temporary wf in order to use powerup_by_kwargs
        # to apply powerups to image fireworks
        if "vasp_powerups" in fw_spec.keys():
            temp_wf = Workflow(relax_image_fws)
            powerup_dicts = fw_spec["vasp_powerups"]
            temp_wf = powerup_by_kwargs(temp_wf, powerup_dicts)
            relax_image_fws = temp_wf.fws

        return FWAction(additions=relax_image_fws)
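The last block of run_task wraps the dynamically created fireworks in a throwaway Workflow so that workflow-level powerups stored in fw_spec["vasp_powerups"] can be applied before the fireworks are returned via FWAction. A standalone sketch of that pattern follows; the placeholder fireworks and the powerup_by_kwargs import path are assumptions (adjust the import to wherever powerup_by_kwargs lives in your atomate version).

from fireworks import Firework, ScriptTask, Workflow

# import path assumed; powerup_by_kwargs may also be exposed via atomate.vasp.powerups
from atomate.common.powerups import powerup_by_kwargs

# placeholder fireworks standing in for the dynamically created image relaxations
new_fws = [
    Firework(ScriptTask.from_str('echo "image"'), name=f"image {i}") for i in range(3)
]

powerup_dicts = [{"powerup_name": "add_tags", "kwargs": {"tags_list": ["dynamic"]}}]

# wrap in a temporary Workflow, apply the stored powerups, then recover the modified fws
temp_wf = powerup_by_kwargs(Workflow(new_fws), powerup_dicts)
new_fws = temp_wf.fws

# the modified fireworks would then be returned with FWAction(additions=new_fws)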
Code example #6
File: electrode.py  Project: samblau/atomate
def get_ion_insertion_wf(
    structure: Structure,
    working_ion: str,
    structure_matcher: StructureMatcher = None,
    db_file: str = DB_FILE,
    vasptodb_kwargs: dict = None,
    volumetric_data_type: str = "CHGCAR",
    vasp_powerups: List[dict] = None,
    attempt_insertions: int = 4,
    max_inserted_atoms: int = None,
    allow_fizzled_parents: bool = True,
    optimizefw_kwargs: dict = None,
    staticfw_kwargs: dict = None,
):
    """
    Take the output of the static workflow and iteratively insert working ions based on charge density analysis.

    The workflow performs the following tasks.
    (StaticFW) <- stores the charge density and pushes its task_id ("base_task_id") to the rest of the workflow
    (AnalyzeChgcar) <- obtains the set of possible unique insertions using the stored charge density
    (GetInsertionCalcs) <- contains the dynamic workflow creation that will keep inserting working ions

    Args:
        structure: The host structure to begin inserting on
        working_ion: The working ion to be inserted at each step
        structure_matcher: StructureMatcher object used to define topotactic insertion
        db_file: The db_file that defines the VASP output database
        vasptodb_kwargs: vasptodb_kwargs for the static workflow
        volumetric_data_type: the type of volumetric data used to determine the insertion sites
        vasp_powerups: additional powerups given to all the dynamically created workflows
        optimizefw_kwargs: additional kwargs for all the OptimizeFWs
        max_inserted_atoms: the limit on the total number of ions to insert
        attempt_insertions: number of insertion sites to run at each insertion step
        allow_fizzled_parents: stored in fw.spec and passed on to the dynamically created insertion steps
        staticfw_kwargs: additional kwargs for all the StaticFWs
    """

    if not structure.is_ordered:
        raise ValueError(
            "Please obtain an ordered approximation of the input structure.")

    if optimizefw_kwargs is None:
        optimizefw_kwargs = {}
    if staticfw_kwargs is None:
        staticfw_kwargs = {}

    # Configure the optimization and static FWs for the base material
    vasptodb_kwargs = vasptodb_kwargs if vasptodb_kwargs is not None else {}
    vasptodb_kwargs_vol_data = {
        "CHGCAR": ["CHGCAR"],
        "AECCAR": ["AECCAR0", "AECCAR2"]
    }

    vasptodb_kwargs.update({
        "store_volumetric_data":
        vasptodb_kwargs_vol_data[volumetric_data_type],
        "task_fields_to_push": {
            "base_task_id": "task_id"
        },
    })

    opt_wf = OptimizeFW(structure=structure,
                        db_file=db_file,
                        **optimizefw_kwargs)

    pass_task = pass_vasp_result(
        filename="vasprun.xml.relax2.gz",
        pass_dict=">>output.ionic_steps.-1.structure",
        mod_spec_key="prev_calc_structure",
    )
    opt_wf.tasks.append(pass_task)

    static_wf = StaticFW(structure=structure,
                         vasptodb_kwargs=vasptodb_kwargs,
                         db_file=db_file,
                         parents=[opt_wf],
                         spec_structure_key="prev_calc_structure",
                         **staticfw_kwargs)

    wf_name = "{}-{}".format(
        structure.composition.reduced_formula if structure else "unknown",
        "insertion",
    )

    # Configure the analysis FW
    analysis_wf = Firework(
        [AnalyzeChgcar(), GetInsertionCalcs()],
        parents=[static_wf],
        name="Charge Density Analysis-0",
    )
    analysis_wf.spec["working_ion"] = working_ion

    # Create the initial workflow
    wf = Workflow([opt_wf, static_wf, analysis_wf], name=wf_name)

    # Apply the vasp powerups if present
    if vasp_powerups is not None:
        wf = powerup_by_kwargs(wf, vasp_powerups)
        for fw in wf.fws:
            fw.spec["vasp_powerups"] = vasp_powerups

    if structure_matcher is not None:
        sm_dict = structure_matcher.as_dict()
        for fw in wf.fws:
            fw.spec["structure_matcher"] = sm_dict

    # write the persistent specs to all the fws
    # Note this is probably redundant but is easier
    for fw in wf.fws:
        fw.spec["db_file"] = db_file
        fw.spec["attempt_insertions"] = attempt_insertions
        fw.spec["vasptodb_kwargs"] = vasptodb_kwargs
        fw.spec["staticfw_kwargs"] = staticfw_kwargs
        fw.spec["optimizefw_kwargs"] = optimizefw_kwargs
        fw.spec["allow_fizzled_parents"] = allow_fizzled_parents
        fw.spec["volumetric_data_type"] = volumetric_data_type
        fw.spec["max_inserted_atoms"] = max_inserted_atoms

    return wf
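A minimal invocation sketch for get_ion_insertion_wf as defined above. The structure file, working ion, and powerup entry are placeholder choices; the vasp_powerups list is copied into every fw.spec so the dynamically created insertion calculations inherit it.

from pymatgen.core import Structure

# placeholder host structure; replace with your relaxed, ordered host material
host = Structure.from_file("host_POSCAR")

wf = get_ion_insertion_wf(
    structure=host,
    working_ion="Mg",
    volumetric_data_type="AECCAR",
    attempt_insertions=2,
    # propagated to every firework via fw.spec["vasp_powerups"]
    vasp_powerups=[
        {
            "powerup_name": "add_additional_fields_to_taskdocs",
            "kwargs": {"update_dict": {"batch": "insertion_test"}},
        },
    ],
)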