Example 1
    def workflow(self, model):
        """
        @brief    Create a FireWorks Workflow object performing initialisation.
        @details
                  The workflow runs exact simulations for the new data points
                  and appends a parameter-fitting workflow to them. If no new
                  points are required, the substitute models (if any) are
                  initialised instead; otherwise an empty Workflow is returned.

        @param model surrogate model object.

        @return Workflow object
        """
        ## Call the newPoints method to receive a list of dictionaries,
        #  each dictionary representing one data point.
        p = self.newPoints(model)
        if len(p):
            wf = model.exactTasks(p)
            wf.append_wf(
                model.parameterFittingStrategy().workflow(model),
                wf.leaf_fw_ids
            )
            return wf

        elif not len(p) and len(model.substituteModels):
            wf = Workflow([])
            for sm in model.substituteModels:
                wf.append_wf(
                    sm.initialisationStrategy().workflow(sm),
                    []
                )
            return wf

        else:
            return Workflow([])
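A minimal usage sketch for the strategy above, assuming a configured FireWorks LaunchPad and a surrogate-model object ('model') exposing the accessors used in the snippet; the variable names below are illustrative, not part of the snippet itself.

from fireworks import LaunchPad

strategy = model.initialisationStrategy()   # assumed accessor on the model
wf = strategy.workflow(model)               # one of the three branches above
if wf.fws:                                  # Workflow([]) means nothing to run
    LaunchPad.auto_load().add_wf(wf)        # uses the default LaunchPad configuration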
Example 2
    def workflow(self, model):
        """
        @brief    Create a FireWorks Workflow object performing initialisation.
        @details
                  The workflow runs exact simulations for the new data points
                  and appends a parameter-fitting workflow to them. If no new
                  points are required, the substitute models (if any) are
                  initialised instead; otherwise an empty Workflow is returned.

        @param model surrogate model object.

        @return Workflow object
        """
        ## Call the newPoints method to receive a list of dictionaries,
        #  each dictionary representing one data point.
        p = self.newPoints(model)
        if len(p):
            wf = model.exactTasks(p)
            wf.append_wf(model.parameterFittingStrategy().workflow(model),
                         wf.leaf_fw_ids)
            return wf

        elif not len(p) and len(model.substituteModels):
            wf = Workflow([])
            for sm in model.substituteModels:
                wf.append_wf(sm.initialisationStrategy().workflow(sm), [])
            return wf

        else:
            return Workflow([])
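The append_wf(..., leaf_fw_ids) pattern used in both examples chains the parameter-fitting workflow after every leaf of the exact-simulation workflow. A small, self-contained FireWorks illustration of that mechanism (generic tasks, not MoDeNa-specific; the script contents are placeholders):

from fireworks import Firework, ScriptTask, Workflow

exact = Workflow([Firework(ScriptTask.from_str('echo "exact simulation"'),
                           name='exact')])
fitting = Workflow([Firework(ScriptTask.from_str('echo "fit parameters"'),
                             name='fit')])
# 'fit' becomes a child of every leaf of 'exact', so it runs afterwards
exact.append_wf(fitting, exact.leaf_fw_ids)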
Example 3
    def workflow(self, model):
        """
        """
        # Get initial data
        points = self.newPoints(model)

        # Save initial data in database
        model.updateFitDataFromFwSpec(points)
        model.updateMinMax()
        model.save()

        wf = Workflow([], name='initialising to dataset')
        wf.append_wf(model.parameterFittingStrategy().workflow(model),
                     wf.leaf_fw_ids)

        return wf
Example 4
    def workflow(self, model):
        """
        """
        # Get initial data
        points = self.newPoints(model)

        # Save initial data in database
        model.updateFitDataFromFwSpec(points)
        model.updateMinMax()
        model.save()

        wf = Workflow([], name='initialising to dataset')
        wf.append_wf(model.parameterFittingStrategy().workflow(model),
                     wf.leaf_fw_ids)

        return wf
Example 5
def get_wf_slab(slab,
                include_bulk_opt=False,
                adsorbates=None,
                ads_structures_params=None,
                ads_site_finder_params=None,
                vasp_cmd="vasp",
                db_file=None,
                add_molecules_in_box=False,
                user_incar_settings=None):
    """
    Gets a workflow corresponding to a slab calculation along with optional
    adsorbate calcs and precursor oriented unit cell optimization

    Args:
        slab (Slab or Structure): slab to calculate
        include_bulk_opt (bool): whether to include bulk optimization,
            this flag sets the slab fireworks to be TransmuterFWs based
            on bulk optimization of oriented unit cells
        adsorbates ([Molecule]): list of molecules to place as adsorbates
        ads_structures_params (dict): parameters to be supplied as
            kwargs to AdsorbateSiteFinder.generate_adsorption_structures
        ads_site_finder_params (dict): parameters to be supplied as
            kwargs to AdsorbateSiteFinder
        user_incar_settings (dict): user INCAR settings applied to the
            adsorbate optimization fireworks
        add_molecules_in_box (boolean): flag to add calculation of
            adsorbate molecule energies to the workflow
        db_file (string): path to database file
        vasp_cmd (string): vasp command

    Returns:
        Workflow
    """
    fws, parents = [], []

    if adsorbates is None:
        adsorbates = []

    if ads_structures_params is None:
        ads_structures_params = {}

    if ads_site_finder_params is None:
        ads_site_finder_params = {}

    # Add bulk opt firework if specified
    if include_bulk_opt:
        oriented_bulk = slab.oriented_unit_cell
        vis = MPSurfaceSet(oriented_bulk, bulk=True)
        fws.append(
            OptimizeFW(structure=oriented_bulk,
                       vasp_input_set=vis,
                       vasp_cmd=vasp_cmd,
                       db_file=db_file))
        parents = fws[-1]

    name = slab.composition.reduced_formula
    if getattr(slab, "miller_index", None):
        name += "_{}".format(slab.miller_index)
    # Create slab fw and add it to list of fws
    slab_fw = get_slab_fw(slab,
                          include_bulk_opt,
                          db_file=db_file,
                          vasp_cmd=vasp_cmd,
                          parents=parents,
                          name="{} slab optimization".format(name))
    fws.append(slab_fw)

    for adsorbate in adsorbates:
        ads_slabs = AdsorbateSiteFinder(
            slab, **ads_site_finder_params).generate_adsorption_structures(
                adsorbate, **ads_structures_params)
        for n, ads_slab in enumerate(ads_slabs):
            # Create adsorbate fw
            ads_name = "{}-{} adsorbate optimization {}".format(
                adsorbate.composition.formula, name, n)
            adsorbate_fw = get_slab_fw(ads_slab,
                                       include_bulk_opt,
                                       db_file=db_file,
                                       vasp_cmd=vasp_cmd,
                                       parents=parents,
                                       name=ads_name,
                                       user_incar_settings=user_incar_settings)
            fws.append(adsorbate_fw)

    if isinstance(slab, Slab):
        name = "{}_{} slab workflow".format(
            slab.composition.reduced_composition, slab.miller_index)
    else:
        name = "{} slab workflow".format(slab.composition.reduced_composition)

    wf = Workflow(fws, name=name)

    # Add optional molecules workflow
    if add_molecules_in_box:
        molecule_wf = get_wf_molecules(adsorbates,
                                       db_file=db_file,
                                       vasp_cmd=vasp_cmd)
        wf.append_wf(molecule_wf)

    return wf
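A hedged usage sketch for get_wf_slab: the slab is built with pymatgen's SlabGenerator and the resulting workflow is submitted to a LaunchPad. The import path matches where this function lives in atomate (atomate.vasp.workflows.base.adsorption), but verify it against your installed version; the input file name, Miller index, and O2 adsorbate are illustrative only.

from fireworks import LaunchPad
from pymatgen.core import Molecule, Structure
from pymatgen.core.surface import SlabGenerator
from atomate.vasp.workflows.base.adsorption import get_wf_slab

bulk = Structure.from_file("POSCAR")                     # illustrative input file
slab = SlabGenerator(bulk, miller_index=(1, 1, 1),
                     min_slab_size=10.0, min_vacuum_size=15.0).get_slab()
o2 = Molecule(["O", "O"], [[0, 0, 0], [0, 0, 1.21]])     # simple O2 adsorbate
wf = get_wf_slab(slab, include_bulk_opt=True, adsorbates=[o2],
                 vasp_cmd=">>vasp_cmd<<", db_file=">>db_file<<")
LaunchPad.auto_load().add_wf(wf)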
Example 6
def get_wf_hubbard_hund_linresp(structure,
                                user_incar_settings=None,
                                relax_nonmagnetic=True,
                                spin_polarized=True,
                                applied_potential_range=(-0.2, 0.2),
                                num_evals=9,
                                site_indices_perturb=None,
                                species_perturb=None,
                                find_nearest_sites=True,
                                parallel_scheme=0,
                                ediff_tight=None,
                                c=None):
    """
    Compute Hubbard U (and Hund J) on-site interaction values using the GGA+U
    linear response method proposed by Cococcioni et al.
    (DOI: 10.1103/PhysRevB.71.035105)
    and the spin-polarized response formalism developed by Linscott et al.
    (DOI: 10.1103/PhysRevB.98.235157).

    This workflow relies on the constrained on-site potential functional implemented in VASP, 
    with a helpful tutorial found here: 
    https://www.vasp.at/wiki/index.php/Calculate_U_for_LSDA%2BU

    Args:
        structure: input structure (must be ordered)
        user_incar_settings: user INCAR settings
        relax_nonmagnetic: Restart magnetic SCF runs from a
            non-magnetic calculation, using WAVECAR
        spin_polarized: Perform spin-dependent perturbations
        applied_potential_range: Bounds of applied potential
        num_evals: Number of perturbation evaluations
        site_indices_perturb: (must specify if species_perturb=None)
            List of site indices within the Structure indicating
            perturbation sites
        species_perturb: (must specify if site_indices_perturb=None)
            List of names of species (string) of sites to perturb;
            the first site of each species is selected in the structure
        find_nearest_sites: If set to True and species_perturb != None,
            the closest sites (by the Structure distance matrix) will be
            selected in the response analysis to account for inter-site
            screening effects
        parallel_scheme: 0 - (default) self-consistent (SCF) runs use
            the WAVECAR from the non-self-consistent (NSCF) run at the
            same applied potential; 1 - SCF runs use the WAVECAR from
            the ground-state (V=0) run. While reusing the WAVECAR from
            the NSCF run in the SCF run may be more efficient
            (parallel_scheme: 0), the user may also choose to remove the
            dependency between NSCF and SCF runs (parallel_scheme: 1)
        ediff_tight: Final energy convergence tolerance if restarting
            from a previous run (if not specified, defaults to the
            pymatgen default EDIFF)
        c: Workflow config dict, in the same format as in
            presets/core.py and elsewhere in atomate
    Returns: Workflow
    """

    if not structure.is_ordered:
        raise ValueError(
            "Please obtain an ordered approximation of the input structure.")

    if not site_indices_perturb:
        site_indices_perturb = []

    if species_perturb:

        if find_nearest_sites:
            site_indices_perturb = find_closest_sites(structure,
                                                      species_perturb)
        else:
            for specie_u in species_perturb:
                found_specie = False
                for s in range(len(structure)):
                    site = structure[s]
                    if (Element(str(site.specie)) == Element(specie_u)) \
                       and (s not in site_indices_perturb):
                        found_specie = True
                        break
                if not found_specie:
                    raise ValueError("Could not find specie(s) in structure.")
                site_indices_perturb.append(s)

    elif not site_indices_perturb:
        logger.warning("Sites for computing U value are not specified. "
                       "Computing U for first site in structure. ")

    site_indices_perturb = list(tuple(site_indices_perturb))
    num_perturb = len(site_indices_perturb)

    sites_perturb = []
    for site_index_perturb in site_indices_perturb:
        site = structure[site_index_perturb]
        sites_perturb.append(site)

    structure.remove_sites(indices=site_indices_perturb)

    for site in sites_perturb:
        structure.insert(i=0,
                         species=site.specie,
                         coords=site.frac_coords,
                         properties=site.properties)

    # using a uuid for book-keeping,
    # in a similar way to other workflows
    uuid = str(uuid4())

    c_defaults = {"vasp_cmd": VASP_CMD, "db_file": DB_FILE}
    if c:
        c.update(c_defaults)
    else:
        c = c_defaults

    # Calculate groundstate

    # set user_incar_settings
    if not user_incar_settings:
        user_incar_settings = {}

    # setup VASP input sets
    uis_gs, uis_ldau, val_dict, vis_ldau = init_linresp_input_sets(
        user_incar_settings, structure, num_perturb)

    fws = []
    index_fw_gs = [0]

    ediff_default = vis_ldau.incar['EDIFF']
    if not ediff_tight:
        ediff_tight = 0.1 * ediff_default

    append_linresp_ground_state_fws(fws, structure, num_perturb, index_fw_gs,
                                    uis_gs, relax_nonmagnetic, ediff_default,
                                    ediff_tight)

    # generate list of applied on-site potentials in linear response
    applied_potential_value_list = []
    for counter_perturb in range(num_perturb):
        applied_potential_values = np.linspace(applied_potential_range[0],
                                               applied_potential_range[1],
                                               num_evals)
        applied_potential_values = np.around(applied_potential_values,
                                             decimals=9)

        if 0.0 in applied_potential_values:
            applied_potential_values = list(applied_potential_values)
            applied_potential_values.pop(applied_potential_values.index(0.0))
            applied_potential_values = np.array(applied_potential_values)

        applied_potential_value_list.append(applied_potential_values.copy())

    for counter_perturb in range(num_perturb):

        applied_potential_values = applied_potential_value_list[
            counter_perturb]

        for v in applied_potential_values:

            append_linresp_perturb_fws(v, fws, structure, counter_perturb,
                                       num_perturb, index_fw_gs, uis_ldau,
                                       val_dict, spin_polarized,
                                       relax_nonmagnetic, ediff_default,
                                       ediff_tight, parallel_scheme)

    wf = Workflow(fws)

    fw_analysis = Firework(
        HubbardHundLinRespToDb(num_perturb=num_perturb,
                               spin_polarized=spin_polarized,
                               relax_nonmagnetic=relax_nonmagnetic,
                               db_file=DB_FILE,
                               wf_uuid=uuid),
        name="HubbardHundLinRespToDb",
    )

    wf.append_wf(Workflow.from_Firework(fw_analysis), wf.leaf_fw_ids)

    wf = add_common_powerups(wf, c)

    if c.get("ADD_WF_METADATA", ADD_WF_METADATA):
        wf = add_wf_metadata(wf, structure)

    wf = add_additional_fields_to_taskdocs(
        wf,
        {
            "wf_meta": {
                "wf_uuid": uuid,
                "wf_name": "hubbard_hund_linresp",
                "wf_version": __hubbard_hund_linresp_wf_version__,
            }
        },
    )

    return wf
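A hedged usage sketch for the linear-response workflow; the import path below reflects where this function is defined in atomate (atomate.vasp.workflows.base.hubbard_hund_linresp), but may differ between versions. The NiO structure file and the perturbed species are illustrative.

from fireworks import LaunchPad
from pymatgen.core import Structure
from atomate.vasp.workflows.base.hubbard_hund_linresp import \
    get_wf_hubbard_hund_linresp

structure = Structure.from_file("NiO.cif")       # illustrative input file
wf = get_wf_hubbard_hund_linresp(structure,
                                 species_perturb=["Ni"],
                                 applied_potential_range=(-0.2, 0.2),
                                 num_evals=9)
LaunchPad.auto_load().add_wf(wf)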
Example 7
def make_md_workflow(sim,
                     archive,
                     stages,
                     md_engine='gromacs',
                     md_category='md',
                     local_category='local',
                     postrun_wf=None,
                     post_wf=None,
                     files=None):
    """Construct a general, single MD simulation workflow.

    Assumptions
    -----------
    Queue launcher submission script must define and export the following
    environment variables:

        1. STAGING : absolute path on resource to staging directory
        2. SCRATCHDIR : absolute path on resource to scratch directory

    The staging directory must already exist on all resources specified in
    ``stages``.

    The script ``run_md.sh`` must be somewhere on your path, and must take
    a single argument giving the directory to execute MD out of. It should
    create and change the working directory to that directory before anything
    else.

    Parameters
    ----------
    sim : str
        MDSynthesis Sim.
    archive : str
        Absolute path to directory to launch from, which holds all required
        files for running MD. 
    stages : list or str
        List of dicts, each giving values for the following keys:
            - 'server': server host to transfer to
            - 'user': username to authenticate with
            - 'staging': absolute path to staging area on remote resource
        Alternatively, a path to a yaml file giving a list of dictionaries
        with the same information.
    md_engine : {'gromacs'}
        MD engine name; needed to determine continuation mechanism to use.
    md_category : str
        Category to use for the MD Firework. Used to target to correct rockets.
    local_category : str
        Category to use for non-MD Fireworks, which should be run by rockets
        where the ``archive`` directory is accessible.
    postrun_wf : Workflow
        Workflow to perform after each copyback; performed in parallel to the
        continuation run.
    post_wf : Workflow
        Workflow to perform after completed MD (no continuation); use for final
        postprocessing. 
    files : list 
        Names of files (not paths) needed for each leg of the simulation. Need
        not exist, but if they do they will get staged before each run.

    Returns
    -------
    Workflow
        MD workflow; can be submitted to the LaunchPad of your choice.

    """
    sim = mds.Sim(sim)

    #TODO: perhaps move to its own FireTask?
    sim.categories['md_status'] = 'running'

    #TODO: the trouble with this is that if this workflow is created with the intent
    #      of being attached to another, these files may not exist at all yet
    if files is None:
        files = []

    f_exist = [f for f in files if os.path.exists(os.path.join(archive, f))]

    if isinstance(stages, string_types):
        with open(stages, 'r') as f:
            stages = yaml.safe_load(f)

    ## Stage files on all resources where MD may run; takes place locally
    fts_stage = list()
    for stage in stages:
        fts_stage.append(
            FileTransferTask(mode='rtransfer',
                             server=stage['server'],
                             user=stage['user'],
                             files=[os.path.join(archive, i) for i in files],
                             dest=os.path.join(stage['staging'], sim.uuid),
                             max_retry=5,
                             shell_interpret=True))

    fw_stage = Firework(fts_stage,
                        spec={
                            '_launch_dir': archive,
                            '_category': local_category
                        },
                        name='staging')

    ## MD execution; takes place in queue context of compute resource

    # make rundir
    ft_mkdir = MkRunDirTask(uuid=sim.uuid)

    # copy input files to scratch space
    ft_copy = FileTransferTask(
        mode='copy',
        files=[os.path.join('${STAGING}/', sim.uuid, i) for i in files],
        dest=os.path.join('${SCRATCHDIR}/', sim.uuid),
        ignore_missing=True,
        shell_interpret=True)

    # next, run MD
    ft_md = ScriptTask(script='run_md.sh {}'.format(
        os.path.join('${SCRATCHDIR}/', sim.uuid)),
                       use_shell=True,
                       fizzle_bad_rc=True)

    # send info on where files live to pull firework
    ft_info = BeaconTask(uuid=sim.uuid)

    fw_md = Firework([ft_mkdir, ft_copy, ft_md, ft_info],
                     spec={'_category': md_category},
                     name='md')

    ## Pull files back to archive; takes place locally
    ft_copyback = FilePullTask(dest=archive)

    fw_copyback = Firework([ft_copyback],
                           spec={
                               '_launch_dir': archive,
                               '_category': local_category
                           },
                           name='pull')

    ## Decide if we need to continue and submit new workflow if so; takes place
    ## locally

    if md_engine == 'gromacs':
        ft_continue = GromacsContinueTask(sim=sim,
                                          archive=archive,
                                          stages=stages,
                                          md_engine=md_engine,
                                          md_category=md_category,
                                          local_category=local_category,
                                          postrun_wf=postrun_wf,
                                          post_wf=post_wf,
                                          files=files)
    else:
        raise ValueError("No known md engine `{}`.".format(md_engine))

    fw_continue = Firework([ft_continue],
                           spec={
                               '_launch_dir': archive,
                               '_category': local_category
                           },
                           name='continue')

    wf = Workflow([fw_stage, fw_md, fw_copyback, fw_continue],
                  links_dict={
                      fw_stage: [fw_md],
                      fw_md: [fw_copyback],
                      fw_copyback: [fw_continue]
                  },
                  name='{} | md'.format(sim.name),
                  metadata=dict(sim.categories))

    ## Mix in postrun workflow, if given
    if postrun_wf:
        if isinstance(postrun_wf, dict):
            postrun_wf = Workflow.from_dict(postrun_wf)

        wf.append_wf(Workflow.from_wflow(postrun_wf), [fw_copyback.fw_id])

    return wf
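A hedged usage sketch for make_md_workflow. The import path is an assumption (the function appears to belong to the mdworks package), and the Sim directory, stages file, and input file names are illustrative only.

from fireworks import LaunchPad
from mdworks import make_md_workflow             # assumed import path

wf = make_md_workflow(sim='/data/sims/protein_A',        # MDSynthesis Sim directory
                      archive='/data/sims/protein_A',    # holds the MD input files
                      stages='stages.yml',               # YAML list of transfer targets
                      md_engine='gromacs',
                      files=['md.tpr', 'md.cpt'])
LaunchPad.auto_load().add_wf(wf)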
Example 8
def get_wf_slab(slab, include_bulk_opt=False, adsorbates=None,
                ads_structures_params=None, vasp_cmd="vasp",
                db_file=None, add_molecules_in_box=False):
    """
    Gets a workflow corresponding to a slab calculation along with optional
    adsorbate calcs and precursor oriented unit cell optimization

    Args:
        slab (Slab or Structure): slab to calculate
        include_bulk_opt (bool): whether to include bulk optimization,
            this flag sets the slab fireworks to be TransmuterFWs based
            on bulk optimization of oriented unit cells
        adsorbates ([Molecule]): list of molecules to place as adsorbates
        ads_structures_params (dict): parameters to be supplied as
            kwargs to AdsorbateSiteFinder.generate_adsorption_structures
        add_molecules_in_box (boolean): flag to add calculation of
            adsorbate molecule energies to the workflow
        db_file (string): path to database file
        vasp_cmd (string): vasp command

    Returns:
        Workflow
    """
    fws, parents = [], []

    if adsorbates is None:
        adsorbates = []

    if ads_structures_params is None:
        ads_structures_params = {}

    # Add bulk opt firework if specified
    if include_bulk_opt:
        oriented_bulk = slab.oriented_unit_cell
        vis = MPSurfaceSet(oriented_bulk, bulk=True)
        fws.append(OptimizeFW(structure=oriented_bulk, vasp_input_set=vis,
                              vasp_cmd=vasp_cmd, db_file=db_file))
        parents = fws[-1]

    name = slab.composition.reduced_formula
    if getattr(slab, "miller_index", None):
        name += "_{}".format(slab.miller_index)
    # Create slab fw and add it to list of fws
    slab_fw = get_slab_fw(slab, include_bulk_opt, db_file=db_file,
                          vasp_cmd=vasp_cmd, parents=parents,
                          name="{} slab optimization".format(name))
    fws.append(slab_fw)

    for adsorbate in adsorbates:
        ads_slabs = AdsorbateSiteFinder(slab).generate_adsorption_structures(
            adsorbate, **ads_structures_params)
        for n, ads_slab in enumerate(ads_slabs):
            # Create adsorbate fw
            ads_name = "{}-{} adsorbate optimization {}".format(
                adsorbate.composition.formula, name, n)
            adsorbate_fw = get_slab_fw(
                ads_slab, include_bulk_opt, db_file=db_file, vasp_cmd=vasp_cmd,
                parents=parents, name=ads_name)
            fws.append(adsorbate_fw)

    if isinstance(slab, Slab):
        name = "{}_{} slab workflow".format(
            slab.composition.reduced_composition, slab.miller_index)
    else:
        name = "{} slab workflow".format(slab.composition.reduced_composition)

    wf = Workflow(fws, name=name)

    # Add optional molecules workflow
    if add_molecules_in_box:
        molecule_wf = get_wf_molecules(adsorbates, db_file=db_file,
                                       vasp_cmd=vasp_cmd)
        wf.append_wf(molecule_wf)

    return wf