def get_MDFW(structure, start_temp, end_temp, name="molecular dynamics",
             priority=None, args=None, **kwargs):
    """
    Helper function to get a molecular dynamics firework for the quench workflow.

    Args:
        structure: Initial structure for molecular dynamics run
        start_temp: Starting temperature
        end_temp: Ending temperature
        name: name of firework
        priority: priority of job in database (stored in the firework spec
            as '_priority')
        args: custom arguments dictionary for molecular dynamics run;
            recursively merged over the defaults below. Defaults to {}.
        kwargs: kwargs for MDFW

    Returns:
        Molecular Dynamics Firework
    """
    # Use a None sentinel instead of a mutable {} default so the default
    # dict is not shared (and potentially mutated) across calls.
    args = {} if args is None else args

    # Default run parameters; entries in `args` override these below.
    run_args = {
        "md_params": {
            "nsteps": 500,
            "start_temp": start_temp,
            "end_temp": end_temp
        },
        "run_specs": {
            "vasp_input_set": None,
            "vasp_cmd": ">>vasp_cmd<<",
            "db_file": ">>db_file<<",
            "wall_time": 40000
        },
        "optional_fw_params": {
            "override_default_vasp_params": {},
            "spec": {
                '_priority': priority
            }
        }
    }
    # Cheap MD settings: no WAVECAR output, low precision.
    run_args["optional_fw_params"]["override_default_vasp_params"].update(
        {'user_incar_settings': {
            'ISIF': 1,
            'LWAVE': False,
            'PREC': 'Low'
        }})
    # Caller-supplied overrides win over the defaults above.
    run_args = recursive_update(run_args, args)
    _mdfw = MDFW(structure=structure,
                 name=name,
                 **run_args["md_params"],
                 **run_args["run_specs"],
                 **run_args["optional_fw_params"],
                 **kwargs)
    return _mdfw
def get_MDFW(structure, start_temp, end_temp, name="molecular dynamics",
             priority=None, job_time=None, args=None, **kwargs):
    """
    Helper function to get a molecular dynamics firework for the quench workflow.

    Args:
        structure: Initial structure for molecular dynamics run
        start_temp: Starting temperature
        end_temp: Ending temperature
        name: name of firework
        priority: priority of job in database (stored in the firework spec
            as '_priority' when provided)
        job_time: NOTE(review): accepted but currently unused in this
            variant — confirm whether it should feed wall_time.
        args: custom arguments dictionary for molecular dynamics run;
            recursively merged over the defaults below. Defaults to {}.
        kwargs: kwargs for MDFW

    Returns:
        Molecular Dynamics Firework
    """
    # Use a None sentinel instead of a mutable {} default so the default
    # dict is not shared (and potentially mutated) across calls.
    args = {} if args is None else args

    run_args = {
        # Bug fix: start_temp/end_temp were accepted but never forwarded;
        # include them in md_params as the sibling helper does.
        "md_params": {
            "nsteps": 500,
            "start_temp": start_temp,
            "end_temp": end_temp
        },
        "run_specs": {
            "vasp_input_set": None,
            "vasp_cmd": ">>vasp_cmd<<",
            "db_file": ">>db_file<<",
            "wall_time": 40000
        },
        "optional_fw_params": {
            "override_default_vasp_params": {},
            # Bug fix: priority was accepted but dropped; wire it into the
            # spec only when given, so the no-priority call is unchanged.
            "spec": {'_priority': priority} if priority is not None else {}
        }
    }
    # Cheap MD settings: no WAVECAR output, low precision.
    run_args["optional_fw_params"]["override_default_vasp_params"].update(
        {'user_incar_settings': {'ISIF': 1, 'LWAVE': False, 'PREC': 'Low'}})
    # Caller-supplied overrides win over the defaults above.
    run_args = recursive_update(run_args, args)
    _mdfw = MDFW(structure=structure,
                 name=name,
                 **run_args["md_params"],
                 **run_args["run_specs"],
                 **run_args["optional_fw_params"],
                 **kwargs)
    return _mdfw
def get_quench_wf(structures, priority=None, quench_type="slow_quench", descriptor="", **kwargs):
    """
    Build a quench workflow: optional stepwise MD cooling followed by a
    relaxation (OptimizeFW) and static (StaticFW) calculation per structure.

    Args:
        structures: List of starting structures; one quench chain is built
            for each.
        priority: Priority of all fireworks in the workflows
        quench_type: use "slow_quench" for a gradual decrease in temperature or
            "mp_quench" for a instantaneous DFT relaxation
        descriptor: Extra description to add to the name of the firework
        **kwargs: Arguments such as temperatures, cool_args, hold_args,
            quench_args, etc. cool_args and hold_args are only applicable
            when using "slow_quench". Remaining kwargs are forwarded to
            each MD firework.

    Returns:
        Workflow object
    """
    fw_list = []
    # Pull workflow-tuning options out of kwargs, with sensible defaults.
    temperatures = kwargs.get('temperatures', {
        "start_temp": 3000,
        "end_temp": 500,
        "temp_step": 500
    })
    cool_args = kwargs.get('cool_args', {"md_params": {"nsteps": 200}})
    hold_args = kwargs.get('hold_args', {"md_params": {"nsteps": 500}})
    quench_args = kwargs.get('quench_args', {})
    for (i, structure) in enumerate(structures):
        _fw_list = []
        if quench_type == "slow_quench":
            # Step temperature down from start_temp toward end_temp;
            # each step is a cool ramp followed by a constant-T hold.
            for temp in np.arange(temperatures["start_temp"],
                                  temperatures["end_temp"],
                                  -temperatures["temp_step"]):
                # get fw for cool step
                # First firework in the chain starts from the input
                # structure; later ones continue from the previous run.
                use_prev_structure = False
                if len(_fw_list) > 0:
                    use_prev_structure = True
                _fw = get_MDFW(
                    structure,
                    temp,
                    temp - temperatures["temp_step"],
                    name="snap_" + str(i) + "_cool_" +
                    str(temp - temperatures["temp_step"]),
                    args=cool_args,
                    parents=[_fw_list[-1]] if len(_fw_list) > 0 else [],
                    priority=priority,
                    previous_structure=use_prev_structure,
                    insert_db=True,
                    **kwargs)
                _fw_list.append(_fw)
                # get fw for hold step
                _fw = get_MDFW(structure,
                               temp - temperatures["temp_step"],
                               temp - temperatures["temp_step"],
                               name="snap_" + str(i) + "_hold_" +
                               str(temp - temperatures["temp_step"]),
                               args=hold_args,
                               parents=[_fw_list[-1]],
                               priority=priority,
                               previous_structure=True,
                               insert_db=True,
                               **kwargs)
                _fw_list.append(_fw)
        if quench_type in ["slow_quench", "mp_quench"]:
            # Relax OptimizeFW and StaticFW
            run_args = {
                "run_specs": {
                    "vasp_input_set": None,
                    "vasp_cmd": ">>vasp_cmd<<",
                    "db_file": ">>db_file<<",
                    "spec": {
                        "_priority": priority
                    }
                },
                "optional_fw_params": {
                    "override_default_vasp_params": {}
                }
            }
            # Caller-supplied quench_args override the defaults above.
            run_args = recursive_update(run_args, quench_args)
            _name = "snap_" + str(i)
            # Continue from the last MD snapshot when cooling ran.
            use_prev_structure = True if len(_fw_list) > 0 else False
            fw1 = OptimizeFW(
                structure=structure,
                name=f'{_name}{descriptor}_optimize',
                parents=[_fw_list[-1]] if len(_fw_list) > 0 else [],
                previous_structure=use_prev_structure,
                **run_args["run_specs"],
                **run_args["optional_fw_params"],
                max_force_threshold=None)
            fw2 = StaticFW(structure=structure,
                           name=f'{_name}{descriptor}_static',
                           parents=[fw1],
                           previous_structure=True,
                           **run_args["run_specs"],
                           **run_args["optional_fw_params"])
            _fw_list.extend([fw1, fw2])
        fw_list.extend(_fw_list)
    # NOTE(review): the workflow name uses the last structure in the loop;
    # an empty `structures` list would raise NameError here — confirm
    # callers always pass at least one structure.
    name = structure.composition.reduced_formula + descriptor + "_quench"
    wf = Workflow(fw_list, name=name)
    return wf
def get_quench_wf(structures, temperatures=None, priority=None,
                  quench_type="slow_quench", cool_args=None, hold_args=None,
                  quench_args=None, descriptor="", **kwargs):
    """
    Build a quench workflow: optional stepwise MD cooling followed by a
    relaxation (OptimizeFW) and static (StaticFW) calculation per structure.

    Args:
        structures: List of starting structures; one quench chain is built
            for each.
        temperatures: dict with "start_temp", "end_temp" and "temp_step"
            controlling the cooling schedule. Defaults to 3000 K -> 500 K
            in 500 K steps.
        priority: Priority of all fireworks in the workflow
        quench_type: "slow_quench" for a gradual decrease in temperature or
            "mp_quench" for an instantaneous DFT relaxation
        cool_args: MD overrides for the cooling-ramp fireworks
            (default: 200 steps)
        hold_args: MD overrides for the constant-temperature hold fireworks
            (default: 500 steps)
        quench_args: overrides for the final Optimize/Static run specs
        descriptor: Extra description to add to the name of the fireworks
        **kwargs: Forwarded to each MD firework.

    Returns:
        Workflow object
    """
    fw_list = []
    # Bug fix: the original read an undefined name (`temp`) when filling
    # defaults and an undefined loop variable (`t`) inside the loop.
    # None/empty sentinels avoid shared mutable defaults and mean
    # "use the standard settings".
    temperatures = temperatures or {
        "start_temp": 3000,
        "end_temp": 500,
        "temp_step": 500
    }
    cool_args = cool_args or {"md_params": {"nsteps": 200}}
    hold_args = hold_args or {"md_params": {"nsteps": 500}}
    quench_args = quench_args or {}

    for (i, structure) in enumerate(structures):
        _fw_list = []
        if quench_type == "slow_quench":
            # Step temperature down from start_temp toward end_temp;
            # each step is a cool ramp followed by a constant-T hold.
            for t in np.arange(temperatures["start_temp"],
                               temperatures["end_temp"],
                               -temperatures["temp_step"]):
                # get fw for cool step; the first firework starts from the
                # input structure, later ones continue from the previous run.
                use_prev_structure = False
                if len(_fw_list) > 0:
                    use_prev_structure = True
                _fw = get_MDFW(structure,
                               t,
                               t - temperatures["temp_step"],
                               name="snap_" + str(i) + "_cool_" +
                               str(t - temperatures["temp_step"]),
                               args=cool_args,
                               parents=[_fw_list[-1]] if len(_fw_list) > 0 else [],
                               priority=priority,
                               previous_structure=use_prev_structure,
                               insert_db=True,
                               **kwargs)
                _fw_list.append(_fw)
                # get fw for hold step
                _fw = get_MDFW(structure,
                               t - temperatures["temp_step"],
                               t - temperatures["temp_step"],
                               name="snap_" + str(i) + "_hold_" +
                               str(t - temperatures["temp_step"]),
                               args=hold_args,
                               parents=[_fw_list[-1]],
                               priority=priority,
                               previous_structure=True,
                               insert_db=True,
                               **kwargs)
                _fw_list.append(_fw)
        if quench_type in ["slow_quench", "mp_quench"]:
            # Relax OptimizeFW and StaticFW
            run_args = {
                "run_specs": {
                    "vasp_input_set": None,
                    "vasp_cmd": ">>vasp_cmd<<",
                    "db_file": ">>db_file<<",
                    "spec": {"_priority": priority}
                },
                "optional_fw_params": {
                    "override_default_vasp_params": {}
                }
            }
            run_args = recursive_update(run_args, quench_args)
            _name = "snap_" + str(i)
            fw1 = OptimizeFW(structure=structure,
                             name=_name + descriptor + "_optimize",
                             parents=[_fw_list[-1]] if len(_fw_list) > 0 else [],
                             **run_args["run_specs"],
                             **run_args["optional_fw_params"],
                             max_force_threshold=None)
            # Continue from the last MD snapshot only when cooling ran.
            if len(_fw_list) > 0:
                fw1 = powerups.add_cont_structure(fw1)
            fw1 = powerups.add_pass_structure(fw1)
            fw2 = StaticFW(structure=structure,
                           name=_name + descriptor + "_static",
                           parents=[fw1],
                           **run_args["run_specs"],
                           **run_args["optional_fw_params"])
            fw2 = powerups.add_cont_structure(fw2)
            fw2 = powerups.add_pass_structure(fw2)
            _fw_list.extend([fw1, fw2])
        fw_list.extend(_fw_list)
    # NOTE(review): workflow name uses the last structure in the loop;
    # an empty `structures` list would raise NameError here.
    name = structure.composition.reduced_formula + descriptor + "_quench"
    wf = Workflow(fw_list, name=name)
    return wf
def get_converge_wf(structure, temperature, converge_scheme='EOS', priority=None, max_steps=5000, target_steps=10000, preconverged=False, notes=None, save_data="all", aggregate_trajectory=True, **kwargs):
    """
    Args:
        structure: Starting structure for the run
        temperature: Temperature for the MD runs
        converge_scheme: Equation of state is normally faster and preferred
        priority: Priority of all fireworks in the workflows
        max_steps: Maximum number of steps per chunk of production run MD simulation
        target_steps: Target number of steps for production MD run
        preconverged: Whether the structure already converged (i.e. Pressure 0bar)
            or volume rescaling not desired
        notes: Any additional comments to propagate with this run
        save_data: Level to save job outputs. Options are "all", 'production', and None
        aggregate_trajectory: Whether to aggregate trajectory to database
        **kwargs: Arguments such as spawner_args, converge_args,
            convergence_criteria, tag_id, prod_count, etc.

    Returns:
        Workflow object
    """
    # Generate a unique identifier for the fireworks belonging to this workflows
    tag_id = kwargs.get('tag_id', uuid.uuid4())
    prod_count = kwargs.get('prod_count', 0)
    wf_name = kwargs.get(
        'wf_name',
        f'{structure.composition.reduced_formula}_{temperature}_diffusion')

    # To aggregate trajectory, the job output from the production runs must be saved.
    if aggregate_trajectory and save_data is None:
        save_data = "production"

    fw_list = []

    # Setup initial Run and convergence of structure
    run_args = {
        "md_params": {
            "start_temp": temperature,
            "end_temp": temperature,
            "nsteps": 2000
        },
        "run_specs": {
            "vasp_input_set": None,
            "vasp_cmd": ">>vasp_cmd<<",
            "db_file": ">>db_file<<"
        },
        "optional_fw_params": {
            "override_default_vasp_params": {
                'user_incar_settings': {
                    'ISIF': 1,
                    'LWAVE': False,
                    'PREC': 'Normal'
                }
            },
            "spec": {
                '_priority': priority
            }
        }
    }
    # Caller-supplied converge_args override the defaults above.
    run_args = recursive_update(run_args, kwargs.get('converge_args', {}))

    # Setup Dictionary specifying parameters of the spawner for convergence tasks
    _spawner_args = {
        "converge_params": {
            "max_rescales": 15,
            "density_spawn_count": 1,
            "energy_spawn_count": 0,
            'converge_type': kwargs.get('convergence_criteria',
                                        [("density", 5), ('ionic', 0.001)])
        },
        "rescale_params": {
            "beta": 5e-7
        },
        "run_specs": run_args["run_specs"],
        "md_params": run_args["md_params"],
        "optional_fw_params": run_args["optional_fw_params"],
        "tag_id": tag_id
    }
    # Spawned continuation runs start from the previous run's end temperature.
    _spawner_args["md_params"].update(
        {"start_temp": run_args["md_params"]["end_temp"]})
    _spawner_args = recursive_update(_spawner_args,
                                     kwargs.get('spawner_args', {}))

    # Converge the pressure (volume) of the system
    if not preconverged:
        # Convergence-run outputs are only stored when saving everything.
        insert_converge_data = True if save_data == "all" else False
        if converge_scheme == 'EOS':
            # Create structures for varying volumes
            images = kwargs.get('image_scale', [0.8, 1, 1.2])
            structures = [structure.copy() for i in images]
            for i, factor in enumerate(images):
                structures[i].scale_lattice(structure.volume * factor)

            # Create firework for each structure
            EOS_run_args = deepcopy(run_args)
            EOS_run_args = recursive_update(EOS_run_args,
                                            kwargs.get('converge_args', {}))
            volume_fws = []
            for n, (i, vol_structure) in enumerate(zip(images, structures)):
                # Only the last volume firework passes its structure on.
                save_structure = True if n == len(images) - 1 else False
                _fw = MDFW(structure=vol_structure,
                           name=f'volume_{i}-{tag_id}',
                           previous_structure=False,
                           insert_db=insert_converge_data,
                           save_structure=save_structure,
                           **EOS_run_args["md_params"],
                           **EOS_run_args["run_specs"],
                           **EOS_run_args["optional_fw_params"])
                # Pass pressure/volume data downstream for the EOS fit.
                _fw = powerups.add_pass_pv(_fw)
                volume_fws.append(_fw)
            fw_list.extend(volume_fws)

            # Create firework to converge pressure/volume
            spawner_fw = MDFW(structure=structure,
                              name=f'run1-{tag_id}',
                              previous_structure=True,
                              insert_db=insert_converge_data,
                              parents=volume_fws,
                              **run_args["md_params"],
                              **run_args["run_specs"],
                              **run_args["optional_fw_params"])
            spawner_fw = powerups.add_pv_volume_rescale(spawner_fw)
            spawner_fw = powerups.add_pass_pv(spawner_fw)
            _spawner_args['run_specs']['insert_db'] = insert_converge_data
            spawner_fw = powerups.add_converge_task(spawner_fw, **_spawner_args)
            fw_list.append(spawner_fw)
        else:
            # Non-EOS scheme: single seed run with a converge task that
            # spawns rescaled continuation runs as needed.
            fw1 = MDFW(structure=structure,
                       name="run0" + "-" + str(tag_id),
                       previous_structure=False,
                       insert_db=insert_converge_data,
                       **run_args["md_params"],
                       **run_args["run_specs"],
                       **run_args["optional_fw_params"])
            fw1 = powerups.add_converge_task(fw1, **_spawner_args)
            fw_list.append(fw1)

    # Production length MD runs
    insert_prod_data = True if save_data == "all" or save_data == "production" else False
    prod_steps = 0
    # Chain chunks of max_steps until target_steps total production steps.
    while prod_steps <= target_steps - max_steps:
        # Create Dictionary with production run parameters
        run_args = {
            "md_params": {
                "start_temp": run_args["md_params"]["end_temp"],
                "end_temp": run_args["md_params"]["end_temp"],
                "nsteps": max_steps
            },
            "run_specs": {
                "vasp_input_set": None,
                "vasp_cmd": ">>vasp_cmd<<",
                "db_file": ">>db_file<<"
            },
            "optional_fw_params": {
                "override_default_vasp_params": {
                    'user_incar_settings': {
                        'ISIF': 1,
                        'LWAVE': False,
                        'PREC': 'Normal'
                    }
                },
                "spec": {
                    '_priority': priority
                }
            }
        }
        run_args = recursive_update(run_args, kwargs.get('prod_args', {}))

        parents = fw_list[-1] if len(fw_list) > 0 else []
        # Only the very first production run of a preconverged structure
        # starts from the given structure instead of the previous run.
        previous_structure = False if preconverged and prod_steps == 0 else True
        fw = MDFW(structure=structure,
                  name=f'{temperature}_run_{prod_count}-{tag_id}',
                  previous_structure=previous_structure,
                  insert_db=insert_prod_data,
                  **run_args["md_params"],
                  **run_args["run_specs"],
                  **run_args["optional_fw_params"],
                  parents=parents)
        fw_list.append(fw)
        prod_steps += max_steps
        prod_count += 1

    # Attach trajectory aggregation to the final firework in the chain.
    if aggregate_trajectory:
        fw_list[-1] = powerups.aggregate_trajectory(
            fw_list[-1],
            tag_id=tag_id,
            notes=notes,
            db_file=run_args["run_specs"]["db_file"])
    wf = Workflow(fireworks=fw_list, name=wf_name)
    return wf
def run_task(self, fw_spec):
    """
    Check whether the just-finished MD run is converged (pressure and/or
    energy) and either finish, or spawn a detour firework to continue
    converging. Reads CONTCAR.gz and OUTCAR.gz from the current directory.
    """
    from mpmorph.fireworks import powerups
    from mpmorph.fireworks.core import MDFW

    # Load Structure from Poscar
    _poscar = Poscar.from_file("CONTCAR.gz")
    structure = _poscar.structure

    # Get convergence parameters from spec
    converge_params = self["converge_params"]
    # Fraction of the (later part of the) run averaged for the pressure.
    avg_fraction = converge_params.get("avg_fraction", 0.5)
    convergence_vars = dict(converge_params["converge_type"])
    if "ionic" not in convergence_vars.keys():
        convergence_vars["ionic"] = 0.0005
    rescale_params = self.get("rescale_params", {})

    # Load Data from OUTCAR
    search_keys = [
        'external', 'kinetic energy EKIN', '% ion-electron', 'ETOTAL'
    ]
    # Maps friendly criterion names to the OUTCAR search keys above.
    key_map = {
        'density': 'external',
        'kinetic energy': 'kinetic energy EKIN',
        'ionic': '% ion-electron',
        'total energy': 'ETOTAL'
    }
    outcar_data = md_data.get_MD_data("./OUTCAR.gz", search_keys=search_keys)

    # Check for convergence
    converged = {}
    # Pressure ("density") criterion: average the external pressure over
    # the last avg_fraction of the run and compare to the threshold.
    _index = search_keys.index(key_map["density"])
    _data = np.transpose(outcar_data)[_index].copy()
    pressure = np.mean(_data[int(avg_fraction * (len(_data) - 1)):])
    if "density" in convergence_vars.keys():
        if np.abs(pressure) >= convergence_vars["density"]:
            converged["density"] = False
        else:
            converged["density"] = True

    if "kinetic energy" in convergence_vars.keys():
        # Kinetic-energy criterion: compare the tail mean of the
        # normalized per-site energy against the overall mean.
        _index = search_keys.index(key_map["kinetic energy"])
        energy = np.transpose(outcar_data)[_index].copy()
        norm_energy = (energy / structure.num_sites) / np.mean(
            energy / structure.num_sites) - 1
        if np.abs(np.mean(norm_energy[-500:]) - np.mean(norm_energy)
                  ) > convergence_vars["kinetic energy"]:
            converged["kinetic energy"] = False
        else:
            converged["kinetic energy"] = True

    # Ionic criterion (always evaluated): fit normal distributions to the
    # first and second halves of the per-site ion-electron energy and
    # require their means to agree within the tolerance.
    _index = search_keys.index(key_map["ionic"])
    energy = np.transpose(outcar_data)[_index].copy()
    norm_energies = energy / structure.num_sites
    mu, std = stats.norm.fit(norm_energies)
    mu1, std1 = stats.norm.fit(norm_energies[0:int(len(norm_energies) / 2)])
    mu2, std2 = stats.norm.fit(norm_energies[int(len(norm_energies) / 2):])
    if np.abs((mu2 - mu1) / mu) < convergence_vars["ionic"]:
        converged["ionic"] = True
    else:
        converged["ionic"] = False

    # Spawn Additional Fireworks
    if not all([item[1] for item in converged.items()]):
        density_spawn_count = converge_params["density_spawn_count"]
        energy_spawn_count = converge_params["energy_spawn_count"]
        max_rescales = converge_params["max_rescales"]
        max_energy_runs = 3  # Set max energy convergence runs to default of 3

        run_specs = self["run_specs"]
        md_params = self["md_params"]
        optional_params = self["optional_fw_params"]

        tag_id = self.get("tag_id", "")

        if density_spawn_count >= max_rescales:
            # Too many volume rescales: give up and defuse the children.
            return FWAction(defuse_children=True)
        elif energy_spawn_count >= max_energy_runs:
            # Too many energy rescales... Just continue with the production runs
            return FWAction(stored_data={
                'pressure': pressure,
                'energy': mu,
                'density_calculated': True
            })
        elif not converged.get("density", True):
            # Pressure not converged: spawn a rescaled-volume MD detour.
            rescale_args = {
                "initial_pressure": pressure * 1000,
                "initial_temperature": 1,
                "beta": 0.0000005
            }
            rescale_args = recursive_update(rescale_args, rescale_params)

            # Spawn fw
            fw = MDFW(
                structure,
                name=f'density_run_{density_spawn_count + 1}-{tag_id}',
                previous_structure=False,
                **run_specs,
                **md_params,
                **optional_params)
            converge_params["density_spawn_count"] += 1
            _spawner_args = {
                "converge_params": converge_params,
                "rescale_params": rescale_params,
                "run_specs": run_specs,
                "md_params": md_params,
                "optional_fw_params": optional_params,
                "tag_id": tag_id
            }
            fw = powerups.add_rescale_volume(fw, **rescale_args)
            fw = powerups.add_pass_pv(fw)
            fw = powerups.add_converge_task(fw, **_spawner_args)
            wf = Workflow([fw])
            return FWAction(detours=wf,
                            stored_data={
                                'pressure': pressure,
                                'energy': mu
                            })
        else:
            # Energy not converged: spawn a continuation MD detour at
            # the same volume.
            fw = MDFW(structure,
                      name=f'energy_run_{energy_spawn_count + 1}_{tag_id}',
                      previous_structure=False,
                      **run_specs,
                      **md_params,
                      **optional_params)
            converge_params["energy_spawn_count"] += 1
            _spawner_args = {
                "converge_params": converge_params,
                "rescale_params": rescale_params,
                "run_specs": run_specs,
                "md_params": md_params,
                "optional_fw_params": optional_params,
                "tag_id": tag_id
            }
            fw = powerups.add_pass_pv(fw)
            fw = powerups.add_converge_task(fw, **_spawner_args)
            wf = Workflow([fw])
            return FWAction(detours=wf,
                            stored_data={
                                'pressure': pressure,
                                'energy': mu
                            })
    else:
        # Everything converged: record results and let children run.
        return FWAction(stored_data={
            'pressure': pressure,
            'energy': mu,
            'density_calculated': True
        })