def get_wf(
    self, scan=False, perform_bader=True, num_orderings_hard_limit=16, c=None
):
    """
    Retrieve the FireWorks workflow.

    Args:
        scan (bool): if True, use the SCAN functional instead of GGA+U,
            since the SCAN functional has shown to have improved
            performance for magnetic systems in some cases
        perform_bader (bool): if True, make sure the "bader" binary is
            in your path, will use Bader analysis to calculate
            atom-projected magnetic moments
        num_orderings_hard_limit (int): will make sure total number of
            magnetic orderings does not exceed this number even if there
            are extra orderings of equivalent symmetry
        c (dict): additional config dict (as used elsewhere in atomate)

    Returns: FireWorks Workflow
    """

    # merge user config with defaults instead of discarding the defaults
    # wholesale: previously a caller supplying e.g. only {"VASP_CMD": ...}
    # caused a KeyError on c["DB_FILE"] below
    c_defaults = {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}
    c = c or {}
    for k, v in c_defaults.items():
        c.setdefault(k, v)

    fws = []
    analysis_parents = []

    # trim total number of orderings (useful in high-throughput context)
    # this is somewhat coarse, better to reduce num_orderings kwarg and/or
    # change enumeration strategies
    ordered_structures = self.ordered_structures
    ordered_structure_origins = self.ordered_structure_origins

    def _add_metadata(structure):
        """
        For book-keeping, store useful metadata with the Structure
        object for later database ingestion including workflow
        version and a UUID for easier querying of all tasks generated
        from the workflow.

        Args:
            structure: Structure

        Returns: TransformedStructure
        """
        # this could be further improved by storing full transformation
        # history, but would require an improved transformation pipeline
        return TransformedStructure(
            structure, other_parameters={"wf_meta": self.wf_meta}
        )

    ordered_structures = [_add_metadata(struct) for struct in ordered_structures]

    if (
        num_orderings_hard_limit
        and len(self.ordered_structures) > num_orderings_hard_limit
    ):
        # slice the metadata-wrapped list (NOT self.ordered_structures):
        # slicing the raw list here would silently drop the wf_meta
        # TransformedStructure wrapping added above
        ordered_structures = ordered_structures[0:num_orderings_hard_limit]
        ordered_structure_origins = ordered_structure_origins[
            0:num_orderings_hard_limit
        ]
        logger.warning(
            "Number of ordered structures exceeds hard limit, "
            "removing last {} structures.".format(
                len(self.ordered_structures) - len(ordered_structures)
            )
        )

        # always make sure input structure is included; ">=" because index
        # num_orderings_hard_limit is the first index removed by the slice
        # above (the old ">" test lost a structure sitting exactly at the
        # limit); also wrap it so it carries wf_meta like the others
        if self.input_index and self.input_index >= num_orderings_hard_limit:
            ordered_structures.append(
                _add_metadata(self.ordered_structures[self.input_index])
            )
            ordered_structure_origins.append(
                self.ordered_structure_origins[self.input_index]
            )

    # default incar settings
    user_incar_settings = {"ISYM": 0, "LASPH": True, "EDIFFG": -0.05}
    if scan:
        # currently, using SCAN relaxation as a static calculation also
        # since it is typically high quality enough, but want to make
        # sure we are also writing the AECCAR* files
        user_incar_settings.update({"LAECHG": True})
    user_incar_settings.update(c.get("user_incar_settings", {}))
    c["user_incar_settings"] = user_incar_settings

    for idx, ordered_structure in enumerate(ordered_structures):

        analyzer = CollinearMagneticStructureAnalyzer(ordered_structure)

        name = " ordering {} {} -".format(idx, analyzer.ordering.value)

        if not scan:

            vis = MPRelaxSet(
                ordered_structure, user_incar_settings=user_incar_settings
            )

            # relax
            fws.append(
                OptimizeFW(
                    ordered_structure,
                    vasp_input_set=vis,
                    vasp_cmd=c["VASP_CMD"],
                    db_file=c["DB_FILE"],
                    max_force_threshold=0.05,
                    half_kpts_first_relax=False,
                    name=name + " optimize",
                )
            )

            # static, chained off the optimization above
            fws.append(
                StaticFW(
                    ordered_structure,
                    vasp_cmd=c["VASP_CMD"],
                    db_file=c["DB_FILE"],
                    name=name + " static",
                    prev_calc_loc=True,
                    parents=fws[-1],
                )
            )

        else:

            # wf_scan_opt is just a single FireWork so can append it directly
            scan_fws = wf_scan_opt(ordered_structure, c=c).fws
            # change name for consistency with non-SCAN
            new_name = scan_fws[0].name.replace(
                "structure optimization", name + " optimize"
            )
            scan_fws[0].name = new_name
            scan_fws[0].tasks[-1]["additional_fields"]["task_label"] = new_name
            fws += scan_fws

        analysis_parents.append(fws[-1])

    fw_analysis = Firework(
        MagneticOrderingsToDB(
            db_file=c["DB_FILE"],
            wf_uuid=self.uuid,
            auto_generated=False,
            name="MagneticOrderingsToDB",
            parent_structure=self.sanitized_structure,
            origins=ordered_structure_origins,
            input_index=self.input_index,
            perform_bader=perform_bader,
            scan=scan,
        ),
        name="Magnetic Orderings Analysis",
        parents=analysis_parents,
        # run the analysis even if some orderings fizzled, so partial
        # results still reach the database
        spec={"_allow_fizzled_parents": True},
    )
    fws.append(fw_analysis)

    formula = self.sanitized_structure.composition.reduced_formula
    wf_name = "{} - magnetic orderings".format(formula)
    if scan:
        wf_name += " - SCAN"
    wf = Workflow(fws, name=wf_name)

    wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

    # tag with the workflow UUID so all member tasks are queryable as a group
    tag = "magnetic_orderings group: >>{}<<".format(self.uuid)
    wf = add_tags(wf, [tag, ordered_structure_origins])

    return wf
def get_wf(self, scan=False, perform_bader=True, num_orderings_hard_limit=16, c=None):
    """
    Retrieve the FireWorks workflow.

    Args:
        scan (bool): if True, use the SCAN functional instead of GGA+U,
            since the SCAN functional has shown to have improved
            performance for magnetic systems in some cases
        perform_bader (bool): if True, make sure the "bader" binary is in
            your path, will use Bader analysis to calculate atom-projected
            magnetic moments
        num_orderings_hard_limit (int): will make sure total number of
            magnetic orderings does not exceed this number even if there
            are extra orderings of equivalent symmetry
        c (dict): additional config dict (as used elsewhere in atomate)

    Returns: FireWorks Workflow
    """

    c_defaults = {"VASP_CMD": VASP_CMD, "DB_FILE": DB_FILE}
    # record in the task docs whether a relaxation was performed
    additional_fields = {"relax": not self.static}
    c = c or {}
    for k, v in c_defaults.items():
        if k not in c:
            c[k] = v

    fws = []
    analysis_parents = []

    # trim total number of orderings (useful in high-throughput context)
    # this is somewhat coarse, better to reduce num_orderings kwarg and/or
    # change enumeration strategies
    ordered_structures = self.ordered_structures
    ordered_structure_origins = self.ordered_structure_origins

    def _add_metadata(structure):
        """
        For book-keeping, store useful metadata with the Structure
        object for later database ingestion including workflow
        version and a UUID for easier querying of all tasks generated
        from the workflow.

        Args:
            structure: Structure

        Returns: TransformedStructure
        """
        # this could be further improved by storing full transformation
        # history, but would require an improved transformation pipeline
        return TransformedStructure(
            structure, other_parameters={"wf_meta": self.wf_meta})

    ordered_structures = [
        _add_metadata(struct) for struct in ordered_structures
    ]

    if (num_orderings_hard_limit
            and len(self.ordered_structures) > num_orderings_hard_limit):
        # slice the metadata-wrapped list (NOT self.ordered_structures):
        # slicing the raw list here would silently drop the wf_meta
        # TransformedStructure wrapping added above
        ordered_structures = ordered_structures[:num_orderings_hard_limit]
        ordered_structure_origins = ordered_structure_origins[
            :num_orderings_hard_limit]
        logger.warning("Number of ordered structures exceeds hard limit, "
                       "removing last {} structures.".format(
                           len(self.ordered_structures) -
                           len(ordered_structures)))

        # always make sure input structure is included; ">=" because index
        # num_orderings_hard_limit is the first index removed by the slice
        # above (the old ">" test lost a structure sitting exactly at the
        # limit); also wrap it so it carries wf_meta like the others
        if self.input_index and self.input_index >= num_orderings_hard_limit:
            ordered_structures.append(
                _add_metadata(self.ordered_structures[self.input_index]))
            ordered_structure_origins.append(
                self.ordered_structure_origins[self.input_index])

    # default incar settings
    user_incar_settings = {"ISYM": 0, "LASPH": True, "EDIFFG": -0.05}
    if scan:
        # currently, using SCAN relaxation as a static calculation also
        # since it is typically high quality enough, but want to make
        # sure we are also writing the AECCAR* files
        user_incar_settings.update({"LAECHG": True})
    user_incar_settings.update(c.get("user_incar_settings", {}))
    c["user_incar_settings"] = user_incar_settings

    for idx, ordered_structure in enumerate(ordered_structures):

        analyzer = CollinearMagneticStructureAnalyzer(ordered_structure)

        name = f" ordering {idx} {analyzer.ordering.value} -"

        if not scan:

            vis = MPRelaxSet(ordered_structure,
                             user_incar_settings=user_incar_settings)

            # track the optimization firework (if any) so the static run
            # below gets the correct parent; in static-only mode the old
            # parents=fws[-1] raised IndexError for the first ordering and
            # wrongly chained later statics to unrelated fireworks
            optimize_fw = None
            if not self.static:
                # relax
                fws.append(
                    OptimizeFW(
                        ordered_structure,
                        vasp_input_set=vis,
                        vasp_cmd=c["VASP_CMD"],
                        db_file=c["DB_FILE"],
                        max_force_threshold=0.05,
                        half_kpts_first_relax=False,
                        name=name + " optimize",
                    ))
                optimize_fw = fws[-1]

            # static (NOTE(review): assumes StaticFW only consults
            # prev_calc_loc when parents is set, writing inputs from the
            # structure otherwise — confirm against StaticFW)
            fws.append(
                StaticFW(
                    ordered_structure,
                    vasp_cmd=c["VASP_CMD"],
                    db_file=c["DB_FILE"],
                    name=name + " static",
                    prev_calc_loc=True,
                    parents=optimize_fw,
                    vasptodb_kwargs={
                        "parse_chgcar": True,
                        "parse_aeccar": True
                    },
                ))

            if not self.static:
                # so a failed optimize doesn't crash workflow
                fws[-1].spec["_allow_fizzled_parents"] = True

        elif scan:

            # wf_scan_opt is just a single FireWork so can append it directly
            scan_fws = wf_scan_opt(ordered_structure, c=c).fws
            # change name for consistency with non-SCAN
            new_name = scan_fws[0].name.replace("structure optimization",
                                                name + " optimize")
            scan_fws[0].name = new_name
            scan_fws[0].tasks[-1]["additional_fields"][
                "task_label"] = new_name
            fws += scan_fws

        analysis_parents.append(fws[-1])

    fw_analysis = Firework(
        MagneticOrderingsToDb(
            db_file=c["DB_FILE"],
            wf_uuid=self.uuid,
            parent_structure=self.sanitized_structure,
            origins=ordered_structure_origins,
            input_index=self.input_index,
            perform_bader=perform_bader,
            scan=scan,
            additional_fields=additional_fields,
        ),
        name="Magnetic Orderings Analysis",
        parents=analysis_parents,
        # run the analysis even if some orderings fizzled, so partial
        # results still reach the database
        spec={"_allow_fizzled_parents": True},
    )
    fws.append(fw_analysis)

    formula = self.sanitized_structure.composition.reduced_formula
    wf_name = f"{formula} - magnetic orderings"
    if scan:
        wf_name += " - SCAN"
    wf = Workflow(fws, name=wf_name)

    wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta})

    # tag with the workflow UUID so all member tasks are queryable as a group
    tag = f"magnetic_orderings group: >>{self.uuid}<<"
    wf = add_tags(wf, [tag, ordered_structure_origins])

    return wf