def from_dict(cls, d):
    dec = MontyDecoder()
    # Only pass keys that match the constructor signature; getfullargspec
    # replaces the removed inspect.getargspec.
    kwargs = {
        k: dec.process_decoded(v)
        for k, v in d.items()
        if k in inspect.getfullargspec(cls.__init__).args
    }
    return cls(**kwargs)
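# A minimal, self-contained sketch (not taken from the snippets above) of the round
# trip this signature-filtering pattern assumes: MontyEncoder serializes the object
# via as_dict(), and from_dict() rebuilds it while silently dropping bookkeeping keys
# such as "@module"/"@class" because they are not constructor arguments. The class
# name `Payload` and its fields are hypothetical, chosen only for illustration.
import inspect
import json

from monty.json import MSONable, MontyDecoder, MontyEncoder


class Payload(MSONable):
    def __init__(self, name, values):
        self.name = name
        self.values = values

    @classmethod
    def from_dict(cls, d):
        dec = MontyDecoder()
        allowed = inspect.getfullargspec(cls.__init__).args
        kwargs = {k: dec.process_decoded(v) for k, v in d.items() if k in allowed}
        return cls(**kwargs)


original = Payload("test", [1, 2, 3])
restored = Payload.from_dict(json.loads(MontyEncoder().encode(original)))
assert restored.name == original.name and restored.values == original.values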
def setUp(self):
    self.temps = [300, 600, 900, 1200, 1500, 1800]
    self.struct = vasprun.final_structure
    self.num_atoms = self.struct.composition.num_atoms
    self.entries_with_temps = {
        temp: GibbsComputedStructureEntry(
            self.struct,
            -2.436,
            temp=temp,
            gibbs_model="SISSO",
            parameters=vasprun.incar,
            entry_id="test",
        )
        for temp in self.temps
    }
    with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "Mn-O_entries.json"), "r") as f:
        data = json.load(f)
    with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "structure_CO2.json"), "r") as f:
        self.co2_struct = MontyDecoder().process_decoded(json.load(f))
    self.mp_entries = [MontyDecoder().process_decoded(d) for d in data]
def from_dict(cls, d): dec = MontyDecoder() return cls(dec.process_decoded(d["structure"]), d["energy"], d["correction"], dec.process_decoded(d.get("parameters", {})), dec.process_decoded(d.get("data", {})), entry_id=d.get("entry_id", None))
def from_dict(cls, d): dec = MontyDecoder() return cls( dec.process_decoded(d["voltage_pairs"]), dec.process_decoded(d["working_ion_entry"]), Composition(d["initial_comp"]), )
def from_dict(cls, d): """ Args: d (dict): Dict representation Returns: Class """ a = d["about"] dec = MontyDecoder() created_at = dec.process_decoded(a.get("created_at")) data = {k: v for k, v in d["about"].items() if k.startswith("_")} data = dec.process_decoded(data) structure = Structure.from_dict( d) if "lattice" in d else Molecule.from_dict(d) return cls( structure, a["authors"], projects=a.get("projects", None), references=a.get("references", ""), remarks=a.get("remarks", None), data=data, history=a.get("history", None), created_at=created_at, )
def from_dict(cls, d): dec = MontyDecoder() return cls(d["composition"], d["energy"], d["correction"], dec.process_decoded(d.get("parameters", {})), dec.process_decoded(d.get("data", {})), entry_id=d.get("entry_id", None), attribute=d["attribute"] if "attribute" in d else None)
def from_dict(cls, d):
    d = d.copy()
    d.pop('@module', None)
    d.pop('@class', None)
    dec = MontyDecoder()
    d['kpts'] = dec.process_decoded(d['kpts'])
    return cls(**d)
def apply_corrections(self, fw_to_correct, corrections):
    # Apply the corrections
    spec = fw_to_correct.spec
    modder = Modder()
    for correction in corrections:
        actions = correction['actions']
        for action in actions:
            if action['action_type'] == 'modify_object':
                if action['object']['source'] == 'fw_spec':
                    myobject = spec[action['object']['key']]
                else:
                    raise NotImplementedError('Object source "{}" not implemented in '
                                              'CheckTask'.format(action['object']['source']))
                newobj = modder.modify_object(action['action'], myobject)
                spec[action['object']['key']] = newobj
            elif action['action_type'] == 'modify_dict':
                if action['dict']['source'] == 'fw_spec':
                    mydict = spec[action['dict']['key']]
                else:
                    raise NotImplementedError('Dict source "{}" not implemented in '
                                              'CheckTask'.format(action['dict']['source']))
                modder.modify(action['action'], mydict)
            else:
                raise NotImplementedError('Action type "{}" not implemented in '
                                          'CheckTask'.format(action['action_type']))

    # Keep track of the corrections that have been applied
    spec['SRC_check_corrections'] = corrections

    # Update the task index
    fws_task_index = int(fw_to_correct.spec['wf_task_index'].split('_')[-1])
    new_index = fws_task_index + 1

    # Update the Fireworks _queueadapter key
    # TODO: in the future, see whether the FW queueadapter might be replaced by the
    #  qtk_queueadapter. To be discussed with Anubhav once the qtk queueadapter lives
    #  in a qtk toolkit rather than in pymatgen/io/abinit.
    spec['_queueadapter'] = spec['qtk_queueadapter'].get_subs_dict()
    queue_adapter_update = get_queue_adapter_update(qtk_queueadapter=spec['qtk_queueadapter'],
                                                    corrections=corrections)

    # Get and update the task_input if needed
    # TODO: make this more general. Right now it is based on AbinitInput and is thus
    #  strongly tied to abinit through abiinput, deps, ...
    mytask = fw_to_correct.tasks[0]
    task_class = mytask.__class__
    decoder = MontyDecoder()
    task_input = decoder.process_decoded(fw_to_correct.spec['_tasks'][0]['abiinput'])
    initialization_info = fw_to_correct.spec['initialization_info']
    deps = mytask.deps

    # Create the new Setup/Run/Check fireworks
    SRC_fws = createSRCFireworksOld(task_class=task_class, task_input=task_input, SRC_spec=spec,
                                    initialization_info=initialization_info,
                                    wf_task_index_prefix=spec['wf_task_index_prefix'],
                                    current_task_index=new_index,
                                    handlers=self.handlers, validators=self.validators,
                                    deps=deps, task_type=mytask.task_type,
                                    queue_adapter_update=queue_adapter_update)
    wf = Workflow(fireworks=SRC_fws['fws'], links_dict=SRC_fws['links_dict'])
    return FWAction(detours=[wf])
def __setstate__(self, d): del d["@class"] del d["@module"] if "@version" in d: del d["@version"] md = MontyDecoder() d = md.process_decoded(d) self.__init__(**d)
def from_dict(cls, d): d = d.copy() d.pop("@module", None) d.pop("@class", None) dec = MontyDecoder() d["spin_mode"] = dec.process_decoded(d["spin_mode"]) d["smearing"] = dec.process_decoded(d["smearing"]) d["algorithm"] = dec.process_decoded(d["algorithm"]) if d["algorithm"] else None return cls(**d)
def from_dict(cls, d): dec = MontyDecoder() sub_d = {"optional_files": {}} for k, v in d.items(): if k in ["INCAR", "POSCAR", "POTCAR", "KPOINTS"]: sub_d[k.lower()] = dec.process_decoded(v) elif k not in ["@module", "@class"]: sub_d["optional_files"][k] = dec.process_decoded(v) return cls(**sub_d)
def from_dict(cls, d): dec = MontyDecoder() return cls(dec.process_decoded(d["structure"]), d["energy"], d["correction"], parameters={k: dec.process_decoded(v) for k, v in d.get("parameters", {}).items()}, data={k: dec.process_decoded(v) for k, v in d.get("data", {}).items()}, entry_id=d.get("entry_id", None))
def from_dict(cls, d): dec = MontyDecoder() scf_strategy = dec.process_decoded(d["scf_strategy"]) ksampling = dec.process_decoded(d["ksampling"]) nscf_nband = dec.process_decoded(d["nscf_nband"]) nscf_algorithm = dec.process_decoded(d["nscf_algorithm"]) return cls(scf_strategy=scf_strategy, ksampling=ksampling, nscf_nband=nscf_nband, nscf_algorithm=nscf_algorithm, **d['extra_abivars'])
def run_task(self, fw_spec): struct = self.get("structure") or fw_spec["structure"] s = Structure.from_dict(struct.as_dict()) user_incar_settings = fw_spec.get("user_incar_settings", {}) vasp_input_set = MPRelaxSet(s, user_incar_settings=user_incar_settings) dec = MontyDecoder() vis = dec.process_decoded(vasp_input_set.as_dict()) output_dir = os.getcwd() vis.write_input(output_dir=output_dir) return FWAction()
def __setstate__(self, d): """ Double underscore method used by pickle to deserialize this object This uses MSONable deerialization instead """ del d["@class"] del d["@module"] md = MontyDecoder() d = md.process_decoded(d) self.__init__(**d)
def from_dict(cls, d) -> "ComputedStructureEntry": """ :param d: Dict representation. :return: ComputedStructureEntry """ dec = MontyDecoder() # the first block here is for legacy ComputedEntry that were # serialized before we had the energy_adjustments attribute. if d["correction"] != 0 and not d.get("energy_adjustments"): struct = dec.process_decoded(d["structure"]) return cls( struct, d["energy"], correction=d["correction"], parameters={k: dec.process_decoded(v) for k, v in d.get("parameters", {}).items()}, data={k: dec.process_decoded(v) for k, v in d.get("data", {}).items()}, entry_id=d.get("entry_id", None), ) # this is the preferred / modern way of instantiating ComputedEntry # we don't pass correction explicitly because it will be calculated # on the fly from energy_adjustments return cls( dec.process_decoded(d["structure"]), d["energy"], composition=d.get("composition", None), correction=0, energy_adjustments=[dec.process_decoded(e) for e in d.get("energy_adjustments", {})], parameters={k: dec.process_decoded(v) for k, v in d.get("parameters", {}).items()}, data={k: dec.process_decoded(v) for k, v in d.get("data", {}).items()}, entry_id=d.get("entry_id", None), )
def from_dict(cls, d): dec = MontyDecoder() structure = dec.process_decoded(d["structure"]) pseudos = dec.process_decoded(d['pseudos']) ksampling = dec.process_decoded(d["ksampling"]) electrons = dec.process_decoded(d["electrons"]) return cls(structure=structure, pseudos=pseudos, ksampling=ksampling, accuracy=d['accuracy'], spin_mode=electrons.spin_mode, smearing=electrons.smearing, charge=d['charge'], scf_algorithm=electrons.algorithm, use_symmetries=d['use_symmetries'], **d['extra_abivars'])
def from_dict(cls, d):
    dec = MontyDecoder()
    if 'is_valid' in d:
        return cls(controller=dec.process_decoded(d['controller']), state=d['state'],
                   problems=d['problems'], actions=dec.process_decoded(d['actions']),
                   restart=d['restart'], is_valid=d['is_valid'])
    else:
        return cls(controller=dec.process_decoded(d['controller']), state=d['state'],
                   problems=d['problems'], actions=dec.process_decoded(d['actions']),
                   restart=d['restart'])
def from_dict(cls, d): """ Args: d (dict): from as_dict() Returns: A ComputedReaction object. """ dec = MontyDecoder() reactants = [dec.process_decoded(e) for e in d["reactants"]] products = [dec.process_decoded(e) for e in d["products"]] return cls(reactants, products)
def featurize(self, dict_data):
    """Decode an MSONable dictionary into the corresponding pymatgen object.

    Args:
        dict_data (dict): An MSONable dictionary, e.g. produced by
            `pymatgen.core.structure.Structure.as_dict()`.

    Returns:
        (object): An object with the type specified by `dict_data`.
    """
    md = MontyDecoder()
    return [md.process_decoded(dict_data)]
def from_dict(cls, d): dec = MontyDecoder() working_ion_entry = dec.process_decoded(d["working_ion_entry"]) balanced_rxn = dec.process_decoded(d["balanced_rxn"]) entries_charge = dec.process_decoded(d["entries_charge"]) entries_discharge = dec.process_decoded(d["entries_discharge"]) return ConversionVoltagePair(balanced_rxn, d["voltage"], d["mAh"], d["vol_charge"], d["vol_discharge"], d["mass_charge"], d["mass_discharge"], d["frac_charge"], d["frac_discharge"], entries_charge, entries_discharge, working_ion_entry)
def from_dict(cls, d): """ Args: d (dict): Dict representation Returns: InsertionElectrode """ from monty.json import MontyDecoder dec = MontyDecoder() return cls(dec.process_decoded(d["entries"]), dec.process_decoded(d["working_ion_entry"]))
def from_dict(cls, d): """ Args: d (dict): Dict representation Returns: ConversionElectrode """ dec = MontyDecoder() return cls(dec.process_decoded(d["voltage_pairs"]), dec.process_decoded(d["working_ion_entry"]), Composition(d["initial_comp"]))
def from_dict(cls, d): dec = MontyDecoder() structure = dec.process_decoded(d["structure"]) pseudos = [Pseudo.from_file(p['filepath']) for p in d['pseudos']] ksampling = dec.process_decoded(d["ksampling"]) spin_mode = dec.process_decoded(d["spin_mode"]) smearing = dec.process_decoded(d["smearing"]) return cls(structure=structure, pseudos=pseudos, ksampling=ksampling, accuracy=d['accuracy'], spin_mode=spin_mode, smearing=smearing, charge=d['charge'], scf_algorithm=d['scf_algorithm'], use_symmetries=d['use_symmetries'], relax_algo=d['relax_algo'], **d['extra_abivars'])
def process_item(self, item):
    mp_id = item['mp-id']
    self.logger.debug("Processing {}".format(mp_id))
    decoder = MontyDecoder()
    ph_bs = decoder.process_decoded(item['ph_bs'])
    web_doc = ph_bs.as_phononwebsite()
    plotter = PhononBSPlotter(ph_bs)
    ylim = (0, max(py_.flatten_deep(plotter.bs_plot_data()['frequency'])))
    filelike = io.BytesIO()
    plotter.save_plot(filelike, ylim=ylim, img_format="png")
    image = Binary(filelike.getvalue())
    filelike.close()
    return dict(mp_id=mp_id, web_doc=web_doc, image=image)
def run_task(self, fw_spec): material_id = self["material_id"] dest_root = fw_spec["_fw_en"]["run_dest_root"] dest = "{}/{}/bs/{}/relax".format(dest_root, os.environ["USER"], material_id) user_incar_settings = fw_spec.get("user_incar_settings", {}) vasp_input_set = MPStaticSet.from_prev_calc( prev_calc_dir=dest, standardize=1e-3, user_incar_settings=user_incar_settings) dec = MontyDecoder() vis = dec.process_decoded(vasp_input_set.as_dict()) vis.write_input(".")
def from_dict(cls, d): dec = MontyDecoder() sub_d = {"optional_files": {}} potcar=d.get('POTCAR',False) if potcar: infiles=["INCAR", "POSCAR", "POTCAR", "KPOINTS"] else: infiles=["INCAR", "POSCAR", "KPOINTS"] for k, v in d.items(): if k in infiles: sub_d[k.lower()] = dec.process_decoded(v) elif k not in ["@module", "@class"]: sub_d["optional_files"][k] = dec.process_decoded(v) return cls(**sub_d)
def from_dict_legacy(cls, d):
    """
    Args:
        d (dict): Dict representation

    Returns:
        InsertionElectrode
    """
    from monty.json import MontyDecoder

    dec = MontyDecoder()
    return InsertionElectrode(  # pylint: disable=E1120
        dec.process_decoded(d["entries"]),
        dec.process_decoded(d["working_ion_entry"]),
    )
def from_dict(cls, d): """ Initializes a DielTensor object from a dictionary. Args: d (dict): Dictionary from which the DielTensor should be initialized. Returns: DielTensor """ energies = MontyDecoder().process_decoded(d["energies"]) real_diel = MontyDecoder().process_decoded(d["real_diel"]) imag_diel = MontyDecoder().process_decoded(d["imag_diel"]) return cls(energies, real_diel + 1j * imag_diel)
def from_dict(cls, d): a = d["about"] dec = MontyDecoder() created_at = dec.process_decoded(a.get("created_at")) data = {k: v for k, v in d["about"].items() if k.startswith("_")} data = dec.process_decoded(data) structure = Structure.from_dict(d) if "lattice" in d \ else Molecule.from_dict(d) return cls(structure, a["authors"], projects=a.get("projects", None), references=a.get("references", ""), remarks=a.get("remarks", None), data=data, history=a.get("history", None), created_at=created_at)
def from_dict(cls, d): dec = MontyDecoder() return cls(d["composition"], d["calculator"], inputs={ k: dec.process_decoded(v) for k, v in d.get("inputs", {}).items() }, data={ k: dec.process_decoded(v) for k, v in d.get("data", {}).items() }, entry_id=d.get("entry_id", None), attribute=d["attribute"] if "attribute" in d else None, tag=d["tag"] if "tag" in d else None)
def test_entry(self):
    enc = MontyEncoder()
    dec = MontyDecoder()
    entry = ComputedEntry("Fe2O3", 2.3)
    jsonstr = enc.encode(entry)
    d = dec.decode(jsonstr)
    self.assertEqual(type(d), ComputedEntry)

    # Check a list of entries
    entries = [entry, entry, entry]
    jsonstr = enc.encode(entries)
    d = dec.decode(jsonstr)
    for i in d:
        self.assertEqual(type(i), ComputedEntry)
    self.assertEqual(len(d), 3)
def calc(self, item):
    struct_or_mol = MontyDecoder().process_decoded(item[self.projected_object_name])
    # TODO: will combine these two functions into something more intuitive
    graph = StructureMoleculeComponent._preprocess_input_to_graph(
        struct_or_mol,
        bonding_strategy=self.settings["bonding_strategy"],
        bonding_strategy_kwargs=self.settings["bonding_strategy_kwargs"],
    )
    scene, legend = StructureMoleculeComponent.get_scene_and_legend(
        graph,
        color_scheme=self.settings["color_scheme"],
        color_scale=self.settings["color_scale"],
        radius_strategy=self.settings["radius_strategy"],
        draw_image_atoms=self.settings["draw_image_atoms"],
        bonded_sites_outside_unit_cell=self.settings["bonded_sites_outside_unit_cell"],
        hide_incomplete_bonds=self.settings["hide_incomplete_bonds"],
    )
    return {
        "scene": scene.to_json(),
        "legend": legend,
        "settings": self.settings,
        "source": item[self.projected_object_name],
    }
def normalize(self, mode: str = "formula_unit") -> "ComputedEntry": """ Normalize the entry's composition and energy. Args: mode: "formula_unit" is the default, which normalizes to composition.reduced_formula. The other option is "atom", which normalizes such that the composition amounts sum to 1. """ factor = self._normalization_factor(mode) new_composition = self._composition / factor new_energy = self._energy / factor new_entry_dict = self.as_dict() new_entry_dict["composition"] = new_composition.as_dict() new_entry_dict["energy"] = new_energy # TODO: make sure EnergyAdjustments are _also_ immutable to avoid this hacking new_energy_adjustments = MontyDecoder().process_decoded( new_entry_dict["energy_adjustments"]) for ea in new_energy_adjustments: ea.normalize(factor) new_entry_dict["energy_adjustments"] = [ ea.as_dict() for ea in new_energy_adjustments ] return self.from_dict(new_entry_dict)
def __init__(self, parameters):
    self.update(parameters)
    self.jobs = self['jobs']
    dec = MontyDecoder()
    # Materialize the decoded handlers into a list (a bare map() would be a
    # one-shot iterator under Python 3).
    self.handlers = [dec.process_decoded(h) for h in self['handlers']]
    self.max_errors = self.get('max_errors', 1)
    self.gzip_output = self.get('gzip_output', True)
def _from_dict(cls, d):
    modname = d["tp_mo"]
    classname = d["tp_na"]
    decoded = {k: MontyDecoder().process_decoded(v)
               for k, v in d.items() if not k.startswith("@")}
    mod = __import__(modname, globals(), locals(), [classname], 0)
    if hasattr(mod, classname):
        cla = getattr(mod, classname)
        if decoded["all_args_kwargs"] == {}:
            return cla()
        return cla(**decoded["all_args_kwargs"])
    try:
        return cls()
    except BaseException:
        raise TypeError(
            "Can't find {} in {}. Please import it first, e.g.\n"
            "NNDict = mark_classes([VoronoiNN, ])\n"
            "for i, j in NNDict.items():\n"
            "    locals()[i] = j".format(classname, modname))
def from_dict(cls, d): init = d["init_args"] return MagOrderingTransformation( init["mag_species_spin"], init["order_parameter"], energy_model=MontyDecoder().process_decoded(init["energy_model"]), **init["enum_kwargs"])
def update_targets(self, items):
    """
    Inserts the thermo docs into the thermo collection.

    Args:
        items ([[dict]]): a list of lists of thermo dictionaries to update
    """
    # Flatten out lists
    items = list(filter(None, chain.from_iterable(items)))
    # Check for duplicates within this set
    items = list({(v[self.thermo.key], frozenset(v["sandboxes"])): v for v in items}.values())
    # Check if already updated this run
    items = [i for i in items if i[self.thermo.key] not in self._completed_tasks]

    self._completed_tasks |= {i[self.thermo.key] for i in items}

    for item in items:
        if isinstance(item["last_updated"], dict):
            item["last_updated"] = MontyDecoder().process_decoded(item["last_updated"])

    if len(items) > 0:
        self.logger.info(f"Updating {len(items)} thermo documents")
        self.thermo.update(docs=items, key=[self.thermo.key, "sandboxes"])
    else:
        self.logger.info("No items to update")
def from_dict(cls, dd):
    dec = MontyDecoder()
    return cls(mp_symbol=dd['mp_symbol'],
               name=dd['name'],
               alternative_names=dd['alternative_names'],
               IUPAC_symbol=dd['IUPAC_symbol'],
               IUCr_symbol=dd['IUCr_symbol'],
               coordination=dd['coordination'],
               central_site=dd['central_site'],
               points=dd['points'],
               solid_angles=(dd['solid_angles'] if 'solid_angles' in dd
                             else [4.0 * np.pi / dd['coordination']] * dd['coordination']),
               deactivate=dd['deactivate'],
               faces=dd['_faces'],
               edges=dd['_edges'],
               algorithms=([dec.process_decoded(algo_d) for algo_d in dd['_algorithms']]
                           if dd['_algorithms'] is not None else None),
               equivalent_indices=dd.get('equivalent_indices'))
def run_task(self, fw_spec): """ Required Parameters: dir (str path): directory containing the vasp inputs jobs (VaspJob): Contains the cmd needed to run vasp Optional Parameters: custodian_params (dict **kwargs): Contains the job and the scratch directory for a custodian run handlers (list of custodian handlers): Defaults to empty list """ dec = MontyDecoder() dir = dec.process_decoded(self['dir']) cwd = dec.process_decoded(self['cwd']) # Change to the directory with the vasp inputs to run custodian os.chdir(cwd+dir) handlers = dec.process_decoded(self.get('handlers', [])) jobs = dec.process_decoded(self['jobs']) max_errors = dec.process_decoded(self['max_errors']) fw_env = fw_spec.get("_fw_env", {}) cust_params = self.get("custodian_params", {}) # Get the scratch directory if fw_env.get('scratch_root'): cust_params['scratch_dir'] = os.path.expandvars( fw_env['scratch_root']) c = Custodian(handlers=handlers, jobs=jobs, max_errors=max_errors, gzipped_output=True, **cust_params) output = c.run() return FWAction(stored_data=output)
def process_decoded(self, d):
    """
    Recursive method to support decoding dicts and lists containing
    pymatgen objects.
    """
    if isinstance(d, dict) and "module" in d and "class" in d:
        modname = d["module"]
        classname = d["class"]
        mod = __import__(modname, globals(), locals(), [classname], 0)
        if hasattr(mod, classname):
            cls_ = getattr(mod, classname)
            data = {k: v for k, v in d.items() if k not in ["module", "class"]}
            if hasattr(cls_, "from_dict"):
                return cls_.from_dict(data)
        return {self.process_decoded(k): self.process_decoded(v) for k, v in d.items()}
    return MontyDecoder.process_decoded(self, d)
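# Hedged, self-contained sketch of how an override like the one above is meant to be
# used: a MontyDecoder subclass (named LegacyKeyDecoder here purely for illustration)
# accepts dicts keyed with bare "module"/"class" instead of the usual
# "@module"/"@class" markers and dispatches to that class's from_dict() with the
# remaining items. The example payload uses pymatgen's Composition.
from monty.json import MontyDecoder
from pymatgen.core.composition import Composition


class LegacyKeyDecoder(MontyDecoder):
    def process_decoded(self, d):
        if isinstance(d, dict) and "module" in d and "class" in d:
            mod = __import__(d["module"], globals(), locals(), [d["class"]], 0)
            cls_ = getattr(mod, d["class"], None)
            if cls_ is not None and hasattr(cls_, "from_dict"):
                # Everything except the bookkeeping keys goes to from_dict().
                data = {k: v for k, v in d.items() if k not in ("module", "class")}
                return cls_.from_dict(data)
        return super().process_decoded(d)


comp = LegacyKeyDecoder().process_decoded(
    {"module": "pymatgen.core.composition", "class": "Composition", "Fe": 2.0, "O": 3.0}
)
assert comp.reduced_formula == "Fe2O3"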
def from_dict(cls, d):
    dec = MontyDecoder()
    return cls([dec.process_decoded(i) for i in d['items']])
def from_dict(cls, d):
    dec = MontyDecoder()
    reactants = [dec.process_decoded(e) for e in d["reactants"]]
    products = [dec.process_decoded(e) for e in d["products"]]
    return cls(reactants, products)
def from_dict(cls, d):
    dec = MontyDecoder()
    return cls(*dec.process_decoded(d['args']),
               **dec.process_decoded(d['kwargs']))
def from_dict(cls, d):
    dec = MontyDecoder()
    return cls(inverse_power=d['inverse_power'],
               weights_setup=dec.process_decoded(d['weights_setup']))
def from_dict(cls, m_dict):
    m = MontyDecoder()
    return cls(handlers=m.process_decoded(m_dict['handlers']),
               validators=m.process_decoded(m_dict['validators']),
               max_restarts=m_dict['max_restarts'])
def from_dict(cls, d): dec = MontyDecoder() entries = dec.process_decoded(d["original_entries"]) terminal_compositions = dec.process_decoded(d["terminal_compositions"]) return cls(entries, terminal_compositions, d["normalize_terminal_compositions"])
def from_dict(cls, d):
    dec = MontyDecoder()
    # Filter by constructor arguments; getfullargspec replaces the removed
    # inspect.getargspec.
    kwargs = {k: dec.process_decoded(v) for k, v in d.items()
              if k in inspect.getfullargspec(cls.__init__).args}
    return cls(**kwargs)
def from_dict(cls, d):
    from monty.json import MontyDecoder

    dec = MontyDecoder()
    return cls(dec.process_decoded(d["entries"]),
               dec.process_decoded(d["working_ion_entry"]))
def from_dict(cls, d):
    dec = MontyDecoder()
    details = dec.process_decoded(d['details']) if 'details' in d else None
    return cls(event_type=d['event_type'], details=details)
def from_dict(cls, d):
    dec = MontyDecoder()
    return cls(handler=dec.process_decoded(d['handler']), actions=d['actions'],
               event=dec.process_decoded(d['event']), reset=d['reset'])
def test_msonable(self):
    compat_dict = self.aqcompat.as_dict()
    decoder = MontyDecoder()
    temp_compat = decoder.process_decoded(compat_dict)
    self.assertIsInstance(temp_compat, MITAqueousCompatibility)
def from_dict(cls, d):
    dec = MontyDecoder()
    return cls(controllers=dec.process_decoded(d['controllers']))