def test_msonable(self):
    """A StructInputFile survives an as_dict / MontyDecoder round trip."""
    original = StructInputFile.from_file(os.path.join(test_dir, "Li.cif"))
    serialized = original.as_dict()
    restored = MontyDecoder().process_decoded(serialized)
    assert isinstance(restored, StructInputFile)
    assert original.structure == restored.structure
def get_dos_from_material_id(self, material_id: str):
    """
    Get the complete density of states pymatgen object associated with a
    Materials Project ID.

    Arguments:
        materials_id (str): Materials Project ID for a material

    Returns:
        dos (CompleteDos): CompleteDos object

    Raises:
        MPRestError: if no DOS data or DOS object exists for the ID.
    """
    rester = ElectronicStructureRester(endpoint=self.base_endpoint,
                                       api_key=self.api_key)
    summary = rester.get_data_by_id(document_id=material_id,
                                    fields=["dos"]).dict()

    # Guard clause: bail out early when the material has no DOS entry.
    if not summary["dos"]:
        raise MPRestError(
            "No density of states data found for {}".format(material_id))
    task_id = summary["dos"]["total"]["1"]["task_id"]

    payload = self.get_dos_from_task_id(task_id)
    if not payload:
        raise MPRestError("No density of states object found.")

    # Stored object is base64-encoded, zlib-compressed msgpack.
    compressed = base64.b64decode(payload[0], validate=True)
    unpacked = msgpack.unpackb(zlib.decompress(compressed), raw=False)
    return MontyDecoder().process_decoded(unpacked["data"])
def entries():
    """Return five ComputedEntry fixtures (four Fe-O phases plus one
    deliberately 'unstable' duplicate), decoded from dict form."""
    # Only these four fields differ between the entries; everything else
    # is shared boilerplate, so build the dicts from a spec table.
    specs = [
        ("test-1", -382.146593528, {"Fe": 24.0, "O": 32.0}, "Fe3O4"),
        ("test-2", -270.38765404, {"Fe": 16.0, "O": 24.0}, "Fe2O3"),
        ("test-3", -92.274692568, {"O": 24.0}, "O"),
        ("test-4", -13.00419661, {"Fe": 2.0}, "Fe"),
        ("unstable", -1080.82678592, {"Fe": 64.0, "O": 96.0}, "Fe2O3"),
    ]
    dicts = [
        {
            "@module": "pymatgen.entries.computed_entries",
            "@class": "ComputedEntry",
            "correction": 0.0,
            "entry_id": entry_id,
            "energy": energy,
            "composition": composition,
            "name": name,
            "attribute": None,
            "@version": "2020.4.29",
        }
        for entry_id, energy, composition, name in specs
    ]
    return MontyDecoder().process_decoded(dicts)
def from_dict(cls, d):
    """
    Reconstruct an instance from its dictionary representation.

    Args:
        d: (dict) dictionary representation; '@'-prefixed MSON metadata
            keys are ignored, every other value is decoded with
            MontyDecoder before use.

    Returns:
        a new instance of this class with its internal id restored.
    """
    decoder = MontyDecoder()
    kwargs = {}
    for key, raw in d.items():
        # Skip MSON bookkeeping keys such as '@module' / '@class'.
        if key.startswith('@'):
            continue
        kwargs[key] = decoder.process_decoded(raw)
    saved_id = kwargs.pop('internal_id')
    value = kwargs.pop('value')
    symbol_type = kwargs.pop('symbol_type')
    quantity = cls(symbol_type, value, **kwargs)
    # _internal_id is not a constructor argument, so restore it directly.
    quantity._internal_id = saved_id
    return quantity
def get_entries_from_dbs(structure_group_store: MongoStore,
                         material_store: MongoStore,
                         migrating_ion: str,
                         material_id: Union[str, int]):
    """
    Get the entries needed to construct a migration from a database that
    contains topotactically matched structures.

    Args:
        structure_group_store: electrode documents, one per group of
            topotactically similar insertion materials
        material_store: material documents, one per similar structure
            (each may aggregate multiple tasks)
        migrating_ion: the name of the migrating ion
        material_id: key of the structure-group document whose
            ``material_ids`` list the topotactically matched structures

    Returns:
        (base_entries, inserted_entries): entries whose composition lacks,
        respectively contains, the migrating ion.
    """
    with structure_group_store as store:
        sg_doc = store.query_one({structure_group_store.key: material_id})

    base_entries = []
    inserted_entries = []
    with material_store as store:
        id_filter = {"material_id": {"$in": sg_doc["material_ids"]}}
        for m_doc in store.query(id_filter):
            run_entries = m_doc["entries"]
            # Prefer the GGA+U entry when both calc types are present.
            if "GGA+U" in run_entries:
                entry = MontyDecoder().process_decoded(
                    run_entries["GGA+U"])  # type: ComputedEntry
            elif "GGA" in run_entries:
                entry = MontyDecoder().process_decoded(run_entries["GGA"])
            else:
                raise RuntimeError(
                    "Missing GGA or GGA+U calc type in <entries>")
            if migrating_ion in entry.composition.as_dict().keys():
                inserted_entries.append(entry)
            else:
                base_entries.append(entry)
    return base_entries, inserted_entries
def test_msonable(self):
    """An InputSet (files plus kwargs) survives an as_dict / MontyDecoder
    round trip."""
    files = {
        "cif1": StructInputFile.from_file(os.path.join(test_dir, "Li.cif")),
        "cif2": StructInputFile.from_file(os.path.join(test_dir, "Li2O.cif")),
    }
    original = InputSet(files, kwarg1=1, kwarg2="hello")

    restored = MontyDecoder().process_decoded(original.as_dict())
    assert isinstance(restored, InputSet)
    assert restored.kwarg1 == 1
    assert restored.kwarg2 == "hello"
    assert restored._kwargs == original._kwargs
    # Files must come back in the same order with equal structures.
    for (name_a, file_a), (name_b, file_b) in zip(restored, original):
        assert name_a == name_b
        assert file_a.structure == file_b.structure
def test_serialization(self):
    """Serialize/deserialize a random square tensor, both via the class
    and via object-independent MontyDecoder decoding."""
    serialized = self.rand_sqtensor.as_dict()

    # Class-level round trip preserves values and type.
    round_tripped = SquareTensor.from_dict(serialized)
    self.assertArrayAlmostEqual(round_tripped, self.rand_sqtensor)
    self.assertIsInstance(round_tripped, SquareTensor)

    # Decoding without naming the class must also recover SquareTensor.
    decoded = MontyDecoder().process_decoded(serialized)
    self.assertIsInstance(decoded, SquareTensor)

    with warnings.catch_warnings(record=True):
        symmetrized = self.rand_sqtensor.voigt_symmetrized
        voigt_dict = symmetrized.as_dict(voigt=True)
        recovered = Tensor.from_dict(voigt_dict)
        self.assertArrayAlmostEqual(symmetrized, recovered)
def from_dict(cls, d):
    """
    Constructs a ProvenanceStore object from its dictionary representation.

    Args:
        d: (dict) dictionary representation of the object. Can contain
            the dictionary representation of other objects as long as
            they implement from_dict()

    Returns:
        (ProvenanceStore) new object constructed from dictionary values
    """
    decoder = MontyDecoder()
    # Drop MSON metadata keys ('@module', '@class', ...) and decode the rest.
    kwargs = {key: decoder.process_decoded(val)
              for key, val in d.items()
              if not key.startswith('@')}
    store = cls()
    store._initialize(**kwargs)
    return store
def get_charge_density_from_file_id(self, fs_id: str):
    """
    Fetch and decode a charge density object stored under a file-system id.

    Args:
        fs_id: (str) identifier used to look up the URL document.

    Returns:
        the decoded charge density object (decoded by MontyDecoder from
        zlib-compressed msgpack), or None when no URL document is found
        for fs_id.
    """
    url_doc = self.get_data_by_id(fs_id)

    if url_doc:

        # The check below is performed to see if the client is being
        # used by our internal AWS deployment. If it is, we pull charge
        # density data from a private S3 bucket. Else, we pull data
        # from public MinIO buckets.
        if environ.get("AWS_EXECUTION_ENV", None) == "AWS_ECS_FARGATE":

            # Lazily create the S3 resource and reuse it across calls.
            if self.boto_resource is None:
                self.boto_resource = self._get_s3_resource(use_minio=False,
                                                           unsigned=False)

            bucket, obj_prefix = self._extract_s3_url_info(url_doc,
                                                           use_minio=False)

        else:
            # Try MinIO first; fall back to S3 if the MinIO endpoint is
            # unreachable.
            try:
                if self.boto_resource is None:
                    self.boto_resource = self._get_s3_resource()

                bucket, obj_prefix = self._extract_s3_url_info(url_doc)

            except ConnectionError:
                # NOTE(review): the fallback replaces boto_resource even
                # if one was already set — confirm that is intended.
                self.boto_resource = self._get_s3_resource(use_minio=False)

                bucket, obj_prefix = self._extract_s3_url_info(
                    url_doc, use_minio=False)

        # Stream the object body; the key is "<prefix>/<fs_id>".
        r = self.boto_resource.Object(  # type: ignore
            bucket, "{}/{}".format(obj_prefix, url_doc.fs_id)).get()["Body"]

        packed_bytes = r.read()

        # Payload is zlib-compressed msgpack with the object under "data".
        packed_bytes = zlib.decompress(packed_bytes)

        json_data = msgpack.unpackb(packed_bytes, raw=False)

        chgcar = MontyDecoder().process_decoded(json_data["data"])

        return chgcar
    else:
        return None
def from_dict(cls, d):
    """
    Creates a StorageQuantity object from a dictionary of instance
    variable values.

    Args:
        d: (dict) dictionary of object instance variable values

    Returns:
        (StorageQuantity) StorageQuantity represented by the dictionary
        values
    """
    decoder = MontyDecoder()
    kwargs = {}
    for key, raw in d.items():
        # '@'-prefixed keys are MSON metadata, not instance state.
        if key.startswith("@"):
            continue
        kwargs[key] = decoder.process_decoded(raw)

    # A string symbol_type is a name to be looked up in the registry.
    symbol = kwargs['symbol_type']
    if isinstance(symbol, str):
        kwargs['symbol_type'] = DEFAULT_SYMBOLS[symbol]

    quantity = cls()
    quantity._initialize(**kwargs)
    return quantity
async def worker(url: str, num_workers: int):
    """
    Simple distributed worker that connects to a master, asks for work,
    and deploys it using multiprocessing.

    Args:
        url: address of the master socket to dial.
        num_workers: number of processes to run each received builder with.

    The worker loops forever: it announces readiness, receives a JSON
    message, and either runs it as a decoded builder (when the message
    carries MSON '@class'/'@module' keys) or shuts down.
    """
    # Should this have some sort of unique ID?
    logger = getLogger("Worker")

    # Fixed typo in log message ("Connnecting" -> "Connecting").
    logger.info(f"Connecting to Master at {url}")
    with Pair1(dial=url, polyamorous=True) as master:
        logger.info(f"Connected to Master at {url}")
        running = True
        while running:
            await master.asend(b"Ready")
            message = await master.arecv()
            work = json.loads(message.decode("utf-8"))

            if "@class" in work and "@module" in work:
                # We have a valid builder
                builder = MontyDecoder().process_decoded(work)
                await multi(builder, num_workers)
            else:
                # End the worker
                # This should look for a specific message ?
                running = False
def last_updated_dict_ok(cls, v):
    """Decode the raw value with MontyDecoder and return the result
    (reconstructing any serialized object it may contain)."""
    decoder = MontyDecoder()
    return decoder.process_decoded(v)
def get_bandstructure_from_material_id(
    self,
    material_id: str,
    path_type: BSPathType = BSPathType.setyawan_curtarolo,
    line_mode=True,
):
    """
    Get the band structure pymatgen object associated with a Materials
    Project ID.

    Arguments:
        materials_id (str): Materials Project ID for a material
        path_type (BSPathType): k-point path selection convention
        line_mode (bool): Whether to return data for a line-mode
            calculation

    Returns:
        bandstructure (Union[BandStructure, BandStructureSymmLine]):
            BandStructure or BandStructureSymmLine object

    Raises:
        MPRestError: when no band structure data / object is found for
            the given ID and path type.
    """
    es_rester = ElectronicStructureRester(endpoint=self.base_endpoint,
                                          api_key=self.api_key)

    if line_mode:
        # Line-mode: look up the task id under the requested k-path
        # convention in the bandstructure summary document.
        bs_data = es_rester.get_data_by_id(document_id=material_id,
                                           fields=["bandstructure"
                                                   ]).bandstructure

        if bs_data is None:
            raise MPRestError(
                "No {} band structure data found for {}".format(
                    path_type.value, material_id))
        else:
            bs_data = bs_data.dict()

        if bs_data.get(path_type.value, None):
            bs_task_id = bs_data[path_type.value]["task_id"]
        else:
            raise MPRestError(
                "No {} band structure data found for {}".format(
                    path_type.value, material_id))
    else:
        # Uniform mode: the task id comes from the DOS summary document
        # (key "total" -> spin channel "1").
        bs_data = es_rester.get_data_by_id(document_id=material_id,
                                           fields=["dos"]).dos

        if bs_data is None:
            raise MPRestError(
                "No uniform band structure data found for {}".format(
                    material_id))
        else:
            bs_data = bs_data.dict()

        if bs_data.get("total", None):
            bs_task_id = bs_data["total"]["1"]["task_id"]
        else:
            raise MPRestError(
                "No uniform band structure data found for {}".format(
                    material_id))

    bs_obj = self.get_bandstructure_from_task_id(bs_task_id)

    if bs_obj:
        # Stored object is base64-encoded, zlib-compressed msgpack; the
        # band structure itself sits under the "data" key.
        b64_bytes = base64.b64decode(bs_obj[0], validate=True)
        packed_bytes = zlib.decompress(b64_bytes)
        json_data = msgpack.unpackb(packed_bytes, raw=False)
        data = MontyDecoder().process_decoded(json_data["data"])

        return data
    else:
        raise MPRestError("No band structure object found.")