def search_resource(eos_store):
    resource = Resource(
        eos_store,
        SearchDoc,
        query_operators=[
            FormulaQuery(),
            MinMaxQuery(),
            SymmetryQuery(),
            ThermoEnergySearchQuery(),
            IsStableQuery(),
            SearchBandGapQuery(),
            BulkModulusQuery(),
            ShearModulusQuery(),
            PoissonQuery(),
            DielectricQuery(),
            PiezoelectricQuery(),
            SurfaceMinMaxQuery(),
            SearchTaskIDsQuery(),
            HasPropsQuery(),
            DeprecationQuery(),
            PaginationQuery(),
            SparseFieldsQuery(SearchDoc, default_fields=["task_id"]),
        ],
        tags=["Search"],
    )

    return resource
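# Usage sketch: each factory in this module takes a maggma-style store and returns a
# Resource whose `router` attribute is a FastAPI APIRouter (the custom endpoints below
# register their routes on it), so resources can be mounted on an application. The
# mount prefix and app wiring here are illustrative assumptions, not this project's
# actual entry point.
def _example_app(search_store):
    """Sketch only: mount the search resource's router on a FastAPI app."""
    from fastapi import FastAPI

    app = FastAPI()
    app.include_router(search_resource(search_store).router, prefix="/search")
    return app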
def xas_resource(xas_store):
    resource = Resource(
        xas_store,
        XASDoc,
        query_operators=[
            FormulaQuery(),
            XASQuery(),
            PaginationQuery(),
            SparseFieldsQuery(
                XASDoc,
                default_fields=[
                    "xas_id",
                    "task_id",
                    "edge",
                    "absorbing_element",
                    "formula_pretty",
                    "spectrum_type",
                    "last_updated",
                ],
            ),
        ],
        tags=["XAS"],
    )

    return resource
def task_resource(task_store):
    resource = Resource(
        task_store,
        TaskDoc,
        query_operators=[
            FormulaQuery(),
            PaginationQuery(),
            SparseFieldsQuery(
                TaskDoc,
                default_fields=["task_id", "formula_pretty", "last_updated"],
            ),
        ],
        tags=["Tasks"],
    )

    return resource
def trajectory_resource(task_store):
    class TrajectoryProcess(APIRoute):
        def get_route_handler(self) -> Callable:
            original_route_handler = super().get_route_handler()

            async def custom_route_handler(request: Request) -> Response:
                response: Response = await original_route_handler(request)

                # Rebuild the response body: convert each task's calcs_reversed
                # entries into trajectory objects before returning.
                d = json.loads(response.body.decode(response.charset))

                trajectories = []

                for entry in d["data"]:
                    trajectories.append(
                        calcs_reversed_to_trajectory(entry["calcs_reversed"])
                    )

                trajectories = jsanitize(trajectories)

                response.body = json.dumps(
                    trajectories,
                    ensure_ascii=False,
                    allow_nan=False,
                    indent=None,
                    separators=(",", ":"),
                ).encode(response.charset)

                # The body changed, so the content-length header must be updated.
                response.headers["content-length"] = str(len(response.body))

                return response

            return custom_route_handler

    resource = Resource(
        task_store,
        TaskDoc,
        query_operators=[FormulaQuery(), PaginationQuery()],
        route_class=TrajectoryProcess,
        key_fields=["calcs_reversed"],
        tags=["Tasks"],
    )

    return resource
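# Client-side sketch for the trajectory endpoint above. Because TrajectoryProcess
# rewrites the response body, the endpoint returns trajectory objects built from each
# task's calcs_reversed entries rather than the usual {"data": [...]} document payload.
# The mount prefix and the query parameter names ("formula", "limit") are assumptions
# about what FormulaQuery and PaginationQuery expose.
def _example_trajectory_request(base_url: str):
    """Sketch only: fetch post-processed trajectories for a formula (illustrative)."""
    import requests

    resp = requests.get(
        f"{base_url}/trajectories/",
        params={"formula": "Si", "limit": 1},  # assumed parameter names
    )
    resp.raise_for_status()

    # The rewritten body is a JSON list of trajectory dicts (jsanitize output).
    return resp.json()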
def bs_resource(bs_store, s3_store):
    def custom_bs_endpoint_prep(self):

        self.s3 = s3_store
        model = BSObjectReturn
        model_name = model.__name__
        key_name = self.s3.key

        field_input = SparseFieldsQuery(
            model, [self.s3.key, self.s3.last_updated_field]
        ).query

        async def get_object(
            key: str = Query(
                ...,
                alias=key_name,
                title=f"The {key_name} of the {model_name} to get",
            ),
            path_type: BSPathType = Query(
                ...,
                title="The k-path convention type for the band structure object",
            ),
            fields: STORE_PARAMS = Depends(field_input),
        ):
            f"""
            Gets a document by the primary key in the store

            Args:
                {key_name}: the id of a single {model_name}

            Returns:
                a single {model_name} document
            """
            self.store.connect()
            self.s3.connect()

            bs_entry = self.store.query_one(
                criteria={self.store.key: key},
                properties=[f"{str(path_type.name)}.task_id"],
            )

            # Guard against a missing document or path-type entry so the endpoint
            # returns a 404 instead of raising an AttributeError.
            bs_task = ((bs_entry or {}).get(str(path_type.name)) or {}).get(
                "task_id", None
            )

            if bs_task is None:
                raise HTTPException(
                    status_code=404,
                    detail=f"Band structure with {self.store.key} = {key} not found",
                )

            item = self.s3.query_one(
                {"task_id": bs_task}, properties=fields["properties"]
            )
            response = item

            return response

        self.router.get(
            "/object/",
            response_description=f"Get a {model_name} by {key_name}",
            response_model=model,
            response_model_exclude_unset=True,
            tags=self.tags,
        )(get_object)

    resource = Resource(
        bs_store,
        BSDoc,
        query_operators=[
            BSDataQuery(),
            FormulaQuery(),
            MinMaxQuery(),
            PaginationQuery(),
            SparseFieldsQuery(BSDoc, default_fields=["task_id", "last_updated"]),
        ],
        tags=["Electronic Structure"],
        custom_endpoint_funcs=[custom_bs_endpoint_prep],
    )

    return resource
def materials_resource(materials_store):
    def custom_version_prep(self):
        model_name = self.model.__name__

        async def get_versions():
            f"""
            Obtains the database versions for the data in {model_name}

            Returns:
                A list of database versions one can use to query on
            """
            try:
                # Authenticated host/port connection if credentials are set on the store;
                # falls back to the URI connection when those attributes are absent.
                conn = MongoClient(self.store.host, self.store.port)
                db = conn[self.store.database]
                if self.store.username != "":
                    db.authenticate(self.store.username, self.store.password)
            except AttributeError:
                conn = MongoClient(self.store.uri)
                db = conn[self.store.database]

            col_names = db.list_collection_names()

            # Derive version strings from the materials collection names.
            d = [
                name.replace("_", ".")[15:]
                for name in col_names
                if "materials" in name
                if name != "materials.core"
            ]

            response = {"data": d}

            return response

        self.router.get(
            "/versions/",
            response_model_exclude_unset=True,
            response_description=f"Get versions of {model_name}",
            tags=self.tags,
        )(get_versions)

    def custom_findstructure_prep(self):
        model_name = self.model.__name__

        async def find_structure(
            structure: Structure = Body(
                ...,
                title="Pymatgen structure object to query with",
            ),
            ltol: float = Query(
                0.2,
                title="Fractional length tolerance. Default is 0.2.",
            ),
            stol: float = Query(
                0.3,
                title="Site tolerance. Defined as the fraction of the average free "
                "length per atom := ( V / Nsites ) ** (1/3). Default is 0.3.",
            ),
            angle_tol: float = Query(
                5,
                title="Angle tolerance in degrees. Default is 5 degrees.",
            ),
            limit: int = Query(
                1,
                title="Maximum number of matches to show. Defaults to 1, only showing the best match.",
            ),
        ):
            """
            Obtains material structures that match a given input structure
            within some tolerance.

            Returns:
                A list of Material IDs for materials with matched structures
                alongside the associated RMS values
            """
            try:
                s = PS.from_dict(structure.dict())
            except Exception:
                raise HTTPException(
                    status_code=404,
                    detail="Body cannot be converted to a pymatgen structure object.",
                )

            m = StructureMatcher(
                ltol=ltol,
                stol=stol,
                angle_tol=angle_tol,
                primitive_cell=True,
                scale=True,
                attempt_supercell=False,
                comparator=ElementComparator(),
            )

            # Pre-filter candidates by reduced composition before structure matching.
            crit = {"composition_reduced": dict(s.composition.to_reduced_dict)}

            self.store.connect()

            matches = []

            for r in self.store.query(
                criteria=crit, properties=["structure", "task_id"]
            ):
                s2 = PS.from_dict(r["structure"])
                matched = m.fit(s, s2)

                if matched:
                    rms = m.get_rms_dist(s, s2)

                    matches.append(
                        {
                            "task_id": r["task_id"],
                            "normalized_rms_displacement": rms[0],
                            "max_distance_paired_sites": rms[1],
                        }
                    )

            response = {
                "data": sorted(
                    matches[:limit],
                    key=lambda x: (
                        x["normalized_rms_displacement"],
                        x["max_distance_paired_sites"],
                    ),
                )
            }

            return response

        self.router.post(
            "/find_structure/",
            response_model_exclude_unset=True,
            response_description=f"Get matching structures using data from {model_name}",
            tags=self.tags,
        )(find_structure)

    def custom_autocomplete_prep(self):
        async def formula_autocomplete(
            text: str = Query(
                ...,
                description="Text to run against formula autocomplete",
            ),
            limit: int = Query(
                10,
                description="Maximum number of matches to show. Defaults to 10.",
            ),
        ):
            comp = Composition(text)

            ind_str = []

            if len(comp) == 1:
                d = comp.get_integer_formula_and_factor()
                s = d[0] + str(int(d[1])) if d[1] != 1 else d[0]
                ind_str.append(s)
            else:
                comp_red = comp.reduced_composition.items()

                for (i, j) in comp_red:
                    if j != 1:
                        ind_str.append(i.name + str(int(j)))
                    else:
                        ind_str.append(i.name)

            # Match any ordering of the element tokens against the Atlas Search index.
            final_terms = ["".join(entry) for entry in permutations(ind_str)]

            pipeline = [
                {
                    "$search": {
                        "index": "formula_autocomplete",
                        "autocomplete": {
                            "path": "formula_pretty",
                            "query": final_terms,
                            "tokenOrder": "any",
                        },
                    }
                },
                {"$group": {"_id": "$formula_pretty"}},
                {"$project": {"score": {"$strLenCP": "$_id"}}},
                {"$sort": {"score": 1}},
                {"$limit": limit},
            ]

            self.store.connect()

            data = list(
                self.store._collection.aggregate(pipeline, allowDiskUse=True)
            )

            response = {"data": data}

            return response

        self.router.get(
            "/formula_autocomplete/",
            response_model_exclude_unset=True,
            response_description="Get autocomplete results for a formula",
            tags=self.tags,
        )(formula_autocomplete)

    resource = Resource(
        materials_store,
        MaterialsCoreDoc,
        query_operators=[
            VersionQuery(),
            FormulaQuery(),
            MultiTaskIDQuery(),
            SymmetryQuery(),
            DeprecationQuery(),
            MinMaxQuery(),
            SortQuery(),
            PaginationQuery(),
            SparseFieldsQuery(
                MaterialsCoreDoc,
                default_fields=["task_id", "formula_pretty", "last_updated"],
            ),
        ],
        tags=["Materials"],
        custom_endpoint_funcs=[
            custom_version_prep,
            custom_findstructure_prep,
            custom_autocomplete_prep,
        ],
    )

    return resource
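# Client-side sketch for the /find_structure/ endpoint defined above. It posts a
# serialized pymatgen structure and reads back the matched task_ids with their RMS
# values. The base URL, mount prefix, and example structure are assumptions; whether
# the raw as_dict() payload validates against the endpoint's Structure model depends
# on that model's configuration. Tolerances mirror the endpoint defaults.
def _example_find_structure(base_url: str):
    """Sketch only: query /find_structure/ with a bcc Fe cell (illustrative)."""
    import requests
    from pymatgen.core import Lattice, Structure as PymatgenStructure

    structure = PymatgenStructure(
        Lattice.cubic(2.87), ["Fe", "Fe"], [[0, 0, 0], [0.5, 0.5, 0.5]]
    )

    resp = requests.post(
        f"{base_url}/find_structure/",
        json=structure.as_dict(),  # request body: serialized structure
        params={"ltol": 0.2, "stol": 0.3, "angle_tol": 5, "limit": 1},
    )
    resp.raise_for_status()

    # Response shape: {"data": [{"task_id": ..., "normalized_rms_displacement": ...,
    #                            "max_distance_paired_sites": ...}, ...]}
    return resp.json()["data"]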