def _get_mpid_cache(self):

    path = os.path.join(os.path.dirname(module_path), "mpid_cache.json")

    if os.path.isfile(path):
        mpid_cache = loadfn(path)
    else:
        with MPRester() as mpr:
            # restrict random mpids to those likely experimentally known
            # and not too large
            entries = mpr.query(
                {"nsites": {"$lte": 16}},
                ["task_id", "icsd_ids"],
                chunk_size=0,
                mp_decode=False,
            )
        mpid_cache = [
            entry["task_id"] for entry in entries if len(entry["icsd_ids"]) > 2
        ]
        dumpfn(mpid_cache, path)

    self.mpid_cache = mpid_cache

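# Example (sketch): once self.mpid_cache is populated, a random "known"
# material can be drawn from it, e.g. to back a "random structure" button.
# get_random_mpid is a hypothetical helper for illustration only and assumes
# it lives on the same class as _get_mpid_cache above.
import random

def get_random_mpid(self):
    # assumes _get_mpid_cache() has already run and populated the cache
    return random.choice(self.mpid_cache)
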
def _get_tag_cache(self):

    path = os.path.join(os.path.dirname(module_path), "tag_cache.json")

    def _process_tag(tag):
        # remove information that is typically not helpful
        return tag.split(" (")[0]

    if os.path.isfile(path):
        tag_cache = loadfn(path)
    else:
        with MPRester() as mpr:
            entries = mpr.query(
                {},
                [
                    "exp.tags",
                    "task_id",
                    "e_above_hull",
                    "pretty_formula",
                    "spacegroup.symbol",
                ],
                chunk_size=0,
                mp_decode=False,
            )
        tags = [
            [(_process_tag(tag), entry) for tag in entry["exp.tags"]]
            for entry in entries
        ]
        tag_cache = defaultdict(list)
        for tag, entry in chain.from_iterable(tags):
            tag_cache[tag].append(entry)
        dumpfn(tag_cache, path)

    self.tag_cache = tag_cache
    self.tag_cache_keys = list(tag_cache.keys())

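# Example (sketch): the tag cache maps a cleaned-up tag string to the entries
# carrying that tag, so a fuzzy tag search can be layered on top of it. The
# search_tags sketch below is an illustrative assumption using difflib, not
# necessarily how the real method is implemented; it only relies on the
# tag_cache / tag_cache_keys attributes set above.
from difflib import get_close_matches

def search_tags(self, search_term):
    # find tag strings that loosely match the search term
    matched_tags = get_close_matches(
        search_term, self.tag_cache_keys, n=5, cutoff=0.6
    )
    entries = [entry for tag in matched_tags for entry in self.tag_cache[tag]]
    mpids = [entry["task_id"] for entry in entries]
    return mpids, matched_tags
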
def master_update_structure(search_mpid, upload_data):

    if not search_mpid and not upload_data:
        raise PreventUpdate

    search_mpid = search_mpid or {}
    upload_data = upload_data or {}

    time_searched = search_mpid.get("time_requested", -1)
    time_uploaded = upload_data.get("time_requested", -1)

    if time_searched > time_uploaded:

        if search_mpid is None or "mpid" not in search_mpid:
            raise PreventUpdate

        with MPRester() as mpr:
            try:
                struct = mpr.get_task_data(search_mpid["mpid"], "structure")[0][
                    "structure"
                ]
                print("Struct from task.")
            except Exception:
                # fall back to the canonical structure for this material
                struct = mpr.get_structure_by_material_id(search_mpid["mpid"])
                print("Struct from material.")

    else:
        struct = MPComponent.from_data(upload_data["data"])

    return MPComponent.to_data(struct.as_dict())

def master_update_structure(search_mpid: Optional[str], upload_data: Optional[str]):
    """
    A new structure is loaded either from the search component or from the
    upload component. This callback triggers the update, and uses the callback
    context to determine which should take precedence if both a search term
    and uploaded data are present.

    Args:
        search_mpid: e.g. "mp-11358"
        upload_data: output of upload component, {"data": ..., "error": ...}

    Returns:
        an encoded Structure
    """

    print("master_update_structure", search_mpid, upload_data)

    if not search_mpid and not upload_data:
        raise PreventUpdate
    if not dash.callback_context.triggered:
        raise PreventUpdate

    if (
        dash.callback_context.triggered[0]["prop_id"]
        == search_component.id() + ".data"
    ):
        load_by = "mpid"
    else:
        load_by = "uploaded"

    upload_data = upload_data or {}

    if load_by == "mpid":

        if search_mpid is None:
            raise PreventUpdate

        with MPRester() as mpr:
            # TODO: add comprehensive fix to this in pymatgen
            try:
                struct = mpr.get_task_data(search_mpid, "structure")[0]["structure"]
                print("Struct from task.")
            except Exception:
                struct = mpr.get_structure_by_material_id(search_mpid)
                print("Struct from material.")

    else:
        struct = MPComponent.from_data(upload_data["data"])

    return struct

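# Example (sketch): in Dash, a callback like master_update_structure is
# registered against component ids via app.callback. The store ids used here
# ("search-store", "upload-store", "structure-store") and the standalone app
# object are placeholders for illustration; the real app wires the callback to
# its own component ids (e.g. search_component.id()).
import dash
from dash.dependencies import Input, Output

app = dash.Dash(__name__)

app.callback(
    Output("structure-store", "data"),
    [Input("search-store", "data"), Input("upload-store", "data")],
)(master_update_structure)
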
def retrieve_grain_boundaries(mpid):

    if not mpid or "mpid" not in mpid:
        raise PreventUpdate

    data = None
    with MPRester() as mpr:
        data = mpr.get_gb_data(mpid["mpid"])

    if not data:
        return (
            "No grain boundary information computed for this crystal structure. "
            "Grain boundary information has only been computed for elemental ground state "
            "crystal structures at present."
        )

    table_data = [
        {
            "Sigma": d["sigma"],
            "Rotation Axis": f"{d['rotation_axis']}",
            "Rotation Angle / º": f"{d['rotation_angle']:.2f}",
            "Grain Boundary Plane": f"({' '.join(map(str, d['gb_plane']))})",
            "Grain Boundary Energy / Jm⁻²": f"{d['gb_energy']:.2f}",
        }
        for d in data
    ]
    df = pd.DataFrame(table_data)

    table = dt.DataTable(
        id=self.id("table"),
        columns=[{"name": i, "id": i} for i in df.columns],
        data=df.to_dict("records"),
        style_cell={
            "minWidth": "0px",
            "maxWidth": "200px",
            "whiteSpace": "normal",
        },
        css=[
            {
                "selector": ".dash-cell div.dash-cell-value",
                "rule": "display: inline; white-space: inherit; "
                "overflow: inherit; text-overflow: inherit;",
            }
        ],
        sort_action="native",
        sort_mode="multi",
    )

    view = html.Div(
        [
            StructureMoleculeComponent(
                data[2]["initial_structure"],
                id=self.id("struct"),
                static=True,
                color_scheme="grain_label",
            ).struct_layout
        ],
        style={"width": "400px", "height": "400px"},
    )

    return Columns([Column(table), Column(view)])

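# Example (sketch): get_gb_data returns a list of dicts whose keys match those
# used in the table above (sigma, rotation_axis, rotation_angle, gb_plane,
# gb_energy, initial_structure). "mp-13" (bcc Fe) is only an illustrative
# mp-id for an elemental ground-state structure.
with MPRester() as mpr:
    gb_data = mpr.get_gb_data("mp-13")
for d in gb_data:
    print(d["sigma"], d["rotation_angle"], d["gb_energy"])
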
                [
                    MessageHeader("Warning"),
                    MessageBody(
                        dcc.Markdown(
                            "This is a pre-release version of Crystal Toolkit and "
                            "may not behave reliably."
                        )
                    ),
                ],
                kind="warning",
            ),
        ],
        id="banner",
    )

api_offline, api_error = True, "Unknown error connecting to Materials Project API."
try:
    with MPRester() as mpr:
        api_check = mpr._make_request("/api_check")
    if not api_check.get("api_key_valid", False):
        api_error = (
            "Materials Project API key not supplied or not valid, "
            "please set PMG_MAPI_KEY in your environment."
        )
    else:
        api_offline = False
except Exception as exception:
    api_error = str(exception)

if api_offline:
    banner = html.Div(
        [
            html.Br(),
            MessageContainer(
                [
                    MessageHeader(

def get_human_readable_results_from_search_term(search_term):

    # common confusables
    if search_term.isnumeric() and str(int(search_term)) == search_term:
        search_term = f"mp-{search_term}"
    if search_term.startswith("mp") and "-" not in search_term:
        search_term = f"mp-{search_term.split('mp')[1]}"

    if search_term.startswith("mp-") or search_term.startswith("mvc-"):
        # no need to actually search, support multiple mp-ids (space separated)
        return {mpid: mpid for mpid in search_term.split(" ")}

    with MPRester() as mpr:
        try:
            entries = mpr.query(
                search_term,
                [
                    "task_id",
                    "pretty_formula",
                    "e_above_hull",
                    "spacegroup.symbol",
                ],
            )
        except CompositionError:
            entries = []

        mpids, tags = None, None
        if len(entries) == 0 and not (
            search_term.startswith("mp-") or search_term.startswith("mvc-")
        ):
            mpids, tags = self.search_tags(search_term)
            entries = mpr.query(
                {"task_id": {"$in": mpids}},
                ["task_id", "pretty_formula", "e_above_hull", "spacegroup.symbol"],
            )

    if len(entries) == 0:
        self.logger.info(f"Search: no results for {search_term}")
        return {"error": f"No results found for {search_term}."}

    # sort by e_above_hull if a normal query, or by Levenshtein distance
    # if fuzzy matching (order of mpids list if present matches Levenshtein distance)
    if not mpids:
        entries = sorted(entries, key=lambda x: x["e_above_hull"])
    else:
        entries = sorted(entries, key=lambda x: mpids.index(x["task_id"]))

    for entry in entries:
        e_hull = entry["e_above_hull"]
        if e_hull == 0:
            entry["e_above_hull_human"] = "predicted stable phase"
        elif e_hull >= 0.01:
            entry["e_above_hull_human"] = f"+{e_hull:.2f} eV/atom"
        else:
            e_hull_str = np.format_float_scientific(e_hull, precision=2)
            entry["e_above_hull_human"] = f"+{e_hull_str} eV/atom"

    human_readable_results = {
        entry["task_id"]: f"{unicodeify(entry['pretty_formula'])} "
        f"({unicodeify_spacegroup(entry['spacegroup.symbol'])}) "
        f"{entry['e_above_hull_human']}"
        for entry in entries
    }

    return human_readable_results

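# Example (sketch): the return value maps mp-ids to human-readable labels that
# combine formula, spacegroup symbol and hull energy, e.g.
# "GaN (P6₃mc) predicted stable phase". The search term below is purely
# illustrative; on no match, a single {"error": ...} entry is returned instead.
results = get_human_readable_results_from_search_term("GaN")
for mpid, label in results.items():
    print(mpid, label)
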