def _on_database_select(self, _):
    """Load chosen database.

    Resets the structure dropdown; if no valid database is chosen the UI
    is frozen, otherwise filter ranges, API version and sortable fields
    are refreshed for the newly selected database.
    """
    self.structure_drop.reset()
    if (
        self.database[1] is None
        or getattr(self.database[1], "base_url", None) is None
    ):
        self.query_button.tooltip = "Search - No database chosen"
        self.freeze()
    else:
        self.offset = 0
        self.number = 1
        self.structure_page_chooser.silent_reset()
        try:
            self.freeze()

            self.query_button.description = "Updating ..."
            self.query_button.icon = "cog"
            self.query_button.tooltip = "Updating filters ..."

            self._set_intslider_ranges()
            self._set_version()
        except Exception as exc:  # pylint: disable=broad-except
            # BUGFIX: `exc.with_traceback()` requires a traceback argument and
            # would itself raise TypeError here; log the exception object.
            LOGGER.error(
                "Exception raised during setting IntSliderRanges: %s",
                exc,
            )
        finally:
            self.query_button.description = "Search"
            self.query_button.icon = "search"
            self.query_button.tooltip = "Search"
            self.sort_selector.valid_fields = sorted(
                get_sortable_fields(self.database[1].base_url)
            )
            self.unfreeze()
def update_local_providers_json(response: dict) -> None:
    """Update local `providers.json` if necessary.

    Strips dynamic meta fields from *response* and writes the result to
    ``CACHED_PROVIDERS``, unless the cached file already matches.
    """
    # Remove dynamic fields.
    # BUGFIX: `response.copy()` is shallow, so popping keys from the nested
    # "meta" dict mutated the caller's `response` as well - copy "meta"
    # before modifying it.
    _response = response.copy()
    if isinstance(_response.get("meta"), dict):
        _response["meta"] = dict(_response["meta"])
    for dynamic_field in (
        "time_stamp",
        "query",
        "last_id",
        "response_message",
        "warnings",
    ):
        _response.get("meta", {}).pop(dynamic_field, None)

    if CACHED_PROVIDERS.exists():
        try:
            with open(CACHED_PROVIDERS, "r") as handle:
                _file_response = json.load(handle)
        except JSONDecodeError:
            # Corrupt cache file - fall through and overwrite it below.
            pass
        else:
            if _file_response == _response:
                LOGGER.debug("Local %r is up-to-date", CACHED_PROVIDERS.name)
                return

    LOGGER.debug(
        "Creating/updating local file of cached providers (%r).",
        CACHED_PROVIDERS.name,
    )
    with open(CACHED_PROVIDERS, "w") as handle:
        json.dump(_response, handle)
def _on_change_structure(self, change: dict):
    """Update widget when a new structure is chosen.

    Resets the widget when the structure is cleared; otherwise refreshes
    the format options and auto-selects the first enabled dropdown entry.
    """
    if change["new"] is None:
        LOGGER.debug(
            "Got no new structure for DownloadChooser (change['new']=%s).",
            change["new"],
        )
        self.reset()
    else:
        LOGGER.debug(
            "Got new structure for DownloadChooser: id=%s", change["new"].id
        )
        self._update_options()
        self.unfreeze()

        # Auto-choose the first (available) option in the dropdown
        available_formats = {
            label: index for index, (label, _) in enumerate(self._formats)
        }
        # BUGFIX: use pop(..., None) - a plain pop() raises KeyError when the
        # header entry (first format) also appears among the disabled options,
        # or when a disabled label is not a known format.
        available_formats.pop(self._formats[0][0], None)
        for label in self.dropdown.disabled_options:
            available_formats.pop(label, None)

        if available_formats:
            new_index = min(available_formats.values())
            self.dropdown.index = new_index
        else:
            self.dropdown.index = 0
def _set_version(self):
    """Set self.database_version from an /info query"""
    base_url = self.database[1].base_url
    if base_url not in self.__cached_versions:
        # Not cached yet - retrieve the version from the /info endpoint.
        response = perform_optimade_query(
            base_url=self.database[1].base_url, endpoint="/info"
        )
        msg, _ = handle_errors(response)
        if msg:
            raise QueryError(msg)

        if "meta" not in response:
            raise QueryError(
                f"'meta' field not found in /info endpoint for base URL: {base_url}"
            )
        if "api_version" not in response["meta"]:
            raise QueryError(
                f"'api_version' field not found in 'meta' for base URL: {base_url}"
            )

        # Normalize away a single leading "v" (e.g. "v1.0.1" -> "1.0.1").
        api_version = response["meta"]["api_version"]
        if api_version.startswith("v"):
            api_version = api_version[1:]
        self.__cached_versions[base_url] = api_version

        LOGGER.debug(
            "Cached version %r for base URL: %r",
            self.__cached_versions[base_url],
            base_url,
        )

    self.database_version = self.__cached_versions[base_url]
def _observe_providers(self, change: dict):
    """Update child database dropdown upon changing provider"""
    chosen_provider = change["new"]
    self.show_child_dbs.display = "none"
    self.provider = chosen_provider

    if not chosen_provider:
        # No provider chosen (or falsy value): reset the child-DB widgets.
        self.show_child_dbs.display = "none"
        self.child_dbs.grouping = self.INITIAL_CHILD_DBS
        self.providers.index = 0
        self.child_dbs.index = 0
        return

    self._initialize_child_dbs()

    implementation_count = sum(len(group[1]) for group in self.child_dbs.grouping)
    if implementation_count <= 2:
        # The provider either has 0 or 1 implementations
        # or we have failed to retrieve any implementations.
        # Automatically choose the 1 implementation (if there),
        # while otherwise keeping the dropdown disabled.
        self.show_child_dbs.display = "none"
        try:
            self.child_dbs.index = 1
            LOGGER.debug(
                "Changed child_dbs index. New child_dbs: %s", self.child_dbs
            )
        except IndexError:
            pass
    else:
        self.show_child_dbs.display = None
def _on_database_change(self, change):
    """Update database summary, since self.database has been changed"""
    new_database = change["new"]
    LOGGER.debug("Database changed in summary. New value: %r", new_database)
    # Plain truthiness check; `not x or x is None` collapses to `not x`.
    if new_database:
        self._update_database()
    else:
        self.database_summary.value = ""
def _get_more_results(self, change):
    """Query for more results according to pageing"""
    # Guard flag: when False, this handler was triggered programmatically
    # (e.g. by the `update_offset()` call below) and must not re-query.
    if not self.__perform_query:
        self.__perform_query = True
        LOGGER.debug(
            "NOT going to perform query with change: name=%s value=%s",
            change["name"],
            change["new"],
        )
        return

    pageing: Union[int, str] = change["new"]
    LOGGER.debug(
        "Updating results with pageing change: name=%s value=%s",
        change["name"],
        pageing,
    )
    if change["name"] == "page_offset":
        # Offset-based pagination: store it and query via parameters
        # (pageing=None means "no complete link" for self._query).
        self.offset = pageing
        pageing = None
    elif change["name"] == "page_number":
        # Page-number-based pagination: same treatment as the offset case.
        self.number = pageing
        pageing = None
    else:
        # It is needed to update page_offset, but we do not wish to query again
        with self.hold_trait_notifications():
            self.__perform_query = False
            self.structure_page_chooser.update_offset()

    try:
        # Freeze and disable list of structures in dropdown widget
        # We don't want changes leading to weird things happening prior to the query ending
        self.freeze()

        # Update button text and icon
        self.query_button.description = "Updating ... "
        self.query_button.icon = "cog"
        self.query_button.tooltip = "Please wait ..."

        # Query database
        response = self._query(pageing)
        msg, _ = handle_errors(response)
        if msg:
            self.error_or_status_messages.value = msg
            return

        # Update list of structures in dropdown widget
        self._update_structures(response["data"])

        # Update pageing
        self.structure_page_chooser.set_pagination_data(
            links_to_page=response.get("links", {}),
        )
    finally:
        # Always restore the button and unfreeze, even on error/early return.
        self.query_button.description = "Search"
        self.query_button.icon = "search"
        self.query_button.tooltip = "Search"
        self.unfreeze()
def _on_provider_change(self, change: dict):
    """Update provider summary, since self.provider has been changed"""
    new_provider = change["new"]
    LOGGER.debug("Provider changed in summary. New value: %r", new_provider)
    # A provider change always invalidates the database summary.
    self.database_summary.value = ""
    if new_provider:
        self._update_provider()
    else:
        self.provider_summary.value = ""
def __init__(self, *args: Tuple[Any]):
    """Log a warning describing the wrapped exception, then initialize."""
    wrapped = args[0] if args else None
    if isinstance(wrapped, Exception):
        # Report the wrapped exception's identity and docstring.
        warn_name = wrapped.__class__.__name__
        warn_doc = wrapped.__doc__
    else:
        warn_name = self.__class__.__name__
        warn_doc = self.__doc__
    LOGGER.warning(
        "%s warned.\nWarning message: %s\nAbout this warning: %s",
        warn_name,
        str(wrapped) if args else "",
        warn_doc,
    )
    super().__init__(*args)
def __getitem__(cls, name):
    """Log warning and default to "DEFAULT" if name is not valid"""
    if name in cls._member_map_:
        return cls._member_map_[name]
    # Unknown style: warn and fall back to the DEFAULT member.
    LOGGER.warning(
        "%r is not a valid button style. Setting button style to 'DEFAULT'. "
        "Valid button styles: %s",
        name,
        list(cls._member_map_.keys()),
    )
    return cls._member_map_["DEFAULT"]
def _get_file(filename: str) -> Union[str, bytes]:
    """Read and return file"""
    path = Path(filename).resolve()
    LOGGER.debug("Trying image file path: %s", str(path))
    if path.exists() and path.is_file():
        # Return the raw bytes of the file.
        return path.read_bytes()
    LOGGER.debug("File %s either does not exist or is not a file", str(path))
    return ""
def __init__(self, *args: Tuple[Any]):
    """Log an error describing the wrapped exception, then initialize."""
    cause = args[0] if args else None
    if isinstance(cause, Exception):
        # Report the wrapped exception's identity and docstring.
        error_name = cause.__class__.__name__
        error_doc = cause.__doc__
    else:
        error_name = self.__class__.__name__
        error_doc = self.__doc__
    LOGGER.error(
        "%s raised.\nError message: %s\nAbout this exception: %s",
        error_name,
        str(cause) if args else "",
        error_doc,
    )
    super().__init__(*args)
def _initialize_options(self) -> None:
    """Initialize options according to installed packages"""
    optional_adapters = ((aseAtoms, "ase"), (pymatgenStructure, "pymatgen"))
    for imported_object, adapter_format in optional_adapters:
        if imported_object is not None:
            continue
        # The optional package is missing: drop every format option that
        # relies on its adapter.
        LOGGER.debug("%s not recognized to be installed.", adapter_format)
        self._formats = [
            option
            for option in self._formats
            if option[1].get("adapter_format", "") != adapter_format
        ]
def value(self) -> Tuple[bool, Dict[str, int]]:
    """Return value for wrapped PTableWidget.

    Returns a 2-tuple:
    - ``use_all``: the negation of the ANY/ALL toggle value (per the log
      message below, the toggle is ``True`` for "ANY").
    - a copy of ``PTableWidget.selected_elements``.

    BUGFIX: the return annotation previously said ``dict`` although the
    method has always returned a tuple.
    """
    LOGGER.debug(
        "PeriodicTable: PTableWidget.selected_elements = %r",
        self.ptable.selected_elements,
    )
    LOGGER.debug(
        "PeriodicTable: Select ANY (True) or ALL (False) = %r",
        self.select_any_all.value,
    )
    return not self.select_any_all.value, self.ptable.selected_elements.copy()
def _update_options(self) -> None:
    """Update options according to chosen structure"""
    disabled_options = set()

    if StructureFeatures.DISORDER in self.structure.structure_features:
        # Disordered structures not usable with ASE
        LOGGER.debug(
            "'disorder' found in the structure's structure_features (%s)",
            self.structure.structure_features,
        )
        for label, value in self._formats:
            if value.get("adapter_format", "") == "ase":
                disabled_options.add(label)

    if not self.structure.attributes.lattice_vectors:
        # ASE export to struct/vasp requires lattice vectors.
        LOGGER.debug("'lattice_vectors' not found for structure")
        for label, value in self._formats:
            if value.get("adapter_format", "") == "ase" and value.get(
                "final_format", ""
            ) in ("struct", "vasp"):
                disabled_options.add(label)

    if not self.structure.attributes.species:
        # cif/pdb/ase output all rely on species information.
        LOGGER.debug("'species' not found for structure")
        for label, value in self._formats:
            if value.get("adapter_format", "") in ("cif", "pdb", "ase"):
                disabled_options.add(label)

    LOGGER.debug(
        "Will disable the following dropdown options: %s", disabled_options
    )
    self.dropdown.disabled_options = list(disabled_options)
def _clear_cache(_):
    """Clear cached responses (not logs)

    Walks ``CACHE_DIR`` removing every file and directory, skipping the
    sub-directory chain that leads to (and includes) ``LOG_DIR``.
    """
    # Parts of LOG_DIR relative to CACHE_DIR (empty if logs live elsewhere).
    # BUGFIX: `log_sub_dir` was previously only assigned inside the `if`
    # branch, raising NameError in the walk below whenever LOG_DIR is not
    # located inside CACHE_DIR.
    log_sub_dir = []
    if str(LOG_DIR).startswith(str(CACHE_DIR)):
        log_sub_dir = list(Path(str(LOG_DIR)[len(f"{CACHE_DIR}/"):]).parts)
        LOGGER.debug(
            "Cache dir: %s - Log dir: %s - Log sub dir parts: %s",
            CACHE_DIR,
            LOG_DIR,
            log_sub_dir,
        )
    for dirpath, dirnames, filenames in os.walk(CACHE_DIR):
        # Next path component on the way to LOG_DIR ("" once exhausted).
        log_dir_part = log_sub_dir.pop(0) if log_sub_dir else ""
        if not log_sub_dir:
            LOGGER.debug(
                "No more log sub directory parts. Removing %r from dirnames list.",
                log_dir_part,
            )
            # BUGFIX: guard the removal - a plain .remove() raises ValueError
            # when the part is "" or the log directory is absent here.
            if log_dir_part in dirnames:
                dirnames.remove(log_dir_part)
        for directory in list(dirnames):
            if directory == log_dir_part:
                # Keep descending towards LOG_DIR; do not delete this branch.
                continue
            LOGGER.debug(
                "Removing folder: %s", Path(dirpath).joinpath(directory).resolve()
            )
            shutil.rmtree(
                Path(dirpath).joinpath(directory).resolve(), ignore_errors=True
            )
            dirnames.remove(directory)
        for filename in filenames:
            LOGGER.debug(
                "Removing file: %s", Path(dirpath).joinpath(filename).resolve()
            )
            os.remove(Path(dirpath).joinpath(filename).resolve())
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
def _toggle_widget(self, change: dict):
    """Hide or show the widget according to the toggle button"""
    show = change["new"]
    if show:
        LOGGER.debug("Show widget since toggle is %s", show)
        visibility, height, label = "visible", "auto", "Hide Periodic Table"
    else:
        LOGGER.debug("Hide widget since toggle is %s", show)
        visibility, height, label = "hidden", "0px", "Show Periodic Table"
    self.ptable_container.layout.visibility = visibility
    self.ptable_container.layout.height = height
    self.toggle_button.tooltip = label
    self.toggle_button.description = label
def handle_errors(response: dict) -> Tuple[str, set]:
    """Handle any errors"""
    if "data" not in response and "errors" not in response:
        raise InputError(f"No data and no errors reported in response: {response}")

    if "errors" not in response:
        # Clean response: no message, no HTTP error codes.
        return "", set()

    LOGGER.error("Errored response:\n%s", json.dumps(response, indent=2))

    errors = response["errors"]
    if "data" in response:
        # Partial success: data returned alongside error(s).
        msg = (
            '<font color="red">Error(s) during querying,</font> but '
            f"<strong>{len(response['data'])}</strong> structures found."
        )
    elif isinstance(errors, dict) and "detail" in errors:
        msg = (
            '<font color="red">Error(s) during querying. '
            f"Message from server:<br>{errors['detail']!r}.</font>"
        )
    elif isinstance(errors, list) and any("detail" in _ for _ in errors):
        details = [_["detail"] for _ in errors if "detail" in _]
        msg = (
            '<font color="red">Error(s) during querying. Message(s) from server:<br> - '
            f"{'<br> - '.join(details)!r}</font>"
        )
    else:
        msg = (
            '<font color="red">Error during querying, '
            "please try again later.</font>"
        )

    # Collect HTTP status codes; unparsable errors default to 400.
    http_errors = set()
    for raw_error in response.get("errors", []):
        try:
            status = int(OptimadeError(**raw_error).status)
        except (ValidationError, TypeError, ValueError):
            status = 400
        http_errors.add(status)
    return msg, http_errors
def _uses_new_structure_features(self) -> bool:
    """Check whether self.database_version is >= v1.0.0-rc.2"""
    critical_version = SemanticVersion("1.0.0-rc.2")
    version = SemanticVersion(self.database_version)

    LOGGER.debug("Semantic version: %r", version)

    if version.base_version != critical_version.base_version:
        # Major.Minor.Patch differs: new features iff strictly greater.
        return version.base_version > critical_version.base_version

    if version.prerelease:
        # Same base version: compare the pre-release segments.
        return version.prerelease >= critical_version.prerelease

    # Same base version and not a pre-release, hence >= the critical version.
    return True
def _goto_last(self, _):
    """Go to last page of results"""
    last_link = self.pages_links.get("last", False)
    if last_link:
        # Server supplied a "last" link: parse all pageing values from it.
        for pageing in self.SUPPORTED_PAGEING:
            self._cache[pageing] = self._parse_pageing(last_link, pageing)

        LOGGER.debug("Go to last page of results - using link: %s", last_link)
        self.page_link = last_link
    else:
        # Fall back to locally computed offset/number for the last page.
        self._cache["page_offset"] = self._last_page_offset
        self._cache["page_number"] = self._last_page_number

        LOGGER.debug(
            "Go to last page of results - using offset: %d",
            self._cache["page_offset"],
        )
        self.page_offset = self._cache["page_offset"]
def elements(
    value: Tuple[bool, Dict[str, int]]
) -> Tuple[Union[List[str], List[Tuple[str]]], List[str]]:
    """Extract included and excluded elements"""
    use_all, ptable_value = value

    # State 0 means "include" an element; state 1 means "exclude" it.
    include = [element for element, state in ptable_value.items() if state == 0]
    exclude = [element for element, state in ptable_value.items() if state == 1]
    LOGGER.debug(
        "elements: With value %r the following are included: %r. And excluded: %r",
        value,
        include,
        exclude,
    )

    values = []
    operators = []
    if exclude:
        excluded = ",".join(f'"{element}"' for element in exclude)
        values.append(("NOT", excluded))
        operators.append(" HAS ANY ")
    if include:
        included = ",".join(f'"{element}"' for element in include)
        values.append(included)
        operators.append(" HAS ALL " if use_all else " HAS ANY ")
    LOGGER.debug(
        "elements: Resulting parsed operator(s): %r and value(s): %r",
        operators,
        values,
    )
    return values, operators
def get_entry_endpoint_schema(base_url: str, endpoint: str = None) -> dict:
    """Retrieve provider's entry endpoint schema (default: /structures)."""
    entry = "structures" if endpoint is None else endpoint
    entry = f"/info/{entry.strip('/')}"

    response = perform_optimade_query(endpoint=entry, base_url=base_url)
    msg, _ = handle_errors(response)
    if msg:
        # Could not reach or parse the endpoint: return an empty schema.
        LOGGER.error(
            "Could not retrieve information about entry-endpoint %r.\n Message: %r\n Response:"
            "\n%s",
            entry[len("/info/") :],
            msg,
            response,
        )
        return {}

    return response.get("data", {}).get("properties", {})
def _goto_next(self, _):
    """Go to next page of results"""
    next_link = self.pages_links.get("next", False)
    if next_link:
        # Server supplied a "next" link: parse all pageing values from it.
        for pageing in self.SUPPORTED_PAGEING:
            self._cache[pageing] = self._parse_pageing(next_link, pageing)

        LOGGER.debug("Go to next page of results - using link: %s", next_link)
        self.page_link = next_link
    else:
        # Fall back to advancing the locally tracked offset and page number.
        self._cache["page_offset"] += self._page_limit
        self._cache["page_number"] += 1

        LOGGER.debug(
            "Go to next page of results - using pageing:\n page_offset=%d\n page_number=%d",
            self._cache["page_offset"],
            self._cache["page_number"],
        )
        self.page_offset = self._cache["page_offset"]
        self.page_number = self._cache["page_number"]
def _query(self, link: str = None) -> dict: """Query helper function""" # If a complete link is provided, use it straight up if link is not None: try: link = ordered_query_url(link) response = SESSION.get(link, timeout=TIMEOUT_SECONDS) if response.from_cache: LOGGER.debug("Request to %s was taken from cache !", link) response = response.json() except ( requests.exceptions.ConnectTimeout, requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout, ) as exc: response = { "errors": { "msg": "CLIENT: Connection error or timeout.", "url": link, "Exception": repr(exc), } } except JSONDecodeError as exc: response = { "errors": { "msg": "CLIENT: Could not decode response to JSON.", "url": link, "Exception": repr(exc), } } return response # Avoid structures with null positions and with assemblies. add_to_filter = 'NOT structure_features HAS ANY "assemblies"' if not self._uses_new_structure_features(): add_to_filter += ',"unknown_positions"' optimade_filter = self.filters.collect_value() optimade_filter = ("( {} ) AND ( {} )".format(optimade_filter, add_to_filter) if optimade_filter and add_to_filter else optimade_filter or add_to_filter or None) LOGGER.debug("Querying with filter: %s", optimade_filter) # OPTIMADE queries queries = { "base_url": self.database[1].base_url, "filter": optimade_filter, "page_limit": self.page_limit, "page_offset": self.offset, "page_number": self.number, "sort": self.sorting, } LOGGER.debug( "Parameters (excluding filter) sent to query util func: %s", {key: value for key, value in queries.items() if key != "filter"}, ) return perform_optimade_query(**queries)
def update_old_links_resources(resource: dict) -> Union[LinksResource, None]:
    """Try to update to resource to newest LinksResource schema"""
    try:
        return LinksResource(**resource)
    except ValidationError:
        LOGGER.debug(
            "Links resource could not be cast to newest LinksResource model. Resource: %s",
            resource,
        )

    # Migrate the old schema in place: the old "type" value becomes
    # "attributes.link_type" and "type" is normalized to "links".
    resource["attributes"]["link_type"] = resource["type"]
    resource["type"] = "links"
    LOGGER.debug(
        "Trying casting to LinksResource again with the updated resource: %s",
        resource,
    )
    try:
        return LinksResource(**resource)
    except ValidationError:
        LOGGER.debug(
            "After updating 'type' and 'attributes.link_type' in resource, "
            "it still fails to cast to LinksResource model. Resource: %s",
            resource,
        )
        return None
def update_ranged_inputs(self, change: dict):
    """Update ranged inputs' min/max values

    `change["new"]` maps field names to a config dict that may carry new
    "min"/"max" bounds for the field's range-slider widget. Unknown
    fields and non-integer bounds raise ParserError.
    """
    ranges = change["new"]
    if not ranges or ranges is None:
        return

    for field, config in ranges.items():
        if field not in self.query_fields:
            raise ParserError(
                field=field,
                value="N/A",
                extras=[
                    ("config", config),
                    ("self.query_fields.keys", self.query_fields.keys()),
                ],
                msg="Provided field is unknown. Can not update range for unknown field.",
            )

        widget = self.query_fields[field].input_widget
        # Snapshot the current (min, max)-style selection before mutating.
        cached_value: Tuple[int, int] = widget.value
        for attr in ("min", "max"):
            if attr in config and config[attr] is not None:
                try:
                    new_value = int(config[attr])
                except (TypeError, ValueError) as exc:
                    raise ParserError(
                        field=field,
                        value=cached_value,
                        extras=[("attr", attr), ("config[attr]", config[attr])],
                        msg=f"Could not cast config[attr] to int. Exception: {exc!s}",
                    ) from exc
                LOGGER.debug(
                    "Setting %s for %s to %d.\nWidget immediately before: %r",
                    attr,
                    field,
                    new_value,
                    widget,
                )
                # Since "min" is always set first, to be able to set "min" to a valid value,
                # "max" is first set to the new "min" value + 1 IF the new "min" value is
                # larger than the current "max" value, otherwise there is no reason,
                # and it may indeed lead to invalid attribute setting, if this is done.
                # For "max", coming last, this should then be fine, as the new "min" and "max"
                # values should never be an invalid pair.
                if attr == "min" and new_value > cached_value[1]:
                    widget.max = new_value + 1

                setattr(widget, attr, new_value)
                LOGGER.debug("Updated widget %r:\n%r", attr, widget)

        # Reset the selection to span the (possibly changed) full range.
        widget.value = (widget.min, widget.max)
        LOGGER.debug("Final state, updated widget:\n%r", widget)
def ranged_int(
    field: str, value: Tuple[Union[int, None], Union[int, None]]
) -> Union[str, List[str]]:
    """Turn IntRangeSlider widget value into OPTIMADE filter string"""
    LOGGER.debug("ranged_int: Received value %r for field %r", value, field)

    low, high = value
    if low is None and high is None:
        # No bounds at all: nothing to filter on.
        res = ""
    elif high is None:
        # Only a lower bound.
        res = f">={low}"
    elif low is None:
        # Only an upper bound.
        res = f"<={high}"
    elif low == high:
        # Exactly N of property
        res = f"={low}"
    else:
        # Range of property
        res = [f">={low}", f"<={high}"]

    LOGGER.debug("ranged_int: Concluded the response is %r", res)
    return res
def _toggle_debug_logging(self, change: dict):
    """Set logging level depending on toggle button"""
    debug_on = change["new"]
    if debug_on:
        # Set logging level DEBUG
        WIDGET_HANDLER.setLevel(logging.DEBUG)
        LOGGER.info("Set log output in widget to level DEBUG")
        LOGGER.debug("This should now be shown")
    else:
        # Set logging level to INFO
        WIDGET_HANDLER.setLevel(logging.INFO)
        LOGGER.info("Set log output in widget to level INFO")
        LOGGER.debug("This should now NOT be shown")

    # Debug helper buttons are only visible while debug logging is active.
    button_visibility = "visible" if debug_on else "hidden"
    self.clear_cache.layout.visibility = button_visibility
    self.clear_logs.layout.visibility = button_visibility
def ordered_query_url(url: str) -> str:
    """Decode URL, sort queries, re-encode URL"""
    LOGGER.debug("Ordering URL: %s", url)

    parsed_url = urlparse(url)
    queries = parse_qs(parsed_url.query)
    LOGGER.debug("Queries to sort and order: %s", queries)

    # Sort the query keys and, since every value is a list, the values too.
    res = OrderedDict((key, sorted(queries[key])) for key in sorted(queries))
    res = urlencode(res, doseq=True)
    res = (
        f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path};{parsed_url.params}?{res}"
        f"#{parsed_url.fragment}"
    )
    LOGGER.debug("Newly ordered URL: %s", res)
    # A final parse/unparse round-trip normalizes empty components.
    LOGGER.debug("Treated URL after unparse(parse): %s", urlunparse(urlparse(res)))
    return urlunparse(urlparse(res))
def fetch_providers(providers_urls: Union[str, List[str]] = None) -> list:
    """Fetch OPTIMADE database providers (from Materials-Consortia)

    :param providers_urls: String or list of strings with versioned base URL(s)
        to Materials-Consortia providers database
    """
    if providers_urls and not isinstance(providers_urls, (list, str)):
        raise TypeError("providers_urls must be a string or list of strings")

    if not providers_urls:
        providers_urls = PROVIDERS_URLS
    elif isinstance(providers_urls, str):
        providers_urls = [providers_urls]

    # Try each URL in turn; stop at the first that answers without errors.
    for providers_url in providers_urls:
        providers = perform_optimade_query(base_url=providers_url, endpoint="")
        msg, _ = handle_errors(providers)
        if not msg:
            break
        LOGGER.warning("%r returned error(s).", providers_url)
    else:
        # Every URL failed: fall back to the locally cached file, if any.
        if CACHED_PROVIDERS.exists():
            # Load local cached providers file
            LOGGER.warning(
                "Loading local, possibly outdated, list of providers (%r).",
                CACHED_PROVIDERS.name,
            )
            with open(CACHED_PROVIDERS, "r") as handle:
                providers = json.load(handle)
        else:
            LOGGER.error(
                "Neither any of the provider URLs: %r returned a valid response, "
                "and the local cached file of the latest valid response does not exist.",
                providers_urls,
            )
            providers = {}

    update_local_providers_json(providers)
    return providers.get("data", [])