def _on_change_structure(self, change: dict):
    """Update widget when a new structure is chosen.

    :param change: Traitlets change dict; ``change["new"]`` is either ``None``
        (no structure chosen) or a structure entry with an ``id`` attribute.
    """
    if change["new"] is None:
        LOGGER.debug(
            "Got no new structure for DownloadChooser (change['new']=%s).",
            change["new"],
        )
        self.reset()
    else:
        LOGGER.debug(
            "Got new structure for DownloadChooser: id=%s", change["new"].id
        )
        self._update_options()
        self.unfreeze()

        # Auto-choose the first (available) option in the dropdown
        available_formats = {
            label: index for index, (label, _) in enumerate(self._formats)
        }
        # The first format is the placeholder entry - never auto-choose it.
        # Use a default so an empty/odd _formats list cannot raise KeyError.
        available_formats.pop(self._formats[0][0], None)
        # Drop currently disabled options. BUGFIX: use a default value here -
        # previously a disabled label that coincided with the placeholder
        # (already popped above) raised an uncaught KeyError.
        for label in self.dropdown.disabled_options:
            available_formats.pop(label, None)
        if available_formats:
            new_index = min(available_formats.values())
            self.dropdown.index = new_index
        else:
            self.dropdown.index = 0
def _set_version(self):
    """Set ``self.database_version`` from an ``/info`` query (cached per base URL)."""
    base_url = self.database[1].base_url

    if base_url in self.__cached_versions:
        # Version already known for this base URL - reuse the cached value.
        self.database_version = self.__cached_versions[base_url]
        return

    # Not cached yet: query the provider's /info endpoint.
    response = perform_optimade_query(
        base_url=self.database[1].base_url, endpoint="/info"
    )
    msg, _ = handle_errors(response)
    if msg:
        raise QueryError(msg)

    if "meta" not in response:
        raise QueryError(
            f"'meta' field not found in /info endpoint for base URL: {base_url}"
        )
    if "api_version" not in response["meta"]:
        raise QueryError(
            f"'api_version' field not found in 'meta' for base URL: {base_url}"
        )

    api_version = response["meta"]["api_version"]
    # Normalize a leading "v" (e.g. "v1.0" -> "1.0").
    if api_version.startswith("v"):
        api_version = api_version[1:]

    self.__cached_versions[base_url] = api_version
    LOGGER.debug(
        "Cached version %r for base URL: %r",
        self.__cached_versions[base_url],
        base_url,
    )
    self.database_version = self.__cached_versions[base_url]
def update_local_providers_json(response: dict) -> None:
    """Update local `providers.json` if necessary.

    Dynamic fields (time stamps, query echoes, ...) are stripped before
    comparing with - and writing to - the cached file, so the file is only
    rewritten when the static content actually changed.

    :param response: Full providers response. It is NOT modified.
    """
    from copy import deepcopy

    # Remove dynamic fields from a deep copy. BUGFIX: a shallow
    # `response.copy()` shares the nested "meta" dict, so popping the dynamic
    # fields used to mutate the caller's `response` as a side effect.
    _response = deepcopy(response)
    for dynamic_field in (
        "time_stamp",
        "query",
        "last_id",
        "response_message",
        "warnings",
    ):
        _response.get("meta", {}).pop(dynamic_field, None)

    if CACHED_PROVIDERS.exists():
        # Compare with the existing cache file; skip writing when up-to-date.
        try:
            with open(CACHED_PROVIDERS, "r") as handle:
                _file_response = json.load(handle)
        except JSONDecodeError:
            # Corrupt cache file - fall through and overwrite it.
            pass
        else:
            if _file_response == _response:
                LOGGER.debug("Local %r is up-to-date", CACHED_PROVIDERS.name)
                return

    LOGGER.debug(
        "Creating/updating local file of cached providers (%r).",
        CACHED_PROVIDERS.name,
    )
    with open(CACHED_PROVIDERS, "w") as handle:
        json.dump(_response, handle)
def _observe_providers(self, change: dict):
    """Update child database dropdown upon changing provider"""
    chosen_provider = change["new"]
    # Hide the child-DB dropdown while it is being (re)configured.
    self.show_child_dbs.display = "none"
    self.provider = chosen_provider

    if not chosen_provider:
        # No provider chosen - reset both dropdowns to their initial state.
        self.show_child_dbs.display = "none"
        self.child_dbs.grouping = self.INITIAL_CHILD_DBS
        self.providers.index = 0
        self.child_dbs.index = 0
        return

    self._initialize_child_dbs()

    implementation_count = sum(len(group[1]) for group in self.child_dbs.grouping)
    if implementation_count <= 2:
        # The provider either has 0 or 1 implementations
        # or we have failed to retrieve any implementations.
        # Automatically choose the 1 implementation (if there),
        # while otherwise keeping the dropdown disabled.
        self.show_child_dbs.display = "none"
        try:
            self.child_dbs.index = 1
            LOGGER.debug(
                "Changed child_dbs index. New child_dbs: %s", self.child_dbs
            )
        except IndexError:
            pass
    else:
        self.show_child_dbs.display = None
def _on_database_change(self, change):
    """Update database summary, since self.database has been changed"""
    LOGGER.debug("Database changed in summary. New value: %r", change["new"])
    # A falsy value (None, empty tuple, ...) means no database is selected.
    if change["new"]:
        self._update_database()
    else:
        self.database_summary.value = ""
def _get_more_results(self, change):
    """Query for more results according to pageing

    Traitlets observer for the page chooser's pageing traits
    (``page_offset`` / ``page_number`` / others).

    :param change: Traitlets change dict for the observed pageing trait.
    """
    if not self.__perform_query:
        # A previous handler explicitly disabled querying (e.g. when only
        # synchronizing widget state); re-arm the flag and do nothing.
        self.__perform_query = True
        LOGGER.debug(
            "NOT going to perform query with change: name=%s value=%s",
            change["name"],
            change["new"],
        )
        return

    pageing: Union[int, str] = change["new"]
    LOGGER.debug(
        "Updating results with pageing change: name=%s value=%s",
        change["name"],
        pageing,
    )
    if change["name"] == "page_offset":
        self.offset = pageing
        pageing = None
    elif change["name"] == "page_number":
        self.number = pageing
        pageing = None
    else:
        # It is needed to update page_offset, but we do not wish to query again
        with self.hold_trait_notifications():
            self.__perform_query = False
            self.structure_page_chooser.update_offset()

    try:
        # Freeze and disable list of structures in dropdown widget
        # We don't want changes leading to weird things happening prior to the query ending
        self.freeze()

        # Update button text and icon
        self.query_button.description = "Updating ... "
        self.query_button.icon = "cog"
        self.query_button.tooltip = "Please wait ..."

        # Query database
        response = self._query(pageing)
        msg, _ = handle_errors(response)
        if msg:
            self.error_or_status_messages.value = msg
            return

        # Update list of structures in dropdown widget
        self._update_structures(response["data"])

        # Update pageing
        self.structure_page_chooser.set_pagination_data(
            links_to_page=response.get("links", {}),
        )
    finally:
        # Always restore the search button and re-enable the widgets,
        # even when the query failed or raised.
        self.query_button.description = "Search"
        self.query_button.icon = "search"
        self.query_button.tooltip = "Search"
        self.unfreeze()
def _on_provider_change(self, change: dict):
    """Update provider summary, since self.provider has been changed"""
    LOGGER.debug("Provider changed in summary. New value: %r", change["new"])
    # Changing provider always invalidates the current database summary.
    self.database_summary.value = ""
    if change["new"]:
        self._update_provider()
    else:
        self.provider_summary.value = ""
def _get_file(filename: str) -> Union[str, bytes]: """Read and return file""" path = Path(filename).resolve() LOGGER.debug("Trying image file path: %s", str(path)) if path.exists() and path.is_file(): with open(path, "rb") as file_handle: res = file_handle.read() return res LOGGER.debug("File %s either does not exist or is not a file", str(path)) return ""
def _initialize_options(self) -> None:
    """Initialize dropdown options according to installed packages.

    Formats relying on an optional dependency (ASE or pymatgen) are dropped
    from ``self._formats`` when the dependency could not be imported.
    """
    optional_adapters = [(aseAtoms, "ase"), (pymatgenStructure, "pymatgen")]
    for imported_object, adapter_format in optional_adapters:
        if imported_object is not None:
            continue
        LOGGER.debug("%s not recognized to be installed.", adapter_format)
        # Keep only options that do not require the missing adapter.
        self._formats = [
            option
            for option in self._formats
            if option[1].get("adapter_format", "") != adapter_format
        ]
def value(self) -> "Tuple[bool, Dict[str, int]]":
    """Return value for wrapped PTableWidget

    :return: 2-tuple of the inverted ANY/ALL toggle (``True`` when the toggle
        is off, i.e. presumably "match ALL" - confirm against consumers) and a
        copy of ``PTableWidget.selected_elements``.
    """
    LOGGER.debug(
        "PeriodicTable: PTableWidget.selected_elements = %r",
        self.ptable.selected_elements,
    )
    LOGGER.debug(
        "PeriodicTable: Select ANY (True) or ALL (False) = %r",
        self.select_any_all.value,
    )
    # NOTE: returns a tuple, not a dict (the previous `-> dict` annotation
    # was wrong). The toggle value is inverted; the selection dict is copied
    # so callers cannot mutate the widget's internal state.
    return not self.select_any_all.value, self.ptable.selected_elements.copy()
def _clear_cache(_):
    """Clear cached responses (not logs)

    Removes every file and sub-directory under ``CACHE_DIR``, while walking
    around the log directory (``LOG_DIR``) when it lives inside the cache dir.

    :param _: Ignored (button-click callback argument).
    """
    if str(LOG_DIR).startswith(str(CACHE_DIR)):
        # Path parts of LOG_DIR relative to CACHE_DIR, e.g. ["logs"].
        log_sub_dir = list(Path(str(LOG_DIR)[len(f"{CACHE_DIR}/"):]).parts)
        LOGGER.debug(
            "Cache dir: %s - Log dir: %s - Log sub dir parts: %s",
            CACHE_DIR,
            LOG_DIR,
            log_sub_dir,
        )
        for dirpath, dirnames, filenames in os.walk(CACHE_DIR):
            # Consume one relative path part per directory level descended.
            log_dir_part = log_sub_dir.pop(0) if log_sub_dir else ""
            if not log_sub_dir:
                # Reached the log directory itself: prune it from `dirnames`
                # so os.walk neither descends into nor deletes it.
                # NOTE(review): raises ValueError if `log_dir_part` is not in
                # `dirnames` at this level (e.g. the log dir does not exist
                # yet, or parts ran out on a deeper level) - verify.
                LOGGER.debug(
                    "No more log sub directory parts. Removing %r from dirnames list.",
                    log_dir_part,
                )
                dirnames.remove(log_dir_part)
            for directory in list(dirnames):
                if directory == log_dir_part:
                    # Keep descending towards the log directory.
                    continue
                LOGGER.debug(
                    "Removing folder: %s", Path(dirpath).joinpath(directory).resolve()
                )
                shutil.rmtree(
                    Path(dirpath).joinpath(directory).resolve(), ignore_errors=True
                )
                dirnames.remove(directory)
            for filename in filenames:
                LOGGER.debug(
                    "Removing file: %s", Path(dirpath).joinpath(filename).resolve()
                )
                os.remove(Path(dirpath).joinpath(filename).resolve())
    # Make sure the cache directory itself still exists afterwards.
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
def _update_options(self) -> None:
    """Update options according to chosen structure"""

    def _labels_matching(predicate) -> set:
        # Labels of all formats whose config dict satisfies `predicate`.
        return {label for label, config in self._formats if predicate(config)}

    disabled_options = set()

    if StructureFeatures.DISORDER in self.structure.structure_features:
        # Disordered structures not usable with ASE
        LOGGER.debug(
            "'disorder' found in the structure's structure_features (%s)",
            self.structure.structure_features,
        )
        disabled_options |= _labels_matching(
            lambda config: config.get("adapter_format", "") == "ase"
        )

    if not self.structure.attributes.lattice_vectors:
        LOGGER.debug("'lattice_vectors' not found for structure")
        disabled_options |= _labels_matching(
            lambda config: config.get("adapter_format", "") == "ase"
            and config.get("final_format", "") in ("struct", "vasp")
        )

    if not self.structure.attributes.species:
        LOGGER.debug("'species' not found for structure")
        disabled_options |= _labels_matching(
            lambda config: config.get("adapter_format", "") in ("cif", "pdb", "ase")
        )

    LOGGER.debug(
        "Will disable the following dropdown options: %s", disabled_options
    )
    self.dropdown.disabled_options = list(disabled_options)
def _toggle_widget(self, change: dict):
    """Hide or show the widget according to the toggle button"""
    show = bool(change["new"])
    if show:
        LOGGER.debug("Show widget since toggle is %s", change["new"])
    else:
        LOGGER.debug("Hide widget since toggle is %s", change["new"])

    layout = self.ptable_container.layout
    layout.visibility = "visible" if show else "hidden"
    layout.height = "auto" if show else "0px"

    # Button label always describes the action a click would perform next.
    label = "Hide Periodic Table" if show else "Show Periodic Table"
    self.toggle_button.tooltip = label
    self.toggle_button.description = label
def _goto_last(self, _):
    """Go to last page of results

    Prefer the server-provided "last" pagination link; fall back to the
    locally computed last-page offset/number otherwise.

    :param _: Ignored (button-click callback argument).
    """
    if self.pages_links.get("last", False):
        # Server provided a "last" link: cache its pageing query values,
        # then navigate via the link itself.
        for pageing in self.SUPPORTED_PAGEING:
            self._cache[pageing] = self._parse_pageing(
                self.pages_links["last"], pageing
            )
        LOGGER.debug(
            "Go to last page of results - using link: %s", self.pages_links["last"]
        )
        self.page_link = self.pages_links["last"]
    else:
        self._cache["page_offset"] = self._last_page_offset
        self._cache["page_number"] = self._last_page_number
        LOGGER.debug(
            "Go to last page of results - using offset: %d",
            self._cache["page_offset"],
        )
        # NOTE(review): only `page_offset` is assigned here although
        # `page_number` is cached above - `_goto_next` assigns both.
        # Confirm whether `self.page_number` should also be set.
        self.page_offset = self._cache["page_offset"]
def _uses_new_structure_features(self) -> bool:
    """Check whether self.database_version is >= v1.0.0-rc.2"""
    critical_version = SemanticVersion("1.0.0-rc.2")
    version = SemanticVersion(self.database_version)

    LOGGER.debug("Semantic version: %r", version)

    if version.base_version != critical_version.base_version:
        # Different Major.Minor.Patch: that comparison alone decides.
        return version.base_version > critical_version.base_version

    # Same base version as the critical one (1.0.0).
    if version.prerelease:
        # A pre-release of 1.0.0: new features from rc.2 onwards.
        return version.prerelease >= critical_version.prerelease

    # Full 1.0.0 release (no pre-release tag) - newer than any rc.
    return True
def elements(
    value: Tuple[bool, Dict[str, int]]
) -> Tuple[Union[List[str], List[Tuple[str]]], List[str]]:
    """Extract included and excluded elements

    :param value: 2-tuple ``(use_all, states)``: ``states`` maps element
        symbol to a selection state (0 = include, 1 = exclude), ``use_all``
        selects "HAS ALL" over "HAS ANY" for the included elements.
    :return: 2-tuple of parsed filter values and their matching operators.
    """
    use_all, ptable_value = value

    include = [element for element, state in ptable_value.items() if state == 0]
    exclude = [element for element, state in ptable_value.items() if state == 1]

    LOGGER.debug(
        "elements: With value %r the following are included: %r. And excluded: %r",
        value,
        include,
        exclude,
    )

    values = []
    operators = []
    if exclude:
        # Excluded elements always use a negated "HAS ANY".
        elements = ",".join([f'"{element}"' for element in exclude])
        values.append(("NOT", elements))
        operators.append(" HAS ANY ")
    if include:
        include_elements = ",".join([f'"{element}"' for element in include])
        values.append(include_elements)
        operators.append(" HAS ALL " if use_all else " HAS ANY ")

    LOGGER.debug(
        "elements: Resulting parsed operator(s): %r and value(s): %r",
        operators,
        values,
    )
    return values, operators
def _toggle_debug_logging(self, change: dict):
    """Set logging level depending on toggle button"""
    debug_enabled = change["new"]
    if debug_enabled:
        # Set logging level DEBUG
        WIDGET_HANDLER.setLevel(logging.DEBUG)
        LOGGER.info("Set log output in widget to level DEBUG")
        LOGGER.debug("This should now be shown")
    else:
        # Set logging level to INFO
        WIDGET_HANDLER.setLevel(logging.INFO)
        LOGGER.info("Set log output in widget to level INFO")
        LOGGER.debug("This should now NOT be shown")

    # Debug buttons are only shown while debug logging is active.
    visibility = "visible" if debug_enabled else "hidden"
    self.clear_cache.layout.visibility = visibility
    self.clear_logs.layout.visibility = visibility
def _goto_next(self, _):
    """Go to next page of results"""
    next_link = self.pages_links.get("next", False)
    if next_link:
        # Server provided a "next" link: cache its pageing query values,
        # then navigate via the link itself.
        for pageing in self.SUPPORTED_PAGEING:
            self._cache[pageing] = self._parse_pageing(next_link, pageing)
        LOGGER.debug("Go to next page of results - using link: %s", next_link)
        self.page_link = next_link
    else:
        # No link available: advance the locally tracked offset and number.
        self._cache["page_offset"] += self._page_limit
        self._cache["page_number"] += 1
        LOGGER.debug(
            "Go to next page of results - using pageing:\n page_offset=%d\n page_number=%d",
            self._cache["page_offset"],
            self._cache["page_number"],
        )
        self.page_offset = self._cache["page_offset"]
        self.page_number = self._cache["page_number"]
def _query(self, link: str = None) -> dict: """Query helper function""" # If a complete link is provided, use it straight up if link is not None: try: link = ordered_query_url(link) response = SESSION.get(link, timeout=TIMEOUT_SECONDS) if response.from_cache: LOGGER.debug("Request to %s was taken from cache !", link) response = response.json() except ( requests.exceptions.ConnectTimeout, requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout, ) as exc: response = { "errors": { "msg": "CLIENT: Connection error or timeout.", "url": link, "Exception": repr(exc), } } except JSONDecodeError as exc: response = { "errors": { "msg": "CLIENT: Could not decode response to JSON.", "url": link, "Exception": repr(exc), } } return response # Avoid structures with null positions and with assemblies. add_to_filter = 'NOT structure_features HAS ANY "assemblies"' if not self._uses_new_structure_features(): add_to_filter += ',"unknown_positions"' optimade_filter = self.filters.collect_value() optimade_filter = ("( {} ) AND ( {} )".format(optimade_filter, add_to_filter) if optimade_filter and add_to_filter else optimade_filter or add_to_filter or None) LOGGER.debug("Querying with filter: %s", optimade_filter) # OPTIMADE queries queries = { "base_url": self.database[1].base_url, "filter": optimade_filter, "page_limit": self.page_limit, "page_offset": self.offset, "page_number": self.number, "sort": self.sorting, } LOGGER.debug( "Parameters (excluding filter) sent to query util func: %s", {key: value for key, value in queries.items() if key != "filter"}, ) return perform_optimade_query(**queries)
def update_old_links_resources(resource: dict) -> Union[LinksResource, None]:
    """Try to update to resource to newest LinksResource schema"""
    try:
        return LinksResource(**resource)
    except ValidationError:
        pass

    LOGGER.debug(
        "Links resource could not be cast to newest LinksResource model. Resource: %s",
        resource,
    )
    # Older schemas stored the link type in "type"; move it into
    # "attributes.link_type" and use the canonical "links" type instead.
    resource["attributes"]["link_type"] = resource["type"]
    resource["type"] = "links"
    LOGGER.debug(
        "Trying casting to LinksResource again with the updated resource: %s",
        resource,
    )
    try:
        return LinksResource(**resource)
    except ValidationError:
        LOGGER.debug(
            "After updating 'type' and 'attributes.link_type' in resource, "
            "it still fails to cast to LinksResource model. Resource: %s",
            resource,
        )
        return None
def update_ranged_inputs(self, change: dict):
    """Update ranged inputs' min/max values

    :param change: Traitlets change dict; ``change["new"]`` maps field names
        to a config dict that may contain new "min"/"max" values.
    :raises ParserError: If a field is unknown, or a min/max value cannot be
        cast to an int.
    """
    ranges = change["new"]
    if not ranges or ranges is None:
        return

    for field, config in ranges.items():
        if field not in self.query_fields:
            raise ParserError(
                field=field,
                value="N/A",
                extras=[
                    ("config", config),
                    ("self.query_fields.keys", self.query_fields.keys()),
                ],
                msg="Provided field is unknown. Can not update range for unknown field.",
            )

        widget = self.query_fields[field].input_widget
        # Current (low, high) selection, used to validate the new bounds.
        cached_value: Tuple[int, int] = widget.value
        for attr in ("min", "max"):
            if attr in config and config[attr] is not None:
                try:
                    new_value = int(config[attr])
                except (TypeError, ValueError) as exc:
                    raise ParserError(
                        field=field,
                        value=cached_value,
                        extras=[("attr", attr), ("config[attr]", config[attr])],
                        msg=f"Could not cast config[attr] to int. Exception: {exc!s}",
                    ) from exc
                LOGGER.debug(
                    "Setting %s for %s to %d.\nWidget immediately before: %r",
                    attr,
                    field,
                    new_value,
                    widget,
                )
                # Since "min" is always set first, to be able to set "min" to a valid value,
                # "max" is first set to the new "min" value + 1 IF the new "min" value is
                # larger than the current "max" value, otherwise there is no reason,
                # and it may indeed lead to invalid attribute setting, if this is done.
                # For "max", coming last, this should then be fine, as the new "min" and "max"
                # values should never be an invalid pair.
                if attr == "min" and new_value > cached_value[1]:
                    widget.max = new_value + 1

                setattr(widget, attr, new_value)
                LOGGER.debug("Updated widget %r:\n%r", attr, widget)

        # Snap the current selection to the (possibly) new bounds.
        widget.value = (widget.min, widget.max)
        LOGGER.debug("Final state, updated widget:\n%r", widget)
def ranged_int(
    field: str, value: Tuple[Union[int, None], Union[int, None]]
) -> Union[str, List[str]]:
    """Turn IntRangeSlider widget value into OPTIMADE filter string

    :param field: Name of the OPTIMADE field (used for logging only).
    :param value: ``(low, high)`` bounds; either bound may be ``None``.
    :return: A single filter fragment, or a list of two fragments for a
        genuine range.
    """
    LOGGER.debug("ranged_int: Received value %r for field %r", value, field)

    low, high = value
    if low is None or high is None:
        # Open-ended: at most one bound is usable ("" when both are None).
        res = ""
        if low is not None:
            res = f">={low}"
        if high is not None:
            res = f"<={high}"
    elif low == high:
        # Exactly N of property
        res = f"={low}"
    else:
        # Range of property
        res = [f">={low}", f"<={high}"]

    LOGGER.debug("ranged_int: Concluded the response is %r", res)
    return res
def ordered_query_url(url: str) -> str:
    """Decode URL, sort queries, re-encode URL

    The query part of ``url`` is decoded, its keys (and the list of values
    under each key) are sorted, and the URL is rebuilt, so equivalent URLs
    always serialize identically (useful e.g. as a cache key).

    :param url: The URL to normalize.
    :return: The URL with a sorted, re-encoded query string.
    """
    LOGGER.debug("Ordering URL: %s", url)

    parsed_url = urlparse(url)
    queries = parse_qs(parsed_url.query)
    LOGGER.debug("Queries to sort and order: %s", queries)

    # Sort the keys; since the values are all lists, sort those as well.
    sorted_queries = OrderedDict(
        (key, sorted(queries[key])) for key in sorted(queries)
    )
    encoded_queries = urlencode(sorted_queries, doseq=True)

    # Rebuild with urlunparse instead of manual string assembly, which
    # previously inserted a stray ";" params separator (and a trailing "#")
    # even for URLs without params/fragment, relying on a re-parse round-trip
    # to clean the result up.
    res = urlunparse(
        (
            parsed_url.scheme,
            parsed_url.netloc,
            parsed_url.path,
            parsed_url.params,
            encoded_queries,
            parsed_url.fragment,
        )
    )
    LOGGER.debug("Newly ordered URL: %s", res)
    return res
SESSION_ADAPTER_DEBUG = CacheControlAdapter() SESSION.mount("http://", SESSION_ADAPTER) SESSION.mount("https://", SESSION_ADAPTER) SESSION.mount("http://localhost", SESSION_ADAPTER_DEBUG) SESSION.mount("http://127.0.0.1", SESSION_ADAPTER_DEBUG) # Currently known providers' development OPTIMADE base URLs DEVELOPMENT_PROVIDERS = {"mcloud": "https://dev-www.materialscloud.org/optimade"} try: DEVELOPMENT_MODE = bool(int(os.getenv("OPTIMADE_CLIENT_DEVELOPMENT_MODE", "0"))) except ValueError: LOGGER.debug( ( "OPTIMADE_CLIENT_DEVELOPMENT_MODE found, but cannot be parsed as a bool of an int. " "Setting it to False. Found value: %s" ), os.getenv("OPTIMADE_CLIENT_DEVELOPMENT_MODE"), ) DEVELOPMENT_MODE = False class DefaultingEnum(EnumMeta): """Override __getitem__()""" def __getitem__(cls, name): """Log warning and default to "DEFAULT" if name is not valid""" if name not in cls._member_map_: LOGGER.warning( "%r is not a valid button style. Setting button style to 'DEFAULT'. " "Valid button styles: %s",
def _update_download_button(self, change: dict):
    """Update Download button with correct onclick value

    The whole parsing process from `Structure` to desired format, is wrapped in a
    try/except, which is further wrapped in a `warnings.catch_warnings()`.
    This is in order to be able to log any warnings that might be thrown by the adapter
    in `optimade-python-tools` and/or any related exceptions.

    :param change: Traitlets change dict; ``change["new"]`` is the chosen
        format config (with "adapter_format", "final_format", "ext") or falsy
        when no format is chosen.
    """
    desired_format = change["new"]
    LOGGER.debug(
        "Updating the download button with desired format: %s", desired_format
    )
    if not desired_format or desired_format is None:
        # No format chosen: render the button disabled and empty.
        self.download_button.value = self._download_button_format.format(
            button_style=self._button_style.value,
            disabled="disabled",
            encoding="",
            data="",
            filename="",
        )
        return

    output = None
    with warnings.catch_warnings():
        # Escalate warnings to errors so adapter warnings can be caught below.
        warnings.filterwarnings("error")
        try:
            # Convert the structure via the adapter for the chosen format.
            output = getattr(
                self.structure, f"as_{desired_format['adapter_format']}"
            )
        except RuntimeWarning as warn:
            if "numpy.ufunc size changed" in str(warn):
                # This is an issue that may occur if using pre-built binaries for numpy and
                # scipy. It can be resolved by uninstalling scipy and reinstalling it with
                # `--no-binary :all:` when using pip. This will recompile all related binaries
                # using the currently installed numpy version.
                # However, it shouldn't be critical, hence here the warning will be ignored.
                warnings.filterwarnings("default")
                output = getattr(
                    self.structure, f"as_{desired_format['adapter_format']}"
                )
            else:
                # Any other RuntimeWarning: disable the button and re-emit
                # the warning wrapped as an OptimadeClientWarning.
                self.download_button.value = self._download_button_format.format(
                    button_style=self._button_style.value,
                    disabled="disabled",
                    encoding="",
                    data="",
                    filename="",
                )
                warnings.warn(OptimadeClientWarning(warn))
        except Warning as warn:
            # Any other adapter warning: disable the button and re-emit it.
            self.download_button.value = self._download_button_format.format(
                button_style=self._button_style.value,
                disabled="disabled",
                encoding="",
                data="",
                filename="",
            )
            warnings.warn(OptimadeClientWarning(warn))
        except Exception as exc:
            # Disable the button, then re-raise (wrapped, so it gets logged).
            self.download_button.value = self._download_button_format.format(
                button_style=self._button_style.value,
                disabled="disabled",
                encoding="",
                data="",
                filename="",
            )
            if isinstance(exc, exceptions.OptimadeClientError):
                raise exc
            # Else wrap the exception to make sure to log it.
            raise exceptions.OptimadeClientError(exc)

    if desired_format["adapter_format"] in (
        "ase",
        "pymatgen",
        "aiida_structuredata",
    ):
        # output is not a file, but a proxy Python class
        func = getattr(self, f"_get_via_{desired_format['adapter_format']}")
        output = func(output, desired_format=desired_format["final_format"])
    encoding = "utf-8"

    # Specifically for CIF: v1.x CIF needs to be in "latin-1" formatting
    if desired_format["ext"] == ".cif":
        encoding = "latin-1"

    filename = f"optimade_structure_{self.structure.id}{desired_format['ext']}"

    if isinstance(output, str):
        output = output.encode(encoding)
    # The file content is embedded base64-encoded in the button's HTML value.
    data = base64.b64encode(output).decode()

    self.download_button.value = self._download_button_format.format(
        button_style=self._button_style.value,
        disabled="",
        encoding=encoding,
        data=data,
        filename=filename,
    )
def perform_optimade_query(  # pylint: disable=too-many-arguments,too-many-branches,too-many-locals
    base_url: str,
    endpoint: str = None,
    filter: Union[dict, str] = None,  # pylint: disable=redefined-builtin
    sort: Union[str, List[str]] = None,
    response_format: str = None,
    response_fields: str = None,
    email_address: str = None,
    page_limit: int = None,
    page_offset: int = None,
    page_number: int = None,
) -> dict:
    """Perform query of database

    Build an OPTIMADE query URL from the given parameters, perform the GET
    request, and return the decoded JSON response. Connection and JSON
    decoding failures are returned as an OPTIMADE-style ``{"errors": [...]}``
    dict instead of raising.

    :param base_url: (Versioned) base URL of the implementation to query.
    :param endpoint: Endpoint to query; defaults to "/structures".
    :param filter: OPTIMADE filter string (a dict is accepted but currently
        not serialized - see NOTE below).
    :param sort: Sort field(s), a str or list of str (joined with ",").
    :param response_format: Response format; defaults to "json".
    :param response_fields: Comma-separated fields to request; when omitted
        for "/structures", a default field set is used.
    :param email_address: Optional email address to send with the query.
    :param page_limit: Optional pagination page limit.
    :param page_offset: Optional pagination offset.
    :param page_number: Optional pagination page number.
    :return: The decoded JSON response (or a client-side errors dict).
    """
    queries = OrderedDict()

    if endpoint is None:
        endpoint = "/structures"
    elif endpoint:
        # Make sure we supply the correct slashed format no matter the input
        endpoint = f"/{endpoint.strip('/')}"

    url_path = (
        base_url + endpoint[1:] if base_url.endswith("/") else base_url + endpoint
    )

    if filter:
        if isinstance(filter, dict):
            # NOTE(review): a dict filter is accepted but silently ignored -
            # only str filters end up in the query. Confirm this is intended.
            pass
        elif isinstance(filter, str):
            queries["filter"] = filter
        else:
            raise TypeError("'filter' must be either a dict or a str")

    if sort is not None:
        if isinstance(sort, str):
            queries["sort"] = sort
        else:
            queries["sort"] = ",".join(sort)

    if response_format is None:
        response_format = "json"
    queries["response_format"] = response_format

    if response_fields is not None:
        queries["response_fields"] = response_fields
    elif endpoint == "/structures":
        # Default set of structure fields needed by this client.
        queries["response_fields"] = ",".join(
            [
                "structure_features",
                "chemical_formula_descriptive",
                "chemical_formula_reduced",
                "elements",
                "nsites",
                "lattice_vectors",
                "species",
                "cartesian_site_positions",
                "species_at_sites",
                "chemical_formula_hill",
                "nelements",
                "nperiodic_dimensions",
                "last_modified",
                "elements_ratios",
                "dimension_types",
            ]
        )

    if email_address is not None:
        queries["email_address"] = email_address

    if page_limit is not None:
        queries["page_limit"] = page_limit
    if page_offset is not None:
        queries["page_offset"] = page_offset
    if page_number is not None:
        queries["page_number"] = page_number

    # Make query - get data
    url_query = urlencode(queries)
    complete_url = f"{url_path}?{url_query}"
    LOGGER.debug("Performing OPTIMADE query:\n%s", complete_url)
    try:
        response = SESSION.get(complete_url, timeout=TIMEOUT_SECONDS)
        if response.from_cache:
            LOGGER.debug("Request to %s was taken from cache !", complete_url)
    except (
        requests.exceptions.ConnectTimeout,
        requests.exceptions.ConnectionError,
        requests.exceptions.ReadTimeout,
    ) as exc:
        return {
            "errors": [
                {
                    "detail": (
                        f"CLIENT: Connection error or timeout.\nURL: {complete_url}\n"
                        f"Exception: {exc!r}"
                    )
                }
            ]
        }

    try:
        response = response.json()
    except JSONDecodeError as exc:
        return {
            "errors": [
                {
                    "detail": (
                        f"CLIENT: Cannot decode response to JSON format.\nURL: {complete_url}\n"
                        f"Exception: {exc!r}"
                    )
                }
            ]
        }

    return response
def get_versioned_base_url(  # pylint: disable=too-many-branches
    base_url: Union[str, dict, Link, AnyUrl]
) -> str:
    """Retrieve the versioned base URL

    First, check if the given base URL is already a versioned base URL.

    Then, use `Version Negotiation` as outlined in the specification:
    https://github.com/Materials-Consortia/OPTIMADE/blob/v1.0.0/optimade.rst#version-negotiation

    1. Try unversioned base URL's `/versions` endpoint.
    2. Go through valid versioned base URLs.

    :param base_url: The base URL as a str, a dict/Link carrying an "href",
        or an AnyUrl.
    :return: The versioned base URL, or "" when none could be determined.
    """
    if isinstance(base_url, dict):
        base_url = base_url.get("href", "")
    elif isinstance(base_url, Link):
        base_url = base_url.href

    LOGGER.debug("Retrieving versioned base URL for %r", base_url)

    # Already versioned? Accept with or without a single trailing slash.
    for version in VERSION_PARTS:
        if version in base_url:
            if re.match(rf".+{version}$", base_url):
                return base_url
            if re.match(rf".+{version}/$", base_url):
                return base_url[:-1]
            LOGGER.debug(
                "Found version '%s' in base URL '%s', but not at the end of it. Will continue.",
                version,
                base_url,
            )

    # 1. Try unversioned base URL's `/versions` endpoint.
    versions_endpoint = (
        f"{base_url}versions" if base_url.endswith("/") else f"{base_url}/versions"
    )
    try:
        response = SESSION.get(versions_endpoint, timeout=TIMEOUT_SECONDS)
        if response.from_cache:
            LOGGER.debug("Request to %s was taken from cache !", versions_endpoint)
    except (
        requests.exceptions.ConnectTimeout,
        requests.exceptions.ConnectionError,
        requests.exceptions.ReadTimeout,
    ):
        # Unreachable /versions endpoint: fall through to step 2.
        pass
    else:
        if response.status_code == 200:
            # This endpoint should be of type "text/csv"
            csv_data = response.text.splitlines()
            keys = csv_data.pop(0).split(",")
            # BUGFIX: previously `{}.fromkeys(keys, [])` was used here, which
            # made every key share ONE list object, merging all columns'
            # values together. Give each key its own list instead.
            versions = {key: [] for key in keys}
            for line in csv_data:
                values = line.split(",")
                for key, value in zip(keys, values):
                    versions[key].append(value)

            for version in versions.get("version", []):
                version_path = f"/v{version}"
                if version_path in VERSION_PARTS:
                    LOGGER.debug(
                        "Found versioned base URL through /versions endpoint."
                    )
                    return (
                        base_url + version_path[1:]
                        if base_url.endswith("/")
                        else base_url + version_path
                    )

    timeout_seconds = 5  # Use custom timeout seconds due to potentially many requests

    # 2. Go through valid versioned base URLs.
    for version in VERSION_PARTS:
        versioned_base_url = (
            base_url + version[1:] if base_url.endswith("/") else base_url + version
        )
        try:
            response = SESSION.get(
                f"{versioned_base_url}/info", timeout=timeout_seconds
            )
            if response.from_cache:
                LOGGER.debug(
                    "Request to %s/info was taken from cache !", versioned_base_url
                )
        except (
            requests.exceptions.ConnectTimeout,
            requests.exceptions.ConnectionError,
            requests.exceptions.ReadTimeout,
        ):
            continue
        else:
            if response.status_code == 200:
                LOGGER.debug(
                    "Found versioned base URL through adding valid versions to path and requesting "
                    "the /info endpoint."
                )
                return versioned_base_url

    return ""
def get_list_of_valid_providers(  # pylint: disable=too-many-branches
    disable_providers: List[str] = None, skip_providers: List[str] = None
) -> Tuple[List[Tuple[str, LinksResourceAttributes]], List[str]]:
    """Get curated list of database providers

    Return formatted list of tuples to use with a dropdown-widget.

    :param disable_providers: Provider IDs to list but keep disabled.
    :param skip_providers: Provider IDs to leave out entirely; defaults to
        the known non-database entries ("exmpl", "optimade", "aiida").
    :return: 2-tuple of (all provider entries, names of invalid/disabled ones).
    """
    providers = fetch_providers()
    res = []
    invalid_providers = []
    disable_providers = disable_providers or []
    skip_providers = skip_providers or ["exmpl", "optimade", "aiida"]

    for entry in providers:
        provider = LinksResource(**entry)

        if provider.id in skip_providers:
            LOGGER.debug("Skipping provider: %s", provider)
            continue

        attributes = provider.attributes

        if provider.id in disable_providers:
            LOGGER.debug("Temporarily disabling provider: %s", str(provider))
            invalid_providers.append((attributes.name, attributes))
            continue

        # Skip if not an 'external' link_type database
        if attributes.link_type != LinkType.EXTERNAL:
            LOGGER.debug(
                "Skip %s: Links resource not an %r link_type, instead: %r",
                attributes.name,
                LinkType.EXTERNAL,
                attributes.link_type,
            )
            continue

        # Disable if there is no base URL
        if attributes.base_url is None:
            LOGGER.debug("Base URL found to be None for provider: %s", str(provider))
            invalid_providers.append((attributes.name, attributes))
            continue

        # Use development servers for providers if desired and available
        if DEVELOPMENT_MODE and provider.id in DEVELOPMENT_PROVIDERS:
            development_base_url = DEVELOPMENT_PROVIDERS[provider.id]

            LOGGER.debug(
                "Setting base URL for %s to their development server", provider.id
            )

            # base_url may be represented in several ways - mutate in place
            # where possible, otherwise replace.
            if isinstance(attributes.base_url, dict):
                attributes.base_url["href"] = development_base_url
            elif isinstance(attributes.base_url, Link):
                attributes.base_url.href = development_base_url
            elif isinstance(attributes.base_url, (AnyUrl, str)):
                attributes.base_url = development_base_url
            else:
                raise TypeError(
                    "base_url not found to be a valid type. Must be either an optimade.models."
                    f"Link or a dict. Found type: {type(attributes.base_url)}"
                )

        versioned_base_url = get_versioned_base_url(attributes.base_url)
        if versioned_base_url:
            attributes.base_url = versioned_base_url
        else:
            # Not a valid/supported provider: skip
            LOGGER.debug(
                "Could not determine versioned base URL for provider: %s",
                str(provider),
            )
            invalid_providers.append((attributes.name, attributes))
            continue

        res.append((attributes.name, attributes))

    # Invalid/disabled providers are appended last (shown at the bottom of a
    # dropdown); their names are also returned separately.
    return res + invalid_providers, [name for name, _ in invalid_providers]
def unfreeze(self):
    """Activate widget (in its current state)"""
    LOGGER.debug("Will unfreeze %s", self.__class__.__name__)
    # Re-enable every child widget; the rest of their state is untouched.
    for child in self.children:
        child.disabled = False
def check_entry_properties(
    base_url: str,
    entry_endpoint: str,
    properties: Union[str, Iterable[str]],
    checks: Union[str, Iterable[str]],
) -> List[str]:
    """Check an entry-endpoint's properties

    :param checks: An iterable, which only recognizes the following str entries:
        "sort", "sortable", "present", "queryable"
        The first two and latter two represent the same thing, i.e., whether a
        property is sortable and whether a property is present in the
        entry-endpoint's resource's attributes, respectively.
    :param properties: Can be either a list or not of properties to check.
    :param entry_endpoint: A valid entry-endpoint for the OPTIMADE implementation,
        e.g., "structures", "_exmpl_calculations", or "/extensions/structures".
    :return: The subset of ``properties`` that passed the requested checks.
    """
    if isinstance(properties, str):
        properties = [properties]
    properties = list(properties)

    if not checks:
        # Don't make any queries if called improperly (with empty iterable for `checks`)
        return properties

    if isinstance(checks, str):
        checks = [checks]
    checks = set(checks)
    # Normalize the synonyms: "queryable" -> "present", "sortable" -> "sort".
    if "queryable" in checks:
        checks.update({"present"})
        checks.remove("queryable")
    if "sortable" in checks:
        checks.update({"sort"})
        checks.remove("sortable")

    query_params = {
        "endpoint": f"/info/{entry_endpoint.strip('/')}",
        "base_url": base_url,
    }

    response = perform_optimade_query(**query_params)
    msg, _ = handle_errors(response)
    if msg:
        LOGGER.error(
            "Could not retrieve information about entry-endpoint %r.\n Message: %r\n Response:"
            "\n%s",
            entry_endpoint,
            msg,
            response,
        )
        if "present" in checks:
            # Presence cannot be verified without the info response.
            return []
        return properties

    res = list(properties)  # Copy of input list of properties
    found_properties = response.get("data", {}).get("properties", {})
    for field in properties:
        field_property = found_properties.get(field, None)
        if field_property is None:
            LOGGER.debug(
                "Could not find %r in %r for provider with base URL %r. "
                "Found properties:\n%s",
                field,
                query_params["endpoint"],
                base_url,
                json.dumps(found_properties),
            )
            if "present" in checks:
                res.remove(field)
        elif "sort" in checks:
            # Property exists; drop it unless the endpoint marks it sortable.
            sortable = field_property.get("sortable", False)
            if not sortable:
                res.remove(field)

    LOGGER.debug(
        "sortable fields found for %s (looking for %r): %r", base_url, properties, res
    )
    return res