def sess(self, url, tmpdir):
    """Yield a CacheControl-wrapped session backed by a FileCache in *tmpdir*.

    Stores the url and cache on the test instance so individual tests can
    inspect the cache contents afterwards.
    """
    self.url = url
    self.cache = FileCache(str(tmpdir))
    cached_session = CacheControl(requests.Session(), cache=self.cache)
    yield cached_session
    # teardown: release the underlying requests session
    cached_session.close()
def test_file_cache_recognizes_consumed_file_handle(self, url):
    """A second GET of a cacheable URL must be served from the file cache."""
    cached = CacheControl(Session(), FileCache("web_cache"))
    target = url + "cache_60"
    cached.get(target)          # warm the cache
    second = cached.get(target)
    assert second.from_cache
    cached.close()
def sess(self, url):
    """Yield a DictCache-backed CacheControl session using a NullSerializer.

    Also records the etag endpoints and the cache on the test instance for
    later inspection by the tests.
    """
    self.etag_url = urljoin(url, "/etag")
    self.update_etag_url = urljoin(url, "/update_etag")
    self.cache = DictCache()
    cached_session = CacheControl(
        requests.Session(), cache=self.cache, serializer=NullSerializer()
    )
    yield cached_session
    # teardown: release the underlying requests session
    cached_session.close()
class SQLiteCacheTest(unittest.TestCase):
    """Exercise CacheControl with an in-memory SQLite cache backend."""

    def setUp(self):
        self.url = "https://httpbin.org/cache/60"
        self.sess = CacheControl(requests.Session(), cache=SQLiteCache(":memory:"))

    def tearDown(self):
        self.sess.close()

    def test_simple(self):
        # First request goes to the network, second is served from cache.
        first = self.sess.get(self.url)
        assert not first.from_cache
        second = self.sess.get(self.url)
        assert second.from_cache
def fetch_url(url, cache_dir, expire_time):
    """Fetch *url* into *cache_dir*, reusing a cached copy younger than *expire_time*.

    Downloads to a '<filename>.new' temp file and atomically moves it into
    place so readers never observe a partially written file.  On any failure
    both the partial temp file and the (possibly stale) destination are
    removed and the error is re-raised.

    Returns:
        str: path of the local cached file.
    """
    filename = get_filename(url, cache_dir)

    # Don't fetch if mtime new enough
    try:
        stat = os.stat(filename)
        if time.time() < stat.st_mtime + expire_time:
            print("CACH: {0}: {1}".format(url, os.path.basename(filename)))
            return filename
    except OSError:
        pass  # no cached copy yet

    # Setup HTTP cache, if available
    session = requests.session()
    if CacheControl is not None:
        web_cache = os.path.join(cache_dir, 'web-cache')
        session = CacheControl(session, cache=FileCache(web_cache))

    # Fetch
    print("GET : {0}: {1}".format(url, os.path.basename(filename)))
    headers = {'User-agent': 'staticplanetscipy'}
    try:
        with session.get(url, headers=headers, stream=True) as r, \
                open(filename + '.new', 'wb') as f:
            for chunk in r.iter_content(chunk_size=65536):
                f.write(chunk)
        # BUG FIX: os.rename fails on Windows when the destination exists;
        # os.replace overwrites atomically on all platforms.
        os.replace(filename + '.new', filename)
    except BaseException:
        # Explicit BaseException instead of a bare `except:` — same coverage
        # (we re-raise), clearer intent.  Clean up both the partial download
        # and any stale destination before propagating the error.
        for leftover in (filename + '.new', filename):
            try:
                os.unlink(leftover)
            except FileNotFoundError:
                pass
        raise
    finally:
        session.close()

    return filename
class PyPiRepository(RemoteRepository):
    """Package repository backed by the PyPI JSON API.

    Combines three caches: a file-backed release cache, an in-memory package
    cache (both via CacheManager) and an HTTP cache (CacheControl) for the
    underlying requests session.
    """

    # Version stamped into cached release entries; bumping it invalidates
    # every previously cached release (see get_release_info).
    CACHE_VERSION = parse_constraint("1.0.0")

    def __init__(
        self,
        url: str = "https://pypi.org/",
        disable_cache: bool = False,
        fallback: bool = True,
    ) -> None:
        # The RemoteRepository base is pointed at the "simple" index.
        super().__init__(url.rstrip("/") + "/simple/")
        self._base_url = url
        self._disable_cache = disable_cache
        # When True, missing requires_dist metadata is recovered by
        # downloading and introspecting the actual archives.
        self._fallback = fallback

        release_cache_dir = REPOSITORY_CACHE_DIR / "pypi"
        self._cache = CacheManager({
            "default": "releases",
            "serializer": "json",
            "stores": {
                "releases": {
                    "driver": "file",
                    "path": str(release_cache_dir)
                },
                "packages": {
                    "driver": "dict"
                },
            },
        })

        self._cache_control_cache = FileCache(str(release_cache_dir / "_http"))
        self._session = CacheControl(requests.session(), cache=self._cache_control_cache)

        self._name = "PyPI"

    @property
    def session(self) -> CacheControl:
        """The HTTP-cached requests session used for all API calls."""
        return self._session

    def __del__(self) -> None:
        # NOTE(review): relying on __del__ for cleanup is fragile — it may run
        # late or (at interpreter shutdown) not at all; an explicit close()
        # or context-manager protocol would be safer.  Confirm before changing.
        self._session.close()

    def find_packages(self, dependency: Dependency) -> List[Package]:
        """
        Find packages on the remote server.

        Returns the packages matching the dependency's constraint; if every
        matching version is a pre-release and the constraint is "any", the
        ignored pre-releases are returned instead.
        """
        constraint = dependency.constraint
        if constraint is None:
            constraint = "*"

        if not isinstance(constraint, VersionConstraint):
            constraint = parse_constraint(constraint)

        allow_prereleases = dependency.allows_prereleases()
        # A range bounded by unstable versions implicitly opts into pre-releases.
        if isinstance(constraint, VersionRange) and (
            constraint.max is not None and constraint.max.is_unstable()
            or constraint.min is not None and constraint.min.is_unstable()):
            allow_prereleases = True

        try:
            info = self.get_package_info(dependency.name)
        except PackageNotFound:
            self._log(
                f"No packages found for {dependency.name} {constraint!s}",
                level="debug",
            )
            return []

        packages = []
        ignored_pre_release_packages = []

        for version, release in info["releases"].items():
            if not release:
                # Bad release
                self._log(
                    f"No release information found for {dependency.name}-{version}, skipping",
                    level="debug",
                )
                continue

            try:
                package = Package(info["info"]["name"], version)
            except InvalidVersion:
                self._log(
                    f'Unable to parse version "{version}" for the {dependency.name} package, skipping',
                    level="debug",
                )
                continue

            if package.is_prerelease() and not allow_prereleases:
                if constraint.is_any():
                    # we need this when all versions of the package are pre-releases
                    ignored_pre_release_packages.append(package)
                continue

            if not constraint or (constraint and constraint.allows(package.version)):
                packages.append(package)

        self._log(
            f"{len(packages)} packages found for {dependency.name} {constraint!s}",
            level="debug",
        )

        return packages or ignored_pre_release_packages

    def package(
        self,
        name: str,
        version: str,
        extras: (Union[list, None]) = None,
    ) -> Package:
        """Build a Package for a specific (name, version), honoring extras."""
        return self.get_release_info(name, version).to_package(name=name, extras=extras)

    def search(self, query: str) -> List[Package]:
        """Search PyPI's HTML search page and scrape the result snippets.

        NOTE(review): this creates a throwaway requests session that is never
        closed (and bypasses self._session's HTTP cache) — presumably
        intentional to avoid caching search pages, but verify.
        """
        results = []

        search = {"q": query}
        response = requests.session().get(self._base_url + "search", params=search)
        content = parse(response.content, namespaceHTMLElements=False)
        for result in content.findall(".//*[@class='package-snippet']"):
            name = result.find("h3/*[@class='package-snippet__name']").text
            version = result.find(
                "h3/*[@class='package-snippet__version']").text

            if not name or not version:
                continue

            description = result.find(
                "p[@class='package-snippet__description']").text
            if not description:
                description = ""

            try:
                result = Package(name, version, description)
                result.description = to_str(description.strip())
                results.append(result)
            except InvalidVersion:
                self._log(
                    f'Unable to parse version "{version}" for the {name} package, skipping',
                    level="debug",
                )

        return results

    def get_package_info(self, name: str) -> dict:
        """
        Return the package information given its name.

        The information is returned from the cache if it exists
        or retrieved from the remote server.
        """
        if self._disable_cache:
            return self._get_package_info(name)

        # In-memory "packages" store: cached for the process lifetime only.
        return self._cache.store("packages").remember_forever(
            name, lambda: self._get_package_info(name))

    def _get_package_info(self, name: str) -> dict:
        # Raw JSON-API fetch; a 404 surfaces as PackageNotFound.
        data = self._get(f"pypi/{name}/json")
        if data is None:
            raise PackageNotFound(f"Package [{name}] not found.")

        return data

    def get_release_info(self, name: str, version: str) -> "PackageInfo":
        """
        Return the release information given a package name and a version.

        The information is returned from the cache if it exists
        or retrieved from the remote server.
        """
        from poetry.inspection.info import PackageInfo

        if self._disable_cache:
            return PackageInfo.load(self._get_release_info(name, version))

        cached = self._cache.remember_forever(
            f"{name}:{version}", lambda: self._get_release_info(name, version))

        # Entries written by an older client carry an older _cache_version
        # and are refreshed in place.
        cache_version = cached.get("_cache_version", "0.0.0")
        if parse_constraint(cache_version) != self.CACHE_VERSION:
            # The cache must be updated
            self._log(
                f"The cache for {name} {version} is outdated. Refreshing.",
                level="debug",
            )
            cached = self._get_release_info(name, version)

            self._cache.forever(f"{name}:{version}", cached)

        return PackageInfo.load(cached)

    def find_links_for_package(self, package: Package) -> List[Link]:
        """Return download links (with sha256 fragments) for a package's files."""
        json_data = self._get(f"pypi/{package.name}/{package.version}/json")
        if json_data is None:
            return []

        links = []
        for url in json_data["urls"]:
            h = f"sha256={url['digests']['sha256']}"
            links.append(Link(url["url"] + "#" + h))

        return links

    def _get_release_info(self, name: str, version: str) -> dict:
        """Fetch release metadata from PyPI, falling back to archive
        introspection when requires_dist is missing."""
        from poetry.inspection.info import PackageInfo

        self._log(f"Getting info for {name} ({version}) from PyPI", "debug")

        json_data = self._get(f"pypi/{name}/{version}/json")
        if json_data is None:
            raise PackageNotFound(f"Package [{name}] not found.")

        info = json_data["info"]

        data = PackageInfo(
            name=info["name"],
            version=info["version"],
            summary=info["summary"],
            platform=info["platform"],
            requires_dist=info["requires_dist"],
            requires_python=info["requires_python"],
            files=info.get("files", []),
            cache_version=str(self.CACHE_VERSION),
        )

        try:
            version_info = json_data["releases"][version]
        except KeyError:
            version_info = []

        for file_info in version_info:
            data.files.append({
                "file": file_info["filename"],
                "hash": "sha256:" + file_info["digests"]["sha256"],
            })

        if self._fallback and data.requires_dist is None:
            self._log("No dependencies found, downloading archives",
                      level="debug")
            # No dependencies set (along with other information)
            # This might be due to actually no dependencies
            # or badly set metadata when uploading
            # So, we need to make sure there is actually no
            # dependencies by introspecting packages
            urls = defaultdict(list)
            for url in json_data["urls"]:
                # Only get sdist and wheels if they exist
                dist_type = url["packagetype"]
                if dist_type not in ["sdist", "bdist_wheel"]:
                    continue

                urls[dist_type].append(url["url"])

            if not urls:
                return data.asdict()

            info = self._get_info_from_urls(urls)

            data.requires_dist = info.requires_dist

            if not data.requires_python:
                data.requires_python = info.requires_python

        return data.asdict()

    def _get(self, endpoint: str) -> Union[dict, None]:
        """GET an API endpoint; returns the parsed JSON, or None on 404."""
        try:
            json_response = self.session.get(self._base_url + endpoint)
        except requests.exceptions.TooManyRedirects:
            # Cache control redirect loop.
            # We try to remove the cache and try again
            self._cache_control_cache.delete(self._base_url + endpoint)
            json_response = self.session.get(self._base_url + endpoint)

        if json_response.status_code == 404:
            return None

        return json_response.json()

    def _get_info_from_urls(self, urls: Dict[str, List[str]]) -> "PackageInfo":
        """Pick the best archive among *urls* and introspect it for metadata."""
        # Checking wheels first as they are more likely to hold
        # the necessary information
        if "bdist_wheel" in urls:
            # Check for a universal wheel
            wheels = urls["bdist_wheel"]

            universal_wheel = None
            universal_python2_wheel = None
            universal_python3_wheel = None
            platform_specific_wheels = []
            for wheel in wheels:
                link = Link(wheel)
                m = wheel_file_re.match(link.filename)
                if not m:
                    continue

                pyver = m.group("pyver")
                abi = m.group("abi")
                plat = m.group("plat")
                if abi == "none" and plat == "any":
                    # Universal wheel
                    if pyver == "py2.py3":
                        # Any Python
                        universal_wheel = wheel
                    elif pyver == "py2":
                        universal_python2_wheel = wheel
                    else:
                        universal_python3_wheel = wheel
                else:
                    platform_specific_wheels.append(wheel)

            if universal_wheel is not None:
                return self._get_info_from_wheel(universal_wheel)

            info = None
            if universal_python2_wheel and universal_python3_wheel:
                # Merge py2-only and py3-only requirements into one list with
                # python_version markers attached to the non-shared ones.
                info = self._get_info_from_wheel(universal_python2_wheel)

                py3_info = self._get_info_from_wheel(universal_python3_wheel)
                if py3_info.requires_dist:
                    if not info.requires_dist:
                        info.requires_dist = py3_info.requires_dist

                        return info

                    py2_requires_dist = {
                        Dependency.create_from_pep_508(r).to_pep_508()
                        for r in info.requires_dist
                    }
                    py3_requires_dist = {
                        Dependency.create_from_pep_508(r).to_pep_508()
                        for r in py3_info.requires_dist
                    }

                    base_requires_dist = py2_requires_dist & py3_requires_dist
                    py2_only_requires_dist = py2_requires_dist - py3_requires_dist
                    py3_only_requires_dist = py3_requires_dist - py2_requires_dist

                    # Normalizing requires_dist
                    requires_dist = list(base_requires_dist)
                    for requirement in py2_only_requires_dist:
                        dep = Dependency.create_from_pep_508(requirement)
                        dep.marker = dep.marker.intersect(
                            parse_marker("python_version == '2.7'"))
                        requires_dist.append(dep.to_pep_508())

                    for requirement in py3_only_requires_dist:
                        dep = Dependency.create_from_pep_508(requirement)
                        dep.marker = dep.marker.intersect(
                            parse_marker("python_version >= '3'"))
                        requires_dist.append(dep.to_pep_508())

                    info.requires_dist = sorted(set(requires_dist))

            if info:
                return info

            # Prefer non platform specific wheels
            if universal_python3_wheel:
                return self._get_info_from_wheel(universal_python3_wheel)

            if universal_python2_wheel:
                return self._get_info_from_wheel(universal_python2_wheel)

            if platform_specific_wheels and "sdist" not in urls:
                # Pick the first wheel available and hope for the best
                return self._get_info_from_wheel(platform_specific_wheels[0])

        return self._get_info_from_sdist(urls["sdist"][0])

    def _get_info_from_wheel(self, url: str) -> "PackageInfo":
        """Download a wheel to a temp dir and extract its metadata."""
        from poetry.inspection.info import PackageInfo

        wheel_name = urllib.parse.urlparse(url).path.rsplit("/")[-1]

        self._log(f"Downloading wheel: {wheel_name}", level="debug")

        filename = os.path.basename(wheel_name)

        with temporary_directory() as temp_dir:
            filepath = Path(temp_dir) / filename
            self._download(url, str(filepath))

            return PackageInfo.from_wheel(filepath)

    def _get_info_from_sdist(self, url: str) -> "PackageInfo":
        """Download an sdist to a temp dir and extract its metadata."""
        from poetry.inspection.info import PackageInfo

        sdist_name = urllib.parse.urlparse(url).path

        self._log(f"Downloading sdist: {sdist_name.rsplit('/')[-1]}",
                  level="debug")

        filename = os.path.basename(sdist_name)

        with temporary_directory() as temp_dir:
            filepath = Path(temp_dir) / filename
            self._download(url, str(filepath))

            return PackageInfo.from_sdist(filepath)

    def _download(self, url: str, dest: str) -> None:
        """Download *url* to *dest* through the cached session."""
        return download_file(url, dest, session=self.session)

    def _log(self, msg: str, level: str = "info") -> None:
        # Dispatch to the module logger at the named level.
        getattr(logger, level)(f"<debug>{self._name}:</debug> {msg}")
def sess():
    """Yield a CacheControl-wrapped requests session with the default cache."""
    wrapped = CacheControl(requests.Session())
    yield wrapped
    # teardown: release the underlying requests session
    wrapped.close()
def test_file_cache_recognizes_consumed_file_handle(self):
    """A second GET of the same cacheable URL must come from the file cache."""
    cached = CacheControl(Session(), FileCache("web_cache"))
    cached.get("http://httpbin.org/cache/60")  # warm the cache
    second = cached.get("http://httpbin.org/cache/60")
    assert second.from_cache
    cached.close()
class Connection(object):
    """
    Handler for connection and calls to the Open Targets Validation Platform
    REST API.

    Supports optional token-based authentication and an optional HTTP/2
    transport; responses are cached via CacheControl.
    """

    _AUTO_GET_TOKEN = 'auto'

    def __init__(self,
                 host='https://www.targetvalidation.org',
                 port=443,
                 api_version='latest',
                 auth_app_name=None,
                 auth_secret=None,
                 use_http2=False,
                 ):
        """
        Args:
            host (str): host serving the API
            port (int): port to use for connection to the API
            api_version (str): api version to point to, default to 'latest'
            auth_app_name (str): app_name if using authentication
            auth_secret (str): secret if using authentication
            use_http2 (bool): use http2 client
        """
        self._logger = logging.getLogger(__name__)
        self.host = host
        self.port = str(port)
        self.api_version = api_version
        self.auth_app_name = auth_app_name
        self.auth_secret = auth_secret
        # Auth is enabled only when both credentials are provided.
        if self.auth_app_name and self.auth_secret:
            self.use_auth = True
        else:
            self.use_auth = False
        self.token = None
        self.use_http2 = use_http2
        session = requests.Session()
        if self.use_http2:
            session.mount(host, HTTP20Adapter())
        self.session = CacheControl(session)
        # Eagerly fetch the swagger spec so parameter validation works.
        self._get_remote_api_specs()

    def _build_url(self, endpoint):
        """Compose a full URL for *endpoint* under the configured API version."""
        return '{}:{}/api/{}{}'.format(
            self.host,
            self.port,
            self.api_version,
            endpoint,
        )

    @staticmethod
    def _auto_detect_post(params):
        """
        Determine if a post request should be made instead of a get depending
        on the size of the parameters in the request.

        Args:
            params (dict): params to pass in the request

        Returns:
            Boolean: True if post is needed
        """
        if params:
            for k, v in params.items():
                # Any list/tuple parameter longer than 3 items triggers POST.
                if isinstance(v, (list, tuple)):
                    if len(v) > 3:
                        return True
        return False

    def get(self, endpoint, params=None):
        """
        makes a GET request

        Args:
            endpoint (str): REST API endpoint to call
            params (dict): request payload

        Returns:
            Response: request response
        """
        if self._auto_detect_post(params):
            self._logger.debug('switching to POST due to big size of params')
            return self.post(endpoint, data=params)
        return Response(
            self._make_request(endpoint, params=params, method='GET'))

    def post(self, endpoint, data=None):
        """
        makes a POST request

        Args:
            endpoint (str): REST API endpoint to call
            data (dict): request payload

        Returns:
            Response: request response
        """
        return Response(self._make_request(endpoint, data=data, method='POST'))

    def _make_token_request(self, expire=60):
        """
        Asks for a token to the API

        Args:
            expire (int): expiration time for the token

        Returns:
            response for the get token request
        """
        return self._make_request('/public/auth/request_token',
                                  params={
                                      'app_name': self.auth_app_name,
                                      'secret': self.auth_secret,
                                      'expiry': expire
                                  },
                                  headers={
                                      'Cache-Control': 'no-cache',
                                  })

    def get_token(self, expire=60):
        """
        Asks for a token to the API

        Args:
            expire (int): expiration time for the token

        Returns:
            str: the token served by the API
        """
        response = self._make_token_request(expire)
        return response.json()['token']

    def _make_request(self,
                      endpoint,
                      params=None,
                      data=None,
                      method=HTTPMethods.GET,
                      headers=None,
                      rate_limit_fail=False,
                      **kwargs):
        """
        Makes a request to the REST API

        Args:
            endpoint (str): endpoint of the REST API
            params (dict): payload for GET request
            data (dict): payload for POST request
            method (HTTPMethods): request method, either HTTPMethods.GET or
                HTTPMethods.POST. Defaults to HTTPMethods.GET
            headers (dict): HTTP headers for the request
            rate_limit_fail (bool): If True raise exception when usage limit
                is exceeded. If False wait and retry the request.
                Defaults to False.

        Keyword Args:
            **kwargs: forwarded to requests

        Returns:
            a response from requests
        """
        # BUG FIX: the original declared `headers={}` — a mutable default that
        # this method mutates (User-agent, Auth-Token), so the auth token of
        # one call leaked into every later call sharing the default dict.
        # Copy the caller's dict too, so we never mutate their object.
        headers = {} if headers is None else dict(headers)

        def call():
            headers['User-agent'] = 'Open Targets Python Client/%s' % str(
                __version__)
            # HTTP/2 forbids certain hop-by-hop headers; strip them.
            if self.use_http2 and set(headers.keys()) & INVALID_HTTP2_HEADERS:
                for h in INVALID_HTTP2_HEADERS:
                    if h in headers:
                        del headers[h]
            return self.session.request(method,
                                        self._build_url(endpoint),
                                        params=params,
                                        json=data,
                                        headers=headers,
                                        **kwargs)

        # order params to allow efficient caching
        if params is not None:
            if isinstance(params, dict):
                params = sorted(params.items())
            else:
                params = sorted(params)

        if self.use_auth and 'request_token' not in endpoint:
            if self.token is None:
                self._update_token()
            if self.token is not None:
                headers['Auth-Token'] = self.token

        response = None
        default_retry_after = 5
        if not rate_limit_fail:
            # Loop on 429 (rate limited) and 419 (token expired) until a
            # different status comes back; connection errors also retry.
            status_code = 429
            while status_code in [429, 419]:
                try:
                    response = call()
                    status_code = response.status_code
                    if status_code == 429:
                        retry_after = default_retry_after
                        if 'Retry-After' in response.headers:
                            retry_after = float(
                                response.headers['Retry-After'])
                        self._logger.warning(
                            'Maximum usage limit hit. Retrying in {} seconds'.
                            format(retry_after))
                        time.sleep(retry_after)
                    elif status_code == 419:
                        self._update_token(force=True)
                        headers['Auth-Token'] = self.token
                        time.sleep(0.5)
                except MaxRetryError as e:
                    self._logger.exception(e.args[0].reason)
                    self._logger.warning(
                        'Problem connecting to the remote API. Retrying in {} seconds'
                        .format(default_retry_after))
                    time.sleep(default_retry_after)
                except OSError as e:
                    self._logger.exception(str(e))
                    self._logger.warning(
                        'Problem connecting to the remote API. Retrying in {} seconds'
                        .format(default_retry_after))
                    time.sleep(default_retry_after)
        else:
            response = call()

        response.raise_for_status()
        return response

    def _update_token(self, force=False):
        """
        Update token when expired
        """
        if self.token and not force:
            token_valid_response = self._make_request(
                '/public/auth/validate_token',
                headers={'Auth-Token': self.token})
            if token_valid_response.status_code == 200:
                return
            elif token_valid_response.status_code == 419:
                pass  # expired: fall through and fetch a fresh token
            else:
                token_valid_response.raise_for_status()

        self.token = self.get_token()

    def _get_remote_api_specs(self):
        """
        Fetch and parse REST API documentation
        """
        r = self.session.get(self.host + ':' + self.port +
                             '/api/docs/swagger.yaml')
        r.raise_for_status()
        self.swagger_yaml = r.text
        # NOTE(security): yaml.load without an explicit Loader can construct
        # arbitrary objects; yaml.safe_load would be preferable if the spec
        # never uses custom tags — left unchanged pending verification.
        self.api_specs = yaml.load(self.swagger_yaml)
        self.endpoint_validation_data = {}
        for p, data in self.api_specs['paths'].items():
            # Normalize templated paths: drop '{param}' suffixes and any
            # trailing slash before indexing.
            p = p.split('{')[0]
            if p[-1] == '/':
                p = p[:-1]
            self.endpoint_validation_data[p] = {}
            for method, method_data in data.items():
                if 'parameters' in method_data:
                    params = {}
                    for par in method_data['parameters']:
                        par_type = par.get('type', 'string')
                        params[par['name']] = par_type
                    self.endpoint_validation_data[p][method] = params

        remote_version = self.get('/public/utils/version').data
        # TODO because content type wasnt checked proerly a float
        # was returned instead a proper version string
        # BUG FIX: the original check was missing `not`, so the incompatibility
        # warning fired exactly when the versions DID match (the sibling
        # platform-api client has the correct `not` form).
        if not str(remote_version).startswith(API_MAJOR_VERSION):
            self._logger.warning(
                'The remote server is running the API with version {}, but the client expected this major version {}. They may not be compatible.'
                .format(remote_version, API_MAJOR_VERSION))

    def validate_parameter(self,
                           endpoint,
                           filter_type,
                           value,
                           method=HTTPMethods.GET):
        """
        Validate payload to send to the REST API based on info fetched from
        the API documentation

        Args:
            endpoint (str): endpoint of the REST API
            filter_type (str): the parameter sent for the request
            value: the value sent for the request
            method (HTTPMethods): request method, either HTTPMethods.GET or
                HTTPMethods.POST. Defaults to HTTPMethods.GET

        Raises
            AttributeError: if validation is not passed
        """
        endpoint_data = self.endpoint_validation_data[endpoint][method]
        if filter_type in endpoint_data:
            if endpoint_data[filter_type] == 'string' and isinstance(
                    value, str):
                return
            elif endpoint_data[filter_type] == 'boolean' and isinstance(
                    value, bool):
                return
            elif endpoint_data[filter_type] == 'number' and isinstance(
                    value, (int, float)):
                return
        raise AttributeError(
            '{}={} is not a valid parameter for endpoint {}'.format(
                filter_type, value, endpoint))

    def api_endpoint_docs(self, endpoint):
        """
        Returns the documentation available for a given REST API endpoint

        Args:
            endpoint (str): endpoint of the REST API

        Returns:
            dict: documentation for the endpoint parsed from YAML docs
        """
        return self.api_specs['paths'][endpoint]

    def get_api_endpoints(self):
        """
        Get a list of available endpoints

        Returns:
            list: available endpoints
        """
        return self.api_specs['paths'].keys()

    def close(self):
        """
        Close connection to the REST API
        """
        self.session.close()

    def ping(self):
        """
        Pings the API as a live check

        Returns:
            bool: True if pinging the raw response as a ``str`` if the API
            has a non standard name
        """
        response = self.get('/public/utils/ping')
        if response.data == 'pong':
            return True
        elif response.data:
            return response.data
        return False
class Connection(object):
    """
    Handler for connection and calls to the Open Targets Validation Platform
    REST API (platform-api variant with retrying transport and optional
    custom auth/proxies/TLS verification).
    """

    def __init__(self,
                 host='https://platform-api.opentargets.io',
                 port=443,
                 api_version='v3',
                 verify=True,
                 proxies={},
                 auth=None):
        """
        Args:
            host (str): host serving the API
            port (int): port to use for connection to the API
            api_version (str): api version to point to, default to 'latest'
            verify (bool): sets SSL verification for Request session,
                accepts True, False or a path to a certificate
            auth (AuthBase): sets the custom authentication object to use for
                requests made to the API. Should be one of the built in
                options provided by the reqests package, or a subclass of
                requests.auth.AuthBase.
        """
        self._logger = logging.getLogger(__name__)
        self.host = host
        self.port = str(port)
        self.api_version = api_version

        session = requests.Session()
        session.verify = verify
        session.proxies = proxies
        session.auth = auth
        # Retry transient server errors with exponential backoff.
        retry_policies = Retry(
            total=10,
            read=10,
            connect=10,
            backoff_factor=.5,
            status_forcelist=(500, 502, 504),
        )
        http_retry = HTTPAdapter(max_retries=retry_policies)
        session.mount(host, http_retry)
        self.session = CacheControl(session)
        # Eagerly fetch the swagger spec so parameter validation works.
        self._get_remote_api_specs()

    def _build_url(self, endpoint):
        """Compose a full URL for *endpoint* under the configured API version."""
        url = '{}:{}/{}{}'.format(
            self.host,
            self.port,
            self.api_version,
            endpoint,
        )
        return url

    @staticmethod
    def _auto_detect_post(params):
        """
        Determine if a post request should be made instead of a get depending
        on the size of the parameters in the request.

        Args:
            params (dict): params to pass in the request

        Returns:
            Boolean: True if post is needed
        """
        if params:
            for k, v in params.items():
                # Any list/tuple parameter longer than 3 items triggers POST.
                if isinstance(v, (list, tuple)):
                    if len(v) > 3:
                        return True
        return False

    def get(self, endpoint, params=None):
        """
        makes a GET request

        Args:
            endpoint (str): REST API endpoint to call
            params (dict): request payload

        Returns:
            Response: request response
        """
        if self._auto_detect_post(params):
            self._logger.debug('switching to POST due to big size of params')
            return self.post(endpoint, data=params)
        return Response(
            self._make_request(endpoint, params=params, method='GET'))

    def post(self, endpoint, data=None):
        """
        makes a POST request

        Args:
            endpoint (str): REST API endpoint to call
            data (dict): request payload

        Returns:
            Response: request response
        """
        return Response(self._make_request(endpoint, data=data, method='POST'))

    def _make_request(self,
                      endpoint,
                      params=None,
                      data=None,
                      method=HTTPMethods.GET,
                      headers=None,
                      rate_limit_fail=False,
                      **kwargs):
        """
        Makes a request to the REST API

        Args:
            endpoint (str): endpoint of the REST API
            params (dict): payload for GET request
            data (dict): payload for POST request
            method (HTTPMethods): request method, either HTTPMethods.GET or
                HTTPMethods.POST. Defaults to HTTPMethods.GET
            headers (dict): HTTP headers for the request
            rate_limit_fail (bool): If True raise exception when usage limit
                is exceeded. If False wait and retry the request.
                Defaults to False.

        Keyword Args:
            **kwargs: forwarded to requests

        Returns:
            a response from requests
        """
        # BUG FIX: the original declared `headers={}` — a mutable default that
        # this method then mutates (User-agent), so header state leaked across
        # calls sharing the default dict.  Copy the caller's dict as well.
        headers = {} if headers is None else dict(headers)

        # order params to allow efficient caching
        if params:
            if isinstance(params, dict):
                params = sorted(params.items())
            else:
                params = sorted(params)

        headers['User-agent'] = 'Open Targets Python Client/%s' % str(
            __version__)

        response = self.session.request(method,
                                        self._build_url(endpoint),
                                        params=params,
                                        json=data,
                                        headers=headers,
                                        **kwargs)

        response.raise_for_status()
        return response

    def _get_remote_api_specs(self):
        """
        Fetch and parse REST API documentation
        """
        r = self.session.get(self.host + ':' + self.port +
                             '/v%s/platform/swagger' % API_MAJOR_VERSION)
        r.raise_for_status()
        self.swagger_yaml = r.text
        # NOTE(security): yaml.load without an explicit Loader can construct
        # arbitrary objects; yaml.safe_load would be preferable if the spec
        # never uses custom tags — left unchanged pending verification.
        self.api_specs = yaml.load(self.swagger_yaml)
        self.endpoint_validation_data = {}
        for p, data in self.api_specs['paths'].items():
            # Normalize templated paths, then register the endpoint both with
            # and without the '/platform' prefix.
            p = p.split('{')[0]
            if p[-1] == '/':
                p = p[:-1]
            self.endpoint_validation_data[p] = {}
            self.endpoint_validation_data['/platform' + p] = {}
            for method, method_data in data.items():
                if 'parameters' in method_data:
                    params = {}
                    for par in method_data['parameters']:
                        par_type = par.get('type', 'string')
                        params[par['name']] = par_type
                    self.endpoint_validation_data[p][method] = params
                    self.endpoint_validation_data['/platform' +
                                                  p][method] = params

        remote_version = self.get('/platform/public/utils/version').data
        # TODO because content type wasnt checked proerly a float
        # was returned instead a proper version string
        if not str(remote_version).startswith(API_MAJOR_VERSION):
            self._logger.warning(
                'The remote server is running the API with version {}, but the client expected this major version {}. They may not be compatible.'
                .format(remote_version, API_MAJOR_VERSION))

    def validate_parameter(self,
                           endpoint,
                           filter_type,
                           value,
                           method=HTTPMethods.GET):
        """
        Validate payload to send to the REST API based on info fetched from
        the API documentation

        Args:
            endpoint (str): endpoint of the REST API
            filter_type (str): the parameter sent for the request
            value: the value sent for the request
            method (HTTPMethods): request method, either HTTPMethods.GET or
                HTTPMethods.POST. Defaults to HTTPMethods.GET

        Raises
            AttributeError: if validation is not passed
        """
        endpoint_data = self.endpoint_validation_data[endpoint][method]
        if filter_type in endpoint_data:
            if endpoint_data[filter_type] == 'string' and isinstance(
                    value, str):
                return
            elif endpoint_data[filter_type] == 'boolean' and isinstance(
                    value, bool):
                return
            elif endpoint_data[filter_type] == 'number' and isinstance(
                    value, (int, float)):
                return
        raise AttributeError(
            '{}={} is not a valid parameter for endpoint {}'.format(
                filter_type, value, endpoint))

    def api_endpoint_docs(self, endpoint):
        """
        Returns the documentation available for a given REST API endpoint

        Args:
            endpoint (str): endpoint of the REST API

        Returns:
            dict: documentation for the endpoint parsed from YAML docs
        """
        return self.api_specs['paths'][endpoint]

    def get_api_endpoints(self):
        """
        Get a list of available endpoints

        Returns:
            list: available endpoints
        """
        return self.api_specs['paths'].keys()

    def close(self):
        """
        Close connection to the REST API
        """
        self.session.close()

    def ping(self):
        """
        Pings the API as a live check

        Returns:
            bool: True if pinging the raw response as a ``str`` if the API
            has a non standard name
        """
        response = self.get('/platform/public/utils/ping')
        if response.data == 'pong':
            return True
        elif response.data:
            return response.data
        return False