def paragraph_to_actions_on_success(
        response: requests.models.Response) -> dict:
    """
    Process the successful response of a paragraph to actions request.

    Args:
        response (requests.models.Response): response from an API request.

    Returns:
        dict: dictionary with the extracted action strings under "actions"
            and the full decoded body under "response".
    """
    response_dict = response.json()
    # The action sequence arrives as an HTML list; each <li> may hold
    # several ";"-separated actions.
    soup = BeautifulSoup(response_dict["payload"]["actionSequence"],
                         "html.parser")
    actions = []
    for item in soup.find_all("li"):
        for fragment in item.text.split(";"):
            actions.append(fragment.strip(" ."))
    return {
        "actions": actions,
        "response": response_dict,
    }
def paragraph_to_actions_on_success(
        response: requests.models.Response) -> dict:
    """
    Process the successful response of a paragraph to actions request.

    Args:
        response (requests.models.Response): response from an API request.

    Returns:
        dict: dictionary with the cleaned action strings and the raw
            decoded response body.
    """
    response_dict = response.json()
    # Parse the HTML action sequence once, then flatten every <li> item's
    # ";"-separated entries into a single list.
    items = BeautifulSoup(
        response_dict['payload']['actionSequence'],
        'html.parser',
    ).find_all('li')
    actions = [
        part.strip(' .')
        for item in items
        for part in item.text.split(';')
    ]
    return {'actions': actions, 'response': response_dict}
def read_commits(commits: 'requests.models.Response' = None,
                 repo: str = 'repo_name') -> list:
    """
    Create a list of JSON-ready dicts with commit counts grouped by day.

    :param commits: requests.models.Response - GitHub API response whose
        JSON body is a list of commit objects
    :param repo: str - repository name used to build the asset path
    :return: list of readings, one per calendar day, e.g.
        {
            "timestamp": "2018-06-18",
            "key": "ff7a5466-7c0a-11e8-ab26-0800275d93ce",
            "asset": "github/repo_name/commits",
            "readings": {"count": 12}
        }
    """
    commit_list = commits.json()

    # Count commits per calendar day (date part of the ISO timestamp);
    # dict insertion order preserves first-appearance order, as before.
    day_counts = {}
    for entry in commit_list:
        day = entry['commit']['author']['date'].split('T')[0]
        day_counts[day] = day_counts.get(day, 0) + 1

    # One reading per day, each keyed by a fresh UUID.
    return [
        {
            'timestamp': str(day),
            'key': str(uuid.uuid4()),
            'asset': 'github/%s/commits' % repo,
            'readings': {'count': count},
        }
        for day, count in day_counts.items()
    ]
def predict_reaction_batch_on_success(
        response: 'requests.models.Response') -> dict:
    """
    Process the successful response of requests returning predict reaction
    batch results.

    Args:
        response (requests.models.Response): response from an API request.

    Returns:
        dict: the task result when the task status is "DONE"; otherwise a
            dictionary carrying the task id, its status and (when the task
            is still "WAITING") an explanatory message.
    """
    response_dict = response.json()
    task = response_dict["payload"]["task"]
    identifier = task["task_id"]
    status = task["status"]
    if status == "DONE":
        # Finished tasks return the payload result directly.
        return response_dict["payload"]["result"]
    return_dict = {"task_id": identifier, "task_status": status}
    if status == "WAITING":
        # Fix: grammar of the user-facing message ("does not exists" ->
        # "does not exist").
        return_dict["message"] = (
            "Task waiting: either the task is submitted and not running "
            "or it does not exist in the queue."
        )
    return return_dict
def process_chains(response: requests.models.Response) -> pd.DataFrame:
    """
    Function to take in the requests.get and return a DataFrame

    Parameters
    ----------
    response: requests.models.Response
        This is the response from tradier api.

    Returns
    -------
    opt_chain: pd.DataFrame
        Dataframe with all available options
    """
    json_response = response.json()
    options = json_response["options"]["option"]

    opt_chain = pd.DataFrame(columns=df_columns)
    for idx, option in enumerate(options):
        # Robustness fix: missing fields now default to "" instead of
        # raising KeyError, and a null/absent "greeks" payload no longer
        # raises TypeError.
        data = [option.get(col, "") for col in option_columns]
        greeks = option.get("greeks") or {}
        data += [greeks.get(col, "") for col in greek_columns]
        opt_chain.loc[idx, :] = data

    return opt_chain
def should_backoff(response: 'requests.models.Response'):
    """
    Work out if the client should retry after a 5xx or 429.

    Args:
        response (requests.models.Response): The response object.

    Returns:
        bool: True when the request should be retried.
    """
    # Note right now, if something in-between us and the http returns a 429, this will raise an exception.
    # Always retry on a 5xx.
    if response.status_code >= 500:
        return True
    # Don't try to decode json content if content-type is not json.
    # Fix: prefix match so parameterised types such as
    # "application/json; charset=utf-8" are still recognised as JSON.
    content_type = response.headers.get("Content-Type") or ""
    if not content_type.startswith("application/json"):
        return False
    # Attempt to get error message and if not present, give up.
    try:
        msg = response.json()["message"]
    except (KeyError, json.decoder.JSONDecodeError):
        return False
    return msg == "Rate limited, too many requests per second"
def automatic_retrosynthesis_results_on_success(
        response: requests.models.Response) -> dict:
    """
    Process the successful response of an automatic retrosynthesis result
    request.

    Args:
        response (requests.models.Response): response from an API request.

    Returns:
        dict: the retrosynthetic path trees, the payload status, and the
            raw decoded response.
    """
    response_dict = response.json()
    payload = response_dict['payload']
    # Each sequence contributes its 'tree' to the list of paths.
    paths = []
    for sequence in payload['sequences']:
        paths.append(sequence['tree'])
    return {
        'retrosynthetic_paths': paths,
        'status': payload['status'],
        'response': response_dict,
    }
def process_chains(response: requests.models.Response) -> pd.DataFrame:
    """Function to take in the requests.get and return a DataFrame

    Parameters
    ----------
    response: requests.models.Response
        This is the response from tradier api.

    Returns
    -------
    opt_chain: pd.DataFrame
        Dataframe with all available options
    """
    payload = response.json()
    chain = pd.DataFrame(columns=df_columns)

    for row_index, contract in enumerate(payload["options"]["option"]):
        # Every column starts as "" so missing fields stay blank.
        row = dict.fromkeys(df_columns, "")

        # Copy over whichever top-level option fields are present.
        for col in option_columns:
            if col in contract:
                row[col] = contract[col]

        # Greeks may be null; only copy values when the payload has them.
        greeks = contract["greeks"]
        if greeks:
            for col in greek_columns:
                if col in greeks:
                    row[col] = greeks[col]

        chain.loc[row_index, :] = row

    return chain
def automatic_retrosynthesis_results_on_success(
        response: requests.models.Response,
) -> dict:
    """
    Process the successful response of an automatic retrosynthesis result
    request.

    Args:
        response (requests.models.Response): response from an API request.

    Returns:
        dict: post-processed retrosynthetic path trees, the payload status,
            and the raw decoded response.
    """
    response_dict = response.json()
    payload = response_dict["payload"]
    # Post-process each sequence's tree before returning it.
    trees = []
    for sequence in payload["sequences"]:
        trees.append(_postprocess_retrosynthesis_tree(sequence["tree"]))
    return {
        "retrosynthetic_paths": trees,
        "status": payload["status"],
        "response": response_dict,
    }
def check(response: requests.models.Response) -> None:
    """Raise RequestError when either the HTTP status or the JSON body's
    'statusCode' field reports a 400."""
    # Check the transport-level status first (and avoid decoding the body
    # when it already tells us enough).
    if response.status_code == 400:
        raise RequestError(response)
    if response.json().get('statusCode', 200) == 400:
        raise RequestError(response)
def check(response: requests.models.Response) -> None:
    """Raise InternalServerError for any 5xx reported either via the HTTP
    status or via the JSON body's 'statusCode' field."""
    # Transport-level status first; the body is only decoded when needed.
    if response.status_code >= 500:
        raise InternalServerError(response)
    if response.json().get('statusCode', 200) >= 500:
        raise InternalServerError(response)
def __init__(self, response: requests.models.Response) -> None:
    """Keep the raw response and derive the effective status code: on an
    HTTP 200 the code comes from the JSON body's 'statusCode' field,
    otherwise the HTTP status itself is used."""
    self.response = response
    if response.status_code != 200:
        self.status_code = response.status_code
    else:
        self.status_code = response.json()['statusCode']
def create_json_file(file_name: str,
                     response: 'requests.models.Response') -> None:  # noqa
    """Write the JSON body of *response* to *file_name*, pretty-printed.

    Args:
        file_name (str): path of the file to (over)write.
        response (requests.models.Response): response whose decoded JSON
            body is serialised.
    """
    # Fix: pin the encoding — the previous code used the platform default,
    # which breaks non-ASCII payloads on some systems.
    with open(file_name, "w", encoding="utf-8") as outfile:
        json.dump(response.json(), outfile, indent=2)
def check_response_status_vk(response: requests.models.Response):
    """Validate a VK API response.

    Raises for HTTP errors, then raises HTTPError when the decoded payload
    carries a VK-level 'error' field; otherwise returns the payload.
    """
    response.raise_for_status()
    payload = response.json()
    if 'error' not in payload:
        return payload
    # VK reports application-level failures inside an otherwise-OK body.
    raise requests.exceptions.HTTPError(payload['error'])
def __get_json(self, resp: requests.models.Response):
    """Return the decoded JSON body, raising NatureRemoError (with the
    status code and reason) for any non-OK response."""
    if not resp.ok:
        raise NatureRemoError(f"{resp.status_code} {resp.reason}")
    return resp.json()
def _check_response(request: requests.models.Response):
    """Raise BadResponse carrying the JSON-serialised body whenever the
    response status is not OK."""
    if request.ok:
        return
    raise BadResponse(json.dumps(request.json()))
def cdb_parse(
        http_resp: requests.models.Response,
        cdb_doc: str,
        cdb_name: str,
        attempt: int,
        attempts: int,
        log_en: bool,
        stat_en: bool = True,
        logfile: str = 'janusess',
):
    """
    Process an HTTP response from a CouchDB action, emitting log and
    status-queue messages for each outcome.

    :param http_resp: requests.models.Response - response from CouchDB
    :param cdb_doc: str - document name involved in the transaction
    :param cdb_name: str - CouchDB database name
    :param attempt: int - current attempt number
    :param attempts: int - total attempts allowed
    :param log_en: bool - enable logger output
    :param stat_en: bool - enable MPQ_ACT status-queue output
    :param logfile: str - logger name
    :return data_cdb_out: decoded JSON document on a 200, otherwise 0
    :return stat_cdb: STAT_LVL['op'] on success, STAT_LVL['op_evt'] on 404,
        STAT_LVL['crit'] on any other error
    :return http_cdb: raw HTTP status code from the response
    """
    logger = logging.getLogger(logfile)
    data_cdb_out = 0
    stat_cdb = STAT_LVL['op']
    http_cdb = http_resp.status_code

    # Successful GET attempt
    if http_resp.status_code == 200:
        data_cdb_out = http_resp.json()
        log = 'Attempt {0} of {1} to get document '.format(attempt, attempts) +\
            '{0} from CouchDB {1} data succeeded.'.format(cdb_doc, cdb_name)
        if log_en:
            logger.debug(log)
        if stat_en:
            MPQ_ACT.put_nowait(
                [datetime.now().isoformat(' '), 'DEBUG', log])

    # Successful store/update (201 Created)
    elif http_resp.status_code == 201:
        log = 'Attempt {0} of {1} to store/update document '.format(attempt, attempts) + \
            '{0} to/in CouchDB {1} database succeeded.'.format(cdb_doc, cdb_name)
        if log_en:
            logger.debug(log)
        if stat_en:
            MPQ_ACT.put_nowait(
                [datetime.now().isoformat(' '), 'DEBUG', log])

    # Successful compaction request (202 Accepted)
    elif http_resp.status_code == 202:
        log = 'Attempt {0} of {1} to compact CouchDB '.format(attempt, attempts) +\
            '{0} database succeeded.'.format(cdb_name)
        if log_en:
            logger.debug(log)
        if stat_en:
            MPQ_ACT.put_nowait(
                [datetime.now().isoformat(' '), 'DEBUG', log])

    # Document not found error
    elif http_resp.status_code == 404:
        stat_cdb = STAT_LVL['op_evt']
        # Only warn on the final attempt; earlier 404s stay silent.
        if attempt == attempts:
            log = 'Attempt to get document {0} from CouchDB '.format(cdb_doc) +\
                '{0} database failed, document not found.'.format(cdb_name)
            if log_en:
                logger.warning(log)
            if stat_en:
                MPQ_ACT.put_nowait(
                    [datetime.now().isoformat(' '), 'WARNING', log])

    # All other errors
    else:
        stat_cdb = STAT_LVL['crit']
        # Only warn on the final attempt; earlier failures stay silent.
        if attempt == attempts:
            log = 'Attempt {0} of {1} to execute document '.format(attempt, attempts) + \
                '{0} transaction in CouchDB {1} data '.format(cdb_doc, cdb_name) + \
                'returned http response {0}.'.format(http_resp.status_code)
            if log_en:
                logger.warning(log)
            if stat_en:
                MPQ_ACT.put_nowait(
                    [datetime.now().isoformat(' '), 'WARNING', log])

    return data_cdb_out, stat_cdb, http_cdb
def response_to_dataframe(resp: requests.models.Response) -> pd.DataFrame:
    """Build a DataFrame from the "Data" array of the response's JSON body."""
    payload = resp.json()["Data"]
    return pd.DataFrame(payload)
def _get_next_date_range(resp: 'requests.models.Response') -> int:
    """Return the unix time one hour before the first entry in the
    response's "Data" list.

    Fix: the previous implementation round-tripped the timestamp through a
    naive local ``datetime`` (``fromtimestamp`` ... ``mktime``), which drops
    fractional seconds and can shift the result around DST transitions.
    Plain integer arithmetic on the unix time avoids both issues.
    """
    # fetch the unix time from the first entry in the response Data
    first_entry_time = int(resp.json()["Data"][0]["time"])
    return first_entry_time - 3600  # one hour earlier
def check(response: requests.models.Response) -> None:
    """Raise CredentialError when either the HTTP status or the JSON body's
    'statusCode' field reports a 403."""
    # Transport-level status first; decode the body only when necessary.
    if response.status_code == 403:
        raise CredentialError(response)
    if response.json().get('statusCode', 200) == 403:
        raise CredentialError(response)
def get_total_gists(response: requests.models.Response) -> int:
    """Return the total number of a GitHub user's public gists."""
    payload = response.json()
    return payload.get('public_gists')
def build_error_message(resp: requests.models.Response) -> str:
    """Format a Nature Remo API error response into a readable message."""
    error = resp.json()
    parts = [
        f"HTTP Status Code: {resp.status_code}",
        f'Nature Remo Code: {error["code"]}',
        f'Message: {error["message"]}',
    ]
    return ", ".join(parts)
def __get_json(self, resp: requests.models.Response):
    """Record the rate-limit headers, then return the decoded JSON body;
    raises NatureRemoError for non-OK responses."""
    self.__set_rate_limit(resp)
    if not resp.ok:
        raise NatureRemoError(build_error_message(resp))
    return resp.json()
def _raise_for_status(self, response: requests.models.Response) -> None:
    """Raise for non-2xx responses, logging the body's 'exception' field
    (when present) before re-raising the HTTPError.

    :param response: response to validate.
    :raises HTTPError: whatever ``raise_for_status`` raised.
    """
    try:
        response.raise_for_status()
    except HTTPError:
        # Surface the server-provided detail before propagating.
        self.log.error(response.json().get('exception'))
        raise
def _test_page_limit(
        self,
        response: requests.models.Response,
        check_next_link: int = 5) -> Tuple[Optional[bool], str]:
    """Test that a multi-entry endpoint obeys the page limit by following
    pagination links up to a depth of `check_next_link`.

    Parameters:
        response: The response to test for page limit compliance.
        check_next_link: Maximum recursion depth for following pagination
            links.

    Returns:
        `True` if the test was successful and `None` if not, with a string
        summary.

    Raises:
        ResponseError: when the body cannot be decoded, is missing required
            fields, or exceeds the page limit.
    """
    # `response` is rebound from a Response object to its decoded dict.
    try:
        response = response.json()
    except (AttributeError, json.JSONDecodeError):
        raise ResponseError("Unable to test endpoint page limit.")
    try:
        num_entries = len(response["data"])
    except (KeyError, TypeError):
        raise ResponseError(
            "Response under `data` field was missing or had wrong type.")
    if num_entries > self.page_limit:
        raise ResponseError(
            f"Endpoint did not obey page limit: {num_entries} entries vs {self.page_limit} limit"
        )
    try:
        more_data_available = response["meta"]["more_data_available"]
    except KeyError:
        raise ResponseError(
            "Field `meta->more_data_available` was missing.")

    if more_data_available and check_next_link:
        # A `links->next` entry may be a plain URL string or a dict with an
        # `href` member.
        try:
            next_link = response["links"]["next"]
            if isinstance(next_link, dict):
                next_link = next_link["href"]
        except KeyError:
            raise ResponseError(
                "Endpoint suggested more data was available but provided no valid links->next link."
            )
        if not isinstance(next_link, str):
            raise ResponseError(
                f"Unable to parse links->next {next_link!r} as a link.")
        self._log.debug("Following pagination link to %r.", next_link)
        next_response, _ = self._get_endpoint(next_link)
        # Depth counter collapses to bool (True/False act as 1/0), so at
        # most one further level is followed after this point.
        check_next_link = bool(check_next_link - 1)
        # NOTE(review): `multistage` is not a parameter of this method as
        # visible here — unless a subclass/decorator accepts it, this
        # recursive call would raise TypeError; confirm against the full
        # class definition.
        self._test_page_limit(
            next_response,
            check_next_link=check_next_link,
            multistage=check_next_link,
        )
    return (
        True,
        f"Endpoint obeyed page limit of {self.page_limit} by returning {num_entries} entries.",
    )