def download_ocsp_response_cache(url):
    """
    Download the OCSP response cache from Snowflake's cache server.

    :param url: URL of the OCSP response cache server
    :return: dict of decoded OCSP cache entries; empty dict on any failure
        (download errors are non-fatal — validation falls back to live OCSP)
    """
    logger = getLogger(__name__)
    ocsp_validation_cache = {}
    import binascii
    try:
        with requests.Session() as session:
            # transport-level retries for transient connection failures
            session.mount('http://', HTTPAdapter(max_retries=5))
            session.mount('https://', HTTPAdapter(max_retries=5))
            response = session.request(
                method=u'get',
                url=url,
                timeout=10,  # socket timeout
                verify=True,  # for HTTPS (future use)
            )
            if response.status_code == OK:
                try:
                    # populates ocsp_validation_cache in place
                    _decode_ocsp_response_cache(response.json(),
                                                ocsp_validation_cache)
                except (ValueError, binascii.Error) as err:
                    # BUG FIX: adjacent string literals were missing a
                    # space ("It willvalidate") — corrected message below.
                    logger.debug(
                        'Failed to convert OCSP cache server response to '
                        'JSON. The cache was corrupted. No worry. It will '
                        'validate with OCSP server: %s', err)
            else:
                logger.debug("Failed to get OCSP response cache from %s: %s",
                             url, response.status_code)
    except Exception as e:
        # best-effort: a failed cache download only disables the fast path
        logger.debug("Failed to get OCSP response cache from %s: %s", url, e)
    return ocsp_validation_cache
def send_logs(message, start_time, vendor):
    """
    Post a findings message to the Sumo Logic HTTP endpoint.

    :param message: findings dict; 'Rules violations found' entries are
        stripped of their 'ID' and 'Name' fields before sending
    :param start_time: epoch seconds when processing started (used to
        report execution time)
    :param vendor: value for the X-Sumo-Category header
    """
    account_mode = os.getenv('ACCOUNT_MODE', '')
    cross_account_role_name = os.getenv('CROSS_ACCOUNT_ROLE_NAME', '')
    output_type = os.getenv('OUTPUT_TYPE', '')
    execution_time = time.time() - start_time
    # BUG FIX: `del bot['ID']` raised KeyError for records missing the key;
    # pop() with a default tolerates partial records.
    for bot in message.get('Rules violations found', []):
        bot.pop('ID', None)
        bot.pop('Name', None)
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json",
        "X-Sumo-Name": message.get('Account id'),
        "X-Sumo-Category": vendor
    }
    data = {
        'msg': message,
        'account_mode': account_mode,
        'cross_account_role_name': cross_account_role_name,
        'output_type': output_type,
        'execution_time': execution_time
    }
    # context manager closes the session (was previously leaked)
    with requests.Session() as session:
        r = session.post(SUMO_HTTP_ENDPOINT, headers=headers,
                         data=json.dumps(data))
    print(f'{__file__} - status code from dome9 logs: {r.status_code}')
    return
def requests_session(self):
    """Return the shared requests session, creating it on first use.

    All prepared requests executed through this object reuse the same
    connection pool.
    """
    session = self._requests_session
    if session is None:
        session = self._requests_session = requests.Session()
    return session
def _fetch_ocsp_response(self, ocsp_request, cert, do_retry=True):
    """
    Fetch an OCSP response for *cert* using *ocsp_request*.

    :param ocsp_request: prepared OCSP request object
    :param cert: certificate whose revocation status is being checked
    :param do_retry: when True, retry up to 30 times with jittered backoff
    :return: raw OCSP response bytes, or None if the cert has no OCSP URL
    :raises OperationalError: if every attempt returns a non-200 status
    """
    ocsp_url = self.extract_ocsp_url(cert)
    if not ocsp_url:
        return None
    actual_method = 'post' if self._use_post_method else 'get'
    if SnowflakeOCSP.OCSP_CACHE.RETRY_URL_PATTERN:
        # no POST is supported for Retry URL at the moment.
        actual_method = 'get'
    if actual_method == 'get':
        b64data = self.decode_ocsp_request_b64(ocsp_request)
        target_url = SnowflakeOCSP.OCSP_CACHE.generate_get_url(
            ocsp_url, b64data)
        payload = None
        headers = None
    else:
        target_url = ocsp_url
        payload = self.decode_ocsp_request(ocsp_request)
        headers = {'Content-Type': 'application/ocsp-request'}
    ret = None
    logger.debug('url: %s', target_url)
    with requests.Session() as session:
        # transport-level retries for transient connection failures
        session.mount('http://', adapters.HTTPAdapter(max_retries=5))
        session.mount('https://', adapters.HTTPAdapter(max_retries=5))
        max_retry = 30 if do_retry else 1
        sleep_time = 1
        backoff = DecorrelateJitterBackoff(sleep_time, 16)
        for attempt in range(max_retry):
            response = session.request(
                headers=headers,
                method=actual_method,
                url=target_url,
                timeout=30,
                data=payload,
            )
            if response.status_code == OK:
                logger.debug(
                    "OCSP response was successfully returned from OCSP "
                    "server.")
                ret = response.content
                break
            elif max_retry > 1:
                sleep_time = backoff.next_sleep(sleep_time)
                logger.debug("OCSP server returned %s. Retrying in %s(s)",
                             response.status_code, sleep_time)
                time.sleep(sleep_time)
        else:
            # for-else: no attempt succeeded
            logger.error("Failed to get OCSP response after %s attempt.",
                         max_retry)
            # BUG FIX: "{)" was an invalid str.format placeholder and
            # raised ValueError instead of producing the message.
            raise OperationalError(
                msg="Failed to get OCSP response after {0} attempt.".format(
                    max_retry),
                errno=ER_INVALID_OCSP_RESPONSE)
    return ret
def where_stream(intent):
    """Answer where the requested film can be streamed in HD on flat-rate services."""
    speech = "I'm sorry, I couldn't find that one."
    try:
        # pull the requested film name out of the intent slots
        film = intent['slots']['film']['value']
        # query the provider search API for the top match
        session = requests.Session()
        res = session.post(api, json={"query": film, "page_size": 1},
                           headers=head)
        offers = res.json()['items'][0]['offers']
        names = [
            providers[str(offer['provider_id'])]
            for offer in offers
            if (offer['monetization_type'] == 'flatrate')
            and (offer['presentation_type'] == 'hd')
        ]
        if names:
            if len(names) == 1:
                joined = names[0]
            else:
                joined = ", ".join(names[:-1]) + ", and " + names[-1]
            speech = "You can stream {} on ".format(film) + joined
        else:
            speech = "It looks like {} isn't available to stream anywhere.".format(film)
    except (AttributeError, ValueError, KeyError, IndexError):
        # malformed intent or unexpected API shape: keep the default reply
        pass
    return speech
def __init__(
        self,
        debug=False,
        verify=True,
        metadata1_generator=None,
        captcha_solver=None,
        **kwargs
):
    """
    Build an (initially unauthenticated) client session.

    Args:
        debug (bool): Enable debug messages.
        verify (str | bool): Requests SSL certificate checking. Path to CA
            certificates file, ``False`` to ignore certificate errors, or
            ``True`` for the defaults (default).
        metadata1_generator (coto.metadata1.Generator): Strategy for
            generating metadata1 values.
        captcha_solver (coto.captcha.Solver): Strategy for solving captchas
            (e.g., forwarding them to Slack).
        **kwargs: Forwarded to :meth:`signin` when non-empty.
    """
    self.debug = debug
    self._metadata1_generator = metadata1_generator
    self._captcha_solver = captcha_solver
    self.root = False
    self.coupled = None
    self.authenticated = False
    self._clients = {}
    self.timeout = (3.1, 10)
    self.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36'
    http = requests.Session()
    http.verify = verify
    self.session = http
    # signing in immediately is opt-in via keyword arguments
    if kwargs:
        self.signin(**kwargs)
def __init__(self, session=None, sleep=time.sleep):
    """Hold an HTTP session and a sleep callable (injectable for tests).

    When no session is supplied, a fresh one is created with environment
    proxy settings disabled.
    """
    if session is None:
        session = requests.Session()
        # ignore proxy/CA settings from environment variables
        session.trust_env = False
        session.proxies = {}
    self._session = session
    self._sleep = sleep
def lambda_handler(event, context):
    """Open a terminal websocket on a SageMaker notebook instance and run the Wav2Lip notebook."""
    sm_client = boto3.client('sagemaker')
    s3_client = boto3.client("s3")
    notebook_instance_name = 'Wav2Lip-demo'
    url = sm_client.create_presigned_notebook_instance_url(
        NotebookInstanceName=notebook_instance_name)['AuthorizedUrl']
    # split the presigned URL into scheme and bare hostname
    parts = url.split('/')
    scheme = parts[0]
    hostname = parts[2].split('?')[0].split('#')[0]
    http = requests.Session()
    http.get(url)  # populates auth cookies on the session
    cookie_header = "; ".join(
        name + "=" + value for name, value in http.cookies.items())
    ws = websocket.create_connection(
        "wss://{}/terminals/websocket/1".format(hostname),
        cookie=cookie_header,
        host=hostname,
        origin=scheme + "//" + hostname)
    # TODO: Take the input file(s) and upload to S3
    # * Once files are uploaded, run our SageMaker notebook
    ws.send(
        """[ "stdin", "jupyter nbconvert --execute --to notebook --inplace /home/ec2-user/SageMaker/Wav2Lip/Wav2Lip-socket.ipynb --ExecutePreprocessor.kernel_name=python3 --ExecutePreprocessor.timeout=1500\\r" ]"""
    )
    time.sleep(1)
    ws.close()
    return "looking clean"
def get_tracks(playlist, limit=20):
    """
    Return a list of track URIs from the desired playlist.

    :param playlist: playlist dict containing at least an 'id' key
    :param limit: maximum number of tracks to fetch; defaults to 20,
        matching the previously hard-coded value (backward compatible)
    :return: list of Spotify track URI strings
    """
    from botocore.vendored import requests
    # Client Credentials Flow, per
    # https://developer.spotify.com/web-api/authorization-guide/
    # Required request body parameter: grant_type=client_credentials
    body_params = {'grant_type': 'client_credentials'}
    url = 'https://accounts.spotify.com/api/token'
    response = requests.post(url, data=body_params,
                             auth=(SPOT_CLIENT_ID, SPOT_CLIENT_SECRET)).json()
    token = response['access_token']
    # start a HTTP session carrying the bearer token
    session = requests.Session()
    session.headers.update(get_auth_header(token))
    playlist_id = playlist['id']
    url = "https://api.spotify.com/v1/playlists/{}/tracks?fields=items(track(uri))&limit={}".format(
        playlist_id, limit)
    items = session.get(url).json()['items']
    return [item['track']['uri'] for item in items]
def record(id, field=None, method='GET', pretty_print='0'):
    """
    Simple function for accessing the Finna API.

    :param id: id of the book looked for
    :param field: list of fields to include in the JSON response; ``None``
        (the default) is treated as an empty list
    :param method: 'POST' or 'GET'; use POST when the response is long,
        defaults to 'GET'
    :param pretty_print: '1' to pretty-print the resulting JSON, '0'
        otherwise (default '0')
    :return: dict with 'status_code' from the request and 'json'
    """
    # BUG FIX: `field=[]` was a mutable default argument shared across
    # calls; use the None-sentinel idiom instead.
    if field is None:
        field = []
    params = {
        'field[]': field,
        'id': [id],
        'prettyPrint': [pretty_print],
        'lng': ['en-gb']
    }
    # context manager replaces the manual sess.close()
    with requests.Session() as sess:
        sess.headers.update(__headers)
        sess.params.update(params)
        r = sess.request(url=__url + 'record', method=method)
    return {'status_code': r.status_code, 'json': r.json()}
def get_session(self):
    """Lazily build and cache an authenticated requests session.

    On first use, creates a session, attaches a Bearer token from
    ``get_oauth2_token()``, and mounts an HTTPAdapter configured with
    ``self.retry_policy`` for both schemes. Subsequent calls return the
    cached session.
    """
    if self._session is None:
        # NOTE(review): requests.Session() accepts no constructor
        # arguments; this line only works while self._session_kwargs is
        # empty — confirm what callers put in _session_kwargs.
        self._session = requests.Session(**self._session_kwargs)
        self._session.headers.update(
            Authorization="Bearer " + self.get_oauth2_token())
        adapter = HTTPAdapter(max_retries=self.retry_policy)
        self._session.mount('http://', adapter)
        self._session.mount('https://', adapter)
    return self._session
def execute_ocsp_request(ocsp_uri, cert_id, proxies=None, do_retry=True):
    """
    Executes OCSP request for the given cert id

    :param ocsp_uri: OCSP responder URL (POSTed to directly)
    :param cert_id: certificate id object placed in the request
    :param proxies: optional proxies dict forwarded to requests
    :param do_retry: when True, retry up to 100 times for HTTP 200
    :return: body of the last HTTP response received — note this is
        returned even when every attempt failed; callers must validate it
    """
    logger = getLogger(__name__)
    # build the ASN.1 OCSP request structure (pyasn1 objects)
    request = Request()
    request['reqCert'] = cert_id
    request_list = univ.SequenceOf(componentType=Request())
    request_list[0] = request
    tbs_request = TBSRequest()
    tbs_request['requestList'] = request_list
    tbs_request['version'] = Version(0).subtype(
        explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
    ocsp_request = OCSPRequest()
    ocsp_request['tbsRequest'] = tbs_request
    # no signature for the client
    # no nonce is set, because not all OCSP resopnder implements it yet
    # transform objects into data in requests
    data = der_encoder.encode(ocsp_request)
    parsed_url = urlsplit(ocsp_uri)
    max_retry = 100 if do_retry else 1
    # NOTE: This retry is to retry getting HTTP 200.
    headers = {
        'Content-Type': 'application/ocsp-request',
        'Content-Length': '{0}'.format(len(data)),
        'Host': parsed_url.hostname,
    }
    logger.debug('url: %s, headers: %s, proxies: %s',
                 ocsp_uri, headers, proxies)
    with requests.Session() as session:
        # transport-level retries for transient connection failures
        session.mount('http://', HTTPAdapter(max_retries=5))
        session.mount('https://', HTTPAdapter(max_retries=5))
        for attempt in range(max_retry):
            response = session.post(ocsp_uri,
                                    headers=headers,
                                    proxies=proxies,
                                    data=data,
                                    timeout=60)
            if response.status_code == OK:
                logger.debug("OCSP response was successfully returned")
                break
            elif max_retry > 1:
                # capped exponential backoff: 1, 2, 4, 8, then 16 seconds
                wait_time = 2**attempt
                wait_time = 16 if wait_time > 16 else wait_time
                logger.debug("OCSP server returned %s. Retrying in %s(s)",
                             response.status_code, wait_time)
                time.sleep(wait_time)
        else:
            # for-else: every attempt returned non-200
            logger.error("Failed to get OCSP response after %s attempt.",
                         max_retry)
    return response.content
def execute_ocsp_request(ocsp_uri, cert_id, proxies=None, do_retry=True):
    """
    Executes OCSP request for the given cert id

    GET variant: the DER-encoded request is base64-encoded into the URL,
    either via the cache-server retry URL pattern (when configured) or
    appended to the responder URI.

    :param ocsp_uri: OCSP responder URL
    :param cert_id: certificate id object placed in the request
    :param proxies: optional proxies dict forwarded to requests
    :param do_retry: when True, retry up to 100 times for HTTP 200
    :return: body of the last HTTP response received — note this is
        returned even when every attempt failed; callers must validate it
    """
    global SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN
    logger = getLogger(__name__)
    # build the ASN.1 OCSP request structure (pyasn1 objects)
    request = Request()
    request['reqCert'] = cert_id
    request_list = univ.SequenceOf(componentType=Request())
    request_list[0] = request
    tbs_request = TBSRequest()
    tbs_request['requestList'] = request_list
    tbs_request['version'] = Version(0).subtype(
        explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
    ocsp_request = OCSPRequest()
    ocsp_request['tbsRequest'] = tbs_request
    # no signature for the client
    # no nonce is set, because not all OCSP resopnder implements it yet
    # transform objects into data in requests
    data = der_encoder.encode(ocsp_request)
    b64data = b64encode(data).decode('ascii')
    if SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN:
        # route through the cache server's retry URL
        parsed_url = urlsplit(ocsp_uri)
        target_url = SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN.format(
            parsed_url.hostname, b64data)
    else:
        target_url = u"{0}/{1}".format(ocsp_uri, b64data)
    max_retry = 100 if do_retry else 1
    # NOTE: This retry is to retry getting HTTP 200.
    logger.debug('url: %s, proxies: %s', target_url, proxies)
    with requests.Session() as session:
        # transport-level retries for transient connection failures
        session.mount('http://', HTTPAdapter(max_retries=5))
        session.mount('https://', HTTPAdapter(max_retries=5))
        for attempt in range(max_retry):
            response = session.get(target_url,
                                   proxies=proxies,
                                   timeout=60)
            if response.status_code == OK:
                logger.debug("OCSP response was successfully returned")
                break
            elif max_retry > 1:
                # capped exponential backoff: 1, 2, 4, 8, then 16 seconds
                wait_time = 2**attempt
                wait_time = 16 if wait_time > 16 else wait_time
                logger.debug("OCSP server returned %s. Retrying in %s(s)",
                             response.status_code, wait_time)
                time.sleep(wait_time)
        else:
            # for-else: every attempt returned non-200
            logger.error("Failed to get OCSP response after %s attempt.",
                         max_retry)
    return response.content
def handler(self):
    """Build a CheckRun handler wired with a synthetic SNS check_run event."""
    subject, message = check_run_event()
    notice = sns()
    notice['Records'][0]['Sns']['Subject'] = subject
    notice['Records'][0]['Sns']['Message'] = message
    check = CheckRun('zen of python', 'this', '1', '2', '3')
    check.event = notice
    check.hook = json.loads(message)
    check._session = requests.Session()
    return check
def session(self):
    """Lazily create a requests session pre-configured for the GitHub Checks API."""
    if not self._session:
        sess = requests.Session()
        sess.headers.update({
            'Authorization': 'token %s' % self.token,
            'Accept': 'application/vnd.github.antiope-preview+json',
        })
        self._session = sess
    return self._session
def __init__(self, country='AU', use_sessions=True, **kwargs):
    """Store locale settings and choose between a pooled session or bare requests.

    Args:
        country: two-letter country code, default 'AU'.
        use_sessions: when True, reuse one requests.Session for all calls;
            otherwise call the requests module directly.
        **kwargs: extra options retained on the instance.
    """
    self.kwargs = kwargs
    self.country = country
    # language is currently fixed to English rather than derived from country
    self.language = 'en'
    self.locale = self.language + '_' + self.country
    self.kwargs_cinema = []
    self.requests = requests.Session() if use_sessions else requests
def _get_attr_by_link(attribute, interParams):
    """
    Resolve a ServiceNow reference attribute by following its 'link' URL.

    :param attribute: attribute dict expected to carry a 'link' URL
    :param interParams: interpreter parameters; must provide
        'linkAttribute' (field to read from the linked record) and
        'default' (fallback value when resolution fails)
    :return: the linked record's attribute value, or the configured default
    :raises ValueError: if interParams lacks a required key
    """
    # BUG FIX: the cache used to be a local variable recreated on every
    # call, so it never cached anything. Persist it on the function
    # object so lookups survive across calls.
    cache = _get_attr_by_link.__dict__.setdefault('_cache', {})
    try:
        link = attribute["link"]
    except (TypeError, KeyError):
        # no link present: fall back to the configured default value
        try:
            returnValue = interParams["default"]
        except KeyError:
            raise ValueError(
                "Check your configuration of interpreterParameters. It should contain default value"
            )
        loger.warning(
            "Unable to get link. If it happens for all requests, this may mean that you're using incorrect interpreter for this attribute. Returning default value.: "
            + str(attribute))
        return returnValue
    try:
        attr = interParams["linkAttribute"]
    except KeyError:
        raise ValueError(
            "Check your configuration of intepreterParameters. linkAttribute is missing or corrupted"
        )
    try:
        return cache[link][attr]
    except KeyError:
        loger.debug("Link missing in cache. I have:" + str(cache))
    snow = requests.Session()
    snow.headers.update({"Accept": "application/json"})
    snow.auth = snowAuth
    try:
        r = snow.get(link)
        loger.debug("_get_person_by_name: Results from: '" + str(attribute) +
                    "' show:" + str(json.dumps(r.json(), indent=4)))
        returnValue = r.json()["result"][attr]
    except KeyError as e:
        # linked record lacks the field: use the configured default
        try:
            loger.warning("Using default value for mapping, error" + str(e) +
                          ". Attribute was=" + str(attribute))
            returnValue = interParams["default"]
        except KeyError:
            raise ValueError(
                "Check your configuration interpreterParameters should contain default value"
            )
    cache[link] = {attr: returnValue}
    return returnValue
def lambda_handler(event, context):
    """Fetch a remote CSV over HTTP and print every row."""
    http = requests.Session()
    payload = http.get(web)
    decoded = payload.content.decode("utf-8")
    # if separated by tabs, replace the delimiter value with "\t"
    rows = list(csv.reader(decoded.splitlines(), delimiter=","))
    for row in rows:
        print(row)
def query_post(received, context):
    """Serve a Grafana table query by fetching matching rows from a ServiceNow table."""
    target_name = received["targets"][0]["target"]
    target = queries[target_name]
    loger.debug("My query target is:" + str(target))
    snow = requests.Session()
    snow.headers.update({"Accept": "application/json"})
    snow.auth = snowAuth
    loger.debug("My snow filter is:" + target["snowFilter"])
    snow.verify = False
    request_url = snowUrl + "//api/now/table/" + target["table"]
    loger.info("Starting request to service-now, to " +
               str(request_url + " params=" + target["snowFilter"]))
    r = snow.get(request_url, params=target["snowFilter"])
    items = r.json()
    # bail out early on the known failure modes
    if r.status_code == 401:
        loger.error(
            "Unauthorized request to service-now instance - check user and password"
        )
        return
    elif r.text == "":
        loger.warning("Empty reply from service-now instance")
        return
    elif r.status_code == 400:
        loger.error("Bad request, service-now returned:" +
                    items["error"]["message"])
        return
    loger.debug("Service-now returned " + str(r.status_code) +
                " message in json format:" +
                json.dumps(items, indent=4, sort_keys=True))
    # Grafana table format, e.g.
    # columns=[{"text": "Number", "type": "string"}, ...]
    columns = [{"text": attr["displayName"], "type": "string"}
               for attr in target["attributes"]]
    table_rows = [__get_row(target["attributes"], row)
                  for row in items["result"]]
    return [{"columns": columns, "rows": table_rows, "type": "table"}]
def login():
    """Return a requests session that is logged in to access.caltech.edu."""
    post_login_url = "https://access.caltech.edu/auth/login_handler?came_from=https://access.caltech.edu/tqfr/reports/list_surveys&login_counter=0"
    credentials = {"login": "******", "password": "******"}
    session = requests.Session()
    # GET first to scrape the login ticket ('lt') hidden field from the form
    page = session.get(post_login_url)
    soup = BeautifulSoup(page.text, 'html.parser')
    credentials["lt"] = soup.find(id="lt")['value']
    # POST credentials; success redirects the session to the TQFR home page
    session.post(post_login_url, data=credentials, verify=True)
    return session
def _fetch_ocsp_response(req, cert, do_retry=True):
    """
    Fetch OCSP response using OCSPRequest (HTTP GET with base64 URL).

    :param req: OCSP request object; DER-dumpable via req.dump()
    :param cert: certificate providing ocsp_urls (first URL is used)
    :param do_retry: when True, retry up to 100 times for HTTP 200
    :return: raw OCSP response bytes
    :raises OperationalError: if every attempt returns a non-200 status
    """
    global SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN
    max_retry = 100 if do_retry else 1
    data = req.dump()  # convert to DER
    b64data = b64encode(data).decode('ascii')
    urls = cert.ocsp_urls
    ocsp_url = urls[0]
    if SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN:
        # route through the cache server's retry URL
        parsed_url = urlsplit(ocsp_url)
        target_url = SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN.format(
            parsed_url.hostname, b64data
        )
    else:
        target_url = u"{0}/{1}".format(ocsp_url, b64data)
    ret = None
    logger.debug('url: %s', target_url)
    with requests.Session() as session:
        # transport-level retries for transient connection failures
        session.mount('http://', adapters.HTTPAdapter(max_retries=5))
        session.mount('https://', adapters.HTTPAdapter(max_retries=5))
        global PROXIES
        for attempt in range(max_retry):
            response = session.get(
                target_url,
                proxies=PROXIES,
                timeout=30)
            if response.status_code == OK:
                logger.debug(
                    "OCSP response was successfully returned from OCSP server.")
                ret = response.content
                break
            elif max_retry > 1:
                # capped exponential backoff: 1, 2, 4, 8, then 16 seconds
                wait_time = 2 ** attempt
                wait_time = 16 if wait_time > 16 else wait_time
                logger.debug("OCSP server returned %s. Retrying in %s(s)",
                             response.status_code, wait_time)
                time.sleep(wait_time)
        else:
            # for-else: no attempt succeeded
            logger.error("Failed to get OCSP response after %s attempt.",
                         max_retry)
            # BUG FIX: "{)" was an invalid str.format placeholder and
            # raised ValueError instead of producing the message.
            raise OperationalError(
                msg="Failed to get OCSP response after {0} attempt.".format(
                    max_retry),
                errno=ER_INVALID_OCSP_RESPONSE
            )
    return ret
def test_timeout(self, handler, caplog):
    """A command exceeding cmd_timeout must raise subprocess.TimeoutExpired."""
    httpretty.register_uri(
        httpretty.PATCH,
        handler.check_run_url,
        data='',
        status=200,
        content_type='application/json',
    )
    handler.cmd = 'tests.timeout'
    handler.cmd_timeout = 1
    handler.download_code = lambda: '.'
    handler._session = requests.Session()
    with pytest.raises(subprocess.TimeoutExpired) as exc_info:
        handler(handler.event, {})
    assert "timed out after 1 seconds" in str(exc_info)
def connections():
    """Open a Jupyter terminal websocket using auth cookies from the presigned URL."""
    tokens = url.split('/')
    scheme = tokens[0]
    hostname = tokens[2].split('?')[0].split('#')[0]
    http = requests.Session()
    http.get(url)  # populates auth cookies on the session
    cookie_header = "; ".join(
        name + "=" + value for name, value in http.cookies.items())
    return websocket.create_connection(
        "wss://{}/terminals/websocket/1".format(hostname),
        cookie=cookie_header,
        host=hostname,
        origin=scheme + "//" + hostname)
def lambda_handler(event, context):
    """Download a CSV over HTTP, print each row, and return HTTP 200."""
    http = requests.Session()
    body = http.get(url).content.decode('utf-8')
    for record in list(csv.reader(body.splitlines(), delimiter=',')):
        print(record)
    # TODO implement
    return {'statusCode': 200}
def get_recordings(mixer_client_id, mixer_channel_id):
    """Return gr.Recording entries for every AVAILABLE VOD on a Mixer channel."""
    http = requests.Session()
    http.headers.update({'Client-ID': mixer_client_id})
    resp = http.get(
        'https://mixer.com/api/v1/recordings?where=channelId:eq:{}'.format(
            mixer_channel_id))
    # All recordings available on the channel.
    # Key properties: createdAt, state == AVAILABLE, name,
    # vods[] - baseUrl - source.mp4
    recording_infos = []
    for rec in resp.json():
        if rec["state"] != "AVAILABLE":
            # TODO: Handle other states (Issue #2)
            continue
        # We assume all VODs share one baseUrl and that the first entry
        # carries full metadata for the mp4 rendition.
        vod = rec["vods"][0]
        meta = vod["data"]
        recording_infos.append(
            gr.Recording({
                "platform": "mixer",
                "id": rec["id"],
                "name": rec["name"],
                "duration": rec["duration"],
                "createdAt": rec["createdAt"],
                "channelId": mixer_channel_id,
                "width": meta["Width"],
                "height": meta["Height"],
                "fps": meta["Fps"],
                "bitrate": meta["Bitrate"],
                "url": vod["baseUrl"] + "source.mp4"
            }))
    return recording_infos
def __init__(self, api_key, timeout_s=None):
    """
    Create a YelpAPI client.

    required parameters:
        * api_key - our Yelp API key

    optional parameters:
        * timeout_s - per-call timeout in seconds; if it expires before a
          request completes a Timeout exception is raised. ``None``
          (default) blocks indefinitely.
    """
    self._api_key = api_key
    self._timeout_s = timeout_s
    self._yelp_session = requests.Session()
    self._headers = {'Authorization': 'Bearer {}'.format(self._api_key)}
def _fetch_ocsp_response(req, cert, do_retry=True):
    """
    Fetch OCSP response using OCSPRequest (HTTP POST of DER body).

    :param req: OCSP request object; DER-dumpable via req.dump()
    :param cert: certificate providing ocsp_urls (first URL is used)
    :param do_retry: when True, retry up to 100 times for HTTP 200
    :return: raw OCSP response bytes
    :raises OperationalError: if every attempt returns a non-200 status
    """
    urls = cert.ocsp_urls
    parsed_url = urlsplit(urls[0])  # urls is guaranteed to have OCSP URL
    max_retry = 100 if do_retry else 1
    data = req.dump()  # convert to DER
    headers = {
        'Content-Type': 'application/ocsp-request',
        'Content-Length': '{0}'.format(len(data)),
        'Host': parsed_url.hostname,
    }
    ret = None
    with requests.Session() as session:
        # transport-level retries for transient connection failures
        session.mount('http://', adapters.HTTPAdapter(max_retries=5))
        session.mount('https://', adapters.HTTPAdapter(max_retries=5))
        global PROXIES
        for attempt in range(max_retry):
            response = session.post(urls[0],
                                    headers=headers,
                                    proxies=PROXIES,
                                    data=data,
                                    timeout=30)
            if response.status_code == OK:
                logger.debug("OCSP response was successfully returned from "
                             "OCSP server.")
                ret = response.content
                break
            elif max_retry > 1:
                # capped exponential backoff: 1, 2, 4, 8, then 16 seconds
                wait_time = 2**attempt
                wait_time = 16 if wait_time > 16 else wait_time
                logger.debug("OCSP server returned %s. Retrying in %s(s)",
                             response.status_code, wait_time)
                time.sleep(wait_time)
        else:
            # for-else: no attempt succeeded
            logger.error("Failed to get OCSP response after %s attempt.",
                         max_retry)
            # BUG FIX: "{)" was an invalid str.format placeholder and
            # raised ValueError instead of producing the message.
            raise OperationalError(
                msg="Failed to get OCSP response after {0} attempt.".format(
                    max_retry),
                errno=ER_INVALID_OCSP_RESPONSE)
    return ret
def _download_ocsp_response_cache(url, do_retry=True):
    """
    Download OCSP response cache from the cache server

    :param url: OCSP response cache server
    :param do_retry: retry if connection fails up to N times
    :return: dict of decoded cache entries; empty on failure (download is
        best-effort — any error falls back to live OCSP checks)
    """
    global PROXIES
    max_retry = 100 if do_retry else 1
    ocsp_validation_cache = {}
    try:
        start_time = time.time()
        logger.debug("started downloading OCSP response cache file")
        with requests.Session() as session:
            # transport-level retries for transient connection failures
            session.mount('http://', adapters.HTTPAdapter(max_retries=5))
            session.mount('https://', adapters.HTTPAdapter(max_retries=5))
            for attempt in range(max_retry):
                response = session.request(
                    method=u'get',
                    url=url,
                    proxies=PROXIES,
                    timeout=10,  # socket timeout
                    verify=True,  # for HTTPS (future use)
                )
                if response.status_code == OK:
                    # populates ocsp_validation_cache in place
                    _decode_ocsp_response_cache(response.json(),
                                                ocsp_validation_cache)
                    elapsed_time = time.time() - start_time
                    logger.debug(
                        "ended downloading OCSP response cache file. "
                        "elapsed time: %ss", elapsed_time)
                    break
                elif max_retry > 1:
                    # capped exponential backoff: 1, 2, 4, 8, then 16 seconds
                    wait_time = 2**attempt
                    wait_time = 16 if wait_time > 16 else wait_time
                    logger.debug("OCSP server returned %s. Retrying in %s(s)",
                                 response.status_code, wait_time)
                    time.sleep(wait_time)
            else:
                # for-else: every attempt returned non-200
                logger.error("Failed to get OCSP response after %s attempt.",
                             max_retry)
    except Exception as e:
        logger.debug("Failed to get OCSP response cache from %s: %s", url, e)
    return ocsp_validation_cache
def _download_ocsp_response_cache(ocsp, url, do_retry=True):
    """
    Download OCSP response cache from the cache server

    :param ocsp: object that stores the cache via decode_ocsp_response_cache
    :param url: OCSP response cache server
    :param do_retry: retry if connection fails up to N times
    """
    try:
        start_time = time.time()
        logger.debug("started downloading OCSP response cache file")
        with requests.Session() as session:
            # transport-level retries for transient connection failures
            session.mount('http://', adapters.HTTPAdapter(max_retries=5))
            session.mount('https://', adapters.HTTPAdapter(max_retries=5))
            max_retry = 30 if do_retry else 1
            sleep_time = 1
            backoff = DecorrelateJitterBackoff(sleep_time, 16)
            for attempt in range(max_retry):
                response = session.get(
                    url,
                    timeout=10,  # socket timeout
                )
                if response.status_code == OK:
                    ocsp.decode_ocsp_response_cache(response.json())
                    elapsed_time = time.time() - start_time
                    logger.debug(
                        "ended downloading OCSP response cache file. "
                        "elapsed time: %ss", elapsed_time)
                    break
                elif max_retry > 1:
                    # jittered backoff between attempts
                    sleep_time = backoff.next_sleep(sleep_time)
                    logger.debug(
                        "OCSP server returned %s. Retrying in %s(s)",
                        response.status_code, sleep_time)
                    time.sleep(sleep_time)
            else:
                # for-else: every attempt returned non-200
                logger.error(
                    "Failed to get OCSP response after %s attempt.",
                    max_retry)
    except Exception as e:
        # best-effort download: failure falls back to live OCSP checks
        logger.debug("Failed to get OCSP response cache from %s: %s", url, e)
def optimizely_request(http_verb, resource='/', qs_params=None, payload=None):
    """
    Call the Optimizely v2 REST API and return the parsed JSON body.

    :param http_verb: HTTP method name, case-insensitive ('get', 'POST', ...)
    :param resource: API resource path, e.g. '/projects'
    :param qs_params: optional dict of query-string parameters
    :param payload: optional JSON body, sent for POST/PATCH requests
    :return: decoded JSON response, or [] if the body is not valid JSON
    :raises Exception: when the API responds with a status code above 299
    """
    # BUG FIX: mutable default arguments ({}) replaced with None sentinels.
    qs_params = {} if qs_params is None else qs_params
    payload = {} if payload is None else payload
    http_verb = http_verb.lower()
    url = 'https://api.optimizely.com/{version}{resource}'.format(
        version='v2', resource=resource)
    headers = {"Authorization": "Bearer {}".format(token)}
    if http_verb in ['post', 'patch']:
        headers['Content-Type'] = 'application/json'
    # context manager closes the session (was previously leaked)
    with requests.Session() as session:
        req = requests.Request(http_verb,
                               url,
                               json=payload,
                               headers=headers,
                               params=qs_params)
        prepped = session.prepare_request(req)
        response = session.send(prepped)
    if response.status_code > 299:
        raise Exception('Unable to make request: \n{}'.format(response.text))
    try:
        return response.json()
    except ValueError:
        # non-JSON body: keep the original best-effort empty-list contract
        return []