class RetryHTTPProvider(HTTPProvider):
    """RetryHTTPProvider is a custom HTTPProvider that retries failed HTTP requests."""

    def __init__(self, endpoint_uri, request_kwargs=None):
        super(RetryHTTPProvider, self).__init__(endpoint_uri, request_kwargs)

    def make_request(self, method, params):
        """Overrides the parent method to replace `make_post_request` with a custom implementation."""
        request_data = self.encode_rpc_request(method, params)
        raw_response = self.retriable_post_request(request_data)  # instead of make_post_request
        response = self.decode_rpc_response(raw_response)
        return response

    @backoff.on_exception(
        lambda: backoff.expo(factor=0.2),
        requests.exceptions.RequestException,
        max_tries=4,
        giveup=lambda e: e.response is not None and 400 <= e.response.status_code < 500
    )
    def retriable_post_request(self, request_data):
        return make_post_request(
            self.endpoint_uri,
            request_data,
            **self.get_request_kwargs()
        )
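# A minimal usage sketch (not from the original source; the endpoint URL is a
# placeholder). Constructing the provider makes no network call; the retry
# behaviour kicks in once requests are issued through it.
from web3 import Web3

w3 = Web3(RetryHTTPProvider('http://127.0.0.1:8545'))
# w3.eth.block_number  # transient failures would be retried up to 4 times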
    def _export(self, data: TypingSequence[SDKDataT]) -> ExportResultT:
        # expo returns a generator that yields delay values which grow
        # exponentially. Once delay is greater than max_value, the yielded
        # value will remain constant.
        # max_value is set to 900 (900 seconds is 15 minutes) to use the same
        # value as used in the Go implementation.

        max_value = 900

        for delay in expo(max_value=max_value):

            if delay == max_value:
                return self._result.FAILURE

            try:
                self._client.Export(
                    request=self._translate_data(data),
                    metadata=self._headers,
                    timeout=self._timeout,
                )

                return self._result.SUCCESS

            except RpcError as error:

                if error.code() in [
                    StatusCode.CANCELLED,
                    StatusCode.DEADLINE_EXCEEDED,
                    StatusCode.PERMISSION_DENIED,
                    StatusCode.UNAUTHENTICATED,
                    StatusCode.RESOURCE_EXHAUSTED,
                    StatusCode.ABORTED,
                    StatusCode.OUT_OF_RANGE,
                    StatusCode.UNAVAILABLE,
                    StatusCode.DATA_LOSS,
                ]:

                    retry_info_bin = dict(error.trailing_metadata()).get(
                        "google.rpc.retryinfo-bin"
                    )
                    if retry_info_bin is not None:
                        retry_info = RetryInfo()
                        retry_info.ParseFromString(retry_info_bin)
                        delay = (
                            retry_info.retry_delay.seconds
                            + retry_info.retry_delay.nanos / 1.0e9
                        )

                    logger.debug(
                        "Waiting %ss before retrying export of span", delay
                    )
                    sleep(delay)
                    continue

                if error.code() == StatusCode.OK:
                    return self._result.SUCCESS

                return self._result.FAILURE

        return self._result.FAILURE
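# The loop above uses expo's plateau as its give-up condition: once the yielded
# delay equals max_value, the exporter returns FAILURE instead of sleeping
# again. A minimal, self-contained sketch of that sentinel pattern with a
# small cap:
from backoff import expo

for delay in expo(max_value=8):  # yields 1, 2, 4, 8, 8, 8, ...
    if delay == 8:  # the sequence has plateaued: give up
        break
    # attempt the export here; on a transient failure, sleep(delay) and loop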
    def export(self, spans) -> SpanExportResult:
        # After the call to Shutdown subsequent calls to Export are
        # not allowed and should return a Failure result.
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring batch")
            return SpanExportResult.FAILURE

        serialized_data = _ProtobufEncoder.serialize(spans)

        for delay in expo(max_value=self._MAX_RETRY_TIMEOUT):

            if delay == self._MAX_RETRY_TIMEOUT:
                return SpanExportResult.FAILURE

            resp = self._export(serialized_data)
            # pylint: disable=no-else-return
            if resp.status_code in (200, 202):
                return SpanExportResult.SUCCESS
            elif self._retryable(resp):
                _logger.warning(
                    "Transient error %s encountered while exporting span batch, retrying in %ss.",
                    resp.reason,
                    delay,
                )
                sleep(delay)
                continue
            else:
                _logger.error(
                    "Failed to export batch code: %s, reason: %s",
                    resp.status_code,
                    resp.text,
                )
                return SpanExportResult.FAILURE
        return SpanExportResult.FAILURE
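# _retryable is not shown in the snippet above. A plausible predicate, assuming
# the usual transient HTTP statuses (an assumption for illustration, not the
# original implementation):
import requests

def _retryable(resp: requests.Response) -> bool:
    # 429 (throttling) and 5xx (server-side) responses are typically transient
    return resp.status_code == 429 or 500 <= resp.status_code < 600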
    def _export(self, data: TypingSequence[SDKDataT]) -> ExportResultT:
        max_value = 64
        # expo returns a generator that yields delay values which grow
        # exponentially. Once delay is greater than max_value, the yielded
        # value will remain constant.
        for delay in expo(max_value=max_value):
            if delay == max_value:
                return self._result.FAILURE

            try:
                self._client.Export(
                    request=self._translate_data(data),
                    metadata=self._headers,
                    timeout=self._timeout,
                )

                return self._result.SUCCESS

            except RpcError as error:

                if error.code() in [
                    StatusCode.CANCELLED,
                    StatusCode.DEADLINE_EXCEEDED,
                    StatusCode.RESOURCE_EXHAUSTED,
                    StatusCode.ABORTED,
                    StatusCode.OUT_OF_RANGE,
                    StatusCode.UNAVAILABLE,
                    StatusCode.DATA_LOSS,
                ]:

                    retry_info_bin = dict(error.trailing_metadata()).get(
                        "google.rpc.retryinfo-bin")
                    if retry_info_bin is not None:
                        retry_info = RetryInfo()
                        retry_info.ParseFromString(retry_info_bin)
                        delay = (retry_info.retry_delay.seconds +
                                 retry_info.retry_delay.nanos / 1.0e9)

                    logger.warning(
                        "Transient error %s encountered while exporting span batch, retrying in %ss.",
                        error.code(),
                        delay,
                    )
                    sleep(delay)
                    continue
                else:
                    logger.error(
                        "Failed to export span batch, error code: %s",
                        error.code(),
                    )

                if error.code() == StatusCode.OK:
                    return self._result.SUCCESS

                return self._result.FAILURE

        return self._result.FAILURE
def expo_scaled() -> Iterator[float]:
    """A scaled version of backoff.expo that doesn't wait as long."""
    gen: Iterator[float] = backoff.expo(2, 0.1)  # type: ignore
    while True:
        try:
            yield next(gen) * 0.1  # Scaling factor
        except StopIteration:
            return
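# Quick check of the values the wrapper yields: backoff.expo(2, 0.1) produces
# 0.1 * 2**n, which expo_scaled scales by a further 0.1.
gen = expo_scaled()
print([round(next(gen), 2) for _ in range(5)])  # [0.01, 0.02, 0.04, 0.08, 0.16]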
    async def connect(self) -> None:
        """Connect to the websocket endpoint and process responses.

        This will continuously loop until :meth:`EventClient.close` is
        called. If the WebSocket connection encounters an error, it will
        be automatically restarted.

        Any event payloads received will be passed to
        :meth:`EventClient.dispatch` for filtering and event dispatch.
        """
        # NOTE: When multiple triggers are added to the bot without an active
        # websocket connection, this function may be scheduled multiple times.
        if self._connect_lock.locked():
            _log.debug('Websocket already running')
            return
        await self._connect_lock.acquire()

        def on_success(details: Dict[str, Any]) -> None:
            tries = int(details['tries'])
            _log.debug('Connection successful after %d tries', tries)

        def on_backoff(details: Dict[str, Any]) -> None:
            wait = float(details['wait'])
            tries = int(details['tries'])
            _log.debug('Backing off %.2f seconds after %d failed attempt[s]',
                       wait, tries)

        backoff_errors = (ConnectionRefusedError, ConnectionResetError,
                          websockets.exceptions.ConnectionClosed)
        backoff_gen: Iterator[float] = backoff.expo(  # type: ignore
            base=10, factor=0.001, max_value=60.0)
        backoff_wrap: _Decorator = backoff.on_exception(  # type: ignore
            lambda: backoff_gen, backoff_errors,
            on_backoff=on_backoff, on_success=on_success, jitter=None)
        await backoff_wrap(self._connection_handler)()
class RetryHTTPProvider(HTTPProvider):
    """Custom retryable HTTPProvider module."""

    def __init__(self, endpoint_uri, request_kwargs=None):
        super(RetryHTTPProvider, self).__init__(endpoint_uri, request_kwargs)

    def make_request(self, method, params):
        request_data = self.encode_rpc_request(method, params)
        raw_response = self.retriable_post_request(
            request_data)  # instead of make_post_request
        response = self.decode_rpc_response(raw_response)
        return response

    @backoff.on_exception(
        lambda: backoff.expo(factor=0.2),
        requests.exceptions.RequestException,
        max_tries=4,
        giveup=lambda e: e.response is not None and 400 <= e.response.status_code < 500
    )
    def retriable_post_request(self, request_data):
        return make_post_request(self.endpoint_uri, request_data,
                                 **self.get_request_kwargs())
def test_expo_base3():
    gen = backoff.expo(base=3)
    for i in range(9):
        assert 3**i == next(gen)
        _log.debug('Backing off %.2f seconds after %d attempt[s]: %s',
                   wait, tries, url)

    def on_giveup(details: Dict[str, Any]) -> None:
        elapsed: float = details['elapsed']
        tries: int = details['tries']
        _log.warning(
            'Giving up on query and re-raising exception after %.2f '
            'seconds and %d attempt[s]: %s', elapsed, tries, url)
        _, exc_value, _ = sys.exc_info()
        assert exc_value is not None
        raise exc_value

    backoff_errors = (aiohttp.ClientResponseError,
                      aiohttp.ClientConnectionError,
                      MaintenanceError)
    backoff_gen: Iterator[float] = backoff.expo(  # type: ignore
        base=10, factor=0.001, max_value=5.0)

    @backoff.on_exception(  # type: ignore
        lambda: backoff_gen, backoff_errors,
        on_backoff=on_backoff, on_giveup=on_giveup, on_success=on_success,
        max_tries=5, jitter=None)
    async def retry_query() -> aiohttp.ClientResponse:
        """Request handling wrapper."""
        response = await session.get(url, allow_redirects=False,
                                     raise_for_status=True)
        # Trigger MaintenanceErrors from redirect response. This will also be
def _backoff_expo():
    return backoff.expo(max_value=30)
def test_expo():
    gen = backoff.expo()
    for i in range(9):
        assert 2**i == next(gen)
def test_expo_max_value():
    gen = backoff.expo(max_value=2**4)
    expected = [1, 2, 4, 8, 16, 16, 16]
    for expect in expected:
        assert expect == next(gen)
def test_expo_base3_factor5():
    gen = backoff.expo(base=3, factor=5)
    for i in range(9):
        assert 5 * 3**i == next(gen)
def expo_higher() -> Iterator[int]:
    e = backoff.expo()
    for _ in range(3):
        next(e)
    yield from e
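# expo_higher drops the first three values of the default sequence (1, 2, 4)
# and resumes at 8:
gen = expo_higher()
assert [next(gen) for _ in range(4)] == [8, 16, 32, 64]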
def backoff_wait_times():
    """Create a generator of wait times as [30, 60, 120, 240, 480, ...]"""
    return backoff.expo(factor=30)
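# Sanity check for the docstring above: with expo's default base of 2,
# factor=30 yields 30 * 2**n.
gen = backoff_wait_times()
assert [next(gen) for _ in range(5)] == [30, 60, 120, 240, 480]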
def test_expo_base3_init5():
    gen = backoff.expo(base=3, init_value=5)
    for i in range(9):
        assert 5 * 3 ** i == next(gen)
def test_expo_init3():
    gen = backoff.expo(init_value=3)
    for i in range(9):
        assert 3 * 2 ** i == next(gen)
def test_expo_factor3():
    gen = backoff.expo(factor=3)
    for i in range(9):
        assert 3 * 2**i == next(gen)
                sys.stdout.write(f'\rPage {page} Batch {count}/{total_number_of_batches} for Table {table} copied '
                                 f'successfully!')
            except Exception as e:
                sys.stdout.flush()
                sys.stdout.write(f'\rPage {page} Batch {count}/{total_number_of_batches} for Table {table} failed '
                                 f'to copy!: {e}')
        else:
            print('Batch is empty, skipping')
        count += 1
    sys.stdout.write('\n')
else:
    print('Table {} is empty!'.format(table))


@backoff.on_predicate(backoff.expo, lambda leftovers: len(leftovers) > 0,
                      jitter=backoff.full_jitter)
def write_records(dst, records):
    r = dst.batch_write_item(RequestItems=records)
    return r['UnprocessedItems']


def get_total_items(table: str, aws_session: Session) -> list:
    """
    A helper function to programmatically get all items from a table.

    :param table: The table name to retrieve its items
    :param aws_session: The AWS Session of the targeted AWS profile/region
    :return: A list that contains all items
    """
    d_client = aws_session.client('dynamodb')
    print(f"Scanning table {table} in {aws_session.region_name}")
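# on_predicate re-invokes the decorated function with the same arguments while
# the predicate holds, which is what keeps write_records resubmitting until
# UnprocessedItems comes back empty. A toy illustration of that contract
# (names here are illustrative, not from the original source):
import backoff

calls = []

@backoff.on_predicate(backoff.expo, lambda remaining: remaining > 0,
                      factor=0.01, max_tries=5, jitter=None)
def drain() -> int:
    calls.append(1)
    return max(0, 3 - len(calls))  # pretend two retries are needed

assert drain() == 0 and len(calls) == 3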
class GsSession(ContextBase):
    __config = None

    class Scopes(Enum):
        READ_CONTENT = 'read_content'
        READ_FINANCIAL_DATA = 'read_financial_data'
        READ_PRODUCT_DATA = 'read_product_data'
        READ_USER_PROFILE = 'read_user_profile'
        MODIFY_CONTENT = 'modify_content'
        MODIFY_FINANCIAL_DATA = 'modify_financial_data'
        MODIFY_PRODUCT_DATA = 'modify_product_data'
        MODIFY_USER_PROFILE = 'modify_user_profile'
        RUN_ANALYTICS = 'run_analytics'
        EXECUTE_TRADES = 'execute_trades'

        @classmethod
        def get_default(cls):
            return [
                cls.READ_CONTENT.value,
                cls.READ_PRODUCT_DATA.value,
                cls.READ_FINANCIAL_DATA.value
            ]

    def __init__(self, domain: str, api_version: str = API_VERSION,
                 application: str = DEFAULT_APPLICATION, verify=True,
                 http_adapter: requests.adapters.HTTPAdapter = None):
        super().__init__()
        self._session = None
        self.domain = domain
        self.api_version = api_version
        self.application = application
        self.verify = verify
        self.http_adapter = requests.adapters.HTTPAdapter(pool_maxsize=100) if http_adapter is None else http_adapter

    @backoff.on_exception(lambda: backoff.expo(factor=2),
                          (requests.exceptions.HTTPError, requests.exceptions.Timeout),
                          max_tries=5)
    @backoff.on_predicate(lambda: backoff.expo(factor=2),
                          lambda x: x.status_code in (500, 502, 503, 504),
                          max_tries=5)
    @abstractmethod
    def _authenticate(self):
        raise NotImplementedError("Must implement _authenticate")

    def _on_enter(self):
        self.__close_on_exit = self._session is None
        if not self._session:
            self.init()

    def _on_exit(self, exc_type, exc_val, exc_tb):
        if self.__close_on_exit:
            self._session = None

    def init(self):
        if not self._session:
            self._session = requests.Session()
            if self.http_adapter is not None:
                self._session.mount('https://', self.http_adapter)
            self._session.verify = self.verify
            self._session.headers.update({'X-Application': self.application})
            self._authenticate()

    def close(self):
        self._session: requests.Session
        if self._session:
            # don't close a shared adapter
            if self.http_adapter is None:
                self._session.close()
            self._session = None

    def __del__(self):
        self.close()

    @staticmethod
    def __unpack(results: Union[dict, list], cls: type) -> Union[Base, tuple, dict]:
        if issubclass(cls, Base):
            if isinstance(results, list):
                return tuple(None if r is None else cls.from_dict(r) for r in results)
            else:
                return None if results is None else cls.from_dict(results)
        else:
            if isinstance(results, list):
                return tuple(cls(**r) for r in results)
            else:
                return cls(**results)

    def __request(
            self,
            method: str,
            path: str,
            payload: Optional[Union[dict, str, Base, pd.DataFrame]] = None,
            request_headers: Optional[dict] = None,
            cls: Optional[type] = None,
            try_auth=True,
            include_version: bool = True,
            timeout: int = DEFAULT_TIMEOUT
    ) -> Union[Base, tuple, dict]:
        is_dataframe = isinstance(payload, pd.DataFrame)
        if not is_dataframe:
            payload = payload or {}

        url = '{}{}{}'.format(self.domain, '/' + self.api_version if include_version else '', path)

        kwargs = {
            'timeout': timeout
        }

        if method in ['GET', 'DELETE']:
            kwargs['params'] = payload
        elif method in ['POST', 'PUT']:
            headers = self._session.headers.copy()
            if request_headers:
                headers.update({**{'Content-Type': 'application/json'}, **request_headers})
            else:
                headers.update({'Content-Type': 'application/json'})
            kwargs['headers'] = headers
            if is_dataframe or payload:
                kwargs['data'] = payload if isinstance(payload, str) else json.dumps(payload, cls=JSONEncoder)
        else:
            raise MqError('not implemented')

        response = self._session.request(method, url, **kwargs)
        if response.status_code == 401:
            # Expired token or other authorization issue
            if not try_auth:
                raise MqRequestError(response.status_code, response.text, context='{} {}'.format(method, url))
            self._authenticate()
            return self.__request(method, path, payload=payload, cls=cls, try_auth=False)
        elif not 199 < response.status_code < 300:
            raise MqRequestError(response.status_code, response.text, context='{} {}'.format(method, url))
        elif 'application/x-msgpack' in response.headers['content-type']:
            res = msgpack.unpackb(response.content, raw=False)

            if cls:
                if isinstance(res, dict) and 'results' in res:
                    res['results'] = self.__unpack(res['results'], cls)
                else:
                    res = self.__unpack(res, cls)

            return res
        elif 'application/json' in response.headers['content-type']:
            res = json.loads(response.text)

            if cls:
                if isinstance(res, dict) and 'results' in res:
                    res['results'] = self.__unpack(res['results'], cls)
                else:
                    res = self.__unpack(res, cls)

            return res
        else:
            return {'raw': response}

    def _get(self, path: str, payload: Optional[Union[dict, Base]] = None,
             request_headers: Optional[dict] = None, cls: Optional[type] = None,
             include_version: bool = True, timeout: int = DEFAULT_TIMEOUT) -> Union[Base, tuple, dict]:
        return self.__request('GET', path, payload=payload, request_headers=request_headers,
                              cls=cls, include_version=include_version, timeout=timeout)

    def _post(self, path: str, payload: Optional[Union[dict, Base, pd.DataFrame]] = None,
              request_headers: Optional[dict] = None, cls: Optional[type] = None,
              include_version: bool = True, timeout: int = DEFAULT_TIMEOUT) -> Union[Base, tuple, dict]:
        return self.__request('POST', path, payload=payload, request_headers=request_headers,
                              cls=cls, include_version=include_version, timeout=timeout)

    def _delete(self, path: str, payload: Optional[Union[dict, Base]] = None,
                request_headers: Optional[dict] = None, cls: Optional[type] = None,
                include_version: bool = True, timeout: int = DEFAULT_TIMEOUT) -> Union[Base, tuple, dict]:
        return self.__request('DELETE', path, payload=payload, request_headers=request_headers,
                              cls=cls, include_version=include_version, timeout=timeout)

    def _put(self, path: str, payload: Optional[Union[dict, Base]] = None,
             request_headers: Optional[dict] = None, cls: Optional[type] = None,
             include_version: bool = True, timeout: int = DEFAULT_TIMEOUT) -> Union[Base, tuple, dict]:
        return self.__request('PUT', path, payload=payload, request_headers=request_headers,
                              cls=cls, include_version=include_version, timeout=timeout)

    def _connect_websocket(self, path: str):
        url = 'ws{}{}{}'.format(self.domain[4:], '/' + self.api_version, path)
        return websockets.connect(url, extra_headers=self._headers(),
                                  max_size=2**64, read_limit=2**64)

    def _headers(self):
        return [('Cookie', 'GSSSO=' + self._session.cookies['GSSSO'])]

    @classmethod
    def _config_for_environment(cls, environment):
        if cls.__config is None:
            cls.__config = ConfigParser()
            cls.__config.read(os.path.join(os.path.dirname(inspect.getfile(cls)), 'config.ini'))

        return cls.__config[environment]

    @classmethod
    def use(
            cls,
            environment_or_domain: Union[Environment, str] = Environment.PROD,
            client_id: Optional[str] = None,
            client_secret: Optional[str] = None,
            scopes: Optional[Union[Tuple, List, str]] = (),
            api_version: str = API_VERSION,
            application: str = DEFAULT_APPLICATION,
            http_adapter: requests.adapters.HTTPAdapter = None
    ) -> None:
        environment_or_domain = environment_or_domain.name if isinstance(
            environment_or_domain, Environment) else environment_or_domain
        session = cls.get(
            environment_or_domain,
            client_id=client_id,
            client_secret=client_secret,
            scopes=scopes,
            api_version=api_version,
            application=application,
            http_adapter=http_adapter
        )

        session.init()
        cls.current = session

    @classmethod
    def get(
            cls,
            environment_or_domain: Union[Environment, str] = Environment.PROD,
            client_id: Optional[str] = None,
            client_secret: Optional[str] = None,
            scopes: Optional[Union[Tuple, List, str]] = (),
            token: str = '',
            is_gssso: bool = False,
            api_version: str = API_VERSION,
            application: str = DEFAULT_APPLICATION,
            http_adapter: requests.adapters.HTTPAdapter = None
    ) -> 'GsSession':
        """ Return an instance of the appropriate session type for the given credentials"""
        environment_or_domain = environment_or_domain.name if isinstance(
            environment_or_domain, Environment) else environment_or_domain

        if client_id is not None:
            if isinstance(scopes, str):
                scopes = (scopes,)
            else:
                scopes = cls.Scopes.get_default() if len(scopes) == 0 else scopes

            return OAuth2Session(environment_or_domain, client_id, client_secret, scopes,
                                 api_version=api_version, application=application,
                                 http_adapter=http_adapter)
        elif token:
            if is_gssso:
                try:
                    return PassThroughGSSSOSession(environment_or_domain, token,
                                                   api_version=api_version,
                                                   application=application,
                                                   http_adapter=http_adapter)
                except NameError:
                    raise MqUninitialisedError('This option requires gs_quant_internal to be installed')
            else:
                return PassThroughSession(environment_or_domain, token,
                                          api_version=api_version,
                                          application=application,
                                          http_adapter=http_adapter)
        else:
            try:
                return KerberosSession(environment_or_domain, api_version=api_version,
                                       http_adapter=http_adapter)
            except NameError:
                raise MqUninitialisedError('Must specify client_id and client_secret')
"""Triggers an Airplane task with the provided arguments.""" client = api_client_from_env() run_id = client.execute_task(slug, param_values) run_info = __wait_for_run_completion(run_id) outputs = client.get_run_output(run_id) return { "id": run_info["id"], "taskID": run_info["taskID"], "paramValues": run_info["paramValues"], "status": run_info["status"], "output": outputs, } @backoff.on_exception( lambda: backoff.expo(factor=0.1, max_value=5), ( requests.exceptions.ConnectionError, requests.exceptions.Timeout, RunPendingException, ), ) def __wait_for_run_completion(run_id: str) -> Dict[str, Any]: client = api_client_from_env() run_info = client.get_run(run_id) if run_info["status"] in ("NotStarted", "Queued", "Active"): raise RunPendingException() return run_info def api_client_from_env() -> APIClient: