def _export(self, data: TypingSequence[SDKDataT]) -> ExportResultT:
    """Export *data* through the gRPC client, retrying transient failures.

    :param data: sequence of SDK telemetry items to export.
    :returns: ``self._result.SUCCESS`` when the server accepts the batch,
        ``self._result.FAILURE`` once the retry budget is exhausted or a
        non-retryable error is received.
    """
    # expo returns a generator that yields delay values which grow
    # exponentially. Once delay is greater than max_value, the yielded
    # value will remain constant.
    # max_value is 64 to match the sibling _export implementation in this
    # file; the previous 900s (15 minute) cap could block an export call
    # for over a quarter of an hour.
    max_value = 64

    for delay in expo(max_value=max_value):
        # Hitting the cap means the backoff budget is spent — give up.
        if delay == max_value:
            return self._result.FAILURE

        try:
            self._client.Export(
                request=self._translate_data(data),
                metadata=self._headers,
                timeout=self._timeout,
            )

            return self._result.SUCCESS

        except RpcError as error:
            # Only status codes that gRPC treats as potentially transient
            # are retried. PERMISSION_DENIED and UNAUTHENTICATED were
            # removed from this list: authentication/authorization
            # failures do not resolve by waiting, so retrying them only
            # delays the inevitable failure.
            if error.code() in [
                StatusCode.CANCELLED,
                StatusCode.DEADLINE_EXCEEDED,
                StatusCode.RESOURCE_EXHAUSTED,
                StatusCode.ABORTED,
                StatusCode.OUT_OF_RANGE,
                StatusCode.UNAVAILABLE,
                StatusCode.DATA_LOSS,
            ]:
                # The server may override the backoff delay via the
                # google.rpc.retryinfo-bin trailing-metadata entry.
                retry_info_bin = dict(error.trailing_metadata()).get(
                    "google.rpc.retryinfo-bin"
                )
                if retry_info_bin is not None:
                    retry_info = RetryInfo()
                    retry_info.ParseFromString(retry_info_bin)
                    delay = (
                        retry_info.retry_delay.seconds
                        + retry_info.retry_delay.nanos / 1.0e9
                    )

                logger.debug(
                    "Waiting %ss before retrying export of span", delay
                )
                sleep(delay)
                continue

            # An RpcError carrying OK is not expected in practice, but is
            # treated as success for safety.
            if error.code() == StatusCode.OK:
                return self._result.SUCCESS

            return self._result.FAILURE

    return self._result.FAILURE
def _export(self, data: TypingSequence[SDKDataT]) -> ExportResultT:
    """Export *data* through the gRPC client with exponential backoff.

    Transient gRPC errors are retried, honouring any server-provided
    ``google.rpc.retryinfo-bin`` delay hint; non-transient errors fail
    immediately.

    :param data: sequence of SDK telemetry items to export.
    :returns: ``self._result.SUCCESS`` on acceptance, otherwise
        ``self._result.FAILURE``.
    """
    # expo yields exponentially growing delays; once the cap is reached
    # it keeps yielding the cap, which we use as the "give up" signal.
    backoff_cap = 64
    transient_codes = (
        StatusCode.CANCELLED,
        StatusCode.DEADLINE_EXCEEDED,
        StatusCode.RESOURCE_EXHAUSTED,
        StatusCode.ABORTED,
        StatusCode.OUT_OF_RANGE,
        StatusCode.UNAVAILABLE,
        StatusCode.DATA_LOSS,
    )

    for delay in expo(max_value=backoff_cap):
        if delay == backoff_cap:
            # Retry budget exhausted.
            return self._result.FAILURE

        try:
            self._client.Export(
                request=self._translate_data(data),
                metadata=self._headers,
                timeout=self._timeout,
            )
        except RpcError as error:
            code = error.code()
            if code in transient_codes:
                # Prefer the server's suggested delay when present.
                retry_info_bin = dict(error.trailing_metadata()).get(
                    "google.rpc.retryinfo-bin"
                )
                if retry_info_bin is not None:
                    retry_info = RetryInfo()
                    retry_info.ParseFromString(retry_info_bin)
                    delay = (
                        retry_info.retry_delay.seconds
                        + retry_info.retry_delay.nanos / 1.0e9
                    )
                logger.warning(
                    "Transient error %s encountered while exporting span batch, retrying in %ss.",
                    code,
                    delay,
                )
                sleep(delay)
                continue
            else:
                logger.error(
                    "Failed to export span batch, error code: %s",
                    code,
                )
            # An RpcError carrying OK is treated as success.
            if code == StatusCode.OK:
                return self._result.SUCCESS
            return self._result.FAILURE
        else:
            return self._result.SUCCESS

    return self._result.FAILURE
def _get_retry_delay(cause): """Helper for :func:`_delay_until_retry`. :type exc: :class:`grpc.Call` :param exc: exception for aborted transaction :rtype: float :returns: seconds to wait before retrying the transaction. """ metadata = dict(cause.trailing_metadata()) retry_info_pb = metadata.get('google.rpc.retryinfo-bin') if retry_info_pb is not None: retry_info = RetryInfo() retry_info.ParseFromString(retry_info_pb) nanos = retry_info.retry_delay.nanos return retry_info.retry_delay.seconds + nanos / 1.0e9
def _get_retry_delay(cause, attempts): """Helper for :func:`_delay_until_retry`. :type exc: :class:`grpc.Call` :param exc: exception for aborted transaction :rtype: float :returns: seconds to wait before retrying the transaction. :type attempts: int :param attempts: number of call retries """ metadata = dict(cause.trailing_metadata()) retry_info_pb = metadata.get("google.rpc.retryinfo-bin") if retry_info_pb is not None: retry_info = RetryInfo() retry_info.ParseFromString(retry_info_pb) nanos = retry_info.retry_delay.nanos return retry_info.retry_delay.seconds + nanos / 1.0e9 return 2**attempts + random.random()