def continue_from_headers(
    cls,
    headers,  # type: typing.Mapping[str, str]
    **kwargs  # type: Any
):
    # type: (...) -> Transaction
    """
    Build a Transaction from the given params plus any tracing data found
    in the 'sentry-trace' header.

    When that header is absent or unparseable, a plain Transaction built
    from the params alone is returned instead.
    """
    if cls is Span:
        logger.warning(
            "Deprecated: use Transaction.continue_from_headers "
            "instead of Span.continue_from_headers."
        )

    # Prefer a transaction derived from the incoming traceparent; fall back
    # to a fresh one when the header yields nothing.
    txn = Transaction.from_traceparent(headers.get("sentry-trace"), **kwargs)
    if txn is None:
        txn = Transaction(**kwargs)

    # Continuing a trace implies the parent span lives in another process.
    txn.same_process_as_parent = False
    return txn
def finish(self, hub=None):
    # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
    """Finish this span and, when it is a transaction, capture and send it.

    Returns the id of the captured transaction event, or None when nothing
    is sent (already finished, no recorder, child of another transaction,
    no client, or not sampled).
    """
    hub = hub or self.hub or sentry_sdk.Hub.current

    if self.timestamp is not None:
        # This transaction is already finished, so we should not flush it again.
        return None

    # Bugfix: use UTC like the rest of the SDK. `datetime.now()` returns
    # naive *local* time, which would skew this end timestamp against the
    # UTC-based start_timestamp and produce wrong durations.
    self.timestamp = datetime.utcnow()

    _maybe_create_breadcrumbs_from_span(hub, self)

    if self._span_recorder is None:
        return None
    self._span_recorder.finish_span(self)

    if self.transaction is None:
        # If this has no transaction set we assume there's a parent
        # transaction for this span that would be flushed out eventually.
        return None

    client = hub.client
    if client is None:
        # We have no client and therefore nowhere to send this transaction
        # event.
        return None

    if not self.sampled:
        # At this point a `sampled = None` should have already been
        # resolved to a concrete decision. If `sampled` is `None`, it's
        # likely that somebody used `with sentry_sdk.Hub.start_span(..)` on a
        # non-transaction span and later decided to make it a transaction.
        if self.sampled is None:
            logger.warning("Discarding transaction Span without sampling decision")
        return None

    return hub.capture_event(
        {
            "type": "transaction",
            "transaction": self.transaction,
            "contexts": {"trace": self.get_trace_context()},
            "timestamp": partial_serialize(
                client, self.timestamp, is_databag=False, should_repr_strings=False
            ),
            "start_timestamp": partial_serialize(
                client,
                self.start_timestamp,
                is_databag=False,
                should_repr_strings=False,
            ),
            "spans": [
                s.to_json(client)
                for s in self._span_recorder.finished_spans
                if s is not self
            ],
        }
    )
def _patch_channels():
    # type: () -> None
    """Wrap Django channels' AsgiHandler so ASGI requests are instrumented."""
    try:
        from channels.http import AsgiHandler  # type: ignore
    except ImportError:
        # channels is not installed; nothing to patch.
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because channels may not be used at all in
        # the current process.
        logger.warning(
            "We detected that you are using Django channels 2.0. To get proper "
            "instrumentation for ASGI requests, the Sentry SDK requires "
            "Python 3.7+ or the aiocontextvars package from PyPI.")

    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

    old_app = AsgiHandler.__call__

    def sentry_patched_asgi_handler(self, receive, send):
        # type: (AsgiHandler, Any, Any) -> Any
        if Hub.current.get_integration(DjangoIntegration) is None:
            # Bugfix: `old_app` is the *unbound* __call__, so the handler
            # instance must be passed explicitly. The previous call
            # `old_app(receive, send)` bound `receive` as `self`.
            return old_app(self, receive, send)

        middleware = SentryAsgiMiddleware(
            lambda _scope: old_app.__get__(self, AsgiHandler))
        return middleware(self.scope)(receive, send)

    AsgiHandler.__call__ = sentry_patched_asgi_handler
def __init__(
    self,
    name="",  # type: str
    parent_sampled=None,  # type: Optional[bool]
    sentry_tracestate=None,  # type: Optional[str]
    third_party_tracestate=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Initialize the transaction; remaining kwargs go to Span.__init__.

    Accepts the legacy `transaction` keyword as an alias for `name`.
    """
    # TODO: consider removing this in a future release.
    # This is for backwards compatibility with releases before Transaction
    # existed, to allow for a smoother transition.
    if "transaction" in kwargs and not name:
        logger.warning(
            "Deprecated: use Transaction(name=...) to create transactions "
            "instead of Span(transaction=...).")
        name = kwargs.pop("transaction")

    Span.__init__(self, **kwargs)

    self.name = name
    self.parent_sampled = parent_sampled
    # if tracestate isn't inherited and set here, it will get set lazily,
    # either the first time an outgoing request needs it for a header or the
    # first time an event needs it for inclusion in the captured data
    self._sentry_tracestate = sentry_tracestate
    self._third_party_tracestate = third_party_tracestate
def get_project_key():
    """Look up the ProjectKey for the internally-configured Sentry project.

    Returns None (after logging a warning) when no project is configured
    or the key cannot be fetched.
    """
    from sentry.models import ProjectKey

    if not settings.SENTRY_PROJECT:
        return None

    key = None
    try:
        key_id = settings.SENTRY_PROJECT_KEY
        if key_id is None:
            key = ProjectKey.get_default(settings.SENTRY_PROJECT)
        else:
            key = ProjectKey.objects.get(id=key_id, project=settings.SENTRY_PROJECT)
    except Exception as exc:
        # if the relation fails to query or is missing completely, lets handle
        # it gracefully
        sdk_logger.warning(
            "internal-error.unable-to-fetch-project",
            extra={
                "project_id": settings.SENTRY_PROJECT,
                "project_key": settings.SENTRY_PROJECT_KEY,
                "error_message": str(exc),
            },
        )

    if key is None:
        sdk_logger.warning(
            "internal-error.no-project-available",
            extra={
                "project_id": settings.SENTRY_PROJECT,
                "project_key": settings.SENTRY_PROJECT_KEY,
            },
        )
    return key
def _is_valid_sample_rate(rate):
    # type: (Any) -> bool
    """
    Return True iff `rate` is a usable sample rate: a boolean, or a real
    number in the closed interval [0, 1].
    """
    # both booleans and NaN are instances of Real, so a) checking for Real
    # checks for the possibility of a boolean also, and b) we have to check
    # separately for NaN
    is_real_number = isinstance(rate, Real)
    if not is_real_number or math.isnan(rate):
        logger.warning(
            "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}."
            .format(rate=rate, type=type(rate)))
        return False

    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
    rate = float(rate)
    if not 0.0 <= rate <= 1.0:
        logger.warning(
            "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}."
            .format(rate=rate))
        return False

    return True
def get_integration(self, name_or_class):
    # type: (Union[str, Integration]) -> Any
    """Returns the integration for this hub by name or class.  If there
    is no client bound or the client does not have that integration
    then `None` is returned.

    If the return value is not `None` the hub is guaranteed to have a
    client attached.
    """
    # Normalize the lookup key to the integration's string identifier,
    # since integration mappings are keyed by identifier.
    if isinstance(name_or_class, str):
        integration_name = name_or_class
    elif name_or_class.identifier is not None:
        integration_name = name_or_class.identifier
    else:
        raise ValueError("Integration has no name")

    client = self._stack[-1][0]
    if client is not None:
        rv = client.integrations.get(integration_name)
        if rv is not None:
            return rv

    initial_client = _initial_client
    if initial_client is not None:
        # _initial_client is stored as a callable (presumably a weakref);
        # calling it resolves the referent -- TODO confirm.
        initial_client = initial_client()

    # Bugfix: this check previously used `name_or_class` as the dict key,
    # which can never match when a class was passed in; the mapping is
    # keyed by identifier strings (see the lookup above).
    if (initial_client is not None and initial_client is not client
            and initial_client.integrations.get(integration_name) is not None):
        warning = ("Integration %r attempted to run but it was only "
                   "enabled on init() but not the client that "
                   "was bound to the current flow. Earlier versions of "
                   "the SDK would consider these integrations enabled but "
                   "this is no longer the case." % (name_or_class, ))
        warn(Warning(warning), stacklevel=3)
        logger.warning(warning)
def reinflate_tracestate(encoded_tracestate):
    # type: (str) -> typing.Optional[Mapping[str, str]]
    """
    Decode a sentry tracestate value from its transport encoding back into
    a dictionary of data.

    Returns None for empty input or values that cannot be decoded.
    """
    if not encoded_tracestate:
        return None

    inflated_tracestate = None

    # Base64-encoded strings always come out with a length which is a
    # multiple of 4. In order to achieve this, the end is padded with one or
    # more `=` signs. Because the tracestate standard calls for using `=`
    # signs between vendor name and value (`sentry=xxx,dogsaregreat=yyy`),
    # to avoid confusion we strip the `=` when the data is initially
    # encoded. Python's decoding function requires they be put back.
    # Fortunately, it doesn't complain if there are too many, so we just
    # attach two `=` on spec (there will never be more than 2, see
    # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding).
    tracestate_json = from_base64(encoded_tracestate + "==")

    try:
        assert tracestate_json is not None
        inflated_tracestate = json.loads(tracestate_json)
    except Exception as err:
        logger.warning(
            ("Unable to attach tracestate data to envelope header: {err}" +
             "\nTracestate value is {encoded_tracestate}").format(
                 err=err, encoded_tracestate=encoded_tracestate),
        )

    return inflated_tracestate
def __exit__(self, exc_type, exc_value, tb):
    # type: (Any, Any, Any) -> None
    """Restore the hub's scope stack to the depth captured on entry,
    logging when the stack was popped too far, leaked, or mismatched."""
    current_len = len(self._hub._stack)

    if current_len < self._original_len:
        logger.error(
            "Scope popped too soon. Popped %s scopes too many.",
            self._original_len - current_len,
        )
        return
    elif current_len > self._original_len:
        logger.warning(
            "Leaked %s scopes: %s",
            current_len - self._original_len,
            self._hub._stack[self._original_len:],
        )

    layer = self._hub._stack[self._original_len - 1]
    del self._hub._stack[self._original_len - 1:]

    if layer[1] != self._layer[1]:
        # Bugfix: the log arguments were swapped. `self._layer` is the scope
        # we meant to pop; `layer` is what was actually found on the stack.
        logger.error(
            "Wrong scope found. Meant to pop %s, but popped %s.",
            self._layer[1],
            layer[1],
        )
    elif layer[0] != self._layer[0]:
        warning = (
            "init() called inside of pushed scope. This might be entirely "
            "legitimate but usually occurs when initializing the SDK inside "
            "a request handler or task/job function. Try to initialize the "
            "SDK as early as possible instead.")
        logger.warning(warning)
def setup_once():
    """Monkeypatch the AWS Lambda bootstrap module so every handler
    invocation is wrapped and queued events are drained before the
    serialized response leaves the runtime."""
    # On the 3.6/2.7 runtimes the bootstrap module runs as __main__; on the
    # 3.7 runtime it is importable as `bootstrap` instead.
    import __main__ as lambda_bootstrap

    pre_37 = True  # Python 3.6 or 2.7

    if not hasattr(lambda_bootstrap, "handle_http_request"):
        try:
            import bootstrap as lambda_bootstrap

            pre_37 = False  # Python 3.7
        except ImportError:
            pass

    if not hasattr(lambda_bootstrap, "handle_event_request"):
        # Neither module layout matched: we are not inside AWS Lambda.
        logger.warning(
            "Not running in AWS Lambda environment, "
            "AwsLambdaIntegration disabled"
        )
        return

    if pre_37:
        # Older runtimes dispatch through two entry points; wrap both.
        old_handle_event_request = lambda_bootstrap.handle_event_request

        def sentry_handle_event_request(request_handler, *args, **kwargs):
            # Wrap the user handler so invocations are instrumented.
            request_handler = _wrap_handler(request_handler)
            return old_handle_event_request(request_handler, *args, **kwargs)

        lambda_bootstrap.handle_event_request = sentry_handle_event_request

        old_handle_http_request = lambda_bootstrap.handle_http_request

        def sentry_handle_http_request(request_handler, *args, **kwargs):
            request_handler = _wrap_handler(request_handler)
            return old_handle_http_request(request_handler, *args, **kwargs)

        lambda_bootstrap.handle_http_request = sentry_handle_http_request
    else:
        # The 3.7 runtime has a single entry point with an extra leading
        # runtime-client argument.
        old_handle_event_request = lambda_bootstrap.handle_event_request

        def sentry_handle_event_request(
            lambda_runtime_client, request_handler, *args, **kwargs
        ):
            request_handler = _wrap_handler(request_handler)
            return old_handle_event_request(
                lambda_runtime_client, request_handler, *args, **kwargs
            )

        lambda_bootstrap.handle_event_request = sentry_handle_event_request

    # This is the only function that is called in all Python environments
    # at the end of the request/response lifecycle. It is the only way to
    # do it in the Python 3.7 env.
    old_to_json = lambda_bootstrap.to_json

    def sentry_to_json(*args, **kwargs):
        # Flush any pending events before the response is serialized.
        _drain_queue()
        return old_to_json(*args, **kwargs)

    lambda_bootstrap.to_json = sentry_to_json
def setup_integrations(
    integrations, with_defaults=True, with_auto_enabling_integrations=False
):
    # type: (List[Integration], bool, bool) -> Dict[str, Integration]
    """Given a list of integration instances this installs them all.  When
    `with_defaults` is set to `True` then all default integrations are added
    unless they were already provided before.

    Returns the mapping of identifier -> integration instance that ended up
    enabled.
    """
    # Key by identifier; a user-provided instance always wins over a default.
    integrations = dict(
        (integration.identifier, integration) for integration in integrations or ()
    )
    logger.debug("Setting up integrations (with default = %s)", with_defaults)

    # Integrations that are not explicitly set up by the user.
    used_as_default_integration = set()

    if with_defaults:
        for integration_cls in iter_default_integrations(
            with_auto_enabling_integrations
        ):
            if integration_cls.identifier not in integrations:
                instance = integration_cls()
                integrations[instance.identifier] = instance
                used_as_default_integration.add(instance.identifier)

    for identifier, integration in iteritems(integrations):
        # The lock guards the process-wide _installed_integrations set so
        # each integration's setup_once runs at most once.
        with _installer_lock:
            if identifier not in _installed_integrations:
                logger.debug(
                    "Setting up previously not enabled integration %s", identifier
                )
                try:
                    type(integration).setup_once()
                except NotImplementedError:
                    # Legacy integrations implemented `install` instead of
                    # `setup_once`; fall back to it with a deprecation note.
                    if getattr(integration, "install", None) is not None:
                        logger.warning(
                            "Integration %s: The install method is "
                            "deprecated. Use `setup_once`.",
                            identifier,
                        )
                        integration.install()
                    else:
                        raise
                except DidNotEnable as e:
                    # Failing to enable is fatal only for integrations the
                    # user asked for explicitly; defaults fail quietly.
                    if identifier not in used_as_default_integration:
                        raise

                    logger.debug(
                        "Did not enable default integration %s: %s", identifier, e
                    )

                _installed_integrations.add(identifier)

    for identifier in integrations:
        logger.debug("Enabling integration %s", identifier)

    return integrations
def finish(self, hub=None):
    # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
    """Finish the span and, when it is a transaction, capture and send it.

    Returns the captured event id, or None when nothing is sent (already
    finished, no recorder, child span, no client, or not sampled).
    """
    hub = hub or self.hub or sentry_sdk.Hub.current

    if self.timestamp is not None:
        # This transaction is already finished, so we should not flush it again.
        return None

    try:
        # Derive the end time from the monotonic clock, anchored at
        # start_timestamp, so durations are immune to wall-clock jumps.
        duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
        self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
    except AttributeError:
        # No monotonic anchor was recorded on this span (presumably created
        # by an older code path -- TODO confirm); fall back to wall-clock UTC.
        self.timestamp = datetime.utcnow()

    _maybe_create_breadcrumbs_from_span(hub, self)

    if self._span_recorder is None:
        return None

    if self.transaction is None:
        # If this has no transaction set we assume there's a parent
        # transaction for this span that would be flushed out eventually.
        return None

    client = hub.client
    if client is None:
        # We have no client and therefore nowhere to send this transaction
        # event.
        return None

    if not self.sampled:
        # At this point a `sampled = None` should have already been
        # resolved to a concrete decision. If `sampled` is `None`, it's
        # likely that somebody used `with sentry_sdk.Hub.start_span(..)` on a
        # non-transaction span and later decided to make it a transaction.
        if self.sampled is None:
            logger.warning("Discarding transaction Span without sampling decision")
        return None

    # Only include children that actually finished, and never the
    # transaction span itself.
    finished_spans = [
        span.to_json(client)
        for span in self._span_recorder.spans
        if span is not self and span.timestamp is not None
    ]

    return hub.capture_event(
        {
            "type": "transaction",
            "transaction": self.transaction,
            "contexts": {"trace": self.get_trace_context()},
            "tags": self._tags,
            "timestamp": self.timestamp,
            "start_timestamp": self.start_timestamp,
            "spans": finished_spans,
        }
    )
def continue_from_environ(
    cls,
    environ,  # type: typing.Mapping[str, str]
    **kwargs  # type: Any
):
    # type: (...) -> Transaction
    """Like `continue_from_headers`, but reads the tracing header out of a
    WSGI environ mapping."""
    if cls is Span:
        logger.warning("Deprecated: use Transaction.continue_from_environ "
                       "instead of Span.continue_from_environ.")
    # EnvironHeaders adapts the environ to a plain headers mapping.
    headers = EnvironHeaders(environ)
    return Transaction.continue_from_headers(headers, **kwargs)
def setup_once():
    # type: () -> None
    """Hook the GCP Python 3.7 worker so user functions are wrapped."""
    import __main__ as gcp_functions  # type: ignore

    if not hasattr(gcp_functions, "worker_v1"):
        # worker_v1 only exists on the Python 3.7 runtime's worker module.
        logger.warning(
            "GcpIntegration currently supports only Python 3.7 runtime environment."
        )
        return

    handler_cls = gcp_functions.worker_v1.FunctionHandler
    handler_cls.invoke_user_function = _wrap_func(handler_cls.invoke_user_function)
def finish(self, hub=None):
    # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
    """Finish the transaction and send it to Sentry.

    Returns the captured event id, or None when the transaction is
    discarded (already finished, unsampled, or no client available).
    """
    if self.timestamp is not None:
        # This transaction is already finished, ignore.
        return None

    # This is a de facto proxy for checking if sampled = False
    if self._span_recorder is None:
        logger.debug("Discarding transaction because sampled = False")
        return None

    hub = hub or self.hub or sentry_sdk.Hub.current
    client = hub.client

    if client is None:
        # We have no client and therefore nowhere to send this transaction.
        return None

    if not self.name:
        logger.warning(
            "Transaction has no name, falling back to `<unlabeled transaction>`."
        )
        self.name = "<unlabeled transaction>"

    # Base-class finish sets the end timestamp and runs span bookkeeping.
    Span.finish(self, hub)

    if not self.sampled:
        # At this point a `sampled = None` should have already been resolved
        # to a concrete decision.
        if self.sampled is None:
            logger.warning(
                "Discarding transaction without sampling decision.")
        return None

    # Only ship children that actually finished; skip the transaction span.
    finished_spans = [
        span.to_json()
        for span in self._span_recorder.spans
        if span is not self and span.timestamp is not None
    ]

    return hub.capture_event({
        "type": "transaction",
        "transaction": self.name,
        "contexts": {
            "trace": self.get_trace_context()
        },
        "tags": self._tags,
        "timestamp": self.timestamp,
        "start_timestamp": self.start_timestamp,
        "spans": finished_spans,
    })
def continue_from_headers(
    cls,
    headers,  # type: typing.Mapping[str, str]
    **kwargs  # type: Any
):
    # type: (...) -> Transaction
    """Create a Transaction continuing the trace described by the
    'sentry-trace' header, or a fresh one when the header is unusable."""
    if cls is Span:
        logger.warning("Deprecated: use Transaction.continue_from_headers "
                       "instead of Span.continue_from_headers.")

    txn = Transaction.from_traceparent(headers.get("sentry-trace"), **kwargs)
    if txn is None:
        txn = Transaction(**kwargs)

    # A continued trace always has its parent in another process.
    txn.same_process_as_parent = False
    return txn
def __init__(
    self,
    name="",  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Initialize the transaction, accepting the legacy `transaction`
    keyword as an alias for `name`."""
    # TODO: consider removing this in a future release.
    # This is for backwards compatibility with releases before Transaction
    # existed, to allow for a smoother transition.
    if "transaction" in kwargs and not name:
        logger.warning(
            "Deprecated: use Transaction(name=...) to create transactions "
            "instead of Span(transaction=...).")
        name = kwargs.pop("transaction")

    Span.__init__(self, **kwargs)
    self.name = name
def add_event_processor(
    self, func  # type: EventProcessor
):
    # type: (...) -> None
    """Register a scope local event processor on the scope.

    :param func: This function behaves like `before_send.`
    """
    processors = self._event_processors

    # Guard against unbounded growth: a scope accumulating this many
    # processors is almost certainly leaking, so drop the old ones.
    if len(processors) > 20:
        logger.warning(
            "Too many event processors on scope! Clearing list to free up some memory: %r",
            processors,
        )
        del processors[:]

    processors.append(func)
def from_traceparent(
    cls,
    traceparent,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> Optional[Transaction]
    """
    Create a Transaction with the given params, then add in data pulled from
    the given 'sentry-trace' header value before returning the Transaction.

    If the header value is malformed or missing, just create and return a
    Transaction instance with the given params.
    """
    if cls is Span:
        logger.warning(
            "Deprecated: use Transaction.from_traceparent "
            "instead of Span.from_traceparent."
        )

    if not traceparent:
        return None

    # Tolerate W3C-style framing: "00-<trace>-<span>-00".
    if traceparent.startswith("00-") and traceparent.endswith("-00"):
        traceparent = traceparent[3:-3]

    match = _traceparent_header_format_re.match(str(traceparent))
    if match is None:
        return None

    trace_id, parent_span_id, sampled_str = match.groups()

    # Normalize ids to fixed-width lowercase hex (32 / 16 chars).
    if trace_id is not None:
        trace_id = "{:032x}".format(int(trace_id, 16))
    if parent_span_id is not None:
        parent_span_id = "{:016x}".format(int(parent_span_id, 16))

    # An empty sampled field means the upstream deferred the decision.
    parent_sampled = (sampled_str != "0") if sampled_str else None  # type: Optional[bool]

    return Transaction(
        trace_id=trace_id,
        parent_span_id=parent_span_id,
        parent_sampled=parent_sampled,
        **kwargs
    )
def _update_rate_limits(self, response):
    # type: (urllib3.HTTPResponse) -> None
    """Record any rate limits communicated by the server in `response`."""
    # new sentries with more rate limit insights. We honor this header
    # no matter of the status code to update our internal rate limits.
    header = response.headers.get("x-sentry-rate-limits")
    if header:
        logger.warning("Rate-limited via x-sentry-rate-limits")
        self._disabled_until.update(_parse_rate_limits(header))
        return

    # old sentries only communicate global rate limit hits via the
    # retry-after header on 429. This header can also be emitted on new
    # sentries if a proxy in front wants to globally slow things down.
    if response.status == 429:
        logger.warning("Rate-limited via 429")
        retry_after = self._retry.get_retry_after(response) or 60
        self._disabled_until[None] = datetime.utcnow() + timedelta(
            seconds=retry_after)
def _patch_django_asgi_handler():
    # type: () -> None
    """Instrument Django 3's ASGIHandler, if ASGI support is available."""
    try:
        from django.core.handlers.asgi import ASGIHandler
    except ImportError:
        # This Django has no ASGI support; nothing to patch.
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because Django's ASGI stuff may not be used
        # at all.
        logger.warning("We detected that you are using Django 3." +
                       CONTEXTVARS_ERROR_MESSAGE)

    from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl

    patch_django_asgi_handler_impl(ASGIHandler)
def continue_from_environ(
    cls,
    environ,  # type: typing.Mapping[str, str]
    **kwargs  # type: Any
):
    # type: (...) -> Transaction
    """
    Create a Transaction with the given params, then add in data pulled from
    the 'sentry-trace' header in the environ (if any) before returning the
    Transaction.

    If the 'sentry-trace' header is malformed or missing, just create and
    return a Transaction instance with the given params.
    """
    if cls is Span:
        logger.warning("Deprecated: use Transaction.continue_from_environ "
                       "instead of Span.continue_from_environ.")
    # Wrap the environ so it can be read like a plain headers mapping.
    wrapped_headers = EnvironHeaders(environ)
    return Transaction.continue_from_headers(wrapped_headers, **kwargs)
def start_span(
    self,
    span=None,  # type: Optional[Span]
    **kwargs  # type: Any
):
    # type: (...) -> Span
    """
    Create and start timing a new span whose parent is the currently active
    span or transaction, if any. The return value is a span instance,
    typically used as a context manager to start and stop timing in a `with`
    block.

    Only spans contained in a transaction are sent to Sentry. Most
    integrations start a transaction at the appropriate time, for example
    for every incoming HTTP request. Use `start_transaction` to start a new
    transaction when one is not already in progress.
    """
    # TODO: consider removing this in a future release.
    # This is for backwards compatibility with releases before
    # start_transaction existed, to allow for a smoother transition.
    if isinstance(span, Transaction) or "transaction" in kwargs:
        deprecation_msg = (
            "Deprecated: use start_transaction to start transactions and "
            "Transaction.start_child to start spans."
        )
        if isinstance(span, Transaction):
            # A Transaction was passed where a Span was expected: route it
            # through the modern API.
            logger.warning(deprecation_msg)
            return self.start_transaction(span)
        if "transaction" in kwargs:
            # Legacy Span(transaction=...) call: treat it as the name of a
            # new transaction.
            logger.warning(deprecation_msg)
            name = kwargs.pop("transaction")
            return self.start_transaction(name=name, **kwargs)

    # An explicitly provided span wins and is returned unchanged.
    if span is not None:
        return span

    kwargs.setdefault("hub", self)

    # Parent the new span to the scope's active span when there is one;
    # otherwise create a standalone span (which, per the docstring above,
    # is only sent if it ends up inside a transaction).
    span = self.scope.span
    if span is not None:
        return span.start_child(**kwargs)

    return Span(**kwargs)
def continue_from_environ(
    cls,
    environ,  # type: typing.Mapping[str, str]
    **kwargs  # type: Any
):
    # type: (...) -> Transaction
    """
    Create a Transaction carrying any tracing data found in the
    'sentry-trace' and 'tracestate' entries of the given environ.

    Unlike `continue_from_headers`, this expects wsgi-environ style keys
    ("HTTP_HEADER_NAME") rather than plain header names ("header-name").
    """
    if cls is Span:
        logger.warning("Deprecated: use Transaction.continue_from_environ "
                       "instead of Span.continue_from_environ.")
    headers = EnvironHeaders(environ)
    return Transaction.continue_from_headers(headers, **kwargs)
def _patch_channels():
    # type: () -> None
    """Instrument Django channels' AsgiHandler, when channels is installed."""
    try:
        from channels.http import AsgiHandler  # type: ignore
    except ImportError:
        # channels is not installed; nothing to do.
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because channels may not be used at all in
        # the current process. That is the case when running traditional WSGI
        # workers in gunicorn+gevent and the websocket stuff in a separate
        # process.
        logger.warning("We detected that you are using Django channels 2.0." +
                       CONTEXTVARS_ERROR_MESSAGE)

    from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl

    patch_channels_asgi_handler_impl(AsgiHandler)
def _patch_django_asgi_handler():
    # type: () -> None
    """Instrument Django 3's ASGIHandler so ASGI requests are traced."""
    try:
        from django.core.handlers.asgi import ASGIHandler
    except ImportError:
        # No ASGI support in this Django version; nothing to patch.
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because Django may not be used at all in
        # the current process.
        logger.warning(
            "We detected that you are using Django 3. To get proper "
            "instrumentation for ASGI requests, the Sentry SDK requires "
            "Python 3.7+ or the aiocontextvars package from PyPI.")

    from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl

    patch_django_asgi_handler_impl(ASGIHandler)
def from_traceparent(
    cls,
    traceparent,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> Optional[Transaction]
    """
    DEPRECATED: Use Transaction.continue_from_headers(headers, **kwargs)

    Create a Transaction with the given params, then add in data pulled from
    the given 'sentry-trace' header value before returning the Transaction.
    """
    logger.warning(
        "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) "
        "instead of from_traceparent(traceparent, **kwargs)")

    if not traceparent:
        return None

    # Re-wrap the bare header value as a headers mapping and delegate.
    fake_headers = {"sentry-trace": traceparent}
    return cls.continue_from_headers(fake_headers, **kwargs)
def continue_from_headers(
    cls,
    headers,  # type: typing.Mapping[str, str]
    **kwargs  # type: Any
):
    # type: (...) -> Transaction
    """
    Create a transaction with the given params (including any data pulled
    from the 'sentry-trace' and 'tracestate' headers).
    """
    # TODO move this to the Transaction class
    if cls is Span:
        logger.warning("Deprecated: use Transaction.continue_from_headers "
                       "instead of Span.continue_from_headers.")

    # Fold tracing data extracted from both headers into the kwargs,
    # sentry-trace first, then tracestate.
    for extracted in (
        extract_sentrytrace_data(headers.get("sentry-trace")),
        extract_tracestate_data(headers.get("tracestate")),
    ):
        kwargs.update(extracted)

    txn = Transaction(**kwargs)
    txn.same_process_as_parent = False
    return txn
def from_traceparent(
    cls,
    traceparent,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> Optional[Transaction]
    """Build a Transaction from a 'sentry-trace' header value, or return
    None when the value is missing or malformed."""
    if cls is Span:
        logger.warning(
            "Deprecated: use Transaction.from_traceparent "
            "instead of Span.from_traceparent."
        )

    if not traceparent:
        return None

    # Strip W3C traceparent framing ("00-...-00") when present.
    if traceparent.startswith("00-") and traceparent.endswith("-00"):
        traceparent = traceparent[3:-3]

    match = _traceparent_header_format_re.match(str(traceparent))
    if match is None:
        return None

    trace_id, span_id, sampled_str = match.groups()

    # Zero-pad ids to their canonical widths (32 / 16 hex chars).
    if trace_id is not None:
        trace_id = "{:032x}".format(int(trace_id, 16))
    if span_id is not None:
        span_id = "{:016x}".format(int(span_id, 16))
    if sampled_str:
        sampled = sampled_str != "0"  # type: Optional[bool]
    else:
        # An empty sampled field means the upstream deferred the decision.
        sampled = None

    return Transaction(
        trace_id=trace_id,
        parent_span_id=span_id,
        sampled=sampled,
        **kwargs
    )
def setup_once():
    # type: () -> None
    """Patch the AWS Lambda bootstrap so handlers are wrapped and the
    event queue is drained before results leave the runtime."""
    # On the 2.7/3.6 runtimes the bootstrap module runs as __main__; on the
    # 3.7 runtime it is importable as `bootstrap` instead.
    import __main__ as lambda_bootstrap  # type: ignore

    pre_37 = True  # Python 3.6 or 2.7

    if not hasattr(lambda_bootstrap, "handle_http_request"):
        try:
            import bootstrap as lambda_bootstrap  # type: ignore

            pre_37 = False  # Python 3.7
        except ImportError:
            pass

    if not hasattr(lambda_bootstrap, "handle_event_request"):
        # Neither module layout matched: not running on AWS Lambda at all.
        logger.warning("Not running in AWS Lambda environment, "
                       "AwsLambdaIntegration disabled")
        return

    if pre_37:
        # Older runtimes dispatch through two entry points; wrap both.
        old_handle_event_request = lambda_bootstrap.handle_event_request

        def sentry_handle_event_request(request_handler, *args, **kwargs):
            # type: (Any, *Any, **Any) -> Any
            request_handler = _wrap_handler(request_handler)
            return old_handle_event_request(request_handler, *args, **kwargs)

        lambda_bootstrap.handle_event_request = sentry_handle_event_request

        old_handle_http_request = lambda_bootstrap.handle_http_request

        def sentry_handle_http_request(request_handler, *args, **kwargs):
            # type: (Any, *Any, **Any) -> Any
            request_handler = _wrap_handler(request_handler)
            return old_handle_http_request(request_handler, *args, **kwargs)

        lambda_bootstrap.handle_http_request = sentry_handle_http_request

        # Patch to_json to drain the queue. This should work even when the
        # SDK is initialized inside of the handler
        old_to_json = lambda_bootstrap.to_json

        def sentry_to_json(*args, **kwargs):
            # type: (*Any, **Any) -> Any
            _drain_queue()
            return old_to_json(*args, **kwargs)

        lambda_bootstrap.to_json = sentry_to_json
    else:
        # The 3.7 runtime has a single entry point with an extra leading
        # runtime-client argument.
        old_handle_event_request = lambda_bootstrap.handle_event_request

        def sentry_handle_event_request(  # type: ignore
                lambda_runtime_client, request_handler, *args, **kwargs):
            request_handler = _wrap_handler(request_handler)
            return old_handle_event_request(lambda_runtime_client,
                                            request_handler, *args, **kwargs)

        lambda_bootstrap.handle_event_request = sentry_handle_event_request

        # Patch the runtime client to drain the queue. This should work
        # even when the SDK is initialized inside of the handler
        def _wrap_post_function(f):
            # type: (F) -> F
            def inner(*args, **kwargs):
                # type: (*Any, **Any) -> Any
                # Flush pending events before the result/error is posted
                # back to the AWS runtime.
                _drain_queue()
                return f(*args, **kwargs)

            return inner  # type: ignore

        lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function(
            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result)
        lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function(
            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error)