class HttpTransport(Transport):
    """The default HTTP transport.

    Sends events and envelopes to Sentry over HTTP(S) from a background
    worker thread, honoring per-category rate limits communicated by the
    server.
    """

    def __init__(
        self, options  # type: Dict[str, Any]
    ):
        # type: (...) -> None
        from sentry_sdk.consts import VERSION

        Transport.__init__(self, options)
        assert self.parsed_dsn is not None
        self._worker = BackgroundWorker()
        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
        # data category (or None for "all categories") -> UTC time until
        # which that category is rate limited.
        self._disabled_until = {}  # type: Dict[Any, datetime]
        self._retry = urllib3.util.Retry()
        self.options = options

        self._pool = self._make_pool(
            self.parsed_dsn,
            http_proxy=options["http_proxy"],
            https_proxy=options["https_proxy"],
            ca_certs=options["ca_certs"],
        )

        from sentry_sdk import Hub

        self.hub_cls = Hub

    def _update_rate_limits(self, response):
        # type: (urllib3.HTTPResponse) -> None
        """Record rate limits communicated by the server on ``response``."""
        # new sentries with more rate limit insights. We honor this header
        # no matter of the status code to update our internal rate limits.
        # fix: the server sends "x-sentry-rate-limits" (plural); the singular
        # spelling never matched, so these limits were silently ignored.
        header = response.headers.get("x-sentry-rate-limits")
        if header:
            for limit in header.split(","):
                try:
                    retry_after, categories, _ = limit.strip().split(":", 2)
                    if retry_after.startswith("+"):
                        # relative value: seconds from now
                        retry_after = datetime.utcnow() + timedelta(
                            seconds=int(retry_after))
                    else:
                        # absolute value: unix timestamp
                        retry_after = datetime.utcfromtimestamp(
                            int(retry_after))
                    # fix: an empty category list means the limit applies to
                    # everything and must land in the `None` bucket that
                    # `_check_disabled` consults; `"".split(";")` is `[""]`
                    # (truthy), so the previous `or (None,)` fallback never
                    # fired and the limit was recorded under an unused ""
                    # bucket instead.
                    for category in categories and categories.split(
                            ";") or (None, ):
                        self._disabled_until[category] = retry_after
                except (LookupError, ValueError):
                    continue
        # old sentries only communicate global rate limit hits via the
        # retry-after header on 429. This header can also be emitted on new
        # sentries if a proxy in front wants to globally slow things down.
        elif response.status == 429:
            self._disabled_until[None] = datetime.utcnow() + timedelta(
                seconds=self._retry.get_retry_after(response) or 60)

    def _send_request(
        self,
        body,  # type: bytes
        headers,  # type: Dict[str, str]
    ):
        # type: (...) -> None
        """POST ``body`` to the store endpoint with auth headers attached."""
        headers.update({
            "User-Agent": str(self._auth.client),
            "X-Sentry-Auth": str(self._auth.to_header()),
        })
        response = self._pool.request("POST",
                                      str(self._auth.store_api_url),
                                      body=body,
                                      headers=headers)
        try:
            self._update_rate_limits(response)

            if response.status == 429:
                # if we hit a 429. Something was rate limited but we already
                # acted on this in `self._update_rate_limits`.
                pass
            elif response.status >= 300 or response.status < 200:
                logger.error(
                    "Unexpected status code: %s (body: %s)",
                    response.status,
                    response.data,
                )
        finally:
            # always release the connection back to the pool
            response.close()

    def _check_disabled(self, category):
        # type: (str) -> bool
        """Return True if ``category`` (or all categories) is rate limited."""
        def _disabled(bucket):
            # type: (Any) -> bool
            ts = self._disabled_until.get(bucket)
            return ts is not None and ts > datetime.utcnow()

        return _disabled(category) or _disabled(None)

    def _send_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        """Gzip ``event`` and send it unless its category is rate limited."""
        if self._check_disabled(get_event_data_category(event)):
            return None

        body = io.BytesIO()
        with gzip.GzipFile(fileobj=body, mode="w") as f:
            f.write(json.dumps(event, allow_nan=False).encode("utf-8"))

        assert self.parsed_dsn is not None
        logger.debug(
            "Sending event, type:%s level:%s event_id:%s project:%s host:%s" %
            (
                event.get("type") or "null",
                event.get("level") or "null",
                event.get("event_id") or "null",
                self.parsed_dsn.project_id,
                self.parsed_dsn.host,
            ))
        self._send_request(
            body.getvalue(),
            headers={
                "Content-Type": "application/json",
                "Content-Encoding": "gzip"
            },
        )
        return None

    def _send_envelope(
        self, envelope  # type: Envelope
    ):
        # type: (...) -> None
        """Gzip ``envelope`` and send it, dropping rate-limited items first."""
        # remove all items from the envelope which are over quota
        envelope.items[:] = [
            x for x in envelope.items
            if not self._check_disabled(x.data_category)
        ]
        if not envelope.items:
            return None

        body = io.BytesIO()
        with gzip.GzipFile(fileobj=body, mode="w") as f:
            envelope.serialize_into(f)

        assert self.parsed_dsn is not None
        logger.debug(
            "Sending envelope [%s] project:%s host:%s",
            envelope.description,
            self.parsed_dsn.project_id,
            self.parsed_dsn.host,
        )
        self._send_request(
            body.getvalue(),
            headers={
                "Content-Type": "application/x-sentry-envelope",
                "Content-Encoding": "gzip",
            },
        )
        return None

    def _get_pool_options(self, ca_certs):
        # type: (Optional[Any]) -> Dict[str, Any]
        return {
            "num_pools": 2,
            "cert_reqs": "CERT_REQUIRED",
            "ca_certs": ca_certs or certifi.where(),
        }

    def _make_pool(
        self,
        parsed_dsn,  # type: Dsn
        http_proxy,  # type: Optional[str]
        https_proxy,  # type: Optional[str]
        ca_certs,  # type: Optional[Any]
    ):
        # type: (...) -> Union[PoolManager, ProxyManager]
        proxy = None

        # try HTTPS first; an explicit "" disables proxy resolution
        if parsed_dsn.scheme == "https" and (https_proxy != ""):
            proxy = https_proxy or getproxies().get("https")

        # maybe fallback to HTTP proxy
        if not proxy and (http_proxy != ""):
            proxy = http_proxy or getproxies().get("http")

        opts = self._get_pool_options(ca_certs)

        if proxy:
            return urllib3.ProxyManager(proxy, **opts)
        else:
            return urllib3.PoolManager(**opts)

    def capture_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        hub = self.hub_cls.current

        def send_event_wrapper():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_event(event)

        self._worker.submit(send_event_wrapper)

    def capture_envelope(
        self, envelope  # type: Envelope
    ):
        # type: (...) -> None
        hub = self.hub_cls.current

        def send_envelope_wrapper():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_envelope(envelope)

        self._worker.submit(send_envelope_wrapper)

    def flush(
        self,
        timeout,  # type: float
        callback=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        logger.debug("Flushing HTTP transport")
        if timeout > 0:
            self._worker.flush(timeout, callback)

    def kill(self):
        # type: () -> None
        logger.debug("Killing HTTP transport")
        self._worker.kill()
class HttpTransport(Transport):
    """The default HTTP transport."""

    def __init__(self, options):
        # type: (ClientOptions) -> None
        Transport.__init__(self, options)
        self._worker = BackgroundWorker(
            options)  # enhance: pass options to worker class
        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
        # UTC time until which sending is disabled (set on 429), or None.
        self._disabled_until = None  # type: Optional[datetime]
        self._retry = urllib3.util.Retry()
        self.options = options

        self._pool = self._make_pool(
            self.parsed_dsn,
            http_proxy=options["http_proxy"],
            https_proxy=options["https_proxy"],
            ca_certs=options["ca_certs"],
        )

        from sentry_sdk import Hub

        self.hub_cls = Hub

    def _send_event(self, event):
        # type: (Dict[str, Any]) -> None
        """Gzip ``event`` and POST it to the store endpoint.

        Runs on the background worker thread.
        """
        if self._disabled_until is not None:
            if datetime.utcnow() < self._disabled_until:
                # still rate limited: silently drop the event
                return
            self._disabled_until = None

        body = io.BytesIO()
        with gzip.GzipFile(fileobj=body, mode="w") as f:
            f.write(json.dumps(event, allow_nan=False).encode("utf-8"))

        logger.debug("Sending %s event [%s] to %s project:%s" % (
            event.get("level") or "error",
            event["event_id"],
            self.parsed_dsn.host,
            self.parsed_dsn.project_id,
        ))

        # fixed: 1) move request into try block 2) set timeout from options,
        # there's no point in waiting forever
        try:
            response = self._pool.request(
                "POST",
                str(self._auth.store_api_url),
                timeout=self.options.get('timeout', None),
                body=body.getvalue(),
                headers={
                    "X-Sentry-Auth": str(self._auth.to_header()),
                    "Content-Type": "application/json",
                    "Content-Encoding": "gzip",
                },
            )
        except Exception:
            # fix: the previous bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt and hid every error without a trace; catch
            # Exception only and log before giving up on this event.
            logger.exception("Error sending event to Sentry")
            return

        try:
            if response.status == 429:
                self._disabled_until = datetime.utcnow() + timedelta(
                    seconds=self._retry.get_retry_after(response) or 60)
                return
            elif response.status >= 300 or response.status < 200:
                raise ValueError("Unexpected status code: %s" %
                                 response.status)
        finally:
            # fix: release the connection back to the pool (the other
            # transport variants do this; without it connections leak).
            response.close()

        self._disabled_until = None

    def _get_pool_options(self, ca_certs):
        # type: (Optional[Any]) -> Dict[str, Any]
        return {
            "num_pools": 2,
            "cert_reqs": "CERT_REQUIRED",
            "ca_certs": ca_certs or certifi.where(),
            # enhance: add retries configuration
            "retries": self.options.get('retries', False)
        }

    def _make_pool(
        self,
        parsed_dsn,  # type: Dsn
        http_proxy,  # type: Optional[str]
        https_proxy,  # type: Optional[str]
        ca_certs,  # type: Optional[Any]
    ):
        # type: (...) -> Union[PoolManager, ProxyManager]
        # Use http_proxy if scheme is https and https_proxy is not set
        proxy = parsed_dsn.scheme == "https" and https_proxy or http_proxy
        if not proxy:
            proxy = getproxies().get(parsed_dsn.scheme)

        opts = self._get_pool_options(ca_certs)

        if proxy:
            return urllib3.ProxyManager(proxy, **opts)
        else:
            return urllib3.PoolManager(**opts)

    def capture_event(self, event):
        # type: (Dict[str, Any]) -> bool
        hub = self.hub_cls.current

        def send_event_wrapper():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_event(event)

        # enhance: return the submit result from the worker queue
        return self._worker.submit(send_event_wrapper)

    def flush(self, timeout, callback=None):
        # type: (float, Optional[Any]) -> None
        logger.debug("Flushing HTTP transport")
        if timeout > 0:
            self._worker.flush(timeout, callback)

    def kill(self):
        # type: () -> None
        logger.debug("Killing HTTP transport")
        self._worker.kill()
class HttpTransport(Transport):
    """The default HTTP transport.

    Sends events and envelopes from a background worker thread, tracks
    per-category rate limits, and periodically reports discarded events
    back to Sentry as "client reports".
    """

    def __init__(
        self, options  # type: Dict[str, Any]
    ):
        # type: (...) -> None
        from sentry_sdk.consts import VERSION

        Transport.__init__(self, options)
        assert self.parsed_dsn is not None
        self.options = options  # type: Dict[str, Any]
        # Bounded queue: `submit` returns False once it is full.
        self._worker = BackgroundWorker(
            queue_size=options["transport_queue_size"])
        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
        # data category (or None for all categories) -> UTC time until
        # which that category is rate limited.
        self._disabled_until = {}  # type: Dict[DataCategory, datetime]
        self._retry = urllib3.util.Retry()
        # (category, reason) -> quantity; flushed periodically as a
        # "client_report" envelope item.
        self._discarded_events = defaultdict(
            int)  # type: DefaultDict[Tuple[str, str], int]
        self._last_client_report_sent = time.time()

        self._pool = self._make_pool(
            self.parsed_dsn,
            http_proxy=options["http_proxy"],
            https_proxy=options["https_proxy"],
            ca_certs=options["ca_certs"],
        )

        from sentry_sdk import Hub

        self.hub_cls = Hub

    def record_lost_event(
        self,
        reason,  # type: str
        data_category=None,  # type: Optional[str]
        item=None,  # type: Optional[Item]
    ):
        # type: (...) -> None
        """Count a discarded event for the next client report.

        Either ``data_category`` or ``item`` must be given; an ``item``
        supplies its own data category.
        """
        if not self.options["send_client_reports"]:
            return

        quantity = 1
        if item is not None:
            data_category = item.data_category
            if data_category == "attachment":
                # quantity of 0 is actually 1 as we do not want to count
                # empty attachments as actually empty.
                quantity = len(item.get_bytes()) or 1
        elif data_category is None:
            raise TypeError("data category not provided")

        self._discarded_events[data_category, reason] += quantity

    def _update_rate_limits(self, response):
        # type: (urllib3.HTTPResponse) -> None
        """Record rate limits communicated by the server on ``response``."""
        # new sentries with more rate limit insights. We honor this header
        # no matter of the status code to update our internal rate limits.
        header = response.headers.get("x-sentry-rate-limits")
        if header:
            logger.warning("Rate-limited via x-sentry-rate-limits")
            self._disabled_until.update(_parse_rate_limits(header))

        # old sentries only communicate global rate limit hits via the
        # retry-after header on 429. This header can also be emitted on new
        # sentries if a proxy in front wants to globally slow things down.
        elif response.status == 429:
            logger.warning("Rate-limited via 429")
            self._disabled_until[None] = datetime.utcnow() + timedelta(
                seconds=self._retry.get_retry_after(response) or 60)

    def _send_request(
        self,
        body,  # type: bytes
        headers,  # type: Dict[str, str]
        endpoint_type="store",  # type: EndpointType
        envelope=None,  # type: Optional[Envelope]
    ):
        # type: (...) -> None
        """POST ``body`` to the endpoint, recording losses and rate limits."""

        def record_loss(reason):
            # type: (str) -> None
            # For an envelope, record one loss per contained item; a bare
            # event is accounted under the "error" data category.
            if envelope is None:
                self.record_lost_event(reason, data_category="error")
            else:
                for item in envelope.items:
                    self.record_lost_event(reason, item=item)

        headers.update({
            "User-Agent": str(self._auth.client),
            "X-Sentry-Auth": str(self._auth.to_header()),
        })
        try:
            response = self._pool.request(
                "POST",
                str(self._auth.get_api_url(endpoint_type)),
                body=body,
                headers=headers,
            )
        except Exception:
            self.on_dropped_event("network")
            record_loss("network_error")
            raise

        try:
            self._update_rate_limits(response)

            if response.status == 429:
                # if we hit a 429. Something was rate limited but we already
                # acted on this in `self._update_rate_limits`. Note that we
                # do not want to record event loss here as we will have recorded
                # an outcome in relay already.
                self.on_dropped_event("status_429")
                pass
            elif response.status >= 300 or response.status < 200:
                logger.error(
                    "Unexpected status code: %s (body: %s)",
                    response.status,
                    response.data,
                )
                self.on_dropped_event("status_{}".format(response.status))
                record_loss("network_error")
        finally:
            # always release the connection back to the pool
            response.close()

    def on_dropped_event(self, reason):
        # type: (str) -> None
        # Hook for subclasses/tests; the base implementation does nothing.
        return None

    def _fetch_pending_client_report(self, force=False, interval=60):
        # type: (bool, int) -> Optional[Item]
        """Return a client-report Item for accumulated discards, or None.

        Resets the discard counters; reports at most once per ``interval``
        seconds unless ``force`` is set.
        """
        if not self.options["send_client_reports"]:
            return None

        if not (force
                or self._last_client_report_sent < time.time() - interval):
            return None

        # Swap out the counter dict so concurrent records go to a fresh one.
        discarded_events = self._discarded_events
        self._discarded_events = defaultdict(int)
        self._last_client_report_sent = time.time()

        if not discarded_events:
            return None

        return Item(
            PayloadRef(
                json={
                    "timestamp": time.time(),
                    "discarded_events": [{
                        "reason": reason,
                        "category": category,
                        "quantity": quantity
                    } for (
                        (category, reason),
                        quantity,
                    ) in discarded_events.items()],
                }),
            type="client_report",
        )

    def _flush_client_reports(self, force=False):
        # type: (bool) -> None
        """Send any pending client report as its own envelope."""
        client_report = self._fetch_pending_client_report(force=force,
                                                          interval=60)
        if client_report is not None:
            self.capture_envelope(Envelope(items=[client_report]))

    def _check_disabled(self, category):
        # type: (str) -> bool
        """Return True if ``category`` (or all categories) is rate limited."""
        def _disabled(bucket):
            # type: (Any) -> bool
            ts = self._disabled_until.get(bucket)
            return ts is not None and ts > datetime.utcnow()

        return _disabled(category) or _disabled(None)

    def _send_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        """Gzip ``event`` and send it unless the "error" category is limited."""
        if self._check_disabled("error"):
            self.on_dropped_event("self_rate_limits")
            self.record_lost_event("ratelimit_backoff", data_category="error")
            return None

        body = io.BytesIO()
        with gzip.GzipFile(fileobj=body, mode="w") as f:
            f.write(json_dumps(event))

        assert self.parsed_dsn is not None
        logger.debug(
            "Sending event, type:%s level:%s event_id:%s project:%s host:%s" %
            (
                event.get("type") or "null",
                event.get("level") or "null",
                event.get("event_id") or "null",
                self.parsed_dsn.project_id,
                self.parsed_dsn.host,
            ))
        self._send_request(
            body.getvalue(),
            headers={
                "Content-Type": "application/json",
                "Content-Encoding": "gzip"
            },
        )
        return None

    def _send_envelope(
        self, envelope  # type: Envelope
    ):
        # type: (...) -> None
        """Send ``envelope``, dropping rate-limited items and attaching any
        pending client report."""
        # remove all items from the envelope which are over quota
        new_items = []
        for item in envelope.items:
            if self._check_disabled(item.data_category):
                if item.data_category in ("transaction", "error", "default"):
                    self.on_dropped_event("self_rate_limits")
                self.record_lost_event("ratelimit_backoff", item=item)
            else:
                new_items.append(item)

        # Since we're modifying the envelope here make a copy so that others
        # that hold references do not see their envelope modified.
        envelope = Envelope(headers=envelope.headers, items=new_items)

        if not envelope.items:
            return None

        # since we're already in the business of sending out an envelope here
        # check if we have one pending for the stats session envelopes so we
        # can attach it to this enveloped scheduled for sending. This will
        # currently typically attach the client report to the most recent
        # session update.
        client_report_item = self._fetch_pending_client_report(interval=30)
        if client_report_item is not None:
            envelope.items.append(client_report_item)

        body = io.BytesIO()
        with gzip.GzipFile(fileobj=body, mode="w") as f:
            envelope.serialize_into(f)

        assert self.parsed_dsn is not None
        logger.debug(
            "Sending envelope [%s] project:%s host:%s",
            envelope.description,
            self.parsed_dsn.project_id,
            self.parsed_dsn.host,
        )

        self._send_request(
            body.getvalue(),
            headers={
                "Content-Type": "application/x-sentry-envelope",
                "Content-Encoding": "gzip",
            },
            endpoint_type="envelope",
            envelope=envelope,
        )
        return None

    def _get_pool_options(self, ca_certs):
        # type: (Optional[Any]) -> Dict[str, Any]
        return {
            "num_pools": 2,
            "cert_reqs": "CERT_REQUIRED",
            "ca_certs": ca_certs or certifi.where(),
        }

    def _in_no_proxy(self, parsed_dsn):
        # type: (Dsn) -> bool
        """True if the DSN host matches an entry in the no-proxy setting."""
        no_proxy = getproxies().get("no")
        if not no_proxy:
            return False
        for host in no_proxy.split(","):
            host = host.strip()
            if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(
                    host):
                return True
        return False

    def _make_pool(
        self,
        parsed_dsn,  # type: Dsn
        http_proxy,  # type: Optional[str]
        https_proxy,  # type: Optional[str]
        ca_certs,  # type: Optional[Any]
    ):
        # type: (...) -> Union[PoolManager, ProxyManager]
        proxy = None
        no_proxy = self._in_no_proxy(parsed_dsn)

        # try HTTPS first; an explicit "" disables proxy resolution
        if parsed_dsn.scheme == "https" and (https_proxy != ""):
            proxy = https_proxy or (not no_proxy and getproxies().get("https"))

        # maybe fallback to HTTP proxy
        if not proxy and (http_proxy != ""):
            proxy = http_proxy or (not no_proxy and getproxies().get("http"))

        opts = self._get_pool_options(ca_certs)

        if proxy:
            return urllib3.ProxyManager(proxy, **opts)
        else:
            return urllib3.PoolManager(**opts)

    def capture_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        hub = self.hub_cls.current

        def send_event_wrapper():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_event(event)
                    self._flush_client_reports()

        # A full queue means the event is lost; account for it.
        if not self._worker.submit(send_event_wrapper):
            self.on_dropped_event("full_queue")
            self.record_lost_event("queue_overflow", data_category="error")

    def capture_envelope(
        self, envelope  # type: Envelope
    ):
        # type: (...) -> None
        hub = self.hub_cls.current

        def send_envelope_wrapper():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_envelope(envelope)
                    self._flush_client_reports()

        # A full queue means every item in the envelope is lost.
        if not self._worker.submit(send_envelope_wrapper):
            self.on_dropped_event("full_queue")
            for item in envelope.items:
                self.record_lost_event("queue_overflow", item=item)

    def flush(
        self,
        timeout,  # type: float
        callback=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        logger.debug("Flushing HTTP transport")

        if timeout > 0:
            # Flush any pending client report before draining the queue.
            self._worker.submit(lambda: self._flush_client_reports(force=True))
            self._worker.flush(timeout, callback)

    def kill(self):
        # type: () -> None
        logger.debug("Killing HTTP transport")
        self._worker.kill()
class HttpTransport(Transport):
    """The default HTTP transport."""

    def __init__(self, options):
        # type: (ClientOptions) -> None
        Transport.__init__(self, options)
        assert self.parsed_dsn is not None
        self.options = options
        self._worker = BackgroundWorker()
        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
        # UTC time until which sending is disabled (set on 429), or None.
        self._disabled_until = None  # type: Optional[datetime]
        self._retry = urllib3.util.Retry()
        self._pool = self._make_pool(
            self.parsed_dsn,
            http_proxy=options["http_proxy"],
            https_proxy=options["https_proxy"],
            ca_certs=options["ca_certs"],
        )

        from sentry_sdk import Hub

        self.hub_cls = Hub

    def _send_event(self, event):
        # type: (Event) -> None
        """Compress ``event`` and POST it to the store endpoint.

        Runs on the background worker thread; drops the event silently
        while a rate-limit backoff is in effect.
        """
        deadline = self._disabled_until
        if deadline is not None:
            if datetime.utcnow() < deadline:
                return
            self._disabled_until = None

        buf = io.BytesIO()
        with gzip.GzipFile(fileobj=buf, mode="w") as gz:
            gz.write(json.dumps(event, allow_nan=False).encode("utf-8"))

        assert self.parsed_dsn is not None
        logger.debug("Sending %s event [%s] to %s project:%s" % (
            event.get("level") or "error",
            event["event_id"],
            self.parsed_dsn.host,
            self.parsed_dsn.project_id,
        ))

        response = self._pool.request(
            "POST",
            str(self._auth.store_api_url),
            body=buf.getvalue(),
            headers={
                "X-Sentry-Auth": str(self._auth.to_header()),
                "Content-Type": "application/json",
                "Content-Encoding": "gzip",
            },
        )
        try:
            status = response.status
            if status == 429:
                # Back off for the server-suggested interval (default 60s).
                backoff = self._retry.get_retry_after(response) or 60
                self._disabled_until = datetime.utcnow() + timedelta(
                    seconds=backoff)
                return
            if not (200 <= status < 300):
                logger.error(
                    "Unexpected status code: %s (body: %s)",
                    status,
                    response.data,
                )
        finally:
            response.close()

        self._disabled_until = None

    def _get_pool_options(self, ca_certs):
        # type: (Optional[Any]) -> Dict[str, Any]
        opts = {"num_pools": 2, "cert_reqs": "CERT_REQUIRED"}
        opts["ca_certs"] = ca_certs or certifi.where()
        return opts

    def _make_pool(
        self,
        parsed_dsn,  # type: Dsn
        http_proxy,  # type: Optional[str]
        https_proxy,  # type: Optional[str]
        ca_certs,  # type: Optional[Any]
    ):
        # type: (...) -> Union[PoolManager, ProxyManager]
        proxy = None
        # Try an HTTPS proxy first; an explicit "" opts out of resolution.
        if parsed_dsn.scheme == "https" and https_proxy != "":
            proxy = https_proxy or getproxies().get("https")
        # Otherwise maybe fall back to an HTTP proxy.
        if http_proxy != "" and not proxy:
            proxy = http_proxy or getproxies().get("http")

        opts = self._get_pool_options(ca_certs)
        if not proxy:
            return urllib3.PoolManager(**opts)
        return urllib3.ProxyManager(proxy, **opts)

    def capture_event(self, event):
        # type: (Event) -> None
        hub = self.hub_cls.current

        def task():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_event(event)

        self._worker.submit(task)

    def flush(self, timeout, callback=None):
        # type: (float, Optional[Any]) -> None
        logger.debug("Flushing HTTP transport")
        if timeout > 0:
            self._worker.flush(timeout, callback)

    def kill(self):
        # type: () -> None
        logger.debug("Killing HTTP transport")
        self._worker.kill()
class SQSTransport(Transport):
    """Transport that forwards serialized store requests to an AWS SQS queue."""

    def __init__(
        self, options  # type: Dict[str, Any]
    ):
        # type: (...) -> None
        from sentry_sdk.consts import VERSION

        Transport.__init__(self, options)
        assert self.parsed_dsn is not None
        self._worker = BackgroundWorker()
        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
        self.options = options
        # Queue URL from options, with an environment-variable fallback.
        self._sqs_queue_url = (options.get('sqs_queue_url')
                               or os.environ.get('SENTRY_SQS_QUEUE_URL'))
        self._sqs_client_kwargs = options['sqs_client_kwargs']

        from sentry_sdk import Hub

        self.hub_cls = Hub

    def _send_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        """Serialize ``event`` and enqueue it as an SQS message."""
        # As this is ran in a thread, create the client per call.
        sqs_client = boto3.client('sqs', **self._sqs_client_kwargs)

        compressed = io.BytesIO()
        with gzip.GzipFile(fileobj=compressed, mode="w") as gz:
            gz.write(json.dumps(event, allow_nan=False).encode("utf-8"))

        assert self.parsed_dsn is not None
        assert self._sqs_queue_url is not None
        logger.debug(
            "Sending event to SQS, type:%s level:%s event_id:%s project:%s host:%s"
            % (
                event.get("type") or "null",
                event.get("level") or "null",
                event.get("event_id") or "null",
                self.parsed_dsn.project_id,
                self.parsed_dsn.host,
            ))

        message_body = json.dumps({
            'method': 'POST',
            'headers': {
                "User-Agent": str(self._auth.client),
                "X-Sentry-Auth": str(self._auth.to_header()),
                "Content-Type": "application/json",
                "Content-Encoding": "gzip",
            },
            'url': str(self._auth.store_api_url),
            'body': base64.b64encode(compressed.getvalue()).decode()
        })

        # TODO if message is greater than 256KiB then SQS wont take it, should
        # add S3 ref fallback system
        try:
            sqs_client.send_message(QueueUrl=self._sqs_queue_url,
                                    MessageBody=message_body)
        except Exception as err:
            logger.exception('Unexpected error whilst putting message on SQS',
                             exc_info=err)

    def capture_event(
        self, event  # type: Event
    ):
        # type: (...) -> None
        hub = self.hub_cls.current

        def task():
            # type: () -> None
            with hub:
                with capture_internal_exceptions():
                    self._send_event(event)

        self._worker.submit(task)

    def flush(
        self,
        timeout,  # type: float
        callback=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        logger.debug("Flushing SQS transport")
        if timeout > 0:
            self._worker.flush(timeout, callback)

    def kill(self):
        # type: () -> None
        logger.debug("Killing SQS transport")
        self._worker.kill()