def process(self, request, project, auth, data, **kwargs):
    """Validate, normalize, scrub and store an incoming event payload.

    Returns the ``event_id`` of the stored event. Raises APIRateLimited,
    APIForbidden or APIError when the event is rejected.
    """
    # Announce receipt before any filtering so listeners see every attempt.
    event_received.send_robust(ip=request.META['REMOTE_ADDR'], sender=type(self))

    # TODO: improve this API (e.g. make RateLimit act on __ne__)
    limit_state = safe_execute(app.quotas.is_rate_limited, project=project)
    if isinstance(limit_state, bool):
        # Older quota backends return a bare bool; normalize to RateLimit.
        limit_state = RateLimit(is_limited=limit_state, retry_after=None)
    if limit_state is not None and limit_state.is_limited:
        raise APIRateLimited(limit_state.retry_after)

    # Plugins may veto event creation outright (only an explicit False blocks).
    if plugins.first('has_perm', request.user, 'create_event', project) is False:
        raise APIForbidden('Creation of this event was blocked')

    # Decode the raw payload according to its transport encoding.
    encoding = request.META.get('HTTP_CONTENT_ENCODING', '')
    decoder = {
        'gzip': decompress_gzip,
        'deflate': decompress_deflate,
    }.get(encoding)
    if decoder is not None:
        data = decoder(data)
    elif not data.startswith('{'):
        # No declared encoding and not raw JSON: assume base64/zlib wrapped.
        data = decode_and_decompress_data(data)
    data = safely_load_json_string(data)

    try:
        # mutates data
        validate_data(project, data, auth.client)
    except InvalidData as e:
        raise APIError(u'Invalid data: %s (%s)' % (six.text_type(e), type(e)))

    # mutates data
    data = EventManager(data).normalize()

    # insert IP address if not available
    if auth.is_public:
        ensure_has_ip(data, request.META['REMOTE_ADDR'])

    event_id = data['event_id']

    # We filter data immediately before it ever gets into the queue
    SensitiveDataFilter().apply(data)

    # mutates data (strips a lot of context if not queued)
    insert_data_to_database(data)

    logger.debug('New event from project %s/%s (id=%s)', project.team.slug,
                 project.slug, event_id)
    return event_id
def _decode_event(data, content_encoding):
    """Decode a raw event payload into a ``CanonicalKeyDict``.

    ``data`` may be bytes (optionally gzip/deflate compressed or
    base64+zlib wrapped) or already-decoded text; ``content_encoding``
    is the transport's Content-Encoding header value.
    """
    if isinstance(data, six.binary_type):
        if content_encoding == "gzip":
            data = decompress_gzip(data)
        elif content_encoding == "deflate":
            data = decompress_deflate(data)
        # BUG FIX: on Python 3 indexing bytes yields an int, so the old
        # `data[0] != b"{"` comparison was always true and raw JSON bytes
        # were wrongly routed through base64/zlib decoding. Slicing yields
        # a bytes object on both Python 2 and 3 (and is safe on empty input).
        elif data[:1] != b"{":
            data = decode_and_decompress_data(data)
        else:
            data = decode_data(data)
    if isinstance(data, six.text_type):
        data = safely_load_json_string(data)
    return CanonicalKeyDict(data)
def _decode_event(data, content_encoding):
    """Decode a raw event payload into a ``CanonicalKeyDict``.

    ``data`` may be bytes (optionally gzip/deflate compressed or
    base64+zlib wrapped) or already-decoded text; ``content_encoding``
    is the transport's Content-Encoding header value.
    """
    if isinstance(data, six.binary_type):
        if content_encoding == 'gzip':
            data = decompress_gzip(data)
        elif content_encoding == 'deflate':
            data = decompress_deflate(data)
        # BUG FIX: on Python 3 indexing bytes yields an int, so the old
        # `data[0] != b'{'` comparison was always true and raw JSON bytes
        # were wrongly routed through base64/zlib decoding. Slicing yields
        # a bytes object on both Python 2 and 3 (and is safe on empty input).
        elif data[:1] != b'{':
            data = decode_and_decompress_data(data)
        else:
            data = decode_data(data)
    if isinstance(data, six.text_type):
        data = safely_load_json_string(data)
    return CanonicalKeyDict(data)
def process(self, request, project, auth, data, **kwargs):
    """Validate, normalize and store an incoming event payload.

    Aggregates rate limits from the quota backend and all plugins before
    accepting the event. Returns the ``event_id`` of the stored event;
    raises APIRateLimited, APIForbidden or APIError when rejected.
    """
    event_received.send_robust(ip=request.META['REMOTE_ADDR'], sender=type(self))

    rate_limits = []

    # The quota backend may also return a bare bool (same as plugins below);
    # the original code never wrapped it, which made `.is_limited` blow up.
    quota_limit = safe_execute(app.quotas.is_rate_limited, project=project)
    if isinstance(quota_limit, bool):
        quota_limit = RateLimit(is_limited=quota_limit, retry_after=None)
    if quota_limit is not None:
        rate_limits.append(quota_limit)

    for plugin in plugins.all():
        rate_limit = safe_execute(plugin.is_rate_limited, project=project)
        # We must handle the case of plugins not returning new RateLimit objects
        if isinstance(rate_limit, bool):
            rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)
        # safe_execute returns None when the callee raised; skip those so the
        # aggregation below cannot AttributeError on a None entry.
        if rate_limit is not None:
            rate_limits.append(rate_limit)

    if any(limit.is_limited for limit in rate_limits):
        # retry_after may legitimately be None; max() over None values is a
        # TypeError on Python 3, so only compare the concrete delays.
        retry_delays = [limit.retry_after for limit in rate_limits
                        if limit.is_limited and limit.retry_after is not None]
        raise APIRateLimited(max(retry_delays) if retry_delays else None)

    result = plugins.first('has_perm', request.user, 'create_event', project)
    if result is False:
        raise APIForbidden('Creation of this event was blocked')

    content_encoding = request.META.get('HTTP_CONTENT_ENCODING', '')
    if content_encoding == 'gzip':
        data = decompress_gzip(data)
    elif content_encoding == 'deflate':
        data = decompress_deflate(data)
    elif not data.startswith('{'):
        data = decode_and_decompress_data(data)
    data = safely_load_json_string(data)

    try:
        # mutates data
        validate_data(project, data, auth.client)
    except InvalidData as e:
        # six.text_type instead of py2-only unicode(), matching the other
        # implementations in this file.
        raise APIError(u'Invalid data: %s (%s)' % (six.text_type(e), type(e)))

    # mutates data
    Group.objects.normalize_event_data(data)

    # insert IP address if not available
    if auth.is_public:
        ensure_has_ip(data, request.META['REMOTE_ADDR'])

    event_id = data['event_id']

    # mutates data (strips a lot of context if not queued)
    insert_data_to_database(data)

    logger.debug('New event from project %s/%s (id=%s)', project.team.slug,
                 project.slug, event_id)
    return event_id
def process(self, request, project, auth, data, **kwargs):
    """Validate, normalize and store an incoming event payload.

    The event is dropped if the quota backend or any plugin reports the
    project as rate limited. Returns the ``event_id`` of the stored event;
    raises APIRateLimited, APIForbidden or APIError when rejected.
    """
    # send_robust so a misbehaving signal receiver cannot abort ingestion
    # (the other process() implementations in this file already do this).
    event_received.send_robust(ip=request.META["REMOTE_ADDR"], sender=type(self))

    is_rate_limited = safe_execute(app.quotas.is_rate_limited, project=project)
    for plugin in plugins.all():
        # safe_execute returns None on plugin failure, which is falsy and
        # therefore treated as "not limited".
        if safe_execute(plugin.is_rate_limited, project=project):
            is_rate_limited = True

    if is_rate_limited:
        raise APIRateLimited

    result = plugins.first("has_perm", request.user, "create_event", project)
    if result is False:
        raise APIForbidden("Creation of this event was blocked")

    content_encoding = request.META.get("HTTP_CONTENT_ENCODING", "")
    if content_encoding == "gzip":
        data = decompress_gzip(data)
    elif content_encoding == "deflate":
        data = decompress_deflate(data)
    elif not data.startswith("{"):
        data = decode_and_decompress_data(data)
    data = safely_load_json_string(data)

    try:
        # mutates data
        validate_data(project, data, auth.client)
    except InvalidData as e:
        # six.text_type instead of py2-only unicode(), matching the other
        # implementations in this file.
        raise APIError(u"Invalid data: %s (%s)" % (six.text_type(e), type(e)))

    # mutates data
    Group.objects.normalize_event_data(data)

    # insert IP address if not available
    if auth.is_public:
        ensure_has_ip(data, request.META["REMOTE_ADDR"])

    event_id = data["event_id"]

    # mutates data (strips a lot of context if not queued)
    insert_data_to_database(data)

    logger.debug("New event from project %s/%s (id=%s)", project.team.slug,
                 project.slug, event_id)
    return event_id
def process(self, request, project, auth, data, **kwargs):
    """Validate, normalize, scrub and store an incoming event payload.

    Records tsdb received/rejected counters, enforces rate limits and
    permissions, deduplicates by event_id via a short-lived cache entry,
    and returns the event_id of the stored event. Raises APIRateLimited,
    APIForbidden, APIError or InvalidRequest when the event is rejected.
    """
    # Fired before any filtering so listeners observe every attempt;
    # send_robust keeps receiver exceptions from aborting ingestion.
    event_received.send_robust(ip=request.META['REMOTE_ADDR'], sender=type(self))

    # TODO: improve this API (e.g. make RateLimit act on __ne__)
    rate_limit = safe_execute(app.quotas.is_rate_limited, project=project,
                              _with_transaction=False)
    # Some quota backends return a bare bool; normalize to a RateLimit.
    if isinstance(rate_limit, bool):
        rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

    if rate_limit is not None and rate_limit.is_limited:
        # Rejected: count the event as both received and rejected.
        app.tsdb.incr_multi([
            (app.tsdb.models.project_total_received, project.id),
            (app.tsdb.models.project_total_rejected, project.id),
            (app.tsdb.models.organization_total_received, project.organization_id),
            (app.tsdb.models.organization_total_rejected, project.organization_id),
        ])
        raise APIRateLimited(rate_limit.retry_after)
    else:
        # Accepted (or quota check failed, rate_limit is None): received only.
        app.tsdb.incr_multi([
            (app.tsdb.models.project_total_received, project.id),
            (app.tsdb.models.organization_total_received, project.organization_id),
        ])

    # Only an explicit False from a plugin blocks creation; None means
    # "no opinion" and falls through.
    result = plugins.first('has_perm', request.user, 'create_event', project,
                           version=1)
    if result is False:
        raise APIForbidden('Creation of this event was blocked')

    # Decode the raw payload according to its transport encoding; payloads
    # with no declared encoding that don't look like raw JSON are assumed
    # to be base64/zlib wrapped.
    content_encoding = request.META.get('HTTP_CONTENT_ENCODING', '')
    if content_encoding == 'gzip':
        data = decompress_gzip(data)
    elif content_encoding == 'deflate':
        data = decompress_deflate(data)
    elif not data.startswith('{'):
        data = decode_and_decompress_data(data)
    data = safely_load_json_string(data)

    try:
        # mutates data
        validate_data(project, data, auth.client)
    except InvalidData as e:
        raise APIError(u'Invalid data: %s (%s)' % (six.text_type(e), type(e)))

    # mutates data
    manager = EventManager(data, version=auth.version)
    data = manager.normalize()

    scrub_ip_address = project.get_option('sentry:scrub_ip_address', False)

    # insert IP address if not available
    if auth.is_public and not scrub_ip_address:
        ensure_has_ip(data, request.META['REMOTE_ADDR'])

    event_id = data['event_id']

    # TODO(dcramer): ideally we'd only validate this if the event_id was
    # supplied by the user
    cache_key = 'ev:%s:%s' % (project.id, event_id,)

    # Dedup window: a non-None cache hit means this event_id was seen
    # within the last 5 minutes (see cache.set below).
    if cache.get(cache_key) is not None:
        logger.warning(
            'Discarded recent duplicate event from project %s/%s (id=%s)',
            project.organization.slug, project.slug, event_id)
        raise InvalidRequest('An event with the same ID already exists.')

    if project.get_option('sentry:scrub_data', True):
        # We filter data immediately before it ever gets into the queue
        inst = SensitiveDataFilter()
        inst.apply(data)

    if scrub_ip_address:
        # We filter data immediately before it ever gets into the queue
        ensure_does_not_have_ip(data)

    # mutates data (strips a lot of context if not queued)
    insert_data_to_database(data)

    # Arm the 5-minute dedup window only after a successful insert.
    cache.set(cache_key, '', 60 * 5)

    logger.debug('New event from project %s/%s (id=%s)',
                 project.organization.slug, project.slug, event_id)

    return event_id
def process(self, request, project, auth, data, **kwargs):
    """Validate, normalize, scrub and store an incoming event payload.

    Like the tsdb variant, but additionally emits ``events.total`` /
    ``events.dropped`` metrics and supports per-project sensitive-field
    configuration. Returns the event_id of the stored event; raises
    APIRateLimited, APIForbidden, APIError or InvalidRequest on rejection.
    """
    metrics.incr('events.total', 1)

    # Fired before any filtering so listeners observe every attempt;
    # send_robust keeps receiver exceptions from aborting ingestion.
    event_received.send_robust(ip=request.META['REMOTE_ADDR'], sender=type(self))

    # TODO: improve this API (e.g. make RateLimit act on __ne__)
    rate_limit = safe_execute(app.quotas.is_rate_limited, project=project,
                              _with_transaction=False)
    # Some quota backends return a bare bool; normalize to a RateLimit.
    if isinstance(rate_limit, bool):
        rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

    if rate_limit is not None and rate_limit.is_limited:
        # Rejected: count the event as both received and rejected.
        app.tsdb.incr_multi([
            (app.tsdb.models.project_total_received, project.id),
            (app.tsdb.models.project_total_rejected, project.id),
            (app.tsdb.models.organization_total_received, project.organization_id),
            (app.tsdb.models.organization_total_rejected, project.organization_id),
        ])
        metrics.incr('events.dropped', 1)
        raise APIRateLimited(rate_limit.retry_after)
    else:
        # Accepted (or quota check failed, rate_limit is None): received only.
        app.tsdb.incr_multi([
            (app.tsdb.models.project_total_received, project.id),
            (app.tsdb.models.organization_total_received, project.organization_id),
        ])

    # Only an explicit False from a plugin blocks creation; None means
    # "no opinion" and falls through.
    result = plugins.first('has_perm', request.user, 'create_event', project,
                           version=1)
    if result is False:
        metrics.incr('events.dropped', 1)
        raise APIForbidden('Creation of this event was blocked')

    # Decode the raw payload according to its transport encoding; payloads
    # with no declared encoding that don't look like raw JSON are assumed
    # to be base64/zlib wrapped.
    content_encoding = request.META.get('HTTP_CONTENT_ENCODING', '')
    if content_encoding == 'gzip':
        data = decompress_gzip(data)
    elif content_encoding == 'deflate':
        data = decompress_deflate(data)
    elif not data.startswith('{'):
        data = decode_and_decompress_data(data)
    data = safely_load_json_string(data)

    try:
        # mutates data
        validate_data(project, data, auth.client)
    except InvalidData as e:
        raise APIError(u'Invalid data: %s (%s)' % (six.text_type(e), type(e)))

    # mutates data
    manager = EventManager(data, version=auth.version)
    data = manager.normalize()

    scrub_ip_address = project.get_option('sentry:scrub_ip_address', False)

    # insert IP address if not available
    if auth.is_public and not scrub_ip_address:
        ensure_has_ip(data, request.META['REMOTE_ADDR'])

    event_id = data['event_id']

    # TODO(dcramer): ideally we'd only validate this if the event_id was
    # supplied by the user
    cache_key = 'ev:%s:%s' % (project.id, event_id,)

    # Dedup window: a non-None cache hit means this event_id was seen
    # within the last 5 minutes (see cache.set below).
    if cache.get(cache_key) is not None:
        logger.warning('Discarded recent duplicate event from project %s/%s (id=%s)',
                       project.organization.slug, project.slug, event_id)
        raise InvalidRequest('An event with the same ID already exists.')

    if project.get_option('sentry:scrub_data', True):
        # We filter data immediately before it ever gets into the queue
        inst = SensitiveDataFilter(project.get_option('sentry:sensitive_fields', None))
        inst.apply(data)

    if scrub_ip_address:
        # We filter data immediately before it ever gets into the queue
        ensure_does_not_have_ip(data)

    # mutates data (strips a lot of context if not queued)
    insert_data_to_database(data)

    # Arm the 5-minute dedup window only after a successful insert.
    cache.set(cache_key, '', 60 * 5)

    logger.debug('New event from project %s/%s (id=%s)',
                 project.organization.slug, project.slug, event_id)

    return event_id