def test_returns_cannonical_dict():
    """get_data() must return a CanonicalKeyDict both before and after normalize()."""
    from sentry.utils.canonical import CanonicalKeyDict

    manager = EventManager(make_event())
    assert isinstance(manager.get_data(), CanonicalKeyDict)
    manager.normalize()
    assert isinstance(manager.get_data(), CanonicalKeyDict)
def test_invalid_tags(self):
    """A non-pair tag entry (42) is normalized to None and dropped on save."""
    manager = EventManager(make_event(tags=[42]))
    manager.normalize()
    normalized_tags = manager.get_data().get('tags', [])
    assert None in normalized_tags
    assert 42 not in normalized_tags

    event = manager.save(self.project.id)
    assert 42 not in event.tags
    assert None not in event.tags
def test_logger():
    """Invalid logger names (embedded newline, empty string) fall back to the default."""
    for invalid_name in ("foo\nbar", ""):
        manager = EventManager(make_event(logger=invalid_name))
        manager.normalize()
        assert manager.get_data()['logger'] == DEFAULT_LOGGER_NAME
def test_logger():
    """Invalid logger names fall back to the default without recording an error."""
    manager = EventManager(make_event(logger="foo\nbar"))
    manager.normalize()
    assert manager.get_data()['logger'] == DEFAULT_LOGGER_NAME

    manager = EventManager(make_event(logger=""))
    manager.normalize()
    data = manager.get_data()
    assert data['logger'] == DEFAULT_LOGGER_NAME
    # normalization must not attach a validation error for `logger`
    assert all(e.get('name') != 'logger' for e in data['errors'])
def get(self, request):
    """Render an HTML preview of an activity-notification email.

    Builds a fully synthetic org/project/group/event (nothing is persisted)
    and feeds it through ActivityMailPreview so the template can be viewed
    in a browser.
    """
    org = Organization(
        id=1,
        slug='organization',
        name='My Company',
    )
    project = Project(
        id=1,
        organization=org,
        slug='project',
        name='My Project',
    )
    # One fake group from the generator, seeded from the request.
    group = next(
        make_group_generator(
            get_random(request),
            project,
        ),
    )

    # Start from the canned python payload; drop `logentry` so the
    # group's message is what ends up rendered.
    data = dict(load_data('python'))
    data['message'] = group.message
    data.pop('logentry', None)

    event_manager = EventManager(data)
    event_manager.normalize()
    data = event_manager.get_data()
    event_type = event_manager.get_event_type()

    group.message = event_manager.get_search_message()
    group.data = {
        'type': event_type.key,
        'metadata': event_type.get_metadata(data),
    }

    # Fixed datetime keeps the preview stable between renders.
    event = Event(
        id=1,
        project=project,
        message=event_manager.get_search_message(),
        group=group,
        datetime=datetime(2016, 6, 13, 3, 8, 24, tzinfo=timezone.utc),
        data=event_manager.get_data()
    )

    activity = Activity(
        group=event.group,
        project=event.project,
        **self.get_activity(request, event)
    )

    return render_to_response(
        'sentry/debug/mail/preview.html', {
            'preview': ActivityMailPreview(request, activity),
            'format': request.GET.get('format'),
        }
    )
def test_event_id_lowercase():
    """Uppercase hex event ids are lowercased for both str and unicode inputs."""
    for raw_id in ('1234ABCD' * 4, u'1234ABCD' * 4):
        manager = EventManager(make_event(event_id=raw_id))
        manager.normalize()
        assert manager.get_data()['event_id'] == '1234abcd' * 4
def test_long_culprit():
    """An over-long culprit is truncated to MAX_CULPRIT_LENGTH."""
    oversized = 'x' * (MAX_CULPRIT_LENGTH + 1)
    manager = EventManager(make_event(culprit=oversized))
    manager.normalize()
    assert len(manager.get_data()['culprit']) == MAX_CULPRIT_LENGTH
def test_long_transaction():
    """An over-long transaction name is truncated to MAX_CULPRIT_LENGTH."""
    oversized = 'x' * (MAX_CULPRIT_LENGTH + 1)
    manager = EventManager(make_event(transaction=oversized))
    manager.normalize()
    assert len(manager.get_data()['transaction']) == MAX_CULPRIT_LENGTH
def test_moves_stacktrace_to_exception():
    """A top-level stacktrace is relocated into the sole exception value."""
    manager = EventManager(
        make_event(
            exception={'type': 'MyException'},
            stacktrace={
                'frames': [
                    {'lineno': 1, 'filename': 'foo.py'},
                    {'lineno': 1, 'filename': 'bar.py'},
                ]
            },
        )
    )
    manager.normalize()
    data = manager.get_data()

    frames = data['exception']['values'][0]['stacktrace']['frames']
    assert [(f['filename'], f['lineno']) for f in frames] == [
        ('foo.py', 1),
        ('bar.py', 1),
    ]
    # the top-level key must be gone after the move
    assert 'stacktrace' not in data
def create_event(group, event_id=None, normalize=True, **kwargs):
    """Legacy test factory: build and persist an Event directly.

    XXX: Do not use this method for new tests! Prefer `store_event`.
    """
    if event_id is None:
        event_id = uuid4().hex
    # Fill in defaults from the group / canned payload; `kwargs['data']`
    # is mutated in place below, hence the deepcopy default.
    kwargs.setdefault('project', group.project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    if kwargs.get('tags'):
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace

    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user

    kwargs['data'].setdefault(
        'errors', [{
            'type': EventError.INVALID_DATA,
            'name': 'foobar',
        }]
    )

    # maintain simple event Factories by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs['message'] or '<unlabeled event>',
        }

    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs['data']))
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['data'].update(manager.materialize_metadata())
        kwargs['message'] = manager.get_search_message()

    # This is needed so that create_event saves the event in nodestore
    # under the correct key. This is usually done in EventManager.save()
    kwargs['data'].setdefault(
        'node_id',
        Event.generate_node_id(kwargs['project'].id, event_id)
    )

    event = Event(event_id=event_id, group=group, **kwargs)
    EventMapping.objects.create(
        project_id=event.project.id,
        event_id=event_id,
        group=group,
    )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def inner(data):
    """Snapshot normalization errors and the JSON form of the sdk interface."""
    manager = EventManager(data={"sdk": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': event.interfaces.get('sdk').to_json(),
    })
def test_removes_some_empty_containers(key, value):
    """Normalization strips the given empty container from the payload."""
    payload = make_event()
    payload[key] = value
    manager = EventManager(payload)
    manager.normalize()
    assert key not in manager.get_data()
def inner(data):
    """Snapshot normalization errors and JSON form of the debug_meta interface."""
    manager = EventManager(data={"debug_meta": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    iface = event.interfaces.get('debug_meta')
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': iface and iface.to_json(),
    })
def create_event(self, event_id=None, normalize=True, **kwargs):
    """Legacy fixture helper: build and save an Event directly (not via ingestion)."""
    if event_id is None:
        event_id = uuid4().hex
    if 'group' not in kwargs:
        kwargs['group'] = self.group
    # Fill in defaults; `kwargs['data']` is mutated in place below,
    # hence the deepcopy default.
    kwargs.setdefault('project', kwargs['group'].project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    if kwargs.get('tags'):
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace

    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user

    kwargs['data'].setdefault(
        'errors', [{
            'type': EventError.INVALID_DATA,
            'name': 'foobar',
        }]
    )

    # maintain simple event fixtures by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs.get('message') or '<unlabeled event>',
        }

    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs['data']), for_store=False)
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['message'] = manager.get_search_message()
    else:
        # NOTE(review): `kwargs.setdefault('message', ...)` above always puts
        # 'message' into kwargs, so this assert looks unreachable without a
        # prior pop — verify intent.
        assert 'message' not in kwargs, 'do not pass message this way'

    event = Event(event_id=event_id, **kwargs)
    EventMapping.objects.create(
        project_id=event.project.id,
        event_id=event_id,
        group=event.group,
    )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def test_event_pii():
    """_meta attached to the payload survives normalization untouched."""
    manager = EventManager(
        make_event(
            message='foo bar',
            _meta={'message': {'': {'err': ['invalid']}}},
        )
    )
    manager.normalize()
    assert manager.get_data()['_meta']['message'] == {'': {'err': ['invalid']}}
def test_environment_tag_removed(environment):
    """An `environment` tag is promoted to the top-level field, not kept as a tag."""
    payload = make_event()
    payload['environment'] = environment
    payload['tags'] = {"environment": "production"}
    manager = EventManager(payload)
    manager.normalize()
    data = manager.get_data()
    assert 'environment' not in dict(data.get('tags') or ())
    assert data['environment'] == 'production'
def inner(data):
    """Snapshot errors and JSON form of a single normalized stacktrace frame."""
    manager = EventManager(data={"stacktrace": {"frames": [data]}})
    manager.normalize()
    event = Event(data=manager.get_data())
    first_frame = event.interfaces['stacktrace'].frames[0]
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': first_frame.to_json(),
    })
def test_event_pii():
    """_meta for a scrubbed user id survives normalization untouched."""
    manager = EventManager(
        make_event(
            user={"id": None},
            _meta={'user': {"id": {'': {'err': ['invalid']}}}},
        )
    )
    manager.normalize()
    assert manager.get_data()['_meta']['user']['id'] == {'': {'err': ['invalid']}}
def test_deprecated_attrs(key):
    """Deprecated top-level attributes are dropped silently (no error recorded)."""
    payload = make_event()
    payload[key] = "some value"
    manager = EventManager(payload)
    manager.normalize()
    data = manager.get_data()
    assert key not in data
    assert not data.get('errors')
def inner(data):
    """Snapshot errors and JSON form of the user's geo sub-interface."""
    manager = EventManager(data={"user": {"id": "123", "geo": data}})
    manager.normalize()
    event = Event(data=manager.get_data())
    geo = event.interfaces['user'].geo
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': geo and geo.to_json(),
    })
def inner(data):
    """Snapshot errors and JSON form of the breadcrumbs interface."""
    manager = EventManager(data={"breadcrumbs": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    crumbs = event.interfaces.get('breadcrumbs')
    insta_snapshot({
        'errors': event.data.get("errors"),
        'to_json': crumbs and crumbs.to_json(),
    })
def test_long_message():
    """Messages longer than SENTRY_MAX_MESSAGE_LENGTH are truncated."""
    oversized = 'x' * (settings.SENTRY_MAX_MESSAGE_LENGTH + 1)
    manager = EventManager(make_event(message=oversized))
    manager.normalize()
    formatted = manager.get_data()['logentry']['formatted']
    assert len(formatted) == settings.SENTRY_MAX_MESSAGE_LENGTH
def test_default_event_type(self):
    """A plain message event gets type 'default' and title metadata."""
    manager = EventManager(make_event(message='foo bar'))
    manager.normalize()
    assert manager.get_data()['type'] == 'default'

    event = manager.save(self.project.id)
    group = event.group
    assert group.data.get('type') == 'default'
    assert group.data.get('metadata') == {'title': 'foo bar'}
def inner(data):
    """Snapshot errors, JSON form and API context of the threads interface."""
    manager = EventManager(data={"threads": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    threads = event.interfaces.get('threads')
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': threads and threads.to_json(),
        'api_context': threads and threads.get_api_context(),
    })
def inner(data):
    """Snapshot errors and JSON form of the request interface."""
    manager = EventManager(data={"request": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    request_iface = event.interfaces.get('request')
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': request_iface.to_json(),
    })
def store_event(data, project_id, assert_no_errors=True):
    """Normalize and save an event the way real ingestion does.

    Prefer this over `create_event` in new tests.
    """
    manager = EventManager(data)
    manager.normalize()
    if assert_no_errors:
        normalization_errors = manager.get_data().get('errors')
        assert not normalization_errors, normalization_errors
    saved_event = manager.save(project_id)
    saved_event.group.save()
    return saved_event
def inner(data):
    """Snapshot errors, JSON form and derived tags of the contexts interface."""
    manager = EventManager(data={"contexts": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    contexts = event.interfaces.get('contexts')
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': contexts.to_json(),
        'tags': sorted(contexts.iter_tags()),
    })
def inner(data):
    """Snapshot errors plus JSON/API/string renderings of the exception interface."""
    manager = EventManager(data={"exception": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    exc = event.interfaces.get('exception')
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': exc and exc.to_json(),
        'get_api_context': exc and exc.get_api_context(),
        'to_string': exc and exc.to_string(event),
    })
def inner(data):
    """Snapshot errors plus JSON/string renderings of the stacktrace interface."""
    manager = EventManager(data={"stacktrace": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    stacktrace = event.interfaces.get('stacktrace')
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': stacktrace and stacktrace.to_json(),
        'get_stacktrace': stacktrace and stacktrace.get_stacktrace(event),
        'to_string': stacktrace and stacktrace.to_string(event),
    })
def inner(data):
    """Snapshot errors, JSON form and tags of an exception mechanism."""
    manager = EventManager(data={
        "exception": {"values": [{"type": "FooError", "mechanism": data}]}
    })
    manager.normalize()
    event = Event(data=manager.get_data())
    mech = event.interfaces['exception'].values[0].mechanism
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': mech.to_json(),
        'tags': sorted(mech.iter_tags()),
    })
def validate_and_normalize(report, client_ip=None):
    """Run *report* through event normalization and return the resulting data."""
    mgr = EventManager(report, client_ip=client_ip)
    mgr.normalize()
    return mgr.get_data()
def test_key_id_remains_in_data(self):
    """key_id set on the payload survives both normalization and save."""
    manager = EventManager(make_event(key_id=12345))
    manager.normalize()
    assert manager.get_data()['key_id'] == 12345

    saved = manager.save(1)
    assert saved.data['key_id'] == 12345
def get_culprit(data):
    """Normalize *data* and compute its culprit via the shared implementation."""
    manager = EventManager(data)
    manager.normalize()
    return get_culprit_impl(manager.get_data())
def process(self, request, project, key, auth, helper, data, attachments=None, **kwargs):
    """Normalize an incoming store payload, optionally mirror it to Kafka,
    and hand it to `process_event` for saving.

    Returns the event id (as a string).
    Raises APIError when the request carried no JSON payload.
    """
    metrics.incr('events.total')

    if not data:
        raise APIError('No JSON data was found')

    remote_addr = request.META['REMOTE_ADDR']
    event_manager = EventManager(
        data,
        project=project,
        key=key,
        auth=auth,
        client_ip=remote_addr,
        user_agent=helper.context.agent,
        version=auth.version,
        content_encoding=request.META.get('HTTP_CONTENT_ENCODING', ''),
    )
    # Free the raw payload reference; the manager owns it from here.
    del data
    self.pre_normalize(event_manager, helper)
    event_manager.normalize()

    agent = request.META.get('HTTP_USER_AGENT')

    # TODO: Some form of coordination between the Kafka consumer
    # and this method (the 'relay') to decide whether a 429 should
    # be returned here.

    # Everything before this will eventually be done in the relay.
    if (kafka_publisher is not None
            and not attachments
            and random.random() < options.get('store.kafka-sample-rate')):
        process_in_kafka = options.get('store.process-in-kafka')

        try:
            kafka_publisher.publish(
                channel=getattr(settings, 'KAFKA_EVENTS_PUBLISHER_TOPIC', 'store-events'),
                # Relay will (eventually) need to produce a Kafka message
                # with this JSON format.
                value=json.dumps({
                    'data': event_manager.get_data(),
                    'project_id': project.id,
                    'auth': {
                        'sentry_client': auth.client,
                        'sentry_version': auth.version,
                        'sentry_secret': auth.secret_key,
                        'sentry_key': auth.public_key,
                        'is_public': auth.is_public,
                    },
                    'remote_addr': remote_addr,
                    'agent': agent,
                    # Whether or not the Kafka consumer is in charge
                    # of actually processing this event.
                    'should_process': process_in_kafka,
                }))
        except Exception as e:
            # NOTE(review): `e.message` is Python 2 only; breaks on py3 — verify
            # target runtime before porting.
            logger.exception("Cannot publish event to Kafka: {}".format(
                e.message))
        else:
            if process_in_kafka:
                # This event will be processed by the Kafka consumer, so we
                # shouldn't double process it here.
                return event_manager.get_data()['event_id']

    # Everything after this will eventually be done in a Kafka consumer.
    return process_event(event_manager, project, key,
                         remote_addr, helper, attachments)
def _do_save_event(cache_key=None,
                   data=None,
                   start_time=None,
                   event_id=None,
                   project_id=None,
                   **kwargs):
    """
    Saves an event to the database.

    Fetches the payload from the processing cache when only `cache_key` is
    given, saves via EventManager, then writes the updated payload back to
    the cache for post-processing. Always cleans up reprocessing state and
    the attachment cache, and emits timing metrics.
    """
    set_current_project(project_id)

    from sentry.event_manager import EventManager, HashDiscarded

    event_type = "none"

    if cache_key and data is None:
        with metrics.timer(
                "tasks.store.do_save_event.get_cache") as metric_tags:
            data = event_processing_store.get(cache_key)
            if data is not None:
                metric_tags["event_type"] = event_type = data.get(
                    "type") or "none"

    with metrics.global_tags(event_type=event_type):
        if data is not None:
            data = CanonicalKeyDict(data)

        if event_id is None and data is not None:
            event_id = data["event_id"]

        # only when we come from reprocessing we get a project_id sent into
        # the task.
        if project_id is None:
            project_id = data.pop("project")
            set_current_project(project_id)

        # We only need to delete raw events for events that support
        # reprocessing. If the data cannot be found we want to assume
        # that we need to delete the raw event.
        if not data or reprocessing.event_supports_reprocessing(data):
            with metrics.timer("tasks.store.do_save_event.delete_raw_event"):
                delete_raw_event(project_id, event_id, allow_hint_clear=True)

        # This covers two cases: where data is None because we did not manage
        # to fetch it from the default cache or the empty dictionary was
        # stored in the default cache. The former happens if the event
        # expired while being on the queue, the second happens on reprocessing
        # if the raw event was deleted concurrently while we held on to
        # it. This causes the node store to delete the data and we end up
        # fetching an empty dict. We could in theory not invoke `save_event`
        # in those cases but it's important that we always clean up the
        # reprocessing reports correctly or they will screw up the UI. So
        # to future proof this correctly we just handle this case here.
        if not data:
            metrics.incr("events.failed",
                         tags={
                             "reason": "cache",
                             "stage": "post"
                         },
                         skip_internal=False)
            return

        try:
            with metrics.timer("tasks.store.do_save_event.event_manager.save"):
                manager = EventManager(data)
                # event.project.organization is populated after this statement.
                manager.save(project_id,
                             assume_normalized=True,
                             start_time=start_time,
                             cache_key=cache_key)
                # Put the updated event back into the cache so that post_process
                # has the most recent data.
                data = manager.get_data()
                if isinstance(data, CANONICAL_TYPES):
                    data = dict(data.items())
                with metrics.timer(
                        "tasks.store.do_save_event.write_processing_cache"):
                    event_processing_store.store(data)
        except HashDiscarded:
            # Delete the event payload from cache since it won't show up in post-processing.
            if cache_key:
                with metrics.timer("tasks.store.do_save_event.delete_cache"):
                    event_processing_store.delete_by_key(cache_key)
                    event_processing_store.delete_by_key(
                        _get_unprocessed_key(cache_key))
        finally:
            reprocessing2.mark_event_reprocessed(data)
            if cache_key:
                with metrics.timer(
                        "tasks.store.do_save_event.delete_attachment_cache"):
                    attachment_cache.delete(cache_key)

            if start_time:
                metrics.timing("events.time-to-process",
                               time() - start_time,
                               instance=data["platform"])

            time_synthetic_monitoring_event(data, project_id, start_time)
def get_normalized_event(data, project):
    """Normalize *data* for *project* and return it as a plain dict."""
    manager = EventManager(data, project=project)
    manager.normalize()
    return dict(manager.get_data())
def test_explicit_version():
    """A protocol version passed to EventManager wins over the default."""
    manager = EventManager(make_event(), "6")
    manager.normalize()
    assert manager.get_data()["version"] == "6"
def test_long_culprit():
    """An over-long culprit is truncated to MAX_CULPRIT_LENGTH."""
    oversized = "x" * (MAX_CULPRIT_LENGTH + 1)
    manager = EventManager(make_event(culprit=oversized))
    manager.normalize()
    assert len(manager.get_data()["culprit"]) == MAX_CULPRIT_LENGTH
def process(self, request, project, key, auth, helper, data, attachments=None, **kwargs):
    """Full store pipeline for one event: normalize, inbound-filter,
    rate-limit, dedupe by event id, scrub sensitive data/IP, then queue
    for insertion.

    Returns the event id. Raises APIError / APIForbidden / APIRateLimited
    on the respective rejection paths.
    """
    metrics.incr('events.total')

    if not data:
        raise APIError('No JSON data was found')

    remote_addr = request.META['REMOTE_ADDR']
    event_mgr = EventManager(
        data,
        project=project,
        key=key,
        auth=auth,
        client_ip=remote_addr,
        user_agent=helper.context.agent,
        version=auth.version,
        content_encoding=request.META.get('HTTP_CONTENT_ENCODING', ''),
    )
    # Free the raw payload reference; the manager owns it from here.
    del data
    self.pre_normalize(event_mgr, helper)
    event_mgr.normalize()

    event_received.send_robust(ip=remote_addr, project=project, sender=type(self))

    start_time = time()
    tsdb_start_time = to_datetime(start_time)

    # Inbound data filters (browser extensions, legacy browsers, ...).
    should_filter, filter_reason = event_mgr.should_filter()
    if should_filter:
        increment_list = [
            (tsdb.models.project_total_received, project.id),
            (tsdb.models.project_total_blacklisted, project.id),
            (tsdb.models.organization_total_received, project.organization_id),
            (tsdb.models.organization_total_blacklisted, project.organization_id),
            (tsdb.models.key_total_received, key.id),
            (tsdb.models.key_total_blacklisted, key.id),
        ]
        try:
            increment_list.append(
                (FILTER_STAT_KEYS_TO_VALUES[filter_reason], project.id))
        # should error when filter_reason does not match a key in FILTER_STAT_KEYS_TO_VALUES
        except KeyError:
            pass

        tsdb.incr_multi(
            increment_list,
            timestamp=tsdb_start_time,
        )

        metrics.incr('events.blacklisted', tags={'reason': filter_reason})
        event_filtered.send_robust(
            ip=remote_addr,
            project=project,
            sender=type(self),
        )
        raise APIForbidden('Event dropped due to filter: %s' % (filter_reason, ))

    # TODO: improve this API (e.g. make RateLimit act on __ne__)
    rate_limit = safe_execute(quotas.is_rate_limited,
                              project=project,
                              key=key,
                              _with_transaction=False)
    if isinstance(rate_limit, bool):
        rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

    # XXX(dcramer): when the rate limiter fails we drop events to ensure
    # it cannot cascade
    if rate_limit is None or rate_limit.is_limited:
        if rate_limit is None:
            api_logger.debug(
                'Dropped event due to error with rate limiter')
        tsdb.incr_multi(
            [
                (tsdb.models.project_total_received, project.id),
                (tsdb.models.project_total_rejected, project.id),
                (tsdb.models.organization_total_received,
                 project.organization_id),
                (tsdb.models.organization_total_rejected,
                 project.organization_id),
                (tsdb.models.key_total_received, key.id),
                (tsdb.models.key_total_rejected, key.id),
            ],
            timestamp=tsdb_start_time,
        )
        metrics.incr(
            'events.dropped',
            tags={
                'reason': rate_limit.reason_code if rate_limit else 'unknown',
            })
        event_dropped.send_robust(
            ip=remote_addr,
            project=project,
            sender=type(self),
            reason_code=rate_limit.reason_code if rate_limit else None,
        )
        if rate_limit is not None:
            raise APIRateLimited(rate_limit.retry_after)
    else:
        tsdb.incr_multi(
            [
                (tsdb.models.project_total_received, project.id),
                (tsdb.models.organization_total_received,
                 project.organization_id),
                (tsdb.models.key_total_received, key.id),
            ],
            timestamp=tsdb_start_time,
        )

    org_options = OrganizationOption.objects.get_all_values(
        project.organization_id)

    data = event_mgr.get_data()
    del event_mgr

    event_id = data['event_id']

    # TODO(dcramer): ideally we'd only validate this if the event_id was
    # supplied by the user
    cache_key = 'ev:%s:%s' % (
        project.id,
        event_id,
    )

    if cache.get(cache_key) is not None:
        raise APIForbidden(
            'An event with the same ID already exists (%s)' % (event_id, ))

    scrub_ip_address = (
        org_options.get('sentry:require_scrub_ip_address', False) or
        project.get_option('sentry:scrub_ip_address', False))
    scrub_data = (org_options.get('sentry:require_scrub_data', False) or
                  project.get_option('sentry:scrub_data', True))

    if scrub_data:
        # We filter data immediately before it ever gets into the queue
        sensitive_fields_key = 'sentry:sensitive_fields'
        sensitive_fields = (org_options.get(sensitive_fields_key, []) +
                            project.get_option(sensitive_fields_key, []))

        exclude_fields_key = 'sentry:safe_fields'
        exclude_fields = (org_options.get(exclude_fields_key, []) +
                          project.get_option(exclude_fields_key, []))

        scrub_defaults = (
            org_options.get('sentry:require_scrub_defaults', False) or
            project.get_option('sentry:scrub_defaults', True))

        SensitiveDataFilter(
            fields=sensitive_fields,
            include_defaults=scrub_defaults,
            exclude_fields=exclude_fields,
        ).apply(data)

    if scrub_ip_address:
        # We filter data immediately before it ever gets into the queue
        helper.ensure_does_not_have_ip(data)

    # mutates data (strips a lot of context if not queued)
    helper.insert_data_to_database(data,
                                   start_time=start_time,
                                   attachments=attachments)

    # Short-lived dedupe marker keyed by project + event id.
    cache.set(cache_key, '', 60 * 5)

    api_logger.debug('New event received (%s)', event_id)

    event_accepted.send_robust(
        ip=remote_addr,
        data=data,
        project=project,
        sender=type(self),
    )

    return event_id
def alert(request):
    """Render a preview of the error alert email using synthetic data.

    Builds a fake org/project/group, normalizes and saves a canned event,
    then renders the error email templates with a hard-coded commit payload.
    """
    platform = request.GET.get("platform", "python")
    org = Organization(id=1, slug="example", name="Example")
    project = Project(id=1, slug="example", name="Example", organization=org)

    random = get_random(request)
    group = next(make_group_generator(random, project))

    # Canned SDK payload with a fixed event id and deterministic tags.
    data = dict(load_data(platform))
    data["message"] = group.message
    data["event_id"] = "44f1419e73884cd2b45c79918f4b6dc4"
    data.pop("logentry", None)
    data["environment"] = "prod"
    data["tags"] = [
        ("logger", "javascript"),
        ("environment", "prod"),
        ("level", "error"),
        ("device", "Other"),
    ]

    event_manager = EventManager(data)
    event_manager.normalize()
    data = event_manager.get_data()
    event = event_manager.save(project.id)
    # Prevent CI screenshot from constantly changing
    event.data["timestamp"] = 1504656000.0  # datetime(2017, 9, 6, 0, 0)
    event_type = get_event_type(event.data)

    group.message = event.search_message
    group.data = {
        "type": event_type.key,
        "metadata": event_type.get_metadata(data)
    }

    rule = Rule(label="An example rule")

    # XXX: this interface_list code needs to be the same as in
    # src/sentry/mail/adapter.py
    interface_list = []
    for interface in six.itervalues(event.interfaces):
        body = interface.to_email_html(event)
        if not body:
            continue
        text_body = interface.to_string(event)
        interface_list.append(
            (interface.get_title(), mark_safe(body), text_body))

    return MailPreview(
        html_template="sentry/emails/error.html",
        text_template="sentry/emails/error.txt",
        context={
            "rule": rule,
            "group": group,
            "event": event,
            "link": "http://example.com/link",
            "interfaces": interface_list,
            "tags": event.tags,
            "project_label": project.slug,
            "commits": [{
                # TODO(dcramer): change to use serializer
                "repository": {
                    "status": "active",
                    "name": "Example Repo",
                    "url": "https://github.com/example/example",
                    "dateCreated": "2018-02-28T23:39:22.402Z",
                    "provider": {
                        "id": "github",
                        "name": "GitHub"
                    },
                    "id": "1",
                },
                "score": 2,
                "subject": "feat: Do something to raven/base.py",
                "message": "feat: Do something to raven/base.py\naptent vivamus vehicula tempus volutpat hac tortor",
                "id": "1b17483ffc4a10609e7921ee21a8567bfe0ed006",
                "shortId": "1b17483",
                "author": {
                    "username": "******",
                    "isManaged": False,
                    "lastActive": "2018-03-01T18:25:28.149Z",
                    "id": "1",
                    "isActive": True,
                    "has2fa": False,
                    "name": "*****@*****.**",
                    "avatarUrl": "https://secure.gravatar.com/avatar/51567a4f786cd8a2c41c513b592de9f9?s=32&d=mm",
                    "dateJoined": "2018-02-27T22:04:32.847Z",
                    "emails": [{
                        "is_verified": False,
                        "id": "1",
                        "email": "*****@*****.**"
                    }],
                    "avatar": {
                        "avatarUuid": None,
                        "avatarType": "letter_avatar"
                    },
                    "lastLogin": "******",
                    "email": "*****@*****.**",
                },
            }],
        },
    ).render(request)
def alert(request):
    """Render a preview of the error alert email (legacy variant).

    Builds a fake org/project/group, normalizes and saves a canned event,
    pins the event datetime for stable screenshots, then renders the error
    email templates with a hard-coded commit payload.
    """
    platform = request.GET.get('platform', 'python')
    org = Organization(
        id=1,
        slug='example',
        name='Example',
    )
    project = Project(
        id=1,
        slug='example',
        name='Example',
        organization=org,
    )

    random = get_random(request)
    group = next(make_group_generator(random, project), )

    # Canned SDK payload with a fixed event id and deterministic tags.
    data = dict(load_data(platform))
    data['message'] = group.message
    data['event_id'] = '44f1419e73884cd2b45c79918f4b6dc4'
    data.pop('logentry', None)
    data['environment'] = 'prod'
    data['tags'] = [('logger', 'javascript'), ('environment', 'prod'),
                    ('level', 'error'), ('device', 'Other')]

    event_manager = EventManager(data)
    event_manager.normalize()
    data = event_manager.get_data()
    event = event_manager.save(project.id)
    # Prevent Percy screenshot from constantly changing
    event.datetime = datetime(2017, 9, 6, 0, 0)
    event.save()
    event_type = event_manager.get_event_type()

    group.message = event_manager.get_search_message()
    group.data = {
        'type': event_type.key,
        'metadata': event_type.get_metadata(data),
    }

    rule = Rule(label="An example rule")

    # Collect (title, html) pairs for every interface that renders for email.
    interface_list = []
    for interface in six.itervalues(event.interfaces):
        body = interface.to_email_html(event)
        if not body:
            continue
        interface_list.append((interface.get_title(), mark_safe(body)))

    return MailPreview(
        html_template='sentry/emails/error.html',
        text_template='sentry/emails/error.txt',
        context={
            'rule': rule,
            'group': group,
            'event': event,
            'link': 'http://example.com/link',
            'interfaces': interface_list,
            'tags': event.get_tags(),
            'project_label': project.slug,
            'commits': [{
                # TODO(dcramer): change to use serializer
                "repository": {
                    "status": "active",
                    "name": "Example Repo",
                    "url": "https://github.com/example/example",
                    "dateCreated": "2018-02-28T23:39:22.402Z",
                    "provider": {
                        "id": "github",
                        "name": "GitHub"
                    },
                    "id": "1"
                },
                "score": 2,
                "subject": "feat: Do something to raven/base.py",
                "message": "feat: Do something to raven/base.py\naptent vivamus vehicula tempus volutpat hac tortor",
                "id": "1b17483ffc4a10609e7921ee21a8567bfe0ed006",
                "shortId": "1b17483",
                "author": {
                    "username": "******",
                    "isManaged": False,
                    "lastActive": "2018-03-01T18:25:28.149Z",
                    "id": "1",
                    "isActive": True,
                    "has2fa": False,
                    "name": "*****@*****.**",
                    "avatarUrl": "https://secure.gravatar.com/avatar/51567a4f786cd8a2c41c513b592de9f9?s=32&d=mm",
                    "dateJoined": "2018-02-27T22:04:32.847Z",
                    "emails": [{
                        "is_verified": False,
                        "id": "1",
                        "email": "*****@*****.**"
                    }],
                    "avatar": {
                        "avatarUuid": None,
                        "avatarType": "letter_avatar"
                    },
                    "lastLogin": "******",
                    "email": "*****@*****.**"
                }
            }],
        },
    ).render(request)
def test_tags_as_list():
    """Tag tuples submitted as a list are normalized to list-of-lists."""
    manager = EventManager(make_event(tags=[('foo', 'bar')]))
    manager.normalize()
    assert manager.get_data()['tags'] == [['foo', 'bar']]
def test_tags_as_dict():
    """Tags submitted as a dict are normalized to list-of-lists."""
    manager = EventManager(make_event(tags={'foo': 'bar'}))
    manager.normalize()
    assert manager.get_data()['tags'] == [['foo', 'bar']]
def validate_and_normalize(data):
    """Run *data* through event normalization and return the result."""
    mgr = EventManager(data)
    mgr.normalize()
    return mgr.get_data()
def test_long_transaction():
    """An over-long transaction name is truncated to MAX_CULPRIT_LENGTH."""
    oversized = "x" * (MAX_CULPRIT_LENGTH + 1)
    manager = EventManager(make_event(transaction=oversized))
    manager.normalize()
    assert len(manager.get_data()["transaction"]) == MAX_CULPRIT_LENGTH
def test_tags_as_dict():
    """Tags submitted as a dict are normalized to list-of-lists."""
    manager = EventManager(make_event(tags={"foo": "bar"}))
    manager.normalize()
    assert manager.get_data()["tags"] == [["foo", "bar"]]
def test_empty_message():
    """An empty message produces no logentry at all."""
    manager = EventManager(make_event(message=''))
    manager.normalize()
    assert 'logentry' not in manager.get_data()
def test_interface_none():
    """An interface explicitly set to None is dropped from the payload."""
    manager = EventManager(make_event(user=None))
    manager.normalize()
    assert 'user' not in manager.get_data()
def test_interface_is_relabeled():
    """The legacy 'sentry.interfaces.User' key is renamed to 'user'."""
    # dotted key is not a valid identifier, so it must go through **kwargs
    manager = EventManager(make_event(**{"sentry.interfaces.User": {'id': '1'}}))
    manager.normalize()
    assert manager.get_data()['user'] == {'id': '1'}
def test_tags_none():
    """tags=None normalizes to no (or empty) tags."""
    manager = EventManager(make_event(tags=None))
    manager.normalize()
    assert not manager.get_data().get('tags')
def process(
    self, request, project, key, auth, helper, data, project_config, attachments=None, **kwargs
):
    """Normalize an incoming store payload, enforce outcome tracking and a
    10MB post-normalization size cap, then hand off to `process_event`.

    Raises APIError for missing JSON or invalid transactions, and
    APIForbidden for oversized payloads; each rejection records an outcome.
    """
    disable_transaction_events()
    metrics.incr("events.total", skip_internal=False)

    project_id = project_config.project_id
    organization_id = project_config.organization_id

    if not data:
        track_outcome(organization_id, project_id, key.id, Outcome.INVALID, "no_data")
        raise APIError("No JSON data was found")

    remote_addr = request.META["REMOTE_ADDR"]

    event_manager = EventManager(
        data,
        project=project,
        key=key,
        auth=auth,
        client_ip=remote_addr,
        user_agent=helper.context.agent,
        version=auth.version,
        content_encoding=request.META.get("HTTP_CONTENT_ENCODING", ""),
        project_config=project_config,
    )
    # Free the raw payload reference; the manager owns it from here.
    del data

    self.pre_normalize(event_manager, helper)

    try:
        event_manager.normalize()
    except ProcessingErrorInvalidTransaction as e:
        track_outcome(
            organization_id,
            project_id,
            key.id,
            Outcome.INVALID,
            "invalid_transaction",
            category=DataCategory.TRANSACTION,
        )
        # Only the first line of the processing error is user-facing.
        raise APIError(six.text_type(e).split("\n", 1)[0])

    data = event_manager.get_data()
    dict_data = dict(data)
    data_size = len(json.dumps(dict_data))

    if data_size > 10000000:
        metrics.timing("events.size.rejected", data_size)
        track_outcome(
            organization_id,
            project_id,
            key.id,
            Outcome.INVALID,
            "too_large",
            event_id=dict_data.get("event_id"),
            category=DataCategory.from_event_type(dict_data.get("type")),
        )
        raise APIForbidden("Event size exceeded 10MB after normalization.")

    metrics.timing("events.size.data.post_storeendpoint", data_size)

    return process_event(
        event_manager, project, key, remote_addr, helper, attachments, project_config
    )
def test_default_version():
    """Without an explicit protocol version, events default to '5'."""
    manager = EventManager(make_event())
    manager.normalize()
    assert manager.get_data()['version'] == '5'
def digest(request): random = get_random(request) # TODO: Refactor all of these into something more manageable. org = Organization(id=1, slug="example", name="Example Organization") project = Project(id=1, slug="example", name="Example Project", organization=org) rules = { i: Rule(id=i, project=project, label="Rule #%s" % (i, )) for i in range(1, random.randint(2, 4)) } state = { "project": project, "groups": {}, "rules": rules, "event_counts": {}, "user_counts": {}, } records = [] group_generator = make_group_generator(random, project) for i in range(random.randint(1, 30)): group = next(group_generator) state["groups"][group.id] = group offset = timedelta(seconds=0) for i in range(random.randint(1, 10)): offset += timedelta(seconds=random.random() * 120) data = dict(load_data("python")) data["message"] = group.message data.pop("logentry", None) event_manager = EventManager(data) event_manager.normalize() data = event_manager.get_data() data["timestamp"] = random.randint(to_timestamp(group.first_seen), to_timestamp(group.last_seen)) event = eventstore.create_event(event_id=uuid.uuid4().hex, group_id=group.id, project_id=project.id, data=data.data) records.append( Record( event.event_id, Notification( event, random.sample(list(state["rules"].keys()), random.randint(1, len(state["rules"]))), ), to_timestamp(event.datetime), )) state["event_counts"][group.id] = random.randint(10, 1e4) state["user_counts"][group.id] = random.randint(10, 1e4) digest = build_digest(project, records, state) start, end, counts = get_digest_metadata(digest) context = { "project": project, "counts": counts, "digest": digest, "start": start, "end": end, "referrer": "digest_email", } add_unsubscribe_link(context) return MailPreview( html_template="sentry/emails/digests/body.html", text_template="sentry/emails/digests/body.txt", context=context, ).render(request)
def test_explicit_version():
    """A protocol version passed to EventManager wins over the default."""
    manager = EventManager(make_event(), '6')
    manager.normalize()
    assert manager.get_data()['version'] == '6'
def test_tags_as_list():
    """Tag tuples submitted as a list are normalized to list-of-lists."""
    manager = EventManager(make_event(tags=[("foo", "bar")]))
    manager.normalize()
    assert manager.get_data()["tags"] == [["foo", "bar"]]