def test_long_culprit():
    """Culprit values longer than MAX_CULPRIT_LENGTH are truncated on normalize."""
    oversized = 'x' * (MAX_CULPRIT_LENGTH + 1)
    manager = EventManager(make_event(culprit=oversized))
    manager.normalize()
    assert len(manager.get_data()['culprit']) == MAX_CULPRIT_LENGTH
def test_user_report_gets_environment(self):
    """Saving an event backfills the environment onto a pre-existing
    UserReport filed for the same event id."""
    project = self.create_project()
    environment = Environment.objects.create(
        project_id=project.id,
        organization_id=project.organization_id,
        name='production',
    )
    environment.add_project(project)

    event_id = 'a' * 32
    group = self.create_group(project=project)
    # The report is created *before* the event exists, so it has no
    # environment yet.
    UserReport.objects.create(
        group=group,
        project=project,
        event_id=event_id,
        name='foo',
        email='*****@*****.**',
        comments='It Broke!!!',
    )
    manager = EventManager(
        self.make_event(
            environment=environment.name,
            event_id=event_id,
            group=group))
    manager.normalize()
    manager.save(project.id)
    # save() should have attached the event's environment to the report.
    assert UserReport.objects.get(event_id=event_id).environment == environment
def test_group_release_with_env(self):
    """A GroupRelease row is created per (release, environment) pair."""
    manager = EventManager(
        make_event(release='1.0', environment='prod', event_id='a' * 32)
    )
    manager.normalize()
    event = manager.save(1)

    release = Release.objects.get(version='1.0', projects=event.project_id)
    assert GroupRelease.objects.filter(
        release_id=release.id,
        group_id=event.group_id,
        environment='prod',
    ).exists()

    # A second event on the same release in a different environment yields
    # a second GroupRelease row.
    manager = EventManager(
        make_event(release='1.0', environment='staging', event_id='b' * 32)
    )
    event = manager.save(1)

    release = Release.objects.get(version='1.0', projects=event.project_id)
    assert GroupRelease.objects.filter(
        release_id=release.id,
        group_id=event.group_id,
        environment='staging',
    ).exists()
def test_long_transaction():
    """Transaction names are clamped to MAX_CULPRIT_LENGTH during normalization."""
    too_long = 'x' * (MAX_CULPRIT_LENGTH + 1)
    manager = EventManager(make_event(transaction=too_long))
    manager.normalize()
    assert len(manager.get_data()['transaction']) == MAX_CULPRIT_LENGTH
def test_invalid_environment(self):
    """An environment name containing '/' is rejected and never tagged."""
    manager = EventManager(make_event(environment='bad/name'))
    manager.normalize()
    event = manager.save(self.project.id)
    tags = dict(event.tags)
    assert tags.get('environment') is None
def test_updates_group(self):
    """A second event with the same checksum updates the existing group's
    counters, last_seen, message and metadata rather than creating a new
    group."""
    timestamp = time() - 300
    manager = EventManager(
        make_event(
            message='foo',
            event_id='a' * 32,
            checksum='a' * 32,
            timestamp=timestamp,
        )
    )
    manager.normalize()
    event = manager.save(1)

    manager = EventManager(
        make_event(
            message='foo bar',
            event_id='b' * 32,
            checksum='a' * 32,  # same checksum -> groups into the same issue
            timestamp=timestamp + 2.0,
        )
    )
    manager.normalize()

    with self.tasks():
        event2 = manager.save(1)

    group = Group.objects.get(id=event.group_id)

    assert group.times_seen == 2
    assert group.last_seen == event2.datetime
    assert group.message == event2.message
    assert group.data.get('type') == 'default'
    # The group title tracks the most recent event's message.
    assert group.data.get('metadata') == {
        'title': 'foo bar',
    }
def test_does_not_unresolve_group(self, plugin_is_regression):
    """With regression detection disabled, a later event in the same group
    must leave a RESOLVED group resolved."""
    # N.B. EventManager won't unresolve the group unless the event2 has a
    # later timestamp than event1.
    plugin_is_regression.return_value = False

    manager = EventManager(
        make_event(
            event_id='a' * 32,
            checksum='a' * 32,
            timestamp=1403007314,
        )
    )
    with self.tasks():
        manager.normalize()
        event = manager.save(1)

    group = Group.objects.get(id=event.group_id)
    group.status = GroupStatus.RESOLVED
    group.save()
    assert group.is_resolved()

    manager = EventManager(
        make_event(
            event_id='b' * 32,
            checksum='a' * 32,
            timestamp=1403007315,  # one second later than event1
        )
    )
    manager.normalize()
    event2 = manager.save(1)
    assert event.group_id == event2.group_id

    group = Group.objects.get(id=group.id)
    assert group.is_resolved()
def test_moves_stacktrace_to_exception():
    """A top-level stacktrace is folded into the exception interface."""
    manager = EventManager(
        make_event(
            exception={'type': 'MyException'},
            stacktrace={
                'frames': [
                    {'lineno': 1, 'filename': 'foo.py'},
                    {'lineno': 1, 'filename': 'bar.py'},
                ],
            },
        )
    )
    manager.normalize()
    data = manager.get_data()

    moved = data['exception']['values'][0]['stacktrace']['frames']
    assert moved[0]['lineno'] == 1
    assert moved[0]['filename'] == 'foo.py'
    assert moved[1]['lineno'] == 1
    assert moved[1]['filename'] == 'bar.py'
    # The stand-alone stacktrace must be gone after the move.
    assert 'stacktrace' not in data
def test_event_user(self):
    """Saving the same user twice creates one EventUser and tags both events."""
    # First save creates the EventUser; the second attempt must map to the
    # same identity and still tag the event.
    for _attempt in range(2):
        manager = EventManager(self.make_event(**{
            'sentry.interfaces.User': {
                'id': '1',
            }
        }))
        manager.normalize()
        event = manager.save(self.project.id)

        assert EventUser.objects.filter(
            project=self.project,
            ident='1',
        ).exists()
        assert 'sentry:user' in dict(event.tags)
def test(self, mock_delay_index_event_tags, mock_eventstream_insert):
    """End-to-end: EventManager.save() emits an eventstream insert whose
    Kafka payload, replayed into Snuba, makes the event queryable."""
    now = datetime.utcnow()

    def _get_event_count():
        # Count this project's events within a +/- 1 day window around now.
        return snuba.query(
            start=now - timedelta(days=1),
            end=now + timedelta(days=1),
            groupby=['project_id'],
            filter_keys={'project_id': [self.project.id]},
        ).get(self.project.id, 0)

    assert _get_event_count() == 0

    raw_event = {
        'event_id': 'a' * 32,
        'message': 'foo',
        'timestamp': time.mktime(now.timetuple()),
        'level': logging.ERROR,
        'logger': 'default',
        'tags': [],
    }

    manager = EventManager(raw_event)
    manager.normalize()
    event = manager.save(self.project.id)

    # verify eventstream was called by EventManager
    insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
    assert not insert_args
    # 'acbd...' is md5('foo') — the default grouping hash for this message.
    assert insert_kwargs == {
        'event': event,
        'group': event.group,
        'is_new_group_environment': True,
        'is_new': True,
        'is_regression': False,
        'is_sample': False,
        'primary_hash': 'acbd18db4cc2f85cedef654fccc4a4d8',
        'skip_consume': False
    }

    assert mock_delay_index_event_tags.call_count == 1

    # pass arguments on to Kafka EventManager
    self.kafka_eventstream.insert(*insert_args, **insert_kwargs)

    produce_args, produce_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
    assert not produce_args
    assert produce_kwargs['topic'] == 'events'
    assert produce_kwargs['key'] == six.text_type(self.project.id)

    version, type_, payload1, payload2 = json.loads(produce_kwargs['value'])
    assert version == 2
    assert type_ == 'insert'

    # insert what would have been the Kafka payload directly
    # into Snuba, expect an HTTP 200 and for the event to now exist
    snuba_eventstream = SnubaEventStream()
    snuba_eventstream._send(self.project.id, 'insert', (payload1, payload2))
    assert _get_event_count() == 1
def create_event(group, event_id=None, normalize=True, **kwargs):
    """Legacy fixture: build and persist an Event (plus its EventMapping)
    for ``group``, optionally running the payload through
    EventManager.normalize() first.  Returns the saved Event.
    """
    # XXX: Do not use this method for new tests! Prefer `store_event`.
    if event_id is None:
        event_id = uuid4().hex
    kwargs.setdefault('project', group.project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    if kwargs.get('tags'):
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            # normalize dict tags into the list-of-pairs storage format
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace

    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user

    kwargs['data'].setdefault(
        'errors', [{
            'type': EventError.INVALID_DATA,
            'name': 'foobar',
        }]
    )

    # maintain simple event Factories by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs['message'] or '<unlabeled event>',
        }

    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs['data']))
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['data'].update(manager.materialize_metadata())
        kwargs['message'] = manager.get_search_message()

    # This is needed so that create_event saves the event in nodestore
    # under the correct key. This is usually done in EventManager.save()
    kwargs['data'].setdefault(
        'node_id',
        Event.generate_node_id(kwargs['project'].id, event_id)
    )

    event = Event(event_id=event_id, group=group, **kwargs)

    EventMapping.objects.create(
        project_id=event.project.id,
        event_id=event_id,
        group=group,
    )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()

    return event
def test_ephemral_interfaces_removed_on_save(self):
    """The platform attribute survives save on both the event and its group.

    NOTE(review): "ephemral" in the name is a typo for "ephemeral"; left
    unchanged to avoid renaming a collected test.
    """
    manager = EventManager(make_event(platform='python'))
    manager.normalize()
    event = manager.save(1)

    group = event.group
    assert group.platform == 'python'
    assert event.platform == 'python'
def test_culprit_is_not_transaction(self):
    """An explicit culprit must not populate the transaction field."""
    manager = EventManager(make_event(culprit='foobar'))
    manager.normalize()
    saved = manager.save(1)
    assert saved.transaction is None
    assert saved.culprit == 'foobar'
def test_transaction_as_culprit(self):
    """Without an explicit culprit, the transaction doubles as the culprit."""
    manager = EventManager(make_event(transaction='foobar'))
    manager.normalize()
    saved = manager.save(1)
    assert saved.transaction == 'foobar'
    assert saved.culprit == 'foobar'
def inner(data):
    """Normalize an event carrying only an "sdk" payload and snapshot the
    resulting errors plus the serialized interface."""
    manager = EventManager(data={"sdk": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': event.interfaces.get('sdk').to_json(),
    })
def test_environment(self):
    """A valid environment name surfaces as the 'environment' tag."""
    manager = EventManager(self.make_event(environment='beta'))
    manager.normalize()
    event = manager.save(self.project.id)
    assert dict(event.tags).get('environment') == 'beta'
def test_removes_some_empty_containers(key, value):
    """Normalization drops interfaces whose payload is an empty container."""
    payload = make_event()
    payload[key] = value
    manager = EventManager(payload)
    manager.normalize()
    assert key not in manager.get_data()
def save_event():
    """Create and persist a fresh event against the enclosing scope's
    ``release_version`` and ``self.project``; returns the saved event.

    A random uuid1 hex event id guarantees repeated calls are never
    deduplicated.
    """
    manager = EventManager(self.make_event(**{
        'event_id': uuid.uuid1().hex,  # don't deduplicate
        'environment': 'beta',
        'release': release_version,
    }))
    manager.normalize()
    return manager.save(self.project.id)
def test_invalid_transaction(self):
    """A non-string transaction value is discarded during normalization."""
    bad_value = {'messages': 'foo'}
    manager = EventManager(self.make_event(transaction=bad_value))
    manager.normalize()
    event = manager.save(1)
    assert event.transaction is None
def test_logger(self):
    """Logger names containing newlines, or empty names, fall back to the
    default logger without recording a normalization error."""
    manager = EventManager(self.make_event(logger="foo\nbar"))
    data = manager.normalize()
    assert data['logger'] == DEFAULT_LOGGER_NAME

    manager = EventManager(self.make_event(logger=""))
    data = manager.normalize()
    assert data['logger'] == DEFAULT_LOGGER_NAME
    # The fallback must be silent: no 'logger' entry among the errors.
    assert not any(e.get('name') == 'logger' for e in data['errors'])
def test_event_user_unicode_identifier(self):
    """Non-ASCII usernames round-trip through EventUser creation."""
    manager = EventManager(
        self.make_event(**{'sentry.interfaces.User': {'username': u'foô'}}))
    manager.normalize()
    with self.tasks():
        manager.save(self.project.id)
    created = EventUser.objects.get(project_id=self.project.id)
    assert created.username == u'foô'
def test_message_attribute_goes_to_interface(self):
    """A bare 'message' attribute is promoted to the Message interface."""
    manager = EventManager(self.make_event(message='hello world'))
    manager.normalize()
    event = manager.save(self.project.id)
    assert event.data['sentry.interfaces.Message'] == {
        'message': 'hello world',
    }
def inner(data):
    """Normalize a "debug_meta" payload and snapshot errors plus the
    serialized interface (if any)."""
    manager = EventManager(data={"debug_meta": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    debug_meta = event.interfaces.get('debug_meta')
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': debug_meta and debug_meta.to_json(),
    })
def test_returns_cannonical_dict():
    """get_data() hands back a CanonicalKeyDict both before and after
    normalization.  (Name typo "cannonical" kept to avoid renaming.)"""
    from sentry.utils.canonical import CanonicalKeyDict

    manager = EventManager(make_event())
    assert isinstance(manager.get_data(), CanonicalKeyDict)

    manager.normalize()
    assert isinstance(manager.get_data(), CanonicalKeyDict)
def create_event(self, event_id=None, normalize=True, **kwargs):
    """Legacy fixture: build and persist an Event (plus its EventMapping)
    for ``kwargs['group']`` (defaulting to ``self.group``), optionally
    running the payload through EventManager.normalize() first."""
    if event_id is None:
        event_id = uuid4().hex
    if 'group' not in kwargs:
        kwargs['group'] = self.group
    kwargs.setdefault('project', kwargs['group'].project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    if kwargs.get('tags'):
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            # normalize dict tags into the list-of-pairs storage format
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace

    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user

    kwargs['data'].setdefault(
        'errors', [{
            'type': EventError.INVALID_DATA,
            'name': 'foobar',
        }]
    )

    # maintain simple event fixtures by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs.get('message') or '<unlabeled event>',
        }

    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs['data']), for_store=False)
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['message'] = manager.get_search_message()
    else:
        # NOTE(review): 'message' was setdefault'd above, so this assert
        # fires whenever normalize=False — presumably intentional to forbid
        # the legacy message path without normalization; confirm.
        assert 'message' not in kwargs, 'do not pass message this way'

    event = Event(event_id=event_id, **kwargs)

    EventMapping.objects.create(
        project_id=event.project.id,
        event_id=event_id,
        group=event.group,
    )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def test_long_message():
    """Messages beyond SENTRY_MAX_MESSAGE_LENGTH are truncated on normalize."""
    oversized = 'x' * (settings.SENTRY_MAX_MESSAGE_LENGTH + 1)
    manager = EventManager(make_event(message=oversized))
    manager.normalize()
    data = manager.get_data()
    assert len(data['logentry']['formatted']) == settings.SENTRY_MAX_MESSAGE_LENGTH
def test_event_pii():
    """The _meta annotation payload survives normalization untouched."""
    manager = EventManager(
        make_event(
            message='foo bar',
            _meta={'message': {'': {'err': ['invalid']}}},
        )
    )
    manager.normalize()
    normalized = manager.get_data()
    assert normalized['_meta']['message'] == {'': {'err': ['invalid']}}
def inner(data):
    """Normalize a user geo payload and snapshot errors plus the serialized
    geo interface (if any)."""
    manager = EventManager(data={"user": {"id": "123", "geo": data}})
    manager.normalize()
    event = Event(data=manager.get_data())
    geo = event.interfaces['user'].geo
    insta_snapshot({
        'errors': event.data.get('errors'),
        'to_json': geo and geo.to_json(),
    })
def inner(data):
    """Normalize a breadcrumbs payload and snapshot errors plus the
    serialized interface (if any)."""
    manager = EventManager(data={"breadcrumbs": data})
    manager.normalize()
    event = Event(data=manager.get_data())
    crumbs = event.interfaces.get('breadcrumbs')
    insta_snapshot({
        'errors': event.data.get("errors"),
        'to_json': crumbs and crumbs.to_json(),
    })
def test_transaction_over_culprit(self):
    """When both are supplied, the transaction wins and overwrites culprit."""
    manager = EventManager(self.make_event(culprit='foo', transaction='bar'))
    manager.normalize()
    saved = manager.save(1)
    assert saved.transaction == 'bar'
    assert saved.culprit == 'bar'
def test_default_version():
    """Events normalize to protocol version '5' when none is given."""
    manager = EventManager(make_event())
    manager.normalize()
    assert manager.get_data()['version'] == '5'
def test_interface_none():
    """An explicit None interface value is stripped from the payload."""
    manager = EventManager(make_event(user=None))
    manager.normalize()
    assert 'user' not in manager.get_data()
def test_transaction_as_culprit(self):
    """Absent an explicit culprit, the transaction value is used for both."""
    manager = EventManager(make_event(transaction="foobar"))
    manager.normalize()
    saved = manager.save(1)
    assert saved.transaction == "foobar"
    assert saved.culprit == "foobar"
def process(self, request, project, auth, helper, data, **kwargs):
    """Validate, filter, rate-limit, scrub and enqueue an incoming event.

    Returns the event id on success; raises APIError / APIForbidden /
    APIRateLimited when the event is rejected.
    """
    metrics.incr('events.total')

    if not data:
        raise APIError('No JSON data was found')

    # Decoded lazily so filtered/rate-limited events skip the parse cost.
    data = LazyData(
        data=data,
        content_encoding=request.META.get('HTTP_CONTENT_ENCODING', ''),
        helper=helper,
    )

    remote_addr = request.META['REMOTE_ADDR']
    event_received.send_robust(
        ip=remote_addr,
        sender=type(self),
    )

    if helper.should_filter(project, data, ip_address=remote_addr):
        app.tsdb.incr_multi([
            (app.tsdb.models.project_total_received, project.id),
            (app.tsdb.models.project_total_blacklisted, project.id),
            (app.tsdb.models.organization_total_received, project.organization_id),
            (app.tsdb.models.organization_total_blacklisted, project.organization_id),
        ])
        metrics.incr('events.blacklisted')
        raise APIForbidden('Event dropped due to filter')

    # TODO: improve this API (e.g. make RateLimit act on __ne__)
    rate_limit = safe_execute(app.quotas.is_rate_limited, project=project,
                              _with_transaction=False)
    if isinstance(rate_limit, bool):
        rate_limit = RateLimit(is_limited=rate_limit, retry_after=None)

    # XXX(dcramer): when the rate limiter fails we drop events to ensure
    # it cannot cascade
    if rate_limit is None or rate_limit.is_limited:
        if rate_limit is None:
            helper.log.debug(
                'Dropped event due to error with rate limiter')
        app.tsdb.incr_multi([
            (app.tsdb.models.project_total_received, project.id),
            (app.tsdb.models.project_total_rejected, project.id),
            (app.tsdb.models.organization_total_received, project.organization_id),
            (app.tsdb.models.organization_total_rejected, project.organization_id),
        ])
        metrics.incr('events.dropped')
        if rate_limit is not None:
            raise APIRateLimited(rate_limit.retry_after)
    else:
        app.tsdb.incr_multi([
            (app.tsdb.models.project_total_received, project.id),
            (app.tsdb.models.organization_total_received, project.organization_id),
        ])

    # mutates data
    data = helper.validate_data(project, data)

    if 'sdk' not in data:
        sdk = helper.parse_client_as_sdk(auth.client)
        if sdk:
            data['sdk'] = sdk
        else:
            # ensure the key exists for the client_ip assignment below
            data['sdk'] = {}

    data['sdk']['client_ip'] = remote_addr

    # mutates data
    manager = EventManager(data, version=auth.version)
    manager.normalize()

    org_options = OrganizationOption.objects.get_all_values(
        project.organization_id)

    # org-level scrub requirements override the project setting
    if org_options.get('sentry:require_scrub_ip_address', False):
        scrub_ip_address = True
    else:
        scrub_ip_address = project.get_option('sentry:scrub_ip_address', False)

    # insert IP address if not available and wanted
    if not scrub_ip_address:
        helper.ensure_has_ip(
            data,
            remote_addr,
            set_if_missing=auth.is_public or
            data.get('platform') in ('javascript', 'cocoa', 'objc'))

    event_id = data['event_id']

    # TODO(dcramer): ideally we'd only validate this if the event_id was
    # supplied by the user
    cache_key = 'ev:%s:%s' % (
        project.id,
        event_id,
    )

    if cache.get(cache_key) is not None:
        raise APIForbidden(
            'An event with the same ID already exists (%s)' % (event_id, ))

    if org_options.get('sentry:require_scrub_data', False):
        scrub_data = True
    else:
        scrub_data = project.get_option('sentry:scrub_data', True)

    if scrub_data:
        # We filter data immediately before it ever gets into the queue
        sensitive_fields_key = 'sentry:sensitive_fields'
        sensitive_fields = (org_options.get(sensitive_fields_key, []) +
                            project.get_option(sensitive_fields_key, []))

        if org_options.get('sentry:require_scrub_defaults', False):
            scrub_defaults = True
        else:
            scrub_defaults = project.get_option('sentry:scrub_defaults', True)

        inst = SensitiveDataFilter(
            fields=sensitive_fields,
            include_defaults=scrub_defaults,
        )
        inst.apply(data)

    if scrub_ip_address:
        # We filter data immediately before it ever gets into the queue
        helper.ensure_does_not_have_ip(data)

    # mutates data (strips a lot of context if not queued)
    helper.insert_data_to_database(data)

    # remember this id briefly so duplicate submissions are rejected above
    cache.set(cache_key, '', 60 * 5)

    helper.log.debug('New event received (%s)', event_id)

    event_accepted.send_robust(
        ip=remote_addr,
        data=data,
        project=project,
        sender=type(self),
    )

    return event_id
def make_release_event(self, release_name, project_id):
    """Save a minimal event tagged with ``release_name`` and return it."""
    manager = EventManager(make_event(release=release_name))
    manager.normalize()
    return manager.save(project_id)
def test_transaction_and_culprit(self):
    """Explicit transaction and culprit are stored independently."""
    manager = EventManager(make_event(transaction="foobar", culprit="baz"))
    manager.normalize()
    saved = manager.save(1)
    assert saved.transaction == "foobar"
    assert saved.culprit == "baz"
def test_long_transaction(self):
    """Transaction names are clamped to MAX_CULPRIT_LENGTH."""
    manager = EventManager(
        self.make_event(transaction='x' * (MAX_CULPRIT_LENGTH + 1)))
    normalized = manager.normalize()
    assert len(normalized['transaction']) == MAX_CULPRIT_LENGTH
def test_key_id_remains_in_data(self):
    """key_id survives both normalization and save."""
    manager = EventManager(self.make_event(key_id=12345))
    manager.normalize()
    assert manager.data['key_id'] == 12345

    event = manager.save(1)
    assert event.data['key_id'] == 12345
def test_interface_is_relabeled(self):
    """A 'user' alias is rewritten to the canonical interface path."""
    manager = EventManager(self.make_event(user={'id': '1'}))
    normalized = manager.normalize()
    assert normalized['sentry.interfaces.User'] == {'id': '1'}
    assert 'user' not in normalized
def test_tags_as_dict(self):
    """Dict-shaped tags are normalized into a list of (key, value) pairs."""
    manager = EventManager(self.make_event(tags={'foo': 'bar'}))
    normalized = manager.normalize()
    assert normalized['tags'] == [('foo', 'bar')]
def process(self, request, project, key, auth, helper, data, attachments=None, **kwargs):
    """Normalize an incoming event and either publish it to Kafka (sampled
    relay path) or process it synchronously via ``process_event``.

    Returns the event id.
    """
    metrics.incr('events.total', skip_internal=False)

    if not data:
        raise APIError('No JSON data was found')

    remote_addr = request.META['REMOTE_ADDR']

    event_manager = EventManager(
        data,
        project=project,
        key=key,
        auth=auth,
        client_ip=remote_addr,
        user_agent=helper.context.agent,
        version=auth.version,
        content_encoding=request.META.get('HTTP_CONTENT_ENCODING', ''),
    )
    del data
    self.pre_normalize(event_manager, helper)
    event_manager.normalize()

    agent = request.META.get('HTTP_USER_AGENT')

    # TODO: Some form of coordination between the Kafka consumer
    # and this method (the 'relay') to decide whether a 429 should
    # be returned here.

    # Everything before this will eventually be done in the relay.
    # Attachments never go through Kafka; publishing is sampled by option.
    if (kafka_publisher is not None and
            not attachments and
            random.random() < options.get('store.kafka-sample-rate')):

        process_in_kafka = options.get('store.process-in-kafka')

        try:
            kafka_publisher.publish(
                channel=getattr(settings, 'KAFKA_EVENTS_PUBLISHER_TOPIC', 'store-events'),
                # Relay will (eventually) need to produce a Kafka message
                # with this JSON format.
                value=json.dumps({
                    'data': dict(event_manager.get_data()),
                    'project_id': project.id,
                    'auth': {
                        'sentry_client': auth.client,
                        'sentry_version': auth.version,
                        'sentry_secret': auth.secret_key,
                        'sentry_key': auth.public_key,
                        'is_public': auth.is_public,
                    },
                    'remote_addr': remote_addr,
                    'agent': agent,
                    # Whether or not the Kafka consumer is in charge
                    # of actually processing this event.
                    'should_process': process_in_kafka,
                })
            )
        except Exception as e:
            # Publishing is best-effort: fall through to synchronous
            # processing below on any failure.
            logger.exception("Cannot publish event to Kafka: {}".format(
                e.message))
        else:
            if process_in_kafka:
                # This event will be processed by the Kafka consumer, so we
                # shouldn't double process it here.
                return event_manager.get_data()['event_id']

    # Everything after this will eventually be done in a Kafka consumer.
    return process_event(event_manager, project, key,
                         remote_addr, helper, attachments)
def test_explicit_version():
    """An explicitly supplied protocol version is preserved."""
    manager = EventManager(make_event(), '6')
    manager.normalize()
    assert manager.get_data()['version'] == '6'
def test_event_user(self):
    """tsdb user-affected counters track distinct users per group and per
    project, globally and scoped to the event's environment; a second event
    updates the existing EventUser in place."""
    manager = EventManager(
        make_event(event_id="a", environment="totally unique environment",
                   **{"user": {
                       "id": "1"
                   }}))
    manager.normalize()
    with self.tasks():
        event = manager.save(self.project.id)

    environment_id = Environment.get_for_organization_id(
        event.project.organization_id, "totally unique environment").id

    # one distinct user for the group, unscoped
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_group, (event.group.id, ),
        event.datetime, event.datetime) == {
            event.group.id: 1
        }
    # one distinct user for the project, unscoped
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_project,
        (event.project.id, ),
        event.datetime,
        event.datetime,
    ) == {
        event.project.id: 1
    }
    # same counts when scoped to the event's environment
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_group,
        (event.group.id, ),
        event.datetime,
        event.datetime,
        environment_id=environment_id,
    ) == {
        event.group.id: 1
    }
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_project,
        (event.project.id, ),
        event.datetime,
        event.datetime,
        environment_id=environment_id,
    ) == {
        event.project.id: 1
    }

    euser = EventUser.objects.get(project_id=self.project.id, ident="1")
    assert event.get_tag("sentry:user") == euser.tag_value

    # ensure event user is mapped to tags in second attempt
    manager = EventManager(
        make_event(event_id="b", **{"user": {
            "id": "1",
            "name": "jane"
        }}))
    manager.normalize()
    with self.tasks():
        event = manager.save(self.project.id)

    # the EventUser record is updated in place, not duplicated
    euser = EventUser.objects.get(id=euser.id)
    assert event.get_tag("sentry:user") == euser.tag_value
    assert euser.name == "jane"
    assert euser.ident == "1"
def test_event_user(self):
    """Legacy-interface variant: tsdb user-affected counters per group and
    project, globally and per environment; the EventUser is updated (not
    duplicated) by a second event."""
    manager = EventManager(
        self.make_event(environment='totally unique environment',
                        **{'sentry.interfaces.User': {
                            'id': '1',
                        }}))
    manager.normalize()
    with self.tasks():
        event = manager.save(self.project.id)

    environment_id = Environment.get_for_organization_id(
        event.project.organization_id,
        'totally unique environment',
    ).id

    # one distinct user for the group, unscoped
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_group,
        (event.group.id, ),
        event.datetime,
        event.datetime,
    ) == {
        event.group.id: 1,
    }
    # one distinct user for the project, unscoped
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_project,
        (event.project.id, ),
        event.datetime,
        event.datetime,
    ) == {
        event.project.id: 1,
    }
    # same counts when scoped to the event's environment
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_group,
        (event.group.id, ),
        event.datetime,
        event.datetime,
        environment_id=environment_id,
    ) == {
        event.group.id: 1,
    }
    assert tsdb.get_distinct_counts_totals(
        tsdb.models.users_affected_by_project,
        (event.project.id, ),
        event.datetime,
        event.datetime,
        environment_id=environment_id,
    ) == {
        event.project.id: 1,
    }

    euser = EventUser.objects.get(
        project_id=self.project.id,
        ident='1',
    )
    assert event.get_tag('sentry:user') == euser.tag_value

    # ensure event user is mapped to tags in second attempt
    manager = EventManager(
        self.make_event(
            **{'sentry.interfaces.User': {
                'id': '1',
                'name': 'jane',
            }}))
    manager.normalize()
    with self.tasks():
        event = manager.save(self.project.id)

    # the EventUser record is updated in place, not duplicated
    euser = EventUser.objects.get(id=euser.id)
    assert event.get_tag('sentry:user') == euser.tag_value
    assert euser.name == 'jane'
    assert euser.ident == '1'
def test_invalid_environment(self):
    """An environment name containing '/' is rejected and never tagged."""
    manager = EventManager(make_event(environment="bad/name"))
    manager.normalize()
    event = manager.save(self.project.id)
    tags = dict(event.tags)
    assert tags.get("environment") is None
def test_environment(self):
    """A valid environment name surfaces as the 'environment' tag."""
    manager = EventManager(make_event(environment="beta"))
    manager.normalize()
    event = manager.save(self.project.id)
    tags = dict(event.tags)
    assert tags.get("environment") == "beta"
def test_inferred_culprit_from_empty_stacktrace(self):
    """An empty stacktrace yields an empty-string culprit, not None."""
    manager = EventManager(make_event(stacktrace={"frames": []}))
    manager.normalize()
    saved = manager.save(1)
    assert saved.culprit == ""
def digest(request): random = get_random(request) # TODO: Refactor all of these into something more manageable. org = Organization(id=1, slug="example", name="Example Organization") project = Project(id=1, slug="example", name="Example Project", organization=org) rules = { i: Rule(id=i, project=project, label=f"Rule #{i}") for i in range(1, random.randint(2, 4)) } state = { "project": project, "groups": {}, "rules": rules, "event_counts": {}, "user_counts": {}, } records = [] group_generator = make_group_generator(random, project) for i in range(random.randint(1, 30)): group = next(group_generator) state["groups"][group.id] = group offset = timedelta(seconds=0) for i in range(random.randint(1, 10)): offset += timedelta(seconds=random.random() * 120) data = dict(load_data("python")) data["message"] = group.message data.pop("logentry", None) event_manager = EventManager(data) event_manager.normalize() data = event_manager.get_data() data["timestamp"] = random.randint(to_timestamp(group.first_seen), to_timestamp(group.last_seen)) event = eventstore.create_event(event_id=uuid.uuid4().hex, group_id=group.id, project_id=project.id, data=data.data) records.append( Record( event.event_id, Notification( event, random.sample(list(state["rules"].keys()), random.randint(1, len(state["rules"]))), ), to_timestamp(event.datetime), )) state["event_counts"][group.id] = random.randint(10, 1e4) state["user_counts"][group.id] = random.randint(10, 1e4) digest = build_digest(project, records, state) start, end, counts = get_digest_metadata(digest) context = { "project": project, "counts": counts, "digest": digest, "start": start, "end": end, "referrer": "digest_email", } add_unsubscribe_link(context) return MailPreview( html_template="sentry/emails/digests/body.html", text_template="sentry/emails/digests/body.txt", context=context, ).render(request)
def test_culprit_is_not_transaction(self):
    """An explicit culprit must not populate the transaction field."""
    manager = EventManager(make_event(culprit="foobar"))
    manager.normalize()
    saved = manager.save(1)
    assert saved.transaction is None
    assert saved.culprit == "foobar"
def test_default_version(self):
    """Events normalize to protocol version '5' when none is given."""
    manager = EventManager(self.make_event())
    normalized = manager.normalize()
    assert normalized['version'] == '5'
def test_invalid_transaction(self):
    """A non-string transaction value is discarded during normalization."""
    bad_value = {"messages": "foo"}
    manager = EventManager(make_event(transaction=bad_value))
    manager.normalize()
    saved = manager.save(1)
    assert saved.transaction is None
def test_explicit_version(self):
    """An explicitly supplied protocol version is preserved."""
    manager = EventManager(self.make_event(), '6')
    normalized = manager.normalize()
    assert normalized['version'] == '6'
def process(self, request, project, key, auth, helper, data, project_config,
            attachments=None, **kwargs):
    """Normalize an incoming event, enforce the post-normalization size
    limit, and hand it off to ``process_event``.

    Emits a tracked outcome on every rejection path.  Raises APIError /
    APIForbidden when the payload is missing, invalid, or too large.
    """
    disable_transaction_events()
    metrics.incr("events.total", skip_internal=False)

    project_id = project_config.project_id
    organization_id = project_config.organization_id

    if not data:
        track_outcome(organization_id, project_id, key.id, Outcome.INVALID, "no_data")
        raise APIError("No JSON data was found")

    remote_addr = request.META["REMOTE_ADDR"]

    event_manager = EventManager(
        data,
        project=project,
        key=key,
        auth=auth,
        client_ip=remote_addr,
        user_agent=helper.context.agent,
        version=auth.version,
        content_encoding=request.META.get("HTTP_CONTENT_ENCODING", ""),
        project_config=project_config,
    )
    del data

    self.pre_normalize(event_manager, helper)

    try:
        event_manager.normalize()
    except ProcessingErrorInvalidTransaction as e:
        track_outcome(
            organization_id,
            project_id,
            key.id,
            Outcome.INVALID,
            "invalid_transaction",
            category=DataCategory.TRANSACTION,
        )
        # surface only the first line of the normalization error message
        raise APIError(six.text_type(e).split("\n", 1)[0])

    data = event_manager.get_data()
    dict_data = dict(data)
    # size is measured on the serialized, normalized payload
    data_size = len(json.dumps(dict_data))

    if data_size > 10000000:
        metrics.timing("events.size.rejected", data_size)
        track_outcome(
            organization_id,
            project_id,
            key.id,
            Outcome.INVALID,
            "too_large",
            event_id=dict_data.get("event_id"),
            category=DataCategory.from_event_type(dict_data.get("type")),
        )
        raise APIForbidden("Event size exceeded 10MB after normalization.")

    metrics.timing("events.size.data.post_storeendpoint", data_size)

    return process_event(event_manager, project, key, remote_addr, helper,
                         attachments, project_config)
def test_bad_logger(self):
    """A logger name containing whitespace falls back to the default logger."""
    manager = EventManager(self.make_event(logger='foo bar'))
    normalized = manager.normalize()
    assert normalized['logger'] == DEFAULT_LOGGER_NAME
def test_default_fingerprint(self):
    """Events without an explicit fingerprint get the default placeholder."""
    manager = EventManager(self.make_event())
    manager.normalize()
    event = manager.save(self.project.id)
    assert event.data.get('fingerprint') == ['{{ default }}']
def test_empty_message():
    """An empty message produces no logentry interface at all."""
    manager = EventManager(make_event(message=''))
    manager.normalize()
    assert 'logentry' not in manager.get_data()
def from_kwargs(self, project, **kwargs):
    """Build, normalize and persist an event from raw keyword data."""
    # imported lazily to avoid a module-level import cycle
    from sentry.event_manager import EventManager

    manager = EventManager(kwargs)
    manager.normalize()
    return manager.save(project)
def test_long_culprit(self):
    """Culprits longer than MAX_CULPRIT_LENGTH are truncated."""
    manager = EventManager(
        self.make_event(culprit='x' * (MAX_CULPRIT_LENGTH + 1)))
    normalized = manager.normalize()
    assert len(normalized['culprit']) == MAX_CULPRIT_LENGTH
def validate_and_normalize(report, client_ip="198.51.100.0", user_agent="Awesome Browser"):
    """Run a CSP report through EventManager processing and return the
    normalized event data."""
    csp_manager = EventManager(report, client_ip=client_ip, user_agent=user_agent)
    csp_manager.process_csp_report()
    csp_manager.normalize()
    return csp_manager.get_data()
def alert(request):
    """Render a preview of the error alert email for a randomly generated
    group/event on the requested platform (``?platform=``, default python)."""
    platform = request.GET.get("platform", "python")
    org = Organization(id=1, slug="example", name="Example")
    project = Project(id=1, slug="example", name="Example", organization=org)

    random = get_random(request)
    group = next(make_group_generator(random, project))

    data = dict(load_data(platform))
    data["message"] = group.message
    data["event_id"] = "44f1419e73884cd2b45c79918f4b6dc4"
    data.pop("logentry", None)
    data["environment"] = "prod"
    data["tags"] = [
        ("logger", "javascript"),
        ("environment", "prod"),
        ("level", "error"),
        ("device", "Other"),
    ]

    event_manager = EventManager(data)
    event_manager.normalize()
    data = event_manager.get_data()
    event = event_manager.save(project.id)
    # Prevent CI screenshot from constantly changing
    event.data["timestamp"] = 1504656000.0  # datetime(2017, 9, 6, 0, 0)
    event_type = get_event_type(event.data)

    group.message = event.search_message
    group.data = {
        "type": event_type.key,
        "metadata": event_type.get_metadata(data)
    }

    rule = Rule(label="An example rule")

    # XXX: this interface_list code needs to be the same as in
    #      src/sentry/mail/adapter.py
    interface_list = []
    for interface in event.interfaces.values():
        body = interface.to_email_html(event)
        if not body:
            continue
        text_body = interface.to_string(event)
        interface_list.append(
            (interface.get_title(), mark_safe(body), text_body))

    return MailPreview(
        html_template="sentry/emails/error.html",
        text_template="sentry/emails/error.txt",
        context={
            "rule": rule,
            "group": group,
            "event": event,
            "link": "http://example.com/link",
            "interfaces": interface_list,
            "tags": event.tags,
            "project_label": project.slug,
            "commits": [{
                # TODO(dcramer): change to use serializer
                "repository": {
                    "status": "active",
                    "name": "Example Repo",
                    "url": "https://github.com/example/example",
                    "dateCreated": "2018-02-28T23:39:22.402Z",
                    "provider": {
                        "id": "github",
                        "name": "GitHub"
                    },
                    "id": "1",
                },
                "score": 2,
                "subject": "feat: Do something to raven/base.py",
                "message": "feat: Do something to raven/base.py\naptent vivamus vehicula tempus volutpat hac tortor",
                "id": "1b17483ffc4a10609e7921ee21a8567bfe0ed006",
                "shortId": "1b17483",
                "author": {
                    "username": "******",
                    "isManaged": False,
                    "lastActive": "2018-03-01T18:25:28.149Z",
                    "id": "1",
                    "isActive": True,
                    "has2fa": False,
                    "name": "*****@*****.**",
                    "avatarUrl": "https://secure.gravatar.com/avatar/51567a4f786cd8a2c41c513b592de9f9?s=32&d=mm",
                    "dateJoined": "2018-02-27T22:04:32.847Z",
                    "emails": [{
                        "is_verified": False,
                        "id": "1",
                        "email": "*****@*****.**"
                    }],
                    "avatar": {
                        "avatarUuid": None,
                        "avatarType": "letter_avatar"
                    },
                    "lastLogin": "******",
                    "email": "*****@*****.**",
                },
            }],
        },
    ).render(request)