def get_fingerprint(event):
    """Return the md5 of the event's primary (first) fingerprint hash."""
    # TODO: This *might* need to be protected from an IndexError?
    hashes = get_hashes_from_fingerprint(
        event,
        get_fingerprint_for_event(event),
    )
    return md5_from_hash(hashes[0])
def test_custom_values(self):
    """A fingerprint with an extra custom value must produce the same
    number of hashes as the defaults, but different checksums."""
    stacktrace = {
        'frames': [
            {'lineno': 1, 'filename': 'foo.py'},
            {'lineno': 1, 'filename': 'foo.py', 'in_app': True},
        ],
    }
    event = Event(
        data={
            'sentry.interfaces.Stacktrace': stacktrace,
            'sentry.interfaces.Http': {'url': 'http://example.com'},
        },
        platform='python',
        message='Foo bar',
    )
    fp_checksums = get_hashes_from_fingerprint(event, ["{{default}}", "custom"])
    def_checksums = get_hashes_for_event(event)
    assert len(fp_checksums) == len(def_checksums)
    assert def_checksums != fp_checksums
def create_event(self, *args, **kwargs):
    """\
    Wraps the base `create_event` and mirrors the resulting event into the
    local test Snuba cluster so tests can query the same event data.

    A GroupHash is created here because `create_event` does not run events
    through the 'real' event pipeline. In a perfect world all test events
    would go through the full regular pipeline.
    """
    from sentry.event_manager import get_hashes_from_fingerprint, md5_from_hash

    event = super(SnubaTestCase, self).create_event(*args, **kwargs)
    data = event.data.data
    tags = dict(data.get('tags', []))

    # Snuba expects a 'received' timestamp; default it to the event time.
    if not data.get('received'):
        data['received'] = calendar.timegm(event.datetime.timetuple())

    if 'environment' in tags:
        env = Environment.get_or_create(
            event.project,
            tags['environment'],
        )
        GroupEnvironment.objects.get_or_create(
            environment_id=env.id,
            group_id=event.group_id,
        )

    # Promote the user tag into top-level event data.
    if 'user' in tags:
        data['user'] = tags.pop('user')

    fingerprint = data.get('fingerprint', ['{{ default }}'])
    hashes = get_hashes_from_fingerprint(event, fingerprint)
    grouphash, _ = GroupHash.objects.get_or_create(
        project=event.project,
        group=event.group,
        hash=md5_from_hash(hashes[0]),
    )

    self.snuba_insert(self.__wrap_event(event, data, grouphash.hash))
    return event
def test_default_value(self):
    """A fingerprint of just ``{{default}}`` must reproduce the event's
    default grouping checksums exactly."""
    frames = [
        {'lineno': 1, 'filename': 'foo.py'},
        {'lineno': 1, 'filename': 'foo.py', 'in_app': True},
    ]
    event = Event(
        data={
            'sentry.interfaces.Stacktrace': {'frames': frames},
            'sentry.interfaces.Http': {'url': 'http://example.com'},
        },
        platform='python',
        message='Foo bar',
    )
    fp_checksums = get_hashes_from_fingerprint(event, ["{{default}}"])
    def_checksums = get_hashes_for_event(event)
    assert def_checksums == fp_checksums
def create_event(self, *args, **kwargs):
    """\
    Delegates to the existing `create_event` and then inserts the result
    into the local test Snuba cluster, so tests run against identical data.

    We create a GroupHash ourselves because `create_event` skips the
    'real' event pipeline. Ideally all test events would pass through the
    full regular pipeline instead.
    """
    from sentry.event_manager import get_hashes_from_fingerprint, md5_from_hash

    event = super(SnubaTestCase, self).create_event(*args, **kwargs)
    payload = event.data.data
    tags = dict(payload.get('tags', []))

    # Backfill the 'received' timestamp from the event datetime if absent.
    if not payload.get('received'):
        payload['received'] = calendar.timegm(event.datetime.timetuple())

    if 'environment' in tags:
        environment = Environment.get_or_create(
            event.project,
            tags['environment'],
        )
        GroupEnvironment.objects.get_or_create(
            environment_id=environment.id,
            group_id=event.group_id,
        )

    fingerprint = payload.get('fingerprint', ['{{ default }}'])
    primary_hash = md5_from_hash(
        get_hashes_from_fingerprint(event, fingerprint)[0]
    )
    grouphash, _ = GroupHash.objects.get_or_create(
        project=event.project,
        group=event.group,
        hash=primary_hash,
    )

    self.snuba_insert(self.__wrap_event(event, payload, grouphash.hash))
    return event
def _rehash_group_events(group, limit=100):
    """Re-run fingerprint hashing for up to *limit* events of *group* and
    re-aggregate each event into its (possibly new) group.

    Returns True if any events were processed.
    """
    from sentry.event_manager import (EventManager, get_hashes_from_fingerprint,
                                      generate_culprit, md5_from_hash)
    from sentry.models import Event, Group

    environment_cache = {}
    project = group.project
    event_list = list(Event.objects.filter(group_id=group.id)[:limit])
    Event.objects.bind_nodes(event_list, 'data')

    for event in event_list:
        # Normalize the stored fingerprint into a non-empty list.
        fingerprint = event.data.get('fingerprint', ['{{ default }}'])
        if fingerprint and not isinstance(fingerprint, (list, tuple)):
            fingerprint = [fingerprint]
        elif not fingerprint:
            fingerprint = ['{{ default }}']

        manager = EventManager({})
        group_kwargs = {
            'message': event.message,
            'platform': event.platform,
            'culprit': generate_culprit(event.data),
            'logger': event.get_tag('logger') or group.logger,
            'level': group.level,
            'last_seen': event.datetime,
            'first_seen': event.datetime,
            'data': group.data,
        }
        # XXX(dcramer): doesnt support checksums as they're not stored
        # NOTE: must be a real list, not a bare map() — on Python 3 the
        # iterator would be exhausted by the loop below before being passed
        # into _save_aggregate as `hashes`.
        hashes = [md5_from_hash(h)
                  for h in get_hashes_from_fingerprint(event, fingerprint)]
        # One _save_aggregate pass per hash (loop variable itself unused);
        # renamed from `hash` to avoid shadowing the builtin.
        for _ in hashes:
            new_group, _ignored, _ignored, _ignored = manager._save_aggregate(
                event=event, hashes=hashes, release=None, **group_kwargs)
            event.update(group_id=new_group.id)
            if event.data.get('tags'):
                Group.objects.add_tags(
                    new_group,
                    _get_event_environment(event, project, environment_cache),
                    event.data['tags'])
    return bool(event_list)
def _rehash_group_events(group, limit=100):
    """Recompute fingerprint hashes for up to *limit* events belonging to
    *group*, re-aggregating each event into its (possibly new) group.

    Returns True if any events were processed.
    """
    from sentry.event_manager import (
        EventManager, get_hashes_from_fingerprint, generate_culprit, md5_from_hash
    )
    from sentry.models import Event, Group

    environment_cache = {}
    project = group.project
    event_list = list(Event.objects.filter(group_id=group.id)[:limit])
    Event.objects.bind_nodes(event_list, 'data')

    for event in event_list:
        # Coerce the stored fingerprint into a non-empty list.
        fingerprint = event.data.get('fingerprint', ['{{ default }}'])
        if fingerprint and not isinstance(fingerprint, (list, tuple)):
            fingerprint = [fingerprint]
        elif not fingerprint:
            fingerprint = ['{{ default }}']

        manager = EventManager({})
        group_kwargs = {
            'message': event.message,
            'platform': event.platform,
            'culprit': generate_culprit(event.data),
            'logger': event.get_tag('logger') or group.logger,
            'level': group.level,
            'last_seen': event.datetime,
            'first_seen': event.datetime,
            'data': group.data,
        }
        # XXX(dcramer): doesnt support checksums as they're not stored
        # Materialized as a list: a bare map() on Python 3 is a single-use
        # iterator that the loop below would exhaust before passing it to
        # _save_aggregate as `hashes`.
        hashes = [
            md5_from_hash(h)
            for h in get_hashes_from_fingerprint(event, fingerprint)
        ]
        # Loop variable is unused; renamed from `hash` (shadowed a builtin).
        for _ in hashes:
            new_group, _ignored, _ignored, _ignored = manager._save_aggregate(
                event=event,
                hashes=hashes,
                release=None,
                **group_kwargs
            )
            event.update(group_id=new_group.id)
            if event.data.get('tags'):
                Group.objects.add_tags(
                    new_group,
                    _get_event_environment(event, project, environment_cache),
                    event.data['tags'])
    return bool(event_list)
def serialize(self, obj, attrs, user):
    """Serialize a full event payload, including per-field `_meta` data."""
    # Normalize processing errors recorded on the event payload.
    errors = [
        {
            'type': err['type'],
            'message': EventError.get_message(err),
            'data': {k: v for k, v in six.iteritems(err) if k != 'type'},
        }
        for err in obj.data.get('errors', [])
    ]

    (message, message_meta) = self._get_message_with_meta(obj)
    (tags, tags_meta) = self._get_tags_with_meta(obj)
    (context, context_meta) = self._get_attr_with_meta(obj, 'extra', {})
    (packages, packages_meta) = self._get_attr_with_meta(obj, 'modules', {})

    received = obj.data.get('received')
    if received:
        # Sentry at one point attempted to record invalid types here.
        # Remove after June 2 2016
        try:
            received = datetime.utcfromtimestamp(received).replace(
                tzinfo=timezone.utc,
            )
        except TypeError:
            received = None

    from sentry.event_manager import (
        get_hashes_from_fingerprint,
        md5_from_hash,
    )

    fingerprint = obj.data.get('fingerprint', ['{{ default }}'])
    fingerprints = [
        md5_from_hash(h)
        for h in get_hashes_from_fingerprint(obj, fingerprint)
    ]

    # TODO(dcramer): move release serialization here
    return {
        'id': six.text_type(obj.id),
        'groupID': six.text_type(obj.group_id),
        'eventID': six.text_type(obj.event_id),
        'size': obj.size,
        'entries': attrs['entries'],
        'dist': obj.dist,
        # See GH-3248
        'message': message,
        'user': attrs['user'],
        'contexts': attrs['contexts'],
        'sdk': attrs['sdk'],
        # TODO(dcramer): move into contexts['extra']
        'context': context,
        'packages': packages,
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'tags': tags,
        'platform': obj.platform,
        'dateCreated': obj.datetime,
        'dateReceived': received,
        'errors': errors,
        'fingerprints': fingerprints,
        '_meta': {
            'entries': attrs['_meta']['entries'],
            'message': message_meta,
            'user': attrs['_meta']['user'],
            'contexts': attrs['_meta']['contexts'],
            'sdk': attrs['_meta']['sdk'],
            'context': context_meta,
            'packages': packages_meta,
            'tags': tags_meta,
        },
    }
def serialize(self, obj, attrs, user):
    """Serialize a full event payload (variant without `_meta` data)."""
    # Normalize processing errors recorded on the event payload.
    errors = [
        {
            'type': err['type'],
            'message': EventError.get_message(err),
            'data': {k: v for k, v in six.iteritems(err) if k != 'type'},
        }
        for err in obj.data.get('errors', [])
    ]

    # Strip the 'sentry:' prefix from tag keys and order by key.
    # list.sort is stable, matching sorted()'s handling of duplicate keys.
    tags = [
        {'key': k.split('sentry:', 1)[-1], 'value': v}
        for k, v in obj.get_tags()
    ]
    tags.sort(key=lambda entry: entry['key'])

    received = obj.data.get('received')
    if received:
        # Sentry at one point attempted to record invalid types here.
        # Remove after June 2 2016
        try:
            received = datetime.utcfromtimestamp(received).replace(
                tzinfo=timezone.utc,
            )
        except TypeError:
            received = None

    from sentry.event_manager import (
        get_hashes_from_fingerprint,
        md5_from_hash,
    )

    fingerprint = obj.data.get('fingerprint', ['{{ default }}'])
    fingerprints = [
        md5_from_hash(h)
        for h in get_hashes_from_fingerprint(obj, fingerprint)
    ]

    # TODO(dcramer): move release serialization here
    return {
        'id': six.text_type(obj.id),
        'groupID': six.text_type(obj.group_id),
        'eventID': six.text_type(obj.event_id),
        'size': obj.size,
        'entries': attrs['entries'],
        'dist': obj.dist,
        # See GH-3248
        'message': obj.get_legacy_message(),
        'user': attrs['user'],
        'contexts': attrs['contexts'],
        'sdk': attrs['sdk'],
        # TODO(dcramer): move into contexts['extra']
        'context': obj.data.get('extra', {}),
        'packages': obj.data.get('modules', {}),
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'tags': tags,
        'platform': obj.platform,
        'dateCreated': obj.datetime,
        'dateReceived': received,
        'errors': errors,
        'fingerprints': fingerprints,
    }