def get_preprocess_hashes(data):
    """Return the list of MD5 grouping hashes for *data*.

    Resolution order: an explicit ``fingerprint`` wins, then a raw
    ``checksum`` (used verbatim), then the default preprocess hash inputs.
    """
    fingerprint = data.get('fingerprint')
    if fingerprint:
        inputs = get_preprocess_hashes_from_fingerprint(data, fingerprint)
        return [md5_from_hash(h) for h in inputs]

    checksum = data.get('checksum')
    if checksum:
        # A client-supplied checksum is already a hash; use it as-is.
        return [checksum]

    return [md5_from_hash(h) for h in get_preprocess_hash_inputs(data)]
def get_fingerprint(event):
    """Return the MD5 digest of the event's primary grouping hash."""
    # TODO: This *might* need to be protected from an IndexError?
    hashes = get_hashes_from_fingerprint(
        event,
        get_fingerprint_for_event(event),
    )
    return md5_from_hash(hashes[0])
def serialize(self, obj, attrs, user):
    """Serialize an event into the detailed API response dict."""
    processing_errors = []
    for err in obj.data.get('errors', []):
        processing_errors.append({
            'type': err['type'],
            'message': EventError.get_message(err),
            'data': {key: val for key, val in six.iteritems(err) if key != 'type'},
        })

    # Strip the internal 'sentry:' prefix from tag keys and order by key.
    tags = [
        {'key': key.split('sentry:', 1)[-1], 'value': value}
        for key, value in obj.get_tags()
    ]
    tags.sort(key=lambda tag: tag['key'])

    received = obj.data.get('received')
    if received:
        # Sentry at one point attempted to record invalid types here.
        # Remove after June 2 2016
        try:
            received = datetime.utcfromtimestamp(received).replace(
                tzinfo=timezone.utc,
            )
        except TypeError:
            received = None

    from sentry.event_manager import (
        get_hashes_for_event,
        md5_from_hash,
    )

    # TODO(dcramer): move release serialization here
    return {
        'id': six.text_type(obj.id),
        'groupID': six.text_type(obj.group_id),
        'eventID': six.text_type(obj.event_id),
        'size': obj.size,
        'entries': attrs['entries'],
        'dist': obj.dist,
        # See GH-3248
        'message': obj.get_legacy_message(),
        'user': attrs['user'],
        'contexts': attrs['contexts'],
        'sdk': attrs['sdk'],
        # TODO(dcramer): move into contexts['extra']
        'context': obj.data.get('extra', {}),
        'packages': obj.data.get('modules', {}),
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'tags': tags,
        'platform': obj.platform,
        'dateCreated': obj.datetime,
        'dateReceived': received,
        'errors': processing_errors,
        'fingerprints': [md5_from_hash(h) for h in get_hashes_for_event(obj)],
    }
def test_checksum_rehashed(self):
    # A checksum that is not a valid md5 hex digest gets re-hashed,
    # while the raw value is also kept as a group hash.
    bad_checksum = 'invalid checksum hash'
    manager = EventManager(make_event(checksum=bad_checksum))
    manager.normalize()
    event = manager.save(self.project.id)

    stored_hashes = [
        group_hash.hash
        for group_hash in GroupHash.objects.filter(group=event.group)
    ]
    assert stored_hashes == [md5_from_hash(bad_checksum), bad_checksum]
def test_checksum_rehashed(self):
    # A checksum that is not a valid md5 hex digest gets re-hashed,
    # while the raw value is also kept as a group hash.
    bad_checksum = 'invalid checksum hash'
    manager = EventManager(self.make_event(checksum=bad_checksum))
    manager.normalize()
    event = manager.save(self.project.id)

    stored_hashes = [
        group_hash.hash
        for group_hash in GroupHash.objects.filter(group=event.group)
    ]
    assert stored_hashes == [md5_from_hash(bad_checksum), bad_checksum]
def create_event(self, *args, **kwargs):
    """\
    Takes the results from the existing `create_event` method and inserts into the local test Snuba cluster so that tests can be run against the same event data. Note that we create a GroupHash as necessary because `create_event` doesn't run them through the 'real' event pipeline. In a perfect world all test events would go through the full regular pipeline.
    """
    # Local import — presumably to avoid a circular import at module load
    # time; confirm before hoisting to the top of the file.
    from sentry.event_manager import get_hashes_from_fingerprint, md5_from_hash
    event = super(SnubaTestCase, self).create_event(*args, **kwargs)
    data = event.data.data
    tags = dict(data.get('tags', []))
    # Backfill a 'received' timestamp from the event's datetime when absent.
    if not data.get('received'):
        data['received'] = calendar.timegm(event.datetime.timetuple())
    # Mirror the environment tag into Environment / GroupEnvironment rows,
    # which the real pipeline would normally create.
    if 'environment' in tags:
        environment = Environment.get_or_create(
            event.project,
            tags['environment'],
        )
        GroupEnvironment.objects.get_or_create(
            environment_id=environment.id,
            group_id=event.group_id,
        )
    # Promote a 'user' tag into top-level event data (mutates `data`
    # before it is handed to snuba below).
    if 'user' in tags:
        user = tags.pop('user')
        data['user'] = user
    # Compute the primary grouping hash the same way the event pipeline
    # would, defaulting to the '{{ default }}' fingerprint.
    hashes = get_hashes_from_fingerprint(
        event,
        data.get('fingerprint', ['{{ default }}']),
    )
    primary_hash = md5_from_hash(hashes[0])
    grouphash, _ = GroupHash.objects.get_or_create(
        project=event.project,
        group=event.group,
        hash=primary_hash,
    )
    self.snuba_insert(self.__wrap_event(event, data, grouphash.hash))
    return event
def create_event(self, *args, **kwargs):
    """\
    Takes the results from the existing `create_event` method and inserts
    into the local test Snuba cluster so that tests can be run against the
    same event data.

    Note that we create a GroupHash as necessary because `create_event`
    doesn't run them through the 'real' event pipeline. In a perfect world
    all test events would go through the full regular pipeline.
    """
    from sentry.event_manager import get_hashes_from_fingerprint, md5_from_hash

    event = super(SnubaTestCase, self).create_event(*args, **kwargs)
    payload = event.data.data
    tag_map = dict(payload.get('tags', []))

    # Backfill a 'received' timestamp from the event's datetime when absent.
    if not payload.get('received'):
        payload['received'] = calendar.timegm(event.datetime.timetuple())

    # Mirror the environment tag into Environment / GroupEnvironment rows.
    if 'environment' in tag_map:
        env = Environment.get_or_create(
            event.project,
            tag_map['environment'],
        )
        GroupEnvironment.objects.get_or_create(
            environment_id=env.id,
            group_id=event.group_id,
        )

    # Compute the primary grouping hash the way the real pipeline would.
    fingerprint = payload.get('fingerprint', ['{{ default }}'])
    primary_hash = md5_from_hash(
        get_hashes_from_fingerprint(event, fingerprint)[0]
    )
    grouphash, _ = GroupHash.objects.get_or_create(
        project=event.project,
        group=event.group,
        hash=primary_hash,
    )

    self.snuba_insert(self.__wrap_event(event, payload, grouphash.hash))
    return event
def test_md5_from_hash(self):
    # Mixed ASCII and non-ASCII hash components must produce a stable digest.
    hash_bits = ['foo', 'bar', u'foô']
    assert md5_from_hash(hash_bits) == '6d81588029ed4190110b2779ba952a00'
def serialize(self, obj, attrs, user):
    """Serialize an event into the detailed API response dict.

    Processing errors are de-duplicated by rendered message so the same
    error is not reported to the client more than once.
    """
    errors = []
    # Messages already emitted, used to skip duplicate error entries.
    error_set = set()
    for error in obj.data.get('errors', []):
        message = EventError.get_message(error)
        if message in error_set:
            continue
        error_set.add(message)
        error_result = {
            'type': error['type'],
            'message': message,
            # Pass through every error field except 'type', which is
            # surfaced separately above.
            'data': {
                k: v
                for k, v in six.iteritems(error) if k != 'type'
            },
        }
        errors.append(error_result)
    # Strip the internal 'sentry:' prefix from tag keys and sort by key.
    tags = sorted([
        {
            'key': k.split('sentry:', 1)[-1],
            'value': v
        } for k, v in obj.get_tags()
    ], key=lambda x: x['key'])
    received = obj.data.get('received')
    if received:
        # Sentry at one point attempted to record invalid types here.
        # Remove after June 2 2016
        try:
            received = datetime.utcfromtimestamp(received).replace(
                tzinfo=timezone.utc,
            )
        except TypeError:
            received = None
    from sentry.event_manager import (
        get_hashes_for_event,
        md5_from_hash,
    )
    # TODO(dcramer): move release serialization here
    d = {
        'id': six.text_type(obj.id),
        'groupID': six.text_type(obj.group_id),
        'eventID': six.text_type(obj.event_id),
        'size': obj.size,
        'entries': attrs['entries'],
        'dist': obj.dist,
        # See GH-3248
        'message': obj.get_legacy_message(),
        'user': attrs['user'],
        'contexts': attrs['contexts'],
        'sdk': attrs['sdk'],
        # TODO(dcramer): move into contexts['extra']
        'context': obj.data.get('extra', {}),
        'packages': obj.data.get('modules', {}),
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'tags': tags,
        'platform': obj.platform,
        'dateCreated': obj.datetime,
        'dateReceived': received,
        'errors': errors,
        'fingerprints': [md5_from_hash(h) for h in get_hashes_for_event(obj)],
    }
    return d
def get_fingerprint(event):
    """Return the MD5 digest of the event's primary grouping hash."""
    from sentry.event_manager import get_hashes_for_event, md5_from_hash

    # TODO: This *might* need to be protected from an IndexError?
    return md5_from_hash(get_hashes_for_event(event)[0])
def serialize(self, obj, attrs, user):
    """Serialize an event into the detailed API response dict.

    This variant also returns a parallel '_meta' mapping with metadata
    (as produced by the `_get_*_with_meta` helpers) for selected fields.
    """
    errors = []
    for error in obj.data.get('errors', []):
        message = EventError.get_message(error)
        error_result = {
            'type': error['type'],
            'message': message,
            # Pass through every error field except 'type', which is
            # surfaced separately above.
            'data': {k: v for k, v in six.iteritems(error) if k != 'type'},
        }
        errors.append(error_result)
    # Each helper returns a (value, meta) pair; the meta halves are
    # collected under '_meta' in the response below.
    (message, message_meta) = self._get_message_with_meta(obj)
    (tags, tags_meta) = self._get_tags_with_meta(obj)
    (context, context_meta) = self._get_attr_with_meta(obj, 'extra', {})
    (packages, packages_meta) = self._get_attr_with_meta(obj, 'modules', {})
    received = obj.data.get('received')
    if received:
        # Sentry at one point attempted to record invalid types here.
        # Remove after June 2 2016
        try:
            received = datetime.utcfromtimestamp(received).replace(
                tzinfo=timezone.utc,
            )
        except TypeError:
            received = None
    from sentry.event_manager import (
        get_hashes_from_fingerprint,
        md5_from_hash,
    )
    # TODO(dcramer): move release serialization here
    d = {
        'id': six.text_type(obj.id),
        'groupID': six.text_type(obj.group_id),
        'eventID': six.text_type(obj.event_id),
        'size': obj.size,
        'entries': attrs['entries'],
        'dist': obj.dist,
        # See GH-3248
        'message': message,
        'user': attrs['user'],
        'contexts': attrs['contexts'],
        'sdk': attrs['sdk'],
        # TODO(dcramer): move into contexts['extra']
        'context': context,
        'packages': packages,
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'tags': tags,
        'platform': obj.platform,
        'dateCreated': obj.datetime,
        'dateReceived': received,
        'errors': errors,
        # Hashes derived from the stored fingerprint, falling back to the
        # '{{ default }}' fingerprint when none was recorded.
        'fingerprints': [
            md5_from_hash(h)
            for h in get_hashes_from_fingerprint(obj, obj.data.get('fingerprint', ['{{ default }}']))
        ],
        '_meta': {
            'entries': attrs['_meta']['entries'],
            'message': message_meta,
            'user': attrs['_meta']['user'],
            'contexts': attrs['_meta']['contexts'],
            'sdk': attrs['_meta']['sdk'],
            'context': context_meta,
            'packages': packages_meta,
            'tags': tags_meta,
        },
    }
    return d