def test_no_commits(self):
    """Looking up committers when the release has no commits raises Commit.DoesNotExist."""
    # Two in-app python frames; there are no commits to match them against.
    frames = [
        {
            "function": "handle_set_commits",
            "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
            "module": "sentry.tasks",
            "in_app": True,
            "lineno": 30,
            "filename": "sentry/tasks.py",
        },
        {
            "function": "set_commits",
            "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
            "module": "sentry.models.release",
            "in_app": True,
            "lineno": 39,
            "filename": "sentry/models/release.py",
        },
    ]
    event = self.create_event(
        group=self.group,
        message='Kaboom!',
        platform='python',
        stacktrace={'frames': frames},
    )
    with self.assertRaises(Commit.DoesNotExist):
        get_event_file_committers(self.project, event)
def test_no_commits(self):
    """Committer lookup raises Commit.DoesNotExist when the release has no commits."""
    # Two in-app python frames; neither can be matched since no commits were set.
    event = self.create_event(
        group=self.group,
        message='Kaboom!',
        platform='python',
        stacktrace={
            'frames': [
                {
                    "function": "handle_set_commits",
                    "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
                    "module": "sentry.tasks",
                    "in_app": True,
                    "lineno": 30,
                    "filename": "sentry/tasks.py",
                },
                {
                    "function": "set_commits",
                    "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
                    "module": "sentry.models.release",
                    "in_app": True,
                    "lineno": 39,
                    "filename": "sentry/models/release.py",
                }
            ]
        }
    )
    with self.assertRaises(Commit.DoesNotExist):
        get_event_file_committers(self.project, event)
def test_java_sdk_path_mangling(self):
    """A java event whose frames only carry bare filenames still matches the
    commit's full `src/main/java/...` patch path (module-based path mangling)."""
    # One non-app JDK frame plus two in-app frames from io.sentry.example.Application.
    frames = [
        {
            "function": "invoke0",
            "abs_path": "NativeMethodAccessorImpl.java",
            "in_app": False,
            "module": "jdk.internal.reflect.NativeMethodAccessorImpl",
            "filename": "NativeMethodAccessorImpl.java",
        },
        {
            "function": "home",
            "abs_path": "Application.java",
            "module": "io.sentry.example.Application",
            "in_app": True,
            "lineno": 30,
            "filename": "Application.java",
        },
        {
            "function": "handledError",
            "abs_path": "Application.java",
            "module": "io.sentry.example.Application",
            "in_app": True,
            "lineno": 39,
            "filename": "Application.java",
        },
    ]
    event = self.create_event(
        group=self.group,
        message='Kaboom!',
        platform='java',
        stacktrace={'frames': frames},
    )
    commit = {
        'id': 'a' * 40,
        'repository': self.repo.name,
        'author_email': '*****@*****.**',
        'author_name': 'Bob',
        'message': 'i fixed a bug',
        'patch_set': [
            {
                'path': 'src/main/java/io/sentry/example/Application.java',
                'type': 'M',
            },
        ],
    }
    self.release.set_commits([commit])

    result = get_event_file_committers(self.project, event)
    assert len(result) == 1
    assert 'commits' in result[0]
    assert len(result[0]['commits']) == 1
    assert result[0]['commits'][0]['id'] == 'a' * 40
def test_java_sdk_path_mangling(self):
    """Java frames carrying only bare filenames are matched to the commit's
    patch path via the frame's dotted module name."""
    # NOTE(review): the patch path 'sentry/example/Application/Application.java'
    # repeats 'Application' as a directory — looks unusual for a java package
    # layout; confirm this is the intended mangled-path fixture.
    event = self.create_event(
        group=self.group,
        message='Kaboom!',
        platform='java',
        stacktrace={
            'frames': [{
                "function": "invoke0",
                "abs_path": "NativeMethodAccessorImpl.java",
                "in_app": False,
                "module": "jdk.internal.reflect.NativeMethodAccessorImpl",
                "filename": "NativeMethodAccessorImpl.java",
            }, {
                "function": "home",
                "abs_path": "Application.java",
                "module": "io.sentry.example.Application",
                "in_app": True,
                "lineno": 30,
                "filename": "Application.java",
            }, {
                "function": "handledError",
                "abs_path": "Application.java",
                "module": "io.sentry.example.Application",
                "in_app": True,
                "lineno": 39,
                "filename": "Application.java",
            }]
        })
    self.release.set_commits([{
        'id': 'a' * 40,
        'repository': self.repo.name,
        'author_email': '*****@*****.**',
        'author_name': 'Bob',
        'message': 'i fixed a bug',
        'patch_set': [
            {
                'path': 'sentry/example/Application/Application.java',
                'type': 'M',
            },
        ]
    }])
    # Exactly one committer (Bob) with exactly one matched commit.
    result = get_event_file_committers(self.project, event)
    assert len(result) == 1
    assert 'commits' in result[0]
    assert len(result[0]['commits']) == 1
    assert result[0]['commits'][0]['id'] == 'a' * 40
def get(self, request, project, event_id):
    """
    Retrieve Committer information for an event
    ```````````````````````````````````````````

    Return commiters on an individual event, plus a per-frame breakdown.

    :pparam string project_slug: the slug of the project the event
                                 belongs to.
    :pparam string event_id: the hexadecimal ID of the event to
                             retrieve (as reported by the raven client).
    :auth: required
    """
    # Read from Snuba when the feature option is enabled, legacy Event store otherwise.
    use_snuba = options.get('snuba.events-queries.enabled')
    # Fix: original had a duplicated assignment (`event_cls = event_cls = ...`).
    event_cls = SnubaEvent if use_snuba else Event
    event = event_cls.objects.from_event_id(event_id, project.id)
    if event is None:
        return Response({'detail': 'Event not found'}, status=404)

    # populate event data (node-store payload is lazy-loaded)
    Event.objects.bind_nodes([event], 'data')

    try:
        # NOTE(review): int() on a non-numeric ?frameLimit raises ValueError
        # and would surface as a 500 — confirm whether that's acceptable here.
        committers = get_event_file_committers(
            project,
            event,
            frame_limit=int(request.GET.get('frameLimit', 25)),
        )
    except Release.DoesNotExist:
        return Response({'detail': 'Release not found'}, status=404)
    except Commit.DoesNotExist:
        return Response({'detail': 'No Commits found for Release'}, status=404)

    # XXX(dcramer): this data is unused, so lets not bother returning it for now
    # serialize the commit objects
    # serialized_annotated_frames = [
    #     {
    #         'frame': frame['frame'],
    #         'commits': serialize(frame['commits'])
    #     } for frame in annotated_frames
    # ]

    data = {
        'committers': committers,
        # 'annotatedFrames': serialized_annotated_frames
    }

    return Response(data)
def get(self, request, project, event_id):
    """
    Retrieve Committer information for an event
    ```````````````````````````````````````````

    Return commiters on an individual event, plus a per-frame breakdown.

    :pparam string project_slug: the slug of the project the event
                                 belongs to.
    :pparam string event_id: the hexadecimal ID of the event to
                             retrieve (as reported by the raven client).
    :auth: required
    """
    try:
        # Look the event up directly in the primary store, scoped to the project.
        event = Event.objects.get(
            id=event_id,
            project_id=project.id,
        )
    except Event.DoesNotExist:
        return Response({'detail': 'Event not found'}, status=404)

    # populate event data (the node-store payload is lazy-loaded)
    Event.objects.bind_nodes([event], 'data')

    try:
        # NOTE(review): int() on a non-numeric ?frameLimit would raise
        # ValueError (-> 500) — confirm that is acceptable.
        committers = get_event_file_committers(
            project,
            event,
            frame_limit=int(request.GET.get('frameLimit', 25)),
        )
    except Release.DoesNotExist:
        # Event has no associated release to resolve commits from.
        return Response({'detail': 'Release not found'}, status=404)
    except Commit.DoesNotExist:
        # Release exists but no commits were attached to it.
        return Response({'detail': 'No Commits found for Release'}, status=404)

    # XXX(dcramer): this data is unused, so lets not bother returning it for now
    # serialize the commit objects
    # serialized_annotated_frames = [
    #     {
    #         'frame': frame['frame'],
    #         'commits': serialize(frame['commits'])
    #     } for frame in annotated_frames
    # ]

    data = {
        'committers': committers,
        # 'annotatedFrames': serialized_annotated_frames
    }

    return Response(data)
def test_matching(self):
    """An in-app frame whose path suffix matches a commit's patch path yields
    that commit as a suspect."""
    # Second frame ('sentry/models/release.py') suffix-matches the commit's
    # 'src/sentry/models/release.py' patch entry.
    event = self.create_event(
        group=self.group,
        message='Kaboom!',
        platform='python',
        stacktrace={
            'frames': [
                {
                    "function": "handle_set_commits",
                    "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
                    "module": "sentry.tasks",
                    "in_app": True,
                    "lineno": 30,
                    "filename": "sentry/tasks.py",
                },
                {
                    "function": "set_commits",
                    "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
                    "module": "sentry.models.release",
                    "in_app": True,
                    "lineno": 39,
                    "filename": "sentry/models/release.py",
                }
            ]
        }
    )
    self.release.set_commits([
        {
            'id': 'a' * 40,
            'repository': self.repo.name,
            'author_email': '*****@*****.**',
            'author_name': 'Bob',
            'message': 'i fixed a bug',
            'patch_set': [
                {
                    'path': 'src/sentry/models/release.py',
                    'type': 'M',
                },
            ]
        }
    ])
    # Exactly one committer with exactly one matched commit.
    result = get_event_file_committers(self.project, event)
    assert len(result) == 1
    assert 'commits' in result[0]
    assert len(result[0]['commits']) == 1
    assert result[0]['commits'][0]['id'] == 'a' * 40
def test_matching(self):
    """A committer is returned when an in-app frame's file matches a commit's patch path."""
    # The 'sentry/models/release.py' frame suffix-matches the commit's
    # 'src/sentry/models/release.py' patch entry.
    frames = [
        {
            "function": "handle_set_commits",
            "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
            "module": "sentry.tasks",
            "in_app": True,
            "lineno": 30,
            "filename": "sentry/tasks.py",
        },
        {
            "function": "set_commits",
            "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
            "module": "sentry.models.release",
            "in_app": True,
            "lineno": 39,
            "filename": "sentry/models/release.py",
        },
    ]
    event = self.create_event(
        group=self.group,
        message='Kaboom!',
        platform='python',
        stacktrace={'frames': frames},
    )
    commit = {
        'id': 'a' * 40,
        'repository': self.repo.name,
        'author_email': '*****@*****.**',
        'author_name': 'Bob',
        'message': 'i fixed a bug',
        'patch_set': [
            {
                'path': 'src/sentry/models/release.py',
                'type': 'M',
            },
        ],
    }
    self.release.set_commits([commit])

    result = get_event_file_committers(self.project, event)
    assert len(result) == 1
    assert 'commits' in result[0]
    assert len(result[0]['commits']) == 1
    assert result[0]['commits'][0]['id'] == 'a' * 40
def get_incident_suspects(incident, projects):
    """Yield de-duplicated suspect commit dicts for an incident's groups,
    restricted to the given projects. Each yielded commit has its 'author'
    key populated from the committer entry."""
    # For now, we want to track whether we've seen a commit before to avoid
    # duplicates. We'll probably use a commit being seen across multiple groups
    # as a way to increase score in the future.
    emitted = set()
    for grp in list(incident.groups.all().filter(project__in=projects)):
        latest_event = grp.get_latest_event_for_environments()
        for entry in get_event_file_committers(grp.project, latest_event):
            for commit in entry['commits']:
                # Attach the author before the dedupe check — mirrors the
                # original behavior, which mutates even already-seen commits.
                commit['author'] = entry['author']
                key = (commit['repository']['id'], commit['id'])
                if key not in emitted:
                    emitted.add(key)
                    yield commit
def get_incident_suspect_commits(incident):
    """Yield unique suspect commit ids across all of an incident's groups,
    skipping groups whose release or commits are missing."""
    # For now, we want to track whether we've seen a commit before to avoid
    # duplicates. We'll probably use a commit being seen across multiple groups
    # as a way to increase score in the future.
    yielded = set()
    for grp in list(incident.groups.all()):
        latest_event = grp.get_latest_event_for_environments()
        try:
            file_committers = get_event_file_committers(grp.project, latest_event)
        except (Release.DoesNotExist, Commit.DoesNotExist):
            # Best effort: groups without commit data contribute nothing.
            continue
        for entry in file_committers:
            for commit, _ in entry["commits"]:
                if commit.id not in yielded:
                    yielded.add(commit.id)
                    yield commit.id
def notify(self, notification):
    """Send the error-notification email for *notification*'s event to every
    eligible recipient, including suspect-commit context when the
    'organizations:suggested-commits' feature is enabled."""
    from sentry.models import Commit, Release

    event = notification.event
    group = event.group
    project = group.project
    org = group.organization
    subject = event.get_email_subject()
    link = group.get_absolute_url()

    template = 'sentry/emails/error.txt'
    html_template = 'sentry/emails/error.html'

    # (label, settings-link) pairs for the rules that triggered this notification.
    rules = []
    for rule in notification.rules:
        rule_link = '/%s/%s/settings/alerts/rules/%s/' % (
            org.slug, project.slug, rule.id)
        rules.append((rule.label, rule_link))

    enhanced_privacy = org.flags.enhanced_privacy

    # lets identify possibly suspect commits and owners
    commits = {}
    if features.has('organizations:suggested-commits', org):
        try:
            committers = get_event_file_committers(project, event)
        except (Commit.DoesNotExist, Release.DoesNotExist):
            # No commit data for this event — simply omit the commits section.
            pass
        except Exception as exc:
            # Suspect-commit lookup is best-effort; never block the email.
            logging.exception(six.text_type(exc))
        else:
            # De-duplicate by commit id; first committer entry wins.
            for committer in committers:
                for commit in committer['commits']:
                    if commit['id'] not in commits:
                        commit_data = commit.copy()
                        commit_data['shortId'] = commit_data['id'][:7]
                        commit_data['author'] = committer['author']
                        commit_data['subject'] = commit_data[
                            'message'].split('\n', 1)[0]
                        commits[commit['id']] = commit_data

    context = {
        'project_label': project.get_full_name(),
        'group': group,
        'event': event,
        'link': link,
        'rules': rules,
        'enhanced_privacy': enhanced_privacy,
        # Highest-scoring commits first.
        'commits': sorted(commits.values(), key=lambda x: x['score'], reverse=True),
    }

    # if the organization has enabled enhanced privacy controls we dont send
    # data which may show PII or source code
    if not enhanced_privacy:
        interface_list = []
        for interface in six.itervalues(event.interfaces):
            body = interface.to_email_html(event)
            if not body:
                continue
            text_body = interface.to_string(event)
            interface_list.append(
                (interface.get_title(), mark_safe(body), text_body))

        context.update({
            'tags': event.get_tags(),
            'interfaces': interface_list,
        })

    headers = {
        'X-Sentry-Logger': group.logger,
        'X-Sentry-Logger-Level': group.get_level_display(),
        'X-Sentry-Project': project.slug,
        'X-Sentry-Reply-To': group_id_to_email(group.id),
    }

    # One email per recipient, each with its own unsubscribe link.
    for user_id in self.get_send_to(project=project, event=event):
        self.add_unsubscribe_link(context, user_id, project)
        self._send_mail(
            subject=subject,
            template=template,
            html_template=html_template,
            project=project,
            reference=group,
            headers=headers,
            type='notify.error',
            context=context,
            send_to=[user_id],
        )
def process_suspect_commits(event_id, event_platform, event_frames, group_id, project_id, **kwargs):
    """Compute suspect-commit owners for a group from an event's frames and
    persist up to PREFERRED_GROUP_OWNERS GroupOwner rows.

    Skips silently (with a log line) when the release or its commits are
    missing; aborts when the group is already at capacity with only
    recently-added owners.
    """
    metrics.incr("sentry.tasks.process_suspect_commits.start")

    project = Project.objects.get_from_cache(id=project_id)
    owners = GroupOwner.objects.filter(
        group_id=group_id,
        project=project,
        organization_id=project.organization_id,
        type=GroupOwnerType.SUSPECT_COMMIT.value,
    )
    owner_count = owners.count()
    if owner_count >= PREFERRED_GROUP_OWNERS:
        # At capacity: only continue if at least one owner is old enough to
        # be replaced; narrow `owners` to the replaceable (stale) ones.
        owners = owners.filter(date_added__lte=timezone.now() - PREFERRED_GROUP_OWNER_AGE).order_by(
            "-date_added"
        )
        if not owners.exists():
            metrics.incr(
                "sentry.tasks.process_suspect_commits.aborted",
                tags={"detail": "maxed_owners_none_old"},
            )
            return

    with metrics.timer("sentry.tasks.process_suspect_commits.process_loop"):
        try:
            with metrics.timer(
                "sentry.tasks.process_suspect_commits.get_serialized_event_file_committers"
            ):
                committers = get_event_file_committers(
                    project, group_id, event_frames, event_platform
                )
            # Best score per author, considering only commits above the threshold.
            owner_scores = {}
            for committer in committers:
                if "id" in committer["author"]:
                    author_id = committer["author"]["id"]
                    for commit, score in committer["commits"]:
                        if score >= MIN_COMMIT_SCORE:
                            owner_scores[author_id] = max(score, owner_scores.get(author_id, 0))

            if owner_scores:
                # Upsert the top-scoring authors as owners, evicting a stale
                # owner whenever a new insert pushes us over capacity.
                for owner_id in sorted(owner_scores, reverse=True, key=owner_scores.get)[
                    :PREFERRED_GROUP_OWNERS
                ]:
                    go, created = GroupOwner.objects.update_or_create(
                        group_id=group_id,
                        type=GroupOwnerType.SUSPECT_COMMIT.value,
                        user_id=owner_id,
                        project=project,
                        organization_id=project.organization_id,
                        defaults={
                            "date_added": timezone.now()
                        },  # Updates date of an existing owner, since we just matched them with this new event
                    )
                    if created:
                        owner_count += 1
                        if owner_count > PREFERRED_GROUP_OWNERS:
                            owners.first().delete()
        except Commit.DoesNotExist:
            logger.info(
                "process_suspect_commits.skipped",
                extra={"event": event_id, "reason": "no_commit"},
            )
        except Release.DoesNotExist:
            logger.info(
                "process_suspect_commits.skipped",
                extra={"event": event_id, "reason": "no_release"},
            )
def notify(self, notification):
    """Send the error-notification email for *notification*'s event, adding
    the event's environment to the context/link and suspect-commit data when
    the 'organizations:suggested-commits' feature is enabled."""
    from sentry.models import Commit, Release

    event = notification.event
    environment = event.get_tag('environment')

    group = event.group
    project = group.project
    org = group.organization

    subject = event.get_email_subject()
    link = group.get_absolute_url()
    if environment:
        # Deep-link the email into the environment the event came from.
        link = link + '?' + urlencode({'environment': environment})

    template = 'sentry/emails/error.txt'
    html_template = 'sentry/emails/error.html'

    # (label, settings-link) pairs for the rules that triggered this notification.
    rules = []
    for rule in notification.rules:
        rule_link = '/%s/%s/settings/alerts/rules/%s/' % (org.slug, project.slug, rule.id)
        rules.append((rule.label, rule_link))

    enhanced_privacy = org.flags.enhanced_privacy

    # lets identify possibly suspect commits and owners
    commits = {}
    if features.has('organizations:suggested-commits', org):
        try:
            committers = get_event_file_committers(project, event)
        except (Commit.DoesNotExist, Release.DoesNotExist):
            # No commit data for this event — omit the commits section.
            pass
        except Exception as exc:
            # Suspect-commit lookup is best-effort; never block the email.
            logging.exception(six.text_type(exc))
        else:
            # De-duplicate by commit id; first committer entry wins.
            for committer in committers:
                for commit in committer['commits']:
                    if commit['id'] not in commits:
                        commit_data = commit.copy()
                        commit_data['shortId'] = commit_data['id'][:7]
                        commit_data['author'] = committer['author']
                        commit_data['subject'] = commit_data['message'].split('\n', 1)[0]
                        commits[commit['id']] = commit_data

    context = {
        'project_label': project.get_full_name(),
        'group': group,
        'event': event,
        'link': link,
        'rules': rules,
        'enhanced_privacy': enhanced_privacy,
        # Highest-scoring commits first.
        'commits': sorted(commits.values(), key=lambda x: x['score'], reverse=True),
        'environment': environment
    }

    # if the organization has enabled enhanced privacy controls we dont send
    # data which may show PII or source code
    if not enhanced_privacy:
        interface_list = []
        for interface in six.itervalues(event.interfaces):
            body = interface.to_email_html(event)
            if not body:
                continue
            text_body = interface.to_string(event)
            interface_list.append((interface.get_title(), mark_safe(body), text_body))

        context.update({
            'tags': event.get_tags(),
            'interfaces': interface_list,
        })

    headers = {
        'X-Sentry-Logger': group.logger,
        'X-Sentry-Logger-Level': group.get_level_display(),
        'X-Sentry-Project': project.slug,
        'X-Sentry-Reply-To': group_id_to_email(group.id),
    }

    # One email per recipient, each with its own unsubscribe link.
    for user_id in self.get_send_to(project=project, event=event):
        self.add_unsubscribe_link(context, user_id, project)
        self._send_mail(
            subject=subject,
            template=template,
            html_template=html_template,
            project=project,
            reference=group,
            headers=headers,
            type='notify.error',
            context=context,
            send_to=[user_id],
        )
def process_suspect_commits(event, **kwargs):
    """Compute suspect-commit owners for *event*'s group and persist up to
    PREFERRED_GROUP_OWNERS GroupOwner rows, rate-limited per group via a
    cache flag that lives for OWNER_CACHE_LIFE seconds."""
    metrics.incr("sentry.tasks.process_suspect_commits.start")
    with metrics.timer("sentry.tasks.process_suspect_commits"):
        can_process = True
        # Abbreviation for "workflow-owners-ingestion:group-{}"
        cache_key = "w-o-i:g-{}".format(event.group_id)
        if cache.get(cache_key):
            # Only process once per OWNER_CACHE_LIFE seconds.
            metrics.incr("sentry.tasks.process_suspect_commits.skipped",
                         tags={"detail": "too_many_owners"})
            can_process = False
        else:
            project = Project.objects.get_from_cache(id=event.project_id)
            owners = GroupOwner.objects.filter(
                group_id=event.group_id,
                project=project,
                organization_id=project.organization_id,
                type=GroupOwnerType.SUSPECT_COMMIT.value,
            )
            owner_count = owners.count()
            if owner_count >= PREFERRED_GROUP_OWNERS:
                # At capacity: only continue if at least one owner is stale
                # enough to be replaced.
                owners = owners.filter(
                    date_added__lte=timezone.now() - PREFERRED_GROUP_OWNER_AGE).order_by("-date_added")
                if not owners.exists():
                    metrics.incr(
                        "sentry.tasks.process_suspect_commits.aborted",
                        tags={"detail": "maxed_owners_none_old"},
                    )
                    can_process = False
            # Set the rate-limit flag regardless of whether we proceed below.
            cache.set(cache_key, True, OWNER_CACHE_LIFE)

        if can_process:
            with metrics.timer(
                    "sentry.tasks.process_suspect_commits.process_loop"):
                metrics.incr("sentry.tasks.process_suspect_commits.calculated")
                try:
                    with metrics.timer(
                        "sentry.tasks.process_suspect_commits.get_serialized_event_file_committers"
                    ):
                        committers = get_event_file_committers(project, event)
                    # Best score per author, counting only commits above the threshold.
                    owner_scores = {}
                    for committer in committers:
                        if "id" in committer["author"]:
                            author_id = committer["author"]["id"]
                            for commit, score in committer["commits"]:
                                if score >= MIN_COMMIT_SCORE:
                                    owner_scores[author_id] = max(
                                        score, owner_scores.get(author_id, 0))

                    if owner_scores:
                        # Upsert the top-scoring authors, evicting a stale
                        # owner whenever an insert pushes us over capacity.
                        for owner_id in sorted(
                                owner_scores, reverse=True,
                                key=owner_scores.get)[:PREFERRED_GROUP_OWNERS]:
                            go, created = GroupOwner.objects.update_or_create(
                                group_id=event.group_id,
                                type=GroupOwnerType.SUSPECT_COMMIT.value,
                                user_id=owner_id,
                                project=project,
                                organization_id=project.organization_id,
                                defaults={
                                    "date_added":
                                    timezone.now()
                                },  # Updates date of an existing owner, since we just matched them with this new event
                            )
                            if created:
                                owner_count += 1
                                if owner_count > PREFERRED_GROUP_OWNERS:
                                    owners.first().delete()
                except Commit.DoesNotExist:
                    logger.info(
                        "process_suspect_commits.skipped",
                        extra={
                            "event": event.event_id,
                            "reason": "no_commit"
                        },
                    )
                except Release.DoesNotExist:
                    logger.info(
                        "process_suspect_commits.skipped",
                        extra={
                            "event": event.event_id,
                            "reason": "no_release"
                        },
                    )
def process_suspect_commits(event, **kwargs):
    """Compute suspect-commit owners for *event*'s group and persist up to
    PREFERRED_GROUP_OWNERS GroupOwner rows.

    Rate-limited per group via a cached {"count", "time"} record: groups at
    owner capacity are processed at most once per OWNER_CACHE_LIFE seconds;
    other groups at most once per GROUP_PROCESSING_DELAY.
    """
    metrics.incr("sentry.tasks.process_suspect_commits.start")
    with metrics.timer("sentry.tasks.process_suspect_commits"):
        can_process = True
        cache_key = "workflow-owners-ingestion:group-{}".format(event.group_id)

        owner_data = cache.get(cache_key)

        if owner_data and owner_data["count"] >= PREFERRED_GROUP_OWNERS:
            # Only process once per OWNER_CACHE_LIFE seconds for groups already populated with owners.
            metrics.incr("sentry.tasks.process_suspect_commits.skipped",
                         tags={"detail": "too_many_owners"})
            can_process = False
        elif owner_data and owner_data["time"] > timezone.now(
        ) - GROUP_PROCESSING_DELAY:
            # Smaller delay for groups without PREFERRED_GROUP_OWNERS owners yet
            metrics.incr("sentry.tasks.process_suspect_commits.skipped",
                         tags={"detail": "group_delay"})
            can_process = False
        else:
            project = Project.objects.get_from_cache(id=event.project_id)
            owners = GroupOwner.objects.filter(
                group_id=event.group_id,
                project=project,
                organization_id=project.organization_id,
                type=GroupOwnerType.SUSPECT_COMMIT.value,
            )
            owner_count = owners.count()
            if owner_count >= PREFERRED_GROUP_OWNERS:
                # We have enough owners already - so see if any are old.
                # If so, we can delete it and replace with a fresh one.
                owners = owners.filter(
                    date_added__lte=timezone.now() -
                    PREFERRED_GROUP_OWNER_AGE).order_by("-date_added")
                if not owners.exists():
                    metrics.incr(
                        "sentry.tasks.process_suspect_commits.aborted",
                        tags={"detail": "maxed_owners_none_old"},
                    )
                    can_process = False
            # Refresh the rate-limit record regardless of whether we proceed.
            owner_data = {"count": owner_count, "time": timezone.now()}
            cache.set(cache_key, owner_data, OWNER_CACHE_LIFE)

        if can_process:
            with metrics.timer(
                    "sentry.tasks.process_suspect_commits.process_loop"):
                metrics.incr("sentry.tasks.process_suspect_commits.calculated")
                try:
                    with metrics.timer(
                        "sentry.tasks.process_suspect_commits.get_serialized_event_file_committers"
                    ):
                        committers = get_event_file_committers(project, event)
                    # Collect one (author_id, score) pair per author. NOTE(review):
                    # the membership check keeps only the FIRST qualifying score
                    # per author (not the max) — confirm this is intended. The
                    # comprehension's `score` also shadows the loop variable.
                    new_owners = []
                    for committer in committers:
                        if "id" in committer["author"]:
                            author_id = committer["author"]["id"]
                            for commit, score in committer["commits"]:
                                if score >= MIN_COMMIT_SCORE and not [
                                        aid for aid, score in new_owners
                                        if aid == author_id
                                ]:
                                    new_owners.append((author_id, score))

                    if new_owners:
                        # Upsert the top-scoring authors, evicting a stale
                        # owner whenever an insert pushes us over capacity.
                        for owner_id, score in sorted(
                                new_owners, key=lambda a: a[1],
                                reverse=True)[:PREFERRED_GROUP_OWNERS]:
                            go, created = GroupOwner.objects.update_or_create(
                                group_id=event.group_id,
                                type=GroupOwnerType.SUSPECT_COMMIT.value,
                                user_id=owner_id,
                                project=project,
                                organization_id=project.organization_id,
                                defaults={
                                    "date_added": timezone.now()
                                },  # Updates date of an existing owner, since we just matched them with this new event
                            )
                            if created:
                                owner_count += 1
                                if owner_count > PREFERRED_GROUP_OWNERS:
                                    owners.first().delete()
                # NOTE(review): unlike earlier variants, Commit.DoesNotExist is
                # not caught here — confirm it can no longer be raised.
                except Release.DoesNotExist:
                    logger.info(
                        "process_suspect_commits.skipped",
                        extra={
                            "event": event.id,
                            "reason": "no_release"
                        },
                    )