Example #1
 def test_no_release_or_commit(self):
     event_with_no_release = self.store_event(
         data={
             "message": "BOOM!",
             "platform": "python",
             "timestamp": iso_format(before_now(seconds=1)),
             "stacktrace": {
                 "frames": [
                     {
                         "function": "process_suspect_commits",
                         "abs_path":
                         "/usr/src/sentry/src/sentry/tasks/groupowner.py",
                         "module": "sentry.tasks.groupowner",
                         "in_app": True,
                         "lineno": 48,
                         "filename": "sentry/tasks/groupowner.py",
                     },
                 ]
             },
             "fingerprint": ["i-have-no-release"],
         },
         project_id=self.project.id,
     )
     process_suspect_commits(
         event_with_no_release.event_id,
         event_with_no_release.platform,
         event_with_no_release.data,
         event_with_no_release.group_id,
         event_with_no_release.project_id,
     )
     assert GroupOwner.objects.filter(
         group=event_with_no_release.group).count() == 0
Example #2
    def test_update_existing_entries(self):
        # As new events come in associated with existing owners, we should update the date_added of that owner.
        self.set_release_commits(self.user.email)
        event_frames = get_frame_paths(self.event.data)
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        go = GroupOwner.objects.get(
            group=self.event.group,
            project=self.event.project,
            organization=self.event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )

        date_added_before_update = go.date_added
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        go.refresh_from_db()
        assert go.date_added > date_added_before_update
        assert GroupOwner.objects.filter(group=self.event.group).count() == 1
        assert GroupOwner.objects.get(
            group=self.event.group,
            project=self.event.project,
            organization=self.event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )
Example #3
    def test_update_existing_entries(self):
        # As new events come in associated with existing owners, we should update the date_added of that owner.
        self.set_release_commits(self.user.email)
        data = self.event_1.data
        data["event_id"] = self.event_1.event_id
        data["project"] = self.event_1.project_id

        process_suspect_commits(self.event_1)
        go = GroupOwner.objects.get(
            group=self.event_1.group,
            project=self.event_1.project,
            organization=self.event_1.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )

        date_added_before_update = go.date_added
        process_suspect_commits(self.event_1)
        go.refresh_from_db()
        assert go.date_added > date_added_before_update
        assert GroupOwner.objects.filter(group=self.event_1.group).count() == 1
        assert GroupOwner.objects.get(
            group=self.event_1.group,
            project=self.event_1.project,
            organization=self.event_1.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )
Example #4
 def test_simple(self):
     self.set_release_commits(self.user.email)
     assert not GroupOwner.objects.filter(group=self.event.group).exists()
     process_suspect_commits(self.event)
     assert GroupOwner.objects.get(
         group=self.event.group,
         project=self.event.project,
         organization=self.event.project.organization,
         type=GroupOwnerType.SUSPECT_COMMIT.value,
     )
Example #5
    def test_no_matching_user(self):
        self.set_release_commits("*****@*****.**")

        result = get_serialized_event_file_committers(self.project, self.event)

        assert len(result) == 1
        assert "commits" in result[0]
        assert len(result[0]["commits"]) == 1
        assert result[0]["commits"][0]["id"] == "a" * 40
        assert not GroupOwner.objects.filter(group=self.event.group).exists()
        process_suspect_commits(self.event)
        assert not GroupOwner.objects.filter(group=self.event.group).exists()
Example #6
 def test_simple(self):
     self.set_release_commits(self.user.email)
     assert not GroupOwner.objects.filter(group=self.event.group).exists()
     event_frames = get_frame_paths(self.event.data)
     process_suspect_commits(
         event_id=self.event.event_id,
         event_platform=self.event.platform,
         event_frames=event_frames,
         group_id=self.event.group_id,
         project_id=self.event.project_id,
     )
     assert GroupOwner.objects.get(
         group=self.event.group,
         project=self.event.project,
         organization=self.event.project.organization,
         type=GroupOwnerType.SUSPECT_COMMIT.value,
     )
Example #7
    def test_no_matching_user(self):
        self.set_release_commits("*****@*****.**")

        result = get_serialized_event_file_committers(self.project, self.event)

        assert len(result) == 1
        assert "commits" in result[0]
        assert len(result[0]["commits"]) == 1
        assert result[0]["commits"][0]["id"] == "a" * 40
        assert not GroupOwner.objects.filter(group=self.event.group).exists()
        event_frames = get_frame_paths(self.event.data)
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        assert not GroupOwner.objects.filter(group=self.event.group).exists()
Example #8
def post_process_group(is_new,
                       is_regression,
                       is_new_group_environment,
                       cache_key,
                       group_id=None,
                       **kwargs):
    """
    Fires post processing hooks for a group.
    """
    from sentry.eventstore.models import Event
    from sentry.eventstore.processing import event_processing_store
    from sentry.utils import snuba
    from sentry.reprocessing2 import is_reprocessed_event

    with snuba.options_override({"consistent": True}):
        # We use the data being present/missing in the processing store
        # to ensure that we don't duplicate work should the forwarding consumers
        # need to rewind history.
        data = event_processing_store.get(cache_key)
        if not data:
            logger.info(
                "post_process.skipped",
                extra={
                    "cache_key": cache_key,
                    "reason": "missing_cache"
                },
            )
            return
        event = Event(project_id=data["project"],
                      event_id=data["event_id"],
                      group_id=group_id,
                      data=data)

        set_current_project(event.project_id)

        is_reprocessed = is_reprocessed_event(event.data)

        # NOTE: we must pass through the full Event object, and not an
        # event_id since the Event object may not actually have been stored
        # in the database due to sampling.
        from sentry.models import (
            Commit,
            Project,
            Organization,
            EventDict,
            GroupInboxReason,
        )
        from sentry.models.groupinbox import add_group_to_inbox
        from sentry.models.group import get_group_with_redirect
        from sentry.rules.processor import RuleProcessor
        from sentry.tasks.servicehooks import process_service_hook
        from sentry.tasks.groupowner import process_suspect_commits

        # Re-bind node data to avoid renormalization. We only want to
        # renormalize when loading old data from the database.
        event.data = EventDict(event.data, skip_renormalization=True)

        # Re-bind Project and Org since we're reading the Event object
        # from cache which may contain stale parent models.
        event.project = Project.objects.get_from_cache(id=event.project_id)
        event.project._organization_cache = Organization.objects.get_from_cache(
            id=event.project.organization_id)

        if event.group_id:
            # Re-bind Group since we're reading the Event object
            # from cache, which may contain a stale group and project
            event.group, _ = get_group_with_redirect(event.group_id)
            event.group_id = event.group.id

            event.group.project = event.project
            event.group.project._organization_cache = event.project._organization_cache

        bind_organization_context(event.project.organization)

        _capture_stats(event, is_new)

        if event.group_id and is_reprocessed and is_new:
            add_group_to_inbox(event.group, GroupInboxReason.REPROCESSED)

        if event.group_id and not is_reprocessed:
            # we process snoozes before rules as it might create a regression
            # but not if it's new because you can't immediately snooze a new group
            has_reappeared = False if is_new else process_snoozes(event.group)
            if not has_reappeared:  # If true, we added the .UNIGNORED reason already
                if is_new:
                    add_group_to_inbox(event.group, GroupInboxReason.NEW)
                elif is_regression:
                    add_group_to_inbox(event.group,
                                       GroupInboxReason.REGRESSION)

            handle_owner_assignment(event.project, event.group, event)

            rp = RuleProcessor(event, is_new, is_regression,
                               is_new_group_environment, has_reappeared)
            has_alert = False
            # TODO(dcramer): ideally this would fanout, but serializing giant
            # objects back and forth isn't super efficient
            for callback, futures in rp.apply():
                has_alert = True
                with sentry_sdk.start_transaction(op="post_process_group",
                                                  name="rule_processor_apply",
                                                  sampled=True):
                    safe_execute(callback, event, futures)

            has_commit_key = "workflow-owners-ingestion:org-{}-has-commits".format(
                event.project.organization_id)

            try:
                org_has_commit = cache.get(has_commit_key)
                if org_has_commit is None:
                    org_has_commit = Commit.objects.filter(
                        organization_id=event.project.organization_id
                    ).exists()
                    cache.set(has_commit_key, org_has_commit, 3600)

                if org_has_commit and features.has(
                        "projects:workflow-owners-ingestion",
                        event.project,
                ):
                    process_suspect_commits(event=event)
            except Exception:
                logger.exception("Failed to process suspect commits")

            if features.has("projects:servicehooks", project=event.project):
                allowed_events = set(["event.created"])
                if has_alert:
                    allowed_events.add("event.alert")

                if allowed_events:
                    for servicehook_id, events in _get_service_hooks(
                            project_id=event.project_id):
                        if any(e in allowed_events for e in events):
                            process_service_hook.delay(
                                servicehook_id=servicehook_id, event=event)

            from sentry.tasks.sentry_apps import process_resource_change_bound

            if event.get_event_type() == "error" and _should_send_error_created_hooks(
                event.project
            ):
                process_resource_change_bound.delay(action="created",
                                                    sender="Error",
                                                    instance_id=event.event_id,
                                                    instance=event)
            if is_new:
                process_resource_change_bound.delay(action="created",
                                                    sender="Group",
                                                    instance_id=event.group_id)

            from sentry.plugins.base import plugins

            for plugin in plugins.for_project(event.project):
                plugin_post_process_group(plugin_slug=plugin.slug,
                                          event=event,
                                          is_new=is_new,
                                          is_regresion=is_regression)

            from sentry import similarity

            safe_execute(similarity.record, event.project, [event])

        if event.group_id:
            # Patch attachments that were ingested on the standalone path.
            update_existing_attachments(event)

        if not is_reprocessed:
            event_processed.send_robust(
                sender=post_process_group,
                project=event.project,
                event=event,
                primary_hash=kwargs.get("primary_hash"),
            )

        with metrics.timer("tasks.post_process.delete_event_cache"):
            event_processing_store.delete_by_key(cache_key)
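
In Example #8, process_suspect_commits only runs when the organization has at least one commit (checked via a cache entry that is recomputed at most once per hour) and the "projects:workflow-owners-ingestion" feature flag is enabled for the project. That gate condenses to roughly the sketch below; the helper name is hypothetical, and the import locations for cache and features are assumptions, since the example only shows them being used.

    # Hypothetical extraction of the gate shown in Example #8. Commit is imported
    # from sentry.models there; the cache and features imports are assumptions.
    from django.core.cache import cache  # assumed backend; the example just uses `cache`
    from sentry import features  # assumed import path
    from sentry.models import Commit


    def should_process_suspect_commits(project):
        """Return True when the suspect-commit task should run for this project."""
        has_commit_key = "workflow-owners-ingestion:org-{}-has-commits".format(
            project.organization_id
        )
        org_has_commit = cache.get(has_commit_key)
        if org_has_commit is None:
            # Recompute and cache the answer for one hour per organization.
            org_has_commit = Commit.objects.filter(
                organization_id=project.organization_id
            ).exists()
            cache.set(has_commit_key, org_has_commit, 3600)
        return bool(org_has_commit) and features.has(
            "projects:workflow-owners-ingestion", project
        )
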
Example #9
 def test_keep_highest_score(self, patched_committers):
     self.user2 = self.create_user(email="*****@*****.**")
     self.user3 = self.create_user(email="*****@*****.**")
     patched_committers.return_value = [
         {
             "commits": [(None, 3)],
             "author": {
                 "username": self.user.email,
                 "lastLogin": None,
                 "isSuperuser": True,
                 "isManaged": False,
                 "experiments": {},
                 "lastActive": timezone.now(),
                 "isStaff": True,
                 "id": self.user.id,
                 "isActive": True,
                 "has2fa": False,
                 "name": self.user.email,
                 "avatarUrl": "https://secure.gravatar.com/avatar/46d229b033af06a191ff2267bca9ae56?s=32&d=mm",
                 "dateJoined": timezone.now(),
                 "emails": [
                     {"is_verified": True, "id": self.user.id, "email": self.user.email}
                 ],
                 "avatar": {"avatarUuid": None, "avatarType": "letter_avatar"},
                 "hasPasswordAuth": True,
                 "email": self.user.email,
             },
         },
         {
             "commits": [(None, 1)],
             "author": {
                 "username": self.user2.email,
                 "lastLogin": None,
                 "isSuperuser": True,
                 "isManaged": False,
                 "experiments": {},
                 "lastActive": timezone.now(),
                 "isStaff": True,
                 "id": self.user2.id,
                 "isActive": True,
                 "has2fa": False,
                 "name": self.user2.email,
                 "avatarUrl": "https://secure.gravatar.com/avatar/46d229b033af06a191ff2267bca9ae56?s=32&d=mm",
                 "dateJoined": timezone.now(),
                 "emails": [
                     {"is_verified": True, "id": self.user2.id, "email": self.user2.email}
                 ],
                 "avatar": {"avatarUuid": None, "avatarType": "letter_avatar"},
                 "hasPasswordAuth": True,
                 "email": self.user2.email,
             },
         },
         {
             "commits": [(None, 2)],
             "author": {
                 "username": self.user3.email,
                 "lastLogin": None,
                 "isSuperuser": True,
                 "isManaged": False,
                 "experiments": {},
                 "lastActive": timezone.now(),
                 "isStaff": True,
                 "id": self.user3.id,
                 "isActive": True,
                 "has2fa": False,
                 "name": self.user3.email,
                 "avatarUrl": "https://secure.gravatar.com/avatar/46d229b033af06a191ff2267bca9ae56?s=32&d=mm",
                 "dateJoined": timezone.now(),
                 "emails": [
                     {"is_verified": True, "id": self.user3.id, "email": self.user3.email}
                 ],
                 "avatar": {"avatarUuid": None, "avatarType": "letter_avatar"},
                 "hasPasswordAuth": True,
                 "email": self.user3.email,
             },
         },
     ]
     event_frames = get_frame_paths(self.event.data)
     process_suspect_commits(
         event_id=self.event.event_id,
         event_platform=self.event.platform,
         event_frames=event_frames,
         group_id=self.event.group_id,
         project_id=self.event.project_id,
     )
     # Doesn't use self.user2 due to low score.
     assert GroupOwner.objects.get(user=self.user.id)
     assert GroupOwner.objects.get(user=self.user3.id)
     assert not GroupOwner.objects.filter(user=self.user2.id).exists()
Example #10
    def test_delete_old_entries(self):
        # As new events come in associated with new owners, we should delete old ones.
        self.set_release_commits(self.user.email)
        event_frames = get_frame_paths(self.event.data)
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )

        assert GroupOwner.objects.filter(group=self.event.group).count() == 1
        assert GroupOwner.objects.filter(group=self.event.group,
                                         user=self.user).exists()
        event_2 = self.store_event(
            data={
                "message": "BANG!",
                "platform": "python",
                "timestamp": iso_format(before_now(seconds=1)),
                "stacktrace": {
                    "frames": [
                        {
                            "function": "process_suspect_commits",
                            "abs_path":
                            "/usr/src/sentry/src/sentry/tasks/groupowner.py",
                            "module": "sentry.tasks.groupowner",
                            "in_app": True,
                            "lineno": 48,
                            "filename": "sentry/tasks/groupowner.py",
                        },
                    ]
                },
                "tags": {
                    "sentry:release": self.release.version
                },
                "fingerprint": ["put-me-in-the-control-group"],
            },
            project_id=self.project.id,
        )
        event_3 = self.store_event(
            data={
                "message": "BOP!",
                "platform": "python",
                "timestamp": iso_format(before_now(seconds=1)),
                "stacktrace": {
                    "frames": [
                        {
                            "function": "process_suspect_commits",
                            "abs_path":
                            "/usr/src/sentry/src/sentry/tasks/groupowner.py",
                            "module": "sentry.tasks.groupowner",
                            "in_app": True,
                            "lineno": 48,
                            "filename": "sentry/tasks/groupowner.py",
                        },
                    ]
                },
                "tags": {
                    "sentry:release": self.release.version
                },
                "fingerprint": ["put-me-in-the-control-group"],
            },
            project_id=self.project.id,
        )

        self.user_2 = self.create_user("*****@*****.**", is_superuser=True)
        self.create_member(teams=[self.team],
                           user=self.user_2,
                           organization=self.organization)
        self.user_3 = self.create_user("*****@*****.**", is_superuser=True)
        self.create_member(teams=[self.team],
                           user=self.user_3,
                           organization=self.organization)
        self.release.set_commits(
            [
                {
                    "id": "a" * 40,
                    "repository": self.repo.name,
                    "author_email": self.user_2.email,
                    "author_name": "joe",
                    "message": "i fixed another bug",
                    "patch_set": [{"path": "src/sentry/tasks/groupowner.py", "type": "M"}],
                }
            ]
        )

        assert event_2.group == self.event.group
        assert event_3.group == self.event.group

        self.set_release_commits(self.user_2.email)
        event_2_frames = get_frame_paths(event_2.data)
        process_suspect_commits(
            event_id=event_2.event_id,
            event_platform=event_2.platform,
            event_frames=event_2_frames,
            group_id=event_2.group_id,
            project_id=event_2.project_id,
        )
        assert GroupOwner.objects.filter(group=self.event.group).count() == 2
        assert GroupOwner.objects.filter(group=self.event.group,
                                         user=self.user).exists()
        assert GroupOwner.objects.filter(group=event_2.group,
                                         user=self.user_2).exists()

        self.set_release_commits(self.user_3.email)
        event_3_frames = get_frame_paths(event_3.data)
        process_suspect_commits(
            event_id=event_3.event_id,
            event_platform=event_3.platform,
            event_frames=event_3_frames,
            group_id=event_3.group_id,
            project_id=event_3.project_id,
        )
        assert GroupOwner.objects.filter(group=self.event.group).count() == 2
        assert GroupOwner.objects.filter(group=self.event.group,
                                         user=self.user).exists()
        assert GroupOwner.objects.filter(group=event_2.group,
                                         user=self.user_2).exists()
        assert not GroupOwner.objects.filter(group=event_2.group,
                                             user=self.user_3).exists()

        go = GroupOwner.objects.filter(group=event_2.group,
                                       user=self.user_2).first()
        go.date_added = timezone.now() - PREFERRED_GROUP_OWNER_AGE * 2
        go.save()

        self.set_release_commits(self.user_3.email)
        process_suspect_commits(
            event_id=event_3.event_id,
            event_platform=event_3.platform,
            event_frames=event_3_frames,
            group_id=event_3.group_id,
            project_id=event_3.project_id,
        )
        # Won't be processed because the cache is present and this group has owners
        assert GroupOwner.objects.filter(group=self.event.group).count() == 2
        assert GroupOwner.objects.filter(group=self.event.group,
                                         user=self.user).exists()
        assert not GroupOwner.objects.filter(group=event_2.group,
                                             user=self.user_2).exists()
        assert GroupOwner.objects.filter(group=event_2.group,
                                         user=self.user_3).exists()
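
Taken together, the examples show three calling conventions for process_suspect_commits: Example #1 passes the raw event payload positionally, Examples #3, #4, #5 and #8 pass the Event object itself, and Examples #2, #6, #7, #9 and #10 pass scalar identifiers plus frame paths extracted with get_frame_paths. A minimal sketch of that last form follows; the wrapper name is hypothetical, and the import path for get_frame_paths is an assumption, since the examples only show the call.

    # Hypothetical wrapper around the keyword-argument form used in the tests above.
    from sentry.tasks.groupowner import process_suspect_commits  # as imported in Example #8
    from sentry.utils.committers import get_frame_paths  # assumed module path


    def dispatch_suspect_commits(event):
        """Invoke the suspect-commit task the way the newer tests above do."""
        process_suspect_commits(
            event_id=event.event_id,
            event_platform=event.platform,
            event_frames=get_frame_paths(event.data),  # in-app frame paths from the event payload
            group_id=event.group_id,
            project_id=event.project_id,
        )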