Example #1
    def test_update_existing_entries(self):
        # When a new event is associated with an existing owner, that owner's date_added should be updated.
        self.set_release_commits(self.user.email)
        event_frames = get_frame_paths(self.event.data)
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        go = GroupOwner.objects.get(
            group=self.event.group,
            project=self.event.project,
            organization=self.event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )

        date_added_before_update = go.date_added
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        go.refresh_from_db()
        assert go.date_added > date_added_before_update
        assert GroupOwner.objects.filter(group=self.event.group).count() == 1
        assert GroupOwner.objects.get(
            group=self.event.group,
            project=self.event.project,
            organization=self.event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )
Example #2
 def test_no_stacktrace_in_exception_values(self):
     self.event.data = {
         "exception": {
             "values": [{
                 "this does not": "match"
             }]
         }
     }
     assert [] == get_frame_paths(self.event.data)
Example #3
 def test_data_in_exception_values(self):
     self.event.data = {
         "exception": {
             "values": [{
                 "stacktrace": {
                     "frames": ["data"]
                 }
             }]
         }
     }
     assert ["data"] == get_frame_paths(self.event.data)
Example #4
 def test_simple(self):
     self.set_release_commits(self.user.email)
     assert not GroupOwner.objects.filter(group=self.event.group).exists()
     event_frames = get_frame_paths(self.event.data)
     process_suspect_commits(
         event_id=self.event.event_id,
         event_platform=self.event.platform,
         event_frames=event_frames,
         group_id=self.event.group_id,
         project_id=self.event.project_id,
     )
     assert GroupOwner.objects.get(
         group=self.event.group,
         project=self.event.project,
         organization=self.event.project.organization,
         type=GroupOwnerType.SUSPECT_COMMIT.value,
     )
Example #5
    def test_no_matching_user(self):
        self.set_release_commits("*****@*****.**")

        result = get_serialized_event_file_committers(self.project, self.event)

        assert len(result) == 1
        assert "commits" in result[0]
        assert len(result[0]["commits"]) == 1
        assert result[0]["commits"][0]["id"] == "a" * 40
        assert not GroupOwner.objects.filter(group=self.event.group).exists()
        event_frames = get_frame_paths(self.event.data)
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        assert not GroupOwner.objects.filter(group=self.event.group).exists()
Example #6
def post_process_group(
    is_new, is_regression, is_new_group_environment, cache_key, group_id=None, **kwargs
):
    """
    Fires post processing hooks for a group.
    """
    from sentry.eventstore.models import Event
    from sentry.eventstore.processing import event_processing_store
    from sentry.reprocessing2 import is_reprocessed_event
    from sentry.utils import snuba

    with snuba.options_override({"consistent": True}):
        # We use the data being present/missing in the processing store
        # to ensure that we don't duplicate work should the forwarding consumers
        # need to rewind history.
        data = event_processing_store.get(cache_key)
        if not data:
            logger.info(
                "post_process.skipped",
                extra={"cache_key": cache_key, "reason": "missing_cache"},
            )
            return
        event = Event(
            project_id=data["project"], event_id=data["event_id"], group_id=group_id, data=data
        )

        set_current_event_project(event.project_id)

        is_transaction_event = not bool(event.group_id)

        from sentry.models import EventDict, Organization, Project

        # Re-bind node data to avoid renormalization. We only want to
        # renormalize when loading old data from the database.
        event.data = EventDict(event.data, skip_renormalization=True)

        # Re-bind Project and Org since we're reading the Event object
        # from cache which may contain stale parent models.
        event.project = Project.objects.get_from_cache(id=event.project_id)
        event.project.set_cached_field_value(
            "organization", Organization.objects.get_from_cache(id=event.project.organization_id)
        )

        # Simplified post processing for transaction events.
        # This should eventually be completely removed and transactions
        # will not go through any post processing.
        if is_transaction_event:
            transaction_processed.send_robust(
                sender=post_process_group,
                project=event.project,
                event=event,
            )

            event_processing_store.delete_by_key(cache_key)

            return

        is_reprocessed = is_reprocessed_event(event.data)

        # NOTE: we must pass through the full Event object, and not an
        # event_id since the Event object may not actually have been stored
        # in the database due to sampling.
        from sentry.models import Commit, GroupInboxReason
        from sentry.models.group import get_group_with_redirect
        from sentry.models.groupinbox import add_group_to_inbox
        from sentry.rules.processor import RuleProcessor
        from sentry.tasks.groupowner import process_suspect_commits
        from sentry.tasks.servicehooks import process_service_hook

        # Re-bind Group since we're reading the Event object
        # from cache, which may contain a stale group and project
        event.group, _ = get_group_with_redirect(event.group_id)
        event.group_id = event.group.id

        event.group.project = event.project
        event.group.project.set_cached_field_value("organization", event.project.organization)

        bind_organization_context(event.project.organization)

        _capture_stats(event, is_new)

        if is_reprocessed and is_new:
            add_group_to_inbox(event.group, GroupInboxReason.REPROCESSED)

        if not is_reprocessed:
            # we process snoozes before rules as it might create a regression
            # but not if it's new because you can't immediately snooze a new group
            has_reappeared = False if is_new else process_snoozes(event.group)
            if not has_reappeared:  # If true, we added the .UNIGNORED reason already
                if is_new:
                    add_group_to_inbox(event.group, GroupInboxReason.NEW)
                elif is_regression:
                    add_group_to_inbox(event.group, GroupInboxReason.REGRESSION)

            handle_owner_assignment(event.project, event.group, event)

            rp = RuleProcessor(
                event, is_new, is_regression, is_new_group_environment, has_reappeared
            )
            has_alert = False
            # TODO(dcramer): ideally this would fanout, but serializing giant
            # objects back and forth isn't super efficient
            for callback, futures in rp.apply():
                has_alert = True
                safe_execute(callback, event, futures, _with_transaction=False)

            try:
                lock = locks.get(
                    f"w-o:{event.group_id}-d-l",
                    duration=10,
                )
                with lock.acquire():
                    has_commit_key = f"w-o:{event.project.organization_id}-h-c"
                    org_has_commit = cache.get(has_commit_key)
                    if org_has_commit is None:
                        org_has_commit = Commit.objects.filter(
                            organization_id=event.project.organization_id
                        ).exists()
                        cache.set(has_commit_key, org_has_commit, 3600)

                    if org_has_commit:
                        group_cache_key = f"w-o-i:g-{event.group_id}"
                        if cache.get(group_cache_key):
                            metrics.incr(
                                "sentry.tasks.process_suspect_commits.debounce",
                                tags={"detail": "w-o-i:g debounce"},
                            )
                        else:
                            from sentry.utils.committers import get_frame_paths

                            cache.set(group_cache_key, True, 604800)  # 1 week in seconds
                            event_frames = get_frame_paths(event.data)
                            process_suspect_commits.delay(
                                event_id=event.event_id,
                                event_platform=event.platform,
                                event_frames=event_frames,
                                group_id=event.group_id,
                                project_id=event.project_id,
                            )
            except UnableToAcquireLock:
                pass
            except Exception:
                logger.exception("Failed to process suspect commits")

            if features.has("projects:servicehooks", project=event.project):
                allowed_events = {"event.created"}
                if has_alert:
                    allowed_events.add("event.alert")

                if allowed_events:
                    for servicehook_id, events in _get_service_hooks(project_id=event.project_id):
                        if any(e in allowed_events for e in events):
                            process_service_hook.delay(servicehook_id=servicehook_id, event=event)

            from sentry.tasks.sentry_apps import process_resource_change_bound

            if event.get_event_type() == "error" and _should_send_error_created_hooks(
                event.project
            ):
                process_resource_change_bound.delay(
                    action="created", sender="Error", instance_id=event.event_id, instance=event
                )
            if is_new:
                process_resource_change_bound.delay(
                    action="created", sender="Group", instance_id=event.group_id
                )

            from sentry.plugins.base import plugins

            for plugin in plugins.for_project(event.project):
                plugin_post_process_group(
                    plugin_slug=plugin.slug, event=event, is_new=is_new, is_regresion=is_regression
                )

            from sentry import similarity

            safe_execute(similarity.record, event.project, [event], _with_transaction=False)

        # Patch attachments that were ingested on the standalone path.
        update_existing_attachments(event)

        if not is_reprocessed:
            event_processed.send_robust(
                sender=post_process_group,
                project=event.project,
                event=event,
                primary_hash=kwargs.get("primary_hash"),
            )

        with metrics.timer("tasks.post_process.delete_event_cache"):
            event_processing_store.delete_by_key(cache_key)
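A note on the suspect-commit block in the function above: before queueing process_suspect_commits, it layers two cache guards under a short distributed lock: an org-level flag cached for an hour (whether the organization has any commits at all) and a per-group debounce key cached for a week. Below is a minimal sketch of that debounce shape using an in-memory TTL cache; cache_get, cache_set, maybe_enqueue_suspect_commits, and the org_has_commits/enqueue callables are illustrative stand-ins, not Sentry APIs.

import time

# Illustrative in-memory TTL cache; the real task uses Django's cache and a
# distributed lock, but the debounce shape is the same.
_cache = {}


def cache_get(key):
    entry = _cache.get(key)
    if entry is None:
        return None
    expires_at, value = entry
    return value if time.monotonic() < expires_at else None


def cache_set(key, value, ttl_seconds):
    _cache[key] = (time.monotonic() + ttl_seconds, value)


def maybe_enqueue_suspect_commits(org_id, group_id, org_has_commits, enqueue):
    # Org-level guard, cached for an hour: an org with no commits can never
    # produce suspect-commit owners, so the expensive work is skipped entirely.
    has_commit_key = f"w-o:{org_id}-h-c"
    org_has_commit = cache_get(has_commit_key)
    if org_has_commit is None:
        org_has_commit = org_has_commits(org_id)  # stands in for the exists() query
        cache_set(has_commit_key, org_has_commit, 3600)
    if not org_has_commit:
        return False

    # Group-level debounce, cached for a week: at most one
    # process_suspect_commits task per group per window.
    group_key = f"w-o-i:g-{group_id}"
    if cache_get(group_key):
        return False
    cache_set(group_key, True, 604800)
    enqueue(group_id)
    return True


# First call enqueues; the second is debounced by the per-group key.
calls = []
assert maybe_enqueue_suspect_commits(1, 42, lambda org_id: True, calls.append)
assert not maybe_enqueue_suspect_commits(1, 42, lambda org_id: True, calls.append)
assert calls == [42]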
Example #7
 def test_data_does_not_match(self):
     self.event.data = {"this does not": "match"}
     assert [] == get_frame_paths(self.event.data)
Example #8
 def test_data_in_stacktrace_frames(self):
     self.event.data = {"stacktrace": {"frames": ["data"]}}
     assert ["data"] == get_frame_paths(self.event.data)
Example #9
 def test_keep_highest_score(self, patched_committers):
     self.user2 = self.create_user(email="*****@*****.**")
     self.user3 = self.create_user(email="*****@*****.**")
     patched_committers.return_value = [
         {
             "commits": [(None, 3)],
             "author": {
                 "username": self.user.email,
                 "lastLogin": None,
                 "isSuperuser": True,
                 "isManaged": False,
                 "experiments": {},
                 "lastActive": timezone.now(),
                 "isStaff": True,
                 "id": self.user.id,
                 "isActive": True,
                 "has2fa": False,
                 "name": self.user.email,
                 "avatarUrl": "https://secure.gravatar.com/avatar/46d229b033af06a191ff2267bca9ae56?s=32&d=mm",
                 "dateJoined": timezone.now(),
                 "emails": [{"is_verified": True, "id": self.user.id, "email": self.user.email}],
                 "avatar": {"avatarUuid": None, "avatarType": "letter_avatar"},
                 "hasPasswordAuth": True,
                 "email": self.user.email,
             },
         },
         {
             "commits": [(None, 1)],
             "author": {
                 "username": self.user2.email,
                 "lastLogin": None,
                 "isSuperuser": True,
                 "isManaged": False,
                 "experiments": {},
                 "lastActive": timezone.now(),
                 "isStaff": True,
                 "id": self.user2.id,
                 "isActive": True,
                 "has2fa": False,
                 "name": self.user2.email,
                 "avatarUrl": "https://secure.gravatar.com/avatar/46d229b033af06a191ff2267bca9ae56?s=32&d=mm",
                 "dateJoined": timezone.now(),
                 "emails": [{"is_verified": True, "id": self.user2.id, "email": self.user2.email}],
                 "avatar": {"avatarUuid": None, "avatarType": "letter_avatar"},
                 "hasPasswordAuth": True,
                 "email": self.user2.email,
             },
         },
         {
             "commits": [(None, 2)],
             "author": {
                 "username": self.user3.email,
                 "lastLogin": None,
                 "isSuperuser": True,
                 "isManaged": False,
                 "experiments": {},
                 "lastActive": timezone.now(),
                 "isStaff": True,
                 "id": self.user3.id,
                 "isActive": True,
                 "has2fa": False,
                 "name": self.user3.email,
                 "avatarUrl": "https://secure.gravatar.com/avatar/46d229b033af06a191ff2267bca9ae56?s=32&d=mm",
                 "dateJoined": timezone.now(),
                 "emails": [{"is_verified": True, "id": self.user3.id, "email": self.user3.email}],
                 "avatar": {"avatarUuid": None, "avatarType": "letter_avatar"},
                 "hasPasswordAuth": True,
                 "email": self.user3.email,
             },
         },
     ]
     event_frames = get_frame_paths(self.event.data)
     process_suspect_commits(
         event_id=self.event.event_id,
         event_platform=self.event.platform,
         event_frames=event_frames,
         group_id=self.event.group_id,
         project_id=self.event.project_id,
     )
     # self.user2 is excluded because it has the lowest commit score.
     assert GroupOwner.objects.get(user=self.user.id)
     assert GroupOwner.objects.get(user=self.user3.id)
     assert not GroupOwner.objects.filter(user=self.user2.id).exists()
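The assertions in Example #9 imply that process_suspect_commits ranks committers by commit score and keeps only the top entries: self.user (score 3) and self.user3 (score 2) become owners while self.user2 (score 1) does not. A one-line model of that selection, with the cap of two inferred from the assertions rather than read from the task's source:

committers = [("user", 3), ("user2", 1), ("user3", 2)]
top_owners = sorted(committers, key=lambda pair: pair[1], reverse=True)[:2]
assert [name for name, _ in top_owners] == ["user", "user3"]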
Example #10
    def test_delete_old_entries(self):
        # When new events introduce new owners, the oldest existing entries should be deleted.
        self.set_release_commits(self.user.email)
        event_frames = get_frame_paths(self.event.data)
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )
        process_suspect_commits(
            event_id=self.event.event_id,
            event_platform=self.event.platform,
            event_frames=event_frames,
            group_id=self.event.group_id,
            project_id=self.event.project_id,
        )

        assert GroupOwner.objects.filter(group=self.event.group).count() == 1
        assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()
        event_2 = self.store_event(
            data={
                "message": "BANG!",
                "platform": "python",
                "timestamp": iso_format(before_now(seconds=1)),
                "stacktrace": {
                    "frames": [
                        {
                            "function": "process_suspect_commits",
                            "abs_path": "/usr/src/sentry/src/sentry/tasks/groupowner.py",
                            "module": "sentry.tasks.groupowner",
                            "in_app": True,
                            "lineno": 48,
                            "filename": "sentry/tasks/groupowner.py",
                        },
                    ]
                },
                "tags": {"sentry:release": self.release.version},
                "fingerprint": ["put-me-in-the-control-group"],
            },
            project_id=self.project.id,
        )
        event_3 = self.store_event(
            data={
                "message": "BOP!",
                "platform": "python",
                "timestamp": iso_format(before_now(seconds=1)),
                "stacktrace": {
                    "frames": [
                        {
                            "function": "process_suspect_commits",
                            "abs_path": "/usr/src/sentry/src/sentry/tasks/groupowner.py",
                            "module": "sentry.tasks.groupowner",
                            "in_app": True,
                            "lineno": 48,
                            "filename": "sentry/tasks/groupowner.py",
                        },
                    ]
                },
                "tags": {"sentry:release": self.release.version},
                "fingerprint": ["put-me-in-the-control-group"],
            },
            project_id=self.project.id,
        )

        self.user_2 = self.create_user("*****@*****.**", is_superuser=True)
        self.create_member(teams=[self.team], user=self.user_2, organization=self.organization)
        self.user_3 = self.create_user("*****@*****.**", is_superuser=True)
        self.create_member(teams=[self.team], user=self.user_3, organization=self.organization)
        self.release.set_commits(
            [
                {
                    "id": "a" * 40,
                    "repository": self.repo.name,
                    "author_email": self.user_2.email,
                    "author_name": "joe",
                    "message": "i fixed another bug",
                    "patch_set": [{"path": "src/sentry/tasks/groupowner.py", "type": "M"}],
                }
            ]
        )

        assert event_2.group == self.event.group
        assert event_3.group == self.event.group

        self.set_release_commits(self.user_2.email)
        event_2_frames = get_frame_paths(event_2.data)
        process_suspect_commits(
            event_id=event_2.event_id,
            event_platform=event_2.platform,
            event_frames=event_2_frames,
            group_id=event_2.group_id,
            project_id=event_2.project_id,
        )
        assert GroupOwner.objects.filter(group=self.event.group).count() == 2
        assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()
        assert GroupOwner.objects.filter(group=event_2.group, user=self.user_2).exists()

        self.set_release_commits(self.user_3.email)
        event_3_frames = get_frame_paths(event_3.data)
        process_suspect_commits(
            event_id=event_3.event_id,
            event_platform=event_3.platform,
            event_frames=event_3_frames,
            group_id=event_3.group_id,
            project_id=event_3.project_id,
        )
        assert GroupOwner.objects.filter(group=self.event.group).count() == 2
        assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()
        assert GroupOwner.objects.filter(group=event_2.group, user=self.user_2).exists()
        assert not GroupOwner.objects.filter(group=event_2.group, user=self.user_3).exists()

        go = GroupOwner.objects.filter(group=event_2.group, user=self.user_2).first()
        go.date_added = timezone.now() - PREFERRED_GROUP_OWNER_AGE * 2
        go.save()

        self.set_release_commits(self.user_3.email)
        process_suspect_commits(
            event_id=event_3.event_id,
            event_platform=event_3.platform,
            event_frames=event_3_frames,
            group_id=event_3.group_id,
            project_id=event_3.project_id,
        )
        # The user_2 entry has aged past PREFERRED_GROUP_OWNER_AGE, so reprocessing evicts it in favor of user_3.
        assert GroupOwner.objects.filter(group=self.event.group).count() == 2
        assert GroupOwner.objects.filter(group=self.event.group, user=self.user).exists()
        assert not GroupOwner.objects.filter(group=event_2.group, user=self.user_2).exists()
        assert GroupOwner.objects.filter(group=event_2.group, user=self.user_3).exists()
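Example #10 (together with the date_added refresh in Example #1) exercises the retention policy around PREFERRED_GROUP_OWNER_AGE: a group keeps a small set of suspect-commit owners, a new candidate is ignored while every existing owner is still fresh, and an owner older than the threshold is evicted in favor of the newcomer. A toy model of that policy follows; the cap of two and the helper name upsert_suspect_owner are inferred from the test's assertions, not taken from the task's implementation.

from datetime import datetime, timedelta, timezone

PREFERRED_GROUP_OWNERS = 2                     # cap inferred from the test
PREFERRED_GROUP_OWNER_AGE = timedelta(days=7)  # illustrative threshold


def upsert_suspect_owner(owners, user_id, now=None):
    """owners maps user_id -> date_added; mutates and returns it."""
    now = now or datetime.now(timezone.utc)
    if user_id in owners:
        # Existing owner: just refresh date_added (Example #1).
        owners[user_id] = now
        return owners
    if len(owners) < PREFERRED_GROUP_OWNERS:
        owners[user_id] = now
        return owners
    # At capacity: evict the oldest owner only if it has aged out.
    oldest_user, oldest_added = min(owners.items(), key=lambda kv: kv[1])
    if now - oldest_added > PREFERRED_GROUP_OWNER_AGE:
        del owners[oldest_user]
        owners[user_id] = now
    return owners


now = datetime.now(timezone.utc)
owners = {"user": now, "user_2": now}
# user_3 is rejected while both existing owners are fresh...
upsert_suspect_owner(owners, "user_3", now=now)
assert "user_3" not in owners
# ...but replaces user_2 once that entry has aged out.
owners["user_2"] = now - PREFERRED_GROUP_OWNER_AGE * 2
upsert_suspect_owner(owners, "user_3", now=now)
assert set(owners) == {"user", "user_3"}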