Example #1
0
    def post(self, request, group):
        """
        Reprocess a group
        `````````````````

        This endpoint triggers reprocessing for all events in a group.
        Currently this means duplicating the events with new event IDs and
        bumped timestamps.

        :pparam string issue_id: the ID of the issue to retrieve.
        :auth: required
        """

        # Feature-gated per project; 404 (not 403) so the endpoint's
        # existence is not revealed when the flag is off.
        if not features.has("projects:reprocessing-v2", group.project, actor=request.user):
            return self.respond(
                {"error": "This project does not have the reprocessing v2 feature"}, status=404,
            )

        max_events = request.data.get("maxEvents")
        if max_events:
            # Bug fix: a non-numeric value previously made int() raise
            # ValueError, which surfaced as a 500 instead of a client error.
            try:
                max_events = int(max_events)
            except (TypeError, ValueError):
                return self.respond({"error": "maxEvents must be an integer"}, status=400,)

            if max_events <= 0:
                return self.respond({"error": "maxEvents must be at least 1"}, status=400,)
        else:
            # Absent/empty/zero-ish input means "no cap on events".
            max_events = None

        # Kick off reprocessing asynchronously; the request returns immediately.
        reprocess_group.delay(project_id=group.project_id, group_id=group.id, max_events=max_events)
        return self.respond(status=200)
Example #2
0
    def post(self, request, group):
        """
        Reprocess a group
        `````````````````

        This endpoint triggers reprocessing for all events in a group.

        :pparam string issue_id: the numeric ID of the issue to reprocess. The
            reprocessed events will be assigned to a new numeric ID. See comments
            in sentry.reprocessing2.
        :auth: required
        """

        # Feature-gated at the organization level; 404 hides the endpoint
        # from organizations without the flag.
        if not features.has("organizations:reprocessing-v2",
                            group.project.organization,
                            actor=request.user):
            return self.respond(
                {
                    "error":
                    "This project does not have the reprocessing v2 feature"
                },
                status=404,
            )

        max_events = request.data.get("maxEvents")
        if max_events:
            # Bug fix: a non-numeric value previously made int() raise
            # ValueError, which surfaced as a 500 instead of a client error.
            try:
                max_events = int(max_events)
            except (TypeError, ValueError):
                return self.respond({"error": "maxEvents must be an integer"},
                                    status=400)

            if max_events <= 0:
                return self.respond({"error": "maxEvents must be at least 1"},
                                    status=400)
        else:
            # Absent/empty input means "no cap on events".
            max_events = None

        # remainingEvents is required and must be an explicit choice; there is
        # deliberately no default for what happens to events beyond the cap.
        remaining_events = request.data.get("remainingEvents")
        if remaining_events not in ("delete", "keep"):
            return self.respond(
                {"error": "remainingEvents must be delete or keep"},
                status=400)

        # Kick off reprocessing asynchronously; the request returns immediately.
        reprocess_group.delay(
            project_id=group.project_id,
            group_id=group.id,
            max_events=max_events,
            acting_user_id=getattr(request.user, "id", None),
            remaining_events=remaining_events,
        )
        return self.respond(status=200)
Example #3
0
    def test_reprocessing(self):
        """End-to-end: ingest a minidump, reprocess its group in place, and
        verify the reprocessed event's stack-trace data and attachments."""
        self.project.update_option("sentry:store_crash_reports",
                                   STORE_CRASH_REPORTS_ALL)

        with self.feature({
                "organizations:event-attachments": True,
                "organizations:reprocessing-v2": True
        }):
            with open(get_fixture_path("windows.dmp"), "rb") as f:
                event = self.post_and_retrieve_minidump(
                    {"upload_file_minidump": f},
                    {"sentry[logger]": "test-logger"})

            insta_snapshot_stacktrace_data(self, event.data, subname="initial")

            self.upload_symbols()

            from sentry.tasks.reprocessing2 import reprocess_group

            with BurstTaskRunner() as burst:
                reprocess_group.delay(project_id=self.project.id,
                                      group_id=event.group_id)

            # Drain the queued reprocessing tasks; cap prevents a runaway loop.
            burst(max_jobs=100)

            # Reprocessing v2 rewrites the event in place: the event ID is kept.
            new_event = eventstore.get_event_by_id(self.project.id,
                                                   event.event_id)
            assert new_event is not None
            assert new_event.event_id == event.event_id

        insta_snapshot_stacktrace_data(self,
                                       new_event.data,
                                       subname="reprocessed")

        for event_id in (event.event_id, new_event.event_id):
            # Bug fix: the filter previously hard-coded new_event.event_id, so
            # the loop variable was unused and the original event's
            # attachments were never actually checked.
            (minidump, ) = sorted(
                EventAttachment.objects.filter(event_id=event_id),
                key=lambda x: x.name)

            assert minidump.name == "windows.dmp"
            minidump_file = File.objects.get(id=minidump.file_id)
            assert minidump_file.type == "event.minidump"
            assert minidump_file.checksum == "74bb01c850e8d65d3ffbc5bad5cabc4668fce247"
Example #4
0
    def post(self, request, group):
        """
        Reprocess a group
        `````````````````

        This endpoint triggers reprocessing for all events in a group.
        Currently this means duplicating the events with new event IDs and
        bumped timestamps.

        :pparam string issue_id: the ID of the issue to retrieve.
        :auth: required
        """

        # Feature-gated per project; a bare 404 hides the endpoint when the
        # reprocessing flag is not enabled.
        reprocessing_enabled = features.has(
            "projects:reprocessing-v2", group.project, actor=request.user
        )
        if not reprocessing_enabled:
            return self.respond(status=404)

        # Schedule the reprocessing task asynchronously and return right away.
        reprocess_group.delay(project_id=group.project_id, group_id=group.id)
        return self.respond(status=200)
Example #5
0
    def test_reprocessing(self):
        """End-to-end: ingest a minidump, reprocess its group, and verify the
        duplicated event and its copied attachments."""
        pytest.skip("Temporarily disabled due to prod problem")
        self.project.update_option("sentry:store_crash_reports", STORE_CRASH_REPORTS_ALL)

        with self.feature(
            {"organizations:event-attachments": True, "projects:reprocessing-v2": True}
        ):
            with open(get_fixture_path("windows.dmp"), "rb") as f:
                event = self.post_and_retrieve_minidump(
                    {"upload_file_minidump": f}, {"sentry[logger]": "test-logger"}
                )

            insta_snapshot_stacktrace_data(self, event.data, subname="initial")

            self.upload_symbols()

            from sentry.tasks.reprocessing2 import reprocess_group

            with BurstTaskRunner() as burst:
                reprocess_group.delay(project_id=self.project.id, group_id=event.group_id)

            # Drain all queued reprocessing tasks.
            burst()

            # This reprocessing flavor duplicates the event under a fresh
            # event ID and links it back via the original_event_id tag.
            (new_event,) = eventstore.get_events(
                eventstore.Filter(
                    project_ids=[self.project.id],
                    conditions=[["tags[original_event_id]", "=", event.event_id]],
                )
            )
            assert new_event is not None
            assert new_event.event_id != event.event_id

        insta_snapshot_stacktrace_data(self, new_event.data, subname="reprocessed")

        for event_id in (event.event_id, new_event.event_id):
            # Bug fix: the filter previously hard-coded new_event.event_id, so
            # the loop variable was unused and the original event's
            # attachments were never actually checked.
            (minidump,) = sorted(
                EventAttachment.objects.filter(event_id=event_id), key=lambda x: x.name
            )

            assert minidump.name == "windows.dmp"
            assert minidump.file.type == "event.minidump"
            assert minidump.file.checksum == "74bb01c850e8d65d3ffbc5bad5cabc4668fce247"