Example #1
    def test_merge_with_event_integrity(self):
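        # Store two events in two separate groups, merge group1 into group2, and
        # verify both events are re-pointed at group2 with their payloads intact.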
        project = self.create_project()
        event1 = self.store_event(
            data={
                "event_id": "a" * 32,
                "timestamp": iso_format(before_now(seconds=1)),
                "fingerprint": ["group-1"],
                "extra": {
                    "foo": "bar"
                },
            },
            project_id=project.id,
        )
        group1 = event1.group
        event2 = self.store_event(
            data={
                "event_id": "b" * 32,
                "timestamp": iso_format(before_now(seconds=1)),
                "fingerprint": ["group-2"],
                "extra": {
                    "foo": "baz"
                },
            },
            project_id=project.id,
        )
        group2 = event2.group

        with self.tasks():
            eventstream_state = eventstream.start_merge(
                project.id, [group1.id], group2.id)
            merge_groups([group1.id], group2.id)
            eventstream.end_merge(eventstream_state)

        assert not Group.objects.filter(id=group1.id).exists()

        event1 = eventstore.get_event_by_id(project.id, event1.event_id)
        assert event1.group_id == group2.id
        Event.objects.bind_nodes([event1], "data")
        assert event1.data["extra"]["foo"] == "bar"

        event2 = eventstore.get_event_by_id(project.id, event2.event_id)
        assert event2.group_id == group2.id
        Event.objects.bind_nodes([event2], "data")
        assert event2.data["extra"]["foo"] == "baz"
    def put(self, request, project):
        """
        Bulk Mutate a List of Issues
        ````````````````````````````

        Bulk mutate various attributes on issues.  The list of issues
        to modify is given through the `id` query parameter.  It is repeated
        for each issue that should be modified.

        - For non-status updates, the `id` query parameter is required.
        - For status updates, the `id` query parameter may be omitted
          for a batch "update all" query.
        - An optional `status` query parameter may be used to restrict
          mutations to only issues with the given status.

        If any IDs are out of scope, this operation will succeed without
        any data mutation.

        The following attributes can be modified and are supplied as a
        JSON object in the request body:

        :qparam int id: a list of IDs of the issues to be mutated.  This
                        parameter shall be repeated for each issue.  It
                        is optional only if a status is mutated in which
                        case an implicit `update all` is assumed.
        :qparam string status: optionally limits the query to issues of the
                               specified status.  Valid values are
                               ``"resolved"``, ``"unresolved"`` and
                               ``"ignored"``.
        :pparam string organization_slug: the slug of the organization the
                                          issues belong to.
        :pparam string project_slug: the slug of the project the issues
                                     belong to.
        :param string status: the new status for the issues.  Valid values
                              are ``"resolved"``, ``"resolvedInNextRelease"``,
                              ``"unresolved"``, and ``"ignored"``.
        :param map statusDetails: additional details about the resolution.
                                  Valid values are ``"inRelease"``, ``"inNextRelease"``,
                                  ``"inCommit"``,  ``"ignoreDuration"``, ``"ignoreCount"``,
                                  ``"ignoreWindow"``, ``"ignoreUserCount"``, and
                                  ``"ignoreUserWindow"``.
        :param int ignoreDuration: the number of minutes to ignore this issue.
        :param boolean isPublic: sets the issue to public or private.
        :param boolean merge: allows merging or unmerging different issues.
        :param string assignedTo: the actor id (or username) of the user or team that should be
                                  assigned to this issue.
        :param boolean hasSeen: if this API call is invoked with a user
                                context, this allows changing the flag
                                that indicates whether the user has seen
                                the event.
        :param boolean isBookmarked: if this API call is invoked with a
                                     user context, this allows changing
                                     the bookmark flag.
        :auth: required
        """
        group_ids = request.GET.getlist('id')
        if group_ids:
            group_list = Group.objects.filter(
                project=project, id__in=group_ids)
            # filter down group ids to only valid matches
            group_ids = [g.id for g in group_list]
            if not group_ids:
                return Response(status=204)
        else:
            group_list = None

        serializer = GroupValidator(
            data=request.DATA,
            partial=True,
            context={'project': project},
        )
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)
        result = dict(serializer.object)

        acting_user = request.user if request.user.is_authenticated() else None

        if not group_ids:
            try:
                # bulk mutations are limited to 1000 items
                # TODO(dcramer): it'd be nice to support more than this, but it's
                # a bit too complicated right now
                cursor_result, _ = self._search(request, project, {
                    'limit': 1000,
                    'paginator_options': {'max_limit': 1000},
                })
            except ValidationError as exc:
                return Response({'detail': six.text_type(exc)}, status=400)

            group_list = list(cursor_result)
            group_ids = [g.id for g in group_list]

        is_bulk = len(group_ids) > 1

        queryset = Group.objects.filter(
            id__in=group_ids,
        )

        discard = result.get('discard')
        if discard:
            if not features.has('projects:discard-groups', project, actor=request.user):
                return Response({'detail': ['You do not have that feature enabled']}, status=400)

            group_list = list(queryset)
            groups_to_delete = []

            for group in group_list:
                with transaction.atomic():
                    try:
                        tombstone = GroupTombstone.objects.create(
                            previous_group_id=group.id,
                            actor_id=acting_user.id if acting_user else None,
                            **{name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP}
                        )
                    except IntegrityError:
                        # in this case, a tombstone has already been created
                        # for a group, so no hash updates are necessary
                        pass
                    else:
                        groups_to_delete.append(group)

                        GroupHash.objects.filter(
                            group=group,
                        ).update(
                            group=None,
                            group_tombstone_id=tombstone.id,
                        )

            self._delete_groups(request, project, groups_to_delete, delete_type='discard')

            return Response(status=204)

        statusDetails = result.pop('statusDetails', result)
        status = result.get('status')
        release = None
        commit = None

        if status in ('resolved', 'resolvedInNextRelease'):
            if status == 'resolvedInNextRelease' or statusDetails.get('inNextRelease'):
                # XXX(dcramer): this code is duplicated in the inNextRelease validator
                # due to the status vs statusDetails field
                release = statusDetails.get('inNextRelease') or Release.objects.filter(
                    projects=project,
                    organization_id=project.organization_id,
                ).extra(select={
                    'sort': 'COALESCE(date_released, date_added)',
                }).order_by('-sort')[0]
                activity_type = Activity.SET_RESOLVED_IN_RELEASE
                activity_data = {
                    # no version yet
                    'version': '',
                }
                status_details = {
                    'inNextRelease': True,
                    'actor': serialize(extract_lazy_object(request.user), request.user),
                }
                res_type = GroupResolution.Type.in_next_release
                res_type_str = 'in_next_release'
                res_status = GroupResolution.Status.pending
            elif statusDetails.get('inRelease'):
                release = statusDetails['inRelease']
                activity_type = Activity.SET_RESOLVED_IN_RELEASE
                activity_data = {
                    'version': release.version,
                }
                status_details = {
                    'inRelease': release.version,
                    'actor': serialize(extract_lazy_object(request.user), request.user),
                }
                res_type = GroupResolution.Type.in_release
                res_type_str = 'in_release'
                res_status = GroupResolution.Status.resolved
            elif statusDetails.get('inCommit'):
                commit = statusDetails['inCommit']
                activity_type = Activity.SET_RESOLVED_IN_COMMIT
                activity_data = {
                    'commit': commit.id,
                }
                status_details = {
                    'inCommit': serialize(commit, request.user),
                    'actor': serialize(extract_lazy_object(request.user), request.user),
                }
                res_type_str = 'in_commit'
            else:
                res_type_str = 'now'
                activity_type = Activity.SET_RESOLVED
                activity_data = {}
                status_details = {}

            now = timezone.now()
            metrics.incr('group.resolved', instance=res_type_str, skip_internal=True)

            # if we've specified a commit, let's see if it's already been released
            # this will allow us to associate the resolution with a release as if we
            # were simply using 'inRelease' above
            # Note: this is different from the way commit resolution works on deploy
            # creation, as a given deploy is connected to an explicit release, and
            # in this case we're simply choosing the most recent release which contains
            # the commit.
            if commit and not release:
                try:
                    release = Release.objects.filter(
                        projects=project,
                        releasecommit__commit=commit,
                    ).extra(select={
                        'sort': 'COALESCE(date_released, date_added)',
                    }).order_by('-sort')[0]
                    res_type = GroupResolution.Type.in_release
                    res_status = GroupResolution.Status.resolved
                except IndexError:
                    release = None

            for group in group_list:
                with transaction.atomic():
                    resolution = None
                    if release:
                        resolution_params = {
                            'release': release,
                            'type': res_type,
                            'status': res_status,
                            'actor_id': request.user.id
                            if request.user.is_authenticated() else None,
                        }
                        resolution, created = GroupResolution.objects.get_or_create(
                            group=group,
                            defaults=resolution_params,
                        )
                        if not created:
                            resolution.update(
                                datetime=timezone.now(), **resolution_params)

                    if commit:
                        GroupLink.objects.create(
                            group_id=group.id,
                            project_id=group.project_id,
                            linked_type=GroupLink.LinkedType.commit,
                            relationship=GroupLink.Relationship.resolves,
                            linked_id=commit.id,
                        )

                    affected = Group.objects.filter(
                        id=group.id,
                    ).update(
                        status=GroupStatus.RESOLVED,
                        resolved_at=now,
                    )
                    if not resolution:
                        created = affected

                    group.status = GroupStatus.RESOLVED
                    group.resolved_at = now

                    self._subscribe_and_assign_issue(
                        acting_user, group, result)

                    if created:
                        activity = Activity.objects.create(
                            project=group.project,
                            group=group,
                            type=activity_type,
                            user=acting_user,
                            ident=resolution.id if resolution else None,
                            data=activity_data,
                        )
                        # TODO(dcramer): we need a solution for activity rollups
                        # before sending notifications on bulk changes
                        if not is_bulk:
                            activity.send_notification()

                if release:
                    issue_resolved_in_release.send_robust(
                        group=group,
                        project=project,
                        user=acting_user,
                        resolution_type=res_type_str,
                        sender=type(self),
                    )
                elif commit:
                    resolved_with_commit.send_robust(
                        organization_id=group.project.organization_id,
                        user=request.user,
                        group=group,
                        sender=type(self),
                    )

                kick_off_status_syncs.apply_async(kwargs={
                    'project_id': group.project_id,
                    'group_id': group.id,
                })

            result.update({
                'status': 'resolved',
                'statusDetails': status_details,
            })

        elif status:
            new_status = STATUS_CHOICES[result['status']]

            with transaction.atomic():
                happened = queryset.exclude(
                    status=new_status,
                ).update(
                    status=new_status,
                )

                GroupResolution.objects.filter(
                    group__in=group_ids,
                ).delete()

                if new_status == GroupStatus.IGNORED:
                    metrics.incr('group.ignored', skip_internal=True)

                    ignore_duration = (
                        statusDetails.pop('ignoreDuration', None) or
                        statusDetails.pop('snoozeDuration', None)
                    ) or None
                    ignore_count = statusDetails.pop(
                        'ignoreCount', None) or None
                    ignore_window = statusDetails.pop(
                        'ignoreWindow', None) or None
                    ignore_user_count = statusDetails.pop(
                        'ignoreUserCount', None) or None
                    ignore_user_window = statusDetails.pop(
                        'ignoreUserWindow', None) or None
                    if ignore_duration or ignore_count or ignore_user_count:
                        if ignore_duration:
                            ignore_until = timezone.now() + timedelta(
                                minutes=ignore_duration,
                            )
                        else:
                            ignore_until = None
                        for group in group_list:
                            state = {}
                            if ignore_count and not ignore_window:
                                state['times_seen'] = group.times_seen
                            if ignore_user_count and not ignore_user_window:
                                state['users_seen'] = group.count_users_seen()
                            GroupSnooze.objects.create_or_update(
                                group=group,
                                values={
                                    'until': ignore_until,
                                    'count': ignore_count,
                                    'window': ignore_window,
                                    'user_count': ignore_user_count,
                                    'user_window': ignore_user_window,
                                    'state': state,
                                    'actor_id': request.user.id
                                    if request.user.is_authenticated() else None,
                                }
                            )
                            result['statusDetails'] = {
                                'ignoreCount': ignore_count,
                                'ignoreUntil': ignore_until,
                                'ignoreUserCount': ignore_user_count,
                                'ignoreUserWindow': ignore_user_window,
                                'ignoreWindow': ignore_window,
                                'actor': serialize(extract_lazy_object(request.user), request.user),
                            }
                    else:
                        GroupSnooze.objects.filter(
                            group__in=group_ids,
                        ).delete()
                        ignore_until = None
                        result['statusDetails'] = {}
                else:
                    result['statusDetails'] = {}

            if group_list and happened:
                if new_status == GroupStatus.UNRESOLVED:
                    activity_type = Activity.SET_UNRESOLVED
                    activity_data = {}
                elif new_status == GroupStatus.IGNORED:
                    activity_type = Activity.SET_IGNORED
                    activity_data = {
                        'ignoreCount': ignore_count,
                        'ignoreDuration': ignore_duration,
                        'ignoreUntil': ignore_until,
                        'ignoreUserCount': ignore_user_count,
                        'ignoreUserWindow': ignore_user_window,
                        'ignoreWindow': ignore_window,
                    }

                issue_ignored.send_robust(
                    project=project,
                    user=acting_user,
                    group_list=group_list,
                    activity_data=activity_data,
                    sender=self.__class__)

                for group in group_list:
                    group.status = new_status

                    activity = Activity.objects.create(
                        project=group.project,
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        if acting_user:
                            GroupSubscription.objects.subscribe(
                                user=acting_user,
                                group=group,
                                reason=GroupSubscriptionReason.status_change,
                            )
                        activity.send_notification()

                    if new_status == GroupStatus.UNRESOLVED:
                        kick_off_status_syncs.apply_async(kwargs={
                            'project_id': group.project_id,
                            'group_id': group.id,
                        })

        if 'assignedTo' in result:
            assigned_actor = result['assignedTo']
            if assigned_actor:
                for group in group_list:
                    resolved_actor = assigned_actor.resolve()

                    GroupAssignee.objects.assign(group, resolved_actor, acting_user)
                result['assignedTo'] = serialize(
                    assigned_actor.resolve(), acting_user, ActorSerializer())
            else:
                for group in group_list:
                    GroupAssignee.objects.deassign(group, acting_user)

        if result.get('hasSeen') and project.member_set.filter(user=acting_user).exists():
            for group in group_list:
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=group.project,
                    values={
                        'last_seen': timezone.now(),
                    }
                )
        elif result.get('hasSeen') is False:
            GroupSeen.objects.filter(
                group__in=group_ids,
                user=acting_user,
            ).delete()

        if result.get('isBookmarked'):
            for group in group_list:
                GroupBookmark.objects.get_or_create(
                    project=project,
                    group=group,
                    user=acting_user,
                )
                GroupSubscription.objects.subscribe(
                    user=acting_user,
                    group=group,
                    reason=GroupSubscriptionReason.bookmark,
                )
        elif result.get('isBookmarked') is False:
            GroupBookmark.objects.filter(
                group__in=group_ids,
                user=acting_user,
            ).delete()

        # TODO(dcramer): we could make these more efficient by first
        # querying for which rows are present (if N > 2), flipping the flag
        # on those rows, and then creating the missing rows
        if result.get('isSubscribed') in (True, False):
            is_subscribed = result['isSubscribed']
            for group in group_list:
                # NOTE: Subscribing without an initiating event (assignment,
                # commenting, etc.) clears out the previous subscription reason
                # to avoid showing confusing messaging as a result of this
                # action. It'd be jarring to go directly from "you are not
                # subscribed" to "you were subscribed due since you were
                # assigned" just by clicking the "subscribe" button (and you
                # may no longer be assigned to the issue anyway.)
                GroupSubscription.objects.create_or_update(
                    user=acting_user,
                    group=group,
                    project=project,
                    values={
                        'is_active': is_subscribed,
                        'reason': GroupSubscriptionReason.unknown,
                    },
                )

            result['subscriptionDetails'] = {
                'reason': SUBSCRIPTION_REASON_MAP.get(
                    GroupSubscriptionReason.unknown,
                    'unknown',
                ),
            }

        if 'isPublic' in result:
            # We always want to delete an existing share, because setting
            # isPublic=True even when the issue is already public should
            # trigger regenerating the share.
            for group in group_list:
                if GroupShare.objects.filter(group=group).delete():
                    result['shareId'] = None
                    Activity.objects.create(
                        project=group.project,
                        group=group,
                        type=Activity.SET_PRIVATE,
                        user=acting_user,
                    )

        if result.get('isPublic'):
            for group in group_list:
                share, created = GroupShare.objects.get_or_create(
                    project=group.project,
                    group=group,
                    user=acting_user,
                )
                if created:
                    result['shareId'] = share.uuid
                    Activity.objects.create(
                        project=group.project,
                        group=group,
                        type=Activity.SET_PUBLIC,
                        user=acting_user,
                    )

        # XXX(dcramer): this feels a bit shady like it should be its own
        # endpoint
        if result.get('merge') and len(group_list) > 1:
            group_list_by_times_seen = sorted(
                group_list,
                key=lambda g: (g.times_seen, g.id),
                reverse=True,
            )
            primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

            group_ids_to_merge = [g.id for g in groups_to_merge]
            eventstream_state = eventstream.start_merge(
                primary_group.project_id,
                group_ids_to_merge,
                primary_group.id
            )

            Group.objects.filter(
                id__in=group_ids_to_merge
            ).update(
                status=GroupStatus.PENDING_MERGE
            )

            transaction_id = uuid4().hex
            merge_groups.delay(
                from_object_ids=group_ids_to_merge,
                to_object_id=primary_group.id,
                transaction_id=transaction_id,
                eventstream_state=eventstream_state,
            )

            Activity.objects.create(
                project=primary_group.project,
                group=primary_group,
                type=Activity.MERGE,
                user=acting_user,
                data={
                    'issues': [{
                        'id': c.id
                    } for c in groups_to_merge],
                },
            )

            result['merge'] = {
                'parent': six.text_type(primary_group.id),
                'children': [six.text_type(g.id) for g in groups_to_merge],
            }

        return Response(result)
Example #3
    def test_unmerge(self):
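        # Build three fingerprinted groups, merge the first into the second, then
        # unmerge the first fingerprint's events out of the merged group into the
        # third group and verify that user reports, hashes, releases, tags, and
        # tsdb series follow them.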
        now = before_now(minutes=5).replace(microsecond=0, tzinfo=pytz.utc)

        def time_from_now(offset=0):
            return now + timedelta(seconds=offset)

        project = self.create_project()

        sequence = itertools.count(0)
        tag_values = itertools.cycle(["red", "green", "blue"])
        user_values = itertools.cycle([{"id": 1}, {"id": 2}])

        def create_message_event(template, parameters, environment, release, fingerprint="group1"):
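            # Store one synthetic event with a deterministic event_id, cycling
            # color tags and users, attach a matching UserReport, and record the
            # event for similarity features.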
            i = next(sequence)

            event_id = uuid.UUID(fields=(i, 0x0, 0x1000, 0x80, 0x80, 0x808080808080)).hex

            tags = [["color", next(tag_values)]]

            if release:
                tags.append(["sentry:release", release])

            event = self.store_event(
                data={
                    "event_id": event_id,
                    "message": template % parameters,
                    "type": "default",
                    "user": next(user_values),
                    "tags": tags,
                    "fingerprint": [fingerprint],
                    "timestamp": iso_format(now + timedelta(seconds=i)),
                    "environment": environment,
                    "release": release,
                },
                project_id=project.id,
            )

            UserReport.objects.create(
                project_id=project.id,
                group_id=event.group.id,
                event_id=event_id,
                name="Log Hat",
                email="*****@*****.**",
                comments="Quack",
            )

            features.record([event])

            return event

        events = OrderedDict()

        for event in (
            create_message_event(
                "This is message #%s.", i, environment="production", release="version"
            )
            for i in xrange(10)
        ):
            events.setdefault(get_fingerprint(event), []).append(event)

        for event in (
            create_message_event(
                "This is message #%s!",
                i,
                environment="production",
                release="version2",
                fingerprint="group2",
            )
            for i in xrange(10, 16)
        ):
            events.setdefault(get_fingerprint(event), []).append(event)

        event = create_message_event(
            "This is message #%s!",
            17,
            environment="staging",
            release="version3",
            fingerprint="group3",
        )

        events.setdefault(get_fingerprint(event), []).append(event)

        merge_source, source, destination = list(Group.objects.all())

        assert len(events) == 3
        assert sum(map(len, events.values())) == 17

        production_environment = Environment.objects.get(
            organization_id=project.organization_id, name="production"
        )

        with self.tasks():
            eventstream_state = eventstream.start_merge(project.id, [merge_source.id], source.id)
            merge_groups.delay([merge_source.id], source.id)
            eventstream.end_merge(eventstream_state)

        assert set(
            [
                (gtv.value, gtv.times_seen)
                for gtv in tagstore.get_group_tag_values(
                    project.id, source.id, production_environment.id, "color"
                )
            ]
        ) == set([("red", 6), ("green", 5), ("blue", 5)])

        similar_items = features.compare(source)
        assert len(similar_items) == 2
        assert similar_items[0][0] == source.id
        assert similar_items[0][1]["message:message:character-shingles"] == 1.0
        assert similar_items[1][0] == destination.id
        assert similar_items[1][1]["message:message:character-shingles"] < 1.0

        with self.tasks():
            eventstream_state = eventstream.start_unmerge(
                project.id, [list(events.keys())[0]], source.id, destination.id
            )
            unmerge.delay(
                project.id, source.id, destination.id, [list(events.keys())[0]], None, batch_size=5
            )
            eventstream.end_unmerge(eventstream_state)

        assert (
            list(
                Group.objects.filter(id=merge_source.id).values_list(
                    "times_seen", "first_seen", "last_seen"
                )
            )
            == []
        )

        assert list(
            Group.objects.filter(id=source.id).values_list("times_seen", "first_seen", "last_seen")
        ) == [(6, time_from_now(10), time_from_now(15))]

        assert list(
            Group.objects.filter(id=destination.id).values_list(
                "times_seen", "first_seen", "last_seen"
            )
        ) == [(11, time_from_now(0), time_from_now(16))]

        assert source.id != destination.id
        assert source.project == destination.project

        destination_event_ids = map(lambda event: event.event_id, list(events.values())[1])

        assert set(
            UserReport.objects.filter(group_id=source.id).values_list("event_id", flat=True)
        ) == set(destination_event_ids)

        assert set(
            GroupHash.objects.filter(group_id=source.id).values_list("hash", flat=True)
        ) == set(itertools.islice(events.keys(), 2))

        assert set(
            GroupRelease.objects.filter(group_id=source.id).values_list(
                "environment", "first_seen", "last_seen"
            )
        ) == set([(u"production", time_from_now(10), time_from_now(15))])

        assert set(
            [
                (gtv.value, gtv.times_seen)
                for gtv in tagstore.get_group_tag_values(
                    project.id, destination.id, production_environment.id, "color"
                )
            ]
        ) == set([(u"red", 4), (u"green", 3), (u"blue", 3)])

        destination_event_ids = map(
            lambda event: event.event_id, list(events.values())[0] + list(events.values())[2]
        )

        assert set(
            UserReport.objects.filter(group_id=destination.id).values_list("event_id", flat=True)
        ) == set(destination_event_ids)

        assert set(
            GroupHash.objects.filter(group_id=destination.id).values_list("hash", flat=True)
        ) == set(itertools.islice(events.keys(), 2, 3))

        assert set(
            GroupRelease.objects.filter(group_id=destination.id).values_list(
                "environment", "first_seen", "last_seen"
            )
        ) == set(
            [
                ("production", time_from_now(0), time_from_now(9)),
                ("staging", time_from_now(16), time_from_now(16)),
            ]
        )

        assert set(
            [
                (gtk.value, gtk.times_seen)
                for gtk in tagstore.get_group_tag_values(
                    project.id, destination.id, production_environment.id, "color"
                )
            ]
        ) == set([("red", 4), ("blue", 3), ("green", 3)])

        rollup_duration = 3600

        time_series = tsdb.get_range(
            tsdb.models.group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
        )

        environment_time_series = tsdb.get_range(
            tsdb.models.group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
            environment_ids=[production_environment.id],
        )

        def get_expected_series_values(rollup, events, function=None):
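            # Bucket events into rollup-sized windows keyed by bucket start
            # timestamp and reduce each bucket with `function`, which defaults
            # to counting events.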
            if function is None:

                def function(aggregate, event):
                    return (aggregate if aggregate is not None else 0) + 1

            expected = {}
            for event in events:
                k = float((to_timestamp(event.datetime) // rollup_duration) * rollup_duration)
                expected[k] = function(expected.get(k), event)

            return expected

        def assert_series_contains(expected, actual, default=0):
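            # Every expected bucket must match exactly, and any extra buckets
            # present in `actual` must hold the default value.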
            actual = dict(actual)

            for key, value in expected.items():
                assert actual.get(key, 0) == value

            for key in set(actual.keys()) - set(expected.keys()):
                assert actual.get(key, 0) == default

        assert_series_contains(
            get_expected_series_values(rollup_duration, list(events.values())[1]),
            time_series[source.id],
            0,
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration, list(events.values())[0] + list(events.values())[2]
            ),
            time_series[destination.id],
            0,
        )

        assert_series_contains(
            get_expected_series_values(rollup_duration, list(events.values())[1]),
            environment_time_series[source.id],
            0,
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration, list(events.values())[0][:-1] + list(events.values())[2]
            ),
            environment_time_series[destination.id],
            0,
        )

        time_series = tsdb.get_distinct_counts_series(
            tsdb.models.users_affected_by_group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
        )

        environment_time_series = tsdb.get_distinct_counts_series(
            tsdb.models.users_affected_by_group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
            environment_id=production_environment.id,
        )

        def collect_by_user_tag(aggregate, event):
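            # Accumulate the set of distinct user tag values seen in a bucket.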
            aggregate = aggregate if aggregate is not None else set()
            aggregate.add(get_event_user_from_interface(event.data["user"]).tag_value)
            return aggregate

        for series in [time_series, environment_time_series]:
            assert_series_contains(
                {
                    timestamp: len(values)
                    for timestamp, values in get_expected_series_values(
                        rollup_duration, list(events.values())[1], collect_by_user_tag
                    ).items()
                },
                series[source.id],
            )

            assert_series_contains(
                {
                    timestamp: len(values)
                    for timestamp, values in get_expected_series_values(
                        rollup_duration,
                        list(events.values())[0] + list(events.values())[2],
                        collect_by_user_tag,
                    ).items()
                },
                time_series[destination.id],
            )

        def strip_zeroes(data):
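            # Drop zero-valued entries from each frequency bucket so the series
            # compare only against the expected non-zero values.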
            for group_id, series in data.items():
                for _, values in series:
                    for key, val in list(values.items()):
                        if val == 0:
                            values.pop(key)

            return data

        def collect_by_release(group, aggregate, event):
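            # Count events per GroupRelease id, resolved from the event's
            # release tag and environment for the given group.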
            aggregate = aggregate if aggregate is not None else {}
            release = event.get_tag("sentry:release")
            if not release:
                return aggregate
            release = GroupRelease.objects.get(
                group_id=group.id,
                environment=event.data["environment"],
                release_id=Release.objects.get(
                    organization_id=project.organization_id, version=release
                ).id,
            ).id
            aggregate[release] = aggregate.get(release, 0) + 1
            return aggregate

        items = {}
        for i in [source.id, destination.id]:
            items[i] = list(GroupRelease.objects.filter(group_id=i).values_list("id", flat=True))

        time_series = strip_zeroes(
            tsdb.get_frequency_series(
                tsdb.models.frequent_releases_by_group,
                items,
                now - timedelta(seconds=rollup_duration),
                time_from_now(17),
                rollup_duration,
            )
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                list(events.values())[1],
                functools.partial(collect_by_release, source),
            ),
            time_series[source.id],
            {},
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                list(events.values())[0] + list(events.values())[2],
                functools.partial(collect_by_release, destination),
            ),
            time_series[destination.id],
            {},
        )

        items = {}
        for i in [source.id, destination.id]:
            items[i] = list(Environment.objects.all().values_list("id", flat=True))

        time_series = strip_zeroes(
            tsdb.get_frequency_series(
                tsdb.models.frequent_environments_by_group,
                items,
                now - timedelta(seconds=rollup_duration),
                time_from_now(17),
                rollup_duration,
            )
        )

        def collect_by_environment(aggregate, event):
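            # Count events per environment id for the event's environment.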
            aggregate = aggregate if aggregate is not None else {}
            environment = Environment.objects.get(
                organization_id=project.organization_id, name=event.data["environment"]
            ).id
            aggregate[environment] = aggregate.get(environment, 0) + 1
            return aggregate

        assert_series_contains(
            get_expected_series_values(
                rollup_duration, list(events.values())[1], collect_by_environment
            ),
            time_series[source.id],
            {},
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                list(events.values())[0] + list(events.values())[2],
                collect_by_environment,
            ),
            time_series[destination.id],
            {},
        )

        source_similar_items = features.compare(source)
        assert source_similar_items[0] == (
            source.id,
            {
                "exception:message:character-shingles": None,
                "exception:stacktrace:application-chunks": None,
                "exception:stacktrace:pairs": None,
                "message:message:character-shingles": 1.0,
            },
        )
        assert source_similar_items[1][0] == destination.id
        assert source_similar_items[1][1]["message:message:character-shingles"] < 1.0

        destination_similar_items = features.compare(destination)
        assert destination_similar_items[0] == (
            destination.id,
            {
                "exception:message:character-shingles": None,
                "exception:stacktrace:application-chunks": None,
                "exception:stacktrace:pairs": None,
                "message:message:character-shingles": 1.0,
            },
        )
        assert destination_similar_items[1][0] == source.id
        assert destination_similar_items[1][1]["message:message:character-shingles"] < 1.0
Example #4
def update_groups(request, projects, organization_id, search_fn):
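    # Multi-project variant of the bulk issue mutation flow: validate the payload
    # against each project, then apply resolution, status, assignment, seen,
    # bookmark, subscription, and share-visibility changes to the matched groups.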
    group_ids = request.GET.getlist("id")
    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id, project__in=projects, id__in=group_ids
        )
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(data=request.data, partial=True, context={"project": project})
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

    result = dict(serializer.validated_data)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = request.user if request.user.is_authenticated() else None

    if not group_ids:
        try:
            # bulk mutations are limited to 1000 items
            # TODO(dcramer): it'd be nice to support more than this, but it's
            # a bit too complicated right now
            cursor_result, _ = search_fn({"limit": 1000, "paginator_options": {"max_limit": 1000}})
        except ValidationError as exc:
            return Response({"detail": six.text_type(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(id__in=group_ids)

    discard = result.get("discard")
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    statusDetails = result.pop("statusDetails", result)
    status = result.get("status")
    release = None
    commit = None

    if status in ("resolved", "resolvedInNextRelease"):
        if status == "resolvedInNextRelease" or statusDetails.get("inNextRelease"):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in next release for multiple projects."},
                    status=400,
                )
            release = (
                statusDetails.get("inNextRelease")
                or Release.objects.filter(
                    projects=projects[0], organization_id=projects[0].organization_id
                )
                .extra(select={"sort": "COALESCE(date_released, date_added)"})
                .order_by("-sort")[0]
            )
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": ""
            }
            status_details = {
                "inNextRelease": True,
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = "in_next_release"
            res_status = GroupResolution.Status.pending
        elif statusDetails.get("inRelease"):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in release for multiple projects."}, status=400
                )
            release = statusDetails["inRelease"]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                "version": release.version
            }
            status_details = {
                "inRelease": release.version,
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = "in_release"
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get("inCommit"):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in commit for multiple projects."}, status=400
                )
            commit = statusDetails["inCommit"]
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {"commit": commit.id}
            status_details = {
                "inCommit": serialize(commit, request.user),
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type_str = "in_commit"
        else:
            res_type_str = "now"
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr("group.resolved", instance=res_type_str, skip_internal=True)

        # if we've specified a commit, let's see if it's already been released
        # this will allow us to associate the resolution with a release as if we
        # were simply using 'inRelease' above
        # Note: this is different from the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = (
                    Release.objects.filter(projects__in=projects, releasecommit__commit=commit)
                    .extra(select={"sort": "COALESCE(date_released, date_added)"})
                    .order_by("-sort")[0]
                )
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None

        for group in group_list:
            with transaction.atomic():
                resolution = None
                if release:
                    resolution_params = {
                        "release": release,
                        "type": res_type,
                        "status": res_status,
                        "actor_id": request.user.id if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group, defaults=resolution_params
                    )
                    if not created:
                        resolution.update(datetime=timezone.now(), **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(id=group.id).update(
                    status=GroupStatus.RESOLVED, resolved_at=now
                )
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                assigned_to = self_subscribe_and_assign_issue(acting_user, group)
                if assigned_to is not None:
                    result["assignedTo"] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or request.user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(
                kwargs={"project_id": group.project_id, "group_id": group.id}
            )

        result.update({"status": "resolved", "statusDetails": status_details})

    elif status:
        new_status = STATUS_CHOICES[result["status"]]

        with transaction.atomic():
            happened = queryset.exclude(status=new_status).update(status=new_status)

            GroupResolution.objects.filter(group__in=group_ids).delete()

            if new_status == GroupStatus.IGNORED:
                metrics.incr("group.ignored", skip_internal=True)

                ignore_duration = (
                    statusDetails.pop("ignoreDuration", None)
                    or statusDetails.pop("snoozeDuration", None)
                ) or None
                ignore_count = statusDetails.pop("ignoreCount", None) or None
                ignore_window = statusDetails.pop("ignoreWindow", None) or None
                ignore_user_count = statusDetails.pop("ignoreUserCount", None) or None
                ignore_user_window = statusDetails.pop("ignoreUserWindow", None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(minutes=ignore_duration)
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state["times_seen"] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state["users_seen"] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                "until": ignore_until,
                                "count": ignore_count,
                                "window": ignore_window,
                                "user_count": ignore_user_count,
                                "user_window": ignore_user_window,
                                "state": state,
                                "actor_id": request.user.id
                                if request.user.is_authenticated()
                                else None,
                            },
                        )
                        result["statusDetails"] = {
                            "ignoreCount": ignore_count,
                            "ignoreUntil": ignore_until,
                            "ignoreUserCount": ignore_user_count,
                            "ignoreUserWindow": ignore_user_window,
                            "ignoreWindow": ignore_window,
                            "actor": serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    GroupSnooze.objects.filter(group__in=group_ids).delete()
                    ignore_until = None
                    result["statusDetails"] = {}
            else:
                result["statusDetails"] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    "ignoreCount": ignore_count,
                    "ignoreDuration": ignore_duration,
                    "ignoreUntil": ignore_until,
                    "ignoreUserCount": ignore_user_count,
                    "ignoreUserWindow": ignore_user_window,
                    "ignoreWindow": ignore_window,
                }

            groups_by_project_id = defaultdict(list)
            for group in group_list:
                groups_by_project_id[group.project_id].append(group)

            for project in projects:
                project_groups = groups_by_project_id.get(project.id)
                if project_groups:
                    issue_ignored.send_robust(
                        project=project,
                        user=acting_user,
                        group_list=project_groups,
                        activity_data=activity_data,
                        sender=update_groups,
                    )

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(
                        kwargs={"project_id": group.project_id, "group_id": group.id}
                    )

    if "assignedTo" in result:
        assigned_actor = result["assignedTo"]
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()

                GroupAssignee.objects.assign(group, resolved_actor, acting_user)
            result["assignedTo"] = serialize(
                assigned_actor.resolve(), acting_user, ActorSerializer()
            )
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists() for project in projects
    }
    if result.get("hasSeen"):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={"last_seen": timezone.now()},
                )
    elif result.get("hasSeen") is False:
        GroupSeen.objects.filter(group__in=group_ids, user=acting_user).delete()

    if result.get("isBookmarked"):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id], group=group, user=acting_user
            )
            GroupSubscription.objects.subscribe(
                user=acting_user, group=group, reason=GroupSubscriptionReason.bookmark
            )
    elif result.get("isBookmarked") is False:
        GroupBookmark.objects.filter(group__in=group_ids, user=acting_user).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for which rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get("isSubscribed") in (True, False):
        is_subscribed = result["isSubscribed"]
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={"is_active": is_subscribed, "reason": GroupSubscriptionReason.unknown},
            )

        result["subscriptionDetails"] = {
            "reason": SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown, "unknown")
        }

    if "isPublic" in result:
        # We always want to delete an existing share, because setting
        # isPublic=True even when the issue is already public should
        # regenerate the share.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result["shareId"] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get("isPublic"):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id], group=group, user=acting_user
            )
            if created:
                result["shareId"] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get("merge") and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response({"detail": "Merging across multiple projects is not supported"})
        group_list_by_times_seen = sorted(
            group_list, key=lambda g: (g.times_seen, g.id), reverse=True
        )
        primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(
            primary_group.project_id, group_ids_to_merge, primary_group.id
        )

        Group.objects.filter(id__in=group_ids_to_merge).update(status=GroupStatus.PENDING_MERGE)

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={"issues": [{"id": c.id} for c in groups_to_merge]},
        )

        result["merge"] = {
            "parent": six.text_type(primary_group.id),
            "children": [six.text_type(g.id) for g in groups_to_merge],
        }

    return Response(result)
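For orientation, here is a minimal client-side sketch of driving the bulk-mutate handler above with the requests library; the host, organization/project slugs, issue IDs and token are hypothetical placeholders, not taken from the source.

# Hypothetical usage sketch (assumed URL, slugs, IDs and token).
import requests

resp = requests.put(
    "https://sentry.example.com/api/0/projects/acme/backend/issues/",
    params=[("id", "11"), ("id", "12")],  # repeat `id` once per issue to mutate
    json={"status": "ignored", "ignoreDuration": 30},
    headers={"Authorization": "Bearer <hypothetical-token>"},
)
# When a snooze is applied, the handler echoes the settings back under
# "statusDetails", as built in result["statusDetails"] above.
print(resp.json().get("statusDetails"))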
Example #5
def update_groups(
    request: Request,
    group_ids: Sequence[int | str],
    projects: Sequence[Project],
    organization_id: int,
    search_fn: SearchFunction | None,
    user: User | None = None,
    data: Mapping[str, Any] | None = None,
) -> Response:
    # If `user` and `data` are passed as parameters then they should override
    # the values in `request`.
    user = user or request.user
    data = data or request.data

    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id,
            project__in=projects,
            id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    serializer = None
    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(
            data=data,
            partial=True,
            context={
                "project": project,
                "organization": project.organization,
                "access": getattr(request, "access", None),
            },
        )
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

    if serializer is None:
        return

    result = dict(serializer.validated_data)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = user if user.is_authenticated else None

    if search_fn and not group_ids:
        try:
            cursor_result, _ = search_fn({
                "limit": BULK_MUTATION_LIMIT,
                "paginator_options": {
                    "max_limit": BULK_MUTATION_LIMIT
                },
            })
        except ValidationError as exc:
            return Response({"detail": str(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(id__in=group_ids)

    discard = result.get("discard")
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    statusDetails = result.pop("statusDetails", result)
    status = result.get("status")
    release = None
    commit = None
    res_type = None
    activity_type = None
    activity_data: MutableMapping[str, Any | None] | None = None
    if status in ("resolved", "resolvedInNextRelease"):
        res_status = None
        if status == "resolvedInNextRelease" or statusDetails.get(
                "inNextRelease"):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in next release for multiple projects."},
                    status=400,
                )
            release = (
                statusDetails.get("inNextRelease")
                or Release.objects.filter(
                    projects=projects[0],
                    organization_id=projects[0].organization_id,
                )
                .extra(select={"sort": "COALESCE(date_released, date_added)"})
                .order_by("-sort")[0]
            )
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": ""
            }
            status_details = {
                "inNextRelease": True,
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = "in_next_release"
            res_status = GroupResolution.Status.pending
        elif statusDetails.get("inRelease"):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in release for multiple projects."},
                    status=400,
                )
            release = statusDetails["inRelease"]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": release.version
            }
            status_details = {
                "inRelease": release.version,
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = "in_release"
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get("inCommit"):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in commit for multiple projects."},
                    status=400,
                )
            commit = statusDetails["inCommit"]
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {"commit": commit.id}
            status_details = {
                "inCommit": serialize(commit, user),
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type_str = "in_commit"
        else:
            res_type_str = "now"
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr("group.resolved",
                     instance=res_type_str,
                     skip_internal=True)

        # if we've specified a commit, let's see if its already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = (
                    Release.objects.filter(projects__in=projects, releasecommit__commit=commit)
                    .extra(select={"sort": "COALESCE(date_released, date_added)"})
                    .order_by("-sort")[0]
                )
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None
        for group in group_list:
            with transaction.atomic():
                resolution = None
                created = None
                if release:
                    resolution_params = {
                        "release": release,
                        "type": res_type,
                        "status": res_status,
                        "actor_id": user.id if user.is_authenticated else None,
                    }

                    # We only set `current_release_version` if GroupResolution type is
                    # in_next_release, because we need to store information about the latest/most
                    # recent release that was associated with a group and that is required for
                    # release comparisons (i.e. handling regressions)
                    if res_type == GroupResolution.Type.in_next_release:
                        # Check if semver versioning scheme is followed
                        follows_semver = follows_semver_versioning_scheme(
                            org_id=group.organization.id,
                            project_id=group.project.id,
                            release_version=release.version,
                        )

                        current_release_version = get_current_release_version_of_group(
                            group=group, follows_semver=follows_semver)
                        if current_release_version:
                            resolution_params.update(
                                {"current_release_version": current_release_version}
                            )

                            # Sets `current_release_version` for activity, since there is no point
                            # waiting for when a new release is created i.e.
                            # clear_expired_resolutions task to be run.
                            # Activity should look like "... resolved in version
                            # >current_release_version" in the UI
                            if follows_semver:
                                activity_data.update(
                                    {"current_release_version": current_release_version}
                                )

                                # In semver projects, and thereby semver releases, we determine
                                # resolutions by comparing against an expression rather than a
                                # specific release (i.e. >current_release_version). Consequently,
                                # at this point we can consider this GroupResolution as resolved
                                # in release
                                resolution_params.update({
                                    "type": GroupResolution.Type.in_release,
                                    "status": GroupResolution.Status.resolved,
                                })
                            else:
                                # If we already know the `next` release in date based ordering
                                # when clicking on `resolvedInNextRelease` because it is already
                                # been released, there is no point in setting GroupResolution to
                                # be of type in_next_release but rather in_release would suffice

                                try:
                                    # Get current release object from current_release_version
                                    current_release_obj = Release.objects.get(
                                        version=current_release_version,
                                        organization_id=projects[0].organization_id,
                                    )

                                    date_order_q = Q(
                                        date_added__gt=current_release_obj.date_added
                                    ) | Q(
                                        date_added=current_release_obj.date_added,
                                        id__gt=current_release_obj.id,
                                    )

                                    # Find the next release after the current_release_version
                                    # i.e. the release that resolves the issue
                                    resolved_in_release = (
                                        Release.objects.filter(
                                            date_order_q,
                                            projects=projects[0],
                                            organization_id=projects[0].organization_id,
                                        )
                                        .extra(select={"sort": "COALESCE(date_released, date_added)"})
                                        .order_by("sort", "id")[:1]
                                        .get()
                                    )

                                    # If we get here, we assume it exists and so we update
                                    # GroupResolution and Activity
                                    resolution_params.update({
                                        "release": resolved_in_release,
                                        "type": GroupResolution.Type.in_release,
                                        "status": GroupResolution.Status.resolved,
                                    })
                                    activity_data.update({
                                        "version": resolved_in_release.version,
                                    })
                                except Release.DoesNotExist:
                                    # If it gets here, it means we don't know the upcoming
                                    # release yet because it does not exist, and so we should
                                    # fall back to our current model
                                    ...

                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group, defaults=resolution_params)
                    if not created:
                        resolution.update(datetime=timezone.now(),
                                          **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(id=group.id).update(
                    status=GroupStatus.RESOLVED, resolved_at=now)
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now
                remove_group_from_inbox(group,
                                        action=GroupInboxRemoveAction.RESOLVED,
                                        user=acting_user)
                result["inbox"] = None

                assigned_to = self_subscribe_and_assign_issue(
                    acting_user, group)
                if assigned_to is not None:
                    result["assignedTo"] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    record_group_history_from_activity_type(group,
                                                            activity_type,
                                                            actor=acting_user)

                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(kwargs={
                "project_id": group.project_id,
                "group_id": group.id
            })

        result.update({"status": "resolved", "statusDetails": status_details})

    elif status:
        new_status = STATUS_UPDATE_CHOICES[result["status"]]
        ignore_duration = None
        ignore_count = None
        ignore_window = None
        ignore_user_count = None
        ignore_user_window = None
        ignore_until = None

        with transaction.atomic():
            happened = queryset.exclude(status=new_status).update(
                status=new_status)

            GroupResolution.objects.filter(group__in=group_ids).delete()
            if new_status == GroupStatus.IGNORED:
                metrics.incr("group.ignored", skip_internal=True)
                for group in group_ids:
                    remove_group_from_inbox(
                        group,
                        action=GroupInboxRemoveAction.IGNORED,
                        user=acting_user)
                result["inbox"] = None

                ignore_duration = (
                    statusDetails.pop("ignoreDuration", None)
                    or statusDetails.pop("snoozeDuration", None)
                ) or None
                ignore_count = statusDetails.pop("ignoreCount", None) or None
                ignore_window = statusDetails.pop("ignoreWindow", None) or None
                ignore_user_count = statusDetails.pop("ignoreUserCount", None) or None
                ignore_user_window = statusDetails.pop("ignoreUserWindow", None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration)
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state["times_seen"] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state["users_seen"] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                "until": ignore_until,
                                "count": ignore_count,
                                "window": ignore_window,
                                "user_count": ignore_user_count,
                                "user_window": ignore_user_window,
                                "state": state,
                                "actor_id": user.id if user.is_authenticated else None,
                            },
                        )
                        result["statusDetails"] = {
                            "ignoreCount": ignore_count,
                            "ignoreUntil": ignore_until,
                            "ignoreUserCount": ignore_user_count,
                            "ignoreUserWindow": ignore_user_window,
                            "ignoreWindow": ignore_window,
                            "actor": serialize(extract_lazy_object(user),
                                               user),
                        }
                else:
                    GroupSnooze.objects.filter(group__in=group_ids).delete()
                    ignore_until = None
                    result["statusDetails"] = {}
            else:
                result["statusDetails"] = {}
        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}

                for group in group_list:
                    if group.status == GroupStatus.IGNORED:
                        issue_unignored.send_robust(
                            project=project_lookup[group.project_id],
                            user=acting_user,
                            group=group,
                            transition_type="manual",
                            sender=update_groups,
                        )
                    else:
                        issue_unresolved.send_robust(
                            project=project_lookup[group.project_id],
                            user=acting_user,
                            group=group,
                            transition_type="manual",
                            sender=update_groups,
                        )
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    "ignoreCount": ignore_count,
                    "ignoreDuration": ignore_duration,
                    "ignoreUntil": ignore_until,
                    "ignoreUserCount": ignore_user_count,
                    "ignoreUserWindow": ignore_user_window,
                    "ignoreWindow": ignore_window,
                }

                groups_by_project_id = defaultdict(list)
                for group in group_list:
                    groups_by_project_id[group.project_id].append(group)

                for project in projects:
                    project_groups = groups_by_project_id.get(project.id)
                    if project_groups:
                        issue_ignored.send_robust(
                            project=project,
                            user=acting_user,
                            group_list=project_groups,
                            activity_data=activity_data,
                            sender=update_groups,
                        )

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                record_group_history_from_activity_type(group,
                                                        activity_type,
                                                        actor=acting_user)

                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(kwargs={
                        "project_id": group.project_id,
                        "group_id": group.id
                    })

    # XXX (ahmed): hack to get the activities to work properly on issues page. Not sure of
    # what performance impact this might have & this possibly should be moved else where
    try:
        if len(group_list) == 1:
            if res_type in (
                    GroupResolution.Type.in_next_release,
                    GroupResolution.Type.in_release,
            ):
                result["activity"] = serialize(
                    Activity.objects.get_activities_for_group(
                        group=group_list[0], num=ACTIVITIES_COUNT),
                    acting_user,
                )
    except UnboundLocalError:
        pass

    if "assignedTo" in result:
        assigned_actor = result["assignedTo"]
        assigned_by = (
            data.get("assignedBy")
            if data.get("assignedBy") in ["assignee_selector", "suggested_assignee"]
            else None
        )
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()

                assignment = GroupAssignee.objects.assign(
                    group, resolved_actor, acting_user)
                analytics.record(
                    "manual.issue_assignment",
                    organization_id=project_lookup[group.project_id].organization_id,
                    project_id=group.project_id,
                    group_id=group.id,
                    assigned_by=assigned_by,
                    had_to_deassign=assignment["updated_assignment"],
                )
            result["assignedTo"] = serialize(assigned_actor.resolve(),
                                             acting_user, ActorSerializer())

        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)
                analytics.record(
                    "manual.issue_assignment",
                    organization_id=project_lookup[group.project_id].organization_id,
                    project_id=group.project_id,
                    group_id=group.id,
                    assigned_by=assigned_by,
                    had_to_deassign=True,
                )
    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists()
        for project in projects
    }
    if result.get("hasSeen"):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={"last_seen": timezone.now()},
                )
    elif result.get("hasSeen") is False:
        GroupSeen.objects.filter(group__in=group_ids,
                                 user=acting_user).delete()

    if result.get("isBookmarked"):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user)
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark)
    elif result.get("isBookmarked") is False:
        GroupBookmark.objects.filter(group__in=group_ids,
                                     user=acting_user).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for which rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get("isSubscribed") in (True, False):
        is_subscribed = result["isSubscribed"]
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={
                    "is_active": is_subscribed,
                    "reason": GroupSubscriptionReason.unknown
                },
            )

        result["subscriptionDetails"] = {
            "reason":
            SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown,
                                        "unknown")
        }

    if "isPublic" in result:
        # We always want to delete an existing share, because setting
        # isPublic=True even when the issue is already public should
        # regenerate the share.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result["shareId"] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get("isPublic"):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user)
            if created:
                result["shareId"] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady like it should be its own endpoint.
    if result.get("merge") and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response(
                {"detail": "Merging across multiple projects is not supported"}
            )
        group_list_by_times_seen = sorted(
            group_list, key=lambda g: (g.times_seen, g.id), reverse=True
        )
        primary_group, groups_to_merge = (
            group_list_by_times_seen[0],
            group_list_by_times_seen[1:],
        )

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(primary_group.project_id,
                                                    group_ids_to_merge,
                                                    primary_group.id)

        Group.objects.filter(id__in=group_ids_to_merge).update(
            status=GroupStatus.PENDING_MERGE)

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={"issues": [{
                "id": c.id
            } for c in groups_to_merge]},
        )

        result["merge"] = {
            "parent": str(primary_group.id),
            "children": [str(g.id) for g in groups_to_merge],
        }

    # Support moving groups in or out of the inbox
    inbox = result.get("inbox", None)
    if inbox is not None:
        if inbox:
            for group in group_list:
                add_group_to_inbox(group, GroupInboxReason.MANUAL)
        elif not inbox:
            for group in group_list:
                remove_group_from_inbox(
                    group,
                    action=GroupInboxRemoveAction.MARK_REVIEWED,
                    user=acting_user,
                    referrer=request.META.get("HTTP_REFERER"),
                )
                issue_mark_reviewed.send_robust(
                    project=project_lookup[group.project_id],
                    user=acting_user,
                    group=group,
                    sender=update_groups,
                )
        result["inbox"] = inbox

    return Response(result)
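Because this variant accepts explicit user and data overrides, other endpoints can delegate to it directly instead of re-parsing the request. Below is a minimal sketch under that assumption; the calling function and its arguments are hypothetical, not part of the source.

# Hypothetical caller that resolves a fixed set of issues on the caller's behalf.
def resolve_issue_ids(request, projects, organization_id, group_ids):
    return update_groups(
        request,
        group_ids=group_ids,
        projects=projects,
        organization_id=organization_id,
        search_fn=None,               # no bulk "update all" search in this path
        data={"status": "resolved"},  # overrides request.data
    )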
Example #6
def update_groups(request, projects, organization_id, search_fn):
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id,
            project__in=projects,
            id__in=group_ids,
        )
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(
            data=request.DATA,
            partial=True,
            context={'project': project},
        )
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

    result = dict(serializer.object)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = request.user if request.user.is_authenticated() else None

    if not group_ids:
        try:
            # bulk mutations are limited to 1000 items
            # TODO(dcramer): it'd be nice to support more than this, but its
            # a bit too complicated right now
            cursor_result, _ = search_fn({
                'limit': 1000,
                'paginator_options': {'max_limit': 1000},
            })
        except ValidationError as exc:
            return Response({'detail': six.text_type(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(
        id__in=group_ids,
    )

    discard = result.get('discard')
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    statusDetails = result.pop('statusDetails', result)
    status = result.get('status')
    release = None
    commit = None

    if status in ('resolved', 'resolvedInNextRelease'):
        if status == 'resolvedInNextRelease' or statusDetails.get('inNextRelease'):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in next release for multiple projects.'
                }, status=400)
            release = statusDetails.get('inNextRelease') or Release.objects.filter(
                projects=projects[0],
                organization_id=projects[0].organization_id,
            ).extra(select={
                'sort': 'COALESCE(date_released, date_added)',
            }).order_by('-sort')[0]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': '',
            }
            status_details = {
                'inNextRelease': True,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = 'in_next_release'
            res_status = GroupResolution.Status.pending
        elif statusDetails.get('inRelease'):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in release for multiple projects.'
                }, status=400)
            release = statusDetails['inRelease']
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': release.version,
            }
            status_details = {
                'inRelease': release.version,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = 'in_release'
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get('inCommit'):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in commit for multiple projects.'
                }, status=400)
            commit = statusDetails['inCommit']
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {
                'commit': commit.id,
            }
            status_details = {
                'inCommit': serialize(commit, request.user),
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type_str = 'in_commit'
        else:
            res_type_str = 'now'
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr('group.resolved', instance=res_type_str, skip_internal=True)

        # if we've specified a commit, let's see if its already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = Release.objects.filter(
                    projects__in=projects,
                    releasecommit__commit=commit,
                ).extra(select={
                    'sort': 'COALESCE(date_released, date_added)',
                }).order_by('-sort')[0]
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None

        for group in group_list:
            with transaction.atomic():
                resolution = None
                if release:
                    resolution_params = {
                        'release': release,
                        'type': res_type,
                        'status': res_status,
                        'actor_id': request.user.id
                        if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group,
                        defaults=resolution_params,
                    )
                    if not created:
                        resolution.update(
                            datetime=timezone.now(), **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(
                    id=group.id,
                ).update(
                    status=GroupStatus.RESOLVED,
                    resolved_at=now,
                )
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                assigned_to = self_subscribe_and_assign_issue(acting_user, group)
                if assigned_to is not None:
                    result['assignedTo'] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or request.user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(kwargs={
                'project_id': group.project_id,
                'group_id': group.id,
            })

        result.update({
            'status': 'resolved',
            'statusDetails': status_details,
        })

    elif status:
        new_status = STATUS_CHOICES[result['status']]

        with transaction.atomic():
            happened = queryset.exclude(
                status=new_status,
            ).update(
                status=new_status,
            )

            GroupResolution.objects.filter(
                group__in=group_ids,
            ).delete()

            if new_status == GroupStatus.IGNORED:
                metrics.incr('group.ignored', skip_internal=True)

                ignore_duration = (
                    statusDetails.pop('ignoreDuration', None) or
                    statusDetails.pop('snoozeDuration', None)
                ) or None
                ignore_count = statusDetails.pop('ignoreCount', None) or None
                ignore_window = statusDetails.pop('ignoreWindow', None) or None
                ignore_user_count = statusDetails.pop('ignoreUserCount', None) or None
                ignore_user_window = statusDetails.pop('ignoreUserWindow', None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration,
                        )
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state['times_seen'] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state['users_seen'] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                'until': ignore_until,
                                'count': ignore_count,
                                'window': ignore_window,
                                'user_count': ignore_user_count,
                                'user_window': ignore_user_window,
                                'state': state,
                                'actor_id': request.user.id
                                if request.user.is_authenticated() else None,
                            }
                        )
                        result['statusDetails'] = {
                            'ignoreCount': ignore_count,
                            'ignoreUntil': ignore_until,
                            'ignoreUserCount': ignore_user_count,
                            'ignoreUserWindow': ignore_user_window,
                            'ignoreWindow': ignore_window,
                            'actor': serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    GroupSnooze.objects.filter(
                        group__in=group_ids,
                    ).delete()
                    ignore_until = None
                    result['statusDetails'] = {}
            else:
                result['statusDetails'] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    'ignoreCount': ignore_count,
                    'ignoreDuration': ignore_duration,
                    'ignoreUntil': ignore_until,
                    'ignoreUserCount': ignore_user_count,
                    'ignoreUserWindow': ignore_user_window,
                    'ignoreWindow': ignore_window,
                }

            groups_by_project_id = defaultdict(list)
            for group in group_list:
                groups_by_project_id[group.project_id].append(group)

            for project in projects:
                project_groups = groups_by_project_id.get(project.id)
                if project_groups:
                    issue_ignored.send_robust(
                        project=project,
                        user=acting_user,
                        group_list=project_groups,
                        activity_data=activity_data,
                        sender=update_groups)

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(kwargs={
                        'project_id': group.project_id,
                        'group_id': group.id,
                    })

    if 'assignedTo' in result:
        assigned_actor = result['assignedTo']
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()

                GroupAssignee.objects.assign(group, resolved_actor, acting_user)
            result['assignedTo'] = serialize(
                assigned_actor.resolve(), acting_user, ActorSerializer())
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists() for project in projects
    }
    if result.get('hasSeen'):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={
                        'last_seen': timezone.now(),
                    }
                )
    elif result.get('hasSeen') is False:
        GroupSeen.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    if result.get('isBookmarked'):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user,
            )
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark,
            )
    elif result.get('isBookmarked') is False:
        GroupBookmark.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for which rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get('isSubscribed') in (True, False):
        is_subscribed = result['isSubscribed']
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={
                    'is_active': is_subscribed,
                    'reason': GroupSubscriptionReason.unknown,
                },
            )

        result['subscriptionDetails'] = {
            'reason': SUBSCRIPTION_REASON_MAP.get(
                GroupSubscriptionReason.unknown,
                'unknown',
            ),
        }

    if 'isPublic' in result:
        # We always want to delete an existing share, because setting
        # isPublic=True even when the issue is already public should
        # regenerate the share.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result['shareId'] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get('isPublic'):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user,
            )
            if created:
                result['shareId'] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get('merge') and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response({'detail': 'Merging across multiple projects is not supported'})
        group_list_by_times_seen = sorted(
            group_list,
            key=lambda g: (g.times_seen, g.id),
            reverse=True,
        )
        primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(
            primary_group.project_id,
            group_ids_to_merge,
            primary_group.id
        )

        Group.objects.filter(
            id__in=group_ids_to_merge
        ).update(
            status=GroupStatus.PENDING_MERGE
        )

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={
                'issues': [{'id': c.id} for c in groups_to_merge],
            },
        )

        result['merge'] = {
            'parent': six.text_type(primary_group.id),
            'children': [six.text_type(g.id) for g in groups_to_merge],
        }

    return Response(result)
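All three variants above build the same merge contract: the request body only needs to set merge, and the response reports the chosen parent (the issue with the highest times_seen) plus the children queued for merging. A small sketch of those payload shapes with made-up IDs:

# Hypothetical merge payload shapes (all IDs are placeholders).
merge_request_body = {"merge": True}
expected_merge_result = {
    "merge": {
        "parent": "10",            # issue with the highest times_seen
        "children": ["11", "12"],  # issues queued for merging into the parent
    }
}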