Example #1
    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.
        """

        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get('timestamp'),
                         reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        from sentry.models import (Commit, CommitAuthor, Group, GroupLink,
                                   GroupResolution, GroupStatus, ReleaseCommit,
                                   ReleaseHeadCommit, Repository, PullRequest)
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs
        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c for c in commit_list if
            not RepositoryProvider.should_ignore_commit(c.get('message', ''))
        ]
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(release=self).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    repo_name = data.get(
                        'repository') or u'organization-{}'.format(
                            self.organization_id)
                    if repo_name not in repos:
                        repos[
                            repo_name] = repo = Repository.objects.get_or_create(
                                organization_id=self.organization_id,
                                name=repo_name,
                            )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get('author_email')
                    if author_email is None and data.get('author_name'):
                        author_email = (re.sub(r'[^a-zA-Z0-9\-_\.]*', '',
                                               data['author_name']).lower() +
                                        '@localhost')

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {'name': data.get('author_name')}
                        author, created = CommitAuthor.objects.create_or_update(
                            organization_id=self.organization_id,
                            email=author_email,
                            values=author_data)
                        if not created:
                            author = CommitAuthor.objects.get(
                                organization_id=self.organization_id,
                                email=author_email)
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}
                    defaults = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data['author'] = author
                    if 'message' in data:
                        commit_data['message'] = data['message']
                    if 'timestamp' in data:
                        commit_data['date_added'] = data['timestamp']
                    else:
                        defaults['date_added'] = timezone.now()

                    commit, created = Commit.objects.create_or_update(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data['id'],
                        defaults=defaults,
                        values=commit_data)
                    if not created:
                        commit = Commit.objects.get(
                            organization_id=self.organization_id,
                            repository_id=repo.id,
                            key=data['id'])

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    patch_set = data.get('patch_set', [])
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file['path'],
                                    type=patched_file['type'],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self,
                            commit__author_id__isnull=False,
                        ).values_list('commit__author_id',
                                      flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
                metrics.timing('release.set_commits.duration', time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(
                release=self).select_related('commit').values(
                    'commit_id', 'commit__key'))

        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc['commit_id'] for rc in release_commits],
            ).values_list('group_id', 'linked_id'))

        commit_group_authors = [
            (
                cr[0],  # group_id
                commit_author_by_commit.get(cr[1]))
            for cr in commit_resolutions
        ]

        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[
                    rc['commit__key'] for rc in release_commits
                ],
                organization_id=self.organization_id,
            ).values_list('id', flat=True))

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list('group_id', 'linked_id'))

        pr_authors = list(
            PullRequest.objects.filter(id__in=[
                prr[1] for prr in pull_request_resolutions
            ], ).select_related('author'))

        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1]))
                                      for prr in pull_request_resolutions]

        user_by_author = {None: None}

        commits_and_prs = list(
            itertools.chain(commit_group_authors,
                            pull_request_group_authors), )

        group_project_lookup = dict(
            Group.objects.filter(id__in=[
                group_id for group_id, _ in commits_and_prs
            ], ).values_list('id', 'project_id'))

        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        'release': self,
                        'type': GroupResolution.Type.in_release,
                        'status': GroupResolution.Status.resolved,
                        'actor_id': actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id)
                group.update(status=GroupStatus.RESOLVED)
                metrics.incr('group.resolved',
                             instance='in_commit',
                             skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type='with_commit',
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={
                    'project_id': group_project_lookup[group_id],
                    'group_id': group_id,
                })
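A minimal usage sketch for the method above (the `release` object and all field values are hypothetical; the dict keys mirror what set_commits() actually reads: 'id', 'repository', 'author_name', 'author_email', 'message', 'timestamp' and 'patch_set'):

# Hypothetical call site; `release` stands for a sentry.models.Release instance.
commit_list = [
    {
        "id": "2d1ab93fe4bb42db80890f01f8358fc9f8fbff3b",  # commit SHA, required
        "repository": "getsentry/sentry",  # falls back to u"organization-<org id>"
        "author_name": "Jane Doe",
        "author_email": "jane@example.com",
        "message": "fix(release): handle commits without timestamps",
        "timestamp": "2021-06-01T12:00:00Z",
        "patch_set": [{"path": "src/sentry/models/release.py", "type": "M"}],
    },
]
release.set_commits(commit_list)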
Example #2
File: release.py  Project: ranyitz/sentry
    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.
        """

        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get("timestamp", 0),
                         reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        from sentry.models import (
            Commit,
            CommitAuthor,
            Group,
            GroupLink,
            GroupResolution,
            GroupStatus,
            ReleaseCommit,
            ReleaseHeadCommit,
            Repository,
            PullRequest,
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs

        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c for c in commit_list if
            not RepositoryProvider.should_ignore_commit(c.get("message", ""))
        ]
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(release=self).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    repo_name = data.get(
                        "repository") or u"organization-{}".format(
                            self.organization_id)
                    if repo_name not in repos:
                        repos[
                            repo_name] = repo = Repository.objects.get_or_create(
                                organization_id=self.organization_id,
                                name=repo_name)[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get("author_email")
                    if author_email is None and data.get("author_name"):
                        author_email = (re.sub(r"[^a-zA-Z0-9\-_\.]*", "",
                                               data["author_name"]).lower() +
                                        "@localhost")

                    author_email = truncatechars(author_email, 75)

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {"name": data.get("author_name")}
                        author, created = CommitAuthor.objects.get_or_create(
                            organization_id=self.organization_id,
                            email=author_email,
                            defaults=author_data,
                        )
                        if author.name != author_data["name"]:
                            author.update(name=author_data["name"])
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data["author"] = author
                    if "message" in data:
                        commit_data["message"] = data["message"]
                    if "timestamp" in data:
                        commit_data["date_added"] = data["timestamp"]

                    commit, created = Commit.objects.get_or_create(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data["id"],
                        defaults=commit_data,
                    )
                    if not created:
                        commit_data = {
                            key: value
                            for key, value in six.iteritems(commit_data)
                            if getattr(commit, key) != value
                        }
                        if commit_data:
                            commit.update(**commit_data)

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    # Guard against patch_set being None
                    patch_set = data.get("patch_set") or []
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file["path"],
                                    type=patched_file["type"],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self, commit__author_id__isnull=False).
                        values_list("commit__author_id", flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
                metrics.timing("release.set_commits.duration", time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(
            ReleaseCommit.objects.filter(
                release=self).select_related("commit").values(
                    "commit_id", "commit__key"))

        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc["commit_id"] for rc in release_commits],
            ).values_list("group_id", "linked_id"))

        commit_group_authors = [
            (cr[0], commit_author_by_commit.get(cr[1]))  # cr[0] is the group_id
            for cr in commit_resolutions
        ]

        pr_ids_by_merge_commit = list(
            PullRequest.objects.filter(
                merge_commit_sha__in=[
                    rc["commit__key"] for rc in release_commits
                ],
                organization_id=self.organization_id,
            ).values_list("id", flat=True))

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list("group_id", "linked_id"))

        pr_authors = list(
            PullRequest.objects.filter(
                id__in=[prr[1] for prr in pull_request_resolutions
                        ]).select_related("author"))

        pr_authors_dict = {pra.id: pra.author for pra in pr_authors}

        pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1]))
                                      for prr in pull_request_resolutions]

        user_by_author = {None: None}

        commits_and_prs = list(
            itertools.chain(commit_group_authors, pull_request_group_authors))

        group_project_lookup = dict(
            Group.objects.filter(
                id__in=[group_id
                        for group_id, _ in commits_and_prs]).values_list(
                            "id", "project_id"))

        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        "release": self,
                        "type": GroupResolution.Type.in_release,
                        "status": GroupResolution.Status.resolved,
                        "actor_id": actor.id if actor else None,
                    },
                )
                group = Group.objects.get(id=group_id)
                group.update(status=GroupStatus.RESOLVED)
                remove_group_from_inbox(group)
                metrics.incr("group.resolved",
                             instance="in_commit",
                             skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type="with_commit",
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(
                kwargs={
                    "project_id": group_project_lookup[group_id],
                    "group_id": group_id
                })
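Compared to example #1, this version truncates the author email and swaps create_or_update for get_or_create plus a diff-and-update on CommitAuthor and Commit. A generic sketch of that upsert pattern, with nothing Sentry-specific (the model, lookup and helper names are made up):

from django.db import transaction

def upsert(model, lookup, values):
    """Create the row if missing, otherwise update only the fields that changed.

    Illustrative helper only; condenses the get_or_create + conditional update
    used for Commit/CommitAuthor above.
    """
    with transaction.atomic():
        obj, created = model.objects.get_or_create(defaults=values, **lookup)
        if not created:
            changed = {k: v for k, v in values.items() if getattr(obj, k) != v}
            if changed:
                for field, value in changed.items():
                    setattr(obj, field, value)
                obj.save(update_fields=list(changed))
    return obj, created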
Example #3
File: group_index.py  Project: zvrr/sentry
def update_groups(request, projects, organization_id, search_fn):
    group_ids = request.GET.getlist("id")
    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id, project__in=projects, id__in=group_ids
        )
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(data=request.data, partial=True, context={"project": project})
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

    result = dict(serializer.validated_data)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = request.user if request.user.is_authenticated() else None

    if not group_ids:
        try:
            # bulk mutations are limited to 1000 items
            # TODO(dcramer): it'd be nice to support more than this, but it's
            # a bit too complicated right now
            cursor_result, _ = search_fn({"limit": 1000, "paginator_options": {"max_limit": 1000}})
        except ValidationError as exc:
            return Response({"detail": six.text_type(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(id__in=group_ids)

    discard = result.get("discard")
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    statusDetails = result.pop("statusDetails", result)
    status = result.get("status")
    release = None
    commit = None

    if status in ("resolved", "resolvedInNextRelease"):
        if status == "resolvedInNextRelease" or statusDetails.get("inNextRelease"):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in next release for multiple projects."},
                    status=400,
                )
            release = (
                statusDetails.get("inNextRelease")
                or Release.objects.filter(
                    projects=projects[0], organization_id=projects[0].organization_id
                )
                .extra(select={"sort": "COALESCE(date_released, date_added)"})
                .order_by("-sort")[0]
            )
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": ""
            }
            status_details = {
                "inNextRelease": True,
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = "in_next_release"
            res_status = GroupResolution.Status.pending
        elif statusDetails.get("inRelease"):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in release for multiple projects."}, status=400
                )
            release = statusDetails["inRelease"]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": release.version
            }
            status_details = {
                "inRelease": release.version,
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = "in_release"
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get("inCommit"):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in commit for multiple projects."}, status=400
                )
            commit = statusDetails["inCommit"]
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {"commit": commit.id}
            status_details = {
                "inCommit": serialize(commit, request.user),
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type_str = "in_commit"
        else:
            res_type_str = "now"
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr("group.resolved", instance=res_type_str, skip_internal=True)

        # if we've specified a commit, let's see if its already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = (
                    Release.objects.filter(projects__in=projects, releasecommit__commit=commit)
                    .extra(select={"sort": "COALESCE(date_released, date_added)"})
                    .order_by("-sort")[0]
                )
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None

        for group in group_list:
            with transaction.atomic():
                resolution = None
                if release:
                    resolution_params = {
                        "release": release,
                        "type": res_type,
                        "status": res_status,
                        "actor_id": request.user.id if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group, defaults=resolution_params
                    )
                    if not created:
                        resolution.update(datetime=timezone.now(), **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(id=group.id).update(
                    status=GroupStatus.RESOLVED, resolved_at=now
                )
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                assigned_to = self_subscribe_and_assign_issue(acting_user, group)
                if assigned_to is not None:
                    result["assignedTo"] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or request.user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(
                kwargs={"project_id": group.project_id, "group_id": group.id}
            )

        result.update({"status": "resolved", "statusDetails": status_details})

    elif status:
        new_status = STATUS_CHOICES[result["status"]]

        with transaction.atomic():
            happened = queryset.exclude(status=new_status).update(status=new_status)

            GroupResolution.objects.filter(group__in=group_ids).delete()

            if new_status == GroupStatus.IGNORED:
                metrics.incr("group.ignored", skip_internal=True)

                ignore_duration = (
                    statusDetails.pop("ignoreDuration", None)
                    or statusDetails.pop("snoozeDuration", None)
                ) or None
                ignore_count = statusDetails.pop("ignoreCount", None) or None
                ignore_window = statusDetails.pop("ignoreWindow", None) or None
                ignore_user_count = statusDetails.pop("ignoreUserCount", None) or None
                ignore_user_window = statusDetails.pop("ignoreUserWindow", None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(minutes=ignore_duration)
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state["times_seen"] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state["users_seen"] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                "until": ignore_until,
                                "count": ignore_count,
                                "window": ignore_window,
                                "user_count": ignore_user_count,
                                "user_window": ignore_user_window,
                                "state": state,
                                "actor_id": request.user.id
                                if request.user.is_authenticated()
                                else None,
                            },
                        )
                        result["statusDetails"] = {
                            "ignoreCount": ignore_count,
                            "ignoreUntil": ignore_until,
                            "ignoreUserCount": ignore_user_count,
                            "ignoreUserWindow": ignore_user_window,
                            "ignoreWindow": ignore_window,
                            "actor": serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    GroupSnooze.objects.filter(group__in=group_ids).delete()
                    ignore_until = None
                    result["statusDetails"] = {}
            else:
                result["statusDetails"] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    "ignoreCount": ignore_count,
                    "ignoreDuration": ignore_duration,
                    "ignoreUntil": ignore_until,
                    "ignoreUserCount": ignore_user_count,
                    "ignoreUserWindow": ignore_user_window,
                    "ignoreWindow": ignore_window,
                }

            groups_by_project_id = defaultdict(list)
            for group in group_list:
                groups_by_project_id[group.project_id].append(group)

            for project in projects:
                project_groups = groups_by_project_id.get(project.id)
                if project_groups:
                    issue_ignored.send_robust(
                        project=project,
                        user=acting_user,
                        group_list=project_groups,
                        activity_data=activity_data,
                        sender=update_groups,
                    )

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(
                        kwargs={"project_id": group.project_id, "group_id": group.id}
                    )

    if "assignedTo" in result:
        assigned_actor = result["assignedTo"]
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()

                GroupAssignee.objects.assign(group, resolved_actor, acting_user)
            result["assignedTo"] = serialize(
                assigned_actor.resolve(), acting_user, ActorSerializer()
            )
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists() for project in projects
    }
    if result.get("hasSeen"):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={"last_seen": timezone.now()},
                )
    elif result.get("hasSeen") is False:
        GroupSeen.objects.filter(group__in=group_ids, user=acting_user).delete()

    if result.get("isBookmarked"):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id], group=group, user=acting_user
            )
            GroupSubscription.objects.subscribe(
                user=acting_user, group=group, reason=GroupSubscriptionReason.bookmark
            )
    elif result.get("isBookmarked") is False:
        GroupBookmark.objects.filter(group__in=group_ids, user=acting_user).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for which rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get("isSubscribed") in (True, False):
        is_subscribed = result["isSubscribed"]
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={"is_active": is_subscribed, "reason": GroupSubscriptionReason.unknown},
            )

        result["subscriptionDetails"] = {
            "reason": SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown, "unknown")
        }

    if "isPublic" in result:
        # We always want to delete an existing share, because triggering
        # an isPublic=True even when it's already public, should trigger
        # regenerating.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result["shareId"] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get("isPublic"):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id], group=group, user=acting_user
            )
            if created:
                result["shareId"] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get("merge") and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response({"detail": "Merging across multiple projects is not supported"})
        group_list_by_times_seen = sorted(
            group_list, key=lambda g: (g.times_seen, g.id), reverse=True
        )
        primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(
            primary_group.project_id, group_ids_to_merge, primary_group.id
        )

        Group.objects.filter(id__in=group_ids_to_merge).update(status=GroupStatus.PENDING_MERGE)

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={"issues": [{"id": c.id} for c in groups_to_merge]},
        )

        result["merge"] = {
            "parent": six.text_type(primary_group.id),
            "children": [six.text_type(g.id) for g in groups_to_merge],
        }

    return Response(result)
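As a hedged illustration of the request this handler processes (the URL, slugs, issue ids and token below are placeholders; the payload keys correspond to what the code reads from request.data and request.GET):

# Hypothetical client call against the issues update endpoint handled above.
import requests

resp = requests.put(
    "https://sentry.example.com/api/0/projects/my-org/my-project/issues/",
    params={"id": [12345, 12346]},  # read via request.GET.getlist("id")
    json={
        "status": "ignored",  # looked up in STATUS_CHOICES
        "statusDetails": {"ignoreDuration": 60},  # minutes, per the timedelta above
    },
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()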
Example #4
File: releases.py  Project: sugusbs/sentry
def resolved_in_commit(instance, created, **kwargs):
    current_datetime = timezone.now()

    groups = instance.find_referenced_groups()

    # Delete GroupLinks where message may have changed
    group_ids = {g.id for g in groups}
    group_links = GroupLink.objects.filter(
        linked_type=GroupLink.LinkedType.commit,
        relationship=GroupLink.Relationship.resolves,
        linked_id=instance.id,
    )
    for link in group_links:
        if link.group_id not in group_ids:
            remove_resolved_link(link)

    try:
        repo = Repository.objects.get(id=instance.repository_id)
    except Repository.DoesNotExist:
        repo = None

    for group in groups:
        try:
            # XXX(dcramer): This code is somewhat duplicated from the
            # project_group_index mutation api
            with transaction.atomic():
                GroupLink.objects.create(
                    group_id=group.id,
                    project_id=group.project_id,
                    linked_type=GroupLink.LinkedType.commit,
                    relationship=GroupLink.Relationship.resolves,
                    linked_id=instance.id,
                )

                if instance.author:
                    user_list = list(instance.author.find_users())
                else:
                    user_list = ()

                if user_list:
                    acting_user = user_list[0]
                    Activity.objects.create(
                        project_id=group.project_id,
                        group=group,
                        type=Activity.SET_RESOLVED_IN_COMMIT,
                        ident=instance.id,
                        user=acting_user,
                        data={"commit": instance.id},
                    )
                    self_assign_issue = UserOption.objects.get_value(
                        user=acting_user, key="self_assign_issue", default="0")
                    if self_assign_issue == "1" and not group.assignee_set.exists(
                    ):
                        GroupAssignee.objects.assign(group=group,
                                                     assigned_to=acting_user,
                                                     acting_user=acting_user)

                    # while we only create activity and assignment for one user we want to
                    # subscribe every user
                    for user in user_list:
                        GroupSubscription.objects.subscribe(
                            user=user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change)

                else:
                    Activity.objects.create(
                        project_id=group.project_id,
                        group=group,
                        type=Activity.SET_RESOLVED_IN_COMMIT,
                        ident=instance.id,
                        data={"commit": instance.id},
                    )
                Group.objects.filter(id=group.id).update(
                    status=GroupStatus.RESOLVED, resolved_at=current_datetime)
                remove_group_from_inbox(group, action="resolved")
        except IntegrityError:
            pass
        else:
            if repo is not None:
                if repo.integration_id is not None:
                    analytics.record(
                        "integration.resolve.commit",
                        provider=repo.provider,
                        id=repo.integration_id,
                        organization_id=repo.organization_id,
                    )
                user = user_list[0] if user_list else None

                issue_resolved.send_robust(
                    organization_id=repo.organization_id,
                    user=user,
                    group=group,
                    project=group.project,
                    resolution_type="with_commit",
                    sender="resolved_with_commit",
                )
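The handler above relies on instance.find_referenced_groups() to decide which issues a commit resolves. A simplified, self-contained sketch of that kind of commit-message scan (the "Fixes <SHORT-ID>" pattern and the helper name are illustrative, not Sentry's exact implementation):

import re

# Illustrative only: pull "Fixes PROJ-123" style references out of a commit message.
# Sentry's real find_referenced_groups() lives on the Commit model and resolves
# the short IDs against the organization's groups.
FIXES_RE = re.compile(
    r"\b(?:fix(?:es|ed)?|close[sd]?|resolve[sd]?)\b\s+([A-Z][A-Z0-9_]*-[A-Z0-9]+)",
    re.IGNORECASE,
)

def find_referenced_short_ids(message):
    return set(FIXES_RE.findall(message or ""))

# find_referenced_short_ids("Fix crash on startup\n\nFixes SENTRY-ABC") -> {"SENTRY-ABC"}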
Example #5
File: release.py  Project: getsentry/sentry
    def set_commits(self, commit_list):
        """
        Bind a list of commits to this release.

        This will clear any existing commit log and replace it with the given
        commits.
        """

        # Sort commit list in reverse order
        commit_list.sort(key=lambda commit: commit.get('timestamp'), reverse=True)

        # TODO(dcramer): this function could use some cleanup/refactoring as it's a bit unwieldy
        from sentry.models import (
            Commit, CommitAuthor, Group, GroupLink, GroupResolution, GroupStatus,
            ReleaseCommit, ReleaseHeadCommit, Repository, PullRequest
        )
        from sentry.plugins.providers.repository import RepositoryProvider
        from sentry.tasks.integrations import kick_off_status_syncs
        # todo(meredith): implement for IntegrationRepositoryProvider
        commit_list = [
            c for c in commit_list
            if not RepositoryProvider.should_ignore_commit(c.get('message', ''))
        ]
        lock_key = type(self).get_lock_key(self.organization_id, self.id)
        lock = locks.get(lock_key, duration=10)
        with TimedRetryPolicy(10)(lock.acquire):
            start = time()
            with transaction.atomic():
                # TODO(dcramer): would be good to optimize the logic to avoid these
                # deletes but not overly important
                ReleaseCommit.objects.filter(
                    release=self,
                ).delete()

                authors = {}
                repos = {}
                commit_author_by_commit = {}
                head_commit_by_repo = {}
                latest_commit = None
                for idx, data in enumerate(commit_list):
                    repo_name = data.get('repository'
                                         ) or u'organization-{}'.format(self.organization_id)
                    if repo_name not in repos:
                        repos[repo_name] = repo = Repository.objects.get_or_create(
                            organization_id=self.organization_id,
                            name=repo_name,
                        )[0]
                    else:
                        repo = repos[repo_name]

                    author_email = data.get('author_email')
                    if author_email is None and data.get('author_name'):
                        author_email = (
                            re.sub(r'[^a-zA-Z0-9\-_\.]*', '', data['author_name']).lower() +
                            '@localhost'
                        )

                    if not author_email:
                        author = None
                    elif author_email not in authors:
                        author_data = {
                            'name': data.get('author_name')
                        }
                        author, created = CommitAuthor.objects.create_or_update(
                            organization_id=self.organization_id,
                            email=author_email,
                            values=author_data)
                        if not created:
                            author = CommitAuthor.objects.get(
                                organization_id=self.organization_id,
                                email=author_email)
                        authors[author_email] = author
                    else:
                        author = authors[author_email]

                    commit_data = {}
                    defaults = {}

                    # Update/set message and author if they are provided.
                    if author is not None:
                        commit_data['author'] = author
                    if 'message' in data:
                        commit_data['message'] = data['message']
                    if 'timestamp' in data:
                        commit_data['date_added'] = data['timestamp']
                    else:
                        defaults['date_added'] = timezone.now()

                    commit, created = Commit.objects.create_or_update(
                        organization_id=self.organization_id,
                        repository_id=repo.id,
                        key=data['id'],
                        defaults=defaults,
                        values=commit_data)
                    if not created:
                        commit = Commit.objects.get(
                            organization_id=self.organization_id,
                            repository_id=repo.id,
                            key=data['id'])

                    if author is None:
                        author = commit.author

                    commit_author_by_commit[commit.id] = author

                    patch_set = data.get('patch_set', [])
                    for patched_file in patch_set:
                        try:
                            with transaction.atomic():
                                CommitFileChange.objects.create(
                                    organization_id=self.organization.id,
                                    commit=commit,
                                    filename=patched_file['path'],
                                    type=patched_file['type'],
                                )
                        except IntegrityError:
                            pass

                    try:
                        with transaction.atomic():
                            ReleaseCommit.objects.create(
                                organization_id=self.organization_id,
                                release=self,
                                commit=commit,
                                order=idx,
                            )
                    except IntegrityError:
                        pass

                    if latest_commit is None:
                        latest_commit = commit

                    head_commit_by_repo.setdefault(repo.id, commit.id)

                self.update(
                    commit_count=len(commit_list),
                    authors=[
                        six.text_type(a_id)
                        for a_id in ReleaseCommit.objects.filter(
                            release=self,
                            commit__author_id__isnull=False,
                        ).values_list('commit__author_id', flat=True).distinct()
                    ],
                    last_commit_id=latest_commit.id if latest_commit else None,
                )
                metrics.timing('release.set_commits.duration', time() - start)

        # fill any missing ReleaseHeadCommit entries
        for repo_id, commit_id in six.iteritems(head_commit_by_repo):
            try:
                with transaction.atomic():
                    ReleaseHeadCommit.objects.create(
                        organization_id=self.organization_id,
                        release_id=self.id,
                        repository_id=repo_id,
                        commit_id=commit_id,
                    )
            except IntegrityError:
                pass

        release_commits = list(ReleaseCommit.objects.filter(release=self)
                               .select_related('commit').values('commit_id', 'commit__key'))

        commit_resolutions = list(
            GroupLink.objects.filter(
                linked_type=GroupLink.LinkedType.commit,
                linked_id__in=[rc['commit_id'] for rc in release_commits],
            ).values_list('group_id', 'linked_id')
        )

        commit_group_authors = [
            (cr[0],  # group_id
             commit_author_by_commit.get(cr[1])) for cr in commit_resolutions]

        pr_ids_by_merge_commit = list(PullRequest.objects.filter(
            merge_commit_sha__in=[rc['commit__key'] for rc in release_commits],
            organization_id=self.organization_id,
        ).values_list('id', flat=True))

        pull_request_resolutions = list(
            GroupLink.objects.filter(
                relationship=GroupLink.Relationship.resolves,
                linked_type=GroupLink.LinkedType.pull_request,
                linked_id__in=pr_ids_by_merge_commit,
            ).values_list('group_id', 'linked_id')
        )

        pr_authors = list(PullRequest.objects.filter(
            id__in=[prr[1] for prr in pull_request_resolutions],
        ).select_related('author'))

        pr_authors_dict = {
            pra.id: pra.author for pra in pr_authors
        }

        pull_request_group_authors = [(prr[0], pr_authors_dict.get(prr[1]))
                                      for prr in pull_request_resolutions]

        user_by_author = {None: None}

        commits_and_prs = list(
            itertools.chain(commit_group_authors, pull_request_group_authors),
        )

        group_project_lookup = dict(Group.objects.filter(
            id__in=[group_id for group_id, _ in commits_and_prs],
        ).values_list('id', 'project_id'))

        for group_id, author in commits_and_prs:
            if author not in user_by_author:
                try:
                    user_by_author[author] = author.find_users()[0]
                except IndexError:
                    user_by_author[author] = None
            actor = user_by_author[author]

            with transaction.atomic():
                GroupResolution.objects.create_or_update(
                    group_id=group_id,
                    values={
                        'release': self,
                        'type': GroupResolution.Type.in_release,
                        'status': GroupResolution.Status.resolved,
                        'actor_id': actor.id if actor else None,
                    },
                )
                group = Group.objects.get(
                    id=group_id,
                )
                group.update(status=GroupStatus.RESOLVED)
                metrics.incr('group.resolved', instance='in_commit', skip_internal=True)

            issue_resolved.send_robust(
                organization_id=self.organization_id,
                user=actor,
                group=group,
                project=group.project,
                resolution_type='with_commit',
                sender=type(self),
            )

            kick_off_status_syncs.apply_async(kwargs={
                'project_id': group_project_lookup[group_id],
                'group_id': group_id,
            })
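Throughout these set_commits examples, each insert is wrapped in try / transaction.atomic() / except IntegrityError so that re-running the method is idempotent against unique constraints. A condensed sketch of that pattern (the helper name is made up, not part of Sentry):

from django.db import IntegrityError, transaction

def create_ignoring_duplicates(model, **kwargs):
    """Insert a row, treating a unique-constraint violation as "already exists".

    Condenses the repeated try/atomic/except IntegrityError blocks above;
    illustrative helper only.
    """
    try:
        with transaction.atomic():
            return model.objects.create(**kwargs), True
    except IntegrityError:
        return None, False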
Example #6
def update_groups(
    request: Request,
    group_ids: Sequence[Group],
    projects: Sequence[Project],
    organization_id: int,
    search_fn: SearchFunction | None,
    user: User | None = None,
    data: Mapping[str, Any] | None = None,
) -> Response:
    # If `user` and `data` are passed as parameters then they should override
    # the values in `request`.
    user = user or request.user
    data = data or request.data

    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id,
            project__in=projects,
            id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    serializer = None
    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(
            data=data,
            partial=True,
            context={
                "project": project,
                "organization": project.organization,
                "access": getattr(request, "access", None),
            },
        )
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

    if serializer is None:
        return

    result = dict(serializer.validated_data)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = user if user.is_authenticated else None

    if search_fn and not group_ids:
        try:
            cursor_result, _ = search_fn({
                "limit": BULK_MUTATION_LIMIT,
                "paginator_options": {
                    "max_limit": BULK_MUTATION_LIMIT
                },
            })
        except ValidationError as exc:
            return Response({"detail": str(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(id__in=group_ids)

    discard = result.get("discard")
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    statusDetails = result.pop("statusDetails", result)
    status = result.get("status")
    release = None
    commit = None
    res_type = None
    activity_type = None
    activity_data: MutableMapping[str, Any | None] | None = None
    if status in ("resolved", "resolvedInNextRelease"):
        res_status = None
        if status == "resolvedInNextRelease" or statusDetails.get(
                "inNextRelease"):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in next release for multiple projects."},
                    status=400,
                )
            release = (
                statusDetails.get("inNextRelease") or Release.objects.filter(
                    projects=projects[0],
                    organization_id=projects[0].organization_id).extra(
                        select={
                            "sort": "COALESCE(date_released, date_added)"
                        }).order_by("-sort")[0])
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": ""
            }
            status_details = {
                "inNextRelease": True,
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = "in_next_release"
            res_status = GroupResolution.Status.pending
        elif statusDetails.get("inRelease"):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in release for multiple projects."},
                    status=400)
            release = statusDetails["inRelease"]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": release.version
            }
            status_details = {
                "inRelease": release.version,
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = "in_release"
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get("inCommit"):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in commit for multiple projects."},
                    status=400)
            commit = statusDetails["inCommit"]
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {"commit": commit.id}
            status_details = {
                "inCommit": serialize(commit, user),
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type_str = "in_commit"
        else:
            res_type_str = "now"
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr("group.resolved",
                     instance=res_type_str,
                     skip_internal=True)

        # if we've specified a commit, let's see if it's already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = (Release.objects.filter(
                    projects__in=projects, releasecommit__commit=commit).extra(
                        select={
                            "sort": "COALESCE(date_released, date_added)"
                        }).order_by("-sort")[0])
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None
        for group in group_list:
            with transaction.atomic():
                resolution = None
                created = None
                if release:
                    resolution_params = {
                        "release": release,
                        "type": res_type,
                        "status": res_status,
                        "actor_id": user.id if user.is_authenticated else None,
                    }

                    # We only set `current_release_version` if GroupResolution type is
                    # in_next_release, because we need to store information about the latest/most
                    # recent release that was associated with a group and that is required for
                    # release comparisons (i.e. handling regressions)
                    if res_type == GroupResolution.Type.in_next_release:
                        # Check if semver versioning scheme is followed
                        follows_semver = follows_semver_versioning_scheme(
                            org_id=group.organization.id,
                            project_id=group.project.id,
                            release_version=release.version,
                        )

                        current_release_version = get_current_release_version_of_group(
                            group=group, follows_semver=follows_semver)
                        if current_release_version:
                            resolution_params.update({
                                "current_release_version": current_release_version
                            })

                            # Set `current_release_version` on the activity now; there is no point
                            # in waiting for a new release to be created (i.e. for the
                            # clear_expired_resolutions task to run). The activity should read
                            # "... resolved in version >current_release_version" in the UI.
                            if follows_semver:
                                activity_data.update({
                                    "current_release_version": current_release_version
                                })

                                # In semver projects, and thereby semver releases, we determine
                                # resolutions by comparing against an expression rather than a
                                # specific release (i.e. >current_release_version). Consequently,
                                # at this point we can consider this GroupResolution as resolved
                                # in release
                                resolution_params.update({
                                    "type": GroupResolution.Type.in_release,
                                    "status": GroupResolution.Status.resolved,
                                })
                            else:
                                # If the `next` release in date-based ordering is already known
                                # when clicking `resolvedInNextRelease` (because it has already
                                # been released), there is no point in setting the GroupResolution
                                # type to in_next_release; in_release suffices.

                                try:
                                    # Get current release object from current_release_version
                                    current_release_obj = Release.objects.get(
                                        version=current_release_version,
                                        organization_id=projects[0].organization_id,
                                    )

                                    date_order_q = Q(
                                        date_added__gt=current_release_obj.date_added
                                    ) | Q(
                                        date_added=current_release_obj.date_added,
                                        id__gt=current_release_obj.id,
                                    )

                                    # Find the next release after the current_release_version
                                    # i.e. the release that resolves the issue
                                    resolved_in_release = (
                                        Release.objects.filter(
                                            date_order_q,
                                            projects=projects[0],
                                            organization_id=projects[0].organization_id,
                                        )
                                        .extra(select={"sort": "COALESCE(date_released, date_added)"})
                                        .order_by("sort", "id")[:1]
                                        .get()
                                    )

                                    # If we get here, we assume it exists and so we update
                                    # GroupResolution and Activity
                                    resolution_params.update({
                                        "release": resolved_in_release,
                                        "type": GroupResolution.Type.in_release,
                                        "status": GroupResolution.Status.resolved,
                                    })
                                    activity_data.update({
                                        "version": resolved_in_release.version
                                    })
                                except Release.DoesNotExist:
                                    # If it gets here, it means we don't know the upcoming
                                    # release yet because it does not exist, and so we should
                                    # fall back to our current model
                                    ...

                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group, defaults=resolution_params)
                    if not created:
                        resolution.update(datetime=timezone.now(),
                                          **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(id=group.id).update(
                    status=GroupStatus.RESOLVED, resolved_at=now)
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now
                remove_group_from_inbox(group,
                                        action=GroupInboxRemoveAction.RESOLVED,
                                        user=acting_user)
                result["inbox"] = None

                assigned_to = self_subscribe_and_assign_issue(
                    acting_user, group)
                if assigned_to is not None:
                    result["assignedTo"] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    record_group_history_from_activity_type(group,
                                                            activity_type,
                                                            actor=acting_user)

                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(kwargs={
                "project_id": group.project_id,
                "group_id": group.id
            })

        result.update({"status": "resolved", "statusDetails": status_details})

    elif status:
        new_status = STATUS_UPDATE_CHOICES[result["status"]]
        ignore_duration = None
        ignore_count = None
        ignore_window = None
        ignore_user_count = None
        ignore_user_window = None
        ignore_until = None

        with transaction.atomic():
            happened = queryset.exclude(status=new_status).update(
                status=new_status)

            GroupResolution.objects.filter(group__in=group_ids).delete()
            if new_status == GroupStatus.IGNORED:
                metrics.incr("group.ignored", skip_internal=True)
                for group in group_ids:
                    remove_group_from_inbox(
                        group,
                        action=GroupInboxRemoveAction.IGNORED,
                        user=acting_user)
                result["inbox"] = None

                ignore_duration = (statusDetails.pop("ignoreDuration", None)
                                   or statusDetails.pop(
                                       "snoozeDuration", None)) or None
                ignore_count = statusDetails.pop("ignoreCount", None) or None
                ignore_window = statusDetails.pop("ignoreWindow", None) or None
                ignore_user_count = statusDetails.pop("ignoreUserCount",
                                                      None) or None
                ignore_user_window = statusDetails.pop("ignoreUserWindow",
                                                       None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration)
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state["times_seen"] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state["users_seen"] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                "until": ignore_until,
                                "count": ignore_count,
                                "window": ignore_window,
                                "user_count": ignore_user_count,
                                "user_window": ignore_user_window,
                                "state": state,
                                "actor_id": user.id if user.is_authenticated else None,
                            },
                        )
                        result["statusDetails"] = {
                            "ignoreCount": ignore_count,
                            "ignoreUntil": ignore_until,
                            "ignoreUserCount": ignore_user_count,
                            "ignoreUserWindow": ignore_user_window,
                            "ignoreWindow": ignore_window,
                            "actor": serialize(extract_lazy_object(user),
                                               user),
                        }
                else:
                    GroupSnooze.objects.filter(group__in=group_ids).delete()
                    ignore_until = None
                    result["statusDetails"] = {}
            else:
                result["statusDetails"] = {}
        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}

                for group in group_list:
                    if group.status == GroupStatus.IGNORED:
                        issue_unignored.send_robust(
                            project=project_lookup[group.project_id],
                            user=acting_user,
                            group=group,
                            transition_type="manual",
                            sender=update_groups,
                        )
                    else:
                        issue_unresolved.send_robust(
                            project=project_lookup[group.project_id],
                            user=acting_user,
                            group=group,
                            transition_type="manual",
                            sender=update_groups,
                        )
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    "ignoreCount": ignore_count,
                    "ignoreDuration": ignore_duration,
                    "ignoreUntil": ignore_until,
                    "ignoreUserCount": ignore_user_count,
                    "ignoreUserWindow": ignore_user_window,
                    "ignoreWindow": ignore_window,
                }

                groups_by_project_id = defaultdict(list)
                for group in group_list:
                    groups_by_project_id[group.project_id].append(group)

                for project in projects:
                    project_groups = groups_by_project_id.get(project.id)
                    if project_groups:
                        issue_ignored.send_robust(
                            project=project,
                            user=acting_user,
                            group_list=project_groups,
                            activity_data=activity_data,
                            sender=update_groups,
                        )

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                record_group_history_from_activity_type(group,
                                                        activity_type,
                                                        actor=acting_user)

                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(kwargs={
                        "project_id": group.project_id,
                        "group_id": group.id
                    })

    # XXX (ahmed): hack to get the activities to work properly on the issues page. Not sure
    # what performance impact this might have, and this should possibly be moved elsewhere.
    try:
        if len(group_list) == 1:
            if res_type in (
                    GroupResolution.Type.in_next_release,
                    GroupResolution.Type.in_release,
            ):
                result["activity"] = serialize(
                    Activity.objects.get_activities_for_group(
                        group=group_list[0], num=ACTIVITIES_COUNT),
                    acting_user,
                )
    except UnboundLocalError:
        pass

    if "assignedTo" in result:
        assigned_actor = result["assignedTo"]
        assigned_by = (
            data.get("assignedBy")
            if data.get("assignedBy") in ["assignee_selector", "suggested_assignee"]
            else None
        )
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()

                assignment = GroupAssignee.objects.assign(
                    group, resolved_actor, acting_user)
                analytics.record(
                    "manual.issue_assignment",
                    organization_id=project_lookup[group.project_id].organization_id,
                    project_id=group.project_id,
                    group_id=group.id,
                    assigned_by=assigned_by,
                    had_to_deassign=assignment["updated_assignment"],
                )
            result["assignedTo"] = serialize(assigned_actor.resolve(),
                                             acting_user, ActorSerializer())

        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)
                analytics.record(
                    "manual.issue_assignment",
                    organization_id=project_lookup[group.project_id].organization_id,
                    project_id=group.project_id,
                    group_id=group.id,
                    assigned_by=assigned_by,
                    had_to_deassign=True,
                )
    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists()
        for project in projects
    }
    if result.get("hasSeen"):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={"last_seen": timezone.now()},
                )
    elif result.get("hasSeen") is False:
        GroupSeen.objects.filter(group__in=group_ids,
                                 user=acting_user).delete()

    if result.get("isBookmarked"):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user)
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark)
    elif result.get("isBookmarked") is False:
        GroupBookmark.objects.filter(group__in=group_ids,
                                     user=acting_user).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for which rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get("isSubscribed") in (True, False):
        is_subscribed = result["isSubscribed"]
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={
                    "is_active": is_subscribed,
                    "reason": GroupSubscriptionReason.unknown
                },
            )

        result["subscriptionDetails"] = {
            "reason": SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown, "unknown")
        }

    if "isPublic" in result:
        # We always want to delete an existing share, because triggering
        # an isPublic=True even when it's already public, should trigger
        # regenerating.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result["shareId"] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get("isPublic"):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user)
            if created:
                result["shareId"] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady, like it should be its own endpoint.
    if result.get("merge") and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response({
                "detail": "Merging across multiple projects is not supported"
            })
        group_list_by_times_seen = sorted(group_list,
                                          key=lambda g: (g.times_seen, g.id),
                                          reverse=True)
        primary_group, groups_to_merge = (group_list_by_times_seen[0],
                                          group_list_by_times_seen[1:])

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(primary_group.project_id,
                                                    group_ids_to_merge,
                                                    primary_group.id)

        Group.objects.filter(id__in=group_ids_to_merge).update(
            status=GroupStatus.PENDING_MERGE)

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={"issues": [{
                "id": c.id
            } for c in groups_to_merge]},
        )

        result["merge"] = {
            "parent": str(primary_group.id),
            "children": [str(g.id) for g in groups_to_merge],
        }

    # Support moving groups in or out of the inbox
    inbox = result.get("inbox", None)
    if inbox is not None:
        if inbox:
            for group in group_list:
                add_group_to_inbox(group, GroupInboxReason.MANUAL)
        elif not inbox:
            for group in group_list:
                remove_group_from_inbox(
                    group,
                    action=GroupInboxRemoveAction.MARK_REVIEWED,
                    user=acting_user,
                    referrer=request.META.get("HTTP_REFERER"),
                )
                issue_mark_reviewed.send_robust(
                    project=project_lookup[group.project_id],
                    user=acting_user,
                    group=group,
                    sender=update_groups,
                )
        result["inbox"] = inbox

    return Response(result)
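The ignore/snooze branch above pulls several optional knobs out of `statusDetails`, treating 0 and empty values as unset and turning a duration in minutes into an absolute "ignore until" timestamp. Below is a minimal standalone sketch of that normalisation using only the standard library; the field names mirror the payload shown in the example, while the helper itself is purely illustrative.

from datetime import datetime, timedelta, timezone as dt_timezone

def normalize_snooze(status_details):
    # Falsy values (0, "", None) are treated as "not set", matching the `or None` pattern above.
    duration = (status_details.pop("ignoreDuration", None)
                or status_details.pop("snoozeDuration", None)) or None
    until = datetime.now(dt_timezone.utc) + timedelta(minutes=duration) if duration else None
    return {
        "ignoreDuration": duration,
        "ignoreUntil": until,
        "ignoreCount": status_details.pop("ignoreCount", None) or None,
        "ignoreWindow": status_details.pop("ignoreWindow", None) or None,
        "ignoreUserCount": status_details.pop("ignoreUserCount", None) or None,
        "ignoreUserWindow": status_details.pop("ignoreUserWindow", None) or None,
    }

# Usage: a 30-minute snooze with an explicit but zero ignoreCount.
print(normalize_snooze({"snoozeDuration": 30, "ignoreCount": 0}))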
Example #7
def resolved_in_commit(instance, created, **kwargs):
    current_datetime = timezone.now()

    groups = instance.find_referenced_groups()

    # Delete GroupLinks where message may have changed
    group_ids = {g.id for g in groups}
    group_links = GroupLink.objects.filter(
        linked_type=GroupLink.LinkedType.commit,
        relationship=GroupLink.Relationship.resolves,
        linked_id=instance.id,
    )
    for link in group_links:
        if link.group_id not in group_ids:
            remove_resolved_link(link)

    try:
        repo = Repository.objects.get(id=instance.repository_id)
    except Repository.DoesNotExist:
        repo = None

    for group in groups:
        try:
            # XXX(dcramer): This code is somewhat duplicated from the
            # project_group_index mutation api
            with transaction.atomic():
                GroupLink.objects.create(
                    group_id=group.id,
                    project_id=group.project_id,
                    linked_type=GroupLink.LinkedType.commit,
                    relationship=GroupLink.Relationship.resolves,
                    linked_id=instance.id,
                )

                if instance.author:
                    user_list = list(instance.author.find_users())
                else:
                    user_list = ()

                if user_list:
                    acting_user = user_list[0]
                    Activity.objects.create(
                        project_id=group.project_id,
                        group=group,
                        type=Activity.SET_RESOLVED_IN_COMMIT,
                        ident=instance.id,
                        user=acting_user,
                        data={
                            'commit': instance.id,
                        }
                    )
                    self_assign_issue = UserOption.objects.get_value(
                        user=acting_user,
                        key='self_assign_issue',
                        default='0'
                    )
                    if self_assign_issue == '1' and not group.assignee_set.exists():
                        GroupAssignee.objects.assign(
                            group=group,
                            assigned_to=acting_user,
                            acting_user=acting_user,
                        )

                    # while we only create activity and assignment for one user, we want to
                    # subscribe every user
                    for user in user_list:
                        GroupSubscription.objects.subscribe(
                            user=user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )

                else:
                    Activity.objects.create(
                        project_id=group.project_id,
                        group=group,
                        type=Activity.SET_RESOLVED_IN_COMMIT,
                        ident=instance.id,
                        data={
                            'commit': instance.id,
                        }
                    )
                Group.objects.filter(
                    id=group.id,
                ).update(
                    status=GroupStatus.RESOLVED,
                    resolved_at=current_datetime,
                )
        except IntegrityError:
            pass
        else:
            if repo is not None:
                if repo.integration_id is not None:
                    analytics.record(
                        'integration.resolve.commit',
                        provider=repo.provider,
                        id=repo.integration_id,
                        organization_id=repo.organization_id,
                    )
                user = user_list[0] if user_list else None

                issue_resolved.send_robust(
                    organization_id=repo.organization_id,
                    user=user,
                    group=group,
                    project=group.project,
                    resolution_type='with_commit',
                    sender='resolved_with_commit',
                )
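Example #7 hinges on `instance.find_referenced_groups()`, whose implementation is not shown here. The sketch below illustrates the general idea under assumed rules: scan the commit message for "Fixes PROJECT-123"-style references next to a resolving keyword. The keyword list and short-ID pattern are illustrative assumptions, not Sentry's actual parser.

import re

# Hypothetical resolving keywords and short-ID format (assumptions for illustration).
REFERENCE_RE = re.compile(
    r"\b(?:fix|fixes|fixed|close|closes|closed|resolve|resolves|resolved)\b"
    r"[:\s]+([A-Z][A-Z0-9_]*-\d+)",
    re.IGNORECASE,
)

def find_referenced_short_ids(message):
    # Return the issue short IDs mentioned alongside a resolving keyword.
    return [m.group(1).upper() for m in REFERENCE_RE.finditer(message or "")]

print(find_referenced_short_ids("Fixes SENTRY-123, resolves api-7"))  # ['SENTRY-123', 'API-7']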
Example #8
def update_groups(request, projects, organization_id, search_fn):
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id,
            project__in=projects,
            id__in=group_ids,
        )
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(
            data=request.DATA,
            partial=True,
            context={'project': project},
        )
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

    result = dict(serializer.object)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = request.user if request.user.is_authenticated() else None

    if not group_ids:
        try:
            # bulk mutations are limited to 1000 items
            # TODO(dcramer): it'd be nice to support more than this, but it's
            # a bit too complicated right now
            cursor_result, _ = search_fn({
                'limit': 1000,
                'paginator_options': {'max_limit': 1000},
            })
        except ValidationError as exc:
            return Response({'detail': six.text_type(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(
        id__in=group_ids,
    )

    discard = result.get('discard')
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    statusDetails = result.pop('statusDetails', result)
    status = result.get('status')
    release = None
    commit = None

    if status in ('resolved', 'resolvedInNextRelease'):
        if status == 'resolvedInNextRelease' or statusDetails.get('inNextRelease'):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in next release for multiple projects.'
                }, status=400)
            release = statusDetails.get('inNextRelease') or Release.objects.filter(
                projects=projects[0],
                organization_id=projects[0].organization_id,
            ).extra(select={
                'sort': 'COALESCE(date_released, date_added)',
            }).order_by('-sort')[0]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': '',
            }
            status_details = {
                'inNextRelease': True,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = 'in_next_release'
            res_status = GroupResolution.Status.pending
        elif statusDetails.get('inRelease'):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in release for multiple projects.'
                }, status=400)
            release = statusDetails['inRelease']
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': release.version,
            }
            status_details = {
                'inRelease': release.version,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = 'in_release'
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get('inCommit'):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in commit for multiple projects.'
                }, status=400)
            commit = statusDetails['inCommit']
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {
                'commit': commit.id,
            }
            status_details = {
                'inCommit': serialize(commit, request.user),
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type_str = 'in_commit'
        else:
            res_type_str = 'now'
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr('group.resolved', instance=res_type_str, skip_internal=True)

        # if we've specified a commit, let's see if it's already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = Release.objects.filter(
                    projects__in=projects,
                    releasecommit__commit=commit,
                ).extra(select={
                    'sort': 'COALESCE(date_released, date_added)',
                }).order_by('-sort')[0]
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None

        for group in group_list:
            with transaction.atomic():
                resolution = None
                if release:
                    resolution_params = {
                        'release': release,
                        'type': res_type,
                        'status': res_status,
                        'actor_id': request.user.id
                        if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group,
                        defaults=resolution_params,
                    )
                    if not created:
                        resolution.update(
                            datetime=timezone.now(), **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(
                    id=group.id,
                ).update(
                    status=GroupStatus.RESOLVED,
                    resolved_at=now,
                )
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                assigned_to = self_subscribe_and_assign_issue(acting_user, group)
                if assigned_to is not None:
                    result['assignedTo'] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or request.user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(kwargs={
                'project_id': group.project_id,
                'group_id': group.id,
            })

        result.update({
            'status': 'resolved',
            'statusDetails': status_details,
        })

    elif status:
        new_status = STATUS_CHOICES[result['status']]

        with transaction.atomic():
            happened = queryset.exclude(
                status=new_status,
            ).update(
                status=new_status,
            )

            GroupResolution.objects.filter(
                group__in=group_ids,
            ).delete()

            if new_status == GroupStatus.IGNORED:
                metrics.incr('group.ignored', skip_internal=True)

                ignore_duration = (
                    statusDetails.pop('ignoreDuration', None) or
                    statusDetails.pop('snoozeDuration', None)
                ) or None
                ignore_count = statusDetails.pop(
                    'ignoreCount', None) or None
                ignore_window = statusDetails.pop(
                    'ignoreWindow', None) or None
                ignore_user_count = statusDetails.pop(
                    'ignoreUserCount', None) or None
                ignore_user_window = statusDetails.pop(
                    'ignoreUserWindow', None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration,
                        )
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state['times_seen'] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state['users_seen'] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                'until': ignore_until,
                                'count': ignore_count,
                                'window': ignore_window,
                                'user_count': ignore_user_count,
                                'user_window': ignore_user_window,
                                'state': state,
                                'actor_id': request.user.id if request.user.is_authenticated() else None,
                            }
                        )
                        result['statusDetails'] = {
                            'ignoreCount': ignore_count,
                            'ignoreUntil': ignore_until,
                            'ignoreUserCount': ignore_user_count,
                            'ignoreUserWindow': ignore_user_window,
                            'ignoreWindow': ignore_window,
                            'actor': serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    GroupSnooze.objects.filter(
                        group__in=group_ids,
                    ).delete()
                    ignore_until = None
                    result['statusDetails'] = {}
            else:
                result['statusDetails'] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    'ignoreCount': ignore_count,
                    'ignoreDuration': ignore_duration,
                    'ignoreUntil': ignore_until,
                    'ignoreUserCount': ignore_user_count,
                    'ignoreUserWindow': ignore_user_window,
                    'ignoreWindow': ignore_window,
                }

            groups_by_project_id = defaultdict(list)
            for group in group_list:
                groups_by_project_id[group.project_id].append(group)

            for project in projects:
                project_groups = groups_by_project_id.get(project.id)
                if project_groups:
                    issue_ignored.send_robust(
                        project=project,
                        user=acting_user,
                        group_list=project_groups,
                        activity_data=activity_data,
                        sender=update_groups)

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(kwargs={
                        'project_id': group.project_id,
                        'group_id': group.id,
                    })

    if 'assignedTo' in result:
        assigned_actor = result['assignedTo']
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()

                GroupAssignee.objects.assign(group, resolved_actor, acting_user)
            result['assignedTo'] = serialize(
                assigned_actor.resolve(), acting_user, ActorSerializer())
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists() for project in projects
    }
    if result.get('hasSeen'):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={
                        'last_seen': timezone.now(),
                    }
                )
    elif result.get('hasSeen') is False:
        GroupSeen.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    if result.get('isBookmarked'):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user,
            )
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark,
            )
    elif result.get('isBookmarked') is False:
        GroupBookmark.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for which rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get('isSubscribed') in (True, False):
        is_subscribed = result['isSubscribed']
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={
                    'is_active': is_subscribed,
                    'reason': GroupSubscriptionReason.unknown,
                },
            )

        result['subscriptionDetails'] = {
            'reason': SUBSCRIPTION_REASON_MAP.get(
                GroupSubscriptionReason.unknown,
                'unknown',
            ),
        }

    if 'isPublic' in result:
        # We always want to delete an existing share, because triggering
        # an isPublic=True even when it's already public, should trigger
        # regenerating.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result['shareId'] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get('isPublic'):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user,
            )
            if created:
                result['shareId'] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady, like it should be its own
    # endpoint
    if result.get('merge') and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response({'detail': 'Merging across multiple projects is not supported'})
        group_list_by_times_seen = sorted(
            group_list,
            key=lambda g: (g.times_seen, g.id),
            reverse=True,
        )
        primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(
            primary_group.project_id,
            group_ids_to_merge,
            primary_group.id
        )

        Group.objects.filter(
            id__in=group_ids_to_merge
        ).update(
            status=GroupStatus.PENDING_MERGE
        )

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={
                'issues': [{
                    'id': c.id
                } for c in groups_to_merge],
            },
        )

        result['merge'] = {
            'parent': six.text_type(primary_group.id),
            'children': [six.text_type(g.id) for g in groups_to_merge],
        }

    return Response(result)
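Both versions of the merge branch above pick the group seen most often (ties broken by higher id) as the merge parent and treat the remaining groups as children. A standalone sketch of just that ordering, with a throwaway `FakeGroup` stand-in for illustration:

from collections import namedtuple

FakeGroup = namedtuple("FakeGroup", ["id", "times_seen"])

def split_merge_target(groups):
    # Sort by (times_seen, id) descending; the first entry becomes the merge parent.
    ordered = sorted(groups, key=lambda g: (g.times_seen, g.id), reverse=True)
    return ordered[0], ordered[1:]

groups = [FakeGroup(id=1, times_seen=10), FakeGroup(id=2, times_seen=40), FakeGroup(id=3, times_seen=40)]
primary, children = split_merge_target(groups)
print(primary.id, [g.id for g in children])  # 3 [2, 1]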