Example #1
    def convert_args(self, request, issue_id, *args, **kwargs):
        # TODO(tkaemming): Ideally, this would return a 302 response, rather
        # than just returning the data that is bound to the new group. (It
        # technically shouldn't be a 301, since the response could change again
        # as the result of another merge operation that occurs later. This
        # wouldn't break anything though -- it will just be a "permanent"
        # redirect to *another* permanent redirect.) This would require
        # rebuilding the URL in one of two ways: either by hacking it in with
        # string replacement, or making the endpoint aware of the URL pattern
        # that caused it to be dispatched, and reversing it with the correct
        # `issue_id` keyword argument.
        try:
            group, _ = get_group_with_redirect(
                issue_id,
                queryset=Group.objects.select_related('project', 'project__organization'),
            )
        except Group.DoesNotExist:
            raise ResourceDoesNotExist

        self.check_object_permissions(request, group)

        with configure_scope() as scope:
            scope.set_tag("project", group.project_id)
            scope.set_tag("organization", group.project.organization_id)

        if group.status in EXCLUDED_STATUSES:
            raise ResourceDoesNotExist

        request._request.organization = group.project.organization

        kwargs['group'] = group

        return (args, kwargs)
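The TODO above names two ways to build the redirect URL. Below is a minimal sketch of the first option (string replacement), assuming the request path embeds the stale `issue_id`; `redirect_to_group` is a hypothetical helper for illustration, not part of the actual endpoint:

from django.http import HttpResponseRedirect

# Hypothetical sketch of the 302 described in the TODO: swap the stale
# issue id in the request path for the id the redirect resolved to.
def redirect_to_group(request, issue_id, group):
    new_path = request.path.replace(
        '/issues/{}/'.format(issue_id),
        '/issues/{}/'.format(group.id),
    )
    # 302 rather than 301: a later merge could redirect this id again.
    return HttpResponseRedirect(new_path)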
Example #2
    def get(self, request, organization, group_id, event_id_or_latest):
        use_snuba = options.get('snuba.events-queries.enabled')

        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
            )
        except Group.DoesNotExist:
            raise Http404

        if event_id_or_latest == 'latest':
            # It's possible that a message would not be created under certain
            # circumstances (such as a post_save signal failing)
            event = group.get_latest_event() or Event(group=group)
        else:
            event_cls = SnubaEvent if use_snuba else Event
            event = event_cls.objects.from_event_id(event_id_or_latest, group.project.id)

        if event is None or (event.group_id != int(group_id)):
            raise Http404

        Event.objects.bind_nodes([event], 'data')

        GroupMeta.objects.populate_cache([group])

        return HttpResponse(json.dumps(event.as_dict()), content_type='application/json')
Example #3
    def test_get_group_with_redirect(self):
        group = self.create_group()
        assert get_group_with_redirect(group.id) == (group, False)

        duplicate_id = self.create_group().id
        Group.objects.filter(id=duplicate_id).delete()
        GroupRedirect.objects.create(
            group_id=group.id,
            previous_group_id=duplicate_id,
        )

        assert get_group_with_redirect(duplicate_id) == (group, True)

        # We shouldn't end up with a redirect that points to a missing
        # group, but test this path for completeness.
        group.delete()

        with pytest.raises(Group.DoesNotExist):
            get_group_with_redirect(duplicate_id)
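The test pins down the contract of get_group_with_redirect. The following is a minimal sketch consistent with those assertions, not Sentry's actual implementation; the queryset default is inferred from the other examples:

from sentry.models import Group, GroupRedirect

def get_group_with_redirect(id, queryset=None):
    """Resolve ``id`` to a ``(group, was_redirected)`` tuple."""
    if queryset is None:
        queryset = Group.objects.all()
    try:
        # Direct hit: the second tuple element reports no redirect was used.
        return queryset.get(id=id), False
    except Group.DoesNotExist:
        redirect = GroupRedirect.objects.filter(previous_group_id=id).first()
        if redirect is None:
            # no redirect recorded for this id either
            raise
        # Re-raises Group.DoesNotExist when the redirect target itself has
        # been deleted, matching the final assertion in the test above.
        return queryset.get(id=redirect.group_id), True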
Example #4
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence, as the tag key may have been deleted
        try:
            TagKey.objects.get(
                project=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise Http404

        queryset = GroupTagValue.objects.filter(
            group=group,
            key=lookup_key,
        )

        def row_iter():
            yield ('value', 'times_seen', 'last_seen', 'first_seen')
            for row in queryset.iterator():
                yield (
                    row.value.encode('utf-8'),
                    str(row.times_seen),
                    row.last_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    row.first_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                )

        pseudo_buffer = Echo()
        writer = csv.writer(pseudo_buffer)
        response = StreamingHttpResponse(
            (writer.writerow(r) for r in row_iter()),
            content_type='text/csv'
        )
        response['Content-Disposition'] = 'attachment; filename="{}-{}.csv"'.format(
            group.qualified_short_id or group.id, slugify(key)
        )
        return response
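The streaming response above relies on an Echo pseudo-buffer. A minimal sketch follows, matching the pattern documented for Django's StreamingHttpResponse and assumed to match the Echo imported here:

class Echo(object):
    """File-like object implementing just enough of the write API for
    csv.writer: each row is handed straight back instead of buffered,
    so StreamingHttpResponse can emit rows as they are produced."""

    def write(self, value):
        return value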
Example #5
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence, as the tag key may have been deleted
        try:
            TagKey.objects.get(
                project=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        queryset = RangeQuerySetWrapper(
            GroupTagValue.objects.filter(
                group_id=group.id,
                key=lookup_key,
            ),
            callbacks=callbacks,
        )

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(queryset, filename, key=key)
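RangeQuerySetWrapper appears in several of these examples. Below is a rough, simplified sketch of the behavior being relied on (an assumption, not the real implementation): iterate a queryset in primary-key ranges so large tables are never paged with growing OFFSETs, running the callbacks once per chunk.

class RangeQuerySetWrapper(object):
    def __init__(self, queryset, step=1000, callbacks=()):
        self.queryset = queryset
        self.step = step
        self.callbacks = callbacks

    def __iter__(self):
        last_pk = 0
        while True:
            chunk = list(
                self.queryset.filter(pk__gt=last_pk).order_by('pk')[:self.step]
            )
            if not chunk:
                return
            # e.g. attach_eventuser(project.id) batches EventUser lookups
            # for the whole chunk before rows are serialized.
            for callback in self.callbacks:
                callback(chunk)
            for item in chunk:
                yield item
            last_pk = chunk[-1].pk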
Example #6
    def get(self, request, organization, project, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if tagstore.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            environment_id = self._get_environment_id_from_request(request, project.organization_id)
        except Environment.DoesNotExist:
            # if the environment doesn't exist then the tag can't possibly exist
            raise Http404

        # validate existence, as the tag key may have been deleted
        try:
            tagstore.get_tag_key(project.id, environment_id, lookup_key)
        except tagstore.TagKeyNotFound:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        queryset = RangeQuerySetWrapper(
            tagstore.get_group_tag_value_qs(group.project_id, group.id, environment_id, lookup_key),
            callbacks=callbacks,
        )

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(queryset, filename, key=key)
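A hedged sketch of the `_get_environment_id_from_request` helper this endpoint assumes; the query parameter name and the Environment lookup are inferences, not the verified implementation:

    def _get_environment_id_from_request(self, request, organization_id):
        name = request.GET.get('environment')
        if name is None:
            # no filter requested; tagstore treats None as "all environments"
            return None
        # raises Environment.DoesNotExist for unknown names, which the
        # caller above converts into a 404
        return Environment.get_for_organization_id(organization_id, name).id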
Example #7
    def get(self, request, organization, group_id, event_id_or_latest):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(group_id)
        except Group.DoesNotExist:
            raise Http404

        if event_id_or_latest == "latest":
            event = group.get_latest_event()
        else:
            event = eventstore.get_event_by_id(group.project.id,
                                               event_id_or_latest,
                                               group_id=group.id)

        if event is None:
            raise Http404

        GroupMeta.objects.populate_cache([group])

        return HttpResponse(json.dumps(event.as_dict()),
                            content_type="application/json")
Example #8
    def get(self, request, organization, project, team, group_id, event_id_or_latest):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if event_id_or_latest == 'latest':
            # It's possible that a message would not be created under certain
            # circumstances (such as a post_save signal failing)
            event = group.get_latest_event() or Event(group=group)
        else:
            event = get_object_or_404(group.event_set, pk=event_id_or_latest)

        Event.objects.bind_nodes([event], 'data')
        GroupMeta.objects.populate_cache([group])

        return HttpResponse(json.dumps(event.as_dict()), mimetype='application/json')
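`Event.objects.bind_nodes([event], 'data')` hydrates the event payload from the node store. A simplified sketch of the idea follows (assumed shape; the real manager method differs in detail):

def bind_nodes(object_list, node_name='data'):
    # Event payloads live in a separate node store keyed by node id;
    # fetch them in one batch rather than one round trip per event.
    node_ids = [getattr(obj, node_name).id for obj in object_list]
    results = nodestore.get_multi(node_ids)
    for obj in object_list:
        node = getattr(obj, node_name)
        node.bind_data(results.get(node.id) or {})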
Example #9
    def convert_args(self, request, issue_id, *args, **kwargs):
        # TODO(tkaemming): Ideally, this would return a 302 response, rather
        # than just returning the data that is bound to the new group. (It
        # technically shouldn't be a 301, since the response could change again
        # as the result of another merge operation that occurs later. This
        # wouldn't break anything though -- it will just be a "permanent"
        # redirect to *another* permanent redirect.) This would require
        # rebuilding the URL in one of two ways: either by hacking it in with
        # string replacement, or making the endpoint aware of the URL pattern
        # that caused it to be dispatched, and reversing it with the correct
        # `issue_id` keyword argument.
        try:
            group, _ = get_group_with_redirect(
                issue_id,
                queryset=Group.objects.select_related('project'),
            )
        except Group.DoesNotExist:
            raise ResourceDoesNotExist

        self.check_object_permissions(request, group)
        kwargs['group'] = group
        return (args, kwargs)
Example #10
    def get(self, request, organization, project, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id, queryset=Group.objects.filter(project=project)
            )
        except Group.DoesNotExist:
            raise Http404

        if tagstore.is_reserved_key(key):
            lookup_key = u"sentry:{0}".format(key)
        else:
            lookup_key = key

        try:
            environment_id = self._get_environment_id_from_request(request, project.organization_id)
        except Environment.DoesNotExist:
            # if the environment doesn't exist then the tag can't possibly exist
            raise Http404

        # validate existence, as the tag key may have been deleted
        try:
            tagstore.get_tag_key(project.id, environment_id, lookup_key)
        except tagstore.TagKeyNotFound:
            raise Http404

        if key == "user":
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        gtv_iter = tagstore.get_group_tag_value_iter(
            group.project_id, group.id, environment_id, lookup_key, callbacks=callbacks
        )

        filename = u"{}-{}".format(group.qualified_short_id or group.id, key)

        return self.to_csv_response(gtv_iter, filename, key=key)
Example #11
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if tagstore.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence, as the tag key may have been deleted
        try:
            tagstore.get_tag_key(group.project_id, lookup_key)
        except tagstore.TagKeyNotFound:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        queryset = RangeQuerySetWrapper(
            tagstore.get_group_tag_value_qs(group.id, lookup_key),
            callbacks=callbacks,
        )

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(queryset, filename, key=key)
Example #12
def merge_groups(from_object_ids=None,
                 to_object_id=None,
                 transaction_id=None,
                 recursed=False,
                 eventstream_state=None,
                 **kwargs):
    # TODO(mattrobenolt): Write tests for all of this
    from sentry.models import (
        Activity,
        Group,
        GroupAssignee,
        GroupEnvironment,
        GroupHash,
        GroupRuleStatus,
        GroupSubscription,
        Environment,
        EventAttachment,
        UserReport,
        GroupRedirect,
        GroupMeta,
        get_group_with_redirect,
    )

    if not (from_object_ids and to_object_id):
        logger.error("group.malformed.missing_params",
                     extra={"transaction_id": transaction_id})
        return False

    # Operate on one "from" group per task iteration. The task is recursed
    # until each group has been merged.
    from_object_id = from_object_ids[0]

    try:
        new_group, _ = get_group_with_redirect(to_object_id)
    except Group.DoesNotExist:
        logger.warning(
            "group.malformed.invalid_id",
            extra={
                "transaction_id": transaction_id,
                "old_object_ids": from_object_ids
            },
        )
        return False

    if not recursed:
        logger.info(
            "merge.queued",
            extra={
                "transaction_id": transaction_id,
                "new_group_id": new_group.id,
                "old_group_ids": from_object_ids,
                # TODO(jtcunning): figure out why these are full seq scans and/or alternative solution
                # 'new_event_id': getattr(new_group.event_set.order_by('-id').first(), 'id', None),
                # 'old_event_id': getattr(group.event_set.order_by('-id').first(), 'id', None),
                # 'new_hash_id': getattr(new_group.grouphash_set.order_by('-id').first(), 'id', None),
                # 'old_hash_id': getattr(group.grouphash_set.order_by('-id').first(), 'id', None),
            },
        )

    try:
        group = Group.objects.select_related("project").get(id=from_object_id)
    except Group.DoesNotExist:
        from_object_ids.remove(from_object_id)

        logger.warning(
            "group.malformed.invalid_id",
            extra={
                "transaction_id": transaction_id,
                "old_object_id": from_object_id
            },
        )
    else:
        model_list = tuple(EXTRA_MERGE_MODELS) + (
            Activity,
            GroupAssignee,
            GroupEnvironment,
            GroupHash,
            GroupRuleStatus,
            GroupSubscription,
            EventAttachment,
            UserReport,
            GroupRedirect,
            GroupMeta,
        )

        has_more = merge_objects(model_list,
                                 group,
                                 new_group,
                                 logger=logger,
                                 transaction_id=transaction_id)

        if not has_more:
            # There are no more objects to merge for *this* "from" group, remove it
            # from the list of "from" groups that are being merged, and finish the
            # work for this group.
            from_object_ids.remove(from_object_id)

            similarity.merge(group.project,
                             new_group, [group],
                             allow_unsafe=True)

            environment_ids = list(
                Environment.objects.filter(projects=group.project).values_list(
                    "id", flat=True))

            for model in [tsdb.models.group]:
                tsdb.merge(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids
                    if model in tsdb.models_with_environment_support else None,
                )

            for model in [tsdb.models.users_affected_by_group]:
                tsdb.merge_distinct_counts(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids
                    if model in tsdb.models_with_environment_support else None,
                )

            for model in [
                    tsdb.models.frequent_releases_by_group,
                    tsdb.models.frequent_environments_by_group,
            ]:
                tsdb.merge_frequencies(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids
                    if model in tsdb.models_with_environment_support else None,
                )

            previous_group_id = group.id

            with transaction.atomic():
                GroupRedirect.create_for_group(group, new_group)
                group.delete()
            delete_logger.info(
                "object.delete.executed",
                extra={
                    "object_id": previous_group_id,
                    "transaction_id": transaction_id,
                    "model": Group.__name__,
                },
            )

            new_group.update(
                # TODO(dcramer): ideally these would be SQL clauses
                first_seen=min(group.first_seen, new_group.first_seen),
                last_seen=max(group.last_seen, new_group.last_seen),
            )
            try:
                # it's possible to hit an out of range value for counters
                new_group.update(
                    times_seen=F("times_seen") + group.times_seen,
                    num_comments=F("num_comments") + group.num_comments,
                )
            except DataError:
                pass

    if from_object_ids:
        # This task is recursed until `from_object_ids` is empty and all
        # "from" groups have merged into the `to_group_id`.
        merge_groups.delay(
            from_object_ids=from_object_ids,
            to_object_id=to_object_id,
            transaction_id=transaction_id,
            recursed=True,
            eventstream_state=eventstream_state,
        )
    elif eventstream_state:
        # All `from_object_ids` have been merged!
        eventstream.end_merge(eventstream_state)
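For context, here is a sketch of how this task might be kicked off from a merge endpoint; `project`, `from_groups`, and `to_group` are assumed to be in scope, and the start_merge handshake is an illustrative assumption:

import uuid

from sentry import eventstream

# Hypothetical call site: merge several issues into `to_group`. The task
# recurses, consuming one "from" id per iteration, and ends the merge on
# the eventstream once `from_object_ids` is exhausted.
state = eventstream.start_merge(
    project.id, [g.id for g in from_groups], to_group.id
)
merge_groups.delay(
    from_object_ids=[g.id for g in from_groups],
    to_object_id=to_group.id,
    transaction_id=uuid.uuid4().hex,
    eventstream_state=state,
)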
Example #13
def process_issue_by_tag(data_export, file, limit=None):
    """
    Convert the tag query to a CSV, writing it to the provided file.
    Returns the suggested file name.
    (Adapted from 'src/sentry/web/frontend/group_tag_export.py')
    """
    # Get the pertaining project
    try:
        payload = data_export.query_info
        project = Project.objects.get(id=payload["project_id"])
    except Project.DoesNotExist as error:
        metrics.incr("dataexport.error", instance=six.text_type(error))
        logger.error("dataexport.error: {}".format(six.text_type(error)))
        raise DataExportError("Requested project does not exist")

    # Get the pertaining issue
    try:
        group, _ = get_group_with_redirect(
            payload["group_id"], queryset=Group.objects.filter(project=project)
        )
    except Group.DoesNotExist as error:
        metrics.incr("dataexport.error", instance=six.text_type(error))
        logger.error("dataexport.error: {}".format(six.text_type(error)))
        raise DataExportError("Requested issue does not exist")

    # Get the pertaining key
    key = payload["key"]
    lookup_key = six.text_type("sentry:{}").format(key) if tagstore.is_reserved_key(key) else key

    # If the key is the 'user' tag, attach the event user
    def attach_eventuser(items):
        users = EventUser.for_tags(group.project_id, [i.value for i in items])
        for item in items:
            item._eventuser = users.get(item.value)

    # Create the fields/callback lists
    if key == "user":
        callbacks = [attach_eventuser]
        fields = [
            "value",
            "id",
            "email",
            "username",
            "ip_address",
            "times_seen",
            "last_seen",
            "first_seen",
        ]
    else:
        callbacks = []
        fields = ["value", "times_seen", "last_seen", "first_seen"]

    # Example file name: ISSUE_BY_TAG-project10-user__721.csv
    file_details = six.text_type("{}-{}__{}").format(project.slug, key, data_export.id)
    file_name = get_file_name(ExportQueryType.ISSUE_BY_TAG_STR, file_details)

    # Iterate through all the GroupTagValues
    writer = create_writer(file, fields)
    iteration = 0
    with snuba_error_handler():
        while True:
            offset = SNUBA_MAX_RESULTS * iteration
            next_offset = SNUBA_MAX_RESULTS * (iteration + 1)
            gtv_list = tagstore.get_group_tag_value_iter(
                project_id=group.project_id,
                group_id=group.id,
                environment_id=None,
                key=lookup_key,
                callbacks=callbacks,
                offset=offset,
            )
            if len(gtv_list) == 0:
                break
            gtv_list_raw = [serialize_issue_by_tag(key, item) for item in gtv_list]
            if limit and limit < next_offset:
                # The next offset would pass the limit, so write only the
                # remainder and stop (e.g. with SNUBA_MAX_RESULTS=1000 and
                # limit=2500, the third pass writes gtv_list_raw[:500])
                writer.writerows(gtv_list_raw[: limit % SNUBA_MAX_RESULTS])
                break
            else:
                writer.writerows(gtv_list_raw)
                iteration += 1
    return file_name
Example #14
def process_issue_by_tag(data_export, file):
    """
    Convert the tag query to a CSV, writing it to the provided file.
    Returns the suggested file name.
    (Adapted from 'src/sentry/web/frontend/group_tag_export.py')
    """
    # Get the pertaining project
    payload = data_export.query_info
    project = Project.objects.get(id=payload["project_id"])

    # Get the pertaining issue
    group, _ = get_group_with_redirect(
        payload["group_id"], queryset=Group.objects.filter(project=project)
    )

    # Get the pertaining key
    key = payload["key"]
    lookup_key = u"sentry:{0}".format(key) if tagstore.is_reserved_key(key) else key

    # If the key is the 'user' tag, attach the event user
    def attach_eventuser(items):
        users = EventUser.for_tags(group.project_id, [i.value for i in items])
        for item in items:
            item._eventuser = users.get(item.value)

    # Create the fields/callback lists
    if key == "user":
        callbacks = [attach_eventuser]
        fields = [
            "value",
            "id",
            "email",
            "username",
            "ip_address",
            "times_seen",
            "last_seen",
            "first_seen",
        ]
    else:
        callbacks = []
        fields = ["value", "times_seen", "last_seen", "first_seen"]

    # Example file name: ISSUE_BY_TAG-project10-user__721.csv
    file_details = u"{}-{}__{}".format(project.slug, key, data_export.id)
    file_name = get_file_name(ExportQueryType.ISSUE_BY_TAG_STR, file_details)

    # Iterate through all the GroupTagValues
    writer = create_writer(file, fields)
    iteration = 0
    while True:
        gtv_list = tagstore.get_group_tag_value_iter(
            project_id=group.project_id,
            group_id=group.id,
            environment_id=None,
            key=lookup_key,
            callbacks=callbacks,
            offset=SNUBA_MAX_RESULTS * iteration,
        )
        gtv_list_raw = [serialize_issue_by_tag(key, item) for item in gtv_list]
        if len(gtv_list_raw) == 0:
            break
        writer.writerows(gtv_list_raw)
        iteration += 1
    return file_name
Example #15
def merge_groups(
    from_object_ids=None, to_object_id=None, transaction_id=None,
    recursed=False, eventstream_state=None, **kwargs
):
    # TODO(mattrobenolt): Write tests for all of this
    from sentry.models import (
        Activity,
        Group,
        GroupAssignee,
        GroupEnvironment,
        GroupHash,
        GroupRuleStatus,
        GroupSubscription,
        Environment,
        EventMapping,
        Event,
        UserReport,
        GroupRedirect,
        GroupMeta,
        get_group_with_redirect,
    )

    if not (from_object_ids and to_object_id):
        logger.error(
            'group.malformed.missing_params', extra={
                'transaction_id': transaction_id,
            }
        )
        return

    # Operate on one "from" group per task iteration. The task is recursed
    # until each group has been merged.
    from_object_id = from_object_ids[0]

    try:
        new_group, _ = get_group_with_redirect(to_object_id)
    except Group.DoesNotExist:
        logger.warning(
            'group.malformed.invalid_id',
            extra={
                'transaction_id': transaction_id,
                'old_object_ids': from_object_ids,
            }
        )
        return

    if not recursed:
        logger.info(
            'merge.queued',
            extra={
                'transaction_id': transaction_id,
                'new_group_id': new_group.id,
                'old_group_ids': from_object_ids,
                # TODO(jtcunning): figure out why these are full seq scans and/or alternative solution
                # 'new_event_id': getattr(new_group.event_set.order_by('-id').first(), 'id', None),
                # 'old_event_id': getattr(group.event_set.order_by('-id').first(), 'id', None),
                # 'new_hash_id': getattr(new_group.grouphash_set.order_by('-id').first(), 'id', None),
                # 'old_hash_id': getattr(group.grouphash_set.order_by('-id').first(), 'id', None),
            }
        )

    try:
        group = Group.objects.get(id=from_object_id)
    except Group.DoesNotExist:
        from_object_ids.remove(from_object_id)

        logger.warning(
            'group.malformed.invalid_id',
            extra={
                'transaction_id': transaction_id,
                'old_object_id': from_object_id,
            }
        )
    else:
        model_list = tuple(EXTRA_MERGE_MODELS) + (
            Activity, GroupAssignee, GroupEnvironment, GroupHash, GroupRuleStatus,
            GroupSubscription, EventMapping, Event, UserReport, GroupRedirect,
            GroupMeta,
        )

        has_more = merge_objects(
            model_list,
            group,
            new_group,
            logger=logger,
            transaction_id=transaction_id,
        )

        if not has_more:
            # There are no more objects to merge for *this* "from" group, remove it
            # from the list of "from" groups that are being merged, and finish the
            # work for this group.
            from_object_ids.remove(from_object_id)

            features.merge(new_group, [group], allow_unsafe=True)

            environment_ids = list(
                Environment.objects.filter(
                    projects=group.project
                ).values_list('id', flat=True)
            )

            for model in [tsdb.models.group]:
                tsdb.merge(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids if model in tsdb.models_with_environment_support else None
                )

            for model in [tsdb.models.users_affected_by_group]:
                tsdb.merge_distinct_counts(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids if model in tsdb.models_with_environment_support else None,
                )

            for model in [
                tsdb.models.frequent_releases_by_group, tsdb.models.frequent_environments_by_group
            ]:
                tsdb.merge_frequencies(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids if model in tsdb.models_with_environment_support else None,
                )

            previous_group_id = group.id

            group.delete()
            delete_logger.info(
                'object.delete.executed',
                extra={
                    'object_id': previous_group_id,
                    'transaction_id': transaction_id,
                    'model': Group.__name__,
                }
            )

            try:
                with transaction.atomic():
                    GroupRedirect.objects.create(
                        group_id=new_group.id,
                        previous_group_id=previous_group_id,
                    )
            except IntegrityError:
                pass

            new_group.update(
                # TODO(dcramer): ideally these would be SQL clauses
                first_seen=min(group.first_seen, new_group.first_seen),
                last_seen=max(group.last_seen, new_group.last_seen),
            )
            try:
                # it's possible to hit an out of range value for counters
                new_group.update(
                    times_seen=F('times_seen') + group.times_seen,
                    num_comments=F('num_comments') + group.num_comments,
                )
            except DataError:
                pass

    if from_object_ids:
        # This task is recursed until `from_object_ids` is empty and all
        # "from" groups have merged into the `to_group_id`.
        merge_groups.delay(
            from_object_ids=from_object_ids,
            to_object_id=to_object_id,
            transaction_id=transaction_id,
            recursed=True,
            eventstream_state=eventstream_state,
        )
        return

    # All `from_object_ids` have been merged!
    if eventstream_state:
        eventstream.end_merge(eventstream_state)