import logging

from django.db import DataError, transaction
from django.db.models import F

from sentry import eventstream
from sentry.app import tsdb
from sentry.similarity import features
from sentry.tasks.base import instrumented_task

logger = logging.getLogger("sentry.merge")
delete_logger = logging.getLogger("sentry.deletions.async")

# Extra models (e.g. registered by plugins) that should also be migrated
# from the merged groups to the target group.
EXTRA_MERGE_MODELS = []


@instrumented_task(
    name="sentry.tasks.merge.merge_groups",
    queue="merge",
    default_retry_delay=60 * 5,
    max_retries=None,
)
def merge_groups(
    from_object_ids=None,
    to_object_id=None,
    transaction_id=None,
    recursed=False,
    eventstream_state=None,
    **kwargs
):
    # TODO(mattrobenolt): Write tests for all of this
    from sentry.models import (
        Activity,
        Group,
        GroupAssignee,
        GroupEnvironment,
        GroupHash,
        GroupRuleStatus,
        GroupSubscription,
        Environment,
        EventAttachment,
        UserReport,
        GroupRedirect,
        GroupMeta,
        get_group_with_redirect,
    )

    if not (from_object_ids and to_object_id):
        logger.error("group.malformed.missing_params", extra={"transaction_id": transaction_id})
        return False

    # Operate on one "from" group per task iteration. The task is recursed
    # until each group has been merged.
    from_object_id = from_object_ids[0]

    try:
        new_group, _ = get_group_with_redirect(to_object_id)
    except Group.DoesNotExist:
        logger.warning(
            "group.malformed.invalid_id",
            extra={"transaction_id": transaction_id, "old_object_ids": from_object_ids},
        )
        return False

    if not recursed:
        logger.info(
            "merge.queued",
            extra={
                "transaction_id": transaction_id,
                "new_group_id": new_group.id,
                "old_group_ids": from_object_ids,
                # TODO(jtcunning): figure out why these are full seq scans and/or alternative solution
                # 'new_event_id': getattr(new_group.event_set.order_by('-id').first(), 'id', None),
                # 'old_event_id': getattr(group.event_set.order_by('-id').first(), 'id', None),
                # 'new_hash_id': getattr(new_group.grouphash_set.order_by('-id').first(), 'id', None),
                # 'old_hash_id': getattr(group.grouphash_set.order_by('-id').first(), 'id', None),
            },
        )

    try:
        group = Group.objects.select_related("project").get(id=from_object_id)
    except Group.DoesNotExist:
        from_object_ids.remove(from_object_id)

        logger.warning(
            "group.malformed.invalid_id",
            extra={"transaction_id": transaction_id, "old_object_id": from_object_id},
        )
    else:
        model_list = tuple(EXTRA_MERGE_MODELS) + (
            Activity,
            GroupAssignee,
            GroupEnvironment,
            GroupHash,
            GroupRuleStatus,
            GroupSubscription,
            EventAttachment,
            UserReport,
            GroupRedirect,
            GroupMeta,
        )

        # `merge_objects` (defined elsewhere in this module) migrates related
        # rows in chunks and returns True while rows remain for this group.
        has_more = merge_objects(
            model_list, group, new_group, logger=logger, transaction_id=transaction_id
        )

        if not has_more:
            # There are no more objects to merge for *this* "from" group. Remove
            # it from the list of "from" groups being merged and finish the work
            # for this group.
            from_object_ids.remove(from_object_id)

            features.merge(new_group, [group], allow_unsafe=True)

            environment_ids = list(
                Environment.objects.filter(projects=group.project).values_list("id", flat=True)
            )

            for model in [tsdb.models.group]:
                tsdb.merge(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids
                    if model in tsdb.models_with_environment_support
                    else None,
                )

            for model in [tsdb.models.users_affected_by_group]:
                tsdb.merge_distinct_counts(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids
                    if model in tsdb.models_with_environment_support
                    else None,
                )

            for model in [
                tsdb.models.frequent_releases_by_group,
                tsdb.models.frequent_environments_by_group,
            ]:
                tsdb.merge_frequencies(
                    model,
                    new_group.id,
                    [group.id],
                    environment_ids=environment_ids
                    if model in tsdb.models_with_environment_support
                    else None,
                )

            previous_group_id = group.id

            with transaction.atomic():
                GroupRedirect.create_for_group(group, new_group)
                group.delete()

            delete_logger.info(
                "object.delete.executed",
                extra={
                    "object_id": previous_group_id,
                    "transaction_id": transaction_id,
                    "model": Group.__name__,
                },
            )

            new_group.update(
                # TODO(dcramer): ideally these would be SQL clauses
                first_seen=min(group.first_seen, new_group.first_seen),
                last_seen=max(group.last_seen, new_group.last_seen),
            )
            try:
                # it's possible to hit an out of range value for counters
                new_group.update(
                    times_seen=F("times_seen") + group.times_seen,
                    num_comments=F("num_comments") + group.num_comments,
                )
            except DataError:
                pass

    if from_object_ids:
        # This task is recursed until `from_object_ids` is empty and all
        # "from" groups have merged into the `to_object_id` group.
        merge_groups.delay(
            from_object_ids=from_object_ids,
            to_object_id=to_object_id,
            transaction_id=transaction_id,
            recursed=True,
            eventstream_state=eventstream_state,
        )
    elif eventstream_state:
        # All `from_object_ids` have been merged!
        eventstream.end_merge(eventstream_state)
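
# Usage sketch (illustrative only, not part of this module): a caller such as
# a group-merge endpoint is expected to snapshot eventstream state for the
# affected groups and then enqueue this task once for the whole batch. The
# ids below are placeholders, and `eventstream.start_merge` is assumed to
# return the opaque state object that `eventstream.end_merge` consumes above.
#
#     from uuid import uuid4
#
#     from sentry import eventstream
#     from sentry.tasks.merge import merge_groups
#
#     state = eventstream.start_merge(
#         project_id, [123, 456], 789  # project, "from" group ids, "to" group id
#     )
#     merge_groups.delay(
#         from_object_ids=[123, 456],
#         to_object_id=789,
#         transaction_id=uuid4().hex,
#         eventstream_state=state,
#     )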