def post(self, request, group):
    """Create a note (comment) on ``group`` authored by the requesting user."""
    note_serializer = NoteSerializer(data=request.DATA)
    if not note_serializer.is_valid():
        return Response(note_serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    payload = dict(note_serializer.object)

    # Reject an identical comment from the same user within the last hour.
    one_hour_ago = timezone.now() - timedelta(hours=1)
    is_duplicate = Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=payload,
        datetime__gte=one_hour_ago,
    ).exists()
    if is_duplicate:
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Commenting implicitly subscribes the author to the group.
    GroupSubscription.objects.subscribe(
        group=group,
        user=request.user,
        reason=GroupSubscriptionReason.comment,
    )

    note_activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=payload,
    )
    note_activity.send_notification()
    return Response(serialize(note_activity, request.user), status=201)
def post(self, request, group_id):
    """Create a note on the group identified by ``group_id`` (legacy form-based API)."""
    group = Group.objects.get(id=group_id)
    assert_perm(group, request.user, request.auth)

    note_form = NewNoteForm(request.DATA)
    if not note_form.is_valid():
        return Response('{"error": "form"}', status=status.HTTP_400_BAD_REQUEST)

    # A matching note from the same user within the last hour is a duplicate.
    recent_duplicate = Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=note_form.cleaned_data,
        datetime__gte=timezone.now() - timedelta(hours=1),
    ).exists()
    if recent_duplicate:
        return Response('{"error": "duplicate"}', status=status.HTTP_400_BAD_REQUEST)

    note_activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=note_form.cleaned_data,
    )
    # TODO: move this into the queue
    note_activity.send_notification()
    return Response(serialize(note_activity, request.user), status=201)
def post(self, request, group):
    """Create a note (comment) on ``group`` for the requesting user."""
    note_serializer = NoteSerializer(data=request.DATA)
    if not note_serializer.is_valid():
        return Response(note_serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    payload = dict(note_serializer.object)

    # Treat an identical comment posted within the last hour as a duplicate.
    cutoff = timezone.now() - timedelta(hours=1)
    if Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=payload,
        datetime__gte=cutoff,
    ).exists():
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST,
        )

    note_activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=payload,
    )
    note_activity.send_notification()
    return Response(serialize(note_activity, request.user), status=201)
def post(self, request, group_id):
    """Create a note on the group identified by ``group_id`` using the legacy form."""
    group = Group.objects.get(id=group_id)
    assert_perm(group, request.user, request.auth)

    form = NewNoteForm(request.DATA)
    if not form.is_valid():
        return Response('{"error": "form"}', status=status.HTTP_400_BAD_REQUEST)

    note_data = form.cleaned_data
    # Same user + same content within an hour means duplicate submission.
    window_start = timezone.now() - timedelta(hours=1)
    already_posted = Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=note_data,
        datetime__gte=window_start,
    ).exists()
    if already_posted:
        return Response('{"error": "duplicate"}', status=status.HTTP_400_BAD_REQUEST)

    activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=note_data,
    )
    # TODO: move this into the queue
    activity.send_notification()
    return Response(serialize(activity, request.user), status=201)
def get_react_config(context):
    """Build the JSON config blob embedded in the page for the React frontend."""
    user = context['request'].user if 'request' in context else None
    if user:
        # Resolve Django's lazy user proxy to a concrete object.
        user = extract_lazy_object(user)

    enabled_features = []
    for feature_name in ('organizations:create', 'auth:register'):
        if features.has(feature_name, actor=user):
            enabled_features.append(feature_name)

    payload = {
        'singleOrganization': settings.SENTRY_SINGLE_ORGANIZATION,
        'urlPrefix': settings.SENTRY_URL_PREFIX,
        'version': _get_version_info(),
        'features': enabled_features,
        'mediaUrl': reverse('sentry-media', args=['sentry', '']),
    }
    if user and user.is_authenticated():
        payload['isAuthenticated'] = True
        payload['user'] = serialize(user, user)
    else:
        payload['isAuthenticated'] = False
        payload['user'] = None
    return mark_safe(json.dumps(payload))
def get_react_config(context):
    # Build the bootstrap configuration blob that is embedded in the HTML page
    # and consumed by the React frontend on initial load.  ``context`` is a
    # template context that may or may not carry the current request.
    if 'request' in context:
        # Fall back to AnonymousUser when no user attribute is present so that
        # downstream checks can still call user methods safely.
        user = getattr(context['request'], 'user', None) or AnonymousUser()
        messages = get_messages(context['request'])
        try:
            is_superuser = context['request'].is_superuser()
        except AttributeError:
            # Not every request object exposes is_superuser() — treat as non-super.
            is_superuser = False
    else:
        user = None
        messages = []
        is_superuser = False
    if user:
        # Resolve Django's lazy user proxy before serializing / feature checks.
        user = extract_lazy_object(user)
    enabled_features = []
    if features.has('organizations:create', actor=user):
        enabled_features.append('organizations:create')
    if auth.has_user_registration():
        enabled_features.append('auth:register')
    version_info = _get_version_info()
    needs_upgrade = False
    # Only superusers are told about pending upgrades.
    if is_superuser:
        needs_upgrade = _needs_upgrade()
    context = {
        'singleOrganization': settings.SENTRY_SINGLE_ORGANIZATION,
        'supportEmail': get_support_mail(),
        'urlPrefix': options.get('system.url-prefix'),
        'version': version_info,
        'features': enabled_features,
        'mediaUrl': get_asset_url('sentry', ''),
        'needsUpgrade': needs_upgrade,
        'dsn': _get_public_dsn(),
        'statuspage': _get_statuspage(),
        # Flatten Django's contrib.messages into simple dicts for the frontend.
        'messages': [{
            'message': msg.message,
            'level': msg.tags,
        } for msg in messages],
        'isOnPremise': settings.SENTRY_ONPREMISE,
        'invitesEnabled': settings.SENTRY_ENABLE_INVITES,
        'gravatarBaseUrl': settings.SENTRY_GRAVATAR_BASE_URL,
    }
    if user and user.is_authenticated():
        context.update({
            'isAuthenticated': True,
            'user': serialize(user, user),
        })
        context['user']['isSuperuser'] = is_superuser
    else:
        context.update({
            'isAuthenticated': False,
            'user': None,
        })
    # HTML-safe JSON dump so the blob can be inlined in a <script> tag.
    return json.dumps_htmlsafe(context)
def post(self, request, group):
    """Create a note on ``group``, subscribing the author and any mentioned users/teams."""
    note_serializer = NoteSerializer(
        data=request.data,
        context={
            'organization_id': group.organization.id,
            'projects': [group.project],
        },
    )
    if not note_serializer.is_valid():
        return Response(note_serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    payload = dict(note_serializer.validated_data)
    mentions = payload.pop('mentions', [])

    # Identical comment from the same user within an hour is a duplicate.
    cutoff = timezone.now() - timedelta(hours=1)
    if Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=payload,
        datetime__gte=cutoff,
    ).exists():
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST,
        )

    # The author is subscribed by virtue of commenting.
    GroupSubscription.objects.subscribe(
        group=group,
        user=request.user,
        reason=GroupSubscriptionReason.comment,
    )

    # Fan mentions out to direct user mentions and team-member mentions.
    mentioned_ids = extract_user_ids_from_mentions(group.organization.id, mentions)
    for id_key, subscribe_reason in (
        ('users', GroupSubscriptionReason.mentioned),
        ('team_users', GroupSubscriptionReason.team_mentioned),
    ):
        GroupSubscription.objects.bulk_subscribe(
            group=group,
            user_ids=mentioned_ids[id_key],
            reason=subscribe_reason,
        )

    note_activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=payload,
    )
    note_activity.send_notification()

    # Mirror the comment to any linked external issues.
    self.create_external_comment(request, group, note_activity)
    return Response(serialize(note_activity, request.user), status=201)
def post(self, request: Request, group) -> Response:
    """Create a note on ``group`` and subscribe the author plus mentioned users/teams."""
    note_serializer = NoteSerializer(
        data=request.data,
        context={
            "organization": group.organization,
            "organization_id": group.organization.id,
            "projects": [group.project],
        },
    )
    if not note_serializer.is_valid():
        return Response(note_serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    payload = dict(note_serializer.validated_data)
    mentions = payload.pop("mentions", [])

    # Duplicate guard: same author, same content, posted within the last hour.
    cutoff = timezone.now() - timedelta(hours=1)
    duplicate_exists = Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=payload,
        datetime__gte=cutoff,
    ).exists()
    if duplicate_exists:
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Commenting subscribes the author to future activity on the group.
    GroupSubscription.objects.subscribe(
        group=group, user=request.user, reason=GroupSubscriptionReason.comment)

    # Subscribe directly-mentioned users and members of mentioned teams.
    mentioned_ids = extract_user_ids_from_mentions(group.organization.id, mentions)
    GroupSubscription.objects.bulk_subscribe(
        group=group,
        user_ids=mentioned_ids["users"],
        reason=GroupSubscriptionReason.mentioned)
    GroupSubscription.objects.bulk_subscribe(
        group=group,
        user_ids=mentioned_ids["team_users"],
        reason=GroupSubscriptionReason.team_mentioned,
    )

    note_activity = Activity.objects.create_group_activity(
        group,
        ActivityType.NOTE,
        user=extract_lazy_object(request.user),
        data=payload,
    )

    # Mirror the comment to any linked external issues.
    self.create_external_comment(request, group, note_activity)
    return Response(serialize(note_activity, request.user), status=201)
def get_react_config(context):
    """Build the JSON config blob embedded in the page for the React frontend."""
    if 'request' in context:
        user = context['request'].user
        messages = get_messages(context['request'])
        try:
            is_superuser = context['request'].is_superuser()
        except AttributeError:
            # Some request objects don't expose is_superuser().
            is_superuser = False
    else:
        user = None
        messages = []
        is_superuser = False

    if user:
        # Resolve Django's lazy user proxy before feature checks / serialization.
        user = extract_lazy_object(user)

    enabled_features = []
    for feature_name in ('organizations:create', 'auth:register'):
        if features.has(feature_name, actor=user):
            enabled_features.append(feature_name)

    version_info = _get_version_info()
    # Only superusers get the upgrade-needed hint.
    needs_upgrade = _needs_upgrade() if is_superuser else False

    payload = {
        'singleOrganization': settings.SENTRY_SINGLE_ORGANIZATION,
        'urlPrefix': options.get('system.url-prefix'),
        'version': version_info,
        'features': enabled_features,
        'mediaUrl': get_asset_url('sentry', ''),
        'needsUpgrade': needs_upgrade,
        'dsn': _get_public_dsn(),
        'statuspage': _get_statuspage(),
        'messages': [
            {'message': msg.message, 'level': msg.tags}
            for msg in messages
        ],
    }
    if user and user.is_authenticated():
        payload['isAuthenticated'] = True
        payload['user'] = serialize(user, user)
        payload['user']['isSuperuser'] = is_superuser
    else:
        payload['isAuthenticated'] = False
        payload['user'] = None
    return mark_safe(json.dumps(payload))
def _subscribe_and_assign_issue(self, acting_user, group, result):
    """Subscribe ``acting_user`` to ``group`` and self-assign when the user opted in."""
    if not acting_user:
        return
    GroupSubscription.objects.subscribe(
        user=acting_user,
        group=group,
        reason=GroupSubscriptionReason.status_change,
    )
    # 'self_assign_issue' is a string user option: '1' means auto-assign on resolve.
    wants_self_assign = UserOption.objects.get_value(
        user=acting_user, key='self_assign_issue', default='0')
    if wants_self_assign == '1' and not group.assignee_set.exists():
        result['assignedTo'] = extract_lazy_object(acting_user)
def handle(self, *args, **options):
    """Load a JSON template file and install it into the elastic nodestore."""
    # Only meaningful when the configured nodestore is the elastic backend.
    if not isinstance(extract_lazy_object(nodestore), ElasticNodeStorage):
        raise CommandError('ElasticNodeStorage is not correctly configured')

    template_path = os.path.abspath(options['template'])
    with io.open(template_path, mode='rt', encoding=nodestore.encoding) as fp:
        template_body = json.load(fp)

    nodestore.put_template(template_body)
    self.stdout.write('Successfully created elastic template')
def _subscribe_and_assign_issue(self, acting_user, group, result):
    """Subscribe ``acting_user`` to ``group`` and self-assign (as an Actor) when opted in."""
    if not acting_user:
        return
    GroupSubscription.objects.subscribe(
        user=acting_user,
        group=group,
        reason=GroupSubscriptionReason.status_change,
    )
    # 'self_assign_issue' is a string user option: '1' means auto-assign on resolve.
    wants_self_assign = UserOption.objects.get_value(
        user=acting_user, key='self_assign_issue', default='0'
    )
    if wants_self_assign == '1' and not group.assignee_set.exists():
        result['assignedTo'] = Actor(type=User, id=extract_lazy_object(acting_user).id)
def get(self, request):
    """Return the serialized current user, or 400 when unauthenticated."""
    if not request.user.is_authenticated():
        return Response(status=status.HTTP_400_BAD_REQUEST)

    current_user = extract_lazy_object(request._request.user)
    payload = serialize(current_user, current_user)
    # XXX(dcramer): we dont use is_active_superuser here as we simply
    # want to tell the UI that we're an authenticated superuser, and
    # for requests that require an *active* session, they should prompt
    # on-demand. This ensures things like links to the Sentry admin can
    # still easily be rendered.
    payload['isSuperuser'] = current_user.is_superuser
    return Response(payload)
def get(self, request):
    """Return basic API metadata: version, auth scopes, and the current user."""
    user_payload = None
    if request.user.is_authenticated:
        user_payload = serialize(extract_lazy_object(request.user), request.user)

    auth_payload = {"scopes": request.auth.get_scopes()} if request.auth else None

    return Response(
        {"version": "0", "auth": auth_payload, "user": user_payload},
        status=200,
    )
def get_react_config(context):
    """Build the JSON config blob embedded in the page for the React frontend."""
    if 'request' in context:
        user = context['request'].user
        messages = get_messages(context['request'])
        try:
            is_superuser = context['request'].is_superuser()
        except AttributeError:
            # Some request objects don't expose is_superuser().
            is_superuser = False
    else:
        user = None
        messages = []
        is_superuser = False

    if user:
        # Resolve Django's lazy user proxy before feature checks / serialization.
        user = extract_lazy_object(user)

    enabled_features = []
    for feature_name in ('organizations:create', 'auth:register'):
        if features.has(feature_name, actor=user):
            enabled_features.append(feature_name)

    payload = {
        'singleOrganization': settings.SENTRY_SINGLE_ORGANIZATION,
        'urlPrefix': settings.SENTRY_URL_PREFIX,
        'version': _get_version_info(),
        'features': enabled_features,
        'mediaUrl': get_asset_url('sentry', ''),
        'messages': [
            {'message': msg.message, 'level': msg.tags}
            for msg in messages
        ],
    }
    if user and user.is_authenticated():
        payload['isAuthenticated'] = True
        payload['user'] = serialize(user, user)
        payload['user']['isSuperuser'] = is_superuser
    else:
        payload['isAuthenticated'] = False
        payload['user'] = None
    return mark_safe(json.dumps(payload))
def post(self, request, group_id):
    """Record that the requesting user has seen the given group."""
    group = Group.objects.get(id=group_id)
    assert_perm(group, request.user, request.auth)

    # Upsert the GroupSeen row, refreshing last_seen on every call.
    _, was_created = create_or_update(
        GroupSeen,
        group=group,
        user=extract_lazy_object(request.user),
        project=group.project,
        defaults={
            'last_seen': timezone.now(),
        },
    )
    # 201 on first view, 204 when the row already existed.
    return Response(status=201 if was_created else 204)
def handle_react(self, request):
    """Render the React app shell with the enabled feature flags."""
    if request.user.is_authenticated():
        # remove lazy eval
        request.user = extract_lazy_object(request.user)

    enabled_features = [
        feature_name
        for feature_name in ('organizations:create', 'auth:register')
        if features.has(feature_name, actor=request.user)
    ]
    template_context = {
        'features': mark_safe(json.dumps(enabled_features)),
    }
    return self.respond('sentry/bases/react.html', template_context)
def post(self, request, group):
    """Create a note on ``group``, subscribing the author and any mentioned users."""
    note_serializer = NoteSerializer(data=request.DATA, context={'group': group})
    if not note_serializer.is_valid():
        return Response(note_serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    payload = dict(note_serializer.object)
    mentions = payload.pop('mentions', [])

    # Identical comment from the same user within an hour is a duplicate.
    cutoff = timezone.now() - timedelta(hours=1)
    if Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=payload,
        datetime__gte=cutoff,
    ).exists():
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST,
        )

    # The author is subscribed by virtue of commenting.
    GroupSubscription.objects.subscribe(
        group=group,
        user=request.user,
        reason=GroupSubscriptionReason.comment,
    )

    # Subscribe each mentioned user as well.
    if mentions:
        mentioned_users = User.objects.filter(id__in=mentions)
        for mentioned_user in mentioned_users:
            GroupSubscription.objects.subscribe(
                group=group,
                user=mentioned_user,
                reason=GroupSubscriptionReason.mentioned,
            )

    note_activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=payload,
    )
    note_activity.send_notification()
    return Response(serialize(note_activity, request.user), status=201)
def post(self, request, group_id):
    """Record that the requesting user has seen the given group."""
    group = Group.objects.get(id=group_id)
    assert_perm(group, request.user, request.auth)

    # Upsert the GroupSeen marker; last_seen is refreshed on every call.
    seen_row, first_view = create_or_update(
        GroupSeen,
        group=group,
        user=extract_lazy_object(request.user),
        project=group.project,
        defaults={
            'last_seen': timezone.now(),
        }
    )
    if first_view:
        return Response(status=201)
    return Response(status=204)
def get(self, request):
    """Return basic API metadata: version, auth scopes, and the current user."""
    if request.user.is_authenticated():
        user_payload = serialize(extract_lazy_object(request.user), request.user)
    else:
        user_payload = None

    auth_payload = {'scopes': request.auth.get_scopes()} if request.auth else None

    body = {
        'version': '0',
        'auth': auth_payload,
        'user': user_payload,
    }
    return Response(body, status=200)
def get_react_config(context):
    """Build the JSON config blob embedded in the page for the React frontend."""
    if 'request' in context:
        user = context['request'].user
        messages = get_messages(context['request'])
        is_superuser = context['request'].is_superuser()
    else:
        user = None
        messages = []
        is_superuser = False

    if user:
        # Resolve Django's lazy user proxy before feature checks / serialization.
        user = extract_lazy_object(user)

    enabled_features = []
    for feature_name in ('organizations:create', 'auth:register'):
        if features.has(feature_name, actor=user):
            enabled_features.append(feature_name)

    payload = {
        'singleOrganization': settings.SENTRY_SINGLE_ORGANIZATION,
        'urlPrefix': settings.SENTRY_URL_PREFIX,
        'version': _get_version_info(),
        'features': enabled_features,
        'mediaUrl': get_asset_url('sentry', ''),
        'messages': [
            {'message': msg.message, 'level': msg.tags}
            for msg in messages
        ],
    }
    if user and user.is_authenticated():
        payload['isAuthenticated'] = True
        payload['user'] = serialize(user, user)
        payload['user']['isSuperuser'] = is_superuser
    else:
        payload['isAuthenticated'] = False
        payload['user'] = None
    return mark_safe(json.dumps(payload))
def get(self, request):
    """Return the detailed serialization of the current user, or 400 if anonymous."""
    if not request.user.is_authenticated():
        return Response(status=status.HTTP_400_BAD_REQUEST)

    current_user = extract_lazy_object(request._request.user)
    payload = serialize(current_user, current_user, DetailedUserSerializer())
    return Response(payload)
def post(self, request, group):
    # Create a note (comment) on ``group``.  Mentions may reference users or
    # teams; mentioned users and all members of mentioned teams are subscribed
    # to the group.  The note is also mirrored to linked external issues when
    # the feature flag is enabled.
    serializer = NoteSerializer(data=request.DATA, context={'group': group})
    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    data = dict(serializer.object)
    # Mentions are not stored on the activity payload itself.
    mentions = data.pop('mentions', [])
    # Duplicate guard: same author + same content within the last hour.
    if Activity.objects.filter(group=group,
                               type=Activity.NOTE,
                               user=request.user,
                               data=data,
                               datetime__gte=timezone.now() - timedelta(hours=1)).exists():
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST)
    # Commenting implicitly subscribes the author.
    GroupSubscription.objects.subscribe(
        group=group,
        user=request.user,
        reason=GroupSubscriptionReason.comment,
    )
    # Resolve the mention actor ids into concrete users/teams.
    actors = Actor.resolve_many(mentions)
    actor_mentions = seperate_resolved_actors(actors)
    for user in actor_mentions.get('users'):
        GroupSubscription.objects.subscribe(
            group=group,
            user=user,
            reason=GroupSubscriptionReason.mentioned,
        )
    mentioned_teams = actor_mentions.get('teams')
    # Expand mentioned teams into their active org members, excluding anyone
    # already subscribed above via a direct user mention.
    mentioned_team_users = list(
        User.objects.filter(
            sentry_orgmember_set__organization_id=group.project.organization_id,
            sentry_orgmember_set__organizationmemberteam__team__in=mentioned_teams,
            sentry_orgmember_set__organizationmemberteam__is_active=True,
            is_active=True,
        ).exclude(
            id__in={u.id for u in actor_mentions.get('users')}).values_list(
                'id', flat=True))
    GroupSubscription.objects.bulk_subscribe(
        group=group,
        user_ids=mentioned_team_users,
        reason=GroupSubscriptionReason.team_mentioned,
    )
    activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=data,
    )
    activity.send_notification()
    # sync Sentry comments to external issues
    if features.has('organizations:internal-catchall', group.organization,
                    actor=request.user):
        external_issue_ids = GroupLink.objects.filter(
            project_id=group.project_id,
            group_id=group.id,
            linked_type=GroupLink.LinkedType.issue,
        ).values_list('linked_id', flat=True)
        # One async task per linked external issue.
        for external_issue_id in external_issue_ids:
            post_comment.apply_async(kwargs={
                'external_issue_id': external_issue_id,
                'data': data,
            })
    return Response(serialize(activity, request.user), status=201)
def put(self, request, project):
    """
    Bulk Mutate a List of Issues
    ````````````````````````````

    Bulk mutate various attributes on issues.  The list of issues to
    modify is given through the `id` query parameter.  It is repeated
    for each issue that should be modified.

    - For non-status updates, the `id` query parameter is required.
    - For status updates, the `id` query parameter may be omitted
      for a batch "update all" query.
    - An optional `status` query parameter may be used to restrict
      mutations to only events with the given status.

    The following attributes can be modified and are supplied as
    JSON object in the body:

    If any ids are out of scope this operation will succeed without
    any data mutation.

    :qparam int id: a list of IDs of the issues to be mutated.  This
                    parameter shall be repeated for each issue.  It
                    is optional only if a status is mutated in which
                    case an implicit `update all` is assumed.
    :qparam string status: optionally limits the query to issues of the
                           specified status.  Valid values are
                           ``"resolved"``, ``"unresolved"`` and
                           ``"ignored"``.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :param string status: the new status for the issues.  Valid values
                          are ``"resolved"``, ``"resolvedInNextRelease"``,
                          ``"unresolved"``, and ``"ignored"``.
    :param map statusDetails: additional details about the resolution.
                              Valid values are ``"inRelease"``,
                              ``"inNextRelease"``, ``"inCommit"``,
                              ``"ignoreDuration"``, ``"ignoreCount"``,
                              ``"ignoreWindow"``, ``"ignoreUserCount"``,
                              and ``"ignoreUserWindow"``.
    :param int ignoreDuration: the number of minutes to ignore this issue.
    :param boolean isPublic: sets the issue to public or private.
    :param boolean merge: allows to merge or unmerge different issues.
    :param string assignedTo: the actor id (or username) of the user or
                              team that should be assigned to this issue.
    :param boolean hasSeen: in case this API call is invoked with a user
                            context this allows changing of the flag
                            that indicates if the user has seen the
                            event.
    :param boolean isBookmarked: in case this API call is invoked with a
                                 user context this allows changing of
                                 the bookmark flag.
    :auth: required
    """
    # --- resolve the target group set -------------------------------------
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(
            project=project, id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    # --- validate the mutation payload ------------------------------------
    serializer = GroupValidator(
        data=request.DATA,
        partial=True,
        context={'project': project},
    )
    if not serializer.is_valid():
        return Response(serializer.errors, status=400)
    result = dict(serializer.object)
    acting_user = request.user if request.user.is_authenticated() else None

    # With no explicit ids, fall back to a (bounded) "update all" search.
    if not group_ids:
        try:
            # bulk mutations are limited to 1000 items
            # TODO(dcramer): it'd be nice to support more than this, but its
            # a bit too complicated right now
            cursor_result, _ = self._search(request, project, {
                'limit': 1000,
                'paginator_options': {'max_limit': 1000},
            })
        except ValidationError as exc:
            return Response({'detail': six.text_type(exc)}, status=400)
        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    queryset = Group.objects.filter(
        id__in=group_ids,
    )

    # --- discard: tombstone the groups and delete them --------------------
    discard = result.get('discard')
    if discard:
        if not features.has('projects:discard-groups', project, actor=request.user):
            return Response({'detail': ['You do not have that feature enabled']}, status=400)
        group_list = list(queryset)
        groups_to_delete = []
        for group in group_list:
            with transaction.atomic():
                try:
                    tombstone = GroupTombstone.objects.create(
                        previous_group_id=group.id,
                        actor_id=acting_user.id if acting_user else None,
                        **{name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP}
                    )
                except IntegrityError:
                    # in this case, a tombstone has already been created
                    # for a group, so no hash updates are necessary
                    pass
                else:
                    groups_to_delete.append(group)
                    # Point existing hashes at the tombstone so future events
                    # with the same hash are discarded.
                    GroupHash.objects.filter(
                        group=group,
                    ).update(
                        group=None,
                        group_tombstone_id=tombstone.id,
                    )
        self._delete_groups(request, project, groups_to_delete, delete_type='discard')
        return Response(status=204)

    statusDetails = result.pop('statusDetails', result)
    status = result.get('status')
    release = None
    commit = None

    # --- resolution handling ----------------------------------------------
    if status in ('resolved', 'resolvedInNextRelease'):
        if status == 'resolvedInNextRelease' or statusDetails.get('inNextRelease'):
            # XXX(dcramer): this code is copied between the inNextRelease validator
            # due to the status vs statusDetails field
            release = statusDetails.get('inNextRelease') or Release.objects.filter(
                projects=project,
                organization_id=project.organization_id,
            ).extra(select={
                'sort': 'COALESCE(date_released, date_added)',
            }).order_by('-sort')[0]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': '',
            }
            status_details = {
                'inNextRelease': True,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = 'in_next_release'
            res_status = GroupResolution.Status.pending
        elif statusDetails.get('inRelease'):
            release = statusDetails['inRelease']
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': release.version,
            }
            status_details = {
                'inRelease': release.version,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = 'in_release'
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get('inCommit'):
            commit = statusDetails['inCommit']
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {
                'commit': commit.id,
            }
            status_details = {
                'inCommit': serialize(commit, request.user),
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type_str = 'in_commit'
        else:
            res_type_str = 'now'
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr('group.resolved', instance=res_type_str, skip_internal=True)

        # if we've specified a commit, let's see if its already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            try:
                release = Release.objects.filter(
                    projects=project,
                    releasecommit__commit=commit,
                ).extra(select={
                    'sort': 'COALESCE(date_released, date_added)',
                }).order_by('-sort')[0]
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None

        for group in group_list:
            with transaction.atomic():
                resolution = None
                if release:
                    resolution_params = {
                        'release': release,
                        'type': res_type,
                        'status': res_status,
                        'actor_id': request.user.id if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group,
                        defaults=resolution_params,
                    )
                    if not created:
                        resolution.update(
                            datetime=timezone.now(), **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(
                    id=group.id,
                ).update(
                    status=GroupStatus.RESOLVED,
                    resolved_at=now,
                )
                # Without a resolution row, "created" mirrors whether the
                # status update actually changed anything.
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                self._subscribe_and_assign_issue(
                    acting_user, group, result)

                if created:
                    activity = Activity.objects.create(
                        project=group.project,
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            # Signals fire outside the transaction.
            if release:
                issue_resolved_in_release.send_robust(
                    group=group,
                    project=project,
                    user=acting_user,
                    resolution_type=res_type_str,
                    sender=type(self),
                )
            elif commit:
                resolved_with_commit.send_robust(
                    organization_id=group.project.organization_id,
                    user=request.user,
                    group=group,
                    sender=type(self),
                )

            kick_off_status_syncs.apply_async(kwargs={
                'project_id': group.project_id,
                'group_id': group.id,
            })

        result.update({
            'status': 'resolved',
            'statusDetails': status_details,
        })

    # --- other status transitions (unresolved / ignored) -------------------
    elif status:
        new_status = STATUS_CHOICES[result['status']]

        with transaction.atomic():
            happened = queryset.exclude(
                status=new_status,
            ).update(
                status=new_status,
            )

            GroupResolution.objects.filter(
                group__in=group_ids,
            ).delete()

            if new_status == GroupStatus.IGNORED:
                metrics.incr('group.ignored', skip_internal=True)

                # 'snoozeDuration' is the legacy alias for 'ignoreDuration'.
                ignore_duration = (
                    statusDetails.pop('ignoreDuration', None) or
                    statusDetails.pop('snoozeDuration', None)
                ) or None
                ignore_count = statusDetails.pop(
                    'ignoreCount', None) or None
                ignore_window = statusDetails.pop(
                    'ignoreWindow', None) or None
                ignore_user_count = statusDetails.pop(
                    'ignoreUserCount', None) or None
                ignore_user_window = statusDetails.pop(
                    'ignoreUserWindow', None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration,
                        )
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        # Snapshot current counters so the snooze can expire
                        # relative to them.
                        if ignore_count and not ignore_window:
                            state['times_seen'] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state['users_seen'] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                'until': ignore_until,
                                'count': ignore_count,
                                'window': ignore_window,
                                'user_count': ignore_user_count,
                                'user_window': ignore_user_window,
                                'state': state,
                                'actor_id': request.user.id if request.user.is_authenticated() else None,
                            }
                        )
                        result['statusDetails'] = {
                            'ignoreCount': ignore_count,
                            'ignoreUntil': ignore_until,
                            'ignoreUserCount': ignore_user_count,
                            'ignoreUserWindow': ignore_user_window,
                            'ignoreWindow': ignore_window,
                            'actor': serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    # Plain ignore (no conditions): clear any existing snoozes.
                    GroupSnooze.objects.filter(
                        group__in=group_ids,
                    ).delete()
                    ignore_until = None
                    result['statusDetails'] = {}
            else:
                result['statusDetails'] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    'ignoreCount': ignore_count,
                    'ignoreDuration': ignore_duration,
                    'ignoreUntil': ignore_until,
                    'ignoreUserCount': ignore_user_count,
                    'ignoreUserWindow': ignore_user_window,
                    'ignoreWindow': ignore_window,
                }
                issue_ignored.send_robust(
                    project=project,
                    user=acting_user,
                    group_list=group_list,
                    activity_data=activity_data,
                    sender=self.__class__)

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=group.project,
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(kwargs={
                        'project_id': group.project_id,
                        'group_id': group.id,
                    })

    # --- assignment ---------------------------------------------------------
    if 'assignedTo' in result:
        assigned_actor = result['assignedTo']
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()
                GroupAssignee.objects.assign(group, resolved_actor, acting_user)
            result['assignedTo'] = serialize(
                assigned_actor.resolve(), acting_user, ActorSerializer())
        else:
            # Falsy assignedTo means "unassign".
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    # --- seen flag ----------------------------------------------------------
    if result.get('hasSeen') and project.member_set.filter(user=acting_user).exists():
        for group in group_list:
            instance, created = create_or_update(
                GroupSeen,
                group=group,
                user=acting_user,
                project=group.project,
                values={
                    'last_seen': timezone.now(),
                }
            )
    elif result.get('hasSeen') is False:
        GroupSeen.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    # --- bookmarks ----------------------------------------------------------
    if result.get('isBookmarked'):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project,
                group=group,
                user=acting_user,
            )
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark,
            )
    elif result.get('isBookmarked') is False:
        GroupBookmark.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for rich rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get('isSubscribed') in (True, False):
        is_subscribed = result['isSubscribed']
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project,
                values={
                    'is_active': is_subscribed,
                    'reason': GroupSubscriptionReason.unknown,
                },
            )

        result['subscriptionDetails'] = {
            'reason': SUBSCRIPTION_REASON_MAP.get(
                GroupSubscriptionReason.unknown,
                'unknown',
            ),
        }

    # --- sharing ------------------------------------------------------------
    if 'isPublic' in result:
        # We always want to delete an existing share, because triggering
        # an isPublic=True even when it's already public, should trigger
        # regenerating.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result['shareId'] = None
                Activity.objects.create(
                    project=group.project,
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get('isPublic'):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=group.project,
                group=group,
                user=acting_user,
            )
            if created:
                result['shareId'] = share.uuid
                Activity.objects.create(
                    project=group.project,
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get('merge') and len(group_list) > 1:
        # The most-seen group (ties broken by id) becomes the merge target.
        group_list_by_times_seen = sorted(
            group_list,
            key=lambda g: (g.times_seen, g.id),
            reverse=True,
        )
        primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(
            primary_group.project_id,
            group_ids_to_merge,
            primary_group.id
        )

        Group.objects.filter(
            id__in=group_ids_to_merge
        ).update(
            status=GroupStatus.PENDING_MERGE
        )

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=primary_group.project,
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={
                'issues': [{
                    'id': c.id
                } for c in groups_to_merge],
            },
        )

        result['merge'] = {
            'parent': six.text_type(primary_group.id),
            'children': [six.text_type(g.id) for g in groups_to_merge],
        }

    return Response(result)
def get_react_config(context):
    """Build the HTML-safe JSON configuration blob consumed by the React frontend.

    Reads the current request (when present in the template ``context``) to
    determine the acting user, pending flash messages, session, and superuser
    state, then assembles feature flags, version/upgrade info, and branding
    settings into a single dict serialized with ``json.dumps_htmlsafe``.
    """
    if 'request' in context:
        user = getattr(context['request'], 'user', None) or AnonymousUser()
        messages = get_messages(context['request'])
        session = getattr(context['request'], 'session', None)
        try:
            is_superuser = context['request'].is_superuser()
        except AttributeError:
            # request objects without superuser support are treated as non-superuser
            is_superuser = False
    else:
        user = None
        messages = []
        # BUG FIX: `session` was previously left unbound on this path, causing a
        # NameError when 'lastOrganization' is computed below.
        session = None
        is_superuser = False

    if user:
        user = extract_lazy_object(user)
        # trust the resolved user object over the request-derived flag
        is_superuser = user.is_superuser

    enabled_features = []
    if features.has('organizations:create', actor=user):
        enabled_features.append('organizations:create')
    if auth.has_user_registration():
        enabled_features.append('auth:register')

    version_info = _get_version_info()

    # upgrade check is only performed (and exposed) for superusers
    needs_upgrade = False
    if is_superuser:
        needs_upgrade = _needs_upgrade()

    context = {
        'singleOrganization': settings.SENTRY_SINGLE_ORGANIZATION,
        'supportEmail': get_support_mail(),
        'urlPrefix': options.get('system.url-prefix'),
        'version': version_info,
        'features': enabled_features,
        'mediaUrl': get_asset_url('sentry', ''),
        'needsUpgrade': needs_upgrade,
        'dsn': get_public_dsn(),
        'statuspage': _get_statuspage(),
        'messages': [{
            'message': msg.message,
            'level': msg.tags,
        } for msg in messages],
        'isOnPremise': settings.SENTRY_ONPREMISE,
        'invitesEnabled': settings.SENTRY_ENABLE_INVITES,
        'gravatarBaseUrl': settings.SENTRY_GRAVATAR_BASE_URL,
        'termsUrl': settings.TERMS_URL,
        'privacyUrl': settings.PRIVACY_URL,
        # Note `lastOrganization` should not be expected to update throughout frontend app lifecycle
        # It should only be used on a fresh browser nav to a path where an
        # organization is not in context
        'lastOrganization': session['activeorg'] if session and 'activeorg' in session else None,
    }
    if user and user.is_authenticated():
        context.update({
            'isAuthenticated': True,
            'user': serialize(user, user, DetailedUserSerializer()),
        })
        context['user']['isSuperuser'] = is_superuser
    else:
        context.update({
            'isAuthenticated': False,
            'user': None,
        })
    return json.dumps_htmlsafe(context)
def update_groups(request, projects, organization_id, search_fn):
    """Apply a bulk mutation to a set of groups (issues).

    Supported mutations: resolve (plain / in-next-release / in-release /
    in-commit), status changes (unresolved / ignored with snooze options),
    assignment, seen/bookmark/subscription flags, share visibility, merge,
    and discard.

    Groups come from the ``id`` query parameter when present; otherwise they
    are located via ``search_fn`` (capped at 1000 results).

    Returns a DRF ``Response`` with the applied mutation details, 204 when no
    matching groups exist, or 400 on validation errors.
    """
    group_ids = request.GET.getlist("id")
    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id, project__in=projects, id__in=group_ids
        )
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(data=request.data, partial=True, context={"project": project})
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

    # result reflects the last project's validation; all projects validated above
    result = dict(serializer.validated_data)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = request.user if request.user.is_authenticated() else None

    if not group_ids:
        try:
            # bulk mutations are limited to 1000 items
            # TODO(dcramer): it'd be nice to support more than this, but its
            # a bit too complicated right now
            cursor_result, _ = search_fn({"limit": 1000, "paginator_options": {"max_limit": 1000}})
        except ValidationError as exc:
            return Response({"detail": six.text_type(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(id__in=group_ids)

    discard = result.get("discard")
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    statusDetails = result.pop("statusDetails", result)
    status = result.get("status")
    release = None
    commit = None

    if status in ("resolved", "resolvedInNextRelease"):
        if status == "resolvedInNextRelease" or statusDetails.get("inNextRelease"):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in next release for multiple projects."},
                    status=400,
                )
            release = (
                statusDetails.get("inNextRelease")
                or Release.objects.filter(
                    projects=projects[0], organization_id=projects[0].organization_id
                )
                .extra(select={"sort": "COALESCE(date_released, date_added)"})
                .order_by("-sort")[0]
            )
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": ""
            }
            status_details = {
                "inNextRelease": True,
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = "in_next_release"
            res_status = GroupResolution.Status.pending
        elif statusDetails.get("inRelease"):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in release for multiple projects."}, status=400
                )
            release = statusDetails["inRelease"]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": release.version
            }
            status_details = {
                "inRelease": release.version,
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = "in_release"
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get("inCommit"):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response(
                    {"detail": "Cannot set resolved in commit for multiple projects."}, status=400
                )
            commit = statusDetails["inCommit"]
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {"commit": commit.id}
            status_details = {
                "inCommit": serialize(commit, request.user),
                "actor": serialize(extract_lazy_object(request.user), request.user),
            }
            res_type_str = "in_commit"
        else:
            res_type_str = "now"
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr("group.resolved", instance=res_type_str, skip_internal=True)

        # if we've specified a commit, let's see if its already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = (
                    Release.objects.filter(projects__in=projects, releasecommit__commit=commit)
                    .extra(select={"sort": "COALESCE(date_released, date_added)"})
                    .order_by("-sort")[0]
                )
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                release = None

        for group in group_list:
            with transaction.atomic():
                resolution = None
                if release:
                    resolution_params = {
                        "release": release,
                        "type": res_type,
                        "status": res_status,
                        "actor_id": request.user.id if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group, defaults=resolution_params
                    )
                    if not created:
                        resolution.update(datetime=timezone.now(), **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(id=group.id).update(
                    status=GroupStatus.RESOLVED, resolved_at=now
                )
                if not resolution:
                    # no resolution row: treat the status flip itself as the
                    # "newly resolved" signal for activity creation below
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                assigned_to = self_subscribe_and_assign_issue(acting_user, group)
                if assigned_to is not None:
                    result["assignedTo"] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or request.user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(
                kwargs={"project_id": group.project_id, "group_id": group.id}
            )

        result.update({"status": "resolved", "statusDetails": status_details})

    elif status:
        new_status = STATUS_CHOICES[result["status"]]

        with transaction.atomic():
            happened = queryset.exclude(status=new_status).update(status=new_status)

            GroupResolution.objects.filter(group__in=group_ids).delete()

            if new_status == GroupStatus.IGNORED:
                metrics.incr("group.ignored", skip_internal=True)

                ignore_duration = (
                    statusDetails.pop("ignoreDuration", None)
                    or statusDetails.pop("snoozeDuration", None)
                ) or None
                ignore_count = statusDetails.pop("ignoreCount", None) or None
                ignore_window = statusDetails.pop("ignoreWindow", None) or None
                ignore_user_count = statusDetails.pop("ignoreUserCount", None) or None
                ignore_user_window = statusDetails.pop("ignoreUserWindow", None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(minutes=ignore_duration)
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state["times_seen"] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state["users_seen"] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                "until": ignore_until,
                                "count": ignore_count,
                                "window": ignore_window,
                                "user_count": ignore_user_count,
                                "user_window": ignore_user_window,
                                "state": state,
                                "actor_id": request.user.id
                                if request.user.is_authenticated()
                                else None,
                            },
                        )
                        result["statusDetails"] = {
                            "ignoreCount": ignore_count,
                            "ignoreUntil": ignore_until,
                            "ignoreUserCount": ignore_user_count,
                            "ignoreUserWindow": ignore_user_window,
                            "ignoreWindow": ignore_window,
                            "actor": serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    GroupSnooze.objects.filter(group__in=group_ids).delete()
                    ignore_until = None
                    result["statusDetails"] = {}
            else:
                result["statusDetails"] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    "ignoreCount": ignore_count,
                    "ignoreDuration": ignore_duration,
                    "ignoreUntil": ignore_until,
                    "ignoreUserCount": ignore_user_count,
                    "ignoreUserWindow": ignore_user_window,
                    "ignoreWindow": ignore_window,
                }

                groups_by_project_id = defaultdict(list)
                for group in group_list:
                    groups_by_project_id[group.project_id].append(group)

                for project in projects:
                    project_groups = groups_by_project_id.get(project.id)
                    if project_groups:
                        issue_ignored.send_robust(
                            project=project,
                            user=acting_user,
                            group_list=project_groups,
                            activity_data=activity_data,
                            sender=update_groups,
                        )

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(
                        kwargs={"project_id": group.project_id, "group_id": group.id}
                    )

    if "assignedTo" in result:
        assigned_actor = result["assignedTo"]
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()
                GroupAssignee.objects.assign(group, resolved_actor, acting_user)
            result["assignedTo"] = serialize(
                assigned_actor.resolve(), acting_user, ActorSerializer()
            )
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists() for project in projects
    }
    if result.get("hasSeen"):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={"last_seen": timezone.now()},
                )
    elif result.get("hasSeen") is False:
        GroupSeen.objects.filter(group__in=group_ids, user=acting_user).delete()

    if result.get("isBookmarked"):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id], group=group, user=acting_user
            )
            GroupSubscription.objects.subscribe(
                user=acting_user, group=group, reason=GroupSubscriptionReason.bookmark
            )
    elif result.get("isBookmarked") is False:
        GroupBookmark.objects.filter(group__in=group_ids, user=acting_user).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for rich rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get("isSubscribed") in (True, False):
        is_subscribed = result["isSubscribed"]
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={"is_active": is_subscribed, "reason": GroupSubscriptionReason.unknown},
            )

        result["subscriptionDetails"] = {
            "reason": SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown, "unknown")
        }

    if "isPublic" in result:
        # We always want to delete an existing share, because triggering
        # an isPublic=True even when it's already public, should trigger
        # regenerating.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result["shareId"] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

        if result.get("isPublic"):
            for group in group_list:
                share, created = GroupShare.objects.get_or_create(
                    project=project_lookup[group.project_id], group=group, user=acting_user
                )
                if created:
                    result["shareId"] = share.uuid
                    Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=Activity.SET_PUBLIC,
                        user=acting_user,
                    )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get("merge") and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            # BUG FIX: this error response previously omitted the status code
            # and was returned as HTTP 200; return 400 so clients can detect
            # the failure like every other validation error in this function.
            return Response(
                {"detail": "Merging across multiple projects is not supported"}, status=400
            )

        group_list_by_times_seen = sorted(
            group_list, key=lambda g: (g.times_seen, g.id), reverse=True
        )
        primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(
            primary_group.project_id, group_ids_to_merge, primary_group.id
        )

        Group.objects.filter(id__in=group_ids_to_merge).update(status=GroupStatus.PENDING_MERGE)

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={"issues": [{"id": c.id} for c in groups_to_merge]},
        )

        result["merge"] = {
            "parent": six.text_type(primary_group.id),
            "children": [six.text_type(g.id) for g in groups_to_merge],
        }

    return Response(result)
def post(self, request, group):
    """Create a note (comment) on a group.

    Validates the payload with ``NoteSerializer``, rejects an identical
    comment posted by the same user within the last hour, subscribes the
    author plus any mentioned users/teams to the group, then records the
    note as an ``Activity`` and sends its notification.

    Returns 201 with the serialized activity, or 400 on validation failure
    or duplicate comment.
    """
    serializer = NoteSerializer(data=request.DATA, context={'group': group})
    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    data = dict(serializer.object)
    # mentions drive subscriptions only; they are stripped from the stored
    # activity payload (and from the duplicate check below)
    mentions = data.pop('mentions', [])

    # reject an identical note by the same user within the last hour
    if Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=data,
        datetime__gte=timezone.now() - timedelta(hours=1)
    ).exists():
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST
        )

    # the author is always subscribed as a commenter
    GroupSubscription.objects.subscribe(
        group=group,
        user=request.user,
        reason=GroupSubscriptionReason.comment,
    )

    # NOTE(review): assumes seperate_resolved_actors always returns both
    # 'users' and 'teams' keys (iterables) — verify against its definition.
    actors = Actor.resolve_many(mentions)
    actor_mentions = seperate_resolved_actors(actors)

    for user in actor_mentions.get('users'):
        GroupSubscription.objects.subscribe(
            group=group,
            user=user,
            reason=GroupSubscriptionReason.mentioned,
        )

    mentioned_teams = actor_mentions.get('teams')

    # expand team mentions into their active org members, excluding anyone
    # already subscribed above via a direct user mention
    mentioned_team_users = list(
        User.objects.filter(
            sentry_orgmember_set__organization_id=group.project.organization_id,
            sentry_orgmember_set__organizationmemberteam__team__in=mentioned_teams,
            sentry_orgmember_set__organizationmemberteam__is_active=True,
            is_active=True,
        ).exclude(id__in={u.id for u in actor_mentions.get('users')})
        .values_list('id', flat=True)
    )

    GroupSubscription.objects.bulk_subscribe(
        group=group,
        user_ids=mentioned_team_users,
        reason=GroupSubscriptionReason.team_mentioned,
    )

    activity = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=data,
    )

    activity.send_notification()

    return Response(serialize(activity, request.user), status=201)
def put(self, request, project):
    """
    Bulk Mutate a List of Issues
    ````````````````````````````

    Bulk mutate various attributes on issues.  The list of issues
    to modify is given through the `id` query parameter.  It is repeated
    for each issue that should be modified.

    - For non-status updates, the `id` query parameter is required.
    - For status updates, the `id` query parameter may be omitted
      for a batch "update all" query.
    - An optional `status` query parameter may be used to restrict
      mutations to only events with the given status.

    The following attributes can be modified and are supplied as
    JSON object in the body:

    If any ids are out of scope this operation will succeed without
    any data mutation.

    :qparam int id: a list of IDs of the issues to be mutated.  This
                    parameter shall be repeated for each issue.  It
                    is optional only if a status is mutated in which
                    case an implicit `update all` is assumed.
    :qparam string status: optionally limits the query to issues of the
                           specified status.  Valid values are
                           ``"resolved"``, ``"unresolved"`` and
                           ``"ignored"``.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :param string status: the new status for the issues.  Valid values
                          are ``"resolved"``, ``resolvedInNextRelease``,
                          ``"unresolved"``, and ``"ignored"``.
    :param int ignoreDuration: the number of minutes to ignore this issue.
    :param boolean isPublic: sets the issue to public or private.
    :param boolean merge: allows to merge or unmerge different issues.
    :param string assignedTo: the username of the user that should be
                              assigned to this issue.
    :param boolean hasSeen: in case this API call is invoked with a user
                            context this allows changing of the flag
                            that indicates if the user has seen the
                            event.
    :param boolean isBookmarked: in case this API call is invoked with a
                                 user context this allows changing of
                                 the bookmark flag.
    :auth: required
    """
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(project=project, id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            return Response(status=204)
    else:
        group_list = None

    serializer = GroupValidator(
        data=request.DATA,
        partial=True,
        context={'project': project},
    )
    if not serializer.is_valid():
        return Response(serializer.errors, status=400)

    result = dict(serializer.object)

    acting_user = request.user if request.user.is_authenticated() else None

    # no explicit ids: fall back to a search-driven "update all" batch
    if not group_ids:
        try:
            query_kwargs = self._build_query_params_from_request(request, project)
        except ValidationError as exc:
            return Response({'detail': six.text_type(exc)}, status=400)

        # bulk mutations are limited to 1000 items
        # TODO(dcramer): it'd be nice to support more than this, but its
        # a bit too complicated right now
        query_kwargs['limit'] = 1000

        cursor_result = search.query(**query_kwargs)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    queryset = Group.objects.filter(
        id__in=group_ids,
    )

    statusDetails = result.pop('statusDetails', result)
    status = result.get('status')
    if status in ('resolved', 'resolvedInNextRelease'):
        if status == 'resolvedInNextRelease' or statusDetails.get('inNextRelease'):
            # NOTE(review): unguarded [0] — raises IndexError if the project
            # has no releases; confirm callers guarantee at least one release.
            release = Release.objects.filter(
                projects=project,
                organization_id=project.organization_id,
            ).order_by('-date_added')[0]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': '',
            }
            status_details = {
                'inNextRelease': True,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_status = GroupResolution.Status.pending
        elif statusDetails.get('inRelease'):
            release = statusDetails['inRelease']
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': release.version,
            }
            status_details = {
                'inRelease': release.version,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_status = GroupResolution.Status.resolved
        else:
            release = None
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()

        for group in group_list:
            with transaction.atomic():
                if release:
                    resolution_params = {
                        'release': release,
                        'type': res_type,
                        'status': res_status,
                        'actor_id': request.user.id
                        if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group,
                        defaults=resolution_params,
                    )
                    if not created:
                        resolution.update(
                            datetime=timezone.now(),
                            **resolution_params
                        )
                else:
                    resolution = None

                affected = Group.objects.filter(
                    id=group.id,
                ).update(
                    status=GroupStatus.RESOLVED,
                    resolved_at=now,
                )
                # without a resolution row, the status flip itself signals
                # "newly resolved" for the activity creation below
                if not resolution:
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                self._subscribe_and_assign_issue(
                    acting_user, group, result
                )

                if created:
                    activity = Activity.objects.create(
                        project=group.project,
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            issue_resolved_in_release.send(
                group=group,
                project=project,
                sender=acting_user,
            )

        result.update({
            'status': 'resolved',
            'statusDetails': status_details,
        })

    elif status:
        new_status = STATUS_CHOICES[result['status']]

        with transaction.atomic():
            happened = queryset.exclude(
                status=new_status,
            ).update(
                status=new_status,
            )

            GroupResolution.objects.filter(
                group__in=group_ids,
            ).delete()

            if new_status == GroupStatus.IGNORED:
                ignore_duration = (
                    statusDetails.pop('ignoreDuration', None) or
                    statusDetails.pop('snoozeDuration', None)
                ) or None
                ignore_count = statusDetails.pop('ignoreCount', None) or None
                ignore_window = statusDetails.pop('ignoreWindow', None) or None
                ignore_user_count = statusDetails.pop('ignoreUserCount', None) or None
                ignore_user_window = statusDetails.pop('ignoreUserWindow', None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration,
                        )
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        # snapshot current counters so snooze thresholds are
                        # measured relative to now
                        if ignore_count and not ignore_window:
                            state['times_seen'] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state['users_seen'] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                'until': ignore_until,
                                'count': ignore_count,
                                'window': ignore_window,
                                'user_count': ignore_user_count,
                                'user_window': ignore_user_window,
                                'state': state,
                                'actor_id': request.user.id
                                if request.user.is_authenticated() else None,
                            }
                        )
                        result['statusDetails'] = {
                            'ignoreCount': ignore_count,
                            'ignoreUntil': ignore_until,
                            'ignoreUserCount': ignore_user_count,
                            'ignoreUserWindow': ignore_user_window,
                            'ignoreWindow': ignore_window,
                            'actor': serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    # no snooze conditions supplied: a plain ignore clears
                    # any existing snooze rows
                    GroupSnooze.objects.filter(
                        group__in=group_ids,
                    ).delete()
                    ignore_until = None
                    result['statusDetails'] = {}
            else:
                result['statusDetails'] = {}

        # only record activity when at least one row actually changed status
        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    'ignoreCount': ignore_count,
                    'ignoreDuration': ignore_duration,
                    'ignoreUntil': ignore_until,
                    'ignoreUserCount': ignore_user_count,
                    'ignoreUserWindow': ignore_user_window,
                    'ignoreWindow': ignore_window,
                }

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=group.project,
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

    if 'assignedTo' in result:
        if result['assignedTo']:
            for group in group_list:
                GroupAssignee.objects.assign(group, result['assignedTo'], acting_user)

                # assignment implicitly subscribes the assignee unless an
                # explicit subscription flag accompanies a self-assignment
                if 'isSubscribed' not in result or result['assignedTo'] != request.user:
                    GroupSubscription.objects.subscribe(
                        group=group,
                        user=result['assignedTo'],
                        reason=GroupSubscriptionReason.assigned,
                    )
            result['assignedTo'] = serialize(result['assignedTo'])
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    # "seen" state is only tracked for project members
    if result.get('hasSeen') and project.member_set.filter(user=acting_user).exists():
        for group in group_list:
            instance, created = create_or_update(
                GroupSeen,
                group=group,
                user=acting_user,
                project=group.project,
                values={
                    'last_seen': timezone.now(),
                }
            )
    elif result.get('hasSeen') is False:
        GroupSeen.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    if result.get('isBookmarked'):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project,
                group=group,
                user=acting_user,
            )
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark,
            )
    elif result.get('isBookmarked') is False:
        GroupBookmark.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for rich rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get('isSubscribed') in (True, False):
        is_subscribed = result['isSubscribed']
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project,
                values={
                    'is_active': is_subscribed,
                    'reason': GroupSubscriptionReason.unknown,
                },
            )

        result['subscriptionDetails'] = {
            'reason': SUBSCRIPTION_REASON_MAP.get(
                GroupSubscriptionReason.unknown,
                'unknown',
            ),
        }

    if result.get('isPublic'):
        queryset.update(is_public=True)
        for group in group_list:
            if group.is_public:
                continue
            group.is_public = True
            Activity.objects.create(
                project=group.project,
                group=group,
                type=Activity.SET_PUBLIC,
                user=acting_user,
            )
    elif result.get('isPublic') is False:
        queryset.update(is_public=False)
        for group in group_list:
            if not group.is_public:
                continue
            group.is_public = False
            Activity.objects.create(
                project=group.project,
                group=group,
                type=Activity.SET_PRIVATE,
                user=acting_user,
            )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get('merge') and len(group_list) > 1:
        # the most-seen group becomes the merge target; the rest are queued
        # as children to be folded into it asynchronously
        primary_group = sorted(group_list, key=lambda x: -x.times_seen)[0]
        children = []
        transaction_id = uuid4().hex
        for group in group_list:
            if group == primary_group:
                continue
            children.append(group)
            group.update(status=GroupStatus.PENDING_MERGE)
            merge_group.delay(
                from_object_id=group.id,
                to_object_id=primary_group.id,
                transaction_id=transaction_id,
            )

        Activity.objects.create(
            project=primary_group.project,
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={
                'issues': [{'id': c.id} for c in children],
            },
        )

        result['merge'] = {
            'parent': six.text_type(primary_group.id),
            'children': [six.text_type(g.id) for g in children],
        }

    return Response(result)
def update_groups(
    request: Request,
    group_ids: Sequence[Group],
    projects: Sequence[Project],
    organization_id: int,
    search_fn: SearchFunction | None,
    user: User | None = None,
    data: Mapping[str, Any] | None = None,
) -> Response:
    """Bulk-mutate a set of groups (issues) across one or more projects.

    Applies the mutations described by the validated payload: status changes
    (resolve in release/commit/next-release, ignore, unresolve), discard,
    assignment, hasSeen / isBookmarked / isSubscribed flags, share visibility
    (isPublic), merging, and inbox moves. Returns a Response whose body
    echoes the applied changes (``result``).

    When ``group_ids`` is empty and ``search_fn`` is given, the target groups
    are taken from the search results (capped at BULK_MUTATION_LIMIT).
    """
    # If `user` and `data` are passed as parameters then they should override
    # the values in `request`.
    user = user or request.user
    data = data or request.data
    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id,
            project__in=projects,
            id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            # Nothing matched within scope: succeed with no mutation.
            return Response(status=204)
    else:
        group_list = None

    serializer = None
    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    # Validation runs once per project; only the last serializer's
    # validated_data is used below.
    for project in projects:
        serializer = GroupValidator(
            data=data,
            partial=True,
            context={
                "project": project,
                "organization": project.organization,
                "access": getattr(request, "access", None),
            },
        )
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)
    if serializer is None:
        # NOTE(review): no projects means a bare `return` (None, not a
        # Response) — looks unintended given the return type; confirm callers.
        return

    result = dict(serializer.validated_data)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    acting_user = user if user.is_authenticated else None

    if search_fn and not group_ids:
        try:
            cursor_result, _ = search_fn({
                "limit": BULK_MUTATION_LIMIT,
                "paginator_options": {
                    "max_limit": BULK_MUTATION_LIMIT
                },
            })
        except ValidationError as exc:
            return Response({"detail": str(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(id__in=group_ids)

    discard = result.get("discard")
    if discard:
        # Discard short-circuits every other mutation.
        return handle_discard(request, list(queryset), projects, acting_user)

    # NOTE: the default is `result` itself, so when no statusDetails key is
    # present the top-level payload doubles as the details mapping.
    statusDetails = result.pop("statusDetails", result)
    status = result.get("status")
    release = None
    commit = None
    res_type = None
    activity_type = None
    activity_data: MutableMapping[str, Any | None] | None = None
    if status in ("resolved", "resolvedInNextRelease"):
        res_status = None
        if status == "resolvedInNextRelease" or statusDetails.get(
                "inNextRelease"):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response(
                    {
                        "detail":
                        "Cannot set resolved in next release for multiple projects."
                    },
                    status=400,
                )
            # Use the explicitly supplied release, else the latest release of
            # the single project (ordered by date_released falling back to
            # date_added).
            release = (
                statusDetails.get("inNextRelease") or Release.objects.filter(
                    projects=projects[0],
                    organization_id=projects[0].organization_id).extra(
                        select={
                            "sort": "COALESCE(date_released, date_added)"
                        }).order_by("-sort")[0])
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": ""
            }
            status_details = {
                "inNextRelease": True,
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = "in_next_release"
            res_status = GroupResolution.Status.pending
        elif statusDetails.get("inRelease"):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response(
                    {
                        "detail":
                        "Cannot set resolved in release for multiple projects."
                    },
                    status=400)
            release = statusDetails["inRelease"]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                "version": release.version
            }
            status_details = {
                "inRelease": release.version,
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = "in_release"
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get("inCommit"):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response(
                    {
                        "detail":
                        "Cannot set resolved in commit for multiple projects."
                    },
                    status=400)
            commit = statusDetails["inCommit"]
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {"commit": commit.id}
            status_details = {
                "inCommit": serialize(commit, user),
                "actor": serialize(extract_lazy_object(user), user),
            }
            res_type_str = "in_commit"
        else:
            # Plain "resolved" with no release/commit association.
            res_type_str = "now"
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr("group.resolved", instance=res_type_str,
                     skip_internal=True)

        # if we've specified a commit, let's see if its already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = (Release.objects.filter(
                    projects__in=projects,
                    releasecommit__commit=commit).extra(
                        select={
                            "sort": "COALESCE(date_released, date_added)"
                        }).order_by("-sort")[0])
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                # Commit not in any release yet; keep the bare commit link.
                release = None
        for group in group_list:
            with transaction.atomic():
                resolution = None
                created = None
                if release:
                    resolution_params = {
                        "release": release,
                        "type": res_type,
                        "status": res_status,
                        "actor_id": user.id if user.is_authenticated else None,
                    }

                    # We only set `current_release_version` if GroupResolution type is
                    # in_next_release, because we need to store information about the latest/most
                    # recent release that was associated with a group and that is required for
                    # release comparisons (i.e. handling regressions)
                    if res_type == GroupResolution.Type.in_next_release:
                        # Check if semver versioning scheme is followed
                        follows_semver = follows_semver_versioning_scheme(
                            org_id=group.organization.id,
                            project_id=group.project.id,
                            release_version=release.version,
                        )

                        current_release_version = get_current_release_version_of_group(
                            group=group, follows_semver=follows_semver)
                        if current_release_version:
                            resolution_params.update({
                                "current_release_version":
                                current_release_version
                            })

                            # Sets `current_release_version` for activity, since there is no point
                            # waiting for when a new release is created i.e.
                            # clear_expired_resolutions task to be run.
                            # Activity should look like "... resolved in version
                            # >current_release_version" in the UI
                            if follows_semver:
                                activity_data.update({
                                    "current_release_version":
                                    current_release_version
                                })

                                # In semver projects, and thereby semver releases, we determine
                                # resolutions by comparing against an expression rather than a
                                # specific release (i.e. >current_release_version). Consequently,
                                # at this point we can consider this GroupResolution as resolved
                                # in release
                                resolution_params.update({
                                    "type":
                                    GroupResolution.Type.in_release,
                                    "status":
                                    GroupResolution.Status.resolved,
                                })
                            else:
                                # If we already know the `next` release in date based ordering
                                # when clicking on `resolvedInNextRelease` because it is already
                                # been released, there is no point in setting GroupResolution to
                                # be of type in_next_release but rather in_release would suffice
                                try:
                                    # Get current release object from current_release_version
                                    current_release_obj = Release.objects.get(
                                        version=current_release_version,
                                        organization_id=projects[0].
                                        organization_id,
                                    )

                                    date_order_q = Q(
                                        date_added__gt=current_release_obj.
                                        date_added) | Q(
                                            date_added=current_release_obj.
                                            date_added,
                                            id__gt=current_release_obj.id,
                                        )

                                    # Find the next release after the current_release_version
                                    # i.e. the release that resolves the issue
                                    resolved_in_release = (
                                        Release.objects.filter(
                                            date_order_q,
                                            projects=projects[0],
                                            organization_id=projects[0].
                                            organization_id,
                                        ).extra(
                                            select={
                                                "sort":
                                                "COALESCE(date_released, date_added)"
                                            }).order_by("sort",
                                                        "id")[:1].get())

                                    # If we get here, we assume it exists and so we update
                                    # GroupResolution and Activity
                                    resolution_params.update({
                                        "release":
                                        resolved_in_release,
                                        "type":
                                        GroupResolution.Type.in_release,
                                        "status":
                                        GroupResolution.Status.resolved,
                                    })
                                    activity_data.update({
                                        "version":
                                        resolved_in_release.version
                                    })
                                except Release.DoesNotExist:
                                    # If it gets here, it means we don't know the upcoming
                                    # release yet because it does not exist, and so we should
                                    # fall back to our current model
                                    ...

                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group, defaults=resolution_params)
                    if not created:
                        resolution.update(datetime=timezone.now(),
                                          **resolution_params)

                if commit:
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(id=group.id).update(
                    status=GroupStatus.RESOLVED, resolved_at=now)
                if not resolution:
                    # With no GroupResolution row, "created" means the status
                    # update actually flipped at least one row.
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now
                remove_group_from_inbox(group,
                                        action=GroupInboxRemoveAction.RESOLVED,
                                        user=acting_user)
                result["inbox"] = None

                assigned_to = self_subscribe_and_assign_issue(
                    acting_user, group)
                if assigned_to is not None:
                    result["assignedTo"] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    record_group_history_from_activity_type(group,
                                                            activity_type,
                                                            actor=acting_user)

                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            # Signals/tasks fire outside the atomic block, per group.
            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(kwargs={
                "project_id": group.project_id,
                "group_id": group.id
            })

        result.update({"status": "resolved", "statusDetails": status_details})

    elif status:
        new_status = STATUS_UPDATE_CHOICES[result["status"]]
        ignore_duration = None
        ignore_count = None
        ignore_window = None
        ignore_user_count = None
        ignore_user_window = None
        ignore_until = None

        with transaction.atomic():
            # `happened` is the number of rows whose status actually changed.
            happened = queryset.exclude(status=new_status).update(
                status=new_status)

            GroupResolution.objects.filter(group__in=group_ids).delete()
            if new_status == GroupStatus.IGNORED:
                metrics.incr("group.ignored", skip_internal=True)
                # NOTE(review): iterates group *ids*, not Group objects —
                # verify remove_group_from_inbox accepts an id here.
                for group in group_ids:
                    remove_group_from_inbox(
                        group,
                        action=GroupInboxRemoveAction.IGNORED,
                        user=acting_user)
                result["inbox"] = None

                ignore_duration = (statusDetails.pop("ignoreDuration", None)
                                   or statusDetails.pop(
                                       "snoozeDuration", None)) or None
                ignore_count = statusDetails.pop("ignoreCount", None) or None
                ignore_window = statusDetails.pop("ignoreWindow", None) or None
                ignore_user_count = statusDetails.pop("ignoreUserCount",
                                                      None) or None
                ignore_user_window = statusDetails.pop("ignoreUserWindow",
                                                       None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration)
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state["times_seen"] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state["users_seen"] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                "until":
                                ignore_until,
                                "count":
                                ignore_count,
                                "window":
                                ignore_window,
                                "user_count":
                                ignore_user_count,
                                "user_window":
                                ignore_user_window,
                                "state":
                                state,
                                "actor_id":
                                user.id if user.is_authenticated else None,
                            },
                        )
                    result["statusDetails"] = {
                        "ignoreCount": ignore_count,
                        "ignoreUntil": ignore_until,
                        "ignoreUserCount": ignore_user_count,
                        "ignoreUserWindow": ignore_user_window,
                        "ignoreWindow": ignore_window,
                        "actor": serialize(extract_lazy_object(user), user),
                    }
                else:
                    # Un-snooze when ignoring without any snooze condition.
                    GroupSnooze.objects.filter(group__in=group_ids).delete()
                    ignore_until = None
                    result["statusDetails"] = {}
            else:
                result["statusDetails"] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}

                for group in group_list:
                    if group.status == GroupStatus.IGNORED:
                        issue_unignored.send_robust(
                            project=project_lookup[group.project_id],
                            user=acting_user,
                            group=group,
                            transition_type="manual",
                            sender=update_groups,
                        )
                    else:
                        issue_unresolved.send_robust(
                            project=project_lookup[group.project_id],
                            user=acting_user,
                            group=group,
                            transition_type="manual",
                            sender=update_groups,
                        )
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    "ignoreCount": ignore_count,
                    "ignoreDuration": ignore_duration,
                    "ignoreUntil": ignore_until,
                    "ignoreUserCount": ignore_user_count,
                    "ignoreUserWindow": ignore_user_window,
                    "ignoreWindow": ignore_window,
                }

                groups_by_project_id = defaultdict(list)
                for group in group_list:
                    groups_by_project_id[group.project_id].append(group)

                for project in projects:
                    project_groups = groups_by_project_id.get(project.id)
                    if project_groups:
                        issue_ignored.send_robust(
                            project=project,
                            user=acting_user,
                            group_list=project_groups,
                            activity_data=activity_data,
                            sender=update_groups,
                        )

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                record_group_history_from_activity_type(group,
                                                        activity_type,
                                                        actor=acting_user)

                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

                if new_status == GroupStatus.UNRESOLVED:
                    kick_off_status_syncs.apply_async(kwargs={
                        "project_id": group.project_id,
                        "group_id": group.id
                    })

    # XXX (ahmed): hack to get the activities to work properly on issues page. Not sure of
    # what performance impact this might have & this possibly should be moved else where
    try:
        if len(group_list) == 1:
            if res_type in (
                    GroupResolution.Type.in_next_release,
                    GroupResolution.Type.in_release,
            ):
                result["activity"] = serialize(
                    Activity.objects.get_activities_for_group(
                        group=group_list[0], num=ACTIVITIES_COUNT),
                    acting_user,
                )
    except UnboundLocalError:
        pass

    if "assignedTo" in result:
        assigned_actor = result["assignedTo"]
        assigned_by = (data.get("assignedBy")
                       if data.get("assignedBy") in [
                           "assignee_selector", "suggested_assignee"
                       ] else None)
        if assigned_actor:
            for group in group_list:
                resolved_actor = assigned_actor.resolve()

                assignment = GroupAssignee.objects.assign(
                    group, resolved_actor, acting_user)
                analytics.record(
                    "manual.issue_assignment",
                    organization_id=project_lookup[
                        group.project_id].organization_id,
                    project_id=group.project_id,
                    group_id=group.id,
                    assigned_by=assigned_by,
                    had_to_deassign=assignment["updated_assignment"],
                )
            result["assignedTo"] = serialize(assigned_actor.resolve(),
                                             acting_user, ActorSerializer())
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)
                analytics.record(
                    "manual.issue_assignment",
                    organization_id=project_lookup[
                        group.project_id].organization_id,
                    project_id=group.project_id,
                    group_id=group.id,
                    assigned_by=assigned_by,
                    had_to_deassign=True,
                )

    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists()
        for project in projects
    }
    if result.get("hasSeen"):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={"last_seen": timezone.now()},
                )
    elif result.get("hasSeen") is False:
        GroupSeen.objects.filter(group__in=group_ids,
                                 user=acting_user).delete()

    if result.get("isBookmarked"):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user)
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark)
    elif result.get("isBookmarked") is False:
        GroupBookmark.objects.filter(group__in=group_ids,
                                     user=acting_user).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for rich rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get("isSubscribed") in (True, False):
        is_subscribed = result["isSubscribed"]
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={
                    "is_active": is_subscribed,
                    "reason": GroupSubscriptionReason.unknown
                },
            )

        result["subscriptionDetails"] = {
            "reason":
            SUBSCRIPTION_REASON_MAP.get(GroupSubscriptionReason.unknown,
                                        "unknown")
        }

    if "isPublic" in result:
        # We always want to delete an existing share, because triggering
        # an isPublic=True even when it's already public, should trigger
        # regenerating.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result["shareId"] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get("isPublic"):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user)
            if created:
                result["shareId"] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady like it should be its own endpoint.
    if result.get("merge") and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response({
                "detail":
                "Merging across multiple projects is not supported"
            })
        # Primary group = most-seen (ties broken by highest id).
        group_list_by_times_seen = sorted(group_list,
                                          key=lambda g: (g.times_seen, g.id),
                                          reverse=True)
        primary_group, groups_to_merge = group_list_by_times_seen[
            0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(primary_group.project_id,
                                                    group_ids_to_merge,
                                                    primary_group.id)

        Group.objects.filter(id__in=group_ids_to_merge).update(
            status=GroupStatus.PENDING_MERGE)

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={"issues": [{
                "id": c.id
            } for c in groups_to_merge]},
        )

        result["merge"] = {
            "parent": str(primary_group.id),
            "children": [str(g.id) for g in groups_to_merge],
        }

    # Support moving groups in or out of the inbox
    inbox = result.get("inbox", None)
    if inbox is not None:
        if inbox:
            for group in group_list:
                add_group_to_inbox(group, GroupInboxReason.MANUAL)
        elif not inbox:
            for group in group_list:
                remove_group_from_inbox(
                    group,
                    action=GroupInboxRemoveAction.MARK_REVIEWED,
                    user=acting_user,
                    referrer=request.META.get("HTTP_REFERER"),
                )
                issue_mark_reviewed.send_robust(
                    project=project_lookup[group.project_id],
                    user=acting_user,
                    group=group,
                    sender=update_groups,
                )
        result["inbox"] = inbox

    return Response(result)
def post(self, request, work_batch_id):
    """Create a note on a work batch.

    Validates the payload with ``NoteSerializer``, rejects a duplicate note
    from the same author within the last hour, subscribes mentioned users,
    then records the note as an ``Activity`` and fires its notification.
    """
    serializer = NoteSerializer(data=request.data,
                                context={'work_batch': work_batch_id})
    if not serializer.is_valid():
        return Response(serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST)

    note_data = dict(serializer.validated_data)
    mentions = note_data.pop('mentions', [])

    # Same author + identical payload within the past hour counts as a dupe.
    one_hour_ago = timezone.now() - timedelta(hours=1)
    is_duplicate = Activity.objects.filter(
        work_batch_id=work_batch_id,
        type=Activity.NOTE,
        user=request.user,
        data=note_data,  # TODO: Hash instead?
        datetime__gte=one_hour_ago,
    ).exists()
    if is_duplicate:
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST)

    resolved = Actor.resolve_many(mentions)
    actor_mentions = seperate_resolved_actors(resolved)

    for mentioned_user in actor_mentions.get('users'):
        GroupSubscription.objects.subscribe(
            group=1,
            user=mentioned_user,
            reason=GroupSubscriptionReason.mentioned,
        )

    # TODO: also bulk-subscribe members of mentioned teams (excluding users
    # already subscribed above) with GroupSubscriptionReason.team_mentioned,
    # and stop hardcoding the organization id when doing so.

    # TODO: Org id!
    note = Activity.objects.create(
        work_batch_id=work_batch_id,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=note_data,
        project_id=1,  # TODO: should not be required
    )
    note.send_notification()

    return Response(serialize(note, request.user), status=201)
def post(self, request, group):
    """Create a note on a group, handling user and team mentions.

    The author is subscribed with reason ``comment``; directly mentioned
    users with reason ``mentioned``; active members of mentioned teams
    (who were not directly mentioned) with reason ``team_mentioned``.
    Duplicate notes from the same author within an hour are rejected.
    """
    serializer = NoteSerializer(data=request.DATA, context={'group': group})
    if not serializer.is_valid():
        return Response(serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST)

    note_data = dict(serializer.object)
    mentions = note_data.pop('mentions', [])

    one_hour_ago = timezone.now() - timedelta(hours=1)
    is_duplicate = Activity.objects.filter(
        group=group,
        type=Activity.NOTE,
        user=request.user,
        data=note_data,
        datetime__gte=one_hour_ago,
    ).exists()
    if is_duplicate:
        return Response(
            '{"detail": "You have already posted that comment."}',
            status=status.HTTP_400_BAD_REQUEST)

    # Commenting subscribes the author.
    GroupSubscription.objects.subscribe(
        group=group,
        user=request.user,
        reason=GroupSubscriptionReason.comment,
    )

    resolved_actors = seperate_resolved_actors(Actor.resolve_many(mentions))

    mentioned_users = resolved_actors.get('users')
    for mentioned_user in mentioned_users:
        GroupSubscription.objects.subscribe(
            group=group,
            user=mentioned_user,
            reason=GroupSubscriptionReason.mentioned,
        )

    # Members of mentioned teams are subscribed too, except anyone already
    # subscribed above via a direct mention.
    mentioned_teams = resolved_actors.get('teams')
    team_members = User.objects.filter(
        sentry_orgmember_set__organization_id=group.project.organization_id,
        sentry_orgmember_set__organizationmemberteam__team__in=mentioned_teams,
        sentry_orgmember_set__organizationmemberteam__is_active=True,
        is_active=True,
    ).exclude(id__in={u.id for u in mentioned_users})
    for member in team_members:
        GroupSubscription.objects.subscribe(
            group=group,
            user=member,
            reason=GroupSubscriptionReason.team_mentioned,
        )

    note = Activity.objects.create(
        group=group,
        project=group.project,
        type=Activity.NOTE,
        user=extract_lazy_object(request.user),
        data=note_data,
    )
    note.send_notification()

    return Response(serialize(note, request.user), status=201)
def put(self, request, project):
    """
    Bulk Mutate a List of Issues
    ````````````````````````````

    Bulk mutate various attributes on issues.  The list of issues
    to modify is given through the `id` query parameter.  It is repeated
    for each issue that should be modified.

    - For non-status updates, the `id` query parameter is required.
    - For status updates, the `id` query parameter may be omitted
      for a batch "update all" query.
    - An optional `status` query parameter may be used to restrict
      mutations to only events with the given status.

    The following attributes can be modified and are supplied as
    JSON object in the body:

    If any ids are out of scope this operation will succeed without
    any data mutation.

    :qparam int id: a list of IDs of the issues to be mutated.  This
                    parameter shall be repeated for each issue.  It
                    is optional only if a status is mutated in which
                    case an implicit `update all` is assumed.
    :qparam string status: optionally limits the query to issues of the
                           specified status.  Valid values are
                           ``"resolved"``, ``"unresolved"`` and
                           ``"ignored"``.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :param string status: the new status for the issues.  Valid values
                          are ``"resolved"``, ``resolvedInNextRelease``,
                          ``"unresolved"``, and ``"ignored"``.
    :param int ignoreDuration: the number of minutes to ignore this issue.
    :param boolean isPublic: sets the issue to public or private.
    :param boolean merge: allows to merge or unmerge different issues.
    :param string assignedTo: the username of the user that should be
                              assigned to this issue.
    :param boolean hasSeen: in case this API call is invoked with a user
                            context this allows changing of the flag
                            that indicates if the user has seen the
                            event.
    :param boolean isBookmarked: in case this API call is invoked with a
                                 user context this allows changing of
                                 the bookmark flag.
    :auth: required
    """
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(project=project, id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            # Nothing in scope: succeed without mutating anything.
            return Response(status=204)
    else:
        group_list = None

    serializer = GroupValidator(
        data=request.DATA,
        partial=True,
        context={'project': project},
    )
    if not serializer.is_valid():
        return Response(serializer.errors, status=400)

    result = dict(serializer.object)

    acting_user = request.user if request.user.is_authenticated() else None

    if not group_ids:
        # No explicit ids: target the current search result set instead.
        try:
            query_kwargs = self._build_query_params_from_request(
                request, project)
        except ValidationError as exc:
            return Response({'detail': six.text_type(exc)}, status=400)

        # bulk mutations are limited to 1000 items
        # TODO(dcramer): it'd be nice to support more than this, but its
        # a bit too complicated right now
        query_kwargs['limit'] = 1000

        cursor_result = search.query(**query_kwargs)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    is_bulk = len(group_ids) > 1

    queryset = Group.objects.filter(id__in=group_ids, )

    # NOTE: the default is `result` itself, so when no statusDetails key is
    # present the top-level payload doubles as the details mapping.
    statusDetails = result.pop('statusDetails', result)
    status = result.get('status')
    if status in ('resolved', 'resolvedInNextRelease'):
        if status == 'resolvedInNextRelease' or statusDetails.get(
                'inNextRelease'):
            # "Next release" is approximated by the most recently added
            # release of the project.
            release = Release.objects.filter(
                projects=project,
                organization_id=project.organization_id,
            ).order_by('-date_added')[0]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': '',
            }
            status_details = {
                'inNextRelease': True,
                'actor': serialize(extract_lazy_object(request.user),
                                   request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_status = GroupResolution.Status.pending
        elif statusDetails.get('inRelease'):
            release = statusDetails['inRelease']
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': release.version,
            }
            status_details = {
                'inRelease': release.version,
                'actor': serialize(extract_lazy_object(request.user),
                                   request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_status = GroupResolution.Status.resolved
        else:
            # Plain "resolved" with no release association.
            release = None
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()

        for group in group_list:
            with transaction.atomic():
                if release:
                    resolution_params = {
                        'release': release,
                        'type': res_type,
                        'status': res_status,
                        'actor_id': request.user.id
                        if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group,
                        defaults=resolution_params,
                    )
                    if not created:
                        resolution.update(datetime=timezone.now(),
                                          **resolution_params)
                else:
                    resolution = None

                affected = Group.objects.filter(id=group.id, ).update(
                    status=GroupStatus.RESOLVED,
                    resolved_at=now,
                )
                if not resolution:
                    # With no GroupResolution row, "created" means the status
                    # update actually flipped at least one row.
                    created = affected

                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                self._subscribe_and_assign_issue(acting_user, group, result)

                if created:
                    activity = Activity.objects.create(
                        project=group.project,
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            # NOTE(review): fired per group after the atomic block, even for
            # plain "resolved" with no release — confirm signal receivers
            # tolerate that.
            issue_resolved_in_release.send(
                group=group,
                project=project,
                sender=acting_user,
            )

        result.update({
            'status': 'resolved',
            'statusDetails': status_details,
        })

    elif status:
        new_status = STATUS_CHOICES[result['status']]

        with transaction.atomic():
            # `happened` is the number of rows whose status actually changed.
            happened = queryset.exclude(status=new_status, ).update(
                status=new_status, )

            GroupResolution.objects.filter(group__in=group_ids, ).delete()

            if new_status == GroupStatus.IGNORED:
                ignore_duration = (
                    statusDetails.pop('ignoreDuration', None) or
                    statusDetails.pop('snoozeDuration', None)) or None
                ignore_count = statusDetails.pop('ignoreCount', None) or None
                ignore_window = statusDetails.pop('ignoreWindow',
                                                  None) or None
                ignore_user_count = statusDetails.pop(
                    'ignoreUserCount', None) or None
                ignore_user_window = statusDetails.pop(
                    'ignoreUserWindow', None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration, )
                    else:
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            state['times_seen'] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state['users_seen'] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                'until':
                                ignore_until,
                                'count':
                                ignore_count,
                                'window':
                                ignore_window,
                                'user_count':
                                ignore_user_count,
                                'user_window':
                                ignore_user_window,
                                'state':
                                state,
                                'actor_id':
                                request.user.id
                                if request.user.is_authenticated() else None,
                            })
                    result['statusDetails'] = {
                        'ignoreCount': ignore_count,
                        'ignoreUntil': ignore_until,
                        'ignoreUserCount': ignore_user_count,
                        'ignoreUserWindow': ignore_user_window,
                        'ignoreWindow': ignore_window,
                        'actor': serialize(extract_lazy_object(request.user),
                                           request.user),
                    }
                else:
                    # Un-snooze when ignoring without any snooze condition.
                    GroupSnooze.objects.filter(group__in=group_ids, ).delete()
                    ignore_until = None
                    result['statusDetails'] = {}
            else:
                result['statusDetails'] = {}

        if group_list and happened:
            if new_status == GroupStatus.UNRESOLVED:
                activity_type = Activity.SET_UNRESOLVED
                activity_data = {}
            elif new_status == GroupStatus.IGNORED:
                activity_type = Activity.SET_IGNORED
                activity_data = {
                    'ignoreCount': ignore_count,
                    'ignoreDuration': ignore_duration,
                    'ignoreUntil': ignore_until,
                    'ignoreUserCount': ignore_user_count,
                    'ignoreUserWindow': ignore_user_window,
                    'ignoreWindow': ignore_window,
                }

            for group in group_list:
                group.status = new_status

                activity = Activity.objects.create(
                    project=group.project,
                    group=group,
                    type=activity_type,
                    user=acting_user,
                    data=activity_data,
                )
                # TODO(dcramer): we need a solution for activity rollups
                # before sending notifications on bulk changes
                if not is_bulk:
                    if acting_user:
                        GroupSubscription.objects.subscribe(
                            user=acting_user,
                            group=group,
                            reason=GroupSubscriptionReason.status_change,
                        )
                    activity.send_notification()

    if 'assignedTo' in result:
        if result['assignedTo']:
            for group in group_list:
                GroupAssignee.objects.assign(group, result['assignedTo'],
                                             acting_user)

                # Assignment subscribes the assignee, unless the request also
                # sets an explicit subscription or the assignee is the actor.
                if 'isSubscribed' not in result or result[
                        'assignedTo'] != request.user:
                    GroupSubscription.objects.subscribe(
                        group=group,
                        user=result['assignedTo'],
                        reason=GroupSubscriptionReason.assigned,
                    )
            result['assignedTo'] = serialize(result['assignedTo'])
        else:
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    if result.get('hasSeen') and project.member_set.filter(
            user=acting_user).exists():
        for group in group_list:
            instance, created = create_or_update(GroupSeen,
                                                 group=group,
                                                 user=acting_user,
                                                 project=group.project,
                                                 values={
                                                     'last_seen':
                                                     timezone.now(),
                                                 })
    elif result.get('hasSeen') is False:
        GroupSeen.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    if result.get('isBookmarked'):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project,
                group=group,
                user=acting_user,
            )
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark,
            )
    elif result.get('isBookmarked') is False:
        GroupBookmark.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for rich rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get('isSubscribed') in (True, False):
        is_subscribed = result['isSubscribed']
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project,
                values={
                    'is_active': is_subscribed,
                    'reason': GroupSubscriptionReason.unknown,
                },
            )

        result['subscriptionDetails'] = {
            'reason':
            SUBSCRIPTION_REASON_MAP.get(
                GroupSubscriptionReason.unknown,
                'unknown',
            ),
        }

    if result.get('isPublic'):
        queryset.update(is_public=True)
        for group in group_list:
            if group.is_public:
                continue
            group.is_public = True
            Activity.objects.create(
                project=group.project,
                group=group,
                type=Activity.SET_PUBLIC,
                user=acting_user,
            )
    elif result.get('isPublic') is False:
        queryset.update(is_public=False)
        for group in group_list:
            if not group.is_public:
                continue
            group.is_public = False
            Activity.objects.create(
                project=group.project,
                group=group,
                type=Activity.SET_PRIVATE,
                user=acting_user,
            )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get('merge') and len(group_list) > 1:
        # Primary group = the most-seen one; all others merge into it.
        primary_group = sorted(group_list, key=lambda x: -x.times_seen)[0]
        children = []
        transaction_id = uuid4().hex
        for group in group_list:
            if group == primary_group:
                continue
            children.append(group)
            group.update(status=GroupStatus.PENDING_MERGE)
            merge_group.delay(
                from_object_id=group.id,
                to_object_id=primary_group.id,
                transaction_id=transaction_id,
            )

        Activity.objects.create(
            project=primary_group.project,
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={
                'issues': [{
                    'id': c.id
                } for c in children],
            },
        )

        result['merge'] = {
            'parent': six.text_type(primary_group.id),
            'children': [six.text_type(g.id) for g in children],
        }

    return Response(result)
def update_groups(request, projects, organization_id, search_fn):
    """Apply a bulk mutation to a set of issue groups.

    Groups are selected either by explicit ``?id=`` query parameters or, when
    none are given, by running ``search_fn`` (bulk mutation, capped at 1000).
    The validated request payload drives a series of independent mutations:
    status changes (resolve / resolve-in-release / -next-release / -commit,
    ignore, unresolve), assignment, seen/bookmark/subscribe flags, share
    (isPublic) handling, and merging.  Returns a DRF ``Response`` whose body
    echoes the applied changes.

    :param request:         HTTP request; ``request.DATA`` holds the mutation
                            payload, ``request.GET['id']`` the target group ids.
    :param projects:        projects the caller may operate on; several
                            mutations are rejected with a 400 when more than
                            one project is involved.
    :param organization_id: owning organization, used to scope group lookup.
    :param search_fn:       callable taking query-param overrides and returning
                            ``(cursor_result, _)``; used when no ids are given.
    """
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(
            project__organization_id=organization_id,
            project__in=projects,
            id__in=group_ids,
        )
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
        if not group_ids:
            # none of the requested ids exist in scope: nothing to do
            return Response(status=204)
    else:
        group_list = None

    # TODO(jess): We may want to look into refactoring GroupValidator
    # to support multiple projects, but this is pretty complicated
    # because of the assignee validation. Punting on this for now.
    for project in projects:
        serializer = GroupValidator(
            data=request.DATA,
            partial=True,
            context={'project': project},
        )
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)
    # NOTE(review): `serializer` here is whichever instance validated the
    # LAST project in the loop; all projects were validated, but `result`
    # is built from that final serializer only — confirm this is intended.
    result = dict(serializer.object)

    # so we won't have to requery for each group
    project_lookup = {p.id: p for p in projects}

    # None (not AnonymousUser) when the request is unauthenticated
    acting_user = request.user if request.user.is_authenticated() else None

    if not group_ids:
        try:
            # bulk mutations are limited to 1000 items
            # TODO(dcramer): it'd be nice to support more than this, but its
            # a bit too complicated right now
            cursor_result, _ = search_fn({
                'limit': 1000,
                'paginator_options': {'max_limit': 1000},
            })
        except ValidationError as exc:
            return Response({'detail': six.text_type(exc)}, status=400)

        group_list = list(cursor_result)
        group_ids = [g.id for g in group_list]

    # bulk changes suppress per-group notifications below
    is_bulk = len(group_ids) > 1

    group_project_ids = {g.project_id for g in group_list}
    # filter projects down to only those that have groups in the search results
    projects = [p for p in projects if p.id in group_project_ids]

    queryset = Group.objects.filter(
        id__in=group_ids,
    )

    # discard short-circuits everything else
    discard = result.get('discard')
    if discard:
        return handle_discard(request, list(queryset), projects, acting_user)

    # NOTE(review): falls back to `result` itself when no nested
    # 'statusDetails' dict was sent, so detail keys may be read (and popped)
    # off the top-level payload — confirm this aliasing is intentional.
    statusDetails = result.pop('statusDetails', result)
    status = result.get('status')
    release = None
    commit = None

    if status in ('resolved', 'resolvedInNextRelease'):
        if status == 'resolvedInNextRelease' or statusDetails.get('inNextRelease'):
            # TODO(jess): We may want to support this for multi project, but punting on it for now
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in next release for multiple projects.'
                }, status=400)
            # explicit release from the payload, else the most recent release
            # (released date preferred over added date) for the sole project
            release = statusDetails.get('inNextRelease') or Release.objects.filter(
                projects=projects[0],
                organization_id=projects[0].organization_id,
            ).extra(select={
                'sort': 'COALESCE(date_released, date_added)',
            }).order_by('-sort')[0]
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': '',
            }
            status_details = {
                'inNextRelease': True,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_next_release
            res_type_str = 'in_next_release'
            res_status = GroupResolution.Status.pending
        elif statusDetails.get('inRelease'):
            # TODO(jess): We could update validation to check if release
            # applies to multiple projects, but I think we agreed to punt
            # on this for now
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in release for multiple projects.'
                }, status=400)
            release = statusDetails['inRelease']
            activity_type = Activity.SET_RESOLVED_IN_RELEASE
            activity_data = {
                # no version yet
                'version': release.version,
            }
            status_details = {
                'inRelease': release.version,
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            res_type = GroupResolution.Type.in_release
            res_type_str = 'in_release'
            res_status = GroupResolution.Status.resolved
        elif statusDetails.get('inCommit'):
            # TODO(jess): Same here, this is probably something we could do, but
            # punting for now.
            if len(projects) > 1:
                return Response({
                    'detail': 'Cannot set resolved in commit for multiple projects.'
                }, status=400)
            commit = statusDetails['inCommit']
            activity_type = Activity.SET_RESOLVED_IN_COMMIT
            activity_data = {
                'commit': commit.id,
            }
            status_details = {
                'inCommit': serialize(commit, request.user),
                'actor': serialize(extract_lazy_object(request.user), request.user),
            }
            # res_type / res_status are only set below if the commit maps to
            # a release; when it doesn't, `release` stays None and neither
            # name is read, so no UnboundLocalError occurs.
            res_type_str = 'in_commit'
        else:
            # plain "resolved"
            res_type_str = 'now'
            activity_type = Activity.SET_RESOLVED
            activity_data = {}
            status_details = {}

        now = timezone.now()
        metrics.incr('group.resolved', instance=res_type_str, skip_internal=True)

        # if we've specified a commit, let's see if its already been released
        # this will allow us to associate the resolution to a release as if we
        # were simply using 'inRelease' above
        # Note: this is different than the way commit resolution works on deploy
        # creation, as a given deploy is connected to an explicit release, and
        # in this case we're simply choosing the most recent release which contains
        # the commit.
        if commit and not release:
            # TODO(jess): If we support multiple projects for release / commit resolution,
            # we need to update this to find the release for each project (we shouldn't assume
            # it's the same)
            try:
                release = Release.objects.filter(
                    projects__in=projects,
                    releasecommit__commit=commit,
                ).extra(select={
                    'sort': 'COALESCE(date_released, date_added)',
                }).order_by('-sort')[0]
                res_type = GroupResolution.Type.in_release
                res_status = GroupResolution.Status.resolved
            except IndexError:
                # commit not part of any known release
                release = None

        for group in group_list:
            with transaction.atomic():
                resolution = None
                if release:
                    resolution_params = {
                        'release': release,
                        'type': res_type,
                        'status': res_status,
                        'actor_id': request.user.id if request.user.is_authenticated() else None,
                    }
                    resolution, created = GroupResolution.objects.get_or_create(
                        group=group,
                        defaults=resolution_params,
                    )
                    if not created:
                        # refresh the existing resolution in place
                        resolution.update(
                            datetime=timezone.now(), **resolution_params)

                if commit:
                    # link the resolving commit to the group
                    GroupLink.objects.create(
                        group_id=group.id,
                        project_id=group.project_id,
                        linked_type=GroupLink.LinkedType.commit,
                        relationship=GroupLink.Relationship.resolves,
                        linked_id=commit.id,
                    )

                affected = Group.objects.filter(
                    id=group.id,
                ).update(
                    status=GroupStatus.RESOLVED,
                    resolved_at=now,
                )
                if not resolution:
                    # no resolution row: "created" means the status actually
                    # flipped (update() returned a nonzero row count)
                    created = affected

                # keep the in-memory instance consistent with the DB update
                group.status = GroupStatus.RESOLVED
                group.resolved_at = now

                assigned_to = self_subscribe_and_assign_issue(acting_user, group)
                if assigned_to is not None:
                    result['assignedTo'] = assigned_to

                if created:
                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        ident=resolution.id if resolution else None,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        activity.send_notification()

            # signal + sync kick-off happen outside the atomic block
            issue_resolved.send_robust(
                organization_id=organization_id,
                user=acting_user or request.user,
                group=group,
                project=project_lookup[group.project_id],
                resolution_type=res_type_str,
                sender=update_groups,
            )

            kick_off_status_syncs.apply_async(kwargs={
                'project_id': group.project_id,
                'group_id': group.id,
            })

        result.update({
            'status': 'resolved',
            'statusDetails': status_details,
        })

    elif status:
        # any other status change (unresolved / ignored / ...)
        new_status = STATUS_CHOICES[result['status']]

        with transaction.atomic():
            # only count rows whose status actually changed
            happened = queryset.exclude(
                status=new_status,
            ).update(
                status=new_status,
            )

            # leaving "resolved" always clears resolutions
            GroupResolution.objects.filter(
                group__in=group_ids,
            ).delete()

            if new_status == GroupStatus.IGNORED:
                metrics.incr('group.ignored', skip_internal=True)

                # 'snoozeDuration' is the legacy alias for 'ignoreDuration';
                # `or None` normalizes falsy values (0, '') to None
                ignore_duration = (
                    statusDetails.pop('ignoreDuration', None) or
                    statusDetails.pop('snoozeDuration', None)
                ) or None
                ignore_count = statusDetails.pop(
                    'ignoreCount', None) or None
                ignore_window = statusDetails.pop(
                    'ignoreWindow', None) or None
                ignore_user_count = statusDetails.pop(
                    'ignoreUserCount', None) or None
                ignore_user_window = statusDetails.pop(
                    'ignoreUserWindow', None) or None
                if ignore_duration or ignore_count or ignore_user_count:
                    if ignore_duration:
                        ignore_until = timezone.now() + timedelta(
                            minutes=ignore_duration,
                        )
                    else:
                        # count-based snooze with no time bound
                        ignore_until = None
                    for group in group_list:
                        state = {}
                        if ignore_count and not ignore_window:
                            # snapshot baseline so "N more events" can be
                            # measured from now
                            state['times_seen'] = group.times_seen
                        if ignore_user_count and not ignore_user_window:
                            state['users_seen'] = group.count_users_seen()
                        GroupSnooze.objects.create_or_update(
                            group=group,
                            values={
                                'until': ignore_until,
                                'count': ignore_count,
                                'window': ignore_window,
                                'user_count': ignore_user_count,
                                'user_window': ignore_user_window,
                                'state': state,
                                'actor_id': request.user.id if request.user.is_authenticated() else None,
                            }
                        )
                        result['statusDetails'] = {
                            'ignoreCount': ignore_count,
                            'ignoreUntil': ignore_until,
                            'ignoreUserCount': ignore_user_count,
                            'ignoreUserWindow': ignore_user_window,
                            'ignoreWindow': ignore_window,
                            'actor': serialize(extract_lazy_object(request.user), request.user),
                        }
                else:
                    # unconditional ignore: drop any existing snooze
                    GroupSnooze.objects.filter(
                        group__in=group_ids,
                    ).delete()
                    ignore_until = None
                    result['statusDetails'] = {}
            else:
                result['statusDetails'] = {}

            if group_list and happened:
                if new_status == GroupStatus.UNRESOLVED:
                    activity_type = Activity.SET_UNRESOLVED
                    activity_data = {}
                elif new_status == GroupStatus.IGNORED:
                    activity_type = Activity.SET_IGNORED
                    activity_data = {
                        'ignoreCount': ignore_count,
                        'ignoreDuration': ignore_duration,
                        'ignoreUntil': ignore_until,
                        'ignoreUserCount': ignore_user_count,
                        'ignoreUserWindow': ignore_user_window,
                        'ignoreWindow': ignore_window,
                    }

                    # fan the ignored signal out per project
                    groups_by_project_id = defaultdict(list)
                    for group in group_list:
                        groups_by_project_id[group.project_id].append(group)

                    for project in projects:
                        project_groups = groups_by_project_id.get(project.id)
                        if project_groups:
                            issue_ignored.send_robust(
                                project=project,
                                user=acting_user,
                                group_list=project_groups,
                                activity_data=activity_data,
                                sender=update_groups)

                for group in group_list:
                    group.status = new_status

                    activity = Activity.objects.create(
                        project=project_lookup[group.project_id],
                        group=group,
                        type=activity_type,
                        user=acting_user,
                        data=activity_data,
                    )
                    # TODO(dcramer): we need a solution for activity rollups
                    # before sending notifications on bulk changes
                    if not is_bulk:
                        if acting_user:
                            GroupSubscription.objects.subscribe(
                                user=acting_user,
                                group=group,
                                reason=GroupSubscriptionReason.status_change,
                            )
                        activity.send_notification()

                    if new_status == GroupStatus.UNRESOLVED:
                        kick_off_status_syncs.apply_async(kwargs={
                            'project_id': group.project_id,
                            'group_id': group.id,
                        })

    if 'assignedTo' in result:
        assigned_actor = result['assignedTo']
        if assigned_actor:
            for group in group_list:
                # resolve the Actor wrapper to a concrete user/team per group
                resolved_actor = assigned_actor.resolve()

                GroupAssignee.objects.assign(group, resolved_actor, acting_user)
            result['assignedTo'] = serialize(
                assigned_actor.resolve(), acting_user, ActorSerializer())
        else:
            # explicit null assignment clears the assignee
            for group in group_list:
                GroupAssignee.objects.deassign(group, acting_user)

    # hasSeen only applies to projects the acting user is a member of
    is_member_map = {
        project.id: project.member_set.filter(user=acting_user).exists() for project in projects
    }
    if result.get('hasSeen'):
        for group in group_list:
            if is_member_map.get(group.project_id):
                instance, created = create_or_update(
                    GroupSeen,
                    group=group,
                    user=acting_user,
                    project=project_lookup[group.project_id],
                    values={
                        'last_seen': timezone.now(),
                    }
                )
    elif result.get('hasSeen') is False:
        GroupSeen.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    if result.get('isBookmarked'):
        for group in group_list:
            GroupBookmark.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user,
            )
            # bookmarking implicitly subscribes the user
            GroupSubscription.objects.subscribe(
                user=acting_user,
                group=group,
                reason=GroupSubscriptionReason.bookmark,
            )
    elif result.get('isBookmarked') is False:
        GroupBookmark.objects.filter(
            group__in=group_ids,
            user=acting_user,
        ).delete()

    # TODO(dcramer): we could make these more efficient by first
    # querying for rich rows are present (if N > 2), flipping the flag
    # on those rows, and then creating the missing rows
    if result.get('isSubscribed') in (True, False):
        is_subscribed = result['isSubscribed']
        for group in group_list:
            # NOTE: Subscribing without an initiating event (assignment,
            # commenting, etc.) clears out the previous subscription reason
            # to avoid showing confusing messaging as a result of this
            # action. It'd be jarring to go directly from "you are not
            # subscribed" to "you were subscribed due since you were
            # assigned" just by clicking the "subscribe" button (and you
            # may no longer be assigned to the issue anyway.)
            GroupSubscription.objects.create_or_update(
                user=acting_user,
                group=group,
                project=project_lookup[group.project_id],
                values={
                    'is_active': is_subscribed,
                    'reason': GroupSubscriptionReason.unknown,
                },
            )

        result['subscriptionDetails'] = {
            'reason': SUBSCRIPTION_REASON_MAP.get(
                GroupSubscriptionReason.unknown,
                'unknown',
            ),
        }

    if 'isPublic' in result:
        # We always want to delete an existing share, because triggering
        # an isPublic=True even when it's already public, should trigger
        # regenerating.
        for group in group_list:
            if GroupShare.objects.filter(group=group).delete():
                result['shareId'] = None
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PRIVATE,
                    user=acting_user,
                )

    if result.get('isPublic'):
        for group in group_list:
            share, created = GroupShare.objects.get_or_create(
                project=project_lookup[group.project_id],
                group=group,
                user=acting_user,
            )
            if created:
                result['shareId'] = share.uuid
                Activity.objects.create(
                    project=project_lookup[group.project_id],
                    group=group,
                    type=Activity.SET_PUBLIC,
                    user=acting_user,
                )

    # XXX(dcramer): this feels a bit shady like it should be its own
    # endpoint
    if result.get('merge') and len(group_list) > 1:
        # don't allow merging cross project
        if len(projects) > 1:
            return Response({'detail': 'Merging across multiple projects is not supported'})
        # the most-seen group (ties broken by highest id) becomes the parent
        group_list_by_times_seen = sorted(
            group_list,
            key=lambda g: (g.times_seen, g.id),
            reverse=True,
        )
        primary_group, groups_to_merge = group_list_by_times_seen[0], group_list_by_times_seen[1:]

        group_ids_to_merge = [g.id for g in groups_to_merge]
        eventstream_state = eventstream.start_merge(
            primary_group.project_id,
            group_ids_to_merge,
            primary_group.id
        )

        # mark children pending; the async task performs the actual merge
        Group.objects.filter(
            id__in=group_ids_to_merge
        ).update(
            status=GroupStatus.PENDING_MERGE
        )

        transaction_id = uuid4().hex
        merge_groups.delay(
            from_object_ids=group_ids_to_merge,
            to_object_id=primary_group.id,
            transaction_id=transaction_id,
            eventstream_state=eventstream_state,
        )

        Activity.objects.create(
            project=project_lookup[primary_group.project_id],
            group=primary_group,
            type=Activity.MERGE,
            user=acting_user,
            data={
                'issues': [{
                    'id': c.id
                } for c in groups_to_merge],
            },
        )

        result['merge'] = {
            'parent': six.text_type(primary_group.id),
            'children': [six.text_type(g.id) for g in groups_to_merge],
        }

    return Response(result)