def rehash_group_events(group_id, transaction_id=None, **kwargs):
    """Wipe a group's hashes, rehash its events in batches, then delete it.

    Re-enqueues itself while more events remain; once rehashing finishes,
    logs the earlier bulk hash deletion and schedules removal of the group.
    """
    from sentry.models import Group, GroupHash

    group = Group.objects.get(id=group_id)

    # Clear out existing hashes to preempt new events being added
    # This can cause the new groups to be created before we get to them, but
    # its a tradeoff we're willing to take
    GroupHash.objects.filter(group=group).delete()

    if _rehash_group_events(group):
        # More events remain: schedule another pass and stop here.
        rehash_group_events.delay(
            group_id=group.id,
            transaction_id=transaction_id,
        )
        return

    delete_logger.info(
        'object.delete.bulk_executed',
        extra={
            'group_id': group.id,
            'transaction_id': transaction_id,
            'model': GroupHash.__name__,
        },
    )
    delete_group.delay(group.id)
def delete(self, request, project):
    """
    Bulk remove a list of aggregates

    Permanently remove the given aggregates. Only queries by 'id' are accepted.

        {method} {path}?id=1&id=2&id=3

    If any ids are out of scope this operation will succeed without
    any data mutation
    """
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(project=project, id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
    else:
        # missing any kind of filter
        # Pass a dict so the renderer emits a JSON object; a pre-encoded
        # string would be double-encoded as a JSON string literal by DRF.
        return Response(
            {"detail": "You must specify a list of IDs for this operation"},
            status=400,
        )

    if not group_ids:
        return Response(status=204)

    # TODO(dcramer): set status to pending deletion
    for group in group_list:
        delete_group.delay(object_id=group.id)

    return Response(status=204)
def delete(self, request, project_id):
    """
    Permanently remove the given groups. Only queries by 'id' are accepted.

        {method} {path}?id=1&id=2&id=3

    If any ids are out of scope this operation will succeed without
    any data mutation
    """
    project = Project.objects.get_from_cache(
        id=project_id,
    )

    assert_perm(project, request.user, request.auth)

    id_list = request.GET.getlist('id')
    if not id_list:
        # missing any kind of filter
        return Response(status=400)

    # filter down group ids to only valid matches
    group_list = Group.objects.filter(project=project, id__in=id_list)
    matched_ids = [g.id for g in group_list]

    if not matched_ids:
        return Response(status=204)

    # TODO(dcramer): set status to pending deletion
    for group in group_list:
        delete_group.delay(object_id=group.id)

    return Response(status=204)
def delete(self, request, project):
    """
    Bulk remove a list of aggregates

    Permanently remove the given aggregates. Only queries by 'id' are accepted.

        {method} {path}?id=1&id=2&id=3

    If any ids are out of scope this operation will succeed without
    any data mutation
    """
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(project=project, id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
    else:
        # missing any kind of filter
        # Pass a dict so the renderer emits a JSON object; a pre-encoded
        # string would be double-encoded as a JSON string literal by DRF.
        return Response(
            {"detail": "You must specify a list of IDs for this operation"},
            status=400,
        )

    if not group_ids:
        return Response(status=204)

    # TODO(dcramer): set status to pending deletion
    for group in group_list:
        delete_group.delay(object_id=group.id)

    return Response(status=204)
def delete(self, request, group):
    """
    Delete an aggregate

    Deletes an individual aggregate.

        {method} {path}
    """
    from sentry.tasks.deletion import delete_group

    # Deletion runs asynchronously via the task queue; 202 Accepted tells
    # the caller the work has been queued, not that it has completed.
    delete_group.delay(object_id=group.id)

    return Response(status=202)
def rehash_group_events(group_id, **kwargs):
    """Drop a group's hashes, re-bucket its events, then delete the group."""
    from sentry.models import Group, GroupHash

    group = Group.objects.get(id=group_id)

    # Clear out existing hashes to preempt new events being added
    # This can cause the new groups to be created before we get to them, but
    # its a tradeoff we're willing to take
    GroupHash.objects.filter(group=group).delete()

    if _rehash_group_events(group):
        # Still more events to process: re-enqueue ourselves and bail out.
        rehash_group_events.delay(group_id=group.id)
        return

    delete_group.delay(group.id)
def delete(self, request, group_id):
    """
    Delete an aggregate

    Deletes an individual aggregate.

        {method} {path}
    """
    from sentry.tasks.deletion import delete_group

    group = Group.objects.get(id=group_id)

    # Caller must have permission on this group before we queue the delete.
    assert_perm(group, request.user, request.auth)

    delete_group.delay(object_id=group.id)

    return Response(status=202)
def remove_group(request, team, project, group_id):
    """Queue asynchronous deletion of a group and respond per request type."""
    from sentry.tasks.deletion import delete_group

    try:
        group = Group.objects.get(pk=group_id)
    except Group.DoesNotExist:
        return HttpResponseForbidden()

    delete_group.delay(object_id=group.id)

    if not request.is_ajax():
        # Full-page request: flash a message and bounce back to the stream.
        messages.add_message(
            request, messages.SUCCESS,
            _("Deletion has been queued and should occur shortly."))
        return HttpResponseRedirect(
            reverse("sentry-stream", args=[team.slug, project.slug]))

    response = HttpResponse("{}")
    response["Content-Type"] = "application/json"
    return response
def remove_group(request, organization, project, group_id):
    """Queue asynchronous deletion of a group; AJAX gets JSON, pages redirect."""
    from sentry.tasks.deletion import delete_group

    try:
        group = Group.objects.get(pk=group_id)
    except Group.DoesNotExist:
        return HttpResponseForbidden()

    delete_group.delay(object_id=group.id)

    if request.is_ajax():
        response = HttpResponse('{}')
        response['Content-Type'] = 'application/json'
        return response

    # Full-page request: flash a message and redirect to the stream view.
    messages.add_message(
        request, messages.SUCCESS,
        _('Deletion has been queued and should occur shortly.'))
    return HttpResponseRedirect(
        reverse('sentry-stream', args=[organization.slug, project.slug]))
def rehash_group_events(group_id, **kwargs):
    """Rebuild a group's event hashes batch by batch, then delete the group."""
    from sentry.models import Group, GroupHash

    group = Group.objects.get(id=group_id)

    # Clear out existing hashes to preempt new events being added
    # This can cause the new groups to be created before we get to them, but
    # its a tradeoff we're willing to take
    GroupHash.objects.filter(group=group).delete()

    has_more = _rehash_group_events(group)
    if not has_more:
        delete_group.delay(group.id)
        return

    # More batches remain -- schedule the next pass.
    rehash_group_events.delay(group_id=group.id)
def delete(self, request, group_id):
    """
    Delete an aggregate

    Deletes an individual aggregate.

        {method} {path}
    """
    from sentry.tasks.deletion import delete_group

    group = Group.objects.get(
        id=group_id,
    )

    # Permission check happens before any work is queued.
    assert_perm(group, request.user, request.auth)

    delete_group.delay(object_id=group.id)
    return Response(status=202)
def delete(self, request, project):
    """
    Bulk Remove a List of Aggregates
    ````````````````````````````````

    Permanently remove the given aggregates. The list of groups to
    modify is given through the `id` query parameter. It is repeated
    for each group that should be removed.

    Only queries by 'id' are accepted.

    If any ids are out of scope this operation will succeed without
    any data mutation.

    :qparam int id: a list of IDs of the groups to be removed. This
                    parameter shall be repeated for each group.
    :pparam string organization_slug: the slug of the organization the
                                      groups belong to.
    :pparam string project_slug: the slug of the project the groups
                                 belong to.
    :auth: required
    """
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(project=project, id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
    else:
        # missing any kind of filter
        # Pass a dict so the renderer emits a JSON object; a pre-encoded
        # string would be double-encoded as a JSON string literal by DRF.
        return Response(
            {"detail": "You must specify a list of IDs for this operation"},
            status=400,
        )

    if not group_ids:
        return Response(status=204)

    # TODO(dcramer): set status to pending deletion
    for group in group_list:
        delete_group.delay(object_id=group.id)

    return Response(status=204)
def delete(self, request, group):
    """
    Remove an Issue
    ```````````````

    Removes an individual issue.

    :pparam string issue_id: the ID of the issue to delete.
    :auth: required
    """
    from sentry.tasks.deletion import delete_group

    # Atomically flag the group for deletion unless a delete is already
    # pending or in progress; only the request whose UPDATE wins actually
    # enqueues the task.
    in_flight = [
        GroupStatus.PENDING_DELETION,
        GroupStatus.DELETION_IN_PROGRESS,
    ]
    updated = Group.objects.filter(
        id=group.id,
    ).exclude(
        status__in=in_flight,
    ).update(status=GroupStatus.PENDING_DELETION)

    if updated:
        delete_group.delay(object_id=group.id)

    return Response(status=202)
def delete(self, request, group):
    """
    Remove an Issue
    ```````````````

    Removes an individual issue.

    :pparam string issue_id: the ID of the issue to delete.
    :auth: required
    """
    from sentry.tasks.deletion import delete_group

    # Only transition groups not already mid-deletion; the UPDATE row count
    # tells us whether this request won the transition.
    pending = Group.objects.filter(id=group.id).exclude(
        status__in=[GroupStatus.PENDING_DELETION,
                    GroupStatus.DELETION_IN_PROGRESS],
    )
    if pending.update(status=GroupStatus.PENDING_DELETION):
        # NOTE(review): countdown defers the task by an hour -- presumably
        # to leave a window before the delete runs; confirm intent.
        delete_group.delay(object_id=group.id, countdown=3600)

    return Response(status=202)
def delete(self, request, project):
    """
    Bulk Remove a List of Aggregates
    ````````````````````````````````

    Permanently remove the given aggregates. The list of groups to
    modify is given through the `id` query parameter. It is repeated
    for each group that should be removed.

    Only queries by 'id' are accepted.

    If any ids are out of scope this operation will succeed without
    any data mutation.

    :qparam int id: a list of IDs of the groups to be removed. This
                    parameter shall be repeated for each group.
    :pparam string organization_slug: the slug of the organization the
                                      groups belong to.
    :pparam string project_slug: the slug of the project the groups
                                 belong to.
    :auth: required
    """
    group_ids = request.GET.getlist('id')
    if group_ids:
        group_list = Group.objects.filter(project=project, id__in=group_ids)
        # filter down group ids to only valid matches
        group_ids = [g.id for g in group_list]
    else:
        # missing any kind of filter
        # Pass a dict so the renderer emits a JSON object; a pre-encoded
        # string would be double-encoded as a JSON string literal by DRF.
        return Response(
            {"detail": "You must specify a list of IDs for this operation"},
            status=400,
        )

    if not group_ids:
        return Response(status=204)

    # TODO(dcramer): set status to pending deletion
    for group in group_list:
        delete_group.delay(object_id=group.id, countdown=3600)

    return Response(status=204)
def delete(self, request, group):
    """
    Remove an Aggregate
    ```````````````````

    Removes an individual aggregate.

    :pparam string group_id: the ID of the group to delete.
    :auth: required
    """
    from sentry.tasks.deletion import delete_group

    # Skip groups already queued or being deleted; the update's row count
    # guards against enqueueing the task twice.
    qs = Group.objects.filter(id=group.id).exclude(status__in=[
        GroupStatus.PENDING_DELETION,
        GroupStatus.DELETION_IN_PROGRESS,
    ])
    won_transition = qs.update(status=GroupStatus.PENDING_DELETION)

    if won_transition:
        delete_group.delay(object_id=group.id)

    return Response(status=202)