def get_event_by_id(self, project_id, event_id, additional_columns=None):
    """
    Fetch a single event by project ID and event ID.

    Returns None when the event ID is malformed or no matching row is
    found in Snuba (or nodestore, when that backend is enabled).
    """
    normalized = normalize_event_id(event_id)
    if normalized is None:
        return None

    # Optionally serve the event straight from nodestore instead of Snuba.
    if options.get("eventstore.use-nodestore"):
        return self.__get_event_by_id_nodestore(project_id, normalized)

    result = snuba.raw_query(
        selected_columns=self.__get_columns(additional_columns),
        filter_keys={"project_id": [project_id], "event_id": [normalized]},
        referrer="eventstore.get_event_by_id",
        limit=1,
    )

    rows = result.get("data", [])
    if "error" in result or len(rows) != 1:
        return None
    return self.__make_event(rows[0])
def get_event_by_id(self, project_id, event_id, group_id=None):
    """
    Get an event given a project ID and event ID
    Returns None if an event cannot be found
    """
    # Reject malformed event IDs up front.
    event_id = normalize_event_id(event_id)
    if not event_id:
        return None

    # Constructing the Event triggers the nodestore fetch via ``event.data``.
    event = Event(project_id=project_id, event_id=event_id)

    # Return None if there was no data in nodestore
    if len(event.data) == 0:
        return None

    if group_id is not None:
        # Set passed group_id if not a transaction
        if event.get_event_type() == "transaction":
            logger.warning("eventstore.passed-group-id-for-transaction")
        else:
            event.group_id = group_id
    elif event.get_event_type() != "transaction":
        # Load group_id from Snuba if not a transaction
        result = snuba.raw_query(
            selected_columns=["group_id"],
            # Bound the query to a one-second window around the event's
            # own timestamp to keep the Snuba scan narrow.
            start=event.datetime,
            end=event.datetime + timedelta(seconds=1),
            filter_keys={
                "project_id": [project_id],
                "event_id": [event_id]
            },
            limit=1,
            referrer="eventstore.get_event_by_id_nodestore",
        )

        # Return None if the event from Nodestore was not yet written to Snuba
        if len(result["data"]) != 1:
            logger.warning(
                "eventstore.missing-snuba-event",
                extra={
                    "project_id": project_id,
                    "event_id": event_id,
                    "group_id": group_id,
                    "event_datetime": event.datetime,
                    "event_timestamp": event.timestamp,
                    "nodestore_insert": event.data.get("nodestore_insert"),
                    "received": event.data.get("received"),
                    "len_data": len(result["data"]),
                },
            )
            return None

        event.group_id = result["data"][0]["group_id"]

    return event
def _get_events_legacy(
    self, request, group, environments, query, tags, start, end,
):
    """Paginate a group's events from the legacy Event model, optionally
    narrowed by free-text query, tags, date range, and retention."""
    # Base queryset: every event belonging to this group.
    events = Event.objects.filter(group_id=group.id)

    if query:
        # Match on message text; if the query also looks like a hex
        # event ID, match that exactly as well.
        q = Q(message__icontains=query)

        event_id = normalize_event_id(query)
        if event_id:
            q |= Q(event_id__exact=event_id)

        events = events.filter(q)

    if tags:
        event_filter = tagstore.get_group_event_filter(
            group.project_id,
            group.id,
            [env.id for env in environments],
            tags,
            start,
            end,
        )

        # No events carry the requested tags; short-circuit with an
        # empty result set.
        if not event_filter:
            return Response([])

        events = events.filter(**event_filter)

    # Filter start/end here in case we didn't filter by tags at all
    if start:
        events = events.filter(datetime__gte=start)
    if end:
        events = events.filter(datetime__lte=end)

    # filter out events which are beyond the retention period
    retention = quotas.get_event_retention(
        organization=group.project.organization)
    if retention:
        events = events.filter(datetime__gte=timezone.now() -
                               timedelta(days=retention))

    return self.paginate(
        request=request,
        queryset=events,
        order_by='-datetime',
        on_results=lambda x: serialize(x, request.user),
        paginator_cls=DateTimePaginator,
    )
def get_direct_hit_response(request, query, snuba_params, referrer):
    """
    Checks whether a query is a direct hit for an event, and if so returns a
    response. Otherwise returns None
    """
    event_id = normalize_event_id(query)
    if not event_id:
        return None

    snuba_filter = get_filter(query=f"id:{event_id}", params=snuba_params)
    # Direct hits never target transaction events.
    snuba_filter.conditions.append(["event.type", "!=", "transaction"])

    results = eventstore.get_events(referrer=referrer, filter=snuba_filter)
    if len(results) != 1:
        return None

    response = Response(serialize(results, request.user))
    response["X-Sentry-Direct-Hit"] = "1"
    return response
def get_direct_hit_response(request, query, snuba_params, referrer):
    """
    Checks whether a query is a direct hit for an event, and if so returns a
    response. Otherwise returns None
    """
    event_id = normalize_event_id(query)
    if not event_id:
        return None

    snuba_filter = get_filter(query=u"id:{}".format(event_id),
                              params=snuba_params)
    results = eventstore.get_events(referrer=referrer, filter=snuba_filter)

    # Exactly one match means the query was a direct event-ID hit.
    if len(results) != 1:
        return None

    response = Response(serialize(results, request.user))
    response["X-Sentry-Direct-Hit"] = "1"
    return response
def get_event_by_id(self, project_id, event_id):
    """
    Get an event given a project ID and event ID
    Returns None if an event cannot be found
    """
    event_id = normalize_event_id(event_id)
    if not event_id:
        return None

    # Constructing the Event triggers the nodestore fetch via ``event.data``.
    event = Event(project_id=project_id, event_id=event_id)

    # Return None if there was no data in nodestore
    if len(event.data) == 0:
        return None

    # NOTE(review): fromtimestamp() interprets the epoch value in the
    # server's local timezone; if event timestamps are UTC epochs, the
    # Snuba time window below could be offset by the local UTC offset --
    # confirm whether utcfromtimestamp() was intended.
    event_time = datetime.fromtimestamp(event.data["timestamp"])

    # Load group_id from Snuba if not a transaction
    if event.get_event_type() != "transaction":
        result = snuba.raw_query(
            selected_columns=["group_id"],
            start=event_time,
            end=event_time + timedelta(seconds=1),
            filter_keys={
                "project_id": [project_id],
                "event_id": [event_id]
            },
            limit=1,
            referrer="eventstore.get_event_by_id_nodestore",
        )

        # Return None if the event from Nodestore was not yet written to Snuba
        if len(result["data"]) != 1:
            logger.warning(
                "eventstore.missing-snuba-event",
                extra={
                    "project_id": project_id,
                    "event_id": event_id
                },
            )
            return None

        event.group_id = result["data"][0]["group_id"]

    return event
def get_direct_hit_response(request, query, snuba_params, referrer):
    """
    Checks whether a query is a direct hit for an event, and if so returns a
    response. Otherwise returns None
    """
    event_id = normalize_event_id(query)
    if not event_id:
        return None

    snuba_args = get_snuba_query_args(query=u'id:{}'.format(event_id),
                                      params=snuba_params)
    rows = raw_query(
        selected_columns=SnubaEvent.selected_columns,
        referrer=referrer,
        **snuba_args
    )['data']

    # Exactly one row means the query was a direct event-ID hit.
    if len(rows) != 1:
        return None

    response = Response(
        serialize([SnubaEvent(row) for row in rows], request.user))
    response['X-Sentry-Direct-Hit'] = '1'
    return response
def from_event_id(self, id_or_event_id, project_id):
    """
    Get an Event by either its id primary key or its hex event_id.

    Will automatically try to infer the type of id, and grab the correct
    event. If the provided id is a hex event_id, the project_id must also
    be provided to disambiguate it.

    Returns None if the event cannot be found under either scheme.
    """
    # TODO (alexh) instrument this to report any times we are still trying
    # to get events by id.
    # TODO (alexh) deprecate lookup by id so we can move to snuba.
    event = None

    # A numeric string that fits in the PK column: try a primary-key
    # lookup first.
    if id_or_event_id.isdigit() and int(id_or_event_id) <= BoundedBigIntegerField.MAX_VALUE:
        lookup = {'id': id_or_event_id}
        if project_id is not None:
            lookup['project_id'] = project_id
        try:
            event = self.get(**lookup)
        except ObjectDoesNotExist:
            event = None

    # Not found as a PK: fall back to a hex event_id lookup, which
    # requires a project to disambiguate.
    if event is None and project_id is not None:
        event_id = normalize_event_id(id_or_event_id)
        if event_id:
            try:
                event = self.get(event_id=event_id, project_id=project_id)
            except ObjectDoesNotExist:
                event = None

    return event
def get_event_by_id(self, project_id, event_id, additional_columns=None):
    """
    Get an event given a project ID and event ID
    Returns None if an event cannot be found
    """
    normalized = normalize_event_id(event_id)
    if normalized is None:
        return None

    result = snuba.raw_query(
        selected_columns=self.__get_columns(additional_columns),
        filter_keys={
            'project_id': [project_id],
            'event_id': [normalized],
        },
        referrer='eventstore.get_event_by_id',
        limit=1,
    )

    rows = result.get('data', [])
    if 'error' in result or len(rows) != 1:
        return None
    return SnubaEvent(rows[0])
def from_event_id(self, id_or_event_id, project_id):
    """
    Get a SnubaEvent by either its id primary key or its hex event_id.

    Returns None if the event cannot be found under either scheme.

    Log any attempt to fetch a SnubaEvent by primary key and eventually
    remove.
    """
    from sentry.models import SnubaEvent, Event

    event_id = normalize_event_id(id_or_event_id)
    if not event_id:
        # Not a valid hex event_id, so this is presumably a primary-key
        # lookup -- log it so these callers can eventually be removed.
        logger.warning('Attempt to fetch SnubaEvent by primary key',
                       exc_info=True,
                       extra={
                           'event_id': event_id
                       })
        # Resolve the PK to a real event_id via the legacy Event model.
        event = Event.objects.from_event_id(id_or_event_id, project_id)
        if not event:
            return None
        event_id = event.event_id
    return SnubaEvent.get_event(project_id, event_id)
def get(self, request, organization):
    """
    List an Organization's Issues
    `````````````````````````````

    Return a list of issues (groups) bound to an organization.  All parameters are
    supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send an new query value (i.e. ``?query=`` for all
    results).

    The ``groupStatsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: '' (disable),
    '24h', '14d'

    The ``statsPeriod`` parameter can be used to select a date window starting
    from now. Ex. ``14d``.

    The ``start`` and ``end`` parameters can be used to select an absolute
    date period to fetch issues from.

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam string groupStatsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam string start:       Beginning date. You must also provide ``end``.
    :qparam string end:         End date. You must also provide ``start``.
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well. This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.)
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :auth: required
    """
    stats_period = request.GET.get('groupStatsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    environments = self.get_environments(request, organization)

    serializer = functools.partial(
        StreamGroupSerializerSnuba,
        environment_ids=[env.id for env in environments],
        stats_period=stats_period,
    )

    projects = self.get_projects(request, organization)
    project_ids = [p.id for p in projects]

    if not projects:
        return Response([])

    # Searching across several projects requires the global-views feature.
    if len(projects) > 1 and not features.has('organizations:global-views',
                                              organization,
                                              actor=request.user):
        return Response(
            {
                'detail':
                'You do not have the multi project stream feature enabled'
            },
            status=400)

    # we ignore date range for both short id and event ids
    query = request.GET.get('query', '').strip()
    if query:
        # check to see if we've got an event ID
        event_id = normalize_event_id(query)
        if event_id:
            groups = list(
                Group.objects.filter_by_event_id(project_ids, event_id))

            # A single matching group is a direct hit.
            if len(groups) == 1:
                response = Response(
                    serialize(groups, request.user,
                              serializer(matching_event_id=event_id)))
                response['X-Sentry-Direct-Hit'] = '1'
                return response

            if groups:
                return Response(
                    serialize(groups, request.user, serializer()))

        group = get_by_short_id(organization.id,
                                request.GET.get('shortIdLookup'), query)
        if group is not None:
            # check all projects user has access to
            if request.access.has_project_access(group.project):
                response = Response(
                    serialize([group], request.user, serializer()))
                response['X-Sentry-Direct-Hit'] = '1'
                return response

    # If group ids specified, just ignore any query components
    try:
        group_ids = set(map(int, request.GET.getlist('group')))
    except ValueError:
        return Response({'detail': 'Group ids must be integers'},
                        status=400)

    if group_ids:
        groups = list(
            Group.objects.filter(id__in=group_ids,
                                 project_id__in=project_ids))

        # Deny the whole request if any requested group is inaccessible.
        if any(g for g in groups
               if not request.access.has_project_access(g.project)):
            raise PermissionDenied

        return Response(serialize(groups, request.user, serializer()))

    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        return Response({'detail': exc.message}, status=400)

    try:
        cursor_result, query_kwargs = self._search(request, organization,
                                                   projects, environments, {
                                                       'count_hits': True,
                                                       'date_to': end,
                                                       'date_from': start,
                                                   })
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    # TODO: We should try to integrate this into the search backend, since
    # this can cause us to arbitrarily return fewer results than requested.
    status = [
        search_filter
        for search_filter in query_kwargs.get('search_filters', [])
        if search_filter.key.name == 'status'
    ]
    if status and status[0].value.raw_value == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)

    self.add_cursor_headers(request, response, cursor_result)

    # TODO(jess): add metrics that are similar to project endpoint here
    return response
def get(self, request, project):
    """
    List a Project's Issues
    ```````````````````````

    Return a list of issues (groups) bound to a project.  All parameters are
    supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send an new query value (i.e. ``?query=`` for all
    results).

    The ``statsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: ``""`` (disable),
    ``"24h"``, ``"14d"``

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well. This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.)
    :qparam string environment: this restricts the issues to ones containing
                                events from this environment
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :auth: required
    """
    stats_period = request.GET.get("statsPeriod")
    if stats_period not in (None, "", "24h", "14d"):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = "24h"
    elif stats_period == "":
        # disable stats
        stats_period = None

    serializer = functools.partial(
        StreamGroupSerializer,
        environment_func=self._get_environment_func(
            request, project.organization_id),
        stats_period=stats_period,
    )

    query = request.GET.get("query", "").strip()
    if query:
        matching_group = None
        matching_event = None
        event_id = normalize_event_id(query)
        if event_id:
            # check to see if we've got an event ID
            try:
                matching_group = Group.objects.from_event_id(
                    project, event_id)
            except Group.DoesNotExist:
                pass
            else:
                matching_event = eventstore.get_event_by_id(
                    project.id, event_id)
        # NOTE(review): ``matching_group`` was just initialized to None,
        # so this guard is always true here; the elif only distinguishes
        # the short-id path from the event-id path.
        elif matching_group is None:
            matching_group = get_by_short_id(
                project.organization_id, request.GET.get("shortIdLookup"),
                query)

        # Direct hits must belong to the requested project.
        if matching_group is not None and matching_group.project_id != project.id:
            matching_group = None

        if matching_group is not None:
            matching_event_environment = None

            try:
                matching_event_environment = (
                    matching_event.get_environment().name
                    if matching_event else None)
            except Environment.DoesNotExist:
                pass

            response = Response(
                serialize(
                    [matching_group], request.user,
                    serializer(
                        matching_event_id=getattr(matching_event,
                                                  "event_id", None),
                        matching_event_environment=
                        matching_event_environment,
                    ),
                ))
            response["X-Sentry-Direct-Hit"] = "1"
            return response

    try:
        cursor_result, query_kwargs = prep_search(self, request, project,
                                                  {"count_hits": True})
    except ValidationError as exc:
        return Response({"detail": str(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    # TODO: We should try to integrate this into the search backend, since
    # this can cause us to arbitrarily return fewer results than requested.
    status = [
        search_filter
        for search_filter in query_kwargs.get("search_filters", [])
        if search_filter.key.name == "status"
    ]
    if status and (GroupStatus.UNRESOLVED in status[0].value.raw_value):
        status_labels = {
            QUERY_STATUS_LOOKUP[s]
            for s in status[0].value.raw_value
        }
        context = [
            r for r in context
            if "status" not in r or r["status"] in status_labels
        ]

    response = Response(context)

    self.add_cursor_headers(request, response, cursor_result)

    if results and query:
        advanced_search.send(project=project, sender=request.user)
        analytics.record(
            "project_issue.searched",
            user_id=request.user.id,
            organization_id=project.organization_id,
            project_id=project.id,
            query=query,
        )

    return response
def dispatch(self, request):
    """Serve the embeddable user-feedback form: validate the request,
    persist the submitted UserReport, or render the embed JS."""
    try:
        event_id = request.GET['eventId']
    except KeyError:
        return self._smart_response(
            request, {'eventId': 'Missing or invalid parameter.'},
            status=400)

    # A present but malformed eventId is rejected; a normalizable one is
    # canonicalized.
    normalized_event_id = normalize_event_id(event_id)
    if normalized_event_id:
        event_id = normalized_event_id
    elif event_id:
        return self._smart_response(
            request, {'eventId': 'Missing or invalid parameter.'},
            status=400)

    key = self._get_project_key(request)
    if not key:
        return self._smart_response(
            request, {'dsn': 'Missing or invalid parameter.'}, status=404)

    origin = self._get_origin(request)
    if not is_valid_origin(origin, key.project):
        return self._smart_response(request, status=403)

    if request.method == 'OPTIONS':
        return self._smart_response(request)

    # customization options
    options = DEFAULT_OPTIONS.copy()
    for name in six.iterkeys(options):
        if name in request.GET:
            options[name] = six.text_type(request.GET[name])

    # TODO(dcramer): since we cant use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        'name': request.GET.get('name'),
        'email': request.GET.get('email'),
    }

    form = UserReportForm(
        request.POST if request.method == 'POST' else None, initial=initial)
    if form.is_valid():
        # TODO(dcramer): move this to post to the internal API
        report = form.save(commit=False)
        report.project = key.project
        report.event_id = event_id

        try:
            event = Event.objects.filter(project_id=report.project.id,
                                         event_id=report.event_id)[0]
        except IndexError:
            # Event not stored yet: at least try to attach the group.
            try:
                report.group = Group.objects.from_event_id(
                    report.project, report.event_id)
            except Group.DoesNotExist:
                pass
        else:
            # Event found: attach its environment and group to the report.
            Event.objects.bind_nodes([event])
            report.environment = event.get_environment()
            report.group = event.group

        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            UserReport.objects.filter(
                project=report.project,
                event_id=report.event_id,
            ).update(
                name=report.name,
                email=report.email,
                comments=report.comments,
                date_added=timezone.now(),
            )
        else:
            if report.group:
                report.notify()

        user_feedback_received.send(project=report.project,
                                    group=report.group,
                                    sender=self)

        return self._smart_response(request)
    elif request.method == 'POST':
        return self._smart_response(request, {
            "errors": dict(form.errors),
        }, status=400)

    show_branding = ProjectOption.objects.get_value(
        project=key.project, key='feedback:branding', default='1') == '1'

    template = render_to_string(
        'sentry/error-page-embed.html', {
            'form': form,
            'show_branding': show_branding,
            'title': options['title'],
            'subtitle': options['subtitle'],
            'subtitle2': options['subtitle2'],
            'name_label': options['labelName'],
            'email_label': options['labelEmail'],
            'comments_label': options['labelComments'],
            'submit_label': options['labelSubmit'],
            'close_label': options['labelClose'],
        })

    # The JS payload wraps JSON in comment markers for safe embedding.
    context = {
        'endpoint':
        mark_safe('*/' + json.dumps(request.build_absolute_uri()) + ';/*'),
        'template':
        mark_safe('*/' + json.dumps(template) + ';/*'),
        'strings':
        json.dumps_htmlsafe({
            'generic_error':
            six.text_type(options['errorGeneric']),
            'form_error':
            six.text_type(options['errorFormEntry']),
            'sent_message':
            six.text_type(options['successMessage']),
        }),
    }

    return render_to_response('sentry/error-page-embed.js',
                              context,
                              request,
                              content_type='text/javascript')
def dispatch(self, request):
    """Serve the embeddable user-feedback form: validate the request,
    persist the submitted UserReport, or render the embed JS."""
    try:
        event_id = request.GET["eventId"]
    except KeyError:
        return self._smart_response(
            request, {"eventId": "Missing or invalid parameter."},
            status=400)

    # A present but malformed eventId is rejected; a normalizable one is
    # canonicalized.
    normalized_event_id = normalize_event_id(event_id)
    if normalized_event_id:
        event_id = normalized_event_id
    elif event_id:
        return self._smart_response(
            request, {"eventId": "Missing or invalid parameter."},
            status=400)

    key = self._get_project_key(request)
    if not key:
        return self._smart_response(
            request, {"dsn": "Missing or invalid parameter."}, status=404)

    origin = self._get_origin(request)
    if not is_valid_origin(origin, key.project):
        return self._smart_response(request, status=403)

    if request.method == "OPTIONS":
        return self._smart_response(request)

    # customization options
    options = DEFAULT_OPTIONS.copy()
    for name in options.keys():
        if name in request.GET:
            options[name] = str(request.GET[name])

    # TODO(dcramer): since we cant use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        "name": request.GET.get("name"),
        "email": request.GET.get("email")
    }

    form = UserReportForm(
        request.POST if request.method == "POST" else None, initial=initial)
    if form.is_valid():
        # TODO(dcramer): move this to post to the internal API
        report = form.save(commit=False)
        report.project_id = key.project_id
        report.event_id = event_id

        # Attach environment/group from the stored event when available.
        event = eventstore.get_event_by_id(report.project_id,
                                           report.event_id)

        if event is not None:
            report.environment_id = event.get_environment().id
            report.group_id = event.group_id

        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            UserReport.objects.filter(project_id=report.project_id,
                                      event_id=report.event_id).update(
                                          name=report.name,
                                          email=report.email,
                                          comments=report.comments,
                                          date_added=timezone.now(),
                                      )
        else:
            if report.group_id:
                report.notify()

        user_feedback_received.send(
            project=Project.objects.get(id=report.project_id),
            sender=self,
        )

        return self._smart_response(request)
    elif request.method == "POST":
        return self._smart_response(request,
                                    {"errors": dict(form.errors)},
                                    status=400)

    show_branding = (ProjectOption.objects.get_value(
        project=key.project, key="feedback:branding", default="1") == "1")

    template = render_to_string(
        "sentry/error-page-embed.html",
        context={
            "form": form,
            "show_branding": show_branding,
            "title": options["title"],
            "subtitle": options["subtitle"],
            "subtitle2": options["subtitle2"],
            "name_label": options["labelName"],
            "email_label": options["labelEmail"],
            "comments_label": options["labelComments"],
            "submit_label": options["labelSubmit"],
            "close_label": options["labelClose"],
        },
    )

    # The JS payload wraps JSON in comment markers for safe embedding.
    context = {
        "endpoint":
        mark_safe("*/" +
                  json.dumps(absolute_uri(request.get_full_path())) +
                  ";/*"),
        "template":
        mark_safe("*/" + json.dumps(template) + ";/*"),
        "strings":
        mark_safe("*/" + json.dumps_htmlsafe(
            {
                "generic_error": str(options["errorGeneric"]),
                "form_error": str(options["errorFormEntry"]),
                "sent_message": str(options["successMessage"]),
            }) + ";/*"),
    }

    return render_to_response("sentry/error-page-embed.js",
                              context,
                              request,
                              content_type="text/javascript")
def test_normalize_event_id():
    """normalize_event_id lowercases 32-char hex IDs, strips UUID dashes,
    accepts bytes, and returns None for anything else."""
    canonical = "b802415f7531431caa27f5c0bf923302"

    accepted = [
        "b802415f7531431caa27f5c0bf923302",
        "B802415F7531431CAA27F5C0BF923302",
        "b802415f-7531-431c-aa27-f5c0bf923302",
        "B802415F-7531-431C-AA27-F5C0BF923302",
        b"b802415f7531431caa27f5c0bf923302",
    ]
    for value in accepted:
        assert normalize_event_id(value) == canonical

    rejected = [
        "",
        "b802415f7531431caa",
        "XXXX415f7531431caa27f5c0bf92XXXX",
        4711,
        False,
        None,
    ]
    for value in rejected:
        assert normalize_event_id(value) is None
def get_event_by_id(self, project_id, event_id, group_id=None):
    """
    Get an event given a project ID and event ID
    Returns None if an event cannot be found
    """
    event_id = normalize_event_id(event_id)
    if not event_id:
        return None

    # Constructing the Event triggers the nodestore fetch via ``event.data``.
    event = Event(project_id=project_id, event_id=event_id)

    # Return None if there was no data in nodestore
    if len(event.data) == 0:
        return None

    if group_id is not None:
        # Set passed group_id if not a transaction
        if event.get_event_type() == "transaction":
            logger.warning("eventstore.passed-group-id-for-transaction")
        else:
            event.group_id = group_id
    elif event.get_event_type() != "transaction":
        # Load group_id from Snuba if not a transaction
        raw_query_kwargs = {}
        if event.datetime > timezone.now() - timedelta(hours=1):
            # XXX: This is a hack to bust the snuba cache. We want to avoid the case where
            # we cache an empty result, since this can result in us failing to fetch new events
            # in some cases.
            # The random epoch bound (<= ~2001) is always in the past for
            # recent events, so the condition is a no-op filter that only
            # varies the query text to defeat the cache.
            raw_query_kwargs["conditions"] = [[
                "timestamp", ">",
                datetime.fromtimestamp(random.randint(0, 1000000000))
            ]]
        result = snuba.raw_query(
            selected_columns=["group_id"],
            start=event.datetime,
            end=event.datetime + timedelta(seconds=1),
            filter_keys={
                "project_id": [project_id],
                "event_id": [event_id]
            },
            limit=1,
            referrer="eventstore.get_event_by_id_nodestore",
            **raw_query_kwargs,
        )

        # Return None if the event from Nodestore was not yet written to Snuba
        if len(result["data"]) != 1:
            logger.warning(
                "eventstore.missing-snuba-event",
                extra={
                    "project_id": project_id,
                    "event_id": event_id,
                    "group_id": group_id,
                    "event_datetime": event.datetime,
                    "event_timestamp": event.timestamp,
                    "nodestore_insert": event.data.get("nodestore_insert"),
                    "received": event.data.get("received"),
                    "len_data": len(result["data"]),
                },
            )
            return None

        event.group_id = result["data"][0]["group_id"]

    return event
def get(self, request, project):
    """
    List a Project's Issues
    ```````````````````````

    Return a list of issues (groups) bound to a project.  All parameters are
    supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send an new query value (i.e. ``?query=`` for all
    results).

    The ``statsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: '' (disable),
    '24h', '14d'

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well. This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.)
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :auth: required
    """
    stats_period = request.GET.get('statsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    serializer = functools.partial(
        StreamGroupSerializer,
        environment_func=self._get_environment_func(
            request, project.organization_id),
        stats_period=stats_period,
    )

    query = request.GET.get('query', '').strip()
    if query:
        matching_group = None
        matching_event = None
        event_id = normalize_event_id(query)
        if event_id:
            # check to see if we've got an event ID
            try:
                matching_group = Group.objects.from_event_id(
                    project, event_id)
            except Group.DoesNotExist:
                pass
            else:
                matching_event = Event.objects.from_event_id(
                    event_id, project.id)
                if matching_event is not None:
                    # Load the event payload from nodestore.
                    Event.objects.bind_nodes([matching_event], 'data')
        elif matching_group is None:
            matching_group = get_by_short_id(
                project.organization_id,
                request.GET.get('shortIdLookup'),
                query,
            )

        # Direct hits must belong to the requested project.
        if matching_group is not None and matching_group.project_id != project.id:
            matching_group = None

        if matching_group is not None:
            matching_event_environment = None

            try:
                matching_event_environment = matching_event.get_environment(
                ).name if matching_event else None
            except Environment.DoesNotExist:
                pass

            response = Response(
                serialize([matching_group], request.user,
                          serializer(
                              matching_event_id=getattr(
                                  matching_event, 'id', None),
                              matching_event_environment=
                              matching_event_environment,
                          )))
            response['X-Sentry-Direct-Hit'] = '1'
            return response

    try:
        cursor_result, query_kwargs = self._search(request, project,
                                                   {'count_hits': True})
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    # TODO: We should try to integrate this into the search backend, since
    # this can cause us to arbitrarily return fewer results than requested.
    status = [
        search_filter
        for search_filter in query_kwargs.get('search_filters', [])
        if search_filter.key.name == 'status'
    ]
    if status and status[0].value.raw_value == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)

    self.add_cursor_headers(request, response, cursor_result)

    # Only record analytics for genuine user-entered searches.
    if results and query not in DEFAULT_SAVED_SEARCH_QUERIES:
        advanced_search.send(project=project, sender=request.user)
        analytics.record('project_issue.searched',
                         user_id=request.user.id,
                         organization_id=project.organization_id,
                         project_id=project.id,
                         query=query)

    return response
def get(self, request, organization):
    """
    List an Organization's Issues
    `````````````````````````````

    Return a list of issues (groups) bound to an organization. All parameters
    are supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results with
    other statuses send a new query value (i.e. ``?query=`` for all results).

    The ``groupStatsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: '' (disable),
    '24h', '14d'

    The ``statsPeriod`` parameter can be used to select a date window starting
    from now. Ex. ``14d``.

    The ``start`` and ``end`` parameters can be used to select an absolute
    date period to fetch issues from.

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam string groupStatsPeriod: an optional stat period (can be one of
                                     ``"24h"``, ``"14d"``, and ``""``).
    :qparam string start:       Beginning date. You must also provide ``end``.
    :qparam string end:         End date. You must also provide ``start``.
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well. This can
                                cause the return value of the function to
                                return an event issue of a different project
                                which is why this is an opt-in. Set to `1`
                                to enable.
    :qparam querystring query:  an optional Sentry structured search query.
                                If not provided an implied
                                ``"is:unresolved"`` is assumed.)
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :auth: required
    TODO(Chris F.): Add details on expand/collapse.
    """
    stats_period = request.GET.get("groupStatsPeriod")
    try:
        # Absolute date window; raises InvalidParams on malformed/contradictory input.
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as e:
        raise ParseError(detail=six.text_type(e))

    expand = request.GET.getlist("expand", [])
    collapse = request.GET.getlist("collapse", [])
    has_inbox = features.has("organizations:inbox", organization, actor=request.user)

    # Validate and normalize the stats period before building the serializer.
    if stats_period not in (None, "", "24h", "14d", "auto"):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = "24h"
    elif stats_period == "":
        # disable stats
        stats_period = None

    # "auto" pins the stats timeline to the requested date window instead of
    # a rolling period ending now.
    if stats_period == "auto":
        stats_period_start = start
        stats_period_end = end
    else:
        stats_period_start = None
        stats_period_end = None

    environments = self.get_environments(request, organization)

    # Partially-applied serializer so each response branch below can add its
    # own per-call kwargs (e.g. matching_event_id, search_filters).
    serializer = functools.partial(
        StreamGroupSerializerSnuba,
        environment_ids=[env.id for env in environments],
        stats_period=stats_period,
        stats_period_start=stats_period_start,
        stats_period_end=stats_period_end,
        expand=expand,
        collapse=collapse,
        has_inbox=has_inbox,
    )

    projects = self.get_projects(request, organization)
    project_ids = [p.id for p in projects]

    if not projects:
        return Response([])

    # Multi-project queries require the global-views feature.
    if len(projects) > 1 and not features.has(
        "organizations:global-views", organization, actor=request.user
    ):
        return Response(
            {"detail": "You do not have the multi project stream feature enabled"}, status=400
        )

    # we ignore date range for both short id and event ids
    query = request.GET.get("query", "").strip()
    if query:
        # check to see if we've got an event ID
        event_id = normalize_event_id(query)
        if event_id:
            # For a direct hit lookup we want to use any passed project ids
            # (we've already checked permissions on these) plus any other
            # projects that the user is a member of. This gives us a better
            # chance of returning the correct result, even if the wrong
            # project is selected.
            direct_hit_projects = set(project_ids) | set(
                [project.id for project in request.access.projects]
            )
            groups = list(Group.objects.filter_by_event_id(direct_hit_projects, event_id))
            if len(groups) == 1:
                # Unambiguous match: mark the response as a direct hit.
                response = Response(
                    serialize(groups, request.user, serializer(matching_event_id=event_id))
                )
                response["X-Sentry-Direct-Hit"] = "1"
                return response

            if groups:
                # Multiple candidate groups: return them all without the
                # direct-hit marker or matching event id.
                return Response(serialize(groups, request.user, serializer()))

        # Fall back to short-id lookup (opt-in via ?shortIdLookup=1).
        group = get_by_short_id(organization.id, request.GET.get("shortIdLookup"), query)
        if group is not None:
            # check all projects user has access to
            if request.access.has_project_access(group.project):
                response = Response(serialize([group], request.user, serializer()))
                response["X-Sentry-Direct-Hit"] = "1"
                return response

    # If group ids specified, just ignore any query components
    try:
        group_ids = set(map(int, request.GET.getlist("group")))
    except ValueError:
        return Response({"detail": "Group ids must be integers"}, status=400)

    if group_ids:
        groups = list(Group.objects.filter(id__in=group_ids, project_id__in=project_ids))
        # Reject the whole request if any requested group is inaccessible.
        if any(g for g in groups if not request.access.has_project_access(g.project)):
            raise PermissionDenied
        return Response(serialize(groups, request.user, serializer()))

    # General case: run the structured search.
    try:
        cursor_result, query_kwargs = self._search(
            request,
            organization,
            projects,
            environments,
            {"count_hits": True, "date_to": end, "date_from": start},
        )
    except (ValidationError, discover.InvalidSearchQuery) as exc:
        return Response({"detail": six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(
        results,
        request.user,
        serializer(
            start=start,
            end=end,
            search_filters=query_kwargs["search_filters"]
            if "search_filters" in query_kwargs
            else None,
        ),
    )

    # HACK: remove auto resolved entries
    # TODO: We should try to integrate this into the search backend, since
    # this can cause us to arbitrarily return fewer results than requested.
    status = [
        search_filter
        for search_filter in query_kwargs.get("search_filters", [])
        if search_filter.key.name == "status"
    ]
    if status and status[0].value.raw_value == GroupStatus.UNRESOLVED:
        context = [r for r in context if r["status"] == "unresolved"]

    response = Response(context)
    # Pagination cursors are exposed via Link headers.
    self.add_cursor_headers(request, response, cursor_result)

    # TODO(jess): add metrics that are similar to project endpoint here
    return response
def test_normalize_event_id():
    """normalize_event_id lowercases/strips valid 32-hex IDs and rejects everything else."""
    canonical = 'b802415f7531431caa27f5c0bf923302'

    # Accepted spellings: lower/upper case, dashed UUID form, and bytes.
    accepted = (
        'b802415f7531431caa27f5c0bf923302',
        'B802415F7531431CAA27F5C0BF923302',
        'b802415f-7531-431c-aa27-f5c0bf923302',
        'B802415F-7531-431C-AA27-F5C0BF923302',
        b'b802415f7531431caa27f5c0bf923302',
    )
    for candidate in accepted:
        assert normalize_event_id(candidate) == canonical

    # Rejected inputs: empty, too short, non-hex, and non-string types.
    rejected = (
        '',
        'b802415f7531431caa',
        'XXXX415f7531431caa27f5c0bf92XXXX',
        4711,
        False,
        None,
    )
    for candidate in rejected:
        assert normalize_event_id(candidate) is None