def get(self, request, group):
    """Return an issue's tag keys along with their top values.

    The optional ``key`` query parameter limits the response to
    specific tag keys.
    """
    requested_keys = [
        tagstore.prefix_reserved_key(raw)
        for raw in request.GET.getlist('key')
        if raw
    ] or None
    # Two callers: the 'Tags' tab wants the top 10 values, the tag
    # distribution bars want 9. This should ideally be specified by
    # the client instead of inferred from `key`.
    value_limit = 9 if requested_keys else 10
    use_snuba = request.GET.get('enable_snuba') == '1'
    environment_ids = [
        env.id for env in get_environments(request, group.project.organization)
    ]
    if not use_snuba and environment_ids:
        # TODO(jess): non-snuba tagstore versions raise if more than one
        # environment is passed, so keep only the first to avoid breaking
        # the old issue page.
        environment_ids = environment_ids[:1]
    tag_keys = tagstore.get_group_tag_keys_and_top_values(
        group.project_id,
        group.id,
        environment_ids,
        keys=requested_keys,
        value_limit=value_limit,
    )
    return Response(serialize(tag_keys, request.user))
def get(self, request: Request, group) -> Response:
    """
    Retrieve the Oldest Event for an Issue
    ``````````````````````````````````````
    Retrieves the details of the oldest event for an issue.

    :pparam string group_id: the ID of the issue
    """
    environments = [
        env.name
        for env in get_environments(request, group.project.organization)
    ]
    event = group.get_oldest_event_for_environments(environments)
    if not event:
        return Response({"detail": "No events found for group"}, status=404)
    # Proxy to the project-level event endpoint, forwarding the
    # environment filter and group id.
    path = f"/projects/{event.organization.slug}/{event.project.slug}/events/{event.event_id}/"
    payload = {"environment": environments, "group_id": event.group_id}
    try:
        return client.get(path, request=request, data=payload)
    except client.ApiError as exc:
        return Response(exc.body, status=exc.status_code)
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````

    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(request, group, environments)
    except InvalidQuery as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        return Response([])

    # Route through snuba on explicit opt-in or when the global option
    # is enabled; otherwise fall back to the legacy backend.
    if request.GET.get('enable_snuba') == '1' or options.get('snuba.events-queries.enabled'):
        backend = self._get_events_snuba
    else:
        backend = self._get_events_legacy

    start, end = get_date_range_from_params(request.GET, optional=True)
    try:
        return backend(request, group, environments, query, tags, start, end)
    except GroupEventsError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
def get(self, request, group):
    """
    Retrieve the Oldest Event for an Issue
    ``````````````````````````````````````
    Retrieves the details of the oldest event for an issue.

    :pparam string group_id: the ID of the issue
    """
    environments = [
        env.name
        for env in get_environments(request, group.project.organization)
    ]
    event = group.get_oldest_event_for_environments(environments)
    if not event:
        return Response({'detail': 'No events found for group'}, status=404)
    # Proxy to the project-level event endpoint.
    path = u'/projects/{}/{}/events/{}/'.format(
        event.organization.slug, event.project.slug, event.event_id
    )
    try:
        return client.get(path, request=request)
    except client.ApiError as exc:
        return Response(exc.body, status=exc.status_code)
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````

    This endpoint lists an issue's events.

    :qparam bool full: if this is set to true then the event payload will
                       include the full event body, including the stacktrace.
                       Set to 1 to enable.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    # Parse the environment filter and search query; a malformed query
    # is a client error, while an empty result set short-circuits.
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(request, group, environments)
    except InvalidQuery as exc:
        return Response({"detail": six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        return Response([])

    try:
        start, end = get_date_range_from_params(request.GET, optional=True)
    except InvalidParams as bad_params:
        raise ParseError(detail=six.text_type(bad_params))

    try:
        return self._get_events_snuba(request, group, environments, query, tags, start, end)
    except GroupEventsError as exc:
        raise ParseError(detail=six.text_type(exc))
def get(self, request, group):
    """
    Retrieve the Latest Event for an Issue
    ``````````````````````````````````````
    Retrieves the details of the latest event for an issue.

    :pparam string group_id: the ID of the issue
    """
    environments = [
        env.name
        for env in get_environments(request, group.project.organization)
    ]
    event = group.get_latest_event_for_environments(environments)
    if not event:
        return Response({"detail": "No events found for group"}, status=404)
    # Proxy to the project-level event endpoint.
    path = u"/projects/{}/{}/events/{}/".format(
        event.organization.slug, event.project.slug, event.event_id
    )
    try:
        return client.get(path, request=request)
    except client.ApiError as exc:
        return Response(exc.body, status=exc.status_code)
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````

    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(
            request,
            group,
            environments,
        )
    except InvalidQuery as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        return Response([])

    # Route through snuba on explicit opt-in or when the global option
    # is enabled; otherwise use the legacy backend.
    use_snuba = (request.GET.get('enable_snuba') == '1'
                 or options.get('snuba.events-queries.enabled'))
    backend = self._get_events_snuba if use_snuba else self._get_events_legacy
    start, end = get_date_range_from_params(request.GET, optional=True)
    try:
        return backend(request, group, environments, query, tags, start, end)
    except GroupEventsError as exc:
        # Fix: the sibling implementation of this endpoint handles
        # GroupEventsError as a 400; without this an invalid events
        # query surfaced as an unhandled 500 here.
        return Response({'detail': six.text_type(exc)}, status=400)
def get(self, request: Request, team) -> Response:
    """
    Return the oldest issues owned by a team
    """
    # Page size: client-provided `limit`, capped at 100, defaulting to 10.
    # NOTE(review): int() raises ValueError on a non-numeric `limit`
    # param, which would surface as a 500 — confirm upstream validation.
    limit = min(100, int(request.GET.get("limit", 10)))
    environments = [
        e.id for e in get_environments(request, team.organization)
    ]
    # Only the first requested environment is applied as a filter.
    group_environment_filter = (Q(
        groupenvironment__environment_id=environments[0]) if environments else Q())
    # Oldest first: unresolved groups still active within the last 90
    # days, ordered by when they were first seen.
    # NOTE(review): naive `datetime.now()` is compared against a model
    # datetime here; sibling endpoints use `timezone.now()` — confirm
    # the project's USE_TZ expectations.
    group_list = list(
        Group.objects.filter_to_team(team).filter(
            group_environment_filter,
            status=GroupStatus.UNRESOLVED,
            last_seen__gt=datetime.now() - timedelta(days=90),
        ).order_by("first_seen")[:limit])
    return Response(
        serialize(
            group_list,
            request.user,
            GroupSerializer(environment_func=self._get_environment_func(
                request, team.organization_id)),
        ))
def get(self, request, group):
    """Return an issue's tag keys along with their top values.

    The optional ``key`` query parameter limits the response to
    specific tag keys.
    """
    requested_keys = [
        tagstore.prefix_reserved_key(raw)
        for raw in request.GET.getlist("key")
        if raw
    ] or None
    # Two callers: the 'Tags' tab wants the top 10 values, the tag
    # distribution bars want 9. This should ideally be specified by
    # the client instead of inferred from `key`.
    value_limit = 9 if requested_keys else 10
    environment_ids = [
        env.id for env in get_environments(request, group.project.organization)
    ]
    tag_keys = tagstore.get_group_tag_keys_and_top_values(
        group.project_id,
        group.id,
        environment_ids,
        keys=requested_keys,
        value_limit=value_limit,
    )
    return Response(serialize(tag_keys, request.user))
def get(self, request: Request, group) -> Response:
    """
    Retrieve the Latest Event for an Issue
    ``````````````````````````````````````
    Retrieves the details of the latest event for an issue.

    :pparam string group_id: the ID of the issue
    """
    environments = [
        env.name
        for env in get_environments(request, group.project.organization)
    ]
    event = group.get_latest_event_for_environments(environments)
    if not event:
        return Response({"detail": "No events found for group"}, status=404)

    # Lightweight path: serialize the event directly when the caller
    # only needs the stacktrace.
    if "stacktraceOnly" in request.GET.getlist("collapse", []):
        return Response(serialize(event, request.user, EventSerializer()))

    # Otherwise proxy to the project-level event endpoint, forwarding
    # the environment filter and group id.
    path = f"/projects/{event.organization.slug}/{event.project.slug}/events/{event.event_id}/"
    payload = {
        "environment": environments,
        "group_id": event.group_id,
    }
    try:
        return client.get(path, request=request, data=payload)
    except client.ApiError as exc:
        return Response(exc.body, status=exc.status_code)
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````

    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    # Parse the environment filter and search query; a malformed query
    # is a client error, while an empty result set short-circuits.
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(request, group, environments)
    except InvalidQuery as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        return Response([])

    start, end = get_date_range_from_params(request.GET, optional=True)
    try:
        return self._get_events_snuba(request, group, environments, query, tags, start, end)
    except GroupEventsError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
def get(self, request: Request, team) -> Response:
    """
    Return a time bucketed list of mean group resolution times for a given team.
    """
    if not features.has("organizations:team-insights", team.organization, actor=request.user):
        return Response({"detail": "You do not have the insights feature enabled"}, status=400)

    start, end = get_date_range_from_params(request.GET)
    # Truncate both endpoints to dates and shift them forward a day.
    # NOTE(review): shifting `start` forward drops the first day of the
    # requested range — confirm this is intentional (it mirrors the
    # day-shifting in the sibling team endpoints).
    end = end.date() + timedelta(days=1)
    start = start.date() + timedelta(days=1)
    environments = [e.id for e in get_environments(request, team.organization)]
    # Only the first requested environment is applied as a filter.
    grouphistory_environment_filter = (
        Q(group__groupenvironment__environment_id=environments[0]) if environments else Q()
    )
    # RESOLVED history rows in range, bucketed by day, each annotated
    # with its time-to-resolve (ttr).
    history_list = (
        GroupHistory.objects.filter_to_team(team)
        .filter(
            grouphistory_environment_filter,
            status=GroupHistoryStatus.RESOLVED,
            date_added__gte=start,
            date_added__lte=end,
        )
        .annotate(bucket=TruncDay("date_added"))
        .values("bucket", "prev_history_date")
        # We need to coalesce here since we won't store the initial `UNRESOLVED` row for every
        # group, since it's unnecessary and just takes extra storage.
        .annotate(
            ttr=F("date_added") - Coalesce(F("prev_history_date"), F("group__first_seen"))
        )
        .annotate(avg_ttr=Avg("ttr"))
    )
    # Accumulate total ttr and row counts per day bucket in Python.
    sums = defaultdict(lambda: {"sum": timedelta(), "count": 0})
    for gh in history_list:
        key = str(gh["bucket"].date())
        sums[key]["sum"] += gh["ttr"]
        sums[key]["count"] += 1
    # Emit one entry per day in the range, zero-filled when nothing was
    # resolved that day.
    avgs = {}
    current_day = start
    while current_day < end:
        key = str(current_day)
        if key in sums:
            avg = int((sums[key]["sum"] / sums[key]["count"]).total_seconds())
            count = sums[key]["count"]
        else:
            avg = count = 0
        avgs[key] = {"avg": avg, "count": count}
        current_day += timedelta(days=1)

    return Response(avgs)
def get(self, request, group, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key for an issue.
    When paginated can return at most 1000 values.

    :pparam string issue_id: the ID of the issue to retrieve.
    :pparam string key: the tag key to look the values up for.
    :auth: required
    """
    lookup_key = tagstore.prefix_reserved_key(key)
    environment_ids = [
        env.id for env in get_environments(request, group.project.organization)
    ]
    # Verify the tag key exists for this project before paginating.
    try:
        tagstore.get_tag_key(group.project_id, None, lookup_key)
    except tagstore.TagKeyNotFound:
        raise ResourceDoesNotExist

    # Map the user-facing sort option onto the tagstore ordering column.
    order_by = {
        "date": "-last_seen",
        "age": "-first_seen",
        "count": "-times_seen",
    }.get(request.GET.get("sort"), "-id")

    serializer_cls = UserTagValueSerializer(group.project_id) if key == "user" else None

    paginator = tagstore.get_group_tag_value_paginator(
        group.project_id, group.id, environment_ids, lookup_key, order_by=order_by
    )
    return self.paginate(
        request=request,
        paginator=paginator,
        on_results=lambda results: serialize(results, request.user, serializer_cls),
    )
def get_filter_params(self, request, project, date_filter_optional=False):
    """Similar to the version on the organization just for a single project."""
    # Time range comes from the request's query string; surface bad
    # parameters as a project-level events error.
    try:
        start, end = get_date_range_from_params(request.GET, optional=date_filter_optional)
    except InvalidParams as exc:
        raise ProjectEventsError(str(exc))

    filter_params = {"start": start, "end": end, "project_id": [project.id]}
    environment_names = [
        env.name for env in get_environments(request, project.organization)
    ]
    # Only include the environment key when the user is filtering.
    if environment_names:
        filter_params["environment"] = environment_names
    return filter_params
def get(self, request, group):
    """Get the current release in the group's project.

    Find the most recent release in the project associated with the issue being
    viewed, regardless of whether the issue has been reported in that release.
    (That is, the latest release in which the user might expect to have seen the
    issue.) Then, if the issue has indeed been seen in that release, provide a
    reference to it. If not, indicate so with a null value for "current
    release".

    If the user is filtering by environment, include only releases in those
    environments. If `environments` is empty, include all environments because
    the user is not filtering.
    """
    environments = get_environments(request, group.project.organization)
    # Wrap the lookup in a tracing span and record the raw inputs so
    # slow current-release queries can be diagnosed.
    with sentry_sdk.start_span(
            op="CurrentReleaseEndpoint.get.current_release") as span:
        span.set_data("Environment Count", len(environments))
        span.set_data(
            "Raw Parameters",
            {
                "group.id": group.id,
                "group.project_id": group.project_id,
                "group.project.organization_id": group.project.organization_id,
                "environments": [{
                    "id": e.id,
                    "name": e.name
                } for e in environments],
            },
        )
        current_release = self._get_current_release(group, environments)
        # `currentRelease` is null when the issue has not been seen in
        # the most recent release.
        data = {
            "currentRelease":
            serialize(current_release, request.user,
                      GroupReleaseWithStatsSerializer())
        }
    return Response(data)
def get(self, request: Request, team: Team) -> Response:
    """
    Return a time bucketed list of how old unresolved issues are.

    Buckets the team's unresolved groups (active within the last 90 days)
    by the age of their ``first_seen`` timestamp and returns a mapping of
    bucket label -> count, zero-filling every configured bucket.
    """
    if not features.has("organizations:team-insights", team.organization, actor=request.user):
        return Response(
            {"detail": "You do not have the insights feature enabled"}, status=400)
    environments = [
        e.id for e in get_environments(request, team.organization)
    ]
    # Only the first requested environment is applied as a filter.
    group_environment_filter = (
        Q(groupenvironment__environment_id=environments[0]) if environments else Q()
    )
    current_time = timezone.now()
    unresolved_ages = list(
        Group.objects.filter_to_team(team)
        .filter(
            group_environment_filter,
            status=GroupStatus.UNRESOLVED,
            # Fix: use the timezone-aware timestamp computed above; the
            # original compared a naive `datetime.now()` against the
            # aware `last_seen` column while the bucket boundaries below
            # already use `timezone.now()`.
            last_seen__gt=current_time - timedelta(days=90),
        )
        .annotate(
            bucket=Case(
                *[
                    When(first_seen__gt=current_time - delta, then=Value(label))
                    for (label, delta) in buckets
                ],
                default=Value(OLDEST_LABEL),
                output_field=TextField(),
            )
        )
        .values("bucket")
        .annotate(count=Count("id"))
    )
    unresolved_ages_dict = {
        unresolved["bucket"]: unresolved["count"] for unresolved in unresolved_ages
    }
    # Zero-fill buckets with no matching groups so the client always
    # receives a complete series.
    for label, _ in buckets:
        unresolved_ages_dict.setdefault(label, 0)
    unresolved_ages_dict.setdefault(OLDEST_LABEL, 0)
    return Response(unresolved_ages_dict)
def get(self, request: Request, team: Team) -> Response:
    """
    Returns cumulative counts of unresolved groups per day within the stats period time range.
    Response:
    {
        <project_id>: {
            <isoformat_date>: {"unresolved": <unresolved_count>},
            ...
        }
        ...
    }
    """
    if not features.has("organizations:team-insights", team.organization, actor=request.user):
        return Response(
            {"detail": "You do not have the insights feature enabled"}, status=400)

    # Team has no projects
    project_list = Project.objects.get_for_team_ids(team_ids=[team.id])
    if len(project_list) == 0:
        return Response({})

    start, end = get_date_range_from_params(request.GET)
    # Snap both endpoints to midnight, then shift forward one day.
    # NOTE(review): shifting `start` forward drops the first day of the
    # requested range — confirm against `calculate_unresolved_counts`.
    end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    start = start.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    environments = [
        e.id for e in get_environments(request, team.organization)
    ]
    # Only a single environment filter is supported downstream.
    environment_id = environments[0] if environments else None
    return Response(
        calculate_unresolved_counts(team, project_list, start, end, environment_id))
def get(self, request: Request, group) -> Response:
    """
    Retrieve an Issue
    `````````````````
    Return details on an individual issue. This returns the basic stats for
    the issue (title, last seen, first seen), some overall numbers (number
    of comments, user reports) as well as the summarized event data.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    # Imported locally so the snuba exception type is available for the
    # rate-limit handler below.
    from sentry.utils import snuba

    try:
        # TODO(dcramer): handle unauthenticated/public response
        organization = group.project.organization
        environments = get_environments(request, organization)
        environment_ids = [e.id for e in environments]
        expand = request.GET.getlist("expand", [])
        collapse = request.GET.getlist("collapse", [])

        # WARNING: the rest of this endpoint relies on this serializer
        # populating the cache SO don't move this :)
        data = serialize(
            group, request.user,
            GroupSerializerSnuba(environment_ids=environment_ids))

        # TODO: these probably should be another endpoint
        activity = self._get_activity(request, group, num=100)
        seen_by = self._get_seen_by(request, group)

        # Release info is skipped when the client asked to collapse it.
        if "release" not in collapse:
            first_release, last_release = get_first_last_release(
                request, group)
            data.update({
                "firstRelease": first_release,
                "lastRelease": last_release,
            })

        get_range = functools.partial(tsdb.get_range,
                                      environment_ids=environment_ids)
        tags = tagstore.get_group_tag_keys(group.project_id, group.id,
                                           environment_ids, limit=100)
        # User reports are filtered by environment only when one was
        # requested.
        if not environment_ids:
            user_reports = UserReport.objects.filter(group_id=group.id)
        else:
            user_reports = UserReport.objects.filter(
                group_id=group.id, environment_id__in=environment_ids)

        now = timezone.now()
        # TSDB rollups: hourly buckets over the last day, daily buckets
        # over the last month.
        hourly_stats = tsdb.rollup(
            get_range(model=tsdb.models.group,
                      keys=[group.id],
                      end=now,
                      start=now - timedelta(days=1)),
            3600,
        )[group.id]
        daily_stats = tsdb.rollup(
            get_range(
                model=tsdb.models.group,
                keys=[group.id],
                end=now,
                start=now - timedelta(days=30),
            ),
            3600 * 24,
        )[group.id]

        participants = GroupSubscriptionManager.get_participating_users(
            group)

        # Inbox details are opt-in via ?expand=inbox.
        if "inbox" in expand:
            inbox_map = get_inbox_details([group])
            inbox_reason = inbox_map.get(group.id)
            data.update({"inbox": inbox_reason})

        action_list = self._get_actions(request, group)
        data.update({
            "activity": serialize(activity, request.user),
            "seenBy": seen_by,
            "participants": serialize(participants, request.user),
            "pluginActions": action_list,
            "pluginIssues": self._get_available_issue_plugins(request, group),
            "pluginContexts": self._get_context_plugins(request, group),
            "userReportCount": user_reports.count(),
            "tags": sorted(serialize(tags, request.user), key=lambda x: x["name"]),
            "stats": {
                "24h": hourly_stats,
                "30d": daily_stats
            },
        })
        # Outcome metrics are recorded on every path so response codes
        # can be monitored per detail tag.
        metrics.incr(
            "group.update.http_response",
            sample_rate=1.0,
            tags={
                "status": 200,
                "detail": "group_details:get:response"
            },
        )
        return Response(data)
    except snuba.RateLimitExceeded:
        metrics.incr(
            "group.update.http_response",
            sample_rate=1.0,
            tags={
                "status": 429,
                "detail": "group_details:get:snuba.RateLimitExceeded"
            },
        )
        raise
    except Exception:
        metrics.incr(
            "group.update.http_response",
            sample_rate=1.0,
            tags={
                "status": 500,
                "detail": "group_details:get:Exception"
            },
        )
        raise
def get_environments(self, request, organization):
    """Resolve the environments requested on this request for *organization*.

    Thin delegation to the module-level ``get_environments`` helper.
    """
    resolved = get_environments(request, organization)
    return resolved
def get(self, request: Request, team: Team) -> Response:
    """
    Returns a dict of team projects, and a time-series dict of issue stat breakdowns for each.

    If a list of statuses is passed then we return the count of each status and the totals.
    Otherwise we return the count of reviewed issues and the total count of issues.
    """
    if not features.has("organizations:team-insights", team.organization, actor=request.user):
        return Response(
            {"detail": "You do not have the insights feature enabled"}, status=400)
    start, end = get_date_range_from_params(request.GET)
    # Snap to midnight, then shift both endpoints forward one day.
    # NOTE(review): shifting `start` forward drops the first day of the
    # requested range — mirrors the sibling team endpoints; confirm.
    end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    start = start.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    environments = [
        e.id for e in get_environments(request, team.organization)
    ]
    # "New" format: explicit status list requested by the client.
    # Legacy format: UNRESOLVED plus the actioned statuses, reported as
    # reviewed/total counts.
    if "statuses" in request.GET:
        statuses = [
            string_to_status_lookup[status]
            for status in request.GET.getlist("statuses")
        ]
        new_format = True
    else:
        statuses = [GroupHistoryStatus.UNRESOLVED] + ACTIONED_STATUSES
        new_format = False
    new_issues = []

    # Template dict for a single day's counts, deep-copied per day.
    base_day_format = {"total": 0}
    if new_format:
        for status in statuses:
            base_day_format[status_to_string_lookup[status]] = 0
    else:
        base_day_format["reviewed"] = 0

    # NEW has no GroupHistory rows; derive it from Group.first_seen
    # instead and merge it into the series below.
    if GroupHistoryStatus.NEW in statuses:
        group_environment_filter = (Q(
            groupenvironment__environment_id=environments[0]) if environments else Q())
        statuses.remove(GroupHistoryStatus.NEW)
        new_issues = list(
            Group.objects.filter_to_team(team).filter(
                group_environment_filter,
                first_seen__gte=start,
                first_seen__lte=end).annotate(bucket=TruncDay(
                    "first_seen")).order_by("bucket").values(
                        "project", "bucket").annotate(
                            count=Count("id"),
                            status=Value(GroupHistoryStatus.NEW,
                                         output_field=IntegerField()),
                        ))
    # Only the first requested environment is applied as a filter.
    group_history_enviornment_filter = (Q(
        group__groupenvironment__environment_id=environments[0])
                                        if environments else Q())
    # Daily counts per (project, status) from the history table.
    bucketed_issues = (GroupHistory.objects.filter_to_team(team).filter(
        group_history_enviornment_filter,
        status__in=statuses,
        date_added__gte=start,
        date_added__lte=end,
    ).annotate(bucket=TruncDay("date_added")).order_by("bucket").values(
        "project", "bucket", "status").annotate(count=Count("id")))

    # Pre-build a zero-filled day series covering the whole range.
    current_day, date_series_dict = start, {}
    while current_day < end:
        date_series_dict[current_day.isoformat()] = copy.deepcopy(
            base_day_format)
        current_day += timedelta(days=1)

    project_list = Project.objects.get_for_team_ids(team_ids=[team.id])
    agg_project_counts = {
        project.id: copy.deepcopy(date_series_dict)
        for project in project_list
    }
    # Merge DB rows (history counts plus synthesized NEW counts) into
    # the per-project day series.
    for r in chain(bucketed_issues, new_issues):
        bucket = agg_project_counts[r["project"]][r["bucket"].isoformat()]
        bucket["total"] += r["count"]
        if not new_format and r["status"] != GroupHistoryStatus.UNRESOLVED:
            bucket["reviewed"] += r["count"]
        if new_format:
            bucket[status_to_string_lookup[r["status"]]] += r["count"]

    return Response(agg_project_counts)
def get_environments(self, request, organization):
    """Look up the requested environments for *organization*.

    Delegates directly to the module-level ``get_environments`` helper.
    """
    environments = get_environments(request, organization)
    return environments
def get_environments(self, request, organization):
    """Resolve requested environments, timing the lookup with a tracing span."""
    span = sentry_sdk.start_span(op="PERF: Org.get_environments")
    with span:
        return get_environments(request, organization)
def get(self, request, group):
    """
    Retrieve an Issue
    `````````````````
    Return details on an individual issue. This returns the basic stats for
    the issue (title, last seen, first seen), some overall numbers (number
    of comments, user reports) as well as the summarized event data.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    # TODO(dcramer): handle unauthenticated/public response
    # TODO(jess): This can be removed when tagstore v2 is deprecated
    use_snuba = request.GET.get('enable_snuba') == '1'
    environments = get_environments(request, group.project.organization)
    environment_ids = [e.id for e in environments]
    if use_snuba:
        # WARNING: the rest of this endpoint relies on this serializer
        # populating the cache SO don't move this :)
        data = serialize(
            group, request.user, GroupSerializerSnuba(
                environment_ids=environment_ids,
            )
        )
    else:
        # TODO(jess): This is just to ensure we're not breaking the old
        # issue page somehow -- non-snuba tagstore versions will raise
        # if more than one env is passed
        if environments:
            environments = environments[:1]
            environment_ids = environment_ids[:1]
        data = serialize(
            group, request.user, GroupSerializer(
                # Just in case multiple envs are passed, let's make
                # sure we're using the same one for all the stats
                environment_func=lambda: environments[0] if environments else None
            )
        )

    # TODO: these probably should be another endpoint
    activity = self._get_activity(request, group, num=100)
    seen_by = self._get_seen_by(request, group)

    # Only look up the last release when a first release exists.
    first_release = group.get_first_release()
    if first_release is not None:
        last_release = group.get_last_release()
    else:
        last_release = None

    action_list = self._get_actions(request, group)

    # Expand release references into full release info payloads.
    if first_release:
        first_release = self._get_release_info(request, group, first_release)
    if last_release:
        last_release = self._get_release_info(request, group, last_release)

    get_range = functools.partial(tsdb.get_range, environment_ids=environment_ids)

    tags = tagstore.get_group_tag_keys(
        group.project_id, group.id,
        environment_ids, limit=100)
    # User reports are filtered by environment only when one was requested.
    if not environment_ids:
        user_reports = UserReport.objects.filter(group=group)
    else:
        user_reports = UserReport.objects.filter(
            group=group, environment_id__in=environment_ids
        )

    now = timezone.now()
    # TSDB rollups: hourly buckets over the last day, daily buckets over
    # the last month.
    hourly_stats = tsdb.rollup(
        get_range(
            model=tsdb.models.group,
            keys=[group.id],
            end=now,
            start=now - timedelta(days=1),
        ), 3600
    )[group.id]
    daily_stats = tsdb.rollup(
        get_range(
            model=tsdb.models.group,
            keys=[group.id],
            end=now,
            start=now - timedelta(days=30),
        ), 3600 * 24
    )[group.id]

    participants = list(
        User.objects.filter(
            groupsubscription__is_active=True,
            groupsubscription__group=group,
        )
    )

    data.update(
        {
            'firstRelease': first_release,
            'lastRelease': last_release,
            'activity': serialize(activity, request.user),
            'seenBy': seen_by,
            'participants': serialize(participants, request.user),
            'pluginActions': action_list,
            'pluginIssues': self._get_available_issue_plugins(request, group),
            'pluginContexts': self._get_context_plugins(request, group),
            'userReportCount': user_reports.count(),
            'tags': sorted(serialize(tags, request.user), key=lambda x: x['name']),
            'stats': {
                '24h': hourly_stats,
                '30d': daily_stats,
            }
        }
    )

    # the current release is the 'latest seen' release within the
    # environment even if it hasnt affected this issue
    if environments:
        try:
            current_release = GroupRelease.objects.filter(
                group_id=group.id,
                environment__in=[env.name for env in environments],
                release_id=ReleaseEnvironment.objects.filter(
                    release_id__in=ReleaseProject.objects.filter(project_id=group.project_id
                    ).values_list('release_id', flat=True),
                    organization_id=group.project.organization_id,
                    environment_id__in=environment_ids,
                ).order_by('-first_seen').values_list('release_id', flat=True)[:1],
            )[0]
        except IndexError:
            # No matching GroupRelease row: report a null current release.
            current_release = None

        data.update({
            'currentRelease': serialize(
                current_release, request.user, GroupReleaseWithStatsSerializer()
            )
        })

    return Response(data)
def get(self, request, group):
    """
    Retrieve an Issue
    `````````````````
    Return details on an individual issue. This returns the basic stats for
    the issue (title, last seen, first seen), some overall numbers (number
    of comments, user reports) as well as the summarized event data.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    # TODO(dcramer): handle unauthenticated/public response
    organization = group.project.organization
    environments = get_environments(request, organization)
    environment_ids = [e.id for e in environments]

    # WARNING: the rest of this endpoint relies on this serializer
    # populating the cache SO don't move this :)
    data = serialize(group, request.user,
                     GroupSerializerSnuba(environment_ids=environment_ids))

    # TODO: these probably should be another endpoint
    activity = self._get_activity(request, group, num=100)
    seen_by = self._get_seen_by(request, group)

    # Only look up the last release when a first release exists.
    first_release = group.get_first_release()
    if first_release is not None:
        last_release = group.get_last_release()
    else:
        last_release = None

    action_list = self._get_actions(request, group)

    # Expand release references into full release info payloads.
    if first_release:
        first_release = self._get_release_info(request, group, first_release)
    if last_release:
        last_release = self._get_release_info(request, group, last_release)

    get_range = functools.partial(tsdb.get_range,
                                  environment_ids=environment_ids)

    tags = tagstore.get_group_tag_keys(group.project_id, group.id,
                                       environment_ids, limit=100)
    # User reports are filtered by environment only when one was requested.
    if not environment_ids:
        user_reports = UserReport.objects.filter(group=group)
    else:
        user_reports = UserReport.objects.filter(
            group=group, environment_id__in=environment_ids)

    now = timezone.now()
    # TSDB rollups: hourly buckets over the last day, daily buckets over
    # the last month.
    hourly_stats = tsdb.rollup(
        get_range(model=tsdb.models.group,
                  keys=[group.id],
                  end=now,
                  start=now - timedelta(days=1)),
        3600,
    )[group.id]
    daily_stats = tsdb.rollup(
        get_range(model=tsdb.models.group,
                  keys=[group.id],
                  end=now,
                  start=now - timedelta(days=30)),
        3600 * 24,
    )[group.id]

    participants = list(
        User.objects.filter(groupsubscription__is_active=True,
                            groupsubscription__group=group))

    data.update({
        "firstRelease": first_release,
        "lastRelease": last_release,
        "activity": serialize(activity, request.user),
        "seenBy": seen_by,
        "participants": serialize(participants, request.user),
        "pluginActions": action_list,
        "pluginIssues": self._get_available_issue_plugins(request, group),
        "pluginContexts": self._get_context_plugins(request, group),
        "userReportCount": user_reports.count(),
        "tags": sorted(serialize(tags, request.user), key=lambda x: x["name"]),
        "stats": {
            "24h": hourly_stats,
            "30d": daily_stats
        },
    })

    # the current release is the 'latest seen' release within the
    # environment even if it hasnt affected this issue
    if environments:
        try:
            current_release = GroupRelease.objects.filter(
                group_id=group.id,
                environment__in=[env.name for env in environments],
                release_id=ReleaseEnvironment.objects.filter(
                    release_id__in=ReleaseProject.objects.filter(
                        project_id=group.project_id).values_list(
                            "release_id", flat=True),
                    organization_id=group.project.organization_id,
                    environment_id__in=environment_ids,
                ).order_by("-first_seen").values_list("release_id",
                                                      flat=True)[:1],
            )[0]
        except IndexError:
            # No matching GroupRelease row: report a null current release.
            current_release = None

        data.update({
            "currentRelease":
            serialize(current_release, request.user,
                      GroupReleaseWithStatsSerializer())
        })

    return Response(data)
def get(self, request, group):
    """
    Retrieve an Issue
    `````````````````
    Return details on an individual issue. This returns the basic stats for
    the issue (title, last seen, first seen), some overall numbers (number
    of comments, user reports) as well as the summarized event data.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    # TODO(dcramer): handle unauthenticated/public response
    # TODO(jess): This can be removed when tagstore v2 is deprecated
    use_snuba = request.GET.get('enable_snuba') == '1'
    environments = get_environments(request, group.project.organization)
    environment_ids = [e.id for e in environments]
    if use_snuba:
        # WARNING: the rest of this endpoint relies on this serializer
        # populating the cache SO don't move this :)
        data = serialize(
            group, request.user,
            GroupSerializerSnuba(environment_ids=environment_ids, ))
    else:
        # TODO(jess): This is just to ensure we're not breaking the old
        # issue page somehow -- non-snuba tagstore versions will raise
        # if more than one env is passed
        if environments:
            environments = environments[:1]
            environment_ids = environment_ids[:1]
        data = serialize(
            group,
            request.user,
            GroupSerializer(
                # Just in case multiple envs are passed, let's make
                # sure we're using the same one for all the stats
                environment_func=lambda: environments[0] if environments else None))

    # TODO: these probably should be another endpoint
    activity = self._get_activity(request, group, num=100)
    seen_by = self._get_seen_by(request, group)

    # Only look up the last release when a first release exists.
    first_release = group.get_first_release()
    if first_release is not None:
        last_release = group.get_last_release()
    else:
        last_release = None

    action_list = self._get_actions(request, group)

    # Expand release references into full release info payloads.
    if first_release:
        first_release = self._get_release_info(request, group, first_release)
    if last_release:
        last_release = self._get_release_info(request, group, last_release)

    get_range = functools.partial(tsdb.get_range,
                                  environment_ids=environment_ids)

    tags = tagstore.get_group_tag_keys(group.project_id, group.id,
                                       environment_ids, limit=100)
    # User reports are filtered by environment only when one was requested.
    if not environment_ids:
        user_reports = UserReport.objects.filter(group=group)
    else:
        user_reports = UserReport.objects.filter(
            group=group, environment_id__in=environment_ids)

    now = timezone.now()
    # TSDB rollups: hourly buckets over the last day, daily buckets over
    # the last month.
    hourly_stats = tsdb.rollup(
        get_range(
            model=tsdb.models.group,
            keys=[group.id],
            end=now,
            start=now - timedelta(days=1),
        ), 3600)[group.id]
    daily_stats = tsdb.rollup(
        get_range(
            model=tsdb.models.group,
            keys=[group.id],
            end=now,
            start=now - timedelta(days=30),
        ), 3600 * 24)[group.id]

    participants = list(
        User.objects.filter(
            groupsubscription__is_active=True,
            groupsubscription__group=group,
        ))

    data.update({
        'firstRelease': first_release,
        'lastRelease': last_release,
        'activity': serialize(activity, request.user),
        'seenBy': seen_by,
        'participants': serialize(participants, request.user),
        'pluginActions': action_list,
        'pluginIssues': self._get_available_issue_plugins(request, group),
        'pluginContexts': self._get_context_plugins(request, group),
        'userReportCount': user_reports.count(),
        'tags': sorted(serialize(tags, request.user), key=lambda x: x['name']),
        'stats': {
            '24h': hourly_stats,
            '30d': daily_stats,
        }
    })

    # the current release is the 'latest seen' release within the
    # environment even if it hasnt affected this issue
    if environments:
        try:
            current_release = GroupRelease.objects.filter(
                group_id=group.id,
                environment__in=[env.name for env in environments],
                release_id=ReleaseEnvironment.objects.filter(
                    release_id__in=ReleaseProject.objects.filter(
                        project_id=group.project_id).values_list(
                            'release_id', flat=True),
                    organization_id=group.project.organization_id,
                    environment_id__in=environment_ids,
                ).order_by('-first_seen').values_list('release_id',
                                                      flat=True)[:1],
            )[0]
        except IndexError:
            # No matching GroupRelease row: report a null current release.
            current_release = None

        data.update({
            'currentRelease':
            serialize(current_release, request.user,
                      GroupReleaseWithStatsSerializer())
        })

    return Response(data)