def test_no_params(self):
    # No params: range defaults to the maximum allowed stats period.
    # (Assumes the test class freezes time; otherwise the two now() calls differ.)
    start, end = get_date_range_from_params({})
    assert start == timezone.now() - MAX_STATS_PERIOD
    assert end == timezone.now()
    # With optional=True an empty param dict yields an open-ended range.
    start, end = get_date_range_from_params({}, optional=True)
    assert start is None
    assert end is None
def test_no_params(self):
    # No params: range defaults to the maximum allowed stats period.
    # (Assumes the test class freezes time; otherwise the two now() calls differ.)
    start, end = get_date_range_from_params({})
    assert start == timezone.now() - MAX_STATS_PERIOD
    assert end == timezone.now()
    # With optional=True an empty param dict yields an open-ended range.
    start, end = get_date_range_from_params({}, optional=True)
    assert start is None
    assert end is None
def test_date_range(self):
    # Absolute start/end dates are parsed as UTC midnights.
    params = {"start": "2018-11-01", "end": "2018-11-07"}
    start, end = get_date_range_from_params(params)
    assert start == datetime.datetime(2018, 11, 1, tzinfo=timezone.utc)
    assert end == datetime.datetime(2018, 11, 7, tzinfo=timezone.utc)
    # An empty window (start == end) must be rejected.
    with self.assertRaises(InvalidParams):
        get_date_range_from_params(
            {"start": "2018-11-01T00:00:00", "end": "2018-11-01T00:00:00"}
        )
def test_date_range(self):
    # Absolute start/end dates are parsed as UTC midnights.
    params = {'start': '2018-11-01', 'end': '2018-11-07'}
    start, end = get_date_range_from_params(params)
    assert start == datetime.datetime(2018, 11, 1, tzinfo=timezone.utc)
    assert end == datetime.datetime(2018, 11, 7, tzinfo=timezone.utc)
    # Supplying only one bound of the range is invalid.
    with self.assertRaises(InvalidParams):
        get_date_range_from_params({'start': '2018-11-01'})
def test_date_range(self):
    # Absolute start/end dates are parsed as UTC midnights.
    params = {'start': '2018-11-01', 'end': '2018-11-07'}
    start, end = get_date_range_from_params(params)
    assert start == datetime.datetime(2018, 11, 1, tzinfo=timezone.utc)
    assert end == datetime.datetime(2018, 11, 7, tzinfo=timezone.utc)
    # Supplying only one bound of the range is invalid.
    with self.assertRaises(InvalidParams):
        get_date_range_from_params({'start': '2018-11-01'})
def test_stats_period(self):
    # Each statsPeriod suffix maps onto the corresponding timedelta unit.
    cases = [
        ("14h", datetime.timedelta(hours=14)),
        ("14d", datetime.timedelta(days=14)),
        ("60m", datetime.timedelta(minutes=60)),
        ("3600s", datetime.timedelta(seconds=3600)),
        ("91d", datetime.timedelta(days=91)),
    ]
    for period, expected_delta in cases:
        start, end = get_date_range_from_params({"statsPeriod": period})
        assert end - expected_delta == start
def test_stats_period(self):
    # Each statsPeriod suffix maps onto the corresponding timedelta unit.
    cases = [
        ('14h', datetime.timedelta(hours=14)),
        ('14d', datetime.timedelta(days=14)),
        ('60m', datetime.timedelta(minutes=60)),
        ('3600s', datetime.timedelta(seconds=3600)),
        ('91d', datetime.timedelta(days=91)),
    ]
    for period, expected_delta in cases:
        start, end = get_date_range_from_params({'statsPeriod': period})
        assert end - expected_delta == start
def test_stats_period(self):
    # Each statsPeriod suffix maps onto the corresponding timedelta unit.
    cases = [
        ('14h', datetime.timedelta(hours=14)),
        ('14d', datetime.timedelta(days=14)),
        ('60m', datetime.timedelta(minutes=60)),
        ('3600s', datetime.timedelta(seconds=3600)),
        ('91d', datetime.timedelta(days=91)),
    ]
    for period, expected_delta in cases:
        start, end = get_date_range_from_params({'statsPeriod': period})
        assert end - expected_delta == start
def get(self, request: Request, organization) -> Response:
    """Run an anomaly-detection query for transaction counts.

    Builds a count() time series over the requested date range and forwards
    it, together with granularity metadata, to the anomaly detection service.
    """
    if not self.has_feature(organization, request):
        return Response(status=404)

    start, end = get_date_range_from_params(request.GET)
    # Granularity and query window are derived from the requested range.
    time_params = get_time_params(start, end)
    query_params = self.get_snuba_params(request, organization)

    query = request.GET.get("query")
    # Always restrict the search to transaction events.
    query = f"{query} event.type:transaction" if query else "event.type:transaction"

    datetime_format = "%Y-%m-%d %H:%M:%S"
    ads_request = {
        "query": query,
        "params": query_params,
        "start": start.strftime(datetime_format),
        "end": end.strftime(datetime_format),
        "granularity": time_params.granularity,
    }

    # overwrite relevant time params
    query_params["start"] = time_params.query_start
    query_params["end"] = time_params.query_end

    with self.handle_query_errors():
        snuba_response = timeseries_query(
            selected_columns=["count()"],
            query=query,
            params=query_params,
            rollup=time_params.granularity,
            referrer="transaction-anomaly-detection",
            zerofill_results=False,
        )
        ads_request["data"] = snuba_response.data["data"]

    return get_anomalies(ads_request)
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````
    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(
            request,
            group,
            environments,
        )
    except InvalidQuery as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        # No matching events is represented as an empty list, not an error.
        return Response([])

    # Snuba backend is opt-in per request or enabled via the global option.
    use_snuba = (
        request.GET.get('enable_snuba') == '1'
        or options.get('snuba.events-queries.enabled')
    )

    backend = self._get_events_snuba if use_snuba else self._get_events_legacy
    # NOTE(review): InvalidParams raised by get_date_range_from_params is
    # not handled here and would surface as a 500 — consider a 400 response.
    start, end = get_date_range_from_params(request.GET, optional=True)

    try:
        return backend(request, group, environments, query, tags, start, end)
    except GroupEventsError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````
    This endpoint lists an issue's events.

    :qparam bool full: if this is set to true then the event payload will
                       include the full event body, including the stacktrace.
                       Set to 1 to enable.
    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(
            request, group, environments)
    except InvalidQuery as exc:
        return Response({"detail": six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        # No matching events is represented as an empty list, not an error.
        return Response([])

    try:
        start, end = get_date_range_from_params(request.GET, optional=True)
    except InvalidParams as e:
        # Malformed date params are a client error (400).
        raise ParseError(detail=six.text_type(e))

    try:
        return self._get_events_snuba(request, group, environments, query, tags, start, end)
    except GroupEventsError as exc:
        raise ParseError(detail=six.text_type(exc))
def validate(self, data):
    """Validate an export request.

    Resolves and authorizes projects, caps discover fields, and pins the
    date range to absolute timestamps so the export stays reproducible.
    """
    organization = self.context["organization"]
    query_info = data["query_info"]

    # Validate the project field, if provided
    # A PermissionDenied error will be raised in `get_projects_by_id` if the request is invalid
    project_query = query_info.get("project")
    if project_query:
        get_projects_by_id = self.context["get_projects_by_id"]
        # Coerce the query into a set
        if isinstance(project_query, list):
            projects = get_projects_by_id(set(map(int, project_query)))
        else:
            projects = get_projects_by_id({int(project_query)})
        query_info["project"] = [project.id for project in projects]

    # Discover Pre-processing
    if data["query_type"] == ExportQueryType.DISCOVER_STR:
        # coerce the fields into a list as needed
        fields = query_info.get("field", [])
        if not isinstance(fields, list):
            fields = [fields]
        if len(fields) > MAX_FIELDS:
            detail = f"You can export up to {MAX_FIELDS} fields at a time. Please delete some and try again."
            raise serializers.ValidationError(detail)
        query_info["field"] = fields

        if "project" not in query_info:
            projects = self.context["get_projects"]()
            query_info["project"] = [project.id for project in projects]

        # make sure to fix the export start/end times to ensure consistent results
        try:
            start, end = get_date_range_from_params(query_info)
        except InvalidParams as e:
            sentry_sdk.set_tag("query.error_reason", "Invalid date params")
            raise serializers.ValidationError(str(e))

        # Drop any relative periods in favor of the resolved absolute bounds.
        if "statsPeriod" in query_info:
            del query_info["statsPeriod"]
        if "statsPeriodStart" in query_info:
            del query_info["statsPeriodStart"]
        if "statsPeriodEnd" in query_info:
            del query_info["statsPeriodEnd"]
        query_info["start"] = start.isoformat()
        query_info["end"] = end.isoformat()

        # validate the query string by trying to parse it
        processor = DiscoverProcessor(
            discover_query=query_info,
            organization_id=organization.id,
        )
        try:
            get_filter(query_info["query"], processor.params)
        except InvalidSearchQuery as err:
            raise serializers.ValidationError(str(err))

    return data
def get(self, request, organization):
    """Paginated list of raw Snuba events for an organization.

    Supports a ``query`` substring filter on the event message plus the
    standard date range and project parameters.
    """
    query = request.GET.get('query')
    conditions = []
    if query:
        # NOTE(review): the user-supplied query is interpolated into a Snuba
        # expression string — confirm escaping is handled downstream.
        conditions.append(
            [['positionCaseInsensitive', ['message', "'%s'" % (query,)]], '!=', 0])

    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        # str(exc) instead of exc.message: BaseException.message is
        # deprecated (Python 2.6+) and raises AttributeError on Python 3.
        return Response({'detail': str(exc)}, status=400)

    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)

    data_fn = partial(
        # extract 'data' from raw_query result
        lambda *args, **kwargs: raw_query(*args, **kwargs)['data'],
        start=start,
        end=end,
        conditions=conditions,
        filter_keys={'project_id': project_ids},
        selected_columns=SnubaEvent.selected_columns,
        orderby='-timestamp',
    )

    return self.paginate(
        request=request,
        on_results=lambda results: serialize(
            [SnubaEvent(row) for row in results], request.user),
        paginator=GenericOffsetPaginator(data_fn=data_fn)
    )
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````
    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(
            request,
            group,
            environments,
        )
    except InvalidQuery as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        # No matching events is represented as an empty list, not an error.
        return Response([])

    # NOTE(review): InvalidParams raised here is not handled and would
    # surface as a 500 — consider converting it into a 400 response.
    start, end = get_date_range_from_params(request.GET, optional=True)

    try:
        return self._get_events_snuba(request, group, environments, query, tags, start, end)
    except GroupEventsError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
def validate(self, data):
    """Cross-field validation: exactly one date filter, at least one field
    or aggregation, then resolve the range onto ``data['start'|'end']``.
    """
    data['arrayjoin'] = self.arrayjoin

    # prevent conflicting date ranges from being supplied
    date_fields = ['start', 'statsPeriod', 'range', 'statsPeriodStart']
    date_fields_provided = len([data.get(f) for f in date_fields if data.get(f) is not None])
    if date_fields_provided == 0:
        raise serializers.ValidationError('You must specify a date filter')
    elif date_fields_provided > 1:
        raise serializers.ValidationError('Conflicting date filters supplied')

    if not data.get('fields') and not data.get('aggregations'):
        raise serializers.ValidationError('Specify at least one field or aggregation')

    try:
        start, end = get_date_range_from_params({
            'start': data.get('start'),
            'end': data.get('end'),
            'statsPeriod': data.get('statsPeriod') or data.get('range'),
            'statsPeriodStart': data.get('statsPeriodStart'),
            'statsPeriodEnd': data.get('statsPeriodEnd'),
        }, optional=True)
    except InvalidParams as exc:
        # str(exc) rather than exc.message: ``.message`` is deprecated on
        # Python 2.6+ and absent entirely on Python 3.
        raise serializers.ValidationError(str(exc))

    if start is None or end is None:
        raise serializers.ValidationError('Either start and end dates or range is required')

    data['start'] = start
    data['end'] = end
    return data
def validate(self, data):
    """Cross-field validation: exactly one date filter must be supplied;
    resolve it to absolute start/end values on ``data``.
    """
    data['arrayjoin'] = self.arrayjoin

    # prevent conflicting date ranges from being supplied
    date_fields = ['start', 'statsPeriod', 'range', 'statsPeriodStart']
    date_fields_provided = len([data.get(f) for f in date_fields if data.get(f) is not None])
    if date_fields_provided == 0:
        raise serializers.ValidationError('You must specify a date filter')
    elif date_fields_provided > 1:
        raise serializers.ValidationError('Conflicting date filters supplied')

    try:
        start, end = get_date_range_from_params({
            'start': data.get('start'),
            'end': data.get('end'),
            'statsPeriod': data.get('statsPeriod') or data.get('range'),
            'statsPeriodStart': data.get('statsPeriodStart'),
            'statsPeriodEnd': data.get('statsPeriodEnd'),
        }, optional=True, validate_window=False)
    except InvalidParams as exc:
        # str(exc) rather than exc.message: ``.message`` is deprecated on
        # Python 2.6+ and absent entirely on Python 3.
        raise serializers.ValidationError(str(exc))

    if start is None or end is None:
        raise serializers.ValidationError('Either start and end dates or range is required')

    data['start'] = start
    data['end'] = end
    return data
def get_filter_params(self, request, organization):
    """Extract the common event-endpoint filters (date range, project ids,
    environments), raising OrganizationEventsError on bad input and
    NoProjects when the user has no accessible projects.
    """
    # get the top level params -- projects, time range, and environment
    # from the request
    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        # str(exc) instead of exc.message: BaseException.message is
        # deprecated and raises AttributeError on Python 3.
        raise OrganizationEventsError(str(exc))

    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        raise OrganizationEventsError('Invalid project ids')
    if not project_ids:
        raise NoProjects

    environments = self.get_environments(request, organization)
    params = {
        'start': start,
        'end': end,
        'project_id': project_ids,
    }
    if environments:
        params['environment'] = environments

    return params
def __init__(self, organization_id, discover_query):
    """Prepare a discover export: resolve projects/environments, the fixed
    date range, header fields (including equations) and the paged data fn.
    """
    self.projects = self.get_projects(organization_id, discover_query)
    self.environments = self.get_environments(organization_id, discover_query)
    self.start, self.end = get_date_range_from_params(discover_query)
    self.params = {
        "organization_id": organization_id,
        "project_id": [project.id for project in self.projects],
        "start": self.start,
        "end": self.end,
    }
    # make sure to only include environment if any are given
    # an empty list DOES NOT work
    if self.environments:
        self.params["environment"] = self.environments

    equations = discover_query.get("equations", [])
    # BUG FIX: the original `map(...) + equations` raises TypeError on
    # Python 3 (map returns an iterator, not a list — and this file uses
    # f-strings, so it is Python 3). Build a real list instead.
    self.header_fields = [
        get_function_alias(field) for field in discover_query["field"]
    ] + equations
    self.equation_aliases = {
        f"equation[{index}]": equation for index, equation in enumerate(equations)
    }
    self.data_fn = self.get_data_fn(
        fields=discover_query["field"],
        equations=equations,
        query=discover_query["query"],
        params=self.params,
        sort=discover_query.get("sort"),
    )
def get(self, request, group):
    """
    List an Issue's Events
    ``````````````````````
    This endpoint lists an issue's events.

    :pparam string issue_id: the ID of the issue to retrieve.
    :auth: required
    """
    try:
        environments = get_environments(request, group.project.organization)
        query, tags = self._get_search_query_and_tags(
            request,
            group,
            environments,
        )
    except InvalidQuery as exc:
        return Response({'detail': six.text_type(exc)}, status=400)
    except (NoResults, ResourceDoesNotExist):
        # No matching events is represented as an empty list, not an error.
        return Response([])

    # Snuba backend is opt-in per request or enabled via the global option.
    use_snuba = (request.GET.get('enable_snuba') == '1'
                 or options.get('snuba.events-queries.enabled'))
    backend = self._get_events_snuba if use_snuba else self._get_events_legacy
    # NOTE(review): neither InvalidParams (from date parsing) nor
    # GroupEventsError (from the backend call) is handled here; both would
    # surface as 500s — consider returning 400 responses instead.
    start, end = get_date_range_from_params(request.GET, optional=True)
    return backend(request, group, environments, query, tags, start, end)
def validate(self, data):
    """Cross-field validation: exactly one date filter, at least one field
    or aggregation, then write the resolved start/end back onto ``data``.
    """
    data["arrayjoin"] = self.arrayjoin

    # Exactly one of the mutually exclusive date filters may be supplied.
    date_fields = ["start", "statsPeriod", "range", "statsPeriodStart"]
    provided = sum(1 for f in date_fields if data.get(f) is not None)
    if provided == 0:
        raise serializers.ValidationError("You must specify a date filter")
    if provided > 1:
        raise serializers.ValidationError("Conflicting date filters supplied")

    if not data.get("fields") and not data.get("aggregations"):
        raise serializers.ValidationError("Specify at least one field or aggregation")

    date_params = {
        "start": data.get("start"),
        "end": data.get("end"),
        "statsPeriod": data.get("statsPeriod") or data.get("range"),
        "statsPeriodStart": data.get("statsPeriodStart"),
        "statsPeriodEnd": data.get("statsPeriodEnd"),
    }
    try:
        start, end = get_date_range_from_params(date_params, optional=True)
    except InvalidParams as exc:
        raise serializers.ValidationError(six.text_type(exc))

    if start is None or end is None:
        raise serializers.ValidationError("Either start and end dates or range is required")

    data["start"] = start
    data["end"] = end
    return data
def test_relative_date_range(self):
    # statsPeriodStart/statsPeriodEnd describe a window relative to "now"
    # (assumes the test class freezes time at 2018-12-11 03:21:34 UTC).
    params = {
        'statsPeriodStart': '14d',
        'statsPeriodEnd': '7d',
    }
    start, end = get_date_range_from_params(params)
    assert start == datetime.datetime(2018, 11, 27, 3, 21, 34, tzinfo=timezone.utc)
    assert end == datetime.datetime(2018, 12, 4, 3, 21, 34, tzinfo=timezone.utc)
def test_relative_date_range(self):
    # statsPeriodStart/statsPeriodEnd describe a window relative to "now"
    # (assumes the test class freezes time at 2018-12-11 03:21:34 UTC).
    params = {
        'statsPeriodStart': '14d',
        'statsPeriodEnd': '7d',
    }
    start, end = get_date_range_from_params(params)
    assert start == datetime.datetime(2018, 11, 27, 3, 21, 34, tzinfo=timezone.utc)
    assert end == datetime.datetime(2018, 12, 4, 3, 21, 34, tzinfo=timezone.utc)
def test_date_range(self):
    # Absolute start/end dates are parsed as UTC midnights.
    params = {"start": "2018-11-01", "end": "2018-11-07"}
    start, end = get_date_range_from_params(params)
    assert start == datetime.datetime(2018, 11, 1, tzinfo=timezone.utc)
    assert end == datetime.datetime(2018, 11, 7, tzinfo=timezone.utc)
def get_filter_params(self, request, organization, date_filter_optional=False, project_ids=None): """ Extracts common filter parameters from the request and returns them in a standard format. :param request: :param organization: Organization to get params for :param date_filter_optional: Defines what happens if no date filter :param project_ids: Project ids if they were already grabbed but not validated yet parameters are passed. If False, no date filtering occurs. If True, we provide default values. :return: A dict with keys: - start: start date of the filter - end: end date of the filter - project_id: A list of project ids to filter on - environment(optional): If environments were passed in, a list of environment names """ # get the top level params -- projects, time range, and environment # from the request try: start, end = get_date_range_from_params( request.GET, optional=date_filter_optional) if start and end: with configure_scope() as scope: scope.set_tag("query.period", (end - start).total_seconds()) except InvalidParams as e: raise ParseError(detail=u"Invalid date range: {}".format(e)) with sentry_sdk.start_span( op="PERF: org.get_filter_params - projects"): try: projects = self.get_projects(request, organization, project_ids) except ValueError: raise ParseError(detail="Invalid project ids") if not projects: raise NoProjects environments = self.get_environments(request, organization) params = { "start": start, "end": end, "project_id": [p.id for p in projects], "organization_id": organization.id, } if environments: params["environment"] = [env.name for env in environments] params["environment_objects"] = environments return params
def get(self, request: Request, project: Project, rule: Rule) -> Response:
    """Return paginated group history for a rule over the requested range."""
    limit = self.get_per_page(request)
    cursor = self.get_cursor_from_request(request)
    start, end = get_date_range_from_params(request.GET)

    paginated = fetch_rule_groups_paginated(rule, start, end, cursor, limit)
    response = Response(
        serialize(paginated.results, request.user, RuleGroupHistorySerializer())
    )
    self.add_cursor_headers(request, response, paginated)
    return response
def get(self, request, organization):
    """
    Returns a time series view over statsPeriod over interval.
    """
    try:
        lookup = SnubaLookup.get(request.GET['tag'])
    except KeyError:
        raise ResourceDoesNotExist

    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        # str(exc) instead of exc.message: ``.message`` is deprecated and
        # missing entirely on Python 3.
        return Response({'detail': str(exc)}, status=400)

    # Default the rollup interval to one hour when absent or unparseable.
    interval = parse_stats_period(request.GET.get('interval', '1h'))
    if interval is None:
        interval = timedelta(hours=1)

    try:
        project_ids = self.get_project_ids(request, organization)
    except ValueError:
        return Response({'detail': 'Invalid project ids'}, status=400)
    if not project_ids:
        return self.empty()

    environment = self.get_environment(request, organization)
    query_condition = self.get_query_condition(request, organization)

    rollup = int(interval.total_seconds())
    data = query(
        end=end,
        start=start,
        rollup=rollup,
        selected_columns=lookup.selected_columns,
        aggregations=[
            ('count()', '', 'count'),
        ],
        filter_keys={
            'project_id': project_ids,
        },
        conditions=lookup.conditions + query_condition + environment,
        groupby=['time'] + lookup.columns,
        orderby='time',
    )

    serializer = SnubaTSResultSerializer(organization, lookup, request.user)
    return Response(
        serializer.serialize(
            SnubaTSResult(data, start, end, rollup),
        ),
        status=200,
    )
def get(self, request: Request, team) -> Response:
    """
    Return a a time bucketed list of mean group resolution times for a given team.
    """
    if not features.has("organizations:team-insights", team.organization, actor=request.user):
        return Response({"detail": "You do not have the insights feature enabled"}, status=400)

    start, end = get_date_range_from_params(request.GET)
    # Bucket by whole days; end is pushed a day forward so the current
    # (partial) day is included.
    end = end.date() + timedelta(days=1)
    # NOTE(review): start is also shifted forward a day — presumably to
    # mirror the end adjustment; confirm the first day should be dropped.
    start = start.date() + timedelta(days=1)

    environments = [e.id for e in get_environments(request, team.organization)]
    grouphistory_environment_filter = (
        Q(group__groupenvironment__environment_id=environments[0]) if environments else Q()
    )
    history_list = (
        GroupHistory.objects.filter_to_team(team)
        .filter(
            grouphistory_environment_filter,
            status=GroupHistoryStatus.RESOLVED,
            date_added__gte=start,
            date_added__lte=end,
        )
        .annotate(bucket=TruncDay("date_added"))
        .values("bucket", "prev_history_date")
        # We need to coalesce here since we won't store the initial `UNRESOLVED` row for every
        # group, since it's unnecessary and just takes extra storage.
        .annotate(
            ttr=F("date_added") - Coalesce(F("prev_history_date"), F("group__first_seen"))
        )
        .annotate(avg_ttr=Avg("ttr"))
    )

    # Accumulate total time-to-resolution and resolution count per day.
    sums = defaultdict(lambda: {"sum": timedelta(), "count": 0})
    for gh in history_list:
        key = str(gh["bucket"].date())
        sums[key]["sum"] += gh["ttr"]
        sums[key]["count"] += 1

    # Emit one entry per day of the range, zero-filled where no data exists.
    avgs = {}
    current_day = start
    while current_day < end:
        key = str(current_day)
        if key in sums:
            avg = int((sums[key]["sum"] / sums[key]["count"]).total_seconds())
            count = sums[key]["count"]
        else:
            avg = count = 0
        avgs[key] = {"avg": avg, "count": count}
        current_day += timedelta(days=1)

    return Response(avgs)
def __init__(self, organization_id, discover_query):
    """Prepare a discover export: resolve projects, the fixed date range,
    header fields and the paged data function."""
    self.projects = self.get_projects(organization_id, discover_query)
    self.start, self.end = get_date_range_from_params(discover_query)
    self.params = {
        "organization_id": organization_id,
        "project_id": [project.id for project in self.projects],
        "start": self.start,
        "end": self.end,
    }
    # NOTE(review): on Python 3 this is a lazy map object that can only be
    # iterated once — confirm downstream consumers iterate it exactly once.
    self.header_fields = map(lambda x: get_function_alias(x), discover_query["field"])
    self.data_fn = self.get_data_fn(fields=discover_query["field"],
                                    query=discover_query["query"],
                                    params=self.params)
def get_date_range(params: Mapping) -> Tuple[datetime, datetime, int]:
    """Get start, end, rollup for the given parameters.

    Apply a similar logic as `sessions_v2.get_constrained_date_range`,
    but with fewer constraints. More constraints may be added in the
    future.

    Note that this function returns a right-exclusive date range
    [start, end), contrary to the one used in sessions_v2.
    """
    # Default the rollup to one hour when absent or unparseable.
    interval = parse_stats_period(params.get("interval", "1h"))
    interval = int(3600 if interval is None else interval.total_seconds())

    # hard code min. allowed resolution to 10 seconds
    allowed_resolution = AllowedResolution.ten_seconds

    smallest_interval, interval_str = allowed_resolution.value
    if interval % smallest_interval != 0 or interval < smallest_interval:
        raise InvalidParams(
            f"The interval has to be a multiple of the minimum interval of {interval_str}."
        )

    if ONE_DAY % interval != 0:
        raise InvalidParams(
            "The interval should divide one day without a remainder.")

    start, end = get_date_range_from_params(params)

    # Round the requested range up to a whole number of intervals.
    date_range = end - start
    date_range = timedelta(
        seconds=int(interval * math.ceil(date_range.total_seconds() / interval)))

    if date_range.total_seconds() / interval > MAX_POINTS:
        raise InvalidParams(
            "Your interval and date range would create too many results. "
            "Use a larger interval, or a smaller date range.")

    # Align the end of the range to an interval boundary, then derive start.
    end_ts = int(interval * math.ceil(to_timestamp(end) / interval))
    end = to_datetime(end_ts)
    start = end - date_range

    # NOTE: The sessions_v2 implementation cuts the `end` time to now + 1 minute
    # if `end` is in the future. This allows for better real time results when
    # caching is enabled on the snuba queries. Removed here for simplicity,
    # but we might want to reconsider once caching becomes an issue for metrics.
    return start, end, interval
def generate_saved_query(project, transaction_title, name):
    """Create a DiscoverSavedQuery for a single transaction title using the
    default (maximum) date range as query params."""
    org = project.organization
    start, end = get_date_range_from_params({})
    params = {
        "start": start,
        "end": end,
        "project_id": [project.id],
        "organization_id": org.id,
    }

    data = {
        "version": 2,
        "name": name,
        "fields": [
            "title",
            "browser.name",
            "count()",
            "p75(transaction.duration)",
            "p95(transaction.duration)",
            "p99(transaction.duration)",
        ],
        "widths": ["-1", "-1", "-1", "-1", "-1", "-1"],
        "orderby": "-count",
        "query": f"title:{transaction_title}",
        "projects": [project.id],
        "range": "7d",
        "environment": [],
        "yAxis": "p75(transaction.duration)",
        "display": "daily",
    }

    serializer = DiscoverSavedQuerySerializer(data=data, context={"params": params})
    if not serializer.is_valid():
        raise Exception(serializer.errors)

    validated = serializer.validated_data
    DiscoverSavedQuery.objects.create(
        organization=org,
        name=validated["name"],
        query=validated["query"],
        version=validated["version"],
    )
def get_filter_params(self, request, organization, date_filter_optional=False):
    """
    Extracts common filter parameters from the request and returns them
    in a standard format.
    :param request:
    :param organization: Organization to get params for
    :param date_filter_optional: Defines what happens if no date filter
        parameters are passed. If False, no date filtering occurs. If
        True, we provide default values.
    :return: A dict with keys:
     - start: start date of the filter
     - end: end date of the filter
     - project_id: A list of project ids to filter on
     - environment(optional): If environments were passed in, a list of
       environment names
    """
    # Resolve the requested time window first; a malformed range is a 400.
    try:
        start, end = get_date_range_from_params(
            request.GET, optional=date_filter_optional)
    except InvalidParams as exc:
        raise ParseError(detail=u"Invalid date range: {}".format(exc))

    with sentry_sdk.start_span(op="PERF: org.get_filter_params - projects"):
        try:
            projects = self.get_projects(request, organization)
        except ValueError:
            raise ParseError(detail="Invalid project ids")
    if not projects:
        raise NoProjects

    environments = [
        env.name for env in self.get_environments(request, organization)
    ]

    params = {"start": start, "end": end, "project_id": [p.id for p in projects]}
    if environments:
        params["environment"] = environments
    return params
def get_filter_params(self, request, organization, date_filter_optional=False):
    """
    Extracts common filter parameters from the request and returns them
    in a standard format.
    :param request:
    :param organization: Organization to get params for
    :param date_filter_optional: Defines what happens if no date filter
        parameters are passed. If False, no date filtering occurs. If
        True, we provide default values.
    :return: A dict with keys:
     - start: start date of the filter
     - end: end date of the filter
     - project_id: A list of project ids to filter on
     - environment(optional): If environments were passed in, a list of
       environment names
    """
    # get the top level params -- projects, time range, and environment
    # from the request
    try:
        start, end = get_date_range_from_params(
            request.GET,
            optional=date_filter_optional,
        )
    except InvalidParams as exc:
        # str(exc) instead of exc.message: BaseException.message is
        # deprecated and raises AttributeError on Python 3.
        raise OrganizationEventsError(str(exc))

    try:
        projects = self.get_projects(request, organization)
    except ValueError:
        raise OrganizationEventsError('Invalid project ids')
    if not projects:
        raise NoProjects

    environments = [
        e.name for e in self.get_environments(request, organization)
    ]
    params = {
        'start': start,
        'end': end,
        'project_id': [p.id for p in projects],
    }
    if environments:
        params['environment'] = environments

    return params
def get_filter_params(self, request, project, date_filter_optional=False):
    """Similar to the version on the organization just for a single project."""
    # Resolve the requested time window (optionally defaulted).
    try:
        start, end = get_date_range_from_params(request.GET, optional=date_filter_optional)
    except InvalidParams as exc:
        raise ProjectEventsError(str(exc))

    env_names = [env.name for env in get_environments(request, project.organization)]

    params = {"start": start, "end": end, "project_id": [project.id]}
    if env_names:
        params["environment"] = env_names
    return params
def get(self, request, organization):
    """Return issue counts per search query over the requested date range."""
    stats_period = request.GET.get("groupStatsPeriod")
    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as e:
        raise ParseError(detail=six.text_type(e))

    if stats_period not in (None, "", "24h", "14d", "auto"):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)

    environments = self.get_environments(request, organization)
    projects = self.get_projects(request, organization)
    if not projects:
        return Response([])

    # Querying across multiple projects requires the global-views feature.
    if len(projects) > 1 and not features.has("organizations:global-views",
                                              organization,
                                              actor=request.user):
        return Response(
            {
                "detail": "You do not have the multi project stream feature enabled"
            },
            status=400)

    queries = request.GET.getlist("query")
    response = {}
    for query in queries:
        try:
            count = self._count(
                request,
                query,
                organization,
                projects,
                environments,
                {
                    "count_hits": True,
                    "date_to": end,
                    "date_from": start
                },
            )
            response[query] = count
        except (ValidationError, discover.InvalidSearchQuery) as exc:
            return Response({"detail": six.text_type(exc)}, status=400)

    return Response(response)
def get(self, request, project, key):
    """
    List a Tag's Values
    ```````````````````
    Return a list of values associated with this key.  The `query`
    parameter can be used to to perform a "contains" match on
    values.
    When paginated can return at most 1000 values.

    :pparam string organization_slug: the slug of the organization.
    :pparam string project_slug: the slug of the project.
    :pparam string key: the tag key to look up.
    :auth: required
    """
    lookup_key = tagstore.prefix_reserved_key(key)

    try:
        environment_id = self._get_environment_id_from_request(
            request, project.organization_id)
    except Environment.DoesNotExist:
        # if the environment doesn't exist then the tag can't possibly exist
        raise ResourceDoesNotExist

    try:
        tagkey = tagstore.get_tag_key(project.id, environment_id, lookup_key)
    except tagstore.TagKeyNotFound:
        raise ResourceDoesNotExist

    start, end = get_date_range_from_params(request.GET)

    # Values are paginated most-recently-seen first.
    paginator = tagstore.get_tag_value_paginator(
        project.id,
        environment_id,
        tagkey.key,
        start=start,
        end=end,
        query=request.GET.get("query"),
        order_by="-last_seen",
    )

    return self.paginate(
        request=request,
        paginator=paginator,
        on_results=lambda results: serialize(results, request.user),
    )
def get(self, request: Request, team) -> Response:
    """
    Returns a dict of team projects, and a time-series list of release counts for each.
    """
    if not features.has("organizations:team-insights", team.organization, actor=request.user):
        return Response(
            {"detail": "You do not have the insights feature enabled"}, status=400)

    project_list = Project.objects.get_for_team_ids(team_ids=[team.id])
    start, end = get_date_range_from_params(request.GET)
    # Bucket by whole days; end is pushed a day forward so the current
    # (partial) day is included.
    end = end.date() + timedelta(days=1)
    # NOTE(review): start is also shifted forward a day — confirm the first
    # day of the range is intentionally excluded.
    start = start.date() + timedelta(days=1)

    per_project_daily_release_counts = (Release.objects.filter(
        projects__in=project_list,
        date_added__gte=start,
        date_added__lte=end,
    ).annotate(bucket=TruncDay("date_added")).order_by("bucket").values(
        "projects", "bucket").annotate(count=Count("id")))

    agg_project_counts = {}
    project_avgs = defaultdict(int)
    this_week_totals = defaultdict(int)
    this_week_start = now() - timedelta(days=7)
    for row in per_project_daily_release_counts:
        # Accumulate per-project totals and record per-day counts.
        project_avgs[row["projects"]] += row["count"]
        agg_project_counts[str(row["bucket"].date())] = row["count"]
        if row["bucket"] >= this_week_start:
            this_week_totals[row["projects"]] += row["count"]

    # Convert per-project totals into a weekly average over the range.
    for row in project_avgs:
        project_avgs[row] = (project_avgs[row] / (end - start).days) * 7

    # Zero-fill days with no releases so the series is continuous.
    current_day = start
    while current_day < end:
        agg_project_counts.setdefault(str(current_day), 0)
        current_day += timedelta(days=1)

    return Response({
        "release_counts": agg_project_counts,
        "project_avgs": project_avgs,
        "last_week_totals": this_week_totals,
    })
def get_filter_params(self, request, organization, date_filter_optional=False):
    """
    Extracts common filter parameters from the request and returns them
    in a standard format.
    :param request:
    :param organization: Organization to get params for
    :param date_filter_optional: Defines what happens if no date filter
        parameters are passed. If False, no date filtering occurs. If
        True, we provide default values.
    :return: A dict with keys:
     - start: start date of the filter
     - end: end date of the filter
     - project_id: A list of project ids to filter on
     - environment(optional): If environments were passed in, a list of
       environment names
    """
    # get the top level params -- projects, time range, and environment
    # from the request
    try:
        start, end = get_date_range_from_params(
            request.GET,
            optional=date_filter_optional,
        )
    except InvalidParams as exc:
        # str(exc) instead of exc.message: BaseException.message is
        # deprecated and raises AttributeError on Python 3.
        raise OrganizationEventsError(str(exc))

    try:
        projects = self.get_projects(request, organization)
    except ValueError:
        raise OrganizationEventsError('Invalid project ids')
    if not projects:
        raise NoProjects

    environments = [e.name for e in self.get_environments(request, organization)]
    params = {
        'start': start,
        'end': end,
        'project_id': [p.id for p in projects],
    }
    if environments:
        params['environment'] = environments

    return params
def test_stats_period(self):
    """Each supported statsPeriod unit yields a window of the expected length."""
    cases = [
        ('14h', datetime.timedelta(hours=14)),
        ('14d', datetime.timedelta(days=14)),
        ('60m', datetime.timedelta(minutes=60)),
        ('3600s', datetime.timedelta(seconds=3600)),
    ]
    for period, expected_delta in cases:
        start, end = get_date_range_from_params({'statsPeriod': period})
        assert start == end - expected_delta

    # A one-second period is rejected as invalid.
    with self.assertRaises(InvalidParams):
        get_date_range_from_params({'statsPeriod': '1s'})
def get(self, request, organization):
    """
    List an Organization's Issues
    `````````````````````````````
    Return a list of issues (groups) bound to an organization.  All parameters are
    supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send an new query value (i.e. ``?query=`` for all
    results).

    The ``groupStatsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: '' (disable),
    '24h', '14d'

    The ``statsPeriod`` parameter can be used to select a date window starting
    from now. Ex. ``14d``.

    The ``start`` and ``end`` parameters can be used to select an absolute
    date period to fetch issues from.

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam string groupStatsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam string start:       Beginning date. You must also provide ``end``.
    :qparam string end:         End date. You must also provide ``start``.
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well.  This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.)
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :auth: required
    """
    # Validate/normalize the group stats window: None -> default '24h',
    # '' -> stats disabled (None), anything else but '24h'/'14d' -> 400.
    stats_period = request.GET.get('groupStatsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    environments = self.get_environments(request, organization)

    # Serializer factory bound to the chosen environments/stats window;
    # called per-response below (sometimes with matching_event_id).
    serializer = functools.partial(
        StreamGroupSerializerSnuba,
        environment_ids=[env.id for env in environments],
        stats_period=stats_period,
    )

    projects = self.get_projects(request, organization)
    project_ids = [p.id for p in projects]

    # No accessible projects: empty result rather than an error.
    if not projects:
        return Response([])

    # Querying across multiple projects requires the global-views feature.
    if len(projects) > 1 and not features.has(
            'organizations:global-views', organization, actor=request.user):
        return Response({
            'detail': 'You do not have the multi project stream feature enabled'
        }, status=400)

    # we ignore date range for both short id and event ids
    query = request.GET.get('query', '').strip()
    if query:
        # check to see if we've got an event ID
        if is_event_id(query):
            groups = list(
                Group.objects.filter_by_event_id(project_ids, query)
            )
            # Exactly one match: serialize with the matching event id and
            # flag the response as a direct hit.
            if len(groups) == 1:
                response = Response(
                    serialize(
                        groups, request.user, serializer(
                            matching_event_id=query
                        )
                    )
                )
                response['X-Sentry-Direct-Hit'] = '1'
                return response

            if groups:
                return Response(serialize(groups, request.user, serializer()))

        # Short-id lookup is opt-in via the shortIdLookup query param.
        group = get_by_short_id(organization.id, request.GET.get('shortIdLookup'), query)
        if group is not None:
            # check all projects user has access to
            if request.access.has_project_access(group.project):
                response = Response(
                    serialize(
                        [group], request.user, serializer()
                    )
                )
                response['X-Sentry-Direct-Hit'] = '1'
                return response

    # Date-range filtering applies only to the full search path below.
    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        return Response({'detail': exc.message}, status=400)

    try:
        cursor_result, query_kwargs = self._search(
            request, organization, projects, environments, {
                'count_hits': True,
                'date_to': end,
                'date_from': start,
            })
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    # TODO: We should try to integrate this into the search backend, since
    # this can cause us to arbitrarily return fewer results than requested.
    status = [
        search_filter for search_filter in query_kwargs.get('search_filters', [])
        if search_filter.key.name == 'status'
    ]
    if status and status[0].value.raw_value == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)
    # Pagination cursors are returned via Link headers.
    self.add_cursor_headers(request, response, cursor_result)
    # TODO(jess): add metrics that are similar to project endpoint here
    return response
def test_relative_date_range_incomplete(self):
    """A statsPeriodStart without a matching statsPeriodEnd is rejected."""
    with self.assertRaises(InvalidParams):
        get_date_range_from_params({'statsPeriodStart': '14d'})