from functools import partial, reduce

# tsdb, GroupStatus, merge_series, clean_series and _query_tsdb_chunked are
# provided by the surrounding module.
def prepare_project_series(start__stop, project, rollup=60 * 60 * 24):
    start, stop = start__stop
    resolution, series = tsdb.get_optimal_rollup_series(start, stop, rollup)
    assert resolution == rollup, "resolution does not match requested value"
    clean = partial(clean_series, start, stop, rollup)

    # issues resolved within the reporting window
    issue_ids = project.group_set.filter(
        status=GroupStatus.RESOLVED,
        resolved_at__gte=start,
        resolved_at__lt=stop,
    ).values_list("id", flat=True)
    tsdb_range = _query_tsdb_chunked(tsdb.get_range, issue_ids, start, stop, rollup)

    return merge_series(
        # fold the per-issue series into a single "resolved" series
        reduce(
            merge_series,
            map(clean, tsdb_range.values()),
            clean([(timestamp, 0) for timestamp in series]),
        ),
        # total events for the project over the same window
        clean(
            tsdb.get_range(tsdb.models.project, [project.id], start, stop, rollup=rollup)[
                project.id
            ]
        ),
        lambda resolved, total: (resolved, total - resolved),  # unresolved
    )
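# clean_series is used throughout these snippets but never shown. A minimal
# sketch of a plausible implementation, assuming it validates that each point
# lands on the expected rollup boundary (start inclusive, stop exclusive) and
# truncates anything past the stop bound; to_timestamp (datetime -> epoch
# seconds) is also an assumption here. The actual helper may differ.
def clean_series(start, stop, rollup, series):
    start_timestamp = to_timestamp(start)
    stop_timestamp = to_timestamp(stop)
    result = []
    for i, (timestamp, value) in enumerate(series):
        # every point must sit exactly one rollup after the previous one
        assert timestamp == start_timestamp + rollup * i
        if timestamp >= stop_timestamp:
            break
        result.append((timestamp, value))
    return result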
import random

# interval, reports, tsdb and to_datetime are provided by the enclosing scope.
def build_calendar_data(project):
    start, stop = reports.get_calendar_query_range(interval, 3)
    rollup = 60 * 60 * 24
    series = []
    weekend = frozenset((5, 6))  # Saturday, Sunday
    value = int(random.weibullvariate(5000, 3))
    for timestamp in tsdb.get_optimal_rollup_series(start, stop, rollup)[1]:
        # weekday() must be called; comparing the bound method itself against
        # the weekend set would always be False, so damping would never apply
        damping = random.uniform(0.2, 0.6) if to_datetime(timestamp).weekday() in weekend else 1
        jitter = random.paretovariate(1.2)
        series.append((timestamp, int(value * damping * jitter)))
        value = value * random.uniform(0.25, 2)
    return reports.clean_calendar_data(project, series, start, stop, rollup, stop)
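# to_datetime is assumed by build_calendar_data but not defined in these
# snippets. A minimal sketch, assuming it converts a Unix timestamp into an
# aware UTC datetime so that .weekday() can be called on the result:
from datetime import datetime, timezone

def to_datetime(value):
    # weekday() on the result yields 0 (Monday) through 6 (Sunday),
    # matching the weekend = frozenset((5, 6)) check above
    return datetime.fromtimestamp(value, tz=timezone.utc)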
def get(self, request, sentry_app):
    """
    :qparam float since
    :qparam float until
    :qparam resolution - optional
    """
    query_args = self._parse_args(request)

    installations = SentryAppInstallation.with_deleted.filter(
        sentry_app=sentry_app,
        date_added__range=(query_args["start"], query_args["end"]),
    ).values_list("date_added", "date_deleted", "organization_id")

    rollup, series = tsdb.get_optimal_rollup_series(query_args["start"], query_args["end"])

    install_counter = 0
    uninstall_counter = 0
    install_stats = dict.fromkeys(series, 0)
    uninstall_stats = dict.fromkeys(series, 0)

    for date_added, date_deleted, organization_id in installations:
        install_counter += 1
        install_norm_epoch = tsdb.normalize_to_epoch(date_added, rollup)
        if install_norm_epoch in install_stats:
            install_stats[install_norm_epoch] += 1

        if date_deleted is not None:
            uninstall_counter += 1
            uninstall_norm_epoch = tsdb.normalize_to_epoch(date_deleted, rollup)
            if uninstall_norm_epoch in uninstall_stats:
                uninstall_stats[uninstall_norm_epoch] += 1

    result = {
        "total_installs": install_counter,
        "total_uninstalls": uninstall_counter,
        "install_stats": sorted(install_stats.items(), key=lambda x: x[0]),
        "uninstall_stats": sorted(uninstall_stats.items(), key=lambda x: x[0]),
    }
    return Response(result)
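# tsdb.normalize_to_epoch assigns each (un)install event above to its rollup
# bucket. A minimal sketch of the bucketing logic, assuming it floors the
# datetime's Unix timestamp down to the nearest rollup boundary; the real
# method may differ in details such as timezone handling.
def normalize_to_epoch(timestamp, seconds):
    epoch = int(timestamp.timestamp())
    # e.g. with seconds=3600, an event at 10:42:17 lands in the 10:00:00 bucket
    return epoch - (epoch % seconds)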
import functools
from functools import reduce

def prepare_project_series(start__stop, project, rollup=60 * 60 * 24):
    start, stop = start__stop
    resolution, series = tsdb.get_optimal_rollup_series(start, stop, rollup)
    assert resolution == rollup, 'resolution does not match requested value'
    clean = functools.partial(clean_series, start, stop, rollup)
    return merge_series(
        reduce(
            merge_series,
            map(
                clean,
                tsdb.get_range(
                    tsdb.models.group,
                    list(
                        project.group_set.filter(
                            status=GroupStatus.RESOLVED,
                            resolved_at__gte=start,
                            resolved_at__lt=stop,
                        ).values_list('id', flat=True),
                    ),
                    start,
                    stop,
                    rollup=rollup,
                ).values(),
            ),
            clean([(timestamp, 0) for timestamp in series]),
        ),
        clean(
            tsdb.get_range(
                tsdb.models.project,
                [project.id],
                start,
                stop,
                rollup=rollup,
            )[project.id],
        ),
        lambda resolved, total: (
            resolved,
            total - resolved,  # unresolved
        ),
    )
import functools
import itertools
import operator
from functools import reduce


def merge_series(target, other, function=operator.add):
    """
    Merge two series into a single series. Both series must have the same
    start and end points as well as the same resolution.
    """
    missing = object()
    results = []
    # the sentinel fillvalue lets us assert both series are the same length
    for x, y in itertools.zip_longest(target, other, fillvalue=missing):
        assert x is not missing and y is not missing, 'series must be same length'
        assert x[0] == y[0], 'series timestamps must match'
        results.append((x[0], function(x[1], y[1])))
    return results


def prepare_project_series(start__stop, project, rollup=60 * 60 * 24):
    start, stop = start__stop
    resolution, series = tsdb.get_optimal_rollup_series(start, stop, rollup)
    assert resolution == rollup, 'resolution does not match requested value'
    clean = functools.partial(clean_series, start, stop, rollup)
    return merge_series(
        reduce(
            merge_series,
            map(
                clean,
                tsdb.get_range(
                    tsdb.models.group,
                    project.group_set.filter(
                        status=GroupStatus.RESOLVED,
                        resolved_at__gte=start,
                        resolved_at__lt=stop,
                    ).values_list('id', flat=True),
                    start,
                    stop,
                    rollup=rollup,
                ).values(),
            ),
            clean([(timestamp, 0) for timestamp in series]),
        ),
        clean(
            tsdb.get_range(
                tsdb.models.project,
                [project.id],
                start,
                stop,
                rollup=rollup,
            )[project.id],
        ),
        lambda resolved, total: (
            resolved,
            total - resolved,  # unresolved
        ),
    )
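# Usage sketch for merge_series: merging a "resolved" series into a "total"
# series to produce (resolved, unresolved) pairs, mirroring the lambda used by
# prepare_project_series above. Timestamps and counts here are illustrative.
resolved = [(0, 1), (60, 3)]
total = [(0, 4), (60, 5)]
assert merge_series(resolved, total, lambda r, t: (r, t - r)) == [
    (0, (1, 3)),
    (60, (3, 2)),
]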
from collections import OrderedDict
from datetime import timedelta
from functools import partial, reduce

# snuba_sdk query primitives (Query, Entity, Column, ...) and Sentry helpers
# (tsdb, GroupStatus, Outcome, DataCategory, raw_snql_query, ...) come from
# the surrounding module.
def build_project_series(start__stop, project):
    start, stop = start__stop
    rollup = ONE_DAY
    resolution, series = tsdb.get_optimal_rollup_series(start, stop, rollup)
    assert resolution == rollup, "resolution does not match requested value"
    clean = partial(clean_series, start, stop, rollup)

    def zerofill_clean(data):
        return clean(zerofill(data, start, stop, rollup, fill_default=0))

    # Note: this section can be removed
    issue_ids = project.group_set.filter(
        status=GroupStatus.RESOLVED,
        resolved_at__gte=start,
        resolved_at__lt=stop,
    ).values_list("id", flat=True)

    # TODO: The TSDB calls could be replaced with a SnQL call here
    tsdb_range_resolved = _query_tsdb_groups_chunked(tsdb.get_range, issue_ids, start, stop, rollup)
    resolved_error_series = reduce(
        merge_series,
        map(clean, tsdb_range_resolved.values()),
        clean([(timestamp, 0) for timestamp in series]),
    )
    # end

    # Use outcomes to compute total errors and transactions
    outcomes_query = Query(
        dataset=Dataset.Outcomes.value,
        match=Entity("outcomes"),
        select=[
            Column("time"),
            Column("category"),
            Function("sum", [Column("quantity")], "total"),
        ],
        where=[
            Condition(Column("timestamp"), Op.GTE, start),
            Condition(Column("timestamp"), Op.LT, stop + timedelta(days=1)),
            Condition(Column("project_id"), Op.EQ, project.id),
            Condition(Column("org_id"), Op.EQ, project.organization_id),
            Condition(Column("outcome"), Op.EQ, Outcome.ACCEPTED),
            Condition(
                Column("category"),
                Op.IN,
                [*DataCategory.error_categories(), DataCategory.TRANSACTION],
            ),
        ],
        groupby=[Column("time"), Column("category")],
        granularity=Granularity(rollup),
        orderby=[OrderBy(Column("time"), Direction.ASC)],
    )
    outcome_series = raw_snql_query(outcomes_query, referrer="reports.outcome_series")

    total_error_series = OrderedDict()
    for v in outcome_series["data"]:
        if v["category"] in DataCategory.error_categories():
            timestamp = int(to_timestamp(parse_snuba_datetime(v["time"])))
            total_error_series[timestamp] = total_error_series.get(timestamp, 0) + v["total"]
    total_error_series = zerofill_clean(list(total_error_series.items()))

    transaction_series = [
        (int(to_timestamp(parse_snuba_datetime(v["time"]))), v["total"])
        for v in outcome_series["data"]
        if v["category"] == DataCategory.TRANSACTION
    ]
    transaction_series = zerofill_clean(transaction_series)

    error_series = merge_series(
        resolved_error_series,
        total_error_series,
        lambda resolved, total: (resolved, total - resolved),  # Resolved, Unresolved
    )

    # Format of this series: [(resolved, unresolved, transactions)]
    return merge_series(
        error_series,
        transaction_series,
        lambda errors, transactions: errors + (transactions,),
    )
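# zerofill is assumed by zerofill_clean above but not defined in these
# snippets. A minimal sketch, assuming it expands sparse (timestamp, value)
# pairs so that every rollup bucket between start (inclusive) and stop
# (exclusive) is present, substituting fill_default where the query returned
# no row; to_timestamp (datetime -> epoch seconds) is also an assumption.
# The real helper may order or validate its input differently.
def zerofill(data, start, stop, rollup, fill_default=0):
    present = dict(data)
    start_ts = int(to_timestamp(start))
    stop_ts = int(to_timestamp(stop))
    return [(ts, present.get(ts, fill_default)) for ts in range(start_ts, stop_ts, rollup)]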