def test_join_query(self) -> None:
    ev = Entity("events", "ev")
    gm = Entity("groupedmessage", "gm")
    join = Join([Relationship(ev, "grouped", gm)])
    query = (
        Query("discover", join)
        .set_select(
            [
                Column("group_id", ev),
                Column("status", gm),
                Function("avg", [Column("retention_days", ev)], "avg"),
            ]
        )
        .set_groupby([Column("group_id", ev), Column("status", gm)])
        .set_where(
            [
                Condition(Column("project_id", ev), Op.EQ, self.project_id),
                Condition(Column("project_id", gm), Op.EQ, self.project_id),
                Condition(Column("timestamp", ev), Op.GTE, self.base_time),
                Condition(Column("timestamp", ev), Op.LT, self.next_time),
            ]
        )
    )

    response = self.post("/discover/snql", data=query.snuba())
    data = json.loads(response.data)
    assert response.status_code == 200
    assert data["data"] == []
def test_invalid_query() -> None:
    with pytest.raises(InvalidQueryError, match=re.escape("queries must have a valid dataset")):
        Query(dataset=1, match=Entity("events"))  # type: ignore

    with pytest.raises(InvalidQueryError, match=re.escape("queries must have a valid dataset")):
        Query(dataset="", match=Entity("events"))

    with pytest.raises(InvalidQueryError, match=re.escape("queries must have a valid Entity")):
        Query(dataset="discover", match="events")  # type: ignore

    with pytest.raises(
        InvalidConditionError,
        match=re.escape(
            "invalid condition: LHS of a condition must be a Column, CurriedFunction or Function, not <class 'snuba_sdk.aliased_expression.AliasedExpression'>"
        ),
    ):
        (
            Query("discover", Entity("events"))
            .set_select([AliasedExpression(Column("transaction"), "tn")])
            .set_where(
                [Condition(AliasedExpression(Column("project_id"), "pi"), Op.IN, (1,))]  # type: ignore
            )
        )
def test_invalid_subquery() -> None:
    with pytest.raises(
        InvalidQueryError,
        match=re.escape(
            "inner query is invalid: query must have at least one expression in select"
        ),
    ):
        Query("discover", Query(dataset="discover", match=Entity("events"))).set_select(
            [Column("event_id"), Column("title")]
        )

    with pytest.raises(
        InvalidQueryError,
        match=re.escape(
            "inner query is invalid: query must have at least one expression in select"
        ),
    ):
        Query(
            "discover",
            Query(
                dataset="discover",
                match=Entity("events"),
                select=[Column("title"), Column("timestamp")],
            ),
        ).set_match(Query(dataset="discover", match=Entity("events"))).set_select(
            [Function("uniq", [Column("new_event")], "uniq_event"), Column("title")]
        )
def test_entity(
    name: Any, sample: Any, formatted: Optional[str], exception: Optional[Exception]
) -> None:
    if exception is not None:
        with pytest.raises(type(exception), match=re.escape(str(exception))):
            Entity(name, sample)
    else:
        entity = Entity(name, sample)
        assert entity.name == name
        assert entity.sample == sample
        if formatted is not None:
            assert TRANSLATOR.visit(entity) == formatted
def test_invalid_query() -> None:
    with pytest.raises(InvalidQuery, match=re.escape("queries must have a valid dataset")):
        Query(dataset=1, match=Entity("events"))  # type: ignore

    with pytest.raises(InvalidQuery, match=re.escape("queries must have a valid dataset")):
        Query(dataset="", match=Entity("events"))

    with pytest.raises(InvalidQuery, match=re.escape("queries must have a valid Entity")):
        Query(dataset="discover", match="events")  # type: ignore
def _check_releases_have_health_data(
    organization_id: int,
    project_ids: List[int],
    release_versions: List[str],
    start: datetime,
    end: datetime,
) -> Set[str]:
    """
    Returns a set of all release versions that have health data within a given period of time.
    """
    if not release_versions:
        return set()

    query = Query(
        dataset="sessions",
        match=Entity("sessions"),
        select=[Column("release")],
        groupby=[Column("release")],
        where=[
            Condition(Column("started"), Op.GTE, start),
            Condition(Column("started"), Op.LT, end),
            Condition(Column("org_id"), Op.EQ, organization_id),
            Condition(Column("project_id"), Op.IN, project_ids),
            Condition(Column("release"), Op.IN, release_versions),
        ],
    )
    data = snuba.raw_snql_query(
        query, referrer="snuba.sessions.check_releases_have_health_data"
    )["data"]
    return {row["release"] for row in data}
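# Hedged usage sketch for _check_releases_have_health_data: the organization ID,
# project IDs, release strings, and time window below are hypothetical, and a
# configured Sentry/Snuba backend is assumed.
def _example_check_release_health() -> Set[str]:
    window_end = datetime.utcnow()
    window_start = window_end - timedelta(hours=24)
    # Only versions that reported at least one session in the window come back.
    return _check_releases_have_health_data(
        organization_id=1,
        project_ids=[1, 2],
        release_versions=["backend@1.0.0", "backend@1.0.1"],
        start=window_start,
        end=window_end,
    )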
def test_sub_query(self) -> None:
    inner_query = (
        Query("discover", Entity("discover_events"))
        .set_select([Function("count", [], "count")])
        .set_groupby([Column("project_id"), Column("tags[custom_tag]")])
        .set_where(
            [
                Condition(Column("type"), Op.NEQ, "transaction"),
                Condition(Column("project_id"), Op.EQ, self.project_id),
                Condition(Column("timestamp"), Op.GTE, self.base_time),
                Condition(Column("timestamp"), Op.LT, self.next_time),
            ]
        )
    )

    query = (
        Query("discover", inner_query)
        .set_select([Function("avg", [Column("count")], "avg_count")])
        .set_orderby([OrderBy(Function("avg", [Column("count")], "avg_count"), Direction.ASC)])
        .set_limit(1000)
    )

    response = self.post("/discover/snql", data=query.snuba())
    data = json.loads(response.data)
    assert response.status_code == 200, data
    assert data["data"] == [{"avg_count": 1.0}]
def test_join_validate_match(
    conditions: ConditionGroup,
    entity: Entity,
    exception: Optional[Exception],
) -> None:
    other_join_entity = Entity("test_b", "tb", None, SCHEMA)
    join2_conditions = [
        Condition(Column("required1", other_join_entity), Op.IN, [1, 2, 3]),
        Condition(Column("required2", other_join_entity), Op.EQ, 1),
        Condition(Column("time", other_join_entity), Op.GTE, BEFORE),
        Condition(Column("time", other_join_entity), Op.LT, AFTER),
        *conditions,
    ]
    query = Query(
        dataset="test",
        match=Join([Relationship(entity, "has", other_join_entity)]),
        select=[Column("test1", entity), Column("required1", other_join_entity)],
        where=join2_conditions,
    )

    if exception is not None:
        with pytest.raises(type(exception), match=re.escape(str(exception))):
            validate_required_columns(query)
    else:
        validate_required_columns(query)
def test_simple(self) -> None:
    query = Query(
        dataset="sessions",
        match=Entity("org_sessions"),
        select=[Column("org_id"), Column("project_id")],
        groupby=[Column("org_id"), Column("project_id")],
        where=[
            Condition(Column("started"), Op.GTE, datetime.utcnow() - timedelta(hours=6)),
            Condition(Column("started"), Op.LT, datetime.utcnow()),
        ],
        granularity=Granularity(3600),
    )

    response = self.app.post(
        "/sessions/snql",
        data=query.snuba(),
    )
    data = json.loads(response.data)

    assert response.status_code == 200, response.data
    assert len(data["data"]) == 2
    assert data["data"][0]["org_id"] == self.org_id
    assert data["data"][0]["project_id"] == self.project_id
    assert data["data"][1]["org_id"] == self.org_id
    assert data["data"][1]["project_id"] == self.project_id2
def test_invalid_query_set() -> None:
    query = Query("discover", Entity("events"))

    tests: Mapping[str, Sequence[Any]] = {
        "match": (0, "0 must be a valid Entity"),
        "select": (
            (0, [], [0]),
            "select clause must be a non-empty list of Column and/or Function",
        ),
        "groupby": (
            [0, [0]],
            "groupby clause must be a list of Column and/or Function",
        ),
        "where": ([0, [0]], "where clause must be a list of Condition"),
        "having": ([0, [0]], "having clause must be a list of Condition"),
        "orderby": ([0, [0]], "orderby clause must be a list of OrderBy"),
        "limitby": ("a", "limitby clause must be a LimitBy"),
        "limit": (100000, "limit '100000' is capped at 10,000"),
        "offset": ("", "offset '' must be an integer"),
        "granularity": (-1, "granularity '-1' must be at least 1"),
    }

    match, err = tests["match"]
    with pytest.raises(InvalidQuery, match=re.escape(err)):
        query.set_match(match)

    for val in tests["select"][0]:
        with pytest.raises(InvalidQuery, match=re.escape(tests["select"][1])):
            query.set_select(val)

    for val in tests["groupby"][0]:
        with pytest.raises(InvalidQuery, match=re.escape(tests["groupby"][1])):
            query.set_groupby(val)

    for val in tests["where"][0]:
        with pytest.raises(InvalidQuery, match=re.escape(tests["where"][1])):
            query.set_where(val)

    for val in tests["having"][0]:
        with pytest.raises(InvalidQuery, match=re.escape(tests["having"][1])):
            query.set_having(val)

    for val in tests["orderby"][0]:
        with pytest.raises(InvalidQuery, match=re.escape(tests["orderby"][1])):
            query.set_orderby(val)

    with pytest.raises(InvalidQuery, match=re.escape(tests["limitby"][1])):
        query.set_limitby(tests["limitby"][0])

    with pytest.raises(InvalidExpression, match=re.escape(tests["limit"][1])):
        query.set_limit(tests["limit"][0])

    with pytest.raises(InvalidExpression, match=re.escape(tests["offset"][1])):
        query.set_offset(tests["offset"][0])

    with pytest.raises(InvalidExpression, match=re.escape(tests["granularity"][1])):
        query.set_granularity(tests["granularity"][0])
def get_snql_query(self) -> Query:
    return Query(
        dataset=self.dataset.value,
        match=Entity(self.dataset.value),
        select=self.select,
        where=self.where,
        groupby=self.groupby,
        limit=self.limit,
    )
def build_project_usage_outcomes(start__stop, project):
    start, stop = start__stop

    # XXX(epurkhiser): Tsdb used to use day buckets, where the end would
    # represent a whole day. Snuba queries are more precise, so we must extend
    # the end to capture the entire last day.
    end = stop + timedelta(days=1)

    query = Query(
        dataset=Dataset.Outcomes.value,
        match=Entity("outcomes"),
        select=[
            Column("outcome"),
            Column("category"),
            Function("sum", [Column("quantity")], "total"),
        ],
        where=[
            Condition(Column("timestamp"), Op.GTE, start),
            Condition(Column("timestamp"), Op.LT, end),
            Condition(Column("project_id"), Op.EQ, project.id),
            Condition(Column("org_id"), Op.EQ, project.organization_id),
            Condition(
                Column("outcome"),
                Op.IN,
                [Outcome.ACCEPTED, Outcome.FILTERED, Outcome.RATE_LIMITED],
            ),
            Condition(
                Column("category"),
                Op.IN,
                [*DataCategory.error_categories(), DataCategory.TRANSACTION],
            ),
        ],
        groupby=[Column("outcome"), Column("category")],
        granularity=Granularity(ONE_DAY),
    )
    data = raw_snql_query(query, referrer="reports.outcomes")["data"]

    return (
        # Accepted errors
        sum(
            row["total"]
            for row in data
            if row["category"] in DataCategory.error_categories()
            and row["outcome"] == Outcome.ACCEPTED
        ),
        # Dropped errors
        sum(
            row["total"]
            for row in data
            if row["category"] in DataCategory.error_categories()
            and row["outcome"] == Outcome.RATE_LIMITED
        ),
        # Accepted transactions
        sum(
            row["total"]
            for row in data
            if row["category"] == DataCategory.TRANSACTION
            and row["outcome"] == Outcome.ACCEPTED
        ),
        # Dropped transactions
        sum(
            row["total"]
            for row in data
            if row["category"] == DataCategory.TRANSACTION
            and row["outcome"] == Outcome.RATE_LIMITED
        ),
    )
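# Hedged usage sketch: build_project_usage_outcomes returns a 4-tuple of
# accepted/dropped error and transaction totals. The (start, stop) range and
# project object are assumed to come from the surrounding report code.
def _example_project_usage(start, stop, project):
    (
        accepted_errors,
        dropped_errors,
        accepted_transactions,
        dropped_transactions,
    ) = build_project_usage_outcomes((start, stop), project)
    # Combine into overall error and transaction volumes for illustration.
    return accepted_errors + dropped_errors, accepted_transactions + dropped_transactions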
def get_snql_query(self) -> Query:
    return Query(
        dataset=self.dataset.value,
        match=Entity(self.dataset.value),
        select=self.select,
        where=self.where,
        having=self.having,
        groupby=self.groupby,
        orderby=[OrderBy(self.time_column, Direction.ASC)],
        granularity=self.granularity,
        limit=self.limit,
    )
def test_escape_edge_cases(self) -> None:
    query = (
        Query("events", Entity("events"))
        .set_select([Function("count", [], "times_seen")])
        .set_where(
            [
                Condition(Column("project_id"), Op.EQ, self.project_id),
                Condition(Column("timestamp"), Op.GTE, self.base_time),
                Condition(Column("timestamp"), Op.LT, self.next_time),
                Condition(Column("environment"), Op.EQ, "\\' \n \\n \\"),
            ]
        )
    )

    response = self.post("/events/snql", data=query.snuba())
    data = json.loads(response.data)
    assert response.status_code == 200, data
def run_outcomes_query_timeseries(query: QueryDefinition) -> ResultSet:
    snql_query = Query(
        dataset=query.dataset.value,
        match=Entity(query.match),
        select=query.select_params,
        groupby=query.group_by + [Column(TS_COL)],
        where=query.conditions,
        limit=Limit(10000),
        offset=Offset(0),
        granularity=Granularity(query.rollup),
    )
    result_timeseries = raw_snql_query(snql_query, referrer="outcomes.timeseries")
    return _format_rows(result_timeseries["data"], query)
def get_snql_query(self) -> Query:
    self.validate_having_clause()

    return Query(
        dataset=self.dataset.value,
        match=Entity(self.dataset.value),
        select=self.select,
        where=self.where,
        having=self.having,
        groupby=self.groupby,
        orderby=self.orderby,
        limit=self.limit,
        offset=self.offset,
    )
def test_basic(self) -> None:
    now = datetime.now()
    self._insert_event_for_time(now)

    query = (
        Query(dataset="events", match=Entity("events"))
        .set_select([Function("count", [], "count")])
        .set_groupby([Column("project_id")])
        .set_where(
            [
                Condition(Column("project_id"), Op.EQ, self.project.id),
                Condition(Column("timestamp"), Op.GTE, now - timedelta(days=1)),
                Condition(Column("timestamp"), Op.LT, now + timedelta(days=1)),
            ]
        )
    )

    result = snuba.raw_snql_query(query)
    assert len(result["data"]) == 1
    assert result["data"][0] == {"count": 1, "project_id": self.project.id}
def test_orderby(self) -> None:
    self.project_id3 = next(self.id_iter)
    self.org_id2 = next(self.id_iter)
    self.generate_session_events(self.org_id2, self.project_id3)

    query = Query(
        dataset="sessions",
        match=Entity("org_sessions"),
        select=[Column("org_id"), Column("project_id")],
        groupby=[Column("org_id"), Column("project_id")],
        where=[
            Condition(Column("started"), Op.GTE, datetime.utcnow() - timedelta(hours=6)),
            Condition(Column("started"), Op.LT, datetime.utcnow()),
        ],
        granularity=Granularity(3600),
        orderby=[OrderBy(Column("org_id"), Direction.ASC)],
    )

    response = self.app.post(
        "/sessions/snql",
        data=query.snuba(),
    )
    data = json.loads(response.data)

    assert response.status_code == 200, response.data
    assert len(data["data"]) == 3
    assert data["data"][0]["org_id"] == self.org_id
    assert data["data"][0]["project_id"] == self.project_id
    assert data["data"][1]["org_id"] == self.org_id
    assert data["data"][1]["project_id"] == self.project_id2
    assert data["data"][2]["org_id"] == self.org_id2
    assert data["data"][2]["project_id"] == self.project_id3

    query = query.set_orderby([OrderBy(Column("org_id"), Direction.DESC)])

    response = self.app.post(
        "/sessions/snql",
        data=query.snuba(),
    )
    data = json.loads(response.data)

    assert response.status_code == 200, response.data
    assert len(data["data"]) == 3
    assert data["data"][0]["org_id"] == self.org_id2
    assert data["data"][0]["project_id"] == self.project_id3
    assert data["data"][1]["org_id"] == self.org_id
    assert data["data"][1]["project_id"] == self.project_id
    assert data["data"][2]["org_id"] == self.org_id
    assert data["data"][2]["project_id"] == self.project_id2
def _get_project_releases_count(
    organization_id: int,
    project_ids: Sequence[int],
    scope: str,
    stats_period: Optional[str] = None,
    environments: Optional[Sequence[str]] = None,
) -> int:
    """
    Fetches the total count of releases/project combinations.
    """
    if stats_period is None:
        stats_period = "24h"

    # Special rule: we only support sorting by the last 24h.
    if scope.endswith("_24h"):
        stats_period = "24h"

    _, stats_start, _ = get_rollup_starts_and_buckets(stats_period)

    where = [
        Condition(Column("started"), Op.GTE, stats_start),
        Condition(Column("started"), Op.LT, datetime.now()),
        Condition(Column("project_id"), Op.IN, project_ids),
        Condition(Column("org_id"), Op.EQ, organization_id),
    ]
    if environments is not None:
        where.append(Condition(Column("environment"), Op.IN, environments))

    having = []
    # Filter out releases with zero users when sorting by either `users` or `crash_free_users`.
    if scope in ["users", "crash_free_users"]:
        having.append(Condition(Column("users"), Op.GT, 0))

    query = Query(
        dataset="sessions",
        match=Entity("sessions"),
        select=[
            Function("uniqExact", [Column("release"), Column("project_id")], alias="count")
        ],
        where=where,
        having=having,
    )
    data = snuba.raw_snql_query(
        query, referrer="snuba.sessions.get_project_releases_count"
    )["data"]
    return data[0]["count"] if data else 0
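# Hedged usage sketch for _get_project_releases_count: the organization ID,
# project IDs, scope, stats period, and environment below are illustrative
# values only, and a configured Sentry/Snuba backend is assumed.
def _example_release_count() -> int:
    return _get_project_releases_count(
        organization_id=1,
        project_ids=[1, 2],
        scope="crash_free_users",
        stats_period="14d",
        environments=["production"],
    )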
def get_snql_query(self) -> Query:
    self.validate_having_clause()

    return Query(
        dataset=self.dataset.value,
        match=Entity(self.dataset.value, sample=self.sample_rate),
        select=self.columns,
        array_join=self.array_join,
        where=self.where,
        having=self.having,
        groupby=self.groupby,
        orderby=self.orderby,
        limit=self.limit,
        offset=self.offset,
        limitby=self.limitby,
        turbo=self.turbo,
    )
def test_cache(self):
    """Minimal test to verify that use_cache works."""
    results = snuba.raw_snql_query(
        Query(
            "events",
            Entity("events"),
            select=[Column("event_id")],
            where=[
                Condition(Column("project_id"), Op.EQ, self.project.id),
                Condition(Column("timestamp"), Op.GTE, timezone.now() - timedelta(days=1)),
                Condition(Column("timestamp"), Op.LT, timezone.now()),
            ],
            limit=Limit(1),
        ),
        use_cache=True,
    )
    assert results["data"] == []
def query_p95(interval):
    start, stop = interval
    query = Query(
        dataset=Dataset.Transactions.value,
        match=Entity("transactions"),
        select=[
            Column("transaction_name"),
            Function("quantile(0.95)", [Column("duration")], "p95"),
        ],
        where=[
            Condition(Column("finish_ts"), Op.GTE, start),
            Condition(Column("finish_ts"), Op.LT, stop + timedelta(days=1)),
            Condition(Column("transaction_name"), Op.IN, transaction_names),
            Condition(Column("project_id"), Op.EQ, project.id),
        ],
        groupby=[Column("transaction_name")],
    )
    return raw_snql_query(query, referrer="reports.key_transactions.p95")
def build_key_errors(interval, project):
    start, stop = interval

    # Take the 3 most frequently occurring events
    query = Query(
        dataset=Dataset.Events.value,
        match=Entity("events"),
        select=[Column("group_id"), Function("count", [])],
        where=[
            Condition(Column("timestamp"), Op.GTE, start),
            Condition(Column("timestamp"), Op.LT, stop + timedelta(days=1)),
            Condition(Column("project_id"), Op.EQ, project.id),
        ],
        groupby=[Column("group_id")],
        orderby=[OrderBy(Function("count", []), Direction.DESC)],
        limit=Limit(3),
    )
    query_result = raw_snql_query(query, referrer="reports.key_errors")
    key_errors = query_result["data"]
    return [(e["group_id"], e["count()"]) for e in key_errors]
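# Hedged usage sketch: build_key_errors yields (group_id, event_count) pairs for
# the three most frequent issues in the interval; the interval and project are
# assumed to come from the surrounding report code.
def _example_key_errors(start, stop, project):
    return {group_id: count for group_id, count in build_key_errors((start, stop), project)}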
def test_tags_in_groupby(self) -> None:
    query = (
        Query("events", Entity("events"))
        .set_select(
            [
                Function("count", [], "times_seen"),
                Function("min", [Column("timestamp")], "first_seen"),
                Function("max", [Column("timestamp")], "last_seen"),
            ]
        )
        .set_groupby([Column("tags[k8s-app]")])
        .set_where(
            [
                Condition(Column("project_id"), Op.EQ, self.project_id),
                Condition(Column("timestamp"), Op.GTE, self.base_time),
                Condition(Column("timestamp"), Op.LT, self.next_time),
                Condition(Column("tags[k8s-app]"), Op.NEQ, ""),
                Condition(Column("type"), Op.NEQ, "transaction"),
            ]
        )
        .set_orderby(
            [
                OrderBy(
                    Function("max", [Column("timestamp")], "last_seen"),
                    Direction.DESC,
                )
            ]
        )
        .set_limit(1000)
    )

    response = self.post("/events/snql", data=query.snuba())
    data = json.loads(response.data)
    assert response.status_code == 200, data
def test_sessions_query(self) -> None:
    query = (
        Query("sessions", Entity("sessions"))
        .set_select([Column("project_id"), Column("release")])
        .set_groupby([Column("project_id"), Column("release")])
        .set_where(
            [
                Condition(Column("project_id"), Op.IN, [self.project_id]),
                Condition(Column("org_id"), Op.EQ, self.org_id),
                Condition(Column("started"), Op.GTE, datetime(2021, 1, 1, 17, 5, 59, 554860)),
                Condition(Column("started"), Op.LT, datetime(2022, 1, 1, 17, 6, 0, 554981)),
            ]
        )
        .set_orderby([OrderBy(Column("sessions"), Direction.DESC)])
        .set_limit(100)
    )

    response = self.post("/sessions/snql", data=query.snuba())
    data = json.loads(response.data)
    assert response.status_code == 200
    assert data["data"] == []
def test_arrayjoin(self) -> None:
    query = (
        Query("events", Entity("events"))
        .set_select(
            [
                Function("count", [], "times_seen"),
                Function("min", [Column("timestamp")], "first_seen"),
                Function("max", [Column("timestamp")], "last_seen"),
            ]
        )
        .set_groupby([Column("exception_frames.filename")])
        .set_array_join(Column("exception_frames.filename"))
        .set_where(
            [
                Condition(Column("exception_frames.filename"), Op.LIKE, "%.java"),
                Condition(Column("project_id"), Op.EQ, self.project_id),
                Condition(Column("timestamp"), Op.GTE, self.base_time),
                Condition(Column("timestamp"), Op.LT, self.next_time),
            ]
        )
        .set_orderby(
            [
                OrderBy(
                    Function("max", [Column("timestamp")], "last_seen"),
                    Direction.DESC,
                )
            ]
        )
        .set_limit(1000)
    )

    response = self.post("/events/snql", data=query.snuba())
    data = json.loads(response.data)
    assert response.status_code == 200, data
    assert len(data["data"]) == 6
def test_simple_query(self) -> None:
    query = (
        Query("discover", Entity("discover_events"))
        .set_select([Function("count", [], "count")])
        .set_groupby([Column("project_id"), Column("tags[custom_tag]")])
        .set_where(
            [
                Condition(Column("type"), Op.NEQ, "transaction"),
                Condition(Column("project_id"), Op.EQ, self.project_id),
                Condition(Column("timestamp"), Op.GTE, self.base_time),
                Condition(Column("timestamp"), Op.LT, self.next_time),
            ]
        )
        .set_orderby([OrderBy(Function("count", [], "count"), Direction.ASC)])
        .set_limit(1000)
        .set_consistent(True)
        .set_debug(True)
    )

    response = self.post("/discover/snql", data=query.snuba())
    data = json.loads(response.data)
    assert response.status_code == 200, data
    assert data["stats"]["consistent"]
    assert data["data"] == [
        {
            "count": 1,
            "tags[custom_tag]": "custom_value",
            "project_id": self.project_id,
        }
    ]
    Direction,
    Function,
    Granularity,
    Limit,
    LimitBy,
    Offset,
    OrderBy,
    Totals,
)
from snuba_sdk.query import Query

NOW = datetime(2021, 1, 2, 3, 4, 5, 6, timezone.utc)

tests = [
    pytest.param(
        Query("discover", Entity("events"))
        .set_select([Column("event_id")])
        .set_where([Condition(Column("timestamp"), Op.GT, NOW)])
        .set_limit(10)
        .set_offset(1)
        .set_granularity(3600),
        (
            "MATCH (events)",
            "SELECT event_id",
            "WHERE timestamp > toDateTime('2021-01-02T03:04:05.000006')",
            "LIMIT 10",
            "OFFSET 1",
            "GRANULARITY 3600",
        ),
        None,
        id="basic query",
    ),
    pytest.param(
        Query(
def build_key_transactions(interval, project):
    start, stop = interval

    # Take the 3 most frequently occurring transactions
    query = Query(
        dataset=Dataset.Transactions.value,
        match=Entity("transactions"),
        select=[
            Column("transaction_name"),
            Function("count", []),
        ],
        where=[
            Condition(Column("finish_ts"), Op.GTE, start),
            Condition(Column("finish_ts"), Op.LT, stop + timedelta(days=1)),
            Condition(Column("project_id"), Op.EQ, project.id),
        ],
        groupby=[Column("transaction_name")],
        orderby=[OrderBy(Function("count", []), Direction.DESC)],
        limit=Limit(3),
    )
    query_result = raw_snql_query(query, referrer="reports.key_transactions")
    key_errors = query_result["data"]

    # Materialize the names into a list so they can be reused for both p95 queries
    # (a lazy map object would be exhausted after the first use).
    transaction_names = [p["transaction_name"] for p in key_errors]

    def query_p95(interval):
        start, stop = interval
        query = Query(
            dataset=Dataset.Transactions.value,
            match=Entity("transactions"),
            select=[
                Column("transaction_name"),
                Function("quantile(0.95)", [Column("duration")], "p95"),
            ],
            where=[
                Condition(Column("finish_ts"), Op.GTE, start),
                Condition(Column("finish_ts"), Op.LT, stop + timedelta(days=1)),
                Condition(Column("transaction_name"), Op.IN, transaction_names),
                Condition(Column("project_id"), Op.EQ, project.id),
            ],
            groupby=[Column("transaction_name")],
        )
        return raw_snql_query(query, referrer="reports.key_transactions.p95")

    query_result = query_p95((start, stop))
    this_week_p95 = {}
    for point in query_result["data"]:
        this_week_p95[point["transaction_name"]] = point["p95"]

    query_result = query_p95((start - timedelta(days=7), stop - timedelta(days=7)))
    last_week_p95 = {}
    for point in query_result["data"]:
        last_week_p95[point["transaction_name"]] = point["p95"]

    return [
        (
            e["transaction_name"],
            e["count()"],
            project.id,
            this_week_p95.get(e["transaction_name"], None),
            last_week_p95.get(e["transaction_name"], None),
        )
        for e in key_errors
    ]
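# Hedged usage sketch: each row from build_key_transactions is a
# (transaction_name, count, project_id, this_week_p95, last_week_p95) tuple;
# the interval and project are assumed to come from the surrounding report code.
def _example_key_transactions(start, stop, project):
    rows = build_key_transactions((start, stop), project)
    return [(name, count, p95_now) for name, count, _, p95_now, _ in rows]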
def build_project_series(start__stop, project):
    start, stop = start__stop
    rollup = ONE_DAY

    resolution, series = tsdb.get_optimal_rollup_series(start, stop, rollup)
    assert resolution == rollup, "resolution does not match requested value"

    clean = partial(clean_series, start, stop, rollup)

    def zerofill_clean(data):
        return clean(zerofill(data, start, stop, rollup, fill_default=0))

    # Note: this section can be removed
    issue_ids = project.group_set.filter(
        status=GroupStatus.RESOLVED, resolved_at__gte=start, resolved_at__lt=stop
    ).values_list("id", flat=True)

    # TODO: The TSDB calls could be replaced with a SnQL call here
    tsdb_range_resolved = _query_tsdb_groups_chunked(tsdb.get_range, issue_ids, start, stop, rollup)
    resolved_error_series = reduce(
        merge_series,
        map(clean, tsdb_range_resolved.values()),
        clean([(timestamp, 0) for timestamp in series]),
    )
    # end

    # Use outcomes to compute total errors and transactions
    outcomes_query = Query(
        dataset=Dataset.Outcomes.value,
        match=Entity("outcomes"),
        select=[
            Column("time"),
            Column("category"),
            Function("sum", [Column("quantity")], "total"),
        ],
        where=[
            Condition(Column("timestamp"), Op.GTE, start),
            Condition(Column("timestamp"), Op.LT, stop + timedelta(days=1)),
            Condition(Column("project_id"), Op.EQ, project.id),
            Condition(Column("org_id"), Op.EQ, project.organization_id),
            Condition(Column("outcome"), Op.EQ, Outcome.ACCEPTED),
            Condition(
                Column("category"),
                Op.IN,
                [*DataCategory.error_categories(), DataCategory.TRANSACTION],
            ),
        ],
        groupby=[Column("time"), Column("category")],
        granularity=Granularity(rollup),
        orderby=[OrderBy(Column("time"), Direction.ASC)],
    )
    outcome_series = raw_snql_query(outcomes_query, referrer="reports.outcome_series")

    total_error_series = OrderedDict()
    for v in outcome_series["data"]:
        if v["category"] in DataCategory.error_categories():
            timestamp = int(to_timestamp(parse_snuba_datetime(v["time"])))
            total_error_series[timestamp] = total_error_series.get(timestamp, 0) + v["total"]

    total_error_series = zerofill_clean(list(total_error_series.items()))

    transaction_series = [
        (int(to_timestamp(parse_snuba_datetime(v["time"]))), v["total"])
        for v in outcome_series["data"]
        if v["category"] == DataCategory.TRANSACTION
    ]
    transaction_series = zerofill_clean(transaction_series)

    error_series = merge_series(
        resolved_error_series,
        total_error_series,
        lambda resolved, total: (resolved, total - resolved),  # Resolved, Unresolved
    )

    # Format of this series: [(resolved, unresolved, transactions)]
    return merge_series(
        error_series,
        transaction_series,
        lambda errors, transactions: errors + (transactions,),
    )