def test_get_entity_subscription_for_metrics_dataset_for_users(self) -> None:
    aggregate = "percentage(users_crashed, users) AS _crash_rate_alert_aggregate"
    entity_subscription = get_entity_subscription_for_dataset(
        dataset=QueryDatasets.METRICS,
        aggregate=aggregate,
        time_window=3600,
        extra_fields={"org_id": self.organization.id},
    )
    assert isinstance(entity_subscription, MetricsSetsEntitySubscription)
    assert entity_subscription.aggregate == aggregate
    org_id = self.organization.id
    groupby = [tag_key(org_id, "session.status")]
    assert entity_subscription.get_entity_extra_params() == {
        "organization": self.organization.id,
        "groupby": groupby,
        "granularity": 10,
    }
    assert entity_subscription.entity_key == EntityKey.MetricsSets
    assert entity_subscription.time_col == ENTITY_TIME_COLUMNS[EntityKey.MetricsSets]
    assert entity_subscription.dataset == QueryDatasets.METRICS
    session_status = tag_key(org_id, "session.status")
    session_status_tag_values = get_tag_values_list(org_id, ["crashed", "init"])
    snuba_filter = entity_subscription.build_snuba_filter("", None, None)
    assert snuba_filter
    assert snuba_filter.aggregations == [["uniq(value)", None, "value"]]
    assert snuba_filter.conditions == [
        ["metric_id", "=", metric_id(org_id, SessionMetricKey.USER)],
        [session_status, "IN", session_status_tag_values],
    ]
    assert snuba_filter.groupby == groupby
    assert snuba_filter.rollup == entity_subscription.get_granularity()

def aggregate_query_results(
    self, data: List[Dict[str, Any]], alias: Optional[str] = None
) -> List[Dict[str, Any]]:
    aggregated_results: List[Dict[str, Any]]
    value_col_name = alias if alias else "value"
    try:
        # Map each row's indexed `session.status` tag value back to its string
        # name ("init", "crashed", ...) and collect the counts per status.
        translated_data: Dict[str, Any] = {}
        session_status = tag_key(self.org_id, "session.status")
        for row in data:
            tag_value = reverse_tag_value(self.org_id, row[session_status])
            translated_data[tag_value] = row[value_col_name]

        total_session_count = translated_data.get("init", 0)
        crash_count = translated_data.get("crashed", 0)
        if total_session_count == 0:
            metrics.incr(
                "incidents.entity_subscription.metrics.aggregate_query_results.no_session_data"
            )
            crash_free_rate = None
        else:
            # Crash-free rate as a percentage, rounded to three decimal places.
            crash_free_rate = round((1 - crash_count / total_session_count) * 100, 3)

        col_name = alias if alias else CRASH_RATE_ALERT_AGGREGATE_ALIAS

        aggregated_results = [{col_name: crash_free_rate}]
    except MetricIndexNotFound:
        aggregated_results = [{}]
    return aggregated_results
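
# A minimal, self-contained sketch of the aggregation above. The indexer helpers
# (`tag_key`, `reverse_tag_value`) are hypothetical in-memory stand-ins here, not
# the real metrics indexer; the tag ids and row values are made up.
from typing import Any, Dict, List, Optional

def tag_key(org_id: int, name: str) -> str:
    return "tags[5]"  # assume "session.status" was indexed as 5

def reverse_tag_value(org_id: int, index: int) -> str:
    return {6: "crashed", 13: "init"}[index]  # assumed tag-value ids

def crash_free_rate(data: List[Dict[str, Any]], org_id: int = 1) -> Optional[float]:
    session_status = tag_key(org_id, "session.status")
    translated = {reverse_tag_value(org_id, row[session_status]): row["value"] for row in data}
    total, crashed = translated.get("init", 0), translated.get("crashed", 0)
    return round((1 - crashed / total) * 100, 3) if total else None

rows = [
    {"project_id": 8, "tags[5]": 6, "value": 2.0},     # 2 crashed sessions
    {"project_id": 8, "tags[5]": 13, "value": 100.0},  # 100 sessions started
]
assert crash_free_rate(rows) == 98.0
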
def get_crash_rate_alert_metrics_aggregation_value(self, subscription_update):
    """
    Handles validation and extraction of Crash Rate Alert subscription update
    values over the metrics dataset.
    A subscription update looks like
    [
        {'project_id': 8, 'tags[5]': 6, 'value': 2.0},
        {'project_id': 8, 'tags[5]': 13, 'value': 1.0}
    ]
    where each entry represents a session status and the count of sessions with
    that status. As an example, `tags[5]` represents the string `session.status`,
    while `tags[5]: 6` could mean there are 2 sessions of status `crashed`.
    Likewise, the other entry represents the number of sessions started. In this
    method, we reverse match these indexes back to their strings to end up with
    something like
    {"crashed": 2, "init": 1}
    - `init` represents the sessions or user sessions that were started. To get
    the crash free percentage, we divide the number of crashed sessions by this
    number and subtract the result from 1. It is also used when
    CRASH_RATE_ALERT_MINIMUM_THRESHOLD is set: if the minimum threshold is
    greater than the total session count, the update is dropped. If the minimum
    threshold is not set, the total session count is not checked.
    - `crashed` represents the total count of sessions or users that crashed.
    """
    try:
        org_id = self.subscription.project.organization.id
        session_status = tag_key(org_id, "session.status")
        data = {}

        # ToDo(ahmed): Refactor this logic by calling the aggregate_query_results
        #  on the BaseMetricsEntitySubscription cls
        for row in subscription_update["values"]["data"]:
            tag_value = reverse_tag_value(org_id, row[session_status])
            data[tag_value] = row["value"]

        total_session_count = data.get("init", 0)
        crash_count = data.get("crashed", 0)

        if total_session_count == 0:
            metrics.incr("incidents.alert_rules.ignore_update_no_session_data")
            return

        if CRASH_RATE_ALERT_MINIMUM_THRESHOLD is not None:
            min_threshold = int(CRASH_RATE_ALERT_MINIMUM_THRESHOLD)
            if total_session_count < min_threshold:
                metrics.incr(
                    "incidents.alert_rules.ignore_update_count_lower_than_min_threshold"
                )
                return

        aggregation_value = round((1 - crash_count / total_session_count) * 100, 3)
    except MetricIndexNotFound:
        metrics.incr("incidents.alert_rules.ignore_update.metric_index_not_found")
        aggregation_value = None
    return aggregation_value
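
# To make the threshold behaviour described in the docstring concrete: a hedged,
# self-contained sketch of the decision logic, with made-up counts and a
# hypothetical CRASH_RATE_ALERT_MINIMUM_THRESHOLD of 10.
from typing import Optional

CRASH_RATE_ALERT_MINIMUM_THRESHOLD = 10  # hypothetical setting

def compute_aggregation_value(total_session_count: float, crash_count: float) -> Optional[float]:
    if total_session_count == 0:
        return None  # no session data: the update is dropped
    if total_session_count < CRASH_RATE_ALERT_MINIMUM_THRESHOLD:
        return None  # fewer sessions than the minimum threshold: dropped
    return round((1 - crash_count / total_session_count) * 100, 3)

assert compute_aggregation_value(100, 5) == 95.0  # 95% crash free
assert compute_aggregation_value(4, 1) is None    # 4 < 10, update dropped
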
def test_simple_users_for_metrics(self):
    for tag in [SessionMetricKey.USER.value, "session.status", "crashed", "init"]:
        indexer.record(tag)
    entity_subscription = get_entity_subscription_for_dataset(
        dataset=QueryDatasets.METRICS,
        time_window=3600,
        aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
        extra_fields={"org_id": self.organization.id},
    )
    snuba_filter = build_snuba_filter(
        entity_subscription,
        query="",
        environment=None,
    )
    org_id = self.organization.id
    session_status = tag_key(org_id, "session.status")
    session_status_tag_values = get_tag_values_list(org_id, ["crashed", "init"])
    assert snuba_filter
    assert snuba_filter.aggregations == [["uniq(value)", None, "value"]]
    assert snuba_filter.conditions == [
        ["metric_id", "=", metric_id(org_id, SessionMetricKey.USER)],
        [session_status, "IN", session_status_tag_values],
    ]
    assert snuba_filter.groupby == [session_status]

def test_query_and_environment_users_metrics(self):
    env = self.create_environment(self.project, name="development")
    for tag in [
        SessionMetricKey.USER.value,
        "session.status",
        "environment",
        "development",
        "init",
        "crashed",
        "release",
        "[email protected]",
    ]:
        indexer.record(tag)
    entity_subscription = get_entity_subscription_for_dataset(
        dataset=QueryDatasets.METRICS,
        time_window=3600,
        aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
        extra_fields={"org_id": self.organization.id},
    )
    snuba_filter = build_snuba_filter(
        entity_subscription,
        query="release:[email protected]",
        environment=env,
    )
    org_id = self.organization.id
    assert snuba_filter
    assert snuba_filter.aggregations == [["uniq(value)", None, "value"]]
    assert snuba_filter.groupby == [tag_key(org_id, "session.status")]
    assert snuba_filter.conditions == [
        ["metric_id", "=", metric_id(org_id, SessionMetricKey.USER)],
        [
            tag_key(org_id, "session.status"),
            "IN",
            get_tag_values_list(org_id, ["crashed", "init"]),
        ],
        [tag_key(org_id, "environment"), "=", tag_value(org_id, "development")],
        [tag_key(org_id, "release"), "=", tag_value(org_id, "[email protected]")],
    ]

def build_snuba_filter(
    self,
    query: str,
    environment: Optional[Environment],
    params: Optional[Mapping[str, Any]] = None,
) -> Filter:
    snuba_filter = get_filter(query, params=params)
    conditions = copy(snuba_filter.conditions)
    session_status_tag_values = get_tag_values_list(self.org_id, ["crashed", "init"])
    snuba_filter.update_with(
        {
            "aggregations": [[f"{self.aggregation_func}(value)", None, "value"]],
            "conditions": [
                ["metric_id", "=", metric_id(self.org_id, self.metric_key)],
                [self.session_status, "IN", session_status_tag_values],
            ],
            "groupby": self.get_query_groupby(),
            "rollup": self.get_granularity(),
        }
    )
    if environment:
        snuba_filter.conditions.append(
            [tag_key(self.org_id, "environment"), "=", tag_value(self.org_id, environment.name)]
        )
    if query and len(conditions) > 0:
        # The metrics dataset stores tags as indexed integers, so any `release`
        # condition parsed from the raw query has to be re-mapped through the
        # indexer before it can be appended to the filter.
        release_conditions = [
            condition for condition in conditions if condition[0] == "release"
        ]

        for release_condition in release_conditions:
            snuba_filter.conditions.append(
                [
                    tag_key(self.org_id, release_condition[0]),
                    release_condition[1],
                    tag_value(self.org_id, release_condition[2]),
                ]
            )

    return snuba_filter
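
# A rough illustration of the release-condition re-mapping above. A hypothetical
# in-memory indexer stands in for `tag_key`/`tag_value`; the ids are invented.
_INDEX = {"release": 9, "[email protected]": 42}  # assumed string -> id mapping

def _tag_key(org_id: int, name: str) -> str:
    return f"tags[{_INDEX[name]}]"

def _tag_value(org_id: int, name: str) -> int:
    return _INDEX[name]

def translate_release_conditions(org_id, conditions):
    # Keep only release conditions and rewrite both sides to their indexed form.
    return [
        [_tag_key(org_id, lhs), op, _tag_value(org_id, rhs)]
        for lhs, op, rhs in conditions
        if lhs == "release"
    ]

assert translate_release_conditions(1, [["release", "=", "[email protected]"]]) == [
    ["tags[9]", "=", 42]
]
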
def __init__(
    self, aggregate: str, time_window: int, extra_fields: Optional[_EntitySpecificParams] = None
):
    super().__init__(aggregate, time_window, extra_fields)
    self.aggregate = aggregate
    if not extra_fields or "org_id" not in extra_fields:
        raise InvalidQuerySubscription(
            "org_id is a required param when "
            "building snuba filter for a metrics subscription"
        )
    self.org_id = extra_fields["org_id"]
    self.session_status = tag_key(self.org_id, "session.status")
    self.time_window = time_window