Example 1
 def test_get_entity_subscription_for_metrics_dataset_missing_organization(self) -> None:
     aggregate = "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate"
     with self.assertRaises(InvalidQuerySubscription):
         get_entity_subscription_for_dataset(
             dataset=QueryDatasets.METRICS,
             aggregate=aggregate,
             time_window=3600,
         )
 def test_get_entity_subscription_for_metrics_dataset_non_supported_aggregate(self) -> None:
     aggregate = "count(sessions)"
     with self.assertRaises(UnsupportedQuerySubscription):
         get_entity_subscription_for_dataset(
             dataset=QueryDatasets.METRICS,
             aggregate=aggregate,
             time_window=3600,
             extra_fields={"org_id": self.organization.id},
         )
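
Both failures above come from the same entry point: for the METRICS dataset, omitting "org_id" from extra_fields raises InvalidQuerySubscription, and any aggregate other than a crash-rate expression raises UnsupportedQuerySubscription. A minimal sketch of a call that satisfies both checks, assuming the same imports and an org_id fixture as in the tests:

entity_subscription = get_entity_subscription_for_dataset(
    dataset=QueryDatasets.METRICS,
    # only the crash-rate aggregates are accepted; "count(sessions)" is not
    aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
    time_window=3600,
    extra_fields={"org_id": org_id},  # omitting org_id raises InvalidQuerySubscription
)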
Example 3
 def test_get_entity_subscription_for_metrics_dataset_for_users(self) -> None:
     aggregate = "percentage(users_crashed, users) AS _crash_rate_alert_aggregate"
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.METRICS,
         aggregate=aggregate,
         time_window=3600,
         extra_fields={"org_id": self.organization.id},
     )
     assert isinstance(entity_subscription, MetricsSetsEntitySubscription)
     assert entity_subscription.aggregate == aggregate
     groupby = [resolve_tag_key("session.status")]
     assert entity_subscription.get_entity_extra_params() == {
         "organization": self.organization.id,
         "groupby": groupby,
         "granularity": 10,
     }
     assert entity_subscription.entity_key == EntityKey.MetricsSets
     assert entity_subscription.time_col == ENTITY_TIME_COLUMNS[EntityKey.MetricsSets]
     assert entity_subscription.dataset == QueryDatasets.METRICS
     session_status = resolve_tag_key("session.status")
     session_status_tag_values = resolve_many_weak(["crashed", "init"])
     snuba_filter = entity_subscription.build_snuba_filter("", None, None)
     assert snuba_filter
     assert snuba_filter.aggregations == [["uniq(value)", None, "value"]]
     assert snuba_filter.conditions == [
         ["metric_id", "=", resolve(SessionMetricKey.USER.value)],
         [session_status, "IN", session_status_tag_values],
     ]
     assert snuba_filter.groupby == groupby
     assert snuba_filter.rollup == entity_subscription.get_granularity()
 def test_get_entity_subscription_for_metrics_dataset_for_sessions(self) -> None:
     aggregate = "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate"
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.METRICS,
         aggregate=aggregate,
         time_window=3600,
         extra_fields={"org_id": self.organization.id},
     )
     assert isinstance(entity_subscription, MetricsCountersEntitySubscription)
     assert entity_subscription.aggregate == aggregate
     org_id = self.organization.id
     groupby = [tag_key(org_id, "session.status")]
     assert entity_subscription.get_entity_extra_params() == {
         "organization": self.organization.id,
         "groupby": groupby,
         "granularity": 10,
     }
     assert entity_subscription.entity_key == EntityKey.MetricsCounters
     assert entity_subscription.time_col == ENTITY_TIME_COLUMNS[EntityKey.MetricsCounters]
     assert entity_subscription.dataset == QueryDatasets.METRICS
     session_status = tag_key(org_id, "session.status")
     session_status_tag_values = get_tag_values_list(org_id, ["crashed", "init"])
     snuba_filter = entity_subscription.build_snuba_filter("", None, None)
     assert snuba_filter
     assert snuba_filter.aggregations == [["sum(value)", None, "value"]]
     assert snuba_filter.conditions == [
         ["metric_id", "=", metric_id(org_id, SessionMetricKey.SESSION)],
         [session_status, "IN", session_status_tag_values],
     ]
     assert snuba_filter.groupby == groupby
     assert snuba_filter.rollup == entity_subscription.get_granularity()
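
Read together, the two tests above pin down how the aggregate string selects the metrics entity: a crash-rate aggregate over sessions resolves to the counters entity with a sum(value) aggregation, while the same aggregate over users resolves to the sets entity with uniq(value). A plain-data restatement of that mapping (names copied from the assertions above, not from the library itself):

# entity subscription class, EntityKey member, and aggregation per subject
CRASH_RATE_DISPATCH = {
    "sessions": ("MetricsCountersEntitySubscription", "MetricsCounters", "sum(value)"),
    "users": ("MetricsSetsEntitySubscription", "MetricsSets", "uniq(value)"),
}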
Example 5
 def test_simple_sessions_for_metrics(self):
     org_id = self.organization.id
     for tag in [SessionMetricKey.SESSION.value, "session.status", "crashed", "init"]:
         indexer.record(org_id, tag)
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.METRICS,
         time_window=3600,
         aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
         extra_fields={"org_id": org_id},
     )
     snuba_filter = build_snuba_filter(
         entity_subscription,
         query="",
         environment=None,
     )
     session_status = resolve_tag_key("session.status")
     session_status_tag_values = resolve_many_weak(["crashed", "init"])
     assert snuba_filter
     assert snuba_filter.aggregations == [["sum(value)", None, "value"]]
     assert snuba_filter.conditions == [
         ["metric_id", "=", resolve(SessionMetricKey.SESSION.value)],
         [session_status, "IN", session_status_tag_values],
     ]
     assert snuba_filter.groupby == [session_status]
Example 6
 def test_boolean_query(self):
     entity_subscription = get_entity_subscription_for_dataset(
         QueryDatasets.EVENTS,
         aggregate="count_unique(user)",
         time_window=3600,
     )
     snuba_filter = build_snuba_filter(
         entity_subscription,
         query="release:latest OR release:123",
         environment=None,
     )
     assert snuba_filter
     assert snuba_filter.conditions == [
         ["type", "=", "error"],
         [
             [
                 "or",
                 [
                     ["equals", ["tags[sentry:release]", "'latest'"]],
                     ["equals", ["tags[sentry:release]", "'123'"]],
                 ],
             ],
             "=",
             1,
         ],
     ]
     assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", "count_unique_user"]]
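
The nested condition asserted above is how these legacy Snuba filters encode boolean search: each OR becomes a function expression compared against 1, with string literals quoted inside the value. A small self-contained sketch of the same shape (helper names are illustrative, not from the codebase):

def release_is(value):
    # string literals are quoted inside the value in this encoding
    return ["equals", ["tags[sentry:release]", f"'{value}'"]]

def or_condition(lhs, rhs):
    # or(lhs, rhs) = 1 encodes "lhs OR rhs"
    return [["or", [lhs, rhs]], "=", 1]

assert or_condition(release_is("latest"), release_is("123")) == [
    [
        "or",
        [
            ["equals", ["tags[sentry:release]", "'latest'"]],
            ["equals", ["tags[sentry:release]", "'123'"]],
        ],
    ],
    "=",
    1,
]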
Example 7
 def test_query_and_environment_users(self):
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.SESSIONS,
         aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
         extra_fields={"org_id": self.organization.id},
         time_window=3600,
     )
     env = self.create_environment(self.project, name="development")
     snuba_filter = build_snuba_filter(
         entity_subscription,
         query="release:[email protected]",
         environment=env,
     )
     assert snuba_filter
     assert snuba_filter.aggregations == [
         [
             "if(greater(users,0),divide(users_crashed,users),null)",
             None,
             "_crash_rate_alert_aggregate",
         ],
         ["identity", "users", "_total_count"],
     ]
     assert snuba_filter.conditions == [
         ["release", "=", "[email protected]"],
         ["environment", "=", "development"],
     ]
Example 8
 def test_get_entity_subscriptions_for_sessions_dataset(self) -> None:
     aggregate = "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate"
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.SESSIONS,
         aggregate=aggregate,
         time_window=3600,
         extra_fields={"org_id": self.organization.id},
     )
     assert isinstance(entity_subscription, SessionsEntitySubscription)
     assert entity_subscription.aggregate == aggregate
     assert entity_subscription.get_entity_extra_params() == {
         "organization": self.organization.id
     }
     assert entity_subscription.entity_key == EntityKey.Sessions
     assert entity_subscription.time_col == ENTITY_TIME_COLUMNS[EntityKey.Sessions]
     assert entity_subscription.dataset == QueryDatasets.SESSIONS
     snuba_filter = entity_subscription.build_snuba_filter("", None, None)
     assert snuba_filter
     assert snuba_filter.aggregations == [
         [
             "if(greater(sessions,0),divide(sessions_crashed,sessions),null)",
             None,
             "_crash_rate_alert_aggregate",
         ],
         ["identity", "sessions", "_total_count"],
     ]
Example 9
 def test_event_types(self):
     entity_subscription = get_entity_subscription_for_dataset(
         QueryDatasets.EVENTS,
         aggregate="count_unique(user)",
         time_window=3600,
         extra_fields={
             "event_types": [
                 SnubaQueryEventType.EventType.ERROR,
                 SnubaQueryEventType.EventType.DEFAULT,
             ]
         },
     )
     snuba_filter = build_snuba_filter(
         entity_subscription=entity_subscription,
         query="release:latest OR release:123",
         environment=None,
     )
     assert snuba_filter
     assert snuba_filter.conditions == [
         [["or", [["equals", ["type", "'error'"]], ["equals", ["type", "'default'"]]]], "=", 1],
         [
             [
                 "or",
                 [
                     ["equals", ["tags[sentry:release]", "'latest'"]],
                     ["equals", ["tags[sentry:release]", "'123'"]],
                 ],
             ],
             "=",
             1,
         ],
     ]
     assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", "count_unique_user"]]
Example 10
 def test_simple_users_for_metrics(self):
     for tag in [SessionMetricKey.USER.value, "session.status", "crashed", "init"]:
         indexer.record(tag)
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.METRICS,
         time_window=3600,
         aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
         extra_fields={"org_id": self.organization.id},
     )
     snuba_filter = build_snuba_filter(
         entity_subscription,
         query="",
         environment=None,
     )
     org_id = self.organization.id
     session_status = tag_key(org_id, "session.status")
     session_status_tag_values = get_tag_values_list(org_id, ["crashed", "init"])
     assert snuba_filter
     assert snuba_filter.aggregations == [["uniq(value)", None, "value"]]
     assert snuba_filter.conditions == [
         ["metric_id", "=", metric_id(org_id, SessionMetricKey.USER)],
         [session_status, "IN", session_status_tag_values],
     ]
     assert snuba_filter.groupby == [session_status]
Example 11
 def test_simple_transactions(self):
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.TRANSACTIONS, time_window=3600, aggregate="count_unique(user)"
     )
     snuba_filter = build_snuba_filter(entity_subscription, "", environment=None)
     assert snuba_filter
     assert snuba_filter.conditions == []
     assert snuba_filter.aggregations == [["uniq", "user", "count_unique_user"]]
Example 12
 def test_simple_events(self):
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.EVENTS,
         time_window=3600,
         aggregate="count_unique(user)",
     )
     snuba_filter = build_snuba_filter(entity_subscription, "", environment=None)
     assert snuba_filter
     assert snuba_filter.conditions == [["type", "=", "error"]]
     assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", "count_unique_user"]]
Example 13
 def test_user_query_transactions(self):
     entity_subscription = get_entity_subscription_for_dataset(
         QueryDatasets.TRANSACTIONS,
         aggregate="p95()",
         time_window=3600,
     )
     snuba_filter = build_snuba_filter(
         entity_subscription, query="user:[email protected]", environment=None
     )
     assert snuba_filter
     assert snuba_filter.conditions == [["user", "=", "[email protected]"]]
     assert snuba_filter.aggregations == [["quantile(0.95)", "duration", "p95"]]
Example 14
def _create_in_snuba(subscription: QuerySubscription) -> str:
    snuba_query = subscription.snuba_query
    entity_subscription = get_entity_subscription_for_dataset(
        dataset=QueryDatasets(snuba_query.dataset),
        aggregate=snuba_query.aggregate,
        time_window=snuba_query.time_window,
        extra_fields={
            "org_id": subscription.project.organization_id,
            "event_types": snuba_query.event_types,
        },
    )
    snuba_filter = build_snuba_filter(
        entity_subscription,
        snuba_query.query,
        snuba_query.environment,
    )

    body = {
        "project_id": subscription.project_id,
        "project": subscription.project_id,  # for SnQL SDK
        "dataset": snuba_query.dataset,
        "conditions": snuba_filter.conditions,
        "aggregations": snuba_filter.aggregations,
        "time_window": snuba_query.time_window,
        "resolution": snuba_query.resolution,
        **entity_subscription.get_entity_extra_params(),
    }

    try:
        metrics.incr("snuba.snql.subscription.create", tags={"dataset": snuba_query.dataset})
        snql_query = json_to_snql(body, entity_subscription.entity_key.value)
        snql_query.validate()
        body["query"] = str(snql_query)
        body["type"] = "delegate"  # mark this as a combined subscription
    except Exception as e:
        logger.warning(
            "snuba.snql.subscription.parsing.error",
            extra={"error": str(e), "params": json.dumps(body), "dataset": snuba_query.dataset},
        )
        metrics.incr("snuba.snql.subscription.parsing.error", tags={"dataset": snuba_query.dataset})

    response = _snuba_pool.urlopen(
        "POST",
        f"/{snuba_query.dataset}/{entity_subscription.entity_key.value}/subscriptions",
        body=json.dumps(body),
    )
    if response.status != 202:
        metrics.incr("snuba.snql.subscription.http.error", tags={"dataset": snuba_query.dataset})
        raise SnubaError("HTTP %s response from Snuba!" % response.status)
    return json.loads(response.data)["subscription_id"]
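
On the happy path json_to_snql succeeds, and the POST body carries the legacy fields plus the rendered SnQL query, with "type": "delegate" marking it as a combined subscription; if translation fails, the same body is sent with the legacy fields alone. A hedged illustration of the delegate payload shape (all values invented for illustration, not real output):

body = {
    "project_id": 42,  # hypothetical project id
    "project": 42,  # duplicated for the SnQL SDK
    "dataset": "events",
    "conditions": [["type", "=", "error"]],
    "aggregations": [["count", None, "count"]],
    "time_window": 3600,
    "resolution": 60,
    "query": "MATCH (events) SELECT count() AS count ...",  # str(snql_query), elided
    "type": "delegate",  # mark this as a combined subscription
}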
Example 15
 def test_user_query(self):
     entity_subscription = get_entity_subscription_for_dataset(
         QueryDatasets.EVENTS,
         aggregate="count()",
         time_window=3600,
     )
     snuba_filter = build_snuba_filter(
         entity_subscription, query="user:[email protected]", environment=None
     )
     assert snuba_filter
     assert snuba_filter.conditions == [
         ["type", "=", "error"],
         ["tags[sentry:user]", "=", "*****@*****.**"],
     ]
     assert snuba_filter.aggregations == [["count", None, "count"]]
Example 16
 def test_aliased_query_transactions(self):
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.TRANSACTIONS,
         time_window=3600,
         aggregate="percentile(transaction.duration,.95)",
     )
     snuba_filter = build_snuba_filter(
         entity_subscription,
         "release:latest",
         environment=None,
     )
     assert snuba_filter
     assert snuba_filter.conditions == [["release", "=", "latest"]]
     assert snuba_filter.aggregations == [
         ["quantile(0.95)", "duration", "percentile_transaction_duration__95"]
     ]
 def test_get_entity_subscription_for_transactions_dataset(self) -> None:
     aggregate = "percentile(transaction.duration,.95)"
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.TRANSACTIONS, aggregate=aggregate, time_window=3600
     )
     assert isinstance(entity_subscription, TransactionsEntitySubscription)
     assert entity_subscription.aggregate == aggregate
     assert entity_subscription.get_entity_extra_params() == {}
     assert entity_subscription.entity_key == EntityKey.Transactions
     assert entity_subscription.time_col == ENTITY_TIME_COLUMNS[EntityKey.Transactions]
     assert entity_subscription.dataset == QueryDatasets.TRANSACTIONS
     snuba_filter = entity_subscription.build_snuba_filter("", None, None)
     assert snuba_filter
     assert snuba_filter.aggregations == [
         ["quantile(0.95)", "duration", "percentile_transaction_duration__95"]
     ]
 def test_get_entity_subscription_for_events_dataset(self) -> None:
     aggregate = "count_unique(user)"
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.EVENTS, aggregate=aggregate, time_window=3600
     )
     assert isinstance(entity_subscription, EventsEntitySubscription)
     assert entity_subscription.aggregate == aggregate
     assert entity_subscription.get_entity_extra_params() == {}
     assert entity_subscription.entity_key == EntityKey.Events
     assert entity_subscription.time_col == ENTITY_TIME_COLUMNS[EntityKey.Events]
     assert entity_subscription.dataset == QueryDatasets.EVENTS
     snuba_filter = entity_subscription.build_snuba_filter("release:latest", None, None)
     assert snuba_filter
     assert snuba_filter.conditions == [
         ["type", "=", "error"],
         ["tags[sentry:release]", "=", "latest"],
     ]
     assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", "count_unique_user"]]
Example 19
 def test_query_and_environment_users_metrics(self):
     env = self.create_environment(self.project, name="development")
     for tag in [
             SessionMetricKey.USER.value,
             "session.status",
             "environment",
             "development",
             "init",
             "crashed",
             "release",
             "[email protected]",
     ]:
         indexer.record(tag)
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.METRICS,
         time_window=3600,
         aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
         extra_fields={"org_id": self.organization.id},
     )
     snuba_filter = build_snuba_filter(
         entity_subscription,
         query="release:[email protected]",
         environment=env,
     )
     org_id = self.organization.id
     assert snuba_filter
     assert snuba_filter.aggregations == [["uniq(value)", None, "value"]]
     assert snuba_filter.groupby == [tag_key(org_id, "session.status")]
     assert snuba_filter.conditions == [
         ["metric_id", "=", metric_id(org_id, SessionMetricKey.USER)],
         [
             tag_key(org_id, "session.status"),
             "IN",
             get_tag_values_list(org_id, ["crashed", "init"]),
         ],
         [tag_key(org_id, "environment"), "=", tag_value(org_id, "development")],
         [tag_key(org_id, "release"), "=", tag_value(org_id, "[email protected]")],
     ]
Example 20
 def test_query_and_environment_sessions_metrics(self):
     env = self.create_environment(self.project, name="development")
     org_id = self.organization.id
     for tag in [
             SessionMetricKey.SESSION.value,
             "session.status",
             "environment",
             "development",
             "init",
             "crashed",
             "release",
             "[email protected]",
     ]:
         indexer.record(org_id, tag)
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.METRICS,
         time_window=3600,
         aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
         extra_fields={"org_id": org_id},
     )
     snuba_filter = build_snuba_filter(
         entity_subscription,
         query="release:[email protected]",
         environment=env,
     )
     assert snuba_filter
     assert snuba_filter.aggregations == [["sum(value)", None, "value"]]
     assert snuba_filter.groupby == [resolve_tag_key("session.status")]
     assert snuba_filter.conditions == [
         ["metric_id", "=", resolve(SessionMetricKey.SESSION.value)],
         [
             resolve_tag_key("session.status"),
             "IN",
             resolve_many_weak(["crashed", "init"]),
         ],
         [resolve_tag_key("environment"), "=", resolve_weak("development")],
         [resolve_tag_key("release"), "=", resolve_weak("[email protected]")],
     ]
Example 21
 def test_simple_users(self):
     entity_subscription = get_entity_subscription_for_dataset(
         dataset=QueryDatasets.SESSIONS,
         time_window=3600,
         aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
         extra_fields={"org_id": self.organization.id},
     )
     snuba_filter = build_snuba_filter(
         entity_subscription,
         query="",
         environment=None,
     )
     assert snuba_filter
     assert snuba_filter.aggregations == [
         [
             "if(greater(users,0),divide(users_crashed,users),null)",
             None,
             "_crash_rate_alert_aggregate",
         ],
         ["identity", "users", "_total_count"],
     ]
Example 22
    def validate(self, data):
        """
        Performs validation on an alert rule's data.
        This includes ensuring there are either one or two triggers, each of
        which has actions and proper thresholds set. The critical trigger must
        both alert and resolve 'after' the warning trigger (whether that means
        > or < the value depends on the threshold type).
        """
        data.setdefault("dataset", QueryDatasets.EVENTS)
        project_id = data.get("projects")
        if not project_id:
            # We just need a valid project id from the org so that we can verify
            # the query. We don't use the returned data anywhere, so it doesn't
            # matter which.
            project_id = list(self.context["organization"].project_set.all()[:1])

        try:
            entity_subscription = get_entity_subscription_for_dataset(
                dataset=QueryDatasets(data["dataset"]),
                aggregate=data["aggregate"],
                time_window=int(timedelta(minutes=data["time_window"]).total_seconds()),
                extra_fields={
                    "org_id": project_id[0].organization_id,
                    "event_types": data.get("event_types"),
                },
            )
        except UnsupportedQuerySubscription as e:
            raise serializers.ValidationError(f"{e}")

        try:
            snuba_filter = build_snuba_filter(
                entity_subscription,
                data["query"],
                data.get("environment"),
                params={
                    "project_id": [p.id for p in project_id],
                    "start": timezone.now() - timedelta(minutes=10),
                    "end": timezone.now(),
                },
            )
            if any(cond[0] == "project_id"
                   for cond in snuba_filter.conditions):
                raise serializers.ValidationError(
                    {"query": "Project is an invalid search term"})
        except (InvalidSearchQuery, ValueError) as e:
            raise serializers.ValidationError(f"Invalid Query or Metric: {e}")
        else:
            if not snuba_filter.aggregations:
                raise serializers.ValidationError(
                    "Invalid Metric: Please pass a valid function for aggregation"
                )

            dataset = Dataset(data["dataset"].value)
            self._validate_time_window(dataset, data.get("time_window"))

            conditions = copy(snuba_filter.conditions)
            time_col = entity_subscription.time_col
            conditions += [
                [time_col, ">=", snuba_filter.start],
                [time_col, "<", snuba_filter.end],
            ]

            body = {
                "project": project_id[0].id,
                "project_id": project_id[0].id,
                "aggregations": snuba_filter.aggregations,
                "conditions": conditions,
                "filter_keys": snuba_filter.filter_keys,
                "having": snuba_filter.having,
                "dataset": dataset.value,
                "limit": 1,
                **entity_subscription.get_entity_extra_params(),
            }

            try:
                snql_query = json_to_snql(body, entity_subscription.entity_key.value)
                snql_query.validate()
            except Exception as e:
                raise serializers.ValidationError(
                    str(e),
                    params={"params": json.dumps(body), "dataset": data["dataset"].value},
                )

            try:
                raw_snql_query(snql_query, referrer="alertruleserializer.test_query")
            except Exception:
                logger.exception("Error while validating snuba alert rule query")
                raise serializers.ValidationError(
                    "Invalid Query or Metric: An error occurred while attempting "
                    "to run the query"
                )

        triggers = data.get("triggers", [])
        if not triggers:
            raise serializers.ValidationError("Must include at least one trigger")
        if len(triggers) > 2:
            raise serializers.ValidationError(
                "Must send 1 or 2 triggers - A critical trigger, and an optional warning trigger"
            )

        event_types = data.get("event_types")

        valid_event_types = dataset_valid_event_types.get(data["dataset"], set())
        if event_types and set(event_types) - valid_event_types:
            raise serializers.ValidationError(
                "Invalid event types for this dataset. Valid event types are %s"
                % sorted(et.name.lower() for et in valid_event_types)
            )

        for i, (trigger, expected_label) in enumerate(
            zip(triggers, (CRITICAL_TRIGGER_LABEL, WARNING_TRIGGER_LABEL))
        ):
            if trigger.get("label", None) != expected_label:
                raise serializers.ValidationError(
                    f'Trigger {i + 1} must be labeled "{expected_label}"'
                )
        threshold_type = data["threshold_type"]
        self._translate_thresholds(threshold_type, data.get("comparison_delta"), triggers, data)

        critical = triggers[0]

        self._validate_trigger_thresholds(threshold_type, critical, data.get("resolve_threshold"))

        if len(triggers) == 2:
            warning = triggers[1]
            self._validate_trigger_thresholds(
                threshold_type, warning, data.get("resolve_threshold")
            )
            self._validate_critical_warning_triggers(threshold_type, critical, warning)

        return data
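
For the trigger checks near the end, data must carry one or two triggers labeled to match CRITICAL_TRIGGER_LABEL and then WARNING_TRIGGER_LABEL. A hedged sketch of a payload fragment that would pass those checks; the label strings and threshold field names are assumptions based on the constants and helper names above, not values confirmed by this excerpt:

data = {
    "dataset": QueryDatasets.EVENTS,
    "aggregate": "count()",
    "query": "",
    "time_window": 60,  # minutes; converted to seconds via timedelta above
    "threshold_type": 0,  # assumed encoding of the threshold type
    "triggers": [
        # assumes CRITICAL_TRIGGER_LABEL == "critical", WARNING_TRIGGER_LABEL == "warning"
        {"label": "critical", "alert_threshold": 200},
        {"label": "warning", "alert_threshold": 100},
    ],
}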