Example #1
def update_subscription_in_snuba(query_subscription_id, old_dataset=None, **kwargs):
    """
    Task to update a corresponding subscription in Snuba from a `QuerySubscription` in
    Sentry. Updating in Snuba means deleting the existing subscription, then creating a
    new one.
    """
    try:
        subscription = QuerySubscription.objects.get(id=query_subscription_id)
    except QuerySubscription.DoesNotExist:
        metrics.incr("snuba.subscriptions.update.subscription_does_not_exist")
        return

    if subscription.status != QuerySubscription.Status.UPDATING.value:
        metrics.incr("snuba.subscriptions.update.incorrect_status")
        return

    if subscription.subscription_id is not None:
        dataset = old_dataset if old_dataset is not None else subscription.snuba_query.dataset
        entity_key: EntityKey = map_aggregate_to_entity_key(
            QueryDatasets(dataset), subscription.snuba_query.aggregate
        )
        _delete_from_snuba(
            QueryDatasets(dataset),
            subscription.subscription_id,
            entity_key,
        )

    subscription_id = _create_in_snuba(subscription)
    subscription.update(
        status=QuerySubscription.Status.ACTIVE.value, subscription_id=subscription_id
    )
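For context, the QueryDatasets enum used throughout these examples is a small string-valued enum whose values are Snuba dataset names (they are interpolated into the "/<dataset>/subscriptions" URLs below). A minimal sketch of how such an enum might be defined; the member list is an assumption inferred from the examples, and Sentry's real enum may differ:

from enum import Enum

class QueryDatasets(Enum):
    # Sketch only: members inferred from the examples below, where the
    # "events", "transactions" and "sessions" datasets appear.
    EVENTS = "events"
    TRANSACTIONS = "transactions"
    SESSIONS = "sessions"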
Example #2
def migrate_subscriptions(apps, schema_editor):
    QuerySubscription = apps.get_model("sentry", "QuerySubscription")

    for subscription in RangeQuerySetWrapperWithProgressBar(
            QuerySubscription.objects.select_related("snuba_query").all()):
        if subscription.subscription_id is not None:
            subscription_id = None
            try:
                subscription_id = _create_in_snuba(subscription)
            except Exception as e:
                logging.exception(
                    f"failed to recreate {subscription.subscription_id}: {e}")
                continue

            try:
                _delete_from_snuba(
                    QueryDatasets(subscription.snuba_query.dataset),
                    subscription.subscription_id,
                )
            except Exception as e:
                try:
                    # Delete the subscription we just created to avoid orphans
                    _delete_from_snuba(
                        QueryDatasets(subscription.snuba_query.dataset),
                        subscription_id,
                    )
                except Exception as oe:
                    logging.exception(
                        f"failed to delete orphan {subscription_id}: {oe}")

                logging.exception(
                    f"failed to delete {subscription.subscription_id}: {e}")
                continue

            subscription.update(subscription_id=subscription_id)
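A data migration like this is normally wired into a Django migration via RunPython; a minimal sketch, with the dependency name as a placeholder:

from django.db import migrations


class Migration(migrations.Migration):
    # Placeholder dependency; the real migration names its actual predecessor here.
    dependencies = [("sentry", "0xxx_previous_migration")]

    operations = [
        # No meaningful reverse step: recreating Snuba subscriptions is not easily undone.
        migrations.RunPython(migrate_subscriptions, migrations.RunPython.noop),
    ]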
Example #3
File: tasks.py Project: pierredup/sentry
def _create_in_snuba(subscription):
    conditions = resolve_discover_aliases(get_filter(subscription.query))[0].conditions
    try:
        environment = subscription.environments.all()[:1].get()
    except Environment.DoesNotExist:
        environment = None

    if environment:
        conditions.append(["environment", "=", environment.name])
    conditions = apply_dataset_conditions(QueryDatasets(subscription.dataset), conditions)
    response = _snuba_pool.urlopen(
        "POST",
        "/%s/subscriptions" % (subscription.dataset,),
        body=json.dumps(
            {
                "project_id": subscription.project_id,
                "dataset": subscription.dataset,
                # We only care about conditions here. Filter keys only matter for
                # filtering to project and groups. Projects are handled with an
                # explicit param, and groups can't be queried here.
                "conditions": conditions,
                "aggregations": [
                    query_aggregation_to_snuba[QueryAggregations(subscription.aggregation)]
                ],
                "time_window": subscription.time_window,
                "resolution": subscription.resolution,
            }
        ),
    )
    if response.status != 202:
        raise SnubaError("HTTP %s response from Snuba!" % response.status)
    return json.loads(response.data)["subscription_id"]
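On success the Snuba subscriptions endpoint answers with HTTP 202 and a JSON body carrying the new subscription id, which is all _create_in_snuba returns; an illustrative exchange (the id value is made up):

# POST /events/subscriptions          -> HTTP 202 Accepted
# {"subscription_id": "0/abc123..."}  -> returned as the new subscription_id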
Example #4
    def validate_dataset(self, dataset):
        try:
            return QueryDatasets(dataset)
        except ValueError:
            raise serializers.ValidationError(
                "Invalid dataset, valid values are %s" %
                [item.value for item in QueryDatasets])
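The validator just round-trips the raw string through the enum; illustrative behavior, assuming the enum sketch from Example #1:

# validate_dataset("events")  -> QueryDatasets.EVENTS
# validate_dataset("bogus")   -> serializers.ValidationError(
#     "Invalid dataset, valid values are ['events', 'transactions', 'sessions']")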
Example #5
def update_snuba_query(snuba_query, dataset, query, aggregate, time_window,
                       resolution, environment):
    """
    Updates a SnubaQuery. Triggers updates to any related QuerySubscriptions.

    :param snuba_query: The `SnubaQuery` to update.
    :param dataset: The snuba dataset to query and aggregate over
    :param query: An event search query that we can parse and convert into a
    set of Snuba conditions
    :param aggregate: An aggregate to calculate over the time window
    :param time_window: The time window to aggregate over
    :param resolution: How often to receive updates/bucket size
    :param environment: An optional environment to filter by
    :return: A list of QuerySubscriptions
    """
    old_dataset = QueryDatasets(snuba_query.dataset)
    with transaction.atomic():
        query_subscriptions = list(snuba_query.subscriptions.all())
        snuba_query.update(
            dataset=dataset.value,
            query=query,
            aggregate=aggregate,
            time_window=int(time_window.total_seconds()),
            resolution=int(resolution.total_seconds()),
            environment=environment,
        )
        bulk_update_snuba_subscriptions(query_subscriptions, old_dataset)
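Callers pass time_window and resolution as timedelta objects, which are stored as whole seconds; a hedged call sketch (the query and aggregate values are illustrative):

from datetime import timedelta

update_snuba_query(
    snuba_query,                        # an existing SnubaQuery instance
    dataset=QueryDatasets.EVENTS,
    query="level:error",
    aggregate="count()",
    time_window=timedelta(minutes=60),
    resolution=timedelta(minutes=1),
    environment=None,
)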
Example #6
    def _build_discover_query(self, incident):
        query = incident.alert_rule.snuba_query.query
        if QueryDatasets(incident.alert_rule.snuba_query.dataset) == QueryDatasets.EVENTS:
            condition = "event.type:error"
            query = "{} {}".format(condition, query) if query else condition

        return query
Example #7
def update_subscription_in_snuba(query_subscription_id):
    """
    Task to update a corresponding subscription in Snuba from a `QuerySubscription` in
    Sentry. Updating in Snuba means deleting the existing subscription, then creating a
    new one.
    """
    try:
        subscription = QuerySubscription.objects.get(id=query_subscription_id)
    except QuerySubscription.DoesNotExist:
        metrics.incr("snuba.subscriptions.update.subscription_does_not_exist")
        return

    if subscription.status != QuerySubscription.Status.UPDATING.value:
        metrics.incr("snuba.subscriptions.update.incorrect_status")
        return

    if subscription.subscription_id is not None:
        _delete_from_snuba(
            QueryDatasets(subscription.snuba_query.dataset), subscription.subscription_id
        )

    subscription_id = _create_in_snuba(subscription)
    subscription.update(
        status=QuerySubscription.Status.ACTIVE.value, subscription_id=subscription_id
    )
Example #8
    def _build_discover_query(self, incident):
        return apply_dataset_query_conditions(
            QueryDatasets(incident.alert_rule.snuba_query.dataset),
            incident.alert_rule.snuba_query.query,
            incident.alert_rule.snuba_query.event_types,
            discover=True,
        )
Example #9
def update_snuba_subscription(subscription, query, aggregation, time_window,
                              resolution, environment_names):
    """
    Updates a subscription to a snuba query.

    :param query: An event search query that we can parse and convert into a
    set of Snuba conditions
    :param aggregation: An aggregation to calculate over the time window
    :param time_window: The time window to aggregate over
    :param resolution: How often to receive updates/bucket size
    :param environment_names: List of environment names to filter by
    :return: The QuerySubscription representing the subscription
    """
    # TODO: Move this call to snuba into a task. This lets us successfully update a
    # subscription in postgres and rollback as needed without having to create/delete
    # from snuba
    _delete_from_snuba(subscription)
    subscription_id = _create_in_snuba(
        subscription.project,
        QueryDatasets(subscription.dataset),
        query,
        aggregation,
        time_window,
        resolution,
        environment_names,
    )
    subscription.update(
        subscription_id=subscription_id,
        query=query,
        aggregation=aggregation.value,
        time_window=int(time_window.total_seconds()),
        resolution=int(resolution.total_seconds()),
    )
    return subscription
Example #10
File: tasks.py Project: sugusbs/sentry
def _create_in_snuba(subscription):
    snuba_query = subscription.snuba_query
    snuba_filter = build_snuba_filter(
        QueryDatasets(snuba_query.dataset),
        snuba_query.query,
        snuba_query.aggregate,
        snuba_query.environment,
        snuba_query.event_types,
    )
    response = _snuba_pool.urlopen(
        "POST",
        "/%s/subscriptions" % (snuba_query.dataset,),
        body=json.dumps(
            {
                "project_id": subscription.project_id,
                "dataset": snuba_query.dataset,
                "conditions": snuba_filter.conditions,
                "aggregations": snuba_filter.aggregations,
                "time_window": snuba_query.time_window,
                "resolution": snuba_query.resolution,
            }
        ),
    )
    if response.status != 202:
        raise SnubaError("HTTP %s response from Snuba!" % response.status)
    return json.loads(response.data)["subscription_id"]
Example #11
File: tasks.py Project: sugusbs/sentry
def delete_subscription_from_snuba(query_subscription_id, **kwargs):
    """
    Task to delete a corresponding subscription in Snuba from a `QuerySubscription` in
    Sentry.
    If the local subscription is marked for deletion (as opposed to disabled),
    then we delete the local subscription once we've successfully removed from Snuba.
    """
    try:
        subscription = QuerySubscription.objects.get(id=query_subscription_id)
    except QuerySubscription.DoesNotExist:
        metrics.incr("snuba.subscriptions.delete.subscription_does_not_exist")
        return

    if subscription.status not in [
        QuerySubscription.Status.DELETING.value,
        QuerySubscription.Status.DISABLED.value,
    ]:
        metrics.incr("snuba.subscriptions.delete.incorrect_status")
        return

    if subscription.subscription_id is not None:
        _delete_from_snuba(
            QueryDatasets(subscription.snuba_query.dataset), subscription.subscription_id
        )

    if subscription.status == QuerySubscription.Status.DELETING.value:
        subscription.delete()
    else:
        subscription.update(subscription_id=None)
Example #12
def subscribe_projects_to_alert_rule(alert_rule, projects):
    """
    Subscribes a list of projects to an alert rule
    :return: The list of created subscriptions
    """
    try:
        environment = alert_rule.environment.all()[:1].get()
    except Environment.DoesNotExist:
        environment = None

    subscriptions = bulk_create_snuba_subscriptions(
        projects,
        tasks.INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
        QueryDatasets(alert_rule.dataset),
        alert_rule.query,
        QueryAggregations(alert_rule.aggregation),
        timedelta(minutes=alert_rule.time_window),
        timedelta(minutes=alert_rule.resolution),
        environment,
    )
    subscription_links = [
        AlertRuleQuerySubscription(query_subscription=subscription,
                                   alert_rule=alert_rule)
        for subscription in subscriptions
    ]
    AlertRuleQuerySubscription.objects.bulk_create(subscription_links)
    return subscriptions
Example #13
File: tasks.py Project: winter5080/sentry
def _create_in_snuba(subscription):
    snuba_query = subscription.snuba_query
    snuba_filter = get_filter(snuba_query.query)
    snuba_filter.update_with(
        resolve_field_list([snuba_query.aggregate],
                           snuba_filter,
                           auto_fields=False))
    snuba_filter = resolve_discover_aliases(snuba_filter)[0]
    if snuba_query.environment:
        snuba_filter.conditions.append(
            ["environment", "=", snuba_query.environment.name])
    conditions = apply_dataset_conditions(QueryDatasets(snuba_query.dataset),
                                          snuba_filter.conditions)
    response = _snuba_pool.urlopen(
        "POST",
        "/%s/subscriptions" % (snuba_query.dataset, ),
        body=json.dumps({
            "project_id": subscription.project_id,
            "dataset": snuba_query.dataset,
            "conditions": conditions,
            "aggregations": snuba_filter.aggregations,
            "time_window": snuba_query.time_window,
            "resolution": snuba_query.resolution,
        }),
    )
    if response.status != 202:
        raise SnubaError("HTTP %s response from Snuba!" % response.status)
    return json.loads(response.data)["subscription_id"]
Example #14
File: logic.py Project: FengLee1113/sentry
def build_incident_query_params(incident,
                                start=None,
                                end=None,
                                windowed_stats=False):
    params = {}
    params["start"], params["end"] = calculate_incident_time_range(
        incident, start, end, windowed_stats=windowed_stats)

    group_ids = list(
        IncidentGroup.objects.filter(incident=incident).values_list("group_id",
                                                                    flat=True))
    if group_ids:
        params["group_ids"] = group_ids
    project_ids = list(
        IncidentProject.objects.filter(incident=incident).values_list(
            "project_id", flat=True))
    if project_ids:
        params["project_id"] = project_ids

    snuba_filter = get_filter(incident.alert_rule.snuba_query.query, params)
    conditions = resolve_discover_aliases(snuba_filter)[0].conditions
    if incident.alert_rule:
        conditions = apply_dataset_conditions(
            QueryDatasets(incident.alert_rule.snuba_query.dataset), conditions)
    return {
        "start": snuba_filter.start,
        "end": snuba_filter.end,
        "conditions": conditions,
        "filter_keys": snuba_filter.filter_keys,
        "having": [],
    }
Example #15
def build_incident_query_params(incident, start=None, end=None, windowed_stats=False):
    params = {}
    params["start"], params["end"] = calculate_incident_time_range(
        incident, start, end, windowed_stats=windowed_stats
    )

    group_ids = list(
        IncidentGroup.objects.filter(incident=incident).values_list("group_id", flat=True)
    )
    if group_ids:
        params["group_ids"] = group_ids
    project_ids = list(
        IncidentProject.objects.filter(incident=incident).values_list("project_id", flat=True)
    )
    if project_ids:
        params["project_id"] = project_ids

    snuba_query = incident.alert_rule.snuba_query
    snuba_filter = build_snuba_filter(
        QueryDatasets(snuba_query.dataset),
        snuba_query.query,
        snuba_query.aggregate,
        snuba_query.environment,
        params=params,
    )

    return {
        "start": snuba_filter.start,
        "end": snuba_filter.end,
        "conditions": snuba_filter.conditions,
        "filter_keys": snuba_filter.filter_keys,
        "having": [],
        "aggregations": snuba_filter.aggregations,
    }
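The returned dict is shaped so it can be expanded straight into a Snuba query call, much as Example #19 below does with raw_query; a hedged sketch (the dataset and referrer values are illustrative):

query_params = build_incident_query_params(incident, windowed_stats=True)
results = raw_query(
    dataset=Dataset.Events,
    limit=1,
    referrer="incidents.query_params.example",
    **query_params,   # start, end, conditions, filter_keys, having, aggregations
)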
Example #16
File: tasks.py Project: sugusbs/sentry
def create_subscription_in_snuba(query_subscription_id, **kwargs):
    """
    Task to create a corresponding subscription in Snuba from a `QuerySubscription` in
    Sentry. We store the snuba subscription id locally on success.
    """
    try:
        subscription = QuerySubscription.objects.get(id=query_subscription_id)
    except QuerySubscription.DoesNotExist:
        metrics.incr("snuba.subscriptions.create.subscription_does_not_exist")
        return
    if subscription.status != QuerySubscription.Status.CREATING.value:
        metrics.incr("snuba.subscriptions.create.incorrect_status")
        return
    if subscription.subscription_id is not None:
        metrics.incr("snuba.subscriptions.create.already_created_in_snuba")
        # This mostly shouldn't happen, but it's possible that a subscription can get
        # into this state. Just attempt to delete the existing subscription and then
        # create a new one.
        try:
            _delete_from_snuba(
                QueryDatasets(subscription.snuba_query.dataset), subscription.subscription_id
            )
        except SnubaError:
            logger.exception("Failed to delete subscription")

    subscription_id = _create_in_snuba(subscription)
    subscription.update(
        status=QuerySubscription.Status.ACTIVE.value, subscription_id=subscription_id
    )
Example #17
def migrate_subscriptions(apps, schema_editor):
    QuerySubscription = apps.get_model("sentry", "QuerySubscription")
    AppSnubaQueryEventType = apps.get_model("sentry", "SnubaQueryEventType")

    for subscription in RangeQuerySetWrapperWithProgressBar(
            QuerySubscription.objects.select_related("snuba_query").all()):
        if subscription.subscription_id is not None:
            # The migration apps don't build this property, so manually set it.
            raw_event_types = AppSnubaQueryEventType.objects.filter(
                snuba_query=subscription.snuba_query).all()
            event_types = [
                SnubaQueryEventType.EventType(ev.type)
                for ev in raw_event_types
            ]
            setattr(subscription.snuba_query, "event_types", event_types)

            subscription_id = None
            try:
                subscription_id = _create_in_snuba(subscription)
            except Exception as e:
                logging.exception(
                    f"failed to recreate {subscription.subscription_id}: {e}")
                continue

            try:
                _delete_from_snuba(
                    QueryDatasets(subscription.snuba_query.dataset),
                    subscription.subscription_id,
                )
            except Exception as e:
                try:
                    # Delete the subscription we just created to avoid orphans
                    _delete_from_snuba(
                        QueryDatasets(subscription.snuba_query.dataset),
                        subscription_id,
                    )
                except Exception as oe:
                    logging.exception(
                        f"failed to delete orphan {subscription_id}: {oe}")

                logging.exception(
                    f"failed to delete {subscription.subscription_id}: {e}")
                continue

            QuerySubscription.objects.filter(id=subscription.id).update(
                subscription_id=subscription_id)
Example #18
def _create_in_snuba(subscription):
    snuba_query = subscription.snuba_query
    snuba_filter = build_snuba_filter(
        QueryDatasets(snuba_query.dataset),
        snuba_query.query,
        snuba_query.aggregate,
        snuba_query.environment,
        snuba_query.event_types,
    )

    body = {
        "project_id": subscription.project_id,
        "project": subscription.project_id,  # for SnQL SDK
        "dataset": snuba_query.dataset,
        "conditions": snuba_filter.conditions,
        "aggregations": snuba_filter.aggregations,
        "time_window": snuba_query.time_window,
        "resolution": snuba_query.resolution,
    }

    if Dataset(snuba_query.dataset) == Dataset.Sessions:
        body.update({
            "organization": subscription.project.organization_id,
        })

    try:
        metrics.incr("snuba.snql.subscription.create",
                     tags={"dataset": snuba_query.dataset})
        snql_query = json_to_snql(body, snuba_query.dataset)
        snql_query.validate()
        body["query"] = str(snql_query)
        body["type"] = "delegate"  # mark this as a combined subscription
    except Exception as e:
        logger.warning(
            "snuba.snql.subscription.parsing.error",
            extra={
                "error": str(e),
                "params": json.dumps(body),
                "dataset": snuba_query.dataset
            },
        )
        metrics.incr("snuba.snql.subscription.parsing.error",
                     tags={"dataset": snuba_query.dataset})

    response = _snuba_pool.urlopen(
        "POST",
        f"/{snuba_query.dataset}/subscriptions",
        body=json.dumps(body),
    )
    if response.status != 202:
        metrics.incr("snuba.snql.subscription.http.error",
                     tags={"dataset": snuba_query.dataset})
        raise SnubaError("HTTP %s response from Snuba!" % response.status)
    return json.loads(response.data)["subscription_id"]
Example #19
    def get_comparison_aggregation_value(self, subscription_update,
                                         aggregation_value):
        # For comparison alerts run a query over the comparison period and use it to calculate the
        # % change.
        delta = timedelta(seconds=self.alert_rule.comparison_delta)
        end = subscription_update["timestamp"] - delta
        snuba_query = self.subscription.snuba_query
        start = end - timedelta(seconds=snuba_query.time_window)

        entity_subscription = get_entity_subscription_for_dataset(
            dataset=QueryDatasets(snuba_query.dataset),
            aggregate=snuba_query.aggregate,
            time_window=snuba_query.time_window,
            extra_fields={
                "org_id": self.subscription.project.organization,
                "event_types": snuba_query.event_types,
            },
        )
        try:
            snuba_filter = build_snuba_filter(
                entity_subscription,
                snuba_query.query,
                snuba_query.environment,
                params={
                    "project_id": [self.subscription.project_id],
                    "start": start,
                    "end": end,
                },
            )
            results = raw_query(
                aggregations=snuba_filter.aggregations,
                start=snuba_filter.start,
                end=snuba_filter.end,
                conditions=snuba_filter.conditions,
                filter_keys=snuba_filter.filter_keys,
                having=snuba_filter.having,
                dataset=Dataset(snuba_query.dataset),
                limit=1,
                referrer="subscription_processor.comparison_query",
            )
            comparison_aggregate = list(results["data"][0].values())[0]
        except Exception:
            logger.exception("Failed to run comparison query")
            return

        if not comparison_aggregate:
            metrics.incr(
                "incidents.alert_rules.skipping_update_comparison_value_invalid"
            )
            return

        return (aggregation_value / comparison_aggregate) * 100
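The return value expresses the current aggregation as a percentage of the comparison period: for example, a current value of 150 against a comparison value of 100 yields (150 / 100) * 100 = 150.0, i.e. a 50% increase, while a current value of 80 would yield 80.0, a 20% decrease.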
Example #20
def delete_snuba_subscription(subscription):
    """
    Deletes a subscription to a snuba query.
    :param subscription: The subscription to delete
    :return:
    """
    with transaction.atomic():
        subscription.delete()
        # TODO: Move this call to snuba into a task. This lets us successfully delete a
        # subscription in postgres and rollback as needed without having to create/delete
        # from snuba
        _delete_from_snuba(QueryDatasets(subscription.dataset),
                           subscription.subscription_id)
Example #21
    def _build_discover_query(self, incident):
        query = incident.alert_rule.snuba_query.query
        dataset = QueryDatasets(incident.alert_rule.snuba_query.dataset)
        condition = None

        if dataset == QueryDatasets.EVENTS:
            condition = "event.type:error"
        elif dataset == QueryDatasets.TRANSACTIONS:
            condition = "event.type:transaction"

        if condition:
            query = "{} {}".format(condition, query) if query else condition

        return query
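Illustrative inputs and outputs for this version (the stored query "release:latest" is made up):

# EVENTS dataset,       query "release:latest" -> "event.type:error release:latest"
# TRANSACTIONS dataset, query "release:latest" -> "event.type:transaction release:latest"
# EVENTS dataset,       empty query            -> "event.type:error"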
Example #22
def _create_in_snuba(subscription: QuerySubscription) -> str:
    snuba_query = subscription.snuba_query
    entity_subscription = get_entity_subscription_for_dataset(
        dataset=QueryDatasets(snuba_query.dataset),
        aggregate=snuba_query.aggregate,
        time_window=snuba_query.time_window,
        extra_fields={
            "org_id": subscription.project.organization_id,
            "event_types": snuba_query.event_types,
        },
    )
    snuba_filter = build_snuba_filter(
        entity_subscription,
        snuba_query.query,
        snuba_query.environment,
    )

    body = {
        "project_id": subscription.project_id,
        "project": subscription.project_id,  # for SnQL SDK
        "dataset": snuba_query.dataset,
        "conditions": snuba_filter.conditions,
        "aggregations": snuba_filter.aggregations,
        "time_window": snuba_query.time_window,
        "resolution": snuba_query.resolution,
        **entity_subscription.get_entity_extra_params(),
    }

    try:
        metrics.incr("snuba.snql.subscription.create", tags={"dataset": snuba_query.dataset})
        snql_query = json_to_snql(body, entity_subscription.entity_key.value)
        snql_query.validate()
        body["query"] = str(snql_query)
        body["type"] = "delegate"  # mark this as a combined subscription
    except Exception as e:
        logger.warning(
            "snuba.snql.subscription.parsing.error",
            extra={"error": str(e), "params": json.dumps(body), "dataset": snuba_query.dataset},
        )
        metrics.incr("snuba.snql.subscription.parsing.error", tags={"dataset": snuba_query.dataset})

    response = _snuba_pool.urlopen(
        "POST",
        f"/{snuba_query.dataset}/{entity_subscription.entity_key.value}/subscriptions",
        body=json.dumps(body),
    )
    if response.status != 202:
        metrics.incr("snuba.snql.subscription.http.error", tags={"dataset": snuba_query.dataset})
        raise SnubaError("HTTP %s response from Snuba!" % response.status)
    return json.loads(response.data)["subscription_id"]
Example #23
def bulk_build_incident_query_params(incidents,
                                     start=None,
                                     end=None,
                                     windowed_stats=False):
    incident_groups = defaultdict(list)
    for incident_id, group_id in IncidentGroup.objects.filter(
            incident__in=incidents).values_list("incident_id", "group_id"):
        incident_groups[incident_id].append(group_id)
    incident_projects = defaultdict(list)
    for incident_id, project_id in IncidentProject.objects.filter(
            incident__in=incidents).values_list("incident_id", "project_id"):
        incident_projects[incident_id].append(project_id)

    attach_foreignkey(incidents, Incident.alert_rule)

    query_args_list = []
    for incident in incidents:
        params = {}

        params["start"], params["end"] = calculate_incident_time_range(
            incident, start, end, windowed_stats=windowed_stats)

        group_ids = incident_groups[incident.id]
        if group_ids:
            params["group_ids"] = group_ids
        project_ids = incident_projects[incident.id]
        if project_ids:
            params["project_id"] = project_ids

        snuba_filter = get_filter(incident.alert_rule.snuba_query.query,
                                  params)
        conditions = resolve_discover_aliases(snuba_filter)[0].conditions
        if incident.alert_rule:
            conditions = apply_dataset_conditions(
                QueryDatasets(incident.alert_rule.snuba_query.dataset),
                conditions)
        snuba_args = {
            "start": snuba_filter.start,
            "end": snuba_filter.end,
            "conditions": conditions,
            "filter_keys": snuba_filter.filter_keys,
            "having": [],
        }
        query_args_list.append(snuba_args)

    return query_args_list
Example #24
def update_snuba_query(snuba_query, dataset, query, aggregate, time_window,
                       resolution, environment, event_types):
    """
    Updates a SnubaQuery. Triggers updates to any related QuerySubscriptions.

    :param snuba_query: The `SnubaQuery` to update.
    :param dataset: The snuba dataset to query and aggregate over
    :param query: An event search query that we can parse and convert into a
    set of Snuba conditions
    :param aggregate: An aggregate to calculate over the time window
    :param time_window: The time window to aggregate over
    :param resolution: How often to receive updates/bucket size
    :param environment: An optional environment to filter by
    :param event_types: A (currently) optional list of event_types that apply to this
    query. If not passed, we'll use the existing event types on the query.
    :return: A list of QuerySubscriptions
    """
    current_event_types = set(snuba_query.event_types)
    if not event_types:
        event_types = current_event_types

    new_event_types = set(event_types) - current_event_types
    removed_event_types = current_event_types - set(event_types)
    old_dataset = QueryDatasets(snuba_query.dataset)
    with transaction.atomic():
        query_subscriptions = list(snuba_query.subscriptions.all())
        snuba_query.update(
            dataset=dataset.value,
            query=query,
            aggregate=aggregate,
            time_window=int(time_window.total_seconds()),
            resolution=int(resolution.total_seconds()),
            environment=environment,
        )
        if new_event_types:
            SnubaQueryEventType.objects.bulk_create([
                SnubaQueryEventType(snuba_query=snuba_query,
                                    type=event_type.value)
                for event_type in set(new_event_types)
            ])
        if removed_event_types:
            SnubaQueryEventType.objects.filter(
                snuba_query=snuba_query,
                type__in=[et.value for et in removed_event_types]).delete()

        bulk_update_snuba_subscriptions(query_subscriptions, old_dataset)
Example #25
def convert_alert_rule_to_snuba_query(alert_rule):
    """
    Temporary method to convert existing alert rules to have a snuba query
    """
    if alert_rule.snuba_query:
        return

    with transaction.atomic():
        snuba_query = create_snuba_query(
            QueryDatasets(alert_rule.dataset),
            alert_rule.query,
            QueryAggregations(alert_rule.aggregation),
            timedelta(minutes=alert_rule.time_window),
            timedelta(minutes=alert_rule.resolution),
            alert_rule.environment,
        )
        alert_rule.update(snuba_query=snuba_query)
        alert_rule.query_subscriptions.all().update(snuba_query=snuba_query)
Example #26
File: tasks.py Project: pierredup/sentry
def delete_subscription_from_snuba(query_subscription_id):
    """
    Task to delete a corresponding subscription in Snuba from a `QuerySubscription` in
    Sentry. Deletes the local subscription once we've successfully removed from Snuba.
    """
    try:
        subscription = QuerySubscription.objects.get(id=query_subscription_id)
    except QuerySubscription.DoesNotExist:
        metrics.incr("snuba.subscriptions.delete.subscription_does_not_exist")
        return

    if subscription.status != QuerySubscription.Status.DELETING.value:
        metrics.incr("snuba.subscriptions.delete.incorrect_status")
        return

    if subscription.subscription_id is not None:
        _delete_from_snuba(QueryDatasets(subscription.dataset), subscription.subscription_id)

    subscription.delete()
Example #27
File: logic.py Project: zvrr/sentry
def subscribe_projects_to_alert_rule(alert_rule, projects):
    """
    Subscribes a list of projects to an alert rule
    :return: The list of created subscriptions
    """
    subscriptions = bulk_create_snuba_subscriptions(
        projects,
        tasks.INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
        QueryDatasets(alert_rule.dataset),
        alert_rule.query,
        QueryAggregations(alert_rule.aggregation),
        alert_rule.time_window,
        alert_rule.resolution,
    )
    subscription_links = [
        AlertRuleQuerySubscription(query_subscription=subscription, alert_rule=alert_rule)
        for subscription in subscriptions
    ]
    AlertRuleQuerySubscription.objects.bulk_create(subscription_links)
    return subscriptions
Example #28
    def build_snuba_filter(
        self,
        query: str,
        environment: Optional[Environment],
        params: Optional[Mapping[str, Any]] = None,
    ) -> Filter:
        resolve_func = resolve_column(Dataset(self.dataset.value))

        query = apply_dataset_query_conditions(QueryDatasets(self.dataset), query, self.event_types)
        snuba_filter = get_filter(query, params=params)
        snuba_filter.update_with(
            resolve_field_list([self.aggregate], snuba_filter, auto_fields=False)
        )
        snuba_filter = resolve_snuba_aliases(snuba_filter, resolve_func)[0]
        if snuba_filter.group_ids:
            snuba_filter.conditions.append(
                ["group_id", "IN", list(map(int, snuba_filter.group_ids))]
            )
        if environment:
            snuba_filter.conditions.append(["environment", "=", environment.name])
        return snuba_filter
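A hedged usage sketch of this method; entity_subscription would come from get_entity_subscription_for_dataset as in Examples #19 and #22, and the project id is illustrative:

snuba_filter = entity_subscription.build_snuba_filter(
    query="level:error",
    environment=None,
    params={"project_id": [1]},
)
# snuba_filter.conditions now carries the dataset conditions added by
# apply_dataset_query_conditions plus any group_id/environment filters appended above.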
Example #29
File: logic.py Project: yujiaqi/sentry
def update_alert_rule(
    alert_rule,
    dataset=None,
    projects=None,
    name=None,
    query=None,
    aggregate=None,
    time_window=None,
    environment=None,
    threshold_type=None,
    threshold_period=None,
    resolve_threshold=None,
    include_all_projects=None,
    excluded_projects=None,
):
    """
    Updates an alert rule.

    :param alert_rule: The alert rule to update
    :param excluded_projects: List of projects to subscribe to the rule. Ignored if
    `include_all_projects` is True
    :param name: Name for the alert rule. This will be used as part of the
    incident name, and must be unique per project.
    :param query: An event search query to subscribe to and monitor for alerts
    :param aggregate: A string representing the aggregate used in this alert rule
    :param time_window: Time period to aggregate over, in minutes.
    :param environment: An optional environment that this rule applies to
    :param threshold_type: An AlertRuleThresholdType
    :param threshold_period: How many update periods the value of the
    subscription needs to exceed the threshold before triggering
    :param resolve_threshold: Optional value that the subscription needs to reach to
    resolve the alert
    :param include_all_projects: Whether to include all current and future projects
    from this organization
    :param excluded_projects: List of projects to exclude if we're using
    `include_all_projects`. Ignored otherwise.
    :return: The updated `AlertRule`
    """
    if (
        name
        and alert_rule.name != name
        and AlertRule.objects.filter(organization=alert_rule.organization, name=name).exists()
    ):
        raise AlertRuleNameAlreadyUsedError()

    updated_fields = {}
    updated_query_fields = {}
    if name:
        updated_fields["name"] = name
    if query is not None:
        validate_alert_rule_query(query)
        updated_query_fields["query"] = query
    if aggregate is not None:
        updated_query_fields["aggregate"] = aggregate
    if time_window:
        updated_query_fields["time_window"] = timedelta(minutes=time_window)
    if threshold_type:
        updated_fields["threshold_type"] = threshold_type.value
    if resolve_threshold:
        updated_fields["resolve_threshold"] = resolve_threshold
    if threshold_period:
        updated_fields["threshold_period"] = threshold_period
    if include_all_projects is not None:
        updated_fields["include_all_projects"] = include_all_projects
    if dataset is not None and dataset.value != alert_rule.snuba_query.dataset:
        updated_query_fields["dataset"] = dataset

    with transaction.atomic():
        incidents = Incident.objects.filter(alert_rule=alert_rule).exists()
        if incidents:
            snapshot_alert_rule(alert_rule)
        alert_rule.update(**updated_fields)

        if updated_query_fields or environment != alert_rule.snuba_query.environment:
            snuba_query = alert_rule.snuba_query
            updated_query_fields.setdefault("dataset", QueryDatasets(snuba_query.dataset))
            updated_query_fields.setdefault("query", snuba_query.query)
            updated_query_fields.setdefault("aggregate", snuba_query.aggregate)
            updated_query_fields.setdefault(
                "time_window", timedelta(seconds=snuba_query.time_window)
            )
            update_snuba_query(
                alert_rule.snuba_query,
                resolution=timedelta(minutes=DEFAULT_ALERT_RULE_RESOLUTION),
                environment=environment,
                **updated_query_fields
            )

        existing_subs = []
        if (
            query is not None
            or aggregate is not None
            or time_window is not None
            or projects is not None
            or include_all_projects is not None
            or excluded_projects is not None
        ):
            existing_subs = alert_rule.snuba_query.subscriptions.all().select_related("project")

        new_projects = []
        deleted_subs = []

        if not alert_rule.include_all_projects:
            # We don't want to have any exclusion rows present if we're not in
            # `include_all_projects` mode
            get_excluded_projects_for_alert_rule(alert_rule).delete()

        if alert_rule.include_all_projects:
            if include_all_projects or excluded_projects is not None:
                # If we're in `include_all_projects` mode, we want to just fetch
                # projects that aren't already subscribed, and haven't been excluded so
                # we can add them.
                excluded_project_ids = (
                    {p.id for p in excluded_projects} if excluded_projects else set()
                )
                project_exclusions = get_excluded_projects_for_alert_rule(alert_rule)
                project_exclusions.exclude(project_id__in=excluded_project_ids).delete()
                existing_excluded_project_ids = {pe.project_id for pe in project_exclusions}
                new_exclusions = [
                    AlertRuleExcludedProjects(alert_rule=alert_rule, project_id=project_id)
                    for project_id in excluded_project_ids
                    if project_id not in existing_excluded_project_ids
                ]
                AlertRuleExcludedProjects.objects.bulk_create(new_exclusions)

                new_projects = Project.objects.filter(organization=alert_rule.organization).exclude(
                    id__in=set([sub.project_id for sub in existing_subs]) | excluded_project_ids
                )
                # If we're subscribed to any of the excluded projects then we want to
                # remove those subscriptions
                deleted_subs = [
                    sub for sub in existing_subs if sub.project_id in excluded_project_ids
                ]
        elif projects is not None:
            existing_project_slugs = {sub.project.slug for sub in existing_subs}
            # Determine whether we've added any new projects as part of this update
            new_projects = [
                project for project in projects if project.slug not in existing_project_slugs
            ]
            updated_project_slugs = {project.slug for project in projects}
            # Find any subscriptions that were removed as part of this update
            deleted_subs = [
                sub for sub in existing_subs if sub.project.slug not in updated_project_slugs
            ]

        if new_projects:
            subscribe_projects_to_alert_rule(alert_rule, new_projects)

        if deleted_subs:
            bulk_delete_snuba_subscriptions(deleted_subs)

    return alert_rule
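A hedged usage sketch; only the fields being changed need to be passed, and any query changes cascade through update_snuba_query to the underlying subscriptions (values are illustrative):

update_alert_rule(
    alert_rule,
    name="High error volume",
    query="level:error",
    time_window=60,        # minutes
    threshold_period=1,
)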
Example #30
    def validate(self, data):
        """
        Performs validation on an alert rule's data.
        This includes ensuring there is either 1 or 2 triggers, which each have
        actions, and have proper thresholds set. The critical trigger should
        both alert and resolve 'after' the warning trigger (whether that means
        > or < the value depends on threshold type).
        """
        data.setdefault("dataset", QueryDatasets.EVENTS)
        project_id = data.get("projects")
        if not project_id:
            # We just need a valid project id from the org so that we can verify
            # the query. We don't use the returned data anywhere, so it doesn't
            # matter which.
            project_id = list(
                self.context["organization"].project_set.all()[:1])

        try:
            entity_subscription = get_entity_subscription_for_dataset(
                dataset=QueryDatasets(data["dataset"]),
                aggregate=data["aggregate"],
                time_window=int(
                    timedelta(minutes=data["time_window"]).total_seconds()),
                extra_fields={
                    "org_id": project_id[0].organization_id,
                    "event_types": data.get("event_types"),
                },
            )
        except UnsupportedQuerySubscription as e:
            raise serializers.ValidationError(f"{e}")

        try:
            snuba_filter = build_snuba_filter(
                entity_subscription,
                data["query"],
                data.get("environment"),
                params={
                    "project_id": [p.id for p in project_id],
                    "start": timezone.now() - timedelta(minutes=10),
                    "end": timezone.now(),
                },
            )
            if any(cond[0] == "project_id"
                   for cond in snuba_filter.conditions):
                raise serializers.ValidationError(
                    {"query": "Project is an invalid search term"})
        except (InvalidSearchQuery, ValueError) as e:
            raise serializers.ValidationError(f"Invalid Query or Metric: {e}")
        else:
            if not snuba_filter.aggregations:
                raise serializers.ValidationError(
                    "Invalid Metric: Please pass a valid function for aggregation"
                )

            dataset = Dataset(data["dataset"].value)
            self._validate_time_window(dataset, data.get("time_window"))

            conditions = copy(snuba_filter.conditions)
            time_col = entity_subscription.time_col
            conditions += [
                [time_col, ">=", snuba_filter.start],
                [time_col, "<", snuba_filter.end],
            ]

            body = {
                "project": project_id[0].id,
                "project_id": project_id[0].id,
                "aggregations": snuba_filter.aggregations,
                "conditions": conditions,
                "filter_keys": snuba_filter.filter_keys,
                "having": snuba_filter.having,
                "dataset": dataset.value,
                "limit": 1,
                **entity_subscription.get_entity_extra_params(),
            }

            try:
                snql_query = json_to_snql(body,
                                          entity_subscription.entity_key.value)
                snql_query.validate()
            except Exception as e:
                raise serializers.ValidationError(str(e),
                                                  params={
                                                      "params":
                                                      json.dumps(body),
                                                      "dataset":
                                                      data["dataset"].value
                                                  })

            try:
                raw_snql_query(snql_query,
                               referrer="alertruleserializer.test_query")
            except Exception:
                logger.exception(
                    "Error while validating snuba alert rule query")
                raise serializers.ValidationError(
                    "Invalid Query or Metric: An error occurred while attempting "
                    "to run the query")

        triggers = data.get("triggers", [])
        if not triggers:
            raise serializers.ValidationError(
                "Must include at least one trigger")
        if len(triggers) > 2:
            raise serializers.ValidationError(
                "Must send 1 or 2 triggers - A critical trigger, and an optional warning trigger"
            )

        event_types = data.get("event_types")

        valid_event_types = dataset_valid_event_types.get(
            data["dataset"], set())
        if event_types and set(event_types) - valid_event_types:
            raise serializers.ValidationError(
                "Invalid event types for this dataset. Valid event types are %s"
                % sorted(et.name.lower() for et in valid_event_types))

        for i, (trigger, expected_label) in enumerate(
                zip(triggers,
                    (CRITICAL_TRIGGER_LABEL, WARNING_TRIGGER_LABEL))):
            if trigger.get("label", None) != expected_label:
                raise serializers.ValidationError(
                    f'Trigger {i + 1} must be labeled "{expected_label}"')
        threshold_type = data["threshold_type"]
        self._translate_thresholds(threshold_type,
                                   data.get("comparison_delta"), triggers,
                                   data)

        critical = triggers[0]

        self._validate_trigger_thresholds(threshold_type, critical,
                                          data.get("resolve_threshold"))

        if len(triggers) == 2:
            warning = triggers[1]
            self._validate_trigger_thresholds(threshold_type, warning,
                                              data.get("resolve_threshold"))
            self._validate_critical_warning_triggers(threshold_type, critical,
                                                     warning)

        return data