Example #1
def parse_field(field: str) -> Tuple[Optional[str], str]:
    matches = FIELD_REGEX.match(field)
    try:
        if matches is None:
            raise TypeError
        operation = matches[1]
        metric_name = matches[2]
        if metric_name in DERIVED_METRICS and isinstance(
            DERIVED_METRICS[metric_name], DerivedMetric
        ):
            raise DerivedMetricParseException(
                f"Failed to parse {field}. No operations can be applied on this field as it is "
                f"already a derived metric with an aggregation applied to it."
            )
    except (IndexError, TypeError):
        if field in DERIVED_METRICS and isinstance(DERIVED_METRICS[field], DerivedMetric):
            # The isinstance check is there to foreshadow adding raw metric aliases
            return None, field
        raise InvalidField(
            f"Failed to parse '{field}'. Must be something like 'sum(my_metric)', or a supported "
            f"aggregate derived metric like `session.crash_free_rate"
        )
    else:
        if operation not in OPERATIONS:
            raise InvalidField(
                f"Invalid operation '{operation}'. Must be one of {', '.join(OPERATIONS)}"
            )

        return operation, metric_name
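
A minimal usage sketch of this parser. FIELD_REGEX, OPERATIONS, and DERIVED_METRICS are not shown above, so the shapes below are assumptions for illustration only:

# Assumed shapes (hypothetical): FIELD_REGEX captures operation(metric_name),
# OPERATIONS lists the allowed aggregates.
# FIELD_REGEX = re.compile(r"^(\w+)\(([\w.]+)\)$")
# OPERATIONS = ("sum", "avg", "count_unique")

parse_field("sum(my_metric)")           # -> ("sum", "my_metric")
parse_field("session.crash_free_rate")  # -> (None, "session.crash_free_rate"),
                                        #    provided it is registered in DERIVED_METRICS
parse_field("sum(session.crash_free_rate)")  # raises DerivedMetricParseException:
                                             # derived metrics already carry an aggregation
parse_field("median(my_metric)")        # raises InvalidField if "median" is not in OPERATIONS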
Example #2
    def __init__(
        self,
        query: QueryDict,
        params: Mapping[Any, Any],
        allow_minute_resolution: Optional[bool] = True,
    ):
        raw_fields = query.getlist("field", [])
        raw_groupby = query.getlist("groupBy", [])
        if len(raw_fields) == 0:
            raise InvalidField('At least one "field" is required.')
        self.fields = {}
        self.query: List[Any] = []  # not used but needed for compat with sessions logic
        allowed_resolution = (
            AllowedResolution.one_minute if allow_minute_resolution else AllowedResolution.one_hour
        )
        start, end, rollup = get_constrained_date_range(query, allowed_resolution)
        self.dataset, self.match = _outcomes_dataset(rollup)
        self.rollup = rollup
        self.start = start
        self.end = end
        self.select_params = []
        for key in raw_fields:
            if key not in COLUMN_MAP:
                raise InvalidField(f'Invalid field: "{key}"')
            field = COLUMN_MAP[key]
            self.select_params.append(field.select_params(self.dataset))
            self.fields[key] = field

        self.groupby = []
        for key in raw_groupby:
            if key not in GROUPBY_MAP:
                raise InvalidField(f'Invalid groupBy: "{key}"')
            self.groupby.append(GROUPBY_MAP[key])

        if len(query.getlist("category", [])) == 0 and "category" not in raw_groupby:
            raise InvalidQuery("Query must have category as groupby or filter")

        query_columns = set()
        for field in self.fields.values():
            query_columns.update(field.get_snuba_columns(raw_groupby))
        for groupby in self.groupby:
            query_columns.update(groupby.get_snuba_columns())
        self.query_columns = list(query_columns)

        query_groupby = set()
        for groupby in self.groupby:
            query_groupby.update(groupby.get_snuba_groupby())
        self.query_groupby = list(query_groupby)

        self.group_by = []
        for key in self.query_groupby:
            self.group_by.append(Column(key))

        self.conditions = self.get_conditions(query, params)
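
A hedged construction sketch, assuming Django's QueryDict and that COLUMN_MAP and GROUPBY_MAP contain entries like "sum(quantity)" and "category"; the enclosing class name used below is also an assumption, since it is not shown above:

# Hypothetical usage; field/groupBy names and OutcomesQueryDefinition are
# assumptions, since COLUMN_MAP, GROUPBY_MAP, and the class are not shown.
from django.http import QueryDict

qd = QueryDict("field=sum(quantity)&groupBy=category&statsPeriod=1d")
query = OutcomesQueryDefinition(qd, params={}, allow_minute_resolution=False)
# Omitting "category" from both groupBy and the filters would raise
# InvalidQuery("Query must have category as groupby or filter").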
Example #3
def parse_field(field: str) -> Tuple[str, str]:
    matches = FIELD_REGEX.match(field)
    try:
        operation = matches[1]
        metric_name = matches[2]
    except (IndexError, TypeError):
        raise InvalidField(f"Failed to parse '{field}'. Must be something like 'sum(my_metric)'.")
    else:
        if operation not in OPERATIONS:
            raise InvalidField(
                f"Invalid operation '{operation}'. Must be one of {', '.join(OPERATIONS)}"
            )

        return operation, metric_name
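
Unlike Example #1, this variant has no derived-metric fallback, so a bare metric name always fails. Expected behavior under the same assumed FIELD_REGEX and OPERATIONS:

parse_field("sum(my_metric)")  # -> ("sum", "my_metric")
parse_field("my_metric")       # raises InvalidField: the regex does not match
                               # and there is no bare-name fallback here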
Example #4
    def __init__(self, query_params, allow_minute_resolution=False):
        self.query = query_params.get("query", "")
        raw_fields = query_params.getlist("field", [])
        self.groupby = query_params.getlist("groupBy", [])

        if len(raw_fields) == 0:
            raise InvalidField('Request is missing a "field"')

        self.fields = {key: parse_field(key) for key in raw_fields}

        start, end, rollup = get_constrained_date_range(
            query_params, allow_minute_resolution, max_points=MAX_POINTS
        )
        self.rollup = rollup
        self.start = start
        self.end = end
Example #5
    def resolve_filter(self, raw_filter: Sequence[str]) -> List[DataCategory]:
        resolved_categories = set()
        for category in raw_filter:
            # combine DEFAULT, ERROR, and SECURITY as errors.
            # see relay: py/sentry_relay/consts.py and relay-cabi/include/relay.h
            parsed_category = DataCategory.parse(category)
            if parsed_category is None:
                raise InvalidField(f'Invalid category: "{category}"')
            elif parsed_category == DataCategory.ERROR:
                resolved_categories.update(DataCategory.error_categories())
            else:
                resolved_categories.add(parsed_category)
        if DataCategory.ATTACHMENT in resolved_categories and len(resolved_categories) > 1:
            raise InvalidQuery("if filtering by attachment no other category may be present")
        return list(resolved_categories)
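
A short sketch of the resolution rules; DataCategory is assumed to be the sentry_relay enum referenced in the comment, with the members used below, and query_definition stands in for an instance of the (unnamed) enclosing class:

# Illustrative calls; member names are assumptions from the sentry_relay
# DataCategory enum, which is not shown above.
query_definition.resolve_filter(["error"])       # -> DEFAULT, ERROR, SECURITY combined
query_definition.resolve_filter(["attachment"])  # -> [DataCategory.ATTACHMENT]
query_definition.resolve_filter(["attachment", "error"])  # raises InvalidQuery:
                                                          # attachment must stand alone
query_definition.resolve_filter(["bogus"])       # raises InvalidField: unknown category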
Example #6
    def __init__(self, query_params):
        self.query = query_params.get("query", "")
        self.parsed_query = parse_query(self.query) if self.query else None
        raw_fields = query_params.getlist("field", [])
        self.groupby = query_params.getlist("groupBy", [])

        if len(raw_fields) == 0:
            raise InvalidField('Request is missing a "field"')

        self.fields = {key: parse_field(key) for key in raw_fields}

        self.orderby = self._parse_orderby(query_params)
        self.limit = self._parse_limit(query_params)

        start, end, rollup = get_date_range(query_params)
        self.rollup = rollup
        self.start = start
        self.end = end
Example #7
    def __init__(self, query_params, paginator_kwargs: Optional[Dict] = None):
        paginator_kwargs = paginator_kwargs or {}

        self.query = query_params.get("query", "")
        self.parsed_query = parse_query(self.query) if self.query else None
        raw_fields = query_params.getlist("field", [])
        self.groupby = query_params.getlist("groupBy", [])

        if len(raw_fields) == 0:
            raise InvalidField('Request is missing a "field"')

        self.fields = {key: parse_field(key) for key in raw_fields}

        self.orderby = self._parse_orderby(query_params)
        self.limit = self._parse_limit(query_params, paginator_kwargs)
        self.offset = self._parse_offset(query_params, paginator_kwargs)

        start, end, rollup = get_date_range(query_params)
        self.rollup = rollup
        self.start = start
        self.end = end

        # Validates that time series limit will not exceed the snuba limit of 10,000
        self._validate_series_limit(query_params)
Example #8
def _parse_outcome(outcome: str) -> Outcome:
    try:
        return Outcome.parse(outcome)
    except KeyError:
        raise InvalidField(f'Invalid outcome: "{outcome}"')
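
The helper simply converts the KeyError from Outcome.parse into the API-level InvalidField; a quick sketch, where the "accepted" value and the corresponding enum member are assumptions:

_parse_outcome("accepted")  # -> Outcome.ACCEPTED, assuming such a member exists
_parse_outcome("nonsense")  # raises InvalidField: Invalid outcome: "nonsense"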