Example #1
def get_time_range_estimate(
    query: ProcessableQuery[Table],
) -> Tuple[Optional[datetime], Optional[datetime]]:
    """
    Best guess to find the time range for the query.
    We pick the first column that is compared with a datetime Literal.
    """
    pattern = FunctionCall(
        Or([String(ConditionFunctions.GT),
            String(ConditionFunctions.GTE)]),
        (Column(None, Param("col_name", Any(str))), Literal(Any(datetime))),
    )

    from_date, to_date = None, None
    condition = query.get_condition()
    if condition is None:
        return None, None
    for exp in condition:
        result = pattern.match(exp)
        if result is not None:
            from_date, to_date = get_time_range(query,
                                                result.string("col_name"))
            break

    return from_date, to_date
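
A hypothetical, self-contained sketch of the kind of condition the pattern above is written to catch; the module paths are assumed from the upstream snuba project and the column name is made up:

# Illustrative only: assumes the AST classes live in snuba.query.expressions.
from datetime import datetime

from snuba.query.expressions import Column as ColumnExpr
from snuba.query.expressions import FunctionCall as FunctionCallExpr
from snuba.query.expressions import Literal as LiteralExpr

# SQL-ish form: timestamp >= toDateTime('2021-01-01 00:00:00')
example_condition = FunctionCallExpr(
    None,
    "greaterOrEquals",
    (
        ColumnExpr(None, None, "timestamp"),
        LiteralExpr(None, datetime(2021, 1, 1)),
    ),
)
# pattern.match(example_condition) would succeed and bind
# result.string("col_name") == "timestamp".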
Example #2
def _get_date_range(query: Query) -> Optional[int]:
    """
    Best guess of the date range of the query, returned as a number of days.
    We pick the first column that is compared with a datetime Literal.
    """
    pattern = FunctionCall(
        Or([String(ConditionFunctions.GT),
            String(ConditionFunctions.GTE)]),
        (Column(None, Param("col_name", Any(str))), Literal(Any(datetime))),
    )

    condition = query.get_condition_from_ast()
    if condition is None:
        return None
    for exp in condition:
        result = pattern.match(exp)
        if result is not None:
            from_date, to_date = get_time_range(query,
                                                result.string("col_name"))
            if from_date is None or to_date is None:
                return None
            else:
                return (to_date - from_date).days

    return None
Example #3
def extract_granularity_from_query(query: Query, column: str) -> Optional[int]:
    """
    This extracts the `granularity` from the `groupby` statement of the query.
    The matches are essentially the reverse of `TimeSeriesProcessor.__group_time_function`.
    """
    groupby = query.get_groupby()

    column_match = ColumnMatch(None, String(column))
    fn_match = FunctionCallMatch(
        Param(
            "time_fn",
            Or(
                [
                    String("toStartOfHour"),
                    String("toStartOfMinute"),
                    String("toStartOfDay"),
                    String("toDate"),
                ]
            ),
        ),
        (column_match,),
        with_optionals=True,
    )
    expr_match = FunctionCallMatch(
        String("toDateTime"),
        (
            FunctionCallMatch(
                String("multiply"),
                (
                    FunctionCallMatch(
                        String("intDiv"),
                        (
                            FunctionCallMatch(String("toUInt32"), (column_match,)),
                            LiteralMatch(Param("granularity", Any(int))),
                        ),
                    ),
                    LiteralMatch(Param("granularity", Any(int))),
                ),
            ),
            LiteralMatch(Any(str)),
        ),
    )

    for top_expr in groupby:
        for expr in top_expr:
            result = fn_match.match(expr)
            if result is not None:
                return GRANULARITY_MAPPING[result.string("time_fn")]

            result = expr_match.match(expr)
            if result is not None:
                return result.integer("granularity")

    return None
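
For reference, a hedged sketch of what the two matchers above are meant to recognize; the 60-second granularity and the timezone literal are illustrative, not taken from the snippet:

# expr_match catches the rolled-up form produced by the time-series processor:
#   toDateTime(multiply(intDiv(toUInt32(timestamp), 60), 60), 'Universal')
# and result.integer("granularity") would return 60.
# fn_match catches the simpler bucketing functions such as toStartOfHour(timestamp),
# which are translated into seconds through GRANULARITY_MAPPING (defined elsewhere).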
Example #4
    def __init__(self, column_name: str, hash_map_name: str, killswitch: str) -> None:
        self.__column_name = column_name
        self.__hash_map_name = hash_map_name
        self.__killswitch = killswitch

        # TODO: Add support for IN conditions.
        self.__optimizable_pattern = FunctionCall(
            function_name=String("equals"),
            parameters=(
                Or(
                    [
                        mapping_pattern,
                        FunctionCall(
                            function_name=String("ifNull"),
                            parameters=(mapping_pattern, Literal(String(""))),
                        ),
                    ]
                ),
                Param("right_hand_side", Literal(Any(str))),
            ),
        )
        self.__tag_exists_patterns = [
            FunctionCall(
                function_name=String("notEquals"),
                parameters=(
                    Or(
                        [
                            mapping_pattern,
                            FunctionCall(
                                function_name=String("ifNull"),
                                parameters=(mapping_pattern, Literal(String(""))),
                            ),
                        ]
                    ),
                    Param("right_hand_side", Literal(Any(str))),
                ),
            ),
            FunctionCall(
                function_name=String("has"),
                parameters=(
                    ColumnMatcher(
                        Param(TABLE_MAPPING_PARAM, AnyOptionalString()),
                        Param(VALUE_COL_MAPPING_PARAM, String(f"{column_name}.key")),
                    ),
                    Literal(Param(KEY_MAPPING_PARAM, Any(str))),
                ),
            ),
        ]
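
Loosely, __optimizable_pattern targets equality filters on a single tag, with or without an ifNull wrapper; a sketch of the accepted shapes (tag name and value are made up):

# equals(arrayElement(tags.value, indexOf(tags.key, 'environment')), 'prod')
# equals(ifNull(arrayElement(tags.value, indexOf(tags.key, 'environment')), ''), 'prod')
# __tag_exists_patterns additionally covers notEquals(...) over the same shapes
# and has(tags.key, 'environment').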
Example #5
def get_time_range_expressions(
    conditions: Sequence[Expression],
    timestamp_field: str,
    table_name: Optional[str] = None,
) -> Tuple[
    Optional[Tuple[datetime, FunctionCallExpr]],
    Optional[Tuple[datetime, FunctionCallExpr]],
]:
    max_lower_bound: Optional[Tuple[datetime, FunctionCallExpr]] = None
    min_upper_bound: Optional[Tuple[datetime, FunctionCallExpr]] = None
    table_match = String(table_name) if table_name else None
    for c in conditions:
        match = FunctionCall(
            Param(
                "operator",
                Or([
                    String(OPERATOR_TO_FUNCTION[">="]),
                    String(OPERATOR_TO_FUNCTION["<"]),
                ]),
            ),
            (
                Column(table_match, String(timestamp_field)),
                Literal(Param("timestamp", Any(datetime))),
            ),
        ).match(c)

        if match is not None:
            timestamp = cast(datetime, match.scalar("timestamp"))
            assert isinstance(c, FunctionCallExpr)
            if match.string("operator") == OPERATOR_TO_FUNCTION[">="]:
                if not max_lower_bound or timestamp > max_lower_bound[0]:
                    max_lower_bound = (timestamp, c)
            else:
                if not min_upper_bound or timestamp < min_upper_bound[0]:
                    min_upper_bound = (timestamp, c)

    return (max_lower_bound, min_upper_bound)
Example #6
class DefaultIfNullFunctionMapper(FunctionCallMapper):
    """
    If a function is being called on a column that doesn't exist, or is being
    called on NULL, change the entire function to be NULL.
    """

    function_match = FunctionCallMatch(StringMatch("identity"),
                                       (LiteralMatch(value=Any(type(None))), ))

    def attempt_map(
        self,
        expression: FunctionCall,
        children_translator: SnubaClickhouseStrictTranslator,
    ) -> Optional[FunctionCall]:

        # HACK: Quick fix to avoid this function dropping important conditions from the query
        logical_functions = {"and", "or", "xor"}

        if expression.function_name in logical_functions:
            return None

        parameters = tuple(
            p.accept(children_translator) for p in expression.parameters)
        for param in parameters:
            # All impossible columns will have been converted to the identity function.
            # So we know that if a function has the identity function as a parameter, we can
            # collapse the entire expression.
            fmatch = self.function_match.match(param)
            if fmatch is not None:
                return identity(Literal(None, None), expression.alias)

        return None
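
In effect, once an impossible column has been rewritten to identity(NULL), any function receiving it collapses to NULL as well; a hedged sketch of the rewrite:

# Before: someFunction(identity(NULL), other_arg) AS alias
# After:  identity(NULL) AS alias
# Logical connectives (and/or/xor) are skipped on purpose so that whole WHERE
# clauses are not dropped when only one branch touches a missing column.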
Example #7
def test_accessors() -> None:
    func = FunctionCall(
        String("f_name"),
        (
            FunctionCall(String("f"), (Column(None, String("my_col")), )),
            Param(
                "second_function",
                FunctionCall(Param("second_function_name", Any(str)), None),
            ),
        ),
    )

    result = func.match(
        FunctionCallExpr(
            "irrelevant",
            "f_name",
            (
                FunctionCallExpr(None, "f",
                                 (ColumnExpr(None, None, "my_col"), )),
                FunctionCallExpr(None, "second_name", tuple()),
            ),
        ))

    assert result is not None
    assert result.expression("second_function") == FunctionCallExpr(
        None, "second_name", tuple())
    assert result.scalar("second_function_name") == "second_name"
Example #8
    def extractor(condition: Expression) -> Set[str]:
        match = FunctionCall(
            String(ConditionFunctions.EQ),
            (key_pattern, Literal(Param("key", Any(str)))),
        ).match(condition)

        if match is None:
            return set()

        return {match.string("key")}
Example #9
    def get_project_ids_in_condition(
            condition: Expression) -> Optional[Set[int]]:
        """
        Extract project ids from an expression. Returns None if no project
        id condition is found. It returns an empty set if conflicting
        project_id conditions are found.
        """
        match = FunctionCall(
            None,
            String(ConditionFunctions.EQ),
            (
                Column(column_name=String(project_column)),
                Literal(value=Param("project_id", Any(int))),
            ),
        ).match(condition)
        if match is not None:
            return {match.integer("project_id")}

        match = is_in_condition_pattern(
            Column(column_name=String(project_column))).match(condition)
        if match is not None:
            projects = match.expression("tuple")
            assert isinstance(projects, FunctionCallExpr)
            return {
                l.value
                for l in projects.parameters
                if isinstance(l, LiteralExpr) and isinstance(l.value, int)
            }

        match = FunctionCall(
            None,
            Param(
                "operator",
                Or([String(BooleanFunctions.AND),
                    String(BooleanFunctions.OR)]),
            ),
            (Param("lhs", AnyExpression()), Param("rhs", AnyExpression())),
        ).match(condition)
        if match is not None:
            lhs_projects = get_project_ids_in_condition(
                match.expression("lhs"))
            rhs_projects = get_project_ids_in_condition(
                match.expression("rhs"))
            if lhs_projects is None:
                return rhs_projects
            elif rhs_projects is None:
                return lhs_projects
            else:
                return (lhs_projects & rhs_projects if match.string("operator")
                        == BooleanFunctions.AND else lhs_projects
                        | rhs_projects)

        return None
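
A hedged sketch of how the recursion combines branches (the project ids are illustrative):

# equals(project_id, 1)                                      -> {1}
# in(project_id, tuple(1, 2))                                -> {1, 2}
# and(equals(project_id, 1), in(project_id, tuple(1, 2)))    -> {1}    (intersection)
# or(equals(project_id, 1), equals(project_id, 2))           -> {1, 2} (union)
# and(equals(project_id, 1), equals(project_id, 2))          -> set()  (conflict)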
Example #10
 def is_skippable_condition(conditions: Expression) -> bool:
     """
     A condition composed of a bunch of has(column, ...) conditions OR'ed together
     can be ignored when looking for filter keys because these are the conditions
     used for the bloom filter index on the array column.
     """
     for column_name in column_names:
         has_pattern = FunctionCall(
             String("has"),
             (Column(column_name=String(column_name)), Literal(Any(str))),
         )
         if all(
             has_pattern.match(c) for c in get_first_level_or_conditions(conditions)
         ):
             return True
     return False
Example #11
def get_time_range(
        query: Query,
        timestamp_field: str) -> Tuple[Optional[datetime], Optional[datetime]]:
    """
    Finds the minimal time range for this query: the >= timestamp condition
    with the highest datetime literal and the < timestamp condition with the
    lowest, returned as a tuple of datetimes. It only looks into first level
    AND conditions since, if the timestamp is nested in an OR, we cannot say
    anything about how it compares to the other timestamp conditions.
    """

    condition_clause = query.get_condition_from_ast()
    if not condition_clause:
        return (None, None)

    max_lower_bound = None
    min_upper_bound = None
    for c in get_first_level_and_conditions(condition_clause):
        match = FunctionCall(
            None,
            Param(
                "operator",
                Or([
                    String(OPERATOR_TO_FUNCTION[">="]),
                    String(OPERATOR_TO_FUNCTION["<"]),
                ]),
            ),
            (
                Column(None, None, String(timestamp_field)),
                Literal(None, Param("timestamp", Any(datetime))),
            ),
        ).match(c)

        if match is not None:
            timestamp = cast(datetime, match.scalar("timestamp"))
            if match.string("operator") == OPERATOR_TO_FUNCTION[">="]:
                if not max_lower_bound or timestamp > max_lower_bound:
                    max_lower_bound = timestamp
            else:
                if not min_upper_bound or timestamp < min_upper_bound:
                    min_upper_bound = timestamp

    return (max_lower_bound, min_upper_bound)
Example #12
def _get_mapping_keys_in_condition(
    condition: Expression, column_name: str
) -> Optional[Set[str]]:
    """
    Finds the top level conditions that include a filter based on the arrayJoin.
    This is meant to be used to find the keys the query is filtering the arrayJoin
    on.
    We can only apply the arrayFilter optimization to arrayJoin conditions
    that are not in OR with other columns. To simplify the problem, we only
    consider those conditions that are included in the first level of the query:
    [['tagskey' '=' 'a'],['col' '=' 'b'],['col2' '=' 'c']]  works
    [[['tagskey' '=' 'a'], ['col2' '=' 'b']], ['tagskey' '=' 'c']] does not

    If we encounter an OR condition we return None, which means we cannot
    safely apply the optimization. Empty set means we did not find any
    suitable arrayJoin for optimization in this condition but that does
    not disqualify the whole query in the way the OR condition does.
    """
    keys_found = set()

    conditions = get_first_level_and_conditions(condition)
    for c in conditions:
        if is_binary_condition(c, BooleanFunctions.OR):
            return None

        match = FunctionCall(
            None,
            String(ConditionFunctions.EQ),
            (array_join_pattern(column_name), Literal(None, Param("key", Any(str)))),
        ).match(c)
        if match is not None:
            keys_found.add(match.string("key"))

        match = is_in_condition_pattern(array_join_pattern(column_name)).match(c)
        if match is not None:
            function = match.expression("tuple")
            assert isinstance(function, FunctionCallExpr)
            keys_found |= {
                lit.value
                for lit in function.parameters
                if isinstance(lit, LiteralExpr) and isinstance(lit.value, str)
            }

    return keys_found
Example #13
    def __init__(self, column_name: str, hash_map_name: str,
                 killswitch: str) -> None:
        self.__column_name = column_name
        self.__hash_map_name = hash_map_name
        self.__killswitch = killswitch

        # TODO: Add support for IN conditions.
        self.__optimizable_pattern = FunctionCall(
            function_name=String("equals"),
            parameters=(
                Or([
                    mapping_pattern,
                    FunctionCall(
                        function_name=String("ifNull"),
                        parameters=(mapping_pattern, Literal(String(""))),
                    ),
                ]),
                Param("right_hand_side", Literal(Any(str))),
            ),
        )
Example #14
 def __init__(self, array_columns: Sequence[str]):
     self.__array_has_pattern = FunctionCall(
         String("equals"),
         (
             Param(
                 "has",
                 FunctionCall(
                     String("has"),
                     (
                         Column(
                             column_name=Or(
                                 [String(column) for column in array_columns]
                             )
                         ),
                         Literal(Any(str)),
                     ),
                 ),
             ),
             Literal(Integer(1)),
         ),
     )
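
The pattern above recognizes bloom-filter-friendly membership checks on array columns; a sketch with an illustrative column and value:

# equals(has(exception_frames.filename, 'main.py'), 1)
# The inner has(...) call is captured under the "has" parameter so the
# processor can reuse or rewrite it later.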
Example #15
            LiteralExpr(None, None),
        ),
    )


TABLE_MAPPING_PARAM = "table_name"
VALUE_COL_MAPPING_PARAM = "value_column"
KEY_COL_MAPPING_PARAM = "key_column"
KEY_MAPPING_PARAM = "key"
mapping_pattern = FunctionCall(
    None,
    String("arrayElement"),
    (
        Column(
            None,
            Param(TABLE_MAPPING_PARAM, AnyOptionalString()),
            Param(VALUE_COL_MAPPING_PARAM, Any(str)),
        ),
        FunctionCall(
            None,
            String("indexOf"),
            (
                Column(None, None, Param(KEY_COL_MAPPING_PARAM, Any(str))),
                Literal(None, Param(KEY_MAPPING_PARAM, Any(str))),
            ),
        ),
    ),
)

# TODO: build more of these mappers.
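
For context, mapping_pattern matches the canonical tag-access expression over a Nested column pair; a sketch of the matched shape and the parameters it binds (the tags column and key are examples):

# arrayElement(tags.value, indexOf(tags.key, 'environment'))
# Bound parameters after a successful match:
#   TABLE_MAPPING_PARAM     -> the optional table prefix (None here)
#   VALUE_COL_MAPPING_PARAM -> "tags.value"
#   KEY_COL_MAPPING_PARAM   -> "tags.key"
#   KEY_MAPPING_PARAM       -> "environment"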
Example #16
    Param,
    Pattern,
    String,
    SubscriptableReference,
)

test_cases = [
    (
        "Literal match",
        Literal(None),
        LiteralExpr("random_alias", 1),
        MatchResult(),
    ),
    (
        "Literal match with none type",
        Literal(Any(type(None))),
        LiteralExpr("alias", 1),
        None,
    ),
    (
        "Single node match",
        Column(OptionalString("table"), String("test_col")),
        ColumnExpr("alias_we_don't_care_of", "table", "test_col"),
        MatchResult(),
    ),
    (
        "Single node no match",
        Column(None, String("test_col")),
        ColumnExpr(None, None, "not_a_test_col"),
        None,
    ),
Example #17
    def __init__(self, time_group_columns: Mapping[str, str],
                 time_parse_columns: Sequence[str]) -> None:
        # Column names that should be mapped to different columns.
        self.__time_replace_columns = time_group_columns

        # time_parse_columns is a list of columns that, if used in a condition, should be compared with datetimes.
        # The columns here might overlap with the columns that get replaced, so we have to search for transformed
        # columns.
        column_match = ColumnMatch(
            None,
            Param(
                "column_name",
                Or([String(tc) for tc in time_parse_columns]),
            ),
        )
        self.condition_match = FunctionCallMatch(
            Or([
                String(ConditionFunctions.GT),
                String(ConditionFunctions.GTE),
                String(ConditionFunctions.LT),
                String(ConditionFunctions.LTE),
                String(ConditionFunctions.EQ),
                String(ConditionFunctions.NEQ),
            ]),
            (
                Or([
                    column_match,
                    FunctionCallMatch(
                        Or([
                            String("toStartOfHour"),
                            String("toStartOfMinute"),
                            String("toStartOfDay"),
                            String("toDate"),
                        ]),
                        (column_match, ),
                        with_optionals=True,
                    ),
                    FunctionCallMatch(
                        String("toDateTime"),
                        (
                            FunctionCallMatch(
                                String("multiply"),
                                (
                                    FunctionCallMatch(
                                        String("intDiv"),
                                        (
                                            FunctionCallMatch(
                                                String("toUInt32"),
                                                (column_match, ),
                                            ),
                                            LiteralMatch(Any(int)),
                                        ),
                                    ),
                                    LiteralMatch(Any(int)),
                                ),
                            ),
                            LiteralMatch(Any(str)),
                        ),
                    ),
                ]),
                Param("literal", LiteralMatch(Any(str))),
            ),
        )
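
Taken together, condition_match catches comparisons where a time column (raw, bucketed with toStartOf*, or rolled up via intDiv/multiply) is compared against a string literal that still needs to be parsed into a datetime; for instance (column name illustrative):

# greaterOrEquals(timestamp, '2021-01-01T00:00:00')
# greater(toStartOfDay(timestamp), '2021-01-01')
# The string literal is bound under "literal" so the processor can replace it
# with a proper datetime value.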
Example #18
    condition_pattern,
    get_first_level_and_conditions,
)
from snuba.query.matchers import Any
from snuba.query.matchers import Column as ColumnPattern
from snuba.query.matchers import FunctionCall as FunctionCallPattern
from snuba.query.matchers import Literal as LiteralPattern
from snuba.query.matchers import Or, Param, Pattern, String
from snuba.request.request_settings import RequestSettings
from snuba.state import get_config

logger = logging.getLogger(__name__)

EQ_CONDITION_PATTERN = condition_pattern(
    {ConditionFunctions.EQ},
    ColumnPattern(None, Param("lhs", Any(str))),
    LiteralPattern(Any(int)),
    commutative=True,
)

FULL_CONDITION_PATTERN = Or([
    EQ_CONDITION_PATTERN,
    FunctionCallPattern(
        String(ConditionFunctions.IN),
        (
            ColumnPattern(None, Param("lhs", Any(str))),
            FunctionCallPattern(Or([String("tuple"),
                                    String("array")]), None),
        ),
    ),
], )
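
A hedged sketch of the conditions FULL_CONDITION_PATTERN accepts (column and ids are illustrative):

# equals(project_id, 1)           -- via EQ_CONDITION_PATTERN; it is commutative,
#                                    so equals(1, project_id) matches as well
# in(project_id, tuple(1, 2, 3))  -- via the IN branch; array(...) also matches
# In both cases the column name is bound under the "lhs" parameter.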