Example #1

def get_first_level_and_conditions(condition: Expression) -> Sequence[Expression]:
    return _get_first_level_conditions(condition, BooleanFunctions.AND)


def get_first_level_or_conditions(condition: Expression) -> Sequence[Expression]:
    return _get_first_level_conditions(condition, BooleanFunctions.OR)


TOP_LEVEL_CONDITIONS = {
    func_name: Or(
        [
            FunctionCallPattern(
                String(func_name),
                (Param("left", AnyExpression()), Param("right", AnyExpression())),
            ),
            FunctionCallPattern(
                String("equals"),
                (
                    FunctionCallPattern(
                        String(func_name),
                        (
                            Param("left", AnyExpression()),
                            Param("right", AnyExpression()),
                        ),
                    ),
                    LiteralPattern(Integer(1)),
                ),
            ),
        ]
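
A hedged usage sketch of the pattern above, assuming the truncated TOP_LEVEL_CONDITIONS definition closes into a mapping keyed by the boolean function name and that the snuba.query.expressions constructors take (alias, ...) as elsewhere in these snippets. Both spellings of a top level AND should match: the bare and(a, b) call and the equals(and(a, b), 1) form that the second alternative accepts.

from snuba.query.expressions import Column as ColumnExpr
from snuba.query.expressions import FunctionCall as FunctionCallExpr
from snuba.query.expressions import Literal as LiteralExpr

bare = FunctionCallExpr(
    None,
    BooleanFunctions.AND,
    (ColumnExpr(None, None, "a"), ColumnExpr(None, None, "b")),
)
wrapped = FunctionCallExpr(None, "equals", (bare, LiteralExpr(None, 1)))

for cond in (bare, wrapped):
    result = TOP_LEVEL_CONDITIONS[BooleanFunctions.AND].match(cond)
    assert result is not None
    # "left" and "right" are bound to the two operands of the AND.
    assert result.expression("left") == ColumnExpr(None, None, "a")
    assert result.expression("right") == ColumnExpr(None, None, "b")
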
Example #2

    ),
    (
        "Single node match",
        Column(OptionalString("table"), String("test_col")),
        ColumnExpr("alias_we_don't_care_of", "table", "test_col"),
        MatchResult(),
    ),
    (
        "Single node no match",
        Column(None, String("test_col")),
        ColumnExpr(None, None, "not_a_test_col"),
        None,
    ),
    (
        "Matches a None table name",
        Column(Param("table_name", AnyOptionalString()), None),
        ColumnExpr(None, None, "not_a_test_col"),
        MatchResult({"table_name": None}),
    ),
    (
        "Matches None as table name",
        Column(Param("table_name", OptionalString(None)), None),
        ColumnExpr(None, None, "not_a_test_col"),
        MatchResult({"table_name": None}),
    ),
    (
        "Not matching a non None table",
        Column(Param("table_name", OptionalString(None)), None),
        ColumnExpr(None, "not None", "not_a_test_col"),
        None,
    ),
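
A hedged sketch of how cases like these are typically consumed. The list name test_cases and the test function are hypothetical (the snippet cuts off the surrounding list); the behaviour relied on, pattern.match returning None on a miss and a MatchResult with the bound Params on a hit, comes from the other snippets here.

import pytest

@pytest.mark.parametrize("name, pattern, expression, expected", test_cases)
def test_column_patterns(name, pattern, expression, expected) -> None:
    # A miss is reported as None; a hit as a MatchResult holding the bound Params.
    assert pattern.match(expression) == expected
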
Example #3

from snuba.query.project_extension import ProjectExtension
from snuba.query.subscripts import subscript_key_column_name
from snuba.query.timeseries_extension import TimeSeriesExtension
from snuba.request.request_settings import RequestSettings
from snuba.util import qualified_column
from snuba.utils.metrics.wrapper import MetricsWrapper

EVENTS = EntityKey.EVENTS
TRANSACTIONS = EntityKey.TRANSACTIONS
EVENTS_AND_TRANSACTIONS = "events_and_transactions"

metrics = MetricsWrapper(environment.metrics, "api.discover")
logger = logging.getLogger(__name__)

EVENT_CONDITION = FunctionCallMatch(
    Param("function", Or([StringMatch(op) for op in BINARY_OPERATORS])),
    (
        Or([ColumnMatch(None, StringMatch("type")),
            LiteralMatch(None)]),
        Param("event_type", Or([ColumnMatch(), LiteralMatch()])),
    ),
)
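
A hedged sketch of the shape EVENT_CONDITION captures, assuming "equals" is one of BINARY_OPERATORS and using the (alias, ...) expression constructors seen in the other snippets: a binary comparison where one side is the type column and the other side carries the event type.

from snuba.query.expressions import Column as ColumnExpr
from snuba.query.expressions import FunctionCall as FunctionCallExpr
from snuba.query.expressions import Literal as LiteralExpr

condition = FunctionCallExpr(
    None,
    "equals",
    (ColumnExpr(None, None, "type"), LiteralExpr(None, "transaction")),
)

result = EVENT_CONDITION.match(condition)
assert result is not None
# "event_type" is bound to the expression on the value side of the comparison.
assert result.expression("event_type") == LiteralExpr(None, "transaction")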


def match_query_to_table(
        query: Query, events_only_columns: ColumnSet,
        transactions_only_columns: ColumnSet) -> Union[EntityKey, str]:
    # First check for a top level condition on the event type
    condition = query.get_condition_from_ast()
    event_types = set()
    if condition:
Example #4

    condition_pattern,
    get_first_level_and_conditions,
)
from snuba.query.matchers import Any
from snuba.query.matchers import Column as ColumnPattern
from snuba.query.matchers import FunctionCall as FunctionCallPattern
from snuba.query.matchers import Literal as LiteralPattern
from snuba.query.matchers import Or, Param, Pattern, String
from snuba.query.query_settings import QuerySettings
from snuba.state import get_config

logger = logging.getLogger(__name__)

EQ_CONDITION_PATTERN = condition_pattern(
    {ConditionFunctions.EQ},
    ColumnPattern(None, Param("lhs", Any(str))),
    LiteralPattern(Any(int)),
    commutative=True,
)
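
A hedged sketch of what EQ_CONDITION_PATTERN accepts: an equality between a column and an integer literal, in either argument order thanks to commutative=True, with "lhs" capturing the column name. The expression constructors below follow the (alias, ...) shape used in the other snippets.

from snuba.query.expressions import Column as ColumnExpr
from snuba.query.expressions import FunctionCall as FunctionCallExpr
from snuba.query.expressions import Literal as LiteralExpr

eq = FunctionCallExpr(
    None,
    ConditionFunctions.EQ,
    (ColumnExpr(None, None, "project_id"), LiteralExpr(None, 1)),
)
reversed_eq = FunctionCallExpr(
    None,
    ConditionFunctions.EQ,
    (LiteralExpr(None, 1), ColumnExpr(None, None, "project_id")),
)

# Both orderings match; the bound "lhs" scalar is the column name "project_id".
assert EQ_CONDITION_PATTERN.match(eq) is not None
assert EQ_CONDITION_PATTERN.match(reversed_eq) is not None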

FULL_CONDITION_PATTERN = Or(
    [
        EQ_CONDITION_PATTERN,
        FunctionCallPattern(
            String(ConditionFunctions.IN),
            (
                ColumnPattern(None, Param("lhs", Any(str))),
                FunctionCallPattern(Or([String("tuple"), String("array")]), None),
            ),
        ),
    ],
Example #5

        if not isinstance(exp, Column):
            return exp

        parts = exp.column_name.split(".", 1)
        if len(parts) != 2 or parts[0] not in aliases:
            raise ParsingException(
                f"column {exp.column_name} must be qualified in a join query"
            )

        return Column(exp.alias, parts[0], parts[1])

    query.transform_expressions(transform)


DATETIME_MATCH = FunctionCallMatch(
    StringMatch("toDateTime"), (Param("date_string", LiteralMatch(AnyMatch(str))),)
)
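
A hedged sketch of what DATETIME_MATCH captures, using the (alias, function_name, parameters) FunctionCall constructor assumed from the other snippets: a toDateTime call over a string literal, whose bound "date_string" is what _parse_datetime_literals below feeds into parse_datetime when it collapses the call into a single datetime Literal.

from snuba.query.expressions import FunctionCall as FunctionCallExpr
from snuba.query.expressions import Literal

expr = FunctionCallExpr(None, "toDateTime", (Literal(None, "2021-01-01T00:00:00"),))

result = DATETIME_MATCH.match(expr)
assert result is not None
# The whole call would be replaced with Literal(expr.alias, parse_datetime(...)).
assert result.expression("date_string") == Literal(None, "2021-01-01T00:00:00")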


def _parse_datetime_literals(
    query: Union[CompositeQuery[QueryEntity], LogicalQuery]
) -> None:
    def parse(exp: Expression) -> Expression:
        result = DATETIME_MATCH.match(exp)
        if result is not None:
            date_string = result.expression("date_string")
            assert isinstance(date_string, Literal)  # mypy
            assert isinstance(date_string.value, str)  # mypy
            return Literal(exp.alias, parse_datetime(date_string.value))

        return exp
Example #6

class ParamType(ABC):
    def validate(self, expression: Expression, schema: ColumnSet) -> None:
        raise NotImplementedError


class Any(ParamType):
    def validate(self, expression: Expression, schema: ColumnSet) -> None:
        return

    def __str__(self) -> str:
        return "Any"


COLUMN_PATTERN = ColumnMatcher(
    table_name=None, column_name=Param("column_name", AnyMatcher(str)),
)

LITERAL_PATTERN = LiteralMatcher()
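
A hedged sketch of what the two patterns above match, using the expression constructors and MatchResult equality seen in the other snippets: COLUMN_PATTERN captures the column name of any column reference (the table name is left unconstrained), while LITERAL_PATTERN matches any literal and binds nothing.

from snuba.query.expressions import Column as ColumnExpr
from snuba.query.expressions import Literal as LiteralExpr
from snuba.query.matchers import MatchResult

assert COLUMN_PATTERN.match(ColumnExpr(None, None, "event_id")) == MatchResult(
    {"column_name": "event_id"}
)
assert LITERAL_PATTERN.match(LiteralExpr(None, 7)) == MatchResult()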

AllowedTypes = Union[
    Type[Array],
    Type[String],
    Type[UUID],
    Type[IPv4],
    Type[IPv6],
    Type[FixedString],
    Type[UInt],
    Type[Float],
    Type[Date],
    Type[DateTime],
Example #7

    def __init__(self, time_group_columns: Mapping[str, str],
                 time_parse_columns: Sequence[str]) -> None:
        # Column names that should be mapped to different columns.
        self.__time_replace_columns = time_group_columns

        # time_parse_columns is a list of columns that, if used in a condition, should be compared with datetimes.
        # The columns here might overlap with the columns that get replaced, so we have to search for transformed
        # columns.
        column_match = ColumnMatch(
            None,
            Param(
                "column_name",
                Or([String(tc) for tc in time_parse_columns]),
            ),
        )
        self.condition_match = FunctionCallMatch(
            Or([
                String(ConditionFunctions.GT),
                String(ConditionFunctions.GTE),
                String(ConditionFunctions.LT),
                String(ConditionFunctions.LTE),
                String(ConditionFunctions.EQ),
                String(ConditionFunctions.NEQ),
            ]),
            (
                Or([
                    column_match,
                    FunctionCallMatch(
                        Or([
                            String("toStartOfHour"),
                            String("toStartOfMinute"),
                            String("toDate"),
                        ]),
                        (column_match, LiteralMatch(Any(str))),
                    ),
                    FunctionCallMatch(
                        String("toDateTime"),
                        (
                            FunctionCallMatch(
                                String("multiply"),
                                (
                                    FunctionCallMatch(
                                        String("intDiv"),
                                        (
                                            FunctionCallMatch(
                                                String("toUInt32"),
                                                (column_match, ),
                                            ),
                                            LiteralMatch(Any(int)),
                                        ),
                                    ),
                                    LiteralMatch(Any(int)),
                                ),
                            ),
                            LiteralMatch(Any(str)),
                        ),
                    ),
                ]),
                Param("literal", LiteralMatch(Any(str))),
            ),
        )
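
A hedged sketch of a condition this matcher accepts, assuming the enclosing processor (its class name is not shown in this snippet) was built with time_parse_columns=["timestamp"], and using the (alias, ...) expression constructors from the other snippets. The bound "literal" is the string literal that would later be compared as a datetime.

from snuba.query.expressions import Column as ColumnExpr
from snuba.query.expressions import FunctionCall as FunctionCallExpr
from snuba.query.expressions import Literal as LiteralExpr

# "processor" stands in for an instance of the class above, e.g. one built with
# time_group_columns={} and time_parse_columns=["timestamp"]; the class name is
# omitted from the snippet, so no constructor call is shown here.
condition = FunctionCallExpr(
    None,
    ConditionFunctions.GT,
    (ColumnExpr(None, None, "timestamp"), LiteralExpr(None, "2021-01-01T00:00:00")),
)

result = processor.condition_match.match(condition)
assert result is not None
# "literal" is bound to the Literal that should be parsed as a datetime.
assert result.expression("literal") == LiteralExpr(None, "2021-01-01T00:00:00")
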
Example #8

class ParamType(ABC):
    def validate(self, expression: Expression, schema: ColumnSet) -> None:
        raise NotImplementedError


class Any(ParamType):
    def validate(self, expression: Expression, schema: ColumnSet) -> None:
        return

    def __str__(self) -> str:
        return "Any"


COLUMN_PATTERN = ColumnMatcher(
    table_name=None,
    column_name=Param("column_name", AnyMatcher(str)),
)

LITERAL_PATTERN = LiteralMatcher()

AllowedTypes = Union[Type[Array], Type[String], Type[UUID], Type[IPv4],
                     Type[IPv6], Type[FixedString], Type[UInt], Type[Float],
                     Type[Date], Type[DateTime], ]

AllowedScalarTypes = Union[Type[None], Type[bool], Type[str], Type[float],
                           Type[int], Type[date], Type[datetime], ]


class Column(ParamType):
    """
    Validates that the type of a Column expression is in a set of
Example #9

                (ColumnExpr(None, table_name, f"{col_name}.key"), mapping_key),
            ),
            build_mapping_expr(None, table_name, col_name, mapping_key),
            LiteralExpr(None, None),
        ),
    )


TABLE_MAPPING_PARAM = "table_name"
VALUE_COL_MAPPING_PARAM = "value_column"
KEY_COL_MAPPING_PARAM = "key_column"
KEY_MAPPING_PARAM = "key"
mapping_pattern = FunctionCall(
    String("arrayElement"),
    (
        Column(
            Param(TABLE_MAPPING_PARAM, AnyOptionalString()),
            Param(VALUE_COL_MAPPING_PARAM, Any(str)),
        ),
        FunctionCall(
            String("indexOf"),
            (
                Column(None, Param(KEY_COL_MAPPING_PARAM, Any(str))),
                Literal(Param(KEY_MAPPING_PARAM, Any(str))),
            ),
        ),
    ),
)

# TODO: build more of these mappers.
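
A hedged sketch of the subscript shape mapping_pattern recognizes, arrayElement(tags.value, indexOf(tags.key, 'environment')), built with the (alias, ...) expression constructors from the other snippets; the exact MatchResult contents are inferred from the four Params declared above and from how scalar captures appear in the earlier test cases.

from snuba.query.expressions import Column as ColumnExpr
from snuba.query.expressions import FunctionCall as FunctionCallExpr
from snuba.query.expressions import Literal as LiteralExpr
from snuba.query.matchers import MatchResult

tag_access = FunctionCallExpr(
    None,
    "arrayElement",
    (
        ColumnExpr(None, None, "tags.value"),
        FunctionCallExpr(
            None,
            "indexOf",
            (ColumnExpr(None, None, "tags.key"), LiteralExpr(None, "environment")),
        ),
    ),
)

assert mapping_pattern.match(tag_access) == MatchResult(
    {
        TABLE_MAPPING_PARAM: None,  # no table qualifier on tags.value
        VALUE_COL_MAPPING_PARAM: "tags.value",
        KEY_COL_MAPPING_PARAM: "tags.key",
        KEY_MAPPING_PARAM: "environment",
    }
)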