Example #1
    def testComparingTokens(self):
        # Tokens with the same name and value compare equal, regardless of
        # their start/end positions in the source.
        t1 = grammar.Token("lparen", "(", 1, 2)
        t2 = grammar.Token("lparen", "(", 20, 21)
        self.assertEqual(t1, t2)

        # Tokens that differ in value are not equal, even at the same position.
        t3 = grammar.Token("lparen", "(", 0, 10)
        t4 = grammar.Token("lparen", ")", 0, 10)
        self.assertNotEqual(t3, t4)
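
The test above relies on value-based equality: two tokens compare equal when their name and value match, no matter where they appear in the source. A minimal, self-contained sketch of that equality behaviour (an illustration only, not EFILTER's actual Token implementation) might look like this:

# Illustration only: a Token whose equality ignores source positions, matching
# the behaviour asserted in the test above (not EFILTER's actual class).
class Token(object):
    def __init__(self, name, value, start=None, end=None):
        self.name = name
        self.value = value
        self.start = start
        self.end = end

    def __eq__(self, other):
        # Positions (start/end) are deliberately left out of the comparison.
        return (isinstance(other, Token)
                and self.name == other.name
                and self.value == other.value)

    def __ne__(self, other):  # needed for Python 2-style classes
        return not self.__eq__(other)


assert Token("lparen", "(", 1, 2) == Token("lparen", "(", 20, 21)
assert Token("lparen", "(", 0, 10) != Token("lparen", ")", 0, 10)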
Example #2
    def emit_param(self, match, pattern, **_):
        param_name = match.group(1)

        # Anonymous parameters ("?" or no name) get sequential numeric
        # indices; purely numeric names are converted to integers.
        if not param_name or param_name == "?":
            param_name = self._param_idx
            self._param_idx += 1
        elif re.match(r"^\d+$", param_name):
            param_name = int(param_name)

        return grammar.Token(name=pattern.name,
                             value=param_name,
                             start=match.start(),
                             end=match.end())
Example #3
# DottySQL's operator table. The parser only supports pure prefix and infix
# operators, as well as infix operators that have a suffix (like x[y]).
#
# Circumfix and pure suffix operators can be declared, but won't do anything.
OPERATORS = common.OperatorTable(

    # Infix operators:
    common.Operator(name="or",
                    precedence=0,
                    assoc="left",
                    handler=ast.Union,
                    docstring="Logical OR.",
                    prefix=None,
                    suffix=None,
                    infix=common.Token("symbol", "or")),
    common.Operator(name="and",
                    precedence=1,
                    assoc="left",
                    handler=ast.Intersection,
                    docstring="Logical AND.",
                    prefix=None,
                    suffix=None,
                    infix=common.Token("symbol", "and")),
    common.Operator(name="==",
                    precedence=3,
                    assoc="left",
                    handler=ast.Equivalence,
                    docstring="Equivalence.",
                    prefix=None,
                    suffix=None,
                    infix=common.Token("symbol", "==")),

    # ... (remaining DottySQL operators omitted in this excerpt)
)
Example #4
class ExpressionParserTest(testlib.EfilterTestCase):
    OPERATORS = [
        # Infix with precedence
        grammar.Operator(name="+",
                         precedence=4,
                         assoc="left",
                         handler=ast.Sum,
                         docstring=None,
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "+")),
        grammar.Operator(name="eq",
                         precedence=3,
                         assoc="left",
                         handler=ast.Equivalence,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("symbol", "eq"),
                         suffix=None),

        # Prefix
        grammar.Operator(name="unary -",
                         precedence=5,
                         assoc="right",
                         handler=ast_transforms.NegateValue,
                         docstring=None,
                         infix=None,
                         suffix=None,
                         prefix=grammar.Token("symbol", "-")),

        # Mixfix (infix + suffix)
        grammar.Operator(name="[]",
                         precedence=12,
                         assoc="left",
                         handler=ast.Select,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("lbracket", "["),
                         suffix=grammar.Token("rbracket", "]")),

        # Circumfix with separator
        grammar.Operator(name="list builder",
                         precedence=14,
                         assoc="left",
                         handler=ast.Tuple,
                         docstring=None,
                         prefix=grammar.Token("lbracket", "["),
                         infix=grammar.Token("comma", ","),
                         suffix=grammar.Token("rbracket", "]"))
    ]

    def parseQuery(self, query):
        t = tokenizer.LazyTokenizer(query)
        p = parser.ExpressionParser(self.OPERATORS, t)
        return p.parse()

    def assertQueryParses(self, query, expected):
        self.assertEqual(q.Query(expected), q.Query(self.parseQuery(query)))

    def testFailures(self):
        with self.assertRaises(errors.EfilterParseError):
            self.parseQuery("+ 5")

        with self.assertRaises(errors.EfilterParseError):
            self.parseQuery("5 +")

        with self.assertRaises(errors.EfilterParseError):
            self.parseQuery("")

        with self.assertRaises(errors.EfilterParseError):
            self.parseQuery("5 * 10")

    def testInfix(self):
        self.assertQueryParses(
            "5 + 5 eq 10",
            ast.Equivalence(ast.Sum(ast.Literal(5), ast.Literal(5)),
                            ast.Literal(10)))

    def testParens(self):
        self.assertQueryParses(
            "5 + (5 eq 10)",  # It doesn't have to make sense.
            ast.Sum(ast.Literal(5),
                    ast.Equivalence(ast.Literal(5), ast.Literal(10))))

    def testPrefix(self):
        self.assertQueryParses(
            "-5 + 5 eq - (10)",
            ast.Equivalence(
                ast.Sum(ast.Product(ast.Literal(-1), ast.Literal(5)),
                        ast.Literal(5)),
                ast.Product(ast.Literal(-1), ast.Literal(10))))

    def testMixfix(self):
        self.assertQueryParses("'foo'[0  ]",
                               ast.Select(ast.Literal("foo"), ast.Literal(0)))

        self.assertQueryParses(
            # I refer you to my previous statement about making sense.
            " (5 +5) [ 'foo']",
            ast.Select(ast.Sum(ast.Literal(5), ast.Literal(5)),
                       ast.Literal("foo")))

        self.assertQueryParses(
            "5 + 5['foo' + 10]",
            ast.Sum(
                ast.Literal(5),
                ast.Select(ast.Literal(5),
                           ast.Sum(ast.Literal("foo"), ast.Literal(10)))))

    def testCircumfix(self):
        self.assertQueryParses(
            "[1, 2, 3]",
            ast.Tuple(ast.Literal(1), ast.Literal(2), ast.Literal(3)))

        self.assertQueryParses(
            # Lists and selection are non-ambiguous.
            "10 + ['foo', 'bar'][1]",
            ast.Sum(
                ast.Literal(10),
                ast.Select(ast.Tuple(ast.Literal("foo"), ast.Literal("bar")),
                           ast.Literal(1))))
Example #5
    def testOperatorLookups(self):
        tl = grammar.TokenLookupTable()
        tl.set(grammar.Token("symbol", "func"), "function")
        tl.set((grammar.Token("symbol", "end"), ), "end_st")

        self.assertEqual(tl.match([grammar.Token("symbol", "func", 1, 20)]),
                         ("function",
                          (grammar.Token("symbol", "func", 1, 20), )))

        self.assertEqual(
            tl.match([
                grammar.Token("symbol", "func_not", 0, None),
                grammar.Token("symbol", "func", 1, 20)
            ]), (None, None))

        # Multi-token matches
        tl.set((grammar.Token("symbol", "not"), grammar.Token("symbol", "in")),
               "not in")

        self.assertEqual(
            tl.match([
                grammar.Token("symbol", "not"),
                grammar.Token("symbol", "in"),
                grammar.Token("blah", "blah")
            ]),
            ("not in",
             (grammar.Token("symbol", "not"), grammar.Token("symbol", "in"))))

        # Default match is the longest
        tl.set(grammar.Token("symbol", "not"), "not")
        self.assertEqual(
            tl.match([
                grammar.Token("symbol", "not"),
                grammar.Token("symbol", "in"),
                grammar.Token("blah", "blah")
            ]),
            ("not in",
             (grammar.Token("symbol", "not"), grammar.Token("symbol", "in"))))

        self.assertEqual(
            tl.match([
                grammar.Token("symbol", "not"),
                grammar.Token("blah", "blah")
            ]), ("not", (grammar.Token("symbol", "not"), )))
Example #6
class ObjectFilterSyntax(syntax.Syntax):
    OPERATORS = [
        # Aliases for equivalence:
        grammar.Operator(name="equals",
                         precedence=3,
                         assoc="left",
                         handler=ast.Equivalence,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("symbol", "equals"),
                         suffix=None),
        grammar.Operator(name="is",
                         precedence=3,
                         assoc="left",
                         handler=ast.Equivalence,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("symbol", "is"),
                         suffix=None),
        grammar.Operator(name="==",
                         precedence=3,
                         assoc="left",
                         handler=ast.Equivalence,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("symbol", "=="),
                         suffix=None),
        grammar.Operator(name="notequals",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ComplementEquivalence,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("symbol", "notequals"),
                         suffix=None),
        grammar.Operator(name="isnot",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ComplementEquivalence,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("symbol", "isnot"),
                         suffix=None),
        grammar.Operator(name="!=",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ComplementEquivalence,
                         docstring=None,
                         prefix=None,
                         infix=grammar.Token("symbol", "!="),
                         suffix=None),

        # Logical:
        grammar.Operator(name="or",
                         precedence=0,
                         assoc="left",
                         handler=ast.Union,
                         docstring="Logical OR.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "or")),
        grammar.Operator(name="and",
                         precedence=1,
                         assoc="left",
                         handler=ast.Intersection,
                         docstring="Logical AND.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "and")),
        grammar.Operator(name="||",
                         precedence=0,
                         assoc="left",
                         handler=ast.Union,
                         docstring="Logical OR.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "||")),
        grammar.Operator(name="&&",
                         precedence=1,
                         assoc="left",
                         handler=ast.Intersection,
                         docstring="Logical AND.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "&&")),

        # Comparisons:
        grammar.Operator(name=">=",
                         precedence=3,
                         assoc="left",
                         handler=ast.PartialOrderedSet,
                         docstring="Equal-or-greater-than.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", ">=")),
        grammar.Operator(name="<=",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ReversePartialOrderedSet,
                         docstring="Equal-or-less-than.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "<=")),
        grammar.Operator(name=">",
                         precedence=3,
                         assoc="left",
                         handler=ast.StrictOrderedSet,
                         docstring="Greater-than.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", ">")),
        grammar.Operator(name="<",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ReverseStrictOrderedSet,
                         docstring="Less-than.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "<")),

        # Set ops:
        grammar.Operator(name="notinset",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ComplementMembership,
                         docstring="Left-hand operand is not in list.",
                         prefix=None,
                         suffix=None,
                         infix=(grammar.Token("symbol", "notinset"))),
        grammar.Operator(name="inset",
                         precedence=3,
                         assoc="left",
                         handler=ast.Membership,
                         docstring="Left-hand operand is in list.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "inset")),
        grammar.Operator(name="notcontains",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ReverseComplementMembership,
                         docstring="Right-hand operand is not in list.",
                         prefix=None,
                         suffix=None,
                         infix=(grammar.Token("symbol", "notcontains"))),
        grammar.Operator(name="contains",
                         precedence=3,
                         assoc="left",
                         handler=ast_transforms.ReverseMembership,
                         docstring="Right-hand operand is in list.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "contains")),

        # Miscellaneous:
        grammar.Operator(name="unary -",
                         precedence=5,
                         assoc="right",
                         handler=ast_transforms.NegateValue,
                         docstring=None,
                         infix=None,
                         suffix=None,
                         prefix=grammar.Token("symbol", "-")),
        grammar.Operator(name="list builder",
                         precedence=14,
                         assoc="left",
                         handler=ast.Tuple,
                         docstring=None,
                         prefix=grammar.Token("lbracket", "["),
                         infix=grammar.Token("comma", ","),
                         suffix=grammar.Token("rbracket", "]")),
        grammar.Operator(name="regexp",
                         precedence=3,
                         assoc="left",
                         handler=ast.RegexFilter,
                         docstring="Match LHS against regex on RHS.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", "regexp")),
        grammar.Operator(name=".",
                         precedence=12,
                         assoc="left",
                         handler=ast_transforms.NormalizeResolve,
                         docstring="OBJ.MEMBER -> return MEMBER of OBJ.",
                         prefix=None,
                         suffix=None,
                         infix=grammar.Token("symbol", ".")),
    ]

    def __init__(self, original, params=None):
        super(ObjectFilterSyntax, self).__init__(original)
        if params is not None:
            raise ValueError("ObjectFilterSyntax doesn't support parameters.")

        t = tokenizer.LazyTokenizer(original)
        self.parser = parser.ExpressionParser(operators=self.OPERATORS,
                                              tokenizer=t)

    @property
    def root(self):
        return self.parser.parse()
Example #7
    def string_end(self, pattern, match, **_):
        # Leave the string-literal state and emit the collected string as a
        # single token.
        self._pop_state()
        return grammar.Token(name=pattern.name,
                             value=self.string,
                             start=self.string_position,
                             end=match.end())
Example #8
    def emit_float(self, string, match, pattern, **_):
        # Like emit(), but converts the matched text to a float first.
        return grammar.Token(name=pattern.name,
                             value=float(string),
                             start=match.start(),
                             end=match.end())
Example #9
    def emit(self, string, match, pattern, **_):
        """Emits a token using the current pattern match and pattern label."""
        return grammar.Token(name=pattern.name,
                             value=string,
                             start=match.start(),
                             end=match.end())
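
Examples #7 through #9 all follow the same handler contract: each emit_* callback receives the matched string, the regex match object, and the pattern that fired, and returns a grammar.Token. The self-contained sketch below shows how handlers with that signature could be driven by a simple regex tokenizer loop; the Pattern and Token stand-ins and the tokenize() loop are assumptions made for illustration, not EFILTER's actual tokenizer.

import collections
import re

# Hypothetical stand-ins for the types used in examples #7-#9 (assumptions for
# illustration only).
Token = collections.namedtuple("Token", ["name", "value", "start", "end"])
Pattern = collections.namedtuple("Pattern", ["name", "regex", "handler"])


def emit(string, match, pattern, **_):
    """Default handler: wrap the matched text in a Token (cf. example #9)."""
    return Token(name=pattern.name, value=string,
                 start=match.start(), end=match.end())


def emit_float(string, match, pattern, **_):
    """Convert the matched text to a float before emitting (cf. example #8)."""
    return Token(name=pattern.name, value=float(string),
                 start=match.start(), end=match.end())


PATTERNS = [
    Pattern("float", re.compile(r"\d+\.\d+"), emit_float),
    Pattern("symbol", re.compile(r"\w+"), emit),
]


def tokenize(source):
    """Yields Tokens by trying each pattern at the current position."""
    position = 0
    while position < len(source):
        if source[position].isspace():
            position += 1
            continue
        for pattern in PATTERNS:
            match = pattern.regex.match(source, position)
            if match:
                yield pattern.handler(string=match.group(0), match=match,
                                      pattern=pattern)
                position = match.end()
                break
        else:
            raise ValueError("No pattern matched at position %d." % position)


# Produces Token(name='symbol', value='pi', ...) followed by
# Token(name='float', value=3.14, ...).
print(list(tokenize("pi 3.14")))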