Code example #1
    def __init__(self, original, params=None):
        super(ObjectFilterSyntax, self).__init__(original)
        if params is not None:
            raise ValueError("ObjectFilterSyntax doesn't support parameters.")

        t = tokenizer.LazyTokenizer(original)
        self.parser = parser.ExpressionParser(operators=self.OPERATORS,
                                              tokenizer=t)
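
A hedged usage note: with the constructor above, building the syntax object and asking its parser for a tree might look like the sketch below. The query string is illustrative, and the parse() call is assumed from code example #6 rather than taken from this snippet.

    # Minimal sketch; not taken from the project's own code or tests.
    syntax = ObjectFilterSyntax("name is 'foo'")  # passing params would raise ValueError
    tree = syntax.parser.parse()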
Code example #2
    def testCorrectEnd(self):
        query = "1 + 1 == 2"
        t = tokenizer.LazyTokenizer(query)
        while t.next_token():
            pass

        # Should be exhausted now.
        self.assertIsNone(t.peek(0))

        # Should be empty now.
        self.assertEqual(0, len(list(iter(t))))
Code example #3
    def testFilters(self):
        t = tokenizer.LazyTokenizer("5 + 5 == foobar and 'hello, world!'")
        ts = token_stream.TokenStream(tokenizer=t)

        # accept() returns a truthy value only when the next token matches
        # the given grammar.
        self.assertTrue(ts.accept(grammar.literal))
        self.assertFalse(ts.accept(grammar.literal))

        # expect() raises on a mismatch instead of returning a falsy value.
        with self.assertRaises(errors.EfilterParseError):
            ts.expect(grammar.literal)

        self.assertTrue(ts.accept(grammar.symbol))

        # reject() raises if the next token *does* match the grammar.
        with self.assertRaises(errors.EfilterParseError):
            ts.reject(grammar.literal)
Code example #4
    def testPeeking(self):
        query = "1 in (5, 10) == foo"
        l = tokenizer.LazyTokenizer(query)

        # peek(n) looks ahead without consuming anything.
        self.assertEqual(l.peek(0).value, 1)
        self.assertEqual(l.peek(2).name, "lparen")
        self.assertEqual(l.current_token.value, 1)

        # Peeking past the end of the query yields None and still leaves
        # the tokenizer where it was.
        self.assertEqual(l.peek(20), None)
        self.assertEqual(l.current_token.value, 1)

        # next_token() consumes a token and updates current_token.
        self.assertEqual(l.next_token().value, "in")
        self.assertEqual(l.current_token.value, "in")
        self.assertEqual(l.next_token().name, "lparen")
        self.assertEqual(l.next_token().value, 5)
        self.assertEqual(l.peek().name, "comma")
        self.assertEqual(l.next_token().name, "comma")
        self.assertEqual(l.next_token().value, 10)
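
As a hedged illustration of why the non-consuming peek() matters, a caller can branch on upcoming tokens without disturbing the stream. The helper below is hypothetical and not part of the library; it relies only on peek(0) and the "lparen" token name shown in the test above.

    def starts_paren_group(tok):
        # Hypothetical helper: True if the next unconsumed token opens a
        # parenthesized group. peek() leaves the tokenizer untouched, so the
        # caller can still consume that token later with next_token().
        token = tok.peek(0)
        return token is not None and token.name == "lparen"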
Code example #5
File: parser.py (project: rlugojr/dotty)
    def __init__(self, original, params=None):
        super(Parser, self).__init__(original)

        self.tokens = token_stream.TokenStream(
            tokenizer.LazyTokenizer(self.original))

        if isinstance(params, list):
            self.params = {}
            for idx, val in enumerate(params):
                self.params[idx] = val
        elif isinstance(params, dict):
            self.params = params
        elif params is None:
            self.params = {}
        else:
            raise TypeError("Params must be a list or a dict, not %r." %
                            type(params))
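
The params handling above converts a list into a dict keyed by position, so that later lookups can treat positional and keyword parameters uniformly. A standalone sketch of the same normalization, with a hypothetical function name, is:

    def normalize_params(params=None):
        # Same normalization as in the constructor above, outside the class.
        if params is None:
            return {}
        if isinstance(params, list):
            return dict(enumerate(params))
        if isinstance(params, dict):
            return params
        raise TypeError("Params must be a list or a dict, not %r." % type(params))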
Code example #6
    def parseQuery(self, query):
        t = tokenizer.LazyTokenizer(query)
        p = parser.ExpressionParser(self.OPERATORS, t)
        return p.parse()
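
A hedged usage note: a test could call the helper above to obtain a parse tree directly. The query string and the assertion are illustrative only and assume parse() returns a non-None result on success.

    def testParsesComparison(self):
        # Illustrative only; relies on the parseQuery helper defined above.
        tree = self.parseQuery("1 + 1 == 2")
        self.assertIsNotNone(tree)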
Code example #7
    def assertQueryMatches(self, query, expected):
        l = tokenizer.LazyTokenizer(query)
        actual = [(token.name, token.value) for token in l]
        self.assertEqual(expected, actual)
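
A hedged usage note: the helper above compares a query's full token stream against an expected list of (name, value) pairs. In the sketch below, the "lparen" and "comma" names come from code example #4; the remaining names and the values of the punctuation tokens are assumptions about LazyTokenizer's output, not documented behavior.

    def testNumbersAndCommas(self):
        # Sketch only; the expected names and values are assumptions.
        self.assertQueryMatches(
            "(5, 10)",
            [("lparen", "("),
             ("literal", 5),
             ("comma", ","),
             ("literal", 10),
             ("rparen", ")")])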