Example #1
import unittest

# Module paths below are assumed from the EFILTER project layout.
from efilter import errors
from efilter.parsers.common import grammar
from efilter.parsers.common import token_stream
from efilter.parsers.common import tokenizer


class TokenStreamTest(unittest.TestCase):
    def testFilters(self):
        t = tokenizer.LazyTokenizer("5 + 5 == foobar and 'hello, world!'")
        ts = token_stream.TokenStream(tokenizer=t)

        # 'accept' consumes the next token and returns a truthy match on
        # success, a falsy value otherwise.
        self.assertTrue(ts.accept(grammar.literal))
        self.assertFalse(ts.accept(grammar.literal))

        # 'expect' is 'accept' that raises on a failed match.
        with self.assertRaises(errors.EfilterParseError):
            ts.expect(grammar.literal)

        self.assertTrue(ts.accept(grammar.symbol))

        # 'reject' is the inverse guard: it raises if the match succeeds.
        with self.assertRaises(errors.EfilterParseError):
            ts.reject(grammar.literal)
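Together, accept, expect and reject give the stream three matching modes: optional consumption, required consumption, and a guard that raises when the next token does match.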
Example #2
    # Relies on the tokenizer and token_stream modules imported in Example #1.
    def __init__(self, original, params=None):
        super(Parser, self).__init__(original)

        # Feed the source string through a lazy tokenizer into a token stream.
        self.tokens = token_stream.TokenStream(
            tokenizer.LazyTokenizer(self.original))

        # Normalize query parameters: a list is keyed by position, a dict is
        # used as-is, and None means no parameters.
        if isinstance(params, list):
            self.params = dict(enumerate(params))
        elif isinstance(params, dict):
            self.params = params
        elif params is None:
            self.params = {}
        else:
            raise TypeError("Params must be a list or a dict, not %r." %
                            type(params))
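The parameter handling is easy to exercise on its own. Below is a minimal, standalone sketch of the same normalization; normalize_params is a hypothetical helper written for illustration and is not part of EFILTER:

    def normalize_params(params=None):
        # Mirrors Parser.__init__ above: list positions become dict keys.
        if isinstance(params, list):
            return dict(enumerate(params))
        if isinstance(params, dict):
            return params
        if params is None:
            return {}
        raise TypeError("Params must be a list or a dict, not %r." %
                        type(params))

    print(normalize_params(["foo", "bar"]))   # {0: 'foo', 1: 'bar'}
    print(normalize_params({"name": "foo"}))  # {'name': 'foo'}
    print(normalize_params())                 # {}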
Example #3
    def __init__(self, operators, tokenizer):
        # Build an operator lookup table and wrap the tokenizer in a stream.
        self.operators = grammar.OperatorTable(*operators)
        self.tokens = token_stream.TokenStream(tokenizer)
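Examples #2 and #3 share the same construction pattern: whatever else the parser tracks, the tokenizer is always wrapped in a token_stream.TokenStream before parsing begins.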