def test_iter(self, env):
    token_types = [t.type for t in TokenStream(self.test_tokens, "foo", "bar")]
    assert token_types == [
        "block_begin",
        "block_end",
    ]

def filter_stream(self, stream: TokenStream):
    """Jinja2 stream-filtering hook: run argument substitution over the
    incoming tokens and re-wrap the result as a :class:`TokenStream`.
    """
    return TokenStream(
        self._arg_substitute(stream),
        stream.name,
        stream.filename,
    )
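
filter_stream is the hook that _tokenize (below) invokes for every registered
extension, and it may return either a TokenStream or a plain iterable of
tokens. A minimal sketch of a custom extension using this hook, assuming the
standard jinja2.ext.Extension base class (ShoutExtension and its behavior are
hypothetical, for illustration only):

from jinja2 import Environment
from jinja2.ext import Extension
from jinja2.lexer import Token

class ShoutExtension(Extension):
    """Hypothetical extension that upper-cases literal template data."""

    def filter_stream(self, stream):
        for token in stream:
            if token.type == "data":
                # rewrite only raw template text, leave tag tokens alone
                yield Token(token.lineno, token.type, token.value.upper())
            else:
                yield token

env = Environment(extensions=[ShoutExtension])
print(env.from_string("hello {{ name }}!").render(name="world"))
# -> "HELLO world!"

Because this filter_stream is a generator rather than a TokenStream, it relies
on _tokenize wrapping the result back into a TokenStream, as shown below.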
def _tokenize(self, source, name, filename=None, state=None): """Called by the parser to do the preprocessing and filtering for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. """ source = self.preprocess(source, name, filename) stream = self.lexer.tokenize(source, name, filename, state) for ext in self.iter_extensions(): stream = ext.filter_stream(stream) if not isinstance(stream, TokenStream): stream = TokenStream(stream, name, filename) return stream
def test_simple(self, env):
    ts = TokenStream(self.test_tokens, "foo", "bar")

    # the stream starts positioned on the first token
    assert ts.current.type is TOKEN_BLOCK_BEGIN
    assert bool(ts)
    assert not bool(ts.eos)

    next(ts)
    assert ts.current.type is TOKEN_BLOCK_END
    assert bool(ts)
    assert not bool(ts.eos)

    # advancing past the last token leaves the stream at EOF:
    # it becomes falsy and eos becomes truthy
    next(ts)
    assert ts.current.type is TOKEN_EOF
    assert not bool(ts)
    assert bool(ts.eos)
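
Both tests reference self.test_tokens, which is not shown in this excerpt. A
minimal fixture consistent with the assertions, assuming the Token tuple and
the token-type constants from jinja2.lexer (the class name and values here are
an assumption, not taken from this excerpt):

from jinja2.lexer import TOKEN_BLOCK_BEGIN, TOKEN_BLOCK_END, Token

class TestTokenStream:
    # Dummy line numbers and empty values; only the token types matter
    # for test_iter and test_simple above.
    test_tokens = [
        Token(1, TOKEN_BLOCK_BEGIN, ""),
        Token(2, TOKEN_BLOCK_END, ""),
    ]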