Example #1
    def test_tokenize(self):
        """ Test with simple rules
        """
        import re
        from wheezy.template.lexer import Lexer
        from wheezy.template.lexer import lexer_scan

        def word_token(m):
            return m.end(), 'w', m.group()

        def blank_token(m):
            return m.end(), 'b', m.group()

        def to_upper(s):
            return s.upper()

        def cleanup(tokens):
            for i in range(len(tokens)):
                t = tokens[i]
                if t[1] == 'b':  # index 1 is the token type
                    tokens[i] = (t[0], 'b', ' ')

        class Extension(object):
            lexer_rules = {
                100: (re.compile(r'\w+'), word_token),
                200: (re.compile(r'\s+'), blank_token)
            }
            preprocessors = [to_upper]
            postprocessors = [cleanup]

        lexer = Lexer(**lexer_scan([Extension]))
        assert [(1, 'w', 'HELLO'),
                (1, 'b', ' '),
                (2, 'w', 'WORLD')] == lexer.tokenize('hello\n world')
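A note on what this test exercises: judging from the call pattern
Lexer(**lexer_scan([Extension])), lexer_scan gathers the lexer_rules,
preprocessors and postprocessors attributes from each extension and returns
them as keyword arguments for Lexer. The sketch below (a hypothetical
lexer_scan_sketch, not the actual wheezy.template implementation, which may
differ in details such as missing-attribute handling) shows the assumed
aggregation:

    import typing

    def lexer_scan_sketch(
        extensions: typing.List[typing.Any],
    ) -> typing.Dict[str, typing.Any]:
        """Hypothetical stand-in for wheezy.template.lexer.lexer_scan."""
        rules: typing.Dict[int, typing.Any] = {}
        preprocessors: typing.List[typing.Any] = []
        postprocessors: typing.List[typing.Any] = []
        for extension in extensions:
            # Rules are keyed by an integer priority (100, 200, ...),
            # letting several extensions contribute to one rule table.
            rules.update(getattr(extension, 'lexer_rules', {}))
            preprocessors.extend(getattr(extension, 'preprocessors', []))
            postprocessors.extend(getattr(extension, 'postprocessors', []))
        return {
            # lower priority numbers are tried first against the input
            'lexer_rules': [rules[k] for k in sorted(rules)],
            'preprocessors': preprocessors,
            'postprocessors': postprocessors,
        }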
Example #2
    def test_tokenize(self) -> None:
        """Test with simple rules"""
        import re
        import typing

        from wheezy.template.lexer import Lexer, lexer_scan
        from wheezy.template.typing import Token  # (lineno, kind, value)

        def word_token(m: typing.Match[str]) -> Token:
            return m.end(), "w", m.group()

        def blank_token(m: typing.Match[str]) -> Token:
            return m.end(), "b", m.group()

        def to_upper(s: str) -> str:
            return s.upper()

        def cleanup(tokens: typing.List[Token]) -> None:
            for i in range(len(tokens)):
                t = tokens[i]
                if t[1] == "b":  # index 1 is the token type
                    tokens[i] = (t[0], "b", " ")

        class Extension(object):
            lexer_rules = {
                100: (re.compile(r"\w+"), word_token),
                200: (re.compile(r"\s+"), blank_token),
            }
            preprocessors = [to_upper]
            postprocessors = [cleanup]

        lexer = Lexer(**lexer_scan([Extension]))
        assert [
            (1, "w", "HELLO"),
            (1, "b", " "),
            (2, "w", "WORLD"),
        ] == lexer.tokenize("hello\n world")
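The Token annotations above describe a plain (lineno, token_type, value)
triple, which is why cleanup inspects index 1 for the token type. Assuming
the alias ships in wheezy.template.typing, it is equivalent to:

    import typing

    # assumed alias matching the tuples returned by word_token/blank_token
    Token = typing.Tuple[int, str, str]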
Example #3
 def __init__(self, loader, extensions, template_class=None):
     self.lock = allocate_lock()
     # caches keyed by template name
     self.templates = {}
     self.renders = {}
     self.modules = {}
     # helpers exposed to generated template code:
     # '_r' renders a template, '_i' imports one by name
     self.global_vars = {'_r': self.render, '_i': self.import_name}
     self.loader = loader
     self.template_class = template_class or Template
     self.compiler = Compiler(self.global_vars, -2)
     # scan extensions once and wire the tokenize/parse/build pipeline
     self.lexer = Lexer(**lexer_scan(extensions))
     self.parser = Parser(**parser_scan(extensions))
     self.builder = SourceBuilder(**builder_scan(extensions))
Example #4
 def __init__(
     self,
     loader: Loader,
     extensions: typing.List[typing.Any],
     template_class: typing.Optional[TemplateClass] = None,
 ) -> None:
     self.lock = allocate_lock()
     self.templates: typing.Dict[str, SupportsRender] = {}
     self.renders: typing.Dict[str, RenderTemplate] = {}
     self.modules: typing.Dict[str, ModuleType] = {}
     self.global_vars = {"_r": self.render, "_i": self.import_name}
     self.loader = loader
     self.template_class = template_class or Template
     self.compiler = Compiler(self.global_vars, -2)
     self.lexer = Lexer(**lexer_scan(extensions))
     self.parser = Parser(**parser_scan(extensions))
     self.builder = SourceBuilder(**builder_scan(extensions))
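Both constructor variants wire the same pipeline: the lexer tokenizes the
template source, the parser builds a node tree, the builder emits Python
source, and the compiler turns it into executable code, with _r (render)
and _i (import) exposed to the generated module. A typical way to exercise
this __init__, following the standard wheezy.template setup (the template
text here is only illustrative):

    from wheezy.template.engine import Engine
    from wheezy.template.ext.core import CoreExtension
    from wheezy.template.loader import DictLoader

    engine = Engine(
        loader=DictLoader({'greeting': '@require(name)\nHello, @name!\n'}),
        extensions=[CoreExtension()],
    )
    template = engine.get_template('greeting')
    print(template.render({'name': 'World'}))  # Hello, World!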