Beispiel #1
0
    def get_string_deps(self, text, context=None, *, filename=None):
        """Find dependencies for a template string.

        :param text: template source to scan.
        :param context: mapping used to resolve ``${context['key']}``
            style includes; only consulted when such markers appear.
        :param filename: name of the template being scanned, used both
            for lexer error reporting and in the "cannot find" log line.
        :return: list of dependency template paths (normalized through
            the template lookup where possible).
        """
        lex = lexer.Lexer(text=text, filename=filename, input_encoding='utf-8')
        lex.parse()

        deps = []
        for n in lex.template.nodes:
            keyword = getattr(n, 'keyword', None)
            if keyword in ["inherit", "namespace"] or isinstance(
                    n, parsetree.IncludeTag):
                # BUG FIX: the original assigned to `filename` here, which
                # clobbered the function argument, so the error log below
                # reported a dependency's name instead of the template
                # actually being scanned. Use a distinct local name.
                dep_name = n.attributes["file"]
                if '${' in dep_name:
                    # Support for comment helper inclusions
                    dep_name = re.sub(r'''\${context\[['"](.*?)['"]]}''',
                                      lambda m: context[m.group(1)], dep_name)
                deps.append(dep_name)
        # Some templates will include "foo.tmpl" and we need paths, so normalize them
        # using the template lookup
        for i, d in enumerate(deps):
            dep = self.get_template_path(d)
            if dep:
                deps[i] = dep
            else:
                LOGGER.error("Cannot find template {0} referenced in {1}", d,
                             filename)
        return deps
Beispiel #2
0
def get_deps(filename):
    """Return the files referenced by <%inherit> tags in *filename*."""
    source = util.read_file(filename)
    parsed = lexer.Lexer(text=source, filename=filename)
    parsed.parse()

    # Only inheritance is tracked here.
    # TODO: include tags are not handled
    return [node.attributes['file']
            for node in parsed.template.nodes
            if getattr(node, 'keyword', None) == "inherit"]
Beispiel #3
0
    def get_template_reference(template):
        """Return the undeclared identifiers used by the given template source."""
        node = lexer.Lexer(template).parse()

        # codegen._Identifiers requires a compiler object, but it only
        # ever reads the reserved_names attribute, so a stub suffices.
        def stub_compiler():
            return None
        stub_compiler.reserved_names = set()

        return list(codegen._Identifiers(stub_compiler, node).undeclared)
Beispiel #4
0
    def get_deps(self, filename):
        """Get dependencies for a template (internal function)."""
        parsed = lexer.Lexer(text=util.read_file(filename), filename=filename)
        parsed.parse()

        def _is_dependency(node):
            # inherit/namespace tags and include tags each pull in a file.
            return (getattr(node, 'keyword', None) in ("inherit", "namespace")
                    or isinstance(node, parsetree.IncludeTag))

        return [node.attributes['file']
                for node in parsed.template.nodes
                if _is_dependency(node)]
Beispiel #5
0
    def get_deps(self, filename):
        """Get dependencies for a template (internal function)."""
        parsed = lexer.Lexer(text=util.read_file(filename), filename=filename)
        parsed.parse()

        # Only inherit/namespace tags are inspected.
        # TODO: include tags are not handled
        return [node.attributes['file']
                for node in parsed.template.nodes
                if getattr(node, 'keyword', None) in ("inherit", "namespace")]
Beispiel #6
0
def extract_haml(fileobj, keywords, comment_tags, options):
    """ babel translation token extract function for haml files """

    import haml
    from mako import lexer, parsetree
    from mako.ext.babelplugin import extract_nodes

    # 'input_encoding' takes precedence over the generic 'encoding' option.
    encoding = options.get('input_encoding', options.get('encoding', None))
    source = haml.preprocessor(fileobj.read())
    tree = lexer.Lexer(source, input_encoding=encoding).parse()
    yield from extract_nodes(tree.get_children(), keywords, comment_tags,
                             options)
Beispiel #7
0
    def get_string_deps(self, text, filename=None):
        """Find dependencies for a template string.

        :param text: template source to scan.
        :param filename: name of the template, used for lexer error
            reporting.
        :return: list of dependency template paths; entries that cannot
            be resolved through the template lookup are kept as the raw
            names found in the template.
        """
        lex = lexer.Lexer(text=text, filename=filename, input_encoding='utf-8')
        lex.parse()

        deps = []
        for n in lex.template.nodes:
            keyword = getattr(n, 'keyword', None)
            if keyword in ["inherit", "namespace"] or isinstance(
                    n, parsetree.IncludeTag):
                deps.append(n.attributes['file'])
        # Some templates will include "foo.tmpl" and we need paths, so normalize them
        # using the template lookup
        for i, d in enumerate(deps):
            dep = self.get_template_path(d)
            # BUG FIX: the original assigned the lookup result
            # unconditionally, so a failed lookup left a None entry in
            # the returned list. Keep the raw name when the lookup fails.
            if dep:
                deps[i] = dep
        return deps
Beispiel #8
0
def extract(fileobj, keywords, comment_tags, options):
    """Extract messages from plain Mako defs and from Python
    expressions inside Mako templates.
    """
    encoding = "utf-8"
    tree = lexer.Lexer(fileobj.read(), input_encoding=encoding).parse()

    yield from extract_nodes(tree.get_children(),
                             keywords, comment_tags, options)

    # proceed with the standard Mako extractor
    yield from extract_mako_nodes(tree.get_children(),
                                  keywords, comment_tags, options)
    def get_template_reference(template):
        """Return the undeclared identifiers used by *template*.

        On a Mako parse failure the error is logged and an empty list
        is returned instead of raising.
        """
        try:
            node = lexer.Lexer(template).parse()
        except MakoException as e:
            logger.warning('pipeline get template[%s] reference error[%s]' % (template, e))
            return []

        # codegen._Identifiers requires a compiler object, but only the
        # reserved_names attribute is ever read, so a stub suffices.
        stub_compiler = lambda: None
        stub_compiler.reserved_names = set()

        return list(codegen._Identifiers(stub_compiler, node).undeclared)
Beispiel #10
0
def extract(fileobj, keywords, comment_tags, options):
    """Extract messages from Mako templates.

    :param fileobj: the file-like object the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
                     recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
                         in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
    :rtype: ``iterator``
    """
    # 'input_encoding' takes precedence over the generic 'encoding' option.
    encoding = options.get('input_encoding', options.get('encoding', None))

    tree = lexer.Lexer(fileobj.read(), input_encoding=encoding).parse()
    yield from extract_nodes(tree.get_children(), keywords,
                             comment_tags, options)
Beispiel #11
0
def _mako_template_names(template):
    """
    Return all the used identifiers in the Mako template.

    From Igonato's code at https://stackoverflow.com/a/23577289/622408.

    :param template: Mako template source string.
    :return: set of identifiers used but not declared by the template.
    """
    from mako import lexer, codegen

    # BUG FIX: the original rebound the imported `lexer` module to the
    # Lexer instance, shadowing the module name. Use a distinct name.
    mako_lexer = lexer.Lexer(template)
    node = mako_lexer.parse()
    # ^ The node is the root element for the parse tree.
    # The tree contains all the data from a template
    # needed for the code generation process

    # Dummy compiler. _Identifiers requires one, but it only ever reads
    # the reserved_names attribute (plain def instead of the PEP 8
    # E731 lambda assignment).
    def _compiler():
        return None
    _compiler.reserved_names = set()

    identifiers = codegen._Identifiers(_compiler, node)
    return identifiers.undeclared
Beispiel #12
0
 def process_file(self, fileobj):
     """Yield translation tokens extracted from a Mako template file."""
     source = fileobj.read()
     tree = lexer.Lexer(
         source, input_encoding=self.config["encoding"]).parse()
     yield from self.extract_nodes(tree.get_children())
"""

from mako.template import Template


bar = Template('Basic template with ${name}.')

print('call: bar.render(name=\'...\')')
print(bar.render(name='testing epta'))

#################################################
# how to find identifiers in bar?

from mako import lexer, codegen

# BUG FIX: the original rebound the imported `lexer` module to the Lexer
# instance, shadowing the module name. Bind the instance to a new name.
mako_lexer = lexer.Lexer(bar.source)
node = mako_lexer.parse()

# Dummy compiler: codegen._Identifiers only ever reads reserved_names.
# A plain def replaces the PEP 8 E731 lambda assignment.
def compiler():
    return None
compiler.reserved_names = set()

identifiers = codegen._Identifiers(compiler, node)

# All template variables can be found using this
# object but you are probably interested in the
# undeclared variables:

print('\nIdentifiers in bar template:')
print(identifiers.undeclared)