def extract_vue(fileobj, keywords, comment_tags, options):
    """Extract messages from Vue template files.

    :param fileobj: the file-like the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be recognize as translation functions
    :param comment_tags: a list of translator tags to search for and include in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)``
    :rtype: ``iterator``
    """
    encoding = options.get('encoding', 'utf-8')
    contents = fileobj.read().decode(encoding=encoding)
    # Template part: feed every translatable token to the JS extractor.
    for token in Lexer(contents, None).tokenize():  # type: Token
        if token.token_type not in TOKENS:
            continue
        token_stream = BytesIO(token.contents.encode(encoding=encoding))
        for found in extract_javascript(token_stream, keywords,
                                        comment_tags, options):
            if not found:
                continue
            # Report the token's line in the template, not the JS-local line.
            yield (token.lineno, found[1], found[2], found[3])
    # Inline <script> part, if present.
    script_match = re.search(r'<script>(.+)</script>', contents, re.DOTALL)
    if script_match is None:
        return
    script_source = script_match.group(1)
    # Offset JS-local line numbers by the template lines before the script body.
    line_offset = contents[:script_match.start(1)].count('\n')
    script_stream = BytesIO(script_source.encode(encoding=encoding))
    for found in extract_javascript(script_stream, keywords,
                                    comment_tags, options):
        yield (line_offset + found[0], found[1], found[2], found[3])
# Beispiel #2
# 0
 def test_colon_attr(self):
     """Both quote styles of a ``:attr`` binding lex to a directive token."""
     for source, directive_value in (("<div :html='foo'>", "foo"),
                                     ('<div :html="bar">', "bar")):
         self.assertTokensEqual(
             Lexer(source).tokenize(), [
                 Token(token_type=0, contents="<div "),
                 Token(TOKEN_DIRECTIVE, directive_value)
             ])
# Beispiel #3
# 0
def extract_vue(fileobj, keywords, comment_tags, options):
    """Extract messages from Vue template files.

    :param fileobj: the file-like the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be recognize as translation functions
    :param comment_tags: a list of translator tags to search for and include in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)``
    :rtype: ``iterator``
    :raises TypeError: if ``gettext``/``ngettext`` is called with the wrong
        number of arguments inside a template token
    """
    contents = fileobj.read()
    # Decode bytes input; text passes through unchanged (py2/py3 via six).
    u = lambda s: s if isinstance(s, six.text_type) else s.decode(
        encoding=options.get('encoding', 'utf-8'))
    contents = u(contents)
    lexer = Lexer(contents, None)
    for t in lexer.tokenize():  # type: Token
        if t.token_type not in TOKENS:
            continue
        try:
            # Token contents are expected to parse as one Python expression.
            tree = ast.parse(t.contents).body[0]
        except (SyntaxError, IndexError, AttributeError):
            # Not parseable -- skip this token (deliberate best effort).
            continue
        for node in ast.walk(tree):  # type: ast.Call
            if not (isinstance(node, ast.Call)
                    and isinstance(node.func, ast.Name)):
                continue
            func_name = node.func.id
            args = node.args
            messages = None
            if func_name == "gettext":
                if len(args) != 1:
                    raise TypeError(
                        "Error at line %s: Function gettext()"
                        " requires exactly one argument" %
                        t.lineno)
                # Ignore non-literal arguments such as variables.
                # NOTE(review): ast.Str was removed in Python 3.12 --
                # migrate to ast.Constant once py2/six support is dropped.
                if isinstance(args[0], ast.Str):
                    messages = u(args[0].s)
            elif func_name == "ngettext":
                # Was ``len(node.args)``; use the ``args`` alias for
                # consistency with the gettext branch (same value).
                if len(args) != 3:
                    raise TypeError(
                        "Error at line %s: Function ngettext()"
                        " requires exactly 3 arguments" % t.lineno)
                if isinstance(args[0], ast.Str) and isinstance(
                        args[1], ast.Str):
                    messages = u(args[0].s), u(args[1].s)

            if messages:
                yield t.lineno, func_name, messages, []
# Beispiel #4
# 0
 def test_var(self):
     """A ``{{ ... }}`` interpolation yields TEXT / VAR / TEXT tokens."""
     tokens = Lexer("<div>{{ bar }}</div>").tokenize()
     expected = [
         Token(TOKEN_TEXT, '<div>'),
         Token(TOKEN_VAR, "bar"),
         Token(TOKEN_TEXT, '</div>'),
     ]
     self.assertTokensEqual(tokens, expected)
def extract_vue(fileobj, keywords, comment_tags, options):
    """Extract messages from Vue template files.

    :param fileobj: the file-like the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be recognize as translation functions
    :param comment_tags: a list of translator tags to search for and include in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)``
    :rtype: ``iterator``
    :raises TypeError: if ``gettext``/``ngettext`` is called with the wrong
        number of arguments inside a template token
    """
    contents = fileobj.read()
    # Decode bytes input; text passes through unchanged (py2/py3 via six).
    u = lambda s: s if isinstance(s, six.text_type) else s.decode(encoding=options.get('encoding', 'utf-8'))
    contents = u(contents)
    lexer = Lexer(contents, None)
    for t in lexer.tokenize():  # type: Token
        if t.token_type in TOKENS:
            try:
                # Token contents are expected to parse as one Python expression.
                tree = ast.parse(t.contents).body[0]
            except (SyntaxError, IndexError, AttributeError):
                # Not parseable -- skip this token (deliberate best effort).
                pass
            else:
                for node in ast.walk(tree):  # type: ast.Call
                    if (isinstance(node, ast.Call)
                            and isinstance(node.func, ast.Name)):
                        func_name = node.func.id
                        args = node.args
                        messages = None
                        if func_name == "gettext":
                            if len(args) != 1:
                                raise TypeError("Error at line %s: Function gettext()"
                                                " requires exactly one argument" % t.lineno)
                            # Ignore non-literal arguments such as variables.
                            # NOTE(review): ast.Str was removed in Python 3.12.
                            if isinstance(args[0], ast.Str):
                                messages = u(args[0].s)
                        elif func_name == "ngettext":
                            # Was ``len(node.args)``; ``args`` is the same value --
                            # use it for consistency with the gettext branch.
                            if len(args) != 3:
                                raise TypeError("Error at line %s: Function ngettext()"
                                                " requires exactly 3 arguments" % t.lineno)
                            if isinstance(args[0], ast.Str) and isinstance(args[1], ast.Str):
                                messages = u(args[0].s), u(args[1].s)

                        if messages:
                            yield t.lineno, func_name, messages, []
def extract_vue(fileobj, keywords, comment_tags, options):
    """Extract messages from Vue template files.

    :param fileobj: the file-like the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be recognize as translation functions
    :param comment_tags: a list of translator tags to search for and include in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)``
    :rtype: ``iterator``
    """
    encoding = options.get('encoding', 'utf-8')
    text = fileobj.read().decode(encoding=encoding)
    # Delegate each translatable template token to the JS extractor.
    for tok in Lexer(text, None).tokenize():  # type: Token
        if tok.token_type not in TOKENS:
            continue
        token_stream = BytesIO(tok.contents.encode(encoding=encoding))
        for found in extract_javascript(token_stream, keywords,
                                        comment_tags, options):
            if found:
                # Report the token's line in the template.
                yield (tok.lineno, found[1], found[2], found[3])
# Beispiel #7
# 0
    def test_combine(self):
        """A mixed template lexes into interleaved text and tag tokens."""
        content = ("<div>{{ gettext('Hello') }}</div>"
                   "<div>{{* gettext('Hello') }}</div>"
                   "<div><!-- Hello -->"
                   "<span>{{{ Blablabla }}}</span></div>")
        expected = [
            Token(token_type=0, contents="<div>"),
            Token(token_type=1, contents="gettext('Hello')"),
            Token(token_type=0, contents="</div><div>"),
            Token(token_type=2, contents="gettext('Hello')"),
            Token(token_type=0, contents="</div><div>"),
            Token(token_type=3, contents="Hello"),
            Token(token_type=0, contents="<span>"),
            Token(token_type=4, contents="Blablabla"),
            Token(token_type=0, contents="</span></div>"),
        ]
        self.assertTokensEqual(Lexer(content).tokenize(), expected)
# Beispiel #8
# 0
 def test_double_way_binding(self):
     """A ``{{@ ... }}`` expression lexes to a double-way-binding token."""
     tokens = Lexer("{{@ foo }}").tokenize()
     self.assertTokensEqual(tokens, [Token(TOKEN_DOUBLE_WAY_BINDING, "foo")])
# Beispiel #9
# 0
 def test_const(self):
     """A ``{{* ... }}`` expression lexes to a const token."""
     tokens = Lexer("{{* bar }}").tokenize()
     self.assertTokensEqual(tokens, [Token(TOKEN_CONST, "bar")])
# Beispiel #10
# 0
 def test_raw_html(self):
     """A ``{{{ ... }}}`` expression lexes to a raw-HTML token."""
     tokens = Lexer("{{{ <div></div> }}}").tokenize()
     self.assertTokensEqual(tokens, [Token(TOKEN_RAW_HTML, "<div></div>")])
# Beispiel #11
# 0
 def test_comments(self):
     """An HTML comment lexes to a comment token with the inner text."""
     tokens = Lexer("<!-- comments -->").tokenize()
     self.assertTokensEqual(tokens, [Token(TOKEN_COMMENT, "comments")])
# Beispiel #12
# 0
 def test_text(self):
     """Plain markup with no template syntax lexes to one text token."""
     tokens = Lexer("<div></div>").tokenize()
     self.assertTokensEqual(tokens, [Token(TOKEN_TEXT, '<div></div>')])