Example #1
0
 def test_string(self):
     """String literals, including escaped quotes, lex to STRING tokens."""
     source = r'"hello" "world!" "new\nline" "qu\"ote"'
     lx = Lexer(source)
     lx.tokenize()
     expected = [
         ('"hello"', 0),
         ('"world!"', 8),
         (r'"new\nline"', 17),
         (r'"qu\"ote"', 29),
     ]
     for idx, (value, offset) in enumerate(expected):
         self.assertEqual(lx.tokens[idx], Token(STRING, value, offset))
Example #2
0
 def test_string(self):
     """Plain and escape-containing string literals become STRING tokens."""
     lx = Lexer(r'"hello" "world!" "new\nline" "qu\"ote"')
     lx.tokenize()
     toks = lx.tokens
     self.assertEqual(toks[0], Token(STRING, '"hello"', 0))
     self.assertEqual(toks[1], Token(STRING, '"world!"', 8))
     self.assertEqual(toks[2], Token(STRING, r'"new\nline"', 17))
     self.assertEqual(toks[3], Token(STRING, r'"qu\"ote"', 29))
Example #3
0
 def test_multi_line_comment(self):
     """Two adjacent /* */ comments each lex to their own COMMENT token."""
     source = "/* hello\nworld *//* good bye\nworld */"
     lx = Lexer(source)
     lx.tokenize()
     first, second = lx.tokens[0], lx.tokens[1]
     self.assertEqual(first, Token(COMMENT, '/* hello\nworld */', 0))
     self.assertEqual(second, Token(COMMENT, '/* good bye\nworld */', 17))
Example #4
0
 def test_property_named_property(self):
     """A property literally named `property` lexes as a plain ELEMENT."""
     lx = Lexer("Item { property var property }")
     lx.tokenize()
     expected = [
         Token(ELEMENT, 'Item', 0),
         Token(BLOCK_START, '{', 5),
         Token(KEYWORD, 'property', 7),
         Token(ELEMENT, 'var', 16),
         Token(ELEMENT, 'property', 20),
     ]
     for pos, tok in enumerate(expected):
         self.assertEqual(lx.tokens[pos], tok)
Example #5
0
 def test_property_named_property(self):
     """Only the first `property` is the keyword; the second is a name."""
     lx = Lexer("Item { property var property }")
     lx.tokenize()
     toks = lx.tokens
     self.assertEqual(toks[0], Token(COMPONENT, 'Item', 0))
     self.assertEqual(toks[1], Token(BLOCK_START, '{', 5))
     self.assertEqual(toks[2], Token(KEYWORD, 'property', 7))
     self.assertEqual(toks[3], Token(ELEMENT, 'var', 16))
     self.assertEqual(toks[4], Token(ELEMENT, 'property', 20))
    def test(self):
        """Parsing two functions inside an Item yields both, in source order."""
        source = "Item { function foo() {} function bar() {} }"
        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        self.assertEqual(cls.base_name, "Item")

        names = [fn.name for fn in cls.get_functions()]
        self.assertEqual(len(names), 2)
        self.assertEqual(names, ["foo", "bar"])
Example #7
0
    def test_function_property(self):
        """A property bound to a function expression keeps its declared type."""
        source = """Item {
            property var fnProperty: function (arg1, arg2) { return arg1 + arg2; }
            }"""

        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        prop = cls.get_properties()[0]
        self.assertEqual(prop.name, "fnProperty")
        self.assertEqual(prop.type, "var")
    def test_property_named_property(self):
        """The parser accepts a property whose name is the word `property`."""
        source = """Item {
            property var property
            }"""

        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        prop = cls.get_properties()[0]
        self.assertEqual((prop.name, prop.type), ("property", "var"))
Example #9
0
    def test_var_property(self):
        """A var property initialized with an object literal parses cleanly."""
        source = """Item {
            property var varProperty: { "key1": "value1", "key2": "value2" }
            }"""

        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        prop = cls.get_properties()[0]
        self.assertEqual(prop.name, "varProperty")
        self.assertEqual(prop.type, "var")
Example #10
0
    def test_normal_arguments(self):
        """A function with ordinary argument names parses with type `void`."""
        source = """Item {
                     function foo(arg1, arg2) {
                         return arg1 + arg2;
                     }
                 }"""

        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        fn = cls.get_functions()[0]
        self.assertEqual(fn.name, "foo")
        self.assertEqual(fn.type, "void")
Example #11
0
    def test_keyword_arguments(self):
        """Argument names containing keyword substrings do not confuse parsing."""
        source = """Item {
                     function foo(propertyArgument, signalArgument) {
                         return propertyArgument + signalArgument;
                     }
                 }"""

        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        fn = cls.get_functions()[0]
        self.assertEqual(fn.name, "foo")
        self.assertEqual(fn.type, "void")
Example #12
0
def main(argv=None, out=None):
    """Tokenize and parse one QML file, printing its documented class to *out*.

    Args:
        argv: command-line arguments (defaults to sys.argv[1:]).
        out: writable stream for the generated output (defaults to sys.stdout).

    Returns:
        0 on success, -1 on a lexer or parser error (unless --debug is set,
        in which case the original exception is re-raised).
    """
    if argv is None:
        argv = sys.argv[1:]
    if out is None:
        # BUG FIX: this was `out == sys.stdout` — a no-op comparison that left
        # `out` as None, so the final print() fell back to stdout only by
        # accident of print(file=None). Assign explicitly.
        out = sys.stdout

    args = parse_args(argv)

    name = args.qml_file
    namespace = args.namespace
    # Close the file handle deterministically instead of leaking it.
    with open(name, encoding="utf-8") as fp:
        text = fp.read()

    lexer = Lexer(text)
    try:
        lexer.tokenize()
    except LexerError as exc:
        # Lazy %-style args instead of eager string interpolation.
        logging.error("Failed to tokenize %s", name)
        row, msg = info_for_error_at(text, exc.idx)
        logging.error("Lexer error line %d: %s\n%s", row, exc, msg)
        if args.debug:
            raise
        return -1

    if args.debug:
        for token in lexer.tokens:
            print("%20s %s" % (token.type, token.value))

    classname, classversion = find_classname(name, namespace)
    if args.no_since_version:
        classversion = None

    qml_class = QmlClass(classname, classversion)

    try:
        qmlparser.parse(lexer.tokens, qml_class)
    except qmlparser.QmlParserError as exc:
        logging.error("Failed to parse %s", name)
        row, msg = info_for_error_at(text, exc.token.idx)
        # BUG FIX: this branch reported "Lexer error" — copy-pasted from the
        # tokenize handler above — for what is actually a parser error.
        logging.error("Parser error line %d: %s\n%s", row, exc, msg)
        if args.debug:
            raise
        return -1

    print(qml_class, file=out)

    return 0
    def test_multiline_string(self):
        """A backslash-continued string binding is skipped; only the
        documented property after it is reported."""
        source = """Item {
            prop1: "A string that spans \\
            multiple lines"
            /// prop2 doc
            property string prop2: "bar"
            }"""

        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        props = cls.get_properties()
        self.assertEqual(len(props), 1)
        only = props[0]
        self.assertEqual(only.name, "prop2")
        self.assertEqual(only.type, "string")
        self.assertEqual(only.doc, "/// prop2 doc")
    def test_signals(self):
        """A signal declaration yields one signal with typed arguments."""
        source = """Item {
                     signal userAdded(string username, int age)
                 }"""

        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        signals = cls.get_signals()
        self.assertEqual(len(signals), 1)
        sig = signals[0]
        self.assertEqual(sig.name, "userAdded")

        self.assertEqual(len(sig.args), 2)
        for arg, (expected_name, expected_type) in zip(
                sig.args, [("username", "string"), ("age", "int")]):
            self.assertEqual(arg.name, expected_name)
            self.assertEqual(arg.type, expected_type)
Example #15
0
    def test_readonly_property(self):
        """`readonly` is reflected in is_readonly; a plain property is not."""
        source = """Item {
            /// v1 doc
            readonly property int v1
            /// v2 doc
            property int v2
            }"""
        lx = Lexer(source)
        lx.tokenize()
        cls = QmlClass("Foo")
        qmlparser.parse(lx.tokens, cls)

        first, second = cls.get_properties()[0], cls.get_properties()[1]

        self.assertEqual(first.name, "v1")
        self.assertEqual(first.type, "int")
        self.assertEqual(first.doc, "/// v1 doc")
        self.assertTrue(first.is_readonly)

        self.assertEqual(second.name, "v2")
        self.assertEqual(second.type, "int")
        self.assertEqual(second.doc, "/// v2 doc")
        self.assertFalse(second.is_readonly)
Example #16
0
 def test_import(self):
     """Each import statement becomes an IMPORT token at its offset."""
     lx = Lexer("import foo\n import bar")
     lx.tokenize()
     for idx, (value, offset) in enumerate([("import foo", 0),
                                            ("import bar", 12)]):
         self.assertEqual(lx.tokens[idx], Token(IMPORT, value, offset))
Example #17
0
 def test_pragma(self):
     """Each pragma statement becomes a PRAGMA token at its offset."""
     lx = Lexer("pragma foo\n pragma bar")
     lx.tokenize()
     first, second = lx.tokens[0], lx.tokens[1]
     self.assertEqual(first, Token(PRAGMA, "pragma foo", 0))
     self.assertEqual(second, Token(PRAGMA, "pragma bar", 12))
Example #18
0
 def test_single_line_comment(self):
     """A // comment lexes to COMMENT; the next line still lexes normally."""
     lx = Lexer("// hello\nimport bob")
     lx.tokenize()
     self.assertEqual(lx.tokens[0], Token(COMMENT, '// hello', 0))
     self.assertEqual(lx.tokens[1], Token(IMPORT, 'import bob', 9))
Example #19
0
 def test_single_line_comment(self):
     """Tokens after a // comment keep correct source offsets."""
     source = "// hello\nimport bob"
     lx = Lexer(source)
     lx.tokenize()
     expected = [Token(COMMENT, '// hello', 0), Token(IMPORT, 'import bob', 9)]
     for idx, tok in enumerate(expected):
         self.assertEqual(lx.tokens[idx], tok)
Example #20
0
 def test_pragma(self):
     """Pragma statements separated by a newline lex independently."""
     source = "pragma foo\n pragma bar"
     lx = Lexer(source)
     lx.tokenize()
     expected = [Token(PRAGMA, "pragma foo", 0), Token(PRAGMA, "pragma bar", 12)]
     for idx, tok in enumerate(expected):
         self.assertEqual(lx.tokens[idx], tok)
Example #21
0
 def test_import(self):
     """Import statements separated by a newline lex independently."""
     lx = Lexer("import foo\n import bar")
     lx.tokenize()
     first, second = lx.tokens[0], lx.tokens[1]
     self.assertEqual(first, Token(IMPORT, "import foo", 0))
     self.assertEqual(second, Token(IMPORT, "import bar", 12))
Example #22
0
 def test_multi_line_comment(self):
     """Back-to-back /* */ comments lex to separate COMMENT tokens."""
     lx = Lexer("/* hello\nworld *//* good bye\nworld */")
     lx.tokenize()
     expected = [
         Token(COMMENT, '/* hello\nworld */', 0),
         Token(COMMENT, '/* good bye\nworld */', 17),
     ]
     for idx, tok in enumerate(expected):
         self.assertEqual(lx.tokens[idx], tok)