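# A minimal harness these excerpts are assumed to run under. The import path
# and the rootdir value are illustrative assumptions, not confirmed by the
# excerpts themselves.
import unittest

from codebasin import platform, preprocessor  # assumed import path


class TestPreprocessor(unittest.TestCase):
    # Note: the excerpts appear to span more than one revision of the suite;
    # both preprocessor.macro_from_definition_string(...) and
    # preprocessor.Macro.from_definition_string(...) occur below, as do both
    # argument orders for MacroExpander. Use whichever matches your version.

    def setUp(self):
        self.rootdir = "./"  # assumed: only passed through to Platform
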
    def test_expression(self):
        """expression"""
        tokens = preprocessor.Lexer("foo(a,b) * 124 + 'c'").tokenize()
        self.assertTrue(len(tokens) == 10)
        self.assertTrue(isinstance(tokens[0], preprocessor.Identifier))
        self.assertTrue(isinstance(tokens[1], preprocessor.Punctuator))
        self.assertTrue(isinstance(tokens[2], preprocessor.Identifier))
        self.assertTrue(isinstance(tokens[3], preprocessor.Punctuator))
        self.assertTrue(isinstance(tokens[4], preprocessor.Identifier))
        self.assertTrue(isinstance(tokens[5], preprocessor.Punctuator))
        self.assertTrue(isinstance(tokens[6], preprocessor.Operator))
        self.assertTrue(isinstance(tokens[7], preprocessor.NumericalConstant))
        self.assertTrue(isinstance(tokens[8], preprocessor.Operator))
        self.assertTrue(isinstance(tokens[9], preprocessor.CharacterConstant))

        tokens = preprocessor.Lexer(
            "a > b ? \"true_string\" : \"false_string\"").tokenize()
        self.assertTrue(len(tokens) == 7)
        self.assertTrue(isinstance(tokens[0], preprocessor.Identifier))
        self.assertTrue(isinstance(tokens[1], preprocessor.Operator))
        self.assertTrue(isinstance(tokens[2], preprocessor.Identifier))
        self.assertTrue(isinstance(tokens[3], preprocessor.Operator))
        self.assertTrue(isinstance(tokens[4], preprocessor.StringConstant))
        self.assertTrue(isinstance(tokens[5], preprocessor.Operator))
        self.assertTrue(isinstance(tokens[6], preprocessor.StringConstant))
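
    # The "##" operator pastes adjacent tokens during expansion: with
    # CATTEST defined as "first ## 2", CATTEST expands to the single
    # token "first2".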
    def test_cat(self):
        """token pasting with ##"""
        test_str = "CATTEST=first ## 2"
        macro = preprocessor.macro_from_definition_string(test_str)
        tokens = preprocessor.Lexer("CATTEST").tokenize()
        p = platform.Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        expected_tokens = preprocessor.Lexer("first2").tokenize()
        self.assertEqual([x.token for x in expanded_tokens],
                         [x.token for x in expected_tokens])

    def test_stringify_quote(self):
        """stringification of quoted arguments"""
        test_str = "STR(x)= #x"
        macro = preprocessor.macro_from_definition_string(test_str)
        tokens = preprocessor.Lexer("STR(foo(\"4 + 5\"))").tokenize()
        p = platform.Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        expected_tokens = preprocessor.Lexer(
            "\"foo(\\\"4 + 5\\\")\"").tokenize()
        self.assertEqual([x.token for x in expanded_tokens],
                         [x.token for x in expected_tokens])
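
    # C99 6.10.3.4: a macro name seen during its own expansion is not
    # re-expanded ("painted blue"), so foo(foo) (2) expands only the outer
    # call, leaving: bar foo (2).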
    def test_self_reference_macros_3(self):
        """Self referencing macros test 3"""

        def_string = 'foo(x)=bar x'
        macro = preprocessor.macro_from_definition_string(def_string)
        tokens = preprocessor.Lexer("foo(foo) (2)").tokenize()
        p = platform.Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        expected_tokens = preprocessor.Lexer("bar foo (2)").tokenize()
        self.assertEqual([(x.prev_white, x.token) for x in expanded_tokens],
                         [(x.prev_white, x.token) for x in expected_tokens])
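
    # Stringification ("#") collapses each internal run of whitespace to a
    # single space and escapes embedded quotes and backslashes; the raw
    # strings below encode exactly that.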
    def test_stringify_ws(self):
        """stringification of whitespace and escapes"""
        test_str = "STR(x)= TEST #x"
        macro = preprocessor.macro_from_definition_string(test_str)
        to_expand_str = r'STR(L      + 2-2 "\" \n")'
        tokens = preprocessor.Lexer(to_expand_str).tokenize()
        p = platform.Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        expected_str = r'TEST "L + 2-2 \"\\\" \\n\""'
        expected_tokens = preprocessor.Lexer(expected_str).tokenize()
        self.assertEqual([x.token for x in expanded_tokens],
                         [x.token for x in expected_tokens])

    def test_unsupported(self):
        """unsupported"""
        for directive in ["#line", "#warning", "#error"]:
            tokens = preprocessor.Lexer(directive).tokenize()
            node = preprocessor.DirectiveParser(tokens).parse()
            self.assertTrue(
                isinstance(node, preprocessor.UnrecognizedDirectiveNode))
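    # Two variadic spellings should expand identically: the C99 "..." form
    # (substituted via __VA_ARGS__) and the GNU-style named form "args...".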
    def test_variadic(self):
        """variadic macros"""

        expected_expansion = [
            preprocessor.Identifier("Unknown", 0, False, "fprintf"),
            preprocessor.Punctuator("Unknown", 0, False, "("),
            preprocessor.Identifier("Unknown", 0, False, "stderr"),
            preprocessor.Punctuator("Unknown", 0, False, ","),
            preprocessor.StringConstant("Unknown", 0, True, "%d, %f, %e"),
            preprocessor.Punctuator("Unknown", 0, False, ","),
            preprocessor.Identifier("Unknown", 0, True, "a"),
            preprocessor.Punctuator("Unknown", 0, False, ","),
            preprocessor.Identifier("Unknown", 0, True, "b"),
            preprocessor.Punctuator("Unknown", 0, False, ","),
            preprocessor.Identifier("Unknown", 0, True, "c"),
            preprocessor.Punctuator("Unknown", 0, False, ")")
        ]

        for def_string in [
                "eprintf(...)=fprintf(stderr, __VA_ARGS__)",
                "eprintf(args...)=fprintf(stderr, args)"
        ]:
            macro = preprocessor.Macro.from_definition_string(def_string)
            tokens = preprocessor.Lexer(
                "eprintf(\"%d, %f, %e\", a, b, c)").tokenize()
            p = platform.Platform("Test", self.rootdir)
            p._definitions = {macro.name: macro}
            expanded_tokens = preprocessor.MacroExpander(tokens).expand(p)
            self.assertTrue(len(expanded_tokens) == len(expected_expansion))
            for i in range(len(expected_expansion)):
                self.assertEqual(expanded_tokens[i].prev_white,
                                 expected_expansion[i].prev_white)
                self.assertEqual(expanded_tokens[i].token,
                                 expected_expansion[i].token)
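    # FOO=(4 + FOO): expansion stops at the self-reference, and the test
    # also checks that line/column/whitespace bookkeeping survives expansion.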
    def test_self_reference_macros_1(self):
        """Self referencing macros test 1"""

        expected_expansion = [
            preprocessor.Punctuator('Unknown', 4, False, '('),
            preprocessor.NumericalConstant('Unknown', 5, False, '4'),
            preprocessor.Operator('Unknown', 7, True, '+'),
            preprocessor.Identifier('Unknown', 9, True, 'FOO'),
            preprocessor.Punctuator('Unknown', 12, False, ')')
        ]

        def_string = 'FOO=(4 + FOO)'
        macro = preprocessor.Macro.from_definition_string(def_string)
        tokens = preprocessor.Lexer("FOO").tokenize()
        p = platform.Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(tokens).expand(p)
        self.assertTrue(len(expanded_tokens) == len(expected_expansion))
        for i in range(len(expected_expansion)):
            self.assertEqual(expanded_tokens[i].line,
                             expected_expansion[i].line)
            self.assertEqual(expanded_tokens[i].col, expected_expansion[i].col)
            self.assertEqual(expanded_tokens[i].prev_white,
                             expected_expansion[i].prev_white)
            self.assertEqual(expanded_tokens[i].token,
                             expected_expansion[i].token)
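
    # Stringification must also cope with arguments that are nothing like
    # valid C identifiers (path separators, '%', leading digits).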
    def test_paths(self):
        """stringification of path-like arguments"""
        input_str = r'FUNCTION(looks/2like/a/path/with_/bad%%identifiers)'
        tokens = preprocessor.Lexer(input_str).tokenize()
        p = platform.Platform("Test", self.rootdir)
        macro = preprocessor.macro_from_definition_string("FUNCTION(x)=#x")
        p._definitions = {macro.name: macro}
        # Smoke test: no assertion on the result; the test passes as long as
        # expansion completes without raising.
        exp = preprocessor.MacroExpander(p).expand(tokens)

    def test_ifdef(self):
        """ifdef"""
        tokens = preprocessor.Lexer("#ifdef FOO").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.IfNode))
        self.assertTrue(len(node.tokens) == 4)
        self.assertTrue(isinstance(node.tokens[0], preprocessor.Identifier))
        self.assertTrue(node.tokens[0].token == "defined")
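
    # The classic two-level stringification idiom: str(foo) stringifies its
    # argument as written, yielding "foo", while xstr(foo) expands the
    # argument first and so yields "4".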
    def test_stringify_nested(self):
        """nested stringification"""
        mac_xstr = preprocessor.macro_from_definition_string("xstr(s)=str(s)")
        mac_str = preprocessor.macro_from_definition_string("str(s)=#s")
        mac_def = preprocessor.macro_from_definition_string("foo=4")
        p = platform.Platform("Test", self.rootdir)
        p._definitions = {x.name: x for x in [mac_xstr, mac_str, mac_def]}

        tokens = preprocessor.Lexer("str(foo)").tokenize()
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        expected_tokens = preprocessor.Lexer("\"foo\"").tokenize()
        self.assertEqual([x.token for x in expanded_tokens],
                         [x.token for x in expected_tokens])

        tokens = preprocessor.Lexer("xstr(foo)").tokenize()
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        expected_tokens = preprocessor.Lexer("\"4\"").tokenize()
        self.assertEqual([x.token for x in expanded_tokens],
                         [x.token for x in expected_tokens])
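    # Maximal munch: each multi-character operator ("##", ">>", "<=", ...)
    # must come back as a single Operator token, not as two shorter ones.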
    def test_operator(self):
        """operators"""
        operators = [
            "||", "&&", ">>", "<<", "!=", ">=", "<=", "==", "##",
            "-", "+", "!", "*", "/", "|", "&", "^", "<", ">",
            "?", ":", "~", "#", "=", "%"
        ]
        for op in operators:
            tokens = preprocessor.Lexer(op).tokenize()
            self.assertTrue(len(tokens) == 1)
            self.assertTrue(isinstance(tokens[0], preprocessor.Operator))
            self.assertTrue(str(tokens[0].token) == op)
    def test_numerical(self):
        """numbers"""
        numbers = [
            "123", "123ul", "123.4", "123.4e+05", ".123", "0xFF", "0b10"
        ]
        for number in numbers:
            tokens = preprocessor.Lexer(number).tokenize()
            self.assertTrue(len(tokens) == 1)
            self.assertTrue(
                isinstance(tokens[0], preprocessor.NumericalConstant))
    def test_punctuator(self):
        """punctuators"""
        punctuators = [
            "(", ")", "{", "}", "[", "]", ",", ".", ";", "'", "\"", "\\"
        ]
        for punc in punctuators:
            tokens = preprocessor.Lexer(punc).tokenize()
            self.assertTrue(len(tokens) == 1)
            self.assertTrue(isinstance(tokens[0], preprocessor.Punctuator))
            self.assertTrue(str(tokens[0].token) == punc)
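
    # #include comes in three forms: a system path in <>, a local path in
    # quotes, and a computed include whose path comes from macro expansion.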
    def test_include(self):
        """include"""
        tokens = preprocessor.Lexer(
            "#include <path/to/system/header>").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.IncludeNode))
        self.assertTrue(isinstance(node.value, preprocessor.IncludePath))
        self.assertTrue(node.value.system)

        tokens = preprocessor.Lexer(
            "#include \"path/to/local/header\"").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.IncludeNode))
        self.assertTrue(isinstance(node.value, preprocessor.IncludePath))
        self.assertTrue(not node.value.system)

        tokens = preprocessor.Lexer("#include COMPUTED_INCLUDE").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.IncludeNode))
        self.assertTrue(len(node.value) == 1)
        self.assertTrue(isinstance(node.value[0], preprocessor.Identifier))
    def test_define(self):
        """define"""
        tokens = preprocessor.Lexer("#define FOO").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.DefineNode))
        self.assertTrue(str(node.identifier) == "FOO")
        self.assertTrue(node.args is None)
        self.assertTrue(node.value == [])

        tokens = preprocessor.Lexer("#define FOO string").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.DefineNode))
        self.assertTrue(str(node.identifier) == "FOO")
        self.assertTrue(node.args is None)
        self.assertTrue(len(node.value) == 1)
        self.assertTrue(isinstance(node.value[0], preprocessor.Identifier))

        tokens = preprocessor.Lexer("#define FOO (a, b)").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.DefineNode))
        self.assertTrue(str(node.identifier) == "FOO")
        self.assertTrue(node.args is None)
        self.assertTrue(len(node.value) == 5)
        self.assertTrue(isinstance(node.value[0], preprocessor.Punctuator))
        self.assertTrue(isinstance(node.value[1], preprocessor.Identifier))
        self.assertTrue(isinstance(node.value[2], preprocessor.Punctuator))
        self.assertTrue(isinstance(node.value[3], preprocessor.Identifier))
        self.assertTrue(isinstance(node.value[4], preprocessor.Punctuator))

        tokens = preprocessor.Lexer("#define FOO(a, b)").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.DefineNode))
        self.assertTrue(str(node.identifier) == "FOO")
        self.assertTrue(len(node.args) == 2)
        self.assertTrue(node.value == [])

        tokens = preprocessor.Lexer("#define eprintf(...)").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.DefineNode))
        self.assertTrue(str(node.identifier) == "eprintf")
        self.assertTrue(len(node.args) == 1)
        self.assertTrue(node.args[0].token == "...")

        tokens = preprocessor.Lexer("#define eprintf(args...)").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.DefineNode))
        self.assertTrue(str(node.identifier) == "eprintf")
        self.assertTrue(len(node.args) == 1)
        self.assertTrue(node.args[0].token == "args...")
    def test_self_reference_macros_2(self):
        """Self referencing macros test 2"""

        expected_expansion = [
            preprocessor.Identifier('Unknown', 4, False, 'FOO')
        ]

        def_string = 'FOO=FOO'
        macro = preprocessor.Macro.from_definition_string(def_string)
        tokens = preprocessor.Lexer("FOO").tokenize()
        p = platform.Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(tokens).expand(p)
        self.assertTrue(len(expanded_tokens) == len(expected_expansion))
        for i in range(len(expected_expansion)):
            self.assertEqual(expanded_tokens[i].line,
                             expected_expansion[i].line)
            self.assertEqual(expanded_tokens[i].col, expected_expansion[i].col)
            self.assertEqual(expanded_tokens[i].prev_white,
                             expected_expansion[i].prev_white)
            self.assertEqual(expanded_tokens[i].token,
                             expected_expansion[i].token)

    def test_strings(self):
        """string escapes"""
        expected_str = r'"L + 2-2 \"\\\" \\n\""'
        tokens = preprocessor.Lexer(expected_str).tokenize()
        expected = preprocessor.StringConstant('Unknown', 'Unknown', False,
                                               r'L + 2-2 \"\\\" \\n\"')
        self.assertEqual(tokens[0].token, expected.token)

    def test_else(self):
        """else"""
        tokens = preprocessor.Lexer("#else").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.ElseNode))

    def test_endif(self):
        """endif"""
        tokens = preprocessor.Lexer("#endif").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.EndIfNode))
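    # Mutual recursion: x -> (4 + y) -> (4 + (2 * x)), at which point x is
    # already being expanded and is left as a plain identifier; y behaves
    # symmetrically.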
    def test_indirect_self_reference_macros(self):
        """ Indirect self referencing macros test"""

        x_expected_expansion = [
            preprocessor.Punctuator('Unknown', 2, False, '('),
            preprocessor.NumericalConstant('Unknown', 3, False, '4'),
            preprocessor.Operator('Unknown', 5, True, '+'),
            preprocessor.Punctuator('Unknown', 2, False, '('),
            preprocessor.NumericalConstant('Unknown', 3, False, '2'),
            preprocessor.Operator('Unknown', 5, True, '*'),
            preprocessor.Identifier('Unknown', 7, True, 'x'),
            preprocessor.Punctuator('Unknown', 8, False, ')'),
            preprocessor.Punctuator('Unknown', 8, False, ')')
        ]

        y_expected_expansion = [
            preprocessor.Punctuator('Unknown', 2, False, '('),
            preprocessor.NumericalConstant('Unknown', 3, False, '2'),
            preprocessor.Operator('Unknown', 5, True, '*'),
            preprocessor.Punctuator('Unknown', 2, False, '('),
            preprocessor.NumericalConstant('Unknown', 3, False, '4'),
            preprocessor.Operator('Unknown', 5, True, '+'),
            preprocessor.Identifier('Unknown', 7, True, 'y'),
            preprocessor.Punctuator('Unknown', 8, False, ')'),
            preprocessor.Punctuator('Unknown', 8, False, ')')
        ]

        x_string = 'x=(4 + y)'
        x_macro = preprocessor.Macro.from_definition_string(x_string)
        y_string = 'y=(2 * x)'
        y_macro = preprocessor.Macro.from_definition_string(y_string)

        x_tokens = preprocessor.Lexer("x").tokenize()
        y_tokens = preprocessor.Lexer("y").tokenize()

        p = platform.Platform("Test", self.rootdir)
        p._definitions = {x_macro.name: x_macro, y_macro.name: y_macro}

        x_expanded_tokens = preprocessor.MacroExpander(x_tokens).expand(p)

        y_expanded_tokens = preprocessor.MacroExpander(y_tokens).expand(p)

        self.assertTrue(len(x_expanded_tokens) == len(x_expected_expansion))
        for i in range(len(x_expected_expansion)):
            self.assertEqual(x_expanded_tokens[i].line,
                             x_expected_expansion[i].line)
            self.assertEqual(x_expanded_tokens[i].col,
                             x_expected_expansion[i].col)
            self.assertEqual(x_expanded_tokens[i].prev_white,
                             x_expected_expansion[i].prev_white)
            self.assertEqual(x_expanded_tokens[i].token,
                             x_expected_expansion[i].token)

        self.assertTrue(len(y_expanded_tokens) == len(y_expected_expansion))
        for i in range(len(y_expected_expansion)):
            self.assertEqual(y_expanded_tokens[i].line,
                             y_expected_expansion[i].line)
            self.assertEqual(y_expanded_tokens[i].col,
                             y_expected_expansion[i].col)
            self.assertEqual(y_expanded_tokens[i].prev_white,
                             y_expected_expansion[i].prev_white)
            self.assertEqual(y_expanded_tokens[i].token,
                             y_expected_expansion[i].token)

    def test_pragma(self):
        """pragma"""
        tokens = preprocessor.Lexer("#pragma anything").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.PragmaNode))

    def test_undef(self):
        """undef"""
        tokens = preprocessor.Lexer("#undef FOO").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.UndefNode))

    def test_if(self):
        """if"""
        tokens = preprocessor.Lexer("#if FOO == BAR").tokenize()
        node = preprocessor.DirectiveParser(tokens).parse()
        self.assertTrue(isinstance(node, preprocessor.IfNode))
        self.assertTrue(len(node.tokens) == 3)
    def test_character(self):
        """characters"""
        tokens = preprocessor.Lexer("'c'").tokenize()
        self.assertTrue(len(tokens) == 1)
        self.assertTrue(isinstance(tokens[0], preprocessor.CharacterConstant))

    def test_power(self):
        """integer power"""
        # An expression cannot begin with the binary "*" operator, so
        # evaluation must raise ParseError.
        with self.assertRaises(preprocessor.ParseError):
            tokens = preprocessor.Lexer("* 10").tokenize()
            preprocessor.ExpressionEvaluator(tokens).evaluate()
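
    # The evaluator appears to enforce C-style 64-bit integer limits: both
    # an overflowing product and a literal too large for 64 bits raise
    # OverflowError (inferred from the two tests below).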
    def test_overflow(self):
        """integer overflow"""
        with self.assertRaises(OverflowError):
            tokens = preprocessor.Lexer(
                "0xFFFFFFFFFFFFFFFF * 0xFFFFFFFFFFFFFFFF").tokenize()
            preprocessor.ExpressionEvaluator(tokens).evaluate()

    def test_oversized_constant(self):
        """oversized constant"""
        with self.assertRaises(OverflowError):
            tokens = preprocessor.Lexer(
                "10000000000000000000000000000000000000").tokenize()
            preprocessor.ExpressionEvaluator(tokens).evaluate()
    def test_string(self):
        """strings"""
        tokens = preprocessor.Lexer("\"this is a string constant\"").tokenize()
        self.assertTrue(len(tokens) == 1)
        self.assertTrue(isinstance(tokens[0], preprocessor.StringConstant))
    def test_identifier(self):
        """identifiers"""
        tokens = preprocessor.Lexer("this is a string of words").tokenize()
        self.assertTrue(len(tokens) == 6)
        self.assertTrue(
            all([isinstance(t, preprocessor.Identifier) for t in tokens]))
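

if __name__ == "__main__":
    unittest.main()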