Example #1
    def test_include1(self):
        # Resolve the example config relative to this test module.
        dir_path = os.path.dirname(os.path.realpath(__file__))
        file_path = os.path.join(
            dir_path, "examples/include/01_include_test_config.cpp")
        with open(file_path, 'r', encoding='utf-8', newline=None) as fp:
            input_data = fp.read()
        # Lex the source, expand the include directives, and regenerate text
        # from the resulting token stream.
        tokens = Lexer(input_data, file_path).tokenize()
        preprocessor = PreProcessor(tokens, file_path)
        preprocessor.preprocess()
        tokens = preprocessor.tokens
        output = generator.from_tokens(tokens)

        expected_output = """1_include_test_file1_line1
1_include_test_file1_line2
1_include_test_file1_line3

1_include_test_file2_line1
1_include_test_file2_line2
1_include_test_file2_line3
class Foo {};
1_include_test_file3_line1
1_include_test_file3_line2
1_include_test_file3_line3"""

        self.assertEqual(expected_output, output)
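
The same Lexer -> PreProcessor -> generator pipeline also accepts an in-memory string, as the remaining examples do via lexer.STRING_INPUT_FILE. A minimal standalone sketch follows; only the class and function names and lexer.STRING_INPUT_FILE appear in the snippets, so the import paths are assumptions.

    # Minimal sketch: drive the pipeline on an in-memory string instead of a file.
    # The import paths are assumptions and may differ in the actual project.
    import lexer
    import generator
    from lexer import Lexer
    from preprocessor import PreProcessor

    source = "class Foo {};\n"
    tokens = Lexer(source, lexer.STRING_INPUT_FILE).tokenize()
    pp = PreProcessor(tokens, lexer.STRING_INPUT_FILE)
    pp.preprocess()
    print(generator.from_tokens(pp.tokens))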
Example #2
    def test_escaped_newlines(self):
        # The input is a lone backslash followed by a newline, i.e. an
        # escaped newline with nothing else around it.
        input_data = """\\
"""
        tokens = Lexer(input_data, lexer.STRING_INPUT_FILE).tokenize()
        preprocessor = PreProcessor(tokens, lexer.STRING_INPUT_FILE)
        # Run only the escaped-newline removal pass rather than preprocess().
        preprocessor._remove_escaped_newlines()
        output = generator.from_tokens(preprocessor.tokens)
        expected_output = ""
        self.assertEqual(expected_output, output)
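
A splice between two pieces of text would be expected to join them. The following extra case is hypothetical: it assumes the pass behaves like the C preprocessor's line splicing, and the expected string is illustrative rather than taken from the project's test suite.

    def test_escaped_newline_joins_lines(self):
        # Hypothetical case: assumes the backslash-newline between "foo" and
        # "bar" is spliced away, as in the C preprocessor. The expected value
        # is illustrative only.
        input_data = "foo\\\nbar"
        tokens = Lexer(input_data, lexer.STRING_INPUT_FILE).tokenize()
        preprocessor = PreProcessor(tokens, lexer.STRING_INPUT_FILE)
        preprocessor._remove_escaped_newlines()
        output = generator.from_tokens(preprocessor.tokens)
        self.assertEqual("foobar", output)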
Example #3
    def _test_preprocessor(self, input_data, expected_output):
        # Shared helper: lex the input, run the full preprocessor, regenerate
        # source text, and compare it with the expected output.
        tokens = Lexer(input_data, lexer.STRING_INPUT_FILE).tokenize()
        preprocessor = PreProcessor(tokens, lexer.STRING_INPUT_FILE)
        preprocessor.preprocess()
        output = generator.from_tokens(preprocessor.tokens)
        self.assertEqual(expected_output, output)
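
A hypothetical caller of this helper; the strings below are illustrative and assume that source text containing no directives passes through the preprocessor unchanged.

    def test_plain_text_passes_through(self):
        # Hypothetical case: assumes directive-free source is reproduced
        # verbatim; the strings are illustrative, not from the project suite.
        self._test_preprocessor("class Foo {};\n", "class Foo {};\n")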
Example #4
    def _test_generator_tokens(self, input_data, expected_output):
        # Shared helper: lex the input and regenerate source text directly
        # from the token stream, without running the preprocessor.
        tokens = Lexer(input_data, lexer.STRING_INPUT_FILE).tokenize()
        output = generator.from_tokens(tokens)
        self.assertEqual(expected_output, output)
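
Likewise, a hypothetical caller of the generator helper, assuming that lexing followed by generation reproduces the input verbatim.

    def test_generator_round_trip(self):
        # Hypothetical case: assumes the lex/generate round trip is lossless
        # for this input; the string is illustrative, not from the project suite.
        self._test_generator_tokens("int x = 1;\n", "int x = 1;\n")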