def main():
    """Lex and parse a hard-coded test program, then print the parse result.

    Prints ``False`` when the parser returns no result, otherwise prints the
    second element of the parser's result tuple.

    NOTE(review): the original CLI handling (usage message, ``-t`` for stdin,
    or a file-name argument) was disabled by wrapping it in a string literal;
    that dead code has been removed. Confirm whether the CLI path should be
    restored before shipping — the input below is debug-only.
    """
    # Hard-coded debug input; replaces the former argv/stdin handling.
    string = """
int main(){
int x;
x = 5;
print(0, x, false);
return 0;
}
"""
    lexer = MyLexer()
    token_list = []
    for token in lexer.tokenize(string):
        # Record each token's column so diagnostics can point at it.
        token.index = find_column(string, token)
        token_list.append(token)
    parser = MyParser(token_list)
    out = parser.parse(iter(token_list))
    if out is None:
        print(False)
    else:
        print(out[1])
def toJson(data):
    """Tokenize and parse *data*, returning a pretty-printed JSON string.

    On a parse failure (``TypeError``) returns ``'Syntax error!'``; on an
    unexpected end of input (``EOFError``) returns ``'EOF error!'``.
    """
    lex = MyLexer()
    psr = MyParser()
    try:
        tree = psr.parse(lex.tokenize(data))
        serialized = json.dumps(normalize(tree), indent=4, ensure_ascii=False)
        # Strip escaped quotes left over from the parser's string handling.
        for escaped in ('"\\\"', '\\""'):
            serialized = serialized.replace(escaped, '"')
        return serialized
    except TypeError:
        return 'Syntax error!'
    except EOFError:
        return 'EOF error!'
#!/usr/bin/env python
from MyParser import MyParser


def _run():
    """Parse Books.xml and print the books it contains."""
    books = MyParser("Books.xml")
    books.parse()
    books.print_books()


if __name__ == '__main__':
    _run()
def parse(token_list: list) -> tuple:
    """Run ``MyParser`` over *token_list* and return its result.

    NOTE(review): callers elsewhere check the result against ``None``,
    so the annotated ``tuple`` return appears to be nullable — confirm.
    """
    return MyParser(token_list).parse(iter(token_list))
def test_PathToFileIsSetAndFoundFileThatIsNotEmpty(self):
    """Parsing Books.xml must yield a non-empty internal book list."""
    books_parser = MyParser("Books.xml")
    books_parser.parse()
    self.assertNotEqual(0, len(books_parser._books_list))
} """ # 词法分析器获得输入 lexer.input(source_str) # 标记化 # for tok in lexer: # print(tok) # 语法分析 # 构建语法分析器 parser = MyParser("AST") # 语法分析器分析输入 root_node = parser.parse(source_str, lexer=lexer) # 语义分析器构建符号表和错误检查 my_semantic_analyzer = MySemanticAnalyzer() my_semantic_analyzer.build_semantic_analyzer(root_node) if not my_semantic_analyzer.error: # 代码生成初始化 build_code_generator(root_node) print(emit_util.result) # 打印语法树 # root_node.print() # 打印作用域和符号表信息 # print_scope()