def setconfig(self, config):
    """Apply parser configuration and open an authenticated AniDB session.

    Delegates to Parser.setconfig to store *config*, then builds the AniDB
    interface from the aP* settings and logs in immediately so later API
    calls have a live session.
    """
    Parser.setconfig(self, config)
    # NOTE(review): the original mixed `config` and `self.config` reads for
    # sibling settings; Parser.setconfig is assumed to store the dict on
    # self.config (the original's very first read was self.config['aPuser']),
    # so every lookup now goes through self.config for consistency — confirm
    # against Parser.setconfig.
    self.intr = anidb.AniDBInterface(
        user=self.config['aPuser'],
        password=self.config['aPpassword'],
        dburl=self.config['aPdburl'],
        session=self.config['aPsession'],
    )
    self.amask = self.config['aPdbamask']
    self.fmask = self.config['aPdbfmask']
    self.folder = self.config['aPfolder']
    self.moves = []
    self.intr.auth(self.intr.user, self.intr.password)
def main():
    """Compile the C source passed on the command line and print its assembly."""
    if len(argv) != 2:
        error(f'引数の個数が正しくありません {argv}')
    source = argv[1]
    # Pipeline: source text -> tokens -> AST -> assembly lines.
    token_ctx = Tokenizer(source).tokenize()
    node_ctx = Parser(token_ctx).parse()
    for line in Generator(node_ctx).generate():
        print(line)
def do_POST(self):
    """Handle POST requests.

    POST /graphs: read the request body, run it through Parser, and return
    the parsed result as JSON.  Any other path gets a 404; unexpected
    failures get a 500 with the error text in the body.
    """
    try:
        if self.path == "/graphs":
            content_length = int(self.headers.get("Content-Length"))
            body = self.rfile.read(content_length)
            new_data = Parser(body).parse()
            self.send_response(200)
            # BUG FIX: headers are only valid after send_response(); the
            # original emitted the CORS header before the status line.
            self.send_header("Access-Control-Allow-Origin", "*")
            self.send_header("Content-Type", "application/json; charset=utf-8")
            self.end_headers()
            self.wfile.write(new_data.encode())
        else:
            self.send_response(404)
            self.send_header("Access-Control-Allow-Origin", "*")
            self.end_headers()  # BUG FIX: original wrote the body without terminating headers
            self.wfile.write("\nUnknown service!".encode())
    except Exception as e:
        self.send_response(500)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()
        # BUG FIX: original did `"\n" + str(e).encode()` (str + bytes),
        # which itself raises TypeError; encode the whole string instead.
        self.wfile.write(("\n" + str(e)).encode())
self.text += 'j compare_str_char\n' self.text += 'finish_compare_str:\n' offset = self.var_offset[self.current_function.name][node.result] self.text += f'sw $a0, {offset}($sp)\n' # store comparison result in local if __name__ == '__main__': import sys from cparser import Parser from lexer import Lexer from semantic_analyzer import SemanticAnalyzer from cool_to_cil import COOLToCILVisitor lexer = Lexer() parser = Parser() if len(sys.argv) > 1: input_file = sys.argv[1] with open(input_file, encoding="utf-8") as file: cool_program_code = file.read() lexer.input(cool_program_code) for token in lexer: pass if lexer.errors: print(lexer.errors[0]) exit(1)
builder.visit(self.ast) #'=============== CHECKING TYPES ================' scope = None if not self.errors: checker = TypeChecker(context, self.errors) scope = checker.visit(self.ast) return context, scope if __name__ == '__main__': import sys from cparser import Parser parser = Parser() if len(sys.argv) > 1: input_file = sys.argv[1] with open(input_file, encoding="utf-8") as file: cool_program_code = file.read() parse_result = parser.parse(cool_program_code) if parser.errors: print(parser.errors[0]) exit(1) semantic_analyzer = SemanticAnalyzer(parse_result) semantic_analyzer.analyze()
from cparser import Parser
import sys
from symboltable import SymbolTable
import argparse

# Command-line front end for the cs660 compiler.
# for more information goto https://docs.python.org/2/library/argparse.html
parser = argparse.ArgumentParser(description='Compiler for cs660.')
parser.add_argument("source", nargs='?', type=str,
                    help="Specifies the input source file.")
parser.add_argument("-p", nargs=1, default=" ", type=str, dest="parselogfile",
                    metavar='Parse Log Output',
                    help="Specifies the parse log output file for production shifts and reduces.")
parser.add_argument("-t", nargs=1, default="tokenfile.txt", type=str, dest="tokenfile",
                    metavar='Token Log Output',
                    help="The token log output file specifier.")
parser.add_argument("-v", default="Version 1.0.0", type=str, metavar='Version information.')
args = parser.parse_args()

if args.source is not None:
    # Read the whole source file; `with` guarantees the handle is closed
    # even if the read raises (the original leaked it in that case).
    with open(args.source, 'r') as input_file:
        data = input_file.read()

    # Build and call the scanner.  nargs=1 yields a one-element list when
    # the flag is given, but the default is a plain string -- normalise.
    if not isinstance(args.tokenfile, str):
        args.tokenfile = args.tokenfile[0]
    # parselogfile: default " " means "no parse log"; " "[0] is still " ",
    # so indexing is safe in both the given and default cases.
    scan = Parser(data, args.parselogfile != " ", args.parselogfile[0], args.tokenfile)
    st = SymbolTable()
    scan.set_symbol_table(st)
    scan.run()
def compile(args):
    """Compile args.source_file, report timing/errors, and handle follow-ups.

    Flags on *args* drive the side effects: saving the parse tree, symbol
    table, tokens, and error files, and optionally executing the compiled
    program through the bundled interpreter.
    """
    print("Compiling", args.source_file)
    SymbolTableManager.init()
    MemoryManager.init()
    parser = Parser(args.source_file)
    start = time.time()
    parser.parse()
    stop = time.time() - start
    print(f"Compilation took {stop:.6f} s")
    if not SymbolTableManager.error_flag:
        print("Compilation successful!")
    else:
        print("Compilation failed due to the following errors:\n")
        print(parser.scanner.lexical_errors)
        print(parser.syntax_errors)
        print(parser.semantic_analyzer.semantic_errors)
    _save_artifacts(args, parser)
    if args.run and not SymbolTableManager.error_flag:
        _execute_program(args)


def _save_artifacts(args, parser):
    """Persist whichever compiler artifacts the CLI flags ask for."""
    if args.abstract_syntax_tree:
        parser.save_parse_tree()
    if args.symbol_table:
        parser.scanner.save_symbol_table()
    if args.tokens:
        parser.scanner.save_tokens()
    if args.error_files:
        parser.save_syntax_errors()
        parser.scanner.save_lexical_errors()
        parser.semantic_analyzer.save_semantic_errors()
    # NOTE(review): assumed unconditional -- the run step needs output.txt
    # regardless of the error-file flags; the collapsed original was
    # ambiguous about whether this sat inside the error_files branch.
    parser.code_generator.save_output()


def _execute_program(args):
    """Run the compiled program through the platform's tester binary."""
    print("Executing compiled program")
    if os.name == "nt":
        tester_file = os.path.join(script_dir, "interpreter", "tester_Windows.exe")
    elif os.name == "posix":
        tester_file = os.path.join(script_dir, "interpreter", "tester_Linux.out")
    else:
        tester_file = os.path.join(script_dir, "interpreter", "tester_Mac.out")
    output_file = os.path.join(script_dir, "output", "output.txt")
    output_dir = os.path.dirname(output_file)
    if os.path.exists(output_file):
        # The virtual-memory cap only works on POSIX (preexec_fn is
        # unsupported on Windows).
        preexec_fn = limit_virtual_memory if os.name != "nt" else None
        stderr = sp.PIPE if not args.verbose else None
        start = time.time()
        try:
            tester_output = sp.check_output(
                tester_file, cwd=output_dir, stderr=stderr, timeout=10,
                preexec_fn=preexec_fn).decode("utf-8")
        except sp.TimeoutExpired:
            print("RuntimeError: Execution timed out!")
        else:
            if not args.verbose:
                # Keep only the interpreter's PRINT lines in quiet mode.
                tester_output = "\n".join([
                    line.replace("PRINT", "").strip()
                    for line in tester_output.splitlines()
                    if line.startswith("PRINT")
                ])
            stop = time.time() - start
            print(f"Execution took {stop:.6f} s")
            print("Program output:")
            print(tester_output)
def __init__(self):
    """Set up parser state: base-class init, empty move list, misc folder name."""
    Parser.__init__(self)
    self.moves = []
    self.miscLoc = "Misc"