def _load(data=None):
    """Reconstruct the serialized (parser, lexer) pair for this package.

    Parameters
    ----------
    data : bytes | None
        Pickled payload containing ``(lark_data, lexer_terminals)``.  When
        ``None``, the payload is read from the ``'bin'`` file shipped next
        to this module (``__path__ + 'bin'``).

    Returns
    -------
    tuple
        ``(parser, lexer)`` — the ``.parser`` attribute of the deserialized
        Lark instance, and a lexer rebuilt from the serialized terminals.
    """
    if data is None:
        # NOTE(review): pickle executes arbitrary code on load — this must
        # only ever read the trusted file bundled with the package.
        # `with` fixes the original's leaked file handle.
        with open(__path__ + 'bin', 'rb') as payload_file:
            data = pickle.load(payload_file)
    else:
        data = pickle.loads(data)

    lark_data, lexer_terminals = data
    parser = Lark.load(BytesIO(lark_data)).parser
    lexer = get_lexer_from_tokens(
        [TerminalDef.deserialize(token, {}) for token in lexer_terminals]
    )
    return parser, lexer
def create_parser(self, rule: "str | None" = None, read_cache: bool = True, write_cache: bool = True):
    """Build an LALR Lark parser for ``grammar.lark``, using a byte cache when allowed.

    Parameters
    ----------
    rule : str | None
        Start rule for the grammar; ``None`` falls back to ``'start'``.
    read_cache : bool
        If True and a cache file exists, load the parser from it instead of
        rebuilding the LALR tables.
    write_cache : bool
        If True, serialize a freshly built parser to the cache path.

    Returns
    -------
    Lark
        The ready-to-use parser instance.
    """
    grammar_path = Path(__file__).parent / "grammar.lark"
    # Explicit encoding: the cache key is derived from this text, so the
    # read must not depend on the platform's default codec.
    with open(grammar_path, "r", encoding="utf-8") as grammar_file:
        grammar_content = grammar_file.read()

    # Only compute a cache location when some form of caching is enabled.
    cache_path = (
        self.get_cache_path(rule, grammar_content)
        if read_cache or write_cache
        else None
    )

    if read_cache and cache_path.exists():
        # NOTE(review): Lark.load deserializes via pickle — only load cache
        # files this application wrote itself.
        with open(cache_path, "rb") as cache_file:
            return Lark.load(cache_file)

    parser = Lark(
        grammar_content,
        start=rule or 'start',
        parser='lalr',
        debug=False,
        propagate_positions=True,
        maybe_placeholders=True,
    )
    # Early return above guarantees we never re-save a cache we just loaded.
    if write_cache:
        with open(cache_path, "wb") as cache_file:
            parser.save(cache_file)
    return parser
%import common.WS -> WHITESPACE %ignore WHITESPACE %ignore SINGLE_LINE_COMMENT %ignore MULTI_LINE_COMMENT """ keyWords = [ 'void', 'int', 'double', 'bool', 'string', 'class', 'interface', 'null', 'this', 'extends', 'implements', 'for', 'while', 'if', 'else', 'return', 'break', 'new', 'NewArray', 'Print', 'ReadInteger', 'ReadLine' ] # parser = Lark(grammar, parser='lalr', debug=False) # parser.save(open("parser.lark", "wb")) parser = Lark.load(open("parser.lark", "rb")) with open("Tests\input.d", "r") as input_file: code = input_file.read() x = parser.parse(code) parseTree = ParseTree(x) # s = 0 # for j in parseTree.nodes: # print(s, " ", j) # s += 1 sys.stdout = open("mips.s", "w") cgen(parseTree) sys.stdout.close()