def parse_name_list(self, name=None):
    """Parse a comma-separated name list into a list of expressions.

    If *name* is given it seeds the list as-is; otherwise the first
    identifier is consumed from the lexer and wrapped in a StringExp.
    """
    def consume_name():
        # Consume one IDENTIFIER token and wrap its text as a StringExp.
        tok = self.lex.next_token_of_kind(lexer.TokenKind.IDENTIFIER)
        return ast.StringExp(tok.data)

    names = [name] if name else [consume_name()]
    while self.lex.look_ahead().kind == lexer.TokenKind.SEP_COMMA:
        self.lex.next_token()  # skip ','
        names.append(consume_name())
    return names
def parse_simple_exp(self):
    """Parse a simple expression, dispatching on one token of look-ahead.

    Handles literals (nil/true/false/number/string/...), function
    definitions and table constructors; anything else falls through to
    parse_prefix_exp.
    """
    Kind = lexer.TokenKind
    token = self.lex.look_ahead()
    kind = token.kind

    # Single-token literals: consume the token and build the node here.
    if kind == Kind.KW_NIL:
        self.lex.next_token()
        return ast.NilExp()
    if kind in (Kind.KW_TRUE, Kind.KW_FALSE):
        self.lex.next_token()
        return ast.BoolConstExp(kind == Kind.KW_TRUE)
    if kind == Kind.STRING:
        self.lex.next_token()
        return ast.StringExp(token.data)
    if kind == Kind.VARARG:
        self.lex.next_token()
        return ast.VarargExp()

    # Composite forms: delegate without consuming the look-ahead token.
    if kind == Kind.NUMBER:
        return self.parse_number_exp()
    if kind == Kind.KW_FUNCTION:
        return self.parse_func_def_exp()
    if kind == Kind.SEP_LCURLY:
        return self.parse_table_constructor_exp()
    return self.parse_prefix_exp()
def parse_local_func_def_stat(self):
    """Parse ``local function Name funcbody`` into a LocalDeclStat.

    The leading ``local`` keyword is assumed to be already consumed by
    the caller.
    """
    self.lex.next_token_of_kind(lexer.TokenKind.KW_FUNCTION)
    name_token = self.lex.next_token_of_kind(lexer.TokenKind.IDENTIFIER)
    # Desugars to: local Name = function funcbody (no implicit self).
    func_exp = self.parse_func_body_exp(False)
    return ast.LocalDeclStat([ast.StringExp(name_token.data)], [func_exp])
def parse_func_body_exp(self, has_colon):
    """Parse a function body: '(' parlist ')' block 'end'.

    When *has_colon* is true (method definition sugar ``a:f(...)``),
    an implicit 'self' is prepended to the formal parameter list.
    """
    self.lex.next_token_of_kind(lexer.TokenKind.SEP_LPAREN)
    params, is_var_arg = self.parse_parlist()
    self.lex.next_token_of_kind(lexer.TokenKind.SEP_RPAREN)
    if has_colon:
        params = [ast.StringExp('self')] + params
    block = self.parse_block()
    self.lex.next_token_of_kind(lexer.TokenKind.KW_END)
    return ast.FunctionDefExp(params, is_var_arg, block)
def parse_func_name_exp(self):
    """Parse a function name: Name {'.' Name} [':' Name].

    Returns a tuple (exp, has_colon) where *exp* is a NameExp or nested
    TableAccessExp chain and *has_colon* marks a method definition.
    """
    def read_key():
        # Consume an IDENTIFIER and turn it into a string key expression.
        tok = self.lex.next_token_of_kind(lexer.TokenKind.IDENTIFIER)
        return ast.StringExp(tok.data)

    exp = ast.NameExp(
        self.lex.next_token_of_kind(lexer.TokenKind.IDENTIFIER).data)
    while self.lex.look_ahead().kind == lexer.TokenKind.SEP_DOT:
        self.lex.next_token()  # skip '.'
        exp = ast.TableAccessExp(exp, read_key())

    has_colon = self.lex.look_ahead().kind == lexer.TokenKind.SEP_COLON
    if has_colon:
        self.lex.next_token()  # skip ':'
        exp = ast.TableAccessExp(exp, read_key())
    return exp, has_colon
def parse_prefix_exp(self):
    """Parse a prefix expression: a name or parenthesized expression
    followed by any chain of field accesses, index accesses, method
    calls and function calls.

    Returns the resulting expression node (NameExp, TableAccessExp or
    FunctionCallExp).
    """
    look_token = self.lex.look_ahead()
    if look_token.kind == lexer.TokenKind.SEP_LPAREN:
        # '(' exp ')'
        self.lex.next_token()
        exp = self.parse_exp(0)[1]
        self.lex.next_token_of_kind(lexer.TokenKind.SEP_RPAREN)
    else:
        # bare Name
        name = self.lex.next_token_of_kind(lexer.TokenKind.IDENTIFIER)
        exp = ast.NameExp(name.data)
    # Consume trailing suffixes until none match.
    while True:
        look_token = self.lex.look_ahead()
        if look_token.kind == lexer.TokenKind.SEP_DOT:
            # '.' Name  ->  table access with a string key
            self.lex.next_token()
            idx_exp = ast.StringExp(
                self.lex.next_token_of_kind(
                    lexer.TokenKind.IDENTIFIER).data)
            exp = ast.TableAccessExp(exp, idx_exp)
        elif look_token.kind == lexer.TokenKind.SEP_COLON:
            # ':' Name args  ->  method call; the receiver (the exp
            # parsed so far) is passed as the first argument.
            self.lex.next_token()
            args_exp = [exp]
            idx_exp = ast.StringExp(
                self.lex.next_token_of_kind(
                    lexer.TokenKind.IDENTIFIER).data)
            exp = ast.TableAccessExp(exp, idx_exp)
            args_exp.extend(self.parse_func_args())
            exp = ast.FunctionCallExp(exp, args_exp)
        elif look_token.kind in [
                lexer.TokenKind.SEP_LPAREN, lexer.TokenKind.SEP_LCURLY,
                lexer.TokenKind.STRING
        ]:
            # '(' args ')' | table-constructor arg | string arg
            args_exp = self.parse_func_args()
            exp = ast.FunctionCallExp(exp, args_exp)
        elif look_token.kind == lexer.TokenKind.SEP_LBRACK:
            # '[' exp ']'  ->  table access with a computed key
            self.lex.next_token()
            idx_exp = self.parse_exp(0)[1]
            exp = ast.TableAccessExp(exp, idx_exp)
            self.lex.next_token_of_kind(lexer.TokenKind.SEP_RBRACK)
        else:
            break
    return exp
def parse_parlist(self):
    """Parse a formal parameter list.

    Returns (parlist, is_var_arg): the list of StringExp parameter
    names and whether the list ends with '...'.
    """
    Kind = lexer.TokenKind
    params = []

    ahead = self.lex.look_ahead().kind
    if ahead == Kind.SEP_RPAREN:
        # Empty parameter list: leave ')' for the caller to consume.
        return params, False
    if ahead == Kind.VARARG:
        # Sole '...' parameter.
        self.lex.next_token()
        return params, True

    params.append(
        ast.StringExp(
            self.lex.next_token_of_kind(Kind.IDENTIFIER).data))
    while self.lex.look_ahead().kind == Kind.SEP_COMMA:
        self.lex.next_token()  # skip ','
        if self.lex.look_ahead().kind != Kind.IDENTIFIER:
            # '...' is only legal as the final parameter.
            self.lex.next_token_of_kind(Kind.VARARG)
            return params, True
        params.append(
            ast.StringExp(
                self.lex.next_token_of_kind(Kind.IDENTIFIER).data))
    return params, False
def parse_field(self):
    """Parse one field of a table constructor.

    Grammar: field ::= '[' exp ']' '=' exp | Name '=' exp | exp

    Returns a (key_exp, val_exp) pair; positional (list-style) fields
    return NilExp() as the key.

    Raises:
        Exception: if the left-hand side of '=' is not a bare name.
    """
    if self.lex.look_ahead().kind == lexer.TokenKind.SEP_LBRACK:
        # '[' exp ']' '=' exp
        self.lex.next_token()
        key_exp = self.parse_exp(0)[1]
        self.lex.next_token_of_kind(lexer.TokenKind.SEP_RBRACK)
        self.lex.next_token_of_kind(lexer.TokenKind.OP_ASSIGN)
        val_exp = self.parse_exp(0)[1]
        return key_exp, val_exp

    exp = self.parse_exp(0)[1]
    if self.lex.look_ahead().kind == lexer.TokenKind.OP_ASSIGN:
        # Name '=' exp: the expression before '=' must be a bare name.
        if not isinstance(exp, ast.NameExp):
            # Fix: the original raised "% token" with an undefined name
            # 'token', producing a NameError instead of the intended
            # syntax error. Report the offending look-ahead token.
            raise Exception(
                "syntax error near '%s'" % self.lex.look_ahead().data)
        self.lex.next_token()
        key_exp = ast.StringExp(exp.id_name)
        val_exp = self.parse_exp(0)[1]
        return key_exp, val_exp

    # Positional field: no explicit key.
    return ast.NilExp(), exp