def parse_cmp_expr():
    expr = parse_add_expr()
    while is_cmp_op():
        op = phlex.token.t_type
        next_token()
        expr = ExprBinary(op, expr, parse_add_expr())
    return expr
def parse_mul_expr():
    expr = parse_range_expr()
    while is_mul_op():
        op = phlex.token.t_type
        next_token()
        expr = ExprBinary(op, expr, parse_range_expr())
    return expr
def parse_base_stmt():
    # Expression statement, optionally followed by ++/-- or an assignment operator.
    expr = parse_expr()
    if is_token(TOKEN_INC) or is_token(TOKEN_DEC):
        op = phlex.token.t_type
        next_token()
        return StmtAssign(op, expr, None)
    elif is_assign_op():
        op = phlex.token.t_type
        next_token()
        return StmtAssign(op, expr, parse_expr())
    return StmtExpr(expr)
def parse_type():
    # Base type followed by any number of array ([expr] or []) and pointer (*) suffixes.
    type_ = parse_base_type()
    while is_token(TOKEN_LBRACK) or is_token(TOKEN_MUL):
        if match_token(TOKEN_LBRACK):
            expr = None
            if not is_token(TOKEN_RBRACK):
                expr = parse_expr()
            expect_token(TOKEN_RBRACK)
            type_ = TypespecArray(type_, expr)
        else:
            next_token()
            type_ = TypespecPtr(type_)
    return type_
def parse_base_expr():
    # Postfix expressions: call, index, and field access on an operand.
    expr = parse_operand_expr()
    while is_token(TOKEN_LPAREN) or is_token(TOKEN_LBRACK) or is_token(TOKEN_DOT):
        if match_token(TOKEN_LPAREN):
            call_args = []
            if not is_token(TOKEN_RPAREN):
                call_args.append(parse_expr())
                while match_token(TOKEN_COMMA):
                    call_args.append(parse_expr())
            expect_token(TOKEN_RPAREN)
            expr = ExprCall(expr, call_args)
        elif match_token(TOKEN_LBRACK):
            index = parse_expr()
            expect_token(TOKEN_RBRACK)
            expr = ExprIndex(expr, index)
        else:
            next_token()
            name = parse_name()
            expr = ExprField(expr, name)
    return expr
def parse_operand_expr():
    # Operands: literals, names, compound literals, parenthesized expressions, and sizeof.
    if is_token(TOKEN_INT):
        int_val = phlex.token.t_val
        next_token()
        return ExprInt(int_val)
    elif is_token(TOKEN_FLOAT):
        float_val = phlex.token.t_val
        next_token()
        return ExprFloat(float_val)
    elif is_token(TOKEN_STR):
        str_val = phlex.token.t_val
        next_token()
        return ExprStr(str_val)
    elif is_token(TOKEN_NAME):
        name = phlex.token.t_val
        next_token()
        if is_token(TOKEN_LBRACE):
            return parse_compound_expr(TypespecName(name))
        return ExprName(name)
    elif match_token(TOKEN_LPAREN):
        if match_token(TOKEN_COLON):
            # (:type){...} compound literal: close the paren before the braces.
            type_ = parse_type()
            expect_token(TOKEN_RPAREN)
            return parse_compound_expr(type_)
        expr = parse_expr()
        expect_token(TOKEN_RPAREN)
        return expr
    elif is_token(TOKEN_LBRACE):
        return parse_compound_expr(None)
    elif match_keyword('sizeof'):
        expect_token(TOKEN_LPAREN)
        if match_token(TOKEN_COLON):
            type_ = parse_type()
            expect_token(TOKEN_RPAREN)
            return ExprSizeofType(type_)
        expr = parse_expr()
        expect_token(TOKEN_RPAREN)
        return ExprSizeofExpr(expr)
    else:
        syntax_error('Unexpected token %s in expression', str(phlex.token))
def match_keyword(keyword):
    # Consume the current token if it is the given keyword.
    assert type(keyword) is str
    if phlex.token.t_type == TOKEN_KEYWORD and phlex.token.t_val == keyword:
        phlex.next_token()
        return True
    return False
def parse_unary_expr():
    if is_unary_op():
        op = phlex.token.t_type
        next_token()
        return ExprUnary(op, parse_unary_expr())
    return parse_base_expr()
def match_token(t_type):
    if is_token(t_type):
        phlex.next_token()
        return True
    return False
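
# The functions above also rely on a couple of primitives that are not shown
# in this section. A minimal sketch of what they might look like, assuming
# `phlex` exposes the current token as `phlex.token` (with `t_type`/`t_val`
# fields) and advances with `phlex.next_token()`, as in match_token and
# match_keyword above:

def is_token(t_type):
    # Peek without consuming: does the current token have the given type?
    return phlex.token.t_type == t_type

def expect_token(t_type):
    # Consume a required token, or report a syntax error if it is missing.
    if not match_token(t_type):
        syntax_error('Expected token %s, got %s', str(t_type), str(phlex.token))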