def while_statement(token):
    """
    Implements recursive descent for the rule:
    <while_statement> ==> 34 TokenType.KeywordWhile TokenType.OpenParen
        <expression> TokenType.CloseParen <code_block>
    """
    if token.t_type == TokenType.KeywordWhile:
        print(34, end=" ")
        before_while_lbl, after_while_lbl = CG.gen_label("while")
        # Write label for beginning of while loop
        CG.code_gen_label(before_while_lbl)
        Parser.match(token, TokenType.KeywordWhile)
        Parser.match(token, TokenType.OpenParen)
        er_condition = Parser.expression(token)
        Parser.match(token, TokenType.CloseParen)
        # Perform the test
        CG.code_gen_if(er_condition, after_while_lbl)
        # Write the contents of the loop
        Parser.code_block(token)
        # Branch back to the test again
        CG.code_gen("b", before_while_lbl)
        # Write label for end of while loop, to pick up when the test fails
        CG.code_gen_label(after_while_lbl)
    else:
        Parser.raise_production_not_found_error(token, 'while_statement')
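# Illustrative sketch (not emitted by this file): the calls above lay a
# while loop out roughly as below. The label names here are placeholders;
# the real ones come from CG.gen_label("while").
#
#   while_begin_0:                     # before_while_lbl
#       ...evaluate the condition into er_condition...
#       <branch to while_end_0 when er_condition is false>   # CG.code_gen_if
#       ...loop body from Parser.code_block...
#       b     while_begin_0            # branch back and re-run the test
#   while_end_0:                       # after_while_lbl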
def return_statement(token):
    """
    Implements recursive descent for the rule:
    <return_statement> ==> 30 TokenType.KeywordReturn TokenType.Semicolon
    """
    if token.t_type == TokenType.KeywordReturn:
        print(30, end=" ")
        Parser.match(token, TokenType.KeywordReturn)
        Parser.match(token, TokenType.Semicolon)
        CG.code_gen("jr", "$ra")
    else:
        Parser.raise_production_not_found_error(token, 'return_statement')
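# Note (assumed behavior): a source-level "return;" compiles to a single
# MIPS "jr $ra", jumping back to the caller's return address; the return
# value itself lives in the caller-provided stack slot set up in
# function_decl and is written by ordinary assignments elsewhere.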
def if_statement(token):
    """
    Implements recursive descent for the rule:
    <if_statement> ==> 31 TokenType.KeywordIf TokenType.OpenParen
        <expression> TokenType.CloseParen <code_block> [ <else_clause> ]
    """
    if token.t_type == TokenType.KeywordIf:
        print(31, end=" ")
        Parser.match(token, TokenType.KeywordIf)
        Parser.match(token, TokenType.OpenParen)
        er_condition = Parser.expression(token)
        if er_condition.data_type != DataTypes.BOOL:
            raise SemanticError(
                "If statement requires boolean expression as an argument",
                Parser.file_reader.get_line_data())
        Parser.match(token, TokenType.CloseParen)
        else_label, after_else_label = CG.gen_label("else")
        CG.code_gen_if(er_condition, else_label)
        Parser.code_block(token)
        if token.t_type == TokenType.KeywordElse:
            Parser.match(token, TokenType.KeywordElse)
            # the "then" block must branch past the else clause
            CG.code_gen("b", after_else_label)
            # if the test failed, pick up program execution here
            CG.code_gen_label(else_label)
            # generate the else block
            Parser.code_block(token)
            # make the after_else label
            CG.code_gen_label(after_else_label)
        else:
            # if the test failed, pick up program execution here
            CG.code_gen_label(else_label)
    else:
        Parser.raise_production_not_found_error(token, 'if_statement')
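# Illustrative sketch (labels are placeholders; real names come from
# CG.gen_label("else")): an if/else statement is laid out roughly as
#
#       <branch to else_0 when er_condition is false>   # CG.code_gen_if
#       ...then-block from Parser.code_block...
#       b     after_else_0             # skip over the else clause
#   else_0:
#       ...else-block from Parser.code_block...
#   after_else_0:
#
# When there is no else clause, only else_0 is emitted, immediately after
# the then-block.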
def function_decl(token):
    """
    Implements recursive descent for the rule:
    <function_decl> ==> 5 TokenType.KeywordFunc TokenType.Identifier
        TokenType.OpenParen <param_list> TokenType.CloseParen
        <return_identifier> <return_datatype>
        TokenType.OpenCurly <statement_list> TokenType.CloseCurly
    """
    if token.t_type == TokenType.KeywordFunc:
        print(5, end=" ")
        Parser.match(token, TokenType.KeywordFunc)

        # add the function identifier to the symbol table
        function_id = token.lexeme
        # check that the identifier hasn't already been declared
        Parser.error_on_variable_usage(function_id, is_decl_stmt=True)
        func_signature = FunctionSignature(function_id)
        old_signature = Parser.s_table.find_in_all_scopes(function_id)
        if not old_signature:
            # we don't need to check that signatures match
            Parser.s_table.insert(function_id, func_signature)
        elif not isinstance(old_signature, FunctionSignature):
            raise SemanticError(
                "Tried to redeclare %s as a function, "
                "but it was already a variable" % function_id,
                Parser.file_reader.get_line_data())

        # open a new scope
        Parser.s_table.open_scope()

        Parser.match(token, TokenType.Identifier)
        Parser.match(token, TokenType.OpenParen)
        param_list = Parser.param_list(token)
        param_types = [x[1] for x in param_list]
        if not old_signature:
            func_signature.param_list_types = param_types
        else:
            # verify that param types match
            for i in range(len(param_types)):
                if param_types[i] != old_signature.param_list_types[i]:
                    raise SemanticError(
                        "In declaration of function %s, parameter #%d is "
                        "of type %r, but previous forward declaration was "
                        "of type %r" % (function_id, i, param_types[i],
                                        old_signature.param_list_types[i]),
                        Parser.file_reader.get_line_data())
        Parser.match(token, TokenType.CloseParen)

        return_val_id = token.lexeme
        Parser.return_identifier(token)
        return_val_type = Parser.return_datatype(token)
        er_return_val = ExpressionRecord(
            return_val_type, (4 * len(param_list) + 4), is_temp=False)
        Parser.s_table.insert(return_val_id, er_return_val)
        if not old_signature:
            func_signature.return_type = return_val_type
            func_signature.label = CG.gen_function_label(function_id)
        else:
            # verify that return types match
            if return_val_type != old_signature.return_type:
                raise SemanticError(
                    "In declaration of function %s, return datatype is "
                    "of type %r, but previous forward declaration was "
                    "of type %r" % (function_id, return_val_type,
                                    old_signature.return_type),
                    Parser.file_reader.get_line_data())
            # at this point, we are guaranteed that the return types,
            # param types, and identifier are equal to those of the old
            # signature, so we can use that signature instead
            func_signature = old_signature
            # record that the signature has been defined
            old_signature.is_prototype = False

        CG.code_gen_label(func_signature.label, comment=str(func_signature))

        # In the new function, the return var is at (4*len(params)+4)($fp);
        # params are at 4($fp) thru (4*len(params))($fp)
        offset = 4 * len(param_list)
        for identifier, data_type, size in param_list:
            er_param = ExpressionRecord(data_type, offset, is_temp=False,
                                        is_reference=True)
            Parser.s_table.insert(identifier, er_param)
            offset -= 4

        Parser.match(token, TokenType.OpenCurly)
        Parser.statement_list(token)
        Parser.match(token, TokenType.CloseCurly)

        # close the function's scope
        Parser.s_table.close_scope()
        # reset the stack offsets
        CG.next_offset = -8
        CG.code_gen("jr", "$ra")
    else:
        Parser.raise_production_not_found_error(token, 'function_decl')
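# Hedged illustration of the frame layout implied by the offsets above, for
# a function with two parameters (offsets are positive from $fp and follow
# the arithmetic in this file rather than a documented ABI):
#
#   12($fp)   return value slot      # 4*len(params) + 4
#    8($fp)   first parameter        # offset starts at 4*len(params)
#    4($fp)   second parameter       # and decreases by 4 per parameter
#
# Parameters are inserted as is_reference=True ExpressionRecords, so the
# callee addresses its arguments through these $fp-relative slots.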