Example 1
    def function_prototype(token):
        """
        Implements recursive descent for the rule:
        <function_prototype> ==>
            4   TokenType.KeywordProto TokenType.Identifier TokenType.OpenParen
                <param_list> TokenType.CloseParen <return_identifier>
                <return_datatype> TokenType.Semicolon
        """
        if token.t_type == TokenType.KeywordProto:
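            # print the number of the production being applied (rule 4)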
            print(4, end=" ")
            Parser.match(token, TokenType.KeywordProto)

            # grab the function identifier; it is inserted into the
            # symbol table below
            function_id = token.lexeme

            # check that the identifier hasn't already been declared
            Parser.error_on_variable_usage(function_id,
                                           is_decl_stmt=True,
                                           is_prototype=True)

            Parser.match(token, TokenType.Identifier)
            Parser.match(token, TokenType.OpenParen)

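            # param_list yields (identifier, datatype, size) tuples;
            # only the datatypes are needed for the signature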
            param_list = Parser.param_list(token)
            param_types = [x[1] for x in param_list]

            Parser.match(token, TokenType.CloseParen)

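            # the prototype parses the return identifier but only records
            # its datatype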
            Parser.return_identifier(token)
            return_val_type = Parser.return_datatype(token)

            func_signature = FunctionSignature(
                identifier=function_id,
                label=CG.gen_function_label(function_id),
                param_list_types=param_types,
                return_type=return_val_type,
                is_prototype=True)
            Parser.s_table.insert(function_id, func_signature)
            Parser.match(token, TokenType.Semicolon)
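
Both this listing and the next drive the parse through Parser.match(token, expected_type), whose body is not shown here. From the way it is used (token.lexeme already holds the function identifier right after KeywordProto is matched), it evidently checks the lookahead's type and then advances by updating the shared token object in place. The following is only a minimal standalone sketch of that idea; the TokenType subset, Token, ParseError, and the token_stream parameter are throwaway names for illustration, not the project's actual definitions.

    from enum import Enum, auto

    class TokenType(Enum):
        # hypothetical subset of the real token types
        KeywordProto = auto()
        Identifier = auto()
        OpenParen = auto()

    class Token:
        def __init__(self, t_type, lexeme):
            self.t_type = t_type
            self.lexeme = lexeme

    class ParseError(Exception):
        pass

    def match(token, expected_type, token_stream):
        """Check the lookahead, then overwrite it in place with the next token."""
        if token.t_type != expected_type:
            raise ParseError("expected %s, found %s (%r)"
                             % (expected_type, token.t_type, token.lexeme))
        nxt = next(token_stream, None)
        if nxt is not None:
            token.t_type, token.lexeme = nxt.t_type, nxt.lexeme

    # matching "proto f(" leaves the identifier in the shared token, which is
    # why function_prototype can read token.lexeme right after the match
    stream = iter([Token(TokenType.Identifier, "f"),
                   Token(TokenType.OpenParen, "(")])
    tok = Token(TokenType.KeywordProto, "proto")
    match(tok, TokenType.KeywordProto, stream)
    print(tok.lexeme)   # -> f
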
Example 2
    def function_decl(token):
        """
        Implements recursive descent for the rule:
        <function_decl> ==>
            5   TokenType.KeywordFunc TokenType.Identifier
                TokenType.OpenParen <param_list> TokenType.CloseParen
                <return_identifier> <return_datatype>
                TokenType.OpenCurly <statement_list> TokenType.CloseCurly
        """
        if token.t_type == TokenType.KeywordFunc:
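            # print the number of the production being applied (rule 5)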
            print(5, end=" ")
            Parser.match(token, TokenType.KeywordFunc)

            # grab the function identifier; it is checked against and
            # inserted into the symbol table below
            function_id = token.lexeme

            # check that the identifier hasn't already been declared
            Parser.error_on_variable_usage(function_id, is_decl_stmt=True)

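            # build a bare signature for now; it is either filled in below
            # or replaced by the matching forward declaration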
            func_signature = FunctionSignature(function_id)

            old_signature = Parser.s_table.find_in_all_scopes(function_id)
            if not old_signature:
                # we don't need to check that signatures match
                Parser.s_table.insert(function_id, func_signature)
            elif not isinstance(old_signature, FunctionSignature):
                raise SemanticError(
                    "Tried to redeclare %s as a function, "
                    "but it was already a variable" % function_id,
                    Parser.file_reader.get_line_data())

            # open a new scope
            Parser.s_table.open_scope()

            Parser.match(token, TokenType.Identifier)
            Parser.match(token, TokenType.OpenParen)

            param_list = Parser.param_list(token)
            param_types = [x[1] for x in param_list]

            if not old_signature:
                func_signature.param_list_types = param_types
            else:
                # verify that the parameter count and types match the
                # forward declaration
                if len(param_types) != len(old_signature.param_list_types):
                    raise SemanticError(
                        "In declaration of function %s, %d parameters were "
                        "declared, but previous forward declaration had "
                        "%d" % (function_id, len(param_types),
                                len(old_signature.param_list_types)),
                        Parser.file_reader.get_line_data())
                for i in range(len(param_types)):
                    if param_types[i] != old_signature.param_list_types[i]:
                        raise SemanticError(
                            "In declaration of function %s, parameter #%d is "
                            "of type %r, but previous forward declaration was "
                            "of type %r" % (function_id, i, param_types[i],
                                            old_signature.param_list_types[i]),
                            Parser.file_reader.get_line_data())

            Parser.match(token, TokenType.CloseParen)

            return_val_id = token.lexeme
            Parser.return_identifier(token)
            return_val_type = Parser.return_datatype(token)

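            # bind the return identifier in the new scope to its own
            # ExpressionRecord and stack offset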
            er_return_val = ExpressionRecord(return_val_type,
                                             (4 * len(param_list) + 4),
                                             is_temp=False)
            Parser.s_table.insert(return_val_id, er_return_val)

            if not old_signature:
                func_signature.return_type = return_val_type
                func_signature.label = CG.gen_function_label(function_id)
            else:
                # verify that return types match
                if return_val_type != old_signature.return_type:
                    raise SemanticError(
                        "In declaration of function %s, return datatype is "
                        "of type %r, but previous forward declaration was "
                        "of type %r" % (function_id, return_val_type,
                                        old_signature.return_type),
                        Parser.file_reader.get_line_data())

                # at this point, we are guaranteed that the return types,
                # param types, and identifier are equal to that of the old
                # signature, so we can use that signature instead
                func_signature = old_signature

                # record that the signature has been defined
                old_signature.is_prototype = False

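            # emit the function's entry label, annotating the generated
            # code with the signature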
            CG.code_gen_label(func_signature.label,
                              comment=str(func_signature))

            offset = (4 * len(param_list))

            # In the new function's frame, the return var's record (inserted
            # above) has offset 4*len(params)+4; the params get offsets
            # 4*len(params) down to 4, assigned in order below
            for identifier, data_type, size in param_list:

                er_param = ExpressionRecord(data_type,
                                            offset,
                                            is_temp=False,
                                            is_reference=True)
                Parser.s_table.insert(identifier, er_param)

                offset -= 4

            Parser.match(token, TokenType.OpenCurly)
            Parser.statement_list(token)
            Parser.match(token, TokenType.CloseCurly)

            # close the function's scope
            Parser.s_table.close_scope()

            # reset the stack offsets
            CG.next_offset = -8

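            # jump back to the caller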
            CG.code_gen("jr", "$ra")
        else:
            Parser.raise_production_not_found_error(token, 'function_decl')
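
Both examples also rely on a FunctionSignature record and a scope-aware symbol table (Parser.s_table) whose definitions are not shown. The sketch below is a minimal, self-contained reconstruction that is consistent with how they are used above: FunctionSignature can be built from just an identifier or from the full set of keyword arguments, and the table supports insert, find_in_all_scopes, open_scope, and close_scope. The field defaults and internals are assumptions; the real classes may well differ.

    class FunctionSignature:
        def __init__(self, identifier, label=None, param_list_types=None,
                     return_type=None, is_prototype=False):
            self.identifier = identifier
            self.label = label
            self.param_list_types = param_list_types or []
            self.return_type = return_type
            self.is_prototype = is_prototype

        def __str__(self):
            params = ", ".join(str(t) for t in self.param_list_types)
            return "%s(%s) -> %s" % (self.identifier, params, self.return_type)

    class SymbolTable:
        """A stack of dicts, one per open scope."""

        def __init__(self):
            self.scopes = [{}]              # the global scope

        def open_scope(self):
            self.scopes.append({})

        def close_scope(self):
            self.scopes.pop()

        def insert(self, identifier, record):
            # declare in the innermost open scope
            self.scopes[-1][identifier] = record

        def find_in_all_scopes(self, identifier):
            # search from the innermost scope outward
            for scope in reversed(self.scopes):
                if identifier in scope:
                    return scope[identifier]
            return None

Note that function_decl looks up the forward declaration with find_in_all_scopes before it calls open_scope, so a prototype recorded by function_prototype in the enclosing scope is found and reused; the parameters and return variable are then inserted into the fresh scope, which is closed again once the body has been parsed.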