예제 #1
0
def main():
    """Validate the command-line arguments and run the interpreter pipeline.

    Expects exactly one user-supplied argument (sys.argv[1]): the path of
    the program file to interpret. Prints a usage warning for any other
    argument count and an error if the file does not exist.
    """

    if len(argv) != 2:
        # wrong number of command line arguments -> print usage and stop
        Tools.print_warning("Usage: python main.py <program-to-run>\n")
        return

    program_path = argv[1]

    # check whether the file exists in the file system before reading it
    if not os.path.exists(program_path):
        # fixed grammar of the error message ("does not exist")
        Tools.print_error("file \"" + program_path +
                          "\" does not exist in the file system.\n")
        return

    # check suffix of the file
    Tools.check_suffix(program_path)

    # create lexer and read the raw program text
    lexer = Lexer()
    raw_program_string = Tools.get_raw_program_string(program_path)

    # lexically analyze the program, breaking it down into a token stream
    token_stream = lexer.lex(raw_program_string)

    # the token stream can only be iterated once, so lex a second time
    # just to measure its length without exhausting the parser's copy
    token_stream_replica = lexer.lex(raw_program_string)
    Tools.check_token_length(token_stream_replica)

    # parse the token stream into an AST (abstract syntax tree)
    parser = Parser()
    parsed_AST = parser.parse(token_stream)

    print(parsed_AST)
예제 #2
0
    def interpret(raw_program_string):
        """Run the full pipeline — lex, length check, parse, typecheck,
        evaluate — on the given raw program text."""

        # lexer for breaking the source text into tokens
        analyzer = Lexer()

        # stream handed to the parser
        parser_stream = analyzer.lex(raw_program_string)

        # token streams are single-use iterables, so lex a second time
        # purely to measure the length without exhausting the parser's copy
        counting_stream = analyzer.lex(raw_program_string)
        Tools.check_token_length(counting_stream)

        # build the abstract syntax tree from the token stream
        syntax_tree = Parser().parse(parser_stream)

        # static type checking, starting from an empty typing context
        syntax_tree.typecheck(AST.Type_Context.get_empty_context())

        # evaluation, starting from an empty evaluation context
        syntax_tree.eval(AST.Eval_Context.get_empty_context())