Example #1
0
def interpret(mode: str):
    """Tokenize, parse, and execute the loaded script, printing diagnostics.

    Relies on module-level state: ``file_name``, ``f`` (the script source),
    and ``argv`` (presumably the parsed command-line options dict — confirm
    at the call site).

    :param mode: "sp" tokenizes the raw script; any other value is rejected
    :raises ValueError: if *mode* is not a supported mode
    """
    lex_start = time.time()

    if mode == "sp":
        lexer = spl_lexer.Tokenizer()
        lexer.setup(script.get_spl_path(),
                    file_name,
                    argv["dir"],
                    link=argv["link"],
                    import_lang=argv["import"])
        lexer.tokenize(f)
    # elif mode == "lsp":
    #     lexer = spl_lexer.Tokenizer()
    #     lexer.restore_tokens(f)
    else:
        # Fail fast with a descriptive, specific exception instead of a
        # bare ``raise Exception`` (which carried no context at all).
        raise ValueError("unknown interpret mode: {!r}".format(mode))

    if argv["tokens"]:
        print(lexer.get_tokens())

    parse_start = time.time()

    parser = psr.Parser(lexer.get_tokens())
    block = parser.parse()

    if argv["ast"]:
        print("===== Abstract Syntax Tree =====")
        print(block)
        print("===== End of AST =====")
    if argv["debugger"]:
        spl_interpreter.DEBUG = True

    interpret_start = time.time()

    # Stream triple (in, out, err) handed to the interpreter.
    ioe = (argv["in"], argv["out"], argv["err"])

    itr = spl_interpreter.Interpreter(argv["argv"], argv["dir"],
                                      argv["encoding"], ioe)
    itr.set_ast(block)
    result = itr.interpret()

    end = time.time()

    # Flush both streams so interpreter output precedes the reports below.
    sys.stdout.flush()
    sys.stderr.flush()

    if argv["exit"]:
        print("Process finished with exit value " +
              lib.replace_bool_none(str(result)))

    if argv["vars"]:
        print(itr.env)

    if argv["timer"]:
        print("Time used: tokenize: {}s, parse: {}s, execute: {}s.".format(
            parse_start - lex_start, interpret_start - parse_start,
            end - interpret_start))

    if argv["exec_time"]:
        # NOTE(review): prints the AST, not a time — matches the original
        # behavior; confirm the intended output.
        print(block)
Example #2
0
def interpret(mode: str):
    """Tokenize, parse, and execute the loaded script, printing diagnostics.

    Relies on module-level state: ``file_name``, ``f`` (the script source),
    and ``argv`` (presumably the parsed command-line options dict — confirm
    at the call site).

    :param mode: "tp" tokenizes the raw script; any other value is rejected
    :raises ValueError: if *mode* is not a supported mode
    """
    lex_start = time.time()

    if mode == "tp":
        lexer = spl_lexer.Tokenizer()
        lexer.setup(script.get_spl_path(), file_name, argv["dir"])
        lexer.tokenize(f)
    # elif mode == "lsp":
    #     lexer = spl_lexer.Tokenizer()
    #     lexer.restore_tokens(f)
    else:
        # Fail fast with a descriptive, specific exception instead of a
        # bare ``raise Exception`` (which carried no context at all).
        raise ValueError("unknown interpret mode: {!r}".format(mode))

    if argv["tokens"]:
        print(lexer.get_tokens())

    parse_start = time.time()

    parser = psr.Parser(lexer.get_tokens())
    block = parser.parse()

    # pre = tpp.PreProcessor()
    # pre.process(block)

    if argv["ast"]:
        print("===== Abstract Syntax Tree =====")
        print(block)
        print("===== End of AST =====")
    if argv["debugger"]:
        spl_interpreter.DEBUG = True

    interpret_start = time.time()

    # ioe = (argv["in"], argv["out"], argv["err"])

    itr = spl_interpreter.Interpreter()
    itr.set_ast(block, parser.literal_bytes)
    result = itr.interpret()

    end = time.time()

    # Flush both streams so interpreter output precedes the reports below.
    sys.stdout.flush()
    sys.stderr.flush()

    if argv["exit"]:
        print("Process finished with exit value " + str(result))

    # if argv["vars"]:
    #     print(itr.env)

    if argv["timer"]:
        print("Time used: tokenize: {}s, parse: {}s, execute: {}s.".format(
            parse_start - lex_start, interpret_start - parse_start,
            end - interpret_start))

    if argv["exec_time"]:
        # NOTE(review): prints the AST, not a time — matches the original
        # behavior; confirm the intended output.
        print(block)
Example #3
0
    psr.add_number("1")
    psr.build_expr()
    psr.build_line()
    psr.build_block()

    psr.build_line()

    psr.add_name("a")
    psr.build_line()

    psr.build_block()
    psr.build_line()

    psr.add_name("res")
    psr.add_assignment()
    psr.add_call("main")
    psr.add_number("3")
    psr.add_number("5")
    psr.build_call()
    # psr.build_expr2()
    psr.build_line()

    print(psr)

    block = spl_ast.BlockStmt()
    block.lines = psr.elements

    itr = spl_interpreter.Interpreter(block)
    print(itr.interpret())
    print(itr.env.variables)
Example #4
0
def print_waring(msg):
    """Write *msg* (stringified) plus a trailing newline to stderr and flush.

    NOTE(review): the name looks like a typo for ``print_warning``; kept
    as-is because external callers may depend on it.
    """
    line = str(msg) + "\n"
    sys.stderr.write(line)
    sys.stderr.flush()


if __name__ == "__main__":

    line_terminated = True

    lex2 = lex.Tokenizer()

    lex2.setup(script.get_spl_path(),
               "console",
               script.get_spl_path(),
               import_lang=True)
    itr = spl_interpreter.Interpreter([], os.getcwd(), "utf8",
                                      (sys.stdin, sys.stdout, sys.stderr))

    # Makes the interpreter import the "lang.sp"
    lex2.tokenize([])
    parser_ = psr.Parser(lex2.get_tokens())
    block = parser_.parse()
    itr.set_ast(block)
    itr.interpret()
    lines = []

    lex2.import_lang = False

    def error_handler(e2):
        raise e2

    itr.set_error_handler(error_handler)
Example #5
0
if __name__ == "__main__":
    from bin import spl_ast, spl_interpreter

    psr = spl_ast.Parser()
    psr.add_name("a")
    psr.add_assignment()
    psr.add_number("3")
    psr.add_operator("+")
    psr.add_number("2")
    psr.add_operator("*")
    psr.add_number("5")
    psr.add_operator("+")
    psr.add_number("4")
    psr.build_expr()
    psr.build_line()

    print(psr)

    itr = spl_interpreter.Interpreter(psr.get_as_block())
    print(itr.interpret())
    print(itr.env.variables)