Example #1
import sys
import time

# Import paths assumed to follow the "bin." package layout shown in Example #4.
import bin.spl_lexer as spl_lexer
import bin.spl_parser as psr
import bin.spl_interpreter as spl_interpreter
import bin.spl_lib as lib
import script


# `argv` (an options dict), `file_name`, and `f` (the opened source file)
# are module-level names set up by the surrounding CLI script.
def interpret(mode: str):
    lex_start = time.time()

    if mode == "sp":
        lexer = spl_lexer.Tokenizer()
        lexer.setup(script.get_spl_path(),
                    file_name,
                    argv["dir"],
                    link=argv["link"],
                    import_lang=argv["import"])
        lexer.tokenize(f)
    # elif mode == "lsp":
    #     lexer = spl_lexer.Tokenizer()
    #     lexer.restore_tokens(f)
    else:
        raise ValueError("unsupported mode: " + mode)

    if argv["tokens"]:
        print(lexer.get_tokens())

    parse_start = time.time()

    # Build the abstract syntax tree from the token stream.
    parser = psr.Parser(lexer.get_tokens())
    block = parser.parse()

    if argv["ast"]:
        print("===== Abstract Syntax Tree =====")
        print(block)
        print("===== End of AST =====")
    if argv["debugger"]:
        spl_interpreter.DEBUG = True

    interpret_start = time.time()

    # Standard in/out/err streams for the interpreted program.
    ioe = (argv["in"], argv["out"], argv["err"])

    itr = spl_interpreter.Interpreter(argv["argv"], argv["dir"],
                                      argv["encoding"], ioe)
    itr.set_ast(block)
    result = itr.interpret()

    end = time.time()

    sys.stdout.flush()
    sys.stderr.flush()

    if argv["exit"]:
        print("Process finished with exit value " +
              lib.replace_bool_none(str(result)))

    if argv["vars"]:
        print(itr.env)

    if argv["timer"]:
        print("Time used: tokenize: {}s, parse: {}s, execute: {}s.".format(
            parse_start - lex_start, interpret_start - parse_start,
            end - interpret_start))

    if argv["exec_time"]:
        print(block)
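
The function reads several module-level names rather than taking them as parameters. Below is a minimal sketch of the state it expects, inferred only from the keys the snippet accesses; every concrete value is hypothetical:

import sys

# Hypothetical options dict; the keys are exactly those interpret() reads.
argv = {
    "dir": ".",                # working directory passed to lexer/interpreter
    "link": False,             # lexer link option ("sp" mode only)
    "import": True,            # auto-import the language library
    "tokens": False,           # print the token stream
    "ast": False,              # print the abstract syntax tree
    "debugger": False,         # enable interpreter debug output
    "in": sys.stdin, "out": sys.stdout, "err": sys.stderr,
    "argv": [],                # argv forwarded to the interpreted program
    "encoding": "utf8",
    "exit": True,              # print the final exit value
    "vars": False,             # dump the interpreter environment
    "timer": False,            # print per-phase timings
    "exec_time": False,
}

file_name = "program.sp"       # hypothetical source file name

with open(file_name, "r") as f:
    interpret("sp")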
Example #2
import sys
import time

# Import paths assumed, mirroring Example #4's layout.
import bin.spl_lexer as spl_lexer
import bin.spl_parser as psr
import bin.spl_interpreter as spl_interpreter
import script


# As in Example #1, `argv`, `file_name`, and `f` are module-level globals
# set up by the surrounding CLI script.
def interpret(mode: str):
    lex_start = time.time()

    if mode == "tp":
        lexer = spl_lexer.Tokenizer()
        lexer.setup(script.get_spl_path(), file_name, argv["dir"])
        lexer.tokenize(f)
    # elif mode == "lsp":
    #     lexer = spl_lexer.Tokenizer()
    #     lexer.restore_tokens(f)
    else:
        raise ValueError("unsupported mode: " + mode)

    if argv["tokens"]:
        print(lexer.get_tokens())

    parse_start = time.time()

    parser = psr.Parser(lexer.get_tokens())
    block = parser.parse()

    # pre = tpp.PreProcessor()
    # pre.process(block)

    if argv["ast"]:
        print("===== Abstract Syntax Tree =====")
        print(block)
        print("===== End of AST =====")
    if argv["debugger"]:
        spl_interpreter.DEBUG = True

    interpret_start = time.time()

    # ioe = (argv["in"], argv["out"], argv["err"])

    itr = spl_interpreter.Interpreter()
    itr.set_ast(block, parser.literal_bytes)
    result = itr.interpret()

    end = time.time()

    sys.stdout.flush()
    sys.stderr.flush()

    if argv["exit"]:
        print("Process finished with exit value " + str(result))

    # if argv["vars"]:
    #     print(itr.env)

    if argv["timer"]:
        print("Time used: tokenize: {}s, parse: {}s, execute: {}s.".format(
            parse_start - lex_start, interpret_start - parse_start,
            end - interpret_start))

    if argv["exec_time"]:
        print(block)
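
Compared with Example #1, this appears to be a later revision of the same driver: the mode string is "tp" rather than "sp", lexer.setup() no longer takes the link and import_lang options, the Interpreter is constructed with no arguments, and set_ast() additionally receives parser.literal_bytes. The I/O-stream tuple and the environment dump are commented out, and the exit value is printed without the lib.replace_bool_none() conversion. The surrounding CLI contract (the argv options dict, file_name, and f) is otherwise unchanged.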
Example #3
import os
import sys

# Import paths assumed, mirroring Example #4's layout.
import bin.spl_lexer as lex
import bin.spl_parser as psr
import bin.spl_interpreter as spl_interpreter
import script


def print_warning(msg):
    sys.stderr.write(str(msg) + "\n")
    sys.stderr.flush()


if __name__ == "__main__":

    line_terminated = True

    lex2 = lex.Tokenizer()

    lex2.setup(script.get_spl_path(),
               "console",
               script.get_spl_path(),
               import_lang=True)
    itr = spl_interpreter.Interpreter([], os.getcwd(), "utf8",
                                      (sys.stdin, sys.stdout, sys.stderr))

    # Have the interpreter import the "lang.sp" library first
    lex2.tokenize([])
    parser_ = psr.Parser(lex2.get_tokens())
    block = parser_.parse()
    itr.set_ast(block)
    itr.interpret()
    lines = []

    lex2.import_lang = False
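
The snippet is cut off after this setup; the `lines` buffer and the `line_terminated` flag suggest a line-buffering read-eval loop follows. A purely hypothetical sketch of how such a loop might continue, using only the names set up above:

    while True:
        try:
            line = input(">>> " if line_terminated else "... ")
        except EOFError:
            break
        lines.append(line)
        # A real loop would decide here whether the buffered input forms a
        # complete statement, then tokenize, parse, and interpret it with
        # the same lex2/psr/itr pipeline used for the initial "lang.sp"
        # import, updating line_terminated accordingly.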
Example #4
import sys
import bin.spl_ast as ast
import bin.spl_compiler as cmp
import bin.spl_parser as psr
import bin.spl_lexer as lex
import script

if __name__ == '__main__':
    argv = sys.argv
    src_file = argv[1]      # path of the SPL source file to compile
    target_file = argv[2]   # path of the compiled output file

    with open(src_file, "r") as rf:
        lexer = lex.Tokenizer()
        lexer.setup(script.get_spl_path(),
                    src_file,
                    lex.get_dir(argv[0]),
                    False,
                    import_lang=False)
        lexer.tokenize(rf)

        tokens = lexer.get_tokens()

        parser = psr.Parser(tokens)
        root = parser.parse()

        compiler = cmp.Compiler(root)
        compiler.compile()
        byt = compiler.get_bytes()

        with open(target_file, "wb") as wf:
            # The original snippet ends here; writing the compiled bytes
            # out is the implied completion of this block.
            wf.write(byt)
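
A script like this would be invoked with the source and target paths as positional arguments, e.g. python compile.py program.sp program.spc (both file names hypothetical). The lexer is set up with import_lang=False, presumably so that the compiled output contains only the user's program rather than the standard library.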