Example #1
0
def run(fn, text):
    """Tokenize *text* (from file *fn*) and parse it into an AST.

    Returns a ``(result, error)`` pair: the token list plus the lexer's
    error when lexing fails, otherwise the parse result and ``None``.
    """
    # Lexical analysis: turn the raw source into a token stream.
    tokens, error = Lexer(fn, text).make_tokens()
    if error:
        return tokens, error

    # Syntax analysis: build the abstract syntax tree from the tokens.
    return Parser(tokens).parse(), None
Example #2
0
def run(fn: str, text: str) -> "tuple[float | None, Error | None]":
    """Lex, parse, and interpret *text* from file *fn*.

    Returns a ``(value, error)`` pair. On any failure the value is
    ``None`` and the error describes what went wrong; source with no
    executable tokens yields ``(None, None)``.
    """
    # Lexical analysis: source text -> token stream.
    lexer = Lexer(fn, text)
    tokens, error = lexer.make_tokens()
    if error:
        return None, error

    # Nothing to execute — presumably a lone token is just EOF (TODO confirm
    # against Lexer.make_tokens).
    if len(tokens) <= 1:
        return None, None

    # Syntax analysis: token stream -> abstract syntax tree.
    parser = Parser(tokens)
    ast = parser.parse()
    if ast.error:
        return None, ast.error

    # Evaluation: walk the AST in a fresh program context backed by the
    # shared global symbol table.
    interpreter = Interpreter()
    context = Context("<Program>")
    context.symbol_table = global_symbol_table
    result = interpreter.visit(ast.node, context)

    return result.value, result.error