def proc_load(scope, fname):
    """Load a Scheme source file and evaluate every top-level form in *scope*.

    Args:
        scope: evaluation environment providing ``eval(expr)``.
        fname: path of the Scheme file to load.

    Raises:
        Exception: if the file's parentheses are unbalanced.
    """
    file_tokens = lexer.tokenize_file(fname)
    tree, paren_balance = lexer.get_ast(file_tokens)
    # A nonzero balance means an unmatched "(" or ")" somewhere in the file.
    if paren_balance != 0:
        raise Exception("Unbalanced parentheses")
    # Desugar quote shorthand first, then rewrite define forms.
    tree = lexer.expand_define(lexer.expand_quotes(tree))
    for form in tree:
        scope.eval(form)
def start(self):
    """Run the interactive read-eval-print loop.

    Reads lines from stdin with a ``scheme> `` prompt, accumulating tokens
    across lines until the parentheses balance, then evaluates each complete
    expression in ``self.scope`` and prints the result. Exits on EOF
    (Ctrl-D / Ctrl-Z).

    Fix: the original mixed Python 2 constructs (``raw_input`` and a bare
    ``print`` statement) with Python 3 ``print(...)`` calls used elsewhere in
    the same function; under Python 3 ``raw_input`` raised NameError and the
    bare ``print`` silently did nothing. Ported to ``input()`` / ``print()``.
    """
    stored_tokens = []  # tokens carried over from incomplete (unbalanced) lines
    while True:
        try:
            line = input("scheme> ").strip()
        except EOFError:
            # Emit a final newline so the shell prompt starts on a fresh line.
            print()
            break
        if not line:
            continue
        try:
            tokens = lexer.tokenize(line)
        except Exception as e:
            # Report the lexer error and keep the REPL alive.
            print(e)
            continue
        stored_tokens += tokens
        ast, balance = lexer.get_ast(stored_tokens)
        if balance > 0:
            # More "(" than ")": expression continues on the next line.
            continue
        elif balance < 0:
            # Stray ")": discard the accumulated input and start over.
            print('Unexpected ")"')
            stored_tokens = []
            continue
        # Balanced: consume the buffer and evaluate every complete form.
        stored_tokens = []
        ast = lexer.expand_quotes(ast)
        ast = lexer.expand_define(ast)
        for expr in ast:
            print(self.scope.eval(expr))