def read(s):
    """Parse an expression from a string.

    If the string does not contain an expression, None is returned.
    If the string cannot be parsed, a SyntaxError is raised.

    >>> read('lambda f: f(0)')
    LambdaExpr(['f'], CallExpr(Name('f'), [Literal(0)]))
    >>> read('(lambda x: x)(5)')
    CallExpr(LambdaExpr(['x'], Name('x')), [Literal(5)])
    >>> read('(lambda: 5)()')
    CallExpr(LambdaExpr([], Literal(5)), [])
    >>> read('lambda x y: 10')
    Traceback (most recent call last):
      ...
    SyntaxError: expected ':' but got 'y'
    >>> read(' ')  # returns None
    """
    # Tokenize the input and wrap the token stream in a Buffer so the
    # recursive-descent parser can peek/advance through it.
    buf = Buffer(tokenize(s))
    # An exhausted buffer means the string held no expression: fall
    # through and (implicitly) return None instead of parsing.
    return read_expr(buf) if buf.current() is not None else None
def read(s):
    """Parse an expression from a string, returning None for blank input.

    NOTE(review): this duplicates the `read` defined earlier in the file
    (likely a chunk-extraction artifact); at import time this later
    definition is the one that wins — confirm which copy is intended.
    """
    # Wrap the token stream in a Buffer so the parser can peek/advance.
    src = Buffer(tokenize(s))
    # No tokens at all -> no expression -> implicit None return.
    if src.current() is not None:
        return read_expr(src)
is raised. >>> read('lambda f: f(0)') LambdaExpr(['f'], CallExpr(Name('f'), [Literal(0)])) >>> read('(lambda x: x)(5)') CallExpr(LambdaExpr(['x'], Name('x')), [Literal(5)]) >>> read('(lambda: 5)()') CallExpr(LambdaExpr([], Literal(5)), []) >>> read('lambda x y: 10') Traceback (most recent call last): ... SyntaxError: expected ':' but got 'y' >>> read(' ') # returns None """ src = Buffer(tokenize(s)) if src.current() is not None: return read_expr(src) ########### ## Lexer ## ########### def tokenize(s): """Splits the string s into tokens and returns a list of them. >>> tokenize('lambda f: f(0, 4.2)') ['lambda', 'f', ':', 'f', '(', 0, ',', 4.2, ')'] """ src = Buffer(s) tokens = []