def parse_stream(stream, filename="<unknown>", mode="exec"):
    """Parse Python source read from *stream* and return an AST node.

    :param stream: object with a ``readline`` method yielding source lines
        (e.g. an open text file).
    :param filename: name to associate with the source; currently unused
        here but kept for API compatibility with ``compile()``-style callers.
    :param mode: ``"exec"`` for a module, ``"eval"`` for a single
        expression, ``"single"`` for a single interactive statement.
    :raises ValueError: if *mode* is not one of the three accepted values.
    """
    import utokenize as tokenize
    from . import parser
    tstream = tokenize.tokenize(stream.readline)
    p = parser.Parser(tstream)
    # Consume the leading ENCODING token emitted by the tokenizer before
    # any real parsing starts.
    p.match(tokenize.ENCODING)
    if mode == "exec":
        t = p.match_mod()
    elif mode == "eval":
        # Expression/Interactive are AST node classes defined elsewhere
        # in this package.
        t = Expression(body=p.require_expr())
    elif mode == "single":
        t = Interactive(body=p.match_stmt())
    else:
        # Previously a bare `raise ValueError` — say what was wrong.
        raise ValueError(
            "mode must be 'exec', 'eval' or 'single', got %r" % (mode,)
        )
    return t
import sys

import utokenize as tokenize

# Tokenize the Python source file named on the command line and print one
# line per token for debugging/inspection.
# Use a context manager so the file is closed even if tokenization fails
# (the original opened it and never closed it).
with open(sys.argv[1], "r") as f:
    for t in tokenize.tokenize(f.readline):
        print(
            "TokenInfo(type=%d (%s), string=%r, startl=%d)"
            % (t.type, tokenize.tok_name[t.type], t.string, t.start)
        )
def parse_stream(stream, filename="<unknown>", mode="exec"):
    """Tokenize source from *stream* and hand the token stream to
    ``parse_tokens``, returning whatever it produces.

    *filename* and *mode* are accepted for ``compile()``-style call
    compatibility; this wrapper itself does not consult them.
    """
    import utokenize
    token_iter = utokenize.tokenize(stream.readline)
    return parse_tokens(token_iter)