def tokenize(self): """Generate tokens from the source.""" if self.tokens is not None: return try: self.tokens = list(tokenize.generate_tokens(self.source.readline)) except tokenize.TokenError as err: raise PycodeError('tokenizing failed', err) self.source.close()
def __init__(self, filename, stream=None):
    close_stream = None
    if stream is None:
        stream = open(filename)
        close_stream = stream.close
    self.filename = filename
    self.stream = stream
    self.generator = tokenize.generate_tokens(stream.readline)
    self.gettoken()  # Initialize lookahead
    self.dfas, self.startsymbol = self.parse()
    if close_stream is not None:
        close_stream()
    self.first = {}  # map from symbol name to set of tokens
    self.addfirstsets()
def parse_string(self, text, debug=False):
    """Parse a string and return the syntax tree."""
    # Python 3: the iterator protocol method is __next__, not next.
    tokens = tokenize.generate_tokens(generate_lines(text).__next__)
    return self.parse_tokens(tokens, debug)
def parse_stream_raw(self, stream, debug=False):
    """Parse a stream and return the syntax tree."""
    tokens = tokenize.generate_tokens(stream.readline)
    return self.parse_tokens(tokens, debug)
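# A hedged usage sketch (not part of the snippets above): parse_string and
# parse_stream_raw follow lib2to3's pgen2 driver API, so a Driver built from
# the stock Python grammar can exercise them. lib2to3 is deprecated in recent
# CPython releases, so treat this as illustrative only.
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("x = 1\n")  # input must end with a newline
print(tree)  # str(tree) reconstructs the source from the syntax tree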
def tokenize(self): """Generate tokens from the source.""" if self.tokens is not None: return self.tokens = list(tokenize.generate_tokens(self.source.readline)) self.source.close()