Example 1
 def tokenize(self):
     """Generate tokens from the source."""
     if self.tokens is not None:
         return  # already tokenized; the result is cached
     try:
         self.tokens = list(tokenize.generate_tokens(self.source.readline))
     except tokenize.TokenError as err:
         raise PycodeError('tokenizing failed', err) from err
     self.source.close()
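The method caches its result: once self.tokens is populated, later calls return immediately. A minimal, self-contained sketch of the underlying call, using only the standard library (the sample source string is made up):

 import io
 import tokenize

 source = io.StringIO("x = 1 + 2\n")
 # generate_tokens() takes a readline callable and yields TokenInfo tuples.
 for tok in tokenize.generate_tokens(source.readline):
     print(tokenize.tok_name[tok.type], repr(tok.string))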
Example 2
 def __init__(self, filename, stream=None):
     close_stream = None
     if stream is None:
         stream = open(filename)
         close_stream = stream.close
     self.filename = filename
     self.stream = stream
     self.generator = tokenize.generate_tokens(stream.readline)
     self.gettoken() # Initialize lookahead
     self.dfas, self.startsymbol = self.parse()
     if close_stream is not None:
         close_stream()
     self.first = {} # map from symbol name to set of tokens
     self.addfirstsets()
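This constructor opens the file only when the caller did not pass a stream, and closes only streams it opened itself. A standalone sketch of that ownership pattern, assuming nothing beyond the standard library (the helper name read_tokens is hypothetical):

 import tokenize

 def read_tokens(filename, stream=None):
     """Tokenize `filename`, or an already-open `stream` if one is given."""
     close_stream = None
     if stream is None:
         stream = open(filename)
         close_stream = stream.close  # close only what we opened
     try:
         return list(tokenize.generate_tokens(stream.readline))
     finally:
         if close_stream is not None:
             close_stream()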
Example 3
 def parse_string(self, text, debug=False):
     """Parse a string and return the syntax tree."""
     # ".__next__" replaces the Python 2 ".next" bound method.
     tokens = tokenize.generate_tokens(generate_lines(text).__next__)
     return self.parse_tokens(tokens, debug)
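generate_lines() is a helper from the same module, not shown here; presumably it yields the lines of text one at a time. Under that assumption, the same effect can be had with io.StringIO, whose readline method matches what generate_tokens() expects:

 import io
 import tokenize

 def tokens_from_string(text):
     # io.StringIO gives the string a file-like readline() interface.
     return tokenize.generate_tokens(io.StringIO(text).readline)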
Example 4
 def parse_stream_raw(self, stream, debug=False):
     """Parse a stream and return the syntax tree."""
     tokens = tokenize.generate_tokens(stream.readline)
     return self.parse_tokens(tokens, debug)
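Any object with a readline() method works as the stream here. For a source file on disk, tokenize.open() is a convenient way to get one, since it honors the encoding declared in the file (the filename below is a placeholder):

 import tokenize

 with tokenize.open("some_module.py") as f:
     for tok in tokenize.generate_tokens(f.readline):
         print(tokenize.tok_name[tok.type], repr(tok.string))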
Example 5
 def tokenize(self):
     """Generate tokens from the source."""
     if self.tokens is not None:
         return
     self.tokens = list(tokenize.generate_tokens(self.source.readline))
     self.source.close()
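Unlike Example 1, this variant does not guard against tokenize.TokenError, which generate_tokens() raises on input such as an unterminated bracket. A small sketch of the failure mode the earlier example handles:

 import io
 import tokenize

 try:
     list(tokenize.generate_tokens(io.StringIO("(1, 2\n").readline))
 except tokenize.TokenError as err:
     print("tokenizing failed:", err)  # e.g. EOF in multi-line statement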