Code Example #1
File: __init__.py Project: tu44okawa3/sphinx
 def tokenize(self):
     """Generate tokens from the source."""
     if self.tokens is not None:
         return  # already tokenized; do the work only once
     try:
         self.tokens = list(tokenize.generate_tokens(self.source.readline))
     except tokenize.TokenError as err:
         raise PycodeError('tokenizing failed', err) from err
     finally:
         self.source.close()  # close the source even if tokenizing failed
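For reference, a minimal standalone sketch of the same pattern: tokenize.generate_tokens takes any readline-style callable, so an in-memory string works as well. Here io.StringIO stands in for the file-like self.source used above:

 import io
 import tokenize

 source = io.StringIO("x = 1 + 2\n")
 for tok in tokenize.generate_tokens(source.readline):
     # Each token is a TokenInfo tuple with .type and .string fields.
     print(tokenize.tok_name[tok.type], repr(tok.string))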
Code Example #2
 def __init__(self, filename, stream=None):
     close_stream = None
     if stream is None:
         stream = open(filename)
         close_stream = stream.close
     self.filename = filename
     self.stream = stream
     self.generator = tokenize.generate_tokens(stream.readline)
     self.gettoken()  # Initialize lookahead
     self.dfas, self.startsymbol = self.parse()
     if close_stream is not None:
         close_stream()
     self.first = {}  # map from symbol name to set of tokens
     self.addfirstsets()
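The constructor above primes a one-token lookahead with gettoken() before parsing the grammar. A minimal sketch of that lookahead pattern, modeled on lib2to3's pgen (which skips COMMENT and NL tokens); LookaheadTokenizer is a hypothetical name, not the actual class:

 import io
 import tokenize

 class LookaheadTokenizer:
     def __init__(self, stream):
         self.generator = tokenize.generate_tokens(stream.readline)
         self.gettoken()  # prime the one-token lookahead

     def gettoken(self):
         # Advance past non-significant tokens, as lib2to3's pgen does.
         tup = next(self.generator)
         while tup[0] in (tokenize.COMMENT, tokenize.NL):
             tup = next(self.generator)
         self.type, self.value, self.begin, self.end, self.line = tup

 tk = LookaheadTokenizer(io.StringIO("name: NAME\n"))
 print(tokenize.tok_name[tk.type], tk.value)  # first significant token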
Code Example #3
 def parse_string(self, text, debug=False):
     """Parse a string and return the syntax tree."""
     # generate_lines(text).next was the Python 2 spelling of this callable
     tokens = tokenize.generate_tokens(generate_lines(text).__next__)
     return self.parse_tokens(tokens, debug)
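In Python 3 the same readline callable can be obtained without a generate_lines() helper, since io.StringIO exposes readline directly. A sketch; tokens_from_string is a hypothetical name:

 import io
 import tokenize

 def tokens_from_string(text):
     # io.StringIO(text).readline plays the role of generate_lines(text).__next__
     return tokenize.generate_tokens(io.StringIO(text).readline)

 for tok in tokens_from_string("a = b\n"):
     print(tok)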
Code Example #4
 def parse_stream_raw(self, stream, debug=False):
     """Parse a stream and return the syntax tree."""
     tokens = tokenize.generate_tokens(stream.readline)
     return self.parse_tokens(tokens, debug)
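The stream variant accepts any object with a readline() method. For on-disk sources, the stdlib's tokenize.open() is a convenient companion: it opens the file using the encoding declared in its coding cookie. A short usage sketch ("example.py" is a hypothetical path):

 import tokenize

 with tokenize.open("example.py") as stream:
     for tok in tokenize.generate_tokens(stream.readline):
         print(tokenize.tok_name[tok.type], repr(tok.string))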
Code Example #5
 def tokenize(self):
     """Generate tokens from the source."""
     if self.tokens is not None:
         return
     self.tokens = list(tokenize.generate_tokens(self.source.readline))
     self.source.close()
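Unlike Code Example #1, this variant lets tokenize.TokenError (raised, for example, on an unterminated bracket or string) propagate, and never closes the source on failure. A defensive sketch that handles both; safe_tokenize is a hypothetical helper, not part of the project above:

 import io
 import tokenize

 def safe_tokenize(source):
     try:
         return list(tokenize.generate_tokens(source.readline))
     except tokenize.TokenError as err:
         raise ValueError('tokenizing failed: %s' % err) from err
     finally:
         source.close()  # close whether tokenizing succeeded or not

 print(len(safe_tokenize(io.StringIO("x = (1,\n     2)\n"))))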