Code example #1
File: tokenizer.py  Project: renesugar/pyxl
 def __init__(self, readline):
     self.orig_readline = readline  # readline callable supplied by the caller
     self.unshift_buffer = []       # tokens pushed back to be re-read first
     self.rewound_buffer = None     # re-serialized source, set by rewind_and_retokenize
     self._tokens = tokenize.generate_tokens(self._readline)
     self.zero_row, self.zero_col = (0, 0)  # position offsets applied after a rewind
     self.stop_readline = False
Code example #2
File: tokenizer.py  Project: FuegoFro/pyxl
 def __init__(self, readline):
     self.orig_readline = readline
     self.unshift_buffer = []
     self.rewound_buffer = None
     self._tokens = tokenize.generate_tokens(self._readline)
     self.zero_row, self.zero_col = (0, 0)
     self.stop_readline = False
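
Both examples above wire the token stream up with tokenize.generate_tokens. As a point of reference, here is a minimal standalone sketch (not taken from pyxl; the sample source string is made up) of how that generator consumes a readline callable and yields the 5-tuples unpacked elsewhere in this file:

import io
import tokenize

source = "x = 1 + 2\n"
readline = io.StringIO(source).readline

# generate_tokens yields (type, string, (srow, scol), (erow, ecol), line) tuples.
for ttype, tvalue, tstart, tend, tline in tokenize.generate_tokens(readline):
    print(tokenize.tok_name[ttype], repr(tvalue), tstart, tend)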
Code example #3
File: tokenizer.py  Project: FuegoFro/pyxl
 def rewind_and_retokenize(self, rewind_token):
     """Rewind the given token (which is expected to be the last token read from this stream, or
     the end of such token); then restart tokenization."""
     ttype, tvalue, (row, col), tend, tline = rewind_token
     tokens = [rewind_token] + self._flush()
     self.zero_row, self.zero_col = (row - 1, col - 1)
     self.rewound_buffer = StringIO(Untokenizer().untokenize(tokens))
     self.unshift_buffer = []
     self._tokens = tokenize.generate_tokens(self._readline)
Code example #4
File: tokenizer.py  Project: renesugar/pyxl
 def rewind_and_retokenize(self, rewind_token):
     """Rewind the given token (which is expected to be the last token read from this stream, or
     the end of such token); then restart tokenization."""
     ttype, tvalue, (row, col), tend, tline = rewind_token
     tokens = [rewind_token] + self._flush()
     self.zero_row, self.zero_col = (
         row - 1, col)  # rows are 1-indexed, cols are 0-indexed
     self.rewound_buffer = StringIO(Untokenizer().untokenize(tokens))
     self.unshift_buffer = []
     self._tokens = tokenize.generate_tokens(self._readline)
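
Both variants of rewind_and_retokenize work by serializing the buffered tokens back to source text and re-tokenizing it. The following is a rough standalone sketch of that round trip, using the standard library's tokenize.untokenize in place of pyxl's own Untokenizer (assumed here to serve the same purpose); the sample source is made up:

import io
import tokenize

source = "a = 1\nb = 2\n"
tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))

# Serialize the tokens back to source text, then start a fresh token stream from it,
# which is essentially what rewind_and_retokenize does with its rewound_buffer.
rebuilt = tokenize.untokenize(tokens)
retokenized = list(tokenize.generate_tokens(io.StringIO(rebuilt).readline))

# The round trip preserves token types and strings.
assert [(t.type, t.string) for t in retokenized] == [(t.type, t.string) for t in tokens]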