def filter(inp, out, writer=HTMLWriter):
    """
    Read Python source from C{inp}, colorize each token via C{writer},
    and write the markup to C{out} wrapped in a C{<pre>} element.

    @param inp: a file-like object providing a C{readline} method that
        yields Python source.
    @param out: a file-like object with a C{write} method accepting
        L{bytes}.
    @param writer: the writer class used to render each token; defaults
        to C{HTMLWriter}.
    """
    out.write(b'<pre>')
    emit = TokenPrinter(writer(out.write).write).printtoken
    try:
        for tokenType, string, start, end, line in _tokenize(inp.readline):
            emit(tokenType, string, start, end, line)
    except tokenize.TokenError:
        # Incomplete or malformed source is tolerated: whatever was
        # tokenized before the error has already been written out.
        pass
    out.write(b'</pre>\n')
def lastColorizedLine(source):
    """
    Tokenize and colorize the given Python source.

    Returns a VT102-format colorized version of the last line of
    C{source}.

    @param source: Python source code
    @type source: L{str} or L{bytes}

    @return: L{bytes} of colorized source
    """
    # Normalize to bytes so the BytesIO-backed tokenizer always sees
    # a consistent input type.
    if not isinstance(source, bytes):
        source = source.encode("utf-8")
    w = VT102Writer()
    p = TokenPrinter(w.write).printtoken
    s = BytesIO(source)
    for token in _tokenize(s.readline):
        (tokenType, string, start, end, line) = token
        p(tokenType, string, start, end, line)
    # NOTE(review): the file previously contained two definitions of this
    # function; the later one (str(w) encoded as UTF-8, rather than
    # bytes(w)) was the one effective at runtime, so that behavior is
    # preserved here.
    return str(w).encode("utf-8")