def get_chunks(code) -> Generator[Chunk, None, None]:
    """Tokenize *code* and yield successive Chunks of its token stream.

    A token that completes a chunk may also need to seed the next one
    (``append`` returns a truthy "reuse" flag); in that case it is fed to
    the fresh chunk immediately. Tokenization errors are swallowed,
    optionally reported when ``state.verbose`` is set.
    """
    token_stream = tokenize.tokenize(io.BytesIO(code.encode("utf-8")).readline)
    current = Chunk()
    try:
        for raw in token_stream:
            tok = PyToken(raw)
            pending = current.append(tok)
            if not current.complete:
                continue
            # Chunk finished: hand it out and start a new one.
            yield current
            current = Chunk()
            if pending:
                # Re-feed the boundary token into the fresh chunk.
                pending = current.append(tok)
                # assert not pending
                if current.complete:
                    yield current
                    current = Chunk()
        # Flush whatever is left after the stream ends.
        yield current
    except tokenize.TokenError as err:
        if state.verbose:
            traceback.print_exc()
            print(err)
def get_quote_type(code: str):
    """Return the quote type of the first real token of *code*.

    The first token emitted by ``tokenize`` is the ENCODING marker, so it
    is skipped; the quote type is read from the token after it.
    """
    from flynt.lexer.PyToken import PyToken

    token_gen = tokenize.tokenize(io.BytesIO(code.encode("utf-8")).readline)
    next(token_gen)  # discard the ENCODING token
    first_real = PyToken(next(token_gen))
    return first_real.get_quote_type()
def get_chunks(code) -> Generator[Chunk, None, None]:
    """Tokenize *code* and yield successive Chunks of its token stream.

    When a token both completes a chunk and must start the next one
    (``append`` returns a truthy carry-over flag), it is appended to the
    freshly created chunk as well. The final, possibly incomplete, chunk
    is always yielded at the end.
    """
    reader = io.BytesIO(code.encode("utf-8")).readline
    chunk = Chunk()
    for token_info in tokenize.tokenize(reader):
        token = PyToken(token_info)
        carry_over = chunk.append(token)
        if chunk.complete:
            yield chunk
            chunk = Chunk()
            if carry_over:
                # Boundary token also belongs to the next chunk.
                carry_over = chunk.append(token)
                # assert not carry_over
                if chunk.complete:
                    yield chunk
                    chunk = Chunk()
    # Emit the trailing chunk once the token stream is exhausted.
    yield chunk