def get_tokens(s):
    """Given a string containing xonsh code, generates a stream of
    relevant PLY tokens using ``handle_token``.
    """
    # Shared lexer state handed to handle_token: the indentation stack,
    # the previously emitted token, the py-mode/delimiter stack, and the
    # raw tokenize stream over the UTF-8-encoded input.
    state = {
        'indents': [0],
        'last': None,
        'pymode': [(True, '', '', (0, 0))],
        'stream': tokenize.tokenize(BytesIO(s.encode('utf-8')).readline),
    }
    while True:
        # The try wraps both the pull from the stream and the dispatch,
        # so errors raised while handling a token are reported the same
        # way as errors raised while producing one.
        try:
            tok = next(state['stream'])
            yield from handle_token(state, tok)
        except StopIteration:
            if len(state['pymode']) > 1:
                # A mode-switching delimiter was opened but never closed.
                _pm, opener, _matcher, pos = state['pymode'][-1]
                lineno, col = pos
                msg = 'Unmatched "{}" at line {}, column {}'
                yield _new_token('ERRORTOKEN', msg.format(opener, lineno, col),
                                 (0, 0))
            return
        except tokenize.TokenError as err:
            # Recoverable in single-line mode (from the shell), e.g. an
            # EOF reached while scanning a string literal.
            yield _new_token('ERRORTOKEN', err.args[0], (0, 0))
            return
        except IndentationError as err:
            # This is never recoverable.
            yield _new_token('ERRORTOKEN', err, (0, 0))
            return
def get_tokens(s):
    """Given a string containing xonsh code, generates a stream of
    relevant PLY tokens using ``handle_token``.
    """
    # Lexer state threaded through handle_token: indent stack, last token
    # seen, py-mode/delimiter stack, and the project tokenize stream.
    state = {
        "indents": [0],
        "last": None,
        "pymode": [(True, "", "", (0, 0))],
        "stream": tokenize(io.BytesIO(s.encode("utf-8")).readline),
    }
    exhausted = False
    while not exhausted:
        # Keep both the pull and the dispatch inside the try so that
        # failures from handle_token are surfaced as ERRORTOKENs too.
        try:
            yield from handle_token(state, next(state["stream"]))
        except StopIteration:
            exhausted = True
            if len(state["pymode"]) > 1:
                # Some mode-opening delimiter never found its partner.
                _pm, opener, _matcher, pos = state["pymode"][-1]
                lineno, col = pos
                template = 'Unmatched "{}" at line {}, column {}'
                yield _new_token(
                    "ERRORTOKEN", template.format(opener, lineno, col), (0, 0)
                )
        except TokenError as err:
            # Recoverable in single-line mode (from the shell), e.g. an
            # EOF hit while scanning a string literal.
            exhausted = True
            yield _new_token("ERRORTOKEN", err.args[0], (0, 0))
        except IndentationError as err:
            # This is never recoverable.
            exhausted = True
            yield _new_token("ERRORTOKEN", err, (0, 0))
def get_tokens(s, tolerant):
    """Given a string containing xonsh code, generates a stream of
    relevant PLY tokens using ``handle_token``.

    When ``tolerant`` is true, the tokenizer is run in tolerant mode and
    the trailing unmatched-delimiter diagnostic is suppressed.
    """
    # Lexer state threaded through handle_token; ``tolerant`` is stored so
    # downstream handlers can consult it as well.
    state = {
        "indents": [0],
        "last": None,
        "pymode": [(True, "", "", (0, 0))],
        "stream": tokenize(io.BytesIO(s.encode("utf-8")).readline, tolerant),
        "tolerant": tolerant,
    }
    while True:
        # Both the pull from the stream and the dispatch stay inside the
        # try so errors from either path become ERRORTOKENs.
        try:
            tok = next(state["stream"])
            yield from handle_token(state, tok)
        except StopIteration:
            if len(state["pymode"]) > 1 and not tolerant:
                # An unmatched mode-opening delimiter remains on the stack.
                _pm, opener, _matcher, pos = state["pymode"][-1]
                lineno, col = pos
                template = 'Unmatched "{}" at line {}, column {}'
                yield _new_token(
                    "ERRORTOKEN", template.format(opener, lineno, col), (0, 0)
                )
            return
        except TokenError as err:
            # Recoverable in single-line mode (from the shell), e.g. an
            # EOF hit while scanning a string literal.
            yield _new_token("ERRORTOKEN", err.args[0], (0, 0))
            return
        except IndentationError as err:
            # This is never recoverable.
            yield _new_token("ERRORTOKEN", err, (0, 0))
            return
def get_tokens(s):
    """Given a string containing xonsh code, generates a stream of
    relevant PLY tokens using ``handle_token``.
    """
    # State shared with handle_token: indent stack, previous token,
    # py-mode/delimiter stack, and the project tokenize stream.
    state = {
        'indents': [0],
        'last': None,
        'pymode': [(True, '', '', (0, 0))],
        'stream': tokenize(BytesIO(s.encode('utf-8')).readline),
    }
    while True:
        # The dispatch stays inside the try so that exceptions raised by
        # handle_token are converted to ERRORTOKENs as well.
        try:
            tok = next(state['stream'])
            yield from handle_token(state, tok)
        except StopIteration:
            if len(state['pymode']) > 1:
                # A mode-opening delimiter was never matched by its closer.
                _pm, opener, _matcher, pos = state['pymode'][-1]
                lineno, col = pos
                template = 'Unmatched "{}" at line {}, column {}'
                yield _new_token('ERRORTOKEN',
                                 template.format(opener, lineno, col), (0, 0))
            return
        except TokenError as err:
            # Recoverable in single-line mode (from the shell), e.g. an
            # EOF reached while scanning a string literal.
            yield _new_token('ERRORTOKEN', err.args[0], (0, 0))
            return
        except IndentationError as err:
            # This is never recoverable.
            yield _new_token('ERRORTOKEN', err, (0, 0))
            return