Example #1
def lexer(code: StringIO, *, fname: Optional[str] = None) -> StringIO:
    out = TokenIO()
    for ptkn, ctkn in zip_prev(TokenIO(code), STARTMARKER):
        if is_pow(ptkn, ctkn):
            out.write_token(ctkn, override='**__PYSH_POW__**')
        else:
            out.write_token(ctkn)

    return out
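
These snippets come without their supporting definitions: `TokenIO`, `STARTMARKER`, and `is_pow` are project-specific, and judging by the override string, `is_pow(ptkn, ctkn)` presumably detects a `**` power operator so it can be rewritten to the `__PYSH_POW__` marker. The one helper whose behavior can be reconstructed from its call sites is `zip_prev`; a minimal sketch, assuming it pairs every token with its predecessor and seeds the first pair with an optional sentinel such as `STARTMARKER`:

from typing import Iterable, Iterator, Optional, Tuple, TypeVar

T = TypeVar('T')

def zip_prev(items: Iterable[T],
             first: Optional[T] = None) -> Iterator[Tuple[Optional[T], T]]:
    # Hypothetical reconstruction: yields (previous, current) pairs,
    # pairing the very first item with `first` (e.g. STARTMARKER).
    # Matches both call styles in these examples: zip_prev(tokens,
    # STARTMARKER) and zip_prev(tokens) with no sentinel.
    prev: Optional[T] = first
    for item in items:
        yield prev, item
        prev = item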
Example #2
def lexer(code: StringIO) -> StringIO:
    out = TokenIO()
    for ptkn, ctkn in zip_prev(TokenIO(code), STARTMARKER):
        if ctkn.type == STRING and ptkn.type == NAME and ptkn.string == '_':
            if ctkn.start == ptkn.end:
                out.write_token(ctkn, override='[r' + ctkn.string + ']')
                continue

        out.write_token(ctkn)

    return out
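
Assuming `TokenIO` wraps the standard `tokenize` module, the `ctkn.start == ptkn.end` test checks that the string literal sits flush against the `_` name, so `_'pat'` is rewritten to `[r'pat']` while `_ 'pat'` is left alone. A standalone demonstration of that adjacency condition against plain `tokenize` output:

import io
import tokenize

# `_'pat'` tokenizes as NAME('_') directly followed by STRING("'pat'");
# the STRING's start position equals the NAME's end position, which is
# exactly the adjacency condition checked above.
toks = list(tokenize.generate_tokens(io.StringIO("_'pat'\n").readline))
name, string = toks[0], toks[1]
assert name.type == tokenize.NAME and name.string == '_'
assert string.type == tokenize.STRING
assert string.start == name.end  # flush: no whitespace between tokens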
Example #3
def lexer(code: StringIO) -> StringIO:
    out = TokenIO()
    tokens = TokenIO(code).iter_tokens()
    for ptkn, ctkn in zip_prev(tokens, STARTMARKER):
        assert isinstance(ptkn, TokenInfo)  # XXX: narrow ptkn to TokenInfo for mypy

        if ctkn.type == STRING and ptkn.type == NAME and ptkn.string == '_':
            if ctkn.start == ptkn.end:
                out.write_token(ctkn, override='[r' + ctkn.string + ']')
                continue

        out.write_token(ctkn)

    return out
Example #4
def lexer(code: StringIO) -> StringIO:
    out = TokenIO()
    for ptkn, ctkn in zip_prev(TokenIO(code), STARTMARKER):
        if is_prefix(ctkn):
            continue  # defer until next token

        if is_prefix(ptkn):
            if ctkn.type == STRING and ctkn.start == ptkn.end:
                out.write_token(
                    ptkn,
                    override='__PYSH_RESTRING__(r' + ctkn.string + ')',
                )
                continue
            else:
                out.write_token(ptkn)

        out.write_token(ctkn)

    return out
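
Examples #4 and #5 factor the prefix test out into `is_prefix` and defer writing the prefix token until the next token is known. `is_prefix` itself isn't shown; going by the inline check in Examples #2 and #3, it is presumably something close to this sketch:

from tokenize import NAME, TokenInfo

def is_prefix(tkn: TokenInfo) -> bool:
    # Hypothetical reconstruction of the project's helper, mirroring
    # the inline test in Examples #2 and #3: a "prefix" is the bare
    # `_` NAME that may introduce a regex-string literal.
    return tkn.type == NAME and tkn.string == '_'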
Example #5
def lexer(code: StringIO) -> StringIO:
    out = TokenIO()
    tokens = TokenIO(code).iter_tokens()
    for ptkn, ctkn in zip_prev(tokens, STARTMARKER):
        assert isinstance(ptkn, TokenInfo)  # XXX: narrow ptkn to TokenInfo for mypy

        if is_prefix(ctkn):
            continue  # defer until next token

        if is_prefix(ptkn):
            if ctkn.type == STRING and ctkn.start == ptkn.end:
                out.write_token(
                    ptkn,
                    override='__PYSH_RESTRING__(r' + ctkn.string + ')',
                )
                continue
            else:
                out.write_token(ptkn)

        out.write_token(ctkn)

    return out
Example #6
def tokens_with_space(tokens):
    for ptkn, ctkn in zip_prev(tokens):
        if ptkn and ctkn.type != ENDMARKER and ctkn.start > ptkn.end:
            yield WsTokenInfo(WS, ' ', ptkn.end, ctkn.start, ptkn.line)
        yield ctkn
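
`WS` and `WsTokenInfo` are again project-specific: the generator re-inserts a single-space token wherever two consecutive tokens are separated in the source, never inserting one before the ENDMARKER. A self-contained demo with hypothetical stand-ins for the missing names (and the `zip_prev` sketch repeated so the snippet runs on its own):

import io
import tokenize
from token import ENDMARKER
from tokenize import TokenInfo

WS = -1                  # hypothetical type code for synthesized whitespace
WsTokenInfo = TokenInfo  # assumed to share TokenInfo's (type, string, start, end, line) shape

def zip_prev(items, first=None):
    prev = first
    for item in items:
        yield prev, item
        prev = item

def tokens_with_space(tokens):
    # Example #6's generator, repeated verbatim so this runs alone.
    for ptkn, ctkn in zip_prev(tokens):
        if ptkn and ctkn.type != ENDMARKER and ctkn.start > ptkn.end:
            yield WsTokenInfo(WS, ' ', ptkn.end, ctkn.start, ptkn.line)
        yield ctkn

toks = tokenize.generate_tokens(io.StringIO('a = 1\n').readline)
for tkn in tokens_with_space(toks):
    print(tkn.type, repr(tkn.string))
# The gaps around '=' surface as two WS tokens of type -1.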