Code example #1
0
File: tokenize.py — Project: hsfzxjy/lambdex
    def _handle_token(self, token: TokenInfo):
        """Drive the statement-buffering state machine for one annotated token.

        Generator: yields buffered tokens (possibly with an extra LAST_STMT
        sentinel spliced in) and sets ``self.action`` to tell the caller
        whether to start or stop buffering.
        """
        # A STMT_END while not buffering opens a buffer: subsequent tokens are
        # collected until we learn how the statement sequence terminates.
        if token.annotation == A.STMT_END and not self.buffering:
            self.action = actions.StartBuffer()
        elif self.buffering and token.annotation in (
                A.BODY_RSQB,
                A.CLS_BODY_RSQB,
                A.STMT_START,
        ):

            # A closing bracket (not a new statement) means the buffered
            # statement was the last one in the body: mark it with a
            # LAST_STMT_* sentinel whose flavor records whether a trailing
            # comma was seen (``insert_last_stmt_at`` is set elsewhere —
            # presumably when a comma is buffered; TODO confirm).
            if token.annotation != A.STMT_START:
                pos = self.insert_last_stmt_at
                if pos is None:
                    annotation = A.LAST_STMT_WITHOUT_COMMA
                else:
                    annotation = A.LAST_STMT_WITH_COMMA
                # With no recorded comma position, splice right after the
                # first buffered token.
                pos = pos or 1

                yield from self.buffer[:pos]
                yield TokenInfo.new_sentinel_after(self.buffer[pos - 1],
                                                   annotation)
                yield from self.buffer[pos:]
            else:
                # A new statement follows — no LAST_STMT marker needed.
                yield from self.buffer

            self.insert_last_stmt_at = None
            # Buffer already re-emitted above, so tell the caller not to
            # yield it again.
            self.action = actions.StopBuffer(dont_yield_buffer=True)
        # NOTE(review): this compares the TokenInfo object itself (not
        # ``token.annotation``) against an annotation constant, so it looks
        # always-False; STMT_START is also already handled by the branch
        # above, making this branch appear unreachable — confirm intent.
        elif self.buffering and token == A.STMT_START:
            self.insert_last_stmt_at = None
            self.action = actions.StopBuffer()
Code example #2
0
def r(ctx: Context, token: TokenInfo):
    """Split the statement at a comma token.

    Emits, in order: a STMT_END sentinel before *token*, the comma itself
    re-annotated as STMT_COMMA, and a STMT_START sentinel after it.  The
    token is pushed here, so the caller must not store it again.
    """
    ctx.push_ret(TokenInfo.new_sentinel_before(token, A.STMT_END))

    token.annotation = A.STMT_COMMA
    ctx.push_ret(token)

    ctx.push_ret(TokenInfo.new_sentinel_after(token, A.STMT_START))

    return actions.Default(dont_store=True)
Code example #3
0
def r(ctx: Context, token: TokenInfo):
    """Open a lambdex body list at its left square bracket.

    Switches the state to IN_LBDX_BODY_LIST, records the bracket on both
    the op stack and the output (annotated BODY_LSQB), and emits a
    STMT_START sentinel so the first statement is delimited.
    """
    ctx.pop_state()
    ctx.push_state(State.IN_LBDX_BODY_LIST)

    token.annotation = A.BODY_LSQB
    ctx.push_op(token)
    ctx.push_ret(token)
    ctx.push_ret(TokenInfo.new_sentinel_after(token, A.STMT_START))

    return actions.Default(dont_store=True)
Code example #4
0
def r(ctx: Context, token: TokenInfo):
    """Split the statement at a comma, but only directly inside a body list.

    Applies only when the innermost open bracket is a (class) body LSQB;
    otherwise returns None so the default handling runs.  On a match it
    emits STMT_END sentinel / STMT_COMMA token / STMT_START sentinel.
    """
    opening = ctx.last_op[0].annotation
    if opening != A.BODY_LSQB and opening != A.CLS_BODY_LSQB:
        return

    ctx.push_ret(TokenInfo.new_sentinel_before(token, A.STMT_END))

    token.annotation = A.STMT_COMMA
    ctx.push_ret(token)

    ctx.push_ret(TokenInfo.new_sentinel_after(token, A.STMT_START))

    return actions.Default(dont_store=True)
Code example #5
0
def r(ctx: Context, token: TokenInfo):
    """Leave the current state at a comma and delimit the statement.

    Pops the state, then emits STMT_END sentinel / STMT_COMMA token /
    STMT_START sentinel.  If a buffer is active it is flushed without
    storing this token; otherwise the default no-store action is returned.
    """
    ctx.pop_state()

    ctx.push_ret(TokenInfo.new_sentinel_before(token, A.STMT_END))

    token.annotation = A.STMT_COMMA
    ctx.push_ret(token)

    ctx.push_ret(TokenInfo.new_sentinel_after(token, A.STMT_START))

    if not ctx.is_buffering():
        return actions.Default(dont_store=True)
    return actions.StopBuffer(dont_store=True)
Code example #6
0
def r(ctx: Context, token: TokenInfo):
    """Open a lambdex class body at its left square bracket.

    If a buffer is active, stop it first and re-consume this token on the
    next pass.  Otherwise switch to IN_LBDX_CLS_BODY, record the bracket
    (annotated CLS_BODY_LSQB) on the op stack and the output, annotate the
    cached clause declarer, and emit a STMT_START sentinel.
    """
    if ctx.is_buffering():
        # Flush first; dont_consume makes this token be processed again.
        return actions.StopBuffer(dont_consume=True)

    ctx.pop_state()
    ctx.push_state(State.IN_LBDX_CLS_BODY)

    token.annotation = A.CLS_BODY_LSQB
    ctx.push_op(token)

    _annotate_clause_declarer(ctx)
    ctx.cache = None

    ctx.push_ret(token)
    ctx.push_ret(TokenInfo.new_sentinel_after(token, A.STMT_START))

    return actions.Default(dont_store=True)