def _apply_in_expr(self, token: tokenize.TokenInfo) -> None:
    """Record where an ``in ...`` iterable expression starts."""
    assert self._current_ctx  # noqa: S101
    # Only the starting token matters here, not the whole expression:
    start = next_meaningful_token(
        self.file_tokens,
        self.file_tokens.index(token),
    )
    self._current_ctx.in_exprs.append(start)
def _apply_expr(self, token: tokenize.TokenInfo) -> None:
    """Assign the comprehension's result expression, exactly once."""
    assert self._current_ctx  # noqa: S101
    if self._current_ctx.expr:
        return  # we set this value only once
    # Locate the comprehension's opening bracket, then take the first
    # meaningful (non-NL) token after it: that token begins the actual
    # result expression, which we store on the current structure.
    bracket_index = self.file_tokens.index(self._current_ctx.left_bracket)
    self._current_ctx.expr = next_meaningful_token(
        self.file_tokens,
        bracket_index,
    )