def _check_individual_line(
    self,
    tokens: List[tokenize.TokenInfo],
    previous_token: Optional[tokenize.TokenInfo],
    next_token: Optional[tokenize.TokenInfo],
) -> None:
    """Inspect every string token on a logical line.

    Docstrings are exempt; only triple-quoted string tokens are
    forwarded to ``self._check_token`` for the detailed check.
    """
    for position, current in enumerate(tokens):
        is_plain_string = current.exact_type == tokenize.STRING
        if not is_plain_string or current in self._docstrings:
            continue  # non-strings and docstrings are never reported
        if has_triple_string_quotes(current.string):
            self._check_token(position, tokens, previous_token, next_token)
def _check_correct_multiline(self, token: tokenize.TokenInfo) -> None:
    """Report a triple-quoted string that does not span multiple lines.

    Docstrings are exempt from this rule.
    """
    _, string_def = split_prefixes(token)
    if not has_triple_string_quotes(string_def):
        return  # regular quotes: nothing to check
    fits_on_one_line = '\n' not in string_def
    if fits_on_one_line and token not in self._docstrings:
        self.add_violation(WrongMultilineStringViolation(token))