def write_comment(self, comment=''):
    """Emit *comment* as a comment line (prefix, words, trailing newline).

    Raises ValueError if this writer is already closed or if *comment*
    contains characters outside LEGAL_COMMENT.
    """
    _check_instance(str, comment)
    if self.closed:
        raise ValueError("attempt to write to object after it was closed")
    disallowed = frozenset(comment) - self.LEGAL_COMMENT
    if disallowed:
        raise ValueError("invalid characters in comment %s" % (repr(comment),))
    # comments bypass the encoder entirely — written verbatim
    self._ensure_whitespace(self.NEWLINE)
    self._write_graphicals((self.comment_prefix,))
    self._write_graphicals(comment.split())
    self._write_string(self.NEWLINE)
def write_comment(self, comment=''):
    """Write the comment string to the stream, followed by a newline.

    The comment is split into words and written after the comment
    prefix, with no encoding applied. Raises ValueError when the
    writer is closed or the comment holds illegal characters.
    """
    _check_instance(str, comment)
    if self.closed:
        raise ValueError("attempt to write to object after it was closed")
    if frozenset(comment) - self.LEGAL_COMMENT:
        raise ValueError("invalid characters in comment %s" % (repr(comment),))
    # no encoding — start on a fresh line, then prefix + words + newline
    self._ensure_whitespace(self.NEWLINE)
    prefix_tuple = (self.comment_prefix,)
    self._write_graphicals(prefix_tuple)
    self._write_graphicals(comment.split())
    self._write_string(self.NEWLINE)
def encoder(iterable):
    """Yield the encoded form of each string token drawn from *iterable*.

    Each token is validated, mapped character-by-character through
    ``get`` (falling back to the character itself), and guaranteed
    never to collide with the end-of-data sentinel.
    """
    for token in iterable:
        _check_instance(str, token)
        if frozenset(token) & NOT_ENCODEABLE:
            raise ValueError("illegal characters in token %s" % (repr(token),))
        # empty strings produce nothing when iterated, so they are
        # looked up as a whole instead of char-by-char
        if token:
            encoded = joinnosp(get(ch, ch) for ch in token)
        else:
            encoded = get(token)
        # the mapping must always produce output
        assert encoded, repr(token)
        if encoded == end_of_data_token:
            # collision with the sentinel: force-encode the first character
            encoded = encode_char(encoded[0]) + encoded[1:]
            assert encoded != end_of_data_token
        yield encoded
def encoder(iterable):
    """Generator: validate and encode every token from *iterable*.

    Rejects tokens containing NOT_ENCODEABLE characters and ensures
    no emitted value equals the end-of-data sentinel.
    """
    for token in iterable:
        _check_instance(str, token)
        illegal = frozenset(token) & NOT_ENCODEABLE
        if illegal:
            raise ValueError("illegal characters in token %s" % (repr(token),))
        # slightly awkward: '' doesn't iterate, so it gets a whole-token lookup
        encoded = joinnosp(get(c, c) for c in token) if token else get(token)
        assert encoded, repr(token)  # we must have generated something
        if encoded == end_of_data_token:
            # never yield the sentinel itself — force-encode its first char
            encoded = encode_char(encoded[0]) + encoded[1:]
            assert encoded != end_of_data_token
        yield encoded