def setUp(self):
    """Build a two-thread FacebookChatHistory fixture and a UTF-8 sink.

    ``self.output`` is the raw byte buffer the writers ultimately fill;
    ``self.output_handle`` is the text-mode wrapper tests write through.
    """
    pair_thread = ChatThread(
        participants=['test_owner', 'test_user']
    ).add_message(ChatMessage(_NOW, 'test_owner', '白人看不懂', 0))
    group_thread = ChatThread(
        participants=['test_owner', 'test_user_1', 'test_user_2']
    ).add_message(
        ChatMessage(_NOW, 'test_owner', '白人看不懂', 1)
    ).add_message(
        ChatMessage(_NOW, 'test_user_1', 'Что это?', 2)
    ).add_message(
        ChatMessage(_NOW, 'test_user_2', 'En ymmärrä', 2)
    )
    threads = {
        'test_owner,test_user': pair_thread,
        'test_user,test_user_1,test_user_2': group_thread,
    }
    self.history = FacebookChatHistory(user="******", threads=threads)
    self.output = BytesIO()
    if six.PY3:
        # Python 3: TextIOWrapper turns str writes into UTF-8 bytes.
        import io
        self.output_handle = io.TextIOWrapper(
            self.output, encoding='UTF-8', errors='replace')
    else:
        # Python 2: the codecs StreamWriter performs the same encoding.
        from encodings.utf_8 import StreamWriter
        self.output_handle = StreamWriter(self.output)
class TestWriters(unittest.TestCase):
    """Smoke tests: every built-in writer runs over a small chat fixture."""

    def setUp(self):
        """Build a two-thread FacebookChatHistory fixture and a UTF-8 sink."""
        pair_thread = ChatThread(
            participants=['test_owner', 'test_user']
        ).add_message(ChatMessage(_NOW, 'test_owner', '白人看不懂', 0))
        group_thread = ChatThread(
            participants=['test_owner', 'test_user_1', 'test_user_2']
        ).add_message(
            ChatMessage(_NOW, 'test_owner', '白人看不懂', 1)
        ).add_message(
            ChatMessage(_NOW, 'test_user_1', 'Что это?', 2)
        ).add_message(
            ChatMessage(_NOW, 'test_user_2', 'En ymmärrä', 2)
        )
        self.history = FacebookChatHistory(
            user="******",
            threads={
                'test_owner,test_user': pair_thread,
                'test_user,test_user_1,test_user_2': group_thread,
            })
        self.output = BytesIO()
        if six.PY3:
            # Python 3: TextIOWrapper turns str writes into UTF-8 bytes.
            import io
            self.output_handle = io.TextIOWrapper(
                self.output, encoding='UTF-8', errors='replace')
        else:
            # Python 2: the codecs StreamWriter performs the same encoding.
            from encodings.utf_8 import StreamWriter
            self.output_handle = StreamWriter(self.output)

    def assert_output(self, format, expected=None):
        """Run the writer for *format* over the fixture.

        When *expected* is given, the raw bytes produced must match it
        exactly; otherwise this is only a does-not-crash check.
        """
        write(format, self.history, stream_or_dir=self.output_handle)
        self.output_handle.flush()
        if expected is not None:
            self.assertEqual(expected, self.output.getvalue())

    def test_json(self):
        # TODO: pin the exact json output once it is stable.
        self.assert_output('json')

    def test_csv(self):
        # TODO: pin the exact csv output once it is stable.
        self.assert_output('csv')

    def test_yaml(self):
        # TODO: pin the exact yaml output once it is stable.
        self.assert_output('yaml')

    def test_text(self):
        # TODO: pin the exact text output once it is stable.
        self.assert_output('text')
def stream(self, stream, wfile):
    """Copy the UTF-8 text *stream* into the binary *wfile* until EOF.

    Neither stream is closed here and exceptions propagate unchanged —
    the caller owns both handles.
    """
    from encodings.utf_8 import StreamWriter

    # The StreamWriter encodes each str chunk to UTF-8 bytes before it
    # reaches the underlying binary file object.
    utf8_sink = StreamWriter(wfile)
    shutil.copyfileobj(stream, utf8_sink)
    wfile.flush()
def savehtml(url, content):
    """Save the page *content* for *url* under ``outputlocation``.

    The URL path below ``rooturl`` is mirrored as a Windows-style
    directory tree (backslash-joined, created on demand); the root URL
    itself is written as ``Index.html``.  ``?page=`` query fragments are
    rewritten to ``_`` so the name is a legal Windows file name.
    Existing files are never overwritten.

    Relies on module globals ``rooturl``, ``outputlocation``,
    ``generatecontent`` and ``StreamWriter``.
    """
    if url == rooturl:
        outfile = outputlocation + "\\" + "Index.html"
    else:
        # Recreate the URL's directory structure under outputlocation.
        relativepath = url[len(rooturl):]
        dirs = relativepath.split('/')
        dirs.pop()  # last component is the file itself, not a directory
        currentdir = outputlocation
        for stepdir in dirs:
            currentdir = currentdir + "\\" + stepdir
            if not os.path.exists(currentdir):
                os.mkdir(currentdir)
        outfile = outputlocation + "\\" + relativepath.replace('/', '\\')
    if "?page=" in outfile:
        outfile = outfile.replace("?page=", "_")
    if not os.path.exists(outfile):
        # BUGFIX: the original opened the file by hand and only closed it
        # on the success path, leaking the handle if generatecontent() or
        # the write raised; it also shadowed the builtin name ``file``.
        with open(outfile, "wb") as out:
            writer = StreamWriter(out)
            writer.write(generatecontent(content))
# Wrap them in a safe UTF-8 encoders. PDB doesn't like it when # the streams are wrapped in StreamWriter. sys.stdout = io.TextIOWrapper(sys.stdout, encoding='UTF-8', errors='replace') sys.stderr = io.TextIOWrapper(sys.stderr, encoding='UTF-8', errors='replace') else: from encodings.utf_8 import StreamWriter # Wrap the raw Python 2 output streams in smart UTF-8 encoders. # Python 2 doesn't like it when the raw file handles are wrapped in # TextIOWrapper. sys.stderr = StreamWriter(sys.stderr) sys.stdout = StreamWriter(sys.stdout) app = clip.App() @app.main(description='A program for converting Facebook chat history to a ' 'number of more usable formats') @clip.opt('-f', '--format', default='text', help='Format to convert to (%s)' % ', '.join(BUILTIN_WRITERS + ('stats', ))) @clip.opt('-t', '--thread', default=None,
from six.moves.urllib.request import url2pathname, urlopen from rdflib import RDF, RDFS, URIRef, BNode, Literal, Namespace, Graph from rdflib.exceptions import ParserError from rdflib.util import first import logging _logger = logging.getLogger("parser_rdfcore") verbose = 0 from encodings.utf_8 import StreamWriter import sys sw = StreamWriter(sys.stdout) def write(msg): _logger.info(msg + "\n") # sw.write(msg+"\n") class TestStore(Graph): __test__ = False def __init__(self, expected): super(TestStore, self).__init__() self.expected = expected def add(self, spo):
def _compress_record(cls, record, buf):
    """Serialize *record* as JSON and gzip it into *buf*.

    *buf* is rewound and truncated first, so on return it contains
    exactly one gzip member holding the UTF-8 JSON encoding of *record*.
    """
    buf.seek(0, 0)
    buf.truncate()
    # compresslevel=9: favour size over CPU for archived records.
    with GzipFile(fileobj=buf, mode='wb', compresslevel=9) as g:
        # BUGFIX: 'backslashescape' is not a registered codec error
        # handler (the standard one is 'backslashreplace'), so the old
        # value would raise LookupError the first time the handler was
        # actually invoked during encoding.  NOTE(review): if the
        # project registered 'backslashescape' via codecs.register_error
        # somewhere, restore the old name.
        with StreamWriter(stream=g, errors='backslashreplace') as s:
            ujson.dump(record, s, escape_forward_slashes=False)