def test_streaming_decoder_single_bytes(self):
    """Feeding one byte at a time yields payload bytes as soon as they arrive."""
    decoder = netstring.StreamingDecoder()
    expected_per_byte = [
        (b'3', []),      # length digit: nothing decoded yet
        (b':', []),      # header separator: still nothing
        (b'a', [b'a']),  # each payload byte is emitted immediately
        (b'b', [b'b']),
        (b'c', [b'c']),
        (b',', [b'']),   # terminator yields the empty end-of-string chunk
    ]
    for byte, expected in expected_per_byte:
        self.assertEqual(expected, decoder.feed(byte))
def xtest_100MB_netstring(self):
    """Disabled (x-prefixed): stream 100 MiB through the decoder and compare digests.

    Bare-assert variant of test_100MB_netstring; kept out of the test
    runner's discovery by the ``xtest`` prefix.
    """
    num_blocks = 100 * 2**10
    bytes_per_block = 2**10
    source_digest = hashlib.sha256()
    decoded_digest = hashlib.sha256()
    decoder = netstring.StreamingDecoder()
    # The header announces the total payload length; no payload yet.
    assert decoder.feed(b'%d:' % (num_blocks * bytes_per_block)) == []
    for _ in range(num_blocks):
        data = os.urandom(bytes_per_block)
        source_digest.update(data)
        pieces = decoder.feed(data)
        # Mid-stream chunks are never the empty terminator sentinel.
        assert all(pieces)
        for piece in pieces:
            decoded_digest.update(piece)
    # The trailing comma closes the netstring with an empty chunk.
    assert decoder.feed(b',') == [b'']
    # The decoded stream must hash identically to the source stream.
    assert source_digest.digest() == decoded_digest.digest()
def test_100MB_netstring(self):
    """Stream 100 MiB of random payload through the decoder and verify digests match."""
    num_blocks = 100 * 2**10
    bytes_per_block = 2**10
    total_length = num_blocks * bytes_per_block
    source_digest = hashlib.sha256()
    decoded_digest = hashlib.sha256()
    decoder = netstring.StreamingDecoder()
    # The header announces the total payload length; no payload yet.
    header = bytes(str(total_length), 'utf-8') + b':'
    self.assertEqual([], decoder.feed(header))
    for _ in range(num_blocks):
        data = os.urandom(bytes_per_block)
        source_digest.update(data)
        pieces = decoder.feed(data)
        # Mid-stream chunks are never the empty terminator sentinel.
        self.assertTrue(all(pieces))
        for piece in pieces:
            decoded_digest.update(piece)
    # The trailing comma closes the netstring with an empty chunk.
    self.assertEqual([b''], decoder.feed(b','))
    # The decoded stream must hash identically to the source stream.
    self.assertEqual(source_digest.digest(), decoded_digest.digest())
def test_streaming_decoder_pending_data(self):
    """Several complete netstrings plus a trailing partial one in a single feed.

    The final ``4:12`` is incomplete (only 2 of 4 payload bytes, no
    terminator), so its available bytes are emitted as a pending chunk
    without a closing empty-chunk sentinel.
    """
    decoder = netstring.StreamingDecoder()
    # Fix: feed bytes, not str — every other test feeds bytes and the
    # decoder operates on byte input; the original passed a str literal.
    self.assertEqual([b'abcd', b'', b'!!!!', b'', b'', b'12'],
                     decoder.feed(b'4:abcd,4:!!!!,0:,4:12'))
def test_streaming_decoder_multiple_netstrings(self):
    """Netstring boundaries need not align with feed() call boundaries."""
    decoder = netstring.StreamingDecoder()
    # First feed carries the header and two of the three payload bytes,
    # which are emitted immediately as a partial chunk.
    self.assertEqual([b'ab'], decoder.feed(b'3:ab'))
    # Second feed finishes the first netstring (payload tail + empty
    # terminator chunk) and contains a complete second netstring.
    self.assertEqual([b'c', b'', b'ab', b''], decoder.feed(b'c,2:ab,'))