def add_stream(self, stream, path, compress=None, flags=None):
    """Add the contents of an iterable to the MAR file.

    Args:
        stream (iterable): yields blocks of data
        path (str): name of this file in the MAR file
        compress (str): One of 'xz', 'bz2', or None. Defaults to None.
        flags (int): permission of this file in the MAR file.
            Defaults to None.
    """
    # Append after the last entry written so far.
    self.data_fileobj.seek(self.last_offset)

    # Wrap the stream in the requested compressor; None passes data through.
    if compress == 'bz2':
        stream = bz2_compress_stream(stream)
    elif compress == 'xz':
        stream = xz_compress_stream(stream)
    elif compress is None:
        pass
    else:
        raise ValueError('Unsupported compression type: {}'.format(compress))

    size = write_to_file(stream, self.data_fileobj)

    # On Windows, convert \ to / so the name stored in the MAR is portable.
    # very difficult to mock this out for coverage on linux
    if os.sep == '\\':  # pragma: no cover
        path = path.replace('\\', '/')

    # Record the index entry; six.u keeps the name unicode on Python 2.
    e = dict(
        name=six.u(path),
        offset=self.last_offset,
        size=size,
        flags=flags,
    )
    self.entries.append(e)
    self.last_offset += e['size']
def test_bz2_stream_large():
    """Round-trip a stream long enough that the compressor emits data
    before the input is exhausted."""
    count = 70000
    chunks = repeat(b'hello', count)
    round_tripped = bz2_decompress_stream(bz2_compress_stream(chunks, level=1))
    assert b''.join(round_tripped) == b'hello' * count
def test_auto_decompress():
    """auto_decompress_stream inflates compressed input and passes
    uncompressed input through untouched."""
    count = 10000
    expected = b'hello' * count

    # Compressed input: should be detected and decompressed.
    compressed = bz2_compress_stream(repeat(b'hello', count))
    assert b''.join(auto_decompress_stream(compressed)) == expected

    # Plain input: should come back unchanged.
    plain = repeat(b'hello', count)
    assert b''.join(auto_decompress_stream(plain)) == expected
def test_bz2_stream_exact_blocksize():
    """Round-trip a payload that lands exactly on the block size."""
    payload = b'0' * 100000
    decompressed = bz2_decompress_stream(bz2_compress_stream([payload], level=1))
    assert b''.join(decompressed) == payload
def test_bz2_streams(data, level):
    """Compress then decompress arbitrary data at the given level and
    check the bytes survive the round trip."""
    compressed = bz2_compress_stream(data, level)
    recovered = b''.join(bz2_decompress_stream(compressed))
    assert recovered == b''.join(data)