def test_BloscpackHeader_decode():
    """Generator test: BloscpackHeader.decode must invert encoding.

    Starts from the raw bytes of a default header, splices field-specific
    byte patterns in at known offsets, and checks that decoding yields a
    header equal to one constructed with the matching kwargs.

    NOTE(review): this module defines ``test_BloscpackHeader_decode`` a
    second time further down; this first copy is shadowed by that
    redefinition and will never be collected -- confirm which one is
    intended to survive.
    """
    format_version = struct.pack('<B', FORMAT_VERSION)
    # raw bytes of a header built with no keyword arguments
    raw = (MAGIC + format_version +
           b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff' +
           b'\xff\xff\xff\xff\xff\xff\xff\xff'
           b'\x00\x00\x00\x00\x00\x00\x00\x00')

    def patch_raw(offset, replacement):
        # splice `replacement` into `raw` starting at byte `offset`
        return raw[:offset] + replacement + raw[offset + len(replacement):]

    # default header: no kwargs, unmodified raw bytes
    yield nt.assert_equal, BloscpackHeader(), BloscpackHeader.decode(raw)

    cases = [
        # format_version
        ({'format_version': 23}, 4, b'\x17'),
        # options bitfield
        ({'offsets': True}, 5, b'\x01'),
        ({'metadata': True}, 5, b'\x02'),
        ({'metadata': True, 'offsets': True}, 5, b'\x03'),
        # checksum
        ({'checksum': 'adler32'}, 6, b'\x01'),
        ({'checksum': 'sha384'}, 6, b'\x07'),
        # typesize
        ({'typesize': 1}, 7, b'\x01'),
        ({'typesize': 2}, 7, b'\x02'),
        ({'typesize': 4}, 7, b'\x04'),
        ({'typesize': 8}, 7, b'\x08'),
        ({'typesize': blosc.BLOSC_MAX_TYPESIZE}, 7, b'\xff'),
        # chunk_size (little-endian int32)
        ({'chunk_size': 1}, 8, b'\x01\x00\x00\x00'),
        ({'chunk_size': reverse_pretty('1M')}, 8, b'\x00\x00\x10\x00'),
        ({'chunk_size': blosc.BLOSC_MAX_BUFFERSIZE}, 8, b'\xef\xff\xff\x7f'),
        # last_chunk (little-endian int32)
        ({'last_chunk': 1}, 12, b'\x01\x00\x00\x00'),
        ({'last_chunk': reverse_pretty('1M')}, 12, b'\x00\x00\x10\x00'),
        ({'last_chunk': blosc.BLOSC_MAX_BUFFERSIZE}, 12, b'\xef\xff\xff\x7f'),
        # nchunks (little-endian int64)
        ({'nchunks': 1}, 16, b'\x01\x00\x00\x00\x00\x00\x00\x00'),
        ({'nchunks': reverse_pretty('1M')}, 16,
         b'\x00\x00\x10\x00\x00\x00\x00\x00'),
        ({'nchunks': MAX_CHUNKS}, 16, b'\xff\xff\xff\xff\xff\xff\xff\x7f'),
        # max_app_chunks (int64, follows nchunks)
        ({'nchunks': 1, 'max_app_chunks': 0}, 16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\x00\x00\x00\x00\x00\x00\x00\x00'),
        ({'nchunks': 1, 'max_app_chunks': 1}, 16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\x01\x00\x00\x00\x00\x00\x00\x00'),
        ({'nchunks': 1, 'max_app_chunks': reverse_pretty('1M')}, 16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\x00\x00\x10\x00\x00\x00\x00\x00'),
        # maximum value is MAX_CHUNKS - 1 since nchunks is already 1
        ({'nchunks': 1, 'max_app_chunks': MAX_CHUNKS - 1}, 16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\xfe\xff\xff\xff\xff\xff\xff\x7f'),
    ]
    for kwargs, offset, replacement in cases:
        yield (nt.assert_equal,
               BloscpackHeader(**kwargs),
               BloscpackHeader.decode(patch_raw(offset, replacement)))
def test_BloscpackHeader_decode():
    """Generator test: decoding patched raw header bytes matches kwargs.

    Builds the serialized form of a default header, overwrites individual
    fields with hand-crafted little-endian byte strings, and verifies
    ``BloscpackHeader.decode`` reconstructs the equivalent header object.

    NOTE(review): this is the second, identical definition of this test in
    the module; it shadows the earlier copy -- confirm the duplication is
    intentional.
    """
    format_version = struct.pack('<B', FORMAT_VERSION)
    raw = (MAGIC
           + format_version
           + b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff'
           + b'\xff\xff\xff\xff\xff\xff\xff\xff'
             b'\x00\x00\x00\x00\x00\x00\x00\x00')

    def spliced(offset, chunk):
        # return `raw` with `chunk` written over the bytes at `offset`
        return raw[0:offset] + chunk + raw[offset + len(chunk):]

    # unmodified bytes decode to the all-defaults header
    yield nt.assert_equal, BloscpackHeader(), BloscpackHeader.decode(raw)

    for kwargs, offset, chunk in (
            # format_version byte
            ({'format_version': 23}, 4, b'\x17'),
            # options byte: bit 0 = offsets, bit 1 = metadata
            ({'offsets': True}, 5, b'\x01'),
            ({'metadata': True}, 5, b'\x02'),
            ({'metadata': True, 'offsets': True}, 5, b'\x03'),
            # checksum identifier byte
            ({'checksum': 'adler32'}, 6, b'\x01'),
            ({'checksum': 'sha384'}, 6, b'\x07'),
            # typesize byte
            ({'typesize': 1}, 7, b'\x01'),
            ({'typesize': 2}, 7, b'\x02'),
            ({'typesize': 4}, 7, b'\x04'),
            ({'typesize': 8}, 7, b'\x08'),
            ({'typesize': blosc.BLOSC_MAX_TYPESIZE}, 7, b'\xff'),
            # chunk_size, 4 bytes little-endian
            ({'chunk_size': 1}, 8, b'\x01\x00\x00\x00'),
            ({'chunk_size': reverse_pretty('1M')}, 8,
             b'\x00\x00\x10\x00'),
            ({'chunk_size': blosc.BLOSC_MAX_BUFFERSIZE}, 8,
             b'\xef\xff\xff\x7f'),
            # last_chunk, 4 bytes little-endian
            ({'last_chunk': 1}, 12, b'\x01\x00\x00\x00'),
            ({'last_chunk': reverse_pretty('1M')}, 12,
             b'\x00\x00\x10\x00'),
            ({'last_chunk': blosc.BLOSC_MAX_BUFFERSIZE}, 12,
             b'\xef\xff\xff\x7f'),
            # nchunks, 8 bytes little-endian
            ({'nchunks': 1}, 16, b'\x01\x00\x00\x00\x00\x00\x00\x00'),
            ({'nchunks': reverse_pretty('1M')}, 16,
             b'\x00\x00\x10\x00\x00\x00\x00\x00'),
            ({'nchunks': MAX_CHUNKS}, 16,
             b'\xff\xff\xff\xff\xff\xff\xff\x7f'),
            # nchunks + max_app_chunks, 16 bytes total
            ({'nchunks': 1, 'max_app_chunks': 0}, 16,
             b'\x01\x00\x00\x00\x00\x00\x00\x00'
             b'\x00\x00\x00\x00\x00\x00\x00\x00'),
            ({'nchunks': 1, 'max_app_chunks': 1}, 16,
             b'\x01\x00\x00\x00\x00\x00\x00\x00'
             b'\x01\x00\x00\x00\x00\x00\x00\x00'),
            ({'nchunks': 1, 'max_app_chunks': reverse_pretty('1M')}, 16,
             b'\x01\x00\x00\x00\x00\x00\x00\x00'
             b'\x00\x00\x10\x00\x00\x00\x00\x00'),
            # max_app_chunks caps at MAX_CHUNKS - 1 because nchunks is 1
            ({'nchunks': 1, 'max_app_chunks': MAX_CHUNKS - 1}, 16,
             b'\x01\x00\x00\x00\x00\x00\x00\x00'
             b'\xfe\xff\xff\xff\xff\xff\xff\x7f'),
    ):
        expected = BloscpackHeader(**kwargs)
        decoded = BloscpackHeader.decode(spliced(offset, chunk))
        yield nt.assert_equal, expected, decoded
def test_BloscpackHeader_encode():
    """Generator test: BloscpackHeader(**kwargs).encode() emits the
    expected raw bytes.

    Mirror of the decode test: for each field, build a header with one
    kwarg set and compare its encoding against the default header's raw
    bytes with the corresponding span overwritten.
    """
    # the raw encoded header as produced with no kwargs
    format_version = struct.pack('<B', FORMAT_VERSION)
    raw = (MAGIC + format_version +
           b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff' +
           b'\xff\xff\xff\xff\xff\xff\xff\xff'
           b'\x00\x00\x00\x00\x00\x00\x00\x00')

    def overwrite(offset, replacement):
        # default raw bytes with `replacement` written in at `offset`
        return raw[:offset] + replacement + raw[offset + len(replacement):]

    # no arguments: encoding must equal the pristine raw bytes
    yield nt.assert_equal, raw, BloscpackHeader().encode()

    table = [
        (4, struct.pack('<B', 23), {'format_version': 23}),
        # options bitfield
        (5, b'\x01', {'offsets': True}),
        (5, b'\x02', {'metadata': True}),
        (5, b'\x03', {'offsets': True, 'metadata': True}),
        # checksum
        (6, b'\x01', {'checksum': 'adler32'}),
        (6, b'\x08', {'checksum': 'sha512'}),
        # typesize
        (7, b'\x01', {'typesize': 1}),
        (7, b'\x02', {'typesize': 2}),
        (7, b'\x04', {'typesize': 4}),
        (7, b'\x10', {'typesize': 16}),
        (7, b'\xff', {'typesize': 255}),
        # chunk_size (-1 encodes as all-ones)
        (8, b'\xff\xff\xff\xff', {'chunk_size': -1}),
        (8, b'\x01\x00\x00\x00', {'chunk_size': 1}),
        (8, b'\x00\x00\x10\x00', {'chunk_size': reverse_pretty('1M')}),
        (8, b'\xef\xff\xff\x7f',
         {'chunk_size': blosc.BLOSC_MAX_BUFFERSIZE}),
        # last_chunk
        (12, b'\xff\xff\xff\xff', {'last_chunk': -1}),
        (12, b'\x01\x00\x00\x00', {'last_chunk': 1}),
        (12, b'\x00\x00\x10\x00', {'last_chunk': reverse_pretty('1M')}),
        (12, b'\xef\xff\xff\x7f',
         {'last_chunk': blosc.BLOSC_MAX_BUFFERSIZE}),
        # nchunks
        (16, b'\xff\xff\xff\xff\xff\xff\xff\xff', {'nchunks': -1}),
        (16, b'\x00\x00\x00\x00\x00\x00\x00\x00', {'nchunks': 0}),
        (16, b'\x01\x00\x00\x00\x00\x00\x00\x00', {'nchunks': 1}),
        (16, b'\x7f\x00\x00\x00\x00\x00\x00\x00', {'nchunks': 127}),
        (16, b'\xff\xff\xff\xff\xff\xff\xff\x7f',
         {'nchunks': MAX_CHUNKS}),
        # max_app_chunks (8 bytes after nchunks)
        (16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\x00\x00\x00\x00\x00\x00\x00\x00',
         {'nchunks': 1, 'max_app_chunks': 0}),
        (16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\x01\x00\x00\x00\x00\x00\x00\x00',
         {'nchunks': 1, 'max_app_chunks': 1}),
        (16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\x7f\x00\x00\x00\x00\x00\x00\x00',
         {'nchunks': 1, 'max_app_chunks': 127}),
        # maximum value is MAX_CHUNKS - 1 since nchunks is already 1
        (16,
         b'\x01\x00\x00\x00\x00\x00\x00\x00'
         b'\xfe\xff\xff\xff\xff\xff\xff\x7f',
         {'nchunks': 1, 'max_app_chunks': MAX_CHUNKS - 1}),
    ]
    for offset, replacement, kwargs in table:
        yield (nt.assert_equal,
               overwrite(offset, replacement),
               BloscpackHeader(**kwargs).encode())
def test_append_fp():
    """End-to-end check of appending to a compressed file object.

    Verifies three stages: (1) the freshly prepared compressed file has
    the expected header and chunk offsets, (2) after ``reset_append_fp``
    the header and offsets reflect the appended data, and (3) unpacking
    the result reproduces the original payload twice over.
    """
    compressed, payload, payload_size, decompressed = \
        prep_array_for_append()

    # --- stage 1: header/offsets before the append -------------------
    # [0:4:3] selects elements 0 (header) and 3 (offsets) of the tuple
    header_before, offsets_before = \
        reset_read_beginning(compressed)[0:4:3]
    nt.assert_equal(
        BloscpackHeader(
            format_version=3,
            offsets=True,
            metadata=False,
            checksum='adler32',
            typesize=8,
            chunk_size=1048576,
            last_chunk=271360,
            nchunks=16,
            max_app_chunks=160,
        ),
        header_before)
    nt.assert_equal(
        [1440, 195299, 368931, 497746, 634063, 767529, 903070,
         1038157, 1174555, 1297424, 1420339, 1544469, 1667805,
         1791142, 1914839, 2038360],
        offsets_before)

    # --- stage 2: perform the append and re-check --------------------
    reset_append_fp(compressed, payload, payload_size)
    header_after, offsets_after = \
        reset_read_beginning(compressed)[0:4:3]
    nt.assert_equal(
        {'chunk_size': 1048576,
         'nchunks': 31,
         'last_chunk': 542720,
         'max_app_chunks': 145,
         'format_version': 3,
         'offsets': True,
         'checksum': 'adler32',
         'typesize': 8,
         'metadata': False},
        header_after)
    nt.assert_equal(
        [1440, 195299, 368931, 497746, 634063, 767529, 903070,
         1038157, 1174555, 1297424, 1420339, 1544469, 1667805,
         1791142, 1914839, 2038360, 2221798, 2390194, 2533644,
         2663010, 2803431, 2936406, 3071130, 3209565, 3333390,
         3457344, 3581581, 3705533, 3829188, 3952136, 4075509],
        offsets_after)

    # --- stage 3: unpack and compare against payload * 2 -------------
    source = CompressedFPSource(compressed)
    sink = PlainFPSink(decompressed)
    unpack(source, sink)
    decompressed.seek(0)
    payload.seek(0)
    original_bytes = payload.read()
    roundtrip_bytes = decompressed.read()
    nt.assert_equal(len(roundtrip_bytes), len(original_bytes * 2))
    nt.assert_equal(roundtrip_bytes, original_bytes * 2)