def reset_append_fp(original_fp, new_content_fp, new_size, blosc_args=None):
    """ Like ``append_fp`` but with ``seek(0)`` on the file pointers. """
    nchunks = append_fp(original_fp, new_content_fp, new_size,
                        blosc_args=blosc_args)
    original_fp.seek(0)
    new_content_fp.seek(0)
    return nchunks
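

# A hypothetical usage sketch (not part of the original suite): exercise
# ``reset_append_fp`` and check that it rewinds both file pointers after
# delegating to ``append_fp``. It reuses the ``prep_array_for_append`` and
# ``nt`` (nose.tools) helpers already used by the test below; the assumption
# that appending non-empty content yields at least one chunk is mine.
def test_reset_append_fp_rewinds():
    orig, new, new_size, dcmp = prep_array_for_append()
    nchunks = reset_append_fp(orig, new, new_size)
    # both file pointers should be back at the start
    nt.assert_equal(orig.tell(), 0)
    nt.assert_equal(new.tell(), 0)
    # assumed: non-empty appended content produces at least one chunk
    nt.assert_true(nchunks >= 1)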


def test_mixing_clevel():
    # the first set of chunks has max compression
    blosc_args = BloscArgs(clevel=9)
    orig, new, new_size, dcmp = prep_array_for_append()

    # get the original size
    orig.seek(0, 2)
    orig_size = orig.tell()
    orig.seek(0)

    # get a backup of the settings
    bloscpack_header, metadata, metadata_header, offsets = \
        reset_read_beginning(orig)
    # compressed size of the last chunk, including checksum
    last_chunk_compressed_size = orig_size - offsets[-1]

    # do append; use the typesize from the file and
    # make the second set of chunks have no compression
    blosc_args = BloscArgs(typesize=None, clevel=0)
    nchunks = append_fp(orig, new, new_size, blosc_args=blosc_args)

    # get the final size
    orig.seek(0, 2)
    final_size = orig.tell()
    orig.seek(0)

    # the original file minus the compressed size of the last chunk
    discounted_orig_size = orig_size - last_chunk_compressed_size
    # size of the appended data:
    # * raw new size, since we have no compression
    # * uncompressed size of the last chunk
    # * nchunks + 1 times the blosc and checksum overhead
    appended_size = new_size + bloscpack_header['last_chunk'] \
        + (nchunks + 1) * (16 + 4)
    # final size should be original plus appended data
    nt.assert_equal(final_size, appended_size + discounted_orig_size)

    # check by unpacking
    source = CompressedFPSource(orig)
    sink = PlainFPSink(dcmp)
    unpack(source, sink)
    dcmp.seek(0)
    new.seek(0)
    new_str = new.read()
    dcmp_str = dcmp.read()
    nt.assert_equal(len(dcmp_str), len(new_str * 2))
    nt.assert_equal(dcmp_str, new_str * 2)
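
# A worked sketch of the size accounting asserted above, kept as a standalone
# helper for clarity. Assumptions (taken from the comments in
# ``test_mixing_clevel``, not verified against the bloscpack format spec
# here): every chunk carries a 16-byte blosc header plus a 4-byte checksum,
# the appended payload is stored uncompressed (clevel=0), and the old last
# chunk is rewritten along with the ``nchunks`` new ones, hence the
# ``nchunks + 1`` factor in the overhead term.
def expected_appended_size(new_size, last_chunk_size, nchunks,
                           blosc_header=16, checksum=4):
    # raw payload + rewritten last chunk + per-chunk framing overhead
    return (new_size + last_chunk_size
            + (nchunks + 1) * (blosc_header + checksum))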