def test_pretty_filesieze():
    """Round-trip checks for pretty_size and reverse_pretty.

    NOTE(review): "filesieze" looks like a typo for "filesize", but the
    name is kept as-is so test discovery/reporting is unchanged.
    """
    # pretty_size: byte count -> human-readable string.
    pretty_cases = [
        ('0B', 0),
        ('9.0T', 9898989898879),
        ('4.78G', 5129898234),
        ('12.3M', 12898234),
        ('966.7K', 989898),
        ('128.0B', 128),
    ]
    for expected, nbytes in pretty_cases:
        nt.assert_equal(expected, pretty_size(nbytes))
    # reverse_pretty: human-readable string -> byte count.
    reverse_cases = [
        (0, '0B'),
        (8, '8B'),
        (8192, '8K'),
        (134217728, '128M'),
        (2147483648, '2G'),
        (2199023255552, '2T'),
    ]
    for expected, pretty in reverse_cases:
        nt.assert_equal(expected, reverse_pretty(pretty))
    # can't handle Petabytes, yet
    nt.assert_raises(ValueError, reverse_pretty, '2P')
def get_fs(file_name):
    """Return the size of *file_name* as a human-readable string."""
    size_in_bytes = path.getsize(file_name)
    return bpp.pretty_size(size_in_bytes)
# Benchmark: sweep chunk_size over powers of two (2**19 .. 2**23, half-power
# steps) and report mean pack/unpack wall-clock time and compression ratio.
with bpt.create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
    print('create the test data', end='')
    # 100 is the test-array size argument to the project helper;
    # presumably a size multiplier (e.g. MB) — confirm against bpt.create_array.
    bpt.create_array(100, in_file, progress=bpt.simple_progress)
    repeats = 3  # timings below are averaged over this many runs
    print("%s\t%s\t\t%s\t\t%s" %
          ("chunk_size", "comp-time", "decomp-time", "ratio"))
    for chunk_size in (int(2**i) for i in numpy.arange(19, 23.5, 0.5)):
        cmp_times, dcmp_times = [], []
        for _ in range(repeats):
            # drop_caches()/sync() bracket each run so the OS page cache
            # does not skew the wall-clock measurement.
            drop_caches()
            tic = time.time()
            pack_file_to_file(in_file, out_file, chunk_size=chunk_size)
            sync()  # flush pending writes before stopping the clock
            toc = time.time()
            cmp_times.append(toc - tic)
            drop_caches()
            tic = time.time()
            unpack_file_from_file(out_file, dcmp_file)
            sync()
            toc = time.time()
            dcmp_times.append(toc - tic)
        # ratio > 1 means the packed file is smaller than the input.
        ratio = path.getsize(in_file) / path.getsize(out_file)
        print("%s\t\t%f\t\t%f\t\t%f" % (
            pretty_size(chunk_size),
            sum(cmp_times) / repeats,
            sum(dcmp_times) / repeats,
            ratio,
        ))
# Benchmark: time pack/unpack at chunk sizes 2**19 .. 2**23 (half-power
# steps), averaging over several repeats, and print the compression ratio.
with bpt.create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
    print('create the test data', end='')
    bpt.create_array(100, in_file, progress=bpt.simple_progress)
    repeats = 3
    print("%s\t%s\t\t%s\t\t%s" %
          ("chunk_size", "comp-time", "decomp-time", "ratio"))
    for exponent in numpy.arange(19, 23.5, 0.5):
        chunk_size = int(2 ** exponent)
        pack_durations = []
        unpack_durations = []
        for _ in range(repeats):
            # Flush OS caches before each timed phase so repeated runs
            # do not benefit from warm page caches.
            drop_caches()
            start = time.time()
            pack_file_to_file(in_file, out_file, chunk_size=chunk_size)
            sync()
            pack_durations.append(time.time() - start)
            drop_caches()
            start = time.time()
            unpack_file_from_file(out_file, dcmp_file)
            sync()
            unpack_durations.append(time.time() - start)
        ratio = path.getsize(in_file) / path.getsize(out_file)
        print("%s\t\t%f\t\t%f\t\t%f" %
              (pretty_size(chunk_size),
               sum(pack_durations) / repeats,
               sum(unpack_durations) / repeats,
               ratio,
               ))