def bench_bigf_writeff():
    """Benchmark: dirty a whole BigFile_File mapping with 0xff and write it out."""
    # open the file through the bigfile interface and map it in full
    bigf  = BigFile_File(tmpf.name, blksize)
    bigfh = bigf.fileh_open()
    view  = bigfh.mmap(0, filesize // blksize)

    # touch every page, then push all dirty pages back to the file
    memset(view, 0xFF)
    bigfh.dirty_writeout(WRITEOUT_STORE | WRITEOUT_MARKSTORED)

    # TODO cleanup via explicit close once the API provides it
    del view   # view.close()
    del bigfh  # bigfh.close()
    del bigf   # bigf.close()
def _bench_bigf_read(hasher, expect):
    """Benchmark: hash the whole BigFile_File content via its mapping.

    The digest produced by `hasher` over the full mapping must equal `expect`.
    """
    # open the file through the bigfile interface and map it in full
    bigf  = BigFile_File(tmpf.name, blksize)
    bigfh = bigf.fileh_open()
    view  = bigfh.mmap(0, filesize // blksize)

    # digest over the whole mapped content
    h = hasher()
    h.update(view)

    # TODO cleanup via explicit close once the API provides it
    del view   # view.close()
    del bigfh  # bigfh.close()
    del bigf   # bigf.close()

    assert h.digest() == expect
def test_bigfile_filefile():
    """Verify BigFile_File: read back initial content, mutate, writeout, reread."""
    bigf  = BigFile_File(tmpf.name, blksize)
    bigfh = bigf.fileh_open()   # TODO + ram
    view  = bigfh.mmap(0, blen) # XXX assumes blksize == pagesize
    m = memoryview(view)

    # the bigfile interface must expose exactly the data previously written
    # to the file.
    for blk in xrange(blen):
        i = blk * blkitems
        want = arange(i, i + blkitems, dtype=be4)
        # NOTE ndarray(..., buffer=m) does not work on py2
        got = asarray(m[i*4:(i+blkitems)*4]).view(dtype=be4)
        assert array_equal(want, got)

    # mutate two spots in the mapping and flush the changes to the file
    m[0:5] = b'Hello'
    m[4*blksize+0:4*blksize+5] = b'World'
    bigfh.dirty_writeout(WRITEOUT_STORE | WRITEOUT_MARKSTORED)

    del m
    del view   # TODO view.unmap()
    del bigfh  # TODO bigfh.close()
    del bigf   # TODO close bigf

    # reopen and verify: the content must be original data + the two changes
    bigf  = BigFile_File(tmpf.name, blksize)
    bigfh = bigf.fileh_open()   # TODO + ram
    view  = bigfh.mmap(0, blen) # XXX assumes blksize == pagesize
    m = memoryview(view)

    for blk in xrange(blen):
        i = blk * blkitems
        want = arange(i, i + blkitems, dtype=be4)
        wantb = want.view(uint8)
        if blk == 0:
            wantb[0:5] = [ord(c) for c in 'Hello']
        if blk == 4:
            wantb[0:5] = [ord(c) for c in 'World']
        # NOTE ndarray(..., buffer=m) does not work on py2
        got = asarray(m[i*4:(i+blkitems)*4]).view(dtype=be4)
        assert array_equal(want, got)