# Imports these helpers rely on (Python 2). lz4f comes from the lz4tools
# package; __builtin__.open is used so a module-level open() helper does not
# shadow the built-in one.
import os
import tarfile
import __builtin__
import lz4f
from cStringIO import StringIO


def compressDir(name, outname=None):
    """
    :type string: name - the name of the dir to tar

    Generic compress method for creating .lz4r from a dir.
    """
    if not outname:
        outname = '.'.join([name.rstrip('/'), 'lz4r'])
    if not os.path.exists(name):
        print('Unable to locate the directory to compress.')
        return
    # Tar the directory into an in-memory buffer, then compress that buffer.
    buff = StringIO()
    tarbuff = tarfile.open(fileobj=buff, mode='w')
    tarbuff.add(name)
    tarbuff.close()
    buff.seek(0)
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx)
    with __builtin__.open(outname, 'wb') as out:
        out.write(header)
        # Read the tar buffer in 64 KB chunks and append each compressed chunk.
        while True:
            decompData = buff.read(64 * (1 << 10))
            if not decompData:
                break
            compData = lz4f.compressUpdate(decompData, cCtx)
            out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
    del tarbuff, buff
def compressFile(cls, name, overwrite=False, outname=None, prefs=None):
    """
    Compress a single file to <name>.lz4r.

    This is large-file safe: the input is read in 64 KB chunks.
    """
    if not outname:
        outname = '.'.join([name, 'lz4r'])
    if os.path.exists(outname):
        if not overwrite:
            print('File Exists!')
            return
        print('Overwrite authorized')
    if not os.path.exists(name):
        print('Unable to locate the original file. Please check filename.')
        return
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with open(outname, 'wb') as out:
        out.write(header)
        with open(name, 'rb') as infile:
            while True:
                decompData = infile.read(64 * (1 << 10))
                if not decompData:
                    break
                compData = lz4f.compressUpdate(decompData, cCtx)
                out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
def compressTarDefault(dirName, overwrite=None, outname=None, prefs=None):
    """
    :type string: dirName - the name of the dir to tar
    :type bool: overwrite - overwrite destination

    Generic compress method for creating .tar.lz4 from a dir.

    ***WARNING*** Currently uses a StringIO object until lz4file supports
    write. Avoid using for large directories; it will consume quite a bit
    of RAM.
    """
    if not outname:
        outname = '.'.join([dirName.rstrip('/'), 'tar', 'lz4'])
    if not os.path.exists(dirName):
        print('Unable to locate the directory to compress.')
        return
    # Tar the directory into an in-memory buffer, then compress that buffer.
    buff = StringIO()
    tarbuff = Lz4Tar.open(fileobj=buff, mode='w')
    tarbuff.add(dirName)
    tarbuff.close()
    buff.seek(0)
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with __builtin__.open(outname, 'wb') as out:
        out.write(header)
        while True:
            decompData = buff.read(64 * (1 << 10))
            if not decompData:
                break
            compData = lz4f.compressUpdate(decompData, cCtx)
            out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
    del tarbuff, buff
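# A minimal usage sketch for compressTarDefault(). The directory name is
# hypothetical; with the defaults above the output name is derived by
# appending '.tar.lz4' to the directory name (trailing slash stripped).
#
#     compressTarDefault('myproject/')                         # -> myproject.tar.lz4
#     compressTarDefault('myproject/', outname='backup.tar.lz4')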
def compressDir(cls, name, overwrite=None, outname=None, prefs=None):
    """
    Be careful with directories that contain many files.
    """
    if not outname:
        outname = '.'.join([name.rstrip('/'), 'lz4r'])
    if not os.path.exists(name):
        print('Unable to locate the directory to compress.')
        return
    # If the dir is huge and a buffer is used to hold it, the buffer grows
    # unacceptably large. So write the dir to a tar file on disk first, then
    # read that tar file back and compress it with lz4.
    # Note: it may be better to call the 'tar' command directly here, since
    # Python 2's tarfile handles directories with many files poorly. See
    # http://stackoverflow.com/questions/21039974/
    # high-memory-usage-with-pythons-native-tarfile-lib
    tarname = name + '.tar'
    tar = tarfile.open(tarname, 'w')
    tar.add(name)
    tar.close()
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with open(outname, 'wb') as out:
        out.write(header)
        with open(tarname, 'rb') as infile:
            while True:
                decompData = infile.read(64 * (1 << 10))
                if not decompData:
                    break
                compData = lz4f.compressUpdate(decompData, cCtx)
                out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
    os.remove(tarname)
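# A minimal sketch of the alternative mentioned in the note above: shell out
# to the system 'tar' command instead of tarfile, so the archive is streamed
# to disk by tar itself. Assumes a Unix-like system with 'tar' on PATH; the
# helper name is hypothetical and not part of the original code.
import subprocess


def _tarDirWithSystemTar(name, tarname):
    # 'tar -cf <tarname> <name>' writes the archive straight to disk without
    # building it in Python memory.
    subprocess.check_call(['tar', '-cf', tarname, name])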
def compressFileDefault(name, overwrite=False, outname=None, prefs=None):
    """
    :type string: name - name of file to compress
    :type bool: overwrite - overwrite destination
    :type string: outname - name for compressed file, not required.
                  Default will be '.'.join([name, 'lz4'])

    Generic compress method for a file. Adds .lz4 to the original file name
    for output, unless outname is provided.

    ***NOTE*** No longer uses compressFrame. This is now large-file safe!
    It will read the input in 64 KB chunks.
    """
    if not outname:
        outname = '.'.join([name, 'lz4'])
    if os.path.exists(outname):
        if not overwrite:
            print('File Exists!')
            return
        print('Overwrite authorized')
    if not os.path.exists(name):
        print('Unable to locate the original file. Please check filename.')
        return
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with __builtin__.open(outname, 'wb') as out:
        out.write(header)
        with __builtin__.open(name, 'rb') as infile:
            while True:
                decompData = infile.read(64 * (1 << 10))
                if not decompData:
                    break
                compData = lz4f.compressUpdate(decompData, cCtx)
                out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
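# A minimal usage sketch for compressFileDefault(); the file name is
# hypothetical. With the defaults above, compressing 'example.txt' writes
# 'example.txt.lz4' alongside it.
#
#     compressFileDefault('example.txt')                   # -> example.txt.lz4
#     compressFileDefault('example.txt', overwrite=True)   # replace an existing .lz4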
def write(p, s):
    # Compress the incoming data with the writer's compression context and
    # append the compressed bytes to its output file.
    cs = lz4f.compressUpdate(str(s), p.ctx)
    p.outf.write(cs)
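# A minimal sketch of the writer object write() above assumes: 'p' needs a
# compression context ('ctx') and an open output file ('outf'), with the frame
# header written up front and compressEnd()/freeCompContext() called on close.
# The class name and structure are assumptions, not part of the original code.
class _Lz4Writer(object):
    def __init__(self, outname, prefs=None):
        self.ctx = lz4f.createCompContext()
        self.outf = __builtin__.open(outname, 'wb')
        self.outf.write(lz4f.compressBegin(self.ctx, prefs))

    def close(self):
        # Finish the frame, close the file, and release the context.
        self.outf.write(lz4f.compressEnd(self.ctx))
        self.outf.close()
        lz4f.freeCompContext(self.ctx)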
# cffi-style streaming compression: read test.tar in chunks, compress each
# chunk into the dst buffer, and append it to test.tar.lz4. ffi, ctx, src,
# src_buf, dst_size and lz4_preferences are assumed to be set up earlier in
# the script.
dst = ffi.new("char[]", dst_size)
dst_buf = ffi.buffer(dst)

infile = open("test.tar", "rb")
outfile = open("test.tar.lz4", "wb")
filesize = 0
compressedfilesize = 0

# Prime the source buffer and write the frame header.
read_size = infile.raw.readinto(src_buf)
filesize += read_size
header_size = compressBegin(ctx[0], dst, dst_size, lz4_preferences)
size_check = outfile.write(dst_buf[:header_size])
compressedfilesize += header_size

# Compress chunk by chunk until the input is exhausted.
while read_size > 0:
    out_size = compressUpdate(ctx[0], dst, dst_size, src, read_size)
    size_check = outfile.write(dst_buf[:out_size])
    compressedfilesize += out_size
    read_size = infile.raw.readinto(src_buf)
    filesize += read_size

# Flush any remaining data and write the frame footer.
footer_size = compressEnd(ctx[0], dst, dst_size)
size_check = outfile.write(dst_buf[:footer_size])
compressedfilesize += footer_size

freeCompressionContext(ctx[0])
infile.close()
outfile.close()
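# An optional follow-on sketch: report the compression ratio from the byte
# counters maintained above (the max() guard only avoids division by zero).
print("compressed %d bytes to %d bytes (%.2f%% of original)"
      % (filesize, compressedfilesize,
         100.0 * compressedfilesize / max(filesize, 1)))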