def compressTarDefault(name, overwrite=None, outname=None, prefs=None):
    """
    :type string: name - the name of the dir to tar
    :type bool: overwrite - overwrite destination

    Generic compress method for creating .tar.lz4 from a dir.

    ***WARNING*** Currently uses StringIO object until lz4file supports
    write. Avoid using for large directories, it will consume quite a bit
    of RAM.
    """
    if not outname:
        outname = '.'.join([name.rstrip('/'), 'tar', 'lz4'])
    if not os.path.exists(name):
        print('Unable to locate the directory to compress.')
        return
    # Tar the directory into an in-memory buffer, then stream that buffer
    # through the lz4 frame compressor in 64 KiB chunks.
    buff = StringIO()
    tarbuff = Lz4Tar.open(fileobj=buff, mode='w')
    tarbuff.add(name)
    tarbuff.close()
    buff.seek(0)
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with __builtin__.open(outname, 'wb') as out:
        out.write(header)
        while True:
            decompData = buff.read(64 * (1 << 10))
            if not decompData:
                break
            compData = lz4f.compressUpdate(decompData, cCtx)
            out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
    del tarbuff, buff

def compressFile(cls, name, overwrite=False, outname=None, prefs=None):
    """
    Large-file safe: the input is read in 64 KiB chunks.
    """
    if not outname:
        outname = '.'.join([name, 'lz4r'])
    if os.path.exists(outname):
        if not overwrite:
            print('File Exists!')
            return
        print('Overwrite authorized')
    if not os.path.exists(name):
        print('Unable to locate the original file. Please check filename.')
        return
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with open(outname, 'wb') as out:
        out.write(header)
        with open(name, 'rb') as infile:
            while True:
                decompData = infile.read(64 * (1 << 10))
                if not decompData:
                    break
                compData = lz4f.compressUpdate(decompData, cCtx)
                out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)

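# Usage sketch for compressFile() above. Its first parameter is `cls`, so it
# is presumably meant to be bound as a classmethod; the Lz4Tool class name
# and the 'data.bin' path below are illustrative assumptions, not part of
# the original code.
class Lz4Tool(object):
    compressFile = classmethod(compressFile)

Lz4Tool.compressFile('data.bin')                      # writes data.bin.lz4r
Lz4Tool.compressFile('data.bin', overwrite=True,
                     outname='data-backup.lz4r')      # explicit output name
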
def compressDir(name, outname=None):
    """
    :type string: name - the name of the dir to tar

    Generic compress method for creating .lz4r from a dir.
    """
    if not outname:
        outname = '.'.join([name.rstrip('/'), 'lz4r'])
    if not os.path.exists(name):
        print('Unable to locate the directory to compress.')
        return
    buff = StringIO()
    tarbuff = tarfile.open(fileobj=buff, mode='w')
    tarbuff.add(name)
    tarbuff.close()
    buff.seek(0)
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx)
    with __builtin__.open(outname, 'wb') as out:
        out.write(header)
        while True:
            decompData = buff.read(64 * (1 << 10))
            if not decompData:
                break
            compData = lz4f.compressUpdate(decompData, cCtx)
            out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
    del tarbuff, buff

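# Usage sketch for compressDir() above: the 'logs' directory name is
# illustrative. Note that the whole tar archive is built in an in-memory
# StringIO buffer before compression, so RAM use grows with the directory
# size.
compressDir('logs')                              # writes logs.lz4r
compressDir('logs', outname='logs-backup.lz4r')  # explicit output name
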
def compressTarDefault(dirName, overwrite=None, outname=None, prefs=None):
    """
    :type string: dirName - the name of the dir to tar
    :type bool: overwrite - overwrite destination

    Generic compress method for creating .tar.lz4 from a dir.

    ***WARNING*** Currently uses StringIO object until lz4file supports
    write. Avoid using for large directories, it will consume quite a bit
    of RAM.
    """
    if not outname:
        outname = '.'.join([dirName.rstrip('/'), 'tar', 'lz4'])
    if not os.path.exists(dirName):
        print('Unable to locate the directory to compress.')
        return
    buff = StringIO()
    tarbuff = Lz4Tar.open(fileobj=buff, mode='w')
    tarbuff.add(dirName)
    tarbuff.close()
    buff.seek(0)
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with __builtin__.open(outname, 'wb') as out:
        out.write(header)
        while True:
            decompData = buff.read(64 * (1 << 10))
            if not decompData:
                break
            compData = lz4f.compressUpdate(decompData, cCtx)
            out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
    del tarbuff, buff

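# Usage sketch for compressTarDefault() above: 'www' is an illustrative
# directory name. Passing prefs=None keeps the library's default frame
# preferences; the default output name is the directory name plus '.tar.lz4'.
compressTarDefault('www')                               # writes www.tar.lz4
compressTarDefault('www', outname='/tmp/www-snapshot.tar.lz4')
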
def compressDir(cls, name, overwrite=None, outname=None, prefs=None):
    """
    Be careful with directories that contain many files.
    """
    if not outname:
        outname = '.'.join([name.rstrip('/'), 'lz4r'])
    if not os.path.exists(name):
        print('Unable to locate the directory to compress.')
        return
    # If the dir is huge, holding it in an in-memory buffer makes the buffer
    # unacceptably large. So write it to a tar file on disk first, then read
    # that tar file back and compress it with lz4.
    # Note:
    # It may be better to invoke the 'tar' command directly here (a sketch of
    # that approach follows below), because Python 2's tarfile handles
    # directories with many files poorly. See
    # http://stackoverflow.com/questions/21039974/
    # high-memory-usage-with-pythons-native-tarfile-lib
    tarname = name + '.tar'
    tar = tarfile.open(tarname, "w")
    tar.add(name)
    tar.close()
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with open(outname, 'wb') as out:
        out.write(header)
        with open(tarname, 'rb') as infile:
            while True:
                decompData = infile.read(64 * (1 << 10))
                if not decompData:
                    break
                compData = lz4f.compressUpdate(decompData, cCtx)
                out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)
    os.remove(tarname)

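# Sketch of the alternative suggested in the note above: pipe the external
# 'tar' command straight into the lz4 frame compressor instead of going
# through tarfile. This helper is hypothetical (not part of the original
# module), assumes a Unix 'tar' on PATH, and follows the same 64 KiB
# streaming pattern used by compressDir().
import subprocess

def compressDirViaTarPipe(name, outname=None, prefs=None):
    if not outname:
        outname = '.'.join([name.rstrip('/'), 'lz4r'])
    proc = subprocess.Popen(['tar', 'cf', '-', name], stdout=subprocess.PIPE)
    cCtx = lz4f.createCompContext()
    with open(outname, 'wb') as out:
        out.write(lz4f.compressBegin(cCtx, prefs))
        while True:
            chunk = proc.stdout.read(64 * (1 << 10))
            if not chunk:
                break
            out.write(lz4f.compressUpdate(chunk, cCtx))
        out.write(lz4f.compressEnd(cCtx))
    proc.wait()
    lz4f.freeCompContext(cCtx)
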
def compressFileDefault(name, overwrite=False, outname=None, prefs=None):
    """
    :type string: name - name of file to compress
    :type bool: overwrite - overwrite destination
    :type string: outname - name for compressed file; optional. Defaults to
                  '.'.join([name, 'lz4']).

    Generic compress method for a file. Adds .lz4 to the original file name
    for output, unless outname is provided.

    ***NOTE*** No longer uses compressFrame. This is now large-file safe:
    the input is read in 64 KiB chunks.
    """
    if not outname:
        outname = '.'.join([name, 'lz4'])
    if os.path.exists(outname):
        if not overwrite:
            print('File Exists!')
            return
        print('Overwrite authorized')
    if not os.path.exists(name):
        print('Unable to locate the original file. Please check filename.')
        return
    cCtx = lz4f.createCompContext()
    header = lz4f.compressBegin(cCtx, prefs)
    with __builtin__.open(outname, 'wb') as out:
        out.write(header)
        with __builtin__.open(name, 'rb') as infile:
            while True:
                decompData = infile.read(64 * (1 << 10))
                if not decompData:
                    break
                compData = lz4f.compressUpdate(decompData, cCtx)
                out.write(compData)
        out.write(lz4f.compressEnd(cCtx))
        out.flush()
    lz4f.freeCompContext(cCtx)

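# Usage sketch for compressFileDefault() above: 'report.txt' is an
# illustrative filename. By default the output name is the input name plus
# '.lz4', and an existing output is only replaced when overwrite=True.
compressFileDefault('report.txt')                        # writes report.txt.lz4
compressFileDefault('report.txt', overwrite=True)        # replace existing output
compressFileDefault('report.txt', outname='r.lz4')       # explicit output name
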
def close(p):
    # Write the lz4 frame end-mark and release the compression context.
    p.outf.write(lz4f.compressEnd(p.ctx))
    lz4f.freeCompContext(p.ctx)

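# Minimal sketch of the kind of object close() above expects: anything with
# a 'ctx' compression context and an 'outf' output file. The Lz4Writer name
# and the 'payload.lz4' path are illustrative assumptions, not part of the
# original code.
class Lz4Writer(object):
    def __init__(self, outname, prefs=None):
        self.ctx = lz4f.createCompContext()
        self.outf = __builtin__.open(outname, 'wb')
        self.outf.write(lz4f.compressBegin(self.ctx, prefs))

    def write(self, data):
        self.outf.write(lz4f.compressUpdate(data, self.ctx))

writer = Lz4Writer('payload.lz4')
writer.write('hello world')
close(writer)            # writes the frame end-mark and frees the context
writer.outf.close()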