def compress_files(files, name, excluded):
    """Compress the package and return the new dict (name => content) of files,
    only with the excluded (conanXX) files and the compressed tgz blob.

    :param files: dict of archive name => content (str or bytes)
    :param name: name of the resulting tgz entry in the returned dict
    :param excluded: iterable of file names kept out of the tgz and returned as-is
    :return: dict with the excluded files plus {name: tgz bytes}
    """
    from io import BytesIO  # binary buffer: tarfile+gzip streams are bytes, not text

    tgz_contents = BytesIO()
    tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_contents)

    def addfile(filename, contents, tar):
        # tarfile copies exactly info.size bytes from fileobj, so the size must
        # be the encoded byte length (len(str) differs for non-ASCII content)
        data = contents if isinstance(contents, bytes) else contents.encode("utf-8")
        info = tarfile.TarInfo(name=filename)
        info.size = len(data)
        tar.addfile(tarinfo=info, fileobj=BytesIO(data))

    # .items() instead of Py2-only .iteritems(): works on both Python 2 and 3
    for the_file, content in files.items():
        if the_file not in excluded:
            addfile(the_file, content, tgz)
    tgz.close()

    ret = {}
    for e in excluded:
        if e in files:
            ret[e] = files[e]
    ret[name] = tgz_contents.getvalue()
    return ret
def compress_files(files, name, excluded):
    """Compress the package and returns the new dict (name => content) of files,
    only with the conanXX files and the compressed file"""
    buffer_ = BytesIO()
    tgz = gzopen_without_timestamps(name, mode="w", fileobj=buffer_)
    for filename, file_info in files.items():
        if filename in excluded:
            continue  # excluded entries are returned uncompressed below
        payload = file_info["contents"]
        entry = tarfile.TarInfo(name=filename)
        entry.size = len(payload)
        entry.mode = file_info["mode"]
        tgz.addfile(tarinfo=entry, fileobj=BytesIO(payload))
    tgz.close()
    # Excluded files travel outside the tgz, as plain contents
    ret = {e: files[e]["contents"] for e in excluded if e in files}
    ret[name] = buffer_.getvalue()
    return ret
def compress_package_files(files):
    """Compress the package and return the new dict (name => content) of files,
    only with the conanXX files and the compressed tgz blob.

    :param files: dict of archive name => content (str or bytes)
    :return: dict with CONANINFO/CONAN_MANIFEST (if present) plus
             {PACKAGE_TGZ_NAME: tgz bytes}
    """
    from io import BytesIO  # binary buffer: tarfile+gzip streams are bytes, not text

    tgz_contents = BytesIO()
    tgz = gzopen_without_timestamps(PACKAGE_TGZ_NAME, mode="w", fileobj=tgz_contents)

    def addfile(filename, contents, tar):
        # tarfile copies exactly info.size bytes from fileobj, so the size must
        # be the encoded byte length (len(str) differs for non-ASCII content)
        data = contents if isinstance(contents, bytes) else contents.encode("utf-8")
        info = tarfile.TarInfo(name=filename)
        info.size = len(data)
        tar.addfile(tarinfo=info, fileobj=BytesIO(data))

    # .items() instead of Py2-only .iteritems(): works on both Python 2 and 3
    for the_file, content in files.items():
        if the_file not in (CONANINFO, CONAN_MANIFEST):
            addfile(the_file, content, tgz)
    tgz.close()

    ret = {}
    if CONANINFO in files:
        ret[CONANINFO] = files[CONANINFO]
    if CONAN_MANIFEST in files:
        ret[CONAN_MANIFEST] = files[CONAN_MANIFEST]
    ret[PACKAGE_TGZ_NAME] = tgz_contents.getvalue()
    return ret
def compress_files(files, name, excluded, dest_dir):
    """Compress the package files into <dest_dir>/<name> on disk and return a
    dict with the excluded entries (name => abs_path) plus {name: tgz path}.

    :param files: dict of archive name => absolute path on disk
    :param name: file name of the resulting tgz inside dest_dir
    :param excluded: iterable of names kept out of the tgz
    :param dest_dir: directory where the tgz is written
    """
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    with open(tgz_path, "wb") as tgz_handle:
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)

        def addfile(arcname, abs_path, tar):
            # Single stat() call instead of two: cheaper, and size/mode come
            # from the same snapshot (no mismatch if the file changes between calls)
            st = os.stat(abs_path)
            info = tarfile.TarInfo(name=arcname)
            info.size = st.st_size
            info.mode = st.st_mode
            with open(abs_path, 'rb') as file_handler:
                tar.addfile(tarinfo=info, fileobj=file_handler)

        for filename, abs_path in files.items():
            if filename not in excluded:
                addfile(filename, abs_path, tgz)
        tgz.close()

    ret = {}
    for e in excluded:
        if e in files:
            ret[e] = files[e]
    ret[name] = tgz_path
    return ret
def compress_files(files, name, dest_dir):
    """Compress 'files' (arcname => abs_path) into <dest_dir>/<name> and return
    the tgz path. Symlinks are stored as link entries, not followed."""
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    with open(tgz_path, "wb") as tgz_handle:
        # tgz_contents = BytesIO()
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)
        for filename, abs_path in files.items():
            info = tarfile.TarInfo(name=filename)
            st = os.stat(abs_path)  # stat once: consistent size/mode snapshot
            info.mode = st.st_mode
            if os.path.islink(abs_path):
                # BUG FIX: a symlink entry must declare size 0. The previous
                # code recorded the target's size in the header while writing
                # no data bytes, which misaligns every following tar member.
                info.type = tarfile.SYMTYPE
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                info.size = st.st_size
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
        tgz.close()
    return tgz_path
def compress_files(files, symlinks, name, dest_dir):
    """Compress 'files' (arcname => abs_path) and 'symlinks' (arcname => target)
    into <dest_dir>/<name> and return the tgz path"""
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    with open(tgz_path, "wb") as tgz_handle:
        # tgz_contents = BytesIO()
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)
        # Explicit symlink entries first (sorted for a reproducible layout)
        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            tgz.addfile(tarinfo=info)
        # Drop group/other write bits so archived modes are reproducible
        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        for filename, abs_path in sorted(files.items()):
            info = tarfile.TarInfo(name=filename)
            st = os.stat(abs_path)  # stat once: consistent size/mode snapshot
            info.mode = st.st_mode & mask
            if os.path.islink(abs_path):
                # BUG FIX: a symlink entry must declare size 0. The previous
                # code recorded the target's size in the header while writing
                # no data bytes, which misaligns every following tar member.
                info.type = tarfile.SYMTYPE
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                info.size = st.st_size
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
        tgz.close()
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)
    return tgz_path
def compress_files(files, symlinks, name, dest_dir):
    """Compress 'files' (arcname => abs_path) and 'symlinks' (arcname => target)
    into <dest_dir>/<name> and return the tgz path.

    The tgz is flagged with set_dirty() while being written so an interrupted
    run leaves a detectable dirty marker; clean_dirty() removes it on success."""
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    set_dirty(tgz_path)
    with open(tgz_path, "wb") as tgz_handle:
        # tgz_contents = BytesIO()
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)
        # Explicit symlink entries first (sorted for a reproducible layout)
        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            tgz.addfile(tarinfo=info)
        # Drop group/other write bits so archived modes are reproducible
        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        for filename, abs_path in sorted(files.items()):
            info = tarfile.TarInfo(name=filename)
            st = os.stat(abs_path)  # stat once: consistent size/mode snapshot
            info.mode = st.st_mode & mask
            if os.path.islink(abs_path):
                # BUG FIX: a symlink entry must declare size 0. The previous
                # code recorded the target's size in the header while writing
                # no data bytes, which misaligns every following tar member.
                info.type = tarfile.SYMTYPE
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                info.size = st.st_size
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
        tgz.close()
    clean_dirty(tgz_path)
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)
    return tgz_path
def compress_files(files, symlinks, name, dest_dir, output=None):
    """Compress 'files' (arcname => abs_path) and 'symlinks' (arcname => target)
    into <dest_dir>/<name>, reporting progress to 'output', and return the path.

    The tgz is kept inside a dirty-marker context so an interrupted write can
    be detected and discarded by later runs."""
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    with set_dirty_context_manager(tgz_path), open(tgz_path, "wb") as tgz_handle:
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)
        # Explicit symlink entries first (sorted for a reproducible layout)
        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            info.size = 0  # A symlink shouldn't have size
            tgz.addfile(tarinfo=info)
        # Drop group/other write bits so archived modes are reproducible
        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        with progress_bar.iterate_list_with_progress(sorted(files.items()), output,
                                                     "Compressing %s" % name) as pg_file_list:
            for filename, abs_path in pg_file_list:
                info = tarfile.TarInfo(name=filename)
                # Single stat() instead of two per file: cheaper and size/mode
                # come from the same snapshot
                st = os.stat(abs_path)
                info.size = st.st_size
                info.mode = st.st_mode & mask
                if os.path.islink(abs_path):
                    info.type = tarfile.SYMTYPE
                    info.size = 0  # A symlink shouldn't have size
                    info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                    tgz.addfile(tarinfo=info)
                else:
                    with open(abs_path, 'rb') as file_handler:
                        tgz.addfile(tarinfo=info, fileobj=file_handler)
        tgz.close()
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)
    return tgz_path
def compress_files(files, symlinks, name, dest_dir, output=None):
    """Compress 'files' (arcname => abs_path) and 'symlinks' (arcname => target)
    into <dest_dir>/<name> and return the tgz path.

    Progress is reported on 'output' when there is more than one file: a tqdm
    bar on terminals, a plain '[====]' stream of '=' characters otherwise."""
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    with set_dirty_context_manager(tgz_path), open(tgz_path, "wb") as tgz_handle:
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)
        # Explicit symlink entries first (sorted for a reproducible layout)
        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            info.size = 0  # A symlink shouldn't have size
            tgz.addfile(tarinfo=info)
        # Drop group/other write bits so archived modes are reproducible
        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        i_file = 0
        n_files = len(files)
        last_progress = None
        if output and n_files > 1 and not output.is_terminal:
            output.write("[")
        elif output and n_files > 1 and output.is_terminal:
            progress_bar = tqdm(total=len(files), desc="Compressing %s" % name,
                                unit="files", leave=True, dynamic_ncols=False,
                                ascii=True, file=output)
        for filename, abs_path in sorted(files.items()):
            info = tarfile.TarInfo(name=filename)
            # Single stat() instead of two per file: cheaper and size/mode
            # come from the same snapshot
            st = os.stat(abs_path)
            info.size = st.st_size
            info.mode = st.st_mode & mask
            if os.path.islink(abs_path):
                info.type = tarfile.SYMTYPE
                info.size = 0  # A symlink shouldn't have size
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
            if output and n_files > 1:
                i_file = i_file + 1
                units = min(50, int(50 * i_file / n_files))
                if last_progress != units:  # Avoid screen refresh if nothing has change
                    if not output.is_terminal:
                        output.write('=' * (units - (last_progress or 0)))
                    last_progress = units
                if output.is_terminal:
                    progress_bar.update()
        if output and n_files > 1:
            if output.is_terminal:
                progress_bar.close()
            else:
                output.writeln("]")
        tgz.close()
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)
    return tgz_path
def compress_files(files, symlinks, name, dest_dir, output=None):
    """Compress 'files' (arcname => abs_path) and 'symlinks' (arcname => target)
    into <dest_dir>/<name> and return the tgz path.

    Progress is reported on 'output' when there is more than one file: an
    in-place rewritten '[====  ] i/n files' line on terminals, a plain stream
    of '=' characters otherwise. The tgz is flagged dirty while written."""
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    set_dirty(tgz_path)
    with open(tgz_path, "wb") as tgz_handle:
        # tgz_contents = BytesIO()
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)
        # Explicit symlink entries first (sorted for a reproducible layout)
        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            info.size = 0  # A symlink shouldn't have size
            tgz.addfile(tarinfo=info)
        # Drop group/other write bits so archived modes are reproducible
        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        i_file = 0
        n_files = len(files)
        last_progress = None
        if output and n_files > 1 and not output.is_terminal:
            output.write("[")
        for filename, abs_path in sorted(files.items()):
            info = tarfile.TarInfo(name=filename)
            # Single stat() instead of two per file: cheaper and size/mode
            # come from the same snapshot
            st = os.stat(abs_path)
            info.size = st.st_size
            info.mode = st.st_mode & mask
            if os.path.islink(abs_path):
                info.type = tarfile.SYMTYPE
                info.size = 0  # A symlink shouldn't have size
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
            if output and n_files > 1:
                i_file = i_file + 1
                units = min(50, int(50 * i_file / n_files))
                if last_progress != units:  # Avoid screen refresh if nothing has change
                    if output.is_terminal:
                        text = "%s/%s files" % (i_file, n_files)
                        output.rewrite_line("[%s%s] %s" % ('=' * units, ' ' * (50 - units), text))
                    else:
                        output.write('=' * (units - (last_progress or 0)))
                    last_progress = units
        if output and n_files > 1:
            if output.is_terminal:
                output.writeln("")
            else:
                output.writeln("]")
        tgz.close()
    clean_dirty(tgz_path)
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)
    return tgz_path
def gzopen_patched(name, mode="r", fileobj=None, compresslevel=None, **kwargs):
    """Test double for gzopen_without_timestamps: behaves normally except that
    opening the package tgz raises a ConanException."""
    if name != PACKAGE_TGZ_NAME:
        return gzopen_without_timestamps(name, mode, fileobj, compresslevel, **kwargs)
    raise ConanException("Error gzopen %s" % name)
def test_linkame_striproot_folder(self):
    # Build a tgz by hand containing a regular file and a hardlink to it, both
    # under a shared "common/" root, then check both untargz and the public
    # unzip (develop2) extract them correctly with strip_root=True.
    tmp_folder = temp_folder()
    other_tmp_folder = temp_folder()
    save(os.path.join(other_tmp_folder, "foo.txt"), "")
    tgz_path = os.path.join(tmp_folder, "foo.tgz")
    with open(tgz_path, "wb") as tgz_handle:
        tgz = gzopen_without_timestamps("name", mode="w", fileobj=tgz_handle)
        # Regular file
        # NOTE(review): name/path are overwritten after construction so the
        # archived entry lives at common/subfolder/foo.txt
        info = tarfile.TarInfo(name="common/foo.txt")
        info.name = "common/subfolder/foo.txt"
        info.path = "common/subfolder/foo.txt"
        with open(os.path.join(other_tmp_folder, "foo.txt"), 'rb') as file_handler:
            tgz.addfile(tarinfo=info, fileobj=file_handler)
        # A hardlink to the regular file
        info = tarfile.TarInfo(name="common/foo.txt")
        info.linkname = "common/subfolder/foo.txt"
        info.linkpath = "common/subfolder/foo.txt"
        info.name = "common/subfolder/bar/foo.txt"
        info.path = "common/subfolder/bar/foo.txt"
        info.type = b'1'  # This indicates a hardlink to the tgz file "common/subfolder/foo.txt"
        tgz.addfile(tarinfo=info, fileobj=None)
        tgz.close()
    # Sanity check: nothing extracted yet
    assert not os.path.exists(
        os.path.join(tmp_folder, "subfolder", "foo.txt"))
    assert not os.path.exists(
        os.path.join(tmp_folder, "subfolder", "bar", "foo.txt"))
    # strip_root must drop the leading "common/" component for both entries
    untargz(tgz_path, destination=tmp_folder, strip_root=True)
    assert os.path.exists(os.path.join(tmp_folder, "subfolder", "foo.txt"))
    assert os.path.exists(
        os.path.join(tmp_folder, "subfolder", "bar", "foo.txt"))
    # Check develop2 public unzip
    rmdir(os.path.join(tmp_folder, "subfolder"))
    assert not os.path.exists(
        os.path.join(tmp_folder, "subfolder", "foo.txt"))
    assert not os.path.exists(
        os.path.join(tmp_folder, "subfolder", "bar", "foo.txt"))
    unzip_dev2(ConanFileMock(), tgz_path, destination=tmp_folder, strip_root=True)
    assert os.path.exists(os.path.join(tmp_folder, "subfolder", "foo.txt"))
    assert os.path.exists(
        os.path.join(tmp_folder, "subfolder", "bar", "foo.txt"))
def tgz_with_contents(files):
    """Write 'files' (arcname => text content) into a temporary myfile.tar.gz
    and return its absolute path."""
    tgz_path = os.path.join(temp_folder(), "myfile.tar.gz")
    with open(tgz_path, "wb") as handle:
        archive = gzopen_without_timestamps("myfile.tar.gz", mode="w", fileobj=handle)
        for entry_name, text in files.items():
            payload = text.encode('utf-8')
            entry = tarfile.TarInfo(name=entry_name)
            entry.size = len(payload)
            archive.addfile(tarinfo=entry, fileobj=BytesIO(payload))
        archive.close()
    return tgz_path
def _compress_folder(self, folder, tgz_path, folder_entry=None):
    # Create a tar.gz file with the files in the folder and an additional TarInfo entry
    # for the folder_entry (the gather files doesn't return empty dirs)
    with open(tgz_path, "wb") as tgz_handle:
        tgz = gzopen_without_timestamps("name", mode="w", fileobj=tgz_handle)
        if folder_entry:
            # Create an empty folder in the tgz file
            t = tarfile.TarInfo(folder_entry)
            t.mode = 0o750  # same value as the previous magic literal 488
            t.type = tarfile.DIRTYPE
            tgz.addfile(t)
        files, _ = gather_files(folder)
        for filename, abs_path in files.items():
            info = tarfile.TarInfo(name=filename)
            # BUG FIX: info.size was never set, so tarfile copied 0 bytes and
            # every entry ended up archived empty despite the fileobj
            info.size = os.stat(abs_path).st_size
            with open(os.path.join(folder, filename), 'rb') as file_handler:
                tgz.addfile(tarinfo=info, fileobj=file_handler)
        tgz.close()
def setUp(self):
    # Prepare a temp folder with two files and a tar.gz archiving them,
    # used as the fixture for the extraction tests
    self.tmp_folder = temp_folder()
    with chdir(self.tmp_folder):
        # Create a couple of files
        ori_files_dir = os.path.join(self.tmp_folder, "ori")
        file1 = os.path.join(ori_files_dir, "file1")
        file2 = os.path.join(ori_files_dir, "folder", "file2")
        save(file1, "")
        save(file2, "")
        # Create a tar.gz file with the above files
        self.tgz_file = os.path.join(self.tmp_folder, "file.tar.gz")
        with open(self.tgz_file, "wb") as tgz_handle:
            tgz = gzopen_without_timestamps("name", mode="w", fileobj=tgz_handle)
            files, _ = gather_files(ori_files_dir)
            for filename, abs_path in files.items():
                info = tarfile.TarInfo(name=filename)
                # BUG FIX: archive each gathered file (abs_path), not file1
                # every iteration, and declare its size — without info.size
                # tarfile copies 0 bytes and the entry is stored empty
                info.size = os.stat(abs_path).st_size
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
            tgz.close()
def compress_files(files, symlinks, name, dest_dir):
    """Compress 'files' (arcname => abs_path) and 'symlinks' (arcname => target)
    into <dest_dir>/<name> and return the tgz path.

    For the export-sources tgz every file is re-rooted under '.c_src/' for
    backwards compatibility."""
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    is_export_sources = (name == EXPORT_SOURCES_TGZ_NAME)
    with open(tgz_path, "wb") as tgz_handle:
        # tgz_contents = BytesIO()
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)
        for filename, dest in symlinks.items():
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            tgz.addfile(tarinfo=info)
        for filename, abs_path in files.items():
            if is_export_sources:
                # temporary backwards compat TGZ creation
                filename = ".c_src/%s" % filename
            info = tarfile.TarInfo(name=filename)
            st = os.stat(abs_path)  # stat once: consistent size/mode snapshot
            info.mode = st.st_mode
            if os.path.islink(abs_path):
                # BUG FIX: a symlink entry must declare size 0. The previous
                # code recorded the target's size in the header while writing
                # no data bytes, which misaligns every following tar member.
                info.type = tarfile.SYMTYPE
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                info.size = st.st_size
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
        tgz.close()
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)
    return tgz_path