def add_real_file(
    self,
    path: str,
    archived_path: str,
    recursive: bool = True,
    predicate: Optional[ArchiveAdapter.FileFilter] = None,
    mtime: Optional[float] = None,
) -> None:
    """Copy *path* from the filesystem into the archive as *archived_path*.

    Directories are written as explicit entries; with ``recursive`` their
    children are added too (each child re-derives its own mtime from disk).
    ``predicate`` may veto an entry by returning False for its archive name;
    ``mtime`` overrides the timestamp recorded for this entry only.
    """
    entry = ZipInfo.from_file(path, archived_path, strict_timestamps=False)
    self._set_zipinfo_compression(entry)
    effective_mtime = os.stat(path).st_mtime if mtime is None else mtime
    entry.date_time = self._prepare_zipinfo_date_time(effective_mtime)
    if predicate is not None and not predicate(entry.filename):
        return
    if not entry.is_dir():
        with open(path, "rb") as reader, self._inner.open(entry, "w") as writer:
            shutil.copyfileobj(reader, writer, 8 * 1024)
        return
    # Directory: write the entry itself, then (optionally) its children.
    self._inner.open(entry, "w").close()
    if recursive:
        for child in sorted(os.listdir(path)):
            self.add_real_file(
                os.path.join(path, child),
                os.path.join(archived_path, child),
                recursive=recursive,
                predicate=predicate,
            )
def transfer_store(self, key: str, filename: str):
    """Store the annexed file *filename* in the zip remote under *key*.

    e.g.: filename=".git/annex/objects/qW/pV/SHA256E-s148273064--5880ac1cd05eee90db251c027771b4c9f0a55b7c8b2813c95eff59eef465ebd3.wav/SHA256E-s148273064--5880ac1cd05eee90db251c027771b4c9f0a55b7c8b2813c95eff59eef465ebd3.wav"

    A no-op when the key is already present with the expected size.  After
    writing, the stored size is re-checked; on mismatch the corrupt entry
    is deleted from the archive and RemoteError is raised.
    """
    source = Path(filename)
    # Already stored with the expected size -> nothing to do.
    if self.check_file_sizes(key, source):
        return
    zip_path = self._get_zip_path(key)
    entry = ZipInfo.from_file(source, arcname=key, strict_timestamps=True)
    entry.compress_type = self.compression_algorithm
    # TODO: create inconsistent state context manager to avoid partial/corrupt
    # transfers when user KeyboardInterrupts during a copyfileobj call
    # a lockfile perhaps?
    with ZipFile(zip_path, 'a', compression=self.compression_algorithm,
                 allowZip64=True) as archive:
        with open(source, "rb") as reader, archive.open(entry, 'w') as writer:
            copyfileobj(reader, writer,
                        callback=self.annex.progress,
                        file_size=source.stat().st_size)
    if self.check_file_sizes(key, source):
        return
    # The size check failed after the write: remove the partial entry.
    print("Unknown error while storing the key.")
    print("Attempting to delete corrupt key from remote...")
    delete_from_zip(zip_path, key)
    print("Corrupted key was successfully deleted.")
    msg = "Could not store this key. drop it --from this-remote and retry."
    raise RemoteError(msg)
def patch_wheel(version: str) -> Path:
    """Convert the pure-Python wheel in ``dist/`` into a platform wheel
    that bundles the compiled ``blackfast`` binary as an installed script.

    Args:
        version: the package version embedded in the wheel filenames.

    Returns:
        Path to the newly written platform wheel; the original
        ``py3-none-any`` wheel is deleted.
    """
    src_path = Path.cwd() / "dist" / f"blackfast-{version}-py3-none-any.whl"
    platform_tag = get_platform().replace(".", "_").replace("-", "_")
    wheel_name = f"blackfast-{version}-py3-none-{platform_tag}.whl"
    dst_path = Path.cwd() / "dist" / wheel_name
    name = "blackfast.exe" if sys.platform == "win32" else "blackfast"
    with ZipFile(src_path, "r") as src, ZipFile(dst_path, "w") as dst:
        binary = Path.cwd() / "target" / "release" / name
        arcname = f"blackfast-{version}.data/scripts/{name}"
        zinfo = ZipInfo.from_file(binary, arcname)
        # BUGFIX: the Unix permission bits live in the HIGH 16 bits of
        # external_attr.  The previous `|= stat.S_IXUSR` set a low
        # (MS-DOS attribute) bit instead, so the installed script was
        # never actually marked executable.
        zinfo.external_attr |= stat.S_IXUSR << 16
        with binary.open("rb") as fobj:
            data = fobj.read()
        size = len(data)
        # RECORD wants an unpadded urlsafe-base64 sha256 (PEP 376/427).
        hash_digest = (
            urlsafe_b64encode(hashlib.sha256(data).digest())
            .decode("ascii")
            .rstrip("=")
        )
        dst.writestr(zinfo, data)
        record = "{},sha256={},{}\n".format(arcname, hash_digest, size).encode("utf-8")
        record_path = f"blackfast-{version}.dist-info/RECORD"
        for info in src.infolist():
            if info.filename == record_path:
                # Prepend the new script's entry to the existing RECORD.
                records = record + src.read(info.filename)
                info.file_size = len(records)
                dst.writestr(info, records)
            else:
                dst.writestr(info, src.read(info.filename))
    src_path.unlink()
    return dst_path
def decompress(file, password=None):
    """Extract a zip or tar archive into the current directory.

    Args:
        file: path to the archive.
        password: optional password for encrypted zip entries.  Tar
            archives have no password support, so it is ignored there.

    Prints status messages (in French) instead of raising, preserving
    the original user-facing behaviour.
    """
    if is_zipfile(file):
        # Encode lazily so password=None no longer crashes with
        # AttributeError before extraction is even attempted.
        pwd = password.encode() if password is not None else None
        try:
            # The old code read ZipInfo.from_file(file) (metadata of the
            # archive file itself, not of its members) and passed it as
            # `compression=`, which is a write-side option; ZipFile
            # detects each member's compression by itself when reading.
            with ZipFile(file) as archive:
                archive.extractall(pwd=pwd)
            print("Le fichier à été extrait avec succès !")
            print("Vous pouvez ouvrir le fichier souhaitez avec le raccourci Ctrl-C ou Menu->Fichier->Ouvrir.")
        except Exception:
            print("Ce mot de passe ne correspond pas.")
    elif is_tarfile(file):
        try:
            # TarFile.open auto-detects gz/bz2/xz.  The old calls
            # (TarInfo.from_file, extractall(pwd=...), password.encoded())
            # do not exist in the tarfile API and always failed.
            with TarFile.open(file) as archive:
                archive.extractall()
            print("Le fichier à été extrait avec succès !")
            print("Vous pouvez ouvrir le fichier souhaitez avec le raccourci Ctrl-C ou Menu->Fichier->Ouvrir.")
        except Exception:
            print("Ce mot de passe ne correspond pas.")
    else:
        print("Ce fichier n'est nis un fichier tar ni un fichier zip.")
def add_real_file(self, path, archived_path, recursive=True, predicate=None):
    """Add *path* from disk to the archive under *archived_path*.

    Directory entries are written explicitly; with ``recursive`` every
    child of a directory is added as well.  ``predicate`` can veto an
    entry by returning False for its archive name.
    """
    entry = ZipInfo.from_file(
        path, archived_path,
        strict_timestamps=self._zipfile._strict_timestamps)
    self._set_zipinfo_compression(entry)
    if predicate is not None and not predicate(entry.filename):
        return
    if not entry.is_dir():
        with open(path, "rb") as reader, self._zipfile.open(entry, "w") as writer:
            copyfileobj(reader, writer, 8 * 1024)
        return
    # Directory: write the entry itself, then optionally its children.
    self._zipfile.open(entry, "w").close()
    if recursive:
        for child in sorted(os.listdir(path)):
            self.add_real_file(
                os.path.join(path, child),
                os.path.join(archived_path, child),
                recursive=recursive,
                predicate=predicate,
            )
def write_zst_file(zf, zst_path, arcname):
    '''
    Add a file pre-compressed with zstd to the archive

    Of course zipfile doesn't support this use-case (because it sucks),
    so abuse generous access to its internals to implement it here.

    zf       -- an open, writable ZipFile whose private attributes
                (fp, start_dir, _writecheck, _writing, ...) are poked.
    zst_path -- Path to the .zst file whose raw bytes go in verbatim.
    arcname  -- name the member will have inside the archive.
    '''
    log_file(zst_path, arcname, ZIP_ZSTANDARD)
    zip64 = False
    with zst_path.open('rb') as zst_file:
        # Compressed size is just the on-disk size of the .zst file
        # (seek to end returns the offset), then rewind.
        zst_size = zst_file.seek(0, 2)
        zst_file.seek(0, 0)
        zi = ZipInfo.from_file(str(zst_path), arcname=arcname)
        zi.compress_type = ZIP_ZSTANDARD
        zi.create_version = ZSTANDARD_VERSION
        zi.extract_version = ZSTANDARD_VERSION
        zi.compress_size = zst_size
        if not zi.external_attr:
            zi.external_attr = 0o600 << 16  # permissions: ?rw-------
        # Unfortunately we must decompress it to compute crc32.
        # We'll also compute file size from decompressed data instead of relying on frame headers.
        zi.file_size = 0
        zi.CRC = 0
        for chunk in zstd_decompressor.read_to_iter(zst_file):
            zi.file_size += len(chunk)
            zi.CRC = zlib.crc32(chunk, zi.CRC)
        # Mirror what ZipFile.open(..., 'w') does internally: position the
        # underlying fp at start_dir (end of the last member) and record
        # where this member's local header begins.
        if zf._seekable:
            zf.fp.seek(zf.start_dir)
        zi.header_offset = zf.fp.tell()
        zf._writecheck(zi)
        zf._didModify = True
        zf.fp.write(zi.FileHeader(zip64))
        zf._writing = True
        try:
            # Stream the already-compressed bytes straight after the header.
            zst_file.seek(0, 0)
            shutil.copyfileobj(zst_file, zf.fp)
            assert zst_file.tell() == zi.compress_size
            # Register the member so the central directory includes it,
            # and advance start_dir so later writes append after us.
            zf.filelist.append(zi)
            zf.NameToInfo[zi.filename] = zi
            zf.start_dir = zf.fp.tell()
        finally:
            zf._writing = False
def zipfile_generator2(files, stream):
    """Stream a zip archive of ``(path, arcname)`` pairs.

    Bytes buffered in *stream* are yielded after each 16 KiB chunk is
    written, and once more after the archive (central directory) has
    been finalized by ZipFile's close.
    """
    with ZipFile(stream, mode='w') as archive:
        for file_path, arc_name in files:
            meta = ZipInfo.from_file(file_path, arcname=arc_name)
            with open(file_path, 'rb') as reader, archive.open(meta, mode='w') as writer:
                while True:
                    block = reader.read(16384)
                    if not block:
                        break
                    writer.write(block)
                    # Hand back whatever the stream has accumulated so far.
                    yield stream.get()
    # Leaving the context manager wrote the central directory; flush it too.
    yield stream.get()
def _zip_write_dir(self, zip_file: ZipFile, source_dpath: Path, rel_dpath: Path):
    """Record *rel_dpath* in *zip_file* as an empty directory entry.

    The trailing '/' on the arcname is what marks the entry as a
    directory; its timestamp is derived from *source_dpath* on disk.
    """
    timestamp = self._get_datetime_tuple(source_dpath)
    entry = ZipInfo.from_file(source_dpath, arcname=str(rel_dpath) + '/')
    entry.compress_type = ZIP_DEFLATED
    entry.date_time = timestamp
    zip_file.writestr(entry, b'')
def _add_to_zip(
    zipfile,
    file,
    root_folder,
    exclude,
    exclude_dotfiles,
    relative_to,
    compression,
):
    """Recursively add the `file` to the (open) `zipfile`.

    Args:
        zipfile: an open, writable ZipFile.
        file: Path of the file or directory to add.
        root_folder: optional prefix prepended to every archive name.
        exclude: iterable of compiled-regex or glob-string patterns;
            matching entries are skipped.
        exclude_dotfiles: skip entries whose name starts with a dot.
        relative_to: base path archive names are made relative to.
        compression: zipfile compression constant for each entry.

    Raises:
        TypeError: for an exclude pattern that is neither a regex
            pattern object nor a string.
    """
    logger = logging.getLogger(__name__)
    if file.is_file():
        if root_folder is None:
            filename = file.relative_to(relative_to)
        else:
            filename = root_folder / file.relative_to(relative_to)
        if exclude_dotfiles and filename.stem.startswith("."):
            logger.debug("Skipping %s (exclude dotfiles)", filename)
            return
        for pattern in exclude:
            if isinstance(pattern, RegexPattern):
                if pattern.match(str(filename)):
                    logger.debug(
                        "Skipping %s (exclude RX %r)",
                        filename,
                        pattern.pattern,
                    )
                    return
            elif isinstance(pattern, str):
                if filename.match(pattern):
                    logger.debug("Skipping %s (exclude pattern %r)", filename, pattern)
                    return
            else:
                raise TypeError("Invalid type for pattern %r" % pattern)
        logger.debug("Adding %s to zip as %s", file, filename)
        # Read only after all exclusion filters passed — the previous code
        # read the file's bytes up front, even for entries it then skipped.
        data = file.read_bytes()
        zinfo = ZipInfo.from_file(file, arcname=str(filename))
        zinfo.compress_type = compression
        zipfile.writestr(zinfo, data)
    elif file.is_dir():
        for file_in_dir in file.iterdir():
            _add_to_zip(
                zipfile,
                file_in_dir,
                root_folder,
                exclude,
                exclude_dotfiles,
                relative_to,
                compression,
            )
def generator(self):
    """Yield the zip archive as a sequence of byte chunks.

    Each entry in ``self.paths`` maps a filesystem location
    (``path['fs']``) to its name inside the archive (``path['n']`` —
    per ZipInfo.from_file, the arcname, i.e. the member name with any
    drive letter and leading separators removed).  File data is copied
    in ``self.chunksize`` pieces, and whatever the stream has buffered
    is yielded after every piece; a final yield flushes the central
    directory written when the ZipFile closes.
    """
    stream = Stream()
    with ZipFile(stream, mode=self.mode, compression=self.compression,
                 allowZip64=self.allowZip64) as archive:
        for entry in self.paths:
            info = ZipInfo.from_file(entry['fs'], entry['n'])
            with open(entry['fs'], 'rb') as reader:
                with archive.open(info, mode=self.mode) as writer:
                    while True:
                        piece = reader.read(self.chunksize)
                        if not piece:
                            break
                        # write the piece, then pass buffered bytes along
                        writer.write(piece)
                        yield stream.get()
        archive.comment = self.comment
    # last piece: the close above appended the central directory
    yield stream.get()
    # TESTING (not used)
    self._buffer_size = stream.size()
    # Flush and close this stream.
    stream.close()
def get(self):
    """Generate the zip archive as byte chunks.

    Missing input files are skipped silently; an empty ``arcname``
    falls back to the entry's filename.  A chunk of buffered output is
    yielded after every ``self.chunk_size`` piece written, plus a final
    chunk once the archive is closed.
    """
    sink = ZipfileGenerator.UnseekableStream()
    with ZipFile(sink, mode='w') as archive:
        for record in self.paths:
            try:
                if len(record['arcname']) == 0:
                    record['arcname'] = record['filename']
                info = ZipInfo.from_file(record['filename'], record['arcname'])
                # NOTE(review): compression deliberately left at STORED —
                # deflate gained at most ~0.1% on the JPEG-heavy payloads.
                # Open question: fix disk timestamps here, or set system
                # time from the browser time instead?
                with open(record['filename'], 'rb') as reader:
                    with archive.open(info, mode='w') as writer:
                        while True:
                            piece = reader.read(self.chunk_size)
                            if not piece:
                                break
                            writer.write(piece)
                            # yield chunk of the zip file stream in bytes.
                            yield sink.get()
            except FileNotFoundError:
                # this should probably be logged, but how?
                pass
    # ZipFile was closed: get the final bytes
    yield sink.get()