def read_upload_file(self, data):
    """Receive one file from the peer and store it at data['path'].

    Protocol order on self.inpipe: first data['xattr_size'] bytes of
    pickled xattr data, then data['size'] bytes of file content.
    Records the new file (size, mtime, sha256) in self.filedb.

    Returns True on success.
    """
    fp = self.get_path(data['path'])
    # NOTE(security): pickle.loads on bytes read from the pipe is only
    # safe if the peer is fully trusted; never expose this to untrusted
    # input. Kept as-is since the wire format depends on it.
    xattr_data = pickle.loads(self.inpipe.read(data['xattr_size']))
    # Remove a pre-existing symlink or directory BEFORE opening, so the
    # open below neither follows the symlink nor fails on a directory.
    # (Previously this check ran after os.open had already created fp,
    # so os.path.exists(fp) was always true.)
    if os.path.exists(fp) and self.allowdelete:
        if os.path.islink(fp):
            os.remove(fp)
        elif os.path.isdir(fp):
            shutil.rmtree(fp, ignore_errors=True)
    # O_TRUNC: overwriting a larger existing file must not leave stale
    # trailing bytes (the sha256 below hashes the whole file on disk).
    f = os.open(fp, os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
                self.permissions_file)
    try:
        # Dup the fd so fdopen's context-exit close leaves f valid for
        # the explicit close in the finally block.
        with os.fdopen(os.dup(f), 'wb') as output:
            util.copy_file_limited(self.inpipe, output, data['size'])
        self._set_stat_and_xattr(fp, data['stat'], xattr_data)
        parent_dir = self.filedb.get_path(os.path.dirname(data['path']))
        ent = FileDbEntry(os.path.basename(data['path']), parent_dir)
        ent.size = data['size']
        ent.mtime = data['stat']['mtime']
        # Hash what actually landed on disk, not the stream we copied.
        with open(fp, 'rb') as fh:
            ent.sha256 = util.sha256_file(fh)
        self.filedb.append(ent)
    finally:
        # Don't leak the descriptor on any failure above.
        os.close(f)
    return True
def verify(self):
    """Walk self.pkg_root and warn about files that are missing from,
    or whose hash differs from, the recorded listing in self.all_files.
    """
    prefix_len = len(self.pkg_root)
    for root, dirs, files in os.walk(self.pkg_root):
        # Don't descend into the package metadata directory itself.
        if root == os.path.join(self.pkg_root, 'share'):
            dirs.remove('xyz')
        for name in files:
            full = os.path.join(root, name)
            # Path relative to the package root, as recorded in all_files.
            rel = full[prefix_len:]
            if rel not in self.all_files:
                print("warning: no pkg: ", rel)
                continue
            if sha256_file(full) != self.all_files[rel][0]:
                print("warning: bad hash:", rel)
def process_local_file(path):
    """Upload one local file or symlink at *path* (relative to root_dir).

    Skips the upload when the server already has an identical entry
    (matching symlink target, matching size+mtime, or matching sha256).
    Updates the local_files map and total_uploaded_size global; respects
    args.dry_run.
    """
    global total_uploaded_size
    local_files[path] = True
    full_path = os.path.join(root_dir, path)
    is_symlink = os.path.islink(full_path)
    symlink_to = None
    if is_symlink:
        symlink_to = os.readlink(full_path)
        if symlink_to[0] == '/':
            # Absolute target: rewrite it relative to root_dir so the
            # link is meaningful on the server side.
            symlink_to = '/' + os.path.relpath(symlink_to, root_dir)
    server_file = server_files.find_path(path)
    if server_file is not None:
        # Identical symlink already on the server: nothing to do.
        if server_file.is_symlink() and is_symlink and symlink_to == server_file.symlink:
            return
        # Regular file on both sides: cheap size+mtime check first,
        # then a full sha256 comparison before deciding to re-upload.
        if not server_file.is_directory() and not server_file.is_symlink() and not is_symlink:
            stat_info = os.stat(full_path)
            if stat_info.st_size == server_file.size and stat_info.st_mtime_ns == server_file.mtime:
                return
            try:
                with open(full_path, 'rb') as fh:
                    local_sha256 = util.sha256_file(fh)
            except PermissionError:
                print("Error opening file for SHA256 calculation", file=sys.stderr)
                return
            if local_sha256 == server_file.sha256:
                return
    if not create_parent_dirs(path):
        return
    if is_symlink:
        print(f"Symlinking {full_path} -> {symlink_to}")
        if not args.dry_run:
            client.symlink(path, symlink_to, full_path)
        return
    # From here on this is always a regular file (symlinks returned above).
    print(f"Uploading {full_path}")
    total_uploaded_size += os.stat(full_path).st_size
    if not args.dry_run:
        try:
            fd = os.open(full_path, os.O_RDONLY)
        except PermissionError:
            print("Error opening file", file=sys.stderr)
            return
        try:
            client.upload_file(path, fd)
        finally:
            # Previously the descriptor leaked if upload_file raised.
            os.close(fd)
def _package(self):
    """Write the package listing file into the prefix tree, then pack
    the whole prefix into the release tar.gz.
    """
    ensure_dir(self.j('{release_dir}'))
    pkg_root = self.j('{prefix_dir}')
    # Snapshot the installed files before the listing file is added,
    # so the listing does not include itself.
    installed = list(file_list(self.j('{prefix_dir}')))
    listing_path = self.j('{prefix_dir}', 'share', 'xyz', '{variant_name}')
    self.ensure_dir(os.path.dirname(listing_path))
    with open(listing_path, 'w') as listing:
        listing.write('{variant_name}\n'.format(**self.config))
        if not self.group_only:
            listing.write("Source Version: {}\n".format(
                git_ver('{source_dir}'.format(**self.config))))
        listing.write("XYZ Version: {}\n".format(git_ver('.')))
        listing.write('\n')
        # One "<sha256> <relative path>" line per installed file.
        for rel_path in installed:
            digest = sha256_file(self.j('{prefix_dir}', rel_path))
            listing.write('{} {}\n'.format(digest, rel_path))
    logger.info("Creating tar.gz %s/%s -> %s",
                os.getcwd(), pkg_root, self.config['release_file'])
    tar_gz('{release_file}'.format(**self.config), pkg_root)