def unpack_rpm(package_file_name, files, tmp_dir, destdir, keeprpm, exact_files=False):
    """
    Unpacks a single rpm located in tmp_dir into destdir.

    Arguments:
        package_file_name - name of the rpm file
        files - files to extract from the rpm
        tmp_dir - temporary directory where the rpm file is located
        destdir - destination directory for the rpm package extraction
        keeprpm - check if the user wants to delete rpms from the tmp directory
        exact_files - extract only specified files

    Returns:
        RETURN_FAILURE in case of a serious problem
    """
    package_full_path = tmp_dir + "/" + package_file_name
    log1("Extracting %s to %s", package_full_path, destdir)
    log2("%s", files)
    # print as a call (not a statement) so the code is valid on both
    # Python 2 and Python 3; with a single argument the output is identical
    print(_("Extracting cpio from {0}").format(package_full_path))
    unpacked_cpio_path = tmp_dir + "/unpacked.cpio"
    try:
        unpacked_cpio = open(unpacked_cpio_path, 'wb')
    # "except IOError, ex" is Python-2-only syntax; "as" works on 2.6+ and 3.x
    except IOError as ex:
        print(_("Can't write to '{0}': {1}").format(unpacked_cpio_path, ex))
        return RETURN_FAILURE
def end(self, payload, status, msg):
    # Even though progress() normally reports completion, DNF skips the
    # progress() callback entirely when the package is already in a local
    # cache -- this callback is then the only place to report 100%.
    detail = msg or "DNF did not provide more details"
    if status in (STATUS_OK, STATUS_DRPM, STATUS_ALREADY_EXISTS):
        self.observer.update(str(payload), 100)
    elif status == STATUS_MIRROR:
        # dnf (librepo) falls back to another mirror when one is available
        log1("Mirror failed: %s" % detail)
    elif status == STATUS_FAILED:
        log2("Downloading failed: %s" % detail)
    else:
        sys.stderr.write("Unknown DNF download status: %s\n" % (msg))
def updateProgress(self, name, frac, fread, ftime):
    """
    A method used to update the progress

    Arguments:
        name - filename
        frac - progress fraction (0 -> 1)
        fread - formatted string containing BytesRead
        ftime - formatted string containing remaining or elapsed time
    """
    pct = int(frac * 100)
    # nothing changed since the last call -> nothing to report
    if pct == self.last_pct:
        log2("percentage is the same, not updating progress")
        return
    self.last_pct = pct

    if sys.stdout.isatty():
        # interactive terminal: redraw in place using the save/restore
        # cursor escape sequences
        sys.stdout.write("\033[sDownloading (%i of %i) %s: %3u%%\033[u"
                         % (self.downloaded_pkgs + 1, self.total_pkgs, name, pct))
        if pct == 100:
            print (_("Downloading ({0} of {1}) {2}: {3:3}%").format(
                self.downloaded_pkgs + 1, self.total_pkgs, name, pct))
    else:
        # machine-friendly output when spawned from abrt-server:
        # emit at most one line every 5 seconds, plus a final line at 100%
        now = time.time()
        if self.last_time == 0:
            self.last_time = now
        if pct == 100 or self.last_time > now or now - self.last_time >= 5:
            print (_("Downloading ({0} of {1}) {2}: {3:3}%").format(
                self.downloaded_pkgs + 1, self.total_pkgs, name, pct))
            self.last_time = now
            if pct == 100:
                self.last_time = 0
    sys.stdout.flush()
def triage(self, files):
    """
    Map requested debuginfo files to the available packages providing them.

    Arguments:
        files - iterable of debuginfo file paths to look up

    Returns:
        Tuple (package_files_dict, not_found, todownload_size, installed_size):
        package_files_dict maps a package to the list of requested files it
        provides, not_found lists paths no available package provides, and
        the sizes are totals over the distinct packages found.
    """
    query = self.base.sack.query()
    available = query.available()
    package_files_dict = {}
    not_found = []
    todownload_size = 0
    installed_size = 0
    for debuginfo_path in files:
        packages = available.filter(file=debuginfo_path)
        if not packages:
            log2("not found package for %s", debuginfo_path)
            not_found.append(debuginfo_path)
        else:
            # a file can be provided by several rpms; any will do, take the first
            pkg = packages[0]
            if pkg not in package_files_dict:
                # first file from this package: count its sizes exactly once
                todownload_size += float(pkg.downloadsize)
                installed_size += float(pkg.installsize)
            package_files_dict.setdefault(pkg, []).append(debuginfo_path)
            log2("found packages for %s: %s", debuginfo_path, pkg)
    return (package_files_dict, not_found, todownload_size, installed_size)
def download_package(self, pkg):
    """
    Fetch a single package into self.tmpdir.

    Arguments:
        pkg - yum package object to fetch

    Returns:
        Tuple (local, err) where local is the destination path and err is
        None on success, otherwise an error message / error object.
    """
    relative = pkg.returnSimple('relativepath')
    local = os.path.join(self.tmpdir, os.path.basename(relative))
    remote_url = pkg.returnSimple('remote_url')
    err = None
    if remote_url.startswith('file:///'):
        # the pkg lives in a local repo -- just copy the file
        # (strip the 7-char "file://" scheme, keeping the leading slash)
        source_path = remote_url[7:]
        log2("copying from local repo: %s", relative)
        try:
            shutil.copy(source_path, local)
        except OSError as ex:
            err = _("Cannot copy file '{0}': {1}").format(source_path, str(ex))
    else:
        # pkg is in a remote repo, we need to download it to tmpdir
        pkg.localpath = local  # Hack: to set the localpath we want
        err = self.base.downloadPkgs(pkglist=[pkg])
    # normalize the name
    # just str(pkg) doesn't work because it can have epoch
    return (local, err)
def update(self, name, pct):
    """
    A method used to update the progress

    Arguments:
        name - filename
        pct - percent downloaded
    """
    # nothing changed since the last call -> nothing to report
    if pct == self.last_pct:
        log2("percentage is the same, not updating progress")
        return
    self.last_pct = pct

    if sys.stdout.isatty():
        # interactive terminal: redraw in place using the save/restore
        # cursor escape sequences
        sys.stdout.write(
            "\033[sDownloading ({0} of {1}) {2}: {3:3}%\033[u".format(
                self.downloaded_pkgs + 1, self.total_pkgs, name, pct))
        if pct == 100:
            print(_("Downloading ({0} of {1}) {2}: {3:3}%").format(
                self.downloaded_pkgs + 1, self.total_pkgs, name, pct))
    else:
        # machine-friendly output when spawned from abrt-server:
        # emit at most one line every 5 seconds, plus a final line at 100%
        now = time.time()
        if self.last_time == 0:
            self.last_time = now
        if pct == 100 or self.last_time > now or now - self.last_time >= 5:
            print(_("Downloading ({0} of {1}) {2}: {3:3}%").format(
                self.downloaded_pkgs + 1, self.total_pkgs, name, pct))
            self.last_time = now
            if pct == 100:
                self.last_time = 0
    sys.stdout.flush()
def triage(self, files):
    """
    Map requested debuginfo files to packages, pulling in transitively
    required packages as well.

    Arguments:
        files - iterable of debuginfo file paths to look up

    Returns:
        Tuple (package_files_dict, not_found, todownload_size, installed_size):
        package_files_dict maps a package to the list of requested files it
        was selected for, not_found lists paths no available package provides,
        and the sizes are totals over the distinct packages selected.
    """
    dnf_query = self.base.sack.query()
    dnf_available = dnf_query.available()
    package_files_dict = {}
    not_found = []
    todownload_size = 0
    installed_size = 0

    def required_packages(query, package, seen):
        """
        Recursively collect every package required by 'package'.

        'seen' is a set of package names already visited; it breaks
        dependency cycles of any shape (A => B => ... => A as well as
        B => C => B). The previous guard compared only against the
        original package, so a cycle not passing through the origin
        recursed forever.
        """
        required_pkg_list = []
        if package.requires:
            pkg_reqs = query.filter(provides=package.requires, arch=package.arch)
            for p in pkg_reqs:
                if p.name not in seen:
                    seen.add(p.name)
                    required_pkg_list.append(p)
                    required_pkg_list += required_packages(query, p, seen)
        return required_pkg_list

    for debuginfo_path in files:
        packages = dnf_available.filter(file=debuginfo_path)
        if not packages:
            log2("not found package for %s", debuginfo_path)
            not_found.append(debuginfo_path)
            continue
        di_package_list = [packages[0]]
        if packages[0].requires:
            package_reqs = required_packages(dnf_available, packages[0],
                                             {packages[0].name})
            for pkg in package_reqs:
                if pkg not in di_package_list:
                    di_package_list.append(pkg)
                    log2("found required package {0} for {1}".format(
                        pkg, packages[0]))
        for pkg in di_package_list:
            if pkg in package_files_dict:
                package_files_dict[pkg].append(debuginfo_path)
            else:
                # first time we select this package: count its sizes once
                package_files_dict[pkg] = [debuginfo_path]
                todownload_size += float(pkg.downloadsize)
                installed_size += float(pkg.installsize)
            log2("found packages for %s: %s", debuginfo_path, pkg)
    return (package_files_dict, not_found, todownload_size, installed_size)
def triage(self, files):
    """
    Map requested debuginfo files to the yum packages providing them.

    Arguments:
        files - iterable of debuginfo file paths to look up

    Returns:
        Tuple (package_files_dict, not_found, todownload_size, installed_size):
        package_files_dict maps a package to the list of requested files it
        provides, not_found lists paths no package provides, and the sizes
        are totals over the distinct packages found.
    """
    not_found = []
    package_files_dict = {}
    todownload_size = 0
    installed_size = 0
    for debuginfo_path in files:
        log2("yum whatprovides %s", debuginfo_path)
        pkg = self.base.pkgSack.searchFiles(debuginfo_path)
        # sometimes one file is provided by more rpms, we can use either of
        # them, so let's use the first match
        if pkg:
            if pkg[0] in package_files_dict:
                package_files_dict[pkg[0]].append(debuginfo_path)
            else:
                # first file from this package: count its sizes exactly once
                package_files_dict[pkg[0]] = [debuginfo_path]
                todownload_size += float(pkg[0].size)
                installed_size += float(pkg[0].installedsize)
            log2("found pkg for %s: %s", debuginfo_path, pkg[0])
        else:
            log2("not found pkg for %s", debuginfo_path)
            not_found.append(debuginfo_path)
    return (package_files_dict, not_found, todownload_size, installed_size)
def start(self, total_files, total_size):
    """
    Download-started callback; only logs the event.

    Arguments:
        total_files - number of files to be downloaded (unused here)
        total_size - total size of the download (unused here)
    """
    log2("Started downloading of a package")
def progress(self, payload, done):
    """
    Download-progress callback: forward percent-done to the observer.

    Arguments:
        payload - DNF payload being downloaded; payload.download_size is
                  its total size (presumably bytes -- TODO confirm)
        done - amount downloaded so far, in the same units
    """
    log2("Updated a package")
    total = payload.download_size
    # guard against a zero/unknown download size to avoid ZeroDivisionError;
    # report 0% then -- the end() callback still reports completion
    pct = int(100 * (done / total)) if total else 0
    self.observer.update(str(payload), pct)
def unpack_rpm(package_full_path, files, tmp_dir, destdir, exact_files=False):
    """
    Unpacks a single rpm located in tmp_dir into destdir.

    Arguments:
        package_full_path - full file system path to the rpm file
        files - files to extract from the rpm
        tmp_dir - temporary directory where the rpm file is located
        destdir - destination directory for the rpm package extraction
        exact_files - extract only specified files

    Returns:
        RETURN_FAILURE in case of a serious problem
    """
    log1("Extracting %s to %s", package_full_path, destdir)
    log2("%s", files)
    print(_("Extracting cpio from {0}").format(package_full_path))
    unpacked_cpio_path = tmp_dir + "/unpacked.cpio"
    try:
        unpacked_cpio = open(unpacked_cpio_path, 'wb')
    except IOError as ex:
        print(_("Can't write to '{0}': {1}").format(unpacked_cpio_path, ex))
        return RETURN_FAILURE

    rpm2cpio = Popen(["rpm2cpio", package_full_path],
                     stdout=unpacked_cpio, bufsize=-1)
    retcode = rpm2cpio.wait()
    unpacked_cpio.close()
    if retcode == 0:
        log1("cpio written OK")
    else:
        print(_("Can't extract package '{0}'").format(package_full_path))
        # don't leave a partial cpio file behind on failure
        os.unlink(unpacked_cpio_path)
        return RETURN_FAILURE

    # reopen the written archive for reading by cpio
    unpacked_cpio = open(unpacked_cpio_path, 'rb')
    print(_("Caching files from {0} made from {1}").format(
        "unpacked.cpio", os.path.basename(package_full_path)))

    cpio_args = ["cpio", "-idu"]
    if exact_files:
        # each pattern must be its own argv element: joining them into one
        # space-separated string makes cpio treat the whole string as a
        # single pattern that never matches anything
        cpio_args += ["." + filename for filename in files]

    with tempfile.NamedTemporaryFile(prefix='abrt-unpacking-', dir='/tmp',
                                     delete=False) as log_file:
        log_file_name = log_file.name
        cpio = Popen(cpio_args, cwd=destdir, bufsize=-1,
                     stdin=unpacked_cpio, stdout=log_file, stderr=log_file)
        retcode = cpio.wait()

    unpacked_cpio.close()
    if retcode == 0:
        log1("files extracted OK")
        # extraction succeeded: remove the cpio log and the temporary archive
        os.unlink(log_file_name)
        os.unlink(unpacked_cpio_path)
    else:
        # keep both files around so the user can inspect what went wrong
        print(_("Can't extract files from '{0}'. For more information see '{1}'")
              .format(unpacked_cpio_path, log_file_name))
        return RETURN_FAILURE
def filter_installed_debuginfos(build_ids, cache_dirs):
    """
    Checks for installed debuginfos.

    Arguments:
        build_ids - string containing build ids
        cache_dirs - list of cache directories

    Returns:
        List of missing debuginfo files.
    """
    def _still_missing(candidates, prefix):
        # return the subset of candidate paths that do not exist under prefix
        missing = []
        for debuginfo_path in candidates:
            full_path = prefix + debuginfo_path
            log2("looking: %s", full_path)
            if os.path.exists(full_path):
                log2("found: %s", full_path)
            else:
                log2("not found: %s", debuginfo_path)
                missing.append(debuginfo_path)
        return missing

    # 1st pass -> search in /usr/lib (the paths are already absolute)
    remaining = _still_missing(build_ids_to_path("", build_ids), "")
    if not remaining:
        # nothing is missing, we can stop looking
        return remaining

    # subsequent passes: narrow the remaining list down through each cache dir
    for cache_dir in cache_dirs:
        log2("looking in %s" % cache_dir)
        remaining = _still_missing(remaining, cache_dir)
        if not remaining:
            # nothing is missing, we can stop looking
            return remaining
    return remaining
# from fedora-debuginfo: [Errno 256] No more mirrors to try. self.repos.populateSack(mdtype='filelists', cacheonly=1) except YumBaseError, ex: print _("Error retrieving filelists: '{0!s}'").format(ex) # we don't want to die here, some repos might be already processed # so there is a chance we already have what we need #return 1 #if verbose == 0: # # re-enable the output to stdout # unmute_stdout() not_found = [] package_files_dict = {} for debuginfo_path in files: log2("yum whatprovides %s", debuginfo_path) pkg = self.pkgSack.searchFiles(debuginfo_path) # sometimes one file is provided by more rpms, we can use either of # them, so let's use the first match if pkg: if pkg[0] in package_files_dict.keys(): package_files_dict[pkg[0]].append(debuginfo_path) else: package_files_dict[pkg[0]] = [debuginfo_path] todownload_size += float(pkg[0].size) installed_size += float(pkg[0].installedsize) total_pkgs += 1 log2("found pkg for %s: %s", debuginfo_path, pkg[0]) else: log2("not found pkg for %s", debuginfo_path)
def unpack_rpm(package_full_path, files, tmp_dir, destdir, exact_files=False):
    """
    Unpacks a single rpm located in tmp_dir into destdir.

    Arguments:
        package_full_path - full file system path to the rpm file
        files - files to extract from the rpm
        tmp_dir - temporary directory where the rpm file is located
        destdir - destination directory for the rpm package extraction
        exact_files - extract only specified files

    Returns:
        RETURN_FAILURE in case of a serious problem
    """
    log1("Extracting %s to %s", package_full_path, destdir)
    log2("%s", files)
    print(_("Extracting cpio from {0}").format(package_full_path))
    unpacked_cpio_path = tmp_dir + "/unpacked.cpio"
    try:
        unpacked_cpio = open(unpacked_cpio_path, 'wb')
    except IOError as ex:
        print(_("Can't write to '{0}': {1}").format(unpacked_cpio_path, ex))
        return RETURN_FAILURE

    rpm2cpio = Popen(["rpm2cpio", package_full_path],
                     stdout=unpacked_cpio, bufsize=-1)
    retcode = rpm2cpio.wait()
    unpacked_cpio.close()
    if retcode == 0:
        log1("cpio written OK")
    else:
        print(_("Can't extract package '{0}'").format(package_full_path))
        # don't leave a partial cpio file behind on failure
        os.unlink(unpacked_cpio_path)
        return RETURN_FAILURE

    # reopen the written archive for reading by cpio
    unpacked_cpio = open(unpacked_cpio_path, 'rb')
    print(
        _("Caching files from {0} made from {1}").format(
            "unpacked.cpio", os.path.basename(package_full_path)))

    cpio_args = ["cpio", "-idu"]
    if exact_files:
        # each pattern must be its own argv element: joining them into one
        # space-separated string makes cpio treat the whole string as a
        # single pattern that never matches anything
        cpio_args += ["." + filename for filename in files]

    with tempfile.NamedTemporaryFile(prefix='abrt-unpacking-', dir='/tmp',
                                     delete=False) as log_file:
        log_file_name = log_file.name
        cpio = Popen(cpio_args, cwd=destdir, bufsize=-1,
                     stdin=unpacked_cpio, stdout=log_file, stderr=log_file)
        retcode = cpio.wait()

    unpacked_cpio.close()
    if retcode == 0:
        log1("files extracted OK")
        # extraction succeeded: remove the cpio log and the temporary archive
        os.unlink(log_file_name)
        os.unlink(unpacked_cpio_path)
    else:
        # keep both files around so the user can inspect what went wrong
        print(
            _("Can't extract files from '{0}'. For more information see '{1}'"
              ).format(unpacked_cpio_path, log_file_name))
        return RETURN_FAILURE