def _unpack(self, file_path):
    """Decompress *file_path* chunk by chunk, then delete the original archive.

    The target path is *file_path* with its last extension stripped.  When the
    archive format is not recognized, only a progress update is emitted.
    """
    open_archive = self._get_unpack_func(file_path)
    if not open_archive:
        # Unrecognized extension — nothing to do beyond reporting it.
        self.progress_logger.update(source=file_path, target="(unknown archive format)")
        return

    target_path = file_path.rsplit(".", maxsplit=1)[0]
    with open_archive(file_path, "rb") as src, open(target_path, "wb") as dst:
        # iter() with a sentinel stops exactly when read() returns b"" (EOF).
        for chunk in iter(lambda: src.read(self.chunk_size), b""):
            dst.write(chunk)
    remove_file_if_exists(file_path)
    self.progress_logger.update(source=file_path, target=target_path)
def dump(self):
    """Dump necessary data to disk file"""
    started = now()
    stamp = format_datetime(started)
    self._update_pkgtree_timestamp(stamp)
    target = '%s-%s' % (self.filename, stamp)
    self.outputdata['timestamp'] = stamp
    self.outputdata['packages'] = {}
    LOGGER.info("Loading pkgtree data")
    # Loaders must run in this exact order — later steps depend on data
    # gathered by earlier ones (e.g. associations need loaded entities).
    loaders = (
        self._load_packagenames,
        self._load_evr,
        self._load_arch,
        self._load_repodata,
        self._load_cves,
        self._load_errata,
        self._associate_cves_to_errata,
        self._load_packages,
        self._load_module_streams,
        self._load_modules,
        self._associate_modules,
        self._associate_repos,
        self._associate_errata,
    )
    try:
        for step in loaders:
            step()
    except Exception:  # pylint: disable=broad-except
        # database exceptions caught here
        LOGGER.exception("Failed to export pkgtree")
    else:
        # only write pkgtree if all db queries succeeded
        LOGGER.info("Exporting data to %s", target)
        with gzip.open(target, 'wt') as dump_file:
            json.dump(self.outputdata, dump_file, indent=self.pkgtree_indent, ensure_ascii=False)
        # relink to the latest file
        remove_file_if_exists(self.filename)
        os.symlink(target, self.filename)
        LOGGER.info("Finished exporting data. Elapsed time: %s", now() - started)
        # remove old data above limit
        stale_dumps = sorted(glob.glob("%s-*" % self.filename), reverse=True)
        for stale in stale_dumps[self.pkgtree_keep_copies:]:
            LOGGER.info("Removing old dump %s", stale)
            remove_file_if_exists(stale)
def dump(self):
    """Dump necessary data to disk file.

    Writes a timestamped shelve database containing all exported tables,
    then repoints the ``self.filename`` symlink at the new dump and prunes
    old dumps beyond ``self.keep_copies``.  On any database error the
    partial dump file is removed and the symlink is left untouched.
    """
    timestamp = format_datetime(now())
    dump_filename = "%s-%s" % (self.filename, timestamp)
    LOGGER.info("Exporting data to %s", dump_filename)
    try:
        with shelve.open(dump_filename, 'c') as dump:
            self._dump_packagename(dump)
            self._dump_content_set_with_pkg_names(dump)
            self._dump_all_content_sets(dump)
            self._dump_cpes(dump)
            self._dump_updates(dump)
            self._dump_evr(dump)
            self._dump_arch(dump)
            self._dump_arch_compat(dump)
            self._dump_package_details(dump)
            self._dump_repo(dump)
            self._dump_errata(dump)
            self._dump_cves(dump)
            self._dump_modules(dump)
            self._dump_dbchange(dump)
            # Stamp the dump itself so consumers can tell when it was exported.
            dump["dbchange:exported"] = timestamp
    except Exception:  # pylint: disable=broad-except
        # database exceptions caught here
        LOGGER.exception("Failed to create dbdump")
        # NOTE(review): on some platforms shelve appends a suffix (.db/.dat/.dir)
        # to the filename, so this removal may miss the actual file(s) — confirm.
        remove_file_if_exists(dump_filename)
    else:
        # relink to the latest file only if no db exceptions
        remove_file_if_exists(self.filename)
        os.symlink(dump_filename, self.filename)
    # remove old data above limit
    old_data = sorted(glob.glob("%s-*" % self.filename), reverse=True)
    for fname in old_data[self.keep_copies:]:
        LOGGER.info("Removing old dump %s", fname)
        remove_file_if_exists(fname)