def _load_repodata(self):
    """Load the repo data"""
    with self._named_cursor() as cursor:
        cursor.execute("""select r.id, cs.label, cs.name, r.basearch_id, r.releasever, r.revision
                            from repo r
                            join content_set cs on cs.id = r.content_set_id
                       """)
        for repo_id, label, name, arch_id, releasever, revision in cursor:
            archname = ''
            if arch_id in self.archid2arch:
                archname = self.archid2arch[arch_id]
            self.repodata[repo_id] = {'revision': revision,
                                      'data': {'label': label,
                                               'name': name,
                                               'arch': archname,
                                               'releasever': releasever,
                                               'revision': format_datetime(revision)}}
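# _named_cursor() is used by the loaders in this section but is not shown
# here. A minimal sketch, assuming psycopg2 and a connection held in
# self.conn: a named (server-side) cursor streams rows from the database
# instead of fetching the whole result set into memory, which matters for
# large exports. The helper below is an illustration, not the project's
# actual implementation.
from contextlib import contextmanager

@contextmanager
def _named_cursor(self, name="dump_cursor"):
    """Hypothetical helper: yield a server-side cursor, close it afterwards."""
    cursor = self.conn.cursor(name=name)
    try:
        yield cursor
    finally:
        cursor.close()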
def dump(self):
    """Dump necessary data to disk file"""
    timestamp = format_datetime(now())
    dump_filename = "%s-%s" % (self.filename, timestamp)
    LOGGER.info("Exporting data to %s", dump_filename)
    with shelve.open(dump_filename, 'c') as dump:
        self.dump_packagename(dump)
        self.dump_updates(dump)
        self.dump_evr(dump)
        self.dump_arch(dump)
        self.dump_arch_compat(dump)
        self.dump_package_details(dump)
        self.dump_repo(dump)
        self.dump_errata(dump)
        self.dump_cves(dump)
        self.dump_dbchange(dump)
        dump["dbchange:exported"] = timestamp
    # relink to the latest file
    try:
        os.unlink(self.filename)
    except FileNotFoundError:
        pass
    os.symlink(dump_filename, self.filename)
    # remove old data above limit
    old_data = sorted(glob.glob("%s-*" % self.filename), reverse=True)
    for fname in old_data[self.keep_copies:]:
        LOGGER.info("Removing old dump %s", fname)
        os.unlink(fname)
def dump(self):
    """Dump necessary data to disk file"""
    timestamp = format_datetime(now())
    dump_filename = "%s-%s" % (self.filename, timestamp)
    LOGGER.info("Exporting data to %s", dump_filename)
    try:
        with shelve.open(dump_filename, 'c') as dump:
            self._dump_packagename(dump)
            self._dump_content_set_with_pkg_names(dump)
            self._dump_all_content_sets(dump)
            self._dump_cpes(dump)
            self._dump_updates(dump)
            self._dump_evr(dump)
            self._dump_arch(dump)
            self._dump_arch_compat(dump)
            self._dump_package_details(dump)
            self._dump_repo(dump)
            self._dump_errata(dump)
            self._dump_cves(dump)
            self._dump_modules(dump)
            self._dump_dbchange(dump)
            dump["dbchange:exported"] = timestamp
    except Exception:  # pylint: disable=broad-except
        # database exceptions caught here
        LOGGER.exception("Failed to create dbdump")
        remove_file_if_exists(dump_filename)
    else:
        # relink to the latest file only if no db exceptions
        remove_file_if_exists(self.filename)
        os.symlink(dump_filename, self.filename)
    # remove old data above limit
    old_data = sorted(glob.glob("%s-*" % self.filename), reverse=True)
    for fname in old_data[self.keep_copies:]:
        LOGGER.info("Removing old dump %s", fname)
        remove_file_if_exists(fname)
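# remove_file_if_exists() is not defined in this section. Given the older
# try/os.unlink/FileNotFoundError pattern in the previous version of dump(),
# it is presumably a small wrapper along these lines (a sketch, not the
# confirmed implementation):
import os

def remove_file_if_exists(filename):
    """Remove a file, silently ignoring the case where it does not exist."""
    try:
        os.unlink(filename)
    except FileNotFoundError:
        pass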
def dump(self):
    """Dump necessary data to disk file"""
    starttime = now()
    timestamp = format_datetime(starttime)
    dump_filename = '%s-%s' % (self.filename, timestamp)
    self.outputdata['timestamp'] = timestamp
    self.outputdata['packages'] = {}
    LOGGER.info("Loading data")
    self.load_packagenames()
    self.load_evr()
    self.load_arch()
    self.load_repodata()
    self.load_cves()
    self.load_errata()
    self.associate_cves_to_errata()
    self.load_packages()
    self.associate_repos()
    self.associate_errata()
    LOGGER.info("Exporting data to %s", dump_filename)
    with open(dump_filename, 'w') as dump_file:
        json.dump(self.outputdata, dump_file, indent=2, ensure_ascii=False)
    # relink to the latest file
    try:
        os.unlink(self.filename)
    except FileNotFoundError:
        pass
    os.symlink(dump_filename, self.filename)
    LOGGER.info("Finished exporting data. Elapsed time: %s", now() - starttime)
    # remove old data above limit
    old_data = sorted(glob.glob("%s-*" % self.filename), reverse=True)
    for fname in old_data[KEEP_COPIES:]:
        LOGGER.info("Removing old dump %s", fname)
        os.unlink(fname)
def _load_errata(self):
    """Load the errata data"""
    with self._named_cursor() as cursor:
        cursor.execute("select id, name, issued from errata")
        for errata_id, name, issued in cursor:
            self.erratadata[errata_id] = {'issued': issued,
                                          'data': {'name': name,
                                                   'issued': format_datetime(issued)}}
def _save_lastmodified(self, lastmodified):
    lastmodified = format_datetime(lastmodified)
    cur = self.conn.cursor()
    # Update timestamp
    cur.execute("update metadata set value = %s where key = %s",
                (lastmodified, self.UPDATED_KEY,))
    if cur.rowcount < 1:
        cur.execute("insert into metadata (key, value) values (%s, %s)",
                    (self.UPDATED_KEY, lastmodified))
    cur.close()
    self.conn.commit()
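# The update-then-insert above works, but it is only race-free with a single
# writer. If the database is PostgreSQL 9.5+ and metadata.key carries a
# unique constraint (an assumption, not confirmed by this section), the same
# logic collapses into one atomic upsert:
def _save_lastmodified(self, lastmodified):
    lastmodified = format_datetime(lastmodified)
    cur = self.conn.cursor()
    cur.execute("""insert into metadata (key, value) values (%s, %s)
                   on conflict (key) do update set value = excluded.value""",
                (self.UPDATED_KEY, lastmodified))
    cur.close()
    self.conn.commit()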
def _dump_repo(self, dump):
    """Select repo mappings"""
    # Select repo detail mapping
    with self._named_cursor() as cursor:
        cursor.execute("""select r.id, cs.label, cs.name as repo_name, r.url,
                                 a.name as basearch_name, r.releasever,
                                 p.name as product_name, p.id as product_id,
                                 r.revision, cs.third_party
                            from repo r
                            join content_set cs on cs.id = r.content_set_id
                            left join arch a on a.id = r.basearch_id
                            join product p on p.id = cs.product_id
                       """)
        repolabel2ids = {}
        productid2repoids = {}
        for oid, label, name, url, basearch, releasever, product, product_id, revision, third_party in cursor:
            dump["repo_detail:%s" % oid] = (label, name, url, basearch, releasever,
                                            product, product_id,
                                            format_datetime(revision), third_party)
            repolabel2ids.setdefault("repolabel2ids:%s" % label, []).append(oid)
            productid2repoids.setdefault("productid2repoids:%s" % product_id, []).append(oid)
        dump.update(repolabel2ids)
        dump.update(productid2repoids)

    if self.package_ids:
        # Select package ID to repo IDs mapping
        with self._named_cursor() as cursor:
            cursor.execute("""select pkg_id, repo_id
                                from pkg_repo
                               where pkg_id in %s
                           """, [tuple(self.package_ids)])
            pkgid2repoids = {}
            for pkg_id, repo_id in cursor:
                pkgid2repoids.setdefault("pkgid2repoids:%s" % pkg_id, []).append(repo_id)
            dump.update(pkgid2repoids)
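# Read-back sketch for the shelve layout written by _dump_repo(): keys are
# plain strings such as "repo_detail:<id>" and "repolabel2ids:<label>", so a
# consumer resolves a label to repo IDs first, then fetches each detail
# tuple. The dump path and the repo label below are hypothetical.
import shelve

with shelve.open("dump.db", 'r') as dump:
    for repo_id in dump.get("repolabel2ids:some-repo-label", []):
        (label, name, url, basearch, releasever,
         product, product_id, revision, third_party) = dump["repo_detail:%s" % repo_id]
        print(label, url, revision)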
def dump(self):
    """Dump necessary data to disk file"""
    starttime = now()
    timestamp = format_datetime(starttime)
    self._update_pkgtree_timestamp(timestamp)
    dump_filename = '%s-%s' % (self.filename, timestamp)
    self.outputdata['timestamp'] = timestamp
    self.outputdata['packages'] = {}
    LOGGER.info("Loading pkgtree data")
    try:
        self._load_packagenames()
        self._load_evr()
        self._load_arch()
        self._load_repodata()
        self._load_cves()
        self._load_errata()
        self._associate_cves_to_errata()
        self._load_packages()
        self._load_module_streams()
        self._load_modules()
        self._associate_modules()
        self._associate_repos()
        self._associate_errata()
    except Exception:  # pylint: disable=broad-except
        # database exceptions caught here
        LOGGER.exception("Failed to export pkgtree")
    else:
        # only write pkgtree if all db queries succeeded
        LOGGER.info("Exporting data to %s", dump_filename)
        with gzip.open(dump_filename, 'wt') as dump_file:
            json.dump(self.outputdata, dump_file, indent=self.pkgtree_indent, ensure_ascii=False)
        # relink to the latest file
        remove_file_if_exists(self.filename)
        os.symlink(dump_filename, self.filename)
    LOGGER.info("Finished exporting data. Elapsed time: %s", now() - starttime)
    # remove old data above limit
    old_data = sorted(glob.glob("%s-*" % self.filename), reverse=True)
    for fname in old_data[self.pkgtree_keep_copies:]:
        LOGGER.info("Removing old dump %s", fname)
        remove_file_if_exists(fname)
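# Read-back sketch for the gzip'ed pkgtree JSON written above; the file name
# is hypothetical. gzip.open in text mode ('rt') pairs with json.load the
# same way the writer pairs gzip.open('wt') with json.dump.
import gzip
import json

with gzip.open("pkgtree.json.gz", "rt") as dump_file:
    data = json.load(dump_file)
print(data["timestamp"], len(data["packages"]))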
def test_datetime_to_iso(self, date_param):
    """Test formatting datetime to ISO format."""
    date = dateutil.format_datetime(date_param[1])
    assert isinstance(date, str)
    assert RE_ISO.match(date) is not None
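# format_datetime() itself is not shown in this section. Since the test only
# requires a string matching an ISO-format regex, a plausible sketch looks
# like the following (an assumption, not the project's actual dateutil code):
from datetime import timezone

def format_datetime(dt):
    """Hypothetical sketch: render a datetime as timezone-aware ISO 8601."""
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.isoformat()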