def _save_packages(self, db, db_report, packages, count=1):
    """Attach a uReport package list to *db_report*.

    Packages present in storage are recorded as ReportPackage rows;
    packages unknown to storage become ReportUnknownPackage rows.
    Each matching row's counter is increased by *count*.
    """
    # Map the optional uReport package role onto the stored type.
    role_map = {"affected": "CRASHED", "selinux_policy": "SELINUX_POLICY"}
    for pkg in packages:
        role = role_map.get(pkg.get("package_role"), "RELATED")

        db_package = get_package_by_nevra(db,
                                          name=pkg["name"],
                                          epoch=pkg["epoch"],
                                          version=pkg["version"],
                                          release=pkg["release"],
                                          arch=pkg["architecture"])
        if db_package is not None:
            # Known package - track it via ReportPackage.
            db_reportpackage = get_reportpackage(db, db_report, db_package)
            if db_reportpackage is None:
                db_reportpackage = ReportPackage()
                db_reportpackage.report = db_report
                db_reportpackage.installed_package = db_package
                db_reportpackage.count = 0
                db_reportpackage.type = role
                db.session.add(db_reportpackage)
            db_reportpackage.count += count
            continue

        self.log_warn("Package {0}-{1}:{2}-{3}.{4} not found in "
                      "storage".format(pkg["name"], pkg["epoch"],
                                       pkg["version"], pkg["release"],
                                       pkg["architecture"]))

        db_unknown_pkg = get_unknown_package(db, db_report, role,
                                             pkg["name"], pkg["epoch"],
                                             pkg["version"], pkg["release"],
                                             pkg["architecture"])
        if db_unknown_pkg is None:
            db_arch = get_arch_by_name(db, pkg["architecture"])
            if db_arch is None:
                # Unknown architecture - nothing we can record.
                continue

            db_unknown_pkg = ReportUnknownPackage()
            db_unknown_pkg.report = db_report
            db_unknown_pkg.name = pkg["name"]
            db_unknown_pkg.epoch = pkg["epoch"]
            db_unknown_pkg.version = pkg["version"]
            db_unknown_pkg.release = pkg["release"]
            db_unknown_pkg.arch = db_arch
            db_unknown_pkg.type = role
            db_unknown_pkg.count = 0
            db.session.add(db_unknown_pkg)

        db_unknown_pkg.count += count
def run(self, cmdline, db):
    """Assign operating systems and/or architectures to a repository."""
    repo = db.session.query(Repo).filter(Repo.name == cmdline.NAME).first()
    if repo is None:
        self.log_error("Repository '{0}' not found".format(cmdline.NAME))
        return 1

    opsys_list = []
    arch_list = []
    # Each positional item may name either an operating system or an
    # architecture; decide per item and sort it into the right bucket.
    for item_name in cmdline.OPSYS + cmdline.ARCH:
        opsys = get_opsys_by_name(db, item_name)
        arch = get_arch_by_name(db, item_name)

        if not opsys and not arch:
            self.log_error("Item '{0}' not found".format(item_name))
            return 1

        (opsys_list if opsys else arch_list).append(opsys or arch)

    repo.opsys_list += opsys_list
    repo.arch_list += arch_list
    db.session.flush()

    self.log_info("Assigned '{0}' to {1} operating system(s)"
                  " and {2} architecture(s)".format(
                      repo.name, len(opsys_list), len(arch_list)))
def _save_packages(self, db, db_report, packages, count=1):
    """Record the uReport package list on *db_report*.

    Known packages become ReportPackage rows; packages missing from
    storage become ReportUnknownPackage rows (including the semantic
    version/release columns).  Counters are bumped by *count*.
    """
    role_by_package_role = {
        "affected": "CRASHED",
        "selinux_policy": "SELINUX_POLICY",
    }
    for pkg_info in packages:
        role = role_by_package_role.get(pkg_info.get("package_role"),
                                        "RELATED")

        db_package = get_package_by_nevra(db,
                                          name=pkg_info["name"],
                                          epoch=pkg_info["epoch"],
                                          version=pkg_info["version"],
                                          release=pkg_info["release"],
                                          arch=pkg_info["architecture"])
        if db_package is not None:
            # Package exists in storage - count it as a ReportPackage.
            db_reportpackage = get_reportpackage(db, db_report, db_package)
            if db_reportpackage is None:
                db_reportpackage = ReportPackage()
                db_reportpackage.report = db_report
                db_reportpackage.installed_package = db_package
                db_reportpackage.count = 0
                db_reportpackage.type = role
                db.session.add(db_reportpackage)
            db_reportpackage.count += count
            continue

        self.log_warn("Package {0}-{1}:{2}-{3}.{4} not found in "
                      "storage".format(pkg_info["name"], pkg_info["epoch"],
                                       pkg_info["version"],
                                       pkg_info["release"],
                                       pkg_info["architecture"]))

        db_unknown_pkg = get_unknown_package(db, db_report, role,
                                             pkg_info["name"],
                                             pkg_info["epoch"],
                                             pkg_info["version"],
                                             pkg_info["release"],
                                             pkg_info["architecture"])
        if db_unknown_pkg is None:
            db_arch = get_arch_by_name(db, pkg_info["architecture"])
            if db_arch is None:
                # Architecture unknown to storage - skip this package.
                continue

            db_unknown_pkg = ReportUnknownPackage()
            db_unknown_pkg.report = db_report
            db_unknown_pkg.name = pkg_info["name"]
            db_unknown_pkg.epoch = pkg_info["epoch"]
            db_unknown_pkg.version = pkg_info["version"]
            db_unknown_pkg.release = pkg_info["release"]
            # Pre-computed semantic forms allow version comparison in SQL.
            db_unknown_pkg.semver = to_semver(pkg_info["version"])
            db_unknown_pkg.semrel = to_semver(pkg_info["release"])
            db_unknown_pkg.arch = db_arch
            db_unknown_pkg.type = role
            db_unknown_pkg.count = 0
            db.session.add(db_unknown_pkg)

        db_unknown_pkg.count += count
def run(self, cmdline, db):
    """Register each architecture named on the command line."""
    for archname in cmdline.NAME:
        if get_arch_by_name(db, archname) is not None:
            # Duplicate definitions are refused outright.
            self.log_error("Architecture '{0}' already defined"
                           .format(archname))
            return 1

        self.log_info("Adding architecture '{0}'".format(archname))
        db_arch = Arch()
        db_arch.name = archname
        db.session.add(db_arch)
        db.session.flush()
def run(self, cmdline, db):
    """Add the given architecture names to storage, refusing duplicates."""
    for name in cmdline.NAME:
        existing = get_arch_by_name(db, name)
        if existing:
            self.log_error(
                "Architecture '{0}' already defined".format(name))
            return 1
        self.log_info("Adding architecture '{0}'".format(name))
        record = Arch()
        record.name = name
        db.session.add(record)
        db.session.flush()
def run(self, cmdline, db):
    """Assign operating systems and/or architectures to a repository.

    Accepts "rhel" as a shorthand for "Red Hat Enterprise Linux".
    """
    repo = db.session.query(Repo).filter(Repo.name == cmdline.NAME).first()
    if repo is None:
        self.log_error("Repository '{0}' not found"
                       .format(cmdline.NAME))
        return 1

    assigned_opsys = []
    assigned_archs = []
    for item_name in cmdline.OPSYS + cmdline.ARCH:
        opsys = get_opsys_by_name(db, item_name)
        arch = get_arch_by_name(db, item_name)

        if not opsys and not arch:
            # "rhel" is a common shorthand; retry with the full name.
            if item_name == "rhel":
                item_name = "Red Hat Enterprise Linux"
                opsys = get_opsys_by_name(db, item_name)
            if not opsys:
                self.log_error("Item '{0}' not found"
                               .format(item_name))
                return 1

        if opsys:
            assigned_opsys.append(opsys)
        else:
            assigned_archs.append(arch)

    repo.opsys_list += assigned_opsys
    repo.arch_list += assigned_archs
    db.session.flush()

    self.log_info("Assigned '{0}' to {1} operating system(s)"
                  " and {2} architecture(s)"
                  .format(repo.name, len(assigned_opsys),
                          len(assigned_archs)))
def run(self, cmdline, db):
    """Seed storage with the built-in architectures and install plugins."""
    for archname in Init.archs:
        if get_arch_by_name(db, archname) is not None:
            # Already present - nothing to do.
            continue
        self.log_info("Adding architecture '{0}'".format(archname))
        db_arch = Arch()
        db_arch.name = archname
        db.session.add(db_arch)
        db.session.flush()

    # Concrete plugins are the grandchildren of Plugin (one level of
    # abstract plugin-type classes in between).
    plugins = {sub
               for cls in Plugin.__subclasses__()
               for sub in cls.__subclasses__()}
    for plugin in plugins:
        if not plugin.installed(db):
            plugin.install(db, logger=log.getChildLogger(plugin.__name__))
    db.session.flush()
def run(self, cmdline, db) -> int:
    """Assign operating systems, OS releases and/or architectures to a repo.

    Each positional item may name an operating system, an
    "<opsys> <release>" pair, or an architecture; "rhel" is accepted as
    a shorthand for "Red Hat Enterprise Linux".  Parametrized repos
    (URLs containing '$') only accept plain operating systems, plain
    repos only accept OS releases.  Returns 0 on success, 1 on error.
    """
    repo = (db.session.query(Repo).filter(
        Repo.name == cmdline.REPO).first())
    if not repo:
        self.log_error("Repository '{0}' not found".format(cmdline.REPO))
        return 1
    arch_list = []
    opsys_list = []
    opsysrelease_list = []
    for item_name in cmdline.OPSYS + cmdline.ARCH:
        # Try all three interpretations of the item and sort the match
        # into the appropriate bucket below.
        pos_name, pos_release = self._parser_osrelease(item_name)
        opsysrelease = get_osrelease(db, pos_name, pos_release)
        opsys = get_opsys_by_name(db, item_name)
        arch = get_arch_by_name(db, item_name)
        if not (opsys or arch or opsysrelease):
            # If name is rhel we will search Red Hat Enterprise Linux
            if item_name == "rhel":
                item_name = "Red Hat Enterprise Linux"
                opsys = get_opsys_by_name(db, item_name)
                if not opsys:
                    self.log_error(
                        "Item '{0}' not found".format(item_name))
                    return 1
            elif pos_name == "rhel":
                # Same shorthand, but for the "<name> <release>" form.
                pos_name = "Red Hat Enterprise Linux"
                opsysrelease = get_osrelease(db, pos_name, pos_release)
                if not opsysrelease:
                    self.log_error(
                        "Item '{0}' not found".format(item_name))
                    return 1
            else:
                self.log_error("Item '{0}' not found".format(item_name))
                return 1
        if opsys:
            opsys_list.append(opsys)
        elif opsysrelease:
            opsysrelease_list.append(opsysrelease)
        else:
            arch_list.append(arch)
    # test if url type correspond with type of repo
    if any('$' in url.url for url in repo.url_list) and opsysrelease_list:
        self.log_error("Assigning operating system with release to "
                       "parametrized repo. Assign only operating system.")
        return 1
    if any('$' not in url.url for url in repo.url_list) and opsys_list:
        self.log_error("Assigning operating system without release to "
                       "non - parametrized repo. Assign operating system"
                       " with release.")
        return 1
    repo.opsys_list += opsys_list
    repo.opsysrelease_list += opsysrelease_list
    repo.arch_list += arch_list
    db.session.flush()
    self.log_info(
        "Assigned '{0}' to {1} operating system(s)"
        ", {2} operating systems with release(s) and {3} architecture(s)".
        format(repo.name, len(opsys_list), len(opsysrelease_list),
               (len(arch_list))))
    return 0
def run(self, cmdline, db) -> int:
    """Unassign builds from an OS release or from an architecture.

    Exactly one of (OPSYS + RELEASE) or --arch must be given.  Deletes
    the BuildOpSysReleaseArch links (and the affected packages) unless
    dry_run is set.  Returns 0 on success, 1 on argument/lookup errors.
    """
    if not cmdline.OPSYS and not cmdline.RELEASE and not cmdline.arch:
        self.log_error("None of the arguments were specified.")
        return 1
    if (cmdline.OPSYS or cmdline.RELEASE) and cmdline.arch:
        self.log_error(
            "Argument --arch not allowed with OPSYS and RELEASE.")
        return 1
    if cmdline.OPSYS and not cmdline.RELEASE:
        self.log_error("Missing RELEASE argument.")
        return 1
    # in case we're using the web UI:
    if not hasattr(cmdline, "dry_run"):
        cmdline.dry_run = False
    if cmdline.OPSYS:
        # nobody will write the full name
        if cmdline.OPSYS == "rhel":
            cmdline.OPSYS = "Red Hat Enterprise Linux"
        # check if operating system is known
        if not get_opsys_by_name(db, cmdline.OPSYS):
            self.log_error(
                "Selected operating system '%s' is not supported.",
                cmdline.OPSYS)
            return 1
        self.log_info("Selected operating system: '%s'", cmdline.OPSYS)
        # check if release is known
        opsysrelease = get_osrelease(db, cmdline.OPSYS, cmdline.RELEASE)
        if not opsysrelease:
            self.log_error("Selected release '%s' is not supported.",
                           cmdline.RELEASE)
            return 1
        self.log_info("Selected release: '%s'", cmdline.RELEASE)
        # find all builds, that are assigned to this opsysrelease but none other
        # architecture is missed out intentionally
        all_builds = get_builds_by_opsysrelease_id(db, opsysrelease.id)
        # delete all records, where the opsysrelease.id is present
        query = (db.session.query(BuildOpSysReleaseArch).filter(
            BuildOpSysReleaseArch.opsysrelease_id == opsysrelease.id))
    elif cmdline.arch:
        # check if the architecture is known
        architecture = get_arch_by_name(db, cmdline.arch)
        if not architecture:
            self.log_error("Selected architecture '%s' is not supported.",
                           cmdline.arch)
            return 1
        self.log_info("Selected architecture: '%s'", cmdline.arch)
        # find all builds, that are assigned to this arch_id but none other
        all_builds = get_builds_by_arch_id(db, architecture.id)
        # delete all records, where the arch.id is present
        query = (db.session.query(BuildOpSysReleaseArch).filter(
            BuildOpSysReleaseArch.arch_id == architecture.id))
    else:
        self.log_error(
            "Architecture or operating system was not selected.")
        return 1
    self.log_info("%d links will be removed", query.count())
    if cmdline.dry_run:
        self.log_info("Dry run active, removal will be skipped")
    else:
        # Remove the packages of the affected builds first, then drop
        # the link rows in one bulk delete.
        for build in all_builds:
            for pkg in (db.session.query(Package).filter(
                    Package.build_id == build.build_id).all()):
                self.delete_package(pkg, cmdline.dry_run)
        query.delete()
    return 0
def save_ureport2(db, ureport, create_component=False, timestamp=None, count=1):
    """
    Save uReport2

    Persists a validated uReport into storage: finds or creates the
    Report matching the problem hash, bumps the per-OS-release,
    per-architecture and per-reason counters and the daily/weekly/
    monthly history, then delegates OS- and problem-specific parts to
    the respective plugins.  *count* is the number of occurrences
    merged in at once; *timestamp* defaults to now (UTC).
    Raises FafError for unknown OS release, component or architecture.
    """
    if timestamp is None:
        timestamp = datetime.datetime.utcnow()
    # Plugin lookup; a KeyError here means the ureport was not
    # validated beforehand.
    osplugin = systems[ureport["os"]["name"]]
    problemplugin = problemtypes[ureport["problem"]["type"]]
    db_osrelease = get_osrelease(db, osplugin.nice_name,
                                 ureport["os"]["version"])
    if db_osrelease is None:
        raise FafError(
            "Operating system '{0} {1}' not found in storage".format(
                osplugin.nice_name, ureport["os"]["version"]))
    report_hash = problemplugin.hash_ureport(ureport["problem"])
    db_report = get_report(db, report_hash)
    if db_report is None:
        # First occurrence of this problem hash - create the Report.
        component_name = problemplugin.get_component_name(ureport["problem"])
        db_component = get_component_by_name(db, component_name,
                                             osplugin.nice_name)
        if db_component is None:
            if create_component:
                log.info("Creating an unsupported component '{0}' in "
                         "operating system '{1}'".format(
                             component_name, osplugin.nice_name))
                db_component = OpSysComponent()
                db_component.name = component_name
                db_component.opsys = db_osrelease.opsys
                db.session.add(db_component)
            else:
                raise FafError("Unknown component '{0}' in operating system "
                               "{1}".format(component_name,
                                            osplugin.nice_name))
        db_report = Report()
        db_report.type = problemplugin.name
        db_report.first_occurrence = timestamp
        db_report.last_occurrence = timestamp
        db_report.count = 0
        db_report.component = db_component
        db.session.add(db_report)
        db_report_hash = ReportHash()
        db_report_hash.report = db_report
        db_report_hash.hash = report_hash
        db.session.add(db_report_hash)
    # Widen the occurrence window if this report falls outside it.
    if db_report.first_occurrence > timestamp:
        db_report.first_occurrence = timestamp
    if db_report.last_occurrence < timestamp:
        db_report.last_occurrence = timestamp
    db_reportosrelease = get_reportosrelease(db, db_report, db_osrelease)
    if db_reportosrelease is None:
        db_reportosrelease = ReportOpSysRelease()
        db_reportosrelease.report = db_report
        db_reportosrelease.opsysrelease = db_osrelease
        db_reportosrelease.count = 0
        db.session.add(db_reportosrelease)
    db_reportosrelease.count += count
    db_arch = get_arch_by_name(db, ureport["os"]["architecture"])
    if db_arch is None:
        raise FafError("Architecture '{0}' is not supported".format(
            ureport["os"]["architecture"]))
    db_reportarch = get_reportarch(db, db_report, db_arch)
    if db_reportarch is None:
        db_reportarch = ReportArch()
        db_reportarch.report = db_report
        db_reportarch.arch = db_arch
        db_reportarch.count = 0
        db.session.add(db_reportarch)
    db_reportarch.count += count
    reason = ureport["reason"].encode("utf-8")
    db_reportreason = get_reportreason(db, db_report, reason)
    if db_reportreason is None:
        db_reportreason = ReportReason()
        db_reportreason.report = db_report
        db_reportreason.reason = reason
        db_reportreason.count = 0
        db.session.add(db_reportreason)
    db_reportreason.count += count
    # History counters; serial == 1 marks the first report from a given
    # machine, which is counted as "unique".
    day = timestamp.date()
    db_daily = get_history_day(db, db_report, db_osrelease, day)
    if db_daily is None:
        db_daily = ReportHistoryDaily()
        db_daily.report = db_report
        db_daily.opsysrelease = db_osrelease
        db_daily.day = day
        db_daily.count = 0
        db_daily.unique = 0
        db.session.add(db_daily)
    if "serial" in ureport["problem"] and ureport["problem"]["serial"] == 1:
        db_daily.unique += 1
    db_daily.count += count
    week = day - datetime.timedelta(days=day.weekday())
    db_weekly = get_history_week(db, db_report, db_osrelease, week)
    if db_weekly is None:
        db_weekly = ReportHistoryWeekly()
        db_weekly.report = db_report
        db_weekly.opsysrelease = db_osrelease
        db_weekly.week = week
        db_weekly.count = 0
        db_weekly.unique = 0
        db.session.add(db_weekly)
    if "serial" in ureport["problem"] and ureport["problem"]["serial"] == 1:
        db_weekly.unique += 1
    db_weekly.count += count
    month = day.replace(day=1)
    db_monthly = get_history_month(db, db_report, db_osrelease, month)
    if db_monthly is None:
        db_monthly = ReportHistoryMonthly()
        db_monthly.report = db_report
        db_monthly.opsysrelease = db_osrelease
        db_monthly.month = month
        db_monthly.count = 0
        db_monthly.unique = 0
        db.session.add(db_monthly)
    if "serial" in ureport["problem"] and \
            ureport["problem"]["serial"] == 1:
        db_monthly.unique += 1
    db_monthly.count += count
    osplugin.save_ureport(db, db_report, ureport["os"], ureport["packages"],
                          count=count)
    problemplugin.save_ureport(db, db_report, ureport["problem"],
                               count=count)
    # Update count as last, so that handlers listening to its "set" event have
    # as much information as possible
    db_report.count += count
    db.session.flush()
    problemplugin.save_ureport_post_flush()
def run(self, cmdline, db):
    """Assign builds to an (opsys, release, arch) triple.

    With --released-builds, assigns the builds published for the
    release; with an expression, assigns builds whose release matches
    it.  "rhel" is accepted as a shorthand OS name.  Returns 1 on any
    lookup error, otherwise None.
    """
    # nobody will write the full name
    if cmdline.OPSYS == "rhel":
        cmdline.OPSYS = "Red Hat Enterprise Linux"
    # check if operating system is known
    if not get_opsys_by_name(db, cmdline.OPSYS):
        self.log_error(
            "Selected operating system '{0}' is not supported.".format(
                cmdline.OPSYS))
        return 1
    else:
        self.log_info("Selected operating system: '{0}'".format(
            cmdline.OPSYS))
    # check if release is known
    opsysrelease = get_osrelease(db, cmdline.OPSYS, cmdline.RELEASE)
    if not opsysrelease:
        self.log_error("Selected release '{0}' is not supported.".format(
            cmdline.RELEASE))
        return 1
    else:
        self.log_info("Selected release: '{0}'".format(cmdline.RELEASE))
    # check if architecture is known
    arch = get_arch_by_name(db, cmdline.ARCH)
    if not arch:
        self.log_error(
            "Selected architecture '{0}' is not supported.".format(
                cmdline.ARCH))
        return 1
    else:
        self.log_info("Selected architecture: '{0}'".format(cmdline.ARCH))
    # when release-builds argument specified
    if cmdline.released_builds:
        self.log_info("Assigning released builds for '{0} {1}'".format(
            cmdline.OPSYS, cmdline.RELEASE))
        opsys = self._edit_opsys(cmdline.OPSYS)
        if not opsys in systems.keys():
            self.log_error(
                "There are no known released builds for '{0}'".format(
                    cmdline.OPSYS))
            return 1
        # Only builds already known to storage (matched by NEVR) are
        # linked; unknown released builds are silently skipped.
        for build in systems[opsys].get_released_builds(cmdline.RELEASE):
            found_build = (db.session.query(Build).filter(
                Build.base_package_name == build["name"]).filter(
                    Build.version == build["version"]).filter(
                        Build.release == build["release"]).filter(
                            Build.epoch == build["epoch"]).first())
            if found_build:
                self._add_into_build_opsysrelease_arch(
                    db, found_build, opsysrelease, arch)
    # when expression argument was passed
    if cmdline.expression:
        self.log_info("Selecting builds by expression: '{0}'".format(
            cmdline.expression))
        found_builds = (db.session.query(Build).filter(
            Build.release.like("%{0}".format(cmdline.expression))).all())
        for build in found_builds:
            self._add_into_build_opsysrelease_arch(db, build, opsysrelease,
                                                   arch)
    db.session.flush()
def save_ureport2(db, ureport, create_component=False, timestamp=None, count=1):
    """
    Save uReport2

    Persists a validated uReport: finds or creates the Report for the
    problem hash, bumps per-OS-release, per-architecture and per-reason
    counters and the daily/weekly/monthly history, then delegates OS-
    and problem-specific parts to the plugins.  *count* occurrences are
    merged at once; *timestamp* defaults to now (UTC).  Raises FafError
    for unknown OS release, component or architecture.
    """
    if timestamp is None:
        timestamp = datetime.datetime.utcnow()
    # Plugin lookup; a KeyError here means the ureport was not
    # validated beforehand.
    osplugin = systems[ureport["os"]["name"]]
    problemplugin = problemtypes[ureport["problem"]["type"]]
    db_osrelease = get_osrelease(db, osplugin.nice_name,
                                 ureport["os"]["version"])
    if db_osrelease is None:
        raise FafError("Operating system '{0} {1}' not found in storage"
                       .format(osplugin.nice_name, ureport["os"]["version"]))
    report_hash = problemplugin.hash_ureport(ureport["problem"])
    db_report = get_report(db, report_hash)
    if db_report is None:
        # First occurrence of this problem hash - create the Report.
        component_name = problemplugin.get_component_name(ureport["problem"])
        db_component = get_component_by_name(db, component_name,
                                             osplugin.nice_name)
        if db_component is None:
            if create_component:
                log.info("Creating an unsupported component '{0}' in "
                         "operating system '{1}'".format(component_name,
                                                         osplugin.nice_name))
                db_component = OpSysComponent()
                db_component.name = component_name
                db_component.opsys = db_osrelease.opsys
                db.session.add(db_component)
            else:
                raise FafError("Unknown component '{0}' in operating system "
                               "{1}".format(component_name,
                                            osplugin.nice_name))
        db_report = Report()
        db_report.type = problemplugin.name
        db_report.first_occurrence = timestamp
        db_report.last_occurrence = timestamp
        db_report.count = 0
        db_report.component = db_component
        db.session.add(db_report)
        db_report_hash = ReportHash()
        db_report_hash.report = db_report
        db_report_hash.hash = report_hash
        db.session.add(db_report_hash)
    # Widen the occurrence window if this report falls outside it.
    if db_report.first_occurrence > timestamp:
        db_report.first_occurrence = timestamp
    if db_report.last_occurrence < timestamp:
        db_report.last_occurrence = timestamp
    db_reportosrelease = get_reportosrelease(db, db_report, db_osrelease)
    if db_reportosrelease is None:
        db_reportosrelease = ReportOpSysRelease()
        db_reportosrelease.report = db_report
        db_reportosrelease.opsysrelease = db_osrelease
        db_reportosrelease.count = 0
        db.session.add(db_reportosrelease)
    db_reportosrelease.count += count
    db_arch = get_arch_by_name(db, ureport["os"]["architecture"])
    if db_arch is None:
        raise FafError("Architecture '{0}' is not supported"
                       .format(ureport["os"]["architecture"]))
    db_reportarch = get_reportarch(db, db_report, db_arch)
    if db_reportarch is None:
        db_reportarch = ReportArch()
        db_reportarch.report = db_report
        db_reportarch.arch = db_arch
        db_reportarch.count = 0
        db.session.add(db_reportarch)
    db_reportarch.count += count
    reason = ureport["reason"].encode("utf-8")
    db_reportreason = get_reportreason(db, db_report, reason)
    if db_reportreason is None:
        db_reportreason = ReportReason()
        db_reportreason.report = db_report
        db_reportreason.reason = reason
        db_reportreason.count = 0
        db.session.add(db_reportreason)
    db_reportreason.count += count
    # History counters keyed by day, ISO week start, and month start.
    day = timestamp.date()
    db_daily = get_history_day(db, db_report, db_osrelease, day)
    if db_daily is None:
        db_daily = ReportHistoryDaily()
        db_daily.report = db_report
        db_daily.opsysrelease = db_osrelease
        db_daily.day = day
        db_daily.count = 0
        db.session.add(db_daily)
    db_daily.count += count
    week = day - datetime.timedelta(days=day.weekday())
    db_weekly = get_history_week(db, db_report, db_osrelease, week)
    if db_weekly is None:
        db_weekly = ReportHistoryWeekly()
        db_weekly.report = db_report
        db_weekly.opsysrelease = db_osrelease
        db_weekly.week = week
        db_weekly.count = 0
        db.session.add(db_weekly)
    db_weekly.count += count
    month = day.replace(day=1)
    db_monthly = get_history_month(db, db_report, db_osrelease, month)
    if db_monthly is None:
        db_monthly = ReportHistoryMonthly()
        db_monthly.report = db_report
        db_monthly.opsysrelease = db_osrelease
        db_monthly.month = month
        db_monthly.count = 0
        db.session.add(db_monthly)
    db_monthly.count += count
    osplugin.save_ureport(db, db_report, ureport["os"], ureport["packages"],
                          count=count)
    problemplugin.save_ureport(db, db_report, ureport["problem"],
                               count=count)
    # Update count as last, so that handlers listening to its "set" event have
    # as much information as possible
    db_report.count += count
    db.session.flush()
    problemplugin.save_ureport_post_flush()
def run(self, cmdline, db):
    """Assign operating systems, OS releases and/or architectures to a repo.

    Each positional item may name an operating system, an
    "<opsys> <release>" pair, or an architecture; "rhel" is accepted as
    a shorthand for "Red Hat Enterprise Linux".  Parametrized repos
    (URLs containing '$') only accept plain operating systems, plain
    repos only accept OS releases.  Returns 1 on error.
    """
    repo = (db.session.query(Repo)
            .filter(Repo.name == cmdline.NAME)
            .first())
    if not repo:
        self.log_error("Repository '{0}' not found"
                       .format(cmdline.NAME))
        return 1
    arch_list = []
    opsys_list = []
    opsysrelease_list = []
    for item_name in cmdline.OPSYS + cmdline.ARCH:
        # Try all three interpretations of the item and sort the match
        # into the appropriate bucket below.
        pos_name, pos_release = self._parser_osrelease(item_name)
        opsysrelease = get_osrelease(db, pos_name, pos_release)
        opsys = get_opsys_by_name(db, item_name)
        arch = get_arch_by_name(db, item_name)
        if not (opsys or arch or opsysrelease):
            # If name is rhel we will search Red Hat Enterprise Linux
            if item_name == "rhel":
                item_name = "Red Hat Enterprise Linux"
                opsys = get_opsys_by_name(db, item_name)
                if not opsys:
                    self.log_error("Item '{0}' not found"
                                   .format(item_name))
                    return 1
            elif pos_name == "rhel":
                # Same shorthand, but for the "<name> <release>" form.
                pos_name = "Red Hat Enterprise Linux"
                opsysrelease = get_osrelease(db, pos_name, pos_release)
                if not opsysrelease:
                    self.log_error("Item '{0}' not found"
                                   .format(item_name))
                    return 1
            else:
                self.log_error("Item '{0}' not found"
                               .format(item_name))
                return 1
        if opsys:
            opsys_list.append(opsys)
        elif opsysrelease:
            opsysrelease_list.append(opsysrelease)
        else:
            arch_list.append(arch)
    # test if url type correspond with type of repo
    if any('$' in url.url for url in repo.url_list) and opsysrelease_list:
        self.log_error("Assigning operating system with release to "
                       "parametrized repo. Assign only operating system.")
        return 1
    if any('$' not in url.url for url in repo.url_list) and opsys_list:
        self.log_error("Assigning operating system without release to "
                       "non - parametrized repo. Assign operating system"
                       " with release.")
        return 1
    repo.opsys_list += opsys_list
    repo.opsysrelease_list += opsysrelease_list
    repo.arch_list += arch_list
    db.session.flush()
    self.log_info("Assigned '{0}' to {1} operating system(s)"
                  ", {2} operating systems with release(s) and {3} architecture(s)"
                  .format(repo.name, len(opsys_list), len(opsysrelease_list),
                          (len(arch_list))))
def run(self, cmdline, db):
    """Assign builds to an (opsys, release, arch) triple.

    With --released-builds, links the builds published for the release
    (matched by NEVR against storage); with an expression, links builds
    whose release matches it.  "rhel" is accepted as a shorthand OS
    name.  Returns 0 on success, 1 on any lookup error.
    """
    # nobody will write the full name
    if cmdline.OPSYS == "rhel":
        cmdline.OPSYS = "Red Hat Enterprise Linux"
    # check if operating system is known
    if not get_opsys_by_name(db, cmdline.OPSYS):
        self.log_error("Selected operating system '{0}' is not supported."
                       .format(cmdline.OPSYS))
        return 1
    self.log_info("Selected operating system: '{0}'"
                  .format(cmdline.OPSYS))
    # check if release is known
    opsysrelease = get_osrelease(db, cmdline.OPSYS, cmdline.RELEASE)
    if not opsysrelease:
        self.log_error("Selected release '{0}' is not supported."
                       .format(cmdline.RELEASE))
        return 1
    self.log_info("Selected release: '{0}'".format(cmdline.RELEASE))
    # check if architecture is known
    arch = get_arch_by_name(db, cmdline.ARCH)
    if not arch:
        self.log_error("Selected architecture '{0}' is not supported."
                       .format(cmdline.ARCH))
        return 1
    self.log_info("Selected architecture: '{0}'".format(cmdline.ARCH))
    # when release-builds argument specified
    if cmdline.released_builds:
        self.log_info("Assigning released builds for '{0} {1}'"
                      .format(cmdline.OPSYS, cmdline.RELEASE))
        opsys = self._edit_opsys(cmdline.OPSYS)
        if opsys not in systems.keys():
            self.log_error("There are no known released builds for '{0}'"
                           .format(cmdline.OPSYS))
            return 1
        # Only builds already present in storage are linked; unknown
        # released builds are silently skipped.
        for build in systems[opsys].get_released_builds(cmdline.RELEASE):
            found_build = (db.session.query(Build)
                           .filter(Build.base_package_name == build["name"])
                           .filter(Build.version == build["version"])
                           .filter(Build.release == build["release"])
                           .filter(Build.epoch == build["epoch"])
                           .first())
            if found_build:
                self._add_into_build_opsysrelease_arch(db, found_build,
                                                       opsysrelease, arch)
    # when expression argument was passed
    if cmdline.expression:
        self.log_info("Selecting builds by expression: '{0}'"
                      .format(cmdline.expression))
        found_builds = (db.session.query(Build)
                        .filter(Build.release.like("%{0}"
                                                   .format(cmdline.expression)))
                        .all())
        for build in found_builds:
            self._add_into_build_opsysrelease_arch(db, build, opsysrelease,
                                                   arch)
    db.session.flush()
    return 0
def run(self, cmdline, db):
    """Synchronize package metadata (and optionally RPMs) from repositories.

    Builds a list of repo plugin instances (expanding parametrized
    repos), then for each listed package ensures Build/Package rows
    exist, optionally downloads the RPM and stores its dependencies.
    Returns 1 on misconfigured parametrized repos, otherwise None.
    """
    repo_instances = []
    for repo in db.session.query(Repo):
        # Restrict to the repos named on the command line, if any.
        if cmdline.NAME and repo.name not in cmdline.NAME:
            continue
        if not repo.type in repo_types:
            self.log_error("No plugin installed to handle repository type "
                           "{0}, skipping.".format(repo.type))
            continue
        if "$" in repo.url:
            # parametrized
            self.log_info("Processing parametrized repo '{0}'"
                          .format(repo.name))
            if not repo.opsys_list:
                self.log_error("Parametrized repository is not assigned"
                               " with an operating system")
                return 1
            if not repo.arch_list:
                self.log_error("Parametrized repository is not assigned"
                               " with an architecture")
                return 1
            repo_instances += list(self._get_parametrized_variants(repo))
        else:
            repo_instance = repo_types[repo.type](repo.name, repo.url)
            repo_instances.append(repo_instance)
    # NOTE(review): under Python 3 this map() is a one-shot iterator -
    # it would be exhausted by the first repo_instance; verify it is
    # materialized (or the code runs under Python 2).
    architectures = map(lambda x: x.name, db.session.query(Arch))
    for repo_instance in repo_instances:
        self.log_info("Processing repository '{0}' URL: '{1}'"
                      .format(repo_instance.name, repo_instance.urls[0]))
        pkglist = repo_instance.list_packages(architectures)
        total = len(pkglist)
        self.log_info("Repository has '{0}' packages".format(total))
        for num, pkg in enumerate(pkglist):
            self.log_debug("[{0} / {1}] Processing package {2}"
                           .format(num + 1, total, pkg["name"]))
            arch = get_arch_by_name(db, pkg["arch"])
            if not arch:
                self.log_error("Architecture '{0}' not found, skipping"
                               .format(pkg["arch"]))
                continue
            build = (db.session.query(Build)
                     .filter(Build.base_package_name ==
                             pkg["base_package_name"])
                     .filter(Build.version == pkg["version"])
                     .filter(Build.release == pkg["release"])
                     .filter(Build.epoch == pkg["epoch"])
                     .first())
            if not build:
                self.log_debug("Adding build {0}-{1}".format(
                    pkg["base_package_name"], pkg["version"]))
                build = Build()
                build.base_package_name = pkg["base_package_name"]
                build.version = pkg["version"]
                build.release = pkg["release"]
                build.epoch = pkg["epoch"]
                db.session.add(build)
                build_arch = BuildArch()
                build_arch.build = build
                build_arch.arch = arch
                db.session.add(build_arch)
                db.session.flush()
            package = (db.session.query(Package)
                       .filter(Package.name == pkg["name"])
                       .filter(Package.pkgtype == pkg["type"])
                       .filter(Package.build == build)
                       .filter(Package.arch == arch)
                       .first())
            if not package:
                self.log_info("Adding package {0}".format(pkg["filename"]))
                package = Package()
                package.name = pkg["name"]
                package.pkgtype = pkg["type"]
                package.arch = arch
                package.build = build
                db.session.add(package)
                db.session.flush()
                if cmdline.no_download_rpm:
                    continue
                # Catching too general exception Exception
                # pylint: disable-msg=W0703
                try:
                    self.log_info("Downloading {0}".format(pkg["url"]))
                    self._download(package, "package", pkg["url"])
                except Exception as exc:
                    # Download failed after retries: drop the row we
                    # just created so storage stays consistent.
                    self.log_error("Exception ({0}) after multiple attemps"
                                   " while trying to download {1},"
                                   " skipping.".format(exc, pkg["url"]))
                    db.session.delete(package)
                    db.session.flush()
                    continue
                # pylint: enable-msg=W0703
                res = True
                if pkg["type"] == "rpm":
                    # NOTE(review): `repo` here is the loop variable
                    # leaked from the first loop (the last repo
                    # iterated), not necessarily the repo that owns
                    # repo_instance - confirm nogpgcheck is intended to
                    # come from there.
                    res = store_rpm_deps(db, package, repo.nogpgcheck)
                if not res:
                    self.log_error("Post-processing failed, skipping")
                    db.session.delete(package)
                    db.session.flush()
                    continue
                if cmdline.no_store_rpm:
                    try:
                        # Metadata only: discard the downloaded RPM blob.
                        package.del_lob("package")
                        self.log_info("Package deleted.")
                    except Exception as exc:
                        # exc intentionally unused; best-effort cleanup.
                        self.log_error("Error deleting the RPM file.")
            else:
                self.log_debug("Known package {0}".format(pkg["filename"]))