def find_packages_for_ssource(self, db, db_ssource):
    if db_ssource.build_id is None:
        self.log_debug("No kernel information for '{0}' @ '{1}'".format(
            db_ssource.symbol.name, db_ssource.path))
        return db_ssource, (None, None, None)

    if db_ssource.build_id in self._kernel_pkg_map:
        return db_ssource, self._kernel_pkg_map[db_ssource.build_id]

    if self.archnames is None:
        self.archnames = set(arch.name for arch in get_archs(db))

    kernelver = self._parse_kernel_build_id(db_ssource.build_id,
                                            self.archnames)
    version, release, arch, flavour = kernelver

    if flavour is not None:
        basename = "kernel-{0}-debuginfo".format(flavour)
    else:
        basename = "kernel-debuginfo"

    db_debug_pkg = get_package_by_nevra(db, basename, 0, version, release,
                                        arch)

    nvra = "{0}-{1}-{2}.{3}".format(basename, version, release, arch)

    db_src_pkg = None
    if db_debug_pkg is None:
        self.log_debug("Package {0} not found in storage".format(nvra))
    elif not self.skipsrc:
        srcname = "kernel-debuginfo-common-{0}".format(arch)
        db_src_pkg = get_package_by_name_build_arch(
            db, srcname, db_debug_pkg.build, db_debug_pkg.arch)

        if db_src_pkg is None:
            self.log_debug(
                "Package {0}-{1}-{2}.{3} not found in storage".format(
                    srcname, version, release, arch))

    result = db_debug_pkg, db_debug_pkg, db_src_pkg
    self._kernel_pkg_map[db_ssource.build_id] = result

    return db_ssource, result
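
# The helper self._parse_kernel_build_id used above is not shown here. The
# sketch below is only a hypothetical illustration of the kind of parsing it
# performs -- splitting a kernel build id such as "4.18.0-80.el8.x86_64" or
# "3.10.0-862.el7.i686.debug" into (version, release, arch, flavour) -- and is
# not the project's actual implementation.
def parse_kernel_build_id_sketch(build_id, archnames):
    flavour = None

    # A trailing ".<flavour>" (e.g. ".debug", ".PAE") may follow the arch.
    head, _, tail = build_id.rpartition(".")
    if tail not in archnames:
        flavour = tail
        build_id = head

    # The architecture is now the last dot-separated component.
    rest, _, arch = build_id.rpartition(".")

    # Version and release are separated by the first dash.
    version, _, release = rest.partition("-")
    return version, release, arch, flavour

# Example: parse_kernel_build_id_sketch("4.18.0-80.el8.x86_64", {"x86_64"})
# returns ("4.18.0", "80.el8", "x86_64", None), from which the method above
# would assemble the NVRA "kernel-debuginfo-4.18.0-80.el8.x86_64".
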
def run(self, cmdline, db) -> None:
    # Collect a concrete repository instance (plugin object plus metadata)
    # for every matching Repo row.
    repo_instances = []
    for repo in db.session.query(Repo):
        if cmdline.REPO and repo.name not in cmdline.REPO:
            continue
        if cmdline.match_repos not in repo.name:
            continue
        if repo.type not in repo_types:
            self.log_error("No plugin installed to handle repository type "
                           "{0}, skipping.".format(repo.type))
            continue

        if repo.opsys_list:  # parametrized
            self.log_info("Processing parametrized repo '{0}'".format(
                repo.name))
            if not repo.arch_list:
                self.log_error("Parametrized repository is not assigned"
                               " with an architecture, skipping")
                continue
            try:
                repo_instances += list(
                    self._get_parametrized_variants(repo))
            except:  # pylint: disable=bare-except
                self.log_error(
                    "No valid mirror for repository '{0}', skipping"
                    .format(repo.name))
                continue
        elif repo.opsysrelease_list:
            self.log_info(
                "Processing repo '{0}' assigned with OpSysRelease".format(
                    repo.name))
            if not repo.arch_list:
                self.log_error("OpSysRelease repository is not assigned"
                               " with an architecture, skipping")
                continue
            try:
                repo_instances += list(
                    self._get_opsysrelease_variants(repo))
            except:  # pylint: disable=bare-except
                self.log_error(
                    "No valid mirror for repository '{0}', skipping"
                    .format(repo.name))
                continue
        else:
            if any('$' in url.url for url in repo.url_list):
                self.log_error("No operating system assigned to "
                               "parametrized repo '{0}', skipping".format(
                                   repo.name))
                continue
            for arch in repo.arch_list:
                try:
                    repo_instance = {
                        'instance': repo_types[repo.type](
                            repo.name,
                            [url.url for url in repo.url_list]),
                        'opsys': None,
                        'release': None,
                        'arch': arch.name,
                        'nogpgcheck': repo.nogpgcheck
                    }
                    repo_instances.append(repo_instance)
                except:  # pylint: disable=bare-except
                    self.log_error(
                        "No valid mirror for repository '{0}', skipping"
                        .format(repo.name))
                    continue

    cmdline.name_prefix = cmdline.name_prefix.lower()
    architectures = dict((x.name, x) for x in get_archs(db))

    # Import packages from every resolved repository instance.
    for repo_instance in repo_instances:
        self.log_info(
            "Processing repository '{0}' assigned to OS release '{1}' "
            "and arch '{2}', URL: '{3}'"
            .format(repo_instance['instance'].name,
                    repo_instance['release'],
                    repo_instance['arch'],
                    repo_instance['instance'].urls))
        if cmdline.no_cache:
            repo_instance['instance'].cache_lifetime = 0
        pkglist = \
            repo_instance['instance'].list_packages(list(architectures.keys()))
        total = len(pkglist)
        self.log_info("Repository has {0} packages".format(total))

        for num, pkg in enumerate(pkglist, start=1):
            self.log_info("[%d / %d] Processing package %s", num, total,
                          pkg["name"])

            if not pkg["name"].lower().startswith(cmdline.name_prefix):
                self.log_debug("Skipped package %s", pkg["name"])
                continue

            arch = architectures.get(pkg["arch"], None)
            if not arch:
                self.log_error(
                    "Architecture '{0}' not found, skipping".format(
                        pkg["arch"]))
                continue

            repo_arch = architectures.get(repo_instance["arch"], None)
            if not repo_arch:
                self.log_error(
                    "Architecture '{0}' not found, skipping".format(
                        repo_instance["arch"]))
                continue

            build = (db.session.query(Build)
                     .filter(Build.base_package_name ==
                             pkg["base_package_name"])
                     .filter(Build.version == pkg["version"])
                     .filter(Build.release == pkg["release"])
                     .filter(Build.epoch == pkg["epoch"])
                     .first())
            if not build:
                self.log_debug("Adding build %s-%s",
                               pkg["base_package_name"], pkg["version"])
                build = Build()
                build.base_package_name = pkg["base_package_name"]
                build.version = pkg["version"]
                build.release = pkg["release"]
                build.epoch = pkg["epoch"]
                db.session.add(build)
                db.session.flush()

            build_arch = (db.session.query(BuildArch)
                          .filter(BuildArch.build_id == build.id)
                          .filter(BuildArch.arch_id == arch.id)
                          .first())
            if not build_arch:
                build_arch = BuildArch()
                build_arch.build = build
                build_arch.arch = arch
                db.session.add(build_arch)
                db.session.flush()

            # Link the build to the operating system release and architecture
            # served by this repository, if such a link does not exist yet.
            build_opsysrelease_arch = (
                db.session.query(BuildOpSysReleaseArch)
                .join(Build)
                .join(OpSysRelease)
                .join(Arch)
                .filter(Build.id == build.id)
                .filter(OpSys.name == repo_instance['opsys'])
                .filter(OpSysRelease.version == repo_instance['release'])
                .filter(Arch.name == repo_instance['arch'])
                .first())

            if (not build_opsysrelease_arch and repo_instance['release']
                    and repo_instance['opsys']):
                self.log_info("Adding link between build '{0}-{1}' "
                              "and operating system '{2} {3} {4}'".format(
                                  pkg["base_package_name"],
                                  pkg["version"],
                                  repo_instance['opsys'],
                                  repo_instance['release'],
                                  repo_instance['arch']))
                opsysrelease = (
                    db.session.query(OpSysRelease)
                    .filter(OpSys.name == repo_instance['opsys'])
                    .filter(OpSysRelease.version == repo_instance['release'])
                    .first())

                bosra = BuildOpSysReleaseArch()
                bosra.build = build
                bosra.opsysrelease = opsysrelease
                bosra.arch = repo_arch
                db.session.add(bosra)
                db.session.flush()

            package = (db.session.query(Package)
                       .filter(Package.name == pkg["name"])
                       .filter(Package.pkgtype == pkg["type"])
                       .filter(Package.build == build)
                       .filter(Package.arch == arch)
                       .first())

            if not package:
                self.log_info("Adding package {0}".format(pkg["filename"]))

                package = Package()
                package.name = pkg["name"]
                package.pkgtype = pkg["type"]
                package.arch = arch
                package.build = build
                db.session.add(package)
                db.session.flush()

                if cmdline.no_download_rpm:
                    continue

                # Catching too general exception Exception
                # pylint: disable-msg=W0703
                try:
                    self.log_info("Downloading {0}".format(pkg["url"]))
                    self._download(package, "package", pkg["url"])
                except Exception as exc:
                    self.log_error(
                        "Exception ({0}) after multiple attempts"
                        " while trying to download {1},"
                        " skipping.".format(exc, pkg["url"]))
                    db.session.delete(package)
                    db.session.flush()
                    continue
                # pylint: enable-msg=W0703

                if pkg["type"] == "rpm":
                    try:
                        store_rpm_provides(db, package,
                                           repo_instance['nogpgcheck'])
                    except FafError as ex:
                        self.log_error(
                            "Post-processing failed, skipping: {}".format(ex))
                        db.session.delete(package)
                        db.session.flush()
                        continue

                if cmdline.no_store_rpm:
                    try:
                        package.del_lob("package")
                        self.log_info("Package deleted.")
                    except Exception as exc:  # pylint: disable=broad-except
                        self.log_error("Error deleting the RPM file.")
            else:
                self.log_debug("Known package %s", pkg["filename"])

                if (not package.has_lob("package")
                        and not cmdline.no_download_rpm):
                    self.log_info(
                        "Package {} does not have a LOB. Re-downloading."
                        .format(pkg["name"]))
                    try:
                        self._download(package, "package", pkg["url"])
                    except (FafError, URLError) as exc:
                        self.log_error(
                            "Exception ({0}) after multiple attempts"
                            " while trying to download {1},"
                            " skipping.".format(exc, pkg["url"]))
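
# The import loop above relies only on a small interface from each plugin
# registered in repo_types: a constructor taking (name, urls), the attributes
# name, urls and cache_lifetime, and a list_packages(architectures) method
# returning dictionaries with the keys read above. The stub below is a
# minimal, hypothetical example of that interface (e.g. for exercising the
# loop in tests); it is not one of the real repository plugins, and the
# package data it returns is made up.
class StubRepo:
    def __init__(self, name, urls):
        self.name = name
        self.urls = urls
        self.cache_lifetime = 3600  # arbitrary value for the sketch

    def list_packages(self, architectures):
        # Mirror the keys the import loop consumes: name, base_package_name,
        # version, release, epoch, arch, type, filename and url.
        archs = list(architectures)
        return [{
            "name": "example",
            "base_package_name": "example",
            "version": "1.0",
            "release": "1",
            "epoch": 0,
            "arch": archs[0] if archs else "noarch",
            "type": "rpm",
            "filename": "example-1.0-1.noarch.rpm",
            "url": self.urls[0] + "/example-1.0-1.noarch.rpm",
        }]
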
def run(self, cmdline, db):
    repo_instances = []
    for repo in db.session.query(Repo):
        if cmdline.NAME and repo.name not in cmdline.NAME:
            continue
        if repo.type not in repo_types:
            self.log_error("No plugin installed to handle repository type "
                           "{0}, skipping.".format(repo.type))
            continue

        if repo.opsys_list:  # parametrized
            self.log_info("Processing parametrized repo '{0}'"
                          .format(repo.name))
            if not repo.arch_list:
                self.log_error("Parametrized repository is not assigned"
                               " with an architecture")
                return 1
            try:
                repo_instances += list(self._get_parametrized_variants(repo))
            except:
                self.log_error("No valid mirror for repository {0}"
                               .format(repo.name))
                return 1
        elif repo.opsysrelease_list:
            self.log_info("Processing repo '{0}' assigned with OpSysRelease"
                          .format(repo.name))
            if not repo.arch_list:
                self.log_error("OpSysRelease repository is not assigned"
                               " with an architecture")
                return 1
            try:
                repo_instances += list(self._get_opsysrelease_variants(repo))
            except:
                self.log_error("No valid mirror for repository {0}"
                               .format(repo.name))
                return 1
        else:
            if any('$' in url.url for url in repo.url_list):
                self.log_error("No operating system assigned to "
                               "parametrized repo '{0}'".format(repo.name))
                return 1
            for arch in repo.arch_list:
                try:
                    repo_instance = {
                        'instance': repo_types[repo.type](
                            repo.name,
                            [url.url for url in repo.url_list]),
                        'opsys': None,
                        'release': None,
                        'arch': arch.name}
                    repo_instances.append(repo_instance)
                except:
                    self.log_error("No valid mirror for repository {0}"
                                   .format(repo.name))
                    return 1

    cmdline.name_prefix = cmdline.name_prefix.lower()
    architectures = dict((x.name, x) for x in get_archs(db))

    for repo_instance in repo_instances:
        self.log_info("Processing repository '{0}' URL: '{1}'"
                      .format(repo_instance['instance'].name,
                              repo_instance['instance'].urls))

        pkglist = \
            repo_instance['instance'].list_packages(architectures.keys())
        total = len(pkglist)
        self.log_info("Repository has '{0}' packages".format(total))

        for num, pkg in enumerate(pkglist):
            self.log_debug("[{0} / {1}] Processing package {2}"
                           .format(num + 1, total, pkg["name"]))

            if not pkg["name"].lower().startswith(cmdline.name_prefix):
                self.log_debug("Skipped package {0}".format(pkg["name"]))
                continue

            arch = architectures.get(pkg["arch"], None)
            if not arch:
                self.log_error("Architecture '{0}' not found, skipping"
                               .format(pkg["arch"]))
                continue

            repo_arch = architectures.get(repo_instance["arch"], None)
            if not repo_arch:
                self.log_error("Architecture '{0}' not found, skipping"
                               .format(repo_instance["arch"]))
                continue

            build = (db.session.query(Build)
                     .filter(Build.base_package_name ==
                             pkg["base_package_name"])
                     .filter(Build.version == pkg["version"])
                     .filter(Build.release == pkg["release"])
                     .filter(Build.epoch == pkg["epoch"])
                     .first())

            if not build:
                self.log_debug("Adding build {0}-{1}".format(
                    pkg["base_package_name"], pkg["version"]))
                build = Build()
                build.base_package_name = pkg["base_package_name"]
                build.version = pkg["version"]
                build.release = pkg["release"]
                build.epoch = pkg["epoch"]
                db.session.add(build)
                db.session.flush()

            build_arch = (db.session.query(BuildArch)
                          .filter(BuildArch.build_id == build.id)
                          .filter(BuildArch.arch_id == arch.id)
                          .first())
            if not build_arch:
                build_arch = BuildArch()
                build_arch.build = build
                build_arch.arch = arch
                db.session.add(build_arch)
                db.session.flush()

            build_opsysrelease_arch = (
                db.session.query(BuildOpSysReleaseArch)
                .join(Build)
                .join(OpSysRelease)
                .join(Arch)
                .filter(Build.id == build.id)
                .filter(OpSys.name == repo_instance['opsys'])
                .filter(OpSysRelease.version == repo_instance['release'])
                .filter(Arch.name == repo_instance['arch'])
                .first())

            if (not build_opsysrelease_arch and repo_instance['release']
                    and repo_instance['opsys']):
                self.log_info("Adding link between build {0}-{1}, "
                              "operating system '{2}', release '{3}' and "
                              "architecture {4}".format(
                                  pkg["base_package_name"],
                                  pkg["version"],
                                  repo_instance['opsys'],
                                  repo_instance['release'],
                                  repo_instance['arch']))
                opsysrelease = (
                    db.session.query(OpSysRelease)
                    .filter(OpSys.name == repo_instance['opsys'])
                    .filter(OpSysRelease.version == repo_instance['release'])
                    .first())

                bosra = BuildOpSysReleaseArch()
                bosra.build = build
                bosra.opsysrelease = opsysrelease
                bosra.arch = repo_arch
                db.session.add(bosra)
                db.session.flush()

            package = (db.session.query(Package)
                       .filter(Package.name == pkg["name"])
                       .filter(Package.pkgtype == pkg["type"])
                       .filter(Package.build == build)
                       .filter(Package.arch == arch)
                       .first())

            if not package:
                self.log_info("Adding package {0}".format(pkg["filename"]))

                package = Package()
                package.name = pkg["name"]
                package.pkgtype = pkg["type"]
                package.arch = arch
                package.build = build
                db.session.add(package)
                db.session.flush()

                if cmdline.no_download_rpm:
                    continue

                # Catching too general exception Exception
                # pylint: disable-msg=W0703
                try:
                    self.log_info("Downloading {0}".format(pkg["url"]))
                    self._download(package, "package", pkg["url"])
                except Exception as exc:
                    self.log_error("Exception ({0}) after multiple attempts"
                                   " while trying to download {1},"
                                   " skipping.".format(exc, pkg["url"]))
                    db.session.delete(package)
                    db.session.flush()
                    continue
                # pylint: enable-msg=W0703

                res = True
                if pkg["type"] == "rpm":
                    res = store_rpm_deps(db, package, repo.nogpgcheck)

                if not res:
                    self.log_error("Post-processing failed, skipping")
                    db.session.delete(package)
                    db.session.flush()
                    continue

                if cmdline.no_store_rpm:
                    try:
                        package.del_lob("package")
                        self.log_info("Package deleted.")
                    except Exception as exc:
                        self.log_error("Error deleting the RPM file.")
            else:
                self.log_debug("Known package {0}".format(pkg["filename"]))
def run(self, cmdline, db):
    repo_instances = []
    for repo in db.session.query(Repo):
        if cmdline.NAME and repo.name not in cmdline.NAME:
            continue
        if repo.type not in repo_types:
            self.log_error("No plugin installed to handle repository type "
                           "{0}, skipping.".format(repo.type))
            continue

        if "$" in repo.url:  # parametrized
            self.log_info("Processing parametrized repo '{0}'"
                          .format(repo.name))
            if not repo.opsys_list:
                self.log_error("Parametrized repository is not assigned"
                               " with an operating system")
                return 1
            if not repo.arch_list:
                self.log_error("Parametrized repository is not assigned"
                               " with an architecture")
                return 1
            repo_instances += list(self._get_parametrized_variants(repo))
        else:
            repo_instance = repo_types[repo.type](repo.name, repo.url)
            repo_instances.append(repo_instance)

    cmdline.name_prefix = cmdline.name_prefix.lower()
    architectures = dict((x.name, x) for x in get_archs(db))

    for repo_instance in repo_instances:
        self.log_info("Processing repository '{0}' URL: '{1}'"
                      .format(repo_instance.name, repo_instance.urls[0]))

        pkglist = repo_instance.list_packages(architectures.keys())
        total = len(pkglist)
        self.log_info("Repository has '{0}' packages".format(total))

        for num, pkg in enumerate(pkglist):
            self.log_debug("[{0} / {1}] Processing package {2}"
                           .format(num + 1, total, pkg["name"]))

            if not pkg["name"].lower().startswith(cmdline.name_prefix):
                self.log_debug("Skipped package {0}".format(pkg["name"]))
                continue

            arch = architectures.get(pkg["arch"], None)
            if not arch:
                self.log_error("Architecture '{0}' not found, skipping"
                               .format(pkg["arch"]))
                continue

            build = (db.session.query(Build)
                     .filter(Build.base_package_name ==
                             pkg["base_package_name"])
                     .filter(Build.version == pkg["version"])
                     .filter(Build.release == pkg["release"])
                     .filter(Build.epoch == pkg["epoch"])
                     .first())

            if not build:
                self.log_debug("Adding build {0}-{1}".format(
                    pkg["base_package_name"], pkg["version"]))
                build = Build()
                build.base_package_name = pkg["base_package_name"]
                build.version = pkg["version"]
                build.release = pkg["release"]
                build.epoch = pkg["epoch"]
                db.session.add(build)

                build_arch = BuildArch()
                build_arch.build = build
                build_arch.arch = arch
                db.session.add(build_arch)
                db.session.flush()

            package = (db.session.query(Package)
                       .filter(Package.name == pkg["name"])
                       .filter(Package.pkgtype == pkg["type"])
                       .filter(Package.build == build)
                       .filter(Package.arch == arch)
                       .first())

            if not package:
                self.log_info("Adding package {0}".format(pkg["filename"]))

                package = Package()
                package.name = pkg["name"]
                package.pkgtype = pkg["type"]
                package.arch = arch
                package.build = build
                db.session.add(package)
                db.session.flush()

                if cmdline.no_download_rpm:
                    continue

                # Catching too general exception Exception
                # pylint: disable-msg=W0703
                try:
                    self.log_info("Downloading {0}".format(pkg["url"]))
                    self._download(package, "package", pkg["url"])
                except Exception as exc:
                    self.log_error("Exception ({0}) after multiple attempts"
                                   " while trying to download {1},"
                                   " skipping.".format(exc, pkg["url"]))
                    db.session.delete(package)
                    db.session.flush()
                    continue
                # pylint: enable-msg=W0703

                res = True
                if pkg["type"] == "rpm":
                    res = store_rpm_deps(db, package, repo.nogpgcheck)

                if not res:
                    self.log_error("Post-processing failed, skipping")
                    db.session.delete(package)
                    db.session.flush()
                    continue

                if cmdline.no_store_rpm:
                    try:
                        package.del_lob("package")
                        self.log_info("Package deleted.")
                    except Exception as exc:
                        self.log_error("Error deleting the RPM file.")
            else:
                self.log_debug("Known package {0}".format(pkg["filename"]))