def list_packages(self, filters):
    """ list packages"""
    self.sack.populate(self.repo, 'metadata', None, 0)
    pkglist = self.sack.returnPackages()
    pkglist = yum.misc.unique(pkglist)
    self.num_packages = len(pkglist)

    if not filters:
        # if there's no include/exclude filter on command line or in database
        for p in self.repo.includepkgs:
            filters.append(('+', [p]))
        for p in self.repo.exclude:
            filters.append(('-', [p]))

    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        pkglist = self._get_package_dependencies(self.sack, pkglist)
        # do not pull in dependencies if they're explicitly excluded
        pkglist = self._filter_packages(pkglist, filters, True)
        self.num_excluded = self.num_packages - len(pkglist)

    to_return = []
    for pack in pkglist:
        if pack.arch == 'src':
            continue
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = pack
        new_pack.checksum_type = pack.checksums[0][0]
        if new_pack.checksum_type == 'sha':
            new_pack.checksum_type = 'sha1'
        new_pack.checksum = pack.checksums[0][1]
        to_return.append(new_pack)
    return to_return

def list_packages(self, filters, latest):
    """ list packages"""
    try:
        self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
    except yum.Errors.RepoError as e:
        if "No more mirrors" in str(e):
            reqFile = re.search(r'failure:\s+(.+)\s+from', str(e)).groups()[0]
            raise ChannelTimeoutException(
                "Retrieving '%s' failed: File not found in repository '%s'"
                % (reqFile, self.repo))
        else:
            raise
    pkglist = ListPackageSack(self.repo.getPackageSack().returnPackages())
    self.num_packages = len(pkglist)
    if latest:
        pkglist = pkglist.returnNewestByNameArch()
    pkglist = yum.misc.unique(pkglist)
    pkglist.sort(self._sort_packages)

    if not filters:
        # if there's no include/exclude filter on command line or in database
        # check repository config file
        for p in self.repo.includepkgs:
            filters.append(('+', [p]))
        for p in self.repo.exclude:
            filters.append(('-', [p]))
    filters = self._expand_package_groups(filters)

    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        pkglist = self._get_package_dependencies(self.repo.getPackageSack(), pkglist)
        # do not pull in dependencies if they're explicitly excluded
        pkglist = self._filter_packages(pkglist, filters, True)
        self.num_excluded = self.num_packages - len(pkglist)

    to_return = []
    for pack in pkglist:
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = pack
        new_pack.checksum_type = pack.checksums[0][0]
        if new_pack.checksum_type == 'sha':
            new_pack.checksum_type = 'sha1'
        new_pack.checksum = pack.checksums[0][1]
        for cs in pack.checksums:
            new_pack.checksums[cs[0]] = cs[1]
        to_return.append(new_pack)
    return to_return

def list_packages(self, filters, latest):
    """
    List available packages.

    :returns: list
    """
    pkglist = self._get_solvable_packages()
    pkglist.sort(key=cmp_to_key(self._sort_packages))
    self.num_packages = len(pkglist)

    pkglist = self._apply_filters(pkglist, filters)

    if latest:
        latest_pkgs = {}
        for pkg in pkglist:
            ident = '{}.{}'.format(pkg.name, pkg.arch)
            if ident not in latest_pkgs or \
                    LooseVersion(str(pkg.evr)) > LooseVersion(str(latest_pkgs[ident].evr)):
                latest_pkgs[ident] = pkg
        pkglist = list(latest_pkgs.values())

    to_return = []
    for pack in pkglist:
        new_pack = ContentPackage()
        epoch, version, release = RawSolvablePackage._parse_solvable_evr(pack.evr)
        new_pack.setNVREA(pack.name, version, release, epoch, pack.arch)
        new_pack.unique_id = RawSolvablePackage(pack)
        checksum = pack.lookup_checksum(solv.SOLVABLE_CHECKSUM)
        new_pack.checksum_type = checksum.typestr()
        new_pack.checksum = checksum.hex()
        to_return.append(new_pack)
    return to_return

def list_packages(self, filters, latest):
    """ list packages"""
    pkglist = self.repo.get_package_list()
    self.num_packages = len(pkglist)

    if latest:
        # TODO
        pass
    pkglist.sort(self._sort_packages)

    if not filters:
        # if there's no include/exclude filter on command line or in database
        for p in self.repo.includepkgs:
            filters.append(('+', [p]))
        for p in self.repo.exclude:
            filters.append(('-', [p]))

    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        self.num_excluded = self.num_packages - len(pkglist)

    to_return = []
    for pack in pkglist:
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = pack
        new_pack.checksum_type = pack.checksum_type
        new_pack.checksum = pack.checksum
        to_return.append(new_pack)
    return to_return

def test_associate_package(self):
    pack = ContentPackage()
    pack.setNVREA('name1', 'version1', 'release1', 'epoch1', 'arch1')
    pack.unique_id = 1
    pack.a_pkg = rhn_rpm.RPM_Package(None)
    pack.a_pkg.checksum = 'checksum1'
    pack.a_pkg.checksum_type = 'c_type1'
    pack.a_pkg.header = {'epoch': 'epoch1'}
    pack.checksums[1] = 'checksum1'

    mocked_backend = Mock()
    self.reposync.SQLBackend = Mock(return_value=mocked_backend)
    rs = self._create_mocked_reposync()
    rs._importer_run = Mock()
    rs.channel_label = 'Label1'
    rs.channel = {'id': 'channel1', 'org_id': 1}

    package = {'name': 'name1',
               'version': 'version1',
               'release': 'release1',
               'epoch': 'epoch1',
               'arch': 'arch1',
               'checksum': 'checksum1',
               'checksum_type': 'c_type1',
               'org_id': 1,
               'channels': [{'label': 'Label1', 'id': 'channel1'}]}
    refpack = importLib.IncompletePackage().populate(package)
    ipack = rs.associate_package(pack)
    self.assertEqual(ipack, refpack)

def list_packages(self, filters, latest):
    """ list packages"""
    pkglist = self.repo.get_package_list()
    self.num_packages = len(pkglist)

    if latest:
        # TODO
        pass
    pkglist.sort(self._sort_packages)

    if not filters:
        # if there's no include/exclude filter on command line or in database
        for p in self.repo.includepkgs:
            filters.append(("+", [p]))
        for p in self.repo.exclude:
            filters.append(("-", [p]))

    if filters:
        # TODO
        pass

    to_return = []
    for pack in pkglist:
        new_pack = ContentPackage()
        new_pack.setNVREA(pack["name"], pack["version"], pack["release"],
                          pack["epoch"], pack["arch"])
        new_pack.unique_id = pack
        new_pack.checksum_type = pack["checksum_type"]
        new_pack.checksum = pack["checksum"]
        to_return.append(new_pack)
    return to_return

def list_packages(self, filters, latest):
    """ list packages"""
    self.dnfbase.fill_sack(load_system_repo=False, load_available_repos=True)
    pkglist = self.dnfbase.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES)
    self.num_packages = len(pkglist)
    if latest:
        pkglist = pkglist.latest()
    pkglist = list(dict.fromkeys(pkglist))  # Filter out duplicates

    if not filters:
        # if there's no include/exclude filter on command line or in database
        # check repository config file
        for p in self.dnfbase.repos[self.repoid].includepkgs:
            filters.append(('+', [p]))
        for p in self.dnfbase.repos[self.repoid].exclude:
            filters.append(('-', [p]))
    filters = self._expand_package_groups(filters)

    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        pkglist = self._get_package_dependencies(self.dnfbase.sack, pkglist)
        self.num_excluded = self.num_packages - len(pkglist)

    to_return = []
    for pack in pkglist:
        if pack.arch == 'src':
            continue
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = RawSolvablePackage(pack)
        # new_pack.hawkey_id = pack
        new_pack.checksum_type = pack.returnIdSum()[0]
        if new_pack.checksum_type == 'sha':
            new_pack.checksum_type = 'sha1'
        new_pack.checksum = pack.returnIdSum()[1]
        to_return.append(new_pack)
    return to_return

def list_packages(self, filters, latest):
    """ list packages"""
    self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
    pkglist = ListPackageSack(self.repo.getPackageSack().returnPackages())
    self.num_packages = len(pkglist)
    if latest:
        pkglist = pkglist.returnNewestByNameArch()
    pkglist = yum.misc.unique(pkglist)
    pkglist.sort(self._sort_packages)

    if not filters:
        # if there's no include/exclude filter on command line or in database
        # check repository config file
        for p in self.repo.includepkgs:
            filters.append(('+', [p]))
        for p in self.repo.exclude:
            filters.append(('-', [p]))
    filters = self._expand_package_groups(filters)

    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        pkglist = self._get_package_dependencies(self.repo.getPackageSack(), pkglist)
        self.num_excluded = self.num_packages - len(pkglist)

    to_return = []
    for pack in pkglist:
        if pack.arch == 'src':
            continue
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = pack
        new_pack.checksum_type = pack.checksums[0][0]
        if new_pack.checksum_type == 'sha':
            new_pack.checksum_type = 'sha1'
        new_pack.checksum = pack.checksums[0][1]
        to_return.append(new_pack)
    return to_return

def list_packages(self, filters, latest):
    """
    List available packages.

    :returns: list
    """
    if not self.repo.is_configured:
        self.setup_repo(self.repo)

    pool = solv.Pool()
    repo = pool.add_repo(str(self.channel_label or self.reponame))
    solv_path = os.path.join(self.repo.root, ZYPP_SOLV_CACHE_PATH,
                             self.channel_label or self.reponame, 'solv')
    if not os.path.isfile(solv_path) or not repo.add_solv(solv.xfopen(str(solv_path)), 0):
        raise SolvFileNotFound(solv_path)

    # TODO: Implement latest
    # if latest:
    #     pkglist = pkglist.returnNewestByNameArch()

    # TODO: Implement sort
    # pkglist.sort(self._sort_packages)

    to_return = []
    for pack in repo.solvables:
        # Solvables with ":" in name are not packages
        if ':' in pack.name:
            continue
        new_pack = ContentPackage()
        epoch, version, release = RawSolvablePackage._parse_solvable_evr(pack.evr)
        new_pack.setNVREA(pack.name, version, release, epoch, pack.arch)
        new_pack.unique_id = RawSolvablePackage(pack)
        checksum = pack.lookup_checksum(solv.SOLVABLE_CHECKSUM)
        new_pack.checksum_type = checksum.typestr()
        new_pack.checksum = checksum.hex()
        to_return.append(new_pack)
    self.num_packages = len(to_return)
    return to_return

def list_packages(self, filters, latest):
    """ list packages"""
    pkglist = self.repo.get_package_list()
    self.num_packages = len(pkglist)

    if latest:
        latest_pkgs = {}
        for pkg in pkglist:
            ident = '{}.{}'.format(pkg.name, pkg.arch)
            if ident not in latest_pkgs or \
                    LooseVersion(pkg.evr()) > LooseVersion(latest_pkgs[ident].evr()):
                latest_pkgs[ident] = pkg
        pkglist = list(latest_pkgs.values())
    pkglist.sort(key=cmp_to_key(self._sort_packages))

    if not filters:
        # if there's no include/exclude filter on command line or in database
        for p in self.repo.includepkgs:
            filters.append(('+', [p]))
        for p in self.repo.exclude:
            filters.append(('-', [p]))

    if filters:
        pkglist = self._filter_packages(pkglist, filters)
        self.num_excluded = self.num_packages - len(pkglist)

    to_return = []
    for pack in pkglist:
        new_pack = ContentPackage()
        new_pack.setNVREA(pack.name, pack.version, pack.release,
                          pack.epoch, pack.arch)
        new_pack.unique_id = pack
        new_pack.checksum_type = pack.checksum_type
        new_pack.checksum = pack.checksum
        to_return.append(new_pack)
    return to_return

class ContentSource(object):
    def __init__(self, url, name, insecure=False, interactive=True,
                 yumsrc_conf=YUMSRC_CONF, org="1", channel_label="",
                 no_mirrors=False, ca_cert_file=None, client_cert_file=None,
                 client_key_file=None):
        # pylint can't see inside the SplitResult class
        # pylint: disable=E1103
        if urlparse.urlsplit(url).scheme:
            self.url = url
        else:
            self.url = "file://%s" % url
        self.name = name
        self.insecure = insecure
        self.interactive = interactive
        self.yumbase = yum.YumBase()
        self.yumbase.preconf.fn = yumsrc_conf
        if not os.path.exists(yumsrc_conf):
            self.yumbase.preconf.fn = '/dev/null'
        self.configparser = ConfigParser()
        if org:
            self.org = org
        else:
            self.org = "NULL"

        self.proxy_url = None
        self.proxy_user = None
        self.proxy_pass = None
        self.authtoken = None

        # read the proxy configuration
        # /etc/rhn/rhn.conf has more priority than yum.conf
        initCFG('server.satellite')

        # keep authtokens for mirroring
        (_scheme, _netloc, _path, query, _fragid) = urlparse.urlsplit(url)
        if query:
            self.authtoken = query

        if CFG.http_proxy:
            self.proxy_url, self.proxy_user, self.proxy_pass = get_proxy(self.url)
        else:
            yb_cfg = self.yumbase.conf.cfg
            section_name = None
            if yb_cfg.has_section(self.name):
                section_name = self.name
            elif yb_cfg.has_section('main'):
                section_name = 'main'
            if section_name:
                if yb_cfg.has_option(section_name, option='proxy'):
                    self.proxy_url = "http://%s" % yb_cfg.get(section_name, option='proxy')
                if yb_cfg.has_option(section_name, 'proxy_username'):
                    self.proxy_user = yb_cfg.get(section_name, 'proxy_username')
                if yb_cfg.has_option(section_name, 'proxy_password'):
                    self.proxy_pass = yb_cfg.get(section_name, 'proxy_password')

        self._authenticate(url)

        # Check for settings in yum configuration files (for custom repos/channels only)
        if org:
            repos = self.yumbase.repos.repos
        else:
            repos = None
        if repos and name in repos:
            repo = repos[name]
        elif repos and channel_label in repos:
            repo = repos[channel_label]
            # In case we are using a Repo object based on the channel config, override its id
            # with the name of the repo, to avoid creating channel directories in the cache directory
            repo.id = name
        else:
            # Not using values from config files
            repo = yum.yumRepo.YumRepository(name)
            repo.populate(self.configparser, name, self.yumbase.conf)
        self.repo = repo

        self.setup_repo(repo, no_mirrors, ca_cert_file, client_cert_file, client_key_file)
        self.num_packages = 0
        self.num_excluded = 0
        self.gpgkey_autotrust = None
        # if '?' not in url:
        #     real_urls.append(url)
        # self.repo.urls = real_urls
        self.groupsfile = None

    def __del__(self):
        # close log files for yum plugin
        for handler in logging.getLogger("yum.filelogging").handlers:
            handler.close()
        self.repo.close()

    def _authenticate(self, url):
        pass

    @staticmethod
    def interrupt_callback(*args, **kwargs):  # pylint: disable=W0613
        # Just re-raise
        e = sys.exc_info()[1]
        raise e

    def setup_repo(self, repo, no_mirrors, ca_cert_file, client_cert_file, client_key_file):
        """Fetch repository metadata"""
        repo.cache = 0
        repo.mirrorlist = self.url
        repo.baseurl = [self.url]
        repo.basecachedir = os.path.join(CACHE_DIR, self.org)
        repo.setAttribute('_override_sigchecks', False)
        if self.insecure:
            repo.repo_gpgcheck = False
        else:
            repo.repo_gpgcheck = True
        # base_persistdir has to be set before pkgdir
        if hasattr(repo, 'base_persistdir'):
            repo.base_persistdir = repo.basecachedir

        pkgdir = os.path.join(CFG.MOUNT_POINT, CFG.PREPENDED_DIR, self.org, 'stage')
        if not os.path.isdir(pkgdir):
            fileutils.makedirs(pkgdir, user='******', group='www')
        repo.pkgdir = pkgdir
        repo.sslcacert = ca_cert_file
        repo.sslclientcert = client_cert_file
        repo.sslclientkey = client_key_file
        repo.proxy = None
        repo.proxy_username = None
        repo.proxy_password = None

        if "file://" in self.url:
            repo.copy_local = 1

        if self.proxy_url is not None:
            repo.proxy = self.proxy_url
            repo.proxy_username = self.proxy_user
            repo.proxy_password = self.proxy_pass

        # Do not try to expand baseurl to other mirrors
        if no_mirrors:
            repo.urls = repo.baseurl
            # FIXME: SUSE
            # Make sure baseurl ends with / so urljoin will work correctly
            if repo.urls[0][-1] != '/':
                repo.urls[0] += '/'
        else:
            warnings = YumWarnings()
            warnings.disable()
            try:
                repo.baseurlSetup()
            except:
                warnings.restore()
                raise
            warnings.restore()
            # If self.url is a metalink, it is expanded into real urls in
            # repo.urls, and the metalink itself is kept at the beginning of
            # the url list ("for repolist -v ... or anything else that wants
            # to know the baseurl"). Remove it from the list; we don't need
            # it to download the content of the repo.
            #
            # SUSE uses tokens which have '?' and this must stay
            #
            # repo.urls = [url for url in repo.urls if '?' not in url]
        for burl in repo.baseurl:
            (scheme, netloc, path, query, fragid) = urlparse.urlsplit(burl)
            repo.gpgkey = [urlparse.urlunsplit(
                (scheme, netloc, path + '/repodata/repomd.xml.key', query, fragid))]
        repo.setup(0, None, gpg_import_func=self.getKeyForRepo,
                   confirm_func=self.askImportKey)
        # use a fixed dir for repo metadata sig checks
        repo.gpgdir = GPG_DIR
        self.initgpgdir(repo.gpgdir)

    def get_md_checksum_type(self):
        """Return the checksum_type of primary.xml"""
        if 'primary' in self.repo.repoXML.repoData:
            checksum = self.repo.repoXML.repoData['primary'].checksum
            return checksum[0]  # tuple (checksum_type, checksum)
        else:
            return "sha1"

    def number_of_packages(self):
        for dummy_index in range(3):
            try:
                self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
                break
            except YumErrors.RepoError:
                pass
        return len(self.repo.getPackageSack().returnPackages())

    def raw_list_packages(self, filters=None):
        for dummy_index in range(3):
            try:
                self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
                break
            except YumErrors.RepoError:
                pass

        rawpkglist = self.repo.getPackageSack().returnPackages()
        self.num_packages = len(rawpkglist)

        if not filters:
            filters = []
            # if there's no include/exclude filter on command line or in database
            for p in self.repo.includepkgs:
                filters.append(('+', [p]))
            for p in self.repo.exclude:
                filters.append(('-', [p]))

        if filters:
            rawpkglist = self._filter_packages(rawpkglist, filters)
            rawpkglist = self._get_package_dependencies(self.repo.getPackageSack(), rawpkglist)
            # do not pull in dependencies if they're explicitly excluded
            rawpkglist = self._filter_packages(rawpkglist, filters, True)
            self.num_excluded = self.num_packages - len(rawpkglist)

        return rawpkglist

    def list_packages(self, filters, latest):
        """ list packages"""
        try:
            self.repo.getPackageSack().populate(self.repo, 'metadata', None, 0)
        except yum.Errors.RepoError as e:
            if "No more mirrors" in str(e):
                reqFile = re.search(r'failure:\s+(.+)\s+from', str(e)).groups()[0]
                raise ChannelTimeoutException(
                    "Retrieving '%s' failed: File not found in repository '%s'"
                    % (reqFile, self.repo))
            else:
                raise
        pkglist = ListPackageSack(self.repo.getPackageSack().returnPackages())
        self.num_packages = len(pkglist)
        if latest:
            pkglist = pkglist.returnNewestByNameArch()
        pkglist = yum.misc.unique(pkglist)
        pkglist.sort(self._sort_packages)

        if not filters:
            # if there's no include/exclude filter on command line or in database
            # check repository config file
            for p in self.repo.includepkgs:
                filters.append(('+', [p]))
            for p in self.repo.exclude:
                filters.append(('-', [p]))
        filters = self._expand_package_groups(filters)

        if filters:
            pkglist = self._filter_packages(pkglist, filters)
            pkglist = self._get_package_dependencies(self.repo.getPackageSack(), pkglist)
            # do not pull in dependencies if they're explicitly excluded
            pkglist = self._filter_packages(pkglist, filters, True)
            self.num_excluded = self.num_packages - len(pkglist)

        to_return = []
        for pack in pkglist:
            if pack.arch == 'src':
                continue
            new_pack = ContentPackage()
            new_pack.setNVREA(pack.name, pack.version, pack.release,
                              pack.epoch, pack.arch)
            new_pack.unique_id = pack
            new_pack.checksum_type = pack.checksums[0][0]
            if new_pack.checksum_type == 'sha':
                new_pack.checksum_type = 'sha1'
            new_pack.checksum = pack.checksums[0][1]
            for cs in pack.checksums:
                new_pack.checksums[cs[0]] = cs[1]
            to_return.append(new_pack)
        return to_return