def affected_to_status(affected, pkgname, fixed_version):
    """Map an Affected level to a Status based on the repo versions of a package.

    :param affected: Affected enum member for the issue
    :param pkgname: package name to look up in the package table
    :param fixed_version: version that fixes the issue, or falsy if none
    :return: a Status enum member
    """
    # early exit if unknown or not affected
    if Affected.not_affected == affected:
        return Status.not_affected
    if Affected.unknown == affected:
        return Status.unknown
    versions = db.session.query(Package).filter_by(name=pkgname) \
        .group_by(Package.name, Package.version).all()
    versions = sort_packages(versions)
    # unknown if no version was found
    if not versions:
        return Status.unknown
    latest = versions[0]
    # vulnerable if the latest version is still affected
    if not fixed_version or 0 > vercmp(latest.version, fixed_version):
        return Status.vulnerable
    # fixed if the latest non-testing version is no longer affected.
    # Guard against the package existing only in [testing] repositories:
    # the previous unconditional non_testing[0] raised IndexError there.
    non_testing = [p for p in versions if 'testing' not in p.database]
    if non_testing and 0 <= vercmp(non_testing[0].version, fixed_version):
        return Status.fixed
    # testing if the latest version lives in a [testing] repository
    if 'testing' in latest.database:
        return Status.testing
    # return vulnerable otherwise
    return Status.vulnerable
def check_updates(repo, batch=False):
    """Yield (pkg, aur_version) pairs for packages with a newer AUR version.

    :param repo: repository whose packages are checked
    :param batch: if True, issue one multiinfo request for all packages
        instead of one info request per package
    """
    all_pkgs = sorted(list_packages(repo))
    if batch:
        # single multiinfo request covering every package
        pkg_map = {}
        params = ['type=multiinfo']
        for pkg in all_pkgs:
            pkg_map[pkg.name] = pkg
            params.append('arg[]={}'.format(pkg.name))
        r = requests.get('https://aur.archlinux.org/rpc.php?{}'.format(
            '&'.join(params)))
        data = r.json()
        for aurpkg in data['results']:
            pkg = pkg_map[aurpkg['Name']]
            if pyalpm.vercmp(aurpkg['Version'], pkg.version) > 0:
                yield pkg, aurpkg['Version']
    else:
        for pkg in all_pkgs:
            r = requests.get(
                'https://aur.archlinux.org/rpc.php?type=info&arg={}'.format(
                    pkg.name))
            data = r.json()
            # 'results' is a dict for a found package; use isinstance
            # rather than an exact type comparison (PEP 8 / idiom)
            if isinstance(data['results'], dict):
                aur_version = data['results']['Version']
                if pyalpm.vercmp(aur_version, pkg.version) > 0:
                    yield pkg, aur_version
            else:
                print("warning: {} is not in the AUR".format(pkg.name),
                      file=sys.stderr)
def aur_pre_build(
    name: Optional[str] = None, *,
    do_vcs_update: Optional[bool] = None,
) -> None:
    """Download the AUR PKGBUILD and reconcile pkgrel with the local one.

    If our PKGBUILD has the same pkgver as the AUR's but a larger pkgrel,
    keep our larger pkgrel.  For VCS packages, run a VCS update and then
    recheck, since the AUR pkgver may lag behind the freshly synced one.
    """
    pkgver = None
    pkgrel = None
    if os.path.exists('PKGBUILD'):
        pkgver, pkgrel = get_pkgver_and_pkgrel()

    _g.aur_pre_files = clean_directory()
    if name is None:
        name = os.path.basename(os.getcwd())
    _g.aur_building_files = _download_aur_pkgbuild(name)

    aur_pkgver, aur_pkgrel = get_pkgver_and_pkgrel()
    if pkgver and pkgver == aur_pkgver and \
            pyalpm.vercmp(f'1-{pkgrel}', f'1-{aur_pkgrel}') > 0:
        # our pkgrel is larger: switch back to it
        update_pkgrel(pkgrel)

    if do_vcs_update is None:
        do_vcs_update = name.endswith(('-git', '-hg', '-svn', '-bzr'))
    if do_vcs_update:
        vcs_update()
        # recheck after sync, because AUR pkgver may lag behind
        new_pkgver, new_pkgrel = get_pkgver_and_pkgrel()
        if pkgver and pkgver == new_pkgver and \
                pyalpm.vercmp(f'1-{pkgrel}', f'1-{new_pkgrel}') > 0:
            update_pkgrel(pkgrel)
def affected_to_status(affected, pkgname, fixed_version):
    """Derive a Status from an Affected level and the package's repo versions."""
    # trivial mappings need no repository lookup
    if Affected.not_affected == affected:
        return Status.not_affected
    if Affected.unknown == affected:
        return Status.unknown

    packages = db.session.query(Package).filter_by(name=pkgname) \
        .group_by(Package.name, Package.version).all()
    packages = sort_packages(packages)
    if not packages:
        # nothing known about this package in any repo
        return Status.unknown

    newest = packages[0]
    # no fix released, or the newest repo version predates the fix
    if not fixed_version or 0 > vercmp(newest.version, fixed_version):
        return Status.vulnerable

    # collect every version at or above the fix
    fixed_versions = [
        pkg for pkg in packages if vercmp(pkg.version, fixed_version) >= 0
    ]
    # fixed as soon as any such version lives outside [testing]
    if any('testing' not in pkg.database for pkg in fixed_versions):
        return Status.fixed
    # otherwise the fix is only available from [testing]
    return Status.testing
def aur_pre_build(
    name: Optional[str] = None, *,
    do_vcs_update: Optional[bool] = None,
    maintainers: Union[str, Container[str]] = (),
) -> None:
    """Prepare an AUR package for building.

    Optionally verifies the AUR maintainer/packager, cleans the working
    directory, downloads the PKGBUILD from the AUR, and reconciles our
    pkgrel with the AUR one (bumping when ours should win).

    :param name: AUR package base; defaults to the current directory name
    :param do_vcs_update: force/skip the VCS update; autodetected from the
        name suffix when None
    :param maintainers: expected maintainer name(s); raises on mismatch
    """
    # import pyalpm here so that lilac can be easily used on non-Arch
    # systems (e.g. Travis CI)
    import pyalpm
    if name is None:
        name = os.path.basename(os.getcwd())
    if maintainers:
        maintainer, last_packager = _get_aur_packager(name)
        # when lilac itself made the last upload, judge by the maintainer
        if last_packager == 'lilac':
            who = maintainer
        else:
            who = last_packager
        error = False
        if isinstance(maintainers, str):
            error = who != maintainers
        else:
            error = who not in maintainers
        if error:
            raise Exception('unexpected AUR package maintainer / packager', who)
    pkgver, pkgrel = get_pkgver_and_pkgrel()
    _g.aur_pre_files = clean_directory()
    _g.aur_building_files = _download_aur_pkgbuild(name)
    aur_pkgver, aur_pkgrel = get_pkgver_and_pkgrel()
    # same pkgver as the AUR: decide whose pkgrel wins
    # (the '1-' prefix makes vercmp compare the rel parts alone)
    if pkgver and pkgver == aur_pkgver:
        if pyalpm.vercmp(f'1-{pkgrel}', f'1-{aur_pkgrel}') < 0:
            # use aur pkgrel
            pass
        else:
            # bump
            update_pkgrel()
    if do_vcs_update is None:
        do_vcs_update = name.endswith(VCS_SUFFIXES)
    if do_vcs_update:
        vcs_update()
        # recheck after sync, because AUR pkgver may lag behind
        new_pkgver, new_pkgrel = get_pkgver_and_pkgrel()
        if pkgver and pkgver == new_pkgver:
            if pkgrel is None:
                next_pkgrel = 1
            else:
                next_pkgrel = _next_pkgrel(pkgrel)
            if pyalpm.vercmp(f'1-{next_pkgrel}', f'1-{new_pkgrel}') > 0:
                update_pkgrel(next_pkgrel)
def aur_pre_build(
    name: Optional[str] = None, *,
    do_vcs_update: Optional[bool] = None,
    maintainers: Union[str, Container[str]] = (),
) -> None:
    """Prepare an AUR package for building.

    Optionally verifies the AUR maintainer via the AUR RPC, cleans the
    working directory, downloads the PKGBUILD, and reconciles our pkgrel
    with the AUR one (bumping when ours should win).

    :param name: AUR package base; defaults to the current directory name
    :param do_vcs_update: force/skip the VCS update; autodetected from the
        name suffix when None
    :param maintainers: expected maintainer name(s); raises on mismatch
    """
    # import pyalpm here so that lilac can be easily used on non-Arch
    # systems (e.g. Travis CI)
    import pyalpm
    if name is None:
        name = os.path.basename(os.getcwd())
    if maintainers:
        # AUR RPC v5 'info' request for the package's current maintainer
        info = s.get(AUR_URL, params={
            "v": 5,
            "type": "info",
            "arg[]": name
        }).json()
        maintainer = info['results'][0]['Maintainer']
        error = False
        if isinstance(maintainers, str):
            error = maintainer != maintainers
        else:
            error = maintainer not in maintainers
        if error:
            raise Exception('unexpected AUR package maintainer', maintainer)
    pkgver, pkgrel = get_pkgver_and_pkgrel()
    _g.aur_pre_files = clean_directory()
    _g.aur_building_files = _download_aur_pkgbuild(name)
    aur_pkgver, aur_pkgrel = get_pkgver_and_pkgrel()
    # same pkgver as the AUR: decide whose pkgrel wins
    # (the '1-' prefix makes vercmp compare the rel parts alone)
    if pkgver and pkgver == aur_pkgver:
        if pyalpm.vercmp(f'1-{pkgrel}', f'1-{aur_pkgrel}') < 0:
            # use aur pkgrel
            pass
        else:
            # bump
            update_pkgrel()
    if do_vcs_update is None:
        do_vcs_update = name.endswith(VCS_SUFFIXES)
    if do_vcs_update:
        vcs_update()
        # recheck after sync, because AUR pkgver may lag behind
        new_pkgver, new_pkgrel = get_pkgver_and_pkgrel()
        if pkgver and pkgver == new_pkgver:
            if pkgrel is None:
                next_pkgrel = 1
            else:
                next_pkgrel = _next_pkgrel(pkgrel)
            if pyalpm.vercmp(f'1-{next_pkgrel}', f'1-{new_pkgrel}') > 0:
                update_pkgrel(next_pkgrel)
def validate(self, pkgnames):
    """Check if packages were installed.

    Prints, per package: NOT installed, outdated (older than the AUR
    version), or installed.
    """
    DS.fancy_msg(_('Validating installation status...'))
    DS.log.info('Validating: ' + '; '.join(pkgnames))
    pyc = pycman.config.init_with_config('/etc/pacman.conf')
    localdb = pyc.get_localdb()
    # Index AUR results by name.  The previous positional aurpkgs[i]
    # lookup assumed the AUR response has exactly one entry per requested
    # name, in order -- an absent package shifted every later index
    # (wrong version compared) and finally raised IndexError.
    aurversions = {p['Name']: p['Version'] for p in self.utils.info(pkgnames)}
    for pkgname in pkgnames:
        pkg = localdb.get_pkg(pkgname)
        if pkg is None:
            DS.fancy_error2(_('{}: NOT installed').format(pkgname))
        elif (pkgname in aurversions
                and pyalpm.vercmp(aurversions[pkgname], pkg.version) > 0):
            DS.fancy_error2(_('{}: outdated {}').format(pkgname,
                                                        pkg.version))
        else:
            DS.fancy_msg2(_('{}: installed {}').format(pkgname,
                                                       pkg.version))
def validate(self, quiet):
    """Check if packages were installed.

    :param bool quiet: suppress messages
    :return: number of packages that were not installed
    :rtype: int
    """
    if self.pkgnames:
        if not quiet:
            DS.fancy_msg(_('Validating installation status...'))
        DS.log.info('Validating: ' + '; '.join(self.pkgnames))
        # reload pacman config so the local db reflects this transaction
        DS.pycreload()
        localdb = DS.pyc.get_localdb()
        aurpkgs = {
            aurpkg.name: aurpkg.version
            for aurpkg in pkgbuilder.utils.info(self.pkgnames)
        }
        # start by assuming every package failed; each success below
        # decrements this counter
        wrong = len(self.pkgnames)
    else:
        wrong = 0
    for pkgname in self.pkgnames:
        lpkg = localdb.get_pkg(pkgname)
        try:
            aurversion = aurpkgs[pkgname]
        except KeyError:
            # not found in the AUR response at all
            if not lpkg:
                if not quiet:
                    DS.fancy_error2(
                        _('{0}: not an AUR package').format(pkgname))
            else:
                # installed, even though not an AUR package: counts as OK
                wrong -= 1
                if not quiet:
                    DS.fancy_msg2(
                        _('{0}: installed {1}').format(
                            pkgname, lpkg.version))
        else:
            if not lpkg:
                if not quiet:
                    DS.fancy_error2(
                        _('{0}: NOT installed').format(pkgname))
            else:
                if pyalpm.vercmp(aurversion, lpkg.version) > 0:
                    # installed but older than the AUR version
                    if not quiet:
                        DS.fancy_error2(
                            _('{0}: outdated {1}').format(
                                pkgname, lpkg.version))
                else:
                    wrong -= 1
                    if not quiet:
                        DS.fancy_msg2(
                            _('{0}: installed {1}').format(
                                pkgname, lpkg.version))
    self.invalid = wrong
    self._set_status_from_return(wrong, TransactionStatus.validated,
                                 TransactionStatus.validation_failed)
    return wrong
def vulernablepackges():
    """Count installed packages that are older than an advisory's fix.

    Fetches the Arch security tracker's issue feed and, for every advisory
    with a known fixed version, counts each installed package still below
    that version.
    """
    handle = init_with_config('/etc/pacman.conf')
    localdb = handle.get_localdb()
    # XXX: error handling
    response = requests.get('https://security.archlinux.org/issues.json',
                            headers={'User-Agent': USER_AGENT})
    total = 0
    for advisory in response.json():
        fixed = advisory['fixed']
        if not fixed:
            # no fixed version published yet: nothing to compare against
            continue
        for name in advisory['packages']:
            installed = localdb.get_pkg(name)
            if installed and vercmp(fixed, installed.version) > 0:
                total += 1
    return total
def pre_build():
    """Write back whichever pkgver (old or new) is newer, updating checksums.

    Compares only the pkgver part of _G.oldver / _G.newver; the pkgrel
    suffix is ignored.  Never downgrades.
    """
    # take the part before the pkgrel separator; the previous
    # two-target unpacking raised ValueError for versions without '-'
    # and bound pkgrel locals that were never used
    oldver = _G.oldver.split('-')[0]
    newver = _G.newver.split('-')[0]
    if vercmp(newver, oldver) >= 0:
        update_pkgver_and_pkgrel(newver, updpkgsums=True)
    else:
        update_pkgver_and_pkgrel(oldver, updpkgsums=True)
def get_upd_pkgs(self):
    '''
    Get updated packages

    Returns:
        dictionary: {pkgname:{'oldver':installedversion,
                              'newver':aur_version},...}
    '''
    self.__init_remote()
    # Initialize the return list
    add_to_update = {}
    # We want to go through each package dictionary returned by InfoPkg
    for pkginfo in self.all_pkg_info:
        # NOTE(review): this skips entries whose OutOfDate is 0 unless
        # self.ood is set -- confirm the intended filter direction
        if not self.ood and pkginfo['OutOfDate'] == 0:
            continue
        if pkginfo['Name'] in self.ign_pkg:
            continue
        # Get the pkgname and pkgver from each package
        pkgname = pkginfo['Name']
        pkgver = pkginfo['Version']
        # Use pkgname to get the same info from the aurpkgs dictionary
        local_version = self.aurpkgs[pkgname]
        # vercmp returns an int; the old "> 0.0" only worked through
        # int/float coercion -- compare against the integer 0
        if vercmp(pkgver, local_version) > 0:
            add_to_update[pkgname] = {
                'oldver': local_version,
                'newver': pkgver
            }
    return add_to_update
def validate(self, pkgnames):
    """Check if packages were installed."""
    DS.fancy_msg(_('Validating installation status...'))
    DS.log.info('Validating: ' + '; '.join(pkgnames))
    pyc = pycman.config.init_with_config('/etc/pacman.conf')
    localdb = pyc.get_localdb()
    # map AUR package name -> AUR version for quick lookups
    aurversions = {entry['Name']: entry['Version']
                   for entry in self.utils.info(pkgnames)}
    for name in pkgnames:
        local = localdb.get_pkg(name)
        if name not in aurversions:
            # package unknown to the AUR
            if local:
                DS.fancy_msg2(_('{}: installed {}').format(name,
                                                           local.version))
            else:
                DS.fancy_error2(_('{}: not an AUR package').format(
                    name))
        elif not local:
            DS.fancy_error2(_('{}: NOT installed').format(name))
        elif pyalpm.vercmp(aurversions[name], local.version) > 0:
            DS.fancy_error2(_('{}: outdated {}').format(name,
                                                        local.version))
        else:
            DS.fancy_msg2(_('{}: installed {}').format(name,
                                                       local.version))
def get_upd_pkgs(self):
    '''
    Get updated packages

    Returns:
        dictionary: {pkgname:{'oldver':installedversion,
                              'newver':aur_version},...}
    '''
    self.__init_remote()
    # Initialize the return list
    add_to_update = {}
    # We want to go through each package dictionary returned by InfoPkg
    for pkginfo in self.all_pkg_info:
        # NOTE(review): this skips entries whose OutOfDate is 0 unless
        # self.ood is set -- confirm the intended filter direction
        if not self.ood and pkginfo['OutOfDate'] == 0:
            continue
        if pkginfo['Name'] in self.ign_pkg:
            continue
        # Get the pkgname and pkgver from each package
        pkgname = pkginfo['Name']
        pkgver = pkginfo['Version']
        # Use pkgname to get the same info from the aurpkgs dictionary
        local_version = self.aurpkgs[pkgname]
        # vercmp returns an int; the old "> 0.0" only worked through
        # int/float coercion -- compare against the integer 0
        if vercmp(pkgver, local_version) > 0:
            add_to_update[pkgname] = {'oldver': local_version,
                                      'newver': pkgver}
    return add_to_update
def validate(self, pkgnames):
    """Check if packages were installed.

    Prints, per package: NOT installed, outdated (older than the AUR
    version), or installed.
    """
    DS.fancy_msg(_('Validating installation status...'))
    DS.log.info('Validating: ' + '; '.join(pkgnames))
    pyc = pycman.config.init_with_config('/etc/pacman.conf')
    localdb = pyc.get_localdb()
    # Index AUR results by name.  The previous positional aurpkgs[i]
    # lookup assumed one response entry per requested name, in order --
    # a missing package shifted later indices and raised IndexError.
    aurversions = {p['Name']: p['Version'] for p in self.utils.info(pkgnames)}
    for pkgname in pkgnames:
        pkg = localdb.get_pkg(pkgname)
        if pkg is None:
            DS.fancy_error2(_('{}: NOT installed').format(pkgname))
        elif (pkgname in aurversions
                and pyalpm.vercmp(aurversions[pkgname], pkg.version) > 0):
            DS.fancy_error2(
                _('{}: outdated {}').format(pkgname, pkg.version))
        else:
            DS.fancy_msg2(
                _('{}: installed {}').format(pkgname, pkg.version))
async def check_outofdate_async(pkgbase):
    """Report upstream versions newer than the packaged one for *pkgbase*.

    Runs the package's LATESTVER script and prints the package name and
    current version once, followed by each newer upstream version found.
    A missing LATESTVER is only warned about for non-VCS packages.
    """
    pkgdir = f'{rootdir}/{pkgbase}'
    latestver_path = f'{pkgdir}/LATESTVER'
    if not os.path.exists(latestver_path):
        if not isvcs(pkgbase):
            logger.warning(f'{latestver_path} does not exist.')
    else:
        # semaphore bounds how many LATESTVER scripts run concurrently
        async with semaphore:
            returncode, stdout, _ = await run_async(
                [latestver_path], cwd=pkgdir,
                stdout=asyncio.subprocess.PIPE)
        if returncode == 0:
            srcinfo = Srcinfo.parsestr(
                await readsrcinfo_async(pkgbase, cache=True))
            version = f'{srcinfo["pkgver"]}-{srcinfo["pkgrel"]}'
            indentation = ' ' * len(pkgbase)
            newercount = 0
            for v in stdout.strip().splitlines():
                # output lines may carry a "label:" prefix; keep only
                # the version part after the first colon
                v = v.split(':', maxsplit=1)[-1]
                if vercmp(v, version) > 0:
                    newercount += 1
                    # print the header line before the first newer hit only
                    if newercount == 1:
                        print(f'{pkgbase} {version}')
                    print(indentation, v)
def get_newest_of(pkglist):
    """Return the package with the highest version, or None for an empty list."""
    if not pkglist:
        return None
    newest = pkglist[0]
    for candidate in pkglist[1:]:
        # keep the candidate only when strictly newer than the current best
        if pyalpm.vercmp(candidate.version, newest.version) == 1:
            newest = candidate
    return newest
def check_srcinfo() -> None:
    """Validate the package's .SRCINFO before building.

    Raises DowngradingError when the newly built version is older than
    what is already in the repos, and ConflictWithOfficialError when the
    package provides groups or replaces packages owned by official repos.
    """
    srcinfo = get_srcinfo()
    bad_groups = []
    bad_packages = []
    pkgnames = []
    # scan .SRCINFO line by line for groups / replaces / pkgname entries
    for line in srcinfo:
        line = line.strip()
        if line.startswith('groups = '):
            g = line.split()[-1]
            if g in _official_groups:
                bad_groups.append(g)
        elif line.startswith('replaces = '):
            pkg = line.split()[-1]
            if pkg in _official_packages:
                bad_packages.append(pkg)
        elif line.startswith('pkgname = '):
            pkgnames.append(line.split()[-1])
    _G.epoch, _G.pkgver, _G.pkgrel = _get_package_version(srcinfo)
    # check if the newly built package is older than the existing
    # package in repos or not
    built_version = format_package_version(_G.epoch, _G.pkgver, _G.pkgrel)
    for pkgname in pkgnames:
        try:
            repo_version = _repo_package_versions[pkgname]
            # note: DowngradingError raised here is NOT caught by the
            # KeyError handler below; only the dict lookup is guarded
            if pyalpm.vercmp(built_version, repo_version) < 0:
                raise DowngradingError(pkgname, built_version, repo_version)
        except KeyError:
            # the newly built package is not in repos yet - fine
            pass
    if bad_groups or bad_packages:
        raise ConflictWithOfficialError(bad_groups, bad_packages)
def validate(self):
    """Run base form validation, then require fixed to be newer than affected."""
    if not BaseForm.validate(self):
        return False
    # a fixed version that is not strictly newer than the affected one
    # is rejected
    if self.fixed.data and vercmp(self.affected.data, self.fixed.data) >= 0:
        self.fixed.errors.append('Version must be newer.')
        return False
    return True
def main(options):
    """Print security issues affecting locally installed packages.

    Depending on the options, lists packages that are vulnerable with no
    fix available (--upgradable) and/or packages whose installed version
    is below the fixed version (--vulnerable).
    """
    # Initialize pyalpm
    configpath = '/etc/pacman.conf'
    handle = pycman.config.init_with_config(configpath)
    db = handle.get_localdb()
    # Handle the situation where a user has enabled [testing]:
    # then 'Testing' advisories are actionable too
    if len([
            syncdb for syncdb in handle.get_syncdbs()
            if syncdb.name == 'testing'
    ]):
        status = ['Testing', 'Fixed']
    else:
        status = ['Fixed']
    # Unfortunately pyalpm does not allow us to set the dbpath to a
    # 'fake' symlinked dbpath as 'checkupdates' does. Neither does it allow
    # use config.init_with_config_and_options since it expects an argparse obj.
    if options.sync:
        subprocess.check_output(["sudo", "pacman", "-Sy"])
    if options.file:
        # offline mode: read a previously saved JSON dump
        data = json.load(options.file)
    else:
        # Fetch latest JSON API
        r = requests.get(API_URL + '/issues/all.json')
        data = r.json()
    for avg in data:
        # one regex search matching any of the advisory's package names
        search_str = '^({})$'.format('|'.join(avg['packages']))
        pkgs = db.search(search_str)
        if not pkgs:
            continue
        # for every pkg, check if affected
        for pkg in pkgs:
            if avg['status'] == 'Vulnerable' and options.upgradable:
                if options.quiet:
                    print('{}={}'.format(pkg.name, pkg.version))
                else:
                    print('{}-{} is vulnerable to {}'.format(
                        pkg.name, pkg.version, avg['type']))
                    print('No fixed package in the repositories.')
                    print('AVG: {}/{}'.format(API_URL, avg['name']))
                    print('')
            elif avg['status'] in status and options.vulnerable:
                # only report when the installed version predates the fix
                if vercmp(pkg.version, avg['fixed']) < 0:
                    if options.quiet:
                        print('{}={}'.format(pkg.name, pkg.version))
                    else:
                        print('{}-{} is vulnerable to {}'.format(
                            pkg.name, pkg.version, avg['type']))
                        print('Upgrade to {}'.format(avg['fixed']))
                        print('AVG: {}/{}'.format(API_URL, avg['name']))
                        for advisory in avg['advisories']:
                            print('Advisory: {}/{}/raw'.format(
                                API_URL, advisory))
                        print('')
def list_upgradable(pkglist, vcsup=False, aurcache=None):
    """Compare package versions and returns upgradable ones.

    :param pkglist: package names to check against the AUR
    :param vcsup: treat VCS/date-versioned packages as upgradable
    :param aurcache: pre-fetched AUR info to reuse instead of querying
    :return: [upgradable, downgradable] lists of [name, local, remote]
    """
    localdb = DS.pyc.get_localdb()
    if aurcache:
        aurlist = aurcache
    else:
        aurlist = pkgbuilder.utils.info(pkglist)
    # It’s THAT easy. Oh, and by the way: it is much, MUCH faster
    # than others. It makes ONE multiinfo request rather than
    # len(installed_packages) info requests.
    upgradable = []
    downgradable = []
    for rpkg in aurlist:
        lpkg = localdb.get_pkg(rpkg.name)
        if lpkg is not None:
            vc = pyalpm.vercmp(rpkg.version, lpkg.version)
            if vc > 0:
                upgradable.append([rpkg.name, lpkg.version, rpkg.version])
            elif vc < 0:
                # If the package version is a date or the name ends in
                # -{git,hg,bzr,svn,cvs,darcs}, do not mark it as downgradable.
                # BTW: the above is yours truly’s list of VCS preference, if
                # you added big a gap between git and hg and then HUGE gaps
                # between everything else.
                try:
                    # For epoch packages. Also, cheating here.
                    v = rpkg.version.split(':')[1]
                except IndexError:
                    v = rpkg.version
                try:
                    datetime.datetime.strptime(v.split('-')[0], '%Y%m%d')
                    datever = True
                except ValueError:
                    # a bare except here used to swallow every exception,
                    # including KeyboardInterrupt; strptime signals a
                    # non-date version with ValueError only
                    datever = False
                dt = datetime.date.today().strftime('%Y%m%d-1')
                if (rpkg.name.endswith(('git', 'hg', 'bzr', 'svn',
                                        'cvs', 'darcs'))):
                    if vcsup:
                        upgradable.append([rpkg.name, lpkg.version, dt])
                    else:
                        DS.log.warning('{0} is -[vcs], ignored for '
                                       'downgrade.'.format(rpkg.name))
                elif datever:
                    if vcsup:
                        upgradable.append([rpkg.name, lpkg.version, dt])
                    else:
                        DS.log.warning('{0} version is a date, ignored '
                                       'for downgrade.'.format(rpkg.name))
                else:
                    downgradable.append([rpkg.name, lpkg.version,
                                         rpkg.version])
    return [upgradable, downgradable]
def compare_versions(version1: str, version2: str) -> int:
    """Compare two package version strings with alpm's vercmp.

    Returns a negative value if version1 < version2, zero if they are
    equal, and a positive value if version1 > version2.
    """
    result = pyalpm.vercmp(version1, version2)
    return result
def compare_versions(current_version, new_version):
    """Compare two package version strings with alpm's vercmp.

    Returns a negative value if current_version < new_version, zero if
    they are equal, and a positive value if current_version > new_version.
    """
    result = vercmp(current_version, new_version)
    return result
def CheckUpdates(self, success, nosuccess):
    """Collect available updates (sync repos, then AUR) and emit them.

    Honors pacman's IgnoreGroup/IgnorePkg settings.  When a configured
    sync-first package (e.g. pacman itself) has an update, only those are
    reported so they can be installed before anything else.
    """
    success('')
    syncfirst = False
    updates = []
    _ignorepkgs = set()
    self.get_handle()
    # expand ignored groups into their member package names
    for group in self.handle.ignoregrps:
        db = self.localdb
        grp = db.read_grp(group)
        if grp:
            name, pkg_list = grp
            for pkg in pkg_list:
                _ignorepkgs.add(pkg.name)
    for name in self.handle.ignorepkgs:
        pkg = self.localdb.get_pkg(name)
        if pkg:
            _ignorepkgs.add(pkg.name)
    # sync-first packages are checked before the full scan
    if config.syncfirst:
        for name in config.syncfirst:
            pkg = self.localdb.get_pkg(name)
            if pkg:
                candidate = pyalpm.sync_newversion(pkg, self.syncdbs)
                if candidate:
                    syncfirst = True
                    updates.append(
                        (candidate.name, candidate.version,
                         candidate.db.name, '', candidate.download_size))
    if not updates:
        if config.enable_aur:
            if not self.aur_updates_checked:
                # start from the set of packages not found in sync dbs
                self.get_local_packages()
                self.local_packages -= _ignorepkgs
        for pkg in self.localdb.pkgcache:
            if not pkg.name in _ignorepkgs:
                candidate = pyalpm.sync_newversion(pkg, self.syncdbs)
                if candidate:
                    updates.append(
                        (candidate.name, candidate.version,
                         candidate.db.name, '', candidate.download_size))
                    # found in a sync repo: not an AUR-only package
                    self.local_packages.discard(pkg.name)
        if config.enable_aur:
            if not self.aur_updates_checked:
                if self.local_packages:
                    # one multiinfo request for all AUR-only packages
                    self.aur_updates_pkgs = aur.multiinfo(
                        self.local_packages)
                    self.aur_updates_checked = True
            for aur_pkg in self.aur_updates_pkgs:
                if self.localdb.get_pkg(aur_pkg.name):
                    comp = pyalpm.vercmp(
                        aur_pkg.version,
                        self.localdb.get_pkg(aur_pkg.name).version)
                    if comp == 1:
                        updates.append((aur_pkg.name, aur_pkg.version,
                                        aur_pkg.db.name, aur_pkg.tarpath,
                                        aur_pkg.download_size))
    self.EmitAvailableUpdates((syncfirst, updates))
def list_upgradable(self, pkglist, vcsup=False):
    """Compares package versions and returns upgradable ones.

    :param pkglist: package names to check against the AUR
    :param vcsup: treat VCS/date-versioned packages as upgradable
    :return: [upgradable, downgradable] lists of [name, local, remote]
    """
    H = pycman.config.init_with_config('/etc/pacman.conf')
    localdb = H.get_localdb()
    aurlist = self.aur.multiinfo(pkglist, DS.protocol)['results']
    # It's THAT easy. Oh, and by the way: it is much, MUCH faster
    # than others. It makes ONE multiinfo request rather than
    # len(installed_packages) info requests.
    upgradable = []
    downgradable = []
    for i in aurlist:
        pkg = localdb.get_pkg(i['Name'])
        if pkg is not None:
            vc = pyalpm.vercmp(i['Version'], pkg.version)
            if vc > 0:
                upgradable.append([i['Name'], pkg.version, i['Version']])
            elif vc < 0:
                # If the package version is a date or the name ends in
                # -{git,hg,bzr,svn,cvs,darcs}, do not mark it as
                # downgradable. BTW: the above is yours truly’s list of
                # VCS preference, if you added a gap between git and hg and
                # then HUGE gaps between everything else.
                try:
                    # For epoch packages. Also, cheating here.
                    v = i['Version'].split(':')[1]
                except IndexError:
                    v = i['Version']
                try:
                    datetime.datetime.strptime(v.split('-')[0], '%Y%m%d')
                    datever = True
                except ValueError:
                    # a bare except here used to swallow every exception,
                    # incl. KeyboardInterrupt; strptime signals a
                    # non-date version with ValueError only (the parsed
                    # result itself was never used)
                    datever = False
                dt = datetime.date.today().strftime('%Y%m%d')
                if (i['Name'].endswith(('git', 'hg', 'bzr', 'svn',
                                        'cvs', 'darcs'))):
                    if vcsup:
                        upgradable.append([i['Name'], pkg.version, dt])
                    else:
                        DS.log.warning('{} is -[vcs], ignored for '
                                       'downgrade.'.format(i['Name']))
                elif datever:
                    if vcsup:
                        upgradable.append([i['Name'], pkg.version, dt])
                    else:
                        DS.log.warning('{} version is a date, ignored '
                                       'for downgrade.'.format(i['Name']))
                else:
                    downgradable.append([i['Name'], pkg.version,
                                         i['Version']])
    return [upgradable, downgradable]
def list_upgradable(self, pkglist, vcsup=False):
    """Compares package versions and returns upgradable ones.

    :param pkglist: package names to check against the AUR
    :param vcsup: treat VCS/date-versioned packages as upgradable
    :return: [upgradable, downgradable] lists of [name, local, remote]
    """
    H = pycman.config.init_with_config('/etc/pacman.conf')
    localdb = H.get_localdb()
    aurlist = self.aur.multiinfo(pkglist, DS.protocol)['results']
    # It's THAT easy. Oh, and by the way: it is much, MUCH faster
    # than others. It makes ONE multiinfo request rather than
    # len(installed_packages) info requests.
    upgradable = []
    downgradable = []
    for i in aurlist:
        pkg = localdb.get_pkg(i['Name'])
        if pkg is not None:
            vc = pyalpm.vercmp(i['Version'], pkg.version)
            if vc > 0:
                upgradable.append([i['Name'], pkg.version, i['Version']])
            elif vc < 0:
                # If the package version is a date or the name ends in
                # -{git,hg,bzr,svn,cvs,darcs}, do not mark it as
                # downgradable. BTW: the above is yours truly’s list of
                # VCS preference, if you added a gap between git and hg and
                # then HUGE gaps between everything else.
                try:
                    # For epoch packages. Also, cheating here.
                    v = i['Version'].split(':')[1]
                except IndexError:
                    v = i['Version']
                try:
                    datetime.datetime.strptime(
                        v.split('-')[0], '%Y%m%d')
                    datever = True
                except ValueError:
                    # a bare except here used to swallow every exception,
                    # incl. KeyboardInterrupt; strptime signals a
                    # non-date version with ValueError only (the parsed
                    # result itself was never used)
                    datever = False
                dt = datetime.date.today().strftime('%Y%m%d')
                if (i['Name'].endswith(
                        ('git', 'hg', 'bzr', 'svn', 'cvs', 'darcs'))):
                    if vcsup:
                        upgradable.append([i['Name'], pkg.version, dt])
                    else:
                        DS.log.warning('{} is -[vcs], ignored for '
                                       'downgrade.'.format(i['Name']))
                elif datever:
                    if vcsup:
                        upgradable.append([i['Name'], pkg.version, dt])
                    else:
                        DS.log.warning('{} version is a date, ignored '
                                       'for downgrade.'.format(i['Name']))
                else:
                    downgradable.append(
                        [i['Name'], pkg.version, i['Version']])
    return [upgradable, downgradable]
def validate(self, quiet):
    """Check if packages were installed.

    :param bool quiet: suppress messages
    :return: number of packages that were not installed
    :rtype: int
    """
    if self.pkgnames:
        if not quiet:
            DS.fancy_msg(_('Validating installation status...'))
        DS.log.info('Validating: ' + '; '.join(self.pkgnames))
        # reload pacman config so the local db reflects this transaction
        DS.pycreload()
        localdb = DS.pyc.get_localdb()
        aurpkgs = {aurpkg.name: aurpkg.version
                   for aurpkg in pkgbuilder.utils.info(self.pkgnames)}
        # start by assuming every package failed; each success below
        # decrements this counter
        wrong = len(self.pkgnames)
    else:
        wrong = 0
    for pkgname in self.pkgnames:
        lpkg = localdb.get_pkg(pkgname)
        try:
            aurversion = aurpkgs[pkgname]
        except KeyError:
            # not found in the AUR response at all
            if not lpkg:
                if not quiet:
                    DS.fancy_error2(_('{0}: not an AUR package').format(
                        pkgname))
            else:
                # installed, even though not an AUR package: counts as OK
                wrong -= 1
                if not quiet:
                    DS.fancy_msg2(_('{0}: installed {1}').format(
                        pkgname, lpkg.version))
        else:
            if not lpkg:
                if not quiet:
                    DS.fancy_error2(_('{0}: NOT installed').format(
                        pkgname))
            else:
                if pyalpm.vercmp(aurversion, lpkg.version) > 0:
                    # installed but older than the AUR version
                    if not quiet:
                        DS.fancy_error2(_('{0}: outdated {1}').format(
                            pkgname, lpkg.version))
                else:
                    wrong -= 1
                    if not quiet:
                        DS.fancy_msg2(_('{0}: installed {1}').format(
                            pkgname, lpkg.version))
    self.invalid = wrong
    self._set_status_from_return(wrong, TransactionStatus.validated,
                                 TransactionStatus.validation_failed)
    return wrong
def _test_dependency(available, difference, wanted):
    """Test a dependency requirement.

    :param available: the version that is available (``epoch:ver-rel``;
        the pkgrel part is stripped before comparing)
    :param difference: the comparison operator string (``<``, ``<=``,
        ``=``, ``>=``, ``>``)
    :param wanted: the version required by the dependency
    :return: True if the available version satisfies the requirement
    """
    if '-' in available:
        # Stripping the pkgver.
        available = available.split('-')[0]
    # renamed from 'vercmp' so the local no longer shadows the function
    # name; compare signs rather than the exact values -1/1, since alpm
    # only guarantees negative/zero/positive results
    cmp_result = pyalpm.vercmp(available, wanted)
    return (('<' in difference and cmp_result < 0) or
            ('=' in difference and cmp_result == 0) or
            ('>' in difference and cmp_result > 0))
def is_outdated(self, remote: Package, paths: RepositoryPaths) -> bool:
    """
    check if package is out-of-dated

    :param remote: package properties from remote source
    :param paths: repository paths instance. Required for VCS packages cache
    :return: True if the package is out-of-dated and False otherwise
    """
    # either normal version or updated VCS
    remote_version = remote.actual_version(paths)
    return vercmp(self.version, remote_version) < 0
def print_package_search(self, pkg, use_categories=True, cachemode=False,
                         prefix='', prefixp=''):
    """
    Outputs/returns a package representation, which is close to the output
    of ``pacman -Ss``.

    :param pkg: an AUR info dict for the package
    :param use_categories: show the AUR category name instead of 'aur'
    :param cachemode: return the entry instead of printing it
    :param prefix: string prepended to every output line
    :param prefixp: prefix whose width counts against the terminal width
    """
    # terminal width via stty(1); fall back to "infinite" when the
    # output has no second field (e.g. not a tty)
    size = subprocess.check_output(['stty', 'size'])
    try:
        termwidth = int(size.split()[1])
    except IndexError:
        # Auto-wrap by terminal. A reference to an old
        # meme and a cheat, too. Sorry.
        termwidth = 9001
    H = pycman.config.init_with_config('/etc/pacman.conf')
    localdb = H.get_localdb()
    lpkg = localdb.get_pkg(pkg['Name'])
    category = ''
    installed = ''
    prefix2 = prefix + '    '
    prefixp2 = prefixp + '    '
    # annotate with the locally installed version, if any
    if lpkg is not None:
        if pyalpm.vercmp(pkg['Version'], lpkg.version) != 0:
            installed = _(' [installed: {}]').format(lpkg.version)
        else:
            installed = _(' [installed]')
    if pkg['OutOfDate'] > 0:
        installed = (installed + ' ' + DS.colors['red'] + _(
            '[out of date]') + DS.colors['all_off'])
    if pkg['CategoryID'] != 0:
        if use_categories:
            category = DS.categories[pkg['CategoryID']]
        else:
            category = 'aur'
    else:
        category = pkg['Category']  # ABS build cheat.
    # wrap the description to the remaining width and indent each line
    descl = textwrap.wrap(pkg['Description'], termwidth - len(prefixp2))
    desc = []
    for i in descl:
        desc.append(prefix2 + i)
    desc = '\n'.join(desc)
    base = (prefix + '{0}/{1} {2} ({4} ' + _('votes') + '){5}\n' + '{3}')
    entry = (base.format(category, pkg['Name'], pkg['Version'], desc,
                         pkg['NumVotes'], installed))
    if cachemode:
        return entry
    else:
        print(entry)
def print_package_search(self, pkg, use_categories=True, cachemode=False,
                         prefix='', prefixp=''):
    """
    Outputs/returns a package representation, which is close to the output
    of ``pacman -Ss``.

    :param pkg: an AUR info dict for the package
    :param use_categories: show the AUR category name instead of 'aur'
    :param cachemode: return the entry instead of printing it
    :param prefix: string prepended to every output line
    :param prefixp: prefix whose width counts against the terminal width
    """
    # terminal width via stty(1); fall back to "infinite" when the
    # output has no second field (e.g. not a tty)
    size = subprocess.check_output(['stty', 'size'])
    try:
        termwidth = int(size.split()[1])
    except IndexError:
        # Auto-wrap by terminal. A reference to an old
        # meme and a cheat, too. Sorry.
        termwidth = 9001
    H = pycman.config.init_with_config('/etc/pacman.conf')
    localdb = H.get_localdb()
    lpkg = localdb.get_pkg(pkg['Name'])
    category = ''
    installed = ''
    prefix2 = prefix + '    '
    prefixp2 = prefixp + '    '
    # annotate with the locally installed version, if any
    if lpkg is not None:
        if pyalpm.vercmp(pkg['Version'], lpkg.version) != 0:
            installed = _(' [installed: {}]').format(lpkg.version)
        else:
            installed = _(' [installed]')
    # note: OutOfDate arrives as a string in this RPC version
    if pkg['OutOfDate'] == '1':
        installed = (installed + ' ' + DS.colors['red'] +
                     _('[out of date]') + DS.colors['all_off'])
    if use_categories:
        category = DS.categories[int(pkg['CategoryID'])]
    else:
        category = 'aur'
    # wrap the description to the remaining width and indent each line
    descl = textwrap.wrap(pkg['Description'], termwidth - len(prefixp2))
    desc = []
    for i in descl:
        desc.append(prefix2 + i)
    desc = '\n'.join(desc)
    base = (prefix + '{0}/{1} {2} ({4} ' + _('votes') + '){5}\n' + '{3}')
    entry = (base.format(category, pkg['Name'], pkg['Version'], desc,
                         pkg['NumVotes'], installed))
    if cachemode:
        return entry
    else:
        print(entry)
def aur_pre_build(
    name: Optional[str] = None, *,
    do_vcs_update: Optional[bool] = None,
) -> None:
    """Download the AUR PKGBUILD and reconcile pkgrel with the local one.

    Keeps our pkgrel when it is larger than the AUR's, bumps it when both
    are equal (rebuild), and rechecks after a VCS update since the AUR
    pkgver may lag behind the freshly synced one.
    """
    # import pyalpm here so that lilac can be easily used on non-Arch
    # systems (e.g. Travis CI)
    import pyalpm
    pkgver = None
    pkgrel = None
    if os.path.exists('PKGBUILD'):
        pkgver, pkgrel = get_pkgver_and_pkgrel()

    _g.aur_pre_files = clean_directory()
    if name is None:
        name = os.path.basename(os.getcwd())
    _g.aur_building_files = _download_aur_pkgbuild(name)

    aur_pkgver, aur_pkgrel = get_pkgver_and_pkgrel()
    if pkgver and pkgver == aur_pkgver:
        if pyalpm.vercmp(f'1-{pkgrel}', f'1-{aur_pkgrel}') > 0:
            # ours is larger: change to larger pkgrel
            update_pkgrel(pkgrel)
        elif pkgrel == aur_pkgrel:
            # identical version: bump for rebuild
            update_pkgrel()

    if do_vcs_update is None:
        do_vcs_update = name.endswith(VCS_SUFFIXES)
    if do_vcs_update:
        vcs_update()
        # recheck after sync, because AUR pkgver may lag behind
        new_pkgver, new_pkgrel = get_pkgver_and_pkgrel()
        if pkgver and pkgver == new_pkgver and \
                pyalpm.vercmp(f'1-{pkgrel}', f'1-{new_pkgrel}') > 0:
            update_pkgrel(pkgrel)
def get_vulnerable_pkgs(cve_list):
    """Return installed packages whose version is affected by listed CVEs.

    :param cve_list: mapping of ``pkgname -> {affected_version: cves}``
    :return: list of ``(pkgname, installed_version, cves)`` tuples
    """
    alpm_handle = pyalpm.Handle("/", "/var/lib/pacman")
    alpmdb = alpm_handle.get_localdb()
    vulnerables = []
    for pkgname, info in cve_list.items():
        pkg = alpmdb.get_pkg(pkgname)
        if not pkg:
            continue  # Not installed
        for cve_version, cves in info.items():
            # vulnerable unless the installed version is strictly newer.
            # Compare the sign instead of testing '!= 1': alpm only
            # guarantees negative/zero/positive results from vercmp.
            if pyalpm.vercmp(pkg.version, cve_version) <= 0:
                vulnerables.append((pkgname, pkg.version, cves))
    return vulnerables
def may_update_pkgrel() -> Generator[None, None, None]:
    """Bump pkgrel after the wrapped step when pkgver stayed the same.

    Records (pkgver, pkgrel) before yielding; afterwards, if the pkgver
    is unchanged and the old rel compares >= the new one, writes the next
    pkgrel.  Bails out silently when either reading fails.
    """
    old_pkgver, old_pkgrel = get_pkgver_and_pkgrel()
    yield
    if old_pkgver is None or old_pkgrel is None:
        return
    cur_pkgver, cur_pkgrel = get_pkgver_and_pkgrel()
    if cur_pkgver is None or cur_pkgrel is None:
        return
    # the '1-' prefix makes vercmp compare only the rel parts
    same_ver = old_pkgver == cur_pkgver
    if same_ver and pyalpm.vercmp(f'1-{old_pkgrel}', f'1-{cur_pkgrel}') >= 0:
        update_pkgrel(_next_pkgrel(old_pkgrel))
def CheckUpdates(self, success, nosuccess, loop):
    """Collect available updates (sync repos, then AUR), emit, quit *loop*.

    Honors pacman's IgnoreGroup/IgnorePkg settings.  When a configured
    sync-first package (e.g. pacman itself) has an update, only those are
    reported so they can be installed before anything else.
    """
    #success('')
    syncfirst = False
    updates = []
    _ignorepkgs = set()
    self.get_handle()
    # expand ignored groups into their member package names
    for group in self.handle.ignoregrps:
        db = self.localdb
        grp = db.read_grp(group)
        if grp:
            name, pkg_list = grp
            for pkg in pkg_list:
                _ignorepkgs.add(pkg.name)
    for name in self.handle.ignorepkgs:
        pkg = self.localdb.get_pkg(name)
        if pkg:
            _ignorepkgs.add(pkg.name)
    # sync-first packages are checked before the full scan
    if config.syncfirst:
        for name in config.syncfirst:
            pkg = self.localdb.get_pkg(name)
            if pkg:
                candidate = pyalpm.sync_newversion(pkg, self.syncdbs)
                if candidate:
                    syncfirst = True
                    updates.append((candidate.name, candidate.version,
                                    candidate.db.name, '',
                                    candidate.download_size))
    if not updates:
        if config.enable_aur:
            if not self.aur_updates_checked:
                # start from the set of packages not found in sync dbs
                self.get_local_packages()
                self.local_packages -= _ignorepkgs
        for pkg in self.localdb.pkgcache:
            if not pkg.name in _ignorepkgs:
                candidate = pyalpm.sync_newversion(pkg, self.syncdbs)
                if candidate:
                    updates.append((candidate.name, candidate.version,
                                    candidate.db.name, '',
                                    candidate.download_size))
                    # found in a sync repo: not an AUR-only package
                    self.local_packages.discard(pkg.name)
        if config.enable_aur:
            if not self.aur_updates_checked:
                if self.local_packages:
                    # one multiinfo request for all AUR-only packages
                    self.aur_updates_pkgs = aur.multiinfo(
                        self.local_packages)
                    self.aur_updates_checked = True
            for aur_pkg in self.aur_updates_pkgs:
                if self.localdb.get_pkg(aur_pkg.name):
                    comp = pyalpm.vercmp(
                        aur_pkg.version,
                        self.localdb.get_pkg(aur_pkg.name).version)
                    if comp == 1:
                        updates.append((aur_pkg.name, aur_pkg.version,
                                        aur_pkg.db.name, aur_pkg.tarpath,
                                        aur_pkg.download_size))
    self.EmitAvailableUpdates(syncfirst, updates)
    loop.quit()
def print_package_search(pkg, cachemode=False, prefix='', prefixp=''):
    """Output/return a package representation.

    Based on `pacman -Ss`.

    :param pkg: an AUR or repository package object
    :param cachemode: return the entry instead of printing it
    :param prefix: string prepended to every output line
    :param prefixp: prefix whose width counts against the terminal width

    .. versionchanged:: 4.0.0
    """
    termwidth = get_termwidth(9001)
    localdb = DS.pyc.get_localdb()
    lpkg = localdb.get_pkg(pkg.name)
    category = ''
    installed = ''
    prefix2 = prefix + '    '
    prefixp2 = prefixp + '    '
    # annotate with the locally installed version, if any
    if lpkg is not None:
        if pyalpm.vercmp(pkg.version, lpkg.version) != 0:
            installed = _(' [installed: {0}]').format(lpkg.version)
        else:
            installed = _(' [installed]')
    try:
        if pkg.is_outdated:
            installed = (installed + ' ' + DS.colors['red'] +
                         _('[out of date]') + DS.colors['all_off'])
    except AttributeError:
        pass  # for ABS packages
    category = pkg.repo
    # wrap the description to the remaining width and indent each line
    descl = textwrap.wrap(pkg.description, termwidth - len(prefixp2))
    desc2 = []
    for i in descl:
        desc2.append(prefix2 + i)
    desc = '\n'.join(desc2)
    # ABS packages have no vote count; AUR packages do
    if pkg.is_abs:
        base = (prefix + '{0}/{1} {2}{3}\n{4}')
        entry = (base.format(category, pkg.name, pkg.version, installed,
                             desc))
    else:
        base = (prefix + '{0}/{1} {2} ({3} {4}){5}\n{6}')
        entry = (base.format(category, pkg.name, pkg.version, pkg.votes,
                             _('votes'), installed, desc))
    if cachemode:
        return entry
    else:
        print(entry)
def print_package_search(pkg, cachemode=False, prefix='', prefixp=''):
    """Output/return a package representation.

    Based on `pacman -Ss`.

    .. versionchanged:: 4.0.0
    """
    # NOTE(review): near-duplicate of the other print_package_search in this
    # file (only the except-branch comment differs) — consider deduplicating.
    termwidth = get_termwidth(9001)
    localdb = DS.pyc.get_localdb()
    lpkg = localdb.get_pkg(pkg.name)  # locally installed copy, or None
    category = ''
    installed = ''
    prefix2 = prefix + '    '
    prefixp2 = prefixp + '    '
    if lpkg is not None:
        # Show the installed version when it differs from the listed one.
        if pyalpm.vercmp(pkg.version, lpkg.version) != 0:
            installed = _(' [installed: {0}]').format(lpkg.version)
        else:
            installed = _(' [installed]')
    try:
        if pkg.is_outdated:
            # AUR-style packages flagged out of date get a red marker.
            installed = (installed + ' ' + DS.colors['red'] +
                         _('[out of date]') + DS.colors['all_off'])
    except AttributeError:
        pass  # for repository packages
    category = pkg.repo
    descl = textwrap.wrap(pkg.description, termwidth - len(prefixp2))
    desc2 = []
    for i in descl:
        desc2.append(prefix2 + i)
    desc = '\n'.join(desc2)
    if pkg.is_abs:
        # ABS/repo entries carry no vote count.
        base = (prefix + '{0}/{1} {2}{3}\n{4}')
        entry = (base.format(category, pkg.name, pkg.version, installed,
                             desc))
    else:
        base = (prefix + '{0}/{1} {2} ({3} {4}){5}\n{6}')
        entry = (base.format(category, pkg.name, pkg.version, pkg.votes,
                             _('votes'), installed, desc))
    if cachemode:
        return entry
    else:
        print(entry)
def is_newer_than(self, version: str) -> bool:
    """Check whether the version is newer than a provided version

    Parameters
    ----------
    version: str
        Another version string to compare that of self to

    Returns
    -------
    True if self.version is newer than the provided version, False
    otherwise.
    """
    # vercmp returns a positive value when the first argument sorts after
    # the second; returning the comparison directly replaces the redundant
    # `if ...: return True / else: return False` pattern.
    return vercmp(self.version, version) > 0
def update_packages(finder, *, force=False, only_repos=None): updated_pkgs = [] # update packages in the django database for db in finder.sync_db.get_syncdbs(): if only_repos and db.name not in only_repos: continue logger.info("Updating packages from repository '{}'...".format( db.name)) for pkg in db.pkgcache: result = Package.objects.filter(repo=db.name, name=pkg.name) assert len(result) in {0, 1} if len(result) == 0: db_package = Package() db_package.repo = db.name db_package.name = pkg.name db_package.arch = pkg.arch updated_pkgs.append(pkg) else: db_package = result[0] if pyalpm.vercmp(db_package.version, pkg.version) == -1: updated_pkgs.append(pkg) elif force is True: updated_pkgs.append(pkg) else: # skip void update of db_package continue # update volatile fields (this is run iff the pkg was added to updated_pkgs) db_package.version = pkg.version db_package.description = pkg.desc db_package.url = pkg.url db_package.build_date = datetime.datetime.fromtimestamp( pkg.builddate, tz=datetime.timezone.utc) db_package.licenses = pkg.licenses db_package.save() # delete old packages from the django database for db_package in Package.objects.order_by("repo").order_by("name"): if not finder.pkg_exists(db_package.repo, db_package.name): Package.objects.filter(repo=db_package.repo, name=db_package.name).delete() return updated_pkgs
def get_transaction_sum():
    """Summarize the pending transaction into per-action buckets.

    Returns a dict with keys 'to_remove', 'to_build', 'to_install',
    'to_update', 'to_reinstall' and 'to_downgrade'. Build/remove entries
    are "name version" strings; the other buckets hold
    ("name version", download_size) pairs.
    """
    transaction_dict = {key: [] for key in (
        'to_remove', 'to_build', 'to_install', 'to_update',
        'to_reinstall', 'to_downgrade')}
    transaction_dict['to_build'] = [
        pkg.name + ' ' + pkg.version for pkg in to_build]
    transaction_dict['to_remove'] = [
        name + ' ' + version for name, version in sorted(To_Remove())]
    # vercmp(new, installed): 1 -> upgrade, 0 -> reinstall, -1 -> downgrade
    vercmp_bucket = {1: 'to_update', 0: 'to_reinstall', -1: 'to_downgrade'}
    for name, version, dsize in sorted(To_Add()):
        pkg = get_localpkg(name)
        if pkg:
            bucket = vercmp_bucket.get(pyalpm.vercmp(version, pkg.version))
            if bucket:
                transaction_dict[bucket].append(
                    (name + ' ' + version, dsize))
        else:
            # not installed locally -> fresh install
            transaction_dict['to_install'].append(
                (name + ' ' + version, dsize))
    return transaction_dict
def version_comparison(version1: str, comparison_operator: str, version2: str) -> bool:
    """
    Compares two versions. e.g. "1.1" ">=" "1.0" -> True

    :param version1: Version1
    :param comparison_operator: Comparison operator
    :param version2: Version2
    :return: True if the conditional relationship holds, False otherwise
    """
    outcome = int(vercmp(version1, version2))
    # Translate the numeric result into the symbol that would satisfy it,
    # then test whether that symbol appears in the requested operator.
    if outcome == 0:
        symbol = "="
    elif outcome < 0:
        symbol = "<"
    else:
        symbol = ">"
    return symbol in comparison_operator
def get_transaction_sum():
    """Build a summary of the pending transaction, bucketed by action.

    Returns a dict keyed by 'to_remove', 'to_build', 'to_install',
    'to_update', 'to_reinstall' and 'to_downgrade'; install-like buckets
    hold ("name version", download_size) tuples.
    """
    summary = {
        "to_remove": [],
        "to_build": [],
        "to_install": [],
        "to_update": [],
        "to_reinstall": [],
        "to_downgrade": [],
    }
    for pkg in to_build:
        summary["to_build"].append(pkg.name + " " + pkg.version)
    for name, version in sorted(To_Remove()):
        summary["to_remove"].append(name + " " + version)
    for name, version, dsize in sorted(To_Add()):
        local_pkg = get_localpkg(name)
        label = name + " " + version
        if not local_pkg:
            # Not installed yet -> plain install.
            summary["to_install"].append((label, dsize))
            continue
        outcome = pyalpm.vercmp(version, local_pkg.version)
        if outcome == 1:
            summary["to_update"].append((label, dsize))
        elif outcome == 0:
            summary["to_reinstall"].append((label, dsize))
        elif outcome == -1:
            summary["to_downgrade"].append((label, dsize))
    return summary
def get_upgradable(self):
    """Search for upgradable packages."""
    handle = config.init_with_config("/etc/pacman.conf")
    remaining = set(handle.get_localdb().pkgcache)
    # Drop every package that some sync repository provides; whatever is
    # left must be foreign (AUR-installed).
    for syncdb in handle.get_syncdbs():
        remaining -= {pkg for pkg in remaining if syncdb.get_pkg(pkg.name)}
    foreign = {pkg.name: pkg for pkg in remaining}
    fetcher = Aurget(name for name in foreign)
    for aur_pkg in fetcher.get_aur_pkgs():
        local = foreign[aur_pkg['Name']]
        # Any version difference counts as "upgradable" here.
        if pyalpm.vercmp(aur_pkg['Version'], local.version) == 0:
            continue
        self.aurs.append(aur_pkg['Name'])
        self.dict1[aur_pkg['Name']] = local.version
temp_package = Package() temp_package.pkgname = temp_pkgname temp_package.arch = temp_arch temp_package.versions.append(temp_pkginfo) packages.append(temp_package) removing = [] for i in packages: counter1 = 0 while counter1 < len(i.versions): counter2 = counter1 while counter2 < len(i.versions): # Use pacman's alpm library to compare and sort the package versions if pyalpm.vercmp( i.versions[counter1].pkgver + '-' + i.versions[counter1].pkgrel, i.versions[counter2].pkgver + '-' + i.versions[counter2].pkgrel) < 0: i.versions[counter1], i.versions[counter2] = \ i.versions[counter2], i.versions[counter1] counter2 += 1 counter1 += 1 # Don't remove newest version del(i.versions[0]) for j in i.versions: removing.append(i.pkgname + '-' + j.pkgver + '-' + j.pkgrel + '-' + i.arch + '.pkg.tar.xz') if removing:
def version_is_greater(v1, v2):
    """Return True if version string *v1* is newer than *v2*.

    libalpm's vercmp contract only guarantees a *positive* return value
    when the first version is newer (not necessarily exactly 1), so test
    with ``> 0`` rather than ``== 1``.
    """
    return pyalpm.vercmp(v1, v2) > 0
# Script fragment: compare foreign (non-repo) installed packages against
# their AUR versions. Assumes `h` (pyalpm handle), `installed` (mutable set
# of local packages) and the AUR class were set up earlier — TODO confirm.
#print(installed)
aur = AUR()
# aur.info(name) / aur.info(iterable) returns AUR RPC dicts with keys such
# as 'Name', 'Version', 'Maintainer', 'LastModified', 'OutOfDate', ...
# (verbose sample output trimmed from the original comments).
#display_fields = ('LocalVersion','Version', 'LastModified')
#aurpkg['LastModified'] = strftime('%Y-%m-%d %H:%M:%S', localtime(aurpkg['LastModified']))
offical_repo = ['core', 'extra', 'community', 'multilib']  # [sic] name kept as-is
# Remove everything an official repo provides; the remainder is foreign.
for db in h.get_syncdbs():
    if db.name in offical_repo:
        for item in list(installed):
            if db.get_pkg(item.name):
                installed.remove(item)
pkgs = [pkg.name for pkg in installed]
for item in aur.info(pkgs):
    for localpkg in installed:
        if localpkg.name == item['Name']:
            if pyalpm.vercmp(item['Version'], localpkg.version) != 0:
                # red "=>" when AUR is newer, plain "<=" when local is newer
                if pyalpm.vercmp(item['Version'], localpkg.version) > 0:
                    eq = '\033[1;31m=>\033[0m'
                else:
                    eq = '<='
                print(localpkg.name, localpkg.version, eq, item['Version'])
def version_column_sort_func(treemodel, treeiter1, treeiter2, data):
    """Tree-model compare callback ordering two rows by package version.

    Returns the vercmp result (<0, 0 or >0) for the versions stored in
    column 0 of the two rows; *data* is unused.
    """
    first = treemodel[treeiter1][0].version
    second = treemodel[treeiter2][0].version
    return pyalpm.vercmp(first, second)
def main(tmp_db_path, sync_cmd=None):
    """Report repo and AUR updates using a throwaway sync database.

    Parameters:
        tmp_db_path: directory for the temporary pacman database; the
            'local' db is symlinked in, sync dbs are copied and refreshed.
        sync_cmd: optional external synchronizer argv; '%d' and '%r' are
            substituted with the temporary sync path / its parent.
    """
    # Use a temporary database path to avoid issues caused by synchronizing the
    # sync database without a full system upgrade.
    # See the discussion here:
    # https://bbs.archlinux.org/viewtopic.php?pid=951285#p951285
    # Basically, if you sync the database and then install packages without first
    # upgrading the system (-y), you can do some damage.
    tmp_db_path = os.path.abspath(tmp_db_path)
    # conf = config.PacmanConfig(conf = '/etc/pacman.conf')
    h = config.init_with_config("/etc/pacman.conf")
    db_path = h.dbpath
    if tmp_db_path == db_path:
        print("temporary path cannot be %s" % db_path)
        sys.exit(1)
    local_db_path = os.path.join(db_path, 'local')
    tmp_local_db_path = os.path.join(tmp_db_path, 'local')
    # Set up the temporary database path
    if not os.path.exists(tmp_db_path):
        os.makedirs(tmp_db_path)
        os.symlink(local_db_path, tmp_local_db_path)
    elif not os.path.islink(tmp_local_db_path):
        # Move instead of unlinking just in case.
        if os.path.exists(tmp_local_db_path):
            sys.stderr.write(
                "warning: expected file or directory at %s\n" % tmp_local_db_path
            )
            # Find a free numbered backup name.
            i = 1
            backup_path = tmp_local_db_path + ('.%d' % i)
            while os.path.exists(backup_path):
                i += 1
                backup_path = tmp_local_db_path + ('.%d' % i)
            sys.stderr.write("attempting to move to %s\n" % backup_path)
            os.rename(tmp_local_db_path, backup_path)
        os.symlink(local_db_path, tmp_local_db_path)
    # Copy in the existing database files. If a repo is offline when paconky is
    # run then no database will be downloaded. If the databases are not copied
    # first then the output will be inconsistent due to missing information. For
    # example, if the Haskell repo is offline then Haskell packages will appear
    # in the [community] and [AUR] sections of the output.
    tmp_sync_db_path = os.path.join(tmp_db_path, 'sync')
    os.makedirs(tmp_sync_db_path, exist_ok=True)
    sync_db_path = os.path.join(db_path, 'sync')
    for db in glob.iglob(os.path.join(sync_db_path, '*.db')):
        tmp_db = os.path.join(tmp_sync_db_path, os.path.basename(db))
        try:
            mtime = os.path.getmtime(tmp_db)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise e
            else:
                mtime = 0  # no copy yet -> always copy
        if mtime < os.path.getmtime(db):
            shutil.copy2(db, tmp_db)
    # Sync the temporary database.
    # Support external synchronizers such as parisync.
    if sync_cmd:
        for index, item in enumerate(sync_cmd):
            if item == '%d':
                sync_cmd[index] = tmp_sync_db_path
            elif item == '%r':
                sync_cmd[index] = os.path.dirname(tmp_sync_db_path)
        p = subprocess.Popen(sync_cmd, stdout=subprocess.PIPE)
        e = p.wait()
        if e != 0:
            sys.stderr.write("sync command exited with %d\n" % e)
        # Re-initialize with new databases.
        args = action_sync.parse_options(('-b', tmp_db_path))
        h = config.init_with_config_and_options(args)
    else:
        args = action_sync.parse_options(('-b', tmp_db_path, '-y'))
        h = config.init_with_config_and_options(args)
        # Route pyalpm's stdout chatter to stderr during the refresh.
        sys.stdout = sys.__stderr__
        try:
            t = transaction.init_from_options(h, args)
        except pyalpm.error as e:
            sys.stderr.write('%s\n' % (e,))
            eno = e.args[1]
            if eno == 10:
                # errno 10: database lock is held — point the user at it.
                lckpath = os.path.join(tmp_db_path, 'db.lck')
                sys.stderr.write('  %s\n' % lckpath)
            sys.exit(1)
        for db in h.get_syncdbs():
            try:
                db.update(False)
            except pyalpm.error as e:
                sys.stderr.write('%s: %s\n' % (db.name, e))
        t.release()
        sys.stdout = sys.__stdout__
    installed = set(p for p in h.get_localdb().pkgcache)
    upgradable = OrderedDict()
    syncdbs = h.get_syncdbs()
    for db in syncdbs:
        # Without "list" the set cannot be altered with "remove" below.
        for pkg in list(installed):
            pkgname = pkg.name
            syncpkg = db.get_pkg(pkgname)
            if syncpkg:
                if pyalpm.vercmp(syncpkg.version, pkg.version) > 0:
                    try:
                        upgradable[db.name].add((pkg, syncpkg))
                    except KeyError:
                        upgradable[db.name] = set(((pkg, syncpkg),))
                # Found in a sync db, so it is not foreign.
                installed.remove(pkg)
    foreign = dict([(p.name,p) for p in installed])
    try:
        aur = AUR.AUR()
        aur_pkgs = aur.info(foreign.keys())
        upgradable_aur = list()
        for aur_pkg in aur_pkgs:
            try:
                installed_pkg = foreign[aur_pkg['Name']]
            except KeyError:
                upgradable_aur.append(aur_pkg)
                continue
            if pyalpm.vercmp(aur_pkg['Version'], installed_pkg.version) > 0:
                upgradable_aur.append(aur_pkg)
                installed.remove(installed_pkg)
    except AUR.AURError as e:
        sys.stderr.write(str(e))
        sys.exit(1)
    except urllib.error.URLError as e:
        sys.stderr.write(
            'error: failed to retrieve information from the AUR (%s)\n' % e.reason
        )
        upgradable_aur = None
    except TypeError:
        # presumably raised when aur.info gets no usable names — TODO confirm
        upgradable_aur = None
    display(upgradable, upgradable_aur)
    # (fragment) Tail of a loop over security-tracker HTML table rows; the
    # enclosing `for tr in ...:` header lies outside this chunk — indentation
    # reconstructed accordingly.
    tds = tr.xpath('td')
    if len(tds) == 0:
        continue  # row without cells (spacer/header)
    texts = [el.text_content().strip() for el in tds]
    # Column layout (presumed from the indexing): 0 = CVE ids, 1 = package
    # name, 3 = fixed version, 6 = status — TODO confirm against the page.
    cves = filter(lambda x: x.startswith("CVE"), texts[0].split())
    pkgname = texts[1]
    version = texts[3]
    if version == "?" or version == "-":
        version = ""
    version = version.strip(' <=')
    status = texts[6]
    if status.startswith("Invalid") or status.startswith("Not affected"):
        continue
    infos[pkgname][version] += cves

print("Checking CVE list against local PKG DB...")
alpm_handle = pyalpm.Handle("/", "/var/lib/pacman")
alpmdb = alpm_handle.get_localdb()
for pkgname, info in infos.items():
    pkg = alpmdb.get_pkg(pkgname)
    if not pkg:
        continue  # Not installed
    for cve_version, cves in info.items():
        # vercmp == -1: installed version is older than the fixed version.
        if pyalpm.vercmp(pkg.version, cve_version) == -1:
            print("{} {} is vulnerable to {}".format(pkgname, pkg.version,
                                                     ','.join(cves)))
def main(tmp_db_path):
    """Report repo and AUR updates using a throwaway sync database.

    Older variant of the paconky entry point: symlinks the 'local' db into
    *tmp_db_path*, refreshes sync dbs there, then compares installed
    packages against sync dbs and the AUR.
    """
    # Use a temporary database path to avoid issues caused by synchronizing the
    # sync database without a full system upgrade.
    # See the discussion here:
    # https://bbs.archlinux.org/viewtopic.php?pid=951285#p951285
    # Basically, if you sync the database and then install packages without first
    # upgrading the system (-y), you can do some damage.
    tmp_db_path = os.path.abspath(tmp_db_path)
    conf = config.PacmanConfig(conf = '/etc/pacman.conf')
    db_path = conf.options['DBPath']
    if tmp_db_path == db_path:
        print("temporary path cannot be %s" % db_path)
        sys.exit(1)
    local_db_path = os.path.join(db_path, 'local')
    tmp_local_db_path = os.path.join(tmp_db_path, 'local')
    # Set up the temporary database path
    if not os.path.exists(tmp_db_path):
        os.makedirs(tmp_db_path)
        os.symlink(local_db_path, tmp_local_db_path)
    elif not os.path.islink(tmp_local_db_path):
        # Move instead of unlinking just in case.
        os.rename(tmp_local_db_path, tmp_local_db_path + '.old')
        os.symlink(local_db_path, tmp_local_db_path)
    # Redirect the stdout messages download messages to stderr.
    # NOTE(review): despite the comment, output is discarded via devnull,
    # not sent to stderr.
    with open(os.devnull, 'w') as f:
        sys.stdout = f
        action_sync.main(('-b', tmp_db_path, '-y'))
    sys.stdout = sys.__stdout__
    installed = set(p for p in pyalpm.get_localdb().pkgcache)
    upgradable = OrderedDict()
    syncdbs = pyalpm.get_syncdbs()
    for db in syncdbs:
        # Without "list" the set cannot be altered with "remove" below.
        for pkg in list(installed):
            pkgname = pkg.name
            syncpkg = db.get_pkg(pkgname)
            if syncpkg:
                if pyalpm.vercmp(syncpkg.version, pkg.version) > 0:
                    try:
                        upgradable[db.name].add((pkg, syncpkg))
                    except KeyError:
                        upgradable[db.name] = set(((pkg, syncpkg),))
                # Found in a sync db, so it is not foreign.
                installed.remove(pkg)
    foreign = dict([(p.name,p) for p in installed])
    try:
        aur = AUR.AUR(threads=10)
        aur_pkgs = aur.info(foreign.keys())
    except AUR.AURError as e:
        sys.stderr.write(str(e))
        sys.exit(1)
    upgradable_aur = list()
    for aur_pkg in aur_pkgs:
        installed_pkg = foreign[aur_pkg['Name']]
        if pyalpm.vercmp(aur_pkg['Version'], installed_pkg.version) > 0:
            upgradable_aur.append((installed_pkg, aur_pkg))
            installed.remove(installed_pkg)
    display(upgradable, upgradable_aur)
def need_upgrade(self):
    """Return True when the packaged version is newer than the installed one."""
    return pyalpm.vercmp(self.pkgver, self.instver) > 0