def depcheck(self, depends):
    """Perform a dependency check.

    :param depends: list of dependency strings (may carry version
        constraints such as ``foo>=1.2``).
    :return: dict mapping each bare dependency name to its source:
        0 = installed locally, 1 = available in a sync repo,
        2 = available in the AUR.
    :raises PBError: if a dependency cannot be found anywhere.
    """
    if not depends:
        # THANK YOU, MAINTAINER, FOR HAVING NO DEPS AND DESTROYING ME!
        return {}
    parseddeps = {}
    pyc = pycman.config.init_with_config('/etc/pacman.conf')
    localpkgs = pyc.get_localdb().pkgcache
    # Flatten all sync-db caches into one list.  (The original used
    # functools.reduce with list concatenation, which is O(n^2).)
    syncpkgs = []
    for cache in (db.pkgcache for db in pyc.get_syncdbs()):
        syncpkgs.extend(cache)
    for dep in depends:
        if dep == '':
            continue
        if re.search('[<=>]', dep):
            # Strip the version constraint; only the name is resolved here.
            vpat = ('>=<|><=|=><|=<>|<>=|<=>|>=|=>|><|<>|=<|'
                    '<=|>|=|<')
            dep = re.split(vpat, dep)[0]
        if pyalpm.find_satisfier(localpkgs, dep):
            parseddeps[dep] = 0
        elif pyalpm.find_satisfier(syncpkgs, dep):
            parseddeps[dep] = 1
        elif self.utils.info([dep]):
            parseddeps[dep] = 2
        else:
            # The original also stored parseddeps[dep] = -1 here, but the
            # dict is discarded by the raise, so that write was dead code.
            raise PBError(_('depcheck: cannot find {} '
                            'anywhere').format(dep))
    return parseddeps
def select_optdeps(widget, pkg, optdeps):
    """Let the user pick uninstalled optional deps of *pkg* to install.

    Shows the chooser dialog, then resolves any virtual packages among the
    selection to concrete providers.
    """
    transaction.choose_label.set_markup('<b>{}</b>'.format(
        _('{pkgname} has {number} uninstalled optional deps.\nPlease choose those you would like to install:').format(
            pkgname=pkg.name, number=str(len(optdeps)))))
    transaction.choose_list.clear()
    for long_string in optdeps:
        transaction.choose_list.append([False, long_string])
    transaction.ChooseDialog.run()
    # some optdep can be virtual package so check for providers.
    # BUGFIX: iterate over a snapshot — discard() while iterating the live
    # set raises "Set changed size during iteration".
    for name in list(transaction.to_add):
        if not transaction.get_syncpkg(name):
            transaction.to_add.discard(name)
            # if a provider is already installed, do nothing
            if pyalpm.find_satisfier(transaction.localdb.pkgcache, name):
                continue
            providers = set()
            for db in transaction.syncdbs:
                pkgs = db.pkgcache
                provider = pyalpm.find_satisfier(pkgs, name)
                # Collect every provider: remove each found provider from
                # the working list so find_satisfier yields the next one.
                while provider:
                    providers.add(provider.name)
                    for candidate in pkgs:
                        if candidate.name == provider.name:
                            pkgs.remove(candidate)
                            break
                    provider = pyalpm.find_satisfier(pkgs, name)
            transaction.choose_provides((providers, name))
    if transaction.to_add:
        ManagerValidButton.set_sensitive(True)
        ManagerCancelButton.set_sensitive(True)
def depcheck(self, depends):
    """Performs a dependency check."""
    if depends == []:
        # THANK YOU, MAINTAINER, FOR HAVING NO DEPS AND DESTROYING ME!
        return {}
    result = {}
    pyc = pycman.config.init_with_config('/etc/pacman.conf')
    local_cache = pyc.get_localdb().pkgcache
    sync_cache = []
    for db in pyc.get_syncdbs():
        sync_cache = sync_cache + db.pkgcache
    version_pattern = ('>=<|><=|=><|=<>|<>=|<=>|>=|=>|><|<>|=<|'
                       '<=|>|=|<')
    for dep in depends:
        if not dep:
            continue
        # Drop any version constraint, keeping only the package name.
        if re.search('[<=>]', dep):
            dep = re.split(version_pattern, dep)[0]
        if pyalpm.find_satisfier(local_cache, dep):
            result[dep] = 0
        elif pyalpm.find_satisfier(sync_cache, dep):
            result[dep] = 1
        elif self.utils.info([dep]):
            result[dep] = 2
        else:
            result[dep] = -1
            raise PBError(_('depcheck: cannot find {} '
                            'anywhere').format(dep))
    return result
def select_optdeps(widget, pkg, optdeps):
    """Let the user pick uninstalled optional deps of *pkg* to install.

    Shows the chooser dialog, then resolves any virtual packages among the
    selection to concrete providers.
    """
    transaction.choose_label.set_markup('<b>{}</b>'.format(
        _('{pkgname} has {number} uninstalled optional deps.\nPlease choose those you would like to install:'
          ).format(pkgname=pkg.name, number=str(len(optdeps)))))
    transaction.choose_list.clear()
    for long_string in optdeps:
        transaction.choose_list.append([False, long_string])
    transaction.ChooseDialog.run()
    # some optdep can be virtual package so check for providers.
    # BUGFIX: iterate over a snapshot — discard() while iterating the live
    # set raises "Set changed size during iteration".
    for name in list(transaction.to_add):
        if not transaction.get_syncpkg(name):
            transaction.to_add.discard(name)
            # if a provider is already installed, do nothing
            if pyalpm.find_satisfier(transaction.localdb.pkgcache, name):
                continue
            providers = set()
            for db in transaction.syncdbs:
                pkgs = db.pkgcache
                provider = pyalpm.find_satisfier(pkgs, name)
                # Collect every provider: remove each found provider from
                # the working list so find_satisfier yields the next one.
                while provider:
                    providers.add(provider.name)
                    for candidate in pkgs:
                        if candidate.name == provider.name:
                            pkgs.remove(candidate)
                            break
                    provider = pyalpm.find_satisfier(pkgs, name)
            transaction.choose_provides((providers, name))
    if transaction.to_add:
        ManagerValidButton.set_sensitive(True)
        ManagerCancelButton.set_sensitive(True)
def test_find_satisfier_error():
    """find_satisfier rejects a missing argument list and non-Package items."""
    # No arguments at all: wrong arity.
    with pytest.raises(TypeError) as excinfo:
        pyalpm.find_satisfier()
    message = str(excinfo.value)
    assert 'takes a Package list and a string' in message
    # A list that does not contain Package objects.
    with pytest.raises(TypeError) as excinfo:
        pyalpm.find_satisfier(["foo"], PKG)
    message = str(excinfo.value)
    assert 'list must contain only Package objects' in message
def alpm_depcheck(packages):
    """Check availability of the package names in *packages* against the
    pacman repositories.

    Returns a DepCheckResult holding four (not necessarily disjoint!)
    lists of package names: installable, in an ignored repo, missing,
    and already installed.
    """
    class DepCheckResult:
        def __init__(self):
            # Each of them is a list of required packages ...
            # ... that are installable via a (non-ignored) repository:
            self.repoinstall = []
            # ... that are present in an ignored repository:
            self.repoignore = []
            # ... that are not found in any pacman/alpm repository
            self.missing = []
            # ... that are already installed
            self.installed = []

        def __str__(self):
            sep = ', '
            return """repoinstall = %s
repoignore = %s
missing = %s
installed = %s""" % (sep.join(self.repoinstall),
                     sep.join(self.repoignore),
                     sep.join(self.missing),
                     sep.join(self.installed))

    res = DepCheckResult()
    alpm = ALPM.get()
    local_db = alpm.get_localdb()
    dbs = alpm.get_syncdbs()
    repo_ignore_re = re.compile("thorsten")
    for dep in packages:
        is_installed = False
        if pyalpm.find_satisfier(local_db.pkgcache, dep):
            res.installed.append(dep)
            is_installed = True
        repo_found = False
        for db in dbs:
            # fullmatch replaces the original match + end()==len() dance.
            if repo_ignore_re.fullmatch(db.name) and db.get_pkg(dep):
                res.repoignore.append(dep)
                continue
            pkg = pyalpm.find_satisfier(db.pkgcache, dep)
            if pkg is not None:
                repo_found = True
                if not is_installed:
                    # mark that it still needs to be installed
                    res.repoinstall.append(dep)
        if not repo_found:
            res.missing.append(dep)
    return res
def resolve_dependency(self, dep):
    """Resolve *dep* to a known package name.

    Returns *dep* itself if it is a known package, otherwise the name of
    a satisfying package from self.packages, or None if nothing matches.
    """
    # NOTE(review): the original assigned
    # ``pkgname = self.requirement2pkgname(dep)`` and never used it — dead
    # code removed.  If the intent was ``if pkgname in self.all_pkgs``,
    # that is a separate latent bug; confirm against callers.
    if dep in self.all_pkgs:
        return dep
    pkg = pyalpm.find_satisfier(self.packages, dep)
    if pkg is None:
        return None
    return pkg.name
def find_local_satisfier(pkgname):
    """Return the local-db package satisfying *pkgname*, or None.

    Access to the shared local db is serialized via ``ldb_lock``; the
    ``with`` statement replaces the manual acquire/try/finally/release.
    """
    with ldb_lock:
        return pyalpm.find_satisfier(ldb.pkgcache, pkgname)
def get_pkg(self, pkgname):
    """Get a pkg which provides *pkgname*, searching every sync db."""
    for syncdb in self.handle.get_syncdbs():
        candidate = pyalpm.find_satisfier(syncdb.pkgcache, pkgname)
        if candidate is not None:
            return candidate
    # Implicit in the original: no db satisfies pkgname.
    return None
def get_satisfier_in_syncdb(pkg):
    """Return the name of a sync-db package satisfying *pkg*, or None."""
    for syncdb in handle.get_syncdbs():
        satisfier = pyalpm.find_satisfier(syncdb.pkgcache, pkg)
        if satisfier:
            return satisfier.name
    return None
def find_satisfier_in_syncdbs(pkgname):
    """Return the first sync-db package satisfying *pkgname*, or None.

    Each (lock, db) pair in ``sdbs`` is queried under its lock; the
    ``with`` statement replaces the manual acquire/try/finally/release.
    """
    for lock, db in sdbs:
        with lock:
            satisfier = pyalpm.find_satisfier(db.pkgcache, pkgname)
        if satisfier:
            return satisfier
    return None
def uninstalled_packages(self):
    """Tell which packages by this package are not installed."""
    local_cache = ALPM.get().get_localdb().pkgcache
    missing = []
    for f in self.packagelist():
        # Exact name=version-release requirement string.
        dep = f.name + '=' + f.ver + '-' + f.rel
        if not pyalpm.find_satisfier(local_cache, dep):
            missing.append(f)
    return missing
def Add(self, pkgname):
    """Add a package satisfying *pkgname* to the current transaction.

    :return: '' on success, or a formatted error string on pyalpm failure.
    """
    error = ''
    try:
        for db in self.syncdbs:
            # this is a security, in case of virtual package it will
            # choose the first provider, the choice should have been
            # done by the client
            pkg = pyalpm.find_satisfier(db.pkgcache, pkgname)
            if pkg:
                self.t.add_pkg(pkg)
                break
    except pyalpm.error as e:
        error = format_error(e.args)
    # BUGFIX: the original used ``finally: return error``, which silently
    # swallowed *any* in-flight exception (not only pyalpm.error).
    return error
def set_deps_list(pkg, style):
    """Populate deps_list with the dependency-related fields of *pkg*."""
    deps_list.clear()
    if pkg.depends:
        deps_list.append([_('Depends On') + ':', '\n'.join(pkg.depends)])
    if pkg.optdepends:
        annotated = []
        for optdep in pkg.optdepends:
            # The part before ':' is the dependency name itself.
            if pyalpm.find_satisfier(transaction.localdb.pkgcache,
                                     optdep.split(':')[0]):
                annotated.append(optdep + ' [' + _('Installed') + ']')
            else:
                annotated.append(optdep)
        deps_list.append([_('Optional Deps') + ':', '\n'.join(annotated)])
    if style == 'local':
        required_by = pkg.compute_requiredby()
        if required_by:
            deps_list.append([_('Required By') + ':',
                              '\n'.join(required_by)])
    if pkg.provides:
        deps_list.append([_('Provides') + ':', '\n'.join(pkg.provides)])
    if pkg.replaces:
        deps_list.append([_('Replaces') + ':', '\n'.join(pkg.replaces)])
    if pkg.conflicts:
        deps_list.append([_('Conflicts With') + ':',
                          '\n'.join(pkg.conflicts)])
def set_deps_list(pkg, style):
    """Refresh deps_list from *pkg*'s dependency fields."""
    deps_list.clear()
    if pkg.depends:
        deps_list.append([_('Depends On') + ':', '\n'.join(pkg.depends)])
    if pkg.optdepends:
        lines = []
        for optdep in pkg.optdepends:
            name_part = optdep.split(':')[0]
            installed = pyalpm.find_satisfier(
                transaction.localdb.pkgcache, name_part)
            if installed:
                lines.append(optdep + ' [' + _('Installed') + ']')
            else:
                lines.append(optdep)
        deps_list.append([_('Optional Deps') + ':', '\n'.join(lines)])
    if style == 'local':
        requiredby = pkg.compute_requiredby()
        if requiredby:
            deps_list.append(
                [_('Required By') + ':', '\n'.join(requiredby)])
    if pkg.provides:
        deps_list.append([_('Provides') + ':', '\n'.join(pkg.provides)])
    if pkg.replaces:
        deps_list.append([_('Replaces') + ':', '\n'.join(pkg.replaces)])
    if pkg.conflicts:
        deps_list.append(
            [_('Conflicts With') + ':', '\n'.join(pkg.conflicts)])
def resolve_dependency(cls, dep):
    """Resolve *dep*: the local db first, then cls.packages as fallback."""
    local_pkg = cls.localdb.get_pkg(dep)
    if local_pkg is not None:
        return local_pkg
    return pyalpm.find_satisfier(cls.packages, dep)
def check_extra_modules(self):
    """Keep kernels and their extra-module packages in sync for this
    transaction: auto-remove modules of a kernel being removed, and
    auto-install matching module packages for every installed kernel,
    walking the dependency closure of the packages being added.

    Reads/mutates the pending transaction ``self.t`` (to_add/to_remove).
    """
    to_add = set(pkg.name for pkg in self.t.to_add)
    to_remove = set(pkg.name for pkg in self.t.to_remove)
    to_check = [pkg for pkg in self.t.to_add]
    already_checked = set(pkg.name for pkg in to_check)
    # depends[i] holds the packages to examine at BFS level i.
    depends = [to_check]
    # get installed kernels and modules
    pkgs = self.localdb.search('linux')
    installed_kernels = set()
    installed_modules = set()
    for pkg in pkgs:
        # group(1) = kernel base name (e.g. linux419), group(2) = module suffix.
        match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
        if match:
            installed_kernels.add(match.group(1))
            if match.group(2):
                installed_modules.add(match.group(2))
    for pkg in self.t.to_add:
        match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
        if match:
            installed_kernels.add(match.group(1))
            if match.group(2):
                installed_modules.add(match.group(2))
    # check in to_remove if there is a kernel and if so, auto-remove the corresponding modules
    for pkg in self.t.to_remove:
        match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
        if match:
            if not match.group(2):
                # A bare kernel is being removed; drop its module packages too.
                installed_kernels.discard(match.group(1))
                for module in installed_modules:
                    pkgname = match.group(1)+module
                    if not pkgname in to_remove:
                        _pkg = self.localdb.get_pkg(pkgname)
                        if _pkg:
                            # Check we won't remove a third party kernel
                            third_party = False
                            for provide in _pkg.provides:
                                if 'linux=' in provide:
                                    third_party = True
                            if not third_party:
                                to_remove.add(pkgname)
                                self.t.remove_pkg(_pkg)
    # start loops to check pkgs
    i = 0
    while depends[i]:
        # add a empty list for new pkgs to check next loop
        depends.append([])
        # start to check one pkg
        for pkg in depends[i]:
            # check if the current pkg is a kernel and if so, check if a module is required to install
            match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
            if match:
                if not match.group(2):
                    # match pkg is a kernel
                    for module in installed_modules:
                        pkgname = match.group(1) + module
                        if not self.localdb.get_pkg(pkgname):
                            for db in self.syncdbs:
                                _pkg = db.get_pkg(pkgname)
                                if _pkg:
                                    if not _pkg.name in already_checked:
                                        depends[i+1].append(_pkg)
                                        already_checked.add(_pkg.name)
                                    if not _pkg.name in to_add | to_remove:
                                        to_add.add(_pkg.name)
                                        self.t.add_pkg(_pkg)
                                    break
            # check if the current pkg is a kernel module and if so, install it for all installed kernels
            match = re.match("(linux[0-9]{2,3})(.*-modules)", pkg.name)
            if match:
                for kernel in installed_kernels:
                    pkgname = kernel + match.group(2)
                    if not self.localdb.get_pkg(pkgname):
                        for db in self.syncdbs:
                            _pkg = db.get_pkg(pkgname)
                            if _pkg:
                                if not _pkg.name in already_checked:
                                    depends[i+1].append(_pkg)
                                    already_checked.add(_pkg.name)
                                if not _pkg.name in to_add | to_remove:
                                    to_add.add(_pkg.name)
                                    self.t.add_pkg(_pkg)
                                break
            # Walk the dependencies of the current package so that kernels
            # pulled in indirectly are also examined on the next level.
            for depend in pkg.depends:
                found_depend = pyalpm.find_satisfier(self.localdb.pkgcache, depend)
                if not found_depend:
                    for db in self.syncdbs:
                        found_depend = pyalpm.find_satisfier(db.pkgcache, depend)
                        if found_depend:
                            break
                if found_depend:
                    # add the dep in list to check its deps in next loop
                    if not found_depend.name in already_checked:
                        depends[i+1].append(found_depend)
                        already_checked.add(found_depend.name)
        i += 1
def check_extra_modules(self):
    """Keep kernels and their extra-module packages consistent for this
    transaction: auto-remove modules of a removed kernel, and auto-install
    matching module packages for every installed kernel, following the
    dependency closure of the packages being added.

    Reads/mutates the pending transaction ``self.t`` (to_add/to_remove).
    """
    to_add = set(pkg.name for pkg in self.t.to_add)
    to_remove = set(pkg.name for pkg in self.t.to_remove)
    to_check = [pkg for pkg in self.t.to_add]
    already_checked = set(pkg.name for pkg in to_check)
    # depends[i] holds the packages to examine at BFS level i.
    depends = [to_check]
    # get installed kernels and modules
    pkgs = self.localdb.search('linux')
    installed_kernels = set()
    installed_modules = set()
    for pkg in pkgs:
        # group(1) = kernel base name (e.g. linux419), group(2) = module suffix.
        match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
        if match:
            installed_kernels.add(match.group(1))
            if match.group(2):
                installed_modules.add(match.group(2))
    for pkg in self.t.to_add:
        match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
        if match:
            installed_kernels.add(match.group(1))
            if match.group(2):
                installed_modules.add(match.group(2))
    # check in to_remove if there is a kernel and if so, auto-remove the corresponding modules
    for pkg in self.t.to_remove:
        match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
        if match:
            if not match.group(2):
                # A bare kernel is being removed; drop its module packages too.
                installed_kernels.discard(match.group(1))
                for module in installed_modules:
                    pkgname = match.group(1) + module
                    if not pkgname in to_remove:
                        _pkg = self.localdb.get_pkg(pkgname)
                        if _pkg:
                            # Check we won't remove a third party kernel
                            third_party = False
                            for provide in _pkg.provides:
                                if 'linux=' in provide:
                                    third_party = True
                            if not third_party:
                                to_remove.add(pkgname)
                                self.t.remove_pkg(_pkg)
    # start loops to check pkgs
    i = 0
    while depends[i]:
        # add a empty list for new pkgs to check next loop
        depends.append([])
        # start to check one pkg
        for pkg in depends[i]:
            # check if the current pkg is a kernel and if so, check if a module is required to install
            match = re.match("(linux[0-9]{2,3})(.*)", pkg.name)
            if match:
                if not match.group(2):
                    # match pkg is a kernel
                    for module in installed_modules:
                        pkgname = match.group(1) + module
                        if not self.localdb.get_pkg(pkgname):
                            for db in self.syncdbs:
                                _pkg = db.get_pkg(pkgname)
                                if _pkg:
                                    if not _pkg.name in already_checked:
                                        depends[i + 1].append(_pkg)
                                        already_checked.add(_pkg.name)
                                    if not _pkg.name in to_add | to_remove:
                                        to_add.add(_pkg.name)
                                        self.t.add_pkg(_pkg)
                                    break
            # check if the current pkg is a kernel module and if so, install it for all installed kernels
            match = re.match("(linux[0-9]{2,3})(.*-modules)", pkg.name)
            if match:
                for kernel in installed_kernels:
                    pkgname = kernel + match.group(2)
                    if not self.localdb.get_pkg(pkgname):
                        for db in self.syncdbs:
                            _pkg = db.get_pkg(pkgname)
                            if _pkg:
                                if not _pkg.name in already_checked:
                                    depends[i + 1].append(_pkg)
                                    already_checked.add(_pkg.name)
                                if not _pkg.name in to_add | to_remove:
                                    to_add.add(_pkg.name)
                                    self.t.add_pkg(_pkg)
                                break
            # Walk the dependencies of the current package so that kernels
            # pulled in indirectly are also examined on the next level.
            for depend in pkg.depends:
                found_depend = pyalpm.find_satisfier(
                    self.localdb.pkgcache, depend)
                if not found_depend:
                    for db in self.syncdbs:
                        found_depend = pyalpm.find_satisfier(
                            db.pkgcache, depend)
                        if found_depend:
                            break
                if found_depend:
                    # add the dep in list to check its deps in next loop
                    if not found_depend.name in already_checked:
                        depends[i + 1].append(found_depend)
                        already_checked.add(found_depend.name)
        i += 1
def check_to_build():
    """Resolve the build order and the repo dependencies of the AUR
    packages queued in ``to_build``.

    Fills the module-level sets ``make_depends`` / ``build_depends`` and
    reorders ``to_build`` so dependencies are built first.

    :return: '' on success, or a newline-separated error string.
    """
    global to_build
    global to_add
    global to_mark_as_dep
    global make_depends
    global build_depends
    make_depends = set()
    # BUGFIX: was ``builds_depends = set()`` (typo), so the global
    # build_depends was never reset between runs.
    build_depends = set()
    # check if base_devel packages are installed
    for name in base_devel:
        if not pyalpm.find_satisfier(localdb.pkgcache, name):
            make_depends.add(name)
    already_checked = set()
    build_order = []
    i = 0
    error = ''
    while i < len(to_build):
        # keep the GUI responsive during this long-running check
        while Gtk.events_pending():
            Gtk.main_iteration()
        pkg = to_build[i]
        # if current pkg is not in build_order add it at the end of the list
        if not pkg.name in build_order:
            build_order.append(pkg.name)
        # download end extract tarball from AUR
        srcdir = aur.get_extract_tarball(pkg)
        if srcdir:
            # get PKGBUILD and parse it to create a new pkg object with makedeps and deps
            new_pkgs = aur.get_pkgs(srcdir + '/PKGBUILD')
            for new_pkg in new_pkgs:
                while Gtk.events_pending():
                    Gtk.main_iteration()
                print('checking', new_pkg.name)
                # check if some makedeps must be installed
                for makedepend in new_pkg.makedepends:
                    while Gtk.events_pending():
                        Gtk.main_iteration()
                    if not makedepend in already_checked:
                        if not pyalpm.find_satisfier(localdb.pkgcache,
                                                     makedepend):
                            print('found make dep:', makedepend)
                            # BUGFIX: initialize so an empty syncdbs list
                            # cannot leave `provider` unbound.
                            provider = None
                            for db in syncdbs:
                                provider = pyalpm.find_satisfier(
                                    db.pkgcache, makedepend)
                                if provider:
                                    break
                            if provider:
                                make_depends.add(provider.name)
                                already_checked.add(makedepend)
                            else:
                                # current makedep need to be built
                                raw_makedepend = common.format_pkg_name(
                                    makedepend)
                                if raw_makedepend in build_order:
                                    # add it in build_order before pkg
                                    build_order.remove(raw_makedepend)
                                    index = build_order.index(pkg.name)
                                    build_order.insert(index, raw_makedepend)
                                else:
                                    # get infos about it
                                    makedep_pkg = aur.info(raw_makedepend)
                                    if makedep_pkg:
                                        # add it in to_build so it will be checked
                                        to_build.append(makedep_pkg)
                                        # add it in build_order before pkg
                                        index = build_order.index(pkg.name)
                                        build_order.insert(
                                            index, raw_makedepend)
                                        # add it in already_checked and to_mark_as_dep
                                        already_checked.add(raw_makedepend)
                                        to_mark_as_dep.add(raw_makedepend)
                                    else:
                                        if error:
                                            error += '\n'
                                        error += _(
                                            '{pkgname} depends on {dependname} but it is not installable'
                                        ).format(pkgname=pkg.name,
                                                 dependname=makedepend)
                # check if some deps must be installed or built
                for depend in new_pkg.depends:
                    while Gtk.events_pending():
                        Gtk.main_iteration()
                    if not depend in already_checked:
                        if not pyalpm.find_satisfier(localdb.pkgcache,
                                                     depend):
                            print('found dep:', depend)
                            provider = None
                            for db in syncdbs:
                                provider = pyalpm.find_satisfier(
                                    db.pkgcache, depend)
                                if provider:
                                    break
                            if provider:
                                # current dep need to be installed
                                build_depends.add(provider.name)
                                already_checked.add(depend)
                            else:
                                # current dep need to be built
                                raw_depend = common.format_pkg_name(depend)
                                if raw_depend in build_order:
                                    # add it in build_order before pkg
                                    build_order.remove(raw_depend)
                                    index = build_order.index(pkg.name)
                                    build_order.insert(index, raw_depend)
                                else:
                                    # get infos about it
                                    dep_pkg = aur.info(raw_depend)
                                    if dep_pkg:
                                        # add it in to_build so it will be checked
                                        to_build.append(dep_pkg)
                                        # add it in build_order before pkg
                                        index = build_order.index(pkg.name)
                                        build_order.insert(index, raw_depend)
                                        # add it in already_checked and to_mark_as_dep
                                        already_checked.add(raw_depend)
                                        to_mark_as_dep.add(raw_depend)
                                    else:
                                        if error:
                                            error += '\n'
                                        error += _(
                                            '{pkgname} depends on {dependname} but it is not installable'
                                        ).format(pkgname=pkg.name,
                                                 dependname=depend)
        else:
            if error:
                error += '\n'
            error += _('Failed to get {pkgname} archive from AUR').format(
                pkgname=pkg.name)
        i += 1
    if error:
        return error
    # add pkgname in make_depends and build_depends in to_add and to_mark_as_dep
    for name in make_depends:
        to_add.add(name)
        to_mark_as_dep.add(name)
    for name in build_depends:
        to_add.add(name)
        to_mark_as_dep.add(name)
    # reorder to_build following build_order
    to_build.sort(key=lambda pkg: build_order.index(pkg.name))
    print('to build:', to_build)
    print('makedeps:', make_depends)
    print('builddeps:', build_depends)
    return error
def deptest(deps):
    """Return the entries of *deps* with no satisfier in the local db."""
    local_cache = handle.get_localdb().pkgcache
    missing = []
    for dep in deps:
        if pyalpm.find_satisfier(local_cache, dep) is None:
            missing.append(dep)
    return missing
def build_runner(pkgname, performdepcheck=True, pkginstall=True):
    """A build function, which actually links to others.

    DO NOT use it unless you re-implement auto_build!

    Looks up *pkgname* in the AUR, falling back to the sync repos (ASP);
    fetches the sources, optionally runs a dependency check, then runs
    makepkg.  Returns a 2-element list [status, payload] where status is a
    sentinel (72336 = existing package found, 72337 = AUR deps must be
    built first) or the makepkg exit status.
    """
    pkg = None
    try:
        pkg = pkgbuilder.utils.info([pkgname])[0]
    except IndexError:
        # Not in the AUR — try the binary repositories instead.
        DS.log.info(
            '{0} not found in the AUR, checking in repositories'.format(
                pkgname))
        syncpkgs = []
        for j in [i.pkgcache for i in DS.pyc.get_syncdbs()]:
            syncpkgs.append(j)
        syncpkgs = functools.reduce(lambda x, y: x + y, syncpkgs)
        abspkg = pyalpm.find_satisfier(syncpkgs, pkgname)
        if abspkg:  # abspkg can be None or a pyalpm.Package object.
            pkg = pkgbuilder.package.ABSPackage.from_pyalpm(abspkg)
            subpackages = [pkg.name]  # no way to get it
    if not pkg:
        raise pkgbuilder.exceptions.PackageNotFoundError(pkgname, 'build')
    DS.fancy_msg(_('Building {0}...').format(pkg.name))
    pkgbuilder.utils.print_package_search(
        pkg,
        prefix=DS.colors['blue'] + ' ->' + DS.colors['all_off'] +
        DS.colors['bold'] + ' ',
        prefixp=' -> ')
    sys.stdout.write(DS.colors['all_off'])
    if pkg.is_abs:
        # Repository package: export sources via ASP.
        DS.fancy_msg(_('Retrieving from ASP...'))
        rc = asp_export(pkg)
        if rc > 0:
            raise pkgbuilder.exceptions.NetworkError(
                _('Failed to retieve {0} (from ASP).').format(pkg.name),
                source='asp', pkg=pkg, retcode=rc)
        existing = find_packagefile(pkg.name)
        if any(pkg.name in i for i in existing[0]):
            DS.fancy_msg(_('Found an existing package for '
                           '{0}').format(pkgname))
            if not pkginstall:
                existing = ([], [])
            return [72336, existing]
        try:
            os.chdir('./{0}'.format(pkg.name))
        except FileNotFoundError:
            raise pkgbuilder.exceptions.PBException(
                'The package download failed.\n This package might '
                'be generated from a split PKGBUILD. Please find out the '
                'name of the “main” package (eg. python- instead of python2-) '
                'and try again.', pkg.name, exit=False)
        if not os.path.exists('.SRCINFO'):
            # Create a .SRCINFO file for ASP/repo packages.
            # Slightly hacky, but saves us work on parsing bash.
            DS.log.debug("Creating .SRCINFO for repository package")
            srcinfo = subprocess.check_output(["makepkg", "--printsrcinfo"])
            with open(".SRCINFO", "wb") as fh:
                fh.write(srcinfo)
    else:
        # AUR package: clone its git repository.
        existing = find_packagefile(pkg.packagebase)
        if any(pkg.name in i for i in existing[0]):
            DS.fancy_msg(_('Found an existing package for '
                           '{0}').format(pkgname))
            if not pkginstall:
                existing = ([], [])
            return [72336, existing]
        DS.fancy_msg(_('Cloning the git repository...'))
        clone(pkg.packagebase)
        os.chdir('./{0}/'.format(pkg.packagebase))
        if not os.path.exists('.SRCINFO'):
            raise pkgbuilder.exceptions.EmptyRepoError(pkg.packagebase)
        subpackages = find_subpackages(os.path.abspath('./.SRCINFO'))
    if performdepcheck:
        DS.fancy_msg(_('Checking dependencies...'))
        depends = prepare_deps(os.path.abspath('./.SRCINFO'))
        deps = depcheck(depends, pkg)
        pkgtypes = [
            _('found in system'),
            _('found in repos'),
            _('found in the AUR')
        ]
        aurbuild = []
        if not deps:
            DS.fancy_msg2(_('none found'))
        for dpkg, pkgtype in deps.items():
            if pkgtype == 2 and dpkg not in subpackages:
                # If we didn’t check for subpackages, we would get an infinite
                # loop if subpackages depended on each other
                aurbuild.append(dpkg)
            elif dpkg in subpackages:
                DS.log.debug("Package depends on itself, ignoring")
            DS.fancy_msg2(': '.join((dpkg, pkgtypes[pkgtype])))
        if aurbuild != []:
            # AUR dependencies must be built first — hand them back.
            os.chdir('../')
            return [72337, aurbuild]
    mpparams = ['makepkg', '-sf']
    if DS.clean:
        mpparams.append('-c')
    if not DS.pgpcheck:
        mpparams.append('--skippgpcheck')
    if not DS.confirm:
        mpparams.append('--noconfirm')
    if not DS.depcheck:
        mpparams.append('--nodeps')
    if not DS.colors_status:
        mpparams.append('--nocolor')
    DS.log.info("Running makepkg: {0}".format(mpparams))
    mpstatus = subprocess.call(mpparams, shell=False)
    DS.log.info("makepkg status: {0}".format(mpstatus))
    if pkginstall:
        toinstall = find_packagefile(os.getcwd())
    else:
        toinstall = ([], [])
    # ASP checkouts live one directory deeper than AUR clones.
    if pkg.is_abs:
        os.chdir('../../')
    else:
        os.chdir('../')
    DS.log.info("Found package files: {0}".format(toinstall))
    return [mpstatus, toinstall]
def installed(pkg):
    """Return the local package satisfying *pkg* (truthy), else None."""
    satisfier = pyalpm.find_satisfier(db.pkgcache, pkg)
    return satisfier
def depcheck(depends, pkgobj=None):
    """Perform a dependency check.

    :param depends: dependency strings, possibly with version constraints.
    :param pkgobj: package object reported as the source of a failed
        version requirement.
    :return: dict mapping bare dependency name to its source:
        0 = installed locally, 1 = in a sync repo, 2 = in the AUR.
    :raises PackageError: if a versioned requirement cannot be satisfied.
    :raises PackageNotFoundError: if a dependency is not found anywhere.
    """
    if depends == []:
        # THANK YOU, MAINTAINER, FOR HAVING NO DEPS AND DESTROYING ME!
        return {}
    else:
        parseddeps = {}
        localpkgs = DS.pyc.get_localdb().pkgcache
        syncpkgs = []
        for j in [i.pkgcache for i in DS.pyc.get_syncdbs()]:
            syncpkgs.append(j)
        syncpkgs = functools.reduce(lambda x, y: x + y, syncpkgs)
        for dep in depends:
            if dep == '':
                continue
            if re.search('[<=>]', dep):
                # Split "name<op>version" into name and version; the
                # alternation lists multi-char operators before their
                # single-char prefixes so re.split matches greedily.
                vpat = ('>=<|><=|=><|=<>|<>=|<=>|>=|=>|><|<>|=<|'
                        '<=|>|=|<')
                ver_base = re.split(vpat, dep)
                fdep = dep
                dep = ver_base[0]
                try:
                    ver = ver_base[1]
                    # Recover the operator text between name and version.
                    diff = re.match(
                        '{0}(.*){1}'.format(re.escape(dep),
                                            re.escape(ver)),
                        fdep).groups()[0]
                except IndexError:
                    # No version requirement, no need to bother. We do the
                    # actual checks later not to waste time.
                    pass
                else:
                    # Versioned requirement: test local db, sync dbs, then
                    # the AUR until one candidate satisfies the constraint.
                    depmatch = False
                    lsat = pyalpm.find_satisfier(localpkgs, dep)
                    if lsat:
                        depmatch = _test_dependency(lsat.version, diff, ver)
                        parseddeps[dep] = 0
                    if not depmatch:
                        ssat = pyalpm.find_satisfier(syncpkgs, dep)
                        if ssat:
                            depmatch = _test_dependency(
                                ssat.version, diff, ver)
                            parseddeps[dep] = 1
                    if not depmatch:
                        asat = pkgbuilder.utils.info([dep])
                        if asat:
                            depmatch = _test_dependency(
                                asat[0].version, diff, ver)
                            parseddeps[dep] = 2
                    if not depmatch:
                        raise pkgbuilder.exceptions.PackageError(
                            _('Failed to fulfill package dependency '
                              'requirement: {0}').format(fdep),
                            req=fdep, source=pkgobj)
            if dep not in parseddeps:
                # Unversioned (or not yet classified): simple existence check.
                if pyalpm.find_satisfier(localpkgs, dep):
                    parseddeps[dep] = 0
                elif pyalpm.find_satisfier(syncpkgs, dep):
                    parseddeps[dep] = 1
                elif pkgbuilder.utils.info([dep]):
                    parseddeps[dep] = 2
                else:
                    raise pkgbuilder.exceptions.PackageNotFoundError(
                        dep, 'depcheck')
        return parseddeps
def fetch_runner(pkgnames, preprocessed=False):
    """Run the fetch procedure.

    Resolves each name in *pkgnames* to an AUR package (falling back to
    the sync repos), then retrieves repo packages via ASP and AUR
    packages via git clone.  Exits the process with status 1 on a
    PBException.

    :param pkgnames: package names, or package objects if *preprocessed*.
    :param preprocessed: True when *pkgnames* already holds package objects.
    """
    abspkgs = []
    aurpkgs = []
    allpkgs = []
    try:
        if preprocessed:
            allpkgs = pkgnames
            pkgnames = [p.name for p in allpkgs]
        else:
            print(':: ' + _('Fetching package information...'))
            for pkgname in pkgnames:
                pkg = None
                try:
                    pkg = pkgbuilder.utils.info([pkgname])[0]
                except IndexError:
                    # Not in the AUR — look for a repo package instead.
                    try:
                        DS.log.info('{0} not found in the AUR, checking in '
                                    'repositories'.format(pkgname))
                        syncpkgs = []
                        for j in [i.pkgcache for i in DS.pyc.get_syncdbs()]:
                            syncpkgs.append(j)
                        syncpkgs = functools.reduce(lambda x, y: x + y,
                                                    syncpkgs)
                        abspkg = pyalpm.find_satisfier(syncpkgs, pkgname)
                        pkg = pkgbuilder.package.ABSPackage.from_pyalpm(
                            abspkg)
                    except AttributeError:
                        # find_satisfier returned None — leave pkg as None.
                        pass
                allpkgs.append(pkg)
                if not pkg:
                    raise pkgbuilder.exceptions.PackageNotFoundError(
                        pkgname, 'fetch')
        # Split into repo (ASP) and AUR packages.
        for pkg in allpkgs:
            if pkg.is_abs:
                abspkgs.append(pkg)
            else:
                aurpkgs.append(pkg)
        if abspkgs:
            print(_(':: Retrieving packages from asp...'))
            pm = pkgbuilder.ui.Progress(len(abspkgs))
            for pkg in abspkgs:
                pm.msg(_('retrieving {0}').format(pkg.name), True)
                rc = asp_export(pkg)
                if rc > 0:
                    raise pkgbuilder.exceptions.NetworkError(
                        _('Failed to retieve {0} (from ASP).').format(
                            pkg.name),
                        source='asp', pkg=pkg, retcode=rc)
        if aurpkgs:
            print(_(':: Retrieving packages from aur...'))
            pm = pkgbuilder.ui.Progress(len(aurpkgs))
            for pkg in aurpkgs:
                pm.msg(_('cloning {0}').format(pkg.packagebase), True)
                clone(pkg.packagebase)
        print(_('Successfully fetched: ') + ' '.join(pkgnames))
    except pkgbuilder.exceptions.PBException as e:
        print(':: ERROR: ' + str(e.msg))
        exit(1)
def deptest(deps):
    """List every dependency in *deps* that the local db cannot satisfy."""
    cache = handle.get_localdb().pkgcache
    return [dep for dep in deps
            if pyalpm.find_satisfier(cache, dep) is None]
def build_runner(pkgname, performdepcheck=True, pkginstall=True):
    """A build function, which actually links to others.

    DO NOT use it unless you re-implement auto_build!

    Looks up *pkgname* in the AUR, falling back to ABS; fetches sources
    (rsync for ABS, git clone for AUR), optionally runs a dependency
    check, then runs makepkg.  Returns [status, payload] where status is
    a sentinel (72335 = skipped, 72336 = existing package found,
    72337 = AUR deps must be built first) or the makepkg exit status.
    """
    pkg = None
    try:
        pkg = pkgbuilder.utils.info([pkgname])[0]
    except IndexError:
        # Not in the AUR — try the binary repositories (ABS) instead.
        DS.log.info(
            '{0} not found in the AUR, checking in ABS'.format(pkgname))
        syncpkgs = []
        for j in [i.pkgcache for i in DS.pyc.get_syncdbs()]:
            syncpkgs.append(j)
        syncpkgs = functools.reduce(lambda x, y: x + y, syncpkgs)
        abspkg = pyalpm.find_satisfier(syncpkgs, pkgname)
        if abspkg:  # abspkg can be None or a pyalpm.Package object.
            pkg = pkgbuilder.package.ABSPackage.from_pyalpm(abspkg)
    if not pkg:
        raise pkgbuilder.exceptions.PackageNotFoundError(pkgname, 'build')
    DS.fancy_msg(_('Building {0}...').format(pkg.name))
    pkgbuilder.utils.print_package_search(
        pkg,
        prefix=DS.colors['blue'] + ' ->' + DS.colors['all_off'] +
        DS.colors['bold'] + ' ',
        prefixp=' -> ')
    sys.stdout.write(DS.colors['all_off'])
    if pkg.is_abs:
        # Repository package: fetch the build files over rsync.
        DS.fancy_msg(_('Retrieving from ABS...'))
        rc = rsync(pkg)
        if rc > 0:
            raise pkgbuilder.exceptions.NetworkError(
                _('Failed to retieve {0} (from ABS/rsync).').format(
                    pkg.name),
                pkg=pkg, retcode=rc)
        existing = find_packagefile(pkg.name)
        if any(pkg.name in i for i in existing[0]):
            DS.fancy_msg(_('Found an existing package for '
                           '{0}').format(pkgname))
            if not pkginstall:
                existing = ([], [])
            return [72336, existing]
        try:
            os.chdir('./{0}/{1}'.format(pkg.repo, pkg.name))
        except FileNotFoundError:
            raise pkgbuilder.exceptions.PBException(
                'The package download failed.\n This package might '
                'be generated from a split PKGBUILD. Please find out the '
                'name of the “main” package (eg. python- instead of python2-) '
                'and try again.', '/'.join((pkg.repo, pkg.name)), exit=False)
    else:
        # AUR package: clone its git repository.
        existing = find_packagefile(pkg.packagebase)
        if any(pkg.name in i for i in existing[0]):
            DS.fancy_msg(_('Found an existing package for '
                           '{0}').format(pkgname))
            if not pkginstall:
                existing = ([], [])
            return [72336, existing]
        DS.fancy_msg(_('Cloning the git repository...'))
        if os.path.exists('./{0}/'.format(pkg.packagebase)):
            # A previous checkout exists; only replace it when allowed.
            if DS.clean or DS.pacman:
                DS.fancy_warning2(
                    _('removing existing directory {0}').format(
                        pkg.packagebase))
                shutil.rmtree('./{0}/'.format(pkg.packagebase))
            else:
                DS.fancy_error(
                    _('Directory {0} already exists, please run with `-c` to '
                      'remove it.').format(pkg.packagebase))
                DS.fancy_warning2(
                    _('skipping package {0}').format(pkg.packagebase))
                return [72335, [[], []]]
        clone(pkg.packagebase)
        os.chdir('./{0}/'.format(pkg.packagebase))
        if not os.path.exists('.SRCINFO'):
            raise pkgbuilder.exceptions.EmptyRepoError(pkg.packagebase)
    if performdepcheck:
        DS.fancy_msg(_('Checking dependencies...'))
        depends = prepare_deps(os.path.abspath('./.SRCINFO'))
        deps = depcheck(depends, pkg)
        pkgtypes = [
            _('found in system'),
            _('found in repos'),
            _('found in the AUR')
        ]
        aurbuild = []
        if not deps:
            DS.fancy_msg2(_('none found'))
        for dpkg, pkgtype in deps.items():
            if pkgtype == 2:
                aurbuild.append(dpkg)
            DS.fancy_msg2(': '.join((dpkg, pkgtypes[pkgtype])))
        if aurbuild != []:
            # AUR dependencies must be built first — hand them back.
            os.chdir('../')
            return [72337, aurbuild]
    mpparams = ['makepkg', '-sf']
    if DS.clean:
        mpparams.append('-c')
    if not DS.pgpcheck:
        mpparams.append('--skippgpcheck')
    if not DS.confirm:
        mpparams.append('--noconfirm')
    if not DS.depcheck:
        mpparams.append('--nodeps')
    if not DS.colors_status:
        mpparams.append('--nocolor')
    DS.log.info("Running makepkg: {0}".format(mpparams))
    mpstatus = subprocess.call(mpparams, shell=False)
    DS.log.info("makepkg status: {0}".format(mpstatus))
    if pkginstall:
        toinstall = find_packagefile(os.getcwd())
    else:
        toinstall = ([], [])
    # ABS checkouts live one directory deeper than AUR clones.
    if pkg.is_abs:
        os.chdir('../../')
    else:
        os.chdir('../')
    DS.log.info("Found package files: {0}".format(toinstall))
    return [mpstatus, toinstall]
def check_to_build():
    """Resolve dependencies and build order for the AUR packages in ``to_build``.

    For every queued package, downloads and parses its PKGBUILD, then
    classifies each make/run dependency as repo-installable (collected in
    ``make_depends`` / ``build_depends``) or AUR-buildable (appended to
    ``to_build`` and ordered before its dependent in ``build_order``).

    Returns an error string; "" means success.  Side effects: mutates the
    module globals ``to_build``, ``to_add``, ``to_mark_as_dep``,
    ``make_depends`` and ``build_depends``.
    """
    global to_build
    global to_add
    global to_mark_as_dep
    global make_depends
    global build_depends
    make_depends = set()
    # BUGFIX: was ``builds_depends = set()`` (typo) — the global
    # ``build_depends`` used below via build_depends.add() was never
    # (re)initialised, causing a NameError on the first run and stale
    # entries on later runs.
    build_depends = set()
    # check if base_devel packages are installed
    for name in base_devel:
        if not pyalpm.find_satisfier(localdb.pkgcache, name):
            make_depends.add(name)
    already_checked = set()
    build_order = []
    i = 0
    error = ""
    # to_build may grow while we iterate, so use an index-based loop.
    while i < len(to_build):
        while Gtk.events_pending():
            Gtk.main_iteration()
        pkg = to_build[i]
        # if current pkg is not in build_order add it at the end of the list
        if not pkg.name in build_order:
            build_order.append(pkg.name)
        # download end extract tarball from AUR
        srcdir = aur.get_extract_tarball(pkg)
        if srcdir:
            # get PKGBUILD and parse it to create a new pkg object with makedeps and deps
            new_pkgs = aur.get_pkgs(srcdir + "/PKGBUILD")
            for new_pkg in new_pkgs:
                while Gtk.events_pending():
                    Gtk.main_iteration()
                print("checking", new_pkg.name)
                # check if some makedeps must be installed
                for makedepend in new_pkg.makedepends:
                    while Gtk.events_pending():
                        Gtk.main_iteration()
                    if not makedepend in already_checked:
                        if not pyalpm.find_satisfier(localdb.pkgcache,
                                                     makedepend):
                            print("found make dep:", makedepend)
                            # BUGFIX: initialise provider so it is defined
                            # even when syncdbs is empty.
                            provider = None
                            for db in syncdbs:
                                provider = pyalpm.find_satisfier(
                                    db.pkgcache, makedepend)
                                if provider:
                                    break
                            if provider:
                                make_depends.add(provider.name)
                                already_checked.add(makedepend)
                            else:
                                # current makedep need to be built
                                raw_makedepend = common.format_pkg_name(
                                    makedepend)
                                if raw_makedepend in build_order:
                                    # add it in build_order before pkg
                                    build_order.remove(raw_makedepend)
                                    index = build_order.index(pkg.name)
                                    build_order.insert(index, raw_makedepend)
                                else:
                                    # get infos about it
                                    makedep_pkg = aur.info(raw_makedepend)
                                    if makedep_pkg:
                                        # add it in to_build so it will be checked
                                        to_build.append(makedep_pkg)
                                        # add it in build_order before pkg
                                        index = build_order.index(pkg.name)
                                        build_order.insert(index,
                                                           raw_makedepend)
                                        # add it in already_checked and to_add_as_as_dep
                                        already_checked.add(raw_makedepend)
                                        to_mark_as_dep.add(raw_makedepend)
                                    else:
                                        if error:
                                            error += "\n"
                                        error += _(
                                            "{pkgname} depends on {dependname} but it is not installable"
                                        ).format(pkgname=pkg.name,
                                                 dependname=makedepend)
                # check if some deps must be installed or built
                for depend in new_pkg.depends:
                    while Gtk.events_pending():
                        Gtk.main_iteration()
                    if not depend in already_checked:
                        if not pyalpm.find_satisfier(localdb.pkgcache,
                                                     depend):
                            print("found dep:", depend)
                            # BUGFIX: initialise provider so it is defined
                            # even when syncdbs is empty.
                            provider = None
                            for db in syncdbs:
                                provider = pyalpm.find_satisfier(
                                    db.pkgcache, depend)
                                if provider:
                                    break
                            if provider:
                                # current dep need to be installed
                                build_depends.add(provider.name)
                                already_checked.add(depend)
                            else:
                                # current dep need to be built
                                raw_depend = common.format_pkg_name(depend)
                                if raw_depend in build_order:
                                    # add it in build_order before pkg
                                    build_order.remove(raw_depend)
                                    index = build_order.index(pkg.name)
                                    build_order.insert(index, raw_depend)
                                else:
                                    # get infos about it
                                    dep_pkg = aur.info(raw_depend)
                                    if dep_pkg:
                                        # add it in to_build so it will be checked
                                        to_build.append(dep_pkg)
                                        # add it in build_order before pkg
                                        index = build_order.index(pkg.name)
                                        build_order.insert(index, raw_depend)
                                        # add it in already_checked and to_add_as_as_dep
                                        already_checked.add(raw_depend)
                                        to_mark_as_dep.add(raw_depend)
                                    else:
                                        if error:
                                            error += "\n"
                                        error += _(
                                            "{pkgname} depends on {dependname} but it is not installable"
                                        ).format(pkgname=pkg.name,
                                                 dependname=depend)
        else:
            if error:
                error += "\n"
            error += _("Failed to get {pkgname} archive from AUR").format(
                pkgname=pkg.name)
        i += 1
    if error:
        return error
    # add pkgname in make_depends and build_depends in to_add and to_mark_as_dep
    for name in make_depends:
        to_add.add(name)
        to_mark_as_dep.add(name)
    for name in build_depends:
        to_add.add(name)
        to_mark_as_dep.add(name)
    # reorder to_build following build_order
    to_build.sort(key=lambda pkg: build_order.index(pkg.name))
    # print('order:', build_order)
    print("to build:", to_build)
    print("makedeps:", make_depends)
    print("builddeps:", build_depends)
    return error
def find_satisfier_in_syncdbs(pkgname):
    """Return the first sync-db package satisfying *pkgname*, or None."""
    candidates = (pyalpm.find_satisfier(db.pkgcache, pkgname) for db in sdbs)
    return next((candidate for candidate in candidates if candidate), None)
def build_runner(pkgname, performdepcheck=True, pkginstall=True):
    """A build function, which actually links to others.

    DO NOT use it unless you re-implement auto_build!

    Returns a 2-element list: [status, toinstall].  Status is either the
    makepkg exit code or a sentinel (72336 = existing package found,
    72337 = AUR dependencies must be built first, in which case the second
    element is the list of AUR dep names).

    NOTE(review): this function chdir()s into the package directory and
    back; callers appear to rely on the cwd being restored on the normal
    paths — confirm before reusing.
    """
    pkg = None
    try:
        pkg = pkgbuilder.utils.info([pkgname])[0]
    except IndexError:
        # Not in the AUR — fall back to a repo (ABS) package.
        DS.log.info('{0} not found in the AUR, checking in ABS'.format(
            pkgname))
        syncpkgs = []
        for j in [i.pkgcache for i in DS.pyc.get_syncdbs()]:
            syncpkgs.append(j)
        syncpkgs = functools.reduce(lambda x, y: x + y, syncpkgs)
        abspkg = pyalpm.find_satisfier(syncpkgs, pkgname)
        if abspkg:  # abspkg can be None or a pyalpm.Package object.
            pkg = pkgbuilder.package.ABSPackage.from_pyalpm(abspkg)
        subpackages = [pkg.name]  # no way to get it
    if not pkg:
        raise pkgbuilder.exceptions.PackageNotFoundError(pkgname, 'build')
    DS.fancy_msg(_('Building {0}...').format(pkg.name))
    pkgbuilder.utils.print_package_search(
        pkg,
        prefix=DS.colors['blue'] + ' ->' + DS.colors['all_off'] +
        DS.colors['bold'] + ' ',
        prefixp=' -> ')
    sys.stdout.write(DS.colors['all_off'])
    if pkg.is_abs:
        # ABS path: fetch via rsync, reuse an already-built package if any.
        DS.fancy_msg(_('Retrieving from ABS...'))
        rc = rsync(pkg)
        if rc > 0:
            raise pkgbuilder.exceptions.NetworkError(
                _('Failed to retieve {0} (from ABS/rsync).').format(
                    pkg.name), pkg=pkg, retcode=rc)
        existing = find_packagefile(pkg.name)
        if any(pkg.name in i for i in existing[0]):
            DS.fancy_msg(_('Found an existing package for '
                           '{0}').format(pkgname))
            if not pkginstall:
                existing = ([], [])
            return [72336, existing]
        try:
            os.chdir('./{0}/{1}'.format(pkg.repo, pkg.name))
        except FileNotFoundError:
            raise pkgbuilder.exceptions.PBException(
                'The package download failed.\n This package might '
                'be generated from a split PKGBUILD. Please find out the '
                'name of the “main” package (eg. python- instead of python2-) '
                'and try again.', '/'.join((pkg.repo, pkg.name)),
                exit=False)
        if not os.path.exists('.SRCINFO'):
            # Create a .SRCINFO file for ABS packages.
            # Slightly hacky, but saves us work on parsing bash.
            DS.log.debug("Creating .SRCINFO for ABS package")
            srcinfo = subprocess.check_output(["makepkg", "--printsrcinfo"])
            with open(".SRCINFO", "wb") as fh:
                fh.write(srcinfo)
    else:
        # AUR path: reuse an existing build or clone the git repository.
        existing = find_packagefile(pkg.packagebase)
        if any(pkg.name in i for i in existing[0]):
            DS.fancy_msg(_('Found an existing package for '
                           '{0}').format(pkgname))
            if not pkginstall:
                existing = ([], [])
            return [72336, existing]
        DS.fancy_msg(_('Cloning the git repository...'))
        clone(pkg.packagebase)
        os.chdir('./{0}/'.format(pkg.packagebase))
        if not os.path.exists('.SRCINFO'):
            raise pkgbuilder.exceptions.EmptyRepoError(pkg.packagebase)
        subpackages = find_subpackages(os.path.abspath('./.SRCINFO'))
    if performdepcheck:
        DS.fancy_msg(_('Checking dependencies...'))
        depends = prepare_deps(os.path.abspath('./.SRCINFO'))
        deps = depcheck(depends, pkg)
        pkgtypes = [_('found in system'), _('found in repos'),
                    _('found in the AUR')]
        aurbuild = []
        if not deps:
            DS.fancy_msg2(_('none found'))
        for dpkg, pkgtype in deps.items():
            if pkgtype == 2 and dpkg not in subpackages:
                # If we didn’t check for subpackages, we would get an infinite
                # loop if subpackages depended on each other
                aurbuild.append(dpkg)
            elif dpkg in subpackages:
                DS.log.debug("Package depends on itself, ignoring")
            DS.fancy_msg2(': '.join((dpkg, pkgtypes[pkgtype])))
        if aurbuild != []:
            # AUR deps must be built first; hand them back to the caller.
            os.chdir('../')
            return [72337, aurbuild]
    # Build flags mirror the user's global settings (DS.*).
    mpparams = ['makepkg', '-sf']
    if DS.clean:
        mpparams.append('-c')
    if not DS.pgpcheck:
        mpparams.append('--skippgpcheck')
    if not DS.confirm:
        mpparams.append('--noconfirm')
    if not DS.depcheck:
        mpparams.append('--nodeps')
    if not DS.colors_status:
        mpparams.append('--nocolor')
    DS.log.info("Running makepkg: {0}".format(mpparams))
    mpstatus = subprocess.call(mpparams, shell=False)
    DS.log.info("makepkg status: {0}".format(mpstatus))
    if pkginstall:
        toinstall = find_packagefile(os.getcwd())
    else:
        toinstall = ([], [])
    # Restore cwd: ABS checkouts live two levels deep (repo/name).
    if pkg.is_abs:
        os.chdir('../../')
    else:
        os.chdir('../')
    DS.log.info("Found package files: {0}".format(toinstall))
    return [mpstatus, toinstall]
def depcheck(depends, pkgobj=None):
    """Perform a dependency check.

    :param depends: list of dependency strings, optionally versioned
        (e.g. ``foo``, ``foo>=1.2``).
    :param pkgobj: package being built; attached to raised errors.
    :return: dict mapping dependency name to its location:
        0 = installed locally, 1 = in the sync repos, 2 = in the AUR.
    :raises pkgbuilder.exceptions.PackageError: a version requirement
        cannot be satisfied.
    :raises pkgbuilder.exceptions.PackageNotFoundError: a dependency is
        not found anywhere.
    """
    if depends == []:
        # THANK YOU, MAINTAINER, FOR HAVING NO DEPS AND DESTROYING ME!
        return {}
    parseddeps = {}
    localpkgs = DS.pyc.get_localdb().pkgcache
    # Flatten all sync-db package caches into a single list.
    syncpkgs = []
    for j in [i.pkgcache for i in DS.pyc.get_syncdbs()]:
        syncpkgs.append(j)
    syncpkgs = functools.reduce(lambda x, y: x + y, syncpkgs)
    for dep in depends:
        if dep == '':
            continue
        if re.search('[<=>]', dep):
            # Split "name<op>version"; vpat lists every operator spelling,
            # longest first so e.g. '>=' wins over '>'.
            vpat = ('>=<|><=|=><|=<>|<>=|<=>|>=|=>|><|<>|=<|'
                    '<=|>|=|<')
            ver_base = re.split(vpat, dep)
            fdep = dep
            dep = ver_base[0]
            try:
                ver = ver_base[1]
                # BUGFIX: escape the name and version before embedding them
                # in a regex.  Unescaped metacharacters (e.g. '+' in
                # "gtk2+>=2.0") made the match fail, and .groups() on None
                # raised AttributeError instead of resolving the dep.
                diff = re.match('{0}(.*){1}'.format(
                    re.escape(dep), re.escape(ver)), fdep).groups()[0]
            except IndexError:
                # No version requirement, no need to bother. We do the
                # actual checks later not to waste time.
                pass
            else:
                # Try local db, then sync dbs, then the AUR, recording
                # where the name was seen even if the version mismatched.
                depmatch = False
                lsat = pyalpm.find_satisfier(localpkgs, dep)
                if lsat:
                    depmatch = _test_dependency(lsat.version, diff, ver)
                    parseddeps[dep] = 0
                if not depmatch:
                    ssat = pyalpm.find_satisfier(syncpkgs, dep)
                    if ssat:
                        depmatch = _test_dependency(ssat.version, diff, ver)
                        parseddeps[dep] = 1
                if not depmatch:
                    asat = pkgbuilder.utils.info([dep])
                    if asat:
                        depmatch = _test_dependency(asat[0].version, diff,
                                                    ver)
                        parseddeps[dep] = 2
                if not depmatch:
                    raise pkgbuilder.exceptions.PackageError(
                        _('Failed to fulfill package dependency '
                          'requirement: {0}').format(fdep),
                        req=fdep, source=pkgobj)
        if dep not in parseddeps:
            # Unversioned (or not yet classified) dependency: just locate it.
            if pyalpm.find_satisfier(localpkgs, dep):
                parseddeps[dep] = 0
            elif pyalpm.find_satisfier(syncpkgs, dep):
                parseddeps[dep] = 1
            elif pkgbuilder.utils.info([dep]):
                parseddeps[dep] = 2
            else:
                raise pkgbuilder.exceptions.PackageNotFoundError(
                    dep, 'depcheck')
    return parseddeps
def on_list_treeview_button_press_event(treeview, event):
    """Build and show a context menu on right-click over the package list.

    Menu entries depend on the clicked package's state: queued (Deselect),
    installed (Remove / Reinstall / optional deps / mark explicit) or
    available (Install / install with optional deps).
    """
    global right_click_menu
    liststore = packages_list_treeview.get_model()
    # Check if right mouse button was clicked
    if event.type == Gdk.EventType.BUTTON_PRESS and event.button == 3:
        while Gtk.events_pending():
            Gtk.main_iteration()
        # NOTE(review): get_path_at_pos returns None when the click lands
        # outside any row — unpacking would then raise TypeError; confirm
        # whether that can happen here.
        treepath, viewcolumn, x, y = treeview.get_path_at_pos(
            int(event.x), int(event.y))
        treeiter = liststore.get_iter(treepath)
        if treeiter:
            # Column 0 holds either a package object or the placeholder
            # string used when the search found nothing.
            if liststore[treeiter][0] != _('No package found') and not liststore[treeiter][0].name in config.holdpkg:
                right_click_menu = Gtk.Menu()
                if liststore[treeiter][0].name in transaction.to_add | transaction.to_remove or liststore[treeiter][0] in transaction.to_build:
                    # Already queued for the transaction -> allow deselect.
                    item = Gtk.ImageMenuItem(_('Deselect'))
                    item.set_image(Gtk.Image.new_from_stock('gtk-undo', Gtk.IconSize.MENU))
                    item.connect('activate', mark_to_deselect, liststore[treeiter][0])
                    right_click_menu.append(item)
                elif liststore[treeiter][0].db.name == 'local':
                    # Installed package.
                    item = Gtk.ImageMenuItem(_('Remove'))
                    item.set_image(Gtk.Image.new_from_pixbuf(to_remove_icon))
                    item.connect('activate', mark_to_remove, liststore[treeiter][0])
                    right_click_menu.append(item)
                    if transaction.get_syncpkg(liststore[treeiter][0].name):
                        # Reinstall only makes sense when no newer version exists.
                        if not pyalpm.sync_newversion(liststore[treeiter][0], transaction.syncdbs):
                            item = Gtk.ImageMenuItem(_('Reinstall'))
                            item.set_image(Gtk.Image.new_from_pixbuf(to_reinstall_icon))
                            item.connect('activate', mark_to_reinstall, liststore[treeiter][0])
                            right_click_menu.append(item)
                        # Offer only the optional deps not yet satisfied locally.
                        optdeps_strings = liststore[treeiter][0].optdepends
                        if optdeps_strings:
                            available_optdeps = []
                            for optdep_string in optdeps_strings:
                                if not pyalpm.find_satisfier(transaction.localdb.pkgcache, optdep_string.split(':')[0]):
                                    available_optdeps.append(optdep_string)
                            if available_optdeps:
                                item = Gtk.ImageMenuItem(_('Install optional deps'))
                                item.set_image(Gtk.Image.new_from_pixbuf(to_install_icon))
                                item.connect('activate', select_optdeps, liststore[treeiter][0], available_optdeps)
                                right_click_menu.append(item)
                    if liststore[treeiter][0].reason == pyalpm.PKG_REASON_DEPEND:
                        item = Gtk.MenuItem(_('Mark as explicitly installed'))
                        item.connect('activate', mark_explicitly_installed, liststore[treeiter][0])
                        right_click_menu.append(item)
                else:
                    # Not installed -> offer installation.
                    item = Gtk.ImageMenuItem(_('Install'))
                    item.set_image(Gtk.Image.new_from_pixbuf(to_install_icon))
                    item.connect('activate', mark_to_install, liststore[treeiter][0])
                    right_click_menu.append(item)
                    optdeps_strings = liststore[treeiter][0].optdepends
                    if optdeps_strings:
                        available_optdeps = []
                        for optdep_string in optdeps_strings:
                            if not pyalpm.find_satisfier(transaction.localdb.pkgcache, optdep_string.split(':')[0]):
                                available_optdeps.append(optdep_string)
                        if available_optdeps:
                            item = Gtk.ImageMenuItem(_('Install with optional deps'))
                            item.set_image(Gtk.Image.new_from_pixbuf(to_install_icon))
                            item.connect('activate', install_with_optdeps, liststore[treeiter][0], available_optdeps)
                            right_click_menu.append(item)
                treeview.grab_focus()
                treeview.set_cursor(treepath, viewcolumn, 0)
                right_click_menu.show_all()
                right_click_menu.popup(None, None, None, None, event.button, event.time)
        # True stops further handling of the right-click event.
        return True
def on_list_treeview_button_press_event(treeview, event):
    """Right-click handler for the package list: pops up a context menu.

    The offered actions (Deselect / Remove / Reinstall / Install /
    optional-deps variants) follow the clicked package's current state.
    """
    global right_click_menu
    liststore = packages_list_treeview.get_model()
    # Check if right mouse button was clicked
    if event.type == Gdk.EventType.BUTTON_PRESS and event.button == 3:
        while Gtk.events_pending():
            Gtk.main_iteration()
        # NOTE(review): get_path_at_pos can return None for clicks outside
        # any row; the tuple unpack would raise then — verify callers.
        treepath, viewcolumn, x, y = treeview.get_path_at_pos(
            int(event.x), int(event.y))
        treeiter = liststore.get_iter(treepath)
        if treeiter:
            # Column 0: package object, or a placeholder string when the
            # list shows "no package found".
            if liststore[treeiter][0] != _(
                    'No package found'
            ) and not liststore[treeiter][0].name in config.holdpkg:
                right_click_menu = Gtk.Menu()
                if liststore[treeiter][
                        0].name in transaction.to_add | transaction.to_remove or liststore[
                            treeiter][0] in transaction.to_build:
                    # Queued already -> offer to deselect it.
                    item = Gtk.ImageMenuItem(_('Deselect'))
                    item.set_image(
                        Gtk.Image.new_from_stock('gtk-undo',
                                                 Gtk.IconSize.MENU))
                    item.connect('activate', mark_to_deselect,
                                 liststore[treeiter][0])
                    right_click_menu.append(item)
                elif liststore[treeiter][0].db.name == 'local':
                    # Installed package.
                    item = Gtk.ImageMenuItem(_('Remove'))
                    item.set_image(Gtk.Image.new_from_pixbuf(to_remove_icon))
                    item.connect('activate', mark_to_remove,
                                 liststore[treeiter][0])
                    right_click_menu.append(item)
                    if transaction.get_syncpkg(liststore[treeiter][0].name):
                        # Reinstall only when no newer version is available.
                        if not pyalpm.sync_newversion(liststore[treeiter][0],
                                                      transaction.syncdbs):
                            item = Gtk.ImageMenuItem(_('Reinstall'))
                            item.set_image(
                                Gtk.Image.new_from_pixbuf(to_reinstall_icon))
                            item.connect('activate', mark_to_reinstall,
                                         liststore[treeiter][0])
                            right_click_menu.append(item)
                        # Only offer optional deps not satisfied locally.
                        optdeps_strings = liststore[treeiter][0].optdepends
                        if optdeps_strings:
                            available_optdeps = []
                            for optdep_string in optdeps_strings:
                                if not pyalpm.find_satisfier(
                                        transaction.localdb.pkgcache,
                                        optdep_string.split(':')[0]):
                                    available_optdeps.append(optdep_string)
                            if available_optdeps:
                                item = Gtk.ImageMenuItem(
                                    _('Install optional deps'))
                                item.set_image(
                                    Gtk.Image.new_from_pixbuf(to_install_icon))
                                item.connect('activate', select_optdeps,
                                             liststore[treeiter][0],
                                             available_optdeps)
                                right_click_menu.append(item)
                    if liststore[treeiter][
                            0].reason == pyalpm.PKG_REASON_DEPEND:
                        item = Gtk.MenuItem(_('Mark as explicitly installed'))
                        item.connect('activate', mark_explicitly_installed,
                                     liststore[treeiter][0])
                        right_click_menu.append(item)
                else:
                    # Not installed -> offer installation.
                    item = Gtk.ImageMenuItem(_('Install'))
                    item.set_image(Gtk.Image.new_from_pixbuf(to_install_icon))
                    item.connect('activate', mark_to_install,
                                 liststore[treeiter][0])
                    right_click_menu.append(item)
                    optdeps_strings = liststore[treeiter][0].optdepends
                    if optdeps_strings:
                        available_optdeps = []
                        for optdep_string in optdeps_strings:
                            if not pyalpm.find_satisfier(
                                    transaction.localdb.pkgcache,
                                    optdep_string.split(':')[0]):
                                available_optdeps.append(optdep_string)
                        if available_optdeps:
                            item = Gtk.ImageMenuItem(
                                _('Install with optional deps'))
                            item.set_image(
                                Gtk.Image.new_from_pixbuf(to_install_icon))
                            item.connect('activate', install_with_optdeps,
                                         liststore[treeiter][0],
                                         available_optdeps)
                            right_click_menu.append(item)
                treeview.grab_focus()
                treeview.set_cursor(treepath, viewcolumn, 0)
                right_click_menu.show_all()
                right_click_menu.popup(None, None, None, None, event.button,
                                       event.time)
        # Returning True consumes the event.
        return True
def build_download_queue(alpm, args=None):
    """ Function to build a download queue.
        Needs a pkgname in args

        Returns (download_queue, not_found, missing_deps).  Packages in
        the 'cinnamon'/'mate'/'mate-extra' groups are forced to come from
        the antergos repo.
    """
    pargs = parse_args(args)
    handle = alpm.get_handle()
    conf = alpm.get_config()
    requested = set(pargs.pkgs)
    other = PkgSet()
    missing_deps = list()
    found = set()
    # The antergos repo db; these groups must be sourced from it only.
    antdb = [db for db in handle.get_syncdbs() if 'antergos' == db.name]
    antdb = antdb[0]
    one_repo_groups_names = ['cinnamon', 'mate', 'mate-extra']
    one_repo_groups = []
    for one_repo_group_name in one_repo_groups_names:
        grp = antdb.read_grp(one_repo_group_name)
        if not grp:
            # Keep a placeholder so indexing [1] below stays valid.
            grp = ['None', []]
            logging.warning(
                "Error reading group '%s' from the antergos repo db",
                one_repo_group_name)
        one_repo_groups.append(grp)
    # All package names that belong to an antergos-only group.
    one_repo_pkgs = {
        pkg
        for one_repo_group in one_repo_groups
        for pkg in one_repo_group[1] if one_repo_group}
    for pkg in requested:
        other_grp = PkgSet()
        for db in handle.get_syncdbs():
            if pkg in one_repo_pkgs and 'antergos' != db.name:
                # pkg should be sourced from the antergos repo only.
                db = antdb
            syncpkg = db.get_pkg(pkg)
            if syncpkg:
                other.add(syncpkg)
                break
            else:
                # Not a package name; maybe it is a group name.
                syncgrp = db.read_grp(pkg)
                if syncgrp:
                    found.add(pkg)
                    other_grp |= PkgSet(syncgrp[1])
                    break
        else:
            # for/else: no db matched pkg as a package or group.
            other |= other_grp
    # foreign_names = requested - set(x.name for x in other)
    # Resolve dependencies.
    if other and not pargs.nodeps:
        # Breadth-first walk of the dependency graph via sync dbs.
        queue = deque(other)
        local_cache = handle.get_localdb().pkgcache
        syncdbs = handle.get_syncdbs()
        seen = set(queue)
        while queue:
            pkg = queue.popleft()
            for dep in pkg.depends:
                if pyalpm.find_satisfier(local_cache, dep) is None or pargs.alldeps:
                    for db in syncdbs:
                        prov = pyalpm.find_satisfier(db.pkgcache, dep)
                        if prov is not None:
                            other.add(prov)
                            if prov.name not in seen:
                                seen.add(prov.name)
                                queue.append(prov)
                            break
                    else:
                        # No sync db satisfies this dependency.
                        missing_deps.append(dep)
        found |= set(other.pkgs)
    not_found = requested - found
    if pargs.needed:
        # Drop packages already present in the pacman cache.
        other = PkgSet(list(check_cache(conf, other)))
    download_queue = DownloadQueue()
    if pargs.db:
        for db in handle.get_syncdbs():
            try:
                siglevel = conf[db.name]['SigLevel'].split()[0]
            except KeyError:
                siglevel = None
            download_sig = needs_sig(siglevel, pargs.sigs, 'Database')
            download_queue.add_db(db, download_sig)
    for pkg in other:
        try:
            siglevel = conf[pkg.db.name]['SigLevel'].split()[0]
        except KeyError:
            siglevel = None
        download_sig = needs_sig(siglevel, pargs.sigs, 'Package')
        urls = set(os.path.join(url, pkg.filename) for url in pkg.db.servers)
        # Limit to MAX_URLS url
        while len(urls) > MAX_URLS:
            urls.pop()
        download_queue.add_sync_pkg(pkg, urls, download_sig)
    return download_queue, not_found, missing_deps
def build_runner(self, pkgname, performdepcheck=True, pkginstall=True):
    """
    A build function, which actually links to others.

    Do not use it unless you re-implement auto_build.

    Returns [status, toinstall(, useabs)].  Status is the makepkg exit
    code or a sentinel: 72337 = AUR deps must be built first,
    72789 = PBError caught, 72101 = IOError caught.
    """
    try:
        pkg = None
        try:
            pkg = self.utils.info([pkgname])
            pkg = pkg[0]
            useabs = False
        except IndexError:
            # Not in the AUR — try to resolve it from the repos (ABS).
            try:
                DS.log.info('{} not found in the AUR, checking in '
                            'ABS'.format(pkgname))
                pyc = pycman.config.init_with_config('/etc/pacman.conf')
                syncpkgs = []
                for j in [i.pkgcache for i in pyc.get_syncdbs()]:
                    syncpkgs.append(j)
                syncpkgs = functools.reduce(lambda x, y: x + y, syncpkgs)
                abspkg = pyalpm.find_satisfier(syncpkgs, pkgname)
                # Fake an AUR-style info dict from the pyalpm package.
                pkg = {'CategoryID': 0, 'Category': abspkg.db.name,
                       'Name': abspkg.name, 'Version': abspkg.version,
                       'Description': abspkg.desc, 'OutOfDate': 0,
                       'NumVotes': 'n/a', 'Arch': abspkg.arch}
                useabs = True
            except AttributeError:
                # abspkg was None — leave pkg unset and fail below.
                pass
        if not pkg:
            raise PBError(_('Package {} not found.').format(pkgname))
        pkgname = pkg['Name']
        DS.fancy_msg(_('Building {}...').format(pkgname))
        self.utils.print_package_search(
            pkg,
            prefix=DS.colors['blue'] + ' ->' + DS.colors['all_off'] +
            DS.colors['bold'] + ' ',
            prefixp=' -> ')
        sys.stdout.write(DS.colors['all_off'])
        if useabs:
            # Pull just this package's directory from the ABS rsync tree.
            DS.fancy_msg(_('Synchronizing the ABS tree...'))
            rsync = ['rsync', '-mrtv', '--no-motd', '--delete-after',
                     '--no-p', '--no-o', '--no-g',
                     # '--delete-excluded',
                     '--include=/{}'.format(pkg['Category']),
                     '--include=/{}/{}'.format(pkg['Category'], pkg['Name']),
                     '--exclude=/{}/*'.format(pkg['Category']),
                     '--exclude=/*',
                     'rsync.archlinux.org::abs/{}/'.format(pkg['Arch']),
                     '.']
            rstatus = subprocess.call(rsync)
            if rstatus > 0:
                raise PBError(_('Failed to synchronize the ABS tree.'))
            os.chdir('./{}/'.format(pkg['Category']))
        else:
            # AUR: download and unpack the source tarball.
            filename = pkgname + '.tar.gz'
            DS.fancy_msg(_('Downloading the tarball...'))
            downloadbytes = self.download(pkg['URLPath'], filename)
            kbytes = int(downloadbytes) / 1000
            DS.fancy_msg2(_('{} kB downloaded').format(kbytes))
            DS.fancy_msg(_('Extracting...'))
            DS.fancy_msg2(_('{} files extracted').format(self.extract(
                filename)))
        os.chdir('./{}/'.format(pkgname))
        if performdepcheck:
            DS.fancy_msg(_('Checking dependencies...'))
            depends = self.prepare_deps(os.path.abspath('./PKGBUILD'))
            deps = self.depcheck(depends)
            pkgtypes = [_('found in system'), _('found in repos'),
                        _('found in the AUR')]
            aurbuild = []
            if not deps:
                DS.fancy_msg2(_('none found'))
            for dpkg, pkgtype in deps.items():
                # I checked for -1 here. Dropped this one as it was
                # handled by the depcheck function already.
                if pkgtype == 2:
                    aurbuild.append(dpkg)
                DS.fancy_msg2('{}: {}'.format(dpkg, pkgtypes[pkgtype]))
            if aurbuild != []:
                # AUR dependencies must be built before this package.
                return [72337, aurbuild]
        mpparams = ''
        if DS.cleanup:
            mpparams += ' -c'
        if DS.uid == 0:
            mpparams += ' --asroot'
        mpstatus = subprocess.call('makepkg -sf' + mpparams,
                                   shell=True)
        if pkginstall:
            # .pkg.tar.xz FTW, but some people change that.
            pkgfilestr = os.path.abspath('./{}-{}-{}.pkg.*')
            # I hope nobody builds VCS packages at 23:5* local. And if
            # they do, they will be caught by the 2nd fallback (crappy
            # packages)
            datep = datetime.date.today().strftime('%Y%m%d')
            att0 = set(glob.glob(pkgfilestr.format(pkgname, pkg['Version'],
                                                   '*')))
            att1 = set(glob.glob(pkgfilestr.format(pkgname, datep, '*')))
            att2 = set(glob.glob(pkgfilestr.format(pkgname, '*', '*')))
            sigf = set(glob.glob(pkgfilestr.format(pkgname, '*',
                                                   '*' + 'sig')))
            if not sigf:
                sigf = set()
            # Drop signature files from the candidate package lists.
            att0 = list(att0 - sigf)
            att1 = list(att1 - sigf)
            att2 = list(att2 - sigf)
            if att0:
                # Standard run, for humans.
                toinstall = [att0, list(sigf)]
            elif att1:
                # Fallback #1, for VCS packages
                toinstall = [att1, list(sigf)]
            elif att2:
                # Fallback #2, for crappy packages
                toinstall = [att2, list(sigf)]
            else:
                toinstall = [None, None]
        else:
            toinstall = [None, None]
        return [mpstatus, toinstall, useabs]
    except PBError as inst:
        DS.fancy_error(str(inst))
        return [72789, None]
    except IOError as inst:
        DS.fancy_error(str(inst))
        return [72101, None]
def is_installed(pkgname):
    """Return the local package satisfying *pkgname*, or None if absent."""
    satisfier = pyalpm.find_satisfier(ldb.pkgcache, pkgname)
    return satisfier
def fetch_runner(pkgnames, preprocessed=False):
    """Run the fetch procedure.

    :param pkgnames: package names to fetch; when *preprocessed* is True,
        a list of package objects instead.
    :param preprocessed: skip the info-lookup step and treat *pkgnames*
        as ready package objects.

    ABS packages are retrieved over rsync, AUR packages via git clone.
    On any PBException the error is printed and the process exits with 1.
    """
    abspkgs = []
    aurpkgs = []
    allpkgs = []
    try:
        if preprocessed:
            allpkgs = pkgnames
            pkgnames = [p.name for p in allpkgs]
        else:
            print(':: ' + _('Fetching package information...'))
            for pkgname in pkgnames:
                pkg = None
                try:
                    pkg = pkgbuilder.utils.info([pkgname])[0]
                except IndexError:
                    try:
                        DS.log.info('{0} not found in the AUR, checking in '
                                    'ABS'.format(pkgname))
                        syncpkgs = []
                        for j in [i.pkgcache for i in DS.pyc.get_syncdbs()]:
                            syncpkgs.append(j)
                        syncpkgs = functools.reduce(lambda x, y: x + y,
                                                    syncpkgs)
                        abspkg = pyalpm.find_satisfier(syncpkgs, pkgname)
                        pkg = pkgbuilder.package.ABSPackage.from_pyalpm(
                            abspkg)
                    except AttributeError:
                        # find_satisfier returned None — leave pkg as None.
                        pass
                allpkgs.append(pkg)
        # BUGFIX: the original raised PackageNotFoundError(pkg.name, ...)
        # with pkg being None, which crashed with AttributeError instead of
        # reporting the missing package.  Pair each lookup result with the
        # name that was requested.
        for name, pkg in zip(pkgnames, allpkgs):
            if not pkg:
                raise pkgbuilder.exceptions.PackageNotFoundError(
                    name, 'fetch')
            if pkg.is_abs:
                abspkgs.append(pkg)
            else:
                aurpkgs.append(pkg)
        if abspkgs:
            print(_(':: Retrieving packages from abs...'))
            pm = pkgbuilder.ui.Progress(len(abspkgs))
            for pkg in abspkgs:
                pm.msg(_('retrieving {0}').format(pkg.name), True)
                rc = rsync(pkg, True)
                if rc > 0:
                    raise pkgbuilder.exceptions.NetworkError(
                        _('Failed to retieve {0} (from ABS/rsync).').format(
                            pkg.name), source='rsync', pkg=pkg, retcode=rc)
        if aurpkgs:
            print(_(':: Retrieving packages from aur...'))
            pm = pkgbuilder.ui.Progress(len(aurpkgs))
            for pkg in aurpkgs:
                pm.msg(_('cloning {0}').format(pkg.packagebase), True)
                clone(pkg.packagebase)
        print(_('Successfully fetched: ') + ' '.join(pkgnames))
    except pkgbuilder.exceptions.PBException as e:
        print(':: ERROR: ' + str(e.msg))
        exit(1)
def installed_version(pkgname):
    """Return the version of the local package satisfying *pkgname*.

    NOTE(review): when nothing satisfies *pkgname*, find_satisfier
    returns None and this raises AttributeError — callers presumably
    check installation first; confirm.
    """
    return pyalpm.find_satisfier(ldb.pkgcache, pkgname).version
def find_local_satisfier(pkgname):
    """Look *pkgname* up in the local db cache; None when unsatisfied."""
    cache = ldb.pkgcache
    return pyalpm.find_satisfier(cache, pkgname)
def test_find_satisfier(package):
    """find_satisfier resolves the fixture's dep string and rejects unknowns."""
    haystack = [package]
    satisfier = pyalpm.find_satisfier(haystack, PKG)
    assert satisfier.name == package.name
    assert pyalpm.find_satisfier(haystack, 'bar') is None
def get_pkg_name_of_depend_name(self, depname):
    """Resolve *depname* to the name of the package in self.all_pkg that satisfies it.

    NOTE(review): raises AttributeError when no package satisfies
    *depname* (find_satisfier returns None) — confirm callers guard this.
    """
    provider = pyalpm.find_satisfier(self.all_pkg, depname)
    return provider.name
def test_find_satisfier_error():
    """Calling find_satisfier with no arguments raises a descriptive TypeError."""
    with pytest.raises(TypeError) as caught:
        pyalpm.find_satisfier()
    message = str(caught.value)
    assert 'takes a Package list and a string' in message
def build_download_queue(alpm, args=None):
    """ Function to build a download queue.
        Needs a pkgname in args

        Returns (download_queue, not_found, missing_deps).
    """
    pargs = parse_args(args)
    '''
    try:
        conf_file = pargs.conf
        alpm = pac.Pac(conf_path=conf_file, callback_queue=None)
    except Exception as ex:
        logging.error("Can't initialize pyalpm: %s", ex)
        return None, None, None
    '''
    handle = alpm.get_handle()
    conf = alpm.get_config()
    requested = set(pargs.pkgs)
    other = PkgSet()
    missing_deps = list()
    found = set()
    # foreign_names = set()
    # not_found = set()
    for pkg in requested:
        other_grp = PkgSet()
        for db in handle.get_syncdbs():
            syncpkg = db.get_pkg(pkg)
            if syncpkg:
                other.add(syncpkg)
            else:
                # Not a package name; maybe it names a group.
                syncgrp = db.read_grp(pkg)
                if syncgrp:
                    found.add(pkg)
                    other_grp |= PkgSet(syncgrp[1])
                else:
                    other |= other_grp
    # foreign_names = requested - set(x.name for x in other)
    # Resolve dependencies.
    if other and not pargs.nodeps:
        # Breadth-first traversal of the dependency graph.
        queue = deque(other)
        local_cache = handle.get_localdb().pkgcache
        syncdbs = handle.get_syncdbs()
        seen = set(queue)
        while queue:
            pkg = queue.popleft()
            for dep in pkg.depends:
                if pyalpm.find_satisfier(local_cache, dep) is None or pargs.alldeps:
                    for db in syncdbs:
                        prov = pyalpm.find_satisfier(db.pkgcache, dep)
                        if prov is not None:
                            other.add(prov)
                            if prov.name not in seen:
                                seen.add(prov.name)
                                queue.append(prov)
                            break
                    else:
                        # No sync db can satisfy this dependency.
                        missing_deps.append(dep)
        found |= set(other.pkgs)
    not_found = requested - found
    if pargs.needed:
        # Drop packages already present in the pacman cache.
        other = PkgSet(list(check_cache(conf, other)))
    download_queue = DownloadQueue()
    if pargs.db:
        for db in handle.get_syncdbs():
            try:
                siglevel = conf[db.name]['SigLevel'].split()[0]
            except KeyError:
                siglevel = None
            download_sig = needs_sig(siglevel, pargs.sigs, 'Database')
            download_queue.add_db(db, download_sig)
    for pkg in other:
        try:
            siglevel = conf[pkg.db.name]['SigLevel'].split()[0]
        except KeyError:
            siglevel = None
        download_sig = needs_sig(siglevel, pargs.sigs, 'Package')
        urls = set(os.path.join(url, pkg.filename) for url in pkg.db.servers)
        # Limit to MAX_URLS url
        while len(urls) > MAX_URLS:
            urls.pop()
        download_queue.add_sync_pkg(pkg, urls, download_sig)
    return download_queue, not_found, missing_deps
def check_conflicts(pkg_list):
    """Walk the dependency closure of *pkg_list*, resolving providers,
    replacements and conflicts, updating transaction.to_add/to_remove and
    showing a warning dialog summarising replacements and conflicts.

    ``depends`` is a list of levels: depends[i+1] collects the providers
    discovered while scanning depends[i]; the loop stops at the first
    empty level.
    """
    depends = [pkg_list]
    warning = ""
    # transaction.get_handle()
    # Collect installed linux3x kernels (names of exactly 7 chars,
    # e.g. "linux310") so kernel-module deps can be matched per kernel.
    pkgs = transaction.handle.get_localdb().search("linux3")
    installed_linux = []
    for i in pkgs:
        if len(i.name) == 7:
            installed_linux.append(i.name)
    for to_install in transaction.to_add:
        if "linux3" in to_install:
            if len(to_install) == 7:
                installed_linux.append(to_install)
    i = 0
    while depends[i]:
        depends.append([])
        for pkg in depends[i]:
            for depend in pkg.depends:
                provide = pyalpm.find_satisfier(
                    transaction.localpkgs.values(), depend)
                if provide:
                    print(i, "local", provide)
                    # Satisfied locally by a *different* package (a provider).
                    if provide.name != common.format_pkg_name(depend):
                        if ("linux" in depend) or ("-module" in depend):
                            # Pull matching module packages for every
                            # installed kernel.
                            for pkg in transaction.syncpkgs.values():
                                if not pkg.name in transaction.localpkgs.keys():
                                    for name in pkg.provides:
                                        for linux in installed_linux:
                                            if linux in pkg.name:
                                                if common.format_pkg_name(depend) == common.format_pkg_name(name):
                                                    depends[i + 1].append(pkg)
                                                    transaction.to_add.append(pkg.name)
                else:
                    provide = pyalpm.find_satisfier(
                        transaction.syncpkgs.values(), depend)
                    if provide:
                        print(i, "sync", provide)
                        if provide.name != common.format_pkg_name(depend):
                            # Dependency met by a provider, not by name.
                            if ("linux" in depend) or ("-module" in depend):
                                for pkg in transaction.syncpkgs.values():
                                    if not pkg.name in transaction.localpkgs.keys():
                                        for name in pkg.provides:
                                            for linux in installed_linux:
                                                if linux in pkg.name:
                                                    if common.format_pkg_name(depend) == common.format_pkg_name(name):
                                                        depends[i + 1].append(pkg)
                                                        transaction.to_add.append(pkg.name)
                            else:
                                # Let the user pick among the providers.
                                to_add_to_depends = choose_provides(depend)
                                print(to_add_to_depends)
                                for pkg in to_add_to_depends:
                                    depends[i + 1].append(pkg)
                                    transaction.to_add.append(pkg.name)
                        else:
                            # Exact-name match: recurse into it next level.
                            depends[i + 1].append(provide)
            # Handle packages this one replaces.
            for replace in pkg.replaces:
                provide = pyalpm.find_satisfier(
                    transaction.localpkgs.values(), replace)
                if provide:
                    if provide.name != pkg.name:
                        if not provide.name in transaction.to_remove:
                            transaction.to_remove.append(provide.name)
                            if warning:
                                warning = warning + "\n"
                            warning = warning + provide.name + " will be replaced by " + pkg.name
            # Handle declared conflicts, both against installed packages
            # and against other packages in this request.
            for conflict in pkg.conflicts:
                provide = pyalpm.find_satisfier(
                    transaction.localpkgs.values(), conflict)
                if provide:
                    if provide.name != pkg.name:
                        if not provide.name in transaction.to_remove:
                            transaction.to_remove.append(provide.name)
                            if warning:
                                warning = warning + "\n"
                            warning = warning + pkg.name + " conflicts with " + provide.name
                provide = pyalpm.find_satisfier(depends[0], conflict)
                if provide:
                    if not common.format_pkg_name(conflict) in transaction.to_remove:
                        if pkg.name in transaction.to_add and common.format_pkg_name(conflict) in transaction.to_add:
                            # Two requested packages conflict: install neither.
                            transaction.to_add.remove(common.format_pkg_name(conflict))
                            transaction.to_add.remove(pkg.name)
                            if warning:
                                warning = warning + "\n"
                            warning = (
                                warning + pkg.name + " conflicts with " +
                                common.format_pkg_name(conflict) +
                                "\nNone of them will be installed")
        i += 1
    # Installed packages that conflict with something we are installing.
    for pkg in transaction.localpkgs.values():
        for conflict in pkg.conflicts:
            provide = pyalpm.find_satisfier(depends[0], conflict)
            if provide:
                if provide.name != pkg.name:
                    if not provide.name in transaction.to_remove:
                        transaction.to_remove.append(pkg.name)
                        if warning:
                            warning = warning + "\n"
                        warning = warning + provide.name + " conflicts with " + pkg.name
    # Sync packages that replace something currently installed.
    for pkg in transaction.syncpkgs.values():
        for replace in pkg.replaces:
            provide = pyalpm.find_satisfier(
                transaction.localpkgs.values(), replace)
            if provide:
                if provide.name != pkg.name:
                    if not pkg.name in transaction.localpkgs.keys():
                        if common.format_pkg_name(replace) in transaction.localpkgs.keys():
                            if not provide.name in transaction.to_remove:
                                transaction.to_remove.append(provide.name)
                                if warning:
                                    warning = warning + "\n"
                                warning = warning + provide.name + " will be replaced by " + pkg.name
                            if not pkg.name in transaction.to_add:
                                transaction.to_add.append(pkg.name)
    print(transaction.to_add, transaction.to_remove)
    if warning:
        transaction.WarningDialog.format_secondary_text(warning)
        response = transaction.WarningDialog.run()
        if response:
            transaction.WarningDialog.hide()