def main():
    """Example for PackageFile(): iterate every package file known to the
    apt cache, print its filename, and report the total count.

    NOTE: Python 2 syntax (print statements).
    """
    # apt_pkg must be initialised before the cache can be opened.
    apt_pkg.init()
    cache = apt_pkg.Cache()
    cnt=0
    for pkgfile in cache.file_list:
        print 'Package-File:', pkgfile.filename
        # The attributes below are available on each PackageFile; left
        # commented out to keep the example output short.
        # print 'Index-Type:', pkgfile.index_type  # 'Debian Package Index'
        # if pkgfile.not_source:
        #     print 'Source: None'
        # else:
        #     if pkgfile.site:
        #         # There is a source, and a site, print the site
        #         print 'Source:', pkgfile.site
        #     else:
        #         # It seems to be a local repository
        #         print 'Source: Local package file'
        # if pkgfile.not_automatic:
        #     # The system won't be updated automatically (eg. experimental)
        #     print 'Automatic: No'
        # else:
        #     print 'Automatic: Yes'
        # print 'id:', pkgfile.id
        # print 'archive:', pkgfile.archive
        # print 'architecture:', pkgfile.architecture
        # print 'label:', pkgfile.label
        # print 'origin:', pkgfile.origin
        # print 'size', pkgfile.size
        # print 'version:', pkgfile.version
        # print
        cnt=cnt+1
    print 'cnt:', cnt
def list_available_updates(self): self.ensure_mirrors(self.config['upstream_repos']) # needed in order to use apt_pkg.version_compare apt_pkg.init() # Store newest version available for each package with updates package_updates = {} for package_name, package_version in self.packages_from_config(): updates = [v for v in self.available_versions_for_package( package_name) if apt_pkg.version_compare( package_version, v) < 0] updates = sorted(set(updates), cmp=apt_pkg.version_compare) if updates: package_updates[package_name] = updates[-1] LOGGER.info("Package %(name)s Config Version: %(current)s" " Updates: %(updates)s" % {'name': package_name, 'current': package_version, 'updates': updates}) # write new config file with updated versions, if requested if package_updates and 'new_config_path' in self.args: for package, version in package_updates.iteritems(): self.config['packages'][package] = version self.config.write(self.args.new_config_path) LOGGER.info("updated config written to %s" % self.args.new_config_path)
def __init__(self, args):
    """Store the parsed CLI namespace and build the helper objects.

    ``args`` must carry a ``config_path`` attribute naming the
    configuration file to load.
    """
    self.config_path = args.config_path
    self.args = args
    self.aptly = Aptly()
    self.config = Config(self.config_path)
    # apt_pkg.version_compare cannot be used before apt_pkg.init() runs.
    apt_pkg.init()
def action_scan(connection, cursor, config_registry):
    '''put systems <sysname> in the database and updates it with the current package state'''
    # Discard any half-open transaction before starting our own.
    connection.rollback()
    sysname = config_registry['hostname']
    sysversion = '%s-%s' % (config_registry['version/version'], config_registry['version/patchlevel'], )
    if config_registry.get('version/security-patchlevel'):
        # Append the security patchlevel when one is configured.
        sysversion = "%s-%s" % (sysversion, config_registry['version/security-patchlevel'], )
    sysrole = config_registry['server/role']
    ldaphostdn = config_registry['ldap/hostdn']
    apt_pkg.init()
    architecture = apt_pkg.Config.find("APT::Architecture")
    log('Starting scan of system %r' % (sysname, ))
    try:
        sql_put_sys_in_systems(cursor, sysname, sysversion, sysrole, ldaphostdn, architecture)
        fake_null = False
    except pgdb.DatabaseError:
        # assume we are connected to a univention-pkgdb < 6.0.7-1 (old schema)
        connection.rollback()
        # retry for old schema (no architecture column)
        sql_put_sys_in_systems_no_architecture(cursor, sysname, sysversion, sysrole, ldaphostdn)
        fake_null = True  # old schema has NOT NULL, thus we have to use '' instead of None
    scan_and_store_packages(cursor, sysname, fake_null)
    connection.commit()
    log('end of scan for system %r' % (sysname, ))
    return 0
def filter_security_updates(klass, pkgs):
    """Filter on security updates.

    :param pkgs: mapping of name -> package object; each object must
        expose ``name`` and ``version`` attributes.
    :returns: tuple ``(security_updates, notfound)``: ``security_updates``
        maps package -> package for entries whose exact version is shipped
        by one of the configured security origins; ``notfound`` lists
        packages absent from the apt cache.
    :raises PkgError: if python-apt is not available.
    """
    if apt_pkg is None:
        raise PkgError("apt_pkg not installed, can't determine security updates")
    apt_pkg.init()
    # Fix: the original created an apt_pkg.Acquire() object here and
    # immediately discarded it — dead code, removed.
    cache = apt_pkg.Cache()
    security_update_origins = klass._security_update_origins()
    security_updates = {}
    notfound = []
    for pkg in list(pkgs.values()):
        try:
            cache_pkg = cache[pkg.name]
        except KeyError:
            # Package unknown to the apt cache.
            notfound.append(pkg)
            continue
        for cache_version in cache_pkg.version_list:
            if pkg.version == cache_version.ver_str:
                for pfile, _ in cache_version.file_list:
                    for origin in security_update_origins:
                        # origin is an (origin-label, archive) pair.
                        if pfile.origin == origin[0] and \
                                pfile.archive == origin[1]:
                            security_updates[pkg] = pkg
                            break
    return (security_updates, notfound)
def main():
    """Command-line entry point: with ``-c`` run a build check for the
    given suite/component/package/arch and exit with its result code."""
    # apt must be initialised before any cache work done by BuildCheck.
    apt_pkg.init()

    parser = OptionParser()
    parser.add_option("-c", "--check", action="store_true", dest="check",
                      default=False,
                      help="check if the given package name can be built (returns 1 if not, 8 if dep-wait, 0 if build should be scheduled)")
    parser.add_option("--force-buildcheck", action="store_true",
                      dest="force_buildcheck", default=False,
                      help="enforce a check for build dependencies")
    (options, args) = parser.parse_args()

    if not options.check:
        print("Run with -h for a list of available command-line options!")
        return

    if len(args) != 4:
        print("Invalid number of arguments (need suite, component, package-name, arch)")
        sys.exit(6)
    suite, comp, package_name, arch = args
    checker = BuildCheck(suite)
    code = checker.check_build(comp, package_name, arch,
                               force_buildcheck=options.force_buildcheck)
    if code == 1:
        print("There is no need to build this package.")
    if code == 0:
        print("We should (re)build this package.")
    # Propagate the check result as the process exit status.
    sys.exit(code)
def configure_cache(self):
    """
    Configures the cache based on the most current repository information.
    """
    # Refresh sources.list and apt.conf first so that the subsequent
    # apt_pkg.init() picks up the newest repository configuration.
    self.update_sources_list()
    self.update_apt_conf()
    apt_pkg.init()
def main():
    """Smoke-test apt_pkg.PackageRecords by looking up the candidate
    version of every package in the cache and printing progress."""
    apt_pkg.init()
    cache = apt_pkg.Cache()
    depcache = apt_pkg.DepCache(cache)
    depcache.init()
    print("Running PkgRecords test on all packages:")
    for count, pkg in enumerate(cache.packages, start=1):
        records = apt_pkg.PackageRecords(cache)
        if not pkg.version_list:
            # no available version, cruft
            continue
        candidate = depcache.get_candidate_ver(pkg)
        if not candidate:
            continue
        # First (file, index) pair locates the record for the candidate.
        pkg_file, index = candidate.file_list.pop(0)
        if records.lookup((pkg_file, index)):
            # Touch a couple of record attributes to exercise the lookup.
            _filename = records.filename
            _long_desc = records.long_desc
        print("\r%i/%i=%.3f%% " % (
            count, cache.package_count,
            (float(count) / float(cache.package_count) * 100)))
def __init__(self):
    """Initialization method: prepare the apt_pkg subsystem.

    (Original docstring was bilingual: [es] "Método de inicialización" /
    [en] "Initialization method".)
    """
    # apt_pkg.init() loads apt's configuration and system state; it must
    # run before any other apt_pkg call.
    apt_pkg.init()
def _get_packages(self, package_filter=None, package_version_filter=None):
    """Return all packages matching the given filters.

    ``package_filter``
        An SQLAlchemy filter on the package.

    ``package_version_filter``
        An SQLAlchemy filter on the package version.
    """
    # apt_pkg.CompareVersions is used later, so init() must be called now.
    apt_pkg.init()
    log.debug('Getting package list')
    query = meta.session.query(Package)
    if package_filter is not None:
        log.debug('Applying package list filter')
        query = query.filter(package_filter)
    if package_version_filter is not None:
        log.debug('Applying package version list filter')
        # Join packages to their versions before applying the filter.
        query = query.filter(Package.id == PackageVersion.package_id) \
                     .filter(package_version_filter)
    return query.all()
def perform_install(self):
    """Install the requested package through python-apt, driving the Qt
    progress widgets; quits the dialog on success or failure.

    NOTE: contains a Python 2 print statement near the end.
    """
    # Disable the buttons while the install is running.
    self.allowBtn.setEnabled(False)
    self.denyBtn.setEnabled(False)
    self.status_label.setText('Reading package cache')
    QtGui.qApp.processEvents()
    apt_pkg.init()
    cache = apt.cache.Cache(self.op_progress)
    pkg = None
    # NOTE(review): package_name appears to be a module-level global —
    # confirm where it is defined.
    try:
        pkg = cache[package_name]
    except KeyError:
        # Not in the local cache — refresh the package lists and retry.
        self.status_label.setText('Package not found: updating cache')
        QtGui.qApp.processEvents()
        cache.update(self.op_progress)
        try:
            pkg = cache[package_name]
        except KeyError:
            self.show_quit('Package not found!')
    if pkg.is_installed:
        # Nothing to do; result=0 signals success.
        self.show_quit('Package already installed!', result=0)
    self.status_label.setText('Marking for install')
    pkg.mark_install()
    self.status_label.setText('Installing')
    QtGui.qApp.processEvents()
    try:
        aprogress = GUIAcquireProgress(self.pbar, self.status_label)
        iprogress = GUIInstallProgress(self.pbar, self.status_label)
        cache.commit(aprogress, iprogress)
    except Exception:
        import traceback; traceback.print_exc()
        self.show_quit('Error installing package!')
    print "Installation successful, back to VisTrails..."
    self.show_quit("Success, exiting...", result=0)
def main():
    """Smoke-test apt_pkg.PackageRecords over every package in the cache.

    NOTE: Python 2 syntax and the old CamelCase python-apt API.
    """
    apt_pkg.init()
    cache = apt_pkg.Cache()
    depcache = apt_pkg.DepCache(cache)
    depcache.Init()
    i=0
    print "Running PkgRecords test on all packages:"
    for pkg in cache.Packages:
        i += 1
        records = apt_pkg.PackageRecords(cache)
        if len(pkg.VersionList) == 0:
            #print "no available version, cruft"
            continue
        version = depcache.GetCandidateVer(pkg)
        if not version:
            continue
        # First (file, index) pair locates the record for this version.
        file, index = version.FileList.pop(0)
        if records.Lookup((file, index)):
            #print records.FileName
            x = records.FileName
            y = records.LongDesc
            pass
        # Trailing comma: overwrite the same line with a progress counter.
        print "\r%i/%i=%.3f%% " % (i, cache.PackageCount, (float(i) / float(cache.PackageCount) * 100)),
def __init__(self):
    """Initialise the object by setting up the apt_pkg subsystem.

    (The original docstring was an empty bilingual [es]/[en] template.)
    """
    # apt_pkg.init() must run before any other apt_pkg call.
    apt_pkg.init()
def test_remove_package(self):
    """
    Removes a package that has been uploaded to Debian.

    Deletes the whole package (and its versions) when no repository
    version is newer than the uploaded one; otherwise removes only the
    versions that are not strictly newer.
    """
    source = self.changes['Source']
    log.debug('Checking whether package %s is in the repository' % source)
    package = meta.session.query(Package).filter_by(name=source).first()
    if package is None:
        # The package is not in the repository. This will happen the most often.
        log.debug('Package is not in the repository')
        return
    # Initialise apt_pkg
    apt_pkg.init()
    # Versions strictly newer than the uploaded one must be kept.
    keep_package_versions = []
    for package_version in package.package_versions:
        if apt_pkg.VersionCompare(self.changes['Version'], package_version.version) < 0:
            keep_package_versions.append(package_version.version)
    if len(keep_package_versions) == 0:
        # Get rid of the whole package.
        self._remove_package_versions(package.package_versions)
        log.debug('Deleting package database entry: %s' % package.name)
        meta.session.delete(package)
    else:
        # Only remove certain package versions.
        for package_version in package.package_versions:
            # BUG FIX: keep_package_versions holds version *strings*, so
            # the version string must be compared — the old code tested
            # the PackageVersion object itself, which never matched and
            # deleted even the versions that should have been kept.
            if package_version.version not in keep_package_versions:
                meta.session.delete(package_version)
    meta.session.commit()
def _readconf(self):
    """Load the dak configuration into ``self.Cnf``, following pointer
    configs and group-specific overrides, then rebind lookup helpers."""
    apt_pkg.init()
    self.Cnf = apt_pkg.Configuration()
    apt_pkg.read_config_file_isc(self.Cnf, which_conf_file())

    # Check whether our dak.conf was the real one or
    # just a pointer to our main one
    res = socket.gethostbyaddr(socket.gethostname())
    conffile = self.Cnf.get("Config::" + res[0] + "::DakConfig")
    if conffile:
        apt_pkg.read_config_file_isc(self.Cnf, conffile)

    # Read group-specific options
    if 'ByGroup' in self.Cnf:
        bygroup = self.Cnf.subtree('ByGroup')
        # All groups the current process belongs to.
        groups = set([os.getgid()])
        groups.update(os.getgroups())
        for group in bygroup.list():
            gid = grp.getgrnam(group).gr_gid
            if gid in groups:
                # First matching group wins; an empty value means
                # "no extra config for this group".
                if bygroup.get(group):
                    apt_pkg.read_config_file_isc(self.Cnf, bygroup[group])
                break

    # Rebind some functions
    # TODO: Clean this up
    self.get = self.Cnf.get
    self.subtree = self.Cnf.subtree
    self.value_list = self.Cnf.value_list
    self.find = self.Cnf.find
    self.find_b = self.Cnf.find_b
    self.find_i = self.Cnf.find_i
def __init__(self):
    '''
    Initialise the apt cache wrapper: set up apt_pkg, open the base
    apt.Cache with a silent progress handler, and create the per-package
    status map.
    '''
    apt_pkg.init()
    # OpProgress() is the no-output progress reporter.
    apt.Cache.__init__(self, apt.progress.base.OpProgress())
    # Maps package name -> status; filled in elsewhere.
    self.packages_status = {}
def openstack_upgrade_available(package):
    """
    Determines if an OpenStack upgrade is available from installation
    source, based on version of installed package.

    :param package: str: Name of installed package.
    :returns: bool:    : Returns True if configured installation source offers
                         a newer version of package.
    """
    import apt_pkg as apt
    src = config('openstack-origin')
    cur_vers = get_os_version_package(package)
    if "swift" in package:
        # Swift versions are numbered independently of the OpenStack
        # release, so resolve them through the codename.
        codename = get_os_codename_install_source(src)
        avail_vers = get_os_version_codename_swift(codename)
    else:
        avail_vers = get_os_version_install_source(src)
    apt.init()
    if "swift" in package:
        major_cur_vers = cur_vers.split('.', 1)[0]
        major_avail_vers = avail_vers.split('.', 1)[0]
        major_diff = apt.version_compare(major_avail_vers, major_cur_vers)
        # NOTE(review): ``avail_vers > cur_vers`` is a plain lexicographic
        # string comparison, not a Debian version comparison — confirm
        # this is intended for swift version strings.
        return avail_vers > cur_vers and (major_diff == 1 or major_diff == 0)
    # version_compare > 0 means the first argument is newer.
    return apt.version_compare(avail_vers, cur_vers) == 1
def action_fill_testdb(connection, cursor, config_registry):
    '''Fill the test database with fake systems (original docstring:
    "Fülle Testdatenbank"). Creates testsystem0001..testsystem1499 using
    the local system's version/role data. Python 2 only (``xrange``).'''
    # Discard any half-open transaction before starting our own.
    connection.rollback()
    sysversion = '%s-%s' % (config_registry['version/version'], config_registry['version/patchlevel'], )
    if config_registry.get('version/security-patchlevel'):
        sysversion = "%s-%s" % (sysversion, config_registry['version/security-patchlevel'], )
    sysrole = config_registry['server/role']
    ldaphostdn = config_registry['ldap/hostdn']
    apt_pkg.init()
    architecture = apt_pkg.Config.find("APT::Architecture")
    log('start fill of testdb ')
    for sysname in ['testsystem%04d' % (i, ) for i in xrange(1, 1500)]:
        try:
            sql_put_sys_in_systems(cursor, sysname, sysversion, sysrole, ldaphostdn, architecture)
            fake_null = False
        except pgdb.DatabaseError:
            # assume we are connected to a univention-pkgdb < 6.0.7-1 (old schema)
            connection.rollback()
            # retry for old schema (no architecture column)
            sql_put_sys_in_systems_no_architecture(cursor, sysname, sysversion, sysrole, ldaphostdn)
            fake_null = True  # old schema has NOT NULL, thus we have to use '' instead of None
        scan_and_store_packages(cursor, sysname, fake_null)
        # Commit per test system so partial progress survives errors.
        connection.commit()
    log('end of fill testdb')
    return 0
def rabbit_version():
    """Return the upstream version of the installed rabbitmq-server
    package, or None when it is not installed."""
    apt.init()
    pkg = apt.Cache()['rabbitmq-server']
    if not pkg.current_ver:
        # Known to apt but not installed.
        return None
    return apt.upstream_version(pkg.current_ver.ver_str)
def init():
    """Initialise the process: lower scheduling priority, set up apt,
    and keep apt's caches in memory."""
    # FIXME: do a ionice here too?
    os.nice(19)
    apt_pkg.init()
    # Build the apt caches in memory only: this avoids racing against a
    # concurrent regeneration of the on-disk pkgcache file.
    apt_pkg.config.set("Dir::Cache::pkgcache", "")
def __init__(self):
    """Build thin callable adapters over an apt_pkg cache/depcache pair."""
    apt_pkg.init()
    cache = apt_pkg.Cache(progress=None)
    depcache = apt_pkg.DepCache(cache)

    def _all_packages():
        # Iterable of every package object in the cache.
        return cache.packages

    def _by_name(pkg_name):
        # None when the name is unknown to the cache.
        if pkg_name in cache:
            return cache[pkg_name]
        return None

    self.find_packages = _all_packages
    self.find_candidate_version = depcache.get_candidate_ver
    self.find_package_by_name = _by_name
    self.is_auto_installed = depcache.is_auto_installed
def get_ceph_version():
    """Return the upstream version of the installed ceph package, or
    None when ceph is not installed."""
    apt.init()
    cache = apt.Cache()
    pkg = cache['ceph']
    if not pkg.current_ver:
        # Known to apt but not installed.
        return None
    return apt.upstream_version(pkg.current_ver.ver_str)
def apt_cache(in_memory=True):
    """Build and return an apt cache.

    With ``in_memory`` (the default) the on-disk cache files are
    disabled so the cache is built purely in memory.
    """
    import apt_pkg
    apt_pkg.init()
    if in_memory:
        for cache_key in ("Dir::Cache::pkgcache", "Dir::Cache::srcpkgcache"):
            apt_pkg.config.set(cache_key, "")
    return apt_pkg.Cache()
def __init__(self):
    """Build the mintlocale Install/Remove Languages window: load the
    language-pack definitions, open the apt cache, load the Glade UI,
    wire up signals, and populate the language list."""
    self.selected_language = None
    self.selected_language_packs = None
    self.language_packs = []
    # Each line of the data file is "category:language:dependency:package".
    with open("/usr/share/linuxmint/mintlocale/language_packs") as f:
        for line in f:
            line = line.strip()
            columns = line.split(":")
            if len(columns) == 4:
                (category, language, dependency, package) = columns
                if package.endswith("-"):
                    # A trailing '-' is a template: expand to the LANG and
                    # LANG-COUNTRY placeholder variants.
                    self.language_packs.append(LanguagePack(category, language, dependency, "%sLANG" % package))
                    self.language_packs.append(LanguagePack(category, language, dependency, "%sLANG-COUNTRY" % package))
                else:
                    self.language_packs.append(LanguagePack(category, language, dependency, package))

    apt_pkg.init()
    self.cache = apt_pkg.Cache(None)
    self.cache_updated = False

    self.builder = Gtk.Builder()
    self.builder.set_translation_domain("mintlocale")
    self.builder.add_from_file('/usr/share/linuxmint/mintlocale/install_remove.ui')
    self.window = self.builder.get_object("main_window")
    self.builder.get_object("main_window").connect("destroy", Gtk.main_quit)
    self.treeview = self.builder.get_object("treeview_language_list")
    self.builder.get_object("main_window").set_title(_("Install / Remove Languages"))
    self.builder.get_object("main_window").set_icon_name("preferences-desktop-locale")
    # NOTE(review): "destroy" is connected twice (also a few lines above);
    # Gtk.main_quit will be invoked twice on close — probably harmless but
    # worth confirming.
    self.builder.get_object("main_window").connect("destroy", Gtk.main_quit)
    self.builder.get_object("button_close").connect("clicked", Gtk.main_quit)
    self.builder.get_object("button_install").connect("clicked", self.button_install_clicked)
    self.builder.get_object("button_add").connect("clicked", self.button_add_clicked)
    self.builder.get_object("button_remove").connect("clicked", self.button_remove_clicked)

    # Column 2 of the model holds the flag pixbuf.
    ren = Gtk.CellRendererPixbuf()
    column = Gtk.TreeViewColumn("Flags", ren)
    column.add_attribute(ren, "pixbuf", 2)
    ren.set_property('ypad', 5)
    ren.set_property('xpad', 10)
    self.treeview.append_column(column)

    # Column 0 holds the language markup.
    ren = Gtk.CellRendererText()
    column = Gtk.TreeViewColumn("Languages", ren)
    column.add_attribute(ren, "markup", 0)
    self.treeview.append_column(column)

    # Column 3 holds the installed-packs markup.
    ren = Gtk.CellRendererText()
    column = Gtk.TreeViewColumn("Packs", ren)
    column.add_attribute(ren, "markup", 3)
    ren.set_property('xpad', 10)
    self.treeview.append_column(column)

    self.build_lang_list()
def exec_timeout_supported():
    """Return whether the installed telegraf supports exec timeouts.

    A telegraf whose installed version string contains '0.12' is treated
    as not supporting a timeout; anything else — including a telegraf
    that is known to apt but not installed — is assumed to support it.

    :returns: bool
    """
    apt_pkg.init()
    apt_pkg.config.set("Dir::Cache::pkgcache", "")
    cache = apt_pkg.Cache()
    pkg = cache['telegraf']
    timeout_support = True
    # Robustness fix: pkg.current_ver is None when telegraf is known to
    # apt but not installed; the old code crashed with AttributeError.
    if pkg.current_ver and '0.12' in pkg.current_ver.ver_str:
        timeout_support = False
    return timeout_support
def _test():
    """Self-test: print a broad sample of apt.Package attributes for
    apt-utils, then randomly mark packages for install/removal to
    exercise the depcache. Mutates the system's in-memory apt state."""
    print("Self-test for the Package modul")
    import random

    apt_pkg.init()
    progress = apt.progress.text.OpProgress()
    cache = apt.Cache(progress)
    pkg = cache["apt-utils"]
    # Exercise the main candidate/installed attribute accessors.
    print("Name: %s " % pkg.name)
    print("ID: %s " % pkg.id)
    print("Priority (Candidate): %s " % pkg.candidate.priority)
    print("Priority (Installed): %s " % pkg.installed.priority)
    print("Installed: %s " % pkg.installed.version)
    print("Candidate: %s " % pkg.candidate.version)
    print("CandidateDownloadable: %s" % pkg.candidate.downloadable)
    print("CandidateOrigins: %s" % pkg.candidate.origins)
    print("SourcePkg: %s " % pkg.candidate.source_name)
    print("Section: %s " % pkg.section)
    print("Summary: %s" % pkg.candidate.summary)
    print("Description (formatted) :\n%s" % pkg.candidate.description)
    print("Description (unformatted):\n%s" % pkg.candidate.raw_description)
    print("InstalledSize: %s " % pkg.candidate.installed_size)
    print("PackageSize: %s " % pkg.candidate.size)
    print("Dependencies: %s" % pkg.installed.dependencies)
    print("Recommends: %s" % pkg.installed.recommends)
    for dep in pkg.candidate.dependencies:
        print(
            ",".join("%s (%s) (%s) (%s)" % (o.name, o.version, o.relation,
                                            o.pre_depend)
                     for o in dep.or_dependencies)
        )
    print("arch: %s" % pkg.candidate.architecture)
    print("homepage: %s" % pkg.candidate.homepage)
    print("rec: ", pkg.candidate.record)
    print(cache["2vcard"].get_changelog())
    # Randomly mark upgradable packages for install, with and without
    # the automatic problem fixer.
    for i in True, False:
        print("Running install on random upgradable pkgs with AutoFix: ", i)
        for pkg in cache:
            if pkg.is_upgradable:
                if random.randint(0, 1) == 1:
                    pkg.mark_install(i)
        print("Broken: %s " % cache._depcache.broken_count)
        print("InstCount: %s " % cache._depcache.inst_count)
    print()
    # get a new cache
    for i in True, False:
        print("Randomly remove some packages with AutoFix: %s" % i)
        cache = apt.Cache(progress)
        for name in cache.keys():
            if random.randint(0, 1) == 1:
                try:
                    cache[name].mark_delete(i)
                except SystemError:
                    # Removal can fail for essential/broken packages.
                    print("Error trying to remove: %s " % name)
        print("Broken: %s " % cache._depcache.broken_count)
        print("DelCount: %s " % cache._depcache.del_count)
def main():
    """DepCache stress test: try to mark every package for install, undo
    the selection, then attempt a full Upgrade and DistUpgrade.

    NOTE: Python 2 syntax and the old CamelCase python-apt API
    (GetCache/GetDepCache/GetPkgProblemResolver).
    """
    apt_pkg.init()
    cache = apt_pkg.GetCache()
    depcache = apt_pkg.GetDepCache(cache)
    depcache.Init()
    i=0
    all=cache.PackageCount
    print "Running DepCache test on all packages"
    print "(trying to install each and then mark it keep again):"
    # first, get all pkgs
    for pkg in cache.Packages:
        i += 1
        x = pkg.Name
        # then get each version
        ver =depcache.GetCandidateVer(pkg)
        if ver != None:
            depcache.MarkInstall(pkg)
            if depcache.BrokenCount > 0:
                fixer = apt_pkg.GetPkgProblemResolver(depcache)
                fixer.Clear(pkg)
                fixer.Protect(pkg)
                # we first try to resolve the problem
                # with the package that should be installed
                # protected
                try:
                    fixer.Resolve(True)
                except SystemError:
                    # the pkg seems to be broken, the
                    # resolver returns a exception
                    fixer.Clear(pkg)
                    fixer.Resolve(True)
                if not depcache.MarkedInstall(pkg):
                    print "broken in archive: %s " % pkg.Name
                fixer = None
            if depcache.InstCount == 0:
                if depcache.IsUpgradable(pkg):
                    print "Error marking %s for install" % x
            # Undo all pending install/upgrade marks.
            for p in cache.Packages:
                if depcache.MarkedInstall(p) or depcache.MarkedUpgrade(p):
                    depcache.MarkKeep(p)
            if depcache.InstCount != 0:
                print "Error undoing the selection for %s" % x
        # Trailing comma: overwrite the same line with a progress counter.
        print "\r%i/%i=%.3f%% " % (i,all,(float(i)/float(all)*100)),
    print
    print "Trying Upgrade:"
    depcache.Upgrade()
    print "To install: %s " % depcache.InstCount
    print "To remove: %s " % depcache.DelCount
    print "Kept back: %s " % depcache.KeepCount
    print "Trying DistUpgrade:"
    depcache.Upgrade(True)
    print "To install: %s " % depcache.InstCount
    print "To remove: %s " % depcache.DelCount
    print "Kept back: %s " % depcache.KeepCount
def _test():
    """Internal test code: exercise the Cache wrapper (signals, upgrade,
    archive fetching) and the FilteredCache in both construction modes.

    NOTE: Python 2 syntax (print statements).
    """
    print "Cache self test"
    apt_pkg.init()
    cache = Cache(apt.progress.text.OpProgress())
    cache.connect("cache_pre_change", cache_pre_changed)
    cache.connect("cache_post_change", cache_post_changed)
    print ("aptitude" in cache)
    pkg = cache["aptitude"]
    print pkg.name
    print len(cache)

    for pkgname in cache.keys():
        assert cache[pkgname].name == pkgname

    cache.upgrade()
    changes = cache.get_changes()
    print len(changes)
    for pkg in changes:
        assert pkg.name

    # see if fetching works
    for dirname in ["/tmp/pytest", "/tmp/pytest/partial"]:
        if not os.path.exists(dirname):
            os.mkdir(dirname)
    apt_pkg.config.set("Dir::Cache::Archives", "/tmp/pytest")
    pm = apt_pkg.PackageManager(cache._depcache)
    fetcher = apt_pkg.Acquire(apt.progress.text.AcquireProgress())
    cache._fetch_archives(fetcher, pm)
    #sys.exit(1)

    print "Testing filtered cache (argument is old cache)"
    filtered = FilteredCache(cache)
    filtered.cache.connect("cache_pre_change", cache_pre_changed)
    filtered.cache.connect("cache_post_change", cache_post_changed)
    filtered.cache.upgrade()
    filtered.set_filter(MarkedChangesFilter())
    print len(filtered)
    for pkgname in filtered.keys():
        # NOTE(review): this indexes with ``pkg`` (leftover from the loop
        # above) rather than ``pkgname`` — the parallel loop at the end of
        # this function uses ``pkgname``; likely a typo to confirm.
        assert pkgname == filtered[pkg].name
    print len(filtered)

    print "Testing filtered cache (no argument)"
    filtered = FilteredCache(progress=apt.progress.base.OpProgress())
    filtered.cache.connect("cache_pre_change", cache_pre_changed)
    filtered.cache.connect("cache_post_change", cache_post_changed)
    filtered.cache.upgrade()
    filtered.set_filter(MarkedChangesFilter())
    print len(filtered)
    for pkgname in filtered.keys():
        assert pkgname == filtered[pkgname].name
    print len(filtered)
def get_update_packages():
    """
    Return a list of dicts describing available package updates.

    Each dict has keys "name", "security" (bool), "current_version"
    and "candidate_version". Exits the process on cache/marking errors.
    """
    pkgs = []
    apt_pkg.init()
    # force apt to build its caches in memory for now to make sure
    # that there is no race when the pkgcache file gets re-generated
    apt_pkg.config.set("Dir::Cache::pkgcache", "")
    try:
        cache = apt_pkg.Cache(apt.progress.base.OpProgress())
    except SystemError as e:
        sys.stderr.write("Error: Opening the cache (%s)" % e)
        sys.exit(-1)
    depcache = apt_pkg.DepCache(cache)
    # read the pin files (must happen before depcache.init())
    depcache.read_pinfile()
    # read the synaptic pins too
    if os.path.exists(SYNAPTIC_PINFILE):
        depcache.read_pinfile(SYNAPTIC_PINFILE)
    # init the depcache
    depcache.init()
    try:
        saveDistUpgrade(cache, depcache)
    except SystemError as e:
        sys.stderr.write("Error: Marking the upgrade (%s)" % e)
        sys.exit(-1)
    for pkg in cache.packages:
        if not (depcache.marked_install(pkg) or depcache.marked_upgrade(pkg)):
            continue
        inst_ver = pkg.current_ver
        cand_ver = depcache.get_candidate_ver(pkg)
        if cand_ver == inst_ver:
            # Package does not have available update
            continue
        if not inst_ver or not cand_ver:
            # Some packages are not installed(i.e. linux-headers-3.2.0-77)
            # skip these updates
            continue
        if pkg.name in BLACKLIST:
            # skip the package in blacklist
            continue
        record = {
            "name": pkg.name,
            "security": isSecurityUpgrade(cand_ver),
            "current_version": inst_ver.ver_str,
            "candidate_version": cand_ver.ver_str,
        }
        pkgs.append(record)
    return pkgs
def main():
    """DepCache stress test: try to mark every package for install, undo
    the selection, then attempt a full upgrade and DistUpgrade.

    NOTE: Python 2 print statements, but the modern snake_case
    python-apt API.
    """
    apt_pkg.init()
    cache = apt_pkg.Cache()
    depcache = apt_pkg.DepCache(cache)
    depcache.init()
    i = 0
    all = cache.package_count
    print "Running DepCache test on all packages"
    print "(trying to install each and then mark it keep again):"
    # first, get all pkgs
    for pkg in cache.packages:
        i += 1
        x = pkg.name
        # then get each version
        ver = depcache.get_candidate_ver(pkg)
        if ver is not None:
            depcache.mark_install(pkg)
            if depcache.broken_count > 0:
                fixer = apt_pkg.ProblemResolver(depcache)
                fixer.clear(pkg)
                fixer.protect(pkg)
                # we first try to resolve the problem
                # with the package that should be installed
                # protected
                try:
                    fixer.resolve(True)
                except SystemError:
                    # the pkg seems to be broken, the
                    # resolver returns a exception
                    fixer.clear(pkg)
                    fixer.resolve(True)
                if not depcache.marked_install(pkg):
                    print "broken in archive: %s " % pkg.name
                fixer = None
            if depcache.inst_count == 0:
                if depcache.is_upgradable(pkg):
                    print "Error marking %s for install" % x
            # Undo all pending install/upgrade marks.
            for p in cache.packages:
                if depcache.marked_install(p) or depcache.marked_upgrade(p):
                    depcache.mark_keep(p)
            if depcache.inst_count != 0:
                print "Error undoing the selection for %s" % x
        # Trailing comma: overwrite the same line with a progress counter.
        print "\r%i/%i=%.3f%% " % (i, all, (float(i) / float(all) * 100)),
    print
    print "Trying upgrade:"
    depcache.upgrade()
    print "To install: %s " % depcache.inst_count
    print "To remove: %s " % depcache.del_count
    print "Kept back: %s " % depcache.keep_count
    print "Trying DistUpgrade:"
    depcache.upgrade(True)
    print "To install: %s " % depcache.inst_count
    print "To remove: %s " % depcache.del_count
    print "Kept back: %s " % depcache.keep_count
def get_os_codename_package(package, fatal=True):
    '''Derive OpenStack release codename from an installed package.

    :param package: str name of the installed package to inspect.
    :param fatal: bool when True (default), unknown/uninstalled packages
        or unmappable versions call error_out(); when False, return None.
    :returns: codename string, or None (non-fatal failures).
    '''
    import apt_pkg as apt
    apt.init()

    # Tell apt to build an in-memory cache to prevent race conditions (if
    # another process is already building the cache).
    apt.config.set("Dir::Cache::pkgcache", "")

    cache = apt.Cache()

    try:
        pkg = cache[package]
    except KeyError:
        # Fix: narrowed from a bare ``except:`` — a cache miss raises
        # KeyError, and the bare clause also swallowed SystemExit and
        # KeyboardInterrupt.
        if not fatal:
            return None
        # the package is unknown to the current apt cache.
        e = 'Could not determine version of package with no installation '\
            'candidate: %s' % package
        error_out(e)

    if not pkg.current_ver:
        if not fatal:
            return None
        # package is known, but no version is currently installed.
        e = 'Could not determine version of uninstalled package: %s' % package
        error_out(e)

    vers = apt.upstream_version(pkg.current_ver.ver_str)

    try:
        if 'swift' in pkg.name:
            # Swift uses its own version numbering scheme.
            swift_vers = vers[:5]
            if swift_vers not in SWIFT_CODENAMES:
                # Deal with 1.10.0 upward
                swift_vers = vers[:6]
            return SWIFT_CODENAMES[swift_vers]
        else:
            vers = vers[:6]
            return OPENSTACK_CODENAMES[vers]
    except KeyError:
        e = 'Could not determine OpenStack codename for version %s' % vers
        error_out(e)
def setUp(self): basedir = os.path.abspath(os.path.dirname(__file__)) # setup apt_pkg config apt_pkg.init() apt_pkg.config.set("APT::Architecture", "amd64") apt_pkg.config.set("Dir::Etc", basedir) # TODO: /dev/null is not a dir, perhaps find something better apt_pkg.config.set("Dir::Etc::sourceparts", "/dev/null") # setup lists dir if not os.path.exists("./tmp/partial"): os.makedirs("./tmp/partial") apt_pkg.config.set("Dir::state::lists", "./tmp") # create artifical line deb_line = "deb file:%s/data/fake-packages/ /\n" % basedir with open("fetch_sources.list","w") as fobj: fobj.write(deb_line) apt_pkg.config.set("Dir::Etc::sourcelist", "fetch_sources.list") apt_pkg.config.clear("APT::Update::Post-Invoke") apt_pkg.config.clear("APT::Update::Post-Invoke-Success")
def main(packages: Sequence[str], env: Mapping[str, str]) -> None:
    """Download the packages specified on the command-line.

    Resolves each package's dependencies, downloads everything, chowns
    the files to TARGET_UID/TARGET_GID from *env*, and finally touches a
    witness file to signal completion.
    """
    witness_file = pathlib.Path('/repositories/.witness')
    # Drop any stale witness left over from a previous run.
    try:
        witness_file.unlink()
    except FileNotFoundError:
        pass

    add_external_repositories(env['SALT_VERSION'])
    apt_pkg.init()
    pkg_cache = apt.cache.Cache()

    # Merge the dependency closures of all requested packages.
    to_download = {}
    for requested in packages:
        to_download.update(get_package_deps(requested, pkg_cache))

    for candidate in to_download.values():
        downloaded = download_package(candidate)
        os.chown(downloaded, int(env['TARGET_UID']), int(env['TARGET_GID']))

    # Signal success and hand the witness to the target user.
    witness_file.touch()
    os.chown(witness_file, int(env['TARGET_UID']), int(env['TARGET_GID']))
def __init__(self, upgrades, security_upgrades, reboot_required, upg_path):
    """Build the upgrade dialog.

    :param upgrades: packages with pending upgrades
    :param security_upgrades: subset that are security upgrades
    :param reboot_required: whether a reboot is pending
    :param upg_path: upgrade tool path/mode used by call_upgrade
    """
    QWidget.__init__(self)
    self.upgrades = upgrades
    self.security_upgrades = security_upgrades
    self.upg_path = upg_path
    self.reboot_required = reboot_required
    apt_pkg.init()
    try:
        self.cache = apt_pkg.Cache()
    except SystemError as e:
        # Cache could not be opened — nothing sensible to show; bail out.
        sys.stderr.write(_("Error: Opening the cache (%s)") % e)
        sys.exit(-1)
    self.depcache = apt_pkg.DepCache(self.cache)
    self.records = apt_pkg.PackageRecords(self.cache)
    # Build widgets first; the buttonBox only exists after initUI().
    self.initUI()
    self.buttonBox.rejected.connect(self.call_reject)
    self.buttonBox.clicked.connect(self.call_upgrade)
def init():
    """ Initialize. Sets up database connection, parses commandline arguments.

    @attention: This function may run B{within sudo}

    NOTE: Python 2 syntax (print statement).
    """
    global Cnf, Options

    apt_pkg.init()

    Cnf = utils.get_conf()

    Arguments = [('a', "automatic", "Edit-Transitions::Options::Automatic"),
                 ('h', "help", "Edit-Transitions::Options::Help"),
                 ('e', "edit", "Edit-Transitions::Options::Edit"),
                 ('i', "import", "Edit-Transitions::Options::Import", "HasArg"),
                 ('c', "check", "Edit-Transitions::Options::Check"),
                 ('s', "sudo", "Edit-Transitions::Options::Sudo"),
                 ('n', "no-action", "Edit-Transitions::Options::No-Action")]

    # Ensure every option key exists so later lookups cannot fail.
    for i in [ "automatic", "help", "no-action", "edit", "import", "check", "sudo" ]:
        key = "Edit-Transitions::Options::%s" % i
        if key not in Cnf:
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Edit-Transitions::Options")
    if Options["help"]:
        usage()

    # Non-dak users must go through sudo.
    username = utils.getusername()
    if username != "dak":
        print "Non-dak user: %s" % username
        Options["sudo"] = "y"

    # Initialise DB connection
    DBConn()
def filter_installed_packages(packages):
    """Returns a list of packages that require installation"""
    apt_pkg.init()

    # Tell apt to build an in-memory cache to prevent race conditions (if
    # another process is already building the cache).
    apt_pkg.config.set("Dir::Cache::pkgcache", "")

    cache = apt_pkg.Cache()
    missing = []
    for name in packages:
        try:
            if not cache[name].current_ver:
                # Known to apt but not currently installed.
                missing.append(name)
        except KeyError:
            log('Package {} has no installation candidate.'.format(name),
                level='WARNING')
            missing.append(name)
    return missing
def get_upgradeable_esm_package_count():
    """Count newer-than-installed package versions that come from a
    package file with pin priority -32768 (apt's never-install pin),
    across all installed packages."""
    import apt_pkg
    apt_pkg.init()
    cache = apt_pkg.Cache(None)
    depcache = apt_pkg.DepCache(cache)
    policy = depcache.policy

    count = 0
    for pkg in cache.packages:
        installed = pkg.current_ver
        if not installed:
            # Not installed — nothing to upgrade.
            continue
        for newer in (v for v in pkg.version_list if v > installed):
            # One hit per newer version, however many files carry it.
            if any(policy.get_priority(pkg_file) == -32768
                   for pkg_file, _ in newer.file_list):
                count += 1
    return count
def backup_pkg_load_from_mintinstall(self, button):
    """Populate the backup treeview with the user's manually installed
    packages (falling back to the mintinstall GSettings list)."""
    # Load the package list into the treeview
    self.builder.get_object("button_back").show()
    self.builder.get_object("button_back").set_sensitive(True)
    self.builder.get_object("button_forward").show()
    self.notebook.set_current_page(TAB_PKG_BACKUP_1)

    # Model columns: selected (bool), package name, markup description.
    model = Gtk.ListStore(bool, str, str)
    model.set_sort_column_id(1, Gtk.SortType.ASCENDING)

    pkgcache = PkgCache()
    installed_packages = pkgcache.get_manually_installed_packages()
    if not installed_packages:
        # Fall back to the list mintinstall keeps in GSettings.
        settings = Gio.Settings("com.linuxmint.install")
        installed_packages = settings.get_strv("installed-apps")
    else:
        self.builder.get_object("label_caption_software_backup2").set_text(
            _("The list below shows the applications you installed."))

    apt_pkg.init()
    cache = apt_pkg.Cache()
    package_records = apt_pkg.PackageRecords(cache)

    for item in installed_packages:
        try:
            if item.startswith(("apt:", "fp:")):
                # Split package hash at first ':' since some packages have ':i386' suffixes
                (prefix, name) = item.split(':', 1)
            else:
                # Assume packages are from APT if not specified
                prefix = "apt"
                name = item
            # Flatpak ("fp:") entries are skipped here — only apt
            # packages that are actually installed are listed.
            if prefix == "apt" and name in cache:
                pkg = cache[name]
                if pkg.current_ver:
                    # Position the records parser on this package's
                    # description so short_desc is valid below.
                    package_records.lookup(
                        pkg.version_list[0].translated_description.
                        file_list[0])
                    desc = f"{pkg.name}\n<small>{GLib.markup_escape_text(package_records.short_desc)}</small>"
                    model.append([True, pkg.name, desc])
        except Exception as e:
            # Best effort: a malformed entry must not break the listing.
            print(e)

    self.builder.get_object("treeview_packages").set_model(model)
def openstack_upgrade_available(self, package=None, snap=None):
    """Check if an OpenStack upgrade is available

    :param package: str Package name to use to check upgrade availability
    :param snap: str Snap name to use when a snap install is in effect
    :returns: bool
    """
    if not package:
        package = self.release_pkg
    if not snap:
        snap = self.release_snap
    src = self.config[self.source_config_key]
    avail_vers = os_utils.get_os_version_install_source(src)
    # Fix: the original computed cur_vers from the package here and then
    # unconditionally recomputed it in the branch below — the first call
    # was dead work and has been removed.
    if os_utils.snap_install_requested():
        cur_vers = self.get_os_version_snap(snap)
    else:
        cur_vers = self.get_os_version_package(package)
    apt.init()
    # version_compare > 0 means the first argument is the newer version.
    return apt.version_compare(avail_vers, cur_vers) == 1
def __init__(self, file_in, install):
    """Set up the worker thread: open the apt cache, configure file
    logging, and record the input file and install/remove mode.

    :param file_in: path to the package-list file to process
    :param install: truthy to install, falsy otherwise
    """
    QtCore.QThread.__init__(self)
    self.op_progress = None
    self._cache = apt.Cache(self.op_progress)
    self._cache.open()
    self.file_in = file_in
    self.isDone = False
    self.logger = logging.getLogger(__name__)
    self.logger.setLevel(logging.DEBUG)
    # NOTE(review): a new FileHandler is added on every construction;
    # creating several instances duplicates log lines — confirm only one
    # instance is ever built.
    handler = logging.FileHandler('/var/log/resetter/resetter.log')
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(funcName)s - %(levelname)s - %(message)s'
    )
    handler.setFormatter(formatter)
    self.logger.addHandler(handler)
    # Keep the UI responsive while the cache was being opened.
    QtGui.qApp.processEvents()
    apt_pkg.init()
    self.install = install
    self.broken_list = []
def __init__(self):
    """Initialise the apt cache wrapper.

    Opens the underlying ``apt.Cache`` with a silent progress object and
    exposes ``self.dep_init``, a context-manager class that resets the
    depcache selection state on both entry and exit, so marking work done
    inside the ``with`` block never leaks into later operations.
    """
    apt_pkg.init()
    apt.Cache.__init__(self, apt.progress.base.OpProgress())
    # Per-package status bookkeeping, filled in elsewhere.
    self.packages_status = {}
    # Captured for the closure below (it has its own 'self').
    cache_instance = self

    class dep_init(object):
        # Context manager guarding a clean depcache state.
        def __init__(self):
            self.cache = cache_instance

        def __enter__(self):
            # Reset any pending selections before the block runs.
            self.cache._depcache.init()
            return self.cache

        def __exit__(self, *args):
            # Reset again so the block's selections are discarded.
            self.cache._depcache.init()

    self.dep_init = dep_init
def install_maas_cli(self):
    """Ensure maas-cli is installed.

    Falls back to adding the MAAS stable PPA (and refreshing the index)
    when the package is not known to apt, then retries.
    """
    apt.init()
    cache = apt.Cache()
    try:
        package = cache['maas-cli']
    except KeyError:
        # Unknown to apt: enable the stable PPA, refresh the package
        # index and retry from the top.
        subprocess.check_call(['add-apt-repository', '-y', MAAS_STABLE_PPA])
        subprocess.check_call(['apt-get', 'update'])
        self.install_maas_cli()
        return
    if not package.current_ver:
        # Known but not installed yet.
        apt_install('maas-cli', fatal=True)
def __init__(self):
    """Initialise the aptdaemon client wrapper and its GObject plumbing."""
    GObject.GObject.__init__(self)
    aptdaemon.client.AptClient.__init__(self)
    apt_pkg.init()
    # Whether debconf interaction is allowed for transactions.
    self.debconf = True
    # Currently running aptdaemon transaction, if any.
    self.current_trans = None
    self._ttyname = None
    self.daemon_permission = False
    # Connected signal handler ids, kept for later disconnect.
    self._signals = []
    # Cache of the most recent search results.
    self.lastedSearch = {}
    self.cache = apt.Cache(apt.progress.base.OpProgress())
    #self._cache = apt_pkg.Cache(apt.progress.base.OpProgress())#apt_pkg.GetCache()
    # Watch dpkg's status file so we can react to external changes.
    self.status_dir = Gio.file_new_for_path(
        apt_pkg.config.find_file("Dir::State::status"))
    self.monitor = self.status_dir.monitor_file(Gio.FileMonitorFlags.NONE,
                                                None)
    self.authorized = False
    self.module = None
    if self.monitor:
        self.monitor.connect("changed", self._changed)
def look_up(self, name, url):
    """Resolve a Debian source-package spec ``name[/version]`` to its VCS URL.

    :param name: source package name, optionally suffixed with ``/version``.
    :param url: original URL, used only in error reporting.
    :raises DependencyNotPresent: when python-apt is not importable.
    :raises urlutils.InvalidURL: when the package or version cannot be
        found, or the package declares no VCS field.
    :returns: the resolved VCS URL string.
    """
    if "/" in name:
        (name, version) = name.split("/", 1)
    else:
        version = None
    try:
        import apt_pkg
    except ImportError as e:
        raise DependencyNotPresent('apt_pkg', e)
    apt_pkg.init()
    # Collect every source record advertised for this name.
    sources = apt_pkg.SourceRecords()
    by_version = {}
    while sources.lookup(name):
        by_version[sources.version] = sources.record
    if len(by_version) == 0:
        raise urlutils.InvalidURL(path=url, extra='package not found')
    if version is None:
        # Try the latest version
        version = sorted(by_version, key=Version)[-1]
    if version not in by_version:
        raise urlutils.InvalidURL(
            path=url, extra='version %s not found' % version)
    control = Deb822(by_version[version])
    try:
        # NOTE: rebinds 'url' to the resolved VCS location on success.
        vcs, url = source_package_vcs_url(control)
    except KeyError:
        note("Retrieving Vcs locating from %s Debian version %s",
             name, version)
        raise urlutils.InvalidURL(path=url, extra='no VCS URL found')
    note("Resolved package URL from Debian package %s/%s: %s",
         name, version, url)
    return url
def get_npp_entries_for_arch_and_distribution(topdir, architecture, dist):
    """Collect NppInfo entries for every package carrying Npp-* metadata.

    :param topdir: root directory handed to the apt Cache.
    :param architecture: apt architecture to configure before reading.
    :param dist: distribution name recorded in each resulting entry.
    :returns: list of NppInfo objects, one per (mime-type, application)
        combination advertised by a package.
    """
    entries = []
    apt_pkg.init()
    apt_pkg.Config.Set("APT::Architecture", architecture)
    cache = Cache(None, topdir)
    cache.update()
    cache.open(None)
    for pkg in cache:
        # Prefer the candidate record; fall back to the installed one.
        record = pkg.candidateRecord
        if record is None:
            record = pkg.installedRecord
        if record is None:
            continue
        # dict.has_key() was removed in Python 3; 'in' works everywhere.
        if 'Npp-Name' not in record:
            continue
        mime_types = record['Npp-MimeType'].split(",")
        app_ids = record['Npp-Applications'].split(",")
        # Loop-invariant: the section prefix (before '/') is the same for
        # every mime-type/app combination, so compute it once.
        (real_section, separator, tail) = record['Section'].partition("/")
        for mime_type in mime_types:
            for app_id in app_ids:
                entries.append(NppInfo(
                    pkg.name.strip(),
                    pkg.description,
                    pkg.summary,
                    record['Npp-Name'],
                    mime_type.strip(),
                    app_id.strip(),
                    record['Architecture'].strip(),
                    dist.strip(),
                    real_section.strip(),
                ))
    return entries
def _load_repo(self, deb_root, dist, component, arch, source=False):
    """Yield an opened, freshly-updated apt Cache for the given repo.

    Builds a throwaway apt root inside a temporary directory, points a
    sources.list at *deb_root*, and configures apt for *arch* before
    opening the cache.  Binary packages by default; *source* selects a
    deb-src line instead.
    """
    with tempfile.TemporaryDirectory() as apt_root:
        LOG.debug("Setting up apt directory structure in %s", apt_root)
        for needed in APT_NEEDED_DIRS:
            os.makedirs(os.path.join(apt_root, needed), exist_ok=True)

        # Create sources.list
        # FIXME: import GPG key, remove 'trusted=yes' which skips GPG
        # verification
        if source:
            entry = f"deb-src [trusted=yes] {deb_root} {dist} {component}\n"
        else:
            entry = f"deb [arch={arch} trusted=yes] {deb_root} {dist} {component}\n"
        with open(os.path.join(apt_root, "etc/apt/sources.list"), "w") as fp:
            fp.write(entry)

        # Create empty dpkg status
        dpkg_status = os.path.join(apt_root, "var/lib/dpkg/status")
        with open(dpkg_status, "w"):
            pass

        # Setup generic configuration
        apt_pkg.init()
        apt_pkg.config.set("Dir", apt_root)
        apt_pkg.config.set("Dir::State::status", dpkg_status)
        apt_pkg.config.set("Acquire::Languages", "none")
        progress = LoggerAcquireProgress(LOG)

        # Create a new cache with the appropriate architecture
        apt_pkg.config.set("APT::Architecture", arch)
        apt_pkg.config.set("APT::Architectures", arch)
        cache = apt.Cache()
        cache.update(progress)
        cache.open()
        yield cache
def getDependencies(self, packageName):
    """Return the unique names of *packageName*'s direct 'Depends'.

    :param packageName: binary package name to look up.
    :raises PackageNotFoundError: when the package exists in the cache
        but has no installed version.
    :returns: list of dependency package names ([] when the package is
        not present in the cache at all, matching the original contract).
    """
    # The original shadowed the builtin 'list' with the Depends entry
    # list and then called list(result.keys()) — a guaranteed TypeError.
    result = set()
    if not self.cache:
        import apt_pkg
        apt_pkg.init()
        self.cache = apt_pkg.Cache()
    for package in self.cache.packages:
        if package.name == packageName:
            current = package.current_ver
            if not current:
                raise PackageNotFoundError(packageName)
            # A package with no Depends field has no 'Depends' key;
            # treat that as an empty dependency list.
            for dependency in current.depends_list.get('Depends', []):
                # Or-groups: only the first alternative is recorded,
                # as in the original implementation.
                result.add(dependency[0].target_pkg.name)
    return list(result)
def _SetUpAnalysisFromBatchDescriptor(self, batch_descriptor):
    """Put together a list of packages that match the specified regexs."""
    apt_pkg.init()
    architecture = batch_descriptor.architecture.encode('utf8')
    apt_pkg.config['Apt::Architecture'] = architecture
    apt_cache = apt.Cache()
    if self._update:
        try:
            apt_cache.update()
        except apt.cache.FetchFailedException as error:
            # A failed refresh is non-fatal; analyse the stale cache.
            logging.warning('Failed to update apt cache: %s', error)
    apt_cache.open(None)
    pattern = re.compile(batch_descriptor.name_regex)
    # Directories and config file must both be in place before the
    # per-package descriptors are built.
    if not (self._SetUpDirs() and self._SetUpConfigFile()):
        return NfsAnalysisSetupAgent.ERROR
    return self._SetUpPackageDescriptors(apt_cache, pattern,
                                         batch_descriptor.architecture,
                                         batch_descriptor.max_count)
def install_yum(self):
    """Install build prerequisites via yum, then build/install Thrift 0.8.0.

    Runs a series of external commands with root-level side effects
    (package installs, starting mysqld, downloading and compiling
    thrift).  NOTE(review): the thrift download URL is historical and
    may no longer resolve — verify before relying on this.
    """
    commands_to_run = [
        ['yum', '-y', 'install', 'pypy', 'python', 'MySQL-python',
         'mysqld', 'mysql-server',
         # 'autocon' was a typo — there is no such yum package; the
         # thrift build needs autotools, i.e. autoconf.
         'autoconf', 'automake', 'libtool', 'flex', 'boost-devel',
         'gcc-c++', 'perl-ExtUtils-MakeMaker', 'byacc', 'svn',
         'openssl-devel', 'make', 'java-1.6.0-openjdk', 'git', 'wget'],
        ['service', 'mysqld', 'start'],
        ['wget', 'http://www.quickprepaidcard.com/apache//thrift/0.8.0/thrift-0.8.0.tar.gz'],
        ['tar', 'zxvf', 'thrift-0.8.0.tar.gz']]
    install_commands = [['./configure'], ['make'], ['make', 'install']]
    # print() as a function keeps this module importable under Python 3
    # (the rest of the file already uses Python 3 syntax).
    for cmd in commands_to_run:
        print(check_output(cmd))
    chdir('thrift-0.8.0')
    for cmd in install_commands:
        print(check_output(cmd))
    apt_pkg.init()
    cache = apt_pkg.Cache()
    print("All installed packages:")
def main():
    # NOTE(review): Python 2 code using the legacy camelCase python-apt
    # API (GetCandidateVer, MarkInstall, ...).  Kept byte-identical; a
    # snake_case Python-3 port exists elsewhere in this file.
    apt_pkg.init()
    cache = apt_pkg.Cache()
    depcache = apt_pkg.DepCache(cache)
    depcache.Init()
    i = 0
    # Total package count for the progress indicator (shadows builtin 'all').
    all = cache.PackageCount
    print "Running DepCache test on all packages"
    print "(trying to install each and then mark it keep again):"
    # first, get all pkgs
    for pkg in cache.Packages:
        i += 1
        x = pkg.Name
        # then get each version
        ver = depcache.GetCandidateVer(pkg)
        if ver is not None:
            depcache.MarkInstall(pkg)
            # Marking should raise InstCount; staying at 0 for an
            # upgradable package means the mark failed.
            if depcache.InstCount == 0:
                if depcache.IsUpgradable(pkg):
                    print "Error marking %s for install" % x
            # Undo the entire selection again before the next package.
            for p in cache.Packages:
                if depcache.MarkedInstall(p):
                    depcache.MarkKeep(p)
            if depcache.InstCount != 0:
                print "Error undoing the selection for %s (InstCount: %s)" % (
                    x, depcache.InstCount)
        # '\r' progress line; trailing comma suppresses the newline.
        print "\r%i/%i=%.3f%% " % (i, all, (float(i) / float(all) * 100)),
    print
    print "Trying Upgrade:"
    depcache.Upgrade()
    print "To install: %s " % depcache.InstCount
    print "To remove: %s " % depcache.DelCount
    print "Kept back: %s " % depcache.KeepCount
    print "Trying DistUpgrade:"
    # Upgrade(True) == dist-upgrade (may add/remove packages).
    depcache.Upgrade(True)
    print "To install: %s " % depcache.InstCount
    print "To remove: %s " % depcache.DelCount
    print "Kept back: %s " % depcache.KeepCount
def get_updatesApt(self, input_data):
    """Enumerate upgradable apt packages, honouring pin/hold settings.

    :param input_data: dict; must contain 'action' (read for validation,
        not otherwise used here).
    :returns: retval(...) payload listing upgradable and held-back
        (skipped) package names and counts.
    """
    import apt_pkg
    action = input_data['action']
    upgrade_list = []
    skipped_list = []
    upgrade_count = 0
    skipped_count = 0
    apt_pkg.init()
    if os.path.exists("/etc/apt/apt.conf"):
        apt_pkg.read_config_file(apt_pkg.config, "/etc/apt/apt.conf")
    if os.path.isdir("/etc/apt/apt.conf.d"):
        apt_pkg.read_config_dir(apt_pkg.config, "/etc/apt/apt.conf.d")
    apt_pkg.init_system()
    # The original mixed the removed camelCase API (GetCache, GetDepCache,
    # ReadPinFile, Init, PackageCount) with the snake_case API used below;
    # the camelCase names do not exist in the python-apt versions that
    # provide is_upgradable()/marked_keep().  Use the current API throughout.
    cache = apt_pkg.Cache(None)
    depcache = apt_pkg.DepCache(cache)
    depcache.read_pinfile()
    depcache.init()
    for i in cache.packages:
        # '==' not 'is': identity comparison on ints is unreliable.
        if i.current_state == apt_pkg.CURSTATE_INSTALLED:
            if depcache.is_upgradable(i):
                if depcache.marked_keep(i):
                    # Held back by pinning/hold.
                    skipped_list.append(i.name)
                    skipped_count += 1
                else:
                    upgrade_list.append(i.name)
                    upgrade_count += 1
    return(retval(0, "Package Update List", {
        "consequences": [
            'attrs.AvailablePackages := %s' % cache.package_count,
            'attrs.UpgradablePackageCount := %s' % upgrade_count,
            'attrs.SkippedPackageCount := %s' % skipped_count,
            'attrs.UpgradablePackages := %s' % json.dumps(upgrade_list),
            'attrs.SkippedPackageList := %s' % json.dumps(skipped_list)]}))
def main():
    """DepCache smoke test.

    Tries to mark every package for install and then undoes the
    selection, reporting any mark/unmark inconsistencies; finally runs
    upgrade and dist-upgrade and prints the resulting counters.
    """
    apt_pkg.init()
    cache = apt_pkg.Cache()
    depcache = apt_pkg.DepCache(cache)
    depcache.init()
    i = 0
    # Renamed from 'all', which shadowed the builtin.
    total = cache.package_count
    # print() as a function: the original Python-2 print statements are a
    # SyntaxError under Python 3 (which other blocks in this file require).
    print("Running DepCache test on all packages")
    print("(trying to install each and then mark it keep again):")
    # first, get all pkgs
    for pkg in cache.packages:
        i += 1
        name = pkg.name
        # then get each version
        ver = depcache.get_candidate_ver(pkg)
        if ver is not None:
            depcache.mark_install(pkg)
            # Marking should raise inst_count; staying at 0 for an
            # upgradable package means the mark failed.
            if depcache.inst_count == 0:
                if depcache.is_upgradable(pkg):
                    print("Error marking %s for install" % name)
            # Undo the entire selection again before the next package.
            for p in cache.packages:
                if depcache.marked_install(p):
                    depcache.mark_keep(p)
            if depcache.inst_count != 0:
                print("Error undoing the selection for %s (inst_count: %s)" % (
                    name, depcache.inst_count))
        # '\r' progress line; end=' ' replaces the Py2 trailing comma.
        print("\r%i/%i=%.3f%% " % (i, total, (float(i) / float(total) * 100)),
              end=' ')
    print()
    print("Trying upgrade:")
    depcache.upgrade()
    print("To install: %s " % depcache.inst_count)
    print("To remove: %s " % depcache.del_count)
    print("Kept back: %s " % depcache.keep_count)
    print("Trying DistUpgrade:")
    # upgrade(True) == dist-upgrade (may add/remove packages).
    depcache.upgrade(True)
    print("To install: %s " % depcache.inst_count)
    print("To remove: %s " % depcache.del_count)
    print("Kept back: %s " % depcache.keep_count)
def main():
    """Example for PackageFile()"""
    apt_pkg.init()
    cache = apt_pkg.Cache()
    for pkgfile in cache.file_list:
        print('Package-File:', pkgfile.filename)
        print('Index-Type:', pkgfile.index_type)  # 'Debian Package Index'
        if pkgfile.not_source:
            print('Source: None')
        elif pkgfile.site:
            # There is a source, and a site, print the site
            print('Source:', pkgfile.site)
        else:
            # It seems to be a local repository
            print('Source: Local package file')
        # not_automatic => won't be updated automatically (eg. experimental)
        print('Automatic: No' if pkgfile.not_automatic else 'Automatic: Yes')
        print()
def openstack_upgrade_available(package):
    """
    Determines if an OpenStack upgrade is available from installation
    source, based on version of installed package.

    :param package: str: Name of installed package.

    :returns: bool:    : Returns True if configured installation source offers
                         a newer version of package.
    """
    import apt_pkg as apt
    src = config('openstack-origin')
    cur_vers = get_os_version_package(package)
    # Swift versions are not in the shared OpenStack codename table.
    if "swift" in package:
        available_vers = get_os_version_codename(
            get_os_codename_install_source(src), SWIFT_CODENAMES)
    else:
        available_vers = get_os_version_install_source(src)
    apt.init()
    return apt.version_compare(available_vers, cur_vers) == 1
def get_os_codename_package(pkg):
    '''Derive OpenStack release codename from an installed package.

    :param pkg: str name of an installed package.
    :returns: codename string; calls error_out() (which aborts) when the
        package is missing, not installed, or its version is unknown.
    '''
    apt.init()
    cache = apt.Cache()
    try:
        pkg = cache[pkg]
    except KeyError:
        # Narrowed from a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit and unrelated bugs.
        e = 'Could not determine version of installed package: %s' % pkg
        error_out(e)
    if not pkg.current_ver:
        # Known to apt but not installed: current_ver is None and the
        # .ver_str access below would raise AttributeError.
        e = 'Could not determine version of installed package: %s' % pkg.name
        error_out(e)
    vers = apt.UpstreamVersion(pkg.current_ver.ver_str)
    try:
        if 'swift' in pkg.name:
            # Swift versioning uses a shorter significant prefix.
            vers = vers[:5]
            return swift_codenames[vers]
        else:
            vers = vers[:6]
            return openstack_codenames[vers]
    except KeyError:
        e = 'Could not determine OpenStack codename for version %s' % vers
        error_out(e)
def _readconf(self):
    """Load dak's ISC-style configuration into self.Cnf.

    Layers, in order: the main config file, an optional per-host
    redirect, the first matching per-group config, and any 'Include'
    files.  Later reads override earlier values.  Also re-exports the
    common Configuration lookup helpers as attributes on self.
    """
    apt_pkg.init()
    self.Cnf = apt_pkg.Configuration()
    apt_pkg.read_config_file_isc(self.Cnf, which_conf_file())

    # Check whether our dak.conf was the real one or
    # just a pointer to our main one
    fqdn = socket.getfqdn()
    conffile = self.Cnf.get("Config::" + fqdn + "::DakConfig")
    if conffile:
        apt_pkg.read_config_file_isc(self.Cnf, conffile)

    # Read group-specific options
    if 'ByGroup' in self.Cnf:
        bygroup = self.Cnf.subtree('ByGroup')
        # Effective group plus all supplementary groups of this process.
        groups = set([os.getgid()])
        groups.update(os.getgroups())
        for group in bygroup.list():
            gid = grp.getgrnam(group).gr_gid
            if gid in groups:
                if bygroup.get(group):
                    apt_pkg.read_config_file_isc(self.Cnf, bygroup[group])
                # Only the FIRST group we belong to is considered,
                # even when its config value is empty.
                break

    if 'Include' in self.Cnf:
        for filename in self.Cnf.value_list('Include'):
            apt_pkg.read_config_file_isc(self.Cnf, filename)

    # Rebind some functions
    # TODO: Clean this up
    self.get = self.Cnf.get
    self.subtree = self.Cnf.subtree
    self.value_list = self.Cnf.value_list
    self.find = self.Cnf.find
    self.find_b = self.Cnf.find_b
    self.find_i = self.Cnf.find_i
def refresh_system_call(self):
    '''Call the refresh of the app'''
    # Rebuild the apt cache/depcache from scratch; the previous action
    # group (if any) must be released before creating a new one.
    apt_pkg.init()
    self.cache = apt_pkg.Cache()
    if self.action_group is not None:
        self.action_group.release()
    self.depcache = apt_pkg.DepCache(self.cache)
    self.action_group = apt_pkg.ActionGroup(self.depcache)
    # Re-initialise the app-install controller state.
    control.__init__()
    self.aid = control.controller.app_install_directory
    self.marked_as_install = []
    self.theme = Gtk.IconTheme.get_default()
    self.theme.append_search_path("/usr/share/app-install/icons/")
    # Keep references to the current models before resetting the views.
    self.current_apps_model = self.ui.apps_all.model
    self.current_installed_model = self.ui.apps_installed.model
    self.refresh_app_basket()
    self.ui.apps_all.set_model(self.ui.apps_all.model)
    self.ui.apps_installed.set_model(self.ui.apps_installed.model)
    self.ui.apps_message.set_visible(False)
    self.ui.installed_message.set_visible(False)
    if self.ui.toolbar.__class__.__name__ == "Toolbar":
        self.ui.toolbar.set_style(3)
    self.packages = []
    # When not starting up and the user is on the "all apps" (1) or
    # "installed" (2) page, re-populate that page in place; on startup
    # just navigate home instead.
    if (not self.startup) and (self.ui.pages.get_page() in [1, 2]):
        if self.ui.pages.get_page() == 1:
            self.get_func()
            # Fonts category shows both installed and available entries.
            if self.choosed_category == "fonts":
                showboth = True
            else:
                showboth = False
            self.append_packages_call(self.choosed_category, [],
                                      self.ui.apps_all.model, showboth)
        if self.ui.pages.get_page() == 2:
            self.installed_func()
    elif self.startup:
        self.back_home(None)
    if control.controller.check_internet:
        self.check_internet()
    self.startup = False
def main():
    """Command-line entry point: dispatch to the requested updater action."""
    # init Apt, we need it later
    apt_pkg.init()
    parser = OptionParser()
    parser.add_option("-u", "--update",
                      action="store_true", dest="update", default=False,
                      help="syncronize Jenkins with archive contents")
    parser.add_option("--checkbuild",
                      action="store_true", dest="checkbuild", default=False,
                      help="check if packages need to be build and schedule builds if possible")
    parser.add_option("--cruft-report",
                      action="store_true", dest="cruft_report", default=False,
                      help="report jobs without matching package")
    parser.add_option("--cruft-remove",
                      action="store_true", dest="cruft_remove", default=False,
                      help="delete jobs without matching source package.")
    (options, args) = parser.parse_args()

    # First matching flag wins, in the same precedence order as before.
    dispatch = (
        (options.update, "sync_packages_all"),
        (options.checkbuild, "checkbuild"),
        (options.cruft_report, "cruft_report"),
        (options.cruft_remove, "cruft_remove"),
    )
    for flag, method in dispatch:
        if flag:
            getattr(BuildJobUpdater(), method)()
            break
    else:
        print("Run with -h for a list of available command-line options!")
def get_source_package(name):
    """Get source package metadata.

    Args:
      name: Name of the source package
    Returns:
      A `Deb822` object for the highest available version.
    Raises:
      NoSuchPackage: when no source record matches *name*.
    """
    apt_pkg.init()
    records = apt_pkg.SourceRecords()
    # Map every advertised version string to its raw record.
    versions = {}
    while records.lookup(name):
        versions[records.version] = records.record
    if not versions:
        raise NoSuchPackage(name)
    # Try the latest version
    latest = sorted(versions, key=Version)[-1]
    return Deb822(versions[latest])
def _main():
    """Scan *image* for vulnerabilities at or above the given severity.

    :returns: the number of non-whitelisted findings (used as the
        process exit status by the caller).
    """
    apt_pkg.init()
    parser = argparse.ArgumentParser()
    parser.add_argument('image', help='The image to test')
    parser.add_argument('--severity',
                        choices=[_LOW, _MEDIUM, _HIGH, _CRITICAL],
                        default=_MEDIUM,
                        help='The minimum severity to filter on.')
    parser.add_argument('--whitelist-file', dest='whitelist',
                        help='The path to the whitelist json file',
                        default='whitelist.json')
    args = parser.parse_args()

    logging.basicConfig(level=logging.DEBUG)

    try:
        # 'with' closes the handle; the original json.load(open(...))
        # leaked the file descriptor.
        with open(args.whitelist, 'r') as whitelist_file:
            whitelist = json.load(whitelist_file)
    except IOError:
        # Missing/unreadable whitelist simply means nothing is whitelisted.
        whitelist = []
    logging.info("whitelist=%s", whitelist)

    return len(_check_for_vulnz(args.image, args.severity, whitelist))