def change_rootdir(self, newrootdir):
    """Reset the rootdir to *newrootdir* and reopen the cache.

    Unfortunately, apt.cache outside the chroot seems to commit to the
    host system even when rootdir has been defined.  So this method comes
    handy to temporarily reset rootdir to / when the installer should
    work into the chroot and then to reset it to
    main_settings["target"].

    Keep in mind that this method does not re-read the APT configuration
    files again as the __init__() parent method seems to do when there
    is a rootdir specified.  open() needs to be called after this
    method, to avoid nasty problems.
    """
    # Snapshot pending marks before reopening; marks do not survive open().
    if self._depcache:
        changes = self.get_changes()
    else:
        changes = ()
    # Save in a dictionary, as we can't get marks after open()
    saved = {}
    for pkg in changes:
        if pkg.marked_delete:
            saved[pkg.name] = MarkType.DELETE
        elif pkg.marked_install:
            saved[pkg.name] = MarkType.INSTALL
        elif pkg.marked_keep:
            saved[pkg.name] = MarkType.KEEP
        elif pkg.marked_upgrade:
            saved[pkg.name] = MarkType.UPGRADE
    # Re-point apt at the new root; status file and dpkg must follow it.
    apt_pkg.config.set("Dir", newrootdir)
    apt_pkg.config.set("Dir::State::status",
                       os.path.join(newrootdir, "var/lib/dpkg/status"))
    apt_pkg.config.set("Dir::bin::dpkg",
                       os.path.join(newrootdir, "usr/bin/dpkg"))
    # Reset the sources.list to known values.
    apt_pkg.config.set("Dir::Etc::sourcelist", self.normal_source_list)
    apt_pkg.config.set("Dir::Etc::sourceparts", self.normal_source_dir)
    apt_pkg.config.set("APT::List-Cleanup", self.normal_source_cleanup)
    self._list = apt_pkg.SourceList()
    self._list.read_main_list()
    # init_system() is required for the Dir::State::status change to be
    # honored before the cache is reopened.
    apt_pkg.init_system()
    self.open(progress=None)
    # Rebuild changes list
    for pkg, mark in saved.items():
        if mark == MarkType.DELETE:
            self[pkg].mark_delete()
        elif mark == MarkType.INSTALL:
            self[pkg].mark_install()
        elif mark == MarkType.KEEP:
            self[pkg].mark_keep()
        elif mark == MarkType.UPGRADE:
            self[pkg].mark_upgrade()
def test_apt_cache_reopen_is_safe_swap(self):
    """Reopening after a status-file swap must keep marks on the right
    package."""
    with tempfile.NamedTemporaryFile() as status:
        for key, value in (("Dir::Etc::SourceList", "/dev/null"),
                           ("Dir::Etc::SourceParts", "/dev/null"),
                           ("Dir::State::Status", status.name)):
            apt_pkg.config[key] = value
        apt_pkg.init_system()
        self.write_status_file("abcdefghijklmnopqrstuvwxyz")
        cache = apt.Cache()
        pkg_a = cache["a"]
        old_id = pkg_a.id
        old_hash = hash(pkg_a)
        pkg_set = {pkg_a}
        self.write_status_file("baz")
        apt_pkg.init_system()
        cache.open()
        # b now has the same id as a in the old cache
        self.assertEqual(cache["b"].id, old_id)
        self.assertNotEqual(pkg_a.id, old_id)
        # Marking a should still mark a, not b.
        pkg_a.mark_delete()
        self.assertEqual([pkg_a], cache.get_changes())
        # Ensure that p can still be found in a set of it, as a test
        # for bug https://bugs.launchpad.net/bugs/1780099
        self.assertEqual(hash(pkg_a), old_hash)
        self.assertIn(pkg_a, pkg_set)
def resetConfig(self):
    """Drop every apt configuration entry, then re-run apt's init to
    restore pristine defaults."""
    apt_pkg.config.clear("")
    for top_level_key in apt_pkg.config.list():
        apt_pkg.config.clear(top_level_key)
    apt_pkg.init_config()
    apt_pkg.init_system()
def scan(self, update):
    '''
    This method sets the (global) apt-context to this suite and updates
    the repository metadata in the local cache from the remote
    apt-repository if update==True. Call this method before accessing
    packages data, e.g. like in queryPackages(...). If update==False,
    the already cached local metadata are used. This method returns
    False if apt-pkg recognized an error during scan (it seems apt-pkg
    doesn't recognize all error situations, i.e. if a repository server
    is not available).
    '''
    logger.debug("scanning repository/suite {} {} update".format(
        self.suite, 'with' if update else 'without'))
    # Point the global apt configuration at this suite's root directory.
    apt_pkg.read_config_file(apt_pkg.config,
                             self.rootdir + "/etc/apt/apt.conf")
    apt_pkg.config.set("Dir", self.rootdir)
    apt_pkg.config.set("Dir::State::status",
                       self.rootdir + "/var/lib/dpkg/status")
    # init_system() must run after the config changes for them to apply.
    apt_pkg.init_system()
    self.cache = apt_pkg.Cache()
    ok = True
    if update:
        try:
            self.cache.update(self.__Progress(), self.__sources())
        except SystemError as e:
            logger.warning(
                "Could not update the cache for suite {}:".format(
                    self.suite))
            # apt packs several W:/E: messages into one SystemError;
            # split them onto separate log lines.
            for msg in re.sub(r"(\n,)? ([WE]:)", "\n\\2",
                              str(e)).split("\n"):
                logger.warning(msg)
            ok = False
        # Re-open the cache to pick up the freshly downloaded indexes.
        self.cache = apt_pkg.Cache()
    self.records = apt_pkg.PackageRecords(self.cache)
    logger.debug("finished scan")
    return ok
def save_state(self, sourcedir, target, with_dpkg_repack=False,
               with_dpkg_status=False, scrub_sources=False,
               extra_files=None):
    """
    Save the current system state (installed packages, enabled
    repositories ...) into the apt-state.tar.gz file in targetdir.
    """
    # Accept either a directory (use the default clone file name) or a
    # file path (ensure an archive-like suffix).
    if os.path.isdir(target):
        target = os.path.join(target, self.CLONE_FILENAME)
    else:
        if not target.endswith(".tar.gz"):
            target += ".apt-clone.tar.gz"
    if sourcedir != '/':
        # Re-point apt at the foreign root before reading its state.
        apt_pkg.init_config()
        apt_pkg.config.set("Dir", sourcedir)
        apt_pkg.config.set("Dir::State::status",
                           os.path.join(sourcedir, 'var/lib/dpkg/status'))
        apt_pkg.init_system()
    with tarfile.open(name=target, mode="w:gz") as tar:
        self._write_uname(tar)
        self._write_state_installed_pkgs(sourcedir, tar)
        self._write_state_auto_installed(tar)
        self._write_state_sources_list(tar, scrub_sources)
        self._write_state_apt_preferences(tar)
        self._write_state_apt_keyring(tar)
        self._write_state_extra_files(extra_files, tar)
        if with_dpkg_status:
            self._write_state_dpkg_status(tar)
        if with_dpkg_repack:
            self._dpkg_repack(tar)
def test_multi_arch_same(self):
    """A Multi-Arch: same i386 deb must satisfy deps on an amd64 host."""
    apt_pkg.config["APT::Architectures::"] = "i386"
    apt_pkg.config["APT::Architectures::"] = "amd64"
    apt_pkg.config["APT::Architecture"] = "amd64"
    apt_pkg.init_system()
    deb_path = "./data/test_debs/testdep-same-arch_1.0-1_i386.deb"
    same_arch = apt.debfile.DebPackage(deb_path)
    self.assertTrue(same_arch.check(), same_arch._failure_string)
def __init__(self, progress=None, rootdir=None, memonly=False):
    """Open the apt cache, optionally against an alternative *rootdir*.

    :param progress: optional progress object forwarded to open().
    :param rootdir: alternative root directory (chroot-like layout).
    :param memonly: build apt's caches in memory only.
    """
    self._cache = None
    self._depcache = None
    self._records = None
    self._list = None
    self._callbacks = {}
    self._weakref = weakref.WeakValueDictionary()
    self._set = set()
    if memonly:
        # force apt to build its caches in memory
        apt_pkg.config.set("Dir::Cache::pkgcache", "")
    if rootdir:
        # Load the alternative root's own apt configuration, if present.
        if os.path.exists(rootdir+"/etc/apt/apt.conf"):
            apt_pkg.read_config_file(apt_pkg.config,
                                     rootdir + "/etc/apt/apt.conf")
        if os.path.isdir(rootdir+"/etc/apt/apt.conf.d"):
            apt_pkg.read_config_dir(apt_pkg.config,
                                    rootdir + "/etc/apt/apt.conf.d")
        apt_pkg.config.set("Dir", rootdir)
        apt_pkg.config.set("Dir::State::status",
                           rootdir + "/var/lib/dpkg/status")
        # create required dirs/files when run with special rootdir
        # automatically
        self._check_and_create_required_dirs(rootdir)
        # Call InitSystem so the change to Dir::State::Status is actually
        # recognized (LP: #320665)
        apt_pkg.init_system()
    self.open(progress)
def main():
    """Nagios entry point: compare each domain's QEMU version with the
    hypervisor's, push per-domain results via NSCA, and report the HV
    plugin status."""
    args = get_args()
    # apt's version comparison is used downstream; initialize it once.
    apt_pkg.init_system()
    domains = get_domain_list()
    try:
        hypervisor_qemu_version, domains = check_versions(domains)
    except HVQEMUVersionException:
        print_nagios_message(ExitCodes.unknown,
                             'Could not retrieve hypervisor QEMU version')
        exit(ExitCodes.unknown)
    except Exception:
        print_nagios_message(
            ExitCodes.warning,
            'An exception occurred when retrieving HV QEMU version')
        exit(ExitCodes.warning)
    nsca_output, mismatch_doms, unknown_doms = build_nsca_output(
        hypervisor_qemu_version, domains)
    nsca_result = send_nsca(args.hosts, nsca_output)
    code, reason = build_plugin_output(mismatch_doms, unknown_doms,
                                       nsca_result)
    print_nagios_message(code, reason)
    exit(code)
def test_multi_arch_same(self):
    """Multi-Arch: same cross-arch dependency must check out cleanly."""
    for key, value in (("APT::Architectures::", "i386"),
                       ("APT::Architectures::", "amd64"),
                       ("APT::Architecture", "amd64")):
        apt_pkg.config[key] = value
    apt_pkg.init_system()
    same = apt.debfile.DebPackage(
        "./data/test_debs/testdep-same-arch_1.0-1_i386.deb")
    self.assertTrue(same.check(), same._failure_string)
def _configure_apt(self):
    """ Initializes the :mod:`apt_pkg` module global settings. """
    apt_pkg.init_config()
    apt_pkg.init_system()
    # Overlay our own configuration file and cache location on top of
    # the defaults established above.
    apt_pkg.read_config_file(apt_pkg.config, self.conf_file_path)
    apt_pkg.config.set('Dir::Etc', self.cache_root_dir)
def test_multi_arch_allowed(self):
    """A Multi-Arch: allowed i386 deb must satisfy :any deps on amd64."""
    for key, value in (("APT::Architectures::", "i386"),
                       ("APT::Architectures::", "amd64"),
                       ("APT::Architecture", "amd64")):
        apt_pkg.config[key] = value
    apt_pkg.init_system()
    allowed_any = apt.debfile.DebPackage(
        "./data/test_debs/testdep-allowed-any_1.0-1_i386.deb")
    self.assertTrue(allowed_any.check(), allowed_any._failure_string)
def get_current_mirror_hostname():
    """Return ``scheme://host`` of the first configured APT mirror.

    Reads the system sources.list via apt and parses the first entry's
    URI with :func:`urllib.parse.urlsplit` instead of fragile ad-hoc
    ``split(":")`` / ``split("/")`` indexing.
    """
    from urllib.parse import urlsplit

    apt_pkg.init_config()
    apt_pkg.init_system()
    source_list_obj = apt_pkg.SourceList()
    source_list_obj.read_main_list()
    url = source_list_obj.list[0].uri
    parts = urlsplit(url)
    # netloc preserves host:port (and userinfo), matching the old
    # split("/")[2] behavior for well-formed URLs.
    return "%s://%s" % (parts.scheme, parts.netloc)
def get_cache_list():
    """Return a freshly initialized :class:`apt.Cache`.

    The old commented-out camel-case calls (``InitConfig``/``InitSystem``)
    and the redundant ``cache = None`` pre-assignment were removed as
    dead code.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    return apt.Cache()
def start_detect_source_available(self):
    """Start a 1-second periodic reachability probe of an apt source host."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    source_list_obj = apt_pkg.SourceList()
    source_list_obj.read_main_list()
    # Extract the host part of the first source's URI.
    uri = source_list_obj.list[0].uri.split("/")[2]
    # NOTE(review): the host computed above is immediately overwritten
    # with a hard-coded address -- looks like debug leftover; confirm
    # whether the real mirror host should be probed instead.
    uri = 'www.baidu.com'
    gobject.timeout_add(1000, self.network_detect_loop, uri)
def setup(self):
    """Wire up the mailer, the package controllers and the apt system."""
    self.mailer = Email('upload_removed_from_expo')
    self.mailer.connect_to_server()
    self.pkg_controller = PackageController()
    self.pkgs_controller = PackagesController()
    apt_pkg.init_system()
    # Epoch start: forces the first cruft run to happen immediately.
    self.last_cruft_run = datetime.datetime(1970, 1, 1)
    self.log.debug("%s loaded successfully" % (__name__))
def test_multi_arch_allowed(self):
    """Check that an allowed-any dependency is satisfied cross-arch."""
    apt_pkg.config["APT::Architectures::"] = "i386"
    apt_pkg.config["APT::Architectures::"] = "amd64"
    apt_pkg.config["APT::Architecture"] = "amd64"
    apt_pkg.init_system()
    deb_path = "./data/test_debs/testdep-allowed-any_1.0-1_i386.deb"
    allowed = apt.debfile.DebPackage(deb_path)
    self.assertTrue(allowed.check(), allowed._failure_string)
def __init__(self):
    """Initialize the apt package system with empty lazy caches."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    # NullProgress: a munin plugin must not emit progress output; the
    # documented None value did not work.
    self._cache = self._depcache = None
    self._installedPackages = self._upgradablePackages = None
def is_apt_package_installed(package_name):
    """Check whether a package is installed, using apt.

    :param package_name: package name, e.g. ``ubuntu-desktop``.
    :return: True if the package is installed, False otherwise.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    # Bug fix: the original compared current_state against
    # apt_pkg.INSTSTATE_REINSTREQ, a constant from the *inst_state* enum,
    # not the *current_state* one; its numeric value collides with
    # CURSTATE_CONFIG_FILES, so removed-but-not-purged packages were
    # reported as installed.
    return (apt_pkg.Cache()[package_name].current_state ==
            apt_pkg.CURSTATE_INSTALLED)
def __init__(self):
    """Initial attributes."""
    install_misc.InstallBase.__init__(self)

    # Record that installation has started.  (The original contained a
    # literal duplicate of this makedirs/open block; the copy was
    # removed -- both were idempotent, so behavior is unchanged.)
    if not os.path.exists('/var/lib/ubiquity'):
        os.makedirs('/var/lib/ubiquity')
    with open('/var/lib/ubiquity/started-installing', 'a'):
        pass

    self.update_proc = None

    # Pick the installation source, in order of preference.
    if os.path.isdir('/rofs'):
        self.source = '/rofs'
    elif os.path.isdir('/UNIONFS'):
        # Klaus Knopper says this may not actually work very well
        # because it'll copy the WHOLE WORLD (~12GB).
        self.source = '/UNIONFS'
    else:
        self.source = '/var/lib/ubiquity/source'
    self.db = debconf.Debconf()
    self.blacklist = {}

    # OEM user-config mode installs into the running system itself.
    if 'UBIQUITY_OEM_USER_CONFIG' in os.environ:
        self.source = None
        self.target = '/'
        return

    assert os.path.ismount(self.target), \
        'Failed to mount the target: %s' % str(self.target)

    self.select_language_packs(save=True)
    self.select_ecryptfs()
    if self.db.get('ubiquity/install/generate-blacklist') == 'true':
        self.db.progress('START', 0, 100, 'ubiquity/install/title')
        self.db.progress('INFO', 'ubiquity/install/blacklist')
        self.generate_blacklist()

    # Point apt at the target system.
    apt_pkg.init_config()
    apt_pkg.config.set("Dir", self.target)
    apt_pkg.config.set("Dir::State::status",
                       self.target_file('var/lib/dpkg/status'))
    apt_pkg.config.set("APT::GPGV::TrustedKeyring",
                       self.target_file('etc/apt/trusted.gpg'))
    apt_pkg.config.set("Acquire::gpgv::Options::",
                       "--ignore-time-conflict")
    apt_pkg.config.set("DPkg::Options::", "--root=%s" % self.target)
    # We don't want apt-listchanges or dpkg-preconfigure, so just clear
    # out the list of pre-installation hooks.
    apt_pkg.config.clear("DPkg::Pre-Install-Pkgs")
    apt_pkg.init_system()
def setUp(self):
    """Configure apt for the powerpc ports fixture layout."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    apt_pkg.config.set("APT::Architecture", "powerpc")
    apt_pkg.config.set("Dir::Etc",
                       os.path.abspath("data/aptsources_ports"))
    apt_pkg.config.set("Dir::Etc::sourceparts", tempfile.mkdtemp())
    build_templates = "../build/data/templates"
    if os.path.exists(build_templates):
        self.templates = os.path.abspath(build_templates)
    else:
        self.templates = "/usr/share/python-apt/templates/"
def setUp(self):
    """Force a known architecture and point apt at the test directory."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    if apt_pkg.config["APT::Architecture"] not in ('i386', 'amd64'):
        apt_pkg.config.set("APT::Architecture", "i386")
    apt_pkg.config.set("Dir::Etc", os.getcwd())
    apt_pkg.config.set("Dir::Etc::sourceparts", "/xxx")
    build_templates = "../build/data/templates"
    self.templates = (os.path.abspath(build_templates)
                      if os.path.exists(build_templates)
                      else "/usr/share/python-apt/templates/")
def setUp(self):
    """Configure apt for powerpc with a dummy sourceparts directory."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    apt_pkg.config.set("APT::Architecture", "powerpc")
    apt_pkg.config.set("Dir::Etc",
                       os.path.abspath("data/aptsources_ports"))
    apt_pkg.config.set("Dir::Etc::sourceparts", "/xxx")
    build_templates = "../build/data/templates"
    self.templates = (os.path.abspath(build_templates)
                      if os.path.exists(build_templates)
                      else "/usr/share/python-apt/templates/")
def main():
    """Print the URI of every index file apt would fetch."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    fetcher = apt_pkg.Acquire()
    sources = apt_pkg.SourceList()
    # Read the configured sources and register all their index files
    # with the fetcher.
    sources.read_main_list()
    sources.get_indexes(fetcher, True)
    for item in fetcher.items:
        print(item.desc_uri)
def __init__(self, arch, suite, sources, prefs, keylist=None, noauth=False):
    """Build an isolated apt environment in a temp dir for *arch*/*suite*.

    :param arch: dpkg architecture string.
    :param suite: value for APT::Default-Release on the second pass.
    :param sources: sources.list content written into the project dir.
    :param prefs: apt preferences written into the project dir.
    :param keylist: optional iterable of public-key URLs to import.
    :param noauth: allow unauthenticated/insecure repositories.
    """
    # pylint: disable=too-many-arguments
    self.projectpath = mkdtemp()
    self.initialize_dirs()
    self.create_apt_sources_list(sources)
    self.create_apt_prefs(prefs)
    self.setup_gpg()
    if keylist:
        for k in keylist:
            self.add_pubkey_url(k)
    apt_pkg.config.set("APT::Architecture", arch)
    apt_pkg.config.set("APT::Architectures", arch)
    apt_pkg.config.set("Acquire::http::Proxy::127.0.0.1", "DIRECT")
    apt_pkg.config.set("APT::Install-Recommends", "0")
    # NOTE(review): "Dir::Etc" is re-set to projectpath/etc/apt further
    # below, making this first assignment effectively dead -- confirm.
    apt_pkg.config.set("Dir::Etc", self.projectpath)
    apt_pkg.config.set("APT::Cache-Limit", "0")
    apt_pkg.config.set("APT::Cache-Start", "32505856")
    apt_pkg.config.set("APT::Cache-Grow", "2097152")
    apt_pkg.config.set("Dir::State",
                       os.path.join(self.projectpath, "state"))
    apt_pkg.config.set("Dir::State::status",
                       os.path.join(self.projectpath, "state/status"))
    apt_pkg.config.set("Dir::Cache",
                       os.path.join(self.projectpath, "cache"))
    apt_pkg.config.set("Dir::Cache::archives",
                       os.path.join(self.projectpath, "cache/archives"))
    apt_pkg.config.set("Dir::Etc",
                       os.path.join(self.projectpath, "etc/apt"))
    apt_pkg.config.set("Dir::Log", os.path.join(self.projectpath, "log"))
    if noauth:
        apt_pkg.config.set("APT::Get::AllowUnauthenticated", "1")
        apt_pkg.config.set("Acquire::AllowInsecureRepositories", "1")
    else:
        apt_pkg.config.set("APT::Get::AllowUnauthenticated", "0")
        apt_pkg.config.set("Acquire::AllowInsecureRepositories", "0")
    apt_pkg.init_system()
    self.source = apt_pkg.SourceList()
    self.source.read_main_list()
    self.cache = apt_pkg.Cache()
    try:
        self.cache.update(self, self.source)
    except BaseException as e:
        print(e)
    # Second pass: pin the default release, then rebuild and re-update
    # the cache with that pin in effect.
    apt_pkg.config.set("APT::Default-Release", suite)
    self.cache = apt_pkg.Cache()
    try:
        self.cache.update(self, self.source)
    except BaseException as e:
        print(e)
def main():
    """Main."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    # Print the essential set, then the important set.
    print("Essential packages:")
    for pkg in (p for p in cache.packages if p.essential):
        print(" ", pkg.name)
    print("Important packages:")
    for pkg in (p for p in cache.packages if p.important):
        print(" ", pkg.name)
def main():
    """Resolve bootstrap package dependencies from repository indexes."""
    parser = argparse.ArgumentParser(description='')
    parser.add_argument(
        '-p', '--packages',
        help='JSON file containing list of packages to bootstrap')
    parser.add_argument("-r", "--repos", help="Config file for bootstrap")
    args = parser.parse_args()
    vargs = vars(args)
    debian_packages = dict()        # NOTE(review): never used below
    debian_packages_basic = dict()  # NOTE(review): never used below
    apt_pkg.init_system()
    with open(vargs["repos"], "r") as f:
        config = json.load(f)
    with open(vargs["packages"], "r") as f:
        packages = json.load(f)
    # Get Dict of all packages in relevant Packages.gz files
    index = get_repo_contents(config)
    # NOTE(review): duplicate read -- `packages` was already loaded above.
    with open(vargs["packages"], "r") as f:
        packages = json.load(f)
    bindex = build_index(index[1])
    deps = []
    # NOTE(review): this hard-coded list overrides the contents of the
    # --packages file read above; looks like debug leftover -- confirm
    # before relying on the CLI argument.
    packages = [{
        "name": "curl",
        "version": "",
        "version_test": ""
    }, {
        "name": "bash",
        "version": "",
        "version_test": ""
    }, {
        "name": "terminator",
        "version": "",
        "version_test": ""
    }]
    for pkg in packages:
        build_deps(pkg, deps, bindex)
    print([x["Package"] for x in deps])
    return
def main():
    """Print apt cache statistics, essential/important packages and the
    package files backing the cache.

    Bug fix: the original mixed Python 2 ``print`` statements with
    Python 3 ``print()`` calls, which is a syntax error on Python 3;
    everything now uses the function form.  Large commented-out
    exploratory attribute dumps were removed as dead code.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    print("depends_count:\t%s" % cache.depends_count)
    print("group_count:\t%s" % cache.group_count)
    print("package_count:\t%s" % cache.package_count)
    print("package_file_count:\t%s" % cache.package_file_count)
    print("provides_count:\t%s" % cache.provides_count)
    print("ver_file_count:\t%s" % cache.ver_file_count)
    print("version_count:\t%s" % cache.version_count)
    print("Essential packages:")
    for pkg in cache.packages:
        if pkg.essential:
            print(" ", pkg.name)
    print("Important packages:")
    for pkg in cache.packages:
        if pkg.important:
            print(" ", pkg.name)
    print()
    for pkg in cache.packages:
        print(pkg.id, pkg.name)
    for pkgfile in cache.file_list:
        print('Package-File:', pkgfile.filename)
        print('id:', pkgfile.id)
        print()
def setUp(self):
    """Build an apt.Cache against the bundled test_debs fixtures."""
    apt_pkg.init_config()
    apt_pkg.config.set("APT::Architecture", "i386")
    # FIXME: When run via test_all.py, the tests fail without this if it
    # is set in the system.
    apt_pkg.config.clear("APT::Architectures")
    for key, value in (
            ("Dir::State::status",
             "./data/test_debs/var/lib/dpkg/status"),
            ("Dir::State::lists",
             "./data/test_debs/var/lib/apt/lists"),
            ("Dir::Etc::sourcelist",
             "./data/test_debs/etc/apt/sources.list")):
        apt_pkg.config.set(key, value)
    apt_pkg.init_system()
    self.cache = apt.Cache()
def __init__(self, progress=None, rootdir=None, memonly=False):
    # type: (Optional[OpProgress], Optional[str], bool) -> None
    """Open the apt cache, optionally against an alternative *rootdir*.

    :param progress: optional OpProgress forwarded to open().
    :param rootdir: alternative root directory (chroot-like layout).
    :param memonly: build apt's caches in memory only.
    """
    self._cache = cast(apt_pkg.Cache, None)  # type: apt_pkg.Cache
    self._depcache = cast(apt_pkg.DepCache, None)  # type: apt_pkg.DepCache
    self._records = cast(apt_pkg.PackageRecords, None)  # type: apt_pkg.PackageRecords # noqa
    self._list = cast(apt_pkg.SourceList, None)  # type: apt_pkg.SourceList
    self._callbacks = {
    }  # type: Dict[str, List[Union[Callable[..., None],str]]] # noqa
    self._callbacks2 = {
    }  # type: Dict[str, List[Tuple[Callable[..., Any], Tuple[Any, ...], Dict[Any,Any]]]] # noqa
    self._weakref = weakref.WeakValueDictionary(
    )  # type: weakref.WeakValueDictionary[str, apt.Package] # noqa
    self._weakversions = weakref.WeakSet(
    )  # type: weakref.WeakSet[Version] # noqa
    self._changes_count = -1
    self._sorted_set = None  # type: Optional[List[str]]
    # Keep the change counter in sync whenever the cache is (re)opened
    # or modified.
    self.connect("cache_post_open", "_inc_changes_count")
    self.connect("cache_post_change", "_inc_changes_count")
    if memonly:
        # force apt to build its caches in memory
        apt_pkg.config.set("Dir::Cache::pkgcache", "")
    if rootdir:
        rootdir = os.path.abspath(rootdir)
        # Load the alternative root's own apt configuration, if present.
        if os.path.exists(rootdir + "/etc/apt/apt.conf"):
            apt_pkg.read_config_file(apt_pkg.config,
                                     rootdir + "/etc/apt/apt.conf")
        if os.path.isdir(rootdir + "/etc/apt/apt.conf.d"):
            apt_pkg.read_config_dir(apt_pkg.config,
                                    rootdir + "/etc/apt/apt.conf.d")
        apt_pkg.config.set("Dir", rootdir)
        apt_pkg.config.set("Dir::State::status",
                           rootdir + "/var/lib/dpkg/status")
        # also set dpkg to the rootdir path so that its called for the
        # --print-foreign-architectures call
        apt_pkg.config.set("Dir::bin::dpkg",
                           os.path.join(rootdir, "usr", "bin", "dpkg"))
        # create required dirs/files when run with special rootdir
        # automatically
        self._check_and_create_required_dirs(rootdir)
        # Call InitSystem so the change to Dir::State::Status is actually
        # recognized (LP: #320665)
        apt_pkg.init_system()
    # Prepare a lock object (context manager for archive lock)
    archive_dir = apt_pkg.config.find_dir("Dir::Cache::Archives")
    self._archive_lock = _WrappedLock(archive_dir)
    self.open(progress)
def setUp(self):
    """Point apt at the test_debs fixture tree and open a cache."""
    apt_pkg.init_config()
    apt_pkg.config.set("APT::Architecture", "i386")
    # FIXME: When run via test_all.py, the tests fail without this if it
    # is set in the system.
    apt_pkg.config.clear("APT::Architectures")
    fixture_root = "./data/test_debs"
    apt_pkg.config.set("Dir::State::status",
                       fixture_root + "/var/lib/dpkg/status")
    apt_pkg.config.set("Dir::State::lists",
                       fixture_root + "/var/lib/apt/lists")
    apt_pkg.config.set("Dir::Etc::sourcelist",
                       fixture_root + "/etc/apt/sources.list")
    apt_pkg.init_system()
    self.cache = apt.Cache()
def get_sorted_versions_list(repo, package):
    """Return *package* versions present in *repo*, newest first, using
    Debian version ordering.

    Bug fix: ``sorted(cmp=...)`` was removed in Python 3; the comparator
    is now wrapped with :func:`functools.cmp_to_key`.
    """
    import functools

    apt_pkg.init_system()
    output = subprocess.check_output([
        "aptly", "repo", "search", "-format='{{.Version}}'", repo, package
    ])
    return sorted(
        unique_output(output),
        key=functools.cmp_to_key(apt_pkg.version_compare),
        reverse=True
    )
def version_cmp(pkg1, pkg2, ignore_epoch=False):
    '''
    Do a cmp-style comparison on two packages. Return -1 if pkg1 < pkg2,
    0 if pkg1 == pkg2, and 1 if pkg1 > pkg2. Return None if there was a
    problem making the comparison.

    ignore_epoch : False
        Set to ``True`` to ignore the epoch when comparing versions

    .. versionadded:: 2015.8.10,2016.3.2
    '''
    # Strip any "epoch:" prefix when requested; always coerce to str.
    normalize = lambda x: str(x).split(':', 1)[-1] \
        if ignore_epoch else str(x)
    # both apt_pkg.version_compare and _cmd_quote need string arguments.
    pkg1 = normalize(pkg1)
    pkg2 = normalize(pkg2)
    # if we have apt_pkg, this will be quickier this way
    # and also do not rely on shell.
    if HAS_APTPKG:
        try:
            # the apt_pkg module needs to be manually initialized
            apt_pkg.init_system()
            # if there is a difference in versions, apt_pkg.version_compare will
            # return an int representing the difference in minor versions, or
            # 1/-1 if the difference is smaller than minor versions. normalize
            # to -1, 0 or 1.
            try:
                ret = apt_pkg.version_compare(pkg1, pkg2)
            except TypeError:
                ret = apt_pkg.version_compare(str(pkg1), str(pkg2))
            return 1 if ret > 0 else -1 if ret < 0 else 0
        except Exception:
            # Try to use shell version in case of errors w/python bindings
            pass
    # Fallback: dpkg --compare-versions exits 0 when the relation holds,
    # so probe lt/eq/gt in turn.
    try:
        for oper, ret in (('lt', -1), ('eq', 0), ('gt', 1)):
            cmd = ['dpkg', '--compare-versions', pkg1, oper, pkg2]
            retcode = __mods__['cmd.retcode'](cmd,
                                              output_loglevel='trace',
                                              python_shell=False,
                                              ignore_retcode=True)
            if retcode == 0:
                return ret
    except Exception as exc:
        log.error(exc)
    return None
def check_for_upgrade() -> Result:
    """
    If the config has changed this will initiated a rolling upgrade

    :return: Ok(()) on success or no-op; Err(...) if the candidate
        package version could not be determined.
    """
    config = hookenv.config()
    if not config.changed("source"):
        # No upgrade requested
        log("No upgrade requested")
        return Ok(())
    log("Getting current_version")
    current_version = get_glusterfs_version()
    log("Adding new source line")
    source = config("source")
    if not source.is_some():
        # No upgrade requested
        log("Source not set. Cannot continue with upgrade")
        return Ok(())
    add_source(source)
    log("Calling apt update")
    apt_update()
    log("Getting proposed_version")
    # apt_pkg must be initialized before version_compare can be used.
    apt_pkg.init_system()
    proposed_version = get_candidate_package_version("glusterfs-server")
    if proposed_version.is_err():
        return Err(proposed_version.value)
    version_compare = apt_pkg.version_compare(a=proposed_version.value,
                                              b=current_version)
    # Using semantic versioning if the new version is greater
    # than we allow the upgrade
    if version_compare > 0:
        log("current_version: {}".format(current_version))
        log("new_version: {}".format(proposed_version.value))
        log("{} to {} is a valid upgrade path.  Proceeding.".format(
            current_version, proposed_version.value))
        return roll_cluster(proposed_version.value)
    else:
        # Log a helpful error message
        log(
            "Invalid upgrade path from {} to {}. The new version needs to be \
greater than the old version".format(
                current_version, proposed_version.value), ERROR)
        return Ok(())
def resetConfig(self):
    """Reset the apt configuration to clean, host-independent defaults."""
    apt_pkg.config.clear("")
    for key in apt_pkg.config.list():
        apt_pkg.config.clear(key)
    # Avoid loading any host config files
    os.unsetenv("APT_CONFIG")
    # Point the config-file locations at /dev/null so init_config()
    # cannot pick up the host's apt.conf / apt.conf.d.
    apt_pkg.config["Dir::Etc::main"] = "/dev/null"
    apt_pkg.config["Dir::Etc::parts"] = "/dev/null"
    apt_pkg.init_config()
    apt_pkg.init_system()
    # Restore default values
    apt_pkg.config["Dir::Etc::main"] = "apt.conf"
    apt_pkg.config["Dir::Etc::parts"] = "apt.conf.d"
def __init__(self, opts):
    """Resolve apt cache directories and apply extra apt options.

    :param opts: parsed options; ``opts.aptconf`` is an iterable of
        ``Name=Value`` apt configuration overrides.
    :raises Exception: if apt reports no archive or package-lists dir.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    self.cache = apt.Cache(apt.progress.text.OpProgress())
    self.opts = opts
    self.archive_dir = apt_pkg.config.find_dir('Dir::Cache::Archives')
    if not self.archive_dir:
        raise Exception(('No archive dir is set.'
                         ' Usually it is /var/cache/apt/archives/'))
    self.lists_dir = apt_pkg.config.find_dir('Dir::State::Lists')
    # Bug fix: the original re-checked archive_dir here, so a missing
    # lists directory was silently ignored.
    if not self.lists_dir:
        raise Exception(('No package lists dir is set.'
                         ' Usually it is /var/lib/apt/lists/'))
    for c in self.opts.aptconf:
        (cname, copt) = c.split("=", 1)
        apt_pkg.config.set(cname, copt)
def main():
    """Classify packages listed in the CSV file against the apt cache."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    with open(file) as f:
        csvreader = csv.reader(f)
        for row in csvreader:
            pkgname = row[2].strip().lower()
            snumber = row[1]
            if pkgname.endswith("网页版"):
                not_found_packages.append([snumber, pkgname])
            # Bug fix: use a raw string -- "\s" in a plain literal is an
            # invalid escape (DeprecationWarning, SyntaxError in future
            # Python); the matched pattern is unchanged.
            elif re.match(r"^[0-9a-zA-Z\s]+$", pkgname):
                match_pkg(cache, snumber, pkgname)
            else:
                noname_packages.append([snumber, pkgname])
def main():
    """The main function."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    # Walk packages in name order and inspect every version they carry.
    for package in sorted(cache.packages, key=lambda p: p.name):
        for version in package.version_list:
            # We only want packages from Debian unstable main.
            in_unstable_main = any(
                pfile.origin == "Debian" and
                pfile.component == "main" and
                pfile.archive == "unstable"
                for pfile, _ in version.file_list)
            if in_unstable_main:
                check_version(version)
def aptget_deps():
    """Install the required packages via apt, exiting if anything had to
    be (re)installed so the caller can rerun with them present.

    Bug fix: the original used Python 2 ``print`` statements, which are
    a syntax error on Python 3.
    """
    print("Installing required packages %s via apt" %
          ", ".join(INST_PKG_LIST))
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    cacheManager = apt_pkg.DepCache(cache)
    # Mark anything missing; collect whether each package needed work.
    result = [install_if_missing(cache, cacheManager, pkgName)
              for pkgName in INST_PKG_LIST]
    if any(result):
        # install or update these packages
        fetchProgress = apt.progress.text.TextProgress()
        installProgress = InstallProgressSync()
        cacheManager.commit(fetchProgress, installProgress)
        sys.exit("Packages required to complete the installation have been installed. Please rerun the program.")
    else:
        print("Packages already installed.")
def test_apt_cache_reopen_is_safe_out_of_bounds_no_match(self):
    """Check that installing gone package raises correct exception"""
    with tempfile.NamedTemporaryFile() as status:
        for key, value in (("Dir::Etc::SourceList", "/dev/null"),
                           ("Dir::Etc::SourceParts", "/dev/null"),
                           ("Dir::State::Status", status.name)):
            apt_pkg.config[key] = value
        apt_pkg.init_system()
        self.write_status_file("abcdefghijklmnopqrstuvwxyz")
        cache = apt.Cache()
        pkg = cache["z"]
        original_id = pkg.id
        self.write_status_file("a")
        apt_pkg.init_system()
        cache.open()
        # "z" vanished from the new cache, so its id could not be
        # remapped and marking it must fail loudly.
        self.assertEqual(pkg.id, original_id)
        self.assertRaises(apt_pkg.CacheMismatchError, pkg.mark_delete)
def packageList():
    """Collect all apt index URIs and send them as a JSON message."""
    import apt_pkg
    apt_pkg.init_config()
    apt_pkg.init_system()
    fetcher = apt_pkg.Acquire()
    sources = apt_pkg.SourceList()
    sources.read_main_list()
    sources.get_indexes(fetcher, True)
    output = {"packages": [item.desc_uri for item in fetcher.items]}
    payload = json.dumps(output)
    pipLog.sharedInstance.debug("APT:" + payload)
    return sendMessage(payload)
def fetchPackages(self, prefix, installedOnlyPrefix=None):
    """Return a list of package dicts whose names start with *prefix*.

    :param prefix: only packages whose name starts with this are listed.
    :param installedOnlyPrefix: names matching this prefix are only
        listed when some version is actually installed.
    :return: list of ``NV(...).dict()`` entries
        (name / state / version / candidate).
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache(progress=None)
    depcache = apt_pkg.DepCache(cache)
    rt = {}
    for pkg in cache.packages:
        if pkg.name.startswith(prefix):
            cand = depcache.get_candidate_ver(pkg)
            candVersion = None
            current = pkg.current_ver.ver_str if pkg.current_ver else ''
            # Only report a candidate when it differs from the installed
            # version.
            if cand and cand.ver_str != current:
                candVersion = cand.ver_str
            nv = NV(name=pkg.name,
                    state=self.state_str(pkg.current_state),
                    version=current,
                    candidate=candVersion)
            last = rt.get(pkg.name)
            if last is not None:
                # Merge duplicate entries for the same name: prefer the
                # installed state and the higher candidate version.
                if last.state != 'installed' and nv.state == 'installed':
                    last.state = 'installed'
                if last.candidate is None and nv.candidate is not None:
                    last.candidate = nv.candidate
                elif last.candidate is not None and nv.candidate is not None:
                    if last.candidate != nv.candidate:
                        logging.info(
                            "found 2 candidate versions for %s: %s and %s",
                            pkg.name, last.candidate, nv.candidate)
                        try:
                            # NOTE(review): lexicographic string compare,
                            # not Debian version ordering --
                            # apt_pkg.version_compare would be correct.
                            if nv.candidate > last.candidate:
                                last.candidate = nv.candidate
                        except Exception as e:
                            pass
                if last.version == last.candidate:
                    last.candidate = None
            else:
                rt[pkg.name] = nv
    rtlist = []
    for k, pkg in rt.items():
        # installedOnlyPrefix filters out not-installed matches.
        if installedOnlyPrefix is not None and k.startswith(
                installedOnlyPrefix):
            if pkg.version is None or pkg.version == '':
                continue
        rtlist.append(pkg.dict())
    logging.debug("fetchPackageList: %s", str(rtlist))
    return rtlist
def __init__(self, progress=None, rootdir=None, memonly=False):
    # type: (OpProgress, str, bool) -> None
    """Open the apt cache, optionally against an alternative *rootdir*.

    :param progress: optional OpProgress forwarded to open().
    :param rootdir: alternative root directory (chroot-like layout).
    :param memonly: build apt's caches in memory only.
    """
    self._cache = cast(apt_pkg.Cache, None)  # type: apt_pkg.Cache
    self._depcache = cast(apt_pkg.DepCache, None)  # type: apt_pkg.DepCache
    self._records = cast(apt_pkg.PackageRecords, None)  # type: apt_pkg.PackageRecords # nopep8
    self._list = cast(apt_pkg.SourceList, None)  # type: apt_pkg.SourceList
    self._callbacks = {}  # type: Dict[str, List[Union[Callable[..., None],str]]] # nopep8
    self._callbacks2 = {}  # type: Dict[str, List[Tuple[Callable[..., Any], Tuple[Any, ...], Dict[Any,Any]]]] # nopep8
    self._weakref = weakref.WeakValueDictionary()  # type: weakref.WeakValueDictionary[str, apt.Package] # nopep8
    self._weakversions = weakref.WeakSet()  # type: weakref.WeakSet[Version] # nopep8
    self._changes_count = -1
    self._sorted_set = None  # type: Optional[List[str]]
    # Keep the change counter in sync whenever the cache is (re)opened
    # or modified.
    self.connect("cache_post_open", "_inc_changes_count")
    self.connect("cache_post_change", "_inc_changes_count")
    if memonly:
        # force apt to build its caches in memory
        apt_pkg.config.set("Dir::Cache::pkgcache", "")
    if rootdir:
        rootdir = os.path.abspath(rootdir)
        # Load the alternative root's own apt configuration, if present.
        if os.path.exists(rootdir + "/etc/apt/apt.conf"):
            apt_pkg.read_config_file(apt_pkg.config,
                                     rootdir + "/etc/apt/apt.conf")
        if os.path.isdir(rootdir + "/etc/apt/apt.conf.d"):
            apt_pkg.read_config_dir(apt_pkg.config,
                                    rootdir + "/etc/apt/apt.conf.d")
        apt_pkg.config.set("Dir", rootdir)
        apt_pkg.config.set("Dir::State::status",
                           rootdir + "/var/lib/dpkg/status")
        # also set dpkg to the rootdir path so that its called for the
        # --print-foreign-architectures call
        apt_pkg.config.set("Dir::bin::dpkg",
                           os.path.join(rootdir, "usr", "bin", "dpkg"))
        # create required dirs/files when run with special rootdir
        # automatically
        self._check_and_create_required_dirs(rootdir)
        # Call InitSystem so the change to Dir::State::Status is actually
        # recognized (LP: #320665)
        apt_pkg.init_system()
    # Prepare a lock object (context manager for archive lock)
    archive_dir = apt_pkg.config.find_dir("Dir::Cache::Archives")
    self._archive_lock = _WrappedLock(archive_dir)
    self.open(progress)
def test_apt_cache_iteration_safe(self):
    """Check that iterating does not produce different results.

    This failed in 1.7.0~alpha2, because one part of the code looked
    up packages in the weak dict using the pretty name, and the other
    using the full name."""
    with tempfile.NamedTemporaryFile() as status:
        for key, value in (("Dir::Etc::SourceList", "/dev/null"),
                           ("Dir::Etc::SourceParts", "/dev/null"),
                           ("Dir::State::Status", status.name)):
            apt_pkg.config[key] = value
        apt_pkg.init_system()
        self.write_status_file("abcdefghijklmnopqrstuvwxyz")
        cache = apt.Cache()
        cache["a"].mark_delete()
        marked = [pkg for pkg in cache if pkg.marked_delete]
        self.assertEqual([cache["a"]], marked)
def __init_cache(self): """Initializes the Apt cache in use by the DpkgGraph.""" # Read the system's Apt configuration. logging.debug("Initializing Apt configuration ...") apt_pkg.init_config() # pylint: disable=no-member conf = apt_pkg.config # pylint: disable=no-member # Tweak the system's Apt configuration to only read the dpkg status # database as Purgatory is only interested in the installed packages. # This has the nice sideffect that this cuts down the Apt cache opening # time drastically as less files need to be parsed. dpkg_db = conf["Dir::State::status"] conf.clear("Dir::State") if self.__dpkg_db: conf["Dir::State::status"] = self.__dpkg_db else: conf["Dir::State::status"] = dpkg_db self.__dpkg_db = conf["Dir::State::status"] logging.debug("dpkg status database: %s", self.__dpkg_db) # As Purgatory uses a special configuration the Apt cache will be # built in memory so that the valid cache on disk for the full # configuration isn't overwritten. conf["Dir::Cache::pkgcache"] = "" conf["Dir::Cache::srcpkgcache"] = "" # Initialize Apt with the tweaked config. logging.debug("Initializing Apt system ...") apt_pkg.init_system() # pylint: disable=no-member # Opening Apt cache. This step actually reads the dpkg status database. logging.debug("Opening Apt cache ...") cache = apt.cache.Cache() # Filter Apt cache to only contain installed packages. filtered_cache = apt.cache.FilteredCache(cache) filtered_cache.set_filter(apt.cache.InstalledFilter()) logging.debug("%d installed packages in the Apt cache", len(filtered_cache)) if not len(filtered_cache): raise error.EmptyAptCacheError() self.__cache = filtered_cache
def test_apt_cache_reopen_is_safe_out_of_bounds(self):
    """Check that out of bounds access is remapped correctly."""
    with tempfile.NamedTemporaryFile() as status:
        for key, value in (("Dir::Etc::SourceList", "/dev/null"),
                           ("Dir::Etc::SourceParts", "/dev/null"),
                           ("Dir::State::Status", status.name)):
            apt_pkg.config[key] = value
        apt_pkg.init_system()
        self.write_status_file("abcdefghijklmnopqrstuvwxyz")
        cache = apt.Cache()
        pkg = cache["z"]
        original_id = pkg.id
        self.write_status_file("az")
        apt_pkg.init_system()
        cache.open()
        # "z" moved into a much smaller cache, so it must have been
        # remapped onto a new, low id.
        self.assertNotEqual(pkg.id, original_id)
        self.assertLess(pkg.id, 2)
        pkg.mark_delete()
        self.assertEqual([pkg], cache.get_changes())
def init(arch, cache_dir):
    """Initialize apt for *arch* using a local cache hierarchy.

    :param arch: dpkg architecture string, e.g. ``amd64``.
    :param cache_dir: directory used as apt's ``Dir`` root.

    The original also accumulated the options into an ``apt_opts`` list
    that was never used; that dead code has been removed.
    """
    # Clear APT and Dir config trees to avoid system configuration:
    del apt_pkg.config['APT']
    del apt_pkg.config['Dir']
    del apt_pkg.config['Dpkg']
    # Initialize apt configuration, use our local apt cache hierarchy:
    for opt, val in {
        'APT::Architecture': arch,
        'APT::Architectures::': '',
        'Dir': cache_dir,
        'Dir::State::Status': 'dpkg.status',
        'Acquire::Languages': 'none',
    }.items():
        apt_pkg.config.set(opt, val)
    apt_pkg.init_config()
    apt_pkg.init_system()