def save_state(self, sourcedir, target, with_dpkg_repack=False, with_dpkg_status=False, scrub_sources=False, extra_files=None):
    """Save the current system state (installed packages, enabled
    repositories ...) into the apt-state.tar.gz file in targetdir
    """
    # Directory targets get the default clone filename appended;
    # plain file targets are forced to carry a .tar.gz suffix.
    if os.path.isdir(target):
        target = os.path.join(target, self.CLONE_FILENAME)
    else:
        if not target.endswith(".tar.gz"):
            target += ".apt-clone.tar.gz"
    # When cloning a foreign root, point the global apt configuration at
    # that tree before the writers below read package/source information.
    if sourcedir != '/':
        apt_pkg.init_config()
        apt_pkg.config.set("Dir", sourcedir)
        apt_pkg.config.set("Dir::State::status",
                           os.path.join(sourcedir, 'var/lib/dpkg/status'))
        apt_pkg.init_system()
    # Collect every piece of state into one gzipped tarball.
    with tarfile.open(name=target, mode="w:gz") as tar:
        self._write_uname(tar)
        self._write_state_installed_pkgs(sourcedir, tar)
        self._write_state_auto_installed(tar)
        self._write_state_sources_list(tar, scrub_sources)
        self._write_state_apt_preferences(tar)
        self._write_state_apt_keyring(tar)
        self._write_state_extra_files(extra_files, tar)
        if with_dpkg_status:
            self._write_state_dpkg_status(tar)
        if with_dpkg_repack:
            self._dpkg_repack(tar)
def updateSources(self, dist):
    # Build (or refresh) a minimal APT root for *dist* and update its
    # package lists from the configured mirror.
    path = self.getDistDir(dist)
    if not os.path.exists(path):
        os.makedirs(path)
    # Create needed directories
    for d in ['etc/apt/apt.conf.d',
              'etc/apt/preferences.d',
              'var/lib/apt/lists/partial',
              'var/cache/apt/archives/partial',
              'var/lib/dpkg']:
        repo_dir = os.path.join(path, d)
        if not os.path.exists(repo_dir):
            os.makedirs(repo_dir)
    # Create sources.list
    sources_list = os.path.join(path, 'etc/apt/sources.list')
    with open(sources_list, 'w') as f:
        f.write('deb-src [trusted=yes] %s %s %s\n'
                % (self.mirrorURL(), dist, " ".join(self.components())))
    # Setup configuration: wipe any inherited global apt settings so
    # only this sandbox root is visible, then re-run init with Dir set.
    for key in apt_pkg.config.keys():
        apt_pkg.config.clear(key)
    apt_pkg.config.set('Dir', path)
    apt_pkg.init_config()
    cache = apt.Cache(rootdir=path)
    cache.update()
def get_conf_file():
    """Return the absolute path of the spacewalk.list APT source file."""
    apt_pkg.init_config()
    parts_dir = apt_pkg.config.get('Dir::Etc::sourceparts', 'sources.list.d')
    # A relative sourceparts value is interpreted relative to /etc/apt.
    if not os.path.isabs(parts_dir):
        parts_dir = os.path.join('/etc/apt', parts_dir)
    return os.path.join(parts_dir, 'spacewalk.list')
def resetConfig(self):
    """Wipe the global apt configuration and re-run apt initialisation."""
    apt_pkg.config.clear("")
    for entry in apt_pkg.config.list():
        apt_pkg.config.clear(entry)
    apt_pkg.init_config()
    apt_pkg.init_system()
def setUp(self):
    """Reset the apt configuration and snapshot it for later restore."""
    # undo any config manipulations done by individual tests
    apt_pkg.init_config()
    # snapshot every key/value so tearDown can restore the config
    self._cnf = {key: apt_pkg.config.find(key)
                 for key in apt_pkg.config.keys()}
def main():
    """Parse every fragment in /etc/apt/apt.conf.d and print the result
    of setting huhu::abc to "lumpi" in the 10glatzor fragment."""
    apt_pkg.init_config()
    cw = ConfigWriter()
    confdir = "/etc/apt/apt.conf.d"
    for filename in sorted(os.listdir(confdir)):
        # Use a context manager so each fragment's file handle is closed
        # deterministically instead of leaking until garbage collection.
        with open(os.path.join(confdir, filename)) as fragment:
            cw.parse(fragment.readlines())
    print(cw.set_value("huhu::abc", "lumpi", "10glatzor"))
def aptcache(self):
    # Lazily import apt and build the cache on first access; the result
    # is memoised on self.apt_cache for subsequent calls.
    if not self.apt_cache:
        #import warnings
        ## Yes, apt, thanks, I know, the api isn't stable, thank you so very much
        ##warnings.simplefilter('ignore', FutureWarning)
        #warnings.filterwarnings("ignore","apt API not stable yet")
        import apt
        import apt_pkg
        #warnings.resetwarnings()
        if self.quietapt:
            # Progress subclass that swallows all apt progress output.
            class AptSilentProgress(apt.progress.text.OpProgress):
                def __init__(self):
                    pass
                def done(self):
                    pass
                def update(self, percent=None):
                    pass
            aptprogress = AptSilentProgress()
        else:
            aptprogress = None
        apt_pkg.init_config()
        self.apt_cache = apt.Cache(progress=aptprogress)
    return self.apt_cache
def __init__(self, bus=None):
    """Export this service object on *bus* (defaults to the system bus)
    and initialise apt's global configuration."""
    apt_pkg.init_config()
    if bus is None:
        bus = dbus.SystemBus()
    name = dbus.service.BusName(self.DBUS_INTERFACE_NAME, bus=bus)
    dbus.service.Object.__init__(self, name, '/')
def setUp(self):
    # Build a throwaway APT root under ./data/tmp pointed at a local
    # deb-src repository so the cache update needs no network access.
    testcommon.TestCase.setUp(self)
    rootdir = "./data/tmp"
    if os.path.exists(rootdir):
        shutil.rmtree(rootdir)
    try:
        os.makedirs(os.path.join(rootdir, "etc", "apt"))
    except OSError:
        pass
    # Wipe any inherited apt configuration before re-initialising with
    # the sandbox Dir.
    for k in apt_pkg.config.keys():
        apt_pkg.config.clear(k)
    apt_pkg.config["Dir"] = os.path.abspath(rootdir)
    apt_pkg.init_config()
    # set a local sources.list that does not need the network
    base_sources = os.path.abspath(os.path.join(rootdir, "etc", "apt", "sources.list"))
    # main sources.list
    sources_list = base_sources
    with open(sources_list, "w") as f:
        repo = os.path.abspath("./data/test-source-repo")
        f.write("deb-src [trusted=yes] copy:%s /\n" % repo)
    self.assertTrue(os.path.exists(sources_list))
    # update a single sources.list
    cache = apt.Cache(rootdir=rootdir)
    cache.update(sources_list=sources_list)
def setUp(self):
    """Prepare an isolated APT root under ./data/tmp backed by a local
    deb-src repository, then update its package lists."""
    testcommon.TestCase.setUp(self)
    root = "./data/tmp"
    if os.path.exists(root):
        shutil.rmtree(root)
    try:
        os.makedirs(os.path.join(root, "etc", "apt"))
    except OSError:
        pass
    # start from a clean apt configuration scoped to the sandbox
    for key in apt_pkg.config.keys():
        apt_pkg.config.clear(key)
    apt_pkg.config["Dir"] = os.path.abspath(root)
    apt_pkg.init_config()
    # a local sources.list that does not need the network
    sources_list = os.path.abspath(
        os.path.join(root, "etc", "apt", "sources.list"))
    repo_path = os.path.abspath("./data/test-source-repo")
    with open(sources_list, "w") as sources:
        sources.write("deb-src [trusted=yes] copy:%s /\n" % repo_path)
    self.assertTrue(os.path.exists(sources_list))
    # update just this single sources.list
    cache = apt.Cache(rootdir=root)
    cache.update(sources_list=sources_list)
def _configure_apt(self):
    """ Initializes the :mod:`apt_pkg` module global settings. """
    apt_pkg.init_config()
    apt_pkg.init_system()
    # Layer our own configuration file on top of the defaults, then
    # point the configuration directory at the cache root.
    apt_pkg.read_config_file(apt_pkg.config, self.conf_file_path)
    apt_pkg.config.set('Dir::Etc', self.cache_root_dir)
def get_cache_list():
    """Initialise apt and return a fresh :class:`apt.Cache` instance.

    The dead ``cache = None`` pre-assignment and the commented-out
    legacy ``InitConfig``/``InitSystem`` calls were removed.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    return apt.Cache()
def start_detect_source_available(self):
    # Start a 1s periodic reachability check against a mirror host.
    apt_pkg.init_config()
    apt_pkg.init_system()
    source_list_obj = apt_pkg.SourceList()
    source_list_obj.read_main_list()
    # host part of the first configured source's URI
    uri = source_list_obj.list[0].uri.split("/")[2]
    # NOTE(review): the host resolved above is immediately overwritten
    # with a hardcoded one, making the SourceList lookup dead code —
    # looks like a debug leftover; confirm before removing either line.
    uri = 'www.baidu.com'
    gobject.timeout_add(1000, self.network_detect_loop, uri)
def get_current_mirror_hostname():
    """Return scheme://host of the first configured APT source."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    sources = apt_pkg.SourceList()
    sources.read_main_list()
    first_uri = sources.list[0].uri
    scheme = first_uri.split(":")[0]
    host = first_uri.split("/")[2]
    return "%s://%s" % (scheme, host)
def __init__(self, view, path):
    """Remember the view and cdrom path, prepare empty result sets and
    initialise apt's configuration."""
    apt_pkg.init_config()
    self.view = view
    self.cdrompath = path
    # the directories we found on disk with signatures, packages and i18n
    self.packages, self.signatures, self.i18n = set(), set(), set()
def setUp(self):
    """Reset the apt config, snapshot it, and drop update hooks."""
    # undo any config manipulations done by individual tests
    apt_pkg.init_config()
    # snapshot every key/value so it can be restored afterwards
    self._cnf = {key: apt_pkg.config.find(key)
                 for key in apt_pkg.config.keys()}
    # these hooks would shell out during cache updates in the tests
    for hook in ("APT::Update::Post-Invoke",
                 "APT::Update::Post-Invoke-Success"):
        apt_pkg.config.clear(hook)
def is_apt_package_installed(package_name):
    """
    Check whether a package is installed, via apt.

    @param package_name: Package name, e.g. ubuntu-desktop.
    @return: True if the package is installed, False otherwise.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    # NOTE(review): current_state is compared against both an INSTSTATE_*
    # and a CURSTATE_* constant; apt_pkg documents current_state in terms
    # of CURSTATE_* values — confirm INSTSTATE_REINSTREQ is intended here.
    return apt_pkg.Cache()[package_name].current_state in (
        apt_pkg.INSTSTATE_REINSTREQ, apt_pkg.CURSTATE_INSTALLED)
def __init__(self):
    """Initialise the apt package system and reset all lazy caches."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    # No progress reporter is wanted in a munin plugin; the documented
    # None progress did not work, so caches are created lazily elsewhere.
    for attr in ("_cache", "_depcache",
                 "_installedPackages", "_upgradablePackages"):
        setattr(self, attr, None)
def __init__(self):
    """Initial attributes."""
    install_misc.InstallBase.__init__(self)

    # Record that installation started. (The original repeated this
    # makedirs/touch block twice verbatim; the duplicate was removed.)
    if not os.path.exists('/var/lib/ubiquity'):
        os.makedirs('/var/lib/ubiquity')
    with open('/var/lib/ubiquity/started-installing', 'a'):
        pass

    self.update_proc = None

    # Pick the source filesystem the target will be copied from.
    if os.path.isdir('/rofs'):
        self.source = '/rofs'
    elif os.path.isdir('/UNIONFS'):
        # Klaus Knopper says this may not actually work very well
        # because it'll copy the WHOLE WORLD (~12GB).
        self.source = '/UNIONFS'
    else:
        self.source = '/var/lib/ubiquity/source'
    self.db = debconf.Debconf()
    self.blacklist = {}

    # In OEM user-config mode we operate on the running system itself,
    # so there is nothing to copy and no apt redirection is needed.
    if 'UBIQUITY_OEM_USER_CONFIG' in os.environ:
        self.source = None
        self.target = '/'
        return

    assert os.path.ismount(self.target), \
        'Failed to mount the target: %s' % str(self.target)

    self.select_language_packs(save=True)
    self.select_ecryptfs()
    if self.db.get('ubiquity/install/generate-blacklist') == 'true':
        self.db.progress('START', 0, 100, 'ubiquity/install/title')
        self.db.progress('INFO', 'ubiquity/install/blacklist')
        self.generate_blacklist()

    # Redirect apt/dpkg at the target root for the rest of the install.
    apt_pkg.init_config()
    apt_pkg.config.set("Dir", self.target)
    apt_pkg.config.set("Dir::State::status",
                       self.target_file('var/lib/dpkg/status'))
    apt_pkg.config.set("APT::GPGV::TrustedKeyring",
                       self.target_file('etc/apt/trusted.gpg'))
    apt_pkg.config.set("Acquire::gpgv::Options::",
                       "--ignore-time-conflict")
    apt_pkg.config.set("DPkg::Options::", "--root=%s" % self.target)
    # We don't want apt-listchanges or dpkg-preconfigure, so just clear
    # out the list of pre-installation hooks.
    apt_pkg.config.clear("DPkg::Pre-Install-Pkgs")
    apt_pkg.init_system()
def setUp(self):
    """Point apt at the working directory with a template-covered arch."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    # only i386/amd64 are covered by the test templates
    if apt_pkg.config["APT::Architecture"] not in ('i386', 'amd64'):
        apt_pkg.config.set("APT::Architecture", "i386")
    apt_pkg.config.set("Dir::Etc", os.getcwd())
    apt_pkg.config.set("Dir::Etc::sourceparts", "/xxx")
    build_templates = "../build/data/templates"
    self.templates = (os.path.abspath(build_templates)
                      if os.path.exists(build_templates)
                      else "/usr/share/python-apt/templates/")
def setUp(self):
    """Configure apt for powerpc with an isolated sourceparts dir."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    apt_pkg.config.set("APT::Architecture", "powerpc")
    apt_pkg.config.set("Dir::Etc",
                       os.path.abspath("data/aptsources_ports"))
    # throwaway sourceparts so the host's fragments are not picked up
    apt_pkg.config.set("Dir::Etc::sourceparts", tempfile.mkdtemp())
    build_templates = "../build/data/templates"
    if os.path.exists(build_templates):
        self.templates = os.path.abspath(build_templates)
    else:
        self.templates = "/usr/share/python-apt/templates/"
def __init__(self, fstab="/etc/fstab"):
    """Load snapshot-related settings from the apt configuration."""
    apt_pkg.init_config()
    cfg = apt_pkg.config
    # snapshots can be switched off wholesale via the environment
    self.DISABLED = 'APT_NO_SNAPSHOTS' in os.environ
    self.ROOT = cfg.get("APT::Snapshots::RootSubvolume", "@")
    snap_base = cfg.get("APT::Snapshots::Prefix", "@apt-snapshot")
    self.SNAP_PREFIX = snap_base + '-'
    self.BACKUP_PREFIX = self.SNAP_PREFIX + "old-root-"
    self.fstab = Fstab(fstab)
    self.commands = LowLevelCommands()
    self._btrfs_root_mountpoint = None
def setUp(self):
    """Configure apt for powerpc using the ports test data."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    apt_pkg.config.set("APT::Architecture", "powerpc")
    apt_pkg.config.set("Dir::Etc",
                       os.path.abspath("data/aptsources_ports"))
    apt_pkg.config.set("Dir::Etc::sourceparts", "/xxx")
    build_templates = "../build/data/templates"
    self.templates = (os.path.abspath(build_templates)
                      if os.path.exists(build_templates)
                      else "/usr/share/python-apt/templates/")
def main():
    """Print the download URI of every index file apt would fetch."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    fetcher = apt_pkg.Acquire()
    sources = apt_pkg.SourceList()
    # load the configured sources and queue all their indexes
    sources.read_main_list()
    sources.get_indexes(fetcher, True)
    # show the URI of every queued item
    for entry in fetcher.items:
        print(entry.desc_uri)
def main():
    """List all essential, then all important, packages in the cache."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    print("Essential packages:")
    for package in cache.packages:
        if package.essential:
            print(" ", package.name)
    print("Important packages:")
    for package in cache.packages:
        if package.important:
            print(" ", package.name)
def main():
    """Print cache statistics plus the essential/important packages.

    The tail of this function used Python 2 ``print`` statements while
    the head used ``print()`` calls — a SyntaxError under Python 3. All
    output is rewritten as single-argument ``print()`` calls that emit
    identical text on both Python 2 and 3.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    print("depends_count:\t%s" % cache.depends_count)
    print("group_count:\t%s" % cache.group_count)
    print("package_count:\t%s" % cache.package_count)
    print("package_file_count:\t%s" % cache.package_file_count)
    print("provides_count:\t%s" % cache.provides_count)
    print("ver_file_count:\t%s" % cache.ver_file_count)
    print("version_count:\t%s" % cache.version_count)
    print("Essential packages:")
    for pkg in cache.packages:
        if pkg.essential:
            print("  " + pkg.name)
    print("Important packages:")
    for pkg in cache.packages:
        if pkg.important:
            print("  " + pkg.name)
    print("")
    for pkg in cache.packages:
        print("%s %s" % (pkg.id, pkg.name))
    for pkgfile in cache.file_list:
        # Further PackageFile attributes available here if ever needed:
        # index_type, not_source, site, not_automatic, archive,
        # architecture, label, origin, size, version.
        print("Package-File: %s" % pkgfile.filename)
        print("id: %s" % pkgfile.id)
        print("")
def setUp(self):
    """Build an apt.Cache against the bundled test_debs tree."""
    apt_pkg.init_config()
    apt_pkg.config.set("APT::Architecture", "i386")
    # FIXME: When run via test_all.py, the tests fail without this if it
    # is set in the system.
    apt_pkg.config.clear("APT::Architectures")
    apt_pkg.config.set("Dir::State::status",
                       "./data/test_debs/var/lib/dpkg/status")
    apt_pkg.config.set("Dir::State::lists",
                       "./data/test_debs/var/lib/apt/lists")
    apt_pkg.config.set("Dir::Etc::sourcelist",
                       "./data/test_debs/etc/apt/sources.list")
    apt_pkg.init_system()
    self.cache = apt.Cache()
def setUp(self):
    """Point apt at the test_debs fixture tree and open the cache."""
    apt_pkg.init_config()
    apt_pkg.config.set("APT::Architecture", "i386")
    # FIXME: When run via test_all.py, the tests fail without this if it
    # is set in the system.
    apt_pkg.config.clear("APT::Architectures")
    for key, value in (
            ("Dir::State::status", "./data/test_debs/var/lib/dpkg/status"),
            ("Dir::State::lists", "./data/test_debs/var/lib/apt/lists"),
            ("Dir::Etc::sourcelist",
             "./data/test_debs/etc/apt/sources.list")):
        apt_pkg.config.set(key, value)
    apt_pkg.init_system()
    self.cache = apt.Cache()
def __init__(self, opts):
    """Initialise apt, resolve the archive/lists directories and apply
    any command-line configuration overrides.

    Raises Exception when either directory is not configured.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    self.cache = apt.Cache(apt.progress.text.OpProgress())
    self.opts = opts
    self.archive_dir = apt_pkg.config.find_dir('Dir::Cache::Archives')
    if not self.archive_dir:
        raise Exception(('No archive dir is set.'
                         ' Usually it is /var/cache/apt/archives/'))
    self.lists_dir = apt_pkg.config.find_dir('Dir::State::Lists')
    # Bug fix: this used to re-test archive_dir (copy/paste), so a
    # missing lists directory was never reported.
    if not self.lists_dir:
        raise Exception(('No package lists dir is set.'
                         ' Usually it is /var/lib/apt/lists/'))
    # apply -o style overrides given on the command line
    for c in self.opts.aptconf:
        (cname, copt) = c.split("=", 1)
        apt_pkg.config.set(cname, copt)
def main():
    """Read the worksheet CSV and sort each row into matched / unmatched
    package lists."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    # Raw string: "\s" in a plain literal is an invalid escape sequence
    # (DeprecationWarning today, a SyntaxError in future Pythons).
    # Compile once, outside the per-row loop.
    ascii_name = re.compile(r"^[0-9a-zA-Z\s]+$")
    with open(file) as f:
        csvreader = csv.reader(f)
        for row in csvreader:
            pkgname = row[2].strip().lower()
            snumber = row[1]
            if pkgname.endswith("网页版"):
                # web-only entries cannot match an archive package
                not_found_packages.append([snumber, pkgname])
            elif ascii_name.match(pkgname):
                match_pkg(cache, snumber, pkgname)
            else:
                noname_packages.append([snumber, pkgname])
def resetConfig(self):
    # Drop every key from the global apt configuration so each test
    # starts from a clean slate.
    apt_pkg.config.clear("")
    for key in apt_pkg.config.list():
        apt_pkg.config.clear(key)
    # Avoid loading any host config files
    os.unsetenv("APT_CONFIG")
    apt_pkg.config["Dir::Etc::main"] = "/dev/null"
    apt_pkg.config["Dir::Etc::parts"] = "/dev/null"
    apt_pkg.init_config()
    apt_pkg.init_system()
    # Restore default values
    apt_pkg.config["Dir::Etc::main"] = "apt.conf"
    apt_pkg.config["Dir::Etc::parts"] = "apt.conf.d"
def auto_upgrades_enabled():
    """
    Checks if auto-updates are enabled on a system.

    :return: boolean (None when the distribution is not recognised)
    """
    if is_debian():
        # For apt-based distros.
        import apt_pkg
        apt_pkg.init_config()
        config = apt_pkg.config
        if 'Unattended-Upgrade' in config and 'APT::Periodic' in config:
            apt_periodic = config.subtree('APT::Periodic')
            unattended_upgrade = apt_periodic.get('Unattended-Upgrade')
            update_package_lists = apt_periodic.get('Update-Package-Lists')
            allowed_origins = config.subtree('Unattended-Upgrade').value_list(
                'Allowed-Origins')  # Ubuntu
            origins_pattern = config.subtree('Unattended-Upgrade').value_list(
                'Origins-Pattern')  # Debian
            # Both periodic jobs must be switched on AND the security
            # origin must appear in either the Ubuntu-style
            # Allowed-Origins list or the Debian-style Origins-Pattern.
            return unattended_upgrade == '1' and \
                update_package_lists == '1' and \
                (('${distro_id}:${distro_codename}' in allowed_origins and
                  '${distro_id}:${distro_codename}-security' in allowed_origins) or
                 'origin=Debian,codename=${distro_codename},label=Debian-Security'
                 in origins_pattern)
        return False
    elif is_amazon_linux2():
        # For Amazon Linux 2.
        # 1. check if yum-cron installed
        # 2. check if it's running
        # 3. check if it has proper values in its config file
        import rpm
        try:
            from sh import systemctl
        except ImportError:
            # No systemd - probably yum-cron is not running
            # TODO: use "service" executable which also works without systemd and on older systems
            return False
        ts = rpm.ts()
        package_iterator = ts.dbMatch('name', 'yum-cron')
        if package_iterator.count() > 0:
            # Package is installed.
            result = systemctl(['is-active', 'yum-cron'],
                               _ok_code=[0, 3]).stdout.decode().strip()
            if result == 'active':
                config = open('/etc/yum/yum-cron.conf').read()
                if '\ndownload_updates = yes' in config and '\napply_updates = yes' in config:
                    return True
        return False
    return None
def main():
    """Run check_version over every Debian unstable/main version."""
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    # walk packages in name order
    for package in sorted(cache.packages, key=lambda p: p.name):
        for version in package.version_list:
            for pkgfile, _ in version.file_list:
                # only versions published in Debian unstable main
                if (pkgfile.origin == "Debian"
                        and pkgfile.component == "main"
                        and pkgfile.archive == "unstable"):
                    check_version(version)
                    break
def packageList():
    """Send (and return) a JSON list of every index URI apt would fetch."""
    import apt_pkg
    apt_pkg.init_config()
    apt_pkg.init_system()
    fetcher = apt_pkg.Acquire()
    sources = apt_pkg.SourceList()
    sources.read_main_list()
    sources.get_indexes(fetcher, True)
    # collect the URI of every queued item
    output = {"packages": [entry.desc_uri for entry in fetcher.items]}
    pipLog.sharedInstance.debug("APT:" + json.dumps(output))
    return sendMessage(json.dumps(output))
def aptget_deps():
    """Install any missing packages from INST_PKG_LIST via apt, then exit
    so the caller can be re-run with all dependencies present.

    The Python 2 ``print`` statements (a SyntaxError on Python 3) were
    rewritten as single-argument ``print()`` calls, and the side-effect
    ``map``/``lambda`` was replaced by a list comprehension.
    """
    print("Installing required packages %s via apt" % ", ".join(INST_PKG_LIST))
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache()
    depcache = apt_pkg.DepCache(cache)
    # a truthy entry means the package was actually scheduled
    pending = [install_if_missing(cache, depcache, name)
               for name in INST_PKG_LIST]
    if any(pending):
        # install or update these packages
        fetch_progress = apt.progress.text.TextProgress()
        install_progress = InstallProgressSync()
        depcache.commit(fetch_progress, install_progress)
        sys.exit("Packages required to complete the installation have been installed. Please rerun the program.")
    else:
        print("Packages already installed.")
def fetchPackages(self, prefix, installedOnlyPrefix=None):
    """Return a list of package dicts for names starting with *prefix*.

    Each entry records the installed version/state and, when newer, the
    candidate version. Duplicate cache entries for the same name are
    merged. Names matching *installedOnlyPrefix* are only reported when
    actually installed.
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    cache = apt_pkg.Cache(progress=None)
    depcache = apt_pkg.DepCache(cache)
    rt = {}
    for pkg in cache.packages:
        if not pkg.name.startswith(prefix):
            continue
        cand = depcache.get_candidate_ver(pkg)
        candVersion = None
        current = pkg.current_ver.ver_str if pkg.current_ver else ''
        if cand and cand.ver_str != current:
            candVersion = cand.ver_str
        nv = NV(name=pkg.name,
                state=self.state_str(pkg.current_state),
                version=current,
                candidate=candVersion)
        last = rt.get(pkg.name)
        if last is not None:
            # Merge duplicates: prefer the installed state and the
            # newer candidate version.
            if last.state != 'installed' and nv.state == 'installed':
                last.state = 'installed'
            if last.candidate is None and nv.candidate is not None:
                last.candidate = nv.candidate
            elif last.candidate is not None and nv.candidate is not None:
                if last.candidate != nv.candidate:
                    logging.info(
                        "found 2 candidate versions for %s: %s and %s",
                        pkg.name, last.candidate, nv.candidate)
                    try:
                        # Bug fix: plain ">" compared version strings
                        # lexicographically (so "9.0" > "10.0"); use
                        # apt's Debian version comparison instead.
                        if apt_pkg.version_compare(
                                nv.candidate, last.candidate) > 0:
                            last.candidate = nv.candidate
                    except Exception:
                        # log instead of silently swallowing the error
                        logging.debug(
                            "candidate comparison failed for %s", pkg.name)
            if last.version == last.candidate:
                last.candidate = None
        else:
            rt[pkg.name] = nv
    rtlist = []
    for name, entry in rt.items():
        if installedOnlyPrefix is not None and name.startswith(
                installedOnlyPrefix):
            # skip not-installed entries for this prefix
            if entry.version is None or entry.version == '':
                continue
        rtlist.append(entry.dict())
    logging.debug("fetchPackageList: %s", str(rtlist))
    return rtlist
def __init__(self, config_dir, state_dir):
    super().__init__(None)
    # Point apt at the sandboxed config/state/cache directories before
    # init_config() so derived defaults are based on them.
    config = apt_pkg.config
    config["Dir::Etc"] = os.path.realpath(config_dir)
    config["Dir::State"] = os.path.join(os.path.realpath(state_dir), "state")
    config["Dir::Cache"] = os.path.join(os.path.realpath(state_dir), "cache")
    apt_pkg.init_config()
    apt_pkg.init_system()
    # the lists/cache directories must exist before opening the cache
    lists = apt_pkg.config.find_dir("Dir::State::Lists")
    os.makedirs(lists, exist_ok=True)
    os.makedirs(config["Dir::Cache"], exist_ok=True)
    self.cache = apt_pkg.Cache(None)  # None: no progress output
    self.depcache = apt_pkg.DepCache(self.cache)
    self.source_list = apt_pkg.SourceList()
    self.source_list.read_main_list()
def __init__(self, interactive: bool = False): self.interactive = interactive # constants # apt uses MB rather than MiB, so let's stay consistent self.MB = 1000**2 # downloads larger than this require confirmation or fail self.max_download_size_default = 1.5 * self.MB self.max_download_size = self.max_download_size_default max_download_size_msg_template = "\ To retrieve the full changelog, %s MB have to be downloaded.\n%s\ \n\ Proceed with the download?" self.max_download_size_msg_lc = max_download_size_msg_template % ( "%.1f", "Otherwise we will try to retrieve just the last change.\n") self.max_download_size_msg = max_download_size_msg_template % ("%.1f", "") self.max_download_size_msg_unknown = max_download_size_msg_template % ( "an unknown amount of", "") self.apt_cache = None self.apt_cache_date = None self.candidate = None # get apt's configuration apt_pkg.init_config() if apt_pkg.config.exists("Acquire::Changelogs::URI::Origin"): self.apt_origins = apt_pkg.config.subtree( "Acquire::Changelogs::URI::Origin") else: self.apt_origins = None if apt_pkg.config.exists("Dir::Cache::pkgcache"): self.apt_cache_path = apt_pkg.config.find_dir("Dir::Cache") self.pkgcache = apt_pkg.config.find_file("Dir::Cache::pkgcache") else: self.apt_cache = "invalid" if (self.apt_cache or not os.path.isdir(self.apt_cache_path) or not os.path.isfile(self.pkgcache)): print( "E: Invalid APT configuration found, try to run `apt update` first", file=sys.stderr) self.close(99)
def init(arch, cache_dir):
    # Clear APT and Dir config trees to avoid system configuration:
    del apt_pkg.config['APT']
    del apt_pkg.config['Dir']
    del apt_pkg.config['Dpkg']
    # Initialize apt configuration, use our local apt cache hierarchy:
    apt_opts = []
    for opt, val in {
            'APT::Architecture': arch,
            'APT::Architectures::': '',
            'Dir': cache_dir,
            'Dir::State::Status': 'dpkg.status',
            'Acquire::Languages': 'none',
            }.items():
        apt_pkg.config.set(opt, val)
        # NOTE(review): apt_opts is collected here but never used within
        # this function — confirm whether a caller was meant to receive
        # it before removing.
        apt_opts.append('%s=%s' % (opt, val))
    apt_pkg.init_config()
    apt_pkg.init_system()
def __init_cache(self): """Initializes the Apt cache in use by the DpkgGraph.""" # Read the system's Apt configuration. logging.debug("Initializing Apt configuration ...") apt_pkg.init_config() # pylint: disable=no-member conf = apt_pkg.config # pylint: disable=no-member # Tweak the system's Apt configuration to only read the dpkg status # database as Purgatory is only interested in the installed packages. # This has the nice sideffect that this cuts down the Apt cache opening # time drastically as less files need to be parsed. dpkg_db = conf["Dir::State::status"] conf.clear("Dir::State") if self.__dpkg_db: conf["Dir::State::status"] = self.__dpkg_db else: conf["Dir::State::status"] = dpkg_db self.__dpkg_db = conf["Dir::State::status"] logging.debug("dpkg status database: %s", self.__dpkg_db) # As Purgatory uses a special configuration the Apt cache will be # built in memory so that the valid cache on disk for the full # configuration isn't overwritten. conf["Dir::Cache::pkgcache"] = "" conf["Dir::Cache::srcpkgcache"] = "" # Initialize Apt with the tweaked config. logging.debug("Initializing Apt system ...") apt_pkg.init_system() # pylint: disable=no-member # Opening Apt cache. This step actually reads the dpkg status database. logging.debug("Opening Apt cache ...") cache = apt.cache.Cache() # Filter Apt cache to only contain installed packages. filtered_cache = apt.cache.FilteredCache(cache) filtered_cache.set_filter(apt.cache.InstalledFilter()) logging.debug("%d installed packages in the Apt cache", len(filtered_cache)) if not len(filtered_cache): raise error.EmptyAptCacheError() self.__cache = filtered_cache
def setUp(self):
    # clean custom apt config - once apt_pkg.config.clear() is exposed
    # use that
    for d in apt_pkg.config.keys():
        apt_pkg.config.clear(d)
    apt_pkg.init_config()
    # setup our custom vars
    apt_pkg.config.set("Dir", "/")
    apt_pkg.config.set("dir::state::status", "/var/lib/dpkg/status")
    # per-test scratch area with a minimal dpkg/apt layout
    self.tempdir = tempfile.mkdtemp("apt-clone-tests")
    os.makedirs(os.path.join(self.tempdir, "var/lib/dpkg/"))
    # ensure we are the right arch
    os.makedirs(os.path.join(self.tempdir, "etc/apt"))
    # drop hooks that would shell out during cache operations
    with open(os.path.join(self.tempdir, "etc/apt/apt.conf"), "w") as fp:
        fp.write('''
#clear Dpkg::Post-Invoke;
#clear Dpkg::Pre-Invoke;
#clear APT::Update;
''')
def __init__(self, progress=None, rootdir=None, memonly=False):
    """Build the cache wrapper, optionally against an alternative
    *rootdir* or with the caches forced into memory (*memonly*)."""
    self._cache = None
    self._depcache = None
    self._records = None
    self._list = None
    self._callbacks = {}
    self._weakref = weakref.WeakValueDictionary()
    self._set = set()
    self._fullnameset = set()
    self._changes_count = -1
    self._sorted_set = None
    self.connect("cache_post_open", self._inc_changes_count)
    self.connect("cache_post_change", self._inc_changes_count)
    if memonly:
        # force apt to build its caches in memory
        apt_pkg.config.set("Dir::Cache::pkgcache", "")
    if rootdir:
        rootdir = os.path.abspath(rootdir)
        # clear old config first (Bug#728274)
        apt_pkg.config.clear("APT")
        apt_pkg.config.set("Dir", rootdir)
        apt_pkg.init_config()
        # layer the rootdir's own apt configuration on top, if any
        if os.path.exists(rootdir + "/etc/apt/apt.conf"):
            apt_pkg.read_config_file(apt_pkg.config,
                                     rootdir + "/etc/apt/apt.conf")
        if os.path.isdir(rootdir + "/etc/apt/apt.conf.d"):
            apt_pkg.read_config_dir(apt_pkg.config,
                                    rootdir + "/etc/apt/apt.conf.d")
        apt_pkg.config.set("Dir::State::status",
                           rootdir + "/var/lib/dpkg/status")
        # also set dpkg to the rootdir path so that its called for the
        # --print-foreign-architectures call
        apt_pkg.config.set("Dir::bin::dpkg",
                           os.path.join(rootdir, "usr", "bin", "dpkg"))
        # create required dirs/files when run with special rootdir
        # automatically
        self._check_and_create_required_dirs(rootdir)
        # Call InitSystem so the change to Dir::State::Status is actually
        # recognized (LP: #320665)
        apt_pkg.init_system()
    self.open(progress)
def setUp(self):
    """Snapshot the apt config, then sandbox it under a temp Dir."""
    # undo any config manipulations done by individual tests
    apt_pkg.init_config()
    # snapshot the config; restored automatically on teardown
    snapshot = {key: apt_pkg.config.find(key)
                for key in apt_pkg.config.keys()}
    self.addCleanup(self._restore_apt_config, snapshot)
    self.tmpdir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, self.tmpdir)
    apt_pkg.config.set("Dir", self.tmpdir)
    apt_pkg.config.set("Dir::Bin::Apt-key", "fakeroot-apt-key")
    apt_pkg.config.set("Dir::Etc", "etc/apt/")
    trusted_dir = apt_pkg.config.find_dir("Dir::Etc::Trustedparts")
    conf_dir = apt_pkg.config.find_dir("Dir::Etc::parts")
    # both must resolve inside the sandbox
    self.assertTrue(trusted_dir.startswith(self.tmpdir))
    os.makedirs(trusted_dir)
    os.makedirs(conf_dir)
    shutil.copy("fakeroot-apt-key", self.tmpdir)
def get_current_mirror_hostname():
    """Return the hostname of the preferred deepin mirror.

    Sources are preferred in order: URIs ending in "deepin/", URIs
    containing "/deepin", URIs containing "deepin", then the first
    configured source as a last resort.

    Bug fix: the original called len() on filter() results, which fails
    on Python 3 where filter() returns a lazy iterator; list
    comprehensions are used instead (same behavior on Python 2).
    """
    apt_pkg.init_config()
    apt_pkg.init_system()
    source_list_obj = apt_pkg.SourceList()
    source_list_obj.read_main_list()
    all_uris = [entry.uri for entry in source_list_obj.list]
    # try each predicate in preference order; first match wins
    for predicate in (lambda url: url.endswith("deepin/"),
                      lambda url: in_string("/deepin", url),
                      lambda url: in_string("deepin", url)):
        matches = [url for url in all_uris if predicate(url)]
        if matches:
            return get_hostname(matches[0])
    return get_hostname(all_uris[0])