def install(*package_names) -> None:
    """Upgrade the system, then install each named package.

    Names that are unknown or already installed are reported and skipped.
    """
    upgrade()
    cache = Cache()
    cache.update()
    cache.open()
    for name in package_names:
        if name not in cache:
            logger.error('Package %s not found!' % (name, ))
            continue
        pkg = cache[name]
        if pkg.is_installed:
            logger.warning('Package %s already installed!' % (name, ))
            continue
        pkg.mark_install()
    cache.commit(TextFetchProgress(), InstallProgress())
    cache.close()
def _test(): """Test function""" from apt.cache import Cache from apt.progress import DpkgInstallProgress cache = Cache() vp = "www-browser" #print "%s virtual: %s" % (vp, cache.isVirtualPackage(vp)) providers = cache.get_providing_packages(vp) print "Providers for %s :" % vp for pkg in providers: print " %s" % pkg.name d = DebPackage(sys.argv[1], cache) print "Deb: %s" % d.pkgname if not d.check(): print "can't be satified" print d._failure_string print "missing deps: %s" % d.missing_deps print d.required_changes print "Installing ..." ret = d.install(DpkgInstallProgress()) print ret #s = DscSrcPackage(cache, "../tests/3ddesktop_0.2.9-6.dsc") #s.check_dep() #print "Missing deps: ",s.missingDeps #print "Print required changes: ", s.requiredChanges s = DscSrcPackage(cache=cache) d = "libc6 (>= 2.3.2), libaio (>= 0.3.96) | libaio1 (>= 0.3.96)" print s._satisfy_depends(apt_pkg.parse_depends(d))
def remove(*package_names) -> None:
    """Upgrade the system, then purge each named package.

    Names that are unknown or not installed are reported and skipped.
    """
    upgrade()
    cache = Cache()
    cache.update()
    cache.open()
    for name in package_names:
        if name not in cache:
            print('Package %s not found!' % (name, ))
            continue
        pkg = cache[name]
        if not pkg.is_installed:
            print('Package %s is not installed!' % (name, ))
            continue
        pkg.mark_delete(purge=True)
    cache.commit(TextFetchProgress(), InstallProgress())
    cache.close()
def __init__(self, options):
    """Build an apt Cache configured from command-line options.

    Honours --quiet, an alternative root directory (picking up that
    root's dpkg architecture) and arbitrary -o name=value apt options.
    """
    # fixme, do graphic cache check
    self.options = options
    if options.quiet:
        tp = apt.progress.base.OpProgress()
    else:
        tp = apt.progress.text.OpProgress()
    # set architecture to architecture in root-dir
    if options.rootdir and os.path.exists(options.rootdir + "/usr/bin/dpkg"):
        arch = Popen(
            [options.rootdir + "/usr/bin/dpkg", "--print-architecture"],
            stdout=PIPE, universal_newlines=True).communicate()[0]
        if arch:
            apt_pkg.config.set("APT::Architecture", arch.strip())
    if options.apt_opts:
        for o in options.apt_opts:
            # Idiomatic membership test instead of find() < 0.
            if '=' not in o:
                sys.stderr.write(
                    _("Configuration items must be specified with a =<value>\n"
                      ))
                sys.exit(1)
            (name, value) = o.split('=', 1)
            try:
                apt_pkg.config.set(name, value)
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrow it to Exception.
            except Exception:
                sys.stderr.write(
                    _("Couldn't set APT option %s to %s\n") % (name, value))
                sys.exit(1)
    self._cache = Cache(tp, rootdir=options.rootdir)
def refresh_cache(self):
    """Load the apt cache, or reopen it if the on-disk state changed."""
    cache_date = self.get_cache_date()
    if not self.apt_cache:
        # First use: build the cache from scratch.
        self.apt_cache = Cache()
        self.apt_cache_date = cache_date
    elif cache_date != self.apt_cache_date:
        # On-disk lists changed since we last looked: reopen in place.
        self.apt_cache.open(None)
        self.apt_cache_date = cache_date
def __init__(self, conn=None, object_path=None, bus_name=None):
    """Set up the D-Bus object and an apt cache; other state starts empty."""
    super().__init__(conn, object_path, bus_name)
    # Populated later (lazily) by other methods.
    self.dbus_info = None
    self.polkit = None
    self.enforce_polkit = True
    self.cache = Cache()
    self.lock = None
    self.apt_lock = None
def openCache(self):
    """Open the apt cache; return False and set error text if deps are broken."""
    self._cache = Cache(self.cprogress)
    if self._cache._depcache.broken_count > 0:
        self.error_header = _("Broken dependencies")
        self.error_body = _("Your system has broken dependencies. "
                            "This application can not continue until "
                            "this is fixed. "
                            "To fix it run 'pkexec synaptic' or "
                            "'sudo apt-get install -f' "
                            "in a terminal window.")
        return False
    return True
def __init__(self, options):
    """Build an apt Cache configured from command-line options.

    Honours --quiet, an alternative root directory (picking up that
    root's dpkg architecture) and arbitrary -o name=value apt options.
    """
    # fixme, do graphic cache check
    self.options = options
    if options.quiet:
        tp = apt.progress.base.OpProgress()
    else:
        tp = apt.progress.text.OpProgress()
    # set architecture to architecture in root-dir
    if options.rootdir and os.path.exists(options.rootdir + "/usr/bin/dpkg"):
        arch = Popen([options.rootdir + "/usr/bin/dpkg",
                      "--print-architecture"],
                     stdout=PIPE, universal_newlines=True).communicate()[0]
        if arch:
            apt_pkg.config.set("APT::Architecture", arch.strip())
    if options.apt_opts:
        for o in options.apt_opts:
            # Idiomatic membership test instead of find() < 0.
            if '=' not in o:
                sys.stderr.write(_("Configuration items must be specified with a =<value>\n"))
                sys.exit(1)
            (name, value) = o.split('=', 1)
            try:
                apt_pkg.config.set(name, value)
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrow it to Exception.
            except Exception:
                sys.stderr.write(_("Couldn't set APT option %s to %s\n") % (name, value))
                sys.exit(1)
    self._cache = Cache(tp, rootdir=options.rootdir)
def __init__(self):
    """Initialise the apt cache and the feedback helper."""
    # Get the apt cache
    self.cache = Cache()
    # Feedback module
    self.feedback = Feedback()
def setup(helper, old_version=None):
    """Install and configure the module."""
    if old_version == 1:
        # Check that radicale 2.x is available for install.
        cache = Cache()
        candidate = cache['radicale'].candidate
        if candidate < '2':
            logger.error('Radicale 2.x is not available to install.')

        # Try to upgrade radicale 1.x to 2.x.
        helper.call('pre', actions.superuser_run, 'radicale', ['migrate'])
        helper.install(managed_packages, force_configuration='new')

        # Check that radicale 2.x is installed.
        current_version = get_package_version()
        if not current_version:
            logger.error('Could not determine installed version of radicale.')
        elif current_version < VERSION_2:
            logger.error('Could not install radicale 2.x.')

        # Enable radicale.
        helper.call('post', actions.superuser_run, 'radicale', ['setup'])
    else:
        helper.install(managed_packages)
        helper.call('post', actions.superuser_run, 'radicale', ['setup'])

    helper.call('post', app.enable)
def _test_for_new_dell_recovery(self, mount, assembly_tmp):
    """Tests if the distro currently on the system matches the recovery media.
       If it does, check for any potential SRUs to apply to the recovery media
    """
    logging.debug("_test_for_new_dell_recovery: testing mount %s and assembly_tmp %s" % (mount, assembly_tmp))

    output = fetch_output(['zcat', '/usr/share/doc/dell-recovery/changelog.gz'])
    package_distro = output.split('\n')[0].split()[2].strip(';')

    # Read the distro name off the media; prefer .disk/info.recovery.
    for info in ('info.recovery', 'info'):
        file_path = os.path.join(mount, '.disk', info)
        if os.path.exists(file_path):
            with open(file_path) as rfd:
                rp_distro = rfd.readline().split()[2].strip('"').lower()
            break

    if rp_distro in package_distro:
        logging.debug("_test_for_new_dell_recovery: Distro %s matches %s", rp_distro, package_distro)
        from apt.cache import Cache
        cache = Cache()
        package_version = cache['dell-recovery'].installed.version
        rp_version = self.query_have_dell_recovery(mount)
        if debian_support.version_compare(package_version, rp_version) > 0:
            logging.debug("_test_for_new_dell_recovery: Including updated dell-recovery package version, %s (original was %s)", package_version, rp_version)
            dest = os.path.join(assembly_tmp, 'debs')
            if not os.path.isdir(dest):
                os.makedirs(dest)
            # Repack the installed dell-recovery deb into the assembly tree.
            call = subprocess.Popen(['dpkg-repack', 'dell-recovery'],
                                    cwd=dest, universal_newlines=True)
            (out, err) = call.communicate()
    else:
        logging.debug("_test_for_new_dell_recovery: RP Distro %s doesn't match our distro %s, not injecting updated package", rp_distro, package_distro)
def __init__(self, board: str):
    """
    Parameters
    ----------
    board: str
        The board to make the update for.

    Raises
    ------
    FileNotFoundError
        If no status file for the board exists in the status cache.
    """
    self._board = board
    self._status_file = ""
    self._cache = Cache(rootdir=ROOT_DIR)
    self._cache.update(raise_on_error=False)
    self._cache.open()
    self._deb_pkgs = []
    self._inst_list = []

    # make sure all dir exist
    for directory in (OLU_DIR, ROOT_DIR, DOWNLOAD_DIR, UPDATE_CACHE_DIR,
                      STATUS_CACHE_DIR):
        Path(directory).mkdir(parents=True, exist_ok=True)

    # clear download dir
    for i in listdir(DOWNLOAD_DIR):
        if i.endswith(".deb"):
            remove(DOWNLOAD_DIR + i)

    status_files = []
    for i in listdir(STATUS_CACHE_DIR):
        status_files.append(OLMFile(load=i))
    status_files.sort()

    # find latest olu status tar file
    for i in status_files:
        if i.name == board:
            self._status_file = STATUS_CACHE_DIR + i.name
            break

    if self._status_file == "":
        msg = "No status file for {} board in cache".format(board)
        raise FileNotFoundError(msg)

    # update status file
    dpkg_data = read_dpkg_status_file(self._status_file)
    # BUG FIX: the file must be opened for writing; the original opened it
    # in read mode and fptr.write() raised io.UnsupportedOperation.
    with open(DPKG_STATUS_FILE, "w") as fptr:
        fptr.write(dpkg_data)
def main():
    """main entry point"""
    args = get_args()
    # Manually-installed packages (installed, but not marked auto).
    installed_pkgs = {pkg.name for pkg in Cache()
                      if pkg.is_installed and not pkg.is_auto_installed}
    # Drop anything puppet manages or that belongs to the base set.
    additional_pkgs = (installed_pkgs
                       - get_puppet_managed_pkgs()
                       - get_base_pkgs(args.base_packages))
    for pkg in additional_pkgs:
        print(pkg)
def __init__(self, parent):
    """Line edit with completion over the names of installed packages."""
    QtWidgets.QLineEdit.__init__(self, parent)
    self.logger = logging.getLogger("PackageLineEdit")
    pkg_cache = FilteredCache(Cache())
    pkg_cache.set_filter(InstalledFilter())
    self._completer = QtWidgets.QCompleter(sorted(pkg_cache.keys()))
    self._completer.setModelSorting(
        QtWidgets.QCompleter.CaseSensitivelySortedModel)
    self.setCompleter(self._completer)
def about(request):
    """Serve the about page"""
    pkg_cache = Cache()
    plinth = pkg_cache['plinth']
    context = {
        'title': _('About {box_name}').format(box_name=_(cfg.box_name)),
        'version': __version__,
        'new_version': not plinth.candidate.is_installed,
    }
    return TemplateResponse(request, 'help_about.html', context)
def fail(self, msg, returncode=1):
    """ Print package versions, traceback and error message. """
    apt_cache = AptCache()
    self.log.error('\n%s\n%s%s', '=' * 79,
                   ''.join(traceback.format_stack()), '=' * 79)
    # One "name  version" line per installed *school* package.
    school_pkgs = sorted(name for name in apt_cache.keys()
                         if 'school' in name)
    pck_s = ['{:<40} {}'.format(
                 name,
                 apt_cache[name].installed.version
                 if apt_cache[name].is_installed else 'Not installed')
             for name in school_pkgs]
    self.log.info('Installed package versions:\n{}'.format('\n'.join(pck_s)))
    utils.fail(msg, returncode)
def mark_upgrades():
    '''Mark packages that can upgrade to upgrade during install'''
    from apt.cache import Cache
    cache = Cache()
    to_install = [key for key in cache.keys() if cache[key].is_upgradable]
    del cache
    return to_install
def about(request):
    """Serve the about page"""
    pkg_cache = Cache()
    freedombox = pkg_cache['freedombox']
    context = {
        'title': _('About {box_name}').format(box_name=_(cfg.box_name)),
        'version': __version__,
        'new_version': not freedombox.candidate.is_installed,
        'os_release': get_os_release(),
    }
    return TemplateResponse(request, 'help_about.html', context)
def openCache(self):
    """Open the apt cache; return False and set error text if deps are broken."""
    self._cache = Cache(self.cprogress)
    if self._cache._depcache.broken_count > 0:
        self.error_header = _("Broken dependencies")
        self.error_body = _("Your system has broken dependencies. "
                            "This application can not continue until "
                            "this is fixed. "
                            "To fix it run 'gksudo synaptic' or "
                            "'sudo apt-get install -f' "
                            "in a terminal window.")
        return False
    return True
def mark_packages(recovery_partition):
    '''Finds packages to install:
       * any debs from debs/main that we want unconditionally installed
         (but ONLY the latest version on the media)
       * upgrades
       * dell-recovery - if recovery partition
       * dell-eula - if it exists
    '''
    import apt_inst
    import apt_pkg
    from apt.cache import Cache

    def parse(fname):
        """ read a deb """
        control = apt_inst.DebFile(fname).control.extractdata("control")
        sections = apt_pkg.TagSection(control)
        modaliases = sections["Modaliases"] if "Modaliases" in sections else ''
        return (sections["Architecture"], sections["Package"], modaliases)

    #process debs/main
    to_install = []
    my_arch = fetch_output(['dpkg', '--print-architecture']).strip()
    for top in [ISO_MOUNT, CDROM_MOUNT]:
        repo = os.path.join(top, 'debs', 'main')
        if os.path.isdir(repo):
            for fname in os.listdir(repo):
                if '.deb' in fname:
                    arch, package, modaliases = parse(os.path.join(repo, fname))
                    # Skip modalias-gated debs; keep native/arch-all ones.
                    if not modaliases and (arch == "all" or arch == my_arch):
                        to_install.append(package)

    #mark upgrades and dell-recovery/dell-eula
    cache = Cache()
    for key in cache.keys():
        if cache[key].is_upgradable:
            to_install.append(key)
            continue
        #only install if present on the media
        if key == 'dell-eula' and recovery_partition:
            to_install.append(key)
    del cache

    #only install if using recovery partition
    if recovery_partition:
        to_install.append('dell-recovery')
    return to_install
def install_extras(self):
    """Try to install packages requested by installer components."""
    # We only ever install these packages from the CD: rewrite
    # sources.list so it contains only cdrom: entries for the duration.
    sources_list = '/target/etc/apt/sources.list'
    os.rename(sources_list, "%s.apt-setup" % sources_list)
    with open("%s.apt-setup" % sources_list) as old_sources:
        with open(sources_list, 'w') as new_sources:
            found_cdrom = False
            for line in old_sources:
                if 'cdrom:' in line:
                    print(line, end="", file=new_sources)
                    found_cdrom = True
    if not found_cdrom:
        os.rename("%s.apt-setup" % sources_list, sources_list)

    # this will install free & non-free things, but not things
    # that have multiarch Depends or Recommends. Instead, those
    # will be installed by install_restricted_extras() later
    # because this function runs before i386 foreign arch is
    # enabled
    cache = Cache()
    filtered_extra_packages = query_recorded_installed()
    for package in filtered_extra_packages.copy():
        pkg = cache.get(package)
        if not pkg:
            continue
        candidate = pkg.candidate
        dependencies = candidate.dependencies + candidate.recommends
        all_deps = itertools.chain.from_iterable(dependencies)
        for dep in all_deps:
            # Any arch-qualified dependency means multiarch: defer it.
            if ':' in dep.name:
                filtered_extra_packages.remove(package)
                break

    self.do_install(filtered_extra_packages)

    if found_cdrom:
        os.rename("%s.apt-setup" % sources_list, sources_list)
def charm_version():
    """Return the installed version of the 'charm' package.

    Returns 'unavailable' when the apt bindings are missing or no
    version of the package is installed, and 'error' on any other
    failure while querying apt.
    """
    # BUG FIX: initialise the result. Previously, if apt was importable
    # but no version of 'charm' was installed, charm_ver was never bound
    # and the return raised NameError.
    charm_ver = 'unavailable'
    try:
        from apt.cache import Cache
        charm_vers = Cache()['charm'].versions
        for v in charm_vers:
            if v.is_installed:
                charm_ver = v.version
                break
    except ImportError:
        charm_ver = 'unavailable'
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass.
    except Exception:
        charm_ver = 'error'
    return charm_ver
def install():
    """Analyse the requested packages, then commit the installation.

    Unknown packages are reported; already-installed ones are skipped.
    """
    cache = Cache()
    no_ok_pkg = []
    to_install = []
    for pkg in get_packages():
        print("Installation du paquet : '%s'" % (pkg,))
        # `in` replaces the deprecated Cache.has_key().
        if pkg in cache:
            abs_pkg = cache[pkg]
            if abs_pkg.is_installed:
                print(" - Le paquet est déjà installé")
            else:
                print(" + Le paquet va être installé")
                abs_pkg.mark_install()
                to_install.append(abs_pkg)
        else:
            print("Le paquet demandé n'a pas pu être trouvé")
            no_ok_pkg.append(pkg)
    print("L'opération d'analyse est terminée, seul(s) le(s) paquet(s) \
est(sont) introuvable(s):")
    print(",".join(no_ok_pkg))
    raw_input('On est partie ?')
    # BUG FIX: commit() is a Cache method, not a Package method; the
    # original called abs_pkg.commit(), which raised AttributeError
    # (or NameError when no package was ever looked up).
    cache.commit(text.TextProgress(), InstallProgress())
def _test():
    # type: () -> None
    """Test function"""
    from apt.cache import Cache
    from apt.progress.base import InstallProgress

    cache = Cache()
    vp = "www-browser"
    print("%s virtual: %s" % (vp, cache.is_virtual_package(vp)))
    providers = cache.get_providing_packages(vp)
    print("Providers for %s :" % vp)
    for pkg in providers:
        print(" %s" % pkg.name)

    d = DebPackage(sys.argv[1], cache)
    print("Deb: %s" % d.pkgname)
    if not d.check():
        print("can't be satified")
        print(d._failure_string)
    print("missing deps: %s" % d.missing_deps)
    print(d.required_changes)

    print(d.filelist)

    print("Installing ...")
    ret = d.install(InstallProgress())
    print(ret)
    #s = DscSrcPackage(cache, "../tests/3ddesktop_0.2.9-6.dsc")
    #s.check_dep()
    #print "Missing deps: ",s.missingDeps
    #print "Print required changes: ", s.requiredChanges
    s = DscSrcPackage(cache=cache)
    ds = "libc6 (>= 2.3.2), libaio (>= 0.3.96) | libaio1 (>= 0.3.96)"
    print(s._satisfy_depends(apt_pkg.parse_depends(ds, False)))
def update_cache():
    """
    Replaces the current CACHE with a new one.

    This is required because using cache.open() or cache.update() requires
    root permissions, which we don't have. Not using this is fine for the
    unprivileged side of the code because we aren't making any changes, only
    reading state. But there may be changes happening behind the scenes, so
    we want a way to get a current cache.

    Returns:
        The new apt.cache.Cache object.
    """
    global CACHE
    CACHE = Cache()
    return CACHE
def do_install(self, to_install, langpacks=False):
    """Mark the given packages for installation, bailing out on a broken cache."""
    #self.nested_progress_start()
    with Cache() as cache:
        if cache._depcache.broken_count > 0:
            print('not installing additional packages, since there are'
                  ' broken packages: %s' % ', '.join(broken_packages(cache)))
            return
        with cache.actiongroup():
            for pkg in to_install:
                mark_install(cache, pkg)
    return  # it throw exception in mark_install
def do_update(mark_only):
    """Upgrade all NVIDIA packages; return True if any of them changed."""
    _, progress = query_verbosity()
    log.info("Getting list of eligible packages...")
    cache = Cache(progress)
    f_cache = FilteredCache(cache)
    f_cache.set_filter(NvidiaFilter())
    names = f_cache.keys()
    with unhold(names, cache):
        # mark_only means we just want the side-effects of exiting the
        # unhold() context manager.
        if mark_only:
            return False
        log.info("Updating package list...")
        try:
            cache.update()
        except FetchFailedException as err:
            log.warn(err)
        cache.open(progress)  # Refresh package list
        old_versions = {name: cache[name].installed for name in names}
        log.info("Updating all packages...")
        for name in names:
            if cache[name].is_upgradable:
                cache[name].mark_upgrade()
        cache.commit(None, None)
        log.info("Refreshing package cache...")
        cache.open(progress)
        new_versions = {name: cache[name].installed for name in names}
        log.info("Checking whether packages were upgraded...")
        for name in old_versions:
            if old_versions[name] != new_versions[name]:
                log.info("Kernel module changed")
                return True
        return False
def charm_version():
    """Return the charm(store) client version.

    Inside a snap the version is read from the bundled
    charmstore-client-version file; otherwise apt is queried for the
    installed 'charm' package. Returns 'unavailable' when neither
    source yields a version and 'error' on an apt query failure.
    """
    if 'SNAP' in os.environ:
        cscv = os.path.join(os.environ['SNAP'], 'charmstore-client-version')
        if os.path.exists(cscv):
            with open(cscv) as f:
                charm_ver = f.read().strip()
            return charm_ver
    # BUG FIX: initialise the result. Previously, if apt was importable
    # but no version of 'charm' was installed, charm_ver was never bound
    # and the return raised NameError.
    charm_ver = 'unavailable'
    try:
        from apt.cache import Cache
        charm_vers = Cache()['charm'].versions
        for v in charm_vers:
            if v.is_installed:
                charm_ver = v.version
                break
    except ImportError:
        charm_ver = 'unavailable'
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass.
    except Exception:
        charm_ver = 'error'
    return charm_ver
def cached_charmstore_client_version():
    """Return the charmstore client version info as a dict.

    Inside a snap the info is read from the bundled
    charmstore-client-version JSON file; otherwise apt is queried for
    the installed 'charm' package. The result is augmented with the
    snap revision via _add_snap_rev().
    """
    if 'SNAP' in os.environ:
        cscv = os.path.join(os.environ['SNAP'], 'charmstore-client-version')
        if not os.path.exists(cscv):
            return {'version': 'unavailable', 'git': ''}
        with open(cscv) as f:
            res_string = f.read().strip()
        return _add_snap_rev(json.loads(res_string))
    # BUG FIX: initialise the result. Previously, if apt was importable
    # but no version of 'charm' was installed, charm_ver was never bound
    # and the return raised NameError.
    charm_ver = 'unavailable'
    try:
        from apt.cache import Cache
        charm_vers = Cache()['charm'].versions
        for v in charm_vers:
            if v.is_installed:
                charm_ver = v.version
                break
    except ImportError:
        charm_ver = 'unavailable'
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt pass.
    except Exception:
        charm_ver = 'error'
    return _add_snap_rev({'version': charm_ver})
def prepare(self):
    """Prepare the IsolatedAptCache for use.

    Should be called before use, and after any modification to the list
    of sources.
    """
    self.cleanup()
    logger.debug("Writing apt configs")
    self.tempdir = tempfile.mkdtemp(prefix="hwpack-apt-cache-")
    dirs = ["var/lib/dpkg",
            "etc/apt/sources.list.d",
            "var/cache/apt/archives/partial",
            "var/lib/apt/lists/partial",
            ]
    for d in dirs:
        os.makedirs(os.path.join(self.tempdir, d))
    self.set_installed_packages([], reopen=False)
    sources_list = os.path.join(
        self.tempdir, "etc", "apt", "sources.list")
    with open(sources_list, 'w') as f:
        for source in self.sources:
            # To make a file URL look like an HTTP one (for urlparse)
            # We do this to use urlparse, which is probably more robust
            # than any regexp we come up with.
            mangled_source = source
            if re.search("file:/[^/]", source):
                mangled_source = re.sub("file:/", "file://", source)
            url_parsed = urlparse.urlsplit(mangled_source)
            # If the source uses authentication, don't put in sources.list
            if url_parsed.password:
                url_parts_without_user_pass = [url_parsed.scheme,
                                               url_parsed.hostname,
                                               url_parsed.path,
                                               url_parsed.query,
                                               url_parsed.fragment]
                auth_name = os.path.join(
                    self.tempdir, "etc", "apt", "auth.conf")
                with open(auth_name, 'w') as auth:
                    auth.write(
                        "machine " + url_parsed.hostname + "\n" +
                        "login " + url_parsed.username + "\n" +
                        "password " + url_parsed.password + "\n")
                source = urlparse.urlunsplit(url_parts_without_user_pass)
                # Get rid of extra / in file URLs
                source = re.sub("file://", "file:/", source)
            f.write("deb %s\n" % source)
    if self.architecture is not None:
        apt_conf = os.path.join(self.tempdir, "etc", "apt", "apt.conf")
        with open(apt_conf, 'w') as f:
            f.write(
                'Apt {\nArchitecture "%s";\n'
                'Install-Recommends "true";\n}\n' % self.architecture)
    if self.prefer_label is not None:
        apt_preferences = os.path.join(
            self.tempdir, "etc", "apt", "preferences")
        with open(apt_preferences, 'w') as f:
            f.write(
                'Package: *\n'
                'Pin: release l=%s\n'
                'Pin-Priority: 1001\n' % self.prefer_label)
    # XXX: This is a temporary workaround for bug 885895.
    apt_pkg.config.set("Dir::bin::dpkg", "/bin/false")
    self.cache = Cache(rootdir=self.tempdir, memonly=True)
    logger.debug("Updating apt cache")
    try:
        self.cache.update()
    except FetchFailedException as e:
        # Strip credentials out of any URL embedded in the error text.
        obfuscated_e = re.sub(r"([^ ]https://).+?(@)", r"\1***\2", str(e))
        raise FetchFailedException(obfuscated_e)
# self.apt_status = os.WEXITSTATUS(status)
# self.finished = True
#
# def error(self, pkg, errormsg):
#     """Called when an error happens.
#
#     Emits: status_error()
#     """
#     self.emit(QtCore.SIGNAL("status_error()"))
#
# def conffile(self, current, new):
#     """Called during conffile.
#
#     Emits: status-conffile()
#     """
#     self.emit("status-conffile")
#
# def start_update(self):
#     """Called when the update starts.
#
#     Emits: status-started()
#     """
#     self.emit("status-started")

if __name__ == '__main__':
    # Smoke test: open, refresh and commit the cache with Qt progress widgets.
    from apt.cache import Cache
    import apt

    c = Cache(QOpProgress())
    c.update(QAcquireProgress())
    c.commit(QAcquireProgress(), QInstallProgress())
def do_install(self, to_install, langpacks=False):
    """Install the given packages with debconf-driven progress reporting."""
    self.nested_progress_start()

    if langpacks:
        self.db.progress('START', 0, 10, 'ubiquity/langpacks/title')
    else:
        self.db.progress('START', 0, 10, 'ubiquity/install/title')
    self.db.progress('INFO', 'ubiquity/install/find_installables')

    self.progress_region(0, 1)
    fetchprogress = DebconfAcquireProgress(
        self.db, 'ubiquity/install/title',
        'ubiquity/install/apt_indices_starting',
        'ubiquity/install/apt_indices')
    cache = Cache()

    if cache._depcache.broken_count > 0:
        syslog.syslog(
            'not installing additional packages, since there are broken '
            'packages: %s' % ', '.join(broken_packages(cache)))
        self.db.progress('STOP')
        self.nested_progress_end()
        return

    for pkg in to_install:
        mark_install(cache, pkg)

    self.db.progress('SET', 1)
    self.progress_region(1, 10)
    if langpacks:
        fetchprogress = DebconfAcquireProgress(
            self.db, 'ubiquity/langpacks/title', None,
            'ubiquity/langpacks/packages')
        installprogress = DebconfInstallProgress(
            self.db, 'ubiquity/langpacks/title',
            'ubiquity/install/apt_info')
    else:
        fetchprogress = DebconfAcquireProgress(
            self.db, 'ubiquity/install/title', None,
            'ubiquity/install/fetch_remove')
        installprogress = DebconfInstallProgress(
            self.db, 'ubiquity/install/title',
            'ubiquity/install/apt_info',
            'ubiquity/install/apt_error_install')
    chroot_setup(self.target)
    commit_error = None
    try:
        try:
            if not self.commit_with_verify(cache, fetchprogress,
                                           installprogress):
                fetchprogress.stop()
                installprogress.finishUpdate()
                self.db.progress('STOP')
                self.nested_progress_end()
                return
        except IOError:
            for line in traceback.format_exc().split('\n'):
                syslog.syslog(syslog.LOG_ERR, line)
            fetchprogress.stop()
            installprogress.finishUpdate()
            self.db.progress('STOP')
            self.nested_progress_end()
            return
        except SystemError as e:
            for line in traceback.format_exc().split('\n'):
                syslog.syslog(syslog.LOG_ERR, line)
            commit_error = str(e)
    finally:
        chroot_cleanup(self.target)
    self.db.progress('SET', 10)

    cache.open(None)
    if commit_error or cache._depcache.broken_count > 0:
        if commit_error is None:
            commit_error = ''
        brokenpkgs = broken_packages(cache)
        self.warn_broken_packages(brokenpkgs, commit_error)

    self.db.progress('STOP')
    self.nested_progress_end()
def generate_blacklist(self):
    """Build self.blacklist: files belonging to packages safe to drop.

    Computes the set of live-CD packages not wanted on the installed
    system, protects packages we must keep (bootloader, kernel,
    oem-config), then maps the removable packages to their file lists.
    """
    manifest_remove = os.path.join(self.casper_path,
                                   'filesystem.manifest-remove')
    manifest_desktop = os.path.join(self.casper_path,
                                    'filesystem.manifest-desktop')
    manifest = os.path.join(self.casper_path, 'filesystem.manifest')
    if os.path.exists(manifest_remove) and os.path.exists(manifest):
        difference = set()
        with open(manifest_remove) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    pkg = line.split(':')[0]
                    difference.add(pkg.split()[0])
        live_packages = set()
        with open(manifest) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    pkg = line.split(':')[0]
                    live_packages.add(pkg.split()[0])
        desktop_packages = live_packages - difference
    elif os.path.exists(manifest_desktop) and os.path.exists(manifest):
        desktop_packages = set()
        with open(manifest_desktop) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    pkg = line.split(':')[0]
                    desktop_packages.add(pkg.split()[0])
        live_packages = set()
        with open(manifest) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    pkg = line.split(':')[0]
                    live_packages.add(pkg.split()[0])
        difference = live_packages - desktop_packages
    else:
        difference = set()

    cache = Cache()

    use_restricted = True
    try:
        if self.db.get('apt-setup/restricted') == 'false':
            use_restricted = False
    except debconf.DebconfError:
        pass
    if not use_restricted:
        for pkg in cache.keys():
            if (cache[pkg].is_installed and
                    cache[pkg].section.startswith('restricted/')):
                difference.add(pkg)

    # Keep packages we explicitly installed.
    keep = install_misc.query_recorded_installed()

    arch, subarch = install_misc.archdetect()

    # Less than ideal. Since we cannot know which bootloader we'll need
    # at file copy time, we should figure out why grub still fails when
    # apt-install-direct is present during configure_bootloader (code
    # removed).
    if arch in ('amd64', 'i386'):
        if subarch == 'efi':
            keep.add('grub-efi')
            keep.add('grub-efi-amd64')
            keep.add('grub-efi-amd64-signed')
            keep.add('shim-signed')
            keep.add('mokutil')
            keep.add('fwupdate-signed')
            install_misc.record_installed(['fwupdate-signed'])
            try:
                altmeta = self.db.get('base-installer/kernel/altmeta')
                if altmeta:
                    altmeta = '-%s' % altmeta
            except debconf.DebconfError:
                altmeta = ''
            keep.add('linux-signed-generic%s' % altmeta)
        else:
            keep.add('grub')
            keep.add('grub-pc')
    elif (arch in ('armel', 'armhf') and
          subarch in ('omap', 'omap4', 'mx5')):
        keep.add('flash-kernel')
        keep.add('u-boot-tools')
    elif arch == 'powerpc':
        keep.add('yaboot')
        keep.add('hfsutils')

    # Even adding ubiquity as a depends to oem-config-{gtk,kde} doesn't
    # appear to force ubiquity and libdebian-installer4 to copy all of
    # their files, so this does the trick.
    try:
        if self.db.get('oem-config/enable') == 'true':
            keep.add('ubiquity')
    except (debconf.DebconfError, IOError):
        pass

    difference -= install_misc.expand_dependencies_simple(
        cache, keep, difference)

    # Consider only packages that don't have a prerm, and which can
    # therefore have their files removed without any preliminary work.
    difference = {
        x for x in difference
        if not os.path.exists('/var/lib/dpkg/info/%s.prerm' % x)}

    confirmed_remove = set()
    with cache.actiongroup():
        for pkg in sorted(difference):
            if pkg in confirmed_remove:
                continue
            would_remove = install_misc.get_remove_list(
                cache, [pkg], recursive=True)
            if would_remove <= difference:
                confirmed_remove |= would_remove
                # Leave these marked for removal in the apt cache to
                # speed up further calculations.
            else:
                for removedpkg in would_remove:
                    cachedpkg = install_misc.get_cache_pkg(
                        cache, removedpkg)
                    cachedpkg.mark_keep()
    difference = confirmed_remove

    if len(difference) == 0:
        del cache
        self.blacklist = {}
        return

    cmd = ['dpkg', '-L']
    cmd.extend(difference)
    subp = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=True)
    res = subp.communicate()[0].splitlines()
    u = {}
    for x in res:
        u[x] = 1
    self.blacklist = u
def select_language_packs(self, save=False):
    """Work out which language packs to install for the selected locales.

    Returns the list of packages (or [] when save=True, in which case
    the list is written to /var/lib/ubiquity/langpacks instead).
    """
    try:
        keep_packages = self.db.get('ubiquity/keep-installed')
        keep_packages = keep_packages.replace(',', '').split()
        syslog.syslog('keeping packages due to preseeding: %s'
                      % ' '.join(keep_packages))
        record_installed(keep_packages)
    except debconf.DebconfError:
        pass

    langpacks = []
    all_langpacks = False
    try:
        langpack_db = self.db.get('pkgsel/language-packs')
        if langpack_db == 'ALL':
            apt_subp = subprocess.Popen(
                ['apt-cache', '-n', 'search', '^language-pack-[^-][^-]*$'],
                stdout=subprocess.PIPE, universal_newlines=True)
            apt_out = apt_subp.communicate()[0].rstrip().split('\n')
            langpacks = [x.split('-')[2].strip() for x in apt_out]
            all_langpacks = True
        else:
            langpacks = langpack_db.replace(',', '').split()
    except debconf.DebconfError:
        pass
    if not langpacks:
        langpack_set = set()
        try:
            langpack_db = self.db.get('localechooser/supported-locales')
            for locale in langpack_db.replace(',', '').split():
                langpack_set.add(locale)
        except debconf.DebconfError:
            pass
        langpack_db = self.db.get('debian-installer/locale')
        langpack_set.add(langpack_db)
        langpacks = sorted(langpack_set)

    no_install = '/var/lib/ubiquity/no-install-langpacks'
    if os.path.exists(no_install):
        osextras.unlink_force(no_install)
    if len(langpacks) == 1 and langpacks[0] in ('C', 'en'):
        # Touch
        with open(no_install, 'a'):
            os.utime(no_install, None)

    syslog.syslog('keeping language packs for: %s' % ' '.join(langpacks))

    try:
        lppatterns = self.db.get('pkgsel/language-pack-patterns').split()
    except debconf.DebconfError:
        return

    cache = Cache()

    to_install = []
    checker = osextras.find_on_path('check-language-support')
    for lp_locale in langpacks:
        lp = locale_to_language_pack(lp_locale)
        # Basic language packs, required to get localisation working at
        # all. We install these almost unconditionally; if you want to
        # get rid of even these, you can preseed pkgsel/language-packs
        # to the empty string.
        to_install.append('language-pack-%s' % lp)
        # Other language packs, typically selected by preseeding.
        for pattern in lppatterns:
            to_install.append(pattern.replace('$LL', lp))
        # More extensive language support packages.
        # If pkgsel/language-packs is ALL, then speed things up by
        # calling check-language-support just once.
        if not all_langpacks and checker:
            check_lang = subprocess.Popen(
                ['check-language-support', '-l', lp_locale.split('.')[0],
                 '--show-installed'],
                stdout=subprocess.PIPE, universal_newlines=True)
            to_install.extend(check_lang.communicate()[0].strip().split())
        else:
            to_install.append('language-support-%s' % lp)
        if checker:
            # Keep language-support-$LL installed if it happens to be in
            # the live filesystem, since there's no point spending time
            # removing it; but don't install it if it isn't in the live
            # filesystem.
            toplevel = 'language-support-%s' % lp
            toplevel_pkg = get_cache_pkg(cache, toplevel)
            if toplevel_pkg and toplevel_pkg.is_installed:
                to_install.append(toplevel)
    if all_langpacks and checker:
        check_lang = subprocess.Popen(
            ['check-language-support', '-a', '--show-installed'],
            stdout=subprocess.PIPE, universal_newlines=True)
        to_install.extend(check_lang.communicate()[0].strip().split())

    # Filter the list of language packs to include only language packs
    # that exist in the live filesystem's apt cache, so that we can tell
    # the difference between "no such language pack" and "language pack
    # not retrievable given apt configuration in /target" later on.
    to_install = [
        pkg for pkg in to_install
        if get_cache_pkg(cache, pkg) is not None]

    install_new = True
    try:
        install_new_key = \
            self.db.get('pkgsel/install-language-support') == 'true'
        if install_new_key != '' and not misc.create_bool(install_new_key):
            install_new = False
    except debconf.DebconfError:
        pass

    if not install_new:
        # Keep packages that are on the live filesystem, but don't install
        # new ones.
        # TODO cjwatson 2010-03-18: To match pkgsel's semantics, we
        # ought to be willing to install packages from the package pool
        # on the CD as well.
        to_install = [
            pkg for pkg in to_install
            if get_cache_pkg(cache, pkg).is_installed]

    del cache

    record_installed(to_install)

    langpacks_file = '/var/lib/ubiquity/langpacks'
    if os.path.exists(langpacks_file):
        osextras.unlink_force(langpacks_file)
    if install_new:
        if save:
            if not os.path.exists(os.path.dirname(langpacks_file)):
                os.makedirs(os.path.dirname(langpacks_file))
            with open(langpacks_file, 'w') as langpacks:
                for pkg in to_install:
                    print(pkg, file=langpacks)
            return []
        else:
            return to_install
def do_install(self, to_install, langpacks=False):
    """Install *to_install* into the target system through apt.

    Progress is reported through the debconf frontend: the bar runs from
    0 to 10, with 0-1 covering dependency resolution and 1-10 covering
    the download/unpack phase.  When *langpacks* is true the
    language-pack debconf templates are used for the titles instead of
    the generic install ones.

    Returns None.  Failures are reported via syslog and
    warn_broken_packages rather than raised.
    """
    self.nested_progress_start()
    # Choose the progress-bar title template: language packs get their own.
    if langpacks:
        self.db.progress('START', 0, 10, 'ubiquity/langpacks/title')
    else:
        self.db.progress('START', 0, 10, 'ubiquity/install/title')
    self.db.progress('INFO', 'ubiquity/install/find_installables')
    # NOTE(review): presumably maps sub-progress into the 0-1 band of the
    # 10-step bar — confirm against progress_region's definition.
    self.progress_region(0, 1)
    # Progress reporter for the initial package-index work.
    fetchprogress = DebconfAcquireProgress(
        self.db, 'ubiquity/install/title',
        'ubiquity/install/apt_indices_starting',
        'ubiquity/install/apt_indices')
    with Cache() as cache:
        # Bail out early rather than operating on an already-broken cache.
        if cache._depcache.broken_count > 0:
            syslog.syslog(
                'not installing additional packages, since there are'
                ' broken packages: %s' % ', '.join(broken_packages(cache)))
            self.db.progress('STOP')
            self.nested_progress_end()
            return
        # Mark everything inside one action group so dependency
        # resolution is batched instead of run per package.
        with cache.actiongroup():
            for pkg in to_install:
                mark_install(cache, pkg)
        self.db.progress('SET', 1)
        self.progress_region(1, 10)
        # Fresh progress reporters for the download/install phase proper.
        if langpacks:
            fetchprogress = DebconfAcquireProgress(
                self.db, 'ubiquity/langpacks/title', None,
                'ubiquity/langpacks/packages')
            installprogress = DebconfInstallProgress(
                self.db, 'ubiquity/langpacks/title',
                'ubiquity/install/apt_info')
        else:
            fetchprogress = DebconfAcquireProgress(
                self.db, 'ubiquity/install/title', None,
                'ubiquity/install/fetch_remove')
            installprogress = DebconfInstallProgress(
                self.db, 'ubiquity/install/title',
                'ubiquity/install/apt_info',
                'ubiquity/install/apt_error_install')
        chroot_setup(self.target)
        commit_error = None
        try:
            try:
                # commit_with_verify returns falsy on failure: unwind the
                # progress state and give up quietly in that case.
                if not self.commit_with_verify(cache, fetchprogress,
                                               installprogress):
                    fetchprogress.stop()
                    installprogress.finish_update()
                    self.db.progress('STOP')
                    self.nested_progress_end()
                    return
            except IOError:
                # syslog is line-based, so log the traceback line by line.
                for line in traceback.format_exc().split('\n'):
                    syslog.syslog(syslog.LOG_ERR, line)
                fetchprogress.stop()
                installprogress.finish_update()
                self.db.progress('STOP')
                self.nested_progress_end()
                return
            except SystemError as e:
                # python-apt raises SystemError on commit failures; keep
                # the message so the user can be warned after cleanup.
                for line in traceback.format_exc().split('\n'):
                    syslog.syslog(syslog.LOG_ERR, line)
                commit_error = str(e)
        finally:
            # Always undo chroot_setup, including on the early returns.
            chroot_cleanup(self.target)
        self.db.progress('SET', 10)
        # Reopen to observe the post-commit state of the dpkg database.
        cache.open(None)
        if commit_error or cache._depcache.broken_count > 0:
            if commit_error is None:
                commit_error = ''
            brokenpkgs = broken_packages(cache)
            self.warn_broken_packages(brokenpkgs, commit_error)
    self.db.progress('STOP')
    self.nested_progress_end()
def generate_blacklist(self):
    """Build self.blacklist: a dict keyed by file paths belonging to
    packages that should not be copied to the installed system.

    The set of removable packages is derived from the casper manifests,
    filtered by debconf answers and by which packages were explicitly
    installed, then reduced to packages whose removal would not drag in
    anything we want to keep.
    """
    manifest_remove = os.path.join(self.casper_path,
                                   "filesystem.manifest-remove")
    manifest_desktop = os.path.join(self.casper_path,
                                    "filesystem.manifest-desktop")
    manifest = os.path.join(self.casper_path, "filesystem.manifest")
    # Two manifest layouts exist: manifest-remove lists packages to strip
    # from the live set, while manifest-desktop lists packages to keep.
    # Either way, `difference` ends up as the candidate removal set.
    if os.path.exists(manifest_remove) and os.path.exists(manifest):
        difference = set()
        with open(manifest_remove) as manifest_file:
            for line in manifest_file:
                if line.strip() != "" and not line.startswith("#"):
                    difference.add(line.split()[0])
        live_packages = set()
        with open(manifest) as manifest_file:
            for line in manifest_file:
                if line.strip() != "" and not line.startswith("#"):
                    live_packages.add(line.split()[0])
        desktop_packages = live_packages - difference
    elif os.path.exists(manifest_desktop) and os.path.exists(manifest):
        desktop_packages = set()
        with open(manifest_desktop) as manifest_file:
            for line in manifest_file:
                if line.strip() != "" and not line.startswith("#"):
                    desktop_packages.add(line.split()[0])
        live_packages = set()
        with open(manifest) as manifest_file:
            for line in manifest_file:
                if line.strip() != "" and not line.startswith("#"):
                    live_packages.add(line.split()[0])
        difference = live_packages - desktop_packages
    else:
        difference = set()
    cache = Cache()
    # If the user declined the restricted component, also remove any
    # installed package from a "restricted/" section.
    use_restricted = True
    try:
        if self.db.get("apt-setup/restricted") == "false":
            use_restricted = False
    except debconf.DebconfError:
        pass
    if not use_restricted:
        for pkg in cache.keys():
            if cache[pkg].is_installed and cache[pkg].section.startswith("restricted/"):
                difference.add(pkg)
    # Keep packages we explicitly installed.
    keep = install_misc.query_recorded_installed()
    arch, subarch = install_misc.archdetect()
    # Less than ideal. Since we cannot know which bootloader we'll need
    # at file copy time, we should figure out why grub still fails when
    # apt-install-direct is present during configure_bootloader (code
    # removed).
    if arch in ("amd64", "i386"):
        if subarch == "efi":
            keep.add("grub-efi")
            keep.add("grub-efi-amd64")
            efi_vars = "/sys/firmware/efi/vars"
            sb_var = os.path.join(
                efi_vars,
                "SecureBoot-8be4df61-93ca-11d2-aa0d-00e098032b8c", "data")
            if os.path.exists(sb_var):
                with open(sb_var, "rb") as sb_var_file:
                    # First data byte 0x01 means Secure Boot is enabled:
                    # keep the signed boot chain and signed kernel.
                    if sb_var_file.read(1) == b"\x01":
                        keep.add("grub-efi-amd64-signed")
                        keep.add("shim-signed")
                        try:
                            altmeta = self.db.get(
                                "base-installer/kernel/altmeta")
                            if altmeta:
                                altmeta = "-%s" % altmeta
                        except debconf.DebconfError:
                            altmeta = ""
                        keep.add("linux-signed-generic%s" % altmeta)
        else:
            keep.add("grub")
            keep.add("grub-pc")
    elif arch in ("armel", "armhf") and subarch in ("omap", "omap4", "mx5"):
        keep.add("flash-kernel")
        keep.add("u-boot-tools")
    elif arch == "powerpc":
        keep.add("yaboot")
        keep.add("hfsutils")
    # Even adding ubiquity as a depends to oem-config-{gtk,kde} doesn't
    # appear to force ubiquity and libdebian-installer4 to copy all of
    # their files, so this does the trick.
    try:
        if self.db.get("oem-config/enable") == "true":
            keep.add("ubiquity")
    except (debconf.DebconfError, IOError):
        pass
    difference -= install_misc.expand_dependencies_simple(
        cache, keep, difference)
    # Consider only packages that don't have a prerm, and which can
    # therefore have their files removed without any preliminary work.
    difference = {
        x for x in difference
        if not os.path.exists("/var/lib/dpkg/info/%s.prerm" % x)}
    confirmed_remove = set()
    with cache.actiongroup():
        for pkg in sorted(difference):
            if pkg in confirmed_remove:
                continue
            would_remove = install_misc.get_remove_list(
                cache, [pkg], recursive=True)
            # Only commit to removing pkg if everything it drags out is
            # itself in the candidate removal set.
            if would_remove <= difference:
                confirmed_remove |= would_remove
                # Leave these marked for removal in the apt cache to
                # speed up further calculations.
            else:
                # Undo the marks: removing pkg would also remove
                # something we must keep.
                for removedpkg in would_remove:
                    cachedpkg = install_misc.get_cache_pkg(cache, removedpkg)
                    cachedpkg.mark_keep()
    difference = confirmed_remove
    if len(difference) == 0:
        del cache
        self.blacklist = {}
        return
    # Ask dpkg for every file owned by the removable packages; those
    # paths become the blacklist keys.
    cmd = ["dpkg", "-L"]
    cmd.extend(difference)
    subp = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=True)
    res = subp.communicate()[0].splitlines()
    u = {}
    for x in res:
        u[x] = 1
    self.blacklist = u
class GDebiCommon(object):
    """Frontend-independent core of gdebi: opens a .deb/.click file,
    compares it against the apt cache and computes the changes needed
    to install it.  GUI/CLI subclasses provide the progress object and
    present ``error_header``/``error_body`` when a method returns False.
    """
    # cprogress may be different in child classes
    def __init__(self, datadir, options, file=""):
        self.cprogress = None
        self._cache = None
        self.deps = ""
        self.version_info_title = ""
        self.version_info_msg = ""
        self._deb = None
        self._options = options
        self.install = []
        self.remove = []
        self.unauthenticated = 0

    def openCache(self):
        """Open the apt cache; False (with error_* set) if it is broken."""
        self._cache = Cache(self.cprogress)
        if self._cache._depcache.broken_count > 0:
            self.error_header = _("Broken dependencies")
            self.error_body = _("Your system has broken dependencies. "
                                "This application can not continue until "
                                "this is fixed. "
                                "To fix it run 'pkexec synaptic' or "
                                "'sudo apt-get install -f' "
                                "in a terminal window.")
            return False
        return True

    def open(self, file, downloaded=False):
        """Load *file* as a DebPackage (or ClickPackage for .click).

        On failure sets error_header/error_body and returns False.
        """
        file = os.path.abspath(file)
        klass = DebPackage
        if file.endswith(".click"):
            klass = ClickPackage
        try:
            self._deb = klass(file, self._cache, downloaded)
        except (IOError, SystemError, ValueError) as e:
            logging.debug("open failed with %s" % e)
            mimetype = guess_type(file)
            # BUG FIX (idiom): compare against None with `is not`, not `!=`.
            if (mimetype[0] is not None and
                    mimetype[0] != "application/vnd.debian.binary-package"):
                self.error_header = _(
                    "'%s' is not a Debian package") % os.path.basename(file)
                self.error_body = _(
                    "The MIME type of this file is '%s' "
                    "and can not be installed on this system.") % mimetype[0]
                return False
            else:
                self.error_header = _(
                    "Could not open '%s'") % os.path.basename(file)
                self.error_body = _(
                    "The package might be corrupted or you are not "
                    "allowed to open the file. Check the permissions "
                    "of the file.")
                return False
        # NOTE(review): the success path falls through returning None
        # (falsy), unlike openCache which returns True — confirm callers
        # do not test this return value.

    def compareDebWithCache(self):
        """Fill version_info_title/msg when the archive also carries this
        package, advising the user which version to prefer."""
        # check if the package is available in the normal sources as well
        res = self._deb.compare_to_version_in_cache(use_installed=False)
        if not self._options.non_interactive and res != DebPackage.VERSION_NONE:
            try:
                pkg = self._cache[self._deb.pkgname]
            except (KeyError, TypeError):
                return
            if self._deb.downloaded:
                self.version_info_title = ""
                self.version_info_msg = ""
                return
            # FIXME: make this strs better
            if res == DebPackage.VERSION_SAME:
                if pkg.candidate and pkg.candidate.downloadable:
                    self.version_info_title = _(
                        "Same version is available in a software channel")
                    self.version_info_msg = _(
                        "You are recommended to install the software "
                        "from the channel instead.")
            elif res == DebPackage.VERSION_NEWER:
                if pkg.candidate and pkg.candidate.downloadable:
                    self.version_info_title = _(
                        "An older version is available in a software channel")
                    self.version_info_msg = _(
                        "Generally you are recommended to install "
                        "the version from the software channel, since "
                        "it is usually better supported.")
            elif res == DebPackage.VERSION_OUTDATED:
                if pkg.candidate and pkg.candidate.downloadable:
                    self.version_info_title = _(
                        "A later version is available in a software "
                        "channel")
                    self.version_info_msg = _(
                        "You are strongly advised to install "
                        "the version from the software channel, since "
                        "it is usually better supported.")

    def compareProvides(self):
        """Return the set of virtual-package names the installed version
        provides but the new .deb no longer does, and which some other
        installed package depends on."""
        provides = set()
        broken_provides = set()
        try:
            pkg = self._cache[self._deb.pkgname].installed
        except (KeyError, TypeError):
            pkg = None
        if pkg:
            if pkg.provides:
                # provides entries dropped by the new package ...
                for p in self._deb.provides:
                    for i in p:
                        provides.add(i[0])
                provides = set(pkg.provides).difference(provides)
                # ... that some installed package still depends on.
                if provides:
                    for package in list(self._cache.keys()):
                        if self._cache[package].installed:
                            for dep in self._cache[package].installed.dependencies:
                                for d in dep.or_dependencies:
                                    if d.name in provides:
                                        broken_provides.add(d.name)
        return broken_provides

    def download_package(self):
        """Fetch the candidate .deb from the archive next to the current
        file (or /tmp if unwritable) and re-open it as downloaded."""
        dirname = os.path.abspath(os.path.dirname(self._deb.filename))
        package = self._cache[self._deb.pkgname].candidate
        pkgname = os.path.basename(package.filename)
        if package.downloadable:
            if not os.access(dirname, os.W_OK):
                dirname = "/tmp"
            if not os.path.exists(os.path.join(dirname, pkgname)):
                package.fetch_binary(dirname)
            self.open(os.path.join(dirname, pkgname), True)
            return True
        # NOTE(review): non-downloadable candidates fall through
        # returning None — confirm callers treat that as failure.

    def get_changes(self):
        """Compute install/remove/unauthenticated from the .deb and build
        the human-readable self.deps summary.  Always returns True."""
        (self.install, self.remove,
         self.unauthenticated) = self._deb.required_changes
        self.deps = ""
        if len(self.remove) == len(self.install) == 0:
            self.deps = _("All dependencies are satisfied")
        if len(self.remove) > 0:
            # FIXME: use ngettext here
            self.deps += _("Requires the <b>removal</b> of %s packages\n"
                           ) % len(self.remove)
        if len(self.install) > 0:
            self.deps += _("Requires the installation of %s packages") % len(
                self.install)
        return True
def generate_blacklist(self):
    """Build self.blacklist: a dict keyed by file paths belonging to
    packages that should not be copied to the installed system.

    This variant strips any ":arch" qualifier from manifest entries
    before comparing package names.
    """
    manifest_remove = os.path.join(self.casper_path,
                                   'filesystem.manifest-remove')
    manifest_desktop = os.path.join(self.casper_path,
                                    'filesystem.manifest-desktop')
    manifest = os.path.join(self.casper_path, 'filesystem.manifest')
    # Two manifest layouts exist: manifest-remove lists packages to strip
    # from the live set, while manifest-desktop lists packages to keep.
    # Either way, `difference` ends up as the candidate removal set.
    if os.path.exists(manifest_remove) and os.path.exists(manifest):
        difference = set()
        with open(manifest_remove) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    # drop any ":arch" suffix, then take the package name
                    pkg = line.split(':')[0]
                    difference.add(pkg.split()[0])
        live_packages = set()
        with open(manifest) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    pkg = line.split(':')[0]
                    live_packages.add(pkg.split()[0])
        desktop_packages = live_packages - difference
    elif os.path.exists(manifest_desktop) and os.path.exists(manifest):
        desktop_packages = set()
        with open(manifest_desktop) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    pkg = line.split(':')[0]
                    desktop_packages.add(pkg.split()[0])
        live_packages = set()
        with open(manifest) as manifest_file:
            for line in manifest_file:
                if line.strip() != '' and not line.startswith('#'):
                    pkg = line.split(':')[0]
                    live_packages.add(pkg.split()[0])
        difference = live_packages - desktop_packages
    else:
        difference = set()
    cache = Cache()
    # If the user declined the restricted component, also remove any
    # installed package from a "restricted/" section.
    use_restricted = True
    try:
        if self.db.get('apt-setup/restricted') == 'false':
            use_restricted = False
    except debconf.DebconfError:
        pass
    if not use_restricted:
        for pkg in cache.keys():
            if (cache[pkg].is_installed and
                    cache[pkg].section.startswith('restricted/')):
                difference.add(pkg)
    # Keep packages we explicitly installed.
    keep = install_misc.query_recorded_installed()
    arch, subarch = install_misc.archdetect()
    # Less than ideal. Since we cannot know which bootloader we'll need
    # at file copy time, we should figure out why grub still fails when
    # apt-install-direct is present during configure_bootloader (code
    # removed).
    if arch in ('amd64', 'i386'):
        if subarch == 'efi':
            keep.add('grub-efi')
            keep.add('grub-efi-amd64')
            keep.add('grub-efi-amd64-signed')
            keep.add('shim-signed')
            keep.add('mokutil')
            keep.add('fwupdate-signed')
            install_misc.record_installed(['fwupdate-signed'])
            try:
                altmeta = self.db.get(
                    'base-installer/kernel/altmeta')
                if altmeta:
                    altmeta = '-%s' % altmeta
            except debconf.DebconfError:
                altmeta = ''
            keep.add('linux-signed-generic%s' % altmeta)
        else:
            keep.add('grub')
            keep.add('grub-pc')
    elif (arch in ('armel', 'armhf') and
            subarch in ('omap', 'omap4', 'mx5')):
        keep.add('flash-kernel')
        keep.add('u-boot-tools')
    elif arch == 'powerpc':
        keep.add('yaboot')
        keep.add('hfsutils')
    # Even adding ubiquity as a depends to oem-config-{gtk,kde} doesn't
    # appear to force ubiquity and libdebian-installer4 to copy all of
    # their files, so this does the trick.
    try:
        if self.db.get('oem-config/enable') == 'true':
            keep.add('ubiquity')
    except (debconf.DebconfError, IOError):
        pass
    difference -= install_misc.expand_dependencies_simple(
        cache, keep, difference)
    # Consider only packages that don't have a prerm, and which can
    # therefore have their files removed without any preliminary work.
    difference = {
        x for x in difference
        if not os.path.exists('/var/lib/dpkg/info/%s.prerm' % x)}
    confirmed_remove = set()
    with cache.actiongroup():
        for pkg in sorted(difference):
            if pkg in confirmed_remove:
                continue
            would_remove = install_misc.get_remove_list(
                cache, [pkg], recursive=True)
            # Only commit to removing pkg if everything it drags out is
            # itself in the candidate removal set.
            if would_remove <= difference:
                confirmed_remove |= would_remove
                # Leave these marked for removal in the apt cache to
                # speed up further calculations.
            else:
                # Undo the marks: removing pkg would also remove
                # something we must keep.
                for removedpkg in would_remove:
                    cachedpkg = install_misc.get_cache_pkg(
                        cache, removedpkg)
                    cachedpkg.mark_keep()
    difference = confirmed_remove
    if len(difference) == 0:
        del cache
        self.blacklist = {}
        return
    # Ask dpkg for every file owned by the removable packages; those
    # paths become the blacklist keys.
    cmd = ['dpkg', '-L']
    cmd.extend(difference)
    subp = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        universal_newlines=True)
    res = subp.communicate()[0].splitlines()
    u = {}
    for x in res:
        u[x] = 1
    self.blacklist = u
def get_languages(current_language_index=-1, only_installable=False):
    """Read the localechooser language list and return
    (current_language, sorted_choices, language_display_map).

    current_language_index -- index (counting skipped entries) of the
        language to report as current; -1 leaves the default "English".
    only_installable -- restrict to languages whose language-pack is
        installed or installable according to the apt cache.
    """
    import gzip
    import icu
    current_language = "English"
    if only_installable:
        from apt.cache import Cache
        # workaround for an issue where euid != uid and the
        # apt cache has not yet been loaded causing a SystemError
        # when libapt-pkg tries to load the Cache the first time.
        with misc.raised_privileges():
            cache = Cache()
    language_display_map = {}
    i = 0
    # FIX: use a context manager so the gzip handle is closed even if an
    # exception escapes the loop (the original only closed on success).
    with gzip.open(
            '/usr/lib/ubiquity/localechooser/languagelist.data.gz') as \
            languagelist:
        for line in languagelist:
            line = misc.utf8(line)
            if line == '' or line == '\n':
                continue
            code, name, trans = line.strip('\n').split(':')[1:]
            # 'C', Dzongkha and Khmer are skipped, but still counted so
            # current_language_index stays aligned with the file.
            if code in ('C', 'dz', 'km'):
                i += 1
                continue
            # KDE fails to round-trip strings containing U+FEFF ZERO WIDTH
            # NO-BREAK SPACE, and we don't care about the NBSP anyway, so
            # strip it.
            # https://bugs.launchpad.net/bugs/1001542
            # (comment # 5 and on)
            trans = trans.strip(" \ufeff")
            if only_installable:
                pkg_name = 'language-pack-%s' % code
                # special case these
                if pkg_name.endswith('_CN'):
                    pkg_name = 'language-pack-zh-hans'
                elif pkg_name.endswith('_TW'):
                    pkg_name = 'language-pack-zh-hant'
                elif pkg_name.endswith('_NO'):
                    pkg_name = pkg_name.split('_NO')[0]
                elif pkg_name.endswith('_BR'):
                    pkg_name = pkg_name.split('_BR')[0]
                try:
                    pkg = cache[pkg_name]
                    if not (pkg.installed or pkg.candidate):
                        i += 1
                        continue
                except KeyError:
                    i += 1
                    continue
            language_display_map[trans] = (name, code)
            if i == current_language_index:
                current_language = trans
            i += 1
    if only_installable:
        del cache
    try:
        # Note that we always collate with the 'C' locale. This is far
        # from ideal. But proper collation always requires a specific
        # language for its collation rules (languages frequently have
        # custom sorting). This at least gives us common sorting rules,
        # like stripping accents.
        collator = icu.Collator.createInstance(icu.Locale('C'))
    except Exception:  # FIX: was a bare except (swallowed SystemExit etc.)
        collator = None

    def compare_choice(x):
        # NOTE(review): 'C' entries are filtered out above, so this branch
        # looks unreachable; if it ever fired, None would not compare
        # against the byte/str keys under Python 3.
        if language_display_map[x][1] == 'C':
            return None  # place C first
        if collator:
            try:
                return collator.getCollationKey(x).getByteArray()
            except Exception:  # FIX: was a bare except
                pass
        # Else sort by unicode code point, which isn't ideal either,
        # but also has the virtue of sorting like-glyphs together
        return x

    sorted_choices = sorted(language_display_map, key=compare_choice)
    return current_language, sorted_choices, language_display_map
class GDebiCli(object):
    """Command-line gdebi frontend: opens a .deb/.dsc, prints its
    description and required changes, and installs it via apt + dpkg."""

    def __init__(self, options):
        # fixme, do graphic cache check
        self.options = options
        if options.quiet:
            tp = apt.progress.base.OpProgress()
        else:
            tp = apt.progress.text.OpProgress()
        # set architecture to architecture in root-dir
        if options.rootdir and os.path.exists(options.rootdir + "/usr/bin/dpkg"):
            arch = Popen(
                [options.rootdir + "/usr/bin/dpkg", "--print-architecture"],
                stdout=PIPE, universal_newlines=True).communicate()[0]
            if arch:
                apt_pkg.config.set("APT::Architecture", arch.strip())
        if options.apt_opts:
            for o in options.apt_opts:
                if o.find('=') < 0:
                    sys.stderr.write(
                        _("Configuration items must be specified with a =<value>\n"))
                    sys.exit(1)
                (name, value) = o.split('=', 1)
                try:
                    apt_pkg.config.set(name, value)
                # FIX: was a bare `except:`, which would also swallow
                # SystemExit/KeyboardInterrupt.
                except Exception:
                    sys.stderr.write(
                        _("Couldn't set APT option %s to %s\n") % (name, value))
                    sys.exit(1)
        self._cache = Cache(tp, rootdir=options.rootdir)

    def open(self, file):
        """Open *file* as a binary (.deb) or source (.dsc/control)
        package; exits on unreadable input, False if uninstallable."""
        try:
            # `file file` sniffing covers .deb archives without the suffix.
            if (file.endswith(".deb") or
                "Debian binary package" in Popen(
                    ["file", file], stdout=PIPE,
                    universal_newlines=True).communicate()[0]):
                self._deb = DebPackage(file, self._cache)
            elif (file.endswith(".dsc") or
                  os.path.basename(file) == "control"):
                self._deb = DscSrcPackage(file, self._cache)
            else:
                sys.stderr.write(
                    _("Unknown package type '%s', exiting\n") % file)
                sys.exit(1)
        except (IOError, SystemError, ValueError) as e:
            logging.debug("error opening: %s" % e)
            sys.stderr.write(_("Failed to open the software package\n"))
            sys.stderr.write(
                _("The package might be corrupted or you are not "
                  "allowed to open the file. Check the permissions "
                  "of the file.\n"))
            sys.exit(1)
        # check the deps
        if not self._deb.check():
            sys.stderr.write(_("This package is uninstallable\n"))
            sys.stderr.write(self._deb._failure_string + "\n")
            return False
        return True

    def show_description(self):
        """Print the package description, or a fallback message."""
        try:
            print(self._deb["Description"])
        except KeyError:
            print(_("No description is available"))

    def show_dependencies(self):
        print(self.get_dependencies_info())

    def get_dependencies_info(self):
        """Return a human-readable summary of the required changes."""
        s = ""
        # show what changes
        (install, remove, unauthenticated) = self._deb.required_changes
        if len(unauthenticated) > 0:
            s += _("The following packages are UNAUTHENTICATED: ")
            for pkgname in unauthenticated:
                s += pkgname + " "
        if len(remove) > 0:
            s += _("Requires the REMOVAL of the following packages: ")
            for pkgname in remove:
                s += pkgname + " "
            s += "\n"
        if len(install) > 0:
            s += _("Requires the installation of the following packages: ")
            for pkgname in install:
                s += pkgname + " "
            s += "\n"
        return s

    def install(self):
        """Commit the dependency changes, then install the package with
        dpkg.  Returns dpkg's exit status (0 on success, 1 on failure)."""
        # install the dependencies
        (install, remove, unauthenticated) = self._deb.required_changes
        if len(install) > 0 or len(remove) > 0:
            fprogress = apt.progress.text.AcquireProgress()
            iprogress = apt.progress.base.InstallProgress()
            try:
                self._cache.commit(fprogress, iprogress)
            except (apt.cache.FetchFailedException, SystemError) as e:
                sys.stderr.write(_("Error during install: '%s'") % e)
                return 1
        # install the package itself
        if self._deb.filename.endswith(".dsc"):
            # FIXME: add option to only install build-dependencies
            # (or build+install the deb) and then enable
            # this code
            #dir = self._deb.pkgname + "-" + apt_pkg.UpstreamVersion(self._deb["Version"])
            #os.system("dpkg-source -x %s" % self._deb.filename)
            #os.system("cd %s && dpkg-buildpackage -b -uc" % dir)
            #for i in self._deb.binaries:
            #    os.system("gdebi %s_%s_*.deb" % (i,self._deb["Version"]))
            return 0
        else:
            return call(["dpkg", "--auto-deconfigure",
                         "-i", self._deb.filename])
class GDebiCommon(object):
    """Frontend-independent core of gdebi: opens a .deb/.click file,
    compares it against the apt cache, computes required changes and
    manages the apt pkgsystem lock.  GUI subclasses provide the progress
    object and present ``error_header``/``error_body`` on False returns.
    """
    # cprogress may be different in child classes
    def __init__(self, datadir, options, file=""):
        self.cprogress = None
        self._cache = None
        self.deps = ""
        self.version_info_title = ""
        self.version_info_msg = ""
        self._deb = None
        self._options = options
        self.install = []
        self.remove = []
        self.unauthenticated = 0

    def openCache(self):
        """Open the apt cache; False (with error_* set) if it is broken."""
        self._cache = Cache(self.cprogress)
        if self._cache._depcache.broken_count > 0:
            self.error_header = _("Broken dependencies")
            self.error_body = _("Your system has broken dependencies. "
                                "This application can not continue until "
                                "this is fixed. "
                                "To fix it run 'gksudo synaptic' or "
                                "'sudo apt-get install -f' "
                                "in a terminal window.")
            return False
        return True

    def open(self, file, downloaded=False):
        """Load *file* as a DebPackage (or ClickPackage for .click).

        On failure sets error_header/error_body and returns False.
        """
        file = os.path.abspath(file)
        klass = DebPackage
        if file.endswith(".click"):
            klass = ClickPackage
        try:
            self._deb = klass(file, self._cache, downloaded)
        except (IOError, SystemError, ValueError) as e:
            logging.debug("open failed with %s" % e)
            mimetype = guess_type(file)
            # BUG FIX (idiom): compare against None with `is not`, not `!=`.
            if (mimetype[0] is not None and
                    mimetype[0] != "application/vnd.debian.binary-package"):
                self.error_header = _(
                    "'%s' is not a Debian package") % os.path.basename(file)
                self.error_body = _(
                    "The MIME type of this file is '%s' "
                    "and can not be installed on this system.") % mimetype[0]
                return False
            else:
                self.error_header = _(
                    "Could not open '%s'") % os.path.basename(file)
                self.error_body = _(
                    "The package might be corrupted or you are not "
                    "allowed to open the file. Check the permissions "
                    "of the file.")
                return False
        # NOTE(review): the success path falls through returning None
        # (falsy), unlike openCache which returns True — confirm callers
        # do not test this return value.

    def compareDebWithCache(self):
        """Fill version_info_title/msg when the archive also carries this
        package, advising the user which version to prefer."""
        # check if the package is available in the normal sources as well
        res = self._deb.compare_to_version_in_cache(use_installed=False)
        if not self._options.non_interactive and res != DebPackage.VERSION_NONE:
            try:
                pkg = self._cache[self._deb.pkgname]
            except (KeyError, TypeError):
                return
            if self._deb.downloaded:
                self.version_info_title = ""
                self.version_info_msg = ""
                return
            # FIXME: make this strs better
            if res == DebPackage.VERSION_SAME:
                if pkg.candidate and pkg.candidate.downloadable:
                    self.version_info_title = _(
                        "Same version is available in a software channel")
                    self.version_info_msg = _(
                        "You are recommended to install the software "
                        "from the channel instead.")
            elif res == DebPackage.VERSION_NEWER:
                if pkg.candidate and pkg.candidate.downloadable:
                    self.version_info_title = _(
                        "An older version is available in a software channel")
                    self.version_info_msg = _(
                        "Generally you are recommended to install "
                        "the version from the software channel, since "
                        "it is usually better supported.")
            elif res == DebPackage.VERSION_OUTDATED:
                if pkg.candidate and pkg.candidate.downloadable:
                    self.version_info_title = _(
                        "A later version is available in a software "
                        "channel")
                    self.version_info_msg = _(
                        "You are strongly advised to install "
                        "the version from the software channel, since "
                        "it is usually better supported.")

    def compareProvides(self):
        """Return the set of virtual-package names the installed version
        provides but the new .deb no longer does, and which some other
        installed package depends on."""
        provides = set()
        broken_provides = set()
        try:
            pkg = self._cache[self._deb.pkgname].installed
        except (KeyError, TypeError):
            pkg = None
        if pkg:
            if pkg.provides:
                # provides entries dropped by the new package ...
                for p in self._deb.provides:
                    for i in p:
                        provides.add(i[0])
                provides = set(pkg.provides).difference(provides)
                # ... that some installed package still depends on.
                if provides:
                    for package in list(self._cache.keys()):
                        if self._cache[package].installed:
                            for dep in self._cache[package].installed.dependencies:
                                for d in dep.or_dependencies:
                                    if d.name in provides:
                                        broken_provides.add(d.name)
        return broken_provides

    def download_package(self):
        """Fetch the candidate .deb from the archive next to the current
        file (or /tmp if unwritable) and re-open it as downloaded."""
        dirname = os.path.abspath(os.path.dirname(self._deb.filename))
        package = self._cache[self._deb.pkgname].candidate
        pkgname = os.path.basename(package.filename)
        if package.downloadable:
            if not os.access(dirname, os.W_OK):
                dirname = "/tmp"
            if not os.path.exists(os.path.join(dirname, pkgname)):
                package.fetch_binary(dirname)
            self.open(os.path.join(dirname, pkgname), True)
            return True
        # NOTE(review): non-downloadable candidates fall through
        # returning None — confirm callers treat that as failure.

    def get_changes(self):
        """Compute install/remove/unauthenticated from the .deb and build
        the human-readable self.deps summary.  Always returns True."""
        (self.install, self.remove,
         self.unauthenticated) = self._deb.required_changes
        self.deps = ""
        if len(self.remove) == len(self.install) == 0:
            self.deps = _("All dependencies are satisfied")
        if len(self.remove) > 0:
            # FIXME: use ngettext here
            self.deps += _("Requires the <b>removal</b> of %s packages\n"
                           ) % len(self.remove)
        if len(self.install) > 0:
            self.deps += _("Requires the installation of %s packages") % len(
                self.install)
        return True

    def try_acquire_lock(self):
        """Check if we can lock the apt database; releases the lock again
        immediately so this is only a probe."""
        try:
            apt_pkg.pkgsystem_lock()
        except SystemError:
            self.error_header = _("Only one software management tool is allowed to"
                                  " run at the same time")
            self.error_body = _("Please close the other application e.g. 'Update "
                                "Manager', 'aptitude' or 'Synaptic' first.")
            return False
        apt_pkg.pkgsystem_unlock()
        return True

    def acquire_lock(self):
        """Lock the pkgsystem for install."""
        # sanity check ( moved here )
        if self._deb is None:
            return False
        # check if we can lock the apt database
        try:
            apt_pkg.pkgsystem_lock()
        except SystemError:
            self.error_header = _("Only one software management tool is allowed to"
                                  " run at the same time")
            self.error_body = _("Please close the other application e.g. 'Update "
                                "Manager', 'aptitude' or 'Synaptic' first.")
            return False
        return True

    def release_lock(self):
        """Release the pkgsystem lock."""
        # BUG FIX: this previously called apt_pkg.pkgsystem_lock(),
        # re-taking the lock instead of releasing it, so the lock was
        # never dropped after an install.
        apt_pkg.pkgsystem_unlock()
        return True
class _AptChangelog():
    """Locate, download and extract the Debian changelog for a package.

    Resolution order: a locally installed changelog, the APT changelog
    service (Acquire::Changelogs::URI::Origin), the Linux Mint repositories,
    Launchpad PPAs, and finally the cached or downloadable package itself,
    from which the changelog is extracted.

    Downloads larger than max_download_size require interactive confirmation
    (or are skipped in non-interactive mode).
    """

    def __init__(self, interactive:bool=False):
        # interactive: allow user prompts (size confirmations)
        self.interactive = interactive
        # constants
        # apt uses MB rather than MiB, so let's stay consistent
        self.MB = 1000 ** 2
        # downloads larger than this require confirmation or fail
        self.max_download_size_default = 1.5 * self.MB
        self.max_download_size = self.max_download_size_default
        max_download_size_msg_template = "\
To retrieve the full changelog, %s MB have to be downloaded.\n%s\
\n\
Proceed with the download?"
        self.max_download_size_msg_lc = max_download_size_msg_template % \
            ("%.1f", "Otherwise we will try to retrieve just the last change.\n")
        self.max_download_size_msg = max_download_size_msg_template % ("%.1f","")
        self.max_download_size_msg_unknown = max_download_size_msg_template % \
            ("an unknown amount of", "")
        # apt_cache doubles as a sentinel: None = not loaded yet,
        # "invalid" = broken apt configuration detected below
        self.apt_cache = None
        self.apt_cache_date = None
        self.candidate = None
        # get apt's configuration
        apt_pkg.init_config()
        if apt_pkg.config.exists("Acquire::Changelogs::URI::Origin"):
            self.apt_origins = apt_pkg.config.subtree("Acquire::Changelogs::URI::Origin")
        else:
            self.apt_origins = None
        if apt_pkg.config.exists("Dir::Cache::pkgcache"):
            self.apt_cache_path = apt_pkg.config.find_dir("Dir::Cache")
            self.pkgcache = apt_pkg.config.find_file("Dir::Cache::pkgcache")
        else:
            self.apt_cache = "invalid"
        # short-circuit: when apt_cache is "invalid" the path attributes
        # were never set and must not be evaluated
        if (self.apt_cache or
                not os.path.isdir(self.apt_cache_path) or
                not os.path.isfile(self.pkgcache)
                ):
            print("E: Invalid APT configuration found, try to run `apt update` first",
                  file=sys.stderr)
            self.close(99)

    def get_cache_date(self):
        """Return the mtime of apt's pkgcache file, or None if missing."""
        if os.path.isfile(self.pkgcache):
            return os.path.getmtime(self.pkgcache)
        return None

    def refresh_cache(self):
        """Open the apt cache, re-opening it when pkgcache changed on disk."""
        cache_date = self.get_cache_date()
        if not self.apt_cache:
            self.apt_cache = Cache()
            self.apt_cache_date = cache_date
        elif cache_date != self.apt_cache_date:
            self.apt_cache.open(None)
            self.apt_cache_date = cache_date

    def drop_cache(self):
        """Forget the cached apt cache and current candidate."""
        if self.candidate:
            self.candidate = None
        self.apt_cache = None

    def get_changelog(self, pkg_name:str, no_local:bool=False):
        """Return the changelog text for pkg_name (may be name=version or
        name/release), or exit via exit_on_fail()/close() on failure.

        no_local skips the locally installed changelog and forces a remote
        lookup.
        """
        self.refresh_cache()
        self.candidate = self.parse_package_metadata(pkg_name)

        # parse the package's origin
        if not self.candidate.downloadable:
            origin = "local_package"
        elif self.candidate.origin == "linuxmint":
            origin = "linuxmint"
        elif self.candidate.origin.startswith("LP-PPA-"):
            origin = "LP-PPA"
        elif self.apt_origins and self.candidate.origin in self.apt_origins.list():
            origin = "APT"
        else:
            origin = "unsupported"

        # Check for changelog of installed package first
        has_local_changelog = False
        uri = None
        if not no_local and self.candidate.is_installed:
            if _DEBUG: print("Package is installed...")
            uri = self.get_changelog_from_filelist(
                self.candidate.installed_files, local=True)
            # Ubuntu kernel workarounds
            # BUG FIX: guard against uri being None -- when no changelog was
            # found in the installed filelist, uri.replace() raised
            # AttributeError for Ubuntu linux-signed kernels.
            if uri and self.candidate.origin == "Ubuntu":
                if self.candidate.source_name == "linux-signed":
                    uri = uri.replace("linux-image","linux-modules")
                if self.candidate.source_name == "linux-meta":
                    uri = None
            if uri and not os.path.isfile(uri):
                uri = None

        # Do nothing if local changelog exists
        if uri:
            has_local_changelog = True
        # all origins that APT supports
        elif origin == 'APT':
            uri = self.get_apt_changelog_uri(
                self.apt_origins.get(self.candidate.origin))
            r = self.check_url(uri)
            if not r:
                self.exit_on_fail(2)
        # Linux Mint repo
        elif origin == 'linuxmint':
            # Mint repos don't have .debian.tar.xz files, only full packages, so
            # check the package cache first
            base_uri, _ = os.path.split(self.candidate.uri)
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                # fall back to last change info for the source package
                # Mint's naming scheme seems to be using amd64 unless source
                # is i386 only, we always check amd64 first
                base_uri = "http://packages.linuxmint.com/dev/%s_%s_%s.changes"
                uri = base_uri % (self.candidate.source_name,
                                  self.candidate.source_version, "amd64")
                r = self.check_url(uri, False)
                if not r:
                    uri = base_uri % (self.candidate.source_name,
                                      self.candidate.source_version, "i386")
                    r = self.check_url(uri, False)
                    if not r:
                        self.exit_on_fail(3)
        # Launchpad PPA
        elif origin == 'LP-PPA':
            ppa_owner, ppa_name, _ = \
                self.candidate.uri.split("ppa.launchpad.net/")[1].split("/", 2)
            base_uri = "http://ppa.launchpad.net/%(owner)s/%(name)s/ubuntu/pool/main/%(source_prefix)s/%(source_name)s" % {
                "owner": ppa_owner,
                "name": ppa_name,
                "source_prefix": self.source_prefix(),
                "source_name": self.candidate.source_name
            }
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                # fall back to last change info only
                uri = "https://launchpad.net/~%(owner)s/+archive/ubuntu/%(name)s/+files/%(source_name)s_%(source_version)s_source.changes" % {
                    "owner" : ppa_owner,
                    "name": ppa_name,
                    "source_name": self.candidate.source_name,
                    "source_version": self.candidate.source_version
                }
                r = self.check_url(uri, False)
                if not r:
                    self.exit_on_fail(4)
        # Not supported origin
        elif origin == 'unsupported':
            if _DEBUG: print("Unsupported Package")
            base_uri, _ = os.path.split(self.candidate.uri)
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                self.exit_on_fail(5)
        # Locally installed package without local changelog or remote
        # source, hope it's cached and contains a changelog
        elif origin == 'local_package':
            uri = self.apt_cache_path + self.candidate.filename
            if not os.path.isfile(uri):
                self.exit_on_fail(6)

        # Changelog downloading, extracting and processing:
        changelog = ""
        # local changelog
        if has_local_changelog and not no_local:
            if _DEBUG: print("Using local changelog:",uri)
            try:
                filename = os.path.basename(uri)
                # determine file type by name/extension
                # as per debian policy 4.4 the encoding must be UTF-8
                # as per policy 12.7 the name must be changelog.Debian.gz or
                # changelog.gz (deprecated)
                if filename.lower().endswith('.gz'):
                    changelog = gzip.open(uri,'r').read().decode('utf-8')
                elif filename.lower().endswith('.xz'):
                    # just in case / future proofing
                    changelog = lzma.open(uri,'r').read().decode('utf-8')
                elif filename.lower() == 'changelog':
                    changelog = open(uri, 'r').read().encode().decode('utf-8')
                else:
                    raise ValueError('Unknown changelog format')
            except Exception as e:
                _generic_exception_handler(e)
                self.exit_on_fail(1)
        # APT-format changelog, download directly
        # - unfortunately this is slow since the servers support no compression
        elif origin == "APT":
            if _DEBUG: print("Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            changelog = r.text
            r.close()
        # last change changelog, download directly
        elif uri.endswith('.changes'):
            if _DEBUG: print("Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            changes = r.text.split("Changes:")[1].split("Checksums")[0].split("\n")
            r.close()
            # a line with a single "." marks an empty line in .changes files
            for change in changes:
                change = change.strip()
                if change:
                    if change == ".":
                        change = ""
                    changelog += change + "\n"
        # compressed binary source, download and extract changelog
        else:
            source_is_cache = uri.startswith(self.apt_cache_path)
            if _DEBUG: print("Using cached package:" if source_is_cache else
                             "Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            try:
                if not source_is_cache:
                    # download stream to temporary file
                    tmpFile = tempfile.NamedTemporaryFile(prefix="apt-changelog-")
                    if self.interactive and r.length:
                        # download chunks with progress indicator
                        recv_length = 0
                        blocks = 60
                        for data in r.iter_content(chunk_size=16384):
                            recv_length += len(data)
                            tmpFile.write(data)
                            recv_pct = recv_length / r.length
                            recv_blocks = int(blocks * recv_pct)
                            print("\r[%(progress)s%(spacer)s] %(percentage).1f%%" % {
                                "progress": "=" * recv_blocks,
                                "spacer": " " * (blocks - recv_blocks),
                                "percentage": recv_pct * 100
                            }, end="", flush=True)
                        # clear progress bar when done
                        print("\r" + " " * (blocks + 10), end="\r", flush=True)
                    else:
                        # no content-length or non-interactive, download in one go
                        # up to the configured max_download_size, ask only when
                        # exceeded
                        r.raw.decode_content = True
                        size = 0
                        size_exceeded = False
                        while True:
                            buf = r.raw.read(16*1024)
                            if not size_exceeded:
                                size += len(buf)
                                if size > self.max_download_size:
                                    if not self.user_confirm(self.max_download_size_msg_unknown):
                                        r.close()
                                        tmpFile.close()
                                        return ""
                                    else:
                                        # only ask once
                                        size_exceeded = True
                            if not buf:
                                break
                            tmpFile.write(buf)
                    r.close()
                    tmpFile.seek(0)
                if uri.endswith(".deb"):
                    # process .deb file
                    if source_is_cache:
                        f = uri
                    else:
                        f = tmpFile.name
                    # We could copy the downloaded .deb files to the apt
                    # cache here but then we'd need to run the script elevated:
                    # shutil.copy(f, self.apt_cache_path + os.path.basename(uri))
                    deb = DebPackage(f)
                    changelog_file = self.get_changelog_from_filelist(deb.filelist)
                    if changelog_file:
                        changelog = deb.data_content(changelog_file)
                        if changelog.startswith('Automatically decompressed:'):
                            # strip the python-apt marker prefix
                            changelog = changelog[29:]
                    else:
                        raise ValueError('Malformed Debian package')
                elif uri.endswith(".diff.gz"):
                    # Ubuntu partner repo has .diff.gz files,
                    # we can extract a changelog from that
                    data = gzip.open(tmpFile.name, "r").read().decode('utf-8')
                    additions = data.split("+++")
                    for addition in additions:
                        lines = addition.split("\n")
                        if "/debian/changelog" in lines[0]:
                            for line in lines[2:]:
                                if line.startswith("+"):
                                    changelog += "%s\n" % line[1:]
                                else:
                                    break
                    if not changelog:
                        raise ValueError('No changelog in .diff.gz')
                else:
                    # process .tar.xz file
                    with tarfile.open(fileobj=tmpFile, mode="r:xz") as tar:
                        changelog_file = self.get_changelog_from_filelist(
                            [s.name for s in tar.getmembers()
                             if s.type in (b"0", b"2")])
                        if changelog_file:
                            changelog = tar.extractfile(changelog_file).read().decode()
                        else:
                            raise ValueError('No changelog in source package')
            except Exception as e:
                _generic_exception_handler(e)
                self.exit_on_fail(520)
            if 'tmpFile' in vars():
                try:
                    tmpFile.close()
                except Exception as e:
                    _generic_exception_handler(e)

        # ALL DONE
        return changelog

    def parse_package_metadata(self, pkg_name:str):
        """ Creates the self.candidate object based on package name=version/release

            Wildcard matching is only used for version and release, and
            only the first match is processed.
        """
        # parse =version declaration
        if "=" in pkg_name:
            (pkg_name, pkg_version) = pkg_name.split("=", 1)
            pkg_release = None
        # parse /release declaration (only if no version specified)
        elif "/" in pkg_name:
            (pkg_name, pkg_release) = pkg_name.split("/", 1)
            pkg_version = None
        else:
            pkg_version = None
            pkg_release = None

        # check if pkg_name exists
        # unlike apt no pattern matching, a single exact match only
        if pkg_name in self.apt_cache.keys():
            pkg = self.apt_cache[pkg_name]
        else:
            print("E: Unable to locate package %s" % pkg_name, file=sys.stderr)
            self.close(13)

        # get package data
        _candidate = None
        candidate = None
        if pkg_release or pkg_version:
            match_found = False
            for _pkg in pkg.versions:
                if pkg_version:
                    if fnmatch.fnmatch(_pkg.version, pkg_version):
                        match_found = True
                else:
                    for _origin in _pkg.origins:
                        if fnmatch.fnmatch(_origin.archive, pkg_release):
                            match_found = True
                if match_found:
                    _candidate = _pkg
                    break
            if not match_found:
                if pkg_release:
                    print('E: Release "%s" is unavailable for "%s"' %
                          (pkg_release, pkg.name), file=sys.stderr)
                else:
                    print('E: Version "%s" is unavailable for "%s"' %
                          (pkg_version, pkg.name), file=sys.stderr)
                self.close(14)
        else:
            _candidate = pkg.candidate
        # flatten the apt objects into a simple record
        candidate = _Package(
            version = _candidate.version,
            name = _candidate.package.name,
            fullname = None,
            architecture = pkg.architecture,
            source_name = _candidate.source_name,
            source_version = _candidate.source_version,
            uri = _candidate.uri,
            filename = os.path.basename(_candidate.filename),
            origin = _candidate.origins[0].origin,
            component = _candidate.origins[0].component,
            downloadable = _candidate.downloadable,
            is_installed = _candidate.is_installed,
            dependencies = _candidate.dependencies
        )
        if candidate.is_installed:
            candidate.installed_files = pkg.installed_files
        # keep the epoch in source_version_raw, strip it from source_version
        candidate.source_version_raw = candidate.source_version
        if ":" in candidate.source_version:
            candidate.source_version = candidate.source_version.split(":", 1)[1]
        return candidate

    def check_url(self, url:str, check_size:bool=True,
                  stream:bool=True, msg:str=None):
        """ Returns the response (with an added .length attribute, bytes from
            Content-Length or 0) if url can be downloaded and fits the size
            requirements, otherwise False.
        """
        if _DEBUG: print("Checking:", url)
        try:
            _r = requests.get(url, stream=stream, timeout=5)
        except Exception as e:
            _generic_exception_handler(e)
        else:
            if _r:
                length = _r.headers.get("Content-Length")
                if length:
                    _r.length = int(length)
                else:
                    _r.length = 0
                # accept when size checking is off, or the size is within the
                # limit, or the user confirmed the oversized download
                if (not check_size or
                        not (check_size and
                             _r.length > self.max_download_size and
                             not self.user_confirm(
                                 (self.max_download_size_msg_lc if not msg else msg) %
                                 (_r.length / self.MB))
                             )
                        ):
                    return _r
        if '_r' in vars():
            _r.close()
        return False

    @staticmethod
    def close(err:int=0):
        """ Exit with the given status code """
        sys.exit(err)

    def exit_on_fail(self, err:int=404):
        """ Prints error message and calls self.close() """
        try:
            details = "Changelog unavailable for %s=%s" % \
                (self.candidate.source_name, self.candidate.source_version_raw)
        except AttributeError:
            # no candidate parsed yet
            details = ""
        print("E: Failed to fetch changelog. %s" % details, file=sys.stderr)
        self.close(err)

    @staticmethod
    def strtobool(val):
        """Map a yes/no answer to bool; raise ValueError otherwise."""
        val = val.lower()
        if val in ('y', 'yes'):
            return True
        elif val in ('n', 'no'):
            return False
        else:
            raise ValueError("Invalid response value %r" % (val,))

    def user_confirm(self, q:str):
        """ returns bool (always False in non-interactive mode) """
        if not self.interactive:
            if _DEBUG: print("Maximum size exceeded, skipping in non-interactive mode")
            return False
        print('%s [y/n] ' % q, end='')
        while True:
            try:
                response = self.strtobool(input())
                print("")
                return response
            except ValueError:
                print('Invalid response. Try again [y/n]: ', end='')
            except KeyboardInterrupt:
                # ignore Ctrl-C and keep prompting
                pass

    def get_deb_or_tar(self, uri_tar:str=None):
        """ Returns request and URI of the preferred source

            The choice is made based on availability and size.
            If .deb is smaller than comparison_trigger_size,
            or if no uri_tar is given, then .deb is always selected.
        """
        comparison_trigger_size = 50000
        r_deb = self.check_url(self.candidate.uri, False)
        if r_deb:
            if uri_tar and r_deb.length > comparison_trigger_size:
                # try for .tar.xz
                r_tar = self.check_url(uri_tar, False)
                # validate and compare sizes
                if r_tar and r_tar.length < r_deb.length:
                    _r = r_tar
                    r_deb.close()
                else:
                    _r = r_deb
                    if r_tar:
                        r_tar.close()
            else:
                _r = r_deb
            if (not _r.length > self.max_download_size or
                    self.user_confirm(self.max_download_size_msg_lc %
                                      (_r.length / self.MB))
                    ):
                return (_r, _r.url)
        return (False, "")

    def get_changelog_from_filelist(self, filelist:list, local:bool=False):
        """ Returns hopefully the correct "changelog" path, or None.

            We should not need to be searching because the debian policy
            says it must be at debian/changelog for source packages but
            not all seem to adhere to the policy:
            https://www.debian.org/doc/debian-policy/ch-source.html#debian-changelog-debian-changelog
        """
        files = [s for s in filelist if "changelog" in s.lower()]
        if local:
            testpath = '/usr/share/doc/%s/changelog' % self.candidate.name
            for item in files:
                if item.lower().startswith(testpath):
                    return item
        else:
            testpath = 'debian/changelog'
            if testpath in files:
                return testpath
            testpath = 'recipe/debian/changelog'
            if testpath in files:
                return testpath
            testpath = 'usr/share/doc/%s/changelog' % self.candidate.name
            for item in files:
                if item.lower().startswith(testpath):
                    return item
        # no hits in the standard locations, let's try our luck in
        # random locations at the risk of getting the wrong file
        for item in files:
            if os.path.basename(item).lower().startswith("changelog"):
                return item
        return None

    def get_apt_changelog_uri(self, uri_template:str):
        """ Returns URI based on provided apt changelog URI template

            Emulates apt's std::string pkgAcqChangelog::URI

            The template must contain the @CHANGEPATH@ variable, which will
            be expanded to COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER.
            Component is omitted for releases without one (= flat-style
            repositories).
        """
        source_version = self.candidate.source_version

        def get_kernel_version_from_meta_package(pkg):
            # follow linux-meta dependency chains down to the real kernel
            # source package and return its version
            for dependency in pkg.dependencies:
                if not dependency.target_versions:
                    if _DEBUG: print("W: Kernel dependency not found:", dependency)
                    return None
                deppkg = dependency.target_versions[0]
                if deppkg.source_name in ("linux", "linux-signed"):
                    return deppkg.source_version
                if deppkg.source_name == "linux-meta":
                    _pkg = self.parse_package_metadata(str(deppkg))
                    return get_kernel_version_from_meta_package(_pkg)
            return None

        # Ubuntu kernel meta package workaround
        if (self.candidate.origin == "Ubuntu" and
                self.candidate.source_name == "linux-meta"
                ):
            _source_version = get_kernel_version_from_meta_package(self.candidate)
            if _source_version:
                source_version = _source_version
                self.candidate.source_name = "linux"

        # Ubuntu signed kernel workaround
        if (self.candidate.origin == "Ubuntu" and
                self.candidate.source_name == "linux-signed"
                ):
            self.candidate.source_name = "linux"

        # XXX: Debian does not seem to reliably keep changelogs for previous
        #      (kernel) versions, so should we always look for the latest
        #      version instead on Debian? apt does not do this but the
        #      packages.debian.org website shows the latest version in the
        #      selected archive

        # strip epoch
        if ":" in source_version:
            source_version = source_version.split(":", 1)[1]
        # the path is: COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER, e.g.
        # main/a/apt/apt_1.1 or contrib/liba/libapt/libapt_2.0
        return uri_template.replace(
            '@CHANGEPATH@',
            "%(component)s%(source_prefix)s/%(source_name)s/%(source_name)s_%(source_version)s" % {
                "component": self.candidate.component + "/"
                    if self.candidate.component and self.candidate.component != ""
                    else "",
                "source_prefix": self.source_prefix(),
                "source_name": self.candidate.source_name,
                "source_version": source_version
            })

    def source_prefix(self, source_name:str=None):
        """ Return prefix used for build repository URL """
        if not source_name:
            source_name = self.candidate.source_name
        # pool layout: "libfoo" lives under "libf/", everything else under
        # its first letter
        return source_name[0] if not source_name.startswith("lib") else \
            source_name[:4]

    def parse_dsc(self, url:str):
        """ Returns the preferred source filename listed in the .dsc at url,
            or None.  Preference: .debian.tar.xz, then .diff.gz, then .tar.xz.
        """
        _r = self.check_url(url, False, False)
        if _r:
            target = ""
            lines = _r.text.split("Files:", 1)[1].split(":", 1)[0].split("-----BEGIN", 1)[0].split("\n")
            target = [s.strip() for s in lines if s.strip().lower().endswith('.debian.tar.xz')]
            if not target:
                target = [s.strip() for s in lines if s.strip().lower().endswith('.diff.gz')]
            if not target:
                target = [s.strip() for s in lines if s.strip().lower().endswith('.tar.xz')]
            # don't even test for .tar.gz, it will be too big compared to the .deb
            # if not target:
            #     target = [s.strip() for s in lines if s.strip().lower().endswith('.tar.gz')]
            if target:
                return target[0].split()[-1]
            elif _DEBUG:
                print(".dsc parse error for", url)
        return None

    def get_changelog_uri(self, base_uri:str):
        """ Tries to find a changelog in files listed in .dsc,
            locally cached packages as well as the remote .deb file

            Returns r and uri
        """
        uri = None
        # XXX: For APT sources we could just read the apt_pkg.SourceRecords()
        #      directly, if available, which it is not for most users, so
        #      probably not worth it
        target_filename = self.parse_dsc("%s/%s_%s.dsc" % (
            base_uri, self.candidate.source_name, self.candidate.source_version))
        # get .debian.tar.xz or .diff.gz as a priority as the smallest options
        if (base_uri and target_filename and (
                target_filename.lower().endswith('.debian.tar.xz') or
                target_filename.lower().endswith('.diff.gz')
                )):
            uri = "%s/%s" % (base_uri, target_filename)
            target_filename = None
            r = self.check_url(uri, msg = self.max_download_size_msg)
        else:
            r = None
        if not r:
            # fall back to cached local package
            uri = self.apt_cache_path + self.candidate.filename
            if not os.path.isfile(uri):
                # cache miss, download the full source package or the .deb,
                # depending on size and availability
                if target_filename:
                    uri_tar = "%s/%s" % (base_uri, target_filename)
                else:
                    uri_tar = None
                r, uri = self.get_deb_or_tar(uri_tar)
        return (r, uri)