Example #1
0
	def fetch_candidates(self, master_feed):
		package_names = [item.getAttribute("package") for item, item_attrs, depends in master_feed.get_package_impls(self)]

		if self.packagekit.available:
			return self.packagekit.fetch_candidates(package_names)

		# No PackageKit. Use apt-cache directly.
		for package in package_names:
			# Check to see whether we could get a newer version using apt-get
			try:
				null = os.open(os.devnull, os.O_WRONLY)
				child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null, universal_newlines = True)
				os.close(null)

				arch = version = size = None
				for line in child.stdout:
					line = line.strip()
					if line.startswith('Version: '):
						version = line[9:]
						version = try_cleanup_distro_version(version)
					elif line.startswith('Architecture: '):
						arch = canonical_machine(line[14:].strip())
					elif line.startswith('Size: '):
						size = int(line[6:].strip())
				if version and arch:
					cached = {'version': version, 'arch': arch, 'size': size}
				else:
					cached = None
				child.stdout.close()
				child.wait()
			except Exception as ex:
				logger.warning("'apt-cache show %s' failed: %s", package, ex)
				cached = None
			# (multi-arch support? can there be multiple candidates?)
			self.apt_cache[package] = cached
Example #2
0
    def add_dir_to_cache(self,
                         required_digest,
                         path,
                         try_helper=False,
                         dry_run=False):
        """Copy the contents of path to the cache.
		@param required_digest: the expected digest
		@type required_digest: str
		@param path: the root of the tree to copy
		@type path: str
		@param try_helper: attempt to use privileged helper before user cache (since 0.26)
		@type try_helper: bool
		@type dry_run: bool
		@raise BadDigest: if the contents don't match the given digest."""
        if self.lookup(required_digest):
            logger.info(_("Not adding %s as it already exists!"),
                        required_digest)
            return

        tmp = self.get_tmp_dir_for(required_digest)
        try:
            _copytree2(path, tmp)
            self.check_manifest_and_rename(required_digest,
                                           tmp,
                                           try_helper=try_helper,
                                           dry_run=dry_run)
        except:
            logger.warning(_("Error importing directory."))
            logger.warning(_("Deleting %s"), tmp)
            support.ro_rmtree(tmp)
            raise
Example #3
0
	def generate_cache(self):
		cache = []

		child = subprocess.Popen(["port", "-v", "installed"],
					  stdout = subprocess.PIPE, universal_newlines = True)
		for line in child.stdout:
			if not line.startswith(" "):
				continue
			if line.strip().count(" ") > 1:
				package, version, extra = line.split(None, 2)
			else:
				package, version = line.split()
				extra = ""
			if not extra.startswith("(active)"):
				continue
			version = version.lstrip('@')
			version = re.sub(r"\+.*", "", version) # strip variants
			zi_arch = '*'
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
				if match:
					platform, major, archs = match.groups()
					for arch in archs.split():
						zi_arch = canonical_machine(arch)
						cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
				else:
					cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				logger.warning(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})
		self._write_cache(cache)
		child.stdout.close()
		child.wait()
Example #4
0
def _link(a, b, tmpfile):
    """Keep 'a', delete 'b' and hard-link to 'a'
	@type a: str
	@type b: str
	@type tmpfile: str"""
    if not _byte_identical(a, b):
        logger.warning(
            _("Files should be identical, but they're not!\n%(file_a)s\n%(file_b)s"
              ), {
                  'file_a': a,
                  'file_b': b
              })

    b_dir = os.path.dirname(b)
    old_mode = os.lstat(b_dir).st_mode
    os.chmod(b_dir, old_mode | 0o200)  # Need write access briefly
    try:
        os.link(a, tmpfile)
        try:
            os.rename(tmpfile, b)
        except:
            os.unlink(tmpfile)
            raise
    finally:
        os.chmod(b_dir, old_mode)
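
A minimal standalone sketch of the same hard-link-then-rename pattern, using hypothetical paths (not the library's API): create the new link under a temporary name, then atomically rename it over the old file.

import os

a, b = '/tmp/demo/a.txt', '/tmp/demo/b.txt'  # hypothetical paths on one filesystem
tmpfile = b + '.tmp-link'                    # temporary name next to 'b'
os.link(a, tmpfile)                          # create the new hard link first
try:
    os.rename(tmpfile, b)                    # atomically replace 'b' with the link to 'a'
except OSError:
    os.unlink(tmpfile)                       # drop the spare link if the rename fails
    raise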
Example #5
0
def find_bin_dir(paths=None):
    """Find the first writable path in the list (default $PATH),
	skipping /bin, /sbin and everything under /usr except /usr/local/bin
	@type paths: [str] | None
	@rtype: str"""
    if paths is None:
        paths = os.environ['PATH'].split(os.pathsep)
    for path in paths:
        if path.startswith('/usr/') and not path.startswith('/usr/local/bin'):
            # (/usr/local/bin is OK if we're running as root)
            pass
        elif path.startswith('/bin') or path.startswith('/sbin'):
            pass
        elif os.path.realpath(path).startswith(basedir.xdg_cache_home):
            pass  # print "Skipping cache", first_path
        elif not os.access(path, os.W_OK):
            pass  # print "No access", first_path
        else:
            break
    else:
        path = os.path.expanduser('~/bin/')
        logger.warning('%s is not in $PATH. Add it with:\n%s' %
                       (path, _export('PATH', path + ':$PATH')))

        if not os.path.isdir(path):
            os.makedirs(path)
    return path
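
A quick usage sketch, assuming the find_bin_dir above is in scope; the path list passed in is hypothetical.

import os

# With no argument the function scans $PATH; here we pass an explicit list.
bin_dir = find_bin_dir(['/usr/bin', '/bin', os.path.expanduser('~/.local/bin')])
print(bin_dir)  # the first writable, non-system entry, or ~/bin/ if none qualifies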
Example #6
0
def load_config(handler = None):
	"""@type handler: L{zeroinstall.injector.handler.Handler} | None
	@rtype: L{Config}"""
	config = Config(handler)
	parser = ConfigParser.RawConfigParser()
	parser.add_section('global')
	parser.set('global', 'help_with_testing', 'False')
	parser.set('global', 'freshness', str(60 * 60 * 24 * 30))	# One month
	parser.set('global', 'network_use', 'full')
	parser.set('global', 'auto_approve_keys', 'True')

	path = basedir.load_first_config(config_site, config_prog, 'global')
	if path:
		logger.info("Loading configuration from %s", path)
		try:
			parser.read(path)
		except Exception as ex:
			logger.warning(_("Error loading config: %s"), str(ex) or repr(ex))

	config.help_with_testing = parser.getboolean('global', 'help_with_testing')
	config.network_use = parser.get('global', 'network_use')
	config.freshness = int(parser.get('global', 'freshness'))
	config.auto_approve_keys = parser.getboolean('global', 'auto_approve_keys')

	assert config.network_use in network_levels, config.network_use

	return config
Example #7
0
def check(blockers, reporter=None):
    """See if any of the blockers have pending exceptions.
	If reporter is None, raise the first and log the rest.
	@type blockers: [L{Blocker}]
	@param reporter: invoke this function on each error"""
    ex = None
    if isinstance(blockers, Blocker):
        blockers = (blockers, )
    for b in blockers:
        if b.exception:
            b.exception_read = True
            if reporter:
                try:
                    reporter(*b.exception)
                except:
                    logger.warning("Failure reporting error! Error was: %s",
                                   repr(b.exception[0]))
                    raise
            elif ex is None:
                ex = b.exception
            else:
                logger.warning(_("Multiple exceptions waiting; skipping %s"),
                               b.exception[0])
    if ex:
        support.raise_with_traceback(ex[0], ex[1])
Example #8
0
			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					logger.warning(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self, item)
				feed.implementations[id] = impl
				new_impls.append(impl)

				impl.installed = installed
				impl.metadata = item_attrs
				impl.requires = depends

				if 'run' not in impl.commands:
					item_main = item_attrs.get('main', None)
					if item_main:
						if item_main.startswith('/'):
							impl.main = item_main
						else:
							raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
										item_main)
				impl.upstream_stability = model.packaged

				return impl
Example #9
0
def load_config(handler=None):
    """@type handler: L{zeroinstall.injector.handler.Handler} | None
	@rtype: L{Config}"""
    config = Config(handler)
    parser = ConfigParser.RawConfigParser()
    parser.add_section('global')
    parser.set('global', 'help_with_testing', 'False')
    parser.set('global', 'freshness', str(60 * 60 * 24 * 30))  # One month
    parser.set('global', 'network_use', 'full')
    parser.set('global', 'auto_approve_keys', 'True')

    path = basedir.load_first_config(config_site, config_prog, 'global')
    if path:
        logger.info("Loading configuration from %s", path)
        try:
            parser.read(path)
        except Exception as ex:
            logger.warning(_("Error loading config: %s"), str(ex) or repr(ex))

    config.help_with_testing = parser.getboolean('global', 'help_with_testing')
    config.network_use = parser.get('global', 'network_use')
    config.freshness = int(parser.get('global', 'freshness'))
    config.auto_approve_keys = parser.getboolean('global', 'auto_approve_keys')

    assert config.network_use in network_levels, config.network_use

    return config
Example #10
0
	def get_package_info(self, package, factory):
		# Add installed versions...
		"""@type package: str"""
		for entry in os.listdir(self._packages_dir):
			name, version, build = entry.rsplit('-', 2)
			if name == package:
				gotarch = False
				# (read in binary mode to avoid unicode errors in C locale)
				with open(os.path.join(self._packages_dir, entry, "desc"), 'rb') as stream:
					for line in stream:
						if line == b"%ARCH%\n":
							gotarch = True
							continue
						if gotarch:
							arch = line.strip().decode('utf-8')
							break
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					logger.warning(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue
	
				impl = factory('package:arch:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

				impl.quick_test_file = os.path.join(self._packages_dir, entry, 'desc')

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:arch')
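
The directory-name parsing above can be checked in isolation; the package database entry below is made up.

entry = "zlib-1.2.11-3"                      # hypothetical <name>-<version>-<build> directory
name, version, build = entry.rsplit('-', 2)  # split from the right so dashes in the name survive
print(name, version, build)                  # zlib 1.2.11 3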
Example #11
0
def discover_existing_apps():
    """Search through the configured XDG datadirs looking for .desktop files created by L{add_to_menu}.
	@return: a map from application URIs to .desktop filenames"""
    already_installed = {}
    for d in basedir.load_data_paths('applications'):
        for desktop_file in os.listdir(d):
            if desktop_file.startswith(
                    'zeroinstall-') and desktop_file.endswith('.desktop'):
                full = os.path.join(d, desktop_file)
                try:
                    with open(full, 'rt') as stream:
                        for line in stream:
                            line = line.strip()
                            if line.startswith('Exec=0launch '):
                                bits = line.split(' -- ', 1)
                                if ' ' in bits[0]:
                                    uri = bits[0].split(
                                        ' ', 1)[1]  # 0launch URI -- %u
                                else:
                                    uri = bits[1].split(
                                        ' ', 1)[0].strip()  # 0launch -- URI %u
                                already_installed[uri] = full
                                break
                        else:
                            logger.info(_("Failed to find Exec line in %s"),
                                        full)
                except Exception as ex:
                    logger.warning(
                        _("Failed to load .desktop file %(filename)s: %(exceptions"
                          ), {
                              'filename': full,
                              'exception': ex
                          })
    return already_installed
Example #12
0
def find_bin_dir(paths = None):
	"""Find the first writable path in the list (default $PATH),
	skipping /bin, /sbin and everything under /usr except /usr/local/bin
	@type paths: [str] | None
	@rtype: str"""
	if paths is None:
		paths = os.environ['PATH'].split(os.pathsep)
	for path in paths:
		if path.startswith('/usr/') and not path.startswith('/usr/local/bin'):
			# (/usr/local/bin is OK if we're running as root)
			pass
		elif path.startswith('/bin') or path.startswith('/sbin'):
			pass
		elif os.path.realpath(path).startswith(basedir.xdg_cache_home):
			pass # print "Skipping cache", first_path
		elif not os.access(path, os.W_OK):
			pass # print "No access", first_path
		else:
			break
	else:
		path = os.path.expanduser('~/bin/')
		logger.warning('%s is not in $PATH. Add it with:\n%s' % (path, _export('PATH', path + ':$PATH')))

		if not os.path.isdir(path):
			os.makedirs(path)
	return path
Example #13
0
def discover_existing_apps():
	"""Search through the configured XDG datadirs looking for .desktop files created by L{add_to_menu}.
	@return: a map from application URIs to .desktop filenames"""
	already_installed = {}
	for d in basedir.load_data_paths('applications'):
		for desktop_file in os.listdir(d):
			if desktop_file.startswith('zeroinstall-') and desktop_file.endswith('.desktop'):
				full = os.path.join(d, desktop_file)
				try:
					with open(full, 'rt') as stream:
						for line in stream:
							line = line.strip()
							if line.startswith('Exec=0launch '):
								bits = line.split(' -- ', 1)
								if ' ' in bits[0]:
									uri = bits[0].split(' ', 1)[1]		# 0launch URI -- %u
								else:
									uri = bits[1].split(' ', 1)[0].strip()	# 0launch -- URI %u
								already_installed[uri] = full
								break
						else:
							logger.info(_("Failed to find Exec line in %s"), full)
				except Exception as ex:
					logger.warning(_("Failed to load .desktop file %(filename)s: %(exceptions"), {'filename': full, 'exception': ex})
	return already_installed
Example #14
0
	def get_package_info(self, package, factory):
		"""@type package: str"""
		_name_version_regexp = '^(.+)-([^-]+)$'

		nameversion = re.compile(_name_version_regexp)
		for pkgname in os.listdir(self._pkgdir):
			pkgdir = os.path.join(self._pkgdir, pkgname)
			if not os.path.isdir(pkgdir): continue

			#contents = open(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

			match = nameversion.search(pkgname)
			if match is None:
				logger.warning(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname})
				continue
			else:
				name = match.group(1)
				if name != package:
					continue
				version = try_cleanup_distro_version(match.group(2))

			machine = host_machine

			impl = factory('package:ports:%s:%s:%s' % \
						(package, version, machine))
			impl.version = model.parse_version(version)
			impl.machine = machine
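
A standalone check of the name/version split used above, with a made-up Ports package directory name.

import re

nameversion = re.compile('^(.+)-([^-]+)$')
match = nameversion.search("libpng-1.6.37")  # hypothetical pkgname
print(match.group(1), match.group(2))        # libpng 1.6.37 (the greedy group keeps earlier dashes in the name)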
Example #15
0
	def check_manifest_and_rename(self, required_digest, tmp, extract = None, try_helper = False, dry_run = False):
		"""Check that tmp[/extract] has the required_digest.
		On success, rename the checked directory to the digest, and
		make the whole tree read-only.
		@type required_digest: str
		@type tmp: str
		@type extract: str | None
		@param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
		@type try_helper: bool
		@param dry_run: just print what we would do to stdout (and delete tmp)
		@type dry_run: bool
		@raise BadDigest: if the input directory doesn't match the given digest"""
		if extract:
			extracted = os.path.join(tmp, extract)
			if not os.path.isdir(extracted):
				raise Exception(_('Directory %s not found in archive') % extract)
		else:
			extracted = tmp

		from . import manifest

		manifest.fixup_permissions(extracted)

		alg, required_value = manifest.splitID(required_digest)
		actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
		if actual_digest != required_digest:
			raise BadDigest(_('Incorrect manifest -- archive is corrupted.\n'
					'Required digest: %(required_digest)s\n'
					'Actual digest: %(actual_digest)s\n') %
					{'required_digest': required_digest, 'actual_digest': actual_digest})

		if try_helper:
			if self._add_with_helper(required_digest, extracted, dry_run = dry_run):
				support.ro_rmtree(tmp)
				return
			logger.info(_("Can't add to system store. Trying user store instead."))

		logger.info(_("Caching new implementation (digest %s) in %s"), required_digest, self.dir)

		final_name = os.path.join(self.dir, required_digest)
		if os.path.isdir(final_name):
			logger.warning(_("Item %s already stored.") % final_name) # not really an error
			return

		if dry_run:
			print(_("[dry-run] would store implementation as {path}").format(path = final_name))
			self.dry_run_names.add(required_digest)
			support.ro_rmtree(tmp)
			return
		else:
			# If we just want a subdirectory then the rename will change
			# extracted/.. and so we'll need write permission on 'extracted'

			os.chmod(extracted, 0o755)
			os.rename(extracted, final_name)
			os.chmod(final_name, 0o555)

		if extract:
			os.rmdir(tmp)
Example #16
0
def load_keys(fingerprints):
    """Load a set of keys at once.
	This is much more efficient than making individual calls to L{load_key}.
	@type fingerprints: [str]
	@return: a list of loaded keys, indexed by fingerprint
	@rtype: {str: L{Key}}
	@since: 0.27"""
    import codecs

    keys = {}

    # Otherwise GnuPG returns everything...
    if not fingerprints: return keys

    for fp in fingerprints:
        keys[fp] = Key(fp)

    current_fpr = None
    current_uid = None

    child = _run_gpg([
        '--fixed-list-mode', '--with-colons', '--list-keys',
        '--with-fingerprint', '--with-fingerprint'
    ] + fingerprints,
                     stdout=subprocess.PIPE)
    try:
        for line in child.stdout:
            if line.startswith('pub:'):
                current_fpr = None
                current_uid = None
            if line.startswith('fpr:'):
                current_fpr = line.split(':')[9]
                if current_fpr in keys and current_uid:
                    # This is probably a subordinate key, where the fingerprint
                    # comes after the uid, not before. Note: we assume the subkey is
                    # cross-certified, as recent ones always are.
                    try:
                        keys[current_fpr].name = codecs.decode(
                            current_uid, 'utf-8')
                    except:
                        logger.warning("Not UTF-8: %s", current_uid)
                        keys[current_fpr].name = current_uid
            if line.startswith('uid:'):
                assert current_fpr is not None
                # Only take primary UID
                if current_uid: continue
                parts = line.split(':')
                current_uid = parts[9]
                if current_fpr in keys:
                    keys[current_fpr].name = current_uid
    finally:
        child.stdout.close()

        if child.wait():
            logger.warning(
                _("gpg --list-keys failed with exit code %d") %
                child.returncode)

    return keys
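
The colon-delimited records parsed above can be tried on a single line; the uid record below is invented but follows the field layout the code assumes, with the user ID in field 10.

line = "uid:u::::1577836800::0123456789ABCDEF::Example User <user@example.com>::::::::::0:"
print(line.split(':')[9])   # Example User <user@example.com>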
Example #17
0
	def _get_mtime(self, name, warn_if_missing = True):
		timestamp_path = os.path.join(self.path, name)
		try:
			return os.stat(timestamp_path).st_mtime
		except Exception as ex:
			if warn_if_missing:
				logger.warning("Failed to get time-stamp of %s: %s", timestamp_path, ex)
			return 0
Example #18
0
	def put(self, key, value):
		cache_path = os.path.join(self.cache_dir, self.cache_leaf)
		self.cache[key] = value
		try:
			with open(cache_path, 'a') as stream:
				stream.write('%s=%s\n' % (key, value))
		except Exception as ex:
			logger.warning("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)
Example #19
0
	def download_icon(self, interface, force = False):
		"""Download an icon for this interface and add it to the
		icon cache. If the interface has no icon do nothing.
		@type interface: L{zeroinstall.injector.model.Interface}
		@type force: bool
		@return: the task doing the import, or None
		@rtype: L{tasks.Task}"""
		logger.debug("download_icon %(interface)s", {'interface': interface})

		modification_time = None
		existing_icon = self.config.iface_cache.get_icon_path(interface)
		if existing_icon:
			file_mtime = os.stat(existing_icon).st_mtime
			from email.utils import formatdate
			modification_time = formatdate(timeval = file_mtime, localtime = False, usegmt = True)

		feed = self.config.iface_cache.get_feed(interface.uri)
		if feed is None:
			return None

		# Find a suitable icon to download
		for icon in feed.get_metadata(XMLNS_IFACE, 'icon'):
			type = icon.getAttribute('type')
			if type != 'image/png':
				logger.debug(_('Skipping non-PNG icon'))
				continue
			source = icon.getAttribute('href')
			if source:
				break
			logger.warning(_('Missing "href" attribute on <icon> in %s'), interface)
		else:
			logger.info(_('No PNG icons found in %s'), interface)
			return

		dl = self.download_url(source, hint = interface, modification_time = modification_time)

		@tasks.async
		def download_and_add_icon():
			stream = dl.tempfile
			try:
				yield dl.downloaded
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil, tempfile
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

				tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
				shutil.copyfileobj(stream, tmp_file)
				tmp_file.close()

				icon_file = os.path.join(icons_cache, escape(interface.uri))
				portable_rename(tmp_file.name, icon_file)
			finally:
				stream.close()

		return download_and_add_icon()
Example #20
0
    def download_keys(self, fetcher, feed_hint=None, key_mirror=None):
        """Download any required GPG keys not already on our keyring.
		When all downloads are done (successful or otherwise), add any new keys
		to the keyring, L{recheck}.
		@param fetcher: fetcher to manage the download (was Handler before version 1.5)
		@type fetcher: L{fetch.Fetcher}
		@param key_mirror: URL of directory containing keys, or None to use feed's directory
		@type key_mirror: str
		@rtype: [L{zeroinstall.support.tasks.Blocker}]"""
        downloads = {}
        blockers = []
        for x in self.sigs:
            key_id = x.need_key()
            if key_id:
                try:
                    import urlparse
                except ImportError:
                    from urllib import parse as urlparse  # Python 3
                key_url = urlparse.urljoin(key_mirror or self.url, "%s.gpg" % key_id)
                logger.info(_("Fetching key from %s"), key_url)
                dl = fetcher.download_url(key_url, hint=feed_hint)
                downloads[dl.downloaded] = (dl, dl.tempfile)
                blockers.append(dl.downloaded)

        exception = None
        any_success = False

        from zeroinstall.support import tasks

        while blockers:
            yield blockers

            old_blockers = blockers
            blockers = []

            for b in old_blockers:
                dl, stream = downloads[b]
                try:
                    tasks.check(b)
                    if b.happened:
                        stream.seek(0)
                        self._downloaded_key(stream)
                        any_success = True
                        stream.close()
                    else:
                        blockers.append(b)
                except Exception:
                    _type, exception, tb = sys.exc_info()
                    logger.warning(
                        _("Failed to import key for '%(url)s': %(exception)s"),
                        {"url": self.url, "exception": str(exception)},
                    )
                    stream.close()

        if exception and not any_success:
            raise_with_traceback(exception, tb)

        self.recheck()
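
How each key URL is derived can be seen in isolation; a minimal sketch with hypothetical feed and key values.

try:
    import urlparse                          # Python 2
except ImportError:
    from urllib import parse as urlparse     # Python 3

feed_url = "http://example.com/feeds/app.xml"   # hypothetical feed URL
key_id = "0123456789ABCDEF"                     # hypothetical key ID
print(urlparse.urljoin(feed_url, "%s.gpg" % key_id))
# http://example.com/feeds/0123456789ABCDEF.gpg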
Example #21
0
	def download_icon(self, interface, force = False):
		"""Download an icon for this interface and add it to the
		icon cache. If the interface has no icon do nothing.
		@type interface: L{zeroinstall.injector.model.Interface}
		@type force: bool
		@return: the task doing the import, or None
		@rtype: L{tasks.Task}"""
		logger.debug("download_icon %(interface)s", {'interface': interface})

		modification_time = None
		existing_icon = self.config.iface_cache.get_icon_path(interface)
		if existing_icon:
			file_mtime = os.stat(existing_icon).st_mtime
			from email.utils import formatdate
			modification_time = formatdate(timeval = file_mtime, localtime = False, usegmt = True)

		feed = self.config.iface_cache.get_feed(interface.uri)
		if feed is None:
			return None

		# Find a suitable icon to download
		for icon in feed.get_metadata(XMLNS_IFACE, 'icon'):
			type = icon.getAttribute('type')
			if type != 'image/png':
				logger.debug(_('Skipping non-PNG icon'))
				continue
			source = icon.getAttribute('href')
			if source:
				break
			logger.warning(_('Missing "href" attribute on <icon> in %s'), interface)
		else:
			logger.info(_('No PNG icons found in %s'), interface)
			return

		dl = self.download_url(source, hint = interface, modification_time = modification_time)

		@tasks.async
		def download_and_add_icon():
			stream = dl.tempfile
			try:
				yield dl.downloaded
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil, tempfile
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

				tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
				shutil.copyfileobj(stream, tmp_file)
				tmp_file.close()

				icon_file = os.path.join(icons_cache, escape(interface.uri))
				portable_rename(tmp_file.name, icon_file)
			finally:
				stream.close()

		return download_and_add_icon()
Example #22
0
	def report_error(self, exception, tb = None):
		"""Report an exception to the user.
		@param exception: the exception to report
		@type exception: L{SafeException}
		@param tb: optional traceback
		@since: 0.25"""
		import logging
		logger.warning("%s", str(exception) or type(exception),
				exc_info = (exception, exception, tb) if logger.isEnabledFor(logging.INFO) else None)
Example #23
0
	def report_error(self, exception, tb = None):
		"""Report an exception to the user.
		@param exception: the exception to report
		@type exception: L{SafeException}
		@param tb: optional traceback
		@since: 0.25"""
		import logging
		logger.warning("%s", str(exception) or type(exception),
				exc_info = (exception, exception, tb) if logger.isEnabledFor(logging.INFO) else None)
Example #24
0
    def download(self, dl, timeout=None):
        """@type dl: L{zeroinstall.injector.download.Download}"""

        # (changed if we get redirected)
        current_url = dl.url

        redirections_remaining = 10

        original_exception = None

        # Assign the Download to a Site based on its scheme, host and port. If the result is a redirect,
        # reassign it to the appropriate new site. Note that proxy handling happens later; we want to group
        # and limit by the target site, not treat everything as going to a single site (the proxy).
        while True:
            location_parts = urlparse.urlparse(current_url)

            site_key = (location_parts.scheme, location_parts.hostname,
                        location_parts.port
                        or default_port.get(location_parts.scheme, None))

            step = DownloadStep()
            step.dl = dl
            step.url = current_url
            blocker = self._sites[site_key].download(step, timeout)
            yield blocker

            try:
                tasks.check(blocker)
            except download.DownloadError as ex:
                if original_exception is None:
                    original_exception = ex
                else:
                    logger.warning("%s (while trying mirror)", ex)
                mirror_url = step.dl.get_next_mirror_url()
                if mirror_url is None:
                    raise original_exception

                # Try the mirror.
                # There are actually two places where we try to use the mirror: this one
                # looks to see if we have an exact copy of same file somewhere else. If this
                # fails, Fetcher will also look for a different archive that would generate
                # the required implementation.
                logger.warning("%s: trying archive mirror at %s", ex,
                               mirror_url)
                step.redirect = mirror_url
                redirections_remaining = 10

            if not step.redirect:
                break

            current_url = step.redirect

            if redirections_remaining == 0:
                raise download.DownloadError(
                    "Too many redirections {url} -> {current}".format(
                        url=dl.url, current=current_url))
            redirections_remaining -= 1
Example #25
0
	def download(self, dl, timeout = None):
		"""@type dl: L{zeroinstall.injector.download.Download}"""

		# (changed if we get redirected)
		current_url = dl.url

		redirections_remaining = 10

		original_exception = None

		# Assign the Download to a Site based on its scheme, host and port. If the result is a redirect,
		# reassign it to the appropriate new site. Note that proxy handling happens later; we want to group
		# and limit by the target site, not treat everything as going to a single site (the proxy).
		while True:
			location_parts = urlparse.urlparse(current_url)

			site_key = (location_parts.scheme,
				    location_parts.hostname,
				    location_parts.port or default_port.get(location_parts.scheme, None))

			step = DownloadStep()
			step.dl = dl
			step.url = current_url
			blocker = self._sites[site_key].download(step, timeout)
			yield blocker

			try:
				tasks.check(blocker)
			except download.DownloadError as ex:
				if original_exception is None:
					original_exception = ex
				else:
					logger.warning("%s (while trying mirror)", ex)
				mirror_url = step.dl.get_next_mirror_url()
				if mirror_url is None:
					raise original_exception

				# Try the mirror.
				# There are actually two places where we try to use the mirror: this one
				# looks to see if we have an exact copy of same file somewhere else. If this
				# fails, Fetcher will also look for a different archive that would generate
				# the required implementation.
				logger.warning("%s: trying archive mirror at %s", ex, mirror_url)
				step.redirect = mirror_url
				redirections_remaining = 10

			if not step.redirect:
				break

			current_url = step.redirect

			if redirections_remaining == 0:
				raise download.DownloadError("Too many redirections {url} -> {current}".format(
						url = dl.url,
						current = current_url))
			redirections_remaining -= 1
Example #26
0
	def set(new):
		try:
			blocker = slave.invoke_master(["set-impl-stability", impl_details['from-feed'], impl_details['id'], new])
			yield blocker
			tasks.check(blocker)
			from zeroinstall.gui import main
			main.recalculate()
		except Exception:
			logger.warning("set", exc_info = True)
			raise
Example #27
0
    def get_network_state(self):
        if self.network_manager:
            try:
                state = self.network_manager.state()
                if state < 10:
                    state = _NetworkState.v0_8.get(
                        state, _NetworkState.NM_STATE_UNKNOWN)
                return state

            except Exception as ex:
                logger.warning(_("Error getting network state: %s"), ex)
        return _NetworkState.NM_STATE_UNKNOWN
Example #28
0
def _add_site_packages(interface, site_packages, known_site_feeds):
	for impl in os.listdir(site_packages):
		if impl.startswith('.'): continue
		feed = os.path.join(site_packages, impl, '0install', 'feed.xml')
		if not os.path.exists(feed):
			logger.warning(_("Site-local feed {path} not found").format(path = feed))
		logger.debug("Adding site-local feed '%s'", feed)

		# (we treat these as user overrides in order to let old versions of 0install
		# find them)
		interface.extra_feeds.append(Feed(feed, None, user_override = True, site_package = True))
		known_site_feeds.add(feed)
Example #29
0
def load_keys(fingerprints):
	"""Load a set of keys at once.
	This is much more efficient than making individual calls to L{load_key}.
	@type fingerprints: [str]
	@return: a list of loaded keys, indexed by fingerprint
	@rtype: {str: L{Key}}
	@since: 0.27"""
	import codecs

	keys = {}

	# Otherwise GnuPG returns everything...
	if not fingerprints: return keys

	for fp in fingerprints:
		keys[fp] = Key(fp)

	current_fpr = None
	current_uid = None

	child = _run_gpg(['--fixed-list-mode', '--with-colons', '--list-keys',
				'--with-fingerprint', '--with-fingerprint'] + fingerprints, stdout = subprocess.PIPE)
	try:
		for line in child.stdout:
			if line.startswith('pub:'):
				current_fpr = None
				current_uid = None
			if line.startswith('fpr:'):
				current_fpr = line.split(':')[9]
				if current_fpr in keys and current_uid:
					# This is probably a subordinate key, where the fingerprint
					# comes after the uid, not before. Note: we assume the subkey is
					# cross-certified, as recent ones always are.
					try:
						keys[current_fpr].name = codecs.decode(current_uid, 'utf-8')
					except:
						logger.warning("Not UTF-8: %s", current_uid)
						keys[current_fpr].name = current_uid
			if line.startswith('uid:'):
				assert current_fpr is not None
				# Only take primary UID
				if current_uid: continue
				parts = line.split(':')
				current_uid = parts[9]
				if current_fpr in keys:
					keys[current_fpr].name = current_uid
	finally:
		child.stdout.close()

		if child.wait():
			logger.warning(_("gpg --list-keys failed with exit code %d") % child.returncode)

	return keys
Example #30
0
    def _get_mtime(self, name, warn_if_missing=True):
        """@type name: str
		@type warn_if_missing: bool
		@rtype: int"""
        timestamp_path = os.path.join(self.path, name)
        try:
            return os.stat(timestamp_path).st_mtime
        except Exception as ex:
            if warn_if_missing:
                logger.warning("Failed to get time-stamp of %s: %s",
                               timestamp_path, ex)
            return 0
Example #31
0
def wait_for_destroy(ticket, window):
	window.show()
	blocker = tasks.Blocker("window closed")
	window.connect('destroy', lambda *args: blocker.trigger())
	try:
		if blocker:
			yield blocker
			tasks.check(blocker)
		send_json(["return", ticket, ["ok", None]])
	except Exception as ex:
		logger.warning("Returning error", exc_info = True)
		send_json(["return", ticket, ["error", str(ex)]])
Example #32
0
def do_run_gui(ticket):
	reply_holder = []
	blocker = run_gui(reply_holder)
	try:
		if blocker:
			yield blocker
			tasks.check(blocker)
		reply, = reply_holder
		send_json(["return", ticket, ["ok", reply]])
	except Exception as ex:
		logger.warning("Returning error", exc_info = True)
		send_json(["return", ticket, ["error", str(ex)]])
Example #33
0
	def get_network_state(self):
		if self.network_manager:
			try:
				state = self.network_manager.state()
				if state < 10:
					state = _NetworkState.v0_8.get(state,
								_NetworkState.NM_STATE_UNKNOWN)
				return state

			except Exception as ex:
				logger.warning(_("Error getting network state: %s"), ex)
		return _NetworkState.NM_STATE_UNKNOWN
Example #34
0
def recent_gnu_tar():
    """@rtype: bool
	@deprecated: should be private"""
    recent_gnu_tar = False
    if _gnu_tar():
        version = re.search(r'\)\s*(\d+(\.\d+)*)', _get_tar_version())
        if version:
            version = list(map(int, version.group(1).split('.')))
            recent_gnu_tar = version > [1, 13, 92]
        else:
            logger.warning(_("Failed to extract GNU tar version number"))
    logger.debug(_("Recent GNU tar = %s"), recent_gnu_tar)
    return recent_gnu_tar
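
The list comparison above can be sanity-checked on its own; the version strings here are illustrative.

new = list(map(int, "1.26".split('.')))      # [1, 26]
old = list(map(int, "1.13.92".split('.')))   # [1, 13, 92]
print(new > [1, 13, 92], old > [1, 13, 92])  # True False (element-wise comparison, left to right)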
Example #35
0
def recent_gnu_tar():
	"""@rtype: bool
	@deprecated: should be private"""
	recent_gnu_tar = False
	if _gnu_tar():
		version = re.search(r'\)\s*(\d+(\.\d+)*)', _get_tar_version())
		if version:
			version = list(map(int, version.group(1).split('.')))
			recent_gnu_tar = version > [1, 13, 92]
		else:
			logger.warning(_("Failed to extract GNU tar version number"))
	logger.debug(_("Recent GNU tar = %s"), recent_gnu_tar)
	return recent_gnu_tar
Example #36
0
def do_confirm_distro_install(config, ticket, options, impls):
	if gui_driver is not None: config = gui_driver.config
	try:
		manual_impls = [impl['id'] for impl in impls if not impl['needs-confirmation']]
		unsafe_impls = [impl for impl in impls if impl['needs-confirmation']]

		if unsafe_impls:
			confirm = config.handler.confirm_install(_('The following components need to be installed using native packages. '
				'These come from your distribution, and should therefore be trustworthy, but they also '
				'run with extra privileges. In particular, installing them may run extra services on your '
				'computer or affect other users. You may be asked to enter a password to confirm. The '
				'packages are:\n\n') + ('\n'.join('- ' + x['id'] for x in unsafe_impls)))
			yield confirm
			tasks.check(confirm)

		if manual_impls:
			raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
					"Please install it manually using your distribution's tools and try again. Or, install 'packagekit' and I can "
					"use that to install it.") % manual_impls[0])

		blockers = []
		for impl in unsafe_impls:
			from zeroinstall.injector import packagekit
			packagekit_id = impl['packagekit-id']
			pk = get_distro().packagekit.pk
			dl = packagekit.PackageKitDownload('packagekit:' + packagekit_id, hint = impl['master-feed'],
					pk = pk, packagekit_id = packagekit_id, expected_size = int(impl['size']))
			config.handler.monitor_download(dl)
			blockers.append(dl.downloaded)

		# Record the first error; log the rest
		error = []
		def dl_error(ex, tb = None):
			if error:
				config.handler.report_error(ex)
			else:
				error.append((ex, tb))
		while blockers:
			yield blockers
			tasks.check(blockers, dl_error)
			blockers = [b for b in blockers if not b.happened]
		if error:
			from zeroinstall import support
			support.raise_with_traceback(*error[0])

		send_json(["return", ticket, ["ok", "ok"]])
	except download.DownloadAborted as ex:
		send_json(["return", ticket, ["ok", "aborted-by-user"]])
	except Exception as ex:
		logger.warning("Returning error", exc_info = True)
		send_json(["return", ticket, ["error", str(ex)]])
Example #37
0
def _get_sigs_from_gpg_status_stream(status_r, child, errors):
    """Read messages from status_r and collect signatures from it.
	When done, reap 'child'.
	If there are no signatures, throw SafeException (using errors
	for the error message if non-empty).
	@type status_r: file
	@type child: L{subprocess.Popen}
	@type errors: file
	@rtype: [L{Signature}]"""
    sigs = []

    # Should we error out on bad signatures, even if there's a good
    # signature too?

    for line in status_r:
        assert line.endswith('\n')
        if not line.startswith('[GNUPG:] '):
            # The docs say every line starts with this, but if auto-key-retrieve
            # is on then they might not. See bug #3420548
            logger.warning("Invalid output from GnuPG: %r", line)
            continue

        line = line[9:-1]
        split_line = line.split(' ')
        code = split_line[0]
        args = split_line[1:]
        if code == 'VALIDSIG':
            sigs.append(ValidSig(args))
        elif code == 'BADSIG':
            sigs.append(BadSig(args))
        elif code == 'ERRSIG':
            sigs.append(ErrSig(args))

    errors.seek(0)

    error_messages = errors.read().strip()

    if not sigs:
        if error_messages:
            raise SafeException(
                _("No signatures found. Errors from GPG:\n%s") %
                error_messages)
        else:
            raise SafeException(
                _("No signatures found. No error messages from GPG."))
    elif error_messages:
        # Attach the warnings to all the signatures, in case they're useful.
        for s in sigs:
            s.messages = error_messages

    return sigs
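
The status-line handling above can be exercised on one line; the fingerprint and date below are made up.

line = "[GNUPG:] VALIDSIG 0123456789ABCDEF0123456789ABCDEF01234567 2024-01-01\n"
line = line[9:-1]                # drop the "[GNUPG:] " prefix and trailing newline
split_line = line.split(' ')
code, args = split_line[0], split_line[1:]
print(code, args)                # VALIDSIG ['0123456789ABCDEF0123456789ABCDEF01234567', '2024-01-01']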
Example #38
0
def do_update_key_info(config, ticket, fingerprint, xml):
	try:
		ki = pending_key_info.get(fingerprint, None)
		if ki:
			from xml.dom import minidom
			doc = minidom.parseString(qdom.to_UTF8(xml))
			ki.info = doc.documentElement.childNodes
			ki.blocker.trigger()
			ki.blocker = None
		else:
			logger.info("Unexpected key info for %s (not in %s)", fingerprint, pending_key_info)
	except Exception as ex:
		logger.warning("do_update_key_info", exc_info = True)
		send_json(["return", ticket, ["error", str(ex)]])
Example #39
0
def _add_site_packages(interface, site_packages, known_site_feeds):
	"""@type interface: L{Interface}
	@type site_packages: str
	@type known_site_feeds: {str}"""
	for impl in os.listdir(site_packages):
		if impl.startswith('.'): continue
		feed = os.path.join(site_packages, impl, '0install', 'feed.xml')
		if not os.path.exists(feed):
			logger.warning(_("Site-local feed {path} not found").format(path = feed))
		logger.debug("Adding site-local feed '%s'", feed)

		# (we treat these as user overrides in order to let old versions of 0install
		# find them)
		interface.extra_feeds.append(Feed(feed, None, user_override = True, site_package = True))
		known_site_feeds.add(feed)
Example #40
0
def _get_sigs_from_gpg_status_stream(status_r, child, errors):
	"""Read messages from status_r and collect signatures from it.
	When done, reap 'child'.
	If there are no signatures, throw SafeException (using errors
	for the error message if non-empty).
	@type status_r: file
	@type child: L{subprocess.Popen}
	@type errors: file
	@rtype: [L{Signature}]"""
	sigs = []

	# Should we error out on bad signatures, even if there's a good
	# signature too?

	for line in status_r:
		assert line.endswith('\n')
		if not line.startswith('[GNUPG:] '):
			# The docs say every line starts with this, but if auto-key-retrieve
			# is on then they might not. See bug #3420548
			logger.warning("Invalid output from GnuPG: %r", line)
			continue

		line = line[9:-1]
		split_line = line.split(' ')
		code = split_line[0]
		args = split_line[1:]
		if code == 'VALIDSIG':
			sigs.append(ValidSig(args))
		elif code == 'BADSIG':
			sigs.append(BadSig(args))
		elif code == 'ERRSIG':
			sigs.append(ErrSig(args))

	errors.seek(0)

	error_messages = errors.read().strip()

	if not sigs:
		if error_messages:
			raise SafeException(_("No signatures found. Errors from GPG:\n%s") % error_messages)
		else:
			raise SafeException(_("No signatures found. No error messages from GPG."))
	elif error_messages:
		# Attach the warnings to all the signatures, in case they're useful.
		for s in sigs:
			s.messages = error_messages

	return sigs
Example #41
0
	def flush(self):
		# Wipe the cache
		try:
			info = os.stat(self.source)
			mtime = int(info.st_mtime)
			size = info.st_size
		except Exception as ex:
			logger.warning("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp = tempfile.NamedTemporaryFile(mode = 'wt', dir = self.cache_dir, delete = False)
		tmp.write("mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format))
		tmp.close()
		portable_rename(tmp.name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()
Example #42
0
def _link(a, b, tmpfile):
	"""Keep 'a', delete 'b' and hard-link to 'a'"""
	if not _byte_identical(a, b):
		logger.warning(_("Files should be identical, but they're not!\n%(file_a)s\n%(file_b)s"), {'file_a': a, 'file_b': b})

	b_dir = os.path.dirname(b)
	old_mode = os.lstat(b_dir).st_mode
	os.chmod(b_dir, old_mode | 0o200)	# Need write access briefly
	try:
		os.link(a, tmpfile)
		try:
			os.rename(tmpfile, b)
		except:
			os.unlink(tmpfile)
			raise
	finally:
		os.chmod(b_dir, old_mode)
Example #43
0
	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			logger.info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				logger.warning(_("Failed to regenerate distribution database cache: %s"), ex)
Example #44
0
def do_confirm_keys(config, ticket, args):
	try:
		if gui_driver is not None: config = gui_driver.config
		url, valid_sigs = args
		assert valid_sigs, "No signatures!"
		valid_sigs = [gpg.ValidSig([fingerprint, None, 0]) for fingerprint in valid_sigs]
		pending = PendingFromOCaml(url = args[0], sigs = valid_sigs)

		blocker = config.trust_mgr.confirm_keys(pending)
		if blocker:
			yield blocker
			tasks.check(blocker)

		now_trusted = [s.fingerprint for s in valid_sigs if s.is_trusted()]
		send_json(["return", ticket, ["ok", now_trusted]])
	except Exception as ex:
		logger.warning("do_confirm_keys", exc_info = True)
		send_json(["return", ticket, ["error", str(ex)]])
Example #45
0
def import_key(stream):
	"""Run C{gpg --import} with this stream as stdin."""
	with tempfile.TemporaryFile(mode = 'w+t') as errors:
		child = _run_gpg(['--quiet', '--import', '--batch'],
					stdin = stream, stderr = errors)

		status = child.wait()

		errors.seek(0)
		error_messages = errors.read().strip()

	if status != 0:
		if error_messages:
			raise SafeException(_("Errors from 'gpg --import':\n%s") % error_messages)
		else:
			raise SafeException(_("Non-zero exit code %d from 'gpg --import'") % status)
	elif error_messages:
		logger.warning(_("Warnings from 'gpg --import':\n%s") % error_messages)
Example #46
0
    def _add_with_helper(self, required_digest, path, dry_run):
        """Use 0store-secure-add to copy 'path' to the system store.
		@param required_digest: the digest for path
		@type required_digest: str
		@param path: root of implementation directory structure
		@type path: str
		@return: True iff the directory was copied into the system cache successfully"""
        if required_digest.startswith('sha1='):
            return False  # Old digest alg not supported
        if os.environ.get('ZEROINSTALL_PORTABLE_BASE'):
            return False  # Can't use helper with portable mode
        helper = support.find_in_path('0store-secure-add-helper')
        if not helper:
            logger.info(
                _("'0store-secure-add-helper' command not found. Not adding to system cache."
                  ))
            return False
        if dry_run:
            print(
                _("[dry-run] would use {helper} to store {required_digest} in system store"
                  ).format(helper=helper, required_digest=required_digest))
            self.dry_run_names.add(required_digest)
            return True
        import subprocess
        env = os.environ.copy()
        env['ENV_NOT_CLEARED'] = 'Unclean'  # (warn about insecure configurations)
        env['HOME'] = 'Unclean'  # (warn about insecure configurations)
        dev_null = os.open(os.devnull, os.O_RDONLY)
        try:
            logger.info(_("Trying to add to system cache using %s"), helper)
            child = subprocess.Popen([helper, required_digest],
                                     stdin=dev_null,
                                     cwd=path,
                                     env=env)
            exit_code = child.wait()
        finally:
            os.close(dev_null)

        if exit_code:
            logger.warning(_("0store-secure-add-helper failed."))
            return False

        logger.info(_("Added succcessfully."))
        return True
Example #47
0
def update_user_overrides(interface, known_site_feeds = frozenset()):
	"""Update an interface with user-supplied information.
	Sets preferred stability and updates extra_feeds.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@param known_site_feeds: feeds to ignore (for backwards compatibility)
	@type known_site_feeds: {str}"""
	user = basedir.load_first_config(config_site, config_prog,
					   'interfaces', model._pretty_escape(interface.uri))
	if user is None:
		# For files saved by 0launch < 0.49
		user = basedir.load_first_config(config_site, config_prog,
						   'user_overrides', escape(interface.uri))
	if not user:
		return

	try:
		with open(user, 'rb') as stream:
			root = qdom.parse(stream)
	except Exception as ex:
		logger.warning(_("Error reading '%(user)s': %(exception)s"), {'user': user, 'exception': ex})
		raise

	stability_policy = root.getAttribute('stability-policy')
	if stability_policy:
		interface.set_stability_policy(stability_levels[str(stability_policy)])

	for item in root.childNodes:
		if item.uri != XMLNS_IFACE: continue
		if item.name == 'feed':
			feed_src = item.getAttribute('src')
			if not feed_src:
				raise InvalidInterface(_('Missing "src" attribute in <feed>'))
			# (note: 0install 1.9..1.12 used a different scheme and the "site-package" attribute;
			# we deliberately use a different attribute name to avoid confusion)
			if item.getAttribute('is-site-package'):
				# Site packages are detected earlier. This test isn't completely reliable,
				# since older versions will remove the attribute when saving the config
				# (hence the next test).
				continue
			if feed_src in known_site_feeds:
				continue
			interface.extra_feeds.append(Feed(feed_src, item.getAttribute('arch'), True, langs = item.getAttribute('langs')))
Example #48
0
    def get_feeds(self, iface):
        """Get all feeds for this interface. This is a mapping from feed URLs
		to ZeroInstallFeeds. It includes the interface's main feed, plus the
		resolution of every feed returned by L{get_feed_imports}. Uncached
		feeds are indicated by a value of None.
		@type iface: L{Interface}
		@rtype: {str: L{ZeroInstallFeed} | None}
		@since: 0.48"""
        main_feed = self.get_feed(iface.uri)
        results = {iface.uri: main_feed}
        for imp in iface.extra_feeds:
            try:
                results[imp.uri] = self.get_feed(imp.uri)
            except SafeException as ex:
                logger.warning("Failed to load feed '%s: %s", imp.uri, ex)
        if main_feed:
            for imp in main_feed.feeds:
                results[imp.uri] = self.get_feed(imp.uri)
        return results
Example #49
0
def load_icon(icon_path, icon_width=None, icon_height=None):
    """Load icon from path. Icon MUST be in PNG format.
	@param icon_path: pathname of icon, or None to load nothing
	@return: a GdkPixbuf, or None on failure"""
    if not icon_path:
        return None

    def size_prepared_cb(loader, width, height):
        dest_width = icon_width or width
        dest_height = icon_height or height

        if dest_width == width and dest_height == height:
            return

        ratio_width = float(dest_width) / width
        ratio_height = float(dest_height) / height
        ratio = min(ratio_width, ratio_height)

        # preserve original ratio
        if ratio_width != ratio:
            dest_width = int(math.ceil(width * ratio))
        elif ratio_height != ratio:
            dest_height = int(math.ceil(height * ratio))

        loader.set_size(int(dest_width), int(dest_height))

    # Restrict icon formats to avoid attacks
    try:
        loader = gtk.gdk.PixbufLoader('png')
        if icon_width or icon_height:
            loader.connect('size-prepared', size_prepared_cb)
        try:
            with open(icon_path, 'rb') as stream:
                loader.write(stream.read())
        finally:
            loader.close()
        return loader.get_pixbuf()
    except Exception as ex:
        logger.warning(_("Failed to load cached PNG icon: %s") % ex)
        return None
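
The scaling arithmetic in size_prepared_cb can be followed with concrete numbers; a hypothetical 128x64 icon being fitted into a 48x48 box.

import math

width, height = 128, 64                  # hypothetical source icon size
dest_width = dest_height = 48            # requested box
ratio = min(float(dest_width) / width, float(dest_height) / height)   # min(0.375, 0.75) = 0.375
print(int(math.ceil(width * ratio)), int(math.ceil(height * ratio)))  # 48 24, aspect ratio preserved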
Example #50
0
def update_from_cache(interface, iface_cache = None):
	"""Read a cached interface and any native feeds or user overrides.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@type iface_cache: L{zeroinstall.injector.iface_cache.IfaceCache} | None
	@return: True if cached version and user overrides loaded OK.
	False if upstream not cached. Local interfaces (starting with /) are
	always considered to be cached, although they are not actually stored in the cache.
	@rtype: bool
	@note: internal; use L{iface_cache.IfaceCache.get_interface} instead."""
	interface.reset()
	if iface_cache is None:
		import warnings
		warnings.warn("iface_cache should be specified", DeprecationWarning, 2)
		from zeroinstall.injector import policy
		iface_cache = policy.get_deprecated_singleton_config().iface_cache

	# Add the distribution package manager's version, if any
	path = basedir.load_first_data(config_site, 'native_feeds', model._pretty_escape(interface.uri))
	if path:
		# Resolve any symlinks
		logger.info(_("Adding native packager feed '%s'"), path)
		interface.extra_feeds.append(Feed(os.path.realpath(path), None, False))

	# Add locally-compiled binaries, if any
	escaped_uri = model.escape_interface_uri(interface.uri)
	known_site_feeds = set()
	for path in basedir.load_data_paths(config_site, 'site-packages', *escaped_uri):
		try:
			_add_site_packages(interface, path, known_site_feeds)
		except Exception as ex:
			logger.warning("Error loading site packages from {path}: {ex}".format(path = path, ex = ex))

	update_user_overrides(interface, known_site_feeds)

	main_feed = iface_cache.get_feed(interface.uri, force = True)
	if main_feed:
		update_user_feed_overrides(main_feed)

	return main_feed is not None
Example #51
0
def import_key(stream):
    """Run C{gpg --import} with this stream as stdin.
	@type stream: file"""
    with tempfile.TemporaryFile(mode='w+t') as errors:
        child = _run_gpg(['--quiet', '--import', '--batch'],
                         stdin=stream,
                         stderr=errors)

        status = child.wait()

        errors.seek(0)
        error_messages = errors.read().strip()

    if status != 0:
        if error_messages:
            raise SafeException(
                _("Errors from 'gpg --import':\n%s") % error_messages)
        else:
            raise SafeException(
                _("Non-zero exit code %d from 'gpg --import'") % status)
    elif error_messages:
        logger.warning(_("Warnings from 'gpg --import':\n%s") % error_messages)
Example #52
0
def update_user_feed_overrides(feed):
	"""Update a feed with user-supplied information.
	Sets last_checked and user_stability ratings.
	@param feed: feed to update
	@type feed: L{ZeroInstallFeed}
	@since: 0.49"""
	user = basedir.load_first_config(config_site, config_prog,
					   'feeds', model._pretty_escape(feed.url))
	if user is None:
		# For files saved by 0launch < 0.49
		user = basedir.load_first_config(config_site, config_prog,
						   'user_overrides', escape(feed.url))
	if not user:
		return

	try:
		with open(user, 'rb') as stream:
			root = qdom.parse(stream)
	except Exception as ex:
		logger.warning(_("Error reading '%(user)s': %(exception)s"), {'user': user, 'exception': ex})
		raise

	last_checked = root.getAttribute('last-checked')
	if last_checked:
		feed.last_checked = int(last_checked)

	for item in root.childNodes:
		if item.uri != XMLNS_IFACE: continue
		if item.name == 'implementation':
			id = item.getAttribute('id')
			assert id is not None
			impl = feed.implementations.get(id, None)
			if not impl:
				logger.debug(_("Ignoring user-override for unknown implementation %(id)s in %(interface)s"), {'id': id, 'interface': feed})
				continue

			user_stability = item.getAttribute('user-stability')
			if user_stability:
				impl.user_stability = stability_levels[str(user_stability)]
Example #53
0
        def wait_for_downloads(primary):
            # Download just the upstream feed, unless it takes too long...
            timeout = primary.dl.timeout
            yield primary, timeout
            tasks.check(timeout)

            try:
                tasks.check(primary)
                if primary.happened:
                    return  # OK, primary succeeded!
                # OK, maybe it's just being slow...
                logger.info("Feed download from %s is taking a long time.",
                            feed_url)
                primary_ex = None
            except NoTrustedKeys as ex:
                raise  # Don't bother trying the mirror if we have a trust problem
            except ReplayAttack as ex:
                raise  # Don't bother trying the mirror if we have a replay attack
            except DownloadAborted as ex:
                raise  # Don't bother trying the mirror if the user cancelled
            except SafeException as ex:
                # Primary failed
                primary = None
                primary_ex = ex
                logger.warning(
                    _("Feed download from %(url)s failed: %(exception)s"), {
                        'url': feed_url,
                        'exception': ex
                    })

            # Start downloading from mirror...
            mirror = self._download_and_import_feed(feed_url, use_mirror=True)

            # Wait until both mirror and primary tasks are complete...
            while True:
                blockers = list(filter(None, [primary, mirror]))
                if not blockers:
                    break
                yield blockers

                if primary:
                    try:
                        tasks.check(primary)
                        if primary.happened:
                            primary = None
                            # No point carrying on with the mirror once the primary has succeeded
                            if mirror:
                                logger.info(
                                    _("Primary feed download succeeded; aborting mirror download for %s"
                                      ) % feed_url)
                                mirror.dl.abort()
                    except SafeException as ex:
                        primary = None
                        primary_ex = ex
                        logger.info(
                            _("Feed download from %(url)s failed; still trying mirror: %(exception)s"
                              ), {
                                  'url': feed_url,
                                  'exception': ex
                              })

                if mirror:
                    try:
                        tasks.check(mirror)
                        if mirror.happened:
                            mirror = None
                            if primary_ex:
                                # We already warned; no need to raise an exception too,
                                # as the mirror download succeeded.
                                primary_ex = None
                    except ReplayAttack as ex:
                        logger.info(
                            _("Version from mirror is older than cached version; ignoring it: %s"
                              ), ex)
                        mirror = None
                        primary_ex = None
                    except SafeException as ex:
                        logger.info(_("Mirror download failed: %s"), ex)
                        mirror = None

            if primary_ex:
                raise primary_ex
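
wait_for_downloads() above is a coroutine in the zeroinstall.support.tasks style: it suspends by yielding blockers and uses tasks.check() to surface any exception recorded on them. A stripped-down sketch of that pattern (names hypothetical):

from zeroinstall.support import tasks

def example_task(blocker_a, blocker_b):
    # Suspend until at least one of the blockers triggers...
    yield blocker_a, blocker_b
    # ...then re-raise any exception stored on the finished blockers.
    tasks.check([blocker_a, blocker_b])
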
Example #54
0
    def check_manifest_and_rename(self,
                                  required_digest,
                                  tmp,
                                  extract=None,
                                  try_helper=False,
                                  dry_run=False):
        """Check that tmp[/extract] has the required_digest.
		On success, rename the checked directory to the digest, and
		make the whole tree read-only.
		@type required_digest: str
		@type tmp: str
		@type extract: str | None
		@param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
		@type try_helper: bool
		@param dry_run: just print what we would do to stdout (and delete tmp)
		@type dry_run: bool
		@raise BadDigest: if the input directory doesn't match the given digest"""
        if extract:
            extracted = os.path.join(tmp, extract)
            if not os.path.isdir(extracted):
                raise Exception(
                    _('Directory %s not found in archive') % extract)
        else:
            extracted = tmp

        from . import manifest

        manifest.fixup_permissions(extracted)

        alg, required_value = manifest.splitID(required_digest)
        actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
        if actual_digest != required_digest:
            raise BadDigest(
                _('Incorrect manifest -- archive is corrupted.\n'
                  'Required digest: %(required_digest)s\n'
                  'Actual digest: %(actual_digest)s\n') % {
                      'required_digest': required_digest,
                      'actual_digest': actual_digest
                  })

        if try_helper:
            if self._add_with_helper(required_digest,
                                     extracted,
                                     dry_run=dry_run):
                support.ro_rmtree(tmp)
                return
            logger.info(
                _("Can't add to system store. Trying user store instead."))

        logger.info(_("Caching new implementation (digest %s) in %s"),
                    required_digest, self.dir)

        final_name = os.path.join(self.dir, required_digest)
        if os.path.isdir(final_name):
            logger.warning(_("Item %s already stored.") %
                           final_name)  # not really an error
            return

        if dry_run:
            print(
                _("[dry-run] would store implementation as {path}").format(
                    path=final_name))
            self.dry_run_names.add(required_digest)
            support.ro_rmtree(tmp)
            return
        else:
            # If we just want a subdirectory then the rename will change
            # extracted/.. and so we'll need write permission on 'extracted'

            os.chmod(extracted, 0o755)
            os.rename(extracted, final_name)
            os.chmod(final_name, 0o555)

        if extract:
            os.rmdir(tmp)
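
The digest comparison above can also be run standalone. A hedged sketch using the same manifest calls (the digest string and directory path are illustrative):

from zeroinstall.zerostore import manifest

required_digest = 'sha1new=0123456789abcdef0123456789abcdef01234567'
alg, _value = manifest.splitID(required_digest)
# add_manifest_file() writes .manifest into the directory and returns the
# digest object; getID() turns it back into the "alg=value" form.
actual_digest = alg.getID(manifest.add_manifest_file('/tmp/unpacked', alg))
print(actual_digest == required_digest)
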
Example #55
0
    def _check_for_updates(self, sels, use_gui):
        """Check whether the selections need to be updated.
		If any input feeds have changed, we re-run the solver. If the
		new selections require a download, we schedule one in the
		background and return the old selections. Otherwise, we return the
		new selections. If we can select better versions without downloading,
		we update the app's selections and return the new selections.
		If we can't use the current selections, we update in the foreground.
		We also schedule a background update from time to time anyway.
		@type sels: L{zeroinstall.injector.selections.Selections}
		@type use_gui: bool
		@return: the selections to use
		@rtype: L{selections.Selections}"""
        need_solve = False  # Rerun solver (cached feeds have changed)
        need_update = False  # Update over the network

        if sels:
            utime = self._get_mtime('last-checked', warn_if_missing=True)
            last_solve = max(
                self._get_mtime('last-solve', warn_if_missing=False), utime)

            # Ideally, this would return all the files which were inputs into the solver's
            # decision. Currently, we approximate with:
            # - the previously selected feed files (local or cached)
            # - configuration files for the selected interfaces
            # - the global configuration
            # We currently ignore feeds and interfaces which were
            # considered but not selected.
            # Can yield None (ignored), paths or (path, mtime) tuples.
            # If this throws an exception, we will log it and resolve anyway.
            def get_inputs():
                for sel in sels.selections.values():
                    logger.info("Checking %s", sel.feed)

                    if sel.feed.startswith('distribution:'):
                        # If the package has changed version, we'll detect that below
                        # with get_unavailable_selections.
                        pass
                    elif os.path.isabs(sel.feed):
                        # Local feed
                        yield sel.feed
                    else:
                        # Cached feed
                        cached = basedir.load_first_cache(
                            namespaces.config_site, 'interfaces',
                            model.escape(sel.feed))
                        if cached:
                            yield cached
                        else:
                            raise IOError("Input %s missing; update" %
                                          sel.feed)

                    # Per-feed configuration
                    yield basedir.load_first_config(
                        namespaces.config_site, namespaces.config_prog,
                        'interfaces', model._pretty_escape(sel.interface))

                # Global configuration
                yield basedir.load_first_config(namespaces.config_site,
                                                namespaces.config_prog,
                                                'global')

            # If any of the feeds we used have been updated since the last check, do a quick re-solve
            try:
                for item in get_inputs():
                    if not item: continue
                    if isinstance(item, tuple):
                        path, mtime = item
                    else:
                        path = item
                        try:
                            mtime = os.stat(path).st_mtime
                        except OSError as ex:
                            logger.info(
                                "Triggering update to {app} due to error reading {path}: {ex}"
                                .format(app=self, path=path, ex=ex))
                            need_solve = True
                            break

                    if mtime and mtime > last_solve:
                        logger.info(
                            "Triggering update to %s because %s has changed",
                            self, path)
                        need_solve = True
                        break
            except Exception as ex:
                logger.info("Error checking modification times: %s", ex)
                need_solve = True
                need_update = True

            # Is it time for a background update anyway?
            if not need_update:
                staleness = time.time() - utime
                logger.info("Staleness of app %s is %d hours", self,
                            staleness / (60 * 60))
                freshness_threshold = self.config.freshness
                if freshness_threshold > 0 and staleness >= freshness_threshold:
                    need_update = True

            # If any of the saved selections aren't available then we need
            # to download right now, not later in the background.
            unavailable_selections = sels.get_unavailable_selections(
                config=self.config, include_packages=True)
            if unavailable_selections:
                logger.info("Saved selections are unusable (missing %s)",
                            ', '.join(str(s) for s in unavailable_selections))
                need_solve = True
        else:
            # No current selections
            need_solve = True
            unavailable_selections = True

        if need_solve:
            from zeroinstall.injector.driver import Driver
            driver = Driver(config=self.config,
                            requirements=self.get_requirements())
            if driver.need_download():
                if unavailable_selections:
                    return self._foreground_update(driver, use_gui)
                else:
                    # Continue with the current (cached) selections while we download
                    need_update = True
            else:
                old_sels = sels
                sels = driver.solver.selections
                from zeroinstall.support import xmltools
                if old_sels is None or not xmltools.nodes_equal(
                        sels.toDOM(), old_sels.toDOM()):
                    self.set_selections(sels, set_last_checked=False)
            try:
                self._touch('last-solve')
            except OSError as ex:
                logger.warning("Error checking for updates: %s", ex)

        # If we tried to check within the last hour, don't try again.
        if need_update:
            last_check_attempt = self._get_mtime('last-check-attempt',
                                                 warn_if_missing=False)
            if last_check_attempt and last_check_attempt + 60 * 60 > time.time():
                logger.info(
                    "Tried to check within last hour; not trying again now")
                need_update = False

        if need_update:
            try:
                self.set_last_check_attempt()
            except OSError as ex:
                logger.warning("Error checking for updates: %s", ex)
            else:
                from zeroinstall.injector import background
                r = self.get_requirements()
                background.spawn_background_update2(r, False, self)

        return sels
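
A tiny worked example of the staleness test in the middle of the method, assuming config.freshness is expressed in seconds (as the direct comparison above implies):

import time

freshness_threshold = 30 * 24 * 60 * 60      # e.g. 30 days, in seconds
utime = time.time() - 31 * 24 * 60 * 60      # pretend 'last-checked' mtime
staleness = time.time() - utime              # roughly 31 days
# Matches the condition used above: a background update would be scheduled.
print(freshness_threshold > 0 and staleness >= freshness_threshold)  # True
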
Example #56
0
def optimise(impl_dir):
    """Scan an implementation cache directory for duplicate files, and
	hard-link any duplicates together to save space.
	@param impl_dir: a $cache/0install.net/implementations directory
	@type impl_dir: str
	@return: (unique bytes, duplicated bytes, already linked, manifest size)
	@rtype: (int, int, int, int)"""

    first_copy = {}  # (itype, digest, mtime, size) -> (impl, dir, leaf) of first copy seen
    dup_size = uniq_size = already_linked = man_size = 0

    import random
    from zeroinstall.zerostore import BadDigest, parse_algorithm_digest_pair

    for x in range(10):
        tmpfile = os.path.join(impl_dir,
                               'optimise-%d' % random.randint(0, 1000000))
        if not os.path.exists(tmpfile):
            break
    else:
        raise Exception(_("Can't generate unused tempfile name!"))

    dirs = os.listdir(impl_dir)
    total = len(dirs)
    msg = ""

    def clear():
        print("\r" + (" " * len(msg)) + "\r", end='')

    for i, impl in enumerate(dirs):
        clear()
        msg = _("[%(done)d / %(total)d] Reading manifests...") % {
            'done': i,
            'total': total
        }
        print(msg, end='')
        sys.stdout.flush()

        try:
            alg, manifest_digest = parse_algorithm_digest_pair(impl)
        except BadDigest:
            logger.warning(_("Skipping non-implementation '%s'"), impl)
            continue
        manifest_path = os.path.join(impl_dir, impl, '.manifest')
        try:
            ms = open(manifest_path, 'rt')
        except OSError as ex:
            logger.warning(
                _("Failed to read manifest file '%(manifest_path)s': %(exception)s"
                  ), {
                      'manifest_path': manifest_path,
                      'exception': str(ex)
                  })
            continue

        if alg == 'sha1':
            ms.close()
            continue

        man_size += os.path.getsize(manifest_path)

        dir = ""
        for line in ms:
            if line[0] == 'D':
                itype, path = line.split(' ', 1)
                assert path.startswith('/')
                dir = path[1:-1]  # Strip slash and newline
                continue

            if line[0] == "S":
                itype, digest, size, rest = line.split(' ', 3)
                uniq_size += int(size)
                continue

            assert line[0] in "FX"

            itype, digest, mtime, size, path = line.split(' ', 4)
            path = path[:-1]  # Strip newline
            size = int(size)

            key = (itype, digest, mtime, size)
            loc_path = (impl, dir, path)

            first_loc = first_copy.get(key, None)
            if first_loc:
                first_full = os.path.join(impl_dir, *first_loc)
                new_full = os.path.join(impl_dir, *loc_path)
                if _already_linked(first_full, new_full):
                    already_linked += size
                else:
                    _link(first_full, new_full, tmpfile)
                    dup_size += size
            else:
                first_copy[key] = loc_path
                uniq_size += size

        ms.close()
    clear()
    return (uniq_size, dup_size, already_linked, man_size)
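
A hedged usage sketch for optimise(); the cache path construction follows the docstring and the basedir helper used elsewhere in this collection (the exact attribute and layout may differ on your system):

import os
from zeroinstall.support import basedir

impl_dir = os.path.join(basedir.xdg_cache_home, '0install.net', 'implementations')
uniq, dup, already_linked, man_size = optimise(impl_dir)
print("Hard-linked duplicates, reclaiming %d bytes (%d bytes already linked)"
      % (dup, already_linked))
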
Example #57
0
    def download_keys(self, fetcher, feed_hint=None, key_mirror=None):
        """Download any required GPG keys not already on our keyring.
		When all downloads are done (successful or otherwise), add any new keys
		to the keyring, L{recheck}.
		@param fetcher: fetcher to manage the download (was Handler before version 1.5)
		@type fetcher: L{fetch.Fetcher}
		@param key_mirror: URL of directory containing keys, or None to use feed's directory
		@type key_mirror: str
		@rtype: [L{zeroinstall.support.tasks.Blocker}]"""
        downloads = {}
        blockers = []
        for x in self.sigs:
            key_id = x.need_key()
            if key_id:
                try:
                    import urlparse
                except ImportError:
                    from urllib import parse as urlparse  # Python 3
                key_url = urlparse.urljoin(key_mirror or self.url,
                                           '%s.gpg' % key_id)
                logger.info(_("Fetching key from %s"), key_url)
                dl = fetcher.download_url(key_url, hint=feed_hint)
                downloads[dl.downloaded] = (dl, dl.tempfile)
                blockers.append(dl.downloaded)

        exception = None
        any_success = False

        from zeroinstall.support import tasks

        while blockers:
            yield blockers

            old_blockers = blockers
            blockers = []

            for b in old_blockers:
                dl, stream = downloads[b]
                try:
                    tasks.check(b)
                    if b.happened:
                        stream.seek(0)
                        self._downloaded_key(stream)
                        any_success = True
                        stream.close()
                    else:
                        blockers.append(b)
                except Exception:
                    _type, exception, tb = sys.exc_info()
                    logger.warning(
                        _("Failed to import key for '%(url)s': %(exception)s"),
                        {
                            'url': self.url,
                            'exception': str(exception)
                        })
                    stream.close()

        if exception and not any_success:
            raise_with_traceback(exception, tb)

        self.recheck()
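
A small illustration of how the per-key URL is derived above: urljoin() resolves the key file name against the feed (or key-mirror) URL, so keys are fetched from the same directory as the feed. Values are illustrative:

try:
    import urlparse                       # Python 2
except ImportError:
    from urllib import parse as urlparse  # Python 3

print(urlparse.urljoin('http://example.com/feeds/app.xml', 'DEADBEEF.gpg'))
# -> http://example.com/feeds/DEADBEEF.gpg
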
Example #58
0
	def confirm_import_feed(self, pending, valid_sigs):
		"""Sub-classes should override this method to interact with the user about new feeds.
		If multiple feeds need confirmation, L{trust.TrustMgr.confirm_keys} will only invoke one instance of this
		method at a time.
		@param pending: the new feed to be imported
		@type pending: L{PendingFeed}
		@param valid_sigs: maps signatures to a list of fetchers collecting information about the key
		@type valid_sigs: {L{gpg.ValidSig} : L{fetch.KeyInfoFetcher}}
		@since: 0.42"""
		from zeroinstall.injector import trust

		assert valid_sigs

		domain = trust.domain_from_url(pending.url)

		# Ask on stderr, because we may be writing XML to stdout
		print(_("Feed: %s") % pending.url, file=sys.stderr)
		print(_("The feed is correctly signed with the following keys:"), file=sys.stderr)
		for x in valid_sigs:
			print("-", x, file=sys.stderr)

		def text(parent):
			text = ""
			for node in parent.childNodes:
				if node.nodeType == node.TEXT_NODE:
					text = text + node.data
			return text

		shown = set()
		key_info_fetchers = valid_sigs.values()
		while key_info_fetchers:
			old_kfs = key_info_fetchers
			key_info_fetchers = []
			for kf in old_kfs:
				infos = set(kf.info) - shown
				if infos:
					if len(valid_sigs) > 1:
						print("%s: " % kf.fingerprint)
					for key_info in infos:
						print("-", text(key_info), file=sys.stderr)
						shown.add(key_info)
				if kf.blocker:
					key_info_fetchers.append(kf)
			if key_info_fetchers:
				for kf in key_info_fetchers: print(kf.status, file=sys.stderr)
				stdin = tasks.InputBlocker(0, 'console')
				blockers = [kf.blocker for kf in key_info_fetchers] + [stdin]
				yield blockers
				for b in blockers:
					try:
						tasks.check(b)
					except Exception as ex:
						logger.warning(_("Failed to get key info: %s"), ex)
				if stdin.happened:
					print(_("Skipping remaining key lookups due to input from user"), file=sys.stderr)
					break
		if not shown:
			print(_("Warning: Nothing known about this key!"), file=sys.stderr)

		if len(valid_sigs) == 1:
			print(_("Do you want to trust this key to sign feeds from '%s'?") % domain, file=sys.stderr)
		else:
			print(_("Do you want to trust all of these keys to sign feeds from '%s'?") % domain, file=sys.stderr)
		while True:
			print(_("Trust [Y/N] "), end=' ', file=sys.stderr)
			sys.stderr.flush()
			i = support.raw_input()
			if not i: continue
			if i in 'Nn':
				raise NoTrustedKeys(_('Not signed with a trusted key'))
			if i in 'Yy':
				break
		trust.trust_db._dry_run = self.dry_run
		for key in valid_sigs:
			print(_("Trusting %(key_fingerprint)s for %(domain)s") % {'key_fingerprint': key.fingerprint, 'domain': domain}, file=sys.stderr)
			trust.trust_db.trust_key(key.fingerprint, domain)
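
The trust decision above is recorded per key and per domain. A small sketch of the domain derivation (URL illustrative); trust_key() itself writes to the user's trust database, so it is not repeated here:

from zeroinstall.injector import trust

print(trust.domain_from_url('https://apps.example.com/feeds/app.xml'))
# -> 'apps.example.com': the domain the keys above would be trusted for
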
Example #59
0
def _copy_files(alg, wanted, source, target):
    """Scan for files under 'source'. For each one:
	If it is in wanted and has the right details (or they can be fixed; e.g. mtime),
	then copy it into 'target'.
	If it's not in wanted, warn and skip it.
	On exit, wanted contains only files that were not found.
	@type alg: L{Algorithm}
	@type wanted: {str: tuple}
	@type source: str
	@type target: str"""
    dir = ''
    for line in alg.generate_manifest(source):
        if line[0] == 'D':
            type, name = line.split(' ', 1)
            assert name.startswith('/')
            dir = name[1:]
            path = dir
        elif line[0] == 'S':
            type, actual_digest, actual_size, name = line.split(' ', 3)
            path = os.path.join(dir, name)
        else:
            assert line[0] in 'XF'
            type, actual_digest, actual_mtime, actual_size, name = line.split(
                ' ', 4)
            path = os.path.join(dir, name)
        try:
            required_details = wanted.pop(path)
        except KeyError:
            logger.warning(_("Skipping file not in manifest: '%s'"), path)
            continue
        if required_details[0] != type:
            raise BadDigest(_("Item '%s' has wrong type!") % path)
        if type == 'D':
            os.mkdir(os.path.join(target, path))
        elif type in 'XF':
            required_type, required_digest, required_mtime, required_size = required_details
            if required_size != actual_size:
                raise SafeException(
                    _("File '%(path)s' has wrong size (%(actual_size)s bytes, but should be "
                      "%(required_size)s according to manifest)") % {
                          'path': path,
                          'actual_size': actual_size,
                          'required_size': required_size
                      })
            required_mtime = int(required_mtime)
            dest_path = os.path.join(target, path)
            if type == 'X':
                mode = 0o555
            else:
                mode = 0o444
            copy_with_verify(os.path.join(source, path), dest_path, mode, alg,
                             required_digest)
            os.utime(dest_path, (required_mtime, required_mtime))
        elif type == 'S':
            required_type, required_digest, required_size = required_details
            if required_size != actual_size:
                raise SafeException(
                    _("Symlink '%(path)s' has wrong size (%(actual_size)s bytes, but should be "
                      "%(required_size)s according to manifest)") % {
                          'path': path,
                          'actual_size': actual_size,
                          'required_size': required_size
                      })
            symlink_target = os.readlink(os.path.join(source, path))
            symlink_digest = alg.new_digest()
            symlink_digest.update(symlink_target.encode('utf-8'))
            if symlink_digest.hexdigest() != required_digest:
                raise SafeException(
                    _("Symlink '%(path)s' has wrong target (digest should be "
                      "%(digest)s according to manifest)") % {
                          'path': path,
                          'digest': required_digest
                      })
            dest_path = os.path.join(target, path)
            os.symlink(symlink_target, dest_path)
        else:
            raise SafeException(
                _("Unknown manifest type %(type)s for '%(path)s'") % {
                    'type': type,
                    'path': path
                })
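
For orientation, a sketch of the 'wanted' mapping consumed above. Keys are manifest-relative paths and values are the remaining manifest fields as strings, mirroring the tuple unpacking in the loop; the entries shown are illustrative, and directory entries only need the leading 'D':

wanted = {
    'bin/app': ('X', '0123abcd', '1388486400', '2048'),   # executable file
    'README':  ('F', '4567ef01', '1388486400', '100'),    # regular file
    'lib':     ('D',),                                     # directory
    'current': ('S', '89abcdef', '4'),                     # symlink
}
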
Example #60
0
    def solve_with_downloads(self, force=False, update_local=False):
        """Run the solver, then download any feeds that are missing or
		that need to be updated. Each time a new feed is imported into
		the cache, the solver is run again, possibly adding new downloads.
		@param force: whether to download even if we're already ready to run.
		@type force: bool
		@param update_local: fetch PackageKit feeds even if we're ready to run.
		@type update_local: bool"""

        downloads_finished = set()  # Successful or otherwise
        downloads_in_progress = {}  # URL -> Download

        # There are three cases:
        # 1. We want to run immediately if possible. If not, download all the information we can.
        #    (force = False, update_local = False)
        # 2. We're in no hurry, but don't want to use the network unnecessarily.
        #    We should still update local information (from PackageKit).
        #    (force = False, update_local = True)
        # 3. The user explicitly asked us to refresh everything.
        #    (force = True)

        try_quick_exit = not (force or update_local)

        while True:
            self.solver.solve_for(self.requirements)
            for w in self.watchers:
                w()

            if try_quick_exit and self.solver.ready:
                break
            try_quick_exit = False

            if not self.solver.ready:
                force = True

            for f in self.solver.feeds_used:
                if f in downloads_finished or f in downloads_in_progress:
                    continue
                if os.path.isabs(f):
                    if force:
                        try:
                            self.config.iface_cache.get_feed(f, force=True)
                        except reader.MissingLocalFeed as ex:
                            logger.warning(
                                "Reloading %s: %s",
                                f,
                                ex,
                                exc_info=True
                                if logger.isEnabledFor(logging.INFO) else None)

                        downloads_in_progress[f] = tasks.IdleBlocker(
                            'Refresh local feed')
                    continue
                elif f.startswith('distribution:'):
                    if force or update_local:
                        downloads_in_progress[f] = (
                            self.config.fetcher.download_and_import_feed(
                                f, self.config.iface_cache))
                elif force and self.config.network_use != network_offline:
                    downloads_in_progress[f] = (
                        self.config.fetcher.download_and_import_feed(
                            f, self.config.iface_cache))
                    # Once we've started downloading some things,
                    # we might as well get them all.
                    force = True

            if not downloads_in_progress:
                if self.config.network_use == network_offline:
                    logger.info(
                        _("Can't choose versions and in off-line mode, so aborting"
                          ))
                break

            # Wait for at least one download to finish
            blockers = downloads_in_progress.values()
            yield blockers
            tasks.check(blockers, self.config.handler.report_error)

            for f in list(downloads_in_progress.keys()):
                if f in downloads_in_progress and downloads_in_progress[f].happened:
                    del downloads_in_progress[f]
                    downloads_finished.add(f)

                    # Need to refetch any "distribution" feed that
                    # depends on this one
                    distro_feed_url = 'distribution:' + f
                    if distro_feed_url in downloads_finished:
                        downloads_finished.remove(distro_feed_url)
                    if distro_feed_url in downloads_in_progress:
                        del downloads_in_progress[distro_feed_url]