Example #1
	def fetch_candidates(self, master_feed):
		package_names = [item.getAttribute("package") for item, item_attrs, depends in master_feed.get_package_impls(self)]

		if self.packagekit.available:
			return self.packagekit.fetch_candidates(package_names)

		# No PackageKit. Use apt-cache directly.
		for package in package_names:
			# Check to see whether we could get a newer version using apt-get
			try:
				null = os.open(os.devnull, os.O_WRONLY)
				child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null, universal_newlines = True)
				os.close(null)

				arch = version = size = None
				for line in child.stdout:
					line = line.strip()
					if line.startswith('Version: '):
						version = line[9:]
						version = try_cleanup_distro_version(version)
					elif line.startswith('Architecture: '):
						arch = canonical_machine(line[14:].strip())
					elif line.startswith('Size: '):
						size = int(line[6:].strip())
				if version and arch:
					cached = {'version': version, 'arch': arch, 'size': size}
				else:
					cached = None
				child.stdout.close()
				child.wait()
			except Exception as ex:
				logger.warn("'apt-cache show %s' failed: %s", package, ex)
				cached = None
			# (multi-arch support? can there be multiple candidates?)
			self.apt_cache[package] = cached
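
For reference, the line parsing above is easy to exercise in isolation. A minimal sketch against a canned record (the package data is made up; the 0install helpers try_cleanup_distro_version and canonical_machine are omitted):

sample = "Package: hello\nVersion: 2.10-2\nArchitecture: amd64\nSize: 28123\n"

arch = version = size = None
for line in sample.splitlines():
	line = line.strip()
	if line.startswith('Version: '):
		version = line[len('Version: '):]
	elif line.startswith('Architecture: '):
		arch = line[len('Architecture: '):].strip()
	elif line.startswith('Size: '):
		size = int(line[len('Size: '):].strip())

print(version, arch, size)		# -> 2.10-2 amd64 28123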
Example #2
			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					logger.warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self, item)
				feed.implementations[id] = impl
				new_impls.append(impl)

				impl.installed = installed
				impl.metadata = item_attrs
				impl.requires = depends

				if 'run' not in impl.commands:
					item_main = item_attrs.get('main', None)
					if item_main:
						if item_main.startswith('/'):
							impl.main = item_main
						else:
							raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
										item_main)
				impl.upstream_stability = model.packaged

				return impl
Example #3
def load_config(handler = None):
	config = Config(handler)
	parser = ConfigParser.RawConfigParser()
	parser.add_section('global')
	parser.set('global', 'help_with_testing', 'False')
	parser.set('global', 'freshness', str(60 * 60 * 24 * 30))	# One month
	parser.set('global', 'network_use', 'full')
	parser.set('global', 'auto_approve_keys', 'True')

	path = basedir.load_first_config(config_site, config_prog, 'global')
	if path:
		logger.info("Loading configuration from %s", path)
		try:
			parser.read(path)
		except Exception as ex:
			logger.warn(_("Error loading config: %s"), str(ex) or repr(ex))

	config.help_with_testing = parser.getboolean('global', 'help_with_testing')
	config.network_use = parser.get('global', 'network_use')
	config.freshness = int(parser.get('global', 'freshness'))
	config.auto_approve_keys = parser.getboolean('global', 'auto_approve_keys')

	assert config.network_use in network_levels, config.network_use

	return config
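
Why the pre-set values above act as defaults: parser.read() silently skips files it cannot open, so only a malformed file reaches the except clause. A small self-contained sketch (using the Python 3 module name, configparser):

import configparser

parser = configparser.RawConfigParser()
parser.add_section('global')
parser.set('global', 'freshness', str(60 * 60 * 24 * 30))
parser.read('/nonexistent/path')	# no error; the preset value survives
print(parser.getint('global', 'freshness'))	# -> 2592000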
Example #4
    def _query_installed_package(self, package):
        null = os.open(os.devnull, os.O_WRONLY)
        child = subprocess.Popen(
            [
                "dpkg-query", "-W",
                "--showformat=${Version}\t${Architecture}\t${Status}\n", "--",
                package
            ],
            stdout=subprocess.PIPE,
            stderr=null,
            universal_newlines=True)  # Needed for Python 3
        os.close(null)
        stdout, stderr = child.communicate()
        child.wait()
        for line in stdout.split('\n'):
            if not line: continue
            version, debarch, status = line.split('\t', 2)
            if not status.endswith(' installed'): continue
            clean_version = try_cleanup_distro_version(version)
            if debarch.find("-") != -1:
                debarch = debarch.split("-")[-1]
            if clean_version:
                return '%s\t%s' % (clean_version,
                                   canonical_machine(debarch.strip()))
            else:
                logger.warn(
                    _("Can't parse distribution version '%(version)s' for package '%(package)s'"),
                    {'version': version, 'package': package})

        return '-'
Example #5
    def generate_cache(self):
        cache = []

        child = subprocess.Popen(
            ["rpm", "-qa", "--qf=%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n"],
            stdout=subprocess.PIPE,
            universal_newlines=True)
        for line in child.stdout:
            package, version, rpmarch = line.split('\t', 2)
            if package == 'gpg-pubkey':
                continue
            zi_arch = canonical_machine(rpmarch.strip())
            clean_version = try_cleanup_distro_version(version)
            if clean_version:
                cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
            else:
                logger.warn(
                    _("Can't parse distribution version '%(version)s' for package '%(package)s'"),
                    {'version': version, 'package': package})

        self._write_cache(cache)
        child.stdout.close()
        child.wait()
Example #6
    def get_package_info(self, package, factory):
        _name_version_regexp = '^(.+)-([^-]+)$'

        nameversion = re.compile(_name_version_regexp)
        for pkgname in os.listdir(self._pkgdir):
            pkgdir = os.path.join(self._pkgdir, pkgname)
            if not os.path.isdir(pkgdir): continue

            #contents = open(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

            match = nameversion.search(pkgname)
            if match is None:
                logger.warn(
                    _('Cannot parse version from Ports package named "%(pkgname)s"'),
                    {'pkgname': pkgname})
                continue
            else:
                name = match.group(1)
                if name != package:
                    continue
                version = try_cleanup_distro_version(match.group(2))

            machine = host_machine

            impl = factory('package:ports:%s:%s:%s' % \
               (package, version, machine))
            impl.version = model.parse_version(version)
            impl.machine = machine
Example #7
def find_bin_dir(paths=None):
    """Find the first writable path in the list (default $PATH),
	skipping /bin, /sbin and everything under /usr except /usr/local/bin"""
    if paths is None:
        paths = os.environ['PATH'].split(os.pathsep)
    for path in paths:
        if path.startswith('/usr/') and not path.startswith('/usr/local/bin'):
            # (/usr/local/bin is OK if we're running as root)
            pass
        elif path.startswith('/bin') or path.startswith('/sbin'):
            pass
        elif os.path.realpath(path).startswith(basedir.xdg_cache_home):
            pass  # print "Skipping cache", first_path
        elif not os.access(path, os.W_OK):
            pass  # print "No access", first_path
        else:
            break
    else:
        path = os.path.expanduser('~/bin/')
        logger.warn('%s is not in $PATH. Add it with:\n%s' %
                    (path, _export('PATH', path + ':$PATH')))

        if not os.path.isdir(path):
            os.makedirs(path)
    return path
Example #8
def discover_existing_apps():
    """Search through the configured XDG datadirs looking for .desktop files created by L{add_to_menu}.
	@return: a map from application URIs to .desktop filenames"""
    already_installed = {}
    for d in basedir.load_data_paths('applications'):
        for desktop_file in os.listdir(d):
            if desktop_file.startswith(
                    'zeroinstall-') and desktop_file.endswith('.desktop'):
                full = os.path.join(d, desktop_file)
                try:
                    with open(full, 'rt') as stream:
                        for line in stream:
                            line = line.strip()
                            if line.startswith('Exec=0launch '):
                                bits = line.split(' -- ', 1)
                                if ' ' in bits[0]:
                                    uri = bits[0].split(
                                        ' ', 1)[1]  # 0launch URI -- %u
                                else:
                                    uri = bits[1].split(
                                        ' ', 1)[0].strip()  # 0launch -- URI %u
                                already_installed[uri] = full
                                break
                        else:
                            logger.info(_("Failed to find Exec line in %s"),
                                        full)
                except Exception as ex:
                    logger.warn(
                        _("Failed to load .desktop file %(filename)s: %(exception)s"),
                        {'filename': full, 'exception': ex})
    return already_installed
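
The inner loop above uses Python's for/else: the else clause runs only when the loop finishes without break, which is why the "Failed to find Exec line" message fires only for files containing no matching line. A minimal illustration:

lines = ['Name=Demo', 'Type=Application']   # no Exec= line
for line in lines:
    if line.startswith('Exec='):
        print('found', line)
        break
else:
    print('no Exec line found')   # runs because the loop never broke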
Example #9
	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, build = entry.rsplit('-', 2)
			if name == package:
				gotarch = False
				with open(os.path.join(self._packages_dir, entry, "desc"), 'rt') as stream:
					for line in stream:
						if line == "%ARCH%\n":
							gotarch = True
							continue
						if gotarch:
							arch = line.strip()
							break
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					logger.warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue
	
				impl = factory('package:arch:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:arch')
Example #10
    def report_error(self, exception, tb=None):
        """Report an exception to the user.
		@param exception: the exception to report
		@type exception: L{SafeException}
		@param tb: optional traceback
		@since: 0.25"""
        logger.warn("%s", str(exception) or type(exception))
Example #11
    def add_dir_to_cache(self, required_digest, path, try_helper=False):
        """Copy the contents of path to the cache.
		@param required_digest: the expected digest
		@type required_digest: str
		@param path: the root of the tree to copy
		@type path: str
		@param try_helper: attempt to use privileged helper before user cache (since 0.26)
		@type try_helper: bool
		@raise BadDigest: if the contents don't match the given digest."""
        if self.lookup(required_digest):
            logger.info(_("Not adding %s as it already exists!"),
                        required_digest)
            return

        tmp = self.get_tmp_dir_for(required_digest)
        try:
            _copytree2(path, tmp)
            self.check_manifest_and_rename(required_digest,
                                           tmp,
                                           try_helper=try_helper)
        except:
            logger.warn(_("Error importing directory."))
            logger.warn(_("Deleting %s"), tmp)
            support.ro_rmtree(tmp)
            raise
Example #12
    def get_package_info(self, package, factory):
        # Add installed versions...
        for entry in os.listdir(self._packages_dir):
            name, version, arch, build = entry.rsplit('-', 3)
            if name == package:
                zi_arch = canonical_machine(arch)
                clean_version = try_cleanup_distro_version("%s-%s" %
                                                           (version, build))
                if not clean_version:
                    logger.warn(
                        _("Can't parse distribution version '%(version)s' for package '%(package)s'"),
                        {'version': version, 'package': name})
                    continue

                impl = factory('package:slack:%s:%s:%s' % \
                  (package, clean_version, zi_arch))
                impl.version = model.parse_version(clean_version)
                if zi_arch != '*':
                    impl.machine = zi_arch

        # Add any uninstalled candidates found by PackageKit
        self.packagekit.get_candidates(package, factory, 'package:slack')
Example #13
	def _add_with_helper(self, required_digest, path):
		"""Use 0store-secure-add to copy 'path' to the system store.
		@param required_digest: the digest for path
		@type required_digest: str
		@param path: root of implementation directory structure
		@type path: str
		@return: True iff the directory was copied into the system cache successfully
		"""
		if required_digest.startswith('sha1='):
			return False		# Old digest alg not supported
		helper = support.find_in_path('0store-secure-add-helper')
		if not helper:
			logger.info(_("'0store-secure-add-helper' command not found. Not adding to system cache."))
			return False
		import subprocess
		env = os.environ.copy()
		env['ENV_NOT_CLEARED'] = 'Unclean'	# (warn about insecure configurations)
		env['HOME'] = 'Unclean'			# (warn about insecure configurations)
		dev_null = os.open(os.devnull, os.O_RDONLY)
		try:
			logger.info(_("Trying to add to system cache using %s"), helper)
			child = subprocess.Popen([helper, required_digest],
						 stdin = dev_null,
						 cwd = path,
						 env = env)
			exit_code = child.wait()
		finally:
			os.close(dev_null)

		if exit_code:
			logger.warn(_("0store-secure-add-helper failed."))
			return False

		logger.info(_("Added succcessfully."))
		return True
Example #14
	def generate_cache(self):
		cache = []

		child = subprocess.Popen(["port", "-v", "installed"],
					  stdout = subprocess.PIPE, universal_newlines = True)
		for line in child.stdout:
			if not line.startswith(" "):
				continue
			if line.strip().count(" ") > 1:
				package, version, extra = line.split(None, 2)
			else:
				package, version = line.split()
				extra = ""
			if not extra.startswith("(active)"):
				continue
			version = version.lstrip('@')
			version = re.sub(r"\+.*", "", version) # strip variants
			zi_arch = '*'
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
				if match:
					platform, major, archs = match.groups()
					for arch in archs.split():
						zi_arch = canonical_machine(arch)
						cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
				else:
					cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				logger.warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})
		self._write_cache(cache)
		child.stdout.close()
		child.wait()
Example #15
    def download_icon(self, interface, force=False):
        """Download an icon for this interface and add it to the
		icon cache. If the interface has no icon do nothing.
		@return: the task doing the import, or None
		@rtype: L{tasks.Task}"""
        logger.debug("download_icon %(interface)s", {"interface": interface})

        modification_time = None
        existing_icon = self.config.iface_cache.get_icon_path(interface)
        if existing_icon:
            file_mtime = os.stat(existing_icon).st_mtime
            from email.utils import formatdate

            modification_time = formatdate(timeval=file_mtime, localtime=False, usegmt=True)

        feed = self.config.iface_cache.get_feed(interface.uri)
        if feed is None:
            return None

        # Find a suitable icon to download
        for icon in feed.get_metadata(XMLNS_IFACE, "icon"):
            type = icon.getAttribute("type")
            if type != "image/png":
                logger.debug(_("Skipping non-PNG icon"))
                continue
            source = icon.getAttribute("href")
            if source:
                break
            logger.warn(_('Missing "href" attribute on <icon> in %s'), interface)
        else:
            logger.info(_("No PNG icons found in %s"), interface)
            return

        dl = self.download_url(source, hint=interface, modification_time=modification_time)

        @tasks.async
        def download_and_add_icon():
            stream = dl.tempfile
            try:
                yield dl.downloaded
                tasks.check(dl.downloaded)
                if dl.unmodified:
                    return
                stream.seek(0)

                import shutil, tempfile

                icons_cache = basedir.save_cache_path(config_site, "interface_icons")

                tmp_file = tempfile.NamedTemporaryFile(dir=icons_cache, delete=False)
                shutil.copyfileobj(stream, tmp_file)
                tmp_file.close()

                icon_file = os.path.join(icons_cache, escape(interface.uri))
                portable_rename(tmp_file.name, icon_file)
            finally:
                stream.close()

        return download_and_add_icon()
Example #16
	def _get_mtime(self, name, warn_if_missing = True):
		timestamp_path = os.path.join(self.path, name)
		try:
			return os.stat(timestamp_path).st_mtime
		except Exception as ex:
			if warn_if_missing:
				logger.warn("Failed to get time-stamp of %s: %s", timestamp_path, ex)
			return 0
Example #17
	def put(self, key, value):
		cache_path = os.path.join(self.cache_dir, self.cache_leaf)
		self.cache[key] = value
		try:
			with open(cache_path, 'a') as stream:
				stream.write('%s=%s\n' % (key, value))
		except Exception as ex:
			logger.warn("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)
Example #18
def load_keys(fingerprints):
    """Load a set of keys at once.
	This is much more efficient than making individual calls to L{load_key}.
	@return: the loaded keys, indexed by fingerprint
	@rtype: {str: L{Key}}
	@since: 0.27"""
    import codecs

    keys = {}

    # Otherwise GnuPG returns everything...
    if not fingerprints: return keys

    for fp in fingerprints:
        keys[fp] = Key(fp)

    current_fpr = None
    current_uid = None

    # (passing '--with-fingerprint' twice lists fingerprints for subkeys too)
    child = _run_gpg(['--fixed-list-mode', '--with-colons', '--list-keys',
                      '--with-fingerprint', '--with-fingerprint'] + fingerprints,
                     stdout=subprocess.PIPE)
    try:
        for line in child.stdout:
            if line.startswith('pub:'):
                current_fpr = None
                current_uid = None
            if line.startswith('fpr:'):
                current_fpr = line.split(':')[9]
                if current_fpr in keys and current_uid:
                    # This is probably a subordinate key, where the fingerprint
                    # comes after the uid, not before. Note: we assume the subkey is
                    # cross-certified, as recent ones always are.
                    try:
                        keys[current_fpr].name = codecs.decode(
                            current_uid, 'utf-8')
                    except:
                        logger.warn("Not UTF-8: %s", current_uid)
                        keys[current_fpr].name = current_uid
            if line.startswith('uid:'):
                assert current_fpr is not None
                # Only take primary UID
                if current_uid: continue
                parts = line.split(':')
                current_uid = parts[9]
                if current_fpr in keys:
                    keys[current_fpr].name = current_uid
    finally:
        child.stdout.close()

        if child.wait():
            logger.warn(
                _("gpg --list-keys failed with exit code %d") %
                child.returncode)

    return keys
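
The field indexing above follows the gpg --with-colons format: records are colon-separated, and field 10 (index 9 after split(':')) carries the fingerprint on fpr: lines and the user ID on uid: lines. A made-up record in that shape:

record = 'uid:u::::1580000000::ABCDEF0123456789::Alice Example <alice@example.org>::::::::::0:'
print(record.split(':')[9])		# -> Alice Example <alice@example.org>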
Example #19
    def get_last_checked(self):
        """Get the time of the last successful check for updates.
		@return: the timestamp (or None on error)
		@rtype: float | None"""
        last_updated_path = os.path.join(self.path, "last-checked")
        try:
            return os.stat(last_updated_path).st_mtime
        except Exception as ex:
            logger.warn("Failed to get time-stamp of %s: %s", last_updated_path, ex)
            return None
Example #20
	def download_keys(self, fetcher, feed_hint = None, key_mirror = None):
		"""Download any required GPG keys not already on our keyring.
		When all downloads are done (successful or otherwise), add any new keys
		to the keyring, then L{recheck}.
		@param fetcher: fetcher to manage the download (was Handler before version 1.5)
		@type fetcher: L{fetch.Fetcher}
		@param key_mirror: URL of directory containing keys, or None to use feed's directory
		@type key_mirror: str
		"""
		downloads = {}
		blockers = []
		for x in self.sigs:
			key_id = x.need_key()
			if key_id:
				try:
					import urlparse
				except ImportError:
					from urllib import parse as urlparse	# Python 3
				key_url = urlparse.urljoin(key_mirror or self.url, '%s.gpg' % key_id)
				logger.info(_("Fetching key from %s"), key_url)
				dl = fetcher.download_url(key_url, hint = feed_hint)
				downloads[dl.downloaded] = (dl, dl.tempfile)
				blockers.append(dl.downloaded)

		exception = None
		any_success = False

		from zeroinstall.support import tasks

		while blockers:
			yield blockers

			old_blockers = blockers
			blockers = []

			for b in old_blockers:
				dl, stream = downloads[b]
				try:
					tasks.check(b)
					if b.happened:
						stream.seek(0)
						self._downloaded_key(stream)
						any_success = True
						stream.close()
					else:
						blockers.append(b)
				except Exception:
					_type, exception, tb = sys.exc_info()
					logger.warn(_("Failed to import key for '%(url)s': %(exception)s"), {'url': self.url, 'exception': str(exception)})
					stream.close()

		if exception and not any_success:
			raise_with_traceback(exception, tb)

		self.recheck()
Example #21
	def download(self, dl):
		# (changed if we get redirected)
		current_url = dl.url

		redirections_remaining = 10

		original_exception = None

		# Assign the Download to a Site based on its scheme, host and port. If the result is a redirect,
		# reassign it to the appropriate new site. Note that proxy handling happens later; we want to group
		# and limit by the target site, not treat everything as going to a single site (the proxy).
		while True:
			location_parts = urlparse.urlparse(current_url)

			site_key = (location_parts.scheme,
				    location_parts.hostname,
				    location_parts.port or default_port.get(location_parts.scheme, None))

			step = DownloadStep()
			step.dl = dl
			step.url = current_url
			blocker = self._sites[site_key].download(step)
			yield blocker

			try:
				tasks.check(blocker)
			except download.DownloadError as ex:
				if original_exception is None:
					original_exception = ex
				else:
					logger.warn("%s (while trying mirror)", ex)
				mirror_url = step.dl.get_next_mirror_url()
				if mirror_url is None:
					raise original_exception

				# Try the mirror.
				# There are actually two places where we try to use the mirror: this one
				# looks to see if we have an exact copy of same file somewhere else. If this
				# fails, Fetcher will also look for a different archive that would generate
				# the required implementation.
				logger.warn("%s: trying archive mirror at %s", ex, mirror_url)
				step.redirect = mirror_url
				redirections_remaining = 10

			if not step.redirect:
				break

			current_url = step.redirect

			if redirections_remaining == 0:
				raise download.DownloadError("Too many redirections {url} -> {current}".format(
						url = dl.url,
						current = current_url))
			redirections_remaining -= 1
Example #22
def recent_gnu_tar():
    """@deprecated: should be private"""
    recent_gnu_tar = False
    if _gnu_tar():
        version = re.search(r'\)\s*(\d+(\.\d+)*)', _get_tar_version())
        if version:
            version = list(map(int, version.group(1).split('.')))
            recent_gnu_tar = version > [1, 13, 92]
        else:
            logger.warn(_("Failed to extract GNU tar version number"))
    logger.debug(_("Recent GNU tar = %s"), recent_gnu_tar)
    return recent_gnu_tar
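
The version > [1, 13, 92] test relies on Python comparing lists element-wise, left to right:

print([1, 14] > [1, 13, 92])         # True  (14 > 13 decides it)
print([1, 13, 90] > [1, 13, 92])     # False (90 < 92)
print([1, 13, 92, 1] > [1, 13, 92])  # True  (a longer list wins on a tie)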
Example #23
    def get_network_state(self):
        if self.network_manager:
            try:
                state = self.network_manager.state()
                if state < 10:
                    state = _NetworkState.v0_8.get(
                        state, _NetworkState.NM_STATE_UNKNOWN)
                return state

            except Exception as ex:
                logger.warn(_("Error getting network state: %s"), ex)
        return _NetworkState.NM_STATE_UNKNOWN
Example #24
def _add_site_packages(interface, site_packages, known_site_feeds):
	for impl in os.listdir(site_packages):
		if impl.startswith('.'): continue
		feed = os.path.join(site_packages, impl, '0install', 'feed.xml')
		if not os.path.exists(feed):
			logger.warn(_("Site-local feed {path} not found").format(path = feed))
			continue	# don't register a feed that doesn't exist
		logger.debug("Adding site-local feed '%s'", feed)

		# (we treat these as user overrides in order to let old versions of 0install
		# find them)
		interface.extra_feeds.append(Feed(feed, None, user_override = True, site_package = True))
		known_site_feeds.add(feed)
Example #25
def update_user_overrides(interface, known_site_feeds=frozenset()):
    """Update an interface with user-supplied information.
	Sets preferred stability and updates extra_feeds.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@param known_site_feeds: feeds to ignore (for backwards compatibility)
	"""
    user = basedir.load_first_config(config_site, config_prog, 'interfaces',
                                     model._pretty_escape(interface.uri))
    if user is None:
        # For files saved by 0launch < 0.49
        user = basedir.load_first_config(config_site,
                                         config_prog, 'user_overrides',
                                         escape(interface.uri))
    if not user:
        return

    try:
        with open(user, 'rb') as stream:
            root = qdom.parse(stream)
    except Exception as ex:
        logger.warn(_("Error reading '%(user)s': %(exception)s"), {
            'user': user,
            'exception': ex
        })
        raise

    stability_policy = root.getAttribute('stability-policy')
    if stability_policy:
        interface.set_stability_policy(stability_levels[str(stability_policy)])

    for item in root.childNodes:
        if item.uri != XMLNS_IFACE: continue
        if item.name == 'feed':
            feed_src = item.getAttribute('src')
            if not feed_src:
                raise InvalidInterface(_('Missing "src" attribute in <feed>'))
            # (note: 0install 1.9..1.12 used a different scheme and the "site-package" attribute;
            # we deliberately use a different attribute name to avoid confusion)
            if item.getAttribute('is-site-package'):
                # Site packages are detected earlier. This test isn't completely reliable,
                # since older versions will remove the attribute when saving the config
                # (hence the next test).
                continue
            if feed_src in known_site_feeds:
                continue
            interface.extra_feeds.append(
                Feed(feed_src,
                     item.getAttribute('arch'),
                     True,
                     langs=item.getAttribute('langs')))
Example #26
def _get_sigs_from_gpg_status_stream(status_r, child, errors):
    """Read messages from status_r and collect signatures from it.
	When done, reap 'child'.
	If there are no signatures, throw SafeException (using errors
	for the error message if non-empty)."""
    sigs = []

    # Should we error out on bad signatures, even if there's a good
    # signature too?

    for line in status_r:
        assert line.endswith('\n')
        if not line.startswith('[GNUPG:] '):
            # The docs say every line starts with this, but if auto-key-retrieve
            # is on then they might not. See bug #3420548
            logger.warn("Invalid output from GnuPG: %r", line)
            continue

        line = line[9:-1]
        split_line = line.split(' ')
        code = split_line[0]
        args = split_line[1:]
        if code == 'VALIDSIG':
            sigs.append(ValidSig(args))
        elif code == 'BADSIG':
            sigs.append(BadSig(args))
        elif code == 'ERRSIG':
            sigs.append(ErrSig(args))

    errors.seek(0)

    error_messages = errors.read().strip()

    if not sigs:
        if error_messages:
            raise SafeException(
                _("No signatures found. Errors from GPG:\n%s") %
                error_messages)
        else:
            raise SafeException(
                _("No signatures found. No error messages from GPG."))
    elif error_messages:
        # Attach the warnings to all the signatures, in case they're useful.
        for s in sigs:
            s.messages = error_messages

    return sigs
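
Each status line has the form '[GNUPG:] CODE ARGS...': stripping the nine-character prefix and the trailing newline leaves the code and its arguments. A short sketch (the fingerprint is made up):

line = '[GNUPG:] VALIDSIG 0123456789ABCDEF0123456789ABCDEF01234567 2020-01-01\n'
line = line[9:-1]
parts = line.split(' ')
code, args = parts[0], parts[1:]
print(code, len(args))		# -> VALIDSIG 2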
Example #27
def update_user_feed_overrides(feed):
    """Update a feed with user-supplied information.
	Sets last_checked and user_stability ratings.
	@param feed: feed to update
	@since 0.49
	"""
    user = basedir.load_first_config(config_site, config_prog, 'feeds',
                                     model._pretty_escape(feed.url))
    if user is None:
        # For files saved by 0launch < 0.49
        user = basedir.load_first_config(config_site, config_prog,
                                         'user_overrides', escape(feed.url))
    if not user:
        return

    try:
        with open(user, 'rb') as stream:
            root = qdom.parse(stream)
    except Exception as ex:
        logger.warn(_("Error reading '%(user)s': %(exception)s"), {
            'user': user,
            'exception': ex
        })
        raise

    last_checked = root.getAttribute('last-checked')
    if last_checked:
        feed.last_checked = int(last_checked)

    for item in root.childNodes:
        if item.uri != XMLNS_IFACE: continue
        if item.name == 'implementation':
            id = item.getAttribute('id')
            assert id is not None
            impl = feed.implementations.get(id, None)
            if not impl:
                logger.debug(
                    _("Ignoring user-override for unknown implementation %(id)s in %(interface)s"),
                    {'id': id, 'interface': feed})
                continue

            user_stability = item.getAttribute('user-stability')
            if user_stability:
                impl.user_stability = stability_levels[str(user_stability)]
Example #28
def _link(a, b, tmpfile):
	"""Keep 'a', delete 'b' and hard-link to 'a'"""
	if not _byte_identical(a, b):
		logger.warn(_("Files should be identical, but they're not!\n%(file_a)s\n%(file_b)s"), {'file_a': a, 'file_b': b})

	b_dir = os.path.dirname(b)
	old_mode = os.lstat(b_dir).st_mode
	os.chmod(b_dir, old_mode | 0o200)	# Need write access briefly
	try:
		os.link(a, tmpfile)
		try:
			os.rename(tmpfile, b)
		except:
			os.unlink(tmpfile)
			raise
	finally:
		os.chmod(b_dir, old_mode)
Example #29
	def flush(self):
		# Wipe the cache
		try:
			info = os.stat(self.source)
			mtime = int(info.st_mtime)
			size = info.st_size
		except Exception as ex:
			logger.warn("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp = tempfile.NamedTemporaryFile(mode = 'wt', dir = self.cache_dir, delete = False)
		tmp.write("mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format))
		tmp.close()
		portable_rename(tmp.name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()
Example #30
	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			logger.info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				logger.warn(_("Failed to regenerate distribution database cache: %s"), ex)
Example #31
    def get_feeds(self, iface):
        """Get all feeds for this interface. This is a mapping from feed URLs
		to ZeroInstallFeeds. It includes the interface's main feed, plus the
		resolution of every feed returned by L{get_feed_imports}. Uncached
		feeds are indicated by a value of None.
		@rtype: {str: L{ZeroInstallFeed} | None}
		@since: 0.48"""
        main_feed = self.get_feed(iface.uri)
        results = {iface.uri: main_feed}
        for imp in iface.extra_feeds:
            try:
                results[imp.uri] = self.get_feed(imp.uri)
            except SafeException as ex:
                logger.warn("Failed to load feed '%s: %s", imp.uri, ex)
        if main_feed:
            for imp in main_feed.feeds:
                results[imp.uri] = self.get_feed(imp.uri)
        return results
Example #32
def import_key(stream):
	"""Run C{gpg --import} with this stream as stdin."""
	with tempfile.TemporaryFile(mode = 'w+t') as errors:
		child = _run_gpg(['--quiet', '--import', '--batch'],
					stdin = stream, stderr = errors)

		status = child.wait()

		errors.seek(0)
		error_messages = errors.read().strip()

	if status != 0:
		if error_messages:
			raise SafeException(_("Errors from 'gpg --import':\n%s") % error_messages)
		else:
			raise SafeException(_("Non-zero exit code %d from 'gpg --import'") % status)
	elif error_messages:
		logger.warn(_("Warnings from 'gpg --import':\n%s") % error_messages)
Example #33
    def get_package_info(self, package, factory):
        # Add installed versions...
        _version_start_regexp = '-[0-9]'

        if package.count('/') != 1: return

        category, leafname = package.split('/')
        category_dir = os.path.join(self._pkgdir, category)
        match_prefix = leafname + '-'

        if not os.path.isdir(category_dir): return

        for filename in os.listdir(category_dir):
            if filename.startswith(match_prefix) and filename[len(match_prefix)].isdigit():
                with open(os.path.join(category_dir, filename, 'PF'),
                          'rt') as stream:
                    name = stream.readline().strip()

                match = re.search(_version_start_regexp, name)
                if match is None:
                    logger.warn(
                        _('Cannot parse version from Gentoo package named "%(name)s"'),
                        {'name': name})
                    continue
                else:
                    version = try_cleanup_distro_version(name[match.start() + 1:])

                if category == 'app-emulation' and name.startswith('emul-'):
                    __, __, machine, __ = name.split('-', 3)
                else:
                    with open(os.path.join(category_dir, filename, 'CHOST'),
                              'rt') as stream:
                        machine, __ = stream.readline().split('-', 1)
                machine = arch.canonicalize_machine(machine)

                impl = factory('package:gentoo:%s:%s:%s' % \
                  (package, version, machine))
                impl.version = model.parse_version(version)
                impl.machine = machine

        # Add any uninstalled candidates found by PackageKit
        self.packagekit.get_candidates(package, factory, 'package:gentoo')
Example #34
    def download_selections(self, sels):
        """Download any missing implementations in the given selections.
		If no downloads are needed, but we haven't checked for a while, start
		a background process to check for updates (but return None immediately).
		@return: a blocker which resolves when all needed implementations are available
		@rtype: L{tasks.Blocker} | None
		"""
        # Check the selections are still available
        blocker = sels.download_missing(self.config)  # TODO: package impls

        if blocker:
            return blocker
        else:
            # Nothing to download, but is it time for a background update?
            timestamp_path = os.path.join(self.path, 'last-checked')
            try:
                utime = os.stat(timestamp_path).st_mtime
                staleness = time.time() - utime
                logger.info("Staleness of app %s is %d hours", self,
                            staleness / (60 * 60))
                freshness_threshold = self.config.freshness
                need_update = freshness_threshold > 0 and staleness >= freshness_threshold

                if need_update:
                    last_check_attempt_path = os.path.join(
                        self.path, 'last-check-attempt')
                    if os.path.exists(last_check_attempt_path):
                        last_check_attempt = os.stat(
                            last_check_attempt_path).st_mtime
                        if last_check_attempt + 60 * 60 > time.time():
                            logger.info(
                                "Tried to check within last hour; not trying again now"
                            )
                            need_update = False
            except Exception as ex:
                logger.warn("Failed to get time-stamp of %s: %s",
                            timestamp_path, ex)
                need_update = True

            if need_update:
                self.set_last_check_attempt()
                from zeroinstall.injector import background
                r = self.get_requirements()
                background.spawn_background_update2(r, False, self)
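The freshness logic above combines two checks: the selections must be staler than config.freshness, and no check attempt may have been made within the past hour. A minimal sketch of that decision in isolation (the function name and both path arguments are hypothetical):

import os
import time

def needs_background_update(timestamp_path, attempt_path, freshness):
    # Stale if the last successful check is older than the threshold.
    # (The caller above treats stat errors as "needs update".)
    staleness = time.time() - os.stat(timestamp_path).st_mtime
    if freshness <= 0 or staleness < freshness:
        return False
    # ...but rate-limited to one attempt per hour.
    if os.path.exists(attempt_path):
        if os.stat(attempt_path).st_mtime + 60 * 60 > time.time():
            return False
    return True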
Exemple #53
0
	def generate_cache(self):
		cache = []

		child = subprocess.Popen(["rpm", "-qa", "--qf=%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n"],
					stdout = subprocess.PIPE, universal_newlines = True)
		for line in child.stdout:
			package, version, rpmarch = line.split('\t', 2)
			if package == 'gpg-pubkey':
				continue
			zi_arch = canonical_machine(rpmarch.strip())
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				logger.warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)
		child.stdout.close()
		child.wait()
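For reference, one line of the `rpm -qa` output requested above parses like this (the sample line is invented):

line = 'bash\t4.2.46-34.el7\tx86_64\n'
package, version, rpmarch = line.split('\t', 2)
# split('\t', 2) leaves the trailing newline on the third field,
# hence the rpmarch.strip() in the code above.
print(package, version, rpmarch.strip())   # bash 4.2.46-34.el7 x86_64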
Exemple #54
0
	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, arch, build = entry.rsplit('-', 3)
			if name == package:
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					logger.warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:slack:%s:%s:%s' % (package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:slack')
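Slackware package database entries are named NAME-VERSION-ARCH-BUILD, so the rsplit('-', 3) above splits from the right and any hyphens inside the name survive. A quick sketch with an invented entry:

entry = 'mozilla-firefox-24.1.0esr-x86_64-1'
name, version, arch, build = entry.rsplit('-', 3)
print(name, version, arch, build)
# mozilla-firefox 24.1.0esr x86_64 1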
Exemple #55
0
	def generate_cache(self):
		cache = []

		# BUG in the original: 'arch' is undefined in this scope. Cygwin
		# reports no per-package architecture, so the host machine is used
		# instead (an assumed fix; requires 'import platform').
		zi_arch = canonical_machine(platform.machine())
		for line in os.popen("cygcheck -c -d"):
			if line == "Cygwin Package Information\r\n":
				continue
			if line == "\n":
				continue
			package, version = line.split()
			if package == "Package" and version == "Version":
				continue
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				logger.warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)
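A small sketch of the header filtering above, run against canned cygcheck-style lines rather than a live `cygcheck -c -d` call (the sample output is an assumption):

sample = ['Cygwin Package Information\r\n',
          'Package              Version\n',
          'bash                 4.1.10-4\n']
for line in sample:
    parts = line.split()
    # Skip the banner and the column-header row; keep real entries.
    if len(parts) != 2 or parts == ['Package', 'Version']:
        continue
    package, version = parts
    print(package, version)    # bash 4.1.10-4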
Exemple #56
0
    def generate_cache(self):
        cache = []

        child = subprocess.Popen(["port", "-v", "installed"],
                                 stdout=subprocess.PIPE,
                                 universal_newlines=True)
        for line in child.stdout:
            if not line.startswith(" "):
                continue
            if line.strip().count(" ") > 1:
                package, version, extra = line.split(None, 2)
            else:
                package, version = line.split()
                extra = ""
            if not extra.startswith("(active)"):
                continue
            version = version.lstrip('@')
            version = re.sub(r"\+.*", "", version)  # strip variants
            zi_arch = '*'
            clean_version = try_cleanup_distro_version(version)
            if clean_version:
                # 'extra' begins with "(active)", so search for the platform
                # section rather than anchoring a match at the start.
                match = re.search(
                    r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
                if match:
                    platform, major, archs = match.groups()
                    for arch in archs.split():
                        zi_arch = canonical_machine(arch)
                        cache.append('%s\t%s\t%s' %
                                     (package, clean_version, zi_arch))
                else:
                    cache.append('%s\t%s\t%s' %
                                 (package, clean_version, zi_arch))
            else:
                logger.warn(
                    _("Can't parse distribution version '%(version)s' for package '%(package)s'"
                      ), {
                          'version': version,
                          'package': package
                      })
        self._write_cache(cache)
        child.stdout.close()
        child.wait()
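The platform/archs regex can be exercised on a canned extra field; the sample string below is an assumption about what `port -v installed` emits after the version column:

import re

extra = "(active) platform='darwin 13' archs='x86_64 i386'"
match = re.search(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
if match:
    platform, major, archs = match.groups()
    print(platform, archs.split())   # darwin ['x86_64', 'i386']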
Exemple #58
0
def update_from_cache(interface, iface_cache=None):
    """Read a cached interface and any native feeds or user overrides.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@return: True if cached version and user overrides loaded OK.
	False if upstream not cached. Local interfaces (starting with /) are
	always considered to be cached, although they are not actually stored in the cache.
	Internal: use L{iface_cache.IfaceCache.get_interface} instread.
	@rtype: bool"""
    interface.reset()
    if iface_cache is None:
        import warnings

        warnings.warn("iface_cache should be specified", DeprecationWarning, 2)
        from zeroinstall.injector import policy

        iface_cache = policy.get_deprecated_singleton_config().iface_cache

    # Add the distribution package manager's version, if any
    path = basedir.load_first_data(config_site, "native_feeds", model._pretty_escape(interface.uri))
    if path:
        logger.info(_("Adding native packager feed '%s'"), path)
        # Resolve any symlinks
        interface.extra_feeds.append(Feed(os.path.realpath(path), None, False))

    # Add locally-compiled binaries, if any
    escaped_uri = model.escape_interface_uri(interface.uri)
    known_site_feeds = set()
    for path in basedir.load_data_paths(config_site, "site-packages", *escaped_uri):
        try:
            _add_site_packages(interface, path, known_site_feeds)
        except Exception as ex:
            logger.warn("Error loading site packages from {path}: {ex}".format(path=path, ex=ex))

    update_user_overrides(interface, known_site_feeds)

    main_feed = iface_cache.get_feed(interface.uri, force=True)
    if main_feed:
        update_user_feed_overrides(main_feed)

    return main_feed is not None
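A minimal usage sketch, assuming an already-configured interface cache (my_iface_cache and the feed URI are hypothetical):

from zeroinstall.injector import model

interface = model.Interface('http://example.com/prog.xml')
if update_from_cache(interface, iface_cache=my_iface_cache):
    print('Cached feed, native feeds and overrides loaded')
else:
    print('Upstream feed not cached yet')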