Example #1
    def _populate_model(self):
        # Find cached implementations

        unowned = {}  # Impl ID -> Store
        duplicates = []  # TODO

        for s in self.iface_cache.stores.stores:
            if os.path.isdir(s.dir):
                for id in os.listdir(s.dir):
                    if id in unowned:
                        duplicates.append(id)
                    unowned[id] = s

        ok_interfaces = []
        error_interfaces = []

        # Look through cached interfaces for implementation owners
        all = self.iface_cache.list_all_interfaces()
        all.sort()
        for uri in all:
            iface_size = 0
            try:
                if uri.startswith('/'):
                    cached_iface = uri
                else:
                    cached_iface = basedir.load_first_cache(
                        namespaces.config_site, 'interfaces',
                        model.escape(uri))
                user_overrides = basedir.load_first_config(
                    namespaces.config_site, namespaces.config_prog,
                    'user_overrides', model.escape(uri))

                iface_size = size_if_exists(cached_iface) + size_if_exists(
                    user_overrides)
                iface = self.iface_cache.get_interface(uri)
            except Exception, ex:
                error_interfaces.append((uri, str(ex), iface_size))
            else:
                cached_iface = ValidInterface(iface, iface_size)
                for impl in iface.implementations.values():
                    if impl.id.startswith('/') or impl.id.startswith('.'):
                        cached_iface.in_cache.append(LocalImplementation(impl))
                    if impl.id in unowned:
                        cached_dir = unowned[impl.id].dir
                        impl_path = os.path.join(cached_dir, impl.id)
                        impl_size = get_size(impl_path)
                        cached_iface.in_cache.append(
                            KnownImplementation(cached_iface, cached_dir, impl,
                                                impl_size))
                        del unowned[impl.id]
                cached_iface.in_cache.sort()
                ok_interfaces.append(cached_iface)
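
The helpers size_if_exists and get_size used above are not part of this snippet. A minimal sketch of what they might look like (the bodies below are an assumption, not the project's actual code); size_if_exists tolerates None because load_first_cache/load_first_config return None when nothing is found:

import os

def size_if_exists(path):
    # Size of a regular file; 0 if path is None or does not exist.
    if path and os.path.isfile(path):
        return os.path.getsize(path)
    return 0

def get_size(path):
    # Total size of a file, or of every file under a directory tree.
    if os.path.isfile(path):
        return os.path.getsize(path)
    total = 0
    for root, dirs, files in os.walk(path):
        for name in files:
            total += os.path.getsize(os.path.join(root, name))
    return total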
Example #2
	def testEscape(self):
		self.assertEqual("", model.escape(""))
		self.assertEqual("hello", model.escape("hello"))
		self.assertEqual("%20", model.escape(" "))

		self.assertEqual("file%3a%2f%2ffoo%7ebar",
				model.escape("file://foo~bar"))
		self.assertEqual("file%3a%2f%2ffoo%25bar",
				model.escape("file://foo%bar"))

		self.assertEqual("file:##foo%7ebar",
				model._pretty_escape("file://foo~bar"))
		self.assertEqual("file:##foo%25bar",
				model._pretty_escape("file://foo%bar"))
Example #3
    def testEscape(self):
        self.assertEqual("", model.escape(""))
        self.assertEqual("hello", model.escape("hello"))
        self.assertEqual("%20", model.escape(" "))

        self.assertEqual("file%3a%2f%2ffoo%7ebar",
                         model.escape("file://foo~bar"))
        self.assertEqual("file%3a%2f%2ffoo%25bar",
                         model.escape("file://foo%bar"))

        self.assertEqual("file:##foo%7ebar",
                         model._pretty_escape("file://foo~bar"))
        self.assertEqual("file:##foo%25bar",
                         model._pretty_escape("file://foo%bar"))
Example #4
 def delete(self):
     if not self.uri.startswith('/'):
         cached_iface = basedir.load_first_cache(namespaces.config_site,
                                                 'interfaces',
                                                 model.escape(self.uri))
         if cached_iface:
             #print "Delete", cached_iface
             os.unlink(cached_iface)
     user_overrides = basedir.load_first_config(namespaces.config_site,
                                                namespaces.config_prog,
                                                'user_overrides',
                                                model.escape(self.uri))
     if user_overrides:
         #print "Delete", user_overrides
         os.unlink(user_overrides)
Example #5
    def get_icon_path(self, iface):
        """Get the path of a cached icon for an interface.
		@param iface: interface whose icon we want
		@return: the path of the cached icon, or None if not cached.
		@rtype: str"""
        return basedir.load_first_cache(config_site, 'interface_icons',
                                        escape(iface.uri))
Example #6
	def cache_iface(self, name, data):
		cached_ifaces = basedir.save_cache_path('0install.net',
							'interfaces')

		f = open(os.path.join(cached_ifaces, model.escape(name)), 'w')
		f.write(data)
		f.close()
Example #7
	def cache_iface(self, name, data):
		cached_ifaces = basedir.save_cache_path('0install.net',
							'interfaces')

		f = open(os.path.join(cached_ifaces, model.escape(name)), 'w')
		f.write(data)
		f.close()
Example #8
def update_user_overrides(interface):
	"""Update an interface with user-supplied information.
	Sets preferred stability and updates extra_feeds.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	"""
	user = basedir.load_first_config(config_site, config_prog,
					   'interfaces', model._pretty_escape(interface.uri))
	if user is None:
		# For files saved by 0launch < 0.49
		user = basedir.load_first_config(config_site, config_prog,
						   'user_overrides', escape(interface.uri))
	if not user:
		return

	try:
		root = qdom.parse(file(user))
	except Exception as ex:
		warn(_("Error reading '%(user)s': %(exception)s"), {'user': user, 'exception': ex})
		raise

	stability_policy = root.getAttribute('stability-policy')
	if stability_policy:
		interface.set_stability_policy(stability_levels[str(stability_policy)])

	for item in root.childNodes:
		if item.uri != XMLNS_IFACE: continue
		if item.name == 'feed':
			feed_src = item.getAttribute('src')
			if not feed_src:
				raise InvalidInterface(_('Missing "src" attribute in <feed>'))
			interface.extra_feeds.append(Feed(feed_src, item.getAttribute('arch'), True, langs = item.getAttribute('langs')))
Example #9
    def _import_new_feed(self, feed_url, new_xml, modified_time):
        """Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
        assert modified_time
        assert isinstance(new_xml, bytes), repr(new_xml)

        upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
        cached = os.path.join(upstream_dir, escape(feed_url))

        old_modified = None
        if os.path.exists(cached):
            with open(cached, 'rb') as stream:
                old_xml = stream.read()
            if old_xml == new_xml:
                logger.debug(_("No change"))
                # Update in-memory copy, in case someone else updated the disk copy
                self.get_feed(feed_url, force=True)
                return
            old_modified = int(os.stat(cached).st_mtime)

        # Do we need to write this temporary file now?
        try:
            with open(cached + '.new', 'wb') as stream:
                stream.write(new_xml)
            os.utime(cached + '.new', (modified_time, modified_time))
            new_mtime = reader.check_readable(feed_url, cached + '.new')
            assert new_mtime == modified_time

            old_modified = self._get_signature_date(feed_url) or old_modified

            if old_modified:
                if new_mtime < old_modified:
                    raise ReplayAttack(
                        _("New feed's modification time is "
                          "before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                          "Refusing update.") % {
                              'iface': feed_url,
                              'old_time': _pretty_time(old_modified),
                              'new_time': _pretty_time(new_mtime)
                          })
                if new_mtime == old_modified:
                    # You used to have to update the modification time manually.
                    # Now it comes from the signature, this check isn't useful
                    # and often causes problems when the stored format changes
                    # (e.g., when we stopped writing last-modified attributes)
                    pass
                    #raise SafeException("Interface has changed, but modification time "
                    #		    "hasn't! Refusing update.")
        except:
            os.unlink(cached + '.new')
            raise

        portable_rename(cached + '.new', cached)
        logger.debug(_("Saved as %s") % cached)

        self.get_feed(feed_url, force=True)
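
_pretty_time() above is only used to format timestamps for the ReplayAttack message. An assumed helper along these lines would be sufficient (it is not shown in the snippet):

import time

def _pretty_time(t):
    # Render a UNIX timestamp as a human-readable UTC string.
    return time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(t))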
Example #10
	def testTimes(self):
		iface_cache = self.config.iface_cache
		with tempfile.TemporaryFile() as stream:
			stream.write(data.thomas_key)
			stream.seek(0)
			gpg.import_key(stream)

		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, model.escape('http://foo'))

		with open(cached, 'wb') as stream:
			stream.write(data.foo_signed_xml)

		signed = iface_cache._get_signature_date('http://foo')
		assert signed == None

		trust.trust_db.trust_key(
			'92429807C9853C0744A68B9AAE07828059A53CC1')

		signed = iface_cache._get_signature_date('http://foo')
		self.assertEqual(1154850229, signed)

		with open(cached, 'w+b') as stream:
			stream.seek(0)
			stream.write(b'Hello')

		# When the signature is invalid, we just return None.
		# This is because versions < 0.22 used to corrupt the signature
		# by adding an attribute to the XML
		signed = iface_cache._get_signature_date('http://foo')
		assert signed == None
Example #11
            def get_inputs():
                for sel in sels.selections.values():
                    logger.info("Checking %s", sel.feed)

                    if sel.feed.startswith('distribution:'):
                        # If the package has changed version, we'll detect that below
                        # with get_unavailable_selections.
                        pass
                    elif os.path.isabs(sel.feed):
                        # Local feed
                        yield sel.feed
                    else:
                        # Cached feed
                        cached = basedir.load_first_cache(
                            namespaces.config_site, 'interfaces',
                            model.escape(sel.feed))
                        if cached:
                            yield cached
                        else:
                            raise IOError("Input %s missing; update" %
                                          sel.feed)

                    # Per-feed configuration
                    yield basedir.load_first_config(
                        namespaces.config_site, namespaces.config_prog,
                        'interfaces', model._pretty_escape(sel.interface))

                # Global configuration
                yield basedir.load_first_config(namespaces.config_site,
                                                namespaces.config_prog,
                                                'global')
Example #12
	def get_icon_path(self, iface):
		"""Get the path of a cached icon for an interface.
		@param iface: interface whose icon we want
		@return: the path of the cached icon, or None if not cached.
		@rtype: str"""
		return basedir.load_first_cache(config_site, 'interface_icons',
						 escape(iface.uri))
Example #13
			def get_inputs():
				for sel in sels.selections.values():
					logger.info("Checking %s", sel.feed)

					if sel.feed.startswith('distribution:'):
						# If the package has changed version, we'll detect that below
						# with get_unavailable_selections.
						pass
					elif os.path.isabs(sel.feed):
						# Local feed
						yield sel.feed
					else:
						# Cached feed
						cached = basedir.load_first_cache(namespaces.config_site, 'interfaces', model.escape(sel.feed))
						if cached:
							yield cached
						else:
							raise IOError("Input %s missing; update" % sel.feed)

					# Per-feed configuration
					yield basedir.load_first_config(namespaces.config_site, namespaces.config_prog,
									   'interfaces', model._pretty_escape(sel.interface))

				# Global configuration
				yield basedir.load_first_config(namespaces.config_site, namespaces.config_prog, 'global')
Example #14
	def testXMLupdate(self):
		iface_cache = self.config.iface_cache
		trust.trust_db.trust_key(
			'92429807C9853C0744A68B9AAE07828059A53CC1')
		with tempfile.TemporaryFile() as stream:
			stream.write(data.thomas_key)
			stream.seek(0)
			gpg.import_key(stream)

		iface = iface_cache.get_interface('http://foo')
		with tempfile.TemporaryFile() as src:
			src.write(data.foo_signed_xml)
			src.seek(0)
			pending = PendingFeed(iface.uri, src)
			assert iface_cache.update_feed_if_trusted(iface.uri, pending.sigs, pending.new_xml)

		iface_cache.__init__()
		feed = iface_cache.get_feed('http://foo')
		assert feed.last_modified == 1154850229

		# mtimes are unreliable because copying often changes them -
		# check that we extract the time from the signature when upgrading
		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, model.escape(feed.url))
		os.utime(cached, None)

		iface_cache.__init__()
		feed = iface_cache.get_feed('http://foo')
		assert feed.last_modified > 1154850229

		with tempfile.TemporaryFile() as src:
			src.write(data.new_foo_signed_xml)
			src.seek(0)

			pending = PendingFeed(feed.url, src)

			old_stdout = sys.stdout
			sys.stdout = StringIO()
			try:
				assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs, pending.new_xml, dry_run = True)
			finally:
				sys.stdout = old_stdout

			assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs, pending.new_xml)

		# Can't 'update' to an older copy
		with tempfile.TemporaryFile() as src:
			src.write(data.foo_signed_xml)
			src.seek(0)
			try:
				pending = PendingFeed(feed.url, src)
				assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs, pending.new_xml)

				assert 0
			except model.SafeException:
				pass
Example #15
	def _import_new_feed(self, feed_url, new_xml, modified_time):
		"""Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
		assert modified_time
		assert isinstance(new_xml, bytes), repr(new_xml)

		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, escape(feed_url))

		old_modified = None
		if os.path.exists(cached):
			with open(cached, 'rb') as stream:
				old_xml = stream.read()
			if old_xml == new_xml:
				logger.debug(_("No change"))
				# Update in-memory copy, in case someone else updated the disk copy
				self.get_feed(feed_url, force = True)
				return
			old_modified = int(os.stat(cached).st_mtime)

		# Do we need to write this temporary file now?
		try:
			with open(cached + '.new', 'wb') as stream:
				stream.write(new_xml)
			os.utime(cached + '.new', (modified_time, modified_time))
			new_mtime = reader.check_readable(feed_url, cached + '.new')
			assert new_mtime == modified_time

			old_modified = self._get_signature_date(feed_url) or old_modified

			if old_modified:
				if new_mtime < old_modified:
					raise ReplayAttack(_("New feed's modification time is "
						"before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
						"Refusing update.")
						% {'iface': feed_url, 'old_time': _pretty_time(old_modified), 'new_time': _pretty_time(new_mtime)})
				if new_mtime == old_modified:
					# You used to have to update the modification time manually.
					# Now it comes from the signature, this check isn't useful
					# and often causes problems when the stored format changes
					# (e.g., when we stopped writing last-modified attributes)
					pass
					#raise SafeException("Interface has changed, but modification time "
					#		    "hasn't! Refusing update.")
		except:
			os.unlink(cached + '.new')
			raise

		portable_rename(cached + '.new', cached)
		logger.debug(_("Saved as %s") % cached)

		self.get_feed(feed_url, force = True)
Example #16
 def delete(self):
     if not os.path.isabs(self.uri):
         cached_iface = basedir.load_first_cache(namespaces.config_site, "interfaces", model.escape(self.uri))
         if cached_iface:
             # print "Delete", cached_iface
             os.unlink(cached_iface)
     user_overrides = basedir.load_first_config(
         namespaces.config_site, namespaces.config_prog, "user_overrides", model.escape(self.uri)
     )
     if user_overrides:
         # print "Delete", user_overrides
         os.unlink(user_overrides)
Example #17
    def _import_new_interface(self, interface, new_xml, modified_time):
        """Write new_xml into the cache.
		@param interface: updated once the new XML is written
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
        assert modified_time

        upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
        cached = os.path.join(upstream_dir, escape(interface.uri))

        if os.path.exists(cached):
            old_xml = file(cached).read()
            if old_xml == new_xml:
                debug(_("No change"))
                reader.update_from_cache(interface)
                return

        stream = file(cached + '.new', 'w')
        stream.write(new_xml)
        stream.close()
        os.utime(cached + '.new', (modified_time, modified_time))
        new_mtime = reader.check_readable(interface.uri, cached + '.new')
        assert new_mtime == modified_time

        old_modified = self._get_signature_date(interface.uri)
        if old_modified is None:
            old_modified = interface.last_modified

        if old_modified:
            if new_mtime < old_modified:
                os.unlink(cached + '.new')
                raise ReplayAttack(
                    _("New interface's modification time is "
                      "before old version!\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                      "Refusing update.") % {
                          'old_time': _pretty_time(old_modified),
                          'new_time': _pretty_time(new_mtime)
                      })
            if new_mtime == old_modified:
                # You used to have to update the modification time manually.
                # Now it comes from the signature, this check isn't useful
                # and often causes problems when the stored format changes
                # (e.g., when we stopped writing last-modified attributes)
                pass
                #raise SafeException("Interface has changed, but modification time "
                #		    "hasn't! Refusing update.")
        os.rename(cached + '.new', cached)
        debug(_("Saved as %s") % cached)

        reader.update_from_cache(interface)
Example #18
def update_user_overrides(interface, known_site_feeds=frozenset()):
    """Update an interface with user-supplied information.
	Sets preferred stability and updates extra_feeds.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@param known_site_feeds: feeds to ignore (for backwards compatibility)
	"""
    user = basedir.load_first_config(config_site, config_prog, 'interfaces',
                                     model._pretty_escape(interface.uri))
    if user is None:
        # For files saved by 0launch < 0.49
        user = basedir.load_first_config(config_site,
                                         config_prog, 'user_overrides',
                                         escape(interface.uri))
    if not user:
        return

    try:
        with open(user, 'rb') as stream:
            root = qdom.parse(stream)
    except Exception as ex:
        logger.warn(_("Error reading '%(user)s': %(exception)s"), {
            'user': user,
            'exception': ex
        })
        raise

    stability_policy = root.getAttribute('stability-policy')
    if stability_policy:
        interface.set_stability_policy(stability_levels[str(stability_policy)])

    for item in root.childNodes:
        if item.uri != XMLNS_IFACE: continue
        if item.name == 'feed':
            feed_src = item.getAttribute('src')
            if not feed_src:
                raise InvalidInterface(_('Missing "src" attribute in <feed>'))
            # (note: 0install 1.9..1.12 used a different scheme and the "site-package" attribute;
            # we deliberately use a different attribute name to avoid confusion)
            if item.getAttribute('is-site-package'):
                # Site packages are detected earlier. This test isn't completely reliable,
                # since older versions will remove the attribute when saving the config
                # (hence the next test).
                continue
            if feed_src in known_site_feeds:
                continue
            interface.extra_feeds.append(
                Feed(feed_src,
                     item.getAttribute('arch'),
                     True,
                     langs=item.getAttribute('langs')))
Example #19
		def download_and_add_icon():
			stream = dl.tempfile
			yield dl.downloaded
			try:
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')
				icon_file = file(os.path.join(icons_cache, escape(interface.uri)), 'w')
				shutil.copyfileobj(stream, icon_file)
			except Exception as ex:
				self.handler.report_error(ex)
Example #20
    def testXMLupdate(self):
        iface_cache = self.config.iface_cache
        trust.trust_db.trust_key('92429807C9853C0744A68B9AAE07828059A53CC1')
        stream = tempfile.TemporaryFile()
        stream.write(data.thomas_key)
        stream.seek(0)
        gpg.import_key(stream)

        iface = iface_cache.get_interface('http://foo')
        src = tempfile.TemporaryFile()
        src.write(data.foo_signed_xml)
        src.seek(0)
        pending = PendingFeed(iface.uri, src)
        assert iface_cache.update_feed_if_trusted(iface.uri, pending.sigs,
                                                  pending.new_xml)

        iface_cache.__init__()
        feed = iface_cache.get_feed('http://foo')
        assert feed.last_modified == 1154850229

        # mtimes are unreliable because copying often changes them -
        # check that we extract the time from the signature when upgrading
        upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
        cached = os.path.join(upstream_dir, model.escape(feed.url))
        os.utime(cached, None)

        iface_cache.__init__()
        feed = iface_cache.get_feed('http://foo')
        assert feed.last_modified > 1154850229

        src = tempfile.TemporaryFile()
        src.write(data.new_foo_signed_xml)
        src.seek(0)

        pending = PendingFeed(feed.url, src)
        assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs,
                                                  pending.new_xml)

        # Can't 'update' to an older copy
        src = tempfile.TemporaryFile()
        src.write(data.foo_signed_xml)
        src.seek(0)
        try:
            pending = PendingFeed(feed.url, src)
            assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs,
                                                      pending.new_xml)

            assert 0
        except model.SafeException:
            pass
Example #21
        def download_and_add_icon():
            stream = dl.tempfile
            yield dl.downloaded
            try:
                tasks.check(dl.downloaded)
                if dl.unmodified: return
                stream.seek(0)

                import shutil
                icons_cache = basedir.save_cache_path(config_site,
                                                      'interface_icons')
                icon_file = file(
                    os.path.join(icons_cache, escape(interface.uri)), 'w')
                shutil.copyfileobj(stream, icon_file)
            except Exception, ex:
                self.handler.report_error(ex)
Example #22
def update_user_feed_overrides(feed):
    """Update a feed with user-supplied information.
	Sets last_checked and user_stability ratings.
	@param feed: feed to update
	@since 0.49
	"""
    user = basedir.load_first_config(config_site, config_prog, 'feeds',
                                     model._pretty_escape(feed.url))
    if user is None:
        # For files saved by 0launch < 0.49
        user = basedir.load_first_config(config_site, config_prog,
                                         'user_overrides', escape(feed.url))
    if not user:
        return

    try:
        with open(user, 'rb') as stream:
            root = qdom.parse(stream)
    except Exception as ex:
        logger.warn(_("Error reading '%(user)s': %(exception)s"), {
            'user': user,
            'exception': ex
        })
        raise

    last_checked = root.getAttribute('last-checked')
    if last_checked:
        feed.last_checked = int(last_checked)

    for item in root.childNodes:
        if item.uri != XMLNS_IFACE: continue
        if item.name == 'implementation':
            id = item.getAttribute('id')
            assert id is not None
            impl = feed.implementations.get(id, None)
            if not impl:
                logger.debug(
                    _("Ignoring user-override for unknown implementation %(id)s in %(interface)s"
                      ), {
                          'id': id,
                          'interface': feed
                      })
                continue

            user_stability = item.getAttribute('user-stability')
            if user_stability:
                impl.user_stability = stability_levels[str(user_stability)]
Example #23
	def delete(self):
		if not os.path.isabs(self.uri):
			cached_iface = basedir.load_first_cache(namespaces.config_site,
					'interfaces', model.escape(self.uri))
			if cached_iface:
				if SAFE_MODE:
					print("Delete", cached_iface)
				else:
					os.unlink(cached_iface)
		user_overrides = basedir.load_first_config(namespaces.config_site,
					namespaces.config_prog,
					'interfaces', model._pretty_escape(self.uri))
		if user_overrides:
			if SAFE_MODE:
				print("Delete", user_overrides)
			else:
				os.unlink(user_overrides)
Example #24
	def delete(self):
		if not os.path.isabs(self.uri):
			cached_iface = basedir.load_first_cache(namespaces.config_site,
					'interfaces', model.escape(self.uri))
			if cached_iface:
				if SAFE_MODE:
					print("Delete", cached_iface)
				else:
					os.unlink(cached_iface)
		user_overrides = basedir.load_first_config(namespaces.config_site,
					namespaces.config_prog,
					'interfaces', model._pretty_escape(self.uri))
		if user_overrides:
			if SAFE_MODE:
				print("Delete", user_overrides)
			else:
				os.unlink(user_overrides)
Example #25
def load_feed_from_cache(url, selections_ok = False):
	"""Load a feed. If the feed is remote, load from the cache. If local, load it directly.
	@return: the feed, or None if it's remote and not cached."""
	try:
		if os.path.isabs(url):
			debug(_("Loading local feed file '%s'"), url)
			return load_feed(url, local = True, selections_ok = selections_ok)
		else:
			cached = basedir.load_first_cache(config_site, 'interfaces', escape(url))
			if cached:
				debug(_("Loading cached information for %(interface)s from %(cached)s"), {'interface': url, 'cached': cached})
				return load_feed(cached, local = False)
			else:
				return None
	except InvalidInterface, ex:
		ex.feed_url = url
		raise
Example #26
def load_feed_from_cache(url, selections_ok = False):
	"""Load a feed. If the feed is remote, load from the cache. If local, load it directly.
	@return: the feed, or None if it's remote and not cached."""
	try:
		if os.path.isabs(url):
			debug(_("Loading local feed file '%s'"), url)
			return load_feed(url, local = True, selections_ok = selections_ok)
		else:
			cached = basedir.load_first_cache(config_site, 'interfaces', escape(url))
			if cached:
				debug(_("Loading cached information for %(interface)s from %(cached)s"), {'interface': url, 'cached': cached})
				return load_feed(cached, local = False)
			else:
				return None
	except InvalidInterface as ex:
		ex.feed_url = url
		raise
Example #27
def update_user_overrides(interface, known_site_feeds=frozenset()):
    """Update an interface with user-supplied information.
	Sets preferred stability and updates extra_feeds.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@param known_site_feeds: feeds to ignore (for backwards compatibility)
	"""
    user = basedir.load_first_config(config_site, config_prog, "interfaces", model._pretty_escape(interface.uri))
    if user is None:
        # For files saved by 0launch < 0.49
        user = basedir.load_first_config(config_site, config_prog, "user_overrides", escape(interface.uri))
    if not user:
        return

    try:
        with open(user, "rb") as stream:
            root = qdom.parse(stream)
    except Exception as ex:
        logger.warn(_("Error reading '%(user)s': %(exception)s"), {"user": user, "exception": ex})
        raise

    stability_policy = root.getAttribute("stability-policy")
    if stability_policy:
        interface.set_stability_policy(stability_levels[str(stability_policy)])

    for item in root.childNodes:
        if item.uri != XMLNS_IFACE:
            continue
        if item.name == "feed":
            feed_src = item.getAttribute("src")
            if not feed_src:
                raise InvalidInterface(_('Missing "src" attribute in <feed>'))
                # (note: 0install 1.9..1.12 used a different scheme and the "site-package" attribute;
                # we deliberately use a different attribute name to avoid confusion)
            if item.getAttribute("is-site-package"):
                # Site packages are detected earlier. This test isn't completely reliable,
                # since older versions will remove the attribute when saving the config
                # (hence the next test).
                continue
            if feed_src in known_site_feeds:
                continue
            interface.extra_feeds.append(
                Feed(feed_src, item.getAttribute("arch"), True, langs=item.getAttribute("langs"))
            )
Example #28
		def download_and_add_icon():
			stream = dl.tempfile
			try:
				yield dl.downloaded
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil, tempfile
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

				tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
				shutil.copyfileobj(stream, tmp_file)
				tmp_file.close()

				icon_file = os.path.join(icons_cache, escape(interface.uri))
				portable_rename(tmp_file.name, icon_file)
			finally:
				stream.close()
Example #29
def count_impls(url):
	if url not in cached_counts:
		cached = basedir.load_first_cache(namespaces.config_site, 'interfaces', model.escape(url))
		if cached:
			with open(cached) as stream:
				cached_doc = qdom.parse(stream)
			def count(elem):
				c = 0
				if elem.uri != namespaces.XMLNS_IFACE: return 0
				if elem.name == 'implementation' or elem.name == 'package-implementation':
					c += 1
				else:
					for child in elem.childNodes:
						c += count(child)
				return c
			cached_counts[url] = count(cached_doc)
		else:
			cached_counts[url] = 0
	return cached_counts[url]
Example #30
		def download_and_add_icon():
			stream = dl.tempfile
			try:
				yield dl.downloaded
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil, tempfile
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

				tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
				shutil.copyfileobj(stream, tmp_file)
				tmp_file.close()

				icon_file = os.path.join(icons_cache, escape(interface.uri))
				portable_rename(tmp_file.name, icon_file)
			finally:
				stream.close()
Example #31
    def import_feed(self, url, contents):
        """contents can be a path or an Element."""
        iface_cache = self.config.iface_cache
        iface_cache.get_interface(url)

        if isinstance(contents, qdom.Element):
            feed = model.ZeroInstallFeed(contents)
        else:
            feed = reader.load_feed(contents)

        iface_cache._feeds[url] = feed

        xml = qdom.to_UTF8(feed.feed_element)
        upstream_dir = basedir.save_cache_path(namespaces.config_site, "interfaces")
        cached = os.path.join(upstream_dir, model.escape(url))
        with open(cached, "wb") as stream:
            stream.write(xml)

        return feed
Example #32
def update_user_overrides(interface, main_feed=None):
    """Update an interface with user-supplied information.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@param main_feed: feed to update with last_checked information
	@note: feed updates shouldn't really be here. main_feed may go away in future.
	"""
    user = basedir.load_first_config(config_site, config_prog,
                                     'user_overrides', escape(interface.uri))
    if not user:
        return

    try:
        root = qdom.parse(file(user))
    except Exception, ex:
        warn(_("Error reading '%(user)s': %(exception)s"), {
            'user': user,
            'exception': ex
        })
        raise
Example #33
	def get_cached_signatures(self, uri):
		"""Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@type uri: str
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None
		@since: 0.25"""
		import gpg
		if os.path.isabs(uri):
			old_iface = uri
		else:
			old_iface = basedir.load_first_cache(config_site, 'interfaces', escape(uri))
			if old_iface is None:
				return None
		try:
			return gpg.check_stream(file(old_iface))[1]
		except SafeException, ex:
			debug(_("No signatures (old-style interface): %s") % ex)
			return None
Example #34
def update_user_feed_overrides(feed):
	"""Update a feed with user-supplied information.
	Sets last_checked and user_stability ratings.
	@param feed: feed to update
	@since 0.49
	"""
	user = basedir.load_first_config(config_site, config_prog,
					   'feeds', model._pretty_escape(feed.url))
	if user is None:
		# For files saved by 0launch < 0.49
		user = basedir.load_first_config(config_site, config_prog,
						   'user_overrides', escape(feed.url))
	if not user:
		return

	try:
		root = qdom.parse(file(user))
	except Exception, ex:
		warn(_("Error reading '%(user)s': %(exception)s"), {'user': user, 'exception': ex})
		raise
Example #35
def update_user_overrides(interface):
	"""Update an interface with user-supplied information.
	Sets preferred stability and updates extra_feeds.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	"""
	user = basedir.load_first_config(config_site, config_prog,
					   'interfaces', model._pretty_escape(interface.uri))
	if user is None:
		# For files saved by 0launch < 0.49
		user = basedir.load_first_config(config_site, config_prog,
						   'user_overrides', escape(interface.uri))
	if not user:
		return

	try:
		root = qdom.parse(file(user))
	except Exception, ex:
		warn(_("Error reading '%(user)s': %(exception)s"), {'user': user, 'exception': ex})
		raise
Example #36
def update_user_feed_overrides(feed):
    """Update a feed with user-supplied information.
	Sets last_checked and user_stability ratings.
	@param feed: feed to update
	@since 0.49
	"""
    user = basedir.load_first_config(config_site, config_prog, "feeds", model._pretty_escape(feed.url))
    if user is None:
        # For files saved by 0launch < 0.49
        user = basedir.load_first_config(config_site, config_prog, "user_overrides", escape(feed.url))
    if not user:
        return

    try:
        with open(user, "rb") as stream:
            root = qdom.parse(stream)
    except Exception as ex:
        logger.warn(_("Error reading '%(user)s': %(exception)s"), {"user": user, "exception": ex})
        raise

    last_checked = root.getAttribute("last-checked")
    if last_checked:
        feed.last_checked = int(last_checked)

    for item in root.childNodes:
        if item.uri != XMLNS_IFACE:
            continue
        if item.name == "implementation":
            id = item.getAttribute("id")
            assert id is not None
            impl = feed.implementations.get(id, None)
            if not impl:
                logger.debug(
                    _("Ignoring user-override for unknown implementation %(id)s in %(interface)s"),
                    {"id": id, "interface": feed},
                )
                continue

            user_stability = item.getAttribute("user-stability")
            if user_stability:
                impl.user_stability = stability_levels[str(user_stability)]
Example #37
def save_interface(interface):
	user_overrides = basedir.save_config_path(config_site, config_prog, 'user_overrides')

	impl = minidom.getDOMImplementation()
	doc = impl.createDocument(XMLNS_IFACE, 'interface-preferences', None)

	root = doc.documentElement
	root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns', XMLNS_IFACE)
	root.setAttribute('uri', interface.uri)

	if interface.stability_policy:
		root.setAttribute('stability-policy', str(interface.stability_policy))

	if interface.last_checked:
		root.setAttribute('last-checked', str(interface.last_checked))

	impls = interface.implementations.values()
	impls.sort()
	for impl in impls:
		_add_impl(root, impl)
	
	for feed in interface.extra_feeds:
		if feed.user_override:
			elem = doc.createElementNS(XMLNS_IFACE, 'feed')
			root.appendChild(elem)
			elem.setAttribute('src', feed.uri)
			if feed.arch:
				elem.setAttribute('arch', feed.arch)

	import tempfile
	tmp_fd, tmp_name = tempfile.mkstemp(dir = user_overrides)
	try:
		tmp_file = os.fdopen(tmp_fd, 'w')
		doc.writexml(tmp_file, addindent = " ", newl = '\n')
		tmp_file.close()
		path = os.path.join(user_overrides, escape(interface.uri))
		os.rename(tmp_name, path)
		os.chmod(path, 0660)
	except:
		os.unlink(tmp_name)
		raise
Example #38
    def get_cached_signatures(self, uri):
        """Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@type uri: str
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None
		@since: 0.25"""
        import gpg
        if uri.startswith('/'):
            old_iface = uri
        else:
            old_iface = basedir.load_first_cache(config_site, 'interfaces',
                                                 escape(uri))
            if old_iface is None:
                return None
        try:
            return gpg.check_stream(file(old_iface))[1]
        except SafeException, ex:
            debug(_("No signatures (old-style interface): %s") % ex)
            return None
Example #39
    def get_cached_signatures(self, uri):
        """Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@type uri: str
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None
		@since: 0.25"""
        from . import gpg
        if os.path.isabs(uri):
            old_iface = uri
        else:
            old_iface = basedir.load_first_cache(config_site, 'interfaces',
                                                 escape(uri))
            if old_iface is None:
                return None
        try:
            with open(old_iface, 'rb') as stream:
                return gpg.check_stream(stream)[1]
        except SafeException as ex:
            logger.info(_("No signatures (old-style interface): %s") % ex)
            return None
Example #40
def load_feed_from_cache(url):
    """Load a feed. If the feed is remote, load from the cache. If local, load it directly.
	@type url: str
	@return: the feed, or None if it's remote and not cached.
	@rtype: L{ZeroInstallFeed} | None"""
    try:
        if os.path.isabs(url):
            logger.debug(_("Loading local feed file '%s'"), url)
            return load_feed(url, local=True)
        else:
            cached = basedir.load_first_cache(config_site, "interfaces", escape(url))
            if cached:
                logger.debug(
                    _("Loading cached information for %(interface)s from %(cached)s"),
                    {"interface": url, "cached": cached},
                )
                return load_feed(cached, local=False)
            else:
                return None
    except InvalidInterface as ex:
        ex.feed_url = url
        raise
Example #41
def export_feeds(export_dir, feeds, keys_used):
	"""Copy each feed (and icon) in feeds from the cache to export_dir.
	Add all signing key fingerprints to keys_used."""
	for feed in feeds:
		if feed.startswith('/'):
			info("Skipping local feed %s", feed)
			continue
		if feed.startswith('distribution:'):
			info("Skipping distribution feed %s", feed)
			continue
		print "Exporting feed", feed
		# Store feed
		cached = basedir.load_first_cache(namespaces.config_site,
						  'interfaces',
						  model.escape(feed))
		if cached:
			feed_dir = os.path.join(export_dir, get_feed_path(feed))
			feed_dst = os.path.join(feed_dir, 'latest.xml')
			if not os.path.isdir(feed_dir):
				os.makedirs(feed_dir)
			shutil.copyfile(cached, feed_dst)
			info("Exported feed %s", feed)

			icon_path = iface_cache.iface_cache.get_icon_path(iface_cache.iface_cache.get_interface(feed))
			if icon_path:
				icon_dst = os.path.join(feed_dir, 'icon.png')
				shutil.copyfile(icon_path, icon_dst)

			# Get the keys
			stream = file(cached)
			unused, sigs = gpg.check_stream(stream)
			stream.close()
			for x in sigs:
				if isinstance(x, gpg.ValidSig):
					keys_used.add(x.fingerprint)
				else:
					warn("Signature problem: %s" % x)
		else:
			warn("Feed not cached: %s", feed)
Example #42
def update_user_feed_overrides(feed):
	"""Update a feed with user-supplied information.
	Sets last_checked and user_stability ratings.
	@param feed: feed to update
	@since 0.49
	"""
	user = basedir.load_first_config(config_site, config_prog,
					   'feeds', model._pretty_escape(feed.url))
	if user is None:
		# For files saved by 0launch < 0.49
		user = basedir.load_first_config(config_site, config_prog,
						   'user_overrides', escape(feed.url))
	if not user:
		return

	try:
		root = qdom.parse(file(user))
	except Exception as ex:
		warn(_("Error reading '%(user)s': %(exception)s"), {'user': user, 'exception': ex})
		raise

	last_checked = root.getAttribute('last-checked')
	if last_checked:
		feed.last_checked = int(last_checked)

	for item in root.childNodes:
		if item.uri != XMLNS_IFACE: continue
		if item.name == 'implementation':
			id = item.getAttribute('id')
			assert id is not None
			impl = feed.implementations.get(id, None)
			if not impl:
				debug(_("Ignoring user-override for unknown implementation %(id)s in %(interface)s"), {'id': id, 'interface': feed})
				continue

			user_stability = item.getAttribute('user-stability')
			if user_stability:
				impl.user_stability = stability_levels[str(user_stability)]
Example #43
def update_from_cache(interface):
    """Read a cached interface and any native feeds or user overrides.
	@param interface: the interface object to update
	@type interface: L{model.Interface}
	@return: True if cached version and user overrides loaded OK.
	False if upstream not cached. Local interfaces (starting with /) are
	always considered to be cached, although they are not actually stored in the cache.
	@rtype: bool"""
    interface.reset()
    main_feed = None

    if interface.uri.startswith('/'):
        debug(_("Loading local interface file '%s'"), interface.uri)
        update(interface, interface.uri, local=True)
        cached = True
    else:
        cached = basedir.load_first_cache(config_site, 'interfaces',
                                          escape(interface.uri))
        if cached:
            debug(
                _("Loading cached information for %(interface)s from %(cached)s"
                  ), {
                      'interface': interface,
                      'cached': cached
                  })
            main_feed = update(interface, cached)

    # Add the distribution package manager's version, if any
    path = basedir.load_first_data(config_site, 'native_feeds',
                                   model._pretty_escape(interface.uri))
    if path:
        # Resolve any symlinks
        info(_("Adding native packager feed '%s'"), path)
        interface.extra_feeds.append(Feed(os.path.realpath(path), None, False))

    update_user_overrides(interface, main_feed)

    return bool(cached)
Example #44
    def _import_new_feed(self, feed_url, new_xml, modified_time, dry_run):
        """Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@type feed_url: str
		@param new_xml: the data to write
		@type new_xml: str
		@param modified_time: when new_xml was modified
		@type modified_time: int
		@type dry_run: bool
		@raises ReplayAttack: if the new mtime is older than the current one"""
        assert modified_time
        assert isinstance(new_xml, bytes), repr(new_xml)

        upstream_dir = basedir.save_cache_path(config_site, "interfaces")
        cached = os.path.join(upstream_dir, escape(feed_url))

        old_modified = None
        if os.path.exists(cached):
            with open(cached, "rb") as stream:
                old_xml = stream.read()
            if old_xml == new_xml:
                logger.debug(_("No change"))
                # Update in-memory copy, in case someone else updated the disk copy
                self.get_feed(feed_url, force=True)
                return
            old_modified = int(os.stat(cached).st_mtime)

        if dry_run:
            print(_("[dry-run] would cache feed {url} as {cached}").format(url=feed_url, cached=cached))
            from io import BytesIO
            from zeroinstall.injector import qdom

            root = qdom.parse(BytesIO(new_xml), filter_for_version=True)
            feed = model.ZeroInstallFeed(root)
            reader.update_user_feed_overrides(feed)
            self._feeds[feed_url] = feed
            return

            # Do we need to write this temporary file now?
        try:
            with open(cached + ".new", "wb") as stream:
                stream.write(new_xml)
            os.utime(cached + ".new", (modified_time, modified_time))
            new_mtime = reader.check_readable(feed_url, cached + ".new")
            assert new_mtime == modified_time

            old_modified = self._get_signature_date(feed_url) or old_modified

            if old_modified:
                if new_mtime < old_modified:
                    raise ReplayAttack(
                        _(
                            "New feed's modification time is "
                            "before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                            "Refusing update."
                        )
                        % {
                            "iface": feed_url,
                            "old_time": _pretty_time(old_modified),
                            "new_time": _pretty_time(new_mtime),
                        }
                    )
                if new_mtime == old_modified:
                    # You used to have to update the modification time manually.
                    # Now it comes from the signature, this check isn't useful
                    # and often causes problems when the stored format changes
                    # (e.g., when we stopped writing last-modified attributes)
                    pass
                    # raise SafeException("Interface has changed, but modification time "
                    # 		    "hasn't! Refusing update.")
        except:
            os.unlink(cached + ".new")
            raise

        portable_rename(cached + ".new", cached)
        logger.debug(_("Saved as %s") % cached)

        self.get_feed(feed_url, force=True)
Example #45
		def check(str):
			self.assertEqual(str, model.unescape(model.escape(str)))
			self.assertEqual(str, model.unescape(model._pretty_escape(str)))
Example #46
		def check(str):
			self.assertEqual(str, model.unescape(model.escape(str)))
			self.assertEqual(str, model.unescape(model._pretty_escape(str)))
			self.assertEqual(str,
				escaping.ununderscore_escape(escaping.underscore_escape(str)))
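
Both escaping schemes are required to round-trip through a single model.unescape. A sketch consistent with the checks above, for ASCII input (an illustration, not necessarily the library's code):

import re

def unescape(uri):
    # Reverse of escape()/_pretty_escape(): restore '/' from '#', then decode
    # each %XX pair back to the original character.
    uri = uri.replace('#', '/')
    if '%' not in uri:
        return uri
    return re.sub(r'%[0-9a-fA-F]{2}',
                  lambda m: chr(int(m.group(0)[1:], 16)), uri)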
Example #47
 def check(str):
     self.assertEqual(str, model.unescape(model.escape(str)))
     self.assertEqual(str, model.unescape(model._pretty_escape(str)))
Example #48
	def testCompletion(self):
		shell = None
		complete = lambda *a: self.complete(*a, shell=shell)
		for shell in ['zsh','fish']:
			assert 'select\n' in complete(["s"], 1)
			assert 'select\n' in complete([], 1)
			assert 'select\n' in complete(["", "bar"], 1)

			assert '' == complete(["", "bar"], 2)
			assert '' == complete(["unknown", "bar"], 2)
			#self.assertEqual('', complete(["--", "s"], 2))

			assert '--help\n' in complete(["-"], 1)
			assert '--help\n' in complete(["--"], 1)
			assert '--help\n' in complete(["--h"], 1)
			assert '-h\n' in complete(["-h"], 1)
			assert '-hv\n' in complete(["-hv"], 1)
			assert '' == complete(["-hi"], 1)

			#assert '--message' not in complete(["--m"], 1)
			assert '--message' in complete(["--m", "select"], 1)
			assert '--message' in complete(["select", "--m"], 2)

			assert '--help' in complete(["select", "foo", "--h"], 3)
			assert '--help' not in complete(["run", "foo", "--h"], 3)
			#assert '--help' not in complete(["select", "--version", "--h"], 3)

			# Fall back to file completion for the program's arguments
			self.assertEqual('file\n', complete(["run", "foo", ""], 3))

			# Option value completion
			assert 'file\n' in complete(["select", "--with-store"], 3)
			assert 'Linux\n' in complete(["select", "--os"], 3)
			assert 'x86_64\n' in complete(["select", "--cpu"], 3)
			assert 'sha256new\n' in complete(["digest", "--algorithm"], 3)

		# Option=value complete
		for shell in ['zsh','fish']:
			assert 'file\n' in complete(["select", "--with-store="], 2)
			assert 'filter --cpu=x86_64\n' in complete(["select", "--cpu="], 2)
		for shell in ['bash']:
			assert 'file\n' in complete(["select", "--with-store", "="], 3)
			assert 'file\n' in complete(["select", "--with-store", "=", "foo"], 4)
			assert 'filter x86_64 \n' in complete(["select", "--cpu", "="], 3)

		from zeroinstall.support import basedir
		from zeroinstall.injector.namespaces import config_site
		d = basedir.save_cache_path(config_site, "interfaces")
		with open(os.path.join(d, model.escape('http://example.com/foo')), 'wb') as stream:
			stream.write(b"<?xml version='1.0'?>"
				b"<interface uri='http://example.com/foo' xmlns='http://zero-install.sourceforge.net/2004/injector/interface'>"
				b"<name>-</name><summary>-</summary>"
				b"<implementation version='1.2' id='12'/>"
				b"<implementation version='1.5' id='15'/>"
				b"</interface>")

		for shell in ['bash']:
			assert 'filter select \n' in complete(["sel"], 1)
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "ht"], 2))
			self.assertEqual('prefix //example.com/\nfile\n', complete(["select", "http:"], 2))
			self.assertEqual('prefix //example.com/\nfile\n', complete(["select", "http:/"], 2))
			self.assertEqual('filter //example.com/foo \n', complete(["select", "http://example.com/"], 2))

		for shell in ['zsh','fish']:
			# Check options are ignored correctly
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--with-store=.", "http:"], 3))
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "http:", "--with-store=."], 2))

			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--with-store", ".", "http:"], 4))
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "http:", "--with-store", "."], 2))

			# Version completion
			self.assertEqual('filter 1.2\nfilter 1.5\n', complete(["select", "--before", "", "http://example.com/foo"], 3))
			self.assertEqual('filter 1.2\nfilter 1.5\n', complete(["select", "--version", "", "http://example.com/foo"], 3))
			self.assertEqual('filter 1.2..!1.2\nfilter 1.2..!1.5\n', complete(["select", "--version", "1.2..", "http://example.com/foo"], 3))

			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--version-for", "http:", "", ], 3))
			self.assertEqual('filter 1.2\nfilter 1.5\n', complete(["select", "--version-for", "http://example.com/foo", "", ], 4))

			# -- before argument
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--", "http:"], 3))
Example #49
    def testCompletion(self):
        shell = None
        complete = lambda *a: self.complete(*a, shell=shell)
        for shell in ['zsh', 'fish']:
            assert 'select\n' in complete(["s"], 1)
            assert 'select\n' in complete([], 1)
            assert 'select\n' in complete(["", "bar"], 1)

            assert '' == complete(["", "bar"], 2)
            assert '' == complete(["unknown", "bar"], 2)
            #self.assertEqual('', complete(["--", "s"], 2))

            assert '--help\n' in complete(["-"], 1)
            assert '--help\n' in complete(["--"], 1)
            assert '--help\n' in complete(["--h"], 1)
            assert '-h\n' in complete(["-h"], 1)
            assert '-hv\n' in complete(["-hv"], 1)
            assert '' == complete(["-hi"], 1)

            #assert '--message' not in complete(["--m"], 1)
            assert '--message' in complete(["--m", "select"], 1)
            assert '--message' in complete(["select", "--m"], 2)

            assert '--help' in complete(["select", "foo", "--h"], 3)
            assert '--help' not in complete(["run", "foo", "--h"], 3)
            #assert '--help' not in complete(["select", "--version", "--h"], 3)

            # Fall back to file completion for the program's arguments
            self.assertEqual('file\n', complete(["run", "foo", ""], 3))

            # Option value completion
            assert 'file\n' in complete(["select", "--with-store"], 3)
            assert 'Linux\n' in complete(["select", "--os"], 3)
            assert 'x86_64\n' in complete(["select", "--cpu"], 3)
            assert 'sha256new\n' in complete(["digest", "--algorithm"], 3)

        # Option=value complete
        for shell in ['zsh', 'fish']:
            assert 'file\n' in complete(["select", "--with-store="], 2)
            assert 'filter --cpu=x86_64\n' in complete(["select", "--cpu="], 2)
        for shell in ['bash']:
            assert 'file\n' in complete(["select", "--with-store", "="], 3)
            assert 'file\n' in complete(["select", "--with-store", "=", "foo"],
                                        4)
            assert 'filter x86_64 \n' in complete(["select", "--cpu", "="], 3)

        from zeroinstall.support import basedir
        from zeroinstall.injector.namespaces import config_site
        d = basedir.save_cache_path(config_site, "interfaces")
        with open(os.path.join(d, model.escape('http://example.com/foo')),
                  'wb') as stream:
            stream.write(
                b"<?xml version='1.0'?>"
                b"<interface uri='http://example.com/foo' xmlns='http://zero-install.sourceforge.net/2004/injector/interface'>"
                b"<name>-</name><summary>-</summary>"
                b"<implementation version='1.2' id='12'/>"
                b"<implementation version='1.5' id='15'/>"
                b"</interface>")

        for shell in ['bash']:
            assert 'filter select \n' in complete(["sel"], 1)
            self.assertEqual('prefix http://example.com/\nfile\n',
                             complete(["select", "ht"], 2))
            self.assertEqual('prefix //example.com/\nfile\n',
                             complete(["select", "http:"], 2))
            self.assertEqual('prefix //example.com/\nfile\n',
                             complete(["select", "http:/"], 2))
            self.assertEqual('filter //example.com/foo \n',
                             complete(["select", "http://example.com/"], 2))

        for shell in ['zsh', 'fish']:
            # Check options are ignored correctly
            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "--with-store=.", "http:"], 3))
            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "http:", "--with-store=."], 2))

            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "--with-store", ".", "http:"], 4))
            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "http:", "--with-store", "."], 2))

            # Version completion
            self.assertEqual(
                'filter 1.2\nfilter 1.5\n',
                complete(["select", "--before", "", "http://example.com/foo"],
                         3))
            self.assertEqual(
                'filter 1.2\nfilter 1.5\n',
                complete(["select", "--version", "", "http://example.com/foo"],
                         3))
            self.assertEqual(
                'filter 1.2..!1.2\nfilter 1.2..!1.5\n',
                complete(
                    ["select", "--version", "1.2..", "http://example.com/foo"],
                    3))

            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete([
                    "select",
                    "--version-for",
                    "http:",
                    "",
                ], 3))
            self.assertEqual(
                'filter 1.2\nfilter 1.5\n',
                complete([
                    "select",
                    "--version-for",
                    "http://example.com/foo",
                    "",
                ], 4))

            # -- before argument
            self.assertEqual('prefix http://example.com/\nfile\n',
                             complete(["select", "--", "http:"], 3))
Beispiel #50
0
		def check(str):
			self.assertEqual(str, model.unescape(model.escape(str)))
			self.assertEqual(str, model.unescape(model._pretty_escape(str)))
			self.assertEqual(str,
				escaping.ununderscore_escape(escaping.underscore_escape(str)))
Beispiel #51
0
    def _populate_model(self):
        # Find cached implementations

        unowned = {}  # Impl ID -> Store
        duplicates = []  # TODO

        for s in self.iface_cache.stores.stores:
            if os.path.isdir(s.dir):
                for id in os.listdir(s.dir):
                    if id in unowned:
                        duplicates.append(id)
                    unowned[id] = s

        ok_interfaces = []
        error_interfaces = []

        # Look through cached interfaces for implementation owners
        all = self.iface_cache.list_all_interfaces()
        all.sort()
        for uri in all:
            iface_size = 0
            try:
                if os.path.isabs(uri):
                    cached_iface = uri
                else:
                    cached_iface = basedir.load_first_cache(namespaces.config_site, "interfaces", model.escape(uri))
                user_overrides = basedir.load_first_config(
                    namespaces.config_site, namespaces.config_prog, "user_overrides", model.escape(uri)
                )

                iface_size = size_if_exists(cached_iface) + size_if_exists(user_overrides)
                iface = self.iface_cache.get_interface(uri)
            except Exception as ex:
                error_interfaces.append((uri, str(ex), iface_size))
            else:
                cached_iface = ValidInterface(iface, iface_size)
                for impl in iface.implementations.values():
                    if impl.local_path:
                        cached_iface.in_cache.append(LocalImplementation(impl))
                    if impl.id in unowned:
                        cached_dir = unowned[impl.id].dir
                        impl_path = os.path.join(cached_dir, impl.id)
                        impl_size = get_size(impl_path)
                        cached_iface.in_cache.append(KnownImplementation(cached_iface, cached_dir, impl, impl_size))
                        del unowned[impl.id]
                cached_iface.in_cache.sort()
                ok_interfaces.append(cached_iface)
Beispiel #52
0
	def _populate_model(self):
		# Find cached implementations

		unowned = {}	# Impl ID -> Store
		duplicates = [] # TODO

		for s in self.iface_cache.stores.stores:
			if os.path.isdir(s.dir):
				for id in os.listdir(s.dir):
					if id in unowned:
						duplicates.append(id)
					unowned[id] = s

		ok_feeds = []
		error_feeds = []

		# Look through cached feeds for implementation owners
		all_interfaces = self.iface_cache.list_all_interfaces()
		all_feeds = {}
		for uri in all_interfaces:
			try:
				iface = self.iface_cache.get_interface(uri)
			except Exception as ex:
				error_feeds.append((uri, str(ex), 0))
			else:
				all_feeds.update(self.iface_cache.get_feeds(iface))

		for url, feed in all_feeds.items():
			if not feed: continue
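			# Yield between feeds so the caller can drive this method incrementally
			# (e.g. to keep a GUI responsive while the cache is being scanned).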
			yield
			feed_size = 0
			try:
				if url != feed.url:
					# (e.g. for .new feeds)
					raise Exception('Incorrect URL for feed (%s vs %s)' % (url, feed.url))

				if os.path.isabs(url):
					cached_feed = url
					feed_type = LocalFeed
				else:
					feed_type = RemoteFeed
					cached_feed = basedir.load_first_cache(namespaces.config_site,
							'interfaces', model.escape(url))
				user_overrides = basedir.load_first_config(namespaces.config_site,
							namespaces.config_prog,
							'interfaces', model._pretty_escape(url))

				feed_size = size_if_exists(cached_feed) + size_if_exists(user_overrides)
			except Exception as ex:
				error_feeds.append((url, str(ex), feed_size))
			else:
				cached_feed = feed_type(feed, feed_size)
				for impl in feed.implementations.values():
					if impl.local_path:
						cached_feed.in_cache.append(LocalImplementation(impl))
					for digest in impl.digests:
						if digest in unowned:
							cached_dir = unowned[digest].dir
							impl_path = os.path.join(cached_dir, digest)
							impl_size = get_size(impl_path)
							cached_feed.in_cache.append(KnownImplementation(cached_feed, cached_dir, impl, impl_size, digest))
							del unowned[digest]
				cached_feed.in_cache.sort()
				ok_feeds.append(cached_feed)

		if error_feeds:
			iter = SECTION_INVALID_INTERFACES.append_to(self.raw_model)
			for uri, ex, size in error_feeds:
				item = InvalidFeed(uri, ex, size)
				item.append_to(self.raw_model, iter)

		unowned_sizes = []
		local_dir = os.path.join(basedir.xdg_cache_home, '0install.net', 'implementations')
		for id in unowned:
			if unowned[id].dir == local_dir:
				impl = UnusedImplementation(local_dir, id)
				unowned_sizes.append((impl.size, impl))
		if unowned_sizes:
			iter = SECTION_UNOWNED_IMPLEMENTATIONS.append_to(self.raw_model)
			for size, item in unowned_sizes:
				item.append_to(self.raw_model, iter)

		if ok_feeds:
			iter = SECTION_INTERFACES.append_to(self.raw_model)
			for item in ok_feeds:
				yield
				item.append_to(self.raw_model, iter)
		self._update_sizes()
Beispiel #53
0
	def do_install(self, archive_stream, progress_bar, archive_offset):
		# Step 1. Import GPG keys

		# Maybe GPG has never been run before. Let it initialise, or we'll get an error code
		# from the first import... (ignore return value here)
		subprocess.call([get_gpg(), '--check-trustdb', '-q'])

		key_dir = os.path.join(mydir, 'keys')
		for key in os.listdir(key_dir):
			check_call([get_gpg(), '--import', '-q', os.path.join(key_dir, key)])

		# Step 2. Import feeds and trust their signing keys
		for root, dirs, files in os.walk(os.path.join(mydir, 'feeds')):
			if 'latest.xml' in files:
				feed_path = os.path.join(root, 'latest.xml')
				icon_path = os.path.join(root, 'icon.png')

				# Get URI
				feed_stream = file(feed_path)
				doc = qdom.parse(feed_stream)
				uri = doc.getAttribute('uri')
				assert uri, "Missing 'uri' attribute on root element in '%s'" % feed_path
				domain = trust.domain_from_url(uri)

				feed_stream.seek(0)
				stream, sigs = gpg.check_stream(feed_stream)
				for s in sigs:
					if not trust.trust_db.is_trusted(s.fingerprint, domain):
						print "Adding key %s to trusted list for %s" % (s.fingerprint, domain)
						trust.trust_db.trust_key(s.fingerprint, domain)
				oldest_sig = min([s.get_timestamp() for s in sigs])
				try:
					config.iface_cache.update_feed_from_network(uri, stream.read(), oldest_sig)
				except iface_cache.ReplayAttack:
					# OK, the user has a newer copy already
					pass
				if feed_stream != stream:
					feed_stream.close()
				stream.close()

				if os.path.exists(icon_path):
					icons_cache = basedir.save_cache_path(namespaces.config_site, 'interface_icons')
					icon_file = os.path.join(icons_cache, model.escape(uri))
					if not os.path.exists(icon_file):
						shutil.copyfile(icon_path, icon_file)

		# Step 3. Solve to find out which implementations we actually need
		archive_stream.seek(archive_offset)

		extract_impls = {}	# Impls we need but which are compressed (ID -> Impl)
		tmp = tempfile.mkdtemp(prefix = '0export-')
		try:
			# Create a "fake store" with the implementation in the archive
			archive = tarfile.open(name=archive_stream.name, mode='r|', fileobj=archive_stream)
			fake_store = FakeStore()
			for tarmember in archive:
				if tarmember.name.startswith('implementations'):
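					# strip the archive suffix (e.g. ".tar.bz2") to recover the digest ID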
					impl = os.path.basename(tarmember.name).split('.')[0]
					fake_store.impls.add(impl)

			bootstrap_store = zerostore.Store(os.path.join(mydir, 'implementations'))
			stores = config.stores

			toplevel_uris = [uri.strip() for uri in file(os.path.join(mydir, 'toplevel_uris'))]
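			# "@ZEROINSTALL_URI@" is presumably a placeholder substituted with the
			# real Zero Install feed URI when this script is packaged.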
			ZEROINSTALL_URI = "@ZEROINSTALL_URI@"
			for uri in [ZEROINSTALL_URI] + toplevel_uris:
				# This is so the solver treats versions in the setup archive as 'cached',
				# meaning that it will prefer using them to doing a download
				stores.stores.append(bootstrap_store)
				stores.stores.append(fake_store)

				# Shouldn't need to download anything, but we might not have all feeds
				r = requirements.Requirements(uri)
				d = driver.Driver(config = config, requirements = r)
				config.network_use = model.network_minimal
				download_feeds = d.solve_with_downloads()
				h.wait_for_blocker(download_feeds)
				assert d.solver.ready, d.solver.get_failure_reason()

				# Add anything chosen from the setup store to the main store
				stores.stores.remove(fake_store)
				stores.stores.remove(bootstrap_store)
				for iface, impl in d.get_uncached_implementations():
					print >>sys.stderr, "Need to import", impl
					if impl.id in fake_store.impls:
						# Delay extraction
						extract_impls[impl.id] = impl
					else:
						impl_src = os.path.join(mydir, 'implementations', impl.id)

						if os.path.isdir(impl_src):
							stores.add_dir_to_cache(impl.id, impl_src)
						else:
							print >>sys.stderr, "Required impl %s (for %s) not present" % (impl, iface)

				# Remember where we copied 0launch to, because we'll need it after
				# the temporary directory is deleted.
				if uri == ZEROINSTALL_URI:
					global copied_0launch_in_cache
					impl = d.solver.selections.selections[uri]
					if not impl.id.startswith('package:'):
						copied_0launch_in_cache = impl.get_path(stores = config.stores)
					# (else we selected the distribution version of Zero Install)
		finally:
			shutil.rmtree(tmp)

		# Count total number of bytes to extract
		extract_total = 0
		for impl in extract_impls.values():
			impl_info = archive.getmember('implementations/' + impl.id + '.tar.bz2')
			extract_total += impl_info.size

		self.sent = 0

		# Actually extract+import implementations in archive
		archive_stream.seek(archive_offset)
		archive = tarfile.open(name=archive_stream.name, mode='r|',
				fileobj=archive_stream)

		for tarmember in archive:
			if not tarmember.name.startswith('implementations'):
				continue
			impl_id = tarmember.name.split('/')[1].split('.')[0]
			if impl_id not in extract_impls:
				print "Skip", impl_id
				continue
			print "Extracting", impl_id
			tmp = tempfile.mkdtemp(prefix = '0export-')
			try:
				impl_stream = archive.extractfile(tarmember)
				self.child = subprocess.Popen('bunzip2|tar xf -', shell = True, stdin = subprocess.PIPE, cwd = tmp)
				mainloop = gobject.MainLoop(gobject.main_context_default())

				def pipe_ready(src, cond):
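					# Feed the compressed data to the bunzip2|tar child in 4 KB chunks,
					# updating the progress bar; returning False removes the IO watch.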
					data = impl_stream.read(4096)
					if not data:
						mainloop.quit()
						self.child.stdin.close()
						return False
					self.sent += len(data)
					if progress_bar:
						progress_bar.set_fraction(float(self.sent) / extract_total)
					self.child.stdin.write(data)
					return True
				gobject.io_add_watch(self.child.stdin, gobject.IO_OUT | gobject.IO_HUP, pipe_ready, priority = gobject.PRIORITY_LOW)

				mainloop.run()

				self.child.wait()
				if self.child.returncode:
					raise Exception("Failed to unpack archive (code %d)" % self.child.returncode)

				stores.add_dir_to_cache(impl_id, tmp)

			finally:
				shutil.rmtree(tmp)

		return toplevel_uris