Example #1
	def testList(self):
		iface_cache = self.config.iface_cache
		self.assertEqual([], iface_cache.list_all_interfaces())
		iface_dir = basedir.save_cache_path(config_site, 'interfaces')
		open(os.path.join(iface_dir, 'http%3a%2f%2ffoo'), 'w').close()
		self.assertEqual(['http://foo'],
				iface_cache.list_all_interfaces())
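A note on the call all of these examples share: basedir.save_cache_path() joins its arguments under the user's cache root ($XDG_CACHE_HOME, ~/.cache by default), creates the directory if it does not exist yet, and returns its path. The minimal sketch below is not taken from any of the listed projects; it only assumes the zeroinstall layout visible in the examples (config_site is '0install.net').

import os
from zeroinstall.support import basedir
from zeroinstall.injector.namespaces import config_site   # '0install.net'

# save_cache_path() creates the directory (if needed) and returns its path.
iface_dir = basedir.save_cache_path(config_site, 'interfaces')
assert os.path.isdir(iface_dir)
print(iface_dir)   # e.g. /home/alice/.cache/0install.net/interfaces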
Example #2
    def do_exec_binding(self, binding, iface):
        assert iface is not None
        name = binding.name
        if '/' in name or name.startswith('.') or "'" in name:
            raise SafeException("Invalid <executable> name '%s'" % name)
        exec_dir = basedir.save_cache_path(namespaces.config_site,
                                           namespaces.config_prog,
                                           'executables', name)
        exec_path = os.path.join(exec_dir, name)

        if not self._checked_runenv:
            self._check_runenv()

        if not os.path.exists(exec_path):
            # Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
            os.symlink('../../runenv.py', exec_path)
            os.chmod(exec_dir, 0o500)

        if binding.in_path:
            path = os.environ[
                "PATH"] = exec_dir + os.pathsep + os.environ["PATH"]
            info("PATH=%s", path)
        else:
            os.environ[name] = exec_path
            info("%s=%s", name, exec_path)

        import json
        args = self.build_command(iface, binding.command)
        os.environ["0install-runenv-" + name] = json.dumps(args)
Example #3
	def testTimes(self):
		iface_cache = self.config.iface_cache
		with tempfile.TemporaryFile() as stream:
			stream.write(data.thomas_key)
			stream.seek(0)
			gpg.import_key(stream)

		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, model.escape('http://foo'))

		with open(cached, 'wb') as stream:
			stream.write(data.foo_signed_xml)

		signed = iface_cache._get_signature_date('http://foo')
		assert signed == None

		trust.trust_db.trust_key(
			'92429807C9853C0744A68B9AAE07828059A53CC1')

		signed = iface_cache._get_signature_date('http://foo')
		self.assertEqual(1154850229, signed)

		with open(cached, 'w+b') as stream:
			stream.seek(0)
			stream.write(b'Hello')

		# When the signature is invalid, we just return None.
		# This is because versions < 0.22 used to corrupt the signature
		# by adding an attribute to the XML
		signed = iface_cache._get_signature_date('http://foo')
		assert signed == None
Example #4
	def cache_iface(self, name, data):
		cached_ifaces = basedir.save_cache_path('0install.net',
							'interfaces')

		f = open(os.path.join(cached_ifaces, model.escape(name)), 'w')
		f.write(data)
		f.close()
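The reverse of cache_iface() is a lookup in the same location. A hedged sketch, assuming basedir also provides load_first_cache() (the read-side counterpart of save_cache_path(), returning the first matching path or None); the load_cached_iface() helper itself is hypothetical:

from zeroinstall.support import basedir
from zeroinstall.injector import model

def load_cached_iface(name):
	# load_first_cache() searches the cache directories without creating anything.
	path = basedir.load_first_cache('0install.net', 'interfaces', model.escape(name))
	if path is None:
		return None		# feed not cached yet
	with open(path) as stream:
		return stream.read()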
Example #5
	def do_exec_binding(self, binding, iface):
		assert iface is not None
		name = binding.name
		if '/' in name or name.startswith('.') or "'" in name:
			raise SafeException("Invalid <executable> name '%s'" % name)
		exec_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog, 'executables', name)
		exec_path = os.path.join(exec_dir, name)

		if not self._checked_runenv:
			self._check_runenv()

		if not os.path.exists(exec_path):
			# Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
			os.symlink('../../runenv.py', exec_path)
			os.chmod(exec_dir, 0o500)

		if binding.in_path:
			path = os.environ["PATH"] = exec_dir + os.pathsep + os.environ["PATH"]
			info("PATH=%s", path)
		else:
			os.environ[name] = exec_path
			info("%s=%s", name, exec_path)

		import json
		args = self.build_command(iface, binding.command)
		os.environ["0install-runenv-" + name] = json.dumps(args)
Example #6
	def cache_iface(self, name, data):
		cached_ifaces = basedir.save_cache_path('0install.net',
							'interfaces')

		f = open(os.path.join(cached_ifaces, model.escape(name)), 'w')
		f.write(data)
		f.close()
Example #7
    def _import_new_feed(self, feed_url, new_xml, modified_time):
        """Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
        assert modified_time
        assert isinstance(new_xml, bytes), repr(new_xml)

        upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
        cached = os.path.join(upstream_dir, escape(feed_url))

        old_modified = None
        if os.path.exists(cached):
            with open(cached, 'rb') as stream:
                old_xml = stream.read()
            if old_xml == new_xml:
                logger.debug(_("No change"))
                # Update in-memory copy, in case someone else updated the disk copy
                self.get_feed(feed_url, force=True)
                return
            old_modified = int(os.stat(cached).st_mtime)

        # Do we need to write this temporary file now?
        try:
            with open(cached + '.new', 'wb') as stream:
                stream.write(new_xml)
            os.utime(cached + '.new', (modified_time, modified_time))
            new_mtime = reader.check_readable(feed_url, cached + '.new')
            assert new_mtime == modified_time

            old_modified = self._get_signature_date(feed_url) or old_modified

            if old_modified:
                if new_mtime < old_modified:
                    raise ReplayAttack(
                        _("New feed's modification time is "
                          "before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                          "Refusing update.") % {
                              'iface': feed_url,
                              'old_time': _pretty_time(old_modified),
                              'new_time': _pretty_time(new_mtime)
                          })
                if new_mtime == old_modified:
                    # You used to have to update the modification time manually.
                    # Now it comes from the signature, this check isn't useful
                    # and often causes problems when the stored format changes
                    # (e.g., when we stopped writing last-modified attributes)
                    pass
                    #raise SafeException("Interface has changed, but modification time "
                    #		    "hasn't! Refusing update.")
        except:
            os.unlink(cached + '.new')
            raise

        portable_rename(cached + '.new', cached)
        logger.debug(_("Saved as %s") % cached)

        self.get_feed(feed_url, force=True)
Example #8
	def do_exec_binding(self, binding, iface):
		assert iface is not None
		name = binding.name
		if '/' in name or name.startswith('.') or "'" in name:
			raise SafeException("Invalid <executable> name '%s'" % name)
		exec_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog, 'executables', name)
		exec_path = os.path.join(exec_dir, name + ".exe" if os.name == "nt" else name)

		if not self._checked_runenv:
			self._check_runenv()

		if not os.path.exists(exec_path):
			if os.name == "nt":
				# Copy runenv.cli.template to ~/.cache/0install.net/injector/executables/$name/$name
				import shutil
				shutil.copyfile(os.path.join(os.path.dirname(__file__), "runenv.cli.template"), exec_path)
			else:
				# Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
				os.symlink('../../runenv.py', exec_path)
				os.chmod(exec_dir, 0o500)

		if binding.in_path:
			path = os.environ["PATH"] = exec_dir + os.pathsep + os.environ["PATH"]
			logger.info("PATH=%s", path)
		else:
			os.environ[name] = exec_path
			logger.info("%s=%s", name, exec_path)

		args = self.build_command(iface, binding.command)
		if os.name == "nt":
			os.environ["0install-runenv-file-" + name + ".exe"] = args[0]
			os.environ["0install-runenv-args-" + name + ".exe"] = support.windows_args_escape(args[1:])
		else:
			import json
			os.environ["0install-runenv-" + name] = json.dumps(args)
Example #9
def save_feed(feed):
    """Save information about a feed. Currently, this is the last_checked time and any user-set stability ratings.
	@since: 0.49"""
    feeds = basedir.save_config_path(config_site, config_prog, 'feeds')

    impl = minidom.getDOMImplementation()
    doc = impl.createDocument(XMLNS_IFACE, 'feed-preferences', None)
    root = doc.documentElement
    root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns', XMLNS_IFACE)

    if feed.last_checked:
        root.setAttribute('last-checked', str(feed.last_checked))

    impls = feed.implementations.values()
    impls.sort()
    for impl in impls:
        if impl.user_stability:
            node = doc.createElementNS(XMLNS_IFACE, 'implementation')
            root.appendChild(node)
            node.setAttribute('user-stability', str(impl.user_stability))
            node.setAttribute('id', impl.id)

    _atomic_save(doc, feeds, feed.url)

    # Keep interfaces/ directory's mtime as a global timestamp to detect
    # if cached solutions are stale
    ts = time.time()
    os.utime(basedir.save_cache_path(config_site, 'interfaces'), (ts, ts))
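The final utime() call above bumps the interfaces/ directory mtime so it can serve as a cheap "something changed" marker. A hedged sketch of how a caller might use it to invalidate a cached solve result (is_solution_stale() is a hypothetical helper, not zeroinstall API):

import os
from zeroinstall.support import basedir
from zeroinstall.injector.namespaces import config_site

def is_solution_stale(solution_mtime):
	# Compare a stored solution's timestamp with the marker updated by save_feed().
	iface_dir = basedir.save_cache_path(config_site, 'interfaces')
	return os.stat(iface_dir).st_mtime > solution_mtime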
Example #10
File: run.py, Project: dabrahams/0install
	def do_exec_binding(self, binding, iface):
		assert iface is not None
		name = binding.name
		if '/' in name or name.startswith('.') or "'" in name:
			raise SafeException("Invalid <executable> name '%s'" % name)
		exec_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog, 'executables', name)
		exec_path = os.path.join(exec_dir, name + ".exe" if os.name == "nt" else name)

		if not self._checked_runenv:
			self._check_runenv()

		if not os.path.exists(exec_path):
			if os.name == "nt":
				# Copy runenv.cli.template to ~/.cache/0install.net/injector/executables/$name/$name
				import shutil
				shutil.copyfile(os.environ['ZEROINSTALL_CLI_TEMPLATE'], exec_path)
			else:
				# Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
				os.symlink('../../runenv.py', exec_path)
				os.chmod(exec_dir, 0o500)

		if binding.in_path:
			path = os.environ["PATH"] = exec_dir + os.pathsep + os.environ["PATH"]
			logger.info("PATH=%s", path)
		else:
			os.environ[name] = exec_path
			logger.info("%s=%s", name, exec_path)

		args = self.build_command(iface, binding.command)
		if os.name == "nt":
			os.environ["0install-runenv-file-" + name + ".exe"] = args[0]
			os.environ["0install-runenv-args-" + name + ".exe"] = support.windows_args_escape(args[1:])
		else:
			import json
			os.environ["0install-runenv-" + name] = json.dumps(args)
Example #11
	def testXMLupdate(self):
		iface_cache = self.config.iface_cache
		trust.trust_db.trust_key(
			'92429807C9853C0744A68B9AAE07828059A53CC1')
		with tempfile.TemporaryFile() as stream:
			stream.write(data.thomas_key)
			stream.seek(0)
			gpg.import_key(stream)

		iface = iface_cache.get_interface('http://foo')
		with tempfile.TemporaryFile() as src:
			src.write(data.foo_signed_xml)
			src.seek(0)
			pending = PendingFeed(iface.uri, src)
			assert iface_cache.update_feed_if_trusted(iface.uri, pending.sigs, pending.new_xml)

		iface_cache.__init__()
		feed = iface_cache.get_feed('http://foo')
		assert feed.last_modified == 1154850229

		# mtimes are unreliable because copying often changes them -
		# check that we extract the time from the signature when upgrading
		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, model.escape(feed.url))
		os.utime(cached, None)

		iface_cache.__init__()
		feed = iface_cache.get_feed('http://foo')
		assert feed.last_modified > 1154850229

		with tempfile.TemporaryFile() as src:
			src.write(data.new_foo_signed_xml)
			src.seek(0)

			pending = PendingFeed(feed.url, src)

			old_stdout = sys.stdout
			sys.stdout = StringIO()
			try:
				assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs, pending.new_xml, dry_run = True)
			finally:
				sys.stdout = old_stdout

			assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs, pending.new_xml)

		# Can't 'update' to an older copy
		with tempfile.TemporaryFile() as src:
			src.write(data.foo_signed_xml)
			src.seek(0)
			try:
				pending = PendingFeed(feed.url, src)
				assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs, pending.new_xml)

				assert 0
			except model.SafeException:
				pass
Example #12
	def _import_new_feed(self, feed_url, new_xml, modified_time):
		"""Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
		assert modified_time
		assert isinstance(new_xml, bytes), repr(new_xml)

		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, escape(feed_url))

		old_modified = None
		if os.path.exists(cached):
			with open(cached, 'rb') as stream:
				old_xml = stream.read()
			if old_xml == new_xml:
				logger.debug(_("No change"))
				# Update in-memory copy, in case someone else updated the disk copy
				self.get_feed(feed_url, force = True)
				return
			old_modified = int(os.stat(cached).st_mtime)

		# Do we need to write this temporary file now?
		try:
			with open(cached + '.new', 'wb') as stream:
				stream.write(new_xml)
			os.utime(cached + '.new', (modified_time, modified_time))
			new_mtime = reader.check_readable(feed_url, cached + '.new')
			assert new_mtime == modified_time

			old_modified = self._get_signature_date(feed_url) or old_modified

			if old_modified:
				if new_mtime < old_modified:
					raise ReplayAttack(_("New feed's modification time is "
						"before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
						"Refusing update.")
						% {'iface': feed_url, 'old_time': _pretty_time(old_modified), 'new_time': _pretty_time(new_mtime)})
				if new_mtime == old_modified:
					# You used to have to update the modification time manually.
					# Now it comes from the signature, this check isn't useful
					# and often causes problems when the stored format changes
					# (e.g., when we stopped writing last-modified attributes)
					pass
					#raise SafeException("Interface has changed, but modification time "
					#		    "hasn't! Refusing update.")
		except:
			os.unlink(cached + '.new')
			raise

		portable_rename(cached + '.new', cached)
		logger.debug(_("Saved as %s") % cached)

		self.get_feed(feed_url, force = True)
Example #13
	def mark_as_checking(self, url):
		"""Touch a 'last_check_attempt_timestamp' file for this feed.
		If url is a local path, nothing happens.
		This prevents us from repeatedly trying to download a failing feed many
		times in a short period."""
		if os.path.isabs(url):
			return
		feeds_dir = basedir.save_cache_path(config_site, config_prog, 'last-check-attempt')
		timestamp_path = os.path.join(feeds_dir, model._pretty_escape(url))
		fd = os.open(timestamp_path, os.O_WRONLY | os.O_CREAT, 0o644)
		os.close(fd)
		os.utime(timestamp_path, None)	# In case file already exists
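mark_as_checking() only ever touches the timestamp file; deciding whether a retry is due is just an os.stat() on the same path. A hedged sketch of that read side (seconds_since_last_check_attempt() is a hypothetical helper, not zeroinstall API):

import os, time
from zeroinstall.support import basedir
from zeroinstall.injector import model
from zeroinstall.injector.namespaces import config_site, config_prog

def seconds_since_last_check_attempt(url):
	feeds_dir = basedir.save_cache_path(config_site, config_prog, 'last-check-attempt')
	timestamp_path = os.path.join(feeds_dir, model._pretty_escape(url))
	if not os.path.exists(timestamp_path):
		return None		# no check attempt recorded yet
	return time.time() - os.path.getmtime(timestamp_path)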
Example #14
    def _import_new_interface(self, interface, new_xml, modified_time):
        """Write new_xml into the cache.
		@param interface: updated once the new XML is written
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
        assert modified_time

        upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
        cached = os.path.join(upstream_dir, escape(interface.uri))

        if os.path.exists(cached):
            old_xml = open(cached).read()
            if old_xml == new_xml:
                debug(_("No change"))
                reader.update_from_cache(interface)
                return

        stream = open(cached + '.new', 'w')
        stream.write(new_xml)
        stream.close()
        os.utime(cached + '.new', (modified_time, modified_time))
        new_mtime = reader.check_readable(interface.uri, cached + '.new')
        assert new_mtime == modified_time

        old_modified = self._get_signature_date(interface.uri)
        if old_modified is None:
            old_modified = interface.last_modified

        if old_modified:
            if new_mtime < old_modified:
                os.unlink(cached + '.new')
                raise ReplayAttack(
                    _("New interface's modification time is "
                      "before old version!\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                      "Refusing update.") % {
                          'old_time': _pretty_time(old_modified),
                          'new_time': _pretty_time(new_mtime)
                      })
            if new_mtime == old_modified:
                # You used to have to update the modification time manually.
                # Now it comes from the signature, this check isn't useful
                # and often causes problems when the stored format changes
                # (e.g., when we stopped writing last-modified attributes)
                pass
                #raise SafeException("Interface has changed, but modification time "
                #		    "hasn't! Refusing update.")
        os.rename(cached + '.new', cached)
        debug(_("Saved as %s") % cached)

        reader.update_from_cache(interface)
Example #15
    def mark_as_checking(self, url):
        """Touch a 'last_check_attempt_timestamp' file for this feed.
		If url is a local path, nothing happens.
		This prevents us from repeatedly trying to download a failing feed many
		times in a short period."""
        if url.startswith('/'):
            return
        feeds_dir = basedir.save_cache_path(config_site, config_prog,
                                            'last-check-attempt')
        timestamp_path = os.path.join(feeds_dir, model._pretty_escape(url))
        fd = os.open(timestamp_path, os.O_WRONLY | os.O_CREAT, 0o644)
        os.close(fd)
        os.utime(timestamp_path, None)  # In case file already exists
Example #16
		def download_and_add_icon():
			stream = dl.tempfile
			yield dl.downloaded
			try:
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')
				icon_file = open(os.path.join(icons_cache, escape(interface.uri)), 'wb')
				shutil.copyfileobj(stream, icon_file)
			except Exception as ex:
				self.handler.report_error(ex)
Example #17
    def testXMLupdate(self):
        iface_cache = self.config.iface_cache
        trust.trust_db.trust_key('92429807C9853C0744A68B9AAE07828059A53CC1')
        stream = tempfile.TemporaryFile()
        stream.write(data.thomas_key)
        stream.seek(0)
        gpg.import_key(stream)

        iface = iface_cache.get_interface('http://foo')
        src = tempfile.TemporaryFile()
        src.write(data.foo_signed_xml)
        src.seek(0)
        pending = PendingFeed(iface.uri, src)
        assert iface_cache.update_feed_if_trusted(iface.uri, pending.sigs,
                                                  pending.new_xml)

        iface_cache.__init__()
        feed = iface_cache.get_feed('http://foo')
        assert feed.last_modified == 1154850229

        # mtimes are unreliable because copying often changes them -
        # check that we extract the time from the signature when upgrading
        upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
        cached = os.path.join(upstream_dir, model.escape(feed.url))
        os.utime(cached, None)

        iface_cache.__init__()
        feed = iface_cache.get_feed('http://foo')
        assert feed.last_modified > 1154850229

        src = tempfile.TemporaryFile()
        src.write(data.new_foo_signed_xml)
        src.seek(0)

        pending = PendingFeed(feed.url, src)
        assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs,
                                                  pending.new_xml)

        # Can't 'update' to an older copy
        src = tempfile.TemporaryFile()
        src.write(data.foo_signed_xml)
        src.seek(0)
        try:
            pending = PendingFeed(feed.url, src)
            assert iface_cache.update_feed_if_trusted(feed.url, pending.sigs,
                                                      pending.new_xml)

            assert 0
        except model.SafeException:
            pass
Example #18
    def __init__(self, cache_leaf, source, format):
        """Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
        self.cache_leaf = cache_leaf
        self.source = source
        self.format = format
        self.cache_dir = basedir.save_cache_path(namespaces.config_site,
                                                 namespaces.config_prog)
        self.cached_for = {}  # Attributes of source when cache was created
        try:
            self._load_cache()
        except Exception as ex:
            info(_("Failed to load cache (%s). Flushing..."), ex)
            self.flush()
Example #19
File: distro.py, Project: dsqmoore/0install
	def __init__(self, cache_leaf, source, format):
		"""Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
		self.cache_leaf = cache_leaf
		self.source = source
		self.format = format
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)
		self.cached_for = {}		# Attributes of source when cache was created
		try:
			self._load_cache()
		except Exception as ex:
			logger.info(_("Failed to load cache (%s). Flushing..."), ex)
			self.flush()
Example #20
        def download_and_add_icon():
            stream = dl.tempfile
            yield dl.downloaded
            try:
                tasks.check(dl.downloaded)
                if dl.unmodified: return
                stream.seek(0)

                import shutil
                icons_cache = basedir.save_cache_path(config_site,
                                                      'interface_icons')
                icon_file = open(
                    os.path.join(icons_cache, escape(interface.uri)), 'wb')
                shutil.copyfileobj(stream, icon_file)
            except Exception as ex:
                self.handler.report_error(ex)
Example #21
File: distro.py, Project: dsqmoore/0install
	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			logger.info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				logger.warning(_("Failed to regenerate distribution database cache: %s"), ex)
Example #22
    def import_feed(self, url, contents):
        """contents can be a path or an Element."""
        iface_cache = self.config.iface_cache
        iface_cache.get_interface(url)

        if isinstance(contents, qdom.Element):
            feed = model.ZeroInstallFeed(contents)
        else:
            feed = reader.load_feed(contents)

        iface_cache._feeds[url] = feed

        xml = qdom.to_UTF8(feed.feed_element)
        upstream_dir = basedir.save_cache_path(namespaces.config_site, "interfaces")
        cached = os.path.join(upstream_dir, model.escape(url))
        with open(cached, "wb") as stream:
            stream.write(xml)

        return feed
Example #23
File: fetch.py, Project: timdiels/0install
		def download_and_add_icon():
			stream = dl.tempfile
			try:
				yield dl.downloaded
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil, tempfile
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

				tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
				shutil.copyfileobj(stream, tmp_file)
				tmp_file.close()

				icon_file = os.path.join(icons_cache, escape(interface.uri))
				portable_rename(tmp_file.name, icon_file)
			finally:
				stream.close()
Example #24
		def download_and_add_icon():
			stream = dl.tempfile
			try:
				yield dl.downloaded
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil, tempfile
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

				tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
				shutil.copyfileobj(stream, tmp_file)
				tmp_file.close()

				icon_file = os.path.join(icons_cache, escape(interface.uri))
				portable_rename(tmp_file.name, icon_file)
			finally:
				stream.close()
Example #25
    def __init__(self, db_status_file):
        """@param db_status_file: update the cache when the timestamp of this file changes"""
        self._status_details = os.stat(db_status_file)

        self.versions = {}
        self.cache_dir = basedir.save_cache_path(namespaces.config_site,
                                                 namespaces.config_prog)

        try:
            self._load_cache()
        except Exception as ex:
            info(
                _("Failed to load distribution database cache (%s). Regenerating..."
                  ), ex)
            try:
                self.generate_cache()
                self._load_cache()
            except Exception as ex:
                warn(_("Failed to regenerate distribution database cache: %s"),
                     ex)
Example #26
File: run.py, Project: rammstein/0install
	def _check_runenv(self):
		# Create the runenv.py helper script under ~/.cache if missing or out-of-date
		main_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog)
		runenv = os.path.join(main_dir, 'runenv.py')
		expected_contents = "#!%s\nfrom zeroinstall.injector import _runenv; _runenv.main()\n" % sys.executable

		actual_contents = None
		if os.path.exists(runenv):
			with open(runenv) as s:
				actual_contents = s.read()

		if actual_contents != expected_contents:
			import tempfile
			tmp = tempfile.NamedTemporaryFile('w', dir = main_dir, delete = False)
			logger.info("Updating %s", runenv)
			tmp.write(expected_contents)
			tmp.close()
			os.chmod(tmp.name, 0o555)
			os.rename(tmp.name, runenv)

		self._checked_runenv = True
Example #27
	def _check_runenv(self):
		# Create the runenv.py helper script under ~/.cache if missing or out-of-date
		main_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog)
		runenv = os.path.join(main_dir, 'runenv.py')
		expected_contents = "#!%s\nfrom zeroinstall.injector import _runenv; _runenv.main()\n" % sys.executable

		actual_contents = None
		if os.path.exists(runenv):
			with open(runenv) as s:
				actual_contents = s.read()

		if actual_contents != expected_contents:
			import tempfile
			tmp = tempfile.NamedTemporaryFile('w', dir = main_dir, delete = False)
			info("Updating %s", runenv)
			tmp.write(expected_contents)
			tmp.close()
			os.chmod(tmp.name, 0o555)
			os.rename(tmp.name, runenv)

		self._checked_runenv = True
Example #28
	def testBinding(self):
		local_impl = os.path.dirname(os.path.abspath(__file__))
		tmp = tempfile.NamedTemporaryFile(mode = 'wt')
		tmp.write(
"""<?xml version="1.0" ?>
<interface
 main='testdriver.py'
 xmlns="http://zero-install.sourceforge.net/2004/injector/interface">
  <name>Bar</name>
  <summary>Bar</summary>
  <description>Bar</description>
  <group>
    <requires interface='%s'>
      <environment name='FOO_PATH' insert='.'/>
      <environment name='BAR_PATH' insert='.' default='/a:/b'/>
      <environment name='NO_PATH' value='val'/>
      <environment name='XDG_DATA_DIRS' insert='.'/>
    </requires>
    <environment name='SELF_GROUP' insert='group' mode='replace'/>
    <implementation version='1.0' id='%s'>
      <environment name='SELF_IMPL' insert='impl' mode='replace'/>
    </implementation>
  </group>
</interface>""" % (foo_iface_uri, local_impl))
		tmp.flush()
		self.cache_iface(foo_iface_uri,
"""<?xml version="1.0" ?>
<interface last-modified="0"
 uri="%s"
 xmlns="http://zero-install.sourceforge.net/2004/injector/interface">
  <name>Foo</name>
  <summary>Foo</summary>
  <description>Foo</description>
  <implementation version='1.0' id='sha1=123'/>
</interface>""" % foo_iface_uri)
		cached_impl = basedir.save_cache_path('0install.net',
							'implementations',
							'sha1=123')
		driver = Driver(requirements = Requirements(tmp.name), config = self.config)
		self.config.network_use = model.network_offline
		os.environ['FOO_PATH'] = "old"
		old, sys.stdout = sys.stdout, StringIO()
		try:
			download_and_execute(driver, ['Hello'])
		finally:
			sys.stdout = old
		self.assertEqual(cached_impl + '/.:old',
				os.environ['FOO_PATH'])
		self.assertEqual(cached_impl + '/.:/a:/b',
				os.environ['BAR_PATH'])
		self.assertEqual('val', os.environ['NO_PATH'])
		
		self.assertEqual(os.path.join(local_impl, 'group'), os.environ['SELF_GROUP'])
		self.assertEqual(os.path.join(local_impl, 'impl'), os.environ['SELF_IMPL'])

		del os.environ['FOO_PATH']
		if 'XDG_DATA_DIRS' in os.environ:
			del os.environ['XDG_DATA_DIRS']
		os.environ['BAR_PATH'] = '/old'
		old, sys.stdout = sys.stdout, StringIO()
		try:
			download_and_execute(driver, ['Hello'])
		finally:
			sys.stdout = old
		self.assertEqual(cached_impl + '/.',
				os.environ['FOO_PATH'])
		self.assertEqual(cached_impl + '/.:/old',
				os.environ['BAR_PATH'])
		self.assertEqual(cached_impl + '/.:/usr/local/share:/usr/share',
				os.environ['XDG_DATA_DIRS'])
Example #29
    def _import_new_feed(self, feed_url, new_xml, modified_time, dry_run):
        """Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@type feed_url: str
		@param new_xml: the data to write
		@type new_xml: str
		@param modified_time: when new_xml was modified
		@type modified_time: int
		@type dry_run: bool
		@raises ReplayAttack: if the new mtime is older than the current one"""
        assert modified_time
        assert isinstance(new_xml, bytes), repr(new_xml)

        upstream_dir = basedir.save_cache_path(config_site, "interfaces")
        cached = os.path.join(upstream_dir, escape(feed_url))

        old_modified = None
        if os.path.exists(cached):
            with open(cached, "rb") as stream:
                old_xml = stream.read()
            if old_xml == new_xml:
                logger.debug(_("No change"))
                # Update in-memory copy, in case someone else updated the disk copy
                self.get_feed(feed_url, force=True)
                return
            old_modified = int(os.stat(cached).st_mtime)

        if dry_run:
            print(_("[dry-run] would cache feed {url} as {cached}").format(url=feed_url, cached=cached))
            from io import BytesIO
            from zeroinstall.injector import qdom

            root = qdom.parse(BytesIO(new_xml), filter_for_version=True)
            feed = model.ZeroInstallFeed(root)
            reader.update_user_feed_overrides(feed)
            self._feeds[feed_url] = feed
            return

            # Do we need to write this temporary file now?
        try:
            with open(cached + ".new", "wb") as stream:
                stream.write(new_xml)
            os.utime(cached + ".new", (modified_time, modified_time))
            new_mtime = reader.check_readable(feed_url, cached + ".new")
            assert new_mtime == modified_time

            old_modified = self._get_signature_date(feed_url) or old_modified

            if old_modified:
                if new_mtime < old_modified:
                    raise ReplayAttack(
                        _(
                            "New feed's modification time is "
                            "before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                            "Refusing update."
                        )
                        % {
                            "iface": feed_url,
                            "old_time": _pretty_time(old_modified),
                            "new_time": _pretty_time(new_mtime),
                        }
                    )
                if new_mtime == old_modified:
                    # You used to have to update the modification time manually.
                    # Now it comes from the signature, this check isn't useful
                    # and often causes problems when the stored format changes
                    # (e.g., when we stopped writing last-modified attributes)
                    pass
                    # raise SafeException("Interface has changed, but modification time "
                    # 		    "hasn't! Refusing update.")
        except:
            os.unlink(cached + ".new")
            raise

        portable_rename(cached + ".new", cached)
        logger.debug(_("Saved as %s") % cached)

        self.get_feed(feed_url, force=True)
Example #30
    def testCompletion(self):
        shell = None
        complete = lambda *a: self.complete(*a, shell=shell)
        for shell in ['zsh', 'fish']:
            assert 'select\n' in complete(["s"], 1)
            assert 'select\n' in complete([], 1)
            assert 'select\n' in complete(["", "bar"], 1)

            assert '' == complete(["", "bar"], 2)
            assert '' == complete(["unknown", "bar"], 2)
            #self.assertEqual('', complete(["--", "s"], 2))

            assert '--help\n' in complete(["-"], 1)
            assert '--help\n' in complete(["--"], 1)
            assert '--help\n' in complete(["--h"], 1)
            assert '-h\n' in complete(["-h"], 1)
            assert '-hv\n' in complete(["-hv"], 1)
            assert '' == complete(["-hi"], 1)

            #assert '--message' not in complete(["--m"], 1)
            assert '--message' in complete(["--m", "select"], 1)
            assert '--message' in complete(["select", "--m"], 2)

            assert '--help' in complete(["select", "foo", "--h"], 3)
            assert '--help' not in complete(["run", "foo", "--h"], 3)
            #assert '--help' not in complete(["select", "--version", "--h"], 3)

            # Fall back to file completion for the program's arguments
            self.assertEqual('file\n', complete(["run", "foo", ""], 3))

            # Option value completion
            assert 'file\n' in complete(["select", "--with-store"], 3)
            assert 'Linux\n' in complete(["select", "--os"], 3)
            assert 'x86_64\n' in complete(["select", "--cpu"], 3)
            assert 'sha256new\n' in complete(["digest", "--algorithm"], 3)

        # Option=value complete
        for shell in ['zsh', 'fish']:
            assert 'file\n' in complete(["select", "--with-store="], 2)
            assert 'filter --cpu=x86_64\n' in complete(["select", "--cpu="], 2)
        for shell in ['bash']:
            assert 'file\n' in complete(["select", "--with-store", "="], 3)
            assert 'file\n' in complete(["select", "--with-store", "=", "foo"],
                                        4)
            assert 'filter x86_64 \n' in complete(["select", "--cpu", "="], 3)

        from zeroinstall.support import basedir
        from zeroinstall.injector.namespaces import config_site
        d = basedir.save_cache_path(config_site, "interfaces")
        with open(os.path.join(d, model.escape('http://example.com/foo')),
                  'wb') as stream:
            stream.write(
                b"<?xml version='1.0'?>"
                b"<interface uri='http://example.com/foo' xmlns='http://zero-install.sourceforge.net/2004/injector/interface'>"
                b"<name>-</name><summary>-</summary>"
                b"<implementation version='1.2' id='12'/>"
                b"<implementation version='1.5' id='15'/>"
                b"</interface>")

        for shell in ['bash']:
            assert 'filter select \n' in complete(["sel"], 1)
            self.assertEqual('prefix http://example.com/\nfile\n',
                             complete(["select", "ht"], 2))
            self.assertEqual('prefix //example.com/\nfile\n',
                             complete(["select", "http:"], 2))
            self.assertEqual('prefix //example.com/\nfile\n',
                             complete(["select", "http:/"], 2))
            self.assertEqual('filter //example.com/foo \n',
                             complete(["select", "http://example.com/"], 2))

        for shell in ['zsh', 'fish']:
            # Check options are ignored correctly
            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "--with-store=.", "http:"], 3))
            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "http:", "--with-store=."], 2))

            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "--with-store", ".", "http:"], 4))
            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete(["select", "http:", "--with-store", "."], 2))

            # Version completion
            self.assertEqual(
                'filter 1.2\nfilter 1.5\n',
                complete(["select", "--before", "", "http://example.com/foo"],
                         3))
            self.assertEqual(
                'filter 1.2\nfilter 1.5\n',
                complete(["select", "--version", "", "http://example.com/foo"],
                         3))
            self.assertEqual(
                'filter 1.2..!1.2\nfilter 1.2..!1.5\n',
                complete(
                    ["select", "--version", "1.2..", "http://example.com/foo"],
                    3))

            self.assertEqual(
                'prefix http://example.com/\nfile\n',
                complete([
                    "select",
                    "--version-for",
                    "http:",
                    "",
                ], 3))
            self.assertEqual(
                'filter 1.2\nfilter 1.5\n',
                complete([
                    "select",
                    "--version-for",
                    "http://example.com/foo",
                    "",
                ], 4))

            # -- before argument
            self.assertEqual('prefix http://example.com/\nfile\n',
                             complete(["select", "--", "http:"], 3))
Example #31
	def testBinding(self):
		local_impl = os.path.dirname(os.path.abspath(__file__))
		tmp = tempfile.NamedTemporaryFile(mode = 'wt')
		tmp.write(
"""<?xml version="1.0" ?>
<interface
 main='testdriver.py'
 xmlns="http://zero-install.sourceforge.net/2004/injector/interface">
  <name>Bar</name>
  <summary>Bar</summary>
  <description>Bar</description>
  <group>
    <requires interface='%s'>
      <environment name='FOO_PATH' insert='.'/>
      <environment name='BAR_PATH' insert='.' default='/a:/b'/>
      <environment name='NO_PATH' value='val'/>
      <environment name='XDG_DATA_DIRS' insert='.'/>
    </requires>
    <environment name='SELF_GROUP' insert='group' mode='replace'/>
    <implementation version='1.0' id='%s'>
      <environment name='SELF_IMPL' insert='impl' mode='replace'/>
    </implementation>
  </group>
</interface>""" % (foo_iface_uri, local_impl))
		tmp.flush()
		self.cache_iface(foo_iface_uri,
"""<?xml version="1.0" ?>
<interface last-modified="0"
 uri="%s"
 xmlns="http://zero-install.sourceforge.net/2004/injector/interface">
  <name>Foo</name>
  <summary>Foo</summary>
  <description>Foo</description>
  <implementation version='1.0' id='sha1=123'/>
</interface>""" % foo_iface_uri)
		cached_impl = basedir.save_cache_path('0install.net',
							'implementations',
							'sha1=123')
		driver = Driver(requirements = Requirements(tmp.name), config = self.config)
		self.config.network_use = model.network_offline
		os.environ['FOO_PATH'] = "old"
		old, sys.stdout = sys.stdout, StringIO()
		try:
			download_and_execute(driver, ['Hello'])
		finally:
			sys.stdout = old
		self.assertEqual(cached_impl + '/.:old',
				os.environ['FOO_PATH'])
		self.assertEqual(cached_impl + '/.:/a:/b',
				os.environ['BAR_PATH'])
		self.assertEqual('val', os.environ['NO_PATH'])
		
		self.assertEqual(os.path.join(local_impl, 'group'), os.environ['SELF_GROUP'])
		self.assertEqual(os.path.join(local_impl, 'impl'), os.environ['SELF_IMPL'])

		del os.environ['FOO_PATH']
		if 'XDG_DATA_DIRS' in os.environ:
			del os.environ['XDG_DATA_DIRS']
		os.environ['BAR_PATH'] = '/old'
		old, sys.stdout = sys.stdout, StringIO()
		try:
			download_and_execute(driver, ['Hello'])
		finally:
			sys.stdout = old
		self.assertEqual(cached_impl + '/.',
				os.environ['FOO_PATH'])
		self.assertEqual(cached_impl + '/.:/old',
				os.environ['BAR_PATH'])
		self.assertEqual(cached_impl + '/.:/usr/local/share:/usr/share',
				os.environ['XDG_DATA_DIRS'])
Example #32
File: install.py, Project: res2k/0export
	def do_install(self, archive_stream, progress_bar, archive_offset):
		# Step 1. Import GPG keys

		# Maybe GPG has never been run before. Let it initialise, or we'll get an error code
		# from the first import... (ignore return value here)
		subprocess.call([get_gpg(), '--check-trustdb', '-q'])

		key_dir = os.path.join(mydir, 'keys')
		for key in os.listdir(key_dir):
			check_call([get_gpg(), '--import', '-q', os.path.join(key_dir, key)])

		# Step 2. Import feeds and trust their signing keys
		for root, dirs, files in os.walk(os.path.join(mydir, 'feeds')):
			if 'latest.xml' in files:
				feed_path = os.path.join(root, 'latest.xml')
				icon_path = os.path.join(root, 'icon.png')

				# Get URI
				feed_stream = file(feed_path)
				doc = qdom.parse(feed_stream)
				uri = doc.getAttribute('uri')
				assert uri, "Missing 'uri' attribute on root element in '%s'" % feed_path
				domain = trust.domain_from_url(uri)

				feed_stream.seek(0)
				stream, sigs = gpg.check_stream(feed_stream)
				for s in sigs:
					if not trust.trust_db.is_trusted(s.fingerprint, domain):
						print "Adding key %s to trusted list for %s" % (s.fingerprint, domain)
						trust.trust_db.trust_key(s.fingerprint, domain)
				oldest_sig = min([s.get_timestamp() for s in sigs])
				try:
					config.iface_cache.update_feed_from_network(uri, stream.read(), oldest_sig)
				except iface_cache.ReplayAttack:
					# OK, the user has a newer copy already
					pass
				if feed_stream != stream:
					feed_stream.close()
				stream.close()

				if os.path.exists(icon_path):
					icons_cache = basedir.save_cache_path(namespaces.config_site, 'interface_icons')
					icon_file = os.path.join(icons_cache, model.escape(uri))
					if not os.path.exists(icon_file):
						shutil.copyfile(icon_path, icon_file)

		# Step 3. Solve to find out which implementations we actually need
		archive_stream.seek(archive_offset)

		extract_impls = {}	# Impls we need but which are compressed (ID -> Impl)
		tmp = tempfile.mkdtemp(prefix = '0export-')
		try:
			# Create a "fake store" with the implementation in the archive
			archive = tarfile.open(name=archive_stream.name, mode='r|', fileobj=archive_stream)
			fake_store = FakeStore()
			for tarmember in archive:
				if tarmember.name.startswith('implementations'):
					impl = os.path.basename(tarmember.name).split('.')[0]
					fake_store.impls.add(impl)

			bootstrap_store = zerostore.Store(os.path.join(mydir, 'implementations'))
			stores = config.stores

			toplevel_uris = [uri.strip() for uri in file(os.path.join(mydir, 'toplevel_uris'))]
			ZEROINSTALL_URI = "@ZEROINSTALL_URI@"
			for uri in [ZEROINSTALL_URI] + toplevel_uris:
				# This is so the solver treats versions in the setup archive as 'cached',
				# meaning that it will prefer using them to doing a download
				stores.stores.append(bootstrap_store)
				stores.stores.append(fake_store)

				# Shouldn't need to download anything, but we might not have all feeds
				r = requirements.Requirements(uri)
				d = driver.Driver(config = config, requirements = r)
				config.network_use = model.network_minimal
				download_feeds = d.solve_with_downloads()
				h.wait_for_blocker(download_feeds)
				assert d.solver.ready, d.solver.get_failure_reason()

				# Add anything chosen from the setup store to the main store
				stores.stores.remove(fake_store)
				stores.stores.remove(bootstrap_store)
				for iface, impl in d.get_uncached_implementations():
					print >>sys.stderr, "Need to import", impl
					if impl.id in fake_store.impls:
						# Delay extraction
						extract_impls[impl.id] = impl
					else:
						impl_src = os.path.join(mydir, 'implementations', impl.id)

						if os.path.isdir(impl_src):
							stores.add_dir_to_cache(impl.id, impl_src)
						else:
							print >>sys.stderr, "Required impl %s (for %s) not present" % (impl, iface)

				# Remember where we copied 0launch to, because we'll need it after
				# the temporary directory is deleted.
				if uri == ZEROINSTALL_URI:
					global copied_0launch_in_cache
					impl = d.solver.selections.selections[uri]
					if not impl.id.startswith('package:'):
						copied_0launch_in_cache = impl.get_path(stores = config.stores)
					# (else we selected the distribution version of Zero Install)
		finally:
			shutil.rmtree(tmp)

		# Count total number of bytes to extract
		extract_total = 0
		for impl in extract_impls.values():
			impl_info = archive.getmember('implementations/' + impl.id + '.tar.bz2')
			extract_total += impl_info.size

		self.sent = 0

		# Actually extract+import implementations in archive
		archive_stream.seek(archive_offset)
		archive = tarfile.open(name=archive_stream.name, mode='r|',
                fileobj=archive_stream)

		for tarmember in archive:
			if not tarmember.name.startswith('implementations'):
				continue
			impl_id = tarmember.name.split('/')[1].split('.')[0]
			if impl_id not in extract_impls:
				print "Skip", impl_id
				continue
			print "Extracting", impl_id
			tmp = tempfile.mkdtemp(prefix = '0export-')
			try:
				impl_stream = archive.extractfile(tarmember)
				self.child = subprocess.Popen('bunzip2|tar xf -', shell = True, stdin = subprocess.PIPE, cwd = tmp)
				mainloop = gobject.MainLoop(gobject.main_context_default())

				def pipe_ready(src, cond):
					data = impl_stream.read(4096)
					if not data:
						mainloop.quit()
						self.child.stdin.close()
						return False
					self.sent += len(data)
					if progress_bar:
						progress_bar.set_fraction(float(self.sent) / extract_total)
					self.child.stdin.write(data)
					return True
				gobject.io_add_watch(self.child.stdin, gobject.IO_OUT | gobject.IO_HUP, pipe_ready, priority = gobject.PRIORITY_LOW)

				mainloop.run()

				self.child.wait()
				if self.child.returncode:
					raise Exception("Failed to unpack archive (code %d)" % self.child.returncode)

				stores.add_dir_to_cache(impl_id, tmp)

			finally:
				shutil.rmtree(tmp)

		return toplevel_uris
Example #33
	def testCompletion(self):
		shell = None
		complete = lambda *a: self.complete(*a, shell=shell)
		for shell in ['zsh','fish']:
			assert 'select\n' in complete(["s"], 1)
			assert 'select\n' in complete([], 1)
			assert 'select\n' in complete(["", "bar"], 1)

			assert '' == complete(["", "bar"], 2)
			assert '' == complete(["unknown", "bar"], 2)
			#self.assertEqual('', complete(["--", "s"], 2))

			assert '--help\n' in complete(["-"], 1)
			assert '--help\n' in complete(["--"], 1)
			assert '--help\n' in complete(["--h"], 1)
			assert '-h\n' in complete(["-h"], 1)
			assert '-hv\n' in complete(["-hv"], 1)
			assert '' == complete(["-hi"], 1)

			#assert '--message' not in complete(["--m"], 1)
			assert '--message' in complete(["--m", "select"], 1)
			assert '--message' in complete(["select", "--m"], 2)

			assert '--help' in complete(["select", "foo", "--h"], 3)
			assert '--help' not in complete(["run", "foo", "--h"], 3)
			#assert '--help' not in complete(["select", "--version", "--h"], 3)

			# Fall back to file completion for the program's arguments
			self.assertEqual('file\n', complete(["run", "foo", ""], 3))

			# Option value completion
			assert 'file\n' in complete(["select", "--with-store"], 3)
			assert 'Linux\n' in complete(["select", "--os"], 3)
			assert 'x86_64\n' in complete(["select", "--cpu"], 3)
			assert 'sha256new\n' in complete(["digest", "--algorithm"], 3)

		# Option=value complete
		for shell in ['zsh','fish']:
			assert 'file\n' in complete(["select", "--with-store="], 2)
			assert 'filter --cpu=x86_64\n' in complete(["select", "--cpu="], 2)
		for shell in ['bash']:
			assert 'file\n' in complete(["select", "--with-store", "="], 3)
			assert 'file\n' in complete(["select", "--with-store", "=", "foo"], 4)
			assert 'filter x86_64 \n' in complete(["select", "--cpu", "="], 3)

		from zeroinstall.support import basedir
		from zeroinstall.injector.namespaces import config_site
		d = basedir.save_cache_path(config_site, "interfaces")
		with open(os.path.join(d, model.escape('http://example.com/foo')), 'wb') as stream:
			stream.write(b"<?xml version='1.0'?>"
				b"<interface uri='http://example.com/foo' xmlns='http://zero-install.sourceforge.net/2004/injector/interface'>"
				b"<name>-</name><summary>-</summary>"
				b"<implementation version='1.2' id='12'/>"
				b"<implementation version='1.5' id='15'/>"
				b"</interface>")

		for shell in ['bash']:
			assert 'filter select \n' in complete(["sel"], 1)
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "ht"], 2))
			self.assertEqual('prefix //example.com/\nfile\n', complete(["select", "http:"], 2))
			self.assertEqual('prefix //example.com/\nfile\n', complete(["select", "http:/"], 2))
			self.assertEqual('filter //example.com/foo \n', complete(["select", "http://example.com/"], 2))

		for shell in ['zsh','fish']:
			# Check options are ignored correctly
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--with-store=.", "http:"], 3))
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "http:", "--with-store=."], 2))

			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--with-store", ".", "http:"], 4))
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "http:", "--with-store", "."], 2))

			# Version completion
			self.assertEqual('filter 1.2\nfilter 1.5\n', complete(["select", "--before", "", "http://example.com/foo"], 3))
			self.assertEqual('filter 1.2\nfilter 1.5\n', complete(["select", "--version", "", "http://example.com/foo"], 3))
			self.assertEqual('filter 1.2..!1.2\nfilter 1.2..!1.5\n', complete(["select", "--version", "1.2..", "http://example.com/foo"], 3))

			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--version-for", "http:", "", ], 3))
			self.assertEqual('filter 1.2\nfilter 1.5\n', complete(["select", "--version-for", "http://example.com/foo", "", ], 4))

			# -- before argument
			self.assertEqual('prefix http://example.com/\nfile\n', complete(["select", "--", "http:"], 3))