Example #1
	def _confirm_unknown_keys(self, to_trust, valid_sigs):
		"""Check the key-info server's results for these keys. If we don't know any of them,
		ask for extra confirmation from the user.
		@param to_trust: the signatures the user wants to trust
		@return: True to continue"""

		def is_unknown(sig):
			for note in valid_sigs[sig].info:
				if note.getAttribute("vote") == "good":
					return False
			return True
		unknown = [sig for sig in to_trust if is_unknown(sig)]

		if unknown:
			if len(unknown) == 1:
				msg = _('WARNING: you are confirming a key which was not known to the key server. Are you sure?')
			else:
				msg = _('WARNING: you are confirming keys which were not known to the key server. Are you sure?')

			box = gtk.MessageDialog(self,
						gtk.DIALOG_DESTROY_WITH_PARENT,
						gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK_CANCEL,
						msg)
			box.set_position(gtk.WIN_POS_CENTER)
			response = box.run()
			box.destroy()
			return response == gtk.RESPONSE_OK

		return True
Example #2
def _check_xml_stream(stream):
	xml_comment_start = '<!-- Base64 Signature'

	data_to_check = stream.read()

	last_comment = data_to_check.rfind('\n' + xml_comment_start)
	if last_comment < 0:
		raise SafeException(_("No signature block in XML. Maybe this file isn't signed?"))
	last_comment += 1	# Include new-line in data
	
	data = tempfile.TemporaryFile()
	data.write(data_to_check[:last_comment])
	data.flush()
	os.lseek(data.fileno(), 0, 0)

	errors = tempfile.TemporaryFile()

	sig_lines = data_to_check[last_comment:].split('\n')
	if sig_lines[0].strip() != xml_comment_start:
		raise SafeException(_('Bad signature block: extra data on comment line'))
	while sig_lines and not sig_lines[-1].strip():
		del sig_lines[-1]
	if sig_lines[-1].strip() != '-->':
		raise SafeException(_('Bad signature block: last line is not end-of-comment'))
	sig_data = '\n'.join(sig_lines[1:-1])

	if re.match('^[ A-Za-z0-9+/=\n]+$', sig_data) is None:
		raise SafeException(_("Invalid characters found in base 64 encoded signature"))
	try:
		sig_data = base64.decodestring(sig_data) # (b64decode is Python 2.4)
	except Exception as ex:
		raise SafeException(_("Invalid base 64 encoded signature: %s") % str(ex))

	sig_fd, sig_name = tempfile.mkstemp(prefix = 'injector-sig-')
	try:
		sig_file = os.fdopen(sig_fd, 'w')
		sig_file.write(sig_data)
		sig_file.close()

		# Note: Should ideally close status_r in the child, but we want to support Windows too
		child = _run_gpg([# Not all versions support this:
				  #'--max-output', str(1024 * 1024),
				  '--batch',
				  # Windows GPG can only cope with "1" here
				  '--status-fd', '1',
				  # Don't try to download missing keys; we'll do that
				  '--keyserver-options', 'no-auto-key-retrieve',
				  '--verify', sig_name, '-'],
			   stdin = data,
			   stdout = subprocess.PIPE,
			   stderr = errors)

		try:
			sigs = _get_sigs_from_gpg_status_stream(child.stdout, child, errors)
		finally:
			os.lseek(stream.fileno(), 0, 0)
			stream.seek(0)
	finally:
		os.unlink(sig_name)
	return (stream, sigs)
Example #3
def extract_deb(stream, destdir, extract = None, start_offset = 0):
	if extract:
		raise SafeException(_('Sorry, but the "extract" attribute is not yet supported for Debs'))

	stream.seek(start_offset)
	# ar can't read from stdin, so make a copy...
	deb_copy_name = os.path.join(destdir, 'archive.deb')
	with open(deb_copy_name, 'wb') as deb_copy:
		shutil.copyfileobj(stream, deb_copy)

	data_tar = None
	p = subprocess.Popen(('ar', 't', 'archive.deb'), stdout=subprocess.PIPE, cwd=destdir, universal_newlines=True)
	o = p.communicate()[0]
	for line in o.split('\n'):
		if line == 'data.tar':
			data_compression = None
		elif line == 'data.tar.gz':
			data_compression = 'gzip'
		elif line == 'data.tar.bz2':
			data_compression = 'bzip2'
		elif line == 'data.tar.lzma':
			data_compression = 'lzma'
		else:
			continue
		data_tar = line
		break
	else:
		raise SafeException(_("File is not a Debian package."))

	_extract(stream, destdir, ('ar', 'x', 'archive.deb', data_tar))
	os.unlink(deb_copy_name)
	data_name = os.path.join(destdir, data_tar)
	with open(data_name, 'rb') as data_stream:
		os.unlink(data_name)
		extract_tar(data_stream, destdir, None, data_compression)
Example #4
	def confirm_install(self, message):
		yield self._switch_to_main_window(_('Need to confirm installation of distribution packages'))

		from zeroinstall.injector.download import DownloadAborted
		import dialog
		import gtk
		box = gtk.MessageDialog(self.mainwindow.window,
					gtk.DIALOG_DESTROY_WITH_PARENT,
					gtk.MESSAGE_QUESTION, gtk.BUTTONS_CANCEL,
					str(message))
		box.set_position(gtk.WIN_POS_CENTER)

		install = dialog.MixedButton(_('Install'), gtk.STOCK_OK)
		install.set_can_default(True)
		box.add_action_widget(install, gtk.RESPONSE_OK)
		install.show_all()
		box.set_default_response(gtk.RESPONSE_OK)
		box.show()

		response = dialog.DialogResponse(box)
		yield response
		box.destroy()

		if response.response != gtk.RESPONSE_OK:
			raise DownloadAborted()
Example #5
def unpack_archive(url, data, destdir, extract = None, type = None, start_offset = 0):
	"""Unpack stream 'data' into directory 'destdir'. If extract is given, extract just
	that sub-directory from the archive (i.e. destdir/extract will exist afterwards).
	Works out the format from the name."""
	if type is None: type = type_from_url(url)
	if type is None: raise SafeException(_("Unknown extension (and no MIME type given) in '%s'") % url)
	if type == 'application/x-bzip-compressed-tar':
		extract_tar(data, destdir, extract, 'bzip2', start_offset)
	elif type == 'application/x-deb':
		extract_deb(data, destdir, extract, start_offset)
	elif type == 'application/x-rpm':
		extract_rpm(data, destdir, extract, start_offset)
	elif type == 'application/zip':
		extract_zip(data, destdir, extract, start_offset)
	elif type == 'application/x-tar':
		extract_tar(data, destdir, extract, None, start_offset)
	elif type == 'application/x-lzma-compressed-tar':
		extract_tar(data, destdir, extract, 'lzma', start_offset)
	elif type == 'application/x-xz-compressed-tar':
		extract_tar(data, destdir, extract, 'xz', start_offset)
	elif type == 'application/x-compressed-tar':
		extract_tar(data, destdir, extract, 'gzip', start_offset)
	elif type == 'application/vnd.ms-cab-compressed':
		extract_cab(data, destdir, extract, start_offset)
	elif type == 'application/x-apple-diskimage':
		extract_dmg(data, destdir, extract, start_offset)
	elif type == 'application/x-ruby-gem':
		extract_gem(data, destdir, extract, start_offset)
	else:
		raise SafeException(_('Unknown MIME type "%(type)s" for "%(url)s"') % {'type': type, 'url': url})
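
A minimal usage sketch for the dispatcher above (the archive path and URL are hypothetical; when no explicit type is given, the MIME type is guessed from the URL by type_from_url):

# Hypothetical example: unpack a downloaded tarball into a scratch directory.
with open('/tmp/prog-1.0.tar.bz2', 'rb') as data:
	unpack_archive('http://example.com/prog-1.0.tar.bz2', data, '/tmp/unpacked')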
Example #6
def discover_existing_apps():
	"""Search through the configured XDG datadirs looking for .desktop files created by L{add_to_menu}.
	@return: a map from application URIs to .desktop filenames"""
	already_installed = {}
	for d in basedir.load_data_paths('applications'):
		for desktop_file in os.listdir(d):
			if desktop_file.startswith('zeroinstall-') and desktop_file.endswith('.desktop'):
				full = os.path.join(d, desktop_file)
				try:
					with open(full, 'rt') as stream:
						for line in stream:
							line = line.strip()
							if line.startswith('Exec=0launch '):
								bits = line.split(' -- ', 1)
								if ' ' in bits[0]:
									uri = bits[0].split(' ', 1)[1]		# 0launch URI -- %u
								else:
									uri = bits[1].split(' ', 1)[0].strip()	# 0launch -- URI %u
								already_installed[uri] = full
								break
						else:
							logger.info(_("Failed to find Exec line in %s"), full)
				except Exception as ex:
					logger.warn(_("Failed to load .desktop file %(filename)s: %(exceptions"), {'filename': full, 'exception': ex})
	return already_installed
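
For illustration, a short sketch of how the returned map might be consumed (purely hypothetical usage):

# List the feed URIs that already have zeroinstall-*.desktop entries.
for uri, desktop_path in discover_existing_apps().items():
	print("%s -> %s" % (uri, desktop_path))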
Example #7
	def __init__(self):
		user_store = os.path.join(basedir.xdg_cache_home, '0install.net', 'implementations')
		self.stores = [Store(user_store)]

		impl_dirs = basedir.load_first_config('0install.net', 'injector',
							  'implementation-dirs')
		debug(_("Location of 'implementation-dirs' config file being used: '%s'"), impl_dirs)
		if impl_dirs:
			dirs = open(impl_dirs)
		else:
			if os.name == "nt":
				from win32com.shell import shell, shellcon
				localAppData = shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, 0, 0)
				commonAppData = shell.SHGetFolderPath(0, shellcon.CSIDL_COMMON_APPDATA, 0, 0)

				userCache = os.path.join(localAppData, "0install.net", "implementations")
				sharedCache = os.path.join(commonAppData, "0install.net", "implementations")
				dirs = [userCache, sharedCache]

			else:
				dirs = ['/var/cache/0install.net/implementations']

		for directory in dirs:
			directory = directory.strip()
			if directory and not directory.startswith('#'):
				debug(_("Added system store '%s'"), directory)
				self.stores.append(Store(directory))
Example #8
def import_key(stream):
	"""Run C{gpg --import} with this stream as stdin."""
	errors = tempfile.TemporaryFile()

	child = _run_gpg(['--quiet', '--import', '--batch'],
				stdin = stream, stderr = errors)

	status = child.wait()

	errors.seek(0)
	error_messages = errors.read().strip()
	errors.close()

	if error_messages:
		import codecs
		decoder = codecs.lookup('utf-8')
		error_messages = decoder.decode(error_messages, errors = 'replace')[0]

	if status != 0:
		if error_messages:
			raise SafeException(_("Errors from 'gpg --import':\n%s") % error_messages)
		else:
			raise SafeException(_("Non-zero exit code %d from 'gpg --import'") % status)
	elif error_messages:
		warn(_("Warnings from 'gpg --import':\n%s") % error_messages)
Example #9
def canonical_iface_uri(uri):
	"""If uri is a relative path, convert to an absolute one.
	A "file:///foo" URI is converted to "/foo".
	An "alias:prog" URI expands to the URI in the 0alias script
	Otherwise, return it unmodified.
	@rtype: str
	@raise SafeException: if uri isn't valid
	"""
	if uri.startswith('http://') or uri.startswith('https://'):
		if uri.count("/") < 3:
			raise SafeException(_("Missing / after hostname in URI '%s'") % uri)
		return uri
	elif uri.startswith('file:///'):
		return uri[7:]
	elif uri.startswith('alias:'):
		from zeroinstall import alias, support
		alias_prog = uri[6:]
		if not os.path.isabs(alias_prog):
			full_path = support.find_in_path(alias_prog)
			if not full_path:
				raise alias.NotAnAliasScript("Not found in $PATH: " + alias_prog)
		else:
			full_path = alias_prog
		interface_uri, main = alias.parse_script(full_path)
		return interface_uri
	else:
		iface_uri = os.path.realpath(uri)
		if os.path.isfile(iface_uri):
			return iface_uri
	raise SafeException(_("Bad interface name '%(uri)s'.\n"
			"(doesn't start with 'http:', and "
			"doesn't exist as a local file '%(interface_uri)s' either)") %
			{'uri': uri, 'interface_uri': iface_uri})
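
The branches above can be exercised like this (the URIs and paths are hypothetical; the comments restate what the code does):

canonical_iface_uri('http://example.com/prog.xml')	# returned unchanged after the hostname check
canonical_iface_uri('file:///home/user/feed.xml')	# -> '/home/user/feed.xml'
canonical_iface_uri('feeds/local.xml')			# -> absolute path via os.path.realpath, if the file exists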
Example #10
def handle(config, options, args):
	if not args:
		raise UsageError()

	for x in args:
		if not os.path.isfile(x):
			raise SafeException(_("File '%s' does not exist") % x)
		logger.info(_("Importing from file '%s'"), x)
		with open(x, 'rb') as signed_data:
			data, sigs = gpg.check_stream(signed_data)
			doc = minidom.parseString(data.read())
			uri = doc.documentElement.getAttribute('uri')
			if not uri:
				raise SafeException(_("Missing 'uri' attribute on root element in '%s'") % x)
			logger.info(_("Importing information about interface %s"), uri)
			signed_data.seek(0)

			pending = PendingFeed(uri, signed_data)

			def run():
				keys_downloaded = tasks.Task(pending.download_keys(config.fetcher), "download keys")
				yield keys_downloaded.finished
				tasks.check(keys_downloaded.finished)
				if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
					blocker = config.trust_mgr.confirm_keys(pending)
					if blocker:
						yield blocker
						tasks.check(blocker)
					if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
						raise SafeException(_("No signing keys trusted; not importing"))

			task = tasks.Task(run(), "import feed")

			tasks.wait_for_blocker(task.finished)
Example #11
	def download_archive(self, download_source, force = False, impl_hint = None, may_use_mirror = False):
		"""Fetch an archive. You should normally call L{download_impl}
		instead, since it handles other kinds of retrieval method too.
		It is the caller's responsibility to ensure that the returned stream is closed.
		If impl_hint is from a local feed and the url is relative, just opens the existing file for reading.
		@type download_source: L{model.DownloadSource}
		@type force: bool
		@type may_use_mirror: bool
		@rtype: (L{Blocker} | None, file)"""
		from zeroinstall.zerostore import unpack

		mime_type = download_source.type
		if not mime_type:
			mime_type = unpack.type_from_url(download_source.url)
		if not mime_type:
			raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
		if not self.external_store:
			unpack.check_type_ok(mime_type)

		if '://' not in download_source.url:
			return self._download_local_file(download_source, impl_hint)

		if may_use_mirror:
			mirror = self._get_archive_mirror(download_source)
		else:
			mirror = None

		if self.config.handler.dry_run:
			print(_("[dry-run] downloading archive {url}").format(url = download_source.url))
		dl = self.download_url(download_source.url, hint = impl_hint, mirror_url = mirror)
		if download_source.size is not None:
			dl.expected_size = download_source.size + (download_source.start_offset or 0)
		# (else don't know sizes for mirrored archives)
		return (dl.downloaded, dl.tempfile)
Example #12
def _check_xml_stream(stream):
	xml_comment_start = '<!-- Base64 Signature'

	data_to_check = stream.read()

	last_comment = data_to_check.rfind('\n' + xml_comment_start)
	if last_comment < 0:
		raise SafeException(_("No signature block in XML. Maybe this file isn't signed?"))
	last_comment += 1	# Include new-line in data
	
	data = tempfile.TemporaryFile()
	data.write(data_to_check[:last_comment])
	data.flush()
	os.lseek(data.fileno(), 0, 0)

	errors = tempfile.TemporaryFile()

	sig_lines = data_to_check[last_comment:].split('\n')
	if sig_lines[0].strip() != xml_comment_start:
		raise SafeException(_('Bad signature block: extra data on comment line'))
	while sig_lines and not sig_lines[-1].strip():
		del sig_lines[-1]
	if sig_lines[-1].strip() != '-->':
		raise SafeException(_('Bad signature block: last line is not end-of-comment'))
	sig_data = '\n'.join(sig_lines[1:-1])

	if re.match('^[ A-Za-z0-9+/=\n]+$', sig_data) is None:
		raise SafeException(_("Invalid characters found in base 64 encoded signature"))
	try:
		sig_data = base64.decodestring(sig_data) # (b64decode is Python 2.4)
	except Exception as ex:
		raise SafeException(_("Invalid base 64 encoded signature: %s") % str(ex))
Example #13
			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					logger.warning(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self, item)
				feed.implementations[id] = impl
				new_impls.append(impl)

				impl.installed = installed
				impl.metadata = item_attrs
				impl.requires = depends

				if 'run' not in impl.commands:
					item_main = item_attrs.get('main', None)
					if item_main:
						if item_main.startswith('/'):
							impl.main = item_main
						else:
							raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
										item_main)
				impl.upstream_stability = model.packaged

				return impl
Example #14
	def update_feed_from_network(self, feed_url, new_xml, modified_time, dry_run = False):
		"""Update a cached feed.
		Called by L{update_feed_if_trusted} if we trust this data.
		After a successful update, L{writer} is used to update the feed's
		last_checked time.
		@param feed_url: the feed being updated
		@type feed_url: str
		@param new_xml: the downloaded replacement feed document
		@type new_xml: str
		@param modified_time: the timestamp of the oldest trusted signature (used as an approximation to the feed's modification time)
		@type modified_time: long
		@type dry_run: bool
		@raises ReplayAttack: if modified_time is older than the currently cached time
		@since: 0.48"""
		logger.debug(_("Updating '%(interface)s' from network; modified at %(time)s") %
			{'interface': feed_url, 'time': _pretty_time(modified_time)})

		self._import_new_feed(feed_url, new_xml, modified_time, dry_run)

		if dry_run: return

		feed = self.get_feed(feed_url)

		from . import writer
		feed.last_checked = int(time.time())
		writer.save_feed(feed)

		logger.info(_("Updated feed cache entry for %(interface)s (modified %(time)s)"),
			{'interface': feed.get_name(), 'time': _pretty_time(modified_time)})
Example #15
	def is_stale(self, feed_url, freshness_threshold):
		"""Check whether feed needs updating, based on the configured L{config.Config.freshness}.
		None is considered to be stale.
		If we already tried to update the feed within FAILED_CHECK_DELAY, returns false.
		@type feed_url: str
		@type freshness_threshold: int
		@return: True if feed should be updated
		@rtype: bool
		@since: 0.53"""
		if isinstance(feed_url, model.ZeroInstallFeed):
			feed_url = feed_url.url		# old API
		elif feed_url is None:
			return True			# old API

		now = time.time()

		feed = self.get_feed(feed_url)
		if feed is not None:
			if feed.local_path is not None:
				return False		# Local feeds are never stale

			if feed.last_modified is not None:
				staleness = now - (feed.last_checked or 0)
				logger.debug(_("Staleness for %(feed)s is %(staleness).2f hours"), {'feed': feed, 'staleness': staleness / 3600.0})

				if freshness_threshold <= 0 or staleness < freshness_threshold:
					return False		# Fresh enough for us
		# else we've never had it

		last_check_attempt = self.get_last_check_attempt(feed_url)
		if last_check_attempt and last_check_attempt > now - FAILED_CHECK_DELAY:
			logger.debug(_("Stale, but tried to check recently (%s) so not rechecking now."), time.ctime(last_check_attempt))
			return False

		return True
Example #16
def do_add(args):
	"""add DIGEST (DIRECTORY | (ARCHIVE [EXTRACT]))"""
	from zeroinstall.zerostore import unpack
	if len(args) < 2: raise UsageError(_("Missing arguments"))
	digest = args[0]
	if os.path.isdir(args[1]):
		if len(args) > 2: raise UsageError(_("Too many arguments"))
		stores.add_dir_to_cache(digest, args[1])
	elif os.path.isfile(args[1]):
		if len(args) > 3: raise UsageError(_("Too many arguments"))
		if len(args) > 2:
			extract = args[2]
		else:
			extract = None

		type = unpack.type_from_url(args[1])
		if not type:
			raise SafeException(_("Unknown extension in '%s' - can't guess MIME type") % args[1])
		unpack.check_type_ok(type)

		with open(args[1], 'rb') as stream:
			stores.add_archive_to_cache(digest, stream, args[1], extract, type = type)
	else:
		try:
			os.stat(args[1])
		except OSError as ex:
			if ex.errno != errno.ENOENT:		# No such file or directory
				raise UsageError(str(ex))	# E.g. permission denied
		raise UsageError(_("No such file or directory '%s'") % args[1])
Example #17
	def _add_with_helper(self, required_digest, path):
		"""Use 0store-secure-add to copy 'path' to the system store.
		@param required_digest: the digest for path
		@type required_digest: str
		@param path: root of implementation directory structure
		@type path: str
		@return: True iff the directory was copied into the system cache successfully
		"""
		if required_digest.startswith('sha1='):
			return False		# Old digest alg not supported
		helper = support.find_in_path('0store-secure-add-helper')
		if not helper:
			info(_("'0store-secure-add-helper' command not found. Not adding to system cache."))
			return False
		import subprocess
		env = os.environ.copy()
		env['ENV_NOT_CLEARED'] = 'Unclean'	# (warn about insecure configurations)
		env['HOME'] = 'Unclean'			# (warn about insecure configurations)
		dev_null = os.open(os.devnull, os.O_RDONLY)
		try:
			info(_("Trying to add to system cache using %s"), helper)
			child = subprocess.Popen([helper, required_digest],
						 stdin = dev_null,
						 cwd = path,
						 env = env)
			exit_code = child.wait()
		finally:
			os.close(dev_null)

		if exit_code:
			warn(_("0store-secure-add-helper failed."))
			return False

		info(_("Added succcessfully."))
		return True
Example #18
	def populate_model(self):
		m = self.model
		m.clear()

		for uri in self.app_list.get_apps():
			itr = m.append()
			m[itr][AppListBox.URI] = uri

			try:
				iface = self.iface_cache.get_interface(uri)
				feed = self.iface_cache.get_feed(uri)
				if feed:
					name = feed.get_name()
					summary = feed.summary or _('No information available')
					summary = summary[:1].capitalize() + summary[1:]
				else:
					name = iface.get_name()
					summary = _('No information available')
				# (GTK3 returns an extra boolean at the start)
				icon_width, icon_height = gtk.icon_size_lookup(gtk.ICON_SIZE_DIALOG)[-2:]
				pixbuf = icon.load_icon(self.iface_cache.get_icon_path(iface), icon_width, icon_height)
			except model.InvalidInterface as ex:
				name = uri
				summary = unicode(ex)
				pixbuf = None

			m[itr][AppListBox.NAME] = name
			if pixbuf is None:
				pixbuf = self.window.render_icon(gtk.STOCK_EXECUTE, gtk.ICON_SIZE_DIALOG)
			m[itr][AppListBox.ICON] = pixbuf

			m[itr][AppListBox.MARKUP] = '<b>%s</b>\n<i>%s</i>' % (_pango_escape(name), _pango_escape(summary))
Example #19
	def _load_cache(self):
		"""Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
		Throws an exception if the cache should be (re)created."""
		with open(os.path.join(self.cache_dir, self.cache_leaf), 'rt') as stream:
			cache_version = None
			for line in stream:
				if line == '\n':
					break
				name, value = line.split(': ')
				if name == 'mtime' and int(value) != int(self._status_details.st_mtime):
					raise Exception(_("Modification time of package database file has changed"))
				if name == 'size' and int(value) != self._status_details.st_size:
					raise Exception(_("Size of package database file has changed"))
				if name == 'version':
					cache_version = int(value)
			else:
				raise Exception(_('Invalid cache format (bad header)'))

			if cache_version is None:
				raise Exception(_('Old cache format'))

			versions = self.versions
			for line in stream:
				package, version, zi_arch = line[:-1].split('\t')
				versionarch = (version, intern(zi_arch))
				if package not in versions:
					versions[package] = [versionarch]
				else:
					versions[package].append(versionarch)
Example #20
def _parse_manifest(manifest_data):
	"""Parse a manifest file.
	@param manifest_data: the contents of the manifest file
	@type manifest_data: str
	@return: a mapping from paths to information about that path
	@rtype: {str: tuple}"""
	wanted = {}
	dir = ''
	for line in manifest_data.split('\n'):
		if not line: break
		if line[0] == 'D':
			data = line.split(' ', 1)
			if len(data) != 2: raise BadDigest(_("Bad line '%s'") % line)
			path = data[-1]
			if not path.startswith('/'): raise BadDigest(_("Not absolute: '%s'") % line)
			path = path[1:]
			dir = path
		elif line[0] == 'S':
			data = line.split(' ', 3)
			path = os.path.join(dir, data[-1])
			if len(data) != 4: raise BadDigest(_("Bad line '%s'") % line)
		else:
			data = line.split(' ', 4)
			path = os.path.join(dir, data[-1])
			if len(data) != 5: raise BadDigest(_("Bad line '%s'") % line)
		if path in wanted:
			raise BadDigest(_('Duplicate entry "%s"') % line)
		wanted[path] = data[:-1]
	return wanted
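
A worked example of the format the parser above accepts (digests and timestamps are made up): a 'D' line switches to a new directory, an 'S' line carries three fields after the type letter, and 'F'/'X' lines carry four.

sample = ("F 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33 1700000000 11 README\n"
          "D /bin\n"
          "X 62cdb7020ff920e5aa642c3d4066950dd1f01f4d 1700000000 20 prog\n")
wanted = _parse_manifest(sample)
# wanted == {'README':   ['F', '0beec7b5...', '1700000000', '11'],
#            'bin':      ['D'],
#            'bin/prog': ['X', '62cdb702...', '1700000000', '20']}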
Example #21
def process_depends(item, local_feed_dir):
	"""Internal"""
	# Note: also called from selections
	attrs = item.attrs
	dep_iface = item.getAttribute('interface')
	if not dep_iface:
		raise InvalidInterface(_("Missing 'interface' on <%s>") % item.name)
	if dep_iface.startswith('./'):
		if local_feed_dir:
			dep_iface = os.path.abspath(os.path.join(local_feed_dir, dep_iface))
			# (updates the element too, in case we write it out again)
			attrs['interface'] = dep_iface
		else:
			raise InvalidInterface(_('Relative interface URI "%s" in non-local feed') % dep_iface)
	dependency = InterfaceDependency(dep_iface, element = item)

	for e in item.childNodes:
		if e.uri != XMLNS_IFACE: continue
		if e.name in binding_names:
			dependency.bindings.append(process_binding(e))
		elif e.name == 'version':
			dependency.restrictions.append(
				VersionRangeRestriction(not_before = parse_version(e.getAttribute('not-before')),
						        before = parse_version(e.getAttribute('before'))))
	return dependency
Example #22
def load_feed(source, local=False):
    """Load a feed from a local file.
	@param source: the name of the file to read
	@type source: str
	@param local: this is a local feed
	@type local: bool
	@return: the new feed
	@rtype: L{ZeroInstallFeed}
	@raise InvalidInterface: if the source's syntax is incorrect
	@since: 0.48
	@see: L{iface_cache.iface_cache}, which uses this to load the feeds"""
    try:
        with open(source, "rb") as stream:
            root = qdom.parse(stream, filter_for_version=True)
    except IOError as ex:
        if ex.errno == errno.ENOENT and local:
            raise MissingLocalFeed(
                _(
                    "Feed not found. Perhaps this is a local feed that no longer exists? You can remove it from the list of feeds in that case."
                )
            )
        raise InvalidInterface(_("Can't read file"), ex)
    except Exception as ex:
        raise InvalidInterface(_("Invalid XML"), ex)

    if local:
        assert os.path.isabs(source), source
        local_path = source
    else:
        local_path = None
    feed = ZeroInstallFeed(root, local_path)
    feed.last_modified = int(os.stat(source).st_mtime)
    return feed
Example #23
        def fetch_feed():
            yield dl.downloaded
            tasks.check(dl.downloaded)

            pending = PendingFeed(feed_url, stream)

            if use_mirror:
                # If we got the feed from a mirror, get the key from there too
                key_mirror = self.feed_mirror + "/keys/"
            else:
                key_mirror = None

            keys_downloaded = tasks.Task(
                pending.download_keys(self.handler, feed_hint=feed_url, key_mirror=key_mirror),
                _("download keys for %s") % feed_url,
            )
            yield keys_downloaded.finished
            tasks.check(keys_downloaded.finished)

            if not iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml):
                blocker = self.handler.confirm_keys(pending, self.fetch_key_info)
                if blocker:
                    yield blocker
                    tasks.check(blocker)
                if not iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml):
                    raise NoTrustedKeys(_("No signing keys trusted; not importing"))
Example #24
def _stability(impl):
	assert impl
	if impl.user_stability is None:
		return _(str(impl.upstream_stability))
	return _("%(implementation_user_stability)s (was %(implementation_upstream_stability)s)") \
		% {'implementation_user_stability': _(str(impl.user_stability)),
		   'implementation_upstream_stability': _(str(impl.upstream_stability))}
Example #25
		def button_press(tree_view, bev):
			if bev.button not in (1, 3):
				return False
			pos = tree_view.get_path_at_pos(int(bev.x), int(bev.y))
			if not pos:
				return False
			path, col, x, y = pos
			impl = self.model[path][ITEM]

			menu = gtk.Menu()

			stability_menu = gtk.MenuItem(_('Rating'))
			stability_menu.set_submenu(_build_stability_menu(self.policy, impl))
			stability_menu.show()
			menu.append(stability_menu)

			if not impl.id.startswith('package:') and self.policy.get_cached(impl):
				def open():
					os.spawnlp(os.P_WAIT, '0launch',
						'0launch', rox_filer, '-d',
						self.policy.get_implementation_path(impl))
				item = gtk.MenuItem(_('Open cached copy'))
				item.connect('activate', lambda item: open())
				item.show()
				menu.append(item)

			menu.popup(None, None, None, bev.button, bev.time)
Example #26
def _build_stability_menu(policy, impl):
	menu = gtk.Menu()

	upstream = impl.upstream_stability or model.testing
	choices = model.stability_levels.values()
	choices.sort()
	choices.reverse()

	def set(new):
		if isinstance(new, model.Stability):
			impl.user_stability = new
		else:
			impl.user_stability = None
		writer.save_feed(impl.feed)
		import main
		main.recalculate()

	item = gtk.MenuItem(_('Unset (%s)') % _(str(upstream).capitalize()).lower())
	item.connect('activate', lambda item: set(None))
	item.show()
	menu.append(item)

	item = gtk.SeparatorMenuItem()
	item.show()
	menu.append(item)

	for value in choices:
		item = gtk.MenuItem(_(str(value)).capitalize())
		item.connect('activate', lambda item, v = value: set(v))
		item.show()
		menu.append(item)

	return menu
Example #27
    def __init__(self, config, requirements):
        """
		@param config: The configuration settings to use
		@type config: L{config.Config}
		@param requirements: Details about the program we want to run
		@type requirements: L{requirements.Requirements}
		@since: 0.53
		"""
        self.watchers = []

        assert config
        self.config = config

        assert requirements
        self.requirements = requirements

        self.target_arch = arch.get_architecture(requirements.os, requirements.cpu)

        from zeroinstall.injector.solver import DefaultSolver

        self.solver = DefaultSolver(self.config)

        logger.debug(_("Supported systems: '%s'"), arch.os_ranks)
        logger.debug(_("Supported processors: '%s'"), arch.machine_ranks)

        if requirements.before or requirements.not_before:
            self.solver.extra_restrictions[config.iface_cache.get_interface(requirements.interface_uri)] = [
                model.VersionRangeRestriction(
                    model.parse_version(requirements.before), model.parse_version(requirements.not_before)
                )
            ]
Example #28
def process_binding(e):
	"""Internal"""
	if e.name == 'environment':
		mode = {
			None: EnvironmentBinding.PREPEND,
			'prepend': EnvironmentBinding.PREPEND,
			'append': EnvironmentBinding.APPEND,
			'replace': EnvironmentBinding.REPLACE,
		}[e.getAttribute('mode')]

		binding = EnvironmentBinding(e.getAttribute('name'),
					     insert = e.getAttribute('insert'),
					     default = e.getAttribute('default'),
					     value = e.getAttribute('value'),
					     mode = mode,
					     separator = e.getAttribute('separator'))
		if not binding.name: raise InvalidInterface(_("Missing 'name' in binding"))
		if binding.insert is None and binding.value is None:
			raise InvalidInterface(_("Missing 'insert' or 'value' in binding"))
		if binding.insert is not None and binding.value is not None:
			raise InvalidInterface(_("Binding contains both 'insert' and 'value'"))
		return binding
	elif e.name == 'executable-in-path':
		return ExecutableBinding(e, in_path = True)
	elif e.name == 'executable-in-var':
		return ExecutableBinding(e, in_path = False)
	elif e.name == 'overlay':
		return OverlayBinding(e.getAttribute('src'), e.getAttribute('mount-point'))
	else:
		raise Exception(_("Unknown binding type '%s'") % e.name)
Example #29
	def is_stale(self, feed, freshness_threshold):
		"""Check whether feed needs updating, based on the configured L{freshness}.
		None is considered to be stale.
		If we already tried to update the feed within FAILED_CHECK_DELAY, returns false.
		@return: True if feed should be updated
		@since: 0.53"""
		if feed is None:
			return True
		if os.path.isabs(feed.url):
			return False		# Local feeds are never stale
		if feed.last_modified is None:
			return True		# Don't even have it yet
		now = time.time()
		staleness = now - (feed.last_checked or 0)
		debug(_("Staleness for %(feed)s is %(staleness).2f hours"), {'feed': feed, 'staleness': staleness / 3600.0})

		if freshness_threshold <= 0 or staleness < freshness_threshold:
			return False		# Fresh enough for us

		last_check_attempt = self.get_last_check_attempt(feed.url)
		if last_check_attempt and last_check_attempt > now - FAILED_CHECK_DELAY:
			debug(_("Stale, but tried to check recently (%s) so not rechecking now."), time.ctime(last_check_attempt))
			return False

		return True
Example #30
def parse_version(version_string):
	"""Convert a version string to an internal representation.
	The parsed format can be compared quickly using the standard Python functions.
	 - Version := DottedList ("-" Mod DottedList?)*
	 - DottedList := (Integer ("." Integer)*)
	@rtype: tuple (opaque)
	@raise SafeException: if the string isn't a valid version
	@since: 0.24 (moved from L{reader}, from where it is still available)"""
	if version_string is None: return None
	parts = _version_re.split(version_string)
	if parts[-1] == '':
		del parts[-1]	# Ends with a modifier
	else:
		parts.append('')
	if not parts:
		raise SafeException(_("Empty version string!"))
	l = len(parts)
	try:
		for x in range(0, l, 2):
			part = parts[x]
			if part:
				parts[x] = list(map(int, parts[x].split('.')))
			else:
				parts[x] = []	# (because ''.split('.') == [''], not [])
		for x in range(1, l, 2):
			parts[x] = _version_mod_to_value[parts[x]]
		return parts
	except ValueError as ex:
		raise SafeException(_("Invalid version format in '%(version_string)s': %(exception)s") % {'version_string': version_string, 'exception': ex})
	except KeyError as ex:
		raise SafeException(_("Invalid version modifier in '%(version_string)s': %(exception)s") % {'version_string': version_string, 'exception': str(ex).strip("u")})
Example #31
    def __init__(self, cache_leaf, source, format):
        """Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
        self.cache_leaf = cache_leaf
        self.source = source
        self.format = format
        self.cache_dir = basedir.save_cache_path(namespaces.config_site,
                                                 namespaces.config_prog)
        self.cached_for = {}  # Attributes of source when cache was created
        try:
            self._load_cache()
        except Exception as ex:
            info(_("Failed to load cache (%s). Flushing..."), ex)
            self.flush()
Example #32
 def run():
     keys_downloaded = tasks.Task(pending.download_keys(h),
                                  "download keys")
     yield keys_downloaded.finished
     tasks.check(keys_downloaded.finished)
     if not config.iface_cache.update_feed_if_trusted(
             uri, pending.sigs, pending.new_xml):
         blocker = config.trust_mgr.confirm_keys(pending)
         if blocker:
             yield blocker
             tasks.check(blocker)
         if not config.iface_cache.update_feed_if_trusted(
                 uri, pending.sigs, pending.new_xml):
             raise SafeException(
                 _("No signing keys trusted; not importing"))
Example #33
	def parse(value):
		v = float(value[:-1])
		unit = value[-1]
		if unit == 's':
			return int(v)
		v *= 60
		if unit == 'm':
			return int(v)
		v *= 60
		if unit == 'h':
			return int(v)
		v *= 24
		if unit == 'd':
			return int(v)
		raise SafeException(_('Unknown unit "%s" - use e.g. 5d for 5 days') % unit)
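
Worked examples for the unit handling above (each branch scales the value up before testing the next unit):

parse('30s')	# -> 30      (seconds pass through unchanged)
parse('5m')	# -> 300     (5 * 60)
parse('2h')	# -> 7200    (2 * 60 * 60)
parse('5d')	# -> 432000  (5 * 60 * 60 * 24)
parse('5w')	# raises SafeException: unknown unit "w"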
Example #34
    def __init__(self, source):
        """Constructor.
		@param source: a map of implementations, policy or selections document
		@type source: L{Element}
		"""
        self.selections = {}
        self.command = None

        if source is None:
            # (Solver will fill everything in)
            pass
        elif isinstance(source, Element):
            self._init_from_qdom(source)
        else:
            raise Exception(_("Source not a qdom.Element!"))
Example #35
def handle(config, options, args):
	if len(args) == 2:
		iface = config.iface_cache.get_interface(model.canonical_iface_uri(args[0]))
		feed_url = args[1]

		feed_import = add_feed.find_feed_import(iface, feed_url)
		if not feed_import:
			raise SafeException(_('Interface %(interface)s has no feed %(feed)s') %
						{'interface': iface.uri, 'feed': feed_url})
		iface.extra_feeds.remove(feed_import)
		writer.save_interface(iface)
	elif len(args) == 1:
		add_feed.handle(config, options, args, add_ok = False, remove_ok = True)
	else:
		raise UsageError()
Example #36
def _run_gpg(args, **kwargs):
    global _gnupg_options
    if _gnupg_options is None:
        gpg_path = find_in_path('gpg') or find_in_path('gpg2') or 'gpg'
        _gnupg_options = [gpg_path, '--no-secmem-warning']

        if hasattr(os, 'geteuid') and os.geteuid(
        ) == 0 and 'GNUPGHOME' not in os.environ:
            _gnupg_options += [
                '--homedir', os.path.join(basedir.home, '.gnupg')
            ]
            info(_("Running as root, so setting GnuPG home to %s"),
                 _gnupg_options[-1])

    return subprocess.Popen(_gnupg_options + args, **kwargs)
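
A small sketch of how the wrapper above can be driven (capturing stdout is this hypothetical caller's choice; '--version' is a standard GnuPG flag):

child = _run_gpg(['--version'], stdout=subprocess.PIPE)
out, _unused = child.communicate()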
Example #37
        def get_implementations(iface, arch):
            debug(
                _("get_best_implementation(%(interface)s), with feeds: %(feeds)s"
                  ), {
                      'interface': iface,
                      'feeds': iface.feeds
                  })

            impls = []
            for f in usable_feeds(iface, arch):
                self.feeds_used.add(f)
                debug(_("Processing feed %s"), f)

                try:
                    feed = self.iface_cache.get_interface(f)._main_feed
                    if not feed.last_modified: continue  # DummyFeed
                    if feed.name and iface.uri != feed.url and iface.uri not in feed.feed_for:
                        info(
                            _("Missing <feed-for> for '%(uri)s' in '%(feed)s'"
                              ), {
                                  'uri': iface.uri,
                                  'feed': f
                              })

                    if feed.implementations:
                        impls.extend(feed.implementations.values())
                    elif return_postponed:
                        postponed.extend(feed.pop_postponed())
                except Exception as ex:
                    warn(
                        _("Failed to load feed %(feed)s for %(interface)s: %(exception)s"
                          ), {
                              'feed': f,
                              'interface': iface,
                              'exception': str(ex)
                          })
Example #38
    def _download_and_import_feed(self, feed_url, force, use_mirror):
        """Download and import a feed.
		@param use_mirror: False to use primary location; True to use mirror."""
        if use_mirror:
            url = self.get_feed_mirror(feed_url)
            if url is None: return None
            info(_("Trying mirror server for feed %s") % feed_url)
        else:
            url = feed_url

        dl = self.handler.get_download(url, force=force, hint=feed_url)
        stream = dl.tempfile

        @tasks.named_async("fetch_feed " + url)
        def fetch_feed():
            yield dl.downloaded
            tasks.check(dl.downloaded)

            pending = PendingFeed(feed_url, stream)

            if use_mirror:
                # If we got the feed from a mirror, get the key from there too
                key_mirror = self.config.feed_mirror + '/keys/'
            else:
                key_mirror = None

            keys_downloaded = tasks.Task(
                pending.download_keys(self.handler,
                                      feed_hint=feed_url,
                                      key_mirror=key_mirror),
                _("download keys for %s") % feed_url)
            yield keys_downloaded.finished
            tasks.check(keys_downloaded.finished)

            if not self.config.iface_cache.update_feed_if_trusted(
                    pending.url, pending.sigs, pending.new_xml):
                blocker = self.config.trust_mgr.confirm_keys(pending)
                if blocker:
                    yield blocker
                    tasks.check(blocker)
                if not self.config.iface_cache.update_feed_if_trusted(
                        pending.url, pending.sigs, pending.new_xml):
                    raise NoTrustedKeys(
                        _("No signing keys trusted; not importing"))

        task = fetch_feed()
        task.dl = dl
        return task
Example #39
    def __init__(self, source):
        """Constructor.
		@param source: a map of implementations, policy or selections document
		@type source: {str: L{Selection}} | L{Policy} | L{Element}
		"""
        self.selections = {}

        if source is None:
            self.commands = []
            # (Solver will fill everything in)
        elif isinstance(source, Policy):
            self._init_from_policy(source)
        elif isinstance(source, Element):
            self._init_from_qdom(source)
        else:
            raise Exception(_("Source not a Policy or qdom.Element!"))
Example #40
	def usable_feeds(self, iface):
		"""Generator for C{iface.feeds} that are valid for our architecture.
		@rtype: generator
		@see: L{arch}"""
		if self.src and iface.uri == self.root:
			# Note: when feeds are recursive, we'll need a better test for root here
			machine_ranks = {'src': 1}
		else:
			machine_ranks = arch.machine_ranks
			
		for f in iface.feeds:
			if f.os in arch.os_ranks and f.machine in machine_ranks:
				yield f
			else:
				debug(_("Skipping '%(feed)s'; unsupported architecture %(os)s-%(machine)s"),
					{'feed': f, 'os': f.os, 'machine': f.machine})
Example #41
    def usable_feeds(self, iface, arch):
        """Generator for C{iface.feeds} that are valid for this architecture.
		@rtype: generator
		@see: L{arch}
		@since: 0.53"""
        for f in self.get_feed_imports(iface):
            if f.os in arch.os_ranks and f.machine in arch.machine_ranks:
                yield f
            else:
                debug(
                    _("Skipping '%(feed)s'; unsupported architecture %(os)s-%(machine)s"
                      ), {
                          'feed': f,
                          'os': f.os,
                          'machine': f.machine
                      })
Example #42
 def _sanity_check(self, uri):
     if uri.endswith('.tar.bz2') or \
        uri.endswith('.tar.gz') or \
        uri.endswith('.exe') or \
        uri.endswith('.rpm') or \
        uri.endswith('.deb') or \
        uri.endswith('.tgz'):
         box = gtk.MessageDialog(
             self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_ERROR,
             gtk.BUTTONS_OK,
             _("This URI (%s) looks like an archive, not a Zero Install feed. Make sure you're using the feed link!"
               ) % uri)
         box.run()
         box.destroy()
         return False
     return True
Example #43
 def report_exception(self, ex, tb=None):
     if not isinstance(ex, SafeException):
         if isinstance(ex, AssertionError):
             # Assertions often don't say that they're errors (and are frequently
             # blank).
             ex = repr(ex)
         if tb is None:
             warn(ex, exc_info=True)
         else:
             warn(ex, exc_info=(type(ex), ex, tb))
     if self.systray_icon:
         self.systray_icon.set_blinking(True)
         self.systray_icon.set_tooltip(
             str(ex) + '\n' + _('(click for details)'))
     else:
         dialog.alert(self.window, str(ex) or repr(ex))
Example #44
    def _downloaded_key(self, stream):
        import shutil, tempfile
        from zeroinstall.injector import gpg

        info(_("Importing key for feed '%s'"), self.url)

        # Python2.4: can't call fileno() on stream, so save to tmp file instead
        tmpfile = tempfile.TemporaryFile(prefix='injector-dl-data-')
        try:
            shutil.copyfileobj(stream, tmpfile)
            tmpfile.flush()

            tmpfile.seek(0)
            gpg.import_key(tmpfile)
        finally:
            tmpfile.close()
Example #45
    def create_app(self, name, requirements):
        validate_name(name)

        apps_dir = basedir.save_config_path(namespaces.config_site, "apps")
        app_dir = os.path.join(apps_dir, name)
        if os.path.isdir(app_dir):
            raise SafeException(
                _("Application '{name}' already exists: {path}").format(
                    name=name, path=app_dir))
        os.mkdir(app_dir)

        app = App(self.config, app_dir)
        app.set_requirements(requirements)
        app.set_last_checked()

        return app
Example #46
    def __details_cb(self, id, licence, group, detail, url, size):
        """@type id: str
		@type licence: str
		@type group: str
		@type detail: str
		@type url: str
		@type size: int"""
        details = {
            'licence': str(licence),
            'group': str(group),
            'detail': str(detail),
            'url': str(url),
            'size': int(size)
        }
        _logger_pk.debug(_('Details: %s %r'), id, details)
        self.details[id] = details
Example #47
def extract_cab(stream, destdir, extract, start_offset=0):
    "@since: 0.24"
    if extract:
        raise SafeException(
            _('Sorry, but the "extract" attribute is not yet supported for Cabinet files'
              ))

    stream.seek(start_offset)
    # cabextract can't read from stdin, so make a copy...
    cab_copy_name = os.path.join(destdir, 'archive.cab')
    with open(cab_copy_name, 'wb') as cab_copy:
        shutil.copyfileobj(stream, cab_copy)

    _extract(stream, destdir, ['cabextract', '-s', '-q', 'archive.cab'])
    os.unlink(cab_copy_name)
Example #48
    def get_details(self):
        """Call 'gpg --list-keys' and return the results split into lines and columns.
		@rtype: [[str]]"""
        # Note: GnuPG 2 always uses --fixed-list-mode
        child = _run_gpg([
            '--fixed-list-mode', '--with-colons', '--list-keys',
            self.fingerprint
        ],
                         stdout=subprocess.PIPE)
        cout, unused = child.communicate()
        if child.returncode:
            logger.info(_("GPG exited with code %d") % child.returncode)
        details = []
        for line in cout.split('\n'):
            details.append(line.split(':'))
        return details
Example #49
	def download_file(self, download_source, impl_hint=None):
		"""Fetch a single file. You should normally call L{download_impl}
		instead, since it handles other kinds of retrieval method too.
		It is the caller's responsibility to ensure that the returned stream is closed.
		@type download_source: L{zeroinstall.injector.model.FileSource}
		@type impl_hint: L{zeroinstall.injector.model.ZeroInstallImplementation} | None
		@rtype: tuple"""
		if self.config.handler.dry_run:
			print(_("[dry-run] downloading file {url}").format(url = download_source.url))

		if '://' not in download_source.url:
			return self._download_local_file(download_source, impl_hint)

		dl = self.download_url(download_source.url, hint = impl_hint)
		dl.expected_size = download_source.size
		return (dl.downloaded, dl.tempfile)
Example #50
    def abort(self):
        """Signal the current download to stop.
		@postcondition: L{aborted_by_user}"""
        self.status = download_failed

        if self.tempfile is not None:
            info(_("Aborting download of %s"), self.url)
            # TODO: we currently just close the output file; the thread will end when it tries to
            # write to it. We should try harder to stop the thread immediately (e.g. by closing its
            # socket when known), although we can never cover all cases (e.g. a stuck DNS lookup).
            # In any case, we don't wait for the child to exit before notifying tasks that are waiting
            # on us.
            self.aborted_by_user = True
            self.tempfile.close()
            self.tempfile = None
            self._aborted.trigger()
Example #51
    def add_archive_to_cache(self,
                             required_digest,
                             data,
                             url,
                             extract=None,
                             type=None,
                             start_offset=0,
                             try_helper=False,
                             dry_run=False):
        """@type required_digest: str
		@type data: file
		@type url: str
		@type extract: str | None
		@type type: str | None
		@type start_offset: int
		@type try_helper: bool
		@type dry_run: bool"""
        from . import unpack

        if self.lookup(required_digest):
            logger.info(_("Not adding %s as it already exists!"),
                        required_digest)
            return

        tmp = self.get_tmp_dir_for(required_digest)
        try:
            unpack.unpack_archive(url,
                                  data,
                                  tmp,
                                  extract,
                                  type=type,
                                  start_offset=start_offset)
        except:
            import shutil
            shutil.rmtree(tmp)
            raise

        try:
            self.check_manifest_and_rename(required_digest,
                                           tmp,
                                           extract,
                                           try_helper=try_helper,
                                           dry_run=dry_run)
        except Exception:
            #warn(_("Leaving extracted directory as %s"), tmp)
            support.ro_rmtree(tmp)
            raise
Example #52
        def recurse(sub):
            # To ensure that a line-by-line comparison of the manifests
            # is possible, we require that filenames don't contain newlines.
            # Otherwise, you can name a file so that the part after the \n
            # would be interpreted as another line in the manifest.
            if '\n' in sub: raise BadDigest("Newline in filename '%s'" % sub)
            assert sub.startswith('/')

            if sub == '/.manifest': return

            full = os.path.join(root, sub[1:].replace('/', os.sep))
            info = os.lstat(full)

            m = info.st_mode
            if stat.S_ISDIR(m):
                if sub != '/':
                    yield "D %s %s" % (int(info.st_mtime), sub)
                items = os.listdir(full)
                items.sort()
                subdir = sub
                if not subdir.endswith('/'):
                    subdir += '/'
                for x in items:
                    for y in recurse(subdir + x):
                        yield y
                return

            assert sub[1:]
            leaf = os.path.basename(sub[1:])
            if stat.S_ISREG(m):
                d = sha1_new(open(full).read()).hexdigest()
                if m & 0o111:
                    yield "X %s %s %s %s" % (d, int(
                        info.st_mtime), info.st_size, leaf)
                else:
                    yield "F %s %s %s %s" % (d, int(
                        info.st_mtime), info.st_size, leaf)
            elif stat.S_ISLNK(m):
                target = os.readlink(full)
                d = sha1_new(target).hexdigest()
                # Note: Can't use utime on symlinks, so skip mtime
                # Note: eCryptfs may report length as zero, so count ourselves instead
                yield "S %s %s %s" % (d, len(target), leaf)
            else:
                raise SafeException(
                    _("Unknown object '%s' (not a file, directory or symlink)")
                    % full)
Example #53
def parse_script(pathname):
    """Extract the URI and main values from a 0alias script.
	@param pathname: the script to be examined
	@return: information about the alias script
	@rtype: L{ScriptInfo}
	@raise NotAnAliasScript: if we can't parse the script
	"""
    stream = open(pathname)
    template_header = _template[:_template.index("%s'")]
    actual_header = stream.read(len(template_header))
    stream.seek(0)
    if template_header == actual_header:
        # If it's a 0alias script, it should be quite short!
        rest = stream.read()
        line = rest.split('\n')[1]
    else:
        old_template_header = \
            _old_template[:_old_template.index("-gd '")]
        actual_header = stream.read(len(old_template_header))
        if old_template_header != actual_header:
            raise NotAnAliasScript(
                _("'%s' does not look like a script created by 0alias") %
                pathname)
        rest = stream.read()
        line = rest.split('\n')[2]

    info = ScriptInfo()
    split = line.rfind("' '")
    if split != -1:
        # We have a --main or --command
        info.uri = line[split + 3:].split("'")[0]
        start, value = line[:split].split("'", 1)
        option = start.split('--', 1)[1].strip()
        value = value.replace("'\\''", "'")
        if option == 'main':
            info.main = value
        elif option == 'command':
            info.command = value
        else:
            raise NotAnAliasScript(
                "Unknown option '{option}' in alias script".format(
                    option=option))
    else:
        info.uri = line.split("'", 2)[1]

    return info
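
A usage sketch (the script path is hypothetical); parse_script raises NotAnAliasScript for anything it does not recognise:

script_info = parse_script('/home/user/bin/myprog')
print(script_info.uri)		# the feed URI embedded in the alias script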
Example #54
def _link(a, b, tmpfile):
	"""Keep 'a', delete 'b' and hard-link to 'a'"""
	if not _byte_identical(a, b):
		logger.warn(_("Files should be identical, but they're not!\n%(file_a)s\n%(file_b)s"), {'file_a': a, 'file_b': b})

	b_dir = os.path.dirname(b)
	old_mode = os.lstat(b_dir).st_mode
	os.chmod(b_dir, old_mode | 0o200)	# Need write access briefly
	try:
		os.link(a, tmpfile)
		try:
			os.rename(tmpfile, b)
		except:
			os.unlink(tmpfile)
			raise
	finally:
		os.chmod(b_dir, old_mode)
Example #55
def handle(config, options, args):
    """@type config: L{zeroinstall.injector.config.Config}
	@type args: [str]"""
    if len(args) != 1:
        raise UsageError()

    app = config.app_mgr.lookup_app(args[0], missing_ok=True)
    if app is not None:
        old_sels = app.get_selections()

        requirements = app.get_requirements()
        changes = requirements.parse_update_options(options)
        iface_uri = old_sels.interface

        if requirements.extra_restrictions and not options.xml:
            print("User-provided restrictions in force:")
            for uri, expr in requirements.extra_restrictions.items():
                print("  {uri}: {expr}".format(uri=uri, expr=expr))
            print()
    else:
        iface_uri = model.canonical_iface_uri(args[0])
        requirements = None
        changes = False

    sels = get_selections(config,
                          options,
                          iface_uri,
                          select_only=True,
                          download_only=False,
                          test_callback=None,
                          requirements=requirements)
    if not sels:
        sys.exit(1)  # Aborted by user

    if options.xml:
        show_xml(sels)
    else:
        show_human(sels, config.stores)
        if app is not None:
            from zeroinstall.cmd import whatchanged
            changes = whatchanged.show_changes(old_sels.selections,
                                               sels.selections) or changes
            if changes:
                print(
                    _("(note: use '0install update' instead to save the changes)"
                      ))
Example #56
	def solve_and_download_impls(self, refresh = False):
		"""Run L{solve_with_downloads} and then get the selected implementations too.
		@raise SafeException: if we couldn't select a set of implementations
		@since: 0.40"""
		refreshed = self.solve_with_downloads(refresh)
		if refreshed:
			yield refreshed
			tasks.check(refreshed)

		if not self.solver.ready:
			raise SafeException(_("Can't find all required implementations:") + '\n' +
				'\n'.join(["- %s -> %s" % (iface, self.solver.selections[iface])
					   for iface  in self.solver.selections]))
		downloaded = self.download_uncached_implementations()
		if downloaded:
			yield downloaded
			tasks.check(downloaded)
Example #57
def load_keys(fingerprints):
	"""Load a set of keys at once.
	This is much more efficient than making individual calls to L{load_key}.
	@return: a list of loaded keys, indexed by fingerprint
	@rtype: {str: L{Key}}
	@since: 0.27"""

	keys = {}

	# Otherwise GnuPG returns everything...
	if not fingerprints: return keys

	for fp in fingerprints:
		keys[fp] = Key(fp)

	current_fpr = None
	current_uid = None

	child = _run_gpg(['--fixed-list-mode', '--with-colons', '--list-keys',
				'--with-fingerprint', '--with-fingerprint'] + fingerprints, stdout = subprocess.PIPE)
	try:
		for line in child.stdout:
			if line.startswith('pub:'):
				current_fpr = None
				current_uid = None
			if line.startswith('fpr:'):
				current_fpr = line.split(':')[9]
				if current_fpr in keys and current_uid:
					# This is probably a subordinate key, where the fingerprint
					# comes after the uid, not before. Note: we assume the subkey is
					# cross-certified, as recent ones always are.
					keys[current_fpr].name = current_uid
			if line.startswith('uid:'):
				assert current_fpr is not None
				# Only take primary UID
				if current_uid: continue
				parts = line.split(':')
				current_uid = parts[9]
				if current_fpr in keys:
					keys[current_fpr].name = current_uid
	finally:
		_fix_perms()
		if child.wait():
			warn(_("gpg --list-keys failed with exit code %d") % child.returncode)

	return keys
Example #58
        def process_dependencies(requiring_var, requirer, arch):
            for d in deps_in_use(requirer, arch):
                debug(_("Considering command dependency %s"), d)

                add_iface(d.interface, arch.child_arch)

                for c in d.get_required_commands():
                    # We depend on a specific command within the implementation.
                    command_vars = add_command_iface(d.interface,
                                                     arch.child_arch, c)

                    # If the parent command/impl is chosen, one of the candidate commands
                    # must be too. If there aren't any, then this command is unselectable.
                    problem.add_clause([sat.neg(requiring_var)] + command_vars)

                # Must choose one version of d if impl is selected
                find_dependency_candidates(requiring_var, d)
Example #59
    def report_error(self, exception, tb=None):
        from zeroinstall.injector import download
        if isinstance(exception, download.DownloadError):
            tb = None

        if tb:
            import traceback
            details = '\n' + '\n'.join(
                traceback.format_exception(type(exception), exception, tb))
        else:
            details = str(exception)
        self.notify(
            "Zero Install",
            _("Error updating %(title)s: %(details)s") % {
                'title': self.title,
                'details': details.replace('<', '&lt;')
            })
Example #60
	def requires(self):
		if self._depends is None:
			self._runner = None
			depends = []
			for child in self.qdom.childNodes:
				if child.uri != XMLNS_IFACE: continue
				if child.name in _dependency_names:
					dep = process_depends(child, self._local_dir)
					depends.append(dep)
				elif child.name == 'runner':
					if self._runner:
						raise InvalidInterface(_("Multiple <runner>s in <command>!"))
					dep = process_depends(child, self._local_dir)
					depends.append(dep)
					self._runner = dep
			self._depends = depends
		return self._depends