def installed_fixup(self, impl):
    """Give installed OpenJDK RPM packages a usable "run" command.

    OpenSUSE names these packages with '_' where Fedora uses '.', so the
    implementation id is normalised before matching.  Non-Java packages
    are left untouched."""
    # Normalise the id (OpenSUSE uses _, Fedora uses .)
    normalised = impl.id.replace('_', '.')

    # Hack: If we added any Java implementations, find the corresponding JAVA_HOME...
    known = {
        'package:rpm:java-1.6.0-openjdk:': '1.6.0-openjdk',
        'package:rpm:java-1.7.0-openjdk:': '1.7.0-openjdk',
    }
    for prefix, candidate in known.items():
        if normalised.startswith(prefix):
            java_version = candidate
            break
    else:
        return		# Not an OpenJDK package we know about

    # On Fedora, unlike Debian, the arch is x86_64, not amd64
    java_bin = '/usr/lib/jvm/jre-%s.%s/bin/java' % (java_version, impl.machine)
    if not os.path.exists(java_bin):
        # Try without the arch...
        java_bin = '/usr/lib/jvm/jre-%s/bin/java' % java_version
        if not os.path.exists(java_bin):
            logger.info("Java binary not found (%s)", java_bin)
            if impl.main is None:
                java_bin = '/usr/bin/java'
            else:
                return

    impl.commands["run"] = model.Command(
        qdom.Element(namespaces.XMLNS_IFACE, 'command',
                     {'path': java_bin, 'name': 'run'}), None)
def load_config(handler=None):
    """Load the user's 0install configuration, applying any settings from
    the on-disk config file on top of the built-in defaults.
    @type handler: L{zeroinstall.injector.handler.Handler} | None
    @rtype: L{Config}"""
    config = Config(handler)
    parser = ConfigParser.RawConfigParser()
    # Defaults, used when no config file exists or a key is missing.
    parser.add_section('global')
    parser.set('global', 'help_with_testing', 'False')
    parser.set('global', 'freshness', str(60 * 60 * 24 * 30))	# One month
    parser.set('global', 'network_use', 'full')
    parser.set('global', 'auto_approve_keys', 'True')

    path = basedir.load_first_config(config_site, config_prog, 'global')
    if path:
        logger.info("Loading configuration from %s", path)
        try:
            parser.read(path)
        except Exception as ex:
            # A corrupt config file shouldn't stop 0install from running;
            # fall back to the defaults set above.
            logger.warning(_("Error loading config: %s"), str(ex) or repr(ex))

    config.help_with_testing = parser.getboolean('global', 'help_with_testing')
    config.network_use = parser.get('global', 'network_use')
    config.freshness = int(parser.get('global', 'freshness'))
    config.auto_approve_keys = parser.getboolean('global', 'auto_approve_keys')

    assert config.network_use in network_levels, config.network_use

    return config
def discover_existing_apps():
    """Search through the configured XDG datadirs looking for .desktop files created by L{add_to_menu}.
    @return: a map from application URIs to .desktop filenames"""
    already_installed = {}
    for d in basedir.load_data_paths('applications'):
        for desktop_file in os.listdir(d):
            if desktop_file.startswith('zeroinstall-') and desktop_file.endswith('.desktop'):
                full = os.path.join(d, desktop_file)
                try:
                    with open(full, 'rt') as stream:
                        for line in stream:
                            line = line.strip()
                            if line.startswith('Exec=0launch '):
                                bits = line.split(' -- ', 1)
                                if ' ' in bits[0]:
                                    uri = bits[0].split(' ', 1)[1]	# 0launch URI -- %u
                                else:
                                    uri = bits[1].split(' ', 1)[0].strip()	# 0launch -- URI %u
                                already_installed[uri] = full
                                break
                        else:
                            logger.info(_("Failed to find Exec line in %s"), full)
                except Exception as ex:
                    # BUGFIX: the format string was truncated ("%(exceptions"),
                    # which would raise ValueError when formatted with the dict
                    # below; also use logger.warning (warn is a deprecated alias).
                    logger.warning(_("Failed to load .desktop file %(filename)s: %(exception)s"),
                                   {'filename': full, 'exception': ex})
    return already_installed
def get_selections(config, options, iface_uri, select_only, download_only, test_callback):
    """Get selections for iface_uri, according to the options passed.
    Will switch to GUI mode if necessary.
    @param options: options from OptionParser
    @param iface_uri: canonical URI of the interface
    @param select_only: return immediately even if the selected versions aren't cached
    @param download_only: wait for stale feeds, and display GUI button as Download, not Run
    @return: the selected versions, or None if the user cancels
    @rtype: L{selections.Selections} | None
    """
    if options.offline:
        config.network_use = model.network_offline

    iface_cache = config.iface_cache

    # Try to load it as a feed. If it is a feed, it'll get cached. If not, it's a
    # selections document and we return immediately.
    maybe_selections = iface_cache.get_feed(iface_uri, selections_ok = True)
    if isinstance(maybe_selections, selections.Selections):
        if not select_only:
            # Fetch any implementations the selections document needs
            # before handing it back.
            blocker = maybe_selections.download_missing(config)
            if blocker:
                logger.info(_("Waiting for selected implementations to be downloaded..."))
                tasks.wait_for_blocker(blocker)
        return maybe_selections

    # Not a selections document: solve for the requested interface.
    r = requirements.Requirements(iface_uri)
    r.parse_options(options)

    return get_selections_for(r, config, options, select_only, download_only, test_callback)
def parse_script_header(stream):
    """If stream is a shell script for an application, return the app details.
    @param stream: the executable file's stream (will seek)
    @type stream: file-like object
    @return: the app details, if any
    @rtype: L{AppScriptInfo} | None
    @since: 1.12"""
    try:
        stream.seek(0)
        # The launcher template is fixed up to the "{app}" placeholder;
        # compare that prefix against the start of the file.
        template_header = _command_template[:_command_template.index("{app}")]
        actual_header = stream.read(len(template_header))
        stream.seek(0)
        if template_header != actual_header:
            return None
        # If it's a launcher script, it should be quite short!
        line = stream.read().split('\n')[1]
    except UnicodeDecodeError as ex:
        # Binary files can't be decoded as text, so they're not scripts.
        logger.info("Not an app script '%s': %s", stream, ex)
        return None

    info = AppScriptInfo()
    info.name = line.split()[3]
    return info
def do_exec_binding(self, binding, iface):
    """Handle an <executable-in-*> binding: create a small launcher for one
    of iface's commands and expose it via $PATH or a named environment variable.
    @param binding: the binding, giving the executable name and command
    @param iface: the interface providing the command"""
    assert iface is not None
    name = binding.name
    # The name becomes a directory/file name and is embedded in shell
    # commands, so reject characters that could escape those contexts.
    if '/' in name or name.startswith('.') or "'" in name:
        raise SafeException("Invalid <executable> name '%s'" % name)
    exec_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog, 'executables', name)
    exec_path = os.path.join(exec_dir, name + ".exe" if os.name == "nt" else name)

    if not self._checked_runenv:
        self._check_runenv()

    if not os.path.exists(exec_path):
        if os.name == "nt":
            # Copy runenv.cli.template to ~/.cache/0install.net/injector/executables/$name/$name
            # NOTE(review): this variant reads the template path from the
            # $ZEROINSTALL_CLI_TEMPLATE environment variable; another copy of
            # this method resolves it relative to the module instead --
            # confirm which is intended.
            import shutil
            shutil.copyfile(os.environ['ZEROINSTALL_CLI_TEMPLATE'], exec_path)
        else:
            # Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
            os.symlink('../../runenv.py', exec_path)
        os.chmod(exec_dir, 0o500)

    if binding.in_path:
        # Prepend so our launcher wins over any same-named binary.
        path = os.environ["PATH"] = exec_dir + os.pathsep + os.environ["PATH"]
        logger.info("PATH=%s", path)
    else:
        os.environ[name] = exec_path
        logger.info("%s=%s", name, exec_path)

    # Record the real command line for the launcher to replay.
    args = self.build_command(iface, binding.command)
    if os.name == "nt":
        os.environ["0install-runenv-file-" + name + ".exe"] = args[0]
        os.environ["0install-runenv-args-" + name + ".exe"] = support.windows_args_escape(args[1:])
    else:
        import json
        os.environ["0install-runenv-" + name] = json.dumps(args)
def _handle_run_queue():
    """Resume the task(s) waiting on the blocker at the head of the run queue.
    @return: True if more triggered blockers remain queued (so the caller
    should invoke us again), False otherwise"""
    global _idle_blocker
    assert _run_queue

    next = _run_queue[0]
    assert next.happened

    if next is _idle_blocker:
        # Since this blocker will never run again, create a
        # new one for future idling.
        _idle_blocker = IdleBlocker("(idle)")
    elif next._zero_lib_tasks:
        logger.info(_("Running %(task)s due to triggering of '%(next)s'"), {'task': next._zero_lib_tasks, 'next': next})
    else:
        logger.info(_("Running %s"), next)

    # Snapshot the current waiters before resuming any of them --
    # presumably so tasks added during _resume wait for the next trigger.
    tasks = frozenset(next._zero_lib_tasks)
    if tasks:
        next.noticed = True

    for task in tasks:
        # Run 'task'.
        task._resume()

    del _run_queue[0]

    if _run_queue:
        return True
    return False
def do_exec_binding(self, binding, iface):
    """Handle an <executable-in-*> binding: create a launcher for one of
    iface's commands and expose it via $PATH or a named environment variable.
    @param binding: the binding, giving the executable name and command
    @param iface: the interface providing the command"""
    assert iface is not None
    name = binding.name
    # The name is used as a filename and inside shell commands, so reject
    # characters that could escape those contexts.
    if '/' in name or name.startswith('.') or "'" in name:
        raise SafeException("Invalid <executable> name '%s'" % name)
    exec_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog, 'executables', name)
    exec_path = os.path.join(exec_dir, name + ".exe" if os.name == "nt" else name)

    if not self._checked_runenv:
        self._check_runenv()

    if not os.path.exists(exec_path):
        if os.name == "nt":
            # Copy runenv.cli.template to ~/.cache/0install.net/injector/executables/$name/$name
            import shutil
            shutil.copyfile(os.path.join(os.path.dirname(__file__), "runenv.cli.template"), exec_path)
        else:
            # Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
            os.symlink('../../runenv.py', exec_path)
        os.chmod(exec_dir, 0o500)

    if binding.in_path:
        # Prepend so our launcher takes precedence over same-named binaries.
        path = os.environ["PATH"] = exec_dir + os.pathsep + os.environ["PATH"]
        logger.info("PATH=%s", path)
    else:
        os.environ[name] = exec_path
        logger.info("%s=%s", name, exec_path)

    # Store the real command line for the launcher to replay at runtime.
    args = self.build_command(iface, binding.command)
    if os.name == "nt":
        os.environ["0install-runenv-file-" + name + ".exe"] = args[0]
        os.environ["0install-runenv-args-" + name + ".exe"] = support.windows_args_escape(args[1:])
    else:
        import json
        os.environ["0install-runenv-" + name] = json.dumps(args)
def trigger(self, exception=None):
    """The event has happened. Note that this cannot be undone;
    instead, create a new Blocker to handle the next occurance of
    the event.
    @param exception: exception to raise in waiting tasks
    @type exception: (Exception, traceback)"""
    if self.happened: return	# Already triggered
    self.happened = True
    self.exception = exception
    self.exception_read = False
    #assert self not in _run_queue	# Slow
    if not _run_queue:
        # First triggered blocker: ask the event loop to process the queue.
        _schedule()
    _run_queue.append(self)

    if exception:
        assert isinstance(exception, tuple), exception
        if not self._zero_lib_tasks:
            # Nothing is waiting; log so the error isn't silently lost.
            logger.info(_("Exception from '%s', but nothing is waiting for it"), self)
        import traceback
        logger.debug(''.join(traceback.format_exception(type(exception[0]), exception[0], exception[1])))
def installed_fixup(self, impl): """@type impl: L{zeroinstall.injector.model.DistributionImplementation}""" # Hack: If we added any Java implementations, find the corresponding JAVA_HOME... if impl.id.startswith('package:deb:openjdk-6-jre:'): java_version = '6-openjdk' elif impl.id.startswith('package:deb:openjdk-7-jre:'): java_version = '7-openjdk' else: return Distribution.installed_fixup(self, impl) # super if impl.machine == 'x86_64': java_arch = 'amd64' else: java_arch = impl.machine java_bin = '/usr/lib/jvm/java-%s-%s/jre/bin/java' % (java_version, java_arch) if not os.path.exists(java_bin): # Try without the arch... java_bin = '/usr/lib/jvm/java-%s/jre/bin/java' % java_version if not os.path.exists(java_bin): logger.info("Java binary not found (%s)", java_bin) if impl.main is None: java_bin = '/usr/bin/java' else: return impl.main = java_bin
def get_inputs():
    """Yield the path of every file whose contents contributed to the
    current selections ('sels', from the enclosing scope): local feeds,
    cached feeds and the relevant configuration files.
    @raise IOError: if a required cached feed is missing"""
    for sel in sels.selections.values():
        logger.info("Checking %s", sel.feed)
        if sel.feed.startswith('distribution:'):
            # If the package has changed version, we'll detect that below
            # with get_unavailable_selections.
            pass
        elif os.path.isabs(sel.feed):
            # Local feed
            yield sel.feed
        else:
            # Cached feed
            cached = basedir.load_first_cache(namespaces.config_site, 'interfaces', model.escape(sel.feed))
            if cached:
                yield cached
            else:
                raise IOError("Input %s missing; update" % sel.feed)

        # Per-feed configuration
        yield basedir.load_first_config(namespaces.config_site, namespaces.config_prog, 'interfaces', model._pretty_escape(sel.interface))

    # Global configuration
    yield basedir.load_first_config(namespaces.config_site, namespaces.config_prog, 'global')
def get_selections(self, snapshot_date=None, may_update=False, use_gui=None):
    """Load the selections.
    If may_update is True then the returned selections will be cached and available.
    @param snapshot_date: get a historical snapshot
    @type snapshot_date: (as returned by L{get_history}) | None
    @param may_update: whether to check for updates
    @type may_update: bool
    @param use_gui: whether to use the GUI for foreground updates
    @type use_gui: bool | None (never/always/if possible)
    @return: the selections
    @rtype: L{selections.Selections}"""
    if snapshot_date:
        assert may_update is False, "Can't update a snapshot!"
        filename = 'selections-' + snapshot_date + '.xml'
    else:
        filename = 'selections.xml'
    sels_file = os.path.join(self.path, filename)

    sels = None
    try:
        with open(sels_file, 'rb') as stream:
            sels = selections.Selections(qdom.parse(stream))
    except IOError as ex:
        # A missing file is acceptable when we're allowed to update,
        # since the update below will create fresh selections.
        if not (may_update and ex.errno == errno.ENOENT):
            raise
        logger.info("App selections missing: %s", ex)

    if may_update:
        sels = self._check_for_updates(sels, use_gui)

    return sels
def _0install_man(config, command):
    """Show the man page for the program behind a 0install app/alias script.
    Resolves 'command' to its underlying selections and execs man on the real
    binary; falls back to a plain `man command` if the script can't be read.
    @type command: str"""
    from zeroinstall import apps, alias, helpers
    path = support.find_in_path(command)
    if not path:
        return None
    try:
        with open(path, "rt") as stream:
            app_info = apps.parse_script_header(stream)
            if app_info:
                # An app launcher script: use its stored selections.
                app = config.app_mgr.lookup_app(app_info.name)
                sels = app.get_selections()
                main = None
            else:
                alias_info = alias.parse_script_header(stream)
                if alias_info is None:
                    return None
                # An alias script: make sure the feed is cached first.
                sels = helpers.ensure_cached(alias_info.uri, alias_info.command, config=config)
                if not sels:
                    # Cancelled by user
                    sys.exit(1)
                main = alias_info.main
    except IOError as ex:
        # Couldn't read the script; just show the ordinary man page.
        logger.info("%s: falling back to `man %s`", ex, command)
        os.execlp("man", "man", command)
        sys.exit(1)

    helpers.exec_man(config.stores, sels, main, fallback_name=command)
    assert 0	# exec_man never returns
def installed_fixup(self, impl):
    """Give installed OpenJDK Debian packages a usable "run" command
    pointing at the distribution's JVM binary."""
    # Hack: If we added any Java implementations, find the corresponding JAVA_HOME...
    known = {
        'package:deb:openjdk-6-jre:': '6-openjdk',
        'package:deb:openjdk-7-jre:': '7-openjdk',
    }
    java_version = None
    for prefix, candidate in known.items():
        if impl.id.startswith(prefix):
            java_version = candidate
            break
    if java_version is None:
        return		# Not an OpenJDK package

    # Debian's JVM directories use 'amd64' where uname reports 'x86_64'.
    java_arch = 'amd64' if impl.machine == 'x86_64' else impl.machine

    java_bin = '/usr/lib/jvm/java-%s-%s/jre/bin/java' % (java_version, java_arch)
    if not os.path.exists(java_bin):
        # Try without the arch...
        java_bin = '/usr/lib/jvm/java-%s/jre/bin/java' % java_version
        if not os.path.exists(java_bin):
            logger.info("Java binary not found (%s)", java_bin)
            if impl.main is None:
                java_bin = '/usr/bin/java'
            else:
                return

    impl.commands["run"] = model.Command(
        qdom.Element(namespaces.XMLNS_IFACE, 'command',
                     {'path': java_bin, 'name': 'run'}), None)
def installed_fixup(self, impl):
    """Called when an installed package is added (after L{fixup}), or when installation
    completes. This is useful to fix up the main value.
    The default implementation checks that main exists, and searches L{Distribution.system_paths} for
    it if not.
    @type impl: L{DistributionImplementation}
    @since: 1.11"""
    main = impl.main
    if not main:
        return		# No main to check

    if os.path.isabs(main) and os.path.exists(main):
        return		# Already a valid absolute path

    prog = os.path.basename(main)
    if os.name == "nt" and not prog.endswith('.exe'):
        prog += '.exe'

    # Search each configured system directory for the binary.
    for directory in self.system_paths:
        candidate = os.path.join(directory, prog)
        if os.path.isfile(candidate):
            logger.info("Found %s by searching system paths", candidate)
            impl.main = candidate
            return
    else:
        logger.info("Binary '%s' not found in any system path (checked %s)", prog, self.system_paths)
def discover_existing_apps():
    """Search through the configured XDG datadirs looking for .desktop files
    created by L{add_to_menu}.
    @return: a map from application URIs to .desktop filenames"""
    already_installed = {}
    for d in basedir.load_data_paths('applications'):
        for desktop_file in os.listdir(d):
            if desktop_file.startswith('zeroinstall-') and desktop_file.endswith('.desktop'):
                full = os.path.join(d, desktop_file)
                try:
                    with open(full, 'rt') as stream:
                        for line in stream:
                            line = line.strip()
                            if line.startswith('Exec=0launch '):
                                bits = line.split(' -- ', 1)
                                if ' ' in bits[0]:
                                    uri = bits[0].split(' ', 1)[1]	# 0launch URI -- %u
                                else:
                                    uri = bits[1].split(' ', 1)[0].strip()	# 0launch -- URI %u
                                already_installed[uri] = full
                                break
                        else:
                            logger.info(_("Failed to find Exec line in %s"), full)
                except Exception as ex:
                    # BUGFIX: the original format string was truncated
                    # ("%(exceptions") and would raise ValueError when formatted
                    # with the mapping; logger.warn is also a deprecated alias.
                    logger.warning(_("Failed to load .desktop file %(filename)s: %(exception)s"),
                                   {'filename': full, 'exception': ex})
    return already_installed
def update_feed_from_network(self, feed_url, new_xml, modified_time, dry_run = False):
    """Update a cached feed.
    Called by L{update_feed_if_trusted} if we trust this data.
    After a successful update, L{writer} is used to update the feed's last_checked time.
    @param feed_url: the feed being updated
    @type feed_url: L{model.Interface}
    @param new_xml: the downloaded replacement feed document
    @type new_xml: str
    @param modified_time: the timestamp of the oldest trusted signature
    (used as an approximation to the feed's modification time)
    @type modified_time: long
    @type dry_run: bool
    @raises ReplayAttack: if modified_time is older than the currently cached time
    @since: 0.48"""
    logger.debug(_("Updating '%(interface)s' from network; modified at %(time)s") % {'interface': feed_url, 'time': _pretty_time(modified_time)})

    self._import_new_feed(feed_url, new_xml, modified_time, dry_run)

    if dry_run: return		# Don't touch the last_checked stamp in a dry run

    feed = self.get_feed(feed_url)

    # Record that we have just checked this feed.
    from . import writer
    feed.last_checked = int(time.time())
    writer.save_feed(feed)

    logger.info(_("Updated feed cache entry for %(interface)s (modified %(time)s)"), {'interface': feed.get_name(), 'time': _pretty_time(modified_time)})
def handle(config, options, args):
    """Query the mirror's search service for the given terms and print
    each matching feed with its name, summary and score.
    @raise UsageError: if no search terms were given"""
    if len(args) == 0:
        raise UsageError()

    url = config.mirror + '/search/?q=' + quote(' '.join(args))
    logger.info("Fetching %s...", url)
    root = qdom.parse(urllib2.urlopen(url))
    assert root.name == 'results'

    first = True
    for child in root.childNodes:
        if child.name != 'result':
            continue
        # Blank line between results (but not before the first).
        if not first:
            print()
        first = False

        print(child.attrs['uri'])
        score = child.attrs['score']
        details = dict((node.name, node.content) for node in child.childNodes)
        print(" {name} - {summary} [{score}%]".format(
            name = child.attrs['name'],
            summary = details.get('summary', ''),
            score = score))
def handle(config, options, args):
    """Import feeds from local, GPG-signed XML files.
    Each file's signature is checked and, if the keys are trusted (possibly
    after confirming with the user), the feed is added to the cache.
    @raise UsageError: if no files were given
    @raise SafeException: if a file is missing, lacks a uri attribute,
    or no signing key is trusted"""
    if not args:
        raise UsageError()

    for x in args:
        if not os.path.isfile(x):
            raise SafeException(_("File '%s' does not exist") % x)
        logger.info(_("Importing from file '%s'"), x)
        with open(x, 'rb') as signed_data:
            data, sigs = gpg.check_stream(signed_data)
            doc = minidom.parseString(data.read())
            uri = doc.documentElement.getAttribute('uri')
            if not uri:
                raise SafeException(_("Missing 'uri' attribute on root element in '%s'") % x)
            logger.info(_("Importing information about interface %s"), uri)
            signed_data.seek(0)

            pending = PendingFeed(uri, signed_data)

            def run():
                # Fetch any keys we don't have yet, then try to accept the
                # feed; if the keys aren't trusted yet, confirm and retry.
                keys_downloaded = tasks.Task(pending.download_keys(config.fetcher), "download keys")
                yield keys_downloaded.finished
                tasks.check(keys_downloaded.finished)
                if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
                    blocker = config.trust_mgr.confirm_keys(pending)
                    if blocker:
                        yield blocker
                        tasks.check(blocker)
                    if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
                        raise SafeException(_("No signing keys trusted; not importing"))

            task = tasks.Task(run(), "import feed")

            # Block until the import (and any key confirmation) completes.
            tasks.wait_for_blocker(task.finished)
def handle(config, options, args):
    """Search the mirror for feeds matching the given terms and print them.
    @raise UsageError: if no search terms were given"""
    if len(args) == 0:
        raise UsageError()

    url = config.mirror + '/search/?q=' + quote(' '.join(args))
    logger.info("Fetching %s...", url)
    root = qdom.parse(urllib2.urlopen(url))
    assert root.name == 'results'

    first = True
    for child in root.childNodes:
        if child.name != 'result': continue
        if first:
            first = False
        else:
            print()		# Blank line between results
        print(child.attrs['uri'])
        score = child.attrs['score']
        # Collect the child elements (e.g. summary) into a dict.
        details = {}
        for detail in child.childNodes:
            details[detail.name] = detail.content
        print(" {name} - {summary} [{score}%]".format(
            name=child.attrs['name'],
            summary=details.get('summary', ''),
            score=score))
def load_config(handler = None):
    """Build a L{Config}, applying any settings from the user's
    configuration file on top of the built-in defaults.
    @type handler: L{zeroinstall.injector.handler.Handler} | None
    @rtype: L{Config}"""
    config = Config(handler)

    # Built-in defaults, used when no config file exists or a key is absent.
    defaults = [
        ('help_with_testing', 'False'),
        ('freshness', str(60 * 60 * 24 * 30)),	# One month
        ('network_use', 'full'),
        ('auto_approve_keys', 'True'),
    ]
    parser = ConfigParser.RawConfigParser()
    parser.add_section('global')
    for key, value in defaults:
        parser.set('global', key, value)

    path = basedir.load_first_config(config_site, config_prog, 'global')
    if path:
        logger.info("Loading configuration from %s", path)
        try:
            parser.read(path)
        except Exception as ex:
            # Keep running with the defaults rather than refusing to start.
            logger.warning(_("Error loading config: %s"), str(ex) or repr(ex))

    config.help_with_testing = parser.getboolean('global', 'help_with_testing')
    config.network_use = parser.get('global', 'network_use')
    config.freshness = int(parser.get('global', 'freshness'))
    config.auto_approve_keys = parser.getboolean('global', 'auto_approve_keys')

    assert config.network_use in network_levels, config.network_use

    return config
def add_dir_to_cache(self, required_digest, path, try_helper=False):
    """Copy the contents of path to the cache.
    @param required_digest: the expected digest
    @type required_digest: str
    @param path: the root of the tree to copy
    @type path: str
    @param try_helper: attempt to use privileged helper before user cache (since 0.26)
    @type try_helper: bool
    @raise BadDigest: if the contents don't match the given digest."""
    if self.lookup(required_digest):
        logger.info(_("Not adding %s as it already exists!"), required_digest)
        return

    tmp = self.get_tmp_dir_for(required_digest)
    try:
        _copytree2(path, tmp)
        self.check_manifest_and_rename(required_digest, tmp, try_helper=try_helper)
    except:
        # Clean up the partial copy before re-raising.
        # FIX: logger.warning -- logger.warn is a deprecated alias.
        logger.warning(_("Error importing directory."))
        logger.warning(_("Deleting %s"), tmp)
        support.ro_rmtree(tmp)
        raise
def check_readable(feed_url, source):
    """Test whether a feed file is valid.
    @param feed_url: the feed's expected URL
    @type feed_url: str
    @param source: the name of the file to test
    @type source: str
    @return: the modification time in src (usually just the mtime of the file)
    @rtype: int
    @raise InvalidInterface: If the source's syntax is incorrect"""
    try:
        feed = load_feed(source, local = False)

        if feed.url == feed_url:
            return feed.last_modified
        # The document parsed, but claims to be a different feed.
        raise InvalidInterface(_("Incorrect URL used for feed.\n\n"
                                 "%(feed_url)s is given in the feed, but\n"
                                 "%(interface_uri)s was requested") %
                               {'feed_url': feed.url, 'interface_uri': feed_url})
    except InvalidInterface as ex:
        logger.info(_("Error loading feed:\n"
                      "Interface URI: %(uri)s\n"
                      "Local file: %(source)s\n"
                      "%(exception)s") %
                    {'uri': feed_url, 'source': source, 'exception': ex})
        # Re-raise with a shorter message that includes the URI.
        raise InvalidInterface(_("Error loading feed '%(uri)s':\n\n%(exception)s") % {'uri': feed_url, 'exception': ex})
def installed_fixup(self, impl):
    """Attach a "run" command to installed OpenJDK Debian packages,
    pointing at the JVM binary installed by the distribution."""
    # Hack: If we added any Java implementations, find the corresponding JAVA_HOME...
    impl_id = impl.id
    if impl_id.startswith('package:deb:openjdk-6-jre:'):
        java_version = '6-openjdk'
    elif impl_id.startswith('package:deb:openjdk-7-jre:'):
        java_version = '7-openjdk'
    else:
        return		# Not a package we know about

    # Debian calls the architecture 'amd64' rather than 'x86_64'.
    java_arch = {'x86_64': 'amd64'}.get(impl.machine, impl.machine)

    candidates = [
        '/usr/lib/jvm/java-%s-%s/jre/bin/java' % (java_version, java_arch),
        '/usr/lib/jvm/java-%s/jre/bin/java' % java_version,	# without the arch
    ]
    for java_bin in candidates:
        if os.path.exists(java_bin):
            break
    else:
        logger.info("Java binary not found (%s)", java_bin)
        if impl.main is not None:
            return		# Keep the existing main
        java_bin = '/usr/bin/java'

    impl.commands["run"] = model.Command(
        qdom.Element(namespaces.XMLNS_IFACE, 'command',
                     {'path': java_bin, 'name': 'run'}), None)
def notify(self, title, message, timeout = 0, actions = None):
    """Send a D-BUS notification message if possible. If there is no
    notification service available, log the message instead.
    @type title: str
    @type message: str
    @type timeout: int
    @param actions: action identifiers to offer on the notification
    @type actions: [str] | None"""
    # FIX: replaced the mutable default argument (actions = []) with the
    # None sentinel; callers passing a list explicitly are unaffected.
    if actions is None:
        actions = []
    if not self.notification_service:
        logger.info('%s: %s', title, message)
        return None

    LOW = 0
    NORMAL = 1
    #CRITICAL = 2

    import dbus.types

    hints = {}
    if actions:
        hints['urgency'] = dbus.types.Byte(NORMAL)
    else:
        hints['urgency'] = dbus.types.Byte(LOW)

    return self.notification_service.Notify('Zero Install',
        0,		# replaces_id,
        '',		# icon
        _escape_xml(title),
        _escape_xml(message),
        actions,
        hints,
        timeout * 1000)
def installed_fixup(self, impl):
    """Fix up the main executable for installed OpenJDK RPM packages.
    OpenSUSE names these packages with '_' where Fedora uses '.', so the id
    is normalised before matching.  Non-Java packages get the default
    L{Distribution.installed_fixup} behaviour.
    @type impl: L{zeroinstall.injector.model.DistributionImplementation}"""
    # OpenSUSE uses _, Fedora uses .
    impl_id = impl.id.replace('_', '.')
    # Hack: If we added any Java implementations, find the corresponding JAVA_HOME...
    if impl_id.startswith('package:rpm:java-1.6.0-openjdk:'):
        java_version = '1.6.0-openjdk'
    elif impl_id.startswith('package:rpm:java-1.7.0-openjdk:'):
        java_version = '1.7.0-openjdk'
    else:
        return Distribution.installed_fixup(self, impl)	# super

    # On Fedora, unlike Debian, the arch is x86_64, not amd64
    java_bin = '/usr/lib/jvm/jre-%s.%s/bin/java' % (java_version, impl.machine)
    if not os.path.exists(java_bin):
        # Try without the arch...
        java_bin = '/usr/lib/jvm/jre-%s/bin/java' % java_version
        if not os.path.exists(java_bin):
            logger.info("Java binary not found (%s)", java_bin)
            if impl.main is None:
                # No main at all; fall back to whatever is on $PATH.
                java_bin = '/usr/bin/java'
            else:
                return		# Keep the existing main

    impl.main = java_bin
def _0install_man(config, command):
    """Display the man page for the program behind a 0install app/alias
    launcher script, falling back to plain `man` if it can't be resolved.
    @type command: str"""
    from zeroinstall import apps, alias, helpers

    path = support.find_in_path(command)
    if not path:
        return None

    try:
        with open(path, 'rt') as stream:
            app_info = apps.parse_script_header(stream)
            if app_info:
                # An app launcher: use the app's stored selections.
                sels = config.app_mgr.lookup_app(app_info.name).get_selections()
                main = None
            else:
                alias_info = alias.parse_script_header(stream)
                if alias_info is None:
                    return None
                # An alias script: make sure the feed is cached first.
                sels = helpers.ensure_cached(alias_info.uri, alias_info.command, config=config)
                if not sels:
                    # Cancelled by user
                    sys.exit(1)
                main = alias_info.main
    except IOError as ex:
        logger.info("%s: falling back to `man %s`", ex, command)
        os.execlp('man', 'man', command)
        sys.exit(1)

    helpers.exec_man(config.stores, sels, main, fallback_name=command)
    assert 0
def installed_fixup(self, impl):
    """Attach a "run" command to installed OpenJDK RPM packages, pointing
    at the distribution's JVM binary."""
    # OpenSUSE uses _, Fedora uses .
    normalised_id = impl.id.replace('_', '.')

    # Hack: If we added any Java implementations, find the corresponding JAVA_HOME...
    for version in ('1.6.0-openjdk', '1.7.0-openjdk'):
        if normalised_id.startswith('package:rpm:java-%s:' % version):
            java_version = version
            break
    else:
        return		# Not an OpenJDK package

    # On Fedora, unlike Debian, the arch is x86_64, not amd64
    candidates = [
        '/usr/lib/jvm/jre-%s.%s/bin/java' % (java_version, impl.machine),
        '/usr/lib/jvm/jre-%s/bin/java' % java_version,	# without the arch
    ]
    for java_bin in candidates:
        if os.path.exists(java_bin):
            break
    else:
        logger.info("Java binary not found (%s)", java_bin)
        if impl.main is not None:
            return		# Keep the existing main
        java_bin = '/usr/bin/java'

    impl.commands["run"] = model.Command(
        qdom.Element(namespaces.XMLNS_IFACE, 'command',
                     {'path': java_bin, 'name': 'run'}), None)
def get_selections(self, snapshot_date = None, may_update = False, use_gui = None):
    """Load the selections.
    If may_update is True then the returned selections will be cached and available.
    @param snapshot_date: get a historical snapshot
    @type snapshot_date: (as returned by L{get_history}) | None
    @param may_update: whether to check for updates
    @type may_update: bool
    @param use_gui: whether to use the GUI for foreground updates
    @type use_gui: bool | None (never/always/if possible)
    @return: the selections
    @rtype: L{selections.Selections}"""
    if snapshot_date:
        assert may_update is False, "Can't update a snapshot!"
        sels_file = os.path.join(self.path, 'selections-' + snapshot_date + '.xml')
    else:
        sels_file = os.path.join(self.path, 'selections.xml')

    try:
        with open(sels_file, 'rb') as stream:
            sels = selections.Selections(qdom.parse(stream))
    except IOError as ex:
        # A missing selections file is fine if we may update:
        # _check_for_updates will create fresh selections.
        if may_update and ex.errno == errno.ENOENT:
            logger.info("App selections missing: %s", ex)
            sels = None
        else:
            raise

    if may_update:
        sels = self._check_for_updates(sels, use_gui)

    return sels
def notify(self, title, message, timeout=0, actions=[]):
    """Send a D-BUS notification message if possible. If there is no
    notification service available, log the message instead.
    @type title: str
    @type message: str
    @type timeout: int
    @param actions: action identifiers to offer
    (NOTE(review): mutable default argument -- harmless here since it is
    never mutated, but worth changing to a None sentinel)"""
    if not self.notification_service:
        logger.info('%s: %s', title, message)
        return None

    LOW = 0
    NORMAL = 1
    #CRITICAL = 2

    import dbus.types

    hints = {}
    # Presumably actionable notifications matter more, hence the higher urgency.
    if actions:
        hints['urgency'] = dbus.types.Byte(NORMAL)
    else:
        hints['urgency'] = dbus.types.Byte(LOW)

    return self.notification_service.Notify(
        'Zero Install',
        0,	# replaces_id,
        '',	# icon
        _escape_xml(title),
        _escape_xml(message),
        actions,
        hints,
        timeout * 1000)
def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0, try_helper = False, dry_run = False):
    """Unpack an archive into the cache, verifying its digest.
    @type required_digest: str
    @type data: file
    @type url: str
    @type extract: str | None
    @type type: str | None
    @type start_offset: int
    @type try_helper: bool
    @type dry_run: bool"""
    from . import unpack

    if self.lookup(required_digest):
        logger.info(_("Not adding %s as it already exists!"), required_digest)
        return

    tmp = self.get_tmp_dir_for(required_digest)
    try:
        unpack.unpack_archive(url, data, tmp, extract, type = type, start_offset = start_offset)
    except:
        # Unpacking failed part-way; remove the partial extraction.
        import shutil
        shutil.rmtree(tmp)
        raise

    try:
        self.check_manifest_and_rename(required_digest, tmp, extract, try_helper = try_helper, dry_run = dry_run)
    except Exception:
        #warn(_("Leaving extracted directory as %s"), tmp)
        support.ro_rmtree(tmp)
        raise
def get_inputs():
    """Yield the path of every file that contributed to the current
    selections ('sels', from the enclosing scope): local feeds, cached
    feeds and the relevant configuration files.
    @raise IOError: if a required cached feed is missing"""
    for sel in sels.selections.values():
        logger.info("Checking %s", sel.feed)
        if sel.feed.startswith('distribution:'):
            # If the package has changed version, we'll detect that below
            # with get_unavailable_selections.
            pass
        elif os.path.isabs(sel.feed):
            # Local feed
            yield sel.feed
        else:
            # Cached feed
            cached = basedir.load_first_cache(namespaces.config_site, 'interfaces', model.escape(sel.feed))
            if cached:
                yield cached
            else:
                raise IOError("Input %s missing; update" % sel.feed)

        # Per-feed configuration
        yield basedir.load_first_config(namespaces.config_site, namespaces.config_prog, 'interfaces', model._pretty_escape(sel.interface))

    # Global configuration
    yield basedir.load_first_config(namespaces.config_site, namespaces.config_prog, 'global')
def check_readable(feed_url, source):
    """Test whether a feed file is valid.
    @param feed_url: the feed's expected URL
    @type feed_url: str
    @param source: the name of the file to test
    @type source: str
    @return: the modification time in src (usually just the mtime of the file)
    @rtype: int
    @raise InvalidInterface: If the source's syntax is incorrect"""
    try:
        feed = load_feed(source, local=False)

        if feed.url != feed_url:
            # The document parsed, but claims to be a different feed.
            raise InvalidInterface(
                _(
                    "Incorrect URL used for feed.\n\n"
                    "%(feed_url)s is given in the feed, but\n"
                    "%(interface_uri)s was requested"
                )
                % {"feed_url": feed.url, "interface_uri": feed_url}
            )

        return feed.last_modified
    except InvalidInterface as ex:
        logger.info(
            _("Error loading feed:\n"
              "Interface URI: %(uri)s\n"
              "Local file: %(source)s\n"
              "%(exception)s")
            % {"uri": feed_url, "source": source, "exception": ex}
        )
        # Re-raise with a shorter message that includes the URI.
        raise InvalidInterface(_("Error loading feed '%(uri)s':\n\n%(exception)s") % {"uri": feed_url, "exception": ex})
def get_selections(config, options, iface_uri, select_only, download_only, test_callback):
    """Get selections for iface_uri, according to the options passed.
    Will switch to GUI mode if necessary.
    @param options: options from OptionParser
    @param iface_uri: canonical URI of the interface
    @param select_only: return immediately even if the selected versions aren't cached
    @param download_only: wait for stale feeds, and display GUI button as Download, not Run
    @return: the selected versions, or None if the user cancels
    @rtype: L{selections.Selections} | None"""
    if options.offline:
        config.network_use = model.network_offline

    # Try to load it as a feed. If it is a feed, it'll get cached. If not, it's a
    # selections document and we return immediately.
    maybe_selections = config.iface_cache.get_feed(iface_uri, selections_ok=True)
    if isinstance(maybe_selections, selections.Selections):
        if select_only:
            return maybe_selections
        # Fetch any implementations the document needs before returning it.
        blocker = maybe_selections.download_missing(config)
        if blocker:
            logger.info(_("Waiting for selected implementations to be downloaded..."))
            tasks.wait_for_blocker(blocker)
        return maybe_selections

    # Not a selections document: solve for the requested interface.
    r = requirements.Requirements(iface_uri)
    r.parse_options(options)
    return get_selections_for(r, config, options, select_only, download_only, test_callback)
def download_icon(self, interface, force=False):
    """Download an icon for this interface and add it to the icon cache.
    If the interface has no icon do nothing.
    @param force: unused in this body -- presumably kept for API
    compatibility; confirm with callers
    @return: the task doing the import, or None
    @rtype: L{tasks.Task}"""
    logger.debug("download_icon %(interface)s", {"interface": interface})

    modification_time = None
    existing_icon = self.config.iface_cache.get_icon_path(interface)
    if existing_icon:
        # Send the cached icon's mtime so the server can reply "unmodified".
        file_mtime = os.stat(existing_icon).st_mtime
        from email.utils import formatdate
        modification_time = formatdate(timeval=file_mtime, localtime=False, usegmt=True)

    feed = self.config.iface_cache.get_feed(interface.uri)
    if feed is None:
        return None

    # Find a suitable icon to download
    for icon in feed.get_metadata(XMLNS_IFACE, "icon"):
        type = icon.getAttribute("type")
        if type != "image/png":
            logger.debug(_("Skipping non-PNG icon"))
            continue
        source = icon.getAttribute("href")
        if source:
            break
        logger.warn(_('Missing "href" attribute on <icon> in %s'), interface)
    else:
        logger.info(_("No PNG icons found in %s"), interface)
        return

    dl = self.download_url(source, hint=interface, modification_time=modification_time)

    @tasks.async
    def download_and_add_icon():
        stream = dl.tempfile
        try:
            yield dl.downloaded
            tasks.check(dl.downloaded)
            if dl.unmodified:
                return		# Cached icon is still current
            stream.seek(0)

            import shutil, tempfile
            icons_cache = basedir.save_cache_path(config_site, "interface_icons")

            # Write to a temporary file first, then rename into place so a
            # partial download never replaces a good icon.
            tmp_file = tempfile.NamedTemporaryFile(dir=icons_cache, delete=False)
            shutil.copyfileobj(stream, tmp_file)
            tmp_file.close()

            icon_file = os.path.join(icons_cache, escape(interface.uri))
            portable_rename(tmp_file.name, icon_file)
        finally:
            stream.close()

    return download_and_add_icon()
def check_manifest_and_rename(self, required_digest, tmp, extract = None, try_helper = False, dry_run = False):
    """Check that tmp[/extract] has the required_digest.
    On success, rename the checked directory to the digest, and
    make the whole tree read-only.
    @type required_digest: str
    @type tmp: str
    @type extract: str | None
    @param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
    @type try_helper: bool
    @param dry_run: just print what we would do to stdout (and delete tmp)
    @type dry_run: bool
    @raise BadDigest: if the input directory doesn't match the given digest"""
    if extract:
        extracted = os.path.join(tmp, extract)
        if not os.path.isdir(extracted):
            raise Exception(_('Directory %s not found in archive') % extract)
    else:
        extracted = tmp

    from . import manifest

    # Normalise permissions before hashing, so the digest is reproducible.
    manifest.fixup_permissions(extracted)

    alg, required_value = manifest.splitID(required_digest)
    actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
    if actual_digest != required_digest:
        raise BadDigest(_('Incorrect manifest -- archive is corrupted.\n'
                'Required digest: %(required_digest)s\n'
                'Actual digest: %(actual_digest)s\n') %
                {'required_digest': required_digest, 'actual_digest': actual_digest})

    if try_helper:
        # The privileged helper may refuse or be unavailable; if so, fall
        # through to the user store below.
        if self._add_with_helper(required_digest, extracted, dry_run = dry_run):
            support.ro_rmtree(tmp)
            return
        logger.info(_("Can't add to system store. Trying user store instead."))

    logger.info(_("Caching new implementation (digest %s) in %s"), required_digest, self.dir)

    final_name = os.path.join(self.dir, required_digest)
    if os.path.isdir(final_name):
        logger.warning(_("Item %s already stored.") % final_name)  # not really an error
        return

    if dry_run:
        print(_("[dry-run] would store implementation as {path}").format(path = final_name))
        # Remember the digest so later dry-run queries see it as "stored".
        self.dry_run_names.add(required_digest)
        support.ro_rmtree(tmp)
        return
    else:
        # If we just want a subdirectory then the rename will change
        # extracted/.. and so we'll need write permission on 'extracted'
        os.chmod(extracted, 0o755)
        os.rename(extracted, final_name)
        os.chmod(final_name, 0o555)

        if extract:
            os.rmdir(tmp)
def check_manifest_and_rename(self, required_digest, tmp, extract=None, try_helper=False):
    """Check that tmp[/extract] has the required_digest.
    On success, rename the checked directory to the digest, and
    make the whole tree read-only.
    @type required_digest: str
    @type tmp: str
    @type extract: str | None
    @param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
    @type try_helper: bool
    @raise BadDigest: if the input directory doesn't match the given digest"""
    if extract:
        extracted = os.path.join(tmp, extract)
        if not os.path.isdir(extracted):
            raise Exception(
                _('Directory %s not found in archive') % extract)
    else:
        extracted = tmp

    from . import manifest

    # Normalise permissions before hashing, so the digest is reproducible.
    manifest.fixup_permissions(extracted)

    alg, required_value = manifest.splitID(required_digest)
    actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
    if actual_digest != required_digest:
        raise BadDigest(
            _('Incorrect manifest -- archive is corrupted.\n'
              'Required digest: %(required_digest)s\n'
              'Actual digest: %(actual_digest)s\n') % {
                  'required_digest': required_digest,
                  'actual_digest': actual_digest
              })

    if try_helper:
        # The privileged helper may refuse or be unavailable; if so, fall
        # through to the user store below.
        if self._add_with_helper(required_digest, extracted):
            support.ro_rmtree(tmp)
            return
        logger.info(
            _("Can't add to system store. Trying user store instead."))

    logger.info(_("Caching new implementation (digest %s) in %s"),
                required_digest, self.dir)

    final_name = os.path.join(self.dir, required_digest)
    if os.path.isdir(final_name):
        # Fix: the implementation is already present (e.g. stored by a
        # concurrent import), which is success, not a failure. The old code
        # raised here despite its own "not really an error" comment; the
        # dry-run variant of this method already warns and returns instead.
        logger.warning(_("Item %s already stored.") % final_name)
        return

    # If we just want a subdirectory then the rename will change
    # extracted/.. and so we'll need write permission on 'extracted'
    os.chmod(extracted, 0o755)
    os.rename(extracted, final_name)
    os.chmod(final_name, 0o555)

    if extract:
        os.rmdir(tmp)
def download_keys(self, fetcher, feed_hint=None, key_mirror=None):
    """Download any required GPG keys not already on our keyring.
    When all downloads are done (successful or otherwise), add any new
    keys to the keyring, L{recheck}.
    Generator driven by the tasks framework: yields lists of blockers
    while downloads are still in progress.
    @param fetcher: fetcher to manage the download (was Handler before version 1.5)
    @type fetcher: L{fetch.Fetcher}
    @param key_mirror: URL of directory containing keys, or None to use feed's directory
    @type key_mirror: str
    @rtype: [L{zeroinstall.support.tasks.Blocker}]"""
    downloads = {}
    blockers = []
    # Start one download per signature whose key we don't yet have.
    for x in self.sigs:
        key_id = x.need_key()
        if key_id:
            try:
                import urlparse
            except ImportError:
                from urllib import parse as urlparse  # Python 3
            key_url = urlparse.urljoin(key_mirror or self.url, "%s.gpg" % key_id)
            logger.info(_("Fetching key from %s"), key_url)
            dl = fetcher.download_url(key_url, hint=feed_hint)
            downloads[dl.downloaded] = (dl, dl.tempfile)
            blockers.append(dl.downloaded)

    exception = None
    any_success = False

    from zeroinstall.support import tasks

    # Poll until every download has either imported its key or failed.
    while blockers:
        yield blockers

        old_blockers = blockers
        blockers = []

        for b in old_blockers:
            dl, stream = downloads[b]
            try:
                tasks.check(b)
                if b.happened:
                    stream.seek(0)
                    self._downloaded_key(stream)
                    any_success = True
                    stream.close()
                else:
                    # Still in progress; keep waiting on it.
                    blockers.append(b)
            except Exception:
                # Remember the failure (with traceback) but keep trying the others.
                _type, exception, tb = sys.exc_info()
                logger.warning(
                    _("Failed to import key for '%(url)s': %(exception)s"),
                    {"url": self.url, "exception": str(exception)},
                )
                stream.close()

    # Only raise if *no* key could be imported at all.
    if exception and not any_success:
        raise_with_traceback(exception, tb)

    self.recheck()
def download_icon(self, interface, force = False): """Download an icon for this interface and add it to the icon cache. If the interface has no icon do nothing. @type interface: L{zeroinstall.injector.model.Interface} @type force: bool @return: the task doing the import, or None @rtype: L{tasks.Task}""" logger.debug("download_icon %(interface)s", {'interface': interface}) modification_time = None existing_icon = self.config.iface_cache.get_icon_path(interface) if existing_icon: file_mtime = os.stat(existing_icon).st_mtime from email.utils import formatdate modification_time = formatdate(timeval = file_mtime, localtime = False, usegmt = True) feed = self.config.iface_cache.get_feed(interface.uri) if feed is None: return None # Find a suitable icon to download for icon in feed.get_metadata(XMLNS_IFACE, 'icon'): type = icon.getAttribute('type') if type != 'image/png': logger.debug(_('Skipping non-PNG icon')) continue source = icon.getAttribute('href') if source: break logger.warning(_('Missing "href" attribute on <icon> in %s'), interface) else: logger.info(_('No PNG icons found in %s'), interface) return dl = self.download_url(source, hint = interface, modification_time = modification_time) @tasks.async def download_and_add_icon(): stream = dl.tempfile try: yield dl.downloaded tasks.check(dl.downloaded) if dl.unmodified: return stream.seek(0) import shutil, tempfile icons_cache = basedir.save_cache_path(config_site, 'interface_icons') tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False) shutil.copyfileobj(stream, tmp_file) tmp_file.close() icon_file = os.path.join(icons_cache, escape(interface.uri)) portable_rename(tmp_file.name, icon_file) finally: stream.close() return download_and_add_icon()
def reply_when_done(ticket, blocker):
    """Wait for blocker (if any) and then send the final JSON reply for
    ticket: ["ok", []] on success, or ["error", message] on failure."""
    try:
        if blocker:
            yield blocker
            tasks.check(blocker)
        reply = ["ok", []]
        send_json(["return", ticket, reply])
    except Exception as problem:
        logger.info("async task failed", exc_info=True)
        send_json(["return", ticket, ["error", str(problem)]])
def reply_when_done(ticket, blocker):
    """Send the final JSON reply for ticket once blocker (if any) completes.

    Generator driven by the tasks framework: yields blocker to suspend until
    it triggers, then replies ["ok", []]. If waiting (or checking) failed,
    logs the exception and replies ["error", str(ex)] instead."""
    try:
        if blocker:
            yield blocker
            tasks.check(blocker)  # re-raises if the awaited task failed
        send_json(["return", ticket, ["ok", []]])
    except Exception as ex:
        logger.info("async task failed", exc_info = True)
        send_json(["return", ticket, ["error", str(ex)]])
def get(self, key):
    """Return the cached value for key (None if missing).
    If the underlying source has changed, flush the cache and return None."""
    try:
        self._check_valid()
    except Exception as problem:
        logger.info(_("Cache needs to be refreshed: %s"), problem)
        self.flush()
        return None
    return self.cache.get(key, None)
def handle_invoke(config, options, ticket, request):
    """Dispatch a single JSON request from the GUI protocol.

    request[0] names the command; remaining elements are its arguments.
    Some commands additionally read a chunk of XML via read_chunk().
    Synchronous commands get an immediate ["ok", ...] / ["error", ...] reply
    for ticket; commands marked "async" arrange their own reply and return.
    """
    try:
        command = request[0]
        logger.debug("Got request '%s'", command)
        if command == 'open-gui':
            response = do_open_gui(request[1:])
        elif command == 'run-gui':
            do_run_gui(ticket)
            return  #async
        elif command == 'wait-for-network':
            response = do_wait_for_network(config)
        elif command == 'check-gui':
            response = do_check_gui(request[1])
        elif command == 'report-error':
            response = do_report_error(config, request[1])
        elif command == 'gui-update-selections':
            xml = qdom.parse(BytesIO(read_chunk()))
            response = do_gui_update_selections(request[1:], xml)
        elif command == 'download-selections':
            xml = qdom.parse(BytesIO(read_chunk()))
            # do_download_selections replies via ticket itself; its returned
            # blocker was previously bound to an unused local, now removed.
            do_download_selections(config, ticket, options, request[1:], xml)
            return  #async
        elif command == 'import-feed':
            xml = qdom.parse(BytesIO(read_chunk()))
            response = do_import_feed(config, xml)
        elif command == 'get-package-impls':
            xml = qdom.parse(BytesIO(read_chunk()))
            response = do_get_package_impls(config, options, request[1:], xml)
        elif command == 'is-distro-package-installed':
            xml = qdom.parse(BytesIO(read_chunk()))
            response = do_is_distro_package_installed(config, options, xml)
        elif command == 'get-distro-candidates':
            xml = qdom.parse(BytesIO(read_chunk()))
            blocker = do_get_distro_candidates(config, request[1:], xml)
            reply_when_done(ticket, blocker)
            return  # async
        elif command == 'confirm-keys':
            do_confirm_keys(config, ticket, request[1:])
            return  # async
        elif command == 'download-url':
            do_download_url(config, ticket, request[1:])
            return
        elif command == 'notify-user':
            response = do_notify_user(config, request[1])
        else:
            raise SafeException("Internal error: unknown command '%s'" % command)
        response = ['ok', response]
    except SafeException as ex:
        logger.info("Replying with error: %s", ex)
        response = ['error', str(ex)]
    except Exception as ex:
        import traceback
        logger.info("Replying with error: %s", ex)
        response = ['error', traceback.format_exc().strip()]

    send_json(["return", ticket, response])
def __init__(self, iterator, name):
    """Call next(iterator) from a glib idle function. This function can
    yield Blocker() objects to suspend processing while waiting for events.
    name is used only for debugging.
    @param iterator: the coroutine (generator) this task drives
    @param name: debug label for this task"""
    self.iterator = iterator
    self.finished = Blocker(name)  # triggered when the iterator completes
    # Block new task on the idle handler...
    _idle_blocker.add_task(self)
    self._zero_blockers = (_idle_blocker, )  # blockers we are currently waiting on
    logger.info(_("Scheduling new task: %s"), self)
def cook(self, required_digest, recipe, stores, force=False, impl_hint=None): """Follow a Recipe. @param impl_hint: the Implementation this is for (if any) as a hint for the GUI @see: L{download_impl} uses this method when appropriate""" # Maybe we're taking this metaphor too far? # Start a download for each ingredient blockers = [] steps = [] try: for stepdata in recipe.steps: cls = StepRunner.class_for(stepdata) step = cls(stepdata, impl_hint=impl_hint) step.prepare(self, blockers) steps.append(step) while blockers: yield blockers tasks.check(blockers) blockers = [b for b in blockers if not b.happened] if self.external_store: # Note: external_store will not yet work with non-<archive> steps. streams = [step.stream for step in steps] self._add_to_external_store(required_digest, recipe.steps, streams) else: # Create an empty directory for the new implementation store = stores.stores[0] tmpdir = store.get_tmp_dir_for(required_digest) try: # Unpack each of the downloaded archives into it in turn for step in steps: step.apply(tmpdir) # Check that the result is correct and store it in the cache store.check_manifest_and_rename(required_digest, tmpdir) tmpdir = None finally: # If unpacking fails, remove the temporary directory if tmpdir is not None: support.ro_rmtree(tmpdir) finally: for step in steps: try: step.close() except IOError as ex: # Can get "close() called during # concurrent operation on the same file # object." if we're unlucky (Python problem). logger.info("Failed to close: %s", ex)
def cook(self, required_digest, recipe, stores, force = False, impl_hint = None, dry_run = False, may_use_mirror = True): """Follow a Recipe. @type required_digest: str @type recipe: L{Recipe} @type stores: L{zeroinstall.zerostore.Stores} @type force: bool @param impl_hint: the Implementation this is for (as a hint for the GUI, and to allow local files) @type dry_run: bool @type may_use_mirror: bool @see: L{download_impl} uses this method when appropriate""" # Maybe we're taking this metaphor too far? # Start a download for each ingredient blockers = [] steps = [] try: for stepdata in recipe.steps: cls = StepRunner.class_for(stepdata) step = cls(stepdata, impl_hint = impl_hint, may_use_mirror = may_use_mirror) step.prepare(self, blockers) steps.append(step) while blockers: yield blockers tasks.check(blockers) blockers = [b for b in blockers if not b.happened] if self.external_store: # Note: external_store will not work with non-<archive> steps. streams = [step.stream for step in steps] self._add_to_external_store(required_digest, recipe.steps, streams) else: # Create an empty directory for the new implementation store = stores.stores[0] tmpdir = store.get_tmp_dir_for(required_digest) try: # Unpack each of the downloaded archives into it in turn for step in steps: step.apply(tmpdir) # Check that the result is correct and store it in the cache stores.check_manifest_and_rename(required_digest, tmpdir, dry_run=dry_run) tmpdir = None finally: # If unpacking fails, remove the temporary directory if tmpdir is not None: support.ro_rmtree(tmpdir) finally: for step in steps: try: step.close() except IOError as ex: # Can get "close() called during # concurrent operation on the same file # object." if we're unlucky (Python problem). logger.info("Failed to close: %s", ex)
def _run_gpg(args, **kwargs):
    """Spawn GnuPG with our standard options followed by args.
    Returns the subprocess.Popen object; extra kwargs go to Popen."""
    global _gnupg_options
    if _gnupg_options is None:
        # First call: work out which gpg binary and base options to use.
        exe = os.environ.get('ZEROINSTALL_GPG') or find_in_path('gpg') or find_in_path('gpg2') or 'gpg'
        options = [exe, '--no-secmem-warning']
        running_as_root = hasattr(os, 'geteuid') and os.geteuid() == 0
        if running_as_root and 'GNUPGHOME' not in os.environ:
            gnupg_home = os.path.join(basedir.home, '.gnupg')
            options += ['--homedir', gnupg_home]
            logger.info(_("Running as root, so setting GnuPG home to %s"), gnupg_home)
        _gnupg_options = options
    return subprocess.Popen(_gnupg_options + args, universal_newlines = True, **kwargs)
def get_details(self):
    """Run 'gpg --list-keys' for this key and return the output split
    into lines and colon-separated columns.
    @rtype: [[str]]"""
    # Note: GnuPG 2 always uses --fixed-list-mode
    child = _run_gpg(['--fixed-list-mode', '--with-colons', '--list-keys', self.fingerprint],
                     stdout = subprocess.PIPE)
    output, ignored = child.communicate()
    if child.returncode:
        logger.info(_("GPG exited with code %d") % child.returncode)
    return [record.split(':') for record in output.split('\n')]
def _download_and_import_feed(self, feed_url, use_mirror, timeout = None):
    """Download and import a feed.
    @type feed_url: str
    @param use_mirror: False to use primary location; True to use mirror.
    @type use_mirror: bool
    @param timeout: create a blocker which triggers if a download hangs for this long
    @type timeout: float | None
    @rtype: L{zeroinstall.support.tasks.Blocker}"""
    if use_mirror:
        url = self.get_feed_mirror(feed_url)
        if url is None: return None  # no mirror available for this feed
        logger.info(_("Trying mirror server for feed %s") % feed_url)
    else:
        url = feed_url

    if self.config.handler.dry_run:
        print(_("[dry-run] downloading feed {url}").format(url = url))
    dl = self.download_url(url, hint = feed_url, timeout = timeout)
    stream = dl.tempfile

    @tasks.named_async("fetch_feed " + url)
    def fetch_feed():
        try:
            yield dl.downloaded
            tasks.check(dl.downloaded)

            pending = PendingFeed(feed_url, stream)

            if use_mirror:
                # If we got the feed from a mirror, get the key from there too
                key_mirror = self.config.mirror + '/keys/'
            else:
                key_mirror = None

            keys_downloaded = tasks.Task(pending.download_keys(self, feed_hint = feed_url, key_mirror = key_mirror), _("download keys for %s") % feed_url)
            yield keys_downloaded.finished
            tasks.check(keys_downloaded.finished)

            dry_run = self.handler.dry_run
            if not self.config.iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml, dry_run = dry_run):
                # Signing key not yet trusted: ask the trust manager
                # (possibly prompting the user), then try once more.
                blocker = self.config.trust_mgr.confirm_keys(pending)
                if blocker:
                    yield blocker
                    tasks.check(blocker)
                if not self.config.iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml, dry_run = dry_run):
                    raise NoTrustedKeys(_("No signing keys trusted; not importing"))
        finally:
            stream.close()

    task = fetch_feed()
    task.dl = dl  # expose the download so callers can monitor/abort it
    return task
def download_impl(method):
    """Fetch one implementation using method (a DownloadSource or Recipe),
    verify it against required_digest and add it to the store.
    Generator driven by the tasks framework; on a download failure it
    retries once via the implementation mirror before giving up."""
    original_exception = None
    while True:
        try:
            if isinstance(method, DownloadSource):
                # Per-step mirror fallback is only allowed on the first
                # attempt; after that we are already using the mirror.
                blocker, stream = self.download_archive(
                    method,
                    impl_hint=impl,
                    may_use_mirror=original_exception is None)
                try:
                    yield blocker
                    tasks.check(blocker)

                    stream.seek(0)
                    if self.external_store:
                        self._add_to_external_store(
                            required_digest, [method], [stream])
                    else:
                        self._add_to_cache(required_digest, stores,
                                           method, stream)
                finally:
                    stream.close()
            elif isinstance(method, Recipe):
                blocker = self.cook(required_digest,
                                    method,
                                    stores,
                                    impl_hint=impl)
                yield blocker
                tasks.check(blocker)
            else:
                raise Exception(
                    _("Unknown download type for '%s'") % method)
        except download.DownloadError as ex:
            if original_exception:
                # The mirror failed too; report the original error.
                logger.info("Error from mirror: %s", ex)
                raise original_exception
            else:
                original_exception = ex
            mirror_url = self._get_impl_mirror(impl)
            if mirror_url is not None:
                logger.info("%s: trying implementation mirror at %s",
                            ex, mirror_url)
                # Mirror archives are always bzipped tarballs.
                method = model.DownloadSource(
                    impl,
                    mirror_url,
                    None,
                    None,
                    type='application/x-bzip-compressed-tar')
                continue  # Retry
            raise
        break

    self.handler.impl_added_to_store(impl)
def do_env_binding(binding, path):
    """Update this process's environment by applying the binding.
    @param binding: the binding to apply
    @type binding: L{model.EnvironmentBinding}
    @param path: the selected implementation
    @type path: str"""
    name = binding.name
    if path is None and binding.insert is not None:
        # An insert binding needs a real path; package implementations have none.
        logger.debug("not setting %s as we selected a package implementation", name)
        return
    new_value = binding.get_value(path, os.environ.get(name, None))
    os.environ[name] = new_value
    logger.info("%s=%s", name, new_value)
def _download_and_import_feed(self, feed_url, use_mirror):
    """Download and import a feed.
    @param use_mirror: False to use primary location; True to use mirror.
    @return: the async Task performing the fetch (with .dl set to the
    underlying download), or None if no mirror is available."""
    if use_mirror:
        url = self.get_feed_mirror(feed_url)
        if url is None:
            return None  # no mirror available for this feed
        logger.info(_("Trying mirror server for feed %s") % feed_url)
    else:
        url = feed_url

    dl = self.download_url(url, hint=feed_url)
    stream = dl.tempfile

    @tasks.named_async("fetch_feed " + url)
    def fetch_feed():
        try:
            yield dl.downloaded
            tasks.check(dl.downloaded)

            pending = PendingFeed(feed_url, stream)

            if use_mirror:
                # If we got the feed from a mirror, get the key from there too
                key_mirror = self.config.mirror + '/keys/'
            else:
                key_mirror = None

            keys_downloaded = tasks.Task(
                pending.download_keys(self,
                                      feed_hint=feed_url,
                                      key_mirror=key_mirror),
                _("download keys for %s") % feed_url)
            yield keys_downloaded.finished
            tasks.check(keys_downloaded.finished)

            if not self.config.iface_cache.update_feed_if_trusted(
                    pending.url, pending.sigs, pending.new_xml):
                # Signing key not yet trusted: ask the trust manager
                # (possibly prompting the user), then try once more.
                blocker = self.config.trust_mgr.confirm_keys(pending)
                if blocker:
                    yield blocker
                    tasks.check(blocker)
                if not self.config.iface_cache.update_feed_if_trusted(
                        pending.url, pending.sigs, pending.new_xml):
                    raise NoTrustedKeys(
                        _("No signing keys trusted; not importing"))
        finally:
            stream.close()

    task = fetch_feed()
    task.dl = dl  # expose the download so callers can monitor/abort it
    return task
def __init__(self, cache_leaf, source, format):
    """Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
    If the size or mtime of $source has changed, or the cache format
    version if different, reset the cache first."""
    self.cache_leaf = cache_leaf
    self.source = source
    self.format = format
    self.cached_for = {}  # Attributes of source when cache was created
    self.cache_dir = basedir.save_cache_path(namespaces.config_site,
                                             namespaces.config_prog)
    try:
        self._load_cache()
    except Exception as problem:
        logger.info(_("Failed to load cache (%s). Flushing..."), problem)
        self.flush()
def get_details(self):
    """Fetch the colon-formatted GnuPG listing for this key.
    @rtype: [[str]] (one inner list of fields per output line)"""
    # Note: GnuPG 2 always uses --fixed-list-mode
    argv = ['--fixed-list-mode', '--with-colons', '--list-keys', self.fingerprint]
    child = _run_gpg(argv, stdout=subprocess.PIPE)
    cout, unused = child.communicate()
    if child.returncode:
        logger.info(_("GPG exited with code %d") % child.returncode)
    details = []
    for record in cout.split('\n'):
        details.append(record.split(':'))
    return details
def _downloaded_key(self, stream):
    """Copy the downloaded key data from stream into a temporary file and
    import it into the GPG keyring."""
    import shutil, tempfile
    from zeroinstall.injector import gpg

    logger.info(_("Importing key for feed '%s'"), self.url)

    # Python2.4: can't call fileno() on stream, so save to tmp file instead
    with tempfile.TemporaryFile(prefix='injector-dl-data-') as tmpfile:
        shutil.copyfileobj(stream, tmpfile)
        tmpfile.flush()
        tmpfile.seek(0)
        gpg.import_key(tmpfile)
def handle_invoke(config, options, ticket, request):
    """Dispatch a single JSON request from the peer process.

    request[0] names the command; remaining elements are its arguments.
    Several commands read an additional length-prefixed XML chunk from
    stdin. Synchronous commands get an immediate ["ok", ...] /
    ["error", ...] reply for ticket; commands marked "async" arrange
    their own reply (via reply_when_done) and return early.
    """
    try:
        command = request[0]
        logger.debug("Got request '%s'", command)
        if command == 'get-selections-gui':
            response = do_get_selections_gui(config, request[1:])
        elif command == 'wait-for-network':
            response = do_wait_for_network(config)
        elif command == 'download-selections':
            l = stdin.readline().strip()  # length prefix for the XML payload
            xml = qdom.parse(BytesIO(stdin.read(int(l))))
            blocker = do_download_selections(config, options, request[1:], xml)
            reply_when_done(ticket, blocker)
            return #async
        elif command == 'get-package-impls':
            l = stdin.readline().strip()
            xml = qdom.parse(BytesIO(stdin.read(int(l))))
            response = do_get_package_impls(config, options, request[1:], xml)
        elif command == 'is-distro-package-installed':
            l = stdin.readline().strip()
            xml = qdom.parse(BytesIO(stdin.read(int(l))))
            response = do_is_distro_package_installed(config, options, xml)
        elif command == 'get-distro-candidates':
            l = stdin.readline().strip()
            xml = qdom.parse(BytesIO(stdin.read(int(l))))
            blocker = do_get_distro_candidates(config, request[1:], xml)
            reply_when_done(ticket, blocker)
            return # async
        elif command == 'download-and-import-feed':
            blocker = do_download_and_import_feed(config, request[1:])
            reply_when_done(ticket, blocker)
            return # async
        elif command == 'notify-user':
            response = do_notify_user(config, request[1])
        else:
            raise SafeException("Internal error: unknown command '%s'" % command)
        response = ['ok', response]
    except SafeException as ex:
        # SafeException messages are suitable for showing to the user as-is.
        logger.info("Replying with error: %s", ex)
        response = ['error', str(ex)]
    except Exception as ex:
        # Unexpected error: include the full traceback in the reply.
        import traceback
        logger.info("Replying with error: %s", ex)
        response = ['error', traceback.format_exc().strip()]

    send_json(["return", ticket, response])