def testDistro(self):
    with output_suppressed():
        native_url = "http://example.com:8000/Native.xml"

        # Initially, we don't have the feed at all...
        master_feed = self.config.iface_cache.get_feed(native_url)
        assert master_feed is None, master_feed

        trust.trust_db.trust_key("DE937DD411906ACF7C263B396FCF121BE2390E0B", "example.com:8000")
        self.child = server.handle_requests("Native.xml", "6FCF121BE2390E0B.gpg",
                "/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B")
        policy = Policy(native_url, config=self.config)
        assert policy.need_download()

        solve = policy.solve_with_downloads()
        self.config.handler.wait_for_blocker(solve)
        tasks.check(solve)

        master_feed = self.config.iface_cache.get_feed(native_url)
        assert master_feed is not None
        assert master_feed.implementations == {}

        distro_feed_url = master_feed.get_distro_feed()
        assert distro_feed_url is not None
        distro_feed = self.config.iface_cache.get_feed(distro_feed_url)
        assert distro_feed is not None
        assert len(distro_feed.implementations) == 2, distro_feed.implementations
def _queue_confirm_import_feed(self, pending, valid_sigs):
    # If we're already confirming something else, wait for that to finish...
    while self._current_confirm is not None:
        yield self._current_confirm

    # Check whether we still need to confirm. The user may have
    # already approved one of the keys while dealing with another
    # feed.
    from zeroinstall.injector import trust
    domain = trust.domain_from_url(pending.url)
    for sig in valid_sigs:
        is_trusted = trust.trust_db.is_trusted(sig.fingerprint, domain)
        if is_trusted:
            return

    # Take the lock and confirm this feed
    self._current_confirm = lock = tasks.Blocker('confirm key lock')
    try:
        done = self.confirm_import_feed(pending, valid_sigs)
        if done is not None:
            yield done
            tasks.check(done)
    finally:
        self._current_confirm = None
        lock.trigger()
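All of these snippets rely on the same coroutine idiom from zeroinstall.support.tasks: a generator yields a blocker (or several) to suspend itself, and calls tasks.check() after resuming to re-raise any exception the blocker recorded. A minimal sketch of that idiom, using only the API that appears in these examples (tasks.Blocker, trigger, check, the @tasks.async decorator and tasks.wait_for_blocker); the function and blocker names are illustrative:

from zeroinstall.support import tasks

@tasks.async
def wait_and_report(blocker):
    yield blocker           # suspend until the blocker triggers
    tasks.check(blocker)    # re-raise any exception recorded on it
    print("blocker fired without error")

# Driving the coroutine from synchronous code:
ready = tasks.Blocker('example')
ready.trigger()
tasks.wait_for_blocker(wait_and_report(ready))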
def testDistro(self):
    with output_suppressed():
        native_url = 'http://example.com:8000/Native.xml'

        # Initially, we don't have the feed at all...
        master_feed = self.config.iface_cache.get_feed(native_url)
        assert master_feed is None, master_feed

        trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')
        run_server('Native.xml', '6FCF121BE2390E0B.gpg',
                '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B')
        driver = Driver(requirements = Requirements(native_url), config = self.config)
        assert driver.need_download()

        solve = driver.solve_with_downloads()
        tasks.wait_for_blocker(solve)
        tasks.check(solve)

        master_feed = self.config.iface_cache.get_feed(native_url)
        assert master_feed is not None
        assert master_feed.implementations == {}

        distro_feed_url = master_feed.get_distro_feed()
        assert distro_feed_url is not None
        distro_feed = self.config.iface_cache.get_feed(distro_feed_url)
        assert distro_feed is not None
        assert len(distro_feed.implementations) == 2, distro_feed.implementations
def update_details_page():
    blocker = slave.invoke_master(['get-feed-metadata', uri.get_text()])
    yield blocker
    tasks.check(blocker)
    feed = blocker.result

    about.set_text('%s - %s' % (feed['name'], feed['summary']))
    icon_path = feed['icon-path']
    from zeroinstall.gtkui import icon
    icon_pixbuf = icon.load_icon(icon_path)
    if icon_pixbuf:
        icon_widget.set_from_pixbuf(icon_pixbuf)

    feed_category = feed['category']
    if feed_category:
        i = 0
        for row in categories:
            if row.lower() == feed_category.lower():
                category.set_active(i)
                break
            i += 1
    self.window.set_response_sensitive(_RESPONSE_PREV, True)
    nb.next_page()
    dialog_next.set_property('visible', False)
    dialog_ok.set_property('visible', True)
    dialog_ok.grab_focus()
def download(self, dl):
    # (changed if we get redirected)
    current_url = dl.url

    redirections_remaining = 10

    # Assign the Download to a Site based on its scheme, host and port. If the result is a redirect,
    # reassign it to the appropriate new site. Note that proxy handling happens later; we want to group
    # and limit by the target site, not treat everything as going to a single site (the proxy).
    while True:
        location_parts = urlparse.urlparse(current_url)

        site_key = (location_parts.scheme,
                    location_parts.hostname,
                    location_parts.port or default_port.get(location_parts.scheme, None))

        step = DownloadStep()
        step.dl = dl
        step.url = current_url
        blocker = self._sites[site_key].download(step)
        yield blocker
        tasks.check(blocker)

        if not step.redirect:
            break

        current_url = step.redirect

        if redirections_remaining == 0:
            raise download.DownloadError("Too many redirections {url} -> {current}".format(
                url = dl.url, current = current_url))
        redirections_remaining -= 1
def download(self, step):
    if self.active == MAX_DOWNLOADS_PER_SITE:
        # Too busy to start a new download now. Queue this one and wait.
        ticket = tasks.Blocker('queued download for ' + step.url)
        self.queue.append(ticket)
        yield ticket, step.dl._aborted
        if step.dl._aborted.happened:
            raise download.DownloadAborted()

    # Start a new thread for the download
    thread_blocker = _spawn_thread(step)

    self.active += 1

    # Wait for thread to complete download.
    yield thread_blocker, step.dl._aborted

    self.active -= 1
    if self.active < MAX_DOWNLOADS_PER_SITE:
        self.process_next()     # Start next queued download, if any

    if step.dl._aborted.happened:
        # Don't wait for child to finish (might be stuck doing IO)
        raise download.DownloadAborted()

    tasks.check(thread_blocker)

    if step.status == download.RESULT_REDIRECT:
        assert step.redirect
        return      # DownloadScheduler will handle it

    assert not step.redirect, step.redirect

    step.dl._finish(step.status)
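Yielding a tuple, as both yields above do, resumes the coroutine as soon as any one of the blockers triggers; the code then inspects .happened to find out which. The PackageKit test later in this listing uses the same pattern, roughly (a sketch; names are illustrative):

@tasks.async
def first_of(a, b):
    # Resumes when either a or b triggers; check whichever happened.
    yield a, b
    if a.happened:
        tasks.check(a)
    else:
        tasks.check(b)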
def collect_output(self, buffer):
    try:
        iter = buffer.get_end_iter()
        buffer.place_cursor(iter)

        if not self.driver.ready:
            buffer.insert_at_cursor("Can't run, because we failed to select a set of versions.\n")
            return

        sels = selections.Selections(self.driver.sels)
        uncached = sels.get_unavailable_selections(self.driver.config, include_packages = True)
        if uncached:
            buffer.insert_at_cursor("Can't run: the chosen versions have not been downloaded yet. I need:\n\n- " +
                    "\n\n- ".join(['%s version %s\n (%s)' % (x.interface, x.version, x.id) for x in uncached]))
            return

        self.hide()
        try:
            gtk.gdk.flush()
            iter = buffer.get_end_iter()
            buffer.place_cursor(iter)

            # Tell 0launch to run the program
            blocker = slave.run_test()
            yield blocker
            tasks.check(blocker)

            buffer.insert_at_cursor(blocker.result)
        finally:
            self.show()
    except Exception as ex:
        buffer.insert_at_cursor(str(ex))
        raise
def collect_output(self, buffer):
    try:
        iter = buffer.get_end_iter()
        buffer.place_cursor(iter)

        if not self.driver.ready:
            buffer.insert_at_cursor("Can't run, because we failed to select a set of versions.\n")
            return

        self.hide()
        try:
            gtk.gdk.flush()
            iter = buffer.get_end_iter()
            buffer.place_cursor(iter)

            # Tell 0launch to run the program
            blocker = slave.run_test()
            yield blocker
            tasks.check(blocker)

            buffer.insert_at_cursor(blocker.result)
        finally:
            self.show()
    except Exception as ex:
        buffer.insert_at_cursor(str(ex))
        raise
def download():
    # We're missing some. For each one, get the feed it came from
    # and find the corresponding <implementation> in that. This will
    # tell us where to get it from.
    # Note: we look for an implementation with the same ID. Maybe we
    # should check it has the same digest(s) too?
    needed_impls = []
    for sel in needed_downloads:
        feed_url = sel.attrs.get('from-feed', None) or sel.attrs['interface']
        feed = iface_cache.get_feed(feed_url)
        if feed is None or sel.id not in feed.implementations:
            fetch_feed = config.fetcher.download_and_import_feed(feed_url, iface_cache)
            yield fetch_feed
            tasks.check(fetch_feed)

            feed = iface_cache.get_feed(feed_url)
            assert feed, "Failed to get feed for %s" % feed_url
        impl = feed.implementations[sel.id]
        needed_impls.append(impl)

    fetch_impls = config.fetcher.download_impls(needed_impls, stores)
    yield fetch_impls
    tasks.check(fetch_impls)
def handle(config, options, args):
    """@type config: L{zeroinstall.injector.config.Config}
    @type args: [str]"""
    if len(args) != 1:
        raise UsageError()

    app = config.app_mgr.lookup_app(args[0], missing_ok = True)
    if app is not None:
        sels = app.get_selections()

        r = app.get_requirements()
        do_select = r.parse_update_options(options) or options.refresh
        iface_uri = sels.interface
    else:
        iface_uri = model.canonical_iface_uri(args[0])
        r = None
        do_select = True

    if do_select or options.gui:
        sels = select.get_selections(config, options, iface_uri,
                    select_only = False, download_only = True,
                    test_callback = None, requirements = r)
        if not sels:
            sys.exit(1)     # Aborted by user
    else:
        dl = app.download_selections(sels)
        if dl:
            tasks.wait_for_blocker(dl)
            tasks.check(dl)

    if options.xml:
        select.show_xml(sels)
    if options.show:
        select.show_human(sels, config.stores)
        if app is not None and do_select:
            print(_("(use '0install update' to save the new parameters)"))
def main():
    force_refresh = bool(options.refresh)
    while True:
        window.refresh_button.set_sensitive(False)
        window.browser.set_update_icons(force_refresh)

        solved = policy.solve_with_downloads(force = force_refresh, update_local = True)

        if not window.systray_icon:
            window.show()
        yield solved
        try:
            window.refresh_button.set_sensitive(True)
            window.browser.highlight_problems()
            tasks.check(solved)
        except Exception, ex:
            window.report_exception(ex)

        if window.systray_icon and window.systray_icon.get_visible() and \
           window.systray_icon.is_embedded():
            if policy.ready:
                window.systray_icon.set_tooltip(_('Downloading updates for %s') % root_iface.get_name())
                window.run_button.set_active(True)
            else:
                # Should already be reporting an error, but
                # blink it again just in case
                window.systray_icon.set_blinking(True)

        refresh_clicked = dialog.ButtonClickedBlocker(window.refresh_button)
        yield refresh_clicked, _recalculate

        if refresh_clicked.happened:
            force_refresh = True
def main():
    force_refresh = bool(options.refresh)
    while True:
        window.refresh_button.set_sensitive(False)
        window.browser.set_update_icons(force_refresh)

        solved = policy.solve_with_downloads(force = force_refresh)

        if not window.systray_icon:
            window.show()
        yield solved
        try:
            window.refresh_button.set_sensitive(True)
            tasks.check(solved)
        except Exception, ex:
            window.report_exception(ex)

        if window.systray_icon and window.systray_icon.get_visible() and \
           window.systray_icon.is_embedded():
            if policy.ready:
                window.systray_icon.set_tooltip(_('Downloading updates for %s') % root_iface.get_name())
                window.run_button.set_active(True)
            else:
                # Should already be reporting an error, but
                # blink it again just in case
                window.systray_icon.set_blinking(True)

        yield dialog.ButtonClickedBlocker(window.refresh_button)
        force_refresh = True
def download_impls():
    if unsafe_impls:
        confirm = self.handler.confirm_install(_('The following components need to be installed using native packages. '
                'These come from your distribution, and should therefore be trustworthy, but they also '
                'run with extra privileges. In particular, installing them may run extra services on your '
                'computer or affect other users. You may be asked to enter a password to confirm. The '
                'packages are:\n\n') + ('\n'.join('- ' + x for x in unsafe_impls)))
        yield confirm
        tasks.check(confirm)

    blockers = []

    for impl, source in to_download:
        blockers.append(self.download_impl(impl, source, stores))

    # Record the first error; log the rest
    error = []
    def dl_error(ex, tb = None):
        if error:
            self.handler.report_error(ex)
        else:
            error.append((ex, tb))

    while blockers:
        yield blockers
        tasks.check(blockers, dl_error)

        blockers = [b for b in blockers if not b.happened]
    if error:
        from zeroinstall import support
        support.raise_with_traceback(*error[0])
def download_keys(self, fetcher, feed_hint = None, key_mirror = None):
    """Download any required GPG keys not already on our keyring.
    When all downloads are done (successful or otherwise), add any new keys
    to the keyring, L{recheck}.
    @param fetcher: fetcher to manage the download (was Handler before version 1.5)
    @type fetcher: L{fetch.Fetcher}
    @param key_mirror: URL of directory containing keys, or None to use feed's directory
    @type key_mirror: str
    @rtype: [L{zeroinstall.support.tasks.Blocker}]"""
    downloads = {}
    blockers = []
    for x in self.sigs:
        key_id = x.need_key()
        if key_id:
            try:
                import urlparse
            except ImportError:
                from urllib import parse as urlparse    # Python 3
            key_url = urlparse.urljoin(key_mirror or self.url, '%s.gpg' % key_id)
            logger.info(_("Fetching key from %s"), key_url)
            dl = fetcher.download_url(key_url, hint = feed_hint)
            downloads[dl.downloaded] = (dl, dl.tempfile)
            blockers.append(dl.downloaded)

    exception = None
    any_success = False

    from zeroinstall.support import tasks

    while blockers:
        yield blockers

        old_blockers = blockers
        blockers = []

        for b in old_blockers:
            dl, stream = downloads[b]
            try:
                tasks.check(b)
                if b.happened:
                    stream.seek(0)
                    self._downloaded_key(stream)
                    any_success = True
                    stream.close()
                else:
                    blockers.append(b)
            except Exception:
                _type, exception, tb = sys.exc_info()
                logger.warning(_("Failed to import key for '%(url)s': %(exception)s"),
                        {'url': self.url, 'exception': str(exception)})
                stream.close()

    if exception and not any_success:
        raise_with_traceback(exception, tb)

    self.recheck()
def confirm_import_feed(self, pending, valid_sigs):
    """Sub-classes should override this method to interact with the user about new feeds.
    If multiple feeds need confirmation, L{confirm_keys} will only invoke one instance of
    this method at a time.
    @param pending: the new feed to be imported
    @type pending: L{PendingFeed}
    @param valid_sigs: maps signatures to a list of fetchers collecting information about the key
    @type valid_sigs: {L{gpg.ValidSig} : L{fetch.KeyInfoFetcher}}
    @since: 0.42
    @see: L{confirm_keys}"""
    from zeroinstall.injector import trust

    assert valid_sigs

    domain = trust.domain_from_url(pending.url)

    # Ask on stderr, because we may be writing XML to stdout
    print >>sys.stderr, _("Feed: %s") % pending.url
    print >>sys.stderr, _("The feed is correctly signed with the following keys:")
    for x in valid_sigs:
        print >>sys.stderr, "-", x

    def text(parent):
        text = ""
        for node in parent.childNodes:
            if node.nodeType == node.TEXT_NODE:
                text = text + node.data
        return text

    shown = set()
    key_info_fetchers = valid_sigs.values()
    while key_info_fetchers:
        old_kfs = key_info_fetchers
        key_info_fetchers = []
        for kf in old_kfs:
            infos = set(kf.info) - shown
            if infos:
                if len(valid_sigs) > 1:
                    print >>sys.stderr, "%s: " % kf.fingerprint
                for key_info in infos:
                    print >>sys.stderr, "-", text(key_info)
                    shown.add(key_info)
            if kf.blocker:
                key_info_fetchers.append(kf)
        if key_info_fetchers:
            for kf in key_info_fetchers:
                print >>sys.stderr, kf.status
            stdin = tasks.InputBlocker(0, 'console')
            blockers = [kf.blocker for kf in key_info_fetchers] + [stdin]
            yield blockers
            for b in blockers:
                try:
                    tasks.check(b)
                except Exception, ex:
                    warn(_("Failed to get key info: %s"), ex)
            if stdin.happened:
                print >>sys.stderr, _("Skipping remaining key lookups due to input from user")
                break
def reply_when_done(ticket, blocker):
    try:
        if blocker:
            yield blocker
            tasks.check(blocker)
        send_json(["return", ticket, ["ok", []]])
    except Exception as ex:
        logger.info("async task failed", exc_info = True)
        send_json(["return", ticket, ["error", str(ex)]])
def download(self, dl, timeout = None):
    """@type dl: L{zeroinstall.injector.download.Download}"""
    # (changed if we get redirected)
    current_url = dl.url

    redirections_remaining = 10

    original_exception = None

    # Assign the Download to a Site based on its scheme, host and port. If the result is a redirect,
    # reassign it to the appropriate new site. Note that proxy handling happens later; we want to group
    # and limit by the target site, not treat everything as going to a single site (the proxy).
    while True:
        location_parts = urlparse.urlparse(current_url)

        site_key = (location_parts.scheme,
                    location_parts.hostname,
                    location_parts.port or default_port.get(location_parts.scheme, None))

        step = DownloadStep()
        step.dl = dl
        step.url = current_url
        blocker = self._sites[site_key].download(step, timeout)
        yield blocker

        try:
            tasks.check(blocker)
        except download.DownloadError as ex:
            if original_exception is None:
                original_exception = ex
            else:
                logger.warning("%s (while trying mirror)", ex)
            mirror_url = step.dl.get_next_mirror_url()
            if mirror_url is None:
                raise original_exception

            # Try the mirror.
            # There are actually two places where we try to use the mirror: this one
            # looks to see if we have an exact copy of same file somewhere else. If this
            # fails, Fetcher will also look for a different archive that would generate
            # the required implementation.
            logger.warning("%s: trying archive mirror at %s", ex, mirror_url)
            step.redirect = mirror_url
            redirections_remaining = 10

        if not step.redirect:
            break

        current_url = step.redirect

        if redirections_remaining == 0:
            raise download.DownloadError("Too many redirections {url} -> {current}".format(
                url = dl.url, current = current_url))
        redirections_remaining -= 1
def doTest(self):
    imp.reload(packagekit)
    pk = packagekit.PackageKit()
    assert pk.available

    # Check none is found yet
    factory = Exception("not called")
    pk.get_candidates('gimp', factory, 'package:test')

    blocker = pk.fetch_candidates(["gimp"])
    blocker2 = pk.fetch_candidates(["gimp"])    # Check batching too

    @tasks.async
    def wait():
        yield blocker, blocker2
        if blocker.happened:
            tasks.check(blocker)
        else:
            tasks.check(blocker2)
    tasks.wait_for_blocker(wait())

    impls = {}
    def factory(impl_id, only_if_missing, installed):
        assert impl_id.startswith('package:')
        assert only_if_missing is True
        assert installed is False

        feed = None

        impl = model.DistributionImplementation(feed, impl_id, self)
        impl.installed = installed
        impls[impl_id] = impl
        return impl

    pk.get_candidates('gimp', factory, 'package:test')
    self.assertEqual(["package:test:gimp:2.6.8-2:x86_64"], list(impls.keys()))
    self.assertEqual(False, list(impls.values())[0].installed)

    impl, = impls.values()
    self.config.handler.allow_downloads = True

    _pk = pk.pk
    rm, = impl.download_sources
    dl = packagekit.PackageKitDownload('packagekit:' + rm.packagekit_id, hint = None,
            pk = _pk, packagekit_id = rm.packagekit_id, expected_size = rm.size)
    self.config.handler.monitor_download(dl)
    b = dl.downloaded
    tasks.wait_for_blocker(b)
    tasks.check(b)

    #self.assertEqual("/usr/bin/fixed", list(impls.values())[0].main)    # Fixup not used in Python now

    tasks.wait_for_blocker(blocker)
    tasks.wait_for_blocker(blocker2)

    # Don't fetch it again
    tasks.wait_for_blocker(pk.fetch_candidates(["gimp"]))
def download_info(feed_url):
    print "Downloading", feed_url
    feed_download = config.fetcher.download_and_import_feed(feed_url)
    yield feed_download
    tasks.check(feed_download)
    #print "Download complete"
    feed = config.iface_cache.get_feed(feed_url)
def set(new):
    try:
        blocker = slave.invoke_master(["set-impl-stability", impl_details['from-feed'], impl_details['id'], new])
        yield blocker
        tasks.check(blocker)
        from zeroinstall.gui import main
        main.recalculate()
    except Exception:
        logger.warning("set", exc_info = True)
        raise
def dummy_spawn_thread(step):
    resume = tasks.Blocker('complete ' + step.url)
    downloads[step.url] = resume
    yield resume
    try:
        tasks.check(resume)
    except Exception:
        step.status = "fail"
    else:
        step.status = "ok"
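A test using this stub in place of _spawn_thread can then complete a queued download by looking up its blocker and triggering it. A hypothetical test step (the downloads dict comes from the stub above; the URL is illustrative):

# Simulate the download for this URL finishing, which resumes the
# scheduler coroutine that is waiting on the blocker.
downloads['http://example.com/archive.tgz'].trigger()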
def cook(self, required_digest, recipe, stores, force = False, impl_hint = None):
    """Follow a Recipe.
    @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
    @see: L{download_impl} uses this method when appropriate"""
    # Maybe we're taking this metaphor too far?

    # Start a download for each ingredient
    blockers = []
    steps = []
    try:
        for stepdata in recipe.steps:
            cls = StepRunner.class_for(stepdata)
            step = cls(stepdata, impl_hint = impl_hint)
            step.prepare(self, blockers)
            steps.append(step)

        while blockers:
            yield blockers
            tasks.check(blockers)
            blockers = [b for b in blockers if not b.happened]

        if self.external_store:
            # Note: external_store will not yet work with non-<archive> steps.
            streams = [step.stream for step in steps]
            self._add_to_external_store(required_digest, recipe.steps, streams)
        else:
            # Create an empty directory for the new implementation
            store = stores.stores[0]
            tmpdir = store.get_tmp_dir_for(required_digest)
            try:
                # Unpack each of the downloaded archives into it in turn
                for step in steps:
                    step.apply(tmpdir)
                # Check that the result is correct and store it in the cache
                store.check_manifest_and_rename(required_digest, tmpdir)
                tmpdir = None
            finally:
                # If unpacking fails, remove the temporary directory
                if tmpdir is not None:
                    support.ro_rmtree(tmpdir)
    finally:
        for step in steps:
            try:
                step.close()
            except IOError as ex:
                # Can get "close() called during concurrent operation
                # on the same file object." if we're unlucky (Python problem).
                logger.info("Failed to close: %s", ex)
def cook(self, required_digest, recipe, stores, force = False, impl_hint = None,
         dry_run = False, may_use_mirror = True):
    """Follow a Recipe.
    @type required_digest: str
    @type recipe: L{Recipe}
    @type stores: L{zeroinstall.zerostore.Stores}
    @type force: bool
    @param impl_hint: the Implementation this is for (as a hint for the GUI, and to allow local files)
    @type dry_run: bool
    @type may_use_mirror: bool
    @see: L{download_impl} uses this method when appropriate"""
    # Maybe we're taking this metaphor too far?

    # Start a download for each ingredient
    blockers = []
    steps = []
    try:
        for stepdata in recipe.steps:
            cls = StepRunner.class_for(stepdata)
            step = cls(stepdata, impl_hint = impl_hint, may_use_mirror = may_use_mirror)
            step.prepare(self, blockers)
            steps.append(step)

        while blockers:
            yield blockers
            tasks.check(blockers)
            blockers = [b for b in blockers if not b.happened]

        if self.external_store:
            # Note: external_store will not work with non-<archive> steps.
            streams = [step.stream for step in steps]
            self._add_to_external_store(required_digest, recipe.steps, streams)
        else:
            # Create an empty directory for the new implementation
            store = stores.stores[0]
            tmpdir = store.get_tmp_dir_for(required_digest)
            try:
                # Unpack each of the downloaded archives into it in turn
                for step in steps:
                    step.apply(tmpdir)
                # Check that the result is correct and store it in the cache
                stores.check_manifest_and_rename(required_digest, tmpdir, dry_run = dry_run)
                tmpdir = None
            finally:
                # If unpacking fails, remove the temporary directory
                if tmpdir is not None:
                    support.ro_rmtree(tmpdir)
    finally:
        for step in steps:
            try:
                step.close()
            except IOError as ex:
                # Can get "close() called during concurrent operation
                # on the same file object." if we're unlucky (Python problem).
                logger.info("Failed to close: %s", ex)
def confirm_import_feed(self, pending, valid_sigs):
    """Verify the feed."""
    from zeroinstall.injector import trust

    assert valid_sigs

    domain = trust.domain_from_url(pending.url)

    # Ask on stderr, because we may be writing XML to stdout
    print("Feed: %s" % pending.url)
    print("The feed is correctly signed with the following keys:")
    for x in valid_sigs:
        print("-", x)

    def text(parent):
        text = ""
        for node in parent.childNodes:
            if node.nodeType == node.TEXT_NODE:
                text = text + node.data
        return text

    shown = set()
    key_info_fetchers = valid_sigs.values()
    while key_info_fetchers:
        old_kfs = key_info_fetchers
        key_info_fetchers = []
        for kf in old_kfs:
            infos = set(kf.info) - shown
            if infos:
                if len(valid_sigs) > 1:
                    print("%s: " % kf.fingerprint)
                for key_info in infos:
                    print("-", text(key_info))
                    shown.add(key_info)
            if kf.blocker:
                key_info_fetchers.append(kf)
        if key_info_fetchers:
            for kf in key_info_fetchers:
                print(kf.status)
            stdin = tasks.InputBlocker(0, 'console')
            blockers = [kf.blocker for kf in key_info_fetchers] + [stdin]
            yield blockers
            for b in blockers:
                try:
                    tasks.check(b)
                except Exception as ex:
                    warn(_("Failed to get key info: %s"), ex)
            if stdin.happened:
                print("Skipping remaining key lookups due to input from user")
                break

    for key in valid_sigs:
        print("Trusting %(key_fingerprint)s for %(domain)s" % {'key_fingerprint': key.fingerprint, 'domain': domain})
        trust.trust_db.trust_key(key.fingerprint, domain)
def run():
    keys_downloaded = tasks.Task(pending.download_keys(handler), "download keys")
    yield keys_downloaded.finished
    tasks.check(keys_downloaded.finished)
    if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml):
        blocker = handler.confirm_trust_keys(iface, pending.sigs, pending.new_xml)
        if blocker:
            yield blocker
            tasks.check(blocker)
        if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml):
            raise SafeException(_("No signing keys trusted; not importing"))
def solve_with_downloads(self, force = False):
    """Run the solver, then download any feeds that are missing or that need to be
    updated. Each time a new feed is imported into the cache, the solver is run
    again, possibly adding new downloads.
    @param force: whether to download even if we're already ready to run."""
    downloads_finished = set()      # Successful or otherwise
    downloads_in_progress = {}      # URL -> Download

    host_arch = self.target_arch
    if self.src:
        host_arch = arch.SourceArchitecture(host_arch)

    while True:
        postponed = self.solver.solve(self.root, host_arch, return_postponed = True)

        if postponed:
            while postponed:
                yield postponed
                postponed = [i for i in postponed if not i.happened]
            continue

        for w in self.watchers:
            w()

        if self.solver.ready and not force:
            break
        else:
            if self.network_use == network_offline and not force:
                info(_("Can't choose versions and in off-line mode, so aborting"))
                break
            # Once we've started downloading some things,
            # we might as well get them all.
            force = True

        for f in self.solver.feeds_used:
            if f in downloads_finished or f in downloads_in_progress:
                continue
            if f.startswith('/'):
                continue
            feed = iface_cache.get_interface(f)
            downloads_in_progress[f] = self.fetcher.download_and_import_feed(f, iface_cache)

        if not downloads_in_progress:
            break

        blockers = downloads_in_progress.values()
        yield blockers
        tasks.check(blockers, self.handler.report_error)

        for f in downloads_in_progress.keys():
            if downloads_in_progress[f].happened:
                del downloads_in_progress[f]
                downloads_finished.add(f)
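Callers drive this generator to completion and then check it, as the testDistro examples above do. Roughly (a sketch following that usage; policy is assumed to be constructed as in the first example):

# Run the solve/download loop to completion, then re-raise any
# exception it recorded.
solve = policy.solve_with_downloads(force = True)
tasks.wait_for_blocker(solve)
tasks.check(solve)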
def testChunked(self):
    if sys.version_info[0] < 3:
        return      # not a problem with Python 2
    run_server('chunked')
    dl = self.config.fetcher.download_url('http://localhost/chunked')
    tmp = dl.tempfile
    tasks.wait_for_blocker(dl.downloaded)
    tasks.check(dl.downloaded)
    tmp.seek(0)
    self.assertEqual(b'hello world', tmp.read())
    kill_server_process()
def ok(feed, config = config, interface = interface, chooser = chooser):
    try:
        blocker = slave.add_local_feed(interface.uri, feed)
        yield blocker
        tasks.check(blocker)
        chooser.destroy()
        from zeroinstall.gui import main
        main.recalculate()
    except Exception as ex:
        dialog.alert(None, _("Error in feed file '%(feed)s':\n\n%(exception)s") % {'feed': feed, 'exception': str(ex)})
def run():
    keys_downloaded = tasks.Task(pending.download_keys(config.fetcher), "download keys")
    yield keys_downloaded.finished
    tasks.check(keys_downloaded.finished)
    if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
        blocker = config.trust_mgr.confirm_keys(pending)
        if blocker:
            yield blocker
            tasks.check(blocker)
        if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
            raise SafeException(_("No signing keys trusted; not importing"))
def cook(self, required_digest, recipe, stores, force = False, impl_hint = None):
    """Follow a Recipe.
    @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
    @see: L{download_impl} uses this method when appropriate"""
    # Maybe we're taking this metaphor too far?

    # Start downloading all the ingredients.
    streams = {}    # Streams collected from successful downloads

    # Start a download for each ingredient
    blockers = []
    for step in recipe.steps:
        blocker, stream = self.download_archive(step, force = force, impl_hint = impl_hint)
        assert stream
        blockers.append(blocker)
        streams[step] = stream

    while blockers:
        yield blockers
        tasks.check(blockers)
        blockers = [b for b in blockers if not b.happened]

    from zeroinstall.zerostore import unpack

    # Create an empty directory for the new implementation
    store = stores.stores[0]
    tmpdir = store.get_tmp_dir_for(required_digest)
    try:
        # Unpack each of the downloaded archives into it in turn
        for step in recipe.steps:
            stream = streams[step]
            stream.seek(0)
            unpack.unpack_archive_over(step.url, stream, tmpdir,
                    extract = step.extract,
                    type = step.type,
                    start_offset = step.start_offset or 0)
        # Check that the result is correct and store it in the cache
        store.check_manifest_and_rename(required_digest, tmpdir)
        tmpdir = None
    finally:
        # If unpacking fails, remove the temporary directory
        if tmpdir is not None:
            from zeroinstall import support
            support.ro_rmtree(tmpdir)
def download_impl(method):
    original_exception = None
    while True:
        try:
            if isinstance(method, DownloadSource):
                blocker, stream = self.download_archive(method, impl_hint = impl,
                        may_use_mirror = original_exception is None)
                try:
                    yield blocker
                    tasks.check(blocker)

                    stream.seek(0)
                    if self.external_store:
                        self._add_to_external_store(required_digest, [method], [stream])
                    else:
                        self._add_to_cache(required_digest, stores, method, stream)
                finally:
                    stream.close()
            elif isinstance(method, Recipe):
                blocker = self.cook(required_digest, method, stores, impl_hint = impl)
                yield blocker
                tasks.check(blocker)
            else:
                raise Exception(_("Unknown download type for '%s'") % method)
        except download.DownloadError as ex:
            if original_exception:
                logger.info("Error from mirror: %s", ex)
                raise original_exception
            else:
                original_exception = ex

            mirror_url = self._get_impl_mirror(impl)
            if mirror_url is not None:
                logger.info("%s: trying implementation mirror at %s", ex, mirror_url)
                method = model.DownloadSource(impl, mirror_url,
                        None, None, type = 'application/x-bzip-compressed-tar')
                continue        # Retry
            raise
        break

    self.handler.impl_added_to_store(impl)
def populate():
    populate = self._populate_model()
    yield populate
    try:
        tasks.check(populate)
    except:
        import logging
        logging.warn("fail", exc_info = True)
        raise
    # (we delay until here because inserting with the view set is very slow)
    self.tree_view.set_model(self.view_model)
    self.set_initial_expansion()
def do_run_gui(ticket):
    reply_holder = []
    blocker = run_gui(reply_holder)
    try:
        if blocker:
            yield blocker
            tasks.check(blocker)
        reply, = reply_holder
        send_json(["return", ticket, ["ok", reply]])
    except Exception as ex:
        logger.warning("Returning error", exc_info = True)
        send_json(["return", ticket, ["error", str(ex)]])
def wait_for_destroy(ticket, window):
    window.show()
    blocker = tasks.Blocker("window closed")
    window.connect('destroy', lambda *args: blocker.trigger())
    try:
        if blocker:
            yield blocker
            tasks.check(blocker)
        send_json(["return", ticket, ["ok", None]])
    except Exception as ex:
        logger.warning("Returning error", exc_info = True)
        send_json(["return", ticket, ["error", str(ex)]])
def remove_feed(button):
    try:
        model, iter = self.tv.get_selection().get_selected()
        feed_uri = model[iter][Feeds.URI]
        blocker = slave.remove_feed(interface.uri, feed_uri)
        yield blocker
        tasks.check(blocker)
        from zeroinstall.gui import main
        main.recalculate()
    except Exception as ex:
        import traceback
        traceback.print_exc()
        config.handler.report_error(ex)
def doTest(self):
    imp.reload(packagekit)
    pk = packagekit.PackageKit()
    assert pk.available

    # Check none is found yet
    factory = Exception("not called")
    pk.get_candidates('gimp', factory, 'package:test')

    blocker = pk.fetch_candidates(["gimp"])
    blocker2 = pk.fetch_candidates(["gimp"])    # Check batching too

    @tasks.async
    def wait():
        yield blocker, blocker2
        if blocker.happened:
            tasks.check(blocker)
        else:
            tasks.check(blocker2)
    tasks.wait_for_blocker(wait())

    impls = {}
    def factory(impl_id, only_if_missing, installed):
        assert impl_id.startswith('package:')
        assert only_if_missing is True
        assert installed is False

        feed = None

        impl = model.DistributionImplementation(feed, impl_id, self)
        impl.installed = installed
        impls[impl_id] = impl
        return impl

    pk.get_candidates('gimp', factory, 'package:test')
    self.assertEqual(["package:test:gimp:2.6.8-2:x86_64"], list(impls.keys()))
    self.assertEqual(False, list(impls.values())[0].installed)

    impl, = impls.values()
    fetcher = fetch.Fetcher(config = self.config)
    self.config.handler.allow_downloads = True
    b = fetcher.download_impl(impl, impl.download_sources[0], stores = None)
    tasks.wait_for_blocker(b)
    tasks.check(b)

    self.assertEqual("/usr/bin/fixed", list(impls.values())[0].main)

    tasks.wait_for_blocker(blocker)
    tasks.wait_for_blocker(blocker2)

    # Don't fetch it again
    tasks.wait_for_blocker(pk.fetch_candidates(["gimp"]))
def do_confirm_distro_install(config, ticket, options, impls):
    if gui_driver is not None:
        config = gui_driver.config
    try:
        manual_impls = [impl['id'] for impl in impls if not impl['needs-confirmation']]
        unsafe_impls = [impl for impl in impls if impl['needs-confirmation']]

        if unsafe_impls:
            confirm = config.handler.confirm_install(_('The following components need to be installed using native packages. '
                    'These come from your distribution, and should therefore be trustworthy, but they also '
                    'run with extra privileges. In particular, installing them may run extra services on your '
                    'computer or affect other users. You may be asked to enter a password to confirm. The '
                    'packages are:\n\n') + ('\n'.join('- ' + x['id'] for x in unsafe_impls)))
            yield confirm
            tasks.check(confirm)

        if manual_impls:
            raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
                    "Please install it manually using your distribution's tools and try again. Or, install 'packagekit' and I can "
                    "use that to install it.") % manual_impls[0])

        blockers = []
        for impl in unsafe_impls:
            from zeroinstall.injector import packagekit
            packagekit_id = impl['packagekit-id']
            pk = get_distro().packagekit.pk
            dl = packagekit.PackageKitDownload('packagekit:' + packagekit_id, hint = impl['master-feed'],
                    pk = pk, packagekit_id = packagekit_id, expected_size = int(impl['size']))
            config.handler.monitor_download(dl)
            blockers.append(dl.downloaded)

        # Record the first error; log the rest
        error = []
        def dl_error(ex, tb = None):
            if error:
                config.handler.report_error(ex)
            else:
                error.append((ex, tb))

        while blockers:
            yield blockers
            tasks.check(blockers, dl_error)
            blockers = [b for b in blockers if not b.happened]
        if error:
            from zeroinstall import support
            support.raise_with_traceback(*error[0])

        send_json(["return", ticket, ["ok", "ok"]])
    except download.DownloadAborted as ex:
        send_json(["return", ticket, ["ok", "aborted-by-user"]])
    except Exception as ex:
        logger.warning("Returning error", exc_info = True)
        send_json(["return", ticket, ["error", str(ex)]])
def get_packagekit_feed(self, feed_url):
    """Send a query to PackageKit (if available) for information about this package.
    On success, the result is added to iface_cache."""
    assert feed_url.startswith('distribution:'), feed_url
    master_feed = self.config.iface_cache.get_feed(feed_url.split(':', 1)[1])
    if master_feed:
        fetch = self.config.iface_cache.distro.fetch_candidates(master_feed)
        if fetch:
            yield fetch
            tasks.check(fetch)

        # Force feed to be regenerated with the new information
        self.config.iface_cache.get_feed(feed_url, force = True)
def download_and_run(self, run_button, cancelled):
    try:
        if not self.select_only:
            downloaded = self.driver.download_uncached_implementations()

            if downloaded:
                # We need to wait until everything is downloaded...
                blockers = [downloaded, cancelled]
                yield blockers
                tasks.check(blockers)
                if cancelled.happened:
                    return

            uncached = self.driver.get_uncached_implementations()
        else:
            uncached = None     # (we don't care)

        if uncached:
            missing = '\n- '.join([_('%(iface_name)s %(impl_version)s') % {
                    'iface_name': iface.get_name(),
                    'impl_version': impl.get_version()} for iface, impl in uncached])
            dialog.alert(self.window,
                    _('Not all downloads succeeded; cannot run program.\n\nFailed to get:') + '\n- ' + missing)
        else:
            sels = self.driver.solver.selections
            doc = sels.toDOM()
            reply = doc.toxml('utf-8')
            if sys.version_info[0] > 2:
                stdout = sys.stdout.buffer
            else:
                stdout = sys.stdout
            stdout.write(('Length:%8x\n' % len(reply)).encode('utf-8') + reply)
            self.window.destroy()
            sys.exit(0)     # Success
    except SystemExit:
        raise
    except download.DownloadAborted as ex:
        run_button.set_active(False)
        # Don't bother reporting this to the user
    except Exception as ex:
        run_button.set_active(False)
        self.report_exception(ex)
def download(self, step, timeout = None):
    """Queue up this download. If it takes too long, trigger step.dl.timeout
    (if any), but only count time spent actually downloading, not time spent
    queuing.
    @type step: L{DownloadStep}"""
    if self.active == MAX_DOWNLOADS_PER_SITE:
        # Too busy to start a new download now. Queue this one and wait.
        ticket = tasks.Blocker('queued download for ' + step.url)
        self.queue.append(ticket)
        yield ticket, step.dl._aborted
        if step.dl._aborted.happened:
            raise download.DownloadAborted()

    in_progress = [True]
    if timeout is not None:
        def timeout_cb():
            if in_progress:
                step.dl.timeout.trigger()
        tasks.get_loop().call_later(timeout, timeout_cb)

    # Start a new thread for the download
    thread_blocker = _spawn_thread(step)

    self.active += 1

    # Wait for thread to complete download.
    yield thread_blocker, step.dl._aborted

    del in_progress[0]

    self.active -= 1
    if self.active < MAX_DOWNLOADS_PER_SITE:
        self.process_next()     # Start next queued download, if any

    if step.dl._aborted.happened:
        # Don't wait for child to finish (might be stuck doing IO)
        raise download.DownloadAborted()

    tasks.check(thread_blocker)

    if step.status == download.RESULT_REDIRECT:
        assert step.redirect
        return      # DownloadScheduler will handle it

    assert not step.redirect, step.redirect

    step.dl._finish(step.status)