def download_url(self, url, hint=None, modification_time=None, expected_size=None, mirror_url=None):
	"""The most low-level method here; just download a raw URL.
	It is the caller's responsibility to ensure that dl.stream is closed.
	@param url: the location to download from
	@param hint: user-defined data to store on the Download (e.g. used by the GUI)
	@param modification_time: don't download unless newer than this
	@param expected_size: size in bytes the download is expected to be, if known
	@param mirror_url: an alternative URL to try if this one fails
	@type mirror_url: str
	@rtype: L{download.Download}
	@since: 1.5
	"""
	if self.handler.dry_run:
		raise NeedDownload(url)

	# When an external store is used, it takes ownership of the downloaded
	# file, so we must not delete it automatically.
	dl = download.Download(url, hint=hint, modification_time=modification_time,
			       expected_size=expected_size,
			       auto_delete=not self.external_store)
	dl.mirror = mirror_url
	self.handler.monitor_download(dl)
	dl.downloaded = self.scheduler.download(dl)
	return dl
def download_missing(self, config, _old=None, include_packages=False):
	"""Check all selected implementations are available.
	Download any that are not present. Since native distribution packages are usually
	only available in a single version, which is unlikely to be the one in the
	selections document, we ignore them by default.
	Note: package implementations (distribution packages) are ignored.
	@param config: used to get iface_cache, stores and fetcher
	@param include_packages: also try to install native packages (since 1.5)
	@type include_packages: bool
	@rtype: L{zeroinstall.support.tasks.Blocker} | None"""
	if _old:
		# Deprecated calling convention: second positional argument was the
		# old singleton config; fall back to it for old callers.
		config = get_deprecated_singleton_config()
	iface_cache = config.iface_cache
	stores = config.stores

	needed_downloads = self.get_unavailable_selections(config, include_packages)
	if not needed_downloads:
		# Everything is already cached; nothing to do (returns None).
		return

	if config.network_use == model.network_offline:
		from zeroinstall import NeedDownload
		raise NeedDownload(', '.join([str(x) for x in needed_downloads]))

	# NOTE(review): 'async' became a reserved keyword in Python 3.7, so this
	# decorator spelling only parses on older Python versions.
	@tasks.async
	def download():
		# We're missing some. For each one, get the feed it came from
		# and find the corresponding <implementation> in that. This will
		# tell us where to get it from.

		# Note: we look for an implementation with the same ID. Maybe we
		# should check it has the same digest(s) too?

		needed_impls = []
		for sel in needed_downloads:
			feed_url = sel.attrs.get('from-feed', None) or sel.attrs['interface']
			feed = iface_cache.get_feed(feed_url)
			if feed is None or sel.id not in feed.implementations:
				# Feed missing or stale: fetch it before looking up the impl.
				fetch_feed = config.fetcher.download_and_import_feed(feed_url, iface_cache)
				yield fetch_feed
				tasks.check(fetch_feed)

				feed = iface_cache.get_feed(feed_url)
				assert feed, "Failed to get feed for %s" % feed_url
			impl = feed.implementations[sel.id]
			needed_impls.append(impl)

		# Download all the needed implementations in one batch.
		fetch_impls = config.fetcher.download_impls(needed_impls, stores)
		yield fetch_impls
		tasks.check(fetch_impls)

	return download()
def download_url(self, url, hint=None, modification_time=None, expected_size=None):
	"""Download a raw URL; this is the lowest-level entry point here.
	@param url: the location to download from
	@param hint: user-defined data to store on the Download (e.g. used by the GUI)
	@param modification_time: don't download unless newer than this
	@rtype: L{download.Download}
	@since: 1.5
	"""
	if self.handler.dry_run:
		raise NeedDownload(url)

	new_dl = download.Download(
		url,
		hint=hint,
		modification_time=modification_time,
		expected_size=expected_size)
	self.handler.monitor_download(new_dl)
	new_dl.downloaded = self.scheduler.download(new_dl)
	return new_dl
def get_download(self, url, force = False, hint = None, factory = None):
	"""Return the Download object currently downloading 'url'.
	If no download for this URL has been started, start one now (and
	start monitoring it).
	If the download failed and force is False, return it anyway.
	If force is True, abort any current or failed download and start
	a new one.
	@rtype: L{download.Download}
	"""
	if self.dry_run:
		raise NeedDownload(url)

	# Sentinel distinguishes "no entry" from a stored falsy download.
	missing = object()
	existing = self.monitored_downloads.get(url, missing)
	if existing is not missing:
		if not (existing and force):
			return existing
		# Caller insists on a fresh attempt: kill the old one first.
		existing.abort()

	make = download.Download if factory is None else factory
	fresh = make(url, hint)
	self.monitor_download(fresh)
	return fresh
def download_missing(self, config, _old = None, include_packages = False):
	"""Check all selected implementations are available.
	Download any that are not present. Since native distribution packages are usually
	only available in a single version, which is unlikely to be the one in the
	selections document, we ignore them by default.
	Note: package implementations (distribution packages) are ignored.
	@param config: used to get iface_cache, stores and fetcher
	@param include_packages: also try to install native packages (since 1.5)
	@return: a L{tasks.Blocker} or None"""
	from zeroinstall.zerostore import NotStored

	if _old:
		# Deprecated calling convention: fall back to the old singleton config.
		config = get_deprecated_singleton_config()
	iface_cache = config.iface_cache
	stores = config.stores

	# Check that every required selection is cached
	def needs_download(sel):
		if sel.id.startswith('package:'):
			# Native distribution package: only considered when requested.
			if not include_packages: return False
			feed = iface_cache.get_feed(sel.feed)
			if not feed: return False
			impl = feed.implementations.get(sel.id, None)
			return impl is None or not impl.installed
		elif sel.local_path:
			# Local implementations are never downloaded.
			return False
		else:
			try:
				stores.lookup_any(sel.digests)
				# Falls through returning None (falsy) when cached.
			except NotStored:
				return True
	needed_downloads = list(filter(needs_download, self.selections.values()))
	if not needed_downloads:
		# Everything is already available; nothing to do (returns None).
		return

	if config.network_use == model.network_offline:
		from zeroinstall import NeedDownload
		raise NeedDownload(', '.join([str(x) for x in needed_downloads]))

	# NOTE(review): 'async' became a reserved keyword in Python 3.7, so this
	# decorator spelling only parses on older Python versions.
	@tasks.async
	def download():
		# We're missing some. For each one, get the feed it came from
		# and find the corresponding <implementation> in that. This will
		# tell us where to get it from.

		# Note: we look for an implementation with the same ID. Maybe we
		# should check it has the same digest(s) too?

		needed_impls = []
		for sel in needed_downloads:
			feed_url = sel.attrs.get('from-feed', None) or sel.attrs['interface']
			feed = iface_cache.get_feed(feed_url)
			if feed is None or sel.id not in feed.implementations:
				# Feed missing or stale: fetch it before looking up the impl.
				fetch_feed = config.fetcher.download_and_import_feed(feed_url, iface_cache)
				yield fetch_feed
				tasks.check(fetch_feed)

				feed = iface_cache.get_feed(feed_url)
				assert feed, "Failed to get feed for %s" % feed_url
			impl = feed.implementations[sel.id]
			needed_impls.append(impl)

		# Download all the needed implementations in one batch.
		fetch_impls = config.fetcher.download_impls(needed_impls, stores)
		yield fetch_impls
		tasks.check(fetch_impls)

	return download()