def _get_archive_unpack_info(self, context, archive):
    """Return ``(suffix, directory)`` for *archive*.

    ``suffix`` is the opener suffix that ``nr.misc.archive`` reports for the
    archive; ``directory`` is the sub-directory of the context's install
    directory named after the archive's basename with that suffix stripped.
    """
    suffix = nr.misc.archive.get_opener(archive)[0]
    stripped_name = path.basename(archive)[: -len(suffix)]
    return suffix, path.join(context.installdir, stripped_name)
def load(self, context, cache):
    """Resolve ``self.urls`` to a local directory, downloading and unpacking
    an archive if necessary.

    If *cache* (the dict returned by a previous call, or None) still points
    at an existing directory and its re-expanded URL template yields the same
    URL as before, the cached directory is reused as-is.  Otherwise every URL
    template in ``self.urls`` is expanded and tried in order until one yields
    a local directory, a local archive file, or a successful download; a
    downloaded/local archive is unpacked into the install directory.

    Returns a cache dict with keys ``"directory"``, ``"url_template"`` and
    ``"url"``.  Raises ``LoaderError`` when no URL matched.
    """
    if cache is not None and path.isdir(cache.get("directory", "")):
        # Check if the requested version changed: re-expand the cached
        # template and compare against the URL that was actually used.
        url_template = context.expand_variables(cache.get("url_template", ""))
        if url_template == cache.get("url"):
            self.directory = cache["directory"]
            logger.info("Reusing cached directory: {}".format(path.rel(self.directory, nopar=True)))
            return cache
        else:
            # NOTE(review): this project logger takes print-style positional
            # args (see the multi-arg calls below) -- not the stdlib logging API.
            logger.info("Cached URL is outdated:", cache.get("url"))
    directory = None
    archive = None
    # NOTE(review): set here and cleared for local archives below, but never
    # read in this method -- presumably consumed elsewhere; verify before
    # removing.
    delete_after_extract = True
    for url_template in self.urls:
        url = context.expand_variables(url_template)
        if not url:
            continue
        if url.startswith("file://"):
            # Local path: use a directory directly, or an existing archive
            # file without deleting it after extraction.
            name = url[7:]
            if path.isdir(name):
                logger.info("Using directory", url)
                directory = name
                break
            elif path.isfile(name):
                logger.info("Using archive", url)
                archive = name
                delete_after_extract = False
                break
            # file:// target exists as neither directory nor file; fall
            # through to the shared "if error:" check below with no error.
            error = None
        else:
            error = None
            try:
                progress = lambda d: self._download_progress(url, context, d)
                archive, reused = httputils.download_file(
                    url, directory=context.get_temporary_directory(), on_exists="skip", progress=progress
                )
            except (httputils.URLError, httputils.HTTPError) as exc:
                # Remember the failure; the loop reports it and tries the
                # next URL.
                error = exc
            except self.DownloadAlreadyExists as exc:
                directory = exc.directory
                # NOTE(review): no ``break`` on this path -- the loop moves on
                # to the next URL even though *directory* is already set;
                # confirm that is intended.
                logger.info("Reusing existing directory", directory)
            else:
                # Download succeeded (freshly fetched or reused on disk).
                if reused:
                    logger.info("Reusing cached download", path.basename(archive))
                break
        if error:
            logger.info("Error reading", url, ":", error)
    if directory or archive:
        logger.debug("URL applies: {}".format(url))
    if not directory and archive:
        # Unpack the archive into a directory derived from its filename
        # inside the install directory.
        suffix, directory = self._get_archive_unpack_info(context, archive)
        logger.info(
            'Unpacking "{}" to "{}" ...'.format(path.rel(archive, nopar=True), path.rel(directory, nopar=True))
        )
        nr.misc.archive.extract(
            archive,
            directory,
            suffix=suffix,
            unpack_single_dir=True,
            check_extract_file=self._check_extract_file,
            progress_callback=self._extract_progress,
        )
    elif not directory:
        raise LoaderError(self, "no URL matched")
    self.directory = directory
    # Record which URL produced this directory so a later run can detect a
    # version change (see the cache check at the top).
    with open(path.join(self.directory, ".craftr_downloadurl"), "w") as fp:
        fp.write(url)
    return {"directory": directory, "url_template": url_template, "url": url}