def _download_files_to_folder(self, file_urls, to_folder, snapshot_md5):
    """Download every file in *file_urls* into *to_folder*, writing to disk.

    Files are written by the downloader (appending chunks), so only chunks
    are kept in memory.

    :param file_urls: dict with {filename: abs_path (url)} of files to fetch
    :param to_folder: destination folder for the downloaded files
    :param snapshot_md5: optional dict {filename: md5} used to validate
        downloads and enable the download cache
    :return: dict with {filename: abs_path} of the files written to disk
    """
    # Loop-invariant check hoisted out of the loop: the download cache can
    # only be used when per-file checksums are available
    assert not self._config.download_cache or snapshot_md5, \
        "if download_cache is set, we need the file checksums"
    ret = {}
    # Take advantage of filenames ordering, so that conan_package.tgz and conan_export.tgz
    # can be < conanfile, conaninfo, and sent always the last, so smaller files go first
    for filename, resource_url in sorted(file_urls.items(), reverse=True):
        if self._output and not self._output.is_terminal:
            self._output.writeln("Downloading %s" % filename)
        auth, _ = self._file_server_capabilities(resource_url)
        abs_path = os.path.join(to_folder, filename)
        md5 = snapshot_md5.get(filename, None) if snapshot_md5 else None
        run_downloader(self.requester, self._output, self.verify_ssl, self._config,
                       url=resource_url, file_path=abs_path, auth=auth, md5=md5)
        ret[filename] = abs_path
    return ret
def _download_file(file_url):
    """Download *file_url* into *filename*, then verify any provided checksums."""
    # The download cache is only used if a checksum is provided, otherwise, a normal download
    cache_enabled = bool(config and checksum)
    run_downloader(requester=requester, output=out, verify=verify, config=config,
                   user_download=True, use_cache=cache_enabled, url=file_url,
                   file_path=filename, retry=retry, retry_wait=retry_wait,
                   overwrite=overwrite, auth=auth, headers=headers,
                   md5=md5, sha1=sha1, sha256=sha256)
    # TODO: Probably move inside downloader, remove the file,... locks...
    # Verify each checksum that was supplied, in md5 -> sha1 -> sha256 order
    for expected, verifier in ((md5, check_md5), (sha1, check_sha1), (sha256, check_sha256)):
        if expected:
            verifier(filename, expected)
    out.writeln("")
def _download_and_save_files(self, urls, dest_folder, files, use_cache):
    """Download each file named in *files* from its URL and save it in *dest_folder*.

    :param urls: dict with {filename: url}
    :param dest_folder: folder where the downloaded files are written
    :param files: iterable of filenames to download
    :param use_cache: whether the download cache may be used
    """
    # Progress lines are only printed when the output is not a terminal
    announce = bool(self._output) and not self._output.is_terminal
    # Take advantage of filenames ordering, so that conan_package.tgz and conan_export.tgz
    # can be < conanfile, conaninfo, and sent always the last, so smaller files go first
    for filename in sorted(files, reverse=True):
        if announce:
            self._output.writeln("Downloading %s" % filename)
        target_path = os.path.join(dest_folder, filename)
        run_downloader(self.requester, self._output, self.verify_ssl, self._config,
                       use_cache=use_cache, url=urls[filename],
                       file_path=target_path, auth=self.auth)
def _download_file(file_url):
    """Run a single user download of *file_url* into *filename*."""
    # The download cache is only used if a checksum is provided, otherwise, a normal download
    cached = bool(config and checksum)
    run_downloader(url=file_url, file_path=filename, requester=requester, output=out,
                   verify=verify, config=config, user_download=True, use_cache=cached,
                   retry=retry, retry_wait=retry_wait, overwrite=overwrite,
                   auth=auth, headers=headers, md5=md5, sha1=sha1, sha256=sha256)
    out.writeln("")
def _get_remote_file_contents(self, url, use_cache, headers=None):
    """Return the raw contents downloaded from *url*.

    :param url: resource to fetch
    :param use_cache: whether the download cache may be used
    :param headers: optional extra HTTP headers for the request
    """
    # We don't want traces in output of these downloads, they are ugly in output
    # (hence output=None)
    return run_downloader(self.requester, None, self.verify_ssl, self._config,
                          use_cache=use_cache, url=url, auth=self.auth, headers=headers)
def _download_files(self, file_urls, snapshot_md5):
    """Lazily download every file in *file_urls*, yielding (path, contents).

    :param file_urls: dict with {filename: url}
    :param snapshot_md5: dict with {filename: md5 checksum} of files to be downloaded
    It's a generator, so it yields elements for memory performance
    """
    # Loop-invariant check hoisted out of the loop: the download cache can
    # only be used when per-file checksums are available
    assert not self._config.download_cache or snapshot_md5, \
        "if download_cache is set, we need the file checksums"
    # Take advantage of filenames ordering, so that conan_package.tgz and conan_export.tgz
    # can be < conanfile, conaninfo, and sent always the last, so smaller files go first
    for filename, resource_url in sorted(file_urls.items(), reverse=True):
        auth, _ = self._file_server_capabilities(resource_url)
        md5 = snapshot_md5.get(filename, None) if snapshot_md5 else None
        contents = run_downloader(self.requester, None, self.verify_ssl, self._config,
                                  url=resource_url, auth=auth, md5=md5)
        yield os.path.normpath(filename), contents