def _download_files(self, file_urls, snapshot_md5):
    """Yield (normalized filename, contents) pairs for each entry in file_urls.

    :param file_urls: dict mapping {filename: url}
    :param snapshot_md5: dict mapping {filename: md5 checksum} of the files to
                         download; required when a download cache is configured
    Implemented as a generator so that file contents are not all kept in
    memory at once.
    """
    downloader = FileDownloader(self.requester, None, self.verify_ssl, self._config)
    cache_folder = self._config.download_cache
    if cache_folder:
        assert snapshot_md5 is not None, "if download_cache is set, we need the file checksums"
        downloader = CachedFileDownloader(cache_folder, downloader)
    # Reverse-sorted filenames put conan_package.tgz / conan_export.tgz after
    # conanfile / conaninfo, so the smaller files are transferred first
    for fname, url in sorted(file_urls.items(), reverse=True):
        auth, _ = self._file_server_capabilities(url)
        if cache_folder:
            data = downloader.download(url, auth=auth, md5=snapshot_md5[fname])
        else:
            data = downloader.download(url, auth=auth)
        yield os.path.normpath(fname), data
def _download_files_to_folder(self, file_urls, to_folder, snapshot_md5):
    """Download every file in file_urls into to_folder.

    :param file_urls: dict mapping {filename: url}
    :param to_folder: destination folder for the downloaded files
    :param snapshot_md5: dict mapping {filename: md5 checksum}; required when
                         a download cache is configured
    :return: dict mapping {filename: absolute path written}
    Files are written to disk chunk by chunk (appending), so only one chunk
    at a time is held in memory.
    """
    downloader = FileDownloader(self.requester, self._output, self.verify_ssl, self._config)
    cache_folder = self._config.download_cache
    if cache_folder:
        assert snapshot_md5 is not None, "if download_cache is set, we need the file checksums"
        downloader = CachedFileDownloader(cache_folder, downloader)
    result = {}
    # Reverse-sorted filenames put conan_package.tgz / conan_export.tgz after
    # conanfile / conaninfo, so the smaller files are transferred first
    for fname, url in sorted(file_urls.items(), reverse=True):
        if self._output and not self._output.is_terminal:
            self._output.writeln("Downloading %s" % fname)
        auth, _ = self._file_server_capabilities(url)
        dest = os.path.join(to_folder, fname)
        if cache_folder:
            downloader.download(url, dest, auth=auth, md5=snapshot_md5[fname])
        else:
            downloader.download(url, dest, auth=auth)
        result[fname] = dest
    return result
def test_fail_interrupted_download_to_file_if_no_progress(self):
    """A download to file that makes no progress must fail with 'Download failed'."""
    expected_content = b"some data"
    # chunk_size=0 simulates a server that never delivers any bytes
    requester = MockRequester(expected_content, chunk_size=0)
    downloader = FileDownloader(requester=requester, output=self.out, verify=None,
                                config=_ConfigMock())
    # assertRaisesRegex: assertRaisesRegexp is a deprecated alias, removed in Python 3.12
    with self.assertRaisesRegex(ConanException, r"Download failed"):
        downloader.download("fake_url", file_path=self.target)
def test_succeed_download_to_file_if_not_interrupted(self):
    """An uninterrupted download writes the complete payload to the target file."""
    payload = b"some data"
    requester = MockRequester(payload)
    downloader = FileDownloader(requester=requester, output=self.out, verify=None,
                                config=_ConfigMock())
    downloader.download("fake_url", file_path=self.target)
    self.assertEqual(payload, load(self.target, binary=True))
def test_fail_download_to_memory_if_interrupted(self):
    """An in-memory download cut short mid-transfer must raise an interruption error."""
    expected_content = b"some data"
    # chunk_size=4 makes the mock stop after a partial chunk of the 9-byte payload
    requester = MockRequester(expected_content, chunk_size=4)
    downloader = FileDownloader(requester=requester, output=self.out, verify=None,
                                config=_ConfigMock())
    # assertRaisesRegex: assertRaisesRegexp is a deprecated alias, removed in Python 3.12
    with self.assertRaisesRegex(ConanException, r"Transfer interrupted before complete"):
        downloader.download("fake_url", file_path=None)
def _download_and_save_files(self, urls, dest_folder, files, use_cache):
    """Download each filename in 'files' (its URL taken from 'urls') into dest_folder.

    :param urls: dict mapping {filename: url}
    :param dest_folder: destination folder for the downloaded files
    :param files: iterable of filenames to fetch
    :param use_cache: when True and a download cache is configured, go through it
    """
    downloader = FileDownloader(self.requester, self._output, self.verify_ssl, self._config)
    if use_cache and self._config.download_cache:
        downloader = CachedFileDownloader(self._config.download_cache, downloader)
    # Reverse-sorted filenames put conan_package.tgz / conan_export.tgz after
    # conanfile / conaninfo, so the smaller files are transferred first
    for fname in sorted(files, reverse=True):
        if self._output and not self._output.is_terminal:
            self._output.writeln("Downloading %s" % fname)
        target_path = os.path.join(dest_folder, fname)
        downloader.download(urls[fname], target_path, auth=self.auth)
def test_fail_interrupted_download_if_server_not_accepting_ranges(self):
    """If the server rejects range requests, an interrupted download cannot resume
    and must fail with a Content-Range error."""
    expected_content = b"some data"
    # chunk_size=4 interrupts the transfer; accept_ranges=False blocks the resume
    requester = MockRequester(expected_content, chunk_size=4, accept_ranges=False)
    downloader = FileDownloader(requester=requester, output=self.out, verify=None,
                                config=_ConfigMock())
    # assertRaisesRegex: assertRaisesRegexp is a deprecated alias, removed in Python 3.12
    with self.assertRaisesRegex(ConanException, r"Incorrect Content-Range header"):
        downloader.download("fake_url", file_path=self.target)
def test_download_with_compressed_content_and_smaller_content_length(self):
    """A gzip-encoded response may declare a Content-Length smaller than the
    decoded payload; the downloader must still store the full decoded content."""
    payload = b"some data"
    echo_header = {"Content-Encoding": "gzip",
                   "Content-Length": len(payload) - 1}
    requester = MockRequester(payload, echo_header=echo_header)
    downloader = FileDownloader(requester=requester, output=self.out, verify=None,
                                config=_ConfigMock())
    downloader.download("fake_url", file_path=self.target)
    self.assertEqual(payload, load(self.target, binary=True))
def _get_remote_file_contents(self, url, use_cache):
    """Download 'url' and return its raw contents.

    :param url: resource URL to fetch
    :param use_cache: when True and a download cache is configured, go through it
    """
    # Output is deliberately None: download traces would be ugly for these files
    downloader = FileDownloader(self.requester, None, self.verify_ssl, self._config)
    if use_cache and self._config.download_cache:
        downloader = CachedFileDownloader(self._config.download_cache, downloader)
    return downloader.download(url, auth=self.auth)
def _get_path(self, url, path):
    """Return the contents of remote file 'path' (decoded text), or, when
    'path' denotes a directory, the sorted list of its immediate children.

    :param url: remote URL used to obtain the {filename: file_url} mapping
    :param path: path relative to the remote root ("." means the root itself)
    :raises NotFoundException: if 'path' matches no file and no directory prefix
    """
    urls = self._get_file_to_url_dict(url)

    def is_dir(the_path):
        # "." always denotes the root directory
        if the_path == ".":
            return True
        for _the_file in urls:
            if the_path == _the_file:
                # exact match against a file entry -> it is a file
                return False
            elif _the_file.startswith(the_path):
                # some file lives under this prefix -> it is a directory
                return True
        raise NotFoundException("The specified path doesn't exist")

    if is_dir(path):
        ret = []
        for the_file in urls:
            if path == "." or the_file.startswith(path):
                # Strip the directory prefix and keep only the first remaining
                # path component (the immediate child). The len(path) - 1 slice
                # presumably compensates for a separator in the matched prefix
                # (for path == "." it yields the whole name) -- TODO confirm
                # the exact path format against callers.
                tmp = the_file[len(path) - 1:].split("/", 1)[0]
                if tmp not in ret:
                    ret.append(tmp)
        return sorted(ret)
    else:
        downloader = FileDownloader(self.requester, None, self.verify_ssl, self._config)
        auth, _ = self._file_server_capabilities(urls[path])
        content = downloader.download(urls[path], auth=auth)
        return decode_text(content)