Example no. 1
 def _get_remote_file_contents(self, url, use_cache):
     # We don't want progress traces for these downloads; they are ugly in the output
     downloader = FileDownloader(self.requester, None, self.verify_ssl, self._config)
     if use_cache and self._config.download_cache:
         downloader = CachedFileDownloader(self._config.download_cache, downloader)
     contents = downloader.download(url, auth=self.auth)
     return contents
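
A note on the pattern above: CachedFileDownloader wraps a plain FileDownloader and only delegates to it on a cache miss, so callers keep a single downloader.download(...) interface. A minimal, self-contained sketch of that wrapping idea (the class names and cache layout below are illustrative stand-ins, not Conan's actual implementation):

import hashlib
import os


class PlainDownloader:
    """Stand-in for FileDownloader: always fetches from the network."""
    def download(self, url):
        return b"<payload fetched from %s>" % url.encode()  # real code would do an HTTP GET


class CachingDownloader:
    """Stand-in for CachedFileDownloader: returns cached bytes, delegates on a miss."""
    def __init__(self, cache_dir, fallback):
        self._cache_dir = cache_dir
        self._fallback = fallback

    def download(self, url):
        key = hashlib.sha256(url.encode()).hexdigest()
        cached = os.path.join(self._cache_dir, key)
        if os.path.exists(cached):
            with open(cached, "rb") as f:
                return f.read()
        contents = self._fallback.download(url)
        os.makedirs(self._cache_dir, exist_ok=True)
        with open(cached, "wb") as f:
            f.write(contents)
        return contents


downloader = PlainDownloader()
downloader = CachingDownloader("/tmp/dl-cache", downloader)  # same wrapping shape as above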
Example no. 2
    def _download_files(self, file_urls, snapshot_md5):
        """
        :param file_urls: dict with {filename: url}
        :param snapshot_md5: dict with {filename: md5 checksum} of files to be downloaded

        It is a generator: it yields (filename, contents) pairs one at a time for memory performance
        """
        downloader = FileDownloader(self.requester, None, self.verify_ssl,
                                    self._config)
        download_cache = self._config.download_cache
        if download_cache:
            assert snapshot_md5 is not None, "if download_cache is set, we need the file checksums"
            downloader = CachedFileDownloader(download_cache, downloader)
        # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz compare
        # lower than conanfile and conaninfo, so with reverse=True they are sent last and the
        # smaller files go first
        for filename, resource_url in sorted(file_urls.items(), reverse=True):
            auth, _ = self._file_server_capabilities(resource_url)
            if download_cache:
                md5 = snapshot_md5[filename]
                contents = downloader.download(resource_url,
                                               auth=auth,
                                               md5=md5)
            else:
                contents = downloader.download(resource_url, auth=auth)
            yield os.path.normpath(filename), contents
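
Because _download_files is a generator, a caller can process each file as soon as it arrives instead of accumulating every payload in memory. A hypothetical way to consume it (client, the URLs and the checksums are placeholders for illustration, not real values):

file_urls = {"conanfile.py": "https://server/files/conanfile.py",
             "conaninfo.txt": "https://server/files/conaninfo.txt"}
snapshot_md5 = {"conanfile.py": "<md5 of conanfile.py>",
                "conaninfo.txt": "<md5 of conaninfo.txt>"}

# client is assumed to be an instance of the class defining _download_files
for filename, contents in client._download_files(file_urls, snapshot_md5):
    # one (filename, contents) pair at a time; only a single payload is in memory
    print(filename, len(contents))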
Example no. 3
    def _get_path(self, url, path):
        urls = self._get_file_to_url_dict(url)

        def is_dir(the_path):
            if the_path == ".":
                return True
            for _the_file in urls:
                if the_path == _the_file:
                    return False
                elif _the_file.startswith(the_path):
                    return True
            raise NotFoundException("The specified path doesn't exist")

        if is_dir(path):
            ret = []
            for the_file in urls:
                if path == "." or the_file.startswith(path):
                    tmp = the_file[len(path) - 1:].split("/", 1)[0]
                    if tmp not in ret:
                        ret.append(tmp)
            return sorted(ret)
        else:
            downloader = FileDownloader(self.requester, None, self.verify_ssl,
                                        self._config)
            auth, _ = self._file_server_capabilities(urls[path])
            content = downloader.download(urls[path], auth=auth)

            return decode_text(content)
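
In the directory branch, the listing is derived purely from the keys of urls: every key under the requested path contributes its first remaining path component. A small self-contained trace of that logic for path = "." (the file names are made up for illustration):

urls = {"conanfile.py": "https://server/f/conanfile.py",
        "include/lib.h": "https://server/f/include_lib.h",
        "include/util.h": "https://server/f/include_util.h"}
path = "."

ret = []
for the_file in urls:
    if path == "." or the_file.startswith(path):
        tmp = the_file[len(path) - 1:].split("/", 1)[0]  # first path component under 'path'
        if tmp not in ret:
            ret.append(tmp)

print(sorted(ret))  # ['conanfile.py', 'include']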
Example no. 4
 def test_succeed_download_to_memory_if_not_interrupted(self):
     expected_content = b"some data"
     requester = MockRequester(expected_content)
     downloader = FileDownloader(requester=requester,
                                 output=self.out,
                                 verify=None,
                                 config=_ConfigMock())
     actual_content = downloader.download("fake_url", file_path=None)
     self.assertEqual(expected_content, actual_content)
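
MockRequester and _ConfigMock are helpers defined elsewhere in Conan's test suite and are not shown here. The sketch below is only a guess at the kind of test double involved, mimicking the small slice of the requests API that a streaming downloader consumes (everything in it is an assumption for illustration; it also omits the interruption/resume simulation the real helper performs):

class _FakeResponse:
    def __init__(self, data, chunk_size=None):
        self.status_code = 200
        self.ok = True
        self.headers = {"Content-Length": str(len(data))}
        self._data = data
        self._chunk_size = chunk_size or len(data) or 1

    def iter_content(self, chunk_size=None):
        # stream the canned body in fixed-size chunks
        step = self._chunk_size
        for i in range(0, len(self._data), step):
            yield self._data[i:i + step]


class _FakeRequester:
    def __init__(self, data, chunk_size=None):
        self._data = data
        self._chunk_size = chunk_size

    def get(self, url, **kwargs):
        # return the same canned response for any GET
        return _FakeResponse(self._data, self._chunk_size)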
Example no. 5
 def test_fail_interrupted_download_to_file_if_no_progress(self):
     expected_content = b"some data"
     requester = MockRequester(expected_content, chunk_size=0)
     downloader = FileDownloader(requester=requester,
                                 output=self.out,
                                 verify=None,
                                 config=_ConfigMock())
     with self.assertRaisesRegexp(ConanException, r"Download failed"):
         downloader.download("fake_url", file_path=self.target)
Example no. 6
 def test_fail_download_to_memory_if_interrupted(self):
     expected_content = b"some data"
     requester = MockRequester(expected_content, chunk_size=4)
     downloader = FileDownloader(requester=requester,
                                 output=self.out,
                                 verify=None,
                                 config=_ConfigMock())
     with self.assertRaisesRegexp(ConanException,
                                  r"Transfer interrupted before complete"):
         downloader.download("fake_url", file_path=None)
Example no. 7
 def test_resume_download_to_file_if_interrupted(self):
     expected_content = b"some data"
     requester = MockRequester(expected_content, chunk_size=4)
     downloader = FileDownloader(requester=requester,
                                 output=self.out,
                                 verify=None,
                                 config=_ConfigMock())
     downloader.download("fake_url", file_path=self.target)
     actual_content = load(self.target, binary=True)
     self.assertEqual(expected_content, actual_content)
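
The resume behaviour this test exercises is the standard HTTP range mechanism: after a partial write, the client requests only the missing bytes and appends them to the file. A generic illustration with the requests library (not Conan's implementation; the URL and helper name are placeholders):

import os
import requests

def resume_download(url, file_path):
    already = os.path.getsize(file_path) if os.path.exists(file_path) else 0
    headers = {"Range": "bytes=%d-" % already} if already else {}
    response = requests.get(url, stream=True, headers=headers)
    mode = "ab" if response.status_code == 206 else "wb"  # 206 = Partial Content
    with open(file_path, mode) as handle:
        for chunk in response.iter_content(chunk_size=8192):
            handle.write(chunk)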
Example no. 8
 def _download_and_save_files(self, urls, dest_folder, files, use_cache):
     downloader = FileDownloader(self.requester, self._output, self.verify_ssl, self._config)
     if use_cache and self._config.download_cache:
         downloader = CachedFileDownloader(self._config.download_cache, downloader)
     # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz compare
     # lower than conanfile and conaninfo, so with reverse=True they are sent last and the
     # smaller files go first
     for filename in sorted(files, reverse=True):
         if self._output and not self._output.is_terminal:
             self._output.writeln("Downloading %s" % filename)
         resource_url = urls[filename]
         abs_path = os.path.join(dest_folder, filename)
         downloader.download(resource_url, abs_path, auth=self.auth)
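
The ordering trick in the comment can be checked directly: the .tgz names compare lower than conanfile/conaninfo because "_" sorts before letters, so the reverse sort puts the small metadata files first and the large archives last:

files = ["conan_package.tgz", "conanfile.py", "conaninfo.txt", "conan_export.tgz"]
print(sorted(files, reverse=True))
# ['conaninfo.txt', 'conanfile.py', 'conan_package.tgz', 'conan_export.tgz']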
Example no. 9
 def test_fail_interrupted_download_if_server_not_accepting_ranges(self):
     expected_content = b"some data"
     requester = MockRequester(expected_content,
                               chunk_size=4,
                               accept_ranges=False)
     downloader = FileDownloader(requester=requester,
                                 output=self.out,
                                 verify=None,
                                 config=_ConfigMock())
     with self.assertRaisesRegexp(ConanException,
                                  r"Incorrect Content-Range header"):
         downloader.download("fake_url", file_path=self.target)
Example no. 10
 def test_download_with_compressed_content_and_smaller_content_length(self):
     expected_content = b"some data"
     echo_header = {
         "Content-Encoding": "gzip",
         "Content-Length": len(expected_content) - 1
     }
     requester = MockRequester(expected_content, echo_header=echo_header)
     downloader = FileDownloader(requester=requester,
                                 output=self.out,
                                 verify=None,
                                 config=_ConfigMock())
     downloader.download("fake_url", file_path=self.target)
     actual_content = load(self.target, binary=True)
     self.assertEqual(expected_content, actual_content)
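
The scenario behind this test is a body served with Content-Encoding: gzip, where the Content-Length header describes the compressed bytes on the wire while the client ends up with the decoded payload, whose size does not match that header; a strict size check against Content-Length would therefore spuriously fail. A minimal standard-library illustration of the mismatch (not Conan code):

import gzip

payload = b"some data"
wire_body = gzip.compress(payload)          # what actually travels over the network
content_length = len(wire_body)             # what a server would advertise
decoded = gzip.decompress(wire_body)        # what the client sees after transparent decoding
assert decoded == payload
assert len(decoded) != content_length       # sizes differ, yet the download is correct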
Example no. 11
    def _download_files_to_folder(self, file_urls, to_folder, snapshot_md5):
        """
        :param file_urls: dict with {filename: url}
        :param snapshot_md5: dict with {filename: md5 checksum} of files to be downloaded

        It writes the downloaded files to disk (appending to the file, so only chunks are kept in memory)
        """
        downloader = FileDownloader(self.requester, self._output,
                                    self.verify_ssl, self._config)
        download_cache = self._config.download_cache
        if download_cache:
            assert snapshot_md5 is not None, "if download_cache is set, we need the file checksums"
            downloader = CachedFileDownloader(download_cache, downloader)

        ret = {}
        # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz compare
        # lower than conanfile and conaninfo, so with reverse=True they are sent last and the
        # smaller files go first
        for filename, resource_url in sorted(file_urls.items(), reverse=True):
            if self._output and not self._output.is_terminal:
                self._output.writeln("Downloading %s" % filename)
            auth, _ = self._file_server_capabilities(resource_url)
            abs_path = os.path.join(to_folder, filename)
            if download_cache:
                md5 = snapshot_md5[filename]
                downloader.download(resource_url, abs_path, auth=auth, md5=md5)
            else:
                downloader.download(resource_url, abs_path, auth=auth)
            ret[filename] = abs_path
        return ret
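
This is the disk-backed counterpart of _download_files above: instead of yielding payloads it streams each file into to_folder and returns a map of where everything landed. A hypothetical call (client, the URLs and the checksums are placeholders):

file_urls = {"conan_package.tgz": "https://server/f/conan_package.tgz",
             "conaninfo.txt": "https://server/f/conaninfo.txt"}
snapshot_md5 = {"conan_package.tgz": "<md5>", "conaninfo.txt": "<md5>"}

paths = client._download_files_to_folder(file_urls, "/tmp/pkg", snapshot_md5)
# paths == {"conaninfo.txt": "/tmp/pkg/conaninfo.txt",
#           "conan_package.tgz": "/tmp/pkg/conan_package.tgz"}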
Example no. 12
def download(url,
             filename,
             verify=True,
             out=None,
             retry=None,
             retry_wait=None,
             overwrite=False,
             auth=None,
             headers=None,
             requester=None,
             md5='',
             sha1='',
             sha256=''):
    """Retrieves a file from a given URL into a file with a given filename.
       It uses certificates from a list of known verifiers for https downloads,
       but this can be optionally disabled.

    :param url: URL to download. It can be a list, in which case only the first URL is downloaded
                and the following URLs are used as mirrors in case of a download error.
    :param filename: Name of the file to be created in the local storage
    :param verify: When False, disables https certificate validation
    :param out: An object with a write() method can be passed to get the output. stdout will be
                used if not specified
    :param retry: Number of retries in case of failure. Default is overridden by general.retry in
                  the conan.conf file or the CONAN_RETRY environment variable
    :param retry_wait: Seconds to wait between download attempts. Default is overridden by
                       general.retry_wait in the conan.conf file or the CONAN_RETRY_WAIT
                       environment variable
    :param overwrite: When True, Conan will overwrite the destination file if it exists. Otherwise
                      it will raise an exception
    :param auth: A tuple of (user, password) for HTTP Basic authentication
    :param headers: A dictionary with additional headers
    :param requester: HTTP requests instance
    :param md5: MD5 hash code to check the downloaded file
    :param sha1: SHA-1 hash code to check the downloaded file
    :param sha256: SHA-256 hash code to check the downloaded file
    :return: None
    """
    out = default_output(out, 'conans.client.tools.net.download')
    requester = default_requester(requester,
                                  'conans.client.tools.net.download')
    from conans.tools import _global_config as config

    # It might be possible that users provide their own requester
    retry = retry if retry is not None else config.retry
    retry = retry if retry is not None else 1
    retry_wait = retry_wait if retry_wait is not None else config.retry_wait
    retry_wait = retry_wait if retry_wait is not None else 5

    checksum = sha256 or sha1 or md5

    downloader = FileDownloader(requester=requester,
                                output=out,
                                verify=verify,
                                config=config)
    if config and config.download_cache and checksum:
        downloader = CachedFileDownloader(config.download_cache,
                                          downloader,
                                          user_download=True)

    def _download_file(file_url):
        # The download cache is only used if a checksum is provided, otherwise, a normal download
        if isinstance(downloader, CachedFileDownloader):
            downloader.download(file_url,
                                filename,
                                retry=retry,
                                retry_wait=retry_wait,
                                overwrite=overwrite,
                                auth=auth,
                                headers=headers,
                                md5=md5,
                                sha1=sha1,
                                sha256=sha256)
        else:
            downloader.download(file_url,
                                filename,
                                retry=retry,
                                retry_wait=retry_wait,
                                overwrite=overwrite,
                                auth=auth,
                                headers=headers)
            if md5:
                check_md5(filename, md5)
            if sha1:
                check_sha1(filename, sha1)
            if sha256:
                check_sha256(filename, sha256)
        out.writeln("")

    if not isinstance(url, (list, tuple)):
        _download_file(url)
    else:  # We were provided several URLs to try
        for url_it in url:
            try:
                _download_file(url_it)
                break
            except Exception as error:
                message = "Could not download from the URL {}: {}.".format(
                    url_it, str(error))
                out.warn(message + " Trying another mirror.")
        else:
            raise ConanException(
                "All downloads from ({}) URLs have failed.".format(len(url)))