Example #1
    def _download_files(self, file_urls, snapshot_md5):
        """
        :param file_urls: dict with {filename: url}
        :param snapshot_md5: dict with {filename: md5 checksum} of files to be downloaded

        It's a generator, so it yields elements for memory performance
        """
        downloader = FileDownloader(self.requester, None, self.verify_ssl,
                                    self._config)
        download_cache = self._config.download_cache
        if download_cache:
            assert snapshot_md5 is not None, "if download_cache is set, we need the file checksums"
            downloader = CachedFileDownloader(download_cache, downloader)
        # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz sort before
        # conanfile and conaninfo, so with reverse sorting they are sent last and smaller files go first
        for filename, resource_url in sorted(file_urls.items(), reverse=True):
            auth, _ = self._file_server_capabilities(resource_url)
            if download_cache:
                md5 = snapshot_md5[filename]
                contents = downloader.download(resource_url,
                                               auth=auth,
                                               md5=md5)
            else:
                contents = downloader.download(resource_url, auth=auth)
            yield os.path.normpath(filename), contents
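A brief, hypothetical sketch of how this generator might be consumed; the caller method, the dest_folder argument and the binary file mode are assumptions for illustration (they are not part of the project code above), and contents is assumed to be the bytes returned by FileDownloader when no file path is given:

    def _save_downloaded_files(self, file_urls, snapshot_md5, dest_folder):
        # Hypothetical caller inside the same client class: persist each yielded
        # (filename, contents) pair as soon as it is produced, so only one file's
        # contents stay in memory at a time.
        for filename, contents in self._download_files(file_urls, snapshot_md5):
            file_path = os.path.join(dest_folder, filename)
            with open(file_path, "wb") as handle:
                handle.write(contents)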
Example #2
File: net.py  Project: zhongpan/conan
def download(url,
             filename,
             verify=True,
             out=None,
             retry=None,
             retry_wait=None,
             overwrite=False,
             auth=None,
             headers=None,
             requester=None):

    out = default_output(out, 'conans.client.tools.net.download')
    requester = default_requester(requester,
                                  'conans.client.tools.net.download')

    # It might be possible that users provide their own requester
    retry = retry if retry is not None else getattr(requester, "retry", None)
    retry = retry if retry is not None else 1
    retry_wait = retry_wait if retry_wait is not None else getattr(
        requester, "retry_wait", None)
    retry_wait = retry_wait if retry_wait is not None else 5

    downloader = FileDownloader(requester=requester, output=out, verify=verify)
    downloader.download(url,
                        filename,
                        retry=retry,
                        retry_wait=retry_wait,
                        overwrite=overwrite,
                        auth=auth,
                        headers=headers)
    out.writeln("")
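A minimal usage sketch for this helper; the URL, filename and header values below are placeholders chosen for illustration, not taken from the project above:

# Hypothetical call site, e.g. from a recipe's source() method.
# retry/retry_wait fall back to the requester attributes (or 1 and 5) as shown above.
download("https://example.com/releases/zlib-1.2.11.tar.gz",
         "zlib-1.2.11.tar.gz",
         retry=3,
         retry_wait=10,
         overwrite=True,
         headers={"User-Agent": "conan-client"})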
Example #3
    def _download_files_to_folder(self, file_urls, to_folder, snapshot_md5):
        """
        :param file_urls: dict with {filename: url}

        It writes downloaded files to disk (appending to the file, keeping only chunks in memory)
        """
        downloader = FileDownloader(self.requester, self._output,
                                    self.verify_ssl, self._config)
        download_cache = self._config.download_cache
        if download_cache:
            assert snapshot_md5 is not None, "if download_cache is set, we need the file checksums"
            downloader = CachedFileDownloader(download_cache, downloader)

        ret = {}
        # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz sort before
        # conanfile and conaninfo, so with reverse sorting they are sent last and smaller files go first
        for filename, resource_url in sorted(file_urls.items(), reverse=True):
            if self._output and not self._output.is_terminal:
                self._output.writeln("Downloading %s" % filename)
            auth, _ = self._file_server_capabilities(resource_url)
            abs_path = os.path.join(to_folder, filename)
            if download_cache:
                md5 = snapshot_md5[filename]
                downloader.download(resource_url, abs_path, auth=auth, md5=md5)
            else:
                downloader.download(resource_url, abs_path, auth=auth)
            ret[filename] = abs_path
        return ret
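A hypothetical caller sketch for this method; the file names, URLs, checksums and the dest_folder variable are made up to show the shape of the arguments and of the returned {filename: abs_path} mapping:

        # Hypothetical usage inside the same client class:
        file_urls = {"conanfile.py": "https://remote.example/files/conanfile.py",
                     "conanmanifest.txt": "https://remote.example/files/conanmanifest.txt"}
        snapshot_md5 = {"conanfile.py": "0123456789abcdef0123456789abcdef",
                        "conanmanifest.txt": "fedcba9876543210fedcba9876543210"}
        local_paths = self._download_files_to_folder(file_urls, dest_folder, snapshot_md5)
        # local_paths == {"conanfile.py": os.path.join(dest_folder, "conanfile.py"), ...}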
Example #4
    def _download_and_save_files(self, urls, dest_folder, files):
        downloader = FileDownloader(self.requester, self._output, self.verify_ssl)
        # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz sort before
        # conanfile and conaninfo, so with reverse sorting they are sent last and smaller files go first
        for filename in sorted(files, reverse=True):
            if self._output and not self._output.is_terminal:
                self._output.writeln("Downloading %s" % filename)
            resource_url = urls[filename]
            abs_path = os.path.join(dest_folder, filename)
            downloader.download(resource_url, abs_path, auth=self.auth)
Example #5
    def _get_path(self, url, path):
        urls = self._get_file_to_url_dict(url)

        def is_dir(the_path):
            if the_path == ".":
                return True
            for _the_file in urls:
                if the_path == _the_file:
                    return False
                elif _the_file.startswith(the_path):
                    return True
            raise NotFoundException("The specified path doesn't exist")

        if is_dir(path):
            ret = []
            for the_file in urls:
                if path == "." or the_file.startswith(path):
                    tmp = the_file[len(path) - 1:].split("/", 1)[0]
                    if tmp not in ret:
                        ret.append(tmp)
            return sorted(ret)
        else:
            downloader = FileDownloader(self.requester, None, self.verify_ssl)
            auth, _ = self._file_server_capabilities(urls[path])
            content = downloader.download(urls[path], auth=auth)

            return decode_text(content)
Example #6
    def _get_remote_file_contents(self, url, use_cache):
        # We don't want traces in the output for these downloads, they are ugly
        downloader = FileDownloader(self.requester, None, self.verify_ssl,
                                    self._config)
        if use_cache and self._config.download_cache:
            downloader = CachedFileDownloader(self._config.download_cache,
                                              downloader)
        contents = downloader.download(url, auth=self.auth)
        return contents
Example #7
    def _download_files_to_folder(self, file_urls, to_folder):
        """
        :param file_urls: dict with {filename: url}

        It writes downloaded files to disk (appending to the file, keeping only chunks in memory)
        """
        downloader = FileDownloader(self.requester, self._output,
                                    self.verify_ssl)
        ret = {}
        # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz sort before
        # conanfile and conaninfo, so with reverse sorting they are sent last and smaller files go first
        for filename, resource_url in sorted(file_urls.items(), reverse=True):
            if self._output:
                self._output.writeln("Downloading %s" % filename)
            auth, _ = self._file_server_capabilities(resource_url)
            abs_path = os.path.join(to_folder, filename)
            downloader.download(resource_url, abs_path, auth=auth)
            if self._output:
                self._output.writeln("")
            ret[filename] = abs_path
        return ret
Example #8
    def _download_files(self, file_urls, quiet=False):
        """
        :param file_urls: dict with {filename: url}

        It's a generator, so it yields elements for memory performance
        """
        output = self._output if not quiet else None
        downloader = FileDownloader(self.requester, output, self.verify_ssl)
        # Take advantage of filename ordering: conan_package.tgz and conan_export.tgz sort before
        # conanfile and conaninfo, so with reverse sorting they are sent last and smaller files go first
        for filename, resource_url in sorted(file_urls.items(), reverse=True):
            if output and not output.is_terminal:
                output.writeln("Downloading %s" % filename)
            auth, _ = self._file_server_capabilities(resource_url)
            contents = downloader.download(resource_url, auth=auth)
            yield os.path.normpath(filename), contents
Example #9
    def _get_remote_file_contents(self, url):
        # We don't want traces in the output for these downloads, they are ugly
        downloader = FileDownloader(self.requester, None, self.verify_ssl)
        contents = downloader.download(url, auth=self.auth)
        return contents
Example #10
File: net.py  Project: vermosen/conan
def download(url,
             filename,
             verify=True,
             out=None,
             retry=None,
             retry_wait=None,
             overwrite=False,
             auth=None,
             headers=None,
             requester=None,
             md5='',
             sha1='',
             sha256=''):

    out = default_output(out, 'conans.client.tools.net.download')
    requester = default_requester(requester,
                                  'conans.client.tools.net.download')
    from conans.tools import _global_config as config

    # It might be possible that users provide their own requester
    retry = retry if retry is not None else config.retry
    retry = retry if retry is not None else 1
    retry_wait = retry_wait if retry_wait is not None else config.retry_wait
    retry_wait = retry_wait if retry_wait is not None else 5

    downloader = FileDownloader(requester=requester,
                                output=out,
                                verify=verify,
                                config=config)
    checksum = sha256 or sha1 or md5
    # The download cache is only used if a checksum is provided, otherwise, a normal download
    if config and config.download_cache and checksum:
        downloader = CachedFileDownloader(config.download_cache,
                                          downloader,
                                          user_download=True)
        downloader.download(url,
                            filename,
                            retry=retry,
                            retry_wait=retry_wait,
                            overwrite=overwrite,
                            auth=auth,
                            headers=headers,
                            md5=md5,
                            sha1=sha1,
                            sha256=sha256)
    else:
        downloader.download(url,
                            filename,
                            retry=retry,
                            retry_wait=retry_wait,
                            overwrite=overwrite,
                            auth=auth,
                            headers=headers)
        if md5:
            check_md5(filename, md5)
        if sha1:
            check_sha1(filename, sha1)
        if sha256:
            check_sha256(filename, sha256)

    out.writeln("")
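A hedged usage sketch for this checksum-aware variant; the URL, filename and digest below are placeholders. Supplying any of md5/sha1/sha256 both verifies the downloaded file and, when config.download_cache is set, routes the request through CachedFileDownloader as in the branch above:

# Hypothetical call site: the sha256 value is a placeholder digest, not a real checksum.
download("https://example.com/releases/openssl-1.1.1k.tar.gz",
         "openssl-1.1.1k.tar.gz",
         sha256="0000000000000000000000000000000000000000000000000000000000000000",
         retry=2,
         retry_wait=5)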