Exemplo n.º 1
0
def _download_multipass(dl_dir: str, echoer) -> str:
    """Download the Multipass installer into *dl_dir* and verify its digest.

    Returns the path of the downloaded installer. Raises
    ProviderMultipassDownloadFailed on network failure or digest mismatch.
    """
    dl_url = _fetch_installer_url()
    dl_basename = os.path.basename(dl_url)
    dl_path = os.path.join(dl_dir, dl_basename)

    echoer.info("Downloading Multipass installer...\n{} -> {}".format(
        dl_url, dl_path))

    try:
        request = requests.get(dl_url, stream=True, allow_redirects=True)
        request.raise_for_status()
        download_requests_stream(request, dl_path)
    except requests.RequestException as e:
        # Chain the original exception so the underlying network error
        # is preserved in the traceback.
        raise ProviderMultipassDownloadFailed(_requests_exception_hint(e)) from e

    # Verify the download against the pinned SHA3-384 digest before use.
    digest = calculate_sha3_384(dl_path)
    if digest != _MULTIPASS_DL_SHA3_384:
        raise ProviderMultipassDownloadFailed(
            "download failed verification (expected={} but found={})".format(
                _MULTIPASS_DL_SHA3_384, digest))

    echoer.info("Verified installer successfully...")
    return dl_path
Exemplo n.º 2
0
    def download(self):
        """Stream self.source into self.source_dir and record it as self.file."""
        response = requests.get(self.source, stream=True, allow_redirects=True)
        response.raise_for_status()

        destination = os.path.join(self.source_dir, os.path.basename(self.source))
        self.file = destination
        download_requests_stream(response, destination)
Exemplo n.º 3
0
    def download(self):
        """Fetch self.source into self.source_dir, using the file cache when possible.

        Returns the local path of the downloaded (or cached) file.
        """
        # First check if we already have the source file cached.
        file_cache = FileCache()
        if self.source_checksum:
            # Avoid shadowing the builtin `hash`.
            algorithm, expected_digest = split_checksum(self.source_checksum)
            cache_file = file_cache.get(algorithm=algorithm, hash=expected_digest)
            if cache_file:
                self.file = os.path.join(self.source_dir, os.path.basename(cache_file))
                # We make this copy as the provisioning logic can delete
                # this file and we don't want that.
                shutil.copy2(cache_file, self.file)
                return self.file

        # If not we download and store
        self.file = os.path.join(self.source_dir, os.path.basename(self.source))

        if snapcraft.internal.common.get_url_scheme(self.source) == "ftp":
            # requests does not support FTP; fall back to urllib.
            download_urllib_source(self.source, self.file)
        else:
            request = requests.get(self.source, stream=True, allow_redirects=True)
            request.raise_for_status()

            download_requests_stream(request, self.file)

        # We verify the file if source_checksum is defined
        # and we cache the file for future reuse.
        if self.source_checksum:
            # Cache under the digest actually computed for this file, not the
            # name left over from the earlier cache lookup.
            algorithm, digest = verify_checksum(self.source_checksum, self.file)
            file_cache.cache(filename=self.file, algorithm=algorithm, hash=digest)
        return self.file
Exemplo n.º 4
0
    def download(self, filepath: str = None) -> str:
        """Fetch self.source, optionally to an explicit *filepath*.

        Uses the file cache when self.source_checksum matches a cached entry.
        Returns the local path of the downloaded (or cached) file.
        """
        if filepath is None:
            self.file = os.path.join(self.source_dir, os.path.basename(self.source))
        else:
            self.file = filepath

        # First check if we already have the source file cached.
        file_cache = FileCache()
        if self.source_checksum:
            # Avoid shadowing the builtin `hash`.
            algorithm, expected_digest = split_checksum(self.source_checksum)
            cache_file = file_cache.get(algorithm=algorithm, hash=expected_digest)
            if cache_file:
                # We make this copy as the provisioning logic can delete
                # this file and we don't want that.
                shutil.copy2(cache_file, self.file)
                return self.file

        # If not we download and store
        if snapcraft.internal.common.get_url_scheme(self.source) == "ftp":
            # requests does not support FTP; fall back to urllib.
            download_urllib_source(self.source, self.file)
        else:
            request = requests.get(self.source, stream=True, allow_redirects=True)
            request.raise_for_status()

            download_requests_stream(request, self.file)

        # We verify the file if source_checksum is defined
        # and we cache the file for future reuse.
        if self.source_checksum:
            # Cache under the digest actually computed for this file, not the
            # name left over from the earlier cache lookup.
            algorithm, digest = verify_checksum(self.source_checksum, self.file)
            file_cache.cache(filename=self.file, algorithm=algorithm, hash=digest)
        return self.file
Exemplo n.º 5
0
    def download(self):
        """Download self.source with a streamed GET and store it in self.source_dir."""
        resp = requests.get(self.source, stream=True, allow_redirects=True)
        resp.raise_for_status()

        basename = os.path.basename(self.source)
        self.file = os.path.join(self.source_dir, basename)
        download_requests_stream(resp, self.file)
Exemplo n.º 6
0
    def _download_snap(self, name, channel, arch, download_path,
                       download_url, expected_sha512):
        """Download a snap, retrying transient stream errors, and verify its hash.

        Raises errors.SHAMismatchError when the downloaded file does not
        match *expected_sha512*.
        """
        if self._is_downloaded(download_path, expected_sha512):
            logger.info('Already downloaded {} at {}'.format(
                name, download_path))
            return
        # The format string has a single placeholder; the stray second
        # argument previously passed here was silently ignored.
        logger.info('Downloading {}'.format(name))

        # HttpAdapter cannot help here as this is a stream.
        # LP: #1617765
        not_downloaded = True
        retry_count = 5
        while not_downloaded and retry_count:
            request = self.cpi.get(download_url, stream=True)
            request.raise_for_status()
            try:
                download_requests_stream(request, download_path)
                not_downloaded = False
            except requests.exceptions.ChunkedEncodingError:
                retry_count -= 1
                if not retry_count:
                    # Bare raise preserves the original traceback.
                    raise
                sleep(1)

        if self._is_downloaded(download_path, expected_sha512):
            logger.info('Successfully downloaded {} at {}'.format(
                name, download_path))
        else:
            raise errors.SHAMismatchError(download_path, expected_sha512)
Exemplo n.º 7
0
    def download(self):
        """Fetch self.source into self.source_dir, using urllib for FTP URLs."""
        destination = os.path.join(
                self.source_dir, os.path.basename(self.source))
        self.file = destination

        if common.get_url_scheme(self.source) == 'ftp':
            download_urllib_source(self.source, destination)
            return

        response = requests.get(
            self.source, stream=True, allow_redirects=True)
        response.raise_for_status()
        download_requests_stream(response, destination)
Exemplo n.º 8
0
    def download(self):
        """Download self.source into self.source_dir (urllib for ftp, requests otherwise)."""
        target = os.path.join(
                self.source_dir, os.path.basename(self.source))
        self.file = target

        scheme = snapcraft.internal.common.get_url_scheme(self.source)
        if scheme == 'ftp':
            download_urllib_source(self.source, target)
        else:
            response = requests.get(
                self.source, stream=True, allow_redirects=True)
            response.raise_for_status()
            download_requests_stream(response, target)
Exemplo n.º 9
0
    def _download_snap(self, name, channel, arch, download_path, download_url,
                       expected_sha512):
        """Download a snap with resume support and retries, then verify its hash.

        Raises errors.SHAMismatchError when the downloaded file does not
        match *expected_sha512*.
        """
        if self._is_downloaded(download_path, expected_sha512):
            logger.info('Already downloaded {} at {}'.format(
                name, download_path))
            return
        # The format string has a single placeholder; the stray second
        # argument previously passed here was silently ignored.
        logger.info('Downloading {}'.format(name))

        # we only resume when redirected to our CDN since we use internap's
        # special sauce.
        resume_possible = False
        total_read = 0
        probe_url = requests.head(download_url)
        if (probe_url.is_redirect
                and 'internap' in probe_url.headers['Location']):
            download_url = probe_url.headers['Location']
            resume_possible = True

        # HttpAdapter cannot help here as this is a stream.
        # LP: #1617765
        not_downloaded = True
        retry_count = 5
        while not_downloaded and retry_count:
            headers = {}
            if resume_possible and os.path.exists(download_path):
                # Resume from whatever was already written to disk.
                total_read = os.path.getsize(download_path)
                headers['Range'] = 'bytes={}-'.format(total_read)
            request = self.cpi.get(download_url, headers=headers, stream=True)
            request.raise_for_status()
            redirections = [h.headers['Location'] for h in request.history]
            if redirections:
                logger.debug('Redirections for {!r}: {}'.format(
                    download_url, ', '.join(redirections)))
            try:
                download_requests_stream(request,
                                         download_path,
                                         total_read=total_read)
                not_downloaded = False
            except requests.exceptions.ChunkedEncodingError as e:
                logger.debug('Error while downloading: {!r}. '
                             'Retries left to download: {!r}.'.format(
                                 e, retry_count))
                retry_count -= 1
                if not retry_count:
                    # Bare raise preserves the original traceback.
                    raise
                sleep(1)

        if self._is_downloaded(download_path, expected_sha512):
            logger.info('Successfully downloaded {} at {}'.format(
                name, download_path))
        else:
            raise errors.SHAMismatchError(download_path, expected_sha512)
Exemplo n.º 10
0
    def _download_snap(
        self, name, channel, arch, download_path, download_url, expected_sha512
    ):
        """Download a snap to download_path, resuming and retrying as needed.

        Skips the download entirely if an already-verified file exists at
        download_path.  Raises errors.SHAMismatchError when the final file
        does not match expected_sha512.  (channel and arch are unused here;
        presumably kept for interface compatibility — verify with callers.)
        """
        if self._is_downloaded(download_path, expected_sha512):
            logger.info("Already downloaded {} at {}".format(name, download_path))
            return
        logger.info("Downloading {}".format(name))

        # we only resume when redirected to our CDN since we use internap's
        # special sauce.
        resume_possible = False
        total_read = 0
        probe_url = requests.head(download_url)
        if probe_url.is_redirect and "internap" in probe_url.headers["Location"]:
            download_url = probe_url.headers["Location"]
            resume_possible = True

        # HttpAdapter cannot help here as this is a stream.
        # LP: #1617765
        not_downloaded = True
        retry_count = 5
        while not_downloaded and retry_count:
            headers = {}
            if resume_possible and os.path.exists(download_path):
                # Resume from the bytes already written to disk.
                total_read = os.path.getsize(download_path)
                headers["Range"] = "bytes={}-".format(total_read)
            request = self.cpi.get(download_url, headers=headers, stream=True)
            request.raise_for_status()
            redirections = [h.headers["Location"] for h in request.history]
            if redirections:
                logger.debug(
                    "Redirections for {!r}: {}".format(
                        download_url, ", ".join(redirections)
                    )
                )
            try:
                download_requests_stream(request, download_path, total_read=total_read)
                not_downloaded = False
            except requests.exceptions.ChunkedEncodingError as e:
                # Transient stream truncation: retry up to 5 times, then
                # re-raise the last error.
                logger.debug(
                    "Error while downloading: {!r}. "
                    "Retries left to download: {!r}.".format(e, retry_count)
                )
                retry_count -= 1
                if not retry_count:
                    raise e
                sleep(1)

        if self._is_downloaded(download_path, expected_sha512):
            logger.info("Successfully downloaded {} at {}".format(name, download_path))
        else:
            raise errors.SHAMismatchError(download_path, expected_sha512)
Exemplo n.º 11
0
    def execute(self):
        """Refresh the cached parts list, skipping the download on HTTP 304."""
        self._request = requests.get(self._parts_uri, stream=True,
                                     headers=self._load_headers())

        # 304 Not Modified: the cached copy is still current.
        if self._request.status_code == 304:
            logger.info('The parts cache is already up to date.')
            return
        self._request.raise_for_status()

        download_requests_stream(self._request, self.parts_yaml,
                                 'Downloading parts list')
        self._save_headers()
Exemplo n.º 12
0
    def execute(self):
        """Download the remote parts list unless the server reports 304."""
        conditional_headers = self._load_headers()
        response = requests.get(self._parts_uri, stream=True,
                                headers=conditional_headers)
        self._request = response

        if response.status_code == 304:
            logger.info('The parts cache is already up to date.')
            return
        response.raise_for_status()

        download_requests_stream(response, self.parts_yaml,
                                 'Downloading parts list')
        self._save_headers()
Exemplo n.º 13
0
    def _download_snap(self, name, channel, arch, download_path,
                       download_url, expected_sha512):
        """Download a snap to download_path and verify its SHA-512.

        Raises errors.SHAMismatchError when the downloaded file does not
        match *expected_sha512*.
        """
        if self._is_downloaded(download_path, expected_sha512):
            logger.info('Already downloaded {} at {}'.format(
                name, download_path))
            return
        # The format string has a single placeholder; the stray second
        # argument previously passed here was silently ignored.
        logger.info('Downloading {}'.format(name))
        request = self.cpi.get(download_url, stream=True)
        request.raise_for_status()
        download_requests_stream(request, download_path)

        if self._is_downloaded(download_path, expected_sha512):
            logger.info('Successfully downloaded {} at {}'.format(
                name, download_path))
        else:
            raise errors.SHAMismatchError(download_path, expected_sha512)
Exemplo n.º 14
0
    def execute(self):
        """Fetch the remote parts list, translating connection failures.

        Raises errors.RemotePartsUpdateConnectionError on network failure.
        """
        try:
            response = requests.get(
                self._parts_uri, stream=True, headers=self._load_headers()
            )
        except requests.exceptions.RequestException as e:
            raise errors.RemotePartsUpdateConnectionError(e) from e
        self._request = response

        # 304 Not Modified: nothing to download.
        if response.status_code == 304:
            logger.info("The parts cache is already up to date.")
            return
        response.raise_for_status()

        download_requests_stream(
            response, self.parts_yaml, "Downloading parts list"
        )
        self._save_headers()
Exemplo n.º 15
0
    def _download_snap(self, download_details, download_path):
        """Stream download_details.url to download_path, resuming and retrying.

        Retries ChunkedEncodingError up to 5 times before re-raising; resume
        is only attempted when redirected to the internap CDN.  Note: unlike
        sibling variants, no checksum verification happens here — presumably
        the caller verifies; confirm against call sites.
        """
        # we only resume when redirected to our CDN since we use internap's
        # special sauce.
        total_read = 0
        probe_url = requests.head(download_details.url)
        if probe_url.is_redirect and "internap" in probe_url.headers[
                "Location"]:
            download_url = probe_url.headers["Location"]
            resume_possible = True
        else:
            download_url = download_details.url
            resume_possible = False

        # HttpAdapter cannot help here as this is a stream.
        # LP: #1617765
        not_downloaded = True
        retry_count = 5
        while not_downloaded and retry_count:
            headers = {}
            if resume_possible and os.path.exists(download_path):
                # Resume from the bytes already written to disk.
                total_read = os.path.getsize(download_path)
                headers["Range"] = "bytes={}-".format(total_read)
            request = self.client.request("GET",
                                          download_url,
                                          headers=headers,
                                          stream=True)
            request.raise_for_status()
            redirections = [h.headers["Location"] for h in request.history]
            if redirections:
                logger.debug("Redirections for {!r}: {}".format(
                    download_url, ", ".join(redirections)))
            try:
                download_requests_stream(request,
                                         download_path,
                                         total_read=total_read)
                not_downloaded = False
            except requests.exceptions.ChunkedEncodingError as e:
                # Transient stream truncation: back off one second and retry.
                logger.debug("Error while downloading: {!r}. "
                             "Retries left to download: {!r}.".format(
                                 e, retry_count))
                retry_count -= 1
                if not retry_count:
                    raise e
                sleep(1)
Exemplo n.º 16
0
 def _download_and_cache(self) -> str:
     """Download the build image, verify its checksum, and store it in the cache.

     Returns the path of the cached image.  Raises
     errors.BuildImageRequestError on a non-OK response and
     errors.BuildImageChecksumError on digest mismatch.
     """
     response = requests.get(self.url, stream=True, allow_redirects=True)
     if not response.ok:
         raise errors.BuildImageRequestError(
             base=self.base, status_code=response.status_code
         )
     # tempfile.TemporaryDirectory does not create the prefix directory for us.
     os.makedirs(self._image_cache.file_cache, exist_ok=True)
     with tempfile.TemporaryDirectory(
         prefix=self._image_cache.file_cache
     ) as scratch_dir:
         image_path = os.path.join(scratch_dir, "{}-vm".format(self.base))
         download_requests_stream(response, image_path)
         actual_digest = calculate_hash(image_path, algorithm=self.algorithm)
         if self.checksum != actual_digest:
             raise errors.BuildImageChecksumError(
                 expected=self.checksum,
                 calculated=actual_digest,
                 algorithm=self.algorithm,
             )
         return self._image_cache.cache(
             filename=image_path, algorithm=self.algorithm, hash=self.checksum
         )
Exemplo n.º 17
0
    def test_download_request_stream(self):
        """The stream helper should write the response body to dest_file."""
        response = requests.get(self.source, stream=True, allow_redirects=True)
        indicators.download_requests_stream(response, self.dest_file)

        self.assertTrue(os.path.exists(self.dest_file))
Exemplo n.º 18
0
    def test_download_request_stream(self):
        """Streaming a GET response should create the destination file."""
        streamed = requests.get(self.source, stream=True, allow_redirects=True)
        indicators.download_requests_stream(streamed, self.dest_file)

        file_created = os.path.exists(self.dest_file)
        self.assertTrue(file_created)