Example #1
def _prepare_download(
        resp,  # type: Response
        link,  # type: Link
        progress_bar  # type: str
):
    # type: (...) -> Iterable[bytes]
    total_length = _get_http_response_size(resp)

    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    logged_url = redact_auth_from_url(url)

    if total_length:
        logged_url = '{} ({})'.format(logged_url, format_size(total_length))

    if is_from_cache(resp):
        logger.info("Using cached %s", logged_url)
    else:
        logger.info("Downloading %s", logged_url)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif is_from_cache(resp):
        show_progress = False
    elif not total_length:
        show_progress = True
    elif total_length > (40 * 1000):
        show_progress = True
    else:
        show_progress = False

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)

    if not show_progress:
        return chunks

    return DownloadProgressProvider(progress_bar, max=total_length)(chunks)
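The iterable returned by _prepare_download is meant to be drained by the caller, which writes the chunks to their destination. A minimal consumer, sketched under the assumption that the caller only has an open binary file object; the helper save_chunks and the usage lines are illustrative, not pip's actual code:

def save_chunks(chunks, content_file):
    # Illustrative consumer: write every downloaded chunk to an open
    # binary file object. chunks is any iterable of bytes, such as the
    # iterable returned by _prepare_download above.
    for chunk in chunks:
        content_file.write(chunk)

# Hypothetical usage:
# chunks = _prepare_download(resp, link, progress_bar="on")
# with open(target_path, "wb") as f:
#     save_chunks(chunks, f)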
Example #2
def _download_url(
        resp,  # type: Response
        link,  # type: Link
        content_file,  # type: IO[Any]
        hashes,  # type: Optional[Hashes]
        progress_bar  # type: str
):
    # type: (...) -> None
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    redacted_url = redact_auth_from_url(url)

    if total_length:
        logger.info("Downloading %s (%s)", redacted_url,
                    format_size(total_length))
    elif is_from_cache(resp):
        logger.info("Using cached %s", redacted_url)
    else:
        logger.info("Downloading %s", redacted_url)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif is_from_cache(resp):
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    def written_chunks(chunks):
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)

    downloaded_chunks = written_chunks(
        progress_indicator(response_chunks(resp, CONTENT_CHUNK_SIZE),
                           CONTENT_CHUNK_SIZE))
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)
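consume here drains the written_chunks generator so that every chunk is actually written out even when no hash check is requested. A stand-in with the same behaviour, using a common Python idiom (pip's own helper may be implemented differently):

from collections import deque

def consume(iterator):
    # Exhaust an iterator purely for its side effects, discarding the values.
    # A deque with maxlen=0 pulls every item without storing any of them.
    deque(iterator, maxlen=0)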
Example #3
def _download(self, start: int, end: int) -> None:
    """Download bytes from start to end inclusively."""
    with self._stay():
        # Find the already-downloaded intervals that overlap [start, end].
        left = bisect_left(self._right, start)
        right = bisect_right(self._left, end)
        # Fetch each still-missing sub-range and write it at its offset.
        for start, end in self._merge(start, end, left, right):
            response = self._stream_response(start, end)
            response.raise_for_status()
            self.seek(start)
            for chunk in response_chunks(response, self._chunk_size):
                self._file.write(chunk)
def _download(self, start, end):
    # type: (int, int) -> None
    """Download bytes from start to end inclusively."""
    with self._stay():
        left = bisect_left(self._right, start)
        right = bisect_right(self._left, end)
        for start, end in self._merge(start, end, left, right):
            response = self._stream_response(start, end)
            response.raise_for_status()
            self.seek(start)
            for chunk in response_chunks(response, self._chunk_size):
                self._file.write(chunk)
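_stream_response(start, end) is expected to return a streaming HTTP response limited to the requested byte range. A minimal sketch using the requests library and a Range header; the _session and _url attributes are assumptions for illustration, not necessarily the real implementation:

def _stream_response(self, start, end):
    # type: (int, int) -> Response
    # Ask the server for bytes start..end (inclusive) only, streamed so the
    # caller can consume it chunk by chunk. self._session and self._url are
    # assumed attributes of the lazily downloaded file object.
    headers = {
        "Range": "bytes={}-{}".format(start, end),
        "Accept-Encoding": "identity",
    }
    return self._session.get(self._url, headers=headers, stream=True)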
Example #5
def _prepare_download(
    resp: Response,
    link: Link,
    progress_bar: str,
) -> Iterable[bytes]:
    total_length = _get_http_response_size(resp)

    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    logged_url = redact_auth_from_url(url)

    if total_length:
        logged_url = "{} ({})".format(logged_url, format_size(total_length))

    if is_from_cache(resp):
        logger.info("Using cached %s", logged_url)
    else:
        logger.info("Downloading %s", logged_url)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif is_from_cache(resp):
        show_progress = False
    elif not total_length:
        show_progress = True
    elif total_length > (40 * 1000):
        show_progress = True
    else:
        show_progress = False

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)

    if not show_progress:
        return chunks

    renderer = get_download_progress_renderer(bar_type=progress_bar,
                                              size=total_length)
    return renderer(chunks)
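get_download_progress_renderer returns a callable that wraps a chunk iterator and reports progress as the chunks pass through it. A simplified, hypothetical renderer that only logs byte counts (not pip's actual implementation) could look like this:

def get_plain_progress_renderer(size):
    # Hypothetical stand-in for a progress renderer factory. The returned
    # callable yields chunks unchanged while tracking how many bytes have
    # passed through; size may be 0 when the total length is unknown.
    def renderer(chunks):
        downloaded = 0
        for chunk in chunks:
            downloaded += len(chunk)
            if size:
                logger.info("Downloaded %d of %d bytes", downloaded, size)
            yield chunk
    return renderer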
Example #6
        logger.info("Using cached %s", logged_url)
    else:
        logger.info("Downloading %s", logged_url)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif is_from_cache(resp):
        show_progress = False
    elif not total_length:
        show_progress = True
    elif total_length > (40 * 1000):
        show_progress = True
    else:
        show_progress = False

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)

    if not show_progress:
        return chunks

    return DownloadProgressProvider(
        progress_bar, max=total_length
    )(chunks)


def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.
    """
    return os.path.basename(filename)
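Because only the basename is kept, a hostile Content-Disposition header cannot steer the saved file outside the target directory. For example, on a POSIX system:

>>> sanitize_content_filename("../../etc/passwd")
'passwd'
>>> sanitize_content_filename("package-1.0.tar.gz")
'package-1.0.tar.gz'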