Example #1
0
def test_helper_getUrl_redirect_too_many(monkeypatch):
    """Exceeding the redirect limit must surface as ArchiveDownloadError."""
    plain_get, session_get = mock_get_redirect(num_redirects=11)
    monkeypatch.setattr(requests, "get", plain_get)
    monkeypatch.setattr(requests.Session, "get", session_get)
    with pytest.raises(ArchiveDownloadError) as exc_info:
        getUrl("some_url", (5, 5))
    assert exc_info.type is ArchiveDownloadError
Example #2
0
def test_helper_getUrl_checksum_error(monkeypatch):
    """A hash mismatch on the downloaded content must raise ArchiveChecksumError."""
    plain_get, session_get = mock_get_redirect(0)
    monkeypatch.setattr(requests, "get", plain_get)
    monkeypatch.setattr(requests.Session, "get", session_get)
    with pytest.raises(ArchiveChecksumError) as exc_info:
        getUrl("some_url", timeout=(5, 5), expected_hash=b"AAAAAAAAAAA")
    assert exc_info.type is ArchiveChecksumError
Example #3
0
def test_helper_getUrl_conn_error(monkeypatch):
    """A forbidden base URL must raise ArchiveConnectionError naming the URL."""
    blocked_url = "https://www.forbidden.com/some_path"
    response_cls = mocked_request_response_class(
        forbidden_baseurls=["https://www.forbidden.com"])
    message_pattern = re.compile(r"^Failure to connect to.+" + re.escape(blocked_url))

    def fake_get(url: str, *args, **kwargs):
        return response_cls(url, {}, text="some_html_content")

    monkeypatch.setattr(requests, "get", fake_get)
    with pytest.raises(ArchiveConnectionError) as exc_info:
        getUrl(blocked_url, (5, 5))
    assert exc_info.type is ArchiveConnectionError
    assert message_pattern.match(format(exc_info.value))
Example #4
0
def test_helper_getUrl_ok(monkeypatch):
    """getUrl returns the response body when the request succeeds."""
    response_cls = mocked_request_response_class()
    monkeypatch.setattr(
        requests, "get", lambda url, **kwargs: response_cls(url, {}, "some_html_content")
    )
    assert getUrl("some_url", timeout=(5, 5)) == "some_html_content"
Example #5
0
    def fetch_http(rest_of_url: str, is_check_hash: bool = True) -> str:
        """Fetch *rest_of_url* from the primary base URL, retrying one random fallback.

        :param rest_of_url: path relative to the configured base URL(s)
        :param is_check_hash: when True, pre-fetch the expected sha256 so the
                              download is verified by getUrl
        :return: the downloaded document text
        :raises ArchiveDownloadError/ArchiveConnectionError: when the last
                candidate base URL also fails
        """
        timeout = (Settings.connection_timeout, Settings.response_timeout)
        expected_hash = get_hash(rest_of_url, "sha256", timeout) if is_check_hash else None
        # Primary base URL plus exactly one randomly-chosen fallback mirror.
        base_urls = Settings.baseurl, random.choice(Settings.fallbacks)
        for i, base_url in enumerate(base_urls):
            try:
                url = posixpath.join(base_url, rest_of_url)
                return getUrl(url=url, timeout=timeout, expected_hash=expected_hash)
            except (ArchiveDownloadError, ArchiveConnectionError):
                if i == len(base_urls) - 1:
                    # Out of fallbacks: re-raise the original exception unchanged.
                    # (Bare `raise` instead of `raise e from e`, which would set
                    # the exception as its own __cause__ and garble the traceback.)
                    raise
                getLogger("aqt.metadata").debug(
                    f"Connection to '{base_url}' failed. Retrying with fallback '{base_urls[i + 1]}'."
                )
Example #6
0
def test_helper_getUrl_redirect_5(monkeypatch):
    """Up to five redirects are followed and the final body is returned."""
    plain_get, session_get = mock_get_redirect(num_redirects=5)
    monkeypatch.setattr(requests, "get", plain_get)
    monkeypatch.setattr(requests.Session, "get", session_get)
    content = getUrl("some_url", (5, 5))
    assert content == "some_html_content"
Example #7
0
 def _download_update_xml(self, update_xml_url):
     """Hook for unit test.

     Fetches the document at ``update_xml_url`` with ``self.timeout`` and
     stores the raw text on ``self.update_xml_text``; kept as a separate
     method so tests can replace it and avoid network access.
     """
     self.update_xml_text = getUrl(update_xml_url, self.timeout)
Example #8
0
def installer(
    qt_archive: QtPackage,
    base_dir: str,
    command: Optional[str],
    queue: multiprocessing.Queue,
    keep: bool = False,
    response_timeout: Optional[int] = None,
):
    """
    Installer function to download archive files and extract it.
    It is called through multiprocessing.Pool()

    :param qt_archive: package metadata (name, archive URL, hash URL, filename)
    :param base_dir: extraction target directory
    :param command: path to an external 7z binary, or None to extract with py7zr
    :param queue: logging queue shared with the parent process
    :param keep: when True, the downloaded archive file is not deleted afterwards
    :param response_timeout: overrides Settings.response_timeout when given
    :raises ArchiveExtractionError: when the external 7z command fails
    """
    name = qt_archive.name
    url = qt_archive.archive_url
    hashurl = qt_archive.hashurl
    archive = qt_archive.archive
    start_time = time.perf_counter()
    # set defaults — this runs in a fresh worker process, so settings must be re-loaded
    Settings.load_settings()
    # set logging
    setup_logging()  # XXX: why need to load again?
    # Replace all handlers with a QueueHandler so records reach the parent process.
    qh = QueueHandler(queue)
    logger = getLogger()
    for handler in logger.handlers:
        handler.close()
        logger.removeHandler(handler)
    logger.addHandler(qh)
    #
    logger.debug("Download URL: {}".format(url))
    if response_timeout is None:
        timeout = (Settings.connection_timeout, Settings.response_timeout)
    else:
        timeout = (Settings.connection_timeout, response_timeout)
    # Renamed from `hash`: avoid shadowing the builtin of the same name.
    expected_hash = binascii.unhexlify(getUrl(hashurl, timeout))
    retry_on_errors(
        action=lambda: downloadBinaryFile(url, archive, "sha1", expected_hash, timeout),
        acceptable_errors=(ArchiveChecksumError, ),
        num_retries=Settings.max_retries_on_checksum_error,
        name=f"Downloading {name}",
    )
    if command is None:
        with py7zr.SevenZipFile(archive, "r") as szf:
            szf.extractall(path=base_dir)
    else:
        if base_dir is not None:
            command_args = [
                command,
                "x",
                "-aoa",
                "-bd",
                "-y",
                "-o{}".format(base_dir),
                archive,
            ]
        else:
            command_args = [command, "x", "-aoa", "-bd", "-y", archive]
        try:
            proc = subprocess.run(command_args,
                                  stdout=subprocess.PIPE,
                                  check=True)
            logger.debug(proc.stdout)
        except subprocess.CalledProcessError as cpe:
            # cpe.stdout is bytes (stdout=PIPE captures bytes, no text=True) and
            # cpe.stderr is None (not captured); decode before joining so the
            # error report itself cannot raise TypeError.
            msg_parts = [f"Extraction error: {cpe.returncode}"]
            for stream in (cpe.stdout, cpe.stderr):
                if stream:
                    msg_parts.append(
                        stream.decode("utf-8", errors="replace")
                        if isinstance(stream, bytes) else stream
                    )
            raise ArchiveExtractionError("\n".join(msg_parts)) from cpe
    if not keep:
        os.unlink(archive)
    logger.info("Finished installation of {} in {:.8f}".format(
        archive,
        time.perf_counter() - start_time))
    # Drain and detach the queue handler before the worker exits.
    qh.flush()
    qh.close()
    logger.removeHandler(qh)
Example #9
0
 def _download_update_xml(self, update_xml_path):
     """Hook for unit test: fetch the update XML and verify its sha256."""
     checksum = get_hash(update_xml_path, "sha256", self.timeout)
     full_url = posixpath.join(self.base, update_xml_path)
     return getUrl(full_url, self.timeout, checksum)