def test_fetch_http_download_error(monkeypatch):
    """fetch_http raises ArchiveDownloadError after every mirror fails.

    Patches getUrl so each call records the requested url and raises; the
    test then requires that two distinct urls were attempted, proving a
    fallback mirror was tried before giving up.

    NOTE(review): this module defines another test with this exact name
    later in the file; Python keeps only the last definition, so this one
    is silently shadowed and never collected by pytest — rename or remove.
    """
    urls_requested = set()

    def _mock(url, **kwargs):
        # Record the url so we can count how many distinct urls were tried.
        urls_requested.add(url)
        raise ArchiveDownloadError()

    monkeypatch.setattr("aqt.metadata.getUrl", _mock)
    with pytest.raises(ArchiveDownloadError) as e:
        MetadataFactory.fetch_http("some_url")
    assert e.type == ArchiveDownloadError
    # Require that a fallback url was tried
    assert len(urls_requested) == 2
def test_fetch_http_ok(monkeypatch):
    """fetch_http returns the downloaded content on a successful request.

    get_hash is patched to return the sha256 of the expected payload so any
    checksum verification inside fetch_http passes; getUrl is patched to
    return the payload directly.

    NOTE(review): this module defines another test with this exact name
    later in the file; Python keeps only the last definition, so this one
    is silently shadowed and never collected by pytest — rename or remove.
    """
    monkeypatch.setattr(
        "aqt.metadata.get_hash",
        lambda *args, **kwargs: hashlib.sha256(b"some_html_content").hexdigest(),
    )
    monkeypatch.setattr("aqt.metadata.getUrl", lambda **kwargs: "some_html_content")
    assert MetadataFactory.fetch_http("some_url") == "some_html_content"
def test_fetch_http_download_error(monkeypatch, exception_on_error):
    """fetch_http propagates the download error after every mirror fails.

    Parametrized via the ``exception_on_error`` fixture (presumably supplying
    the exception class to raise — confirm against conftest). getUrl is
    patched so each call records the requested url and raises that exception;
    get_hash is patched so checksum handling cannot interfere. The test then
    requires that two distinct urls were attempted, proving a fallback mirror
    was tried before giving up.
    """
    urls_requested = set()

    def _mock(url, **kwargs):
        # Record the url so we can count how many distinct urls were tried.
        urls_requested.add(url)
        raise exception_on_error()

    monkeypatch.setattr(
        "aqt.metadata.get_hash",
        lambda *args, **kwargs: hashlib.sha256(b"some_html_content").hexdigest(),
    )
    monkeypatch.setattr("aqt.metadata.getUrl", _mock)
    with pytest.raises(exception_on_error) as e:
        MetadataFactory.fetch_http("some_url")
    assert e.type == exception_on_error
    # Require that a fallback url was tried
    assert len(urls_requested) == 2
def test_fetch_http_failover(monkeypatch):
    """fetch_http recovers when the first mirror fails but the second works.

    getUrl is patched to fail on the first (distinct) url requested and
    succeed on the second; fetch_http must return the content from the
    fallback and must have tried exactly two urls.
    """
    urls_requested = set()

    def _mock(url, **kwargs):
        urls_requested.add(url)
        # Fail only the first distinct url; succeed from the second onward.
        if len(urls_requested) <= 1:
            raise ArchiveDownloadError()
        return "some_html_content"

    monkeypatch.setattr("aqt.metadata.getUrl", _mock)
    # Require that the first attempt failed, but the second did not
    assert MetadataFactory.fetch_http("some_url") == "some_html_content"
    assert len(urls_requested) == 2
def test_fetch_http_ok(monkeypatch):
    """fetch_http returns the downloaded content on a successful request.

    getUrl is patched to return the payload directly on the first attempt.
    """
    monkeypatch.setattr("aqt.metadata.getUrl", lambda **kwargs: "some_html_content")
    assert MetadataFactory.fetch_http("some_url") == "some_html_content"