def test_collect_links(self, caplog, data):
    """collect_links() should deduplicate index URLs, gather find-links
    files, and log the locations it will search.
    """
    caplog.set_level(logging.DEBUG)

    # Pass the same index URL twice: the collector is expected to
    # visit it only once.
    collector = make_test_link_collector(
        find_links=[data.find_links],
        index_urls=[PyPI.simple_url, PyPI.simple_url],
    )
    collected = collector.collect_links('twine')

    # Spot-check the CollectedLinks return value.
    assert len(collected.files) > 20
    check_links_include(collected.files, names=['simple-1.0.tar.gz'])

    assert len(collected.find_links) == 1
    check_links_include(collected.find_links, names=['packages'])
    # find-links URLs should be marked as cacheable...
    assert collected.find_links[0].cache_link_parsing

    assert collected.project_urls == [Link('https://pypi.org/simple/twine/')]
    # ...while index URLs should be marked as *un*cacheable.
    assert not collected.project_urls[0].cache_link_parsing

    expected_message = dedent("""\
        1 location(s) to search for versions of twine:
        * https://pypi.org/simple/twine/""")
    assert caplog.record_tuples == [
        ('pip._internal.index.collector', logging.DEBUG, expected_message),
    ]
def test_collect_links(self, mock_get_html_response, data):
    """collect_links() should fetch the index page through the
    collector's session and expose its parsed links.
    """
    index_url = 'https://pypi.org/simple/twine/'
    fake_page = make_fake_html_page(index_url)
    mock_get_html_response.return_value = fake_page

    collector = make_test_link_collector(
        find_links=[data.find_links],
        index_urls=[PyPI.simple_url],
    )
    collected = collector.collect_links('twine')

    # The page must be fetched exactly once, via the collector's own session.
    mock_get_html_response.assert_called_once_with(
        index_url, session=collector.session,
    )

    # Spot-check the CollectedLinks return value.
    assert len(collected.files) > 20
    check_links_include(collected.files, names=['simple-1.0.tar.gz'])

    assert len(collected.find_links) == 1
    check_links_include(collected.find_links, names=['packages'])

    pages = collected.pages
    assert list(pages) == [index_url]
    page_links = pages[index_url]
    assert len(page_links) == 1
    assert page_links[0].url == (
        'https://pypi.org/abc-1.0.tar.gz#md5=000000000')
def test_collect_sources(
    self, caplog: pytest.LogCaptureFixture, data: TestData
) -> None:
    """collect_sources() should deduplicate index URLs, yield the
    find-links files, and log the locations it will search.
    """
    caplog.set_level(logging.DEBUG)

    # Pass the same index URL twice: only one page source should result.
    collector = make_test_link_collector(
        find_links=[data.find_links],
        index_urls=[PyPI.simple_url, PyPI.simple_url],
    )
    collected = collector.collect_sources(
        "twine",
        candidates_from_page=lambda link: [
            InstallationCandidate("twine", "1.0", link)
        ],
    )

    # Flatten the nested sources, skipping entries that are None.
    files = [
        link
        for group in collected
        for src in group
        if src is not None
        for link in src.file_links()
    ]
    pages = [
        page
        for group in collected
        for src in group
        if src is not None
        for page in src.page_candidates()
    ]

    # Spot-check the returned sources.
    assert len(files) > 20
    check_links_include(files, names=["simple-1.0.tar.gz"])

    assert [page.link for page in pages] == [Link("https://pypi.org/simple/twine/")]
    # Index URLs should be marked as *un*cacheable.
    assert not pages[0].link.cache_link_parsing

    expected_message = dedent(
        """\
        1 location(s) to search for versions of twine:
        * https://pypi.org/simple/twine/"""
    )
    assert caplog.record_tuples == [
        ("pip._internal.index.collector", logging.DEBUG, expected_message),
    ]
def test_fetch_page(self, mock_get_html_response):
    """fetch_page() should return the fetched response and use the
    collector's own session for the request.
    """
    page_url = 'https://pypi.org/simple/twine/'
    fake_response = make_fake_html_response(page_url)
    mock_get_html_response.return_value = fake_response

    collector = make_test_link_collector()
    fetched = collector.fetch_page(Link(page_url))

    assert fetched.content == fake_response.content
    assert fetched.encoding is None
    assert fetched.url == page_url

    # Also check that the right session object was passed to
    # _get_html_response().
    mock_get_html_response.assert_called_once_with(
        page_url, session=collector.session,
    )
def test_collect_links(self, mock_get_html_response, caplog, data):
    """collect_links() should deduplicate index URLs, fetch the page
    through the collector's session, and log the search locations.
    """
    caplog.set_level(logging.DEBUG)

    index_url = 'https://pypi.org/simple/twine/'
    fake_page = make_fake_html_response(index_url)
    mock_get_html_response.return_value = fake_page

    # Pass the same index URL twice: the collector is expected to
    # visit it only once.
    collector = make_test_link_collector(
        find_links=[data.find_links],
        index_urls=[PyPI.simple_url, PyPI.simple_url],
    )
    collected = collector.collect_links('twine')

    # Exactly one fetch, via the collector's own session.
    mock_get_html_response.assert_called_once_with(
        index_url, session=collector.session,
    )

    # Spot-check the CollectedLinks return value.
    assert len(collected.files) > 20
    check_links_include(collected.files, names=['simple-1.0.tar.gz'])

    assert len(collected.find_links) == 1
    check_links_include(collected.find_links, names=['packages'])

    pages = collected.pages
    assert list(pages) == [index_url]
    page_links = pages[index_url]
    assert len(page_links) == 1
    assert page_links[0].url == (
        'https://pypi.org/abc-1.0.tar.gz#md5=000000000')

    expected_message = dedent("""\
        1 location(s) to search for versions of twine:
        * https://pypi.org/simple/twine/""")
    assert caplog.record_tuples == [
        ('pip._internal.collector', logging.DEBUG, expected_message),
    ]
def test_fetch_response(self, mock_get_simple_response: mock.Mock) -> None:
    """fetch_response() should return the fetched page, preserve the
    link's cache_link_parsing flag, and use the collector's session.
    """
    page_url = "https://pypi.org/simple/twine/"
    fake_response = make_fake_html_response(page_url)
    mock_get_simple_response.return_value = fake_response

    collector = make_test_link_collector()
    location = Link(page_url, cache_link_parsing=False)
    fetched = collector.fetch_response(location)

    assert fetched is not None
    assert fetched.content == fake_response.content
    assert fetched.encoding is None
    assert fetched.url == page_url
    # The flag on the Link must carry through to the fetched page.
    assert fetched.cache_link_parsing == location.cache_link_parsing

    # Also check that the right session object was passed to
    # _get_simple_response().
    mock_get_simple_response.assert_called_once_with(
        page_url, session=collector.session,
    )