def test_full_parse(self, module_name, count, expected_archive_link):
    """Parse a saved index page fixture and spot-check the result.

    ``module_name`` selects the ``archive_links.<module_name>.html``
    fixture; ``count`` is the expected number of parsed links, and
    ``expected_archive_link`` must appear among them.
    """
    # importlib_resources.read_text() is deprecated; use the files()
    # traversable API instead (read_text() defaulted to UTF-8, so the
    # explicit encoding preserves behavior).
    html = (
        importlib_resources.files(simple_data)
        / f"archive_links.{module_name}.html"
    ).read_text(encoding="utf-8")
    archive_links = simple.parse_archive_links(html)
    assert len(archive_links) == count
    assert expected_archive_link in archive_links
def test_full_parse(self, module_name, count, expected_archive_link):
    """A saved index page parses into the expected set of links."""
    resource_name = f"archive_links.{module_name}.html"
    resource = importlib_resources.files(simple_data) / resource_name
    page = resource.read_text(encoding="utf-8")
    parsed = simple.parse_archive_links(page)
    assert len(parsed) == count
    assert expected_archive_link in parsed
def test_requires_python(self, html, supported, unsupported):
    """Supported versions satisfy requires-python; unsupported ones do not."""
    links = simple.parse_archive_links(html)
    assert len(links) == 1
    specifier = links[0].requires_python
    assert packaging.version.Version(supported) in specifier
    if unsupported:
        assert packaging.version.Version(unsupported) not in specifier
def test_str(self, archive_link):
    """Make sure __str__ round-trips through parse_archive_links."""
    html = str(archive_link)
    roundtrip = simple.parse_archive_links(html)
    assert len(roundtrip) == 1
    # Leftover debug print() calls removed; the assertion message now
    # carries the same diagnostic information on failure.
    assert archive_link == roundtrip[0], f"{html!r} parsed as {roundtrip[0]}"
def test_yanked(self, html, expected):
    """The parsed link's yanked attribute matches the markup."""
    parsed = simple.parse_archive_links(html)
    assert len(parsed) == 1
    link = parsed[0]
    assert link.yanked == expected
def test_gpg_sig(self, html, expected_gpg_sig):
    """The GPG-signature flag is read from the parsed link."""
    results = simple.parse_archive_links(html)
    assert len(results) == 1
    link = results[0]
    assert link.gpg_sig == expected_gpg_sig
def test_hash_(self, html, expected_hash):
    """The link's hash_ attribute matches the expected value."""
    parsed = simple.parse_archive_links(html)
    assert len(parsed) == 1
    link = parsed[0]
    assert link.hash_ == expected_hash
def test_url(self, html, expected_url):
    """The parsed link exposes the expected URL."""
    results = simple.parse_archive_links(html)
    assert len(results) == 1
    assert results[0].url == expected_url
def test_filename(self, html, expected_filename):
    """The parsed link exposes the expected filename."""
    parsed = simple.parse_archive_links(html)
    assert len(parsed) == 1
    link = parsed[0]
    assert link.filename == expected_filename
def get_candidates(self, project):
    """Fetch *project*'s simple-index page and return installable candidates.

    Only sdists (``.tar.gz``) and wheels (``.whl``) are kept; each
    surviving archive link is wrapped in a ``Candidate``.
    """
    url = create_project_url(PYPI_INDEX, project)
    # NOTE(review): urlopen() has no timeout here — a hung server blocks
    # forever; consider passing timeout=.
    with urlopen(url) as response:
        html = response.read().decode()
    # Renamed the ambiguous single-letter variable `l` (PEP 8 / E741).
    return [
        Candidate(link)
        for link in parse_archive_links(html)
        if link.filename.endswith((".tar.gz", ".whl"))
    ]
def test_hash(self, attribute):
    """A metadata attribute with a hash value parses into (algorithm, digest)."""
    html = f'<a href="spam-1.2.3-py3.none.any.whl" {attribute}>spam-1.2.3-py3.none.any.whl</a>'
    links = simple.parse_archive_links(html)
    link = links[0]
    assert link.metadata == ("sha256", "abcdef")
def test_attribute_only(self, attribute):
    """A bare metadata attribute (no value) parses as empty strings."""
    html = f'<a href="spam-1.2.3-py3.none.any.whl" {attribute} >spam-1.2.3-py3.none.any.whl</a>'
    link = simple.parse_archive_links(html)[0]
    assert link.metadata == ("", "")
def test_default(self):
    """Without a metadata attribute, metadata is None."""
    html = '<a href="spam-1.2.3-py3.none.any.whl">spam-1.2.3-py3.none.any.whl</a>'
    link = simple.parse_archive_links(html)[0]
    assert link.metadata is None