def test_find_wheels_non_universal(self, loghub):
    """With universal_only=False a python-2-only wheel is collected too."""
    store = ViewLinkStore(
        "http://something/index",
        {"+links": [
            {"href": "http://b/pytest-2.7.0.tar.gz", "rel": "releasefile"},
            {"href": "http://b/pytest-2.7.0-py2-none-any.whl",
             "rel": "releasefile"},
        ]})
    release_links = store.get_links(rel="releasefile")
    sdist_links, wheel_links = find_sdist_and_wheels(
        loghub, release_links, universal_only=False)
    # one sdist and the non-universal wheel must both be picked up
    assert len(sdist_links) == 1
    assert sdist_links[0].basename.endswith(".tar.gz")
    assert len(wheel_links) == 1
    assert wheel_links[0].basename.endswith("py2-none-any.whl")
    # no "skipping non-universal wheel" warning may have been logged
    assert 'only universal wheels' not in '\n'.join(
        loghub._getmatcher().lines)
def test_prepare_toxrun_args2(self, loghub, pseudo_current, tmpdir, reqmock, initproj):
    # basically the same test as above, but it's testing the unpack
    # path for packages that have an underscore in the name
    # (project name "prep-under" vs. filename stem "prep_under")
    vl = ViewLinkStore("http://something/index", {"+links": [
        {"href": "http://b/prep_under-1.0.zip", "rel": "releasefile"},
        {"href": "http://b/prep_under-1.0.tar.gz", "rel": "releasefile"},
        {"href": "http://b/prep_under-1.0-py2.py3-none-any.whl",
         "rel": "releasefile"},
        {"href": "http://b/prep_under-1.0-py2-none-any.whl",
         "rel": "releasefile"},
    ], "name": "prep-under", "version": "1.0"})
    links = vl.get_links(rel="releasefile")
    sdist_links, wheel_links = find_sdist_and_wheels(loghub, links)
    dev_index = DevIndex(loghub, tmpdir, pseudo_current)
    # build real release files so the mocked http responses below can
    # serve actual archive content for unpacking
    initproj("prep_under-1.0", filedefs={})
    subprocess.check_call(["python", "setup.py", "sdist",
                           "--formats=gztar,zip"])
    subprocess.check_call(["python", "setup.py", "bdist_wheel",
                           "--universal"])
    # register each built file under the href used in the link store
    for p in py.path.local("dist").listdir():
        reqmock.mockresponse("http://b/" + p.basename, code=200,
                             method="GET", data=p.read("rb"))
    toxrunargs = prepare_toxrun_args(dev_index, vl, sdist_links, wheel_links)
    # expect the tar.gz sdist, the zip sdist and the universal wheel;
    # the non-universal py2 wheel was filtered by find_sdist_and_wheels
    assert len(toxrunargs) == 3
    sdist1, sdist2, wheel1 = toxrunargs
    assert sdist1[0].basename == "prep_under-1.0.tar.gz"
    assert sdist1[1].path_unpacked.strpath.endswith(
        "targz" + os.sep + "prep_under-1.0")
    assert sdist2[0].basename == "prep_under-1.0.zip"
    assert sdist2[1].path_unpacked.strpath.endswith(
        "zip" + os.sep + "prep_under-1.0")
    assert wheel1[0].basename == "prep_under-1.0-py2.py3-none-any.whl"
    assert str(wheel1[1].path_unpacked).endswith(wheel1[0].basename)
def test_find_wheels_and_sdist(self, loghub):
    """Both sdist formats and the universal wheel are discovered."""
    store = ViewLinkStore(
        "http://something/index",
        {"+links": [
            {"href": "http://b/pytest-2.7.0.zip", "rel": "releasefile"},
            {"href": "http://b/pytest-2.7.0.tar.gz", "rel": "releasefile"},
            {"href": "http://b/pytest-2.7.0-py2.py3-none-any.whl",
             "rel": "releasefile"},
        ]})
    sdist_links, wheel_links = find_sdist_and_wheels(
        loghub, store.get_links(rel="releasefile"))
    # two sdists come back, with the tar.gz ordered before the zip
    assert len(sdist_links) == 2
    assert sdist_links[0].basename.endswith(".tar.gz")
    assert sdist_links[1].basename.endswith(".zip")
    assert len(wheel_links) == 1
    assert wheel_links[0].basename == "pytest-2.7.0-py2.py3-none-any.whl"
def test_frompath(self, initproj, devpi, name_version, out_devpi, path, runproc):
    """Upload a release zip plus a built doc zip and verify both links.

    ``name_version`` is either a ``(name, version)`` tuple or an already
    joined ``name-version`` string; ``path`` is the index-relative URL
    fragment queried afterwards via ``getjson``.
    """
    from devpi_common.archive import zip_dir
    if isinstance(name_version, tuple):
        name_version_str = "%s-%s" % name_version
    else:
        name_version_str = name_version
    initproj(name_version, {"doc": {
        "conf.py": "",
        "index.html": "<html/>"}})
    tmpdir = py.path.local()
    runproc(tmpdir, "python setup.py sdist --format=zip".split())
    bpath = tmpdir.join('build')
    # build the sphinx docs; the process output itself is not needed
    # (previously it was assigned to ``out`` and immediately overwritten)
    runproc(
        tmpdir,
        "python setup.py build_sphinx -E --build-dir".split() +
        [bpath.strpath])
    dist = tmpdir.join("dist")
    zip_dir(bpath.join('html'), dist.join("%s.doc.zip" % name_version_str))
    assert len(dist.listdir()) == 2
    # sort so the release file comes first, the doc zip second
    (p, dp) = sorted(dist.listdir(), key=lambda x: '.doc.zip' in x.basename)
    hub = devpi("upload", p, dp)
    url = hub.current.get_index_url().url + path
    out = out_devpi("getjson", url)
    data = json.loads(out.stdout.str())
    vv = ViewLinkStore(url, data["result"])
    assert len(vv.get_links()) == 2
    links = {x.rel: x.basename.lower() for x in vv.get_links()}
    assert links["releasefile"] == "%s.zip" % name_version_str
    assert links["doczip"] == "%s.doc.zip" % name_version_str
def test_prepare_toxrun_args(self, loghub, pseudo_current, tmpdir, reqmock, initproj):
    # XXX this test was a bit hard to setup and is also somewhat covered by
    # the below wheel functional test so unclear if it's worth to
    # maintain it (but now that we have it ...)
    vl = ViewLinkStore("http://something/index", {"+links": [
        {"href": "http://b/prep1-1.0.zip", "rel": "releasefile"},
        {"href": "http://b/prep1-1.0.tar.gz", "rel": "releasefile"},
        {"href": "http://b/prep1-1.0-py2.py3-none-any.whl",
         "rel": "releasefile"},
        {"href": "http://b/prep1-1.0-py2-none-any.whl",
         "rel": "releasefile"},
    ], "name": "prep1", "version": "1.0"})
    links = vl.get_links(rel="releasefile")
    sdist_links, wheel_links = find_sdist_and_wheels(loghub, links)
    dev_index = DevIndex(loghub, tmpdir, pseudo_current)
    # build real release files so the mocked responses below can serve
    # actual archive content to the unpack code
    initproj("prep1-1.0", filedefs={})
    subprocess.check_call(["python", "setup.py", "sdist",
                           "--formats=gztar,zip"])
    subprocess.check_call(["python", "setup.py", "bdist_wheel",
                           "--universal"])
    # register each built file under the href used in the link store
    for p in py.path.local("dist").listdir():
        reqmock.mockresponse("http://b/" + p.basename, code=200,
                             method="GET", data=p.read("rb"))
    toxrunargs = prepare_toxrun_args(dev_index, vl, sdist_links, wheel_links)
    # the tar.gz sdist, the zip sdist and the universal wheel remain;
    # the non-universal py2 wheel was filtered by find_sdist_and_wheels
    assert len(toxrunargs) == 3
    sdist1, sdist2, wheel1 = toxrunargs
    assert sdist1[0].basename == "prep1-1.0.tar.gz"
    assert sdist1[1].path_unpacked.strpath.endswith(
        "targz" + os.sep + "prep1-1.0")
    assert sdist2[0].basename == "prep1-1.0.zip"
    assert sdist2[1].path_unpacked.strpath.endswith(
        "zip" + os.sep + "prep1-1.0")
    assert wheel1[0].basename == "prep1-1.0-py2.py3-none-any.whl"
    assert str(wheel1[1].path_unpacked).endswith(wheel1[0].basename)
def test_download_and_unpack(makehub, tmpdir, pseudo_current, monkeypatch, basic_auth):
    """download_and_unpack fetches via hub.http and unpacks the archive,
    forwarding basic-auth credentials when they are configured."""

    class RecordingHTTP(object):
        # stand-in for the hub's http client, recording the last GET
        class Response(object):
            def __init__(self, content=b'archive'):
                self.status_code = 200
                self.content = content

        def __init__(self):
            self.last_get = None

        def get(self, *args, **kwargs):
            self.last_get = (args, kwargs)
            return self.Response()

    class RecordingUnpack(object):
        # stand-in for UnpackedPackage.unpack, recording it was invoked
        def __init__(self):
            self.called = False

        def unpack(self):
            self.called = True

    hub = makehub(['test', '-epy27', 'somepkg'])
    hub.current.reconfigure(dict(
        index='http://dev/foo/bar',
        login='******',
        pypisubmit='http://dev/foo/bar'))
    if basic_auth:
        hub.current.set_basic_auth(*basic_auth)
    index = DevIndex(hub, tmpdir, pseudo_current)
    recording_http = RecordingHTTP()
    hub.http.get = recording_http.get
    recording_unpack = RecordingUnpack()
    monkeypatch.setattr(
        'devpi.test.UnpackedPackage.unpack', recording_unpack.unpack)
    store = ViewLinkStore('http://something/index', {
        '+links': [
            {'href': 'http://dev/foo/bar/prep1-1.0.tar.gz',
             'rel': 'releasefile'},
        ],
        'name': 'prep1',
        'version': '1.0',
    })
    link = store.get_link(rel='releasefile')
    index.download_and_unpack('1.0', link)
    assert recording_unpack.called
    args, kwargs = recording_http.last_get
    assert args[0] == 'http://dev/foo/bar/prep1-1.0.tar.gz'
    if basic_auth:
        assert kwargs['auth'] == basic_auth
    else:
        assert kwargs.get('auth') is None
def get_view_version_links(testapp, index, name, version, proj=False):
    """Fetch version metadata and wrap it in a ViewLinkStore.

    With ``proj=True`` the project-level URL is queried and the version's
    entry is picked out of the result; otherwise the version URL is
    queried directly.
    """
    url_parts = [index, name] if proj else [index, name, version]
    url = "/".join(url_parts)
    response = testapp.get_json(url, expect_errors=False)
    result = response.json["result"]
    if proj:
        # project view nests per-version data under the version key
        result = result[version]
    return ViewLinkStore(url, result)
def test_find_wheels_and_no_sdist(self, loghub):
    """A wheel without any sdist aborts with an error message."""
    store = ViewLinkStore("http://something/index", {"+links": [
        {"href": "http://b/pytest-2.7.0-py2.py3-none-any.whl",
         "rel": "releasefile"},
    ]})
    release_links = store.get_links(rel="releasefile")
    with pytest.raises(SystemExit):
        find_sdist_and_wheels(loghub, release_links)
    loghub._getmatcher().fnmatch_lines("""
        *need at least one sdist*
    """)
def get_versions_to_delete(index_url, response, requirement):
    """Collect (version, links) pairs matching *requirement*.

    Only release files hosted on *index_url* itself are listed; links
    pointing at other indexes are left out.
    """
    basepath = index_url.path.lstrip("/")
    matching = []
    for version, verdata in response.result.items():
        if version not in requirement:
            continue
        store = ViewLinkStore(basepath, verdata)
        own_files = [
            link
            for link in store.get_links()
            if link.href.startswith(index_url.url)
        ]
        matching.append((version, own_files))
    return matching
def test_find_wheels_not_universal(self, loghub):
    """Non-universal wheels are skipped by default, with a warning."""
    store = ViewLinkStore("http://something/index", {"+links": [
        {"href": "http://b/pytest-2.7.0.tar.gz", "rel": "releasefile"},
        {"href": "http://b/pytest-2.7.0-py26-none-any.whl",
         "rel": "releasefile"},
    ]})
    sdist_links, wheel_links = find_sdist_and_wheels(
        loghub, store.get_links(rel="releasefile"))
    # the sdist survives, the py26-only wheel is dropped
    assert len(sdist_links) == 1
    assert sdist_links[0].basename.endswith(".tar.gz")
    assert len(wheel_links) == 0
    loghub._getmatcher().fnmatch_lines("""
        *only universal wheels*
    """)
def test_prepare_toxrun_args_select(self, loghub, pseudo_current, tmpdir, reqmock, initproj):
    # test that we can explicitly select a non universal wheel
    # (matching the wheel tag of the currently running python major)
    pyver = "py%s" % sys.version_info[0]
    vl = ViewLinkStore(
        "http://something/index",
        {"+links": [
            {"href": "http://b/prep_under-1.0.tar.gz",
             "rel": "releasefile"},
            {"href": "http://b/prep_under-1.0-%s-none-any.whl" % pyver,
             "rel": "releasefile"},
        ], "name": "prep-under", "version": "1.0"})
    links = vl.get_links(rel="releasefile")
    sdist_links, wheel_links = find_sdist_and_wheels(
        loghub, links, universal_only=False)
    dev_index = DevIndex(loghub, tmpdir, pseudo_current)
    # build a real sdist and (non-universal) wheel so the mocked
    # responses below can serve actual archive content
    initproj("prep_under-1.0", filedefs={})
    subprocess.check_call(
        ["python", "setup.py", "sdist", "--formats=gztar"])
    subprocess.check_call(["python", "setup.py", "bdist_wheel"])
    # register each built file under the href used in the link store
    for p in py.path.local("dist").listdir():
        reqmock.mockresponse("http://b/" + p.basename, code=200,
                             method="GET", data=p.read("rb"))
    toxrunargs = prepare_toxrun_args(
        dev_index, vl, sdist_links, wheel_links, select=pyver)
    # with select=pyver only the matching wheel is chosen for the run
    assert len(toxrunargs) == 1
    (wheel1, ) = toxrunargs
    assert wheel1[0].basename == "prep_under-1.0-%s-none-any.whl" % pyver
    assert str(wheel1[1].path_unpacked).endswith(wheel1[0].basename)
def test_frompath(self, initproj, devpi, out_devpi, runproc):
    """Uploading a single sdist by path makes its link appear on the index."""
    initproj("hello-1.3", {"doc": {
        "conf.py": "",
        "index.html": "<html/>"}})
    tmpdir = py.path.local()
    runproc(tmpdir, "python setup.py sdist --format=zip".split())
    dist = tmpdir.join("dist")
    # exactly one archive was built; upload that one
    assert len(dist.listdir()) == 1
    archive = dist.listdir()[0]
    hub = devpi("upload", archive)
    url = hub.current.get_index_url().url + "hello/1.3/"
    out = out_devpi("getjson", url)
    data = json.loads(out.stdout.str())
    vv = ViewLinkStore(url, data["result"])
    assert vv.get_link(basename="hello-1.3.zip")
def test_fromdir(self, initproj, devpi, out_devpi, runproc, monkeypatch):
    """Uploading with --from-dir picks up every sdist in the directory."""
    initproj("hello-1.1", {"doc": {
        "conf.py": "",
        "index.html": "<html/>"}})
    tmpdir = py.path.local()
    runproc(tmpdir, "python setup.py sdist --format=zip".split())
    # second version built into the same dist directory
    initproj("hello-1.2")
    runproc(tmpdir, "python setup.py sdist --format=zip".split())
    dist = tmpdir.join("dist")
    assert len(dist.listdir()) == 2
    hub = devpi("upload", "--from-dir", dist)
    # both versions must now be present on the index
    for ver in ("1.1", "1.2"):
        url = hub.current.get_index_url().url + "hello/%s/" % ver
        out = out_devpi("getjson", url)
        data = json.loads(out.stdout.str())
        vv = ViewLinkStore(url, data["result"])
        assert vv.get_link(basename="hello-%s.zip" % ver)
def get_matching_versioninfo(self, pkgname, indexname):
    """Return a ViewLinkStore for the best version matching *pkgname*.

    Versions are considered in the order produced by get_sorted_versions;
    the first one contained in the parsed requirement wins.  Returns
    None when no version matches.
    """
    req = next(pkg_resources.parse_requirements(pkgname))
    projurl = self.current.get_project_url(
        req.project_name, indexname=indexname).url
    r = self.hub.http_api("get", projurl)
    version = next(
        (v for v in get_sorted_versions(r.result) if v in req), None)
    if version is not None:
        return ViewLinkStore(projurl, r.result[version])
def out_project_version_files(hub, url, verdata, version, index):
    """Print release file origins (and tox results) for one version.

    Returns True when at least one release file link was printed.
    """
    vv = ViewLinkStore(url, verdata)
    release_links = vv.get_links(rel="releasefile")
    for link in release_links:
        # egg= requirements carry the version in the printed origin
        origin = (
            "%s (%s) " % (link.href, version)
            if version.startswith("egg=")
            else link.href)
        if index is None:
            hub.error(origin)
        elif origin.startswith(hub.current.index):
            # files hosted on the current index are highlighted
            hub.info(origin)
        else:
            hub.line(origin)
        toxlinks = vv.get_links(rel="toxresult", for_href=link.href)
        if toxlinks:
            show_test_status(hub, toxlinks)
    return bool(release_links)
def confirm_delete(hub, index_url, reply, req):
    """List matching releases and ask the user to confirm their removal.

    Returns the (version, links) pairs to delete when the user confirms;
    returns None when nothing matched or the user declined.
    """
    basepath = index_url.path.lstrip("/")
    selected = []
    for version, verdata in reply.result.items():
        if version not in req:
            continue
        store = ViewLinkStore(basepath, verdata)
        matching_files = [
            link for link in store.get_links()
            if link.href.startswith(index_url.url)]
        if matching_files:
            # XXX need to delete metadata without files
            selected.append((version, matching_files))
    if not selected:
        return
    hub.info("About to remove the following releases and distributions")
    for ver, links in selected:
        hub.info("version: " + ver)
        for link in links:
            hub.info("  - " + link.href)
    if hub.ask_confirm("Are you sure"):
        return selected