def test_doc_unpack_cleanup(mapp, testapp):
    """Re-uploading a doczip removes stale files from the unpack directory."""
    api = mapp.create_and_use()
    content = zip_dict({
        "index.html": "<html><body>2.6</body></html>",
        "foo.html": "<html><body>Foo</body></html>"})
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    with mapp.xom.keyfs.transaction(write=False):
        stage = mapp.xom.model.getstage(api.stagename)
        path = get_unpack_path(stage, 'pkg1', '2.6')
    # fetching a doc page triggers unpacking
    testapp.xget(200, api.index + '/pkg1/2.6/+doc/foo.html')
    assert path.join('foo.html').exists()
    # upload new docs without foo.html; the old unpack must be cleaned up
    content = zip_dict({"index.html": "<html><body>2.6</body></html>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    with mapp.xom.keyfs.transaction(write=False):
        stage = mapp.xom.model.getstage(api.stagename)
        path = get_unpack_path(stage, 'pkg1', '2.6')
    testapp.xget(404, api.index + '/pkg1/2.6/+doc/foo.html')
    assert not path.join('foo.html').exists()
def test_multiple_docs_on_same_version(self, impexp):
    """After export/import the newest of two case-differing doczips wins."""
    mapp1 = impexp.mapp1
    api = mapp1.create_and_use()
    mapp1.set_versiondata({"name": "hello", "version": "1.0"})
    with mapp1.xom.keyfs.transaction(write=True):
        # create entries with and without log
        stage = mapp1.xom.model.getstage(mapp1.current_stage)
        link = stage.store_doczip(
            "Hello", "1.0",
            content=zip_dict({"index.html": "<html><body>Hello"}))
        link.add_log('upload', stage.user.name, dst=stage.name)
        time.sleep(1.1)  # force different times in log entry
        link = stage.store_doczip(
            "hello", "1.0",
            content=zip_dict({"index.html": "<html><body>hello"}))
        link.add_log('upload', stage.user.name, dst=stage.name)
    impexp.export()
    mapp2 = impexp.new_import()
    with mapp2.xom.keyfs.transaction(write=False):
        stage = mapp2.xom.model.getstage(api.stagename)
        doczip = stage.get_doczip("hello", "1.0")
        archive = Archive(py.io.BytesIO(doczip))
        assert 'index.html' in archive.namelist()
        assert py.builtin._totext(
            archive.read("index.html"), 'utf-8') == "<html><body>hello"
def test_simulate_multiple_doczip_entries(self, stage, bases, tmpdir):
    """get_doczip returns the latest entry when two doczip links exist."""
    stage.set_versiondata(udict(name="pkg1", version="1.0"))
    stage.store_doczip("pkg1", "1.0", zip_dict({}))
    # simulate a second entry with a slightly different name
    # (XXX not clear if this test is really necessary. hpk thinks for
    # exporting state from server<2.1.5 with such a double-entry one
    # needs to install 2.1.5 and export from there anyway, clearing
    # the problem. Then again server<2.3.2 allowed the store_doczip
    # method to construct doczip filenames which differ only in
    # casing)
    linkstore = stage.get_linkstore_perstage("Pkg1", "1.0", readonly=False)
    content = zip_dict({"index.html": "<html/>"})
    linkstore.create_linked_entry(
        rel="doczip",
        basename="Pkg1-1.0.doc.zip",
        file_content=content,
    )
    # check we have two doczip links
    linkstore = stage.get_linkstore_perstage("pkg1", "1.0")
    links = linkstore.get_links(rel="doczip")
    assert len(links) == 2
    # get doczip and check it's really the latest one
    doczip = stage.get_doczip("pkg1", "1.0")
    assert doczip == content
def test_docs_latest(mapp, testapp):
    """The /latest doc view warns when docs lag behind the newest version."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html><body>2.6</body></html>"})
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    r = testapp.xget(200, api.index + "/pkg1/latest/+d/index.html")
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg1/latest/+doc/index.html"
    # navigation shows latest registered version
    navigation_links = r.html.select("#navigation a")
    assert navigation_links[3].text == '2.6'
    # there is no warning
    assert r.html.select('.infonote') == []
    # and the content matches
    r = testapp.xget(200, iframe.attrs['src'])
    assert r.text == "<html><body>2.6</body></html>"
    # now we register a newer version, but docs should still be 2.6
    mapp.set_versiondata({"name": "pkg1", "version": "2.7"}, waithooks=True)
    r = testapp.xget(200, api.index + "/pkg1/latest/+d/index.html")
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg1/latest/+doc/index.html"
    # navigation shows latest registered version
    navigation_links = r.html.select("#navigation a")
    assert navigation_links[3].text == '2.7'
    # there is a warning
    assert [x.text.strip() for x in r.html.select('.infonote')] == [
        "The latest available documentation (version 2.6) isn't for the latest available package version."]
    # and the content is from older uploaded docs
    r = testapp.xget(200, iframe.attrs['src'])
    assert r.text == "<html><body>2.6</body></html>"
    # now we upload newer docs
    content = zip_dict({"index.html": "<html><body>2.7</body></html>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.7", code=200,
                    waithooks=True)
    r = testapp.xget(200, api.index + "/pkg1/latest/+d/index.html")
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg1/latest/+doc/index.html"
    # navigation shows latest registered version
    navigation_links = r.html.select("#navigation a")
    assert navigation_links[3].text == '2.7'
    # there is no warning anymore
    assert r.html.select('.infonote') == []
    # and the content is from newest docs
    r = testapp.xget(200, iframe.attrs['src'])
    assert r.text == "<html><body>2.7</body></html>"
def test_storedoczipfile(self, stage, bases):
    """A second doczip upload fully replaces the previously unpacked files."""
    stage.register_metadata(dict(name="pkg1", version="1.0"))
    content = zip_dict({"index.html": "<html/>", "_static": {},
                        "_templ": {"x.css": ""}})
    filepath = stage.store_doczip("pkg1", "1.0", BytesIO(content))
    assert filepath.join("index.html").exists()
    content = zip_dict({"nothing": "hello"})
    filepath = stage.store_doczip("pkg1", "1.0", BytesIO(content))
    assert filepath.join("nothing").check()
    assert not filepath.join("index.html").check()
    assert not filepath.join("_static").check()
    assert not filepath.join("_templ").check()
def test_search_docs(mapp, testapp):
    """Full text search finds uploaded documentation content."""
    api = mapp.create_and_use()
    mapp.set_versiondata(
        {"name": "pkg1", "version": "2.6", "description": "foo"},
        waithooks=True)
    mapp.upload_file_pypi("pkg1-2.6.tar.gz", b"content", "pkg1", "2.6")
    content = zip_dict({
        "index.html": "\n".join([
            "<html>",
            "<head><title>Foo</title></head>",
            "<body>Bar</body>",
            "</html>"])})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    r = testapp.get('/+search?query=bar')
    assert r.status_code == 200
    links = r.html.select('.searchresults a')
    assert [(link.text.strip(), link.attrs['href']) for link in links] == [
        ("pkg1-2.6", "http://localhost/%s/pkg1/2.6" % api.stagename),
        ("Foo", "http://localhost/%s/pkg1/2.6/+d/index.html" % api.stagename)]
def test_upload_docs_no_version(mapp, testapp):
    """Docs uploaded without a version attach to the registered release."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    mapp.register_metadata(dict(name="Pkg1", version="1.0"))
    mapp.upload_doc("pkg1.zip", content, "Pkg1", "")
    r = testapp.get(api.index + "Pkg1/1.0/+doc/index.html")
    assert r.status_code == 200
def test_hard_links(self, makeimpexp):
    """Export with --hard-links hard-links files and imports correctly."""
    impexp = makeimpexp(options=('--hard-links',))
    mapp1 = impexp.mapp1
    api = mapp1.create_and_use()
    content = b'content'
    mapp1.upload_file_pypi("he-llo-1.0.tar.gz", content, "he_llo", "1.0")
    content = zip_dict({"index.html": "<html/>"})
    mapp1.upload_doc("he-llo.zip", content, "he-llo", "")
    impexp.export()
    # dataindex.json is written fresh, the uploads are hard-linked (nlink 2)
    assert impexp.exportdir.join('dataindex.json').stat().nlink == 1
    assert impexp.exportdir.join(
        'user1', 'dev', 'he_llo-1.0.doc.zip').stat().nlink == 2
    assert impexp.exportdir.join(
        'user1', 'dev', 'he_llo', 'he-llo-1.0.tar.gz').stat().nlink == 2
    mapp2 = impexp.new_import()
    with mapp2.xom.keyfs.transaction():
        stage = mapp2.xom.model.getstage(api.stagename)
        verdata = stage.get_versiondata_perstage("he_llo", "1.0")
        assert verdata["version"] == "1.0"
        links = stage.get_releaselinks("he_llo")
        assert len(links) == 1
        assert links[0].entry.file_get_content() == b'content'
        doczip = stage.get_doczip("he_llo", "1.0")
        archive = Archive(py.io.BytesIO(doczip))
        assert 'index.html' in archive.namelist()
        assert py.builtin._totext(
            archive.read("index.html"), 'utf-8') == "<html/>"
def test_project_view_root_and_docs(mapp, testapp):
    """Project view lists mirror versions plus the local doc link."""
    # NOTE(review): the pypistage fixture parameter was dropped here by
    # mistake in the original? No — keep the signature exactly as-is.
    pass
def test_upload_and_push_internal(mapp, testapp, monkeypatch):
    """Pushing a release to another internal index carries its docs along."""
    mapp.create_user("user1", "1")
    mapp.create_and_login_user("user2")
    mapp.create_index("prod", indexconfig=dict(acl_upload=["user1", "user2"]))
    mapp.create_index("dev", indexconfig=dict(acl_upload=["user2"]))
    mapp.login("user1", "1")
    mapp.create_index("dev")
    mapp.use("user1/dev")
    mapp.upload_file_pypi("pkg1-2.6.tgz", b"123", "pkg1", "2.6")
    content = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "")
    # check that push is authorized and executed towards user2/prod index
    req = dict(name="pkg1", version="2.6", targetindex="user2/prod")
    r = testapp.push("/user1/dev/", json.dumps(req))
    assert r.status_code == 200
    r = testapp.get_json("/user2/prod/pkg1/2.6")
    assert r.status_code == 200
    relpath = r.json["result"]["+files"]["pkg1-2.6.tgz"]
    assert relpath.endswith("/pkg1-2.6.tgz")
    # we check here that the upload of docs without version was
    # automatically tied to the newest release metadata
    r = testapp.get("/user2/prod/pkg1/2.6/+doc/index.html")
    assert r.status_code == 200
def test_search_deleted_docs(mapp, testapp):
    """Search degrades gracefully when unpacked doc files went missing."""
    from devpi_web.doczip import get_unpack_path
    api = mapp.create_and_use()
    mapp.set_versiondata(
        {"name": "pkg1", "version": "2.6", "description": "foo"},
        waithooks=True)
    mapp.upload_file_pypi("pkg1-2.6.tar.gz", b"content", "pkg1", "2.6")
    content = zip_dict({
        "index.html": "\n".join([
            "<html>",
            "<head><title>Foo</title></head>",
            "<body>Bar</body>",
            "</html>"])})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    # remove the unpacked documentation behind the server's back
    with mapp.xom.keyfs.transaction():
        stage = mapp.xom.model.getstage(api.stagename)
        path = get_unpack_path(stage, "pkg1", "2.6")
        path.remove()
    r = testapp.get('/+search?query=bar')
    assert r.status_code == 200
    highlights = r.html.select('.packageinfo dd')
    text = [compareable_text(h.text) for h in highlights]
    assert len(text) == 1
    assert text[0].startswith(
        "Couldn't access documentation files for pkg1 "
        "version 2.6 on %s." % api.stagename)
def test_doczip_uploaded_hook(self, stage, queue):
    """devpiserver_on_upload fires on doczip store, not on removal."""
    class Plugin:
        def devpiserver_on_upload(self, stage, project, version, link):
            queue.put((stage, project, version, link))
    stage.xom.config.pluginmanager.register(Plugin())
    stage.set_versiondata(udict(name="pkg1", version="1.0"))
    content = zip_dict({"index.html": "<html/>", "_static": {},
                        "_templ": {"x.css": ""}})
    stage.store_doczip("pkg1", "1.0", content)
    stage.xom.keyfs.commit_transaction_in_thread()
    nstage, name, version, link = queue.get()
    assert name == "pkg1"
    assert version == "1.0"
    with stage.xom.keyfs.transaction():
        assert link.entry.file_get_content() == content
    # delete, which shouldnt trigger devpiserver_on_upload
    with stage.xom.keyfs.transaction(write=True):
        linkstore = stage.get_linkstore_perstage(
            "pkg1", "1.0", readonly=False)
        linkstore.remove_links()
    # now write again and check that we get something from the queue
    with stage.xom.keyfs.transaction(write=True):
        stage.store_doczip("pkg1", "1.0", content)
    nstage, name, version, link = queue.get()
    assert name == "pkg1" and version == "1.0"
    with stage.xom.keyfs.transaction():
        assert link.entry.file_exists()
def test_doczip_uploaded_hook(self, stage, queue):
    """devpiserver_on_upload fires on doczip store, not on removal."""
    class Plugin:
        def devpiserver_on_upload(self, stage, projectname, version, link):
            queue.put((stage, projectname, version, link))
    stage.xom.config.pluginmanager.register(Plugin())
    stage.set_versiondata(udict(name="pkg1", version="1.0"))
    content = zip_dict({"index.html": "<html/>", "_static": {},
                        "_templ": {"x.css": ""}})
    stage.store_doczip("pkg1", "1.0", content)
    stage.xom.keyfs.commit_transaction_in_thread()
    nstage, name, version, link = queue.get()
    assert name == "pkg1"
    assert version == "1.0"
    with stage.xom.keyfs.transaction():
        assert link.entry.file_get_content() == content
    # delete, which shouldnt trigger devpiserver_on_upload
    with stage.xom.keyfs.transaction(write=True):
        linkstore = stage.get_linkstore_perstage("pkg1", "1.0")
        linkstore.remove_links()
    # now write again and check that we get something from the queue
    with stage.xom.keyfs.transaction(write=True):
        stage.store_doczip("pkg1", "1.0", content)
    nstage, name, version, link = queue.get()
    assert name == "pkg1" and version == "1.0"
    with stage.xom.keyfs.transaction():
        assert link.entry.file_exists()
def test_docs_view(mapp, testapp):
    """The +d view embeds docs in an iframe and 404s with clear messages."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    r = testapp.xget(302, api.index + "/pkg1/2.6/+d/")
    r = testapp.xget(200, r.location)
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg1/2.6/+doc/index.html"
    # unknown stage
    r = testapp.xget(404, "/blubber/blubb/pkg1/2.6/+d/index.html")
    content, = r.html.select('#content')
    assert 'The stage blubber/blubb could not be found.' in compareable_text(
        content.text)
    # version without docs
    r = testapp.xget(404, api.index + "/pkg1/2.7/+d/index.html")
    content, = r.html.select('#content')
    assert 'No documentation available.' in compareable_text(content.text)
    # missing file inside existing docs
    r = testapp.xget(404, api.index + "/pkg1/2.6/+d/foo.html")
    content, = r.html.select('#content')
    assert 'File foo.html not found in documentation.' in compareable_text(
        content.text)
def test_search_docs(mapp, testapp):
    """Search results use the normalized project name in links."""
    api = mapp.create_and_use()
    mapp.set_versiondata(
        {"name": "pkg_hello", "version": "2.6", "description": "foo"},
        waithooks=True)
    mapp.upload_file_pypi(
        "pkg_hello-2.6-py2.py3-none-any.whl", b"content",
        "pkg_hello", "2.6")
    content = zip_dict({
        "index.html": "\n".join([
            "<html>",
            "<head><title>Foo</title></head>",
            "<body>Bar</body>",
            "</html>"])})
    mapp.upload_doc("pkg-hello-2.6.doc.zip", content, "pkg_hello", "2.6",
                    code=200, waithooks=True)
    r = testapp.get('/+search?query=bar')
    assert r.status_code == 200
    highlight = r.html.select('.searchresults dd dd')
    assert [compareable_text(x.text) for x in highlight] == ["Bar"]
    links = r.html.select('.searchresults a')
    assert [(compareable_text(link.text), link.attrs['href'])
            for link in links] == [
        ("pkg_hello-2.6",
         "http://localhost/%s/pkg-hello/2.6" % api.stagename),
        ("Foo",
         "http://localhost/%s/pkg-hello/2.6/+d/index.html" % api.stagename)]
def test_export_hard_links(self, makeimpexp):
    """--hard-links export creates hard links and imports intact data."""
    impexp = makeimpexp(options=('--hard-links', ))
    mapp1 = impexp.mapp1
    api = mapp1.create_and_use()
    content = b'content'
    mapp1.upload_file_pypi("he-llo-1.0.tar.gz", content, "he_llo", "1.0")
    content = zip_dict({"index.html": "<html/>"})
    mapp1.upload_doc("he-llo.zip", content, "he-llo", "")
    # export the data
    impexp.export()
    # check the number of links of the files in the exported data
    assert impexp.exportdir.join('dataindex.json').stat().nlink == 1
    assert impexp.exportdir.join(
        'user1', 'dev', 'he_llo-1.0.doc.zip').stat().nlink == 2
    assert impexp.exportdir.join(
        'user1', 'dev', 'he-llo', '1.0',
        'he-llo-1.0.tar.gz').stat().nlink == 2
    # now import the data
    mapp2 = impexp.new_import()
    # and check that the files have the expected content
    with mapp2.xom.keyfs.transaction():
        stage = mapp2.xom.model.getstage(api.stagename)
        verdata = stage.get_versiondata_perstage("he_llo", "1.0")
        assert verdata["version"] == "1.0"
        links = stage.get_releaselinks("he_llo")
        assert len(links) == 1
        assert links[0].entry.file_get_content() == b'content'
        doczip = stage.get_doczip("he_llo", "1.0")
        archive = Archive(py.io.BytesIO(doczip))
        assert 'index.html' in archive.namelist()
        assert py.builtin._totext(
            archive.read("index.html"), 'utf-8') == "<html/>"
def test_docs_raw_projectname(mapp, testapp):
    """Docs are reachable under both the regular and the normalized name.

    Fix: the uploaded index.html literal was truncated to
    "<html><body>foo</body></", which only passed because BeautifulSoup
    repairs broken markup when rendering. Upload the complete document so
    the asserted output matches the actual input.
    """
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html><body>foo</body></html>"})
    mapp.set_versiondata({"name": "pkg_hello", "version": "1.0"})
    mapp.upload_doc("pkg-hello.zip", content, "pkg-hello", "1.0", code=200,
                    waithooks=True)
    location = '%s/pkg_hello/1.0/' % api.index
    r = testapp.xget(200, location, headers=dict(accept="text/html"))
    navlinks = dict(
        (l.text, l.attrs['href'])
        for l in r.html.select('.projectnavigation a'))
    assert 'Documentation' in navlinks
    # the regular name should work
    location = '%s/pkg_hello/1.0/+doc/index.html' % api.index
    r = testapp.xget(200, location, headers=dict(accept="text/html"))
    html = py.builtin._totext(r.html.renderContents().strip(), 'utf-8')
    assert '<html><body>foo</body></html>' == html
    # as well as the normalized name
    location = '%s/pkg-hello/1.0/+doc/index.html' % api.index
    r = testapp.xget(200, location, headers=dict(accept="text/html"))
    html = py.builtin._totext(r.html.renderContents().strip(), 'utf-8')
    assert '<html><body>foo</body></html>' == html
def test_docs_raw_view(mapp, testapp):
    """Raw doc view: pinned versions cacheable, latest/stable are not."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    # a pinned version may be cached
    r = testapp.xget(302, api.index + "/pkg1/2.6/+doc/")
    r = testapp.xget(200, r.location)
    assert r.cache_control.no_cache is None
    # the floating "latest" and "stable" views must not be cached
    r = testapp.xget(302, api.index + "/pkg1/latest/+doc/")
    r = testapp.xget(200, r.location)
    assert r.cache_control.no_cache
    r = testapp.xget(302, api.index + "/pkg1/stable/+doc/")
    r = testapp.xget(200, r.location)
    assert r.cache_control.no_cache
    # error cases
    r = testapp.xget(404, "/blubber/blubb/pkg1/2.6/+doc/index.html")
    content, = r.html.select('#content')
    assert 'The stage blubber/blubb could not be found.' in compareable_text(
        content.text)
    r = testapp.xget(404, api.index + "/pkg1/2.7/+doc/index.html")
    content, = r.html.select('#content')
    assert 'No documentation available.' in compareable_text(content.text)
    r = testapp.xget(404, api.index + "/pkg1/2.6/+doc/foo.html")
    content, = r.html.select('#content')
    assert 'File foo.html not found in documentation.' in compareable_text(
        content.text)
def test_doczip_remove_hook(self, stage, queue):
    """devpiserver_on_remove_file fires when doczip links are removed."""
    class Plugin:
        def devpiserver_on_upload(self, stage, project, version, link):
            queue.put((stage, project, version, link))
    stage.xom.config.pluginmanager.register(Plugin())

    # a second plugin (the class name shadowing is deliberate) listening
    # for file removal
    class Plugin:
        def devpiserver_on_remove_file(self, stage, relpath):
            queue.put((stage, relpath))
    stage.xom.config.pluginmanager.register(Plugin())
    # upload, should trigger devpiserver_on_upload
    stage.set_versiondata(udict(name="pkg2", version="1.0"))
    content = zip_dict({"index.html": "<html/>", "_static": {},
                        "_templ": {"x.css": ""}})
    stage.store_doczip("pkg2", "1.0", content)
    stage.xom.keyfs.commit_transaction_in_thread()
    nstage, name, version, link = queue.get()
    assert name == "pkg2"
    assert version == "1.0"
    with stage.xom.keyfs.transaction():
        assert link.entry.file_get_content() == content
    # remove, should trigger devpiserver_on_remove_file
    with stage.xom.keyfs.transaction(write=True):
        linkstore = stage.get_linkstore_perstage(
            "pkg2", "1.0", readonly=False)
        linkstore.remove_links()
    nstage, relpath = queue.get()
    assert relpath.startswith('hello/world/+')
    assert relpath.endswith('/pkg2-1.0.doc.zip')
def test_docs_latest(mapp, testapp):
    """The /latest doc view keeps serving the newest uploaded docs."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html><body>2.6</body></html>"})
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    r = testapp.xget(200, api.index + "/pkg1/latest/+d/index.html")
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg1/latest/+doc/index.html"
    # navigation shows latest registered version
    navigation_links = r.html.select("#navigation a")
    assert navigation_links[3].text == '2.6'
    # the content is from latest docs though
    r = testapp.xget(200, iframe.attrs['src'])
    assert r.text == "<html><body>2.6</body></html>"
    # now we register a newer version, but docs should still be 2.6
    mapp.set_versiondata({"name": "pkg1", "version": "2.7"}, waithooks=True)
    r = testapp.xget(200, api.index + "/pkg1/latest/+d/index.html")
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg1/latest/+doc/index.html"
    # navigation shows latest registered version
    navigation_links = r.html.select("#navigation a")
    assert navigation_links[3].text == '2.7'
    # the content is from latest docs though
    r = testapp.xget(200, iframe.attrs['src'])
    assert r.text == "<html><body>2.6</body></html>"
    # now we upload newer docs
    content = zip_dict({"index.html": "<html><body>2.7</body></html>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.7", code=200,
                    waithooks=True)
    r = testapp.xget(200, api.index + "/pkg1/latest/+d/index.html")
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg1/latest/+doc/index.html"
    # navigation shows latest registered version
    navigation_links = r.html.select("#navigation a")
    assert navigation_links[3].text == '2.7'
    # the content is from latest docs though
    r = testapp.xget(200, iframe.attrs['src'])
    assert r.text == "<html><body>2.7</body></html>"
def test_upload_docs(mapp, testapp):
    """Doc upload requires registered metadata, then docs are served."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    # uploading docs for an unregistered version fails
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=400)
    mapp.register_metadata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200)
    r = testapp.get(api.index + "pkg1/2.6/+doc/index.html")
    assert r.status_code == 200
def test_storedoczipfile(self, stage, bases):
    """A second doczip upload fully replaces the stored archive."""
    from devpi_common.archive import Archive
    stage.set_versiondata(udict(name="pkg1", version="1.0"))
    content = zip_dict({"index.html": "<html/>", "_static": {},
                        "_templ": {"x.css": ""}})
    stage.store_doczip("pkg1", "1.0", content)
    archive = Archive(BytesIO(stage.get_doczip("pkg1", "1.0")))
    assert 'index.html' in archive.namelist()
    content = zip_dict({"nothing": "hello"})
    stage.store_doczip("pkg1", "1.0", content)
    archive = Archive(BytesIO(stage.get_doczip("pkg1", "1.0")))
    namelist = archive.namelist()
    assert 'nothing' in namelist
    assert 'index.html' not in namelist
    assert '_static' not in namelist
    assert '_templ' not in namelist
def test_indexing_doc_with_unicode(mapp, testapp):
    """Non-ASCII doc content is indexed and searchable."""
    mapp.create_and_use()
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    content = zip_dict({
        "index.html": u'<html><meta charset="utf-8"><body>Föö</body></html>'.encode('utf-8')})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    r = testapp.xget(200, '/+search?query=F%C3%B6%C3%B6')
    search_results = r.html.select('.searchresults > dl > dt')
    assert len(search_results) == 1
def test_storedoczipfile(self, stage, bases):
    """A second doczip upload fully replaces the unpacked file tree."""
    stage.register_metadata(dict(name="pkg1", version="1.0"))
    content = zip_dict({
        "index.html": "<html/>",
        "_static": {},
        "_templ": {"x.css": ""},
    })
    filepath = stage.store_doczip("pkg1", "1.0", BytesIO(content))
    assert filepath.join("index.html").exists()
    content = zip_dict({"nothing": "hello"})
    filepath = stage.store_doczip("pkg1", "1.0", BytesIO(content))
    assert filepath.join("nothing").check()
    assert not filepath.join("index.html").check()
    assert not filepath.join("_static").check()
    assert not filepath.join("_templ").check()
def test_multiple_store_doczip_uses_project(self, stage, bases, tmpdir):
    # check that two store_doczip calls with slightly
    # different names will not lead to two doczip entries
    stage.set_versiondata(udict(name="pkg1", version="1.0"))
    stage.store_doczip("pkg1", "1.0", zip_dict({}))
    content2 = zip_dict({"index.html": "<html/>"})
    stage.store_doczip("Pkg1", "1.0", content2)
    # check we only have one doczip link
    linkstore = stage.get_linkstore_perstage("pkg1", "1.0")
    links = linkstore.get_links(rel="doczip")
    assert len(links) == 1
    # get doczip and check it's really the latest one
    doczip2 = stage.get_doczip("pkg1", "1.0")
    with Archive(BytesIO(doczip2)) as archive:
        archive.extract(tmpdir)
    assert tmpdir.join("index.html").read() == "<html/>"
def archive_path(request, tmpdir):
    """Fixture: write the sample content dict as a zip or tar archive."""
    contentdict = {"1": "file1", "sub": {"1": "subfile"}}
    if request.param == "zip":
        content = zip_dict(contentdict)
    else:
        content = create_tarfile_fromdict(tmpdir, contentdict)
    p = tmpdir.join("content.%s" % request.param)
    p.write(content, "wb")
    return p
def test_project_view_docs_only(mapp, testapp):
    """Project view works for a version that only has documentation."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc(
        "pkg1.zip", content, "pkg1", "2.6", code=200, waithooks=True)
    r = testapp.xget(200, api.index + '/pkg1',
                     headers=dict(accept="text/html"))
    (content,) = r.html.select('#content')
    assert [x.text for x in content.select('tr td')] == [
        "user1/dev", "2.6", "pkg1-2.6"]
def test_upload_docs_no_version(mapp, testapp, proj):
    """Version-less doc upload attaches to the registered release link."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    mapp.set_versiondata(dict(name="Pkg1", version="1.0"))
    mapp.upload_doc("pkg1.zip", content, "Pkg1", "")
    vv = get_view_version_links(testapp, api.index, "Pkg1", "1.0", proj=proj)
    link = vv.get_link("doczip")
    assert link.href.endswith("/Pkg1-1.0.doc.zip")
    r = testapp.get(link.href)
    archive = Archive(py.io.BytesIO(r.body))
    assert 'index.html' in archive.namelist()
def test_docs_show_projectname(mapp, testapp):
    """The +d view links the iframe using the normalized project name."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html><body>foo</body></html>"})
    mapp.set_versiondata({"name": "pkg_hello", "version": "1.0"})
    mapp.upload_doc(
        "pkg-hello.zip", content, "pkg-hello", "1.0", code=200,
        waithooks=True)
    location = '%s/pkg-hello/1.0/+d/index.html' % api.index
    r = testapp.xget(200, location, headers=dict(accept="text/html"))
    iframe, = r.html.findAll('iframe')
    assert iframe.attrs['src'] == api.index + "/pkg-hello/1.0/+doc/index.html"
def test_docs_are_preserved(self, impexp):
    """Uploaded docs survive an export/import round trip (old db API)."""
    mapp1 = impexp.mapp1
    api = mapp1.create_and_use()
    mapp1.register_metadata({"name": "hello", "version": "1.0"})
    content = zip_dict({"index.html": "<html/>"})
    mapp1.upload_doc("hello.zip", content, "hello", "")
    impexp.export()
    mapp2 = impexp.new_import()
    stage = mapp2.xom.db.getstage(api.stagename)
    path = stage._doc_key("hello", "1.0").filepath
    assert path.check()
    assert path.join("index.html").read() == "<html/>"
def test_upload_docs(mapp, testapp, proj):
    """Doc upload requires registered metadata; the doczip link is served."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    # uploading docs for an unregistered version fails
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=400)
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200)
    vv = get_view_version_links(testapp, api.index, "pkg1", "2.6", proj=proj)
    link = vv.get_link(rel="doczip")
    assert link.href.endswith("/pkg1-2.6.doc.zip")
    r = testapp.get(link.href)
    archive = Archive(py.io.BytesIO(r.body))
    assert 'index.html' in archive.namelist()
def test_indexing_doc_with_missing_title(mapp, testapp):
    """Docs without a <title> are still indexed and searchable."""
    mapp.create_and_use()
    content = zip_dict({"index.html": "<html><body>Foo</body></html>"})
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    r = testapp.xget(200, '/+search?query=Foo')
    search_results = r.html.select('.searchresults > dl > dt')
    assert len(search_results) == 1
    links = search_results[0].findAll('a')
    assert sorted(
        (compareable_text(link.text), link.attrs['href'])
        for link in links) == [
        ("pkg1-2.6", "http://localhost/user1/dev/pkg1/2.6")]
def test_getdoczip(self, stage, bases, tmpdir):
    """get_doczip returns falsy without docs, an extractable archive with."""
    assert not stage.get_doczip("pkg1", "version")
    stage.register_metadata(dict(name="pkg1", version="1.0"))
    content = zip_dict({"index.html": "<html/>", "_static": {},
                        "_templ": {"x.css": ""}})
    stage.store_doczip("pkg1", "1.0", BytesIO(content))
    doczip_file = stage.get_doczip("pkg1", "1.0")
    assert doczip_file
    with Archive(doczip_file) as archive:
        archive.extract(tmpdir)
    assert tmpdir.join("index.html").read() == "<html/>"
    assert tmpdir.join("_static").check(dir=1)
    assert tmpdir.join("_templ", "x.css").check(file=1)
def test_getdoczip(self, stage, bases, tmpdir):
    """get_doczip returns falsy without docs, the zip bytes with docs."""
    stage.set_versiondata(udict(name="pkg1", version="1.0"))
    assert not stage.get_doczip("pkg1", "1.0")
    content = zip_dict({"index.html": "<html/>", "_static": {},
                        "_templ": {"x.css": ""}})
    stage.store_doczip("pkg1", "1.0", content)
    doczip = stage.get_doczip("pkg1", "1.0")
    assert doczip
    with Archive(BytesIO(doczip)) as archive:
        archive.extract(tmpdir)
    assert tmpdir.join("index.html").read() == "<html/>"
    assert tmpdir.join("_static").check(dir=1)
    assert tmpdir.join("_templ", "x.css").check(file=1)
def test_upload_and_push_external(mapp, testapp, mockrequests):
    """Pushing to an external posturl posts release, metadata and docs."""
    api = mapp.create_and_use()
    mapp.upload_file_pypi("pkg1-2.6.tgz", b"123", "pkg1", "2.6")
    zipcontent = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", zipcontent, "pkg1", "")
    r = testapp.get(api.simpleindex + "pkg1")
    assert r.status_code == 200
    a = getfirstlink(r.text)
    assert "pkg1-2.6.tgz" in a.get("href")
    # get root index page
    r = testapp.get(api.index)
    assert r.status_code == 200
    # push
    req = dict(name="pkg1", version="2.6", posturl="whatever",
               username="******", password="******")
    rec = []

    def recpost(url, data, auth, files=None):
        rec.append((url, data, auth, files))

        class r:
            status_code = 200
            content = "msg"
        return r

    mockrequests.set_post(recpost)
    body = json.dumps(req).encode("utf-8")
    r = testapp.request(api.index, method="push", body=body,
                        expect_errors=True)
    assert r.status_code == 200
    # one post each for register, release file and doczip
    assert len(rec) == 3
    assert rec[0][0] == "whatever"
    assert rec[1][0] == "whatever"
    assert rec[2][0] == "whatever"
    upload_dict = rec[2][-1]
    assert upload_dict["content"][0] == "pkg1.zip"
    assert upload_dict["content"][1].read() == zipcontent
    # push with error

    def posterror(url, data, auth, files=None):
        class r:
            status_code = 500
        return r

    mockrequests.set_post(posterror)
    r = testapp.request(api.index, method="push", body=body,
                        expect_errors=True)
    assert r.status_code == 502
    result = r.json["result"]
    assert len(result) == 1
    assert result[0][0] == 500
def test_index_view_project_docs(mapp, testapp):
    """The index HTML view lists the project with its documentation link."""
    api = mapp.create_and_use()
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    content = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    r = testapp.get(api.index, headers=dict(accept="text/html"))
    assert r.status_code == 200
    links = r.html.select('#content a')
    assert [(link.text, link.attrs['href']) for link in links] == [
        ("simple index", "http://localhost/%s/+simple/" % api.stagename),
        ("pkg1-2.6", "http://localhost/%s/pkg1/2.6" % api.stagename),
        ("pkg1-2.6",
         "http://localhost/%s/pkg1/2.6/+d/index.html" % api.stagename),
        ("root/pypi", "http://localhost/root/pypi"),
        ("simple", "http://localhost/root/pypi/+simple/")]
def test_docs_are_preserved(self, impexp):
    """Uploaded docs survive an export/import round trip."""
    mapp1 = impexp.mapp1
    api = mapp1.create_and_use()
    mapp1.set_versiondata({"name": "hello", "version": "1.0"})
    content = zip_dict({"index.html": "<html/>"})
    mapp1.upload_doc("hello.zip", content, "hello", "")
    impexp.export()
    mapp2 = impexp.new_import()
    with mapp2.xom.keyfs.transaction(write=False):
        stage = mapp2.xom.model.getstage(api.stagename)
        doczip = stage.get_doczip("hello", "1.0")
        archive = Archive(py.io.BytesIO(doczip))
        assert 'index.html' in archive.namelist()
        assert py.builtin._totext(
            archive.read("index.html"), 'utf-8') == "<html/>"
def test_docs_are_preserved(self, impexp):
    """Docs uploaded without a version survive export/import intact."""
    mapp1 = impexp.mapp1
    api = mapp1.create_and_use()
    mapp1.set_versiondata({"name": "hello", "version": "1.0"})
    content = zip_dict({"index.html": "<html/>"})
    mapp1.upload_doc("hello.zip", content, "hello", "")
    impexp.export()
    mapp2 = impexp.new_import()
    with mapp2.xom.keyfs.transaction(write=False):
        stage = mapp2.xom.model.getstage(api.stagename)
        doczip = stage.get_doczip("hello", "1.0")
        archive = Archive(py.io.BytesIO(doczip))
        assert 'index.html' in archive.namelist()
        assert py.builtin._totext(
            archive.read("index.html"), 'utf-8') == "<html/>"
def test_docs_raw_view(mapp, testapp):
    """Raw +doc views: redirect to index.html plus the three 404 cases
    (unknown stage, version without docs, file missing from the zip)."""
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    # uploading docs before any version metadata exists is rejected
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=400)
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    # the bare +doc/ url redirects to the doc start page
    r = testapp.xget(302, api.index + "/pkg1/2.6/+doc/")
    testapp.xget(200, r.location)
    # unknown stage
    r = testapp.xget(404, "/blubber/blubb/pkg1/2.6/+doc/index.html")
    (body,) = r.html.select('#content')
    assert 'The stage blubber/blubb could not be found.' in body.text.strip()
    # version with no docs uploaded
    r = testapp.xget(404, api.index + "/pkg1/2.7/+doc/index.html")
    (body,) = r.html.select('#content')
    assert 'No documentation available.' in body.text.strip()
    # file not contained in the doc zip
    r = testapp.xget(404, api.index + "/pkg1/2.6/+doc/foo.html")
    (body,) = r.html.select('#content')
    assert 'File foo.html not found in documentation.' in body.text.strip()
def test_search_docs(mapp, testapp):
    """Full-text search finds a word that occurs only in the uploaded
    documentation and links both version page and doc page."""
    api = mapp.create_and_use()
    mapp.set_versiondata(
        {"name": "pkg1", "version": "2.6", "description": "foo"},
        waithooks=True)
    mapp.upload_file_pypi("pkg1-2.6.tar.gz", b"content", "pkg1", "2.6")
    html = "\n".join([
        "<html>",
        "<head><title>Foo</title></head>",
        "<body>Bar</body>",
        "</html>"])
    content = zip_dict({"index.html": html})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200,
                    waithooks=True)
    # "bar" only appears inside the doc body
    r = testapp.get('/+search?query=bar')
    assert r.status_code == 200
    hits = r.html.select('.searchresults a')
    assert [(a.text.strip(), a.attrs['href']) for a in hits] == [
        ("pkg1-2.6", "http://localhost/%s/pkg1/2.6" % api.stagename),
        ("Foo",
         "http://localhost/%s/pkg1/2.6/+d/index.html" % api.stagename)]
def test_upload_and_push_internal(mapp, testapp, monkeypatch, proj):
    """Push a release (including docs) from one internal index to another,
    then verify shadowing information after reconfiguring inheritance."""
    mapp.create_user("user1", "1")
    mapp.create_and_login_user("user2")
    mapp.create_index("prod", indexconfig=dict(acl_upload=["user1", "user2"]))
    mapp.create_index("dev", indexconfig=dict(acl_upload=["user2"]))
    mapp.login("user1", "1")
    mapp.create_index("dev")
    mapp.use("user1/dev")
    mapp.upload_file_pypi("pkg1-2.6.tgz", b"123", "pkg1", "2.6")
    content = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "")
    # push must be authorized and executed towards the user2/prod index
    req = dict(name="pkg1", version="2.6", targetindex="user2/prod")
    r = testapp.push("/user1/dev", json.dumps(req))
    assert r.status_code == 200
    vv = get_view_version_links(testapp, "/user2/prod", "pkg1", "2.6",
                                proj=proj)
    link = vv.get_link(rel="releasefile")
    assert link.href.endswith("/pkg1-2.6.tgz")
    # docs uploaded without a version are tied to the newest release
    link = vv.get_link(rel="doczip")
    assert link.href.endswith("/pkg1-2.6.doc.zip")
    r = testapp.get(link.href)
    archive = Archive(py.io.BytesIO(r.body))
    assert 'index.html' in archive.namelist()
    # reconfigure inheritance and check that shadowing info shows up
    mapp.modify_index("user1/dev", indexconfig=dict(bases=("/user2/prod",)))
    vv = get_view_version_links(testapp, "/user1/dev", "pkg1", "2.6",
                                proj=proj)
    link = vv.get_link(rel="releasefile")
    assert link.href.endswith("/pkg1-2.6.tgz")
    shadows = vv.shadowed()
    assert len(shadows) == 1, vv.versiondata
    vv = shadows[0]
    link = vv.get_link(rel="releasefile")
    assert link.href.endswith("/pkg1-2.6.tgz")
def test_dashes_in_name_issue199(self, impexp):
    """Projects whose names contain dashes keep their doczip link across
    an export/import cycle (regression test for issue 199)."""
    mapp1 = impexp.mapp1
    api = mapp1.create_and_use()
    name = "plugin-ddpenc-3-5-1-rel"
    mapp1.upload_file_pypi(name + "-1.0.tar.gz", b'content', name, "1.0")
    with mapp1.xom.keyfs.transaction(write=True):
        stage = mapp1.xom.model.getstage(api.stagename)
        doccontent = zip_dict({"index.html": "<html><body>Hello"})
        link1 = stage.store_doczip(name, "1.0", content=doccontent)
    impexp.export()
    mapp2 = impexp.new_import()
    with mapp2.xom.keyfs.transaction():
        stage = mapp2.xom.model.getstage(api.stagename)
        assert stage.get_doczip(name, "1.0") == doccontent
        linkstore = stage.get_linkstore_perstage(name, "1.0")
        (link2,) = linkstore.get_links(rel="doczip")
        # the doczip filename must be identical before and after import
        assert link2.basename == link1.basename
def test_upload_and_push_external(mapp, testapp, reqmock):
    """Upload a release and docs, push them to an external posturl.

    Uses reqmock to record outgoing POSTs; verifies three POSTs on success
    and a 502 with the remote status code when the remote fails.
    """
    api = mapp.create_and_use()
    mapp.upload_file_pypi("pkg1-2.6.tgz", b"123", "pkg1", "2.6")
    zipcontent = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", zipcontent, "pkg1", "")
    r = testapp.get(api.simpleindex + "pkg1")
    assert r.status_code == 200
    a = getfirstlink(r.text)
    assert "pkg1-2.6.tgz" in a.get("href")
    # the root index page must render
    r = testapp.get(api.index)
    assert r.status_code == 200
    # successful push: register, metadata and file upload -> three POSTs
    push_req = dict(name="pkg1", version="2.6",
                    posturl="http://whatever.com/",
                    username="******", password="******")
    rec = reqmock.mockresponse(url=None, code=200, method="POST", data="msg")
    body = json.dumps(push_req).encode("utf-8")
    r = testapp.request(api.index, method="PUSH", body=body,
                        expect_errors=True)
    assert r.status_code == 200
    assert len(rec.requests) == 3
    for request in rec.requests:
        assert request.url == push_req["posturl"]
    upload = rec.requests[2]
    # XXX properly decode www-url-encoded body and check zipcontent
    assert b"pkg1.zip" in upload.body
    assert zipcontent in upload.body
    # failing remote -> 502 with the remote's status code in the result
    reqmock.mockresponse(url=None, code=500, method="POST")
    r = testapp.request(api.index, method="PUSH", body=body,
                        expect_errors=True)
    assert r.status_code == 502
    result = r.json["result"]
    assert len(result) == 1
    assert result[0][0] == 500
def test_version_view(mapp, testapp):
    """The version HTML page shows author metadata, the rendered
    description, the file table and all expected links."""
    api = mapp.create_and_use()
    mapp.upload_file_pypi("pkg1-2.6.tar.gz", b"content", "pkg1", "2.6")
    mapp.upload_file_pypi("pkg1-2.6.zip", b"contentzip", "pkg1", "2.6")
    content = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200)
    mapp.set_versiondata(
        {"name": "pkg1", "version": "2.6", "author": "Foo Bear",
         "description": u"föö".encode('utf-8')},
        waithooks=True)
    r = testapp.get(api.index + '/pkg1/2.6',
                    headers=dict(accept="text/html"))
    assert r.status_code == 200
    title = r.html.find('title').text
    assert title == "user1/dev/: pkg1-2.6 metadata and description"
    info = dict((cell.text for cell in row.findAll('td'))
                for row in r.html.select('.projectinfos tr'))
    assert sorted(info.keys()) == ['author']
    assert info['author'] == 'Foo Bear'
    # exactly one rendered description block, non-ascii kept intact
    (description,) = r.html.select('#description')
    assert py.builtin._totext(
        description.renderContents().strip(), 'utf-8') == u'<p>föö</p>'
    filesinfo = [tuple(cell.text.strip() for cell in row.findAll('td'))
                 for row in r.html.select('.files tbody tr')]
    assert filesinfo == [
        ('pkg1-2.6.tar.gz', 'Source', '', '7 bytes', '',
         '9a0364b9e99bb480dd25e1f0284c8555'),
        ('pkg1-2.6.zip', 'Source', '', '10 bytes', '',
         '52360ae08d733016c5603d54b06b5300')]
    anchors = r.html.select('#content a')
    assert [(a.text, a.attrs['href']) for a in anchors] == [
        ("Documentation",
         "http://localhost/%s/pkg1/2.6/+d/index.html" % api.stagename),
        ("Simple index",
         "http://localhost/%s/+simple/pkg1" % api.stagename),
        ("pkg1-2.6.tar.gz",
         "http://localhost/%s/+f/9a0/364b9e99bb480/pkg1-2.6.tar.gz#md5=9a0364b9e99bb480dd25e1f0284c8555" % api.stagename),
        ("pkg1-2.6.zip",
         "http://localhost/%s/+f/523/60ae08d733016/pkg1-2.6.zip#md5=52360ae08d733016c5603d54b06b5300" % api.stagename)]
def test_upload_docs_no_project_ever_registered(mapp, testapp):
    """Uploading docs for a project that was never registered is rejected
    with a 400 response."""
    mapp.create_and_use()
    doczip = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", doczip, "pkg1", "", code=400)