Example #1
    def test_multiple_docs_on_same_version(self, impexp):
        mapp1 = impexp.mapp1
        api = mapp1.create_and_use()
        mapp1.set_versiondata({"name": "hello", "version": "1.0"})
        with mapp1.xom.keyfs.transaction(write=True):
            # create two doczip entries with log entries at different times
            stage = mapp1.xom.model.getstage(mapp1.current_stage)
            link = stage.store_doczip(
                "Hello", "1.0",
                content=zip_dict({
                    "index.html": "<html><body>Hello"}))
            link.add_log('upload', stage.user.name, dst=stage.name)
            time.sleep(1.1)  # force different times in log entry
            link = stage.store_doczip(
                "hello", "1.0",
                content=zip_dict({
                    "index.html": "<html><body>hello"}))
            link.add_log('upload', stage.user.name, dst=stage.name)
        impexp.export()
        mapp2 = impexp.new_import()
        with mapp2.xom.keyfs.transaction(write=False):
            stage = mapp2.xom.model.getstage(api.stagename)
            doczip = stage.get_doczip("hello", "1.0")
            archive = Archive(py.io.BytesIO(doczip))
            assert 'index.html' in archive.namelist()
            assert py.builtin._totext(
                archive.read("index.html"), 'utf-8') == "<html><body>hello"
Example #2
    def test_hard_links(self, makeimpexp):
        impexp = makeimpexp(options=('--hard-links',))
        mapp1 = impexp.mapp1
        api = mapp1.create_and_use()
        content = b'content'
        mapp1.upload_file_pypi("he-llo-1.0.tar.gz", content, "he_llo", "1.0")
        content = zip_dict({"index.html": "<html/>"})
        mapp1.upload_doc("he-llo.zip", content, "he-llo", "")

        impexp.export()

        assert impexp.exportdir.join(
          'dataindex.json').stat().nlink == 1
        assert impexp.exportdir.join(
          'user1', 'dev', 'he_llo-1.0.doc.zip').stat().nlink == 2
        assert impexp.exportdir.join(
          'user1', 'dev', 'he_llo', 'he-llo-1.0.tar.gz').stat().nlink == 2

        mapp2 = impexp.new_import()

        with mapp2.xom.keyfs.transaction():
            stage = mapp2.xom.model.getstage(api.stagename)
            verdata = stage.get_versiondata_perstage("he_llo", "1.0")
            assert verdata["version"] == "1.0"
            links = stage.get_releaselinks("he_llo")
            assert len(links) == 1
            assert links[0].entry.file_get_content() == b'content'
            doczip = stage.get_doczip("he_llo", "1.0")
            archive = Archive(py.io.BytesIO(doczip))
            assert 'index.html' in archive.namelist()
            assert py.builtin._totext(
                archive.read("index.html"), 'utf-8') == "<html/>"
Example #3
    def test_export_hard_links(self, makeimpexp):
        impexp = makeimpexp(options=('--hard-links', ))
        mapp1 = impexp.mapp1
        api = mapp1.create_and_use()
        content = b'content'
        mapp1.upload_file_pypi("he-llo-1.0.tar.gz", content, "he_llo", "1.0")
        content = zip_dict({"index.html": "<html/>"})
        mapp1.upload_doc("he-llo.zip", content, "he-llo", "")

        # export the data
        impexp.export()

        # check the number of links of the files in the exported data
        assert impexp.exportdir.join('dataindex.json').stat().nlink == 1
        assert impexp.exportdir.join('user1', 'dev',
                                     'he_llo-1.0.doc.zip').stat().nlink == 2
        assert impexp.exportdir.join('user1', 'dev', 'he-llo', '1.0',
                                     'he-llo-1.0.tar.gz').stat().nlink == 2

        # now import the data
        mapp2 = impexp.new_import()

        # and check that the files have the expected content
        with mapp2.xom.keyfs.transaction():
            stage = mapp2.xom.model.getstage(api.stagename)
            verdata = stage.get_versiondata_perstage("he_llo", "1.0")
            assert verdata["version"] == "1.0"
            links = stage.get_releaselinks("he_llo")
            assert len(links) == 1
            assert links[0].entry.file_get_content() == b'content'
            doczip = stage.get_doczip("he_llo", "1.0")
            archive = Archive(py.io.BytesIO(doczip))
            assert 'index.html' in archive.namelist()
            assert py.builtin._totext(archive.read("index.html"),
                                      'utf-8') == "<html/>"
Example #4
def test_upload_docs_no_version(mapp, testapp, proj):
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    mapp.set_versiondata(dict(name="Pkg1", version="1.0"))
    mapp.upload_doc("pkg1.zip", content, "Pkg1", "")
    vv = get_view_version_links(testapp, api.index, "Pkg1", "1.0", proj=proj)
    link = vv.get_link("doczip")
    assert link.href.endswith("/Pkg1-1.0.doc.zip")
    r = testapp.get(link.href)
    archive = Archive(py.io.BytesIO(r.body))
    assert 'index.html' in archive.namelist()
Example #5
def test_upload_docs(mapp, testapp, proj):
    api = mapp.create_and_use()
    content = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=400)
    mapp.set_versiondata({"name": "pkg1", "version": "2.6"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "2.6", code=200)
    vv = get_view_version_links(testapp, api.index, "pkg1", "2.6", proj=proj)
    link = vv.get_link(rel="doczip")
    assert link.href.endswith("/pkg1-2.6.doc.zip")
    r = testapp.get(link.href)
    archive = Archive(py.io.BytesIO(r.body))
    assert 'index.html' in archive.namelist()
Example #6
    def test_docs_are_preserved(self, impexp):
        mapp1 = impexp.mapp1
        api = mapp1.create_and_use()
        mapp1.set_versiondata({"name": "hello", "version": "1.0"})
        content = zip_dict({"index.html": "<html/>"})
        mapp1.upload_doc("hello.zip", content, "hello", "")
        impexp.export()
        mapp2 = impexp.new_import()
        with mapp2.xom.keyfs.transaction(write=False):
            stage = mapp2.xom.model.getstage(api.stagename)
            doczip = stage.get_doczip("hello", "1.0")
            archive = Archive(py.io.BytesIO(doczip))
            assert 'index.html' in archive.namelist()
            assert py.builtin._totext(
                archive.read("index.html"), 'utf-8') == "<html/>"
Example #7
    def test_docs_are_preserved(self, impexp):
        mapp1 = impexp.mapp1
        api = mapp1.create_and_use()
        mapp1.set_versiondata({"name": "hello", "version": "1.0"})
        content = zip_dict({"index.html": "<html/>"})
        mapp1.upload_doc("hello.zip", content, "hello", "")
        impexp.export()
        mapp2 = impexp.new_import()
        with mapp2.xom.keyfs.transaction(write=False):
            stage = mapp2.xom.model.getstage(api.stagename)
            doczip = stage.get_doczip("hello", "1.0")
            archive = Archive(py.io.BytesIO(doczip))
            assert 'index.html' in archive.namelist()
            assert py.builtin._totext(archive.read("index.html"),
                                      'utf-8') == "<html/>"
Example #8
    def store_doczip(self, name, version, docfile):
        """ store zip file and unzip doc content for the
        specified "name" project. """
        if not version:
            version = self.get_metadata_latest(name)["version"]
            log.info("store_doczip: derived version %s", version)
        key = self.keyfs.PROJCONFIG(user=self.user,
                                    index=self.index,
                                    name=name)
        with key.locked_update() as projectconfig:
            verdata = projectconfig[version]
            filename = "%s-%s.doc.zip" % (name, version)
            entry = self.xom.filestore.store_file(self.user, self.index,
                                                  filename, docfile)
            verdata["+doczip"] = entry.relpath
        # unpack
        key = self._doc_key(name, version)

        # XXX locking? (unzipping could happen concurrently in theory)
        tempdir = self.keyfs.mkdtemp(name)
        with Archive(entry.filepath.open("rb")) as archive:
            archive.extract(tempdir)
        keypath = key.filepath
        if keypath.check():
            old = keypath.new(basename="old-" + keypath.basename)
            keypath.move(old)
            tempdir.move(keypath)
            old.remove()
        else:
            keypath.dirpath().ensure(dir=1)
            tempdir.move(keypath)
        return keypath
Example #9
    def test_storedoczipfile(self, stage, bases):
        from devpi_common.archive import Archive
        stage.set_versiondata(udict(name="pkg1", version="1.0"))
        content = zip_dict({"index.html": "<html/>",
            "_static": {}, "_templ": {"x.css": ""}})
        stage.store_doczip("pkg1", "1.0", content)
        archive = Archive(BytesIO(stage.get_doczip("pkg1", "1.0")))
        assert 'index.html' in archive.namelist()

        content = zip_dict({"nothing": "hello"})
        stage.store_doczip("pkg1", "1.0", content)
        archive = Archive(BytesIO(stage.get_doczip("pkg1", "1.0")))
        namelist = archive.namelist()
        assert 'nothing' in namelist
        assert 'index.html' not in namelist
        assert '_static' not in namelist
        assert '_templ' not in namelist
Example #10
def test_zip_dir(tmpdir):
    source = tmpdir.join("source")
    newdest = tmpdir.join("newdest")
    dest = tmpdir.join("dest.zip")
    source.ensure("file")
    source.ensure("sub", "subfile")
    zip_dir(source, dest)
    with Archive(dest) as archive:
        archive.extract(newdest)
    assert newdest.join("file").isfile()
    assert newdest.join("sub", "subfile").isfile()

    newdest.remove()
    with Archive(py.io.BytesIO(zip_dir(source))) as archive:
        archive.extract(newdest)
    assert newdest.join("file").isfile()
    assert newdest.join("sub", "subfile").isfile()
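
The test above exercises both calling conventions of zip_dir: given a destination path it writes the archive to disk, and without one it returns the archive as bytes. A rough sketch of a helper with that dual behaviour (illustration only, not devpi_common's actual code; zip_dir_sketch is a hypothetical name):

import io
import os
import zipfile


def zip_dir_sketch(basedir, dest=None):
    # Pack everything under basedir into a zip; write it to dest if
    # given, otherwise return the archive as bytes.
    buf = io.BytesIO() if dest is None else open(str(dest), "wb")
    try:
        with zipfile.ZipFile(buf, "w") as zf:
            for root, dirs, files in os.walk(str(basedir)):
                for fname in files:
                    path = os.path.join(root, fname)
                    zf.write(path, os.path.relpath(path, str(basedir)))
        if dest is None:
            return buf.getvalue()
    finally:
        buf.close()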
Example #11
    def unpack(self):
        self.hub.info("unpacking", self.path_archive, "to", str(self.rootdir))
        with Archive(self.path_archive) as archive:
            archive.extract(self.rootdir)
        basename = URL(self.link.url).basename
        pkgname, version = splitbasename(basename)[:2]
        subdir = "%s-%s" % (pkgname, version)
        inpkgdir = self.rootdir.join(subdir)
        assert inpkgdir.check(), inpkgdir
        self.path_unpacked = inpkgdir
Example #12
    def unpack(self):
        self.hub.info("unpacking", self.path_archive, "to", str(self.rootdir))
        with Archive(self.path_archive) as archive:
            archive.extract(self.rootdir)
        pkgname = self.versioninfo.versiondata['name']
        version = self.versioninfo.versiondata['version']
        if self.link.basename.endswith(".whl"):
            inpkgdir = self.rootdir
        else:
            inpkgdir = self.rootdir.join("%s-%s" % (pkgname, version))
        assert inpkgdir.check(), inpkgdir
        self.path_unpacked = inpkgdir
Example #13
    def test_getdoczip(self, stage, bases, tmpdir):
        stage.set_versiondata(udict(name="pkg1", version="1.0"))
        assert not stage.get_doczip("pkg1", "1.0")
        content = zip_dict({"index.html": "<html/>",
            "_static": {}, "_templ": {"x.css": ""}})
        stage.store_doczip("pkg1", "1.0", content)
        doczip = stage.get_doczip("pkg1", "1.0")
        assert doczip
        with Archive(BytesIO(doczip)) as archive:
            archive.extract(tmpdir)
        assert tmpdir.join("index.html").read() == "<html/>"
        assert tmpdir.join("_static").check(dir=1)
        assert tmpdir.join("_templ", "x.css").check(file=1)
Example #14
def test_upload_and_push_internal(mapp, testapp, monkeypatch, proj):
    mapp.create_user("user1", "1")
    mapp.create_and_login_user("user2")
    mapp.create_index("prod", indexconfig=dict(acl_upload=["user1", "user2"]))
    mapp.create_index("dev", indexconfig=dict(acl_upload=["user2"]))

    mapp.login("user1", "1")
    mapp.create_index("dev")
    mapp.use("user1/dev")
    mapp.upload_file_pypi("pkg1-2.6.tgz", b"123", "pkg1", "2.6")
    content = zip_dict({"index.html": "<html/>"})
    mapp.upload_doc("pkg1.zip", content, "pkg1", "")

    # check that push is authorized and executed towards user2/prod index
    req = dict(name="pkg1", version="2.6", targetindex="user2/prod")
    r = testapp.push("/user1/dev", json.dumps(req))
    assert r.status_code == 200
    vv = get_view_version_links(testapp, "/user2/prod", "pkg1", "2.6",
                                proj=proj)
    link = vv.get_link(rel="releasefile")
    assert link.href.endswith("/pkg1-2.6.tgz")
    # we check here that the upload of docs without version was
    # automatically tied to the newest release metadata
    link = vv.get_link(rel="doczip")
    assert link.href.endswith("/pkg1-2.6.doc.zip")
    r = testapp.get(link.href)
    archive = Archive(py.io.BytesIO(r.body))
    assert 'index.html' in archive.namelist()

    # reconfigure inheritance and see if we get shadowing information
    mapp.modify_index("user1/dev", indexconfig=dict(bases=("/user2/prod",)))
    vv = get_view_version_links(testapp, "/user1/dev", "pkg1", "2.6", proj=proj)
    link = vv.get_link(rel="releasefile")
    assert link.href.endswith("/pkg1-2.6.tgz")
    shadows = vv.shadowed()
    assert len(shadows) == 1, vv.versiondata
    vv = shadows[0]
    link = vv.get_link(rel="releasefile")
    assert link.href.endswith("/pkg1-2.6.tgz")
Example #15
    def archive(self, request, archive_path):
        if request.param == "path":
            arch = Archive(archive_path)
        else:
            f = archive_path.open("rb")
            arch = Archive(f)
        yield arch
        arch.close()
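
The fixture branches on request.param, but the snippet omits the parametrization itself; presumably it is declared with a decorator along these lines (the second parameter name, "file", is an assumption inferred from the else branch opening a file object):

import pytest


# Presumed parametrization for the fixture above; only "path" is visible
# in the snippet, "file" is a guessed name for the file-object variant.
@pytest.fixture(params=["path", "file"])
def archive(self, request, archive_path):
    ...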
Example #16
    def test_multiple_store_doczip_uses_project(self, stage, bases, tmpdir):
        # check that two store_doczip calls with slightly
        # different names will not lead to two doczip entries
        stage.set_versiondata(udict(name="pkg1", version="1.0"))
        stage.store_doczip("pkg1", "1.0", zip_dict({}))
        content2 = zip_dict({"index.html": "<html/>"})
        stage.store_doczip("Pkg1", "1.0", content2)

        # check we only have one doczip link
        linkstore = stage.get_linkstore_perstage("pkg1", "1.0")
        links = linkstore.get_links(rel="doczip")
        assert len(links) == 1

        # get doczip and check it's really the latest one
        doczip2 = stage.get_doczip("pkg1", "1.0")
        with Archive(BytesIO(doczip2)) as archive:
            archive.extract(tmpdir)
        assert tmpdir.join("index.html").read() == "<html/>"
Example #17
    def unpack(self):
        self.hub.info("unpacking", self.path_archive, "to", str(self.rootdir))
        with Archive(self.path_archive) as archive:
            archive.extract(self.rootdir)
        pkgname = self.versioninfo.versiondata['name']
        version = self.versioninfo.versiondata['version']
        if self.link.basename.endswith(".whl"):
            inpkgdir = self.rootdir
        else:
            inpkgdir = self.rootdir.join("%s-%s" % (pkgname, version))
            if not inpkgdir.check():
                # sometimes dashes are replaced by underscores,
                # for example the source releases of argon2_cffi
                inpkgdir = self.rootdir.join(
                    "%s-%s" % (pkgname.replace('-', '_'), version))
        if not inpkgdir.check():
            self.hub.fatal("Couldn't find unpacked package in", inpkgdir)
        self.path_unpacked = inpkgdir
Example #18
def unpack_docs(stage, name, version, entry):
    # unpack, maybe a bit carelessly, but in principle
    # we are not losing the original zip file anyway
    unpack_path = get_unpack_path(stage, name, version)
    hash_path = unpack_path.join('.hash')
    if hash_path.exists():
        with hash_path.open() as f:
            if f.read().strip() == entry.hash_spec:
                return unpack_path
    if unpack_path.exists():
        unpack_path.remove()
    with entry.file_open_read() as f:
        with Archive(f) as archive:
            archive.extract(unpack_path)
    with hash_path.open('w') as f:
        f.write(entry.hash_spec)
    threadlog.debug("%s: unpacked %s-%s docs to %s", stage.name, name, version,
                    unpack_path)
    return unpack_path
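
The guard in unpack_docs (skip re-extraction while the recorded hash still matches the archive, otherwise wipe the directory and unpack again) can be sketched independently of devpi's keyfs and threadlog machinery; unpack_and_cache and the sha256 digest below are illustrative assumptions, not devpi's code:

import hashlib
import shutil
import zipfile
from pathlib import Path


def unpack_and_cache(zip_path: Path, unpack_path: Path) -> Path:
    # Skip re-extraction when the recorded digest still matches the archive.
    digest = hashlib.sha256(zip_path.read_bytes()).hexdigest()
    hash_path = unpack_path / ".hash"
    if hash_path.exists() and hash_path.read_text().strip() == digest:
        return unpack_path
    if unpack_path.exists():
        shutil.rmtree(unpack_path)
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(unpack_path)
    hash_path.write_text(digest)
    return unpack_path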
Example #19
    def test_storedoczipfile(self, stage, bases):
        from devpi_common.archive import Archive
        stage.set_versiondata(udict(name="pkg1", version="1.0"))
        content = zip_dict({"index.html": "<html/>",
            "_static": {}, "_templ": {"x.css": ""}})
        stage.store_doczip("pkg1", "1.0", content)
        archive = Archive(BytesIO(stage.get_doczip("pkg1", "1.0")))
        assert 'index.html' in archive.namelist()

        content = zip_dict({"nothing": "hello"})
        stage.store_doczip("pkg1", "1.0", content)
        archive = Archive(BytesIO(stage.get_doczip("pkg1", "1.0")))
        namelist = archive.namelist()
        assert 'nothing' in namelist
        assert 'index.html' not in namelist
        assert '_static' not in namelist
        assert '_templ' not in namelist
Example #20
    def test_unknown_archive(self):
        with pytest.raises(UnsupportedArchive):
            Archive(py.io.BytesIO(b"123"))
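
Archive raises UnsupportedArchive when the payload is neither a zip nor a tar archive. The dispatch can be pictured roughly like this (a sketch only, not devpi_common's actual implementation; the UnsupportedArchive import location is assumed to match the Archive import used in the tests above):

import tarfile
import zipfile

from devpi_common.archive import UnsupportedArchive  # assumed import location


def open_archive_sketch(path):
    # Try zip first, then tar; give up with UnsupportedArchive otherwise.
    if zipfile.is_zipfile(path):
        return zipfile.ZipFile(path)
    try:
        return tarfile.open(path)
    except tarfile.TarError:
        raise UnsupportedArchive(path)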
Example #21
def test_zip_dict(tmpdir):
    content = zip_dict({"one": {"nested": "1"}, "two": {}})
    with Archive(py.io.BytesIO(content)) as archive:
        archive.extract(tmpdir)
    assert tmpdir.join("one", "nested").read() == "1"
    assert tmpdir.join("two").isdir()
Example #22
def test_tarfile_outofbound(tmpdir):
    with Archive(datadir.join("slash.tar.gz")) as archive:
        with pytest.raises(ValueError):
            archive.extract(tmpdir)
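
The slash.tar.gz test data presumably contains a member whose name escapes the extraction directory, which Archive rejects instead of extracting. A comparable fixture could be produced roughly like this (illustrative only; the real file's contents are not shown here):

import io
import tarfile


def make_outofbound_tar():
    # Build a .tar.gz whose single member has a /-prefixed name, i.e. it
    # would land outside any extraction directory if extracted naively.
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w:gz") as tar:
        data = b"evil"
        info = tarfile.TarInfo(name="/etc/evil.txt")
        info.size = len(data)
        tar.addfile(info, io.BytesIO(data))
    return buf.getvalue()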