Example No. 1
def test_parsehashspec():
    hash_algo, hash_value = parse_hash_spec("l1kj23")
    assert hash_algo is None and hash_value is None
    hash_algo, hash_value = parse_hash_spec("xyz=123098123")
    assert hash_algo is None and hash_value is None

    digest = hashlib.md5(b'123').hexdigest()
    hash_algo, hash_value = parse_hash_spec("md5=" + digest)
    assert hash_algo(b'123').hexdigest() == digest

    digest = hashlib.sha256(b'123').hexdigest()
    hash_algo, hash_value = parse_hash_spec("sha256=" + digest)
    assert hash_algo(b'123').hexdigest() == digest
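The function under test is not shown on this page. Judging only from the assertions above, a minimal sketch of what parse_hash_spec has to do could look like the following (an illustration written for this page, not the devpi implementation; it accepts any algorithm name that hashlib exposes as an attribute):

import hashlib

def parse_hash_spec(fragment):
    # Split an "algo=hexdigest" fragment into (hash constructor, hexdigest).
    # Return (None, None) when there is no "=" or hashlib does not provide
    # the named algorithm.
    algo_name, sep, hash_value = fragment.partition("=")
    if not sep:
        return None, None
    hash_algo = getattr(hashlib, algo_name, None)
    if hash_algo is None:
        return None, None
    return hash_algo, hash_value

With hashlib imported alongside it, the test body above passes against this sketch: the first two specs yield (None, None), and the md5/sha256 specs yield the matching hashlib constructors.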
Example No. 2
def get_checksum_error(content, hash_spec):
    hash_algo, hash_value = parse_hash_spec(hash_spec)
    hash_type = hash_spec.split("=")[0]
    digest = hash_algo(content).hexdigest()
    if digest != hash_value:
        return "%s mismatch, got %s, expected %s" % (hash_type, digest,
                                                     hash_value)
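A brief usage illustration of the helper above (the content bytes and specs are invented for this example, and parse_hash_spec is assumed to behave as sketched under Example No. 1):

import hashlib

content = b"release file bytes"
good_spec = "sha256=" + hashlib.sha256(content).hexdigest()
assert get_checksum_error(content, good_spec) is None

bad_spec = "sha256=" + "0" * 64
print(get_checksum_error(content, bad_spec))
# -> sha256 mismatch, got <actual digest>, expected 000...000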
Example No. 3
    def import_filedesc(self, stage, filedesc):
        assert stage.ixconfig["type"] != "mirror"
        rel = filedesc["relpath"]
        project = filedesc["projectname"]
        p = self.import_rootdir.join(rel)
        assert p.check(), p
        data = p.read("rb")
        if self.xom.config.args.hard_links:
            # wrap the data so the source path can be attached as an extra attribute
            data = BytesForHardlink(data)
            data.devpi_srcpath = p.strpath
        if filedesc["type"] == "releasefile":
            mapping = filedesc["entrymapping"]
            if self.dumpversion == "1":
                # previous versions would not add a version attribute
                version = BasenameMeta(p.basename).version
            else:
                version = filedesc["version"]

            link = stage.store_releasefile(
                project,
                version,
                p.basename,
                data,
                last_modified=mapping["last_modified"])
            # devpi-server-2.1 exported with md5 checksums
            if "md5" in mapping:
                assert "hash_spec" not in mapping
                mapping["hash_spec"] = "md5=" + mapping["md5"]
            hash_algo, hash_value = parse_hash_spec(mapping["hash_spec"])
            digest = hash_algo(link.entry.file_get_content()).hexdigest()
            if digest != hash_value:
                fatal("File %s has bad checksum %s, expected %s" %
                      (p, digest, hash_value))
            # note that the actual hash_type used within devpi-server is not
            # determined here but in store_releasefile/store_doczip/store_toxresult etc
        elif filedesc["type"] == "doczip":
            version = filedesc["version"]
            link = stage.store_doczip(project, version, data)
        elif filedesc["type"] == "toxresult":
            linkstore = stage.get_linkstore_perstage(filedesc["projectname"],
                                                     filedesc["version"])
            # we can not search for the full relative path because
            # it might use a different checksum
            basename = posixpath.basename(filedesc["for_entrypath"])
            link, = linkstore.get_links(basename=basename)
            link = stage.store_toxresult(link, json.loads(data.decode("utf8")))
        else:
            fatal("unknown file type: %s" % (type, ))
        history_log = filedesc.get('log')
        if history_log is None:
            link.add_log('upload', '<import>', dst=stage.name)
        else:
            link.add_logs(history_log)
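The checksum handling in the middle of this importer can be read in isolation. The sketch below (the helper name and the SystemExit are stand-ins chosen here, not devpi API) repeats the same two steps: normalize the bare md5 key found in devpi-server 2.1 exports into a hash_spec, then verify the stored content against it:

import hashlib

def verify_imported_file(mapping, content):
    # devpi-server 2.1 exports carried a bare "md5" key instead of "hash_spec"
    if "md5" in mapping and "hash_spec" not in mapping:
        mapping["hash_spec"] = "md5=" + mapping["md5"]
    hash_algo, hash_value = parse_hash_spec(mapping["hash_spec"])
    digest = hash_algo(content).hexdigest()
    if digest != hash_value:
        # the importer calls fatal() here; SystemExit stands in for it
        raise SystemExit("bad checksum %s, expected %s" % (digest, hash_value))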
Example No. 4
    def import_filedesc(self, stage, filedesc):
        assert stage.ixconfig["type"] != "mirror"
        rel = filedesc["relpath"]
        projectname = filedesc["projectname"]
        p = self.import_rootdir.join(rel)
        assert p.check(), p
        if filedesc["type"] == "releasefile":
            mapping = filedesc["entrymapping"]
            if self.dumpversion == "1":
                # previous versions would not add a version attribute
                version = BasenameMeta(p.basename).version
            else:
                version = filedesc["version"]

            link = stage.store_releasefile(projectname, version,
                                           p.basename, p.read("rb"),
                                           last_modified=mapping["last_modified"])
            # devpi-server-2.1 exported with md5 checksums
            if "md5" in mapping:
                assert "hash_spec" not in mapping
                mapping["hash_spec"] = "md5=" + mapping["md5"]
            hash_algo, hash_value = parse_hash_spec(mapping["hash_spec"])
            digest = hash_algo(link.entry.file_get_content()).hexdigest()
            assert digest == hash_value
            # note that the actual hash_type used within devpi-server is not
            # determined here but in store_releasefile/store_doczip/store_toxresult etc
        elif filedesc["type"] == "doczip":
            version = filedesc["version"]
            link = stage.store_doczip(projectname, version, p.read("rb"))
        elif filedesc["type"] == "toxresult":
            linkstore = stage.get_linkstore_perstage(filedesc["projectname"],
                                           filedesc["version"])
            # we can not search for the full relative path because
            # it might use a different checksum
            basename = posixpath.basename(filedesc["for_entrypath"])
            link, = linkstore.get_links(basename=basename)
            link = stage.store_toxresult(link, json.loads(p.read("rb").decode("utf8")))
        else:
            fatal("unknown file type: %s" % (type,))
        history_log = filedesc.get('log')
        if history_log is None:
            link.add_log('upload', '<import>', dst=stage.name)
        else:
            link.add_logs(history_log)
Example No. 5
 def matches_checksum(self, content):
     hash_algo, hash_value = parse_hash_spec(self.hash_spec)
     if not hash_algo:
         return True
     return hash_algo(content).hexdigest() == hash_value
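A self-contained illustration of this behaviour, using a hypothetical stand-in for the link/entry object that owns matches_checksum (parse_hash_spec is assumed to behave as sketched under Example No. 1):

import hashlib

class FakeLink:
    # illustrative holder of a hash_spec, not a devpi class
    def __init__(self, hash_spec=""):
        self.hash_spec = hash_spec

    def matches_checksum(self, content):
        hash_algo, hash_value = parse_hash_spec(self.hash_spec)
        if not hash_algo:
            return True
        return hash_algo(content).hexdigest() == hash_value

content = b"archive bytes"
link = FakeLink("sha256=" + hashlib.sha256(content).hexdigest())
assert link.matches_checksum(content)
assert not link.matches_checksum(b"tampered bytes")
# an unknown or missing algorithm means there is nothing to verify against
assert FakeLink("unknown=123").matches_checksum(b"anything")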
Example No. 6
 def hash_value(self):
     return parse_hash_spec(self._parsed[-1])[1]
Example No. 7
 def hash_algo(self):
     return parse_hash_spec(self._parsed[-1])[0]
Example No. 8
 def hash_spec(self):
     hashalgo, hash_value = parse_hash_spec(self._parsed[-1])
     if hashalgo:
         hashtype = self._parsed[-1].split("=")[0]
         return "%s=%s" %(hashtype, hash_value)
     return ""
Example No. 9
    def import_filedesc(self, stage, filedesc, versions):
        rel = filedesc["relpath"]
        project = filedesc["projectname"]
        p = self.import_rootdir.join(rel)
        assert p.check(), p
        data = p.read("rb")
        if self.xom.config.hard_links:
            # wrap the data so the source path can be attached as an extra attribute
            data = BytesForHardlink(data)
            data.devpi_srcpath = p.strpath
        if filedesc["type"] == "releasefile":
            mapping = filedesc["entrymapping"]
            if self.dumpversion == "1":
                # previous versions would not add a version attribute
                version = BasenameMeta(p.basename).version
            else:
                version = filedesc["version"]

            if hasattr(stage, 'store_releasefile'):
                link = stage.store_releasefile(
                    project,
                    version,
                    p.basename,
                    data,
                    last_modified=mapping["last_modified"])
                entry = link.entry
            else:
                link = None
                url = URL(
                    mapping['url']).replace(fragment=mapping['hash_spec'])
                entry = self.xom.filestore.maplink(url, stage.username,
                                                   stage.index, project)
                entry.file_set_content(data, mapping["last_modified"])
                (_, links_with_data, serial) = stage._load_cache_links(project)
                if links_with_data is None:
                    links_with_data = []
                links = [(url.basename, entry.relpath)]
                requires_python = [versions[version].get('requires_python')]
                yanked = [versions[version].get('yanked')]
                for key, href, require_python, is_yanked in links_with_data:
                    links.append((key, href))
                    requires_python.append(require_python)
                    yanked.append(is_yanked)
                stage._save_cache_links(project, links, requires_python,
                                        yanked, serial)
            # devpi-server-2.1 exported with md5 checksums
            if "md5" in mapping:
                assert "hash_spec" not in mapping
                mapping["hash_spec"] = "md5=" + mapping["md5"]
            hash_algo, hash_value = parse_hash_spec(mapping["hash_spec"])
            digest = hash_algo(entry.file_get_content()).hexdigest()
            if digest != hash_value:
                fatal("File %s has bad checksum %s, expected %s" %
                      (p, digest, hash_value))
            # note that the actual hash_type used within devpi-server is not
            # determined here but in store_releasefile/store_doczip/store_toxresult etc
        elif filedesc["type"] == "doczip":
            version = filedesc["version"]
            link = stage.store_doczip(project, version, data)
        elif filedesc["type"] == "toxresult":
            linkstore = stage.get_linkstore_perstage(filedesc["projectname"],
                                                     filedesc["version"])
            # we can not search for the full relative path because
            # it might use a different checksum
            basename = posixpath.basename(filedesc["for_entrypath"])
            link, = linkstore.get_links(basename=basename)
            link = stage.store_toxresult(link, json.loads(data.decode("utf8")))
        else:
            fatal("unknown file type: %s" % (type, ))
        if link is not None:
            history_log = filedesc.get('log')
            if history_log is None:
                link.add_log('upload', '<import>', dst=stage.name)
            else:
                link.add_logs(history_log)