def web_addpkg(self, reponame, name, version, fobj):
    repo = get_repo(db(), reponame)

    # write wheel to temp storage
    with TemporaryDirectory() as tdir:
        tmppkgpath = os.path.join(tdir, fobj.filename)  #TODO verify filename doesn't have any nonsense like ../../passwd
        with open(tmppkgpath, "wb") as fdest:
            shasum = copysha256(fobj.file, fdest)

        metadata = parse_wheel(tmppkgpath)

        assert version == metadata["fields"]["version"], \
            "wheel metadata version doesn't match supplied version"
        assert fobj.filename == metadata["wheelname"], \
            f"file name is invalid, wanted '{metadata['wheelname']}'"

        # s3 path - repos/<reponame>/wheels/f/foo.wheel
        dpath = os.path.join(self.basepath, "repos", repo.name, "wheels",
                             metadata["wheelname"][0].lower(), metadata["wheelname"])

        files = self.s3.list_objects(Bucket=self.bucket, Prefix=dpath).get("Contents")
        if files:
            print(f"will overwrite: {files}")

        # add to db
        pkg = PipPackage(repo=repo,
                         dist=metadata["fields"]["dist"],
                         dist_norm=normalize(metadata["fields"]["dist"]),  # index me ?
                         version=metadata["fields"]["version"],
                         build=metadata["fields"]["build"],
                         python=metadata["fields"]["python"],
                         api=metadata["fields"]["api"],
                         platform=metadata["fields"]["platform"],
                         fname=metadata["wheelname"],
                         size=metadata["size"],
                         sha256=shasum,
                         fields=json.dumps(metadata))
        db().add(pkg)
        db().commit()

        try:
            with open(tmppkgpath, "rb") as f:
                response = self.s3.put_object(Body=f, Bucket=self.bucket, Key=dpath)
                assert response["ResponseMetadata"]["HTTPStatusCode"] == 200, \
                    f"Upload failed: {response}"
        except Exception:
            db().delete(pkg)
            db().commit()
            raise

    return json.dumps(metadata, indent=4)
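# A minimal sketch of the copysha256() helper used above, assuming it streams the
# upload into the destination file while hashing and returns the hex digest that
# is stored on the package row; the project's real helper may differ.
import hashlib


def copysha256(fsrc, fdst, chunk_size=65535):
    """Copy fsrc to fdst in chunks and return the sha256 hex digest of the data."""
    digest = hashlib.sha256()
    while True:
        chunk = fsrc.read(chunk_size)
        if not chunk:
            break
        digest.update(chunk)
        fdst.write(chunk)
    return digest.hexdigest()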
def handle_navigation(self, reponame=None, pkgname=None, filename=None):
    if reponame:
        repo = get_repo(db(), reponame, create_ok=False)
        if pkgname:
            return self.tpl.get_template("tar/package.html") \
                .render(repo=repo,
                        pkgs=db().query(TarPackage)
                        .filter(TarPackage.repo == repo,
                                TarPackage.name == pkgname)
                        .order_by(TarPackage.version).all())
        return self.tpl.get_template("tar/repo.html") \
            .render(repo=repo, pkgs=self._get_dists(repo))
    return self.tpl.get_template("tar/root.html") \
        .render(repos=db().query(TarRepo).order_by(TarRepo.name).all())
def __call__(self, *segments, reponame=None):
    repo = get_repo(db(), reponame, create_ok=False)

    if len(segments) == 4 and segments[3] == "Packages":
        distname, componentname, indexname, pkgs = segments
        dist = get_dist(db(), repo, distname, create_ok=False)
        if not repo or not dist:
            raise cherrypy.HTTPError(404)
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        return dist.packages_cache

    elif len(segments) == 2:
        distname, target = segments
        dist = get_dist(db(), repo, distname, create_ok=False)
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        if target == "Release":
            return dist.release_cache
        elif target == "Release.gpg":
            return dist.sig_cache
        elif target == "install":
            return """#!/bin/sh -ex
wget -qO- {scheme}://{host}/repo/apt/{reponame}/pubkey | apt-key add -
echo 'deb {scheme}://{host}/repo/apt/{reponame}/ {dist} main' | tee /etc/apt/sources.list.d/{reponame}-{dist}.list
apt-get update
""".format(scheme=cherrypy.request.scheme,
           host=cherrypy.request.headers['Host'],
           reponame=repo.name,
           dist=dist.name)
        else:
            raise cherrypy.HTTPError(404)

    elif len(segments) == 1:
        distname = segments[0]
        dist = get_dist(db(), repo, distname, create_ok=False)
        body = ""
        for package in db().query(AptPackage).filter(
                AptPackage.repo == repo,
                AptPackage.dist == dist).order_by(AptPackage.fname).all():
            body += "<a href='/repo/apt/{reponame}/packages/{dist.name}/{fname[0]}/{fname}'>{fname}</a><br />" \
                .format(reponame=repo.name, dist=dist, fname=package.fname)
        return body

    raise cherrypy.HTTPError(404)
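# Example client-side fetch of the plain-text indexes served above. The host and
# repo/dist names are hypothetical, and the handler ignores the component/index
# path segments as long as the path ends in "Packages"; illustration only.
import requests

base = "http://pkgs.example.com/repo/apt/myrepo/dists/stable"  # hypothetical
release = requests.get(f"{base}/Release").text
packages = requests.get(f"{base}/main/binary-amd64/Packages").text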
def _get_dists(self, repo):
    # yield one PipPackage per distinct dist name; relies on the query being
    # ordered by dist so that duplicates are adjacent
    lastdist = None
    for dist in db().query(PipPackage).filter(
            PipPackage.repo == repo).order_by(PipPackage.dist).all():
        if lastdist and dist.dist == lastdist:
            continue
        yield dist
        lastdist = dist.dist
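# A possible alternative to the manual dedup loop above, using a DISTINCT query.
# Note that the loop yields full PipPackage rows (one per dist), which the
# templates may rely on; this sketch yields only the distinct dist names.
def _get_dist_names(self, repo):
    for (distname,) in db().query(PipPackage.dist).filter(
            PipPackage.repo == repo).distinct().order_by(PipPackage.dist):
        yield distname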
def handle_navigation(self, reponame=None, distname=None, filename=None):
    if reponame:
        repo = get_repo(db(), reponame, create_ok=False)
        if distname:
            return self.tpl.get_template("pypi/dist.html") \
                .render(repo=repo,
                        pkgs=db().query(PipPackage)
                        .filter(PipPackage.repo == repo,
                                PipPackage.dist_norm == distname)
                        .order_by(PipPackage.version).all(),
                        distname=normalize(distname))
        return self.tpl.get_template("pypi/repo.html") \
            .render(repo=repo, dists=self._get_dists(repo))
    return self.tpl.get_template("pypi/root.html") \
        .render(repos=db().query(PipRepo).order_by(PipRepo.name).all())
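# A likely implementation of the normalize() helper used for dist_norm lookups,
# assuming it performs PEP 503 project-name normalization; the real helper may
# differ.
import re


def normalize(name):
    """Lowercase the name and collapse runs of '-', '_' and '.' into a single '-'."""
    return re.sub(r"[-_.]+", "-", name).lower()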
def _get_dists(self, repo):
    lastpkg = None
    for pkg in db().query(TarPackage).filter(
            TarPackage.repo == repo).order_by(TarPackage.fname).all():
        if lastpkg and pkg.name == lastpkg:
            continue
        yield pkg
        lastpkg = pkg.name
def web_addpkg(self, reponame, name, version, fobj):
    repo = get_repo(db(), reponame)

    # write tarball to temp storage
    with TemporaryDirectory() as tdir:
        tmppkgpath = os.path.join(tdir, fobj.filename)  #TODO verify filename doesn't have any nonsense like ../../passwd
        with open(tmppkgpath, "wb") as fdest:
            shasum = copysha256(fobj.file, fdest)

        #TODO assert that the uploaded file smells like a tarball
        #TODO assert the version string matches allowed chars
        #TODO assert the name string matches allowed chars
        #TODO support non-gzip
        fname = f"{name}-{version}.tar.gz"

        # add to db
        tar = TarPackage(repo=repo,
                         name=name,
                         version=version,
                         fname=fname,
                         size=os.path.getsize(tmppkgpath),
                         sha256=shasum)

        # s3 path - repos/<reponame>/tarballs/f/foo/foo-1234.tar.gz
        dpath = os.path.join(self.basepath, tar.blobpath)

        files = self.s3.list_objects(Bucket=self.bucket, Prefix=dpath).get("Contents")
        if files:
            print(f"will overwrite: {files}")

        db().add(tar)
        db().commit()

        try:
            with open(tmppkgpath, "rb") as f:
                response = self.s3.put_object(Body=f, Bucket=self.bucket, Key=dpath)
                assert response["ResponseMetadata"]["HTTPStatusCode"] == 200, \
                    f"Upload failed: {response}"
        except Exception:
            db().delete(tar)
            db().commit()
            raise

    return json.dumps({"ok": True}, indent=4)  #TODO do something with this
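# Illustrative sketches for the TODO checks above (path traversal in the uploaded
# filename, allowed characters in name/version); these helpers are assumptions,
# not part of the existing codebase.
import os.path
import re

_NAME_RE = re.compile(r"^[A-Za-z0-9][A-Za-z0-9._-]*$")


def is_safe_filename(filename):
    """True if the filename is a bare name with no path components or traversal."""
    return bool(filename) and filename == os.path.basename(filename) \
        and filename not in (".", "..")


def is_valid_name_version(name, version):
    """True if both strings use a conservative character set."""
    return bool(_NAME_RE.match(name)) and bool(_NAME_RE.match(version))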
def index(self, reponame=None, regen=False):
    if reponame:
        repo = get_repo(db(), reponame, create_ok=False)
        yield "<a href='/repo/apt/{reponame}/pubkey'>pubkey</a> " \
              "<a href='/repo/apt/{reponame}?regen=1'>regen</a><hr/>".format(reponame=repo.name)
        for dist in db().query(AptDist).filter(
                AptDist.repo == repo).order_by(AptDist.name).all():
            yield ("<a href='/repo/apt/{reponame}/dists/{name}'>{name}</a>: "
                   "<a href='/repo/apt/{reponame}/dists/{name}/main/indexname/Packages'>Packages</a> "
                   "<a href='/repo/apt/{reponame}/dists/{name}/Release'>Release</a> "
                   "<a href='/repo/apt/{reponame}/dists/{name}/Release.gpg'>Release.gpg</a> "
                   "<a href='/repo/apt/{reponame}/dists/{name}/install'>install</a><br />"
                   ).format(reponame=repo.name, name=dist.name)
            if regen:
                self.base.regen_dist(dist.id)
        # yield "about apt repo '{}'".format(reponame)
    else:
        for repo in db().query(AptRepo).order_by(AptRepo.name).all():
            yield "<a href='/repo/apt/{name}'>{name}</a><br/>".format(name=repo.name)
def __call__(self, *segments, reponame=None):
    distname, firstletter, pkgname = segments
    repo = get_repo(db(), reponame, create_ok=False)
    dist = get_dist(db(), repo, distname, create_ok=False)
    package = db().query(AptPackage).filter(
        AptPackage.repo == repo,
        AptPackage.dist == dist,
        AptPackage.fname == pkgname).first()
    if not package:
        raise cherrypy.HTTPError(404)

    dpath = os.path.join(self.base.basepath, package.blobpath)

    if cherrypy.request.method == "DELETE":
        db().delete(package)
        self.base.s3.delete_object(Bucket=self.base.bucket, Key=dpath)
        db().commit()
        return
    elif cherrypy.request.method not in ("GET", "HEAD"):
        raise cherrypy.HTTPError(405)

    response = self.base.s3.get_object(Bucket=self.base.bucket, Key=dpath)

    def stream():
        while True:
            data = response["Body"].read(65535)
            if not data:
                return
            yield data

    cherrypy.response.headers["Content-Type"] = "application/x-debian-package"
    cherrypy.response.headers["Content-Length"] = response["ContentLength"]

    return stream()
def handle_download(self, reponame, distname, filename):
    repo = get_repo(db(), reponame, create_ok=False)
    pkg = db().query(TarPackage).filter(
        TarPackage.repo == repo,
        TarPackage.fname == filename).first()
    if not pkg:
        raise cherrypy.HTTPError(404)

    dpath = os.path.join(self.base.basepath, pkg.blobpath)
    print("dpath=", dpath)
    print("blobpath=", pkg.blobpath)
    print("basepath=", self.base.basepath)

    if str(cherrypy.request.method) == "DELETE":
        db().delete(pkg)
        files = self.base.s3.list_objects(Bucket=self.base.bucket,
                                          Prefix=dpath).get("Contents")
        if files:
            self.base.s3.delete_object(Bucket=self.base.bucket, Key=dpath)
        db().commit()
        return "OK"  #TODO delete the repo if we've emptied it(?)

    elif str(cherrypy.request.method) == "GET":
        response = self.base.s3.get_object(Bucket=self.base.bucket, Key=dpath)
        cherrypy.response.headers["Content-Type"] = "application/octet-stream"
        cherrypy.response.headers["Content-Length"] = response["ContentLength"]

        def stream():
            while True:
                data = response["Body"].read(65535)
                if not data:
                    return
                yield data

        return stream()

    else:
        raise cherrypy.HTTPError(405)
def handle_download(self, reponame, distname, filename):
    repo = get_repo(db(), reponame, create_ok=False)
    pkg = db().query(PipPackage).filter(
        PipPackage.repo == repo,
        PipPackage.fname == filename).first()
    if not pkg:
        raise cherrypy.HTTPError(404)

    dpath = os.path.join(self.base.basepath, "repos", repo.name, "wheels",
                         pkg.fname[0].lower(), pkg.fname)

    if str(cherrypy.request.method) == "DELETE":
        db().delete(pkg)
        files = self.base.s3.list_objects(Bucket=self.base.bucket,
                                          Prefix=dpath).get("Contents")
        if files:
            self.base.s3.delete_object(Bucket=self.base.bucket, Key=dpath)
        db().commit()
        return "OK"

    elif str(cherrypy.request.method) == "GET":
        response = self.base.s3.get_object(Bucket=self.base.bucket, Key=dpath)
        cherrypy.response.headers["Content-Type"] = "binary/octet-stream"
        cherrypy.response.headers["Content-Length"] = response["ContentLength"]

        def stream():
            while True:
                data = response["Body"].read(65535)
                if not data:
                    return
                yield data

        return stream()

    else:
        raise cherrypy.HTTPError(405)
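# The chunked-read generator above is repeated in the apt, tar, and pypi download
# handlers; a shared helper could look like this sketch (not currently part of
# the codebase).
def stream_s3_body(body, chunk_size=65535):
    """Yield chunks from a boto3 StreamingBody until it is exhausted."""
    while True:
        data = body.read(chunk_size)
        if not data:
            return
        yield data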
def pubkey(self, reponame=None):
    cherrypy.response.headers['Content-Type'] = 'text/plain'
    return get_repo(db(), reponame, create_ok=False).gpgpubkey
def web_addpkg(self, reponame, name, version, fobj, dist):
    repo = get_repo(db(), reponame)
    dist = get_dist(db(), repo, dist)
    print("Dist:", dist)

    # - read f (write to temp storage if needed) and generate the hashes
    # - load with Dpkg to get name version and whatnot
    with TemporaryDirectory() as tdir:
        tmppkgpath = os.path.join(tdir, "temp.deb")
        with open(tmppkgpath, "wb") as fdest:
            fhashes = copyhash(fobj.file, fdest)
        fsize = os.path.getsize(tmppkgpath)

        p = Dpkg(tmppkgpath)
        pkgname = "{}_{}_{}.deb".format(p.message['Package'],
                                        p.message['Version'],
                                        p.message['Architecture'])

        #TODO keys can be duplicated in email.message.Message, does this cause any problems?
        fields = {key: p.message[key] for key in p.message.keys()}

        # s3 path - repos/<reponame>/packages/<dist>/f/foo.deb
        dpath = os.path.join(self.basepath, "repos", repo.name, "packages",
                             dist.name, pkgname[0], pkgname)

        files = self.s3.list_objects(Bucket=self.bucket, Prefix=dpath).get("Contents")
        if files:
            print(f"will overwrite: {files}")

        pkg = AptPackage(repo=repo,
                         dist=dist,
                         name=p.message['Package'],
                         version=p.message['Version'],
                         arch=p.message['Architecture'],
                         fname=pkgname,
                         size=fsize,
                         **fhashes,
                         fields=json.dumps(fields))
        db().add(pkg)
        db().commit()

        try:
            with open(tmppkgpath, "rb") as f:
                response = self.s3.put_object(Body=f, Bucket=self.bucket, Key=dpath)
                assert response["ResponseMetadata"]["HTTPStatusCode"] == 200, \
                    f"Upload failed: {response}"
        except Exception:
            db().delete(pkg)
            db().commit()
            raise

    dist.dirty = True
    db().commit()
    self.regen_dist(dist.id)

    yield "package name: {}\n".format(pkgname)
    yield "package size: {}\n".format(fsize)
    yield "package message:\n-----------------\n{}\n-----------------\n".format(p.message)
    yield "package hashes: {}\n".format(fhashes)
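# A minimal sketch of the copyhash() helper used above, assuming it copies the
# stream while computing several digests and returns a dict whose keys match the
# AptPackage hash columns; the exact key names here are assumptions.
import hashlib


def copyhash(fsrc, fdst, chunk_size=65535):
    """Copy fsrc to fdst in chunks and return hex digests of the copied data."""
    hashes = {"md5": hashlib.md5(), "sha1": hashlib.sha1(), "sha256": hashlib.sha256()}
    while True:
        chunk = fsrc.read(chunk_size)
        if not chunk:
            break
        for h in hashes.values():
            h.update(chunk)
        fdst.write(chunk)
    return {name: h.hexdigest() for name, h in hashes.items()}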