def package_versions_json(context, request):
    """Render the package versions in JSON format."""
    pkgs = package_versions(context, request)
    if not isinstance(pkgs, dict):
        return pkgs
    releases = {}
    newest = None
    for filename, url in six.iteritems(pkgs['pkgs']):
        _, version_str = parse_filename(filename)
        parsed = pkg_resources.parse_version(version_str)
        # Track the greatest version so we can surface its files under 'urls'
        if newest is None or parsed > newest:
            newest = parsed
        releases.setdefault(version_str, []).append({
            'filename': filename,
            'url': url,
        })
    response = {
        'info': {
            'name': context.name,
        },
        'releases': releases,
    }
    if newest is not None:
        response['urls'] = releases.get(str(newest), [])
    return response
def upload(request, content, name=None, version=None, summary=None, requires_python=None):
    """Handle update commands."""
    action = request.param(":action", "file_upload")
    # Direct uploads from the web UI go here, and don't have a name/version
    if name is None or version is None:
        name, version = parse_filename(content.filename)
    else:
        name = normalize_name(name)
    # Guard clause: reject anything that isn't a file upload up front.
    if action != "file_upload":
        return HTTPBadRequest("Unknown action '%s'" % action)
    if not request.access.has_permission(name, "write"):
        return request.forbid()
    try:
        return request.db.upload(
            content.filename,
            content.file,
            name=name,
            version=version,
            summary=summary,
            requires_python=requires_python or None,
        )
    except ValueError as e:
        # Duplicate package upload with overwrites disabled
        return HTTPConflict(*e.args)
def list(self, factory=Package):
    """Yield a package object for every key under the bucket prefix."""
    for listed in self.bucket.list(self.bucket_prefix):
        # Boto doesn't send down metadata from bucket.list(),
        # so we are forced to retrieve each key individually.
        key = self.bucket.get_key(listed.key)
        filename = posixpath.basename(key.key)
        name = key.get_metadata('name')
        version = key.get_metadata('version')
        summary = key.get_metadata('summary')
        # We used to not store metadata. This is for backwards
        # compatibility
        if name is None or version is None:
            try:
                name, version = parse_filename(filename)
            except ValueError:
                LOG.warning("S3 file %s has no package name", key.key)
                continue
        last_modified = boto.utils.parse_ts(key.last_modified)
        yield factory(name, version, filename, last_modified, summary,
                      path=key.key)
def package_versions_json(context, request):
    """Render the package versions in JSON format."""
    pkgs = _package_versions(context, request)
    if not isinstance(pkgs, dict):
        return pkgs
    releases = {}
    newest = None
    for filename, pkg in pkgs["pkgs"].items():
        _, version_str = parse_filename(filename)
        parsed = pkg_resources.parse_version(version_str)
        # Remember the greatest version; its files become the 'urls' list.
        if newest is None or parsed > newest:
            newest = parsed
        entry = {
            "filename": filename,
            "url": pkg.get("non_hashed_url", pkg["url"]),
            "requires_python": pkg["requires_python"],
        }
        # Only emit digests when the sha256 hash was computed at upload time.
        if pkg.get("hash_sha256"):
            entry["digests"] = {
                "md5": pkg["hash_md5"],
                "sha256": pkg["hash_sha256"],
            }
            entry["md5_digest"] = pkg["hash_md5"]
        releases.setdefault(version_str, []).append(entry)
    response = {"info": {"name": context.name}, "releases": releases}
    if newest is not None:
        response["urls"] = releases.get(str(newest), [])
    return response
def upload(request, content, name=None, version=None, summary=None):
    """
    Handle update commands

    Parameters
    ----------
    request : pyramid request
    content : cgi.FieldStorage
        The uploaded file (has ``filename`` and ``file`` attributes)
    name : str, optional
        Package name (parsed from the filename if not provided)
    version : str, optional
        Package version (parsed from the filename if not provided)
    summary : str, optional
        Package summary

    Returns
    -------
    The result of ``request.db.upload``, or an HTTP error response

    """
    action = request.param(":action", "file_upload")
    # Direct uploads from the web UI go here, and don't have a name/version
    if name is None or version is None:
        name, version = parse_filename(content.filename)
    else:
        name = normalize_name(name)
    if action == "file_upload":
        if not request.access.has_permission(name, "write"):
            return request.forbid()
        # NOTE: removed leftover debug print of type(content.file)
        try:
            # Read once so the same bytes back both the digest and the upload
            file_content = content.file.read()
            return request.db.upload(
                content.filename,
                file_content,
                name=name,
                digest=hashlib.sha256(file_content).hexdigest(),
                version=version,
                summary=summary,
            )
        except ValueError as e:
            # Duplicate package upload with overwrites disabled
            return HTTPConflict(*e.args)
    else:
        return HTTPBadRequest("Unknown action '%s'" % action)
def upload(
    self,
    filename: str,
    data: BinaryIO,
    name: Optional[str] = None,
    version: Optional[str] = None,
    summary: Optional[str] = None,
    requires_python: Optional[str] = None,
) -> Package:
    """
    Save this package to the storage mechanism and to the cache

    Parameters
    ----------
    filename : str
        Name of the package file
    data : file
        File-like readable object
    name : str, optional
        The name of the package (if not provided, will be parsed from filename)
    version : str, optional
        The version number of the package (if not provided, will be parsed
        from filename)
    summary : str, optional
        The summary of the package
    requires_python : str, optional
        The Python version requirement

    Returns
    -------
    package : :class:`~pypicloud.models.Package`
        The Package object that was uploaded

    Raises
    ------
    e : ValueError
        If the package already exists and allow_overwrite = False

    """
    # Fill in whichever of name/version is missing by parsing the filename.
    if version is None or name is None:
        name, version = parse_filename(filename, name)
    name = normalize_name(name)
    filename = posixpath.basename(filename)
    existing = self.fetch(filename)
    if existing is not None and not self.allow_overwrite:
        raise ValueError("Package '%s' already exists!" % filename)
    metadata = {"requires_python": requires_python}
    if self.calculate_hashes:
        # Consume the stream to hash it, then rewrap so upload() can read it.
        contents = data.read()
        metadata["hash_sha256"] = hashlib.sha256(contents).hexdigest()
        metadata["hash_md5"] = hashlib.md5(contents).hexdigest()
        data = BytesIO(contents)
    new_pkg = self.new_package(name, version, filename, summary=summary, **metadata)
    self.storage.upload(new_pkg, data)
    self.save(new_pkg)
    return new_pkg
def upload(
    self,
    filename,
    data,
    name=None,
    version=None,
    summary=None,
    requires_python=None,
):
    """
    Save this package to the storage mechanism and to the cache

    Parameters
    ----------
    filename : str
        Name of the package file
    data : file
        File-like readable object
    name : str, optional
        The name of the package (if not provided, will be parsed from filename)
    version : str, optional
        The version number of the package (if not provided, will be parsed
        from filename)
    summary : str, optional
        The summary of the package
    requires_python : str, optional
        The Python version requirement

    Returns
    -------
    package : :class:`~pypicloud.models.Package`
        The Package object that was uploaded

    Raises
    ------
    e : ValueError
        If the package already exists and allow_overwrite = False

    """
    # Parse when EITHER piece is missing: previously only `version is None`
    # was checked, so a caller supplying version but not name crashed in
    # normalize_name(None).
    if version is None or name is None:
        name, version = parse_filename(filename, name)
    name = normalize_name(name)
    filename = posixpath.basename(filename)
    old_pkg = self.fetch(filename)
    if old_pkg is not None and not self.allow_overwrite:
        raise ValueError("Package '%s' already exists!" % filename)
    new_pkg = self.package_class(
        name, version, filename, summary=summary, requires_python=requires_python
    )
    self.storage.upload(new_pkg, data)
    self.save(new_pkg)
    return new_pkg
def upload(request, content, name=None, version=None):
    """Handle update commands."""
    action = request.param(':action', 'file_upload')
    # Direct uploads from the web UI go here, and don't have a name/version
    if name is None or version is None:
        name, version = parse_filename(content.filename)
    else:
        name = normalize_name(name)
    # Guard clause: anything other than a file upload is rejected immediately.
    if action != 'file_upload':
        return HTTPBadRequest("Unknown action '%s'" % action)
    if not request.access.has_permission(name, 'write'):
        return request.forbid()
    try:
        return request.db.upload(content.filename, content.file,
                                 name=name, version=version)
    except ValueError as e:
        return HTTPBadRequest(*e.args)
def package_from_object(cls, obj, factory):
    """Create a package from a S3 object."""
    filename = posixpath.basename(obj.key)
    meta = obj.metadata
    name = meta.get("name")
    version = meta.get("version")
    summary = meta.get("summary")
    # We used to not store metadata. This is for backwards
    # compatibility
    if name is None or version is None:
        try:
            name, version = parse_filename(filename)
        except ValueError:
            LOG.warning("S3 file %s has no package name", obj.key)
            return None
    return factory(
        name, version, filename, obj.last_modified, summary, path=obj.key
    )
def package_from_object(cls, obj, factory):
    """Create a package from a S3 object."""
    filename = posixpath.basename(obj.key)
    meta = obj.metadata
    name = meta.get("name")
    version = meta.get("version")
    summary = meta.get("summary")
    digest = meta.get("digest")
    # We used to not store metadata. This is for backwards
    # compatibility
    if name is None or version is None:
        try:
            name, version = parse_filename(filename)
        except ValueError:
            LOG.warning("S3 file %s has no package name", obj.key)
            return None
    return factory(name, version, filename, digest, obj.last_modified, summary)
def package_versions_json(context, request):
    """Render the package versions in JSON format."""
    pkgs = _package_versions(context, request)
    if not isinstance(pkgs, dict):
        return pkgs
    releases = {}
    newest = None
    for filename, url in six.iteritems(pkgs["pkgs"]):
        _, version_str = parse_filename(filename)
        parsed = pkg_resources.parse_version(version_str)
        # Track the greatest version; its files populate the 'urls' key.
        if newest is None or parsed > newest:
            newest = parsed
        releases.setdefault(version_str, []).append(
            {"filename": filename, "url": url}
        )
    response = {"info": {"name": context.name}, "releases": releases}
    if newest is not None:
        response["urls"] = releases.get(str(newest), [])
    return response
def package_versions_json(context, request):
    """Render the package versions in JSON format."""
    pkgs = _package_versions(context, request)
    if not isinstance(pkgs, dict):
        return pkgs
    releases = {}
    newest = None
    for filename, pkg in six.iteritems(pkgs["pkgs"]):
        _, version_str = parse_filename(filename)
        parsed = pkg_resources.parse_version(version_str)
        # Track the greatest version; its files populate the 'urls' key.
        if newest is None or parsed > newest:
            newest = parsed
        releases.setdefault(version_str, []).append(
            {"filename": filename, "url": pkg["url"]}
        )
    response = {"info": {"name": context.name}, "releases": releases}
    if newest is not None:
        response["urls"] = releases.get(str(newest), [])
    return response
def handle_s3_event(event, context):
    """Handle S3 object notification."""
    from pypicloud.cache import get_cache_impl
    from pypicloud.storage.s3 import S3Storage
    from pypicloud.util import parse_filename

    settings = json.loads(os.environ["PYPICLOUD_SETTINGS"])
    # Set 'file' storage as a hack. We're going to load the cache, which will
    # load a storage. We won't actually be using the storage for anything, but
    # the settings have to be present.
    settings.setdefault("pypi.storage", "file")
    settings.setdefault("storage.dir", "/tmp")
    cache_impl = get_cache_impl(settings)
    cache = cache_impl(**cache_impl.configure(settings))

    s3 = boto3.resource("s3")
    for record in event["Records"]:
        bucket = record["s3"]["bucket"]["name"]
        key = record["s3"]["object"]["key"]
        if record["eventName"].startswith("ObjectCreated"):
            print("S3 object %r created" % key)
            obj = s3.Object(bucket, key)
            package = S3Storage.package_from_object(obj, cache.package_class)
            if cache.fetch(package.filename) is None:
                print("Saving package %s" % package)
                cache.save(package)
            else:
                print("Package already cached")
        else:
            print("S3 object %r deleted" % key)
            filename = posixpath.basename(key)
            try:
                name, version = parse_filename(filename)
            except ValueError:
                # Name/version are only used for display during deletion.
                name = version = "dummy"
            package = cache.package_class(
                name, version, filename, datetime.utcnow(), ""
            )
            print("Deleting package %s" % package)
            cache.clear(package)
def handle_s3_event(event, context):
    """Handle S3 object notification."""
    from pypicloud.cache import get_cache_impl
    from pypicloud.storage.s3 import package_from_object
    from pypicloud.util import parse_filename

    settings = json.loads(os.environ['PYPICLOUD_SETTINGS'])
    # Set 'file' storage as a hack. We're going to load the cache, which will
    # load a storage. We won't actually be using the storage for anything, but
    # the settings have to be present.
    settings.setdefault('pypi.storage', 'file')
    settings.setdefault('storage.dir', '/tmp')
    cache_impl = get_cache_impl(settings)
    cache = cache_impl(**cache_impl.configure(settings))

    s3 = boto3.resource('s3')
    for record in event['Records']:
        bucket = record['s3']['bucket']['name']
        key = record['s3']['object']['key']
        if record['eventName'].startswith('ObjectCreated'):
            print("S3 object %r created" % key)
            obj = s3.Object(bucket, key)
            package = package_from_object(obj, cache.package_class)
            if cache.fetch(package.filename) is None:
                print("Saving package %s" % package)
                cache.save(package)
            else:
                print("Package already cached")
        else:
            print("S3 object %r deleted" % key)
            filename = posixpath.basename(key)
            try:
                name, version = parse_filename(filename)
            except ValueError:
                # Name/version are only used for display during deletion.
                name = version = 'dummy'
            package = cache.package_class(name, version, filename,
                                          datetime.utcnow(), '')
            print("Deleting package %s" % package)
            cache.clear(package)
def upload(self, filename, data, name=None, version=None, summary=None):
    """
    Save this package to the storage mechanism and to the cache

    Parameters
    ----------
    filename : str
        Name of the package file
    data : file
        File-like readable object
    name : str, optional
        The name of the package (if not provided, will be parsed from filename)
    version : str, optional
        The version number of the package (if not provided, will be parsed
        from filename)
    summary : str, optional
        The summary of the package

    Returns
    -------
    package : :class:`~pypicloud.models.Package`
        The Package object that was uploaded

    Raises
    ------
    e : ValueError
        If the package already exists and allow_overwrite = False

    """
    # Parse when EITHER piece is missing: previously only `version is None`
    # was checked, so a caller supplying version but not name crashed in
    # normalize_name(None).
    if version is None or name is None:
        name, version = parse_filename(filename, name)
    name = normalize_name(name)
    filename = posixpath.basename(filename)
    old_pkg = self.fetch(filename)
    if old_pkg is not None and not self.allow_overwrite:
        raise ValueError("Package '%s' already exists!" % filename)
    new_pkg = self.package_class(name, version, filename, summary=summary)
    self.storage.upload(new_pkg, data)
    self.save(new_pkg)
    return new_pkg
def package_from_object(cls, obj, factory):
    """Create a package from a S3 object."""
    filename = posixpath.basename(obj.key)
    extra = Package.read_metadata(obj.metadata)
    name = obj.metadata.get("name")
    version = obj.metadata.get("version")
    # We used to not store metadata. This is for backwards
    # compatibility
    if name is None or version is None:
        try:
            name, version = parse_filename(filename)
        except PackageParseError:
            LOG.warning("S3 file %s has no package name", obj.key)
            return None
    return factory(
        name, version, filename, obj.last_modified, path=obj.key, **extra
    )
def list(self, factory=Package):
    """Yield a package object for every key under the bucket prefix."""
    for entry in self.bucket.list(self.bucket_prefix):
        # Moto doesn't send down metadata from bucket.list()
        key = self.bucket.get_key(entry.key) if self.test else entry
        filename = posixpath.basename(key.key)
        name = key.get_metadata('name')
        version = key.get_metadata('version')
        # We used to not store metadata. This is for backwards
        # compatibility
        if name is None or version is None:
            try:
                name, version = parse_filename(filename)
            except ValueError:
                LOG.warning("S3 file %s has no package name", key.key)
                continue
        last_modified = boto.utils.parse_ts(key.last_modified)
        yield factory(name, version, filename, last_modified, path=key.key)
def list(self, factory=Package):
    """Yield a package object for every key in the bucket."""
    for entry in self.bucket:
        # Moto doesn't send down metadata from bucket.list()
        key = self.bucket.get_key(entry.key) if self.test else entry
        filename = posixpath.basename(key.key)
        name = key.get_metadata('name')
        version = key.get_metadata('version')
        # We used to not store metadata. This is for backwards
        # compatibility
        if name is None or version is None:
            try:
                name, version = parse_filename(filename)
            except ValueError:
                LOG.warning("GCS file %s has no package name", key.key)
                continue
        last_modified = boto.utils.parse_ts(key.last_modified)
        yield factory(name, version, filename, last_modified, path=key.key)
def test_valid_source(self):
    """Parse a valid source package."""
    name, version = util.parse_filename("mypkg-1.1.tar.gz")
    self.assertEqual("mypkg", name)
    self.assertEqual("1.1", version)
def test_valid_source(self):
    """Parse a valid source package."""
    name, version = util.parse_filename("mypkg-1.1.tar.gz")
    self.assertEqual("mypkg", name)
    self.assertEqual("1.1", version)
def test_use_name(self):
    """Can pass in name to assist parsing."""
    name, version = util.parse_filename("mypkg-1.1-py2.py3-none-any.whl", "mypkg")
    self.assertEqual("mypkg", name)
    self.assertEqual("1.1", version)
def test_use_name(self):
    """Can pass in name to assist parsing."""
    name, version = util.parse_filename('mypkg-1.1-py2.py3-none-any.whl',
                                        'mypkg')
    self.assertEqual('mypkg', name)
    self.assertEqual('1.1', version)
def test_invalid_file_ext(self):
    """Parse fails on invalid file extension."""
    self.assertRaises(ValueError, util.parse_filename, 'mypkg-1.1.pdf')
def test_valid_source(self):
    """Parse a valid source package."""
    name, version = util.parse_filename('mypkg-1.1.tar.gz')
    self.assertEqual('mypkg', name)
    self.assertEqual('1.1', version)
def test_invalid_source(self):
    """Parse fails on invalid package name."""
    self.assertRaises(ValueError, util.parse_filename,
                      'invalid_package_name.tar.gz')
def test_valid_source(self):
    """Parse a valid source package."""
    parsed = util.parse_filename('mypkg-1.1.tar.gz')
    self.assertEqual('mypkg', parsed[0])
    self.assertEqual('1.1', parsed[1])
def test_invalid_file_ext(self):
    """Parse fails on invalid file extension."""
    self.assertRaises(ValueError, util.parse_filename, "mypkg-1.1.pdf")
def test_invalid_source(self):
    """Parse fails on invalid package name."""
    self.assertRaises(ValueError, util.parse_filename,
                      "invalid_package_name.tar.gz")
def test_valid_wheel(self):
    """Parse a valid wheel package."""
    name, version = util.parse_filename("mypkg-1.1-py2.py3-none-any.whl")
    self.assertEqual("mypkg", name)
    self.assertEqual("1.1", version)
def test_valid_wheel(self):
    """Parse a valid wheel package."""
    parsed = util.parse_filename('mypkg-1.1-py2.py3-none-any.whl')
    self.assertEqual('mypkg', parsed[0])
    self.assertEqual('1.1', parsed[1])
def test_use_name(self):
    """Can pass in name to assist parsing."""
    name, version = util.parse_filename(
        "mypkg-1.1-py2.py3-none-any.whl", "mypkg"
    )
    self.assertEqual("mypkg", name)
    self.assertEqual("1.1", version)
def test_use_name(self):
    """Can pass in name to assist parsing."""
    parsed = util.parse_filename('mypkg-1.1-py2.py3-none-any.whl', 'mypkg')
    self.assertEqual('mypkg', parsed[0])
    self.assertEqual('1.1', parsed[1])