def _get_from_store(context, where):
    """Fetch image data and size for *where* from the backing store.

    Translates a backend NotFound into an HTTP 404 response.
    Returns a (data, size) pair; size is an int or None.
    """
    try:
        data, size = get_from_backend(context, where)
    except exception.NotFound as e:
        raise HTTPNotFound(explanation="%s" % e)
    # Backends may report a falsy/str size; normalize to int or None.
    size = int(size) if size else None
    return data, size
def get_from_store(image_meta):
    """Fetch image data for *image_meta* directly from its backend store.

    Side effect: updates image_meta["size"] with the backend-reported
    size when one is available.

    :param image_meta: mapping with at least a 'location' key
    :returns: the image data fetched from the backend
    :raises HTTPNotFound: when the backend cannot find the image
    """
    try:
        location = image_meta['location']
        image_data, image_size = get_from_backend(location)
        image_meta["size"] = image_size or image_meta["size"]
    except exception.NotFound as e:  # `as` form: valid py2.6+ and py3 (was py2-only `, e`)
        raise HTTPNotFound(explanation="%s" % e)
    # Bug fix: image_data was computed but never returned to the caller.
    return image_data
def fetch_image_into_cache(self, image_id):
    """Pull the image *image_id* from its backend store and write it,
    chunk by chunk, into the local image cache."""
    # Admin context with show_deleted so the registry record is always readable.
    admin_ctx = context.RequestContext(is_admin=True, show_deleted=True)
    meta = registry.get_image_metadata(self.options, admin_ctx, image_id)
    with self.cache.open(meta, "wb") as cache_out:
        for piece in get_from_backend(meta["location"],
                                      expected_size=meta["size"],
                                      options=self.options):
            cache_out.write(piece)
def get_from_store(image_meta):
    """Called if caching disabled.

    Fetches the image directly from its backend and, as a side effect,
    updates image_meta["size"] with the backend-reported size when one
    is available.

    :param image_meta: mapping with at least a "location" key
    :returns: the image data fetched from the backend
    :raises HTTPNotFound: when the backend cannot find the image
    """
    try:
        location = image_meta["location"]
        image_data, image_size = get_from_backend(location)
        image_meta["size"] = image_size or image_meta["size"]
    except exception.NotFound as e:  # `as` form: valid py2.6+ and py3 (was py2-only `, e`)
        raise HTTPNotFound(explanation="%s" % e)
    # Bug fix: image_data was computed but never returned to the caller.
    return image_data
def test_https_get(self):
    """An https URL fetch yields the teapot fixture in 2-byte chunks."""
    url = "https://netloc/path/to/file.tar.gz"
    expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
                        'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
    fetcher = get_from_backend(url, expected_size=8)
    # Drain the iterator and compare against the expected chunking.
    self.assertEqual(list(fetcher), expected_returns)
def fetch_image_into_cache(self, image_id):
    """Copy the backend data for image *image_id* into the local cache."""
    # Admin + show_deleted so deleted images' metadata is still visible.
    ctx = context.RequestContext(is_admin=True, show_deleted=True)
    image_meta = registry.get_image_metadata(self.options, ctx, image_id)
    location = image_meta['location']
    size = image_meta['size']
    with self.cache.open(image_meta, "wb") as dest:
        source = get_from_backend(location,
                                  expected_size=size,
                                  options=self.options)
        for block in source:
            dest.write(block)
def test_get(self):
    """An s3:// URI fetch yields the teapot fixture in 2-byte chunks."""
    s3_uri = "s3://user:password@localhost/bucket1/file.tar.gz"
    expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
                        'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
    fetcher = get_from_backend(s3_uri,
                               expected_size=8,
                               conn_class=S3Backend)
    # Drain the iterator and compare against the expected chunking.
    self.assertEqual(list(fetcher), expected_returns)
def fetch_image_into_cache(self, image_id):
    """Cache image *image_id* if it is active.

    Returns True when the image was cached, False when it has no
    metadata or is not in the 'active' state.
    """
    ctx = context.RequestContext(is_admin=True, show_deleted=True)
    try:
        meta = registry.get_image_metadata(ctx, image_id)
        if meta['status'] != 'active':
            LOG.warn(_("Image '%s' is not active. Not caching."), image_id)
            return False
    except exception.NotFound:
        LOG.warn(_("No metadata found for image '%s'"), image_id)
        return False
    data_iter, data_size = get_from_backend(ctx, meta['location'])
    LOG.debug(_("Caching image '%s'"), image_id)
    self.cache.cache_image_iter(image_id, data_iter)
    return True
def fetch_image_into_cache(self, image_id):
    """Cache image *image_id* if it is active.

    Returns True when the image was cached, False when its metadata is
    missing or it is not in the 'active' state.
    """
    ctx = context.RequestContext(is_admin=True, show_deleted=True)
    try:
        meta = registry.get_image_metadata(ctx, image_id)
        if meta['status'] != 'active':
            LOG.warn(_("Image '%s' is not active. Not caching."), image_id)
            return False
    except exception.NotFound:
        LOG.warn(_("No metadata found for image '%s'"), image_id)
        return False
    # NOTE: location alone identifies the data here (no context argument).
    data_iter, data_size = get_from_backend(meta['location'])
    LOG.debug(_("Caching image '%s'"), image_id)
    self.cache.cache_image_iter(image_id, data_iter)
    return True
def get_from_store(image):
    """Called if caching disabled"""
    # NOTE(review): `self` is not a parameter here — this def presumably
    # lives inside a method and captures `self` from the enclosing scope;
    # confirm before moving it.
    location = image['location']
    size_hint = image['size']
    return get_from_backend(location,
                            expected_size=size_hint,
                            options=self.options)
def get_from_store(image):
    """Called if caching disabled"""
    # Fetch straight from the backend at the image's recorded location.
    location = image['location']
    return get_from_backend(location)
def _get_from_store(context, where):
    """Read an image from the backend at *where*.

    :param context: request context passed through to the backend
    :param where: backend location of the image
    :returns: (image_data, image_size) as reported by the backend
    :raises HTTPNotFound: when the backend cannot find the image
    """
    try:
        image_data, image_size = get_from_backend(context, where)
    except exception.NotFound as e:  # `as` form: valid py2.6+ and py3 (was py2-only `, e`)
        raise HTTPNotFound(explanation="%s" % e)
    # Bug fix: the fetched data/size were previously computed and dropped.
    return image_data, image_size
def get_from_store(image):
    """Called if caching disabled.

    :param image: mapping with at least a 'location' key
    :returns: the image data fetched from the backend
    :raises HTTPNotFound: when the backend cannot find the image
    """
    try:
        # Bug fix: the result was previously rebound to the local `image`
        # and discarded; return it to the caller instead.
        return get_from_backend(image['location'])
    except exception.NotFound as e:  # `as` form: valid py2.6+ and py3 (was py2-only `, e`)
        raise HTTPNotFound(explanation="%s" % e)