def get_or_create_shared_blob(digest, byte_data, storage):
    """
    Return the ImageStorage row with the given digest or, if not present, add
    a row and write the given byte data to the storage engine.

    This method is *only* to be used for shared blobs that are globally
    accessible, such as the special empty gzipped tar layer that Docker no
    longer pushes to us.
    """
    assert digest
    assert byte_data is not None and isinstance(byte_data, bytes)
    assert storage

    try:
        return ImageStorage.get(content_checksum=digest)
    except ImageStorage.DoesNotExist:
        # Write to the engine's preferred location only; shared blobs don't
        # need replication across all configured locations.
        preferred = storage.preferred_locations[0]
        location_obj = ImageStorageLocation.get(name=preferred)

        record = ImageStorage.create(image_size=len(byte_data), content_checksum=digest)
        try:
            storage.put_content([preferred], storage_model.get_layer_path(record), byte_data)
            ImageStoragePlacement.create(storage=record, location=location_obj)
        except Exception:
            # Was a bare `except:` — narrowed to Exception per convention.
            # Remove the orphaned ImageStorage row so a failed write doesn't
            # leave a checksum row with no backing content, then re-raise.
            logger.exception("Exception when trying to write special layer %s", digest)
            record.delete_instance()
            raise

        return record
def _populate_blob(content):
    """Store `content` as a blob in the local_us location; return (blob, digest)."""
    content_digest = str(sha256_digest(content))
    local_us = ImageStorageLocation.get(name="local_us")
    blob_record = store_blob_record_and_temp_link(
        "devtable",
        "newrepo",
        content_digest,
        local_us,
        len(content),
        120,
    )
    storage.put_content(["local_us"], get_layer_path(blob_record), content)
    return blob_record, content_digest
def test_store_blob_on_first_time_download(self, proxy_manifest_response):
    """Verify that a blob proxied from upstream is persisted to storage on its
    first download through the registry."""
    proxy_mock = proxy_manifest_response(
        self.tag,
        HELLO_WORLD_SCHEMA2_MANIFEST_JSON,
        DOCKER_SCHEMA2_MANIFEST_CONTENT_TYPE,
    )
    params = {"repository": self.repository, "digest": self.digest}

    with patch(
        "data.registry_model.registry_proxy_model.Proxy",
        MagicMock(return_value=proxy_mock),
    ), patch("endpoints.v2.blob.model_cache", NoopDataModelCache(TEST_CACHE_CONFIG)):
        conduct_call(
            self.client,
            "v2.download_blob",
            url_for,
            "GET",
            params,
            expected_code=200,
            headers=self.headers,
        )

    path = get_layer_path(self.blob)
    assert path is not None

    # The blob should now have a placement row; check the engine actually
    # holds content at the computed layer path for that location.
    placements = ImageStoragePlacement.filter(ImageStoragePlacement.storage == self.blob)
    locations = [placements.get().location.name]
    assert storage.exists(locations, path), f"blob not found in storage at path {path}"
def get_blob_bytes_with_digest(self, digest):
    """
    Return the raw bytes of the blob with the given digest, or None when no
    matching blob exists in this repository.
    """
    blob = get_repository_blob_by_digest(self.repository_id, digest)
    if blob is None:
        return None

    assert blob.locations is not None
    layer_path = get_layer_path(blob)
    return self.storage.get_content(blob.locations, layer_path)
def get_blob(layer):
    """Persist `layer` as a blob under local_us; return (blob, digest)."""
    encoded = Bytes.for_string_or_unicode(layer).as_encoded_str()
    layer_digest = str(sha256_digest(encoded))
    local_us = ImageStorageLocation.get(name="local_us")
    blob = store_blob_record_and_temp_link(
        self.orgname,
        self.upstream_repository,
        layer_digest,
        local_us,
        len(encoded),
        120,
    )
    storage.put_content(["local_us"], get_layer_path(blob), encoded)
    return blob, layer_digest
def get_blob_bytes_with_digest(self, digest):
    """
    Return the bytes of the blob with the given digest, or None if no such
    blob exists (or it could not be read from storage after retrying).
    """
    blob = get_repository_blob_by_digest(self.repository_id, digest)
    if blob is None:
        return None

    assert blob.locations is not None

    # NOTE: Some storage engines are eventually consistent, so we retry a few
    # times when retrieving the blob: it may only just have been written as
    # part of the push process.
    layer_path = get_layer_path(blob)
    for attempt in range(RETRY_COUNT):
        try:
            return self.storage.get_content(blob.locations, layer_path)
        except IOError:
            # Fix: the original slept once more after the *final* failed
            # attempt, adding RETRY_DELAY of useless latency before giving up.
            if attempt + 1 < RETRY_COUNT:
                time.sleep(RETRY_DELAY)
    return None