def create_manifest_for_testing(repository, differentiation_field='1', include_shared_blob=False):
    """Create and persist a simple schema2 manifest in `repository` for tests.

    The manifest references an empty config blob and one remote layer whose
    URL is made unique by `differentiation_field`. When `include_shared_blob`
    is set, an additional local layer blob is added.

    Returns a tuple of (database Manifest row, parsed manifest).
    """
    # Populate a manifest.
    layer_json = json.dumps({
        'config': {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [],
    })

    # Add a blob containing the config.
    _, config_digest = _populate_blob(layer_json)

    remote_digest = sha256_digest('something')

    builder = DockerSchema2ManifestBuilder()
    # Use the encoded byte length for the config size, matching the other
    # create_manifest_for_testing helper in this file. The JSON payload here
    # is ASCII, so the reported size is unchanged.
    builder.set_config_digest(config_digest, len(layer_json.encode('utf-8')))
    builder.add_layer(remote_digest, 1234, urls=['http://hello/world' + differentiation_field])

    if include_shared_blob:
        _, blob_digest = _populate_blob('some data here')
        builder.add_layer(blob_digest, 4567)

    manifest = builder.build()

    created = get_or_create_manifest(repository, manifest, storage)
    assert created
    return created.manifest, manifest
def test_unencoded_unicode_manifest():
    """Layers parsed from a config stored with raw (non-ASCII-escaped) unicode
    preserve that unicode intact."""
    config = {
        "config": {
            "author": "Sômé guy",
        },
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "base",
                "author": "Sômé guy",
            },
        ],
    }

    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.add_layer("sha256:abc123", 123)
    manifest_builder.set_config_digest("sha256:def456", 2000)
    parsed = manifest_builder.build()

    # Serialize the config without ASCII-escaping, as a real engine might.
    retriever = ContentRetrieverForTesting.for_config(config, "sha256:def456",
                                                      2000, ensure_ascii=False)

    first_layer = list(parsed.get_layers(retriever))[0]
    assert first_layer.author == "Sômé guy"
def test_build_unencoded_unicode_manifest():
    """Building a manifest whose config blob contains raw unicode must not raise."""
    raw_config = {
        "config": {
            "author": "Sômé guy",
        },
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "base",
                "author": "Sômé guy",
            },
        ],
    }
    # Keep the unicode unescaped in the serialized config.
    config_obj = DockerSchema2Config(
        Bytes.for_string_or_unicode(json.dumps(raw_config, ensure_ascii=False)))

    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config(config_obj)
    manifest_builder.add_layer("sha256:abc123", 123)
    manifest_builder.build()
def _create_tag(repo, name):
    """Upload a config blob into `repo`, wrap it in a schema2 manifest with one
    remote layer, and retarget tag `name` at that manifest."""
    repo_ref = RepositoryReference.for_repo_obj(repo)

    with upload_blob(repo_ref, storage, BlobUploadSettings(500, 500)) as upload:
        app_config = {"TESTING": True}
        config_json = json.dumps({
            "config": {
                "author": "Repo Mirror",
            },
            "rootfs": {
                "type": "layers",
                "diff_ids": []
            },
            "history": [
                {
                    "created": "2019-07-30T18:37:09.284840891Z",
                    "created_by": "base",
                    "author": "Repo Mirror",
                },
            ],
        })
        upload.upload_chunk(app_config, BytesIO(config_json.encode("utf-8")))
        blob = upload.commit_to_blob(app_config)

    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config_digest(blob.digest, blob.compressed_size)
    manifest_builder.add_layer("sha256:abcd", 1234, urls=["http://hello/world"])
    manifest, tag = registry_model.create_manifest_and_retarget_tag(
        repo_ref, manifest_builder.build(), name, storage)
def test_remote_layer_manifest():
    """A manifest containing a remote (urls-backed) layer cannot be represented
    as schema1 and has no legacy image, but still reports every blob digest;
    only the non-remote digests are considered local."""
    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config_digest("sha256:abcd", 1234)
    manifest_builder.add_layer("sha256:adef", 1234, urls=["http://some/url"])
    manifest_builder.add_layer("sha256:1352", 4567)
    manifest_builder.add_layer("sha256:1353", 4567)
    built = manifest_builder.build()

    assert built.has_remote_layer
    assert built.get_leaf_layer_v1_image_id(None) is None
    assert built.get_legacy_image_ids(None) is None
    assert not built.has_legacy_image

    # No schema1 conversion is possible with a remote layer present.
    assert built.get_schema1_manifest("somenamespace", "somename", "sometag", None) is None

    assert set(built.blob_digests) == {
        "sha256:adef",
        "sha256:abcd",
        "sha256:1352",
        "sha256:1353",
    }
    assert set(built.local_blob_digests) == {
        "sha256:abcd",
        "sha256:1352",
        "sha256:1353",
    }
def create_manifest_for_testing(repository, differentiation_field="1"):
    """Create and persist a minimal schema2 manifest in `repository` with an
    empty config blob and a single remote layer whose URL is made unique by
    `differentiation_field`.

    Returns a tuple of (database Manifest row, parsed manifest).
    """
    config_json = json.dumps({
        "config": {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [],
    })

    # Store the config as a blob so the manifest can reference it.
    _, config_digest = _populate_blob(config_json)

    builder = DockerSchema2ManifestBuilder()
    builder.set_config_digest(config_digest, len(config_json.encode("utf-8")))
    builder.add_layer(sha256_digest("something"), 1234,
                      urls=["http://hello/world" + differentiation_field])
    manifest = builder.build()

    created = get_or_create_manifest(repository, manifest, storage)
    assert created
    return created.manifest, manifest
def build_schema2(self, images, blobs, options):
    """Build a schema2 manifest for `images`, registering layer and config
    blobs into the `blobs` dict (digest -> bytes).

    `options` flags honored here: `manifest_invalid_blob_references` swaps in a
    bogus layer digest, `with_broken_manifest_config` drops the history entry
    from the config, and `ensure_ascii` controls JSON serialization.
    """
    builder = DockerSchema2ManifestBuilder()
    for image in images:
        checksum = "sha256:" + hashlib.sha256(image.bytes).hexdigest()

        if image.urls is None:
            blobs[checksum] = image.bytes

        # If invalid blob references were requested, just make it up.
        # NOTE: hashlib requires bytes on Python 3; hashing a str raises
        # TypeError, so the placeholder must be a bytes literal.
        if options.manifest_invalid_blob_references:
            checksum = "sha256:" + hashlib.sha256(b"notarealthing").hexdigest()

        if not image.is_empty:
            builder.add_layer(checksum, len(image.bytes), urls=image.urls)

    def history_for_image(image):
        # One history entry per image; empty layers are marked as such.
        history = {
            "created": "2018-04-03T18:37:09.284840891Z",
            "created_by": (
                ("/bin/sh -c #(nop) ENTRYPOINT %s" % image.config["Entrypoint"])
                if image.config and image.config.get("Entrypoint")
                else "/bin/sh -c #(nop) %s" % image.id
            ),
        }

        if image.is_empty:
            history["empty_layer"] = True

        return history

    config = {
        "os": "linux",
        "rootfs": {"type": "layers", "diff_ids": []},
        "history": [history_for_image(image) for image in images],
    }

    if options.with_broken_manifest_config:
        # NOTE: We are missing the history entry on purpose.
        config = {
            "os": "linux",
            "rootfs": {"type": "layers", "diff_ids": []},
        }

    if images and images[-1].config:
        config["config"] = images[-1].config

    config_json = json.dumps(config, ensure_ascii=options.ensure_ascii)
    schema2_config = DockerSchema2Config(
        Bytes.for_string_or_unicode(config_json),
        skip_validation_for_testing=options.with_broken_manifest_config,
    )
    builder.set_config(schema2_config)

    blobs[schema2_config.digest] = schema2_config.bytes.as_encoded_str()
    return builder.build(ensure_ascii=options.ensure_ascii)
def test_create_manifest_and_retarget_tag_with_labels_with_existing_manifest(
        oci_model):
    """Tags pointed at a manifest carrying a quay.expires-after label receive
    an end date, whether the manifest is newly created, re-pushed, or
    retargeted via the tag API."""
    # Create a config blob for testing.
    config_json = json.dumps({
        "config": {
            "Labels": {
                "quay.expires-after": "2w",
            },
        },
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    app_config = {"TESTING": True}
    repository_ref = oci_model.lookup_repository("devtable", "simple")
    with upload_blob(repository_ref, storage, BlobUploadSettings(500, 500)) as upload:
        upload.upload_chunk(app_config, BytesIO(config_json.encode("utf-8")))
        blob = upload.commit_to_blob(app_config)

    # Create the manifest in the repo.
    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config_digest(blob.digest, blob.compressed_size)
    manifest_builder.add_layer("sha256:abcd", 1234, urls=["http://hello/world"])
    built_manifest = manifest_builder.build()

    first_manifest, first_tag = oci_model.create_manifest_and_retarget_tag(
        repository_ref, built_manifest, "some_tag", storage)
    assert first_manifest is not None
    assert first_tag is not None
    assert first_tag.lifetime_end_ms is not None

    # Retarget a second tag at the existing manifest, as a push would (the
    # push path attempts manifest creation first); it should have an end date.
    second_manifest, second_tag = oci_model.create_manifest_and_retarget_tag(
        repository_ref, built_manifest, "some_other_tag", storage)
    assert second_manifest is not None
    assert second_manifest == first_manifest
    assert second_tag is not None
    assert second_tag.lifetime_end_ms is not None

    # Retarget a third tag via Quay's tag API (no manifest creation attempt);
    # it should also have an end date.
    third_tag = oci_model.retarget_tag(repository_ref, "yet_another_tag",
                                       second_manifest, storage,
                                       docker_v2_signing_key)
    assert third_tag.lifetime_end_ms is not None
def test_generate_legacy_layers():
    """generate_legacy_layers synthesizes one v1 layer per history entry,
    substituting the empty-layer blob for `empty_layer` entries and chaining
    parent image ids in order."""
    builder = DockerSchema2ManifestBuilder()
    builder.add_layer("sha256:abc123", 123)
    builder.add_layer("sha256:def456", 789)
    builder.set_config_digest("sha256:def456", 2000)
    manifest = builder.build()

    retriever = ContentRetrieverForTesting.for_config(
        {
            "config": {},
            "rootfs": {
                "type": "layers",
                "diff_ids": []
            },
            "history": [
                {
                    "created": "2018-04-03T18:37:09.284840891Z",
                    "created_by": "base"
                },
                {
                    "created": "2018-04-06T18:37:09.284840891Z",
                    "created_by": "middle",
                    "empty_layer": True,
                },
                {
                    "created": "2018-04-12T18:37:09.284840891Z",
                    "created_by": "leaf"
                },
            ],
        },
        "sha256:def456",
        2000,
    )

    legacy_layers = list(manifest.generate_legacy_layers({}, retriever))
    assert len(legacy_layers) == 3

    # The empty history entry maps to the canonical empty-layer blob.
    assert legacy_layers[0].content_checksum == "sha256:abc123"
    assert legacy_layers[1].content_checksum == EMPTY_LAYER_BLOB_DIGEST
    assert legacy_layers[2].content_checksum == "sha256:def456"

    assert legacy_layers[0].created == "2018-04-03T18:37:09.284840891Z"
    assert legacy_layers[1].created == "2018-04-06T18:37:09.284840891Z"
    assert legacy_layers[2].created == "2018-04-12T18:37:09.284840891Z"

    assert legacy_layers[0].command == '["base"]'
    assert legacy_layers[1].command == '["middle"]'
    assert legacy_layers[2].command == '["leaf"]'

    assert legacy_layers[2].parent_image_id == legacy_layers[1].image_id
    assert legacy_layers[1].parent_image_id == legacy_layers[0].image_id
    assert legacy_layers[0].parent_image_id is None

    # FIX: the original compared an image_id against a whole layer object
    # (always unequal); compare the ids themselves so the assertion is real.
    assert legacy_layers[1].image_id != legacy_layers[2].image_id
    assert legacy_layers[0].image_id != legacy_layers[1].image_id
def test_schema2_builder():
    """Round-trip check: rebuilding a parsed manifest from its config digest
    and filesystem layers reproduces the same config and layers."""
    parsed = DockerSchema2Manifest(Bytes.for_string_or_unicode(MANIFEST_BYTES))

    rebuilder = DockerSchema2ManifestBuilder()
    rebuilder.set_config_digest(parsed.config.digest, parsed.config.size)
    for fs_layer in parsed.filesystem_layers:
        rebuilder.add_layer(fs_layer.digest, fs_layer.compressed_size,
                            urls=fs_layer.urls)

    rebuilt = rebuilder.build()
    assert rebuilt.filesystem_layers == parsed.filesystem_layers
    assert rebuilt.config == parsed.config
def build_schema2(self, images, blobs, options):
    """Build a schema2 manifest for `images`, registering layer and config
    blobs into the `blobs` dict (digest -> bytes).

    Honors `options.manifest_invalid_blob_references` (swap in a bogus layer
    digest) and `options.ensure_ascii` (JSON serialization mode).
    """
    builder = DockerSchema2ManifestBuilder()
    for image in images:
        checksum = 'sha256:' + hashlib.sha256(image.bytes).hexdigest()

        if image.urls is None:
            blobs[checksum] = image.bytes

        # If invalid blob references were requested, just make it up.
        # NOTE: hashlib requires bytes on Python 3; hashing a str raises
        # TypeError, so the placeholder must be a bytes literal.
        if options.manifest_invalid_blob_references:
            checksum = 'sha256:' + hashlib.sha256(b'notarealthing').hexdigest()

        if not image.is_empty:
            builder.add_layer(checksum, len(image.bytes), urls=image.urls)

    def history_for_image(image):
        # One history entry per image; empty layers are marked as such.
        history = {
            'created': '2018-04-03T18:37:09.284840891Z',
            'created_by': (('/bin/sh -c #(nop) ENTRYPOINT %s' %
                            image.config['Entrypoint'])
                           if image.config and image.config.get('Entrypoint')
                           else '/bin/sh -c #(nop) %s' % image.id),
        }

        if image.is_empty:
            history['empty_layer'] = True

        return history

    config = {
        "os": "linux",
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [history_for_image(image) for image in images],
    }

    # Guard against an empty image list (the newer build_schema2 in this file
    # does the same); indexing images[-1] unconditionally raises IndexError.
    if images and images[-1].config:
        config['config'] = images[-1].config

    config_json = json.dumps(config, ensure_ascii=options.ensure_ascii)
    schema2_config = DockerSchema2Config(
        Bytes.for_string_or_unicode(config_json))
    builder.set_config(schema2_config)

    blobs[schema2_config.digest] = schema2_config.bytes.as_encoded_str()
    return builder.build(ensure_ascii=options.ensure_ascii)
def test_raises_exception_with_docker_v2_manifest_to_v1(self):
    """Requesting a schema1 conversion of a schema2 manifest through the proxy
    model raises ManifestException when raise_on_error is set."""
    def store_as_blob(payload):
        # Persist `payload` as a blob with a temporary link; return (blob, digest).
        encoded = Bytes.for_string_or_unicode(payload).as_encoded_str()
        payload_digest = str(sha256_digest(encoded))
        blob_record = store_blob_record_and_temp_link(
            self.orgname,
            self.upstream_repository,
            payload_digest,
            ImageStorageLocation.get(name="local_us"),
            len(encoded),
            120,
        )
        storage.put_content(["local_us"], get_layer_path(blob_record), encoded)
        return blob_record, payload_digest

    config_payload = json.dumps({
        "config": {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [{}],
    })
    _, config_digest = store_as_blob(config_payload)

    layer_payload = "hello world"
    _, layer_digest = store_as_blob(layer_payload)

    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config_digest(config_digest,
                                       len(config_payload.encode("utf-8")))
    manifest_builder.add_layer(layer_digest, len(layer_payload.encode("utf-8")))
    built = manifest_builder.build()

    created_manifest = get_or_create_manifest(self.repo_ref.id, built, storage)
    assert created_manifest is not None

    proxy_model = ProxyModel(
        self.orgname,
        self.upstream_repository,
        self.user,
    )
    manifest_obj = ManifestType.for_manifest(created_manifest.manifest, MagicMock())
    with pytest.raises(ManifestException):
        proxy_model.get_schema1_parsed_manifest(
            manifest_obj,
            self.orgname,
            self.upstream_repository,
            self.tag,
            storage,
            raise_on_error=True,
        )
def test_remote_layer_manifest():
    """A manifest with a remote layer has no legacy/v1 representation, but
    still reports every blob digest; only non-remote digests are local."""
    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config_digest('sha256:abcd', 1234)
    manifest_builder.add_layer('sha256:adef', 1234, urls=['http://some/url'])
    manifest_builder.add_layer('sha256:1352', 4567)
    manifest_builder.add_layer('sha256:1353', 4567)
    built = manifest_builder.build()

    assert built.has_remote_layer
    assert built.get_leaf_layer_v1_image_id(None) is None
    assert built.get_legacy_image_ids(None) is None
    assert not built.has_legacy_image

    # No schema1 conversion is possible with a remote layer present.
    assert built.get_schema1_manifest('somenamespace', 'somename',
                                      'sometag', None) is None

    assert set(built.blob_digests) == {'sha256:adef', 'sha256:abcd',
                                       'sha256:1352', 'sha256:1353'}
    assert set(built.local_blob_digests) == {'sha256:abcd', 'sha256:1352',
                                             'sha256:1353'}
def test_create_manifest_cannot_load_config_blob(initialized_db):
    """get_or_create_manifest raises CreateManifestException when the supplied
    retriever cannot load the referenced config blob."""
    repository = create_repository("devtable", "newrepo", None)

    config_payload = json.dumps({
        "config": {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    # Add a blob containing the config.
    _, config_digest = _populate_blob(config_payload)

    # Add a blob of random data.
    random_data = "hello world"
    _, random_digest = _populate_blob(random_data)

    remote_digest = sha256_digest(b"something")

    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config_digest(config_digest,
                                       len(config_payload.encode("utf-8")))
    manifest_builder.add_layer(random_digest, len(random_data.encode("utf-8")))
    built = manifest_builder.build()

    # Writing the manifest must fail: the broken retriever cannot fetch
    # the config blob.
    broken_retriever = BrokenRetriever()
    with pytest.raises(CreateManifestException):
        get_or_create_manifest(repository, built, storage,
                               retriever=broken_retriever,
                               raise_on_error=True)
def test_manifest_remote_layers(oci_model):
    """A manifest whose only layer is remote lists exactly one layer, marked
    remote with its URLs and with no local blob."""
    # Create a config blob for testing.
    config_json = json.dumps({
        'config': {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    app_config = {'TESTING': True}
    repository_ref = oci_model.lookup_repository('devtable', 'simple')
    with upload_blob(repository_ref, storage, BlobUploadSettings(500, 500, 500)) as upload:
        # FIX: BytesIO requires bytes on Python 3; encode the JSON config
        # first (as the other upload tests in this file do).
        upload.upload_chunk(app_config, BytesIO(config_json.encode('utf-8')))
        blob = upload.commit_to_blob(app_config)

    # Create the manifest in the repo.
    builder = DockerSchema2ManifestBuilder()
    builder.set_config_digest(blob.digest, blob.compressed_size)
    builder.add_layer('sha256:abcd', 1234, urls=['http://hello/world'])
    manifest = builder.build()

    created_manifest, _ = oci_model.create_manifest_and_retarget_tag(
        repository_ref, manifest, 'sometag', storage)
    assert created_manifest

    layers = oci_model.list_parsed_manifest_layers(
        repository_ref, created_manifest.get_parsed_manifest(), storage)
    assert len(layers) == 1
    assert layers[0].layer_info.is_remote
    assert layers[0].layer_info.urls == ['http://hello/world']
    assert layers[0].blob is None
def test_get_or_create_manifest(schema_version, initialized_db):
    """Create a manifest (schema1 or schema2, per `schema_version`) and verify
    persistence, lookup, the temporary tag, blob links, idempotency of
    re-creation, and label import from the config."""
    repository = create_repository("devtable", "newrepo", None)

    # Labels embedded in the config; get_or_create_manifest should surface them.
    expected_labels = {
        "Foo": "Bar",
        "Baz": "Meh",
    }

    layer_json = json.dumps({
        "id": "somelegacyid",
        "config": {
            "Labels": expected_labels,
        },
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    # Create a legacy image.
    find_create_or_link_image("somelegacyid", repository, "devtable", {}, "local_us")

    # Add a blob containing the config.
    _, config_digest = _populate_blob(layer_json)

    # Add a blob of random data.
    random_data = "hello world"
    _, random_digest = _populate_blob(random_data)

    # Build the manifest.
    if schema_version == 1:
        builder = DockerSchema1ManifestBuilder("devtable", "simple", "anothertag")
        builder.add_layer(random_digest, layer_json)
        sample_manifest_instance = builder.build(docker_v2_signing_key)
    elif schema_version == 2:
        builder = DockerSchema2ManifestBuilder()
        builder.set_config_digest(config_digest, len(layer_json.encode("utf-8")))
        builder.add_layer(random_digest, len(random_data.encode("utf-8")))
        sample_manifest_instance = builder.build()

    assert sample_manifest_instance.layers_compressed_size is not None

    # Create a new manifest.
    created_manifest = get_or_create_manifest(repository,
                                              sample_manifest_instance, storage)
    created = created_manifest.manifest
    newly_created = created_manifest.newly_created

    # The stored row must mirror the parsed manifest exactly.
    assert newly_created
    assert created is not None
    assert created.media_type.name == sample_manifest_instance.media_type
    assert created.digest == sample_manifest_instance.digest
    assert created.manifest_bytes == sample_manifest_instance.bytes.as_encoded_str(
    )
    assert created_manifest.labels_to_apply == expected_labels
    assert created.config_media_type == sample_manifest_instance.config_media_type
    assert created.layers_compressed_size == sample_manifest_instance.layers_compressed_size

    # Lookup the manifest and verify.
    found = lookup_manifest(repository, created.digest, allow_dead=True)
    assert found.digest == created.digest
    assert found.config_media_type == created.config_media_type
    assert found.layers_compressed_size == created.layers_compressed_size

    # Verify it has a temporary tag pointing to it.
    assert Tag.get(manifest=created, hidden=True).lifetime_end_ms

    # Verify the linked blobs.
    blob_digests = [
        mb.blob.content_checksum
        for mb in ManifestBlob.select().where(ManifestBlob.manifest == created)
    ]

    assert random_digest in blob_digests
    if schema_version == 2:
        # Only schema2 references the config as a blob.
        assert config_digest in blob_digests

    # Retrieve it again and ensure it is the same manifest (idempotency).
    created_manifest2 = get_or_create_manifest(repository,
                                               sample_manifest_instance, storage)
    created2 = created_manifest2.manifest
    newly_created2 = created_manifest2.newly_created

    assert not newly_created2
    assert created2 == created

    # Ensure it again has a temporary tag.
    assert Tag.get(manifest=created2, hidden=True).lifetime_end_ms

    # Ensure the labels were added.
    labels = list(list_manifest_labels(created))
    assert len(labels) == 2

    labels_dict = {label.key: label.value for label in labels}
    assert labels_dict == expected_labels
def test_retriever(initialized_db):
    """RepositoryContentRetriever returns the stored manifest bytes and each
    linked blob for a freshly created schema2 manifest, even after temp-tag
    Image rows referencing those blobs have been cleaned up."""
    repository = create_repository("devtable", "newrepo", None)

    layer_json = json.dumps({
        "config": {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    # Add a blob containing the config.
    _, config_digest = _populate_blob(layer_json)

    # Add a blob of random data.
    random_data = "hello world"
    _, random_digest = _populate_blob(random_data)

    # Add another blob of random data.
    other_random_data = "hi place"
    _, other_random_digest = _populate_blob(other_random_data)

    # NOTE(review): remote_digest is never used below — presumably leftover
    # from a copied test; confirm before removing.
    remote_digest = sha256_digest(b"something")

    builder = DockerSchema2ManifestBuilder()
    builder.set_config_digest(config_digest, len(layer_json.encode("utf-8")))
    builder.add_layer(other_random_digest, len(other_random_data.encode("utf-8")))
    builder.add_layer(random_digest, len(random_data.encode("utf-8")))
    manifest = builder.build()

    # All three digests are referenced and local (no remote layers present).
    assert config_digest in manifest.blob_digests
    assert random_digest in manifest.blob_digests
    assert other_random_digest in manifest.blob_digests

    assert config_digest in manifest.local_blob_digests
    assert random_digest in manifest.local_blob_digests
    assert other_random_digest in manifest.local_blob_digests

    # Write the manifest.
    created_tuple = get_or_create_manifest(repository, manifest, storage)
    assert created_tuple is not None

    created_manifest = created_tuple.manifest
    assert created_manifest
    assert created_manifest.media_type.name == manifest.media_type
    assert created_manifest.digest == manifest.digest

    # Verify the linked blobs.
    blob_digests = {
        mb.blob.content_checksum
        for mb in ManifestBlob.select().where(
            ManifestBlob.manifest == created_manifest)
    }

    assert random_digest in blob_digests
    assert other_random_digest in blob_digests
    assert config_digest in blob_digests

    # Delete any Image rows linking to the blobs from temp tags.
    for blob_digest in blob_digests:
        storage_row = ImageStorage.get(content_checksum=blob_digest)
        for image in list(Image.select().where(Image.storage == storage_row)):
            # Only delete images referenced exclusively by hidden
            # (temporary) tags; visible tags must keep their images.
            all_temp = all([
                rt.hidden for rt in RepositoryTag.select().where(
                    RepositoryTag.image == image)
            ])
            if all_temp:
                RepositoryTag.delete().where(
                    RepositoryTag.image == image).execute()
                image.delete_instance(recursive=True)

    # Verify the blobs in the retriever.
    retriever = RepositoryContentRetriever(repository, storage)
    assert (retriever.get_manifest_bytes_with_digest(
        created_manifest.digest) == manifest.bytes.as_encoded_str())

    for blob_digest in blob_digests:
        assert retriever.get_blob_bytes_with_digest(blob_digest) is not None
def test_get_or_create_manifest_with_remote_layers(initialized_db):
    """Manifests with remote layers are persisted without linking the remote
    blob, and expose neither a legacy image nor a schema1 form."""
    repository = create_repository("devtable", "newrepo", None)

    config_payload = json.dumps({
        "config": {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    # Store the config blob and one local data blob.
    _, config_digest = _populate_blob(config_payload)
    random_data = "hello world"
    _, random_digest = _populate_blob(random_data)

    remote_digest = sha256_digest(b"something")

    manifest_builder = DockerSchema2ManifestBuilder()
    manifest_builder.set_config_digest(config_digest,
                                       len(config_payload.encode("utf-8")))
    manifest_builder.add_layer(remote_digest, 1234, urls=["http://hello/world"])
    manifest_builder.add_layer(random_digest, len(random_data.encode("utf-8")))
    built = manifest_builder.build()

    # The remote digest is referenced but not local.
    assert remote_digest in built.blob_digests
    assert remote_digest not in built.local_blob_digests
    assert built.has_remote_layer
    assert not built.has_legacy_image
    assert built.get_schema1_manifest("foo", "bar", "baz", None) is None

    # Write the manifest.
    created_tuple = get_or_create_manifest(repository, built, storage)
    assert created_tuple is not None

    created_manifest = created_tuple.manifest
    assert created_manifest
    assert created_manifest.media_type.name == built.media_type
    assert created_manifest.digest == built.digest
    assert created_manifest.config_media_type == built.config_media_type
    assert created_manifest.layers_compressed_size == built.layers_compressed_size

    # No legacy image should exist for a remote-layer manifest.
    legacy_image = get_legacy_image_for_manifest(created_manifest)
    assert legacy_image is None

    # Only the local blobs get linked; the remote digest is skipped.
    linked_digests = {
        mb.blob.content_checksum
        for mb in ManifestBlob.select().where(
            ManifestBlob.manifest == created_manifest)
    }
    assert random_digest in linked_digests
    assert config_digest in linked_digests
    assert remote_digest not in linked_digests
def test_get_or_create_manifest_list_duplicate_child_manifest(initialized_db):
    """A manifest list referencing the same child manifest twice results in a
    single child link row, and re-creating the list returns the same row."""
    repository = create_repository("devtable", "newrepo", None)

    expected_labels = {
        "Foo": "Bar",
        "Baz": "Meh",
    }

    layer_json = json.dumps({
        "id": "somelegacyid",
        "config": {
            "Labels": expected_labels,
        },
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    # Create a legacy image.
    find_create_or_link_image("somelegacyid", repository, "devtable", {}, "local_us")

    # Add a blob containing the config.
    _, config_digest = _populate_blob(layer_json)

    # Add a blob of random data.
    random_data = "hello world"
    _, random_digest = _populate_blob(random_data)

    # Build the manifest.
    v2_builder = DockerSchema2ManifestBuilder()
    v2_builder.set_config_digest(config_digest, len(layer_json.encode("utf-8")))
    v2_builder.add_layer(random_digest, len(random_data.encode("utf-8")))
    v2_manifest = v2_builder.build()

    # Write the manifest.
    v2_created = get_or_create_manifest(repository, v2_manifest, storage)
    assert v2_created
    assert v2_created.manifest.digest == v2_manifest.digest

    # Build the manifest list, with the child manifest repeated.
    list_builder = DockerSchema2ManifestListBuilder()
    list_builder.add_manifest(v2_manifest, "amd64", "linux")
    list_builder.add_manifest(v2_manifest, "amd32", "linux")
    manifest_list = list_builder.build()

    # Write the manifest list, which should also write the manifests themselves.
    created_tuple = get_or_create_manifest(repository, manifest_list, storage)
    assert created_tuple is not None

    created_list = created_tuple.manifest
    assert created_list
    assert created_list.media_type.name == manifest_list.media_type
    assert created_list.digest == manifest_list.digest

    # Ensure the child manifest links exist; the duplicated child must
    # collapse into a single link.
    child_manifests = {
        cm.child_manifest.digest: cm.child_manifest
        for cm in ManifestChild.select().where(
            ManifestChild.manifest == created_list)
    }
    assert len(child_manifests) == 1
    assert v2_manifest.digest in child_manifests
    assert child_manifests[
        v2_manifest.digest].media_type.name == v2_manifest.media_type

    # Try to create again and ensure we get back the same manifest list.
    created2_tuple = get_or_create_manifest(repository, manifest_list, storage)
    assert created2_tuple is not None
    assert created2_tuple.manifest == created_list
def setup(self, client, app, proxy_manifest_response):
    """Per-test setup: build pull credentials and headers, ensure the
    proxy-cache organization, config, and repository exist, and seed a
    schema2 manifest plus its blobs for the proxied-repo tests."""
    self.client = client
    self.user = model.user.get_user("devtable")

    # Build a pull-scoped bearer token for the test repository.
    context, subject = build_context_and_subject(
        ValidatedAuthContext(user=self.user))
    access = [{
        "type": "repository",
        "name": self.repository,
        "actions": ["pull"],
    }]
    token = generate_bearer_token(realapp.config["SERVER_HOSTNAME"], subject,
                                  context, access, 600, instance_keys)
    self.headers = {
        "Authorization": f"Bearer {token.decode('ascii')}",
    }

    if self.org is None:
        # FIX: the email string was missing its f-prefix, producing the
        # literal "{self.orgname}@devtable.com" instead of interpolating.
        self.org = model.organization.create_organization(
            self.orgname, f"{self.orgname}@devtable.com", self.user)
        self.org.save()
        self.config = model.proxy_cache.create_proxy_cache_config(
            org_name=self.orgname,
            upstream_registry=self.registry,
            expiration_s=3600,
        )

    if self.repo_ref is None:
        r = model.repository.create_repository(self.orgname, self.image_name,
                                               self.user)
        assert r is not None
        self.repo_ref = registry_model.lookup_repository(
            self.orgname, self.image_name)
        assert self.repo_ref is not None

    def get_blob(layer):
        # Persist `layer` as a blob with a temporary link; return (blob, digest).
        content = Bytes.for_string_or_unicode(layer).as_encoded_str()
        digest = str(sha256_digest(content))
        blob = model.blob.store_blob_record_and_temp_link(
            self.orgname,
            self.image_name,
            digest,
            ImageStorageLocation.get(name="local_us"),
            len(content),
            120,
        )
        storage.put_content(["local_us"], get_layer_path(blob), content)
        return blob, digest

    if self.manifest is None:
        layer1 = json.dumps({
            "config": {},
            "rootfs": {
                "type": "layers",
                "diff_ids": []
            },
            "history": [{}],
        })
        _, config_digest = get_blob(layer1)

        layer2 = "test"
        _, blob_digest = get_blob(layer2)

        builder = DockerSchema2ManifestBuilder()
        builder.set_config_digest(config_digest, len(layer1.encode("utf-8")))
        builder.add_layer(blob_digest, len(layer2.encode("utf-8")))
        manifest = builder.build()

        created_manifest = model.oci.manifest.get_or_create_manifest(
            self.repo_ref.id, manifest, storage)
        self.manifest = created_manifest.manifest
        # self.digest is expected to be pre-set to the seed layer's digest.
        assert self.digest == blob_digest
        assert self.manifest is not None

    if self.blob is None:
        self.blob = ImageStorage.filter(
            ImageStorage.content_checksum == self.digest).get()
def test_get_or_create_manifest_list(initialized_db):
    """Creating a manifest list writes both child manifests (schema1 and
    schema2) and links each of them to the created list row."""
    repository = create_repository('devtable', 'newrepo', None)

    expected_labels = {
        'Foo': 'Bar',
        'Baz': 'Meh',
    }

    layer_json = json.dumps({
        'id': 'somelegacyid',
        'config': {
            'Labels': expected_labels,
        },
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    # Create a legacy image.
    find_create_or_link_image('somelegacyid', repository, 'devtable', {}, 'local_us')

    # Add a blob containing the config.
    _, config_digest = _populate_blob(layer_json)

    # Add a blob of random data.
    random_data = 'hello world'
    _, random_digest = _populate_blob(random_data)

    # Build the manifests.
    v1_builder = DockerSchema1ManifestBuilder('devtable', 'simple', 'anothertag')
    v1_builder.add_layer(random_digest, layer_json)
    v1_manifest = v1_builder.build(docker_v2_signing_key).unsigned()

    v2_builder = DockerSchema2ManifestBuilder()
    # NOTE(review): sizes are str lengths, equal to the byte lengths here
    # because both payloads are ASCII; the newer tests use encoded lengths.
    v2_builder.set_config_digest(config_digest, len(layer_json))
    v2_builder.add_layer(random_digest, len(random_data))
    v2_manifest = v2_builder.build()

    # Write the manifests.
    v1_created = get_or_create_manifest(repository, v1_manifest, storage)
    assert v1_created
    assert v1_created.manifest.digest == v1_manifest.digest

    v2_created = get_or_create_manifest(repository, v2_manifest, storage)
    assert v2_created
    assert v2_created.manifest.digest == v2_manifest.digest

    # Build the manifest list.
    list_builder = DockerSchema2ManifestListBuilder()
    list_builder.add_manifest(v1_manifest, 'amd64', 'linux')
    list_builder.add_manifest(v2_manifest, 'amd32', 'linux')
    manifest_list = list_builder.build()

    # Write the manifest list, which should also write the manifests themselves.
    created_tuple = get_or_create_manifest(repository, manifest_list, storage)
    assert created_tuple is not None

    created_list = created_tuple.manifest
    assert created_list
    assert created_list.media_type.name == manifest_list.media_type
    assert created_list.digest == manifest_list.digest

    # Ensure the child manifest links exist.
    child_manifests = {
        cm.child_manifest.digest: cm.child_manifest
        for cm in ManifestChild.select().where(
            ManifestChild.manifest == created_list)
    }
    assert len(child_manifests) == 2
    assert v1_manifest.digest in child_manifests
    assert v2_manifest.digest in child_manifests

    assert child_manifests[
        v1_manifest.digest].media_type.name == v1_manifest.media_type
    assert child_manifests[
        v2_manifest.digest].media_type.name == v2_manifest.media_type
def test_derived_image_for_manifest_list(oci_model):
    """Derived (squashed) images can be created and looked up for a manifest
    list containing an amd64+linux child manifest, and repeated lookups and
    creations return the same derived image."""
    # Clear all existing derived storage.
    DerivedStorageForImage.delete().execute()

    # Create a config blob for testing.
    config_json = json.dumps({
        'config': {},
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    app_config = {'TESTING': True}
    repository_ref = oci_model.lookup_repository('devtable', 'simple')
    with upload_blob(repository_ref, storage, BlobUploadSettings(500, 500, 500)) as upload:
        # FIX: BytesIO requires bytes on Python 3; encode the JSON config
        # before uploading (as the other upload tests in this file do).
        upload.upload_chunk(app_config, BytesIO(config_json.encode('utf-8')))
        blob = upload.commit_to_blob(app_config)

    # Create the manifest in the repo.
    builder = DockerSchema2ManifestBuilder()
    builder.set_config_digest(blob.digest, blob.compressed_size)
    builder.add_layer(blob.digest, blob.compressed_size)
    amd64_manifest = builder.build()

    oci_model.create_manifest_and_retarget_tag(repository_ref, amd64_manifest,
                                               'submanifest', storage)

    # Create a manifest list, pointing to at least one amd64+linux manifest.
    builder = DockerSchema2ManifestListBuilder()
    builder.add_manifest(amd64_manifest, 'amd64', 'linux')
    manifestlist = builder.build()

    oci_model.create_manifest_and_retarget_tag(repository_ref, manifestlist,
                                               'listtag', storage)

    manifest = oci_model.get_manifest_for_tag(
        oci_model.get_repo_tag(repository_ref, 'listtag'))
    assert manifest
    assert manifest.get_parsed_manifest().is_manifest_list

    # Ensure the squashed image doesn't exist.
    assert oci_model.lookup_derived_image(manifest, 'squash', storage, {}) is None

    # Create a new one.
    squashed = oci_model.lookup_or_create_derived_image(
        manifest, 'squash', 'local_us', storage, {})
    assert squashed.unique_id
    assert oci_model.lookup_or_create_derived_image(
        manifest, 'squash', 'local_us', storage, {}) == squashed

    # Perform lookup.
    assert oci_model.lookup_derived_image(manifest, 'squash', storage, {}) == squashed
def test_get_or_create_manifest(schema_version, initialized_db):
    """Create a manifest (schema1 or schema2, per `schema_version`) and verify
    persistence, the temporary tag, the legacy image, blob links, idempotency
    of re-creation, and label import from the config."""
    repository = create_repository('devtable', 'newrepo', None)

    # Labels embedded in the config; get_or_create_manifest should surface them.
    expected_labels = {
        'Foo': 'Bar',
        'Baz': 'Meh',
    }

    layer_json = json.dumps({
        'id': 'somelegacyid',
        'config': {
            'Labels': expected_labels,
        },
        "rootfs": {
            "type": "layers",
            "diff_ids": []
        },
        "history": [
            {
                "created": "2018-04-03T18:37:09.284840891Z",
                "created_by": "do something",
            },
        ],
    })

    # Create a legacy image.
    find_create_or_link_image('somelegacyid', repository, 'devtable', {}, 'local_us')

    # Add a blob containing the config.
    _, config_digest = _populate_blob(layer_json)

    # Add a blob of random data.
    random_data = 'hello world'
    _, random_digest = _populate_blob(random_data)

    # Build the manifest.
    if schema_version == 1:
        builder = DockerSchema1ManifestBuilder('devtable', 'simple', 'anothertag')
        builder.add_layer(random_digest, layer_json)
        sample_manifest_instance = builder.build(docker_v2_signing_key)
    elif schema_version == 2:
        builder = DockerSchema2ManifestBuilder()
        # NOTE(review): sizes are str lengths, equal to the byte lengths here
        # because both payloads are ASCII; newer tests use encoded lengths.
        builder.set_config_digest(config_digest, len(layer_json))
        builder.add_layer(random_digest, len(random_data))
        sample_manifest_instance = builder.build()

    # Create a new manifest.
    created_manifest = get_or_create_manifest(repository,
                                              sample_manifest_instance, storage)
    created = created_manifest.manifest
    newly_created = created_manifest.newly_created

    # The stored row must mirror the parsed manifest exactly.
    assert newly_created
    assert created is not None
    assert created.media_type.name == sample_manifest_instance.media_type
    assert created.digest == sample_manifest_instance.digest
    assert created.manifest_bytes == sample_manifest_instance.bytes.as_encoded_str(
    )
    assert created_manifest.labels_to_apply == expected_labels

    # Verify it has a temporary tag pointing to it.
    assert Tag.get(manifest=created, hidden=True).lifetime_end_ms

    # Verify the legacy image.
    legacy_image = get_legacy_image_for_manifest(created)
    assert legacy_image is not None
    assert legacy_image.storage.content_checksum == random_digest

    # Verify the linked blobs.
    blob_digests = [
        mb.blob.content_checksum
        for mb in ManifestBlob.select().where(ManifestBlob.manifest == created)
    ]

    assert random_digest in blob_digests
    if schema_version == 2:
        # Only schema2 references the config as a blob.
        assert config_digest in blob_digests

    # Retrieve it again and ensure it is the same manifest (idempotency).
    created_manifest2 = get_or_create_manifest(repository,
                                               sample_manifest_instance, storage)
    created2 = created_manifest2.manifest
    newly_created2 = created_manifest2.newly_created

    assert not newly_created2
    assert created2 == created

    # Ensure it again has a temporary tag.
    assert Tag.get(manifest=created2, hidden=True).lifetime_end_ms

    # Ensure the labels were added.
    labels = list(list_manifest_labels(created))
    assert len(labels) == 2

    labels_dict = {label.key: label.value for label in labels}
    assert labels_dict == expected_labels