def test_known_vs_other_annotations(self):
    """Each pullspec annotation must be found and replaced exactly once.

    The heuristic must not look in keys that are known pullspec sources;
    if it did, the chained replacement map below would rewrite a value
    twice ('a.b/c:1' -> 'd.e/f:1' -> 'g.h/i:1').
    """
    # All annotation must be found and replaced exactly once, heuristic
    # must not look in keys that are known pullspec sources
    data = {
        'kind': 'ClusterServiceVersion',
        'metadata': {
            'annotations': {
                'containerImage': 'a.b/c:1',
                'notContainerImage': 'a.b/c:1'
            }
        },
        'spec': {
            'metadata': {
                'annotations': {
                    'containerImage': 'a.b/c:1',
                    'notContainerImage': 'a.b/c:1'
                }
            }
        }
    }
    # Deliberately chained mapping: double replacement would yield 'g.h/i:1'
    replacements = {
        ImageName.parse(old): ImageName.parse(new) for old, new in [
            ('a.b/c:1', 'd.e/f:1'),
            ('d.e/f:1', 'g.h/i:1'),
        ]
    }
    self._mock_check_csv()
    csv = OperatorCSV("original.yaml", data)
    csv.replace_pullspecs(replacements)

    # Every value was replaced exactly once (single hop of the chain)
    assert csv.data["metadata"]["annotations"]["containerImage"] == 'd.e/f:1'
    assert csv.data["metadata"]["annotations"]["notContainerImage"] == 'd.e/f:1'
    assert csv.data["spec"]["metadata"]["annotations"]["containerImage"] == 'd.e/f:1'
    assert csv.data["spec"]["metadata"]["annotations"]["notContainerImage"] == 'd.e/f:1'
def test_image_name_parse_image_name(caplog):
    """Parsing an existing ImageName warns and returns the very same object."""
    expected_warning = 'Attempting to parse ImageName test:latest as an ImageName'

    parsed = ImageName.parse("test")
    # Parsing a plain string must not emit the warning
    assert expected_warning not in caplog.text

    reparsed = ImageName.parse(parsed)
    # Re-parsing an ImageName logs the warning and is an identity no-op
    assert expected_warning in caplog.text
    assert parsed is reparsed
def test_base_image_missing_labels(self, workflow, koji_session, remove_labels, exp_result,
                                   external, caplog):
    """Missing identifying labels: hard failure for OSBS-built parents, warning for external.

    ``remove_labels`` parametrizes which labels are deleted from the inspection data.
    """
    base_tag = ImageName.parse('base:stubDigest')
    base_inspect = {INSPECT_CONFIG: {'Labels': BASE_IMAGE_LABELS_W_ALIASES.copy()}}
    parent_inspect = {INSPECT_CONFIG: {'Labels': BASE_IMAGE_LABELS_W_ALIASES.copy()}}
    workflow.builder.set_inspection_data(base_inspect)
    workflow.builder.set_parent_inspection_data(base_tag, parent_inspect)
    # Strip the parametrized labels from both the base and parent inspection data
    for label in remove_labels:
        del workflow.builder._inspection_data[INSPECT_CONFIG]['Labels'][label]
        del workflow.builder._parent_inspection_data[base_tag][INSPECT_CONFIG]['Labels'][label]

    if not exp_result:
        if not external:
            # OSBS-built parent without identifying labels must fail the plugin run
            with pytest.raises(PluginFailedException) as exc:
                self.run_plugin_with_args(workflow, expect_result=exp_result,
                                          external_base=external)
            assert 'Was this image built in OSBS?' in str(exc.value)
        else:
            # External parent: plugin records a None koji build and only warns
            result = {PARENT_IMAGES_KOJI_BUILDS: {ImageName.parse('base'): None}}
            self.run_plugin_with_args(workflow, expect_result=result,
                                      external_base=external)
            assert 'Was this image built in OSBS?' in caplog.text
    else:
        self.run_plugin_with_args(workflow, expect_result=exp_result)
def test_base_image_missing_labels(self, workflow, koji_session, remove_labels, exp_result,
                                   external, caplog):
    """Missing identifying labels: hard failure for OSBS-built parents, warning for external.

    Variant that mocks ``workflow.imageutil`` instead of mutating builder state.
    """
    base_tag = ImageName.parse('base:stubDigest')
    base_inspect = {INSPECT_CONFIG: {'Labels': BASE_IMAGE_LABELS_W_ALIASES.copy()}}
    parent_inspect = {INSPECT_CONFIG: {'Labels': BASE_IMAGE_LABELS_W_ALIASES.copy()}}
    # Strip the parametrized labels before the inspection data is mocked in
    for label in remove_labels:
        del base_inspect[INSPECT_CONFIG]['Labels'][label]
        del parent_inspect[INSPECT_CONFIG]['Labels'][label]

    flexmock(workflow.imageutil).should_receive('base_image_inspect').and_return(base_inspect)
    (flexmock(workflow.imageutil)
        .should_receive('get_inspect_for_image')
        .with_args(base_tag)
        .and_return(parent_inspect))

    if not exp_result:
        if not external:
            # OSBS-built parent without identifying labels must fail the plugin run
            with pytest.raises(PluginFailedException) as exc:
                self.run_plugin_with_args(workflow, expect_result=exp_result,
                                          external_base=external)
            assert 'Was this image built in OSBS?' in str(exc.value)
        else:
            # External parent: plugin records a None koji build (keyed by string) and warns
            result = {
                PARENT_IMAGES_KOJI_BUILDS: {
                    ImageName.parse('base').to_str(): None,
                }
            }
            self.run_plugin_with_args(workflow, expect_result=result,
                                      external_base=external)
            assert 'Was this image built in OSBS?' in caplog.text
    else:
        self.run_plugin_with_args(workflow, expect_result=exp_result)
def test_pull_raises_retry_error(workflow, caplog):
    """A pull that keeps raising must exhaust retries and log the failed pullspec."""
    if MOCK:
        mock_docker(remember_images=True)

    # retry_times=1 keeps the test fast while still exercising the retry path
    tasker = DockerTasker(retry_times=1)
    workflow.builder = MockBuilder()
    image_name = ImageName.parse(IMAGE_RAISE_RETRYGENERATOREXCEPTION)
    base_image_str = "{}/{}:{}".format(SOURCE_REGISTRY, image_name.repo, 'some')
    source_registry = image_name.registry
    workflow.builder.dockerfile_images = DockerfileImages([base_image_str])
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
        ReactorConfig({'version': 1,
                       'source_registry': {'url': source_registry,
                                           'insecure': True}})

    runner = PreBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': PullBaseImagePlugin.key,
            'args': {},
        }],
    )
    with pytest.raises(Exception):
        runner.run()

    # The log must mention the image with the source registry applied
    exp_img = ImageName.parse(base_image_str)
    exp_img.registry = source_registry
    assert 'failed to pull image: {}'.format(exp_img.to_str()) in caplog.text
def test_save_and_load(self, tmpdir):
    """Test save workflow data and then load them back properly.

    Round-trips ImageName objects appearing as dict keys, dict values and
    list items, and validates the serialized JSON against the schema.
    """
    tag_conf = TagConf()
    tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
    tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

    wf_data = ImageBuildWorkflowData(
        dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
        # Test object in dict values is serialized
        tag_conf=tag_conf,
        plugins_results={
            "plugin_a": {
                'parent-images-koji-builds': {
                    ImageName(repo='base', tag='latest').to_str(): {
                        'id': 123456789,
                        'nvr': 'base-image-1.0-99',
                        'state': 1,
                    },
                },
            },
            "tag_and_push": [
                # Such object in a list should be handled properly.
                ImageName(registry="localhost:5000", repo='image', tag='latest'),
            ],
            "image_build": {"logs": ["Build succeeds."]},
        },
        koji_upload_files=[
            {
                "local_filename": "/path/to/build1.log",
                "dest_filename": "x86_64-build.log",
            },
            {
                "local_filename": "/path/to/dir1/remote-source.tar.gz",
                "dest_filename": "remote-source.tar.gz",
            },
        ])

    context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))
    wf_data.save(context_dir)

    assert context_dir.workflow_json.exists()

    # Verify the saved data matches the schema
    saved_data = json.loads(context_dir.workflow_json.read_bytes())
    try:
        validate_with_schema(saved_data, "schemas/workflow_data.json")
    except osbs.exceptions.OsbsValidationException as e:
        pytest.fail(f"The dumped workflow data does not match JSON schema: {e}")

    # Load and verify the loaded data
    loaded_wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)

    assert wf_data.dockerfile_images == loaded_wf_data.dockerfile_images
    assert wf_data.tag_conf == loaded_wf_data.tag_conf
    assert wf_data.plugins_results == loaded_wf_data.plugins_results
def test_get_built_images(workflow, manifest_version):
    """get_built_images must return one BuiltImage per platform with the right digests."""
    MockEnv(workflow).set_check_platforms_result(["ppc64le", "x86_64"])
    workflow.data.tag_conf.add_unique_image(UNIQUE_IMAGE)
    # Mock a v2 registry holding one per-arch manifest for each platform
    _, platform_digests = mock_registries(
        [REGISTRY_V2],
        {
            "ppc64le": {REGISTRY_V2: ["namespace/httpd:2.4-ppc64le"]},
            "x86_64": {REGISTRY_V2: ["namespace/httpd:2.4-x86_64"]},
        },
        schema_version=manifest_version,
    )
    ppc_digest = platform_digests["ppc64le"]["digests"][0]["digest"]
    x86_digest = platform_digests["x86_64"]["digests"][0]["digest"]

    flexmock(ManifestUtil).should_receive("__init__")  # and do nothing, this test doesn't use it
    plugin = GroupManifestsPlugin(workflow)
    session = RegistrySession(REGISTRY_V2)
    # Platform suffix is appended to the unique image tag for each arch
    assert plugin.get_built_images(session) == [
        BuiltImage(
            pullspec=ImageName.parse(f"{UNIQUE_IMAGE}-ppc64le"),
            platform="ppc64le",
            manifest_digest=ppc_digest,
            manifest_version=manifest_version,
        ),
        BuiltImage(
            pullspec=ImageName.parse(f"{UNIQUE_IMAGE}-x86_64"),
            platform="x86_64",
            manifest_digest=x86_digest,
            manifest_version=manifest_version,
        ),
    ]
def test_update_dockerfile_images_from_config(self, tmp_path, images_exist, organization):
    """Parent images get the source registry (and optional organization) from config."""
    config = REQUIRED_CONFIG
    if organization:
        config += "\nregistries_organization: " + organization

    config_yaml = tmp_path / 'config.yaml'
    config_yaml.write_text(dedent(config), "utf-8")

    if images_exist:
        parent_images = ['parent:latest', 'base:latest']
        # Expected keys are sorted; organization is enclosed when configured
        if organization:
            expect_images = [ImageName.parse('source_registry.com/organization/base:latest'),
                             ImageName.parse('source_registry.com/organization/parent:latest')]
        else:
            expect_images = [ImageName.parse('source_registry.com/base:latest'),
                             ImageName.parse('source_registry.com/parent:latest')]
    else:
        parent_images = []

    dockerfile_images = DockerfileImages(parent_images)
    conf = Configuration(config_path=str(config_yaml))
    conf.update_dockerfile_images_from_config(dockerfile_images)

    if images_exist:
        assert len(dockerfile_images) == 2
        assert dockerfile_images.keys() == expect_images
    else:
        # No parent images -> nothing to update
        assert not dockerfile_images
def test_deep_digests_with_requested_arches(self, workflow, koji_session, caplog,
                                            manifest_list, requested_platforms,
                                            expected_logs, not_expected_logs):  # noqa
    """Deep inspection with explicitly requested arches logs the expected messages."""
    registry = 'example.com'
    image_str = '{}/base:latest'.format(registry)
    extra = {'image': {'index': {'digests': {V2_LIST: 'stubDigest'}}}}
    # Digest recorded for the parent; must NOT be what deep inspection ends up using
    parent_tag = 'notExpectedDigest'
    workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = requested_platforms
    koji_build = dict(nvr='base-image-1.0-99', id=KOJI_BUILD_ID, state=KOJI_STATE_COMPLETE,
                      extra=extra)
    (koji_session.should_receive('getBuild')
        .and_return(koji_build))
    # Single amd64 image archive attached to the koji build
    archives = [{
        'btype': 'image',
        'extra': {
            'docker': {
                'config': {
                    'architecture': 'amd64'
                },
                'digests': {
                    V2: 'stubDigest'}}}}]
    (koji_session.should_receive('listArchives')
        .and_return(archives))
    name, version, release = koji_build['nvr'].rsplit('-', 2)
    labels = {'com.redhat.component': name, 'version': version, 'release': release}
    image_inspect = {INSPECT_CONFIG: {'Labels': labels}}
    flexmock(workflow.builder, parent_image_inspect=image_inspect)
    if manifest_list:
        response = flexmock(content=json.dumps(manifest_list))
    else:
        response = {}
    (flexmock(atomic_reactor.util.RegistryClient)
        .should_receive('get_manifest')
        .and_return((response, None)))
    expected_result = {BASE_IMAGE_KOJI_BUILD: KOJI_BUILD,
                       PARENT_IMAGES_KOJI_BUILDS: {
                           ImageName.parse(image_str): KOJI_BUILD}}
    workflow.builder.parent_images_digests = {image_str: {V2_LIST: parent_tag}}
    workflow.builder.set_dockerfile_images([image_str])
    # Pin the dockerfile image to the recorded digest tag
    image_for_key = ImageName.parse(image_str)
    image_for_key.tag = parent_tag
    workflow.builder.dockerfile_images[image_str] = image_for_key.to_str()
    self.run_plugin_with_args(workflow, expect_result=expected_result,
                              deep_inspection=True,
                              pull_registries=[{'url': registry}])
    for log in expected_logs:
        assert log in caplog.text
    for log in not_expected_logs:
        assert log not in caplog.text
def _find_image(img, ignore_registry=False):
    """Look up a mock image record by its tagged name.

    :param img: str or ImageName, image to search for
    :param ignore_registry: bool, also accept a match with the registry stripped
    :return: dict, the matching mock image record, or None when not found
    """
    wanted = ImageName.parse(img).to_str(explicit_tag=True)
    for candidate in mock_images:
        candidate_name = candidate['RepoTags'][0]
        if candidate_name == wanted:
            return candidate
        # Optionally retry the comparison without the registry component
        if ignore_registry and ImageName.parse(candidate_name).to_str(registry=False) == wanted:
            return candidate
    return None
def _get_replacement_pullspecs_from_csv_modifications(self, pullspecs):
    """Replace components of pullspecs based on externally provided CSV modifications

    :param pullspecs: a list of pullspecs.
    :type pullspecs: list[ImageName]
    :return: a list of replacement result. Each of the replacement result
        is a mapping containing key/value pairs:

        * ``original``: ImageName, the original pullspec.
        * ``new``: ImageName, the replaced/non-replaced pullspec.
        * ``pinned``: bool, indicate whether the tag is replaced with a
          specific digest.
        * ``replaced``: bool, indicate whether the new pullspec has change
          of repository or registry.

    :rtype: list[dict[str, ImageName or bool]]
    :raises RuntimeError: if provided CSV modification doesn't contain all
        required pullspecs or contain different ones
    """
    operator_csv_modifications = self._fetch_operator_csv_modifications()
    mod_pullspec_repl = operator_csv_modifications.get('pullspec_replacements', [])

    # check if modification data contains all required pullspecs
    pullspecs_set = set(pullspecs)
    mod_pullspecs_set = {ImageName.parse(p['original']) for p in mod_pullspec_repl}

    missing = pullspecs_set - mod_pullspecs_set
    if missing:
        raise RuntimeError(
            f"Provided operator CSV modifications misses following pullspecs: "
            f"{', '.join(sorted(str(p) for p in missing))}"
        )

    extra = mod_pullspecs_set - pullspecs_set
    if extra:
        # FIX: use ', ' as separator to match the 'missing pullspecs' message above
        # (previously ','.join, producing inconsistently formatted error text)
        raise RuntimeError(
            f"Provided operator CSV modifications defines extra pullspecs: "
            f"{', '.join(sorted(str(p) for p in extra))}"
        )

    # Copy replacements from provided CSV modifications file, fill missing 'replaced' field
    replacements = [
        {
            'original': ImageName.parse(repl['original']),
            'new': ImageName.parse(repl['new']),
            'pinned': repl['pinned'],
            # string comparison is sufficient: identical pullspec text means no change
            'replaced': repl['original'] != repl['new']
        }
        for repl in mod_pullspec_repl
    ]
    return replacements
def test_as_dict(self):
    """as_dict() exposes primary/unique/floating images as parsed ImageName lists."""
    tag_conf = TagConf()
    tag_conf.add_primary_image('r.fp.o/f:35')
    for floating_tag in ('ns/img:latest', 'ns1/img2:devel'):
        tag_conf.add_floating_image(floating_tag)

    expected = {
        'primary_images': [ImageName.parse('r.fp.o/f:35')],
        'unique_images': [],
        'floating_images': [
            ImageName.parse('ns/img:latest'),
            ImageName.parse('ns1/img2:devel'),
        ],
    }
    assert tag_conf.as_dict() == expected
def workflow_callback(workflow):
    """Set up RegistryClient mocks for the manifest-list / tag-fallback scenario.

    Uses ``sha_is_manifest_list`` from the enclosing test scope: when True the
    sha pullspec itself resolves to a manifest list; when False the plugin must
    fall back to the version-release tag.
    """
    workflow = self.prepare(workflow, mock_get_manifest_list=False)
    release = 'rel1'
    version = 'ver1'
    config_blob = {'config': {'Labels': {'release': release, 'version': version}}}
    # Config blob is only fetched on the fallback path (once per platform)
    (flexmock(atomic_reactor.util.RegistryClient)
        .should_receive('get_config_from_registry')
        .and_return(config_blob)
        .times(0 if sha_is_manifest_list else 2))
    manifest_list = {
        'manifests': [
            {'platform': {'architecture': 'amd64'}, 'digest': 'sha256:123456'},
            {'platform': {'architecture': 'ppc64le'}, 'digest': 'sha256:654321'},
        ]
    }

    manifest_tag = SOURCE_REGISTRY + '/' + BASE_IMAGE_W_SHA
    base_image_result = ImageName.parse(manifest_tag)
    manifest_image_original = base_image_result.copy()

    if sha_is_manifest_list:
        (flexmock(atomic_reactor.util.RegistryClient)
            .should_receive('get_manifest_list')
            .with_args(manifest_image_original)
            .and_return(flexmock(json=lambda: manifest_list,
                                 content=json.dumps(manifest_list).encode('utf-8')))
            .once())
    else:
        # First lookups by sha find nothing ...
        (flexmock(atomic_reactor.util.RegistryClient)
            .should_receive('get_manifest_list')
            .with_args(manifest_image_original)
            .and_return(None)
            .times(2))
        # ... so the plugin retries with the '<version>-<release>' docker tag
        docker_tag = '{}-{}'.format(version, release)
        manifest_tag = '{}/{}:{}'. \
            format(SOURCE_REGISTRY,
                   BASE_IMAGE_W_SHA[:BASE_IMAGE_W_SHA.find('@sha256')],
                   docker_tag)
        base_image_result = ImageName.parse(manifest_tag)
        manifest_image_new = base_image_result.copy()
        (flexmock(atomic_reactor.util.RegistryClient)
            .should_receive('get_manifest_list')
            .with_args(manifest_image_new)
            .and_return(flexmock(json=lambda: manifest_list,
                                 content=json.dumps(manifest_list).encode('utf-8')))
            .times(2))
    return workflow
def test_create_image(tmpdir, insecure_registry, namespace, organization, monkeypatch,
                      reactor_config_map, user_params):
    """
    Test that an ImageStream is created if not found
    """
    runner = prepare(tmpdir, insecure_registry=insecure_registry, namespace=namespace,
                     organization=organization, reactor_config_map=reactor_config_map)

    kwargs = {}
    build_json = {"metadata": {}}
    if namespace is not None:
        build_json['metadata']['namespace'] = namespace
    monkeypatch.setenv("BUILD", json.dumps(build_json))

    # Lookup fails with 404, which should trigger the create path
    (flexmock(OSBS).should_receive('get_image_stream').once().with_args(
        TEST_IMAGESTREAM).and_raise(OsbsResponseException('none', 404)))

    if insecure_registry is not None:
        kwargs['insecure_registry'] = insecure_registry

    # Organization enclosure only applies with a reactor config map
    enclose_repo = ImageName.parse(TEST_REPO_WITH_REGISTRY)
    if reactor_config_map and organization:
        enclose_repo.enclose(organization)
    (flexmock(OSBS).should_receive('create_image_stream').once().with_args(
        TEST_IMAGESTREAM).and_return(ImageStreamResponse()))
    (flexmock(OSBS).should_receive('import_image_tags').once().and_return(True))
    runner.run()
def mock_environment(tmpdir, workflow, primary_images=None, floating_images=None,
                     manifest_results=None, annotations=None):
    """Populate *workflow* with a stub builder, tag config and build results.

    :return: tuple of (DockerTasker, workflow)
    """
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    base_image_id = '123456parent-id'
    setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
    setattr(workflow, 'builder', StubInsideBuilder())
    setattr(workflow.builder, 'image_id', '123456imageid')
    setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
    setattr(workflow.builder, 'source', StubInsideBuilder())
    setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)

    if primary_images:
        for image in primary_images:
            # Only tags containing '-' (version-release style) count as primary
            if '-' in ImageName.parse(image).tag:
                workflow.tag_conf.add_primary_image(image)
        workflow.tag_conf.add_unique_image(primary_images[0])

    if floating_images:
        workflow.tag_conf.add_floating_images(floating_images)

    workflow.build_result = BuildResult(image_id='123456', annotations=annotations or {})
    workflow.postbuild_results = {}
    if manifest_results:
        workflow.postbuild_results[PLUGIN_GROUP_MANIFESTS_KEY] = manifest_results

    return tasker, workflow
def _replace_named_pullspec(self, pullspec, replacement_pullspecs):
    """Swap a named pullspec's image for its configured replacement, if any.

    :param pullspec: object exposing `image` and `description` attributes
    :param replacement_pullspecs: mapping of old ImageName -> new ImageName
    """
    current = ImageName.parse(pullspec.image)
    replacement = replacement_pullspecs.get(current)
    # Nothing to do when no replacement is configured or it is identical
    if replacement is None or replacement == current:
        return
    log.debug("%s - Replaced pullspec for %s: %s -> %s",
              self.path, pullspec.description, current, replacement)
    pullspec.image = replacement.to_str()  # `replacement` is an ImageName
def add_unique_image(self, image: Union[str, "ImageName"]) -> None:
    """Register an image whose name is not predictable in advance.

    :param image: str or ImageName, image name (e.g. "namespace/httpd:2.4")
    :return: None
    """
    parsed = ImageName.parse(image)
    self._unique_images.append(parsed)
def replace_repo(self, image):
    """
    Replace image repo based on OSBS site/user configuration for image registry

    Note: repo can also mean "namespace/repo"

    :param image: ImageName
    :return: ImageName
    :raises RuntimeError: when no replacement, or more than one replacement,
        is configured for the image's package
    """
    site_mapping = self._get_site_mapping(image.registry)
    # No site mapping and no user mapping for this registry -> leave image untouched
    if site_mapping is None and image.registry not in self.user_package_mappings:
        self.log.debug("repo_replacements not configured for %s", image.registry)
        return image

    package = self._get_component_name(image)
    mapping = self._get_final_mapping(image.registry, package)
    replacements = mapping.get(package)

    # Exactly one replacement must be configured; otherwise the build cannot decide
    if replacements is None:
        raise RuntimeError("Replacement not configured for package {} (from {}). "
                           "Please specify replacement in {}"
                           .format(package, image, REPO_CONTAINER_CONFIG))
    elif len(replacements) > 1:
        options = ", ".join(replacements)
        raise RuntimeError("Multiple replacements for package {} (from {}): {}. "
                           "Please specify replacement in {}"
                           .format(package, image, options, REPO_CONTAINER_CONFIG))

    self.log.debug("Replacement for package %s: %s", package, replacements[0])
    replacement = ImageName.parse(replacements[0])
    # Only namespace/repo are replaced; registry and tag are preserved
    return self._replace(image, namespace=replacement.namespace, repo=replacement.repo)
def mock_environment(tmpdir, primary_images=None, annotations=None):
    """Build a stub workflow with tag config and build result for tagging tests.

    :return: tuple of (DockerTasker, DockerBuildWorkflow)
    """
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(source=SOURCE)
    base_image_id = '123456parent-id'
    setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
    setattr(workflow, 'builder', StubInsideBuilder())
    setattr(workflow.builder, 'image_id', '123456imageid')
    setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
    setattr(workflow.builder, 'source', StubInsideBuilder())
    setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)
    setattr(workflow, 'tag_conf', TagConf())

    if primary_images:
        for image in primary_images:
            # Only tags containing '-' (version-release style) count as primary
            if '-' in ImageName.parse(image).tag:
                workflow.tag_conf.add_primary_image(image)
        workflow.tag_conf.add_unique_image(primary_images[0])
        workflow.tag_conf.add_floating_image('namespace/httpd:floating')

    workflow.build_result = BuildResult(image_id='123456', annotations=annotations or {})
    return tasker, workflow
class WF(object):
    # Minimal stand-in for a build workflow whose koji import has completed.
    image = ImageName.parse('foo/bar:baz')
    openshift_build_selflink = '/builds/blablabla'
    build_process_failed = False
    # Cancellation flags come from the enclosing (parametrized) test scope
    autorebuild_canceled = auto_cancel
    build_canceled = manual_cancel
    tag_conf = TagConf()
    exit_results = {
        KojiImportPlugin.key: MOCK_KOJI_BUILD_ID
    }
    prebuild_results = {}
    plugin_workspace = {}
    builder = mock_builder()

    class mock_source(object):
        # Stub source object providing only VCS information
        def get_vcs_info(self):
            return VcsInfo(
                vcs_type='git',
                vcs_url=git_source_url,
                vcs_ref=git_source_ref
            )

    source = mock_source()

# Write a Dockerfile and point the builder at it.
# NOTE(review): `builder` is assumed to be bound in the enclosing scope
# (not visible in this chunk), presumably the same mock_builder() result
# assigned to WF.builder — confirm against the full file.
with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
    df.write(MOCK_DOCKERFILE)
setattr(builder, 'df_path', df.name)
def cmd_list_builds(args, osbs):
    """CLI handler: list builds, either as pretty-printed JSON or a text table.

    :param args: parsed CLI arguments (running, from_json, output, columns, FILTER)
    :param osbs: OSBS API instance used to fetch builds
    """
    kwargs = {}
    if args.running:
        kwargs['running'] = args.running

    # Builds come either from a JSON dump on disk or from the live API
    if args.from_json:
        with open(args.from_json) as fp:
            builds = [BuildResponse(build, osbs) for build in json.load(fp)]
    else:
        builds = osbs.list_builds(**kwargs)

    if args.output == 'json':
        json_output = []
        for build in builds:
            json_output.append(build.json)
        print_json_nicely(json_output)
    elif args.output == 'text':
        if args.columns:
            cols_to_display = args.columns.split(",")
        else:
            cols_to_display = CLI_LIST_BUILDS_DEFAULT_COLS

        # First row acts as the table header
        data = [{
            "base_image": "BASE IMAGE NAME",
            "base_image_id": "BASE IMAGE ID",
            "commit": "COMMIT",
            "image": "IMAGE NAME",
            "unique_image": "UNIQUE IMAGE NAME",
            "image_id": "IMAGE ID",
            "koji_build_id": "KOJI BUILD ID",
            "name": "BUILD ID",
            "status": "STATUS",
            "time_created": "TIME CREATED",
        }]
        for build in sorted(builds, key=lambda x: x.get_time_created_in_seconds()):
            unique_image = build.get_image_tag()
            try:
                image = \
                    ImageName.parse(build.get_repositories()["primary"][0]).to_str(registry=False)
            except (TypeError, KeyError, IndexError):
                image = ""  # "" or unique_image? failed builds don't have that ^
            # Apply name filter and running filter before rendering
            if args.FILTER and args.FILTER not in image:
                continue
            if args.running and not build.is_in_progress():
                continue
            b = {
                "base_image": build.get_base_image_name() or '',
                "base_image_id": build.get_base_image_id() or '',
                "commit": build.get_commit_id(),
                "image": image,
                "unique_image": unique_image,
                "image_id": build.get_image_id() or '',
                "koji_build_id": build.get_koji_build_id() or '',
                "name": build.get_build_name(),
                "status": build.status,
                "time_created": build.get_time_created(),
            }
            data.append(b)
        tp = TablePrinter(data, cols_to_display)
        tp.render()
def source_get_unique_image(self) -> ImageName:
    """Build a unique pullspec for a source container image.

    Takes the first pullspec of the koji build the sources belong to, and
    gives it a fresh tag of the form '<target>-<random>-<timestamp>-<arch>'.

    :return: ImageName pointing at the configured registry (organization
        enclosed when configured)
    """
    source_result = self.workflow.data.prebuild_results[PLUGIN_FETCH_SOURCES_KEY]
    koji_build_id = source_result['sources_for_koji_build_id']
    kojisession = get_koji_session(self.workflow.conf)

    timestamp = osbs.utils.utcnow().strftime('%Y%m%d%H%M%S')
    # Re-seed from system entropy so parallel builds don't collide
    random.seed()
    current_platform = platform.processor() or 'x86_64'

    tag_segments = [
        self.koji_target or 'none',
        # Fixed-width random component (exactly RAND_DIGITS digits)
        str(random.randrange(10**(RAND_DIGITS - 1), 10**RAND_DIGITS)),
        timestamp,
        current_platform
    ]

    tag = '-'.join(tag_segments)

    get_build_meta = kojisession.getBuild(koji_build_id)
    pull_specs = get_build_meta['extra']['image']['index']['pull']
    source_image_spec = ImageName.parse(pull_specs[0])
    source_image_spec.tag = tag
    organization = self.workflow.conf.registries_organization
    if organization:
        source_image_spec.enclose(organization)
    source_image_spec.registry = self.workflow.conf.registry['uri']
    return source_image_spec
def test_get_inspect_for_image_not_inspectable(self, df_images):
    """Test that passing a non-inspectable image raises an error."""
    util = imageutil.ImageUtil(df_images, self.config)
    # 'koji/image-build' is a custom base image and therefore not inspectable
    non_inspectable = ImageName.parse("koji/image-build")

    expected_msg = r"ImageName\(.*\) is not inspectable"
    with pytest.raises(ValueError, match=expected_msg):
        util.get_inspect_for_image(non_inspectable)
def add_floating_image(self, image: Union[str, "ImageName"]) -> None:
    """Register an image under a floating (movable) name.

    :param image: str or ImageName, image name (e.g. "namespace/httpd:2.4")
    :return: None
    """
    parsed = ImageName.parse(image)
    self._floating_images.append(parsed)
def workflow_callback(workflow):
    """Set up RegistryClient mocks for a single requested platform (ppc64le).

    Uses ``manifest_list`` from the enclosing test scope. With only one
    platform requested, the config blob must never be fetched.
    """
    workflow = self.prepare(workflow, mock_get_manifest_list=False)
    workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = {'ppc64le'}

    release = 'rel1'
    version = 'ver1'
    config_blob = {'config': {'Labels': {'release': release, 'version': version}}}
    # .times(0): config blob lookups must not happen in this scenario
    (flexmock(atomic_reactor.util.RegistryClient)
        .should_receive('get_config_from_registry')
        .and_return(config_blob)
        .times(0))

    manifest_tag = SOURCE_REGISTRY + '/' + BASE_IMAGE_W_SHA
    base_image_result = ImageName.parse(manifest_tag)
    manifest_image = base_image_result.copy()
    # The manifest list for the sha pullspec is fetched exactly once
    (flexmock(atomic_reactor.util.RegistryClient)
        .should_receive('get_manifest_list')
        .with_args(manifest_image)
        .and_return(flexmock(json=lambda: manifest_list,
                             content=json.dumps(manifest_list).encode('utf-8')))
        .once())
    return workflow
def tag_image(self, image, target_image, force=False):
    """
    tag provided image with specified image_name, registry and tag

    :param image: str or ImageName, image to tag
    :param target_image: ImageName, new name for the image
    :param force: bool, force tag the image?
    :return: str, image (reg.om/img:v1)
    :raises RuntimeError: when the docker daemon fails to tag the image
    """
    logger.info("tagging image '%s' as '%s'", image, target_image)
    logger.debug("image = '%s', target_image_name = '%s'", image, target_image)
    if not isinstance(image, ImageName):
        image = ImageName.parse(image)

    if image != target_image:
        response = self.d.tag(
            image.to_str(),
            target_image.to_str(tag=False),
            tag=target_image.tag,
            force=force)  # returns True/False
        if not response:
            logger.error("failed to tag image")
            # BUG FIX: the original code wrote `% image.to_str(), target_image`,
            # which applied the format operator to the first value only and
            # raised TypeError ("not enough arguments for format string")
            # instead of the intended RuntimeError. Wrap both values in a tuple.
            raise RuntimeError("Failed to tag image '%s': target_image = '%s'" %
                               (image.to_str(), target_image))
    else:
        logger.debug('image already tagged correctly, nothing to do')
    return target_image.to_str()  # this will be the proper name, not just repo/img
def add_primary_image(self, image: Union[str, "ImageName"]) -> None:
    """Register an image under a primary name.

    :param image: str or ImageName, image name (e.g. "namespace/httpd:2.4")
    :return: None
    """
    parsed = ImageName.parse(image)
    self._primary_images.append(parsed)
def test_parent_image_inspect(insecure, parents_pulled, tmpdir, source_params):
    """parent_image_inspect works whether or not parent images were pulled locally."""
    provided_image = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=provided_image)

    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, provided_image)
    b.tasker.build_method = default_build_method
    b.parents_pulled = parents_pulled

    provided_imagename = ImageName.parse(provided_image)
    registry_name = "registry.example.com"
    provided_imagename.registry = registry_name

    b.pull_registries = {
        registry_name: {
            'insecure': insecure,
            'dockercfg_path': str(tmpdir)
        }
    }

    if not parents_pulled:
        # Without a local pull, inspection must go through the registry helper
        (flexmock(atomic_reactor.util).should_receive(
            'get_inspect_for_image').with_args(
                provided_imagename, provided_imagename.registry, insecure,
                str(tmpdir)).and_return({'Id': 123}))

    built_inspect = b.parent_image_inspect(provided_imagename)

    assert built_inspect is not None
    assert built_inspect["Id"] is not None
def test_base_image_inspect(self, platform, df_images):
    """base_image_inspect must delegate to get_inspect_for_image with the df base image."""
    util = imageutil.ImageUtil(df_images, self.config)
    base_image = ImageName.parse("registry.com/fedora:35")  # base image in df_images

    (flexmock(util)
        .should_receive("get_inspect_for_image")
        .with_args(base_image, platform)
        .once()
        .and_return(self.inspect_data))

    assert util.base_image_inspect(platform) == self.inspect_data
def name(self):
    """Derive the annotation resource name from the image's repo and tag."""
    parsed = ImageName.parse(self.image)
    tag = parsed.tag
    digest_prefix = "sha256:"
    # A digest-style tag contributes only its hex part to the name
    if tag.startswith(digest_prefix):
        tag = tag[len(digest_prefix):]
    return "{}-{}-annotation".format(parsed.repo, tag)