def test_remove_worker_plugin(tmpdir, caplog, names, fragment_key):
    """Exercise the remove-worker-metadata exit plugin.

    For every worker name a mocked OSBS instance is registered for deferred
    removal; the plugin must call delete_config_map exactly once per name
    and log either the successful deletion or the failure for a None name.
    """
    workflow = mock_workflow(tmpdir)
    koji_metadata = {'foo': 'bar', 'spam': 'bacon'}
    metadata = {'metadata.json': koji_metadata}

    for worker_name in names:
        osbs = MockOSBS({worker_name: metadata})
        defer_removal(workflow, worker_name, osbs)
        (flexmock(osbs)
            .should_call("delete_config_map")
            .with_args(worker_name)
            .once()
            .and_return(True))

    plugin_runner = ExitPluginsRunner(None, workflow, [
        {'name': PLUGIN_REMOVE_WORKER_METADATA_KEY, "args": {}},
    ])
    plugin_runner.run()

    for worker_name in names:
        if worker_name:
            assert "ConfigMap {} deleted".format(worker_name) in caplog.text
        else:
            assert "Failed to delete ConfigMap None" in caplog.text
def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, metadata_only=False, blocksize=None):
    """Build an ExitPluginsRunner configured with KojiPromotePlugin.

    Each optional keyword translates into the matching plugin argument;
    falsy values leave the argument out entirely.
    """
    plugin_args = {'kojihub': '', 'url': '/'}
    if ssl_certs:
        plugin_args['koji_ssl_certs'] = '/'
    if principal:
        plugin_args['koji_principal'] = principal
    if keytab:
        plugin_args['koji_keytab'] = keytab
    if metadata_only:
        plugin_args['metadata_only'] = True
    if blocksize:
        plugin_args['blocksize'] = blocksize
    return ExitPluginsRunner(tasker, workflow, [
        {'name': KojiPromotePlugin.key, 'args': plugin_args},
    ])
def test_metadata_plugin_rpmqa_failure(tmpdir):
    """An rpmqa RuntimeError must produce an empty rpm-packages entry."""
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {PostBuildRPMqaPlugin.key: RuntimeError()}

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    stored = results[StoreMetadataInOSv3Plugin.key]
    for key in ("dockerfile", "artefacts", "logs", "rpm-packages",
                "repositories", "commit_id", "base-image-id",
                "base-image-name", "image-id"):
        assert key in stored
    # On rpmqa failure, rpm-packages should be empty
    assert len(stored["rpm-packages"]) == 0
def test_metadata_plugin(tmpdir):
    """Happy path: every expected key ends up in the stored metadata."""
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {PostBuildRPMqaPlugin.key: "rpm1\nrpm2"}

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    stored = results[StoreMetadataInOSv3Plugin.key]
    for key in ("dockerfile", "artefacts", "logs", "rpm-packages",
                "repositories", "commit_id"):
        assert key in stored
def test_set_koji_annotations_whitelist(workflow, source_dir, koji_conf):
    """koji_task_annotations_whitelist annotation tracks the koji config.

    When the koji reactor config carries a task_annotations_whitelist the
    plugin must expose it (JSON-encoded) in the build annotations;
    otherwise the annotation must be absent.
    """
    prepare(workflow)
    if koji_conf is not None:
        workflow.conf.conf['koji'] = koji_conf

    df = df_parser(str(source_dir))
    df.content = dedent('''\
        FROM nowhere
        RUN nothing
        CMD cowsay moo
        ''')
    flexmock(workflow, df_path=df.dockerfile_path)
    workflow.df_dir = str(source_dir)

    results = ExitPluginsRunner(workflow, [
        {'name': StoreMetadataPlugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataPlugin.key in results
    annotations = results[StoreMetadataPlugin.key]["annotations"]

    whitelist = koji_conf.get('task_annotations_whitelist') if koji_conf else None
    if whitelist:
        assert 'koji_task_annotations_whitelist' in annotations
        assert all(entry in whitelist
                   for entry in koji_conf['task_annotations_whitelist'])
        assert all(entry in whitelist
                   for entry in json.loads(
                       annotations['koji_task_annotations_whitelist']))
    else:
        assert 'koji_task_annotations_whitelist' not in annotations
def test_metadata_plugin_rpmqa_failure(tmpdir, reactor_config_map):  # noqa
    """Errors from rpmqa and koji-upload must surface in plugins-metadata."""
    initial_timestamp = datetime.now()
    workflow = prepare(reactor_config_map=reactor_config_map)
    df = df_parser(str(tmpdir))
    df.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.prebuild_results = {}
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: {
            'metadata_fragment_key': 'metadata.json',
            'metadata_fragment': 'configmap/build-1-md',
        },
    }
    # both plugins report the same (fake) finishing time and duration
    finished_at = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: finished_at,
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: finished_at,
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: 3.03,
    }
    workflow.plugins_errors = {
        PostBuildRPMqaPlugin.key: 'foo',
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: 'bar',
    }

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    annotations = results[StoreMetadataInOSv3Plugin.key]["annotations"]
    for key in ("dockerfile", "repositories", "commit_id", "base-image-id",
                "base-image-name", "image-id", "metadata_fragment",
                "metadata_fragment_key", "plugins-metadata"):
        assert key in annotations
    for section in ("errors", "durations", "timestamps"):
        assert section in annotations["plugins-metadata"]
    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, target=None, tag_later=False):
    """Create an ExitPluginsRunner with koji-import (and optionally koji-tag).

    The tag-build plugin is appended only when both a target is given and
    tag_later is set; the resulting plugin list is also recorded on the
    workflow as exit_plugins_conf.
    """
    args = {'kojihub': '', 'url': '/'}
    if ssl_certs:
        args['koji_ssl_certs'] = '/'
    if principal:
        args['koji_principal'] = principal
    if keytab:
        args['koji_keytab'] = keytab
    if target:
        args['target'] = target
        args['poll_interval'] = 0

    plugins_conf = [{'name': KojiImportPlugin.key, 'args': args}]
    if target and tag_later:
        plugins_conf.append({
            'name': KojiTagBuildPlugin.key,
            'args': {'kojihub': '', 'target': target, 'poll_interval': 0.01},
        })
    workflow.exit_plugins_conf = plugins_conf
    return ExitPluginsRunner(tasker, workflow, plugins_conf)
def test_missing_koji_build_id(tmpdir):
    """Without koji exit results no koji-build-id label may be stored."""
    workflow = prepare()
    workflow.exit_results = {}
    df = df_parser(str(tmpdir))
    df.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    labels = results[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" not in labels
def test_filter_repositories(tmpdir, pulp_registries, docker_registries,
                             prefixes):
    """Every stored unique/primary repository must match an allowed prefix."""
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    annotations = results[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])

    # every repository in each group must start with one of the prefixes
    for repos in (repositories['unique'], repositories['primary']):
        matched = {repo for prefix in prefixes for repo in repos
                   if repo.startswith(prefix)}
        assert matched == set(repos)
def test_store_metadata_fail_update_annotations(tmpdir, caplog):
    """A failing set_annotations_on_build must fail the plugin and be logged.

    OSBS.set_annotations_on_build is mocked to raise OsbsResponseException;
    the runner must surface it as PluginFailedException and the offending
    annotations must appear in the captured log.
    """
    workflow = prepare()
    workflow.exit_results = {}
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {"url": "http://example.com/"}
    }])
    (flexmock(OSBS).should_receive('set_annotations_on_build').and_raise(
        OsbsResponseException('/', 'failed', 0)))

    with pytest.raises(PluginFailedException):
        runner.run()

    # caplog.text is a property in pytest >= 3.4; calling it raises TypeError
    assert 'annotations:' in caplog.text
def test_labels_metadata_plugin(tmpdir, koji_plugin):
    """The koji build id from exit results becomes the koji-build-id label."""
    koji_build_id = 1234
    workflow = prepare()
    df = df_parser(str(tmpdir))
    df.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)
    workflow.exit_results = {koji_plugin: koji_build_id}

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    labels = results[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    # stored as a string, but numerically equal to the original id
    assert is_string_type(labels["koji-build-id"])
    assert int(labels["koji-build-id"]) == koji_build_id
def test_delete_from_registry_plugin(saved_digests, req_registries, tmpdir):
    # Verify DeleteFromRegistryPlugin issues exactly one HTTP DELETE per
    # unique digest on each requested registry and returns the set of
    # digests it deleted.
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    setattr(workflow, 'builder', X)

    # Build per-registry plugin args; registries flagged with use_secret get
    # a temp dir containing a .dockercfg with (dummy) credentials.
    args_registries = {}
    for reg, use_secret in req_registries.items():
        if use_secret:
            temp_dir = mkdtemp(dir=str(tmpdir))
            with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
                dockerconfig_contents = {
                    reg: {
                        "username": "******",
                        "password": reg
                    }
                }
                dockerconfig.write(json.dumps(dockerconfig_contents))
                dockerconfig.flush()
                args_registries[reg] = {'secret': temp_dir}
        else:
            args_registries[reg] = {}

    # Record the saved digests on the workflow's push configuration so the
    # plugin has something to delete.
    for reg, digests in saved_digests.items():
        r = DockerRegistry(reg)
        for tag, dig in digests.items():
            r.digests[tag] = ManifestDigest(v1='not-used', v2=dig)
        workflow.push_conf._registries['docker'].append(r)

    runner = ExitPluginsRunner(
        tasker,
        workflow,
        [{
            'name': DeleteFromRegistryPlugin.key,
            'args': {
                'registries': args_registries
            },
        }]
    )

    # Expect one DELETE per unique digest, only for requested registries;
    # basic auth is expected exactly when a secret was configured.
    deleted_digests = set()
    for reg, digests in saved_digests.items():
        if reg not in req_registries:
            continue
        for tag, dig in digests.items():
            if dig in deleted_digests:
                continue
            url = "https://" + reg + "/v2/" + tag.split(":")[0] + "/manifests/" + dig
            auth_type = requests.auth.HTTPBasicAuth if req_registries[reg] else None
            (flexmock(requests)
                .should_receive('delete')
                .with_args(url, verify=bool, auth=auth_type)
                .once()
                .and_return(flexmock(status_code=202)))
            deleted_digests.add(dig)

    result = runner.run()
    assert result[DeleteFromRegistryPlugin.key] == deleted_digests
def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, poll_interval=0.01, proxy_user=None):
    """Build an ExitPluginsRunner configured with KojiTagBuildPlugin."""
    plugin_args = {'kojihub': '', 'target': 'koji-target'}
    if ssl_certs:
        plugin_args['koji_ssl_certs'] = '/'
    if principal:
        plugin_args['koji_principal'] = principal
    if keytab:
        plugin_args['koji_keytab'] = keytab
    # poll_interval of 0 is meaningful, so test against None explicitly
    if poll_interval is not None:
        plugin_args['poll_interval'] = poll_interval
    if proxy_user:
        plugin_args['koji_proxy_user'] = proxy_user
    return ExitPluginsRunner(tasker, workflow, [
        {'name': KojiTagBuildPlugin.key, 'args': plugin_args},
    ])
def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, poll_interval=0.01, proxy_user=None,
                  use_args=True, koji_target='koji-target'):
    """Build a KojiTagBuild runner; koji auth is wired via the koji map."""
    args = {'target': koji_target}
    # poll_interval of 0 is meaningful, so test against None explicitly
    if poll_interval is not None:
        args['poll_interval'] = poll_interval

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
        ReactorConfig({'version': 1})
    add_koji_map_in_workflow(workflow, hub_url='',
                             ssl_certs_dir='/' if ssl_certs else None,
                             krb_keytab=keytab,
                             krb_principal=principal,
                             proxyuser=proxy_user)

    plugin_conf = {'name': KojiTagBuildPlugin.key}
    plugin_conf['args'] = args if use_args else {'target': koji_target}
    return ExitPluginsRunner(tasker, workflow, [plugin_conf])
def test_filter_nonpulp_repositories(tmpdir, pulp_registries,
                                     docker_registries, is_orchestrator,
                                     expected):
    """The repositories annotation must match expectations per build type."""
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)
    df = df_parser(str(tmpdir))
    df.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    plugin_runner = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ])
    if is_orchestrator:
        # mark this build as an orchestrator build
        workflow.buildstep_result[OrchestrateBuildPlugin.key] = 'foo'
    results = plugin_runner.run()

    assert StoreMetadataInOSv3Plugin.key in results
    annotations = results[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert json.loads(annotations['repositories']) == expected
def test_metadata_plugin_rpmqa_failure(tmpdir):
    """RPMqa failure yields empty rpm-packages but its error is recorded."""
    initial_timestamp = datetime.now()
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {PostBuildRPMqaPlugin.key: RuntimeError()}
    workflow.plugins_timestamps = {
        CpDockerfilePlugin.key: initial_timestamp.isoformat(),
        DistgitFetchArtefactsPlugin.key:
            (initial_timestamp + timedelta(seconds=1)).isoformat(),
        PostBuildRPMqaPlugin.key:
            (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        CpDockerfilePlugin.key: 1.01,
        DistgitFetchArtefactsPlugin.key: 2.02,
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {PostBuildRPMqaPlugin.key: 'foo'}

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    stored = results[StoreMetadataInOSv3Plugin.key]
    for key in ("dockerfile", "artefacts", "logs", "rpm-packages",
                "repositories", "commit_id", "base-image-id",
                "base-image-name", "image-id", "plugins-metadata"):
        assert key in stored
    # On rpmqa failure, rpm-packages should be empty
    assert len(stored["rpm-packages"]) == 0
    for section in ("errors", "durations", "timestamps"):
        assert section in stored["plugins-metadata"]
    plugins_metadata = json.loads(stored["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "cp_dockerfile" in plugins_metadata["durations"]
    assert "distgit_fetch_artefacts" in plugins_metadata["durations"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
def test_metadata_plugin_rpmqa_failure(tmpdir):
    """RPMqa failure yields an empty rpm-packages annotation; error recorded."""
    initial_timestamp = datetime.now()
    workflow = prepare()
    df = df_parser(str(tmpdir))
    df.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)
    workflow.prebuild_results = {}
    workflow.postbuild_results = {PostBuildRPMqaPlugin.key: RuntimeError()}
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key:
            (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {PostBuildRPMqaPlugin.key: 3.03}
    workflow.plugins_errors = {PostBuildRPMqaPlugin.key: 'foo'}

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    annotations = results[StoreMetadataInOSv3Plugin.key]["annotations"]
    for key in ("dockerfile", "logs", "rpm-packages", "repositories",
                "commit_id", "base-image-id", "base-image-name", "image-id",
                "plugins-metadata"):
        assert key in annotations
    # On rpmqa failure, rpm-packages should be empty
    assert len(annotations["rpm-packages"]) == 0
    for section in ("errors", "durations", "timestamps"):
        assert section in annotations["plugins-metadata"]
    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
def test_metadata_plugin(tmpdir):
    """All metadata values are stored as strings; digests round-trip via JSON.

    Bug fix: the final assertion compared the list against
    ``reversed(expected)``, which returns an iterator — a list never
    compares equal to an iterator, so the reversed-order acceptance was
    dead code.  Compare against a real reversed list instead.
    """
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }
    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {"url": "http://example.com/"}
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    # every stored value must be present and serialized as a string
    for key in ("dockerfile", "artefacts", "logs", "rpm-packages",
                "repositories", "commit_id", "base-image-id",
                "base-image-name", "image-id", "digests"):
        assert key in labels
        assert is_string_type(labels[key])
    digests = json.loads(labels['digests'])
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # the order of the two digest entries is not guaranteed
    assert digests in (expected, expected[::-1])
def create_runner(
        tasker, workflow, ssl_certs=False, principal=None,  # noqa:F811
        keytab=None, poll_interval=0.01, proxy_user=None,
        reactor_config_map=False, use_args=True):
    """Build a KojiTagBuild runner, optionally via a reactor config map.

    Every auth option is mirrored into both the plugin args and the koji
    map; the map is installed in the plugin workspace only when
    reactor_config_map is set.
    """
    args = {'kojihub': '', 'target': 'koji-target'}
    koji_map = {'hub_url': '', 'auth': {}}
    if ssl_certs:
        args['koji_ssl_certs'] = '/'
        koji_map['auth']['ssl_certs_dir'] = '/'
    if principal:
        args['koji_principal'] = principal
        koji_map['auth']['krb_principal'] = principal
    if keytab:
        args['koji_keytab'] = keytab
        koji_map['auth']['krb_keytab_path'] = keytab
    # poll_interval of 0 is meaningful, so test against None explicitly
    if poll_interval is not None:
        args['poll_interval'] = poll_interval
    if proxy_user:
        args['koji_proxy_user'] = proxy_user
        koji_map['auth']['proxyuser'] = proxy_user

    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY:
                ReactorConfig({'version': 1, 'koji': koji_map}),
        }

    plugin_conf = {'name': KojiTagBuildPlugin.key}
    plugin_conf['args'] = args if use_args else {'target': 'koji-target'}
    return ExitPluginsRunner(tasker, workflow, [plugin_conf])
def test_metadata_plugin_rpmqa_failure(workflow, source_dir):
    """RPMqa failure must be recorded in the plugins-metadata annotation."""
    initial_timestamp = datetime.now()
    prepare(workflow)
    df = df_parser(str(source_dir))
    df.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    flexmock(workflow, df_path=df.dockerfile_path)
    workflow.df_dir = str(source_dir)
    workflow.data.prebuild_results = {}
    workflow.data.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
    }
    workflow.data.plugins_timestamps = {
        PostBuildRPMqaPlugin.key:
            (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.data.plugins_durations = {PostBuildRPMqaPlugin.key: 3.03}
    workflow.data.plugins_errors = {PostBuildRPMqaPlugin.key: 'foo'}

    results = ExitPluginsRunner(workflow, [
        {'name': StoreMetadataPlugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataPlugin.key in results
    annotations = results[StoreMetadataPlugin.key]["annotations"]
    for key in ("dockerfile", "commit_id", "base-image-id",
                "base-image-name", "image-id", "plugins-metadata"):
        assert key in annotations
    for section in ("errors", "durations", "timestamps"):
        assert section in annotations["plugins-metadata"]
    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
def test_store_metadata_fail_update_labels(workflow, caplog):
    """A failing update_labels_on_build must fail the plugin and be logged."""
    prepare(workflow)
    workflow.data.labels = {'some-label': 'some-value'}

    plugin_runner = ExitPluginsRunner(workflow, [
        {'name': StoreMetadataPlugin.key,
         "args": {"url": "http://example.com/"}},
    ])
    (flexmock(OSBS)
        .should_receive('update_labels_on_build')
        .and_raise(OsbsResponseException('/', 'failed', 0)))

    with pytest.raises(PluginFailedException):
        plugin_runner.run()
    assert 'labels:' in caplog.text
def test_plugin_labels(workflow):
    """Workflow labels are stored with values coerced to strings."""
    prepare(workflow)
    workflow.data.labels = {'foo': 1, 'bar': 'two'}

    results = ExitPluginsRunner(workflow, [
        {'name': StoreMetadataPlugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    stored = results[StoreMetadataPlugin.key]["labels"]
    assert stored['foo'] == '1'
    assert stored['bar'] == 'two'
def test_plugin_annotations(workflow):
    """Workflow annotations are stored JSON-serialized."""
    prepare(workflow)
    workflow.data.annotations = {'foo': {'bar': 'baz'}, 'spam': ['eggs']}

    results = ExitPluginsRunner(workflow, [
        {'name': StoreMetadataPlugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    stored = results[StoreMetadataPlugin.key]["annotations"]
    assert stored['foo'] == '{"bar": "baz"}'
    assert stored['spam'] == '["eggs"]'
def test_missing_koji_build_id(tmpdir):
    """Without koji exit results no koji-build-id label may be stored."""
    workflow = prepare()
    workflow.exit_results = {}

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    labels = results[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" not in labels
def test_store_metadata_fail_update_annotations(tmpdir, caplog):
    """A failing set_annotations_on_build must be logged.

    OSBS.set_annotations_on_build is mocked to raise OsbsResponseException;
    after the run the failure (including the annotations) must appear in
    the captured log.
    """
    workflow = prepare()
    workflow.exit_results = {}
    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {"url": "http://example.com/"}
    }])
    (flexmock(OSBS).should_receive('set_annotations_on_build').and_raise(
        OsbsResponseException('/', 'failed', 0)))
    runner.run()
    # caplog.text is a property in pytest >= 3.4; calling it raises TypeError
    assert 'annotations:' in caplog.text
def test_filter_repositories(tmpdir, pulp_registries, docker_registries,
                             prefixes):
    """Every stored unique/primary repository must match an allowed prefix."""
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)
    df = df_parser(str(tmpdir))
    df.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    annotations = results[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])

    # every repository in each group must start with one of the prefixes
    for repos in (repositories['unique'], repositories['primary']):
        matched = {repo for prefix in prefixes for repo in repos
                   if repo.startswith(prefix)}
        assert matched == set(repos)
def test_koji_filesystem_label(res):
    """The filesystem-koji-task-id label mirrors the add_filesystem result."""
    workflow = prepare()
    workflow.prebuild_results = {PLUGIN_ADD_FILESYSTEM_KEY: res}

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    labels = results[StoreMetadataInOSv3Plugin.key]["labels"]
    if 'filesystem-koji-task-id' in res:
        assert 'filesystem-koji-task-id' in labels
        assert labels['filesystem-koji-task-id'] == 'example-fs-taskid'
    else:
        assert 'filesystem-koji-task-id' not in labels
def test_exit_before_dockerfile_created(workflow, source_dir):
    """Running before any Dockerfile exists stores empty image annotations."""
    prepare(workflow)
    workflow.data.exit_results = {}
    workflow.df_dir = str(source_dir)
    workflow._df_path = None

    results = ExitPluginsRunner(workflow, [
        {'name': StoreMetadataPlugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataPlugin.key in results
    annotations = results[StoreMetadataPlugin.key]["annotations"]
    assert annotations["base-image-name"] == ""
    assert annotations["base-image-id"] == ""
    assert annotations["dockerfile"] == ""
def test_exit_before_dockerfile_created(tmpdir):  # noqa
    """Running before any Dockerfile exists stores empty image annotations."""
    workflow = prepare(before_dockerfile=True)
    workflow.exit_results = {}
    workflow.builder.df_dir = str(tmpdir)

    results = ExitPluginsRunner(None, workflow, [
        {'name': StoreMetadataInOSv3Plugin.key,
         "args": {"url": "http://example.com/"}},
    ]).run()

    assert StoreMetadataInOSv3Plugin.key in results
    annotations = results[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert annotations["base-image-name"] == ""
    assert annotations["base-image-id"] == ""
    assert annotations["dockerfile"] == ""
def test_store_metadata_fail_update_labels(tmpdir, caplog, koji_plugin):
    """A failing update_labels_on_build must fail the plugin and be logged.

    OSBS.update_labels_on_build is mocked to raise OsbsResponseException;
    the runner must surface it as PluginFailedException and the offending
    labels must appear in the captured log.
    """
    workflow = prepare()
    workflow.exit_results = {
        koji_plugin: 1234,
    }
    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {"url": "http://example.com/"}
    }])
    (flexmock(OSBS).should_receive('update_labels_on_build').and_raise(
        OsbsResponseException('/', 'failed', 0)))
    with pytest.raises(PluginFailedException):
        runner.run()
    # caplog.text is a property in pytest >= 3.4; calling it raises TypeError
    assert 'labels:' in caplog.text