def test_metadata_plugin_rpmqa_failure(tmpdir, reactor_config_map):  # noqa
    initial_timestamp = datetime.now()
    workflow = prepare(reactor_config_map=reactor_config_map)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.prebuild_results = {}
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: {'metadata_fragment_key': 'metadata.json',
                                        'metadata_fragment': 'configmap/build-1-md'}
    }
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: 3.03,
    }
    workflow.plugins_errors = {
        PostBuildRPMqaPlugin.key: 'foo',
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: 'bar',
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert "repositories" in annotations
    assert "commit_id" in annotations
    assert "base-image-id" in annotations
    assert "base-image-name" in annotations
    assert "image-id" in annotations
    assert "metadata_fragment" in annotations
    assert "metadata_fragment_key" in annotations
    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]

def test_filter_nonpulp_repositories(tmpdir, pulp_registries, docker_registries,
                                     is_orchestrator, expected):
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])

    if is_orchestrator:
        workflow.buildstep_result[OrchestrateBuildPlugin.key] = 'foo'
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    assert repositories == expected

def test_remove_worker_plugin(tmpdir, caplog, names, fragment_key):
    workflow = mock_workflow(tmpdir)

    koji_metadata = {
        'foo': 'bar',
        'spam': 'bacon',
    }
    metadata = {'metadata.json': koji_metadata}
    for name in names:
        osbs = MockOSBS({name: metadata})
        defer_removal(workflow, name, osbs)
        (flexmock(osbs)
         .should_call("delete_config_map")
         .with_args(name)
         .once()
         .and_return(True))

    runner = ExitPluginsRunner(None, workflow, [{
        'name': PLUGIN_REMOVE_WORKER_METADATA_KEY,
        "args": {}
    }])
    runner.run()

    for name in names:
        if name:
            assert "ConfigMap {} deleted".format(name) in caplog.text
        else:
            assert "Failed to delete ConfigMap None" in caplog.text

def test_labels_metadata_plugin(tmpdir, koji_plugin, reactor_config_map):
    koji_build_id = 1234
    workflow = prepare(reactor_config_map=reactor_config_map)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.exit_results = {
        koji_plugin: koji_build_id,
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    assert is_string_type(labels["koji-build-id"])
    assert int(labels["koji-build-id"]) == koji_build_id

def test_metadata_plugin_rpmqa_failure(tmpdir):
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
    }

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert "artefacts" in labels
    assert "logs" in labels
    assert "rpm-packages" in labels
    assert "repositories" in labels
    assert "commit_id" in labels
    assert "base-image-id" in labels
    assert "base-image-name" in labels
    assert "image-id" in labels

    # On rpmqa failure, rpm-packages should be empty
    assert len(labels["rpm-packages"]) == 0

def test_filter_nonpulp_repositories(tmpdir, pulp_registries, docker_registries,
                                     is_orchestrator, expected,
                                     reactor_config_map):
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries,
                       reactor_config_map=reactor_config_map)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )

    if is_orchestrator:
        workflow.buildstep_result[OrchestrateBuildPlugin.key] = 'foo'
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    assert repositories == expected

def test_remove_worker_plugin(tmpdir, caplog, names, fragment_key):
    workflow = mock_workflow(tmpdir)

    koji_metadata = {
        'foo': 'bar',
        'spam': 'bacon',
    }
    metadata = {'metadata.json': koji_metadata}
    for name in names:
        osbs = MockOSBS({name: metadata})
        defer_removal(workflow, name, osbs)
        (flexmock(osbs)
         .should_call("delete_config_map")
         .with_args(name)
         .once()
         .and_return(True))

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': PLUGIN_REMOVE_WORKER_METADATA_KEY,
            "args": {}
        }]
    )
    runner.run()

    for name in names:
        if name:
            assert "ConfigMap {} deleted".format(name) in caplog.text
        else:
            assert "Failed to delete ConfigMap None" in caplog.text

def test_delete_from_registry_plugin(saved_digests, req_registries, tmpdir):
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    setattr(workflow, 'builder', X)

    args_registries = {}
    for reg, use_secret in req_registries.items():
        if use_secret:
            temp_dir = mkdtemp(dir=str(tmpdir))
            with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
                dockerconfig_contents = {
                    reg: {
                        "username": "******",
                        "password": reg
                    }
                }
                dockerconfig.write(json.dumps(dockerconfig_contents))
                dockerconfig.flush()
                args_registries[reg] = {'secret': temp_dir}
        else:
            args_registries[reg] = {}

    for reg, digests in saved_digests.items():
        r = DockerRegistry(reg)
        for tag, dig in digests.items():
            r.digests[tag] = ManifestDigest(v1='not-used', v2=dig)
        workflow.push_conf._registries['docker'].append(r)

    runner = ExitPluginsRunner(
        tasker,
        workflow,
        [{
            'name': DeleteFromRegistryPlugin.key,
            'args': {
                'registries': args_registries
            },
        }]
    )

    deleted_digests = set()
    for reg, digests in saved_digests.items():
        if reg not in req_registries:
            continue
        for tag, dig in digests.items():
            if dig in deleted_digests:
                continue
            url = "https://" + reg + "/v2/" + tag.split(":")[0] + "/manifests/" + dig
            auth_type = requests.auth.HTTPBasicAuth if req_registries[reg] else None
            (flexmock(requests)
             .should_receive('delete')
             .with_args(url, verify=bool, auth=auth_type)
             .once()
             .and_return(flexmock(status_code=202)))
            deleted_digests.add(dig)

    result = runner.run()
    assert result[DeleteFromRegistryPlugin.key] == deleted_digests

def test_metadata_plugin_rpmqa_failure(tmpdir):
    initial_timestamp = datetime.now()
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
    }
    workflow.plugins_timestamps = {
        CpDockerfilePlugin.key: initial_timestamp.isoformat(),
        DistgitFetchArtefactsPlugin.key: (initial_timestamp + timedelta(seconds=1)).isoformat(),
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        CpDockerfilePlugin.key: 1.01,
        DistgitFetchArtefactsPlugin.key: 2.02,
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {PostBuildRPMqaPlugin.key: 'foo'}

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert "artefacts" in labels
    assert "logs" in labels
    assert "rpm-packages" in labels
    assert "repositories" in labels
    assert "commit_id" in labels
    assert "base-image-id" in labels
    assert "base-image-name" in labels
    assert "image-id" in labels

    # On rpmqa failure, rpm-packages should be empty
    assert len(labels["rpm-packages"]) == 0

    assert "plugins-metadata" in labels
    assert "errors" in labels["plugins-metadata"]
    assert "durations" in labels["plugins-metadata"]
    assert "timestamps" in labels["plugins-metadata"]

    plugins_metadata = json.loads(labels["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "cp_dockerfile" in plugins_metadata["durations"]
    assert "distgit_fetch_artefacts" in plugins_metadata["durations"]
    assert "all_rpm_packages" in plugins_metadata["durations"]

def test_metadata_plugin(tmpdir):
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert "artefacts" in labels
    assert "logs" in labels
    assert "rpm-packages" in labels
    assert "repositories" in labels
    assert "commit_id" in labels

def test_metadata_plugin_rpmqa_failure(tmpdir):
    initial_timestamp = datetime.now()
    workflow = prepare()
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.prebuild_results = {}
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
    }
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {
        PostBuildRPMqaPlugin.key: 'foo'
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert "logs" in annotations
    assert "rpm-packages" in annotations
    assert "repositories" in annotations
    assert "commit_id" in annotations
    assert "base-image-id" in annotations
    assert "base-image-name" in annotations
    assert "image-id" in annotations

    # On rpmqa failure, rpm-packages should be empty
    assert len(annotations["rpm-packages"]) == 0

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]

def test_store_metadata_fail_update_annotations(tmpdir, caplog, reactor_config_map):  # noqa
    workflow = prepare(reactor_config_map=reactor_config_map)
    workflow.exit_results = {}
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    (flexmock(OSBS)
     .should_receive('update_annotations_on_build')
     .and_raise(OsbsResponseException('/', 'failed', 0)))
    with pytest.raises(PluginFailedException):
        runner.run()
    assert 'annotations:' in caplog.text

def test_metadata_plugin(tmpdir):
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert "artefacts" in labels
    assert "logs" in labels
    assert "rpm-packages" in labels
    assert "repositories" in labels
    assert "commit_id" in labels

def test_missing_koji_build_id(tmpdir, reactor_config_map):  # noqa
    workflow = prepare(reactor_config_map=reactor_config_map)
    workflow.exit_results = {}
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" not in labels

def test_labels_metadata_plugin(tmpdir):
    koji_build_id = 1234
    workflow = prepare()
    workflow.exit_results = {
        KojiPromotePlugin.key: koji_build_id,
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    assert is_string_type(labels["koji-build-id"])
    assert int(labels["koji-build-id"]) == koji_build_id

def test_store_metadata_fail_update_annotations(tmpdir, caplog):
    workflow = prepare()
    workflow.exit_results = {}
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    (flexmock(OSBS)
     .should_receive('set_annotations_on_build')
     .and_raise(OsbsResponseException('/', 'failed', 0)))
    with pytest.raises(PluginFailedException):
        runner.run()
    # caplog.text is a property, not a callable
    assert 'annotations:' in caplog.text

def test_missing_koji_build_id(tmpdir):
    workflow = prepare()
    workflow.exit_results = {}
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" not in labels

def test_labels_metadata_plugin(tmpdir, koji_plugin):
    koji_build_id = 1234
    workflow = prepare()
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.exit_results = {
        koji_plugin: koji_build_id,
    }

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    assert is_string_type(labels["koji-build-id"])
    assert int(labels["koji-build-id"]) == koji_build_id

def test_filter_repositories(tmpdir, pulp_registries, docker_registries, prefixes):
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    unique_repositories = repositories['unique']
    primary_repositories = repositories['primary']

    matched = set()
    for prefix in prefixes:
        for repo in unique_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(unique_repositories)

    matched = set()
    for prefix in prefixes:
        for repo in primary_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(primary_repositories)

def test_set_koji_annotations_whitelist(workflow, source_dir, koji_conf):
    prepare(workflow)
    if koji_conf is not None:
        workflow.conf.conf['koji'] = koji_conf

    df_content = dedent('''\
        FROM nowhere
        RUN nothing
        CMD cowsay moo
        ''')
    df = df_parser(str(source_dir))
    df.content = df_content
    flexmock(workflow, df_path=df.dockerfile_path)
    workflow.df_dir = str(source_dir)

    runner = ExitPluginsRunner(workflow, [{
        'name': StoreMetadataPlugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]

    whitelist = None
    if koji_conf:
        whitelist = koji_conf.get('task_annotations_whitelist')

    if whitelist:
        assert 'koji_task_annotations_whitelist' in annotations
        assert all(entry in whitelist
                   for entry in koji_conf['task_annotations_whitelist'])
        assert all(entry in whitelist
                   for entry in json.loads(annotations['koji_task_annotations_whitelist']))
    else:
        assert 'koji_task_annotations_whitelist' not in annotations

def build_docker_image(self):
    """
    build docker image

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise

        build_result = self.builder.build()
        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        if not build_result.is_failed():
            self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        if not build_result.is_failed():
            for registry in self.push_conf.docker_registries:
                self.builder.push_built_image(registry.uri,
                                              insecure=registry.insecure)

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        self.source.remove_tmpdir()

        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)

def test_metadata_plugin(tmpdir):
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert is_string_type(labels['dockerfile'])
    assert "artefacts" in labels
    assert is_string_type(labels['artefacts'])
    assert "logs" in labels
    assert is_string_type(labels['logs'])
    assert "rpm-packages" in labels
    assert is_string_type(labels['rpm-packages'])
    assert "repositories" in labels
    assert is_string_type(labels['repositories'])
    assert "commit_id" in labels
    assert is_string_type(labels['commit_id'])
    assert "base-image-id" in labels
    assert is_string_type(labels['base-image-id'])
    assert "base-image-name" in labels
    assert is_string_type(labels['base-image-name'])
    assert "image-id" in labels
    assert is_string_type(labels['image-id'])

    assert "digests" in labels
    assert is_string_type(labels['digests'])
    digests = json.loads(labels['digests'])
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # digest order is not guaranteed; bare reversed() returns an iterator,
    # which never compares equal to a list
    assert digests == expected or digests == list(reversed(expected))

def test_metadata_plugin(tmpdir):
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert is_string_type(labels['dockerfile'])
    assert "artefacts" in labels
    assert is_string_type(labels['artefacts'])
    assert "logs" in labels
    assert is_string_type(labels['logs'])
    assert "rpm-packages" in labels
    assert is_string_type(labels['rpm-packages'])
    assert "repositories" in labels
    assert is_string_type(labels['repositories'])
    assert "commit_id" in labels
    assert is_string_type(labels['commit_id'])
    assert "base-image-id" in labels
    assert is_string_type(labels['base-image-id'])
    assert "base-image-name" in labels
    assert is_string_type(labels['base-image-name'])
    assert "image-id" in labels
    assert is_string_type(labels['image-id'])

    assert "digests" in labels
    assert is_string_type(labels['digests'])
    digests = json.loads(labels['digests'])
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # digest order is not guaranteed; bare reversed() returns an iterator,
    # which never compares equal to a list
    assert digests == expected or digests == list(reversed(expected))

def test_metadata_plugin_rpmqa_failure(workflow, source_dir):
    initial_timestamp = datetime.now()
    prepare(workflow)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(source_dir))
    df.content = df_content
    flexmock(workflow, df_path=df.dockerfile_path)
    workflow.df_dir = str(source_dir)

    workflow.data.prebuild_results = {}
    workflow.data.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
    }
    workflow.data.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.data.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.data.plugins_errors = {
        PostBuildRPMqaPlugin.key: 'foo',
    }

    runner = ExitPluginsRunner(workflow, [{
        'name': StoreMetadataPlugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert "commit_id" in annotations
    assert "base-image-id" in annotations
    assert "base-image-name" in annotations
    assert "image-id" in annotations
    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]

def test_store_metadata_fail_update_labels(workflow, caplog):
    prepare(workflow)
    workflow.data.labels = {'some-label': 'some-value'}

    runner = ExitPluginsRunner(workflow, [{
        'name': StoreMetadataPlugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    (flexmock(OSBS)
     .should_receive('update_labels_on_build')
     .and_raise(OsbsResponseException('/', 'failed', 0)))
    with pytest.raises(PluginFailedException):
        runner.run()
    assert 'labels:' in caplog.text

def test_plugin_annotations(workflow):
    prepare(workflow)
    workflow.data.annotations = {'foo': {'bar': 'baz'}, 'spam': ['eggs']}

    runner = ExitPluginsRunner(workflow, [{
        'name': StoreMetadataPlugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    assert annotations['foo'] == '{"bar": "baz"}'
    assert annotations['spam'] == '["eggs"]'

def test_plugin_labels(workflow):
    prepare(workflow)
    workflow.data.labels = {'foo': 1, 'bar': 'two'}

    runner = ExitPluginsRunner(workflow, [{
        'name': StoreMetadataPlugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    labels = output[StoreMetadataPlugin.key]["labels"]
    assert labels['foo'] == '1'
    assert labels['bar'] == 'two'

def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, metadata_only=False, blocksize=None):
    args = {
        'kojihub': '',
        'url': '/',
    }
    if ssl_certs:
        args['koji_ssl_certs'] = '/'
    if principal:
        args['koji_principal'] = principal
    if keytab:
        args['koji_keytab'] = keytab
    if metadata_only:
        args['metadata_only'] = True
    if blocksize:
        args['blocksize'] = blocksize

    runner = ExitPluginsRunner(tasker, workflow, [
        {
            'name': KojiPromotePlugin.key,
            'args': args,
        },
    ])
    return runner

def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, poll_interval=0.01, proxy_user=None,
                  use_args=True, koji_target='koji-target'):
    args = {
        'target': koji_target,
    }
    if poll_interval is not None:
        args['poll_interval'] = poll_interval

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
        ReactorConfig({'version': 1})
    add_koji_map_in_workflow(workflow, hub_url='',
                             ssl_certs_dir='/' if ssl_certs else None,
                             krb_keytab=keytab,
                             krb_principal=principal,
                             proxyuser=proxy_user)

    plugin_conf = {
        'name': KojiTagBuildPlugin.key
    }
    if use_args:
        plugin_conf['args'] = args
    else:
        plugin_conf['args'] = {'target': koji_target}

    runner = ExitPluginsRunner(tasker, workflow, [plugin_conf])
    return runner

def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, poll_interval=0.01, proxy_user=None):
    args = {
        'kojihub': '',
        'target': 'koji-target',
    }
    if ssl_certs:
        args['koji_ssl_certs'] = '/'
    if principal:
        args['koji_principal'] = principal
    if keytab:
        args['koji_keytab'] = keytab
    if poll_interval is not None:
        args['poll_interval'] = poll_interval
    if proxy_user:
        args['koji_proxy_user'] = proxy_user

    runner = ExitPluginsRunner(tasker, workflow, [
        {
            'name': KojiTagBuildPlugin.key,
            'args': args,
        },
    ])
    return runner

def test_missing_koji_build_id(tmpdir):
    workflow = prepare()
    workflow.exit_results = {}

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" not in labels

def test_store_metadata_fail_update_annotations(tmpdir, caplog):
    workflow = prepare()
    workflow.exit_results = {}

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    (flexmock(OSBS)
     .should_receive('set_annotations_on_build')
     .and_raise(OsbsResponseException('/', 'failed', 0)))
    runner.run()
    # caplog.text is a property, not a callable
    assert 'annotations:' in caplog.text

def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, target=None, tag_later=False):
    args = {
        'kojihub': '',
        'url': '/',
    }
    if ssl_certs:
        args['koji_ssl_certs'] = '/'
    if principal:
        args['koji_principal'] = principal
    if keytab:
        args['koji_keytab'] = keytab
    if target:
        args['target'] = target
        args['poll_interval'] = 0

    plugins_conf = [
        {'name': KojiImportPlugin.key, 'args': args},
    ]
    if target and tag_later:
        plugins_conf.append({'name': KojiTagBuildPlugin.key,
                             'args': {'kojihub': '',
                                      'target': target,
                                      'poll_interval': 0.01}})
    workflow.exit_plugins_conf = plugins_conf
    runner = ExitPluginsRunner(tasker, workflow, plugins_conf)
    return runner

def test_filter_repositories(tmpdir, pulp_registries, docker_registries,
                             prefixes, reactor_config_map):
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries,
                       reactor_config_map=reactor_config_map)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    unique_repositories = repositories['unique']
    primary_repositories = repositories['primary']

    matched = set()
    for prefix in prefixes:
        for repo in unique_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(unique_repositories)

    matched = set()
    for prefix in prefixes:
        for repo in primary_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(primary_repositories)

def test_filter_repositories(tmpdir, pulp_registries, docker_registries, prefixes):
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    unique_repositories = repositories['unique']
    primary_repositories = repositories['primary']

    matched = set()
    for prefix in prefixes:
        for repo in unique_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(unique_repositories)

    matched = set()
    for prefix in prefixes:
        for repo in primary_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(primary_repositories)

def test_exit_before_dockerfile_created(workflow, source_dir):
    prepare(workflow)
    workflow.data.exit_results = {}
    workflow.df_dir = str(source_dir)
    workflow._df_path = None

    runner = ExitPluginsRunner(workflow, [{
        'name': StoreMetadataPlugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    assert annotations["base-image-name"] == ""
    assert annotations["base-image-id"] == ""
    assert annotations["dockerfile"] == ""

def test_koji_filesystem_label(res):
    workflow = prepare()
    workflow.prebuild_results = {PLUGIN_ADD_FILESYSTEM_KEY: res}

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]

    if 'filesystem-koji-task-id' in res:
        assert 'filesystem-koji-task-id' in labels
        assert labels['filesystem-koji-task-id'] == 'example-fs-taskid'
    if 'filesystem-koji-task-id' not in res:
        assert 'filesystem-koji-task-id' not in labels

def test_exit_before_dockerfile_created(tmpdir):  # noqa
    workflow = prepare(before_dockerfile=True)
    workflow.exit_results = {}
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert annotations["base-image-name"] == ""
    assert annotations["base-image-id"] == ""
    assert annotations["dockerfile"] == ""

def test_store_metadata_fail_update_labels(tmpdir, caplog, koji_plugin):
    workflow = prepare()
    workflow.exit_results = {
        koji_plugin: 1234,
    }

    runner = ExitPluginsRunner(None, workflow, [{
        'name': StoreMetadataInOSv3Plugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    (flexmock(OSBS)
     .should_receive('update_labels_on_build')
     .and_raise(OsbsResponseException('/', 'failed', 0)))
    with pytest.raises(PluginFailedException):
        runner.run()
    # caplog.text is a property, not a callable
    assert 'labels:' in caplog.text

def test_remove_worker_metadata_no_worker_build(tmpdir, caplog, user_params):
    """Don't traceback with missing worker builds, without worker builds
    plugin should just skip"""
    workflow = mock_workflow(tmpdir)
    annotations = None
    workflow.build_result = BuildResult(annotations=annotations, image_id="id1234")

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': PLUGIN_REMOVE_WORKER_METADATA_KEY,
            "args": {}
        }]
    )
    runner.run()

    assert "No build annotations found, skipping plugin" in caplog.text
    assert "Traceback" not in caplog.text

def test_missing_koji_build_id(tmpdir):
    workflow = prepare()
    workflow.exit_results = {}

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" not in labels

def test_store_metadata_fail_update_annotations(tmpdir, caplog):
    workflow = prepare()
    workflow.exit_results = {}

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    (flexmock(OSBS)
     .should_receive('set_annotations_on_build')
     .and_raise(OsbsResponseException('/', 'failed', 0)))
    runner.run()
    # caplog.text is a property, not a callable
    assert 'annotations:' in caplog.text

def test_arrangementv4_repositories(tmpdir, group_manifests, restore):
    workflow = prepare()
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )

    worker_data = {
        'repositories': {
            'primary': ['worker:1'],
            'unique': ['worker:unique'],
        },
    }
    workflow.buildstep_result[OrchestrateBuildPlugin.key] = worker_data
    workflow.build_result = BuildResult.make_remote_image_result(annotations=worker_data)

    if group_manifests is not None:
        workflow.postbuild_results[PLUGIN_GROUP_MANIFESTS_KEY] = group_manifests

    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    if restore:
        assert repositories != worker_data['repositories']
    else:
        assert repositories == worker_data['repositories']

def test_exit_before_dockerfile_created(tmpdir, reactor_config_map):  # noqa
    workflow = prepare(before_dockerfile=True, reactor_config_map=reactor_config_map)
    workflow.exit_results = {}
    workflow.builder = XBeforeDockerfile()
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert annotations["base-image-name"] == ""
    assert annotations["base-image-id"] == ""
    assert annotations["dockerfile"] == ""

def test_koji_filesystem_label(res, reactor_config_map):
    workflow = prepare(reactor_config_map=reactor_config_map)
    workflow.prebuild_results = {
        PLUGIN_ADD_FILESYSTEM_KEY: res
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]

    if 'filesystem-koji-task-id' in res:
        assert 'filesystem-koji-task-id' in labels
        assert labels['filesystem-koji-task-id'] == 'example-fs-taskid'
    if 'filesystem-koji-task-id' not in res:
        assert 'filesystem-koji-task-id' not in labels

def test_store_metadata_fail_update_labels(tmpdir, caplog, koji_plugin, reactor_config_map):
    workflow = prepare(reactor_config_map=reactor_config_map)
    workflow.exit_results = {
        koji_plugin: 1234,
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    (flexmock(OSBS)
     .should_receive('update_labels_on_build')
     .and_raise(OsbsResponseException('/', 'failed', 0)))
    with pytest.raises(PluginFailedException):
        runner.run()
    assert 'labels:' in caplog.text

def test_filter_repositories(tmpdir, pulp_registries, docker_registries, prefixes):
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    unique_repositories = repositories['unique']
    primary_repositories = repositories['primary']

    matched = set()
    for prefix in prefixes:
        for repo in unique_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(unique_repositories)

    matched = set()
    for prefix in prefixes:
        for repo in primary_repositories:
            if repo.startswith(prefix):
                matched.add(repo)
    assert matched == set(primary_repositories)

def test_metadata_plugin_rpmqa_failure(tmpdir):
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert "artefacts" in labels
    assert "logs" in labels
    assert "rpm-packages" in labels
    assert "repositories" in labels
    assert "commit_id" in labels
    assert "base-image-id" in labels
    assert "base-image-name" in labels
    assert "image-id" in labels

    # On rpmqa failure, rpm-packages should be empty
    assert len(labels["rpm-packages"]) == 0

def build_docker_image(self):
    """
    build docker image

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        build_result = self.builder.build()
        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        if build_result.is_failed():
            # The docker build failed. Finish here, just run the
            # exit plugins (from the 'finally:' block below).
            return build_result

        self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
        finally:
            self.source.remove_tmpdir()

def test_delete_from_registry_plugin(saved_digests, req_registries, tmpdir,
                                     orchestrator, manifest_list_digests):
    if MOCK:
        mock_docker()
        mock_get_retry_session()

    buildstep_plugin = None
    if orchestrator:
        ann_digests = []
        buildstep_plugin = [{
            'name': OrchestrateBuildPlugin.key,
            'args': {
                'platforms': "x86_64"
            },
        }]

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"},
                                   TEST_IMAGE,
                                   buildstep_plugins=buildstep_plugin,
                                   )
    setattr(workflow, 'builder', X)

    args_registries = {}
    for reg, use_secret in req_registries.items():
        if use_secret:
            temp_dir = mkdtemp(dir=str(tmpdir))
            with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
                dockerconfig_contents = {
                    reg: {
                        "username": "******",
                        "password": reg
                    }
                }
                dockerconfig.write(json.dumps(dockerconfig_contents))
                dockerconfig.flush()
                args_registries[reg] = {'secret': temp_dir}
        else:
            args_registries[reg] = {}

    for reg, digests in saved_digests.items():
        if orchestrator:
            for tag, dig in digests.items():
                repo = tag.split(':')[0]
                t = tag.split(':')[1]
                ann_digests.append({
                    'digest': dig,
                    'tag': t,
                    'repository': repo,
                    'registry': reg,
                })
        else:
            r = DockerRegistry(reg)
            for tag, dig in digests.items():
                r.digests[tag] = ManifestDigest(v1='not-used', v2=dig)
            workflow.push_conf._registries['docker'].append(r)

    group_manifest_digests = {}
    if orchestrator:
        build_annotations = {'digests': ann_digests}
        annotations = {'worker-builds': {'x86_64': build_annotations}}
        setattr(workflow, 'build_result', Y)
        setattr(workflow.build_result, 'annotations', annotations)

        # group_manifest digest should be added only
        # if there are worker builds and images are pushed to one registry
        if len(req_registries) == 1 and len(saved_digests.keys()) == 1 and \
                all(saved_digests.values()):
            workflow.postbuild_results[PLUGIN_GROUP_MANIFESTS_KEY] = manifest_list_digests
            for ml_repo, ml_digest in manifest_list_digests.items():
                for reg in req_registries:
                    if reg in saved_digests:
                        group_manifest_digests.setdefault(reg, {})
                        group_manifest_digests[reg] = saved_digests[reg].copy()
                        group_manifest_digests[reg][ml_repo] = ml_digest.default

    result_digests = saved_digests.copy()
    result_digests.update(group_manifest_digests)

    runner = ExitPluginsRunner(
        tasker,
        workflow,
        [{
            'name': DeleteFromRegistryPlugin.key,
            'args': {
                'registries': args_registries
            },
        }]
    )

    deleted_digests = set()
    for reg, digests in result_digests.items():
        if reg not in req_registries:
            continue
        for tag, dig in digests.items():
            if dig in deleted_digests:
                continue
            url = "https://" + reg + "/v2/" + tag.split(":")[0] + "/manifests/" + dig
            auth_type = requests.auth.HTTPBasicAuth if req_registries[reg] else None
            (flexmock(requests.Session)
             .should_receive('delete')
             .with_args(url, verify=bool, auth=auth_type)
             .once()
             .and_return(flexmock(status_code=202, ok=True,
                                  raise_for_status=lambda: None)))
            deleted_digests.add(dig)

    result = runner.run()
    assert result[DeleteFromRegistryPlugin.key] == deleted_digests

def test_delete_from_registry_failures(tmpdir, status_code):
    if MOCK:
        mock_docker()
        mock_get_retry_session()

    req_registries = {DOCKER0_REGISTRY: True}
    saved_digests = {DOCKER0_REGISTRY: {'foo/bar:latest': DIGEST1}}

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    setattr(workflow, 'builder', X)

    args_registries = {}
    for reg, use_secret in req_registries.items():
        if use_secret:
            temp_dir = mkdtemp(dir=str(tmpdir))
            with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
                dockerconfig_contents = {
                    reg: {
                        "username": "******",
                        "password": reg
                    }
                }
                dockerconfig.write(json.dumps(dockerconfig_contents))
                dockerconfig.flush()
                args_registries[reg] = {'secret': temp_dir}
        else:
            args_registries[reg] = {}

    for reg, digests in saved_digests.items():
        r = DockerRegistry(reg)
        for tag, dig in digests.items():
            r.digests[tag] = ManifestDigest(v1='not-used', v2=dig)
        workflow.push_conf._registries['docker'].append(r)

    runner = ExitPluginsRunner(
        tasker,
        workflow,
        [{
            'name': DeleteFromRegistryPlugin.key,
            'args': {
                'registries': args_registries
            },
        }]
    )

    deleted_digests = set()
    for reg, digests in saved_digests.items():
        if reg not in req_registries:
            continue
        for tag, dig in digests.items():
            if dig in deleted_digests:
                continue
            url = "https://" + reg + "/v2/" + tag.split(":")[0] + "/manifests/" + dig
            auth_type = requests.auth.HTTPBasicAuth if req_registries[reg] else None
            response = requests.Response()
            response.status_code = status_code
            (flexmock(requests.Session)
             .should_receive('delete')
             .with_args(url, verify=bool, auth=auth_type)
             .and_return(response))
            deleted_digests.add(dig)

    if status_code == 520:
        with pytest.raises(PluginFailedException):
            result = runner.run()
            assert result[DeleteFromRegistryPlugin.key] == set([])
    else:
        result = runner.run()
        if status_code == requests.codes.ACCEPTED:
            assert result[DeleteFromRegistryPlugin.key] == deleted_digests
        else:
            assert result[DeleteFromRegistryPlugin.key] == set([])

def build_docker_image(self):
    """
    build docker image

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        start_time = datetime.datetime.now()
        self.plugins_timestamps['dockerbuild'] = start_time.isoformat()

        build_result = self.builder.build()

        try:
            finish_time = datetime.datetime.now()
            duration = finish_time - start_time
            seconds = duration.total_seconds()
            logger.debug("build finished in %ds", seconds)
            self.plugins_durations['dockerbuild'] = seconds
        except Exception:
            logger.exception("failed to save build duration")

        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        if build_result.is_failed():
            # The docker build failed. Finish here, just run the
            # exit plugins (from the 'finally:' block below).
            self.plugins_errors['dockerbuild'] = ''
            return build_result

        self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            raise
        finally:
            self.source.remove_tmpdir()

def test_metadata_plugin(tmpdir, br_annotations, expected_br_annotations,
                         br_labels, expected_br_labels, koji,
                         help_results, expected_help_results, base_from_scratch,
                         pulp_push_results, expected_pulp_push_results,
                         pulp_pull_results, expected_pulp_pull_results,
                         verify_media_results, expected_media_results,
                         reactor_config_map):
    initial_timestamp = datetime.now()
    workflow = prepare(reactor_config_map=reactor_config_map)
    if base_from_scratch:
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla
FROM scratch
RUN yum install -y python"""
        workflow.builder.base_from_scratch = base_from_scratch
    else:
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.prebuild_results = {
        AddHelpPlugin.key: help_results
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
        PLUGIN_PULP_PUSH_KEY: pulp_push_results,
    }
    workflow.exit_results = {
        PulpPullPlugin.key: pulp_pull_results,
        PLUGIN_VERIFY_MEDIA_KEY: verify_media_results,
    }
    workflow.fs_watcher._data = dict(fs_data=None)

    if br_annotations or br_labels:
        workflow.build_result = BuildResult(
            image_id=INPUT_IMAGE,
            annotations={'br_annotations': br_annotations} if br_annotations else None,
            labels={'br_labels': br_labels} if br_labels else None,
        )

    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: timestamp,
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {}

    if koji:
        cm_annotations = {'metadata_fragment_key': 'metadata.json',
                          'metadata_fragment': 'configmap/build-1-md'}
        workflow.postbuild_results[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = cm_annotations
        workflow.plugins_timestamps[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = timestamp
        workflow.plugins_durations[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = 3.03

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])
    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "parent_images" in annotations
    assert is_string_type(annotations['parent_images'])
    if base_from_scratch:
        assert annotations["base-image-name"] == ""
        assert annotations["base-image-id"] == ""
        assert '"scratch": "scratch"' in annotations['parent_images']
    else:
        assert annotations["base-image-name"] == workflow.builder.original_base_image.to_str()
        assert annotations["base-image-id"] != ""
        assert workflow.builder.original_base_image.to_str() in annotations['parent_images']
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])

    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']

    if koji:
        assert "metadata_fragment" in annotations
        assert is_string_type(annotations['metadata_fragment'])
        assert "metadata_fragment_key" in annotations
        assert is_string_type(annotations['metadata_fragment_key'])
    else:
        assert "metadata_fragment" not in annotations
        assert "metadata_fragment_key" not in annotations

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if br_annotations:
        assert annotations['br_annotations'] == expected_br_annotations
    else:
        assert 'br_annotations' not in annotations

    if br_labels:
        assert labels['br_labels'] == expected_br_labels
    else:
        assert 'br_labels' not in labels

    if expected_help_results is False:
        assert 'help_file' not in annotations
    else:
        assert json.loads(annotations['help_file']) == expected_help_results

    if expected_pulp_push_results is False:
        assert 'v1-image-id' not in annotations
    else:
        assert annotations['v1-image-id'] == expected_pulp_push_results

    if expected_pulp_pull_results or expected_pulp_push_results or expected_media_results:
        media_types = []
        if expected_pulp_push_results:
            media_types = ['application/json']
        if expected_pulp_pull_results:
            media_types += pulp_pull_results
        if expected_media_results:
            media_types += expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(list(set(media_types)))
    else:
        assert 'media-types' not in annotations

def build_docker_image(self):
    """
    build docker image

    :return: BuildResult
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        logger.info("running buildstep plugins")
        buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                  self.buildstep_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            self.build_result = buildstep_runner.run()

            if self.build_result.is_failed():
                raise PluginFailedException(self.build_result.fail_reason)
        except PluginFailedException as ex:
            self.builder.is_built = False
            logger.error('buildstep plugin failed: %s', ex)
            raise

        self.builder.is_built = True
        if self.build_result.is_image_available():
            self.builder.image_id = self.build_result.image_id

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        if self.build_result.is_image_available():
            self.built_image_inspect = self.builder.inspect_built_image()
            history = self.builder.tasker.d.history(self.builder.image_id)
            diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

            # diff_ids is ordered oldest first
            # history is ordered newest first
            # We want layer_sizes to be ordered oldest first
            self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                for (diff_id, layer) in zip(diff_ids, reversed(history))]

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return self.build_result
    except Exception as ex:
        logger.debug("caught exception (%r) so running exit plugins", ex)
        raise
    finally:
        # We need to make sure all exit plugins are executed
        signal.signal(signal.SIGTERM, lambda *args: None)
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            raise
        finally:
            self.source.remove_tmpdir()
            signal.signal(signal.SIGTERM, signal.SIG_DFL)

def test_metadata_plugin(tmpdir):
    initial_timestamp = datetime.now()
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }
    workflow.plugins_timestamps = {
        CpDockerfilePlugin.key: initial_timestamp.isoformat(),
        DistgitFetchArtefactsPlugin.key: (initial_timestamp + timedelta(seconds=1)).isoformat(),
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        CpDockerfilePlugin.key: 1.01,
        DistgitFetchArtefactsPlugin.key: 2.02,
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {
        DistgitFetchArtefactsPlugin.key: 'foo'
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "artefacts" in annotations
    assert is_string_type(annotations['artefacts'])
    assert "logs" in annotations
    assert is_string_type(annotations['logs'])
    assert annotations['logs'] == ''
    assert "rpm-packages" in annotations
    assert is_string_type(annotations['rpm-packages'])
    assert annotations['rpm-packages'] == ''
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])
    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # digest order is not guaranteed; bare reversed() returns an iterator,
    # which never compares equal to a list
    assert digests == expected or digests == list(reversed(expected))

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "distgit_fetch_artefacts" in plugins_metadata["errors"]
    assert "cp_dockerfile" in plugins_metadata["durations"]
    assert "distgit_fetch_artefacts" in plugins_metadata["durations"]
    assert "all_rpm_packages" in plugins_metadata["durations"]

def test_metadata_plugin_rpmqa_failure(tmpdir):
    initial_timestamp = datetime.now()
    workflow = prepare()
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
    }
    workflow.plugins_timestamps = {
        CpDockerfilePlugin.key: initial_timestamp.isoformat(),
        DistgitFetchArtefactsPlugin.key: (initial_timestamp + timedelta(seconds=1)).isoformat(),
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        CpDockerfilePlugin.key: 1.01,
        DistgitFetchArtefactsPlugin.key: 2.02,
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {
        PostBuildRPMqaPlugin.key: 'foo'
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert "artefacts" in annotations
    assert "logs" in annotations
    assert "rpm-packages" in annotations
    assert "repositories" in annotations
    assert "commit_id" in annotations
    assert "base-image-id" in annotations
    assert "base-image-name" in annotations
    assert "image-id" in annotations

    # On rpmqa failure, rpm-packages should be empty
    assert len(annotations["rpm-packages"]) == 0

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "cp_dockerfile" in plugins_metadata["durations"]
    assert "distgit_fetch_artefacts" in plugins_metadata["durations"]
    assert "all_rpm_packages" in plugins_metadata["durations"]