def test_pulp_dedup_layers(unsupported, unlink_exc, tmpdir, existing_layers, should_raise,
                           monkeypatch, subprocess_exceptions, reactor_config_map):
    """Layer-dedup push: verify the crane images and the reported top layer."""
    tasker, workflow = prepare(check_repo_retval=0,
                               existing_layers=existing_layers,
                               subprocess_exceptions=subprocess_exceptions,
                               unsupported=unsupported)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    for filename, content in (("pulp.cer", "pulp certificate\n"),
                              ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), filename), "wt") as secret_file:
            secret_file.write(content)
    if unlink_exc is not None:
        flexmock(os).should_receive('unlink').and_raise(unlink_exc)
    plugin_conf = [{'name': PulpPushPlugin.key,
                    'args': {'pulp_registry_name': 'test'}}]
    runner = PostBuildPluginsRunner(tasker, workflow, plugin_conf)
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig(
                {'version': 1, 'pulp': {'name': 'test', 'auth': {}}}),
        }
    runner.run()
    assert PulpPushPlugin.key is not None
    top_layer, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
    assert top_layer == 'foo'
def test_pulp_source_secret(tmpdir, check_repo_retval, should_raise, monkeypatch):
    """Push via pulp using certs from SOURCE_SECRET_PATH; expect failure when requested."""
    tasker, workflow = prepare(check_repo_retval=check_repo_retval)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    for filename, content in (("pulp.cer", "pulp certificate\n"),
                              ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), filename), "wt") as secret_file:
            secret_file.write(content)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': PulpPushPlugin.key, 'args': {'pulp_registry_name': 'test'}}])
    if should_raise:
        with pytest.raises(Exception):
            runner.run()
        return
    runner.run()
    assert PulpPushPlugin.key is not None
    _, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
def test_rpmqa_plugin(remove_container_error, ignore_autogenerated):
    """rpmqa plugin yields the expected package list, even if container removal errors."""
    if MOCK:
        errors = {'remove_container': None} if remove_container_error else {}
        mock_docker(should_raise_error=errors)
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    workflow.builder = X()
    workflow.builder.image_id = "asd123"
    workflow.builder.base_image = ImageName(repo='fedora', tag='21')
    workflow.builder.source = X()
    workflow.builder.source.dockerfile_path = "/non/existent"
    workflow.builder.source.path = "/non/existent"
    flexmock(docker.Client, logs=mock_logs)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key,
          "args": {'image_id': TEST_IMAGE,
                   "ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"]}}])
    results = runner.run()
    assert results[PostBuildRPMqaPlugin.key] == ignore_autogenerated["package_list"]
def test_pulp_source_secret(tmpdir, check_repo_retval, should_raise, monkeypatch,
                            reactor_config_map):
    """Pulp push with env-provided certs; optional reactor-config map supplies auth."""
    tasker, workflow = prepare(check_repo_retval=check_repo_retval)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    with open(os.path.join(str(tmpdir), "pulp.cer"), "wt") as cert_file:
        cert_file.write("pulp certificate\n")
    with open(os.path.join(str(tmpdir), "pulp.key"), "wt") as key_file:
        key_file.write("pulp key\n")
    plugin_conf = [{'name': PulpPushPlugin.key,
                    'args': {'pulp_registry_name': 'test'}}]
    runner = PostBuildPluginsRunner(tasker, workflow, plugin_conf)
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig(
                {'version': 1,
                 'pulp': {'name': 'test', 'auth': {'ssl_certs_dir': str(tmpdir)}}}),
        }
    if should_raise:
        with pytest.raises(Exception):
            runner.run()
        return
    runner.run()
    assert PulpPushPlugin.key is not None
    _, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
def test_skip_plugin(workflow, caplog):
    """Plugin must no-op (empty results) for builds that are neither flatpak nor source."""
    workflow.conf.conf = {
        'version': 1,
        'registries': [{'url': LOCALHOST_REGISTRY, 'insecure': True, 'auth': {}}],
    }
    runner = PostBuildPluginsRunner(
        workflow,
        [{'name': TagAndPushPlugin.key, 'args': {}}])
    results = runner.run()[TagAndPushPlugin.key]
    assert 'not a flatpak or source build, skipping plugin' in caplog.text
    assert 'pushed_images' in results
    assert 'repositories' in results
    assert not results['pushed_images']
    repositories = results['repositories']
    for kind in ('primary', 'unique', 'floating'):
        assert kind in repositories
        assert not repositories[kind]
def test_pulp_tag_basic(tmpdir, monkeypatch, v1_image_ids, should_raise, caplog):
    """Basic pulp tagging: either the runner fails, or the per-image tag results match.

    Fix: ``caplog.text`` is a property on modern pytest — calling it as
    ``caplog.text()`` raises ``TypeError: 'str' object is not callable``
    (other tests in this file already use the property form).
    Also iterate ``v1_image_ids.values()`` since the platform keys are unused.
    """
    tasker, workflow = prepare(v1_image_ids)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    with open(os.path.join(str(tmpdir), "pulp.cer"), "wt") as cer:
        cer.write("pulp certificate\n")
    with open(os.path.join(str(tmpdir), "pulp.key"), "wt") as key:
        key.write("pulp key\n")
    runner = PostBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_PULP_TAG_KEY,
        'args': {'pulp_registry_name': 'test'}}])
    if should_raise:
        with pytest.raises(PluginFailedException):
            runner.run()
        return
    msg = None
    expected_results = {}
    # any non-None v1 image id means a tag operation is expected
    for v1_image_id in v1_image_ids.values():
        if v1_image_id:
            msg = "tagging v1-image-id ppc64le_v1_image_id for platform ppc64le"
            expected_results = {
                'redhat-image-name1': {'tag': 'latest:ppc64le_v1_image_id'},
            }
            break
    results = runner.run()
    if msg:
        assert msg in caplog.text
    else:
        assert "tagging v1-image-id" not in caplog.text
    assert results['pulp_tag'] == expected_results
def test_cp_built_image_to_nfs(tmpdir, docker_tasker, dest_dir):
    """Copy the exported image onto an NFS mountpoint, optionally under dest_dir."""
    mountpoint = tmpdir.join("mountpoint")

    def fake_check_call(cmd):
        # the plugin must mount the NFS export with nolock before copying
        assert cmd == ["mount", "-t", "nfs", "-o", "nolock",
                       NFS_SERVER_PATH, mountpoint]

    flexmock(subprocess, check_call=fake_check_call)
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, "test-image")
    workflow.builder = X()
    exported_path = os.path.join(str(tmpdir), EXPORTED_SQUASHED_IMAGE_NAME)
    workflow.exported_image_sequence.append({"path": exported_path})
    open(workflow.exported_image_sequence[-1].get("path"), 'a').close()
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{'name': CopyBuiltImageToNFSPlugin.key,
          'args': {"nfs_server_path": NFS_SERVER_PATH,
                   "dest_dir": dest_dir,
                   "mountpoint": str(mountpoint)}}])
    runner.run()
    if dest_dir is None:
        target_dir = str(mountpoint)
    else:
        target_dir = os.path.join(str(mountpoint), dest_dir)
    assert os.path.isfile(os.path.join(target_dir, EXPORTED_SQUASHED_IMAGE_NAME))
def test_pulp_tag_service_account_secret(tmpdir, monkeypatch, caplog, reactor_config_map):
    """Tag via pulp using a service-account secret dir instead of SOURCE_SECRET_PATH."""
    v1_image_ids = {'x86_64': None, 'ppc64le': 'ppc64le_v1_image_id'}
    msg = "tagging v1-image-id ppc64le_v1_image_id for platform ppc64le"
    expected_results = {'redhat-image-name1': {'tag': 'latest:ppc64le_v1_image_id'}}
    tasker, workflow = prepare(v1_image_ids)
    # point the env secret somewhere nonexistent; pulp_secret_path must win
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir) + "/not-used")
    for filename, content in (("pulp.cer", "pulp certificate\n"),
                              ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), filename), "wt") as secret_file:
            secret_file.write(content)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': PLUGIN_PULP_TAG_KEY,
          'args': {'pulp_registry_name': 'test', 'pulp_secret_path': str(tmpdir)}}])
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig(
                {'version': 1,
                 'pulp': {'name': 'test', 'auth': {'ssl_certs_dir': str(tmpdir)}}}),
        }
    results = runner.run()
    assert msg in caplog.text
    assert results['pulp_tag'] == expected_results
def test_rpmqa_plugin_rpmdb_dir_is_empty(caplog, workflow, build_dir, base_from_scratch):
    """An empty rpmdb fails the plugin, except for FROM-scratch builds."""
    platforms = ['x86_64', 's390x', 'ppc64le', 'aarch64']
    workflow.data.tag_conf.add_unique_image(f'registry.com/{TEST_IMAGE}')
    extract_empty_rpmdb = functools.partial(mock_oc_image_extract, rpm_dir_empty=True)
    flexmock(retries).should_receive("run_cmd").replace_with(extract_empty_rpmdb)
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    set_df_images(workflow, base_from_scratch=base_from_scratch)
    runner = PostBuildPluginsRunner(
        workflow,
        [{"name": PostBuildRPMqaPlugin.key,
          "args": {"ignore_autogenerated_gpg_keys": True}}])
    log_msg = f'rpmdb directory {RPMDB_PATH} is empty'
    if base_from_scratch:
        results = runner.run()
        assert log_msg in caplog.text
        for platform in platforms:
            assert results[PostBuildRPMqaPlugin.key][platform] is None
    else:
        with pytest.raises(PluginFailedException, match=log_msg):
            runner.run()
        assert log_msg in caplog.text
def test_fetch_docker_archive(self, tmpdir, caplog, workflow):
    """Fetch per-platform docker archives and verify result paths and types.

    Fix: the original passed a single ``Path`` through ``os.path.join`` purely
    to coerce it to ``str`` — use ``str()`` on the pathlib path directly.
    """
    platforms = ['x86_64', 's390x', 'ppc64le', 'aarch64']
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    workflow.data.tag_conf.add_unique_image('registry.com/image:latest')
    workflow.build_dir.for_each_platform(self.create_image)
    # one tarball download per platform
    flexmock(ImageUtil).should_receive('download_image_archive_tarball').times(4)
    runner = PostBuildPluginsRunner(
        workflow,
        [{'name': FetchDockerArchivePlugin.key, 'args': {}}])
    results = runner.run()
    for platform, metadata in results[FetchDockerArchivePlugin.key].items():
        image_path = workflow.build_dir.path / platform
        img = str(image_path / EXPORTED_SQUASHED_IMAGE_NAME)
        assert os.path.exists(img)
        assert metadata['path'] == img
        assert metadata['type'] == IMAGE_TYPE_DOCKER_ARCHIVE
        assert f'image for platform:{platform} available at ' \
               f"{image_path / 'image.tar'}" in caplog.text
def test_rpmqa_plugin_success(caplog, workflow, build_dir, base_from_scratch,
                              ignore_autogenerated):
    """Happy path: rpm output is parsed per platform, honoring the autogenerated-key filter."""
    platforms = ['x86_64', 's390x', 'ppc64le', 'aarch64']
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    workflow.data.tag_conf.add_unique_image(f'registry.com/{TEST_IMAGE}')
    set_df_images(workflow, base_from_scratch=base_from_scratch)
    flexmock(retries).should_receive("run_cmd").replace_with(mock_oc_image_extract)
    # one deterministic temp-dir name per platform
    flexmock(_RandomNameSequence).should_receive("__next__").times(4).and_return('abcdef12')
    rpm_output = "\n".join(PACKAGE_LIST_WITH_AUTOGENERATED)
    flexmock(subprocess).should_receive("check_output").times(4).and_return(rpm_output)
    flexmock(docker.APIClient, logs=mock_logs)
    runner = PostBuildPluginsRunner(
        workflow,
        [{"name": PostBuildRPMqaPlugin.key,
          "args": {"ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"]}}])
    results = runner.run()
    for platform in platforms:
        assert results[PostBuildRPMqaPlugin.key][platform] == parse_rpm_output(
            ignore_autogenerated["package_list"])
def test_rpmqa_plugin_rpm_query_failed(caplog, workflow, build_dir, base_from_scratch):
    """A failing rpm query must propagate the exception and be logged."""
    platforms = ['x86_64', 's390x', 'ppc64le', 'aarch64']
    workflow.data.tag_conf.add_unique_image(f'registry.com/{TEST_IMAGE}')
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    set_df_images(workflow, base_from_scratch=base_from_scratch)
    flexmock(retries).should_receive("run_cmd").replace_with(mock_oc_image_extract)
    runner = PostBuildPluginsRunner(
        workflow,
        [{"name": PostBuildRPMqaPlugin.key,
          "args": {"ignore_autogenerated_gpg_keys": True}}])
    flexmock(subprocess).should_receive("check_output").once().and_raise(
        Exception, 'rpm query failed')
    with pytest.raises(Exception, match='rpm query failed'):
        runner.run()
    assert 'getting rpms from rpmdb:' in caplog.text
    assert 'Failed to get rpms from rpmdb:' in caplog.text
def test_pulp_dedup_layers(tmpdir, existing_layers, should_raise, monkeypatch,
                           subprocess_exceptions):
    """Dedup push: already-uploaded layers are skipped; crane images still reported."""
    tasker, workflow = prepare(check_repo_retval=0,
                               existing_layers=existing_layers,
                               subprocess_exceptions=subprocess_exceptions)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    for filename, content in (("pulp.cer", "pulp certificate\n"),
                              ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), filename), "wt") as secret_file:
            secret_file.write(content)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': PulpPushPlugin.key, 'args': {'pulp_registry_name': 'test'}}])
    runner.run()
    assert PulpPushPlugin.key is not None
    pushed = [image.to_str()
              for image in workflow.postbuild_results[PulpPushPlugin.key]]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
def test_pulp_service_account_secret(tmpdir, monkeypatch, reactor_config_map):
    """Push using pulp_secret_path while SOURCE_SECRET_PATH points elsewhere."""
    tasker, workflow = prepare()
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir) + "/not-used")
    for filename, content in (("pulp.cer", "pulp certificate\n"),
                              ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), filename), "wt") as secret_file:
            secret_file.write(content)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': PulpPushPlugin.key,
          'args': {'pulp_registry_name': 'test', 'pulp_secret_path': str(tmpdir)}}])
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig(
                {'version': 1,
                 'pulp': {'name': 'test', 'auth': {'ssl_certs_dir': str(tmpdir)}}}),
        }
    runner.run()
    _, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
def test_compress(self, tmpdir, method, load_exported_image, extension):
    """Compress the exported image with the given method and check the new path."""
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({'provider': 'git', 'uri': 'asd'}, 'test-image')
    workflow.builder = X()
    exported_tarball = os.path.join(str(tmpdir), 'img.tar')
    if load_exported_image:
        # pre-create an (empty) exported tarball for the plugin to load
        tarfile.open(exported_tarball, mode='w').close()
        workflow.exported_image_sequence.append({'path': exported_tarball})
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': CompressPlugin.key,
          'args': {'method': method,
                   'load_exported_image': load_exported_image}}])
    runner.run()
    expected_path = os.path.join(
        workflow.source.tmpdir,
        EXPORTED_COMPRESSED_IMAGE_NAME_TEMPLATE.format(extension))
    assert os.path.exists(expected_path)
    assert workflow.exported_image_sequence[-1]['path'] == expected_path
def test_pulp(tmpdir, check_repo_retval, should_raise):
    """End-to-end pulp push with dockpulp fully mocked.

    Fixes: drop the unused ``as exc`` capture on ``pytest.raises`` and
    add PEP 8 spacing on the ``registry`` assignment.
    """
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'source', X())
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)
    setattr(workflow, 'tag_conf', X())
    setattr(workflow.tag_conf, 'images',
            [ImageName(repo="image-name1"),
             ImageName(namespace="prefix", repo="image-name2"),
             ImageName(repo="image-name3", tag="asd")])

    # Mock dockpulp and docker
    dockpulp.Pulp = flexmock(dockpulp.Pulp)
    dockpulp.Pulp.registry = 'registry.example.com'
    (flexmock(dockpulp.imgutils).should_receive('get_metadata')
     .with_args(object)
     .and_return([{'id': 'foo'}]))
    (flexmock(dockpulp.imgutils).should_receive('get_versions')
     .with_args(object)
     .and_return({'id': '1.6.0'}))
    (flexmock(dockpulp.imgutils).should_receive('check_repo')
     .and_return(check_repo_retval))
    (flexmock(dockpulp.Pulp)
     .should_receive('set_certs')
     .with_args(object, object))
    (flexmock(dockpulp.Pulp)
     .should_receive('push_tar_to_pulp')
     .with_args(object, object)
     .and_return([1, 2, 3]))
    (flexmock(dockpulp.Pulp)
     .should_receive('watch_tasks')
     .with_args(list))
    mock_docker()
    os.environ['SOURCE_SECRET_PATH'] = str(tmpdir)
    with open(os.path.join(str(tmpdir), "pulp.cer"), "wt") as cer:
        cer.write("pulp certificate\n")
    with open(os.path.join(str(tmpdir), "pulp.key"), "wt") as key:
        key.write("pulp key\n")

    runner = PostBuildPluginsRunner(tasker, workflow, [{
        'name': PulpPushPlugin.key,
        'args': {'pulp_registry_name': 'test'}}])

    if should_raise:
        with pytest.raises(Exception):
            runner.run()
        return
    runner.run()
    assert PulpPushPlugin.key is not None
    images = [i.to_str() for i in workflow.postbuild_results[PulpPushPlugin.key]]
    assert "registry.example.com/image-name1" in images
    assert "registry.example.com/prefix/image-name2" in images
    assert "registry.example.com/image-name3:asd" in images
def test_pulp_dedup_layers(tmpdir, existing_layers, should_raise, monkeypatch,
                           subprocess_exceptions):
    """Dedup push: verify the crane images and that the reported top layer is 'foo'."""
    tasker, workflow = prepare(check_repo_retval=0,
                               existing_layers=existing_layers,
                               subprocess_exceptions=subprocess_exceptions)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    secrets = {"pulp.cer": "pulp certificate\n", "pulp.key": "pulp key\n"}
    for filename, content in secrets.items():
        with open(os.path.join(str(tmpdir), filename), "wt") as secret_file:
            secret_file.write(content)
    plugin_conf = [{'name': PulpPushPlugin.key,
                    'args': {'pulp_registry_name': 'test'}}]
    PostBuildPluginsRunner(tasker, workflow, plugin_conf).run()
    assert PulpPushPlugin.key is not None
    top_layer, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
    assert top_layer == 'foo'
def test_pulp_tag_service_account_secret(tmpdir, monkeypatch, caplog, reactor_config_map):
    """pulp_secret_path overrides the (bogus) SOURCE_SECRET_PATH for tagging."""
    v1_image_ids = {'x86_64': None, 'ppc64le': 'ppc64le_v1_image_id'}
    expected_msg = "tagging v1-image-id ppc64le_v1_image_id for platform ppc64le"
    expected = {'redhat-image-name1': {'tag': 'latest:ppc64le_v1_image_id'}}
    tasker, workflow = prepare(v1_image_ids)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir) + "/not-used")
    with open(os.path.join(str(tmpdir), "pulp.cer"), "wt") as cert_file:
        cert_file.write("pulp certificate\n")
    with open(os.path.join(str(tmpdir), "pulp.key"), "wt") as key_file:
        key_file.write("pulp key\n")
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': PLUGIN_PULP_TAG_KEY,
          'args': {'pulp_registry_name': 'test', 'pulp_secret_path': str(tmpdir)}}])
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig(
                {'version': 1,
                 'pulp': {'name': 'test', 'auth': {'ssl_certs_dir': str(tmpdir)}}}),
        }
    results = runner.run()
    assert expected_msg in caplog.text
    assert results['pulp_tag'] == expected
def test_pulp_tag_service_account_secret(tmpdir, monkeypatch, caplog):
    """Tag via pulp using a service-account secret directory.

    Fix: ``caplog.text`` is a property on modern pytest — calling it as
    ``caplog.text()`` raises ``TypeError: 'str' object is not callable``.
    """
    v1_image_ids = {'x86_64': None, 'ppc64le': 'ppc64le_v1_image_id'}
    msg = "tagging v1-image-id ppc64le_v1_image_id for platform ppc64le"
    expected_results = {
        'redhat-image-name1': {'tag': 'latest:ppc64le_v1_image_id'},
    }
    tasker, workflow = prepare(v1_image_ids)
    # env secret path deliberately nonexistent; pulp_secret_path must be used
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir) + "/not-used")
    with open(os.path.join(str(tmpdir), "pulp.cer"), "wt") as cer:
        cer.write("pulp certificate\n")
    with open(os.path.join(str(tmpdir), "pulp.key"), "wt") as key:
        key.write("pulp key\n")
    runner = PostBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_PULP_TAG_KEY,
        'args': {
            'pulp_registry_name': 'test',
            'pulp_secret_path': str(tmpdir),
        }}])
    results = runner.run()
    assert msg in caplog.text
    assert results['pulp_tag'] == expected_results
def test_rpmqa_plugin(remove_container_error, ignore_autogenerated):
    """rpmqa returns the expected package list even when container removal fails."""
    if MOCK:
        should_raise_error = {}
        if remove_container_error:
            should_raise_error['remove_container'] = None
        mock_docker(should_raise_error=should_raise_error)
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    builder = X()
    builder.image_id = "asd123"
    builder.base_image = ImageName(repo='fedora', tag='21')
    builder.source = X()
    builder.source.dockerfile_path = "/non/existent"
    builder.source.path = "/non/existent"
    workflow.builder = builder
    flexmock(docker.Client, logs=mock_logs)
    plugin_conf = [{"name": PostBuildRPMqaPlugin.key,
                    "args": {'image_id': TEST_IMAGE,
                             "ignore_autogenerated_gpg_keys":
                                 ignore_autogenerated["ignore"]}}]
    results = PostBuildPluginsRunner(tasker, workflow, plugin_conf).run()
    assert results[PostBuildRPMqaPlugin.key] == ignore_autogenerated["package_list"]
def test_rpmqa_plugin_skip(docker_tasker):  # noqa
    """Plugin is skipped when workflow.image_components is already populated."""
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    workflow.builder = X()
    workflow.builder.image_id = "asd123"
    workflow.builder.base_image = ImageName(repo='fedora', tag='21')
    workflow.builder.source = X()
    workflow.builder.source.dockerfile_path = "/non/existent"
    workflow.builder.source.path = "/non/existent"
    image_components = {'type': 'rpm', 'name': 'something'}
    workflow.image_components = image_components
    mock_docker()
    # logs would raise if the plugin actually inspected the container
    flexmock(docker.APIClient, logs=mock_logs_raise)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])
    results = runner.run()
    assert results[PostBuildRPMqaPlugin.key] is None
    assert workflow.image_components == image_components
def test_dangling_volumes_removed(docker_tasker, request):  # noqa:F811
    """Dangling volumes are removed after the build; conflicts are only logged."""
    fake_logger = FakeLogger()
    existing_logger = atomic_reactor.core.logger

    def restore_logger():
        atomic_reactor.core.logger = existing_logger

    # swap in the fake logger and guarantee restoration after the test
    request.addfinalizer(restore_logger)
    atomic_reactor.core.logger = fake_logger
    mock_docker()
    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])
    runner.run()
    container_id = u'f8ee920b2db5e802da2583a13a4edbf0523ca5fff6b6d6454c1fd6db5f38014d'
    assert ("container_id = '%s'", container_id) in fake_logger.debugs
    expected_volumes = [u'test', u'conflict_exception', u'real_exception']
    assert ("volumes = %s", expected_volumes) in fake_logger.debugs
    for volume in expected_volumes:
        assert ("removing volume '%s'", volume) in fake_logger.infos
    assert ('ignoring a conflict when removing volume %s',
            'conflict_exception') in fake_logger.debugs
def test_rpmqa_plugin(caplog, docker_tasker, base_from_scratch, remove_container_error,
                      ignore_autogenerated):
    """rpmqa package-list behavior, including the FROM-scratch skip path."""
    should_raise_error = {'remove_container': None} if remove_container_error else {}
    mock_docker(should_raise_error=should_raise_error)
    workflow = DockerBuildWorkflow(TEST_IMAGE, source=SOURCE)
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.set_base_from_scratch(base_from_scratch)
    flexmock(docker.APIClient, logs=mock_logs)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key,
          "args": {'image_id': TEST_IMAGE,
                   "ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"]}}])
    results = runner.run()
    if base_from_scratch:
        assert "from scratch can't run rpmqa" in caplog.text
        assert results[PostBuildRPMqaPlugin.key] is None
        assert workflow.image_components is None
    else:
        assert results[PostBuildRPMqaPlugin.key] == ignore_autogenerated["package_list"]
        assert workflow.image_components == parse_rpm_output(
            ignore_autogenerated["package_list"])
def test_dangling_volumes_removed(docker_tasker, caplog):
    """Dangling volumes are removed; a conflict during removal is only logged."""
    mock_docker()
    workflow = DockerBuildWorkflow(TEST_IMAGE, source=SOURCE)
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.set_base_from_scratch(False)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])
    runner.run()
    # bucket captured log messages by level for the assertions below
    logs = {}
    for record in caplog.records:
        logs.setdefault(record.levelno, []).append(record.message)
    container_id = 'f8ee920b2db5e802da2583a13a4edbf0523ca5fff6b6d6454c1fd6db5f38014d'
    assert "container_id = '{}'".format(container_id) in logs[logging.DEBUG]
    expected_volumes = [u'test', u'conflict_exception', u'real_exception']
    assert "volumes = {}".format(expected_volumes) in logs[logging.DEBUG]
    for volume in expected_volumes:
        assert "removing volume '{}'".format(volume) in logs[logging.INFO]
    assert ('ignoring a conflict when removing volume conflict_exception'
            in logs[logging.DEBUG])
def test_pulp_tag_service_account_secret(tmpdir, monkeypatch, caplog):
    """Tag via pulp with a service-account secret directory.

    Fix: ``caplog.text`` is a property on modern pytest — calling it as
    ``caplog.text()`` raises ``TypeError: 'str' object is not callable``.
    """
    v1_image_ids = {'x86_64': None, 'ppc64le': 'ppc64le_v1_image_id'}
    msg = "tagging v1-image-id ppc64le_v1_image_id for platform ppc64le"
    expected_results = {
        'redhat-image-name1': {
            'tag': 'latest:ppc64le_v1_image_id'
        },
    }
    tasker, workflow = prepare(v1_image_ids)
    # env secret path deliberately nonexistent; pulp_secret_path must be used
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir) + "/not-used")
    with open(os.path.join(str(tmpdir), "pulp.cer"), "wt") as cer:
        cer.write("pulp certificate\n")
    with open(os.path.join(str(tmpdir), "pulp.key"), "wt") as key:
        key.write("pulp key\n")
    runner = PostBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_PULP_TAG_KEY,
        'args': {
            'pulp_registry_name': 'test',
            'pulp_secret_path': str(tmpdir),
        }
    }])
    results = runner.run()
    assert msg in caplog.text
    assert results['pulp_tag'] == expected_results
def test_compress(self, tmpdir, method, load_exported_image, extension):
    """Compress the exported image and verify the resulting exported path."""
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({'provider': 'git', 'uri': 'asd'}, 'test-image')
    workflow.builder = X()
    exp_img = os.path.join(str(tmpdir), 'img.tar')
    if load_exported_image:
        # provide an (empty) exported tarball for the plugin to load
        tarfile.open(exp_img, mode='w').close()
        workflow.exported_image_sequence.append({'path': exp_img})
    plugin_conf = [{
        'name': CompressPlugin.key,
        'args': {'method': method, 'load_exported_image': load_exported_image},
    }]
    PostBuildPluginsRunner(tasker, workflow, plugin_conf).run()
    compressed_img = os.path.join(
        workflow.source.tmpdir,
        EXPORTED_COMPRESSED_IMAGE_NAME_TEMPLATE.format(extension))
    assert os.path.exists(compressed_img)
    assert workflow.exported_image_sequence[-1]['path'] == compressed_img
def test_compare_components_plugin(tmpdir, caplog, base_from_scratch, mismatch,
                                   exception, fail):
    """Component comparison across worker metadata, with exception list and scratch skip."""
    workflow = mock_workflow(tmpdir)
    worker_metadatas = mock_metadatas()
    # example data has 2 log items before the component item, hence output[2]
    component = worker_metadatas['ppc64le']['output'][2]['components'][0]
    if mismatch:
        component['version'] = 'bacon'
    if exception:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig(
                {'version': 1,
                 'package_comparison_exceptions': [component['name']]}),
        }
    workflow.postbuild_results[PLUGIN_FETCH_WORKER_METADATA_KEY] = worker_metadatas
    workflow.builder.base_from_scratch = base_from_scratch
    runner = PostBuildPluginsRunner(
        None, workflow,
        [{'name': PLUGIN_COMPARE_COMPONENTS_KEY, "args": {}}])
    if fail and not base_from_scratch:
        with pytest.raises(PluginFailedException):
            runner.run()
    else:
        runner.run()
    if base_from_scratch:
        assert ("Skipping comparing components: unsupported for FROM-scratch images"
                in caplog.text)
def test_tag_and_push_plugin(tmpdir, image_name, logs, should_raise, use_secret):
    """Tag and push to a registry, optionally authenticating via a .dockercfg secret.

    Fixes: write the secret into the ``tmpdir`` fixture (previously accepted but
    unused) instead of an un-cleaned ``mkdtemp()`` directory, and drop the
    redundant ``flush()`` — the ``with`` block closes the file before use.
    """
    if MOCK:
        mock_docker()
        flexmock(docker.Client,
                 push=lambda iid, **kwargs: iter(logs),
                 login=lambda username, registry, dockercfg_path:
                     {'Status': 'Login Succeeded'})
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    workflow.tag_conf.add_primary_image(image_name)
    setattr(workflow, 'builder', X)
    secret_path = None
    if use_secret:
        secret_path = str(tmpdir)
        dockerconfig_contents = {
            LOCALHOST_REGISTRY: {
                "username": "******",
                "email": "*****@*****.**",
                "password": "******"
            }
        }
        with open(os.path.join(secret_path, ".dockercfg"), "w+") as dockerconfig:
            dockerconfig.write(json.dumps(dockerconfig_contents))
    runner = PostBuildPluginsRunner(tasker, workflow, [{
        'name': TagAndPushPlugin.key,
        'args': {
            'registries': {
                LOCALHOST_REGISTRY: {'insecure': True, 'secret': secret_path}
            }
        },
    }])
    if should_raise:
        with pytest.raises(Exception):
            runner.run()
    else:
        output = runner.run()
        image = output[TagAndPushPlugin.key][0]
        tasker.remove_image(image)
        assert len(workflow.push_conf.docker_registries) > 0
        if MOCK:
            # we only test this when mocking docker because we don't expect
            # running actual docker against v2 registry
            assert workflow.push_conf.docker_registries[0].digests[
                image_name] == DIGEST1
def test_tag_by_labels_plugin(tmpdir, args):
    """Tag images from name/version/release labels; unique_tag_only limits to one tag."""
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, "test-image")
    version, release = "1.0", "1"
    workflow.built_image_inspect = {
        INSPECT_CONFIG: {
            "Labels": {"name": TEST_IMAGE, "version": version, "release": release}
        }
    }
    workflow.push_conf.add_docker_registry(LOCALHOST_REGISTRY, insecure=True)
    image = ImageName(repo=TEST_IMAGE,
                      tag="%s_%s" % (version, release),
                      registry=LOCALHOST_REGISTRY)
    setattr(workflow, 'builder', X)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': TagByLabelsPlugin.key, 'args': args}])
    output = runner.run()
    assert TagByLabelsPlugin.key in output.keys()
    images = [i.to_str() for i in workflow.tag_conf.images]
    primary_images = [i.to_str() for i in workflow.tag_conf.primary_images]
    unique_images = [i.to_str() for i in workflow.tag_conf.unique_images]
    if args.get('unique_tag_only'):
        assert len(workflow.tag_conf.images) == 1
        assert len(primary_images) == 0
    else:
        assert len(workflow.tag_conf.images) == 4
        assert len(primary_images) == 3
        for tag in ("%s-%s" % (version, release), version, "latest"):
            assert ("%s:%s" % (TEST_IMAGE, tag)) in images
            assert ("%s:%s" % (TEST_IMAGE, tag)) in primary_images
    assert len(unique_images) == 1
    assert ("%s:%s" % (TEST_IMAGE, "unique_tag_123")) in images
    assert ("%s:%s" % (TEST_IMAGE, "unique_tag_123")) in unique_images
    tasker.remove_image(image)
def build_docker_image(self):
    """
    build docker image

    Runs the pre-build, prepublish and post-build plugin phases around the
    actual build, pushes the built image to any configured docker registries,
    and always runs the exit plugins (via ``finally``), even when an earlier
    phase raised.

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            # a prebuild failure aborts the whole build
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise

        build_result = self.builder.build()
        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        if not build_result.is_failed():
            self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        # NOTE(review): this runner executes even when the build failed —
        # confirm that is intentional
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        if not build_result.is_failed():
            # push the built image to every configured registry
            for registry in self.push_conf.docker_registries:
                self.builder.push_built_image(registry.uri,
                                              insecure=registry.insecure)

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        # always clean up the cloned source and run exit plugins,
        # regardless of build success or an in-flight exception
        self.source.remove_tmpdir()

        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run()
        except PluginFailedException as ex:
            # exit-plugin failures are logged but deliberately not re-raised,
            # so they cannot mask the original build result/exception
            logger.error("one or more exit plugins failed: %s", ex)
def test_tag_by_labels_plugin(tmpdir, args):
    """Derive tags from the image labels and verify primary/unique tag sets."""
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, "test-image")
    version = "1.0"
    release = "1"
    labels = {"name": TEST_IMAGE, "version": version, "release": release}
    workflow.built_image_inspect = {INSPECT_CONFIG: {"Labels": labels}}
    workflow.push_conf.add_docker_registry(LOCALHOST_REGISTRY, insecure=True)
    nvr_tag = "%s_%s" % (version, release)
    image = ImageName(repo=TEST_IMAGE, tag=nvr_tag, registry=LOCALHOST_REGISTRY)
    setattr(workflow, 'builder', X)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': TagByLabelsPlugin.key, 'args': args}])
    output = runner.run()
    assert TagByLabelsPlugin.key in output.keys()
    images = [i.to_str() for i in workflow.tag_conf.images]
    primary_images = [i.to_str() for i in workflow.tag_conf.primary_images]
    unique_images = [i.to_str() for i in workflow.tag_conf.unique_images]
    if args.get('unique_tag_only'):
        assert len(workflow.tag_conf.images) == 1
        assert len(primary_images) == 0
    else:
        assert len(workflow.tag_conf.images) == 4
        assert len(primary_images) == 3
        expected_tags = ["%s-%s" % (version, release), version, "latest"]
        for tag in expected_tags:
            full_name = "%s:%s" % (TEST_IMAGE, tag)
            assert full_name in images
            assert full_name in primary_images
    assert len(unique_images) == 1
    unique_name = "%s:%s" % (TEST_IMAGE, "unique_tag_123")
    assert unique_name in images
    assert unique_name in unique_images
    tasker.remove_image(image)
def test_skip_plugin(workflow, caplog):
    """The create-OCI plugin is a logged no-op for non-flatpak builds."""
    setup_flatpak_composes(workflow)

    plugin_conf = [{'name': FlatpakCreateOciPlugin.key, 'args': {}}]
    PostBuildPluginsRunner(workflow, plugin_conf).run()

    assert 'not flatpak build, skipping plugin' in caplog.text
def test_mismatch_reporting(tmpdir, caplog, mismatch):
    """Test if expected log entries are reported when components mismatch"""
    workflow = mock_workflow(tmpdir)
    worker_metadatas = mock_metadatas()

    component_name = "openssl"
    # openssl is the fifth component of the third output item in the fixture
    component_ppc64le = worker_metadatas['ppc64le']['output'][2]['components'][4]
    assert component_ppc64le['name'] == component_name, "Error in test data"

    # add extra fake worker for s390x to having 3 different platforms
    # we care about only one component
    worker_metadatas['s390x'] = copy.deepcopy(worker_metadatas['ppc64le'])
    component_s390x = copy.deepcopy(component_ppc64le)
    component_s390x['arch'] = 's390x'
    worker_metadatas['s390x']['output'][2]['components'] = [component_s390x]

    if mismatch:
        # give two platforms conflicting versions so comparison must fail
        component_ppc64le['version'] = 'bacon'
        component_s390x['version'] = 'sandwich'

    workflow.postbuild_results[PLUGIN_FETCH_WORKER_METADATA_KEY] = worker_metadatas

    runner = PostBuildPluginsRunner(
        None,
        workflow,
        [{
            'name': PLUGIN_COMPARE_COMPONENTS_KEY,
            "args": {}
        }]
    )

    log_entries = (
        'Comparison mismatch for component openssl:',
        'ppc64le: openssl-bacon-8.el7 (199e2f91fd431d51)',
        'x86_64: openssl-1.0.2k-8.el7 (199e2f91fd431d51)',
        's390x: openssl-sandwich-8.el7 (199e2f91fd431d51)',
    )

    if mismatch:
        # mismatch detected, failure and log entries are expected
        with pytest.raises(PluginFailedException):
            try:
                runner.run()
            except PluginFailedException as e:
                # inspect the message, then re-raise for pytest.raises
                assert 'Failed component comparison for components: openssl' in str(e)
                raise

        for entry in log_entries:
            # component mismatch must be reported only once
            assert caplog.text.count(entry) == 1
    else:
        # no mismatch, no failure, no log entries
        runner.run()
        for entry in log_entries:
            assert entry not in caplog.text
def test_skip_plugin(self, caplog, workflow):
    """Scratch builds must skip the fetch-docker-archive plugin."""
    workflow.user_params['scratch'] = True

    conf = [{'name': FetchDockerArchivePlugin.key, 'args': {}}]
    PostBuildPluginsRunner(workflow, conf).run()

    assert 'scratch build, skipping plugin' in caplog.text
def test_rpmqa_plugin_exception(workflow):
    """An rpmqa failure must surface as PluginFailedException."""
    workflow.build_dir.init_build_dirs(['x86_64', 's390x', 'ppc64le', 'aarch64'],
                                       workflow.source)
    set_df_images(workflow)

    runner = PostBuildPluginsRunner(
        workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {}}],
    )
    with pytest.raises(PluginFailedException):
        runner.run()
def test_tag_parse(tmpdir, docker_tasker, floating_tags, unique_tags, primary_tags, expected):
    """TagFromConfigPlugin resolves tag_suffixes into final tags.

    When all three tag lists are given they are passed as
    ``tag_suffixes``; otherwise the plugin runs with
    ``tag_suffixes=None``.  ``expected`` is the resulting tag list, or
    None when a PluginFailedException is expected.
    """
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT_LABELS

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    # parent image metadata used for {parentrelease}-style substitutions
    base_inspect = {
        INSPECT_CONFIG: {
            'Labels': {
                'parentrelease': '7.4.1'
            },
            'Env': {
                'parentrelease': '7.4.1'
            },
        }
    }
    setattr(workflow.builder, 'base_image_inspect', base_inspect)
    mock_additional_tags_file(str(tmpdir), ['get_tags', 'file_tags'])

    if unique_tags is not None and primary_tags is not None and floating_tags is not None:
        input_tags = {
            'unique': unique_tags,
            'primary': primary_tags,
            'floating': floating_tags,
        }
    else:
        input_tags = None

    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': TagFromConfigPlugin.key,
            'args': {
                'tag_suffixes': input_tags
            }
        }]
    )
    if expected is not None:
        results = runner.run()
        plugin_result = results[TagFromConfigPlugin.key]

        # Plugin should return the tags we expect
        assert plugin_result == expected

        # Workflow should have the expected tags configured
        for tag in expected:
            assert any(tag == str(image) for image in workflow.tag_conf.images)

        # Workflow should not have any other tags configured
        assert len(workflow.tag_conf.images) == len(expected)
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_skip_plugin(workflow, caplog):
    """Component comparison is skipped entirely for scratch builds."""
    mock_workflow(workflow)
    workflow.user_params['scratch'] = True

    conf = [{'name': PLUGIN_COMPARE_COMPONENTS_KEY, 'args': {}}]
    PostBuildPluginsRunner(workflow, conf).run()

    assert 'scratch build, skipping plugin' in caplog.text
def test_fail_if_no_platform_is_set(workflow):
    """Gathering build metadata must fail when no platforms are enabled."""
    plugin_conf = [{
        'name': GatherBuildsMetadataPlugin.key,
        'args': {"koji_upload_dir": "path/to/upload"},
    }]
    runner = PostBuildPluginsRunner(workflow, plugin_conf)

    with pytest.raises(PluginFailedException, match="No enabled platforms"):
        runner.run()
def test_tag_and_push_plugin(tmpdir, image_name, logs, should_raise, use_secret):
    """TagAndPushPlugin pushes the image, optionally using a .dockercfg.

    :param logs: push output lines fed back from the mocked docker client
    :param should_raise: whether the push output indicates failure
    :param use_secret: whether registry credentials are supplied via a
                       temporary .dockercfg directory
    """
    if MOCK:
        mock_docker()
        # mock push/login so no real registry is contacted
        flexmock(docker.Client,
                 push=lambda iid, **kwargs: iter(logs),
                 login=lambda username, registry, dockercfg_path: {'Status': 'Login Succeeded'})

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    workflow.tag_conf.add_primary_image(image_name)
    setattr(workflow, 'builder', X)

    secret_path = None
    if use_secret:
        temp_dir = mkdtemp()
        with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
            dockerconfig_contents = {
                LOCALHOST_REGISTRY: {
                    "username": "******",
                    "email": "*****@*****.**",
                    "password": "******"}}
            dockerconfig.write(json.dumps(dockerconfig_contents))
            dockerconfig.flush()
            secret_path = temp_dir

    runner = PostBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': TagAndPushPlugin.key,
            'args': {
                'registries': {
                    LOCALHOST_REGISTRY: {
                        'insecure': True,
                        'secret': secret_path
                    }
                }
            },
        }]
    )

    if should_raise:
        with pytest.raises(Exception):
            runner.run()
    else:
        output = runner.run()
        image = output[TagAndPushPlugin.key][0]
        tasker.remove_image(image)
        assert len(workflow.push_conf.docker_registries) > 0

        if MOCK:
            # we only test this when mocking docker because we don't expect
            # running actual docker against v2 registry
            assert workflow.push_conf.docker_registries[0].digests[image_name] == DIGEST1
def test_tags_enclosed(tmpdir, docker_tasker, name, organization, expected):
    """Tags use the organization-enclosed repository when configured.

    :param name: value of the Dockerfile "name" label
    :param organization: registries_organization reactor config, if any
    :param expected: expected repository part of every resulting tag
    """
    df = df_parser(str(tmpdir))
    df.content = dedent("""\
        FROM fedora
        LABEL "name"="{}"
        LABEL "version"="1.7"
        LABEL "release"="99"
        """.format(name))

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    if organization:
        reactor_config = ReactorConfig({
            'version': 1,
            'registries_organization': organization
        })
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: reactor_config
        }

    input_tags = {
        'unique': ['foo', 'bar'],
        'primary': ['{version}', '{version}-{release}'],
    }
    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': TagFromConfigPlugin.key,
            'args': {
                'tag_suffixes': input_tags
            }
        }]
    )
    results = runner.run()
    plugin_result = results[TagFromConfigPlugin.key]

    expected_tags = [
        '{}:{}'.format(expected, tag)
        for tag in ['foo', 'bar', '1.7', '1.7-99']
    ]
    # Plugin should return the tags we expect
    assert plugin_result == expected_tags

    # Workflow should have the expected tags configured
    for tag in expected_tags:
        assert any(tag == str(image) for image in workflow.tag_conf.images)

    # Workflow should not have any other tags configured
    assert len(workflow.tag_conf.images) == len(expected_tags)
def test_flatpak_create_oci_no_source(workflow):
    """The plugin must fail clearly when no flatpak source info exists."""
    workflow.user_params['flatpak'] = True
    setup_flatpak_composes(workflow)

    # simulate flatpak_create_dockerfile never having run
    flexmock(FlatpakUtil).should_receive('get_flatpak_source_info').and_return(None)

    runner = PostBuildPluginsRunner(
        workflow,
        [{'name': FlatpakCreateOciPlugin.key, 'args': {}}],
    )
    expected = "flatpak_create_dockerfile must be run before flatpak_create_oci"
    with pytest.raises(PluginFailedException, match=expected):
        runner.run()
def test_rpmqa_plugin_exception(docker_tasker):  # noqa
    """A failure while reading container logs fails the plugin run."""
    mock_docker()
    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    # make the docker client raise whenever logs are requested
    flexmock(docker.APIClient, logs=mock_logs_raise)

    plugin_conf = [{"name": PostBuildRPMqaPlugin.key,
                    "args": {'image_id': TEST_IMAGE}}]
    runner = PostBuildPluginsRunner(docker_tasker, workflow, plugin_conf)

    with pytest.raises(PluginFailedException):
        runner.run()
def test_rpmqa_plugin_exception(docker_tasker):  # noqa
    """Errors raised from the docker logs call must fail the plugin."""
    workflow = DockerBuildWorkflow(SOURCE, "test-image")

    # stub builder with just the attributes the plugin touches
    workflow.builder = X()
    workflow.builder.image_id = "asd123"
    workflow.builder.base_image = ImageName(repo='fedora', tag='21')
    workflow.builder.source = X()
    workflow.builder.source.dockerfile_path = "/non/existent"
    workflow.builder.source.path = "/non/existent"

    flexmock(docker.Client, logs=mock_logs_raise)

    plugin_conf = [{"name": PostBuildRPMqaPlugin.key,
                    "args": {'image_id': TEST_IMAGE}}]
    runner = PostBuildPluginsRunner(docker_tasker, workflow, plugin_conf)

    with pytest.raises(PluginFailedException):
        runner.run()
def test_rpmqa_plugin_exception(docker_tasker):
    """Errors raised from the docker logs call must fail the plugin.

    ``mock_logs_raise`` is patched in as the docker client's ``logs``
    method, so the rpmqa plugin's log read raises and the runner must
    wrap that in PluginFailedException.
    """
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', "asd123")
    setattr(workflow.builder, 'base_image', ImageName(repo='fedora', tag='21'))
    setattr(workflow.builder, "source", X())
    setattr(workflow.builder.source, 'dockerfile_path', "/non/existent")
    setattr(workflow.builder.source, 'path', "/non/existent")
    flexmock(docker.Client, logs=mock_logs_raise)
    runner = PostBuildPluginsRunner(docker_tasker, workflow,
                                    [{"name": PostBuildRPMqaPlugin.key,
                                      "args": {'image_id': TEST_IMAGE}}])
    with pytest.raises(PluginFailedException):
        # run() is expected to raise, so assigning its result (the old
        # `results = runner.run()`) was dead code and has been removed
        runner.run()
def prepare(insecure_registry=None):
    """
    Boiler-plate test set-up.

    Builds a workflow with a stub builder/source, mocks out the osbs
    Configuration constructor, and returns a runner configured with
    ImportImagePlugin.

    :param insecure_registry: forwarded to the plugin's args
    :return: configured PostBuildPluginsRunner
    """
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', 'asd123')
    setattr(workflow.builder, 'source', X())
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)

    # any Configuration the plugin constructs gets this fake instead
    fake_conf = osbs.conf.Configuration(conf_file=None, openshift_uri='/')
    flexmock(osbs.conf).should_receive('Configuration').and_return(fake_conf)

    runner = PostBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': ImportImagePlugin.key,
            'args': {
                'imagestream': TEST_IMAGESTREAM,
                'docker_image_repo': TEST_REPO,
                'url': '',
                'build_json_dir': "",
                'verify_ssl': False,
                'use_auth': False,
                'insecure_registry': insecure_registry,
            }
        }])
    return runner
def test_rpmqa_plugin():
    """rpmqa plugin returns a non-empty package list for a mocked image."""
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")

    # stub builder with just the attributes the plugin touches
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', "asd123")
    setattr(workflow.builder, 'base_image', ImageName(repo='fedora', tag='21'))
    setattr(workflow.builder, "source", X())
    setattr(workflow.builder.source, 'dockerfile_path', "/non/existent")
    setattr(workflow.builder.source, 'path', "/non/existent")

    runner = PostBuildPluginsRunner(tasker, workflow,
                                    [{"name": PostBuildRPMqaPlugin.key,
                                      "args": {'image_id': TEST_IMAGE}}])
    results = runner.run()
    assert results is not None
    assert results[PostBuildRPMqaPlugin.key] is not None
    assert len(results[PostBuildRPMqaPlugin.key]) > 0
def test_bad_inspect_data(tmpdir, docker_tasker, inspect, error):
    """Malformed built-image inspect data must fail with a useful message.

    :param inspect: value for INSPECT_CONFIG, or None to leave
                    built_image_inspect unset
    :param error: substring expected in the failure message
    """
    workflow = mock_workflow(tmpdir)
    if inspect is not None:
        workflow.built_image_inspect = {INSPECT_CONFIG: inspect}
    mock_additional_tags_file(str(tmpdir), ['spam', 'bacon'])

    runner = PostBuildPluginsRunner(docker_tasker, workflow, [{
        'name': TagFromConfigPlugin.key
    }])
    with pytest.raises(PluginFailedException) as exc:
        runner.run()
    # check the exception message itself, not the ExceptionInfo wrapper:
    # `error in str(exc)` only worked because str(ExceptionInfo) happened
    # to include the message, which is deprecated pytest behavior
    assert error in str(exc.value)
def test_metadata_plugin(tmpdir):
    """StoreMetadataInOSv3Plugin collects build metadata into labels."""
    def set_annotations_on_build(build_id, labels):
        # stub: the plugin's annotation call becomes a no-op
        pass

    # expose a fake BUILD definition via a mocked os.environ
    new_environ = deepcopy(os.environ)
    new_environ["BUILD"] = '{"metadata": {"name": "asd"}}'
    flexmock(OSBS, set_annotations_on_build=set_annotations_on_build)
    flexmock(os)
    os.should_receive("environ").and_return(new_environ)

    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, "test-image")
    workflow.push_conf.add_pulp_registry("test", LOCALHOST_REGISTRY)
    workflow.tag_conf.add_primary_image(TEST_IMAGE)
    workflow.tag_conf.add_unique_image("namespace/image:asd123")

    setattr(workflow, 'builder', X)
    workflow.build_logs = ["a", "b"]
    workflow.source.lg = LazyGit(None, commit="commit")
    flexmock(workflow.source.lg)
    workflow.source.lg.should_receive("_commit_id").and_return("commit")

    # canned plugin results that the metadata plugin aggregates
    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    runner = PostBuildPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert "artefacts" in labels
    assert "logs" in labels
    assert "rpm-packages" in labels
    assert "repositories" in labels
    assert "commit_id" in labels
def test_tag_parse(tmpdir, docker_tasker, floating_tags, unique_tags, primary_tags, expected):
    """TagFromConfigPlugin resolves tag_suffixes into final tags.

    When all three tag lists are given they are passed as
    ``tag_suffixes``; otherwise the plugin runs with
    ``tag_suffixes=None``.  ``expected`` is the resulting tag list, or
    None when a PluginFailedException is expected.
    """
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT_LABELS

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    # parent image metadata used for {parentrelease}-style substitutions
    base_inspect = {
        INSPECT_CONFIG: {
            'Labels': {'parentrelease': '7.4.1'},
            'Env': {'parentrelease': '7.4.1'},
        }
    }
    setattr(workflow.builder, 'base_image_inspect', base_inspect)
    mock_additional_tags_file(str(tmpdir), ['get_tags', 'file_tags'])

    if unique_tags is not None and primary_tags is not None and floating_tags is not None:
        input_tags = {
            'unique': unique_tags,
            'primary': primary_tags,
            'floating': floating_tags,
        }
    else:
        input_tags = None

    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': TagFromConfigPlugin.key,
          'args': {'tag_suffixes': input_tags}}]
    )
    if expected is not None:
        results = runner.run()
        plugin_result = results[TagFromConfigPlugin.key]

        # Plugin should return the tags we expect
        assert plugin_result == expected

        # Workflow should have the expected tags configured
        for tag in expected:
            assert any(tag == str(image) for image in workflow.tag_conf.images)

        # Workflow should not have any other tags configured
        assert len(workflow.tag_conf.images) == len(expected)
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_compress(self, tmpdir, caplog, method, load_exported_image, give_export, extension):
    """CompressPlugin compresses the exported (or docker-queried) image.

    :param method: compression method to request
    :param load_exported_image: plugin arg; compress the exported file
    :param give_export: whether an exported image file is provided
    :param extension: expected file extension, or falsy when the method
                      is unsupported and a failure is expected
    """
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({'provider': 'git', 'uri': 'asd'}, 'test-image')
    workflow.builder = X()
    exp_img = os.path.join(str(tmpdir), 'img.tar')

    if load_exported_image and give_export:
        # provide an (empty) exported tarball for the plugin to compress
        tarfile.open(exp_img, mode='w').close()
        workflow.exported_image_sequence.append({'path': exp_img,
                                                 'type': IMAGE_TYPE_DOCKER_ARCHIVE})
        tasker = None  # image provided, should not query docker

    runner = PostBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': CompressPlugin.key,
            'args': {
                'method': method,
                'load_exported_image': load_exported_image,
            },
        }]
    )

    if not extension:
        with pytest.raises(Exception) as excinfo:
            runner.run()
        assert 'Unsupported compression format' in str(excinfo.value)
        return

    runner.run()

    compressed_img = os.path.join(
        workflow.source.tmpdir,
        EXPORTED_COMPRESSED_IMAGE_NAME_TEMPLATE.format(extension))
    assert os.path.exists(compressed_img)
    metadata = workflow.exported_image_sequence[-1]
    assert metadata['path'] == compressed_img
    assert metadata['type'] == IMAGE_TYPE_DOCKER_ARCHIVE
    assert 'uncompressed_size' in metadata
    assert isinstance(metadata['uncompressed_size'], integer_types)
    assert ", ratio: " in caplog.text
def test_tags_enclosed(tmpdir, docker_tasker, name, organization, expected):
    """Tags use the organization-enclosed repository when configured.

    :param name: value of the Dockerfile "name" label
    :param organization: registries_organization reactor config, if any
    :param expected: expected repository part of every resulting tag
    """
    df = df_parser(str(tmpdir))
    df.content = dedent("""\
        FROM fedora
        LABEL "name"="{}"
        LABEL "version"="1.7"
        LABEL "release"="99"
        """.format(name))

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    if organization:
        reactor_config = ReactorConfig({
            'version': 1,
            'registries_organization': organization
        })
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {WORKSPACE_CONF_KEY: reactor_config}

    input_tags = {
        'unique': ['foo', 'bar'],
        'primary': ['{version}', '{version}-{release}'],
    }
    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': TagFromConfigPlugin.key,
          'args': {'tag_suffixes': input_tags}}]
    )
    results = runner.run()
    plugin_result = results[TagFromConfigPlugin.key]

    expected_tags = ['{}:{}'.format(expected, tag)
                     for tag in ['foo', 'bar', '1.7', '1.7-99']]
    # Plugin should return the tags we expect
    assert plugin_result == expected_tags

    # Workflow should have the expected tags configured
    for tag in expected_tags:
        assert any(tag == str(image) for image in workflow.tag_conf.images)

    # Workflow should not have any other tags configured
    assert len(workflow.tag_conf.images) == len(expected_tags)
def test_bad_inspect_data(tmpdir, docker_tasker, inspect, error):
    """Malformed built-image inspect data must fail with a useful message.

    :param inspect: value for INSPECT_CONFIG, or None to leave
                    built_image_inspect unset
    :param error: substring expected in the failure message
    """
    workflow = mock_workflow(tmpdir)
    if inspect is not None:
        workflow.built_image_inspect = {
            INSPECT_CONFIG: inspect
        }
    mock_additional_tags_file(str(tmpdir), ['spam', 'bacon'])

    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': TagFromConfigPlugin.key}]
    )
    with pytest.raises(PluginFailedException) as exc:
        runner.run()
    # assert against the exception value, not the ExceptionInfo wrapper;
    # relying on str(exc) containing the message is deprecated in pytest
    assert error in str(exc.value)
def test_pulp_service_account_secret(tmpdir):
    """pulp_secret_path must take precedence over SOURCE_SECRET_PATH.

    SOURCE_SECRET_PATH is pointed at a non-existent directory, so the
    test can only pass if the plugin reads the certificate/key from the
    pulp_secret_path argument instead.
    """
    tasker, workflow = prepare()
    # point the env var somewhere unusable so we know it is not consulted;
    # remember the old value so the mutation does not leak into other tests
    orig_secret_path = os.environ.get('SOURCE_SECRET_PATH')
    os.environ['SOURCE_SECRET_PATH'] = os.path.join(str(tmpdir), "not-used")
    try:
        with open(os.path.join(str(tmpdir), "pulp.cer"), "wt") as cer:
            cer.write("pulp certificate\n")
        with open(os.path.join(str(tmpdir), "pulp.key"), "wt") as key:
            key.write("pulp key\n")

        runner = PostBuildPluginsRunner(tasker, workflow, [{
            'name': PulpPushPlugin.key,
            'args': {
                'pulp_registry_name': 'test',
                'pulp_secret_path': str(tmpdir),
            }}])
        runner.run()
        images = [i.to_str() for i in workflow.postbuild_results[PulpPushPlugin.key]]
        assert "registry.example.com/image-name1" in images
        assert "registry.example.com/prefix/image-name2" in images
        assert "registry.example.com/image-name3:asd" in images
    finally:
        # restore the environment exactly as we found it
        if orig_secret_path is None:
            os.environ.pop('SOURCE_SECRET_PATH', None)
        else:
            os.environ['SOURCE_SECRET_PATH'] = orig_secret_path
def test_tag_and_push_plugin(tmpdir):
    """Happy path: the primary-tagged image is pushed and then removed."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    workflow.tag_conf.add_primary_image(TEST_IMAGE)
    workflow.push_conf.add_docker_registry(LOCALHOST_REGISTRY, insecure=True)
    workflow.builder = X

    plugin_conf = [{'name': TagAndPushPlugin.key}]
    output = PostBuildPluginsRunner(tasker, workflow, plugin_conf).run()

    pushed_image = output[TagAndPushPlugin.key][0]
    tasker.remove_image(pushed_image)
def test_bad_component_type(tmpdir):
    """An unknown component 'type' must fail the comparison plugin."""
    workflow = mock_workflow(tmpdir)
    worker_metadatas = mock_metadatas()
    # example data has 2 log items before component item hence output[2]
    worker_metadatas['x86_64']['output'][2]['components'][0]['type'] = "foo"
    workflow.postbuild_results[PLUGIN_FETCH_WORKER_METADATA_KEY] = worker_metadatas

    plugin_conf = [{'name': PLUGIN_COMPARE_COMPONENTS_KEY, 'args': {}}]
    runner = PostBuildPluginsRunner(None, workflow, plugin_conf)

    with pytest.raises(PluginFailedException):
        runner.run()
def test_tag_from_config_plugin_generated(tmpdir, docker_tasker, tags, name, expected):
    """Tags come from the additional-tags file; tags=None means no file."""
    workflow = mock_workflow(tmpdir)
    workflow.built_image_inspect = {
        INSPECT_CONFIG: {'Labels': {'Name': name}}
    }

    # Simulate missing additional-tags file when tags is None.
    if tags is not None:
        mock_additional_tags_file(str(tmpdir), tags)

    results = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': TagFromConfigPlugin.key}],
    ).run()

    assert results[TagFromConfigPlugin.key] == expected
def test_remove_built_image_plugin(self, remove_base, deferred, expected):
    """GarbageCollectionPlugin removes built and deferred images once each.

    :param remove_base: plugin arg remove_pulled_base_image
    :param deferred: image ids to register for deferred removal
    :param expected: set of image ids that must end up removed
    """
    tasker, workflow = mock_environment()
    runner = PostBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': GarbageCollectionPlugin.key,
            'args': {'remove_pulled_base_image': remove_base},
        }]
    )
    removed_images = []

    def spy_remove_image(image_id, force=None):
        # record every removal the plugin requests instead of doing it
        removed_images.append(image_id)

    flexmock(tasker, remove_image=spy_remove_image)
    for image in deferred:
        defer_removal(workflow, image)

    # the runner's return value was never used, so the old
    # `output = runner.run()` assignment has been dropped
    runner.run()

    image_set = set(removed_images)
    # no image may be removed more than once
    assert len(image_set) == len(removed_images)
    assert image_set == expected