def test_add_labels_plugin(tmpdir, docker_tasker, df_content, labels_conf_base, labels_conf,
                           dont_overwrite, aliases, expected_output, caplog):
    """Run AddLabelsPlugin and verify the resulting Dockerfile (or the logged failure).

    `expected_output` is either a RuntimeError instance (meaning the plugin is
    expected to fail and log) or a collection of acceptable Dockerfile contents.
    """
    df = DockerfileParser(str(tmpdir))
    df.content = df_content

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, base_image_inspect=labels_conf_base)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AddLabelsPlugin.key,
            'args': {
                'labels': labels_conf,
                'dont_overwrite': dont_overwrite,
                'auto_labels': [],
                'aliases': aliases,
            }
        }]
    )

    runner.run()
    if isinstance(expected_output, RuntimeError):
        # caplog.text is a property, not a callable -- calling it raised
        # TypeError instead of checking the captured log.
        assert "plugin 'add_labels_in_dockerfile' raised an exception: RuntimeError" in caplog.text
    else:
        assert AddLabelsPlugin.key is not None
        # Membership test: expected_output lists the acceptable contents.
        assert df.content in expected_output
def test_add_labels_plugin_generated(tmpdir, docker_tasker, auto_label, value_re_part):
    """Auto-generated labels (e.g. build-date) must get plausible values.

    `value_re_part` is a regex the generated value must match; build-date is
    additionally checked for an exact ISO-formatted UTC timestamp.
    """
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, source=MockSource())
    flexmock(workflow, base_image_inspect=LABELS_CONF_BASE)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AddLabelsPlugin.key,
            'args': {'labels': {}, "dont_overwrite": [], "auto_labels": [auto_label],
                     'aliases': {'Build_Host': 'com.redhat.build-host'}}
        }]
    )

    runner.run()

    if value_re_part:
        # Value only has to match the expected pattern, not an exact string.
        assert re.match(value_re_part, df.labels[auto_label])

    if auto_label == "build-date":
        # build-date must be the ISO-formatted UTC build start time.
        utc_dt = datetime.datetime.utcfromtimestamp(atomic_reactor_start_time).isoformat()
        assert df.labels[auto_label] == utc_dt
def test_adddockerfile_todest(tmpdir, docker_tasker, workflow):  # noqa
    """AddDockerfilePlugin must ADD the Dockerfile into the requested destdir."""
    original_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = original_content
    prepare(workflow, dockerfile.dockerfile_path)

    plugin_conf = {
        'name': AddDockerfilePlugin.key,
        'args': {
            'nvr': 'jboss-eap-6-docker-6.4-77',
            'destdir': '/usr/share/doc/'
        }
    }
    PreBuildPluginsRunner(docker_tasker, workflow, [plugin_conf]).run()

    assert AddDockerfilePlugin.key is not None

    # The ADD line is injected just before CMD and targets destdir.
    expected_output = """
FROM fedora
RUN yum install -y python-django
ADD Dockerfile-jboss-eap-6-docker-6.4-77 /usr/share/doc/Dockerfile-jboss-eap-6-docker-6.4-77
CMD blabla"""
    assert dockerfile.content == expected_output
def test_fetch_maven_artifacts_nvr_bad_nvr(tmpdir, docker_tasker, reactor_config_map):
    """Err when given nvr is not a valid build in Koji."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    contents = dedent("""\
        - nvr: where-is-this-build-3.0-2
        """)
    mock_fetch_artifacts_by_nvr(str(tmpdir), contents=contents)
    mock_nvr_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': FetchMavenArtifactsPlugin.key,
        'args': {
            'koji_hub': KOJI_HUB,
            'koji_root': KOJI_ROOT
        }
    }])

    with pytest.raises(PluginFailedException) as e:
        runner.run()

    # `e` is an ExceptionInfo; str(e) includes the raise location, so match
    # against the exception message itself via e.value.
    assert 'Build where-is-this-build-3.0-2 not found' in str(e.value)
def test_assertlabels_plugin(tmpdir, df_content, req_labels, expected):
    """AssertLabelsPlugin passes or raises depending on required labels."""
    dockerfile = DockerfileParser(str(tmpdir))
    dockerfile.content = df_content

    tasker = DockerTasker()
    build_workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    build_workflow.builder = X
    build_workflow.builder.df_path = dockerfile.dockerfile_path
    build_workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [{
        'name': AssertLabelsPlugin.key,
        'args': {'required_labels': req_labels}
    }]
    runner = PreBuildPluginsRunner(tasker, build_workflow, plugin_conf)

    assert AssertLabelsPlugin.key is not None

    # Expected failure is signalled by passing a PluginFailedException instance.
    if not isinstance(expected, PluginFailedException):
        runner.run()
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_pull_raises_retry_error(workflow, caplog):
    """A pull that keeps raising must surface and log the final pull failure."""
    if MOCK:
        mock_docker(remember_images=True)

    # A single retry keeps the test fast while still exercising retry logic.
    tasker = DockerTasker(retry_times=1)
    workflow.builder = MockBuilder()
    image_name = ImageName.parse(IMAGE_RAISE_RETRYGENERATOREXCEPTION)
    base_image_str = "{}/{}:{}".format(SOURCE_REGISTRY, image_name.repo, 'some')
    source_registry = image_name.registry
    workflow.builder.dockerfile_images = DockerfileImages([base_image_str])
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
        ReactorConfig({'version': 1,
                       'source_registry': {'url': source_registry,
                                           'insecure': True}})

    runner = PreBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': PullBaseImagePlugin.key,
            'args': {},
        }],
    )
    with pytest.raises(Exception):
        runner.run()

    # The log message references the fully-qualified image (registry included).
    exp_img = ImageName.parse(base_image_str)
    exp_img.registry = source_registry
    assert 'failed to pull image: {}'.format(exp_img.to_str()) in caplog.text
def test_distgit_fetch_artefacts_plugin(tmpdir, workflow):  # noqa
    """Plugin must run sources_command inside the source dir and restore cwd."""
    command = 'fedpkg sources'
    expected_command = ['fedpkg', 'sources']

    workflow.source = StubSource()
    workflow.source.path = str(tmpdir)

    initial_dir = os.getcwd()
    assert initial_dir != str(tmpdir)

    def assert_tmpdir(*args, **kwargs):
        # Stand-in for subprocess.check_call: verifies the cwd at the moment
        # the sources command would actually run.
        assert os.getcwd() == str(tmpdir)

    (flexmock(
        pre_pyrpkg_fetch_artefacts.subprocess).should_receive('check_call').
        with_args(expected_command).replace_with(assert_tmpdir).once())

    workflow.conf.conf['sources_command'] = command

    runner = PreBuildPluginsRunner(
        workflow,
        [{
            'name': DistgitFetchArtefactsPlugin.key,
        }]
    )
    runner.run()

    # Plugin must chdir back to the original directory afterwards.
    assert os.getcwd() == initial_dir
def test_distgit_fetch_artefacts_plugin(tmpdir, docker_tasker):  # noqa
    """Plugin must run the given command in the source dir and restore cwd.

    NOTE(review): this redefines test_distgit_fetch_artefacts_plugin; if these
    definitions live in the same module, only the last one is collected.
    """
    command = 'fedpkg sources'
    expected_command = ['fedpkg', 'sources']

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X()
    workflow.source = flexmock(path=str(tmpdir))

    initial_dir = os.getcwd()
    assert initial_dir != str(tmpdir)

    def assert_tmpdir(*args, **kwargs):
        # Replacement for subprocess.check_call: assert cwd at call time.
        assert os.getcwd() == str(tmpdir)

    (flexmock(pre_pyrpkg_fetch_artefacts.subprocess)
        .should_receive('check_call')
        .with_args(expected_command)
        .replace_with(assert_tmpdir)
        .once())

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': DistgitFetchArtefactsPlugin.key,
            'args': {'command': command}
        }]
    )
    runner.run()

    # cwd must be restored after the plugin finishes.
    assert os.getcwd() == initial_dir
def test_distgit_fetch_artefacts_failure(tmpdir, docker_tasker):  # noqa
    """Even when the sources command fails, the plugin must restore cwd."""
    command = 'fedpkg sources'
    expected_command = ['fedpkg', 'sources']

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X()
    workflow.source = flexmock(path=str(tmpdir))

    initial_dir = os.getcwd()
    assert initial_dir != str(tmpdir)

    # Force the command to fail once it is invoked.
    (flexmock(pre_pyrpkg_fetch_artefacts.subprocess)
        .should_receive('check_call')
        .with_args(expected_command)
        .and_raise(RuntimeError)
        .once())

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': DistgitFetchArtefactsPlugin.key,
            'args': {'command': command}
        }]
    )
    with pytest.raises(PluginFailedException):
        runner.run()

    # cwd is restored even on failure.
    assert os.getcwd() == initial_dir
def run_plugin_with_args(self, workflow, plugin_args=None, expect_result=None,
                         expect_error=None):
    """Run ResolveComposesPlugin (preceded by ReactorConfigPlugin) and return its result.

    If expect_error is given, assert the run fails with that message and
    return None.

    NOTE(review): the expect_result parameter is not used in this body --
    confirm whether callers rely on it or it can be dropped.
    """
    plugin_args = plugin_args or {}
    plugin_args.setdefault('odcs_url', ODCS_URL)
    plugin_args.setdefault('koji_target', KOJI_TARGET_NAME)
    plugin_args.setdefault('koji_hub', KOJI_HUB)

    runner = PreBuildPluginsRunner(
        workflow.builder.tasker,
        workflow,
        [
            {'name': ReactorConfigPlugin.key,
             'args': {'config_path': str(workflow._tmpdir)}},
            {'name': ResolveComposesPlugin.key, 'args': plugin_args},
        ]
    )

    if expect_error:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert expect_error in str(exc_info.value)
        return

    results = runner.run()[ResolveComposesPlugin.key]

    if results:
        # A successful resolve must also have set override yum repourls.
        assert len(self.get_override_yum_repourls(workflow)) > 0
        assert set(results.keys()) == set(['signing_intent',
                                           'signing_intent_overridden',
                                           'composes'])
    else:
        assert self.get_override_yum_repourls(workflow) is None
        assert results is None

    return results
def test_yuminject_plugin_notwrapped(tmpdir):
    """InjectYumRepoPlugin (wrap_commands=False) adds the repo file and cleanup RUN."""
    df_content = """\
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = DockerfileParser(str(tmpdir))
    df.content = df_content

    tasker, workflow = prepare(df.dockerfile_path)

    metalink = 'https://mirrors.fedoraproject.org/metalink?repo=fedora-$releasever&arch=$basearch'

    # Register a repo file for the plugin to inject.
    workflow.files[os.path.join(YUM_REPOS_DIR, DEFAULT_YUM_REPOFILE_NAME)] = render_yum_repo(OrderedDict(
        (('name', 'my-repo'),
         ('metalink', metalink),
         ('enabled', 1),
         ('gpgcheck', 0)),
    ))
    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': InjectYumRepoPlugin.key,
        'args': {
            "wrap_commands": False
        }
    }])
    runner.run()
    assert InjectYumRepoPlugin.key is not None

    # ADD of the repos appears right after FROM; the cleanup RUN is appended.
    expected_output = r"""FROM fedora
ADD atomic-reactor-repos/* '/etc/yum.repos.d/'
RUN yum install -y python-django
CMD blabla
RUN rm -f '/etc/yum.repos.d/atomic-reactor-injected.repo'
"""
    assert expected_output == df.content
def test_retry_pull_base_image(exc, failures, should_succeed):
    """tag_image failures within the retry budget succeed; beyond it, the run fails."""
    if MOCK:
        mock_docker(remember_images=True)

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = MockBuilder()
    workflow.builder.base_image = ImageName.parse('parent-image')

    class MockResponse(object):
        # Minimal stand-in for the HTTP response carried by docker exceptions.
        content = ''

    # Raise `failures` times, then succeed on subsequent calls.
    expectation = flexmock(tasker).should_receive('tag_image')
    for _ in range(failures):
        expectation = expectation.and_raise(exc('', MockResponse()))

    expectation.and_return('foo')
    expectation.and_return('parent-image')

    runner = PreBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': PullBaseImagePlugin.key,
            'args': {'parent_registry': 'registry.example.com',
                     'parent_registry_insecure': True},
        }],
    )
    if should_succeed:
        runner.run()
    else:
        with pytest.raises(Exception):
            runner.run()
def test_returndockerfile_plugin(tmpdir):
    """CpDockerfilePlugin must store the Dockerfile content as its result."""
    content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    dockerfile = DockerfileParser(str(tmpdir))
    dockerfile.content = content

    tasker = DockerTasker()
    build_workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    build_workflow.builder = X
    build_workflow.builder.df_path = dockerfile.dockerfile_path
    build_workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [{'name': CpDockerfilePlugin.key}]
    PreBuildPluginsRunner(tasker, build_workflow, plugin_conf).run()

    assert CpDockerfilePlugin.key is not None
    assert build_workflow.prebuild_results.get(CpDockerfilePlugin.key, "") == content
def test_add_filesystem_plugin_legacy(tmpdir, docker_tasker, scratch):
    """Legacy invocation (no task id / arch) imports the filesystem image."""
    if MOCK:
        mock_docker()

    dockerfile = dedent("""\
        FROM koji/image-build
        RUN dnf install -y python-django
        """)
    workflow = mock_workflow(tmpdir, dockerfile)
    mock_koji_session(scratch=scratch)
    mock_image_build_file(str(tmpdir))

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': PLUGIN_ADD_FILESYSTEM_KEY,
            'args': {
                'koji_hub': KOJI_HUB,
            }
        }]
    )

    results = runner.run()
    plugin_result = results[PLUGIN_ADD_FILESYSTEM_KEY]
    assert 'base-image-id' in plugin_result
    assert plugin_result['base-image-id'] == IMPORTED_IMAGE_ID
    # Task id is present even in the legacy flow.
    assert 'filesystem-koji-task-id' in plugin_result
def test_fetch_maven_artifacts_url_allowed_domains(tmpdir, docker_tasker, domains, raises,
                                                   reactor_config_map):
    """Validate URL domain is allowed when fetching remote file."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_url(str(tmpdir))
    mock_url_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow,
                                          {'artifacts_allowed_domains': domains})

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {
                'koji_hub': KOJI_HUB,
                'koji_root': KOJI_ROOT,
                'allowed_domains': domains,
            }
        }]
    )

    if raises:
        with pytest.raises(PluginFailedException) as e:
            runner.run()
        assert 'is not in list of allowed domains' in str(e.value)
    else:
        results = runner.run()
        plugin_result = results[FetchMavenArtifactsPlugin.key]
        # Every reported download must exist in the download dir.
        for download in plugin_result:
            dest = os.path.join(str(tmpdir), FetchMavenArtifactsPlugin.DOWNLOAD_DIR,
                                download.dest)
            assert os.path.exists(dest)
def test_check_and_set_platforms(tmpdir, caplog, platforms, platform_exclude, platform_only,
                                 result):
    """Platforms from the koji target, filtered by container.yaml include/exclude."""
    write_container_yaml(tmpdir, platform_exclude, platform_only)

    tasker, workflow = prepare(tmpdir)

    build_json = {'metadata': {'labels': {}}}
    flexmock(util).should_receive('get_build_json').and_return(build_json)

    # Koji session / config are fully mocked; no hub is contacted.
    session = mock_session(platforms)
    mock_koji_config = {
        'auth': {},
        'hub_url': 'test',
    }
    flexmock(reactor_config).should_receive('get_koji').and_return(mock_koji_config)
    flexmock(koji_util).should_receive('create_koji_session').and_return(session)

    mock_config = MockConfig(platforms)
    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
        'args': {'koji_target': KOJI_TARGET},
    }])

    plugin_result = runner.run()
    if platforms:
        koji_msg = "Koji platforms are {0}".format(sorted(platforms.split()))
        assert koji_msg in caplog.text
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
    else:
        # No platforms on the target: plugin returns None and logs why.
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] is None
        assert "No platforms found in koji target" in caplog.text
def test_add_filesystem_plugin_generated(tmpdir, docker_tasker, scratch):
    """With an explicit from_task_id, the plugin reuses that filesystem task."""
    if MOCK:
        mock_docker()

    dockerfile = dedent("""\
        FROM koji/image-build
        RUN dnf install -y python-django
        """)
    workflow = mock_workflow(tmpdir, dockerfile)
    task_id = FILESYSTEM_TASK_ID
    mock_koji_session(scratch=scratch)
    mock_image_build_file(str(tmpdir))

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': PLUGIN_ADD_FILESYSTEM_KEY,
        'args': {
            'koji_hub': KOJI_HUB,
            'from_task_id': task_id,
            'architecture': 'x86_64'
        }
    }])

    expected_results = {
        'base-image-id': IMPORTED_IMAGE_ID,
        'filesystem-koji-task-id': FILESYSTEM_TASK_ID,
    }
    results = runner.run()
    plugin_result = results[PLUGIN_ADD_FILESYSTEM_KEY]
    assert 'base-image-id' in plugin_result
    assert 'filesystem-koji-task-id' in plugin_result
    assert plugin_result == expected_results
def test_check_and_set_platforms_no_koji(tmpdir, caplog, platforms, platform_only, result):
    """Without a koji target, user-specified platforms are used, or the run fails."""
    write_container_yaml(tmpdir, platform_only=platform_only)

    tasker, workflow = prepare(tmpdir)

    if platforms:
        set_orchestrator_platforms(workflow, platforms.split())

    build_json = {'metadata': {'labels': {}}}
    flexmock(util).should_receive('get_build_json').and_return(build_json)

    mock_config = MockConfig(platforms)
    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
    }])

    if platforms:
        plugin_result = runner.run()
        # Build up the message to avoid wrapping
        no_koji_msg = "No koji platforms. "
        platform_msg = "User specified platforms are {0}".format(sorted(platforms.split()))
        user_msg = no_koji_msg + platform_msg
        assert user_msg in caplog.text
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
    else:
        with pytest.raises(Exception) as e:
            runner.run()
        # str(e) on ExceptionInfo includes the raise location rather than the
        # message alone; check the actual exception via e.value.
        assert "no koji target or platform list" in str(e.value)
def test_distgit_fetch_artefacts_plugin(tmpdir, docker_tasker):
    """Plugin runs the given command in the source dir and restores cwd.

    NOTE(review): same name as earlier test_distgit_fetch_artefacts_plugin
    definitions; if these share a module, only the last is collected.
    """
    command = 'fedpkg sources'
    expected_command = ['fedpkg', 'sources']

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X()
    workflow.source = flexmock(path=str(tmpdir))

    initial_dir = os.getcwd()
    assert initial_dir != str(tmpdir)

    def assert_tmpdir(*args, **kwargs):
        # Replacement for subprocess.check_call: assert cwd at call time.
        assert os.getcwd() == str(tmpdir)

    (flexmock(pre_pyrpkg_fetch_artefacts.subprocess)
        .should_receive('check_call')
        .with_args(expected_command)
        .replace_with(assert_tmpdir)
        .once())

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': DistgitFetchArtefactsPlugin.key,
            'args': {'command': command}
        }]
    )
    runner.run()

    # cwd is restored after the plugin finishes.
    assert os.getcwd() == initial_dir
def test_platforms_from_cluster_config(tmpdir, platforms, platform_only, cluster_platforms,
                                       result):
    """User platforms are intersected with the cluster config's platforms."""
    write_container_yaml(tmpdir, platform_only=platform_only)

    tasker, workflow = prepare(tmpdir)

    if platforms:
        set_orchestrator_platforms(workflow, platforms.split())

    build_json = {'metadata': {'labels': {}}}
    flexmock(util).should_receive('get_build_json').and_return(build_json)

    # Cluster config advertises its own platform list.
    mock_config = MockConfig(cluster_platforms)
    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
    }])

    plugin_result = runner.run()
    if platforms:
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
    else:
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] is None
def test_fetch_maven_artifacts(tmpdir, docker_tasker, reactor_config_map):
    """Happy path: all NVR archives and remote files are downloaded."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_nvr(str(tmpdir))
    mock_fetch_artifacts_by_url(str(tmpdir))
    mock_nvr_downloads()
    mock_url_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': FetchMavenArtifactsPlugin.key,
        'args': {
            'koji_hub': KOJI_HUB,
            'koji_root': KOJI_ROOT
        }
    }])

    results = runner.run()
    plugin_result = results[FetchMavenArtifactsPlugin.key]
    # One download per archive plus one per remote file.
    assert len(
        plugin_result) == len(DEFAULT_ARCHIVES) + len(DEFAULT_REMOTE_FILES)
    for download in plugin_result:
        dest = os.path.join(str(tmpdir), FetchMavenArtifactsPlugin.DOWNLOAD_DIR,
                            download.dest)
        assert os.path.exists(dest)
def test_pull_base_image_plugin(df_base, parent_registry, expected_w_reg, expected_wo_reg):
    """PullBaseImagePlugin makes the base image available under the right names."""
    if MOCK:
        mock_docker(remember_images=True)

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = MockBuilder()
    workflow.builder.base_image = ImageName.parse(df_base)

    # Precondition: neither tag exists before the plugin runs.
    assert not tasker.image_exists(BASE_IMAGE)
    assert not tasker.image_exists(BASE_IMAGE_W_REGISTRY)

    runner = PreBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': PullBaseImagePlugin.key,
            'args': {'parent_registry': parent_registry,
                     'parent_registry_insecure': True}
        }]
    )

    runner.run()

    assert tasker.image_exists(BASE_IMAGE) == expected_wo_reg
    assert tasker.image_exists(BASE_IMAGE_W_REGISTRY) == expected_w_reg

    # Best-effort cleanup; a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit, so catch only Exception.
    try:
        tasker.remove_image(BASE_IMAGE)
        tasker.remove_image(BASE_IMAGE_W_REGISTRY)
    except Exception:
        pass
def test_fetch_maven_artifacts_url_bad_checksum(tmpdir, docker_tasker, reactor_config_map):
    """Err when downloaded remote file has unexpected checksum."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_url(str(tmpdir))
    # Corrupt one remote file so its checksum no longer matches.
    mock_url_downloads(
        overrides={REMOTE_FILE_SPAM['url']: {
            'body': 'corrupted-file'
        }})

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': FetchMavenArtifactsPlugin.key,
        'args': {
            'koji_hub': KOJI_HUB,
            'koji_root': KOJI_ROOT
        }
    }])

    with pytest.raises(PluginFailedException) as e:
        runner.run()

    # Check the exception's own message (str(e) adds the raise location).
    assert 'does not match expected checksum' in str(e.value)
def run_plugin_with_args(self, workflow, plugin_args=None, reactor_config_map=False,  # noqa
                         organization=None, base_from_scratch=False, custom_base_image=False):
    """Run InjectParentImage and verify its result.

    Returns None implicitly; assertions cover the plugin result and the
    images scheduled for removal.
    """
    plugin_args = plugin_args or {}
    plugin_args.setdefault('koji_parent_build', KOJI_BUILD_ID)
    plugin_args.setdefault('koji_hub', KOJI_HUB)

    if reactor_config_map:
        koji_map = {
            'hub_url': KOJI_HUB,
            'root_url': '',
            'auth': {}}
        if 'koji_ssl_certs_dir' in plugin_args:
            koji_map['auth']['ssl_certs_dir'] = plugin_args['koji_ssl_certs_dir']
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig({'version': 1,
                           'koji': koji_map,
                           'registries_organization': organization})

    runner = PreBuildPluginsRunner(
        workflow.builder.tasker,
        workflow,
        [{'name': InjectParentImage.key, 'args': plugin_args}]
    )

    result = runner.run()
    if base_from_scratch or custom_base_image:
        # Scratch/custom bases have no parent to inject.
        assert result[InjectParentImage.key] is None
    else:
        # Koji build ID is always used, even when NVR is given.
        assert result[InjectParentImage.key] == KOJI_BUILD_ID
        self.assert_images_to_remove(workflow)
def test_platforms_from_cluster_config(tmpdir, platforms, platform_only, cluster_platforms,
                                       result):
    """User platforms are intersected with the cluster config's platforms.

    NOTE(review): duplicates an earlier test_platforms_from_cluster_config;
    if both live in one module, only the last definition is collected.
    """
    write_container_yaml(tmpdir, platform_only=platform_only)

    tasker, workflow = prepare(tmpdir)

    if platforms:
        set_orchestrator_platforms(workflow, platforms.split())

    build_json = {'metadata': {'labels': {}}}
    flexmock(util).should_receive('get_build_json').and_return(build_json)

    mock_config = MockConfig(cluster_platforms)
    flexmock(reactor_config).should_receive('get_config').and_return(
        mock_config)

    runner = PreBuildPluginsRunner(
        tasker, workflow, [{
            'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
        }])

    plugin_result = runner.run()
    if platforms:
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
    else:
        assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] is None
def test_adddockerfile_nvr_from_labels(tmpdir, docker_tasker):
    """Without an explicit nvr arg, the plugin derives it from LABEL values."""
    content = """
FROM fedora
RUN yum install -y python-django
LABEL Name="jboss-eap-6-docker" "Version"="6.4" "Release"=77
CMD blabla"""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = content

    build_workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    build_workflow.builder = X
    build_workflow.builder.df_path = dockerfile.dockerfile_path
    build_workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [{'name': AddDockerfilePlugin.key}]
    PreBuildPluginsRunner(docker_tasker, build_workflow, plugin_conf).run()

    assert AddDockerfilePlugin.key is not None
    # NVR assembled from the Name/Version/Release labels.
    assert "ADD Dockerfile-jboss-eap-6-docker-6.4-77 /root/buildinfo/Dockerfile-jboss-eap-6-docker-6.4-77" in dockerfile.content
def test_adddockerfile_nvr_from_labels2(tmpdir, docker_tasker, workflow):  # noqa
    """Labels added by AddLabelsPlugin feed the NVR used by AddDockerfilePlugin."""
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content

    if MOCK:
        mock_docker()

    prepare(workflow, df.dockerfile_path)
    # Base image reports no labels, so the NVR must come from AddLabelsPlugin.
    workflow.builder.set_inspection_data({INSPECT_CONFIG: {"Labels": {}}})

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': AddLabelsPlugin.key,
        'args': {
            'labels': {
                'Name': 'jboss-eap-6-docker',
                'Version': '6.4',
                'Release': '77'
            },
            'auto_labels': []
        }
    }, {
        'name': AddDockerfilePlugin.key
    }])
    runner.run()
    assert AddDockerfilePlugin.key is not None

    assert "ADD Dockerfile-jboss-eap-6-docker-6.4-77 /root/buildinfo/Dockerfile-jboss-eap-6-docker-6.4-77" in df.content  # noqa
def test_adddockerfile_nvr_from_labels2(tmpdir, docker_tasker):
    """AddLabelsPlugin-provided labels feed the NVR used by AddDockerfilePlugin.

    NOTE(review): duplicates an earlier test_adddockerfile_nvr_from_labels2;
    if both live in one module, only the last definition is collected.
    """
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    # Base image reports no labels; NVR must come from the labels plugin.
    flexmock(workflow, base_image_inspect={INSPECT_CONFIG: {"Labels": {}}})
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AddLabelsPlugin.key,
            'args': {'labels': {'Name': 'jboss-eap-6-docker',
                                'Version': '6.4',
                                'Release': '77'},
                     'auto_labels': []}
        }, {
            'name': AddDockerfilePlugin.key
        }]
    )
    runner.run()
    assert AddDockerfilePlugin.key is not None

    assert "ADD Dockerfile-jboss-eap-6-docker-6.4-77 /root/buildinfo/Dockerfile-jboss-eap-6-docker-6.4-77" in df.content
def test_add_filesystem_plugin_generated(tmpdir, docker_tasker, scratch):
    """Explicit from_task_id + architecture variant of the filesystem plugin test.

    NOTE(review): duplicates an earlier test_add_filesystem_plugin_generated;
    if both live in one module, only the last definition is collected.
    """
    if MOCK:
        mock_docker()

    dockerfile = dedent("""\
        FROM koji/image-build
        RUN dnf install -y python-django
        """)
    workflow = mock_workflow(tmpdir, dockerfile)
    task_id = FILESYSTEM_TASK_ID
    mock_koji_session(scratch=scratch)
    mock_image_build_file(str(tmpdir))

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': PLUGIN_ADD_FILESYSTEM_KEY,
            'args': {
                'koji_hub': KOJI_HUB,
                'from_task_id': task_id,
                'architecture': 'x86_64'
            }
        }]
    )

    expected_results = {
        'base-image-id': IMPORTED_IMAGE_ID,
        'filesystem-koji-task-id': FILESYSTEM_TASK_ID,
    }
    results = runner.run()
    plugin_result = results[PLUGIN_ADD_FILESYSTEM_KEY]
    assert 'base-image-id' in plugin_result
    assert 'filesystem-koji-task-id' in plugin_result
    assert plugin_result == expected_results
def test_adddockerfile_plugin(tmpdir, docker_tasker):
    """AddDockerfilePlugin injects an ADD line with the given NVR before CMD."""
    original_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = original_content

    build_workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    build_workflow.builder = X
    build_workflow.builder.df_path = dockerfile.dockerfile_path
    build_workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [{
        'name': AddDockerfilePlugin.key,
        'args': {'nvr': 'rhel-server-docker-7.1-20'}
    }]
    PreBuildPluginsRunner(docker_tasker, build_workflow, plugin_conf).run()

    assert AddDockerfilePlugin.key is not None

    expected_output = """
FROM fedora
RUN yum install -y python-django
ADD Dockerfile-rhel-server-docker-7.1-20 /root/buildinfo/Dockerfile-rhel-server-docker-7.1-20
CMD blabla"""
    assert dockerfile.content == expected_output
def test_fetch_maven_artifacts_nvr_filtering(tmpdir, docker_tasker, nvr_requests, expected,
                                             reactor_config_map):
    """Test filtering of archives in a Koji build."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_nvr(str(tmpdir), contents=yaml.safe_dump(nvr_requests))
    mock_nvr_downloads(archives=expected)

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {'koji_hub': KOJI_HUB, 'koji_root': KOJI_ROOT}
        }]
    )

    results = runner.run()
    plugin_result = results[FetchMavenArtifactsPlugin.key]
    assert len(plugin_result) == len(expected)
    for download in plugin_result:
        # Each download carries exactly one checksum entry.
        assert len(download.checksums.values()) == 1
    # Downloaded checksums must match exactly the expected archive checksums.
    assert (set(list(download.checksums.values())[0] for download in plugin_result) ==
            set(expectation['checksum'] for expectation in expected))
    for download in plugin_result:
        dest = os.path.join(str(tmpdir), FetchMavenArtifactsPlugin.DOWNLOAD_DIR,
                            download.dest)
        assert os.path.exists(dest)
def test_adddockerfile_todest(tmpdir, docker_tasker):
    """AddDockerfilePlugin must ADD the Dockerfile into the requested destdir."""
    original_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = original_content

    build_workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    build_workflow.builder = X
    build_workflow.builder.df_path = dockerfile.dockerfile_path
    build_workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [{
        'name': AddDockerfilePlugin.key,
        'args': {'nvr': 'jboss-eap-6-docker-6.4-77',
                 'destdir': '/usr/share/doc/'}
    }]
    PreBuildPluginsRunner(docker_tasker, build_workflow, plugin_conf).run()

    assert AddDockerfilePlugin.key is not None

    expected_output = """
FROM fedora
RUN yum install -y python-django
ADD Dockerfile-jboss-eap-6-docker-6.4-77 /usr/share/doc/Dockerfile-jboss-eap-6-docker-6.4-77
CMD blabla"""
    assert dockerfile.content == expected_output
def test_fetch_maven_artifacts_commented_out_files(tmpdir, docker_tasker, reactor_config_map):
    """Fully commented-out artifact files yield no downloads and no download dir."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    contents = dedent("""\
        # This file
        # is completely
        # and absolutely
        # commented out!
        """)
    mock_fetch_artifacts_by_nvr(str(tmpdir), contents=contents)
    mock_fetch_artifacts_by_url(str(tmpdir), contents=contents)
    mock_nvr_downloads()
    mock_url_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {'koji_hub': KOJI_HUB, 'koji_root': KOJI_ROOT}
        }]
    )

    results = runner.run()
    plugin_result = results[FetchMavenArtifactsPlugin.key]
    assert len(plugin_result) == 0
    # The download dir should not even be created when nothing is fetched.
    artifacts_dir = os.path.join(str(tmpdir), FetchMavenArtifactsPlugin.DOWNLOAD_DIR)
    assert not os.path.exists(artifacts_dir)
def test_all_missing_required_labels(tmpdir, docker_tasker, caplog, df_content, req_labels):
    """AssertLabelsPlugin must fail and log which required labels are missing."""
    df = df_parser(str(tmpdir))
    df.content = df_content

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AssertLabelsPlugin.key,
            'args': {'required_labels': req_labels}
        }]
    )

    assert AssertLabelsPlugin.key is not None

    with pytest.raises(PluginFailedException):
        runner.run()

    error_msg = "Dockerfile is missing required labels: {0}".format(req_labels)
    # caplog.text is a property, not a callable -- calling it raised TypeError
    # instead of checking the captured log.
    assert error_msg in caplog.text
def test_image_download(tmpdir, docker_tasker, architecture, architectures, download_filesystem):
    """Filesystem image is imported only when this node should download it."""
    if MOCK:
        mock_docker()

    dockerfile = dedent("""\
        FROM koji/image-build
        RUN dnf install -y python-django
        """)
    workflow = mock_workflow(tmpdir, dockerfile)
    mock_koji_session(download_filesystem=download_filesystem)
    mock_image_build_file(str(tmpdir))

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': PLUGIN_ADD_FILESYSTEM_KEY,
        'args': {
            'koji_hub': KOJI_HUB,
            'architecture': architecture,
            'architectures': architectures,
        }
    }])

    results = runner.run()
    plugin_result = results[PLUGIN_ADD_FILESYSTEM_KEY]

    assert 'base-image-id' in plugin_result
    assert 'filesystem-koji-task-id' in plugin_result

    if download_filesystem:
        assert plugin_result['base-image-id'] == IMPORTED_IMAGE_ID
        assert plugin_result['filesystem-koji-task-id'] == FILESYSTEM_TASK_ID
    else:
        # Nothing downloaded: both results are present but empty.
        assert plugin_result['base-image-id'] is None
        assert plugin_result['filesystem-koji-task-id'] is None
def test_pull_base_image_plugin(df_base, parent_registry, expected_w_reg, expected_wo_reg):
    """PullBaseImagePlugin makes the base image available under the right names.

    NOTE(review): duplicates an earlier test_pull_base_image_plugin; if both
    live in one module, only the last definition is collected.
    """
    if MOCK:
        mock_docker(remember_images=True)

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = MockBuilder()
    workflow.builder.base_image = ImageName.parse(df_base)

    # Precondition: neither tag exists before the plugin runs.
    assert not tasker.image_exists(BASE_IMAGE)
    assert not tasker.image_exists(BASE_IMAGE_W_REGISTRY)

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': PullBaseImagePlugin.key,
        'args': {
            'parent_registry': parent_registry,
            'parent_registry_insecure': True
        }
    }])

    runner.run()

    assert tasker.image_exists(BASE_IMAGE) == expected_wo_reg
    assert tasker.image_exists(BASE_IMAGE_W_REGISTRY) == expected_w_reg

    # Best-effort cleanup; a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit, so catch only Exception.
    try:
        tasker.remove_image(BASE_IMAGE)
        tasker.remove_image(BASE_IMAGE_W_REGISTRY)
    except Exception:
        pass
def test_add_labels_equal_aliases(tmpdir, docker_tasker, caplog, base_l, df_l, expected,
                                  expected_log, reactor_config_map):
    """equal_labels must keep 'description' and 'io.k8s.description' in sync.

    base_l / df_l are (description, io.k8s.description) pairs for the base
    image and the Dockerfile respectively; `expected` is the resulting pair.
    """
    if MOCK:
        mock_docker()

    df_content = "FROM fedora\n"
    plugin_labels = {}
    if df_l[0]:
        df_content += 'LABEL description="{0}"\n'.format(df_l[0])
    if df_l[1]:
        df_content += 'LABEL io.k8s.description="{0}"\n'.format(df_l[1])

    base_labels = {INSPECT_CONFIG: {"Labels": {}}}
    if base_l[0]:
        base_labels[INSPECT_CONFIG]["Labels"]["description"] = base_l[0]
    if base_l[1]:
        base_labels[INSPECT_CONFIG]["Labels"]["io.k8s.description"] = base_l[1]

    df = df_parser(str(tmpdir))
    df.content = df_content

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    setattr(workflow.builder, 'base_image_inspect', base_labels)

    if reactor_config_map:
        make_and_store_reactor_config_map(
            workflow, {
                'image_labels': plugin_labels,
                'image_equal_labels': [['description', 'io.k8s.description']]})

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AddLabelsPlugin.key,
            'args': {
                'labels': plugin_labels,
                'dont_overwrite': [],
                'auto_labels': [],
                'aliases': {},
                'equal_labels': [['description', 'io.k8s.description']]
            }
        }]
    )

    runner.run()
    assert AddLabelsPlugin.key is not None

    # Dockerfile labels win; fall back to the base image's labels.
    result_fst = df.labels.get("description") or \
        base_labels[INSPECT_CONFIG]["Labels"].get("description")
    result_snd = df.labels.get("io.k8s.description") or \
        base_labels[INSPECT_CONFIG]["Labels"].get("io.k8s.description")
    assert result_fst == expected[0]
    assert result_snd == expected[1]

    if expected_log:
        assert expected_log in caplog.text
def test_fetch_maven_artifacts(tmpdir, docker_tasker, reactor_config_map):
    """Happy path: all NVR archives and remote files are downloaded.

    NOTE(review): duplicates an earlier test_fetch_maven_artifacts; if both
    live in one module, only the last definition is collected.
    """
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_nvr(str(tmpdir))
    mock_fetch_artifacts_by_url(str(tmpdir))
    mock_nvr_downloads()
    mock_url_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {'koji_hub': KOJI_HUB, 'koji_root': KOJI_ROOT}
        }]
    )

    results = runner.run()
    plugin_result = results[FetchMavenArtifactsPlugin.key]
    # One download per archive plus one per remote file.
    assert len(plugin_result) == len(DEFAULT_ARCHIVES) + len(DEFAULT_REMOTE_FILES)
    for download in plugin_result:
        dest = os.path.join(str(tmpdir), FetchMavenArtifactsPlugin.DOWNLOAD_DIR,
                            download.dest)
        assert os.path.exists(dest)
def test_distgit_fetch_artefacts_failure(tmpdir, docker_tasker):
    """Even when the sources command fails, the plugin must restore cwd.

    NOTE(review): duplicates an earlier test_distgit_fetch_artefacts_failure;
    if both live in one module, only the last definition is collected.
    """
    command = 'fedpkg sources'
    expected_command = ['fedpkg', 'sources']

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X()
    workflow.source = flexmock(path=str(tmpdir))

    initial_dir = os.getcwd()
    assert initial_dir != str(tmpdir)

    # Force the command to fail when invoked.
    (flexmock(pre_pyrpkg_fetch_artefacts.subprocess)
        .should_receive('check_call')
        .with_args(expected_command)
        .and_raise(RuntimeError)
        .once())

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': DistgitFetchArtefactsPlugin.key,
            'args': {'command': command}
        }]
    )
    with pytest.raises(PluginFailedException) as exc:
        runner.run()

    # cwd is restored even on failure.
    assert os.getcwd() == initial_dir
def test_labels_from_user_params(workflow):
    """The 'release' user param surfaces as a 'release' label on the plugin."""
    workflow.user_params["release"] = "42"

    runner = PreBuildPluginsRunner(workflow, [])
    plugin = runner.create_instance_from_plugin(AddLabelsPlugin, {})

    expected_labels = {"release": "42"}
    assert plugin.labels == expected_labels
def test_fetch_maven_artifacts_nvr_no_match(tmpdir, docker_tasker, nvr_requests, error_msg,
                                            reactor_config_map):
    """Err when a requested archive is not found in Koji build."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_nvr(str(tmpdir), contents=yaml.safe_dump(nvr_requests))
    mock_nvr_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {'koji_hub': KOJI_HUB, 'koji_root': KOJI_ROOT}
        }]
    )

    with pytest.raises(PluginFailedException) as e:
        runner.run()

    # inspect the raised exception's message, not the ExceptionInfo wrapper:
    # str(e) is the ExceptionInfo representation, str(e.value) is the message
    assert 'failed to find archives' in str(e.value)
    assert error_msg in str(e.value)
def test_fetch_maven_artifacts_nvr_no_match(tmpdir, docker_tasker, nvr_requests, error_msg,
                                            reactor_config_map):
    """Err when a requested archive is not found in Koji build."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_nvr(str(tmpdir), contents=yaml.safe_dump(nvr_requests))
    mock_nvr_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': FetchMavenArtifactsPlugin.key,
        'args': {
            'koji_hub': KOJI_HUB,
            'koji_root': KOJI_ROOT
        }
    }])

    with pytest.raises(PluginFailedException) as e:
        runner.run()

    # check the actual exception message via e.value, not the ExceptionInfo repr
    assert 'failed to find archives' in str(e.value)
    assert error_msg in str(e.value)
def test_fetch_maven_artifacts_nvr_bad_nvr(tmpdir, docker_tasker, reactor_config_map):
    """Err when given nvr is not a valid build in Koji."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    contents = dedent("""\
        - nvr: where-is-this-build-3.0-2
        """)
    mock_fetch_artifacts_by_nvr(str(tmpdir), contents=contents)
    mock_nvr_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {'koji_hub': KOJI_HUB, 'koji_root': KOJI_ROOT}
        }]
    )

    with pytest.raises(PluginFailedException) as e:
        runner.run()

    # check the actual exception message via e.value, not the ExceptionInfo repr
    assert 'Build where-is-this-build-3.0-2 not found' in str(e.value)
def test_fetch_maven_artifacts_url_with_target(tmpdir, docker_tasker, reactor_config_map):
    """Remote file is downloaded into specified filename."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    remote_files = [REMOTE_FILE_WITH_TARGET]
    mock_fetch_artifacts_by_url(str(tmpdir), contents=yaml.safe_dump(remote_files))
    mock_url_downloads(remote_files)

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    plugin_conf = {
        'name': FetchMavenArtifactsPlugin.key,
        'args': {
            'koji_hub': KOJI_HUB,
            'koji_root': KOJI_ROOT
        }
    }
    runner = PreBuildPluginsRunner(docker_tasker, workflow, [plugin_conf])
    plugin_result = runner.run()[FetchMavenArtifactsPlugin.key]

    assert len(plugin_result) == len(remote_files)
    download = plugin_result[0]
    dest = os.path.join(str(tmpdir), FetchMavenArtifactsPlugin.DOWNLOAD_DIR, download.dest)
    assert os.path.exists(dest)
    # the file is saved under the 'target' name, not the URL basename
    assert download.dest == REMOTE_FILE_WITH_TARGET['target']
    assert not REMOTE_FILE_WITH_TARGET['url'].endswith(download.dest)
def test_fetch_maven_artifacts_url_with_target(tmpdir, docker_tasker, reactor_config_map):
    """Remote file is downloaded into specified filename."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    remote_files = [REMOTE_FILE_WITH_TARGET]
    mock_fetch_artifacts_by_url(str(tmpdir), contents=yaml.safe_dump(remote_files))
    mock_url_downloads(remote_files)

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {'koji_hub': KOJI_HUB, 'koji_root': KOJI_ROOT}
        }]
    )
    results = runner.run()
    downloads = results[FetchMavenArtifactsPlugin.key]

    assert len(downloads) == len(remote_files)
    first_download = downloads[0]
    downloaded_path = os.path.join(str(tmpdir),
                                   FetchMavenArtifactsPlugin.DOWNLOAD_DIR,
                                   first_download.dest)
    assert os.path.exists(downloaded_path)
    # destination honours the explicit 'target', which differs from the URL name
    assert first_download.dest == REMOTE_FILE_WITH_TARGET['target']
    assert not REMOTE_FILE_WITH_TARGET['url'].endswith(first_download.dest)
def test_skip_plugin(self, caplog, target, yum_repos, include_repo):
    """Plugin is skipped (with the right log message) when user yum repos
    suppress the koji repo or when no target is given."""
    tasker, workflow = prepare()

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
        ReactorConfig({'version': 1})
    add_koji_map_in_workflow(workflow, hub_url='', root_url='http://example.com')

    workflow.user_params['include_koji_repo'] = include_repo
    workflow.user_params['yum_repourls'] = yum_repos

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': KojiPlugin.key,
        'args': {'target': target},
    }])
    runner.run()

    if yum_repos and not include_repo:
        expected_msg = 'there is a yum repo user parameter, skipping plugin'
    else:
        expected_msg = 'no target provided, skipping plugin'
    assert expected_msg in caplog.text
def test_fetch_maven_artifacts_url_allowed_domains(tmpdir, docker_tasker, domains, raises,
                                                   reactor_config_map):
    """Validate URL domain is allowed when fetching remote file."""
    workflow = mock_workflow(tmpdir)
    mock_koji_session()
    mock_fetch_artifacts_by_url(str(tmpdir))
    mock_url_downloads()

    if reactor_config_map:
        make_and_store_reactor_config_map(workflow, {'artifacts_allowed_domains': domains})

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FetchMavenArtifactsPlugin.key,
            'args': {
                'koji_hub': KOJI_HUB,
                'koji_root': KOJI_ROOT,
                'allowed_domains': domains,
            }
        }]
    )

    if raises:
        with pytest.raises(PluginFailedException) as e:
            runner.run()
        # check the actual exception message via e.value, not the ExceptionInfo repr
        assert 'is not in list of allowed domains' in str(e.value)
    else:
        results = runner.run()
        plugin_result = results[FetchMavenArtifactsPlugin.key]
        for download in plugin_result:
            dest = os.path.join(str(tmpdir), FetchMavenArtifactsPlugin.DOWNLOAD_DIR,
                                download.dest)
            assert os.path.exists(dest)
def test_check_and_set_platforms_no_koji(tmpdir, caplog, platforms, platform_only, result):
    """Without koji, platforms come from the user or the plugin must fail."""
    write_container_yaml(tmpdir, platform_only=platform_only)

    tasker, workflow = prepare(tmpdir)
    if platforms:
        set_orchestrator_platforms(workflow, platforms.split())

    build_json = {'metadata': {'labels': {}}}
    flexmock(util).should_receive('get_build_json').and_return(build_json)

    mock_config = MockConfig(platforms)
    flexmock(reactor_config).should_receive('get_config').and_return(mock_config)

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_CHECK_AND_SET_PLATFORMS_KEY,
    }])

    if not platforms:
        # no user platforms and no koji: the plugin must raise
        with pytest.raises(Exception) as e:
            runner.run()
        assert "no koji target or platform list" in str(e.value)
        return

    plugin_result = runner.run()

    # Build up the message to avoid wrapping
    no_koji_msg = "No koji platforms. "
    platform_msg = "User specified platforms are {0}".format(
        sorted(platforms.split()))
    user_msg = no_koji_msg + platform_msg

    assert user_msg in caplog.text
    assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY]
    assert plugin_result[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] == set(result)
def test_add_labels_plugin(tmpdir, labels_conf_base, labels_conf, dont_overwrite,
                           expected_output):
    """Labels are applied unless protected by dont_overwrite; bad configs raise."""
    df = DockerfileParser(str(tmpdir))
    df.content = DF_CONTENT

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, base_image_inspect=labels_conf_base)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': AddLabelsPlugin.key,
        'args': {'labels': labels_conf, "dont_overwrite": dont_overwrite}
    }])

    if isinstance(expected_output, RuntimeError):
        # parametrized error cases: the runner must propagate the failure
        with pytest.raises(RuntimeError):
            runner.run()
        return

    runner.run()
    assert AddLabelsPlugin.key is not None
    assert df.content in expected_output
def test_multiple_repourls(inject_proxy):
    """Each repo URL produces one repo file with the expected content."""
    tasker, workflow = prepare()
    url1 = 'http://example.com/a/b/c/myrepo.repo'
    filename1 = 'myrepo.repo'
    url2 = 'http://example.com/repo-2.repo'
    filename2 = 'repo-2.repo'

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': AddYumRepoByUrlPlugin.key,
        'args': {'repourls': [url1, url2], 'inject_proxy': inject_proxy}
    }])
    runner.run()

    if inject_proxy:
        # the proxy line is appended to the decoded repo content
        expected_content = '%sproxy = %s\n\n' % (repocontent.decode('utf-8'), inject_proxy)
    else:
        expected_content = repocontent

    for filename in (filename1, filename2):
        repo_path = os.path.join(YUM_REPOS_DIR, filename)
        assert workflow.files[repo_path]
        assert workflow.files[repo_path] == expected_content
    assert len(workflow.files) == 2
def test_multiple_repourls_no_suffix(inject_proxy, repos, patterns):
    """URLs without a .repo suffix get generated filenames matching patterns."""
    tasker, workflow = prepare()
    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': AddYumRepoByUrlPlugin.key,
        'args': {'repourls': repos, 'inject_proxy': inject_proxy}
    }])
    runner.run()

    if inject_proxy:
        expected_content = '%sproxy = %s\n\n' % (repocontent.decode('utf-8'), inject_proxy)
    else:
        expected_content = repocontent

    assert len(workflow.files) == 2
    for pattern in patterns:
        # find (and consume) exactly one written file matching this pattern
        for filename, content in workflow.files.items():
            if fnmatch(filename, os.path.join(YUM_REPOS_DIR, pattern)):
                assert content == expected_content  # only because they're all the same
                del workflow.files[filename]
                break
        else:
            raise RuntimeError("no filename in %s matching pattern %s"
                               % (list(workflow.files.keys()), pattern))
def test_add_labels_plugin_explicit(tmpdir, docker_tasker, auto_label, labels_docker,
                                    labels_base):
    """An explicitly supplied label wins over its auto-generated value."""
    df = df_parser(str(tmpdir))
    df.content = labels_docker
    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, source=MockSource())
    flexmock(workflow, base_image_inspect=labels_base)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    explicit_labels = {auto_label: 'explicit_value'}

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AddLabelsPlugin.key,
            'args': {'labels': explicit_labels,
                     "dont_overwrite": [],
                     "auto_labels": [auto_label],
                     'aliases': {'Build_Host': 'com.redhat.build-host'}}
        }]
    )
    runner.run()

    assert df.labels[auto_label] == 'explicit_value'
def run_plugin_with_args(
        self, workflow, plugin_args=None, expect_result=True,  # noqa
        reactor_config_map=False):
    """Run KojiParentPlugin with defaulted args and verify its result."""
    plugin_args = plugin_args or {}
    # fill in fast-polling defaults so tests stay quick
    plugin_args.setdefault('koji_hub', KOJI_HUB)
    plugin_args.setdefault('poll_interval', 0.01)
    plugin_args.setdefault('poll_timeout', 1)

    if reactor_config_map:
        koji_map = {'hub_url': KOJI_HUB, 'root_url': '', 'auth': {}}
        if 'koji_ssl_certs_dir' in plugin_args:
            koji_map['auth']['ssl_certs_dir'] = plugin_args['koji_ssl_certs_dir']
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig({'version': 1, 'koji': koji_map})

    runner = PreBuildPluginsRunner(workflow.builder.tasker, workflow, [{
        'name': KojiParentPlugin.key,
        'args': plugin_args
    }])
    result = runner.run()

    expected = {'parent-image-koji-build': KOJI_BUILD} if expect_result else None
    assert result[KojiParentPlugin.key] == expected
def test_adddockerfile_plugin(tmpdir, docker_tasker, workflow):  # noqa
    """Dockerfile is ADDed under the default /root/buildinfo destination."""
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content

    prepare(workflow, df.dockerfile_path)

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': AddDockerfilePlugin.key,
        'args': {'nvr': 'rhel-server-docker-7.1-20'}
    }])
    runner.run()
    assert AddDockerfilePlugin.key is not None

    # ADD line is inserted before CMD, pointing at the default destdir
    expected_output = """
FROM fedora
RUN yum install -y python-django
ADD Dockerfile-rhel-server-docker-7.1-20 /root/buildinfo/Dockerfile-rhel-server-docker-7.1-20
CMD blabla"""
    assert df.content == expected_output
def test_add_labels_plugin(tmpdir, docker_tasker, df_content, labels_conf_base, labels_conf,
                           dont_overwrite, aliases, expected_output, caplog):
    """Labels and aliases are applied; plugin errors surface in the log."""
    df = df_parser(str(tmpdir))
    df.content = df_content
    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, base_image_inspect=labels_conf_base)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': AddLabelsPlugin.key,
        'args': {
            'labels': labels_conf,
            'dont_overwrite': dont_overwrite,
            'auto_labels': [],
            'aliases': aliases,
        }
    }])
    runner.run()

    if isinstance(expected_output, RuntimeError):
        # caplog.text is a property in pytest; calling it as a method fails.
        # Other tests in this file already use the property form.
        assert "plugin 'add_labels_in_dockerfile' raised an exception: RuntimeError" \
            in caplog.text
    else:
        assert AddLabelsPlugin.key is not None
        assert df.content in expected_output
def test_multiple_repourls(caplog, base_from_scratch, parent_images, inject_proxy, repos,
                           filenames):
    """Repo injection writes all files, but is skipped for FROM-scratch-only images."""
    tasker, workflow = prepare()

    dockerfile_images = []
    if parent_images:
        dockerfile_images.append('parent_image:latest')
    if base_from_scratch:
        dockerfile_images.append('scratch')
    workflow.builder.set_dockerfile_images(dockerfile_images)

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': AddYumRepoByUrlPlugin.key,
        'args': {'repourls': repos, 'inject_proxy': inject_proxy}}])
    runner.run()

    if base_from_scratch and not parent_images:
        # scratch-only build: nothing must be written and the skip is logged
        assert AddYumRepoByUrlPlugin.key is not None
        assert workflow.files == {}
        log_msg = "Skipping add yum repo by url: unsupported for FROM-scratch images"
        assert log_msg in caplog.text
        return

    expected_content = repocontent
    if inject_proxy:
        expected_content = '%sproxy = %s\n\n' % (repocontent, inject_proxy)

    for filename in filenames:
        repo_path = os.path.join(YUM_REPOS_DIR, filename)
        assert workflow.files[repo_path]
        assert workflow.files[repo_path] == expected_content
    assert len(workflow.files) == 2
def test_multistage_dockerfiles(name, inherited_user, dockerfile, expect_cleanup_lines,
                                base_from_scratch, tmpdir):
    """Verify InjectYumRepoPlugin rewrites multistage Dockerfiles correctly.

    The parametrized `dockerfile` marks the spots where repo ADD instructions
    are expected with "### ADD HERE" marker lines; the plugin-produced file
    must have an ADD at each marker position, followed by cleanup lines.
    """
    # expect repo ADD instructions where indicated in the content, and RUN rm at the end.
    # begin by splitting on "### ADD HERE" so we know where to expect changes.
    segments = re.split(r'^.*ADD HERE.*$\n?', dockerfile, flags=re.M)
    segment_lines = [seg.splitlines(True) for seg in segments]

    # build expected contents by manually inserting expected ADD lines between the segments
    # (every segment boundary except the last corresponds to one marker)
    for lines in segment_lines[:-1]:
        lines.append("ADD %s* '/etc/yum.repos.d/'\n" % RELATIVE_REPOS_PATH)
    expected_lines = list(itertools.chain.from_iterable(segment_lines))  # flatten lines

    # now run the plugin to transform the given dockerfile
    df = df_parser(str(tmpdir))
    df.content = ''.join(segments)  # dockerfile without the "### ADD HERE" lines
    tasker, workflow = prepare(df.dockerfile_path, inherited_user)
    workflow.builder.set_base_from_scratch(base_from_scratch)
    repo_file = 'myrepo.repo'
    repo_path = os.path.join(YUM_REPOS_DIR, repo_file)
    workflow.files[repo_path] = repocontent
    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': InjectYumRepoPlugin.key,
        'args': {}}])
    runner.run()

    # assert the Dockerfile has changed as expected up to the cleanup lines
    new_df = df.lines
    assert new_df[:len(expected_lines)] == expected_lines

    # the rest of the lines should be cleanup lines
    cleanup_lines = new_df[len(expected_lines):]
    assert remove_lines_match(cleanup_lines, expect_cleanup_lines, [repo_file])