def test_get_methods(self, fallback, method):
    """Exercise the get_* accessors of pre_reactor_config against REACTOR_CONFIG_MAP.

    fallback is tri-state: False -> config is read from the workflow
    workspace; True -> config is passed as an explicit fallback argument;
    any other value (e.g. None) -> no config anywhere, so the accessor is
    expected to raise KeyError.
    """
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    if fallback is False:
        # Config comes from the workspace; no fallback argument is used.
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(yaml.safe_load(REACTOR_CONFIG_MAP))
    else:
        if fallback:
            fall_source = ReactorConfig(yaml.safe_load(REACTOR_CONFIG_MAP))
        else:
            # Minimal config: the accessors have nothing to return.
            fall_source = ReactorConfig(yaml.safe_load("version: 1"))
    method_name = 'get_' + method
    real_method = getattr(atomic_reactor.plugins.pre_reactor_config, method_name)
    if fallback is True:
        output = real_method(workflow, fall_source.conf[method])
    else:
        if fallback is False:
            output = real_method(workflow)
        else:
            # Neither workspace config nor fallback: the accessor must raise.
            with pytest.raises(KeyError):
                real_method(workflow)
            return
    expected = yaml.safe_load(REACTOR_CONFIG_MAP)[method]
    if method == 'registries':
        # get_registries returns a dict keyed by docker URI, not the raw list.
        registries_cm = {}
        for registry in expected:
            reguri = RegistryURI(registry.get('url'))
            regdict = {}
            regdict['version'] = reguri.version
            if registry.get('auth'):
                regdict['secret'] = registry['auth']['cfg_path']
            regdict['insecure'] = registry.get('insecure', False)
            regdict['expected_media_types'] = registry.get(
                'expected_media_types', [])
            registries_cm[reguri.docker_uri] = regdict
        if fallback:
            output = real_method(workflow, registries_cm)
        assert output == registries_cm
        return
    if method == 'source_registry':
        # get_source_registry wraps the configured URL in a RegistryURI.
        expect = {
            'uri': RegistryURI(expected['url']),
            'insecure': expected.get('insecure', False)
        }
        if fallback:
            output = real_method(workflow, expect)
        assert output['insecure'] == expect['insecure']
        assert output['uri'].uri == expect['uri'].uri
        return
    assert output == expected
def run_plugin_with_args(self, workflow, plugin_args=None, expect_error=None,
                         reactor_config_map=False,
                         platforms=ODCS_COMPOSE_DEFAULT_ARCH_LIST, is_pulp=None):
    """Run ResolveComposesPlugin and verify its compose/yum-repo results.

    :param plugin_args: extra plugin arguments; odcs_url/koji_target/koji_hub
                        defaults are filled in (note: the caller's dict is
                        mutated via setdefault)
    :param expect_error: when set, the run must fail with this message and
                         nothing else is checked
    :param platforms: platforms whose per-platform yum repo overrides are
                      checked (shared module-level default list; it is only
                      iterated, never mutated here)
    :param is_pulp: when truthy, also expect a per-platform pulp compose repo
    :return: the plugin's results, or None when an error was expected
    """
    plugin_args = plugin_args or {}
    plugin_args.setdefault('odcs_url', ODCS_URL)
    plugin_args.setdefault('koji_target', KOJI_TARGET_NAME)
    plugin_args.setdefault('koji_hub', KOJI_HUB)
    # Copy the existing reactor config so the koji section can be grafted on
    # without mutating shared state.
    reactor_conf =\
        deepcopy(workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY].conf)
    if reactor_config_map:
        reactor_conf['koji'] = {
            'hub_url': KOJI_HUB,
            'root_url': '',
            'auth': {}
        }
        if 'koji_ssl_certs_dir' in plugin_args:
            reactor_conf['koji']['auth']['ssl_certs_dir'] = plugin_args[
                'koji_ssl_certs_dir']
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig(reactor_conf)
    runner = PreBuildPluginsRunner(workflow.builder.tasker, workflow, [
        {
            'name': ResolveComposesPlugin.key,
            'args': plugin_args
        },
    ])
    if expect_error:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert expect_error in str(exc_info.value)
        return
    results = runner.run()[ResolveComposesPlugin.key]
    if results:
        for platform in platforms or []:
            yum_repourls = self.get_override_yum_repourls(
                workflow, platform)
            # Koji tag compose is present in each one
            assert ODCS_COMPOSE['result_repofile'] in yum_repourls
            if is_pulp:
                pulp_repo = ODCS_COMPOSE_REPO + '/pulp_compose-' + platform
                assert pulp_repo in yum_repourls
        # The platform-less override is only used when no platforms are given.
        yum_repourls = self.get_override_yum_repourls(workflow, None)
        if platforms:
            assert yum_repourls is None
        else:
            assert ODCS_COMPOSE['result_repofile'] in yum_repourls
        assert set(results.keys()) == set(
            ['signing_intent', 'signing_intent_overridden', 'composes'])
    else:
        assert self.get_override_yum_repourls(workflow) is None
        assert results is None
    return results
def test_skip_plugin(tmpdir, caplog, docker_tasker, reactor_config_map, user_params):
    """The flatpak Dockerfile plugin must no-op (and log) for non-flatpak builds."""
    flatpak_base = "registry.fedoraproject.org/fedora:latest"
    wf = mock_workflow(tmpdir, "", user_params={})
    if reactor_config_map:
        conf = ReactorConfig({'version': 1,
                              'flatpak': {'base_image': flatpak_base}})
        wf.plugin_workspace[ReactorConfigPlugin.key] = {WORKSPACE_CONF_KEY: conf}
    plugin_conf = [{'name': FlatpakCreateDockerfilePlugin.key,
                    'args': {'base_image': flatpak_base}}]
    runner = PreBuildPluginsRunner(docker_tasker, wf, plugin_conf)
    runner.run()
    assert 'not flatpak build, skipping plugin' in caplog.text
def test_skip_plugin(self, caplog, target, yum_repos, include_repo):
    """KojiPlugin skips itself when yum repos are supplied or no target is set."""
    tasker, workflow = prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {
        WORKSPACE_CONF_KEY: ReactorConfig({'version': 1}),
    }
    add_koji_map_in_workflow(workflow, hub_url='', root_url='http://example.com')
    workflow.user_params['include_koji_repo'] = include_repo
    workflow.user_params['yum_repourls'] = yum_repos
    runner = PreBuildPluginsRunner(
        tasker, workflow,
        [{'name': KojiPlugin.key, 'args': {'target': target}}])
    runner.run()
    # yum repo parameters win over the missing-target message.
    expected_msg = ('there is a yum repo user parameter, skipping plugin'
                    if (not include_repo and yum_repos)
                    else 'no target provided, skipping plugin')
    assert expected_msg in caplog.text
def test_pulp_source_secret(tmpdir, check_repo_retval, should_raise, monkeypatch, reactor_config_map):
    """Push via SOURCE_SECRET_PATH; expect failure when the repo check fails."""
    tasker, workflow = prepare(check_repo_retval=check_repo_retval)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    # Fake pulp client credentials in the secret directory.
    for fname, body in (("pulp.cer", "pulp certificate\n"),
                        ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), fname), "wt") as handle:
            handle.write(body)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': PulpPushPlugin.key,
          'args': {'pulp_registry_name': 'test'}}])
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({
                'version': 1,
                'pulp': {'name': 'test',
                         'auth': {'ssl_certs_dir': str(tmpdir)}}}),
        }
    if should_raise:
        with pytest.raises(Exception):
            runner.run()
        return
    runner.run()
    assert PulpPushPlugin.key is not None
    _, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
def test_should_send(self, rebuild, success, auto_canceled, manual_canceled, send_on, expected, reactor_config_map):
    """_should_send must honour the configured send_on triggers."""
    class WF(object):
        exit_results = {KojiPromotePlugin.key: MOCK_KOJI_BUILD_ID}
        plugin_workspace = {}

    workflow = WF()
    if reactor_config_map:
        smtp_map = {
            'from_address': '*****@*****.**',
            'host': 'smtp.spam.com',
        }
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({'version': 1, 'smtp': smtp_map}),
        }
    plugin = SendMailPlugin(None, workflow,
                            smtp_host='smtp.bar.com',
                            from_address='*****@*****.**',
                            send_on=send_on)
    result = plugin._should_send(rebuild, success, auto_canceled, manual_canceled)
    assert result == expected
def test_pull_raises_retry_error(workflow, caplog):
    """A retry-generator failure during pull must be reported in the log."""
    if MOCK:
        mock_docker(remember_images=True)
    tasker = DockerTasker(retry_times=1)
    workflow.builder = MockBuilder()
    parsed = ImageName.parse(IMAGE_RAISE_RETRYGENERATOREXCEPTION)
    source_registry = parsed.registry
    base_image_str = "{}/{}:{}".format(SOURCE_REGISTRY, parsed.repo, 'some')
    workflow.builder.dockerfile_images = DockerfileImages([base_image_str])
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {
        WORKSPACE_CONF_KEY: ReactorConfig({
            'version': 1,
            'source_registry': {'url': source_registry, 'insecure': True},
        }),
    }
    runner = PreBuildPluginsRunner(
        tasker,
        workflow,
        [{'name': PullBaseImagePlugin.key, 'args': {}}],
    )
    with pytest.raises(Exception):
        runner.run()
    failed_image = ImageName.parse(base_image_str)
    failed_image.registry = source_registry
    assert 'failed to pull image: {}'.format(failed_image.to_str()) in caplog.text
def mock_reactor_config(workflow, tmpdir, data=None, default_si=DEFAULT_SIGNING_INTENT):
    """Install a ReactorConfig (default: ODCS signing intents) into the workspace.

    When data is None a default ODCS config is generated; an explicitly empty
    data string yields an empty config and skips the cert file.
    """
    if data is None:
        data = dedent("""\
            version: 1
            odcs:
               signing_intents:
               - name: release
                 keys: ['R123']
               - name: beta
                 keys: ['R123', 'B456', 'B457']
               - name: unsigned
                 keys: []
               default_signing_intent: {}
               api_url: {}
               auth:
                   ssl_certs_dir: {}
            """.format(default_si, ODCS_URL, tmpdir))
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    config = {}
    if data:
        # The ssl_certs_dir schema check expects a cert file to exist.
        tmpdir.join('cert').write('')
        config = read_yaml(data, 'schemas/config.json')
    workflow.plugin_workspace[
        ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = ReactorConfig(config)
def workflow(self, push=True, sync=True, build_process_failed=False,
             postbuild_results=None, prebuild_results=None, expectv2schema2=False,
             platform_descriptors=False):
    """Build a flexmock workflow with tag/push/reactor config for pulp tests."""
    tag_conf = TagConf()
    tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)
    push_conf = PushConf()
    # One registry without server-side sync (push), one with it (sync).
    if push:
        push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI,
                                    server_side_sync=False)
    if sync:
        push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI,
                                    server_side_sync=True)
    conf = {
        ReactorConfigKeys.VERSION_KEY: 1,
        'prefer_schema1_digest': not expectv2schema2,
    }
    if platform_descriptors:
        conf['platform_descriptors'] = [
            {'platform': 'x86_64', 'architecture': 'amd64'},
        ]
    workspace = {
        ReactorConfigPlugin.key: {WORKSPACE_CONF_KEY: ReactorConfig(conf)},
    }
    mock_get_retry_session()
    builder = flexmock()
    setattr(builder, 'image_id', 'sha256:(old)')
    return flexmock(tag_conf=tag_conf,
                    push_conf=push_conf,
                    builder=builder,
                    build_process_failed=build_process_failed,
                    plugin_workspace=workspace,
                    postbuild_results=postbuild_results or {},
                    prebuild_results=prebuild_results or {})
def test_create_missing_repo(self, reactor_config_map):  # noqa
    """A repo missing from pulp is created (with the redhat- prefix), then
    synced and published via crane.

    The flexmock expectations are .ordered(): getRepos -> createRepo ->
    syncRepo -> crane must happen in exactly that sequence.
    """
    docker_registry = 'http://registry.example.com'
    docker_repository = 'prod/myrepository'
    prefixed_pulp_repoid = 'redhat-prod-myrepository'
    env = 'pulp'
    if reactor_config_map:
        # NOTE(review): self.workflow is also called as a factory below;
        # setting attributes on it here looks suspicious -- confirm this
        # workspace assignment is actually consumed.
        self.workflow.plugin_workspace = {}
        self.workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
        self.workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig({'version': 1, 'pulp': {'name': env}})
    plugin = PulpSyncPlugin(tasker=None,
                            workflow=self.workflow([docker_repository]),
                            pulp_registry_name=env,
                            docker_registry=docker_registry)
    mockpulp = MockPulp()
    # getRepos finds nothing -> triggers createRepo.
    (flexmock(mockpulp).should_receive('getRepos').with_args(
        [prefixed_pulp_repoid], fields=['id']).and_return([]).once().ordered())
    (flexmock(mockpulp).should_receive('createRepo').with_args(
        prefixed_pulp_repoid, None, registry_id=docker_repository,
        prefix_with='redhat-').once().ordered())
    (flexmock(mockpulp).should_receive('syncRepo').with_args(
        repo=prefixed_pulp_repoid, feed=docker_registry).and_return(
        ([], [])).once().ordered())
    (flexmock(mockpulp).should_receive('crane').with_args(
        [prefixed_pulp_repoid], wait=True).once().ordered())
    (flexmock(dockpulp).should_receive('Pulp').with_args(
        env=env).and_return(mockpulp))
    plugin.run()
def test_delete_not_implemented(self, caplog, reactor_config_map):  # noqa
    """
    Should log an error (but not raise an exception) when
    delete_from_registry is True.
    """
    mockpulp = MockPulp()
    (flexmock(mockpulp).should_receive('getRepos').with_args(
        ['redhat-prod-myrepository'], fields=['id']).and_return([{
            'id': 'redhat-prod-myrepository'
        }]).once().ordered())
    (flexmock(mockpulp).should_receive('syncRepo').and_return(([], [])))
    flexmock(dockpulp).should_receive('Pulp').and_return(mockpulp)
    if reactor_config_map:
        self.workflow.plugin_workspace = {}
        self.workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
        self.workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig({'version': 1, 'pulp': {'name': 'pulp'}})
    plugin = PulpSyncPlugin(tasker=None,
                            workflow=self.workflow(['prod/myrepository']),
                            pulp_registry_name='pulp',
                            docker_registry='http://registry.example.com',
                            delete_from_registry=True)
    plugin.run()
    # caplog.records is a property (list) on pytest >= 3.3, matching the
    # attribute-style caplog.text access used elsewhere in this file;
    # calling it would raise TypeError.
    errors = [
        record.getMessage() for record in caplog.records
        if record.levelname == 'ERROR'
    ]
    assert any('not implemented' in message for message in errors)
def test_dockercfg_missing_or_invalid(self, tmpdir, content, reactor_config_map):
    """A missing or malformed .dockercfg must make the plugin fail."""
    env = 'pulp'
    if content is not None:
        # Write the (possibly invalid) registry secret.
        with open(os.path.join(str(tmpdir), '.dockercfg'), 'w') as secret_file:
            secret_file.write(content)
    if reactor_config_map:
        self.workflow.plugin_workspace = {}
        self.workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({'version': 1,
                                               'pulp': {'name': env}}),
        }
    plugin = PulpSyncPlugin(tasker=None,
                            workflow=self.workflow(['repo']),
                            pulp_registry_name=env,
                            docker_registry='http://registry.example.com',
                            registry_secret_path=str(tmpdir))
    (flexmock(dockpulp).should_receive('Pulp').with_args(
        env=env).and_return(MockPulp()))
    with pytest.raises(RuntimeError):
        plugin.run()
def test_pulp_tag_service_account_secret(tmpdir, monkeypatch, caplog, reactor_config_map):
    """Tag v1 image ids via pulp using a service-account secret path."""
    v1_image_ids = {'x86_64': None, 'ppc64le': 'ppc64le_v1_image_id'}
    expected_msg = "tagging v1-image-id ppc64le_v1_image_id for platform ppc64le"
    expected_results = {
        'redhat-image-name1': {'tag': 'latest:ppc64le_v1_image_id'},
    }
    tasker, workflow = prepare(v1_image_ids)
    # SOURCE_SECRET_PATH deliberately points elsewhere; pulp_secret_path wins.
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir) + "/not-used")
    for fname, body in (("pulp.cer", "pulp certificate\n"),
                        ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), fname), "wt") as handle:
            handle.write(body)
    runner = PostBuildPluginsRunner(tasker, workflow, [{
        'name': PLUGIN_PULP_TAG_KEY,
        'args': {
            'pulp_registry_name': 'test',
            'pulp_secret_path': str(tmpdir),
        }
    }])
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({
                'version': 1,
                'pulp': {'name': 'test',
                         'auth': {'ssl_certs_dir': str(tmpdir)}}}),
        }
    results = runner.run()
    assert expected_msg in caplog.text
    assert results['pulp_tag'] == expected_results
def create_runner(tasker, workflow, ssl_certs=False, principal=None,
                  keytab=None, poll_interval=0.01, proxy_user=None,
                  use_args=True, koji_target='koji-target'):
    """Build an ExitPluginsRunner wired up for KojiTagBuildPlugin."""
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {
        WORKSPACE_CONF_KEY: ReactorConfig({'version': 1}),
    }
    add_koji_map_in_workflow(workflow, hub_url='',
                             ssl_certs_dir='/' if ssl_certs else None,
                             krb_keytab=keytab,
                             krb_principal=principal,
                             proxyuser=proxy_user)
    args = {'target': koji_target}
    if poll_interval is not None:
        args['poll_interval'] = poll_interval
    # use_args passes the full args (incl. poll_interval); otherwise only
    # the target is handed to the plugin.
    plugin_conf = {
        'name': KojiTagBuildPlugin.key,
        'args': args if use_args else {'target': koji_target},
    }
    return ExitPluginsRunner(tasker, workflow, [plugin_conf])
def test_pulp_publish_success(caplog, reactor_config_map):
    """Successful publish: crane() is invoked and crane images are returned."""
    tasker, workflow = prepare(success=True)
    if reactor_config_map:
        pulp_map = {'name': 'pulp_registry_name', 'auth': {}}
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig({'version': 1, 'pulp': pulp_map})
    plugin = PulpPublishPlugin(tasker, workflow, 'pulp_registry_name')
    (flexmock(dockpulp.Pulp).should_receive('crane').with_args(
        set([
            'redhat-image-name1', 'redhat-image-name3',
            'redhat-namespace-image-name2'
        ]),
        wait=True).and_return([]))
    (flexmock(dockpulp.Pulp).should_receive('watch_tasks').with_args(list))
    crane_images = plugin.run()
    # caplog.text is a property on pytest >= 3.3 (as used elsewhere in this
    # file); calling it raises TypeError.
    assert 'to be published' in caplog.text
    images = [i.to_str() for i in crane_images]
    assert "registry.example.com/image-name1:latest" in images
    assert "registry.example.com/image-name1:2" in images
    assert "registry.example.com/namespace/image-name2:latest" in images
    assert "registry.example.com/image-name3:asd" in images
def test_compare_components_plugin(tmpdir, caplog, base_from_scratch, mismatch, exception, fail):
    """Component comparison: mismatches fail unless excepted or FROM scratch."""
    workflow = mock_workflow(tmpdir)
    worker_metadatas = mock_metadatas()
    # example data has 2 log items before component item hence output[2]
    component = worker_metadatas['ppc64le']['output'][2]['components'][0]
    if mismatch:
        component['version'] = 'bacon'
    if exception:
        reactor_conf = ReactorConfig({
            'version': 1,
            'package_comparison_exceptions': [component['name']],
        })
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: reactor_conf,
        }
    workflow.postbuild_results[PLUGIN_FETCH_WORKER_METADATA_KEY] = worker_metadatas
    workflow.builder.base_from_scratch = base_from_scratch
    runner = PostBuildPluginsRunner(None, workflow, [
        {'name': PLUGIN_COMPARE_COMPONENTS_KEY, "args": {}},
    ])
    if fail and not base_from_scratch:
        with pytest.raises(PluginFailedException):
            runner.run()
    else:
        runner.run()
    if base_from_scratch:
        expected = "Skipping comparing components: unsupported for FROM-scratch images"
        assert expected in caplog.text
def test_pulp_publish_delete(worker_builds_created, v1_image_ids, expected,
                             caplog, reactor_config_map):
    """Failed builds: v1 images are removed from pulp instead of published."""
    tasker, workflow = prepare(success=False, v1_image_ids=v1_image_ids)
    if not worker_builds_created:
        workflow.build_result = BuildResult(fail_reason="not built")
    if reactor_config_map:
        pulp_map = {'name': 'pulp_registry_name', 'auth': {}}
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig({'version': 1, 'pulp': pulp_map})
    plugin = PulpPublishPlugin(tasker, workflow, 'pulp_registry_name')
    msg = "removing ppc64le_v1_image_id from"
    (flexmock(dockpulp.Pulp).should_receive('crane').never())
    if expected:
        # NOTE(review): `unicode` is Python 2 only -- presumably aliased
        # elsewhere in this module for Python 3; confirm before porting.
        (flexmock(dockpulp.Pulp).should_receive('remove').with_args(
            unicode, unicode))
    else:
        (flexmock(dockpulp.Pulp).should_receive('remove').never())
    crane_images = plugin.run()
    assert crane_images == []
    # caplog.text is a property on pytest >= 3.3 (as used elsewhere in this
    # file); calling it raises TypeError.
    if expected and worker_builds_created:
        assert msg in caplog.text
    else:
        assert msg not in caplog.text
def run_plugin_with_args(
        self, workflow, plugin_args=None, reactor_config_map=False,  # noqa
        organization=None):
    """Run InjectParentImage and check the resolved Koji build ID."""
    plugin_args = plugin_args or {}
    plugin_args.setdefault('koji_parent_build', KOJI_BUILD_ID)
    plugin_args.setdefault('koji_hub', KOJI_HUB)
    if reactor_config_map:
        koji_map = {'hub_url': KOJI_HUB, 'root_url': '', 'auth': {}}
        if 'koji_ssl_certs_dir' in plugin_args:
            koji_map['auth']['ssl_certs_dir'] = plugin_args['koji_ssl_certs_dir']
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({
                'version': 1,
                'koji': koji_map,
                'registries_organization': organization,
            }),
        }
    runner = PreBuildPluginsRunner(workflow.builder.tasker, workflow,
                                   [{'name': InjectParentImage.key,
                                     'args': plugin_args}])
    result = runner.run()
    # Koji build ID is always used, even when NVR is given.
    assert result[InjectParentImage.key] == KOJI_BUILD_ID
    self.assert_images_to_remove(workflow)
def run_plugin_with_args(
        self, workflow, plugin_args=None, expect_result=True,  # noqa
        reactor_config_map=False):
    """Run KojiParentPlugin and verify its parent-image build result."""
    plugin_args = plugin_args or {}
    plugin_args.setdefault('koji_hub', KOJI_HUB)
    plugin_args.setdefault('poll_interval', 0.01)
    plugin_args.setdefault('poll_timeout', 1)
    if reactor_config_map:
        koji_map = {'hub_url': KOJI_HUB, 'root_url': '', 'auth': {}}
        if 'koji_ssl_certs_dir' in plugin_args:
            koji_map['auth']['ssl_certs_dir'] = plugin_args['koji_ssl_certs_dir']
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({'version': 1, 'koji': koji_map}),
        }
    runner = PreBuildPluginsRunner(workflow.builder.tasker, workflow,
                                   [{'name': KojiParentPlugin.key,
                                     'args': plugin_args}])
    result = runner.run()
    expected = {'parent-image-koji-build': KOJI_BUILD} if expect_result else None
    assert result[KojiParentPlugin.key] == expected
def test_get_pdc_session(self, fallback, config, raise_error):
    """get_pdc_session builds a PDCClient from config or from a fallback map."""
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return
    config_json = read_yaml(config, 'schemas/config.json')
    if not PDC_AVAILABLE:
        return
    pdc_conf = config_json['pdc']
    auth_info = {
        "server": pdc_conf['api_url'],
        "ssl_verify": not pdc_conf.get('insecure', False),
        "develop": True,
    }
    fallback_map = {}
    if fallback:
        fallback_map['api_url'] = pdc_conf['api_url']
        fallback_map['insecure'] = pdc_conf.get('insecure', False)
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig(config_json)
    (flexmock(pdc_client.PDCClient).should_receive('__init__')
        .with_args(**auth_info).once().and_return(None))
    get_pdc_session(workflow, fallback_map)
def test_run_does_nothing_if_conditions_not_met(
        self, reactor_config_map):  # noqa
    """When _should_send says no, neither receivers nor mail are touched."""
    class WF(object):
        autorebuild_canceled = False
        build_canceled = False
        prebuild_results = {CheckAndSetRebuildPlugin.key: True}
        image = util.ImageName.parse('repo/name')
        build_process_failed = True
        exit_results = {}
        plugin_workspace = {}

    workflow = WF()
    if reactor_config_map:
        smtp_map = {
            'from_address': '*****@*****.**',
            'host': 'smtp.spam.com',
        }
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({'version': 1, 'smtp': smtp_map}),
        }
    plugin = SendMailPlugin(None, workflow,
                            from_address='*****@*****.**',
                            smtp_host='smtp.spam.com',
                            send_on=[MS])
    (flexmock(plugin).should_receive('_should_send')
        .with_args(True, False, False, False).and_return(False))
    flexmock(plugin).should_receive('_get_receivers_list').times(0)
    flexmock(plugin).should_receive('_send_mail').times(0)
    plugin.run()
def test_get_koji_session(self, fallback, config, raise_error):
    """get_koji_session passes hub URL and auth info to create_koji_session."""
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return
    config_json = read_yaml(config, 'schemas/config.json')
    koji_auth = config_json['koji']['auth']
    auth_info = {
        "proxyuser": koji_auth.get('proxyuser'),
        "ssl_certs_dir": koji_auth.get('ssl_certs_dir'),
        "krb_principal": koji_auth.get('krb_principal'),
        "krb_keytab": koji_auth.get('krb_keytab_path'),
    }
    fallback_map = {}
    if fallback:
        fallback_map = {
            'auth': deepcopy(auth_info),
            'hub_url': config_json['koji']['hub_url'],
        }
        # The fallback map spells the keytab key differently.
        fallback_map['auth']['krb_keytab_path'] = fallback_map['auth'].pop(
            'krb_keytab')
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)
    (flexmock(atomic_reactor.koji_util)
        .should_receive('create_koji_session')
        .with_args(config_json['koji']['hub_url'], auth_info)
        .once().and_return(True))
    get_koji_session(workflow, fallback_map)
def test_pulp_dedup_layers(unsupported, unlink_exc, tmpdir, existing_layers,
                           should_raise, monkeypatch, subprocess_exceptions,
                           reactor_config_map):
    """Push with layer dedup and verify the reported top layer and images."""
    tasker, workflow = prepare(check_repo_retval=0,
                               existing_layers=existing_layers,
                               subprocess_exceptions=subprocess_exceptions,
                               unsupported=unsupported)
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir))
    for fname, body in (("pulp.cer", "pulp certificate\n"),
                        ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), fname), "wt") as handle:
            handle.write(body)
    if unlink_exc is not None:
        flexmock(os).should_receive('unlink').and_raise(unlink_exc)
    runner = PostBuildPluginsRunner(
        tasker, workflow,
        [{'name': PulpPushPlugin.key,
          'args': {'pulp_registry_name': 'test'}}])
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({
                'version': 1,
                'pulp': {'name': 'test', 'auth': {}}}),
        }
    runner.run()
    assert PulpPushPlugin.key is not None
    top_layer, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
    assert top_layer == 'foo'
def prepare(self, df_path, inherited_user='', hide_files=None, parent_images=None):
    """Create a tasker and workflow around df_path, with optional parent
    inspection data and a hide_files reactor config."""
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow("test-image", source=SOURCE)
    workflow.source = MockSource(df_path)
    builder = StubInsideBuilder().for_workflow(workflow).set_df_path(df_path)
    workflow.builder = builder
    for parent in parent_images or []:
        # Each parent reports the same inherited user in its config.
        builder.set_parent_inspection_data(parent, {
            INSPECT_CONFIG: {'User': inherited_user},
        })
    if hide_files is not None:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({'version': 1,
                                               'hide_files': hide_files}),
        }
    return tasker, workflow
def test_pulp_service_account_secret(tmpdir, monkeypatch, reactor_config_map):
    """Push using pulp_secret_path while SOURCE_SECRET_PATH points elsewhere."""
    tasker, workflow = prepare()
    monkeypatch.setenv('SOURCE_SECRET_PATH', str(tmpdir) + "/not-used")
    for fname, body in (("pulp.cer", "pulp certificate\n"),
                        ("pulp.key", "pulp key\n")):
        with open(os.path.join(str(tmpdir), fname), "wt") as handle:
            handle.write(body)
    runner = PostBuildPluginsRunner(tasker, workflow, [{
        'name': PulpPushPlugin.key,
        'args': {
            'pulp_registry_name': 'test',
            'pulp_secret_path': str(tmpdir),
        }}])
    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({
                'version': 1,
                'pulp': {'name': 'test',
                         'auth': {'ssl_certs_dir': str(tmpdir)}}}),
        }
    runner.run()
    _, crane_images = workflow.postbuild_results[PulpPushPlugin.key]
    pushed = [image.to_str() for image in crane_images]
    assert "registry.example.com/image-name1:latest" in pushed
    assert "registry.example.com/prefix/image-name2:latest" in pushed
    assert "registry.example.com/image-name3:asd" in pushed
def prepare(self, workflow):
    """Configure workflow with platforms, descriptors and a mocked manifest list."""
    # Expected platforms for both buildstep args and prebuild results.
    workflow.buildstep_plugins_conf[0]['args']['platforms'] = ['x86_64', 'ppc64le']
    workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(['x86_64', 'ppc64le'])
    # Platform descriptors map x86_64 to the amd64 manifest architecture.
    reactor_conf = ReactorConfig({
        'version': 1,
        'source_registry': {'url': 'registry.example.com', 'insecure': True},
        'platform_descriptors': [{'platform': 'x86_64', 'architecture': 'amd64'}],
    })
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = reactor_conf
    # Multi-arch manifest list returned by the mocked get_manifest_list.
    manifest_list = {
        'manifests': [
            {'platform': {'architecture': 'amd64'}, 'digest': 'sha256:123456'},
            {'platform': {'architecture': 'ppc64le'}, 'digest': 'sha256:654321'},
        ]
    }
    (flexmock(atomic_reactor.util)
        .should_receive('get_manifest_list')
        .and_return(flexmock(json=lambda: manifest_list)))
    return workflow
def mock_reactor_config(tmpdir, clusters=None):
    """Mock get_config with a clusters map and write a matching osbs.conf."""
    if not clusters:
        clusters = {
            'x86_64': [
                {'name': 'worker_x86_64', 'max_concurrent_builds': 3}
            ],
            'ppc64le': [
                {'name': 'worker_ppc64le', 'max_concurrent_builds': 3}
            ]
        }
    (flexmock(pre_reactor_config)
        .should_receive('get_config')
        .and_return(ReactorConfig({'version': 1, 'clusters': clusters})))
    # One osbs.conf section per configured cluster.
    with open(os.path.join(str(tmpdir), 'osbs.conf'), 'w') as conf_file:
        for plat_clusters in clusters.values():
            for cluster in plat_clusters:
                conf_file.write(dedent("""\
                    [{name}]
                    openshift_url = https://{name}.com/
                    namespace = {name}_namespace
                    """.format(name=cluster['name'])))
def test_compare_components_plugin(tmpdir, mismatch, exception, fail):
    """A component version mismatch fails the build unless excepted."""
    workflow = mock_workflow(tmpdir)
    worker_metadatas = mock_metadatas()
    # example data has 2 log items before component item hence output[2]
    component = worker_metadatas['ppc64le']['output'][2]['components'][0]
    if mismatch:
        component['version'] = 'bacon'
    if exception:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: ReactorConfig({
                'version': 1,
                'package_comparison_exceptions': [component['name']],
            }),
        }
    workflow.postbuild_results[PLUGIN_FETCH_WORKER_METADATA_KEY] = worker_metadatas
    runner = PostBuildPluginsRunner(None, workflow, [
        {'name': PLUGIN_COMPARE_COMPONENTS_KEY, "args": {}},
    ])
    if not fail:
        runner.run()
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_allowed_domains(allowed_domains, repo_urls, will_raise, scratch):
    """Repo URLs outside yum_repo_allowed_domains must fail non-scratch builds."""
    tasker, workflow = prepare(scratch)
    reactor_map = {'version': 1}
    if allowed_domains is not None:
        reactor_map['yum_repo_allowed_domains'] = allowed_domains
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {
        WORKSPACE_CONF_KEY: ReactorConfig(reactor_map),
    }
    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': AddYumRepoByUrlPlugin.key,
        'args': {
            'repourls': repo_urls,
            'inject_proxy': None
        }
    }])
    # Scratch builds are exempt from the domain check.
    if will_raise and not scratch:
        with pytest.raises(PluginFailedException) as exc:
            runner.run()
        assert 'Errors found while checking yum repo urls' in str(exc.value)
    else:
        runner.run()
def test_get_receiver_list(self, monkeypatch, additional_addresses, expected_receivers):
    """_get_receivers_list should resolve the configured recipient addresses.

    expected_receivers=None means no recipients can be resolved and a
    RuntimeError is expected instead.
    """
    class TagConf(object):
        # No unique images available to the receiver lookup.
        unique_images = []

    class WF(object):
        image = ImageName.parse('foo/bar:baz')
        openshift_build_selflink = '/builds/blablabla'
        build_process_failed = False
        autorebuild_canceled = False
        build_canceled = False
        tag_conf = TagConf()
        exit_results = {
            KojiImportPlugin.key: MOCK_KOJI_BUILD_ID
        }
        prebuild_results = {}
        plugin_workspace = {}

    # The koji task id is read from the BUILD environment variable.
    monkeypatch.setenv("BUILD", json.dumps({
        'metadata': {
            'labels': {
                'koji-task-id': MOCK_KOJI_TASK_ID,
            },
            'name': {},
        }
    }))
    session = MockedClientSession('', has_kerberos=True)
    pathinfo = MockedPathInfo('https://koji')
    flexmock(koji, ClientSession=lambda hub, opts: session, PathInfo=pathinfo)
    kwargs = {
        'url': 'https://something.com',
        'smtp_host': 'smtp.bar.com',
        'from_address': '*****@*****.**',
        'additional_addresses': additional_addresses
    }
    workflow = WF()
    openshift_map = {'url': 'https://something.com'}
    # submitter/owner lookups disabled: only additional_addresses count.
    smtp_map = {
        'from_address': '*****@*****.**',
        'host': 'smtp.bar.com',
        'send_to_submitter': False,
        'send_to_pkg_owner': False,
        'additional_addresses': additional_addresses
    }
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
        ReactorConfig({'version': 1, 'smtp': smtp_map, 'openshift': openshift_map})
    add_koji_map_in_workflow(workflow, hub_url=None, root_url='https://koji/',
                             ssl_certs_dir='/certs')
    p = SendMailPlugin(None, workflow, **kwargs)
    if expected_receivers is not None:
        # Order-insensitive comparison of the computed receiver list.
        assert sorted(expected_receivers) == sorted(p._get_receivers_list())
    else:
        with pytest.raises(RuntimeError) as ex:
            p._get_receivers_list()
        assert str(ex.value) == 'No recipients found'