def test_get_koji_session(self, config, raise_error):
    required_config = """\
version: 1
source_registry:
  url: source_registry.com
registries:
  - url: registry_url
openshift:
  url: openshift_url
"""
    config += "\n" + required_config
    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        "proxyuser": config_json['koji']['auth'].get('proxyuser'),
        "ssl_certs_dir": config_json['koji']['auth'].get('ssl_certs_dir'),
        "krb_principal": config_json['koji']['auth'].get('krb_principal'),
        "krb_keytab": config_json['koji']['auth'].get('krb_keytab_path')
    }

    use_fast_upload = config_json['koji'].get('use_fast_upload', True)

    conf = Configuration(raw_config=config_json)

    (flexmock(atomic_reactor.utils.koji)
        .should_receive('create_koji_session')
        .with_args(config_json['koji']['hub_url'], auth_info, use_fast_upload)
        .once()
        .and_return(True))

    get_koji_session(conf)
def test_get_koji_session(self, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        "proxyuser": config_json['koji']['auth'].get('proxyuser'),
        "ssl_certs_dir": config_json['koji']['auth'].get('ssl_certs_dir'),
        "krb_principal": config_json['koji']['auth'].get('krb_principal'),
        "krb_keytab": config_json['koji']['auth'].get('krb_keytab_path')
    }

    fallback_map = {}
    if fallback:
        fallback_map = {
            'auth': deepcopy(auth_info),
            'hub_url': config_json['koji']['hub_url']
        }
        fallback_map['auth']['krb_keytab_path'] = fallback_map['auth'].pop('krb_keytab')
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    (flexmock(atomic_reactor.koji_util)
        .should_receive('create_koji_session')
        .with_args(config_json['koji']['hub_url'], auth_info)
        .once()
        .and_return(True))

    get_koji_session(workflow, fallback_map)
def run(self):
    build_json = get_build_json()
    self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)

    user_params = os.environ['USER_PARAMS']
    user_data = validate_user_data(user_params, 'schemas/source_containers_user_params.json')
    arrangement_version = user_data.get('arrangement_version', None)
    plugins_json_serialized = get_plugins_with_user_data(user_params, user_data)
    # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
    reactor_config_map = os.environ['REACTOR_CONFIG']
    self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json')

    if arrangement_version and arrangement_version <= 5:
        raise ValueError('arrangement_version <= 5 is no longer supported')

    # validate json before performing any changes
    read_yaml(plugins_json_serialized, 'schemas/plugins.json')
    self.plugins_json = json.loads(plugins_json_serialized)

    input_json = {
        'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
    }
    input_json.update(self.plugins_json)

    self.log.debug("build json: %s", input_json)

    self.assert_koji_integration()

    # validate after performing changes
    read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

    return input_json
def test_get_pdc_session(self, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    if not PDC_AVAILABLE:
        return

    auth_info = {
        "server": config_json['pdc']['api_url'],
        "ssl_verify": not config_json['pdc'].get('insecure', False),
        "develop": True,
    }

    fallback_map = {}
    if fallback:
        fallback_map['api_url'] = config_json['pdc']['api_url']
        fallback_map['insecure'] = config_json['pdc'].get('insecure', False)
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    (flexmock(pdc_client.PDCClient)
        .should_receive('__init__')
        .with_args(**auth_info)
        .once()
        .and_return(None))

    get_pdc_session(workflow, fallback_map)
def test_get_koji_session(self, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        "proxyuser": config_json['koji']['auth'].get('proxyuser'),
        "ssl_certs_dir": config_json['koji']['auth'].get('ssl_certs_dir'),
        "krb_principal": config_json['koji']['auth'].get('krb_principal'),
        "krb_keytab": config_json['koji']['auth'].get('krb_keytab_path')
    }

    fallback_map = {}
    if fallback:
        fallback_map = {'auth': deepcopy(auth_info),
                        'hub_url': config_json['koji']['hub_url']}
        fallback_map['auth']['krb_keytab_path'] = fallback_map['auth'].pop('krb_keytab')
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    (flexmock(atomic_reactor.koji_util)
        .should_receive('create_koji_session')
        .with_args(config_json['koji']['hub_url'], auth_info)
        .once()
        .and_return(True))

    get_koji_session(workflow, fallback_map)
def test_get_pulp_session(self, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        "pulp_secret_path": config_json['pulp']['auth'].get('ssl_certs_dir'),
        "username": config_json['pulp']['auth'].get('username'),
        "password": config_json['pulp']['auth'].get('password'),
        "dockpulp_loglevel": None
    }

    fallback_map = {}
    if fallback:
        fallback_map = {'auth': deepcopy(auth_info),
                        'name': config_json['pulp']['name']}
        fallback_map['auth']['ssl_certs_dir'] = fallback_map['auth'].pop('pulp_secret_path')
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    (flexmock(atomic_reactor.pulp_util.PulpHandler)
        .should_receive('__init__')
        .with_args(workflow, config_json['pulp']['name'], 'logger', **auth_info)
        .once()
        .and_return(None))

    get_pulp_session(workflow, 'logger', fallback_map)
def validate_user_data(user_params, schema_path):
    """Validate JSON user data against a schema and return it as a Python dict

    :param str user_params: JSON with user data
    :param str schema_path: path to the JSON schema definition
    :return: dict with user data
    """
    read_yaml(user_params, schema_path)
    return json.loads(user_params)
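# Hedged usage sketch (not part of the module above): validate_user_data both
# validates and deserializes, so callers only ever handle a dict afterwards.
# The field names below are illustrative assumptions; the fields actually
# enforced are whatever 'schemas/user_params.json' defines.
params = json.dumps({'git_branch': 'main', 'git_commit_depth': 1})
user_data = validate_user_data(params, 'schemas/user_params.json')
assert user_data['git_branch'] == 'main'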
def run(self):
    """
    each plugin has to implement this method -- it is used to run the plugin actually

    response from plugin is kept and used in json result response
    """
    user_params = None
    build_json = get_build_json()
    git_url = os.environ['SOURCE_URI']
    git_ref = os.environ.get('SOURCE_REF', None)
    image = os.environ['OUTPUT_IMAGE']
    self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)
    self.reactor_env = None

    git_commit_depth = None
    git_branch = None
    try:
        user_params = os.environ['USER_PARAMS']
        user_data = self.validate_user_data(user_params)
        git_commit_depth = user_data.get('git_commit_depth', None)
        git_branch = user_data.get('git_branch', None)
        self.plugins_json = self.get_plugins_with_user_data(user_params, user_data)
        # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
        reactor_config_map = os.environ['REACTOR_CONFIG']
        self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json')
    except KeyError:
        try:
            self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
        except KeyError:
            raise RuntimeError("No plugin configuration found!")

    self.plugins_json = json.loads(self.plugins_json)

    # validate json before performing any changes
    read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

    input_json = {
        'source': {
            'provider': 'git',
            'uri': git_url,
            'provider_params': {
                'git_commit': git_ref,
                'git_commit_depth': git_commit_depth,
                'git_branch': git_branch,
            },
        },
        'image': image,
        'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
    }
    input_json.update(self.plugins_json)

    self.log.debug("build json: %s", input_json)

    self.remove_plugins_without_parameters()
    # make sure the final json is valid
    read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

    return input_json
def test_validate_build_env_vars(self, config, valid):
    # Only test schema validation, atomic-reactor has no additional support
    # for build_env_vars (osbs-client does, however)
    config += "\n" + REQUIRED_CONFIG
    if valid:
        read_yaml(config, 'schemas/config.json')
    else:
        with pytest.raises(OsbsValidationException):
            read_yaml(config, 'schemas/config.json')
def run(self):
    """
    each plugin has to implement this method -- it is used to run the plugin actually

    response from plugin is kept and used in json result response
    """
    build_json = get_build_json()
    git_url = os.environ['SOURCE_URI']
    git_ref = os.environ.get('SOURCE_REF', None)
    self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)

    try:
        user_params = os.environ['USER_PARAMS']
        user_data = validate_user_data(user_params, 'schemas/user_params.json')
        git_commit_depth = user_data.get('git_commit_depth', None)
        git_branch = user_data.get('git_branch', None)
        arrangement_version = user_data.get('arrangement_version', None)
        self.plugins_json = get_plugins_with_user_data(user_params, user_data)
        # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
        reactor_config_map = os.environ['REACTOR_CONFIG']
        self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json')
    except KeyError as exc:
        raise RuntimeError("No plugin configuration found!") from exc

    if arrangement_version and arrangement_version <= 5:
        raise ValueError('arrangement_version <= 5 is no longer supported')

    self.plugins_json = json.loads(self.plugins_json)

    # validate json before performing any changes
    read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

    input_json = {
        'source': {
            'provider': 'git',
            'uri': git_url,
            'provider_params': {
                'git_commit': git_ref,
                'git_commit_depth': git_commit_depth,
                'git_branch': git_branch,
            },
        },
        'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
    }
    input_json.update(self.plugins_json)

    self.log.debug("build json: %s", input_json)

    self.remove_plugins_without_parameters()
    # make sure the final json is valid
    read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

    return input_json
def test_get_openshift_session(self, config, raise_error):
    required_config = """\
version: 1
koji:
  hub_url: /
  root_url: ''
  auth: {}
source_registry:
  url: source_registry.com
registries:
  - url: registry_url
"""
    config += "\n" + required_config
    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        'openshift_url': config_json['openshift']['url'],
        'verify_ssl': not config_json['openshift'].get('insecure', False),
        'use_auth': False,
        'conf_file': None,
        'namespace': 'namespace',
    }
    if config_json['openshift'].get('auth'):
        if config_json['openshift']['auth'].get('krb_keytab_path'):
            auth_info['kerberos_keytab'] = \
                config_json['openshift']['auth'].get('krb_keytab_path')
        if config_json['openshift']['auth'].get('krb_principal'):
            auth_info['kerberos_principal'] = \
                config_json['openshift']['auth'].get('krb_principal')
        if config_json['openshift']['auth'].get('krb_cache_path'):
            auth_info['kerberos_ccache'] = \
                config_json['openshift']['auth'].get('krb_cache_path')
        if config_json['openshift']['auth'].get('ssl_certs_dir'):
            auth_info['client_cert'] = \
                os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'cert')
            auth_info['client_key'] = \
                os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'key')
        auth_info['use_auth'] = config_json['openshift']['auth'].get('enable', False)

    (flexmock(osbs.conf.Configuration)
        .should_call('__init__')
        .with_args(**auth_info)
        .once())
    (flexmock(osbs.api.OSBS)
        .should_call('__init__')
        .once())

    conf = Configuration(raw_config=config_json)
    get_openshift_session(conf, 'namespace')
def test_get_odcs_session(self, tmpdir, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {'insecure': config_json['odcs'].get('insecure', False)}
    if 'openidc_dir' in config_json['odcs']['auth']:
        config_json['odcs']['auth']['openidc_dir'] = str(tmpdir)
        filename = str(tmpdir.join('token'))
        with open(filename, 'w') as fp:
            fp.write("my_token")
        auth_info['token'] = "my_token"

    ssl_dir_raise = False
    if 'ssl_certs_dir' in config_json['odcs']['auth']:
        if config_json['odcs']['auth']['ssl_certs_dir'] != "nonexistent":
            config_json['odcs']['auth']['ssl_certs_dir'] = str(tmpdir)
            filename = str(tmpdir.join('cert'))
            with open(filename, 'w') as fp:
                fp.write("my_cert")
            auth_info['cert'] = filename
        else:
            ssl_dir_raise = True

    fallback_map = {}
    if fallback:
        fallback_map = {
            'auth': deepcopy(auth_info),
            'api_url': config_json['odcs']['api_url']
        }
        fallback_map['auth']['ssl_certs_dir'] = config_json['odcs']['auth'].get('ssl_certs_dir')
        fallback_map['auth']['openidc_dir'] = config_json['odcs']['auth'].get('openidc_dir')
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    if not ssl_dir_raise:
        (flexmock(atomic_reactor.odcs_util.ODCSClient)
            .should_receive('__init__')
            .with_args(config_json['odcs']['api_url'], **auth_info)
            .once()
            .and_return(None))

        get_odcs_session(workflow, fallback_map)
    else:
        with pytest.raises(KeyError):
            get_odcs_session(workflow, fallback_map)
def run(self):
    """
    each plugin has to implement this method -- it is used to run the plugin actually

    response from plugin is kept and used in json result response
    """
    user_params = None
    build_json = get_build_json()
    git_url = os.environ['SOURCE_URI']
    git_ref = os.environ.get('SOURCE_REF', None)
    image = os.environ['OUTPUT_IMAGE']
    self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)
    self.reactor_env = None

    try:
        user_params = os.environ['USER_PARAMS']
        self.plugins_json = self.get_plugins_with_user_params(build_json, user_params)
        # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
        reactor_config_map = os.environ['REACTOR_CONFIG']
        self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json')
    except KeyError:
        try:
            self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
        except KeyError:
            raise RuntimeError("No plugin configuration found!")

    self.plugins_json = json.loads(self.plugins_json)

    input_json = {
        'source': {
            'provider': 'git',
            'uri': git_url,
            'provider_params': {'git_commit': git_ref}
        },
        'image': image,
        'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
    }
    input_json.update(self.plugins_json)

    self.log.debug("build json: %s", input_json)

    self.remove_plugins_without_parameters()
    # make sure the final json is valid
    read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

    return input_json
def get_plugins_with_user_data(self, user_params, user_data):
    # get the reactor config map and derive an osbs instance from it
    from osbs.api import OSBS
    from osbs.conf import Configuration

    reactor_config_override = user_data.get('reactor_config_override')
    if reactor_config_override:
        read_yaml(json.dumps(reactor_config_override), 'schemas/config.json')

    osbs_conf = Configuration(build_json_dir=user_data.get('build_json_dir'))
    osbs = OSBS(osbs_conf, osbs_conf)
    return osbs.render_plugins_configuration(user_params)
def get_plugins_with_user_data(user_params, user_data):
    """Get the reactor config map and derive an osbs instance from it"""
    from osbs.api import OSBS
    from osbs.conf import Configuration

    reactor_config_override = user_data.get('reactor_config_override')
    if reactor_config_override:
        read_yaml(json.dumps(reactor_config_override), 'schemas/config.json')

    osbs_conf = Configuration(build_json_dir=user_data.get('build_json_dir'))
    osbs = OSBS(osbs_conf, osbs_conf)
    return osbs.render_plugins_configuration(user_params)
def make_icm(self, platform: str) -> dict:
    """Create the complete ICM document for the specified platform."""
    icm = deepcopy(self._icm_base)

    content_sets = read_content_sets(self.workflow) or {}
    icm['content_sets'] = content_sets.get(platform, [])

    self.log.debug('Output ICM content_sets: %s', icm['content_sets'])
    self.log.debug('Output ICM metadata: %s', icm['metadata'])

    # Validate; `json.dumps()` converts `icm` to str. Confusingly, `read_yaml`
    # *will* validate JSON
    read_yaml(json.dumps(icm), 'schemas/content_manifest.json')
    return icm
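# Hedged side-note as a self-contained sketch (assumes PyYAML and jsonschema,
# not code from this project): the reason `read_yaml` can validate a
# `json.dumps()` string is that every JSON document is also valid YAML, so a
# YAML loader parses it and jsonschema checks the result against the schema.
import json
import jsonschema
import yaml

def validate_json_via_yaml(data: dict, schema: dict) -> None:
    parsed = yaml.safe_load(json.dumps(data))  # JSON round-trips through the YAML loader
    jsonschema.validate(parsed, schema)        # raises ValidationError on mismatch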
def test_get_odcs_session(self, tmpdir, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {'insecure': config_json['odcs'].get('insecure', False)}
    if 'openidc_dir' in config_json['odcs']['auth']:
        config_json['odcs']['auth']['openidc_dir'] = str(tmpdir)
        filename = str(tmpdir.join('token'))
        with open(filename, 'w') as fp:
            fp.write("my_token")
        auth_info['token'] = "my_token"

    ssl_dir_raise = False
    if 'ssl_certs_dir' in config_json['odcs']['auth']:
        if config_json['odcs']['auth']['ssl_certs_dir'] != "nonexistent":
            config_json['odcs']['auth']['ssl_certs_dir'] = str(tmpdir)
            filename = str(tmpdir.join('cert'))
            with open(filename, 'w') as fp:
                fp.write("my_cert")
            auth_info['cert'] = filename
        else:
            ssl_dir_raise = True

    fallback_map = {}
    if fallback:
        fallback_map = {'auth': deepcopy(auth_info),
                        'api_url': config_json['odcs']['api_url']}
        fallback_map['auth']['ssl_certs_dir'] = config_json['odcs']['auth'].get('ssl_certs_dir')
        fallback_map['auth']['openidc_dir'] = config_json['odcs']['auth'].get('openidc_dir')
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    if not ssl_dir_raise:
        (flexmock(atomic_reactor.odcs_util.ODCSClient)
            .should_receive('__init__')
            .with_args(config_json['odcs']['api_url'], **auth_info)
            .once()
            .and_return(None))

        get_odcs_session(workflow, fallback_map)
    else:
        with pytest.raises(KeyError):
            get_odcs_session(workflow, fallback_map)
def _update_icm_data(self):
    # Inject the content_sets data into the ICM JSON object
    self.icm['content_sets'] = self.content_sets

    # Inject the current image layer index number into the ICM JSON object metadata
    self.icm['metadata']['image_layer_index'] = self.layer_index

    # Convert dict -> str
    icm_json = json.dumps(self.icm, indent=4)

    # Validate the updated ICM with the ICM JSON Schema
    read_yaml(icm_json, 'schemas/content_manifest.json')

    self.log.debug('Output ICM content_sets: %s', self.icm['content_sets'])
    self.log.debug('Output ICM metadata: %s', self.icm['metadata'])
def test_get_smtp_session(self, config, raise_error):
    config += "\n" + REQUIRED_CONFIG
    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    conf = Configuration(raw_config=config_json)

    (flexmock(smtplib.SMTP)
        .should_receive('__init__')
        .with_args(config_json['smtp']['host'])
        .once()
        .and_return(None))

    get_smtp_session(conf)
def mock_reactor_config(workflow, tmpdir, data=None, default_si=DEFAULT_SIGNING_INTENT):
    if data is None:
        data = dedent("""\
            version: 1
            odcs:
              signing_intents:
              - name: release
                keys: ['R123']
              - name: beta
                keys: ['R123', 'B456', 'B457']
              - name: unsigned
                keys: []
              default_signing_intent: {}
              api_url: {}
              auth:
                ssl_certs_dir: {}
            """.format(default_si, ODCS_URL, tmpdir))

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    config = {}
    if data:
        tmpdir.join('cert').write('')
        config = read_yaml(data, 'schemas/config.json')
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = ReactorConfig(config)
def test_get_docker_registry(self, config, exc):
    required_config = """\
version: 1
koji:
  hub_url: /
  root_url: ''
  auth: {}
openshift:
  url: openshift_url
source_registry:
  url: source_registry.com
"""
    config += "\n" + required_config
    config_json = read_yaml(config, 'schemas/config.json')

    expected = {
        'url': 'https://container-registry.example.com',
        'insecure': False,
        'secret': '/var/run/secrets/atomic-reactor/v2-registry-dockercfg'
    }

    conf = Configuration(raw_config=config_json)

    if exc is None:
        docker_registry = conf.docker_registry
        assert docker_registry == expected
    else:
        with pytest.raises(exc):
            getattr(conf, 'docker_registry')
def test_get_remote_hosts(self, config, expected_slots_dir, expected_enabled_hosts):
    config += "\n" + REQUIRED_CONFIG
    config_json = read_yaml(config, 'schemas/config.json')

    conf = Configuration(raw_config=config_json)
    remote_hosts = conf.remote_hosts
    assert expected_slots_dir == remote_hosts['slots_dir']

    pools = remote_hosts['pools']
    assert len(pools) == 2, 'Remote hosts do not have 2 architectures'
    assert len(pools['x86_64']) == 2, '2 entries expected for x86_64 architecture'
    assert sorted(pools['x86_64']) == sorted(['remote-host1.x86_64', 'remote-host2.x86_64'])
    assert len(pools['ppc64le']) == 1, '1 entry expected for ppc64le architecture'

    host1_x86_64 = pools['x86_64']['remote-host1.x86_64']
    assert host1_x86_64['auth'] == 'foo', 'Unexpected SSH key path'
    assert host1_x86_64['socket_path'] == '/user/foo/podman.sock', 'Unexpected socket path'

    host2_x86_64 = pools['x86_64']['remote-host2.x86_64']
    assert host2_x86_64['username'] == 'bar', 'Unexpected user name'

    host3_ppc64le = pools['ppc64le']['remote-host3.ppc64le']
    assert host3_ppc64le['slots'] == 3, 'Unexpected number of slots'

    for arch in ['x86_64', 'ppc64le']:
        enabled_hosts = [host for host, items in pools[arch].items() if items['enabled']]
        assert enabled_hosts == expected_enabled_hosts[arch]
def read_url_requests(self):
    file_path = os.path.join(self.workdir, self.URL_REQUESTS_FILENAME)
    if not os.path.exists(file_path):
        self.log.debug('%s not found', self.URL_REQUESTS_FILENAME)
        return []

    return util.read_yaml(file_path, 'schemas/fetch-artifacts-url.json')
def get_plugins_with_user_params(self, build_json, user_params):
    # get the reactor config map and derive an osbs instance from it
    from osbs.api import OSBS
    from osbs.conf import Configuration

    # make sure the input json is valid
    read_yaml(user_params, 'schemas/user_params.json')
    user_data = json.loads(user_params)
    reactor_config_override = user_data.get('reactor_config_override')
    if reactor_config_override:
        read_yaml(json.dumps(reactor_config_override), 'schemas/config.json')

    osbs_conf = Configuration(build_json_dir=user_data.get('build_json_dir'))
    osbs = OSBS(osbs_conf, osbs_conf)
    return osbs.render_plugins_configuration(user_params)
def test_get_registry(self, config, exc):
    required_config = dedent("""\
        version: 1
        koji:
          hub_url: /
          root_url: ''
          auth: {}
        openshift:
          url: openshift_url
        source_registry:
          url: source_registry.com
        """)
    config += "\n" + required_config
    config_json = read_yaml(config, 'schemas/config.json')

    expected = {
        'uri': 'container-registry.example.com',
        'insecure': False,
        'expected_media_types': [],
        'version': 'v2',
    }
    if 'registries_cfg_path' in config:
        expected['secret'] = '/var/run/secrets/atomic-reactor/v2-registry-dockercfg'

    conf = Configuration(raw_config=config_json)

    if exc is None:
        assert conf.registry == expected
    else:
        with exc:
            getattr(conf, 'registry')
def __init__(self, config_path=None, env_name=REACTOR_CONFIG_ENV_NAME, raw_config=None):
    self.conf = deepcopy(self.DEFAULT_CONFIG)

    reactor_config_from_env = os.environ.get(env_name, None)

    if raw_config:
        logger.info("reading config from raw_config kwarg")
        self.conf = deepcopy(raw_config)
    elif reactor_config_from_env:
        logger.info("reading config from %s env variable", env_name)
        self.conf = read_yaml(reactor_config_from_env, 'schemas/config.json')
    elif config_path and os.path.exists(config_path):
        logger.info("reading config from %s", config_path)
        self.conf = read_yaml_from_file_path(config_path, 'schemas/config.json')
    else:
        logger.info("using default config: %s", self.DEFAULT_CONFIG)

    version = self.conf[ReactorConfigKeys.VERSION_KEY]
    if version != 1:
        raise ValueError("version %r unknown" % version)

    logger.info("reading config content %s", self.conf)
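# Hedged sketch of the precedence implemented above, assuming this __init__
# belongs to the Configuration class instantiated as Configuration(raw_config=...)
# elsewhere in this listing: raw_config wins over the environment variable,
# which wins over config_path, which falls back to DEFAULT_CONFIG. Only the env
# and file sources are schema-validated; raw_config is trusted as-is.
conf = Configuration(raw_config={'version': 1})  # minimal dict; the version check passes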
def mock_reactor_config(workflow, tmpdir, data=None, default_si=DEFAULT_SIGNING_INTENT):
    if data is None:
        data = dedent("""\
            version: 1
            odcs:
              signing_intents:
              - name: release
                keys: ['R123']
              - name: beta
                keys: ['R123', 'B456', 'B457']
              - name: unsigned
                keys: []
              default_signing_intent: {}
              api_url: {}
              auth:
                ssl_certs_dir: {}
            """.format(default_si, ODCS_URL, tmpdir))

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    config = {}
    if data:
        tmpdir.join('cert').write('')
        config = read_yaml(data, 'schemas/config.json')
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = ReactorConfig(config)
def test_get_default_image_build_method(self, config, expect):
    config_json = read_yaml(config, 'schemas/config.json')
    _, workflow = self.prepare()
    workspace = workflow.plugin_workspace.setdefault(ReactorConfigPlugin.key, {})
    workspace[WORKSPACE_CONF_KEY] = ReactorConfig(config_json)

    method = get_default_image_build_method(workflow)
    assert method == expect
def read_nvr_requests(self):
    file_path = os.path.join(self.workdir, self.NVR_REQUESTS_FILENAME)
    if not os.path.exists(file_path):
        self.log.debug('%s not found', self.NVR_REQUESTS_FILENAME)
        return []

    nvr_requests = util.read_yaml(file_path, 'schemas/fetch-artifacts-nvr.json')
    return [NvrRequest(**nvr_request) for nvr_request in nvr_requests]
def icm(self):
    """
    Get and validate the ICM from the Cachito API `content-manifest` endpoint.

    :return: dict, the ICM as a Python dict
    """
    if not self.remote_sources and self._icm is None:
        self._icm = deepcopy(self.minimal_icm)
    elif self._icm is None:
        request_ids = [remote_source['request_id'] for remote_source in self.remote_sources]
        self._icm = self.cachito_session.get_image_content_manifest(request_ids)

    # Validate; `json.dumps()` converts `icm` to str. Confusingly, `read_yaml`
    # *will* validate JSON
    read_yaml(json.dumps(self._icm), 'schemas/content_manifest.json')
    return self._icm
def test_get_operator_manifests(self, tmpdir, config, valid):
    config += "\n" + REQUIRED_CONFIG
    if valid:
        read_yaml(config, 'schemas/config.json')
    else:
        with pytest.raises(OsbsValidationException):
            read_yaml(config, 'schemas/config.json')
        return

    filename = os.path.join(str(tmpdir), 'config.yaml')
    with open(filename, 'w') as fp:
        fp.write(dedent(config))
    conf = Configuration(config_path=filename)

    operator_config = conf.operator_manifests
    assert isinstance(operator_config, dict)
    assert "allowed_registries" in operator_config
def test_get_default_image_build_method(self, config, expect):
    config_json = read_yaml(config, 'schemas/config.json')
    _, workflow = self.prepare()
    workspace = workflow.plugin_workspace.setdefault(ReactorConfigPlugin.key, {})
    workspace[WORKSPACE_CONF_KEY] = ReactorConfig(config_json)

    method = get_default_image_build_method(workflow)
    assert method == expect
def test_get_build_image_override(self, config, expect):
    config += "\n" + REQUIRED_CONFIG
    config_json = read_yaml(config, 'schemas/config.json')
    conf = Configuration(raw_config=config_json)

    build_image_override = conf.build_image_override
    assert build_image_override == expect
def test_get_odcs_session(self, tmpdir, config, raise_error):
    config += "\n" + REQUIRED_CONFIG
    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        'insecure': config_json['odcs'].get('insecure', False),
        'timeout': config_json['odcs'].get('timeout', None),
    }
    if 'openidc_dir' in config_json['odcs']['auth']:
        config_json['odcs']['auth']['openidc_dir'] = str(tmpdir)
        filename = str(tmpdir.join('token'))
        with open(filename, 'w') as fp:
            fp.write("my_token")
        auth_info['token'] = "my_token"

    ssl_dir_raise = False
    if 'ssl_certs_dir' in config_json['odcs']['auth']:
        if config_json['odcs']['auth']['ssl_certs_dir'] != "nonexistent":
            config_json['odcs']['auth']['ssl_certs_dir'] = str(tmpdir)
            filename = str(tmpdir.join('cert'))
            with open(filename, 'w') as fp:
                fp.write("my_cert")
            auth_info['cert'] = filename
        else:
            ssl_dir_raise = True

    conf = Configuration(raw_config=config_json)

    if not ssl_dir_raise:
        (flexmock(atomic_reactor.utils.odcs.ODCSClient)
            .should_receive('__init__')
            .with_args(config_json['odcs']['api_url'], **auth_info)
            .once()
            .and_return(None))

        get_odcs_session(conf)
    else:
        with pytest.raises(KeyError):
            get_odcs_session(conf)
def icm(self):
    """
    Get and validate the ICM from the Cachito API `content-manifest` endpoint.

    :return: dict, the ICM as a Python dict
    """
    if self.icm_url is None and self._icm is None:
        self._icm = deepcopy(self.minimal_icm)

    if self._icm is None:
        session = get_retrying_requests_session()
        session.verify = self.cachito_verify
        self.log.debug('Making request to "%s"', self.icm_url)
        response = session.get(self.icm_url)
        response.raise_for_status()
        self._icm = response.json()  # Returns dict

    # Validate; `json.dumps()` converts `icm` to str. Confusingly, `read_yaml`
    # *will* validate JSON
    read_yaml(json.dumps(self._icm), 'schemas/content_manifest.json')
    return self._icm
def test_get_koji_path_info(self, fallback, root_url):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    config = {
        'version': 1,
        'koji': {
            'hub_url': 'https://koji.example.com/hub',
            'auth': {
                'ssl_certs_dir': '/var/certs'
            }
        }
    }

    expected_root_url = 'https://koji.example.com/root'

    if root_url:
        config['koji']['root_url'] = root_url

    config_yaml = yaml.safe_dump(config)

    expect_error = not root_url
    if expect_error:
        with pytest.raises(Exception):
            read_yaml(config_yaml, 'schemas/config.json')
        return

    parsed_config = read_yaml(config_yaml, 'schemas/config.json')

    fallback_map = {}
    if fallback:
        fallback_map = deepcopy(config['koji'])
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(parsed_config)

    (flexmock(koji.PathInfo)
        .should_receive('__init__')
        .with_args(topdir=expected_root_url)
        .once())

    get_koji_path_info(workflow, fallback_map)
def test_read_yaml_file_or_yaml(tmpdir, from_file, config):
    expected = yaml.safe_load(config)

    if from_file:
        config_path = os.path.join(str(tmpdir), 'config.yaml')
        with open(config_path, 'w') as fp:
            fp.write(config)
        output = read_yaml_from_file_path(config_path, 'schemas/config.json')
    else:
        output = read_yaml(config, 'schemas/config.json')

    assert output == expected
def test_get_smtp_session(self, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    fallback_map = {}
    if fallback:
        fallback_map['host'] = config_json['smtp']['host']
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    (flexmock(smtplib.SMTP)
        .should_receive('__init__')
        .with_args(config_json['smtp']['host'])
        .once()
        .and_return(None))

    get_smtp_session(workflow, fallback_map)
def test_get_build_image_override(self, fallback, config, expect):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    config_json = read_yaml(config, 'schemas/config.json')

    workspace = workflow.plugin_workspace[ReactorConfigPlugin.key]
    workspace[WORKSPACE_CONF_KEY] = ReactorConfig(config_json)

    kwargs = {}
    if fallback:
        kwargs['fallback'] = expect
    build_image_override = get_build_image_override(workflow, **kwargs)
    assert build_image_override == expect
def run(self):
    """
    Run the plugin

    Parse and validate config.
    Store in workflow workspace for later retrieval.
    """
    config_filename = os.path.join(self.config_path, self.basename)
    self.log.info("reading config from %s", config_filename)
    conf = read_yaml(config_filename, 'schemas/config.json')
    reactor_conf = ReactorConfig(conf)

    workspace = self.workflow.plugin_workspace.get(self.key, {})
    workspace[WORKSPACE_CONF_KEY] = reactor_conf
    self.workflow.plugin_workspace[self.key] = workspace
def test_get_smtp_session(self, fallback, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    fallback_map = {}
    if fallback:
        fallback_map['host'] = config_json['smtp']['host']
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    (flexmock(smtplib.SMTP)
        .should_receive('__init__')
        .with_args(config_json['smtp']['host'])
        .once()
        .and_return(None))

    get_smtp_session(workflow, fallback_map)
def test_get_platform_to_goarch_mapping(self, fallback, config, expect):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    config_json = read_yaml(config, 'schemas/config.json')

    workspace = workflow.plugin_workspace[ReactorConfigPlugin.key]
    workspace[WORKSPACE_CONF_KEY] = ReactorConfig(config_json)

    kwargs = {}
    if fallback:
        kwargs['descriptors_fallback'] = {'x86_64': 'amd64'}
    platform_to_goarch = get_platform_to_goarch_mapping(workflow, **kwargs)
    goarch_to_platform = get_goarch_to_platform_mapping(workflow, **kwargs)
    for plat, goarch in expect.items():
        assert platform_to_goarch[plat] == goarch
        assert goarch_to_platform[goarch] == plat
def test_get_flatpak_base_image(self, config, fallback, expect):
    config_json = read_yaml(config, 'schemas/config.json')
    _, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {
        WORKSPACE_CONF_KEY: ReactorConfig(config_json)
    }

    kwargs = {}
    if fallback:
        kwargs['fallback'] = fallback

    if expect:
        base_image = get_flatpak_base_image(workflow, **kwargs)
        assert base_image == expect
    else:
        with pytest.raises(KeyError):
            get_flatpak_base_image(workflow, **kwargs)
def run(self):
    """
    Run the plugin

    Parse and validate config.
    Store in workflow workspace for later retrieval.
    """
    if self.reactor_config_map:
        self.log.info("reading config from REACTOR_CONFIG env variable")
        conf = read_yaml(self.reactor_config_map, 'schemas/config.json')
    else:
        config_filename = os.path.join(self.config_path, self.basename)
        self.log.info("reading config from %s", config_filename)
        conf = read_yaml_from_file_path(config_filename, 'schemas/config.json')
    reactor_conf = ReactorConfig(conf)

    workspace = self.workflow.plugin_workspace.setdefault(self.key, {})
    workspace[WORKSPACE_CONF_KEY] = reactor_conf

    self.log.info("reading config content %s", reactor_conf.conf)

    # need to stash this on the workflow for access in a place that can't import this module
    self.workflow.default_image_build_method = get_default_image_build_method(self.workflow)
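# Hedged companion sketch: downstream consumers read the stored config back out
# of the plugin workspace the same way the tests in this listing seed it, keyed
# by ReactorConfigPlugin.key and WORKSPACE_CONF_KEY.
workspace = workflow.plugin_workspace.get(ReactorConfigPlugin.key, {})
reactor_conf = workspace.get(WORKSPACE_CONF_KEY)  # a ReactorConfig instance, or None if the plugin has not run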
def test_get_docker_registry(self, config, fallback, valid):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    config_json = read_yaml(config, 'schemas/config.json')

    docker_reg = {
        'version': 'v2',
        'insecure': False,
        'secret': '/var/run/secrets/atomic-reactor/v2-registry-dockercfg',
        'url': 'https://container-registry.example.com/v2',
    }

    if fallback:
        if valid:
            docker_fallback = docker_reg
            expected = docker_reg
        else:
            docker_fallback = NO_FALLBACK
    else:
        docker_fallback = {}
        expected = {
            'url': 'https://container-registry.example.com',
            'insecure': False,
            'secret': '/var/run/secrets/atomic-reactor/v2-registry-dockercfg'
        }
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    if valid:
        docker_registry = get_docker_registry(workflow, docker_fallback)
        assert docker_registry == expected
    else:
        if fallback:
            with pytest.raises(KeyError):
                get_docker_registry(workflow, docker_fallback)
        else:
            with pytest.raises(OsbsValidationException):
                get_docker_registry(workflow, docker_fallback)
def test_get_openshift_session(self, fallback, build_json_dir, config, raise_error):
    tasker, workflow = self.prepare()
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

    if build_json_dir:
        config += " build_json_dir: " + build_json_dir

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        'openshift_url': config_json['openshift']['url'],
        'verify_ssl': not config_json['openshift'].get('insecure', False),
        'use_auth': False,
        'conf_file': None,
        'namespace': 'namespace',
        'build_json_dir': build_json_dir
    }
    if config_json['openshift'].get('auth'):
        if config_json['openshift']['auth'].get('krb_keytab_path'):
            auth_info['kerberos_keytab'] = \
                config_json['openshift']['auth'].get('krb_keytab_path')
        if config_json['openshift']['auth'].get('krb_principal'):
            auth_info['kerberos_principal'] = \
                config_json['openshift']['auth'].get('krb_principal')
        if config_json['openshift']['auth'].get('krb_cache_path'):
            auth_info['kerberos_ccache'] = \
                config_json['openshift']['auth'].get('krb_cache_path')
        if config_json['openshift']['auth'].get('ssl_certs_dir'):
            auth_info['client_cert'] = \
                os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'cert')
            auth_info['client_key'] = \
                os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'key')
        auth_info['use_auth'] = config_json['openshift']['auth'].get('enable', False)

    fallback_map = {}
    if fallback:
        fallback_map = {'url': config_json['openshift']['url'],
                        'insecure': config_json['openshift'].get('insecure', False),
                        'build_json_dir': build_json_dir}
        if config_json['openshift'].get('auth'):
            fallback_map['auth'] = {}
            fallback_map['auth']['krb_keytab_path'] = \
                config_json['openshift']['auth'].get('krb_keytab_path')
            fallback_map['auth']['krb_principal'] = \
                config_json['openshift']['auth'].get('krb_principal')
            fallback_map['auth']['enable'] = \
                config_json['openshift']['auth'].get('enable', False)
            fallback_map['auth']['krb_cache_path'] = \
                config_json['openshift']['auth'].get('krb_cache_path')
            fallback_map['auth']['ssl_certs_dir'] = \
                config_json['openshift']['auth'].get('ssl_certs_dir')
    else:
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] = \
            ReactorConfig(config_json)

    (flexmock(osbs.conf.Configuration)
        .should_call('__init__')
        .with_args(**auth_info)
        .once())
    (flexmock(osbs.api.OSBS)
        .should_call('__init__')
        .once())
    flexmock(os, environ={'BUILD': '{"metadata": {"namespace": "namespace"}}'})

    get_openshift_session(workflow, fallback_map)
def validate_user_data(self, user_params):
    # make sure the input json is valid
    read_yaml(user_params, 'schemas/user_params.json')
    return json.loads(user_params)