def _create_build_config_and_build(self, build_request):
    """
    Render the build request, create or update its BuildConfig in
    OpenShift, and start (or wait for) a Build from it.

    :param build_request: BuildRequest-like object; must provide render()
                          and is_auto_instantiated()
    :return: BuildResponse for the started (or auto-instantiated) build
    :raises OsbsValidationException: when the rendered template's
                                     apiVersion does not match the config
    :raises OsbsException: when builds are already running for this config
    """
    # TODO: test this method more thoroughly
    build_json = build_request.render()
    api_version = build_json['apiVersion']
    if api_version != self.os_conf.get_openshift_api_version():
        raise OsbsValidationException("BuildConfig template has incorrect apiVersion (%s)" %
                                      api_version)

    build_config_name = build_json['metadata']['name']

    # check if a build already exists for this config; if so then raise
    running_builds = self._get_running_builds_for_build_config(build_config_name)
    rb_len = len(running_builds)
    if rb_len > 0:
        if rb_len == 1:
            rb = running_builds[0]
            msg = 'Build %s for %s in state %s, can\'t proceed.' % \
                (rb.get_build_name(), build_config_name, rb.status)
        else:
            msg = self._panic_msg_for_more_running_builds(build_config_name, running_builds)
        raise OsbsException(msg)

    try:
        # see if there's already a build config
        existing_bc = self.os.get_build_config(build_config_name)
    except OsbsException:
        # doesn't exist
        existing_bc = None

    build = None
    if existing_bc is not None:
        # merge the freshly rendered JSON into the existing config,
        # then push the update back to OpenShift
        utils.buildconfig_update(existing_bc, build_json)
        logger.debug('build config for %s already exists, updating...', build_config_name)
        self.os.update_build_config(build_config_name, json.dumps(existing_bc))
    else:
        # if it doesn't exist, then create it
        logger.debug('build config for %s doesn\'t exist, creating...', build_config_name)
        bc = self.os.create_build_config(json.dumps(build_json)).json()
        # if there's an "ImageChangeTrigger" on the BuildConfig and "From" is of type
        # "ImageStreamTag", the build will be scheduled automatically
        # see https://github.com/projectatomic/osbs-client/issues/205
        if build_request.is_auto_instantiated():
            prev_version = bc['status']['lastVersion']
            build_id = self.os.wait_for_new_build_config_instance(build_config_name,
                                                                  prev_version)
            build = BuildResponse(self.os.get_build(build_id).json())

    # no auto-instantiated build was picked up above, so start one explicitly
    if build is None:
        response = self.os.start_build(build_config_name)
        build = BuildResponse(response.json())

    return build
def _create_build_config_and_build(self, build_request):
    """
    Render the build request, create or update the matching BuildConfig
    (possibly located by git labels rather than name), and start a build.

    :param build_request: BuildRequest-like object; must provide render()
                          and is_auto_instantiated()
    :return: BuildResponse for the started (or auto-instantiated) build
    :raises OsbsValidationException: when the rendered template's
                                     apiVersion does not match the config,
                                     or when git labels collide
    """
    build = None
    build_json = build_request.render()
    api_version = build_json['apiVersion']
    if api_version != self.os_conf.get_openshift_api_version():
        raise OsbsValidationException(
            'BuildConfig template has incorrect apiVersion (%s)' % api_version)

    build_config_name = build_json['metadata']['name']
    logger.debug('build config to be named "%s"', build_config_name)
    existing_bc = self._get_existing_build_config(build_json)

    if existing_bc is not None:
        self._verify_labels_match(build_json, existing_bc)
        # Existing build config may have a different name if matched by
        # git-repo-name and git-branch labels. Continue using existing
        # build config name.
        build_config_name = existing_bc['metadata']['name']
        logger.debug('existing build config name to be used "%s"',
                     build_config_name)
        self._verify_no_running_builds(build_config_name)

        utils.buildconfig_update(existing_bc, build_json)
        # Reset name change that may have occurred during
        # update above, since renaming is not supported.
        existing_bc['metadata']['name'] = build_config_name
        logger.debug('build config for %s already exists, updating...',
                     build_config_name)
        self.os.update_build_config(build_config_name, json.dumps(existing_bc))
    else:
        # if it doesn't exist, then create it
        logger.debug('build config for %s doesn\'t exist, creating...',
                     build_config_name)
        bc = self.os.create_build_config(json.dumps(build_json)).json()
        # if there's an "ImageChangeTrigger" on the BuildConfig and "From" is of type
        # "ImageStreamTag", the build will be scheduled automatically
        # see https://github.com/projectatomic/osbs-client/issues/205
        if build_request.is_auto_instantiated():
            prev_version = bc['status']['lastVersion']
            build_id = self.os.wait_for_new_build_config_instance(
                build_config_name, prev_version)
            build = BuildResponse(self.os.get_build(build_id).json())

    # no auto-instantiated build was picked up above, so start one explicitly
    if build is None:
        response = self.os.start_build(build_config_name)
        build = BuildResponse(response.json())
    return build
def create_source_container_pipeline_run(self,
                                         component=None,
                                         koji_task_id=None,
                                         target=None,
                                         **kwargs):
    """
    Take input args, create source pipeline run

    :param component: str, container component name; required (non-empty)
    :param koji_task_id: int or str, Koji task that requested this build
    :param target: str, Koji target name
    :return: instance of PipelineRun
    :raises OsbsValidationException: when required arguments are missing
    :raises OsbsResponseException: when starting the pipeline run fails
    """
    error_messages = []
    # most likely can be removed, source build should get component name
    # from binary build OSBS2 TBD
    if not component:
        error_messages.append("required argument 'component' can't be empty")
    if error_messages:
        raise OsbsValidationException(", ".join(error_messages))

    pipeline_run_name, pipeline_run_data = self._get_source_container_pipeline_data()

    build_json_store = self.os_conf.get_build_json_store()
    user_params = SourceContainerUserParams.make_params(
        build_json_dir=build_json_store,
        build_conf=self.os_conf,
        component=component,
        koji_target=target,
        koji_task_id=koji_task_id,
        pipeline_run_name=pipeline_run_name,
        **kwargs
    )

    # stash the user params into the pipeline run data before submitting
    self._set_source_container_pipeline_data(pipeline_run_name, pipeline_run_data,
                                             user_params)

    logger.info("creating source container image pipeline run: %s", pipeline_run_name)
    pipeline_run = PipelineRun(self.os, pipeline_run_name, pipeline_run_data)

    try:
        logger.info("pipeline run created: %s", pipeline_run.start_pipeline_run())
    except OsbsResponseException:
        logger.error("failed to create pipeline run %s", pipeline_run_name)
        raise

    return pipeline_run
def _set_flatpak(self, reactor_config_data): flatpak_key = 'flatpak' flatpak_base_image_key = 'base_image' if self.user_params.flatpak.value: flatpack_base_image = ( reactor_config_data .get(flatpak_key, {}) .get(flatpak_base_image_key, None) ) if flatpack_base_image: self.base_image = flatpack_base_image self.user_params.base_image.value = flatpack_base_image else: raise OsbsValidationException("flatpak_base_image must be provided")
def _create_build_config_and_build(self, build_request):
    """
    Render the build request, create/update its BuildConfig with triggers
    stripped, configure the parent ImageStreamTag, then restore triggers
    and start (or wait for) a build.

    :param build_request: BuildRequest-like object; must provide render()
    :return: BuildResponse for the resulting build
    :raises OsbsValidationException: when the rendered template's
                                     apiVersion does not match the config
    """
    build_json = build_request.render()
    api_version = build_json['apiVersion']
    if api_version != self.os_conf.get_openshift_api_version():
        raise OsbsValidationException('BuildConfig template has incorrect apiVersion (%s)' %
                                      api_version)

    build_config_name = build_json['metadata']['name']
    logger.debug('build config to be named "%s"', build_config_name)
    existing_bc = self._get_existing_build_config(build_json)

    image_stream, image_stream_tag_name = \
        self._get_image_stream_info_for_build_request(build_request)

    # Remove triggers in BuildConfig to avoid accidental
    # auto instance of Build. If defined, triggers will
    # be added to BuildConfig after ImageStreamTag object
    # is properly configured.
    triggers = build_json['spec'].pop('triggers', None)

    if existing_bc:
        build_config_name = existing_bc['metadata']['name']
        existing_bc = self._update_build_config_when_exist(build_json)
    else:
        logger.debug("build config for %s doesn't exist, creating...",
                     build_config_name)
        existing_bc = self.os.create_build_config(json.dumps(build_json)).json()

    if image_stream:
        changed_ist = self.ensure_image_stream_tag(image_stream,
                                                   image_stream_tag_name,
                                                   scheduled=True)
        logger.debug('Changed parent ImageStreamTag? %s', changed_ist)

    # now that the ImageStreamTag is configured, it is safe to re-add triggers
    if triggers:
        existing_bc = self._update_build_config_with_triggers(build_json, triggers)

    if image_stream and triggers:
        # the trigger will instantiate the build for us; wait for it
        prev_version = existing_bc['status']['lastVersion']
        build_id = self.os.wait_for_new_build_config_instance(
            build_config_name, prev_version)
        build = BuildResponse(self.os.get_build(build_id).json())
    else:
        response = self.os.start_build(build_config_name)
        build = BuildResponse(response.json())

    return build
def _verify_labels_match(self, new_build_config, existing_build_config): new_labels = new_build_config['metadata']['labels'] existing_labels = existing_build_config['metadata']['labels'] for key in self._GIT_LABEL_KEYS: new_label_value = new_labels.get(key) existing_label_value = existing_labels.get(key) if (existing_label_value and existing_label_value != new_label_value): msg = ('Git labels collide with existing build config "%s". ' 'Existing labels: %r, ' 'New labels: %r ') % ( existing_build_config['metadata']['name'], existing_labels, new_labels) raise OsbsValidationException(msg)
def set_data_from_reactor_config(self, reactor_config_data):
    """
    Sets data from reactor config.

    :param reactor_config_data: dict, parsed reactor config map (may be
                                empty/None when no config map is available)
    :raises OsbsValidationException: for a flatpak build with no base image
                                     available from any source
    """
    super(BuildRequestV2, self).set_data_from_reactor_config(reactor_config_data)

    if not reactor_config_data:
        # No reactor config: a flatpak build then needs base_image from
        # container.yaml (already in user_params); otherwise fail early.
        if self.user_params.flatpak.value and not self.user_params.base_image.value:
            # fixed message typo: "must be be set" -> "must be set"
            raise OsbsValidationException(
                "Flatpak base_image must be set in container.yaml or reactor config"
            )
        return

    self._set_flatpak(reactor_config_data)
def render_add_filesystem(self):
    """
    Configure the add_filesystem prebuild plugin, when present.

    Sets koji_hub (required) plus optional koji_proxyuser and repos args.

    :raises OsbsValidationException: when the plugin is configured but
                                     kojihub is not set
    """
    phase, plugin = 'prebuild_plugins', 'add_filesystem'

    if not self.dj.dock_json_has_plugin_conf(phase, plugin):
        return

    kojihub = self.spec.kojihub.value
    if not kojihub:
        raise OsbsValidationException(
            'Custom base image builds require kojihub to be defined')

    self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', kojihub)

    # optional args are only set when a value is configured
    for arg_name, arg_value in (('koji_proxyuser', self.spec.proxy.value),
                                ('repos', self.spec.yum_repourls.value)):
        if arg_value:
            self.dj.dock_json_set_arg(phase, plugin, arg_name, arg_value)
def render_pulp_sync(self):
    """
    If a pulp registry is specified, use the pulp plugin.

    Requires both a pulp registry and at least one v2 docker registry;
    otherwise the pulp_sync plugin is removed from the request.

    :raises OsbsValidationException: when a pulp registry is configured
                                     but no pulp secret is available
    """
    if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_sync'):
        return

    pulp_registry = self.spec.pulp_registry.value

    # First specified v2 registry is the one we'll tell pulp
    # to sync from. Keep the http prefix -- pulp wants it.
    sync_registry = None
    sync_secret = None
    pairs = zip_longest(self.spec.registry_uris.value,
                        self.spec.registry_secrets.value)
    for candidate, candidate_secret in pairs:
        if candidate.version != 'v2':
            continue
        sync_registry = candidate.uri
        sync_secret = candidate_secret
        logger.info("using docker v2 registry %s for pulp_sync", sync_registry)
        break

    if not (pulp_registry and sync_registry):
        # If no pulp registry is specified, don't run the pulp plugin
        logger.info("removing pulp_sync from request, "
                    "requires pulp_registry and a v2 registry")
        self.dj.remove_plugin("postbuild_plugins", "pulp_sync")
        return

    self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                              'pulp_registry_name', pulp_registry)
    self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                              'docker_registry', sync_registry)
    if sync_secret:
        self.set_secret_for_plugin(('postbuild_plugins',
                                    'pulp_sync',
                                    'registry_secret_path'),
                                   sync_secret)

    # Verify we have a pulp secret
    if self.spec.pulp_secret.value is None:
        raise OsbsValidationException("Pulp registry specified "
                                      "but no auth config")
def create_worker_build(self, **kwargs):
    """
    Create a worker build.

    Pass through method to create_prod_build with the
    following modifications:
        - platform param is required
        - release param is required
        - arrangement_version param is required, which is used to
          select which worker_inner:n.json template to use
        - inner template set to worker_inner:n.json if not set
        - outer template set to worker.json if not set
        - customize configuration set to worker_customize.json if not set

    :return: BuildResponse instance
    :raises ValueError: when required params are missing or a platforms
                        (plural) param is supplied
    :raises OsbsValidationException: when the arrangement_version does not
                                     correspond to an inner template
    """
    missing = {name for name in ('platform', 'release', 'arrangement_version')
               if not kwargs.get(name)}
    if missing:
        raise ValueError("Worker build missing required parameters: %s" % missing)

    if kwargs.get('platforms'):
        raise ValueError("Worker build called with unwanted platforms param")

    arrangement_version = kwargs['arrangement_version']

    kwargs.setdefault('inner_template',
                      WORKER_INNER_TEMPLATE.format(
                          arrangement_version=arrangement_version))
    kwargs.setdefault('outer_template', WORKER_OUTER_TEMPLATE)
    kwargs.setdefault('customize_conf', WORKER_CUSTOMIZE_CONF)
    kwargs['build_type'] = BUILD_TYPE_WORKER
    try:
        return self._do_create_prod_build(**kwargs)
    except IOError as ex:
        # a missing inner template file means the arrangement_version was bad
        if os.path.basename(ex.filename) == kwargs['inner_template']:
            raise OsbsValidationException("worker invalid arrangement_version %s" %
                                          arrangement_version)
        raise
def __set__(self, obj, value):
    """
    Validate and store a build ID.

    Build IDs must fit OpenShift/Kubernetes object-name rules:
      * 63 chars at most (longer values are truncated with a warning)
      * (([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])?

    :raises OsbsValidationException: when the value fails the regex
    """
    # Kubernetes object names are limited to 63 characters;
    # component + timestamp can exceed that, so truncate.
    if len(value) > 63:
        truncated = value[:63]
        logger.warning("'%s' is too long, changing to '%s'", value, truncated)
        value = truncated

    pattern = re.compile(r"^(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])?$")
    if pattern.match(value) is None:
        logger.error("'%s' is not valid build ID", value)
        raise OsbsValidationException("Build ID '%s', doesn't match regex '%s'" %
                                      (value, pattern))
    super(BuildIDParam, self).__set__(obj, value)
def value(self, val):  # pylint: disable=W0221
    """
    Validate and store a build ID (property setter variant).

    Build IDs must fit OpenShift/Kubernetes object-name rules:
      * 63 chars at most (longer values are truncated with a warning)
      * (([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])?

    :raises OsbsValidationException: when the value fails the regex
    """
    # Kubernetes object names are limited to 63 characters;
    # component + timestamp can exceed that, so truncate.
    if len(val) > 63:
        truncated = val[:63]
        logger.warning("'%s' is too long, changing to '%s'", val, truncated)
        val = truncated

    pattern = re.compile(r"^(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])?$")
    if pattern.match(val) is None:
        logger.error("'%s' is not valid build ID", val)
        raise OsbsValidationException("Build ID '%s', doesn't match regex '%s'" %
                                      (val, pattern))
    BuildParam.value.fset(self, val)
def validate_with_schema(data, schema):
    """
    Validate data against a JSON schema (Draft 4).

    :param data: dict, data to be validated
    :param schema: dict, schema to validate with
    :raises jsonschema.SchemaError: when the schema itself is invalid
    :raises OsbsValidationException: when the data fails validation; all
                                     individual errors are logged at debug
    """
    validator = jsonschema.Draft4Validator(schema=schema)
    try:
        jsonschema.Draft4Validator.check_schema(schema)
        validator.validate(data)
    except jsonschema.SchemaError:
        logger.error('invalid schema, cannot validate')
        raise
    except jsonschema.ValidationError as exc:
        logger.debug("schema validation error: %s", exc)
        exc_message = get_error_message(exc)
        # log every error, not just the first one raised by validate()
        for error in validator.iter_errors(data):
            logger.debug("validation error: %s", get_error_message(error))
        raise OsbsValidationException(exc_message)
def set_params(self, sources_for_koji_build_nvr=None, sources_for_koji_build_id=None,
               **kwargs):
    """
    Set parameters for a source container build.

    :param str sources_for_koji_build_nvr: NVR of build that will be used
        to fetch sources
    :param int sources_for_koji_build_id: ID of build that will be used
        to fetch sources
    :raises OsbsValidationException: when neither the NVR nor the ID of
        the sources build is given
    """
    super(SourceContainerUserParams, self).set_params(**kwargs)
    if sources_for_koji_build_nvr is None and sources_for_koji_build_id is None:
        raise OsbsValidationException(
            "At least one param from 'sources_for_koji_build_id' or "
            "'sources_for_koji_build_nvr' must be specified")
    self.sources_for_koji_build_nvr.value = sources_for_koji_build_nvr
    self.sources_for_koji_build_id.value = sources_for_koji_build_id
def read_yaml(yaml_data, schema, package=None):
    """
    Parse YAML content and validate it against a packaged JSON schema.

    :param yaml_data: string, yaml content
    :param schema: string, file path to the JSON schema resource
    :param package: string, package name containing the schema
                    (defaults to 'osbs')
    :return: the parsed YAML data
    :raises OsbsValidationException: when the data fails schema validation
    """
    package = package or 'osbs'
    try:
        resource = resource_stream(package, schema)
        schema_stream = codecs.getreader('utf-8')(resource)
    except ImportError:
        logger.error('Unable to find package %s', package)
        raise
    except (IOError, TypeError):
        logger.error('unable to extract JSON schema, cannot validate')
        raise

    try:
        schema_obj = json.load(schema_stream)
    except ValueError:
        logger.error('unable to decode JSON schema, cannot validate')
        raise

    data = yaml.safe_load(yaml_data)
    validator = jsonschema.Draft4Validator(schema=schema_obj)
    try:
        jsonschema.Draft4Validator.check_schema(schema_obj)
        validator.validate(data)
    except jsonschema.SchemaError:
        logger.error('invalid schema, cannot validate')
        raise
    except jsonschema.ValidationError as exc:
        logger.debug("schema validation error: %s", exc)
        exc_message = get_error_message(exc)
        # log every error, not just the first one raised by validate()
        for error in validator.iter_errors(data):
            logger.debug("validation error: %s", get_error_message(error))
        raise OsbsValidationException(exc_message)
    return data
def create_orchestrator_build(self, **kwargs):
    """
    Create an orchestrator build.

    Pass through method to create_prod_build with the
    following modifications:
        - platforms param is required
        - arrangement_version param may be used to select which
          orchestrator_inner:n.json template to use
        - inner template set to orchestrator_inner:n.json if not set
        - outer template set to orchestrator.json if not set
        - customize configuration set to orchestrator_customize.json if not set

    :return: BuildResponse instance
    :raises ValueError: when platforms is missing or a (singular) platform
                        param is supplied
    :raises OsbsOrchestratorNotEnabled: when can_orchestrate is disabled
    :raises OsbsValidationException: when the arrangement_version does not
                                     correspond to an inner template
    """
    if not kwargs.get('platforms'):
        raise ValueError('Orchestrator build requires platforms param')

    if not self.can_orchestrate():
        raise OsbsOrchestratorNotEnabled("can't create orchestrate build "
                                         "when can_orchestrate isn't enabled")

    extra = [name for name in ('platform',) if kwargs.get(name)]
    if extra:
        raise ValueError("Orchestrator build called with unwanted parameters: %s" %
                         extra)

    arrangement_version = kwargs.setdefault('arrangement_version',
                                            self.build_conf.get_arrangement_version())

    kwargs.setdefault('inner_template',
                      ORCHESTRATOR_INNER_TEMPLATE.format(
                          arrangement_version=arrangement_version))
    kwargs.setdefault('outer_template', ORCHESTRATOR_OUTER_TEMPLATE)
    kwargs.setdefault('customize_conf', ORCHESTRATOR_CUSTOMIZE_CONF)
    kwargs['build_type'] = BUILD_TYPE_ORCHESTRATOR
    try:
        return self._do_create_prod_build(**kwargs)
    except IOError as ex:
        # a missing inner template file means the arrangement_version was bad
        if os.path.basename(ex.filename) == kwargs['inner_template']:
            raise OsbsValidationException("orchestrator invalid arrangement_version %s" %
                                          arrangement_version)
        raise
def _ensure_parsed(self): """Parse the Dockerfile and set self._labels and self._base_image.""" if self._parsed: return self._parsed = True if self.configuration.is_flatpak: modules = self.configuration.container_module_specs if modules: module = modules[0] else: raise OsbsValidationException( '"compose" config is missing "modules",' ' required for Flatpak') # modules is always required for a Flatpak build, but is only used # for the name and component labels if they aren't explicitly set # in container.yaml name = self.configuration.flatpak_name or module.name component = self.configuration.flatpak_component or module.name self._labels = Labels({ Labels.LABEL_TYPE_NAME: name, Labels.LABEL_TYPE_COMPONENT: component, Labels.LABEL_TYPE_VERSION: module.stream, }) self._base_image = self.configuration.flatpak_base_image else: df_parser = self.dockerfile_parser # DockerfileParse does not ensure a Dockerfile exists during initialization try: self._labels = Labels(df_parser.labels) self._base_image = df_parser.baseimage except IOError as e: raise RuntimeError( 'Could not parse Dockerfile in {}: {}'.format( df_parser.dockerfile_path, e))
def set_build_env_vars(self, reactor_config_data):
    """
    Propagates build_env_vars from config map to build environment.

    :param reactor_config_data: dict, parsed reactor config map
    :raises OsbsValidationException: when a variable is already present in
                                     the build environment
    """
    env = self.template['spec']['strategy']['customStrategy']['env']
    seen = {entry['name'] for entry in env}

    for var in reactor_config_data.get('build_env_vars', []):
        name = var['name']
        if name in seen:
            msg = 'Cannot set environment variable from reactor config (already exists): {}'
            raise OsbsValidationException(msg.format(name))
        # Could be just `env.append(var)`, but here we are using the reactor config
        # without validating it (that happens later, in atomic-reactor). Do it this
        # way to fail early if format does not match schema.
        env.append({'name': name, 'value': var['value']})
        seen.add(name)
        logger.info('Set environment variable from reactor config: %s', name)
def make_params(cls,
                sources_for_koji_build_nvr=None,
                sources_for_koji_build_id=None,
                **kwargs):
    """
    Create params for a source container build.

    :param str sources_for_koji_build_nvr: NVR of build that will be used
        to fetch sources
    :param int sources_for_koji_build_id: ID of build that will be used
        to fetch sources
    :raises OsbsValidationException: when neither the NVR nor the ID of
        the sources build is given
    """
    if sources_for_koji_build_nvr is None and sources_for_koji_build_id is None:
        raise OsbsValidationException(
            "At least one param from 'sources_for_koji_build_id' or "
            "'sources_for_koji_build_nvr' must be specified")
    kwargs["sources_for_koji_build_id"] = sources_for_koji_build_id
    kwargs["sources_for_koji_build_nvr"] = sources_for_koji_build_nvr
    return cls._make_params_super(**kwargs)
def set_data_from_reactor_config(self, reactor_config_data):
    """
    Sets data from reactor config.

    :param reactor_config_data: dict, parsed reactor config map (may be
                                empty/None when no config map is available)
    :raises OsbsValidationException: for a flatpak build with no base image
                                     available from any source
    :raises RuntimeError: when source_registry is missing from the config
    """
    super(BuildRequestV2, self).set_data_from_reactor_config(reactor_config_data)

    if not reactor_config_data:
        if self.user_params.flatpak and not self.user_params.base_image:
            # fixed message typo: "must be be set" -> "must be set"
            raise OsbsValidationException(
                "Flatpak base_image must be set in container.yaml or reactor config"
            )
    else:
        self._set_flatpak(reactor_config_data)

    # source_registry is mandatory; without it the image stream and
    # trigger imagestreamtag cannot be derived
    if not self.source_registry:
        raise RuntimeError(
            'mandatory "source_registry" is not defined in reactor_config')

    source_registry = RegistryURI(self.source_registry['url']).docker_uri

    self._update_trigger_imagestreamtag(source_registry)
    self._update_imagestream_name(source_registry)
def render_pulp_sync(self):
    """
    If a pulp registry is specified, use the pulp plugin.

    Requires both a pulp registry and at least one v2 docker registry;
    otherwise the pulp_sync plugin is removed from the request.

    :raises OsbsValidationException: when neither a pulp secret nor a
                                     username arg provides auth config
    """
    if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_sync'):
        return

    pulp_registry = self.spec.pulp_registry.value
    v2_registries = [reg for reg in self.spec.registry_uris.value
                     if reg.version == 'v2']

    if not (pulp_registry and v2_registries):
        # If no pulp registry is specified, don't run the pulp plugin
        logger.info("removing pulp_sync from request, "
                    "requires pulp_registry and a v2 registry")
        self.dj.remove_plugin("postbuild_plugins", "pulp_sync")
        return

    self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                              'pulp_registry_name', pulp_registry)

    # First specified v2 registry is the one we'll tell pulp
    # to sync from. Keep the http prefix -- pulp wants it.
    sync_uri = v2_registries[0].uri
    logger.info("using docker v2 registry %s for pulp_sync", sync_uri)
    self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                              'docker_registry', sync_uri)

    # Verify we have either a secret or username/password
    if self.spec.pulp_secret.value is None:
        plugin_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins',
                                                        'pulp_sync')
        if 'username' not in plugin_conf.get('args', {}):
            raise OsbsValidationException("Pulp registry specified "
                                          "but no auth config")
def render(self, validate=True):
    """
    Render the build request into a build template (JSON-like dict).

    Runs the individual render/set steps in a fixed order; several steps
    depend on earlier ones (see inline comments).

    :param validate: bool, validate the user params first
    :return: dict, the populated build template
    :raises OsbsValidationException: when the OSBS API is not set
    """
    # the api is required for build requests
    # can't check that its an OSBS object because of the circular import
    if not self.osbs_api:
        raise OsbsValidationException("OSBS API is not specified")

    # Validate BuildUserParams
    if validate:
        self.user_params.validate()

    self.render_name()
    self.render_output_name()
    self.render_custom_strategy()
    self.render_resource_limits()
    self.adjust_for_scratch()
    self.set_reactor_config()

    # Set required_secrets based on reactor_config
    # Set worker_token_secrets based on reactor_config, if any
    data = self.get_reactor_config_data()
    self.set_data_from_reactor_config(data)
    self.render_resource_requests()

    # render params has to run after setting data from reactor config,
    # because that is updating user_params
    self.render_user_params()

    koji_task_id = self.user_params.koji_task_id
    if koji_task_id is not None:
        self.set_label('koji-task-id', str(koji_task_id))

    # Should run near the end of render() to avoid setting any special
    # environment variables (such as USER_PARAMS, REACTOR_CONFIG) in case
    # someone sets them in reactor_config
    self.set_build_env_vars(data)

    # make sure that there is always deadline set (even if this may be changed in subclass)
    self.set_deadline()

    return self.template
def get_token_secrets(self):
    """
    Parse the token_secrets setting into a mapping.

    The setting is a whitespace-separated list of entries; each entry is
    either `secret` (no mount path) or `secret:path`.

    :return: dict mapping secret name to mount path (or None)
    :raises OsbsValidationException: when any entry has an invalid path
    """
    raw = self._get_value("token_secrets", self.conf_section, "token_secrets")

    secrets = {}
    bad_path_found = False
    for entry in (raw.split() if raw else []):
        secret, sep, path = entry.partition(':')
        if not sep:
            # bare secret name, no mount path
            secrets[secret] = None
        elif path in ("", "/"):
            # keep scanning so every invalid path gets logged
            logger.error("token_secret file path must be valid: %s", path)
            bad_path_found = True
        else:
            secrets[secret] = path

    if bad_path_found:
        raise OsbsValidationException("Wrong token_secrets configuration")

    return secrets
def get_registry_api_versions(self, platform=None):
    """
    Return the docker registry API versions enabled for this instance.

    With a platform given, the per-platform `enable_v1` flag may restrict
    the result to ['v2'] only.

    :param platform: str or None, platform name
    :return: list of str version names (e.g. ['v1', 'v2'])
    :raises OsbsValidationException: when the platform is v2-only but the
                                     instance supports only v1
    """
    raw = self._get_value("registry_api_versions", self.conf_section,
                          "registry_api_versions", default='v1,v2')
    versions = raw.split(',')
    if platform is None:
        return versions

    platform_section = 'platform:{0}'.format(platform)
    if self._get_value("enable_v1", platform_section, "enable_v1",
                       default=False, is_bool_val=True):
        return versions

    # platform is v2-only; the instance must support v2
    if 'v2' not in versions:
        raise OsbsValidationException('v2 only platform in v1 only instance')
    return ['v2']
def get_equal_labels(self):
    """
    Parse the equal_labels setting into groups of interchangeable labels.

    Expected format: comma-separated groups, each group two or more
    colon-separated label names, e.g.
    `name1:name2:name3, release1:release2, version1:version2`.

    :return: list of lists of str (empty when the setting is unset)
    :raises OsbsValidationException: when the setting is malformed
    """
    raw = self._get_value("equal_labels", self.conf_section, "equal_labels")
    if not raw:
        return []

    # ',' separator for groups
    # ':' separator for equal preference labels
    # there has to be at least 2 labels in equal labels in group
    if not re.match(r'^[^:,\s]+(:[^:,\s]+)+\s*(,\s*[^:,\s]+(:[^:,\s]+\s*)+)*$', raw):
        raise OsbsValidationException("Wrong equal_labels configuration")

    return [[label.strip() for label in group.split(':')]
            for group in raw.split(',')]
def set_params(self, git_uri=None, git_ref=None, git_branch=None, base_image=None,
               name_label=None, release=None, platforms=None, build_type=None,
               filesystem_koji_task_id=None, koji_parent_build=None,
               koji_upload_dir=None, flatpak=None, yum_repourls=None,
               compose_ids=None, isolated=None, parent_images_digests=None,
               tags_from_yaml=None, additional_tags=None, git_commit_depth=None,
               operator_manifests_extract_platform=None,
               triggered_after_koji_task=None, **kwargs):
    """
    Set user parameters for a container build.

    :param git_uri: str, uri of the git repository for the source
    :param git_ref: str, commit ID of the branch to be pulled
    :param git_branch: str, branch name of the branch to be pulled
    :param base_image: str, parent image; required unless flatpak
    :param name_label: str, label used to derive the imagestream name; required
    :param flatpak: bool, whether this is a Flatpak OCI image build
    :param yum_repourls: list of str or None
    :param compose_ids: list of int or None; mutually exclusive with
                        a signing_intent passed via kwargs
    :raises OsbsValidationException: on missing/conflicting arguments
    """
    super(BuildUserParams, self).set_params(**kwargs)
    self.git_uri.value = git_uri
    self.git_ref.value = git_ref
    self.git_branch.value = git_branch
    self.git_commit_depth.value = git_commit_depth
    self.tags_from_yaml.value = tags_from_yaml
    self.additional_tags.value = additional_tags or set()
    self.release.value = release
    self.build_type.value = build_type
    self.base_image.value = base_image
    # build name is derived from the git repo and branch
    self.name.value = make_name_from_git(self.git_uri.value, self.git_branch.value)
    self.parent_images_digests.value = parent_images_digests
    self.operator_manifests_extract_platform.value = operator_manifests_extract_platform
    self.platforms.value = platforms
    self.filesystem_koji_task_id.value = filesystem_koji_task_id
    self.koji_parent_build.value = koji_parent_build
    self.koji_upload_dir.value = koji_upload_dir
    self.flatpak.value = flatpak
    self.isolated.value = isolated
    self.triggered_after_koji_task.value = triggered_after_koji_task
    # flatpak builds take their base image from config instead
    if not flatpak:
        if not base_image:
            raise OsbsValidationException("base_image must be provided")
        self.trigger_imagestreamtag.value = get_imagestreamtag_from_image(base_image)
    if not name_label:
        raise OsbsValidationException("name_label must be provided")
    self.imagestream_name.value = name_label.replace('/', '-')
    if kwargs.get('signing_intent') and compose_ids:
        raise OsbsValidationException(
            'Please only define signing_intent -OR- compose_ids, not both')
    if not (compose_ids is None or isinstance(compose_ids, list)):
        raise OsbsValidationException("compose_ids must be a list")
    if not (yum_repourls is None or isinstance(yum_repourls, list)):
        raise OsbsValidationException("yum_repourls must be a list")
    self.yum_repourls.value = yum_repourls or []
    self.compose_ids.value = compose_ids or []
def set_params(self, build_from=None, build_image=None, build_imagestream=None,
               component=None, koji_target=None, koji_task_id=None,
               orchestrator_deadline=None, platform=None, reactor_config_map=None,
               reactor_config_override=None, scratch=None, signing_intent=None,
               user=None, worker_deadline=None, **kwargs):
    """
    Set the common user parameters for a build.

    Exactly one of build_from, build_image, build_imagestream must be
    given; build_image/build_imagestream are deprecated in favor of
    build_from ("image:<value>" or "imagestream:<value>").

    Non-numeric or missing deadlines fall back to the orchestrator/worker
    maximum runtimes.

    :raises OsbsValidationException: on conflicting or malformed build
                                     source arguments
    """
    self.component.value = component
    self.koji_target.value = koji_target
    self.koji_task_id.value = koji_task_id
    self.platform.value = platform
    self.reactor_config_map.value = reactor_config_map
    self.reactor_config_override.value = reactor_config_override
    self.scratch.value = scratch
    self.signing_intent.value = signing_intent
    self.user.value = user

    unique_build_args = (build_imagestream, build_image, build_from)
    if sum(bool(a) for a in unique_build_args) != 1:
        raise OsbsValidationException(
            'Please only define one of build_from, build_image, build_imagestream')
    self.build_image.value = build_image
    self.build_imagestream.value = build_imagestream
    if self.build_image.value or self.build_imagestream.value:
        # fixed: adjacent string literals previously ran "deprecated,use" together
        logger.warning("build_image or build_imagestream is defined, they are deprecated, "
                       "use build_from instead")

    if build_from:
        if ':' not in build_from:
            raise OsbsValidationException(
                'build_from must be "source_type:source_value"')
        source_type, source_value = build_from.split(':', 1)
        if source_type not in ('image', 'imagestream'):
            raise OsbsValidationException(
                'first part in build_from, may be only image or imagestream')
        if source_type == 'image':
            self.build_image.value = source_value
        else:
            self.build_imagestream.value = source_value

    # fall back to the maximum runtimes on missing/invalid deadlines
    try:
        self.orchestrator_deadline.value = int(orchestrator_deadline)
    except (ValueError, TypeError):
        self.orchestrator_deadline.value = ORCHESTRATOR_MAX_RUNTIME
    try:
        self.worker_deadline.value = int(worker_deadline)
    except (ValueError, TypeError):
        self.worker_deadline.value = WORKER_MAX_RUNTIME

    self._populate_image_tag()
def set_params(self, git_uri=None, git_ref=None,
               registry_uri=None,  # compatibility name for registry_uris
               registry_uris=None, registry_secrets=None, user=None,
               component=None, openshift_uri=None, source_registry_uri=None,
               yum_repourls=None, use_auth=None, builder_openshift_url=None,
               build_image=None, build_imagestream=None, proxy=None,
               sources_command=None, architecture=None, vendor=None,
               build_host=None, authoritative_registry=None, distribution_scope=None,
               koji_target=None, kojiroot=None, kojihub=None, koji_certs_secret=None,
               koji_use_kerberos=None, koji_kerberos_keytab=None,
               koji_kerberos_principal=None, koji_task_id=None,
               source_secret=None,  # compatibility name for pulp_secret
               pulp_secret=None, pulp_registry=None,
               smtp_host=None, smtp_from=None, smtp_email_domain=None,
               smtp_additional_addresses=None, smtp_error_addresses=None,
               smtp_to_submitter=None, smtp_to_pkgowner=None,
               nfs_server_path=None, nfs_dest_dir=None, git_branch=None,
               base_image=None, name_label=None, builder_build_json_dir=None,
               registry_api_versions=None, platform=None, platforms=None,
               release=None, reactor_config_secret=None, client_config_secret=None,
               token_secrets=None, arrangement_version=None,
               info_url_format=None, artifacts_allowed_domains=None,
               **kwargs):
    """
    Set build parameters for a production build.

    Most arguments are stored directly into the matching BuildParam.
    Also derives: build name (from git uri+branch), imagestream name
    (from name_label), imagestream url/insecure flag (from the source
    registry or the first listed registry) and a unique image tag.

    :param registry_uri: compatibility alias for a single registry_uris entry
    :param source_secret: compatibility alias for pulp_secret
    :raises OsbsValidationException: on missing/conflicting arguments
    """
    self.git_uri.value = git_uri
    self.git_ref.value = git_ref
    self.user.value = user
    self.component.value = component
    self.proxy.value = proxy
    # registry_uri is the compatibility name for registry_uris
    if registry_uri is not None:
        assert registry_uris is None
        registry_uris = [registry_uri]
    self.registry_uris.value = registry_uris or []
    self.registry_secrets.value = registry_secrets or []
    self.source_registry_uri.value = source_registry_uri
    self.openshift_uri.value = openshift_uri
    self.builder_openshift_url.value = builder_openshift_url
    if not (yum_repourls is None or isinstance(yum_repourls, list)):
        raise OsbsValidationException("yum_repourls must be a list")
    self.yum_repourls.value = yum_repourls or []
    self.use_auth.value = use_auth
    if build_imagestream and build_image:
        raise OsbsValidationException(
            'Please only define build_image -OR- build_imagestream, not both')
    self.build_image.value = build_image or DEFAULT_BUILD_IMAGE
    self.build_imagestream.value = build_imagestream
    self.sources_command.value = sources_command
    self.architecture.value = architecture
    self.vendor.value = vendor
    self.build_host.value = build_host
    self.authoritative_registry.value = authoritative_registry
    self.distribution_scope.value = distribution_scope
    self.registry_api_versions.value = registry_api_versions
    self.koji_target.value = koji_target
    self.kojiroot.value = kojiroot
    self.kojihub.value = kojihub
    self.koji_certs_secret.value = koji_certs_secret
    self.koji_use_kerberos.value = koji_use_kerberos
    self.koji_kerberos_principal.value = koji_kerberos_principal
    self.koji_kerberos_keytab.value = koji_kerberos_keytab
    self.koji_task_id.value = koji_task_id
    # source_secret is the compatibility name for pulp_secret
    self.pulp_secret.value = pulp_secret or source_secret
    self.pulp_registry.value = pulp_registry
    self.smtp_host.value = smtp_host
    self.smtp_from.value = smtp_from
    self.smtp_additional_addresses.value = smtp_additional_addresses
    self.smtp_error_addresses.value = smtp_error_addresses
    self.smtp_email_domain.value = smtp_email_domain
    self.smtp_to_submitter.value = smtp_to_submitter
    self.smtp_to_pkgowner.value = smtp_to_pkgowner
    self.nfs_server_path.value = nfs_server_path
    self.nfs_dest_dir.value = nfs_dest_dir
    self.git_branch.value = git_branch
    # build name is derived from the git repo and branch
    self.name.value = make_name_from_git(self.git_uri.value, self.git_branch.value)
    if not base_image:
        raise OsbsValidationException("base_image must be provided")
    self.trigger_imagestreamtag.value = get_imagestreamtag_from_image(base_image)
    self.builder_build_json_dir.value = builder_build_json_dir
    if not name_label:
        raise OsbsValidationException("name_label must be provided")
    self.imagestream_name.value = name_label.replace('/', '-')
    # The ImageStream should take tags from the source registry
    # or, if no source registry is set, the first listed registry
    imagestream_reg = self.source_registry_uri.value
    if not imagestream_reg:
        try:
            imagestream_reg = self.registry_uris.value[0]
        except IndexError:
            logger.info("no registries specified, cannot determine imagestream url")
            imagestream_reg = None
    if imagestream_reg:
        self.imagestream_url.value = os.path.join(imagestream_reg.docker_uri,
                                                  name_label)
        logger.debug("setting 'imagestream_url' to '%s'",
                     self.imagestream_url.value)
        insecure = imagestream_reg.uri.startswith('http://')
        self.imagestream_insecure_registry.value = insecure
        logger.debug("setting 'imagestream_insecure_registry' to %r", insecure)
    # unique image tag: user/component:target-random-timestamp
    timestamp = utcnow().strftime('%Y%m%d%H%M%S')
    self.image_tag.value = "%s/%s:%s-%s-%s" % (
        self.user.value,
        self.component.value,
        self.koji_target.value or 'none',
        random.randrange(10**(RAND_DIGITS - 1), 10**RAND_DIGITS),
        timestamp
    )
    self.platform.value = platform
    self.platforms.value = platforms
    self.release.value = release
    self.reactor_config_secret.value = reactor_config_secret
    self.client_config_secret.value = client_config_secret
    self.token_secrets.value = token_secrets or {}
    self.arrangement_version.value = arrangement_version
    self.info_url_format.value = info_url_format
    self.artifacts_allowed_domains.value = artifacts_allowed_domains
def make_params(cls,
                additional_tags=None,
                base_image=None,
                build_conf=None,
                compose_ids=None,
                dependency_replacements=None,
                filesystem_koji_task_id=None,
                flatpak=None,
                git_branch=None,
                git_commit_depth=None,
                git_ref=None,
                git_uri=None,
                include_koji_repo=None,
                isolated=None,
                koji_parent_build=None,
                koji_upload_dir=None,
                name_label=None,
                operator_bundle_replacement_pullspecs=None,
                operator_csv_modifications_url=None,
                operator_manifests_extract_platform=None,
                parent_images_digests=None,
                platform=None,
                platforms=None,
                release=None,
                remote_sources=None,
                repo_info=None,
                tags_from_yaml=None,
                yum_repourls=None,
                **kwargs):
    """
    Create a BuildUserParams instance.

    Most arguments become the value of the BuildParam of the same name.
    Two arguments are special: `build_conf` supplies additional param
    values (handled by the parent method), and `repo_info`, when given,
    overrides the git coordinates and additional tags (see below).

    these parameters are accepted:
    :param build_conf: BuildConfiguration, optional build configuration
    :param compose_ids: list of int, ODCS composes to use instead of generating new ones
    :param dependency_replacements: list of str, dependencies to be replaced by cachito, as
    pkg_manager:name:version[:new_name]
    :param filesystem_koji_task_id: int, Koji Task that created the base filesystem
    :param flatpak: if we should build a Flatpak OCI Image
    :param git_branch: str, branch name of the branch to be pulled
    :param git_ref: str, commit ID of the branch to be pulled
    :param git_uri: str, uri of the git repository for the source
    :param include_koji_repo: include the repo from the target build tag, even if other
                              repourls are provided.
    :param isolated: bool, build as an isolated build
    :param koji_parent_build: str,
    :param koji_upload_dir: str, koji directory where the completed image will be uploaded
    :param name_label: str, label of the parent image
    :param user: str, name of the user requesting the build
    :param operator_bundle_replacement_pullspecs: dict, mapping of original pullspecs to
    replacement pullspecs for operator manifest bundle builds
    :param operator_csv_modifications_url: str, URL to JSON file describing operator CSV changes
    :param operator_manifests_extract_platform: str, indicates which platform should upload
    operator manifests to koji
    :param parent_images_digests: dict, mapping image digests to names and platforms
    :param platforms: list of str, platforms to build on
    :param platform: str, platform
    :param reactor_config_map: str, name of the config map containing the reactor environment
    :param release: str,
    :param remote_sources: list of dicts, each dict contains info about a particular
    remote source with the following keys:
        build_args: dict, extra args for `builder.build_args`, if any
        configs: list of str, configuration files to be injected into
        the exploded remote sources dir
        request_id: int, cachito request id; used to request the
        Image Content Manifest
        url: str, URL from which to download a source archive
        name: str, name of remote source
    :param repo_info: RepoInfo, git repo data for the build
    :param scratch: bool, build as a scratch build
    :param signing_intent: bool, True to sign the resulting image
    :param yum_repourls: list of str, uris of the yum repos to pull from

    Please keep the parameter list alphabetized for easier tracking of changes

    the following parameters can be pulled from the RepoInfo (ie, repo_info)
    :param git_branch: str, branch name of the branch to be pulled
    :param git_ref: str, commit ID of the branch to be pulled
    :param git_uri: str, uri of the git repository for the source
    """
    if repo_info:
        # The repo itself is authoritative for git coordinates and tags.
        additional_tags = repo_info.additional_tags.tags
        git_branch = repo_info.git_branch
        git_commit_depth = repo_info.git_commit_depth
        git_ref = repo_info.git_ref
        git_uri = repo_info.git_uri
        tags_from_yaml = repo_info.additional_tags.from_container_yaml
    elif not git_uri:
        raise OsbsValidationException('no repo_info passed to BuildUserParams')

    # For flatpaks, we can set this later from the reactor config
    if not base_image and not flatpak:
        raise OsbsValidationException("base_image must be provided")

    if not name_label:
        raise OsbsValidationException("name_label must be provided")

    if kwargs.get('signing_intent') and compose_ids:
        raise OsbsValidationException(
            'Please only define signing_intent -OR- compose_ids, not both')

    # Each of these must be either unset or an actual list.
    for list_arg_name, list_arg in (('compose_ids', compose_ids),
                                    ('dependency_replacements', dependency_replacements),
                                    ('yum_repourls', yum_repourls)):
        if list_arg is not None and not isinstance(list_arg, list):
            raise OsbsValidationException("%s must be a list" % list_arg_name)

    kwargs.update({
        "base_image": base_image,
        "build_conf": build_conf,
        "compose_ids": compose_ids or [],
        "dependency_replacements": dependency_replacements or [],
        "filesystem_koji_task_id": filesystem_koji_task_id,
        "flatpak": flatpak,
        "include_koji_repo": include_koji_repo,
        "isolated": isolated,
        "koji_parent_build": koji_parent_build,
        "koji_upload_dir": koji_upload_dir,
        "operator_bundle_replacement_pullspecs": operator_bundle_replacement_pullspecs,
        "operator_csv_modifications_url": operator_csv_modifications_url,
        "operator_manifests_extract_platform": operator_manifests_extract_platform,
        "parent_images_digests": parent_images_digests,
        "platform": platform,
        "platforms": platforms,
        "release": release,
        "remote_sources": remote_sources,
        "yum_repourls": yum_repourls or [],
        # Potentially pulled from repo_info
        "additional_tags": additional_tags or set(),
        "git_branch": git_branch,
        "git_commit_depth": git_commit_depth,
        "git_ref": git_ref,
        "git_uri": git_uri,
        "name": make_name_from_git(git_uri, git_branch),
        "tags_from_yaml": tags_from_yaml,
    })

    user_params = cls._make_params_super(**kwargs)

    # scratch and isolated builds cannot be combined.
    exclusive_flags = [user_params.scratch, user_params.isolated]
    if exclusive_flags.count(True) > 1:
        raise OsbsValidationException(
            'Build variations are mutually exclusive. '
            'Must set either scratch, isolated, or none. ')

    return user_params
def make_params(cls, build_conf=None, component=None, koji_target=None, koji_task_id=None,
                platform=None, scratch=None, signing_intent=None, user=None, userdata=None,
                **kwargs):
    """
    Create a user_params instance.

    Most arguments simply become the value of the BuildParam of the same
    name. The exception is `build_conf`, which is not itself a BuildParam
    but supplies values for other params (listed below).

    Arguments that are None (passed as None or defaulting to None) are
    dropped so they never overwrite a param's default value. Once the
    instance exists, assigning None does overwrite, e.g.:

    >>> params = BuildCommon.make_params(build_conf=bc)  # does not overwrite defaults
    >>> params.version = None  # does overwrite the default

    these parameters are accepted:
    :param base_image: str, name of the parent image
    :param build_conf: BuildConfiguration, the build configuration
    :param component: str, name of the component
    :param koji_parent_build: str,
    :param koji_target: str, koji tag with packages used to build the image
    :param koji_task_id: int, koji *task* ID
    :param koji_upload_dir: str, koji directory where the completed image will be uploaded
    :param platform: str, platform
    :param scratch: bool, build as a scratch build (if not specified in build_conf)
    :param signing_intent: bool, True to sign the resulting image
    :param user: str, name of the user requesting the build
    :param userdata: dict, custom user data

    Please keep the parameter list alphabetized for easier tracking of changes

    the following parameters are pulled from the BuildConfiguration (ie, build_conf)
    :param reactor_config_map: str, name of the config map containing the reactor environment
    :param scratch: bool, build as a scratch build
    """
    if not build_conf:
        raise OsbsValidationException('build_conf must be defined')

    # Scratch builds use a dedicated reactor config map.
    reactor_config = (build_conf.get_reactor_config_map_scratch()
                      if build_conf.get_scratch(scratch)
                      else build_conf.get_reactor_config_map())

    # Fold the explicitly accepted arguments back into kwargs
    kwargs.update({
        "component": component,
        "koji_target": koji_target,
        "koji_task_id": koji_task_id,
        "platform": platform,
        "signing_intent": signing_intent,
        "user": user,
        "userdata": userdata,
        # Potentially pulled from build_conf
        "reactor_config_map": reactor_config,
        "scratch": build_conf.get_scratch(scratch),
    })

    # Keep only arguments that are set (not None) and known to this
    # class; callers may still pass deprecated parameter names.
    accepted = {}
    for key, value in kwargs.items():
        if value is not None and cls.get_param(key) is not None:
            accepted[key] = value

    user_params = cls(**accepted)
    user_params._populate_image_tag()
    return user_params