def get_source_build_info(self, build_id, build_nvr):
    """Resolve and validate the koji image build that sources come from.

    Looks up the build by NVR (preferred) or id, verifies both identifiers
    agree, and checks that the build is an image build and not itself a
    source-container build.

    :param build_id: int or None, koji build id
    :param build_nvr: str or None, koji build NVR
    :return: (component, build_id, build_nvr) tuple; component is
             "<name>-source" and both identifiers are filled in
    :raises koji.BuildError: missing build, mismatched id/nvr, non-image
                             build, or a source-container build
    """
    koji_build = self.session.getBuild(build_nvr or build_id)
    if not koji_build:
        raise koji.BuildError("specified source build '%s' doesn't exist"
                              % (build_nvr or build_id))

    # When both identifiers were given they must name the same build.
    if build_id and build_id != koji_build['build_id']:
        raise koji.BuildError(
            'koji_build_id {} does not match koji_build_nvr {} with id {}. '
            'When specifying both an id and an nvr, they should point to '
            'the same image build'.format(build_id, build_nvr,
                                          koji_build['build_id']))

    extra = koji_build['extra']
    if 'image' not in extra:
        raise koji.BuildError(
            'koji build {} is not image build which source container '
            'requires'.format(koji_build['nvr']))
    if 'sources_for_nvr' in extra['image']:
        # 'sources_for_nvr' marks a build that is itself a source container.
        raise koji.BuildError(
            'koji build {} is source container build, source container can '
            'not use source container build image'.format(koji_build['nvr']))

    # Fill in whichever identifier the caller omitted.
    build_id = build_id or koji_build['build_id']
    build_nvr = build_nvr or koji_build['nvr']
    return "%s-source" % koji_build['name'], build_id, build_nvr
def getArchList(self, build_tag, extra=None):
    """Copied from build task"""
    # Determine the arches configured on the build tag.
    buildconfig = self.session.getBuildConfig(build_tag, event=self.event_id)
    arches = buildconfig['arches']
    if not arches:
        # XXX - need to handle this better
        raise koji.BuildError("No arches for tag %(name)s [%(id)s]" % buildconfig)

    # Canonical forms of the tag's arches are used below for filtering.
    tag_archlist = [koji.canonArch(a) for a in arches.split()]
    self.logger.debug('arches: %s', arches)

    if extra:
        self.logger.debug('Got extra arches: %s', extra)
        arches = "%s %s" % (arches, extra)
    archlist = arches.split()
    self.logger.debug('base archlist: %r', archlist)

    override = self.opts.get('arch_override')
    if override and (self.opts.get('isolated') or self.opts.get('scratch')):
        # arch_override is honored only for isolated or scratch builds.
        self.logger.debug('arch override: %s', override)
        archlist = override.split()
    elif override:
        raise koji.BuildError("arch-override is only allowed for isolated or scratch builds")

    # Keep only arches the tag can actually handle; 'noarch' always passes.
    # A dict is used (instead of a set) to deduplicate while preserving
    # insertion order.
    matched = {}
    for candidate in archlist:
        if candidate == 'noarch' or koji.canonArch(candidate) in tag_archlist:
            matched[candidate] = 1
    if not matched:
        raise koji.BuildError("No matching arches were found")
    return list(matched.keys())
def check_whitelist(self, name, target_info):
    """Check if container name is whitelisted in destination tag

    Raises with koji.BuildError if package is not whitelisted or blocked.
    """
    dest_tag = target_info['dest_tag_name']
    pkg_cfg = self.session.getPackageConfig(dest_tag, name)
    self.logger.debug("%r", pkg_cfg)
    # The package must be listed for the destination tag and not blocked.
    if pkg_cfg is None:
        raise koji.BuildError("package (container) %s not in list for tag %s"
                              % (name, dest_tag))
    if pkg_cfg['blocked']:
        raise koji.BuildError("package (container) %s is blocked for tag %s"
                              % (name, dest_tag))
def find_arch(self, arch, host, tag, preferred_arch=None):
    """
    For noarch tasks, find a canonical arch that is supported by both
    the host and tag. If the arch is anything other than noarch, return
    it unmodified. If preferred_arch is set, try to get it, but not fail on that
    """
    # Concrete arches pass straight through.
    if arch != "noarch":
        return arch

    # We need a concrete arch that the host can handle, the build tag
    # supports, and is canonical.
    host_arches = host['arches']
    if not host_arches:
        raise koji.BuildError("No arch list for this host: %s" % host['name'])
    tag_arches = tag['arches']
    if not tag_arches:
        raise koji.BuildError("No arch list for tag: %s" % tag['name'])

    # Canonicalize both sides before intersecting.
    host_arches = {koji.canonArch(a) for a in host_arches.split()}
    tag_arches = {koji.canonArch(a) for a in tag_arches.split()}
    common_arches = list(host_arches & tag_arches)

    if not common_arches:
        # no overlap between host and tag arches
        raise koji.BuildError(
            "host %s (%s) does not support any arches"
            " of tag %s (%s)" % (host['name'], ', '.join(sorted(host_arches)),
                                 tag['name'], ', '.join(sorted(tag_arches))))

    if preferred_arch and preferred_arch in common_arches:
        self.logger.info(
            'Valid arches: %s, using preferred: %s'
            % (' '.join(sorted(common_arches)), preferred_arch))
        return preferred_arch

    # Re-seed the PRNG: we just forked from a common parent, so without
    # this every child would pick the same arch.
    random.seed()
    arch = random.choice(common_arches)
    self.logger.info('Valid arches: %s, using: %s'
                     % (' '.join(sorted(common_arches)), arch))
    return arch
def checkLabels(self, src):
    """Validate required LABELs in the Dockerfile checked out from src.

    Verifies every required LABEL is present and that no resulting docker
    tag exceeds docker's 128-character limit.

    :param src: str, SCM URL of the sources containing the Dockerfile
    :return: (extra_data, expected_nvr) tuple from the LabelsWrapper
    :raises koji.BuildError: when LABELs are missing or a tag is too long
    """
    dockerfile_path = self.fetchDockerfile(src)
    labels_wrapper = LabelsWrapper(dockerfile_path,
                                   logger_name=self.logger.name)
    missing_labels = labels_wrapper.get_missing_label_ids()
    if missing_labels:
        formatted_labels_list = [labels_wrapper.format_label(label_id)
                                 for label_id in missing_labels]
        msg_template = ("Required LABELs haven't been found in "
                        "Dockerfile: %s.")
        # Fix: the original used Python 2 "raise E, args" syntax, which is
        # a SyntaxError on Python 3.
        raise koji.BuildError(msg_template % ', '.join(formatted_labels_list))

    # Make sure the longest tag for the docker image is no more than 128 chars
    # see https://github.com/docker/docker/issues/8445
    data = labels_wrapper.get_extra_data()
    tags = labels_wrapper.get_additional_tags()
    if LABEL_DATA_MAP['RELEASE'] in data:
        version_release_tag = "%s-%s" % (
            data[LABEL_DATA_MAP['VERSION']], data[LABEL_DATA_MAP['RELEASE']])
        tags.append(version_release_tag)
    if tags:
        longest_tag = max(tags, key=len)
        if len(longest_tag) > 128:
            raise koji.BuildError(
                "Docker cannot create image with a tag longer than 128, "
                "current version-release tag length is %s" % len(longest_tag))

    return (labels_wrapper.get_extra_data(), labels_wrapper.get_expected_nvr())
def fetchDockerfile(self, src, build_tag):
    """
    Gets Dockerfile. Roughly corresponds to getSRPM method of build task
    """
    scm = SCM(src)
    scm.assert_allowed(self.options.allowed_scms)

    # Prepare working and upload directories.
    sources_dir = os.path.join(self.workdir, 'sources')
    koji.ensuredir(sources_dir)
    checkout_log = os.path.join(self.workdir, 'checkout-for-labels.log')
    uploadpath = self.getUploadDir()
    koji.ensuredir(uploadpath)

    # Check out sources from the SCM, wrapped in the SCM checkout callbacks.
    self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
                       build_tag=build_tag,
                       scratch=self.opts.get('scratch', False))
    sourcedir = scm.checkout(sources_dir, self.session, uploadpath,
                             checkout_log)
    self.run_callbacks("postSCMCheckout", scminfo=scm.get_info(),
                       build_tag=build_tag,
                       scratch=self.opts.get('scratch', False),
                       srcdir=sourcedir)

    dockerfile = os.path.join(sourcedir, 'Dockerfile')
    if not os.path.exists(dockerfile):
        raise koji.BuildError("Dockerfile file missing: %s" % dockerfile)
    return dockerfile
def getRepo(self, tag, builds=None, wait=False):
    """
    Get the active repo for the given tag.  If there is no repo available,
    wait for a repo to be created.
    if wait is True - always wait for new repo
    if builds are present, wait until repo doesn't contain these
    """
    # Timestamp is only relevant when the caller explicitly asked to wait.
    create_ts = time.time() if wait else None

    repo_info = self.session.getRepo(tag)
    taginfo = self.session.getTag(tag, strict=True)
    if not repo_info:
        # No repo yet: only acceptable when the tag is the build tag of
        # some target, in which case we wait for a repo to appear.
        targets = self.session.getBuildTargets(buildTagID=taginfo['id'])
        if not targets:
            raise koji.BuildError('no repo (and no target) for tag %s'
                                  % taginfo['name'])
        wait = True
    elif builds:
        # The existing repo is only usable if it already contains all the
        # requested builds.
        build_infos = [koji.parse_NVR(build) for build in builds]
        if not koji.util.checkForBuilds(self.session, taginfo['id'],
                                        build_infos,
                                        repo_info['create_event']):
            wait = True

    if wait:
        task_id = self.session.host.subtask(method='waitrepo',
                                            arglist=[tag, create_ts, builds],
                                            parent=self.id)
        repo_info = self.wait(task_id)[task_id]
    return repo_info
def createSourceContainer(self, target_info=None, scratch=None, component=None,
                          koji_build_id=None, koji_build_nvr=None,
                          signing_intent=None):
    """Start an OSBS source-container build and process its response.

    :param target_info: dict, koji build target info; 'name' is passed on
    :param scratch: bool, whether this is a scratch build
    :param component: str, source component name ("<name>-source")
    :param koji_build_id: int, id of the image build sources come from
    :param koji_build_nvr: str, NVR of that image build
    :param signing_intent: str, optional signing intent override
    :return: result of handle_build_response() for the OSBS response
    :raises koji.BuildError: when osbs-client lacks source-container support
    """
    this_task = self.session.getTaskInfo(self.id)
    self.logger.debug("This task: %r", this_task)
    owner_info = self.session.getUser(this_task['owner'])
    self.logger.debug("Started by %s", owner_info['name'])

    create_build_args = {
        'user': owner_info['name'],
        'component': component,
        'sources_for_koji_build_id': koji_build_id,
        'sources_for_koji_build_nvr': koji_build_nvr,
        'target': target_info['name'],
        'scratch': scratch,
        'koji_task_id': self.id,
    }
    if signing_intent:
        create_build_args['signing_intent'] = signing_intent

    # Fix: look the method up in its own try block. The original wrapped
    # the call too, and its AttributeError handler referenced
    # 'create_method' -- unbound when the lookup itself failed, so a
    # NameError masked the intended BuildError.
    try:
        create_method = self.osbs().create_source_container_build
    except AttributeError:
        raise koji.BuildError(
            "method create_source_container_build doesn't exist in osbs")

    self.logger.debug("Starting %s with params: '%s",
                      create_method, create_build_args)
    build_response = create_method(**create_build_args)
    return self.handle_build_response(build_response)
def checkLabels(self, src, build_tag, label_overwrites=None):
    """Check Dockerfile LABELs and derived docker tags for a build.

    Fetches the Dockerfile from src, verifies all required LABELs are
    present, appends the version-release tag when both labels carry
    values, and rejects tags over docker's 128-character limit.

    :param src: str, SCM URL of the sources
    :param build_tag: build tag for the Dockerfile checkout
    :param label_overwrites: dict, optional LABEL value overrides
    :return: (component, expected_nvr); expected_nvr is None when version
             and release are not both set
    :raises koji.BuildError: on missing LABELs or an over-long tag
    """
    dockerfile_path = self.fetchDockerfile(src, build_tag)
    labels_wrapper = LabelsWrapper(dockerfile_path,
                                   logger_name=self.logger.name,
                                   label_overwrites=label_overwrites or {})

    missing_labels = labels_wrapper.get_missing_label_ids()
    if missing_labels:
        formatted = ', '.join(labels_wrapper.format_label(label_id)
                              for label_id in missing_labels)
        raise koji.BuildError("Required LABELs haven't been found in "
                              "Dockerfile: %s." % formatted)

    data = labels_wrapper.get_data_labels()
    tags = labels_wrapper.get_additional_tags()

    # The NVR is only verified when version and release are both present
    # and non-empty.
    check_nvr = ('RELEASE' in data and 'VERSION' in data
                 and bool(data['RELEASE']) and bool(data['VERSION']))
    if check_nvr:
        tags.append("%s-%s" % (data['VERSION'], data['RELEASE']))

    # Make sure the longest tag for the docker image is no more than 128 chars
    # see https://github.com/docker/docker/issues/8445
    if tags:
        longest_tag = max(tags, key=len)
        if len(longest_tag) > 128:
            raise koji.BuildError(
                "Docker cannot create image with a tag longer than 128, "
                "current version-release tag length is %s" % len(longest_tag))

    expected_nvr = labels_wrapper.get_expected_nvr() if check_nvr else None
    return (data['COMPONENT'], expected_nvr)
# NOTE(review): this appears to be a detached copy of the _run closure
# defined inside SCM.checkout(); it references names (session, logfile,
# uploadpath, env, self) that must come from an enclosing scope, so it is
# not runnable standalone -- confirm whether this duplicate is intentional.
def _run(cmd, chdir=None, fatal=False, log=True, _count=[0]):
    """Run an SCM command, logging its output; raise BuildError on failure.

    ``_count`` is a mutable default deliberately used as a persistent
    call counter: the first invocation truncates the logfile, later ones
    append to it.
    """
    if globals().get('KOJIKAMID'):
        #we've been inserted into kojikamid, use its run()
        return run(cmd, chdir=chdir, fatal=fatal, log=log)
    else:
        # Append to the log on every call after the first.
        append = (_count[0] > 0)
        _count[0] += 1
        # log_output returns a truthy status on command failure.
        if log_output(session, cmd[0], cmd, logfile, uploadpath,
                      cwd=chdir, logerror=1, append=append, env=env):
            raise koji.BuildError('Error running %s command "%s", see %s for details' %
                                  (self.scmtype, ' '.join(cmd), os.path.basename(logfile)))
def handler(self, target, opts=None):
    """Task entry point for a source-container build.

    Validates parameters, resolves the image build the sources belong to,
    checks the package whitelist (non-scratch only) and spawns the OSBS
    source-container build.

    :param target: str, name of the koji build target
    :param opts: dict, task options (koji_build_id/koji_build_nvr,
                 scratch, signing_intent, ...)
    :return: dict with 'repositories' and 'koji_builds' lists
    :raises koji.BuildError: when the target does not exist or the source
                             build info is invalid
    """
    jsonschema.validate([target, opts], self.PARAMS_SCHEMA)
    self.opts = opts

    self.event_id = self.session.getLastEvent()['id']
    target_info = self.session.getBuildTarget(target, event=self.event_id)
    if not target_info:
        raise koji.BuildError("Target `%s` not found" % target)

    component, build_id, build_nvr = self.get_source_build_info(
        opts.get('koji_build_id'), opts.get('koji_build_nvr'))

    # scratch builds do not get imported, and consequently not tagged
    if not self.opts.get('scratch'):
        self.check_whitelist(component, target_info)

    self.logger.debug("Spawning job for sources")
    kwargs = dict(
        target_info=target_info,
        scratch=opts.get('scratch', False),
        component=component,
        koji_build_id=build_id,
        koji_build_nvr=build_nvr,
        signing_intent=opts.get('signing_intent', None),
    )

    results = []
    semi_results = self.createSourceContainer(**kwargs)
    if semi_results is not None:
        results = [semi_results]

    self.logger.debug("Results: %r", results)

    all_repositories = []
    all_koji_builds = []
    for result in results:
        # Fix: bind 'repository' before the try block so the error log
        # below can never reference an unbound name (the original bound
        # it inside the try, risking a NameError in the handler).
        repository = result.get('repositories')
        try:
            all_repositories.extend(repository)
        except Exception as error:
            self.logger.error("Failed to merge list of repositories "
                              "%r. Reason (%s): %s", repository,
                              type(error), error)
        koji_build_id = result.get('koji_build_id')
        if koji_build_id:
            all_koji_builds.append(koji_build_id)

    return {
        'repositories': all_repositories,
        'koji_builds': all_koji_builds,
    }
def getRepo(self, tag):
    """
    Get the active repo for the given tag.  If there is no repo available,
    wait for a repo to be created.
    """
    repo_info = self.session.getRepo(tag)
    if repo_info:
        return repo_info

    # No repo yet: only acceptable when the tag is the build tag of some
    # target, in which case we can wait for one to be generated.
    taginfo = self.session.getTag(tag, strict=True)
    targets = self.session.getBuildTargets(buildTagID=taginfo['id'])
    if not targets:
        raise koji.BuildError('no repo (and no target) for tag %s'
                              % taginfo['name'])

    # Spawn a waitrepo subtask and block until it hands back the new repo.
    task_id = self.session.host.subtask(method='waitrepo',
                                        arglist=[tag, None, None],
                                        parent=self.id)
    return self.wait(task_id)[task_id]
def assert_allowed(self, allowed):
    """
    Verify that the host and repository of this SCM is in the provided
    list of allowed repositories.

    allowed is a space-separated list of
    host:repository[:use_common[:source_cmd]] tuples.  Incorrectly-formatted
    tuples will be ignored.

    If use_common is not present, kojid will attempt to checkout a common/
    directory from the repository.  If use_common is set to no, off, false,
    or 0, it will not attempt to checkout a common/ directory.

    source_cmd is a shell command (args separated with commas instead of
    spaces) to run before building the srpm.  It is generally used to
    retrieve source files from a remote location.  If no source_cmd is
    specified, "make sources" is run by default.

    :raises koji.BuildError: when host:repository matches no allowed entry
    """
    for allowed_scm in allowed.split():
        scm_tuple = allowed_scm.split(':')
        if len(scm_tuple) >= 2:
            # host and repository patterns support shell-style globbing
            if fnmatch(self.host, scm_tuple[0]) and fnmatch(self.repository, scm_tuple[1]):
                # SCM host:repository is in the allowed list
                # check if we specify a value for use_common
                if len(scm_tuple) >= 3:
                    if scm_tuple[2].lower() in ('no', 'off', 'false', '0'):
                        self.use_common = False
                # check if we specify a custom source_cmd
                if len(scm_tuple) >= 4:
                    if scm_tuple[3]:
                        self.source_cmd = scm_tuple[3].split(',')
                    else:
                        # there was nothing after the trailing :, so they
                        # don't want to run a source_cmd at all
                        self.source_cmd = None
                break
        else:
            # Fix: Logger.warn() is deprecated -- use warning() with lazy
            # %-style arguments instead of eager string interpolation.
            self.logger.warning(
                'Ignoring incorrectly formatted SCM host:repository: %s',
                allowed_scm)
    else:
        raise koji.BuildError('%s:%s is not in the list of allowed SCMs'
                              % (self.host, self.repository))
def arches_for_config(buildconfig: Dict):
    """Return the set of canonical arches configured for a build tag.

    :param buildconfig: dict, koji build config with 'arches' and 'name'
    :return: set of canonical architecture name strings
    :raises koji.BuildError: when the tag has no arches configured
    """
    archstr = buildconfig["arches"]
    if not archstr:
        name = buildconfig["name"]
        # Fix: the original f-string contained a stray '%' which rendered
        # the message as "tag '%somename'".
        raise koji.BuildError(f"Missing arches for tag '{name}'")

    return set(koji.canonArch(a) for a in archstr.split())
def createContainer(self, src=None, target_info=None, arches=None,
                    scratch=None, isolated=None, yum_repourls=None,
                    branch=None, push_url=None, koji_parent_build=None,
                    release=None, flatpak=False, signing_intent=None,
                    compose_ids=None, skip_build=False,
                    triggered_after_koji_task=None,
                    dependency_replacements=None,
                    operator_csv_modifications_url=None):
    """Start an OSBS container build, preferring the orchestrator API.

    Builds the argument dict from the SCM info and task options, then
    tries create_orchestrator_build(); when osbs-client is too old or
    orchestration is disabled, falls back to the plain create_build()
    API using only the first requested arch.

    :return: handle_build_response() result, or None when the build was
             skipped (build_response is None)
    :raises koji.BuildError: missing arches, or signing_intent combined
                             with compose_ids
    :raises ContainerError: on an OSBS validation failure
    """
    if not yum_repourls:
        yum_repourls = []

    this_task = self.session.getTaskInfo(self.id)
    self.logger.debug("This task: %r", this_task)
    owner_info = self.session.getUser(this_task['owner'])
    self.logger.debug("Started by %s", owner_info['name'])

    scm = My_SCM(src)
    scm.assert_allowed(self.options.allowed_scms)
    git_uri = scm.get_git_uri()
    component = scm.get_component()
    # arch stays None for orchestrator builds; the fallback path below
    # sets it to the first requested arch.
    arch = None

    if not arches:
        raise koji.BuildError("arches aren't specified")

    # These two options are mutually exclusive.
    if signing_intent and compose_ids:
        raise koji.BuildError("signing_intent used with compose_ids")

    create_build_args = {
        'git_uri': git_uri,
        'git_ref': scm.revision,
        'user': owner_info['name'],
        'component': component,
        'target': target_info['name'],
        'dependency_replacements': dependency_replacements,
        'yum_repourls': yum_repourls,
        'scratch': scratch,
        'koji_task_id': self.id,
        'architecture': arch,
    }
    if branch:
        create_build_args['git_branch'] = branch
    if push_url:
        create_build_args['git_push_url'] = push_url
    if flatpak:
        create_build_args['flatpak'] = True
    if skip_build:
        create_build_args['skip_build'] = True
    if triggered_after_koji_task is not None:
        create_build_args['triggered_after_koji_task'] = triggered_after_koji_task
    if operator_csv_modifications_url:
        create_build_args['operator_csv_modifications_url'] = operator_csv_modifications_url

    # The orchestrator call takes a superset of the plain-build args.
    orchestrator_create_build_args = create_build_args.copy()
    orchestrator_create_build_args['platforms'] = arches
    if signing_intent:
        orchestrator_create_build_args['signing_intent'] = signing_intent
    if compose_ids:
        orchestrator_create_build_args['compose_ids'] = compose_ids
    if koji_parent_build:
        orchestrator_create_build_args['koji_parent_build'] = koji_parent_build
    if isolated:
        orchestrator_create_build_args['isolated'] = isolated
    if release:
        orchestrator_create_build_args['release'] = release

    try:
        create_method = self.osbs().create_orchestrator_build
        self.logger.debug("Starting %s with params: '%s",
                          create_method, orchestrator_create_build_args)
        build_response = create_method(**orchestrator_create_build_args)
    except (AttributeError, OsbsOrchestratorNotEnabled):
        # Older osbs-client, or else orchestration not enabled
        create_build_args['architecture'] = arch = arches[0]
        # skip_build is only understood by the orchestrator API.
        create_build_args.pop('skip_build', None)
        create_method = self.osbs().create_build
        self.logger.debug("Starting %s with params: '%s'",
                          create_method, create_build_args)
        build_response = create_method(**create_build_args)
    except OsbsValidationException as exc:
        raise ContainerError('OSBS validation exception: {0}'.format(exc))

    if build_response is None:
        self.logger.debug("Build was skipped")

        # Still upload whatever logs were produced before returning.
        osbs_logs_dir = self.resultdir()
        koji.ensuredir(osbs_logs_dir)
        try:
            self._incremental_upload_logs()
        except koji.ActionNotAllowed:
            pass

        return

    return self.handle_build_response(build_response, arch=arch)
def checkout(self, scmdir, session=None, uploadpath=None, logfile=None):
    """
    Checkout the module from SCM.  Accepts the following parameters:
     - scmdir: the working directory
     - session: a ClientSession object
     - uploadpath: the path on the server the logfile should be uploaded to
     - logfile: the file used for logging command output
    session, uploadpath, and logfile are not used when run within kojikamid,
    but are otherwise required.

    Returns the directory that the module was checked-out into (a
    subdirectory of scmdir)
    """
    # TODO: sanity check arguments
    sourcedir = '%s/%s' % (scmdir, self.module)

    update_checkout_cmd = None
    update_checkout_dir = None
    env = None

    def _run(cmd, chdir=None, fatal=False, log=True, _count=[0]):
        # _count is a mutable default used as a persistent call counter:
        # the first call truncates the logfile, later calls append.
        if globals().get('KOJIKAMID'):
            #we've been inserted into kojikamid, use its run()
            return run(cmd, chdir=chdir, fatal=fatal, log=log)
        else:
            append = (_count[0] > 0)
            _count[0] += 1
            # log_output returns a truthy status on command failure.
            if log_output(session, cmd[0], cmd, logfile, uploadpath,
                          cwd=chdir, logerror=1, append=append, env=env):
                raise koji.BuildError('Error running %s command "%s", see %s for details' %
                                      (self.scmtype, ' '.join(cmd), os.path.basename(logfile)))

    # Build the checkout (and optional update) commands per SCM type.
    if self.scmtype == 'CVS':
        pserver = ':pserver:%s@%s:%s' % ((self.user or 'anonymous'), self.host, self.repository)
        module_checkout_cmd = ['cvs', '-d', pserver, 'checkout', '-r', self.revision, self.module]
        common_checkout_cmd = ['cvs', '-d', pserver, 'checkout', 'common']

    elif self.scmtype == 'CVS+SSH':
        if not self.user:
            raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)

        cvsserver = ':ext:%s@%s:%s' % (self.user, self.host, self.repository)
        module_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', '-r', self.revision, self.module]
        common_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', 'common']
        env = {'CVS_RSH': 'ssh'}

    elif self.scmtype == 'GIT':
        scheme = self.scheme
        if '+' in scheme:
            # e.g. "git+https" -> use the part after the '+' as the URL scheme
            scheme = scheme.split('+')[1]
        gitrepo = '%s%s%s' % (scheme, self.host, self.repository)
        commonrepo = os.path.dirname(gitrepo) + '/common'
        checkout_path = os.path.basename(self.repository)
        if self.repository.endswith('/.git'):
            # If we're referring to the .git subdirectory of the main module,
            # assume we need to do the same for the common module
            checkout_path = os.path.basename(self.repository[:-5])
            commonrepo = os.path.dirname(gitrepo[:-5]) + '/common/.git'
        elif self.repository.endswith('.git'):
            # If we're referring to a bare repository for the main module,
            # assume we need to do the same for the common module
            checkout_path = os.path.basename(self.repository[:-4])
            commonrepo = os.path.dirname(gitrepo[:-4]) + '/common.git'

        sourcedir = '%s/%s' % (scmdir, checkout_path)
        module_checkout_cmd = ['git', 'clone', '-n', gitrepo, sourcedir]
        common_checkout_cmd = ['git', 'clone', commonrepo, 'common']
        update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
        update_checkout_dir = sourcedir

        # self.module may be empty, in which case the specfile should be in
        # the top-level directory
        if self.module:
            # Treat the module as a directory inside the git repository
            sourcedir = '%s/%s' % (sourcedir, self.module)

    elif self.scmtype == 'GIT+SSH':
        if not self.user:
            raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
        gitrepo = 'git+ssh://%s@%s%s' % (self.user, self.host, self.repository)
        commonrepo = os.path.dirname(gitrepo) + '/common'
        checkout_path = os.path.basename(self.repository)
        if self.repository.endswith('/.git'):
            # If we're referring to the .git subdirectory of the main module,
            # assume we need to do the same for the common module
            checkout_path = os.path.basename(self.repository[:-5])
            commonrepo = os.path.dirname(gitrepo[:-5]) + '/common/.git'
        elif self.repository.endswith('.git'):
            # If we're referring to a bare repository for the main module,
            # assume we need to do the same for the common module
            checkout_path = os.path.basename(self.repository[:-4])
            commonrepo = os.path.dirname(gitrepo[:-4]) + '/common.git'

        sourcedir = '%s/%s' % (scmdir, checkout_path)
        module_checkout_cmd = ['git', 'clone', '-n', gitrepo, sourcedir]
        common_checkout_cmd = ['git', 'clone', commonrepo, 'common']
        update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
        update_checkout_dir = sourcedir

        # self.module may be empty, in which case the specfile should be in
        # the top-level directory
        if self.module:
            # Treat the module as a directory inside the git repository
            sourcedir = '%s/%s' % (sourcedir, self.module)

    elif self.scmtype == 'SVN':
        scheme = self.scheme
        if '+' in scheme:
            scheme = scheme.split('+')[1]

        svnserver = '%s%s%s' % (scheme, self.host, self.repository)
        module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
                               '%s/%s' % (svnserver, self.module), self.module]
        common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]

    elif self.scmtype == 'SVN+SSH':
        if not self.user:
            raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)

        svnserver = 'svn+ssh://%s@%s%s' % (self.user, self.host, self.repository)
        module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
                               '%s/%s' % (svnserver, self.module), self.module]
        common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]

    else:
        raise koji.BuildError('Unknown SCM type: %s' % self.scmtype)

    # perform checkouts
    _run(module_checkout_cmd, chdir=scmdir, fatal=True)

    if update_checkout_cmd:
        # Currently only required for GIT checkouts
        # Run the command in the directory the source was checked out into
        if self.scmtype.startswith('GIT') and globals().get('KOJIKAMID'):
            _run(['git', 'config', 'core.autocrlf', 'true'],
                 chdir=update_checkout_dir, fatal=True)
            _run(['git', 'config', 'core.safecrlf', 'true'],
                 chdir=update_checkout_dir, fatal=True)
        _run(update_checkout_cmd, chdir=update_checkout_dir, fatal=True)

    if self.use_common and not globals().get('KOJIKAMID'):
        _run(common_checkout_cmd, chdir=scmdir, fatal=True)
        if not os.path.exists('%s/../common' % sourcedir):
            # find the relative distance from sourcedir/../common to
            # scmdir/common
            destdir = os.path.split(sourcedir)[0]
            path_comps = destdir[len(scmdir) + 1:]
            rel_path = '../' * len(path_comps.split('/'))
            os.symlink(rel_path + 'common', '%s/../common' % sourcedir)

    return sourcedir
def handler(self, name, version, distro, image_types, target, arches, opts):
    """Main entry point for the task"""
    self.logger.debug("Building image via osbuild %s, %s, %s, %s",
                      name, str(arches), str(target), str(opts))
    self.logger.debug("Task id: %s", str(self.id))

    # strict=True makes getBuildTarget raise for a missing target; the
    # explicit check is kept as a defensive fallback.
    target_info = self.session.getBuildTarget(target, strict=True)
    if not target_info:
        raise koji.BuildError(f"Target '{target}' not found")

    build_tag = target_info['build_tag']
    buildconfig = self.session.getBuildConfig(build_tag)

    # Architectures: every *requested* arch must be supported by the tag.
    # Fix: the original computed tag_arches - arches, which rejected a
    # request for a subset of the tag's arches and failed to flag
    # requested arches the tag does not support.
    tag_arches = self.arches_for_config(buildconfig)
    arches = set(arches)
    diff = arches - tag_arches
    if diff:
        raise koji.BuildError("Unsupported architecture(s): " + str(diff))

    # Repositories: user-supplied repos win over the target's repos.
    repo_urls = opts.get("repo")
    if repo_urls:
        repos = self.make_repos_for_user(repo_urls)
    else:
        repos = self.make_repos_for_target(target_info)

    client = self.client

    # Version and names: let the hub pick the next release if unset.
    nvr = NVR(name, version, opts.get("release"))
    if not nvr.release:
        nvr.release = self.session.getNextRelease(nvr.as_dict())

    # Arches and image types: one image request per (arch, type) pair.
    ireqs = [ImageRequest(a, i, repos) for a in arches for i in image_types]
    self.logger.debug(
        "Creating compose: %s (%s)\n  koji: %s\n  images: %s",
        nvr, distro, self.koji_url, str([i.as_dict() for i in ireqs]))

    # Setup done, talk to composer to create the compose
    kojidata = ComposeRequest.Koji(self.koji_url, self.id)
    cid, bid = client.compose_create(nvr, distro, ireqs, kojidata)
    self.logger.info("Compose id: %s", cid)

    self.logger.debug("Waiting for compose to finish")
    status = client.wait_for_compose(cid)

    if not status.is_success:
        self.logger.error("Compose failed: %s", str(status))
        return {'koji_builds': []}

    return {
        'koji_builds': [bid],
        'composer_id': cid,
        'build': bid,
    }
def handler(self, src, target, opts=None):
    """Task entry point for a container (or Flatpak) build.

    Validates options, resolves the build target and arch list, checks
    Dockerfile LABELs (non-Flatpak), enforces the package whitelist,
    guards against duplicate NVRs, and then spawns createContainer().

    :param src: str, SCM URL of the sources
    :param target: str, name of the koji build target
    :param opts: dict, task options (git_branch, scratch, isolated,
                 flatpak, release, signing_intent, compose_ids, ...)
    :return: dict with 'repositories' and 'koji_builds' lists (plus
             'build': 'skipped' when no results came back)
    :raises koji.BuildError: on invalid options, missing target, invalid
                             source spec, or a pre-existing build NVR
    """
    jsonschema.validate([src, target, opts], self.PARAMS_SCHEMA)
    self.opts = opts
    component = None

    if not opts.get('git_branch'):
        raise koji.BuildError("Git branch must be specified")

    # These two build modes are mutually exclusive.
    if opts.get('scratch') and opts.get('isolated'):
        raise koji.BuildError("Build cannot be both isolated and scratch")

    self.event_id = self.session.getLastEvent()['id']
    target_info = self.session.getBuildTarget(target, event=self.event_id)
    if not target_info:
        raise koji.BuildError("Target `%s` not found" % target)
    build_tag = target_info['build_tag']
    archlist = self.getArchList(build_tag)

    flatpak = opts.get('flatpak', False)
    if flatpak:
        if not osbs_flatpak_support:
            raise koji.BuildError("osbs-client on koji builder doesn't have Flatpak support")
        release_overwrite = None
    else:
        # A user-supplied release is injected as a LABEL overwrite before
        # the Dockerfile checks run.
        label_overwrites = {}
        release_overwrite = opts.get('release')
        if release_overwrite:
            label_overwrites = {LABEL_NAME_MAP['RELEASE'][0]: release_overwrite}
        component, expected_nvr = self.checkLabels(src,
                                                   label_overwrites=label_overwrites,
                                                   build_tag=build_tag)

    # scratch builds do not get imported, and consequently not tagged
    if not self.opts.get('scratch') and not flatpak:
        self.check_whitelist(component, target_info)

    if flatpak:
        expected_nvr = None

    if not SCM.is_scm_url(src):
        raise koji.BuildError('Invalid source specification: %s' % src)

    # don't check build nvr for autorebuild (has triggered_after_koji_task)
    # as they might be using add_timestamp_to_release
    # and don't check it for skipped build, which might be enabling/disabling
    # autorebuilds which use add_timestamp_to_release
    triggered_after_koji_task = opts.get('triggered_after_koji_task', None)
    skip_build = opts.get('skip_build', False)
    if triggered_after_koji_task or skip_build:
        expected_nvr = None

    # Scratch and auto release builds shouldn't be checked for nvr
    if not self.opts.get('scratch') and expected_nvr:
        try:
            build = self.session.getBuild(expected_nvr)
            build_id = build['id']
        except Exception:
            # getBuild raising (or returning None) means no prior build.
            self.logger.info("No build for %s found", expected_nvr,
                             exc_info=True)
        else:
            # FAILED/CANCELED builds may have their NVR reused.
            if build['state'] in (koji.BUILD_STATES['FAILED'],
                                  koji.BUILD_STATES['CANCELED']):
                self.logger.info("Build for %s found, but with reusable state %s",
                                 expected_nvr, build['state'], exc_info=True)
            else:
                raise koji.BuildError("Build for %s already exists, id %s"
                                      % (expected_nvr, build_id))

    self.logger.debug("Spawning jobs for arches: %r", archlist)
    kwargs = dict(
        src=src,
        target_info=target_info,
        scratch=opts.get('scratch', False),
        isolated=opts.get('isolated', False),
        dependency_replacements=opts.get('dependency_replacements', None),
        yum_repourls=opts.get('yum_repourls', None),
        branch=opts.get('git_branch', None),
        push_url=opts.get('push_url', None),
        arches=archlist,
        koji_parent_build=opts.get('koji_parent_build'),
        release=release_overwrite,
        flatpak=flatpak,
        signing_intent=opts.get('signing_intent', None),
        compose_ids=opts.get('compose_ids', None),
        skip_build=skip_build,
        triggered_after_koji_task=triggered_after_koji_task,
    )

    results = []
    semi_results = self.createContainer(**kwargs)
    if semi_results is not None:
        results = [semi_results]

    self.logger.debug("Results: %r", results)

    all_repositories = []
    all_koji_builds = []
    if not results:
        # No build response at all -- report the build as skipped.
        return {
            'repositories': all_repositories,
            'koji_builds': all_koji_builds,
            'build': 'skipped',
        }
    for result in results:
        try:
            repository = result.get('repositories')
            all_repositories.extend(repository)
        except Exception as error:
            self.logger.error("Failed to merge list of repositories "
                              "%r. Reason (%s): %s", repository,
                              type(error), error)
        koji_build_id = result.get('koji_build_id')
        if koji_build_id:
            all_koji_builds.append(koji_build_id)

    return {
        'repositories': all_repositories,
        'koji_builds': all_koji_builds,
    }
def handler(self, src, target, opts=None):
    """Task entry point for a container (or Flatpak) build.

    Ported to Python 3 syntax: the original used ``except Exception, error``
    (a SyntaxError on Python 3) and bare ``except:`` clauses.

    :param src: str, SCM URL of the sources
    :param target: str, name of the koji build target
    :param opts: dict, task options
    :return: dict with 'repositories' and 'koji_builds' lists
    :raises koji.BuildError: on invalid options, invalid source spec, or a
                             pre-existing build NVR
    """
    if not opts:
        opts = {}
    self.opts = opts
    data = {}

    self.event_id = self.session.getLastEvent()['id']
    target_info = self.session.getBuildTarget(target, event=self.event_id)
    build_tag = target_info['build_tag']
    archlist = self.getArchList(build_tag)

    flatpak = opts.get('flatpak', False)
    if flatpak:
        if not osbs_flatpak_support:
            raise koji.BuildError(
                "osbs-client on koji builder doesn't have Flatpak support")
        module = opts.get('module', None)
        if not module:
            raise koji.BuildError(
                "Module must be specified for a Flatpak build")

        # Flatpak name/version/release come from the module specification.
        module_name, module_stream, module_version = split_module_spec(module)
        data = {
            'name': module_name,
            'version': module_stream,
        }
        if module_version is not None:
            data['release'] = module_version

        release_overwrite = None
    else:
        label_overwrites = {}
        release_overwrite = opts.get('release')
        if release_overwrite:
            label_overwrites = {LABEL_DATA_MAP['RELEASE']: release_overwrite}
        data, expected_nvr = self.checkLabels(
            src, label_overwrites=label_overwrites, build_tag=build_tag)

    admin_opts = self._get_admin_opts(opts)
    data.update(admin_opts)

    # scratch builds do not get imported, and consequently not tagged
    if not self.opts.get('scratch'):
        self.check_whitelist(data[LABEL_DATA_MAP['COMPONENT']], target_info)

    try:
        # Flatpak builds append .<N> to the release generated from module
        # version
        if flatpak:
            auto_release = True
        else:
            auto_release = (data[LABEL_DATA_MAP['RELEASE']] ==
                            LABEL_DEFAULT_VALUES['RELEASE'])
        if auto_release:
            # Do not expose default release value
            del data[LABEL_DATA_MAP['RELEASE']]

        self.extra_information = {"src": src, "data": data, "target": target}

        if not SCM.is_scm_url(src):
            raise koji.BuildError('Invalid source specification: %s' % src)

        # Scratch and auto release builds shouldn't be checked for nvr
        if not self.opts.get('scratch') and not auto_release:
            try:
                build_id = self.session.getBuild(expected_nvr)['id']
            except Exception:
                # Narrowed from a bare 'except:' so KeyboardInterrupt and
                # friends are not swallowed here.
                self.logger.info("No build for %s found", expected_nvr,
                                 exc_info=True)
            else:
                raise koji.BuildError(
                    "Build for %s already exists, id %s"
                    % (expected_nvr, build_id))

        results = self.runBuilds(
            src, target_info, archlist,
            scratch=opts.get('scratch', False),
            isolated=opts.get('isolated', False),
            yum_repourls=opts.get('yum_repourls', None),
            branch=opts.get('git_branch', None),
            push_url=opts.get('push_url', None),
            koji_parent_build=opts.get('koji_parent_build'),
            release=release_overwrite,
            flatpak=flatpak,
            module=opts.get('module', None),
            compose_ids=opts.get('compose_ids', None),
            signing_intent=opts.get('signing_intent', None),
        )

        all_repositories = []
        all_koji_builds = []
        for result in results:
            try:
                repository = result.get('repositories')
                all_repositories.extend(repository)
            except Exception as error:  # was py2 'except Exception, error'
                self.logger.error(
                    "Failed to merge list of repositories "
                    "%r. Reason (%s): %s", repository, type(error), error)
            koji_build_id = result.get('koji_build_id')
            if koji_build_id:
                all_koji_builds.append(koji_build_id)
    except (SystemExit, ServerExit, KeyboardInterrupt):
        # we do not trap these
        raise
    except Exception:
        # reraise the exception
        raise

    return {
        'repositories': all_repositories,
        'koji_builds': all_koji_builds,
    }
def createContainer(self, src=None, target_info=None, arches=None, scratch=None, isolated=None, yum_repourls=[], branch=None, push_url=None, koji_parent_build=None, release=None, flatpak=False, module=None, signing_intent=None, compose_ids=None): if not yum_repourls: yum_repourls = [] this_task = self.session.getTaskInfo(self.id) self.logger.debug("This task: %r", this_task) owner_info = self.session.getUser(this_task['owner']) self.logger.debug("Started by %s", owner_info['name']) scm = My_SCM(src) scm.assert_allowed(self.options.allowed_scms) git_uri = scm.get_git_uri() component = scm.get_component() arch = None if not arches: raise koji.BuildError("arches aren't specified") if signing_intent and compose_ids: raise koji.BuildError("signing_intent used with compose_ids") if compose_ids and yum_repourls: raise koji.BuildError("compose_ids used with repo_url") create_build_args = { 'git_uri': git_uri, 'git_ref': scm.revision, 'user': owner_info['name'], 'component': component, 'target': target_info['name'], 'yum_repourls': yum_repourls, 'scratch': scratch, 'koji_task_id': self.id, 'architecture': arch, } if branch: create_build_args['git_branch'] = branch if push_url: create_build_args['git_push_url'] = push_url if flatpak: create_build_args['flatpak'] = True if module: create_build_args['module'] = module try: orchestrator_create_build_args = create_build_args.copy() orchestrator_create_build_args['platforms'] = arches if signing_intent: orchestrator_create_build_args[ 'signing_intent'] = signing_intent if compose_ids: orchestrator_create_build_args['compose_ids'] = compose_ids if koji_parent_build: orchestrator_create_build_args[ 'koji_parent_build'] = koji_parent_build if isolated: orchestrator_create_build_args['isolated'] = isolated if release: orchestrator_create_build_args['release'] = release create_method = self.osbs().create_orchestrator_build self.logger.debug("Starting %s with params: '%s", create_method, orchestrator_create_build_args) build_response = 
create_method(**orchestrator_create_build_args) except (AttributeError, OsbsOrchestratorNotEnabled): # Older osbs-client, or else orchestration not enabled create_build_args['architecture'] = arch = arches[0] create_method = self.osbs().create_build self.logger.debug("Starting %s with params: '%s'", create_method, create_build_args) build_response = create_method(**create_build_args) build_id = build_response.get_build_name() self.logger.debug("OSBS build id: %r", build_id) # When builds are cancelled the builder plugin process gets SIGINT and SIGKILL # If osbs has started a build it should get cancelled def sigint_handler(*args, **kwargs): if not build_id: return self.logger.warn("Cannot read logs, cancelling build %s", build_id) self.osbs().cancel_build(build_id) signal.signal(signal.SIGINT, sigint_handler) self.logger.debug("Waiting for osbs build_id: %s to be scheduled.", build_id) # we need to wait for kubelet to schedule the build, otherwise it's 500 self.osbs().wait_for_build_to_get_scheduled(build_id) self.logger.debug("Build was scheduled") osbs_logs_dir = self.resultdir() koji.ensuredir(osbs_logs_dir) pid = os.fork() if pid: try: self._incremental_upload_logs(pid) except koji.ActionNotAllowed: pass else: self._osbs = None # Following retry code is here mainly to workaround bug which causes # connection drop while reading logs after about 5 minutes. # OpenShift bug with description: # https://github.com/openshift/origin/issues/2348 # and upstream bug in Kubernetes: # https://github.com/GoogleCloudPlatform/kubernetes/issues/9013 retry = 0 max_retries = 30 while retry < max_retries: try: self._write_incremental_logs(build_id, osbs_logs_dir) except Exception, error: self.logger.info( "Error while saving incremental logs " "(retry #%d): %s", retry, error) retry += 1 time.sleep(10) continue break else:
def getNextRelease(self, build_info):
    """Always fail: automatic release incrementing is not supported here.

    :param build_info: build metadata dict (unused)
    :raises koji.BuildError: unconditionally
    """
    raise koji.BuildError('Unable to increment release')
def handler(self, root, arch, command, keep=False, packages=None, mounts=None,
            repo_id=None, skip_setarch=False, weight=None, upload_logs=None,
            new_chroot=False):
    """Create a buildroot and run a command (as root) inside of it

    Command may be a string or a list.

    Returns a message indicating success if the command was successful, and
    raises an error otherwise. Command output will be available in
    runroot.log in the task output directory on the hub.

    skip_setarch is a rough approximation of an old hack

    the keep option is not used. keeping for compatibility for now...

    upload_logs is list of absolute paths which will be uploaded for
    archiving on hub. It always consists of /tmp/runroot.log, but can be
    used for additional logs (pungi.log, etc.)

    packages and mounts default to empty lists; the None sentinel avoids
    the shared-mutable-default-argument pitfall while remaining
    backward-compatible with callers that pass lists (or nothing).
    """
    # Normalize the former mutable defaults.
    if packages is None:
        packages = []
    if mounts is None:
        mounts = []
    if weight is not None:
        weight = max(weight, 0.5)
        self.session.host.setTaskWeight(self.id, weight)
    #noarch is funny
    if arch == "noarch":
        #we need a buildroot arch. Pick one that:
        # a) this host can handle
        # b) the build tag can support
        # c) is canonical
        host_arches = self.session.host.getHost()['arches']
        if not host_arches:
            raise koji.BuildError("No arch list for this host")
        tag_arches = self.session.getBuildConfig(root)['arches']
        if not tag_arches:
            raise koji.BuildError("No arch list for tag: %s" % root)
        #index canonical host arches
        host_arches = dict([(koji.canonArch(a), 1) for a in host_arches.split()])
        #pick the first suitable match from tag's archlist
        for br_arch in tag_arches.split():
            br_arch = koji.canonArch(br_arch)
            if br_arch in host_arches:
                #we're done
                break
        else:
            #no overlap
            raise koji.BuildError("host does not match tag arches: %s (%s)"
                                  % (root, tag_arches))
    else:
        br_arch = arch
    if repo_id:
        repo_info = self.session.repoInfo(repo_id, strict=True)
        if repo_info['tag_name'] != root:
            raise koji.BuildError("build tag (%s) does not match repo tag (%s)"
                                  % (root, repo_info['tag_name']))
        if repo_info['state'] not in (koji.REPO_STATES['READY'],
                                      koji.REPO_STATES['EXPIRED']):
            raise koji.BuildError("repos in the %s state may not be used by runroot"
                                  % koji.REPO_STATES[repo_info['state']])
    else:
        repo_info = self.session.getRepo(root)
        if not repo_info:
            #wait for it
            task_id = self.session.host.subtask(method='waitrepo',
                                                arglist=[root, None, None],
                                                parent=self.id)
            repo_info = self.wait(task_id)[task_id]
    broot = BuildRoot(self.session, self.options, root, br_arch, self.id,
                      repo_id=repo_info['id'], setup_dns=True)
    broot.workdir = self.workdir
    broot.init()
    rootdir = broot.rootdir()
    #workaround for rpm oddness
    os.system('rm -f "%s"/var/lib/rpm/__db.*' % rootdir)
    #update buildroot state (so that updateBuildRootList() will work)
    self.session.host.setBuildRootState(broot.id, 'BUILDING')
    try:
        if packages:
            #pkglog = '%s/%s' % (broot.resultdir(), 'packages.log')
            pkgcmd = ['--install'] + packages
            status = broot.mock(pkgcmd)
            self.session.host.updateBuildRootList(broot.id, broot.getPackageList())
            if not isSuccess(status):
                raise koji.BuildrootError(parseStatus(status, pkgcmd))
        if isinstance(command, str):
            cmdstr = command
        else:
            #we were passed an arglist
            #we still have to run this through the shell (for redirection)
            #but we can preserve the list structure precisely with careful escaping
            cmdstr = ' '.join(["'%s'" % arg.replace("'", r"'\''") for arg in command])
        # A nasty hack to put command output into its own file until mock can be
        # patched to do something more reasonable than stuff everything into build.log
        cmdargs = ['/bin/sh', '-c',
                   "{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; "
                   "exit ${PIPESTATUS[0]}" % cmdstr]
        # always mount /mnt/redhat (read-only)
        # always mount /mnt/iso (read-only)
        # also need /dev bind mount
        self.do_mounts(rootdir, [self._get_path_params(x)
                                 for x in self.config['default_mounts']])
        self.do_extra_mounts(rootdir, mounts)
        mock_cmd = ['chroot']
        if new_chroot:
            mock_cmd.append('--new-chroot')
        if skip_setarch:
            #we can't really skip it, but we can set it to the current one
            #instead of of the chroot one
            myarch = platform.uname()[5]
            mock_cmd.extend(['--arch', myarch])
        mock_cmd.append('--')
        mock_cmd.extend(cmdargs)
        rv = broot.mock(mock_cmd)
        log_paths = ['/builddir/runroot.log']
        if upload_logs is not None:
            log_paths += upload_logs
        for log_path in log_paths:
            self.uploadFile(rootdir + log_path)
    finally:
        # mock should umount its mounts, but it will not handle ours
        self.undo_mounts(rootdir, fatal=False)
        broot.expire()
    if isinstance(command, str):
        cmdlist = command.split()
    else:
        cmdlist = command
    # Drop VAR=value env-style params so the reported name is the real command
    cmdlist = [param for param in cmdlist if '=' not in param]
    if cmdlist:
        cmd = os.path.basename(cmdlist[0])
    else:
        cmd = '(none)'
    if isSuccess(rv):
        return '%s completed successfully' % cmd
    else:
        raise koji.BuildrootError(parseStatus(rv, cmd))
def handler(self, name, version, distro, image_types, target, arches, opts):
    """Main entry point for the task

    Validates target and architectures, assembles repositories and image
    requests, submits a compose request to osbuild-composer, waits for it
    to finish, attaches logs, and tags the resulting koji build.

    :param name: package (image) name
    :param version: package version
    :param distro: distribution the compose is built for
    :param image_types: list of image types to build
    :param target: koji build target name
    :param arches: list of architectures to build for
    :param opts: dict of options ('repo', 'release' are read here)
    :returns: dict with 'composer' (server, compose id) and 'koji' (build id)
    :raises koji.BuildError: on unknown target, unsupported arch,
                             or failed compose
    """
    self.logger.debug("Building image via osbuild %s, %s, %s, %s",
                      name, str(arches), str(target), str(opts))
    self.logger.debug("Task id: %s", str(self.id))

    target_info = self.session.getBuildTarget(target, strict=True)
    # Defensive: strict=True already raises for an unknown target,
    # but keep the explicit check as a second line of defense.
    if not target_info:
        raise koji.BuildError(f"Target '{target}' not found")
    build_tag = target_info['build_tag']
    buildconfig = self.session.getBuildConfig(build_tag)

    # Architectures: requested arches must be a subset of the tag's arches
    tag_arches = self.arches_for_config(buildconfig)
    diff = set(arches) - tag_arches
    if diff:
        raise koji.BuildError("Unsupported architecture(s): " + str(diff))

    # Repositories: user-supplied repos win over the target's defaults
    repo_urls = opts.get("repo")
    if repo_urls:
        repos = self.make_repos_for_user(repo_urls)
    else:
        repos = self.make_repos_for_target(target_info)

    client = self.client

    # Version and names
    nvr = NVR(name, version, opts.get("release"))
    if not nvr.release:
        nvr.release = self.session.getNextRelease(nvr.as_dict())

    # Arches and image types: one request per (arch, image type) pair
    ireqs = [ImageRequest(a, i, repos) for a in arches for i in image_types]

    self.logger.debug("Creating compose: %s (%s)\n koji: %s\n images: %s",
                      nvr, distro, self.koji_url,
                      str([i.as_dict() for i in ireqs]))

    # Setup done, create the compose request and send it off
    kojidata = ComposeRequest.Koji(self.koji_url, self.id)
    request = ComposeRequest(nvr, distro, ireqs, kojidata)
    self.upload_json(request.as_dict(), "compose-request")
    cid, bid = client.compose_create(request)
    self.logger.info("Compose id: %s, Koji build id: %s", cid, bid)

    # Typo fix: was "Waiting for comose to finish"
    self.logger.debug("Waiting for compose to finish")
    status = client.wait_for_compose(cid, callback=self.on_status_update)
    self.logger.debug("Compose finished: %s", str(status.as_dict()))
    self.logger.info("Compose result: %s", status.status)

    self.attach_logs(cid, ireqs)

    if not status.is_success:
        raise koji.BuildError(f"Compose failed (id: {cid})")

    self.tag_build(target_info["dest_tag"], bid)

    result = {
        "composer": {
            "server": self.composer_url,
            "id": cid
        },
        "koji": {
            "build": bid
        }
    }
    return result