def compose_atomic_trees(self):
    """Compose Atomic OSTrees for each tag that we mashed.

    Maps each successfully composed repo back to its tag, deep-copies the
    release's atomic configuration, rewrites its repo URLs to point at the
    local mash output, and runs the :class:`AtomicComposer` on it.

    :raises MashTaskException: if any atomic tree compose fails.
    """
    composer = AtomicComposer()
    # Map e.g. 'f22-updates' -> '/mnt/.../f22-updates-080905.0057' by
    # stripping the trailing timestamp component from each repo path.
    mashed_repos = {'-'.join(basename(repo).split('-')[:-1]): repo
                    for repo in self.composed_repos}

    for tag, mash_path in mashed_repos.items():
        if tag not in atomic_config['releases']:
            # 'warning' (not the deprecated 'warn' alias), consistent
            # with the rest of this module.
            log.warning('Cannot find atomic configuration for %r', tag)
            continue

        # Update the repo URLs to point to our local mashes.  Deep-copy
        # so we never mutate the shared atomic_config mapping.
        release = copy.deepcopy(atomic_config['releases'][tag])
        mash_path = 'file://' + os.path.join(mash_path, tag, release['arch'])

        if 'updates-testing' in tag:
            release['repos']['updates-testing'] = mash_path
            updates_tag = tag.replace('-testing', '')
            if updates_tag in mashed_repos:
                # The stable updates repo was mashed in this run too, so
                # point at the local copy instead of the remote one.
                release['repos']['updates'] = 'file://' + os.path.join(
                    mashed_repos[updates_tag], updates_tag,
                    release['arch'])
                log.debug('Using the updates repo from %s',
                          release['repos']['updates'])
        else:
            release['repos']['updates'] = mash_path

        # Compose the tree, and raise an exception upon failure
        result = composer.compose(release)
        if result['result'] != 'success':
            log.error(result)
            raise MashTaskException('%s atomic compose failed' % tag)
        else:
            log.info('%s atomic tree compose successful', tag)
def mash(self):
    """Run the external ``mash`` command for every repo in ``self.repos``.

    Repos already present in ``self.composed_repos`` (from a resumed push)
    are skipped.  On success the mash output is saved to ``mash.out`` in
    the new mash directory, the directory is appended to
    ``self.composed_repos``, and the push lock is updated.  On failure the
    combined output is written to a ``mash-failed-*`` file.

    :raises MashTaskException: if the mash command exits non-zero.
    """
    t0 = time.time()
    self.mashing = True
    self.update_comps()

    # {'f9-updates': '/mnt/koji/mash/updates/f9-updates-080905.0057',}
    # Strip the trailing timestamp component to recover the repo name.
    finished_repos = {'-'.join(basename(repo).split('-')[:-1]): repo
                      for repo in self.composed_repos}

    for repo in self.repos:
        # Skip mashing this repo if we successfully mashed it previously
        if repo in finished_repos:
            log.info('Skipping previously mashed repo %s' % repo)
            self.mashed_repos[repo] = finished_repos[repo]
            continue
        #fedmsg.publish(topic="mashtask.mashing", msg=dict(repo=repo))
        mashdir = join(config.get('mashed_dir'),
                       repo + '-' + time.strftime("%y%m%d.%H%M"))
        self.mashed_repos[repo] = mashdir
        comps = join(config.get('comps_dir'),
                     'comps-%s.xml' % repo.split('-')[0])
        updatepath = join(config.get('mashed_dir'), repo)
        mashcmd = self.cmd % (mashdir, comps) + '-p %s ' % updatepath + repo
        log.info("Running `%s`" % mashcmd)
        # NOTE(review): shell=True with an interpolated command string --
        # safe only while repo names come from trusted configuration.
        p = subprocess.Popen(mashcmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, shell=True)
        stdout, stderr = p.communicate()
        log.info("mash returncode = %s" % p.returncode)
        if p.returncode:
            self.success = False
            failed_output = join(
                config.get('mashed_dir'),
                'mash-failed-%s' % time.strftime("%y%m%d.%H%M"))
            # `with open(...)` replaces the removed `file()` builtin and
            # guarantees the handle is closed even if a write raises.
            with open(failed_output, 'w') as out:
                out.write(stdout)
                if stderr:
                    out.write('\n\nstderr:\n\n')
                    out.write(stderr)
            log.info("Wrote failed mash output to %s" % failed_output)
            self.log = failed_output
            raise MashTaskException("Mash failed")
        else:
            self.success = True
            mash_output = '%s/mash.out' % mashdir
            with open(mash_output, 'w') as out:
                out.write(stdout)
            log.info("Wrote mash output to %s" % mash_output)
            self.log = mash_output
            self.composed_repos.append(mashdir)
            # Persist progress so a crashed push can be resumed without
            # re-mashing repos that already completed.
            self._update_lock()

    self.mashing = False
    log.debug("Mashed for %s seconds" % (time.time() - t0))
def move_builds(self):
    """
    Move all builds associated with our batch of updates to the proper tag.

    This is determined based on the request of the update, and it's current
    state.  All tag/move operations are batched into a single koji
    multicall, then executed at once and waited on.

    :raises MashTaskException: if the task wait reports failure
        (``self.success`` never flips to True).
    """
    t0 = time.time()
    self.success = False
    self.moving = True
    log.debug("Setting up koji multicall for moving builds")
    # Multicall mode: tagBuild/moveBuild calls below are queued, not
    # executed, until multiCall() is invoked after the loop.
    self.koji.multicall = True
    for update in sort_updates(self.updates):
        # Pick the destination tag from the update's request.
        # NOTE(review): self.tag is an instance attribute, so if
        # update.request matches none of these branches, the tag from
        # the PREVIOUS iteration is silently reused -- presumably
        # requests are constrained to these three values; confirm.
        if update.request == 'stable':
            self.tag = update.release.stable_tag
            # [No Frozen Rawhide] Move stable builds going to a pending
            # release to the Release.dist-tag
            if update.release.locked:
                self.tag = update.release.dist_tag
        elif update.request == 'testing':
            self.tag = update.release.testing_tag
        elif update.request == 'obsolete':
            self.tag = update.release.candidate_tag
        current_tag = update.get_build_tag()
        for build in update.builds:
            if build.inherited:
                # Inherited builds are tagged in addition to their
                # current tag rather than moved.
                log.debug("Adding tag %s to %s" % (self.tag, build.nvr))
                self.koji.tagBuild(self.tag, build.nvr, force=True)
            elif update.release.locked and update.request == 'stable':
                # Stable pushes to a locked (pending) release also only
                # add the tag, preserving the candidate tag.
                log.debug("Adding tag %s to %s" % (self.tag, build.nvr))
                self.koji.tagBuild(self.tag, build.nvr, force=True)
            else:
                log.debug("Moving %s from %s to %s" % (
                    build.nvr, current_tag, self.tag))
                self.koji.moveBuild(current_tag, self.tag, build.nvr,
                                    force=True)
                # Record the move so it can be undone/audited later.
                self.actions.append((build.nvr, current_tag, self.tag))
    # Fire the queued calls; each result's first element is a task id.
    results = self.koji.multiCall()
    # NOTE(review): a falsy return from wait_for_tasks is treated as
    # success here -- it presumably returns a failure count/flag; verify
    # against buildsys.wait_for_tasks before changing this condition.
    if not buildsys.wait_for_tasks([task[0] for task in results]):
        self.success = True
    self.moving = False
    log.debug("Moved builds in %s seconds" % (time.time() - t0))
    if not self.success:
        raise MashTaskException("Failed to move builds")
def _restore_updates(self, titles):
    """Resolve each update title and add it to ``self.updates``.

    Titles that no longer exist in the database are logged and skipped.
    """
    for title in titles:
        try:
            self.updates.add(PackageUpdate.byTitle(title))
        except SQLObjectNotFound:
            log.warning("Cannot find %s" % title)

def _lock(self):
    """
    Write out what updates we are pushing and any successfully mashed
    repositories to our MASHING lock.

    When resuming, the existing lock (a pickle holding either a plain
    list of update titles, or a dict with 'updates' and 'composed_repos'
    keys) is read back and used to restore ``self.updates`` and
    ``self.composed_repos``.

    :raises MashTaskException: on an unknown lock format, on a stale
        lock that is not being resumed, or when resuming without a lock.
    """
    mashed_dir = config.get('mashed_dir')
    mash_stage = config.get('mashed_stage_dir')
    mash_lock = join(mashed_dir, 'MASHING-%s' % self.mash_lock_id)
    if not os.path.isdir(mashed_dir):
        log.info("Creating mashed_dir %s" % mashed_dir)
        os.makedirs(mashed_dir)
    if not os.path.isdir(mash_stage):
        log.info("Creating mashed_stage_dir %s" % mash_stage)
        os.makedirs(mash_stage)
    if os.path.exists(mash_lock):
        if self.resume:
            log.debug("Resuming previous push!")
            # NOTE: unpickling is only safe because this lock file is
            # written locally by us, never by untrusted input.
            with open(mash_lock, 'r') as lock:
                masher_state = pickle.load(lock)

            # For backwards compatibility, we need to make sure we handle
            # masher state that is just a list of updates, as well as a
            # dictionary of updates and successfully mashed repos
            if isinstance(masher_state, list):
                self._restore_updates(masher_state)
            # { 'updates' : [PackageUpdate.title,],
            #   'repos' : ['/path_to_completed_repo',] }
            elif isinstance(masher_state, dict):
                self._restore_updates(masher_state['updates'])
                self.composed_repos.extend(masher_state['composed_repos'])
            else:
                log.error('Unknown masher lock format: %s' % masher_state)
                raise MashTaskException
        else:
            log.error("Previous mash not complete! Either resume the last "
                      "push, or remove %s" % mash_lock)
            raise MashTaskException
    else:
        if self.resume:
            msg = "Trying to resume a push, yet %s doesn't exist!" % mash_lock
            log.error(msg)
            raise MashTaskException(msg)
        log.debug("Creating lock for updates push: %s" % mash_lock)
        # `with open(...)` replaces the removed `file()` builtin and
        # closes the lock even if pickling fails.
        with open(mash_lock, 'w') as lock:
            pickle.dump({
                'updates': [update.title for update in self.updates],
                'composed_repos': self.composed_repos,
            }, lock)