def register_real_build(self, package, build_info):
    """
    Create and persist a Build record for a build that was done in Koji
    outside of koschei (a "real" build), described by build_info.

    Commits the transaction on success and returns the new Build. If
    another daemon inserted the same build concurrently (IntegrityError),
    the transaction is rolled back and None is returned.
    """
    # TODO send fedmsg for real builds?
    try:
        koji_to_koschei_state = {
            koji.BUILD_STATES['COMPLETE']: Build.COMPLETE,
            koji.BUILD_STATES['FAILED']: Build.FAILED,
        }
        new_build = Build(
            task_id=build_info['task_id'],
            real=True,
            version=build_info['version'],
            epoch=build_info['epoch'],
            release=build_info['release'],
            package_id=package.id,
            state=koji_to_koschei_state[build_info['state']],
        )
        self.db.add(new_build)
        # flush so the build gets its id before tasks are synced
        self.db.flush()
        self.sync_tasks(new_build)
        self.flush_depchanges(new_build)
        self.log.info(
            'Registering real build {}-{}-{} (task_id {})'.format(
                package.name, new_build.version, new_build.release,
                new_build.task_id))
        self.db.commit()
        return new_build
    except IntegrityError:
        # other daemon adds the same concurrently
        self.db.rollback()
def submit_build(session, package, arch_override=None):
    """
    Submits a scratch-build to Koji for given package.

    :param session: KoscheiBackendSession
    :param package: A package for which to submit build
    :param arch_override: optional list of architectures that will be used
                          instead of Koji's default
    :return: the newly created Build (flushed, not committed), or None when
             no SRPM could be found for the package
    """
    # caller must have refreshed the collection's repo before submitting
    assert package.collection.latest_repo_id
    build = Build(package_id=package.id, state=Build.RUNNING)
    name = package.name
    build_opts = {}
    if arch_override:
        # Koji expects a space-separated arch list
        build_opts['arch_override'] = ' '.join(arch_override)
    # on secondary Koji, collections SRPMs are taken from secondary, primary
    # needs to be able to build from relative URL constructed against
    # secondary (internal redirect)
    srpm_res = koji_util.get_last_srpm(
        session.secondary_koji_for(package.collection),
        package.collection.dest_tag,
        name,
        relative=True,
    )
    if srpm_res:
        srpm, srpm_url = srpm_res
        if session.build_from_repo_id:
            # build against a pinned repo instead of a build target
            target = None
            build.repo_id = package.collection.latest_repo_id
            build_opts.update({'repo_id': build.repo_id})
        else:
            target = package.collection.target
        # priorities are reset after the build is done
        # - the reason for that is that the build might be canceled and we want
        # the priorities to be retained in that case
        build.task_id = koji_util.koji_scratch_build(
            session.koji('primary'),
            target,
            name,
            srpm_url,
            build_opts,
        )
        build.started = datetime.now()
        # record the NEVR of the SRPM that was actually submitted
        build.epoch = srpm['epoch']
        build.version = srpm['version']
        build.release = srpm['release']
        session.db.add(build)
        session.db.flush()
        return build
def prepare_build(self, package, state=None, repo_id=None, resolved=True,
                  arches=(), task_id=None, started=None, untagged=False,
                  epoch=None, version='1', release='1.fc25', real=False):
    """
    Test helper: create and commit a Build (plus one KojiTask per entry in
    arches) for the given package.

    :param package: Package instance, or a package name (looked up in the
                    current collection, created via prepare_package if absent)
    :param state: Build state constant, or an alias — True/'complete',
                  False/'failed', None stays None, 'running'
    :param epoch: epoch to store on the build
                  (FIX: was previously accepted but silently ignored)
    :return: the committed Build
    """
    state_aliases = {
        True: Build.COMPLETE,
        False: Build.FAILED,
        None: Build.RUNNING,
        'complete': Build.COMPLETE,
        'failed': Build.FAILED,
        'running': Build.RUNNING,
    }
    if isinstance(state, (bool, str)):
        state = state_aliases[state]
    # NOTE(review): state=None is not translated by the mapping above (None
    # is neither bool nor str), so Build receives state=None and repo_id
    # defaults to 1 — presumably relying on the model's column default;
    # preserved as-is.
    if isinstance(package, str):
        existing = self.db.query(Package)\
            .filter_by(name=package, collection_id=self.collection.id).first()
        package = existing or self.prepare_package(package)
    build = Build(
        package=package,
        state=state,
        repo_id=repo_id or (1 if state != Build.RUNNING else None),
        epoch=epoch,  # FIX: propagate the epoch argument to the model
        version=version,
        release=release,
        untagged=untagged,
        real=real,
        # synthesize unique task ids / start times from a counter
        task_id=task_id or self.task_id_counter,
        started=started or datetime.fromtimestamp(self.task_id_counter),
        deps_resolved=resolved,
    )
    if not task_id:
        self.task_id_counter += 1
    self.db.add(build)
    self.db.commit()
    for arch in arches:
        koji_task = KojiTask(task_id=7541, arch=arch, state=1,
                             started=datetime.fromtimestamp(123),
                             build_id=build.id)
        self.db.add(koji_task)
    self.db.commit()
    return build
def submit_build(self, package):
    """
    Submit a scratch build of the package's latest SRPM to Koji.

    Returns the new Build (flushed, not committed), or None when no SRPM
    is available for the package.
    """
    new_build = Build(package_id=package.id, state=Build.RUNNING)
    pkg_name = package.name
    opts = ({'arch_override': package.arch_override}
            if package.arch_override else {})
    srpm, srpm_url = (util.get_last_srpm(self.koji_session, pkg_name)
                      or (None, None))
    if not srpm_url:
        # no SRPM found — nothing submitted, nothing persisted
        return None
    # manual priority served its purpose once the build is submitted
    package.manual_priority = 0
    new_build.task_id = util.koji_scratch_build(self.koji_session, pkg_name,
                                                srpm_url, opts)
    new_build.started = datetime.now()
    # record the NEVR of the SRPM that was submitted
    new_build.epoch = srpm['epoch']
    new_build.version = srpm['version']
    new_build.release = srpm['release']
    self.db.add(new_build)
    self.db.flush()
    self.flush_depchanges(new_build)
    return new_build
def submit_build(self, package):
    """
    Submit a scratch build for the package: the latest SRPM is fetched from
    the secondary Koji and the build is submitted to the primary one.

    Returns the new Build (flushed, not committed), or None when no SRPM
    could be found.
    """
    new_build = Build(package_id=package.id, state=Build.RUNNING)
    pkg_name = package.name
    opts = {}
    if package.arch_override:
        opts = {'arch_override': package.arch_override}
    tag = package.collection.target_tag
    # SRPMs are taken from secondary, primary needs to be able to build
    # from relative URL constructed against secondary (internal redirect)
    last_srpm = koji_util.get_last_srpm(self.koji_sessions['secondary'],
                                        tag, pkg_name)
    if not last_srpm:
        return None
    srpm_header, srpm_url = last_srpm
    # manual priority served its purpose once the build is submitted
    package.manual_priority = 0
    new_build.task_id = koji_util.koji_scratch_build(
        self.koji_sessions['primary'], tag, pkg_name, srpm_url, opts)
    new_build.started = datetime.now()
    new_build.epoch = srpm_header['epoch']
    new_build.version = srpm_header['version']
    new_build.release = srpm_header['release']
    self.db.add(new_build)
    self.db.flush()
    self.flush_depchanges(new_build)
    return new_build
def submit_build(self, package):
    """
    Submit a Koji scratch build for the given package.

    The latest SRPM is located on the secondary Koji hub and submitted to
    the primary hub. Returns the flushed (uncommitted) Build, or None when
    the package has no SRPM to build.
    """
    build_record = Build(package_id=package.id, state=Build.RUNNING)
    name = package.name
    scratch_opts = {'arch_override': package.arch_override} \
        if package.arch_override else {}
    target_tag = package.collection.target_tag
    # SRPMs are taken from secondary, primary needs to be able to build
    # from relative URL constructed against secondary (internal redirect)
    srpm_lookup = koji_util.get_last_srpm(
        self.koji_sessions['secondary'], target_tag, name)
    if srpm_lookup:
        srpm_info, srpm_url = srpm_lookup
        # reset manual priority now that a build has been triggered
        package.manual_priority = 0
        build_record.task_id = koji_util.koji_scratch_build(
            self.koji_sessions['primary'], target_tag, name,
            srpm_url, scratch_opts)
        build_record.started = datetime.now()
        build_record.epoch = srpm_info['epoch']
        build_record.version = srpm_info['version']
        build_record.release = srpm_info['release']
        self.db.add(build_record)
        self.db.flush()
        self.flush_depchanges(build_record)
        return build_record
def register_real_builds(session, collection, package_build_infos):
    """
    Registers real builds for given build infos. Takes care of concurrency
    and commits the transaction.

    :param session: KoscheiBackendSession
    :param collection: collection the builds belong to
    :param package_build_infos: tuples in format (package_id, build_info)
    """
    # map Koji build states to koschei Build states; only finished builds
    # (COMPLETE/FAILED) are expected here
    state_map = {
        koji.BUILD_STATES['COMPLETE']: Build.COMPLETE,
        koji.BUILD_STATES['FAILED']: Build.FAILED,
    }
    # prepare ORM objects for insertion
    builds = [
        Build(
            real=True,
            state=state_map[build_info['state']],
            task_id=build_info['task_id'],
            epoch=build_info['epoch'],
            version=build_info['version'],
            release=build_info['release'],
            package_id=package_id,
        )
        for package_id, build_info in package_build_infos
    ]
    # process the input in chunks to prevent locking too many packages at once
    for chunk in util.chunks(builds, get_config('real_builds_insert_chunk')):
        # get koji tasks and populate repo_id
        # format: {build: [koji_task], ...}
        build_tasks = sync_tasks(session, collection, chunk, real=True)
        # discard builds with no repo_id, because those cannot be resolved
        build_tasks = {
            build: tasks for build, tasks in build_tasks.items()
            if build.repo_id
        }
        if not build_tasks:
            continue
        # get and lock packages to prevent concurrent build insertion
        packages = {
            p.id: p for p in session.db.query(Package).filter(
                Package.id.in_(
                    build.package_id for build in build_tasks)).lock_rows()
        }
        # find builds that may have been inserted in parallel
        # using (package_id, task_id) as lookup key
        # - task_id might not be enough - different collections may use
        #   different koji. same package_id implies same collection
        existing = set(
            session.db.query(Build.package_id, Build.task_id).filter(
                Build.package_id.in_(packages.keys())).filter(
                Build.real).filter(
                Build.task_id.in_(build.task_id for build in build_tasks)))
        # discard builds that have already been inserted in parallel
        build_tasks = {
            build: tasks for build, tasks in build_tasks.items()
            if (build.package_id, build.task_id) not in existing
        }
        # log what we're doing
        for build in build_tasks:
            package = packages[build.package_id]
            session.log.info(
                'Registering real build {}-{}-{} for collection {} (task_id {})'
                .format(
                    package.name, build.version, build.release,
                    package.collection, build.task_id,
                ))
        # insert valid builds
        session.db.bulk_insert(list(build_tasks.keys()))
        # set build_ids of new koji tasks (available only after bulk_insert)
        for build, tasks in build_tasks.items():
            for task in tasks:
                task.build_id = build.id
        # insert tasks
        insert_koji_tasks(session, build_tasks)
        # reset priorities
        clear_priority_data(session, packages.values())
        # commit per chunk, releasing the row locks taken above
        session.db.commit()
def register_real_builds(self, package_build_infos):
    """
    Registers real builds for given build infos. Takes care of concurrency
    and commits the transaction.

    :param package_build_infos: tuples in format (package_id, build_info)
    """
    # TODO send fedmsg for real builds?
    state_map = {
        koji.BUILD_STATES['COMPLETE']: Build.COMPLETE,
        koji.BUILD_STATES['FAILED']: Build.FAILED,
    }
    builds = [
        Build(task_id=build_info['task_id'], real=True,
              version=build_info['version'],
              epoch=build_info['epoch'],
              release=build_info['release'],
              package_id=package_id,
              state=state_map[build_info['state']])
        for package_id, build_info in package_build_infos
    ]
    registered = []
    for chunk in util.chunks(builds, 100):  # TODO configurable
        retries = 10
        while True:
            try:
                build_tasks = self.sync_tasks(
                    chunk, self.koji_sessions['secondary'])
                # FIX: the original deleted entries from build_tasks while
                # iterating its items(), which raises RuntimeError on
                # Python 3; filter into a new dict instead. Builds without
                # repo_id cannot be resolved, so they are discarded.
                build_tasks = {
                    build: tasks for build, tasks in build_tasks.items()
                    if build.repo_id
                }
                # FIX: materialize a list — bulk_insert gets a real list
                # instead of a dict view
                chunk = list(build_tasks.keys())
                self.db.bulk_insert(chunk)
                # build ids exist only after bulk_insert
                for build, tasks in build_tasks.items():
                    for task in tasks:
                        task.build_id = build.id
                self.insert_koji_tasks(build_tasks)
                self.db.commit()
                registered += chunk
                break
            except IntegrityError:
                # another daemon inserted some of the builds concurrently
                retries -= 1
                if not retries:
                    raise
                self.db.rollback()
                self.log.info("Retrying real build insertion")
                rows = self.db.query(Build.task_id)\
                    .filter_by(real=True)\
                    .filter(Build.task_id.in_(b.task_id for b in chunk))\
                    .all()
                # FIX: each row is a 1-tuple containing the task_id value
                # itself; the original unpacked it and then accessed
                # .task_id on the int, which would raise AttributeError
                existing_ids = {task_id for (task_id,) in rows}
                chunk = [b for b in chunk if b.task_id not in existing_ids]
    if registered:
        # pylint:disable=unused-variable
        # prefetch packages in one query so that Query.get below is served
        # from the session's identity map (used via sqla cache)
        pkgs = self.db.query(Package)\
            .filter(Package.id.in_(b.package_id for b in registered))\
            .all()
        for build in registered:
            package = self.db.query(Package).get(build.package_id)
            self.log.info(
                'Registering real build {}-{}-{} for collection {} (task_id {})'
                .format(
                    package.name, build.version, build.release,
                    self.db.query(Collection).get(package.collection_id),
                    build.task_id))