def new_update(request):
    """
    Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's alias must be specified in the
    ``edited`` parameter.

    If the ``from_tag`` parameter is specified and ``builds`` is missing or
    empty, the list of builds will be filled with the latest builds in this
    Koji tag. This is done by validate_from_tag() because the list of builds
    needs to be available in validate_acls().

    If the release is composed by Bodhi (i.e. a branched or stable release
    after the Bodhi activation point), ensure that the related tags
    ``from_tag``-pending-signing and ``from_tag``-testing exist and, if not,
    create them in Koji. If the state of the release is not `pending`, add its
    pending-signing tag and remove it if it's a side tag.

    Args:
        request (pyramid.request): The current request.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    # Same here, but it can be missing.
    data.pop('builds_from_tag', None)
    data.pop('sidetag_owner', None)

    build_nvrs = data.get('builds', [])
    from_tag = data.get('from_tag')

    caveats = []
    try:
        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in build_nvrs:
            name, version, release = request.buildinfo[nvr]['nvr']

            package = Package.get_or_create(request.db, request.buildinfo[nvr])

            # Also figure out the build type and create the build if absent.
            build_class = ContentType.infer_content_class(
                base=Build, build=request.buildinfo[nvr]['info'])
            build = build_class.get(nvr)

            if build is None:
                log.debug("Adding nvr %s, type %r", nvr, build_class)
                build = build_class(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        # Disable manual updates for releases not composed by Bodhi
        # see #4058
        if not from_tag:
            for release in releases:
                if not release.composed_by_bodhi:
                    request.errors.add(
                        'body', 'builds',
                        "Cannot manually create updates for a Release which is not "
                        "composed by Bodhi.\nRead the 'Automatic updates' page in "
                        "Bodhi docs about this error.")
                    request.db.rollback()
                    return

        # We want to go ahead and commit the transaction now so that the Builds are in the database.
        # Otherwise, there will be a race condition between robosignatory signing the Builds and the
        # signed handler attempting to mark the builds as signed. When we lose that race, the signed
        # handler doesn't see the Builds in the database and gives up. After that, nothing will mark
        # the builds as signed.
        request.db.commit()

        # After we commit the transaction, we need to get the builds and releases again,
        # since they were tied to the previous session that has now been terminated.
        builds = []
        releases = set()
        for nvr in build_nvrs:
            # At this moment, we are sure the builds are in the database (that is what the commit
            # was for actually).
            build = Build.get(nvr)
            builds.append(build)
            releases.add(build.release)

        if data.get('edited'):
            log.info('Editing update: %s' % data['edited'])

            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            data['from_tag'] = from_tag
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                    'into %i, one for each release.' % len(releases)
                })

            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release
                _data['from_tag'] = from_tag

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.alias)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)

            if from_tag:
                for u in updates:
                    builds = [b.nvr for b in u.builds]
                    if not u.release.composed_by_bodhi:
                        # Before the Bodhi activation point of a release, keep builds tagged
                        # with the side-tag and its associated tags.
                        side_tag_signing_pending = u.release.get_pending_signing_side_tag(
                            from_tag)
                        side_tag_testing_pending = u.release.get_pending_testing_side_tag(
                            from_tag)
                        handle_side_and_related_tags_task.delay(
                            builds=builds,
                            pending_signing_tag=side_tag_signing_pending,
                            from_tag=from_tag,
                            pending_testing_tag=side_tag_testing_pending)
                    else:
                        # After the Bodhi activation point of a release, add the pending-signing tag
                        # of the release to funnel the builds back into a normal workflow for a
                        # stable release.
                        pending_signing_tag = u.release.pending_signing_tag
                        candidate_tag = u.release.candidate_tag
                        handle_side_and_related_tags_task.delay(
                            builds=builds,
                            pending_signing_tag=pending_signing_tag,
                            from_tag=from_tag,
                            candidate_tag=candidate_tag)

    except LockedUpdateException as e:
        log.warning(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return

    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add('body', 'builds', 'Unable to create update. %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
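
# Illustrative sketch only (not part of the upstream view code): a rough shape of the
# validated payloads that new_update() above handles, limited to the fields the view itself
# reads. All values are hypothetical, and other validated fields (notes, type, severity,
# etc.) are assumed to exist but are omitted here.
EXAMPLE_NEW_UPDATE_PAYLOADS = {
    # Manual submission: an explicit list of build NVRs.
    'from_builds': {
        'builds': ['example-pkg-1.0-1.fc99'],   # hypothetical NVR
        'csrf_token': 'abc123',                 # popped before the models see the data
    },
    # Side-tag submission: 'builds' is filled in from the Koji tag by validate_from_tag().
    'from_side_tag': {
        'from_tag': 'f99-build-side-0001',      # hypothetical side-tag name
        'builds': [],                           # populated with the tag's latest builds
    },
}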
def __call__(self, message: fedora_messaging.api.Message) -> None:
    """Create updates from appropriately tagged builds.

    Args:
        message: The message we are processing.
    """
    body = message.body

    missing = []
    for mandatory in ('tag', 'build_id', 'name', 'version', 'release'):
        if mandatory not in body:
            missing.append(mandatory)
    if missing:
        log.debug(
            f"Received incomplete tag message. Missing: {', '.join(missing)}"
        )
        return

    btag = body['tag']
    bnvr = '{name}-{version}-{release}'.format(**body)

    koji = buildsys.get_session()

    kbuildinfo = koji.getBuild(bnvr)
    if not kbuildinfo:
        log.debug(f"Can't find Koji build for {bnvr}.")
        return

    if 'nvr' not in kbuildinfo:
        log.debug(f"Koji build info for {bnvr} doesn't contain 'nvr'.")
        return

    if 'owner_name' not in kbuildinfo:
        log.debug(
            f"Koji build info for {bnvr} doesn't contain 'owner_name'.")
        return

    if kbuildinfo['owner_name'] in config.get(
            'automatic_updates_blacklist'):
        log.debug(
            f"{bnvr} owned by {kbuildinfo['owner_name']} who is listed in "
            "automatic_updates_blacklist, skipping.")
        return

    # some APIs want the Koji build info, some others want the same
    # wrapped in a larger (request?) structure
    rbuildinfo = {
        'info': kbuildinfo,
        'nvr': kbuildinfo['nvr'].rsplit('-', 2),
    }

    with self.db_factory() as dbsession:
        rel = dbsession.query(Release).filter_by(
            create_automatic_updates=True, candidate_tag=btag).first()
        if not rel:
            log.debug(
                f"Ignoring build being tagged into {btag!r}, no release configured for "
                "automatic updates for it found.")
            return

        bcls = ContentType.infer_content_class(Build, kbuildinfo)
        build = bcls.get(bnvr)
        if build and build.update:
            log.info(
                f"Build, active update for {bnvr} exists already, skipping."
            )
            return

        if not build:
            log.debug(f"Build for {bnvr} doesn't exist yet, creating.")

            # Package.get_or_create() infers content type already
            log.debug("Getting/creating related package object.")
            pkg = Package.get_or_create(dbsession, rbuildinfo)

            log.debug("Creating build object, adding it to the DB.")
            build = bcls(nvr=bnvr, package=pkg, release=rel)
            dbsession.add(build)

        owner_name = kbuildinfo['owner_name']
        user = User.get(owner_name)
        if not user:
            log.debug(f"Creating bodhi user for '{owner_name}'.")
            # Leave email, groups blank, these will be filled
            # in or updated when they log into Bodhi next time, see
            # bodhi.server.security:remember_me().
            user = User(name=owner_name)
            dbsession.add(user)

        log.debug(f"Creating new update for {bnvr}.")
        try:
            changelog = build.get_changelog(lastupdate=True)
        except ValueError:
            # Often due to bot-generated builds
            # https://github.com/fedora-infra/bodhi/issues/4146
            changelog = None
        except Exception:
            # Re-raise exception, so that the message can be re-queued
            raise

        closing_bugs = []
        if changelog:
            log.debug("Adding changelog to update notes.")
            notes = f"""Automatic update for {bnvr}.

##### **Changelog**

```
{changelog}
```"""

            if rel.name not in config.get('bz_exclude_rels'):
                for b in re.finditer(config.get('bz_regex'), changelog, re.IGNORECASE):
                    idx = int(b.group(1))
                    log.debug(f'Adding bug #{idx} to the update.')
                    bug = Bug.get(idx)
                    if bug is None:
                        bug = Bug(bug_id=idx)
                        dbsession.add(bug)
                        dbsession.flush()
                    if bug not in closing_bugs:
                        closing_bugs.append(bug)
        else:
            notes = f"Automatic update for {bnvr}."

        update = Update(
            release=rel,
            builds=[build],
            bugs=closing_bugs,
            notes=notes,
            type=UpdateType.unspecified,
            stable_karma=3,
            unstable_karma=-3,
            autokarma=False,
            user=user,
            status=UpdateStatus.pending,
            critpath=Update.contains_critpath_component([build], rel.branch),
        )

        # Comment on the update that it was automatically created.
        update.comment(
            dbsession,
            str("This update was automatically created"),
            author="bodhi",
        )

        update.add_tag(update.release.pending_signing_tag)

        log.debug("Adding new update to the database.")
        dbsession.add(update)

        log.debug("Flushing changes to the database.")
        dbsession.flush()

        # Obsolete older updates which may be stuck in testing due to failed gating
        try:
            update.obsolete_older_updates(dbsession)
        except Exception as e:
            log.error(f'Problem obsoleting older updates: {e}')

        alias = update.alias
        buglist = [b.bug_id for b in update.bugs]

    # This must be run after dbsession is closed so changes are committed to db
    work_on_bugs_task.delay(alias, buglist)
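
# Illustrative sketch only (not part of the upstream handler code): a minimal tag-message
# body that would pass the mandatory-field check in __call__() above. Values are hypothetical;
# the handler only acts when 'tag' matches the candidate_tag of a release that has
# create_automatic_updates=True, and it reassembles the NVR as '{name}-{version}-{release}'.
EXAMPLE_TAG_MESSAGE_BODY = {
    'tag': 'f99-updates-candidate',   # hypothetical candidate tag
    'build_id': 1234567,              # hypothetical Koji build id
    'name': 'example-pkg',
    'version': '1.0',
    'release': '1.fc99',
}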
def new_update(request):
    """
    Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's alias must be specified in the
    ``edited`` parameter.

    Args:
        request (pyramid.request): The current request.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    caveats = []
    try:
        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in data['builds']:
            name, version, release = request.buildinfo[nvr]['nvr']

            package = Package.get_or_create(request.buildinfo[nvr])

            # Also figure out the build type and create the build if absent.
            build_class = ContentType.infer_content_class(
                base=Build, build=request.buildinfo[nvr]['info'])
            build = build_class.get(nvr)

            if build is None:
                log.debug("Adding nvr %s, type %r", nvr, build_class)
                build = build_class(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        # We want to go ahead and commit the transaction now so that the Builds are in the database.
        # Otherwise, there will be a race condition between robosignatory signing the Builds and the
        # signed handler attempting to mark the builds as signed. When we lose that race, the signed
        # handler doesn't see the Builds in the database and gives up. After that, nothing will mark
        # the builds as signed.
        request.db.commit()

        # After we commit the transaction, we need to get the builds and releases again, since they
        # were tied to the previous session that has now been terminated.
        builds = []
        releases = set()
        for nvr in data['builds']:
            # At this moment, we are sure the builds are in the database (that is what the commit
            # was for actually).
            build = Build.get(nvr)
            builds.append(build)
            releases.add(build.release)

        if data.get('edited'):
            log.info('Editing update: %s' % data['edited'])

            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                    'into %i, one for each release.' % len(releases)
                })

            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.alias)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)

    except LockedUpdateException as e:
        log.warning(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return

    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add('body', 'builds', 'Unable to create update. %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
def __call__(self, message: fedora_messaging.api.Message) -> None:
    """Create updates from appropriately tagged builds.

    Args:
        message: The message we are processing.
    """
    body = message.body

    missing = []
    for mandatory in ('tag', 'build_id', 'name', 'version', 'release'):
        if mandatory not in body:
            missing.append(mandatory)
    if missing:
        log.debug(
            f"Received incomplete tag message. Missing: {', '.join(missing)}"
        )
        return

    btag = body['tag']
    bnvr = '{name}-{version}-{release}'.format(**body)

    koji = buildsys.get_session()

    kbuildinfo = koji.getBuild(bnvr)
    if not kbuildinfo:
        log.debug(f"Can't find Koji build for {bnvr}.")
        return

    if 'nvr' not in kbuildinfo:
        log.debug(f"Koji build info for {bnvr} doesn't contain 'nvr'.")
        return

    if 'owner_name' not in kbuildinfo:
        log.debug(
            f"Koji build info for {bnvr} doesn't contain 'owner_name'.")
        return

    if kbuildinfo['owner_name'] in config.get(
            'automatic_updates_blacklist'):
        log.debug(
            f"{bnvr} owned by {kbuildinfo['owner_name']} who is listed in "
            "automatic_updates_blacklist, skipping.")
        return

    # some APIs want the Koji build info, some others want the same
    # wrapped in a larger (request?) structure
    rbuildinfo = {
        'info': kbuildinfo,
        'nvr': kbuildinfo['nvr'].rsplit('-', 2),
    }

    with self.db_factory() as dbsession:
        rel = dbsession.query(Release).filter_by(
            create_automatic_updates=True, candidate_tag=btag).first()
        if not rel:
            log.debug(
                f"Ignoring build being tagged into {btag!r}, no release configured for "
                "automatic updates for it found.")
            return

        bcls = ContentType.infer_content_class(Build, kbuildinfo)
        build = bcls.get(bnvr)
        if build and build.update:
            log.info(
                f"Build, active update for {bnvr} exists already, skipping."
            )
            return

        if not build:
            log.debug(f"Build for {bnvr} doesn't exist yet, creating.")

            # Package.get_or_create() infers content type already
            log.debug("Getting/creating related package object.")
            pkg = Package.get_or_create(dbsession, rbuildinfo)

            log.debug("Creating build object, adding it to the DB.")
            build = bcls(nvr=bnvr, package=pkg, release=rel)
            dbsession.add(build)

        owner_name = kbuildinfo['owner_name']
        user = User.get(owner_name)
        if not user:
            log.debug(f"Creating bodhi user for '{owner_name}'.")
            # Leave email, groups blank, these will be filled
            # in or updated when they log into Bodhi next time, see
            # bodhi.server.security:remember_me().
            user = User(name=owner_name)
            dbsession.add(user)

        log.debug(f"Creating new update for {bnvr}.")
        changelog = build.get_changelog(lastupdate=True)
        if changelog:
            notes = f"""Automatic update for {bnvr}.

##### **Changelog**

```
{changelog}
```"""
        else:
            notes = f"Automatic update for {bnvr}."

        update = Update(
            release=rel,
            builds=[build],
            notes=notes,
            type=UpdateType.unspecified,
            stable_karma=3,
            unstable_karma=-3,
            autokarma=False,
            user=user,
            status=UpdateStatus.pending,
        )

        # Comment on the update that it was automatically created.
        update.comment(
            dbsession,
            str("This update was automatically created"),
            author="bodhi",
        )

        update.add_tag(update.release.pending_signing_tag)

        log.debug("Adding new update to the database.")
        dbsession.add(update)

        log.debug("Committing changes to the database.")
        dbsession.commit()