def sorted_updates(updates):
    """
    Sort the given iterable of Updates so the highest version appears last.

    Order our updates so that the highest version gets tagged last so that it
    appears as the 'latest' in koji.

    Args:
        updates (iterable): An iterable of bodhi.server.models.Update objects to be sorted.
    Returns:
        tuple: A 2-tuple of lists. The first list contains builds that should be tagged
            synchronously in a specific order. The second list can be tagged asynchronously in
            koji with a multicall.
    """
    builds = defaultdict(set)
    # 'async' is a reserved keyword as of Python 3.7, so the list is named 'async_'.
    sync, async_ = [], []
    for update in updates:
        for build in update.builds:
            builds[build.nvr_name].add(build)
    # The sorted here is so we actually have a way to test this
    # Otherwise, we would be depending on the way Python orders dict keys
    for package in sorted(builds.keys()):
        if len(builds[package]) > 1:
            log.debug(builds[package])
            # Several builds of one package: they must be tagged in order so the
            # highest version ends up tagged last (hence the reversed sort).
            for build in sorted_builds(builds[package])[::-1]:
                if build.update not in sync:
                    sync.append(build.update)
                if build.update in async_:
                    async_.remove(build.update)
        else:
            build = list(builds[package])[0]
            if build.update not in async_ and build.update not in sync:
                async_.append(build.update)
    # The original implementation fell off the end and implicitly returned None,
    # violating the documented contract; return the two lists as promised.
    return sync, async_
def cmd(cmd, cwd=None):
    """
    Run the given command in a subprocess.

    Args:
        cmd (list or basestring): The command to be run. This may be expressed as a list to be
            passed directly to subprocess.Popen(), or as a basestring which will be processed
            with basestring.split() to form the list to pass to Popen().
        cwd (basestring or None): The current working directory to use when launching the
            subprocess.
    Returns:
        tuple: A 3-tuple of the standard output (basestring), standard error (basestring), and
            the process's return code (int).
    """
    log.info('Running %r', cmd)
    # A plain string command is accepted as a convenience; Popen wants a list.
    if isinstance(cmd, six.string_types):
        cmd = cmd.split()
    proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if out:
        log.debug(out)
    if err:
        # stderr output only counts as an error when the process actually failed.
        emit = log.debug if proc.returncode == 0 else log.error
        emit(err)
    if proc.returncode != 0:
        log.error('return code %s', proc.returncode)
    return out, err, proc.returncode
def _send_mail(from_addr, to_addr, body):
    """
    Send emails with smtplib.

    This is a lower level function than send_e-mail().

    Args:
        from_addr (str): The e-mail address to use in the envelope from field.
        to_addr (str): The e-mail address to use in the envelope to field.
        body (str): The body of the e-mail.
    """
    smtp_server = config.get('smtp_server')
    # Without a configured server, sending mail is a silent no-op by design.
    if not smtp_server:
        log.info('Not sending email: No smtp_server defined')
        return
    connection = None
    try:
        log.debug('Connecting to %s', smtp_server)
        connection = smtplib.SMTP(smtp_server)
        connection.sendmail(from_addr, [to_addr], body)
    except smtplib.SMTPRecipientsRefused as e:
        log.warning('"recipient refused" for %r, %r' % (to_addr, e))
    except Exception:
        # Mail delivery is best-effort; log and move on rather than crash.
        log.exception('Unable to send mail')
    finally:
        if connection:
            connection.quit()
def cmd(cmd, cwd=None, raise_on_error=False):
    """
    Run the given command in a subprocess.

    Args:
        cmd (list): The command to be run. This is expressed as a list to be passed directly to
            subprocess.Popen().
        cwd (str or None): The current working directory to use when launching the subprocess.
        raise_on_error (bool): If True, raise a RuntimeError if the command's exit code is
            non-0. Defaults to False.
    Returns:
        tuple: A 3-tuple of the standard output (str), standard error (str), and the process's
            return code (int).
    Raises:
        RuntimeError: If exception is True and the command's exit code is non-0.
    """
    joined = ' '.join(cmd)
    log.debug('Running {}'.format(joined))
    child = subprocess.Popen(cmd, cwd=cwd, shell=False, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    out, err = child.communicate()
    output = '{}\n{}'.format(out, err)
    if child.returncode != 0:
        msg = '{} returned a non-0 exit code: {}'.format(joined, child.returncode)
        log.error(msg)
        log.error(output)
        if raise_on_error:
            raise RuntimeError(msg)
    elif out or err:
        # On success, keep the captured output at debug level only.
        log.debug(f"subprocess output: {output}")
    return out, err, child.returncode
def flash_log(msg):
    """
    Emit the given message to the log at debug severity.

    Args:
        msg (basestring): The message to log.
    """
    log.debug(msg)
def get_all_packages():
    """
    Return a list of all packages in Koji.

    Returns:
        list: The list of package_names from the koji.listPackages() call.
    """
    log.debug('Fetching list of all packages...')
    session = buildsys.get_session()
    packages = session.listPackages()
    return [entry['package_name'] for entry in packages]
def work(testing, hide_existing, pkg=None, prefix=None):
    """
    Collect the latest builds tagged into active releases' koji tags.

    Args:
        testing (bool): If True, also query the testing, pending_testing and
            pending_signing tags in addition to the candidate tag.
        hide_existing (bool): If True, omit builds that already belong to an update.
        pkg (str or None): Restrict the koji query to this package, if given.
        prefix (str or None): Restrict the koji query to this package prefix, if given.
    Returns:
        list: Dictionaries describing the builds that were found.
    """
    found = []
    koji.multicall = True

    active_releases = db.query(models.Release).filter(
        models.Release.state.in_(
            (models.ReleaseState.pending,
             models.ReleaseState.frozen,
             models.ReleaseState.current)))

    query_args = dict(package=pkg, prefix=prefix, latest=True)

    tag_release = {}
    for release in active_releases:
        for tag in (release.candidate_tag, release.testing_tag,
                    release.pending_testing_tag, release.pending_signing_tag):
            tag_release[tag] = release.long_name
        koji.listTagged(release.candidate_tag, **query_args)
        if testing:
            koji.listTagged(release.testing_tag, **query_args)
            koji.listTagged(release.pending_testing_tag, **query_args)
            koji.listTagged(release.pending_signing_tag, **query_args)

    # A None response from koji is treated as an empty result set.
    for taglist in (koji.multiCall() or []):
        if isinstance(taglist, dict):
            # Errors from koji come back as dicts inside the multicall response;
            # detect and log them here.
            log.error(taglist)
            continue
        for build in taglist[0]:
            log.debug(build)
            item = {
                'nvr': build['nvr'],
                'id': build['id'],
                'package_name': build['package_name'],
                'owner_name': build['owner_name'],
                'release_name': tag_release[build['tag_name']]
            }
            # Prune duplicates
            # https://github.com/fedora-infra/bodhi/issues/450
            if item in found:
                continue
            if not hide_existing:
                found.append(item)
            else:
                # show only builds that don't have updates already
                b = request.db.query(models.Build).filter_by(nvr=build['nvr']).first()
                if (b and b.update is None) or not b:
                    found.append(item)
    return found
def send_testing_digest(self):
    """
    Send the updates-testing digest mail to the configured mailing lists.

    For each release with accumulated digest content, compose a report of
    security updates needing testing, unapproved critical path updates, and
    builds newly pushed to updates-testing, then mail it to the release's
    test announce list.
    """
    self.log.info('Sending updates-testing digest')
    sechead = u'The following %s Security updates need testing:\n Age URL\n'
    crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
    testhead = u'The following builds have been pushed to %s updates-testing\n\n'

    # dict.iteritems() does not exist on Python 3; items() works on both.
    for prefix, content in self.testing_digest.items():
        release = self.db.query(Release).filter_by(long_name=prefix).one()
        test_list_key = '%s_test_announce_list' % (
            release.id_prefix.lower().replace('-', '_'))
        test_list = config.get(test_list_key)
        if not test_list:
            # logging's warn() is a deprecated alias for warning().
            log.warning('%r undefined. Not sending updates-testing digest', test_list_key)
            continue

        log.debug("Sending digest for updates-testing %s" % prefix)
        maildata = u''
        security_updates = self.get_security_updates(prefix)
        if security_updates:
            maildata += sechead % prefix
            for update in security_updates:
                maildata += u' %3i %s %s\n' % (
                    update.days_in_testing, update.abs_url(), update.title)
            maildata += '\n\n'

        critpath_updates = self.get_unapproved_critpath_updates(prefix)
        if critpath_updates:
            maildata += crithead % prefix
            # Reuse the list we already queried rather than hitting the
            # database a second time.
            for update in critpath_updates:
                maildata += u' %3i %s %s\n' % (
                    update.days_in_testing, update.abs_url(), update.title)
            maildata += '\n\n'

        maildata += testhead % prefix
        # On Python 3, dict.keys() returns a view with no .sort(); build a
        # sorted list instead.
        updlist = sorted(content.keys())
        for pkg in updlist:
            maildata += u' %s\n' % pkg
        maildata += u'\nDetails about builds:\n\n'
        for nvr in updlist:
            maildata += u"\n" + self.testing_digest[prefix][nvr]
        mail.send_mail(config.get('bodhi_email'), test_list,
                       '%s updates-testing report' % prefix, maildata)
def sorted_updates(updates):
    """
    Sort the given iterable of Updates so the highest version appears last.

    Order our updates so that the highest version gets tagged last so that it
    appears as the 'latest' in koji.

    Args:
        updates (iterable): An iterable of bodhi.server.models.Update objects to be sorted.
    Returns:
        tuple: A 2-tuple of lists. The first list contains builds that should be tagged
            synchronously in a specific order. The second list can be tagged asynchronously
            in koji with a multicall.
    """
    builds = defaultdict(set)
    sync, async_ = [], []
    for update in updates:
        for build in update.builds:
            builds[build.nvr_name].add(build)

    # Iterate package names in sorted order so the outcome is deterministic
    # and therefore testable (otherwise dict ordering would leak in).
    for package in sorted(builds.keys()):
        package_builds = builds[package]
        if len(package_builds) == 1:
            # A lone build can be tagged asynchronously, unless its update
            # is already queued in either list.
            lone_update = list(package_builds)[0].update
            if lone_update not in async_ and lone_update not in sync:
                async_.append(lone_update)
        else:
            log.debug(package_builds)
            # Multiple builds of the same package must be tagged in order;
            # reversing the sort makes the highest version land last in sync.
            for build in sorted_builds(package_builds)[::-1]:
                if build.update not in sync:
                    sync.append(build.update)
                if build.update in async_:
                    async_.remove(build.update)

    log.info('sync = %s', [up.alias for up in sync])
    log.info('async_ = %s', [up.alias for up in async_])
    if not (len(set(sync) & set(async_)) == 0
            and len(set(sync) | set(async_)) == len(updates)):
        # There should be absolutely no way to hit this code path, but let's be paranoid, and
        # check every run, to make sure no update gets left behind. It makes sure that there
        # is no update in sync AND async, and that the combination of sync OR async_ is the
        # full set of updates.
        raise Exception('ERROR! SYNC+ASYNC != UPDATES! sorted_updates failed')  # pragma: no cover
    return sync, async_
def taskotron_results(settings, entity='results/latest', max_queries=10, **kwargs):
    """
    Yield resultsdb results using query arguments.

    Args:
        settings (bodhi.server.config.BodhiConfig): Bodhi's settings.
        entity (str): The API endpoint to use (see resultsdb documentation).
        max_queries (int): The maximum number of queries to perform (pages to retrieve).
            ``1`` means just a single page. ``None`` or ``0`` means no limit. Please note
            some tests might have thousands of results in the database and it's very
            reasonable to limit queries (thus the default value).
        kwargs (dict): Args that will be passed to resultsdb to specify what results to
            retrieve.
    Returns:
        generator or None: Yields Python objects loaded from ResultsDB's "data" field in its
            JSON response, or None if there was an Exception while performing the query.
    """
    max_queries = max_queries or 0
    url = settings['resultsdb_api_url'] + "/api/v2.0/" + entity
    if kwargs:
        url = url + "?" + urlencode(kwargs)

    keep_going = True
    pages_fetched = 0
    try:
        while keep_going and url:
            log.debug("Grabbing %r" % url)
            response = requests.get(url, timeout=60)
            if response.status_code != 200:
                raise IOError("status code was %r" % response.status_code)
            payload = response.json()
            for datum in payload['data']:
                yield datum
            # Follow resultsdb's pagination link until exhausted or capped.
            url = payload.get('next')
            pages_fetched += 1
            if max_queries and pages_fetched >= max_queries and url:
                log.debug('Too many result pages, aborting at: %r' % url)
                break
    except Exception as e:
        # Best-effort: a failed query logs and simply ends the generator.
        log.exception("Problem talking to %r : %r" % (url, str(e)))
def sorted_updates(updates):
    """
    Sort the given iterable of Updates so the highest version appears last.

    Order our updates so that the highest version gets tagged last so that it
    appears as the 'latest' in koji.

    Args:
        updates (iterable): An iterable of bodhi.server.models.Update objects to be sorted.
    Returns:
        tuple: A 2-tuple of lists. The first list contains builds that should be tagged
            synchronously in a specific order. The second list can be tagged asynchronously
            in koji with a multicall.
    """
    builds = defaultdict(set)
    build_to_update = {}
    # 'async' is a reserved keyword as of Python 3.7, so the list is named 'async_'.
    sync, async_ = [], []
    for update in updates:
        for build in update.builds:
            n, v, r = get_nvr(build.nvr)
            builds[n].add(build.nvr)
            build_to_update[build.nvr] = update
    for package in builds:
        if len(builds[package]) > 1:
            log.info('Found multiple %s packages' % package)
            log.debug(builds[package])
            # Tag multi-build packages in order so the highest version is last.
            for build in sorted_builds(builds[package])[::-1]:
                update = build_to_update[build]
                if update not in sync:
                    sync.append(update)
                if update in async_:
                    async_.remove(update)
        else:
            update = build_to_update[builds[package].pop()]
            if update not in async_ and update not in sync:
                async_.append(update)
    # The original implementation fell off the end and implicitly returned None,
    # violating the documented contract; return the two lists as promised.
    return sync, async_
def remember_me(context: 'mako.runtime.Context', request: 'pyramid.request.Request', info: dict,
                *args, **kw) -> HTTPFound:
    """
    Remember information about a newly logged in user given by the OpenID provider.

    This is configured via the openid.success_callback configuration, and is called upon
    successful login.

    Args:
        context: The current template rendering context. Unused.
        request: The current request.
        info: The information passed to Bodhi from the OpenID provider about the authenticated
            user. This includes things like the user's username, e-mail address and groups.
        args: A list of additional positional parameters. Unused.
        kw: A dictionary of additional keyword parameters. Unused.
    Returns:
        A 302 redirect to the URL the user was visiting before they clicked login, or home if
        they have not used a valid OpenID provider.
    """
    log.debug('remember_me(%s)' % locals())
    log.debug('remember_me: request.params = %r' % request.params)
    endpoint = request.params['openid.op_endpoint']
    allowed_provider = request.registry.settings['openid.provider']
    if endpoint != allowed_provider:
        # Only the configured provider is allowed to authenticate users.
        log.warning('Invalid OpenID provider: %s' % endpoint)
        raise HTTPUnauthorized(
            'Invalid OpenID provider. You can only use: %s' % allowed_provider
        )

    username = info['sreg']['nickname']
    email = info['sreg']['email']
    log.debug('remember_me: groups = %s' % info['groups'])
    log.info('%s successfully logged in' % username)
    create_or_update_user(request.db, username, email, info["groups"])

    # Attach the session cookie headers to the post-login redirect.
    headers = remember(request, username)
    response = get_final_redirect(request)
    response.headerlist.extend(headers)
    return response
def get_template(update, use_template='fedora_errata_template'):
    """
    Build the update notice for a given update.

    Args:
        update (bodhi.server.models.Update): The update to generate a template about.
        use_template (basestring): The name of the variable in bodhi.server.mail that references
            the template to generate this notice with.
    Returns:
        list: A list of templates for the given update.
    """
    # Imported here rather than at module level — presumably to avoid a
    # circular import with bodhi.server.models; TODO confirm.
    from bodhi.server.models import UpdateStatus, UpdateType

    use_template = read_template(use_template)
    line = six.text_type('-' * 80) + '\n'
    templates = []

    # One notice (subject, body) pair is generated per build in the update.
    for build in update.builds:
        h = get_rpm_header(build.nvr)
        info = {}
        info['date'] = str(update.date_pushed)
        info['name'] = h['name']
        info['summary'] = h['summary']
        info['version'] = h['version']
        info['release'] = h['release']
        info['url'] = h['url']
        if update.status is UpdateStatus.testing:
            info['testing'] = ' Test'
            info['yum_repository'] = ' --enablerepo=updates-testing'
        else:
            info['testing'] = ''
            info['yum_repository'] = ''

        info['subject'] = u"%s%s%s Update: %s" % (
            update.type is UpdateType.security and '[SECURITY] ' or '',
            update.release.long_name, info['testing'], build.nvr)
        info['updateid'] = update.alias
        info['description'] = h['description']
        info['product'] = update.release.long_name
        info['notes'] = ""
        if update.notes and len(update.notes):
            info['notes'] = u"Update Information:\n\n%s\n" % \
                '\n'.join(wrap(update.notes, width=80))
            info['notes'] += line

        # Add this update's referenced Bugzillas and CVEs
        i = 1
        info['references'] = ""
        if len(update.bugs) or len(update.cves):
            info['references'] = u"References:\n\n"
            parent = True in [bug.parent for bug in update.bugs]
            for bug in update.bugs:
                # Don't show any tracker bugs for security updates
                if update.type is UpdateType.security:
                    # If there is a parent bug, don't show trackers
                    if parent and not bug.parent:
                        log.debug("Skipping tracker bug %s" % bug)
                        continue
                # Only append " - <title>" when a real title was fetched.
                title = (
                    bug.title != 'Unable to fetch title' and
                    bug.title != 'Invalid bug number') \
                    and ' - %s' % bug.title or ''
                info['references'] += u" [ %d ] Bug #%d%s\n %s\n" % \
                    (i, bug.bug_id, title, bug.url)
                i += 1
            for cve in update.cves:
                info['references'] += u" [ %d ] %s\n %s\n" % \
                    (i, cve.cve_id, cve.url)
                i += 1
            info['references'] += line

        # Find the most recent update for this package, other than this one
        try:
            lastpkg = build.get_latest()
        except AttributeError:
            # Not all build types have the get_latest() method, such as ModuleBuilds.
            lastpkg = None

        # Grab the RPM header of the previous update, and generate a ChangeLog
        info['changelog'] = u""
        if lastpkg:
            oldh = get_rpm_header(lastpkg)
            oldtime = oldh['changelogtime']
            text = oldh['changelogtext']
            del oldh
            if not text:
                oldtime = 0
            elif len(text) != 1:
                # With multiple changelog entries the header's changelogtime is
                # a list; use the first (most recent) timestamp as the cutoff.
                oldtime = oldtime[0]
            info['changelog'] = u"ChangeLog:\n\n%s%s" % \
                (to_unicode(build.get_changelog(oldtime)), line)

        try:
            templates.append((info['subject'], use_template % info))
        except UnicodeDecodeError:
            # We can't trust the strings we get from RPM
            log.debug("UnicodeDecodeError! Will try again after decoding")
            for (key, value) in info.items():
                if value:
                    info[key] = to_unicode(value)
            templates.append((info['subject'], use_template % info))

    return templates
def work(testing, hide_existing, pkg=None, prefix=None):
    """
    Collect the latest builds tagged into the koji tags of active releases.

    Args:
        testing (bool): If True, also query the testing, pending_testing and
            pending_signing tags in addition to the candidate tag.
        hide_existing (bool): If True, exclude builds already attached to an update.
        pkg (str or None): If given, restrict the koji query to this package.
        prefix (str or None): If given, restrict the koji query to this package prefix.
    Returns:
        list: Dicts describing the found builds (nvr, id, package_name, owner_name
            and, when the tag maps to a known release, release_name).
    """
    result = []
    koji.multicall = True

    releases = db.query(models.Release) \
        .filter(
            models.Release.state.in_(
                (models.ReleaseState.pending,
                 models.ReleaseState.frozen,
                 models.ReleaseState.current)))

    if hide_existing:
        # We want to filter out builds associated with an update.
        # Don't filter by releases here, because the associated update
        # might be archived but the build might be inherited into an active
        # release. If this gives performance troubles later on, caching
        # this set should be easy enough.
        # NOTE: "!= None" is the SQLAlchemy column comparison; "is not None"
        # would not generate SQL.
        associated_build_nvrs = set(
            row[0] for row in db.query(models.Build.nvr).filter(
                models.Build.update_id != None))

    kwargs = dict(package=pkg, prefix=prefix, latest=True)

    # Map every tag of every active release to its release's long name, and
    # queue up the koji queries in the same pass (answered by one multicall).
    tag_release = dict()
    for release in releases:
        tag_release[release.candidate_tag] = release.long_name
        tag_release[release.testing_tag] = release.long_name
        tag_release[release.pending_testing_tag] = release.long_name
        tag_release[release.pending_signing_tag] = release.long_name
        koji.listTagged(release.candidate_tag, **kwargs)
        if testing:
            koji.listTagged(release.testing_tag, **kwargs)
            koji.listTagged(release.pending_testing_tag, **kwargs)
            koji.listTagged(release.pending_signing_tag, **kwargs)

    response = koji.multiCall() or []  # Protect against None

    for taglist in response:
        # if the call to koji results in errors, it returns them
        # in the response as dicts. Here we detect these, and log
        # the errors
        if isinstance(taglist, dict):
            log.error(taglist)
        else:
            for build in taglist[0]:
                log.debug(build)

                if hide_existing and build['nvr'] in associated_build_nvrs:
                    continue

                item = {
                    'nvr': build['nvr'],
                    'id': build['id'],
                    'package_name': build['package_name'],
                    'owner_name': build['owner_name'],
                }
                # The build's tag might not be present in tag_release
                # because its associated release is archived and therefore
                # filtered out in the query above.
                if build['tag_name'] in tag_release:
                    item['release_name'] = tag_release[build['tag_name']]

                # Prune duplicates
                # https://github.com/fedora-infra/bodhi/issues/450
                if item not in result:
                    result.append(item)
    return result
def get_template(update: 'Update', use_template: str = 'fedora_errata_template') -> list:
    """
    Build the update notice for a given update.

    Args:
        update: The update to generate a template about.
        use_template: The name of the variable in bodhi.server.mail that references the
            template to generate this notice with.
    Returns:
        A list of templates for the given update.
    """
    # Imported here rather than at module level — presumably to avoid a
    # circular import with bodhi.server.models; TODO confirm.
    from bodhi.server.models import UpdateStatus, UpdateType

    use_template = read_template(use_template)
    line = str('-' * 80) + '\n'
    templates = []

    # One notice (subject, body) pair is generated per build in the update.
    for build in update.builds:
        h = get_rpm_header(build.nvr)
        info = {}
        info['date'] = str(update.date_pushed)
        info['name'] = h['name']
        info['summary'] = h['summary']
        info['version'] = h['version']
        info['release'] = h['release']
        info['url'] = h['url']
        if update.status is UpdateStatus.testing:
            info['testing'] = ' Test'
            info['yum_repository'] = ' --enablerepo=updates-testing'
        else:
            info['testing'] = ''
            info['yum_repository'] = ''

        info['subject'] = "%s%s%s Update: %s" % (
            update.type is UpdateType.security and '[SECURITY] ' or '',
            update.release.long_name, info['testing'], build.nvr)
        info['updateid'] = update.alias
        info['description'] = h['description']
        info['product'] = update.release.long_name
        info['notes'] = ""
        if update.notes and len(update.notes):
            info['notes'] = "Update Information:\n\n%s\n" % \
                '\n'.join(wrap(update.notes, width=80))
            info['notes'] += line

        # Add this update's referenced Bugzillas
        i = 1
        info['references'] = ""
        if update.bugs:
            info['references'] = "References:\n\n"
            parent = True in [bug.parent for bug in update.bugs]
            for bug in update.bugs:
                # Don't show any tracker bugs for security updates
                if update.type is UpdateType.security:
                    # If there is a parent bug, don't show trackers
                    if parent and not bug.parent:
                        log.debug("Skipping tracker bug %s" % bug)
                        continue
                # Only append " - <title>" when a real title was fetched.
                title = (
                    bug.title != 'Unable to fetch title' and
                    bug.title != 'Invalid bug number') \
                    and ' - %s' % bug.title or ''
                info['references'] += " [ %d ] Bug #%d%s\n %s\n" % \
                    (i, bug.bug_id, title, bug.url)
                i += 1
            info['references'] += line

        # generate a ChangeLog
        info['changelog'] = ""
        changelog = build.get_changelog(lastupdate=True)
        if changelog is not None:
            info['changelog'] = "ChangeLog:\n\n%s%s" % \
                (changelog, line)

        templates.append((info['subject'], use_template % info))

    return templates
def latest_candidates(request):
    """
    Return the most recent candidate builds for a given package name.

    For a given `package`, this method returns the most recent builds tagged into the
    Release.candidate_tag for all Releases.

    Args:
        request (pyramid.request.Request): The current request. The package name is specified
            in the request's "package" parameter.
    Returns:
        list: A list of dictionaries of the found builds. Each dictionary has 5 keys: "nvr"
            maps to the build's nvr field, "id" maps to the build's id, "tag_name" is the tag
            of the build, owner_name is the person who built the package in koji, and
            'release_name' is the bodhi release name of the package.
    """
    koji = request.koji
    db = request.db

    def work(testing, pkg=None, prefix=None):
        # Gather the latest builds from the relevant koji tags of every
        # active (pending, frozen or current) release via one multicall.
        result = []
        koji.multicall = True

        releases = db.query(models.Release) \
            .filter(
                models.Release.state.in_(
                    (models.ReleaseState.pending,
                     models.ReleaseState.frozen,
                     models.ReleaseState.current)))

        kwargs = dict(package=pkg, prefix=prefix, latest=True)

        tag_release = dict()
        for release in releases:
            tag_release[release.candidate_tag] = release.long_name
            tag_release[release.testing_tag] = release.long_name
            tag_release[release.pending_testing_tag] = release.long_name
            tag_release[release.pending_signing_tag] = release.long_name
            koji.listTagged(release.candidate_tag, **kwargs)
            if testing:
                koji.listTagged(release.testing_tag, **kwargs)
                koji.listTagged(release.pending_testing_tag, **kwargs)
                koji.listTagged(release.pending_signing_tag, **kwargs)

        response = koji.multiCall() or []  # Protect against None

        for taglist in response:
            # if the call to koji results in errors, it returns them
            # in the response as dicts. Here we detect these, and log
            # the errors
            if isinstance(taglist, dict):
                log.error(taglist)
            else:
                for build in taglist[0]:
                    log.debug(build)
                    item = {
                        'nvr': build['nvr'],
                        'id': build['id'],
                        'package_name': build['package_name'],
                        'owner_name': build['owner_name'],
                        'release_name': tag_release[build['tag_name']]
                    }
                    # Prune duplicates
                    # https://github.com/fedora-infra/bodhi/issues/450
                    if item not in result:
                        result.append(item)
        return result

    pkg = request.params.get('package')
    prefix = request.params.get('prefix')
    testing = asbool(request.params.get('testing'))
    log.debug('latest_candidate(%r, %r)' % (pkg, testing))

    # Exactly one of package / prefix drives the query.
    if pkg:
        result = work(testing, pkg=pkg)
    else:
        result = work(testing, prefix=prefix)

    log.debug(result)
    return result
def save_stack(request):
    """
    Save a stack.

    Args:
        request (pyramid.request): The current web request.
    Returns:
        dict: A dictionary with key "stack" that indexes the newly created Stack.
    """
    data = request.validated
    db = request.db
    user = User.get(request.user.name)

    # Fetch or create the stack
    stack = Stack.get(data['name'])
    if not stack:
        stack = Stack(name=data['name'], users=[user])
        db.add(stack)
        db.flush()

    # Only stack owners, or members of one of the stack's groups, may modify
    # an existing stack; anyone else gets a 403 added to the request errors.
    if stack.users or stack.groups:
        if user in stack.users:
            log.info('%s is an owner of the %s', user.name, stack.name)
        else:
            for group in user.groups:
                if group in stack.groups:
                    log.info('%s is a member of the %s group', user.name, stack.name)
                    break
            else:
                # for/else: no group matched, so the user is not authorized.
                log.warning('%s is not an owner of the %s stack', user.name, stack.name)
                log.debug('owners = %s; groups = %s', stack.users, stack.groups)
                request.errors.add('body', 'name', '%s does not have privileges'
                                   ' to modify the %s stack' % (user.name, stack.name))
                request.errors.status = HTTPForbidden.code
                return

    # Update the stack description
    desc = data['description']
    if desc:
        stack.description = desc

    # Update the stack requirements
    # If the user passed in no value at all for requirements, then use
    # the site defaults. If, however, the user passed in the empty string, we
    # assume they mean *really*, no requirements so we leave the value null.
    reqs = data['requirements']
    if reqs is None:
        stack.requirements = config.get('site_requirements')
    elif reqs:
        stack.requirements = reqs

    stack.update_relationship('users', User, data, db)
    stack.update_relationship('groups', Group, data, db)

    # We make a special case out of packages here, since when a package is
    # added to a stack, we want to give it the same requirements as the stack
    # has. See https://github.com/fedora-infra/bodhi/issues/101
    new, same, rem = stack.update_relationship('packages', Package, data, db)
    if stack.requirements:
        additional = list(tokenize(stack.requirements))

        for name in new:
            package = Package.get(name)
            original = package.requirements
            original = [] if not original else list(tokenize(original))
            # Merge the stack's requirements into the package's, de-duplicated.
            package.requirements = " ".join(list(set(original + additional)))

    log.info('Saved %s stack', data['name'])
    notifications.publish(topic='stack.save', msg=dict(
        stack=stack, agent=user.name))

    return dict(stack=stack)
def latest_candidates(request):
    """
    Return the most recent candidate builds for a given package name.

    For a given `package`, this method returns the most recent builds tagged into the
    Release.candidate_tag for all Releases.

    Args:
        request (pyramid.request.Request): The current request. The package name is specified
            in the request's "package" parameter.
    Returns:
        list: A list of dictionaries of the found builds. Each dictionary has 5 keys: "nvr"
            maps to the build's nvr field, "id" maps to the build's id, "tag_name" is the tag
            of the build, owner_name is the person who built the package in koji, and
            'release_name' is the bodhi release name of the package.
    """
    koji = request.koji
    db = request.db

    def work(testing, hide_existing, pkg=None, prefix=None):
        # Gather the latest builds from the relevant koji tags of every
        # active (pending, frozen or current) release via one multicall.
        result = []
        koji.multicall = True

        releases = db.query(models.Release) \
            .filter(
                models.Release.state.in_(
                    (models.ReleaseState.pending,
                     models.ReleaseState.frozen,
                     models.ReleaseState.current)))

        if hide_existing:
            # We want to filter out builds associated with an update.
            # Since the candidate_tag is removed when an update is pushed to
            # stable, we only need a list of builds that are associated to
            # updates still in pending state.
            # Don't filter by releases here, because the associated update
            # might be archived but the build might be inherited into an active
            # release. If this gives performance troubles later on, caching
            # this set should be easy enough.
            associated_build_nvrs = set(
                row[0] for row in db.query(models.Build.nvr).
                join(models.Update).
                filter(models.Update.status == models.UpdateStatus.pending)
            )

        kwargs = dict(package=pkg, prefix=prefix, latest=True)

        tag_release = dict()
        for release in releases:
            tag_release[release.candidate_tag] = release.long_name
            tag_release[release.testing_tag] = release.long_name
            tag_release[release.pending_testing_tag] = release.long_name
            tag_release[release.pending_signing_tag] = release.long_name
            koji.listTagged(release.candidate_tag, **kwargs)
            if testing:
                koji.listTagged(release.testing_tag, **kwargs)
                koji.listTagged(release.pending_testing_tag, **kwargs)
                # Some releases may not define a pending_signing_tag.
                if release.pending_signing_tag:
                    koji.listTagged(release.pending_signing_tag, **kwargs)

        response = koji.multiCall() or []  # Protect against None

        for taglist in response:
            # if the call to koji results in errors, it returns them
            # in the response as dicts. Here we detect these, and log
            # the errors
            if isinstance(taglist, dict):
                log.error('latest_candidates endpoint asked Koji about a non-existent tag:')
                log.error(taglist)
            else:
                for build in taglist[0]:
                    if hide_existing and build['nvr'] in associated_build_nvrs:
                        continue

                    item = {
                        'nvr': build['nvr'],
                        'id': build['id'],
                        'package_name': build['package_name'],
                        'owner_name': build['owner_name'],
                    }
                    # The build's tag might not be present in tag_release
                    # because its associated release is archived and therefore
                    # filtered out in the query above.
                    if build['tag_name'] in tag_release:
                        item['release_name'] = tag_release[build['tag_name']]

                    # Prune duplicates
                    # https://github.com/fedora-infra/bodhi/issues/450
                    if item not in result:
                        result.append(item)
        return result

    pkg = request.params.get('package')
    prefix = request.params.get('prefix')
    testing = asbool(request.params.get('testing'))
    hide_existing = asbool(request.params.get('hide_existing'))
    log.debug('latest_candidate(%r, %r, %r)' % (pkg, testing, hide_existing))

    # Exactly one of package / prefix drives the query.
    if pkg:
        result = work(testing, hide_existing, pkg=pkg)
    else:
        result = work(testing, hide_existing, prefix=prefix)

    return result
def new_update(request):
    """
    Save an update.

    This entails either creating a new update, or editing an existing one. To edit an existing
    update, the update's alias must be specified in the ``edited`` parameter.

    Args:
        request (pyramid.request): The current request.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    caveats = []
    try:
        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in data['builds']:
            name, version, release = request.buildinfo[nvr]['nvr']

            package = Package.get_or_create(request.buildinfo[nvr])

            # Also figure out the build type and create the build if absent.
            build_class = ContentType.infer_content_class(
                base=Build, build=request.buildinfo[nvr]['info'])
            build = build_class.get(nvr)
            if build is None:
                log.debug("Adding nvr %s, type %r", nvr, build_class)
                build = build_class(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        # We want to go ahead and commit the transaction now so that the Builds are in the
        # database. Otherwise, there will be a race condition between robosignatory signing
        # the Builds and the signed handler attempting to mark the builds as signed. When we
        # lose that race, the signed handler doesn't see the Builds in the database and gives
        # up. After that, nothing will mark the builds as signed.
        request.db.commit()

        # After we commit the transaction, we need to get the builds and releases again,
        # since they were tied to the previous session that has now been terminated.
        builds = []
        releases = set()
        for nvr in data['builds']:
            # At this moment, we are sure the builds are in the database (that is what the
            # commit was for actually).
            build = Build.get(nvr)
            builds.append(build)
            releases.add(build.release)

        if data.get('edited'):
            log.info('Editing update: %s' % data['edited'])

            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            # One update is created per release; warn the user when the
            # submission is being split.
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                                   'into %i, one for each release.' % len(releases)
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.alias)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)
    except LockedUpdateException as e:
        log.warning(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add('body', 'builds', 'Unable to create update. %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            # Obsoletion problems are reported as caveats, not failures.
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats
    return result
def call_api(api_url, service_name, error_key=None, method='GET', data=None, headers=None,
             retries=0):
    """
    Perform an HTTP request with response type and error handling.

    Args:
        api_url (str): The URL to query.
        service_name (str): The service name being queried (used to form human friendly error
            messages).
        error_key (str): The key that indexes error messages in the JSON body for the given
            service. If this is set to None, the JSON response will be used as the error message.
        method (str): The HTTP method to use for the request. Defaults to ``GET``.
        data (dict): The request payload, JSON-encoded into the body for ``POST`` requests.
            Ignored for ``GET`` requests.
        headers (dict): The headers to send along with a ``POST`` request (defaults to a JSON
            Content-Type). Ignored for ``GET`` requests.
        retries (int): The number of times to retry, each after a 1 second sleep, if we get a
            non-2xx HTTP code. Defaults to 0 (no retries).
    Returns:
        dict: A dictionary representing the JSON response from the remote service.
    Raises:
        RuntimeError: If the server did not give us a 2xx code.
    """
    if data is None:
        data = dict()
    log.debug("Querying url: %s", api_url)
    if method == 'POST':
        if headers is None:
            headers = {'Content-Type': 'application/json'}
        base_error_msg = (
            'Bodhi failed to send POST request to {0} at the following URL '
            '"{1}". The status code was "{2}".')
        rv = http_session.post(api_url, headers=headers, data=json.dumps(data), timeout=60)
    else:
        base_error_msg = (
            'Bodhi failed to get a resource from {0} at the following URL '
            '"{1}". The status code was "{2}".')
        rv = http_session.get(api_url, timeout=60)

    if 200 <= rv.status_code < 300:
        return rv.json()
    elif retries:
        # Back off for a second, then recurse with one fewer retry remaining.
        time.sleep(1)
        return call_api(api_url, service_name, error_key, method, data, headers, retries - 1)
    elif rv.status_code == 500:
        log.debug(rv.text)
        # There will be no JSON with an error message here
        error_msg = base_error_msg.format(
            service_name, api_url, rv.status_code)
        log.error(error_msg)
        raise RuntimeError(error_msg)
    else:
        log.debug(rv.text)
        # If it's not a 500 error, we can assume that the API returned an error
        # message in JSON that we can log
        try:
            rv_error = rv.json()
            if error_key is not None:
                rv_error = rv_error.get(error_key)
        except ValueError:
            # Body was not valid JSON; report an empty error detail.
            rv_error = ''
        error_msg = base_error_msg.format(
            service_name, api_url, rv.status_code)
        error_msg = '{0} The error was "{1}".'.format(error_msg, rv_error)
        log.error(error_msg)
        raise RuntimeError(error_msg)
def new_update(request):
    """
    Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's alias must be specified in the
    ``edited`` parameter.

    If the ``from_tag`` parameter is specified and ``builds`` is missing or
    empty, the list of builds will be filled with the latest builds in this
    Koji tag. This is done by validate_from_tag() because the list of builds
    needs to be available in validate_acls().

    If the release is composed by Bodhi (i.e. a branched or stable release
    after the Bodhi activation point), ensure that related tags
    ``from_tag``-pending-signing and ``from_tag``-testing exists and if not
    create them in Koji. If the state of the release is not `pending`, add its
    pending-signing tag and remove it if it's a side tag.

    Args:
        request (pyramid.request): The current request.
    Returns:
        dict or None: The JSON representation of the created/edited update(s)
            with a ``caveats`` list attached, or None if an error occurred (the
            error is recorded on ``request.errors``).
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    # Same here, but it can be missing.
    data.pop('builds_from_tag', None)
    data.pop('sidetag_owner', None)

    build_nvrs = data.get('builds', [])
    from_tag = data.get('from_tag')

    # Non-fatal notes accumulated along the way and returned to the caller.
    caveats = []
    try:
        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in build_nvrs:
            # NOTE(review): name/version/release are unpacked here but not used
            # below — presumably kept for the implicit "NVR is well-formed"
            # check; confirm before removing.
            name, version, release = request.buildinfo[nvr]['nvr']

            package = Package.get_or_create(request.db, request.buildinfo[nvr])

            # Also figure out the build type and create the build if absent.
            build_class = ContentType.infer_content_class(
                base=Build, build=request.buildinfo[nvr]['info'])
            build = build_class.get(nvr)

            if build is None:
                log.debug("Adding nvr %s, type %r", nvr, build_class)
                build = build_class(nvr=nvr, package=package)
                request.db.add(build)
                # Flush so the new Build gets persisted before we reference it.
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        # Disable manual updates for releases not composed by Bodhi
        # see #4058
        if not from_tag:
            for release in releases:
                if not release.composed_by_bodhi:
                    request.errors.add(
                        'body', 'builds',
                        "Cannot manually create updates for a Release which is not "
                        "composed by Bodhi.\nRead the 'Automatic updates' page in "
                        "Bodhi docs about this error.")
                    # Undo the Package/Build rows added above before bailing out.
                    request.db.rollback()
                    return

        # We want to go ahead and commit the transaction now so that the Builds are in the database.
        # Otherwise, there will be a race condition between robosignatory signing the Builds and the
        # signed handler attempting to mark the builds as signed. When we lose that race, the signed
        # handler doesn't see the Builds in the database and gives up. After that, nothing will mark
        # the builds as signed.
        request.db.commit()

        # After we commit the transaction, we need to get the builds and releases again,
        # since they were tied to the previous session that has now been terminated.
        builds = []
        releases = set()
        for nvr in build_nvrs:
            # At this moment, we are sure the builds are in the database (that is what the commit
            # was for actually).
            build = Build.get(nvr)
            builds.append(build)
            releases.add(build.release)

        if data.get('edited'):
            # Editing: a single existing update is modified in place.
            log.info('Editing update: %s' % data['edited'])

            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            data['from_tag'] = from_tag
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            # Creating: one update per distinct release found among the builds.
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                                   'into %i, one for each release.' % len(releases)
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release
                _data['from_tag'] = from_tag

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.alias)

                updates.append(result)
                caveats.extend(_caveats)

            # With multiple releases the result is a wrapper dict of updates.
            if len(releases) > 1:
                result = dict(updates=updates)

        # NOTE(review): on the `edited` path above, `updates` is never assigned
        # before this loop — an edit request that also carries `from_tag` would
        # hit a NameError here (swallowed by the generic except below). Confirm
        # whether edits with from_tag are expected to reach this point.
        if from_tag:
            for u in updates:
                # `builds` is re-bound here from Build objects to NVR strings
                # for the Koji task payload.
                builds = [b.nvr for b in u.builds]
                if not u.release.composed_by_bodhi:
                    # Before the Bodhi activation point of a release, keep builds tagged
                    # with the side-tag and its associate tags.
                    side_tag_signing_pending = u.release.get_pending_signing_side_tag(
                        from_tag)
                    side_tag_testing_pending = u.release.get_pending_testing_side_tag(
                        from_tag)
                    handle_side_and_related_tags_task.delay(
                        builds=builds,
                        pending_signing_tag=side_tag_signing_pending,
                        from_tag=from_tag,
                        pending_testing_tag=side_tag_testing_pending)
                else:
                    # After the Bodhi activation point of a release, add the pending-signing tag
                    # of the release to funnel the builds back into a normal workflow for a
                    # stable release.
                    pending_signing_tag = u.release.pending_signing_tag
                    candidate_tag = u.release.candidate_tag
                    handle_side_and_related_tags_task.delay(
                        builds=builds,
                        pending_signing_tag=pending_signing_tag,
                        from_tag=from_tag,
                        candidate_tag=candidate_tag)
    except LockedUpdateException as e:
        log.warning(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        # Catch-all boundary: report the failure to the API client.
        log.exception('Failed to create update')
        request.errors.add('body', 'builds', 'Unable to create update. %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            # Best-effort: obsoletion failure is reported as a caveat, not fatal.
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
def latest_candidates(request):
    """
    Return the most recent candidate builds for a given package name.

    For a given `package`, this method returns the most recent builds tagged
    into the Release.candidate_tag for all Releases. The package name is
    specified in the request "package" parameter.

    Args:
        request (pyramid.util.Request): The current request. The package name is specified in the
            request's "package" parameter.
    Returns:
        list: A list of dictionaries of the found builds. Each dictionary has two keys: "nvr"
            maps to the build's nvr field, and "id" maps to the build's id.
    """
    koji = request.koji
    db = request.db

    def gather(package_name, include_testing):
        # Queue one listTagged call per relevant tag of every active release,
        # then fire them all at once as a single Koji multicall.
        koji.multicall = True

        active_releases = db.query(models.Release).filter(
            models.Release.state.in_(
                (models.ReleaseState.pending, models.ReleaseState.current)))

        query_args = dict(package=package_name, latest=True)
        for rel in active_releases:
            koji.listTagged(rel.candidate_tag, **query_args)
            if include_testing:
                koji.listTagged(rel.testing_tag, **query_args)
                koji.listTagged(rel.pending_testing_tag, **query_args)
                koji.listTagged(rel.pending_signing_tag, **query_args)

        # Protect against None
        responses = koji.multiCall() or []

        found = []
        seen = set()
        for tagged in responses:
            for build in tagged[0]:
                # Prune duplicates
                # https://github.com/fedora-infra/bodhi/issues/450
                key = (build['nvr'], build['id'])
                if key in seen:
                    continue
                seen.add(key)
                found.append({'nvr': build['nvr'], 'id': build['id']})
        return found

    pkg = request.params.get('package')
    testing = asbool(request.params.get('testing'))
    log.debug('latest_candidate(%r, %r)' % (pkg, testing))

    if not pkg:
        return []

    result = gather(pkg, testing)
    log.debug(result)
    return result