Example #1
 def eject_from_mash(self, update, reason):
     update.locked = False
     text = '%s ejected from the push because %r' % (update.title, reason)
     log.warn(text)
     update.comment(self.db, text, author=u'bodhi')
     # Remove the pending tag as well
     if update.request is UpdateRequest.stable:
         update.remove_tag(update.release.pending_stable_tag,
                           koji=self.koji)
     elif update.request is UpdateRequest.testing:
         update.remove_tag(update.release.pending_testing_tag,
                           koji=self.koji)
     update.request = None
     if update in self.state['updates']:
         self.state['updates'].remove(update)
     if update in self.updates:
         self.updates.remove(update)
     notifications.publish(
         topic="update.eject",
         msg=dict(
             repo=self.id,
             update=update,
             reason=reason,
             request=self.request,
             release=self.release,
         ),
         force=True,
     )
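The eject_from_mash example above unlocks the update, removes whichever pending Koji tag matches its request, drops it from the in-progress mash state, and announces the ejection over fedmsg. Below is a minimal, self-contained sketch of that flow using made-up stand-in objects; FakeUpdate and eject are hypothetical names for illustration, not Bodhi's real models.

class FakeUpdate(object):
    def __init__(self, title, request):
        self.title = title
        self.request = request            # e.g. 'stable' or 'testing'
        self.locked = True
        self.tags = set(['pending_stable', 'pending_testing'])

def eject(update, pending_updates, reason):
    """Unlock the update, drop its pending tag, and forget it for this push."""
    update.locked = False
    update.tags.discard('pending_%s' % update.request)
    update.request = None
    if update in pending_updates:
        pending_updates.remove(update)
    print('%s ejected from the push because %r' % (update.title, reason))

update = FakeUpdate(u'foo-1.0-1.fc24', 'stable')
pending = [update]
eject(update, pending, 'a failed test')
assert not update.locked and update.request is None and update not in pending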
Example #2
 def eject_from_mash(self, update, reason):
     update.locked = False
     text = '%s ejected from the push because %r' % (update.title, reason)
     log.warn(text)
     update.comment(self.db, text, author=u'bodhi')
     # Remove the pending tag as well
     if update.request is UpdateRequest.stable:
         update.remove_tag(update.release.pending_stable_tag,
                           koji=self.koji)
     elif update.request is UpdateRequest.testing:
         update.remove_tag(update.release.pending_testing_tag,
                           koji=self.koji)
     update.request = None
     if update in self.state['updates']:
         self.state['updates'].remove(update)
     if update in self.updates:
         self.updates.remove(update)
     notifications.publish(
         topic="update.eject",
         msg=dict(
             repo=self.id,
             update=update,
             reason=reason,
             request=self.request,
             release=self.release,
         ),
         force=True,
     )
Example #3
    def __init__(self,
                 hub,
                 db_factory=None,
                 mash_dir=config.get('mash_dir'),
                 *args,
                 **kw):
        if not db_factory:
            config_uri = '/etc/bodhi/production.ini'
            settings = get_appsettings(config_uri)
            engine = engine_from_config(settings, 'sqlalchemy.')
            Base.metadata.create_all(engine)
            self.db_factory = transactional_session_maker(engine)
        else:
            self.db_factory = db_factory

        buildsys.setup_buildsystem(config)
        self.mash_dir = mash_dir
        prefix = hub.config.get('topic_prefix')
        env = hub.config.get('environment')
        self.topic = prefix + '.' + env + '.' + hub.config.get('masher_topic')
        self.valid_signer = hub.config.get('releng_fedmsg_certname')
        if not self.valid_signer:
            log.warn('No releng_fedmsg_certname defined. '
                     'Cert validation disabled')
        super(Masher, self).__init__(hub, *args, **kw)
        log.info('Bodhi masher listening on topic: %s' % self.topic)
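The constructor above derives the fedmsg topic the masher listens on by joining the hub's topic_prefix, environment, and masher_topic settings. A small sketch of that assembly with hypothetical configuration values (the real values come from the fedmsg hub config):

hub_config = {
    'topic_prefix': 'org.fedoraproject',   # hypothetical values, for
    'environment': 'dev',                  # illustration only
    'masher_topic': 'bodhi.start',
}
topic = '%s.%s.%s' % (hub_config['topic_prefix'],
                      hub_config['environment'],
                      hub_config['masher_topic'])
print(topic)  # org.fedoraproject.dev.bodhi.start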
Example #4
    def send_testing_digest(self):
        """Send digest mail to mailing lists"""
        self.log.info('Sending updates-testing digest')
        sechead = u'The following %s Security updates need testing:\n Age  URL\n'
        crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
        testhead = u'The following builds have been pushed to %s updates-testing\n\n'

        for prefix, content in self.testing_digest.iteritems():
            release = self.db.query(Release).filter_by(long_name=prefix).one()
            test_list_key = '%s_test_announce_list' % (
                release.id_prefix.lower().replace('-', '_'))
            test_list = config.get(test_list_key)
            if not test_list:
                log.warn('%r undefined. Not sending updates-testing digest',
                         test_list_key)
                continue

            log.debug("Sending digest for updates-testing %s" % prefix)
            maildata = u''
            security_updates = self.get_security_updates(prefix)
            if security_updates:
                maildata += sechead % prefix
                for update in security_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            critpath_updates = self.get_unapproved_critpath_updates(prefix)
            if critpath_updates:
                maildata += crithead % prefix
                for update in critpath_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            maildata += testhead % prefix
            updlist = content.keys()
            updlist.sort()
            for pkg in updlist:
                maildata += u'    %s\n' % pkg
            maildata += u'\nDetails about builds:\n\n'
            for nvr in updlist:
                maildata += u"\n" + self.testing_digest[prefix][nvr]

            mail.send_mail(config.get('bodhi_email'), test_list,
                           '%s updates-testing report' % prefix, maildata)
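send_testing_digest assembles one mail per release: an optional security section, an optional critical-path section, and then the list of builds currently in updates-testing. Here is a self-contained sketch of how one such section is formatted, using made-up update data in place of real query results:

sechead = u'The following %s Security updates need testing:\n Age  URL\n'
fake_updates = [
    (3, u'https://bodhi.example.org/updates/FEDORA-2016-0001', u'foo-1.0-1.fc24'),
    (12, u'https://bodhi.example.org/updates/FEDORA-2016-0002', u'bar-2.3-1.fc24'),
]
maildata = sechead % u'Fedora 24'
for days_in_testing, url, title in fake_updates:
    maildata += u' %3i  %s   %s\n' % (days_in_testing, url, title)
print(maildata)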
Example #5
    def send_testing_digest(self):
        """Send digest mail to mailing lists"""
        self.log.info('Sending updates-testing digest')
        sechead = u'The following %s Security updates need testing:\n Age  URL\n'
        crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
        testhead = u'The following builds have been pushed to %s updates-testing\n\n'

        for prefix, content in self.testing_digest.iteritems():
            release = self.db.query(Release).filter_by(long_name=prefix).one()
            test_list_key = '%s_test_announce_list' % (
                release.id_prefix.lower().replace('-', '_'))
            test_list = config.get(test_list_key)
            if not test_list:
                log.warn('%r undefined. Not sending updates-testing digest',
                         test_list_key)
                continue

            log.debug("Sending digest for updates-testing %s" % prefix)
            maildata = u''
            security_updates = self.get_security_updates(prefix)
            if security_updates:
                maildata += sechead % prefix
                for update in security_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing, update.abs_url(), update.title)
                maildata += '\n\n'

            critpath_updates = self.get_unapproved_critpath_updates(prefix)
            if critpath_updates:
                maildata += crithead % prefix
                for update in critpath_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing, update.abs_url(), update.title)
                maildata += '\n\n'

            maildata += testhead % prefix
            updlist = content.keys()
            updlist.sort()
            for pkg in updlist:
                maildata += u'    %s\n' % pkg
            maildata += u'\nDetails about builds:\n\n'
            for nvr in updlist:
                maildata += u"\n" + self.testing_digest[prefix][nvr]

            mail.send_mail(config.get('bodhi_email'), test_list,
                           '%s updates-testing report' % prefix, maildata)
Example #6
    def __init__(self, hub, db_factory=None, mash_dir=config.get("mash_dir"), *args, **kw):
        if not db_factory:
            config_uri = "/etc/bodhi/production.ini"
            settings = get_appsettings(config_uri)
            engine = engine_from_config(settings, "sqlalchemy.")
            Base.metadata.create_all(engine)
            self.db_factory = transactional_session_maker(engine)
        else:
            self.db_factory = db_factory

        buildsys.setup_buildsystem(config)
        self.mash_dir = mash_dir
        prefix = hub.config.get("topic_prefix")
        env = hub.config.get("environment")
        self.topic = prefix + "." + env + "." + hub.config.get("masher_topic")
        self.valid_signer = hub.config.get("releng_fedmsg_certname")
        if not self.valid_signer:
            log.warn("No releng_fedmsg_certname defined" "Cert validation disabled")
        super(Masher, self).__init__(hub, *args, **kw)
        log.info("Bodhi masher listening on topic: %s" % self.topic)
Example #7
 def eject_from_mash(self, update, reason):
     update.locked = False
     text = '%s ejected from the push because %r' % (update.title, reason)
     log.warn(text)
     update.comment(text, author=u'bodhi')
     update.request = None
     if update in self.state['updates']:
         self.state['updates'].remove(update)
     if update in self.updates:
         self.updates.remove(update)
     notifications.publish(
         topic="update.eject",
         msg=dict(
             repo=self.id,
             update=update,
             reason=reason,
             request=self.request,
             release=self.release,
         ),
         force=True,
     )
Example #8
    def __init__(self, hub, db_factory=None, mash_dir=config.get('mash_dir'),
                 *args, **kw):
        if not db_factory:
            config_uri = '/etc/bodhi/production.ini'
            settings = get_appsettings(config_uri)
            engine = engine_from_config(settings, 'sqlalchemy.')
            Base.metadata.create_all(engine)
            self.db_factory = transactional_session_maker(engine)
        else:
            self.db_factory = db_factory

        buildsys.setup_buildsystem(config)
        self.mash_dir = mash_dir
        prefix = hub.config.get('topic_prefix')
        env = hub.config.get('environment')
        self.topic = prefix + '.' + env + '.' + hub.config.get('masher_topic')
        self.valid_signer = hub.config.get('releng_fedmsg_certname')
        if not self.valid_signer:
            log.warn('No releng_fedmsg_certname defined. '
                     'Cert validation disabled')
        super(Masher, self).__init__(hub, *args, **kw)
        log.info('Bodhi masher listening on topic: %s' % self.topic)
Example #9
    def compose_atomic_trees(self):
        """Compose Atomic OSTrees for each tag that we mashed"""
        from fedmsg_atomic_composer.composer import AtomicComposer
        from fedmsg_atomic_composer.config import config as atomic_config

        composer = AtomicComposer()
        mashed_repos = dict([('-'.join(os.path.basename(repo).split('-')[:-1]),
                              repo) for repo in self.state['completed_repos']])
        for tag, mash_path in mashed_repos.items():
            if tag not in atomic_config['releases']:
                log.warn('Cannot find atomic configuration for %r', tag)
                continue

            # Update the repo URLs to point to our local mashes
            release = copy.deepcopy(atomic_config['releases'][tag])
            mash_path = 'file://' + os.path.join(mash_path, tag,
                                                 release['arch'])

            if 'updates-testing' in tag:
                release['repos']['updates-testing'] = mash_path
                updates_tag = tag.replace('-testing', '')
                if updates_tag in mashed_repos:
                    release['repos']['updates'] = 'file://' + os.path.join(
                        mashed_repos[updates_tag], updates_tag,
                        release['arch'])
                log.debug('Using the updates repo from %s',
                          release['repos']['updates'])
            else:
                release['repos']['updates'] = mash_path

            # Compose the tree, and raise an exception upon failure
            result = composer.compose(release)
            if result['result'] != 'success':
                self.log.error(result)
                raise Exception('%s atomic compose failed' % tag)
            else:
                self.log.info('%s atomic tree compose successful', tag)
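compose_atomic_trees maps each completed mash directory back to its Koji tag by stripping the trailing timestamp component from the directory name. A small sketch of that derivation, with hypothetical mash paths:

import os

completed_repos = [
    '/mash_dir/f24-updates-160820.1234',          # hypothetical paths
    '/mash_dir/f24-updates-testing-160820.5678',
]
mashed_repos = dict(('-'.join(os.path.basename(repo).split('-')[:-1]), repo)
                    for repo in completed_repos)
print(sorted(mashed_repos))  # ['f24-updates', 'f24-updates-testing']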
Example #10
def exception_view(exc, request):
    """ A generic error page handler (404s, 403s, 500s, etc..)

    This is here to catch everything that isn't caught by our cornice error
    handlers.  When we do catch something, we transform it intpu a cornice
    Errors object and pass it to our nice cornice error handler.  That way, all
    the exception presentation and rendering we can keep in one place.
    """

    errors = getattr(request, 'errors', [])
    status = getattr(exc, 'status_code', 500)

    if status not in (404, 403):
        log.exception("Error caught.  Handling HTML response.")
    else:
        log.warn(str(exc))

    if not len(errors):
        description = getattr(exc, 'explanation', None) or str(exc)

        errors = cornice.errors.Errors(request, status=status)
        errors.add('unknown', description=description)

    return bodhi.services.errors.html_handler(errors)
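For a catch-all view like this to fire, it has to be registered as a Pyramid exception view. The snippet below is only a sketch of one way to wire it up and assumes the exception_view function above is importable; whether Bodhi registers it exactly like this is an assumption, not something shown in the code above.

from pyramid.config import Configurator

def make_app():
    config = Configurator()
    # Any exception (including HTTPNotFound / HTTPForbidden) that no other
    # view handles gets rendered by exception_view.
    config.add_view(exception_view, context=Exception)
    return config.make_wsgi_app()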
Example #11
    def compose_atomic_trees(self):
        """Compose Atomic OSTrees for each tag that we mashed"""
        from fedmsg_atomic_composer.composer import AtomicComposer
        from fedmsg_atomic_composer.config import config as atomic_config

        composer = AtomicComposer()
        mashed_repos = dict([('-'.join(os.path.basename(repo).split('-')[:-1]), repo)
                             for repo in self.state['completed_repos']])
        for tag, mash_path in mashed_repos.items():
            if tag not in atomic_config['releases']:
                log.warn('Cannot find atomic configuration for %r', tag)
                continue

            # Update the repo URLs to point to our local mashes
            release = copy.deepcopy(atomic_config['releases'][tag])
            mash_path = 'file://' + os.path.join(mash_path, tag, release['arch'])

            if 'updates-testing' in tag:
                release['repos']['updates-testing'] = mash_path
                updates_tag = tag.replace('-testing', '')
                if updates_tag in mashed_repos:
                    release['repos']['updates'] = 'file://' + os.path.join(
                            mashed_repos[updates_tag], updates_tag,
                            release['arch'])
                log.debug('Using the updates repo from %s',
                          release['repos']['updates'])
            else:
                release['repos']['updates'] = mash_path

            # Compose the tree, and raise an exception upon failure
            result = composer.compose(release)
            if result['result'] != 'success':
                self.log.error(result)
                raise Exception('%s atomic compose failed' % tag)
            else:
                self.log.info('%s atomic tree compose successful', tag)
Example #12
    def compose_atomic_trees(self):
        """Compose Atomic OSTrees for each tag that we mashed"""
        from fedmsg_atomic_composer.composer import AtomicComposer
        from fedmsg_atomic_composer.config import config as atomic_config

        composer = AtomicComposer()
        mashed_repos = dict(
            [("-".join(os.path.basename(repo).split("-")[:-1]), repo) for repo in self.state["completed_repos"]]
        )
        for tag, mash_path in mashed_repos.items():
            if tag not in atomic_config["releases"]:
                log.warn("Cannot find atomic configuration for %r", tag)
                continue

            # Update the repo URLs to point to our local mashes
            release = copy.deepcopy(atomic_config["releases"][tag])
            mash_path = "file://" + os.path.join(mash_path, tag, release["arch"])

            if "updates-testing" in tag:
                release["repos"]["updates-testing"] = mash_path
                updates_tag = tag.replace("-testing", "")
                if updates_tag in mashed_repos:
                    release["repos"]["updates"] = "file://" + os.path.join(
                        mashed_repos[updates_tag], updates_tag, release["arch"]
                    )
                log.debug("Using the updates repo from %s", release["repos"]["updates"])
            else:
                release["repos"]["updates"] = mash_path

            # Compose the tree, and raise an exception upon failure
            result = composer.compose(release)
            if result["result"] != "success":
                self.log.error(result)
                raise Exception("%s atomic compose failed" % tag)
            else:
                self.log.info("%s atomic tree compose successful", tag)
Example #13
def save_stack(request):
    """Save a stack"""
    data = request.validated
    db = request.db
    user = User.get(request.user.name, db)

    # Fetch or create the stack
    stack = Stack.get(data["name"], db)
    if not stack:
        stack = Stack(name=data["name"], users=[user])
        db.add(stack)
        db.flush()

    if stack.users or stack.groups:
        if user in stack.users:
            log.info("%s is an owner of the %s", user.name, stack.name)
        else:
            for group in user.groups:
                if group in stack.groups:
                    log.info("%s is a member of the %s group", user.name, stack.name)
                    break
            else:
                log.warn("%s is not an owner of the %s stack", user.name, stack.name)
                log.debug("owners = %s; groups = %s", stack.users, stack.groups)
                request.errors.add(
                    "body", "name", "%s does not have privileges" " to modify the %s stack" % (user.name, stack.name)
                )
                request.errors.status = HTTPForbidden.code
                return

    # Update the stack description
    desc = data["description"]
    if desc:
        stack.description = desc

    # Update the stack requirements
    # If the user passed in no value at all for requirements, then use
    # the site defaults.  If, however, the user passed in the empty string, we
    # assume they mean *really*, no requirements so we leave the value null.
    reqs = data["requirements"]
    if reqs is None:
        stack.requirements = request.registry.settings.get("site_requirements")
    elif reqs:
        stack.requirements = reqs

    stack.update_relationship("users", User, data, db)
    stack.update_relationship("groups", Group, data, db)

    # We make a special case out of packages here, since when a package is
    # added to a stack, we want to give it the same requirements as the stack
    # has. See https://github.com/fedora-infra/bodhi/issues/101
    new, same, rem = stack.update_relationship("packages", Package, data, db)
    if stack.requirements:
        additional = list(tokenize(stack.requirements))

        for name in new:
            package = Package.get(name, db)
            original = package.requirements
            original = [] if not original else list(tokenize(original))
            package.requirements = " ".join(list(set(original + additional)))

    log.info("Saved %s stack", data["name"])
    notifications.publish(topic="stack.save", msg=dict(stack=stack, agent=user.name))

    return dict(stack=stack)
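The requirements handling in save_stack distinguishes three cases, as the comment explains: an omitted value falls back to the site-wide default, an empty string means "really, no requirements", and anything else is stored as given. A standalone sketch of that branching for a freshly created stack; resolve_requirements is a hypothetical helper, not part of Bodhi:

def resolve_requirements(submitted, site_default):
    if submitted is None:    # field omitted entirely -> fall back to site default
        return site_default
    elif submitted:          # non-empty string -> use exactly what was given
        return submitted
    return None              # empty string -> explicitly no requirements

assert resolve_requirements(None, u'rpmlint') == u'rpmlint'
assert resolve_requirements(u'', u'rpmlint') is None
assert resolve_requirements(u'depcheck', u'rpmlint') == u'depcheck'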
Example #14
def new_update(request):
    """ Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's original title must be specified in
    the ``edited`` parameter.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    caveats = []
    try:

        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in data['builds']:
            name, version, release = request.buildinfo[nvr]['nvr']
            package = request.db.query(Package).filter_by(name=name).first()
            if not package:
                package = Package(name=name)
                request.db.add(package)
                request.db.flush()

            build = Build.get(nvr, request.db)

            if build is None:
                log.debug("Adding nvr %s", nvr)
                build = Build(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        if data.get('edited'):

            log.info('Editing update: %s' % data['edited'])

            assert len(releases) == 1, "Updates may not span multiple releases"
            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                                   'into %i, one for each release.' % len(releases),
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.title)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)
    except LockedUpdateException as e:
        log.warn(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add('body', 'builds',
                           'Unable to create update.  %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
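When a submission spans several releases, new_update splits it into one update per release and records a caveat about the split. A made-up illustration of that grouping; the Build namedtuple and the data are hypothetical stand-ins for the real Build model:

from collections import namedtuple

Build = namedtuple('Build', ['nvr', 'release'])
builds = [
    Build('foo-1.0-1.fc24', 'F24'),
    Build('bar-2.0-1.fc24', 'F24'),
    Build('foo-1.0-1.fc25', 'F25'),
]
releases = set(build.release for build in builds)
updates = []
for release in releases:
    updates.append({
        'release': release,
        'builds': [b.nvr for b in builds if b.release == release],
    })
print(len(updates))  # 2 -> the submission is split, one update per release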
Example #15
def new_update(request):
    """ Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's original title must be specified in
    the ``edited`` parameter.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    caveats = []
    try:

        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in data['builds']:
            name, version, release = request.buildinfo[nvr]['nvr']
            package = request.db.query(Package).filter_by(name=name).first()
            if not package:
                package = Package(name=name)
                request.db.add(package)
                request.db.flush()

            build = Build.get(nvr, request.db)

            if build is None:
                log.debug("Adding nvr %s", nvr)
                build = Build(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        if data.get('edited'):

            log.info('Editing update: %s' % data['edited'])

            assert len(releases) == 1, "Updates may not span multiple releases"
            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                                   'into %i, one for each release.' % len(releases),
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('update = %r' % result)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)
    except LockedUpdateException as e:
        log.warn(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add(
            'body', 'builds', 'Unable to create update.  %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
Example #16
def save_stack(request):
    """Save a stack"""
    data = request.validated
    db = request.db
    user = User.get(request.user.name, db)

    # Fetch or create the stack
    stack = Stack.get(data['name'], db)
    if not stack:
        stack = Stack(name=data['name'], users=[user])
        db.add(stack)
        db.flush()

    if stack.users or stack.groups:
        if user in stack.users:
            log.info('%s is an owner of the %s stack', user.name, stack.name)
        else:
            for group in user.groups:
                if group in stack.groups:
                    log.info('%s is a member of the %s group', user.name,
                             stack.name)
                    break
            else:
                log.warn('%s is not an owner of the %s stack', user.name,
                         stack.name)
                log.debug('owners = %s; groups = %s', stack.users,
                          stack.groups)
                request.errors.add(
                    'body', 'name', '%s does not have privileges'
                    ' to modify the %s stack' % (user.name, stack.name))
                request.errors.status = HTTPForbidden.code
                return

    # Update the stack description
    desc = data['description']
    if desc:
        stack.description = desc

    # Update the stack requirements
    # If the user passed in no value at all for requirements, then use
    # the site defaults.  If, however, the user passed in the empty string, we
    # assume they mean *really*, no requirements so we leave the value null.
    reqs = data['requirements']
    if reqs is None:
        stack.requirements = request.registry.settings.get('site_requirements')
    elif reqs:
        stack.requirements = reqs

    stack.update_relationship('users', User, data, db)
    stack.update_relationship('groups', Group, data, db)

    # We make a special case out of packages here, since when a package is
    # added to a stack, we want to give it the same requirements as the stack
    # has. See https://github.com/fedora-infra/bodhi/issues/101
    new, same, rem = stack.update_relationship('packages', Package, data, db)
    if stack.requirements:
        additional = list(tokenize(stack.requirements))

        for name in new:
            package = Package.get(name, db)
            original = package.requirements
            original = [] if not original else list(tokenize(original))
            package.requirements = " ".join(list(set(original + additional)))

    log.info('Saved %s stack', data['name'])
    notifications.publish(topic='stack.save',
                          msg=dict(stack=stack, agent=user.name))

    return dict(stack=stack)