Example #1
    def setUp(self):
        engine = create_engine(DB_PATH)
        Session = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
        Session.configure(bind=engine)
        log.debug('Creating all models for %s' % engine)
        Base.metadata.bind = engine
        Base.metadata.create_all(engine)
        self.db = Session()
        populate(self.db)

        # Initialize our temporary repo
        self.tempdir = tempfile.mkdtemp('bodhi')
        self.temprepo = join(self.tempdir, 'f17-updates-testing')
        mkmetadatadir(join(self.temprepo, 'f17-updates-testing', 'i386'))
        self.repodata = join(self.temprepo, 'f17-updates-testing', 'i386', 'repodata')
        assert exists(join(self.repodata, 'repomd.xml'))

        DevBuildsys.__rpms__ = [{
            'arch': 'src',
            'build_id': 6475,
            'buildroot_id': 1883,
            'buildtime': 1178868422,
            'epoch': None,
            'id': 62330,
            'name': 'bodhi',
            'nvr': 'bodhi-2.0-1.fc17',
            'release': '1.fc17',
            'size': 761742,
            'version': '2.0'
        }]
Example #2
    def setUp(self):
        engine = create_engine(DB_PATH)
        Session = scoped_session(
            sessionmaker(extension=ZopeTransactionExtension()))
        Session.configure(bind=engine)
        log.debug('Creating all models for %s' % engine)
        Base.metadata.bind = engine
        Base.metadata.create_all(engine)
        self.db = Session()
        populate(self.db)

        # Initialize our temporary repo
        self.tempdir = tempfile.mkdtemp('bodhi')
        self.temprepo = join(self.tempdir, 'f17-updates-testing')
        mkmetadatadir(join(self.temprepo, 'f17-updates-testing', 'i386'))
        self.repodata = join(self.temprepo, 'f17-updates-testing', 'i386',
                             'repodata')
        assert exists(join(self.repodata, 'repomd.xml'))

        DevBuildsys.__rpms__ = [{
            'arch': 'src',
            'build_id': 6475,
            'buildroot_id': 1883,
            'buildtime': 1178868422,
            'epoch': None,
            'id': 62330,
            'name': 'bodhi',
            'nvr': 'bodhi-2.0-1.fc17',
            'release': '1.fc17',
            'size': 761742,
            'version': '2.0'
        }]
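The two fixtures above bind a scoped_session configured with ZopeTransactionExtension, which hands the session lifecycle over to the transaction package. A matching tearDown is not shown for these two examples; a minimal sketch of one (shutil, transaction, and the ignore_errors flag are assumptions here, not taken from the project) could look like:

import shutil
import transaction

def tearDown(self):
    # ZopeTransactionExtension ties the session to the transaction manager,
    # so abort the transaction instead of committing test data.
    transaction.abort()
    self.db.close()
    # Remove the temporary repo created in setUp()
    shutil.rmtree(self.tempdir, ignore_errors=True)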
Example #3
 def tearDown(self):
     log.debug('Removing session')
     self.db.close()
     if DB_NAME:
         try:
             import requests
             requests.get('%s/clean/%s' % (FAITOUT, DB_NAME))
         except:
             pass
Example #4
 def tearDown(self):
     log.debug('Removing session')
     self.db.close()
     if DB_NAME:
         try:
             import requests
             requests.get('%s/clean/%s' % (FAITOUT, DB_NAME))
         except:
             pass
Example #5
def latest_candidates(request):
    """
    For a given `package`, this method returns the most recent builds tagged
    into the Release.candidate_tag for all Releases.
    """
    koji = request.koji
    db = request.db

    @request.cache.cache_on_arguments()
    def work(pkg, testing):
        result = []
        koji.multicall = True

        releases = db.query(bodhi.models.Release) \
                     .filter(
                         bodhi.models.Release.state.in_(
                             (bodhi.models.ReleaseState.pending,
                              bodhi.models.ReleaseState.current)))

        kwargs = dict(package=pkg, latest=True)
        for release in releases:
            koji.listTagged(release.candidate_tag, **kwargs)
            if testing:
                koji.listTagged(release.testing_tag, **kwargs)
                koji.listTagged(release.pending_testing_tag, **kwargs)

        builds = koji.multiCall() or []  # Protect against None

        for build in builds:
            if isinstance(build, dict):
                continue
            if build and build[0] and build[0][0]:
                item = {
                    'nvr': build[0][0]['nvr'],
                    'id': build[0][0]['id'],
                }
                # Prune duplicates
                # https://github.com/fedora-infra/bodhi/issues/450
                if item not in result:
                    result.append(item)
        return result


    pkg = request.params.get('package')
    testing = asbool(request.params.get('testing'))
    log.debug('latest_candidate(%r, %r)' % (pkg, testing))

    if not pkg:
        return []

    result = work(pkg, testing)

    log.debug(result)
    return result
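The view above relies on Koji's multicall batching: setting koji.multicall = True queues each listTagged call, and koji.multiCall() sends them in one XML-RPC round trip, returning one entry per queued call. Each entry is either a one-element list wrapping the result or a fault dict, which is why the loop skips dicts and indexes build[0][0]. A stripped-down sketch of the same pattern (the tag and package names are only illustrative):

koji.multicall = True
koji.listTagged('f17-updates-candidate', package='bodhi', latest=True)
koji.listTagged('f17-updates-testing', package='bodhi', latest=True)
for entry in koji.multiCall():
    if isinstance(entry, dict):   # fault entries come back as dicts
        continue
    for build in entry[0]:        # successful entries are [result]
        print(build['nvr'])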
Example #6
    def send_testing_digest(self):
        """Send digest mail to mailing lists"""
        self.log.info('Sending updates-testing digest')
        sechead = u'The following %s Security updates need testing:\n Age  URL\n'
        crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
        testhead = u'The following builds have been pushed to %s updates-testing\n\n'

        for prefix, content in self.testing_digest.iteritems():
            release = self.db.query(Release).filter_by(long_name=prefix).one()
            test_list_key = '%s_test_announce_list' % (
                release.id_prefix.lower().replace('-', '_'))
            test_list = config.get(test_list_key)
            if not test_list:
                log.warn('%r undefined. Not sending updates-testing digest',
                         test_list_key)
                continue

            log.debug("Sending digest for updates-testing %s" % prefix)
            maildata = u''
            security_updates = self.get_security_updates(prefix)
            if security_updates:
                maildata += sechead % prefix
                for update in security_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            critpath_updates = self.get_unapproved_critpath_updates(prefix)
            if critpath_updates:
                maildata += crithead % prefix
                for update in self.get_unapproved_critpath_updates(prefix):
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            maildata += testhead % prefix
            updlist = content.keys()
            updlist.sort()
            for pkg in updlist:
                maildata += u'    %s\n' % pkg
            maildata += u'\nDetails about builds:\n\n'
            for nvr in updlist:
                maildata += u"\n" + self.testing_digest[prefix][nvr]

            mail.send_mail(config.get('bodhi_email'), test_list,
                           '%s updates-testing report' % prefix, maildata)
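The announce list is looked up with a config key derived from the release's id_prefix. A quick illustration of the derivation used above (the prefix value is only an example):

id_prefix = 'FEDORA-EPEL'
test_list_key = '%s_test_announce_list' % id_prefix.lower().replace('-', '_')
# test_list_key == 'fedora_epel_test_announce_list', resolved via config.get()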
Example #7
File: base.py  Project: remicollet/bodhi
    def setUp(self):
        engine = create_engine(DB_PATH)
        DBSession.configure(bind=engine)
        log.debug('Creating all models for %s' % engine)
        Base.metadata.create_all(engine)
        self.db = DBSession()
        populate(self.db)
        self.app = TestApp(main({}, testing=u'guest', **self.app_settings))

        # Track sql statements in every test
        self.sql_statements = []
        def track(conn, cursor, statement, param, ctx, many):
            self.sql_statements.append(statement)

        event.listen(engine, "before_cursor_execute", track)
Example #8
    def setUp(self):
        engine = create_engine(DB_PATH)
        Session = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
        Session.configure(bind=engine)
        log.debug('Creating all models for %s' % engine)
        Base.metadata.bind = engine
        Base.metadata.create_all(engine)
        self.db = Session()
        populate(self.db)
        self.app = TestApp(main({}, testing=u'guest', session=self.db, **self.app_settings))

        # Track sql statements in every test
        self.sql_statements = []
        def track(conn, cursor, statement, param, ctx, many):
            self.sql_statements.append(statement)

        event.listen(engine, "before_cursor_execute", track)
Example #9
    def send_testing_digest(self):
        """Send digest mail to mailing lists"""
        self.log.info('Sending updates-testing digest')
        sechead = u'The following %s Security updates need testing:\n Age  URL\n'
        crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
        testhead = u'The following builds have been pushed to %s updates-testing\n\n'

        for prefix, content in self.testing_digest.iteritems():
            release = self.db.query(Release).filter_by(long_name=prefix).one()
            test_list_key = '%s_test_announce_list' % (
                release.id_prefix.lower().replace('-', '_'))
            test_list = config.get(test_list_key)
            if not test_list:
                log.warn('%r undefined. Not sending updates-testing digest',
                         test_list_key)
                continue

            log.debug("Sending digest for updates-testing %s" % prefix)
            maildata = u''
            security_updates = self.get_security_updates(prefix)
            if security_updates:
                maildata += sechead % prefix
                for update in security_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing, update.abs_url(), update.title)
                maildata += '\n\n'

            critpath_updates = self.get_unapproved_critpath_updates(prefix)
            if critpath_updates:
                maildata += crithead % prefix
                for update in self.get_unapproved_critpath_updates(prefix):
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing, update.abs_url(), update.title)
                maildata += '\n\n'

            maildata += testhead % prefix
            updlist = content.keys()
            updlist.sort()
            for pkg in updlist:
                maildata += u'    %s\n' % pkg
            maildata += u'\nDetails about builds:\n\n'
            for nvr in updlist:
                maildata += u"\n" + self.testing_digest[prefix][nvr]

            mail.send_mail(config.get('bodhi_email'), test_list,
                           '%s updates-testing report' % prefix, maildata)
Example #10
    def compose_atomic_trees(self):
        """Compose Atomic OSTrees for each tag that we mashed"""
        from fedmsg_atomic_composer.composer import AtomicComposer
        from fedmsg_atomic_composer.config import config as atomic_config

        composer = AtomicComposer()
        mashed_repos = dict([('-'.join(os.path.basename(repo).split('-')[:-1]),
                              repo) for repo in self.state['completed_repos']])
        for tag, mash_path in mashed_repos.items():
            if tag not in atomic_config['releases']:
                log.warn('Cannot find atomic configuration for %r', tag)
                continue

            # Update the repo URLs to point to our local mashes
            release = copy.deepcopy(atomic_config['releases'][tag])
            mash_path = 'file://' + os.path.join(mash_path, tag,
                                                 release['arch'])

            if 'updates-testing' in tag:
                release['repos']['updates-testing'] = mash_path
                updates_tag = tag.replace('-testing', '')
                if updates_tag in mashed_repos:
                    release['repos']['updates'] = 'file://' + os.path.join(
                        mashed_repos[updates_tag], updates_tag,
                        release['arch'])
                log.debug('Using the updates repo from %s',
                          release['repos']['updates'])
            else:
                release['repos']['updates'] = mash_path

            # Compose the tree, and raise an exception upon failure
            result = composer.compose(release)
            if result['result'] != 'success':
                self.log.error(result)
                raise Exception('%s atomic compose failed' % tag)
            else:
                self.log.info('%s atomic tree compose successful', tag)
Example #11
    def compose_atomic_trees(self):
        """Compose Atomic OSTrees for each tag that we mashed"""
        from fedmsg_atomic_composer.composer import AtomicComposer
        from fedmsg_atomic_composer.config import config as atomic_config

        composer = AtomicComposer()
        mashed_repos = dict([('-'.join(os.path.basename(repo).split('-')[:-1]), repo)
                             for repo in self.state['completed_repos']])
        for tag, mash_path in mashed_repos.items():
            if tag not in atomic_config['releases']:
                log.warn('Cannot find atomic configuration for %r', tag)
                continue

            # Update the repo URLs to point to our local mashes
            release = copy.deepcopy(atomic_config['releases'][tag])
            mash_path = 'file://' + os.path.join(mash_path, tag, release['arch'])

            if 'updates-testing' in tag:
                release['repos']['updates-testing'] = mash_path
                updates_tag = tag.replace('-testing', '')
                if updates_tag in mashed_repos:
                    release['repos']['updates'] = 'file://' + os.path.join(
                            mashed_repos[updates_tag], updates_tag,
                            release['arch'])
                log.debug('Using the updates repo from %s',
                          release['repos']['updates'])
            else:
                release['repos']['updates'] = mash_path

            # Compose the tree, and raise an exception upon failure
            result = composer.compose(release)
            if result['result'] != 'success':
                self.log.error(result)
                raise Exception('%s atomic compose failed' % tag)
            else:
                self.log.info('%s atomic tree compose successful', tag)
Example #12
File: masher.py  Project: cgwalters/bodhi
    def compose_atomic_trees(self):
        """Compose Atomic OSTrees for each tag that we mashed"""
        from fedmsg_atomic_composer.composer import AtomicComposer
        from fedmsg_atomic_composer.config import config as atomic_config

        composer = AtomicComposer()
        mashed_repos = dict(
            [("-".join(os.path.basename(repo).split("-")[:-1]), repo) for repo in self.state["completed_repos"]]
        )
        for tag, mash_path in mashed_repos.items():
            if tag not in atomic_config["releases"]:
                log.warn("Cannot find atomic configuration for %r", tag)
                continue

            # Update the repo URLs to point to our local mashes
            release = copy.deepcopy(atomic_config["releases"][tag])
            mash_path = "file://" + os.path.join(mash_path, tag, release["arch"])

            if "updates-testing" in tag:
                release["repos"]["updates-testing"] = mash_path
                updates_tag = tag.replace("-testing", "")
                if updates_tag in mashed_repos:
                    release["repos"]["updates"] = "file://" + os.path.join(
                        mashed_repos[updates_tag], updates_tag, release["arch"]
                    )
                log.debug("Using the updates repo from %s", release["repos"]["updates"])
            else:
                release["repos"]["updates"] = mash_path

            # Compose the tree, and raise an exception upon failure
            result = composer.compose(release)
            if result["result"] != "success":
                self.log.error(result)
                raise Exception("%s atomic compose failed" % tag)
            else:
                self.log.info("%s atomic tree compose successful", tag)
Example #13
File: stacks.py  Project: cgwalters/bodhi
def save_stack(request):
    """Save a stack"""
    data = request.validated
    db = request.db
    user = User.get(request.user.name, db)

    # Fetch or create the stack
    stack = Stack.get(data["name"], db)
    if not stack:
        stack = Stack(name=data["name"], users=[user])
        db.add(stack)
        db.flush()

    if stack.users or stack.groups:
        if user in stack.users:
            log.info("%s is an owner of the %s", user.name, stack.name)
        else:
            for group in user.groups:
                if group in stack.groups:
                    log.info("%s is a member of the %s group", user.name, stack.name)
                    break
            else:
                log.warn("%s is not an owner of the %s stack", user.name, stack.name)
                log.debug("owners = %s; groups = %s", stack.users, stack.groups)
                request.errors.add(
                    "body", "name", "%s does not have privileges" " to modify the %s stack" % (user.name, stack.name)
                )
                request.errors.status = HTTPForbidden.code
                return

    # Update the stack description
    desc = data["description"]
    if desc:
        stack.description = desc

    # Update the stack requirements
    # If the user passed in no value at all for requirements, then use
    # the site defaults.  If, however, the user passed in the empty string, we
    # assume they mean *really*, no requirements so we leave the value null.
    reqs = data["requirements"]
    if reqs is None:
        stack.requirements = request.registry.settings.get("site_requirements")
    elif reqs:
        stack.requirements = reqs

    stack.update_relationship("users", User, data, db)
    stack.update_relationship("groups", Group, data, db)

    # We make a special case out of packages here, since when a package is
    # added to a stack, we want to give it the same requirements as the stack
    # has. See https://github.com/fedora-infra/bodhi/issues/101
    new, same, rem = stack.update_relationship("packages", Package, data, db)
    if stack.requirements:
        additional = list(tokenize(stack.requirements))

        for name in new:
            package = Package.get(name, db)
            original = package.requirements
            original = [] if not original else list(tokenize(original))
            package.requirements = " ".join(list(set(original + additional)))

    log.info("Saved %s stack", data["name"])
    notifications.publish(topic="stack.save", msg=dict(stack=stack, agent=user.name))

    return dict(stack=stack)
Example #14
def new_update(request):
    """ Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's original title must be specified in
    the ``edited`` parameter.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    caveats = []
    try:

        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in data['builds']:
            name, version, release = request.buildinfo[nvr]['nvr']
            package = request.db.query(Package).filter_by(name=name).first()
            if not package:
                package = Package(name=name)
                request.db.add(package)
                request.db.flush()

            build = Build.get(nvr, request.db)

            if build is None:
                log.debug("Adding nvr %s", nvr)
                build = Build(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        if data.get('edited'):

            log.info('Editing update: %s' % data['edited'])

            assert len(releases) == 1, "Updates may not span multiple releases"
            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                                   'into %i, one for each release.' % len(releases)
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.title)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)
    except LockedUpdateException as e:
        log.warn(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add('body', 'builds',
                           'Unable to create update.  %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
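Depending on how many releases the submitted builds touch, the view above returns either a single serialized update or a dict with an 'updates' list; both shapes carry a top-level 'caveats' list. A hedged sketch of handling both shapes on the caller side (purely illustrative, not part of the project):

result = new_update(request)   # whatever the view returned
updates = result['updates'] if 'updates' in result else [result]
for caveat in result['caveats']:
    print(caveat['description'])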
Example #15
def get_all_packages():
    """ Get a list of all packages in Koji """
    log.debug('Fetching list of all packages...')
    koji = buildsys.get_session()
    return [pkg['package_name'] for pkg in koji.listPackages()]
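koji.listPackages() returns a list of dicts, and this helper keeps only the 'package_name' field. A usage sketch, assuming a configured buildsys session:

all_packages = get_all_packages()
if 'bodhi' in all_packages:
    print('bodhi is a known Koji package')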
Example #16
def new_update(request):
    """ Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's original title must be specified in
    the ``edited`` parameter.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    caveats = []
    try:

        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in data['builds']:
            name, version, release = request.buildinfo[nvr]['nvr']
            package = request.db.query(Package).filter_by(name=name).first()
            if not package:
                package = Package(name=name)
                request.db.add(package)
                request.db.flush()

            build = Build.get(nvr, request.db)

            if build is None:
                log.debug("Adding nvr %s", nvr)
                build = Build(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])


        if data.get('edited'):

            log.info('Editing update: %s' % data['edited'])

            assert len(releases) == 1, "Updates may not span multiple releases"
            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                    'into %i, one for each release.' % len(releases)
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('update = %r' % result)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)
    except LockedUpdateException as e:
        log.warn(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add(
            'body', 'builds', 'Unable to create update.  %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
Example #17
def save_stack(request):
    """Save a stack"""
    data = request.validated
    db = request.db
    user = User.get(request.user.name, db)

    # Fetch or create the stack
    stack = Stack.get(data['name'], db)
    if not stack:
        stack = Stack(name=data['name'], users=[user])
        db.add(stack)
        db.flush()

    if stack.users or stack.groups:
        if user in stack.users:
            log.info('%s is an owner of the %s', user.name, stack.name)
        else:
            for group in user.groups:
                if group in stack.groups:
                    log.info('%s is a member of the %s group', user.name,
                             stack.name)
                    break
            else:
                log.warn('%s is not an owner of the %s stack', user.name,
                         stack.name)
                log.debug('owners = %s; groups = %s', stack.users,
                          stack.groups)
                request.errors.add(
                    'body', 'name', '%s does not have privileges'
                    ' to modify the %s stack' % (user.name, stack.name))
                request.errors.status = HTTPForbidden.code
                return

    # Update the stack description
    desc = data['description']
    if desc:
        stack.description = desc

    # Update the stack requirements
    # If the user passed in no value at all for requirements, then use
    # the site defaults.  If, however, the user passed in the empty string, we
    # assume they mean *really*, no requirements so we leave the value null.
    reqs = data['requirements']
    if reqs is None:
        stack.requirements = request.registry.settings.get('site_requirements')
    elif reqs:
        stack.requirements = reqs

    stack.update_relationship('users', User, data, db)
    stack.update_relationship('groups', Group, data, db)

    # We make a special case out of packages here, since when a package is
    # added to a stack, we want to give it the same requirements as the stack
    # has. See https://github.com/fedora-infra/bodhi/issues/101
    new, same, rem = stack.update_relationship('packages', Package, data, db)
    if stack.requirements:
        additional = list(tokenize(stack.requirements))

        for name in new:
            package = Package.get(name, db)
            original = package.requirements
            original = [] if not original else list(tokenize(original))
            package.requirements = " ".join(list(set(original + additional)))

    log.info('Saved %s stack', data['name'])
    notifications.publish(topic='stack.save',
                          msg=dict(stack=stack, agent=user.name))

    return dict(stack=stack)
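The requirements handling above distinguishes a missing value from an explicit empty string: None falls back to the site_requirements setting, while '' deliberately leaves the stack's requirements null. A small illustration of the three cases (site_default and the sample values are hypothetical):

site_default = 'rpmlint'
for reqs in (None, '', 'upgradepath'):
    if reqs is None:
        effective = site_default   # no value given: fall back to site_requirements
    elif reqs:
        effective = reqs           # explicit value: use it as given
    else:
        effective = None           # empty string: deliberately leave it unset
    print('%r -> %r' % (reqs, effective))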