Example #1
    def create_pungi_config(self):
        loader = jinja2.FileSystemLoader(searchpath=config.get('pungi.basepath'))
        env = jinja2.Environment(loader=loader,
                                 autoescape=False,
                                 block_start_string='[%',
                                 block_end_string='%]',
                                 variable_start_string='[[',
                                 variable_end_string=']]',
                                 comment_start_string='[#',
                                 comment_end_string='#]')

        env.globals['id'] = self.id
        env.globals['release'] = self.release
        env.globals['request'] = self.request
        env.globals['updates'] = self.updates

        config_template = config.get(self.pungi_template_config_key)
        template = env.get_template(config_template)

        self._pungi_conf_dir = tempfile.mkdtemp(prefix='bodhi-pungi-%s-' % self.id)

        with open(os.path.join(self._pungi_conf_dir, 'pungi.conf'), 'w') as conffile:
            conffile.write(template.render())

        self.copy_additional_pungi_files(self._pungi_conf_dir, env)
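
The environment above swaps Jinja2's default {{ }} / {% %} delimiters for bracket forms so the template markup does not collide with Pungi's own syntax. A minimal sketch, not taken from the source, of how such an environment renders (the template string and value are illustrative):

import jinja2

env = jinja2.Environment(variable_start_string='[[', variable_end_string=']]',
                         block_start_string='[%', block_end_string='%]')
# '[[ id ]]' is substituted where the default syntax would have been '{{ id }}'.
print(env.from_string('compose_id = "[[ id ]]"').render(id='FEDORA-2017-10073'))
# compose_id = "FEDORA-2017-10073"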
Example #2
    def test_subsequent_comments_after_initial_push_comment(self):
        """
        If a user edits an update after Bodhi comments a testing_approval_msg,
        Bodhi should send an additional testing_approval_msg when the revised
        update is eligible to be pushed to stable.

        See https://github.com/fedora-infra/bodhi/issues/1310
        """
        update = self.db.query(models.Update).all()[0]
        update.request = None
        update.status = models.UpdateStatus.testing
        update.date_testing = datetime.utcnow() - timedelta(days=14)
        update.autotime = False
        self.db.flush()
        # Clear pending messages
        self.db.info['messages'] = []

        with patch('bodhi.server.scripts.approve_testing.initialize_db'):
            with patch('bodhi.server.scripts.approve_testing.get_appsettings',
                       return_value=''):
                with fml_testing.mock_sends(api.Message):
                    approve_testing.main(['nosetests', 'some_config.ini'])
                update.comment(self.db, "Removed build", 0, 'bodhi')
                with fml_testing.mock_sends(api.Message):
                    approve_testing.main(['nosetests', 'some_config.ini'])

        bodhi = self.db.query(models.User).filter_by(name='bodhi').one()
        cmnts = self.db.query(models.Comment).filter_by(update_id=update.id,
                                                        user_id=bodhi.id)
        # There are 3 comments: testing_approval_msg, build change, testing_approval_msg
        assert cmnts.count() == 3
        assert cmnts[0].text == config.get('testing_approval_msg')
        assert cmnts[1].text == 'Removed build'
        assert cmnts[2].text == config.get('testing_approval_msg')
Example #3
    def _get_master_repomd_url(self, arch):
        """
        Return the master repomd URL for the given arch.

        Look up the correct *_master_repomd setting in the config and use it to form the URL that
        wait_for_sync() will use to determine when the repository has been synchronized to the
        master mirror.

        Args:
            arch (basestring): The architecture for which a URL needs to be formed.

        Returns:
            basestring: A URL on the master mirror where the repomd.xml file should be synchronized.
        """
        release = self.release.id_prefix.lower().replace('-', '_')
        request = self.request.value

        # If the release has primary_arches defined in the config, we need to consider whether to
        # use the release's *alt_master_repomd setting.
        primary_arches = config.get(
            '{release}_{version}_primary_arches'.format(
                release=release, version=self.release.version))
        if primary_arches and arch not in primary_arches.split():
            key = '%s_%s_alt_master_repomd'
        else:
            key = '%s_%s_master_repomd'
        key = key % (release, request)

        master_repomd = config.get(key)
        if not master_repomd:
            raise ValueError("Could not find %s in the config file" % key)

        return master_repomd % (self.release.version, arch)
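
For context, a minimal sketch of the lookup performed above. The key names follow the %s_%s[_alt]_master_repomd pattern built in the method; the release name, version, and URL values are assumptions for illustration only:

config = {
    'fedora_28_primary_arches': 'armhfp x86_64',
    'fedora_testing_alt_master_repomd':
        'https://dl.example.org/alt/testing/%s/%s/repodata/repomd.xml',
}
# aarch64 is not in the hypothetical primary arches, so the *_alt_master_repomd key is
# chosen and its two %s slots are filled with the release version and the architecture.
key = '%s_%s_alt_master_repomd' % ('fedora', 'testing')
print(config[key] % ('28', 'aarch64'))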
Example #4
def send_mail(from_addr, to_addr, subject, body_text, headers=None):
    """
    Send an e-mail.

    Args:
        from_addr (basestring): The address to use in the From: header.
        to_addr (basestring): The address to send the e-mail to.
        subject (basestring): The subject of the e-mail.
        body_text (basestring): The body of the e-mail to be sent.
        headers (dict or None): A mapping of header fields to values to be included in the e-mail,
            if not None.
    """
    if not from_addr:
        from_addr = config.get('bodhi_email')
    if not from_addr:
        log.warning('Unable to send mail: bodhi_email not defined in the config')
        return
    if to_addr in config.get('exclude_mail'):
        return

    from_addr = to_bytes(from_addr)
    to_addr = to_bytes(to_addr)
    subject = to_bytes(subject)
    body_text = to_bytes(body_text)

    msg = [b'From: %s' % from_addr, b'To: %s' % to_addr]
    if headers:
        for key, value in headers.items():
            msg.append(b'%s: %s' % (to_bytes(key), to_bytes(value)))
    msg.append(b'X-Bodhi: %s' % to_bytes(config.get('default_email_domain')))
    msg += [b'Subject: %s' % subject, b'', body_text]
    body = b'\r\n'.join(msg)

    log.info('Sending mail to %s: %s', to_addr, subject)
    _send_mail(from_addr, to_addr, body)
Example #5
def get_top_testers():
    """
    Return a query of the 5 users that have submitted the most comments in the last 7 days.

    Returns:
        sqlalchemy.orm.query.Query: A SQLAlchemy query that contains the
                                  5 users that have submitted the most comments
                                  in the last 7 days, and their total number of
                                  comments in bodhi.
    """
    blacklist = config.get('stats_blacklist')
    days = config.get('top_testers_timeframe')
    start_time = datetime.datetime.utcnow() - datetime.timedelta(days=days)

    query = models.Session().query(
        models.User,
        sa.func.count(models.User.comments).label('count_1')).join(
            models.Comment)
    query = query\
        .order_by(sa.text('count_1 desc'))\
        .filter(models.Comment.timestamp > start_time)

    for user in blacklist:
        query = query.filter(models.User.name != str(user))

    return query\
        .group_by(models.User)\
        .limit(5)\
        .all()
Example #6
def copy_container(build, destination_registry=None, destination_tag=None):
    """
    Copy a ContainerBuild from the source registry to a destination registry under the given tag.

    Args:
        build (bodhi.server.models.ContainerBuild): The build you wish to copy from the source tag
            to the destination tag.
        destination_registry (str or None): The registry to copy the build into. If None (the
            default), the container.destination_registry setting is used.
        destination_tag (str or None): The destination tag you wish to copy the source image to. If
            None (the default), the build's version and release are used to form the destination
            tag.
    Raises:
        RuntimeError: If skopeo returns a non-0 exit code.
    """
    source_registry = config['container.source_registry']
    source_tag = '{}-{}'.format(build.nvr_version, build.nvr_release)

    if destination_tag is None:
        destination_tag = source_tag
    if destination_registry is None:
        destination_registry = config['container.destination_registry']

    repository = _get_build_repository(build)

    source_url = _container_image_url(source_registry, repository, source_tag)
    destination_url = _container_image_url(destination_registry, repository, destination_tag)

    skopeo_cmd = [
        config.get('skopeo.cmd'), 'copy', source_url, destination_url]
    if config.get('skopeo.extra_copy_flags'):
        for flag in reversed(config.get('skopeo.extra_copy_flags').split(',')):
            skopeo_cmd.insert(2, flag)
    cmd(skopeo_cmd, raise_on_error=True)
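
A short sketch (the flag values are illustrative, not from the source) of why the extra flags are iterated in reverse: inserting each one at index 2 leaves them in their original comma-separated order once the loop finishes:

skopeo_cmd = ['skopeo', 'copy', 'source_url', 'destination_url']
for flag in reversed('--dest-tls-verify=false,--remove-signatures'.split(',')):
    skopeo_cmd.insert(2, flag)
print(skopeo_cmd)
# ['skopeo', 'copy', '--dest-tls-verify=false', '--remove-signatures',
#  'source_url', 'destination_url']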
Example #7
def get_critpath_components(collection='master', component_type='rpm', components=None):
    """
    Return a list of critical path packages for a given collection, filtered by components.

    Args:
        collection (str): The collection/branch to search. Defaults to 'master'.
        component_type (str): The component type to search for. This only affects PDC
            queries. Defaults to 'rpm'.
        components (frozenset or None): The list of components we are interested in. If None (the
            default), all components for the given collection and type are returned.
    Returns:
        list: The critpath components for the given collection and type.
    Raises:
        RuntimeError: If the PDC did not give us a 200 code.
    """
    critpath_components = []
    critpath_type = config.get('critpath.type')
    if critpath_type != 'pdc' and component_type != 'rpm':
        log.warning('The critpath.type of "{0}" does not support searching for'
                    ' non-RPM components'.format(component_type))

    if critpath_type == 'pdc':
        critpath_components = get_critpath_components_from_pdc(
            collection, component_type, components)
    else:
        critpath_components = config.get('critpath_pkgs')

    # Filter the list of components down to what was requested, in case the specific path did
    # not take our request into account.
    if components is not None:
        critpath_components = [c for c in critpath_components if c in components]

    return critpath_components
Example #8
    def _test_extended_metadata(self, has_alias):
        update = self.db.query(Update).one()

        # Pretend it's pushed to testing
        update.status = UpdateStatus.testing
        update.request = None
        if not has_alias:
            update.alias = None
        update.date_pushed = datetime.utcnow()
        DevBuildsys.__tagged__[update.title] = ['f17-updates-testing']

        # Generate the XML
        md = UpdateInfoMetadata(update.release, update.request, self.db,
                                self.tempcompdir)

        # Insert the updateinfo.xml into the repository
        md.insert_updateinfo(self.tempcompdir)
        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, 'mutt-1.5.14-1.fc13')
        self.assertIsNone(notice)

        self.assertEquals(len(uinfo.updates), 1)
        notice = uinfo.updates[0]

        self.assertIsNotNone(notice)
        self.assertEquals(notice.title, update.title)
        self.assertEquals(notice.release, update.release.long_name)
        self.assertEquals(notice.status, update.status.value)
        if update.date_modified:
            self.assertEquals(notice.updated_date, update.date_modified)
        self.assertEquals(notice.fromstr, config.get('bodhi_email'))
        self.assertEquals(notice.rights, config.get('updateinfo_rights'))
        self.assertEquals(notice.description, update.notes)
        self.assertEquals(notice.id, update.alias)
        bug = notice.references[0]
        self.assertEquals(bug.href, update.bugs[0].url)
        self.assertEquals(bug.id, '12345')
        self.assertEquals(bug.type, 'bugzilla')
        cve = notice.references[1]
        self.assertEquals(cve.type, 'cve')
        self.assertEquals(cve.href, update.cves[0].url)
        self.assertEquals(cve.id, update.cves[0].cve_id)

        col = notice.collections[0]
        self.assertEquals(col.name, update.release.long_name)
        self.assertEquals(col.shortname, update.release.name)

        pkg = col.packages[0]
        self.assertEquals(pkg.epoch, '0')
        self.assertEquals(pkg.name, 'TurboGears')
        self.assertEquals(pkg.src, (
            'https://download.fedoraproject.org/pub/fedora/linux/updates/testing/17/SRPMS/T/'
            'TurboGears-1.0.2.2-2.fc17.src.rpm'))
        self.assertEquals(pkg.version, '1.0.2.2')
        self.assertFalse(pkg.reboot_suggested)
        self.assertEquals(pkg.arch, 'src')
        self.assertEquals(pkg.filename, 'TurboGears-1.0.2.2-2.fc17.src.rpm')
Example #9
    def test_subsequent_comments_after_initial_push_comment(self):
        """
        If a user edits an update after Bodhi comments a testing_approval_msg,
        Bodhi should send an additional testing_approval_msg when the revised
        update is eligible to be pushed to stable.

        See https://github.com/fedora-infra/bodhi/issues/1310
        """
        update = self.db.query(models.Update).all()[0]
        update.request = None
        update.status = models.UpdateStatus.testing
        update.date_testing = datetime.utcnow() - timedelta(days=14)
        self.db.flush()

        with patch('bodhi.server.scripts.approve_testing.initialize_db'):
            with patch('bodhi.server.scripts.approve_testing.get_appsettings', return_value=''):
                approve_testing.main(['nosetests', 'some_config.ini'])
                update.comment(self.db, u"Removed build", 0, u'bodhi')
                approve_testing.main(['nosetests', 'some_config.ini'])

        bodhi = self.db.query(models.User).filter_by(name=u'bodhi').one()
        cmnts = self.db.query(models.Comment).filter_by(update_id=update.id, user_id=bodhi.id)
        # There are 3 comments: testing_approval_msg, build change, testing_approval_msg
        self.assertEqual(cmnts.count(), 3)
        self.assertEqual(
            cmnts[0].text,
            config.get('testing_approval_msg') %
            update.release.mandatory_days_in_testing)
        self.assertEqual(cmnts[1].text, 'Removed build')
        self.assertEqual(
            cmnts[2].text,
            config.get('testing_approval_msg') %
            update.release.mandatory_days_in_testing)
Example #10
def get_critpath_components(collection='master', component_type='rpm'):
    """
    Return a list of critical path packages for a given collection.

    Args:
        collection (basestring): The collection/branch to search. Defaults to 'master'.
        component_type (basestring): The component type to search for. This only affects PDC
            queries. Defaults to 'rpm'.
    Returns:
        list: The critpath components for the given collection and type.
    """
    critpath_components = []
    critpath_type = config.get('critpath.type')
    if critpath_type != 'pdc' and component_type != 'rpm':
        log.warning('The critpath.type of "{0}" does not support searching for'
                    ' non-RPM components'.format(component_type))

    if critpath_type == 'pkgdb':
        from pkgdb2client import PkgDB
        pkgdb = PkgDB(config.get('pkgdb_url'))
        results = pkgdb.get_critpath_packages(branches=collection)
        if collection in results['pkgs']:
            critpath_components = results['pkgs'][collection]
    elif critpath_type == 'pdc':
        critpath_components = get_critpath_components_from_pdc(
            collection, component_type)
    else:
        critpath_components = config.get('critpath_pkgs')
    return critpath_components
Example #11
def send_mail(from_addr: str,
              to_addr: str,
              subject: str,
              body_text: str,
              headers: typing.Optional[dict] = None) -> None:
    """
    Send an e-mail.

    Args:
        from_addr: The address to use in the From: header.
        to_addr: The address to send the e-mail to.
        subject: The subject of the e-mail.
        body_text: The body of the e-mail to be sent.
        headers: A mapping of header fields to values to be included in the e-mail,
            if not None.
    """
    if not from_addr:
        from_addr = config.get('bodhi_email')
    if not from_addr:
        log.warning(
            'Unable to send mail: bodhi_email not defined in the config')
        return
    if to_addr in config.get('exclude_mail'):
        return

    msg = [f'From: {from_addr}', f'To: {to_addr}']
    if headers:
        for key, value in headers.items():
            msg.append(f'{key}: {value}')
    msg.append(f"X-Bodhi: {config.get('default_email_domain')}")
    msg += [f'Subject: {subject}', '', body_text]
    body = '\r\n'.join(msg)

    log.info('Sending mail to %s: %s', to_addr, subject)
    _send_mail(from_addr, to_addr, body)
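
A hedged usage sketch of the function above; the addresses, subject, and header are placeholders, and it assumes bodhi_email, exclude_mail, and default_email_domain are present in the loaded config:

send_mail('updates@example.org', 'maintainer@example.org',
          'Your update has been pushed to testing',
          'The update example-1.0-1.fc30 is now available in updates-testing.',
          headers={'X-Bodhi-Update-Title': 'example-1.0-1.fc30'})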
Example #12
    def test_build_not_in_builds(self):
        """
        Test correct behavior when a build in update.builds isn't found in self.builds() and
        koji.getBuild() is called instead.
        """
        update = self.db.query(Update).one()
        now = datetime(year=2018, month=2, day=8, hour=12, minute=41, second=4)
        update.date_pushed = now
        update.date_modified = now
        md = UpdateInfoMetadata(update.release, update.request, self.db, self.temprepo,
                                close_shelf=False)

        md.add_update(update)

        md.shelf.close()

        assert len(md.uinfo.updates) == 1
        assert md.uinfo.updates[0].title == update.title
        assert md.uinfo.updates[0].release == update.release.long_name
        assert md.uinfo.updates[0].status == update.status.value
        assert md.uinfo.updates[0].updated_date == update.date_modified
        assert md.uinfo.updates[0].fromstr == config.get('bodhi_email')
        assert md.uinfo.updates[0].rights == config.get('updateinfo_rights')
        assert md.uinfo.updates[0].description == update.notes
        assert md.uinfo.updates[0].id == update.alias
        assert md.uinfo.updates[0].severity == 'Moderate'
        assert len(md.uinfo.updates[0].references) == 1
        bug = md.uinfo.updates[0].references[0]
        assert bug.href == update.bugs[0].url
        assert bug.id == '12345'
        assert bug.type == 'bugzilla'
        assert len(md.uinfo.updates[0].collections) == 1
        col = md.uinfo.updates[0].collections[0]
        assert col.name == update.release.long_name
        assert col.shortname == update.release.name
        assert len(col.packages) == 2
        pkg = col.packages[0]
        assert pkg.epoch == '0'
        # It's a little goofy, but the DevBuildsys is going to return TurboGears rpms when its
        # listBuildRPMs() method is called, so let's just roll with it.
        assert pkg.name == 'TurboGears'
        assert pkg.src == \
            ('https://download.fedoraproject.org/pub/fedora/linux/updates/17/SRPMS/T/'
             'TurboGears-1.0.2.2-2.fc17.src.rpm')
        assert pkg.version == '1.0.2.2'
        assert not pkg.reboot_suggested
        assert pkg.arch == 'src'
        assert pkg.filename == 'TurboGears-1.0.2.2-2.fc17.src.rpm'
        pkg = col.packages[1]
        assert pkg.epoch == '0'
        assert pkg.name == 'TurboGears'
        assert pkg.src == \
            ('https://download.fedoraproject.org/pub/fedora/linux/updates/17/i386/T/'
             'TurboGears-1.0.2.2-2.fc17.noarch.rpm')
        assert pkg.version == '1.0.2.2'
        assert not pkg.reboot_suggested
        assert pkg.arch == 'noarch'
        assert pkg.filename == 'TurboGears-1.0.2.2-2.fc17.noarch.rpm'
Example #13
    def _test_extended_metadata(self):
        update = self.db.query(Update).one()

        # Pretend it's pushed to testing
        update.status = UpdateStatus.testing
        update.request = None
        update.date_pushed = datetime.utcnow()
        DevBuildsys.__tagged__[update.title] = ['f17-updates-testing']

        # Generate the XML
        md = UpdateInfoMetadata(update.release, update.request, self.db,
                                self.tempcompdir)

        # Insert the updateinfo.xml into the repository
        md.insert_updateinfo(self.tempcompdir)
        updateinfos = self._verify_updateinfos(self.repodata)

        for updateinfo in updateinfos:
            # Read and verify the updateinfo.xml.gz
            uinfo = createrepo_c.UpdateInfo(updateinfo)
            notice = self.get_notice(uinfo, 'mutt-1.5.14-1.fc13')
            assert notice is None

            assert len(uinfo.updates) == 1
            notice = uinfo.updates[0]

            assert notice is not None
            assert notice.title == update.title
            assert notice.release == update.release.long_name
            assert notice.status == update.status.value
            if update.date_modified:
                assert notice.updated_date == update.date_modified
            assert notice.fromstr == config.get('bodhi_email')
            assert notice.rights == config.get('updateinfo_rights')
            assert notice.description == update.notes
            assert notice.id == update.alias
            assert notice.severity == 'Moderate'
            bug = notice.references[0]
            assert bug.href == update.bugs[0].url
            assert bug.id == '12345'
            assert bug.type == 'bugzilla'

            col = notice.collections[0]
            assert col.name == update.release.long_name
            assert col.shortname == update.release.name

            pkg = col.packages[0]
            assert pkg.epoch == '0'
            assert pkg.name == 'TurboGears'
            assert pkg.src == \
                ('https://download.fedoraproject.org/pub/fedora/linux/updates/testing/17/SRPMS/T/'
                 'TurboGears-1.0.2.2-2.fc17.src.rpm')
            assert pkg.version == '1.0.2.2'
            assert not pkg.reboot_suggested
            assert not pkg.relogin_suggested
            assert pkg.arch == 'src'
            assert pkg.filename == 'TurboGears-1.0.2.2-2.fc17.src.rpm'
Example #14
    def __init__(self, release, request, db, composedir, close_shelf=True):
        """
        Initialize the UpdateInfoMetadata object.

        Args:
            release (bodhi.server.models.Release): The Release that is being composed.
            request (bodhi.server.models.UpdateRequest): The Request that is being composed.
            db (): A database session to be used for queries.
            composedir (str): A path to the composedir.
            close_shelf (bool): Whether to close the shelve, which is used to cache updateinfo
                between composes.
        """
        self.request = request
        if request is UpdateRequest.stable:
            self.tag = release.stable_tag
        else:
            self.tag = release.testing_tag

        self.db = db
        self.updates = set()
        self.builds = {}
        self._from = config.get('bodhi_email')
        if config.get('cache_dir'):
            self.shelf = shelve.open(
                os.path.join(config.get('cache_dir'), '%s.shelve' % self.tag))
        else:
            # If we have no cache dir, let's at least cache in-memory.
            self.shelf = {}
            close_shelf = False
        self._fetch_updates()

        self.uinfo = cr.UpdateInfo()

        self.comp_type = cr.XZ

        # Some repos, such as FEDORA-EPEL, are primarily targeted at
        # distributions that use the yum client, which does not support zchunk metadata.
        self.legacy_repos = ['FEDORA-EPEL']
        self.zchunk = True

        if release.id_prefix in self.legacy_repos:
            # FIXME: I'm not sure which versions of RHEL support xz metadata
            # compression, so use the lowest common denominator for now.
            self.comp_type = cr.BZ2

            log.warning(
                'Zchunk data is disabled for repo %s until it moves to a client'
                ' with Zchunk support', release.id_prefix)
            self.zchunk = False

        self.uinfo = cr.UpdateInfo()
        for update in self.updates:
            self.add_update(update)

        if close_shelf:
            self.shelf.close()
Example #15
def send_releng(subject, body):
    """
    Send the Release Engineering team a message.

    Args:
        subject (basestring): The subject of the e-mail.
        body (basestring): The body of the e-mail.
    """
    send_mail(config.get('bodhi_email'), config.get('release_team_address'),
              subject, body)
Example #16
    def work(username, size):
        openid = "http://" + config.get('openid_template').format(username=username) + "/"
        if config.get('libravatar_enabled'):
            if config.get('libravatar_dns'):
                return get_libravatar_url(openid, https, size)
            else:
                query = urlencode({'s': size, 'd': 'retro'})
                hash = hashlib.sha256(openid.encode('utf-8')).hexdigest()
                template = "https://seccdn.libravatar.org/avatar/%s?%s"
                return template % (hash, query)

        return 'libravatar.org'
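
A self-contained sketch of what the fallback branch above computes for a sample OpenID (the OpenID value and the size are illustrative):

import hashlib
from urllib.parse import urlencode

openid = 'http://bob.id.fedoraproject.org/'
query = urlencode({'s': 64, 'd': 'retro'})
digest = hashlib.sha256(openid.encode('utf-8')).hexdigest()
print('https://seccdn.libravatar.org/avatar/%s?%s' % (digest, query))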
Example #17
    def _connect(self):
        user = config.get('bodhi_email')
        password = config.get('bodhi_password')
        url = config.get("bz_server")
        log.info("Using BZ URL %s" % url)
        if user and password:
            self._bz = bugzilla.Bugzilla(url=url,
                                         user=user, password=password,
                                         cookiefile=None, tokenfile=None)
        else:
            self._bz = bugzilla.Bugzilla(url=url,
                                         cookiefile=None, tokenfile=None)
Example #18
def can_waive_test_results(context, update):
    """
    Return True or False if the test results can be waived on an update.

    Args:
        context (mako.runtime.Context): The current template rendering context. Unused.
        update (bodhi.server.models.Update): The Update on which we are going to waive test results.
    Returns:
        bool: Indicating if the test results can be waived on the given update.
    """
    return config.get('test_gating.required') and not update.test_gating_passed \
        and config.get('waiverdb.access_token') and update.status.description != 'stable'
Example #19
    def _connect(self):
        """Create a Bugzilla client instance and store it on self._bz."""
        user = config.get('bodhi_email')
        password = config.get('bodhi_password')
        url = config.get("bz_server")
        log.info("Using BZ URL %s" % url)
        if user and password:
            self._bz = bugzilla.Bugzilla(url=url,
                                         user=user, password=password,
                                         cookiefile=None, tokenfile=None)
        else:
            self._bz = bugzilla.Bugzilla(url=url,
                                         cookiefile=None, tokenfile=None)
Example #20
    def mash(self):
        if self.path and self.path in self.state['completed_repos']:
            self.log.info('Skipping completed repo: %s', self.path)
            return

        # We have a thread-local devnull FD so that we can close it after the mash is done
        self.devnull = open(os.devnull, 'wb')

        self.create_pungi_config()
        config_file = os.path.join(self._pungi_conf_dir, 'pungi.conf')
        self._label = '%s-%s' % (config.get('pungi.labeltype'),
                                 datetime.utcnow().strftime('%Y%m%d.%H%M'))
        pungi_cmd = [config.get('pungi.cmd'),
                     '--config', config_file,
                     '--quiet',
                     '--target-dir', self.mash_dir,
                     '--old-composes', self.mash_dir,
                     '--no-latest-link',
                     '--label', self._label]
        pungi_cmd += config.get('pungi.extracmdline')

        self.log.info('Running the pungi command: %s', pungi_cmd)
        mash_process = subprocess.Popen(pungi_cmd,
                                        # Nope. No shell for you
                                        shell=False,
                                        # Should be useless, but just to set something predictable
                                        cwd=self.mash_dir,
                                        # Pungi will log its stdout into pungi.global.log
                                        stdout=self.devnull,
                                        # Stderr should also go to pungi.global.log if it starts
                                        stderr=subprocess.PIPE,
                                        # We will never have additional input
                                        stdin=self.devnull)
        self.log.info('Pungi running as PID: %s', mash_process.pid)
        # Since the mash process takes a long time, we can safely just wait 3 seconds to abort the
        # entire mash early if Pungi fails to start up correctly.
        time.sleep(3)
        if mash_process.poll() not in [0, None]:
            self.log.error('Pungi process terminated with error within 3 seconds! Abandoning!')
            _, err = mash_process.communicate()
            self.log.error('Stderr: %s', err)
            self.devnull.close()
            raise Exception('Pungi returned error, aborting!')

        # This is used to find the generated directory post-mash.
        # This is stored at the time of start so that even if the update run crosses the year
        # border, we can still find it.
        self._startyear = datetime.utcnow().year

        return mash_process
Example #21
    def send_testing_digest(self):
        """Send digest mail to mailing lists"""
        self.log.info('Sending updates-testing digest')
        sechead = u'The following %s Security updates need testing:\n Age  URL\n'
        crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
        testhead = u'The following builds have been pushed to %s updates-testing\n\n'

        for prefix, content in self.testing_digest.items():
            release = self.db.query(Release).filter_by(long_name=prefix).one()
            test_list_key = '%s_test_announce_list' % (
                release.id_prefix.lower().replace('-', '_'))
            test_list = config.get(test_list_key)
            if not test_list:
                log.warning('%r undefined. Not sending updates-testing digest',
                            test_list_key)
                continue

            log.debug("Sending digest for updates-testing %s" % prefix)
            maildata = u''
            security_updates = self.get_security_updates(prefix)
            if security_updates:
                maildata += sechead % prefix
                for update in security_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            critpath_updates = self.get_unapproved_critpath_updates(prefix)
            if critpath_updates:
                maildata += crithead % prefix
                for update in self.get_unapproved_critpath_updates(prefix):
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            maildata += testhead % prefix
            updlist = sorted(content.keys())
            for pkg in updlist:
                maildata += u'    %s\n' % pkg
            maildata += u'\nDetails about builds:\n\n'
            for nvr in updlist:
                maildata += u"\n" + self.testing_digest[prefix][nvr]

            mail.send_mail(config.get('bodhi_email'), test_list,
                           '%s updates-testing report' % prefix, maildata)
Example #22
    def __init__(self, release, request, db, mashdir, close_shelf=True):
        """
        Initialize the UpdateInfoMetadata object.

        Args:
            release (bodhi.server.models.Release): The Release that is being mashed.
            request (bodhi.server.models.UpdateRequest): The Request that is being mashed.
            db (): A database session to be used for queries.
            mashdir (basestring): A path to the mashdir.
            close_shelf (bool): Whether to close the shelve, which is used to cache updateinfo
                between mashes.
        """
        self.request = request
        if request is UpdateRequest.stable:
            self.tag = release.stable_tag
        else:
            self.tag = release.testing_tag

        self.db = db
        self.updates = set()
        self.builds = {}
        self._from = config.get('bodhi_email')
        if config.get('cache_dir'):
            self.shelf = shelve.open(
                os.path.join(config.get('cache_dir'), '%s.shelve' % self.tag))
        else:
            # If we have no cache dir, let's at least cache in-memory.
            self.shelf = {}
            close_shelf = False
        self._fetch_updates()

        self.uinfo = cr.UpdateInfo()

        self.comp_type = cr.XZ

        if release.id_prefix == u'FEDORA-EPEL':
            # FIXME: I'm not sure which versions of RHEL support xz metadata
            # compression, so use the lowest common denominator for now.
            self.comp_type = cr.BZ2

        self.uinfo = cr.UpdateInfo()
        for update in self.updates:
            if not update.alias:
                update.assign_alias()
            self.add_update(update)

        if close_shelf:
            self.shelf.close()
Example #23
    def test_update_conflicting_build_not_pushed(self, build_creation_time):
        """
        Ensure that an update that has conflicting builds will not get pushed.
        """
        update = self.db.query(models.Update).all()[0]
        update.autokarma = False
        update.autotime = True
        update.request = None
        update.stable_karma = 1
        update.stable_days = 7
        update.date_testing = datetime.utcnow() - timedelta(days=8)
        update.status = models.UpdateStatus.testing
        update.release.composed_by_bodhi = False
        update.from_tag = 'f17-build-side-1234'

        # Clear pending messages
        self.db.info['messages'] = []
        self.db.commit()

        with patch('bodhi.server.scripts.approve_testing.initialize_db'):
            with patch('bodhi.server.scripts.approve_testing.get_appsettings',
                       return_value=''):
                with fml_testing.mock_sends(api.Message):
                    approve_testing.main(['nosetests', 'some_config.ini'])

        assert update.status == models.UpdateStatus.pending

        bodhi = self.db.query(models.User).filter_by(name='bodhi').one()
        cmnts = self.db.query(models.Comment).filter_by(update_id=update.id,
                                                        user_id=bodhi.id)
        assert cmnts.count() == 2
        assert cmnts[0].text == config.get('testing_approval_msg')
        assert cmnts[1].text == "This update cannot be pushed to stable. "\
            "These builds bodhi-2.0-1.fc17 have a more recent build in koji's "\
            f"{update.release.stable_tag} tag."
Example #24
    def test_autotime_update_no_autokarma_met_karma_requirements_get_comments(
            self):
        """
        Ensure that an autotime update which meets the karma requirements but has autokarma off
        gets a comment letting the packager know that the update can be pushed to stable.
        """
        update = self.db.query(models.Update).all()[0]
        update.autokarma = False
        update.autotime = True
        update.request = None
        update.stable_karma = 1
        update.stable_days = 10
        update.date_testing = datetime.utcnow() - timedelta(days=0)
        update.status = models.UpdateStatus.testing
        # Clear pending messages
        self.db.info['messages'] = []

        update.comment(self.db, u'Works great', author=u'luke', karma=1)
        with fml_testing.mock_sends(api.Message):
            self.db.commit()

        with patch('bodhi.server.scripts.approve_testing.initialize_db'):
            with patch('bodhi.server.scripts.approve_testing.get_appsettings',
                       return_value=''):
                with fml_testing.mock_sends(api.Message):
                    approve_testing.main(['nosetests', 'some_config.ini'])

        assert update.request is None

        bodhi = self.db.query(models.User).filter_by(name='bodhi').one()
        cmnts = self.db.query(models.Comment).filter_by(update_id=update.id,
                                                        user_id=bodhi.id)
        assert cmnts.count() == 1
        assert cmnts[0].text == config.get('testing_approval_msg')
Example #25
    def test_non_autokarma_update_with_unmet_karma_requirement_after_time_met(
            self):
        """
        A non-autokarma update without enough karma that reaches mandatory days in testing should
        get a comment from Bodhi that the update can be pushed to stable.

        See https://github.com/fedora-infra/bodhi/issues/1094
        """
        update = self.db.query(models.Update).all()[0]
        update.autokarma = False
        update.autotime = False
        update.request = None
        update.stable_karma = 10
        update.status = models.UpdateStatus.testing
        update.date_testing = datetime.utcnow() - timedelta(days=7)
        self.db.flush()
        # Clear pending messages
        self.db.info['messages'] = []

        with patch('bodhi.server.scripts.approve_testing.initialize_db'):
            with patch('bodhi.server.scripts.approve_testing.get_appsettings',
                       return_value=''):
                with fml_testing.mock_sends(api.Message):
                    approve_testing.main(['nosetests', 'some_config.ini'])

        # The update should have one positive karma and no negative karmas
        assert update._composite_karma == (1, 0)
        bodhi = self.db.query(models.User).filter_by(name='bodhi').one()
        comment_q = self.db.query(models.Comment).filter_by(
            update_id=update.id, user_id=bodhi.id)
        assert comment_q.count() == 1
        assert comment_q[0].text == config.get('testing_approval_msg')
Example #26
    def test_autotime_update_no_autokarma_met_karma_and_time_requirements_get_pushed(
            self):
        """
        Ensure that an autotime update which met the karma and time requirements but
        has autokarma off gets pushed.
        """
        update = self.db.query(models.Update).all()[0]
        update.autokarma = False
        update.autotime = True
        update.request = None
        update.stable_karma = 1
        update.stable_days = 0
        update.date_testing = datetime.utcnow() - timedelta(days=0)
        update.status = models.UpdateStatus.testing
        # Clear pending messages
        self.db.info['messages'] = []

        update.comment(self.db, u'Works great', author=u'luke', karma=1)
        with fml_testing.mock_sends(api.Message):
            self.db.commit()

        with fml_testing.mock_sends(api.Message, api.Message):
            approve_testing_main()

        assert update.request == models.UpdateRequest.stable

        bodhi = self.db.query(models.User).filter_by(name='bodhi').one()
        cmnts = self.db.query(models.Comment).filter_by(update_id=update.id,
                                                        user_id=bodhi.id)
        assert cmnts.count() == 2
        assert cmnts[0].text == config.get('testing_approval_msg')
        assert cmnts[1].text == \
            'This update has been submitted for stable by bodhi. '
Example #27
def _send_mail(from_addr, to_addr, body):
    """
    Send emails with smtplib. This is a lower-level function than send_mail().

    Args:
        from_addr (str): The e-mail address to use in the envelope from field.
        to_addr (str): The e-mail address to use in the envelope to field.
        body (str): The body of the e-mail.
    """
    smtp_server = config.get('smtp_server')
    if not smtp_server:
        log.info('Not sending email: No smtp_server defined')
        return
    smtp = None
    try:
        log.debug('Connecting to %s', smtp_server)
        smtp = smtplib.SMTP(smtp_server)
        smtp.sendmail(from_addr, [to_addr], body)
    except smtplib.SMTPRecipientsRefused as e:
        log.warning('"recipient refused" for %r, %r' % (to_addr, e))
    except Exception:
        log.exception('Unable to send mail')
    finally:
        if smtp:
            smtp.quit()
Example #28
    def modified(self, bug_id: typing.Union[int, str], comment: str) -> None:
        """
        Change the status of this bug to MODIFIED if not already MODIFIED, VERIFIED, or CLOSED.

        This method will only operate on bugs that are associated with products listed
        in the bz_products setting.

        This will also comment on the bug stating that an update has been submitted.

        Args:
            bug_id: The bug you wish to mark MODIFIED.
            comment: The comment to be included with the state change.
        """
        try:
            bug = self.bz.getbug(bug_id)
            if bug.product not in config.get('bz_products'):
                log.info("Skipping set modified on {0!r} bug #{1}".format(bug.product, bug_id))
                return
            if bug.bug_status not in ('MODIFIED', 'VERIFIED', 'CLOSED'):
                log.info('Setting bug #%s status to MODIFIED' % bug_id)
                bug.setstatus('MODIFIED', comment=comment)
            else:
                bug.addcomment(comment)
        except xmlrpc_client.Fault as err:
            if err.faultCode == 102:
                log.info('Cannot retrieve private bug #%d.', bug_id)
            else:
                log.exception(
                    "Got fault from Bugzilla on #%d: fault code: %d, fault string: %s",
                    bug_id, err.faultCode, err.faultString)
        except Exception:
            log.exception("Unable to alter bug #%s" % bug_id)
Example #29
    def on_qa(self, bug_id: int, comment: str) -> None:
        """
        Change the status of this bug to ON_QA if it is not already ON_QA, VERIFIED, or CLOSED.

        This method will only operate on bugs that are associated with products listed
        in the bz_products setting.

        This will also comment on the bug with some details on how to test and provide feedback for
        this update.

        Args:
            bug_id: The bug id you wish to set to ON_QA.
            comment: The comment to be included with the state change.
        """
        try:
            bug = self.bz.getbug(bug_id)
            if bug.product not in config.get('bz_products'):
                log.info("Skipping set on_qa on {0!r} bug #{1}".format(bug.product, bug_id))
                return
            if bug.bug_status not in ('ON_QA', 'VERIFIED', 'CLOSED'):
                log.debug("Setting Bug #%d to ON_QA" % bug_id)
                bug.setstatus('ON_QA', comment=comment)
            else:
                bug.addcomment(comment)
        except xmlrpc_client.Fault as err:
            if err.faultCode == 102:
                log.info('Cannot retrieve private bug #%d.', bug_id)
            else:
                log.exception(
                    "Got fault from Bugzilla on #%d: fault code: %d, fault string: %s",
                    bug_id, err.faultCode, err.faultString)
        except Exception:
            log.exception("Unable to alter bug #%d" % bug_id)
Example #30
def validate_expiration_date(request, **kwargs):
    """
    Ensure the expiration date is in the future.

    Args:
        request (pyramid.util.Request): The current request.
        kwargs (dict): The kwargs of the related service definition. Unused.
    """
    expiration_date = request.validated.get('expiration_date')

    if expiration_date is None:
        return

    now = datetime.utcnow()

    if expiration_date <= now:
        request.errors.add('body', 'expiration_date',
                           'Expiration date in the past')
        return

    days = config.get('buildroot_limit')
    limit = now + timedelta(days=days)
    if expiration_date > limit:
        request.errors.add('body', 'expiration_date',
                           'Expiration date may not be longer than %i' % days)
        return

    request.validated['expiration_date'] = expiration_date
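
To make the accepted window concrete, a small sketch with an assumed buildroot_limit of 31 days (the real value comes from the config):

from datetime import datetime, timedelta

buildroot_limit = 31
now = datetime.utcnow()
limit = now + timedelta(days=buildroot_limit)
print(now < now + timedelta(days=7) <= limit)   # True: one week out is accepted
print(now + timedelta(days=60) <= limit)        # False: beyond the limit, rejected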