Example #1
    def test_testing_digest(self, mail, *args):
        t = MasherThread(u'F17', u'testing', [u'bodhi-2.0-1.fc17'],
                         log, self.db_factory, self.tempdir)
        with self.db_factory() as session:
            t.db = session
            t.work()
            t.db = None
        self.assertEquals(t.testing_digest[u'Fedora 17'][u'bodhi-2.0-1.fc17'], """\
================================================================================
 libseccomp-2.1.0-1.fc20 (FEDORA-%s-0001)
 Enhanced seccomp library
--------------------------------------------------------------------------------
Update Information:

Useful details!
--------------------------------------------------------------------------------
References:

  [ 1 ] Bug #12345 - None
        https://bugzilla.redhat.com/show_bug.cgi?id=12345
  [ 2 ] CVE-1985-0110
        http://www.cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-1985-0110
--------------------------------------------------------------------------------

""" % time.strftime('%Y'))

        mail.assert_called_with(config.get('bodhi_email'), config.get('fedora_test_announce_list'), mock.ANY)
        assert len(mail.mock_calls) == 2, len(mail.mock_calls)
        body = mail.mock_calls[1][1][2]
        assert body.startswith('From: [email protected]\r\nTo: %s\r\nSubject: Fedora 17 updates-testing report\r\n\r\nThe following builds have been pushed to Fedora 17 updates-testing\n\n    bodhi-2.0-1.fc17\n\nDetails about builds:\n\n\n================================================================================\n libseccomp-2.1.0-1.fc20 (FEDORA-%s-0001)\n Enhanced seccomp library\n--------------------------------------------------------------------------------\nUpdate Information:\n\nUseful details!\n--------------------------------------------------------------------------------\nReferences:\n\n  [ 1 ] Bug #12345 - None\n        https://bugzilla.redhat.com/show_bug.cgi?id=12345\n  [ 2 ] CVE-1985-0110\n        http://www.cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-1985-0110\n--------------------------------------------------------------------------------\n\n' % (config.get('fedora_test_announce_list'), time.strftime('%Y'))), repr(body)
Example #2
    def test_testing_digest(self, mail, *args):
        t = MasherThread(u'F17', u'testing', [u'bodhi-2.0-1.fc17'],
                         log, self.db_factory, self.tempdir)
        with self.db_factory() as session:
            t.db = session
            t.work()
            t.db = None
        self.assertEquals(t.testing_digest[u'Fedora 17'][u'bodhi-2.0-1.fc17'], """\
================================================================================
 libseccomp-2.1.0-1.fc20 (FEDORA-%s-a3bbe1a8f2)
 Enhanced seccomp library
--------------------------------------------------------------------------------
Update Information:

Useful details!
--------------------------------------------------------------------------------
References:

  [ 1 ] Bug #12345 - None
        https://bugzilla.redhat.com/show_bug.cgi?id=12345
  [ 2 ] CVE-1985-0110
        http://www.cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-1985-0110
--------------------------------------------------------------------------------

""" % time.strftime('%Y'))

        mail.assert_called_with(config.get('bodhi_email'), config.get('fedora_test_announce_list'), mock.ANY)
        assert len(mail.mock_calls) == 2, len(mail.mock_calls)
        body = mail.mock_calls[1][1][2]
        assert body.startswith('From: [email protected]\r\nTo: %s\r\nX-Bodhi: fedoraproject.org\r\nSubject: Fedora 17 updates-testing report\r\n\r\nThe following builds have been pushed to Fedora 17 updates-testing\n\n    bodhi-2.0-1.fc17\n\nDetails about builds:\n\n\n================================================================================\n libseccomp-2.1.0-1.fc20 (FEDORA-%s-a3bbe1a8f2)\n Enhanced seccomp library\n--------------------------------------------------------------------------------\nUpdate Information:\n\nUseful details!\n--------------------------------------------------------------------------------\nReferences:\n\n  [ 1 ] Bug #12345 - None\n        https://bugzilla.redhat.com/show_bug.cgi?id=12345\n  [ 2 ] CVE-1985-0110\n        http://www.cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-1985-0110\n--------------------------------------------------------------------------------\n\n' % (config.get('fedora_test_announce_list'), time.strftime('%Y'))), repr(body)
Example #3
    def test_extended_metadata(self):
        update = self.db.query(Update).one()

        # Pretend it's pushed to testing
        update.status = UpdateStatus.testing
        update.request = None
        update.date_pushed = datetime.utcnow()
        DevBuildsys.__tagged__[update.title] = ['f17-updates-testing']

        # Generate the XML
        md = ExtendedMetadata(update.release, update.request, self.db,
                              self.temprepo)

        # Insert the updateinfo.xml into the repository
        md.insert_updateinfo()
        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, 'mutt-1.5.14-1.fc13')
        self.assertIsNone(notice)

        self.assertEquals(len(uinfo.updates), 1)
        notice = uinfo.updates[0]

        self.assertIsNotNone(notice)
        self.assertEquals(notice.title, update.title)
        self.assertEquals(notice.release, update.release.long_name)
        self.assertEquals(notice.status, update.status.value)
        if update.date_modified:
            self.assertEquals(notice.updated_date, update.date_modified)
        self.assertEquals(notice.fromstr, config.get('bodhi_email'))
        self.assertEquals(notice.rights, config.get('updateinfo_rights'))
        self.assertEquals(notice.description, update.notes)
        #self.assertIsNotNone(notice.issued_date)
        self.assertEquals(notice.id, update.alias)
        bug = notice.references[0]
        self.assertEquals(bug.href, update.bugs[0].url)
        self.assertEquals(bug.id, '12345')
        self.assertEquals(bug.type, 'bugzilla')
        cve = notice.references[1]
        self.assertEquals(cve.type, 'cve')
        self.assertEquals(cve.href, update.cves[0].url)
        self.assertEquals(cve.id, update.cves[0].cve_id)

        col = notice.collections[0]
        self.assertEquals(col.name, update.release.long_name)
        self.assertEquals(col.shortname, update.release.name)

        pkg = col.packages[0]
        self.assertEquals(pkg.epoch, '0')
        self.assertEquals(pkg.name, 'TurboGears')
        self.assertEquals(
            pkg.src,
            'https://download.fedoraproject.org/pub/fedora/linux/updates/testing/17/SRPMS/T/TurboGears-1.0.2.2-2.fc7.src.rpm'
        )
        self.assertEquals(pkg.version, '1.0.2.2')
        self.assertFalse(pkg.reboot_suggested)
        self.assertEquals(pkg.arch, 'src')
        self.assertEquals(pkg.filename, 'TurboGears-1.0.2.2-2.fc7.src.rpm')
Example #4
    def test_extended_metadata(self):
        update = self.db.query(Update).one()

        # Pretend it's pushed to testing
        update.status = UpdateStatus.testing
        update.request = None
        update.date_pushed = datetime.utcnow()
        DevBuildsys.__tagged__[update.title] = ["f17-updates-testing"]

        # Generate the XML
        md = ExtendedMetadata(update.release, update.request, self.db, self.temprepo)

        # Insert the updateinfo.xml into the repository
        md.insert_updateinfo()
        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, "mutt-1.5.14-1.fc13")
        self.assertIsNone(notice)

        self.assertEquals(len(uinfo.updates), 1)
        notice = uinfo.updates[0]

        self.assertIsNotNone(notice)
        self.assertEquals(notice.title, update.title)
        self.assertEquals(notice.release, update.release.long_name)
        self.assertEquals(notice.status, update.status.value)
        if update.date_modified:
            self.assertEquals(notice.updated_date, update.date_modified)
        self.assertEquals(notice.fromstr, config.get("bodhi_email"))
        self.assertEquals(notice.rights, config.get("updateinfo_rights"))
        self.assertEquals(notice.description, update.notes)
        # self.assertIsNotNone(notice.issued_date)
        self.assertEquals(notice.id, update.alias)
        bug = notice.references[0]
        self.assertEquals(bug.href, update.bugs[0].url)
        self.assertEquals(bug.id, "12345")
        self.assertEquals(bug.type, "bugzilla")
        cve = notice.references[1]
        self.assertEquals(cve.type, "cve")
        self.assertEquals(cve.href, update.cves[0].url)
        self.assertEquals(cve.id, update.cves[0].cve_id)

        col = notice.collections[0]
        self.assertEquals(col.name, update.release.long_name)
        self.assertEquals(col.shortname, update.release.name)

        pkg = col.packages[0]
        self.assertEquals(pkg.epoch, "0")
        self.assertEquals(pkg.name, "TurboGears")
        self.assertEquals(
            pkg.src,
            "https://download.fedoraproject.org/pub/fedora/linux/updates/testing/17/SRPMS/T/TurboGears-1.0.2.2-2.fc7.src.rpm",
        )
        self.assertEquals(pkg.version, "1.0.2.2")
        self.assertFalse(pkg.reboot_suggested)
        self.assertEquals(pkg.arch, "src")
        self.assertEquals(pkg.filename, "TurboGears-1.0.2.2-2.fc7.src.rpm")
Example #5
    def mash(self):
        if self.path in self.state["completed_repos"]:
            self.log.info("Skipping completed repo: %s", self.path)
            return

        comps = os.path.join(config.get("comps_dir"), "comps-%s.xml" % self.release.branch)
        previous = os.path.join(config.get("mash_stage_dir"), self.id)

        mash_thread = MashThread(self.id, self.path, comps, previous, self.log)
        mash_thread.start()
        return mash_thread
Example #6
 def __init__(self):
     user = config.get('bodhi_email')
     password = config.get('bodhi_password', None)
     url = config.get("bz_server")
     log.info("Using BZ URL %s" % url)
     if user and password:
         self.bz = bugzilla.Bugzilla(url=url,
                                     user=user, password=password,
                                     cookiefile=None, tokenfile=None)
     else:
         self.bz = bugzilla.Bugzilla(url=url,
                                     cookiefile=None, tokenfile=None)
Example #7
    def mash(self):
        if self.path in self.state['completed_repos']:
            self.log.info('Skipping completed repo: %s', self.path)
            return

        comps = os.path.join(config.get('comps_dir'),
                             'comps-%s.xml' % self.release.branch)
        previous = os.path.join(config.get('mash_stage_dir'), self.id)

        mash_thread = MashThread(self.id, self.path, comps, previous, self.log)
        mash_thread.start()
        return mash_thread
Example #8
    def mash(self):
        if self.path in self.state['completed_repos']:
            self.log.info('Skipping completed repo: %s', self.path)
            return

        comps = os.path.join(config.get('comps_dir'), 'comps-%s.xml' %
                             self.release.branch)
        previous = os.path.join(config.get('mash_stage_dir'), self.id)

        mash_thread = MashThread(self.id, self.path, comps, previous, self.log)
        mash_thread.start()
        return mash_thread
Example #9
 def insert_pkgtags(self):
     """Download and inject the pkgtags sqlite from fedora-tagger"""
     if config.get('pkgtags_url'):
         try:
             tags_url = config.get('pkgtags_url')
             tempdir = tempfile.mkdtemp('bodhi')
             local_tags = os.path.join(tempdir, 'pkgtags.sqlite')
             log.info('Downloading %s' % tags_url)
             urlgrab(tags_url, filename=local_tags)
             self.modifyrepo(local_tags)
         except:
             log.exception("There was a problem injecting pkgtags")
         finally:
             shutil.rmtree(tempdir)
Example #10
 def insert_pkgtags(self):
     """Download and inject the pkgtags sqlite from fedora-tagger"""
     if config.get('pkgtags_url'):
         try:
             tags_url = config.get('pkgtags_url')
             tempdir = tempfile.mkdtemp('bodhi')
             local_tags = os.path.join(tempdir, 'pkgtags.sqlite')
             log.info('Downloading %s' % tags_url)
             urlgrab(tags_url, filename=local_tags)
             self.modifyrepo(local_tags)
         except:
             log.exception("There was a problem injecting pkgtags")
         finally:
             shutil.rmtree(tempdir)
Example #11
    def send_testing_digest(self):
        """Send digest mail to mailing lists"""
        self.log.info('Sending updates-testing digest')
        sechead = u'The following %s Security updates need testing:\n Age  URL\n'
        crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
        testhead = u'The following builds have been pushed to %s updates-testing\n\n'

        for prefix, content in self.testing_digest.iteritems():
            release = self.db.query(Release).filter_by(long_name=prefix).one()
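            # The announce list name is derived from the release id_prefix, e.g.
            # 'fedora_test_announce_list' or 'fedora_epel_test_announce_list'.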
            test_list_key = '%s_test_announce_list' % (
                release.id_prefix.lower().replace('-', '_'))
            test_list = config.get(test_list_key)
            if not test_list:
                log.warn('%r undefined. Not sending updates-testing digest',
                         test_list_key)
                continue

            log.debug("Sending digest for updates-testing %s" % prefix)
            maildata = u''
            security_updates = self.get_security_updates(prefix)
            if security_updates:
                maildata += sechead % prefix
                for update in security_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            critpath_updates = self.get_unapproved_critpath_updates(prefix)
            if critpath_updates:
                maildata += crithead % prefix
                for update in self.get_unapproved_critpath_updates(prefix):
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing,
                        update.abs_url(),
                        update.title)
                maildata += '\n\n'

            maildata += testhead % prefix
            updlist = content.keys()
            updlist.sort()
            for pkg in updlist:
                maildata += u'    %s\n' % pkg
            maildata += u'\nDetails about builds:\n\n'
            for nvr in updlist:
                maildata += u"\n" + self.testing_digest[prefix][nvr]

            mail.send_mail(config.get('bodhi_email'), test_list,
                           '%s updates-testing report' % prefix, maildata)
Example #12
 def __init__(self):
     user = config.get('bodhi_email')
     password = config.get('bodhi_password', None)
     url = config.get("bz_server")
     log.info("Using BZ URL %s" % url)
     if user and password:
         self.bz = bugzilla.Bugzilla(url=url,
                                     user=user,
                                     password=password,
                                     cookiefile=None,
                                     tokenfile=None)
     else:
         self.bz = bugzilla.Bugzilla(url=url,
                                     cookiefile=None,
                                     tokenfile=None)
Example #13
    def __init__(self, errors):

        location = config.get('mako.directories')
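        # 'mako.directories' is expected to look like 'module:subpath'; resolve
        # it to an absolute path inside the installed module's directory.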
        module, final = location.split(':')
        base = os.path.dirname(__import__(module).__file__)
        directory = base + "/" + final

        lookup = mako.lookup.TemplateLookup(
            directories=[directory],
            output_encoding='utf-8',
            input_encoding='utf-8',
        )
        template = lookup.get_template('errors.html')

        try:
            body = template.render(
                errors=errors,
                status=errors.status,
                request=errors.request,
                summary=status2summary(errors.status),
            )
        except:
            log.error(mako.exceptions.text_error_template().render())
            raise

        # This thing inherits from both Exception *and* Response, so.. take the
        # Response path in the diamond inheritance chain and ignore the
        # exception side.
        # That turns this thing into a "real boy", like Pinocchio.
        pyramid.response.Response.__init__(self, body)

        self.status = errors.status
        self.content_type = 'text/html'
Example #14
    def __init__(self, errors):

        location = config.get('mako.directories')
        module, final = location.split(':')
        base = os.path.dirname(__import__(module).__file__)
        directory = base + "/" + final

        lookup = mako.lookup.TemplateLookup(
            directories=[directory],
            output_encoding='utf-8',
            input_encoding='utf-8',
        )
        template = lookup.get_template('errors.html')

        try:
            body = template.render(
                errors=errors,
                status=errors.status,
                request=errors.request,
                summary=status2summary(errors.status),
            )
        except:
            log.error(mako.exceptions.text_error_template().render())
            raise

        # This thing inherits from both Exception *and* Response, so.. take the
        # Response path in the diamond inheritance chain and ignore the
        # exception side.
        # That turns this thing into a "real boy", like Pinocchio.
        pyramid.response.Response.__init__(self, body)

        self.status = errors.status
        self.content_type = 'text/html'
Example #15
 def update_comps(self):
     """
     Update our comps git module and merge the latest translations so we can
     pass it to mash to insert into the repodata.
     """
     self.log.info("Updating comps")
     comps_dir = config.get('comps_dir')
     comps_url = config.get('comps_url')
     if not os.path.exists(comps_dir):
         util.cmd(['git', 'clone', comps_url], os.path.dirname(comps_dir))
     if comps_url.startswith('https://'):
         util.cmd(['git', 'pull'], comps_dir)
     else:
         self.log.error('comps_url must start with https://')
         return
     util.cmd(['make'], comps_dir)
Example #16
 def update_comps(self):
     """
     Update our comps git module and merge the latest translations so we can
     pass it to mash to insert into the repodata.
     """
     self.log.info("Updating comps")
     comps_dir = config.get("comps_dir")
     comps_url = config.get("comps_url")
     if not os.path.exists(comps_dir):
         util.cmd(["git", "clone", comps_url], os.path.dirname(comps_dir))
     if comps_url.startswith("https://"):
         util.cmd(["git", "pull"], comps_dir)
     else:
         self.log.error("comps_url must start with https://")
         return
     util.cmd(["make"], comps_dir)
Example #17
 def update_comps(self):
     """
     Update our comps git module and merge the latest translations so we can
     pass it to mash to insert into the repodata.
     """
     self.log.info("Updating comps")
     comps_dir = config.get('comps_dir')
     comps_url = config.get('comps_url')
     if not os.path.exists(comps_dir):
         util.cmd(['git', 'clone', comps_url], os.path.dirname(comps_dir))
     if comps_url.startswith('https://'):
         util.cmd(['git', 'pull'], comps_dir)
     else:
         self.log.error('comps_url must start with https://')
         return
     util.cmd(['make'], comps_dir)
Example #18
    def __init__(self,
                 hub,
                 db_factory=None,
                 mash_dir=config.get('mash_dir'),
                 *args,
                 **kw):
        if not db_factory:
            config_uri = '/etc/bodhi/production.ini'
            settings = get_appsettings(config_uri)
            engine = engine_from_config(settings, 'sqlalchemy.')
            Base.metadata.create_all(engine)
            self.db_factory = transactional_session_maker(engine)
        else:
            self.db_factory = db_factory

        buildsys.setup_buildsystem(config)
        self.mash_dir = mash_dir
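        # Listen for fedmsg messages on <topic_prefix>.<environment>.<masher_topic>.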
        prefix = hub.config.get('topic_prefix')
        env = hub.config.get('environment')
        self.topic = prefix + '.' + env + '.' + hub.config.get('masher_topic')
        self.valid_signer = hub.config.get('releng_fedmsg_certname')
        if not self.valid_signer:
            log.warn('No releng_fedmsg_certname defined. '
                     'Cert validation disabled')
        super(Masher, self).__init__(hub, *args, **kw)
        log.info('Bodhi masher listening on topic: %s' % self.topic)
Example #19
    def send_testing_digest(self):
        """Send digest mail to mailing lists"""
        self.log.info('Sending updates-testing digest')
        sechead = u'The following %s Security updates need testing:\n Age  URL\n'
        crithead = u'The following %s Critical Path updates have yet to be approved:\n Age URL\n'
        testhead = u'The following builds have been pushed to %s updates-testing\n\n'

        for prefix, content in self.testing_digest.iteritems():
            release = self.db.query(Release).filter_by(long_name=prefix).one()
            test_list_key = '%s_test_announce_list' % (
                release.id_prefix.lower().replace('-', '_'))
            test_list = config.get(test_list_key)
            if not test_list:
                log.warn('%r undefined. Not sending updates-testing digest',
                         test_list_key)
                continue

            log.debug("Sending digest for updates-testing %s" % prefix)
            maildata = u''
            security_updates = self.get_security_updates(prefix)
            if security_updates:
                maildata += sechead % prefix
                for update in security_updates:
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing, update.abs_url(), update.title)
                maildata += '\n\n'

            critpath_updates = self.get_unapproved_critpath_updates(prefix)
            if critpath_updates:
                maildata += crithead % prefix
                for update in self.get_unapproved_critpath_updates(prefix):
                    maildata += u' %3i  %s   %s\n' % (
                        update.days_in_testing, update.abs_url(), update.title)
                maildata += '\n\n'

            maildata += testhead % prefix
            updlist = content.keys()
            updlist.sort()
            for pkg in updlist:
                maildata += u'    %s\n' % pkg
            maildata += u'\nDetails about builds:\n\n'
            for nvr in updlist:
                maildata += u"\n" + self.testing_digest[prefix][nvr]

            mail.send_mail(config.get('bodhi_email'), test_list,
                           '%s updates-testing report' % prefix, maildata)
Example #20
 def test_stage(self):
     t = MasherThread(u"F17", u"testing", [u"bodhi-2.0-1.fc17"], log, self.db_factory, self.tempdir)
     t.id = "f17-updates-testing"
     t.init_path()
     t.stage_repo()
     stage_dir = config.get("mash_stage_dir")
     link = os.path.join(stage_dir, t.id)
     self.assertTrue(os.path.islink(link))
Example #21
 def test_stage(self):
     t = MasherThread(u'F17', u'testing', [u'bodhi-2.0-1.fc17'], log,
                      self.db_factory, self.tempdir)
     t.id = 'f17-updates-testing'
     t.init_path()
     t.stage_repo()
     stage_dir = config.get('mash_stage_dir')
     link = os.path.join(stage_dir, t.id)
     self.assertTrue(os.path.islink(link))
Example #22
 def test_stage(self):
     t = MasherThread(u'F17', u'testing', [u'bodhi-2.0-1.fc17'],
                      log, self.db_factory, self.tempdir)
     t.id = 'f17-updates-testing'
     t.init_path()
     t.stage_repo()
     stage_dir = config.get('mash_stage_dir')
     link = os.path.join(stage_dir, t.id)
     self.assertTrue(os.path.islink(link))
Example #23
 def modified(self, bug_id):
     try:
         bug = self.bz.getbug(bug_id)
         if bug.product not in config.get('bz_products', '').split(','):
             log.info("Skipping %r bug" % bug.product)
             return
         if bug.bug_status not in ('MODIFIED', 'VERIFIED', 'CLOSED'):
             log.info('Setting bug #%d status to MODIFIED' % bug_id)
             bug.setstatus('MODIFIED')
     except:
         log.exception("Unable to alter bug #%d" % bug_id)
Example #24
 def stage_repo(self):
     """Symlink our updates repository into the staging directory"""
     stage_dir = config.get('mash_stage_dir')
     if not os.path.isdir(stage_dir):
         self.log.info('Creating mash_stage_dir %s', stage_dir)
         os.mkdir(stage_dir)
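     # Replace any existing symlink so it points at the new mash output.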
     link = os.path.join(stage_dir, self.id)
     if os.path.islink(link):
         os.unlink(link)
     self.log.info("Creating symlink: %s => %s" % (self.path, link))
     os.symlink(os.path.join(self.path, self.id), link)
Example #25
 def modified(self, bug_id):
     try:
         bug = self.bz.getbug(bug_id)
         if bug.product not in config.get('bz_products', '').split(','):
             log.info("Skipping %r bug" % bug.product)
             return
         if bug.bug_status not in ('MODIFIED', 'VERIFIED', 'CLOSED'):
             log.info('Setting bug #%d status to MODIFIED' % bug_id)
             bug.setstatus('MODIFIED')
     except:
         log.exception("Unable to alter bug #%d" % bug_id)
Example #26
 def stage_repo(self):
     """Symlink our updates repository into the staging directory"""
     stage_dir = config.get('mash_stage_dir')
     if not os.path.isdir(stage_dir):
         self.log.info('Creating mash_stage_dir %s', stage_dir)
         os.mkdir(stage_dir)
     link = os.path.join(stage_dir, self.id)
     if os.path.islink(link):
         os.unlink(link)
     self.log.info("Creating symlink: %s => %s" % (self.path, link))
     os.symlink(os.path.join(self.path, self.id), link)
Example #27
 def __init__(self, tag, outputdir, comps, previous, log):
     super(MashThread, self).__init__()
     self.tag = tag
     self.log = log
     self.success = False
     mash_cmd = 'mash -o {outputdir} -c {config} -f {compsfile} {tag}'
     mash_conf = config.get('mash_conf', '/etc/mash/mash.conf')
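     # Pass the previous mash output to mash via -p when it exists.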
     if os.path.exists(previous):
         mash_cmd += ' -p {}'.format(previous)
     self.mash_cmd = mash_cmd.format(outputdir=outputdir, config=mash_conf,
                                     compsfile=comps, tag=self.tag).split()
Example #28
 def __init__(self, tag, outputdir, comps, previous, log):
     super(MashThread, self).__init__()
     self.tag = tag
     self.log = log
     self.success = False
     mash_cmd = "mash -o {outputdir} -c {config} -f {compsfile} {tag}"
     mash_conf = config.get("mash_conf", "/etc/mash/mash.conf")
     if os.path.exists(previous):
         mash_cmd += " -p {}".format(previous)
     self.mash_cmd = mash_cmd.format(outputdir=outputdir, config=mash_conf, compsfile=comps, tag=self.tag).split()
     # Set our thread's "name" so it shows up nicely in the logs.
     # https://docs.python.org/2/library/threading.html#thread-objects
     self.name = tag
Example #29
    def test_get_single_avatar(self):
        res = self.app.get('/users/guest')
        self.assertEquals(res.json_body['user']['name'], 'guest')

        if not asbool(config.get('libravatar_enabled', True)):
            return

        base = 'https://seccdn.libravatar.org/avatar/'
        h = 'eb48e08cc23bcd5961de9541ba5156c385cd39799e1dbf511477aa4d4d3a37e7'
        tail = '?d=retro&s=24'
        url = base + h

        self.assertEquals(res.json_body['user']['avatar'][:-len(tail)], url)
Example #30
    def test_get_single_avatar(self):
        res = self.app.get('/users/guest')
        self.assertEquals(res.json_body['user']['name'], 'guest')

        if not asbool(config.get('libravatar_enabled', True)):
            return

        base = 'https://seccdn.libravatar.org/avatar/'
        h = 'eb48e08cc23bcd5961de9541ba5156c385cd39799e1dbf511477aa4d4d3a37e7'
        tail = '?d=retro&s=24'
        url = base + h

        self.assertEquals(res.json_body['user']['avatar'][:-len(tail)], url)
Example #31
    def __init__(self, release, request, db, path):
        self.repo = path
        log.debug('repo = %r' % self.repo)
        self.request = request
        if request is UpdateRequest.stable:
            self.tag = release.stable_tag
        else:
            self.tag = release.testing_tag
        self.repo_path = os.path.join(self.repo, self.tag)

        self.db = db
        self.updates = set()
        self.builds = {}
        self.missing_ids = []
        self._from = config.get('bodhi_email')
        self.koji = get_session()
        self._fetch_updates()

        self.uinfo = cr.UpdateInfo()

        self.hash_type = cr.SHA256
        self.comp_type = cr.XZ

        if release.id_prefix == u'FEDORA-EPEL':
            # yum on py2.4 doesn't support sha256 (#1080373)
            if 'el5' in self.repo or '5E' in self.repo:
                self.hash_type = cr.SHA1
                self.comp_type = cr.GZ
            else:
                # FIXME: I'm not sure which versions of RHEL support xz metadata
                # compression, so use the lowest common denominator for now.
                self.comp_type = cr.BZ2

        # Load from the cache if it exists
        self.cached_repodata = os.path.join(self.repo, '..',
                                            self.tag + '.repocache',
                                            'repodata/')
        if os.path.isdir(self.cached_repodata):
            self._load_cached_updateinfo()
        else:
            log.debug("Generating new updateinfo.xml")
            self.uinfo = cr.UpdateInfo()
            for update in self.updates:
                if update.alias:
                    self.add_update(update)
                else:
                    self.missing_ids.append(update.title)

        if self.missing_ids:
            log.error("%d updates with missing ID: %r" %
                      (len(self.missing_ids), self.missing_ids))
Example #32
    def __init__(self, release, request, db, path):
        self.repo = path
        log.debug('repo = %r' % self.repo)
        self.request = request
        if request is UpdateRequest.stable:
            self.tag = release.stable_tag
        else:
            self.tag = release.testing_tag
        self.repo_path = os.path.join(self.repo, self.tag)

        self.db = db
        self.updates = set()
        self.builds = {}
        self.missing_ids = []
        self._from = config.get('bodhi_email')
        self.koji = get_session()
        self._fetch_updates()

        self.uinfo = cr.UpdateInfo()

        self.hash_type = cr.SHA256
        self.comp_type = cr.XZ

        if release.id_prefix == u'FEDORA-EPEL':
            # yum on py2.4 doesn't support sha256 (#1080373)
            if 'el5' in self.repo or '5E' in self.repo:
                self.hash_type = cr.SHA1
                self.comp_type = cr.GZ
            else:
                # FIXME: I'm not sure which versions of RHEL support xz metadata
                # compression, so use the lowest common denominator for now.
                self.comp_type = cr.BZ2

        # Load from the cache if it exists
        self.cached_repodata = os.path.join(self.repo, '..', self.tag +
                                            '.repocache', 'repodata/')
        if os.path.isdir(self.cached_repodata):
            self._load_cached_updateinfo()
        else:
            log.debug("Generating new updateinfo.xml")
            self.uinfo = cr.UpdateInfo()
            for update in self.updates:
                if update.alias:
                    self.add_update(update)
                else:
                    self.missing_ids.append(update.title)

        if self.missing_ids:
            log.error("%d updates with missing ID: %r" % (
                len(self.missing_ids), self.missing_ids))
Example #33
 def __init__(self, tag, outputdir, comps, previous, log):
     super(MashThread, self).__init__()
     self.tag = tag
     self.log = log
     self.success = False
     mash_cmd = 'mash -o {outputdir} -c {config} -f {compsfile} {tag}'
     mash_conf = config.get('mash_conf', '/etc/mash/mash.conf')
     if os.path.exists(previous):
         mash_cmd += ' -p {}'.format(previous)
     self.mash_cmd = mash_cmd.format(outputdir=outputdir,
                                     config=mash_conf,
                                     compsfile=comps,
                                     tag=self.tag).split()
     # Set our thread's "name" so it shows up nicely in the logs.
     # https://docs.python.org/2/library/threading.html#thread-objects
     self.name = tag
Example #34
    def wait_for_sync(self):
        """Block until our repomd.xml hits the master mirror"""
        self.log.info('Waiting for updates to hit the master mirror')
        notifications.publish(
            topic="mashtask.sync.wait",
            msg=dict(repo=self.id),
            force=True,
        )
        mash_path = os.path.join(self.path, self.id)
        arch = os.listdir(mash_path)[0]

        release = self.release.id_prefix.lower().replace('-', '_')
        request = self.request.value
        key = '%s_%s_master_repomd' % (release, request)
        master_repomd = config.get(key)
        if not master_repomd:
            raise ValueError("Could not find %s in the config file" % key)

        repomd = os.path.join(mash_path, arch, 'repodata', 'repomd.xml')
        if not os.path.exists(repomd):
            self.log.error('Cannot find local repomd: %s', repomd)
            return

        checksum = hashlib.sha1(file(repomd).read()).hexdigest()
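        # Poll the master mirror until its repomd.xml checksum matches our
        # local copy, sleeping 200 seconds between attempts.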
        while True:
            try:
                url = master_repomd % (self.release.version, arch)
                self.log.info('Polling %s' % url)
                masterrepomd = urllib2.urlopen(url)
            except (urllib2.URLError, urllib2.HTTPError):
                self.log.exception('Error fetching repomd.xml')
                time.sleep(200)
                continue
            newsum = hashlib.sha1(masterrepomd.read()).hexdigest()
            if newsum == checksum:
                self.log.info("master repomd.xml matches!")
                notifications.publish(
                    topic="mashtask.sync.done",
                    msg=dict(repo=self.id),
                    force=True,
                )
                return

            self.log.debug("master repomd.xml doesn't match! %s != %s for %r",
                           checksum, newsum, self.id)
            time.sleep(200)
Example #35
    def wait_for_sync(self):
        """Block until our repomd.xml hits the master mirror"""
        self.log.info('Waiting for updates to hit the master mirror')
        notifications.publish(
            topic="mashtask.sync.wait",
            msg=dict(repo=self.id),
            force=True,
        )
        mash_path = os.path.join(self.path, self.id)
        arch = os.listdir(mash_path)[0]

        release = self.release.id_prefix.lower().replace('-', '_')
        request = self.request.value
        key = '%s_%s_master_repomd' % (release, request)
        master_repomd = config.get(key)
        if not master_repomd:
            raise ValueError("Could not find %s in the config file" % key)

        repomd = os.path.join(mash_path, arch, 'repodata', 'repomd.xml')
        if not os.path.exists(repomd):
            self.log.error('Cannot find local repomd: %s', repomd)
            return

        checksum = hashlib.sha1(file(repomd).read()).hexdigest()
        while True:
            try:
                url = master_repomd % (self.release.version, arch)
                self.log.info('Polling %s' % url)
                masterrepomd = urllib2.urlopen(url)
            except (urllib2.URLError, urllib2.HTTPError):
                self.log.exception('Error fetching repomd.xml')
                time.sleep(200)
                continue
            newsum = hashlib.sha1(masterrepomd.read()).hexdigest()
            if newsum == checksum:
                self.log.info("master repomd.xml matches!")
                notifications.publish(
                    topic="mashtask.sync.done",
                    msg=dict(repo=self.id),
                    force=True,
                )
                return

            self.log.debug("master repomd.xml doesn't match! %s != %s for %r",
                           checksum, newsum, self.id)
            time.sleep(200)
Example #36
    def __init__(self, hub, db_factory=None, mash_dir=config.get("mash_dir"), *args, **kw):
        if not db_factory:
            config_uri = "/etc/bodhi/production.ini"
            settings = get_appsettings(config_uri)
            engine = engine_from_config(settings, "sqlalchemy.")
            Base.metadata.create_all(engine)
            self.db_factory = transactional_session_maker(engine)
        else:
            self.db_factory = db_factory

        buildsys.setup_buildsystem(config)
        self.mash_dir = mash_dir
        prefix = hub.config.get("topic_prefix")
        env = hub.config.get("environment")
        self.topic = prefix + "." + env + "." + hub.config.get("masher_topic")
        self.valid_signer = hub.config.get("releng_fedmsg_certname")
        if not self.valid_signer:
            log.warn("No releng_fedmsg_certname defined" "Cert validation disabled")
        super(Masher, self).__init__(hub, *args, **kw)
        log.info("Bodhi masher listening on topic: %s" % self.topic)
Example #37
    def __init__(self, hub, db_factory=None, mash_dir=config.get('mash_dir'),
                 *args, **kw):
        if not db_factory:
            config_uri = '/etc/bodhi/production.ini'
            settings = get_appsettings(config_uri)
            engine = engine_from_config(settings, 'sqlalchemy.')
            Base.metadata.create_all(engine)
            self.db_factory = transactional_session_maker(engine)
        else:
            self.db_factory = db_factory

        buildsys.setup_buildsystem(config)
        self.mash_dir = mash_dir
        prefix = hub.config.get('topic_prefix')
        env = hub.config.get('environment')
        self.topic = prefix + '.' + env + '.' + hub.config.get('masher_topic')
        self.valid_signer = hub.config.get('releng_fedmsg_certname')
        if not self.valid_signer:
            log.warn('No releng_fedmsg_certname defined. '
                     'Cert validation disabled')
        super(Masher, self).__init__(hub, *args, **kw)
        log.info('Bodhi masher listening on topic: %s' % self.topic)
Example #38
    def test_set_meets_then_met_requirements(self, publish):
        req = DummyRequest()
        req.errors = cornice.Errors()
        req.koji = buildsys.get_session()
        req.user = model.User(name='bob')

        self.obj.status = UpdateStatus.testing
        self.obj.request = None

        # Pretend it's been in testing for a week
        self.obj.comment(
            self.db, u'This update has been pushed to testing.', author=u'bodhi')
        self.obj.date_testing = self.obj.comments[-1].timestamp - timedelta(days=7)
        eq_(self.obj.days_in_testing, 7)
        eq_(self.obj.meets_testing_requirements, True)
        eq_(self.obj.met_testing_requirements, False)

        text = config.get('testing_approval_msg') % self.obj.days_in_testing
        self.obj.comment(self.db, text, author=u'bodhi')

        eq_(self.obj.meets_testing_requirements, True)
        eq_(self.obj.met_testing_requirements, True)
Example #39
    def test_set_meets_then_met_requirements(self, publish):
        req = DummyRequest()
        req.errors = cornice.Errors()
        req.koji = buildsys.get_session()
        req.user = model.User(name='bob')

        self.obj.status = UpdateStatus.testing
        self.obj.request = None

        # Pretend it's been in testing for a week
        self.obj.comment(self.db,
                         u'This update has been pushed to testing.',
                         author=u'bodhi')
        self.obj.date_testing = self.obj.comments[-1].timestamp - timedelta(
            days=7)
        eq_(self.obj.days_in_testing, 7)
        eq_(self.obj.meets_testing_requirements, True)
        eq_(self.obj.met_testing_requirements, False)

        text = config.get('testing_approval_msg') % self.obj.days_in_testing
        self.obj.comment(self.db, text, author=u'bodhi')

        eq_(self.obj.meets_testing_requirements, True)
        eq_(self.obj.met_testing_requirements, True)
Example #40
        if bug.product == 'Security Response':
            bug_entity.parent = True
        bug_entity.title = to_unicode(bug.short_desc)
        if isinstance(bug.keywords, basestring):
            keywords = bug.keywords.split()
        else:  # python-bugzilla 0.8.0+
            keywords = bug.keywords
        if 'security' in [keyword.lower() for keyword in keywords]:
            bug_entity.security = True

    def modified(self, bug_id):
        try:
            bug = self.bz.getbug(bug_id)
            if bug.product not in config.get('bz_products', '').split(','):
                log.info("Skipping %r bug" % bug.product)
                return
            if bug.bug_status not in ('MODIFIED', 'VERIFIED', 'CLOSED'):
                log.info('Setting bug #%d status to MODIFIED' % bug_id)
                bug.setstatus('MODIFIED')
        except:
            log.exception("Unable to alter bug #%d" % bug_id)


if config.get('bugtracker') == 'bugzilla':
    import bugzilla
    log.info('Using python-bugzilla')
    bugtracker = Bugzilla()
else:
    log.info('Using the FakeBugTracker')
    bugtracker = FakeBugTracker()
Example #41
    def test_metadata_updating_with_edited_update(self):
        update = self.db.query(Update).one()

        # Pretend it's pushed to testing
        update.status = UpdateStatus.testing
        update.request = None
        update.date_pushed = datetime.utcnow()
        DevBuildsys.__tagged__[update.title] = ["f17-updates-testing"]

        # Generate the XML
        md = ExtendedMetadata(update.release, update.request, self.db, self.temprepo)

        # Insert the updateinfo.xml into the repository
        md.insert_updateinfo()
        md.cache_repodata()

        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, update.title)

        self.assertIsNotNone(notice)
        self.assertEquals(notice.title, update.title)
        self.assertEquals(notice.release, update.release.long_name)
        self.assertEquals(notice.status, update.status.value)
        self.assertEquals(notice.updated_date, update.date_modified)
        self.assertEquals(notice.fromstr, config.get("bodhi_email"))
        self.assertEquals(notice.description, update.notes)
        self.assertIsNotNone(notice.issued_date)
        self.assertEquals(notice.id, update.alias)
        # self.assertIsNone(notice.epoch)
        bug = notice.references[0]
        self.assertEquals(bug.href, update.bugs[0].url)
        self.assertEquals(bug.id, "12345")
        self.assertEquals(bug.type, "bugzilla")
        cve = notice.references[1]
        self.assertEquals(cve.type, "cve")
        self.assertEquals(cve.href, update.cves[0].url)
        self.assertEquals(cve.id, update.cves[0].cve_id)

        # Change the notes on the update *and* the date_modified
        update.notes = u"x"
        update.date_modified = datetime.utcnow()

        # Re-initialize our temporary repo
        shutil.rmtree(self.temprepo)
        os.mkdir(self.temprepo)
        mkmetadatadir(join(self.temprepo, "f17-updates-testing", "i386"))

        md = ExtendedMetadata(update.release, update.request, self.db, self.temprepo)
        md.insert_updateinfo()
        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, update.title)

        self.assertIsNotNone(notice)
        self.assertEquals(notice.description, u"x")
        self.assertEquals(
            notice.updated_date.strftime("%Y-%m-%d %H:%M:%S"), update.date_modified.strftime("%Y-%m-%d %H:%M:%S")
        )
Example #42
    def test_metadata_updating_with_edited_update(self):
        update = self.db.query(Update).one()

        # Pretend it's pushed to testing
        update.status = UpdateStatus.testing
        update.request = None
        update.date_pushed = datetime.utcnow()
        DevBuildsys.__tagged__[update.title] = ['f17-updates-testing']

        # Generate the XML
        md = ExtendedMetadata(update.release, update.request, self.db,
                              self.temprepo)

        # Insert the updateinfo.xml into the repository
        md.insert_updateinfo()
        md.cache_repodata()

        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, update.title)

        self.assertIsNotNone(notice)
        self.assertEquals(notice.title, update.title)
        self.assertEquals(notice.release, update.release.long_name)
        self.assertEquals(notice.status, update.status.value)
        self.assertEquals(notice.updated_date, update.date_modified)
        self.assertEquals(notice.fromstr, config.get('bodhi_email'))
        self.assertEquals(notice.description, update.notes)
        self.assertIsNotNone(notice.issued_date)
        self.assertEquals(notice.id, update.alias)
        #self.assertIsNone(notice.epoch)
        bug = notice.references[0]
        self.assertEquals(bug.href, update.bugs[0].url)
        self.assertEquals(bug.id, '12345')
        self.assertEquals(bug.type, 'bugzilla')
        cve = notice.references[1]
        self.assertEquals(cve.type, 'cve')
        self.assertEquals(cve.href, update.cves[0].url)
        self.assertEquals(cve.id, update.cves[0].cve_id)

        # Change the notes on the update *and* the date_modified
        update.notes = u'x'
        update.date_modified = datetime.utcnow()

        # Re-initialize our temporary repo
        shutil.rmtree(self.temprepo)
        os.mkdir(self.temprepo)
        mkmetadatadir(join(self.temprepo, 'f17-updates-testing', 'i386'))

        md = ExtendedMetadata(update.release, update.request, self.db,
                              self.temprepo)
        md.insert_updateinfo()
        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, update.title)

        self.assertIsNotNone(notice)
        self.assertEquals(notice.description, u'x')
        self.assertEquals(notice.updated_date.strftime('%Y-%m-%d %H:%M:%S'),
                          update.date_modified.strftime('%Y-%m-%d %H:%M:%S'))
Example #43
    def work(self):
        self.koji = buildsys.get_session()
        self.release = self.db.query(Release)\
                              .filter_by(name=self.release).one()
        self.id = getattr(self.release, '%s_tag' % self.request.value)

        # Set our thread's "name" so it shows up nicely in the logs.
        # https://docs.python.org/2/library/threading.html#thread-objects
        self.name = self.id

        # For 'pending' branched releases, we only want to perform repo-related
        # tasks for testing updates. For stable updates, we should just add the
        # dist_tag and do everything else other than mashing/updateinfo, since
        # the nightly build-branched cron job mashes for us.
        self.skip_mash = False
        if (self.release.state is ReleaseState.pending and
                self.request is UpdateRequest.stable):
            self.skip_mash = True

        self.log.info('Running MasherThread(%s)' % self.id)
        self.init_state()
        if not self.resume:
            self.init_path()

        notifications.publish(
            topic="mashtask.mashing",
            msg=dict(repo=self.id, updates=self.state['updates']),
            force=True,
        )

        try:
            if self.resume:
                self.load_state()
            else:
                self.save_state()

            self.load_updates()
            self.verify_updates()

            if self.request is UpdateRequest.stable:
                self.perform_gating()

            self.determine_and_perform_tag_actions()

            self.update_security_bugs()

            self.expire_buildroot_overrides()
            self.remove_pending_tags()
            self.update_comps()

            if self.resume and self.path in self.state['completed_repos']:
                self.log.info('Skipping completed repo: %s', self.path)
                self.complete_requests()
                # We still need to generate the testing digest, since it's stored in memory
                self.generate_testing_digest()
            else:
                if not self.skip_mash:
                    mash_thread = self.mash()

                # Things we can do while we're mashing
                self.complete_requests()
                self.generate_testing_digest()

                if not self.skip_mash:
                    uinfo = self.generate_updateinfo()

                    self.wait_for_mash(mash_thread)

                    uinfo.insert_updateinfo()
                    uinfo.insert_pkgtags()
                    uinfo.cache_repodata()

            # Compose OSTrees from our freshly mashed repos
            if config.get('compose_atomic_trees'):
                self.compose_atomic_trees()

            if not self.skip_mash:
                self.sanity_check_repo()
                self.stage_repo()

                # Wait for the repo to hit the master mirror
                self.wait_for_sync()

            # Send fedmsg notifications
            self.send_notifications()

            # Update bugzillas
            self.modify_bugs()

            # Add comments to updates
            self.status_comments()

            # Announce stable updates to the mailing list
            self.send_stable_announcements()

            # Email updates-testing digest
            self.send_testing_digest()

            self.success = True
            self.remove_state()
            self.unlock_updates()
            self.check_all_karma_thresholds()
        except:
            self.log.exception('Exception in MasherThread(%s)' % self.id)
            self.save_state()
            raise
        finally:
            self.finish(self.success)
Example #44
    def add_update(self, update):
        """Generate the extended metadata for a given update"""
        rec = cr.UpdateRecord()
        rec.version = __version__
        rec.fromstr = config.get('bodhi_email')
        rec.status = update.status.value
        rec.type = update.type.value
        rec.id = to_bytes(update.alias)
        rec.title = to_bytes(update.title)
        rec.summary = to_bytes('%s %s update' % (update.get_title(),
                                                 update.type.value))
        rec.description = to_bytes(update.notes)
        rec.release = to_bytes(update.release.long_name)
        rec.rights = config.get('updateinfo_rights')

        if update.date_pushed:
            rec.issued_date = update.date_pushed
        if update.date_modified:
            rec.updated_date = update.date_modified

        col = cr.UpdateCollection()
        col.name = to_bytes(update.release.long_name)
        col.shortname = to_bytes(update.release.name)

        for build in update.builds:
            try:
                kojiBuild = self.builds[build.nvr]
            except:
                kojiBuild = self.koji.getBuild(build.nvr)

            rpms = self.koji.listBuildRPMs(kojiBuild['id'])
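            # Add an UpdateCollectionPackage entry for every RPM built from this NVR.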
            for rpm in rpms:
                pkg = cr.UpdateCollectionPackage()
                pkg.name = rpm['name']
                pkg.version = rpm['version']
                pkg.release = rpm['release']
                if rpm['epoch'] is not None:
                    pkg.epoch = str(rpm['epoch'])
                else:
                    pkg.epoch = '0'
                pkg.arch = rpm['arch']

                # TODO: how do we handle UpdateSuggestion.logout, etc?
                pkg.reboot_suggested = update.suggest is UpdateSuggestion.reboot

                filename = '%s.%s.rpm' % (rpm['nvr'], rpm['arch'])
                pkg.filename = filename

                # Build the URL
                if rpm['arch'] == 'src':
                    arch = 'SRPMS'
                elif rpm['arch'] in ('noarch', 'i686'):
                    arch = 'i386'
                else:
                    arch = rpm['arch']

                pkg.src = os.path.join(
                    config.get('file_url'),
                    update.status is UpdateStatus.testing and 'testing' or '',
                    str(update.release.version), arch, filename[0], filename)

                col.append(pkg)

        rec.append_collection(col)

        # Create references for each bug
        for bug in update.bugs:
            ref = cr.UpdateReference()
            ref.type = 'bugzilla'
            ref.id = to_bytes(bug.bug_id)
            ref.href = to_bytes(bug.url)
            ref.title = to_bytes(bug.title)
            rec.append_reference(ref)

        # Create references for each CVE
        for cve in update.cves:
            ref = cr.UpdateReference()
            ref.type = 'cve'
            ref.id = to_bytes(cve.cve_id)
            ref.href = to_bytes(cve.url)
            rec.append_reference(ref)

        self.uinfo.append(rec)
Example #45
    def add_update(self, update):
        """Generate the extended metadata for a given update"""
        rec = cr.UpdateRecord()
        rec.version = __version__
        rec.fromstr = config.get('bodhi_email')
        rec.status = update.status.value
        rec.type = update.type.value
        rec.id = to_bytes(update.alias)
        rec.title = to_bytes(update.title)
        rec.summary = to_bytes('%s %s update' % (update.get_title(),
                                                 update.type.value))
        rec.description = to_bytes(update.notes)
        rec.release = to_bytes(update.release.long_name)
        rec.rights = config.get('updateinfo_rights')

        if update.date_pushed:
            rec.issued_date = update.date_pushed
        else:
            log.warning('No date_pushed set for %s' % update.title)
        if update.date_modified:
            rec.updated_date = update.date_modified

        col = cr.UpdateCollection()
        col.name = to_bytes(update.release.long_name)
        col.shortname = to_bytes(update.release.name)

        for build in update.builds:
            try:
                kojiBuild = self.builds[build.nvr]
            except:
                kojiBuild = self.koji.getBuild(build.nvr)

            rpms = self.koji.listBuildRPMs(kojiBuild['id'])
            for rpm in rpms:
                pkg = cr.UpdateCollectionPackage()
                pkg.name = rpm['name']
                pkg.version = rpm['version']
                pkg.release = rpm['release']
                if rpm['epoch'] is not None:
                    pkg.epoch = str(rpm['epoch'])
                else:
                    pkg.epoch = '0'
                pkg.arch = rpm['arch']

                # TODO: how do we handle UpdateSuggestion.logout, etc?
                pkg.reboot_suggested = update.suggest is UpdateSuggestion.reboot

                filename = '%s.%s.rpm' % (rpm['nvr'], rpm['arch'])
                pkg.filename = filename

                # Build the URL
                if rpm['arch'] == 'src':
                    arch = 'SRPMS'
                elif rpm['arch'] in ('noarch', 'i686'):
                    arch = 'i386'
                else:
                    arch = rpm['arch']

                pkg.src = os.path.join(
                    config.get('file_url'),
                    update.status is UpdateStatus.testing and 'testing' or '',
                    str(update.release.version), arch, filename[0], filename)

                col.append(pkg)

        rec.append_collection(col)

        # Create references for each bug
        for bug in update.bugs:
            ref = cr.UpdateReference()
            ref.type = 'bugzilla'
            ref.id = to_bytes(bug.bug_id)
            ref.href = to_bytes(bug.url)
            ref.title = to_bytes(bug.title)
            rec.append_reference(ref)

        # Create references for each CVE
        for cve in update.cves:
            ref = cr.UpdateReference()
            ref.type = 'cve'
            ref.id = to_bytes(cve.cve_id)
            ref.href = to_bytes(cve.url)
            rec.append_reference(ref)

        self.uinfo.append(rec)
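As a hedged aside on where these records end up: each UpdateRecord is appended to a createrepo_c UpdateInfo object, which can then be serialized to updateinfo XML. The helper below is a minimal sketch under that assumption, not the project's code; the function name and parameters are hypothetical, and only cr.UpdateInfo(), append() and xml_dump() are createrepo_c API calls.

import createrepo_c as cr

def dump_updateinfo(records, path):
    # Sketch: collect UpdateRecord objects and write plain updateinfo XML.
    # 'records' and 'path' are illustrative parameters, not bodhi's interface.
    uinfo = cr.UpdateInfo()
    for rec in records:
        uinfo.append(rec)
    with open(path, 'w') as fo:
        fo.write(uinfo.xml_dump())  # xml_dump() returns the updateinfo XML as a string
    return path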
Example #46
        if bug.product == 'Security Response':
            bug_entity.parent = True
        bug_entity.title = to_unicode(bug.short_desc)
        if isinstance(bug.keywords, basestring):
            keywords = bug.keywords.split()
        else:  # python-bugzilla 0.8.0+
            keywords = bug.keywords
        if 'security' in [keyword.lower() for keyword in keywords]:
            bug_entity.security = True

    def modified(self, bug_id):
        try:
            bug = self.bz.getbug(bug_id)
            if bug.product not in config.get('bz_products', '').split(','):
                log.info("Skipping %r bug" % bug.product)
                return
            if bug.bug_status not in ('MODIFIED', 'VERIFIED', 'CLOSED'):
                log.info('Setting bug #%d status to MODIFIED' % bug_id)
                bug.setstatus('MODIFIED')
        except Exception:
            log.exception("Unable to alter bug #%d" % bug_id)


if config.get('bugtracker') == 'bugzilla':
    import bugzilla
    log.info('Using python-bugzilla')
    bugtracker = Bugzilla()
else:
    log.info('Using the FakeBugTracker')
    bugtracker = FakeBugTracker()
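For context, the fallback branch above only needs an object that exposes the same interface as the Bugzilla wrapper but has no side effects. A minimal illustrative stub (not the project's actual FakeBugTracker, which covers more of the interface) might look like this:

class FakeBugTracker(object):
    """Illustrative stand-in: log calls instead of talking to Bugzilla."""

    def getbug(self, bug_id, *args, **kw):
        log.debug('FakeBugTracker.getbug(%r)', bug_id)

    def modified(self, bug_id):
        log.debug('FakeBugTracker.modified(%r)', bug_id)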
Example #47
class TestUpdate(ModelTest):
    """Unit test case for the ``Update`` model."""
    klass = model.Update
    attrs = dict(
        title=u'TurboGears-1.0.8-3.fc11',
        type=UpdateType.security,
        status=UpdateStatus.pending,
        request=UpdateRequest.testing,
        severity=UpdateSeverity.medium,
        suggest=UpdateSuggestion.reboot,
        stable_karma=3,
        unstable_karma=-3,
        close_bugs=True,
        notes=u'foobar',
        karma=0,
    )

    def do_get_dependencies(self):
        release = model.Release(**TestRelease.attrs)
        return dict(builds=[
            model.Build(nvr=u'TurboGears-1.0.8-3.fc11',
                        package=model.Package(**TestPackage.attrs),
                        release=release)
        ],
                    bugs=[model.Bug(bug_id=1),
                          model.Bug(bug_id=2)],
                    cves=[model.CVE(cve_id=u'CVE-2009-0001')],
                    release=release,
                    user=model.User(name=u'lmacken'))

    def get_update(self, name=u'TurboGears-1.0.8-3.fc11'):
        attrs = self.attrs.copy()
        pkg = self.db.query(model.Package) \
                .filter_by(name=u'TurboGears').one()
        rel = self.db.query(model.Release) \
                .filter_by(name=u'F11').one()
        attrs.update(
            dict(
                builds=[model.Build(nvr=name, package=pkg, release=rel)],
                release=rel,
            ))
        return self.klass(**attrs)

    def test_builds(self):
        eq_(len(self.obj.builds), 1)
        eq_(self.obj.builds[0].nvr, u'TurboGears-1.0.8-3.fc11')
        eq_(self.obj.builds[0].release.name, u'F11')
        eq_(self.obj.builds[0].package.name, u'TurboGears')

    def test_unpush_build(self):
        eq_(len(self.obj.builds), 1)
        b = self.obj.builds[0]
        release = self.obj.release
        koji = buildsys.get_session()
        koji.clear()
        koji.__tagged__[b.nvr] = [
            release.testing_tag, release.pending_testing_tag
        ]
        self.obj.builds[0].unpush(koji)
        eq_(koji.__moved__,
            [(u'dist-f11-updates-testing', u'dist-f11-updates-candidate',
              u'TurboGears-1.0.8-3.fc11')])
        eq_(koji.__untag__, [
            (u'dist-f11-updates-testing-pending', u'TurboGears-1.0.8-3.fc11')
        ])

    def test_title(self):
        eq_(self.obj.title, u'TurboGears-1.0.8-3.fc11')

    def test_pkg_str(self):
        """ Ensure str(pkg) is correct """
        eq_(
            str(self.obj.builds[0].package),
            '================================================================================\n     TurboGears\n================================================================================\n\n Pending Updates (1)\n    o TurboGears-1.0.8-3.fc11\n'
        )

    def test_bugstring(self):
        eq_(self.obj.get_bugstring(), u'1 2')

    def test_cvestring(self):
        eq_(self.obj.get_cvestring(), u'CVE-2009-0001')

    def test_assign_alias(self):
        update = self.obj
        with mock.patch(target='uuid.uuid4', return_value='wat'):
            update.assign_alias()
        year = time.localtime()[0]
        idx = 'a3bbe1a8f2'
        eq_(update.alias, u'%s-%s-%s' % (update.release.id_prefix, year, idx))

        update = self.get_update(name=u'TurboGears-0.4.4-8.fc11')
        with mock.patch(target='uuid.uuid4', return_value='wat2'):
            update.assign_alias()
        idx = '016462d41f'
        eq_(update.alias, u'%s-%s-%s' % (update.release.id_prefix, year, idx))

        ## Create another update for another release that has the same
        ## Release.id_prefix.  This used to trigger a bug that would cause
        ## duplicate IDs across Fedora 10/11 updates.
        update = self.get_update(name=u'nethack-3.4.5-1.fc10')
        otherrel = model.Release(
            name=u'fc10',
            long_name=u'Fedora 10',
            id_prefix=u'FEDORA',
            dist_tag=u'dist-fc10',
            stable_tag=u'dist-fc10-updates',
            testing_tag=u'dist-fc10-updates-testing',
            candidate_tag=u'dist-fc10-updates-candidate',
            pending_testing_tag=u'dist-fc10-updates-testing-pending',
            pending_stable_tag=u'dist-fc10-updates-pending',
            override_tag=u'dist-fc10-override',
            branch=u'fc10',
            version=u'10')
        update.release = otherrel
        with mock.patch(target='uuid.uuid4', return_value='wat3'):
            update.assign_alias()
        idx = '0efffa96f7'
        eq_(update.alias, u'%s-%s-%s' % (update.release.id_prefix, year, idx))

        newest = self.get_update(name=u'nethack-2.5.8-1.fc10')
        with mock.patch(target='uuid.uuid4', return_value='wat4'):
            newest.assign_alias()
        idx = '0efffa96f7'
        eq_(update.alias, u'%s-%s-%s' % (update.release.id_prefix, year, idx))

    def test_epel_id(self):
        """ Make sure we can handle id_prefixes that contain dashes.
        eg: FEDORA-EPEL
        """
        # Create a normal Fedora update first
        update = self.obj
        with mock.patch(target='uuid.uuid4', return_value='wat'):
            update.assign_alias()
        idx = 'a3bbe1a8f2'
        eq_(update.alias, u'FEDORA-%s-%s' % (time.localtime()[0], idx))

        update = self.get_update(name=u'TurboGears-2.1-1.el5')
        release = model.Release(
            name=u'EL-5',
            long_name=u'Fedora EPEL 5',
            id_prefix=u'FEDORA-EPEL',
            dist_tag=u'dist-5E-epel',
            stable_tag=u'dist-5E-epel',
            testing_tag=u'dist-5E-epel-testing',
            candidate_tag=u'dist-5E-epel-testing-candidate',
            pending_testing_tag=u'dist-5E-epel-testing-pending',
            pending_stable_tag=u'dist-5E-epel-pending',
            override_tag=u'dist-5E-epel-override',
            branch=u'el5',
            version=u'5')
        update.release = release
        idx = 'a3bbe1a8f2'
        with mock.patch(target='uuid.uuid4', return_value='wat'):
            update.assign_alias()
        eq_(update.alias, u'FEDORA-EPEL-%s-%s' % (time.localtime()[0], idx))

        update = self.get_update(name=u'TurboGears-2.2-1.el5')
        update.release = release
        idx = '016462d41f'
        with mock.patch(target='uuid.uuid4', return_value='wat2'):
            update.assign_alias()
        eq_(update.alias,
            u'%s-%s-%s' % (release.id_prefix, time.localtime()[0], idx))

    @raises(IntegrityError)
    def test_dupe(self):
        self.get_update()
        self.get_update()

    @mock.patch('bodhi.notifications.publish')
    def test_stable_karma(self, publish):
        update = self.obj
        update.request = None
        update.status = UpdateStatus.testing
        eq_(update.karma, 0)
        eq_(update.request, None)
        update.comment(self.db, u"foo", 1, u'foo')
        eq_(update.karma, 1)
        eq_(update.request, None)
        update.comment(self.db, u"foo", 1, u'bar')
        eq_(update.karma, 2)
        eq_(update.request, None)
        update.comment(self.db, u"foo", 1, u'biz')
        eq_(update.karma, 3)
        eq_(update.request, UpdateRequest.stable)
        publish.assert_called_with(topic='update.comment', msg=mock.ANY)
        #publish.assert_called_with(topic='update.request.stable', msg=mock.ANY)

    @mock.patch('bodhi.notifications.publish')
    def test_unstable_karma(self, publish):
        update = self.obj
        update.status = UpdateStatus.testing
        eq_(update.karma, 0)
        eq_(update.status, UpdateStatus.testing)
        update.comment(self.db, u"foo", -1, u'foo')
        eq_(update.status, UpdateStatus.testing)
        eq_(update.karma, -1)
        update.comment(self.db, u"bar", -1, u'bar')
        eq_(update.status, UpdateStatus.testing)
        eq_(update.karma, -2)
        update.comment(self.db, u"biz", -1, u'biz')
        eq_(update.karma, -3)
        eq_(update.status, UpdateStatus.obsolete)
        publish.assert_called_with(topic='update.comment', msg=mock.ANY)

    def test_update_bugs(self):
        update = self.obj
        eq_(len(update.bugs), 2)
        session = self.db

        # try just adding bugs
        bugs = ['1234']
        update.update_bugs(bugs, session)
        eq_(len(update.bugs), 1)
        eq_(update.bugs[0].bug_id, 1234)

        # try just removing
        bugs = []
        update.update_bugs(bugs, session)
        eq_(len(update.bugs), 0)
        eq_(self.db.query(model.Bug).filter_by(bug_id=1234).first(), None)

        # Test new duplicate bugs
        bugs = ['1234', '1234']
        update.update_bugs(bugs, session)
        assert len(update.bugs) == 1

        # Try adding a new bug, and removing the rest
        bugs = ['4321']
        update.update_bugs(bugs, session)
        assert len(update.bugs) == 1
        assert update.bugs[0].bug_id == 4321
        eq_(self.db.query(model.Bug).filter_by(bug_id=1234).first(), None)

        # Try removing a bug when it already has BugKarma
        karma = BugKarma(bug_id=4321, karma=1)
        self.db.add(karma)
        self.db.flush()
        bugs = ['5678']
        update.update_bugs(bugs, session)
        assert len(update.bugs) == 1
        assert update.bugs[0].bug_id == 5678
        eq_(self.db.query(model.Bug).filter_by(bug_id=4321).count(), 1)

    def test_unicode_bug_title(self):
        bug = self.obj.bugs[0]
        bug.title = u'foo\xe9bar'
        from bodhi.util import bug_link
        link = bug_link(None, bug)
        eq_(
            link,
            u"<a target='_blank' href='https://bugzilla.redhat.com/show_bug.cgi?id=1'>#1</a> foo\xe9bar"
        )

    def test_set_request_untested_stable(self):
        """
        Ensure that we can't submit an update for stable if it hasn't met the
        minimum testing requirements.
        """
        req = DummyRequest(user=DummyUser())
        req.errors = cornice.Errors()
        req.koji = buildsys.get_session()
        eq_(self.obj.status, UpdateStatus.pending)
        try:
            self.obj.set_request(self.db, UpdateRequest.stable, req.user.name)
            assert False
        except BodhiException as e:
            pass
        eq_(self.obj.request, UpdateRequest.testing)
        eq_(self.obj.status, UpdateStatus.pending)
        eq_(e.message, config.get('not_yet_tested_msg'))
Example #48
    def test_extended_metadata_updating(self):
        update = self.db.query(Update).one()

        # Pretend it's pushed to testing
        update.status = UpdateStatus.testing
        update.request = None
        update.date_pushed = datetime.utcnow()
        DevBuildsys.__tagged__[update.title] = ['f17-updates-testing']

        # Generate the XML
        md = ExtendedMetadata(update.release, update.request, self.db, self.temprepo)

        # Insert the updateinfo.xml into the repository
        md.insert_updateinfo()
        md.cache_repodata()

        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, update.title)

        self.assertIsNotNone(notice)
        self.assertEquals(notice.title, update.title)
        self.assertEquals(notice.release, update.release.long_name)
        self.assertEquals(notice.status, update.status.value)
        self.assertEquals(notice.updated_date, update.date_modified)
        self.assertEquals(notice.fromstr, config.get('bodhi_email'))
        self.assertEquals(notice.description, update.notes)
        #self.assertIsNotNone(notice.issued_date)
        self.assertEquals(notice.id, update.alias)
        #self.assertIsNone(notice.epoch)
        bug = notice.references[0]
        url = update.bugs[0].url
        self.assertEquals(bug.href, url)
        self.assertEquals(bug.id, '12345')
        self.assertEquals(bug.type, 'bugzilla')
        cve = notice.references[1]
        self.assertEquals(cve.type, 'cve')
        self.assertEquals(cve.href, update.cves[0].url)
        self.assertEquals(cve.id, update.cves[0].cve_id)

        # Change the notes on the update, but not the date_modified, so we can
        # ensure that the notice came from the cache
        update.notes = u'x'

        # Re-initialize our temporary repo
        shutil.rmtree(self.temprepo)
        os.mkdir(self.temprepo)
        mkmetadatadir(join(self.temprepo, 'f17-updates-testing', 'i386'))

        md = ExtendedMetadata(update.release, update.request, self.db, self.temprepo)
        md.insert_updateinfo()
        updateinfo = self._verify_updateinfo(self.repodata)

        # Read and verify the updateinfo.xml.gz
        uinfo = createrepo_c.UpdateInfo(updateinfo)
        notice = self.get_notice(uinfo, update.title)

        self.assertIsNotNone(notice)
        self.assertEquals(notice.description, u'Useful details!')  # not u'x'
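The test above calls a _verify_updateinfo() helper that is not shown in this excerpt. A plausible sketch, assuming it simply locates the single generated updateinfo file under the repodata directory (the glob pattern is an assumption):

import glob
from os.path import join

def _verify_updateinfo(self, repodata):
    # Sketch: expect exactly one generated updateinfo file in the repodata dir.
    updateinfos = glob.glob(join(repodata, '*updateinfo.xml*'))
    assert len(updateinfos) == 1, updateinfos
    return updateinfos[0]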
Example #49
    def __init__(self, *args, **kw):
        super(TestExtendedMetadata, self).__init__(*args, **kw)
        repo_path = os.path.join(config.get('mash_dir'), 'f17-updates-testing')
        if not os.path.exists(repo_path):
            os.makedirs(repo_path)
Example #50
    def __init__(self, *args, **kw):
        super(TestExtendedMetadata, self).__init__(*args, **kw)
        repo_path = os.path.join(config.get("mash_dir"), "f17-updates-testing")
        if not os.path.exists(repo_path):
            os.makedirs(repo_path)
Example #51
    def work(self):
        self.koji = buildsys.get_session()
        self.release = self.db.query(Release)\
                              .filter_by(name=self.release).one()
        self.id = getattr(self.release, '%s_tag' % self.request.value)

        # Set our thread's "name" so it shows up nicely in the logs.
        # https://docs.python.org/2/library/threading.html#thread-objects
        self.name = self.id

        # For 'pending' branched releases, we only want to perform repo-related
        # tasks for testing updates. For stable updates, we should just add the
        # dist_tag and do everything else other than mashing/updateinfo, since
        # the nightly build-branched cron job mashes for us.
        self.skip_mash = False
        if (self.release.state is ReleaseState.pending
                and self.request is UpdateRequest.stable):
            self.skip_mash = True

        self.log.info('Running MasherThread(%s)' % self.id)
        self.init_state()
        if not self.resume:
            self.init_path()

        notifications.publish(
            topic="mashtask.mashing",
            msg=dict(repo=self.id, updates=self.state['updates']),
            force=True,
        )

        try:
            if self.resume:
                self.load_state()
            else:
                self.save_state()

            self.load_updates()
            self.verify_updates()

            if self.request is UpdateRequest.stable:
                self.perform_gating()

            self.determine_and_perform_tag_actions()

            self.update_security_bugs()

            self.expire_buildroot_overrides()
            self.remove_pending_tags()
            self.update_comps()

            if self.resume and self.path in self.state['completed_repos']:
                self.log.info('Skipping completed repo: %s', self.path)
                self.complete_requests()
                # We still need to generate the testing digest, since it's stored in memory
                self.generate_testing_digest()
            else:
                if not self.skip_mash:
                    mash_thread = self.mash()

                # Things we can do while we're mashing
                self.complete_requests()
                self.generate_testing_digest()

                if not self.skip_mash:
                    uinfo = self.generate_updateinfo()

                    self.wait_for_mash(mash_thread)

                    uinfo.insert_updateinfo()
                    uinfo.insert_pkgtags()
                    uinfo.cache_repodata()

            # Compose OSTrees from our freshly mashed repos
            if config.get('compose_atomic_trees'):
                self.compose_atomic_trees()

            if not self.skip_mash:
                self.sanity_check_repo()
                self.stage_repo()

                # Wait for the repo to hit the master mirror
                self.wait_for_sync()

            # Send fedmsg notifications
            self.send_notifications()

            # Update bugzillas
            self.modify_bugs()

            # Add comments to updates
            self.status_comments()

            # Announce stable updates to the mailing list
            self.send_stable_announcements()

            # Email updates-testing digest
            self.send_testing_digest()

            self.success = True
            self.remove_state()
            self.unlock_updates()
            self.check_all_karma_thresholds()
        except:
            self.log.exception('Exception in MasherThread(%s)' % self.id)
            self.save_state()
            raise
        finally:
            self.finish(self.success)
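The resume branch above depends on save_state()/load_state() checkpointing so a failed mash can be picked up where it left off. A hedged sketch of that pattern follows; the file name, its location under self.mash_dir, and the JSON format are assumptions for illustration, not necessarily what bodhi does.

import json
import os

def save_state(self):
    # Sketch: persist the thread's progress so a crashed mash can resume later.
    with open(os.path.join(self.mash_dir, 'MASHING-%s' % self.id), 'w') as fo:
        json.dump(self.state, fo)

def load_state(self):
    # Sketch: restore the previously saved progress when resuming.
    with open(os.path.join(self.mash_dir, 'MASHING-%s' % self.id)) as fo:
        self.state = json.load(fo)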
Example #52
    def test_config(self):
        assert config.get('sqlalchemy.url'), config
        assert config['sqlalchemy.url'], config
Example #54
    def test_config(self):
        assert config.get("sqlalchemy.url"), config
        assert config["sqlalchemy.url"], config