Ejemplo n.º 1
0
    def _lock(self):
        """ Write out what updates we are pushing and any successfully mashed
        repositories to our MASHING lock """
        mashed_dir = config.get("mashed_dir")
        mash_stage = config.get("mashed_stage_dir")
        # One lock file per masher run, keyed by this task's mash_lock_id
        mash_lock = join(mashed_dir, "MASHING-%s" % self.mash_lock_id)
        if not os.path.isdir(mashed_dir):
            log.info("Creating mashed_dir %s" % mashed_dir)
            os.makedirs(mashed_dir)
        if not os.path.isdir(mash_stage):
            log.info("Creating mashed_stage_dir %s" % mash_stage)
            os.makedirs(mash_stage)
        if os.path.exists(mash_lock):
            if self.resume:
                # An existing lock plus resume=True means we pick up the
                # previous push's state from the pickled lock file
                log.debug("Resuming previous push!")
                lock = file(mash_lock, "r")
                masher_state = pickle.load(lock)
                lock.close()

                # For backwards compatability, we need to make sure we handle
                # masher state that is just a list of updates, as well as a
                # dictionary of updates and successfully mashed repos
                if isinstance(masher_state, list):
                    # Legacy format: a bare list of update titles
                    for up in masher_state:
                        try:
                            up = PackageUpdate.byTitle(up)
                            self.updates.add(up)
                        except SQLObjectNotFound:
                            log.warning("Cannot find %s" % up)

                # { 'updates' : [PackageUpdate.title,],
                #   'repos'   : ['/path_to_completed_repo',] }
                elif isinstance(masher_state, dict):
                    for up in masher_state["updates"]:
                        try:
                            up = PackageUpdate.byTitle(up)
                            self.updates.add(up)
                        except SQLObjectNotFound:
                            log.warning("Cannot find %s" % up)
                    # Remember repos already composed so we can skip them
                    for repo in masher_state["composed_repos"]:
                        self.composed_repos.append(repo)
                else:
                    log.error("Unknown masher lock format: %s" % masher_state)
                    raise MashTaskException
            else:
                # Lock exists but we weren't asked to resume: refuse to
                # clobber the previous, incomplete push
                log.error("Previous mash not complete!  Either resume the last " "push, or remove %s" % mash_lock)
                raise MashTaskException
        else:
            if self.resume:
                # Nothing to resume from; bail out loudly
                msg = "Trying to resume a push, yet %s doesn't exist!" % mash_lock
                log.error(msg)
                raise MashTaskException(msg)

            # Fresh push: record the queued update titles and the (so far
            # empty) list of composed repos so a crash can be resumed later
            log.debug("Creating lock for updates push: %s" % mash_lock)
            lock = file(mash_lock, "w")
            pickle.dump(
                {"updates": [update.title for update in self.updates], "composed_repos": self.composed_repos}, lock
            )
            lock.close()
Ejemplo n.º 2
0
 def test_delete(self):
     """Deleting an update should remove it from the database."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     client.delete(update=self.build)
     deleted = False
     try:
         PackageUpdate.byTitle(self.build)
     except SQLObjectNotFound:
         deleted = True
     assert deleted, "Update not deleted properly"
Ejemplo n.º 3
0
 def test_delete(self):
     """Deleting an update should remove it from the database."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     client.delete(update=self.build)
     deleted = False
     try:
         PackageUpdate.byTitle(self.build)
     except SQLObjectNotFound:
         deleted = True
     assert deleted, "Update not deleted properly"
Ejemplo n.º 4
0
 def test_request(self):
     """Requesting a push should leave the update's request at 'testing'."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     client.request(update=self.build, request=options.request)
     assert PackageUpdate.byTitle(self.build).request == 'testing'
     # Repeating the same request must be idempotent
     options.request = 'testing'
     client.request(update=self.build, request=options.request)
     assert PackageUpdate.byTitle(self.build).request == 'testing'
Ejemplo n.º 5
0
 def test_request(self):
     """Requesting a push should leave the update's request at 'testing'."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     client.request(update=self.build, request=options.request)
     assert PackageUpdate.byTitle(self.build).request == 'testing'
     # Repeating the same request must be idempotent
     options.request = 'testing'
     client.request(update=self.build, request=options.request)
     assert PackageUpdate.byTitle(self.build).request == 'testing'
Ejemplo n.º 6
0
 def test_comment(self):
     """Comments should accumulate, but the submitter must not be able
     to change his own update's karma."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     client.comment(update=self.build, comment=options.comment,
                    karma=options.karma)
     update = PackageUpdate.byTitle(self.build)
     assert len(update.comments) == 2, update.comments
     assert update.comments[1].text == options.comment
     # Submitter cannot alter karma on his own update
     assert update.karma == 0
     client.comment(update=self.build, comment=options.comment, karma=1)
     update = PackageUpdate.byTitle(self.build)
     assert len(update.comments) == 3, update.comments
Ejemplo n.º 7
0
 def test_comment(self):
     """Comments should accumulate and karma should track the votes."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     client.comment(update=self.build, comment=options.comment,
                    karma=options.karma)
     update = PackageUpdate.byTitle(self.build)
     assert len(update.comments) == 2, update.comments
     assert update.comments[1].text == options.comment
     assert update.karma == int(options.karma)
     client.comment(update=self.build, comment=options.comment, karma=1)
     update = PackageUpdate.byTitle(self.build)
     assert len(update.comments) == 3, update.comments
     assert update.karma == int(options.karma) + 2
Ejemplo n.º 8
0
    def test_file_input(self):
        bodhi = self.__get_bodhi_client()
        opts = self.__get_opts()

        out = file(opts.input_file, 'w')
        out.write('''[%s]
type=enhancement
request=testing
bugs=123,456
notes=bar
autokarma=True
stable_karma=10
unstable_karma=-10
close_bugs=True
''' % self.build)
        out.close()

        updates = bodhi.parse_file(input_file=opts.input_file)
        for update_args in updates:
            bodhi.save(**update_args)

        update = PackageUpdate.byTitle(self.build)
        assert update.type == 'enhancement'
        assert update.request == 'testing'
        assert update.notes == 'bar', repr(update.notes)
        for bug in (123, 456):
            bz = Bugzilla.byBz_id(bug)
            assert bz in update.bugs

        os.unlink(opts.input_file)
Ejemplo n.º 9
0
    def test_file_input(self):
        bodhi = self.__get_bodhi_client()
        opts = self.__get_opts()

        out = file(opts.input_file, 'w')
        out.write('''[%s]
type=enhancement
request=testing
bugs=123,456
notes=bar
autokarma=True
stable_karma=10
unstable_karma=-10
close_bugs=True
''' % self.build)
        out.close()

        updates = bodhi.parse_file(input_file=opts.input_file)
        for update_args in updates:
            bodhi.save(**update_args)

        update = PackageUpdate.byTitle(self.build)
        assert update.type == 'enhancement'
        assert update.request == 'testing'
        assert update.notes == 'bar', repr(update.notes)
        for bug in (123, 456):
            bz = Bugzilla.byBz_id(bug)
            assert bz in update.bugs

        os.unlink(opts.input_file)
Ejemplo n.º 10
0
 def test_mine(self):
     """Querying with mine=True should return exactly this user's update."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     result = client.query(mine=True)
     assert result['title'] == u"1 update found", repr(result)
     assert len(result['updates']) == 1
Ejemplo n.º 11
0
 def test_mine(self):
     """Querying with mine=True should return exactly this user's update."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     assert PackageUpdate.byTitle(self.build)
     result = client.query(mine=True)
     assert result['title'] == u"1 update found", repr(result)
     assert len(result['updates']) == 1
Ejemplo n.º 12
0
 def test_unpush(self):
     """Unpushing an update should send it back to the pending state."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     options.request = 'unpush'
     client.request(update=self.build, request=options.request)
     assert PackageUpdate.byTitle(self.build).status == 'pending'
Ejemplo n.º 13
0
 def test_unpush(self):
     """Unpushing an update should send it back to the pending state."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     options.request = 'unpush'
     client.request(update=self.build, request=options.request)
     assert PackageUpdate.byTitle(self.build).status == 'pending'
Ejemplo n.º 14
0
 def test_new_update(self):
     """Saving a brand new update should persist every submitted field."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     update = PackageUpdate.byTitle(self.build)
     assert update and update.title == self.build
     assert update.release.name == options.release.upper()
     assert update.type == options.type_
     assert update.notes == options.notes
     # Every referenced bug should be attached to the update
     for bug_id in options.bugs.split(','):
         assert Bugzilla.byBz_id(int(bug_id)) in update.bugs
Ejemplo n.º 15
0
 def test_new_update(self):
     """Saving a brand new update should persist every submitted field."""
     client = self.__get_bodhi_client()
     options = self.__get_opts()
     self.__save_update(self.build, options, client)
     update = PackageUpdate.byTitle(self.build)
     assert update and update.title == self.build
     assert update.release.name == options.release.upper()
     assert update.type == options.type_
     assert update.notes == options.notes
     # Every referenced bug should be attached to the update
     for bug_id in options.bugs.split(','):
         assert Bugzilla.byBz_id(int(bug_id)) in update.bugs
Ejemplo n.º 16
0
 def test_encoding(self, buildnvr='yum-3.2.1-1.fc7'):
     """Non-ASCII submitter names and notes should survive a round-trip
     through the database."""
     submitter = u'Foo \xc3\xa9 Bar <*****@*****.**>'
     notes = u'Testing \u2019t stuff'
     update = PackageUpdate(title=buildnvr,
                            release=get_rel(),
                            submitter=submitter,
                            notes=notes,
                            type='security')
     assert update
     assert update.notes == notes
     assert update.submitter == submitter
     update.addPackageBuild(get_build(buildnvr))
     update = PackageUpdate.byTitle(buildnvr)
     assert update.builds[0].updates[0] == update
     return update
Ejemplo n.º 17
0
 def test_encoding(self, buildnvr='yum-3.2.1-1.fc7'):
     """Non-ASCII submitter names and notes should survive a round-trip
     through the database."""
     submitter = u'Foo \xc3\xa9 Bar <*****@*****.**>'
     notes = u'Testing \u2019t stuff'
     update = PackageUpdate(title=buildnvr,
                            release=get_rel(),
                            submitter=submitter,
                            notes=notes,
                            type='security')
     assert update
     assert update.notes == notes
     assert update.submitter == submitter
     update.addPackageBuild(get_build(buildnvr))
     update = PackageUpdate.byTitle(buildnvr)
     assert update.builds[0].updates[0] == update
     return update
Ejemplo n.º 18
0
    def mash(self, updates=None, resume=False, **kw):
        """ Mash a list of PackageUpdate objects.

        If this instance is deployed with a remote masher, then it simply
        proxies the request.  If we are the masher, then send these updates to
        our Mash instance.  This will then start a thread that takes care of
        handling all of the update requests, composing fresh repositories,
        generating and sending update notices, closing bugs, etc.

        :param updates: update title(s) to mash -- a list, a single title,
            or a stringified list of titles (as produced by the web layer)
        :param resume: whether to resume a previously interrupted push
        """
        if not updates:
            updates = []
        if not isinstance(updates, list):
            if isinstance(updates, basestring):
                # The request layer may hand us a stringified Python list of
                # titles; coerce it back into a real list via JSON.
                log.debug("Doing json hack")
                try:
                    updates = json.loads(
                        updates.replace("u'", "\"").replace("'", "\""))
                except ValueError:
                    # Not valid JSON after the quote munging, so treat the
                    # whole string as a single update title.  (The original
                    # bare `except:` also hid unrelated errors.)
                    log.debug("Didn't work, assuming it's a single update...")
                    updates = [updates]
            else:
                updates = [updates]

        # If we're not The Masher, then proxy this request to it
        if config.get('masher'):
            data = self._masher_request(
                '/admin/mash', updates=updates, resume=resume) or {}
            flash_log('Push request %s' %
                      (data.get('success') and 'succeeded' or 'failed'))
            raise redirect('/admin/masher')

        # We are the masher: queue the updates for the mashing thread
        from bodhi.masher import masher
        masher.queue([PackageUpdate.byTitle(title) for title in updates],
                     resume=resume)
        if request_format() == 'json':
            return dict(success=True)
        flash("Updates queued for mashing")
        raise redirect('/admin/masher')
Ejemplo n.º 19
0
    def push(self):
        """ List updates tagged with a push/unpush/move request """
        updates = []
        resume = False
        mash = self._current_mash()
        if not mash:
            # Masher is unreachable or blew up; nothing to show
            flash_log("A masher exception has occured.")
            return dict(updates=[], resume=False)

        if mash['mashing']:
            flash_log('The masher is currently pushing updates')
            return dict(updates=updates, resume=resume)

        # A non-empty mash queue means a previous push can be resumed
        for title in mash.get('updates', []):
            try:
                updates.append(PackageUpdate.byTitle(title))
            except SQLObjectNotFound:
                log.warning("Cannot find update %s in push queue" % title)

        if updates:
            flash_log('There is an updates push ready to be resumed')
            resume = True
            return dict(updates=updates, resume=resume)

        # Otherwise, gather every update with an outstanding request.
        # (Security-approval filtering and locked-release filtering are
        # currently disabled; come F13+, bodhi will implement the 'No
        # Frozen Rawhide' proposal and treat 'locked' releases as pending.)
        for update in PackageUpdate.select(PackageUpdate.q.request != None):
            updates.append(update)
        return dict(updates=updates, resume=resume)
Ejemplo n.º 20
0
    def push(self):
        """ List updates tagged with a push/unpush/move request """
        updates = []
        resume = False
        mash = self._current_mash()
        if not mash:
            # Masher is unreachable or blew up; nothing to show
            flash_log("A masher exception has occured.")
            return dict(updates=[], resume=False)

        if mash['mashing']:
            flash_log('The masher is currently pushing updates')
            return dict(updates=updates, resume=resume)

        # A non-empty mash queue means a previous push can be resumed
        for title in mash.get('updates', []):
            try:
                updates.append(PackageUpdate.byTitle(title))
            except SQLObjectNotFound:
                log.warning("Cannot find update %s in push queue" % title)

        if updates:
            flash_log('There is an updates push ready to be resumed')
            resume = True
            return dict(updates=updates, resume=resume)

        # Otherwise, gather every update with an outstanding request.
        # (Security-approval filtering and locked-release filtering are
        # currently disabled; come F13+, bodhi will implement the 'No
        # Frozen Rawhide' proposal and treat 'locked' releases as pending.)
        for update in PackageUpdate.select(PackageUpdate.q.request != None):
            updates.append(update)
        return dict(updates=updates, resume=resume)
Ejemplo n.º 21
0
    def mash(self, updates=None, resume=False, **kw):
        """ Mash a list of PackageUpdate objects.

        If this instance is deployed with a remote masher, then it simply
        proxies the request.  If we are the masher, then send these updates to
        our Mash instance.  This will then start a thread that takes care of
        handling all of the update requests, composing fresh repositories,
        generating and sending update notices, closing bugs, etc.

        :param updates: update title(s) to mash -- a list, a single title,
            or a stringified list of titles (as produced by the web layer)
        :param resume: whether to resume a previously interrupted push
        """
        if not updates:
            updates = []
        if not isinstance(updates, list):
            if isinstance(updates, basestring):
                # The request layer may hand us a stringified Python list of
                # titles; coerce it back into a real list via JSON.
                log.debug("Doing json hack")
                try:
                    updates = json.loads(updates.replace("u'", "\"").replace("'", "\""))
                except ValueError:
                    # Not valid JSON after the quote munging, so treat the
                    # whole string as a single update title.  (The original
                    # bare `except:` also hid unrelated errors.)
                    log.debug("Didn't work, assuming it's a single update...")
                    updates = [updates]
            else:
                updates = [updates]

        # If we're not The Masher, then proxy this request to it
        if config.get('masher'):
            data = self._masher_request('/admin/mash', updates=updates, resume=resume) or {}
            flash_log('Push request %s'
                      % (data.get('success') and 'succeeded' or 'failed'))
            raise redirect('/admin/masher')

        # We are the masher: queue the updates for the mashing thread
        from bodhi.masher import masher
        masher.queue([PackageUpdate.byTitle(title) for title in updates],
                     resume=resume)
        if request_format() == 'json':
            return dict(success=True)
        flash("Updates queued for mashing")
        raise redirect('/admin/masher')
Ejemplo n.º 22
0
def load_db():
    print "\nLoading pickled database %s" % sys.argv[2]
    db = file(sys.argv[2], 'r')
    data = pickle.load(db)

    # Legacy format was just a list of update dictionaries
    # Now we'll pull things out into an organized dictionary:
    # {'updates': [], 'releases': []}
    if isinstance(data, dict):
        for release in data['releases']:
            try:
                Release.byName(release['name'])
            except SQLObjectNotFound:
                Release(**release)
        data = data['updates']

    progress = ProgressBar(maxValue=len(data))

    for u in data:
        try:
            release = Release.byName(u['release'][0])
        except SQLObjectNotFound:
            release = Release(name=u['release'][0],
                              long_name=u['release'][1],
                              id_prefix=u['release'][2],
                              dist_tag=u['release'][3])

        ## Backwards compatbility
        request = u['request']
        if u['request'] == 'move':
            request = 'stable'
        elif u['request'] == 'push':
            request = 'testing'
        elif u['request'] == 'unpush':
            request = 'obsolete'
        if u['approved'] in (True, False):
            u['approved'] = None
        if u.has_key('update_id'):
            u['updateid'] = u['update_id']
        if not u.has_key('date_modified'):
            u['date_modified'] = None

        try:
            update = PackageUpdate.byTitle(u['title'])
        except SQLObjectNotFound:
            update = PackageUpdate(title=u['title'],
                                   date_submitted=u['date_submitted'],
                                   date_pushed=u['date_pushed'],
                                   date_modified=u['date_modified'],
                                   release=release,
                                   submitter=u['submitter'],
                                   updateid=u['updateid'],
                                   type=u['type'],
                                   status=u['status'],
                                   pushed=u['pushed'],
                                   notes=u['notes'],
                                   karma=u['karma'],
                                   request=request,
                                   approved=u['approved'])

        ## Create Package and PackageBuild objects
        for pkg, nvr in u['builds']:
            try:
                package = Package.byName(pkg)
            except SQLObjectNotFound:
                package = Package(name=pkg)
            try:
                build = PackageBuild.byNvr(nvr)
            except SQLObjectNotFound:
                build = PackageBuild(nvr=nvr, package=package)
            update.addPackageBuild(build)

        ## Create all Bugzilla objects for this update
        for bug_num, bug_title, security, parent in u['bugs']:
            try:
                bug = Bugzilla.byBz_id(bug_num)
            except SQLObjectNotFound:
                bug = Bugzilla(bz_id=bug_num, security=security, parent=parent)
                bug.title = bug_title
            update.addBugzilla(bug)

        ## Create all CVE objects for this update
        for cve_id in u['cves']:
            try:
                cve = CVE.byCve_id(cve_id)
            except SQLObjectNotFound:
                cve = CVE(cve_id=cve_id)
            update.addCVE(cve)
        for timestamp, author, text, karma, anonymous in u['comments']:
            comment = Comment(timestamp=timestamp,
                              author=author,
                              text=text,
                              karma=karma,
                              update=update,
                              anonymous=anonymous)

        progress()
Ejemplo n.º 23
0
def main():
    load_config()
    hub.begin()

    print "Finding updates with duplicate IDs..."
    if os.path.exists('dupes.pickle'):
        out = file('dupes.pickle')
        dupes = cPickle.load(out)
        out.close()
        highest_fedora = int(file('highest_fedora').read())
        highest_epel = int(file('highest_epel').read())
    else:
        dupes = set()
        highest_fedora = 0
        highest_epel = 0

        for update in PackageUpdate.select(PackageUpdate.q.updateid!=None):
            if '-2010-' in update.updateid:
                if update.release.id_prefix == 'FEDORA':
                    if update.updateid_int > highest_fedora:
                        highest_fedora = update.updateid_int
                else:
                    if update.updateid_int > highest_epel:
                        highest_epel = update.updateid_int

            updates = PackageUpdate.select(
                    AND(PackageUpdate.q.updateid == update.updateid,
                        PackageUpdate.q.title != update.title))
            if updates.count():
                # Maybe TODO?: ensure these dupes have a date_pushed less tahn update?!
                # this way, the new ID is based on the oldest update
                for u in updates:
                    dupes.add(u.title)

        out = file('dupes.pickle', 'w')
        cPickle.dump(dupes, out)
        out.close()
        print "Wrote dupes.pickle"

        file('highest_fedora', 'w').write(str(highest_fedora))
        file('highest_epel', 'w').write(str(highest_epel))

    # verify what we really found the highest IDs
    assert PackageUpdate.select(PackageUpdate.q.updateid=='FEDORA-2010-%d' % (highest_fedora + 1)).count() == 0
    assert PackageUpdate.select(PackageUpdate.q.updateid=='FEDORA-EPEL-2010-%d' % (highest_epel + 1)).count() == 0

    # Should be 740?
    print "%d dupes" % len(dupes)

    print "Highest FEDORA ID:", highest_fedora
    print "Highest FEDORA-EPEL ID:", highest_epel

    # Reassign the update IDs on all of our dupes
    for dupe in dupes:
        up = PackageUpdate.byTitle(dupe)
        #print "%s *was* %s" % (up.title, up.updateid)
        up.updateid = None

        # TODO: save & restore this value after new id assignment?!
        #up.date_pushed = None

    # Tweak the date_pushed to on the updates with the highest IDs
    PackageUpdate.select(PackageUpdate.q.updateid=='FEDORA-2010-%d' % highest_fedora).date_pushed = datetime.now()
    PackageUpdate.select(PackageUpdate.q.updateid=='FEDORA-EPEL-2010-%d' % highest_epel).date_pushed = datetime.now()

    #hub.commit()

    for dupe in dupes:
        up = PackageUpdate.byTitle(dupe)
        up.assign_id()
        ups = PackageUpdate.select(PackageUpdate.q.updateid == up.updateid)
        if ups.count() == 1:
            print "Still a dupe!!"
            for update in ups:
                if update.title == up.title:
                    continue
                else:
                    if update.title in dupes:
                        print "%s in dupes, yet shares an updateid %s" % (
                                update.title, update.updateid)
                    else:
                        print "%s is not in dupes, but dupes %s" % (
                                update.title, updateid)

    print "Checking to ensure we have no more dupes..."
    dupes = set()
    for update in PackageUpdate.select(PackageUpdate.q.updateid != None):
        updates = PackageUpdate.select(
                AND(PackageUpdate.q.updateid == update.updateid,
                    PackageUpdate.q.title != update.title))
        if updates.count():
            dupes.add(update.title)
    print "%d dupes (should be 0)" % len(dupes)
Ejemplo n.º 24
0
 def test_bullets_in_notes(self):
     """A middle-dot (bullet) character in the notes should round-trip."""
     bullet = u'\xb7'
     update = self.get_update(name='foo-1.2.3-4')
     update.notes = bullet
     fetched = PackageUpdate.byTitle('foo-1.2.3-4')
     assert fetched.notes == bullet
Ejemplo n.º 25
0
    def _lock(self):
        """ Write out what updates we are pushing and any successfully mashed 
        repositories to our MASHING lock """
        mashed_dir = config.get('mashed_dir')
        mash_stage = config.get('mashed_stage_dir')
        # One lock file per masher run, keyed by this task's mash_lock_id
        mash_lock = join(mashed_dir, 'MASHING-%s' % self.mash_lock_id)
        if not os.path.isdir(mashed_dir):
            log.info("Creating mashed_dir %s" % mashed_dir)
            os.makedirs(mashed_dir)
        if not os.path.isdir(mash_stage):
            log.info("Creating mashed_stage_dir %s" % mash_stage)
            os.makedirs(mash_stage)
        if os.path.exists(mash_lock):
            if self.resume:
                # An existing lock plus resume=True means we pick up the
                # previous push's state from the pickled lock file
                log.debug("Resuming previous push!")
                lock = file(mash_lock, 'r')
                masher_state = pickle.load(lock)
                lock.close()

                # For backwards compatability, we need to make sure we handle
                # masher state that is just a list of updates, as well as a
                # dictionary of updates and successfully mashed repos
                if isinstance(masher_state, list):
                    # Legacy format: a bare list of update titles
                    for up in masher_state:
                        try:
                            up = PackageUpdate.byTitle(up)
                            self.updates.add(up)
                        except SQLObjectNotFound:
                            log.warning("Cannot find %s" % up)

                # { 'updates' : [PackageUpdate.title,],
                #   'repos'   : ['/path_to_completed_repo',] }
                elif isinstance(masher_state, dict):
                    for up in masher_state['updates']:
                        try:
                            up = PackageUpdate.byTitle(up)
                            self.updates.add(up)
                        except SQLObjectNotFound:
                            log.warning("Cannot find %s" % up)
                    # Remember repos already composed so we can skip them
                    for repo in masher_state['composed_repos']:
                        self.composed_repos.append(repo)
                else:
                    log.error('Unknown masher lock format: %s' % masher_state)
                    raise MashTaskException
            else:
                # Lock exists but we weren't asked to resume: refuse to
                # clobber the previous, incomplete push
                log.error("Previous mash not complete!  Either resume the last "
                          "push, or remove %s" % mash_lock)
                raise MashTaskException
        else:
            if self.resume:
                # Nothing to resume from; bail out loudly
                msg = "Trying to resume a push, yet %s doesn't exist!" % mash_lock
                log.error(msg)
                raise MashTaskException(msg)

            # Fresh push: record the queued update titles and the (so far
            # empty) list of composed repos so a crash can be resumed later
            log.debug("Creating lock for updates push: %s" % mash_lock)
            lock = file(mash_lock, 'w')
            pickle.dump({
                'updates': [update.title for update in self.updates],
                'composed_repos': self.composed_repos,
                }, lock)
            lock.close()
Ejemplo n.º 26
0
def main():
    load_config()
    hub.begin()

    print "Finding updates with duplicate IDs..."
    if os.path.exists('dupes.pickle'):
        out = file('dupes.pickle')
        dupes = cPickle.load(out)
        out.close()
        highest_fedora = int(file('highest_fedora').read())
        highest_epel = int(file('highest_epel').read())
    else:
        dupes = set()
        highest_fedora = 0
        highest_epel = 0

        for update in PackageUpdate.select(PackageUpdate.q.updateid != None):
            if '-2010-' in update.updateid:
                if update.release.id_prefix == 'FEDORA':
                    if update.updateid_int > highest_fedora:
                        highest_fedora = update.updateid_int
                else:
                    if update.updateid_int > highest_epel:
                        highest_epel = update.updateid_int

            updates = PackageUpdate.select(
                AND(PackageUpdate.q.updateid == update.updateid,
                    PackageUpdate.q.title != update.title))
            if updates.count():
                # Maybe TODO?: ensure these dupes have a date_pushed less tahn update?!
                # this way, the new ID is based on the oldest update
                for u in updates:
                    dupes.add(u.title)

        out = file('dupes.pickle', 'w')
        cPickle.dump(dupes, out)
        out.close()
        print "Wrote dupes.pickle"

        file('highest_fedora', 'w').write(str(highest_fedora))
        file('highest_epel', 'w').write(str(highest_epel))

    # verify what we really found the highest IDs
    assert PackageUpdate.select(PackageUpdate.q.updateid == 'FEDORA-2010-%d' %
                                (highest_fedora + 1)).count() == 0
    assert PackageUpdate.select(
        PackageUpdate.q.updateid == 'FEDORA-EPEL-2010-%d' %
        (highest_epel + 1)).count() == 0

    # Should be 740?
    print "%d dupes" % len(dupes)

    print "Highest FEDORA ID:", highest_fedora
    print "Highest FEDORA-EPEL ID:", highest_epel

    # Reassign the update IDs on all of our dupes
    for dupe in dupes:
        up = PackageUpdate.byTitle(dupe)
        #print "%s *was* %s" % (up.title, up.updateid)
        up.updateid = None

        # TODO: save & restore this value after new id assignment?!
        #up.date_pushed = None

    # Tweak the date_pushed to on the updates with the highest IDs
    PackageUpdate.select(PackageUpdate.q.updateid == 'FEDORA-2010-%d' %
                         highest_fedora).date_pushed = datetime.now()
    PackageUpdate.select(PackageUpdate.q.updateid == 'FEDORA-EPEL-2010-%d' %
                         highest_epel).date_pushed = datetime.now()

    #hub.commit()

    for dupe in dupes:
        up = PackageUpdate.byTitle(dupe)
        up.assign_id()
        ups = PackageUpdate.select(PackageUpdate.q.updateid == up.updateid)
        if ups.count() == 1:
            print "Still a dupe!!"
            for update in ups:
                if update.title == up.title:
                    continue
                else:
                    if update.title in dupes:
                        print "%s in dupes, yet shares an updateid %s" % (
                            update.title, update.updateid)
                    else:
                        print "%s is not in dupes, but dupes %s" % (
                            update.title, updateid)

    print "Checking to ensure we have no more dupes..."
    dupes = set()
    for update in PackageUpdate.select(PackageUpdate.q.updateid != None):
        updates = PackageUpdate.select(
            AND(PackageUpdate.q.updateid == update.updateid,
                PackageUpdate.q.title != update.title))
        if updates.count():
            dupes.add(update.title)
    print "%d dupes (should be 0)" % len(dupes)
Ejemplo n.º 27
0
def load_db():
    print "\nLoading pickled database %s" % sys.argv[2]
    db = file(sys.argv[2], "r")
    data = pickle.load(db)

    # Legacy format was just a list of update dictionaries
    # Now we'll pull things out into an organized dictionary:
    # {'updates': [], 'releases': []}
    if isinstance(data, dict):
        for release in data["releases"]:
            try:
                Release.byName(release["name"])
            except SQLObjectNotFound:
                Release(**release)
        data = data["updates"]

    progress = ProgressBar(maxValue=len(data))

    for u in data:
        try:
            release = Release.byName(u["release"][0])
        except SQLObjectNotFound:
            release = Release(
                name=u["release"][0], long_name=u["release"][1], id_prefix=u["release"][2], dist_tag=u["release"][3]
            )

        ## Backwards compatbility
        request = u["request"]
        if u["request"] == "move":
            request = "stable"
        elif u["request"] == "push":
            request = "testing"
        elif u["request"] == "unpush":
            request = "obsolete"
        if u["approved"] in (True, False):
            u["approved"] = None
        if u.has_key("update_id"):
            u["updateid"] = u["update_id"]
        if not u.has_key("date_modified"):
            u["date_modified"] = None

        try:
            update = PackageUpdate.byTitle(u["title"])
        except SQLObjectNotFound:
            update = PackageUpdate(
                title=u["title"],
                date_submitted=u["date_submitted"],
                date_pushed=u["date_pushed"],
                date_modified=u["date_modified"],
                release=release,
                submitter=u["submitter"],
                updateid=u["updateid"],
                type=u["type"],
                status=u["status"],
                pushed=u["pushed"],
                notes=u["notes"],
                karma=u["karma"],
                request=request,
                approved=u["approved"],
            )

        ## Create Package and PackageBuild objects
        for pkg, nvr in u["builds"]:
            try:
                package = Package.byName(pkg)
            except SQLObjectNotFound:
                package = Package(name=pkg)
            try:
                build = PackageBuild.byNvr(nvr)
            except SQLObjectNotFound:
                build = PackageBuild(nvr=nvr, package=package)
            update.addPackageBuild(build)

        ## Create all Bugzilla objects for this update
        for bug_num, bug_title, security, parent in u["bugs"]:
            try:
                bug = Bugzilla.byBz_id(bug_num)
            except SQLObjectNotFound:
                bug = Bugzilla(bz_id=bug_num, security=security, parent=parent)
                bug.title = bug_title
            update.addBugzilla(bug)

        ## Create all CVE objects for this update
        for cve_id in u["cves"]:
            try:
                cve = CVE.byCve_id(cve_id)
            except SQLObjectNotFound:
                cve = CVE(cve_id=cve_id)
            update.addCVE(cve)
        for timestamp, author, text, karma, anonymous in u["comments"]:
            comment = Comment(
                timestamp=timestamp, author=author, text=text, karma=karma, update=update, anonymous=anonymous
            )

        progress()
Ejemplo n.º 28
0
 def test_bullets_in_notes(self):
     """Ensure a non-ASCII bullet character survives a notes round-trip."""
     bullet = u'\xb7'
     self.get_update(name='foo-1.2.3-4').notes = bullet
     fetched = PackageUpdate.byTitle('foo-1.2.3-4')
     assert fetched.notes == bullet
Ejemplo n.º 29
0
def load_db():
    print "\nLoading pickled database %s" % sys.argv[2]
    db = file(sys.argv[2], 'r')
    data = pickle.load(db)

    # Load up all of the overrides
    for override in data.get('overrides', []):
        try:
            BuildRootOverride.byBuild(override['build'])
        except SQLObjectNotFound:
            BuildRootOverride(**override)

    # Legacy format was just a list of update dictionaries
    # Now we'll pull things out into an organized dictionary:
    # {'updates': [], 'releases': []}
    if isinstance(data, dict):
        for release in data['releases']:
            try:
                Release.byName(release['name'])
            except SQLObjectNotFound:
                Release(**release)
        data = data['updates']

    progress = ProgressBar(maxValue=len(data))

    for u in data:
        try:
            release = Release.byName(u['release'][0])
        except SQLObjectNotFound:
            release = Release(name=u['release'][0], long_name=u['release'][1],
                              id_prefix=u['release'][2], dist_tag=u['release'][3])

        ## Backwards compatbility
        request = u['request']
        if u['request'] == 'move':
            request = 'stable'
        elif u['request'] == 'push':
            request = 'testing'
        elif u['request'] == 'unpush':
            request = 'obsolete'
        if u['approved'] in (True, False):
            u['approved'] = None
        if 'update_id' in u:
            u['updateid'] = u['update_id']
        if not 'date_modified' in u:
            u['date_modified'] = None

        try:
            update = PackageUpdate.byTitle(u['title'])
        except SQLObjectNotFound:
            update = PackageUpdate(title=u['title'],
                                   date_submitted=u['date_submitted'],
                                   date_pushed=u['date_pushed'],
                                   date_modified=u['date_modified'],
                                   release=release,
                                   submitter=u['submitter'],
                                   updateid=u['updateid'],
                                   type=u['type'],
                                   status=u['status'],
                                   pushed=u['pushed'],
                                   notes=u['notes'],
                                   karma=u['karma'],
                                   request=request,
                                   approved=u['approved'])

        ## Create Package and PackageBuild objects
        for pkg, nvr in u['builds']:
            try:
                package = Package.byName(pkg)
            except SQLObjectNotFound:
                package = Package(name=pkg)
            try:
                build = PackageBuild.byNvr(nvr)
            except SQLObjectNotFound:
                build = PackageBuild(nvr=nvr, package=package)
            update.addPackageBuild(build)

        ## Create all Bugzilla objects for this update
        for bug_num, bug_title, security, parent in u['bugs']:
            try:
                bug = Bugzilla.byBz_id(bug_num)
            except SQLObjectNotFound:
                bug = Bugzilla(bz_id=bug_num, security=security, parent=parent)
                bug.title = bug_title
            update.addBugzilla(bug)

        ## Create all CVE objects for this update
        for cve_id in u['cves']:
            try:
                cve = CVE.byCve_id(cve_id)
            except SQLObjectNotFound:
                cve = CVE(cve_id=cve_id)
            update.addCVE(cve)
        for timestamp, author, text, karma, anonymous in u['comments']:
            comment = Comment(timestamp=timestamp, author=author, text=text,
                              karma=karma, update=update, anonymous=anonymous)

        progress()