def createUserBMPsAndRecordQueries(self, number_of_bmps):
     # Create {number_of_bmps} branch merge proposals related to a
     # user, render the person's +activereviews page, and return the
     # view and a recorder of the queries generated by this page
     # rendering.
     user = self.factory.makePerson()
     for i in xrange(number_of_bmps):
         # Create one of the two types of BMP which will be displayed
         # on a person's +activereviews page:
         # - A BMP for which the person is the reviewer.
         # - A BMP for which the person is the owner of the target
         # branch.
         if i % 2 == 0:
             self.createUserBMP(target_branch_owner=user)
         else:
             self.createUserBMP(reviewer=user)
     login_person(user)
     flush_database_caches()
     with StormStatementRecorder() as recorder:
         view = create_initialized_view(user,
                                        name='+activereviews',
                                        rootsite='code',
                                        principal=user)
         view.render()
     return recorder, view
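# A minimal sketch (not from the original test suite) of how the helper
# above might be used: render with few and with many BMPs and assert that
# the query count does not grow. The method name and BMP counts are
# hypothetical placeholders.
def test_activereviews_query_count_is_constant(self):
    recorder_few, view = self.createUserBMPsAndRecordQueries(3)
    recorder_many, view = self.createUserBMPsAndRecordQueries(9)
    # Rendering +activereviews should need no more queries for nine
    # proposals than it did for three.
    self.assertThat(
        recorder_many, HasQueryCount(Equals(recorder_few.count)))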
 def test_eta(self):
     # BuildView.eta returns a non-None value when it should, or None
     # when there's no start time.
     build = self.factory.makeBinaryPackageBuild()
     build.queueBuild()
     self.factory.makeBuilder(processor=build.processor, virtualized=True)
     self.assertIsNot(None, create_initialized_view(build, '+index').eta)
     with admin_logged_in():
         build.archive.disable()
     flush_database_caches()
     self.assertIs(None, create_initialized_view(build, '+index').eta)
    def test_karmacache_entries(self):
        # Sample Person has some KarmaCache entries, but it's a long time
        # since we last updated this cache, and the karma he earned long ago
        # is no longer worth anything, so the karmacache-updater script will
        # delete the cache entries for Sample Person.
        sample_person = self.personset.getByName('name12')
        cache_entries = self._getCacheEntriesByPerson(sample_person)
        self.failUnless(not cache_entries.is_empty())
        for cache in cache_entries:
            self.failIf(cache.karmavalue <= 0)

        # As we can see, Foo Bar already has some karmacache entries. We'll
        # now add some fresh Karma entries for him and later we'll check that
        # the cache-updater script simply updated the existing cache entries
        # instead of creating new ones.
        foobar = self.personset.getByName('name16')
        cache_entries = self._getCacheEntriesByPerson(foobar)
        foobar_original_entries_count = cache_entries.count()
        self.failUnless(foobar_original_entries_count > 0)
        for cache in cache_entries:
            self.failIf(cache.karmavalue <= 0)
        firefox = getUtility(IProductSet)['firefox']
        foobar.assignKarma('bugcreated', firefox)

        # In the case of No Priv, he has no KarmaCache entries, so if we add
        # some fresh Karma entries to him, our cache-updater script will have
        # to create new KarmaCache entries for him.
        nopriv = self.personset.getByName('no-priv')
        self.failUnless(self._getCacheEntriesByPerson(nopriv).count() == 0)
        nopriv.assignKarma('bugcreated', firefox)

        transaction.commit()

        self._runScript()

        # Need to flush our caches since things were updated behind our back.
        flush_database_caches()

        # Check that Sample Person has no KarmaCache entries at all
        sample_person = self.personset.getByName('name12')
        self.failUnless(
            self._getCacheEntriesByPerson(sample_person).count() == 0)

        # Check that Foo Bar had his KarmaCache entries updated.
        entries_count = self._getCacheEntriesByPerson(foobar).count()
        # The cache entries that would have their karmavalue updated to 0 are
        # instead deleted from the DB; that's why the new count can be smaller
        # than the original one.
        self.failUnless(entries_count <= foobar_original_entries_count)

        # And finally, ensure that No Priv got some new KarmaCache entries.
        self.failUnless(not self._getCacheEntriesByPerson(nopriv).is_empty())
    def test_karmacache_entries(self):
        # Sample Person has some KarmaCache entries, but it's a long time
        # since we last updated this cache, and the karma they earned long
        # ago is no longer worth anything, so the karmacache-updater script
        # will delete the cache entries for Sample Person.
        sample_person = self.personset.getByName('name12')
        cache_entries = self._getCacheEntriesByPerson(sample_person)
        self.assertFalse(cache_entries.is_empty())
        for cache in cache_entries:
            self.assertFalse(cache.karmavalue <= 0)

        # As we can see, Foo Bar already has some karmacache entries. We'll
        # now add some fresh Karma entries for them and later we'll check that
        # the cache-updater script simply updated the existing cache entries
        # instead of creating new ones.
        foobar = self.personset.getByName('name16')
        cache_entries = self._getCacheEntriesByPerson(foobar)
        foobar_original_entries_count = cache_entries.count()
        self.assertTrue(foobar_original_entries_count > 0)
        for cache in cache_entries:
            self.assertFalse(cache.karmavalue <= 0)
        firefox = getUtility(IProductSet)['firefox']
        foobar.assignKarma('bugcreated', firefox)

        # In the case of No Priv, they have no KarmaCache entries, so if we
        # add some fresh Karma entries to them, our cache-updater script
        # will have to create new KarmaCache entries for them.
        nopriv = self.personset.getByName('no-priv')
        self.assertTrue(self._getCacheEntriesByPerson(nopriv).count() == 0)
        nopriv.assignKarma('bugcreated', firefox)

        transaction.commit()

        self._runScript()

        # Need to flush our caches since things were updated behind our back.
        flush_database_caches()

        # Check that Sample Person has no KarmaCache entries at all
        sample_person = self.personset.getByName('name12')
        self.assertTrue(
            self._getCacheEntriesByPerson(sample_person).count() == 0)

        # Check that Foo Bar had their KarmaCache entries updated.
        entries_count = self._getCacheEntriesByPerson(foobar).count()
        # The cache entries that would have their karmavalue updated to 0 are
        # instead deleted from the DB; that's why the new count can be smaller
        # than the original one.
        self.assertTrue(entries_count <= foobar_original_entries_count)

        # And finally, ensure that No Priv got some new KarmaCache entries.
        self.assertFalse(self._getCacheEntriesByPerson(nopriv).is_empty())
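    # The karmacache tests above rely on a _getCacheEntriesByPerson helper
    # that is not shown in this listing. A minimal sketch of what it could
    # look like, assuming Storm's IStore and the KarmaCache model; the
    # import paths (lp.registry.model.karma, lp.services.database.interfaces)
    # are assumptions, not taken from the original source.
    def _getCacheEntriesByPerson(self, person):
        from lp.registry.model.karma import KarmaCache
        from lp.services.database.interfaces import IStore
        # All KarmaCache rows belonging to the given person.
        return IStore(KarmaCache).find(
            KarmaCache, KarmaCache.person == person)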
 def test_query_count_git(self):
     if not self.supports_git:
         self.skipTest("Context doesn't support Git repositories.")
     with admin_logged_in():
         for i in range(7):
             self.makeGitMergeProposal()
     flush_database_caches()
     with StormStatementRecorder() as recorder:
         self.getViewBrowser(self.context,
                             self.view_name,
                             rootsite='code',
                             user=self.user)
     self.assertThat(recorder, HasQueryCount(LessThan(47)))
    def test_getPrecachedPersonsFromIDs_is_ubuntu_coc_signer(self):
        # getPrecachedPersonsFromIDs() sets is_ubuntu_coc_signer
        # correctly.
        person_ids = [self.factory.makePerson().id for i in range(3)]
        SignedCodeOfConduct(owner=person_ids[0], active=True)
        flush_database_caches()

        persons = list(
            self.person_set.getPrecachedPersonsFromIDs(person_ids,
                                                       need_ubuntu_coc=True))
        self.assertContentEqual(zip(person_ids, [True, False, False]),
                                [(p.id, p.is_ubuntu_coc_signer)
                                 for p in persons])
 def test_queries_for_distribution_with_non_derived_series(self):
     for index in range(10):
         self.factory.makeDistroSeries()
     distribution = self.factory.makeDistribution()
     distroseries = self.factory.makeDistroSeries(distribution=distribution)
     flush_database_caches()
     # Reload distroseries and distribution; these will reasonably already
     # be loaded before using the vocabulary.
     distroseries.distribution
     # Getting terms issues two queries: one to search for parent series
     # (of which there are none) and a second for all series.
     with StormStatementRecorder() as recorder:
         DistroSeriesDerivationVocabulary(distroseries).terms
         self.assertThat(recorder, HasQueryCount(Equals(2)))
 def test_scheduleDeletion(self):
     archive_files = [self.factory.makeArchiveFile() for _ in range(3)]
     expected_rows = [(archive_file.container, archive_file.path,
                       archive_file.library_file.content.sha256)
                      for archive_file in archive_files[:2]]
     rows = getUtility(IArchiveFileSet).scheduleDeletion(
         archive_files[:2], timedelta(days=1))
     self.assertContentEqual(expected_rows, rows)
     flush_database_caches()
     tomorrow = (get_transaction_timestamp(Store.of(archive_files[0])) +
                 timedelta(days=1))
     self.assertEqual(tomorrow, archive_files[0].scheduled_deletion_date)
     self.assertEqual(tomorrow, archive_files[1].scheduled_deletion_date)
     self.assertIsNone(archive_files[2].scheduled_deletion_date)
 def match(self, context):
     # Imported here to avoid circular dependencies.
     from lp.testing.pages import setupBrowserForUser
     with person_logged_in(self.user):
         context_url = canonical_url(context,
                                     view_name=self.view_name,
                                     **self.options)
     browser = setupBrowserForUser(self.user)
     flush_database_caches()
     with RequestTimelineCollector() as collector:
         browser.open(context_url)
     counter = HasQueryCount(LessThan(self.query_limit))
     # When bug 724691 is fixed, this can become an AnnotateMismatch to
     # describe the object being rendered.
     return counter.match(collector)
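# A hedged usage sketch for a matcher built around the match() method
# above. In Launchpad this matcher is called BrowsesWithQueryLimit, but
# treat that class name and its constructor signature as assumptions
# here, and the limit of 30 as a placeholder.
def test_page_stays_under_query_limit(self):
    person = self.factory.makePerson()
    product = self.factory.makeProduct()
    # assertThat calls matcher.match(product), which renders the page in
    # a fresh browser for `person` and checks the recorded query count
    # against the limit.
    self.assertThat(product, BrowsesWithQueryLimit(30, person))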
 def test_query_count(self):
     product = self.factory.makeProduct()
     target = self.factory.makeBranch(
         product=product, information_type=InformationType.USERDATA)
     for i in range(7):
         source = self.factory.makeBranch(
             product=product, information_type=InformationType.USERDATA)
         self.factory.makeBranchMergeProposal(
             source_branch=removeSecurityProxy(source),
             target_branch=target)
     flush_database_caches()
     with StormStatementRecorder() as recorder:
         self.getViewBrowser(
             product, '+merges', rootsite='code', user=product.owner)
     self.assertThat(recorder, HasQueryCount(Equals(41)))
 def test_unscheduleDeletion(self):
     archive_files = [self.factory.makeArchiveFile() for _ in range(3)]
     now = get_transaction_timestamp(Store.of(archive_files[0]))
     for archive_file in archive_files:
         removeSecurityProxy(archive_file).scheduled_deletion_date = now
     expected_rows = [(archive_file.container, archive_file.path,
                       archive_file.library_file.content.sha256)
                      for archive_file in archive_files[:2]]
     rows = getUtility(IArchiveFileSet).unscheduleDeletion(
         archive_files[:2])
     self.assertContentEqual(expected_rows, rows)
     flush_database_caches()
     self.assertIsNone(archive_files[0].scheduled_deletion_date)
     self.assertIsNone(archive_files[1].scheduled_deletion_date)
     self.assertEqual(now, archive_files[2].scheduled_deletion_date)
 def test_queries_for_distribution_with_non_derived_series(self):
     for index in range(10):
         self.factory.makeDistroSeries()
     distribution = self.factory.makeDistribution()
     distroseries = self.factory.makeDistroSeries(
         distribution=distribution)
     flush_database_caches()
     # Reload distroseries and distribution; these will reasonably already
     # be loaded before using the vocabulary.
     distroseries.distribution
     # Getting terms issues two queries: one to search for parent series
     # (of which there are none) and a second for all series.
     with StormStatementRecorder() as recorder:
         DistroSeriesDerivationVocabulary(distroseries).terms
         self.assertThat(recorder, HasQueryCount(Equals(2)))
 def createProductBMPsAndRecordQueries(self, number_of_bmps):
     # Create {number_of_bmps} branch merge proposals related to a
     # product, render the product's +activereviews page, and return the
     # view and a recorder of the queries generated by this page
     # rendering.
     product = self.factory.makeProduct()
     for i in xrange(number_of_bmps):
         self.createProductBMP(product=product)
     login_person(product.owner)
     flush_database_caches()
     with StormStatementRecorder() as recorder:
         view = create_initialized_view(
             product, name='+activereviews', rootsite='code',
             principal=product.owner)
         view.render()
     return recorder, view
 def test_queries_for_distribution_with_derived_series(self):
     for index in range(10):
         self.factory.makeDistroSeries()
     distribution = self.factory.makeDistribution()
     parent_distroseries = self.factory.makeDistroSeries()
     distroseries = self.factory.makeDistroSeries(distribution=distribution)
     self.factory.makeDistroSeriesParent(derived_series=distroseries,
                                         parent_series=parent_distroseries)
     flush_database_caches()
     # Reload distroseries and distribution; these will reasonably already
     # be loaded before using the vocabulary.
     distroseries.distribution
     # Getting terms issues two queries to find parent series.
     with StormStatementRecorder() as recorder:
         DistroSeriesDerivationVocabulary(distroseries).terms
         self.assertThat(recorder, HasQueryCount(Equals(2)))
 def createProductBMPsAndRecordQueries(self, number_of_bmps):
     # Create {number_of_bmps} branch merge proposals related to a
     # product, render the product's +activereviews page, and return the
     # view and a recorder of the queries generated by this page
     # rendering.
     product = self.factory.makeProduct()
     for i in xrange(number_of_bmps):
         self.createProductBMP(product=product)
     login_person(product.owner)
     flush_database_caches()
     with StormStatementRecorder() as recorder:
         view = create_initialized_view(product,
                                        name='+activereviews',
                                        rootsite='code',
                                        principal=product.owner)
         view.render()
     return recorder, view
 def test_queries_for_distribution_with_derived_series(self):
     for index in range(10):
         self.factory.makeDistroSeries()
     distribution = self.factory.makeDistribution()
     parent_distroseries = self.factory.makeDistroSeries()
     distroseries = self.factory.makeDistroSeries(
         distribution=distribution)
     self.factory.makeDistroSeriesParent(
         derived_series=distroseries, parent_series=parent_distroseries)
     flush_database_caches()
     # Reload distroseries and distribution; these will reasonably already
     # be loaded before using the vocabulary.
     distroseries.distribution
     # Getting terms issues two queries to find parent series.
     with StormStatementRecorder() as recorder:
         DistroSeriesDerivationVocabulary(distroseries).terms
         self.assertThat(recorder, HasQueryCount(Equals(2)))
 def test_query_count(self):
     product = self.factory.makeProduct()
     target = self.factory.makeBranch(
         product=product, information_type=InformationType.USERDATA)
     for i in range(7):
         source = self.factory.makeBranch(
             product=product, information_type=InformationType.USERDATA)
         self.factory.makeBranchMergeProposal(
             source_branch=removeSecurityProxy(source),
             target_branch=target)
     flush_database_caches()
     with StormStatementRecorder() as recorder:
         self.getViewBrowser(product,
                             '+merges',
                             rootsite='code',
                             user=product.owner)
     self.assertThat(recorder, HasQueryCount(Equals(41)))
 def match(self, context):
     # Imported here to avoid circular dependencies.
     from lp.testing.pages import setupBrowserForUser
     with person_logged_in(self.user):
         context_url = canonical_url(
             context, view_name=self.view_name, **self.options)
     browser = setupBrowserForUser(self.user)
     flush_database_caches()
     collector = QueryCollector()
     collector.register()
     try:
         browser.open(context_url)
         counter = HasQueryCount(LessThan(self.query_limit))
         # When bug 724691 is fixed, this can become an AnnotateMismatch to
         # describe the object being rendered.
         return counter.match(collector)
     finally:
         # Unregister now in case this method is called multiple
         # times in a single test.
         collector.unregister()
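# The explicit register()/unregister() in a try/finally above is the older
# pattern; QueryCollector also supports use as a context manager (as the
# binaryFileUrls test further below shows), which unregisters the collector
# even if the page render raises. A sketch of the same match() in that
# form, assuming the collector's context-manager protocol:
def match(self, context):
    # Imported here to avoid circular dependencies.
    from lp.testing.pages import setupBrowserForUser
    with person_logged_in(self.user):
        context_url = canonical_url(
            context, view_name=self.view_name, **self.options)
    browser = setupBrowserForUser(self.user)
    flush_database_caches()
    with QueryCollector() as collector:
        browser.open(context_url)
    return HasQueryCount(LessThan(self.query_limit)).match(collector)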
 def test_source_overrides_constant_query_count(self):
     # The query count is constant, no matter how many sources are
     # checked.
     spns = []
     distroseries = self.factory.makeDistroSeries()
     pocket = self.factory.getAnyPocket()
     for i in xrange(10):
         spph = self.factory.makeSourcePackagePublishingHistory(
             distroseries=distroseries, archive=distroseries.main_archive,
             pocket=pocket)
         spns.append(spph.sourcepackagerelease.sourcepackagename)
     flush_database_caches()
     distroseries.main_archive
     bulk.reload(spns)
     policy = FromExistingOverridePolicy()
     with StormStatementRecorder() as recorder:
         policy.calculateSourceOverrides(
             spph.distroseries.main_archive, spph.distroseries,
             spph.pocket, spns)
     self.assertThat(recorder, HasQueryCount(Equals(4)))
 def test_binary_overrides_constant_query_count(self):
     # The query count is constant, no matter how many bpn-das pairs are
     # checked.
     bpns = []
     distroarchseries = self.factory.makeDistroArchSeries()
     distroseries = distroarchseries.distroseries
     distroseries.nominatedarchindep = distroarchseries
     pocket = self.factory.getAnyPocket()
     for i in xrange(10):
         bpph = self.factory.makeBinaryPackagePublishingHistory(
             distroarchseries=distroarchseries,
             archive=distroseries.main_archive, pocket=pocket)
         bpns.append((bpph.binarypackagerelease.binarypackagename, None))
     flush_database_caches()
     distroseries.main_archive
     bulk.reload(bpn[0] for bpn in bpns)
     policy = FromExistingOverridePolicy()
     with StormStatementRecorder() as recorder:
         policy.calculateBinaryOverrides(
             distroseries.main_archive, distroseries, pocket, bpns)
     self.assertThat(recorder, HasQueryCount(Equals(4)))
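# Both override tests above use the same preloading pattern before the
# recorder starts: flush the caches, touch distroseries.main_archive, and
# bulk.reload() the package-name rows, so that the recorded count covers
# only the queries issued by the calculate*Overrides() call itself. The
# pattern in isolation (names as in the tests above, shown for the
# source-override case):
flush_database_caches()
distroseries.main_archive        # pull the archive back into the cache
bulk.reload(spns)                # reload all the SPN rows in one query
with StormStatementRecorder() as recorder:
    policy.calculateSourceOverrides(
        distroseries.main_archive, distroseries, pocket, spns)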
 def createUserBMPsAndRecordQueries(self, number_of_bmps):
     # Create {number_of_bmps} branch merge proposals related to a
     # user, render the person's +activereviews page, and return the
     # view and a recorder of the queries generated by this page
     # rendering.
     user = self.factory.makePerson()
     for i in xrange(number_of_bmps):
         # Create one of the two types of BMP which will be displayed
         # on a person's +activereviews page:
         # - A BMP for which the person is the reviewer.
         # - A BMP for which the person is the owner of the target
         # branch.
         if i % 2 == 0:
             self.createUserBMP(target_branch_owner=user)
         else:
             self.createUserBMP(reviewer=user)
     login_person(user)
     flush_database_caches()
     with StormStatementRecorder() as recorder:
         view = create_initialized_view(
             user, name='+activereviews', rootsite='code', principal=user)
         view.render()
     return recorder, view
    def test_binaryFileUrls_include_meta(self):
        person = self.factory.makePerson()
        webservice = webservice_for_person(
            person, permission=OAuthPermission.READ_PUBLIC)

        bpph, url = self.make_bpph_for(person)
        query_counts = []
        for i in range(3):
            flush_database_caches()
            with QueryCollector() as collector:
                response = webservice.named_get(
                    url, 'binaryFileUrls', include_meta=True,
                    api_version='devel')
            query_counts.append(collector.count)
            with person_logged_in(person):
                self.factory.makeBinaryPackageFile(
                    binarypackagerelease=bpph.binarypackagerelease)
        self.assertEqual(query_counts[0] - 1, query_counts[-1])

        self.assertEqual(200, response.status)
        urls = response.jsonBody()
        self.assertEqual(3, len(urls))
        self.assertThat(urls[0], IsInstance(dict))
    def test_disabled_template(self):
        """Test that Distroseries stats do not include disabled templates."""
        # First, we check current values of cached statistics.

        # We get some objects we will need for this test.
        ubuntu = self.distribution['ubuntu']
        hoary = self.distroseriesset.queryByName(ubuntu, 'hoary')
        spanish = self.languageset['es']
        spanish_hoary = hoary.getDistroSeriesLanguage(spanish)
        # We need pmount's template.
        templates = self.potemplateset.getAllByName('pmount')
        pmount_template = None
        for template in templates:
            if template.distroseries == hoary:
                pmount_template = template

        self.failIfEqual(pmount_template, None)

        # Let's calculate the statistics ourselves so we can check that cached
        # values are the right ones.
        messagecount = 0
        currentcount = 0
        for template in hoary.getCurrentTranslationTemplates():
            messagecount += template.messageCount()
            # Get the Spanish IPOFile.
            pofile = template.getPOFileByLang('es')
            if pofile is not None:
                currentcount += pofile.currentCount()
        contributor_count = hoary.getPOFileContributorsByLanguage(
                spanish).count()

        # The number of messages to translate in Hoary is as expected.
        self.failUnlessEqual(hoary.messagecount, messagecount)

        # And the same for translations and contributors.
        self.failUnlessEqual(spanish_hoary.currentCount(), currentcount)
        # XXX Danilo Segan 2010-08-06: we should not assert that
        # sampledata is correct. Bug #614397.
        #self.failUnlessEqual(spanish_hoary.contributor_count,
        #    contributor_count)

        # Let's mark the 'pmount' template as not current for Hoary.
        pmount_template.iscurrent = False
        # And store its statistics values to validate cached values later.
        pmount_messages = pmount_template.messageCount()
        pmount_spanish_pofile = pmount_template.getPOFileByLang('es')
        pmount_spanish_translated = pmount_spanish_pofile.currentCount()

        # Commit the current transaction because the script will run in
        # another transaction and thus won't see the changes made in this
        # test unless we commit.
        # XXX CarlosPerelloMarin 2007-01-22 bug=3989:
        # Unnecessary flush_database_updates required.
        from lp.services.database.sqlbase import flush_database_updates
        flush_database_updates()
        import transaction
        transaction.commit()

        # Run the update-stats.py script to check that we no longer count
        # the information in that template.
        cmd = [sys.executable, get_script(), '--quiet']
        process = subprocess.Popen(
            cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)
        (stdout, empty_stderr) = process.communicate()

        # Ensure it returned a success code
        self.failUnlessEqual(
            process.returncode, 0,
            'update-stats.py exited with return code %d. Output was %r' % (
                process.returncode, stdout))

        # Now confirm the script did what it is supposed to do.

        # We flush the caches so that the objects defined above get their
        # content from the modified DB.
        from lp.services.database.sqlbase import flush_database_caches
        flush_database_caches()

        # The transaction changed, so we need to refetch the SQLObjects.
        ubuntu = self.distribution['ubuntu']
        hoary = self.distroseriesset.queryByName(ubuntu, 'hoary')
        spanish = self.languageset['es']
        spanish_hoary = hoary.getDistroSeriesLanguage(spanish)

        # Let's recalculate the statistics ourselves to validate what the
        # script recalculated.
        new_messagecount = 0
        new_currentcount = 0
        for template in hoary.getCurrentTranslationTemplates():
            new_messagecount += template.messageCount()
            pofile = template.getPOFileByLang('es')
            if pofile is not None:
                new_currentcount += pofile.currentCount()

        new_contributor_count = (
            hoary.getPOFileContributorsByLanguage(spanish).count())

        # The number of messages to translate in Hoary is now lower because
        # we no longer count pmount messages.
        self.failUnlessEqual(hoary.messagecount, new_messagecount)
        self.failIf(messagecount <= new_messagecount)
        self.failUnlessEqual(messagecount - pmount_messages, new_messagecount)

        # The number of messages translated into Spanish is also lower now
        # because we no longer count Spanish translations for pmount.
        self.failUnlessEqual(spanish_hoary.currentCount(), new_currentcount)
        self.failIf(currentcount <= new_currentcount)
        self.failUnlessEqual(currentcount - pmount_spanish_translated,
            new_currentcount)

        # Also, there are two Spanish translators who only contributed to
        # pmount, so they are gone now.
        self.failUnlessEqual(
            spanish_hoary.contributor_count, new_contributor_count)
        self.failIf(contributor_count <= new_contributor_count)
    def test_disabled_template(self):
        """Test that Distroseries stats do not include disabled templates."""
        # First, we check current values of cached statistics.

        # We get some objects we will need for this test.
        ubuntu = self.distribution['ubuntu']
        hoary = self.distroseriesset.queryByName(ubuntu, 'hoary')
        spanish = self.languageset['es']
        spanish_hoary = hoary.getDistroSeriesLanguage(spanish)
        # We need pmount's template.
        templates = self.potemplateset.getAllByName('pmount')
        pmount_template = None
        for template in templates:
            if template.distroseries == hoary:
                pmount_template = template

        self.failIfEqual(pmount_template, None)

        # Let's calculate the statistics ourselves so we can check that cached
        # values are the right ones.
        messagecount = 0
        currentcount = 0
        for template in hoary.getCurrentTranslationTemplates():
            messagecount += template.messageCount()
            # Get the Spanish IPOFile.
            pofile = template.getPOFileByLang('es')
            if pofile is not None:
                currentcount += pofile.currentCount()
        contributor_count = hoary.getPOFileContributorsByLanguage(
            spanish).count()

        # The number of messages to translate in Hoary is as expected.
        self.failUnlessEqual(hoary.messagecount, messagecount)

        # And the same for translations and contributors.
        self.failUnlessEqual(spanish_hoary.currentCount(), currentcount)
        # XXX Danilo Segan 2010-08-06: we should not assert that
        # sampledata is correct. Bug #614397.
        #self.failUnlessEqual(spanish_hoary.contributor_count,
        #    contributor_count)

        # Let's mark the 'pmount' template as not current for Hoary.
        pmount_template.iscurrent = False
        # And store its statistics values to validate cached values later.
        pmount_messages = pmount_template.messageCount()
        pmount_spanish_pofile = pmount_template.getPOFileByLang('es')
        pmount_spanish_translated = pmount_spanish_pofile.currentCount()

        # Commit the current transaction because the script will run in
        # another transaction and thus won't see the changes made in this
        # test unless we commit.
        # XXX CarlosPerelloMarin 2007-01-22 bug=3989:
        # Unnecessary flush_database_updates required.
        from lp.services.database.sqlbase import flush_database_updates
        flush_database_updates()
        import transaction
        transaction.commit()

        # Run the update-stats.py script to check that we no longer count
        # the information in that template.
        cmd = [sys.executable, get_script(), '--quiet']
        process = subprocess.Popen(cmd,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
        (stdout, empty_stderr) = process.communicate()

        # Ensure it returned a success code
        self.failUnlessEqual(
            process.returncode, 0,
            'update-stats.py exited with return code %d. Output was %r' %
            (process.returncode, stdout))

        # Now confirm the script did what it is supposed to do.

        # We flush the caches so that the objects defined above get their
        # content from the modified DB.
        from lp.services.database.sqlbase import flush_database_caches
        flush_database_caches()

        # The transaction changed, so we need to refetch the SQLObjects.
        ubuntu = self.distribution['ubuntu']
        hoary = self.distroseriesset.queryByName(ubuntu, 'hoary')
        spanish = self.languageset['es']
        spanish_hoary = hoary.getDistroSeriesLanguage(spanish)

        # Let's recalculate the statistics ourselves to validate what the
        # script recalculated.
        new_messagecount = 0
        new_currentcount = 0
        for template in hoary.getCurrentTranslationTemplates():
            new_messagecount += template.messageCount()
            pofile = template.getPOFileByLang('es')
            if pofile is not None:
                new_currentcount += pofile.currentCount()

        new_contributor_count = (
            hoary.getPOFileContributorsByLanguage(spanish).count())

        # The number of messages to translate in Hoary is now lower because
        # we no longer count pmount messages.
        self.failUnlessEqual(hoary.messagecount, new_messagecount)
        self.failIf(messagecount <= new_messagecount)
        self.failUnlessEqual(messagecount - pmount_messages, new_messagecount)

        # The number of messages translated into Spanish is also lower now
        # because we no longer count Spanish translations for pmount.
        self.failUnlessEqual(spanish_hoary.currentCount(), new_currentcount)
        self.failIf(currentcount <= new_currentcount)
        self.failUnlessEqual(currentcount - pmount_spanish_translated,
                             new_currentcount)

        # Also, there are two Spanish translators who only contributed to
        # pmount, so they are gone now.
        self.failUnlessEqual(spanish_hoary.contributor_count,
                             new_contributor_count)
        self.failIf(contributor_count <= new_contributor_count)
 def runScript(self, script):
     try:
         script.main()
     finally:
         self.addDetail("log", script.logger.content)
         flush_database_caches()
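# A hedged usage sketch for the helper above; the script class and the
# final assertion are hypothetical placeholders.
def test_script_run_is_logged_and_flushed(self):
    script = SomeMaintenanceScript(test_args=[])  # hypothetical class
    self.runScript(script)
    # Because runScript() calls flush_database_caches(), reads after this
    # point see what the script committed rather than stale cached values.
    self.assertIsNotNone(script.logger.content)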