Example #1
def test_cache_by_bug_notification_level(self):
     # The BugNotificationRecipients set is cached by notification level
     # to avoid duplicate work. The returned set is a copy of the cached set.
     subscriber = self.factory.makePerson()
     product = self.factory.makeProduct()
     with person_logged_in(subscriber):
         subscription = product.addBugSubscription(subscriber, subscriber)
         bug_filter = subscription.bug_filters[0]
         bug_filter.bug_notification_level = BugNotificationLevel.COMMENTS
     bug = self.factory.makeBug(target=product)
     # The factory call queued LIFECYCLE and COMMENT notifications.
     bug.clearBugNotificationRecipientsCache()
     levels = [
         BugNotificationLevel.LIFECYCLE,
         BugNotificationLevel.METADATA,
         BugNotificationLevel.COMMENTS,
     ]
     for level in levels:
         with StormStatementRecorder() as recorder:
             first_recipients = bug.getBugNotificationRecipients(
                 level=level)
             self.assertThat(recorder, HasQueryCount(GreaterThan(1)))
         with StormStatementRecorder() as recorder:
             second_recipients = bug.getBugNotificationRecipients(
                 level=level)
             self.assertThat(recorder, HasQueryCount(Equals(0)))
         self.assertContentEqual([bug.owner, subscriber], first_recipients)
         self.assertContentEqual(first_recipients, second_recipients)
         self.assertIsNot(first_recipients, second_recipients)
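Most of the examples below follow the same two-step shape as the test above: run the operation once to warm a cache or trigger eager loading, then assert a bound on the query count of a second, recorded run (Example #14 is the simplest case). A minimal sketch of that shape, assuming a hypothetical factory method and cached property, with the imports these tests typically use:

from testtools.matchers import Equals

from lp.testing import StormStatementRecorder
from lp.testing.matchers import HasQueryCount


def test_expensive_property_is_cached(self):
    # makeThing and expensive_property are hypothetical placeholders for
    # whatever cached computation is under test.
    thing = self.factory.makeThing()
    thing.expensive_property  # First access is allowed to query.
    with StormStatementRecorder() as recorder:
        thing.expensive_property  # Second access must not hit the database.
    self.assertThat(recorder, HasQueryCount(Equals(0)))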
Example #2
 def test_match(self):
     matcher = HasQueryCount(Is(3))
     collector = RequestTimelineCollector()
     collector.count = 3
     # not inspected
     del collector.queries
     self.assertThat(matcher.match(collector), Is(None))
Example #3
 def test_messages_query_counts_constant(self):
     # XXX Robert Collins 2010-09-15 bug=619017
     # This test may be thrown off by the reference bug. To get around the
     # problem, flush and reset are called on the bug storm cache before
     # each call to the webservice. When lp's storm is updated to release
     # the committed fix for this bug, please see about updating this test.
     login(USER_EMAIL)
     bug = self.factory.makeBug()
     store = Store.of(bug)
     self.factory.makeBugComment(bug)
     self.factory.makeBugComment(bug)
     self.factory.makeBugComment(bug)
     webservice = LaunchpadWebServiceCaller('launchpad-library',
                                            'salgado-change-anything')
     collector = QueryCollector()
     collector.register()
     self.addCleanup(collector.unregister)
     url = '/bugs/%d/messages?ws.size=75' % bug.id
     # First request.
     store.flush()
     store.reset()
     response = webservice.get(url)
     self.assertThat(collector, HasQueryCount(LessThan(24)))
     with_2_count = collector.count
      self.assertEqual(response.status, 200)
     login(USER_EMAIL)
     for i in range(50):
         self.factory.makeBugComment(bug)
     self.factory.makeBugAttachment(bug)
     logout()
     # Second request.
     store.flush()
     store.reset()
     response = webservice.get(url)
     self.assertThat(collector, HasQueryCount(Equals(with_2_count)))
Example #4
 def test_with_backtrace(self):
     matcher = HasQueryCount(LessThan(2))
     collector = RequestTimelineCollector()
     collector.count = 2
     collector.queries = [
         (0, 1, "SQL-main-slave", "SELECT 1 FROM Person",
          '  File "example", line 2, in <module>\n'
          '    Store.of(Person).one()\n'),
         (2, 3, "SQL-main-slave", "SELECT 1 FROM Product",
          '  File "example", line 3, in <module>\n'
          '    Store.of(Product).one()\n'),
         ]
     mismatch = matcher.match(collector)
     self.assertThat(mismatch, Not(Is(None)))
     details = mismatch.get_details()
     lines = []
     for name, content in details.items():
         self.assertEqual("queries", name)
         self.assertEqual("text", content.content_type.type)
         lines.append(''.join(content.iter_text()))
     separator = "-" * 70
     backtrace_separator = "." * 70
     expected_lines = [
         '0-1@SQL-main-slave SELECT 1 FROM Person\n' + separator + '\n' +
         '  File "example", line 2, in <module>\n' +
         '    Store.of(Person).one()\n' + backtrace_separator + '\n' +
         '2-3@SQL-main-slave SELECT 1 FROM Product\n' + separator + '\n' +
         '  File "example", line 3, in <module>\n' +
         '    Store.of(Product).one()\n' + backtrace_separator,
         ]
     self.assertEqual(expected_lines, lines)
     self.assertEqual(
         "queries do not match: %s" % (LessThan(2).match(2).describe(),),
         mismatch.describe())
Example #5
 def test_mismatch(self):
     matcher = HasQueryCount(LessThan(2))
     collector = RequestTimelineCollector()
     collector.count = 2
     collector.queries = [
         (0, 1, "SQL-main-slave", "SELECT 1 FROM Person", None),
         (2, 3, "SQL-main-slave", "SELECT 1 FROM Product", None),
         ]
     mismatch = matcher.match(collector)
     self.assertThat(mismatch, Not(Is(None)))
     details = mismatch.get_details()
     lines = []
     for name, content in details.items():
         self.assertEqual("queries", name)
         self.assertEqual("text", content.content_type.type)
         lines.append(''.join(content.iter_text()))
     separator = "-" * 70
     expected_lines = [
         "0-1@SQL-main-slave SELECT 1 FROM Person\n" + separator + "\n" +
         "2-3@SQL-main-slave SELECT 1 FROM Product\n" + separator,
         ]
     self.assertEqual(expected_lines, lines)
     self.assertEqual(
         "queries do not match: %s" % (LessThan(2).match(2).describe(),),
         mismatch.describe())
Example #6
    def test_getPrecachedPersonsFromIDs(self):
        # The getPrecachedPersonsFromIDs() method should only make one
        # query to load all the extraneous data. Accessing the
        # attributes should then cause zero queries.
        person_ids = [self.factory.makePerson().id for i in range(3)]

        with StormStatementRecorder() as recorder:
            persons = list(
                self.person_set.getPrecachedPersonsFromIDs(
                    person_ids,
                    need_karma=True,
                    need_ubuntu_coc=True,
                    need_location=True,
                    need_archive=True,
                    need_preferred_email=True,
                    need_validity=True))
        self.assertThat(recorder, HasQueryCount(LessThan(2)))

        with StormStatementRecorder() as recorder:
            for person in persons:
                person.is_valid_person
                person.karma
                person.is_ubuntu_coc_signer
                person.location
                person.archive
                person.preferredemail
        self.assertThat(recorder, HasQueryCount(LessThan(1)))
Example #7
    def test_landing_targets_constant_queries(self):
        project = self.factory.makeProduct()
        with person_logged_in(project.owner):
            source = self.factory.makeBranch(target=project)
            source_url = api_url(source)
            webservice = webservice_for_person(
                project.owner, permission=OAuthPermission.WRITE_PRIVATE)

        def create_mp():
            with admin_logged_in():
                branch = self.factory.makeBranch(
                    target=project,
                    stacked_on=self.factory.makeBranch(
                        target=project,
                        information_type=InformationType.PRIVATESECURITY),
                    information_type=InformationType.PRIVATESECURITY)
                self.factory.makeBranchMergeProposal(source_branch=source,
                                                     target_branch=branch)

        def list_mps():
            webservice.get(source_url + '/landing_targets')

        list_mps()
        recorder1, recorder2 = record_two_runs(list_mps, create_mp, 2)
        self.assertThat(recorder1, HasQueryCount(LessThan(30)))
        self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
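Example #7, like Examples #17, #19 and #25, combines record_two_runs with HasQueryCount.byEquality: the operation is recorded against a small data set and again after more objects have been created, and the second count must equal the first. A minimal sketch of that pattern; view_items and make_item are hypothetical stand-ins for the read under test and the factory call that grows the data set:

from lp.testing import record_two_runs
from lp.testing.matchers import HasQueryCount


def test_listing_query_count_is_constant(self):
    def view_items():
        # The read whose query count must not grow with the data set.
        list(self.target.getItems())

    def make_item():
        # Adds one more object between the two recorded runs.
        self.factory.makeItem(target=self.target)

    recorder1, recorder2 = record_two_runs(view_items, make_item, 2)
    self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))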
Example #8
 def test_binary_query_counts(self):
     query_baseline = 40
     # Assess the baseline.
     collector = RequestTimelineCollector()
     collector.register()
     self.addCleanup(collector.unregister)
     ppa = self.factory.makeArchive()
     viewer = self.factory.makePerson()
     browser = self.getUserBrowser(user=viewer)
     with person_logged_in(viewer):
         # The baseline has one package, because otherwise the
         # short-circuit prevents the packages iteration happening at
         # all and we're not actually measuring scaling
         # appropriately.
         pkg = self.factory.makeBinaryPackagePublishingHistory(archive=ppa)
         url = canonical_url(ppa) + "/+packages"
     browser.open(url)
     self.assertThat(collector, HasQueryCount(LessThan(query_baseline)))
     expected_count = collector.count
     # Use all new objects - avoids caching issues invalidating the
     # gathered metrics.
     login(ADMIN_EMAIL)
     ppa = self.factory.makeArchive()
     viewer = self.factory.makePerson()
     browser = self.getUserBrowser(user=viewer)
     with person_logged_in(viewer):
         for i in range(3):
             pkg = self.factory.makeBinaryPackagePublishingHistory(
                 archive=ppa, distroarchseries=pkg.distroarchseries)
         url = canonical_url(ppa) + "/+packages"
     browser.open(url)
     self.assertThat(collector, HasQueryCount(Equals(expected_count)))
Example #9
    def test_sample_binary_packages__constant_number_sql_queries(self):
        # Retrieving
        # DistributionSourcePackageRelease.sample_binary_packages and
        # accessing the property "summary" of its items requires a
        # constant number of SQL queries, regardless of the number
        # of existing binary package releases.
        self.makeBinaryPackageRelease()
        self.updateDistroSeriesPackageCache()
        with StormStatementRecorder() as recorder:
            for ds_package in self.dsp_release.sample_binary_packages:
                ds_package.summary
        self.assertThat(recorder, HasQueryCount(LessThan(5)))
        self.assertEqual(1, self.dsp_release.sample_binary_packages.count())

        for iteration in range(5):
            self.makeBinaryPackageRelease()
        self.updateDistroSeriesPackageCache()
        with StormStatementRecorder() as recorder:
            for ds_package in self.dsp_release.sample_binary_packages:
                ds_package.summary
        self.assertThat(recorder, HasQueryCount(LessThan(5)))
        self.assertEqual(6, self.dsp_release.sample_binary_packages.count())

        # Even if the cache is not updated for binary packages,
        # DistributionSourcePackageRelease objects do not try to
        # retrieve DistroSeriesPackageCache records if they know
        # that such records do not exist.
        for iteration in range(5):
            self.makeBinaryPackageRelease()
        with StormStatementRecorder() as recorder:
            for ds_package in self.dsp_release.sample_binary_packages:
                ds_package.summary
        self.assertThat(recorder, HasQueryCount(LessThan(5)))
        self.assertEqual(11, self.dsp_release.sample_binary_packages.count())
Example #10
 def test_match(self):
     matcher = HasQueryCount(Is(3))
     collector = QueryCollector()
     collector.count = 3
     # not inspected
     del collector.queries
     self.assertThat(matcher.match(collector), Is(None))
Example #11
    def test_query_count(self):
        # The function issues a constant number of queries regardless of
        # team count.
        login_person(self.user)
        context = self.factory.makeProduct(owner=self.user)
        self._setup_teams(self.user)

        IStore(Person).invalidate()
        clear_cache()
        with StormStatementRecorder() as recorder:
            expose_user_administered_teams_to_js(
                self.request, self.user, context,
                absoluteURL=fake_absoluteURL)
        self.assertThat(recorder, HasQueryCount(Equals(4)))

        # Create some new public teams owned by the user, and a private
        # team administered by the user.
        for i in range(3):
            self.factory.makeTeam(owner=self.user)
        pt = self.factory.makeTeam(
            visibility=PersonVisibility.PRIVATE, members=[self.user])
        with person_logged_in(pt.teamowner):
            pt.addMember(
                self.user, pt.teamowner, status=TeamMembershipStatus.ADMIN)

        IStore(Person).invalidate()
        clear_cache()
        del IJSONRequestCache(self.request).objects['administratedTeams']
        with StormStatementRecorder() as recorder:
            expose_user_administered_teams_to_js(
                self.request, self.user, context,
                absoluteURL=fake_absoluteURL)
        self.assertThat(recorder, HasQueryCount(Equals(4)))
Example #12
 def test_attachments_query_counts_constant(self):
     # XXX j.c.sackett 2010-09-02 bug=619017
     # This test was being thrown off by the reference bug. To get around
     # the problem, flush and reset are called on the bug storm cache
     # before each call to the webservice. When lp's storm is updated
     # to release the committed fix for this bug, please see about
     # updating this test.
     login(USER_EMAIL)
     self.bug = self.factory.makeBug()
     store = Store.of(self.bug)
     self.factory.makeBugAttachment(self.bug)
     self.factory.makeBugAttachment(self.bug)
     webservice = LaunchpadWebServiceCaller('launchpad-library',
                                            'salgado-change-anything')
     collector = RequestTimelineCollector()
     collector.register()
     self.addCleanup(collector.unregister)
     url = '/bugs/%d/attachments?ws.size=75' % self.bug.id
     # First request.
     store.flush()
     store.reset()
     response = webservice.get(url)
     self.assertThat(collector, HasQueryCount(LessThan(24)))
     with_2_count = collector.count
     self.assertEqual(response.status, 200)
     login(USER_EMAIL)
     for i in range(5):
         self.factory.makeBugAttachment(self.bug)
     logout()
     # Second request.
     store.flush()
     store.reset()
     response = webservice.get(url)
     self.assertThat(collector, HasQueryCount(Equals(with_2_count)))
Example #13
 def test_getReviewableMessages_queries(self):
     # The Message and user that posted it are retrieved with the query
      # that gets the MessageApproval.
     test_objects = self.makeMailingListAndHeldMessage()
     team, member, sender, held_message = test_objects
     held_messages = team.mailing_list.getReviewableMessages()
     with StormStatementRecorder() as recorder:
         held_message = held_messages[0]
     self.assertThat(recorder, HasQueryCount(Equals(1)))
     with StormStatementRecorder() as recorder:
         held_message.message
         held_message.posted_by
     self.assertThat(recorder, HasQueryCount(Equals(0)))
Example #14
 def test_eta_cached(self):
     # The expensive completion time estimate is cached.
     self.build.queueBuild()
     self.build.eta
     with StormStatementRecorder() as recorder:
         self.build.eta
     self.assertThat(recorder, HasQueryCount(Equals(0)))
Example #15
 def test_many_duplicate_team_admin_subscriptions_few_queries(self):
     # This is related to bug 811447. The user is subscribed to a
     # duplicate bug through team membership in which the user is an admin.
     team = self.factory.makeTeam()
     with person_logged_in(team.teamowner):
         team.addMember(self.subscriber, team.teamowner,
                        status=TeamMembershipStatus.ADMIN)
     self.makeDuplicates(count=1, subscriber=team)
     with StormStatementRecorder() as recorder:
         self.subscriptions.reload()
     # This should produce a very small number of queries.
     self.assertThat(recorder, HasQueryCount(LessThan(6)))
     count_with_one_subscribed_duplicate = recorder.count
     # It should have the correct result.
     self.assertCollectionsAreEmpty(except_='from_duplicate')
     self.assertCollectionContents(
         self.subscriptions.from_duplicate, as_team_admin=1)
     # If we increase the number of duplicates subscribed via the team that
     # the user administers...
     self.makeDuplicates(count=4, subscriber=team)
     with StormStatementRecorder() as recorder:
         self.subscriptions.reload()
     # ...then the query count should remain the same.
     count_with_five_subscribed_duplicates = recorder.count
     self.assertEqual(
         count_with_one_subscribed_duplicate,
         count_with_five_subscribed_duplicates)
     # We should still have the correct result.
     self.assertCollectionsAreEmpty(except_='from_duplicate')
     self.assertCollectionContents(
         self.subscriptions.from_duplicate, as_team_admin=5)
Example #16
 def test_zero_values_is_noop(self):
     # create()ing 0 rows is a no-op.
     with StormStatementRecorder() as recorder:
         self.assertEqual([],
                          bulk.create((BugSubscription.bug, ), [],
                                      get_objects=True))
     self.assertThat(recorder, HasQueryCount(Equals(0)))
Example #17
    def test_private_team_query_count(self):
        # Testing visibility of a private team involves checking for
        # subscriptions to any private PPAs owned by that team.  Make sure
        # that this doesn't involve a query for every archive subscription
        # the user has.
        person = self.factory.makePerson()
        team_owner = self.factory.makePerson()
        private_team = self.factory.makeTeam(
            owner=team_owner, visibility=PersonVisibility.PRIVATE)
        checker = PublicOrPrivateTeamsExistence(
            removeSecurityProxy(private_team))

        def create_subscribed_archive():
            with person_logged_in(team_owner):
                archive = self.factory.makeArchive(owner=private_team,
                                                   private=True)
                archive.newSubscription(person, team_owner)

        def check_team_limited_view():
            person.clearInTeamCache()
            with person_logged_in(person):
                self.assertTrue(
                    checker.checkAuthenticated(IPersonRoles(person)))

        recorder1, recorder2 = record_two_runs(check_team_limited_view,
                                               create_subscribed_archive, 5)
        self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
Example #18
    def test_preloads_irc_nicks_and_preferredemail(self):
        """Test that IRC nicks and preferred email addresses are preloaded."""
        # Create three people with IRC nicks, and one without.
        people = []
        for num in range(3):
            person = self.factory.makePerson(displayname='foobar %d' % num)
            getUtility(IIrcIDSet).new(person, 'launchpad', person.name)
            people.append(person)
        people.append(self.factory.makePerson(displayname='foobar 4'))

        # Remember the current values for checking later, and throw out
        # the cache.
        expected_nicks = dict(
            (person.id, list(person.ircnicknames)) for person in people)
        expected_emails = dict(
            (person.id, person.preferredemail) for person in people)
        Store.of(people[0]).invalidate()

        results = list(self.searchVocabulary(None, u'foobar'))
        with StormStatementRecorder() as recorder:
            self.assertEqual(4, len(results))
            for person in results:
                self.assertEqual(expected_nicks[person.id],
                                 person.ircnicknames)
                self.assertEqual(expected_emails[person.id],
                                 person.preferredemail)
        self.assertThat(recorder, HasQueryCount(Equals(0)))
Example #19
    def test_query_count(self):
        # The number of queries required to render the mirror table is
        # constant in the number of mirrors.
        person = self.factory.makePerson()
        distro = self.factory.makeDistribution(owner=person)
        login_celebrity("admin")
        distro.supports_mirrors = True
        login_person(person)
        distro.mirror_admin = person
        countries = iter(getUtility(ICountrySet))

        def render_mirrors():
            text = create_initialized_view(
                distro, self.view, principal=person).render()
            self.assertNotIn("We don't know of any", text)
            return text

        def create_mirror():
            mirror = self.factory.makeMirror(
                distro, country=next(countries), official_candidate=True)
            self.configureMirror(mirror)

        recorder1, recorder2 = record_two_runs(
            render_mirrors, create_mirror, 10)
        self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
Example #20
    def test_delete(self):
        target = self.makeTarget()
        login_person(target.owner)
        hooks = []
        for i in range(3):
            hook = self.factory.makeWebhook(target, u'http://path/to/%d' % i)
            hook.ping()
            hooks.append(hook)
        self.assertEqual(3, IStore(WebhookJob).find(WebhookJob).count())
        self.assertContentEqual(
            [u'http://path/to/0', u'http://path/to/1', u'http://path/to/2'], [
                hook.delivery_url
                for hook in getUtility(IWebhookSet).findByTarget(target)
            ])

        transaction.commit()
        with StormStatementRecorder() as recorder:
            getUtility(IWebhookSet).delete(hooks[:2])
        self.assertThat(recorder, HasQueryCount(Equals(4)))

        self.assertContentEqual([u'http://path/to/2'], [
            hook.delivery_url
            for hook in getUtility(IWebhookSet).findByTarget(target)
        ])
        self.assertEqual(1, IStore(WebhookJob).find(WebhookJob).count())
        self.assertEqual(1, hooks[2].deliveries.count())
Example #21
    def test_newFromBazaarRevisions(self):
        # newFromBazaarRevisions behaves as expected.
        # only branchscanner can SELECT revisionproperties.

        self.becomeDbUser('branchscanner')
        bzr_revisions = [
            self.factory.makeBzrRevision(b'rev-1',
                                         props={
                                             'prop1': 'foo',
                                             'deb-pristine-delta': 'bar',
                                             'deb-pristine-delta-xz': 'baz'
                                         }),
            self.factory.makeBzrRevision(b'rev-2', parent_ids=[b'rev-1'])
        ]
        with StormStatementRecorder() as recorder:
            self.revision_set.newFromBazaarRevisions(bzr_revisions)
        rev_1 = self.revision_set.getByRevisionId('rev-1')
        self.assertEqual(bzr_revisions[0].committer,
                         rev_1.revision_author.name)
        self.assertEqual(bzr_revisions[0].message, rev_1.log_body)
        self.assertEqual(datetime(1970, 1, 1, 0, 0, tzinfo=pytz.UTC),
                         rev_1.revision_date)
        self.assertEqual([], rev_1.parents)
        # Revision properties starting with 'deb-pristine-delta' aren't
        # imported into the database; they're huge, opaque and
        # uninteresting for the application.
        self.assertEqual({'prop1': 'foo'}, rev_1.getProperties())
        rev_2 = self.revision_set.getByRevisionId('rev-2')
        self.assertEqual(['rev-1'], rev_2.parent_ids)
        # Really, less than 9 is great, but if the count improves, we should
        # tighten this restriction.
        self.assertThat(recorder, HasQueryCount(Equals(8)))
Example #22
 def test_preloading_for_previewdiff(self):
     project = self.factory.makeProduct()
     [target] = self.factory.makeGitRefs(target=project)
     owner = self.factory.makePerson()
     [ref1] = self.factory.makeGitRefs(target=project, owner=owner)
     [ref2] = self.factory.makeGitRefs(target=project, owner=owner)
     bmp1 = self.factory.makeBranchMergeProposalForGit(
         target_ref=target, source_ref=ref1)
     bmp2 = self.factory.makeBranchMergeProposalForGit(
         target_ref=target, source_ref=ref2)
     old_date = datetime.now(pytz.UTC) - timedelta(hours=1)
     self.factory.makePreviewDiff(
         merge_proposal=bmp1, date_created=old_date)
     previewdiff1 = self.factory.makePreviewDiff(merge_proposal=bmp1)
     self.factory.makePreviewDiff(
         merge_proposal=bmp2, date_created=old_date)
     previewdiff2 = self.factory.makePreviewDiff(merge_proposal=bmp2)
     Store.of(bmp1).flush()
     Store.of(bmp1).invalidate()
     collection = self.all_repositories.ownedBy(owner)
     [pre_bmp1, pre_bmp2] = sorted(
         collection.getMergeProposals(eager_load=True),
         key=attrgetter('id'))
     with StormStatementRecorder() as recorder:
         self.assertEqual(
             removeSecurityProxy(pre_bmp1.preview_diff).id, previewdiff1.id)
         self.assertEqual(
             removeSecurityProxy(pre_bmp2.preview_diff).id, previewdiff2.id)
     self.assertThat(recorder, HasQueryCount(Equals(0)))
Example #23
 def test_getAllLanguages_can_preload_translators_count(self):
     # LanguageSet.getAllLanguages() can preload translators_count.
     list(getUtility(ILanguageSet).getAllLanguages(
         want_translators_count=True))
     with StormStatementRecorder() as recorder:
         self.assertEqual(3, self.translated_lang.translators_count)
         self.assertEqual(0, self.untranslated_lang.translators_count)
     self.assertThat(recorder, HasQueryCount(Equals(0)))
Example #24
 def test_search_query_count(self):
     # Verify query count.
     Store.of(self.milestone).flush()
     with StormStatementRecorder() as recorder:
         list(self.bugtask_set.search(self.params))
     # 1 query for the tasks, 1 query for the product (target) eager
     # loading.
     self.assertThat(recorder, HasQueryCount(Equals(4)))
Example #25
    def test_getSourcePackagePublishing_query_count(self):
        # Check that the number of queries required to publish source
        # packages is constant in the number of source packages.
        def get_index_stanzas():
            for spp in self.series.getSourcePackagePublishing(
                    PackagePublishingPocket.RELEASE, self.universe_component,
                    self.series.main_archive):
                build_source_stanza_fields(spp.sourcepackagerelease,
                                           spp.component, spp.section)

        recorder1, recorder2 = record_two_runs(
            get_index_stanzas,
            partial(self.makeSeriesPackage,
                    pocket=PackagePublishingPocket.RELEASE,
                    status=PackagePublishingStatus.PUBLISHED), 5, 5)
        self.assertThat(recorder1, HasQueryCount(Equals(11)))
        self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
Example #26
 def test_mismatch(self):
     matcher = HasQueryCount(LessThan(2))
     collector = QueryCollector()
     collector.count = 2
     collector.queries = [("foo", "bar"), ("baaz", "quux")]
     mismatch = matcher.match(collector)
     self.assertThat(mismatch, Not(Is(None)))
     details = mismatch.get_details()
     lines = []
     for name, content in details.items():
         self.assertEqual("queries", name)
         self.assertEqual("text", content.content_type.type)
         lines.append(''.join(content.iter_text()))
     self.assertEqual(["('foo', 'bar')\n('baaz', 'quux')"],
         lines)
     self.assertEqual(
         "queries do not match: %s" % (LessThan(2).match(2).describe(),),
         mismatch.describe())
Example #27
 def test_sourcepackagenames_bulk_loaded(self):
     # SourcePackageName records referenced by POTemplates
     # are bulk loaded. Accessing the sourcepackagename attribute
     # of a potemplate does not require an additional SQL query.
     self.view.initialize()
     template = self.view.batchnav.currentBatch()[0]
     with StormStatementRecorder() as recorder:
         template.sourcepackagename
     self.assertThat(recorder, HasQueryCount(Equals(0)))
Example #28
 def test_product_affiliation_query_count(self):
     # Only 2 queries are expected, selects from:
     # - Product, Person
     person = self.factory.makePerson()
     product = self.factory.makeProduct(owner=person, name='pting')
     Store.of(product).invalidate()
     with StormStatementRecorder() as recorder:
         IHasAffiliation(product).getAffiliationBadges([person])
     self.assertThat(recorder, HasQueryCount(Equals(4)))
Example #29
 def test_search_query_count(self):
     # Verify query count.
     # 1. Query all the distroseries to determine the distro's
     #    currentseries.
     # 2. Query the bugtasks.
     Store.of(self.milestone).flush()
     with StormStatementRecorder() as recorder:
         list(self.bugtask_set.search(self.params))
     self.assertThat(recorder, HasQueryCount(Equals(4)))
Example #30
 @contextmanager
 def exactly_x_queries(self, count):
     # Assert that there are exactly `count` queries sent to the database
     # in this context. Flush first to ensure we don't count things that
     # happened before entering this context.
     self.store.flush()
     condition = HasQueryCount(Equals(count))
     with StormStatementRecorder() as recorder:
         yield recorder
     self.assertThat(recorder, condition)
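Example #30 is a reusable assertion helper rather than a test: used as a context manager it pins the exact query count of the block it wraps. A hedged usage sketch, in which bulk.load, BugSubscription and ids are placeholders rather than code from the original tests:

def test_load_issues_one_query(self):
    # ids stands in for a list of primary keys to load in bulk.
    with self.exactly_x_queries(1):
        bulk.load(BugSubscription, ids)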
Example #31
 def test_distro_context(self):
     bug = self.factory.makeBug()
     mint = self.factory.makeDistribution()
     task = self.factory.makeBugTask(bug=bug, target=mint)
     tasks = list(bug.bugtasks)
     with StormStatementRecorder() as recorder:
         filtered = filter_bugtasks_by_context(mint, tasks)
     self.assertThat(recorder, HasQueryCount(Equals(0)))
     self.assertThat(filtered, Equals([task]))
Example #32
 def test_sourcepackage_context_distro_task(self):
     bug = self.factory.makeBug()
     sp = self.factory.makeSourcePackage()
     task = self.factory.makeBugTask(bug=bug, target=sp.distribution)
     tasks = list(bug.bugtasks)
     with StormStatementRecorder() as recorder:
         filtered = filter_bugtasks_by_context(sp, tasks)
     self.assertThat(recorder, HasQueryCount(Equals(0)))
     self.assertThat(filtered, Equals([task]))