def test_getPrecachedPersonsFromIDs(self):
    # The getPrecachedPersonsFromIDs() method should only make one
    # query to load all the extraneous data.  Accessing the
    # attributes should then cause zero queries.
    person_ids = [self.factory.makePerson().id for i in range(3)]
    with StormStatementRecorder() as recorder:
        persons = list(
            self.person_set.getPrecachedPersonsFromIDs(
                person_ids, need_karma=True, need_ubuntu_coc=True,
                need_location=True, need_archive=True,
                need_preferred_email=True, need_validity=True))
    self.assertThat(recorder, HasQueryCount(LessThan(2)))
    with StormStatementRecorder() as recorder:
        for person in persons:
            # Touch every precached attribute; none of these reads
            # may hit the database.
            person.is_valid_person
            person.karma
            person.is_ubuntu_coc_signer
            person.location, person.archive
            person.preferredemail
    self.assertThat(recorder, HasQueryCount(LessThan(1)))
def test_many_duplicate_team_admin_subscriptions_few_queries(self):
    # This is related to bug 811447.  The user is subscribed to a
    # duplicate bug through team membership in which the user is an
    # admin.
    team = self.factory.makeTeam()
    with person_logged_in(team.teamowner):
        team.addMember(self.subscriber, team.teamowner,
            status=TeamMembershipStatus.ADMIN)
    self.makeDuplicates(count=1, subscriber=team)
    with StormStatementRecorder() as recorder:
        self.subscriptions.reload()
    # This should produce a very small number of queries.
    self.assertThat(recorder, HasQueryCount(LessThan(6)))
    count_with_one_subscribed_duplicate = recorder.count
    # It should have the correct result.
    self.assertCollectionsAreEmpty(except_='from_duplicate')
    self.assertCollectionContents(
        self.subscriptions.from_duplicate, as_team_admin=1)
    # If we increase the number of duplicates subscribed via the team
    # that the user administers...
    self.makeDuplicates(count=4, subscriber=team)
    with StormStatementRecorder() as recorder:
        self.subscriptions.reload()
    # ...then the query count should remain the same.
    count_with_five_subscribed_duplicates = recorder.count
    self.assertEqual(
        count_with_one_subscribed_duplicate,
        count_with_five_subscribed_duplicates)
    # We should still have the correct result.
    self.assertCollectionsAreEmpty(except_='from_duplicate')
    self.assertCollectionContents(
        self.subscriptions.from_duplicate, as_team_admin=5)
def test_expose_user_subscriptions_to_js__uses_cached_teams(self):
    # The function expose_user_subscriptions_to_js() uses a
    # cached list of administrated teams.
    user = self.factory.makePerson()
    target = self.factory.makeProduct()
    request = LaunchpadTestRequest()
    with person_logged_in(user):
        sub = target.addBugSubscription(user, user)
        # The first call requires one query to retrieve the
        # administrated teams.
        with StormStatementRecorder() as recorder:
            expose_user_subscriptions_to_js(user, [sub], request)
        statements_for_admininstrated_teams = [
            statement for statement in recorder.statements
            if 'TeamMembership' in statement]
        self.assertEqual(1, len(statements_for_admininstrated_teams))
    # Calling the function a second time does not require an
    # SQL call to retrieve the administrated teams.
    with person_logged_in(user):
        with StormStatementRecorder() as recorder:
            expose_user_subscriptions_to_js(user, [sub], request)
        statements_for_admininstrated_teams = [
            statement for statement in recorder.statements
            if 'TeamMembership' in statement]
        self.assertEqual(0, len(statements_for_admininstrated_teams))
def test_cache_by_bug_notification_level(self):
    # The BugNotificationRecipients set is cached by notification level
    # to avoid duplicate work.  The returned set is a copy of the
    # cached set.
    subscriber = self.factory.makePerson()
    product = self.factory.makeProduct()
    with person_logged_in(subscriber):
        subscription = product.addBugSubscription(subscriber, subscriber)
        bug_filter = subscription.bug_filters[0]
        bug_filter.bug_notification_level = BugNotificationLevel.COMMENTS
    bug = self.factory.makeBug(target=product)
    # The factory call queued LIFECYCLE and COMMENT notifications.
    bug.clearBugNotificationRecipientsCache()
    levels = [
        BugNotificationLevel.LIFECYCLE,
        BugNotificationLevel.METADATA,
        BugNotificationLevel.COMMENTS,
        ]
    for level in levels:
        # First access per level hits the database...
        with StormStatementRecorder() as recorder:
            first_recipients = bug.getBugNotificationRecipients(
                level=level)
        self.assertThat(recorder, HasQueryCount(GreaterThan(1)))
        # ...but the second access for the same level is served
        # entirely from the cache.
        with StormStatementRecorder() as recorder:
            second_recipients = bug.getBugNotificationRecipients(
                level=level)
        self.assertThat(recorder, HasQueryCount(Equals(0)))
        self.assertContentEqual(
            [bug.owner, subscriber], first_recipients)
        self.assertContentEqual(first_recipients, second_recipients)
        # A copy, not the cached object itself, is returned.
        self.assertIsNot(first_recipients, second_recipients)
def test_sample_binary_packages__constant_number_sql_queries(self):
    # Retrieving
    # DistributionSourcePackageRelease.sample_binary_packages and
    # accessing the property "summary" of its items requires a
    # constant number of SQL queries, regardless of the number
    # of existing binary package releases.
    self.makeBinaryPackageRelease()
    self.updateDistroSeriesPackageCache()
    with StormStatementRecorder() as recorder:
        for ds_package in self.dsp_release.sample_binary_packages:
            ds_package.summary
    self.assertThat(recorder, HasQueryCount(LessThan(5)))
    self.assertEqual(1, self.dsp_release.sample_binary_packages.count())

    # Five more releases: the query count stays under the same bound.
    for iteration in range(5):
        self.makeBinaryPackageRelease()
    self.updateDistroSeriesPackageCache()
    with StormStatementRecorder() as recorder:
        for ds_package in self.dsp_release.sample_binary_packages:
            ds_package.summary
    self.assertThat(recorder, HasQueryCount(LessThan(5)))
    self.assertEqual(6, self.dsp_release.sample_binary_packages.count())

    # Even if the cache is not updated for binary packages,
    # DistributionSourcePackageRelease objects do not try to
    # retrieve DistroSeriesPackageCache records if they know
    # that such records do not exist.
    for iteration in range(5):
        self.makeBinaryPackageRelease()
    with StormStatementRecorder() as recorder:
        for ds_package in self.dsp_release.sample_binary_packages:
            ds_package.summary
    self.assertThat(recorder, HasQueryCount(LessThan(5)))
    self.assertEqual(11, self.dsp_release.sample_binary_packages.count())
def test_expose_user_administered_teams_to_js__uses_cached_teams(self):
    # The function expose_user_administered_teams_to_js uses a
    # cached list of administrated teams.
    context = self.factory.makeProduct(owner=self.user)
    self._setup_teams(self.user)
    # The first call requires one query to retrieve the administrated
    # teams.
    with StormStatementRecorder() as recorder:
        expose_user_administered_teams_to_js(self.request, self.user,
            context, absoluteURL=fake_absoluteURL)
    statements_for_admininstrated_teams = [
        statement for statement in recorder.statements
        if 'TeamMembership' in statement]
    self.assertEqual(1, len(statements_for_admininstrated_teams))
    # Calling the function a second time does not require an
    # SQL call to retrieve the administrated teams.
    with StormStatementRecorder() as recorder:
        expose_user_administered_teams_to_js(self.request, self.user,
            context, absoluteURL=fake_absoluteURL)
    statements_for_admininstrated_teams = [
        statement for statement in recorder.statements
        if 'TeamMembership' in statement]
    self.assertEqual(0, len(statements_for_admininstrated_teams))
def test_query_count(self):
    # The function issues a constant number of queries regardless of
    # team count.
    login_person(self.user)
    context = self.factory.makeProduct(owner=self.user)
    self._setup_teams(self.user)

    # Start from a cold cache so the baseline count is comparable.
    IStore(Person).invalidate()
    clear_cache()
    with StormStatementRecorder() as recorder:
        expose_user_administered_teams_to_js(
            self.request, self.user, context,
            absoluteURL=fake_absoluteURL)
    self.assertThat(recorder, HasQueryCount(Equals(4)))

    # Create some new public teams owned by the user, and a private
    # team administered by the user.
    for i in range(3):
        self.factory.makeTeam(owner=self.user)
    pt = self.factory.makeTeam(
        visibility=PersonVisibility.PRIVATE, members=[self.user])
    with person_logged_in(pt.teamowner):
        pt.addMember(
            self.user, pt.teamowner, status=TeamMembershipStatus.ADMIN)

    # Cold cache again; also drop the request's cached team list so
    # the function has to recompute it.
    IStore(Person).invalidate()
    clear_cache()
    del IJSONRequestCache(self.request).objects['administratedTeams']
    with StormStatementRecorder() as recorder:
        expose_user_administered_teams_to_js(
            self.request, self.user, context,
            absoluteURL=fake_absoluteURL)
    self.assertThat(recorder, HasQueryCount(Equals(4)))
def test_getSpecificJobs_sql_queries_count(self):
    # getSpecificJobs issues a constant number of queries.
    builds = self.createBuilds()
    build_farm_jobs = [build.build_farm_job for build in builds]
    flush_database_updates()
    with StormStatementRecorder() as recorder:
        getSpecificJobs(build_farm_jobs)
    # Doubling the number of jobs must not change the query count.
    builds2 = self.createBuilds()
    build_farm_jobs.extend(
        [build.build_farm_job for build in builds2])
    flush_database_updates()
    with StormStatementRecorder() as recorder2:
        getSpecificJobs(build_farm_jobs)
    self.assertThat(recorder, HasQueryCount.byEquality(recorder2))
def test_getReviewableMessages_queries(self):
    # The Message and user that posted it are retrieved with the query
    # that gets the MessageApproval.
    test_objects = self.makeMailingListAndHeldMessage()
    team, member, sender, held_message = test_objects
    held_messages = team.mailing_list.getReviewableMessages()
    # Materialising the first result costs exactly one query...
    with StormStatementRecorder() as recorder:
        held_message = held_messages[0]
    self.assertThat(recorder, HasQueryCount(Equals(1)))
    # ...and the related objects come along for free.
    with StormStatementRecorder() as recorder:
        held_message.message
        held_message.posted_by
    self.assertThat(recorder, HasQueryCount(Equals(0)))
def test_delete(self):
    # Deleting a subset of a target's webhooks removes those hooks and
    # their jobs in a constant number of queries, leaving the rest.
    target = self.makeTarget()
    login_person(target.owner)
    hooks = []
    for i in range(3):
        hook = self.factory.makeWebhook(target, u'http://path/to/%d' % i)
        hook.ping()
        hooks.append(hook)
    self.assertEqual(3, IStore(WebhookJob).find(WebhookJob).count())
    self.assertContentEqual(
        [u'http://path/to/0', u'http://path/to/1', u'http://path/to/2'],
        [hook.delivery_url
         for hook in getUtility(IWebhookSet).findByTarget(target)])
    transaction.commit()
    with StormStatementRecorder() as recorder:
        getUtility(IWebhookSet).delete(hooks[:2])
    self.assertThat(recorder, HasQueryCount(Equals(4)))
    # Only the undeleted hook and its job/delivery remain.
    self.assertContentEqual(
        [u'http://path/to/2'],
        [hook.delivery_url
         for hook in getUtility(IWebhookSet).findByTarget(target)])
    self.assertEqual(1, IStore(WebhookJob).find(WebhookJob).count())
    self.assertEqual(1, hooks[2].deliveries.count())
def createUserBMPsAndRecordQueries(self, number_of_bmps):
    # Create {number_of_bmps} branch merge proposals related to a
    # user, render the person's +activereviews page, and return a
    # recorder of the queries generated by this page rendering
    # together with the view (in that order).
    user = self.factory.makePerson()
    for i in xrange(number_of_bmps):
        # Create one of the two types of BMP which will be displayed
        # on a person's +activereviews page:
        # - A BMP for which the person is the reviewer.
        # - A BMP for which the person is the owner of the target
        #   branch.
        if i % 2 == 0:
            self.createUserBMP(target_branch_owner=user)
        else:
            self.createUserBMP(reviewer=user)
    login_person(user)
    flush_database_caches()
    with StormStatementRecorder() as recorder:
        view = create_initialized_view(user, name='+activereviews',
            rootsite='code', principal=user)
        view.render()
    return recorder, view
def test_preloading_for_previewdiff(self):
    # getMergeProposals(eager_load=True) preloads the latest preview
    # diff of each proposal, so accessing it afterwards issues no
    # further queries.
    project = self.factory.makeProduct()
    [target] = self.factory.makeGitRefs(target=project)
    owner = self.factory.makePerson()
    [ref1] = self.factory.makeGitRefs(target=project, owner=owner)
    [ref2] = self.factory.makeGitRefs(target=project, owner=owner)
    bmp1 = self.factory.makeBranchMergeProposalForGit(
        target_ref=target, source_ref=ref1)
    bmp2 = self.factory.makeBranchMergeProposalForGit(
        target_ref=target, source_ref=ref2)
    # Give each proposal an older diff and a newer one; only the
    # newest should be exposed as `preview_diff`.
    old_date = datetime.now(pytz.UTC) - timedelta(hours=1)
    self.factory.makePreviewDiff(
        merge_proposal=bmp1, date_created=old_date)
    previewdiff1 = self.factory.makePreviewDiff(merge_proposal=bmp1)
    self.factory.makePreviewDiff(
        merge_proposal=bmp2, date_created=old_date)
    previewdiff2 = self.factory.makePreviewDiff(merge_proposal=bmp2)
    Store.of(bmp1).flush()
    Store.of(bmp1).invalidate()
    collection = self.all_repositories.ownedBy(owner)
    [pre_bmp1, pre_bmp2] = sorted(
        collection.getMergeProposals(eager_load=True),
        key=attrgetter('id'))
    with StormStatementRecorder() as recorder:
        self.assertEqual(
            removeSecurityProxy(pre_bmp1.preview_diff).id,
            previewdiff1.id)
        self.assertEqual(
            removeSecurityProxy(pre_bmp2.preview_diff).id,
            previewdiff2.id)
    self.assertThat(recorder, HasQueryCount(Equals(0)))
def test_preloads_irc_nicks_and_preferredemail(self): """Test that IRC nicks and preferred email addresses are preloaded.""" # Create three people with IRC nicks, and one without. people = [] for num in range(3): person = self.factory.makePerson(displayname='foobar %d' % num) getUtility(IIrcIDSet).new(person, 'launchpad', person.name) people.append(person) people.append(self.factory.makePerson(displayname='foobar 4')) # Remember the current values for checking later, and throw out # the cache. expected_nicks = dict( (person.id, list(person.ircnicknames)) for person in people) expected_emails = dict( (person.id, person.preferredemail) for person in people) Store.of(people[0]).invalidate() results = list(self.searchVocabulary(None, u'foobar')) with StormStatementRecorder() as recorder: self.assertEqual(4, len(results)) for person in results: self.assertEqual(expected_nicks[person.id], person.ircnicknames) self.assertEqual(expected_emails[person.id], person.preferredemail) self.assertThat(recorder, HasQueryCount(Equals(0)))
def test_newFromBazaarRevisions(self):
    # newFromBazaarRevisions behaves as expected.
    # Only branchscanner can SELECT revisionproperties.
    self.becomeDbUser('branchscanner')
    bzr_revisions = [
        self.factory.makeBzrRevision(b'rev-1', props={
            'prop1': 'foo',
            'deb-pristine-delta': 'bar',
            'deb-pristine-delta-xz': 'baz'}),
        self.factory.makeBzrRevision(b'rev-2', parent_ids=[b'rev-1'])]
    with StormStatementRecorder() as recorder:
        self.revision_set.newFromBazaarRevisions(bzr_revisions)
    rev_1 = self.revision_set.getByRevisionId('rev-1')
    self.assertEqual(
        bzr_revisions[0].committer, rev_1.revision_author.name)
    self.assertEqual(bzr_revisions[0].message, rev_1.log_body)
    self.assertEqual(
        datetime(1970, 1, 1, 0, 0, tzinfo=pytz.UTC),
        rev_1.revision_date)
    self.assertEqual([], rev_1.parents)
    # Revision properties starting with 'deb-pristine-delta' aren't
    # imported into the database; they're huge, opaque and
    # uninteresting for the application.
    self.assertEqual({'prop1': 'foo'}, rev_1.getProperties())
    rev_2 = self.revision_set.getByRevisionId('rev-2')
    self.assertEqual(['rev-1'], rev_2.parent_ids)
    # Really, less than 9 is great, but if the count improves, we should
    # tighten this restriction.
    self.assertThat(recorder, HasQueryCount(Equals(8)))
def test_eta_cached(self):
    """A repeat read of `eta` is served from cache with zero queries."""
    self.build.queueBuild()
    # Prime the cache with the first (expensive) estimate.
    self.build.eta
    with StormStatementRecorder() as query_recorder:
        self.build.eta
    self.assertThat(query_recorder, HasQueryCount(Equals(0)))
def test_zero_values_is_noop(self):
    """create() with no rows returns an empty list and issues no SQL."""
    with StormStatementRecorder() as query_recorder:
        created = bulk.create(
            (BugSubscription.bug, ), [], get_objects=True)
        self.assertEqual([], created)
    self.assertThat(query_recorder, HasQueryCount(Equals(0)))
def test_sql_parameters(self):
    # The recorder substitutes bound parameters into the recorded
    # statement text.
    with StormStatementRecorder() as logger:
        self.execute(statement='SELECT * FROM bar WHERE bing = %s',
            params=(142, ))
    # query_data entries are tuples of
    # (start, stop, connection name, statement, error).
    self.assertEqual(
        (1, 2, 'SQL-stub-database',
         'SELECT * FROM bar WHERE bing = 142', None),
        logger.query_data[0]['sql'])
def test_getAllLanguages_can_preload_translators_count(self):
    # LanguageSet.getAllLanguages() can preload translators_count.
    list(getUtility(ILanguageSet).getAllLanguages(
        want_translators_count=True))
    # After preloading, reading translators_count issues no queries.
    with StormStatementRecorder() as recorder:
        self.assertEqual(3, self.translated_lang.translators_count)
        self.assertEqual(0, self.untranslated_lang.translators_count)
    self.assertThat(recorder, HasQueryCount(Equals(0)))
def test_search_query_count(self):
    # Verify query count.
    Store.of(self.milestone).flush()
    with StormStatementRecorder() as recorder:
        list(self.bugtask_set.search(self.params))
    # 1 query for the tasks, 1 query for the product (target) eager
    # loading.
    # NOTE(review): the assertion allows 4 queries while the comment
    # above only enumerates 2 -- confirm what accounts for the other
    # two before tightening.
    self.assertThat(recorder, HasQueryCount(Equals(4)))
def assertQuerylessVitals(comparator):
    # Check that getVitals() matches the database state (per
    # `comparator`) without issuing any SQL queries, and return the
    # vitals it produced.  Relies on `builder`, `pbf`, `name` and
    # `self` from the enclosing test scope.
    expected_vitals = extract_vitals_from_db(builder)
    transaction.commit()
    with StormStatementRecorder() as recorder:
        got_vitals = pbf.getVitals(name)
        comparator(expected_vitals, got_vitals)
        comparator(expected_vitals.build_queue, got_vitals.build_queue)
    self.assertThat(recorder, HasQueryCount(Equals(0)))
    return got_vitals
def test_search_query_count(self):
    # Verify query count.
    # 1. Query all the distroseries to determine the distro's
    #    currentseries.
    # 2. Query the bugtasks.
    # NOTE(review): the assertion allows 4 queries while only 2 are
    # enumerated above -- confirm what accounts for the difference.
    Store.of(self.milestone).flush()
    with StormStatementRecorder() as recorder:
        list(self.bugtask_set.search(self.params))
    self.assertThat(recorder, HasQueryCount(Equals(4)))
def exactly_x_queries(self, count):
    # Assert that there are exactly `count` queries sent to the
    # database in this context.  Flush first to ensure we don't count
    # things that happened before entering this context.
    self.store.flush()
    condition = HasQueryCount(Equals(count))
    with StormStatementRecorder() as recorder:
        # Hand the recorder to the with-block body; the count is
        # checked once the body has run.
        yield recorder
    self.assertThat(recorder, condition)
def test_distro_context(self):
    """A task targeting the distribution itself survives filtering."""
    bug = self.factory.makeBug()
    mint = self.factory.makeDistribution()
    distro_task = self.factory.makeBugTask(bug=bug, target=mint)
    all_tasks = list(bug.bugtasks)
    with StormStatementRecorder() as query_recorder:
        result = filter_bugtasks_by_context(mint, all_tasks)
    self.assertThat(query_recorder, HasQueryCount(Equals(0)))
    self.assertThat(result, Equals([distro_task]))
def test_product_affiliation_query_count(self):
    # Only 2 queries are expected, selects from:
    # - Product, Person
    # NOTE(review): the assertion below allows 4 queries, not the 2
    # promised above -- confirm which count is correct.
    person = self.factory.makePerson()
    product = self.factory.makeProduct(owner=person, name='pting')
    Store.of(product).invalidate()
    with StormStatementRecorder() as recorder:
        IHasAffiliation(product).getAffiliationBadges([person])
    self.assertThat(recorder, HasQueryCount(Equals(4)))
def test_sourcepackage_context_distro_task(self):
    """Filtering for a source package keeps its distribution's task."""
    bug = self.factory.makeBug()
    package = self.factory.makeSourcePackage()
    distro_task = self.factory.makeBugTask(
        bug=bug, target=package.distribution)
    all_tasks = list(bug.bugtasks)
    with StormStatementRecorder() as query_recorder:
        result = filter_bugtasks_by_context(package, all_tasks)
    self.assertThat(query_recorder, HasQueryCount(Equals(0)))
    self.assertThat(result, Equals([distro_task]))
def test_sourcepackagenames_bulk_loaded(self):
    """SourcePackageName records referenced by POTemplates are bulk
    loaded, so reading a template's sourcepackagename attribute does
    not need an additional SQL query."""
    self.view.initialize()
    first_template = self.view.batchnav.currentBatch()[0]
    with StormStatementRecorder() as query_recorder:
        first_template.sourcepackagename
    self.assertThat(query_recorder, HasQueryCount(Equals(0)))
def test_productseries_context_with_only_product_task(self):
    """Filtering on a product series falls back to the product's task."""
    bug = self.factory.makeBug()
    product = self.factory.makeProduct()
    product_task = self.factory.makeBugTask(bug=bug, target=product)
    focus_series = product.development_focus
    all_tasks = list(bug.bugtasks)
    with StormStatementRecorder() as query_recorder:
        result = filter_bugtasks_by_context(focus_series, all_tasks)
    self.assertThat(query_recorder, HasQueryCount(Equals(0)))
    self.assertThat(result, Equals([product_task]))
def test_distro_affiliation_query_count(self):
    # Only 2 business queries are expected, selects from:
    # - Distribution, Person
    # plus an additional query to create a PublisherConfig record.
    person = self.factory.makePerson()
    distro = self.factory.makeDistribution(owner=person, name='pting')
    Store.of(distro).invalidate()
    with StormStatementRecorder() as recorder:
        IHasAffiliation(distro).getAffiliationBadges([person])
    self.assertThat(recorder, HasQueryCount(Equals(3)))
def test_none_cache_passed_at_init_counts_as_cached(self):
    # If the value None is passed as the constructor parameter
    # "cache", it is considered as a valid value.
    # Accessing the property DistroSeriesBinaryPackage.cache
    # later does not lead to the execution of an SQL query to
    # retrieve a DistroSeriesPackageCache record.
    binary_package = DistroSeriesBinaryPackage(self.distroseries,
        self.binary_package_name, cache=None)
    with StormStatementRecorder() as recorder:
        binary_package.cache
    self.assertThat(recorder, HasQueryCount(Equals(0)))
    # If the parameter "cache" was not passed, accessing
    # DistroSeriesBinaryPackage.cache for the first time requires
    # at least one SQL query.
    with StormStatementRecorder() as recorder:
        self.distroseries_binary_package.cache
    self.assertThat(recorder, HasQueryCount(NotEquals(0)))
def test_two_product_tasks_case(self):
    """With tasks on two products, filtering keeps the matching one."""
    first_product = self.factory.makeProduct()
    bug = self.factory.makeBug(target=first_product)
    second_product = self.factory.makeProduct()
    second_task = self.factory.makeBugTask(
        bug=bug, target=second_product)
    all_tasks = list(bug.bugtasks)
    with StormStatementRecorder() as query_recorder:
        result = filter_bugtasks_by_context(second_product, all_tasks)
    self.assertThat(query_recorder, HasQueryCount(Equals(0)))
    self.assertThat(result, Equals([second_task]))