Code Example #1
 def test_getSpecificJobs_no_specific_job(self):
     build_farm_job_source = getUtility(IBuildFarmJobSource)
     build_farm_job = build_farm_job_source.new(
         BuildFarmJobType.TRANSLATIONTEMPLATESBUILD)
     flush_database_updates()
     self.assertRaises(InconsistentBuildFarmJobError, getSpecificJobs,
                       [build_farm_job])
Code Example #2
    def test_clientProvidedDuplicateContent(self):
        # Check the new behaviour specified by LibrarianTransactions
        # spec: allow duplicate content with distinct IDs.

        content = 'some content'

        # Store a file with id 6661
        newfile1 = LibraryFileUpload(self.storage, 'filename', 0)
        newfile1.contentID = 6661
        newfile1.append(content)
        fileid1, aliasid1 = newfile1.store()

        # Store second file identical to the first, with id 6662
        newfile2 = LibraryFileUpload(self.storage, 'filename', 0)
        newfile2.contentID = 6662
        newfile2.append(content)
        fileid2, aliasid2 = newfile2.store()

        # Create rows in the database for these files.
        LibraryFileContent(filesize=0,
                           sha1='foo',
                           md5='xx',
                           sha256='xx',
                           id=6661)
        LibraryFileContent(filesize=0,
                           sha1='foo',
                           md5='xx',
                           sha256='xx',
                           id=6662)

        flush_database_updates()
Code Example #3
 def setUp(self):
     super(EmailNotificationTestBase, self).setUp()
     login('*****@*****.**')
     self.product_owner = self.factory.makePerson(name="product-owner")
     self.person = self.factory.makePerson(name="sample-person")
     self.product = self.factory.makeProduct(owner=self.product_owner)
     self.product_subscriber = self.factory.makePerson(
         name="product-subscriber")
     self.product.addBugSubscription(
         self.product_subscriber, self.product_subscriber)
     self.bug_subscriber = self.factory.makePerson(name="bug-subscriber")
     self.bug_owner = self.factory.makePerson(name="bug-owner")
     self.bug = self.factory.makeBug(
         target=self.product, owner=self.bug_owner,
         information_type=InformationType.USERDATA)
     self.reporter = self.bug.owner
     self.bug.subscribe(self.bug_subscriber, self.reporter)
     [self.product_bugtask] = self.bug.bugtasks
     commit()
     login('*****@*****.**')
     switch_dbuser(config.malone.bugnotification_dbuser)
     self.now = datetime.now(pytz.UTC)
     self.ten_minutes_ago = self.now - timedelta(minutes=10)
     self.notification_set = getUtility(IBugNotificationSet)
     for notification in self.notification_set.getNotificationsToSend():
         notification.date_emailed = self.now
     flush_database_updates()
Code Example #4
    def delete_action(self, action, data):
        # First unlink bug watches from all bugtasks, flush updates,
        # then delete the watches themselves.
        for watch in self.context.watches:
            for bugtask in watch.bugtasks:
                if len(bugtask.bug.bugtasks) < 2:
                    raise AssertionError(
                        'There should be more than one bugtask for a bug '
                        'when one of them is linked to the original bug via '
                        'a bug watch.')
                bugtask.bugwatch = None
        flush_database_updates()
        for watch in self.context.watches:
            watch.destroySelf()

        # Now delete the aliases and the bug tracker itself.
        self.context.aliases = []
        self.context.destroySelf()

        # Hey, it worked! Tell the user.
        self.request.response.addInfoNotification(
            '%s has been deleted.' % (self.context.title,))

        # Go back to the bug tracker listing.
        self.next_url = canonical_url(getUtility(IBugTrackerSet))
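The flush between unlinking and destroySelf() is the crux of this example: the UPDATEs that clear each bugtask's bugwatch reference must reach the database before any DELETE runs. The same shape, reduced to a minimal sketch (record and referrers are hypothetical names, not Launchpad API):

# Hypothetical two-phase delete: unlink first, flush, then destroy.
for referrer in record.referrers:
    referrer.record = None       # break each foreign-key link in memory
flush_database_updates()         # push the UPDATEs before any DELETE
record.destroySelf()             # nothing references the row any more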
Code Example #5
 def test_saves_record(self):
     # A build farm job can be stored in the database.
     flush_database_updates()
     store = Store.of(self.build_farm_job)
     retrieved_job = store.find(
         BuildFarmJob, BuildFarmJob.id == self.build_farm_job.id).one()
     self.assertEqual(self.build_farm_job, retrieved_job)
Code Example #6
 def ensureBugTracker(self,
                      baseurl,
                      owner,
                      bugtrackertype,
                      title=None,
                      summary=None,
                      contactdetails=None,
                      name=None):
     """See `IBugTrackerSet`."""
     # Try to find an existing bug tracker that matches.
     bugtracker = self.queryByBaseURL(baseurl)
     if bugtracker is not None:
         return bugtracker
     # Create the bug tracker, since we don't know about it.
     if name is None:
         base_name = make_bugtracker_name(baseurl)
         # If we detect that this name exists already we mutate it
         # until it doesn't.
         name = base_name
         name_increment = 1
         while self.getByName(name) is not None:
             name = "%s-%d" % (base_name, name_increment)
             name_increment += 1
     if title is None:
         title = make_bugtracker_title(baseurl)
     bugtracker = BugTracker(name=name,
                             bugtrackertype=bugtrackertype,
                             title=title,
                             summary=summary,
                             baseurl=baseurl,
                             contactdetails=contactdetails,
                             owner=owner)
     flush_database_updates()
     return bugtracker
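The collision loop above is a generic name uniquifier and can be read in isolation; a standalone sketch, with an exists predicate standing in for self.getByName:

def uniquify(base_name, exists):
    """Return base_name, or base_name-1, base_name-2, ... until unused."""
    name = base_name
    increment = 1
    while exists(name):
        name = "%s-%d" % (base_name, increment)
        increment += 1
    return name

# e.g. uniquify('mozilla.org', taken) -> 'mozilla.org-1' when only
# 'mozilla.org' itself is taken.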
Code Example #7
 def test_runJobHandleErrors_oops_timeline_detail_filter(self):
     """A job can choose to filter oops timeline details."""
     job = RaisingJobTimelineMessage('boom')
     job.timeline_detail_filter = lambda _, detail: '<redacted>'
     flush_database_updates()
     runner = JobRunner([job])
     runner.runJobHandleError(job)
     self.assertEqual(1, len(self.oopses))
     actions = [action[2:4] for action in self.oopses[0]['timeline']]
     self.assertIn(('job', '<redacted>'), actions)
Code Example #8
 def test_archive_mirror_with_source_content_freshness(self):
     self._create_source_mirror(self.hoary, PackagePublishingPocket.RELEASE,
                                self.hoary.components[0],
                                MirrorFreshness.UP)
     self._create_source_mirror(self.hoary, PackagePublishingPocket.RELEASE,
                                self.hoary.components[1],
                                MirrorFreshness.TWODAYSBEHIND)
     flush_database_updates()
     self.assertEqual(self.archive_mirror.getOverallFreshness(),
                      MirrorFreshness.TWODAYSBEHIND)
Code Example #9
 def test_archive_mirror_with_binary_content_freshness(self):
     self._create_bin_mirror(
         self.hoary_i386, PackagePublishingPocket.RELEASE,
         self.hoary.components[0], MirrorFreshness.UP)
     self._create_bin_mirror(
         self.hoary_i386, PackagePublishingPocket.RELEASE,
         self.hoary.components[1], MirrorFreshness.ONEHOURBEHIND)
     flush_database_updates()
     self.assertEqual(
         self.archive_mirror.getOverallFreshness(),
         MirrorFreshness.ONEHOURBEHIND)
Code Example #10
 def test_arch_mirror_freshness_property(self):
     self._create_bin_mirror(
         self.hoary_i386, PackagePublishingPocket.RELEASE,
         self.hoary.components[0], MirrorFreshness.UP)
     self._create_bin_mirror(
         self.hoary_i386, PackagePublishingPocket.RELEASE,
         self.hoary.components[1], MirrorFreshness.ONEHOURBEHIND)
     flush_database_updates()
     self.assertEqual(
         removeSecurityProxy(self.archive_mirror).arch_mirror_freshness,
         MirrorFreshness.ONEHOURBEHIND)
Code Example #11
 def test_source_mirror_freshness_property(self):
     self._create_source_mirror(
         self.hoary, PackagePublishingPocket.RELEASE,
         self.hoary.components[0], MirrorFreshness.UP)
     self._create_source_mirror(
         self.hoary, PackagePublishingPocket.RELEASE,
         self.hoary.components[1], MirrorFreshness.TWODAYSBEHIND)
     flush_database_updates()
     self.assertEqual(
         removeSecurityProxy(self.archive_mirror).source_mirror_freshness,
         MirrorFreshness.TWODAYSBEHIND)
Code Example #12
def new_list_for_team(team):
    """A helper that creates a new, active mailing list for a team.

    Used in doctests.
    """
    list_set = getUtility(IMailingListSet)
    team_list = list_set.new(team)
    team_list.startConstructing()
    team_list.transitionToStatus(MailingListStatus.ACTIVE)
    flush_database_updates()
    return team_list
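A hypothetical doctest-style usage, assuming the usual Launchpad test factory is in scope:

team = factory.makeTeam(name='example-team')   # hypothetical fixture
team_list = new_list_for_team(team)
assert team_list.status == MailingListStatus.ACTIVE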
Code Example #13
 def test_runJobHandleErrors_oops_timeline(self):
     """The oops timeline only covers the job itself."""
     timeline = get_request_timeline(get_current_browser_request())
     timeline.start('test', 'sentinel').finish()
     job = RaisingJobTimelineMessage('boom')
     flush_database_updates()
     runner = JobRunner([job])
     runner.runJobHandleError(job)
     self.assertEqual(1, len(self.oopses))
     actions = [action[2:4] for action in self.oopses[0]['timeline']]
     self.assertIn(('job', 'boom'), actions)
     self.assertNotIn(('test', 'sentinel'), actions)
Code Example #14
 def test_getSpecificJobs_sql_queries_count(self):
     # getSpecificJobs issues a constant number of queries.
     builds = self.createBuilds()
     build_farm_jobs = [build.build_farm_job for build in builds]
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         getSpecificJobs(build_farm_jobs)
     builds2 = self.createBuilds()
     build_farm_jobs.extend([build.build_farm_job for build in builds2])
     flush_database_updates()
     with StormStatementRecorder() as recorder2:
         getSpecificJobs(build_farm_jobs)
     self.assertThat(recorder, HasQueryCount.byEquality(recorder2))
Code Example #15
 def test_redirect_release_uploads_ppa(self):
     # The Distribution.redirect_release_uploads flag does not affect PPA
     # uploads.
     ubuntu = getUtility(IDistributionSet)["ubuntu"]
     with celebrity_logged_in("admin"):
         ubuntu.redirect_release_uploads = True
     flush_database_updates()
     insecure_policy = findPolicyByName("insecure")
     insecure_policy.archive = self.factory.makeArchive()
     insecure_policy.setOptions(FakeOptions(distroseries="hoary"))
     self.assertEqual("hoary", insecure_policy.distroseries.name)
     self.assertEqual(
         PackagePublishingPocket.RELEASE, insecure_policy.pocket)
Code Example #16
    def declineSpecificationLinks(self, idlist, decider):
        """See `ISprint`."""
        for sprintspec in idlist:
            speclink = self.getSpecificationLink(sprintspec)
            speclink.declineBy(decider)

        # We need to flush all the changes we have made to the database,
        # then try the query again to see whether any specs remain in
        # this queue.
        flush_database_updates()

        return self.specifications(
            decider, filter=[SpecificationFilter.PROPOSED]).count()
Code Example #17
 def test_redirect_release_uploads_primary(self):
     # With the insecure policy, the
     # Distribution.redirect_release_uploads flag causes uploads to the
     # RELEASE pocket to be automatically redirected to PROPOSED.
     ubuntu = getUtility(IDistributionSet)["ubuntu"]
     with celebrity_logged_in("admin"):
         ubuntu.redirect_release_uploads = True
     flush_database_updates()
     insecure_policy = findPolicyByName("insecure")
     insecure_policy.setOptions(FakeOptions(distroseries="hoary"))
     self.assertEqual("hoary", insecure_policy.distroseries.name)
     self.assertEqual(
         PackagePublishingPocket.PROPOSED, insecure_policy.pocket)
Code Example #18
 def test_default_values(self):
     # We flush the database updates to ensure SQL defaults
     # are set for various attributes.
     flush_database_updates()
     bfj = removeSecurityProxy(self.build_farm_job)
     self.assertEqual(BuildStatus.NEEDSBUILD, bfj.status)
     # The date_created is set automatically.
     self.assertTrue(bfj.date_created is not None)
     # The job type is required to create a build farm job.
     self.assertEqual(BuildFarmJobType.PACKAGEBUILD, bfj.job_type)
     # Other attributes are unset by default.
     self.assertEqual(None, bfj.date_finished)
     self.assertEqual(None, bfj.builder)
Code Example #19
    def A_update_karmacache(self):
        self.logger.info("Step A: Calculating individual KarmaCache entries")

        # Calculate everyone's karma. Karma degrades each day, becoming
        # worthless after karma_expires_after. This query produces odd results
        # when datecreated is in the future, but there is really no point
        # adding the extra WHEN clause.
        karma_expires_after = '1 year'
        self.cur.execute(
            """
            SELECT person, category, product, distribution,
                ROUND(SUM(
                CASE WHEN karma.datecreated + %s::interval
                    <= CURRENT_TIMESTAMP AT TIME ZONE 'UTC' THEN 0
                ELSE points * (1 - extract(
                    EPOCH FROM CURRENT_TIMESTAMP AT TIME ZONE 'UTC' -
                    karma.datecreated
                    ) / extract(EPOCH FROM %s::interval))
                END
                ))
            FROM Karma
            JOIN KarmaAction ON action = KarmaAction.id
            GROUP BY person, category, product, distribution
            """, (karma_expires_after, karma_expires_after))

        # Suck into RAM to avoid tying up resources on the DB.
        results = list(self.cur.fetchall())
        self.logger.debug("Got %d (person, category) scores", len(results))

        # Note that we don't need to commit each iteration because we are
        # running in autocommit mode.
        scaling = self.calculate_scaling(results)
        for entry in results:
            self.update_one_karma_cache_entry(entry, scaling)
        flush_database_updates()

        # Delete the entries we're going to replace.
        self.cur.execute("DELETE FROM KarmaCache WHERE category IS NULL")
        self.cur.execute("""
            DELETE FROM KarmaCache
            WHERE project IS NOT NULL AND product IS NULL""")
        self.cur.execute("""
            DELETE FROM KarmaCache
            WHERE category IS NOT NULL AND project IS NULL AND product IS NULL
                  AND distribution IS NULL AND sourcepackagename IS NULL""")

        # Don't allow our table to bloat with inactive users.
        self.cur.execute("DELETE FROM KarmaCache WHERE karmavalue <= 0")

        # VACUUM KarmaCache since we have just touched every record in it.
        self.cur.execute("""VACUUM KarmaCache""")
Code Example #20
 def test_get_not_cached_and_not_found(self):
     # DistributionSourcePackageInDatabase.get() returns None if a DSP does
     # not exist in the database and no mapping cache entry exists for
     # it. It does not modify the mapping cache.
     distribution = self.factory.makeDistribution()
     sourcepackagename = self.factory.makeSourcePackageName()
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         dsp = DistributionSourcePackageInDatabase.get(
             distribution, sourcepackagename)
         self.assertIs(None, dsp)
     self.assertThat(recorder, HasQueryCount(Equals(1)))
     self.assertEqual({},
                      DistributionSourcePackageInDatabase._cache.items())
Code Example #21
File: core.py  Project: pombreda/UnnaturalCodeFork
    def importBug(self, external_bugtracker, bugtracker, bug_target,
                  remote_bug):
        """Import a remote bug into Launchpad.

        :param external_bugtracker: An ISupportsBugImport, which talks
            to the external bug tracker.
        :param bugtracker: An IBugTracker, to which the created bug
            watch will be linked.
        :param bug_target: An IBugTarget, to which the created bug will
            be linked.
        :param remote_bug: The remote bug id as a string.

        :return: The created Launchpad bug.
        """
        assert IDistribution.providedBy(bug_target), (
            'Only imports of bugs for a distribution are implemented.')
        reporter_name, reporter_email = (
            external_bugtracker.getBugReporter(remote_bug))
        reporter = getUtility(IPersonSet).ensurePerson(
            reporter_email, reporter_name, PersonCreationRationale.BUGIMPORT,
            comment='when importing bug #%s from %s' % (
                remote_bug, external_bugtracker.baseurl))
        package_name = external_bugtracker.getBugTargetName(remote_bug)
        package = bug_target.getSourcePackage(package_name)
        if package is not None:
            bug_target = package
        else:
            self.warning(
                'Unknown %s package (#%s at %s): %s' % (
                    bug_target.name, remote_bug,
                    external_bugtracker.baseurl, package_name))
        summary, description = (
            external_bugtracker.getBugSummaryAndDescription(remote_bug))
        bug = bug_target.createBug(
            CreateBugParams(
                reporter, summary, description, subscribe_owner=False,
                filed_by=getUtility(ILaunchpadCelebrities).bug_watch_updater))
        [added_task] = bug.bugtasks
        bug_watch = getUtility(IBugWatchSet).createBugWatch(
            bug=bug,
            owner=getUtility(ILaunchpadCelebrities).bug_watch_updater,
            bugtracker=bugtracker, remotebug=remote_bug)

        added_task.bugwatch = bug_watch
        # Need to flush database updates, so that the bug watch knows it
        # is linked from a bug task.
        flush_database_updates()

        return bug
Code Example #22
 def test_get_cached_and_found(self):
     # DistributionSourcePackageInDatabase.get() returns the DSP if it's
     # found in the database from a good mapping cache entry.
     distribution = self.factory.makeDistribution()
     sourcepackagename = self.factory.makeSourcePackageName()
     dsp = DistributionSourcePackageInDatabase.new(
         distribution, sourcepackagename)
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         dsp_found = DistributionSourcePackageInDatabase.get(
             distribution, sourcepackagename)
         self.assertIs(dsp, dsp_found)
     # Hurrah! This is what we're aiming for: a DSP that is in the mapping
     # cache *and* in Storm's cache.
     self.assertThat(recorder, HasQueryCount(Equals(0)))
Code Example #23
 def test_getDirect_found(self):
     # DistributionSourcePackageInDatabase.getDirect() returns the
     # DSPInDatabase if one already exists in the database. It also adds
     # the new mapping to the mapping cache.
     distribution = self.factory.makeDistribution()
     sourcepackagename = self.factory.makeSourcePackageName()
     dsp = DistributionSourcePackageInDatabase.new(distribution,
                                                   sourcepackagename)
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         dsp_found = DistributionSourcePackageInDatabase.getDirect(
             dsp.distribution, dsp.sourcepackagename)
         self.assertIs(dsp, dsp_found)
     self.assertThat(recorder, HasQueryCount(Equals(1)))
     self.assertEqual({(distribution.id, sourcepackagename.id): dsp.id},
                      DistributionSourcePackageInDatabase._cache.items())
Code Example #24
    def modifySignature(self, sign_id, recipient, admincomment, state):
        """See ISignedCodeOfConductSet."""
        sign = SignedCodeOfConduct.get(sign_id)
        sign.active = state
        sign.admincomment = admincomment
        sign.recipient = recipient.id

        subject = 'Launchpad: Code Of Conduct Signature Modified'
        content = ('State: %s\n'
                   'Comment: %s\n'
                   'Modified by %s' %
                   (state, admincomment, recipient.displayname))

        sign.sendAdvertisementEmail(subject, content)

        flush_database_updates()
Code Example #25
 def setPackaging(self, productseries, owner):
     """See `ISourcePackage`."""
     target = self.direct_packaging
     if target is not None:
         if target.productseries == productseries:
             return
         # Delete the current packaging and create a new one so
         # that the translation sharing jobs are started.
         self.direct_packaging.destroySelf()
     PackagingUtil.createPackaging(distroseries=self.distroseries,
                                   sourcepackagename=self.sourcepackagename,
                                   productseries=productseries,
                                   owner=owner,
                                   packaging=PackagingType.PRIME)
     # and make sure this change is immediately available
     flush_database_updates()
Code Example #26
 def test_get_cached_and_not_found(self):
     # DistributionSourcePackageInDatabase.get() returns None if a DSP does
     # not exist in the database for a stale mapping cache entry.
     distribution = self.factory.makeDistribution()
     sourcepackagename = self.factory.makeSourcePackageName()
     # Enter a stale entry in the mapping cache.
     stale_dsp_cache_key = distribution.id, sourcepackagename.id
     DistributionSourcePackageInDatabase._cache[stale_dsp_cache_key] = -123
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         dsp = DistributionSourcePackageInDatabase.get(
             distribution, sourcepackagename)
         self.assertIs(None, dsp)
     # A stale mapping means that we have to issue two queries: the first
     # queries for the stale DSP from the database, the second gets the
     # correct DSP (or None).
     self.assertThat(recorder, HasQueryCount(Equals(2)))
Code Example #27
    def testSimpleRun(self):
        """Try a simple script run.

        This test ensures that the script starts up and runs.
        It should create some missing builds.
        """
        # Commit the changes made in setUp()
        self.layer.txn.commit()

        args = [
            "-d",
            "ubuntutest",
            "-s",
            "breezy-autotest",
            "-a",
            "i386",
            "-a",
            "hppa",
            "--ppa",
            "%s" % self.ppa.owner.name,
            "--ppa-name",
            self.ppa.name,
        ]
        code, stdout, stderr = self.runScript(args)
        self.assertEqual(
            code, 0,
            "The script returned with a non zero exit code: %s\n%s\n%s" %
            (code, stdout, stderr))

        # Sync database changes made in the external process.
        flush_database_updates()
        clear_current_connection_cache()

        # The arch-any package will get builds for all architectures.
        self.assertBuildsForAny()

        # The arch-all package is architecture-independent, so it will
        # only get a build for i386 which is the nominated architecture-
        # independent build arch.
        all_build_i386 = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_i386, self.ppa)
        all_build_hppa = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_hppa, self.ppa)
        self.assertIsNot(all_build_i386, None)
        self.assertIs(all_build_hppa, None)
Code Example #28
 def test_get_not_cached_and_found(self):
     # DistributionSourcePackageInDatabase.get() returns the DSP if it's
     # found in the database even if no mapping cache entry exists for
     # it. It updates the mapping cache with this discovered information.
     distribution = self.factory.makeDistribution()
     sourcepackagename = self.factory.makeSourcePackageName()
     dsp = DistributionSourcePackageInDatabase.new(distribution,
                                                   sourcepackagename)
     # new() updates the mapping cache so we must clear it.
     DistributionSourcePackageInDatabase._cache.clear()
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         dsp_found = DistributionSourcePackageInDatabase.get(
             distribution, sourcepackagename)
         self.assertIs(dsp, dsp_found)
     self.assertThat(recorder, HasQueryCount(Equals(1)))
     self.assertEqual({(distribution.id, sourcepackagename.id): dsp.id},
                      DistributionSourcePackageInDatabase._cache.items())
Code Example #29
    def _doMerge(self):
        """Merges a duplicate person into a target person.

        - Reassigns the duplicate user's primary email address to the
          requesting user.

        - Ensures that the requesting user has a preferred email address, and
          uses the newly acquired one if not.

        - If the duplicate user has no other email addresses, does the merge.

        """
        # The user proved that he has access to this email address of the
        # dupe account, so we can assign it to him.
        requester = self.context.requester
        emailset = getUtility(IEmailAddressSet)
        email = removeSecurityProxy(emailset.getByEmail(self.context.email))
        # As a person can have at most one preferred email, ensure
        # that this new email does not have the PREFERRED status.
        email.status = EmailAddressStatus.NEW
        email.personID = requester.id
        requester.validateAndEnsurePreferredEmail(email)

        # Need to flush all changes we made, so subsequent queries we make
        # in this transaction will see these changes and thus they'll be
        # displayed on the page that calls this method.
        flush_database_updates()

        # Now we must check whether the dupe account still has registered
        # email addresses. If it doesn't, we can actually do the merge.
        if emailset.getByPerson(self.dupe):
            self.mergeCompleted = False
            return
        getUtility(IPersonSet).mergeAsync(self.dupe,
                                          requester,
                                          requester,
                                          reviewer=requester)
        merge_message = _(
            'A merge is queued and is expected to complete in a few minutes.')
        self.request.response.addInfoNotification(merge_message)
        self.mergeCompleted = True
Code Example #30
 def test_get_cached_and_not_found_with_bogus_dsp_in_storm_cache(self):
     # DistributionSourcePackageInDatabase.get() returns None if a DSP does
     # exist in the database for a mapping cache entry, but the DSP
     # discovered does not match the mapping cache key.
     distribution = self.factory.makeDistribution()
     sourcepackagename = self.factory.makeSourcePackageName()
     # Put a bogus entry into the mapping cache.
     bogus_dsp = DistributionSourcePackageInDatabase.new(
         distribution, self.factory.makeSourcePackageName())
     bogus_dsp_cache_key = distribution.id, sourcepackagename.id
     DistributionSourcePackageInDatabase._cache[
         bogus_dsp_cache_key] = bogus_dsp.id
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         dsp = DistributionSourcePackageInDatabase.get(
             distribution, sourcepackagename)
         self.assertIs(None, dsp)
     # A stale mapping means that we ordinarily have to issue two queries:
     # the first gets the bogus DSP from the database, the second gets the
     # correct DSP (or None). However, the bogus DSP is already in Storm's
     # cache, so we issue only one query.
     self.assertThat(recorder, HasQueryCount(Equals(1)))
Code Example #31
 def test_get_cached_and_not_found_with_bogus_dsp(self):
     # DistributionSourcePackageInDatabase.get() returns None if a DSP does
     # exist in the database for a mapping cache entry, but the DSP
     # discovered does not match the mapping cache key.
     distribution = self.factory.makeDistribution()
     sourcepackagename = self.factory.makeSourcePackageName()
     # Put a bogus entry into the mapping cache.
     bogus_dsp = DistributionSourcePackageInDatabase.new(
         distribution, self.factory.makeSourcePackageName())
     bogus_dsp_cache_key = distribution.id, sourcepackagename.id
     DistributionSourcePackageInDatabase._cache[
         bogus_dsp_cache_key] = bogus_dsp.id
     # Invalidate the bogus DSP from Storm's cache.
     Store.of(bogus_dsp).invalidate(bogus_dsp)
     flush_database_updates()
     with StormStatementRecorder() as recorder:
         dsp = DistributionSourcePackageInDatabase.get(
             distribution, sourcepackagename)
         self.assertIs(None, dsp)
     # A stale mapping means that we have to issue two queries: the first
     # gets the bogus DSP from the database, the second gets the correct
     # DSP (or None).
     self.assertThat(recorder, HasQueryCount(Equals(2)))
Code Example #32
    def dominateSources(self, distroseries, pocket):
        """Perform domination on source package publications.

        Dominates sources, restricted to `distroseries`, `pocket`, and
        `self.archive`.
        """
        self.logger.debug(
            "Performing domination across %s/%s (Source)",
            distroseries.name, pocket.title)

        generalization = GeneralizedPublication(is_source=True)

        self.logger.debug("Finding sources...")
        sources = self.findSourcesForDomination(distroseries, pocket)
        sorted_packages = self._sortPackages(sources, generalization)

        self.logger.debug("Dominating sources...")
        for name, pubs in sorted_packages.iteritems():
            self.logger.debug("Dominating %s" % name)
            assert len(pubs) > 0, "Dominating zero sources!"
            live_versions = find_live_source_versions(pubs)
            self.dominatePackage(pubs, live_versions, generalization)

        flush_database_updates()
Code Example #33
 def test_default_values(self):
     builder = self.factory.makeBuilder()
     # Make sure the Storm cache gets the values that the database
     # initializes.
     flush_database_updates()
     self.assertEqual(0, builder.failure_count)
Code Example #34
    def setStatus(self, status, user, comment=None, silent=False):
        """See `ITeamMembership`."""
        if status == self.status:
            return False

        if silent and not self.canChangeStatusSilently(user):
            raise UserCannotChangeMembershipSilently(
                "Only Launchpad administrators may change membership "
                "statuses silently.")

        approved = TeamMembershipStatus.APPROVED
        admin = TeamMembershipStatus.ADMIN
        expired = TeamMembershipStatus.EXPIRED
        declined = TeamMembershipStatus.DECLINED
        deactivated = TeamMembershipStatus.DEACTIVATED
        proposed = TeamMembershipStatus.PROPOSED
        invited = TeamMembershipStatus.INVITED
        invitation_declined = TeamMembershipStatus.INVITATION_DECLINED

        self.person.clearInTeamCache()

        # Make sure the transition from the current status to the given one
        # is allowed. All allowed transitions are in the TeamMembership spec.
        state_transition = {
            admin: [approved, expired, deactivated],
            approved: [admin, expired, deactivated],
            deactivated: [proposed, approved, admin, invited],
            expired: [proposed, approved, admin, invited],
            proposed: [approved, admin, declined],
            declined: [proposed, approved, admin, invited],
            invited: [approved, admin, invitation_declined],
            invitation_declined: [invited, approved, admin]}

        if self.status not in state_transition:
            raise TeamMembershipTransitionError(
                "Unknown status: %s" % self.status.name)
        if status not in state_transition[self.status]:
            raise TeamMembershipTransitionError(
                "Bad state transition from %s to %s"
                % (self.status.name, status.name))

        if status in ACTIVE_STATES and self.team in self.person.allmembers:
            raise CyclicalTeamMembershipError(
                "Cannot make %(person)s a member of %(team)s because "
                "%(team)s is a member of %(person)s."
                % dict(person=self.person.name, team=self.team.name))

        old_status = self.status
        self.status = status

        now = datetime.now(pytz.timezone('UTC'))
        if status in [proposed, invited]:
            self.proposed_by = user
            self.proponent_comment = comment
            self.date_proposed = now
        elif ((status in ACTIVE_STATES and old_status not in ACTIVE_STATES)
              or status == declined):
            self.reviewed_by = user
            self.reviewer_comment = comment
            self.date_reviewed = now
            if self.datejoined is None and status in ACTIVE_STATES:
                # This is the first time this membership is made active.
                self.datejoined = now
        else:
            # No need to set proponent or reviewer.
            pass

        if old_status == invited:
            # This member has been invited by an admin and is now accepting or
            # declining the invitation.
            self.acknowledged_by = user
            self.date_acknowledged = now
            self.acknowledger_comment = comment

        self.last_changed_by = user
        self.last_change_comment = comment
        self.date_last_changed = now

        if status in ACTIVE_STATES:
            _fillTeamParticipation(self.person, self.team)
        elif old_status in ACTIVE_STATES:
            _cleanTeamParticipation(self.person, self.team)
            # A person has left the team so they may no longer have access
            # to some artifacts shared with the team. We need to run a job
            # to remove any subscriptions to such artifacts.
            getUtility(IRemoveArtifactSubscriptionsJobSource).create(
                user, grantee=self.person)
        else:
            # Changed from an inactive state to another inactive one, so no
            # need to fill/clean the TeamParticipation table.
            pass

        # Flush all updates to ensure any subsequent calls to this method on
        # the same transaction will operate on the correct data.  That is the
        # case with our script to expire team memberships.
        flush_database_updates()

        # When a member proposes himself, a more detailed notification is
        # sent to the team admins by a subscriber of JoinTeamEvent; that's
        # why we don't send anything here.
        if ((self.person != self.last_changed_by or self.status != proposed)
            and not silent):
            self._sendStatusChangeNotification(old_status)
        return True
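The state_transition dict above is the entire transition policy, and the same validation can be exercised on its own. A sketch with plain strings standing in for TeamMembershipStatus items:

# Allowed transitions, abbreviated from setStatus() above.
ALLOWED = {
    'ADMIN': ['APPROVED', 'EXPIRED', 'DEACTIVATED'],
    'APPROVED': ['ADMIN', 'EXPIRED', 'DEACTIVATED'],
    'PROPOSED': ['APPROVED', 'ADMIN', 'DECLINED'],
    # ... remaining rows as in the method body.
}

def check_transition(old, new):
    """Raise if moving from status old to status new is not allowed."""
    if old not in ALLOWED:
        raise ValueError("Unknown status: %s" % old)
    if new not in ALLOWED[old]:
        raise ValueError(
            "Bad state transition from %s to %s" % (old, new))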
Code Example #35
 def tearDown(self):
     for notification in self.notification_set.getNotificationsToSend():
         notification.date_emailed = self.now
     flush_database_updates()
     super(EmailNotificationTestBase, self).tearDown()
Code Example #36
 def commit(self):
     """Synchronize database state."""
     flush_database_updates()
     transaction.commit()
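Taken together, these examples reduce to one pattern: mutate ORM objects in Python, then call flush_database_updates() so that later statements in the same transaction (or a following commit) see those writes. A minimal sketch, with bug as a hypothetical ORM object:

bug.title = 'New title'      # pending change, held by the ORM only
flush_database_updates()     # issue the UPDATE to the database now
# Raw SQL run in this transaction sees the new title.
transaction.commit()         # as in the commit() helper above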