Example #1
    def test_store_disconnected_after_request_handled_logs_oops(self):
        # Bug #504291 was that a Store was being left in a disconnected
        # state after a request, causing subsequent requests handled by that
        # thread to fail. We detect this state in endRequest and log an
        # OOPS to help track down the trigger.
        request = LaunchpadTestRequest()
        publication = WebServicePublication(None)
        dbadapter.set_request_started()

        # Disconnect a store
        store = IMasterStore(EmailAddress)
        store._connection._state = STATE_DISCONNECTED

        # Invoke the endRequest hook.
        publication.endRequest(request, None)

        self.assertEqual(1, len(self.oopses))
        oops = self.oopses[0]

        # Ensure the OOPS mentions the correct exception
        self.assertStartsWith(oops['value'], "Bug #504291")

        # Ensure the store has been rolled back and is in a usable state.
        self.assertEqual(store._connection._state, STATE_RECONNECT)
        store.find(EmailAddress).first()  # Confirms Store is working.
    def test_LoginTokenPruner(self):
        store = IMasterStore(LoginToken)
        now = datetime.now(UTC)
        switch_dbuser('testadmin')

        # It is configured as a daily task.
        self.assertTrue(
            LoginTokenPruner in DailyDatabaseGarbageCollector.tunable_loops)

        # Create a token that will be pruned.
        old_token = LoginToken(
            email='whatever', tokentype=LoginTokenType.NEWACCOUNT)
        old_token.date_created = now - timedelta(days=666)
        old_token_id = old_token.id
        store.add(old_token)

        # Create a token that will not be pruned.
        current_token = LoginToken(
            email='whatever', tokentype=LoginTokenType.NEWACCOUNT)
        current_token_id = current_token.id
        store.add(current_token)

        # Run the pruner. Batching is tested by the BulkPruner tests so
        # no need to repeat here.
        switch_dbuser('garbo_daily')
        pruner = LoginTokenPruner(logging.getLogger('garbo'))
        while not pruner.isDone():
            pruner(10)
        pruner.cleanUp()

        # Only the old LoginToken is gone.
        self.assertEqual(
            store.find(LoginToken, id=old_token_id).count(), 0)
        self.assertEqual(
            store.find(LoginToken, id=current_token_id).count(), 1)
    def test_BranchJobPruner(self):
        # Garbo should remove jobs completed over 30 days ago.
        switch_dbuser('testadmin')
        store = IMasterStore(Job)

        db_branch = self.factory.makeAnyBranch()
        db_branch.branch_format = BranchFormat.BZR_BRANCH_5
        db_branch.repository_format = RepositoryFormat.BZR_KNIT_1
        Store.of(db_branch).flush()
        branch_job = BranchUpgradeJob.create(
            db_branch, self.factory.makePerson())
        branch_job.job.date_finished = THIRTY_DAYS_AGO

        self.assertEqual(
            store.find(
                BranchJob,
                BranchJob.branch == db_branch.id).count(),
                1)

        self.runDaily()

        switch_dbuser('testadmin')
        self.assertEqual(
            store.find(
                BranchJob,
                BranchJob.branch == db_branch.id).count(),
                0)
    def _test_AnswerContactPruner(self, status, interval, expected_count=0):
        # Garbo should remove answer contacts for accounts with the given
        # 'status' that was set more than 'interval' days ago.
        switch_dbuser('testadmin')
        store = IMasterStore(AnswerContact)

        person = self.factory.makePerson()
        person.addLanguage(getUtility(ILanguageSet)['en'])
        question = self.factory.makeQuestion()
        with person_logged_in(question.owner):
            question.target.addAnswerContact(person, person)
        Store.of(question).flush()
        self.assertEqual(
            store.find(
                AnswerContact,
                AnswerContact.person == person.id).count(),
                1)

        account = person.account
        account.status = status
        # We flush because a trigger sets the date_status_set and we need to
        # modify it ourselves.
        Store.of(account).flush()
        if interval is not None:
            account.date_status_set = interval

        self.runDaily()

        switch_dbuser('testadmin')
        self.assertEqual(
            store.find(
                AnswerContact,
                AnswerContact.person == person.id).count(),
                expected_count)
    def test_CodeImportResultPruner(self):
        now = datetime.now(UTC)
        store = IMasterStore(CodeImportResult)

        results_to_keep_count = (
            config.codeimport.consecutive_failure_limit - 1)

        switch_dbuser('testadmin')
        code_import_id = self.factory.makeCodeImport().id
        machine_id = self.factory.makeCodeImportMachine().id
        requester_id = self.factory.makePerson().id
        transaction.commit()

        def new_code_import_result(timestamp):
            switch_dbuser('testadmin')
            CodeImportResult(
                date_created=timestamp,
                code_importID=code_import_id, machineID=machine_id,
                requesting_userID=requester_id,
                status=CodeImportResultStatus.FAILURE,
                date_job_started=timestamp)
            transaction.commit()

        new_code_import_result(now - timedelta(days=60))
        for i in range(results_to_keep_count - 1):
            new_code_import_result(now - timedelta(days=19 + i))

        # Run the garbage collector
        self.runDaily()

        # Nothing is removed, because we always keep the
        # ``results_to_keep_count`` latest.
        store = IMasterStore(CodeImportResult)
        self.failUnlessEqual(
            results_to_keep_count,
            store.find(CodeImportResult).count())

        new_code_import_result(now - timedelta(days=31))
        self.runDaily()
        store = IMasterStore(CodeImportResult)
        self.failUnlessEqual(
            results_to_keep_count,
            store.find(CodeImportResult).count())

        new_code_import_result(now - timedelta(days=29))
        self.runDaily()
        store = IMasterStore(CodeImportResult)
        self.failUnlessEqual(
            results_to_keep_count,
            store.find(CodeImportResult).count())

        # We now have no CodeImportResults older than 30 days
        self.failUnless(
            store.find(
                Min(CodeImportResult.date_created)).one().replace(tzinfo=UTC)
            >= now - timedelta(days=30))
Example #7
 def pruneRevisionCache(limit):
     """See `IRevisionSet`."""
     # Storm doesn't handle removing a limited result set:
     #    FeatureError: Can't remove a sliced result set
     store = IMasterStore(RevisionCache)
     epoch = datetime.now(tz=pytz.UTC) - timedelta(days=30)
     subquery = Select([RevisionCache.id],
                       RevisionCache.revision_date < epoch,
                       limit=limit)
     store.find(RevisionCache, RevisionCache.id.is_in(subquery)).remove()
Example #8
 def pruneRevisionCache(limit):
     """See `IRevisionSet`."""
     # Storm doesn't handle removing a limited result set:
     #    FeatureError: Can't remove a sliced result set
     store = IMasterStore(RevisionCache)
     epoch = datetime.now(tz=pytz.UTC) - timedelta(days=30)
     subquery = Select(
         [RevisionCache.id],
         RevisionCache.revision_date < epoch,
         limit=limit)
     store.find(RevisionCache, RevisionCache.id.is_in(subquery)).remove()
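Examples #7 and #8 are the same workaround: Storm refuses to remove a sliced result set, so the row limit is pushed down into a subselect and the DELETE matches on id membership. A minimal, self-contained sketch of the same idea using sqlite3 and a hypothetical revision_cache table (the Launchpad models are not available here):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE revision_cache (id INTEGER PRIMARY KEY, revision_date TEXT)")
conn.executemany(
    "INSERT INTO revision_cache (revision_date) VALUES (?)",
    [("2009-01-01",), ("2009-06-01",), ("2012-01-01",)])

# Bound the batch the way pruneRevisionCache does: select the victim ids
# in a limited subquery, then delete by membership, since a bare
# "DELETE ... LIMIT" is not portable and Storm cannot remove a sliced
# result set at all.
conn.execute(
    "DELETE FROM revision_cache WHERE id IN "
    "(SELECT id FROM revision_cache WHERE revision_date < ? LIMIT ?)",
    ("2010-01-01", 1))
print(conn.execute("SELECT COUNT(*) FROM revision_cache").fetchone()[0])  # 2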
    def __call__(self, chunk_size):
        """Take a batch of targets and update their BugTasks' name caches.

        See `ITunableLoop`.
        """
        # XXX 2008-03-05 gmb:
        #     We cast chunk_size to an integer to ensure that we're not
        #     trying to slice using floats or anything similarly
        #     foolish. We shouldn't have to do this, but bug #198767
        #     means that we do.
        chunk_size = int(chunk_size)

        start = self.offset
        end = self.offset + chunk_size

        chunk = self.candidates[start:end]

        self.transaction.begin()
        store = IMasterStore(BugTask)

        # Transpose the target rows into lists of object IDs to retrieve.
        ids_to_cache = zip(*(target for (target, names) in chunk))
        for index, cls in enumerate(target_classes):
            # Get all of the objects that we will need into the cache.
            list(store.find(cls, cls.id.is_in(set(ids_to_cache[index]))))

        for target_bits, cached_names in chunk:
            self.offset += 1
            # Resolve the IDs to objects, and get the actual IBugTarget.
            # If the ID is None, don't even try to get an object.
            target_objects = (
                (store.get(cls, id) if id is not None else None)
                for cls, id in zip(target_classes, target_bits))
            target = bug_target_from_key(*target_objects)
            new_name = target.bugtargetdisplayname
            cached_names.discard(new_name)
            # If there are any outdated names cached, update them all in
            # a single query.
            if len(cached_names) > 0:
                self.logger.info(
                    "Updating %r to '%s'." % (tuple(cached_names), new_name))
                self.total_updated += len(cached_names)
                conditions = (
                    col == id for col, id in zip(target_columns, target_bits))
                to_update = store.find(
                    BugTask,
                    BugTask.targetnamecache.is_in(cached_names),
                    *conditions)
                to_update.set(targetnamecache=new_name)

        self.logger.info("Checked %i targets." % len(chunk))

        self.transaction.commit()
class MigrateCurrentFlagProcess:
    """Mark all translations as is_imported if they are is_current.

    Processes only translations for upstream projects, since Ubuntu
    source packages need no migration.
    """

    def __init__(self, transaction, logger=None):
        self.transaction = transaction
        self.logger = logger
        if logger is None:
            self.logger = logging.getLogger("migrate-current-flag")
        self.store = IMasterStore(Product)

    def getProductsWithTemplates(self):
        """Get Product.ids for projects with any translations templates."""
        return (
            self.store.find(
                Product, POTemplate.productseriesID == ProductSeries.id, ProductSeries.productID == Product.id
            )
            .group_by(Product)
            .having(Count(POTemplate.id) > 0)
        )

    def getCurrentNonimportedTranslations(self, product):
        """Get TranslationMessage.ids that need migration for a `product`."""
        return self.store.find(
            TranslationMessage.id,
            TranslationMessage.is_current_ubuntu == True,
            TranslationMessage.is_current_upstream == False,
            (TranslationMessage.potmsgsetID == TranslationTemplateItem.potmsgsetID),
            TranslationTemplateItem.potemplateID == POTemplate.id,
            POTemplate.productseriesID == ProductSeries.id,
            ProductSeries.productID == product.id,
        ).config(distinct=True)

    def run(self):
        products_with_templates = list(self.getProductsWithTemplates())
        total_products = len(products_with_templates)
        if total_products == 0:
            self.logger.info("Nothing to do.")
        current_product = 0
        for product in products_with_templates:
            current_product += 1
            self.logger.info(
                "Migrating %s translations (%d of %d)..." % (product.name, current_product, total_products)
            )

            tm_ids = self.getCurrentNonimportedTranslations(product)
            tm_loop = TranslationMessageImportedFlagUpdater(self.transaction, self.logger, tm_ids)
            DBLoopTuner(tm_loop, 5, minimum_chunk_size=100).run()

        self.logger.info("Done.")
Example #11
class MigrateCurrentFlagProcess:
    """Mark all translations as is_imported if they are is_current.

    Processes only translations for upstream projects, since Ubuntu
    source packages need no migration.
    """
    def __init__(self, transaction, logger=None):
        self.transaction = transaction
        self.logger = logger
        if logger is None:
            self.logger = logging.getLogger("migrate-current-flag")
        self.store = IMasterStore(Product)

    def getProductsWithTemplates(self):
        """Get Product.ids for projects with any translations templates."""
        return self.store.find(
            Product,
            POTemplate.productseriesID == ProductSeries.id,
            ProductSeries.productID == Product.id,
        ).group_by(Product).having(Count(POTemplate.id) > 0)

    def getCurrentNonimportedTranslations(self, product):
        """Get TranslationMessage.ids that need migration for a `product`."""
        return self.store.find(
            TranslationMessage.id,
            TranslationMessage.is_current_ubuntu == True,
            TranslationMessage.is_current_upstream == False,
            (TranslationMessage.potmsgsetID
             == TranslationTemplateItem.potmsgsetID),
            TranslationTemplateItem.potemplateID == POTemplate.id,
            POTemplate.productseriesID == ProductSeries.id,
            ProductSeries.productID == product.id).config(distinct=True)

    def run(self):
        products_with_templates = list(self.getProductsWithTemplates())
        total_products = len(products_with_templates)
        if total_products == 0:
            self.logger.info("Nothing to do.")
        current_product = 0
        for product in products_with_templates:
            current_product += 1
            self.logger.info("Migrating %s translations (%d of %d)..." %
                             (product.name, current_product, total_products))

            tm_ids = self.getCurrentNonimportedTranslations(product)
            tm_loop = TranslationMessageImportedFlagUpdater(
                self.transaction, self.logger, tm_ids)
            DBLoopTuner(tm_loop, 5, minimum_chunk_size=100).run()

        self.logger.info("Done.")
    def __call__(self, chunk_size):
        """Take a batch of targets and update their BugTasks' name caches.

        See `ITunableLoop`.
        """
        # XXX 2008-03-05 gmb:
        #     We cast chunk_size to an integer to ensure that we're not
        #     trying to slice using floats or anything similarly
        #     foolish. We shouldn't have to do this, but bug #198767
        #     means that we do.
        chunk_size = int(chunk_size)

        start = self.offset
        end = self.offset + chunk_size

        chunk = self.candidates[start:end]

        self.transaction.begin()
        store = IMasterStore(BugTask)

        # Transpose the target rows into lists of object IDs to retrieve.
        ids_to_cache = zip(*(target for (target, names) in chunk))
        for index, cls in enumerate(target_classes):
            # Get all of the objects that we will need into the cache.
            list(store.find(cls, cls.id.is_in(set(ids_to_cache[index]))))

        for target_bits, cached_names in chunk:
            self.offset += 1
            # Resolve the IDs to objects, and get the actual IBugTarget.
            # If the ID is None, don't even try to get an object.
            target_objects = (
                (store.get(cls, id) if id is not None else None) for cls, id in zip(target_classes, target_bits)
            )
            target = bug_target_from_key(*target_objects)
            new_name = target.bugtargetdisplayname
            cached_names.discard(new_name)
            # If there are any outdated names cached, update them all in
            # a single query.
            if len(cached_names) > 0:
                self.logger.info("Updating %r to '%s'." % (tuple(cached_names), new_name))
                self.total_updated += len(cached_names)
                conditions = (col == id for col, id in zip(target_columns, target_bits))
                to_update = store.find(BugTask, BugTask.targetnamecache.is_in(cached_names), *conditions)
                to_update.set(targetnamecache=new_name)

        self.logger.info("Checked %i targets." % len(chunk))

        self.transaction.commit()
Example #13
    def newPackagesetUploader(self,
                              archive,
                              person,
                              packageset,
                              explicit=False):
        """See `IArchivePermissionSet`."""
        packageset = self._nameToPackageset(packageset)
        store = IMasterStore(ArchivePermission)

        # First see whether we have a matching permission in the database
        # already.
        query = '''
            SELECT ap.id
            FROM archivepermission ap, teamparticipation tp
            WHERE
                ap.person = tp.team AND tp.person = ?
                AND ap.packageset = ? AND ap.archive = ?
        '''
        query = SQL(query, (person.id, packageset.id, archive.id))
        permissions = list(
            store.find(ArchivePermission, ArchivePermission.id.is_in(query)))
        if len(permissions) > 0:
            # Found permissions in the database, does the 'explicit' flag
            # have the requested value?
            conflicting = [
                permission for permission in permissions
                if permission.explicit != explicit
            ]
            if len(conflicting) > 0:
                # At least one permission with conflicting 'explicit' flag
                # value exists already.
                cperm = conflicting[0]
                raise ValueError(
                    "Permission for package set '%s' already exists for %s "
                    "but with a different 'explicit' flag value (%s)." %
                    (packageset.name, cperm.person.name, cperm.explicit))
            else:
                # No conflicts, does the requested permission exist already?
                existing = [
                    permission for permission in permissions
                    if (permission.explicit == explicit and permission.person
                        == person and permission.packageset == packageset)
                ]
                assert len(existing) <= 1, (
                    "Too many permissions for %s and %s" %
                    (person.name, packageset.name))
                if len(existing) == 1:
                    # The existing permission matches, just return it.
                    return existing[0]

        # The requested permission does not exist yet. Insert it into the
        # database.
        permission = ArchivePermission(archive=archive,
                                       person=person,
                                       packageset=packageset,
                                       permission=ArchivePermissionType.UPLOAD,
                                       explicit=explicit)
        store.add(permission)

        return permission
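Stripped of the ORM, newPackagesetUploader is a guarded get-or-create: fetch candidate rows, refuse to proceed if any carry a conflicting 'explicit' flag, return an exact match if one exists, and only otherwise insert. A rough dict-based sketch of that control flow (it simplifies away the TeamParticipation join, so person matching here is exact):

def new_packageset_uploader(permissions, person, packageset, explicit=False):
    """Guarded get-or-create over a list of permission dicts."""
    matches = [p for p in permissions
               if p["person"] == person and p["packageset"] == packageset]
    conflicting = [p for p in matches if p["explicit"] != explicit]
    if conflicting:
        raise ValueError(
            "Permission for package set %r already exists for %r "
            "but with a different 'explicit' flag value (%s)."
            % (packageset, person, conflicting[0]["explicit"]))
    if matches:
        # The existing permission matches, just return it.
        return matches[0]
    permission = {"person": person, "packageset": packageset,
                  "explicit": explicit}
    permissions.append(permission)
    return permission

perms = []
new_packageset_uploader(perms, "alice", "core")  # inserts
new_packageset_uploader(perms, "alice", "core")  # returns the existing row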
    def test_CodeImportEventPruner(self):
        now = datetime.now(UTC)
        store = IMasterStore(CodeImportResult)

        switch_dbuser('testadmin')
        machine = self.factory.makeCodeImportMachine()
        requester = self.factory.makePerson()
        # Create 6 code import events for this machine, 3 on each side of 30
        # days. Use the event set so the extra event data rows get created
        # too.
        event_set = getUtility(ICodeImportEventSet)
        for age in (35, 33, 31, 29, 27, 15):
            event_set.newOnline(
                machine, user=requester, message='Hello',
                _date_created=(now - timedelta(days=age)))
        transaction.commit()

        # Run the garbage collector
        self.runDaily()

        # Only the three most recent events are left.
        events = list(machine.events)
        self.assertEqual(3, len(events))
        # We now have no CodeImportEvents older than 30 days
        self.failUnless(
            store.find(
                Min(CodeImportEvent.date_created)).one().replace(tzinfo=UTC)
            >= now - timedelta(days=30))
Example #17
 def iterReady(cls):
     """See `IJobSource`."""
     store = IMasterStore(QuestionJob)
     jobs = store.find(
         QuestionJob,
         And(QuestionJob.job_type == cls.class_job_type,
             QuestionJob.job_id.is_in(Job.ready_jobs)))
     return (cls(job) for job in jobs)
Example #18
 def iterReady(cls):
     """Iterate through all ready ProductJobs."""
     store = IMasterStore(ProductJob)
     jobs = store.find(
         ProductJob,
         And(ProductJob.job_type == cls.class_job_type,
             ProductJob.job_id.is_in(Job.ready_jobs)))
     return (cls(job) for job in jobs)
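The two iterReady variants above (Examples #17 and #18) are one pattern: restrict a job table by job type and by membership of the job id in a precomputed set of ready jobs, then wrap each row in the job class. A toy version over plain dicts (all names here are illustrative):

ready_job_ids = {1, 3}  # stand-in for Job.ready_jobs

question_jobs = [
    {"job_id": 1, "job_type": "EMAIL"},
    {"job_id": 2, "job_type": "EMAIL"},
    {"job_id": 3, "job_type": "COMMENT"},
]

def iter_ready(job_type):
    # Same shape as the Storm query: filter by type, then by readiness.
    return (job for job in question_jobs
            if job["job_type"] == job_type
            and job["job_id"] in ready_job_ids)

print(list(iter_ready("EMAIL")))  # [{'job_id': 1, 'job_type': 'EMAIL'}]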
 def getRecentBuilds(cls, requester, recipe, distroseries, _now=None):
     if _now is None:
         _now = datetime.now(pytz.UTC)
     store = IMasterStore(SourcePackageRecipeBuild)
     old_threshold = _now - timedelta(days=1)
     return store.find(cls, cls.distroseries_id == distroseries.id,
         cls.requester_id == requester.id, cls.recipe_id == recipe.id,
         cls.date_created > old_threshold)
Example #20
 def getRecentBuilds(cls, requester, recipe, distroseries, _now=None):
     if _now is None:
         _now = datetime.now(pytz.UTC)
     store = IMasterStore(SourcePackageRecipeBuild)
     old_threshold = _now - timedelta(days=1)
     return store.find(cls, cls.distroseries_id == distroseries.id,
                       cls.requester_id == requester.id,
                       cls.recipe_id == recipe.id,
                       cls.date_created > old_threshold)
 def exists(owner, name):
     """See `ISourcePackageRecipeSource.new`."""
     store = IMasterStore(SourcePackageRecipe)
     recipe = store.find(SourcePackageRecipe,
                         SourcePackageRecipe.owner == owner,
                         SourcePackageRecipe.name == name).one()
     if recipe:
         return True
     else:
         return False
Example #23
    def _getOldestLiveRequest(self):
        """Return the oldest live request on the master store.

        Due to replication lag, the master store is always a little
        ahead of the slave store that exports come from.
        """
        master_store = IMasterStore(POExportRequest)
        sorted_by_id = master_store.find(POExportRequest).order_by(
            POExportRequest.id)
        return sorted_by_id.first()
 def getMatchingDSD(self):
     """Find an existing `DistroSeriesDifference` for this difference."""
     spn_id = self.metadata["sourcepackagename"]
     parent_id = self.metadata["parent_series"]
     store = IMasterStore(DistroSeriesDifference)
     search = store.find(
         DistroSeriesDifference,
         DistroSeriesDifference.derived_series == self.derived_series,
         DistroSeriesDifference.parent_series_id == parent_id,
         DistroSeriesDifference.source_package_name_id == spn_id)
     return search.one()
 def exists(owner, name):
     """See `ISourcePackageRecipeSource.new`."""
     store = IMasterStore(SourcePackageRecipe)
     recipe = store.find(
         SourcePackageRecipe,
         SourcePackageRecipe.owner == owner,
         SourcePackageRecipe.name == name).one()
     if recipe:
         return True
     else:
         return False
Example #27
 def cleanupAssociations(self):
     """See `OpenIDStore`."""
     store = IMasterStore(self.Association)
     now = int(time.time())
     expired = store.find(
         self.Association,
         self.Association.issued + self.Association.lifetime < now)
     count = expired.count()
     if count > 0:
         expired.remove()
     return count
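cleanupAssociations computes expiry in the database with issued + lifetime < now and removes everything past the cutoff, returning the count. The same sweep over in-memory pairs (the tuples are hypothetical, not the real Association schema):

import time

now = int(time.time())
# (issued, lifetime) pairs; the first expired an hour ago.
associations = [(now - 7200, 3600), (now - 60, 3600)]

expired = [a for a in associations if a[0] + a[1] < now]
count = len(expired)
if count > 0:
    associations = [a for a in associations if a[0] + a[1] >= now]
print(count, len(associations))  # 1 1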
    def newPackagesetUploader(
        self, archive, person, packageset, explicit=False):
        """See `IArchivePermissionSet`."""
        packageset = self._nameToPackageset(packageset)
        store = IMasterStore(ArchivePermission)

        # First see whether we have a matching permission in the database
        # already.
        query = '''
            SELECT ap.id
            FROM archivepermission ap, teamparticipation tp
            WHERE
                ap.person = tp.team AND tp.person = ?
                AND ap.packageset = ? AND ap.archive = ?
        '''
        query = SQL(query, (person.id, packageset.id, archive.id))
        permissions = list(
            store.find(
                ArchivePermission, ArchivePermission.id.is_in(query)))
        if len(permissions) > 0:
            # Found permissions in the database, does the 'explicit' flag
            # have the requested value?
            conflicting = [permission for permission in permissions
                           if permission.explicit != explicit]
            if len(conflicting) > 0:
                # At least one permission with conflicting 'explicit' flag
                # value exists already.
                cperm = conflicting[0]
                raise ValueError(
                    "Permission for package set '%s' already exists for %s "
                    "but with a different 'explicit' flag value (%s)." %
                    (packageset.name, cperm.person.name, cperm.explicit))
            else:
                # No conflicts, does the requested permission exist already?
                existing = [permission for permission in permissions
                            if (permission.explicit == explicit and
                                permission.person == person and
                                permission.packageset == packageset)]
                assert len(existing) <= 1, (
                    "Too many permissions for %s and %s" %
                    (person.name, packageset.name))
                if len(existing) == 1:
                    # The existing permission matches, just return it.
                    return existing[0]

        # The requested permission does not exist yet. Insert it into the
        # database.
        permission = ArchivePermission(
            archive=archive,
            person=person, packageset=packageset,
            permission=ArchivePermissionType.UPLOAD, explicit=explicit)
        store.add(permission)

        return permission
Example #29
def find_dsd_for(dsp, package):
    """Find `DistroSeriesDifference`.

    :param dsp: `DistroSeriesParent`.
    :param package: `SourcePackageName`.
    """
    store = IMasterStore(DistroSeriesDifference)
    return store.find(
        DistroSeriesDifference,
        DistroSeriesDifference.derived_series == dsp.derived_series,
        DistroSeriesDifference.parent_series == dsp.parent_series,
        DistroSeriesDifference.source_package_name == package)
    def deletePackagesetUploader(
        self, archive, person, packageset, explicit=False):
        """See `IArchivePermissionSet`."""
        packageset = self._nameToPackageset(packageset)
        store = IMasterStore(ArchivePermission)

        # Do we have the permission the user wants removed in the database?
        permission = store.find(
            ArchivePermission, archive=archive, person=person,
            packageset=packageset, permission=ArchivePermissionType.UPLOAD,
            explicit=explicit).one()
        self._remove_permission(permission)
    def test_OAuthNoncePruner(self):
        now = datetime.now(UTC)
        timestamps = [
            now - timedelta(days=2),  # Garbage
            now - timedelta(days=1) - timedelta(seconds=60),  # Garbage
            now - timedelta(days=1) + timedelta(seconds=60),  # Not garbage
            now,  # Not garbage
            ]
        switch_dbuser('testadmin')
        store = IMasterStore(OAuthNonce)

        # Make sure we start with 0 nonces.
        self.failUnlessEqual(store.find(OAuthNonce).count(), 0)

        for timestamp in timestamps:
            store.add(OAuthNonce(
                access_token=OAuthAccessToken.get(1),
                request_timestamp=timestamp,
                nonce=str(timestamp)))
        transaction.commit()

        # Make sure we have 4 nonces now.
        self.failUnlessEqual(store.find(OAuthNonce).count(), 4)

        self.runFrequently(
            maximum_chunk_size=60)  # 1 minute maximum chunk size

        store = IMasterStore(OAuthNonce)

        # Now back to two, having removed the two garbage entries.
        self.failUnlessEqual(store.find(OAuthNonce).count(), 2)

        # And none of them are older than a day.
        # Hmm... why is it I'm putting tz aware datetimes in and getting
        # naive datetimes back? Bug in the SQLObject compatibility layer?
        # Test is still fine as we know the timezone.
        self.failUnless(
            store.find(
                Min(OAuthNonce.request_timestamp)).one().replace(tzinfo=UTC)
            >= now - timedelta(days=1))
    def test_OpenIDConsumerNoncePruner(self):
        now = int(time.mktime(time.gmtime()))
        MINUTES = 60
        HOURS = 60 * 60
        DAYS = 24 * HOURS
        timestamps = [
            now - 2 * DAYS,  # Garbage
            now - 1 * DAYS - 1 * MINUTES,  # Garbage
            now - 1 * DAYS + 1 * MINUTES,  # Not garbage
            now,  # Not garbage
            ]
        switch_dbuser('testadmin')

        store = IMasterStore(OpenIDConsumerNonce)

        # Make sure we start with 0 nonces.
        self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 0)

        for timestamp in timestamps:
            store.add(OpenIDConsumerNonce(
                    u'http://server/', timestamp, u'aa'))
        transaction.commit()

        # Make sure we have 4 nonces now.
        self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)

        # Run the garbage collector.
        self.runFrequently(maximum_chunk_size=60)  # 1 minute maximum chunks.

        store = IMasterStore(OpenIDConsumerNonce)

        # We should now have 2 nonces.
        self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 2)

        # And none of them are older than 1 day
        earliest = store.find(Min(OpenIDConsumerNonce.timestamp)).one()
        self.failUnless(
            earliest >= now - 24 * 60 * 60, 'Still have old nonces')
 def test_UnusedPOTMsgSetPruner_preserves_used_potmsgsets(self):
     # UnusedPOTMsgSetPruner will not remove a potmsgset if it changes
     # between calls.
     switch_dbuser('testadmin')
     potmsgset_pofile = {}
     for n in xrange(4):
         pofile = self.factory.makePOFile()
         translation_message = self.factory.makeCurrentTranslationMessage(
             pofile=pofile)
         translation_message.potmsgset.setSequence(
             pofile.potemplate, 0)
         potmsgset_pofile[translation_message.potmsgset.id] = pofile.id
     transaction.commit()
     store = IMasterStore(POTMsgSet)
     test_ids = potmsgset_pofile.keys()
     obsolete_msgsets = store.find(
         POTMsgSet,
         In(TranslationTemplateItem.potmsgsetID, test_ids),
         TranslationTemplateItem.sequence == 0)
     self.assertEqual(4, obsolete_msgsets.count())
     pruner = UnusedPOTMsgSetPruner(self.log)
     pruner(2)
      # A potmsgset is set to a sequence > 0 between batches/commits.
     last_id = pruner.msgset_ids_to_remove[-1]
     used_potmsgset = store.find(POTMsgSet, POTMsgSet.id == last_id).one()
     used_pofile = store.find(
         POFile, POFile.id == potmsgset_pofile[last_id]).one()
     translation_message = self.factory.makeCurrentTranslationMessage(
         pofile=used_pofile, potmsgset=used_potmsgset)
     used_potmsgset.setSequence(used_pofile.potemplate, 1)
     transaction.commit()
     # Next batch.
     pruner(2)
     self.assertEqual(0, obsolete_msgsets.count())
     preserved_msgsets = store.find(
         POTMsgSet, In(TranslationTemplateItem.potmsgsetID, test_ids))
     self.assertEqual(1, preserved_msgsets.count())
Example #35
    def getAssociation(self, server_url, handle=None):
        """See `OpenIDStore`."""
        store = IMasterStore(self.Association)
        server_url = server_url.decode('UTF-8')
        if handle is None:
            result = store.find(self.Association, server_url=server_url)
        else:
            handle = handle.decode('ASCII')
            result = store.find(
                self.Association, server_url=server_url, handle=handle)

        db_associations = list(result)
        associations = []
        for db_assoc in db_associations:
            assoc = db_assoc.as_association()
            if assoc.getExpiresIn() == 0:
                store.remove(db_assoc)
            else:
                associations.append(assoc)

        if len(associations) == 0:
            return None
        associations.sort(key=attrgetter('issued'))
        return associations[-1]
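getAssociation prunes expired rows as a side effect of reading and then returns the newest survivor by issue time. The selection rule on its own, with hypothetical records standing in for the OpenID association rows:

from collections import namedtuple
from operator import attrgetter

Assoc = namedtuple("Assoc", "issued expires_in")

candidates = [
    Assoc(issued=100, expires_in=3600),
    Assoc(issued=200, expires_in=0),    # expired: would be removed
    Assoc(issued=150, expires_in=3600),
]

associations = [a for a in candidates if a.expires_in != 0]
if not associations:
    newest = None
else:
    # sorted(...)[-1] in the original; max() is equivalent.
    newest = max(associations, key=attrgetter("issued"))
print(newest)  # Assoc(issued=150, expires_in=3600)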
Example #36
    def deletePackagesetUploader(self,
                                 archive,
                                 person,
                                 packageset,
                                 explicit=False):
        """See `IArchivePermissionSet`."""
        store = IMasterStore(ArchivePermission)

        # Do we have the permission the user wants removed in the database?
        permission = store.find(ArchivePermission,
                                archive=archive,
                                person=person,
                                packageset=packageset,
                                permission=ArchivePermissionType.UPLOAD,
                                explicit=explicit).one()
        self._remove_permission(permission)
Example #37
def create_unique_token_for_table(token_length, column):
    """Create a new unique token in a table.

    Generates a token and makes sure it does not already exist in
    the table and column specified.

    :param token_length: The length for the token string
    :param column: Database column where the token will be stored.

    :return: A new token string
    """
    # Use the master Store to ensure no race conditions. 
    store = IMasterStore(column.cls)
    token = create_token(token_length)
    while store.find(column.cls, column == token).one() is not None:
        token = create_token(token_length)
    return token
Example #38
def create_unique_token_for_table(token_length, column):
    """Create a new unique token in a table.

    Generates a token and makes sure it does not already exist in
    the table and column specified.

    :param token_length: The length for the token string
    :param column: Database column where the token will be stored.

    :return: A new token string
    """
    # Use the master Store to ensure no race conditions.
    store = IMasterStore(column.cls)
    token = create_token(token_length)
    while store.find(column.cls, column == token).one() is not None:
        token = create_token(token_length)
    return token
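Examples #37 and #38 show the loop-until-unique idiom: generate a token, probe the column, retry on collision. A self-contained version against an in-memory set (secrets.token_hex stands in for create_token; existing stands in for the database column):

import secrets

def create_token(token_length):
    # Hex string of the requested length (assumes an even length).
    return secrets.token_hex(token_length // 2)

def create_unique_token(token_length, existing):
    token = create_token(token_length)
    while token in existing:  # retry on collision, as the query loop does
        token = create_token(token_length)
    existing.add(token)  # record it, standing in for the later INSERT
    return token

existing = {"deadbeef"}
print(create_unique_token(8, existing))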
 def test_UnusedPOTMsgSetPruner_removes_obsolete_message_sets(self):
      # UnusedPOTMsgSetPruner removes any POTMsgSet that is
      # participating in a POTemplate only as obsolete messages.
     switch_dbuser('testadmin')
     pofile = self.factory.makePOFile()
     translation_message = self.factory.makeCurrentTranslationMessage(
         pofile=pofile)
     translation_message.potmsgset.setSequence(
         pofile.potemplate, 0)
     transaction.commit()
     store = IMasterStore(POTMsgSet)
     obsolete_msgsets = store.find(
         POTMsgSet,
         TranslationTemplateItem.potmsgset == POTMsgSet.id,
         TranslationTemplateItem.sequence == 0)
     self.assertNotEqual(0, obsolete_msgsets.count())
     self.runDaily()
     self.assertEqual(0, obsolete_msgsets.count())
 def test_UnusedPOTMsgSetPruner_removes_unreferenced_message_sets(self):
     # If a POTMsgSet is not referenced by any templates the
     # UnusedPOTMsgSetPruner will remove it.
     switch_dbuser('testadmin')
     potmsgset = self.factory.makePOTMsgSet()
     # Cheekily drop any references to the POTMsgSet we just created.
     store = IMasterStore(POTMsgSet)
     store.execute(
         "DELETE FROM TranslationTemplateItem WHERE potmsgset = %s"
         % potmsgset.id)
     transaction.commit()
     unreferenced_msgsets = store.find(
         POTMsgSet,
         Not(In(
             POTMsgSet.id,
             SQL("SELECT potmsgset FROM TranslationTemplateItem"))))
     self.assertNotEqual(0, unreferenced_msgsets.count())
     self.runDaily()
     self.assertEqual(0, unreferenced_msgsets.count())
Example #41
    def __init__(self, *args, **kwargs):
        """Extended version of the SQLObjectBase constructor.

        We force use of the master Store.

        We refetch any parameters from different stores from the
        correct master Store.
        """
        # Make it simple to write dumb-invalidators - initialized
        # _cached_properties to a valid list rather than just-in-time
        # creation.
        self._cached_properties = []
        store = IMasterStore(self.__class__)

        # The constructor will fail if objects from a different Store
        # are passed in. We need to refetch these objects from the correct
        # master Store if necessary so the foreign key references can be
        # constructed.
        # XXX StuartBishop 2009-03-02 bug=336867: We probably want to remove
        # this code - there are enough other places developers have to be
        # aware of the replication set boundaries. Why should
        # Person(..., account=an_account) work but
        # some_person.account = an_account fail?
        for key, argument in kwargs.items():
            argument = removeSecurityProxy(argument)
            if not isinstance(argument, Storm):
                continue
            argument_store = Store.of(argument)
            if argument_store is not store:
                new_argument = store.find(argument.__class__,
                                          id=argument.id).one()
                assert new_argument is not None, (
                    '%s not yet synced to this store' % repr(argument))
                kwargs[key] = new_argument

        store.add(self)
        try:
            self._create(None, **kwargs)
        except:
            store.remove(self)
            raise
Example #42
    def __init__(self, *args, **kwargs):
        """Extended version of the SQLObjectBase constructor.

        We force use of the master Store.

        We refetch any parameters from different stores from the
        correct master Store.
        """
        # Make it simple to write dumb-invalidators - initialized
        # _cached_properties to a valid list rather than just-in-time
        # creation.
        self._cached_properties = []
        store = IMasterStore(self.__class__)

        # The constructor will fail if objects from a different Store
        # are passed in. We need to refetch these objects from the correct
        # master Store if necessary so the foreign key references can be
        # constructed.
        # XXX StuartBishop 2009-03-02 bug=336867: We probably want to remove
        # this code - there are enough other places developers have to be
        # aware of the replication set boundaries. Why should
        # Person(..., account=an_account) work but
        # some_person.account = an_account fail?
        for key, argument in kwargs.items():
            argument = removeSecurityProxy(argument)
            if not isinstance(argument, Storm):
                continue
            argument_store = Store.of(argument)
            if argument_store is not store:
                new_argument = store.find(
                    argument.__class__, id=argument.id).one()
                assert new_argument is not None, (
                    '%s not yet synced to this store' % repr(argument))
                kwargs[key] = new_argument

        store.add(self)
        try:
            self._create(None, **kwargs)
        except:
            store.remove(self)
            raise
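The refetch loop in this constructor can be read independently of Storm: for every keyword argument that is a database object, check which store it belongs to and, if it is not the master store, look up the master's copy by id before using it. A toy model of that rule (FakeRow and the dict-as-store are hypothetical):

master_store = {1: "<Account 1 in master>"}

class FakeRow:
    """Hypothetical stand-in for a Storm object from some other store."""
    def __init__(self, id, store_name):
        self.id = id
        self.store_name = store_name

def refetch_into_master(kwargs):
    for key, value in list(kwargs.items()):
        if isinstance(value, FakeRow) and value.store_name != "master":
            new_value = master_store.get(value.id)
            assert new_value is not None, (
                "%r not yet synced to this store" % (value,))
            kwargs[key] = new_value
    return kwargs

print(refetch_into_master({"account": FakeRow(1, "slave")}))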
Example #43
    def acquireRevisionAuthors(self, author_names):
        """Find or create the RevisionAuthors with the specified names.

        A name may be any arbitrary string, but if it is an email-id, and
        its email address is a verified email address, it will be
        automatically linked to the corresponding Person.

        Email-ids come in two major forms:
            "Foo Bar" <*****@*****.**>
            [email protected] (Foo Bar)
        :return: a dict of name -> RevisionAuthor
        """
        store = IMasterStore(Revision)
        author_names = set(author_names)
        authors = {}
        for author in store.find(RevisionAuthor,
                                 RevisionAuthor.name.is_in(author_names)):
            authors[author.name] = author
        missing = author_names - set(authors.keys())
        # create missing RevisionAuthors
        for name in missing:
            authors[name] = self._createRevisionAuthor(name)
        return authors
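acquireRevisionAuthors is a bulk find-then-create: one query fetches every known name, set arithmetic yields the missing ones, and only those are created. The same shape with a dict playing the store (all names illustrative):

known_authors = {"alice": "<RevisionAuthor alice>"}  # stand-in store

def _create_revision_author(name):
    return "<RevisionAuthor %s>" % name

def acquire_revision_authors(author_names):
    author_names = set(author_names)
    # One lookup for everything we already have...
    authors = {name: record for name, record in known_authors.items()
               if name in author_names}
    # ...then create only what is missing.
    for name in author_names - set(authors):
        authors[name] = _create_revision_author(name)
    return authors

print(acquire_revision_authors(["alice", "bob"]))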
    def test_BranchJobPruner_doesnt_prune_recent_jobs(self):
        # Check to make sure the garbo doesn't remove jobs that aren't more
        # than thirty days old.
        switch_dbuser('testadmin')
        store = IMasterStore(Job)

        db_branch = self.factory.makeAnyBranch(
            branch_format=BranchFormat.BZR_BRANCH_5,
            repository_format=RepositoryFormat.BZR_KNIT_1)

        branch_job = BranchUpgradeJob.create(
            db_branch, self.factory.makePerson())
        branch_job.job.date_finished = THIRTY_DAYS_AGO

        db_branch2 = self.factory.makeAnyBranch(
            branch_format=BranchFormat.BZR_BRANCH_5,
            repository_format=RepositoryFormat.BZR_KNIT_1)
        BranchUpgradeJob.create(db_branch2, self.factory.makePerson())

        self.runDaily()

        switch_dbuser('testadmin')
        self.assertEqual(store.find(BranchJob).count(), 1)
def find_waiting_jobs(derived_series, sourcepackagename, parent_series):
    """Look for pending `DistroSeriesDifference` jobs on a package."""
    # Look for identical pending jobs.  This compares directly on
    # the metadata string.  It's fragile, but this is only an
    # optimization.  It's not actually disastrous to create
    # redundant jobs occasionally.
    json_metadata = make_metadata(sourcepackagename.id, parent_series.id)

    # Use master store because we don't like outdated information
    # here.
    store = IMasterStore(DistributionJob)

    candidates = store.find(
        DistributionJob,
        DistributionJob.job_type == DistributionJobType.DISTROSERIESDIFFERENCE,
        DistributionJob.distroseries == derived_series,
        DistributionJob.metadata == json_metadata,
        DistributionJob.job_id.is_in(Job.ready_jobs))

    return [
        job for job in candidates
        if job.metadata["parent_series"] == parent_series.id
    ]
Example #46
    def acquireRevisionAuthors(self, author_names):
        """Find or create the RevisionAuthors with the specified names.

        A name may be any arbitrary string, but if it is an email-id, and
        its email address is a verified email address, it will be
        automatically linked to the corresponding Person.

        Email-ids come in two major forms:
            "Foo Bar" <*****@*****.**>
            [email protected] (Foo Bar)
        :return: a dict of name -> RevisionAuthor
        """
        store = IMasterStore(Revision)
        author_names = set(author_names)
        authors = {}
        for author in store.find(RevisionAuthor,
                RevisionAuthor.name.is_in(author_names)):
            authors[author.name] = author
        missing = author_names - set(authors.keys())
        # create missing RevisionAuthors
        for name in missing:
            authors[name] = self._createRevisionAuthor(name)
        return authors
def find_waiting_jobs(derived_series, sourcepackagename, parent_series):
    """Look for pending `DistroSeriesDifference` jobs on a package."""
    # Look for identical pending jobs.  This compares directly on
    # the metadata string.  It's fragile, but this is only an
    # optimization.  It's not actually disastrous to create
    # redundant jobs occasionally.
    json_metadata = make_metadata(sourcepackagename.id, parent_series.id)

    # Use master store because we don't like outdated information
    # here.
    store = IMasterStore(DistributionJob)

    candidates = store.find(
        DistributionJob,
        DistributionJob.job_type ==
            DistributionJobType.DISTROSERIESDIFFERENCE,
        DistributionJob.distroseries == derived_series,
        DistributionJob.metadata == json_metadata,
        DistributionJob.job_id.is_in(Job.ready_jobs))

    return [
        job for job in candidates
        if job.metadata["parent_series"] == parent_series.id]
Example #48
class InitializeDistroSeries:
    """Copy in all of the parents distroseries's configuration. This
    includes all configuration for distroseries as well as distroarchseries,
    publishing and all publishing records for sources and binaries.

    We support 2 use cases here:
      #1 If the child distribution has zero initialized series:
        - the parent list can't be empty (otherwise we trigger an error);
        - the series will be derived from the parents passed as argument;
        - the parents will be set to the parents passed as argument;
        - first_derivation = True.
      #2 If the child distribution has more than zero initialized series:
        - the series will be derived from the previous_series;
        - the parents will be set to the parents passed as argument or
          the parents of the previous_series if the passed argument is empty;
        - first_derivation = False.

    Preconditions:
      The distroseries must exist, and be completely unused, with no source
      or binary packages existing, as well as no distroarchseries set up.
      Section and component selections must be empty. It must not have any
      parent series.

    Outcome:
      The distroarchseries set up in the parent series will be copied.
      The publishing structure will be copied from the parents. All
      PUBLISHED and PENDING packages in the parents will be created in
      this distroseries and its distroarchseries. All component and section
      selections will be duplicated, as will any permission-related
      structures.

    Note:
      This method will raise an InitializationError when the pre-conditions
      are not met. After this is run, you still need to construct chroots
      for building, you need to add anything missing wrt. ports etc. This
      method is only meant to give you a basic copy of parent series in
      order to assist you in preparing a new series of a distribution or
      in the initialization of a derivative.
    """
    def __init__(self,
                 distroseries,
                 parents=(),
                 arches=(),
                 archindep_archtag=None,
                 packagesets=(),
                 rebuild=False,
                 overlays=(),
                 overlay_pockets=(),
                 overlay_components=()):
        self.distroseries = distroseries
        self.parent_ids = [int(id) for id in parents]
        # Load parent objects in bulk...
        parents_bulk = bulk.load(DistroSeries, self.parent_ids)
        # ... sort the parents to match the order in the 'parents' parameter.
        self.parents = sorted(
            parents_bulk, key=lambda parent: self.parent_ids.index(parent.id))
        self.arches = arches
        self.archindep_archtag = archindep_archtag
        self.packagesets_ids = [
            ensure_unicode(packageset) for packageset in packagesets
        ]
        self.packagesets = bulk.load(
            Packageset, [int(packageset) for packageset in packagesets])
        self.rebuild = rebuild
        self.overlays = overlays
        self.overlay_pockets = overlay_pockets
        self.overlay_components = overlay_components
        self._store = IMasterStore(DistroSeries)

        self.first_derivation = (
            not self.distroseries.distribution.has_published_sources)

        if self.first_derivation:
            # Use-case #1.
            self.derivation_parents = self.parents
            self.derivation_parent_ids = self.parent_ids
        else:
            # Use-case #2.
            self.derivation_parents = [self.distroseries.previous_series]
            self.derivation_parent_ids = [
                p.id for p in self.derivation_parents if p is not None
            ]
            if self.parent_ids == []:
                self.parents = (
                    self.distroseries.previous_series.getParentSeries())
        self._create_source_names_by_parent()

    def check(self):
        if self.distroseries.isDerivedSeries():
            raise InitializationError(
                ("Series {child.name} has already been initialised"
                 ".").format(child=self.distroseries))
        self._checkPublisherConfig()
        if (self.distroseries.distribution.has_published_sources
                and self.distroseries.previous_series is None):
            raise InitializationError(
                ("Series {child.name} has no previous series and "
                 "the distribution already has initialised series"
                 ".").format(child=self.distroseries))
        self._checkParents()
        self._checkArchindep()
        for parent in self.derivation_parents:
            self._checkBuilds(parent)
            self._checkQueue(parent)
        self._checkSeries()

    def _checkArchindep(self):
        # Check that the child distroseries has an architecture to
        # build architecture independent binaries.
        if self.archindep_archtag is None:
            # No archindep_archtag was given, so we try to figure out
            # a proper one among the parents'.
            potential_nominated_arches = self._potential_nominated_arches(
                self.derivation_parents)
            if len(potential_nominated_arches) == 0:
                raise InitializationError(
                    "The distroseries has no architectures selected to "
                    "build architecture independent binaries.")
        else:
            # Make sure that the given archindep_archtag is among the
            # selected architectures.
            if (self.arches is not None and len(self.arches) != 0
                    and self.archindep_archtag not in self.arches):
                raise InitializationError(
                    "The selected architecture independent architecture tag "
                    "is not among the selected architectures.")

    def _checkPublisherConfig(self):
        """A series cannot be initialized if it has no publisher config
        set up.
        """
        publisherconfigset = getUtility(IPublisherConfigSet)
        config = publisherconfigset.getByDistribution(
            self.distroseries.distribution)
        if config is None:
            raise InitializationError(
                ("Distribution {child.name} has no publisher configuration. "
                 "Please ask an administrator to set this up"
                 ".").format(child=self.distroseries.distribution))

    def _checkParents(self):
        """If self.first_derivation, the parents list cannot be empty."""
        if self.first_derivation:
            # Use-case #1.
            if len(self.parent_ids) == 0:
                raise InitializationError(
                    "No other series in the distribution is initialised "
                    "and a parent was not explicitly specified.")

    def _checkBuilds(self, parent):
        """Assert there are no pending builds for the given parent series.

        Only cares about the RELEASE, SECURITY and UPDATES pockets, which are
        the only ones inherited via initializeFromParent method.
        Restrict the check to the select architectures (if applicable).
        Restrict the check to the selected packages if a limited set of
        packagesets is used by the initialization.
        """
        spns = self.source_names_by_parent.get(parent.id, None)
        if spns is not None and len(spns) == 0:
            # If no sources are selected in this parent, skip the check.
            return
        # spns=None means no packagesets selected so we need to consider
        # all sources.

        arch_tags = self.arches if len(self.arches) != 0 else None
        pending_builds = parent.getBuildRecords(BuildStatus.NEEDSBUILD,
                                                pocket=INIT_POCKETS,
                                                arch_tag=arch_tags,
                                                name=spns)

        if not pending_builds.is_empty():
            raise InitializationError("The parent series has pending builds "
                                      "for selected sources.")

    def _checkQueue(self, parent):
        """Assert upload queue is empty on the given parent series.

        Only cares about the RELEASE, SECURITY and UPDATES pockets, which are
        the only ones inherited via initializeFromParent method.
        Restrict the check to the selected packages if a limited set of
        packagesets is used by the initialization.
         """
        statuses = [
            PackageUploadStatus.NEW,
            PackageUploadStatus.ACCEPTED,
            PackageUploadStatus.UNAPPROVED,
        ]
        spns = self.source_names_by_parent.get(parent.id, None)
        if spns is not None and len(spns) == 0:
            # If no sources are selected in this parent, skip the check.
            return
        # spns=None means no packagesets selected so we need to consider
        # all sources.

        items = getUtility(IPackageUploadSet).getBuildsForSources(
            parent, statuses, INIT_POCKETS, spns)
        if not items.is_empty():
            raise InitializationError(
                "The parent series has sources waiting in its upload "
                "queues that match your selection.")

    def _checkSeries(self):
        error = ("Cannot copy distroarchseries from parent; there are "
                 "already one or more distroarchseries initialised for "
                 "this series.")
        sources = self.distroseries.getAllPublishedSources()
        binaries = self.distroseries.getAllPublishedBinaries()
        if not all(
                map(methodcaller('is_empty'),
                    (sources, binaries, self.distroseries.architectures,
                     self.distroseries.sections))):
            raise InitializationError(error)
        if self.distroseries.components:
            raise InitializationError(error)

    def initialize(self):
        self._set_parents()
        self._copy_configuration()
        self._copy_architectures()
        self._set_nominatedarchindep()
        self._copy_packages()
        self._copy_packagesets()
        self._copy_pocket_permissions()
        self._create_dsds()
        self._set_initialized()
        transaction.commit()

    def _set_parents(self):
        count = 0
        for parent in self.parents:
            dsp_set = getUtility(IDistroSeriesParentSet)
            if self.overlays and self.overlays[count]:
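                # Resolve the pocket token (a plain string) into the
                # PackagePublishingPocket enum value via its vocabulary.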
                pocket = PackagePublishingPocket.__metaclass__.getTermByToken(
                    PackagePublishingPocket, self.overlay_pockets[count]).value
                component_set = getUtility(IComponentSet)
                component = component_set[self.overlay_components[count]]
                dsp_set.new(self.distroseries,
                            parent,
                            initialized=False,
                            is_overlay=True,
                            pocket=pocket,
                            component=component,
                            ordering=count)
            else:
                dsp_set.new(self.distroseries,
                            parent,
                            initialized=False,
                            is_overlay=False,
                            ordering=count)
            count += 1

    def _set_initialized(self):
        dsp_set = getUtility(IDistroSeriesParentSet)
        distroseriesparents = dsp_set.getByDerivedSeries(self.distroseries)
        for distroseriesparent in distroseriesparents:
            distroseriesparent.initialized = True

    def _has_same_parents_as_previous_series(self):
        # Does this distroseries have the same parents as its previous
        # series? (Note that the parents' order does not matter here.)
        dsp_set = getUtility(IDistroSeriesParentSet)
        previous_series_parents = [
            dsp.parent_series for dsp in dsp_set.getByDerivedSeries(
                self.distroseries.previous_series)
        ]
        return set(previous_series_parents) == set(self.parents)

    def _create_dsds(self):
        if not self.first_derivation:
            if (self._has_same_parents_as_previous_series()
                    and not self.packagesets_ids):
                # If the parents are the same as previous_series's
                # parents and all the packagesets are being copied,
                # then we simply copy the DSDs from previous_series
                # for performance reasons.
                self._copy_dsds_from_previous_series()
            else:
                # Either the parents have changed (compared to
                # previous_series's parents) or a selection only of the
                # packagesets is being copied so we have to recompute
                # the DSDs by creating DSD Jobs.
                self._create_dsd_jobs()
        else:
            # If this is the first derivation, create the DSD Jobs.
            self._create_dsd_jobs()

    def _copy_dsds_from_previous_series(self):
        self._store.execute("""
            INSERT INTO DistroSeriesDifference
                (derived_series, source_package_name, package_diff,
                status, difference_type, parent_package_diff,
                source_version, parent_source_version,
                base_version, parent_series)
            SELECT
                %s AS derived_series, source_package_name,
                package_diff, status,
                difference_type, parent_package_diff, source_version,
                parent_source_version, base_version, parent_series
            FROM DistroSeriesDifference AS dsd
                WHERE dsd.derived_series = %s
            """ % sqlvalues(self.distroseries.id,
                            self.distroseries.previous_series.id))

    def _create_dsd_jobs(self):
        job_source = getUtility(IDistroSeriesDifferenceJobSource)
        job_source.massCreateForSeries(self.distroseries)

    def _copy_configuration(self):
        self.distroseries.backports_not_automatic = any(
            parent.backports_not_automatic
            for parent in self.derivation_parents)
        self.distroseries.include_long_descriptions = any(
            parent.include_long_descriptions
            for parent in self.derivation_parents)

    def _copy_architectures(self):
        das_filter = ' AND distroseries IN %s ' % (sqlvalues(
            [p.id for p in self.derivation_parents]))
        if self.arches:
            das_filter += ' AND architecturetag IN %s ' % (sqlvalues(
                self.arches))
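        # Merge the parents' rows per (processor, architecturetag) pair:
        # a DAS is official only if it is official in every parent
        # (bool_and), and supports virtualized builds if any parent's
        # does (bool_or).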
        self._store.execute("""
            INSERT INTO DistroArchSeries
            (distroseries, processor, architecturetag, owner, official,
             supports_virtualized)
            SELECT %s, processor, architecturetag, %s,
                bool_and(official), bool_or(supports_virtualized)
            FROM DistroArchSeries WHERE enabled = TRUE %s
            GROUP BY processor, architecturetag
            """ % (sqlvalues(self.distroseries, self.distroseries.owner) +
                   (das_filter, )))
        self._store.flush()

    def _set_nominatedarchindep(self):
        if self.archindep_archtag is None:
            # Select the arch-indep builder from the intersection between
            # the selected architectures and the list of the parent's
            # arch-indep builders.
            arch_tag = self._potential_nominated_arches(
                self.derivation_parents).pop()
            self.distroseries.nominatedarchindep = (
                self.distroseries.getDistroArchSeries(arch_tag))
        else:
            self.distroseries.nominatedarchindep = (
                self.distroseries.getDistroArchSeries(self.archindep_archtag))

    def _potential_nominated_arches(self, parent_list):
        parent_indep_archtags = set(parent.nominatedarchindep.architecturetag
                                    for parent in parent_list
                                    if parent.nominatedarchindep is not None)

        if len(self.arches) == 0:
            return parent_indep_archtags
        else:
            return parent_indep_archtags.intersection(self.arches)

    def _copy_packages(self):
        # Perform the copies
        self._copy_component_section_and_format_selections()

        # Prepare the lists of distroarchseries for which binary packages
        # shall be copied.
        distroarchseries_lists = {}
        for parent in self.derivation_parents:
            distroarchseries_lists[parent] = []
            for arch in self.distroseries.architectures:
                if self.arches and (arch.architecturetag not in self.arches):
                    continue
                try:
                    parent_arch = parent.getDistroArchSeries(
                        arch.architecturetag)
                except NotFoundError:
                    continue

                distroarchseries_lists[parent].append((parent_arch, arch))
        # Now copy source and binary packages.
        self._copy_publishing_records(distroarchseries_lists)
        self._copy_packaging_links()

    def _use_cloner(self, target_archive, archive):
        """Returns True if it's safe to use the packagecloner (as opposed
        to using the packagecopier).
        We use two different ways to copy packages:
         - the packagecloner: fast but not conflict safe.
         - the packagecopier: slow but performs lots of checks to
         avoid creating conflicts.
        1. We'll use the cloner:
        If this is not a first initialization.
        And If:
            1.a If the archives are different and the target archive is
                empty use the cloner.
            Or
            1.b. If the archives are the same and the target series is
                empty use the cloner.
        2.  Otherwise use the copier.
        """
        if self.first_derivation:
            return False

        target_archive_empty = target_archive.getPublishedSources().is_empty()
        case_1a = (target_archive != archive and target_archive_empty)
        case_1b = (target_archive == archive and
                   (target_archive_empty or target_archive.getPublishedSources(
                       distroseries=self.distroseries).is_empty()))
        return case_1a or case_1b

    def _create_source_names_by_parent(self):
        """If only a subset of the packagesets was selected to be copied,
        create a dict with the list of source names to be copied for each
        parent.

        source_names_by_parent.get(parent) can be three different things:
        - None: no specific packagesets were selected for the
          initialization, so we need to consider *all* the packages in
          this parent.
        - []: some specific packagesets were selected for the
          initialization, but none of them is in this parent, so we can
          skip this parent for all the copy/check operations.
        - [name1, ...]: some specific packagesets were selected for the
          initialization and some are in this parent, so the list of
          packages to consider is not empty.
        """
        source_names_by_parent = {}
        if self.packagesets_ids:
            for parent in self.derivation_parents:
                spns = []
                for pkgset in self.packagesets:
                    if pkgset.distroseries == parent:
                        spns += list(pkgset.getSourcesIncluded())
                source_names_by_parent[parent.id] = spns
        self.source_names_by_parent = source_names_by_parent

    def _copy_publishing_records(self, distroarchseries_lists):
        """Copy the publishing records from the parent arch series
        to the given arch series in ourselves.

        We copy all PENDING and PUBLISHED records as PENDING into our own
        publishing records.

        We copy only the RELEASE pocket in the PRIMARY archive.
        """
        archive_set = getUtility(IArchiveSet)

        for parent in self.derivation_parents:
            spns = self.source_names_by_parent.get(parent.id, None)
            if spns is not None and len(spns) == 0:
                # Some packagesets were selected, but not a single
                # source from this parent: skip the copy, since calling
                # the copy with spns=[] would copy all the packages from
                # this parent.
                continue
            # spns=None means no packagesets selected so we need to consider
            # all sources.

            distroarchseries_list = distroarchseries_lists[parent]
            for archive in parent.distribution.all_distro_archives:
                if archive.purpose != ArchivePurpose.PRIMARY:
                    continue

                target_archive = archive_set.getByDistroPurpose(
                    self.distroseries.distribution, archive.purpose)
                # Only the PRIMARY archive reaches this point, and it
                # must already exist in the target distribution.
                assert target_archive is not None, (
                    "Target archive doesn't exist?")
                if self._use_cloner(target_archive, archive):
                    origin = PackageLocation(archive, parent.distribution,
                                             parent,
                                             PackagePublishingPocket.RELEASE)
                    destination = PackageLocation(
                        target_archive, self.distroseries.distribution,
                        self.distroseries, PackagePublishingPocket.RELEASE)
                    processors = None
                    if self.rebuild:
                        processors = [
                            das[1].processor for das in distroarchseries_list
                        ]
                        distroarchseries_list = ()
                    getUtility(IPackageCloner).clonePackages(
                        origin, destination, distroarchseries_list, processors,
                        spns, self.rebuild)
                else:
                    # There is only one available pocket in an unreleased
                    # series.
                    target_pocket = PackagePublishingPocket.RELEASE
                    sources = archive.getPublishedSources(
                        distroseries=parent,
                        pocket=INIT_POCKETS,
                        status=(PackagePublishingStatus.PENDING,
                                PackagePublishingStatus.PUBLISHED),
                        name=spns)
                    # XXX: rvb 2011-06-23 bug=801112: do_copy is atomic (all
                    # or none of the sources will be copied). This might
                    # lead to a partially initialised series if there is a
                    # single conflict in the destination series.
                    try:
                        sources_published = do_copy(
                            sources,
                            target_archive,
                            self.distroseries,
                            target_pocket,
                            include_binaries=not self.rebuild,
                            check_permissions=False,
                            strict_binaries=False,
                            close_bugs=False,
                            create_dsd_job=False,
                            person=None)
                        if self.rebuild:
                            rebuilds = []
                            for pubrec in sources_published:
                                builds = pubrec.createMissingBuilds(
                                    list(self.distroseries.architectures))
                                rebuilds.extend(builds)
                            self._rescore_rebuilds(rebuilds)
                    except CannotCopy as error:
                        raise InitializationError(error)

    def _rescore_rebuilds(self, builds):
        """Rescore the passed builds so that they have an appropriately low
         score.
        """
        for build in builds:
            build.buildqueue_record.lastscore -= COPY_ARCHIVE_SCORE_PENALTY

    def _copy_component_section_and_format_selections(self):
        """Copy the section, component and format selections from the parents
        distro series into this one.
        """
        # Copy the component selections
        self._store.execute('''
            INSERT INTO ComponentSelection (distroseries, component)
            SELECT DISTINCT %s AS distroseries, cs.component AS component
            FROM ComponentSelection AS cs WHERE cs.distroseries IN %s
            ''' % sqlvalues(self.distroseries.id, self.derivation_parent_ids))
        # Copy the section selections
        self._store.execute('''
            INSERT INTO SectionSelection (distroseries, section)
            SELECT DISTINCT %s as distroseries, ss.section AS section
            FROM SectionSelection AS ss WHERE ss.distroseries IN %s
            ''' % sqlvalues(self.distroseries.id, self.derivation_parent_ids))
        # Copy the source format selections
        self._store.execute('''
            INSERT INTO SourcePackageFormatSelection (distroseries, format)
            SELECT DISTINCT %s as distroseries, spfs.format AS format
            FROM SourcePackageFormatSelection AS spfs
            WHERE spfs.distroseries IN %s
            ''' % sqlvalues(self.distroseries.id, self.derivation_parent_ids))

    def _copy_packaging_links(self):
        """Copy the packaging links from the parent series to this one."""
        # We iterate over the parents and copy into the child in
        # sequence to avoid creating duplicates.
        for parent_id in self.derivation_parent_ids:
            self._store.execute("""
                INSERT INTO
                    Packaging(
                        distroseries, sourcepackagename, productseries,
                        packaging, owner)
                SELECT
                    ChildSeries.id,
                    Packaging.sourcepackagename,
                    Packaging.productseries,
                    Packaging.packaging,
                    Packaging.owner
                FROM
                    Packaging
                    -- Joining the parent distroseries permits the query to
                    -- build the data set for the series being updated, yet
                    -- results are in fact the data from the original series.
                    JOIN Distroseries ChildSeries
                        ON Packaging.distroseries = %s
                WHERE
                    -- Select only the packaging links that are in the parent
                    -- that are not in the child.
                    ChildSeries.id = %s
                    AND Packaging.sourcepackagename in (
                        SELECT sourcepackagename
                        FROM Packaging
                        WHERE distroseries in (
                            SELECT id
                            FROM Distroseries
                            WHERE id = %s
                            )
                        EXCEPT
                        SELECT sourcepackagename
                        FROM Packaging
                        WHERE distroseries in (
                            SELECT id
                            FROM Distroseries
                            WHERE id = ChildSeries.id
                            )
                        )
                """ % sqlvalues(parent_id, self.distroseries.id, parent_id))

    def _copy_packagesets(self):
        """Copy packagesets from the parent distroseries."""
        packagesets = self._store.find(
            Packageset,
            Packageset.distroseries_id.is_in(self.derivation_parent_ids))
        parent_to_child = {}
        # Create the packagesets and any archivepermissions if we're not
        # copying cross-distribution.
        parent_distro_ids = [
            parent.distribution.id for parent in self.derivation_parents
        ]
        for parent_ps in packagesets:
            if (self.packagesets_ids
                    and str(parent_ps.id) not in self.packagesets_ids):
                continue
            packageset_set = getUtility(IPackagesetSet)
            # First, try to fetch an existing packageset with this name.
            try:
                child_ps = packageset_set.getByName(parent_ps.name,
                                                    self.distroseries)
            except NoSuchPackageSet:
                # Cross-distro initializations get packagesets owned by
                # the distro owner; otherwise the old owner is preserved.
                if self.distroseries.distribution.id in parent_distro_ids:
                    new_owner = parent_ps.owner
                else:
                    new_owner = self.distroseries.owner
                child_ps = getUtility(IPackagesetSet).new(
                    parent_ps.name,
                    parent_ps.description,
                    new_owner,
                    distroseries=self.distroseries,
                    related_set=parent_ps)
            parent_to_child[parent_ps] = child_ps
            # Copy archivepermissions if we're not copying
            # cross-distribution.
            if (self.distroseries.distribution ==
                    parent_ps.distroseries.distribution):
                self._store.execute("""
                    INSERT INTO Archivepermission
                    (person, permission, archive, packageset, explicit)
                    SELECT person, permission, %s, %s, explicit
                    FROM Archivepermission WHERE packageset = %s
                    """ % sqlvalues(self.distroseries.main_archive,
                                    child_ps.id, parent_ps.id))
        # Copy the relations between sets, and the contents.
        for old_series_ps, new_series_ps in parent_to_child.items():
            old_series_sets = old_series_ps.setsIncluded(direct_inclusion=True)
            for old_series_child in old_series_sets:
                new_series_ps.add(parent_to_child[old_series_child])
            new_series_ps.add(
                old_series_ps.sourcesIncluded(direct_inclusion=True))

    def _copy_pocket_permissions(self):
        """Copy per-distroseries/pocket permissions from the parent series."""
        for parent in self.derivation_parents:
            if self.distroseries.distribution == parent.distribution:
                self._store.execute("""
                    INSERT INTO Archivepermission
                    (person, permission, archive, pocket, distroseries)
                    SELECT person, permission, %s, pocket, %s
                    FROM Archivepermission
                    WHERE pocket IS NOT NULL AND distroseries = %s
                    """ % sqlvalues(self.distroseries.main_archive,
                                    self.distroseries.id, parent.id))
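
A minimal driver sketch (not part of the original listing): the enclosing class and its public entry points are assumed here. In Launchpad the class is InitializeDistroSeries, with initialize() as shown above; a check() step running the _check* validations is assumed.

# Hypothetical usage; `child`, `parent_ids` and `log` are assumed
# fixtures, not taken from the original listing.
initializer = InitializeDistroSeries(child, parent_ids)
try:
    initializer.check()       # assumed wrapper over the _check* methods
    initializer.initialize()  # the copy pipeline shown above
except InitializationError as error:
    log.error("Series initialization failed: %s" % error)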
Example #49
class TranslationMessageImportedFlagUpdater:
    """Populates the is_imported flag from the is_current flag on
    translations."""

    implements(ITunableLoop)

    def __init__(self, transaction, logger, tm_ids):
        self.transaction = transaction
        self.logger = logger
        self.start_at = 0

        self.tm_ids = list(tm_ids)
        self.total = len(self.tm_ids)
        self.logger.info("Fixing up a total of %d TranslationMessages." %
                         (self.total))
        self.store = IMasterStore(Product)

    def isDone(self):
        """See `ITunableLoop`."""
        # When the main loop hits the end of the list of objects,
        # it sets start_at to None.
        return self.start_at is None

    def getNextBatch(self, chunk_size):
        """Return a batch of objects to work with."""
        end_at = self.start_at + int(chunk_size)
        self.logger.debug("Getting translations[%d:%d]..." %
                          (self.start_at, end_at))
        return self.tm_ids[self.start_at:end_at]

    def _updateTranslationMessages(self, tm_ids):
        # Unset imported messages that might be in the way.
        PreviousImported = ClassAlias(TranslationMessage, 'PreviousImported')
        CurrentTranslation = ClassAlias(TranslationMessage,
                                        'CurrentTranslation')
        previous_imported_select = Select(
            PreviousImported.id,
            tables=[PreviousImported, CurrentTranslation],
            where=And(
                PreviousImported.is_current_upstream == True,
                (PreviousImported.potmsgsetID
                 == CurrentTranslation.potmsgsetID),
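                # Match potemplate, treating two NULLs as equal (in SQL,
                # NULL = NULL is not true, hence the explicit Or).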
                Or(
                    And(PreviousImported.potemplateID == None,
                        CurrentTranslation.potemplateID == None),
                    (PreviousImported.potemplateID
                     == CurrentTranslation.potemplateID)),
                PreviousImported.languageID == CurrentTranslation.languageID,
                CurrentTranslation.id.is_in(tm_ids)))

        previous_imported = self.store.find(
            TranslationMessage,
            TranslationMessage.id.is_in(previous_imported_select))
        previous_imported.set(is_current_upstream=False)
        translations = self.store.find(TranslationMessage,
                                       TranslationMessage.id.is_in(tm_ids))
        translations.set(is_current_upstream=True)

    def __call__(self, chunk_size):
        """See `ITunableLoop`.

        Retrieve a batch of TranslationMessages in ascending id order,
        and set the is_imported flag to True on all of them.
        """
        tm_ids = self.getNextBatch(chunk_size)

        if len(tm_ids) == 0:
            self.start_at = None
        else:
            self._updateTranslationMessages(tm_ids)
            self.transaction.commit()
            self.transaction.begin()

            self.start_at += len(tm_ids)
            self.logger.info("Processed %d/%d TranslationMessages." %
                             (self.start_at, self.total))
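
A minimal driver sketch for this loop (illustrative; `tm_ids` would come from a query, and `transaction` is the usual transaction manager):

import logging
import transaction

updater = TranslationMessageImportedFlagUpdater(
    transaction, logging.getLogger('garbo'), tm_ids)
# Drive the ITunableLoop by hand: one batch per call until done.
while not updater.isDone():
    updater(1000)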
Example #50
def close_account(username, log):
    """Close a person's account.

    Return True on success, or log an error message and return False
    """
    store = IMasterStore(Person)
    janitor = getUtility(ILaunchpadCelebrities).janitor

    cur = cursor()
    references = list(postgresql.listReferences(cur, 'person', 'id'))
    postgresql.check_indirect_references(references)

    person = store.using(
        Person,
        LeftJoin(EmailAddress, Person.id == EmailAddress.personID)).find(
            Person,
            Or(Person.name == username,
               Lower(EmailAddress.email) == Lower(username))).one()
    if person is None:
        raise LaunchpadScriptFailure("User %s does not exist" % username)
    person_name = person.name

    # We don't do teams
    if person.is_team:
        raise LaunchpadScriptFailure("%s is a team" % person_name)

    log.info("Closing %s's account" % person_name)

    def table_notification(table):
        log.debug("Handling the %s table" % table)

    # All names starting with 'removed' are blacklisted, so this will always
    # succeed.
    new_name = 'removed%d' % person.id

    # Some references can safely remain in place and link to the cleaned-out
    # Person row.
    skip = {
        # These references express some kind of audit trail.  The actions in
        # question still happened, and in some cases the rows may still have
        # functional significance (e.g. subscriptions or access grants), but
        # we no longer identify the actor.
        ('accessartifactgrant', 'grantor'),
        ('accesspolicygrant', 'grantor'),
        ('binarypackagepublishinghistory', 'removed_by'),
        ('branch', 'registrant'),
        ('branchmergeproposal', 'merge_reporter'),
        ('branchmergeproposal', 'merger'),
        ('branchmergeproposal', 'queuer'),
        ('branchmergeproposal', 'registrant'),
        ('branchmergeproposal', 'reviewer'),
        ('branchsubscription', 'subscribed_by'),
        ('bug', 'owner'),
        ('bug', 'who_made_private'),
        ('bugactivity', 'person'),
        ('bugnomination', 'decider'),
        ('bugnomination', 'owner'),
        ('bugtask', 'owner'),
        ('bugsubscription', 'subscribed_by'),
        ('codeimport', 'owner'),
        ('codeimport', 'registrant'),
        ('codeimportevent', 'person'),
        ('faq', 'last_updated_by'),
        ('featureflagchangelogentry', 'person'),
        ('gitactivity', 'changee'),
        ('gitactivity', 'changer'),
        ('gitrepository', 'registrant'),
        ('gitrule', 'creator'),
        ('gitrulegrant', 'grantor'),
        ('gitsubscription', 'subscribed_by'),
        ('message', 'owner'),
        ('messageapproval', 'disposed_by'),
        ('messageapproval', 'posted_by'),
        ('packagecopyrequest', 'requester'),
        ('packagediff', 'requester'),
        ('packageupload', 'signing_key_owner'),
        ('personlocation', 'last_modified_by'),
        ('persontransferjob', 'major_person'),
        ('persontransferjob', 'minor_person'),
        ('poexportrequest', 'person'),
        ('pofile', 'lasttranslator'),
        ('pofiletranslator', 'person'),
        ('product', 'registrant'),
        ('question', 'answerer'),
        ('questionreopening', 'answerer'),
        ('questionreopening', 'reopener'),
        ('snapbuild', 'requester'),
        ('sourcepackagepublishinghistory', 'creator'),
        ('sourcepackagepublishinghistory', 'removed_by'),
        ('sourcepackagepublishinghistory', 'sponsor'),
        ('sourcepackagerecipebuild', 'requester'),
        ('sourcepackagerelease', 'creator'),
        ('sourcepackagerelease', 'maintainer'),
        ('sourcepackagerelease', 'signing_key_owner'),
        ('specification', 'approver'),
        ('specification', 'completer'),
        ('specification', 'drafter'),
        ('specification', 'goal_decider'),
        ('specification', 'goal_proposer'),
        ('specification', 'last_changed_by'),
        ('specification', 'starter'),
        ('structuralsubscription', 'subscribed_by'),
        ('teammembership', 'acknowledged_by'),
        ('teammembership', 'proposed_by'),
        ('teammembership', 'reviewed_by'),
        ('translationimportqueueentry', 'importer'),
        ('translationmessage', 'reviewer'),
        ('translationmessage', 'submitter'),
        ('translationrelicensingagreement', 'person'),
        ('usertouseremail', 'recipient'),
        ('usertouseremail', 'sender'),
        ('xref', 'creator'),

        # This is maintained by trigger functions and a garbo job.  It
        # doesn't need to be updated immediately.
        ('bugsummary', 'viewed_by'),

        # XXX cjwatson 2019-05-02 bug=1827399: This is suboptimal because it
        # does retain some personal information, but it's currently hard to
        # deal with due to the size and complexity of references to it.  We
        # can hopefully provide a garbo job for this eventually.
        ('revisionauthor', 'person'),
    }
    reference_names = {(src_tab, src_col)
                       for src_tab, src_col, _, _, _, _ in references}
    for src_tab, src_col in skip:
        if (src_tab, src_col) not in reference_names:
            raise AssertionError(
                "%s.%s is not a Person reference; possible typo?" %
                (src_tab, src_col))

    # XXX cjwatson 2018-11-29: Registrants could possibly be left as-is, but
    # perhaps we should pretend that the registrant was ~registry in that
    # case instead?

    # Remove the EmailAddress. This is the most important step, as
    # people requesting account removal seem to primarily be interested
    # in ensuring we no longer store this information.
    table_notification('EmailAddress')
    store.find(EmailAddress, EmailAddress.personID == person.id).remove()

    # Clean out personal details from the Person table
    table_notification('Person')
    person.display_name = 'Removed by request'
    person.name = new_name
    person.homepage_content = None
    person.icon = None
    person.mugshot = None
    person.hide_email_addresses = False
    person.registrant = None
    person.logo = None
    person.creation_rationale = PersonCreationRationale.UNKNOWN
    person.creation_comment = None

    # Keep the corresponding PersonSettings row, but reset everything to the
    # defaults.
    table_notification('PersonSettings')
    store.find(PersonSettings, PersonSettings.personID == person.id).set(
        selfgenerated_bugnotifications=DEFAULT,
        # XXX cjwatson 2018-11-29: These two columns have NULL defaults, but
        # perhaps shouldn't?
        expanded_notification_footers=False,
        require_strong_email_authentication=False)
    skip.add(('personsettings', 'person'))

    # Remove almost everything from the Account row and the corresponding
    # OpenIdIdentifier rows, preserving only a minimal audit trail.
    if person.account is not None:
        table_notification('Account')
        account = removeSecurityProxy(person.account)
        account.displayname = 'Removed by request'
        account.creation_rationale = AccountCreationRationale.UNKNOWN
        person.setAccountStatus(AccountStatus.CLOSED, janitor,
                                "Closed using close-account.")

        table_notification('OpenIdIdentifier')
        store.find(OpenIdIdentifier,
                   OpenIdIdentifier.account_id == account.id).remove()

    # Reassign their bugs
    table_notification('BugTask')
    store.find(BugTask, BugTask.assigneeID == person.id).set(assigneeID=None)

    # Reassign questions assigned to the user, and close all their questions
    # in non-final states since nobody else can.
    table_notification('Question')
    store.find(Question, Question.assigneeID == person.id).set(assigneeID=None)
    owned_non_final_questions = store.find(
        Question, Question.ownerID == person.id,
        Question.status.is_in([
            QuestionStatus.OPEN,
            QuestionStatus.NEEDSINFO,
            QuestionStatus.ANSWERED,
        ]))
    owned_non_final_questions.set(
        status=QuestionStatus.SOLVED,
        whiteboard=(
            'Closed by Launchpad due to owner requesting account removal'))
    skip.add(('question', 'owner'))

    # Remove rows from tables in simple cases in the given order
    removals = [
        # Trash their email addresses. People who request complete account
        # removal would be unhappy if they reregistered with their old email
        # address and this resurrected their deleted account, as the email
        # address is probably the piece of data we store that they were most
        # concerned with being removed from our systems.
        ('EmailAddress', 'person'),

        # Trash their codes of conduct and GPG keys
        ('SignedCodeOfConduct', 'owner'),
        ('GpgKey', 'owner'),

        # Subscriptions and notifications
        ('BranchSubscription', 'person'),
        ('BugMute', 'person'),
        ('BugNotificationRecipient', 'person'),
        ('BugSubscription', 'person'),
        ('BugSubscriptionFilterMute', 'person'),
        ('GitSubscription', 'person'),
        ('MailingListSubscription', 'person'),
        ('QuestionSubscription', 'person'),
        ('SpecificationSubscription', 'person'),
        ('StructuralSubscription', 'subscriber'),

        # Personal stuff, freeing up the namespace for others who want to play
        # or just to remove any fingerprints identifying the user.
        ('IrcId', 'person'),
        ('JabberId', 'person'),
        ('WikiName', 'person'),
        ('PersonLanguage', 'person'),
        ('PersonLocation', 'person'),
        ('SshKey', 'person'),

        # Karma
        ('Karma', 'person'),
        ('KarmaCache', 'person'),
        ('KarmaTotalCache', 'person'),

        # Team memberships
        ('TeamMembership', 'person'),
        ('TeamParticipation', 'person'),

        # Contacts
        ('AnswerContact', 'person'),

        # Pending items in queues
        ('POExportRequest', 'person'),

        # Access grants
        ('AccessArtifactGrant', 'grantee'),
        ('AccessPolicyGrant', 'grantee'),
        ('ArchivePermission', 'person'),
        ('GitRuleGrant', 'grantee'),
        ('SharingJob', 'grantee'),

        # Soyuz reporting
        ('LatestPersonSourcePackageReleaseCache', 'creator'),
        ('LatestPersonSourcePackageReleaseCache', 'maintainer'),

        # "Affects me too" information
        ('BugAffectsPerson', 'person'),
    ]
    for table, person_id_column in removals:
        table_notification(table)
        store.execute(
            """
            DELETE FROM %(table)s WHERE %(person_id_column)s = ?
            """ % {
                'table': table,
                'person_id_column': person_id_column,
            }, (person.id, ))

    # Trash Sprint Attendance records in the future.
    table_notification('SprintAttendance')
    store.execute(
        """
        DELETE FROM SprintAttendance
        USING Sprint
        WHERE Sprint.id = SprintAttendance.sprint
            AND attendee = ?
            AND Sprint.time_starts > CURRENT_TIMESTAMP AT TIME ZONE 'UTC'
        """, (person.id, ))
    # Any remaining past sprint attendance records can harmlessly refer to
    # the placeholder person row.
    skip.add(('sprintattendance', 'attendee'))

    # generate_ppa_htaccess currently relies on seeing active
    # ArchiveAuthToken rows so that it knows which ones to remove from
    # .htpasswd files on disk in response to the cancellation of the
    # corresponding ArchiveSubscriber rows; but even once PPA authorisation
    # is handled dynamically, we probably still want to have the per-person
    # audit trail here.
    archive_subscriber_ids = set(
        store.find(
            ArchiveSubscriber.id, ArchiveSubscriber.subscriber_id == person.id,
            ArchiveSubscriber.status == ArchiveSubscriberStatus.CURRENT))
    if archive_subscriber_ids:
        getUtility(IArchiveSubscriberSet).cancel(archive_subscriber_ids,
                                                 janitor)
    skip.add(('archivesubscriber', 'subscriber'))
    skip.add(('archiveauthtoken', 'person'))

    # Remove hardware submissions.
    table_notification('HWSubmissionDevice')
    store.execute(
        """
        DELETE FROM HWSubmissionDevice
        USING HWSubmission
        WHERE HWSubmission.id = HWSubmissionDevice.submission
            AND owner = ?
        """, (person.id, ))
    table_notification('HWSubmission')
    store.find(HWSubmission, HWSubmission.ownerID == person.id).remove()

    has_references = False

    # Check for active related projects, and skip inactive ones.
    for col in 'bug_supervisor', 'driver', 'owner':
        # Raw SQL because otherwise using Product._owner while displaying it
        # as Product.owner is too fiddly.
        result = store.execute(
            """
            SELECT COUNT(*) FROM product WHERE active AND %(col)s = ?
            """ % {'col': col}, (person.id, ))
        count = result.get_one()[0]
        if count:
            log.error("User %s is still referenced by %d product.%s values" %
                      (person_name, count, col))
            has_references = True
        skip.add(('product', col))
    for col in 'driver', 'owner':
        count = store.find(ProductSeries, ProductSeries.product == Product.id,
                           Product.active,
                           getattr(ProductSeries, col) == person).count()
        if count:
            log.error(
                "User %s is still referenced by %d productseries.%s values" %
                (person_name, count, col))
            has_references = True
        skip.add(('productseries', col))

    # Closing the account will only work if all references have been handled
    # by this point.  If not, it's safer to bail out.  It's OK if this
    # doesn't work in all conceivable situations, since some of them may
    # require careful thought and decisions by a human administrator.
    for src_tab, src_col, ref_tab, ref_col, updact, delact in references:
        if (src_tab, src_col) in skip:
            continue
        result = store.execute(
            """
            SELECT COUNT(*) FROM %(src_tab)s WHERE %(src_col)s = ?
            """ % {
                'src_tab': src_tab,
                'src_col': src_col,
            }, (person.id, ))
        count = result.get_one()[0]
        if count:
            log.error("User %s is still referenced by %d %s.%s values" %
                      (person_name, count, src_tab, src_col))
            has_references = True
    if has_references:
        raise LaunchpadScriptFailure("User %s is still referenced" %
                                     person_name)

    return True
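
A sketch of calling close_account from a maintenance script (hypothetical wrapper; the transaction handling is assumed, not taken from the original):

import logging
import transaction

log = logging.getLogger('close-account')
try:
    close_account('some-username', log)
    transaction.commit()
except LaunchpadScriptFailure:
    # Leave the database untouched if the account cannot be closed.
    transaction.abort()
    raise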
Example #51
class BugWatchScheduler(TunableLoop):
    """An `ITunableLoop` for scheduling BugWatches."""

    maximum_chunk_size = 1000

    def __init__(self,
                 log,
                 abort_time=None,
                 max_delay_days=None,
                 max_sample_size=None):
        super(BugWatchScheduler, self).__init__(log, abort_time)
        self.transaction = transaction
        self.store = IMasterStore(BugWatch)

        if max_delay_days is None:
            max_delay_days = MAX_DELAY_DAYS
        if max_sample_size is None:
            max_sample_size = MAX_SAMPLE_SIZE
        self.max_sample_size = max_sample_size

        self.delay_coefficient = get_delay_coefficient(max_delay_days,
                                                       max_sample_size)

    def __call__(self, chunk_size):
        """Run the loop."""
        # XXX 2010-03-25 gmb bug=198767:
        #     We cast chunk_size to an integer to ensure that we're not
        #     trying to slice using floats or anything similarly
        #     foolish. We shouldn't have to do this.
        chunk_size = int(chunk_size)
        query = """
        UPDATE BugWatch
            SET next_check =
                COALESCE(
                    lastchecked + interval '1 day',
                    now() AT TIME ZONE 'UTC') +
                (interval '1 day' * (%s * recent_failure_count))
            FROM (
                SELECT bug_watch.id,
                    (SELECT COUNT(*)
                        FROM (SELECT 1
                            FROM bugwatchactivity
                           WHERE bugwatchactivity.bug_watch = bug_watch.id
                             AND bugwatchactivity.result NOT IN (%s)
                           ORDER BY bugwatchactivity.id DESC
                           LIMIT %s) AS recent_failures
                    ) AS recent_failure_count
                FROM BugWatch AS bug_watch
                WHERE bug_watch.next_check IS NULL
                LIMIT %s
            ) AS counts
        WHERE BugWatch.id = counts.id
        """ % sqlvalues(self.delay_coefficient,
                        BUG_WATCH_ACTIVITY_SUCCESS_STATUSES,
                        self.max_sample_size, chunk_size)
        self.transaction.begin()
        result = self.store.execute(query)
        self.log.debug("Scheduled %s watches" % result.rowcount)
        self.transaction.commit()

    def isDone(self):
        """Return True when there are no more watches to schedule."""
        return self.store.find(BugWatch,
                               BugWatch.next_check == None).is_empty()
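
Driving the scheduler follows the same manual ITunableLoop pattern (the chunk size is illustrative; in production these loops are normally run by the garbage-collection machinery):

import logging

scheduler = BugWatchScheduler(logging.getLogger('bugwatch'))
while not scheduler.isDone():
    scheduler(1000)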
Example #52
class TestDistroSeriesDifferenceJobEndToEnd(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestDistroSeriesDifferenceJobEndToEnd, self).setUp()
        self.store = IMasterStore(DistroSeriesDifference)

    def getJobSource(self):
        return getUtility(IDistroSeriesDifferenceJobSource)

    def makeDerivedDistroSeries(self):
        dsp = self.factory.makeDistroSeriesParent()
        return dsp

    def createPublication(self,
                          source_package_name,
                          versions,
                          distroseries,
                          archive=None):
        if archive is None:
            archive = distroseries.main_archive
        changelog_lfa = self.factory.makeChangelog(source_package_name.name,
                                                   versions)
        # Commit for the Librarian's sake.
        transaction.commit()
        spr = self.factory.makeSourcePackageRelease(
            sourcepackagename=source_package_name,
            version=versions[0],
            changelog=changelog_lfa)
        return self.factory.makeSourcePackagePublishingHistory(
            sourcepackagerelease=spr,
            archive=archive,
            distroseries=distroseries,
            status=PackagePublishingStatus.PUBLISHED,
            pocket=PackagePublishingPocket.RELEASE)

    def findDSD(self, derived_series, source_package_name):
        return self.store.find(
            DistroSeriesDifference,
            DistroSeriesDifference.derived_series == derived_series,
            DistroSeriesDifference.source_package_name == source_package_name)

    def runJob(self, job):
        switch_dbuser('distroseriesdifferencejob')
        dsdjob = DistroSeriesDifferenceJob(job)
        dsdjob.start()
        dsdjob.run()
        dsdjob.complete()
        switch_dbuser('launchpad')

    def test_parent_gets_newer(self):
        # When a new source package is uploaded to the parent distroseries,
        # a job is created that updates the relevant DSD.
        dsp = self.makeDerivedDistroSeries()
        source_package_name = self.factory.makeSourcePackageName()
        self.createPublication(source_package_name, ['1.0-1derived1', '1.0-1'],
                               dsp.derived_series)
        self.createPublication(source_package_name, ['1.0-1'],
                               dsp.parent_series)

        # Creating the SPPHs has created jobs for us, so grab them off
        # the queue.
        jobs = find_waiting_jobs(dsp.derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = find_dsd_for(dsp, source_package_name)
        self.assertEqual(1, ds_diff.count())
        self.assertEqual('1.0-1', ds_diff[0].parent_source_version)
        self.assertEqual('1.0-1derived1', ds_diff[0].source_version)
        self.assertEqual('1.0-1', ds_diff[0].base_version)
        # Now create a 1.0-2 upload to the parent.
        self.createPublication(source_package_name, ['1.0-2', '1.0-1'],
                               dsp.parent_series)
        jobs = find_waiting_jobs(dsp.derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        # And the DSD we have a hold of will have updated.
        self.assertEqual('1.0-2', ds_diff[0].parent_source_version)
        self.assertEqual('1.0-1derived1', ds_diff[0].source_version)
        self.assertEqual('1.0-1', ds_diff[0].base_version)

    def test_child_gets_newer(self):
        # When a new source is uploaded to the child distroseries, the DSD is
        # updated and auto-blacklisted.
        dsp = self.makeDerivedDistroSeries()
        source_package_name = self.factory.makeSourcePackageName()
        self.createPublication(source_package_name, ['1.0-1'],
                               dsp.derived_series)
        self.createPublication(source_package_name, ['1.0-1'],
                               dsp.parent_series)
        jobs = find_waiting_jobs(dsp.derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = find_dsd_for(dsp, source_package_name)
        self.assertEqual(DistroSeriesDifferenceStatus.RESOLVED,
                         ds_diff[0].status)
        self.createPublication(source_package_name, ['2.0-0derived1', '1.0-1'],
                               dsp.derived_series)
        jobs = find_waiting_jobs(dsp.derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        self.assertEqual(DistroSeriesDifferenceStatus.BLACKLISTED_CURRENT,
                         ds_diff[0].status)
        self.assertEqual('1.0-1', ds_diff[0].base_version)

        # An additional upload should not change the blacklisted status.
        self.createPublication(source_package_name, ['2.0-0derived2', '1.0-1'],
                               dsp.derived_series)
        jobs = find_waiting_jobs(dsp.derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        self.assertEqual(DistroSeriesDifferenceStatus.BLACKLISTED_CURRENT,
                         ds_diff[0].status)

    def test_child_is_synced(self):
        # If the source package gets 'synced' to the child from the parent,
        # the job correctly updates the DSD.
        dsp = self.makeDerivedDistroSeries()
        derived_series = dsp.derived_series
        source_package_name = self.factory.makeSourcePackageName()
        self.createPublication(source_package_name, ['1.0-1derived1', '1.0-1'],
                               derived_series)
        self.createPublication(source_package_name, ['1.0-2', '1.0-1'],
                               dsp.parent_series)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = self.findDSD(derived_series, source_package_name)
        self.assertEqual('1.0-1', ds_diff[0].base_version)
        self.createPublication(source_package_name, ['1.0-2', '1.0-1'],
                               derived_series)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        self.assertEqual(DistroSeriesDifferenceStatus.RESOLVED,
                         ds_diff[0].status)

    def test_only_in_child(self):
        # If a source package only exists in the child distroseries, the DSD
        # is created with the right type.
        dsp = self.makeDerivedDistroSeries()
        derived_series = dsp.derived_series
        source_package_name = self.factory.makeSourcePackageName()
        self.createPublication(source_package_name, ['1.0-0derived1'],
                               derived_series)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = self.findDSD(derived_series, source_package_name)
        self.assertEqual(DistroSeriesDifferenceType.UNIQUE_TO_DERIVED_SERIES,
                         ds_diff[0].difference_type)

    def test_only_in_parent(self):
        # If a source package only exists in the parent distroseries, the DSD
        # is created with the right type.
        dsp = self.makeDerivedDistroSeries()
        derived_series = dsp.derived_series
        source_package_name = self.factory.makeSourcePackageName()
        self.createPublication(source_package_name, ['1.0-1'],
                               dsp.parent_series)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = self.findDSD(derived_series, source_package_name)
        self.assertEqual(
            DistroSeriesDifferenceType.MISSING_FROM_DERIVED_SERIES,
            ds_diff[0].difference_type)

    def test_deleted_in_parent(self):
        # If a source package is deleted in the parent, a job is created, and
        # the DSD is updated correctly.
        dsp = self.makeDerivedDistroSeries()
        derived_series = dsp.derived_series
        source_package_name = self.factory.makeSourcePackageName()
        self.createPublication(source_package_name, ['1.0-1'], derived_series)
        spph = self.createPublication(source_package_name, ['1.0-1'],
                                      dsp.parent_series)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = self.findDSD(derived_series, source_package_name)
        self.assertEqual(DistroSeriesDifferenceStatus.RESOLVED,
                         ds_diff[0].status)
        spph.requestDeletion(self.factory.makePerson())
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        self.assertEqual(DistroSeriesDifferenceType.UNIQUE_TO_DERIVED_SERIES,
                         ds_diff[0].difference_type)

    def test_deleted_in_child(self):
        # If a source package is deleted in the child, a job is created, and
        # the DSD is updated correctly.
        dsp = self.makeDerivedDistroSeries()
        derived_series = dsp.derived_series
        source_package_name = self.factory.makeSourcePackageName()
        spph = self.createPublication(source_package_name, ['1.0-1'],
                                      derived_series)
        self.createPublication(source_package_name, ['1.0-1'],
                               dsp.parent_series)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = self.findDSD(derived_series, source_package_name)
        self.assertEqual(DistroSeriesDifferenceStatus.RESOLVED,
                         ds_diff[0].status)
        spph.requestDeletion(self.factory.makePerson())
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        self.assertEqual(
            DistroSeriesDifferenceType.MISSING_FROM_DERIVED_SERIES,
            ds_diff[0].difference_type)

    def test_no_job_for_PPA(self):
        # If a source package is uploaded to a PPA, a job is not created.
        dsp = self.makeDerivedDistroSeries()
        source_package_name = self.factory.makeSourcePackageName()
        ppa = self.factory.makeArchive()
        self.createPublication(source_package_name, ['1.0-1'],
                               dsp.derived_series, ppa)
        self.assertContentEqual([],
                                find_waiting_jobs(dsp.derived_series,
                                                  source_package_name,
                                                  dsp.parent_series))

    def test_no_job_for_PPA_with_deleted_source(self):
        # If a source package is deleted from a PPA, no job is created.
        dsp = self.makeDerivedDistroSeries()
        derived_series = dsp.derived_series
        source_package_name = self.factory.makeSourcePackageName()
        ppa = self.factory.makeArchive()
        spph = self.createPublication(source_package_name, ['1.0-1'],
                                      derived_series, ppa)
        spph.requestDeletion(ppa.owner)
        self.assertContentEqual([],
                                find_waiting_jobs(derived_series,
                                                  source_package_name,
                                                  dsp.parent_series))

    def test_update_deletes_diffs(self):
        # When a DSD is updated, the diffs are invalidated.
        dsp = self.makeDerivedDistroSeries()
        derived_series = dsp.derived_series
        source_package_name = self.factory.makeSourcePackageName()
        self.createPublication(source_package_name, ['1.0-1derived1', '1.0-1'],
                               derived_series)
        self.createPublication(source_package_name, ['1.0-2', '1.0-1'],
                               dsp.parent_series)
        spr = self.factory.makeSourcePackageRelease(
            sourcepackagename=source_package_name, version='1.0-1')
        self.factory.makeSourcePackagePublishingHistory(
            sourcepackagerelease=spr,
            archive=dsp.parent_series.main_archive,
            distroseries=dsp.parent_series,
            status=PackagePublishingStatus.SUPERSEDED)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        ds_diff = self.findDSD(derived_series, source_package_name)
        ds_diff[0].requestPackageDiffs(self.factory.makePerson())
        self.assertIsNot(None, ds_diff[0].package_diff)
        self.assertIsNot(None, ds_diff[0].parent_package_diff)
        self.createPublication(source_package_name,
                               ['1.0-3', '1.0-2', '1.0-1'], dsp.parent_series)
        jobs = find_waiting_jobs(derived_series, source_package_name,
                                 dsp.parent_series)
        self.runJob(jobs[0])
        # Since the diff showing the changes from 1.0-1 to 1.0-1derived1 is
        # still valid, it isn't reset, but the parent diff is.
        self.assertIsNot(None, ds_diff[0].package_diff)
        self.assertIs(None, ds_diff[0].parent_package_diff)
Example #53
    @classmethod
    def create(cls,
               child,
               parents,
               arches=(),
               archindep_archtag=None,
               packagesets=None,
               rebuild=False,
               overlays=(),
               overlay_pockets=(),
               overlay_components=()):
        """Create a new `InitializeDistroSeriesJob`.

        :param child: The child `IDistroSeries` to initialize
        :param parents: An iterable of `IDistroSeries` of parents to
            initialize from.
        :param arches: An iterable of architecture tags which lists the
            architectures to enable in the child.
        :param packagesets: An iterable of `PackageSet` IDs from which to
            copy packages in parents.
        :param rebuild: A boolean saying whether the child should rebuild
            all the copied sources (if True) or copy the parents'
            binaries (if False).
        :param overlays: An iterable of booleans corresponding exactly to
            each parent in the "parents" parameter.  Each boolean says
            whether the corresponding parent is an overlay for the child.
            An overlay allows the child to use the parent's packages for
            build dependencies, and the overlay_pockets and
            overlay_components parameters dictate which pocket and
            component of the parent the dependencies may come from.
        :param overlay_pockets: An iterable of textual pocket names
            corresponding exactly to each parent.  The name *must* be set
            if the corresponding overlays boolean is True.
        :param overlay_components: An iterable of textual component names
            corresponding exactly to each parent.  The name *must* be set
            if the corresponding overlays boolean is True.
        """
        store = IMasterStore(DistributionJob)
        # Only one InitializeDistroSeriesJob can be present at a time.
        distribution_job = store.find(
            DistributionJob, DistributionJob.job_id == Job.id,
            DistributionJob.job_type == cls.class_job_type,
            DistributionJob.distroseries_id == child.id).one()
        if distribution_job is not None:
            if distribution_job.job.status == JobStatus.FAILED:
                # Delete the failed job to allow initialization of the series
                # to be rescheduled.
                store.remove(distribution_job)
                store.remove(distribution_job.job)
            elif distribution_job.job.status == JobStatus.COMPLETED:
                raise InitializationCompleted(cls(distribution_job))
            else:
                raise InitializationPending(cls(distribution_job))
        # Schedule the initialization.
        metadata = {
            'parents': parents,
            'arches': arches,
            'archindep_archtag': archindep_archtag,
            'packagesets': packagesets,
            'rebuild': rebuild,
            'overlays': overlays,
            'overlay_pockets': overlay_pockets,
            'overlay_components': overlay_components,
        }
        distribution_job = DistributionJob(child.distribution, child,
                                           cls.class_job_type, metadata)
        store.add(distribution_job)
        derived_job = cls(distribution_job)
        derived_job.celeryRunOnCommit()
        return derived_job
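
# A minimal usage sketch of create(): 'child' and 'parent' are assumed
# IDistroSeries objects and the pocket/component names are illustrative.
# The three overlay iterables must line up one-to-one with 'parents',
# and callers should handle the two exceptions raised above, each of
# which is constructed with the existing job as its first argument.
#
#     try:
#         job = InitializeDistroSeriesJob.create(
#             child, [parent],
#             overlays=(True,),
#             overlay_pockets=('Updates',),
#             overlay_components=('main',))
#     except InitializationPending as e:
#         job = e.args[0]  # Initialization already scheduled or running.
#     except InitializationCompleted:
#         job = None  # The series was already initialized.
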
Example #54
    def _api_add_or_remove(self, clauses, handler):
        """Look up the data to be added/removed and call the handler."""
        store = IMasterStore(Packageset)
        data = list(store.find(*clauses))
        if len(data) > 0:
            handler(data, store)
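
    # A hypothetical sibling caller, sketched for context: look up the
    # SourcePackageName rows named in 'names' and hand them to a handler
    # with the (data, store) signature expected above. The clause and
    # the _addSourcePackageNames helper are illustrative assumptions,
    # not actual Launchpad call sites.
    def _api_add_sources(self, names):
        clauses = (
            SourcePackageName,
            SourcePackageName.name.is_in(names),
        )
        self._api_add_or_remove(clauses, self._addSourcePackageNames)
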
class TestPersonSetCreateByOpenId(TestCaseWithFactory):
    layer = DatabaseFunctionalLayer

    def setUp(self):
        super(TestPersonSetCreateByOpenId, self).setUp()
        self.person_set = getUtility(IPersonSet)
        self.store = IMasterStore(Account)

        # Generate some valid test data.
        self.account = self.makeAccount()
        self.identifier = self.makeOpenIdIdentifier(self.account, u'whatever')
        self.person = self.makePerson(self.account)
        self.email = self.makeEmailAddress(email='*****@*****.**',
                                           person=self.person)

    def makeAccount(self):
        return self.store.add(
            Account(displayname='Displayname',
                    creation_rationale=AccountCreationRationale.UNKNOWN,
                    status=AccountStatus.ACTIVE))

    def makeOpenIdIdentifier(self, account, identifier):
        openid_identifier = OpenIdIdentifier()
        openid_identifier.identifier = identifier
        openid_identifier.account = account
        return self.store.add(openid_identifier)

    def makePerson(self, account):
        return self.store.add(
            Person(name='acc%d' % account.id,
                   account=account,
                   displayname='Displayname',
                   creation_rationale=PersonCreationRationale.UNKNOWN))

    def makeEmailAddress(self, email, person):
        return self.store.add(
            EmailAddress(email=email,
                         account=person.account,
                         person=person,
                         status=EmailAddressStatus.PREFERRED))

    def testAllValid(self):
        found, updated = self.person_set.getOrCreateByOpenIDIdentifier(
            self.identifier.identifier, self.email.email, 'Ignored Name',
            PersonCreationRationale.UNKNOWN, 'No Comment')
        found = removeSecurityProxy(found)

        self.assertIs(False, updated)
        self.assertIs(self.person, found)
        self.assertIs(self.account, found.account)
        self.assertIs(self.email, found.preferredemail)
        self.assertIs(self.email.account, self.account)
        self.assertIs(self.email.person, self.person)
        self.assertEqual([self.identifier],
                         list(self.account.openid_identifiers))

    def testEmailAddressCaseInsensitive(self):
        # As per testAllValid, but the email address used for the lookup
        # is all upper case.
        found, updated = self.person_set.getOrCreateByOpenIDIdentifier(
            self.identifier.identifier, self.email.email.upper(),
            'Ignored Name', PersonCreationRationale.UNKNOWN, 'No Comment')
        found = removeSecurityProxy(found)

        self.assertIs(False, updated)
        self.assertIs(self.person, found)
        self.assertIs(self.account, found.account)
        self.assertIs(self.email, found.preferredemail)
        self.assertIs(self.email.account, self.account)
        self.assertIs(self.email.person, self.person)
        self.assertEqual([self.identifier],
                         list(self.account.openid_identifiers))

    def testNewOpenId(self):
        # The account is looked up by email and the new OpenID
        # identifier attached. We can do this because we trust our
        # OpenID provider.
        new_identifier = u'newident'
        found, updated = self.person_set.getOrCreateByOpenIDIdentifier(
            new_identifier, self.email.email, 'Ignored Name',
            PersonCreationRationale.UNKNOWN, 'No Comment')
        found = removeSecurityProxy(found)

        self.assertIs(True, updated)
        self.assertIs(self.person, found)
        self.assertIs(self.account, found.account)
        self.assertIs(self.email, found.preferredemail)
        self.assertIs(self.email.account, self.account)
        self.assertIs(self.email.person, self.person)

        # Old OpenId Identifier still attached.
        self.assertIn(self.identifier, list(self.account.openid_identifiers))

        # So is our new one.
        identifiers = [
            identifier.identifier
            for identifier in self.account.openid_identifiers
        ]
        self.assertIn(new_identifier, identifiers)

    def testNewAccountAndIdentifier(self):
        # If neither the OpenID identifier nor the email address is
        # found, we create everything.
        new_email = u'*****@*****.**'
        new_identifier = u'new_identifier'
        found, updated = self.person_set.getOrCreateByOpenIDIdentifier(
            new_identifier, new_email, 'New Name',
            PersonCreationRationale.UNKNOWN, 'No Comment')
        found = removeSecurityProxy(found)

        # We have a new Person
        self.assertIs(True, updated)
        self.assertIsNot(None, found)

        # It is correctly linked to an account, email address and
        # identifier.
        self.assertIs(found, found.preferredemail.person)
        self.assertEqual(new_identifier,
                         found.account.openid_identifiers.any().identifier)

    def testNoAccount(self):
        # EmailAddress is linked to a Person, but there is no Account.
        # Convert this stub into something valid.
        self.email.account = None
        self.email.status = EmailAddressStatus.NEW
        self.person.account = None
        new_identifier = u'new_identifier'
        found, updated = self.person_set.getOrCreateByOpenIDIdentifier(
            new_identifier, self.email.email, 'Ignored',
            PersonCreationRationale.UNKNOWN, 'No Comment')
        found = removeSecurityProxy(found)

        self.assertTrue(updated)

        self.assertIsNot(None, found.account)
        self.assertEqual(new_identifier,
                         found.account.openid_identifiers.any().identifier)
        self.assertIs(self.email.person, found)
        self.assertEqual(EmailAddressStatus.PREFERRED, self.email.status)

    def testEmailAddressAccountAndOpenIDAccountAreDifferent(self):
        # The EmailAddress and OpenId Identifier are both in the database,
        # but they are not linked to the same account. In this case, the
        # OpenId Identifier trumps the EmailAddress's account.
        self.identifier.account = self.store.find(Account,
                                                  displayname='Foo Bar').one()
        email_account = self.email.account

        found, updated = self.person_set.getOrCreateByOpenIDIdentifier(
            self.identifier.identifier, self.email.email, 'New Name',
            PersonCreationRationale.UNKNOWN, 'No Comment')
        found = removeSecurityProxy(found)

        self.assertFalse(updated)
        self.assertIs(IPerson(self.identifier.account), found)

        self.assertIs(found.account, self.identifier.account)
        self.assertIn(self.identifier, list(found.account.openid_identifiers))
        self.assertIs(email_account, self.email.account)

    def testEmptyOpenIDIdentifier(self):
        self.assertRaises(AssertionError,
                          self.person_set.getOrCreateByOpenIDIdentifier, u'',
                          '*****@*****.**', 'New Name',
                          PersonCreationRationale.UNKNOWN, 'No Comment')

    def testTeamEmailAddress(self):
        # If the EmailAddress is linked to a team, login fails. There is
        # no way to automatically recover -- someone must manually fix
        # the email address of the team or the SSO account.
        self.factory.makeTeam(email="*****@*****.**")

        self.assertRaises(TeamEmailAddressError,
                          self.person_set.getOrCreateByOpenIDIdentifier,
                          u'other-openid-identifier', '*****@*****.**',
                          'New Name', PersonCreationRationale.UNKNOWN,
                          'No Comment')

    def testDeactivatedAccount(self):
        # Logging into a deactivated account with a new email address
        # reactivates the account, adds that email address, and sets it
        # as preferred.
        addr = '*****@*****.**'
        self.person.preDeactivate('I hate life.')
        self.assertEqual(AccountStatus.DEACTIVATED, self.person.account_status)
        self.assertIs(None, self.person.preferredemail)
        found, updated = self.person_set.getOrCreateByOpenIDIdentifier(
            self.identifier.identifier, addr, 'New Name',
            PersonCreationRationale.UNKNOWN, 'No Comment')
        self.assertEqual(AccountStatus.ACTIVE, self.person.account_status)
        self.assertEqual(addr, self.person.preferredemail.email)

    @classmethod
    def getByID(cls, build_id):
        """See `ISourcePackageRecipeBuildSource`."""
        store = IMasterStore(SourcePackageRecipeBuild)
        return store.find(cls, cls.id == build_id).one()
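
    # Usage sketch: Storm's .one() returns None when no row matches and
    # raises NotOneError if more than one does, so callers can test the
    # not-found case directly. The id below is a placeholder.
    #
    #     build = SourcePackageRecipeBuild.getByID(42)
    #     if build is None:
    #         pass  # No such build.
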
class TestBugSummary(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestBugSummary, self).setUp()

        # Some things we are testing are impossible as mere mortals,
        # but might happen from the SQL command line.
        switch_dbuser('testadmin')

        self.store = IMasterStore(BugSummary)

    def getCount(self, person, **kw_find_expr):
        self._maybe_rollup()
        store = self.store
        user_with, user_where = get_bugsummary_filter_for_user(person)
        if user_with:
            store = store.with_(user_with)
        summaries = store.find(BugSummary, *user_where, **kw_find_expr)
        # Note that if there are no records found, sum() returns None,
        # but we prefer to return 0 here.
        return summaries.sum(BugSummary.count) or 0

    def assertCount(self, count, user=None, **kw_find_expr):
        self.assertEqual(count, self.getCount(user, **kw_find_expr))

    def _maybe_rollup(self):
        """Rollup the journal if the class is testing the rollup case."""
        # The base class does not roll up the journal; see
        # TestBugSummaryRolledUp (sketched below), which does.
        pass
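
        # In the rollup variant, this hook flushes the journal into
        # BugSummary before counting. A minimal sketch of that override,
        # assuming the database exposes a bugsummary_rollup_journal()
        # function and a cursor() helper is available:
        #
        #     def _maybe_rollup(self):
        #         cur = cursor()
        #         cur.execute("SELECT bugsummary_rollup_journal()")
        #         cur.close()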

    def test_providesInterface(self):
        bug_summary = self.store.find(BugSummary)[0]
        self.assertTrue(IBugSummary.providedBy(bug_summary))

    def test_addTag(self):
        tag = u'pustular'

        # Ensure nothing is using our tag yet.
        self.assertCount(0, tag=tag)

        product = self.factory.makeProduct()

        for count in range(3):
            bug = self.factory.makeBug(target=product)
            bug_tag = BugTag(bug=bug, tag=tag)
            self.store.add(bug_tag)

        # Number of tagged tasks for a particular product
        self.assertCount(3, product=product, tag=tag)

        # There should be no other BugSummary rows.
        self.assertCount(3, tag=tag)

    def test_changeTag(self):
        old_tag = u'pustular'
        new_tag = u'flatulent'

        # Ensure nothing is using our tags yet.
        self.assertCount(0, tag=old_tag)
        self.assertCount(0, tag=new_tag)

        product = self.factory.makeProduct()

        for count in range(3):
            bug = self.factory.makeBug(target=product)
            bug_tag = BugTag(bug=bug, tag=old_tag)
            self.store.add(bug_tag)

        # Number of tagged tasks for a particular product
        self.assertCount(3, product=product, tag=old_tag)

        for count in reversed(range(3)):
            bug_tag = self.store.find(BugTag, tag=old_tag).any()
            bug_tag.tag = new_tag

            self.assertCount(count, product=product, tag=old_tag)
            self.assertCount(3 - count, product=product, tag=new_tag)

        # There should be no other BugSummary rows.
        self.assertCount(0, tag=old_tag)
        self.assertCount(3, tag=new_tag)

    def test_removeTag(self):
        tag = u'pustular'

        # Ensure nothing is using our tag yet.
        self.assertCount(0, tag=tag)

        product = self.factory.makeProduct()

        for count in range(3):
            bug = self.factory.makeBug(target=product)
            bug_tag = BugTag(bug=bug, tag=tag)
            self.store.add(bug_tag)

        # Number of tagged tasks for a particular product
        self.assertCount(3, product=product, tag=tag)

        for count in reversed(range(3)):
            bug_tag = self.store.find(BugTag, tag=tag).any()
            self.store.remove(bug_tag)
            self.assertCount(count, product=product, tag=tag)

        # There should be no other BugSummary rows.
        self.assertCount(0, tag=tag)

    def test_changeStatus(self):
        org_status = BugTaskStatus.NEW
        new_status = BugTaskStatus.INVALID

        product = self.factory.makeProduct()

        for count in range(3):
            bug = self.factory.makeBug(target=product)
            bug_task = self.store.find(BugTask, bug=bug).one()
            bug_task._status = org_status
            self.assertCount(count + 1, product=product, status=org_status)

        for count in reversed(range(3)):
            bug_task = self.store.find(BugTask,
                                       product=product,
                                       _status=org_status).any()
            bug_task._status = new_status
            self.assertCount(count, product=product, status=org_status)
            self.assertCount(3 - count, product=product, status=new_status)

    def test_changeImportance(self):
        org_importance = BugTaskImportance.UNDECIDED
        new_importance = BugTaskImportance.CRITICAL

        product = self.factory.makeProduct()

        for count in range(3):
            bug = self.factory.makeBug(target=product)
            bug_task = self.store.find(BugTask, bug=bug).one()
            bug_task.importance = org_importance
            self.assertCount(count + 1,
                             product=product,
                             importance=org_importance)

        for count in reversed(range(3)):
            bug_task = self.store.find(BugTask,
                                       product=product,
                                       importance=org_importance).any()
            bug_task.importance = new_importance
            self.assertCount(count, product=product, importance=org_importance)
            self.assertCount(3 - count,
                             product=product,
                             importance=new_importance)

    def test_makePrivate(self):
        # The bug owner and two other people are subscribed directly to
        # the bug, and another has a grant for the whole project. All of
        # them see the bug once.
        person_a = self.factory.makePerson()
        person_b = self.factory.makePerson()
        person_c = self.factory.makePerson()
        product = self.factory.makeProduct()
        getUtility(IService, 'sharing').sharePillarInformation(
            product, person_c, product.owner,
            {InformationType.USERDATA: SharingPermission.ALL})
        bug = self.factory.makeBug(target=product, owner=person_b)

        bug.subscribe(person=person_a, subscribed_by=person_a)

        # Make the bug private. We have to use the Python API to ensure
        # BugSubscription records get created for implicit
        # subscriptions.
        bug.transitionToInformationType(InformationType.USERDATA, bug.owner)

        # Confirm the counts; person_a has no grant, so neither they nor
        # anonymous users have access.
        self.assertCount(0, product=product)
        self.assertCount(0, user=person_a, product=product)
        self.assertCount(1, user=person_b, product=product)
        self.assertCount(1, user=person_c, product=product)
        self.assertCount(1, user=bug.owner, product=product)

    def test_makePublic(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product,
                                   information_type=InformationType.USERDATA)

        person_a = self.factory.makePerson()
        person_b = self.factory.makePerson()
        bug.subscribe(person=person_a, subscribed_by=person_a)

        # Make the bug public. We have to use the Python API to ensure
        # BugSubscription records get created for implicit
        # subscriptions.
        bug.setPrivate(False, bug.owner)

        self.assertCount(1, product=product)
        self.assertCount(1, user=person_a, product=product)
        self.assertCount(1, user=person_b, product=product)

    def test_subscribePrivate(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product,
                                   information_type=InformationType.USERDATA)

        person_a = self.factory.makePerson()
        person_b = self.factory.makePerson()
        bug.subscribe(person=person_a, subscribed_by=person_a)

        self.assertCount(0, product=product)
        self.assertCount(1, user=person_a, product=product)
        self.assertCount(0, user=person_b, product=product)

    def test_unsubscribePrivate(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product,
                                   information_type=InformationType.USERDATA)

        person_a = self.factory.makePerson()
        person_b = self.factory.makePerson()
        bug.subscribe(person=person_a, subscribed_by=person_a)
        bug.subscribe(person=person_b, subscribed_by=person_b)
        bug.unsubscribe(person=person_b, unsubscribed_by=person_b)

        self.assertCount(0, product=product)
        self.assertCount(1, user=person_a, product=product)
        self.assertCount(0, user=person_b, product=product)

    def test_subscribePublic(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product)

        person_a = self.factory.makePerson()
        person_b = self.factory.makePerson()
        bug.subscribe(person=person_a, subscribed_by=person_a)

        self.assertCount(1, product=product)
        self.assertCount(1, user=person_a, product=product)
        self.assertCount(1, user=person_b, product=product)

    def test_unsubscribePublic(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product)

        person_a = self.factory.makePerson()
        person_b = self.factory.makePerson()
        bug.subscribe(person=person_a, subscribed_by=person_a)
        bug.subscribe(person=person_b, subscribed_by=person_b)
        bug.unsubscribe(person=person_b, unsubscribed_by=person_b)

        self.assertCount(1, product=product)
        self.assertCount(1, user=person_a, product=product)
        self.assertCount(1, user=person_b, product=product)

    def test_addProduct(self):
        distribution = self.factory.makeDistribution()
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=distribution)

        self.assertCount(1, distribution=distribution)
        self.assertCount(0, product=product)

        self.factory.makeBugTask(bug=bug, target=product)

        self.assertCount(1, distribution=distribution)
        self.assertCount(1, product=product)

    def test_changeProduct(self):
        product_a = self.factory.makeProduct()
        product_b = self.factory.makeProduct()
        bug_task = self.factory.makeBugTask(target=product_a)

        self.assertCount(1, product=product_a)
        self.assertCount(0, product=product_b)

        removeSecurityProxy(bug_task).product = product_b

        self.assertCount(0, product=product_a)
        self.assertCount(1, product=product_b)

    def test_removeProduct(self):
        distribution = self.factory.makeDistribution()
        product = self.factory.makeProduct()

        product_bug_task = self.factory.makeBugTask(target=product)
        self.factory.makeBugTask(bug=product_bug_task.bug, target=distribution)

        self.assertCount(1, distribution=distribution)
        self.assertCount(1, product=product)

        self.store.remove(product_bug_task)

        self.assertCount(1, distribution=distribution)
        self.assertCount(0, product=product)

    def test_addProductSeries(self):
        bug = self.factory.makeBug()
        productseries = self.factory.makeProductSeries()
        product = productseries.product

        bug_task = self.factory.makeBugTask(bug=bug, target=productseries)

        self.assertTrue(bug_task.product is None)

        self.assertCount(1, product=product)
        self.assertCount(1, productseries=productseries)

    def test_changeProductSeries(self):
        product = self.factory.makeProduct()
        productseries_a = self.factory.makeProductSeries(product=product)
        productseries_b = self.factory.makeProductSeries(product=product)

        # You can't have a BugTask targeted to a productseries without
        # already having a BugTask targeted to the product. Create
        # this task explicitly.
        product_task = self.factory.makeBugTask(target=product)

        series_task = self.factory.makeBugTask(bug=product_task.bug,
                                               target=productseries_a)

        self.assertCount(1, product=product)
        self.assertCount(1, productseries=productseries_a)

        removeSecurityProxy(series_task).productseries = productseries_b

        self.assertCount(1, product=product)
        self.assertCount(0, productseries=productseries_a)
        self.assertCount(1, productseries=productseries_b)

    def test_removeProductSeries(self):
        series = self.factory.makeProductSeries()
        product = series.product
        bug_task = self.factory.makeBugTask(target=series)

        self.assertCount(1, product=product)
        self.assertCount(1, productseries=series)

        self.store.remove(bug_task)

        self.assertCount(1, product=product)
        self.assertCount(0, productseries=series)

    def test_addDistribution(self):
        distribution = self.factory.makeDistribution()
        self.factory.makeBugTask(target=distribution)

        self.assertCount(1, distribution=distribution)

    def test_changeDistribution(self):
        distribution_a = self.factory.makeDistribution()
        distribution_b = self.factory.makeDistribution()
        bug_task = self.factory.makeBugTask(target=distribution_a)

        self.assertCount(1, distribution=distribution_a)

        removeSecurityProxy(bug_task).distribution = distribution_b

        self.assertCount(0, distribution=distribution_a)
        self.assertCount(1, distribution=distribution_b)

    def test_removeDistribution(self):
        distribution_a = self.factory.makeDistribution()
        distribution_b = self.factory.makeDistribution()
        bug_task_a = self.factory.makeBugTask(target=distribution_a)
        bug = bug_task_a.bug
        bug_task_b = self.factory.makeBugTask(bug=bug, target=distribution_b)

        self.assertCount(1, distribution=distribution_a)
        self.assertCount(1, distribution=distribution_b)

        self.store.remove(bug_task_b)

        self.assertCount(1, distribution=distribution_a)
        self.assertCount(0, distribution=distribution_b)

    def test_addDistroSeries(self):
        series = self.factory.makeDistroSeries()
        distribution = series.distribution

        # This first creates a BugTask on the distribution. We can't
        # have a distroseries BugTask without a distribution BugTask.
        self.factory.makeBugTask(target=series)

        self.assertCount(1, distribution=distribution)
        self.assertCount(1, distroseries=series)

    def test_changeDistroSeries(self):
        distribution = self.factory.makeDistribution()
        series_a = self.factory.makeDistroSeries(distribution=distribution)
        series_b = self.factory.makeDistroSeries(distribution=distribution)

        bug_task = self.factory.makeBugTask(target=series_a)

        self.assertCount(1, distribution=distribution)
        self.assertCount(1, distroseries=series_a)
        self.assertCount(0, distroseries=series_b)

        removeSecurityProxy(bug_task).distroseries = series_b

        self.assertCount(1, distribution=distribution)
        self.assertCount(0, distroseries=series_a)
        self.assertCount(1, distroseries=series_b)

    def test_removeDistroSeries(self):
        series = self.factory.makeDistroSeries()
        distribution = series.distribution
        bug_task = self.factory.makeBugTask(target=series)

        self.assertCount(1, distribution=distribution)
        self.assertCount(1, distroseries=series)

        self.store.remove(bug_task)

        self.assertCount(1, distribution=distribution)
        self.assertCount(0, distroseries=series)

    def test_addDistributionSourcePackage(self):
        distribution = self.factory.makeDistribution()
        sourcepackage = self.factory.makeDistributionSourcePackage(
            distribution=distribution)

        bug = self.factory.makeBug()
        self.factory.makeBugTask(bug=bug, target=sourcepackage)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(1,
                         distribution=distribution,
                         sourcepackagename=sourcepackage.sourcepackagename)

    def test_changeDistributionSourcePackage(self):
        distribution = self.factory.makeDistribution()
        sourcepackage_a = self.factory.makeDistributionSourcePackage(
            distribution=distribution)
        sourcepackage_b = self.factory.makeDistributionSourcePackage(
            distribution=distribution)

        bug_task = self.factory.makeBugTask(target=sourcepackage_a)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(1,
                         distribution=distribution,
                         sourcepackagename=sourcepackage_a.sourcepackagename)
        self.assertCount(0,
                         distribution=distribution,
                         sourcepackagename=sourcepackage_b.sourcepackagename)

        removeSecurityProxy(bug_task).sourcepackagename = (
            sourcepackage_b.sourcepackagename)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(0,
                         distribution=distribution,
                         sourcepackagename=sourcepackage_a.sourcepackagename)
        self.assertCount(1,
                         distribution=distribution,
                         sourcepackagename=sourcepackage_b.sourcepackagename)

    def test_removeDistributionSourcePackage(self):
        distribution = self.factory.makeDistribution()
        sourcepackage = self.factory.makeDistributionSourcePackage(
            distribution=distribution)

        bug_task = self.factory.makeBugTask(target=sourcepackage)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(1,
                         distribution=distribution,
                         sourcepackagename=sourcepackage.sourcepackagename)

        removeSecurityProxy(bug_task).sourcepackagename = None

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(0,
                         distribution=distribution,
                         sourcepackagename=sourcepackage.sourcepackagename)

    def test_addDistroSeriesSourcePackage(self):
        distribution = self.factory.makeDistribution()
        series = self.factory.makeDistroSeries(distribution=distribution)
        package = self.factory.makeSourcePackage(distroseries=series)
        spn = package.sourcepackagename
        self.factory.makeBugTask(target=package)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(1, distribution=distribution, sourcepackagename=spn)
        self.assertCount(1, distroseries=series, sourcepackagename=None)
        self.assertCount(1, distroseries=series, sourcepackagename=spn)

    def test_changeDistroSeriesSourcePackage(self):
        distribution = self.factory.makeDistribution()
        series = self.factory.makeDistroSeries(distribution=distribution)
        package_a = self.factory.makeSourcePackage(distroseries=series,
                                                   publish=True)
        package_b = self.factory.makeSourcePackage(distroseries=series,
                                                   publish=True)
        spn_a = package_a.sourcepackagename
        spn_b = package_b.sourcepackagename
        bug_task = self.factory.makeBugTask(target=package_a)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(1, distribution=distribution, sourcepackagename=spn_a)
        self.assertCount(0, distribution=distribution, sourcepackagename=spn_b)
        self.assertCount(1, distroseries=series, sourcepackagename=None)
        self.assertCount(1, distroseries=series, sourcepackagename=spn_a)
        self.assertCount(0, distroseries=series, sourcepackagename=spn_b)

        bug_task.transitionToTarget(series.getSourcePackage(spn_b),
                                    bug_task.owner)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(0, distribution=distribution, sourcepackagename=spn_a)
        self.assertCount(1, distribution=distribution, sourcepackagename=spn_b)
        self.assertCount(1, distroseries=series, sourcepackagename=None)
        self.assertCount(0, distroseries=series, sourcepackagename=spn_a)
        self.assertCount(1, distroseries=series, sourcepackagename=spn_b)

    def test_removeDistroSeriesSourcePackage(self):
        distribution = self.factory.makeDistribution()
        series = self.factory.makeDistroSeries(distribution=distribution)
        package = self.factory.makeSourcePackage(distroseries=series)
        spn = package.sourcepackagename
        bug_task = self.factory.makeBugTask(target=package)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(1, distribution=distribution, sourcepackagename=spn)
        self.assertCount(1, distroseries=series, sourcepackagename=None)
        self.assertCount(1, distroseries=series, sourcepackagename=spn)

        bug_task.transitionToTarget(series, bug_task.owner)

        self.assertCount(1, distribution=distribution, sourcepackagename=None)
        self.assertCount(0, distribution=distribution, sourcepackagename=spn)
        self.assertCount(1, distroseries=series, sourcepackagename=None)
        self.assertCount(0, distroseries=series, sourcepackagename=spn)

    def test_addMilestone(self):
        distribution = self.factory.makeDistribution()
        milestone = self.factory.makeMilestone(distribution=distribution)
        bug_task = self.factory.makeBugTask(target=distribution)

        self.assertCount(1, distribution=distribution, milestone=None)

        bug_task.milestone = milestone

        self.assertCount(0, distribution=distribution, milestone=None)
        self.assertCount(1, distribution=distribution, milestone=milestone)

    def test_changeMilestone(self):
        distribution = self.factory.makeDistribution()
        milestone_a = self.factory.makeMilestone(distribution=distribution)
        milestone_b = self.factory.makeMilestone(distribution=distribution)
        bug_task = self.factory.makeBugTask(target=distribution)
        bug_task.milestone = milestone_a

        self.assertCount(0, distribution=distribution, milestone=None)
        self.assertCount(1, distribution=distribution, milestone=milestone_a)
        self.assertCount(0, distribution=distribution, milestone=milestone_b)

        bug_task.milestone = milestone_b

        self.assertCount(0, distribution=distribution, milestone=None)
        self.assertCount(0, distribution=distribution, milestone=milestone_a)
        self.assertCount(1, distribution=distribution, milestone=milestone_b)

    def test_removeMilestone(self):
        distribution = self.factory.makeDistribution()
        milestone = self.factory.makeMilestone(distribution=distribution)
        bug_task = self.factory.makeBugTask(target=distribution)
        bug_task.milestone = milestone

        self.assertCount(0, distribution=distribution, milestone=None)
        self.assertCount(1, distribution=distribution, milestone=milestone)

        bug_task.milestone = None

        self.assertCount(1, distribution=distribution, milestone=None)
        self.assertCount(0, distribution=distribution, milestone=milestone)

    def test_addPatch(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product)

        self.assertCount(0, product=product, has_patch=True)

        removeSecurityProxy(bug).latest_patch_uploaded = datetime.now(tz=utc)

        self.assertCount(1, product=product, has_patch=True)

    def test_removePatch(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product)
        removeSecurityProxy(bug).latest_patch_uploaded = datetime.now(tz=utc)

        self.assertCount(1, product=product, has_patch=True)
        self.assertCount(0, product=product, has_patch=False)

        removeSecurityProxy(bug).latest_patch_uploaded = None

        self.assertCount(0, product=product, has_patch=True)
        self.assertCount(1, product=product, has_patch=False)

    def test_duplicate(self):
        product = self.factory.makeProduct()
        bug = self.factory.makeBug(target=product)

        self.assertCount(1, product=product)

        bug.markAsDuplicate(self.factory.makeBug())

        self.assertCount(0, product=product)