Example no. 1
    def preloadPOFilesAndSequences(self, messages, pofile=None):
        """See `ITranslationMessageSet`."""
        from lp.translations.model.pofile import POFile
        from lp.translations.model.translationtemplateitem import (
            TranslationTemplateItem, )

        if len(messages) == 0:
            return
        language = messages[0].language
        if pofile is not None:
            pofile_constraints = [POFile.id == pofile.id]
        else:
            pofile_constraints = [POFile.language == language]
        results = IStore(POFile).find(
            (TranslationTemplateItem.potmsgsetID, POFile.id,
             TranslationTemplateItem.sequence),
            TranslationTemplateItem.potmsgsetID.is_in(message.potmsgsetID
                                                      for message in messages),
            POFile.potemplateID == TranslationTemplateItem.potemplateID,
            *pofile_constraints).config(
                distinct=(TranslationTemplateItem.potmsgsetID, ))
        potmsgset_map = dict((potmsgset_id, (pofile_id, sequence))
                             for potmsgset_id, pofile_id, sequence in results)
        load(POFile, (pofile_id for pofile_id, _ in potmsgset_map.values()))
        for message in messages:
            assert message.language == language
            pofile_id, sequence = potmsgset_map.get(message.potmsgsetID,
                                                    (None, None))
            message.setPOFile(IStore(POFile).get(POFile, pofile_id), sequence)
Example no. 2
    def findFromMany(self, object_ids, types=None):
        from lp.registry.model.person import Person

        object_ids = list(object_ids)
        if not object_ids:
            return {}

        store = IStore(XRef)
        extract_type = lambda id: id[0]
        rows = list(
            store.using(XRef).find(
                (XRef.from_type, XRef.from_id, XRef.to_type, XRef.to_id,
                 XRef.creator_id, XRef.date_created, XRef.metadata),
                Or(*[
                    And(XRef.from_type == from_type,
                        XRef.from_id.is_in([id[1] for id in group]))
                    for from_type, group in groupby(
                        sorted(object_ids, key=extract_type), extract_type)
                ]),
                XRef.to_type.is_in(types) if types is not None else True))
        bulk.load(Person, [row[4] for row in rows])
        result = {}
        for row in rows:
            result.setdefault((row[0], row[1]), {})[(row[2], row[3])] = {
                "creator": store.get(Person, row[4]) if row[4] else None,
                "date_created": row[5],
                "metadata": row[6]
            }
        return result
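For reference, the Or/And filter above is built by grouping the (type, id) pairs by type, and groupby only merges adjacent items, which is why the pairs are sorted with extract_type first. A minimal standalone sketch of that grouping step, using made-up pairs and no Storm at all:

from itertools import groupby

object_ids = [(u'bug', u'1'), (u'cve', u'7'), (u'bug', u'3')]
extract_type = lambda pair: pair[0]

# Sort by type so groupby sees each type as one contiguous run.
grouped = dict(
    (from_type, [pair[1] for pair in group])
    for from_type, group in groupby(
        sorted(object_ids, key=extract_type), extract_type))
# grouped == {u'bug': [u'1', u'3'], u'cve': [u'7']}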
Example no. 3
 def decorate(spr_ids):
     # Find the SPPHs for each SPR in our result.
     load(SourcePackageRelease, spr_ids)
     sprs = [SourcePackageRelease.get(spr_id) for spr_id in spr_ids]
     pubs = DistributionSourcePackageRelease.getPublishingHistories(
         self.distribution, sprs)
     sprs_by_id = dict((spr, list(pubs))
                       for (spr, pubs) in itertools.groupby(
                           pubs, attrgetter('sourcepackagereleaseID')))
     return [(DistributionSourcePackageRelease(
         distribution=self.distribution,
         sourcepackagerelease=spr), sprs_by_id[spr.id]) for spr in sprs]
Example no. 4
    def __init__(self,
                 distroseries,
                 parents=(),
                 arches=(),
                 archindep_archtag=None,
                 packagesets=None,
                 rebuild=False,
                 overlays=(),
                 overlay_pockets=(),
                 overlay_components=()):
        self.distroseries = distroseries
        self.parent_ids = [int(id) for id in parents]
        # Load parent objects in bulk...
        parents_bulk = bulk.load(DistroSeries, self.parent_ids)
        # ... sort the parents to match the order in the 'parents' parameter.
        self.parents = sorted(
            parents_bulk, key=lambda parent: self.parent_ids.index(parent.id))
        self.arches = arches
        self.archindep_archtag = archindep_archtag
        if packagesets is None:
            self.packagesets_ids = None
            self.packagesets = None
        else:
            self.packagesets_ids = [
                ensure_unicode(packageset) for packageset in packagesets
            ]
            self.packagesets = bulk.load(
                Packageset, [int(packageset) for packageset in packagesets])
        self.rebuild = rebuild
        self.overlays = overlays
        self.overlay_pockets = overlay_pockets
        self.overlay_components = overlay_components
        self._store = IMasterStore(DistroSeries)

        self.first_derivation = (
            not self.distroseries.distribution.has_published_sources)

        if self.first_derivation:
            # Use-case #1.
            self.derivation_parents = self.parents
            self.derivation_parent_ids = self.parent_ids
        else:
            # Use-case #2.
            self.derivation_parents = [self.distroseries.previous_series]
            self.derivation_parent_ids = [
                p.id for p in self.derivation_parents if p is not None
            ]
            if self.parent_ids == []:
                self.parents = (
                    self.distroseries.previous_series.getParentSeries())
        self._create_source_names_by_parent()
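The parents are re-sorted because bulk.load is not expected to return rows in the order of the ids passed in; the same re-ordering trick in isolation, with a made-up stand-in class instead of DistroSeries rows:

class FakeRow:
    def __init__(self, id):
        self.id = id

parent_ids = [30, 10, 20]
# Pretend this list came back from bulk.load() in arbitrary order.
loaded = [FakeRow(10), FakeRow(20), FakeRow(30)]
ordered = sorted(loaded, key=lambda row: parent_ids.index(row.id))
# [row.id for row in ordered] == [30, 10, 20]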
Example no. 5
 def test_load_with_store(self):
     # load() can use an alternative store.
     db_object = self.factory.makeComponent()
     # Commit so the database object is available in both master
     # and slave stores.
     transaction.commit()
     # Master store.
     master_store = IMasterStore(db_object)
     [db_object_from_master] = bulk.load(Component, [db_object.id],
                                         store=master_store)
     self.assertEqual(Store.of(db_object_from_master), master_store)
     # Slave store.
     slave_store = ISlaveStore(db_object)
     [db_object_from_slave] = bulk.load(Component, [db_object.id],
                                        store=slave_store)
     self.assertEqual(Store.of(db_object_from_slave), slave_store)
Example no. 6
    def getSharedArtifacts(self, pillar, person, user, include_bugs=True,
                           include_branches=True, include_specifications=True):
        """See `ISharingService`."""
        bug_ids = set()
        branch_ids = set()
        specification_ids = set()
        for artifact in self.getArtifactGrantsForPersonOnPillar(
            pillar, person):
            if artifact.bug_id and include_bugs:
                bug_ids.add(artifact.bug_id)
            elif artifact.branch_id and include_branches:
                branch_ids.add(artifact.branch_id)
            elif artifact.specification_id and include_specifications:
                specification_ids.add(artifact.specification_id)

        # Load the bugs.
        bugtasks = []
        if bug_ids:
            param = BugTaskSearchParams(user=user, bug=any(*bug_ids))
            param.setTarget(pillar)
            bugtasks = list(getUtility(IBugTaskSet).search(param))
        # Load the branches.
        branches = []
        if branch_ids:
            all_branches = getUtility(IAllBranches)
            wanted_branches = all_branches.visibleByUser(user).withIds(
                *branch_ids)
            branches = list(wanted_branches.getBranches())
        specifications = []
        if specification_ids:
            specifications = load(Specification, specification_ids)

        return bugtasks, branches, specifications
Example no. 7
 def bugs(self):
     from lp.bugs.model.bug import Bug
     bug_ids = [
         int(id) for _, id in getUtility(IXRefSet).findFrom(
             (u'specification', unicode(self.id)), types=[u'bug'])]
     return list(sorted(
         bulk.load(Bug, bug_ids), key=operator.attrgetter('id')))
Example no. 8
    def test_create_multiple_jobs_creates_waiting_jobs(self):
        dsp = self.factory.makeDistroSeriesParent()
        self.createSPPHs(dsp.derived_series, 1)
        job_ids = create_multiple_jobs(dsp.derived_series, dsp.parent_series)
        dsdjob = bulk.load(DistributionJob, job_ids)[0]

        self.assertEqual(JobStatus.WAITING, dsdjob.job.status)
Example no. 9
    def updateAll(cls, distro, archive, log, ztm, commit_chunk=500):
        """Update the source package cache.

        Considers every non-REMOVED source package and entirely skips
        updates for disabled archives.

        :param archive: target `IArchive`;
        :param log: logger object for printing debug level information;
        :param ztm:  transaction used for partial commits, every chunk of
            'commit_chunk' updates is committed;
        :param commit_chunk: number of updates before commit, defaults to 500.

        :return: the number of packages updated.
        """
        # Do not create cache entries for disabled archives.
        if archive is not None and not archive.enabled:
            return

        # Get the set of source package names to deal with.
        spns = list(
            sorted(cls.findCurrentSourcePackageNames(distro, archive),
                   key=attrgetter('name')))

        number_of_updates = 0
        chunks = []
        chunk = []
        for spn in spns:
            chunk.append(spn)
            if len(chunk) == commit_chunk:
                chunks.append(chunk)
                chunk = []
        if chunk:
            chunks.append(chunk)
        for chunk in chunks:
            bulk.load(SourcePackageName, [spn.id for spn in chunk])
            log.debug("Considering sources %s",
                      ', '.join([spn.name for spn in chunk]))
            if archive is None:
                cls.updateOfficialBranches(distro, chunk)
            else:
                cls.update(distro, chunk, archive, log)
            number_of_updates += len(chunk)
            log.debug("Committing")
            ztm.commit()

        return number_of_updates
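The loop above batches the package names into fixed-size chunks so that ztm.commit() runs after every commit_chunk updates; the same batching logic as a standalone sketch (split_into_chunks is a hypothetical helper, plain Python only):

def split_into_chunks(items, chunk_size):
    # Collect items into lists of at most chunk_size, preserving order.
    chunks = []
    chunk = []
    for item in items:
        chunk.append(item)
        if len(chunk) == chunk_size:
            chunks.append(chunk)
            chunk = []
    if chunk:
        chunks.append(chunk)
    return chunks

# split_into_chunks(range(7), 3) == [[0, 1, 2], [3, 4, 5], [6]]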
Example no. 10
 def _preloadProcessors(self, rows):
     # Grab (Builder.id, Processor.id) pairs and stuff them into the
     # Builders' processor caches.
     store = IStore(BuilderProcessor)
     pairs = list(
         store.using(BuilderProcessor, Processor).find(
             (BuilderProcessor.builder_id, BuilderProcessor.processor_id),
             BuilderProcessor.processor_id == Processor.id,
             BuilderProcessor.builder_id.is_in([
                 b.id for b in rows
             ])).order_by(BuilderProcessor.builder_id, Processor.name))
     load(Processor, [pid for bid, pid in pairs])
     for row in rows:
         get_property_cache(row)._processors_cache = []
     for bid, pid in pairs:
         cache = get_property_cache(store.get(Builder, bid))
         cache._processors_cache.append(store.get(Processor, pid))
Example no. 11
 def bugs(self):
     bug_ids = [
         int(id)
         for _, id in getUtility(IXRefSet).findFrom((u'cve', self.sequence),
                                                    types=[u'bug'])
     ]
     return list(
         sorted(bulk.load(Bug, bug_ids), key=operator.attrgetter('id')))
Example no. 12
    def test_create_multiple_jobs_creates_waiting_jobs(self):
        dsp = self.factory.makeDistroSeriesParent()
        self.createSPPHs(dsp.derived_series, 1)
        job_ids = create_multiple_jobs(
            dsp.derived_series, dsp.parent_series)
        dsdjob = bulk.load(DistributionJob, job_ids)[0]

        self.assertEqual(JobStatus.WAITING, dsdjob.job.status)
Example no. 13
 def test_load_with_store(self):
     # load() can use an alternative store.
     db_object = self.factory.makeComponent()
     # Commit so the database object is available in both master
     # and slave stores.
     transaction.commit()
     # Master store.
     master_store = IMasterStore(db_object)
     [db_object_from_master] = bulk.load(
         Component, [db_object.id], store=master_store)
     self.assertEqual(
         Store.of(db_object_from_master), master_store)
     # Slave store.
     slave_store = ISlaveStore(db_object)
     [db_object_from_slave] = bulk.load(
         Component, [db_object.id], store=slave_store)
     self.assertEqual(
         Store.of(db_object_from_slave), slave_store)
Example no. 14
 def test_load(self):
     # load() loads objects of the given type by their primary keys.
     db_objects = [
         self.factory.makeComponent(),
         self.factory.makeComponent(),
     ]
     db_object_ids = [db_object.id for db_object in db_objects]
     self.assertEqual(set(bulk.load(Component, db_object_ids)),
                      set(db_objects))
Example no. 15
    def updateAll(cls, distroseries, archive, log, ztm, commit_chunk=500):
        """Update the binary package cache

        Consider all binary package names published in this distro series
        and entirely skips updates for disabled archives

        :param archive: target `IArchive`;
        :param log: logger object for printing debug level information;
        :param ztm:  transaction used for partial commits, every chunk of
            'commit_chunk' updates is committed;
        :param commit_chunk: number of updates before commit, defaults to 500.

        :return: the number of packages updated.
        """
        # Do not create cache entries for disabled archives.
        if not archive.enabled:
            return

        # Get the set of package names to deal with.
        bpns = list(
            sorted(cls.findCurrentBinaryPackageNames(archive, distroseries),
                   key=attrgetter('name')))

        number_of_updates = 0
        chunks = []
        chunk = []
        for bpn in bpns:
            chunk.append(bpn)
            if len(chunk) == commit_chunk:
                chunks.append(chunk)
                chunk = []
        if chunk:
            chunks.append(chunk)
        for chunk in chunks:
            bulk.load(BinaryPackageName, [bpn.id for bpn in chunk])
            log.debug("Considering binaries %s",
                      ', '.join([bpn.name for bpn in chunk]))
            cls._update(distroseries, chunk, archive, log)
            number_of_updates += len(chunk)
            log.debug("Committing")
            ztm.commit()

        return number_of_updates
Example no. 16
 def test_load(self):
     # load() loads objects of the given type by their primary keys.
     db_objects = [
         self.factory.makeComponent(),
         self.factory.makeComponent(),
         ]
     db_object_ids = [db_object.id for db_object in db_objects]
     self.assertEqual(
         set(bulk.load(Component, db_object_ids)),
         set(db_objects))
Example no. 17
 def preload_hook(rows):
     lfas = load(LibraryFileAlias, (pd.diff_contentID for pd in rows))
     load(LibraryFileContent, (lfa.contentID for lfa in lfas))
     sprs = load(
         SourcePackageRelease,
         itertools.chain.from_iterable(
             (pd.from_sourceID, pd.to_sourceID) for pd in rows))
     archives = load(Archive, (spr.upload_archiveID for spr in sprs))
     load(Distribution, (a.distributionID for a in archives))
Example no. 18
 def preload_hook(rows):
     lfas = load(LibraryFileAlias, (pd.diff_contentID for pd in rows))
     load(LibraryFileContent, (lfa.contentID for lfa in lfas))
     sprs = load(
         SourcePackageRelease,
         itertools.chain.from_iterable(
             (pd.from_sourceID, pd.to_sourceID) for pd in rows))
     archives = load(Archive, (spr.upload_archiveID for spr in sprs))
     load(Distribution, (a.distributionID for a in archives))
Example no. 19
 def findCurrentBinaryPackageNames(cls, archive, distroseries):
     bpn_ids = IStore(BinaryPackagePublishingHistory).find(
         BinaryPackagePublishingHistory.binarypackagenameID,
         BinaryPackagePublishingHistory.distroarchseriesID.is_in(
             Select(DistroArchSeries.id,
                    tables=[DistroArchSeries],
                    where=DistroArchSeries.distroseries == distroseries)),
         BinaryPackagePublishingHistory.archive == archive,
         BinaryPackagePublishingHistory.status.is_in(
             (PackagePublishingStatus.PENDING,
              PackagePublishingStatus.PUBLISHED))).config(distinct=True)
     return bulk.load(BinaryPackageName, bpn_ids)
Example no. 20
    def __init__(
        self, distroseries, parents=(), arches=(), archindep_archtag=None,
        packagesets=(), rebuild=False, overlays=(), overlay_pockets=(),
        overlay_components=()):
        self.distroseries = distroseries
        self.parent_ids = [int(id) for id in parents]
        # Load parent objects in bulk...
        parents_bulk = bulk.load(DistroSeries, self.parent_ids)
        # ... sort the parents to match the order in the 'parents' parameter.
        self.parents = sorted(
            parents_bulk,
            key=lambda parent: self.parent_ids.index(parent.id))
        self.arches = arches
        self.archindep_archtag = archindep_archtag
        self.packagesets_ids = [
            ensure_unicode(packageset) for packageset in packagesets]
        self.packagesets = bulk.load(
            Packageset, [int(packageset) for packageset in packagesets])
        self.rebuild = rebuild
        self.overlays = overlays
        self.overlay_pockets = overlay_pockets
        self.overlay_components = overlay_components
        self._store = IMasterStore(DistroSeries)

        self.first_derivation = (
            not self.distroseries.distribution.has_published_sources)

        if self.first_derivation:
            # Use-case #1.
            self.derivation_parents = self.parents
            self.derivation_parent_ids = self.parent_ids
        else:
            # Use-case #2.
            self.derivation_parents = [self.distroseries.previous_series]
            self.derivation_parent_ids = [
                p.id for p in self.derivation_parents if p is not None]
            if self.parent_ids == []:
                self.parents = (
                    self.distroseries.previous_series.getParentSeries())
        self._create_source_names_by_parent()
Example no. 21
    def test_load_with_compound_primary_keys(self):
        # load() can load objects with compound primary keys.
        flags = [
            FeatureFlag(u'foo', 0, u'bar', u'true'),
            FeatureFlag(u'foo', 0, u'baz', u'false'),
        ]
        other_flag = FeatureFlag(u'notfoo', 0, u'notbar', u'true')
        for flag in flags + [other_flag]:
            getFeatureStore().add(flag)

        self.assertContentEqual(
            flags, bulk.load(FeatureFlag,
                             [(ff.scope, ff.flag) for ff in flags]))
Example no. 22
    def test_load_with_compound_primary_keys(self):
        # load() can load objects with compound primary keys.
        flags = [
            FeatureFlag(u'foo', 0, u'bar', u'true'),
            FeatureFlag(u'foo', 0, u'baz', u'false'),
            ]
        other_flag = FeatureFlag(u'notfoo', 0, u'notbar', u'true')
        for flag in flags + [other_flag]:
            getFeatureStore().add(flag)

        self.assertContentEqual(
            flags,
            bulk.load(FeatureFlag, [(ff.scope, ff.flag) for ff in flags]))
Example no. 23
 def findCurrentSourcePackageNames(cls, distro, archive):
     if archive is None:
         spn_ids = IStore(SeriesSourcePackageBranch).find(
             SeriesSourcePackageBranch.sourcepackagenameID,
             DistroSeries.distribution == distro.id,
             SeriesSourcePackageBranch.distroseriesID == DistroSeries.id)
     else:
         spn_ids = IStore(SourcePackagePublishingHistory).find(
             SourcePackagePublishingHistory.sourcepackagenameID,
             SourcePackagePublishingHistory.archive == archive,
             SourcePackagePublishingHistory.status.is_in(
                 (PackagePublishingStatus.PENDING,
                  PackagePublishingStatus.PUBLISHED)))
     return bulk.load(SourcePackageName, spn_ids.config(distinct=True))
Example no. 24
def preload_work_items(work_items):
    """Bulk load data that will be needed to process `work_items`.

    :param work_items: A sequence of `WorkItem` records.
    :return: A dict mapping `POFile` ids from `work_items` to their
        respective `POFile` objects.
    """
    pofiles = load(POFile, [work_item.pofile_id for work_item in work_items])
    load_related(Language, pofiles, ['languageID'])
    templates = load_related(POTemplate, pofiles, ['potemplateID'])
    distroseries = load_related(DistroSeries, templates, ['distroseriesID'])
    load_related(Distribution, distroseries, ['distributionID'])
    productseries = load_related(ProductSeries, templates, ['productseriesID'])
    load_related(Product, productseries, ['productID'])
    return dict((pofile.id, pofile) for pofile in pofiles)
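load_related here bulk-fetches the rows referenced by a foreign key on objects that are already in memory, so later attribute access does not fire one query per row. A rough conceptual sketch of what such a helper does (not the actual lp.services.database.bulk implementation, and load_related_sketch is a hypothetical name):

from lp.services.database.bulk import load

def load_related_sketch(object_type, owning_objects, foreign_key_names):
    # Gather the distinct foreign-key values, then bulk-load their targets.
    keys = set()
    for owning_object in owning_objects:
        for name in foreign_key_names:
            value = getattr(owning_object, name)
            if value is not None:
                keys.add(value)
    return load(object_type, keys)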
Example no. 25
def enterpriseids_to_objects(eids):
    """Dereference multiple SOA Enterprise IDs."""
    dbid_to_eid = defaultdict(dict)
    for eid in eids:
        if not eid.startswith('lp'):
            raise TypeError
        instance, cls, id = eid.split(':')
        dbid_to_eid[cls][int(id)] = eid
    types = _known_types()
    eid_to_obj = {}
    for kind in dbid_to_eid:
        objs = load(types[kind], dbid_to_eid[kind].keys())
        for obj in objs:
            eid_to_obj[dbid_to_eid[kind][obj.id]] = obj
    return eid_to_obj
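The Enterprise IDs are plain colon-separated strings, so the first loop simply indexes them by type and database id before anything is loaded; a standalone illustration with made-up IDs (the type names that actually appear depend on _known_types()):

from collections import defaultdict

eids = ['lp:Person:12', 'lp:Product:7', 'lp:Person:99']
dbid_to_eid = defaultdict(dict)
for eid in eids:
    instance, kind, dbid = eid.split(':')
    dbid_to_eid[kind][int(dbid)] = eid
# dbid_to_eid == {'Person': {12: 'lp:Person:12', 99: 'lp:Person:99'},
#                 'Product': {7: 'lp:Product:7'}}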
Example no. 26
def preload_work_items(work_items):
    """Bulk load data that will be needed to process `work_items`.

    :param work_items: A sequence of `WorkItem` records.
    :return: A dict mapping `POFile` ids from `work_items` to their
        respective `POFile` objects.
    """
    pofiles = load(POFile, [work_item.pofile_id for work_item in work_items])
    load_related(Language, pofiles, ['languageID'])
    templates = load_related(POTemplate, pofiles, ['potemplateID'])
    distroseries = load_related(DistroSeries, templates, ['distroseriesID'])
    load_related(Distribution, distroseries, ['distributionID'])
    productseries = load_related(
        ProductSeries, templates, ['productseriesID'])
    load_related(Product, productseries, ['productID'])
    return dict((pofile.id, pofile) for pofile in pofiles)
Example no. 27
    def test_create_multiple_jobs_structure(self):
        dsp = self.factory.makeDistroSeriesParent()
        spph = self.createSPPHs(dsp.derived_series, 1)[0]
        job_ids = create_multiple_jobs(
            dsp.derived_series, dsp.parent_series)
        job = bulk.load(DistributionJob, job_ids)[0]

        sourcepackagenameid = spph.sourcepackagerelease.sourcepackagename.id
        expected_metadata = {
            u'sourcepackagename': sourcepackagenameid,
            u'parent_series': dsp.parent_series.id}
        self.assertThat(job, MatchesStructure.byEquality(
            distribution=dsp.derived_series.distribution,
            distroseries=dsp.derived_series,
            job_type=DistributionJobType.DISTROSERIESDIFFERENCE,
            metadata=expected_metadata))
Example no. 28
    def test_create_multiple_jobs_structure(self):
        dsp = self.factory.makeDistroSeriesParent()
        spph = self.createSPPHs(dsp.derived_series, 1)[0]
        job_ids = create_multiple_jobs(dsp.derived_series, dsp.parent_series)
        job = bulk.load(DistributionJob, job_ids)[0]

        sourcepackagenameid = spph.sourcepackagerelease.sourcepackagename.id
        expected_metadata = {
            u'sourcepackagename': sourcepackagenameid,
            u'parent_series': dsp.parent_series.id
        }
        self.assertThat(
            job,
            MatchesStructure.byEquality(
                distribution=dsp.derived_series.distribution,
                distroseries=dsp.derived_series,
                job_type=DistributionJobType.DISTROSERIESDIFFERENCE,
                metadata=expected_metadata))
Example no. 29
    def test_create_multiple_jobs_ignore_other_series(self):
        dsp = self.factory.makeDistroSeriesParent()
        spphs = self.createSPPHs(dsp.derived_series)

        # Create other SPPHs ...
        dsp2 = self.factory.makeDistroSeriesParent()
        self.createSPPHs(dsp2.derived_series)

        # ... and some more.
        dsp3 = self.factory.makeDistroSeriesParent(
            parent_series=dsp.parent_series)
        self.createSPPHs(dsp3.derived_series)

        job_ids = create_multiple_jobs(dsp.derived_series, dsp.parent_series)
        jobs = bulk.load(DistributionJob, job_ids)

        self.assertContentEqual(
            [spph.sourcepackagerelease.sourcepackagename.id for spph in spphs],
            [job.metadata[u'sourcepackagename'] for job in jobs])
Example no. 30
    def test_create_multiple_jobs_ignore_other_series(self):
        dsp = self.factory.makeDistroSeriesParent()
        spphs = self.createSPPHs(dsp.derived_series)

        # Create other SPPHs ...
        dsp2 = self.factory.makeDistroSeriesParent()
        self.createSPPHs(dsp2.derived_series)

        # ... and some more.
        dsp3 = self.factory.makeDistroSeriesParent(
            parent_series=dsp.parent_series)
        self.createSPPHs(dsp3.derived_series)

        job_ids = create_multiple_jobs(
            dsp.derived_series, dsp.parent_series)
        jobs = bulk.load(DistributionJob, job_ids)

        self.assertContentEqual(
            [spph.sourcepackagerelease.sourcepackagename.id
                for spph in spphs],
            [job.metadata[u'sourcepackagename'] for job in jobs])
Example no. 31
 def eager_load(rows):
     bulk.load(Component, (row[2] for row in rows))
     bulk.load(Section, (row[3] for row in rows))
Example no. 32
 def eager_load(rows):
     bulk.load(Component, (row[2] for row in rows))
     bulk.load(Section, (row[3] for row in rows))
Example no. 33
def copy_distroseries_translations(source, target, txn, logger,
                                   published_sources_only=False,
                                   check_archive=None,
                                   check_distroseries=None,
                                   skip_duplicates=False):
    """Copy translations into a new `DistroSeries`.

    Wraps around `copy_active_translations`, but also ensures that the
    `hide_all_translations` and `defer_translation_imports` flags are
    set.  After copying they are restored to their previous state.

    If published_sources_only is set, the set of sources in the target
    will be calculated and only templates for those sources will be
    copied.
    """
    statekeeper = SeriesStateKeeper()
    statekeeper.prepare(target)
    name = target.name
    txn.commit()
    txn.begin()

    copy_failed = False

    try:
        # Do the actual work.
        assert target.defer_translation_imports, (
            "defer_translation_imports not set!"
            " That would corrupt translation data mixing new imports"
            " with the information being copied.")
        assert target.hide_all_translations, (
            "hide_all_translations not set!"
            " That would allow users to see and modify incomplete"
            " translation state.")

        if published_sources_only:
            if check_archive is None:
                check_archive = target.main_archive
            if check_distroseries is None:
                check_distroseries = target
            spns = bulk.load(
                SourcePackageName,
                check_archive.getPublishedSources(
                        distroseries=check_distroseries,
                        status=active_publishing_status)
                    .config(distinct=True)
                    .order_by(
                        SourcePackagePublishingHistory.sourcepackagenameID)
                    .values(
                        SourcePackagePublishingHistory.sourcepackagenameID))
        else:
            spns = None
        copy_active_translations(
            source, target, txn, logger, sourcepackagenames=spns,
            skip_duplicates=skip_duplicates)
    except:
        copy_failed = True
        # Give us a fresh transaction for proper cleanup.
        txn.abort()
        txn.begin()
        raise
    finally:
        try:
            statekeeper.restore()
        except Warning as message:
            logger.warning(message)
        except:
            logger.warning(
                "Failed to restore hide_all_translations and "
                "defer_translation_imports flags on %s after translations "
                "copy failed.  Please check them manually." % name)
            # If the original copying etc. in the main try block failed, that
            # is the error most worth propagating.  Propagate a failure in
            # restoring the translations flags only if everything else went
            # well.
            if not copy_failed:
                raise
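The copy_failed flag ensures that when both the copy and the flag restore fail, the original copy error is the one that propagates; the same pattern stripped down to plain Python (run_with_cleanup is a hypothetical helper):

def run_with_cleanup(action, cleanup):
    # Re-raise a cleanup failure only if the main action succeeded.
    action_failed = False
    try:
        action()
    except Exception:
        action_failed = True
        raise
    finally:
        try:
            cleanup()
        except Exception:
            if not action_failed:
                raise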
Example no. 34
    def update(cls,
               distro,
               sourcepackagenames,
               archive,
               with_binaries=True,
               log=None):
        """Update the package cache for a given set of `ISourcePackageName`s.

        Cached details include generated binarypackage names, summary
        and description fti.
        """

        # Get the set of published sourcepackage releases.
        all_sprs = list(
            IStore(SourcePackageRelease).find(
                (SourcePackageRelease.sourcepackagenameID,
                 SourcePackageRelease.id, SourcePackageRelease.version),
                SourcePackageRelease.id ==
                SourcePackagePublishingHistory.sourcepackagereleaseID,
                SourcePackagePublishingHistory.sourcepackagenameID.is_in([
                    spn.id for spn in sourcepackagenames
                ]), SourcePackagePublishingHistory.archive == archive,
                SourcePackagePublishingHistory.status.is_in(
                    (PackagePublishingStatus.PENDING,
                     PackagePublishingStatus.PUBLISHED))).config(
                         distinct=True).order_by(SourcePackageRelease.id))
        if len(all_sprs) == 0:
            if log is not None:
                log.debug("No sources releases found.")
            return

        all_caches = IStore(cls).find(
            cls, cls.distribution == distro, cls.archive == archive,
            cls.sourcepackagenameID.is_in(
                [spn.id for spn in sourcepackagenames]))
        cache_map = {cache.sourcepackagename: cache for cache in all_caches}

        for spn in set(sourcepackagenames) - set(cache_map):
            cache_map[spn] = cls(archive=archive,
                                 distribution=distro,
                                 sourcepackagename=spn)

        if with_binaries:
            spr_map = defaultdict(list)
            for spn_id, spr_id, spr_version in all_sprs:
                spn = IStore(SourcePackageName).get(SourcePackageName, spn_id)
                spr_map[spn].append((spr_id, spr_version))

            # Query BinaryPackageBuilds and their BinaryPackageReleases
            # separately, since the big and inconsistent intermediates can
            # confuse postgres into a seq scan over BPR, which never ends
            # well for anybody.
            #
            # Beware: the sets expand much faster than you might expect for
            # the primary archive; COPY archive builds are caught too, of
            # which there are dozens for most SPRs, and there's no easy way
            # to exclude them!
            all_builds = list(
                IStore(BinaryPackageBuild).find(
                    (BinaryPackageBuild.source_package_release_id,
                     BinaryPackageBuild.id),
                    BinaryPackageBuild.source_package_release_id.is_in(
                        [row[1] for row in all_sprs])))
            all_binaries = list(
                IStore(BinaryPackageRelease).find(
                    (BinaryPackageRelease.buildID,
                     BinaryPackageRelease.binarypackagenameID,
                     BinaryPackageRelease.summary,
                     BinaryPackageRelease.description),
                    BinaryPackageRelease.buildID.is_in(
                        [row[1] for row in all_builds])))
            sprs_by_build = {
                build_id: spr_id
                for spr_id, build_id in all_builds
            }

            bulk.load(BinaryPackageName, [row[1] for row in all_binaries])
            binaries_by_spr = defaultdict(list)
            for bpb_id, bpn_id, summary, description in all_binaries:
                spr_id = sprs_by_build[bpb_id]
                binaries_by_spr[spr_id].append(
                    (IStore(BinaryPackageName).get(BinaryPackageName, bpn_id),
                     summary, description))

        for spn in sourcepackagenames:
            cache = cache_map[spn]
            cache.name = spn.name

            if with_binaries:
                binpkgnames = set()
                binpkgsummaries = set()
                binpkgdescriptions = set()
                for spr_id, spr_version in spr_map.get(spn, []):
                    if log is not None:
                        log.debug("Considering source %s %s", spn.name,
                                  spr_version)
                    binpkgs = binaries_by_spr.get(spr_id, [])
                    for bpn, summary, description in binpkgs:
                        binpkgnames.add(bpn.name)
                        binpkgsummaries.add(summary)
                        binpkgdescriptions.add(description)

                # Update the caches.
                cache.binpkgnames = ' '.join(sorted(binpkgnames))
                cache.binpkgsummaries = ' '.join(sorted(binpkgsummaries))
                cache.binpkgdescriptions = ' '.join(sorted(binpkgdescriptions))

            # Column due for deletion.
            cache.changelog = None
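The binaries are joined back to their sources in Python rather than SQL: builds map to their SPR ids, and binaries attach to their build. The same two-step in-memory join as a standalone sketch with made-up integer ids:

from collections import defaultdict

# (spr_id, build_id) pairs, standing in for the BinaryPackageBuild rows.
all_builds = [(1, 100), (1, 101), (2, 102)]
# (build_id, name) pairs, standing in for the BinaryPackageRelease rows.
all_binaries = [(100, 'libfoo1'), (101, 'foo-tools'), (102, 'bar')]

sprs_by_build = {build_id: spr_id for spr_id, build_id in all_builds}
binaries_by_spr = defaultdict(list)
for build_id, name in all_binaries:
    binaries_by_spr[sprs_by_build[build_id]].append(name)
# dict(binaries_by_spr) == {1: ['libfoo1', 'foo-tools'], 2: ['bar']}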