def _update(cls, distroseries, binarypackagename, archive, log):
    """Update the package cache for a given IBinaryPackageName.

    Refreshes (or creates) the cache row for the given
    (distroseries, archive, binarypackagename) combination from the
    currently published binary package releases.

    :param distroseries: the distro series whose cache row is updated.
    :param binarypackagename: the binary package name being cached.
    :param archive: the archive the publications must belong to.
    :param log: required; a logger object able to print DEBUG level
        messages.
    """
    # Get the set of published binarypackagereleases for this name in
    # this series/archive, newest first.  `dateremoved == None` keeps
    # only publications still present in the archive (Storm renders
    # `== None` as IS NULL, so `is None` cannot be used here).
    bprs = IStore(BinaryPackageRelease).find(
        BinaryPackageRelease,
        BinaryPackageRelease.id ==
            BinaryPackagePublishingHistory.binarypackagereleaseID,
        BinaryPackagePublishingHistory.binarypackagename ==
            binarypackagename,
        BinaryPackagePublishingHistory.distroarchseriesID ==
            DistroArchSeries.id,
        DistroArchSeries.distroseries == distroseries,
        BinaryPackagePublishingHistory.archive == archive,
        BinaryPackagePublishingHistory.dateremoved == None)
    bprs = bprs.order_by(Desc(BinaryPackageRelease.datecreated))
    bprs = bprs.config(distinct=True)

    # is_empty() avoids the full COUNT(*) that count() == 0 incurs.
    if bprs.is_empty():
        log.debug("No binary releases found.")
        return

    # Find or create the cache entry for this name/series/archive.
    cache = cls.selectOne("""
        distroseries = %s AND
        archive = %s AND
        binarypackagename = %s
        """ % sqlvalues(distroseries, archive, binarypackagename))
    if cache is None:
        log.debug("Creating new binary cache entry.")
        cache = cls(
            archive=archive,
            distroseries=distroseries,
            binarypackagename=binarypackagename)

    # Make sure the cached name, summary and description reflect the
    # most recently created release.
    cache.name = binarypackagename.name
    cache.summary = bprs[0].summary
    cache.description = bprs[0].description

    # Get the sets of binary package summaries and descriptions.
    # There is likely only one of each, but just in case...
    summaries = set()
    descriptions = set()
    for bpr in bprs:
        log.debug("Considering binary version %s" % bpr.version)
        summaries.add(bpr.summary)
        descriptions.add(bpr.description)

    # And update the concatenated full-text-search caches.
    cache.summaries = ' '.join(sorted(summaries))
    cache.descriptions = ' '.join(sorted(descriptions))
def _update(cls, distroseries, binarypackagename, archive, log):
    """Refresh the binary package cache row for one package name.

    Collects the published `BinaryPackageRelease`s matching the given
    series, archive and name, then writes their names, summaries and
    descriptions into the cache entry, creating it if necessary.

    'log' is required and must be able to emit DEBUG level messages.
    """
    # Every publication of this name in the series/archive that has
    # not been removed, ordered with the newest release first.
    conditions = [
        BinaryPackageRelease.id ==
            BinaryPackagePublishingHistory.binarypackagereleaseID,
        BinaryPackagePublishingHistory.binarypackagename ==
            binarypackagename,
        BinaryPackagePublishingHistory.distroarchseriesID ==
            DistroArchSeries.id,
        DistroArchSeries.distroseries == distroseries,
        BinaryPackagePublishingHistory.archive == archive,
        BinaryPackagePublishingHistory.dateremoved == None,
        ]
    releases = IStore(BinaryPackageRelease).find(
        BinaryPackageRelease, *conditions)
    releases = releases.order_by(Desc(BinaryPackageRelease.datecreated))
    releases = releases.config(distinct=True)

    if releases.count() == 0:
        log.debug("No binary releases found.")
        return

    # Look up the existing cache row; build one when absent.
    cache = cls.selectOne("""
        distroseries = %s AND
        archive = %s AND
        binarypackagename = %s
        """ % sqlvalues(distroseries, archive, binarypackagename))
    if cache is None:
        log.debug("Creating new binary cache entry.")
        cache = cls(
            archive=archive, distroseries=distroseries,
            binarypackagename=binarypackagename)

    # The headline fields come from the newest release.
    cache.name = binarypackagename.name
    cache.summary = releases[0].summary
    cache.description = releases[0].description

    # Accumulate every distinct summary and description seen across
    # the releases (usually a single one of each).
    seen_summaries = set()
    seen_descriptions = set()
    for release in releases:
        log.debug("Considering binary version %s" % release.version)
        seen_summaries.add(release.summary)
        seen_descriptions.add(release.description)

    # Store the sorted, space-joined aggregates on the cache row.
    cache.summaries = ' '.join(sorted(seen_summaries))
    cache.descriptions = ' '.join(sorted(seen_descriptions))
def findCurrentSourcePackageNames(cls, distro, archive):
    """Return the relevant `SourcePackageName` rows.

    When 'archive' is None, the names are taken from
    `SeriesSourcePackageBranch` links across the distribution's
    series; otherwise they come from PENDING or PUBLISHED
    publications in the given archive.
    """
    if archive is not None:
        active_statuses = (
            PackagePublishingStatus.PENDING,
            PackagePublishingStatus.PUBLISHED)
        name_ids = IStore(SourcePackagePublishingHistory).find(
            SourcePackagePublishingHistory.sourcepackagenameID,
            SourcePackagePublishingHistory.archive == archive,
            SourcePackagePublishingHistory.status.is_in(
                active_statuses))
    else:
        name_ids = IStore(SeriesSourcePackageBranch).find(
            SeriesSourcePackageBranch.sourcepackagenameID,
            DistroSeries.distribution == distro.id,
            SeriesSourcePackageBranch.distroseriesID == DistroSeries.id)
    return bulk.load(SourcePackageName, name_ids.config(distinct=True))