def getDiffsToReleases(self, sprs, preload_for_display=False):
    """See `IPackageDiffSet`."""
    # Imported here to avoid circular imports at module load time.
    from lp.registry.model.distribution import Distribution
    from lp.soyuz.model.archive import Archive
    from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
    if len(sprs) == 0:
        return EmptyResultSet()
    ids = [spr.id for spr in sprs]
    diffs = IStore(PackageDiff).find(
        PackageDiff, PackageDiff.to_sourceID.is_in(ids))
    diffs.order_by(
        PackageDiff.to_sourceID, Desc(PackageDiff.date_requested))

    def eager_load(rows):
        # Bulk-load the diff contents and their library file records.
        aliases = load(
            LibraryFileAlias, (row.diff_contentID for row in rows))
        load(LibraryFileContent, (alias.contentID for alias in aliases))
        # Bulk-load both ends of each diff, then the archives they were
        # uploaded to and those archives' distributions.
        releases = load(
            SourcePackageRelease,
            itertools.chain.from_iterable(
                (row.from_sourceID, row.to_sourceID) for row in rows))
        archives = load(
            Archive, (release.upload_archiveID for release in releases))
        load(Distribution, (archive.distributionID for archive in archives))

    if not preload_for_display:
        return diffs
    return DecoratedResultSet(diffs, pre_iter_hook=eager_load)
def all_package_names(self):
    """See `IHWDriverSet`."""
    # XXX Abel Deuring 2009-06-19 The clause package_name != None
    # can be removed once bug #306265 is fixed.
    packages = IStore(HWDriverPackageName).find(
        HWDriverPackageName, HWDriverPackageName.package_name != None)
    return packages.order_by(HWDriverPackageName.package_name)
def getBuildsForArchive(self, archive, status=None):
    """See `IBuildFarmJobSet`."""
    clauses = [BuildFarmJob.archive == archive]
    if status is not None:
        clauses.append(BuildFarmJob.status == status)
    builds = IStore(BuildFarmJob).find(BuildFarmJob, *clauses)
    # When the result may include pending or superseded builds we order
    # by -date_created (such builds won't always have a date_finished);
    # otherwise we can order by -date_finished.
    unfinished_states = (
        BuildStatus.NEEDSBUILD,
        BuildStatus.BUILDING,
        BuildStatus.UPLOADING,
        BuildStatus.SUPERSEDED,
        )
    if status is not None and status not in unfinished_states:
        builds.order_by(Desc(BuildFarmJob.date_finished), BuildFarmJob.id)
    else:
        builds.order_by(Desc(BuildFarmJob.date_created), BuildFarmJob.id)
    return builds
def getBuildsForArchive(self, archive, status=None):
    """See `IBuildFarmJobSet`."""
    extra = () if status is None else (BuildFarmJob.status == status,)
    result_set = IStore(BuildFarmJob).find(
        BuildFarmJob, BuildFarmJob.archive == archive, *extra)
    # Builds that may still be pending or superseded won't always have a
    # date_finished, so those cases fall back to ordering by
    # -date_created; everything else orders by -date_finished.
    unfinished = {
        BuildStatus.NEEDSBUILD,
        BuildStatus.BUILDING,
        BuildStatus.UPLOADING,
        BuildStatus.SUPERSEDED,
        }
    if status is None or status in unfinished:
        ordering = (Desc(BuildFarmJob.date_created), BuildFarmJob.id)
    else:
        ordering = (Desc(BuildFarmJob.date_finished), BuildFarmJob.id)
    result_set.order_by(*ordering)
    return result_set
def all_package_names(self):
    """See `IHWDriverSet`."""
    # XXX Abel Deuring 2009-06-19 The clause package_name != None
    # can be removed once bug #306265 is fixed.
    store = IStore(HWDriverPackageName)
    names = store.find(
        HWDriverPackageName, HWDriverPackageName.package_name != None)
    names.order_by(HWDriverPackageName.package_name)
    return names
def getComments(self):
    """See `IDistroSeriesDifference`."""
    comments = IStore(DistroSeriesDifferenceComment).find(
        DistroSeriesDifferenceComment,
        DistroSeriesDifferenceComment.distro_series_difference == self)
    # Newest comments first.
    comments.order_by(Desc(DistroSeriesDifferenceComment.id))
    return comments
def test_retrieveDatabaseAncestry(self):
    # retrieveDatabaseAncestry should set db_ancestry and db_history to
    # Launchpad's current understanding of the branch state.
    # db_branch_revision_map should map Bazaar revision_ids to
    # BranchRevision.ids.

    # Use the sampledata for this test, so we do not have to rely on
    # BzrSync to fill the database. That would cause a circular
    # dependency, as the test setup would depend on
    # retrieveDatabaseAncestry.
    branch = getUtility(IBranchLookup).getByUniqueName(
        '~name12/+junk/junk.contrib')
    revisions = IStore(BranchRevision).find(
        BranchRevision, BranchRevision.branch == branch)
    sampledata = list(revisions.order_by(BranchRevision.sequence))
    expected_ancestry = {
        branch_revision.revision.revision_id
        for branch_revision in sampledata}
    # Only revisions with a sequence number are part of the mainline
    # history.
    expected_history = [
        branch_revision.revision.revision_id
        for branch_revision in sampledata
        if branch_revision.sequence is not None]
    self.create_branch_and_tree(db_branch=branch)
    bzrsync = self.makeBzrSync(branch)
    db_ancestry, db_history = bzrsync.retrieveDatabaseAncestry()
    self.assertEqual(expected_ancestry, set(db_ancestry))
    self.assertEqual(expected_history, list(db_history))
def test_retrieveDatabaseAncestry(self):
    # retrieveDatabaseAncestry should set db_ancestry and db_history to
    # Launchpad's current understanding of the branch state.
    # db_branch_revision_map should map Bazaar revision_ids to
    # BranchRevision.ids.
    # Use the sampledata for this test, so we do not have to rely on
    # BzrSync to fill the database. That would cause a circular
    # dependency, as the test setup would depend on
    # retrieveDatabaseAncestry.
    branch = getUtility(IBranchLookup).getByUniqueName(
        "~name12/+junk/junk.contrib")
    branch_revisions = IStore(BranchRevision).find(
        BranchRevision, BranchRevision.branch == branch)
    sampledata = list(branch_revisions.order_by(BranchRevision.sequence))
    revision_ids = [
        branch_revision.revision.revision_id
        for branch_revision in sampledata]
    expected_ancestry = set(revision_ids)
    # History only includes revisions that carry a sequence number.
    expected_history = [
        branch_revision.revision.revision_id
        for branch_revision in sampledata
        if branch_revision.sequence is not None]
    self.create_branch_and_tree(db_branch=branch)
    bzrsync = self.makeBzrSync(branch)
    db_ancestry, db_history = bzrsync.retrieveDatabaseAncestry()
    self.assertEqual(expected_ancestry, set(db_ancestry))
    self.assertEqual(expected_history, list(db_history))
def _entries(self):
    # Distributions without a display name sort first, then newest
    # series of each kind.
    ordering = (
        NullsFirst(Distribution.display_name),
        Desc(DistroSeries.date_created),
        Desc(SnappySeries.date_created),
        )
    rows = IStore(self._table).using(*self._origin).find(
        self._table, *self._clauses)
    return rows.order_by(*ordering)
def getActiveJobs(cls, target_archive):
    """See `IPlainPackageCopyJobSource`."""
    clauses = [
        PackageCopyJob.job_type == cls.class_job_type,
        PackageCopyJob.target_archive == target_archive,
        Job.id == PackageCopyJob.job_id,
        Job._status == JobStatus.WAITING,
        ]
    jobs = IStore(PackageCopyJob).find(PackageCopyJob, *clauses)
    return DecoratedResultSet(jobs.order_by(PackageCopyJob.id), cls)
def _update(cls, distroseries, binarypackagename, archive, log): """Update the package cache for a given IBinaryPackageName 'log' is required, it should be a logger object able to print DEBUG level messages. 'ztm' is the current trasaction manager used for partial commits (in full batches of 100 elements) """ # get the set of published binarypackagereleases bprs = IStore(BinaryPackageRelease).find( BinaryPackageRelease, BinaryPackageRelease.id == BinaryPackagePublishingHistory.binarypackagereleaseID, BinaryPackagePublishingHistory.binarypackagename == binarypackagename, BinaryPackagePublishingHistory.distroarchseriesID == DistroArchSeries.id, DistroArchSeries.distroseries == distroseries, BinaryPackagePublishingHistory.archive == archive, BinaryPackagePublishingHistory.dateremoved == None) bprs = bprs.order_by(Desc(BinaryPackageRelease.datecreated)) bprs = bprs.config(distinct=True) if bprs.count() == 0: log.debug("No binary releases found.") return # find or create the cache entry cache = cls.selectOne(""" distroseries = %s AND archive = %s AND binarypackagename = %s """ % sqlvalues(distroseries, archive, binarypackagename)) if cache is None: log.debug("Creating new binary cache entry.") cache = cls( archive=archive, distroseries=distroseries, binarypackagename=binarypackagename) # make sure the cached name, summary and description are correct cache.name = binarypackagename.name cache.summary = bprs[0].summary cache.description = bprs[0].description # get the sets of binary package summaries, descriptions. there is # likely only one, but just in case... summaries = set() descriptions = set() for bpr in bprs: log.debug("Considering binary version %s" % bpr.version) summaries.add(bpr.summary) descriptions.add(bpr.description) # and update the caches cache.summaries = ' '.join(sorted(summaries)) cache.descriptions = ' '.join(sorted(descriptions))
def iterReady(cls):
    """Iterate through all ready PackageCopyJobs.

    Even though it's slower, we repeat the query each time in order that
    very long queues of mass syncs can be pre-empted by other jobs.
    """
    dispatched = set()
    store = IStore(PackageCopyJob)
    while True:
        ready = store.find(
            PackageCopyJob,
            PackageCopyJob.job_type == cls.class_job_type,
            PackageCopyJob.job == Job.id,
            Job.id.is_in(Job.ready_jobs),
            Not(Job.id.is_in(dispatched)))
        job = ready.order_by(PackageCopyJob.copy_policy).first()
        if job is None:
            return
        dispatched.add(job.job_id)
        yield cls(job)
def getPendingJobsForTargetSeries(cls, target_series):
    """Get upcoming jobs for `target_series`, ordered by age."""
    pending = IStore(PackageCopyJob).find(
        PackageCopyJob,
        Job.id == PackageCopyJob.job_id,
        PackageCopyJob.job_type == cls.class_job_type,
        PackageCopyJob.target_distroseries == target_series,
        Job._status.is_in(Job.PENDING_STATUSES))
    # Lower ids were created earlier, so this yields oldest first.
    return DecoratedResultSet(pending.order_by(PackageCopyJob.id), cls)
def get_distroseries_pofiles(self, series, date=None, component=None,
                             languagepack=None):
    """See `IVPOExport`.

    Selects `POFiles` based on the 'series', last modified 'date',
    archive 'component', and whether it belongs to a 'languagepack'
    """
    tables = [
        POFile,
        POTemplate,
        ]
    # NOTE: '== True' / '== None' are Storm SQL expressions, not Python
    # comparisons; they compile to "= TRUE" / "IS NULL" respectively.
    conditions = [
        POTemplate.distroseries == series,
        POTemplate.iscurrent == True,
        POFile.potemplate == POTemplate.id,
        ]
    if date is not None:
        # Include a POFile if either the template or the translation
        # file changed after 'date'.
        conditions.append(
            Or(POTemplate.date_last_updated > date,
               POFile.date_changed > date))
    if component is not None:
        # Restrict to templates whose source package is currently
        # published in the given component of the series' main archive.
        tables.extend([
            SourcePackagePublishingHistory,
            Component,
            ])
        conditions.extend([
            SourcePackagePublishingHistory.distroseries == series,
            SourcePackagePublishingHistory.component == Component.id,
            POTemplate.sourcepackagename ==
                SourcePackagePublishingHistory.sourcepackagenameID,
            Component.name == component,
            SourcePackagePublishingHistory.dateremoved == None,
            SourcePackagePublishingHistory.archive == series.main_archive,
            ])
    if languagepack:
        conditions.append(POTemplate.languagepack == True)
    # Use the slave store. We may want to write to the distroseries
    # to register a language pack, but not to the translation data
    # we retrieve for it.
    # XXX wgrant 2017-03-21: Moved to master to avoid termination
    # due to long transactions.
    query = IStore(POFile).using(*tables).find(POFile, And(*conditions))
    # Order by POTemplate. Caching in the export scripts can be
    # much more effective when consecutive POFiles belong to the
    # same POTemplate, e.g. they'll have the same POTMsgSets.
    sort_list = [POFile.potemplateID, POFile.languageID]
    return query.order_by(sort_list).config(distinct=True)
def getBinariesForOverrides(self, distroseries, pocket):
    """Fetch override information about all published binaries.

    The override information consists of tuples with 'binaryname',
    'component', 'section', 'architecture' and 'priority' strings,
    'binpackageformat' enum, 'phased_update_percentage' integer,
    in this order.

    :param distroseries: target `IDistroSeries`
    :param pocket: target `PackagePublishingPocket`
    :return: a `ResultSet` with the binary override information tuples
    """
    # A series with no architectures can have no publications at all.
    arch_ids = [arch.id for arch in distroseries.architectures]
    if not arch_ids:
        return EmptyResultSet()
    bpph = BinaryPackagePublishingHistory
    origins = (
        bpph,
        Join(Component, Component.id == bpph.componentID),
        Join(Section, Section.id == bpph.sectionID),
        Join(BinaryPackageRelease,
             BinaryPackageRelease.id == bpph.binarypackagereleaseID),
        Join(BinaryPackageName,
             BinaryPackageName.id ==
                 BinaryPackageRelease.binarypackagenameID),
        Join(DistroArchSeries,
             DistroArchSeries.id == bpph.distroarchseriesID),
        )
    conditions = [
        bpph.archive == self.publisher.archive,
        bpph.distroarchseriesID.is_in(arch_ids),
        bpph.pocket == pocket,
        bpph.status == PackagePublishingStatus.PUBLISHED,
        ]
    # Skip ddebs unless this archive publishes debug symbols.
    if not self.publisher.archive.publish_debug_symbols:
        conditions.append(
            BinaryPackageRelease.binpackageformat !=
                BinaryPackageFormat.DDEB)
    rows = IStore(BinaryPackageName).using(*origins).find(
        (BinaryPackageName.name, Component.name, Section.name,
         DistroArchSeries.architecturetag, bpph.priority,
         BinaryPackageRelease.binpackageformat,
         bpph.phased_update_percentage),
        *conditions)
    return rows.order_by(Desc(bpph.id))
def getBuildsBySourcePackageRelease(self, sourcepackagerelease_ids,
                                    buildstate=None):
    """See `IBinaryPackageBuildSet`."""
    if (sourcepackagerelease_ids is None
            or len(sourcepackagerelease_ids) == 0):
        return []
    clauses = [
        BinaryPackageBuild.source_package_release_id.is_in(
            sourcepackagerelease_ids),
        BinaryPackageBuild.is_distro_archive,
        ]
    if buildstate is not None:
        clauses.append(BinaryPackageBuild.status == buildstate)
    builds = IStore(BinaryPackageBuild).find(BinaryPackageBuild, *clauses)
    # Newest first; id breaks ties deterministically.
    builds.order_by(
        Desc(BinaryPackageBuild.date_created), BinaryPackageBuild.id)
    return builds
def getBinariesForOverrides(self, distroseries, pocket):
    """Fetch override information about all published binaries.

    The override information consists of tuples with 'binaryname',
    'component', 'section', 'architecture' and 'priority' strings,
    'binpackageformat' enum, 'phased_update_percentage' integer,
    in this order.

    :param distroseries: target `IDistroSeries`
    :param pocket: target `PackagePublishingPocket`
    :return: a `ResultSet` with the binary override information tuples
    """
    origins = (
        BinaryPackagePublishingHistory,
        Join(Component,
             Component.id == BinaryPackagePublishingHistory.componentID),
        Join(Section,
             Section.id == BinaryPackagePublishingHistory.sectionID),
        Join(BinaryPackageRelease,
             BinaryPackageRelease.id ==
                 BinaryPackagePublishingHistory.binarypackagereleaseID),
        Join(BinaryPackageName,
             BinaryPackageName.id ==
                 BinaryPackageRelease.binarypackagenameID),
        Join(DistroArchSeries,
             DistroArchSeries.id ==
                 BinaryPackagePublishingHistory.distroarchseriesID),
        )
    architectures_ids = [arch.id for arch in distroseries.architectures]
    if len(architectures_ids) == 0:
        # No architectures means nothing can be published in the series.
        return EmptyResultSet()
    conditions = [
        BinaryPackagePublishingHistory.archive == self.publisher.archive,
        BinaryPackagePublishingHistory.distroarchseriesID.is_in(
            architectures_ids),
        BinaryPackagePublishingHistory.pocket == pocket,
        BinaryPackagePublishingHistory.status ==
            PackagePublishingStatus.PUBLISHED,
        ]
    if not self.publisher.archive.publish_debug_symbols:
        # Hide debug packages unless the archive opts in.
        conditions.append(
            BinaryPackageRelease.binpackageformat !=
                BinaryPackageFormat.DDEB)
    wanted_columns = (
        BinaryPackageName.name, Component.name, Section.name,
        DistroArchSeries.architecturetag,
        BinaryPackagePublishingHistory.priority,
        BinaryPackageRelease.binpackageformat,
        BinaryPackagePublishingHistory.phased_update_percentage)
    result_set = IStore(BinaryPackageName).using(*origins).find(
        wanted_columns, *conditions)
    return result_set.order_by(Desc(BinaryPackagePublishingHistory.id))
def _update(cls, distroseries, binarypackagename, archive, log): """Update the package cache for a given IBinaryPackageName 'log' is required, it should be a logger object able to print DEBUG level messages. 'ztm' is the current trasaction manager used for partial commits (in full batches of 100 elements) """ # get the set of published binarypackagereleases bprs = IStore(BinaryPackageRelease).find( BinaryPackageRelease, BinaryPackageRelease.id == BinaryPackagePublishingHistory.binarypackagereleaseID, BinaryPackagePublishingHistory.binarypackagename == binarypackagename, BinaryPackagePublishingHistory.distroarchseriesID == DistroArchSeries.id, DistroArchSeries.distroseries == distroseries, BinaryPackagePublishingHistory.archive == archive, BinaryPackagePublishingHistory.dateremoved == None) bprs = bprs.order_by(Desc(BinaryPackageRelease.datecreated)) bprs = bprs.config(distinct=True) if bprs.count() == 0: log.debug("No binary releases found.") return # find or create the cache entry cache = cls.selectOne(""" distroseries = %s AND archive = %s AND binarypackagename = %s """ % sqlvalues(distroseries, archive, binarypackagename)) if cache is None: log.debug("Creating new binary cache entry.") cache = cls(archive=archive, distroseries=distroseries, binarypackagename=binarypackagename) # make sure the cached name, summary and description are correct cache.name = binarypackagename.name cache.summary = bprs[0].summary cache.description = bprs[0].description # get the sets of binary package summaries, descriptions. there is # likely only one, but just in case... summaries = set() descriptions = set() for bpr in bprs: log.debug("Considering binary version %s" % bpr.version) summaries.add(bpr.summary) descriptions.add(bpr.description) # and update the caches cache.summaries = ' '.join(sorted(summaries)) cache.descriptions = ' '.join(sorted(descriptions))
def getActiveJobs(cls, target_archive):
    """See `IPlainPackageCopyJobSource`."""
    waiting = IStore(PackageCopyJob).find(
        PackageCopyJob,
        PackageCopyJob.job_type == cls.class_job_type,
        PackageCopyJob.target_archive == target_archive,
        Job.id == PackageCopyJob.job_id,
        Job._status == JobStatus.WAITING)
    waiting = waiting.order_by(PackageCopyJob.id)
    # Wrap each raw PackageCopyJob row in the concrete job class.
    return DecoratedResultSet(waiting, cls)
def getPendingJobsForTargetSeries(cls, target_series):
    """Get upcoming jobs for `target_series`, ordered by age."""
    clauses = [
        Job.id == PackageCopyJob.job_id,
        PackageCopyJob.job_type == cls.class_job_type,
        PackageCopyJob.target_distroseries == target_series,
        Job._status.is_in(Job.PENDING_STATUSES),
        ]
    raw_jobs = IStore(PackageCopyJob).find(PackageCopyJob, *clauses)
    return DecoratedResultSet(raw_jobs.order_by(PackageCopyJob.id), cls)
def iterReady(cls):
    """Iterate through all ready PackageCopyJobs.

    Even though it's slower, we repeat the query each time in order that
    very long queues of mass syncs can be pre-empted by other jobs.
    """
    seen = set()
    while True:
        # Re-run the query so newly ready jobs can jump ahead of the
        # remaining queue; jobs already yielded are excluded via 'seen'.
        candidates = IStore(PackageCopyJob).find(
            PackageCopyJob,
            PackageCopyJob.job_type == cls.class_job_type,
            PackageCopyJob.job == Job.id,
            Job.id.is_in(Job.ready_jobs),
            Not(Job.id.is_in(seen)))
        candidates.order_by(PackageCopyJob.copy_policy)
        job = candidates.first()
        if job is None:
            break
        seen.add(job.job_id)
        yield cls(job)
def _getPublishingHistoryQuery(self, status=None):
    SPPH = SourcePackagePublishingHistory
    clauses = [
        SPPH.archiveID.is_in(self.distribution.all_distro_archive_ids),
        SPPH.distroseriesID == DistroSeries.id,
        DistroSeries.distribution == self.distribution,
        SPPH.sourcepackagename == self.sourcepackagename,
        SourcePackageRelease.id == SPPH.sourcepackagereleaseID,
        ]
    if status is not None:
        clauses.append(SPPH.status == status)
    rows = IStore(SPPH).find((SPPH, SourcePackageRelease), *clauses)
    rows.order_by(Desc(SPPH.datecreated), Desc(SPPH.id))
    # The joined SourcePackageRelease is fetched only to warm it up;
    # callers see just the publishing history rows.
    return DecoratedResultSet(rows, operator.itemgetter(0))
def search(self, query, vocab_filter=None):
    """Search for web bug trackers."""
    query = ensure_unicode(query).lower()
    substring_match = Or(
        CONTAINSSTRING(BugTracker.name, query),
        CONTAINSSTRING(BugTracker.title, query),
        CONTAINSSTRING(BugTracker.summary, query),
        CONTAINSSTRING(BugTracker.baseurl, query))
    results = IStore(self._table).find(
        self._table,
        And(self._filter, BugTracker.active == True, substring_match))
    return results.order_by(self._order_by)
def findPublishedSPPHs(self, distroseries, pocket, package_name):
    """Find currently published source publications for given package."""
    matches = IStore(SourcePackagePublishingHistory).find(
        SourcePackagePublishingHistory,
        join_spph_spr(),
        join_spph_spn(),
        SourcePackageName.name == package_name,
        self._composeActiveSourcePubsCondition(distroseries, pocket))
    # Sort by descending version (SPR.version has type debversion in
    # the database, so this should be a real proper comparison) so
    # that _sortPackage will have slightly less work to do later.
    return matches.order_by(
        Desc(SourcePackageRelease.version),
        Desc(SourcePackagePublishingHistory.datecreated))
def _getPublishingHistoryQuery(self, status=None):
    conditions = [
        SourcePackagePublishingHistory.archiveID.is_in(
            self.distribution.all_distro_archive_ids),
        SourcePackagePublishingHistory.distroseriesID == DistroSeries.id,
        DistroSeries.distribution == self.distribution,
        SourcePackagePublishingHistory.sourcepackagename ==
            self.sourcepackagename,
        SourcePackageRelease.id ==
            SourcePackagePublishingHistory.sourcepackagereleaseID,
        ]
    if status is not None:
        conditions.append(
            SourcePackagePublishingHistory.status == status)
    ordering = (
        Desc(SourcePackagePublishingHistory.datecreated),
        Desc(SourcePackagePublishingHistory.id))
    history = IStore(SourcePackagePublishingHistory).find(
        (SourcePackagePublishingHistory, SourcePackageRelease),
        *conditions)
    history.order_by(*ordering)
    # SourcePackageRelease rides along only for preloading; strip it
    # before handing rows to callers.
    return DecoratedResultSet(history, operator.itemgetter(0))
def search(self, query, vocab_filter=None):
    """Search for web bug trackers."""
    term = ensure_unicode(query).lower()
    results = IStore(self._table).find(
        self._table,
        And(
            self._filter,
            BugTracker.active == True,
            Or(
                CONTAINSSTRING(BugTracker.name, term),
                CONTAINSSTRING(BugTracker.title, term),
                CONTAINSSTRING(BugTracker.summary, term),
                CONTAINSSTRING(BugTracker.baseurl, term))))
    return results.order_by(self._order_by)
def getBinaryFiles(self, distroseries, pocket):
    """Fetch publishing information about all published binary files.

    The publishing information consists of tuples with 'sourcename',
    'filename', 'component' and 'architecture' strings, in this order.

    :param distroseries: target `IDistroSeries`
    :param pocket: target `PackagePublishingPocket`
    :return: a `ResultSet` with the binary files information tuples.
    """
    bpph = BinaryPackagePublishingHistory
    columns = (
        SourcePackageName.name,
        LibraryFileAlias.filename,
        Component.name,
        Concatenate(u"binary-", DistroArchSeries.architecturetag),
        )
    conditions = [
        # Join clauses.
        BinaryPackageRelease.id == bpph.binarypackagereleaseID,
        BinaryPackageFile.binarypackagereleaseID ==
            bpph.binarypackagereleaseID,
        BinaryPackageBuild.id == BinaryPackageRelease.buildID,
        SourcePackageRelease.id ==
            BinaryPackageBuild.source_package_release_id,
        SourcePackageName.id == SourcePackageRelease.sourcepackagenameID,
        LibraryFileAlias.id == BinaryPackageFile.libraryfileID,
        DistroArchSeries.id == bpph.distroarchseriesID,
        Component.id == bpph.componentID,
        # Filter clauses ('== None' is Storm's SQL "IS NULL").
        bpph.dateremoved == None,
        DistroArchSeries.distroseriesID == distroseries.id,
        bpph.archive == self.publisher.archive,
        bpph.pocket == pocket,
        bpph.status == PackagePublishingStatus.PUBLISHED,
        ]
    if not self.publisher.archive.publish_debug_symbols:
        # Omit ddebs unless the archive publishes debug symbols.
        conditions.append(
            BinaryPackageRelease.binpackageformat !=
                BinaryPackageFormat.DDEB)
    rows = IStore(SourcePackageRelease).find(columns, *conditions)
    return rows.order_by(bpph.id, BinaryPackageFile.id)
def getBinaryFiles(self, distroseries, pocket):
    """Fetch publishing information about all published binary files.

    The publishing information consists of tuples with 'sourcename',
    'filename', 'component' and 'architecture' strings, in this order.

    :param distroseries: target `IDistroSeries`
    :param pocket: target `PackagePublishingPocket`
    :return: a `ResultSet` with the binary files information tuples.
    """
    wanted = (
        SourcePackageName.name,
        LibraryFileAlias.filename,
        Component.name,
        Concatenate(u"binary-", DistroArchSeries.architecturetag),
        )
    joins = [
        BinaryPackageRelease.id ==
            BinaryPackagePublishingHistory.binarypackagereleaseID,
        BinaryPackageFile.binarypackagereleaseID ==
            BinaryPackagePublishingHistory.binarypackagereleaseID,
        BinaryPackageBuild.id == BinaryPackageRelease.buildID,
        SourcePackageRelease.id ==
            BinaryPackageBuild.source_package_release_id,
        SourcePackageName.id == SourcePackageRelease.sourcepackagenameID,
        LibraryFileAlias.id == BinaryPackageFile.libraryfileID,
        DistroArchSeries.id ==
            BinaryPackagePublishingHistory.distroarchseriesID,
        Component.id == BinaryPackagePublishingHistory.componentID,
        ]
    filters = [
        # '== None' compiles to SQL "IS NULL": still published.
        BinaryPackagePublishingHistory.dateremoved == None,
        DistroArchSeries.distroseriesID == distroseries.id,
        BinaryPackagePublishingHistory.archive == self.publisher.archive,
        BinaryPackagePublishingHistory.pocket == pocket,
        BinaryPackagePublishingHistory.status ==
            PackagePublishingStatus.PUBLISHED,
        ]
    if not self.publisher.archive.publish_debug_symbols:
        filters.append(
            BinaryPackageRelease.binpackageformat !=
                BinaryPackageFormat.DDEB)
    result_set = IStore(SourcePackageRelease).find(
        wanted, *(joins + filters))
    return result_set.order_by(
        BinaryPackagePublishingHistory.id, BinaryPackageFile.id)
def getSourceFiles(self, distroseries, pocket):
    """Fetch publishing information about all published source files.

    The publishing information consists of tuples with 'sourcename',
    'filename' and 'component' strings, in this order.

    :param distroseries: target `IDistroSeries`
    :param pocket: target `PackagePublishingPocket`
    :return: a `ResultSet` with the source files information tuples.
    """
    spph = SourcePackagePublishingHistory
    wanted = (
        SourcePackageName.name,
        LibraryFileAlias.filename,
        Component.name,
        )
    conditions = [
        # Join clauses.
        SourcePackageReleaseFile.sourcepackagereleaseID ==
            spph.sourcepackagereleaseID,
        SourcePackageName.id == spph.sourcepackagenameID,
        LibraryFileAlias.id == SourcePackageReleaseFile.libraryfileID,
        Component.id == spph.componentID,
        # Filter clauses.
        spph.archive == self.publisher.archive,
        spph.distroseriesID == distroseries.id,
        spph.pocket == pocket,
        spph.status == PackagePublishingStatus.PUBLISHED,
        ]
    rows = IStore(SourcePackageRelease).find(wanted, *conditions)
    return rows.order_by(
        LibraryFileAlias.filename, SourcePackageReleaseFile.id)
def usable_distro_series(self):
    """Distro series linked to this snappy series, by descending id."""
    matches = IStore(DistroSeries).find(
        DistroSeries,
        SnappyDistroSeries.snappy_series == self,
        SnappyDistroSeries.distro_series_id == DistroSeries.id)
    return matches.order_by(Desc(DistroSeries.id))
def getBuildsForDistro(self, context, status=None, name=None,
                       pocket=None, arch_tag=None):
    """See `IBinaryPackageBuildSet`."""
    # Pick the column to filter on from the kind of context supplied.
    if IDistribution.providedBy(context):
        col = BinaryPackageBuild.distribution_id
    elif IDistroSeries.providedBy(context):
        col = BinaryPackageBuild.distro_series_id
    elif IDistroArchSeries.providedBy(context):
        col = BinaryPackageBuild.distro_arch_series_id
    else:
        raise AssertionError("Unsupported context: %r" % context)
    condition_clauses = [
        col == context.id, BinaryPackageBuild.is_distro_archive]

    # XXX cprov 2006-09-25: It would be nice if we could encapsulate
    # the chunk of code below (which deals with the optional paramenters)
    # and share it with ISourcePackage.getBuildRecords()

    # exclude gina-generated and security (dak-made) builds
    # status == FULLYBUILT && datebuilt == null
    # ('!= None' is a Storm expression: SQL "IS NOT NULL".)
    if status == BuildStatus.FULLYBUILT:
        condition_clauses.append(
            BinaryPackageBuild.date_finished != None)
    else:
        condition_clauses.append(Or(
            BinaryPackageBuild.status != BuildStatus.FULLYBUILT,
            BinaryPackageBuild.date_finished != None))

    # Ordering according status
    # * NEEDSBUILD, BUILDING & UPLOADING by -lastscore
    # * SUPERSEDED & All by -BinaryPackageBuild.id
    #   (nearly equivalent to -datecreated, but much more
    #   efficient.)
    # * FULLYBUILT & FAILURES by -datebuilt
    # It should present the builds in a more natural order.
    clauseTables = []
    order_by_table = None
    if status in [
            BuildStatus.NEEDSBUILD,
            BuildStatus.BUILDING,
            BuildStatus.UPLOADING]:
        # Queue-bound states: join through BuildPackageJob to BuildQueue
        # so we can order by the queue score.
        order_by = [Desc(BuildQueue.lastscore), BinaryPackageBuild.id]
        order_by_table = BuildQueue
        clauseTables.extend([BuildQueue, BuildPackageJob])
        condition_clauses.extend([
            BuildPackageJob.build_id == BinaryPackageBuild.id,
            BuildPackageJob.job_id == BuildQueue.jobID])
    elif status == BuildStatus.SUPERSEDED or status is None:
        order_by = [Desc(BinaryPackageBuild.id)]
    else:
        order_by = [Desc(BinaryPackageBuild.date_finished),
                    BinaryPackageBuild.id]
    # End of duplication (see XXX cprov 2006-09-25 above).

    # Mutates condition_clauses/clauseTables in place to apply the
    # optional name/pocket/arch_tag filters.
    self.handleOptionalParamsForBuildQueries(
        condition_clauses, clauseTables, status, name, pocket, arch_tag)

    # When ordering on BuildQueue, that table must also appear in the
    # find spec; the extra column is stripped again by the decorator
    # below. NOTE(review): presumably required so the ordering column
    # is part of the select — confirm against Storm's behaviour.
    find_spec = (BinaryPackageBuild,)
    if order_by_table:
        find_spec = find_spec + (order_by_table,)
    result_set = IStore(BinaryPackageBuild).using(*clauseTables).find(
        find_spec, *condition_clauses)
    result_set.order_by(*order_by)
    return self._decorate_with_prejoins(
        DecoratedResultSet(result_set, result_decorator=itemgetter(0)))
def _entries(self):
    rows = IStore(self._table).find(self._table, *self._clauses)
    if self._order_by is None:
        return rows
    return rows.order_by(self._order_by)