def getBinariesForSeries(self, distroseries):
        """See `IDistributionSourcePackageRelease`."""
        # Avoid circular imports.
        from lp.soyuz.model.distroarchseries import DistroArchSeries
        store = Store.of(distroseries)
        result_row = (BinaryPackageRelease, BinaryPackageBuild,
                      BinaryPackageName)

        tables = (BinaryPackageRelease,
                  Join(BinaryPackageBuild,
                       BinaryPackageBuild.id == BinaryPackageRelease.buildID),
                  Join(
                      BinaryPackagePublishingHistory,
                      BinaryPackageRelease.id ==
                      BinaryPackagePublishingHistory.binarypackagereleaseID),
                  Join(
                      DistroArchSeries, DistroArchSeries.id ==
                      BinaryPackagePublishingHistory.distroarchseriesID),
                  Join(
                      BinaryPackageName, BinaryPackageName.id ==
                      BinaryPackageRelease.binarypackagenameID))
        archive_ids = list(self.distribution.all_distro_archive_ids)
        binaries = store.using(*tables).find(
            result_row,
            And(
                DistroArchSeries.distroseriesID == distroseries.id,
                BinaryPackagePublishingHistory.archiveID.is_in(archive_ids),
                BinaryPackageBuild.source_package_release ==
                self.sourcepackagerelease))
        binaries.order_by(Desc(BinaryPackageRelease.id)).config(distinct=True)
        return DecoratedResultSet(binaries, itemgetter(0))
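A pattern worth calling out before the remaining examples: the query deliberately selects extra columns (here the build and the binary package name) so that they are fetched in the same round trip, and DecoratedResultSet with itemgetter(0) then hides everything except the object the caller actually asked for. The sketch below is a minimal pure-Python stand-in for that decorate-on-iteration idea, not Launchpad's lp.services.database.decoratedresultset implementation; it only shows why a per-row decorator plus an optional pre-iteration hook is all the wrapper needs.

from operator import itemgetter


class TinyDecoratedResultSet:
    """Illustration only: apply a decorator lazily while iterating rows."""

    def __init__(self, result_set, result_decorator=None, pre_iter_hook=None):
        self.result_set = result_set
        self.result_decorator = result_decorator
        self.pre_iter_hook = pre_iter_hook

    def __iter__(self):
        rows = list(self.result_set)
        if self.pre_iter_hook is not None:
            # Give the caller one chance to bulk-load related data first.
            self.pre_iter_hook(rows)
        for row in rows:
            if self.result_decorator is None:
                yield row
            else:
                yield self.result_decorator(row)


# Rows shaped like the (release, build, name) tuples in the query above.
rows = [("bpr-2", "build-2", "name-2"), ("bpr-1", "build-1", "name-1")]
assert list(TinyDecoratedResultSet(rows, itemgetter(0))) == ["bpr-2", "bpr-1"]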
Example #2
    def calculateSourceOverrides(self,
                                 archive,
                                 distroseries,
                                 pocket,
                                 spns,
                                 source_component=None,
                                 include_deleted=False):
        def eager_load(rows):
            bulk.load(Component, (row[1] for row in rows))
            bulk.load(Section, (row[2] for row in rows))

        store = IStore(SourcePackagePublishingHistory)
        already_published = DecoratedResultSet(store.find(
            (SourcePackagePublishingHistory.sourcepackagenameID,
             SourcePackagePublishingHistory.componentID,
             SourcePackagePublishingHistory.sectionID),
            SourcePackagePublishingHistory.archiveID == archive.id,
            SourcePackagePublishingHistory.distroseriesID == distroseries.id,
            SourcePackagePublishingHistory.status.is_in(
                self.getExistingPublishingStatuses(include_deleted)),
            SourcePackagePublishingHistory.sourcepackagenameID.is_in(
                spn.id for spn in spns)).order_by(
                    SourcePackagePublishingHistory.sourcepackagenameID,
                    Desc(SourcePackagePublishingHistory.datecreated),
                    Desc(SourcePackagePublishingHistory.id),
                ).config(distinct=(
                    SourcePackagePublishingHistory.sourcepackagenameID, )),
                                               id_resolver(
                                                   (SourcePackageName,
                                                    Component, Section)),
                                               pre_iter_hook=eager_load)
        return [
            SourceOverride(name, component, section)
            for (name, component, section) in already_published
        ]
Example #3
    def findSourcesForDomination(self, distroseries, pocket):
        """Find binary publications that need dominating.

        This is only for traditional domination, where the latest published
        publication is always kept published.  See `find_live_source_versions`
        for this logic.

        To optimize for that logic, `findSourcesForDomination` will ignore
        publications that have no other publications competing for the same
        source package.  There'd be nothing to do for those cases.
        """
        SPPH = SourcePackagePublishingHistory
        SPR = SourcePackageRelease

        spph_location_clauses = self._composeActiveSourcePubsCondition(
            distroseries, pocket)
        candidate_source_names = Select(SPPH.sourcepackagenameID,
                                        And(join_spph_spr(),
                                            spph_location_clauses),
                                        group_by=SPPH.sourcepackagenameID,
                                        having=(Count() > 1))

        # We'll also access the SourcePackageReleases associated with
        # the publications we find.  Since they're in the join anyway,
        # load them alongside the publications.
        # Actually we'll also want the SourcePackageNames, but adding
        # those to the (outer) query would complicate it, and
        # potentially slow it down.
        query = IStore(SPPH).find(
            (SPPH, SPR), join_spph_spr(),
            SPPH.sourcepackagenameID.is_in(candidate_source_names),
            spph_location_clauses)
        spphs = DecoratedResultSet(query, itemgetter(0))
        load_related(SourcePackageName, spphs, ['sourcepackagenameID'])
        return spphs
Example #4
    def getBranches(self, find_expr=Branch, eager_load=False, sort_by=None):
        """See `IBranchCollection`."""
        all_tables = set(self._tables.values() +
                         self._asymmetric_tables.values())
        tables = [Branch] + list(all_tables)
        expressions = self._getBranchExpressions()
        resultset = self.store.using(*tables).find(find_expr, *expressions)
        if sort_by is not None:
            resultset = resultset.order_by(
                *self._convertListingSortToOrderBy(sort_by))

        def do_eager_load(rows):
            branch_ids = set(branch.id for branch in rows)
            if not branch_ids:
                return
            GenericBranchCollection.preloadDataForBranches(rows)
            # So far we have only needed the persons for their canonical_url;
            # no need for validity etc. in the /branches API call.
            load_related(Person, rows,
                         ['ownerID', 'registrantID', 'reviewerID'])
            load_referencing(BugBranch, rows, ['branchID'])

        def cache_permission(branch):
            if self._user:
                get_property_cache(branch)._known_viewers = set(
                    [self._user.id])
            return branch

        eager_load_hook = (do_eager_load
                           if eager_load and find_expr == Branch else None)
        return DecoratedResultSet(resultset,
                                  pre_iter_hook=eager_load_hook,
                                  result_decorator=cache_permission)
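Example #4 combines both hooks: pre_iter_hook does the batched preloading (roughly one query per related table instead of one per branch), while result_decorator runs per row to warm the permission cache. Below is a hedged, self-contained sketch of that division of labour, using plain Python objects and an in-memory OWNERS table in place of the real Person lookup.

from dataclasses import dataclass, field


@dataclass
class FakeBranch:
    id: int
    owner_id: int
    owner: str = ""
    known_viewers: set = field(default_factory=set)


OWNERS = {1: "alice", 2: "bob"}  # stands in for a bulk Person load


def make_hooks(user_id):
    owner_cache = {}

    def preload(rows):
        # Once per batch: resolve every owner id in a single pass, which is
        # the job load_related() does against the database above.
        owner_cache.update(
            (oid, OWNERS[oid]) for oid in {row.owner_id for row in rows})

    def decorate(branch):
        # Once per row: attach the preloaded owner and cache the viewer set,
        # mirroring cache_permission() above.
        branch.owner = owner_cache[branch.owner_id]
        branch.known_viewers = {user_id}
        return branch

    return preload, decorate


branches = [FakeBranch(10, 1), FakeBranch(11, 2)]
preload, decorate = make_hooks(user_id=42)
preload(branches)                              # pre_iter_hook: batch work
branches = [decorate(b) for b in branches]     # result_decorator: row work
assert branches[0].owner == "alice" and 42 in branches[0].known_viewers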
Example #5
    def findBinariesForDomination(self, distroarchseries, pocket):
        """Find binary publications that need dominating.

        This is only for traditional domination, where the latest published
        publication is always kept published.  It will ignore publications
        that have no other publications competing for the same binary package.
        """
        BPPH = BinaryPackagePublishingHistory
        BPR = BinaryPackageRelease

        bpph_location_clauses = [
            BPPH.status == PackagePublishingStatus.PUBLISHED,
            BPPH.distroarchseries == distroarchseries,
            BPPH.archive == self.archive,
            BPPH.pocket == pocket,
        ]
        candidate_binary_names = Select(BPPH.binarypackagenameID,
                                        And(*bpph_location_clauses),
                                        group_by=BPPH.binarypackagenameID,
                                        having=(Count() > 1))
        main_clauses = bpph_location_clauses + [
            BPR.id == BPPH.binarypackagereleaseID,
            BPR.binarypackagenameID.is_in(candidate_binary_names),
            BPR.binpackageformat != BinaryPackageFormat.DDEB,
        ]

        # We're going to access the BPRs as well.  Since we make the
        # database look them up anyway, and since there won't be many
        # duplications among them, load them alongside the publications.
        # We'll also want their BinaryPackageNames, but adding those to
        # the join would complicate the query.
        query = IStore(BPPH).find((BPPH, BPR), *main_clauses)
        bpphs = list(DecoratedResultSet(query, itemgetter(0)))
        load_related(BinaryPackageName, bpphs, ['binarypackagenameID'])
        return bpphs
def most_recent_comments(dsds):
    """The most recent comments for the given `DistroSeriesDifference`s.

    Returns an `IResultSet` that yields a single column of
        `DistroSeriesDifferenceComment`.

    :param dsds: An iterable of `DistroSeriesDifference` instances.
    """
    columns = (
        DistroSeriesDifferenceComment,
        Message,
    )
    conditions = And(
        DistroSeriesDifferenceComment.distro_series_difference_id.is_in(
            dsd.id for dsd in dsds),
        Message.id == DistroSeriesDifferenceComment.message_id)
    order_by = (
        DistroSeriesDifferenceComment.distro_series_difference_id,
        Desc(DistroSeriesDifferenceComment.id),
    )
    distinct_on = (DistroSeriesDifferenceComment.distro_series_difference_id, )
    store = IStore(DistroSeriesDifferenceComment)
    comments = store.find(
        columns, conditions).order_by(*order_by).config(distinct=distinct_on)
    return DecoratedResultSet(comments, itemgetter(0))
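The query in most_recent_comments orders by (difference id, comment id DESC) and then applies DISTINCT ON the difference id, so only the newest comment per difference survives. The same "latest row per group" semantics is expressed in plain Python below for clarity (a sketch of the semantics, not the SQL the Storm config(distinct=...) call emits).

from itertools import groupby
from operator import itemgetter

# (distro_series_difference_id, comment_id) pairs; a larger id means newer.
comments = [(7, 101), (7, 104), (9, 55), (9, 53)]

# ORDER BY dsd_id, comment_id DESC ... then DISTINCT ON (dsd_id):
comments.sort(key=lambda row: (row[0], -row[1]))
latest = [next(rows) for _, rows in groupby(comments, key=itemgetter(0))]
assert latest == [(7, 104), (9, 55)]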
    def package_upload(self):
        """See `ISourcepackageRelease`."""
        store = Store.of(self)
        # The join on 'changesfile' is used for pre-fetching the
        # corresponding library file, so callsites don't have to issue an
        # extra query.
        origin = [
            PackageUploadSource,
            Join(PackageUpload,
                 PackageUploadSource.packageuploadID == PackageUpload.id),
            Join(LibraryFileAlias,
                 LibraryFileAlias.id == PackageUpload.changes_file_id),
            Join(LibraryFileContent,
                 LibraryFileContent.id == LibraryFileAlias.contentID),
        ]
        results = store.using(*origin).find(
            (PackageUpload, LibraryFileAlias, LibraryFileContent),
            PackageUploadSource.sourcepackagerelease == self,
            PackageUpload.archive == self.upload_archive,
            PackageUpload.distroseries == self.upload_distroseries)

        # Return the unique `PackageUpload` record that corresponds to the
        # upload of this `SourcePackageRelease`, loading the
        # `LibraryFileAlias` and the `LibraryFileContent` into the cache
        # because they will most likely be needed.
        return DecoratedResultSet(results, operator.itemgetter(0)).one()
 def batchnav(self):
     # No point using StormRangeFactory right now, as the sorted
     # lookup can't be fully indexed (it spans multiple archives).
     return BatchNavigator(
         DecoratedResultSet(self.context.publishing_history,
                            pre_iter_hook=self._preload_people),
         self.request)
Example #9
    def getSpecifications(self, user):
        """See `IMilestoneData`"""
        from lp.registry.model.person import Person
        origin = [Specification]
        product_origin, clauses = get_specification_active_product_filter(self)
        origin.extend(product_origin)
        clauses.extend(get_specification_privacy_filter(user))
        origin.append(LeftJoin(Person, Specification._assigneeID == Person.id))
        milestones = self._milestone_ids_expr(user)

        results = Store.of(self.target).using(*origin).find(
            (Specification, Person),
            Specification.id.is_in(
                Union(Select(
                    Specification.id,
                    tables=[Specification],
                    where=(Specification.milestoneID.is_in(milestones))),
                      Select(SpecificationWorkItem.specification_id,
                             tables=[SpecificationWorkItem],
                             where=And(
                                 SpecificationWorkItem.milestone_id.is_in(
                                     milestones),
                                 SpecificationWorkItem.deleted == False)),
                      all=True)), *clauses)
        ordered_results = results.order_by(Desc(Specification.priority),
                                           Specification.definition_status,
                                           Specification.implementation_status,
                                           Specification.title)
        ordered_results.config(distinct=True)
        return DecoratedResultSet(ordered_results, itemgetter(0))
Example #10
 def _scopedGetMergeProposals(self, statuses, eager_load=False):
     expressions = (self._filter_expressions +
                    self._getRepositoryVisibilityExpression())
     with_expr = With(
         "candidate_repositories",
         Select(GitRepository.id,
                tables=[GitRepository] + self._tables.values(),
                where=And(*expressions) if expressions else True))
     expressions = [
         SQL("""
         source_git_repository IN
             (SELECT id FROM candidate_repositories) AND
         target_git_repository IN
             (SELECT id FROM candidate_repositories)""")
     ]
     tables = [BranchMergeProposal]
     if self._asymmetric_filter_expressions:
         # Need to filter on GitRepository beyond the with constraints.
         expressions += self._asymmetric_filter_expressions
         expressions.append(BranchMergeProposal.source_git_repositoryID ==
                            GitRepository.id)
         tables.append(GitRepository)
         tables.extend(self._asymmetric_tables.values())
     if statuses is not None:
         expressions.append(
             BranchMergeProposal.queue_status.is_in(statuses))
     resultset = self.store.with_(with_expr).using(*tables).find(
         BranchMergeProposal, *expressions)
     if not eager_load:
         return resultset
     else:
         loader = partial(BranchMergeProposal.preloadDataForBMPs,
                          user=self._user)
         return DecoratedResultSet(resultset, pre_iter_hook=loader)
Example #11
    def getDiffsToReleases(self, sprs, preload_for_display=False):
        """See `IPackageDiffSet`."""
        from lp.registry.model.distribution import Distribution
        from lp.soyuz.model.archive import Archive
        from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
        if len(sprs) == 0:
            return EmptyResultSet()
        spr_ids = [spr.id for spr in sprs]
        result = IStore(PackageDiff).find(
            PackageDiff, PackageDiff.to_sourceID.is_in(spr_ids))
        result.order_by(PackageDiff.to_sourceID,
                        Desc(PackageDiff.date_requested))

        def preload_hook(rows):
            lfas = load(LibraryFileAlias, (pd.diff_contentID for pd in rows))
            load(LibraryFileContent, (lfa.contentID for lfa in lfas))
            sprs = load(
                SourcePackageRelease,
                itertools.chain.from_iterable(
                    (pd.from_sourceID, pd.to_sourceID) for pd in rows))
            archives = load(Archive, (spr.upload_archiveID for spr in sprs))
            load(Distribution, (a.distributionID for a in archives))

        if preload_for_display:
            return DecoratedResultSet(result, pre_iter_hook=preload_hook)
        else:
            return result
Example #12
    def featured_projects(self):
        """See `IPillarSet`."""
        # Circular imports.
        from lp.registry.model.distribution import Distribution
        from lp.registry.model.product import Product
        from lp.registry.model.projectgroup import ProjectGroup

        store = IStore(PillarName)
        pillar_names = store.find(PillarName,
                                  PillarName.id == FeaturedProject.pillar_name)

        def preload_pillars(rows):
            pillar_names = (set(rows).union(
                load_related(PillarName, rows, ['alias_for'])))
            pillars = load_related(Product, pillar_names, ['productID'])
            pillars.extend(
                load_related(ProjectGroup, pillar_names, ['projectgroupID']))
            pillars.extend(
                load_related(Distribution, pillar_names, ['distributionID']))
            load_related(LibraryFileAlias, pillars, ['iconID'])

        return list(
            DecoratedResultSet(pillar_names,
                               result_decorator=attrgetter('pillar'),
                               pre_iter_hook=preload_pillars))
Example #13
    def getReleasesAndPublishingHistory(self):
        """See `IDistributionSourcePackage`."""
        pub_constraints = (
            DistroSeries.distribution == self.distribution,
            SourcePackagePublishingHistory.distroseries == DistroSeries.id,
            SourcePackagePublishingHistory.archiveID.is_in(
                self.distribution.all_distro_archive_ids),
            SourcePackagePublishingHistory.sourcepackagename ==
            self.sourcepackagename,
        )

        # Find distinct SPRs for our SPN in our archives.
        spr_ids = Store.of(self.distribution).find(
            SourcePackagePublishingHistory.sourcepackagereleaseID,
            *pub_constraints).order_by(
                Desc(SourcePackagePublishingHistory.sourcepackagereleaseID)
            ).config(distinct=True)

        def decorate(spr_ids):
            # Find the SPPHs for each SPR in our result.
            load(SourcePackageRelease, spr_ids)
            sprs = [SourcePackageRelease.get(spr_id) for spr_id in spr_ids]
            pubs = DistributionSourcePackageRelease.getPublishingHistories(
                self.distribution, sprs)
            sprs_by_id = dict((spr, list(pubs))
                              for (spr, pubs) in itertools.groupby(
                                  pubs, attrgetter('sourcepackagereleaseID')))
            return [(DistributionSourcePackageRelease(
                distribution=self.distribution,
                sourcepackagerelease=spr), sprs_by_id[spr.id]) for spr in sprs]

        return DecoratedResultSet(spr_ids, bulk_decorator=decorate)
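Unlike most of the other examples, #13 passes bulk_decorator, which (as used here) receives the whole batch of raw values at once and returns the corresponding decorated batch, so the decoration itself can issue batched lookups. A small sketch of that shape, with an in-memory RELEASES table standing in for the load() calls:

RELEASES = {3: "1.0-3", 2: "1.0-2", 1: "1.0-1"}  # stands in for load()


def bulk_decorate(spr_ids):
    # One shared lookup for the whole batch, then one output row per input
    # row, preserving the incoming (descending id) order.
    releases = {spr_id: RELEASES[spr_id] for spr_id in spr_ids}
    return [(spr_id, releases[spr_id]) for spr_id in spr_ids]


assert bulk_decorate([3, 2, 1]) == [(3, "1.0-3"), (2, "1.0-2"), (1, "1.0-1")]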
Example #14
    def getByArchive(archive,
                     container=None,
                     path=None,
                     only_condemned=False,
                     eager_load=False):
        """See `IArchiveFileSet`."""
        clauses = [ArchiveFile.archive == archive]
        # XXX cjwatson 2016-03-15: We'll need some more sophisticated way to
        # match containers once we're using them for custom uploads.
        if container is not None:
            clauses.append(ArchiveFile.container == container)
        if path is not None:
            clauses.append(ArchiveFile.path == path)
        if only_condemned:
            clauses.append(ArchiveFile.scheduled_deletion_date != None)
        archive_files = IStore(ArchiveFile).find(ArchiveFile, *clauses)

        def preload_files(rows):
            lfas = load_related(LibraryFileAlias, rows, ["library_file_id"])
            load_related(LibraryFileContent, lfas, ["contentID"])

        if eager_load:
            return DecoratedResultSet(
                archive_files, pre_iter_hook=preload_files)
        else:
            return archive_files
Example #15
    def getOwnedAndRequestedReviews(self,
                                    status=None,
                                    visible_by_user=None,
                                    project=None,
                                    eager_load=False):
        """See `IHasRequestedReviews`."""
        # Circular import.
        from lp.code.model.branchmergeproposal import BranchMergeProposal

        if not status:
            status = (BranchMergeProposalStatus.NEEDS_REVIEW, )

        def _getProposals(collection):
            collection = collection.visibleByUser(visible_by_user)
            return collection.getMergeProposalsForPerson(self,
                                                         status,
                                                         eager_load=False)

        bzr_collection = removeSecurityProxy(getUtility(IAllBranches))
        git_collection = removeSecurityProxy(getUtility(IAllGitRepositories))
        if project is not None:
            bzr_collection = bzr_collection.inProduct(project)
            git_collection = git_collection.inProject(project)
        proposals = _getProposals(bzr_collection).union(
            _getProposals(git_collection))
        if not eager_load:
            return proposals
        else:
            loader = partial(BranchMergeProposal.preloadDataForBMPs,
                             user=visible_by_user)
            return DecoratedResultSet(proposals, pre_iter_hook=loader)
Example #16
    def getMergeProposals(self,
                          status=None,
                          visible_by_user=None,
                          eager_load=False):
        """See `IHasMergeProposals`."""
        # Circular import.
        from lp.code.model.branchmergeproposal import BranchMergeProposal

        if not status:
            status = (BranchMergeProposalStatus.CODE_APPROVED,
                      BranchMergeProposalStatus.NEEDS_REVIEW,
                      BranchMergeProposalStatus.WORK_IN_PROGRESS)

        def _getProposals(interface):
            collection = removeSecurityProxy(interface(self))
            collection = collection.visibleByUser(visible_by_user)
            return collection.getMergeProposals(status, eager_load=False)

        # SourcePackage Bazaar branches are an aberration which was not
        # replicated for Git, so SourcePackage does not support Git.
        if ISourcePackage.providedBy(self):
            proposals = _getProposals(IBranchCollection)
        else:
            proposals = _getProposals(IBranchCollection).union(
                _getProposals(IGitCollection))
        if not eager_load:
            return proposals
        else:
            loader = partial(BranchMergeProposal.preloadDataForBMPs,
                             user=visible_by_user)
            return DecoratedResultSet(proposals, pre_iter_hook=loader)
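Both merge-proposal examples bind the viewing user into the preload hook with functools.partial, because pre_iter_hook is called with the rows only. A minimal sketch of that binding follows; the preload body here is a hypothetical stand-in for BranchMergeProposal.preloadDataForBMPs.

from functools import partial


def preload_data_for_bmps(rows, user=None):
    # Hypothetical stand-in: a batched preload that also needs to know the
    # viewing user, e.g. to filter private data.
    print("preloading %d proposals for %r" % (len(rows), user))


# partial() pins the keyword argument, leaving a one-argument callable that
# matches the pre_iter_hook(rows) contract used throughout these examples.
loader = partial(preload_data_for_bmps, user="alice")
loader(["mp-1", "mp-2"])  # prints: preloading 2 proposals for 'alice'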
Example #17
 def fetchTranslatorData(self):
     """See `ITranslationGroup`."""
     # Fetch Translator, Language, and Person; but also prefetch the
     # icon information.
     using = [
         Translator,
         Language,
         Person,
         LeftJoin(LibraryFileAlias, LibraryFileAlias.id == Person.iconID),
         LeftJoin(
             LibraryFileContent,
             LibraryFileContent.id == LibraryFileAlias.contentID),
         ]
     tables = (
         Translator,
         Language,
         Person,
         LibraryFileAlias,
         LibraryFileContent,
         )
     translator_data = Store.of(self).using(*using).find(
         tables,
         Translator.translationgroup == self,
         Language.id == Translator.languageID,
         Person.id == Translator.translatorID)
     translator_data = translator_data.order_by(Language.englishname)
     mapper = lambda row: row[slice(0, 3)]
     return DecoratedResultSet(translator_data, mapper)
Example #18
    def getAllLanguages(self,
                        want_translators_count=False,
                        only_visible=False):
        """See `ILanguageSet`."""
        result = IStore(Language).find(
            Language,
            Language.visible == True if only_visible else True,
        ).order_by(Language.englishname)
        if want_translators_count:

            def preload_translators_count(languages):
                from lp.registry.model.person import PersonLanguage
                ids = set(language.id
                          for language in languages).difference(set([None]))
                counts = IStore(Language).using(
                    LeftJoin(Language, self._getTranslatorJoins(),
                             PersonLanguage.languageID == Language.id), ).find(
                                 (Language, Count(PersonLanguage)),
                                 Language.id.is_in(ids),
                             ).group_by(Language)
                for language, count in counts:
                    get_property_cache(language).translators_count = count

            return DecoratedResultSet(result,
                                      pre_iter_hook=preload_translators_count)
        return result
Example #19
 def getActiveJobs(cls, target_archive):
     """See `IPlainPackageCopyJobSource`."""
     jobs = IStore(PackageCopyJob).find(
         PackageCopyJob, PackageCopyJob.job_type == cls.class_job_type,
         PackageCopyJob.target_archive == target_archive,
         Job.id == PackageCopyJob.job_id, Job._status == JobStatus.WAITING)
     jobs = jobs.order_by(PackageCopyJob.id)
     return DecoratedResultSet(jobs, cls)
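Several of the job sources (examples #19, #23, #25 and #30) pass the class itself as the decorator: DecoratedResultSet(jobs, cls) wraps each raw PackageCopyJob row in the concrete job class on iteration, since a class is just another one-argument callable. A tiny sketch of that pattern with a hypothetical wrapper class:

class PlainCopyJob:
    """Hypothetical derived-job wrapper; the real cls is the job source class."""

    def __init__(self, context):
        self.context = context  # the raw row the query returned


raw_rows = ["PackageCopyJob<1>", "PackageCopyJob<2>"]
wrapped = [PlainCopyJob(row) for row in raw_rows]  # what the decorator does per row
assert wrapped[0].context == "PackageCopyJob<1>"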
Example #20
    def contextSpecificSearch(self):
        """See `AbstractPackageSearchView`."""

        if self.search_by_binary_name:
            return self.context.searchBinaryPackages(self.text)
        else:
            non_exact_matches = self.context.searchSourcePackageCaches(
                self.text)

            # The searchSourcePackageCaches() method returns tuples, so we
            # use the DecoratedResultSet here to just get the
            # DistributionSourcePackage objects for the template.
            def tuple_to_package_cache(cache_name_tuple):
                return cache_name_tuple[0]

            non_exact_matches = DecoratedResultSet(non_exact_matches,
                                                   tuple_to_package_cache)

        return non_exact_matches.config(distinct=True)
 def getByBuildFarmJobs(cls, build_farm_jobs):
     """See `ISpecificBuildFarmJobSource`."""
     if len(build_farm_jobs) == 0:
         return EmptyResultSet()
     rows = Store.of(build_farm_jobs[0]).find(
         cls,
         cls.build_farm_job_id.is_in(bfj.id for bfj in build_farm_jobs))
     return DecoratedResultSet(rows, pre_iter_hook=cls.preloadBuildsData)
Example #22
 def getByBuildFarmJobs(cls, buildfarmjobs, store=None):
     """See `ITranslationTemplatesBuildSource`."""
     store = cls._getStore(store)
     rows = store.find(
         TranslationTemplatesBuild,
         TranslationTemplatesBuild.build_farm_job_id.is_in(
             bfj.id for bfj in buildfarmjobs))
     return DecoratedResultSet(rows, pre_iter_hook=cls.preloadBuildsData)
Example #23
 def getPendingJobsForTargetSeries(cls, target_series):
     """Get upcoming jobs for `target_series`, ordered by age."""
     raw_jobs = IStore(PackageCopyJob).find(
         PackageCopyJob, Job.id == PackageCopyJob.job_id,
         PackageCopyJob.job_type == cls.class_job_type,
         PackageCopyJob.target_distroseries == target_series,
         Job._status.is_in(Job.PENDING_STATUSES))
     raw_jobs = raw_jobs.order_by(PackageCopyJob.id)
     return DecoratedResultSet(raw_jobs, cls)
    def sample_binary_packages(self):
        """See IDistributionSourcePackageRelease."""
        #avoid circular imports.
        from lp.registry.model.distroseries import DistroSeries
        from lp.soyuz.model.distroarchseries import DistroArchSeries
        from lp.soyuz.model.distroseriespackagecache import (
            DistroSeriesPackageCache)
        archive_ids = list(self.distribution.all_distro_archive_ids)
        result_row = (
            SQL('DISTINCT ON(BinaryPackageName.name) 0 AS ignore'),
            BinaryPackagePublishingHistory, DistroSeriesPackageCache,
            BinaryPackageRelease, BinaryPackageName)
        tables = (
            BinaryPackagePublishingHistory,
            Join(
                DistroArchSeries,
                DistroArchSeries.id ==
                BinaryPackagePublishingHistory.distroarchseriesID),
            Join(
                DistroSeries,
                DistroArchSeries.distroseriesID == DistroSeries.id),
            Join(
                BinaryPackageRelease,
                BinaryPackageRelease.id ==
                BinaryPackagePublishingHistory.binarypackagereleaseID),
            Join(
                BinaryPackageName,
                BinaryPackageName.id ==
                BinaryPackageRelease.binarypackagenameID),
            Join(
                BinaryPackageBuild,
                BinaryPackageBuild.id == BinaryPackageRelease.buildID),
            LeftJoin(
                DistroSeriesPackageCache,
                And(
                    DistroSeriesPackageCache.distroseries == DistroSeries.id,
                    DistroSeriesPackageCache.archiveID.is_in(archive_ids),
                    DistroSeriesPackageCache.binarypackagename ==
                    BinaryPackageName.id)))

        all_published = Store.of(self.distribution).using(*tables).find(
            result_row,
            DistroSeries.distribution == self.distribution,
            BinaryPackagePublishingHistory.archiveID.is_in(archive_ids),
            BinaryPackageBuild.source_package_release ==
                self.sourcepackagerelease)
        all_published = all_published.order_by(
            BinaryPackageName.name)

        def make_dsb_package(row):
            publishing = row[1]
            package_cache = row[2]
            return DistroSeriesBinaryPackage(
                publishing.distroarchseries.distroseries,
                publishing.binarypackagerelease.binarypackagename,
                package_cache)
        return DecoratedResultSet(all_published, make_dsb_package)
Example #25
 def getIncompleteJobsForArchive(cls, archive):
     """See `IPlainPackageCopyJobSource`."""
     jobs = IStore(PackageCopyJob).find(
         PackageCopyJob, PackageCopyJob.target_archive == archive,
         PackageCopyJob.job_type == cls.class_job_type,
         Job.id == PackageCopyJob.job_id,
         Job._status.is_in(
             [JobStatus.WAITING, JobStatus.RUNNING, JobStatus.FAILED]))
     return DecoratedResultSet(jobs, cls)
Example #27
 def _naiveGetMergeProposals(self,
                             statuses=None,
                             for_branches=None,
                             target_branch=None,
                             prerequisite_branch=None,
                             merged_revnos=None,
                             merged_revision=None,
                             eager_load=False):
     Target = ClassAlias(Branch, "target")
     extra_tables = list(
         set(self._tables.values() + self._asymmetric_tables.values()))
     tables = [Branch] + extra_tables + [
         Join(
             BranchMergeProposal,
             And(
                 Branch.id == BranchMergeProposal.source_branchID,
                 *(self._branch_filter_expressions +
                   self._asymmetric_filter_expressions))),
         Join(Target, Target.id == BranchMergeProposal.target_branchID),
     ]
     expressions = self._getBranchVisibilityExpression()
     expressions.extend(self._getBranchVisibilityExpression(Target))
     if for_branches is not None:
         branch_ids = [branch.id for branch in for_branches]
         expressions.append(
             BranchMergeProposal.source_branchID.is_in(branch_ids))
     if target_branch is not None:
         expressions.append(
             BranchMergeProposal.target_branch == target_branch)
     if prerequisite_branch is not None:
         expressions.append(
             BranchMergeProposal.prerequisite_branch == prerequisite_branch)
     if merged_revnos is not None:
         expressions.append(
             BranchMergeProposal.merged_revno.is_in(merged_revnos))
     if merged_revision is not None:
         expressions.extend([
             BranchMergeProposal.merged_revno == BranchRevision.sequence,
             BranchRevision.revision_id == Revision.id,
             BranchRevision.branch_id ==
             BranchMergeProposal.target_branchID,
             Revision.revision_id == merged_revision
         ])
         tables.extend([BranchRevision, Revision])
     if statuses is not None:
         expressions.append(
             BranchMergeProposal.queue_status.is_in(statuses))
     resultset = self.store.using(*tables).find(BranchMergeProposal,
                                                *expressions)
     if not eager_load:
         return resultset
     else:
         loader = partial(BranchMergeProposal.preloadDataForBMPs,
                          user=self._user)
         return DecoratedResultSet(resultset, pre_iter_hook=loader)
Example #28
 def _naiveGetMergeProposals(self,
                             statuses=None,
                             target_repository=None,
                             target_path=None,
                             prerequisite_repository=None,
                             prerequisite_path=None,
                             merged_revision_ids=None,
                             merge_proposal_ids=None,
                             eager_load=False):
     Target = ClassAlias(GitRepository, "target")
     extra_tables = list(
         set(self._tables.values() + self._asymmetric_tables.values()))
     tables = [GitRepository] + extra_tables + [
         Join(
             BranchMergeProposal,
             And(
                 GitRepository.id
                 == BranchMergeProposal.source_git_repositoryID,
                 *(self._filter_expressions +
                   self._asymmetric_filter_expressions))),
         Join(Target, Target.id
              == BranchMergeProposal.target_git_repositoryID),
     ]
     expressions = self._getRepositoryVisibilityExpression()
     expressions.extend(self._getRepositoryVisibilityExpression(Target))
     if target_repository is not None:
         expressions.append(
             BranchMergeProposal.target_git_repository == target_repository)
     if target_path is not None:
         expressions.append(
             BranchMergeProposal.target_git_path == target_path)
     if prerequisite_repository is not None:
         expressions.append(BranchMergeProposal.prerequisite_git_repository
                            == prerequisite_repository)
     if prerequisite_path is not None:
         expressions.append(
             BranchMergeProposal.prerequisite_git_path == prerequisite_path)
     if merged_revision_ids is not None:
         expressions.append(
             BranchMergeProposal.merged_revision_id.is_in(
                 merged_revision_ids))
     if merge_proposal_ids is not None:
         expressions.append(
             BranchMergeProposal.id.is_in(merge_proposal_ids))
     if statuses is not None:
         expressions.append(
             BranchMergeProposal.queue_status.is_in(statuses))
     resultset = self.store.using(*tables).find(BranchMergeProposal,
                                                *expressions)
     if not eager_load:
         return resultset
     else:
         loader = partial(BranchMergeProposal.preloadDataForBMPs,
                          user=self._user)
         return DecoratedResultSet(resultset, pre_iter_hook=loader)
Example #29
    def store_upload_jobs(self):
        jobs = Store.of(self).find(
            SnapBuildJob,
            SnapBuildJob.snapbuild == self,
            SnapBuildJob.job_type == SnapBuildJobType.STORE_UPLOAD)
        jobs.order_by(Desc(SnapBuildJob.job_id))

        def preload_jobs(rows):
            load_related(Job, rows, ["job_id"])

        return DecoratedResultSet(
            jobs, lambda job: job.makeDerived(), pre_iter_hook=preload_jobs)
Example #30
 def find(cls, product, date_since=None, job_type=None):
     """See `IPersonMergeJobSource`."""
     conditions = [
         ProductJob.job_id == Job.id,
         ProductJob.product == product.id,
     ]
     if date_since is not None:
         conditions.append(Job.date_created >= date_since)
     if job_type is not None:
         conditions.append(ProductJob.job_type == job_type)
     return DecoratedResultSet(
         IStore(ProductJob).find(ProductJob, *conditions), cls)
Example #31
    def getByArchive(self, archive, current_only=True):
        """See `IArchiveSubscriberSet`."""
        extra_exprs = self._getExprsForSubscriptionQueries(
            archive, current_only)

        store = Store.of(archive)
        result = store.using(
            ArchiveSubscriber,
            Join(Person, ArchiveSubscriber.subscriber_id == Person.id)).find(
                (ArchiveSubscriber, Person),
                *extra_exprs).order_by(Person.name)
        return DecoratedResultSet(result, itemgetter(0))