Example #1
    def findSourcesForDomination(self, distroseries, pocket):
        """Find binary publications that need dominating.

        This is only for traditional domination, where the latest published
        publication is always kept published.  See `find_live_source_versions`
        for this logic.

        To optimize for that logic, `findSourcesForDomination` will ignore
        publications that have no other publications competing for the same
        source package.  There'd be nothing to do for those cases.
        """
        SPPH = SourcePackagePublishingHistory
        SPR = SourcePackageRelease

        spph_location_clauses = self._composeActiveSourcePubsCondition(
            distroseries, pocket)
        candidate_source_names = Select(SPPH.sourcepackagenameID,
                                        And(join_spph_spr(),
                                            spph_location_clauses),
                                        group_by=SPPH.sourcepackagenameID,
                                        having=(Count() > 1))

        # We'll also access the SourcePackageReleases associated with
        # the publications we find.  Since they're in the join anyway,
        # load them alongside the publications.
        # Actually we'll also want the SourcePackageNames, but adding
        # those to the (outer) query would complicate it, and
        # potentially slow it down.
        query = IStore(SPPH).find(
            (SPPH, SPR), join_spph_spr(),
            SPPH.sourcepackagenameID.is_in(candidate_source_names),
            spph_location_clauses)
        spphs = DecoratedResultSet(query, itemgetter(0))
        load_related(SourcePackageName, spphs, ['sourcepackagenameID'])
        return spphs
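
The candidate_source_names subquery above is the optimization the docstring
describes: only package names with more than one active publication are
considered, because a name with a single publication has nothing to dominate.
A minimal, self-contained sketch of that filtering idea over plain in-memory
records (the record shape and names here are hypothetical; the real code
expresses the same thing as a Storm Select with group_by/having Count() > 1):

from collections import Counter

def names_needing_domination(publications):
    """Return package names that occur in more than one publication."""
    counts = Counter(pub["name"] for pub in publications)
    return {name for name, count in counts.items() if count > 1}

# 'foo' has two competing publications; 'bar' has nothing to dominate.
pubs = [
    {"name": "foo", "version": "1.1"},
    {"name": "foo", "version": "1.0"},
    {"name": "bar", "version": "2.0"},
]
print(names_needing_domination(pubs))  # {'foo'}
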
Example #2
    def findBinariesForDomination(self, distroarchseries, pocket):
        """Find binary publications that need dominating.

        This is only for traditional domination, where the latest published
        publication is always kept published.  It will ignore publications
        that have no other publications competing for the same binary package.
        """
        BPPH = BinaryPackagePublishingHistory
        BPR = BinaryPackageRelease

        bpph_location_clauses = [
            BPPH.status == PackagePublishingStatus.PUBLISHED,
            BPPH.distroarchseries == distroarchseries,
            BPPH.archive == self.archive,
            BPPH.pocket == pocket,
            ]
        candidate_binary_names = Select(
            BPPH.binarypackagenameID, And(*bpph_location_clauses),
            group_by=BPPH.binarypackagenameID, having=(Count() > 1))
        main_clauses = bpph_location_clauses + [
            BPR.id == BPPH.binarypackagereleaseID,
            BPR.binarypackagenameID.is_in(candidate_binary_names),
            BPR.binpackageformat != BinaryPackageFormat.DDEB,
            ]

        # We're going to access the BPRs as well.  Since we make the
        # database look them up anyway, and since there won't be many
        # duplications among them, load them alongside the publications.
        # We'll also want their BinaryPackageNames, but adding those to
        # the join would complicate the query.
        query = IStore(BPPH).find((BPPH, BPR), *main_clauses)
        bpphs = list(DecoratedResultSet(query, itemgetter(0)))
        load_related(BinaryPackageName, bpphs, ['binarypackagenameID'])
        return bpphs
Example #3
    def __call__(self, chunk_size):
        bpbs = list(self.findBuilds()[:chunk_size])
        sprs = load_related(
            SourcePackageRelease, bpbs, ['source_package_release_id'])
        load_related(SourcePackageName, sprs, ['sourcepackagenameID'])
        chroots = IStore(PocketChroot).find(
            PocketChroot,
            PocketChroot.distroarchseriesID.is_in(
                b.distro_arch_series_id for b in bpbs),
            PocketChroot.chroot != None)
        chroot_series = set(chroot.distroarchseriesID for chroot in chroots)
        for build in bpbs:
            if (build.distro_arch_series.distroseries.status ==
                    SeriesStatus.OBSOLETE
                or not build.can_be_retried
                or build.distro_arch_series_id not in chroot_series):
                continue
            try:
                build.updateDependencies()
            except UnparsableDependencies as e:
                self.log.error(e)
                continue

            if not build.dependencies:
                self.log.debug('Retrying %s', build.title)
                build.retry()
                build.buildqueue_record.score()

        self.start_at = bpbs[-1].id + 1

        if not self.dry_run:
            transaction.commit()
        else:
            transaction.abort()
Example #4
    def __call__(self, chunk_size):
        bpbs = list(self.findBuilds()[:chunk_size])
        sprs = load_related(SourcePackageRelease, bpbs,
                            ['source_package_release_id'])
        load_related(SourcePackageName, sprs, ['sourcepackagenameID'])
        chroots = IStore(PocketChroot).find(
            PocketChroot,
            PocketChroot.distroarchseriesID.is_in(b.distro_arch_series_id
                                                  for b in bpbs),
            PocketChroot.chroot != None)
        chroot_series = set(chroot.distroarchseriesID for chroot in chroots)
        for build in bpbs:
            if (build.distro_arch_series.distroseries.status
                    == SeriesStatus.OBSOLETE or not build.can_be_retried
                    or build.distro_arch_series_id not in chroot_series):
                continue
            try:
                build.updateDependencies()
            except UnparsableDependencies as e:
                self.log.error(e)
                continue

            if not build.dependencies:
                self.log.debug('Retrying %s', build.title)
                build.retry()
                build.buildqueue_record.score()

        self.start_at = bpbs[-1].id + 1

        if not self.dry_run:
            transaction.commit()
        else:
            transaction.abort()
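
The tail of __call__ is a common dry-run idiom: do the chunk's work, then
either commit or roll everything back depending on a flag. A minimal sketch of
just that idiom, assuming the standard transaction package; process_chunk is a
hypothetical stand-in for the retry loop above:

import transaction

def run_chunk(process_chunk, dry_run):
    """Run one chunk of work; keep its changes only outside dry-run mode."""
    process_chunk()
    if dry_run:
        # Discard every pending database change made by this chunk.
        transaction.abort()
    else:
        # Persist the chunk's changes before moving on to the next one.
        transaction.commit()
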
Example #5
    def findBinariesForDomination(self, distroarchseries, pocket):
        """Find binary publications that need dominating.

        This is only for traditional domination, where the latest published
        publication is always kept published.  It will ignore publications
        that have no other publications competing for the same binary package.
        """
        BPPH = BinaryPackagePublishingHistory
        BPR = BinaryPackageRelease

        bpph_location_clauses = [
            BPPH.status == PackagePublishingStatus.PUBLISHED,
            BPPH.distroarchseries == distroarchseries,
            BPPH.archive == self.archive,
            BPPH.pocket == pocket,
        ]
        candidate_binary_names = Select(BPPH.binarypackagenameID,
                                        And(*bpph_location_clauses),
                                        group_by=BPPH.binarypackagenameID,
                                        having=(Count() > 1))
        main_clauses = bpph_location_clauses + [
            BPR.id == BPPH.binarypackagereleaseID,
            BPR.binarypackagenameID.is_in(candidate_binary_names),
            BPR.binpackageformat != BinaryPackageFormat.DDEB,
        ]

        # We're going to access the BPRs as well.  Since we make the
        # database look them up anyway, and since there won't be many
        # duplications among them, load them alongside the publications.
        # We'll also want their BinaryPackageNames, but adding those to
        # the join would complicate the query.
        query = IStore(BPPH).find((BPPH, BPR), *main_clauses)
        bpphs = list(DecoratedResultSet(query, itemgetter(0)))
        load_related(BinaryPackageName, bpphs, ['binarypackagenameID'])
        return bpphs
Example #6
 def do_eager_load(rows):
     repository_ids = set(repository.id for repository in rows)
     if not repository_ids:
         return
     GenericGitCollection.preloadDataForRepositories(rows)
     # So far have only needed the persons for their canonical_url - no
     # need for validity etc in the API call.
     load_related(Person, rows, ['owner_id', 'registrant_id'])
Example #7
    def decoratedQueueBatch(self):
        """Return the current batch, converted to decorated objects.

        Each batch item, a PackageUpload, is converted to a
        CompletePackageUpload.  This avoids many additional SQL queries
        in the +queue template.
        """
        uploads = list(self.batchnav.currentBatch())

        if len(uploads) == 0:
            return None

        upload_ids = [upload.id for upload in uploads]
        puses = load_referencing(PackageUploadSource, uploads,
                                 ['packageuploadID'])
        pubs = load_referencing(PackageUploadBuild, uploads,
                                ['packageuploadID'])

        source_sprs = load_related(SourcePackageRelease, puses,
                                   ['sourcepackagereleaseID'])
        bpbs = load_related(BinaryPackageBuild, pubs, ['buildID'])
        bprs = load_referencing(BinaryPackageRelease, bpbs, ['buildID'])
        source_files = load_referencing(SourcePackageReleaseFile, source_sprs,
                                        ['sourcepackagereleaseID'])
        binary_files = load_referencing(BinaryPackageFile, bprs,
                                        ['binarypackagereleaseID'])
        file_lfas = load_related(LibraryFileAlias, source_files + binary_files,
                                 ['libraryfileID'])
        load_related(LibraryFileContent, file_lfas, ['contentID'])

        # Get a dictionary of lists of binary files keyed by upload ID.
        package_upload_builds_dict = self.builds_dict(upload_ids, binary_files)

        build_upload_files, binary_package_names = self.binary_files_dict(
            package_upload_builds_dict, binary_files)

        # Get a dictionary of lists of source files keyed by upload ID.
        package_upload_source_dict = {}
        for pus in puses:
            package_upload_source_dict[pus.sourcepackagereleaseID] = pus
        source_upload_files = self.source_files_dict(
            package_upload_source_dict, source_files)

        # Get a list of binary package names that already exist in
        # the distribution.  This avoids multiple queries to is_new
        # on IBinaryPackageRelease.
        self.old_binary_packages = self.calculateOldBinaries(
            binary_package_names)

        package_sets = self.getPackagesetsFor(source_sprs)

        self.loadPackageCopyJobs(uploads)

        return [
            CompletePackageUpload(item, build_upload_files,
                                  source_upload_files, package_sets)
            for item in uploads
        ]
Example #8
    def decoratedQueueBatch(self):
        """Return the current batch, converted to decorated objects.

        Each batch item, a PackageUpload, is converted to a
        CompletePackageUpload.  This avoids many additional SQL queries
        in the +queue template.
        """
        uploads = list(self.batchnav.currentBatch())

        if len(uploads) == 0:
            return None

        upload_ids = [upload.id for upload in uploads]
        puses = load_referencing(
            PackageUploadSource, uploads, ['packageuploadID'])
        pubs = load_referencing(
            PackageUploadBuild, uploads, ['packageuploadID'])

        source_sprs = load_related(
            SourcePackageRelease, puses, ['sourcepackagereleaseID'])
        bpbs = load_related(BinaryPackageBuild, pubs, ['buildID'])
        bprs = load_referencing(BinaryPackageRelease, bpbs, ['buildID'])
        source_files = load_referencing(
            SourcePackageReleaseFile, source_sprs, ['sourcepackagereleaseID'])
        binary_files = load_referencing(
            BinaryPackageFile, bprs, ['binarypackagereleaseID'])
        file_lfas = load_related(
            LibraryFileAlias, source_files + binary_files, ['libraryfileID'])
        load_related(LibraryFileContent, file_lfas, ['contentID'])

        # Get a dictionary of lists of binary files keyed by upload ID.
        package_upload_builds_dict = self.builds_dict(upload_ids, binary_files)

        build_upload_files, binary_package_names = self.binary_files_dict(
            package_upload_builds_dict, binary_files)

        # Get a dictionary of lists of source files keyed by upload ID.
        package_upload_source_dict = {}
        for pus in puses:
            package_upload_source_dict[pus.sourcepackagereleaseID] = pus
        source_upload_files = self.source_files_dict(
            package_upload_source_dict, source_files)

        # Get a list of binary package names that already exist in
        # the distribution.  This avoids multiple queries to is_new
        # on IBinaryPackageRelease.
        self.old_binary_packages = self.calculateOldBinaries(
            binary_package_names)

        package_sets = self.getPackagesetsFor(source_sprs)

        self.loadPackageCopyJobs(uploads)

        return [
            CompletePackageUpload(
                item, build_upload_files, source_upload_files, package_sets)
            for item in uploads]
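
Both versions of decoratedQueueBatch lean on two bulk-loading helpers:
load_related follows foreign keys from the given rows (e.g. PackageUploadSource
to SourcePackageRelease), while load_referencing finds rows that point back at
them (e.g. the PackageUploadSources for a set of PackageUploads), each in a
single query. A rough in-memory approximation of that distinction, with
hypothetical namedtuple "tables" standing in for database tables:

from collections import namedtuple

Upload = namedtuple("Upload", ["id"])
UploadSource = namedtuple("UploadSource", ["id", "packageupload_id"])

def load_related_sketch(table, rows, fk_attr):
    """Fetch the objects referenced by rows[i].<fk_attr> (one hop forward)."""
    wanted = {getattr(row, fk_attr) for row in rows} - {None}
    return [obj for obj in table if obj.id in wanted]

def load_referencing_sketch(table, rows, fk_attr):
    """Fetch the objects whose <fk_attr> points back at one of rows."""
    targets = {row.id for row in rows}
    return [obj for obj in table if getattr(obj, fk_attr) in targets]

uploads = [Upload(1), Upload(2)]
sources = [UploadSource(10, 1), UploadSource(11, 1), UploadSource(12, 3)]
print(load_referencing_sketch(sources, uploads, "packageupload_id"))
# -> the two UploadSource rows pointing at upload 1
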
Example #9
 def preloadBuildsData(cls, builds):
     # Circular imports.
     from lp.services.librarian.model import LibraryFileAlias
     # Load the related branches.
     branches = load_related(Branch, builds, ['branch_id'])
     # Preload branches' cached associated targets, product series, and
     # suite source packages for all the related branches.
     GenericBranchCollection.preloadDataForBranches(branches)
     load_related(LibraryFileAlias, builds, ['log_id'])
Example #10
 def preloadGrantsForRules(rules):
     """Preload the access grants related to an iterable of rules."""
     grants = load_referencing(GitRuleGrant, rules, ["rule_id"])
     grants_map = defaultdict(list)
     for grant in grants:
         grants_map[grant.rule_id].append(grant)
     for rule in rules:
         get_property_cache(rule).grants = grants_map[rule.id]
     load_related(Person, grants, ["grantee_id"])
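
preloadGrantsForRules shows the usual "bucket children by parent id" step used
to warm a cached property in one pass instead of one query per rule. A
self-contained sketch of that grouping, using hypothetical plain classes
instead of the real GitRule/GitRuleGrant models and an ordinary attribute in
place of get_property_cache:

from collections import defaultdict

class Rule(object):
    def __init__(self, rule_id):
        self.id = rule_id
        self.grants = []

class Grant(object):
    def __init__(self, rule_id, grantee):
        self.rule_id = rule_id
        self.grantee = grantee

def attach_grants(rules, grants):
    """Annotate each rule with its grants in a single pass over the grants."""
    by_rule = defaultdict(list)
    for grant in grants:
        by_rule[grant.rule_id].append(grant)
    for rule in rules:
        rule.grants = by_rule[rule.id]

rules = [Rule(1), Rule(2)]
attach_grants(rules, [Grant(1, "alice"), Grant(1, "bob")])
print([len(rule.grants) for rule in rules])  # [2, 0]
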
Example #11
 def do_eager_load(rows):
     branch_ids = set(branch.id for branch in rows)
     if not branch_ids:
         return
     GenericBranchCollection.preloadDataForBranches(rows)
     # So far have only needed the persons for their canonical_url - no
     # need for validity etc in the /branches API call.
     load_related(Person, rows,
                  ['ownerID', 'registrantID', 'reviewerID'])
     load_referencing(BugBranch, rows, ['branchID'])
Example #12
 def do_eager_load(rows):
     branch_ids = set(branch.id for branch in rows)
     if not branch_ids:
         return
     GenericBranchCollection.preloadDataForBranches(rows)
     load_related(Product, rows, ['productID'])
     # So far have only needed the persons for their canonical_url - no
     # need for validity etc in the /branches API call.
     load_related(Person, rows,
         ['ownerID', 'registrantID', 'reviewerID'])
     load_referencing(BugBranch, rows, ['branchID'])
 def preloadJobsData(cls, jobs):
     # Circular imports.
     from lp.code.model.branch import Branch
     from lp.registry.model.product import Product
     from lp.code.model.branchcollection import GenericBranchCollection
     from lp.services.job.model.job import Job
     contexts = [job.context for job in jobs]
     load_related(Job, contexts, ['jobID'])
     branches = load_related(Branch, contexts, ['branchID'])
     GenericBranchCollection.preloadDataForBranches(branches)
     load_related(Product, branches, ['productID'])
 def preloadJobsData(cls, jobs):
     # Circular imports.
     from lp.code.model.branch import Branch
     from lp.registry.model.product import Product
     from lp.code.model.branchcollection import GenericBranchCollection
     from lp.services.job.model.job import Job
     contexts = [job.context for job in jobs]
     load_related(Job, contexts, ['jobID'])
     branches = load_related(Branch, contexts, ['branchID'])
     GenericBranchCollection.preloadDataForBranches(branches)
     load_related(Product, branches, ['productID'])
Example #15
 def preloadBuildsData(self, builds):
     # Circular import.
     from lp.soyuz.model.livefs import LiveFS
     load_related(Person, builds, ["requester_id"])
     load_related(LibraryFileAlias, builds, ["log_id"])
     archives = load_related(Archive, builds, ["archive_id"])
     load_related(Person, archives, ["ownerID"])
     load_related(LiveFS, builds, ["livefs_id"])
 def preloadBuildsData(cls, builds):
     # Circular imports.
     from lp.services.librarian.model import LibraryFileAlias
     # Load the related branches, products.
     branches = load_related(
         Branch, builds, ['branch_id'])
     load_related(
         Product, branches, ['productID'])
     # Preload branches' cached associated product series and
     # suite source packages for all the related branches.
     GenericBranchCollection.preloadDataForBranches(branches)
     load_related(LibraryFileAlias, builds, ['log_id'])
Example #17
def check_copy_permissions(person, archive, series, pocket, sources):
    """Check that `person` has permission to copy a package.

    :param person: User attempting the upload.
    :param archive: Destination `Archive`.
    :param series: Destination `DistroSeries`.
    :param pocket: Destination `Pocket`.
    :param sources: Sequence of `SourcePackagePublishingHistory`s for the
        packages to be copied.
    :raises CannotCopy: If the copy is not allowed.
    """
    # Circular import.
    from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease

    if person is None:
        raise CannotCopy("Cannot check copy permissions (no requester).")

    if len(sources) > 1:
        # Bulk-load the data we'll need from each source publication.
        load_related(SourcePackageRelease, sources, ["sourcepackagereleaseID"])

    # If there is a requester, check that they have upload permission into
    # the destination (archive, component, pocket). This check is done
    # here rather than in the security adapter because it requires more
    # info than is available in the security adapter.
    for source in sources:
        dest_series = series or source.distroseries
        spn = source.sourcepackagerelease.sourcepackagename
        policy = archive.getOverridePolicy(dest_series, pocket)
        override = policy.calculateSourceOverrides(
            {spn: SourceOverride(component=source.component)})[spn]

        # Is the destination pocket open at all?
        reason = archive.checkUploadToPocket(dest_series,
                                             pocket,
                                             person=person)
        if reason is not None:
            raise CannotCopy(reason)

        # If the package exists in the target, make sure the person has
        # upload permission for its component. Otherwise, any upload
        # permission on this archive will do.
        reason = archive.verifyUpload(person,
                                      spn,
                                      override.component,
                                      dest_series,
                                      strict_component=(override.new == False),
                                      pocket=pocket)
        if reason is not None:
            # Queue admins are allowed to copy even if they can't upload.
            if not archive.canAdministerQueue(person, override.component,
                                              pocket, dest_series):
                raise CannotCopy(reason)
Example #18
 def loadPackageCopyJobs(self, uploads):
     """Batch-load `PackageCopyJob`s and related information."""
     package_copy_jobs = load_related(
         PackageCopyJob, uploads, ['package_copy_job_id'])
     archives = load_related(
         Archive, package_copy_jobs, ['source_archive_id'])
     load_related(Distribution, archives, ['distributionID'])
     person_ids = map(attrgetter('ownerID'), archives)
     jobs = load_related(Job, package_copy_jobs, ['job_id'])
     person_ids.extend(map(attrgetter('requester_id'), jobs))
     list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
         person_ids, need_validity=True, need_icon=True))
 def preLoadDataForSourcePackageRecipes(sourcepackagerecipes):
     # Load the referencing SourcePackageRecipeData.
     spr_datas = load_referencing(
         SourcePackageRecipeData,
         sourcepackagerecipes, ['sourcepackage_recipe_id'])
     # Load the related branches.
     load_related(Branch, spr_datas, ['base_branch_id'])
     # Store the SourcePackageRecipeData in the sourcepackagerecipes
     # objects.
     for spr_data in spr_datas:
         cache = get_property_cache(spr_data.sourcepackage_recipe)
         cache._recipe_data = spr_data
     SourcePackageRecipeData.preLoadReferencedBranches(spr_datas)
 def preLoadDataForSourcePackageRecipes(sourcepackagerecipes):
     # Load the referencing SourcePackageRecipeData.
     spr_datas = load_referencing(SourcePackageRecipeData,
                                  sourcepackagerecipes,
                                  ['sourcepackage_recipe_id'])
     # Load the related branches.
     load_related(Branch, spr_datas, ['base_branch_id'])
     # Store the SourcePackageRecipeData in the sourcepackagerecipes
     # objects.
     for spr_data in spr_datas:
         cache = get_property_cache(spr_data.sourcepackage_recipe)
         cache._recipe_data = spr_data
     SourcePackageRecipeData.preLoadReferencedBranches(spr_datas)
Example #21
 def preloadSpecificBuild(queues):
     from lp.buildmaster.model.buildfarmjob import BuildFarmJob
     queues = [removeSecurityProxy(bq) for bq in queues]
     load_related(BuildFarmJob, queues, ['_build_farm_job_id'])
     bfj_to_bq = dict((bq._build_farm_job, bq) for bq in queues)
     key = attrgetter('_build_farm_job.job_type')
     for job_type, group in groupby(sorted(queues, key=key), key=key):
         source = getUtility(ISpecificBuildFarmJobSource, job_type.name)
         builds = source.getByBuildFarmJobs(
             [bq._build_farm_job for bq in group])
         for build in builds:
             bq = bfj_to_bq[removeSecurityProxy(build).build_farm_job]
             get_property_cache(bq).specific_build = build
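
preloadSpecificBuild relies on the sort-then-group idiom: itertools.groupby
only merges adjacent items, so the queue entries are sorted by job type before
grouping and each group can then be handed to the matching build-farm-job
source in one call. A tiny, self-contained illustration with hypothetical
(job_type, id) tuples:

from itertools import groupby
from operator import itemgetter

entries = [("recipe", 7), ("binary", 2), ("recipe", 5)]
key = itemgetter(0)
for job_type, group in groupby(sorted(entries, key=key), key=key):
    print(job_type, [build_id for _, build_id in group])
# binary [2]
# recipe [7, 5]
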
Example #22
 def loadPackageCopyJobs(self, uploads):
     """Batch-load `PackageCopyJob`s and related information."""
     package_copy_jobs = load_related(PackageCopyJob, uploads,
                                      ['package_copy_job_id'])
     archives = load_related(Archive, package_copy_jobs,
                             ['source_archive_id'])
     load_related(Distribution, archives, ['distributionID'])
     person_ids = map(attrgetter('ownerID'), archives)
     jobs = load_related(Job, package_copy_jobs, ['job_id'])
     person_ids.extend(map(attrgetter('requester_id'), jobs))
     list(
         getUtility(IPersonSet).getPrecachedPersonsFromIDs(
             person_ids, need_validity=True, need_icon=True))
Example #23
    def getDirtySuites(self, distribution):
        """Return set of suites that have packages pending publication."""
        self.logger.debug("Querying which suites are pending publication...")

        archive = distribution.main_archive
        pending = PackagePublishingStatus.PENDING
        pending_sources = list(archive.getPublishedSources(status=pending))
        pending_binaries = list(
            archive.getAllPublishedBinaries(status=pending))
        load_related(DistroArchSeries, pending_binaries,
                     ['distroarchseriesID'])
        return set(pub.distroseries.name + pocketsuffix[pub.pocket]
                   for pub in pending_sources + pending_binaries)
    def getDirtySuites(self, distribution):
        """Return set of suites that have packages pending publication."""
        self.logger.debug("Querying which suites are pending publication...")

        archive = distribution.main_archive
        pending = PackagePublishingStatus.PENDING
        pending_sources = list(archive.getPublishedSources(status=pending))
        pending_binaries = list(archive.getAllPublishedBinaries(
            status=pending))
        load_related(
            DistroArchSeries, pending_binaries, ['distroarchseriesID'])
        return set(
            pub.distroseries.name + pocketsuffix[pub.pocket]
            for pub in pending_sources + pending_binaries)
    def preloadDataForBMPs(branch_merge_proposals, user):
        # Utility to load the data related to a list of bmps.
        # Circular imports.
        from lp.code.model.branch import Branch
        from lp.code.model.branchcollection import GenericBranchCollection
        from lp.registry.model.product import Product
        from lp.registry.model.distroseries import DistroSeries

        ids = set()
        source_branch_ids = set()
        person_ids = set()
        for mp in branch_merge_proposals:
            ids.add(mp.id)
            source_branch_ids.add(mp.source_branchID)
            person_ids.add(mp.registrantID)
            person_ids.add(mp.merge_reporterID)

        branches = load_related(
            Branch, branch_merge_proposals, (
                "target_branchID", "prerequisite_branchID",
                "source_branchID"))
        # The stacked on branches are used to check branch visibility.
        GenericBranchCollection.preloadVisibleStackedOnBranches(
            branches, user)

        if len(branches) == 0:
            return

        # Pre-load PreviewDiffs and Diffs.
        preview_diffs = IStore(BranchMergeProposal).find(
            PreviewDiff,
            PreviewDiff.branch_merge_proposal_id.is_in(ids)).order_by(
                PreviewDiff.branch_merge_proposal_id,
                Desc(PreviewDiff.date_created)).config(
                    distinct=[PreviewDiff.branch_merge_proposal_id])
        load_related(Diff, preview_diffs, ['diff_id'])
        for previewdiff in preview_diffs:
            cache = get_property_cache(previewdiff.branch_merge_proposal)
            cache.preview_diff = previewdiff

        # Add source branch owners to the list of pre-loaded persons.
        person_ids.update(
            branch.ownerID for branch in branches
            if branch.id in source_branch_ids)

        # Pre-load Person and ValidPersonCache.
        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
            person_ids, need_validity=True))

        # Pre-load branches' data.
        load_related(SourcePackageName, branches, ['sourcepackagenameID'])
        load_related(DistroSeries, branches, ['distroseriesID'])
        load_related(Product, branches, ['productID'])
        GenericBranchCollection.preloadDataForBranches(branches)
Example #26
    def getBugCvesForBugTasks(self, bugtasks, cve_mapper=None):
        """See ICveSet."""
        bugs = load_related(Bug, bugtasks, ('bugID', ))
        if len(bugs) == 0:
            return []
        bug_ids = [bug.id for bug in bugs]

        # Do not use BugCve instances: Storm may need a very long time
        # to look up the bugs and CVEs referenced by a BugCve instance
        # when the +cve view of a distroseries is rendered: There may
        # be a few thousand (bug, CVE) tuples, while the number of bugs
        # and CVEs is in the order of hundred. It is much more efficient
        # to retrieve just (bug_id, cve_id) from the BugCve table and
        # to map this to (Bug, CVE) here, instead of letting Storm
        # look up the CVE and bug for a BugCve instance, even if bugs
        # and CVEs are bulk loaded.
        store = Store.of(bugtasks[0])
        bugcve_ids = store.find(
            (BugCve.bugID, BugCve.cveID), In(BugCve.bugID, bug_ids))
        bugcve_ids.order_by(BugCve.bugID, BugCve.cveID)
        bugcve_ids = list(bugcve_ids)

        cve_ids = set(cve_id for bug_id, cve_id in bugcve_ids)
        cves = store.find(Cve, In(Cve.id, list(cve_ids)))

        if cve_mapper is None:
            cvemap = dict((cve.id, cve) for cve in cves)
        else:
            cvemap = dict((cve.id, cve_mapper(cve)) for cve in cves)
        bugmap = dict((bug.id, bug) for bug in bugs)
        return [
            (bugmap[bug_id], cvemap[cve_id])
            for bug_id, cve_id in bugcve_ids
            ]
Example #27
    def getBugCvesForBugTasks(self, bugtasks, cve_mapper=None):
        """See ICveSet."""
        bugs = bulk.load_related(Bug, bugtasks, ('bugID', ))
        if len(bugs) == 0:
            return []
        store = Store.of(bugtasks[0])

        xrefs = getUtility(IXRefSet).findFromMany([(u'bug', unicode(bug.id))
                                                   for bug in bugs],
                                                  types=[u'cve'])
        bugcve_ids = set()
        for bug_key in xrefs:
            for cve_key in xrefs[bug_key]:
                bugcve_ids.add((int(bug_key[1]), cve_key[1]))

        bugcve_ids = list(sorted(bugcve_ids))

        cves = store.find(Cve, In(Cve.sequence,
                                  [seq for _, seq in bugcve_ids]))

        if cve_mapper is None:
            cvemap = dict((cve.sequence, cve) for cve in cves)
        else:
            cvemap = dict((cve.sequence, cve_mapper(cve)) for cve in cves)
        bugmap = dict((bug.id, bug) for bug in bugs)
        return [(bugmap[bug_id], cvemap[cve_sequence])
                for bug_id, cve_sequence in bugcve_ids]
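
Both versions of getBugCvesForBugTasks end the same way: fetch plain id pairs,
then resolve them through two small id-to-object dicts so every Bug and Cve is
looked up once rather than once per pair. A minimal sketch of that final
mapping step, with hypothetical placeholder strings standing in for the model
objects:

def resolve_pairs(id_pairs, bugs_by_id, cves_by_key):
    """Turn (bug_id, cve_key) pairs into (bug, cve) object pairs."""
    return [(bugs_by_id[bug_id], cves_by_key[cve_key])
            for bug_id, cve_key in id_pairs]

bugs_by_id = {1: "<Bug 1>", 2: "<Bug 2>"}
cves_by_key = {"2011-0001": "<CVE-2011-0001>"}
print(resolve_pairs([(1, "2011-0001"), (2, "2011-0001")],
                    bugs_by_id, cves_by_key))
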
Example #28
def find_live_binary_versions_pass_2(sorted_pubs, cache):
    """Find versions out of Published publications that should stay live.

    This particular notion of liveness applies to second-pass binary
    domination: the latest version stays live, and architecture-specific
    publications stay live (i.e., ones that are not for the "all"
    architecture).

    More importantly, any publication for binary packages of the "all"
    architecture stays live if any of the non-"all" binary packages from
    the same source package release are still active -- even if they are
    for other architectures.

    This is the raison d'etre for the two-pass binary domination algorithm:
    to let us see which architecture-independent binary publications can be
    superseded without rendering any architecture-specific binaries from the
    same source package release uninstallable.

    (Note that here, "active" includes not only Published publications but
    also Pending ones.  This is standard nomenclature in Soyuz.  Some of the
    domination code confuses matters by using the term "active" to mean only
    Published publications).

    :param sorted_pubs: An iterable of `BinaryPackagePublishingHistory`,
        sorted by descending package version.
    :param cache: An `ArchSpecificPublicationsCache` to reduce the number of
        times we need to look up whether an spr/archive/distroseries/pocket
        has active arch-specific publications.
    :return: A list of live versions.
    """
    sorted_pubs = list(sorted_pubs)
    latest = sorted_pubs.pop(0)
    is_arch_specific = attrgetter('architecture_specific')
    arch_specific_pubs = list(ifilter(is_arch_specific, sorted_pubs))
    arch_indep_pubs = list(ifilterfalse(is_arch_specific, sorted_pubs))

    bpbs = load_related(BinaryPackageBuild,
                        [pub.binarypackagerelease for pub in arch_indep_pubs],
                        ['buildID'])
    load_related(SourcePackageRelease, bpbs, ['source_package_release_id'])

    reprieved_pubs = [
        pub for pub in arch_indep_pubs
        if cache.hasArchSpecificPublications(pub)
    ]

    return get_binary_versions([latest] + arch_specific_pubs + reprieved_pubs)
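
The docstring above states the reprieve rule in prose; a compact,
self-contained sketch of the same selection over hypothetical dict-shaped
publications, where has_arch_specific_binaries stands in for
cache.hasArchSpecificPublications:

def live_versions_pass_2(sorted_pubs, has_arch_specific_binaries):
    """Pick the versions that must stay live in second-pass domination."""
    sorted_pubs = list(sorted_pubs)
    latest, rest = sorted_pubs[0], sorted_pubs[1:]
    arch_specific = [pub for pub in rest if pub["arch_specific"]]
    arch_indep = [pub for pub in rest if not pub["arch_specific"]]
    # Arch-indep publications are reprieved while arch-specific binaries
    # built from the same source release are still active.
    reprieved = [pub for pub in arch_indep if has_arch_specific_binaries(pub)]
    return [pub["version"] for pub in [latest] + arch_specific + reprieved]

pubs = [
    {"version": "2.0", "arch_specific": False},
    {"version": "1.9", "arch_specific": True},
    {"version": "1.8", "arch_specific": False},
]
print(live_versions_pass_2(pubs, lambda pub: pub["version"] == "1.8"))
# ['2.0', '1.9', '1.8']
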
Example #29
def find_live_binary_versions_pass_2(sorted_pubs, cache):
    """Find versions out of Published publications that should stay live.

    This particular notion of liveness applies to second-pass binary
    domination: the latest version stays live, and architecture-specific
    publications stay live (i.e., ones that are not for the "all"
    architecture).

    More importantly, any publication for binary packages of the "all"
    architecture stays live if any of the non-"all" binary packages from
    the same source package release are still active -- even if they are
    for other architectures.

    This is the raison d'etre for the two-pass binary domination algorithm:
    to let us see which architecture-independent binary publications can be
    superseded without rendering any architecture-specific binaries from the
    same source package release uninstallable.

    (Note that here, "active" includes not only Published publications but
    also Pending ones.  This is standard nomenclature in Soyuz.  Some of the
    domination code confuses matters by using the term "active" to mean only
    Published publications).

    :param sorted_pubs: An iterable of `BinaryPackagePublishingHistory`,
        sorted by descending package version.
    :param cache: An `ArchSpecificPublicationsCache` to reduce the number of
        times we need to look up whether an spr/archive/distroseries/pocket
        has active arch-specific publications.
    :return: A list of live versions.
    """
    sorted_pubs = list(sorted_pubs)
    latest = sorted_pubs.pop(0)
    is_arch_specific = attrgetter('architecture_specific')
    arch_specific_pubs = list(ifilter(is_arch_specific, sorted_pubs))
    arch_indep_pubs = list(ifilterfalse(is_arch_specific, sorted_pubs))

    bpbs = load_related(
        BinaryPackageBuild,
        [pub.binarypackagerelease for pub in arch_indep_pubs], ['buildID'])
    load_related(SourcePackageRelease, bpbs, ['source_package_release_id'])

    reprieved_pubs = [
        pub
        for pub in arch_indep_pubs
            if cache.hasArchSpecificPublications(pub)]

    return get_binary_versions([latest] + arch_specific_pubs + reprieved_pubs)
Example #30
    def preloadDataForBMPs(branch_merge_proposals, user):
        # Utility to load the data related to a list of bmps.
        # Circular imports.
        from lp.code.model.branch import Branch
        from lp.code.model.branchcollection import GenericBranchCollection
        from lp.registry.model.product import Product
        from lp.registry.model.distroseries import DistroSeries

        ids = set()
        source_branch_ids = set()
        person_ids = set()
        for mp in branch_merge_proposals:
            ids.add(mp.id)
            source_branch_ids.add(mp.source_branchID)
            person_ids.add(mp.registrantID)
            person_ids.add(mp.merge_reporterID)

        branches = load_related(
            Branch, branch_merge_proposals,
            ("target_branchID", "prerequisite_branchID", "source_branchID"))
        # The stacked on branches are used to check branch visibility.
        GenericBranchCollection.preloadVisibleStackedOnBranches(branches, user)

        if len(branches) == 0:
            return

        # Pre-load PreviewDiffs and Diffs.
        preview_diffs = IStore(BranchMergeProposal).find(
            PreviewDiff,
            PreviewDiff.branch_merge_proposal_id.is_in(ids)).order_by(
                PreviewDiff.branch_merge_proposal_id,
                Desc(PreviewDiff.date_created)).config(
                    distinct=[PreviewDiff.branch_merge_proposal_id])
        load_related(Diff, preview_diffs, ['diff_id'])
        for previewdiff in preview_diffs:
            cache = get_property_cache(previewdiff.branch_merge_proposal)
            cache.preview_diff = previewdiff

        # Add source branch owners to the list of pre-loaded persons.
        person_ids.update(branch.ownerID for branch in branches
                          if branch.id in source_branch_ids)

        # Pre-load Person and ValidPersonCache.
        list(
            getUtility(IPersonSet).getPrecachedPersonsFromIDs(
                person_ids, need_validity=True))

        # Pre-load branches' data.
        load_related(SourcePackageName, branches, ['sourcepackagenameID'])
        load_related(DistroSeries, branches, ['distroseriesID'])
        load_related(Product, branches, ['productID'])
        GenericBranchCollection.preloadDataForBranches(branches)
 def preloadBuildsData(cls, builds):
     # Circular imports.
     from lp.code.model.sourcepackagerecipe import SourcePackageRecipe
     from lp.registry.model.distribution import Distribution
     from lp.registry.model.distroseries import DistroSeries
     from lp.services.librarian.model import LibraryFileAlias
     load_related(LibraryFileAlias, builds, ['log_id'])
     archives = load_related(Archive, builds, ['archive_id'])
     load_related(Person, archives, ['ownerID'])
     distroseries = load_related(DistroSeries, builds, ['distroseries_id'])
     load_related(Distribution, distroseries, ['distributionID'])
     sprs = load_related(SourcePackageRecipe, builds, ['recipe_id'])
     SourcePackageRecipe.preLoadDataForSourcePackageRecipes(sprs)
Example #32
 def test_load_related(self):
     owning_objects = [
         self.factory.makeBug(),
         self.factory.makeBug(),
         ]
     expected = set(bug.owner for bug in owning_objects)
     self.assertEqual(expected,
         set(bulk.load_related(Person, owning_objects, ['ownerID'])))
Example #33
    def initialize(self, series, translationgroup):
        self.series = series
        self.translationgroup = translationgroup
        self.form = self.request.form

        if IDistroSeriesLanguage.providedBy(self.context):
            self.batchnav = BatchNavigator(
                self.series.getCurrentTranslationTemplates(),
                self.request)
            self.pofiles = self.context.getPOFilesFor(
                self.batchnav.currentBatch())
            load_related(
                SourcePackageName, self.batchnav.currentBatch(),
                ['sourcepackagenameID'])
        else:
            self.batchnav = BatchNavigator(self.context.pofiles, self.request)
            self.pofiles = self.batchnav.currentBatch()
 def preLoadReferencedBranches(sourcepackagerecipedatas):
     # Circular imports.
     from lp.code.model.branchcollection import GenericBranchCollection
     from lp.code.model.gitcollection import GenericGitCollection
     # Load the related Branch, _SourcePackageRecipeDataInstruction.
     base_branches = load_related(Branch, sourcepackagerecipedatas,
                                  ['base_branch_id'])
     base_repositories = load_related(GitRepository,
                                      sourcepackagerecipedatas,
                                      ['base_git_repository_id'])
     sprd_instructions = load_referencing(
         _SourcePackageRecipeDataInstruction, sourcepackagerecipedatas,
         ['recipe_data_id'])
     sub_branches = load_related(Branch, sprd_instructions, ['branch_id'])
     sub_repositories = load_related(GitRepository, sprd_instructions,
                                     ['git_repository_id'])
     all_branches = base_branches + sub_branches
     all_repositories = base_repositories + sub_repositories
     # Pre-load branches'/repositories' data.
     if all_branches:
         GenericBranchCollection.preloadDataForBranches(all_branches)
     if all_repositories:
         GenericGitCollection.preloadDataForRepositories(all_repositories)
     # Store the pre-fetched objects on the sourcepackagerecipedatas
     # objects.
     branch_to_recipe_data = {
         instr.branch_id: instr.recipe_data_id
         for instr in sprd_instructions if instr.branch_id is not None
     }
     repository_to_recipe_data = {
         instr.git_repository_id: instr.recipe_data_id
         for instr in sprd_instructions
         if instr.git_repository_id is not None
     }
     caches = {
         sprd.id: [sprd, get_property_cache(sprd)]
         for sprd in sourcepackagerecipedatas
     }
     for _, [sprd, cache] in caches.items():
         cache._referenced_branches = [sprd.base]
     for branch in sub_branches:
         cache = caches[branch_to_recipe_data[branch.id]][1]
         cache._referenced_branches.append(branch)
     for repository in sub_repositories:
         cache = caches[repository_to_recipe_data[repository.id]][1]
         cache._referenced_branches.append(repository)
Example #35
 def getMilestoneWidgetValues(self):
     """Return data used to render the milestone checkboxes."""
     tasks = self.searchUnbatched()
     milestones = sorted(
         load_related(Milestone, tasks, ['milestoneID']),
         key=milestone_sort_key, reverse=True)
     return [
         dict(title=milestone.title, value=milestone.id, checked=False)
         for milestone in milestones]
Example #36
 def preloadForBuildFarmJobs(self, builds):
     """See `IBuildQueueSet`."""
     from lp.buildmaster.model.builder import Builder
     bqs = list(
         IStore(BuildQueue).find(
             BuildQueue,
             BuildQueue._build_farm_job_id.is_in([
                 removeSecurityProxy(b).build_farm_job_id for b in builds
             ])))
     load_related(Builder, bqs, ['builderID'])
     prefetched_data = dict(
         (removeSecurityProxy(buildqueue)._build_farm_job_id, buildqueue)
         for buildqueue in bqs)
     for build in builds:
         bq = prefetched_data.get(
             removeSecurityProxy(build).build_farm_job_id)
         get_property_cache(build).buildqueue_record = bq
     return bqs
Example #37
 def fetchProjectsForDisplay(self, user):
     """See `ITranslationGroup`."""
     # Avoid circular imports.
     from lp.registry.model.product import (
         get_precached_products,
         Product,
         ProductSet,
         )
     products = list(IStore(Product).find(
         Product,
         Product.translationgroupID == self.id,
         Product.active == True,
         ProductSet.getProductPrivacyFilter(user),
         ).order_by(Product.display_name))
     get_precached_products(products, need_licences=True)
     icons = bulk.load_related(LibraryFileAlias, products, ['iconID'])
     bulk.load_related(LibraryFileContent, icons, ['contentID'])
     return products
Example #38
 def test_load_related(self):
     owning_objects = [
         self.factory.makeBug(),
         self.factory.makeBug(),
     ]
     expected = set(bug.owner for bug in owning_objects)
     self.assertEqual(
         expected,
         set(bulk.load_related(Person, owning_objects, ['ownerID'])))
Example #39
 def eager_load(rows):
     subscriptions = map(itemgetter(0), rows)
     precache_permission_for_objects(None, 'launchpad.View',
                                     subscriptions)
     archives = load_related(Archive, subscriptions, ['archive_id'])
     list(
         getUtility(IPersonSet).getPrecachedPersonsFromIDs(
             [archive.ownerID for archive in archives],
             need_validity=True))
    def _getDirectAndDuplicateSubscriptions(self, person, bug):
        # Fetch all information for direct and duplicate
        # subscriptions (including indirect through team
        # membership) in a single query.
        with_statement = generate_subscription_with(bug, person)
        info = Store.of(person).with_(with_statement).find(
            (BugSubscription, Bug, Person),
            BugSubscription.id.is_in(
                SQL('SELECT bugsubscriptions.id FROM bugsubscriptions')),
            Person.id == BugSubscription.person_id,
            Bug.id == BugSubscription.bug_id)

        direct = RealSubscriptionInfoCollection(
            self.person, self.administrated_team_ids)
        duplicates = RealSubscriptionInfoCollection(
            self.person, self.administrated_team_ids)
        bugs = set()
        for subscription, subscribed_bug, subscriber in info:
            bugs.add(subscribed_bug)
            if subscribed_bug.id != bug.id:
                # This is a subscription through a duplicate.
                collection = duplicates
            else:
                # This is a direct subscription.
                collection = direct
            collection.add(
                subscriber, subscribed_bug, subscription)
        # Preload bug owners, then all pillars.
        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
            [bug.ownerID for bug in bugs]))
        all_tasks = [task for bug in bugs for task in bug.bugtasks]
        load_related(Product, all_tasks, ['productID'])
        load_related(Distribution, all_tasks, ['distributionID'])
        for bug in bugs:
            # indicate the reporter and bug_supervisor
            duplicates.annotateReporter(bug, bug.owner)
            direct.annotateReporter(bug, bug.owner)
        for task in all_tasks:
            # Get bug_supervisor.
            duplicates.annotateBugTaskResponsibilities(
                task, task.pillar, task.pillar.bug_supervisor)
            direct.annotateBugTaskResponsibilities(
                task, task.pillar, task.pillar.bug_supervisor)
        return (direct, duplicates)
Example #41
    def _getDirectAndDuplicateSubscriptions(self, person, bug):
        # Fetch all information for direct and duplicate
        # subscriptions (including indirect through team
        # membership) in a single query.
        with_statement = generate_subscription_with(bug, person)
        info = Store.of(person).with_(with_statement).find(
            (BugSubscription, Bug, Person),
            BugSubscription.id.is_in(
                SQL('SELECT bugsubscriptions.id FROM bugsubscriptions')),
            Person.id == BugSubscription.person_id,
            Bug.id == BugSubscription.bug_id)

        direct = RealSubscriptionInfoCollection(
            self.person, self.administrated_team_ids)
        duplicates = RealSubscriptionInfoCollection(
            self.person, self.administrated_team_ids)
        bugs = set()
        for subscription, subscribed_bug, subscriber in info:
            bugs.add(subscribed_bug)
            if subscribed_bug.id != bug.id:
                # This is a subscription through a duplicate.
                collection = duplicates
            else:
                # This is a direct subscription.
                collection = direct
            collection.add(
                subscriber, subscribed_bug, subscription)
        # Preload bug owners, then all pillars.
        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
            [bug.ownerID for bug in bugs]))
        all_tasks = [task for bug in bugs for task in bug.bugtasks]
        load_related(Product, all_tasks, ['productID'])
        load_related(Distribution, all_tasks, ['distributionID'])
        for bug in bugs:
            # indicate the reporter and bug_supervisor
            duplicates.annotateReporter(bug, bug.owner)
            direct.annotateReporter(bug, bug.owner)
        for task in all_tasks:
            # Get bug_supervisor.
            duplicates.annotateBugTaskResponsibilities(
                task, task.pillar, task.pillar.bug_supervisor)
            direct.annotateBugTaskResponsibilities(
                task, task.pillar, task.pillar.bug_supervisor)
        return (direct, duplicates)
Example #42
def preload_work_items(work_items):
    """Bulk load data that will be needed to process `work_items`.

    :param work_items: A sequence of `WorkItem` records.
    :return: A dict mapping `POFile` ids from `work_items` to their
        respective `POFile` objects.
    """
    pofiles = load(POFile, [work_item.pofile_id for work_item in work_items])
    load_related(Language, pofiles, ['languageID'])
    templates = load_related(POTemplate, pofiles, ['potemplateID'])
    distroseries = load_related(DistroSeries, templates, ['distroseriesID'])
    load_related(Distribution, distroseries, ['distributionID'])
    productseries = load_related(ProductSeries, templates, ['productseriesID'])
    load_related(Product, productseries, ['productID'])
    return dict((pofile.id, pofile) for pofile in pofiles)
Example #43
 def getMilestoneWidgetValues(self):
     """Return data used to render the milestone checkboxes."""
     tasks = self.searchUnbatched()
     milestones = sorted(load_related(Milestone, tasks, ['milestoneID']),
                         key=milestone_sort_key,
                         reverse=True)
     return [
         dict(title=milestone.title, value=milestone.id, checked=False)
         for milestone in milestones
     ]
def preload_work_items(work_items):
    """Bulk load data that will be needed to process `work_items`.

    :param work_items: A sequence of `WorkItem` records.
    :return: A dict mapping `POFile` ids from `work_items` to their
        respective `POFile` objects.
    """
    pofiles = load(POFile, [work_item.pofile_id for work_item in work_items])
    load_related(Language, pofiles, ['languageID'])
    templates = load_related(POTemplate, pofiles, ['potemplateID'])
    distroseries = load_related(DistroSeries, templates, ['distroseriesID'])
    load_related(Distribution, distroseries, ['distributionID'])
    productseries = load_related(
        ProductSeries, templates, ['productseriesID'])
    load_related(Product, productseries, ['productID'])
    return dict((pofile.id, pofile) for pofile in pofiles)
Example #45
 def getPublished(self, previewdiff):
     """See `ICodeReviewInlineCommentSet`."""
     crics = IStore(CodeReviewInlineComment).find(
         CodeReviewInlineComment,
         CodeReviewInlineComment.previewdiff_id == previewdiff.id)
     getUtility(IPersonSet).getPrecachedPersonsFromIDs(
         [cric.person_id for cric in crics])
     load_related(CodeReviewComment, crics, ['comment_id'])
     sorted_crics = sorted(list(crics),
                           key=lambda c: c.comment.date_created)
     inline_comments = []
     for cric in sorted_crics:
         for line_number, text in cric.comments.iteritems():
             comment = {
                 'line_number': line_number,
                 'person': cric.person,
                 'text': text,
                 'date': cric.comment.date_created,
             }
             inline_comments.append(comment)
     return inline_comments
 def preLoadReferencedBranches(sourcepackagerecipedatas):
     # Load the related Branch, _SourcePackageRecipeDataInstruction.
     load_related(
         Branch, sourcepackagerecipedatas, ['base_branch_id'])
     sprd_instructions = load_referencing(
         _SourcePackageRecipeDataInstruction,
         sourcepackagerecipedatas, ['recipe_data_id'])
     sub_branches = load_related(
         Branch, sprd_instructions, ['branch_id'])
     # Store the pre-fetched objects on the sourcepackagerecipedatas
     # objects.
     branch_to_recipe_data = dict([
         (instr.branch_id, instr.recipe_data_id)
             for instr in sprd_instructions])
     caches = dict((sprd.id, [sprd, get_property_cache(sprd)])
         for sprd in sourcepackagerecipedatas)
     for unused, [sprd, cache] in caches.items():
         cache._referenced_branches = [sprd.base_branch]
     for recipe_data_id, branches in groupby(
         sub_branches, lambda branch: branch_to_recipe_data[branch.id]):
         cache = caches[recipe_data_id][1]
         cache._referenced_branches.extend(list(branches))
 def preloadBuildsData(cls, builds):
     # Circular imports.
     from lp.code.model.sourcepackagerecipe import SourcePackageRecipe
     from lp.services.librarian.model import LibraryFileAlias
     load_related(LibraryFileAlias, builds, ['log_id'])
     archives = load_related(Archive, builds, ['archive_id'])
     load_related(Person, archives, ['ownerID'])
     sprs = load_related(SourcePackageRecipe, builds, ['recipe_id'])
     SourcePackageRecipe.preLoadDataForSourcePackageRecipes(sprs)
Example #48
 def preloadBuildsData(self, builds):
     # Circular imports.
     from lp.soyuz.model.distroarchseries import DistroArchSeries
     from lp.registry.model.distroseries import DistroSeries
     from lp.registry.model.distribution import Distribution
     from lp.soyuz.model.archive import Archive
     from lp.registry.model.person import Person
     self._prefetchBuildData(builds)
     das = load_related(DistroArchSeries, builds, ['distro_arch_series_id'])
     archives = load_related(Archive, builds, ['archive_id'])
     load_related(Person, archives, ['ownerID'])
     distroseries = load_related(DistroSeries, das, ['distroseriesID'])
     load_related(Distribution, distroseries, ['distributionID'])
Example #49
    def findSourcesForDomination(self, distroseries, pocket):
        """Find binary publications that need dominating.

        This is only for traditional domination, where the latest published
        publication is always kept published.  See `find_live_source_versions`
        for this logic.

        To optimize for that logic, `findSourcesForDomination` will ignore
        publications that have no other publications competing for the same
        source package.  There'd be nothing to do for those cases.
        """
        SPPH = SourcePackagePublishingHistory
        SPR = SourcePackageRelease

        spph_location_clauses = self._composeActiveSourcePubsCondition(
            distroseries, pocket)
        candidate_source_names = Select(
            SPPH.sourcepackagenameID,
            And(join_spph_spr(), spph_location_clauses),
            group_by=SPPH.sourcepackagenameID,
            having=(Count() > 1))

        # We'll also access the SourcePackageReleases associated with
        # the publications we find.  Since they're in the join anyway,
        # load them alongside the publications.
        # Actually we'll also want the SourcePackageNames, but adding
        # those to the (outer) query would complicate it, and
        # potentially slow it down.
        query = IStore(SPPH).find(
            (SPPH, SPR),
            join_spph_spr(),
            SPPH.sourcepackagenameID.is_in(candidate_source_names),
            spph_location_clauses)
        spphs = DecoratedResultSet(query, itemgetter(0))
        load_related(SourcePackageName, spphs, ['sourcepackagenameID'])
        return spphs
Example #50
 def preloadSpecificJobData(queues):
     key = attrgetter('job_type')
     for job_type, grouped_queues in groupby(queues, key=key):
         specific_class = specific_job_classes()[job_type]
         queue_subset = list(grouped_queues)
         job_subset = load_related(Job, queue_subset, ['jobID'])
         # We need to preload the build farm jobs early to avoid
         # the call to _set_build_farm_job to look up BuildFarmBuildJobs
         # one by one.
         specific_class.preloadBuildFarmJobs(job_subset)
         specific_jobs = list(specific_class.getByJobs(job_subset))
         if len(specific_jobs) == 0:
             continue
         specific_class.preloadJobsData(specific_jobs)
         specific_jobs_dict = dict(
             (specific_job.job, specific_job)
                 for specific_job in specific_jobs)
         for queue in queue_subset:
             cache = get_property_cache(queue)
             cache.specific_job = specific_jobs_dict[queue.job]
    def subscriptions_with_tokens(self):
        """Return all the persons archive subscriptions with the token
        for each.

        The result is formatted as a list of dicts to make the TAL code
        cleaner.
        """
        subscriber_set = getUtility(IArchiveSubscriberSet)
        subs_with_tokens = subscriber_set.getBySubscriberWithActiveToken(self.context)

        subscriptions = map(itemgetter(0), subs_with_tokens)
        precache_permission_for_objects(None, "launchpad.View", subscriptions)
        archives = load_related(Archive, subscriptions, ["archive_id"])
        list(
            getUtility(IPersonSet).getPrecachedPersonsFromIDs(
                [archive.ownerID for archive in archives], need_validity=True
            )
        )
        for archive in archives:
            get_property_cache(archive)._known_subscribers = [self.user]

        # Turn the result set into a list of dicts so it can be easily
        # accessed in TAL. Note that we need to ensure that only one
        # PersonalArchiveSubscription is included for each archive,
        # as the person might have participation in multiple
        # subscriptions (via different teams).
        unique_archives = set()
        personal_subscription_tokens = []
        for subscription, token in subs_with_tokens:
            if subscription.archive in unique_archives:
                continue
            unique_archives.add(subscription.archive)

            personal_subscription = PersonalArchiveSubscription(self.context, subscription.archive)
            personal_subscription_tokens.append({"subscription": personal_subscription, "token": token})

        return personal_subscription_tokens
Example #52
def preloadJobsData(cls, jobs):
    load_related(Job, jobs, ['job_id'])
    builds = load_related(
        SourcePackageRecipeBuild, jobs, ['build_id'])
    SourcePackageRecipeBuild.preloadBuildsData(builds)
Example #53
def preloadJobsData(cls, jobs):
    from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
    from lp.services.job.model.job import Job
    load_related(Job, jobs, ['job_id'])
    builds = load_related(BinaryPackageBuild, jobs, ['build_id'])
    getUtility(IBinaryPackageBuildSet).preloadBuildsData(list(builds))
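
Example #53 pulls its model imports into the function body. That is the usual way to sidestep a circular import: the import statement is only evaluated when the function is called, by which point both modules have finished loading. The sketch below shows the shape of the idiom; the json import merely stands in for a model module, it is not what the Launchpad code imports.

def lookup_build(job_id):
    # Deferred import: resolved on the first call, not while the module
    # that defines this function is itself still being imported.
    from json import dumps
    return dumps({"job": job_id})

print(lookup_build(42))
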
Example #54
def eager_load_dsds(dsds):
    """Eager load dependencies of the given `DistroSeriesDifference`s.

    :param dsds: A concrete sequence (i.e. not a generator) of
        `DistroSeriesDifference` to eager load for.
    """
    source_pubs = dict(
        most_recent_publications(
            dsds, statuses=active_publishing_status,
            in_parent=False, match_version=False))
    parent_source_pubs = dict(
        most_recent_publications(
            dsds, statuses=active_publishing_status,
            in_parent=True, match_version=False))
    source_pubs_for_release = dict(
        most_recent_publications(
            dsds, statuses=active_publishing_status,
            in_parent=False, match_version=True))
    parent_source_pubs_for_release = dict(
        most_recent_publications(
            dsds, statuses=active_publishing_status,
            in_parent=True, match_version=True))

    latest_comment_by_dsd_id = dict(
        (comment.distro_series_difference_id, comment)
        for comment in most_recent_comments(dsds))
    latest_comments = latest_comment_by_dsd_id.values()

    # SourcePackageReleases of the parent and source pubs are often
    # referred to.
    sprs = bulk.load_related(
        SourcePackageRelease, chain(
            source_pubs.itervalues(),
            parent_source_pubs.itervalues(),
            source_pubs_for_release.itervalues(),
            parent_source_pubs_for_release.itervalues()),
        ("sourcepackagereleaseID",))

    # Get packagesets and parent_packagesets for each DSD.
    dsd_packagesets = get_packagesets(dsds, in_parent=False)
    dsd_parent_packagesets = get_packagesets(dsds, in_parent=True)

    # Cache latest messages contents (MessageChunk).
    messages = bulk.load_related(
        Message, latest_comments, ['message_id'])
    chunks = message_chunks(messages)
    for msg in messages:
        cache = get_property_cache(msg)
        cache.text_contents = Message.chunks_text(
            chunks.get(msg.id, []))

    for dsd in dsds:
        spn_id = dsd.source_package_name_id
        cache = get_property_cache(dsd)
        cache.source_pub = source_pubs.get(spn_id)
        cache.parent_source_pub = parent_source_pubs.get(spn_id)
        cache.packagesets = dsd_packagesets.get(dsd.id)
        cache.parent_packagesets = dsd_parent_packagesets.get(dsd.id)
        if spn_id in source_pubs_for_release:
            spph = source_pubs_for_release[spn_id]
            cache.source_package_release = (
                DistroSeriesSourcePackageRelease(
                    dsd.derived_series,
                    spph.sourcepackagerelease))
        else:
            cache.source_package_release = None
        if spn_id in parent_source_pubs_for_release:
            spph = parent_source_pubs_for_release[spn_id]
            cache.parent_source_package_release = (
                DistroSeriesSourcePackageRelease(
                    dsd.parent_series, spph.sourcepackagerelease))
        else:
            cache.parent_source_package_release = None
        cache.latest_comment = latest_comment_by_dsd_id.get(dsd.id)

    # SourcePackageRelease.uploader can end up getting the requester
    # for a source package recipe build.
    sprbs = bulk.load_related(
        SourcePackageRecipeBuild, sprs,
        ("source_package_recipe_build_id",))

    # SourcePackageRelease.uploader can end up getting the owner of
    # the DSC signing key.
    gpgkeys = bulk.load_related(GPGKey, sprs, ("dscsigningkeyID",))

    # Load DistroSeriesDifferenceComment owners, SourcePackageRecipeBuild
    # requesters, GPGKey owners, and SourcePackageRelease creators.
    person_ids = set().union(
        (dsdc.message.ownerID for dsdc in latest_comments),
        (sprb.requester_id for sprb in sprbs),
        (gpgkey.ownerID for gpgkey in gpgkeys),
        (spr.creatorID for spr in sprs))
    uploaders = getUtility(IPersonSet).getPrecachedPersonsFromIDs(
        person_ids, need_validity=True)
    list(uploaders)

    # Load SourcePackageNames.
    bulk.load_related(
        SourcePackageName, dsds, ("source_package_name_id",))
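
Most of what eager_load_dsds() does is pre-filling property caches so that later attribute reads (dsd.source_pub, dsd.latest_comment, ...) cost no extra database work. The class below is a dependency-free imitation of that caching pattern, not lazr's actual cachedproperty or get_property_cache implementation.

class cachedproperty:
    """Compute once on first access, then store the result on the instance."""
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, owner=None):
        if obj is None:
            return self
        value = self.func(obj)
        obj.__dict__[self.name] = value   # shadows the descriptor next time
        return value

class DSD:
    @cachedproperty
    def latest_comment(self):
        print("pretend this is a database query")
        return "computed lazily"

print(DSD().latest_comment)       # fresh instance: the "query" runs once

dsd = DSD()
dsd.__dict__["latest_comment"] = "preloaded in bulk"   # what the bulk loader does
print(dsd.latest_comment)         # no query: the cache was seeded up front
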
Example #55
def check_copy_permissions(person, archive, series, pocket, sources):
    """Check that `person` has permission to copy a package.

    :param person: User attempting the upload.
    :param archive: Destination `Archive`.
    :param series: Destination `DistroSeries`.
    :param pocket: Destination `Pocket`.
    :param sources: Sequence of `SourcePackagePublishingHistory`s for the
        packages to be copied.
    :raises CannotCopy: If the copy is not allowed.
    """
    # Circular import.
    from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease

    if person is None:
        raise CannotCopy("Cannot check copy permissions (no requester).")

    if len(sources) > 1:
        # Bulk-load the data we'll need from each source publication.
        load_related(SourcePackageRelease, sources, ["sourcepackagereleaseID"])

    # If there is a requester, check that he has upload permission into
    # the destination (archive, component, pocket). This check is done
    # here rather than in the security adapter because it requires more
    # info than is available in the security adapter.
    sourcepackagenames = [
        source.sourcepackagerelease.sourcepackagename for source in sources]
    if series is None:
        # Use each source's series as the destination for that source.
        series_iter = map(attrgetter("distroseries"), sources)
    else:
        series_iter = repeat(series)
    for spn, dest_series in set(zip(sourcepackagenames, series_iter)):
        # XXX cjwatson 20120630: We should do a proper ancestry check
        # instead of simply querying for publications in any pocket.
        # Unfortunately there are currently at least three different
        # implementations of ancestry lookup:
        # NascentUpload.getSourceAncestry,
        # PackageUploadSource.getSourceAncestryForDiffs, and
        # Archive.getPublishedSources, none of which is obviously
        # correct here.  Instead of adding a fourth, we should consolidate
        # these.
        ancestries = archive.getPublishedSources(
            name=spn.name, exact_match=True, status=active_publishing_status,
            distroseries=dest_series)
        try:
            destination_component = ancestries[0].component
        except IndexError:
            destination_component = None

        # Is the destination pocket open at all?
        reason = archive.checkUploadToPocket(
            dest_series, pocket, person=person)
        if reason is not None:
            raise CannotCopy(reason)

        # If destination_component is not None, make sure the person
        # has upload permission for this component.  Otherwise, any
        # upload permission on this archive will do.
        strict_component = destination_component is not None
        reason = archive.verifyUpload(
            person, spn, destination_component, dest_series,
            strict_component=strict_component, pocket=pocket)
        if reason is not None:
            # Queue admins are allowed to copy even if they can't upload.
            if not archive.canAdministerQueue(
                person, destination_component, pocket, dest_series):
                raise CannotCopy(reason)
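
The for spn, dest_series in set(zip(sourcepackagenames, series_iter)) loop above checks each distinct (package name, destination series) pair exactly once, whether or not the caller forced a single destination series. The same mechanics with throwaway strings standing in for SourcePackageName and DistroSeries objects:

from itertools import repeat

names = ["hello", "hello", "bzr"]          # one entry per source publication
own_series = ["focal", "jammy", "jammy"]   # each publication's own series

for dest in (None, "noble"):
    series_iter = iter(own_series) if dest is None else repeat(dest)
    print(sorted(set(zip(names, series_iter))))
# dest=None    -> [('bzr', 'jammy'), ('hello', 'focal'), ('hello', 'jammy')]
# dest='noble' -> [('bzr', 'noble'), ('hello', 'noble')]
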