def preload_hook(rows):
     person_ids = set()
     work_items_by_spec = defaultdict(list)
     for spec in rows:
         if need_people: 
             person_ids |= set(
                 [spec._assigneeID, spec._approverID, spec._drafterID])
         if need_branches:
             get_property_cache(spec).linked_branches = []
     if need_workitems:
         work_items = load_referencing(
             SpecificationWorkItem, rows, ['specification_id'],
             extra_conditions=[SpecificationWorkItem.deleted == False])
         for workitem in work_items:
             person_ids.add(workitem.assignee_id)
             work_items_by_spec[workitem.specification_id].append(workitem)
     person_ids -= set([None])
     if need_people:
         list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
             person_ids, need_validity=True))
     if need_workitems:
         for spec in rows:
             get_property_cache(spec).work_items = sorted(
                 work_items_by_spec[spec.id], key=lambda wi: wi.sequence)
     if need_branches:
         spec_branches = load_referencing(
             SpecificationBranch, rows, ['specificationID'])
         for sbranch in spec_branches:
             spec_cache = get_property_cache(sbranch.specification)
             spec_cache.linked_branches.append(sbranch)
Example #2
 def setPOFile(self, pofile, sequence=None):
     """See `ITranslationMessage`."""
     self.browser_pofile = pofile
     if sequence is not None:
         get_property_cache(self).sequence = sequence
     else:
         del get_property_cache(self).sequence
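All of these snippets revolve around Launchpad's property-cache helpers: a @cachedproperty computes a value once and stores it on the instance, and get_property_cache(obj) exposes that per-instance store so callers can seed a value ahead of time, test whether it is present, or delete it to force recomputation. The following is a minimal, hypothetical stand-in for those helpers, written only to illustrate how the calls in these examples behave; it is not Launchpad's real propertycache module.

class PropertyCache:
    """Per-instance store backing the cached properties."""

    def __contains__(self, name):
        # Supports membership tests such as '"series" in cache'.
        return name in self.__dict__

    def __delattr__(self, name):
        # Invalidation is tolerant of missing values, matching how the
        # examples delete cached attributes unconditionally.
        self.__dict__.pop(name, None)


def get_property_cache(obj):
    """Return (and lazily attach) the cache for an instance."""
    try:
        return obj._property_cache
    except AttributeError:
        obj._property_cache = PropertyCache()
        return obj._property_cache


class cachedproperty:
    """Descriptor that computes once and then serves from the cache."""

    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, owner=None):
        if obj is None:
            return self
        cache = get_property_cache(obj)
        if self.name not in cache:
            setattr(cache, self.name, self.func(obj))
        return getattr(cache, self.name)


class Specification:
    @cachedproperty
    def work_items(self):
        print("expensive database query")  # stand-in for a real query
        return []


spec = Specification()
spec.work_items                                    # queries once, caches the result
get_property_cache(spec).work_items = ["seeded"]   # preload hooks seed caches like this
del get_property_cache(spec).work_items            # invalidate; the next read requeries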
Example #4
    def fromBranchMergeProposal(cls, bmp):
        """Create a `PreviewDiff` from a `BranchMergeProposal`.

        Includes a diff from the source to the target.
        :param bmp: The `BranchMergeProposal` to generate a `PreviewDiff` for.
        :return: A `PreviewDiff`.
        """
        source_branch = bmp.source_branch.getBzrBranch()
        source_revision = source_branch.last_revision()
        target_branch = bmp.target_branch.getBzrBranch()
        target_revision = target_branch.last_revision()
        if bmp.prerequisite_branch is not None:
            prerequisite_branch = bmp.prerequisite_branch.getBzrBranch()
        else:
            prerequisite_branch = None
        diff, conflicts = Diff.mergePreviewFromBranches(
            source_branch, source_revision, target_branch, prerequisite_branch)
        preview = cls()
        preview.source_revision_id = source_revision.decode('utf-8')
        preview.target_revision_id = target_revision.decode('utf-8')
        preview.branch_merge_proposal = bmp
        preview.diff = diff
        preview.conflicts = u''.join(
            unicode(conflict) + '\n' for conflict in conflicts)
        del get_property_cache(bmp).preview_diffs
        del get_property_cache(bmp).preview_diff
        return preview
Example #6
 def destroySelf(self):
     """Remove this record."""
     builder = self.builder
     specific_build = self.specific_build
     Store.of(self).remove(self)
     Store.of(self).flush()
     if builder is not None:
         del get_property_cache(builder).currentjob
     del get_property_cache(specific_build).buildqueue_record
     self._clear_specific_build_cache()
Example #7
 def can_infer_distro_series(self, value):
     store = Store.of(self)
     current = store.find(SnappyDistroSeries,
                          SnappyDistroSeries.snappy_series == self,
                          SnappyDistroSeries.distro_series == None).one()
     if current is None and value is True:
         store.add(SnappyDistroSeries(self, None))
         get_property_cache(self)._can_infer_distro_series = True
     elif current is not None and value is False:
         store.remove(current)
         get_property_cache(self)._can_infer_distro_series = False
Example #8
    def fromBranchMergeProposal(cls, bmp):
        """Create a `PreviewDiff` from a `BranchMergeProposal`.

        Includes a diff from the source to the target.
        :param bmp: The `BranchMergeProposal` to generate a `PreviewDiff` for.
        :return: A `PreviewDiff`.
        """
        if bmp.source_branch is not None:
            source_branch = bmp.source_branch.getBzrBranch()
            source_revision = source_branch.last_revision()
            target_branch = bmp.target_branch.getBzrBranch()
            target_revision = target_branch.last_revision()
            if bmp.prerequisite_branch is not None:
                prerequisite_branch = bmp.prerequisite_branch.getBzrBranch()
            else:
                prerequisite_branch = None
            diff, conflicts = Diff.mergePreviewFromBranches(
                source_branch, source_revision, target_branch,
                prerequisite_branch)
            preview = cls()
            preview.source_revision_id = source_revision.decode('utf-8')
            preview.target_revision_id = target_revision.decode('utf-8')
            preview.branch_merge_proposal = bmp
            preview.diff = diff
            preview.conflicts = u''.join(
                unicode(conflict) + '\n' for conflict in conflicts)
        else:
            source_repository = bmp.source_git_repository
            target_repository = bmp.target_git_repository
            if source_repository == target_repository:
                path = source_repository.getInternalPath()
            else:
                path = "%s:%s" % (
                    target_repository.getInternalPath(),
                    source_repository.getInternalPath())
            response = getUtility(IGitHostingClient).getMergeDiff(
                path, bmp.target_git_commit_sha1, bmp.source_git_commit_sha1,
                prerequisite=bmp.prerequisite_git_commit_sha1)
            conflicts = u"".join(
                u"Conflict in %s\n" % path for path in response['conflicts'])
            preview = cls.create(
                bmp, response['patch'].encode('utf-8'),
                bmp.source_git_commit_sha1, bmp.target_git_commit_sha1,
                bmp.prerequisite_git_commit_sha1, conflicts,
                strip_prefix_segments=1)
        del get_property_cache(bmp).preview_diffs
        del get_property_cache(bmp).preview_diff
        return preview
Example #9
 def addGrant(self,
              grantee,
              grantor,
              can_create=False,
              can_push=False,
              can_force_push=False,
              permissions=None):
     """See `IGitRule`."""
     if permissions is not None:
         if can_create or can_push or can_force_push:
             raise AssertionError(
                 "GitRule.addGrant takes either "
                 "can_create/can_push/can_force_push or permissions, not "
                 "both")
         can_create = GitPermissionType.CAN_CREATE in permissions
         can_push = GitPermissionType.CAN_PUSH in permissions
         can_force_push = GitPermissionType.CAN_FORCE_PUSH in permissions
     grant = GitRuleGrant(rule=self,
                          grantee=grantee,
                          can_create=can_create,
                          can_push=can_push,
                          can_force_push=can_force_push,
                          grantor=grantor,
                          date_created=DEFAULT)
     getUtility(IGitActivitySet).logGrantAdded(grant, grantor)
     del get_property_cache(self).grants
     return grant
Example #11
 def preloadDataForBranches(branches):
     """Preload branches' cached associated targets, product series, and
     suite source packages."""
     load_related(SourcePackageName, branches, ['sourcepackagenameID'])
     load_related(DistroSeries, branches, ['distroseriesID'])
     load_related(Product, branches, ['productID'])
     caches = dict(
         (branch.id, get_property_cache(branch)) for branch in branches)
     branch_ids = caches.keys()
     for cache in caches.values():
         cache._associatedProductSeries = []
         cache._associatedSuiteSourcePackages = []
         cache.code_import = None
     # associatedProductSeries
     # Imported here to avoid circular import.
     from lp.registry.model.productseries import ProductSeries
     for productseries in IStore(ProductSeries).find(
             ProductSeries, ProductSeries.branchID.is_in(branch_ids)):
         cache = caches[productseries.branchID]
         cache._associatedProductSeries.append(productseries)
     # associatedSuiteSourcePackages
     series_set = getUtility(IFindOfficialBranchLinks)
     # Order by the pocket to get the release one first. If changing
     # this be sure to also change BranchCollection.getBranches.
     links = series_set.findForBranches(branches).order_by(
         SeriesSourcePackageBranch.pocket)
     for link in links:
         cache = caches[link.branchID]
         cache._associatedSuiteSourcePackages.append(
             link.suite_sourcepackage)
     for code_import in IStore(CodeImport).find(
             CodeImport, CodeImport.branchID.is_in(branch_ids)):
         cache = caches[code_import.branchID]
         cache.code_import = code_import
Example #12
 def preloadDataForBranches(branches):
     """Preload branches cached associated product series and
     suite source packages."""
     caches = dict((branch.id, get_property_cache(branch))
         for branch in branches)
     branch_ids = caches.keys()
     for cache in caches.values():
         cache._associatedProductSeries = []
         cache._associatedSuiteSourcePackages = []
         cache.code_import = None
     # associatedProductSeries
     # Imported here to avoid circular import.
     from lp.registry.model.productseries import ProductSeries
     for productseries in IStore(ProductSeries).find(
         ProductSeries,
         ProductSeries.branchID.is_in(branch_ids)):
         cache = caches[productseries.branchID]
         cache._associatedProductSeries.append(productseries)
     # associatedSuiteSourcePackages
     series_set = getUtility(IFindOfficialBranchLinks)
     # Order by the pocket to get the release one first. If changing
     # this be sure to also change BranchCollection.getBranches.
     links = series_set.findForBranches(branches).order_by(
         SeriesSourcePackageBranch.pocket)
     for link in links:
         cache = caches[link.branchID]
         cache._associatedSuiteSourcePackages.append(
             link.suite_sourcepackage)
     for code_import in IStore(CodeImport).find(
         CodeImport, CodeImport.branchID.is_in(branch_ids)):
         cache = caches[code_import.branchID]
         cache.code_import = code_import
Example #13
    def test_distribution_series_cache(self):
        distribution = removeSecurityProxy(
            self.factory.makeDistribution('foo'))

        cache = get_property_cache(distribution)

        # Not yet cached.
        self.assertNotIn("series", cache)

        # Now cached.
        series = distribution.series
        self.assertIs(series, cache.series)

        # Cache cleared.
        distribution.newSeries(name='bar',
                               display_name='Bar',
                               title='Bar',
                               summary='',
                               description='',
                               version='1',
                               previous_series=None,
                               registrant=self.factory.makePerson())
        self.assertNotIn("series", cache)

        # New cached value.
        series = distribution.series
        self.assertEqual(1, len(series))
        self.assertIs(series, cache.series)
Example #14
    def test_distribution_series_cache(self):
        distribution = removeSecurityProxy(self.factory.makeDistribution("foo"))

        cache = get_property_cache(distribution)

        # Not yet cached.
        self.assertNotIn("series", cache)

        # Now cached.
        series = distribution.series
        self.assertIs(series, cache.series)

        # Cache cleared.
        distribution.newSeries(
            name="bar",
            displayname="Bar",
            title="Bar",
            summary="",
            description="",
            version="1",
            previous_series=None,
            registrant=self.factory.makePerson(),
        )
        self.assertNotIn("series", cache)

        # New cached value.
        series = distribution.series
        self.assertEqual(1, len(series))
        self.assertIs(series, cache.series)
Example #15
 def test_user_with_grant_for_target_read_access(self):
     # Users with a grant for the specification's target
     # have access to a specification if the information_type
     # of the specification matches the type of the grant.
     specification = self.factory.makeSpecification()
     removeSecurityProxy(
         specification.target)._ensurePolicies(PRIVATE_INFORMATION_TYPES)
     user = self.factory.makePerson()
     permissions = {
         InformationType.PROPRIETARY: SharingPermission.ALL,
     }
     with person_logged_in(specification.target.owner):
         getUtility(IService, 'sharing').sharePillarInformation(
             specification.target, user, specification.target.owner,
             permissions)
     all_types = specification.getAllowedInformationTypes(user)
     for information_type in all_types:
         with person_logged_in(specification.target.owner):
             specification.transitionToInformationType(
                 information_type, specification.owner)
         error_expected = (information_type not in PUBLIC_INFORMATION_TYPES
                           and information_type not in permissions)
         self.read_access_to_ISpecificationView(user, specification,
                                                error_expected)
         del get_property_cache(specification)._known_viewers
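Example #16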
    def requestDiffTo(self, requester, to_sourcepackagerelease):
        """See ISourcePackageRelease."""
        candidate = self.getDiffTo(to_sourcepackagerelease)

        if candidate is not None:
            raise PackageDiffAlreadyRequested("%s has already been requested" %
                                              candidate.title)

        if self.sourcepackagename.name == 'udev':
            # XXX 2009-11-23 Julian bug=314436
            # Currently diff output for udev will fill disks.  It's
            # disabled until diffutils is fixed in that bug.
            status = PackageDiffStatus.FAILED
        else:
            status = PackageDiffStatus.PENDING

        Store.of(to_sourcepackagerelease).flush()
        del get_property_cache(to_sourcepackagerelease).package_diffs
        packagediff = PackageDiff(from_source=self,
                                  to_source=to_sourcepackagerelease,
                                  requester=requester,
                                  status=status)
        if status == PackageDiffStatus.PENDING:
            getUtility(IPackageDiffJobSource).create(packagediff)
        return packagediff
Example #17
 def buglinks(self):
     """Return a list of dict with bug, title and can_see_bug keys
     for the linked bugs. It makes the Right Thing(tm) with private bug.
     """
     # Do a regular search to get the bugtasks so that visibility is
     # evaluated and eager loading is performed.
     bug_ids = list(map(attrgetter("bugID"), self.context.bug_links))
     if not bug_ids:
         return []
     bugtask_set = getUtility(IBugTaskSet)
     query = BugTaskSearchParams(user=self.user, bug=any(*bug_ids))
     bugtasks = list(bugtask_set.search(query))
     # collate by bug
     bugs = defaultdict(list)
     for task in bugtasks:
         bugs[task.bug].append(task)
     badges = bugtask_set.getBugTaskBadgeProperties(bugtasks)
     links = []
     columns_to_show = ["id", "summary", "bugtargetdisplayname", "importance", "status"]
     for bug, tasks in bugs.items():
         navigator = BugListingBatchNavigator(
             tasks, self.request, columns_to_show=columns_to_show, size=config.malone.buglist_batch_size
         )
         get_property_cache(navigator).bug_badge_properties = badges
         links.append(
             {"bug": bug, "title": bug.title, "can_view_bug": True, "tasks": tasks, "batch_navigator": navigator}
         )
     return links
Example #18
def make_merge_proposal_without_reviewers(factory, **kwargs):
    """Make a merge proposal and strip of any review votes."""
    proposal = factory.makeBranchMergeProposal(**kwargs)
    for vote in proposal.votes:
        removeSecurityProxy(vote).destroySelf()
    del get_property_cache(proposal).votes
    return proposal
Example #19
 def markAsBuilding(self, builder):
     """See `IBuildQueue`."""
     self.builder = builder
     if self.job.status != JobStatus.RUNNING:
         self.job.start()
     self.specific_job.jobStarted()
     if builder is not None:
         del get_property_cache(builder).currentjob
Example #20
 def _storeSigningKey(self, pub_key):
     """Store signing key reference in the database."""
     key_owner = getUtility(ILaunchpadCelebrities).ppa_key_guard
     key, _ = getUtility(IGPGKeySet).activate(key_owner, pub_key,
                                              pub_key.can_encrypt)
     self.archive.signing_key_owner = key.owner
     self.archive.signing_key_fingerprint = key.fingerprint
     del get_property_cache(self.archive).signing_key
Example #21
 def _preloadProcessors(self, rows):
     # Grab (Builder.id, Processor.id) pairs and stuff them into the
     # Builders' processor caches.
     store = IStore(BuilderProcessor)
     pairs = list(
         store.using(BuilderProcessor, Processor).find(
             (BuilderProcessor.builder_id, BuilderProcessor.processor_id),
             BuilderProcessor.processor_id == Processor.id,
             BuilderProcessor.builder_id.is_in([
                 b.id for b in rows
             ])).order_by(BuilderProcessor.builder_id, Processor.name))
     load(Processor, [pid for bid, pid in pairs])
     for row in rows:
         get_property_cache(row)._processors_cache = []
     for bid, pid in pairs:
         cache = get_property_cache(store.get(Builder, bid))
         cache._processors_cache.append(store.get(Processor, pid))
Example #22
 def create(cls, snapbuild):
     """See `ISnapStoreUploadJobSource`."""
     snap_build_job = SnapBuildJob(snapbuild, cls.class_job_type, {})
     job = cls(snap_build_job)
     job.celeryRunOnCommit()
     del get_property_cache(snapbuild).last_store_upload_job
     notify(SnapBuildStoreUploadStatusChangedEvent(snapbuild))
     return job
Example #24
 def preloadForBuilders(self, builders):
     # Populate builders' currentjob cachedproperty.
     queues = load_referencing(BuildQueue, builders, ['builderID'])
     queue_builders = dict((queue.builderID, queue) for queue in queues)
     for builder in builders:
         cache = get_property_cache(builder)
         cache.currentjob = queue_builders.get(builder.id, None)
     return queues
Example #25
 def markAsBuilding(self, builder):
     """See `IBuildQueue`."""
     self.builder = builder
     self.status = BuildQueueStatus.RUNNING
     self.date_started = UTC_NOW
     self.specific_build.updateStatus(BuildStatus.BUILDING)
     if builder is not None:
         del get_property_cache(builder).currentjob
Example #26
    def _createWorkItemAndReturnDataDict(self, spec):
        """Create a new work item for the given spec using the next available
        sequence number.

        Return a dict with the title, status, assignee, milestone and sequence
        attributes of the new work item.
        """
        del get_property_cache(spec).work_items
        if len(spec.work_items) == 0:
            sequence = 0
        else:
            sequence = max(wi.sequence for wi in spec.work_items) + 1
        wi = self.factory.makeSpecificationWorkItem(
            specification=spec, sequence=sequence)
        del get_property_cache(spec).work_items
        return dict(
            title=wi.title, status=wi.status, assignee=wi.assignee,
            milestone=wi.milestone, sequence=sequence)
Example #27
 def preloadGrantsForRules(rules):
     """Preload the access grants related to an iterable of rules."""
     grants = load_referencing(GitRuleGrant, rules, ["rule_id"])
     grants_map = defaultdict(list)
     for grant in grants:
         grants_map[grant.rule_id].append(grant)
     for rule in rules:
         get_property_cache(rule).grants = grants_map[rule.id]
     load_related(Person, grants, ["grantee_id"])
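Most of the preload helpers in this collection share one shape: fetch all child rows for a batch of parent objects in a single query, group them by the parent's id, and write each group into the parent's property cache so that later attribute access never falls back to a per-row query. Below is a generic, hedged sketch of that grouping step; preload_children, parent_id_of and cache_attr are hypothetical names used for illustration (not Launchpad APIs), and get_property_cache is the helper used throughout (or the stand-in sketched after Example #2). preloadGrantsForRules above is exactly this pattern with GitRuleGrant rows and the grants attribute, and preload_hook applies it to work items with an extra sort by sequence.

from collections import defaultdict

def preload_children(parents, children, parent_id_of, cache_attr):
    # Group the pre-fetched child rows by their parent's id.
    by_parent = defaultdict(list)
    for child in children:
        by_parent[parent_id_of(child)].append(child)
    # Seed every parent's cache, including parents with no children, so the
    # cached property is populated for the whole batch.
    for parent in parents:
        setattr(get_property_cache(parent), cache_attr, by_parent[parent.id])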
Example #28
 def addFile(self, file, filetype=None):
     """See ISourcePackageRelease."""
     if filetype is None:
         filetype = determine_source_file_type(file.filename)
     sprf = SourcePackageReleaseFile(sourcepackagerelease=self,
                                     filetype=filetype,
                                     libraryfile=file)
     del get_property_cache(self).files
     return sprf
Example #29
 def deleteMirrorCDImageSeries(self, distroseries, flavour):
     """See IDistributionMirror"""
     mirror = MirrorCDImageDistroSeries.selectOneBy(
         distribution_mirror=self,
         distroseries=distroseries,
         flavour=flavour)
     if mirror is not None:
         mirror.destroySelf()
     del get_property_cache(self).cdimage_series
Example #30
 def __init__(self, repository, position, ref_pattern, creator,
              date_created):
     super(GitRule, self).__init__()
     self.repository = repository
     self.position = position
     self.ref_pattern = ref_pattern
     self.creator = creator
     self.date_created = date_created
     self.date_last_modified = date_created
     get_property_cache(self).grants = []
Example #31
 def linkBranch(self, branch, registrant):
     branch_link = self.getBranchLink(branch)
     if branch_link is not None:
         return branch_link
     branch_link = SpecificationBranch(
         specification=self, branch=branch, registrant=registrant)
     Store.of(self).flush()
     del get_property_cache(self).linked_branches
     notify(ObjectCreatedEvent(branch_link))
     return branch_link
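The write-side examples (addFile, linkBranch, createProductRelease, requestDiffTo and friends) apply the mirror-image rule: after inserting or deleting a row that a cached collection would contain, drop the cached attribute rather than patching it, so the next read recomputes from the database. Here is a small hypothetical sketch of that rule, reusing the stand-in cachedproperty and get_property_cache from the sketch after Example #2; FakeStore and SpecBranchLink are invented names, not Launchpad classes.

class FakeStore:
    """Stand-in for the Storm store the real code writes through."""

    def __init__(self):
        self.rows = []

    def add(self, row):
        self.rows.append(row)


STORE = FakeStore()


class SpecBranchLink:
    def __init__(self, spec, branch):
        self.spec, self.branch = spec, branch


class Spec:
    @cachedproperty
    def linked_branches(self):
        # Stand-in for a database query against the store.
        return [row for row in STORE.rows if row.spec is self]

    def linkBranch(self, branch):
        STORE.add(SpecBranchLink(self, branch))
        # The cached list (if any) is stale now; invalidate instead of patching.
        del get_property_cache(self).linked_branches

With these stand-ins, spec = Spec() caches an empty list on the first read of spec.linked_branches, and after spec.linkBranch("lp:foo") the deletion forces the next read to pick up the new row.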
Example #33
 def reset(self):
     """See `IBuildQueue`."""
     builder = self.builder
     self.builder = None
     self.status = BuildQueueStatus.WAITING
     self.date_started = None
     self.logtail = None
     self.specific_build.updateStatus(BuildStatus.NEEDSBUILD)
     if builder is not None:
         del get_property_cache(builder).currentjob
Example #34
 def preferred_distro_series(self, value):
     current = Store.of(self).find(
         SnappyDistroSeries, SnappyDistroSeries.snappy_series == self,
         SnappyDistroSeries.preferred == True).one()
     if current is not None:
         if current.distro_series == value:
             return
         current.preferred = False
         get_property_cache(self)._preferred_distro_series = None
     if value is not None:
         row = Store.of(self).find(
             SnappyDistroSeries, SnappyDistroSeries.snappy_series == self,
             SnappyDistroSeries.distro_series == value).one()
         if row is not None:
             row.preferred = True
         else:
             row = SnappyDistroSeries(self, value, preferred=True)
             Store.of(self).add(row)
         get_property_cache(self)._preferred_distro_series = value
Example #35
 def unsubscribe(self, person, unsubscribed_by, ignore_permissions=False):
     """See ISpecification."""
     # see if a relevant subscription exists, and if so, delete it
     if person is None:
         person = unsubscribed_by
     for sub in self.subscriptions:
         if sub.person.id == person.id:
             if (not sub.canBeUnsubscribedByUser(unsubscribed_by)
                     and not ignore_permissions):
                 raise UserCannotUnsubscribePerson(
                     '%s does not have permission to unsubscribe %s.' %
                     (unsubscribed_by.displayname, person.displayname))
             get_property_cache(self).subscriptions.remove(sub)
             SpecificationSubscription.delete(sub.id)
             artifacts_to_delete = getUtility(IAccessArtifactSource).find(
                 [self])
             getUtility(IAccessArtifactGrantSource).revokeByArtifact(
                 artifacts_to_delete, [person])
             return
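Example #36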
    def preloadDataForBMPs(branch_merge_proposals, user):
        # Utility to load the data related to a list of bmps.
        # Circular imports.
        from lp.code.model.branch import Branch
        from lp.code.model.branchcollection import GenericBranchCollection
        from lp.registry.model.product import Product
        from lp.registry.model.distroseries import DistroSeries

        ids = set()
        source_branch_ids = set()
        person_ids = set()
        for mp in branch_merge_proposals:
            ids.add(mp.id)
            source_branch_ids.add(mp.source_branchID)
            person_ids.add(mp.registrantID)
            person_ids.add(mp.merge_reporterID)

        branches = load_related(
            Branch, branch_merge_proposals, (
                "target_branchID", "prerequisite_branchID",
                "source_branchID"))
        # The stacked on branches are used to check branch visibility.
        GenericBranchCollection.preloadVisibleStackedOnBranches(
            branches, user)

        if len(branches) == 0:
            return

        # Pre-load PreviewDiffs and Diffs.
        preview_diffs = IStore(BranchMergeProposal).find(
            PreviewDiff,
            PreviewDiff.branch_merge_proposal_id.is_in(ids)).order_by(
                PreviewDiff.branch_merge_proposal_id,
                Desc(PreviewDiff.date_created)).config(
                    distinct=[PreviewDiff.branch_merge_proposal_id])
        load_related(Diff, preview_diffs, ['diff_id'])
        for previewdiff in preview_diffs:
            cache = get_property_cache(previewdiff.branch_merge_proposal)
            cache.preview_diff = previewdiff

        # Add source branch owners to the list of pre-loaded persons.
        person_ids.update(
            branch.ownerID for branch in branches
            if branch.id in source_branch_ids)

        # Pre-load Person and ValidPersonCache.
        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
            person_ids, need_validity=True))

        # Pre-load branches' data.
        load_related(SourcePackageName, branches, ['sourcepackagenameID'])
        load_related(DistroSeries, branches, ['distroseriesID'])
        load_related(Product, branches, ['productID'])
        GenericBranchCollection.preloadDataForBranches(branches)
Example #37
 def test_can_rename_team_with_deleted_ppa(self):
     # A team with a deleted PPA can be renamed.
     owner = self.factory.makePerson()
     team = self.factory.makeTeam(owner=owner)
     archive = self.factory.makeArchive()
     self.factory.makeSourcePackagePublishingHistory(archive=archive)
     removeSecurityProxy(archive).status = ArchiveStatus.DELETED
     get_property_cache(team).archive = archive
     with person_logged_in(owner):
         view = create_initialized_view(team, name=self.view_name)
         self.assertFalse(view.form_fields['name'].for_display)
Example #38
 def destroySelf(self):
     """Remove this record and associated job/specific_job."""
     job = self.job
     specific_job = self.specific_job
     builder = self.builder
     SQLBase.destroySelf(self)
     specific_job.cleanUp()
     job.destroySelf()
     if builder is not None:
         del get_property_cache(builder).currentjob
     self._clear_specific_job_cache()
Example #42
 def reset(self):
     """See `IBuildQueue`."""
     builder = self.builder
     self.builder = None
     if self.job.status != JobStatus.WAITING:
         self.job.queue()
     self.job.date_started = None
     self.job.date_finished = None
     self.logtail = None
     self.specific_job.jobReset()
     if builder is not None:
         del get_property_cache(builder).currentjob
Example #44
 def __init__(self, comment, from_superseded=False, limit_length=True):
     if limit_length:
         comment_limit = config.malone.max_comment_size
     else:
         comment_limit = None
     super(CodeReviewDisplayComment, self).__init__(comment_limit)
     self.comment = comment
     get_property_cache(self).has_body = bool(self.comment.message_body)
     self.has_footer = self.comment.vote is not None
     # The date attribute is used to sort the comments in the conversation.
     self.date = self.comment.message.datecreated
     self.from_superseded = from_superseded
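Example #45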
 def preLoadDataForSourcePackageRecipes(sourcepackagerecipes):
     # Load the referencing SourcePackageRecipeData.
     spr_datas = load_referencing(
         SourcePackageRecipeData,
         sourcepackagerecipes, ['sourcepackage_recipe_id'])
     # Load the related branches.
     load_related(Branch, spr_datas, ['base_branch_id'])
     # Store the SourcePackageRecipeData in the sourcepackagerecipes
     # objects.
     for spr_data in spr_datas:
         cache = get_property_cache(spr_data.sourcepackage_recipe)
         cache._recipe_data = spr_data
     SourcePackageRecipeData.preLoadReferencedBranches(spr_datas)
Example #46
 def createProductRelease(self, owner, datereleased,
                          changelog=None, release_notes=None):
     """See `IMilestone`."""
     if self.product_release is not None:
         raise MultipleProductReleases()
     release = ProductRelease(
         owner=owner,
         changelog=changelog,
         release_notes=release_notes,
         datereleased=datereleased,
         milestone=self)
     del get_property_cache(self.productseries).releases
     return release
Example #47
    def restoreRequestFromSession(self):
        """Get the OpenIDRequest from our session."""
        session = self.getSession()
        cache = get_property_cache(self)
        try:
            cache.openid_parameters = session[OPENID_REQUEST_SESSION_KEY]
        except KeyError:
            raise UnexpectedFormData("No OpenID request in session")

        # Decode the request parameters and create the request object.
        self.openid_request = self.openid_server.decodeRequest(
            self.openid_parameters)
        assert zisinstance(self.openid_request, CheckIDRequest), (
            'Invalid OpenIDRequest in session')
Example #48
 def test_cannot_rename_team_with_active_ppa(self):
     # A team with an active PPA that contains publications cannot be
     # renamed.
     owner = self.factory.makePerson()
     team = self.factory.makeTeam(owner=owner)
     archive = self.factory.makeArchive(owner=team)
     self.factory.makeSourcePackagePublishingHistory(archive=archive)
     get_property_cache(team).archive = archive
     with person_logged_in(owner):
         view = create_initialized_view(team, name=self.view_name)
         self.assertTrue(view.form_fields['name'].for_display)
         self.assertEqual(
             'This team has an active PPA with packages published and '
             'may not be renamed.', view.widgets['name'].hint)
Example #49
    def set_translations_relicensing_agreement(self, value):
        """Set a translations relicensing decision by translator.

        If she has already made a decision, overrides it with the new one.
        """
        relicensing_agreement = TranslationRelicensingAgreement.selectOneBy(
            person=self.person)
        if relicensing_agreement is None:
            relicensing_agreement = TranslationRelicensingAgreement(
                person=self.person,
                allow_relicensing=value)
        else:
            relicensing_agreement.allow_relicensing = value
        del get_property_cache(self)._translations_relicensing_agreement
Example #50
 def preload_translators_count(languages):
     from lp.registry.model.person import PersonLanguage
     ids = set(language.id for language in languages).difference(
         set([None]))
     counts = IStore(Language).using(
         LeftJoin(
             Language,
             self._getTranslatorJoins(),
             PersonLanguage.languageID == Language.id),
         ).find(
             (Language, Count(PersonLanguage)),
             Language.id.is_in(ids),
         ).group_by(Language)
     for language, count in counts:
         get_property_cache(language).translators_count = count
Example #51
 def test_reject_changes_file_no_email(self):
     # If we are rejecting a mail, and the person to notify has no
     # preferred email, we should return early.
     archive = self.factory.makeArchive()
     distroseries = self.factory.makeDistroSeries()
     uploader = self.factory.makePerson()
     get_property_cache(uploader).preferredemail = None
     email = '%s <*****@*****.**>' % uploader.displayname
     changes = {'Changed-By': email, 'Maintainer': email}
     logger = BufferLogger()
     reject_changes_file(
         uploader, '/tmp/changes', changes, archive, distroseries, '',
         logger=logger)
     self.assertIn(
         'No recipients have a preferred email.', logger.getLogBuffer())
Example #52
 def test_cannot_rename_team_with_multiple_reasons(self):
     # Since public teams can have mailing lists and PPAs simultaneously,
     # there will be scenarios where more than one of these conditions is
     # actually blocking the team from being renamed.
     owner = self.factory.makePerson()
     team = self.factory.makeTeam(owner=owner)
     self.factory.makeMailingList(team, owner)
     archive = self.factory.makeArchive(owner=team)
     self.factory.makeSourcePackagePublishingHistory(archive=archive)
     get_property_cache(team).archive = archive
     with person_logged_in(owner):
         view = create_initialized_view(team, name=self.view_name)
         self.assertTrue(view.form_fields['name'].for_display)
         self.assertEqual(
             'This team has an active PPA with packages published and '
             'a mailing list and may not be renamed.',
             view.widgets['name'].hint)
Example #53
    def builders(self):
        """All active builders"""
        builders = list(self.context.getBuilders())

        # Populate builders' currentjob cachedproperty.
        queues = IStore(BuildQueue).find(
            BuildQueue,
            BuildQueue.builderID.is_in(
                builder.id for builder in builders))
        queue_builders = dict(
            (queue.builderID, queue) for queue in queues)
        for builder in builders:
            cache = get_property_cache(builder)
            cache.currentjob = queue_builders.get(builder.id, None)
        # Prefetch the jobs' data.
        BuildQueue.preloadSpecificJobData(queues)

        return builders
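Example #54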
    def addFile(self, file):
        """See `IBinaryPackageRelease`."""
        determined_filetype = None
        if file.filename.endswith(".deb"):
            determined_filetype = BinaryPackageFileType.DEB
        elif file.filename.endswith(".rpm"):
            determined_filetype = BinaryPackageFileType.RPM
        elif file.filename.endswith(".udeb"):
            determined_filetype = BinaryPackageFileType.UDEB
        elif file.filename.endswith(".ddeb"):
            determined_filetype = BinaryPackageFileType.DDEB
        else:
            raise AssertionError(
                'Unsupported file type: %s' % file.filename)

        del get_property_cache(self).files
        return BinaryPackageFile(binarypackagerelease=self,
                                 filetype=determined_filetype,
                                 libraryfile=file)
Example #55
    def updateWorkItems(self, new_work_items):
        """See ISpecification."""
        # First mark work items with titles that are no longer present as
        # deleted.
        self._deleteWorkItemsNotMatching(
            [wi['title'] for wi in new_work_items])
        work_items = self._work_items
        # At this point the list of new_work_items is necessarily the same
        # size as (or longer than) the list of existing ones, so we can just
        # iterate over it, updating the existing items and creating any new
        # ones.
        to_insert = []
        existing_titles = [wi.title for wi in work_items]
        existing_title_count = self._list_to_dict_of_frequency(existing_titles)

        for i, new_wi in enumerate(new_work_items):
            if (new_wi['title'] not in existing_titles or
                existing_title_count[new_wi['title']] == 0):
                to_insert.append((i, new_wi))
            else:
                existing_title_count[new_wi['title']] -= 1
                # Get an existing work item with the same title and update
                # it to match what we have now.
                existing_wi_index = existing_titles.index(new_wi['title'])
                existing_wi = work_items[existing_wi_index]
                # Mark a work item as dirty - don't use it again this update.
                existing_titles[existing_wi_index] = None
                # Update the sequence to match its current position on the
                # list entered by the user.
                existing_wi.sequence = i
                existing_wi.status = new_wi['status']
                existing_wi.assignee = new_wi['assignee']
                milestone = new_wi['milestone']
                if milestone is not None:
                    assert milestone.target == self.target, (
                        "%s does not belong to this spec's target (%s)" %
                            (milestone.displayname, self.target.name))
                existing_wi.milestone = milestone

        for sequence, item in to_insert:
            self.newWorkItem(item['title'], sequence, item['status'],
                             item['assignee'], item['milestone'])
        del get_property_cache(self).work_items
Example #56
 def preloadSpecificJobData(queues):
     key = attrgetter('job_type')
     for job_type, grouped_queues in groupby(queues, key=key):
         specific_class = specific_job_classes()[job_type]
         queue_subset = list(grouped_queues)
         job_subset = load_related(Job, queue_subset, ['jobID'])
         # We need to preload the build farm jobs early to avoid
         # _set_build_farm_job looking up BuildFarmBuildJobs one by one.
         specific_class.preloadBuildFarmJobs(job_subset)
         specific_jobs = list(specific_class.getByJobs(job_subset))
         if len(specific_jobs) == 0:
             continue
         specific_class.preloadJobsData(specific_jobs)
         specific_jobs_dict = dict(
             (specific_job.job, specific_job)
                 for specific_job in specific_jobs)
         for queue in queue_subset:
             cache = get_property_cache(queue)
             cache.specific_job = specific_jobs_dict[queue.job]
Example #57
 def subscribe(self, person, subscribed_by=None, essential=False):
     """See ISpecification."""
     if subscribed_by is None:
         subscribed_by = person
     # Create or modify a user's subscription to this blueprint.
     # First see if a relevant subscription exists, and if so, return it
     sub = self.subscription(person)
     if sub is not None:
         if sub.essential != essential:
             # If a subscription already exists, but the value for
             # 'essential' changes, there's no need to create a new
             # subscription, but we modify the existing subscription
             # and notify the user about the change.
             sub.essential = essential
             # The second argument should really be a copy of sub with
             # only the essential attribute changed, but we know
             # that we can get away with not examining the attribute
             # at all - it's a boolean!
             notify(ObjectModifiedEvent(
                     sub, sub, ['essential'], user=subscribed_by))
         return sub
     # since no previous subscription existed, create and return a new one
     sub = SpecificationSubscription(specification=self,
         person=person, essential=essential)
     property_cache = get_property_cache(self)
      if 'subscriptions' in property_cache:
         from lp.registry.model.person import person_sort_key
         property_cache.subscriptions.append(sub)
         property_cache.subscriptions.sort(
             key=lambda sub: person_sort_key(sub.person))
     if self.information_type in PRIVATE_INFORMATION_TYPES:
         # Grant the subscriber access if they can't see the
         # specification.
         service = getUtility(IService, 'sharing')
         ignored, ignored, shared_specs = service.getVisibleArtifacts(
             person, specifications=[self], ignore_permissions=True)
         if not shared_specs:
             service.ensureAccessGrants(
                 [person], subscribed_by, specifications=[self])
     notify(ObjectCreatedEvent(sub, user=subscribed_by))
     return sub
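Example #58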
 def initialize(self):
     # Work out the review groups
     self.review_groups = {}
     self.getter = getUtility(IBranchMergeProposalGetter)
     reviewer = self._getReviewer()
      # Listify so the proposals can be passed into the vote and summary
      # getters below.
     proposals = list(self.getProposals())
     all_votes = self.getter.getVotesForProposals(proposals)
     vote_summaries = self.getter.getVoteSummariesForProposals(proposals)
     for proposal in proposals:
         proposal_votes = all_votes[proposal]
         review_group = self._getReviewGroup(proposal, proposal_votes, reviewer)
         self.review_groups.setdefault(review_group, []).append(
             BranchMergeProposalListingItem(proposal, vote_summaries[proposal], None, proposal_votes)
         )
         if proposal.preview_diff is not None:
             self.show_diffs = True
     # Sort each collection...
     for group in self.review_groups.values():
         group.sort(key=attrgetter("sort_key"))
     get_property_cache(self).proposal_count = len(proposals)
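Example #59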
 def preLoadReferencedBranches(sourcepackagerecipedatas):
     # Load the related Branch, _SourcePackageRecipeDataInstruction.
     load_related(
         Branch, sourcepackagerecipedatas, ['base_branch_id'])
     sprd_instructions = load_referencing(
         _SourcePackageRecipeDataInstruction,
         sourcepackagerecipedatas, ['recipe_data_id'])
     sub_branches = load_related(
         Branch, sprd_instructions, ['branch_id'])
     # Store the pre-fetched objects on the sourcepackagerecipedatas
     # objects.
     branch_to_recipe_data = dict([
         (instr.branch_id, instr.recipe_data_id)
             for instr in sprd_instructions])
     caches = dict((sprd.id, [sprd, get_property_cache(sprd)])
         for sprd in sourcepackagerecipedatas)
     for unused, [sprd, cache] in caches.items():
         cache._referenced_branches = [sprd.base_branch]
     for recipe_data_id, branches in groupby(
         sub_branches, lambda branch: branch_to_recipe_data[branch.id]):
         cache = caches[recipe_data_id][1]
         cache._referenced_branches.extend(list(branches))