def delete(self):
    """Delete this exercise, provided it has no associated worksheets."""
    if self.worksheet_exercises.count() > 0:
        raise IntegrityError()
    # Test suites belong to this exercise, so cascade the delete to them.
    for test_suite in self.test_suites:
        test_suite.delete()
    Store.of(self).remove(self)
def bequeathe_flags(source_message, target_message, incumbents=None):
    """Destroy `source_message`, leaving flags to `target_message`.

    If `source_message` holds the is_current_ubuntu flag, and there are no
    `incumbents` that hold the same flag, then `target_message` inherits
    it.  Similar for the is_current_upstream flag.
    """
    # Clear flags that conflict with incumbents before any transfer.
    sacrifice_flags(source_message, incumbents)
    if (source_message.is_current_ubuntu and
            not target_message.is_current_ubuntu):
        # Transfer is_current_ubuntu flag.
        source_message.is_current_ubuntu = False
        target_message.is_current_ubuntu = True
        # Flush the source's flag removal before the target's grant, so
        # both rows never hold the flag at the same time in the DB.
        Store.of(source_message).add_flush_order(
            source_message, target_message)
    if (source_message.is_current_upstream and
            not target_message.is_current_upstream):
        # Transfer is_current_upstream flag.
        source_message.is_current_upstream = False
        target_message.is_current_upstream = True
        Store.of(source_message).add_flush_order(
            source_message, target_message)
    source_message.destroySelf()
def test_getByBuildFarmJobs(self):
    """getByBuildFarmJobs maps build-farm jobs back to their builds."""
    builds = [self.makeSourcePackageRecipeBuild() for _ in range(10)]
    Store.of(builds[0]).flush()
    farm_jobs = [build.build_farm_job for build in builds]
    self.assertContentEqual(
        builds, SourcePackageRecipeBuild.getByBuildFarmJobs(farm_jobs))
def test_generateEmail_with_null_fields(self):
    """GenerateEmail works when many fields are NULL."""
    person = self.factory.makePerson(name='person')
    cake = self.factory.makeSourcePackageRecipe(
        name=u'recipe', owner=person)
    pantry_owner = self.factory.makePerson(name='archiveowner')
    pantry = self.factory.makeArchive(name='ppa', owner=pantry_owner)
    secret = self.factory.makeDistroSeries(name=u'distroseries')
    build = self.factory.makeSourcePackageRecipeBuild(
        recipe=cake, distroseries=secret, archive=pantry,
        status=BuildStatus.SUPERSEDED)
    # Flush so the build has a database id for the subject line.
    Store.of(build).flush()
    ctrl = self.makeStatusEmail(build)
    self.assertEqual(
        u'[recipe build #%d] of ~person recipe in distroseries: '
        'Build for superseded Source' % (build.id), ctrl.subject)
    # The body and signature-style footer are separated by '\n-- \n'.
    body, footer = ctrl.body.split('\n-- \n')
    self.assertEqual(superseded_body, body)
    build_url = canonical_url(build)
    self.assertEqual(
        '%s\nYou are the requester of the build.\n' % build_url, footer)
    self.assertEqual(
        config.canonical.noreply_from_address, ctrl.from_addr)
    # The notification carries rationale/type/state headers.
    self.assertEqual(
        'Requester', ctrl.headers['X-Launchpad-Message-Rationale'])
    self.assertEqual(
        'recipe-build-status',
        ctrl.headers['X-Launchpad-Notification-Type'])
    self.assertEqual(
        'SUPERSEDED', ctrl.headers['X-Launchpad-Build-State'])
def delete(self):
    """Delete the project.  Fails if can_delete is False."""
    if not self.can_delete:
        raise IntegrityError()
    # Cascade: remove dependent assessed records before the project.
    for child in self.assesseds:
        child.delete()
    Store.of(self).remove(self)
def delete(self):
    """Delete this suite, without asking questions."""
    # Dependent rows go first: variables, then test cases.
    for var in self.variables:
        var.delete()
    for case in self.test_cases:
        case.delete()
    Store.of(self).remove(self)
def bequeathe_flags(source_message, target_message, incumbents=None):
    """Destroy `source_message`, leaving flags to `target_message`.

    If `source_message` holds the is_current_ubuntu flag, and there are no
    `incumbents` that hold the same flag, then `target_message` inherits
    it.  Similar for the is_current_upstream flag.
    """
    # Clear flags that conflict with incumbents before any transfer.
    sacrifice_flags(source_message, incumbents)
    if (source_message.is_current_ubuntu and
            not target_message.is_current_ubuntu):
        # Transfer is_current_ubuntu flag.
        source_message.is_current_ubuntu = False
        target_message.is_current_ubuntu = True
        # Flush the source's flag removal before the target's grant, so
        # both rows never hold the flag at the same time in the DB.
        Store.of(source_message).add_flush_order(
            source_message, target_message)
    if (source_message.is_current_upstream and
            not target_message.is_current_upstream):
        # Transfer is_current_upstream flag.
        source_message.is_current_upstream = False
        target_message.is_current_upstream = True
        Store.of(source_message).add_flush_order(
            source_message, target_message)
    source_message.destroySelf()
def take(self, count):
    """Take some amount of parts from this pile.

    Returns a new Part holding `count` items with every descriptive
    field copied over, or this very object when the entire pile is
    taken.
    """
    assert count > 0
    assert count <= self.count
    # Taking everything is a no-op: hand back the pile itself.
    if count == self.count:
        return self
    split = Part()
    split.count = count
    self.count -= count
    # Copy every descriptive attribute onto the split-off pile.
    for attr in ('source', 'date', 'price', 'vat', 'part_type',
                 'assignment', 'history', 'soldered', 'usable'):
        setattr(split, attr, getattr(self, attr))
    Store.of(self).add(split)
    return split
def delete(self):
    """Delete the assessed.  Fails if there are any submissions.

    Deletes extensions.
    """
    if self.submissions.count() > 0:
        raise IntegrityError()
    # Extensions hang off the assessed, so remove them first.
    for ext in self.extensions:
        ext.delete()
    Store.of(self).remove(self)
def cancel(self, archive_subscriber_ids, cancelled_by):
    """See `IArchiveSubscriberSet`."""
    store = Store.of(cancelled_by)
    subscriptions = store.find(
        ArchiveSubscriber,
        ArchiveSubscriber.id.is_in(archive_subscriber_ids))
    # Bulk-update the matched rows in a single statement.
    subscriptions.set(
        date_cancelled=UTC_NOW,
        cancelled_by_id=cancelled_by.id,
        status=ArchiveSubscriberStatus.CANCELLED)
def destroySelf(self, user):
    """See `IGitRule`."""
    # Log the removal first, while the rule still exists.
    getUtility(IGitActivitySet).logRuleRemoved(self, user)
    # Grants depend on the rule, so destroy them before the rule itself.
    for grant in self.grants:
        grant.destroySelf()
    # Snapshot the rule list before removal so the remaining rules can
    # be resequenced without this one.
    rules = list(self.repository.rules)
    Store.of(self).remove(self)
    rules.remove(self)
    removeSecurityProxy(self.repository)._syncRulePositions(rules)
def test_builder_history(self):
    """The build appears on its builder's +history page."""
    Store.of(self.build).flush()
    self.build.updateStatus(BuildStatus.FULLYBUILT,
                            builder=self.factory.makeBuilder())
    title = self.build.title
    browser = self.getViewBrowser(self.build.builder, "+history")
    self.assertTextMatchesExpressionIgnoreWhitespace(
        "Build history.*%s" % title,
        extract_text(find_main_content(browser.contents)))
    # The history entry links back to the build page.
    self.assertEqual(self.build_url, browser.getLink(title).url)
def delete(self):
    """Delete the worksheet, provided it has no saves or attempts on any
    of its exercises.

    Raises IntegrityError if any worksheet exercise has saves or
    attempts attached; otherwise removes all exercises and the
    worksheet itself.  (Returns None in either case.)
    """
    for ws_ex in self.all_worksheet_exercises:
        if ws_ex.saves.count() > 0 or ws_ex.attempts.count() > 0:
            raise IntegrityError()
    self.remove_all_exercises()
    Store.of(self).remove(self)
def test_binary_builds(self):
    """The binary_builds property should be populated automatically."""
    spb = self.factory.makeSourcePackageRecipeBuild()
    multiverse = self.factory.makeComponent(name='multiverse')
    spr = self.factory.makeSourcePackageRelease(
        source_package_recipe_build=spb, component=multiverse)
    # No binaries exist yet.
    self.assertEqual([], list(spb.binary_builds))
    binary = self.factory.makeBinaryPackageBuild(spr)
    # An unrelated binary build must not show up in binary_builds.
    self.factory.makeBinaryPackageBuild()
    Store.of(binary).flush()
    self.assertEqual([binary], list(spb.binary_builds))
def test_builder_history(self):
    """A recipe build appears on its builder's +history page."""
    build = self.makeRecipeBuild()
    Store.of(build).flush()
    build_url = canonical_url(build)
    build.updateStatus(
        BuildStatus.FULLYBUILT, builder=self.factory.makeBuilder())
    browser = self.getViewBrowser(build.builder, '+history')
    self.assertTextMatchesExpressionIgnoreWhitespace(
        'Build history.*~chef/chocolate/cake recipe build',
        extract_text(find_main_content(browser.contents)))
    # The history entry links back to the build page.
    self.assertEqual(build_url,
                     browser.getLink('~chef/chocolate/cake recipe build').url)
def test_update_existing_record(self):
    '''Existing records should be updated.'''
    handler = self.handler()
    # Index the same file twice; a single row must exist each time.
    for _ in range(2):
        image = handler(self.filename)
        matches = Store.of(image).find(
            models.PhotoImage,
            models.PhotoImage.filename == self.filename)
        self.assertEqual(matches.count(), 1)
def test_update_existing_record(self):
    '''Existing records should be updated.'''
    handler = self.handler()
    # Index the same file twice; a single row must exist each time.
    for _ in range(2):
        video = handler(self.filename)
        matches = Store.of(video).find(
            models.VideoFile,
            models.VideoFile.filename == self.filename)
        self.assertEqual(matches.count(), 1)
def test_builder_history(self):
    """A recipe build appears on its builder's +history page."""
    build = self.makeRecipeBuild()
    Store.of(build).flush()
    build_url = canonical_url(build)
    build.updateStatus(BuildStatus.FULLYBUILT,
                       builder=self.factory.makeBuilder())
    browser = self.getViewBrowser(build.builder, '+history')
    self.assertTextMatchesExpressionIgnoreWhitespace(
        'Build history.*~chef/chocolate/cake recipe build',
        extract_text(find_main_content(browser.contents)))
    # The history entry links back to the build page.
    self.assertEqual(
        build_url,
        browser.getLink('~chef/chocolate/cake recipe build').url)
def test_update_existing_record(self):
    '''The same file should not be indexed twice, but updated.'''
    handler = self.handler()
    _mp3 = handler(self.filename)
    mp3 = Store.of(_mp3).find(
        models.MusicTrack,
        models.MusicTrack.filename == self.filename).one()
    # Tamper with the stored comment...
    mp3.comment = u'Foo bar baz'
    Store.of(mp3).commit()
    # ...then index the same file again.
    _mp3 = handler(self.filename)
    files = Store.of(_mp3).find(
        models.MusicTrack,
        models.MusicTrack.filename == self.filename)
    # No duplicate row was created, and the comment was restored
    # (presumably from the file's own metadata — the fixture appears
    # to contain 'This is a comment').
    self.assertEqual(files.count(), 1)
    self.assertEqual(files.one().comment, u'This is a comment')
def new(cls, distribution, sourcepackagename,
        is_upstream_link_allowed=False):
    """Create a new DSP with the given parameters.

    Caches the `(distro_id, spn_id) --> dsp_id` mapping.
    """
    dsp = DistributionSourcePackageInDatabase()
    dsp.distribution = distribution
    dsp.sourcepackagename = sourcepackagename
    dsp.is_upstream_link_allowed = is_upstream_link_allowed
    Store.of(distribution).add(dsp)
    # Flush so dsp.id is assigned before it goes into the cache.
    Store.of(distribution).flush()
    dsp_cache_key = distribution.id, sourcepackagename.id
    cls._cache[dsp_cache_key] = dsp.id
    return dsp
def clone_worksheets(self, source):
    """Clone all worksheets from the specified source to this offering."""
    # Imported here rather than at module level (matching the original
    # code's placement, which avoids an import cycle).
    import ivle.worksheet.utils
    copied_attrs = ('seq_no', 'identifier', 'name', 'assessable',
                    'published', 'data', 'format')
    for original in source.worksheets:
        clone = Worksheet()
        for attr in copied_attrs:
            setattr(clone, attr, getattr(original, attr))
        clone.offering = self
        # Add to the store before syncing the exercise list.
        Store.of(self).add(clone)
        ivle.worksheet.utils.update_exerciselist(clone)
def usable_distro_series(self, value):
    # Map each currently-enabled DistroSeries to its link row.
    enablements = dict(
        Store.of(self).find(
            (DistroSeries, SnappyDistroSeries),
            SnappyDistroSeries.snappy_series == self,
            SnappyDistroSeries.distro_series_id == DistroSeries.id))
    # Remove links for series no longer in the new value.
    for distro_series in enablements:
        if distro_series not in value:
            if enablements[distro_series].preferred:
                # The preferred series is being dropped; invalidate the
                # cached preference.
                get_property_cache(self)._preferred_distro_series = None
            Store.of(self).remove(enablements[distro_series])
    # Add links for newly-enabled series.
    for distro_series in value:
        if distro_series not in enablements:
            link = SnappyDistroSeries(self, distro_series)
            Store.of(self).add(link)
def test_scan_branch(self):
    """Test that scan branches adds revisions to the database."""
    self.useBzrBranches()
    db_branch = self.factory.makeAnyBranch()
    self.make_branch_with_commits_and_scan_job(db_branch)
    # Subscribe someone so the scan produces revision mail.
    db_branch.subscribe(
        db_branch.registrant,
        BranchSubscriptionNotificationLevel.FULL,
        BranchSubscriptionDiffSize.WHOLEDIFF,
        CodeReviewNotificationLevel.FULL, db_branch.registrant)
    transaction.commit()
    self.run_script_and_assert_success()
    self.assertEqual(db_branch.revision_count, 3)
    # Exactly one waiting REVISION_MAIL job should have been queued.
    store = Store.of(db_branch)
    result = store.find(
        BranchJob,
        BranchJob.jobID == Job.id,
        Job._status == JobStatus.WAITING,
        BranchJob.job_type == BranchJobType.REVISION_MAIL,
        BranchJob.branch == db_branch)
    self.assertEqual(result.count(), 1)
def specifications(self, user, sort=None, quantity=None, filter=None, need_people=False, need_branches=False, need_workitems=False): """See IHasSpecifications.""" # need_* is provided only for interface compatibility and # need_*=True is not implemented. if filter is None: filter = set([SpecificationFilter.ACCEPTED]) tables, query = self.spec_filter_clause(user, filter) # import here to avoid circular deps from lp.blueprints.model.specification import Specification results = Store.of(self).using(*tables).find(Specification, *query) if sort == SpecificationSort.DATE: order = (Desc(SprintSpecification.date_created), Specification.id) distinct = [SprintSpecification.date_created, Specification.id] # we need to establish if the listing will show specs that have # been decided only, or will include proposed specs. if (SpecificationFilter.ALL not in filter and SpecificationFilter.PROPOSED not in filter): # this will show only decided specs so use the date the spec # was accepted or declined for the sprint order = (Desc(SprintSpecification.date_decided),) + order distinct = [SprintSpecification.date_decided] + distinct results = results.order_by(*order) else: assert sort is None or sort == SpecificationSort.PRIORITY # fall back to default, which is priority, descending. distinct = True if quantity is not None: results = results[:quantity] return results.config(distinct=distinct)
def createRecipeBuildWithBuilder(self, private_branch=False, builder=None):
    """Make a two-branch recipe build and mark it built by `builder`.

    If `private_branch` is True, the first branch is made private.
    """
    if builder is None:
        builder = self.factory.makeBuilder()
    branch2 = self.factory.makeAnyBranch()
    branch1 = self.factory.makeAnyBranch()
    build = self.factory.makeSourcePackageRecipeBuild(
        recipe=self.factory.makeSourcePackageRecipe(
            branches=[branch1, branch2]))
    if private_branch:
        # Privacy changes need admin privileges.
        with celebrity_logged_in('admin'):
            branch1.setPrivate(
                True, getUtility(IPersonSet).getByEmail(ADMIN_EMAIL))
    Store.of(build).flush()
    self.markAsBuilt(build, builder)
    return build
def setRecipe(self, builder_recipe):
    """Convert the BaseRecipeBranch `builder_recipe` to the db form.

    Raises TooNewRecipeFormat, NoSuchBranch or PrivateBranchRecipe when
    the recipe cannot be stored.
    """
    clear_property_cache(self)
    if builder_recipe.format > MAX_RECIPE_FORMAT:
        raise TooNewRecipeFormat(builder_recipe.format, MAX_RECIPE_FORMAT)
    branch_map = self._scanInstructions(builder_recipe)
    # If this object hasn't been added to a store yet, there can't be any
    # instructions linking to us yet.
    if Store.of(self) is not None:
        self.instructions.find().remove()
    branch_lookup = getUtility(IBranchLookup)
    base_branch = branch_lookup.getByUrl(builder_recipe.url)
    if base_branch is None:
        raise NoSuchBranch(builder_recipe.url)
    if base_branch.private:
        raise PrivateBranchRecipe(base_branch)
    if builder_recipe.revspec is not None:
        self.revspec = unicode(builder_recipe.revspec)
    else:
        self.revspec = None
    self._recordInstructions(
        builder_recipe, parent_insn=None, branch_map=branch_map)
    self.base_branch = base_branch
    if builder_recipe.deb_version is None:
        self.deb_version_template = None
    else:
        self.deb_version_template = unicode(builder_recipe.deb_version)
    self.recipe_format = unicode(builder_recipe.format)
def findRelatedBMP(self, revision_ids):
    """Find merge proposals related to the revision-ids and branch.

    Only proposals whose source branch last-scanned-id is in the set of
    revision-ids and whose target_branch is the BranchJob branch are
    returned; superseded proposals are excluded.

    Only return the most recent proposal for any given source branch.

    :param revision_ids: A list of revision-ids to look for.
    """
    store = Store.of(self.branch)
    result = store.find(
        (BranchMergeProposal, Branch),
        BranchMergeProposal.target_branch == self.branch.id,
        BranchMergeProposal.source_branch == Branch.id,
        Branch.last_scanned_id.is_in(revision_ids),
        (BranchMergeProposal.queue_status !=
            BranchMergeProposalStatus.SUPERSEDED))
    proposals = {}
    for proposal, source in result:
        # Only show the most recent proposal for any given source.
        date_created = proposal.date_created
        source_id = source.id
        if (source_id not in proposals or
                date_created > proposals[source_id][1]):
            proposals[source_id] = (proposal, date_created)
    # Newest first across all sources.
    return sorted(
        [proposal for proposal, date_created in proposals.itervalues()],
        key=operator.attrgetter('date_created'), reverse=True)
def getReleasesAndPublishingHistory(self):
    """See `IDistributionSourcePackage`."""
    pub_constraints = (
        DistroSeries.distribution == self.distribution,
        SourcePackagePublishingHistory.distroseries == DistroSeries.id,
        SourcePackagePublishingHistory.archiveID.is_in(
            self.distribution.all_distro_archive_ids),
        SourcePackagePublishingHistory.sourcepackagename ==
            self.sourcepackagename,
        )
    # Find distinct SPRs for our SPN in our archives.
    spr_ids = Store.of(self.distribution).find(
        SourcePackagePublishingHistory.sourcepackagereleaseID,
        *pub_constraints).order_by(
            Desc(SourcePackagePublishingHistory.sourcepackagereleaseID)
        ).config(distinct=True)

    def decorate(spr_ids):
        # Find the SPPHs for each SPR in our result.
        load(SourcePackageRelease, spr_ids)
        sprs = [SourcePackageRelease.get(spr_id) for spr_id in spr_ids]
        pubs = DistributionSourcePackageRelease.getPublishingHistories(
            self.distribution, sprs)
        # Group publishing records by their source package release id.
        sprs_by_id = dict(
            (spr, list(pubs)) for (spr, pubs) in itertools.groupby(
                pubs, attrgetter('sourcepackagereleaseID')))
        return [
            (DistributionSourcePackageRelease(
                distribution=self.distribution,
                sourcepackagerelease=spr),
             sprs_by_id[spr.id])
            for spr in sprs]

    # Decoration is deferred until the result set is iterated.
    return DecoratedResultSet(spr_ids, bulk_decorator=decorate)
def get(cls, distribution, sourcepackagename): """Get a DSP given distribution and source package name. Attempts to use a cached `(distro_id, spn_id) --> dsp_id` mapping to avoid hitting the database. """ # Check for a cached mapping from (distro_id, spn_id) to dsp_id. dsp_cache_key = distribution.id, sourcepackagename.id dsp_id = cls._cache.get(dsp_cache_key) # If not, fetch from the database. if dsp_id is None: return cls.getDirect(distribution, sourcepackagename) # Try store.get(), allowing Storm to answer from cache if it can. store = Store.of(distribution) dsp = store.get(DistributionSourcePackageInDatabase, dsp_id) # If it's not found, query the database; the mapping might be stale. if dsp is None: return cls.getDirect(distribution, sourcepackagename) # Check that the mapping in the cache was correct. if distribution.id != dsp.distribution_id: return cls.getDirect(distribution, sourcepackagename) if sourcepackagename.id != dsp.sourcepackagename_id: return cls.getDirect(distribution, sourcepackagename) # Cache hit, phew. return dsp
def test_requestBuild(self):
    # requestBuild creates a new LiveFSBuild.
    livefs = self.factory.makeLiveFS()
    distroarchseries = self.factory.makeDistroArchSeries(
        distroseries=livefs.distro_series)
    build = livefs.requestBuild(
        livefs.owner, livefs.distro_series.main_archive,
        distroarchseries, PackagePublishingPocket.RELEASE)
    self.assertTrue(ILiveFSBuild.providedBy(build))
    self.assertEqual(livefs.owner, build.requester)
    self.assertEqual(livefs.distro_series.main_archive, build.archive)
    self.assertEqual(distroarchseries, build.distro_arch_series)
    self.assertEqual(PackagePublishingPocket.RELEASE, build.pocket)
    self.assertIsNone(build.unique_key)
    self.assertIsNone(build.metadata_override)
    self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
    # A matching BuildQueue entry is created along with the build.
    store = Store.of(build)
    store.flush()
    build_queue = store.find(
        BuildQueue,
        BuildQueue._build_farm_job_id == removeSecurityProxy(
            build).build_farm_job_id).one()
    self.assertProvides(build_queue, IBuildQueue)
    self.assertEqual(
        livefs.distro_series.main_archive.require_virtualized,
        build_queue.virtualized)
    self.assertEqual(BuildQueueStatus.WAITING, build_queue.status)
def subscribe(self, subscriber, role=MemberRole.member):
    """See `IMailingList`."""
    store = Store.of(self)
    if IAddress.providedBy(subscriber):
        # Reject duplicate subscriptions for the same address and role.
        member = store.find(
            Member,
            Member.role == role,
            Member.list_id == self._list_id,
            Member._address == subscriber).one()
        if member:
            raise AlreadySubscribedError(
                self.fqdn_listname, subscriber.email, role)
    elif IUser.providedBy(subscriber):
        # A user must have a preferred address before subscribing.
        if subscriber.preferred_address is None:
            raise MissingPreferredAddressError(subscriber)
        member = store.find(
            Member,
            Member.role == role,
            Member.list_id == self._list_id,
            Member._user == subscriber).one()
        if member:
            raise AlreadySubscribedError(
                self.fqdn_listname, subscriber, role)
    else:
        raise ValueError('subscriber must be an address or user')
    member = Member(
        role=role, list_id=self._list_id, subscriber=subscriber)
    member.preferences = Preferences()
    store.add(member)
    # Let interested parties react to the new subscription.
    notify(SubscriptionEvent(self, member))
    return member
def run(self):
    """See `IGitRefScanJob`."""
    try:
        # Serialise scans of the same repository via an advisory lock.
        with try_advisory_lock(
                LockType.GIT_REF_SCAN, self.repository.id,
                Store.of(self.repository)):
            hosting_path = self.repository.getInternalPath()
            refs_to_upsert, refs_to_remove = (
                self.repository.planRefChanges(hosting_path, logger=log))
            self.repository.fetchRefCommits(
                hosting_path, refs_to_upsert, logger=log)
            # The webhook delivery includes old ref information, so
            # prepare it before we actually execute the changes.
            if getFeatureFlag('code.git.webhooks.enabled'):
                payload = self.composeWebhookPayload(
                    self.repository, refs_to_upsert, refs_to_remove)
                getUtility(IWebhookSet).trigger(
                    self.repository, 'git:push:0.1', payload)
            self.repository.synchroniseRefs(
                refs_to_upsert, refs_to_remove, logger=log)
            props = getUtility(IGitHostingClient).getProperties(
                hosting_path)
            # We don't want ref canonicalisation, nor do we want to send
            # this change back to the hosting service.
            removeSecurityProxy(self.repository)._default_branch = (
                props["default_branch"])
    except LostObjectError:
        log.info(
            "Skipping repository %s because it has been deleted."
            % self._cached_repository_name)
def getReleasesAndPublishingHistory(self):
    """See `IDistributionSourcePackage`."""
    store = Store.of(self.distribution)
    result = store.find(
        (SourcePackageRelease, SourcePackagePublishingHistory),
        SourcePackagePublishingHistory.distroseries == DistroSeries.id,
        DistroSeries.distribution == self.distribution,
        SourcePackagePublishingHistory.archiveID.is_in(
            self.distribution.all_distro_archive_ids),
        SourcePackagePublishingHistory.sourcepackagename ==
            self.sourcepackagename,
        SourcePackageRelease.id ==
            SourcePackagePublishingHistory.sourcepackagereleaseID)
    # Order so rows for the same release are adjacent (groupby below
    # relies on this), newest release and publication first.
    result.order_by(
        Desc(SourcePackageRelease.id),
        Desc(SourcePackagePublishingHistory.datecreated),
        Desc(SourcePackagePublishingHistory.id))
    # Collate the publishing history by SourcePackageRelease.
    dspr_pubs = []
    for spr, pubs in itertools.groupby(result, operator.itemgetter(0)):
        dspr_pubs.append(
            (DistributionSourcePackageRelease(
                distribution=self.distribution,
                sourcepackagerelease=spr),
             [spph for (spr, spph) in pubs]))
    return dspr_pubs
def getSpecifications(self, user):
    """See `IMilestoneData`"""
    from lp.registry.model.person import Person
    origin = [Specification]
    product_origin, clauses = get_specification_active_product_filter(
        self)
    origin.extend(product_origin)
    clauses.extend(get_specification_privacy_filter(user))
    origin.append(LeftJoin(Person, Specification._assigneeID == Person.id))
    milestones = self._milestone_ids_expr(user)
    # A spec is relevant if either the spec itself or one of its
    # non-deleted work items targets one of the milestones.
    results = Store.of(self.target).using(*origin).find(
        (Specification, Person),
        Specification.id.is_in(
            Union(
                Select(
                    Specification.id, tables=[Specification],
                    where=(Specification.milestoneID.is_in(milestones))),
                Select(
                    SpecificationWorkItem.specification_id,
                    tables=[SpecificationWorkItem],
                    where=And(
                        SpecificationWorkItem.milestone_id.is_in(
                            milestones),
                        SpecificationWorkItem.deleted == False)),
                all=True)),
        *clauses)
    ordered_results = results.order_by(
        Desc(Specification.priority), Specification.definition_status,
        Specification.implementation_status, Specification.title)
    ordered_results.config(distinct=True)
    # Callers only want the Specification; Person is joined for
    # eager loading.
    return DecoratedResultSet(ordered_results, itemgetter(0))
def _get_milestones(self):
    """See `IHasMilestones`."""
    condition = And(
        self._getMilestoneCondition(), Milestone.active == True)
    milestones = Store.of(self).find(Milestone, condition)
    return milestones.order_by(self._milestone_order)
def _get_milestones(self):
    """See `IHasMilestones`."""
    # Restrict to active milestones matching the subclass condition.
    active_and_matching = And(
        self._getMilestoneCondition(), Milestone.active == True)
    found = Store.of(self).find(Milestone, active_and_matching)
    return found.order_by(self._milestone_order)
def getSpecifications(self, user):
    """See `IMilestoneData`"""
    from lp.registry.model.person import Person
    origin = [Specification]
    product_origin, clauses = get_specification_active_product_filter(self)
    origin.extend(product_origin)
    clauses.extend(get_specification_privacy_filter(user))
    origin.append(LeftJoin(Person, Specification._assigneeID == Person.id))
    milestones = self._milestone_ids_expr(user)
    # A spec is relevant if either the spec itself or one of its
    # non-deleted work items targets one of the milestones.
    results = Store.of(self.target).using(*origin).find(
        (Specification, Person),
        Specification.id.is_in(
            Union(
                Select(
                    Specification.id, tables=[Specification],
                    where=(Specification.milestoneID.is_in(milestones))),
                Select(
                    SpecificationWorkItem.specification_id,
                    tables=[SpecificationWorkItem],
                    where=And(
                        SpecificationWorkItem.milestone_id.is_in(
                            milestones),
                        SpecificationWorkItem.deleted == False)),
                all=True)),
        *clauses)
    ordered_results = results.order_by(
        Desc(Specification.priority), Specification.definition_status,
        Specification.implementation_status, Specification.title)
    ordered_results.config(distinct=True)
    # Callers only want the Specification; Person is joined for
    # eager loading.
    return DecoratedResultSet(ordered_results, itemgetter(0))
def vote(self, value, user_id): store = Store.of(self) # Checks if the user has already voted for this message. existing = self.votes.find(Vote.user_id == user_id).one() if existing is not None and existing.value == value: return # Vote already recorded (should I raise an exception?) if value not in (0, 1, -1): raise ValueError("A vote can only be +1 or -1 (or 0 to cancel)") # The vote can be added, changed or cancelled. Keep it simple and # delete likes and dislikes cached values. store.cache.delete_multi(( # this message's (dis)likes count str("list:%s:email:%s:likes" % (self.list_name, self.message_id)), str("list:%s:email:%s:dislikes" % (self.list_name, self.message_id)), # this thread (dis)likes count str("list:%s:thread:%s:likes" % (self.list_name, self.thread_id)), str("list:%s:thread:%s:dislikes" % (self.list_name, self.thread_id)), # the user's vote count on this list str("user:%s:list:%s:votes" % (user_id, self.list_name)), )) if existing is not None: # vote changed or cancelled if value == 0: store.remove(existing) else: existing.value = value else: # new vote if store.get(User, user_id) is None: store.add(User(user_id)) store.add(Vote(self.list_name, self.message_id, user_id, value))
def test_ready_jobs(self):
    """Job.ready_jobs should include new jobs."""
    preexisting = self._sampleData()
    job = Job()
    ready = list(Store.of(job).execute(Job.ready_jobs))
    self.assertEqual(preexisting + [(job.id,)], ready)
def submit(self, principal, path, revision, who, late=False):
    """Submit a Subversion path and revision to a project.

    @param principal: The owner of the Subversion repository, and the
        entity on behalf of whom the submission is being made
    @param path: A path within that repository to submit.
    @param revision: The revision of that path to submit.
    @param who: The user who is actually making the submission.
    @param late: If True, will not raise a DeadlinePassed exception even
        after the deadline. (Default False.)
    """
    if not self.can_submit(principal, who, late=late):
        raise DeadlinePassed()
    a = Assessed.get(Store.of(self), principal, self)
    ps = ProjectSubmission()
    # Raise SubmissionError if the path is illegal
    ps.path = ProjectSubmission.test_and_normalise_path(path)
    ps.revision = revision
    # NOTE(review): naive local time — confirm whether UTC is expected.
    ps.date_submitted = datetime.datetime.now()
    ps.assessed = a
    ps.submitter = who
    return ps
def fromText(self, subject, content, owner=None, datecreated=None,
             rfc822msgid=None):
    """See IMessageSet."""
    if datecreated is None:
        datecreated = UTC_NOW
    if rfc822msgid is None:
        # Generate a message-id if the caller did not supply one.
        rfc822msgid = make_msgid("launchpad")
    message = Message(
        subject=subject, rfc822msgid=rfc822msgid, owner=owner,
        datecreated=datecreated)
    MessageChunk(message=message, sequence=1, content=content)
    # XXX 2008-05-27 jamesh:
    # Ensure that BugMessages get flushed in same order as they
    # are created.
    Store.of(message).flush()
    return message
def getTagsData(self):
    """See IMilestone."""
    # Imported locally to avoid a circular import at module load time.
    from lp.registry.model.milestonetag import MilestoneTag
    result = Store.of(self).find(
        MilestoneTag, MilestoneTag.milestone_id == self.id)
    return result.order_by(MilestoneTag.tag)
def getTagsData(self):
    """See IMilestone."""
    # Local import breaks a circular dependency at module load time.
    from lp.registry.model.milestonetag import MilestoneTag
    tags = Store.of(self).find(
        MilestoneTag, MilestoneTag.milestone_id == self.id)
    return tags.order_by(MilestoneTag.tag)
def test_ready_jobs_lease_in_future(self):
    """Job.ready_jobs should not include jobs with active leases."""
    preexisting = self._sampleData()
    lease_expiry = datetime.fromtimestamp(
        time.time() + 1000, pytz.timezone('UTC'))
    job = Job(lease_expires=lease_expiry)
    ready = list(Store.of(job).execute(Job.ready_jobs))
    self.assertEqual(preexisting, ready)
def test_ready_jobs_lease_expired(self):
    """Job.ready_jobs should include jobs with expired leases."""
    preexisting = self._sampleData()
    epoch = datetime.fromtimestamp(0, pytz.timezone('UTC'))
    job = Job(lease_expires=epoch)
    ready = list(Store.of(job).execute(Job.ready_jobs))
    self.assertEqual(preexisting + [(job.id,)], ready)
def request_action(self, action, data):
    """Create the recipe from validated form `data` and redirect to it."""
    owner = data['owner']
    if data['use_ppa'] == CREATE_NEW:
        # The user asked for a brand-new PPA to host daily builds.
        ppa_name = data.get('ppa_name', None)
        ppa = owner.createPPA(ppa_name)
    else:
        ppa = data['daily_build_archive']
    try:
        source_package_recipe = self.error_handler(
            getUtility(ISourcePackageRecipeSource).new,
            self.user, owner, data['name'], data['recipe_text'],
            data['description'], data['distroseries'], ppa,
            data['build_daily'])
        Store.of(source_package_recipe).flush()
    except ErrorHandled:
        # error_handler already reported the problem; stay on the form.
        return
    self.next_url = canonical_url(source_package_recipe)
def landing_candidates(self):
    """See `IGitRef`."""
    # Open proposals only: exclude anything in a final state.
    still_open = Not(BranchMergeProposal.queue_status.is_in(
        BRANCH_MERGE_PROPOSAL_FINAL_STATES))
    return Store.of(self).find(
        BranchMergeProposal,
        BranchMergeProposal.target_git_repository == self.repository,
        BranchMergeProposal.target_git_path == self.path,
        still_open)
def dependent_landings(self):
    """See `IGitRef`."""
    # Open proposals only: exclude anything in a final state.
    still_open = Not(BranchMergeProposal.queue_status.is_in(
        BRANCH_MERGE_PROPOSAL_FINAL_STATES))
    return Store.of(self).find(
        BranchMergeProposal,
        BranchMergeProposal.prerequisite_git_repository ==
            self.repository,
        BranchMergeProposal.prerequisite_git_path == self.path,
        still_open)