class Email(SQLObject):
    """An email address row linked to a `Student`."""

    # Bounded to fit a VARCHAR(255) column.
    address = StringCol(length=255)
    student = ForeignKey('Student')
class DIPerson(InheritableSQLObject):
    """A person row in an inheritable SQLObject hierarchy."""

    firstName = StringCol(length=100)
    # alternateID=True makes lastName a unique lookup key (generates a
    # byLastName() class method).
    lastName = StringCol(alternateID=True, length=100)
    # Optional link to a manager; NULL when unset.
    manager = ForeignKey("DIManager", default=None)
class PhotoAlbum(SQLObject):
    """A photo album row linked to a `PersonWithAlbum`."""

    color = StringCol(default='red')
    # The database column name ('test_person_id') differs from the
    # attribute name on purpose.
    person = ForeignKey('PersonWithAlbum', dbName='test_person_id')
class DistroSeriesLanguage(SQLBase, RosettaStats):
    """See `IDistroSeriesLanguage`.

    A SQLObject based implementation of IDistroSeriesLanguage.
    """
    implements(IDistroSeriesLanguage)

    _table = 'DistroSeriesLanguage'

    distroseries = ForeignKey(foreignKey='DistroSeries',
        dbName='distroseries', notNull=False, default=None)
    language = ForeignKey(foreignKey='Language', dbName='language',
        notNull=True)
    # Cached per-language translation statistics, refreshed by
    # updateStatistics().
    currentcount = IntCol(notNull=True, default=0)
    updatescount = IntCol(notNull=True, default=0)
    rosettacount = IntCol(notNull=True, default=0)
    unreviewed_count = IntCol(notNull=True, default=0)
    contributorcount = IntCol(notNull=True, default=0)
    dateupdated = UtcDateTimeCol(dbName='dateupdated', default=DEFAULT)

    @property
    def title(self):
        """A display title naming the language and the distro series."""
        return '%s translations of %s %s' % (
            self.language.englishname,
            self.distroseries.distribution.displayname,
            self.distroseries.displayname)

    @property
    def pofiles(self):
        """PO files for this language in current templates of the series,
        highest-priority templates first.
        """
        return POFile.select('''
            POFile.language = %s AND
            POFile.potemplate = POTemplate.id AND
            POTemplate.distroseries = %s AND
            POTemplate.iscurrent = TRUE
            ''' % sqlvalues(self.language.id, self.distroseries.id),
            clauseTables=['POTemplate'],
            prejoins=["potemplate.sourcepackagename"],
            orderBy=['-POTemplate.priority', 'POFile.id'])

    def getPOFilesFor(self, potemplates):
        """See `IDistroSeriesLanguage`."""
        return get_pofiles_for(potemplates, self.language)

    @property
    def translators(self):
        """Translators for this language in the distribution's
        translation group.
        """
        # BUG FIX: the original query omitted the AND between
        # "Distribution.id = %s" and "Translator.language = %s",
        # producing invalid SQL; the conjunction is restored here.
        return Translator.select('''
            Translator.translationgroup = TranslationGroup.id AND
            Distribution.translationgroup = TranslationGroup.id AND
            Distribution.id = %s AND
            Translator.language = %s
            ''' % sqlvalues(self.distroseries.distribution.id,
                            self.language.id),
            orderBy=['id'],
            clauseTables=['TranslationGroup', 'Distribution'],
            distinct=True)

    @property
    def contributor_count(self):
        """Cached number of contributors (see updateStatistics)."""
        return self.contributorcount

    # RosettaStats interface: these simply expose the cached counters.
    # The `language` parameter is accepted for interface compatibility
    # and ignored, since this object is already language-specific.
    def messageCount(self):
        return self.distroseries.messagecount

    def currentCount(self, language=None):
        return self.currentcount

    def updatesCount(self, language=None):
        return self.updatescount

    def rosettaCount(self, language=None):
        return self.rosettacount

    def unreviewedCount(self):
        """See `IRosettaStats`."""
        return self.unreviewed_count

    def updateStatistics(self, ztm=None):
        """Recompute the cached counters from this language's PO files.

        :param ztm: optional transaction manager; when given, the
            transaction is committed after the counters are stored.
        """
        current = 0
        updates = 0
        rosetta = 0
        unreviewed = 0
        for pofile in self.pofiles:
            current += pofile.currentCount()
            updates += pofile.updatesCount()
            rosetta += pofile.rosettaCount()
            unreviewed += pofile.unreviewedCount()
        self.currentcount = current
        self.updatescount = updates
        self.rosettacount = rosetta
        self.unreviewed_count = unreviewed
        contributors = self.distroseries.getPOFileContributorsByLanguage(
            self.language)
        self.contributorcount = contributors.count()
        self.dateupdated = UTC_NOW
        # BUG FIX: ztm defaults to None, but the original called
        # ztm.commit() unconditionally, raising AttributeError whenever
        # no transaction manager was supplied.
        if ztm is not None:
            ztm.commit()
class Specification(SQLBase, BugLinkTargetMixin, InformationTypeMixin):
    """See ISpecification."""

    _defaultOrder = ['-priority', 'definition_status', 'name', 'id']

    # db field names
    name = StringCol(unique=True, notNull=True)
    title = StringCol(notNull=True)
    summary = StringCol(notNull=True)
    definition_status = EnumCol(
        schema=SpecificationDefinitionStatus, notNull=True,
        default=SpecificationDefinitionStatus.NEW)
    priority = EnumCol(schema=SpecificationPriority, notNull=True,
        default=SpecificationPriority.UNDEFINED)
    # _assignee/_drafter/_approver back the assignee/drafter/approver
    # properties below, which subscribe the person before assignment.
    _assignee = ForeignKey(dbName='assignee', notNull=False,
        foreignKey='Person', storm_validator=validate_public_person,
        default=None)
    _drafter = ForeignKey(dbName='drafter', notNull=False,
        foreignKey='Person', storm_validator=validate_public_person,
        default=None)
    _approver = ForeignKey(dbName='approver', notNull=False,
        foreignKey='Person', storm_validator=validate_public_person,
        default=None)
    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    datecreated = UtcDateTimeCol(notNull=True, default=DEFAULT)
    # Exactly one of product/distribution is set (see setTarget).
    product = ForeignKey(dbName='product', foreignKey='Product',
        notNull=False, default=None)
    productseries = ForeignKey(dbName='productseries',
        foreignKey='ProductSeries', notNull=False, default=None)
    distribution = ForeignKey(dbName='distribution',
        foreignKey='Distribution', notNull=False, default=None)
    distroseries = ForeignKey(dbName='distroseries',
        foreignKey='DistroSeries', notNull=False, default=None)
    goalstatus = EnumCol(schema=SpecificationGoalStatus, notNull=True,
        default=SpecificationGoalStatus.PROPOSED)
    goal_proposer = ForeignKey(dbName='goal_proposer', notNull=False,
        foreignKey='Person', storm_validator=validate_public_person,
        default=None)
    date_goal_proposed = UtcDateTimeCol(notNull=False, default=None)
    goal_decider = ForeignKey(dbName='goal_decider', notNull=False,
        foreignKey='Person', storm_validator=validate_public_person,
        default=None)
    date_goal_decided = UtcDateTimeCol(notNull=False, default=None)
    milestone = ForeignKey(dbName='milestone', foreignKey='Milestone',
        notNull=False, default=None)
    specurl = StringCol(notNull=False, default=None)
    whiteboard = StringCol(notNull=False, default=None)
    direction_approved = BoolCol(notNull=True, default=False)
    man_days = IntCol(notNull=False, default=None)
    implementation_status = EnumCol(
        schema=SpecificationImplementationStatus, notNull=True,
        default=SpecificationImplementationStatus.UNKNOWN)
    superseded_by = ForeignKey(dbName='superseded_by',
        foreignKey='Specification', notNull=False, default=None)
    completer = ForeignKey(dbName='completer', notNull=False,
        foreignKey='Person', storm_validator=validate_public_person,
        default=None)
    date_completed = UtcDateTimeCol(notNull=False, default=None)
    starter = ForeignKey(dbName='starter', notNull=False,
        foreignKey='Person', storm_validator=validate_public_person,
        default=None)
    date_started = UtcDateTimeCol(notNull=False, default=None)

    # useful joins
    _subscriptions = SQLMultipleJoin('SpecificationSubscription',
        joinColumn='specification', orderBy='id')
    subscribers = SQLRelatedJoin('Person',
        joinColumn='specification', otherColumn='person',
        intermediateTable='SpecificationSubscription',
        orderBy=['display_name', 'name'])
    sprint_links = SQLMultipleJoin('SprintSpecification', orderBy='id',
        joinColumn='specification')
    sprints = SQLRelatedJoin('Sprint', orderBy='name',
        joinColumn='specification', otherColumn='sprint',
        intermediateTable='SprintSpecification')
    spec_dependency_links = SQLMultipleJoin('SpecificationDependency',
        joinColumn='specification', orderBy='id')
    dependencies = SQLRelatedJoin('Specification',
        joinColumn='specification', otherColumn='dependency',
        orderBy='title', intermediateTable='SpecificationDependency')
    information_type = EnumCol(
        enum=InformationType, notNull=True,
        default=InformationType.PUBLIC)

    @cachedproperty
    def linked_branches(self):
        # Branch links, ordered by link id; cache is invalidated by
        # linkBranch/unlinkBranch below.
        return list(Store.of(self).find(
            SpecificationBranch,
            SpecificationBranch.specificationID == self.id).order_by(
                SpecificationBranch.id))

    def _fetch_children_or_parents(self, join_cond, cond, user):
        # Shared query for getDependencies/getBlockedSpecs: join through
        # SpecificationDependency on `join_cond`, select the spec on the
        # other side via `cond`, filtered by the user's privacy access.
        from lp.blueprints.model.specificationsearch import (
            get_specification_privacy_filter)
        return list(Store.of(self).using(
            Specification,
            Join(SpecificationDependency, join_cond == self.id)).find(
            Specification,
            cond == Specification.id,
            *get_specification_privacy_filter(user)
            ).order_by(Specification.title))

    def getDependencies(self, user=None):
        # Specs this one depends on, visible to `user`.
        return self._fetch_children_or_parents(
            SpecificationDependency.specificationID,
            SpecificationDependency.dependencyID, user)

    def getBlockedSpecs(self, user=None):
        # Specs that depend on this one, visible to `user`.
        return self._fetch_children_or_parents(
            SpecificationDependency.dependencyID,
            SpecificationDependency.specificationID, user)

    # assignee/drafter/approver are properties so that setting them can
    # first subscribe the person when an access grant is needed.
    def set_assignee(self, person):
        self.subscribeIfAccessGrantNeeded(person)
        self._assignee = person

    def get_assignee(self):
        return self._assignee

    assignee = property(get_assignee, set_assignee)

    def set_drafter(self, person):
        self.subscribeIfAccessGrantNeeded(person)
        self._drafter = person

    def get_drafter(self):
        return self._drafter

    drafter = property(get_drafter, set_drafter)

    def set_approver(self, person):
        self.subscribeIfAccessGrantNeeded(person)
        self._approver = person

    def get_approver(self):
        return self._approver

    approver = property(get_approver, set_approver)

    def subscribeIfAccessGrantNeeded(self, person):
        """Subscribe person if this specification is not public and if
        the person does not already have grants to access the
        specification.
        """
        if person is None or self.userCanView(person):
            return
        current_user = getUtility(ILaunchBag).user
        self.subscribe(person, subscribed_by=current_user)

    @cachedproperty
    def subscriptions(self):
        """Sort the subscriptions"""
        from lp.registry.model.person import person_sort_key
        return sorted(
            self._subscriptions,
            key=lambda sub: person_sort_key(sub.person))

    @property
    def workitems_text(self):
        """See ISpecification."""
        workitems_lines = []

        def get_header_text(milestone):
            # Header line introducing a run of work items for one
            # milestone (or no milestone).
            if milestone is None:
                return "Work items:"
            else:
                return "Work items for %s:" % milestone.name

        if len(self.work_items) == 0:
            return ''
        milestone = self.work_items[0].milestone
        # Start by appending a header for the milestone of the first work
        # item. After this we're going to write a new header whenever we
        # see a work item with a different milestone.
        workitems_lines.append(get_header_text(milestone))
        for work_item in self.work_items:
            if work_item.milestone != milestone:
                workitems_lines.append("")
                milestone = work_item.milestone
                workitems_lines.append(get_header_text(milestone))
            assignee = work_item.assignee
            if assignee is not None:
                assignee_part = "[%s] " % assignee.name
            else:
                assignee_part = ""
            # work_items are ordered by sequence
            workitems_lines.append(
                "%s%s: %s" % (
                    assignee_part, work_item.title,
                    work_item.status.name))
        return "\n".join(workitems_lines)

    @property
    def target(self):
        """See ISpecification."""
        if self.product:
            return self.product
        return self.distribution

    def newWorkItem(self, title, sequence,
                    status=SpecificationWorkItemStatus.TODO,
                    assignee=None, milestone=None):
        """See ISpecification."""
        if milestone is not None:
            assert milestone.target == self.target, (
                "%s does not belong to this spec's target (%s)" %
                (milestone.displayname, self.target.name))
        return SpecificationWorkItem(
            title=title, status=status, specification=self,
            assignee=assignee, milestone=milestone, sequence=sequence)

    @cachedproperty
    def work_items(self):
        """See ISpecification."""
        return list(self._work_items)

    @property
    def _work_items(self):
        # Live (not deleted) work items in sequence order.
        return Store.of(self).find(
            SpecificationWorkItem, specification=self,
            deleted=False).order_by("sequence")

    def setWorkItems(self, new_work_items):
        # Parse/validate the text form through the interface field, then
        # apply the resulting work-item dicts.
        field = ISpecification['workitems_text'].bind(self)
        self.updateWorkItems(field.parseAndValidate(new_work_items))

    def _deleteWorkItemsNotMatching(self, titles):
        """Delete all work items whose title does not match the given
        ones.

        Also set the sequence of those deleted work items to -1.
        """
        title_counts = self._list_to_dict_of_frequency(titles)
        for work_item in self._work_items:
            if (work_item.title not in title_counts or
                title_counts[work_item.title] == 0):
                work_item.deleted = True
            elif title_counts[work_item.title] > 0:
                title_counts[work_item.title] -= 1

    def _list_to_dict_of_frequency(self, list):
        # Map each item to the number of times it appears in `list`.
        dictionary = {}
        for item in list:
            if not item in dictionary:
                dictionary[item] = 1
            else:
                dictionary[item] += 1
        return dictionary

    def updateWorkItems(self, new_work_items):
        """See ISpecification."""
        # First mark work items with titles that are no longer present as
        # deleted.
        self._deleteWorkItemsNotMatching(
            [wi['title'] for wi in new_work_items])
        work_items = self._work_items
        # At this point the list of new_work_items is necessarily the same
        # size (or longer) than the list of existing ones, so we can just
        # iterate over it updating the existing items and creating any new
        # ones.
        to_insert = []
        existing_titles = [wi.title for wi in work_items]
        existing_title_count = self._list_to_dict_of_frequency(
            existing_titles)
        for i, new_wi in enumerate(new_work_items):
            if (new_wi['title'] not in existing_titles or
                    existing_title_count[new_wi['title']] == 0):
                to_insert.append((i, new_wi))
            else:
                existing_title_count[new_wi['title']] -= 1
                # Get an existing work item with the same title and update
                # it to match what we have now.
                existing_wi_index = existing_titles.index(new_wi['title'])
                existing_wi = work_items[existing_wi_index]
                # Mark a work item as dirty - don't use it again this
                # update.
                existing_titles[existing_wi_index] = None
                # Update the sequence to match its current position on the
                # list entered by the user.
                existing_wi.sequence = i
                existing_wi.status = new_wi['status']
                existing_wi.assignee = new_wi['assignee']
                milestone = new_wi['milestone']
                if milestone is not None:
                    assert milestone.target == self.target, (
                        "%s does not belong to this spec's target (%s)" %
                        (milestone.displayname, self.target.name))
                existing_wi.milestone = milestone

        for sequence, item in to_insert:
            self.newWorkItem(item['title'], sequence, item['status'],
                             item['assignee'], item['milestone'])
        Store.of(self).flush()
        # Invalidate the cached work_items list.
        del get_property_cache(self).work_items

    def setTarget(self, target):
        """See ISpecification."""
        if IProduct.providedBy(target):
            self.product = target
            self.distribution = None
        elif IDistribution.providedBy(target):
            self.product = None
            self.distribution = target
        else:
            raise AssertionError("Unknown target: %s" % target)

    def retarget(self, target):
        """See ISpecification."""
        if self.target == target:
            return

        self.validateMove(target)

        # We must lose any goal we have set and approved/declined because
        # we are moving to a different target that will have different
        # policies and drivers.
        self.productseries = None
        self.distroseries = None
        self.goalstatus = SpecificationGoalStatus.PROPOSED
        self.goal_proposer = None
        self.date_goal_proposed = None
        self.milestone = None

        self.setTarget(target)
        self.priority = SpecificationPriority.UNDEFINED
        self.direction_approved = False

    def validateMove(self, target):
        """See ISpecification."""
        if target.getSpecification(self.name) is not None:
            raise TargetAlreadyHasSpecification(target, self.name)

    @property
    def goal(self):
        """See ISpecification."""
        if self.productseries:
            return self.productseries
        return self.distroseries

    def proposeGoal(self, goal, proposer):
        """See ISpecification."""
        if goal is None:
            # we are clearing goals
            self.productseries = None
            self.distroseries = None
        elif (IProductSeries.providedBy(goal) and
              goal.product == self.target):
            # set the product series as a goal
            self.productseries = goal
            self.goal_proposer = proposer
            self.date_goal_proposed = UTC_NOW
            # and make sure there is no leftover distroseries goal
            self.distroseries = None
        elif (IDistroSeries.providedBy(goal) and
              goal.distribution == self.target):
            # set the distroseries goal
            self.distroseries = goal
            self.goal_proposer = proposer
            self.date_goal_proposed = UTC_NOW
            # and make sure there is no leftover product series goal
            self.productseries = None
        else:
            raise GoalProposeError('Inappropriate goal.')
        # record who made the proposal, and when
        self.goal_proposer = proposer
        self.date_goal_proposed = UTC_NOW
        # and of course set the goal status to PROPOSED
        self.goalstatus = SpecificationGoalStatus.PROPOSED
        # the goal should now also not have a decider
        self.goal_decider = None
        self.date_goal_decided = None
        if goal is not None and goal.personHasDriverRights(proposer):
            self.acceptBy(proposer)

    def acceptBy(self, decider):
        """See ISpecification."""
        self.goalstatus = SpecificationGoalStatus.ACCEPTED
        self.goal_decider = decider
        self.date_goal_decided = UTC_NOW

    def declineBy(self, decider):
        """See ISpecification."""
        self.goalstatus = SpecificationGoalStatus.DECLINED
        self.goal_decider = decider
        self.date_goal_decided = UTC_NOW

    def getSprintSpecification(self, sprintname):
        """See ISpecification."""
        for sprintspecification in self.sprint_links:
            if sprintspecification.sprint.name == sprintname:
                return sprintspecification
        return None

    def notificationRecipientAddresses(self):
        """See ISpecification."""
        related_people = [
            self.owner, self.assignee, self.approver, self.drafter]
        related_people = [
            person for person in related_people if person is not None]
        subscribers = [
            subscription.person for subscription in self.subscriptions]
        notify_people = set(related_people + subscribers)
        # Drop anyone who cannot see this (possibly private) spec.
        without_access = set(
            getUtility(IService, 'sharing').getPeopleWithoutAccess(
                self, notify_people))
        notify_people -= without_access
        addresses = set()
        for person in notify_people:
            addresses.update(get_contact_email_addresses(person))
        return sorted(addresses)

    # emergent properties
    @property
    def is_incomplete(self):
        """See ISpecification."""
        return not self.is_complete

    @property
    def is_complete(self):
        """See `ISpecification`."""
        # Implemented blueprints are by definition complete.
        if (self.implementation_status ==
                SpecificationImplementationStatus.IMPLEMENTED):
            return True
        # Obsolete and superseded blueprints are considered complete.
        if self.definition_status in (
            SpecificationDefinitionStatus.OBSOLETE,
            SpecificationDefinitionStatus.SUPERSEDED):
            return True
        # Approved information blueprints are also considered complete.
        if ((self.implementation_status ==
             SpecificationImplementationStatus.INFORMATIONAL) and
            (self.definition_status ==
             SpecificationDefinitionStatus.APPROVED)):
            return True
        else:
            return False

    @property
    def is_started(self):
        """See ISpecification.

        This is a code implementation of the SQL in spec_started_clause
        """
        return (self.implementation_status not in [
                    SpecificationImplementationStatus.UNKNOWN,
                    SpecificationImplementationStatus.NOTSTARTED,
                    SpecificationImplementationStatus.DEFERRED,
                    SpecificationImplementationStatus.INFORMATIONAL,
                    ]
                or ((self.implementation_status ==
                     SpecificationImplementationStatus.INFORMATIONAL) and
                    (self.definition_status ==
                     SpecificationDefinitionStatus.APPROVED)))

    @property
    def lifecycle_status(self):
        """Combine the is_complete and is_started emergent properties."""
        if self.is_complete:
            return SpecificationLifecycleStatus.COMPLETE
        elif self.is_started:
            return SpecificationLifecycleStatus.STARTED
        else:
            return SpecificationLifecycleStatus.NOTSTARTED

    def setDefinitionStatus(self, definition_status, user):
        # Change the definition status and refresh starter/completer
        # bookkeeping accordingly.
        self.definition_status = definition_status
        self.updateLifecycleStatus(user)

    def setImplementationStatus(self, implementation_status, user):
        # Change the implementation status and refresh starter/completer
        # bookkeeping accordingly.
        self.implementation_status = implementation_status
        self.updateLifecycleStatus(user)

    def updateLifecycleStatus(self, user):
        """See ISpecification."""
        # Returns the new lifecycle status when it changed, else None.
        newstatus = None
        if self.is_started:
            if self.starterID is None:
                newstatus = SpecificationLifecycleStatus.STARTED
                self.date_started = UTC_NOW
                self.starter = user
        else:
            if self.starterID is not None:
                newstatus = SpecificationLifecycleStatus.NOTSTARTED
                self.date_started = None
                self.starter = None
        if self.is_complete:
            if self.completerID is None:
                newstatus = SpecificationLifecycleStatus.COMPLETE
                self.date_completed = UTC_NOW
                self.completer = user
        else:
            if self.completerID is not None:
                self.date_completed = None
                self.completer = None
                if self.is_started:
                    newstatus = SpecificationLifecycleStatus.STARTED
                else:
                    newstatus = SpecificationLifecycleStatus.NOTSTARTED
        return newstatus

    @property
    def is_blocked(self):
        """See ISpecification."""
        for spec in self.dependencies:
            if spec.is_incomplete:
                return True
        return False

    @property
    def has_accepted_goal(self):
        """See ISpecification."""
        if (self.goal is not None and
            self.goalstatus == SpecificationGoalStatus.ACCEPTED):
            return True
        return False

    def getDelta(self, old_spec, user):
        """See ISpecification."""
        delta = ObjectDelta(old_spec, self)
        delta.recordNewValues(("title", "summary", "specurl",
                               "productseries", "distroseries",
                               "milestone"))
        delta.recordNewAndOld(("name", "priority", "definition_status",
                               "target", "approver", "assignee",
                               "drafter", "whiteboard",
                               "workitems_text"))
        delta.recordListAddedAndRemoved("bugs", "bugs_linked",
                                        "bugs_unlinked")
        if delta.changes:
            changes = delta.changes
            changes["specification"] = self
            changes["user"] = user
            return SpecificationDelta(**changes)
        else:
            return None

    @property
    def informational(self):
        """For backwards compatibility: implemented as a value in
        implementation_status.
        """
        return (self.implementation_status ==
                SpecificationImplementationStatus.INFORMATIONAL)

    # subscriptions

    def subscription(self, person):
        """See ISpecification."""
        return SpecificationSubscription.selectOneBy(
            specification=self, person=person)

    def getSubscriptionByName(self, name):
        """See ISpecification."""
        for sub in self.subscriptions:
            if sub.person.name == name:
                return sub
        return None

    def subscribe(self, person, subscribed_by=None, essential=False):
        """See ISpecification."""
        if subscribed_by is None:
            subscribed_by = person
        # Create or modify a user's subscription to this blueprint.
        # First see if a relevant subscription exists, and if so, return
        # it
        sub = self.subscription(person)
        if sub is not None:
            if sub.essential != essential:
                # If a subscription already exists, but the value for
                # 'essential' changes, there's no need to create a new
                # subscription, but we modify the existing subscription
                # and notify the user about the change.
                with notify_modified(sub, ['essential'],
                                     user=subscribed_by):
                    sub.essential = essential
            return sub
        # since no previous subscription existed, create and return a
        # new one
        sub = SpecificationSubscription(specification=self,
            person=person, essential=essential)
        property_cache = get_property_cache(self)
        # NOTE(review): this checks the key 'subscription' but mutates
        # the cached 'subscriptions' list — confirm the intended cache
        # key; as written the branch only runs if 'subscription' is
        # cached.
        if 'subscription' in property_cache:
            from lp.registry.model.person import person_sort_key
            property_cache.subscriptions.append(sub)
            property_cache.subscriptions.sort(
                key=lambda sub: person_sort_key(sub.person))
        if self.information_type in PRIVATE_INFORMATION_TYPES:
            # Grant the subscriber access if they can't see the
            # specification.
            service = getUtility(IService, 'sharing')
            _, _, _, shared_specs = service.getVisibleArtifacts(
                person, specifications=[self], ignore_permissions=True)
            if not shared_specs:
                service.ensureAccessGrants(
                    [person], subscribed_by, specifications=[self])
        notify(ObjectCreatedEvent(sub, user=subscribed_by))
        return sub

    def unsubscribe(self, person, unsubscribed_by,
                    ignore_permissions=False):
        """See ISpecification."""
        # see if a relevant subscription exists, and if so, delete it
        if person is None:
            person = unsubscribed_by
        for sub in self.subscriptions:
            if sub.person.id == person.id:
                if (not sub.canBeUnsubscribedByUser(unsubscribed_by) and
                    not ignore_permissions):
                    raise UserCannotUnsubscribePerson(
                        '%s does not have permission to unsubscribe %s.'
                        % (unsubscribed_by.displayname,
                           person.displayname))
                get_property_cache(self).subscriptions.remove(sub)
                SpecificationSubscription.delete(sub.id)
                # Revoke any artifact grants the person held on this
                # spec.
                artifacts_to_delete = getUtility(
                    IAccessArtifactSource).find([self])
                getUtility(IAccessArtifactGrantSource).revokeByArtifact(
                    artifacts_to_delete, [person])
                return

    def isSubscribed(self, person):
        """See lp.blueprints.interfaces.specification.ISpecification."""
        if person is None:
            return False
        return bool(self.subscription(person))

    @property
    def bugs(self):
        # Bugs linked to this spec via cross-references, sorted by bug
        # id.
        from lp.bugs.model.bug import Bug
        bug_ids = [
            int(id) for _, id in getUtility(IXRefSet).findFrom(
                (u'specification', unicode(self.id)), types=[u'bug'])]
        return list(sorted(
            bulk.load(Bug, bug_ids), key=operator.attrgetter('id')))

    def createBugLink(self, bug, props=None):
        """See BugLinkTargetMixin."""
        if props is None:
            props = {}
        # XXX: Should set creator.
        getUtility(IXRefSet).create(
            {(u'specification', unicode(self.id)):
                {(u'bug', unicode(bug.id)): props}})

    def deleteBugLink(self, bug):
        """See BugLinkTargetMixin."""
        getUtility(IXRefSet).delete(
            {(u'specification', unicode(self.id)):
                [(u'bug', unicode(bug.id))]})

    # sprint linking
    def linkSprint(self, sprint, user):
        """See ISpecification."""
        from lp.blueprints.model.sprintspecification import (
            SprintSpecification)
        for sprint_link in self.sprint_links:
            # sprints have unique names
            if sprint_link.sprint.name == sprint.name:
                return sprint_link
        sprint_link = SprintSpecification(specification=self,
            sprint=sprint, registrant=user)
        if sprint.isDriver(user):
            sprint_link.acceptBy(user)
        return sprint_link

    def unlinkSprint(self, sprint):
        """See ISpecification."""
        from lp.blueprints.model.sprintspecification import (
            SprintSpecification)
        for sprint_link in self.sprint_links:
            # sprints have unique names
            if sprint_link.sprint.name == sprint.name:
                SprintSpecification.delete(sprint_link.id)
                return sprint_link

    # dependencies
    def createDependency(self, specification):
        """See ISpecification."""
        for deplink in self.spec_dependency_links:
            if deplink.dependency.id == specification.id:
                return deplink
        return SpecificationDependency(specification=self,
            dependency=specification)

    def removeDependency(self, specification):
        """See ISpecification."""
        # see if a relevant dependency link exists, and if so, delete it
        for deplink in self.spec_dependency_links:
            if deplink.dependency.id == specification.id:
                SpecificationDependency.delete(deplink.id)
                return deplink

    def all_deps(self, user=None):
        # Transitive dependencies via a recursive CTE, excluding self.
        return list(Store.of(self).with_(
            SQL(recursive_dependent_query(user),
                params=(self.id,))).find(
            Specification,
            Specification.id != self.id,
            Specification.id.is_in(SQL('select id from dependencies')),
            ).order_by(Specification.name, Specification.id))

    def all_blocked(self, user=None):
        """See `ISpecification`."""
        # Transitive dependents via a recursive CTE, excluding self.
        return list(Store.of(self).with_(
            SQL(recursive_blocked_query(user),
                params=(self.id,))).find(
            Specification,
            Specification.id != self.id,
            Specification.id.is_in(SQL('select id from blocked')),
            ).order_by(Specification.name, Specification.id))

    # branches
    def getBranchLink(self, branch):
        return SpecificationBranch.selectOneBy(
            specificationID=self.id, branchID=branch.id)

    def linkBranch(self, branch, registrant):
        # Idempotent: returns the existing link when one exists.
        branch_link = self.getBranchLink(branch)
        if branch_link is not None:
            return branch_link
        branch_link = SpecificationBranch(
            specification=self, branch=branch, registrant=registrant)
        Store.of(self).flush()
        del get_property_cache(self).linked_branches
        notify(ObjectCreatedEvent(branch_link))
        return branch_link

    def unlinkBranch(self, branch, user):
        spec_branch = self.getBranchLink(branch)
        spec_branch.destroySelf()
        Store.of(self).flush()
        del get_property_cache(self).linked_branches

    def getLinkedBugTasks(self, user):
        """See `ISpecification`."""
        params = BugTaskSearchParams(user=user,
                                     linked_blueprints=self.id)
        tasks = getUtility(IBugTaskSet).search(params)
        # Pick the most specific context available for filtering.
        if self.distroseries is not None:
            context = self.distroseries
        elif self.distribution is not None:
            context = self.distribution
        elif self.productseries is not None:
            context = self.productseries
        else:
            context = self.product
        return filter_bugtasks_by_context(context, tasks)

    def __repr__(self):
        return '<Specification %s %r for %r>' % (
            self.id, self.name, self.target.name)

    def getAllowedInformationTypes(self, who):
        """See `ISpecification`."""
        return self.target.getAllowedSpecificationInformationTypes()

    def transitionToInformationType(self, information_type, who):
        """See ISpecification."""
        # avoid circular imports.
        from lp.registry.model.accesspolicy import (
            reconcile_access_for_artifact,
            )
        if self.information_type == information_type:
            return False
        if information_type not in self.getAllowedInformationTypes(who):
            raise CannotChangeInformationType(
                "Forbidden by project policy.")
        self.information_type = information_type
        reconcile_access_for_artifact(self, information_type,
                                      [self.target])
        if information_type in PRIVATE_INFORMATION_TYPES and self.subscribers:
            # Grant the subscribers access if they do not have a
            # policy grant.
            service = getUtility(IService, 'sharing')
            blind_subscribers = service.getPeopleWithoutAccess(
                self, self.subscribers)
            if len(blind_subscribers):
                service.ensureAccessGrants(
                    blind_subscribers, who, specifications=[self],
                    ignore_permissions=True)
        return True

    @cachedproperty
    def _known_viewers(self):
        """A set of known persons able to view the specifcation."""
        return set()

    def userCanView(self, user):
        """See `ISpecification`."""
        # Avoid circular imports.
        from lp.blueprints.model.specificationsearch import (
            get_specification_privacy_filter)
        if self.information_type in PUBLIC_INFORMATION_TYPES:
            return True
        if user is None:
            return False
        if user.id in self._known_viewers:
            return True
        if not Store.of(self).find(
                Specification, Specification.id == self.id,
                *get_specification_privacy_filter(user)).is_empty():
            # Cache the positive result for subsequent checks.
            self._known_viewers.add(user.id)
            return True
        return False
class Address(SQLObject):
    """A free-form address row linked to a `Person`."""

    address = StringCol()
    person = ForeignKey('Person')
class BugTrackerAlias(SQLBase):
    """See `IBugTrackerAlias`."""

    # The bug tracker this alias belongs to.
    bugtracker = ForeignKey(
        foreignKey="BugTracker", dbName="bugtracker", notNull=True)
    # The base URL recorded for this alias.
    base_url = StringCol(notNull=True)
class Address(SQLObject):
    """A structured postal address row linked to a `Person`."""

    street = StringCol()
    city = StringCol()
    # length=2 suggests a two-letter state abbreviation — TODO confirm.
    state = StringCol(length=2)
    # length=9 suggests ZIP+4 without the separator — TODO confirm.
    zip = StringCol(length=9)
    person = ForeignKey('Person')
class BranchMergeProposal(SQLBase):
    """A relationship between a person and a branch."""

    implements(IBranchMergeProposal, IBranchNavigationMenu, IHasBranchTarget)

    _table = 'BranchMergeProposal'
    _defaultOrder = ['-date_created', 'id']

    registrant = ForeignKey(
        dbName='registrant', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    source_branch = ForeignKey(
        dbName='source_branch', foreignKey='Branch', notNull=True)
    target_branch = ForeignKey(
        dbName='target_branch', foreignKey='Branch', notNull=True)
    prerequisite_branch = ForeignKey(
        dbName='dependent_branch', foreignKey='Branch', notNull=False)
    description = StringCol(default=None)
    whiteboard = StringCol(default=None)
    queue_status = EnumCol(
        enum=BranchMergeProposalStatus, notNull=True,
        default=BranchMergeProposalStatus.WORK_IN_PROGRESS)

    @property
    def private(self):
        # A proposal is private if any involved branch is private.
        return (
            (self.source_branch.information_type in
             PRIVATE_INFORMATION_TYPES) or
            (self.target_branch.information_type in
             PRIVATE_INFORMATION_TYPES) or
            (self.prerequisite_branch is not None and
             (self.prerequisite_branch.information_type in
              PRIVATE_INFORMATION_TYPES)))

    reviewer = ForeignKey(
        dbName='reviewer', foreignKey='Person',
        storm_validator=validate_person, notNull=False, default=None)

    @property
    def next_preview_diff_job(self):
        """The next UPDATE_PREVIEW_DIFF job that is waiting or running."""
        # circular dependencies
        from lp.code.model.branchmergeproposaljob import (
            BranchMergeProposalJob,
            BranchMergeProposalJobType,
            )
        jobs = Store.of(self).find(
            BranchMergeProposalJob,
            BranchMergeProposalJob.branch_merge_proposal == self,
            BranchMergeProposalJob.job_type ==
                BranchMergeProposalJobType.UPDATE_PREVIEW_DIFF,
            BranchMergeProposalJob.job == Job.id,
            Job._status.is_in([JobStatus.WAITING, JobStatus.RUNNING]))
        job = jobs.order_by(Job.scheduled_start, Job.date_created).first()
        if job is not None:
            return job.makeDerived()
        else:
            return None

    reviewed_revision_id = StringCol(default=None)
    commit_message = StringCol(default=None)
    queue_position = IntCol(default=None)
    queuer = ForeignKey(
        dbName='queuer', foreignKey='Person', notNull=False, default=None)
    queued_revision_id = StringCol(default=None)
    date_merged = UtcDateTimeCol(default=None)
    merged_revno = IntCol(default=None)
    merge_reporter = ForeignKey(
        dbName='merge_reporter', foreignKey='Person',
        storm_validator=validate_public_person, notNull=False, default=None)

    def getRelatedBugTasks(self, user):
        """Bug tasks which are linked to the source but not the target.

        Implies that these would be fixed, in the target, by the merge.
        """
        source_tasks = self.source_branch.getLinkedBugTasks(user)
        target_tasks = self.target_branch.getLinkedBugTasks(user)
        return [
            bugtask for bugtask in source_tasks
            if bugtask not in target_tasks]

    @property
    def address(self):
        return 'mp+%d@%s' % (self.id, config.launchpad.code_domain)

    superseded_by = ForeignKey(
        dbName='superseded_by', foreignKey='BranchMergeProposal',
        notNull=False, default=None)

    supersedes = Reference("<primary key>", "superseded_by", on_remote=True)

    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    date_review_requested = UtcDateTimeCol(notNull=False, default=None)
    date_reviewed = UtcDateTimeCol(notNull=False, default=None)

    @property
    def target(self):
        """See `IHasBranchTarget`."""
        return self.source_branch.target

    root_message_id = StringCol(default=None)

    @property
    def title(self):
        """See `IBranchMergeProposal`."""
        return "[Merge] %(source)s into %(target)s" % {
            'source': self.source_branch.bzr_identity,
            'target': self.target_branch.bzr_identity}

    @property
    def all_comments(self):
        """See `IBranchMergeProposal`."""
        return CodeReviewComment.selectBy(branch_merge_proposal=self.id)

    def getComment(self, id):
        """See `IBranchMergeProposal`.

        This function can raise WrongBranchMergeProposal."""
        comment = CodeReviewComment.get(id)
        if comment.branch_merge_proposal != self:
            raise WrongBranchMergeProposal
        return comment

    def getVoteReference(self, id):
        """See `IBranchMergeProposal`.

        This function can raise WrongBranchMergeProposal."""
        vote = CodeReviewVoteReference.get(id)
        if vote.branch_merge_proposal != self:
            raise WrongBranchMergeProposal
        return vote

    @property
    def _preview_diffs(self):
        # All preview diffs for this proposal, oldest first.
        return Store.of(self).find(
            PreviewDiff,
            PreviewDiff.branch_merge_proposal_id == self.id).order_by(
                PreviewDiff.date_created)

    @cachedproperty
    def preview_diffs(self):
        return list(self._preview_diffs)

    @cachedproperty
    def preview_diff(self):
        return self._preview_diffs.last()

    date_queued = UtcDateTimeCol(notNull=False, default=None)

    votes = SQLMultipleJoin(
        'CodeReviewVoteReference', joinColumn='branch_merge_proposal')

    def getNotificationRecipients(self, min_level):
        """See IBranchMergeProposal.getNotificationRecipients"""
        recipients = {}
        branch_identity_cache = {
            self.source_branch: self.source_branch.bzr_identity,
            self.target_branch: self.target_branch.bzr_identity,
            }
        branches = [self.source_branch, self.target_branch]
        if self.prerequisite_branch is not None:
            branches.append(self.prerequisite_branch)
        for branch in branches:
            branch_recipients = branch.getNotificationRecipients()
            for recipient in branch_recipients:
                # If the recipient cannot see either of the branches, skip
                # them.
                if (not self.source_branch.visibleByUser(recipient) or
                    not self.target_branch.visibleByUser(recipient)):
                    continue
                subscription, rationale = branch_recipients.getReason(
                    recipient)
                if (subscription.review_level < min_level):
                    continue
                recipients[recipient] = RecipientReason.forBranchSubscriber(
                    subscription, recipient, rationale, self,
                    branch_identity_cache=branch_identity_cache)
        # Add in all the individuals that have been asked for a review,
        # or who have reviewed.  These people get added to the recipients
        # with the rationale of "Reviewer".
        # Don't add a team reviewer to the recipients as they are only
        # going to get emails normally if they are subscribed to one of the
        # branches, and if they are subscribed, they'll be getting this
        # email already.
        for review in self.votes:
            reviewer = review.reviewer
            pending = review.comment is None
            recipients[reviewer] = RecipientReason.forReviewer(
                self, pending, reviewer,
                branch_identity_cache=branch_identity_cache)
        # If the registrant of the proposal is getting emails, update the
        # rationale to say that they registered it.  Don't however send
        # them emails if they aren't asking for any.
        if self.registrant in recipients:
            recipients[self.registrant] = RecipientReason.forRegistrant(
                self, branch_identity_cache=branch_identity_cache)
        # If the owner of the source branch is getting emails, override the
        # rationale to say they are the owner of the source branch.
        source_owner = self.source_branch.owner
        if source_owner in recipients:
            reason = RecipientReason.forSourceOwner(
                self, branch_identity_cache=branch_identity_cache)
            if reason is not None:
                recipients[source_owner] = reason
        return recipients

    def isValidTransition(self, next_state, user=None):
        """See `IBranchMergeProposal`."""
        return is_valid_transition(self, self.queue_status, next_state, user)

    def _transitionToState(self, next_state, user=None):
        """Update the queue_status of the proposal.

        Raise an error if the proposal is in a final state.
        """
        if not self.isValidTransition(next_state, user):
            raise BadStateTransition(
                'Invalid state transition for merge proposal: %s -> %s'
                % (self.queue_status.title, next_state.title))
        # Transition to the same state occur in two particular
        # situations:
        #  * stale posts
        #  * approving a later revision
        # In both these cases, there is no real reason to disallow
        # transitioning to the same state.
        self.queue_status = next_state

    def setStatus(self, status, user=None, revision_id=None):
        """See `IBranchMergeProposal`."""
        # XXX - rockstar - 9 Oct 2008 - jml suggested in a review that this
        # would be better as a dict mapping.
        # See bug #281060.
        if (self.queue_status == BranchMergeProposalStatus.QUEUED and
            status != BranchMergeProposalStatus.QUEUED):
            self.dequeue()
        if status == BranchMergeProposalStatus.WORK_IN_PROGRESS:
            self.setAsWorkInProgress()
        elif status == BranchMergeProposalStatus.NEEDS_REVIEW:
            self.requestReview()
        elif status == BranchMergeProposalStatus.CODE_APPROVED:
            self.approveBranch(user, revision_id)
        elif status == BranchMergeProposalStatus.REJECTED:
            self.rejectBranch(user, revision_id)
        elif status == BranchMergeProposalStatus.QUEUED:
            self.enqueue(user, revision_id)
        elif status == BranchMergeProposalStatus.MERGED:
            self.markAsMerged(merge_reporter=user)
        elif status == BranchMergeProposalStatus.MERGE_FAILED:
            self._transitionToState(status, user=user)
        else:
            raise AssertionError('Unexpected queue status: %s' % status)

    def setAsWorkInProgress(self):
        """See `IBranchMergeProposal`."""
        self._transitionToState(BranchMergeProposalStatus.WORK_IN_PROGRESS)
        self._mark_unreviewed()

    def _mark_unreviewed(self):
        """Clear metadata about a previous review."""
        self.reviewer = None
        self.date_reviewed = None
        self.reviewed_revision_id = None

    def requestReview(self, _date_requested=None):
        """See `IBranchMergeProposal`.

        :param _date_requested: used only for testing purposes to override
            the normal UTC_NOW for when the review was requested.
        """
        # Don't reset the date_review_requested if we are already in the
        # review state.
        if _date_requested is None:
            _date_requested = UTC_NOW
        # If we are going from work in progress to needs review, then reset
        # the root message id and trigger a job to send out the email.
        if self.queue_status == BranchMergeProposalStatus.WORK_IN_PROGRESS:
            self.root_message_id = None
            notify(BranchMergeProposalNeedsReviewEvent(self))
        if self.queue_status != BranchMergeProposalStatus.NEEDS_REVIEW:
            self._transitionToState(BranchMergeProposalStatus.NEEDS_REVIEW)
            self.date_review_requested = _date_requested
            # Clear out any reviewed or queued values.
            self._mark_unreviewed()
            self.queuer = None
            self.queued_revision_id = None

    def isMergable(self):
        """See `IBranchMergeProposal`."""
        # As long as the source branch has not been merged, rejected
        # or superseded, then it is valid to be merged.
        return (self.queue_status not in FINAL_STATES)

    def _reviewProposal(self, reviewer, next_state, revision_id,
                        _date_reviewed=None):
        """Set the proposal to next_state."""
        # Check the reviewer can review the code for the target branch.
        old_state = self.queue_status
        if not self.target_branch.isPersonTrustedReviewer(reviewer):
            raise UserNotBranchReviewer
        # Check the current state of the proposal.
        self._transitionToState(next_state, reviewer)
        # Record the reviewer
        self.reviewer = reviewer
        if _date_reviewed is None:
            _date_reviewed = UTC_NOW
        self.date_reviewed = _date_reviewed
        # Record the reviewed revision id
        self.reviewed_revision_id = revision_id
        notify(BranchMergeProposalStatusChangeEvent(
            self, reviewer, old_state, next_state))

    def approveBranch(self, reviewer, revision_id, _date_reviewed=None):
        """See `IBranchMergeProposal`."""
        self._reviewProposal(
            reviewer, BranchMergeProposalStatus.CODE_APPROVED, revision_id,
            _date_reviewed)

    def rejectBranch(self, reviewer, revision_id, _date_reviewed=None):
        """See `IBranchMergeProposal`."""
        self._reviewProposal(
            reviewer, BranchMergeProposalStatus.REJECTED, revision_id,
            _date_reviewed)

    def enqueue(self, queuer, revision_id):
        """See `IBranchMergeProposal`."""
        if self.queue_status != BranchMergeProposalStatus.CODE_APPROVED:
            self.approveBranch(queuer, revision_id)

        last_entry = BranchMergeProposal.selectOne("""
            BranchMergeProposal.queue_position = (
                SELECT coalesce(MAX(queue_position), 0)
                FROM BranchMergeProposal)
            """)

        # The queue_position will wrap if we ever get to
        # two billion queue entries where the queue has
        # never become empty.  Perhaps sometime in the future
        # we may want to (maybe) consider keeping track of
        # the maximum value here.  I doubt that it'll ever be
        # a problem -- thumper.
        if last_entry is None:
            position = 1
        else:
            position = last_entry.queue_position + 1

        self.queue_status = BranchMergeProposalStatus.QUEUED
        self.queue_position = position
        self.queuer = queuer
        self.queued_revision_id = revision_id or self.reviewed_revision_id
        self.date_queued = UTC_NOW
        self.syncUpdate()

    def dequeue(self):
        """See `IBranchMergeProposal`."""
        if self.queue_status != BranchMergeProposalStatus.QUEUED:
            # Bug fix: this previously read ``self.queue_state.title`` --
            # there is no such attribute (the column is queue_status), so
            # raising BadStateTransition raised AttributeError instead.
            raise BadStateTransition(
                'Invalid state transition for merge proposal: %s -> %s'
                % (self.queue_status.title,
                   BranchMergeProposalStatus.QUEUED.title))
        self.queue_status = BranchMergeProposalStatus.CODE_APPROVED
        # Clear out the queued values.
        self.queuer = None
        self.queued_revision_id = None
        self.date_queued = None
        # Remove from the queue.
        self.queue_position = None

    def moveToFrontOfQueue(self):
        """See `IBranchMergeProposal`."""
        if self.queue_status != BranchMergeProposalStatus.QUEUED:
            return
        first_entry = BranchMergeProposal.selectOne("""
            BranchMergeProposal.queue_position = (
                SELECT MIN(queue_position)
                FROM BranchMergeProposal)
            """)
        self.queue_position = first_entry.queue_position - 1
        self.syncUpdate()

    def markAsMerged(self, merged_revno=None, date_merged=None,
                     merge_reporter=None):
        """See `IBranchMergeProposal`."""
        old_state = self.queue_status
        self._transitionToState(
            BranchMergeProposalStatus.MERGED, merge_reporter)
        self.merged_revno = merged_revno
        self.merge_reporter = merge_reporter
        # Remove from the queue.
        self.queue_position = None

        # The reviewer of a merged proposal is assumed to have approved, if
        # they rejected it remove the review metadata to avoid confusion.
        if old_state == BranchMergeProposalStatus.REJECTED:
            self._mark_unreviewed()

        if merged_revno is not None:
            branch_revision = Store.of(self).find(
                BranchRevision,
                BranchRevision.branch == self.target_branch,
                BranchRevision.sequence == merged_revno).one()
            if branch_revision is not None:
                date_merged = branch_revision.revision.revision_date

        if date_merged is None:
            date_merged = UTC_NOW
        self.date_merged = date_merged

    def resubmit(self, registrant, source_branch=None, target_branch=None,
                 prerequisite_branch=DEFAULT, description=None,
                 break_link=False):
        """See `IBranchMergeProposal`."""
        if source_branch is None:
            source_branch = self.source_branch
        if target_branch is None:
            target_branch = self.target_branch
        # DEFAULT instead of None, because None is a valid value.
        proposals = BranchMergeProposalGetter.activeProposalsForBranches(
            source_branch, target_branch)
        for proposal in proposals:
            if proposal is not self:
                raise BranchMergeProposalExists(proposal)
        if prerequisite_branch is DEFAULT:
            prerequisite_branch = self.prerequisite_branch
        if description is None:
            description = self.description
        # You can transition from REJECTED to SUPERSEDED, but
        # not from MERGED or SUPERSEDED.
        self._transitionToState(
            BranchMergeProposalStatus.SUPERSEDED, registrant)
        # This sync update is needed as the add landing target does
        # a database query to identify if there are any active proposals
        # with the same source and target branches.
        self.syncUpdate()
        review_requests = list(set(
            (vote.reviewer, vote.review_type) for vote in self.votes))
        proposal = source_branch.addLandingTarget(
            registrant=registrant,
            target_branch=target_branch,
            prerequisite_branch=prerequisite_branch,
            description=description,
            needs_review=True,
            review_requests=review_requests)
        if not break_link:
            self.superseded_by = proposal
        # This sync update is needed to ensure that the transitive
        # properties of supersedes and superseded_by are visible to
        # the old and the new proposal.
        self.syncUpdate()
        return proposal

    def _normalizeReviewType(self, review_type):
        """Normalise the review type.

        If review_type is None, it stays None.  Otherwise the review_type
        is converted to lower case, and if the string is empty it gets
        changed to None.
        """
        if review_type is not None:
            review_type = review_type.strip()
            if review_type == '':
                review_type = None
            else:
                review_type = review_type.lower()
        return review_type

    def _subscribeUserToStackedBranch(self, branch, user,
                                      checked_branches=None):
        """Subscribe the user to the branch and those it is stacked on."""
        if checked_branches is None:
            checked_branches = []
        branch.subscribe(
            user,
            BranchSubscriptionNotificationLevel.NOEMAIL,
            BranchSubscriptionDiffSize.NODIFF,
            CodeReviewNotificationLevel.FULL,
            user)
        if branch.stacked_on is not None:
            checked_branches.append(branch)
            if branch.stacked_on not in checked_branches:
                self._subscribeUserToStackedBranch(
                    branch.stacked_on, user, checked_branches)

    def _acceptable_to_give_visibility(self, branch, reviewer):
        # If the branch is private, only exclusive teams can be subscribed
        # to prevent leaks.
        if (branch.information_type in PRIVATE_INFORMATION_TYPES and
            reviewer.is_team and reviewer.anyone_can_join()):
            return False
        return True

    def _ensureAssociatedBranchesVisibleToReviewer(self, reviewer):
        """A reviewer must be able to see the source and target branches.

        Currently, we ensure the required visibility by subscribing the
        user to the branch and those on which it is stacked. We do not
        subscribe the reviewer if the branch is private and the reviewer is
        an open team.
        """
        source = self.source_branch
        if (not source.visibleByUser(reviewer) and
                self._acceptable_to_give_visibility(source, reviewer)):
            self._subscribeUserToStackedBranch(source, reviewer)
        target = self.target_branch
        # Bug fix: the acceptability check previously inspected ``source``
        # here, so a private target could be exposed (or a visible one
        # withheld) based on the wrong branch's information type.
        if (not target.visibleByUser(reviewer) and
                self._acceptable_to_give_visibility(target, reviewer)):
            self._subscribeUserToStackedBranch(target, reviewer)

    def nominateReviewer(self, reviewer, registrant, review_type=None,
                         _date_created=DEFAULT, _notify_listeners=True):
        """See `IBranchMergeProposal`."""
        # Return the existing vote reference or create a new one.
        # Lower case the review type.
        review_type = self._normalizeReviewType(review_type)
        vote_reference = self.getUsersVoteReference(reviewer, review_type)
        # If there is no existing review for the reviewer, then create a
        # new one.  If the reviewer is a team, then we don't care if there
        # is already an existing pending review, as some projects expect
        # multiple reviews from a team.
        if vote_reference is None or reviewer.is_team:
            vote_reference = CodeReviewVoteReference(
                branch_merge_proposal=self,
                registrant=registrant,
                reviewer=reviewer,
                date_created=_date_created)
            self._ensureAssociatedBranchesVisibleToReviewer(reviewer)
        vote_reference.review_type = review_type
        if _notify_listeners:
            notify(ReviewerNominatedEvent(vote_reference))
        return vote_reference

    def deleteProposal(self):
        """See `IBranchMergeProposal`."""
        # Delete this proposal, but keep the superseded chain linked.
        if self.supersedes is not None:
            self.supersedes.superseded_by = self.superseded_by
        # Delete the related CodeReviewVoteReferences.
        for vote in self.votes:
            vote.destroySelf()
        # Delete the related CodeReviewComments.
        for comment in self.all_comments:
            comment.destroySelf()
        # Delete all jobs referring to the BranchMergeProposal, whether
        # or not they have completed.
        from lp.code.model.branchmergeproposaljob import (
            BranchMergeProposalJob)
        for job in BranchMergeProposalJob.selectBy(
                branch_merge_proposal=self.id):
            job.destroySelf()
        self._preview_diffs.remove()
        self.destroySelf()

    def getUnlandedSourceBranchRevisions(self):
        """See `IBranchMergeProposal`."""
        store = Store.of(self)
        source = SQL("""source AS (SELECT BranchRevision.branch,
            BranchRevision.revision, Branchrevision.sequence FROM
            BranchRevision WHERE BranchRevision.branch = %s and
            BranchRevision.sequence IS NOT NULL ORDER BY
            BranchRevision.branch DESC, BranchRevision.sequence DESC
            LIMIT 10)""" % self.source_branch.id)
        where = SQL("""BranchRevision.revision NOT IN (SELECT revision from
            BranchRevision AS target where target.branch = %s and
            BranchRevision.revision = target.revision)"""
            % self.target_branch.id)
        using = SQL("""source as BranchRevision""")
        revisions = store.with_(source).using(using).find(
            BranchRevision, where)
        return list(revisions.order_by(
            Desc(BranchRevision.sequence)).config(limit=10))

    def createComment(self, owner, subject, content=None, vote=None,
                      review_type=None, parent=None, _date_created=DEFAULT,
                      _notify_listeners=True):
        """See `IBranchMergeProposal`."""
        #:param _date_created: The date the message was created.  Provided
        #    only for testing purposes, as it can break
        #    BranchMergeProposal.root_message.
        review_type = self._normalizeReviewType(review_type)
        assert owner is not None, 'Merge proposal messages need a sender'
        parent_message = None
        if parent is not None:
            assert parent.branch_merge_proposal == self, \
                'Replies must use the same merge proposal as their parent'
            parent_message = parent.message
        if not subject:
            # Get the subject from the parent if there is one, or use a
            # nice default.
            if parent is None:
                subject = self.title
            else:
                subject = parent.message.subject
            if not subject.startswith('Re: '):
                subject = 'Re: ' + subject

        # Avoid circular dependencies.
        from lp.services.messages.model.message import Message, MessageChunk
        msgid = make_msgid('codereview')
        message = Message(
            parent=parent_message, owner=owner, rfc822msgid=msgid,
            subject=subject, datecreated=_date_created)
        MessageChunk(message=message, content=content, sequence=1)
        return self.createCommentFromMessage(
            message, vote, review_type, original_email=None,
            _notify_listeners=_notify_listeners, _validate=False)

    def getUsersVoteReference(self, user, review_type=None):
        """Get the existing vote reference for the given user."""
        # Lower case the review type.
        review_type = self._normalizeReviewType(review_type)
        if user is None:
            return None
        if user.is_team:
            query = And(
                CodeReviewVoteReference.reviewer == user,
                CodeReviewVoteReference.review_type == review_type)
        else:
            query = CodeReviewVoteReference.reviewer == user
        return Store.of(self).find(
            CodeReviewVoteReference,
            CodeReviewVoteReference.branch_merge_proposal == self,
            query).order_by(CodeReviewVoteReference.date_created).first()

    def _getTeamVoteReference(self, user, review_type):
        """Get a vote reference where the user is in the review team.

        Only return those reviews where the review_type matches.
        """
        refs = Store.of(self).find(
            CodeReviewVoteReference,
            CodeReviewVoteReference.branch_merge_proposal == self,
            CodeReviewVoteReference.review_type == review_type,
            CodeReviewVoteReference.comment == None)
        for ref in refs.order_by(CodeReviewVoteReference.date_created):
            if user.inTeam(ref.reviewer):
                return ref
        return None

    def _getVoteReference(self, user, review_type):
        """Get the vote reference for the user.

        The returned vote reference will either:
          * the existing vote reference for the user
          * a vote reference of the same type that has been requested of a
            team that the user is a member of
          * a new vote reference for the user
        """
        # Firstly look for a vote reference for the user.
        ref = self.getUsersVoteReference(user)
        if ref is not None:
            return ref
        # Get all the unclaimed CodeReviewVoteReferences with the
        # review_type specified.
        team_ref = self._getTeamVoteReference(user, review_type)
        if team_ref is not None:
            return team_ref
        # If the review_type is not None, check to see if there is an
        # outstanding team review requested with no specified type.
        if review_type is not None:
            team_ref = self._getTeamVoteReference(user, None)
            if team_ref is not None:
                return team_ref
        # Create a new reference.
        return CodeReviewVoteReference(
            branch_merge_proposal=self,
            registrant=user,
            reviewer=user,
            review_type=review_type)

    def createCommentFromMessage(self, message, vote, review_type,
                                 original_email, _notify_listeners=True,
                                 _validate=True):
        """See `IBranchMergeProposal`."""
        if _validate:
            validate_message(original_email)
        review_type = self._normalizeReviewType(review_type)
        code_review_message = CodeReviewComment(
            branch_merge_proposal=self, message=message, vote=vote,
            vote_tag=review_type)
        # Get the appropriate CodeReviewVoteReference for the reviewer.
        # If there isn't one, then create one, otherwise set the comment
        # reference.
        if vote is not None:
            vote_reference = self._getVoteReference(
                message.owner, review_type)
            # Just set the reviewer and review type again on the off chance
            # that the user has edited the review_type or claimed a team
            # review.
            vote_reference.reviewer = message.owner
            vote_reference.review_type = review_type
            vote_reference.comment = code_review_message
        if _notify_listeners:
            notify(NewCodeReviewCommentEvent(
                code_review_message, original_email))
        return code_review_message

    def updatePreviewDiff(self, diff_content, source_revision_id,
                          target_revision_id, prerequisite_revision_id=None,
                          conflicts=None):
        """See `IBranchMergeProposal`."""
        return PreviewDiff.create(
            self, diff_content, source_revision_id, target_revision_id,
            prerequisite_revision_id, conflicts)

    def getIncrementalDiffRanges(self):
        groups = self.getRevisionsSinceReviewStart()
        return [
            (group[0].revision.getLefthandParent(), group[-1].revision)
            for group in groups]

    def generateIncrementalDiff(self, old_revision, new_revision, diff=None):
        """See `IBranchMergeProposal`."""
        if diff is None:
            source_branch = self.source_branch.getBzrBranch()
            ignore_branches = [self.target_branch.getBzrBranch()]
            if self.prerequisite_branch is not None:
                ignore_branches.append(
                    self.prerequisite_branch.getBzrBranch())
            diff = Diff.generateIncrementalDiff(
                old_revision, new_revision, source_branch, ignore_branches)
        incremental_diff = IncrementalDiff()
        incremental_diff.diff = diff
        incremental_diff.branch_merge_proposal = self
        incremental_diff.old_revision = old_revision
        incremental_diff.new_revision = new_revision
        IMasterStore(IncrementalDiff).add(incremental_diff)
        return incremental_diff

    def getIncrementalDiffs(self, revision_list):
        """See `IBranchMergeProposal`."""
        diffs = Store.of(self).find(
            IncrementalDiff,
            IncrementalDiff.branch_merge_proposal_id == self.id)
        diff_dict = dict(
            ((diff.old_revision, diff.new_revision), diff)
            for diff in diffs)
        return [diff_dict.get(revisions) for revisions in revision_list]

    @property
    def revision_end_date(self):
        """The cutoff date for showing revisions.

        If the proposal has been merged, then we stop at the merged date.
        If it is rejected, we stop at the reviewed date. For superseded
        proposals, it should ideally use the non-existant
        date_last_modified, but could use the last comment date.
        """
        status = self.queue_status
        if status == BranchMergeProposalStatus.MERGED:
            return self.date_merged
        if status == BranchMergeProposalStatus.REJECTED:
            return self.date_reviewed
        # Otherwise return None representing an open end date.
        return None

    def _getNewerRevisions(self):
        start_date = self.date_review_requested
        if start_date is None:
            start_date = self.date_created
        return self.source_branch.getMainlineBranchRevisions(
            start_date, self.revision_end_date, oldest_first=True)

    def getRevisionsSinceReviewStart(self):
        """Get the grouped revisions since the review started."""
        entries = [
            ((comment.date_created, -1), comment)
            for comment in self.all_comments]
        revisions = self._getNewerRevisions()
        entries.extend(
            ((revision.date_created, branch_revision.sequence),
             branch_revision)
            for branch_revision, revision in revisions)
        entries.sort()
        current_group = []
        for sortkey, entry in entries:
            if IBranchRevision.providedBy(entry):
                current_group.append(entry)
            else:
                if current_group != []:
                    yield current_group
                    current_group = []
        if current_group != []:
            yield current_group

    def getMissingIncrementalDiffs(self):
        ranges = self.getIncrementalDiffRanges()
        diffs = self.getIncrementalDiffs(ranges)
        return [
            range_ for range_, diff in zip(ranges, diffs) if diff is None]

    @staticmethod
    def preloadDataForBMPs(branch_merge_proposals, user):
        # Utility to load the data related to a list of bmps.
        # Circular imports.
        from lp.code.model.branch import Branch
        from lp.code.model.branchcollection import GenericBranchCollection
        from lp.registry.model.product import Product
        from lp.registry.model.distroseries import DistroSeries

        ids = set()
        source_branch_ids = set()
        person_ids = set()
        for mp in branch_merge_proposals:
            ids.add(mp.id)
            source_branch_ids.add(mp.source_branchID)
            person_ids.add(mp.registrantID)
            person_ids.add(mp.merge_reporterID)

        branches = load_related(
            Branch, branch_merge_proposals, (
                "target_branchID", "prerequisite_branchID",
                "source_branchID"))
        # The stacked on branches are used to check branch visibility.
        GenericBranchCollection.preloadVisibleStackedOnBranches(
            branches, user)

        if len(branches) == 0:
            return

        # Pre-load PreviewDiffs and Diffs.
        preview_diffs = IStore(BranchMergeProposal).find(
            PreviewDiff,
            PreviewDiff.branch_merge_proposal_id.is_in(ids)).order_by(
                PreviewDiff.branch_merge_proposal_id,
                Desc(PreviewDiff.date_created)).config(
                    distinct=[PreviewDiff.branch_merge_proposal_id])
        load_related(Diff, preview_diffs, ['diff_id'])
        for previewdiff in preview_diffs:
            cache = get_property_cache(previewdiff.branch_merge_proposal)
            cache.preview_diff = previewdiff

        # Add source branch owners' to the list of pre-loaded persons.
        person_ids.update(
            branch.ownerID for branch in branches
            if branch.id in source_branch_ids)

        # Pre-load Person and ValidPersonCache.
        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
            person_ids, need_validity=True))

        # Pre-load branches' data.
        load_related(SourcePackageName, branches, ['sourcepackagenameID'])
        load_related(DistroSeries, branches, ['distroseriesID'])
        load_related(Product, branches, ['productID'])
        GenericBranchCollection.preloadDataForBranches(branches)
class EditorCapsule(SQLObject, PluginCapsule, metaclass=SQLObjectAndABCMeta):
    """An editable capsule: an ordered set of slides shown on a channel."""
    name = StringCol()
    owner = ForeignKey('User', cascade='null', default=None)
    channel = ForeignKey('PluginChannel', cascade=True)
    # A capsule name must be unique within its channel.
    capsule_id = DatabaseIndex('name', 'channel', unique=True)
    creation_date = DateTimeCol(notNone=True, default=lambda: datetime.now())
    slides = SQLMultipleJoin('EditorSlide', joinColumn='capsule_id')
    theme = StringCol(
        default=lambda: web.ctx.app_stack[0].config['default_theme'])
    # Position of the capsule within its channel.
    c_order = IntCol(notNone=True)
    validity_from = DateTimeCol(notNone=True)
    validity_to = DateTimeCol(notNone=True)

    @classmethod
    def rectify_c_order(cls, channel_id):
        """Renumber the channel's capsules to a dense 0..n-1 ``c_order``.

        :return: the channel's capsules ordered by ``c_order``.
        """
        # The same ordered query is needed in three places; build it once.
        def ordered_capsules():
            return EditorCapsule.select(
                EditorCapsule.q.channel == channel_id,
                orderBy=EditorCapsule.q.c_order)

        capsules_list = list(ordered_capsules())
        # Same fast-path check as before: with an ascending ordering,
        # first == 0 and last == n - 1 is taken to mean nothing to fix.
        if (len(capsules_list) > 0
                and capsules_list[0].c_order == 0
                and capsules_list[-1].c_order == len(capsules_list) - 1):
            return ordered_capsules()
        for position, capsule in enumerate(capsules_list):
            capsule.c_order = position
        return ordered_capsules()

    def insert_slide_at(self, slide, index):
        """
        inserts the slide at the correct position of the slides list of the
        capsule, updating the s_order of the slides located after the index
        position in the list.
        """
        # get the slides of the capsule, ordered by their s_order
        slides = list(
            EditorSlide.select(EditorSlide.q.capsule == self.id,
                               orderBy=EditorSlide.q.s_order))
        # set the s_order of the new slide
        slide.s_order = index
        slide.capsule = self.id
        # update the s_order of all the slides with a s_order >= the
        # s_order of this slide
        for i in range(index, len(slides)):
            slides[i].s_order += 1
        EditorSlide.rectify_s_order(self.id)

    def to_plugin_capsule(self) -> EditorPluginCapsule:
        """Build the plugin-facing capsule with slides in display order."""
        caps = EditorPluginCapsule(theme=self.theme)
        for s in sorted(self.slides, key=lambda slide: slide.s_order):
            caps.add_slide(s.to_plugin_slide())
        return caps

    def get_slides(self) -> Iterable[PluginSlide]:
        return self.slides

    def get_theme(self) -> str:
        return self.theme

    def _get_is_active(self):
        # Active iff "now" falls inside [validity_from, validity_to).
        now = datetime.now()
        return self.validity_from <= now < self.validity_to

    def _get_pretty_from(self):
        # Human-readable start-of-validity, seconds precision.
        return str(self.validity_from.replace(microsecond=0).isoformat(' '))

    def duplicate(self, owner_id, c_order=None):
        """
        :return: a duplicate of this capsule belonging to the specified
            owner_id and containing a duplicate of the slides of this
            capsule. If c_order is not specified, the duplicate has the
            same c_order as this capsule.
        """
        c_order = c_order if c_order is not None else self.c_order

        def create_capsule(name):
            # Keep appending '-copy' until the (name, channel) unique
            # index accepts the row.
            try:
                return EditorCapsule(name=name + '-copy',
                                     channel=self.channel,
                                     ownerID=owner_id,
                                     creation_date=self.creation_date,
                                     c_order=c_order,
                                     validity_from=self.validity_from,
                                     validity_to=self.validity_to)
            except DuplicateEntryError:
                return create_capsule(name + '-copy')

        duplicate = create_capsule(str(self.name))
        for slide in self.slides:
            EditorSlide.from_slide(slide=slide, capsule=duplicate)
        return duplicate

    def to_json_api(self):
        """JSON-serializable representation for the HTTP API."""
        return {
            'id': self.id,
            'name': self.name,
            'slides': [s.to_json_api() for s in self.slides],
            'validity': [
                int(self.validity_from.timestamp()),
                int(self.validity_to.timestamp())
            ],
            'theme': self.theme,
        }
class FAQ(SQLBase):
    """See `IFAQ`."""

    implements(IFAQ)

    _table = 'FAQ'
    _defaultOrder = ['date_created', 'id']

    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    title = StringCol(notNull=True)
    keywords = StringCol(dbName="tags", notNull=False, default=None)
    content = StringCol(notNull=False, default=None)
    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    last_updated_by = ForeignKey(
        dbName='last_updated_by', foreignKey='Person',
        storm_validator=validate_public_person, notNull=False, default=None)
    date_last_updated = UtcDateTimeCol(notNull=False, default=None)
    # An FAQ belongs to exactly one of product or distribution (enforced in
    # `new`).
    product = ForeignKey(
        dbName='product', foreignKey='Product', notNull=False, default=None)
    distribution = ForeignKey(
        dbName='distribution', foreignKey='Distribution', notNull=False,
        default=None)
    related_questions = SQLMultipleJoin(
        'Question', joinColumn='faq', orderBy=['Question.datecreated'])

    @property
    def target(self):
        """See `IFAQ`."""
        if self.product:
            return self.product
        else:
            return self.distribution

    @staticmethod
    def new(owner, title, content, keywords=None, date_created=None,
            product=None, distribution=None):
        """Factory method to create a new FAQ.

        Ensure that only one of product or distribution is given.

        (The `keywords` default was previously `keywords=keywords`, which
        bound the class-level `StringCol` column object at class-definition
        time; the intended default is `None`.)
        """
        if not IPerson.providedBy(owner):
            raise AssertionError(
                'owner parameter should be an IPerson, not %s' % type(owner))
        if product is not None and distribution is not None:
            raise AssertionError(
                "only one of product or distribution should be provided")
        if product is None and distribution is None:
            raise AssertionError("product or distribution must be provided")
        if date_created is None:
            date_created = DEFAULT
        faq = FAQ(
            owner=owner, title=title, content=content, keywords=keywords,
            date_created=date_created, product=product,
            distribution=distribution)
        notify(ObjectCreatedEvent(faq))
        return faq

    @staticmethod
    def findSimilar(summary, product=None, distribution=None):
        """Return the FAQs similar to summary.

        See `IFAQTarget.findSimilarFAQs` for details.
        """
        assert not (product and distribution), (
            'only one of product or distribution should be provided')
        if product:
            target_constraint = 'product = %s' % sqlvalues(product)
        elif distribution:
            target_constraint = 'distribution = %s' % sqlvalues(distribution)
        else:
            raise AssertionError('must provide product or distribution')
        phrases = nl_phrase_search(summary, FAQ, target_constraint)
        if not phrases:
            # No useful words to search on in that summary.
            return FAQ.select('1 = 2')
        return FAQ.select(
            And(target_constraint, fti_search(FAQ, phrases, ftq=False)),
            orderBy=[
                rank_by_fti(FAQ, phrases, ftq=False), "-FAQ.date_created"])

    @staticmethod
    def getForTarget(id, target):
        """Return the FAQ with the requested id.

        When target is not None, the target will be checked to make sure
        that the FAQ is in the expected target or return None otherwise.
        """
        try:
            faq = FAQ.get(id)
            if target is None or target == faq.target:
                return faq
            else:
                return None
        except SQLObjectNotFound:
            return None
class EditorSlide(SQLObject, PluginSlide, metaclass=SQLObjectAndABCMeta):
    """A slide belonging to an editor capsule."""
    # Duration of the slide; see get_duration_or_default for the
    # milliseconds fallback logic.
    duration = IntCol(notNone=True)
    # NOTE(review): default={} is a shared mutable object -- confirm
    # SQLObject does not alias it across rows.
    content = JSONCol(notNone=True, default={})
    # Position of this slide within its capsule.
    s_order = IntCol(notNone=True)
    template = StringCol(notNone=True)
    capsule = ForeignKey('EditorCapsule', cascade=True)
    asset_mappings = SQLMultipleJoin('AssetSlideMapping')

    @classmethod
    def from_slide(cls, slide: PluginSlide, capsule, slide_order=0):
        """Create an EditorSlide copy of any PluginSlide, creating an
        AssetSlideMapping for every content field that references a file."""
        # Inner parameter renamed from `slide` to avoid shadowing the
        # source-slide argument of from_slide.
        def create_asset_mappings(new_slide):
            for field, inputs in new_slide.get_content().items():
                if 'file' in inputs:
                    AssetSlideMapping(slide=new_slide, asset=inputs['file'])

        s = EditorSlide(content=slide.get_content(),
                        duration=slide.get_duration(),
                        template=slide.get_template(),
                        capsule=capsule,
                        s_order=slide_order)
        create_asset_mappings(s)
        return s

    @classmethod
    def from_video(cls, video, storage_manager, transcoding_manager, capsule,
                   user, background_color):
        """Store the uploaded ``video`` as an asset and append it to
        ``capsule`` as an image-background slide.

        Non-WebM uploads are transcoded asynchronously; in that case the
        path of the (not yet written) WebM asset is returned instead of the
        slide, and the slide is created in the transcoding callback.
        """
        def create_slide(asset_id, capsule_id):
            video_slide = cls.from_slide(
                VideoSlide({'file': asset_id}, template='template-image-bg'),
                capsule=capsule_id)
            video_slide.content['background-1'].update({
                'size': 'contain',
                'color': background_color
            })
            video_slide.content = video_slide.content  # Force SQLObject update
            capsule = EditorCapsule.get(capsule_id)
            capsule.insert_slide_at(video_slide, capsule.slides.count())
            return video_slide

        # TODO: Stream asset to disk instead of loading it into memory
        video_blob = video.file.read()
        if magic.from_buffer(video_blob, mime=True) != 'video/webm':
            def transcode_callback(success_status):
                if success_status:
                    create_slide(video_asset_id, capsule_id)
                    video_asset.file_size = os.path.getsize(video_asset.path)
                else:
                    # Transcoding failed: drop both assets.
                    video_asset.destroySelf()
                    original_video_asset.destroySelf()

            original_video_asset = storage_manager.store_file(
                video_blob, filename=video.filename, user=user)
            # The original appended os.extsep + '.webm', yielding
            # 'name..webm'; build 'name.webm' instead.
            video_asset = storage_manager.create_asset(
                filename=video.filename + os.extsep + 'webm', user=user,
                mime_type='video/webm')
            # Capture plain ids for the callback so it does not hold on to
            # live SQLObject rows.
            video_asset_id, capsule_id = video_asset.id, capsule.id
            transcoding_manager.enqueue_task(original_video_asset.path,
                                             video_asset.path,
                                             transcode_callback)
            return video_asset.path
        else:
            video_asset = storage_manager.store_file(
                video_blob, filename=video.filename, user=user)
            return create_slide(video_asset.id, capsule.id)

    def _init(self, id, connection=None, selectResults=None):
        # No-op override kept as a hook point over SQLObject's _init.
        return super()._init(id, connection, selectResults)

    @classmethod
    def rectify_s_order(cls, capsule_id):
        """Renumber the slides of ``capsule_id`` so that ``s_order`` forms a
        gapless 0..n-1 sequence, and return them ordered by ``s_order``.

        Only the first and last slides are inspected to decide whether
        renumbering is needed (same heuristic as the capsule ordering).
        """
        slides = list(
            EditorSlide.select(EditorSlide.q.capsule == capsule_id,
                               orderBy=EditorSlide.q.s_order))
        if slides and slides[0].s_order == 0 \
                and slides[-1].s_order == len(slides) - 1:
            return slides
        for position, slide in enumerate(slides):
            slide.s_order = position
        return slides

    def to_plugin_slide(self) -> EditorPluginSlide:
        """Convert this slide to its plugin-level representation."""
        return EditorPluginSlide(content=self.content,
                                 template=self.template,
                                 duration=int(self.duration))

    def get_duration(self) -> int:
        return self.duration

    def get_duration_or_default(self):
        """Return the channel's configured slide duration in milliseconds,
        falling back to the plugin's declared default.

        The original body evaluated this expression but had no ``return``,
        so the method always returned None.
        """
        if 'duration' in self.capsule.channel.plugin_config:
            return int(self.capsule.channel.plugin_config['duration']) * 1000
        return int(self.capsule.channel.plugin
                   .channels_params['duration']['default']) * 1000

    def get_content(self):
        return self.content

    def get_template(self) -> str:
        return self.template

    def duplicate(self, capsule=None, s_order=None):
        """
        :return: a slide identical to this slide.  If the capsule and
            s_order arguments are not specified, they are the same as this
            slide's.  It also duplicates the AssetSlideMappings of this
            slide.
        """
        capsule = capsule if capsule is not None else self.capsule
        s_order = s_order if s_order is not None else self.s_order
        duplicate = EditorSlide(duration=self.duration,
                                content=self.get_content(),
                                s_order=s_order,
                                template=self.get_template(),
                                capsule=capsule)
        for mapping in AssetSlideMapping.selectBy(slide=self.id):
            AssetSlideMapping(assetID=mapping.asset.id, slideID=duplicate.id)
        return duplicate

    def get_render_path(self, ictv_home=None):
        """Return the URL path at which this slide is rendered."""
        if ictv_home is None:
            ictv_home = web.ctx.home
        return '%s%s/%d/%d' % (ictv_home, 'render', self.capsule.id, self.id)

    @property
    def contains_video(self):
        """Whether any content field references a video: an uploaded asset
        whose MIME type starts with 'video', or a 'video' input."""
        for field, inputs in self.content.items():
            if 'file' in inputs:
                if Asset.get(inputs['file']).mime_type.startswith('video'):
                    return True
            elif 'video' in inputs:
                return True
        return False

    def to_json_api(self):
        """Serialize this slide for the JSON API."""
        return {
            'id': self.id,
            'duration': self.duration,
            'content': self.content,
            'template': self.template,
        }
class AssetSlideMapping(SQLObject):
    """Link between an Asset and an EditorSlide that uses it; a mapping row
    is deleted together with either endpoint (cascade=True on both keys)."""
    asset = ForeignKey('Asset', cascade=True)
    slide = ForeignKey('EditorSlide', cascade=True)
class ProjectGroup(SQLBase, BugTargetBase, HasSpecificationsMixin,
                   MakesAnnouncements, HasSprintsMixin, HasAliasMixin,
                   KarmaContextMixin, StructuralSubscriptionTargetMixin,
                   HasBranchesMixin, HasMergeProposalsMixin,
                   HasMilestonesMixin, HasDriversMixin,
                   TranslationPolicyMixin):
    """A ProjectGroup"""

    implements(IBugSummaryDimension, IProjectGroup, IFAQCollection, IHasIcon,
               IHasLogo, IHasMugshot, ISearchableByQuestionOwner)

    _table = "Project"

    # db field names
    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_person_or_closed_team, notNull=True)
    registrant = ForeignKey(
        dbName='registrant', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    name = StringCol(dbName='name', notNull=True)
    displayname = StringCol(dbName='displayname', notNull=True)
    title = StringCol(dbName='title', notNull=True)
    summary = StringCol(dbName='summary', notNull=True)
    description = StringCol(dbName='description', notNull=True)
    datecreated = UtcDateTimeCol(
        dbName='datecreated', notNull=True, default=UTC_NOW)
    driver = ForeignKey(
        dbName="driver", foreignKey="Person",
        storm_validator=validate_public_person, notNull=False, default=None)
    homepageurl = StringCol(dbName='homepageurl', notNull=False, default=None)
    homepage_content = StringCol(default=None)
    icon = ForeignKey(
        dbName='icon', foreignKey='LibraryFileAlias', default=None)
    logo = ForeignKey(
        dbName='logo', foreignKey='LibraryFileAlias', default=None)
    mugshot = ForeignKey(
        dbName='mugshot', foreignKey='LibraryFileAlias', default=None)
    wikiurl = StringCol(dbName='wikiurl', notNull=False, default=None)
    sourceforgeproject = StringCol(
        dbName='sourceforgeproject', notNull=False, default=None)
    freshmeatproject = StringCol(
        dbName='freshmeatproject', notNull=False, default=None)
    lastdoap = StringCol(dbName='lastdoap', notNull=False, default=None)
    translationgroup = ForeignKey(
        dbName='translationgroup', foreignKey='TranslationGroup',
        notNull=False, default=None)
    translationpermission = EnumCol(
        dbName='translationpermission', notNull=True,
        schema=TranslationPermission, default=TranslationPermission.OPEN)
    active = BoolCol(dbName='active', notNull=True, default=True)
    reviewed = BoolCol(dbName='reviewed', notNull=True, default=False)
    bugtracker = ForeignKey(
        foreignKey="BugTracker", dbName="bugtracker", notNull=False,
        default=None)
    bug_reporting_guidelines = StringCol(default=None)
    bug_reported_acknowledgement = StringCol(default=None)

    @property
    def pillar_category(self):
        """See `IPillar`."""
        return "Project Group"

    def getProducts(self, user):
        """Return this group's active products visible to `user`, ordered
        by display name."""
        results = Store.of(self).find(
            Product, Product.project == self, Product.active == True,
            ProductSet.getProductPrivacyFilter(user))
        return results.order_by(Product.displayname)

    @cachedproperty
    def products(self):
        # Cached list of products visible to the current browser user.
        return list(self.getProducts(getUtility(ILaunchBag).user))

    def getProduct(self, name):
        return Product.selectOneBy(project=self, name=name)

    def getConfigurableProducts(self):
        """Return the products the current user may edit."""
        return [product for product in self.products
                if check_permission('launchpad.Edit', product)]

    @property
    def drivers(self):
        """See `IHasDrivers`."""
        if self.driver is not None:
            return [self.driver]
        return []

    def getTranslatables(self):
        """Return an iterator over products that are translatable in LP.

        Only products with IProduct.translations_usage set to
        ServiceUsage.LAUNCHPAD are considered translatable.
        """
        store = Store.of(self)
        origin = [
            Product,
            Join(ProductSeries, Product.id == ProductSeries.productID),
            Join(POTemplate, ProductSeries.id == POTemplate.productseriesID),
        ]
        return store.using(*origin).find(
            Product,
            Product.project == self.id,
            Product.translations_usage == ServiceUsage.LAUNCHPAD,
        ).config(distinct=True)

    @cachedproperty
    def translatables(self):
        """See `IProjectGroup`."""
        return list(self.getTranslatables())

    def has_translatable(self):
        """See `IProjectGroup`."""
        return len(self.translatables) > 0

    def sharesTranslationsWithOtherSide(self, person, language,
                                        sourcepackage=None,
                                        purportedly_upstream=False):
        """See `ITranslationPolicy`."""
        assert sourcepackage is None, (
            "Got a SourcePackage for a ProjectGroup!")
        # ProjectGroup translations are considered upstream.  They are
        # automatically shared.
        return True

    def has_branches(self):
        """ See `IProjectGroup`."""
        return not self.getBranches().is_empty()

    def _getBaseQueryAndClauseTablesForQueryingSprints(self):
        # Shared SQL fragment used by the HasSprintsMixin queries.
        query = """
            Product.project = %s
            AND Specification.product = Product.id
            AND Specification.id = SprintSpecification.specification
            AND SprintSpecification.sprint = Sprint.id
            AND SprintSpecification.status = %s
            """ % sqlvalues(self, SprintSpecificationStatus.ACCEPTED)
        return query, ['Product', 'Specification', 'SprintSpecification']

    def specifications(self, user, sort=None, quantity=None, filter=None,
                       series=None, need_people=True, need_branches=True,
                       need_workitems=False):
        """See `IHasSpecifications`."""
        base_clauses = [
            Specification.productID == Product.id,
            Product.projectID == self.id,
        ]
        tables = [Specification]
        if series:
            base_clauses.append(ProductSeries.name == series)
            tables.append(
                Join(ProductSeries,
                     Specification.productseriesID == ProductSeries.id))
        return search_specifications(
            self, base_clauses, user, sort, quantity, filter, tables=tables,
            need_people=need_people, need_branches=need_branches,
            need_workitems=need_workitems)

    def _customizeSearchParams(self, search_params):
        """Customize `search_params` for this milestone."""
        search_params.setProject(self)

    def _getOfficialTagClause(self):
        """See `OfficialBugTagTargetMixin`.

        The original body built this clause but never returned it, so the
        method always yielded None.
        """
        return And(ProjectGroup.id == Product.projectID,
                   Product.id == OfficialBugTag.productID)

    @property
    def official_bug_tags(self):
        """See `IHasBugs`."""
        store = Store.of(self)
        result = store.find(
            OfficialBugTag.tag,
            OfficialBugTag.product == Product.id,
            Product.project == self.id).order_by(OfficialBugTag.tag)
        result.config(distinct=True)
        return result

    def getBugSummaryContextWhereClause(self):
        """See BugTargetBase."""
        # Circular fail.
        from lp.bugs.model.bugsummary import BugSummary
        product_ids = [product.id for product in self.products]
        if not product_ids:
            return False
        return BugSummary.product_id.is_in(product_ids)

    # IQuestionCollection
    def searchQuestions(self, search_text=None,
                        status=QUESTION_STATUS_DEFAULT_SEARCH,
                        language=None, sort=None, owner=None,
                        needs_attention_from=None, unsupported=False):
        """See `IQuestionCollection`."""
        if unsupported:
            unsupported_target = self
        else:
            unsupported_target = None
        return QuestionTargetSearch(
            project=self,
            search_text=search_text, status=status,
            language=language, sort=sort, owner=owner,
            needs_attention_from=needs_attention_from,
            unsupported_target=unsupported_target).getResults()

    def getQuestionLanguages(self):
        """See `IQuestionCollection`."""
        return set(Language.select(
            """
            Language.id = Question.language AND
            Question.product = Product.id AND
            Product.project = %s""" % sqlvalues(self.id),
            clauseTables=['Question', 'Product'], distinct=True))

    @property
    def bugtargetname(self):
        """See IBugTarget."""
        return self.name

    # IFAQCollection
    def getFAQ(self, id):
        """See `IQuestionCollection`."""
        faq = FAQ.getForTarget(id, None)
        if (faq is not None
                and IProduct.providedBy(faq.target)
                and faq.target in self.products):
            # Filter out faq not related to this project.
            return faq
        else:
            return None

    def searchFAQs(self, search_text=None, owner=None, sort=None):
        """See `IQuestionCollection`."""
        return FAQSearch(
            search_text=search_text, owner=owner, sort=sort,
            project=self).getResults()

    def hasProducts(self):
        """Returns True if a project has products associated with it, False
        otherwise.

        If the project group has < 1 product, selected links will be
        disabled.  This is to avoid situations where users try to file bugs
        against empty project groups (Malone bug #106523).
        """
        return len(self.products) != 0

    def _getMilestoneCondition(self):
        """See `HasMilestonesMixin`."""
        user = getUtility(ILaunchBag).user
        privacy_filter = ProductSet.getProductPrivacyFilter(user)
        return And(Milestone.productID == Product.id,
                   Product.projectID == self.id,
                   privacy_filter)

    def _getMilestones(self, user, only_active):
        """Return a list of milestones for this project group.

        If only_active is True, only active milestones are returned,
        else all milestones.

        A project group has a milestone named 'A', if at least one of
        its products has a milestone named 'A'.
        """
        store = Store.of(self)
        columns = (
            Milestone.name,
            SQL('MIN(Milestone.dateexpected)'),
            SQL('BOOL_OR(Milestone.active)'),
            )
        privacy_filter = ProductSet.getProductPrivacyFilter(user)
        conditions = And(Milestone.product == Product.id,
                         Product.project == self,
                         Product.active == True,
                         privacy_filter)
        result = store.find(columns, conditions)
        result.group_by(Milestone.name)
        if only_active:
            result.having('BOOL_OR(Milestone.active) = TRUE')
        # MIN(Milestone.dateexpected) has to be used to match the
        # aggregate function in the `columns` variable.
        result.order_by(
            'milestone_sort_key(MIN(Milestone.dateexpected), Milestone.name) '
            'DESC')
        # An extra query is required here in order to get the correct
        # products without affecting the group/order of the query above.
        products_by_name = {}
        if result.any() is not None:
            milestone_names = [data[0] for data in result]
            product_conditions = And(
                Product.project == self,
                Milestone.product == Product.id,
                Product.active == True,
                privacy_filter,
                In(Milestone.name, milestone_names))
            for product, name in store.find(
                    (Product, Milestone.name), product_conditions):
                # Keep the first product seen for each milestone name.
                if name not in products_by_name:
                    products_by_name[name] = product
        return shortlist(
            [ProjectMilestone(self, name, dateexpected, active,
                              products_by_name.get(name, None))
             for name, dateexpected, active in result])

    @property
    def has_milestones(self):
        """See `IHasMilestones`."""
        store = Store.of(self)
        result = store.find(
            Milestone.id,
            And(Milestone.product == Product.id,
                Product.project == self,
                Product.active == True))
        return result.any() is not None

    @property
    def milestones(self):
        """See `IProjectGroup`."""
        user = getUtility(ILaunchBag).user
        return self._getMilestones(user, only_active=True)

    @property
    def product_milestones(self):
        """Hack to avoid the ProjectMilestone in MilestoneVocabulary."""
        # XXX: bug=644977 Robert Collins - this is a workaround for
        # inconsistency in project group milestone use.
        # NOTE(review): the original called self._get_milestones(), a method
        # that does not exist on this class (the helper is
        # _getMilestones(user, only_active)), so it could only raise
        # AttributeError.  Mapped to the existing helper returning all
        # milestones -- confirm the intended only_active value.
        user = getUtility(ILaunchBag).user
        return self._getMilestones(user, only_active=False)

    @property
    def all_milestones(self):
        """See `IProjectGroup`."""
        user = getUtility(ILaunchBag).user
        return self._getMilestones(user, only_active=False)

    def getMilestone(self, name):
        """See `IProjectGroup`."""
        for milestone in self.all_milestones:
            if milestone.name == name:
                return milestone
        return None

    def getSeries(self, series_name):
        """See `IProjectGroup.`"""
        has_series = ProductSeries.selectFirst(
            AND(ProductSeries.q.productID == Product.q.id,
                ProductSeries.q.name == series_name,
                Product.q.projectID == self.id),
            orderBy='id')
        if has_series is None:
            return None
        return ProjectGroupSeries(self, series_name)

    def _get_usage(self, attr):
        """Determine ProjectGroup usage based on individual projects.

        By default, return ServiceUsage.UNKNOWN.
        If any project uses Launchpad, return ServiceUsage.LAUNCHPAD.
        Otherwise, return the ServiceUsage of the last project that was
        not ServiceUsage.UNKNOWN.
        """
        result = ServiceUsage.UNKNOWN
        for product in self.products:
            product_usage = getattr(product, attr)
            if product_usage != ServiceUsage.UNKNOWN:
                result = product_usage
                if product_usage == ServiceUsage.LAUNCHPAD:
                    break
        return result

    @property
    def answers_usage(self):
        return self._get_usage('answers_usage')

    @property
    def blueprints_usage(self):
        return self._get_usage('blueprints_usage')

    @property
    def translations_usage(self):
        if self.has_translatable():
            return ServiceUsage.LAUNCHPAD
        return ServiceUsage.UNKNOWN

    @property
    def codehosting_usage(self):
        # Project groups do not support submitting code.
        return ServiceUsage.NOT_APPLICABLE

    @property
    def bug_tracking_usage(self):
        return self._get_usage('bug_tracking_usage')

    @property
    def uses_launchpad(self):
        """True if any Launchpad service is used by this project group."""
        return (self.answers_usage == ServiceUsage.LAUNCHPAD
                or self.blueprints_usage == ServiceUsage.LAUNCHPAD
                or self.translations_usage == ServiceUsage.LAUNCHPAD
                or self.codehosting_usage == ServiceUsage.LAUNCHPAD
                or self.bug_tracking_usage == ServiceUsage.LAUNCHPAD)
class BuildQueue(SQLBase):
    """A queue entry tracking the dispatch state of one build farm job."""

    _table = "BuildQueue"
    _defaultOrder = "id"

    def __init__(self, build_farm_job, estimated_duration=DEFAULT,
                 virtualized=DEFAULT, processor=DEFAULT, lastscore=None):
        super(BuildQueue, self).__init__(
            _build_farm_job=build_farm_job, virtualized=virtualized,
            processor=processor, estimated_duration=estimated_duration,
            lastscore=lastscore)
        # Compute an initial score unless one was supplied explicitly.
        if lastscore is None and self.specific_build is not None:
            self.score()

    _build_farm_job_id = Int(name='build_farm_job')
    _build_farm_job = Reference(_build_farm_job_id, 'BuildFarmJob.id')

    status = EnumCol(enum=BuildQueueStatus, default=BuildQueueStatus.WAITING)
    date_started = DateTime(tzinfo=pytz.UTC)

    builder = ForeignKey(dbName='builder', foreignKey='Builder', default=None)
    logtail = StringCol(dbName='logtail', default=None)
    lastscore = IntCol(dbName='lastscore', default=0)
    manual = BoolCol(dbName='manual', default=False)
    estimated_duration = IntervalCol()
    processor = ForeignKey(dbName='processor', foreignKey='Processor')
    virtualized = BoolCol(dbName='virtualized')

    @cachedproperty
    def specific_build(self):
        """See `IBuildQueue`."""
        bfj = self._build_farm_job
        specific_source = specific_build_farm_job_sources()[bfj.job_type]
        return specific_source.getByBuildFarmJob(bfj)

    @property
    def build_cookie(self):
        """See `IBuildQueue`."""
        return self.specific_build.build_cookie

    def _clear_specific_build_cache(self):
        del get_property_cache(self).specific_build

    @staticmethod
    def preloadSpecificBuild(queues):
        """Populate the specific_build cache of each given queue entry,
        batching lookups per job type."""
        # Circular import.
        from lp.buildmaster.model.buildfarmjob import BuildFarmJob
        queues = [removeSecurityProxy(bq) for bq in queues]
        load_related(BuildFarmJob, queues, ['_build_farm_job_id'])
        bfj_to_bq = dict((bq._build_farm_job, bq) for bq in queues)
        key = attrgetter('_build_farm_job.job_type')
        for job_type, group in groupby(sorted(queues, key=key), key=key):
            source = getUtility(ISpecificBuildFarmJobSource, job_type.name)
            builds = source.getByBuildFarmJobs(
                [bq._build_farm_job for bq in group])
            for build in builds:
                bq = bfj_to_bq[removeSecurityProxy(build).build_farm_job]
                get_property_cache(bq).specific_build = build

    @property
    def current_build_duration(self):
        """See `IBuildQueue`."""
        date_started = self.date_started
        if date_started is None:
            return None
        else:
            return self._now() - date_started

    def destroySelf(self):
        """Remove this record."""
        builder = self.builder
        specific_build = self.specific_build
        Store.of(self).remove(self)
        Store.of(self).flush()
        if builder is not None:
            del get_property_cache(builder).currentjob
        del get_property_cache(specific_build).buildqueue_record
        self._clear_specific_build_cache()

    def manualScore(self, value):
        """See `IBuildQueue`."""
        self.lastscore = value
        # Mark the score as pinned so score() will not overwrite it.
        self.manual = True

    def score(self):
        """See `IBuildQueue`."""
        if self.manual:
            return
        # Allow the `IBuildFarmJob` instance with the data/logic specific to
        # the job at hand to calculate the score as appropriate.
        self.lastscore = self.specific_build.calculateScore()

    def markAsBuilding(self, builder):
        """See `IBuildQueue`."""
        self.builder = builder
        self.status = BuildQueueStatus.RUNNING
        self.date_started = UTC_NOW
        self.specific_build.updateStatus(BuildStatus.BUILDING)
        if builder is not None:
            del get_property_cache(builder).currentjob

    def collectStatus(self, slave_status):
        """See `IBuildQueue`."""
        builder_status = slave_status["builder_status"]
        if builder_status == "BuilderStatus.ABORTING":
            self.logtail = "Waiting for slave process to be terminated"
        elif slave_status.get("logtail") is not None:
            # slave_status["logtail"] is normally an xmlrpclib.Binary
            # instance, and the contents might include invalid UTF-8 due to
            # being a fixed number of bytes from the tail of the log.  Turn
            # it into Unicode as best we can.
            # The original `str(...).decode(...)` only works on Python 2
            # (py3 `str` has no decode()); extract the Binary payload and
            # decode the raw bytes explicitly instead.
            logtail = slave_status.get("logtail")
            data = getattr(logtail, 'data', logtail)
            if isinstance(data, bytes):
                data = data.decode("UTF-8", errors="replace")
            self.logtail = data

    def suspend(self):
        """See `IBuildQueue`."""
        if self.status != BuildQueueStatus.WAITING:
            raise AssertionError("Only waiting jobs can be suspended.")
        self.status = BuildQueueStatus.SUSPENDED

    def resume(self):
        """See `IBuildQueue`."""
        if self.status != BuildQueueStatus.SUSPENDED:
            raise AssertionError("Only suspended jobs can be resumed.")
        self.status = BuildQueueStatus.WAITING

    def reset(self):
        """See `IBuildQueue`."""
        builder = self.builder
        self.builder = None
        self.status = BuildQueueStatus.WAITING
        self.date_started = None
        self.logtail = None
        self.specific_build.updateStatus(BuildStatus.NEEDSBUILD)
        if builder is not None:
            del get_property_cache(builder).currentjob

    def cancel(self):
        """See `IBuildQueue`."""
        if self.status == BuildQueueStatus.WAITING:
            # If the job's not yet on a slave then we can just
            # short-circuit to completed cancellation.
            self.markAsCancelled()
        elif self.status == BuildQueueStatus.RUNNING:
            # Otherwise set the statuses to CANCELLING so buildd-manager
            # can kill the slave, grab the log, and call
            # markAsCancelled() when it's done.
            self.status = BuildQueueStatus.CANCELLING
            self.specific_build.updateStatus(BuildStatus.CANCELLING)
        else:
            raise AssertionError("Tried to cancel %r from %s" % (
                self, self.status.name))

    def markAsCancelled(self):
        """See `IBuildQueue`."""
        self.specific_build.updateStatus(BuildStatus.CANCELLED)
        self.destroySelf()

    def getEstimatedJobStartTime(self, now=None):
        """See `IBuildQueue`."""
        from lp.buildmaster.queuedepth import estimate_job_start_time
        return estimate_job_start_time(self, now or self._now())

    @staticmethod
    def _now():
        """Return current time (UTC). Overridable for test purposes."""
        return datetime.now(pytz.utc)
class SipAccountData(SQLObject):
    """Per-account data for a SipAccount, held as a JSON profile."""

    class sqlmeta:
        # Explicit table name, overriding SQLObject's derived default.
        table = 'sip_accounts_data'

    # Row is deleted together with its SipAccount (cascade=True).
    account = ForeignKey('SipAccount', cascade=True)
    profile = JSONCol()
class FileData(SQLObject):
    """A file record: its name, containing directory and metadata link."""

    name = StringCol()
    # NOTE(review): despite the name, this is a foreign key to a
    # DirectoryData row, not a path string.
    relative_path = ForeignKey("DirectoryData")
    # Whether this file is currently displayed; hidden by default.
    displayed = BoolCol(default=False)
    metadata = ForeignKey("FileMetaData")
class SOTestFKValidationB(SQLObject):
    """Apparent test fixture: holds a foreign key to SOTestFKValidationA to
    exercise SQLObject foreign-key validation."""
    name = StringCol()
    afk = ForeignKey("SOTestFKValidationA")
class BugTracker(SQLBase):
    """A class to access the BugTracker table in the database.

    Each BugTracker is a distinct instance of that bug tracking
    tool. For example, each Bugzilla deployment is a separate
    BugTracker. bugzilla.mozilla.org and bugzilla.gnome.org are each
    distinct BugTrackers.
    """
    _table = 'BugTracker'

    bugtrackertype = EnumCol(
        dbName='bugtrackertype', schema=BugTrackerType, notNull=True)
    name = StringCol(notNull=True, unique=True)
    title = StringCol(notNull=True)
    summary = StringCol(notNull=False)
    baseurl = StringCol(notNull=True)
    active = Bool(
        name='active', allow_none=False, default=True)
    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    contactdetails = StringCol(notNull=False)
    has_lp_plugin = BoolCol(notNull=False, default=False)
    products = SQLMultipleJoin(
        'Product', joinColumn='bugtracker', orderBy='name')
    watches = SQLMultipleJoin(
        'BugWatch', joinColumn='bugtracker', orderBy='-datecreated',
        prejoins=['bug'])

    # %-format templates, keyed by tracker type, for building remote
    # bug *filing* URLs.  Interpolated keys: base_url, remote_product,
    # summary, description (plus tracker/group_id/at_id for the
    # SourceForge case -- see getBugFilingAndSearchLinks).
    _filing_url_patterns = {
        BugTrackerType.BUGZILLA: (
            "%(base_url)s/enter_bug.cgi?product=%(remote_product)s"
            "&short_desc=%(summary)s&long_desc=%(description)s"),
        BugTrackerType.GITHUB: (
            "%(base_url)s/new?title=%(summary)s&body=%(description)s"),
        BugTrackerType.GITLAB: (
            "%(base_url)s/new"
            "?issue[title]=%(summary)s&issue[description]=%(description)s"),
        BugTrackerType.GOOGLE_CODE: (
            "%(base_url)s/entry?summary=%(summary)s&"
            "comment=%(description)s"),
        BugTrackerType.MANTIS: (
            "%(base_url)s/bug_report_advanced_page.php"
            "?summary=%(summary)s&description=%(description)s"),
        BugTrackerType.PHPPROJECT: (
            "%(base_url)s/report.php"
            "?in[sdesc]=%(summary)s&in[ldesc]=%(description)s"),
        BugTrackerType.ROUNDUP: (
            "%(base_url)s/issue?@template=item&title=%(summary)s"
            "&@note=%(description)s"),
        BugTrackerType.RT: (
            "%(base_url)s/Ticket/Create.html?Queue=%(remote_product)s"
            "&Subject=%(summary)s&Content=%(description)s"),
        BugTrackerType.SAVANE: (
            "%(base_url)s/bugs/?func=additem&group=%(remote_product)s"),
        BugTrackerType.SOURCEFORGE: (
            "%(base_url)s/%(tracker)s/?func=add&"
            "group_id=%(group_id)s&atid=%(at_id)s"),
        BugTrackerType.TRAC: (
            "%(base_url)s/newticket?summary=%(summary)s&"
            "description=%(description)s"),
        }

    # Same idea, but for remote bug *search* URLs.
    _search_url_patterns = {
        BugTrackerType.BUGZILLA: (
            "%(base_url)s/query.cgi?product=%(remote_product)s"
            "&short_desc=%(summary)s"),
        BugTrackerType.GITHUB: (
            "%(base_url)s?utf8=%%E2%%9C%%93"
            "&q=is%%3Aissue%%20is%%3Aopen%%20%(summary)s"),
        BugTrackerType.GITLAB: (
            "%(base_url)s?scope=all&utf8=%%E2%%9C%%93&state=opened"
            "&search=%(summary)s"),
        BugTrackerType.GOOGLE_CODE: "%(base_url)s/list?q=%(summary)s",
        BugTrackerType.DEBBUGS: (
            "%(base_url)s/cgi-bin/search.cgi?phrase=%(summary)s"
            "&attribute_field=package&attribute_operator=STROREQ"
            "&attribute_value=%(remote_product)s"),
        BugTrackerType.MANTIS: "%(base_url)s/view_all_bug_page.php",
        BugTrackerType.PHPPROJECT: (
            "%(base_url)s/search.php?search_for=%(summary)s"),
        BugTrackerType.ROUNDUP: (
            "%(base_url)s/issue?@template=search&@search_text=%(summary)s"),
        BugTrackerType.RT: (
            "%(base_url)s/Search/Build.html?Query=Queue = "
            "'%(remote_product)s' AND Subject LIKE '%(summary)s'"),
        BugTrackerType.SAVANE: (
            "%(base_url)s/bugs/?func=search&group=%(remote_product)s"),
        BugTrackerType.SOURCEFORGE: (
            "%(base_url)s/search/?group_id=%(group_id)s"
            "&some_word=%(summary)s&type_of_search=artifact"),
        BugTrackerType.TRAC: "%(base_url)s/search?ticket=on&q=%(summary)s",
        }

    @property
    def _custom_filing_url_patterns(self):
        """Return a dict of bugtracker-specific bugfiling URL patterns."""
        gnome_bugzilla = getUtility(ILaunchpadCelebrities).gnome_bugzilla
        return {
            gnome_bugzilla: (
                "%(base_url)s/enter_bug.cgi?product=%(remote_product)s"
                "&short_desc=%(summary)s&comment=%(description)s"),
            }

    @property
    def latestwatches(self):
        """See `IBugTracker`."""
        # `watches` is ordered by -datecreated, so this is the ten
        # most recently created watches.
        return self.watches[:10]

    @property
    def multi_product(self):
        """Return True if this BugTracker tracks multiple projects."""
        if self.bugtrackertype not in SINGLE_PRODUCT_BUGTRACKERTYPES:
            return True
        else:
            return False

    def getBugFilingAndSearchLinks(self, remote_product, summary=None,
                                   description=None, remote_component=None):
        """See `IBugTracker`."""
        bugtracker_urls = {'bug_filing_url': None, 'bug_search_url': None}

        if remote_product is None and self.multi_product:
            # Don't try to return anything if remote_product is required
            # for this BugTrackerType and one hasn't been passed.
            return bugtracker_urls

        if remote_product is None:
            # Turn the remote product into an empty string so that
            # quote() doesn't blow up later on.
            remote_product = ''

        if remote_component is None:
            # Ditto for remote component.
            remote_component = ''

        if self in self._custom_filing_url_patterns:
            # Some bugtrackers are customised to accept different
            # querystring parameters from the default. We special-case
            # these.
            bug_filing_pattern = self._custom_filing_url_patterns[self]
        else:
            bug_filing_pattern = self._filing_url_patterns.get(
                self.bugtrackertype, None)

        bug_search_pattern = self._search_url_patterns.get(
            self.bugtrackertype, None)

        # Make sure that we don't put > 1 '/' in returned URLs.
        base_url = self.baseurl.rstrip('/')

        # If summary or description are None, convert them to empty
        # strings to that we don't try to pass anything to the upstream
        # bug tracker.
        if summary is None:
            summary = ''
        if description is None:
            description = ''

        # UTF-8 encode the description and summary so that quote()
        # doesn't break if they contain unicode characters it doesn't
        # understand.
        summary = summary.encode('utf-8')
        description = description.encode('utf-8')

        if self.bugtrackertype == BugTrackerType.SOURCEFORGE:
            try:
                # SourceForge bug trackers use a group ID and an ATID to
                # file a bug, rather than a product name. remote_product
                # should be an ampersand-separated string in the form
                # 'group_id&atid'
                group_id, at_id = remote_product.split('&')
            except ValueError:
                # If remote_product contains something that's not valid
                # in a SourceForge context we just return early.
                # NOTE(review): this returns None rather than the
                # bugtracker_urls dict returned on every other path --
                # callers presumably tolerate both; confirm before
                # changing.
                return None

            # If this bug tracker is the SourceForge celebrity the link
            # is to the new bug tracker rather than the old one.
            sf_celeb = getUtility(ILaunchpadCelebrities).sourceforge_tracker
            if self == sf_celeb:
                tracker = 'tracker2'
            else:
                tracker = 'tracker'

            url_components = {
                'base_url': base_url,
                'tracker': quote(tracker),
                'group_id': quote(group_id),
                'at_id': quote(at_id),
                'summary': quote(summary),
                'description': quote(description),
                }
        else:
            url_components = {
                'base_url': base_url,
                'remote_product': quote(remote_product),
                'remote_component': quote(remote_component),
                'summary': quote(summary),
                'description': quote(description),
                }

        if bug_filing_pattern is not None:
            bugtracker_urls['bug_filing_url'] = (
                bug_filing_pattern % url_components)
        if bug_search_pattern is not None:
            bugtracker_urls['bug_search_url'] = (
                bug_search_pattern % url_components)

        return bugtracker_urls

    def getBugsWatching(self, remotebug):
        """See `IBugTracker`."""
        # We special-case email address bug trackers. Since we don't
        # record a remote bug id for them we can never know which bugs
        # are already watching a remote bug.
        if self.bugtrackertype == BugTrackerType.EMAILADDRESS:
            return []
        return shortlist(
            Store.of(self).find(
                Bug,
                BugWatch.bugID == Bug.id,
                BugWatch.bugtrackerID == self.id,
                BugWatch.remotebug == remotebug).config(
                    distinct=True).order_by(Bug.datecreated))

    @property
    def watches_ready_to_check(self):
        # Watches with a scheduled next_check that is already due.
        return Store.of(self).find(
            BugWatch,
            BugWatch.bugtracker == self,
            Not(BugWatch.next_check == None),
            BugWatch.next_check <= datetime.now(timezone('UTC')))

    @property
    def watches_with_unpushed_comments(self):
        # Watches that have bug messages not yet pushed upstream
        # (no remote_comment_id recorded).
        return Store.of(self).find(
            BugWatch,
            BugWatch.bugtracker == self,
            BugMessage.bugwatch == BugWatch.id,
            BugMessage.remote_comment_id == None).config(distinct=True)

    @property
    def watches_needing_update(self):
        """All watches needing some sort of update.

        :return: The union of `watches_ready_to_check` and
            `watches_with_unpushed_comments`.
        """
        return self.watches_ready_to_check.union(
            self.watches_with_unpushed_comments)

    # Join to return a list of BugTrackerAliases relating to this
    # BugTracker.
    _bugtracker_aliases = SQLMultipleJoin(
        'BugTrackerAlias', joinColumn='bugtracker')

    def _get_aliases(self):
        """See `IBugTracker.aliases`."""
        alias_urls = set(alias.base_url for alias in self._bugtracker_aliases)
        # Although it does no harm if the current baseurl is also an
        # alias, we hide it and all its permutations to avoid
        # confusion.
        alias_urls.difference_update(base_url_permutations(self.baseurl))
        return tuple(sorted(alias_urls))

    def _set_aliases(self, alias_urls):
        """See `IBugTracker.aliases`."""
        if alias_urls is None:
            alias_urls = set()
        else:
            alias_urls = set(alias_urls)

        current_aliases_by_url = dict(
            (alias.base_url, alias) for alias in self._bugtracker_aliases)
        # Make a set of the keys, i.e. a set of current URLs.
        current_alias_urls = set(current_aliases_by_url)

        # URLs we need to add as aliases.
        to_add = alias_urls - current_alias_urls
        # URL aliases we need to delete.
        to_del = current_alias_urls - alias_urls

        for url in to_add:
            BugTrackerAlias(bugtracker=self, base_url=url)
        for url in to_del:
            alias = current_aliases_by_url[url]
            alias.destroySelf()

    aliases = property(
        _get_aliases, _set_aliases, None,
        """A list of the alias URLs. See `IBugTracker`.

        The aliases are found by querying BugTrackerAlias. Assign an
        iterable of URLs or None to set or remove aliases.
        """)

    @property
    def imported_bug_messages(self):
        """See `IBugTracker`."""
        return Store.of(self).find(
            BugMessage,
            BugMessage.bugwatchID == BugWatch.id,
            BugWatch.bugtrackerID == self.id).order_by(BugMessage.id)

    def getLinkedPersonByName(self, name):
        """Return the Person with a given name on this bugtracker."""
        return BugTrackerPerson.selectOneBy(name=name, bugtracker=self)

    def linkPersonToSelf(self, name, person):
        """See `IBugTrackerSet`."""
        # Check that this name isn't already in use for this bugtracker.
        if self.getLinkedPersonByName(name) is not None:
            raise BugTrackerPersonAlreadyExists(
                "Name '%s' is already in use for bugtracker '%s'." %
                (name, self.name))
        bugtracker_person = BugTrackerPerson(
            name=name, bugtracker=self, person=person)
        return bugtracker_person

    def ensurePersonForSelf(
            self, display_name, email, rationale, creation_comment):
        """Return a Person that is linked to this bug tracker."""
        # If we have an email address to work with we can use
        # ensurePerson() to get the Person we need.
        if email is not None:
            return getUtility(IPersonSet).ensurePerson(
                email, display_name, rationale, creation_comment)

        # First, see if there's already a BugTrackerPerson for this
        # display_name on this bugtracker. If there is, return it.
        bugtracker_person = self.getLinkedPersonByName(display_name)
        if bugtracker_person is not None:
            return bugtracker_person.person

        # Generate a valid Launchpad name for the Person.
        base_canonical_name = (
            "%s-%s" % (sanitize_name(display_name.lower()), self.name))
        canonical_name = base_canonical_name

        person_set = getUtility(IPersonSet)
        index = 0
        # Append a numeric suffix until the name is unique.
        while person_set.getByName(canonical_name) is not None:
            index += 1
            canonical_name = "%s-%s" % (base_canonical_name, index)

        person = person_set.createPersonWithoutEmail(
            canonical_name, rationale, creation_comment,
            displayname=display_name)

        # Link the Person to the bugtracker for future reference.
        bugtracker_person = self.linkPersonToSelf(display_name, person)

        return person

    def resetWatches(self, new_next_check=None):
        """See `IBugTracker`."""
        if new_next_check is None:
            # Spread the rechecks over a random interval within the
            # next day to avoid a thundering herd.
            new_next_check = SQL(
                "now() at time zone 'UTC' + (random() * interval '1 day')")
        store = Store.of(self)
        store.find(BugWatch, BugWatch.bugtracker == self).set(
            next_check=new_next_check, lastchecked=None,
            last_error_type=None)

    def addRemoteComponentGroup(self, component_group_name):
        """See `IBugTracker`."""
        if component_group_name is None:
            component_group_name = "default"

        component_group = BugTrackerComponentGroup()
        component_group.name = component_group_name
        component_group.bug_tracker = self

        store = IStore(BugTrackerComponentGroup)
        store.add(component_group)
        # NOTE(review): this commits the transaction immediately --
        # unusual for model code; confirm callers rely on it.
        store.commit()

        return component_group

    def getAllRemoteComponentGroups(self):
        """See `IBugTracker`."""
        component_groups = []
        component_groups = Store.of(self).find(
            BugTrackerComponentGroup,
            BugTrackerComponentGroup.bug_tracker == self.id)
        component_groups = component_groups.order_by(
            BugTrackerComponentGroup.name)
        return component_groups

    def getRemoteComponentGroup(self, component_group_name):
        """See `IBugTracker`."""
        component_group = None
        store = IStore(BugTrackerComponentGroup)
        if component_group_name is None:
            return None
        elif component_group_name.isdigit():
            # An all-digit name is treated as a database id.
            component_group_id = int(component_group_name)
            component_group = store.find(
                BugTrackerComponentGroup,
                BugTrackerComponentGroup.id == component_group_id).one()
        else:
            component_group = store.find(
                BugTrackerComponentGroup,
                BugTrackerComponentGroup.name == component_group_name).one()
        return component_group

    def getRemoteComponentForDistroSourcePackageName(
            self, distribution, sourcepackagename):
        """See `IBugTracker`."""
        if distribution is None:
            return None
        dsp = distribution.getSourcePackage(sourcepackagename)
        if dsp is None:
            return None
        return Store.of(self).find(
            BugTrackerComponent,
            BugTrackerComponent.distribution == distribution.id,
            BugTrackerComponent.source_package_name ==
                dsp.sourcepackagename.id).one()

    def getRelatedPillars(self, user=None):
        """See `IBugTracker`."""
        products = IStore(Product).find(
            Product,
            Product.bugtrackerID == self.id,
            Product.active == True,
            ProductSet.getProductPrivacyFilter(user)).order_by(Product.name)
        groups = IStore(ProjectGroup).find(
            ProjectGroup,
            ProjectGroup.bugtrackerID == self.id,
            ProjectGroup.active == True).order_by(ProjectGroup.name)
        return groups, products
class SOTestWorkKey(SQLObject):
    """Work record whose primary-key column is named 'work_id'."""

    class sqlmeta:
        # Override the default 'id' primary-key column name.
        idName = "work_id"

    # Deleting the composer cascades to its works.
    composer = ForeignKey('SOTestComposerKey', cascade=True)
    title = StringCol()
class Revision(SQLBase):
    """See IRevision."""

    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    log_body = StringCol(notNull=True)

    gpgkey = ForeignKey(dbName='gpgkey', foreignKey='GPGKey', default=None)

    revision_author_id = Int(name='revision_author', allow_none=False)
    revision_author = Reference(revision_author_id, 'RevisionAuthor.id')

    # Globally unique revision identifier; byRevisionID() looks up by it.
    revision_id = StringCol(notNull=True, alternateID=True,
                            alternateMethodName='byRevisionID')
    # May be NULL; allocateKarma guards against future-dated values.
    revision_date = UtcDateTimeCol(notNull=False)

    karma_allocated = BoolCol(default=False, notNull=True)

    properties = SQLMultipleJoin('RevisionProperty', joinColumn='revision')

    @property
    def parents(self):
        """See IRevision.parents"""
        return shortlist(
            RevisionParent.selectBy(revision=self, orderBy='sequence'))

    @property
    def parent_ids(self):
        """Sequence of globally unique ids for the parents of this revision.

        The corresponding Revision objects can be retrieved, if they are
        present in the database, using the RevisionSet Zope utility.
        """
        return [parent.parent_id for parent in self.parents]

    def getLefthandParent(self):
        """Return the first parent revision, or the null revision."""
        if len(self.parent_ids) == 0:
            parent_id = NULL_REVISION
        else:
            parent_id = self.parent_ids[0]
        return RevisionSet().getByRevisionId(parent_id)

    def getProperties(self):
        """See `IRevision`."""
        return dict((prop.name, prop.value) for prop in self.properties)

    def allocateKarma(self, branch):
        """See `IRevision`."""
        # Always set karma_allocated to True so that Lp does not reprocess
        # junk and invalid user branches because they do not get karma.
        self.karma_allocated = True
        # If we know who the revision author is, give them karma.
        author = self.revision_author.person
        if author is not None and branch is not None:
            # Backdate the karma to the time the revision was created.  If
            # the revision_date on the revision is in future (for whatever
            # weird reason) we will use the date_created from the revision
            # (which will be now) as the karma date created.  Having future
            # karma events is both wrong, as the revision has been created
            # (and it is lying), and a problem with the way the Launchpad
            # code currently does its karma degradation over time.
            karma_date = min(self.revision_date, self.date_created)
            karma = branch.target.assignKarma(
                author, 'revisionadded', karma_date)
            return karma
        else:
            return None

    def getBranch(self, allow_private=False, allow_junk=True):
        """See `IRevision`."""
        # Imported here to avoid circular imports.
        from lp.code.model.branch import Branch
        from lp.code.model.branchrevision import BranchRevision

        store = Store.of(self)

        query = And(
            self.id == BranchRevision.revision_id,
            BranchRevision.branch_id == Branch.id)
        if not allow_private:
            query = And(
                query,
                Branch.information_type.is_in(PUBLIC_INFORMATION_TYPES))
        if not allow_junk:
            query = And(
                query,
                # Not-junk branches are either associated with a product
                # or with a source package.
                Or(
                    (Branch.product != None),
                    And(
                        Branch.sourcepackagename != None,
                        Branch.distroseries != None)))
        result_set = store.find(Branch, query)
        # Prefer branches owned by the revision author, then earliest
        # appearance of the revision in the branch history.
        if self.revision_author.person is None:
            result_set.order_by(Asc(BranchRevision.sequence))
        else:
            result_set.order_by(
                Branch.ownerID != self.revision_author.personID,
                Asc(BranchRevision.sequence))
        return result_set.first()
class SOTestOtherColumn(SQLObject):
    """Fixture exercising foreign keys against a non-default column."""

    # Ordinary FK against the target table's primary key.
    key1 = ForeignKey('SOTestComposerKey', default=None)
    # FK that references the target's 'id2' column instead of its id.
    key2 = ForeignKey('SOTestComposerKey', refColumn='id2', default=None)
class Builder(SQLBase):
    _table = 'Builder'
    _defaultOrder = ['id']

    url = StringCol(dbName='url', notNull=True)
    name = StringCol(dbName='name', notNull=True)
    title = StringCol(dbName='title', notNull=True)
    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    # Backing column for the `builderok` property below.
    _builderok = BoolCol(dbName='builderok', notNull=True)
    failnotes = StringCol(dbName='failnotes')
    virtualized = BoolCol(dbName='virtualized', default=True, notNull=True)
    manual = BoolCol(dbName='manual', default=False)
    vm_host = StringCol(dbName='vm_host')
    active = BoolCol(dbName='active', notNull=True, default=True)
    failure_count = IntCol(dbName='failure_count', default=0, notNull=True)
    version = StringCol(dbName='version')
    clean_status = EnumCol(
        enum=BuilderCleanStatus, default=BuilderCleanStatus.DIRTY)
    vm_reset_protocol = EnumCol(enum=BuilderResetProtocol)
    date_clean_status_changed = UtcDateTimeCol()

    def _getBuilderok(self):
        return self._builderok

    def _setBuilderok(self, value):
        # Re-enabling a builder resets its failure count and marks it
        # dirty so it gets cleaned before its next job.
        self._builderok = value
        if value is True:
            self.resetFailureCount()
            self.setCleanStatus(BuilderCleanStatus.DIRTY)

    builderok = property(_getBuilderok, _setBuilderok)

    def gotFailure(self):
        """See `IBuilder`."""
        self.failure_count += 1

    def resetFailureCount(self):
        """See `IBuilder`."""
        self.failure_count = 0

    @cachedproperty
    def _processors_cache(self):
        """See `IBuilder`."""
        # This _cache method is a quick hack to get a settable
        # cachedproperty, mostly for the webservice's benefit.
        return list(
            Store.of(self).find(
                Processor,
                BuilderProcessor.processor_id == Processor.id,
                BuilderProcessor.builder == self).order_by(Processor.name))

    def _processors(self):
        return self._processors_cache

    def _set_processors(self, processors):
        existing = set(self.processors)
        wanted = set(processors)
        # Enable the wanted but missing.
        for processor in (wanted - existing):
            bp = BuilderProcessor()
            bp.builder = self
            bp.processor = processor
            Store.of(self).add(bp)
        # Disable the unwanted but present.
        Store.of(self).find(
            BuilderProcessor,
            BuilderProcessor.builder == self,
            BuilderProcessor.processor_id.is_in(
                processor.id for processor in existing - wanted)).remove()
        # Invalidate the cached list so the next read refetches.
        del get_property_cache(self)._processors_cache

    processors = property(_processors, _set_processors)

    @property
    def processor(self):
        """See `IBuilder`."""
        # Convenience single-processor view over `processors`.
        try:
            return self.processors[0]
        except IndexError:
            return None

    @processor.setter
    def processor(self, processor):
        self.processors = [processor]

    @cachedproperty
    def currentjob(self):
        """See IBuilder"""
        return getUtility(IBuildQueueSet).getByBuilder(self)

    @property
    def current_build(self):
        if self.currentjob is None:
            return None
        return self.currentjob.specific_build

    def setCleanStatus(self, status):
        """See `IBuilder`."""
        # Only stamp the change date on an actual transition.
        if status != self.clean_status:
            self.clean_status = status
            self.date_clean_status_changed = UTC_NOW

    def failBuilder(self, reason):
        """See IBuilder"""
        # XXX cprov 2007-04-17: ideally we should be able to notify the
        # the buildd-admins about FAILED builders. One alternative is to
        # make the buildd_cronscript (slave-scanner, in this case) to exit
        # with error, for those cases buildd-sequencer automatically sends
        # an email to admins with the script output.
        self.builderok = False
        self.failnotes = reason

    def getBuildRecords(self, build_state=None, name=None, pocket=None,
                        arch_tag=None, user=None, binary_only=True):
        """See IHasBuildRecords."""
        if binary_only:
            return getUtility(IBinaryPackageBuildSet).getBuildsForBuilder(
                self.id, build_state, name, pocket, arch_tag, user)
        else:
            if arch_tag is not None or name is not None or pocket is not None:
                raise IncompatibleArguments(
                    "The 'arch_tag', 'name', and 'pocket' parameters can be "
                    "used only with binary_only=True.")
            return getUtility(IBuildFarmJobSet).getBuildsForBuilder(
                self, status=build_state, user=user)

    def _getSlaveScannerLogger(self):
        """Return the logger instance from buildd-slave-scanner.py."""
        # XXX cprov 20071120: Ideally the Launchpad logging system
        # should be able to configure the root-logger instead of creating
        # a new object, then the logger lookups won't require the specific
        # name argument anymore. See bug 164203.
        logger = logging.getLogger('slave-scanner')
        return logger

    def acquireBuildCandidate(self):
        """See `IBuilder`."""
        candidate = self._findBuildCandidate()
        if candidate is not None:
            candidate.markAsBuilding(self)
            # Commit so the claim is visible to other scanner processes.
            transaction.commit()
        return candidate

    def _findBuildCandidate(self):
        """Find a candidate job for dispatch to an idle buildd slave.

        The pending BuildQueue item with the highest score for this builder
        or None if no candidate is available.

        :return: A candidate job.
        """
        logger = self._getSlaveScannerLogger()

        # Per-job-type extra selection criteria, expressed as raw SQL by
        # each build farm job source.
        job_type_conditions = []
        job_sources = specific_build_farm_job_sources()
        for job_type, job_source in job_sources.iteritems():
            query = job_source.addCandidateSelectionCriteria(
                self.processor, self.virtualized)
            if query:
                job_type_conditions.append(
                    Or(
                        BuildFarmJob.job_type != job_type,
                        Exists(SQL(query))))

        def get_int_feature_flag(flag):
            # Returns None (implicitly) for unset or unparseable flags.
            value_str = getFeatureFlag(flag)
            if value_str is not None:
                try:
                    return int(value_str)
                except ValueError:
                    logger.error('invalid %s %r', flag, value_str)

        score_conditions = []
        minimum_scores = set()
        for processor in self.processors:
            minimum_scores.add(
                get_int_feature_flag(
                    'buildmaster.minimum_score.%s' % processor.name))
        minimum_scores.add(get_int_feature_flag('buildmaster.minimum_score'))
        minimum_scores.discard(None)
        # If there are minimum scores set for any of the processors
        # supported by this builder, use the highest of them.  This is
        # a bit weird and not completely ideal, but it's a safe
        # conservative option and avoids substantially complicating the
        # candidate query.
        if minimum_scores:
            score_conditions.append(
                BuildQueue.lastscore >= max(minimum_scores))

        store = IStore(self.__class__)
        candidate_jobs = store.using(BuildQueue, BuildFarmJob).find(
            (BuildQueue.id, ),
            BuildFarmJob.id == BuildQueue._build_farm_job_id,
            BuildQueue.status == BuildQueueStatus.WAITING,
            Or(
                BuildQueue.processorID.is_in(
                    Select(
                        BuilderProcessor.processor_id,
                        tables=[BuilderProcessor],
                        where=BuilderProcessor.builder == self)),
                BuildQueue.processor == None),
            BuildQueue.virtualized == self.virtualized,
            BuildQueue.builder == None,
            And(*(job_type_conditions + score_conditions))).order_by(
                Desc(BuildQueue.lastscore), BuildQueue.id)

        # Only try the first handful of jobs. It's much easier on the
        # database, the chance of a large prefix of the queue being
        # bad candidates is negligible, and we want reasonably bounded
        # per-cycle performance even if the prefix is large.
        for (candidate_id, ) in candidate_jobs[:10]:
            candidate = getUtility(IBuildQueueSet).get(candidate_id)
            job_source = job_sources[removeSecurityProxy(
                candidate)._build_farm_job.job_type]
            candidate_approved = job_source.postprocessCandidate(
                candidate, logger)
            if candidate_approved:
                return candidate

        return None
def test2():
    """Exercise adding a ForeignKey column to a live table.

    NOTE(review): this drops SOTestWorkKey's table but then sets up
    SOTestComposerKey and SOTestWorkKey2 -- presumably intentional for
    this scenario; verify against the sibling fixtures.
    """
    SOTestWorkKey._connection = getConnection()
    InstalledTestDatabase.drop(SOTestWorkKey)
    setupClass([SOTestComposerKey, SOTestWorkKey2], force=True)
    # changeSchema=True makes addColumn issue the ALTER TABLE as well as
    # registering the column on the class.
    SOTestWorkKey2.sqlmeta.addColumn(
        ForeignKey('SOTestComposerKey'), changeSchema=True)
class Announcement(SQLBase):
    """A news item.

    These allow us to generate lists of recent news for project groups,
    products and distributions.
    """

    _defaultOrder = ['-date_announced', '-date_created']

    date_created = UtcDateTimeCol(
        dbName='date_created', notNull=True, default=UTC_NOW)
    date_announced = UtcDateTimeCol(default=None)
    date_last_modified = UtcDateTimeCol(
        dbName='date_updated', default=None)
    registrant = ForeignKey(
        dbName='registrant', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    product = ForeignKey(dbName='product', foreignKey='Product')
    projectgroup = ForeignKey(dbName='project', foreignKey='ProjectGroup')
    distribution = ForeignKey(
        dbName='distribution', foreignKey='Distribution')
    title = StringCol(notNull=True)
    summary = StringCol(default=None)
    url = StringCol(default=None)
    active = BoolCol(notNull=True, default=True)

    def modify(self, title, summary, url):
        """Update title/summary/url, stamping the modification date
        whenever any of them actually changes."""
        changed = False
        if title != self.title:
            self.title = title
            changed = True
        if summary != self.summary:
            self.summary = summary
            changed = True
        if url != self.url:
            self.url = url
            changed = True
        if changed:
            self.date_last_modified = UTC_NOW

    @property
    def target(self):
        """The pillar this announcement is attached to."""
        for pillar in (self.product, self.projectgroup, self.distribution):
            if pillar is not None:
                return pillar
        raise AssertionError('Announcement has no obvious target')

    @property
    def date_updated(self):
        """Last modification date, falling back to the creation date."""
        last_modified = self.date_last_modified
        return self.date_created if last_modified is None else last_modified

    def retarget(self, target):
        """See `IAnnouncement`."""
        # Exactly one of the three pillar columns may be set at a time.
        if IProduct.providedBy(target):
            pillars = (target, None, None)
        elif IDistribution.providedBy(target):
            pillars = (None, None, target)
        elif IProjectGroup.providedBy(target):
            pillars = (None, target, None)
        else:
            raise AssertionError('Unknown target')
        self.product, self.projectgroup, self.distribution = pillars
        self.date_last_modified = UTC_NOW

    def retract(self):
        """See `IAnnouncement`."""
        self.active = False
        self.date_last_modified = UTC_NOW

    def setPublicationDate(self, publication_date):
        """See `IAnnouncement`."""
        self.date_announced = publication_date
        self.date_last_modified = None
        self.active = True

    @property
    def future(self):
        """See `IAnnouncement`."""
        announced = self.date_announced
        return announced is None or announced > utc_now()

    @property
    def published(self):
        """See `IAnnouncement`."""
        if self.active is False:
            return False
        return not self.future
class SOTestFKValidationA(SQLObject):
    """FK-validation fixture; mutually references SOTestFKValidationB."""

    name = StringCol()
    bfk = ForeignKey("SOTestFKValidationB")
    # Optional FK to SOTestFKValidationC -- presumably the case where
    # the target class is not set up; TODO confirm in the test module.
    cfk = ForeignKey("SOTestFKValidationC", default=None)
class SRThrough1(SQLObject):
    """'One' side of the through-join test fixture."""

    three = ForeignKey('SRThrough3')
    # Reverse of SRThrough2.one; joinColumn names the FK column.
    twos = SQLMultipleJoin('SRThrough2', joinColumn='oneID')
class TranslationGroup(SQLBase):
    """A TranslationGroup."""

    implements(ITranslationGroup)

    # default to listing alphabetically
    _defaultOrder = 'name'

    # db field names
    name = StringCol(unique=True, alternateID=True, notNull=True)
    title = StringCol(notNull=True)
    summary = StringCol(notNull=True)
    datecreated = UtcDateTimeCol(notNull=True, default=DEFAULT)
    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)

    # useful joins
    distributions = SQLMultipleJoin(
        'Distribution', joinColumn='translationgroup')
    languages = SQLRelatedJoin(
        'Language', joinColumn='translationgroup',
        intermediateTable='Translator', otherColumn='language')
    translators = SQLMultipleJoin(
        'Translator', joinColumn='translationgroup')
    translation_guide_url = StringCol(notNull=False, default=None)

    def __getitem__(self, language_code):
        """See `ITranslationGroup`."""
        # Index the group by language code, e.g. group['pt_BR'].
        query = Store.of(self).find(
            Translator,
            Translator.translationgroup == self,
            Translator.languageID == Language.id,
            Language.code == language_code)

        translator = query.one()
        if translator is None:
            raise NotFoundError(language_code)

        return translator

    # used to note additions
    def add(self, content):
        """See ITranslationGroup."""
        return content

    # adding and removing translators
    def remove_translator(self, translator):
        """See ITranslationGroup."""
        Translator.delete(translator.id)

    # get a translator by language or code
    def query_translator(self, language):
        """See ITranslationGroup."""
        return Translator.selectOneBy(
            language=language, translationgroup=self)

    @property
    def products(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.product import Product
        return Product.selectBy(translationgroup=self.id, active=True)

    @property
    def projects(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.projectgroup import ProjectGroup
        return ProjectGroup.selectBy(translationgroup=self.id, active=True)

    # A limit of projects to get for the `top_projects`.
    TOP_PROJECTS_LIMIT = 6

    @property
    def top_projects(self):
        """See `ITranslationGroup`."""
        # XXX Danilo 2009-08-25: We should make this list show a list
        # of projects based on the top translations karma (bug #418493).
        # Fill up to the limit from distributions, then project groups,
        # then products, in that order.
        goal = self.TOP_PROJECTS_LIMIT
        projects = list(self.distributions[:goal])
        found = len(projects)
        if found < goal:
            projects.extend(
                list(self.projects[:goal - found]))
            found = len(projects)
        if found < goal:
            projects.extend(
                list(self.products[:goal - found]))
        return projects

    @property
    def number_of_remaining_projects(self):
        """See `ITranslationGroup`."""
        total = (
            self.projects.count() +
            self.products.count() +
            self.distributions.count())
        if total > self.TOP_PROJECTS_LIMIT:
            return total - self.TOP_PROJECTS_LIMIT
        else:
            return 0

    def fetchTranslatorData(self):
        """See `ITranslationGroup`."""
        # Fetch Translator, Language, and Person; but also prefetch the
        # icon information.
        using = [
            Translator,
            Language,
            Person,
            LeftJoin(LibraryFileAlias, LibraryFileAlias.id == Person.iconID),
            LeftJoin(
                LibraryFileContent,
                LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        tables = (
            Translator,
            Language,
            Person,
            LibraryFileAlias,
            LibraryFileContent,
            )
        translator_data = Store.of(self).using(*using).find(
            tables,
            Translator.translationgroup == self,
            Language.id == Translator.languageID,
            Person.id == Translator.translatorID)
        translator_data = translator_data.order_by(Language.englishname)
        # Expose only (Translator, Language, Person); the icon rows are
        # fetched purely to warm the cache.
        mapper = lambda row: row[slice(0, 3)]
        return DecoratedResultSet(translator_data, mapper)

    def fetchProjectsForDisplay(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.product import (
            Product,
            ProductWithLicenses,
            )
        using = [
            Product,
            LeftJoin(LibraryFileAlias, LibraryFileAlias.id == Product.iconID),
            LeftJoin(
                LibraryFileContent,
                LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        columns = (
            Product,
            ProductWithLicenses.composeLicensesColumn(),
            LibraryFileAlias,
            LibraryFileContent,
            )
        product_data = ISlaveStore(Product).using(*using).find(
            columns,
            Product.translationgroupID == self.id,
            Product.active == True)
        product_data = product_data.order_by(Product.displayname)
        return [
            ProductWithLicenses(product, tuple(licenses))
            for product, licenses, icon_alias, icon_content in product_data]

    def fetchProjectGroupsForDisplay(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.projectgroup import ProjectGroup
        using = [
            ProjectGroup,
            LeftJoin(
                LibraryFileAlias,
                LibraryFileAlias.id == ProjectGroup.iconID),
            LeftJoin(
                LibraryFileContent,
                LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        tables = (
            ProjectGroup,
            LibraryFileAlias,
            LibraryFileContent,
            )
        project_data = ISlaveStore(ProjectGroup).using(*using).find(
            tables,
            ProjectGroup.translationgroupID == self.id,
            ProjectGroup.active == True).order_by(ProjectGroup.displayname)
        return DecoratedResultSet(project_data, operator.itemgetter(0))

    def fetchDistrosForDisplay(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.distribution import Distribution
        using = [
            Distribution,
            LeftJoin(
                LibraryFileAlias,
                LibraryFileAlias.id == Distribution.iconID),
            LeftJoin(
                LibraryFileContent,
                LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        tables = (
            Distribution,
            LibraryFileAlias,
            LibraryFileContent,
            )
        distro_data = ISlaveStore(Distribution).using(*using).find(
            tables,
            Distribution.translationgroupID == self.id).order_by(
                Distribution.displayname)
        return DecoratedResultSet(distro_data, operator.itemgetter(0))
class SRThrough2(SQLObject):
    """'Two' side of the through-join test fixture."""

    one = ForeignKey('SRThrough1')
    # Many-to-many to SRThrough3; addRemoveName='Three' generates
    # addThree()/removeThree() helper methods.
    threes = SQLRelatedJoin('SRThrough3', addRemoveName='Three')
from sqlobject import ForeignKey, MixedCaseStyle, MixedCaseUnderscoreStyle, \ SQLObject, StringCol, Style # Hash of styles versus the database names resulting from 'columns' below. columns = ["ABCUpper", "abc_lower", "ABCamelCaseColumn"] styles = { Style: columns, MixedCaseUnderscoreStyle: ["abc_upper", "abc_lower", "ab_camel_case_column"], MixedCaseStyle: ["ABCUpper", "Abc_lower", "ABCamelCaseColumn"], } # Hash of styles versus the database names # resulting from a foreign key named 'FKey'. fkey = ForeignKey("DefaultStyleTest", name="FKey") fkeys = { Style: "FKeyID", MixedCaseUnderscoreStyle: "f_key_id", MixedCaseStyle: "FKeyID", } def make_columns(): global columns columns = [] for col_name in columns: columns.append(StringCol(name=col_name, length=10)) def do_col_test(DefaultStyleTest, style, dbnames):