class SourcePackageName(SQLBase):
    """A source package name record, unique by ``name``."""

    _table = 'SourcePackageName'

    name = StringCol(dbName='name', notNull=True, unique=True,
                     alternateID=True)

    # Joins to rows that reference this package name.
    potemplates = SQLMultipleJoin(
        'POTemplate', joinColumn='sourcepackagename')
    packagings = SQLMultipleJoin(
        'Packaging', joinColumn='sourcepackagename',
        orderBy='Packaging.id')

    def __unicode__(self):
        return self.name

    def __repr__(self):
        return "<%s '%s'>" % (self.__class__.__name__, self.name)

    @classmethod
    def ensure(cls, name):
        """Return the record for ``name``, creating it if it is absent."""
        try:
            return cls.byName(name)
        except SQLObjectNotFound:
            return cls(name=name)
class ViewPhone(ViewSQLObject):
    """Read-only aggregate view: one row per phone number with call totals.

    NOTE(review): presumably realized as a SQL view that groups PhoneCall
    rows by phone number -- confirm against the ViewSQLObject machinery.
    """

    class sqlmeta:
        # Use the underlying phone number's primary key as this view's id.
        idName = PhoneNumber.q.id
        # Join condition tying each call to its phone number.
        clause = PhoneCall.q.phoneNumberID == PhoneNumber.q.id

    # Aggregates computed over the joined PhoneCall rows.
    minutes = IntCol(dbName=func.SUM(PhoneCall.q.minutes))
    numberOfCalls = IntCol(dbName=func.COUNT(PhoneCall.q.phoneNumberID))
    # Columns lifted directly from PhoneNumber.
    number = StringCol(dbName=PhoneNumber.q.number)
    phoneNumber = ForeignKey('PhoneNumber', dbName=PhoneNumber.q.id)
    # Joins back to the call rows, raw and view-wrapped respectively.
    calls = SQLMultipleJoin('PhoneCall', joinColumn='phoneNumberID')
    vCalls = SQLMultipleJoin('ViewPhoneCall', joinColumn='phoneNumberID',
                             orderBy='id')
class Cve(SQLBase, BugLinkTargetMixin):
    """A CVE database record."""

    implements(ICve, IBugLinkTarget)

    _table = 'Cve'

    # CVE sequence number without the 'CVE-' prefix (see displayname/title).
    sequence = StringCol(notNull=True, alternateID=True)
    status = EnumCol(dbName='status', schema=CveStatus, notNull=True)
    description = StringCol(notNull=True)
    datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    datemodified = UtcDateTimeCol(notNull=True, default=UTC_NOW)

    # joins
    bugs = SQLRelatedJoin('Bug', intermediateTable='BugCve',
                          joinColumn='cve', otherColumn='bug', orderBy='id')
    bug_links = SQLMultipleJoin('BugCve', joinColumn='cve', orderBy='id')
    references = SQLMultipleJoin(
        'CveReference', joinColumn='cve', orderBy='id')

    @property
    def url(self):
        """See ICve."""
        # Canonical MITRE lookup URL for this CVE sequence.
        return ('http://www.cve.mitre.org/cgi-bin/cvename.cgi?name=%s'
                % self.sequence)

    @property
    def displayname(self):
        """Short display form, e.g. 'CVE-2004-0001'."""
        return 'CVE-%s' % self.sequence

    @property
    def title(self):
        """Display form including the status title."""
        return 'CVE-%s (%s)' % (self.sequence, self.status.title)

    # CveReference's
    def createReference(self, source, content, url=None):
        """See ICveReference."""
        return CveReference(cve=self, source=source, content=content,
                            url=url)

    def removeReference(self, ref):
        """Delete `ref`, which must belong to this CVE."""
        assert ref.cve == self
        CveReference.delete(ref.id)

    # Template methods for BugLinkTargetMixin
    buglinkClass = BugCve

    def createBugLink(self, bug):
        """See BugLinkTargetMixin."""
        return BugCve(cve=self, bug=bug)
class KarmaCategory(SQLBase):
    """See IKarmaCategory."""

    # List categories alphabetically by title, tie-broken by id.
    _defaultOrder = ['title', 'id']

    name = StringCol(notNull=True, alternateID=True)
    title = StringCol(notNull=True)
    summary = StringCol(notNull=True)

    # All karma actions belonging to this category, ordered by name.
    karmaactions = SQLMultipleJoin(
        'KarmaAction', joinColumn='category', orderBy='name')
class Plugin(ICTVObject):
    """A plugin known to the application, mirroring a directory under
    ``ictv/plugins`` and its ``config.yaml`` metadata."""

    name = StringCol(notNone=True, alternateID=True)
    description = StringCol(default=None)
    version = IntCol(notNone=True, default=0)
    # 'notfound' marks plugins present in the DB but missing on disk.
    activated = EnumCol(notNone=True, enumValues=['yes', 'no', 'notfound'])
    webapp = BoolCol(notNone=True, default=False)
    static = BoolCol(notNone=True, default=False)
    # The type and default param's values needed by every plugin instance.
    channels_params = JSONCol(notNone=True, default={})
    channels = SQLMultipleJoin('PluginChannel')
    params_access_rights = SQLMultipleJoin('PluginParamAccessRights')
    # Plugin-wide cache defaults; PluginChannel falls back to these when its
    # own cache columns are None.
    cache_activated_default = BoolCol(default=True)
    cache_validity_default = IntCol(default=60)
    keep_noncomplying_capsules_default = BoolCol(default=False)

    def _get_channels_number(self):
        """ Return the number of channels instantiated with this plugin. """
        return self.channels.count()

    def _get_screens_number(self):
        """ Return the number of screens that are subscribed to channels of this plugin. """
        plugin_channels = PluginChannel.select().filter(
            PluginChannel.q.plugin == self)
        # Screens directly subscribed to one of this plugin's channels.
        screens = set(plugin_channels.throughTo.subscriptions.throughTo.screen.
                      distinct())
        # Bundles that (transitively) contain a channel of this plugin.
        bundles = set(c for c in ChannelBundle.select()
                      if any(bc.plugin == self for bc in c.flatten()))
        for b in bundles:
            screens |= set(Subscription.select().filter(
                Subscription.q.channel == b).throughTo.screen.distinct())
        return len(screens)

    def _get_package_path(self):
        """ Returns the path to the package of this plugin.

        NOTE(review): returns None when the plugin package cannot be
        imported; callers such as update_plugins() pass the result straight
        to os.path.join -- confirm that path is unreachable for missing
        packages.
        """
        try:
            m = importlib.import_module('ictv.plugins.' + self.name)
            return m.__path__[0]
        except ImportError:
            return None

    @classmethod
    def update_plugins(cls, dirs):
        """ Takes the list of the plugins directories located in ictv/plugins
        and updates the database if they're not in db.

        :param dirs: The directory listing of ictv/plugins
        :return: the list of plugins present in updated database
        """
        s = set()
        plugins_list = []
        for p in Plugin.select():
            s.add(p.name)
            if p.name not in dirs:
                # Plugin exists in database but was not found in the plugins directory
                p.activated = 'notfound'
            else:
                path = os.path.join(p.package_path, 'config.yaml')
                if os.path.isfile(path):
                    # Plugin is considered to be found
                    if p.activated == 'notfound':
                        p.activated = 'no'
                    with open(path, 'r') as f:
                        # NOTE(review): yaml.load with a custom ordered-dict
                        # loader; config.yaml is operator-controlled, not
                        # untrusted input.
                        config = yaml.load(f, Loader=yamlordereddictloader.Loader)
                        p.webapp = config['plugin']['webapp']
                        p.static = config['plugin']['static']
                        p.description = config['plugin'].get(
                            'description', None)
                        if 'channels_params' in config:
                            # The plugin has channel specific parameters that can be changed from channel to channel
                            order = 0
                            for k, v in config['channels_params'].items():
                                # Sets the parameter to its default value.
                                p.channels_params[k] = v
                                if 'order' not in p.channels_params[k]:
                                    p.channels_params[k]['order'] = order
                                    order += 1
                                # Create missing access-rights rows for new
                                # parameters.
                                if PluginParamAccessRights.selectBy(
                                        plugin=p, name=k).getOne(None) is None:
                                    PluginParamAccessRights(plugin=p, name=k)
                            # Drop parameters no longer declared in the
                            # config file, along with their access rights.
                            for k in list(p.channels_params):
                                if k not in config['channels_params'].keys():
                                    p.channels_params.pop(k)
                                    PluginParamAccessRights.deleteBy(plugin=p, name=k)
                            p.channels_params = p.channels_params  # Force SQLObject update
                else:
                    p.activated = 'notfound'
            plugins_list.append(p)
        for p in dirs:
            if p not in s:
                # Plugin was not in database, it should be added but not activated
                plugins_list.append(Plugin(name=p, activated='no'))
        return plugins_list
class CodeImport(SQLBase):
    """See `ICodeImport`."""

    implements(ICodeImport)
    _table = 'CodeImport'
    _defaultOrder = ['id']

    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    branch = ForeignKey(dbName='branch', foreignKey='Branch', notNull=True)
    registrant = ForeignKey(
        dbName='registrant', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    assignee = ForeignKey(
        dbName='assignee', foreignKey='Person',
        storm_validator=validate_public_person, notNull=False, default=None)

    review_status = EnumCol(schema=CodeImportReviewStatus, notNull=True,
                            default=CodeImportReviewStatus.REVIEWED)

    rcs_type = EnumCol(schema=RevisionControlSystems, notNull=False,
                       default=None)

    # CVS imports use cvs_root/cvs_module; the other systems use url.
    cvs_root = StringCol(default=None)
    cvs_module = StringCol(default=None)
    url = StringCol(default=None)

    date_last_successful = UtcDateTimeCol(default=None)
    update_interval = IntervalCol(default=None)

    @property
    def effective_update_interval(self):
        """See `ICodeImport`."""
        # An explicitly configured interval wins over the per-VCS default.
        if self.update_interval is not None:
            return self.update_interval
        default_interval_dict = {
            RevisionControlSystems.CVS:
                config.codeimport.default_interval_cvs,
            RevisionControlSystems.SVN:
                config.codeimport.default_interval_subversion,
            RevisionControlSystems.BZR_SVN:
                config.codeimport.default_interval_subversion,
            RevisionControlSystems.GIT:
                config.codeimport.default_interval_git,
            RevisionControlSystems.BZR:
                config.codeimport.default_interval_bzr,
            }
        # The default can be removed when HG is fully purged.
        seconds = default_interval_dict.get(self.rcs_type, 21600)
        return timedelta(seconds=seconds)

    import_job = Reference(
        "<primary key>", "CodeImportJob.code_importID", on_remote=True)

    def getImportDetailsForDisplay(self):
        """See `ICodeImport`."""
        assert self.rcs_type is not None, (
            "Only makes sense for series with import details set.")
        if self.rcs_type == RevisionControlSystems.CVS:
            return '%s %s' % (self.cvs_root, self.cvs_module)
        elif self.rcs_type in (
                RevisionControlSystems.SVN,
                RevisionControlSystems.GIT,
                RevisionControlSystems.BZR_SVN,
                RevisionControlSystems.HG,
                RevisionControlSystems.BZR):
            return self.url
        else:
            raise AssertionError(
                "Unknown rcs type: %s" % self.rcs_type.title)

    def _removeJob(self):
        """If there is a pending job, remove it."""
        job = self.import_job
        if job is not None:
            if job.state == CodeImportJobState.PENDING:
                CodeImportJobWorkflow().deletePendingJob(self)

    results = SQLMultipleJoin(
        'CodeImportResult', joinColumn='code_import',
        orderBy=['-date_job_started'])

    @property
    def consecutive_failure_count(self):
        """See `ICodeImport`."""
        # This SQL translates as "how many code import results have there been
        # for this code import since the last successful one".
        # This is not very efficient for long lists of code imports.
        last_success = Func(
            "coalesce",
            Select(
                CodeImportResult.id,
                And(CodeImportResult.status.is_in(
                        CodeImportResultStatus.successes),
                    CodeImportResult.code_import == self),
                order_by=Desc(CodeImportResult.id),
                limit=1),
            0)
        return Store.of(self).find(
            CodeImportResult,
            CodeImportResult.code_import == self,
            CodeImportResult.id > last_success).count()

    def updateFromData(self, data, user):
        """See `ICodeImport`."""
        event_set = getUtility(ICodeImportEventSet)
        new_whiteboard = None
        # Whiteboard lives on the branch, not this record; pop it out so
        # the setattr loop below only touches CodeImport attributes.
        if 'whiteboard' in data:
            whiteboard = data.pop('whiteboard')
            if whiteboard != self.branch.whiteboard:
                if whiteboard is None:
                    new_whiteboard = ''
                else:
                    new_whiteboard = whiteboard
                self.branch.whiteboard = whiteboard
        token = event_set.beginModify(self)
        for name, value in data.items():
            setattr(self, name, value)
        if 'review_status' in data:
            if data['review_status'] == CodeImportReviewStatus.REVIEWED:
                # Moving into REVIEWED: make sure a job exists.
                if self.import_job is None:
                    CodeImportJobWorkflow().newJob(self)
            else:
                # Leaving REVIEWED: drop any pending job.
                self._removeJob()
        event = event_set.newModify(self, user, token)
        if event is not None or new_whiteboard is not None:
            code_import_updated(self, event, new_whiteboard, user)
        return event

    def __repr__(self):
        return "<CodeImport for %s>" % self.branch.unique_name

    def tryFailingImportAgain(self, user):
        """See `ICodeImport`."""
        if self.review_status != CodeImportReviewStatus.FAILING:
            raise AssertionError(
                "review_status is %s not FAILING" % self.review_status.name)
        self.updateFromData(
            {'review_status': CodeImportReviewStatus.REVIEWED}, user)
        getUtility(ICodeImportJobWorkflow).requestJob(self.import_job, user)

    def requestImport(self, requester, error_if_already_requested=False):
        """See `ICodeImport`."""
        if self.import_job is None:
            # Not in automatic mode.
            raise CodeImportNotInReviewedState(
                "This code import is %s, and must be Reviewed for you to "
                "call requestImport." % self.review_status.name)
        if self.import_job.state != CodeImportJobState.PENDING:
            assert self.import_job.state == CodeImportJobState.RUNNING
            raise CodeImportAlreadyRunning(
                "This code import is already running.")
        elif self.import_job.requesting_user is not None:
            if error_if_already_requested:
                raise CodeImportAlreadyRequested("This code import has "
                        "already been requested to run.",
                        self.import_job.requesting_user)
        else:
            getUtility(ICodeImportJobWorkflow).requestJob(
                self.import_job, requester)
class PluginChannel(Channel):
    """A channel instantiated from a plugin, carrying per-channel plugin
    configuration, cache settings, and per-user permission roles."""

    plugin = ForeignKey('Plugin', cascade=True)
    # Per-channel values for the parameters declared in
    # plugin.channels_params.
    plugin_config = JSONCol(notNone=True, default={})
    assets = SQLMultipleJoin('Asset')
    # Cache settings; None means "fall back to the plugin-wide default"
    # (see the _get_* accessors below).
    cache_activated = BoolCol(default=None)
    cache_validity = IntCol(default=None)
    keep_noncomplying_capsules = BoolCol(default=None)

    def give_permission_to_user(self, user: User,
                                permission_level: UserPermissions = UserPermissions.channel_contributor) -> None:
        """
        Give permission to the user or modify existing permission
        previously given.

        :param user: The user receiving the permission level on this channel.
        :param permission_level: The level of permission to give on this channel.
        :return: None
        """
        role = Role.selectBy(user=user, channel=self).getOne(None)
        if role is None:
            Role(user=user, channel=self, permission_level=permission_level)
        else:
            role.permission_level = permission_level

    def remove_permission_to_user(self, user: User) -> None:
        """
        Remove previously given permission to the user.

        :param user: The user to withdraw permission on this channel.
        :return: None
        """
        role = Role.selectBy(user=user, channel=self).getOne(None)
        if role is not None:
            role.destroySelf()

    def get_channel_permissions_of(self, user):
        """ Return the permission level of this user on this channel.
        :return: UserPermissions """
        role = Role.selectBy(user=user, channel=self).getOne(None)
        if role is not None:
            return role.permission_level
        return UserPermissions.no_permission

    def has_admin(self, user):
        """ Return whether this user has sufficient permission to be
        considered as admin of this channel. """
        return UserPermissions.channel_administrator in self.get_channel_permissions_of(user)

    def has_contrib(self, user):
        """ Return whether this user has sufficient permission to be
        considered as contributor of this channel. """
        return UserPermissions.channel_contributor in self.get_channel_permissions_of(user)

    def _get_users_with_permissions(self, permission_level):
        """ Return a list of users with sufficient permission on this
        channel. """
        return Role.selectBy(channel=self,
                             permission_level=UserPermissions.get_permission_string(
                                 permission_level)).throughTo.user

    def get_admins(self):
        """ Return a list of users with administrator permission on this channel. """
        return self._get_users_with_permissions(UserPermissions.channel_administrator)

    def get_contribs(self):
        """ Return a list of users with contributor permission on this channel. """
        return self._get_users_with_permissions(UserPermissions.channel_contributor)

    def _get_users_as_dict(self):
        """ Return a dictionary in the form { user.id: UserPermissions integer value, ... } """
        return {role.user.id: role.permission_level.value
                for role in Role.selectBy(channel=self)}

    def get_users_as_json(self):
        """ Return the string representation of a JSON object in the form
        { user.id: UserPermissions integer value, ... } """
        return json.dumps(self._get_users_as_dict())

    @classmethod
    def get_channels_users_as_json(cls, channels):
        """ Return the string representation of a JSON object in the form
        { channel.id: { user.id: UserPermissions integer value, ... } } """
        return json.dumps({c.id: c._get_users_as_dict() for c in channels})

    def get_config_param(self, param):
        """ Returns the value of the given parameter according to this
        channel configuration or the default value in the plugin
        configuration if one exists. Otherwise raises a KeyError. """
        default = self.plugin.channels_params[param]['default']
        value_type = self.plugin.channels_params[param]['type']
        # Coerce through the named builtin type (e.g. 'int', 'bool') when
        # one matches; otherwise return the raw value.
        if param not in self.plugin_config and (type(default) is not str or len(default) > 0):
            return vars(builtins)[value_type](default) if value_type in vars(builtins) else default
        if param in self.plugin_config:
            if value_type in vars(builtins):
                return vars(builtins)[value_type](self.plugin_config[param])
            return self.plugin_config[param]
        # NOTE(review): falls through to an implicit None when the param is
        # unset and its default is an empty string -- confirm this is
        # intentional.

    def has_visible_params_for(self, user):
        """ Returns true if the given user has access to one or more
        parameters of this channel. """
        if user.super_admin:
            return True
        if user.admin:
            return PluginParamAccessRights.selectBy(plugin=self.plugin, administrator_read=True).count() > 0
        if self.has_admin(user):
            return PluginParamAccessRights.selectBy(plugin=self.plugin, channel_administrator_read=True).count() > 0
        if self.has_contrib(user):
            return PluginParamAccessRights.selectBy(plugin=self.plugin, channel_contributor_read=True).count() > 0
        return False

    def get_access_rights_for(self, param_name, user):
        """ Returns a tuple of booleans (read_access, write_access)
        indicating which type of rights this user has on the given param of
        this channel depending on they role and the param access
        configuration. """
        if user.super_admin:
            return True, True
        rights = PluginParamAccessRights.selectBy(
            plugin=self.plugin, name=param_name).getOne(None)
        if rights is None:
            # BUG FIX: the previous code passed (False, False) as the
            # getOne() fallback and then read attributes off the result,
            # which raised AttributeError on the tuple whenever no
            # access-rights row existed for this parameter. Deny access
            # explicitly instead.
            return False, False
        if user.admin:
            return rights.administrator_read, rights.administrator_write
        if self.has_admin(user):
            return rights.channel_administrator_read, rights.channel_administrator_write
        if self.has_contrib(user):
            return rights.channel_contributor_read, rights.channel_contributor_write
        return False, False

    def _get_cache_activated(self):
        # Fall back to the plugin-wide default when unset.
        value = self._SO_get_cache_activated()
        if value is None:
            return self.plugin.cache_activated_default
        return value

    def _get_cache_validity(self):
        # Fall back to the plugin-wide default when unset.
        value = self._SO_get_cache_validity()
        if value is None:
            return self.plugin.cache_validity_default
        return value

    def _get_keep_noncomplying_capsules(self):
        # Fall back to the plugin-wide default when unset.
        value = self._SO_get_keep_noncomplying_capsules()
        if value is None:
            return self.plugin.keep_noncomplying_capsules_default
        return value

    def flatten(self, keep_disabled_channels=False):
        """Return [self] when enabled (or when disabled channels are kept),
        else an empty list."""
        return [self] if self.enabled or keep_disabled_channels else []

    def get_type_name(self):
        """ Returns a string representing the name of the subtype to be used in the UI for this class. """
        return 'Plugin %s' % self.plugin.name
class TranslationGroup(SQLBase):
    """A TranslationGroup."""

    # default to listing alphabetically
    _defaultOrder = 'name'

    # db field names
    name = StringCol(unique=True, alternateID=True, notNull=True)
    title = StringCol(notNull=True)
    summary = StringCol(notNull=True)
    datecreated = UtcDateTimeCol(notNull=True, default=DEFAULT)
    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)

    # useful joins
    distributions = SQLMultipleJoin('Distribution',
                                    joinColumn='translationgroup')
    languages = SQLRelatedJoin('Language', joinColumn='translationgroup',
                               intermediateTable='Translator',
                               otherColumn='language')
    translators = SQLMultipleJoin('Translator',
                                  joinColumn='translationgroup')
    translation_guide_url = StringCol(notNull=False, default=None)

    def __getitem__(self, language_code):
        """See `ITranslationGroup`."""
        # Look up this group's translator for the given language code.
        query = Store.of(self).find(
            Translator,
            Translator.translationgroup == self,
            Translator.languageID == Language.id,
            Language.code == language_code)
        translator = query.one()
        if translator is None:
            raise NotFoundError(language_code)
        return translator

    # used to note additions
    def add(self, content):
        """See ITranslationGroup."""
        return content

    # adding and removing translators
    def remove_translator(self, translator):
        """See ITranslationGroup."""
        Translator.delete(translator.id)

    # get a translator by language or code
    def query_translator(self, language):
        """See ITranslationGroup."""
        return Translator.selectOneBy(language=language,
                                      translationgroup=self)

    @property
    def products(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.product import Product
        return Product.selectBy(translationgroup=self.id, active=True)

    @property
    def projects(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.projectgroup import ProjectGroup
        return ProjectGroup.selectBy(translationgroup=self.id, active=True)

    # A limit of projects to get for the `top_projects`.
    TOP_PROJECTS_LIMIT = 6

    @property
    def top_projects(self):
        """See `ITranslationGroup`."""
        # XXX Danilo 2009-08-25: We should make this list show a list
        # of projects based on the top translations karma (bug #418493).
        # Fill up to the goal, preferring distributions, then project
        # groups, then products.
        goal = self.TOP_PROJECTS_LIMIT
        projects = list(self.distributions[:goal])
        found = len(projects)
        if found < goal:
            projects.extend(
                list(self.projects[:goal - found]))
            found = len(projects)
        if found < goal:
            projects.extend(
                list(self.products[:goal - found]))
        return projects

    @property
    def number_of_remaining_projects(self):
        """See `ITranslationGroup`."""
        # How many translated projects did not fit in top_projects.
        total = (
            self.projects.count() + self.products.count() +
            self.distributions.count())
        if total > self.TOP_PROJECTS_LIMIT:
            return total - self.TOP_PROJECTS_LIMIT
        else:
            return 0

    def fetchTranslatorData(self):
        """See `ITranslationGroup`."""
        # Fetch Translator, Language, and Person; but also prefetch the
        # icon information.
        using = [
            Translator,
            Language,
            Person,
            LeftJoin(LibraryFileAlias,
                     LibraryFileAlias.id == Person.iconID),
            LeftJoin(
                LibraryFileContent,
                LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        tables = (
            Translator,
            Language,
            Person,
            LibraryFileAlias,
            LibraryFileContent,
            )
        translator_data = Store.of(self).using(*using).find(
            tables,
            Translator.translationgroup == self,
            Language.id == Translator.languageID,
            Person.id == Translator.translatorID)
        translator_data = translator_data.order_by(Language.englishname)
        # Only the first three columns (Translator, Language, Person) are
        # returned to the caller; the rest exist purely for prefetching.
        mapper = lambda row: row[slice(0, 3)]
        return DecoratedResultSet(translator_data, mapper)

    def fetchProjectsForDisplay(self, user):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.product import (
            get_precached_products,
            Product,
            ProductSet,
            )
        products = list(IStore(Product).find(
            Product,
            Product.translationgroupID == self.id,
            Product.active == True,
            ProductSet.getProductPrivacyFilter(user),
            ).order_by(Product.display_name))
        # Warm per-product caches and bulk-load icon data to avoid
        # per-row queries in the view.
        get_precached_products(products, need_licences=True)
        icons = bulk.load_related(LibraryFileAlias, products, ['iconID'])
        bulk.load_related(LibraryFileContent, icons, ['contentID'])
        return products

    def fetchProjectGroupsForDisplay(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.projectgroup import ProjectGroup
        using = [
            ProjectGroup,
            LeftJoin(
                LibraryFileAlias,
                LibraryFileAlias.id == ProjectGroup.iconID),
            LeftJoin(
                LibraryFileContent,
                LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        tables = (
            ProjectGroup,
            LibraryFileAlias,
            LibraryFileContent,
            )
        project_data = ISlaveStore(ProjectGroup).using(*using).find(
            tables,
            ProjectGroup.translationgroupID == self.id,
            ProjectGroup.active == True).order_by(ProjectGroup.display_name)
        # Only the ProjectGroup column is surfaced; the joins prefetch icons.
        return DecoratedResultSet(project_data, operator.itemgetter(0))

    def fetchDistrosForDisplay(self):
        """See `ITranslationGroup`."""
        # Avoid circular imports.
        from lp.registry.model.distribution import Distribution
        using = [
            Distribution,
            LeftJoin(
                LibraryFileAlias,
                LibraryFileAlias.id == Distribution.iconID),
            LeftJoin(
                LibraryFileContent,
                LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        tables = (
            Distribution,
            LibraryFileAlias,
            LibraryFileContent,
            )
        distro_data = ISlaveStore(Distribution).using(*using).find(
            tables,
            Distribution.translationgroupID == self.id).order_by(
            Distribution.display_name)
        # Only the Distribution column is surfaced; the joins prefetch icons.
        return DecoratedResultSet(distro_data, operator.itemgetter(0))
class BranchMergeProposal(SQLBase): """A relationship between a person and a branch.""" implements(IBranchMergeProposal, IBranchNavigationMenu, IHasBranchTarget) _table = 'BranchMergeProposal' _defaultOrder = ['-date_created', 'id'] registrant = ForeignKey(dbName='registrant', foreignKey='Person', storm_validator=validate_public_person, notNull=True) source_branch = ForeignKey(dbName='source_branch', foreignKey='Branch', notNull=True) target_branch = ForeignKey(dbName='target_branch', foreignKey='Branch', notNull=True) prerequisite_branch = ForeignKey(dbName='dependent_branch', foreignKey='Branch', notNull=False) description = StringCol(default=None) whiteboard = StringCol(default=None) queue_status = EnumCol(enum=BranchMergeProposalStatus, notNull=True, default=BranchMergeProposalStatus.WORK_IN_PROGRESS) @property def private(self): return ( (self.source_branch.information_type in PRIVATE_INFORMATION_TYPES) or (self.target_branch.information_type in PRIVATE_INFORMATION_TYPES) or (self.prerequisite_branch is not None and (self.prerequisite_branch.information_type in PRIVATE_INFORMATION_TYPES))) reviewer = ForeignKey(dbName='reviewer', foreignKey='Person', storm_validator=validate_person, notNull=False, default=None) @property def next_preview_diff_job(self): # circular dependencies from lp.code.model.branchmergeproposaljob import ( BranchMergeProposalJob, BranchMergeProposalJobType, ) jobs = Store.of(self).find( BranchMergeProposalJob, BranchMergeProposalJob.branch_merge_proposal == self, BranchMergeProposalJob.job_type == BranchMergeProposalJobType.UPDATE_PREVIEW_DIFF, BranchMergeProposalJob.job == Job.id, Job._status.is_in([JobStatus.WAITING, JobStatus.RUNNING])) job = jobs.order_by(Job.scheduled_start, Job.date_created).first() if job is not None: return job.makeDerived() else: return None reviewed_revision_id = StringCol(default=None) commit_message = StringCol(default=None) queue_position = IntCol(default=None) queuer = ForeignKey(dbName='queuer', 
foreignKey='Person', notNull=False, default=None) queued_revision_id = StringCol(default=None) date_merged = UtcDateTimeCol(default=None) merged_revno = IntCol(default=None) merge_reporter = ForeignKey(dbName='merge_reporter', foreignKey='Person', storm_validator=validate_public_person, notNull=False, default=None) def getRelatedBugTasks(self, user): """Bug tasks which are linked to the source but not the target. Implies that these would be fixed, in the target, by the merge. """ source_tasks = self.source_branch.getLinkedBugTasks(user) target_tasks = self.target_branch.getLinkedBugTasks(user) return [ bugtask for bugtask in source_tasks if bugtask not in target_tasks ] @property def address(self): return 'mp+%d@%s' % (self.id, config.launchpad.code_domain) superseded_by = ForeignKey(dbName='superseded_by', foreignKey='BranchMergeProposal', notNull=False, default=None) supersedes = Reference("<primary key>", "superseded_by", on_remote=True) date_created = UtcDateTimeCol(notNull=True, default=DEFAULT) date_review_requested = UtcDateTimeCol(notNull=False, default=None) date_reviewed = UtcDateTimeCol(notNull=False, default=None) @property def target(self): """See `IHasBranchTarget`.""" return self.source_branch.target root_message_id = StringCol(default=None) @property def title(self): """See `IBranchMergeProposal`.""" return "[Merge] %(source)s into %(target)s" % { 'source': self.source_branch.bzr_identity, 'target': self.target_branch.bzr_identity } @property def all_comments(self): """See `IBranchMergeProposal`.""" return CodeReviewComment.selectBy(branch_merge_proposal=self.id) def getComment(self, id): """See `IBranchMergeProposal`. This function can raise WrongBranchMergeProposal.""" comment = CodeReviewComment.get(id) if comment.branch_merge_proposal != self: raise WrongBranchMergeProposal return comment def getVoteReference(self, id): """See `IBranchMergeProposal`. 
This function can raise WrongBranchMergeProposal.""" vote = CodeReviewVoteReference.get(id) if vote.branch_merge_proposal != self: raise WrongBranchMergeProposal return vote @property def _preview_diffs(self): return Store.of(self).find( PreviewDiff, PreviewDiff.branch_merge_proposal_id == self.id).order_by( PreviewDiff.date_created) @cachedproperty def preview_diffs(self): return list(self._preview_diffs) @cachedproperty def preview_diff(self): return self._preview_diffs.last() date_queued = UtcDateTimeCol(notNull=False, default=None) votes = SQLMultipleJoin('CodeReviewVoteReference', joinColumn='branch_merge_proposal') def getNotificationRecipients(self, min_level): """See IBranchMergeProposal.getNotificationRecipients""" recipients = {} branch_identity_cache = { self.source_branch: self.source_branch.bzr_identity, self.target_branch: self.target_branch.bzr_identity, } branches = [self.source_branch, self.target_branch] if self.prerequisite_branch is not None: branches.append(self.prerequisite_branch) for branch in branches: branch_recipients = branch.getNotificationRecipients() for recipient in branch_recipients: # If the recipient cannot see either of the branches, skip # them. if (not self.source_branch.visibleByUser(recipient) or not self.target_branch.visibleByUser(recipient)): continue subscription, rationale = branch_recipients.getReason( recipient) if (subscription.review_level < min_level): continue recipients[recipient] = RecipientReason.forBranchSubscriber( subscription, recipient, rationale, self, branch_identity_cache=branch_identity_cache) # Add in all the individuals that have been asked for a review, # or who have reviewed. These people get added to the recipients # with the rationale of "Reviewer". # Don't add a team reviewer to the recipients as they are only going # to get emails normally if they are subscribed to one of the # branches, and if they are subscribed, they'll be getting this email # aleady. 
for review in self.votes: reviewer = review.reviewer pending = review.comment is None recipients[reviewer] = RecipientReason.forReviewer( self, pending, reviewer, branch_identity_cache=branch_identity_cache) # If the registrant of the proposal is getting emails, update the # rationale to say that they registered it. Don't however send them # emails if they aren't asking for any. if self.registrant in recipients: recipients[self.registrant] = RecipientReason.forRegistrant( self, branch_identity_cache=branch_identity_cache) # If the owner of the source branch is getting emails, override the # rationale to say they are the owner of the souce branch. source_owner = self.source_branch.owner if source_owner in recipients: reason = RecipientReason.forSourceOwner( self, branch_identity_cache=branch_identity_cache) if reason is not None: recipients[source_owner] = reason return recipients def isValidTransition(self, next_state, user=None): """See `IBranchMergeProposal`.""" return is_valid_transition(self, self.queue_status, next_state, user) def _transitionToState(self, next_state, user=None): """Update the queue_status of the proposal. Raise an error if the proposal is in a final state. """ if not self.isValidTransition(next_state, user): raise BadStateTransition( 'Invalid state transition for merge proposal: %s -> %s' % (self.queue_status.title, next_state.title)) # Transition to the same state occur in two particular # situations: # * stale posts # * approving a later revision # In both these cases, there is no real reason to disallow # transitioning to the same state. self.queue_status = next_state def setStatus(self, status, user=None, revision_id=None): """See `IBranchMergeProposal`.""" # XXX - rockstar - 9 Oct 2008 - jml suggested in a review that this # would be better as a dict mapping. # See bug #281060. 
if (self.queue_status == BranchMergeProposalStatus.QUEUED and status != BranchMergeProposalStatus.QUEUED): self.dequeue() if status == BranchMergeProposalStatus.WORK_IN_PROGRESS: self.setAsWorkInProgress() elif status == BranchMergeProposalStatus.NEEDS_REVIEW: self.requestReview() elif status == BranchMergeProposalStatus.CODE_APPROVED: self.approveBranch(user, revision_id) elif status == BranchMergeProposalStatus.REJECTED: self.rejectBranch(user, revision_id) elif status == BranchMergeProposalStatus.QUEUED: self.enqueue(user, revision_id) elif status == BranchMergeProposalStatus.MERGED: self.markAsMerged(merge_reporter=user) elif status == BranchMergeProposalStatus.MERGE_FAILED: self._transitionToState(status, user=user) else: raise AssertionError('Unexpected queue status: %s' % status) def setAsWorkInProgress(self): """See `IBranchMergeProposal`.""" self._transitionToState(BranchMergeProposalStatus.WORK_IN_PROGRESS) self._mark_unreviewed() def _mark_unreviewed(self): """Clear metadata about a previous review.""" self.reviewer = None self.date_reviewed = None self.reviewed_revision_id = None def requestReview(self, _date_requested=None): """See `IBranchMergeProposal`. :param _date_requested: used only for testing purposes to override the normal UTC_NOW for when the review was requested. """ # Don't reset the date_review_requested if we are already in the # review state. if _date_requested is None: _date_requested = UTC_NOW # If we are going from work in progress to needs review, then reset # the root message id and trigger a job to send out the email. if self.queue_status == BranchMergeProposalStatus.WORK_IN_PROGRESS: self.root_message_id = None notify(BranchMergeProposalNeedsReviewEvent(self)) if self.queue_status != BranchMergeProposalStatus.NEEDS_REVIEW: self._transitionToState(BranchMergeProposalStatus.NEEDS_REVIEW) self.date_review_requested = _date_requested # Clear out any reviewed or queued values. 
self._mark_unreviewed() self.queuer = None self.queued_revision_id = None def isMergable(self): """See `IBranchMergeProposal`.""" # As long as the source branch has not been merged, rejected # or superseded, then it is valid to be merged. return (self.queue_status not in FINAL_STATES) def _reviewProposal(self, reviewer, next_state, revision_id, _date_reviewed=None): """Set the proposal to next_state.""" # Check the reviewer can review the code for the target branch. old_state = self.queue_status if not self.target_branch.isPersonTrustedReviewer(reviewer): raise UserNotBranchReviewer # Check the current state of the proposal. self._transitionToState(next_state, reviewer) # Record the reviewer self.reviewer = reviewer if _date_reviewed is None: _date_reviewed = UTC_NOW self.date_reviewed = _date_reviewed # Record the reviewed revision id self.reviewed_revision_id = revision_id notify( BranchMergeProposalStatusChangeEvent(self, reviewer, old_state, next_state)) def approveBranch(self, reviewer, revision_id, _date_reviewed=None): """See `IBranchMergeProposal`.""" self._reviewProposal(reviewer, BranchMergeProposalStatus.CODE_APPROVED, revision_id, _date_reviewed) def rejectBranch(self, reviewer, revision_id, _date_reviewed=None): """See `IBranchMergeProposal`.""" self._reviewProposal(reviewer, BranchMergeProposalStatus.REJECTED, revision_id, _date_reviewed) def enqueue(self, queuer, revision_id): """See `IBranchMergeProposal`.""" if self.queue_status != BranchMergeProposalStatus.CODE_APPROVED: self.approveBranch(queuer, revision_id) last_entry = BranchMergeProposal.selectOne(""" BranchMergeProposal.queue_position = ( SELECT coalesce(MAX(queue_position), 0) FROM BranchMergeProposal) """) # The queue_position will wrap if we ever get to # two billion queue entries where the queue has # never become empty. Perhaps sometime in the future # we may want to (maybe) consider keeping track of # the maximum value here. I doubt that it'll ever be # a problem -- thumper. 
if last_entry is None: position = 1 else: position = last_entry.queue_position + 1 self.queue_status = BranchMergeProposalStatus.QUEUED self.queue_position = position self.queuer = queuer self.queued_revision_id = revision_id or self.reviewed_revision_id self.date_queued = UTC_NOW self.syncUpdate() def dequeue(self): """See `IBranchMergeProposal`.""" if self.queue_status != BranchMergeProposalStatus.QUEUED: raise BadStateTransition( 'Invalid state transition for merge proposal: %s -> %s' % (self.queue_state.title, BranchMergeProposalStatus.QUEUED.title)) self.queue_status = BranchMergeProposalStatus.CODE_APPROVED # Clear out the queued values. self.queuer = None self.queued_revision_id = None self.date_queued = None # Remove from the queue. self.queue_position = None def moveToFrontOfQueue(self): """See `IBranchMergeProposal`.""" if self.queue_status != BranchMergeProposalStatus.QUEUED: return first_entry = BranchMergeProposal.selectOne(""" BranchMergeProposal.queue_position = ( SELECT MIN(queue_position) FROM BranchMergeProposal) """) self.queue_position = first_entry.queue_position - 1 self.syncUpdate() def markAsMerged(self, merged_revno=None, date_merged=None, merge_reporter=None): """See `IBranchMergeProposal`.""" old_state = self.queue_status self._transitionToState(BranchMergeProposalStatus.MERGED, merge_reporter) self.merged_revno = merged_revno self.merge_reporter = merge_reporter # Remove from the queue. self.queue_position = None # The reviewer of a merged proposal is assumed to have approved, if # they rejected it remove the review metadata to avoid confusion. 
        # A merged proposal is implicitly approved; drop stale REJECTED
        # review metadata so the record is not contradictory.
        if old_state == BranchMergeProposalStatus.REJECTED:
            self._mark_unreviewed()

        # Prefer the actual commit date of the merged revision when we can
        # find it on the target branch.
        if merged_revno is not None:
            branch_revision = Store.of(self).find(
                BranchRevision,
                BranchRevision.branch == self.target_branch,
                BranchRevision.sequence == merged_revno).one()
            if branch_revision is not None:
                date_merged = branch_revision.revision.revision_date

        if date_merged is None:
            date_merged = UTC_NOW
        self.date_merged = date_merged

    def resubmit(self, registrant, source_branch=None, target_branch=None,
                 prerequisite_branch=DEFAULT, description=None,
                 break_link=False):
        """See `IBranchMergeProposal`."""
        if source_branch is None:
            source_branch = self.source_branch
        if target_branch is None:
            target_branch = self.target_branch
        # DEFAULT instead of None, because None is a valid value.
        proposals = BranchMergeProposalGetter.activeProposalsForBranches(
            source_branch, target_branch)
        for proposal in proposals:
            if proposal is not self:
                raise BranchMergeProposalExists(proposal)
        if prerequisite_branch is DEFAULT:
            prerequisite_branch = self.prerequisite_branch
        if description is None:
            description = self.description
        # You can transition from REJECTED to SUPERSEDED, but
        # not from MERGED or SUPERSEDED.
        self._transitionToState(
            BranchMergeProposalStatus.SUPERSEDED, registrant)
        # This sync update is needed as the add landing target does
        # a database query to identify if there are any active proposals
        # with the same source and target branches.
        self.syncUpdate()
        # Carry existing review requests over to the new proposal,
        # de-duplicated on (reviewer, review_type).
        review_requests = list(
            set((vote.reviewer, vote.review_type) for vote in self.votes))
        proposal = source_branch.addLandingTarget(
            registrant=registrant,
            target_branch=target_branch,
            prerequisite_branch=prerequisite_branch,
            description=description,
            needs_review=True,
            review_requests=review_requests)
        if not break_link:
            self.superseded_by = proposal
        # This sync update is needed to ensure that the transitive
        # properties of supersedes and superseded_by are visible to
        # the old and the new proposal.
        self.syncUpdate()
        return proposal

    def _normalizeReviewType(self, review_type):
        """Normalise the review type.

        If review_type is None, it stays None.  Otherwise the review_type
        is converted to lower case, and if the string is empty is gets
        changed to None.
        """
        if review_type is not None:
            review_type = review_type.strip()
            if review_type == '':
                review_type = None
            else:
                review_type = review_type.lower()
        return review_type

    def _subscribeUserToStackedBranch(self, branch, user,
                                      checked_branches=None):
        """Subscribe the user to the branch and those it is stacked on."""
        # checked_branches guards against cycles in the stacked-on chain.
        if checked_branches is None:
            checked_branches = []
        branch.subscribe(
            user,
            BranchSubscriptionNotificationLevel.NOEMAIL,
            BranchSubscriptionDiffSize.NODIFF,
            CodeReviewNotificationLevel.FULL,
            user)
        if branch.stacked_on is not None:
            checked_branches.append(branch)
            if branch.stacked_on not in checked_branches:
                self._subscribeUserToStackedBranch(
                    branch.stacked_on, user, checked_branches)

    def _acceptable_to_give_visibility(self, branch, reviewer):
        # If the branch is private, only exclusive teams can be subscribed
        # to prevent leaks.
        if (branch.information_type in PRIVATE_INFORMATION_TYPES and
                reviewer.is_team and reviewer.anyone_can_join()):
            return False
        return True

    def _ensureAssociatedBranchesVisibleToReviewer(self, reviewer):
        """ A reviewer must be able to see the source and target branches.

        Currently, we ensure the required visibility by subscribing the user
        to the branch and those on which it is stacked. We do not subscribe
        the reviewer if the branch is private and the reviewer is an open
        team.
""" source = self.source_branch if (not source.visibleByUser(reviewer) and self._acceptable_to_give_visibility(source, reviewer)): self._subscribeUserToStackedBranch(source, reviewer) target = self.target_branch if (not target.visibleByUser(reviewer) and self._acceptable_to_give_visibility(source, reviewer)): self._subscribeUserToStackedBranch(target, reviewer) def nominateReviewer(self, reviewer, registrant, review_type=None, _date_created=DEFAULT, _notify_listeners=True): """See `IBranchMergeProposal`.""" # Return the existing vote reference or create a new one. # Lower case the review type. review_type = self._normalizeReviewType(review_type) vote_reference = self.getUsersVoteReference(reviewer, review_type) # If there is no existing review for the reviewer, then create a new # one. If the reviewer is a team, then we don't care if there is # already an existing pending review, as some projects expect multiple # reviews from a team. if vote_reference is None or reviewer.is_team: vote_reference = CodeReviewVoteReference( branch_merge_proposal=self, registrant=registrant, reviewer=reviewer, date_created=_date_created) self._ensureAssociatedBranchesVisibleToReviewer(reviewer) vote_reference.review_type = review_type if _notify_listeners: notify(ReviewerNominatedEvent(vote_reference)) return vote_reference def deleteProposal(self): """See `IBranchMergeProposal`.""" # Delete this proposal, but keep the superseded chain linked. if self.supersedes is not None: self.supersedes.superseded_by = self.superseded_by # Delete the related CodeReviewVoteReferences. for vote in self.votes: vote.destroySelf() # Delete the related CodeReviewComments. for comment in self.all_comments: comment.destroySelf() # Delete all jobs referring to the BranchMergeProposal, whether # or not they have completed. 
        # Imported here to avoid a circular import at module level.
        from lp.code.model.branchmergeproposaljob import (
            BranchMergeProposalJob)
        for job in BranchMergeProposalJob.selectBy(
                branch_merge_proposal=self.id):
            job.destroySelf()
        self._preview_diffs.remove()
        self.destroySelf()

    def getUnlandedSourceBranchRevisions(self):
        """See `IBranchMergeProposal`."""
        store = Store.of(self)
        # CTE selecting the 10 most recent mainline revisions of the source
        # branch.  NOTE(review): "Branchrevision.sequence" (lowercase r)
        # looks like a typo but is harmless in case-insensitive SQL —
        # preserved byte-for-byte here.
        source = SQL("""source AS (SELECT BranchRevision.branch,
            BranchRevision.revision, Branchrevision.sequence FROM
            BranchRevision WHERE BranchRevision.branch = %s and
            BranchRevision.sequence IS NOT NULL ORDER BY
            BranchRevision.branch DESC, BranchRevision.sequence DESC
            LIMIT 10)""" % self.source_branch.id)
        # Exclude any of those revisions already present on the target.
        where = SQL("""BranchRevision.revision NOT IN (SELECT revision from
            BranchRevision AS target where target.branch = %s and
            BranchRevision.revision = target.revision)""" %
            self.target_branch.id)
        using = SQL("""source as BranchRevision""")
        revisions = store.with_(source).using(using).find(
            BranchRevision, where)
        return list(revisions.order_by(
            Desc(BranchRevision.sequence)).config(limit=10))

    def createComment(self, owner, subject, content=None, vote=None,
                      review_type=None, parent=None, _date_created=DEFAULT,
                      _notify_listeners=True):
        """See `IBranchMergeProposal`."""
        #:param _date_created: The date the message was created.  Provided
        #    only for testing purposes, as it can break
        #    BranchMergeProposal.root_message.
        review_type = self._normalizeReviewType(review_type)
        assert owner is not None, 'Merge proposal messages need a sender'
        parent_message = None
        if parent is not None:
            assert parent.branch_merge_proposal == self, \
                'Replies must use the same merge proposal as their parent'
            parent_message = parent.message
        if not subject:
            # Get the subject from the parent if there is one, or use a nice
            # default.
            if parent is None:
                subject = self.title
            else:
                subject = parent.message.subject
            if not subject.startswith('Re: '):
                subject = 'Re: ' + subject

        # Avoid circular dependencies.
        from lp.services.messages.model.message import Message, MessageChunk
        msgid = make_msgid('codereview')
        message = Message(
            parent=parent_message, owner=owner, rfc822msgid=msgid,
            subject=subject, datecreated=_date_created)
        MessageChunk(message=message, content=content, sequence=1)
        return self.createCommentFromMessage(
            message, vote, review_type, original_email=None,
            _notify_listeners=_notify_listeners, _validate=False)

    def getUsersVoteReference(self, user, review_type=None):
        """Get the existing vote reference for the given user."""
        # Lower case the review type.
        review_type = self._normalizeReviewType(review_type)
        if user is None:
            return None
        # For a team, only a reference with a matching review_type counts;
        # for an individual any of their references does.
        if user.is_team:
            query = And(
                CodeReviewVoteReference.reviewer == user,
                CodeReviewVoteReference.review_type == review_type)
        else:
            query = CodeReviewVoteReference.reviewer == user
        return Store.of(self).find(
            CodeReviewVoteReference,
            CodeReviewVoteReference.branch_merge_proposal == self,
            query).order_by(CodeReviewVoteReference.date_created).first()

    def _getTeamVoteReference(self, user, review_type):
        """Get a vote reference where the user is in the review team.

        Only return those reviews where the review_type matches.
        """
        # comment == None restricts this to still-unclaimed reviews.
        refs = Store.of(self).find(
            CodeReviewVoteReference,
            CodeReviewVoteReference.branch_merge_proposal == self,
            CodeReviewVoteReference.review_type == review_type,
            CodeReviewVoteReference.comment == None)
        for ref in refs.order_by(CodeReviewVoteReference.date_created):
            if user.inTeam(ref.reviewer):
                return ref
        return None

    def _getVoteReference(self, user, review_type):
        """Get the vote reference for the user.

        The returned vote reference will either:
          * the existing vote reference for the user
          * a vote reference of the same type that has been requested of a
            team that the user is a member of
          * a new vote reference for the user
        """
        # Firstly look for a vote reference for the user.
        ref = self.getUsersVoteReference(user)
        if ref is not None:
            return ref
        # Get all the unclaimed CodeReviewVoteReferences with the
        # review_type specified.
        team_ref = self._getTeamVoteReference(user, review_type)
        if team_ref is not None:
            return team_ref
        # If the review_type is not None, check to see if there is an
        # outstanding team review requested with no specified type.
        if review_type is not None:
            team_ref = self._getTeamVoteReference(user, None)
            if team_ref is not None:
                return team_ref
        # Create a new reference.
        return CodeReviewVoteReference(
            branch_merge_proposal=self,
            registrant=user,
            reviewer=user,
            review_type=review_type)

    def createCommentFromMessage(self, message, vote, review_type,
                                 original_email, _notify_listeners=True,
                                 _validate=True):
        """See `IBranchMergeProposal`."""
        if _validate:
            validate_message(original_email)
        review_type = self._normalizeReviewType(review_type)
        code_review_message = CodeReviewComment(
            branch_merge_proposal=self, message=message, vote=vote,
            vote_tag=review_type)
        # Get the appropriate CodeReviewVoteReference for the reviewer.
        # If there isn't one, then create one, otherwise set the comment
        # reference.
        if vote is not None:
            vote_reference = self._getVoteReference(
                message.owner, review_type)
            # Just set the reviewer and review type again on the off chance
            # that the user has edited the review_type or claimed a team
            # review.
            vote_reference.reviewer = message.owner
            vote_reference.review_type = review_type
            vote_reference.comment = code_review_message
        if _notify_listeners:
            notify(NewCodeReviewCommentEvent(
                code_review_message, original_email))
        return code_review_message

    def updatePreviewDiff(self, diff_content, source_revision_id,
                          target_revision_id, prerequisite_revision_id=None,
                          conflicts=None):
        """See `IBranchMergeProposal`."""
        return PreviewDiff.create(
            self, diff_content, source_revision_id, target_revision_id,
            prerequisite_revision_id, conflicts)

    def getIncrementalDiffRanges(self):
        # Each group yields a (parent-of-first, last) revision pair.
        groups = self.getRevisionsSinceReviewStart()
        return [
            (group[0].revision.getLefthandParent(), group[-1].revision)
            for group in groups]

    def generateIncrementalDiff(self, old_revision, new_revision, diff=None):
        """See `IBranchMergeProposal`."""
        if diff is None:
            source_branch = self.source_branch.getBzrBranch()
            ignore_branches = [self.target_branch.getBzrBranch()]
            if self.prerequisite_branch is not None:
                ignore_branches.append(
                    self.prerequisite_branch.getBzrBranch())
            diff = Diff.generateIncrementalDiff(
                old_revision, new_revision, source_branch, ignore_branches)
        incremental_diff = IncrementalDiff()
        incremental_diff.diff = diff
        incremental_diff.branch_merge_proposal = self
        incremental_diff.old_revision = old_revision
        incremental_diff.new_revision = new_revision
        IMasterStore(IncrementalDiff).add(incremental_diff)
        return incremental_diff

    def getIncrementalDiffs(self, revision_list):
        """See `IBranchMergeProposal`."""
        diffs = Store.of(self).find(
            IncrementalDiff,
            IncrementalDiff.branch_merge_proposal_id == self.id)
        diff_dict = dict(
            ((diff.old_revision, diff.new_revision), diff)
            for diff in diffs)
        # Preserve the caller's ordering; missing pairs map to None.
        return [diff_dict.get(revisions) for revisions in revision_list]

    @property
    def revision_end_date(self):
        """The cutoff date for showing revisions.

        If the proposal has been merged, then we stop at the merged date.
        If it is rejected, we stop at the reviewed date.

        For superseded proposals, it should ideally use the non-existant
        date_last_modified, but could use the last comment date.
        """
        status = self.queue_status
        if status == BranchMergeProposalStatus.MERGED:
            return self.date_merged
        if status == BranchMergeProposalStatus.REJECTED:
            return self.date_reviewed
        # Otherwise return None representing an open end date.
        return None

    def _getNewerRevisions(self):
        # Revisions count from when the review was requested, or from the
        # proposal's creation if review was never explicitly requested.
        start_date = self.date_review_requested
        if start_date is None:
            start_date = self.date_created
        return self.source_branch.getMainlineBranchRevisions(
            start_date, self.revision_end_date, oldest_first=True)

    def getRevisionsSinceReviewStart(self):
        """Get the grouped revisions since the review started."""
        # Merge comments and revisions into one date-ordered stream; the -1
        # tiebreaker sorts a comment before a revision with the same date.
        entries = [
            ((comment.date_created, -1), comment)
            for comment in self.all_comments]
        revisions = self._getNewerRevisions()
        entries.extend(
            ((revision.date_created, branch_revision.sequence),
             branch_revision)
            for branch_revision, revision in revisions)
        entries.sort()
        # Yield runs of consecutive revisions; a comment closes a group.
        current_group = []
        for sortkey, entry in entries:
            if IBranchRevision.providedBy(entry):
                current_group.append(entry)
            else:
                if current_group != []:
                    yield current_group
                    current_group = []
        if current_group != []:
            yield current_group

    def getMissingIncrementalDiffs(self):
        ranges = self.getIncrementalDiffRanges()
        diffs = self.getIncrementalDiffs(ranges)
        return [
            range_ for range_, diff in zip(ranges, diffs) if diff is None]

    @staticmethod
    def preloadDataForBMPs(branch_merge_proposals, user):
        # Utility to load the data related to a list of bmps.
        # Circular imports.
        from lp.code.model.branch import Branch
        from lp.code.model.branchcollection import GenericBranchCollection
        from lp.registry.model.product import Product
        from lp.registry.model.distroseries import DistroSeries

        ids = set()
        source_branch_ids = set()
        person_ids = set()
        for mp in branch_merge_proposals:
            ids.add(mp.id)
            source_branch_ids.add(mp.source_branchID)
            person_ids.add(mp.registrantID)
            person_ids.add(mp.merge_reporterID)

        branches = load_related(
            Branch, branch_merge_proposals, (
                "target_branchID", "prerequisite_branchID",
                "source_branchID"))
        # The stacked on branches are used to check branch visibility.
        GenericBranchCollection.preloadVisibleStackedOnBranches(
            branches, user)

        if len(branches) == 0:
            return

        # Pre-load PreviewDiffs and Diffs.  DISTINCT ON the proposal id with
        # date_created DESC keeps only the newest diff per proposal.
        preview_diffs = IStore(BranchMergeProposal).find(
            PreviewDiff,
            PreviewDiff.branch_merge_proposal_id.is_in(ids)).order_by(
                PreviewDiff.branch_merge_proposal_id,
                Desc(PreviewDiff.date_created)).config(
                    distinct=[PreviewDiff.branch_merge_proposal_id])
        load_related(Diff, preview_diffs, ['diff_id'])
        for previewdiff in preview_diffs:
            cache = get_property_cache(previewdiff.branch_merge_proposal)
            cache.preview_diff = previewdiff

        # Add source branch owners' to the list of pre-loaded persons.
        person_ids.update(
            branch.ownerID for branch in branches
            if branch.id in source_branch_ids)

        # Pre-load Person and ValidPersonCache.
        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
            person_ids, need_validity=True))

        # Pre-load branches' data.
        load_related(SourcePackageName, branches, ['sourcepackagenameID'])
        load_related(DistroSeries, branches, ['distroseriesID'])
        load_related(Product, branches, ['productID'])
        GenericBranchCollection.preloadDataForBranches(branches)
class EditorCapsule(SQLObject, PluginCapsule, metaclass=SQLObjectAndABCMeta):
    """A capsule (an ordered set of slides) created in the editor plugin."""
    name = StringCol()
    owner = ForeignKey('User', cascade='null', default=None)
    channel = ForeignKey('PluginChannel', cascade=True)
    # A capsule name must be unique within its channel.
    capsule_id = DatabaseIndex('name', 'channel', unique=True)
    creation_date = DateTimeCol(notNone=True, default=lambda: datetime.now())
    slides = SQLMultipleJoin('EditorSlide', joinColumn='capsule_id')
    theme = StringCol(
        default=lambda: web.ctx.app_stack[0].config['default_theme'])
    # Position of the capsule within its channel's playlist.
    c_order = IntCol(notNone=True)
    validity_from = DateTimeCol(notNone=True)
    validity_to = DateTimeCol(notNone=True)

    @classmethod
    def rectify_c_order(cls, channel_id):
        """Renumber the channel's capsules to a gapless 0..n-1 c_order.

        Returns a fresh SelectResults of the channel's capsules ordered by
        c_order.  If the ordering is already gapless nothing is written.
        """
        capsules_list = list(
            EditorCapsule.select(EditorCapsule.q.channel == channel_id,
                                 orderBy=EditorCapsule.q.c_order))
        # Already gapless: first is 0 and last is len-1 (NOTE(review): this
        # check does not detect duplicates between the endpoints).
        if len(capsules_list) > 0 and capsules_list[0].c_order == 0 \
                and capsules_list[-1].c_order == len(capsules_list) - 1:
            return EditorCapsule.select(
                EditorCapsule.q.channel == channel_id,
                orderBy=EditorCapsule.q.c_order)
        i = 0
        for c in capsules_list:
            c.c_order = i
            i += 1
        return EditorCapsule.select(EditorCapsule.q.channel == channel_id,
                                    orderBy=EditorCapsule.q.c_order)

    def insert_slide_at(self, slide, index):
        """ inserts the slide at the correct position of the slides list of
        the capsule, updating the s_order of the slides located after the
        index position in the list.
        """
        # get the slides of the capsule, ordered by their s_order
        slides = list(
            EditorSlide.select(EditorSlide.q.capsule == self.id,
                               orderBy=EditorSlide.q.s_order))
        # set the s_order of the new slide
        slide.s_order = index
        slide.capsule = self.id
        # update the s_order of all the slides with a s_order >= the
        # s_order of this slide
        for i in range(index, len(slides)):
            slides[i].s_order += 1
        # Normalize in case the shifts left gaps.
        EditorSlide.rectify_s_order(self.id)

    def to_plugin_capsule(self) -> EditorPluginCapsule:
        # Build the plugin-facing capsule with slides in display order.
        caps = EditorPluginCapsule(theme=self.theme)
        for s in sorted(self.slides, key=lambda slide: slide.s_order):
            caps.add_slide(s.to_plugin_slide())
        return caps

    def get_slides(self) -> Iterable[PluginSlide]:
        return self.slides

    def get_theme(self) -> str:
        return self.theme

    def _get_is_active(self):
        # Active means "now" falls inside [validity_from, validity_to).
        now = datetime.now()
        return self.validity_from <= now < self.validity_to

    def _get_pretty_from(self):
        # Human-readable validity start, without microseconds.
        return str(self.validity_from.replace(microsecond=0).isoformat(' '))

    def duplicate(self, owner_id, c_order=None):
        """
        :return: a duplicate of this capsule belonging to the specified
        owner_id and containing a duplicate of the slides of this capsule.
        If c_order is not specified, the duplicate has the same c_order as
        this capsule.
        """
        c_order = c_order if c_order is not None else self.c_order

        def create_capsule(name):
            # Append '-copy' until the (name, channel) unique index accepts
            # the new row.
            try:
                return EditorCapsule(name=name + '-copy',
                                     channel=self.channel,
                                     ownerID=owner_id,
                                     creation_date=self.creation_date,
                                     c_order=c_order,
                                     validity_from=self.validity_from,
                                     validity_to=self.validity_to)
            except DuplicateEntryError:
                return create_capsule(name + '-copy')

        duplicate = create_capsule(str(self.name))
        for slide in self.slides:
            EditorSlide.from_slide(slide=slide, capsule=duplicate)
        return duplicate

    def to_json_api(self):
        # Serialization consumed by the editor's JSON API.
        return {
            'id': self.id,
            'name': self.name,
            'slides': [s.to_json_api() for s in self.slides],
            'validity': [
                int(self.validity_from.timestamp()),
                int(self.validity_to.timestamp())
            ],
            'theme': self.theme,
        }
class EditorSlide(SQLObject, PluginSlide, metaclass=SQLObjectAndABCMeta):
    """A single slide of an editor capsule."""
    duration = IntCol(notNone=True)
    # NOTE(review): a shared mutable default ({}) for a column default is a
    # classic aliasing hazard — confirm SQLObject copies it per row.
    content = JSONCol(notNone=True, default={})
    # Position of the slide inside its capsule.
    s_order = IntCol(notNone=True)
    template = StringCol(notNone=True)
    capsule = ForeignKey('EditorCapsule', cascade=True)
    asset_mappings = SQLMultipleJoin('AssetSlideMapping')

    @classmethod
    def from_slide(cls, slide: PluginSlide, capsule, slide_order=0):
        """Persist a PluginSlide as an EditorSlide of the given capsule."""
        def create_asset_mappings(slide):
            # Track every referenced file asset so deletions can be audited.
            for field, inputs in slide.get_content().items():
                if 'file' in inputs:
                    AssetSlideMapping(slide=slide, asset=inputs['file'])

        s = EditorSlide(content=slide.get_content(),
                        duration=slide.get_duration(),
                        template=slide.get_template(),
                        capsule=capsule,
                        s_order=slide_order)
        create_asset_mappings(s)
        return s

    @classmethod
    def from_video(cls, video, storage_manager, transcoding_manager, capsule,
                   user, background_color):
        """Store an uploaded video and create a slide for it.

        Non-WebM uploads are transcoded asynchronously; the slide is created
        by the transcoding callback once the WebM asset is ready.
        """
        def create_slide(asset_id, capsule_id):
            video_slide = cls.from_slide(
                VideoSlide({'file': asset_id},
                           template='template-image-bg'),
                capsule=capsule_id)
            video_slide.content['background-1'].update({
                'size': 'contain',
                'color': background_color
            })
            video_slide.content = video_slide.content  # Force SQLObject update
            capsule = EditorCapsule.get(capsule_id)
            # Append at the end of the capsule.
            capsule.insert_slide_at(video_slide, capsule.slides.count())
            return video_slide

        # TODO: Stream asset to disk instead of loading it into memory
        video_blob = video.file.read()
        if magic.from_buffer(video_blob, mime=True) != 'video/webm':
            def transcode_callback(success_status):
                if success_status:
                    create_slide(video_asset_id, capsule_id)
                    video_asset.file_size = os.path.getsize(video_asset.path)
                else:
                    # Transcoding failed; discard both assets.
                    video_asset.destroySelf()
                    original_video_asset.destroySelf()

            original_video_asset = storage_manager.store_file(
                video_blob, filename=video.filename, user=user)
            # Fix: this previously appended os.extsep + '.webm', yielding a
            # double dot ("name..webm") since '.webm' already contains the
            # separator.
            video_asset = storage_manager.create_asset(
                filename=video.filename + os.extsep + 'webm',
                user=user, mime_type='video/webm')
            # Capture plain ids for the callback, which may run after this
            # request's ORM objects are gone.
            video_asset_id, capsule_id = video_asset.id, capsule.id
transcoding_manager.enqueue_task(original_video_asset.path, video_asset.path, transcode_callback) return video_asset.path else: video_asset = storage_manager.store_file(video_blob, filename=video.filename, user=user) return create_slide(video_asset.id, capsule.id) def _init(self, id, connection=None, selectResults=None): return super()._init(id, connection, selectResults) @classmethod def rectify_s_order(cls, capsule_id): slide_list = list( EditorSlide.select(EditorSlide.q.capsule == capsule_id, orderBy=EditorSlide.q.s_order)) if len(slide_list) > 0 and slide_list[0].s_order == 0 and slide_list[ -1].s_order == len(slide_list) - 1: return slide_list i = 0 for s in slide_list: s.s_order = i i += 1 return slide_list def to_plugin_slide(self) -> EditorPluginSlide: return EditorPluginSlide(content=self.content, template=self.template, duration=int(self.duration)) def get_duration(self) -> int: return self.duration def get_duration_or_default(self): int(self.capsule.channel.plugin_config['duration']) * 1000 if 'duration' in self.capsule.channel.plugin_config \ else int(self.capsule.channel.plugin.channels_params['duration']['default']) * 1000 def get_content(self): return self.content def get_template(self) -> str: return self.template def duplicate(self, capsule=None, s_order=None): """ :return: a slide identical to this slide. If the capsule and arguments are not specified, they are the same as this slide. 
        It also duplicates the AssetSlideMappings of this slide
        """
        capsule = capsule if capsule is not None else self.capsule
        s_order = s_order if s_order is not None else self.s_order
        duplicate = EditorSlide(duration=self.duration,
                                content=self.get_content(),
                                s_order=s_order,
                                template=self.get_template(),
                                capsule=capsule)
        # Copy asset references so the duplicate keeps its files tracked.
        for mapping in AssetSlideMapping.selectBy(slide=self.id):
            AssetSlideMapping(assetID=mapping.asset.id, slideID=duplicate.id)
        return duplicate

    def get_render_path(self, ictv_home=None):
        # URL at which this slide can be rendered; defaults to the current
        # web.py application home.
        if ictv_home is None:
            ictv_home = web.ctx.home
        return '%s%s/%d/%d' % (ictv_home, 'render', self.capsule.id, self.id)

    @property
    def contains_video(self):
        # True if any content field references a video asset, either as a
        # stored file whose MIME type is video/*, or as an inline 'video'
        # input.
        for field, inputs in self.content.items():
            if 'file' in inputs:
                if Asset.get(inputs['file']).mime_type.startswith('video'):
                    return True
            elif 'video' in inputs:
                return True
        return False

    def to_json_api(self):
        # Serialization consumed by the editor's JSON API.
        return {
            'id': self.id,
            'duration': self.duration,
            'content': self.content,
            'template': self.template,
        }
class ProductSeries(SQLBase, BugTargetBase, HasMilestonesMixin, HasSpecificationsMixin, HasTranslationImportsMixin, HasTranslationTemplatesMixin, StructuralSubscriptionTargetMixin, SeriesMixin): """A series of product releases.""" implements( IBugSummaryDimension, IProductSeries, IServiceUsage, ISeriesBugTarget) delegates(ISpecificationTarget, 'product') _table = 'ProductSeries' product = ForeignKey(dbName='product', foreignKey='Product', notNull=True) status = EnumCol( notNull=True, schema=SeriesStatus, default=SeriesStatus.DEVELOPMENT) name = StringCol(notNull=True) datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW) owner = ForeignKey( dbName="owner", foreignKey="Person", storm_validator=validate_person, notNull=True) driver = ForeignKey( dbName="driver", foreignKey="Person", storm_validator=validate_person, notNull=False, default=None) branch = ForeignKey(foreignKey='Branch', dbName='branch', default=None) def validate_autoimport_mode(self, attr, value): # Perform the normal validation for None if value is None: return value if (self.product.private and value != TranslationsBranchImportMode.NO_IMPORT): raise ProprietaryProduct('Translations are disabled for' ' proprietary projects.') return value translations_autoimport_mode = EnumCol( dbName='translations_autoimport_mode', notNull=True, schema=TranslationsBranchImportMode, default=TranslationsBranchImportMode.NO_IMPORT, storm_validator=validate_autoimport_mode) translations_branch = ForeignKey( dbName='translations_branch', foreignKey='Branch', notNull=False, default=None) # where are the tarballs released from this branch placed? 
releasefileglob = StringCol(default=None) releaseverstyle = StringCol(default=None) packagings = SQLMultipleJoin('Packaging', joinColumn='productseries', orderBy=['-id']) @property def pillar(self): """See `IBugTarget`.""" return self.product @property def series(self): """See `ISeriesBugTarget`.""" return self @property def answers_usage(self): """See `IServiceUsage.`""" return self.product.answers_usage @property def blueprints_usage(self): """See `IServiceUsage.`""" return self.product.blueprints_usage @property def translations_usage(self): """See `IServiceUsage.`""" return self.product.translations_usage @property def codehosting_usage(self): """See `IServiceUsage.`""" return self.product.codehosting_usage @property def bug_tracking_usage(self): """See `IServiceUsage.`""" return self.product.bug_tracking_usage @property def uses_launchpad(self): """ See `IServiceUsage.`""" return ( service_uses_launchpad(self.blueprints_usage) or service_uses_launchpad(self.translations_usage) or service_uses_launchpad(self.answers_usage) or service_uses_launchpad(self.codehosting_usage) or service_uses_launchpad(self.bug_tracking_usage)) def _getMilestoneCondition(self): """See `HasMilestonesMixin`.""" return (Milestone.productseries == self) @property def releases(self): """See `IProductSeries`.""" store = Store.of(self) # The Milestone is cached too because most uses of a ProductRelease # need it. The decorated resultset returns just the ProductRelease. 
def decorate(row): product_release, milestone = row return product_release result = store.find( (ProductRelease, Milestone), Milestone.productseries == self, ProductRelease.milestone == Milestone.id) result = result.order_by(Desc('datereleased')) return DecoratedResultSet(result, decorate) @cachedproperty def _cached_releases(self): return self.releases def getCachedReleases(self): """See `IProductSeries`.""" return self._cached_releases @property def release_files(self): """See `IProductSeries`.""" files = set() for release in self.releases: files = files.union(release.files) return files @property def displayname(self): return self.name @property def parent(self): """See IProductSeries.""" return self.product @property def bugtargetdisplayname(self): """See IBugTarget.""" return "%s %s" % (self.product.displayname, self.name) @property def bugtargetname(self): """See IBugTarget.""" return "%s/%s" % (self.product.name, self.name) @property def bugtarget_parent(self): """See `ISeriesBugTarget`.""" return self.parent def getPOTemplate(self, name): """See IProductSeries.""" return POTemplate.selectOne( "productseries = %s AND name = %s" % sqlvalues(self.id, name)) @property def title(self): return '%s %s series' % (self.product.displayname, self.displayname) @property def bug_reporting_guidelines(self): """See `IBugTarget`.""" return self.product.bug_reporting_guidelines @property def bug_reported_acknowledgement(self): """See `IBugTarget`.""" return self.product.bug_reported_acknowledgement @property def enable_bugfiling_duplicate_search(self): """See `IBugTarget`.""" return self.product.enable_bugfiling_duplicate_search @property def sourcepackages(self): """See IProductSeries""" from lp.registry.model.sourcepackage import SourcePackage ret = self.packagings ret = [SourcePackage(sourcepackagename=r.sourcepackagename, distroseries=r.distroseries) for r in ret] ret.sort(key=lambda a: a.distribution.name + a.distroseries.version + a.sourcepackagename.name) return ret 
    @property
    def is_development_focus(self):
        """See `IProductSeries`."""
        return self == self.product.development_focus

    def specifications(self, user, sort=None, quantity=None, filter=None,
                       need_people=True, need_branches=True,
                       need_workitems=False):
        """See IHasSpecifications.

        The rules for filtering are that there are three areas where you
        can apply a filter:

          - acceptance, which defaults to ACCEPTED if nothing is said,
          - completeness, which defaults to showing BOTH if nothing is said
          - informational, which defaults to showing BOTH if nothing is said
        """
        base_clauses = [Specification.productseriesID == self.id]
        return search_specifications(
            self, base_clauses, user, sort, quantity, filter,
            default_acceptance=True, need_people=need_people,
            need_branches=need_branches, need_workitems=need_workitems)

    @property
    def all_specifications(self):
        # All specifications for this series, with no filtering applied.
        return Store.of(self).find(
            Specification, Specification.productseriesID == self.id)

    def _customizeSearchParams(self, search_params):
        """Customize `search_params` for this product series."""
        search_params.setProductSeries(self)

    def _getOfficialTagClause(self):
        # Official bug tags are defined on the product, not per series.
        return self.product._getOfficialTagClause()

    @property
    def official_bug_tags(self):
        """See `IHasBugs`."""
        return self.product.official_bug_tags

    def getBugSummaryContextWhereClause(self):
        """See BugTargetBase."""
        # Circular fail: import locally to break the import cycle.
        from lp.bugs.model.bugsummary import BugSummary
        return BugSummary.productseries_id == self.id

    def getLatestRelease(self):
        """See `IProductRelease.`"""
        # `releases` is ordered most-recent-first, so [0] is the latest.
        try:
            return self.releases[0]
        except IndexError:
            return None

    def getRelease(self, version):
        return getUtility(IProductReleaseSet).getBySeriesAndVersion(
            self, version)

    def getPackage(self, distroseries):
        """See IProductSeries."""
        for pkg in self.sourcepackages:
            if pkg.distroseries == distroseries:
                return pkg
        # XXX sabdfl 2005-06-23: This needs to search through the ancestry
        # of the distroseries to try to find a relevant packaging record
        raise NotFoundError(distroseries)

    def getUbuntuTranslationFocusPackage(self):
        """See `IProductSeries`."""
        # Preference order: the translation-focus series, then the current
        # series, then any other Ubuntu series (first match wins for the
        # last category).
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        translation_focus = ubuntu.translation_focus
        current_series = ubuntu.currentseries
        candidate = None
        for package in self.sourcepackages:
            if package.distroseries == translation_focus:
                return package
            if package.distroseries == current_series:
                candidate = package
            elif package.distroseries.distribution == ubuntu:
                if candidate is None:
                    candidate = package
        return candidate

    def setPackaging(self, distroseries, sourcepackagename, owner):
        """See IProductSeries."""
        if distroseries.distribution.full_functionality:
            # Fully-functional distributions publish their packages, so an
            # unpublished source package cannot be linked.
            source_package = distroseries.getSourcePackage(
                sourcepackagename)
            if source_package.currentrelease is None:
                raise AssertionError(
                    "The source package is not published in %s."
                    % distroseries.displayname)
        for pkg in self.packagings:
            if (pkg.distroseries == distroseries and
                pkg.sourcepackagename == sourcepackagename):
                # we have found a matching Packaging record
                # and it has the same source package name
                return pkg
        # ok, we didn't find a packaging record that matches, let's go
        # ahead and create one
        pkg = PackagingUtil.createPackaging(
            distroseries=distroseries,
            sourcepackagename=sourcepackagename,
            productseries=self,
            packaging=PackagingType.PRIME,
            owner=owner)
        pkg.sync()  # convert UTC_NOW to actual datetime
        return pkg

    def getPackagingInDistribution(self, distribution):
        """See IProductSeries."""
        history = []
        for pkging in self.packagings:
            if pkging.distroseries.distribution == distribution:
                history.append(pkging)
        return history

    def newMilestone(self, name, dateexpected=None, summary=None,
                     code_name=None, tags=None):
        """See IProductSeries."""
        milestone = Milestone(
            name=name, dateexpected=dateexpected, summary=summary,
            product=self.product, productseries=self, code_name=code_name)
        if tags:
            # `tags` is a whitespace-separated string of tag names.
            milestone.setTags(tags.split())
        return milestone

    def getTemplatesCollection(self):
        """See `IHasTranslationTemplates`."""
        return TranslationTemplatesCollection().restrictProductSeries(self)

    def getSharingPartner(self):
        """See `IHasTranslationTemplates`."""
        return self.getUbuntuTranslationFocusPackage()

    @property
    def potemplate_count(self):
        """See `IProductSeries`."""
        return self.getCurrentTranslationTemplates().count()

    @property
    def productserieslanguages(self):
        """See `IProductSeries`."""
        store = Store.of(self)
        english = getUtility(ILaunchpadCelebrities).english
        results = []
        if self.potemplate_count == 1:
            # If there is only one POTemplate in a ProductSeries, fetch
            # Languages and corresponding POFiles with one query, along
            # with their stats, and put them into ProductSeriesLanguage
            # objects.
            origin = [Language, POFile, POTemplate]
            query = store.using(*origin).find(
                (Language, POFile),
                POFile.language == Language.id,
                Language.visible == True,
                POFile.potemplate == POTemplate.id,
                POTemplate.productseries == self,
                POTemplate.iscurrent == True,
                Language.id != english.id)
            ordered_results = query.order_by(['Language.englishname'])
            for language, pofile in ordered_results:
                psl = ProductSeriesLanguage(self, language, pofile=pofile)
                total = pofile.potemplate.messageCount()
                imported = pofile.currentCount()
                changed = pofile.updatesCount()
                rosetta = pofile.rosettaCount()
                unreviewed = pofile.unreviewedCount()
                translated = imported + rosetta
                new = rosetta - changed
                psl.setCounts(total, translated, new, changed, unreviewed)
                psl.last_changed_date = pofile.date_changed
                results.append(psl)
        else:
            # If there is more than one template, do a single
            # query to count total messages in all templates.
            query = store.find(
                Sum(POTemplate.messagecount),
                POTemplate.productseries == self,
                POTemplate.iscurrent == True)
            total, = query
            # And another query to fetch all Languages with translations
            # in this ProductSeries, along with their cumulative stats
            # for imported, changed, rosetta-provided and unreviewed
            # translations.
            query = store.find(
                (Language,
                 Sum(POFile.currentcount),
                 Sum(POFile.updatescount),
                 Sum(POFile.rosettacount),
                 Sum(POFile.unreviewed_count),
                 Max(POFile.date_changed)),
                POFile.language == Language.id,
                Language.visible == True,
                POFile.potemplate == POTemplate.id,
                POTemplate.productseries == self,
                POTemplate.iscurrent == True,
                Language.id != english.id).group_by(Language)
            ordered_results = query.order_by(['Language.englishname'])
            for (language, imported, changed, rosetta, unreviewed,
                 last_changed) in ordered_results:
                psl = ProductSeriesLanguage(self, language)
                translated = imported + rosetta
                new = rosetta - changed
                psl.setCounts(total, translated, new, changed, unreviewed)
                psl.last_changed_date = last_changed
                results.append(psl)
        return results

    def getTimeline(self, include_inactive=False):
        # Build the series timeline from its milestones and releases, most
        # recent landmark first.
        landmarks = []
        for milestone in self.all_milestones[:MAX_TIMELINE_MILESTONES]:
            if milestone.product_release is None:
                # Skip inactive milestones, but include releases,
                # even if include_inactive is False.
                if not include_inactive and not milestone.active:
                    continue
                node_type = 'milestone'
                date = milestone.dateexpected
                uri = canonical_url(milestone, path_only_if_possible=True)
            else:
                node_type = 'release'
                date = milestone.product_release.datereleased
                uri = canonical_url(
                    milestone.product_release, path_only_if_possible=True)
            if isinstance(date, datetime.datetime):
                date = date.date().isoformat()
            elif isinstance(date, datetime.date):
                date = date.isoformat()
            entry = dict(
                name=milestone.name,
                code_name=milestone.code_name,
                type=node_type,
                date=date,
                uri=uri)
            landmarks.append(entry)
        landmarks = sorted_dotted_numbers(landmarks, key=landmark_key)
        landmarks.reverse()
        return TimelineProductSeries(
            name=self.name,
            is_development_focus=self.is_development_focus,
            status=self.status,
            uri=canonical_url(self, path_only_if_possible=True),
            landmarks=landmarks,
            product=self.product)

    def getBugTaskWeightFunction(self):
        """Provide a weight function to determine optimal bug task.

        Full weight is given to tasks for this product series.

        If the series isn't found, the product task is better than others.
        """
        seriesID = self.id
        productID = self.productID

        def weight_function(bugtask):
            # Lower first element sorts earlier: series task (1) beats
            # product task (2) beats anything else (3).
            if bugtask.productseriesID == seriesID:
                return OrderedBugTask(1, bugtask.id, bugtask)
            elif bugtask.productID == productID:
                return OrderedBugTask(2, bugtask.id, bugtask)
            else:
                return OrderedBugTask(3, bugtask.id, bugtask)
        return weight_function

    def userCanView(self, user):
        """See `IProductSeriesPublic`."""
        # Delegate the permission check to the parent product.
        return self.product.userCanView(user)
class SBPerson(SQLObject):
    """A person with one-to-many addresses and a many-to-many
    shared-address relationship.
    """
    name = StringCol()
    # One-to-many: SBAddress rows whose personID column points here.
    addresses = SQLMultipleJoin('SBAddress', joinColumn='personID')
    # Many-to-many via an intermediate table; addRemoveName gives the
    # helper methods the names addSharedAddress/removeSharedAddress.
    sharedAddresses = SQLRelatedJoin('SBAddress',
                                     addRemoveName='SharedAddress')
class SourcePackageRelease(SQLBase):
    """One version of a source package, as uploaded to an archive."""

    implements(ISourcePackageRelease)

    _table = 'SourcePackageRelease'

    section = ForeignKey(foreignKey='Section', dbName='section')
    creator = ForeignKey(
        dbName='creator', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    component = ForeignKey(foreignKey='Component', dbName='component')
    sourcepackagename = ForeignKey(
        foreignKey='SourcePackageName', dbName='sourcepackagename',
        notNull=True)
    maintainer = ForeignKey(
        dbName='maintainer', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    dscsigningkey = ForeignKey(foreignKey='GPGKey', dbName='dscsigningkey')
    urgency = EnumCol(
        dbName='urgency', schema=SourcePackageUrgency,
        default=SourcePackageUrgency.LOW, notNull=True)
    dateuploaded = UtcDateTimeCol(
        dbName='dateuploaded', notNull=True, default=UTC_NOW)
    dsc = StringCol(dbName='dsc')
    version = StringCol(dbName='version', notNull=True)
    changelog = ForeignKey(foreignKey='LibraryFileAlias', dbName='changelog')
    changelog_entry = StringCol(dbName='changelog_entry')
    builddepends = StringCol(dbName='builddepends')
    builddependsindep = StringCol(dbName='builddependsindep')
    build_conflicts = StringCol(dbName='build_conflicts')
    build_conflicts_indep = StringCol(dbName='build_conflicts_indep')
    architecturehintlist = StringCol(dbName='architecturehintlist')
    homepage = StringCol(dbName='homepage')
    format = EnumCol(
        dbName='format', schema=SourcePackageType,
        default=SourcePackageType.DPKG, notNull=True)
    upload_distroseries = ForeignKey(
        foreignKey='DistroSeries', dbName='upload_distroseries')
    upload_archive = ForeignKey(
        foreignKey='Archive', dbName='upload_archive', notNull=True)

    source_package_recipe_build_id = Int(name='sourcepackage_recipe_build')
    source_package_recipe_build = Reference(
        source_package_recipe_build_id, 'SourcePackageRecipeBuild.id')

    # XXX cprov 2006-09-26: Those fields are set as notNull and required in
    # ISourcePackageRelease, however they can't be not NULL in DB since old
    # records don't satisfy this condition. We will sort it before using
    # 'NoMoreAptFtparchive' implementation for PRIMARY archive. For PPA
    # (primary target) we don't need to populate old records.
    dsc_maintainer_rfc822 = StringCol(dbName='dsc_maintainer_rfc822')
    dsc_standards_version = StringCol(dbName='dsc_standards_version')
    dsc_format = StringCol(dbName='dsc_format')
    dsc_binaries = StringCol(dbName='dsc_binaries')

    # MultipleJoins
    files = SQLMultipleJoin(
        'SourcePackageReleaseFile', joinColumn='sourcepackagerelease',
        orderBy="libraryfile")
    publishings = SQLMultipleJoin(
        'SourcePackagePublishingHistory',
        joinColumn='sourcepackagerelease', orderBy="-datecreated")

    _user_defined_fields = StringCol(dbName='user_defined_fields')

    def __init__(self, *args, **kwargs):
        if 'user_defined_fields' in kwargs:
            kwargs['_user_defined_fields'] = simplejson.dumps(
                kwargs['user_defined_fields'])
            del kwargs['user_defined_fields']
        # copyright isn't on the Storm class, since we don't want it
        # loaded every time. Set it separately.
        # BUGFIX: previously `copyright` was only bound inside the `if`,
        # so constructing a SourcePackageRelease without a `copyright`
        # keyword raised UnboundLocalError after the super() call. Now
        # the attribute is only set when a value was actually supplied.
        copyright = kwargs.pop('copyright', None)
        super(SourcePackageRelease, self).__init__(*args, **kwargs)
        if copyright is not None:
            self.copyright = copyright

    @property
    def copyright(self):
        """See `ISourcePackageRelease`."""
        # Fetched with raw SQL so the (large) column is not loaded on
        # every object retrieval.
        store = Store.of(self)
        store.flush()
        return store.execute(
            "SELECT copyright FROM sourcepackagerelease WHERE id=%s",
            (self.id, )).get_one()[0]

    @copyright.setter
    def copyright(self, content):
        """See `ISourcePackageRelease`."""
        store = Store.of(self)
        store.flush()
        if content is not None:
            content = unicode(content)
        store.execute(
            "UPDATE sourcepackagerelease SET copyright=%s WHERE id=%s",
            (content, self.id))

    @property
    def user_defined_fields(self):
        """See `IBinaryPackageRelease`."""
        # Stored as a JSON document; [] when the column is NULL.
        if self._user_defined_fields is None:
            return []
        return simplejson.loads(self._user_defined_fields)

    @cachedproperty
    def package_diffs(self):
        # Diffs *to* this release, newest request first.
        return list(
            Store.of(self).find(
                PackageDiff, to_source=self).order_by(
                Desc(PackageDiff.date_requested)))

    @property
    def builds(self):
        """See `ISourcePackageRelease`."""
        # Excluding PPA builds may seem like a strange thing to do, but,
        # since Archive.copyPackage can copy packages across archives, a
        # build may well have a different archive to the corresponding
        # sourcepackagerelease.
        return BinaryPackageBuild.select(
            """
            source_package_release = %s AND
            archive.id = binarypackagebuild.archive AND
            archive.purpose IN %s
            """ % sqlvalues(self.id, MAIN_ARCHIVE_PURPOSES),
            orderBy=['-date_created', 'id'],
            clauseTables=['Archive'])

    @property
    def age(self):
        """See ISourcePackageRelease."""
        now = datetime.datetime.now(pytz.timezone('UTC'))
        return now - self.dateuploaded

    @property
    def latest_build(self):
        # Builds are ordered newest-first; None when there are no builds.
        builds = self._cached_builds
        if len(builds) > 0:
            return builds[0]
        return None

    def failed_builds(self):
        # NOTE(review): this reads `build.buildstate` while
        # `needs_building` reads `build.status`; confirm both attributes
        # exist on BinaryPackageBuild before unifying them.
        return [build for build in self._cached_builds
                if build.buildstate == BuildStatus.FAILEDTOBUILD]

    @property
    def needs_building(self):
        # True when any build is still pending or waiting on deps/chroot.
        for build in self._cached_builds:
            if build.status in [
                    BuildStatus.NEEDSBUILD,
                    BuildStatus.MANUALDEPWAIT,
                    BuildStatus.CHROOTWAIT]:
                return True
        return False

    @cachedproperty
    def _cached_builds(self):
        # The reason we have this as a cachedproperty is that all the
        # *build* methods here need access to it; better not to
        # recalculate it multiple times.
        return list(self.builds)

    @property
    def name(self):
        return self.sourcepackagename.name

    @property
    def sourcepackage(self):
        """See ISourcePackageRelease."""
        # By supplying the sourcepackagename instead of its string name,
        # we avoid doing an extra query doing getSourcepackage.
        # XXX 2008-06-16 mpt bug=241298: cprov says this property "won't
        # be as useful as it looks once we start supporting derivation ...
        # [It] is dangerous and should be renamed (or removed)".
        series = self.upload_distroseries
        return series.getSourcePackage(self.sourcepackagename)

    @property
    def distrosourcepackage(self):
        """See ISourcePackageRelease."""
        # By supplying the sourcepackagename instead of its string name,
        # we avoid doing an extra query doing getSourcepackage
        distribution = self.upload_distroseries.distribution
        return distribution.getSourcePackage(self.sourcepackagename)

    @property
    def title(self):
        return '%s - %s' % (self.sourcepackagename.name, self.version)

    @property
    def current_publishings(self):
        """See ISourcePackageRelease."""
        # Local import avoids a circular dependency.
        from lp.soyuz.model.distroseriessourcepackagerelease import (
            DistroSeriesSourcePackageRelease)
        return [DistroSeriesSourcePackageRelease(pub.distroseries, self)
                for pub in self.publishings]

    @cachedproperty
    def published_archives(self):
        # Distinct archives this release is published in, ordered by id.
        archives = set(
            pub.archive for pub in self.publishings.prejoin(['archive']))
        return sorted(archives, key=operator.attrgetter('id'))

    def addFile(self, file):
        """See ISourcePackageRelease."""
        return SourcePackageReleaseFile(
            sourcepackagerelease=self,
            filetype=determine_source_file_type(file.filename),
            libraryfile=file)

    def getFileByName(self, filename):
        """See `ISourcePackageRelease`."""
        sprf = Store.of(self).find(
            SourcePackageReleaseFile,
            SourcePackageReleaseFile.sourcepackagerelease == self.id,
            LibraryFileAlias.id == SourcePackageReleaseFile.libraryfileID,
            LibraryFileAlias.filename == filename).one()
        if sprf:
            return sprf.libraryfile
        else:
            raise NotFoundError(filename)

    def getPackageSize(self):
        """See ISourcePackageRelease."""
        # Sum of the file sizes in KiB; 0.0 when there are no files.
        size_query = """
            SELECT SUM(LibraryFileContent.filesize)/1024.0
            FROM SourcePackagereLease
                JOIN SourcePackageReleaseFile ON
                    SourcePackageReleaseFile.sourcepackagerelease =
                    SourcePackageRelease.id
                JOIN LibraryFileAlias ON
                    SourcePackageReleaseFile.libraryfile =
                    LibraryFileAlias.id
                JOIN LibraryFileContent ON
                    LibraryFileAlias.content = LibraryFileContent.id
            WHERE SourcePackageRelease.id = %s
            """ % sqlvalues(self)
        cur = cursor()
        cur.execute(size_query)
        results = cur.fetchone()
        if len(results) == 1 and results[0] is not None:
            return float(results[0])
        else:
            return 0.0

    def createBuild(self, distro_arch_series, pocket, archive,
                    processor=None, status=None):
        """See ISourcePackageRelease."""
        # If a processor is not provided, use the DAS' processor.
        if processor is None:
            processor = distro_arch_series.processor
        if status is None:
            status = BuildStatus.NEEDSBUILD
        # Force the current timestamp instead of the default
        # UTC_NOW for the transaction, avoid several row with
        # same datecreated.
        date_created = datetime.datetime.now(pytz.timezone('UTC'))
        return getUtility(IBinaryPackageBuildSet).new(
            distro_arch_series=distro_arch_series,
            source_package_release=self,
            processor=processor,
            status=status,
            date_created=date_created,
            pocket=pocket,
            archive=archive)

    def findBuildsByArchitecture(self, distroseries, archive):
        """Find associated builds, by architecture.

        Looks for `BinaryPackageBuild` records for this source package
        release, with publication records in the distroseries associated
        with `distroarchseries`. There should be at most one of these per
        architecture.

        :param distroarchseries: `DistroArchSeries` to look for.
        :return: A dict mapping architecture tags (in string form,
            e.g. 'i386') to `BinaryPackageBuild`s for that build.
        """
        # Avoid circular imports.
        from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
        from lp.soyuz.model.distroarchseries import DistroArchSeries
        from lp.soyuz.model.publishing import BinaryPackagePublishingHistory

        BuildDAS = ClassAlias(DistroArchSeries, 'BuildDAS')
        PublishDAS = ClassAlias(DistroArchSeries, 'PublishDAS')

        query = Store.of(self).find(
            (BuildDAS.architecturetag, BinaryPackageBuild),
            BinaryPackageBuild.source_package_release == self,
            BinaryPackageRelease.buildID == BinaryPackageBuild.id,
            BuildDAS.id == BinaryPackageBuild.distro_arch_series_id,
            BinaryPackagePublishingHistory.binarypackagereleaseID ==
                BinaryPackageRelease.id,
            BinaryPackagePublishingHistory.archiveID == archive.id,
            PublishDAS.id ==
                BinaryPackagePublishingHistory.distroarchseriesID,
            PublishDAS.distroseriesID == distroseries.id,
            # Architecture-independent binary package releases are built
            # in the nominated arch-indep architecture but published in
            # all architectures. This condition makes sure we consider
            # only builds that have been published in their own
            # architecture.
            PublishDAS.architecturetag == BuildDAS.architecturetag)
        results = list(query.config(distinct=True))
        mapped_results = dict(results)
        assert len(mapped_results) == len(results), (
            "Found multiple build candidates per architecture: %s. "
            "This may mean that we have a serious problem in our DB model. "
            "Further investigation is required."
            % [(tag, build.id) for tag, build in results])
        return mapped_results

    def getBuildByArch(self, distroarchseries, archive):
        """See ISourcePackageRelease."""
        # First we try to follow any binaries built from the given source
        # in a distroarchseries with the given architecturetag and
        # published in the given (distroarchseries, archive) location.
        # (Querying all architectures and then picking the right one out
        # of the result turns out to be much faster than querying for
        # just the architecture we want).
        builds_by_arch = self.findBuildsByArchitecture(
            distroarchseries.distroseries, archive)
        build = builds_by_arch.get(distroarchseries.architecturetag)
        if build is not None:
            # If there was any published binary we can use its original
            # build. This case covers the situations when both source and
            # binaries got copied from another location.
            return build

        # If there was no published binary we have to try to find a
        # suitable build in all possible location across the distroseries
        # inheritance tree. See below.
        clause_tables = ['DistroArchSeries']
        queries = [
            "DistroArchSeries.id = BinaryPackageBuild.distro_arch_series "
            "AND "
            "BinaryPackageBuild.archive = %s AND "
            "DistroArchSeries.architecturetag = %s AND "
            "BinaryPackageBuild.source_package_release = %s" % (
                sqlvalues(archive.id, distroarchseries.architecturetag,
                          self))]
        # Query only the last build record for this sourcerelease
        # across all possible locations.
        query = " AND ".join(queries)
        return BinaryPackageBuild.selectFirst(
            query, clauseTables=clause_tables, orderBy=['-date_created'])

    def override(self, component=None, section=None, urgency=None):
        """See ISourcePackageRelease."""
        if component is not None:
            self.component = component
            # See if the new component requires a new archive:
            distribution = self.upload_distroseries.distribution
            new_archive = distribution.getArchiveByComponent(component.name)
            if new_archive is not None:
                self.upload_archive = new_archive
            else:
                # BUGFIX: the component name was never interpolated into
                # the message; the literal '%s' used to be raised.
                raise QueueInconsistentStateError(
                    "New component '%s' requires a non-existent archive."
                    % component.name)
        if section is not None:
            self.section = section
        if urgency is not None:
            self.urgency = urgency

    @property
    def upload_changesfile(self):
        """See `ISourcePackageRelease`."""
        package_upload = self.package_upload
        # Cope with `SourcePackageRelease`s imported by gina, they do not
        # have a corresponding `PackageUpload` record.
        if package_upload is None:
            return None
        return package_upload.changesfile

    @property
    def package_upload(self):
        """See `ISourcePackageRelease`."""
        store = Store.of(self)
        # The join on 'changesfile' is used for pre-fetching the
        # corresponding library file, so callsites don't have to issue an
        # extra query.
        origin = [
            PackageUploadSource,
            Join(PackageUpload,
                 PackageUploadSource.packageuploadID == PackageUpload.id),
            Join(LibraryFileAlias,
                 LibraryFileAlias.id == PackageUpload.changes_file_id),
            Join(LibraryFileContent,
                 LibraryFileContent.id == LibraryFileAlias.contentID),
            ]
        results = store.using(*origin).find(
            (PackageUpload, LibraryFileAlias, LibraryFileContent),
            PackageUploadSource.sourcepackagerelease == self,
            PackageUpload.archive == self.upload_archive,
            PackageUpload.distroseries == self.upload_distroseries)
        # Return the unique `PackageUpload` record that corresponds to
        # the upload of this `SourcePackageRelease`, load the
        # `LibraryFileAlias` and the `LibraryFileContent` in cache
        # because it's most likely they will be needed.
        return DecoratedResultSet(results, operator.itemgetter(0)).one()

    @property
    def uploader(self):
        """See `ISourcePackageRelease`"""
        # Recipe builds trump DSC signatures; may be None for neither.
        if self.source_package_recipe_build is not None:
            return self.source_package_recipe_build.requester
        if self.dscsigningkey is not None:
            return self.dscsigningkey.owner
        return None

    @property
    def change_summary(self):
        """See ISourcePackageRelease"""
        # this regex is copied from apt-listchanges.py courtesy of MDZ
        # (now a raw string: \S and \w are regex escapes, not string
        # escapes).
        new_stanza_line = re.compile(
            r'^\S+ \((?P<version>.*)\) .*;.*urgency=(?P<urgency>\w+).*')
        logfile = StringIO(self.changelog_entry)
        change = ''
        top_stanza = False
        for line in logfile.readlines():
            match = new_stanza_line.match(line)
            if match:
                if top_stanza:
                    break
                top_stanza = True
            change += line
        return change

    def attachTranslationFiles(self, tarball_alias, by_maintainer,
                               importer=None):
        """See ISourcePackageRelease."""
        tarball = tarball_alias.read()
        if importer is None:
            importer = getUtility(ILaunchpadCelebrities).rosetta_experts
        queue = getUtility(ITranslationImportQueue)
        only_templates = (
            self.sourcepackage.has_sharing_translation_templates)
        queue.addOrUpdateEntriesFromTarball(
            tarball, by_maintainer, importer,
            sourcepackagename=self.sourcepackagename,
            distroseries=self.upload_distroseries,
            filename_filter=_filter_ubuntu_translation_file,
            only_templates=only_templates)

    def getDiffTo(self, to_sourcepackagerelease):
        """See ISourcePackageRelease."""
        return PackageDiff.selectOneBy(
            from_source=self, to_source=to_sourcepackagerelease)

    def requestDiffTo(self, requester, to_sourcepackagerelease):
        """See ISourcePackageRelease."""
        candidate = self.getDiffTo(to_sourcepackagerelease)
        if candidate is not None:
            raise PackageDiffAlreadyRequested(
                "%s has already been requested" % candidate.title)
        if self.sourcepackagename.name == 'udev':
            # XXX 2009-11-23 Julian bug=314436
            # Currently diff output for udev will fill disks. It's
            # disabled until diffutils is fixed in that bug.
            status = PackageDiffStatus.FAILED
        else:
            status = PackageDiffStatus.PENDING
        Store.of(to_sourcepackagerelease).flush()
        # Invalidate the cached package_diffs on the target release.
        del get_property_cache(to_sourcepackagerelease).package_diffs
        packagediff = PackageDiff(
            from_source=self, to_source=to_sourcepackagerelease,
            requester=requester, status=status)
        if status == PackageDiffStatus.PENDING:
            getUtility(IPackageDiffJobSource).create(packagediff)
        return packagediff

    def aggregate_changelog(self, since_version):
        """See `ISourcePackagePublishingHistory`."""
        if self.changelog is None:
            return None
        apt_pkg.init_system()
        chunks = []
        changelog = self.changelog
        # The python-debian API for parsing changelogs is pretty awful.
        # The only useful way of extracting info is to use the iterator
        # on Changelog and then compare versions.
        try:
            changelog_text = changelog.read().decode("UTF-8", "replace")
            for block in Changelog(changelog_text):
                version = block._raw_version
                if (since_version and
                    apt_pkg.version_compare(version, since_version) <= 0):
                    break
                # Poking in private attributes is not nice but again the
                # API is terrible. We want to ensure that the name/date
                # line is omitted from these composite changelogs.
                block._no_trailer = True
                try:
                    # python-debian adds an extra blank line to the chunks
                    # so we'll have to sort this out.
                    chunks.append(str(block).rstrip())
                except ChangelogCreateError:
                    continue
                if not since_version:
                    # If a particular version was not requested we just
                    # return the most recent changelog entry.
                    break
        except ChangelogParseError:
            return None
        output = "\n\n".join(chunks)
        return output.decode("utf-8", "replace")

    def getActiveArchSpecificPublications(self, archive, distroseries,
                                          pocket):
        """Find architecture-specific binary publications for this release.

        For example, say source package release contains binary packages
        of:
         * "foo" for i386 (pending in i386)
         * "foo" for amd64 (published in amd64)
         * "foo-common" for the "all" architecture (pending or published
           in various real processor architectures)

        In that case, this search will return foo(i386) and foo(amd64).

        The dominator uses this when figuring out whether foo-common can
        be superseded: we don't track dependency graphs, but we know that
        the architecture-specific "foo" releases are likely to depend on
        the architecture-independent foo-common release.

        :param archive: The `Archive` to search.
        :param distroseries: The `DistroSeries` to search.
        :param pocket: The `PackagePublishingPocket` to search.
        :return: A Storm result set of active, architecture-specific
            `BinaryPackagePublishingHistory` objects for this source
            package release and the given `archive`, `distroseries`, and
            `pocket`.
        """
        # Avoid circular imports.
        from lp.soyuz.interfaces.publishing import active_publishing_status
        from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
        from lp.soyuz.model.distroarchseries import DistroArchSeries
        from lp.soyuz.model.publishing import BinaryPackagePublishingHistory

        return Store.of(self).find(
            BinaryPackagePublishingHistory,
            BinaryPackageBuild.source_package_release_id == self.id,
            BinaryPackageRelease.build == BinaryPackageBuild.id,
            BinaryPackagePublishingHistory.binarypackagereleaseID ==
                BinaryPackageRelease.id,
            BinaryPackagePublishingHistory.archiveID == archive.id,
            BinaryPackagePublishingHistory.distroarchseriesID ==
                DistroArchSeries.id,
            DistroArchSeries.distroseriesID == distroseries.id,
            BinaryPackagePublishingHistory.pocket == pocket,
            BinaryPackagePublishingHistory.status.is_in(
                active_publishing_status),
            BinaryPackageRelease.architecturespecific == True)
class Cve(SQLBase, BugLinkTargetMixin):
    """A CVE database record."""

    _table = 'Cve'

    sequence = StringCol(notNull=True, alternateID=True)
    status = EnumCol(dbName='status', schema=CveStatus, notNull=True)
    description = StringCol(notNull=True)
    datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    datemodified = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    references = SQLMultipleJoin(
        'CveReference', joinColumn='cve', orderBy='id')

    @property
    def url(self):
        """See ICve."""
        return ('https://cve.mitre.org/cgi-bin/cvename.cgi?name=%s'
                % self.sequence)

    @property
    def displayname(self):
        return 'CVE-%s' % self.sequence

    @property
    def title(self):
        return 'CVE-%s (%s)' % (self.sequence, self.status.title)

    @property
    def bugs(self):
        # Bug links are stored as XRefs from ('cve', sequence) to
        # ('bug', bug-id); bulk-load the Bug rows and return them sorted
        # by id. (Loop variable renamed from `id`, which shadowed the
        # builtin.)
        bug_ids = [
            int(bug_id) for _, bug_id in getUtility(IXRefSet).findFrom(
                (u'cve', self.sequence), types=[u'bug'])]
        return list(sorted(
            bulk.load(Bug, bug_ids), key=operator.attrgetter('id')))

    # CveReference's

    def createReference(self, source, content, url=None):
        """See ICveReference."""
        return CveReference(cve=self, source=source, content=content,
                            url=url)

    def removeReference(self, ref):
        # Only references belonging to this CVE may be removed.
        assert ref.cve == self
        CveReference.delete(ref.id)

    def createBugLink(self, bug, props=None):
        """See BugLinkTargetMixin."""
        if props is None:
            props = {}
        # XXX: Should set creator.
        getUtility(IXRefSet).create(
            {(u'cve', self.sequence): {(u'bug', unicode(bug.id)): props}})

    def deleteBugLink(self, bug):
        """See BugLinkTargetMixin."""
        getUtility(IXRefSet).delete(
            {(u'cve', self.sequence): [(u'bug', unicode(bug.id))]})
class ProductRelease(SQLBase):
    """A release of a product."""

    _table = 'ProductRelease'
    _defaultOrder = ['-datereleased']

    datereleased = UtcDateTimeCol(notNull=True)
    release_notes = StringCol(notNull=False, default=None)
    changelog = StringCol(notNull=False, default=None)
    datecreated = UtcDateTimeCol(
        dbName='datecreated', notNull=True, default=UTC_NOW)
    owner = ForeignKey(
        dbName="owner", foreignKey="Person",
        storm_validator=validate_person, notNull=True)
    milestone = ForeignKey(dbName='milestone', foreignKey='Milestone')
    _files = SQLMultipleJoin(
        'ProductReleaseFile', joinColumn='productrelease',
        orderBy='-date_uploaded', prejoins=['productrelease'])

    @cachedproperty
    def files(self):
        # Cached view of the release's files, newest upload first.
        return self._files

    @property
    def version(self):
        """See `IProductRelease`."""
        # A release's identity (version/series/product/titles) is
        # delegated to its milestone.
        return self.milestone.name

    @property
    def productseries(self):
        """See `IProductRelease`."""
        return self.milestone.productseries

    @property
    def product(self):
        """See `IProductRelease`."""
        return self.milestone.productseries.product

    @property
    def displayname(self):
        """See `IProductRelease`."""
        return self.milestone.displayname

    @property
    def title(self):
        """See `IProductRelease`."""
        return self.milestone.title

    @property
    def can_have_release_files(self):
        """See `IProductRelease`."""
        return self.product.information_type == InformationType.PUBLIC

    @staticmethod
    def normalizeFilename(filename):
        # Replace slashes in the filename with less problematic dashes.
        return filename.replace('/', '-')

    def destroySelf(self):
        """See `IProductRelease`."""
        assert self._files.count() == 0, (
            "You can't delete a product release which has files associated "
            "with it.")
        SQLBase.destroySelf(self)

    def _getFileObjectAndSize(self, file_or_data):
        """Return an object and length for file_or_data.

        :param file_or_data: A string or a file object or StringIO object.
        :return: file object or StringIO object and size.
        """
        if isinstance(file_or_data, basestring):
            file_size = len(file_or_data)
            file_obj = StringIO(file_or_data)
        else:
            assert isinstance(
                file_or_data,
                (file, StringIO)), ("file_or_data is not an expected type")
            file_obj = file_or_data
            # Measure size by seeking to the end, then restore the
            # original position so the caller can read from where it was.
            start = file_obj.tell()
            file_obj.seek(0, os.SEEK_END)
            file_size = file_obj.tell()
            file_obj.seek(start)
        return file_obj, file_size

    def addReleaseFile(self, filename, file_content, content_type,
                       uploader, signature_filename=None,
                       signature_content=None,
                       file_type=UpstreamFileType.CODETARBALL,
                       description=None, from_api=False):
        """See `IProductRelease`."""
        if not self.can_have_release_files:
            raise ProprietaryProduct(
                "Only public projects can have download files.")
        if self.hasReleaseFile(filename):
            raise InvalidFilename
        # Create the alias for the file.
        filename = self.normalizeFilename(filename)
        # XXX: StevenK 2013-02-06 bug=1116954: We should not need to
        # refetch the file content from the request, since the passed in
        # one has been wrongly encoded.
        if from_api:
            file_content = get_raw_form_value_from_current_request(
                file_content, 'file_content')
        file_obj, file_size = self._getFileObjectAndSize(file_content)
        alias = getUtility(ILibraryFileAliasSet).create(
            name=filename, size=file_size, file=file_obj,
            contentType=content_type)
        if signature_filename is not None and signature_content is not None:
            # XXX: StevenK 2013-02-06 bug=1116954: We should not need to
            # refetch the file content from the request, since the passed
            # in one has been wrongly encoded.
            if from_api:
                signature_content = get_raw_form_value_from_current_request(
                    signature_content, 'signature_content')
            signature_obj, signature_size = self._getFileObjectAndSize(
                signature_content)
            signature_filename = self.normalizeFilename(signature_filename)
            signature_alias = getUtility(ILibraryFileAliasSet).create(
                name=signature_filename, size=signature_size,
                file=signature_obj,
                contentType='application/pgp-signature')
        else:
            signature_alias = None
        return ProductReleaseFile(
            productrelease=self, libraryfile=alias,
            signature=signature_alias, filetype=file_type,
            description=description, uploader=uploader)

    def getFileAliasByName(self, name):
        """See `IProductRelease`."""
        # `name` may match either a payload file or its signature file.
        for file_ in self.files:
            if file_.libraryfile.filename == name:
                return file_.libraryfile
            elif file_.signature and file_.signature.filename == name:
                return file_.signature
        raise NotFoundError(name)

    def getProductReleaseFileByName(self, name):
        """See `IProductRelease`."""
        for file_ in self.files:
            if file_.libraryfile.filename == name:
                return file_
        raise NotFoundError(name)

    def hasReleaseFile(self, name):
        """See `IProductRelease`."""
        try:
            self.getProductReleaseFileByName(name)
            return True
        except NotFoundError:
            return False
class Message(SQLBase):
    """A message. This is an RFC822-style message, typically it would be
    coming into the bug system, or coming in from a mailing list.
    """

    _table = 'Message'
    _defaultOrder = '-id'
    datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    # Optional subject line; None for subject-less messages.
    subject = StringCol(notNull=False, default=None)
    owner = ForeignKey(dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=False)
    # Optional parent, forming a thread of Messages.
    parent = ForeignKey(foreignKey='Message', dbName='parent',
        notNull=False, default=None)
    rfc822msgid = StringCol(notNull=True)
    bugs = SQLRelatedJoin('Bug', joinColumn='message', otherColumn='bug',
        intermediateTable='BugMessage')
    _chunks = SQLMultipleJoin('MessageChunk', joinColumn='message')

    @cachedproperty
    def chunks(self):
        # Materialise the join once per instance; cachedproperty memoises
        # the list so repeated access does not re-query.
        return list(self._chunks)

    raw = ForeignKey(foreignKey='LibraryFileAlias', dbName='raw',
                     default=None)
    _bugattachments = SQLMultipleJoin('BugAttachment', joinColumn='_message')

    @cachedproperty
    def bugattachments(self):
        # Same memoised-list pattern as `chunks`.
        return list(self._bugattachments)

    visible = BoolCol(notNull=True, default=True)

    def __repr__(self):
        return "<Message at 0x%x id=%s>" % (id(self), self.id)

    def __iter__(self):
        """See IMessage.__iter__"""
        return iter(self.chunks)

    def setVisible(self, visible):
        # Plain setter; exists presumably for the IMessage interface —
        # callers could assign `visible` directly.
        self.visible = visible

    @property
    def title(self):
        """See IMessage."""
        return self.subject

    @property
    def sender(self):
        """See IMessage."""
        return self.owner

    @cachedproperty
    def text_contents(self):
        """See IMessage."""
        return Message.chunks_text(self.chunks)

    @classmethod
    def chunks_text(cls, chunks):
        # Join the non-empty chunks' text with blank lines between them.
        bits = [unicode(chunk) for chunk in chunks if chunk.content]
        return '\n\n'.join(bits)

    # XXX flacoste 2006-09-08: Bogus attribute only present so that
    # verifyObject doesn't fail. That attribute is part of the
    # interface because it is used as a UI field in MessageAddView
    content = None

    def getAPIParent(self):
        """See `IMessage`."""
        return None
class Revision(SQLBase):
    """See IRevision."""

    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    log_body = StringCol(notNull=True)
    gpgkey = ForeignKey(dbName='gpgkey', foreignKey='GPGKey', default=None)

    revision_author_id = Int(name='revision_author', allow_none=False)
    revision_author = Reference(revision_author_id, 'RevisionAuthor.id')

    # Globally unique VCS revision identifier; byRevisionID() looks one up.
    revision_id = StringCol(notNull=True, alternateID=True,
                            alternateMethodName='byRevisionID')
    # May be NULL (and may even be in the future — see allocateKarma).
    revision_date = UtcDateTimeCol(notNull=False)

    karma_allocated = BoolCol(default=False, notNull=True)

    properties = SQLMultipleJoin('RevisionProperty', joinColumn='revision')

    @property
    def parents(self):
        """See IRevision.parents"""
        return shortlist(
            RevisionParent.selectBy(revision=self, orderBy='sequence'))

    @property
    def parent_ids(self):
        """Sequence of globally unique ids for the parents of this revision.

        The corresponding Revision objects can be retrieved, if they are
        present in the database, using the RevisionSet Zope utility.
        """
        return [parent.parent_id for parent in self.parents]

    def getLefthandParent(self):
        # The left-hand (mainline) parent is the first in the sequence;
        # a parentless revision maps to the NULL_REVISION sentinel.
        if len(self.parent_ids) == 0:
            parent_id = NULL_REVISION
        else:
            parent_id = self.parent_ids[0]
        return RevisionSet().getByRevisionId(parent_id)

    def getProperties(self):
        """See `IRevision`."""
        return dict((prop.name, prop.value) for prop in self.properties)

    def allocateKarma(self, branch):
        """See `IRevision`."""
        # Always set karma_allocated to True so that Lp does not reprocess
        # junk and invalid user branches because they do not get karma.
        self.karma_allocated = True
        # If we know who the revision author is, give them karma.
        author = self.revision_author.person
        if author is not None and branch is not None:
            # Backdate the karma to the time the revision was created.  If
            # the revision_date on the revision is in future (for whatever
            # weird reason) we will use the date_created from the revision
            # (which will be now) as the karma date created.  Having future
            # karma events is both wrong, as the revision has been created
            # (and it is lying), and a problem with the way the Launchpad
            # code currently does its karma degradation over time.
            karma_date = min(self.revision_date, self.date_created)
            karma = branch.target.assignKarma(
                author, 'revisionadded', karma_date)
            return karma
        else:
            return None

    def getBranch(self, allow_private=False, allow_junk=True):
        """See `IRevision`."""
        # Imported here rather than at module level, presumably to avoid a
        # circular import with the branch models — confirm before hoisting.
        from lp.code.model.branch import Branch
        from lp.code.model.branchrevision import BranchRevision

        store = Store.of(self)

        query = And(
            self.id == BranchRevision.revision_id,
            BranchRevision.branch_id == Branch.id)
        if not allow_private:
            query = And(
                query,
                Branch.information_type.is_in(PUBLIC_INFORMATION_TYPES))
        if not allow_junk:
            query = And(
                query,
                # Not-junk branches are either associated with a product
                # or with a source package.
                Or(
                    (Branch.product != None),
                    And(
                        Branch.sourcepackagename != None,
                        Branch.distroseries != None)))
        result_set = store.find(Branch, query)
        if self.revision_author.person is None:
            result_set.order_by(Asc(BranchRevision.sequence))
        else:
            # Prefer branches owned by the revision author (False sorts
            # before True), then earliest branch-revision sequence.
            result_set.order_by(
                Branch.ownerID != self.revision_author.personID,
                Asc(BranchRevision.sequence))
        return result_set.first()
class BugTracker(SQLBase):
    """A class to access the BugTracker table in the database.

    Each BugTracker is a distinct instance of that bug tracking
    tool. For example, each Bugzilla deployment is a separate
    BugTracker. bugzilla.mozilla.org and bugzilla.gnome.org are each
    distinct BugTrackers.
    """
    implements(IBugTracker)

    _table = 'BugTracker'

    bugtrackertype = EnumCol(dbName='bugtrackertype',
        schema=BugTrackerType, notNull=True)
    name = StringCol(notNull=True, unique=True)
    title = StringCol(notNull=True)
    summary = StringCol(notNull=False)
    baseurl = StringCol(notNull=True)
    active = Bool(name='active', allow_none=False, default=True)
    owner = ForeignKey(dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    contactdetails = StringCol(notNull=False)
    # Whether the remote tracker has the Launchpad plugin installed;
    # None means unknown.
    has_lp_plugin = BoolCol(notNull=False, default=False)
    products = SQLMultipleJoin(
        'Product', joinColumn='bugtracker', orderBy='name')
    watches = SQLMultipleJoin(
        'BugWatch', joinColumn='bugtracker', orderBy='-datecreated',
        prejoins=['bug'])

    # Per-tracker-type %-templates for "file a new bug" URLs.  The keys in
    # the templates are filled from url_components in
    # getBugFilingAndSearchLinks().
    _filing_url_patterns = {
        BugTrackerType.BUGZILLA: (
            "%(base_url)s/enter_bug.cgi?product=%(remote_product)s"
            "&short_desc=%(summary)s&long_desc=%(description)s"),
        BugTrackerType.GOOGLE_CODE: (
            "%(base_url)s/entry?summary=%(summary)s&"
            "comment=%(description)s"),
        BugTrackerType.MANTIS: (
            "%(base_url)s/bug_report_advanced_page.php"
            "?summary=%(summary)s&description=%(description)s"),
        BugTrackerType.PHPPROJECT: (
            "%(base_url)s/report.php"
            "?in[sdesc]=%(summary)s&in[ldesc]=%(description)s"),
        BugTrackerType.ROUNDUP: (
            "%(base_url)s/issue?@template=item&title=%(summary)s"
            "&@note=%(description)s"),
        BugTrackerType.RT: (
            "%(base_url)s/Ticket/Create.html?Queue=%(remote_product)s"
            "&Subject=%(summary)s&Content=%(description)s"),
        BugTrackerType.SAVANE: (
            "%(base_url)s/bugs/?func=additem&group=%(remote_product)s"),
        BugTrackerType.SOURCEFORGE: (
            "%(base_url)s/%(tracker)s/?func=add&"
            "group_id=%(group_id)s&atid=%(at_id)s"),
        BugTrackerType.TRAC: (
            "%(base_url)s/newticket?summary=%(summary)s&"
            "description=%(description)s"),
        }

    # Per-tracker-type %-templates for "search existing bugs" URLs.
    _search_url_patterns = {
        BugTrackerType.BUGZILLA: (
            "%(base_url)s/query.cgi?product=%(remote_product)s"
            "&short_desc=%(summary)s"),
        BugTrackerType.GOOGLE_CODE: "%(base_url)s/list?q=%(summary)s",
        BugTrackerType.DEBBUGS: (
            "%(base_url)s/cgi-bin/search.cgi?phrase=%(summary)s"
            "&attribute_field=package&attribute_operator=STROREQ"
            "&attribute_value=%(remote_product)s"),
        BugTrackerType.MANTIS: "%(base_url)s/view_all_bug_page.php",
        BugTrackerType.PHPPROJECT: (
            "%(base_url)s/search.php?search_for=%(summary)s"),
        BugTrackerType.ROUNDUP: (
            "%(base_url)s/issue?@template=search&@search_text=%(summary)s"),
        BugTrackerType.RT: (
            "%(base_url)s/Search/Build.html?Query=Queue = "
            "'%(remote_product)s' AND Subject LIKE '%(summary)s'"),
        BugTrackerType.SAVANE: (
            "%(base_url)s/bugs/?func=search&group=%(remote_product)s"),
        BugTrackerType.SOURCEFORGE: (
            "%(base_url)s/search/?group_id=%(group_id)s"
            "&some_word=%(summary)s&type_of_search=artifact"),
        BugTrackerType.TRAC: "%(base_url)s/search?ticket=on&q=%(summary)s",
        }

    @property
    def _custom_filing_url_patterns(self):
        """Return a dict of bugtracker-specific bugfiling URL patterns."""
        # Keyed by BugTracker *instance* (the GNOME Bugzilla celebrity),
        # overriding the type-keyed _filing_url_patterns.
        gnome_bugzilla = getUtility(ILaunchpadCelebrities).gnome_bugzilla
        return {
            gnome_bugzilla: (
                "%(base_url)s/enter_bug.cgi?product=%(remote_product)s"
                "&short_desc=%(summary)s&comment=%(description)s"),
            }

    @property
    def latestwatches(self):
        """See `IBugTracker`."""
        # `watches` is ordered by -datecreated, so this is the newest ten.
        return self.watches[:10]

    @property
    def multi_product(self):
        """Return True if this BugTracker tracks multiple projects."""
        if self.bugtrackertype not in SINGLE_PRODUCT_BUGTRACKERTYPES:
            return True
        else:
            return False

    def getBugFilingAndSearchLinks(self, remote_product, summary=None,
                                   description=None, remote_component=None):
        """See `IBugTracker`.

        :return: a dict with 'bug_filing_url' and 'bug_search_url' keys
            (values may be None), or None when a SOURCEFORGE
            remote_product is not of the 'group_id&atid' form.
        """
        bugtracker_urls = {'bug_filing_url': None, 'bug_search_url': None}

        if remote_product is None and self.multi_product:
            # Don't try to return anything if remote_product is required
            # for this BugTrackerType and one hasn't been passed.
            return bugtracker_urls

        if remote_product is None:
            # Turn the remote product into an empty string so that
            # quote() doesn't blow up later on.
            remote_product = ''

        if remote_component is None:
            # Ditto for remote component.
            remote_component = ''

        if self in self._custom_filing_url_patterns:
            # Some bugtrackers are customised to accept different
            # querystring parameters from the default. We special-case
            # these.
            bug_filing_pattern = self._custom_filing_url_patterns[self]
        else:
            bug_filing_pattern = self._filing_url_patterns.get(
                self.bugtrackertype, None)

        bug_search_pattern = self._search_url_patterns.get(
            self.bugtrackertype, None)

        # Make sure that we don't put > 1 '/' in returned URLs.
        base_url = self.baseurl.rstrip('/')

        # If summary or description are None, convert them to empty
        # strings to that we don't try to pass anything to the upstream
        # bug tracker.
        if summary is None:
            summary = ''
        if description is None:
            description = ''

        # UTF-8 encode the description and summary so that quote()
        # doesn't break if they contain unicode characters it doesn't
        # understand.
        summary = summary.encode('utf-8')
        description = description.encode('utf-8')

        if self.bugtrackertype == BugTrackerType.SOURCEFORGE:
            try:
                # SourceForge bug trackers use a group ID and an ATID to
                # file a bug, rather than a product name. remote_product
                # should be an ampersand-separated string in the form
                # 'group_id&atid'
                group_id, at_id = remote_product.split('&')
            except ValueError:
                # If remote_product contains something that's not valid
                # in a SourceForge context we just return early.
                return None

            # If this bug tracker is the SourceForge celebrity the link
            # is to the new bug tracker rather than the old one.
            sf_celeb = getUtility(ILaunchpadCelebrities).sourceforge_tracker
            if self == sf_celeb:
                tracker = 'tracker2'
            else:
                tracker = 'tracker'

            url_components = {
                'base_url': base_url,
                'tracker': quote(tracker),
                'group_id': quote(group_id),
                'at_id': quote(at_id),
                'summary': quote(summary),
                'description': quote(description),
                }
        else:
            url_components = {
                'base_url': base_url,
                'remote_product': quote(remote_product),
                'remote_component': quote(remote_component),
                'summary': quote(summary),
                'description': quote(description),
                }

        if bug_filing_pattern is not None:
            bugtracker_urls['bug_filing_url'] = (
                bug_filing_pattern % url_components)
        if bug_search_pattern is not None:
            bugtracker_urls['bug_search_url'] = (
                bug_search_pattern % url_components)

        return bugtracker_urls

    def getBugsWatching(self, remotebug):
        """See `IBugTracker`."""
        # We special-case email address bug trackers. Since we don't
        # record a remote bug id for them we can never know which bugs
        # are already watching a remote bug.
        if self.bugtrackertype == BugTrackerType.EMAILADDRESS:
            return []

        return shortlist(
            Store.of(self).find(
                Bug,
                BugWatch.bugID == Bug.id,
                BugWatch.bugtrackerID == self.id,
                BugWatch.remotebug == remotebug).config(
                    distinct=True).order_by(Bug.datecreated))

    @property
    def watches_ready_to_check(self):
        # Watches with a scheduled next_check that is already due.
        return Store.of(self).find(
            BugWatch,
            BugWatch.bugtracker == self,
            Not(BugWatch.next_check == None),
            BugWatch.next_check <= datetime.now(timezone('UTC')))

    @property
    def watches_with_unpushed_comments(self):
        # Watches that have bug messages not yet pushed to the remote
        # tracker (no remote_comment_id recorded).
        return Store.of(self).find(
            BugWatch,
            BugWatch.bugtracker == self,
            BugMessage.bugwatch == BugWatch.id,
            BugMessage.remote_comment_id == None).config(distinct=True)

    @property
    def watches_needing_update(self):
        """All watches needing some sort of update.

        :return: The union of `watches_ready_to_check` and
                 `watches_with_unpushed_comments`.
        """
        return self.watches_ready_to_check.union(
            self.watches_with_unpushed_comments)

    # Join to return a list of BugTrackerAliases relating to this
    # BugTracker.
    _bugtracker_aliases = SQLMultipleJoin(
        'BugTrackerAlias', joinColumn='bugtracker')

    def _get_aliases(self):
        """See `IBugTracker.aliases`."""
        alias_urls = set(alias.base_url for alias in self._bugtracker_aliases)
        # Although it does no harm if the current baseurl is also an
        # alias, we hide it and all its permutations to avoid
        # confusion.
        alias_urls.difference_update(base_url_permutations(self.baseurl))
        return tuple(sorted(alias_urls))

    def _set_aliases(self, alias_urls):
        """See `IBugTracker.aliases`."""
        if alias_urls is None:
            alias_urls = set()
        else:
            alias_urls = set(alias_urls)

        current_aliases_by_url = dict(
            (alias.base_url, alias) for alias in self._bugtracker_aliases)
        # Make a set of the keys, i.e. a set of current URLs.
        current_alias_urls = set(current_aliases_by_url)

        # URLs we need to add as aliases.
        to_add = alias_urls - current_alias_urls
        # URL aliases we need to delete.
        to_del = current_alias_urls - alias_urls

        for url in to_add:
            BugTrackerAlias(bugtracker=self, base_url=url)
        for url in to_del:
            alias = current_aliases_by_url[url]
            alias.destroySelf()

    aliases = property(
        _get_aliases, _set_aliases, None,
        """A list of the alias URLs. See `IBugTracker`.

        The aliases are found by querying BugTrackerAlias. Assign an
        iterable of URLs or None to set or remove aliases.
        """)

    @property
    def imported_bug_messages(self):
        """See `IBugTracker`."""
        return Store.of(self).find(
            BugMessage,
            BugMessage.bugwatchID == BugWatch.id,
            BugWatch.bugtrackerID == self.id).order_by(BugMessage.id)

    def getLinkedPersonByName(self, name):
        """Return the Person with a given name on this bugtracker."""
        return BugTrackerPerson.selectOneBy(name=name, bugtracker=self)

    def linkPersonToSelf(self, name, person):
        """See `IBugTrackerSet`.

        :raises BugTrackerPersonAlreadyExists: if `name` is already linked
            to a person on this tracker.
        """
        # Check that this name isn't already in use for this bugtracker.
        if self.getLinkedPersonByName(name) is not None:
            raise BugTrackerPersonAlreadyExists(
                "Name '%s' is already in use for bugtracker '%s'."
                % (name, self.name))

        bugtracker_person = BugTrackerPerson(
            name=name, bugtracker=self, person=person)

        return bugtracker_person

    def ensurePersonForSelf(
        self, display_name, email, rationale, creation_comment):
        """Return a Person that is linked to this bug tracker."""
        # If we have an email address to work with we can use
        # ensurePerson() to get the Person we need.
        if email is not None:
            return getUtility(IPersonSet).ensurePerson(
                email, display_name, rationale, creation_comment)

        # First, see if there's already a BugTrackerPerson for this
        # display_name on this bugtracker. If there is, return it.
        bugtracker_person = self.getLinkedPersonByName(display_name)
        if bugtracker_person is not None:
            return bugtracker_person.person

        # Generate a valid Launchpad name for the Person.
        base_canonical_name = (
            "%s-%s" % (sanitize_name(display_name), self.name))
        canonical_name = base_canonical_name

        # Keep appending an increasing numeric suffix until the name is
        # free in Launchpad.
        person_set = getUtility(IPersonSet)
        index = 0
        while person_set.getByName(canonical_name) is not None:
            index += 1
            canonical_name = "%s-%s" % (base_canonical_name, index)

        person = person_set.createPersonWithoutEmail(
            canonical_name, rationale, creation_comment,
            displayname=display_name)

        # Link the Person to the bugtracker for future reference.
        bugtracker_person = self.linkPersonToSelf(display_name, person)

        return person

    def resetWatches(self, new_next_check=None):
        """See `IBugTracker`."""
        if new_next_check is None:
            # Randomise the next check over the coming day to spread load.
            new_next_check = SQL(
                "now() at time zone 'UTC' + (random() * interval '1 day')")

        store = Store.of(self)
        store.find(BugWatch, BugWatch.bugtracker == self).set(
            next_check=new_next_check, lastchecked=None,
            last_error_type=None)

    def addRemoteComponentGroup(self, component_group_name):
        """See `IBugTracker`."""
        if component_group_name is None:
            component_group_name = "default"
        component_group = BugTrackerComponentGroup()
        component_group.name = component_group_name
        component_group.bug_tracker = self

        store = IStore(BugTrackerComponentGroup)
        store.add(component_group)
        # NOTE(review): commits immediately rather than deferring to the
        # enclosing transaction — confirm this is intended before reuse.
        store.commit()

        return component_group

    def getAllRemoteComponentGroups(self):
        """See `IBugTracker`."""
        component_groups = []

        component_groups = Store.of(self).find(
            BugTrackerComponentGroup,
            BugTrackerComponentGroup.bug_tracker == self.id)
        component_groups = component_groups.order_by(
            BugTrackerComponentGroup.name)
        return component_groups

    def getRemoteComponentGroup(self, component_group_name):
        """See `IBugTracker`.

        A purely numeric name is treated as a component-group id;
        otherwise it is looked up by name.
        """
        component_group = None
        store = IStore(BugTrackerComponentGroup)
        if component_group_name is None:
            return None
        elif component_group_name.isdigit():
            component_group_id = int(component_group_name)
            component_group = store.find(
                BugTrackerComponentGroup,
                BugTrackerComponentGroup.id == component_group_id).one()
        else:
            component_group = store.find(
                BugTrackerComponentGroup,
                BugTrackerComponentGroup.name == component_group_name).one()
        return component_group

    def getRemoteComponentForDistroSourcePackageName(
        self, distribution, sourcepackagename):
        """See `IBugTracker`."""
        if distribution is None:
            return None
        dsp = distribution.getSourcePackage(sourcepackagename)
        if dsp is None:
            return None
        return Store.of(self).find(
            BugTrackerComponent,
            BugTrackerComponent.distribution == distribution.id,
            BugTrackerComponent.source_package_name ==
                dsp.sourcepackagename.id).one()

    def getRelatedPillars(self, user=None):
        """See `IBugTracker`."""
        # Only active products visible to `user`, and active project
        # groups, that point at this tracker.
        products = IStore(Product).find(
            Product,
            Product.bugtrackerID == self.id,
            Product.active == True,
            ProductSet.getProductPrivacyFilter(user)).order_by(Product.name)
        groups = IStore(ProjectGroup).find(
            ProjectGroup,
            ProjectGroup.bugtrackerID == self.id,
            ProjectGroup.active == True).order_by(ProjectGroup.name)
        return groups, products
class SRThrough3(SQLObject):
    # One side of a many-to-many test fixture: `ones` is the direct join
    # on SRThrough1.threeID; `twos` goes through the related-join table.
    name = StringCol()
    ones = SQLMultipleJoin('SRThrough1', joinColumn='threeID')
    twos = SQLRelatedJoin('SRThrough2')
class FAQ(SQLBase):
    """See `IFAQ`."""

    _table = 'FAQ'
    _defaultOrder = ['date_created', 'id']

    owner = ForeignKey(
        dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    title = StringCol(notNull=True)
    # Stored in the 'tags' DB column.
    keywords = StringCol(dbName="tags", notNull=False, default=None)
    content = StringCol(notNull=False, default=None)
    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    last_updated_by = ForeignKey(
        dbName='last_updated_by', foreignKey='Person',
        storm_validator=validate_public_person, notNull=False,
        default=None)
    date_last_updated = UtcDateTimeCol(notNull=False, default=None)
    # Exactly one of product/distribution is set (enforced in new()).
    product = ForeignKey(
        dbName='product', foreignKey='Product', notNull=False,
        default=None)
    distribution = ForeignKey(
        dbName='distribution', foreignKey='Distribution', notNull=False,
        default=None)
    related_questions = SQLMultipleJoin(
        'Question', joinColumn='faq', orderBy=['Question.datecreated'])

    @property
    def target(self):
        """See `IFAQ`."""
        if self.product:
            return self.product
        else:
            return self.distribution

    def destroySelf(self):
        """Delete this FAQ.

        :raises CannotDeleteFAQ: if questions are still linked to it;
            they must be unlinked first.
        """
        if self.related_questions:
            raise CannotDeleteFAQ(
                "Cannot delete FAQ: questions must be unlinked first.")
        super(FAQ, self).destroySelf()

    @staticmethod
    def new(owner, title, content, keywords=None, date_created=None,
            product=None, distribution=None):
        """Factory method to create a new FAQ.

        Ensure that only one of product or distribution is given.

        :param keywords: optional keyword string; defaults to None.
            (Previously the default was `keywords=keywords`, which was
            evaluated at class-definition time and so bound the class's
            StringCol column descriptor — a wrong-default bug; callers
            omitting the argument would have stored a column object.)
        :raises AssertionError: if owner is not an IPerson, or if zero
            or both of product/distribution are given.
        """
        if not IPerson.providedBy(owner):
            raise AssertionError(
                'owner parameter should be an IPerson, not %s' % type(owner))
        if product is not None and distribution is not None:
            raise AssertionError(
                "only one of product or distribution should be provided")
        if product is None and distribution is None:
            raise AssertionError("product or distribution must be provided")
        if date_created is None:
            date_created = DEFAULT
        faq = FAQ(
            owner=owner,
            title=title,
            content=content,
            keywords=keywords,
            date_created=date_created,
            product=product,
            distribution=distribution)
        notify(ObjectCreatedEvent(faq))
        return faq

    @staticmethod
    def findSimilar(summary, product=None, distribution=None):
        """Return the FAQs similar to summary.

        See `IFAQTarget.findSimilarFAQs` for details.
        """
        assert not (product and distribution), (
            'only one of product or distribution should be provided')
        if product:
            target_constraint = 'product = %s' % sqlvalues(product)
        elif distribution:
            target_constraint = 'distribution = %s' % sqlvalues(distribution)
        else:
            raise AssertionError('must provide product or distribution')
        phrases = nl_phrase_search(summary, FAQ, target_constraint)
        if not phrases:
            # No useful words to search on in that summary.
            return FAQ.select('1 = 2')
        return FAQ.select(
            And(target_constraint, fti_search(FAQ, phrases, ftq=False)),
            orderBy=[
                rank_by_fti(FAQ, phrases, ftq=False), "-FAQ.date_created"])

    @staticmethod
    def getForTarget(id, target):
        """Return the FAQ with the requested id.

        When target is not None, the target will be checked to make sure
        that the FAQ is in the expected target or return None otherwise.
        """
        try:
            faq = FAQ.get(id)
            if target is None or target == faq.target:
                return faq
            else:
                return None
        except SQLObjectNotFound:
            return None
class SRThrough1(SQLObject):
    # Counterpart of SRThrough3: each SRThrough1 belongs to one
    # SRThrough3 and joins to SRThrough2 rows via its own id.
    three = ForeignKey('SRThrough3')
    twos = SQLMultipleJoin('SRThrough2', joinColumn='oneID')
class Channel(InheritableSQLObject):
    # Unique display name of the channel.
    name = StringCol(notNone=True, unique=True)
    description = StringCol(default=None)
    enabled = BoolCol(notNone=True, default=True)
    # Who may subscribe: 'public' (anyone), 'restricted' or 'private'
    # (only authorized_subscribers / administrators).
    subscription_right = EnumCol(enumValues=['public', 'restricted', 'private'])
    authorized_subscribers = SQLRelatedJoin('User')
    # Per-channel secret used in the unauthenticated preview link;
    # generated lazily per row via the default callable.
    secret = StringCol(notNone=True, default=lambda: utils.generate_secret())
    subscriptions = SQLMultipleJoin('Subscription')
    bundles = SQLRelatedJoin('ChannelBundle')

    def can_subscribe(self, user):
        """ Return whether this user has sufficient permission to be able to subscribe to this channel or not. """
        # Public channels are open to all; otherwise the user must be an
        # administrator or be explicitly authorized.
        return self.subscription_right == 'public' \
            or UserPermissions.administrator in user.highest_permission_level \
            or user in self.authorized_subscribers

    def safe_add_user(self, user):
        """ Avoid user duplication in channel authorized subscribers. """
        if user not in self.authorized_subscribers:
            self.addUser(user)

    @classmethod
    def get_channels_authorized_subscribers_as_json(cls, channels):
        """ Return the string representation of a dictionary in the form {channel.id: [user.id, ...]} """
        channels_authorized_subscribers = {}
        for channel in channels:
            channels_authorized_subscribers[channel.id] = \
                [u.id for u in channel.authorized_subscribers]
        return json.dumps(channels_authorized_subscribers)

    @classmethod
    def get_visible_channels_of(cls, user):
        """
        Returns the channels that are accessible for the user (public channels or channels
        with the user specified in authorized_subscribers, or all channels if the user is superadmin)
        :param user: The user to retrieve the accessible channels.
        :return: A iterable with the accessible channels (iterable of sqlobjects)
        """
        if UserPermissions.administrator in user.highest_permission_level:
            return set(Channel.select())
        # Screen administrators additionally see all public channels.
        public_channels = set(Channel.selectBy(subscription_right='public')) \
            if UserPermissions.screen_administrator in \
            user.highest_permission_level else set()
        # Union with channels reachable via the user's roles and the
        # user's explicit authorizations.
        return public_channels | set(Role.selectBy(user=user).throughTo.channel) | set(
            User.selectBy(id=user.id).throughTo.authorized_channels)

    @classmethod
    def get_screens_channels_from(cls, user):
        """
        Return the intersection between 3 sets of channels: all the public channels,
        all the channel this user is authorized to subscribe and all the channel
        the screens this user has access are subscribed to.
        The resulting data type is a set of Channel instances.
        """
        # NOTE(review): despite the docstring saying "intersection", the
        # implementation returns the *union* of the three sets — confirm
        # which is intended.
        if user.super_admin:
            return set(Channel.select())
        return set(c for c in Channel.selectBy(subscription_right='public')) | \
            set(c for c in user.authorized_channels) | \
            set(c for c in user.screens.throughTo.subscriptions.throughTo.channel)

    def get_preview_link(self):
        """ Returns the secret preview link of this channel. """
        return '/preview/channels/' + str(self.id) + '/' + self.secret

    @abstractmethod
    def flatten(self, keep_disabled_channels=False):
        """ Returns all the channels contained in this channel and the channels it contains
        as an Iterable[Channel]"""

    @abstractmethod
    def get_type_name(self):
        """ Returns a string representing the name of the subtype to be used in the UI for this class. """
class Specification(SQLBase, BugLinkTargetMixin, InformationTypeMixin):
    """See ISpecification."""

    implements(ISpecification, IBugLinkTarget, IInformationType)

    _defaultOrder = ['-priority', 'definition_status', 'name', 'id']

    # db field names
    name = StringCol(unique=True, notNull=True)
    title = StringCol(notNull=True)
    summary = StringCol(notNull=True)
    definition_status = EnumCol(
        schema=SpecificationDefinitionStatus, notNull=True,
        default=SpecificationDefinitionStatus.NEW)
    priority = EnumCol(schema=SpecificationPriority, notNull=True,
                       default=SpecificationPriority.UNDEFINED)
    # The underscored person columns are wrapped by properties below so
    # that assigning them can trigger an access-grant subscription.
    _assignee = ForeignKey(dbName='assignee', notNull=False,
                           foreignKey='Person',
                           storm_validator=validate_public_person,
                           default=None)
    _drafter = ForeignKey(dbName='drafter', notNull=False,
                          foreignKey='Person',
                          storm_validator=validate_public_person,
                          default=None)
    _approver = ForeignKey(dbName='approver', notNull=False,
                           foreignKey='Person',
                           storm_validator=validate_public_person,
                           default=None)
    owner = ForeignKey(dbName='owner', foreignKey='Person',
                       storm_validator=validate_public_person, notNull=True)
    datecreated = UtcDateTimeCol(notNull=True, default=DEFAULT)
    product = ForeignKey(dbName='product', foreignKey='Product',
                         notNull=False, default=None)
    productseries = ForeignKey(dbName='productseries',
                               foreignKey='ProductSeries',
                               notNull=False, default=None)
    distribution = ForeignKey(dbName='distribution',
                              foreignKey='Distribution',
                              notNull=False, default=None)
    distroseries = ForeignKey(dbName='distroseries',
                              foreignKey='DistroSeries',
                              notNull=False, default=None)
    goalstatus = EnumCol(schema=SpecificationGoalStatus, notNull=True,
                         default=SpecificationGoalStatus.PROPOSED)
    goal_proposer = ForeignKey(dbName='goal_proposer', notNull=False,
                               foreignKey='Person',
                               storm_validator=validate_public_person,
                               default=None)
    date_goal_proposed = UtcDateTimeCol(notNull=False, default=None)
    goal_decider = ForeignKey(dbName='goal_decider', notNull=False,
                              foreignKey='Person',
                              storm_validator=validate_public_person,
                              default=None)
    date_goal_decided = UtcDateTimeCol(notNull=False, default=None)
    milestone = ForeignKey(dbName='milestone', foreignKey='Milestone',
                           notNull=False, default=None)
    specurl = StringCol(notNull=False, default=None)
    whiteboard = StringCol(notNull=False, default=None)
    direction_approved = BoolCol(notNull=True, default=False)
    man_days = IntCol(notNull=False, default=None)
    implementation_status = EnumCol(
        schema=SpecificationImplementationStatus, notNull=True,
        default=SpecificationImplementationStatus.UNKNOWN)
    superseded_by = ForeignKey(dbName='superseded_by',
                               foreignKey='Specification',
                               notNull=False, default=None)
    completer = ForeignKey(dbName='completer', notNull=False,
                           foreignKey='Person',
                           storm_validator=validate_public_person,
                           default=None)
    date_completed = UtcDateTimeCol(notNull=False, default=None)
    starter = ForeignKey(dbName='starter', notNull=False,
                         foreignKey='Person',
                         storm_validator=validate_public_person,
                         default=None)
    date_started = UtcDateTimeCol(notNull=False, default=None)

    # useful joins
    _subscriptions = SQLMultipleJoin('SpecificationSubscription',
                                     joinColumn='specification',
                                     orderBy='id')
    subscribers = SQLRelatedJoin(
        'Person', joinColumn='specification', otherColumn='person',
        intermediateTable='SpecificationSubscription',
        orderBy=['displayname', 'name'])
    sprint_links = SQLMultipleJoin('SprintSpecification', orderBy='id',
                                   joinColumn='specification')
    sprints = SQLRelatedJoin('Sprint', orderBy='name',
                             joinColumn='specification', otherColumn='sprint',
                             intermediateTable='SprintSpecification')
    bug_links = SQLMultipleJoin('SpecificationBug',
                                joinColumn='specification', orderBy='id')
    bugs = SQLRelatedJoin('Bug', joinColumn='specification',
                          otherColumn='bug',
                          intermediateTable='SpecificationBug', orderBy='id')
    spec_dependency_links = SQLMultipleJoin('SpecificationDependency',
                                            joinColumn='specification',
                                            orderBy='id')
    dependencies = SQLRelatedJoin(
        'Specification', joinColumn='specification',
        otherColumn='dependency', orderBy='title',
        intermediateTable='SpecificationDependency')
    information_type = EnumCol(enum=InformationType, notNull=True,
                               default=InformationType.PUBLIC)

    @cachedproperty
    def linked_branches(self):
        """The SpecificationBranch links for this spec, ordered by id."""
        return list(
            Store.of(self).find(
                SpecificationBranch,
                SpecificationBranch.specificationID == self.id).order_by(
                    SpecificationBranch.id))

    def _fetch_children_or_parents(self, join_cond, cond, user):
        """Fetch specs joined to this one through SpecificationDependency.

        `join_cond`/`cond` select the direction of the dependency edge;
        results are filtered by the caller's privacy grants.
        """
        from lp.blueprints.model.specificationsearch import (
            get_specification_privacy_filter)
        return list(
            Store.of(self).using(
                Specification,
                Join(SpecificationDependency, join_cond == self.id)).find(
                    Specification, cond == Specification.id,
                    *get_specification_privacy_filter(user)).order_by(
                        Specification.title))

    def getDependencies(self, user=None):
        """Specs this one depends on, visible to `user`."""
        return self._fetch_children_or_parents(
            SpecificationDependency.specificationID,
            SpecificationDependency.dependencyID, user)

    def getBlockedSpecs(self, user=None):
        """Specs that depend on this one, visible to `user`."""
        return self._fetch_children_or_parents(
            SpecificationDependency.dependencyID,
            SpecificationDependency.specificationID, user)

    def set_assignee(self, person):
        """Set the assignee, subscribing them if they need access."""
        self.subscribeIfAccessGrantNeeded(person)
        self._assignee = person

    def get_assignee(self):
        return self._assignee

    assignee = property(get_assignee, set_assignee)

    def set_drafter(self, person):
        """Set the drafter, subscribing them if they need access."""
        self.subscribeIfAccessGrantNeeded(person)
        self._drafter = person

    def get_drafter(self):
        return self._drafter

    drafter = property(get_drafter, set_drafter)

    def set_approver(self, person):
        """Set the approver, subscribing them if they need access."""
        self.subscribeIfAccessGrantNeeded(person)
        self._approver = person

    def get_approver(self):
        return self._approver

    approver = property(get_approver, set_approver)

    def subscribeIfAccessGrantNeeded(self, person):
        """Subscribe person if this specification is not public and if
        the person does not already have grants to access the specification.
        """
        if person is None or self.userCanView(person):
            return
        current_user = getUtility(ILaunchBag).user
        self.subscribe(person, subscribed_by=current_user)

    @cachedproperty
    def subscriptions(self):
        """Sort the subscriptions"""
        from lp.registry.model.person import person_sort_key
        return sorted(
            self._subscriptions,
            key=lambda sub: person_sort_key(sub.person))

    @property
    def workitems_text(self):
        """See ISpecification."""
        workitems_lines = []

        def get_header_text(milestone):
            if milestone is None:
                return "Work items:"
            else:
                return "Work items for %s:" % milestone.name

        if len(self.work_items) == 0:
            return ''
        milestone = self.work_items[0].milestone
        # Start by appending a header for the milestone of the first work
        # item. After this we're going to write a new header whenever we see
        # a work item with a different milestone.
        workitems_lines.append(get_header_text(milestone))
        for work_item in self.work_items:
            if work_item.milestone != milestone:
                workitems_lines.append("")
                milestone = work_item.milestone
                workitems_lines.append(get_header_text(milestone))
            assignee = work_item.assignee
            if assignee is not None:
                assignee_part = "[%s] " % assignee.name
            else:
                assignee_part = ""
            # work_items are ordered by sequence
            workitems_lines.append(
                "%s%s: %s" % (assignee_part, work_item.title,
                              work_item.status.name))
        return "\n".join(workitems_lines)

    @property
    def target(self):
        """See ISpecification."""
        if self.product:
            return self.product
        return self.distribution

    def newWorkItem(self, title, sequence,
                    status=SpecificationWorkItemStatus.TODO, assignee=None,
                    milestone=None):
        """See ISpecification."""
        if milestone is not None:
            assert milestone.target == self.target, (
                "%s does not belong to this spec's target (%s)" %
                (milestone.displayname, self.target.name))
        return SpecificationWorkItem(
            title=title, status=status, specification=self,
            assignee=assignee, milestone=milestone, sequence=sequence)

    @cachedproperty
    def work_items(self):
        """See ISpecification."""
        return list(self._work_items)

    @property
    def _work_items(self):
        # Non-deleted work items, in user-visible order.
        return Store.of(self).find(
            SpecificationWorkItem, specification=self,
            deleted=False).order_by("sequence")

    def setWorkItems(self, new_work_items):
        """Parse and validate `new_work_items` text, then update."""
        field = ISpecification['workitems_text'].bind(self)
        self.updateWorkItems(field.parseAndValidate(new_work_items))

    def _deleteWorkItemsNotMatching(self, titles):
        """Delete all work items whose title does not match the given ones.

        Also set the sequence of those deleted work items to -1.
        """
        title_counts = self._list_to_dict_of_frequency(titles)
        for work_item in self._work_items:
            if (work_item.title not in title_counts or
                    title_counts[work_item.title] == 0):
                work_item.deleted = True
            elif title_counts[work_item.title] > 0:
                title_counts[work_item.title] -= 1

    def _list_to_dict_of_frequency(self, items):
        """Return a dict mapping each item to its number of occurrences."""
        # Renamed the parameter from `list`, which shadowed the builtin.
        dictionary = {}
        for item in items:
            if item not in dictionary:
                dictionary[item] = 1
            else:
                dictionary[item] += 1
        return dictionary

    def updateWorkItems(self, new_work_items):
        """See ISpecification."""
        # First mark work items with titles that are no longer present as
        # deleted.
        self._deleteWorkItemsNotMatching(
            [wi['title'] for wi in new_work_items])
        work_items = self._work_items
        # At this point the list of new_work_items is necessarily the same
        # size (or longer) than the list of existing ones, so we can just
        # iterate over it updating the existing items and creating any new
        # ones.
        to_insert = []
        existing_titles = [wi.title for wi in work_items]
        existing_title_count = self._list_to_dict_of_frequency(
            existing_titles)
        for i, new_wi in enumerate(new_work_items):
            if (new_wi['title'] not in existing_titles or
                    existing_title_count[new_wi['title']] == 0):
                to_insert.append((i, new_wi))
            else:
                existing_title_count[new_wi['title']] -= 1
                # Get an existing work item with the same title and update
                # it to match what we have now.
                existing_wi_index = existing_titles.index(new_wi['title'])
                existing_wi = work_items[existing_wi_index]
                # Mark a work item as dirty - don't use it again this update.
                existing_titles[existing_wi_index] = None
                # Update the sequence to match its current position on the
                # list entered by the user.
                existing_wi.sequence = i
                existing_wi.status = new_wi['status']
                existing_wi.assignee = new_wi['assignee']
                milestone = new_wi['milestone']
                if milestone is not None:
                    assert milestone.target == self.target, (
                        "%s does not belong to this spec's target (%s)" %
                        (milestone.displayname, self.target.name))
                existing_wi.milestone = milestone
        for sequence, item in to_insert:
            self.newWorkItem(item['title'], sequence, item['status'],
                             item['assignee'], item['milestone'])
        del get_property_cache(self).work_items

    def setTarget(self, target):
        """See ISpecification."""
        if IProduct.providedBy(target):
            self.product = target
            self.distribution = None
        elif IDistribution.providedBy(target):
            self.product = None
            self.distribution = target
        else:
            raise AssertionError("Unknown target: %s" % target)

    def retarget(self, target):
        """See ISpecification."""
        if self.target == target:
            return
        self.validateMove(target)
        # We must lose any goal we have set and approved/declined because we
        # are moving to a different target that will have different
        # policies and drivers.
        self.productseries = None
        self.distroseries = None
        self.goalstatus = SpecificationGoalStatus.PROPOSED
        self.goal_proposer = None
        self.date_goal_proposed = None
        self.milestone = None
        self.setTarget(target)
        self.priority = SpecificationPriority.UNDEFINED
        self.direction_approved = False

    def validateMove(self, target):
        """See ISpecification."""
        if target.getSpecification(self.name) is not None:
            raise TargetAlreadyHasSpecification(target, self.name)

    @property
    def goal(self):
        """See ISpecification."""
        if self.productseries:
            return self.productseries
        return self.distroseries

    def proposeGoal(self, goal, proposer):
        """See ISpecification."""
        if goal is None:
            # we are clearing goals
            self.productseries = None
            self.distroseries = None
        elif (IProductSeries.providedBy(goal) and
              goal.product == self.target):
            # set the product series as a goal
            self.productseries = goal
            self.goal_proposer = proposer
            self.date_goal_proposed = UTC_NOW
            # and make sure there is no leftover distroseries goal
            self.distroseries = None
        elif (IDistroSeries.providedBy(goal) and
              goal.distribution == self.target):
            # set the distroseries goal
            self.distroseries = goal
            self.goal_proposer = proposer
            self.date_goal_proposed = UTC_NOW
            # and make sure there is no leftover distroseries goal
            self.productseries = None
        else:
            raise GoalProposeError('Inappropriate goal.')
        # record who made the proposal, and when
        self.goal_proposer = proposer
        self.date_goal_proposed = UTC_NOW
        # and of course set the goal status to PROPOSED
        self.goalstatus = SpecificationGoalStatus.PROPOSED
        # the goal should now also not have a decider
        self.goal_decider = None
        self.date_goal_decided = None
        if goal is not None and goal.personHasDriverRights(proposer):
            self.acceptBy(proposer)

    def acceptBy(self, decider):
        """See ISpecification."""
        self.goalstatus = SpecificationGoalStatus.ACCEPTED
        self.goal_decider = decider
        self.date_goal_decided = UTC_NOW

    def declineBy(self, decider):
        """See ISpecification."""
        self.goalstatus = SpecificationGoalStatus.DECLINED
        self.goal_decider = decider
        self.date_goal_decided = UTC_NOW

    def getSprintSpecification(self, sprintname):
        """See ISpecification."""
        for sprintspecification in self.sprint_links:
            if sprintspecification.sprint.name == sprintname:
                return sprintspecification
        return None

    def notificationRecipientAddresses(self):
        """See ISpecification."""
        related_people = [
            self.owner, self.assignee, self.approver, self.drafter]
        related_people = [
            person for person in related_people if person is not None]
        subscribers = [
            subscription.person for subscription in self.subscriptions]
        notify_people = set(related_people + subscribers)
        # People without access to this spec must not be notified.
        without_access = set(
            getUtility(IService, 'sharing').getPeopleWithoutAccess(
                self, notify_people))
        notify_people -= without_access
        addresses = set()
        for person in notify_people:
            addresses.update(get_contact_email_addresses(person))
        return sorted(addresses)

    # emergent properties
    @property
    def is_incomplete(self):
        """See ISpecification."""
        return not self.is_complete

    @property
    def is_complete(self):
        """See `ISpecification`."""
        # Implemented blueprints are by definition complete.
        if (self.implementation_status ==
                SpecificationImplementationStatus.IMPLEMENTED):
            return True
        # Obsolete and superseded blueprints are considered complete.
        if self.definition_status in (
                SpecificationDefinitionStatus.OBSOLETE,
                SpecificationDefinitionStatus.SUPERSEDED):
            return True
        # Approved information blueprints are also considered complete.
        if ((self.implementation_status ==
             SpecificationImplementationStatus.INFORMATIONAL) and
            (self.definition_status ==
             SpecificationDefinitionStatus.APPROVED)):
            return True
        return False

    @property
    def is_started(self):
        """See ISpecification.

        This is a code implementation of the SQL in spec_started_clause
        """
        return (self.implementation_status not in [
                    SpecificationImplementationStatus.UNKNOWN,
                    SpecificationImplementationStatus.NOTSTARTED,
                    SpecificationImplementationStatus.DEFERRED,
                    SpecificationImplementationStatus.INFORMATIONAL,
                ] or ((self.implementation_status ==
                       SpecificationImplementationStatus.INFORMATIONAL) and
                      (self.definition_status ==
                       SpecificationDefinitionStatus.APPROVED)))

    @property
    def lifecycle_status(self):
        """Combine the is_complete and is_started emergent properties."""
        if self.is_complete:
            return SpecificationLifecycleStatus.COMPLETE
        elif self.is_started:
            return SpecificationLifecycleStatus.STARTED
        else:
            return SpecificationLifecycleStatus.NOTSTARTED

    def setDefinitionStatus(self, definition_status, user):
        """Set the definition status and refresh the lifecycle status."""
        self.definition_status = definition_status
        self.updateLifecycleStatus(user)

    def setImplementationStatus(self, implementation_status, user):
        """Set the implementation status and refresh the lifecycle status."""
        self.implementation_status = implementation_status
        self.updateLifecycleStatus(user)

    def updateLifecycleStatus(self, user):
        """See ISpecification."""
        newstatus = None
        if self.is_started:
            if self.starterID is None:
                newstatus = SpecificationLifecycleStatus.STARTED
                self.date_started = UTC_NOW
                self.starter = user
        else:
            if self.starterID is not None:
                newstatus = SpecificationLifecycleStatus.NOTSTARTED
                self.date_started = None
                self.starter = None
        if self.is_complete:
            if self.completerID is None:
                newstatus = SpecificationLifecycleStatus.COMPLETE
                self.date_completed = UTC_NOW
                self.completer = user
        else:
            if self.completerID is not None:
                self.date_completed = None
                self.completer = None
                if self.is_started:
                    newstatus = SpecificationLifecycleStatus.STARTED
                else:
                    newstatus = SpecificationLifecycleStatus.NOTSTARTED
        return newstatus

    @property
    def is_blocked(self):
        """See ISpecification."""
        for spec in self.dependencies:
            if spec.is_incomplete:
                return True
        return False

    @property
    def has_accepted_goal(self):
        """See ISpecification."""
        if (self.goal is not None and
                self.goalstatus == SpecificationGoalStatus.ACCEPTED):
            return True
        return False

    def getDelta(self, old_spec, user):
        """See ISpecification."""
        delta = ObjectDelta(old_spec, self)
        delta.recordNewValues(("title", "summary", "specurl",
                               "productseries", "distroseries", "milestone"))
        delta.recordNewAndOld(("name", "priority", "definition_status",
                               "target", "approver", "assignee", "drafter",
                               "whiteboard", "workitems_text"))
        delta.recordListAddedAndRemoved("bugs", "bugs_linked",
                                        "bugs_unlinked")
        if delta.changes:
            changes = delta.changes
            changes["specification"] = self
            changes["user"] = user
            return SpecificationDelta(**changes)
        else:
            return None

    @property
    def informational(self):
        """For backwards compatibility:
        implemented as a value in implementation_status.
        """
        return (self.implementation_status ==
                SpecificationImplementationStatus.INFORMATIONAL)

    # subscriptions
    def subscription(self, person):
        """See ISpecification."""
        return SpecificationSubscription.selectOneBy(
            specification=self, person=person)

    def getSubscriptionByName(self, name):
        """See ISpecification."""
        for sub in self.subscriptions:
            if sub.person.name == name:
                return sub
        return None

    def subscribe(self, person, subscribed_by=None, essential=False):
        """See ISpecification."""
        if subscribed_by is None:
            subscribed_by = person
        # Create or modify a user's subscription to this blueprint.
        # First see if a relevant subscription exists, and if so, return it
        sub = self.subscription(person)
        if sub is not None:
            if sub.essential != essential:
                # If a subscription already exists, but the value for
                # 'essential' changes, there's no need to create a new
                # subscription, but we modify the existing subscription
                # and notify the user about the change.
                sub.essential = essential
                # The second argument should really be a copy of sub with
                # only the essential attribute changed, but we know
                # that we can get away with not examining the attribute
                # at all - it's a boolean!
                notify(ObjectModifiedEvent(
                    sub, sub, ['essential'], user=subscribed_by))
            return sub
        # since no previous subscription existed, create and return a new one
        sub = SpecificationSubscription(
            specification=self, person=person, essential=essential)
        property_cache = get_property_cache(self)
        # Bug fix: this guard used to test for the key 'subscription'
        # (singular), which is never cached - the cachedproperty stores
        # under 'subscriptions' - so the cached list silently went stale
        # whenever a new subscription was added.
        if 'subscriptions' in property_cache:
            from lp.registry.model.person import person_sort_key
            property_cache.subscriptions.append(sub)
            property_cache.subscriptions.sort(
                key=lambda sub: person_sort_key(sub.person))
        if self.information_type in PRIVATE_INFORMATION_TYPES:
            # Grant the subscriber access if they can't see the
            # specification.
            service = getUtility(IService, 'sharing')
            ignored, ignored, shared_specs = service.getVisibleArtifacts(
                person, specifications=[self], ignore_permissions=True)
            if not shared_specs:
                service.ensureAccessGrants(
                    [person], subscribed_by, specifications=[self])
        notify(ObjectCreatedEvent(sub, user=subscribed_by))
        return sub

    def unsubscribe(self, person, unsubscribed_by, ignore_permissions=False):
        """See ISpecification."""
        # see if a relevant subscription exists, and if so, delete it
        if person is None:
            person = unsubscribed_by
        for sub in self.subscriptions:
            if sub.person.id == person.id:
                if (not sub.canBeUnsubscribedByUser(unsubscribed_by) and
                        not ignore_permissions):
                    raise UserCannotUnsubscribePerson(
                        '%s does not have permission to unsubscribe %s.'
                        % (unsubscribed_by.displayname, person.displayname))
                get_property_cache(self).subscriptions.remove(sub)
                SpecificationSubscription.delete(sub.id)
                artifacts_to_delete = getUtility(
                    IAccessArtifactSource).find([self])
                getUtility(IAccessArtifactGrantSource).revokeByArtifact(
                    artifacts_to_delete, [person])
                return

    def isSubscribed(self, person):
        """See lp.blueprints.interfaces.specification.ISpecification."""
        if person is None:
            return False
        return bool(self.subscription(person))

    # Template methods for BugLinkTargetMixin
    buglinkClass = SpecificationBug

    def createBugLink(self, bug):
        """See BugLinkTargetMixin."""
        return SpecificationBug(specification=self, bug=bug)

    # sprint linking
    def linkSprint(self, sprint, user):
        """See ISpecification."""
        from lp.blueprints.model.sprintspecification import (
            SprintSpecification)
        for sprint_link in self.sprint_links:
            # sprints have unique names
            if sprint_link.sprint.name == sprint.name:
                return sprint_link
        sprint_link = SprintSpecification(
            specification=self, sprint=sprint, registrant=user)
        if sprint.isDriver(user):
            sprint_link.acceptBy(user)
        return sprint_link

    def unlinkSprint(self, sprint):
        """See ISpecification."""
        from lp.blueprints.model.sprintspecification import (
            SprintSpecification)
        for sprint_link in self.sprint_links:
            # sprints have unique names
            if sprint_link.sprint.name == sprint.name:
                SprintSpecification.delete(sprint_link.id)
                return sprint_link

    # dependencies
    def createDependency(self, specification):
        """See ISpecification."""
        for deplink in self.spec_dependency_links:
            if deplink.dependency.id == specification.id:
                return deplink
        return SpecificationDependency(
            specification=self, dependency=specification)

    def removeDependency(self, specification):
        """See ISpecification."""
        # see if a relevant dependency link exists, and if so, delete it
        for deplink in self.spec_dependency_links:
            if deplink.dependency.id == specification.id:
                SpecificationDependency.delete(deplink.id)
                return deplink

    def all_deps(self, user=None):
        """All specs this one transitively depends on, visible to `user`."""
        return list(
            Store.of(self).with_(
                SQL(recursive_dependent_query(user),
                    params=(self.id, ))).find(
                Specification,
                Specification.id != self.id,
                Specification.id.is_in(SQL('select id from dependencies')),
            ).order_by(Specification.name, Specification.id))

    def all_blocked(self, user=None):
        """See `ISpecification`."""
        return list(
            Store.of(self).with_(
                SQL(recursive_blocked_query(user),
                    params=(self.id, ))).find(
                Specification,
                Specification.id != self.id,
                Specification.id.is_in(SQL('select id from blocked')),
            ).order_by(Specification.name, Specification.id))

    # branches
    def getBranchLink(self, branch):
        """Return the link between this spec and `branch`, if any."""
        return SpecificationBranch.selectOneBy(
            specificationID=self.id, branchID=branch.id)

    def linkBranch(self, branch, registrant):
        """Link `branch` to this spec, returning any existing link first."""
        branch_link = self.getBranchLink(branch)
        if branch_link is not None:
            return branch_link
        branch_link = SpecificationBranch(
            specification=self, branch=branch, registrant=registrant)
        Store.of(self).flush()
        del get_property_cache(self).linked_branches
        notify(ObjectCreatedEvent(branch_link))
        return branch_link

    def unlinkBranch(self, branch, user):
        """Remove the link between this spec and `branch`."""
        spec_branch = self.getBranchLink(branch)
        spec_branch.destroySelf()
        Store.of(self).flush()
        del get_property_cache(self).linked_branches

    def getLinkedBugTasks(self, user):
        """See `ISpecification`."""
        params = BugTaskSearchParams(user=user, linked_blueprints=self.id)
        tasks = getUtility(IBugTaskSet).search(params)
        if self.distroseries is not None:
            context = self.distroseries
        elif self.distribution is not None:
            context = self.distribution
        elif self.productseries is not None:
            context = self.productseries
        else:
            context = self.product
        return filter_bugtasks_by_context(context, tasks)

    def __repr__(self):
        return '<Specification %s %r for %r>' % (
            self.id, self.name, self.target.name)

    def getAllowedInformationTypes(self, who):
        """See `ISpecification`."""
        return self.target.getAllowedSpecificationInformationTypes()

    def transitionToInformationType(self, information_type, who):
        """See ISpecification."""
        # avoid circular imports.
        from lp.registry.model.accesspolicy import (
            reconcile_access_for_artifact,
            )
        if self.information_type == information_type:
            return False
        if information_type not in self.getAllowedInformationTypes(who):
            raise CannotChangeInformationType("Forbidden by project policy.")
        self.information_type = information_type
        reconcile_access_for_artifact(self, information_type, [self.target])
        if (information_type in PRIVATE_INFORMATION_TYPES and
                self.subscribers):
            # Grant the subscribers access if they do not have a
            # policy grant.
            service = getUtility(IService, 'sharing')
            blind_subscribers = service.getPeopleWithoutAccess(
                self, self.subscribers)
            if len(blind_subscribers):
                service.ensureAccessGrants(
                    blind_subscribers, who, specifications=[self],
                    ignore_permissions=True)
        return True

    @cachedproperty
    def _known_viewers(self):
        """A set of known persons able to view the specifcation."""
        return set()

    def userCanView(self, user):
        """See `ISpecification`."""
        # Avoid circular imports.
        from lp.blueprints.model.specificationsearch import (
            get_specification_privacy_filter)
        if self.information_type in PUBLIC_INFORMATION_TYPES:
            return True
        if user is None:
            return False
        if user.id in self._known_viewers:
            return True
        if not Store.of(self).find(
                Specification,
                Specification.id == self.id,
                *get_specification_privacy_filter(user)).is_empty():
            self._known_viewers.add(user.id)
            return True
        return False
class CodeImportMachine(SQLBase):
    """See `ICodeImportMachine`."""

    _defaultOrder = ['hostname']

    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    hostname = StringCol(default=None)
    state = EnumCol(enum=CodeImportMachineState, notNull=True,
                    default=CodeImportMachineState.OFFLINE)
    heartbeat = UtcDateTimeCol(notNull=False)

    current_jobs = SQLMultipleJoin('CodeImportJob', joinColumn='machine',
                                   orderBy=['date_started', 'id'])
    events = SQLMultipleJoin('CodeImportEvent', joinColumn='machine',
                             orderBy=['-date_created', '-id'])

    def shouldLookForJob(self, worker_limit):
        """See `ICodeImportMachine`."""
        active_jobs = self.current_jobs.count()
        # An offline machine never takes work and does not even update
        # its heartbeat.
        if self.state == CodeImportMachineState.OFFLINE:
            return False
        self.heartbeat = UTC_NOW
        if self.state == CodeImportMachineState.QUIESCING:
            # A quiescing machine takes no new work; once it is drained
            # it turns itself off.
            if active_jobs == 0:
                self.setOffline(CodeImportMachineOfflineReason.QUIESCED)
            return False
        if self.state == CodeImportMachineState.ONLINE:
            # Accept work only while below the per-machine job limit.
            return active_jobs < worker_limit
        raise AssertionError("Unknown machine state %r??" % self.state)

    def setOnline(self, user=None, message=None):
        """See `ICodeImportMachine`."""
        valid_states = (CodeImportMachineState.OFFLINE,
                        CodeImportMachineState.QUIESCING)
        if self.state not in valid_states:
            raise AssertionError("State of machine %s was %s."
                                 % (self.hostname, self.state.name))
        self.state = CodeImportMachineState.ONLINE
        getUtility(ICodeImportEventSet).newOnline(self, user, message)

    def setOffline(self, reason, user=None, message=None):
        """See `ICodeImportMachine`."""
        valid_states = (CodeImportMachineState.ONLINE,
                        CodeImportMachineState.QUIESCING)
        if self.state not in valid_states:
            raise AssertionError("State of machine %s was %s."
                                 % (self.hostname, self.state.name))
        self.state = CodeImportMachineState.OFFLINE
        getUtility(ICodeImportEventSet).newOffline(
            self, reason, user, message)

    def setQuiescing(self, user, message=None):
        """See `ICodeImportMachine`."""
        if self.state != CodeImportMachineState.ONLINE:
            raise AssertionError("State of machine %s was %s."
                                 % (self.hostname, self.state.name))
        self.state = CodeImportMachineState.QUIESCING
        getUtility(ICodeImportEventSet).newQuiesce(self, user, message)
class User(ICTVObject):
    """A user account, stored in the "user_table" table (see sqlmeta)."""

    username = StringCol(unique=True, default=None, length=255)
    fullname = StringCol(default=None)
    email = StringCol(notNone=True, alternateID=True, length=255)
    super_admin = BoolCol(notNone=True, default=False)
    admin = BoolCol(notNone=True, default=False)
    disabled = BoolCol(notNone=True, default=True)
    capsules = SQLMultipleJoin('Capsule', joinColumn='owner_id')
    screens = SQLRelatedJoin('Screen')
    authorized_channels = SQLRelatedJoin('Channel')
    roles = SQLMultipleJoin('Role', joinColumn='user_id')
    password = StringCol(default=None)  # Used for local login
    reset_secret = StringCol(
        notNone=True)  # Used for local login to reset password
    has_toured = BoolCol(default=False)  # Has the user completed the app tour

    def __init__(self, *args, **kwargs):
        # Every new user gets a freshly generated password-reset secret.
        # NOTE(review): positional *args are accepted but never forwarded to
        # super().__init__() - confirm this is intentional.
        kwargs['reset_secret'] = utils.generate_secret()
        super().__init__(**kwargs)

    # WARN: user.log_name cannot longer be used in templates since the
    # __getattr__ will not be triggered in jinja
    def _get_log_name(self):
        """ Returns a log friendly and unique name for this user. """
        # Uniqueness comes from the appended database id.
        return "%s (%d)" % (self.fullname if self.fullname else self.email,
                            self.id)

    # WARN: user.readable_name cannot longer be used in templates since the
    # __getattr__ will not be triggered in jinja
    def _get_readable_name(self):
        """ Returns a user friendly and unique name for this user. """
        # Preference order: fullname, then username, then email.
        return self.fullname if self.fullname is not None \
            else self.username if self.username is not None else self.email

    # WARN: user.highest_permission_level cannot longer be used in templates
    # since the __getattr__ will not be triggered in jinja
    def _get_highest_permission_level(self):
        """ Return the highest permission level of this user.
        """
        if self.super_admin:
            return UserPermissions.super_administrator
        if self.admin:
            return UserPermissions.administrator
        highest = UserPermissions.no_permission
        for role in self.roles:
            # NOTE(review): this replaces `highest` rather than OR-ing the
            # role's flags into it - confirm a later role is meant to
            # supersede an earlier one rather than accumulate.
            if role.permission_level not in highest:
                highest = role.permission_level
            # channel_administrator is the strongest role-derived level,
            # so stop scanning once it is reached.
            if highest == UserPermissions.channel_administrator:
                break
        if self.owns_screen():
            # Screen ownership adds a flag on top of the role-derived level.
            highest = highest | UserPermissions.screen_administrator
        return highest

    def get_channels_with_permission_level(self, permission_level):
        """ Returns the channels which precisely grants this permission level
        to the user. """
        return Role.selectBy(
            user=self,
            permission_level=UserPermissions.get_permission_string(
                permission_level)).throughTo.channel

    def owns_screen(self):
        """ Return whether or not this user is owner of screens. """
        return self.screens.count() > 0

    def get_subscriptions_of_owned_screens(self):
        """ Return the subscriptions of the screens possessed by this user.
        """
        if UserPermissions.administrator in self.highest_permission_level:
            # Administrators see every subscription, not just their own.
            return Subscription.select()
        return self.screens.throughTo.subscriptions

    class sqlmeta:
        # prevent table name to collide with reserved keywords of some
        # databases
        table = "user_table"
class SourcePackageRelease(SQLBase):
    """A single source package upload: one (name, version) in an archive.

    Holds the DSC metadata, build relationships and changelog of a source
    package release, plus helpers reaching its builds, files, publications
    and package diffs.
    """
    _table = 'SourcePackageRelease'

    section = ForeignKey(foreignKey='Section', dbName='section')
    creator = ForeignKey(dbName='creator', foreignKey='Person',
                         storm_validator=validate_public_person, notNull=True)
    component = ForeignKey(foreignKey='Component', dbName='component')
    sourcepackagename = ForeignKey(foreignKey='SourcePackageName',
                                   dbName='sourcepackagename', notNull=True)
    maintainer = ForeignKey(dbName='maintainer', foreignKey='Person',
                            storm_validator=validate_public_person,
                            notNull=True)
    signing_key_owner_id = Int(name="signing_key_owner")
    signing_key_owner = Reference(signing_key_owner_id, 'Person.id')
    signing_key_fingerprint = Unicode()
    urgency = EnumCol(dbName='urgency', schema=SourcePackageUrgency,
                      default=SourcePackageUrgency.LOW, notNull=True)
    dateuploaded = UtcDateTimeCol(dbName='dateuploaded', notNull=True,
                                  default=UTC_NOW)
    dsc = StringCol(dbName='dsc')
    version = StringCol(dbName='version', notNull=True)
    changelog = ForeignKey(foreignKey='LibraryFileAlias', dbName='changelog')
    changelog_entry = StringCol(dbName='changelog_entry')
    buildinfo = ForeignKey(foreignKey='LibraryFileAlias', dbName='buildinfo')
    builddepends = StringCol(dbName='builddepends')
    builddependsindep = StringCol(dbName='builddependsindep')
    build_conflicts = StringCol(dbName='build_conflicts')
    build_conflicts_indep = StringCol(dbName='build_conflicts_indep')
    architecturehintlist = StringCol(dbName='architecturehintlist')
    homepage = StringCol(dbName='homepage')
    format = EnumCol(dbName='format', schema=SourcePackageType,
                     default=SourcePackageType.DPKG, notNull=True)
    upload_distroseries = ForeignKey(foreignKey='DistroSeries',
                                     dbName='upload_distroseries')
    upload_archive = ForeignKey(foreignKey='Archive',
                                dbName='upload_archive', notNull=True)
    source_package_recipe_build_id = Int(name='sourcepackage_recipe_build')
    source_package_recipe_build = Reference(
        source_package_recipe_build_id, 'SourcePackageRecipeBuild.id')

    # XXX cprov 2006-09-26: Those fields are set as notNull and required in
    # ISourcePackageRelease, however they can't be not NULL in DB since old
    # records doesn't satisfy this condition. We will sort it before using
    # 'NoMoreAptFtparchive' implementation for PRIMARY archive. For PPA
    # (primary target) we don't need to populate old records.
    dsc_maintainer_rfc822 = StringCol(dbName='dsc_maintainer_rfc822')
    dsc_standards_version = StringCol(dbName='dsc_standards_version')
    dsc_format = StringCol(dbName='dsc_format')
    dsc_binaries = StringCol(dbName='dsc_binaries')

    # MultipleJoins
    publishings = SQLMultipleJoin('SourcePackagePublishingHistory',
                                  joinColumn='sourcepackagerelease',
                                  orderBy="-datecreated")

    # JSON-serialised list of [name, value] pairs; see user_defined_fields.
    _user_defined_fields = StringCol(dbName='user_defined_fields')

    def __init__(self, *args, **kwargs):
        # Serialise the convenience kwarg into the underlying JSON column.
        if 'user_defined_fields' in kwargs:
            kwargs['_user_defined_fields'] = json.dumps(
                kwargs['user_defined_fields'])
            del kwargs['user_defined_fields']
        # copyright isn't on the Storm class, since we don't want it
        # loaded every time. Set it separately.
        # Bug fix: the previous code only bound the local name when the kwarg
        # was present, so constructing without 'copyright' fell through to
        # the *builtin* 'copyright' object on the assignment below. Popping
        # with a None default makes the missing case store NULL instead.
        copyright = kwargs.pop('copyright', None)
        super(SourcePackageRelease, self).__init__(*args, **kwargs)
        self.copyright = copyright

    def __repr__(self):
        """Returns an informative representation of a SourcePackageRelease."""
        return '<{cls} {pkg_name} (id: {id}, version: {version})>'.format(
            cls=self.__class__.__name__,
            pkg_name=self.name,
            id=self.id,
            version=self.version)

    @property
    def copyright(self):
        """See `ISourcePackageRelease`.

        Fetched with raw SQL so the (potentially large) text is only loaded
        on demand, never as part of the Storm class.
        """
        store = Store.of(self)
        store.flush()
        return store.execute(
            "SELECT copyright FROM sourcepackagerelease WHERE id=%s",
            (self.id, )).get_one()[0]

    @copyright.setter
    def copyright(self, content):
        """See `ISourcePackageRelease`."""
        store = Store.of(self)
        store.flush()
        if content is not None:
            # NOTE(review): 'unicode' is Python 2 only; change to 'str' when
            # this module is ported to Python 3.
            content = unicode(content)
        store.execute(
            "UPDATE sourcepackagerelease SET copyright=%s WHERE id=%s",
            (content, self.id))

    @property
    def user_defined_fields(self):
        """See `IBinaryPackageRelease`.

        Returns [] when the column is NULL or holds a JSON null.
        """
        if self._user_defined_fields is None:
            return []
        user_defined_fields = json.loads(self._user_defined_fields)
        if user_defined_fields is None:
            return []
        return user_defined_fields

    def getUserDefinedField(self, name):
        """Return the value of the named field (case-insensitive), or None."""
        for k, v in self.user_defined_fields:
            if k.lower() == name.lower():
                return v

    @cachedproperty
    def package_diffs(self):
        """All diffs targeting this release, newest request first."""
        return list(
            Store.of(self).find(PackageDiff, to_source=self).order_by(
                Desc(PackageDiff.date_requested)))

    @property
    def builds(self):
        """See `ISourcePackageRelease`."""
        # Excluding PPA builds may seem like a strange thing to do, but,
        # since Archive.copyPackage can copy packages across archives, a
        # build may well have a different archive to the corresponding
        # sourcepackagerelease.
        return BinaryPackageBuild.select("""
            source_package_release = %s AND
            archive.id = binarypackagebuild.archive AND
            archive.purpose IN %s
            """ % sqlvalues(self.id, MAIN_ARCHIVE_PURPOSES),
            orderBy=['-date_created', 'id'],
            clauseTables=['Archive'])

    @property
    def age(self):
        """See ISourcePackageRelease."""
        now = datetime.datetime.now(pytz.timezone('UTC'))
        return now - self.dateuploaded

    def failed_builds(self):
        """Return the cached builds that failed to build.

        NOTE(review): plain method, unlike the sibling 'needs_building'
        property — kept as-is since callers invoke it with parentheses.
        """
        return [
            build for build in self._cached_builds
            if build.buildstate == BuildStatus.FAILEDTOBUILD
        ]

    @property
    def needs_building(self):
        """True when any build is still pending or waiting on deps/chroot."""
        for build in self._cached_builds:
            if build.status in [
                    BuildStatus.NEEDSBUILD, BuildStatus.MANUALDEPWAIT,
                    BuildStatus.CHROOTWAIT
            ]:
                return True
        return False

    @cachedproperty
    def _cached_builds(self):
        # The reason we have this as a cachedproperty is that all the
        # *build* methods here need access to it; better not to
        # recalculate it multiple times.
        return list(self.builds)

    @property
    def name(self):
        """The source package's name (delegated to SourcePackageName)."""
        return self.sourcepackagename.name

    @property
    def title(self):
        """Display title: '<name> - <version>'."""
        return '%s - %s' % (self.sourcepackagename.name, self.version)

    @cachedproperty
    def published_archives(self):
        """Distinct archives this release is published in, ordered by id."""
        archives = set(
            pub.archive for pub in self.publishings.prejoin(['archive']))
        return sorted(archives, key=operator.attrgetter('id'))

    def addFile(self, file, filetype=None):
        """See ISourcePackageRelease."""
        if filetype is None:
            filetype = determine_source_file_type(file.filename)
        sprf = SourcePackageReleaseFile(sourcepackagerelease=self,
                                        filetype=filetype,
                                        libraryfile=file)
        # Invalidate the cached 'files' list so it is recomputed on next use.
        del get_property_cache(self).files
        return sprf

    @cachedproperty
    def files(self):
        """See `ISourcePackageRelease`."""
        return list(
            Store.of(self).find(SourcePackageReleaseFile,
                                sourcepackagerelease=self).order_by(
                                    SourcePackageReleaseFile.libraryfileID))

    def getFileByName(self, filename):
        """See `ISourcePackageRelease`.

        :raises NotFoundError: when no file of that name is attached.
        """
        sprf = Store.of(self).find(
            SourcePackageReleaseFile,
            SourcePackageReleaseFile.sourcepackagerelease == self.id,
            LibraryFileAlias.id == SourcePackageReleaseFile.libraryfileID,
            LibraryFileAlias.filename == filename).one()
        if sprf:
            return sprf.libraryfile
        else:
            raise NotFoundError(filename)

    def getPackageSize(self):
        """See ISourcePackageRelease.

        Returns the total size (KiB) of all files attached to this release,
        or 0.0 when there are none.
        """
        size_query = """
            SELECT
                SUM(LibraryFileContent.filesize)/1024.0
            FROM
                SourcePackagereLease
                JOIN SourcePackageReleaseFile ON
                    SourcePackageReleaseFile.sourcepackagerelease =
                    SourcePackageRelease.id
                JOIN LibraryFileAlias ON
                    SourcePackageReleaseFile.libraryfile =
                    LibraryFileAlias.id
                JOIN LibraryFileContent ON
                    LibraryFileAlias.content = LibraryFileContent.id
            WHERE
                SourcePackageRelease.id = %s
            """ % sqlvalues(self)
        cur = cursor()
        cur.execute(size_query)
        results = cur.fetchone()
        if len(results) == 1 and results[0] is not None:
            return float(results[0])
        else:
            return 0.0

    def override(self, component=None, section=None, urgency=None):
        """See ISourcePackageRelease."""
        if component is not None:
            self.component = component
            # See if the new component requires a new archive:
            distribution = self.upload_distroseries.distribution
            new_archive = distribution.getArchiveByComponent(component.name)
            if new_archive is not None:
                self.upload_archive = new_archive
            else:
                # Bug fix: the '%s' placeholder was previously never filled,
                # producing a literal "'%s'" in the error message.
                raise QueueInconsistentStateError(
                    "New component '%s' requires a non-existent archive." %
                    component.name)
        if section is not None:
            self.section = section
        if urgency is not None:
            self.urgency = urgency

    @property
    def upload_changesfile(self):
        """See `ISourcePackageRelease`."""
        package_upload = self.package_upload
        # Cope with `SourcePackageRelease`s imported by gina, they do not
        # have a corresponding `PackageUpload` record.
        if package_upload is None:
            return None
        return package_upload.changesfile

    @property
    def package_upload(self):
        """See `ISourcepackageRelease`."""
        store = Store.of(self)
        # The join on 'changesfile' is used for pre-fetching the
        # corresponding library file, so callsites don't have to issue an
        # extra query.
        origin = [
            PackageUploadSource,
            Join(PackageUpload,
                 PackageUploadSource.packageuploadID == PackageUpload.id),
            Join(LibraryFileAlias,
                 LibraryFileAlias.id == PackageUpload.changes_file_id),
            Join(LibraryFileContent,
                 LibraryFileContent.id == LibraryFileAlias.contentID),
        ]
        results = store.using(*origin).find(
            (PackageUpload, LibraryFileAlias, LibraryFileContent),
            PackageUploadSource.sourcepackagerelease == self,
            PackageUpload.archive == self.upload_archive,
            PackageUpload.distroseries == self.upload_distroseries)
        # Return the unique `PackageUpload` record that corresponds to the
        # upload of this `SourcePackageRelease`, load the `LibraryFileAlias`
        # and the `LibraryFileContent` in cache because it's most likely
        # they will be needed.
        return DecoratedResultSet(results, operator.itemgetter(0)).one()

    @property
    def uploader(self):
        """See `ISourcePackageRelease`"""
        # Recipe builds take priority over signing-key attribution.
        if self.source_package_recipe_build is not None:
            return self.source_package_recipe_build.requester
        if self.signing_key_owner is not None:
            return self.signing_key_owner
        return None

    @property
    def change_summary(self):
        """See ISourcePackageRelease"""
        # this regex is copied from apt-listchanges.py courtesy of MDZ
        # (made a raw string so '\S'/'\w' are not treated as invalid string
        # escapes).
        new_stanza_line = re.compile(
            r'^\S+ \((?P<version>.*)\) .*;.*urgency=(?P<urgency>\w+).*')
        logfile = StringIO(self.changelog_entry)
        change = ''
        top_stanza = False
        for line in logfile.readlines():
            match = new_stanza_line.match(line)
            if match:
                # Stop once a second stanza header is reached: only the
                # topmost changelog entry is summarised.
                if top_stanza:
                    break
                top_stanza = True
            change += line
        return change

    def getDiffTo(self, to_sourcepackagerelease):
        """See ISourcePackageRelease."""
        return PackageDiff.selectOneBy(from_source=self,
                                       to_source=to_sourcepackagerelease)

    def requestDiffTo(self, requester, to_sourcepackagerelease):
        """See ISourcePackageRelease.

        :raises PackageDiffAlreadyRequested: if an identical diff exists.
        """
        candidate = self.getDiffTo(to_sourcepackagerelease)
        if candidate is not None:
            raise PackageDiffAlreadyRequested(
                "%s has already been requested" % candidate.title)
        Store.of(to_sourcepackagerelease).flush()
        # Invalidate the target's cached diff list before adding a new one.
        del get_property_cache(to_sourcepackagerelease).package_diffs
        packagediff = PackageDiff(from_source=self,
                                  to_source=to_sourcepackagerelease,
                                  requester=requester)
        getUtility(IPackageDiffJobSource).create(packagediff)
        return packagediff

    def aggregate_changelog(self, since_version):
        """See `ISourcePackagePublishingHistory`."""
        if self.changelog is None:
            return None
        apt_pkg.init_system()
        chunks = []
        changelog = self.changelog
        # The python-debian API for parsing changelogs is pretty awful. The
        # only useful way of extracting info is to use the iterator on
        # Changelog and then compare versions.
        try:
            changelog_text = changelog.read().decode("UTF-8", "replace")
            for block in Changelog(changelog_text):
                version = block._raw_version
                if (since_version and
                        apt_pkg.version_compare(version, since_version) <= 0):
                    break
                # Poking in private attributes is not nice but again the
                # API is terrible. We want to ensure that the name/date
                # line is omitted from these composite changelogs.
                block._no_trailer = True
                try:
                    # python-debian adds an extra blank line to the chunks
                    # so we'll have to sort this out.
                    chunks.append(str(block).rstrip())
                except ChangelogCreateError:
                    continue
                if not since_version:
                    # If a particular version was not requested we just
                    # return the most recent changelog entry.
                    break
        except ChangelogParseError:
            return None
        output = "\n\n".join(chunks)
        # NOTE(review): str.decode only exists on Python 2; this line needs
        # revisiting when the module is ported to Python 3.
        return output.decode("utf-8", "replace")
class Race(SQLObject): name = StringCol() fightersAsList = MultipleJoin('RFighter', joinColumn="rf_id") fightersAsSResult = SQLMultipleJoin('RFighter', joinColumn="rf_id")
class PhoneNumber(SQLObject): number = StringCol() calls = SQLMultipleJoin('PhoneCall') incoming = SQLMultipleJoin('PhoneCall', joinColumn='toID')
class Screen(ICTVObject):
    """A physical display registered in ICTV, with its subscriptions and owners."""
    name = StringCol(notNone=True)
    building = ForeignKey('Building', notNone=True, cascade=False)
    location = StringCol(default=None)  # A free text field to precise the screen location
    screen_id = DatabaseIndex('name', 'building', unique=True)
    owners = SQLRelatedJoin('User')
    subscriptions = SQLMultipleJoin('Subscription')
    secret = StringCol(notNone=True, default=utils.generate_secret)
    macs = SQLMultipleJoin('ScreenMac')
    last_ip = StringCol(default=None)
    last_access = DateTimeCol(default=None)
    shuffle = BoolCol(default=False)
    comment = StringCol(default=None)
    show_postit = BoolCol(default=False)
    show_slide_number = BoolCol(default=False)
    orientation = EnumCol(enumValues=['Landscape', 'Portrait'], default='Landscape')

    @property
    def subscribed_channels(self):
        """The channels this screen is currently subscribed to."""
        return self.subscriptions.throughTo.channel

    def subscribe_to(self, user, channel, weight=1):
        """ Subscribes this screen to the channel.
        If this screen is already subscribed to the channel by the user, it changes the weight if needed.
        :param user: The user requesting the subscription.
        :param channel: The channel to subscribe this screen to.
        :param weight: The optional positive non null weight to give to the channel by this screen.
        :return: None
        """
        if weight <= 0:
            # Invalid weight: silently ignore the request.
            return
        existing = Subscription.selectBy(screen=self, channel=channel).getOne(None)
        if existing is None:
            Subscription(screen=self, channel=channel, weight=weight, created_by=user)
            return
        # Already subscribed: only touch the fields that actually changed.
        if existing.weight != weight:
            existing.weight = weight
        if existing.created_by != user:
            existing.created_by = user

    def unsubscribe_from(self, user, channel):
        """ Deletes the user's subscription from this screen to the channel if one exists,
        otherwise do nothing.
        :param user: The user requesting the subscription deletion.
        :param channel: The channel to unsubscribe this screen from.
        :return: None
        """
        subscription = Subscription.selectBy(screen=self, channel=channel).getOne(None)
        # TODO: Commit this to user history
        if subscription is not None:
            subscription.destroySelf()

    def is_subscribed_to(self, channel):
        """Tell whether this screen currently has a subscription to channel."""
        return channel in self.subscriptions.throughTo.channel

    @classmethod
    def get_visible_screens_of(cls, user):
        """ Returns the screens that are managed by the user (or all screens for the superadmin)
        :param user: The user to retrieve the managed screens.
        :return: An iterable with the managed screens (iterable of sqlobjects)
        """
        if UserPermissions.administrator in user.highest_permission_level:
            # Administrators see every screen.
            return Screen.select()
        return user.screens

    def safe_add_user(self, user):
        """ Avoid user duplication in screen owners. """
        if user in self.owners:
            return
        self.addUser(user)

    def get_view_link(self):
        """ Returns a relative URL to the screen rendered view. """
        return '/screens/%d/view/%s' % (self.id, self.secret)

    def get_client_link(self):
        """ Returns a relative URL to web-based client for this screen. """
        return '/screens/%d/client/%s' % (self.id, self.secret)

    def get_macs_string(self):
        """Semicolon-separated pretty MAC addresses of this screen."""
        return ';'.join([mac.get_pretty_mac() for mac in self.macs])

    def get_channels_content(self, app):
        """ Returns all the capsules provided by the channels of this screen
        as an Iterable[PluginCapsule] ignoring channel duplicates """
        plugin_manager = app.plugin_manager
        seen_channel_ids = set()
        capsule_batches = []
        for subscription in self.subscriptions:
            for chan in subscription.channel.flatten():
                # Skip channels already contributed by another subscription.
                if chan.id in seen_channel_ids:
                    continue
                seen_channel_ids.add(chan.id)
                capsule_batches.append(plugin_manager.get_plugin_content(chan))
        screen_capsules = list(itertools.chain.from_iterable(capsule_batches))
        if self.shuffle:
            random.shuffle(screen_capsules)
        return screen_capsules