class PackageDiffJobDerived(BaseRunnableJob):
    """Base class for jobs that generate a package diff.

    Wraps a generic `Job` row whose ``base_job_type`` is
    ``JobType.GENERATE_PACKAGE_DIFF``; the target diff is identified by
    id in the job's JSON data.
    """

    __metaclass__ = EnumeratedSubclass

    delegates(IPackageDiffJob)
    classProvides(IPackageDiffJobSource)
    config = config.IPackageDiffJobSource

    def __init__(self, job):
        # Only accept generic Job rows of the package-diff type.
        assert job.base_job_type == JobType.GENERATE_PACKAGE_DIFF
        self.job = job
        # Delegated IPackageDiffJob attribute lookups resolve against
        # this object itself.
        self.context = self

    @classmethod
    def create(cls, packagediff):
        """Create and schedule a job for `packagediff`.

        :param packagediff: The package diff to generate; only its id is
            stored in the job's JSON data.
        :return: the new job, wrapped in this derived class.
        """
        job = Job(
            base_job_type=JobType.GENERATE_PACKAGE_DIFF,
            requester=packagediff.requester,
            base_json_data=simplejson.dumps(
                {'packagediff': packagediff.id}))
        derived = cls(job)
        derived.celeryRunOnCommit()
        return derived

    @classmethod
    def iterReady(cls):
        """Iterate over all package-diff jobs that are ready to run."""
        jobs = IStore(Job).find(
            Job,
            Job.id.is_in(Job.ready_jobs),
            Job.base_job_type == JobType.GENERATE_PACKAGE_DIFF)
        return (cls(job) for job in jobs)
class ArchiveSourcePublication:
    """An `ISourcePackagePublishingHistory` decorator with cached extras.

    The expensive external references (the changes file and the build
    status summary) are supplied at construction time and served through
    the delegated interface without further lookups.
    """
    delegates(ISourcePackagePublishingHistory)

    def __init__(self, context, changesfile, status_summary):
        self.context = context
        self._changesfile = changesfile
        self._status_summary = status_summary

    @property
    def sourcepackagerelease(self):
        """The release, decorated with the cached changes file."""
        proxied_file = None
        if self._changesfile is not None:
            proxied_file = ProxiedLibraryFileAlias(
                self._changesfile, self.context.archive)
        return ArchiveSourcePackageRelease(
            self.context.sourcepackagerelease, proxied_file)

    def getStatusSummaryForBuilds(self):
        """See `ISourcePackagePublishingHistory`."""
        return self._status_summary
class DecoratedDistributionSourcePackageRelease:
    """A DistributionSourcePackageRelease with pre-fetched data.

    The publishing history, package diffs and person data for the
    release are supplied up front so the delegated interface can serve
    them without additional queries.
    """

    delegates(IDistributionSourcePackageRelease, 'context')

    def __init__(self, distributionsourcepackagerelease, publishing_history,
                 package_diffs, person_data, user):
        self.context = distributionsourcepackagerelease
        self._user = user
        self._publishing_history = publishing_history
        self._package_diffs = package_diffs
        self._person_data = person_data

    @property
    def publishing_history(self):
        """See `IDistributionSourcePackageRelease`."""
        return self._publishing_history

    @property
    def package_diffs(self):
        """See `ISourcePackageRelease`."""
        return self._package_diffs

    @property
    def change_summary(self):
        """See `ISourcePackageRelease`."""
        raw_summary = self.context.change_summary
        return linkify_changelog(self._user, raw_summary, self._person_data)
class DiffAttachment:
    """An attachment that we are going to display."""

    implements(ILibraryFileAlias)
    delegates(ILibraryFileAlias, 'alias')

    def __init__(self, alias):
        self.alias = alias

    @cachedproperty
    def text(self):
        """Read the raw diff bytes out of the librarian."""
        self.alias.open()
        try:
            return self.alias.read(config.diff.max_read_size)
        finally:
            self.alias.close()

    @cachedproperty
    def diff_text(self):
        """Decode `text`, falling back to windows-1252 with replacement."""
        try:
            decoded = self.text.decode('utf-8')
        except UnicodeDecodeError:
            decoded = self.text.decode('windows-1252', 'replace')
        # Drop trailing newlines so the display ends cleanly.
        return decoded.rstrip('\n')
class TestJob(BaseRunnableJob):
    """A no-op job used in tests."""

    implements(IRunnableJob)
    delegates(IJob, 'job')

    config = config.launchpad

    def __init__(self, job_id=None):
        # Load an existing Job row when an id is given; otherwise make a
        # fresh one.
        if job_id is None:
            self.job = Job(max_retries=2)
        else:
            self.job = IStore(Job).find(Job, id=job_id)[0]

    def run(self):
        """See `IRunnableJob`: deliberately does nothing."""

    @classmethod
    def makeInstance(cls, job_id):
        return cls(job_id)

    @classmethod
    def getDBClass(cls):
        return cls
class IncrementalDiff(Storm):
    """See `IIncrementalDiff`."""

    implements(IIncrementalDiff)

    delegates(IDiff, context='diff')

    __storm_table__ = 'IncrementalDiff'

    id = Int(primary=True, allow_none=False)

    # The underlying Diff row; IDiff attribute access is delegated to it.
    diff_id = Int(name='diff', allow_none=False)

    diff = Reference(diff_id, 'Diff.id')

    # The merge proposal this incremental diff belongs to.
    branch_merge_proposal_id = Int(
        name='branch_merge_proposal', allow_none=False)

    branch_merge_proposal = Reference(
        branch_merge_proposal_id, "BranchMergeProposal.id")

    # The revision range this diff covers.
    old_revision_id = Int(name='old_revision', allow_none=False)

    old_revision = Reference(old_revision_id, 'Revision.id')

    new_revision_id = Int(name='new_revision', allow_none=False)

    new_revision = Reference(new_revision_id, 'Revision.id')
class CodeReviewDisplayComment(MessageComment):
    """A code review comment or activity or both.

    The CodeReviewComment itself does not implement the IComment
    interface as this is purely a display interface, and doesn't make
    sense to have display only code in the model itself.
    """

    implements(ICodeReviewDisplayComment)

    delegates(ICodeReviewComment, 'comment')

    def __init__(self, comment, from_superseded=False, limit_length=True):
        size_limit = config.malone.max_comment_size if limit_length else None
        super(CodeReviewDisplayComment, self).__init__(size_limit)
        self.comment = comment
        self.from_superseded = from_superseded
        # Pre-populate the cached body flag so it is not recomputed per
        # render.
        get_property_cache(self).has_body = bool(comment.message_body)
        self.has_footer = comment.vote is not None
        # Comments are sorted into the conversation by this date.
        self.date = comment.message.datecreated

    @property
    def index(self):
        return self.comment.id

    @property
    def extra_css_class(self):
        return 'from-superseded' if self.from_superseded else ''

    @cachedproperty
    def body_text(self):
        """The body text of the underlying message."""
        return self.comment.message_body

    @cachedproperty
    def all_attachments(self):
        return self.comment.getAttachments()

    @cachedproperty
    def display_attachments(self):
        """Attachments that should be shown, wrapped for display."""
        return [DiffAttachment(alias) for alias in self.all_attachments[0]]

    @cachedproperty
    def other_attachments(self):
        """Attachments that should not be shown."""
        return self.all_attachments[1]

    @property
    def download_url(self):
        return canonical_url(self.comment, view_name='+download')
class SubscriptionAttrDecorator:
    """Decorates `ISpecificationSubscription` with HTML/JS attributes."""
    delegates(ISpecificationSubscription, 'subscription')

    def __init__(self, subscription):
        self.subscription = subscription

    @property
    def css_name(self):
        """CSS identifier for this subscriber's row."""
        person_id = self.subscription.person.id
        return 'subscriber-%s' % person_id
class CompleteBuild:
    """Super object to store related IBinaryPackageBuild & IBuildQueue."""
    delegates(IBinaryPackageBuild)

    def __init__(self, build, buildqueue_record):
        self.context = build
        self._buildqueue_record = buildqueue_record

    # NOTE(review): defining this as a plain method shadows the
    # delegated `buildqueue_record` attribute, so callers get a bound
    # method unless they call build.buildqueue_record().  If attribute
    # access is expected, a @property decorator may be missing --
    # confirm against callers/templates before changing.
    def buildqueue_record(self):
        return self._buildqueue_record
class ApportJobDerived(BaseRunnableJob):
    """Intermediate class for deriving from ApportJob."""
    __metaclass__ = EnumeratedSubclass
    delegates(IApportJob)
    classProvides(IApportJobSource)

    def __init__(self, job):
        # `job` is an ApportJob row; delegation exposes its IApportJob
        # attributes on this object.
        self.context = job

    @classmethod
    def create(cls, blob):
        """See `IApportJob`."""
        # NOTE(review): an earlier comment claimed an existing job for
        # the blob would be reused, but no duplicate check is visible
        # here -- a new ApportJob is always created.  Confirm whether
        # deduplication happens inside ApportJob itself.
        job = ApportJob(blob, cls.class_job_type, {})
        derived = cls(job)
        derived.celeryRunOnCommit()
        return derived

    @classmethod
    def get(cls, job_id):
        """Get a job by id.

        :return: the ApportJob with the specified id, as the current
            ApportJobDerived subclass.
        :raises: SQLObjectNotFound if there is no job with the specified id,
            or its job_type does not match the desired subclass.
        """
        job = ApportJob.get(job_id)
        if job.job_type != cls.class_job_type:
            raise SQLObjectNotFound(
                'No object found with id %d and type %s' %
                (job_id, cls.class_job_type.title))
        return cls(job)

    @classmethod
    def iterReady(cls):
        """Iterate through all ready ApportJobs."""
        jobs = IStore(ApportJob).find(
            ApportJob,
            And(ApportJob.job_type == cls.class_job_type,
                ApportJob.job == Job.id,
                Job.id.is_in(Job.ready_jobs)))
        return (cls(job) for job in jobs)

    def getOopsVars(self):
        """See `IRunnableJob`."""
        # Extra context recorded with any OOPS raised while running.
        vars = BaseRunnableJob.getOopsVars(self)
        vars.extend([
            ('apport_blob_uuid', self.context.blob.uuid),
            ('apport_blob_librarian_url',
                self.context.blob.file_alias.getURL()),
            ('apport_job_id', self.context.id),
            ('apport_job_type', self.context.job_type.title),
            ])
        return vars
class QuestionMessage(SQLBase):
    """A table linking questions and messages."""

    implements(IQuestionMessage)

    delegates(IMessage, context='message')

    _table = 'QuestionMessage'

    question = ForeignKey(
        dbName='question', foreignKey='Question', notNull=True)
    message = ForeignKey(
        dbName='message', foreignKey='Message', notNull=True)
    # The workflow action this message performed; plain comment by default.
    action = EnumCol(
        schema=QuestionAction, notNull=True, default=QuestionAction.COMMENT)
    # The question status in effect after this message.
    new_status = EnumCol(
        schema=QuestionStatus, notNull=True, default=QuestionStatus.OPEN)
    owner = ForeignKey(dbName='owner', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)

    def __init__(self, **kwargs):
        if 'owner' not in kwargs:
            # Although a trigger will set the owner after the SQL
            # INSERT has been executed, we must specify the parameter
            # explicitly to fulfill the DB constraint OWNER NOT NULL,
            # otherwise we'll get an error from the DB server.
            kwargs['owner'] = kwargs['message'].owner
        super(QuestionMessage, self).__init__(**kwargs)

    def __iter__(self):
        """See IMessage."""
        # Delegates do not proxy __ methods, because of the name mangling.
        return iter(self.chunks)

    @cachedproperty
    def index(self):
        """Zero-based position of this message among the question's."""
        return list(self.question.messages).index(self)

    @cachedproperty
    def display_index(self):
        # Return the index + 1 so that messages appear 1-indexed in the UI.
        return self.index + 1

    @property
    def visible(self):
        """See `IQuestionMessage.`"""
        return self.message.visible
class TranslationSharingJob(StormBase):
    """Base class for jobs related to a packaging."""

    __storm_table__ = 'PackagingJob'

    id = Int(primary=True)

    job_id = Int('job')

    job = Reference(job_id, Job.id)

    delegates(IJob, 'job')

    job_type = EnumCol(enum=TranslationSharingJobType, notNull=True)

    # The product side of the Packaging link.
    productseries_id = Int('productseries')

    productseries = Reference(productseries_id, ProductSeries.id)

    # The distribution side of the Packaging link.
    distroseries_id = Int('distroseries')

    distroseries = Reference(distroseries_id, DistroSeries.id)

    sourcepackagename_id = Int('sourcepackagename')

    sourcepackagename = Reference(sourcepackagename_id, SourcePackageName.id)

    potemplate_id = Int('potemplate')

    potemplate = Reference(potemplate_id, POTemplate.id)

    def __init__(self, job, job_type, productseries, distroseries,
                 sourcepackagename, potemplate=None):
        """Constructor.

        :param job: The `Job` to use for storing basic job state.
        :param job_type: The `TranslationSharingJobType` of this job.
        :param productseries: The ProductSeries side of the Packaging.
        :param distroseries: The distroseries of the Packaging sourcepackage.
        :param sourcepackagename: The name of the Packaging sourcepackage.
        :param potemplate: Optional `POTemplate` associated with this job.
        """
        self.job = job
        self.job_type = job_type
        self.distroseries = distroseries
        self.sourcepackagename = sourcepackagename
        self.productseries = productseries
        self.potemplate = potemplate

    def makeDerived(self):
        # Re-wrap this row as the job_type-specific derived class.
        return TranslationSharingJobDerived.makeSubclass(self)
class PersonTransferJobDerived(BaseRunnableJob):
    """Intermediate class for deriving from PersonTransferJob.

    Storm classes can't simply be subclassed or you can end up with
    multiple objects referencing the same row in the db. This class uses
    lazr.delegates, which is a little bit simpler than storm's
    infoheritance solution to the problem. Subclasses need to override
    the run() method.
    """

    __metaclass__ = EnumeratedSubclass

    delegates(IPersonTransferJob)
    classProvides(IPersonTransferJobSource)

    def __init__(self, job):
        # `job` is a PersonTransferJob row; delegation exposes its
        # IPersonTransferJob attributes on this object.
        self.context = job

    @classmethod
    def create(cls, minor_person, major_person, metadata, requester=None):
        """See `IPersonTransferJob`.

        :raises TypeError: if either person argument does not provide
            `IPerson`.
        """
        if not IPerson.providedBy(minor_person):
            raise TypeError("minor_person must be IPerson: %s"
                            % repr(minor_person))
        if not IPerson.providedBy(major_person):
            raise TypeError("major_person must be IPerson: %s"
                            % repr(major_person))
        job = PersonTransferJob(
            minor_person=minor_person,
            major_person=major_person,
            job_type=cls.class_job_type,
            metadata=metadata,
            requester=requester)
        derived = cls(job)
        derived.celeryRunOnCommit()
        return derived

    @classmethod
    def iterReady(cls):
        """Iterate through all ready PersonTransferJobs."""
        store = IMasterStore(PersonTransferJob)
        jobs = store.find(
            PersonTransferJob,
            And(PersonTransferJob.job_type == cls.class_job_type,
                PersonTransferJob.job_id.is_in(Job.ready_jobs)))
        return (cls(job) for job in jobs)

    def getOopsVars(self):
        """See `IRunnableJob`."""
        # Extra context recorded with any OOPS raised while running.
        vars = BaseRunnableJob.getOopsVars(self)
        vars.extend([
            ('major_person_name', self.context.major_person.name),
            ('minor_person_name', self.context.minor_person.name),
            ])
        return vars
class DecoratedEvent:
    """A CodeImportEvent whose items are fetched only once."""

    delegates(ICodeImportEvent, 'event')

    def __init__(self, event):
        self.event = event

    @cachedproperty
    def items(self):
        """Cache the event items so the database is hit only once."""
        return self.event.items()
class MenuLink:
    """Adapter from ILinkData to ILink."""
    implements(ILink)
    delegates(ILinkData, context='_linkdata')

    # Filled in by the menus infrastructure.
    name = None
    url = None
    linked = True

    # When set, overrides self.enabled without writing to the adapted
    # object.
    _enabled_override = None

    def __init__(self, linkdata):
        self._linkdata = linkdata

    def set_enabled(self, value):
        self._enabled_override = value

    def get_enabled(self):
        override = self._enabled_override
        if override is not None:
            return override
        return self._linkdata.enabled

    enabled = property(get_enabled, set_enabled)

    @property
    def escapedtext(self):
        # html_escape knows not to double-escape IStructuredString
        # values, which the text often is.
        return html_escape(self._linkdata.text)

    @property
    def icon_url(self):
        """The full URL of this link's associated icon, if it has one."""
        if self.icon:
            return '/@@/%s' % self.icon
        return None

    def render(self):
        """See `ILink`."""
        request = get_current_browser_request()
        view = getMultiAdapter((self, request), name="+inline")
        return view()

    @property
    def path(self):
        """See `ILink`."""
        return self.url.path
class LibraryFileAliasWithParent:
    """A `LibraryFileAlias` variant that knows its parent object."""
    adapts(ILibraryFileAlias, Interface)
    implements(ILibraryFileAliasWithParent)
    delegates(ILibraryFileAlias)

    def __init__(self, libraryfile, parent):
        self.context = libraryfile
        self.__parent__ = parent

    def createToken(self):
        """See `ILibraryFileAliasWithParent`."""
        url = self.private_url
        return TimeLimitedToken.allocate(url)
class ArchiveSourcePackageRelease:
    """A `SourcePackageRelease` carrying its cached upload changesfile.

    The related upload changesfile is supplied at construction time, so
    it never has to be recalculated.
    """

    delegates(ISourcePackageRelease)

    def __init__(self, context, changesfile):
        self.context = context
        self._changesfile = changesfile

    @property
    def upload_changesfile(self):
        """See `ISourcePackageRelease`."""
        return self._changesfile
class ProxiedLibraryFileAlias:
    """A `LibraryFileAlias` decorator for use in URL generation.

    The URL's output by this decorator will always point at the webapp.
    This is useful when:

     - the webapp has to be contacted to get access to a file (required
       for restricted files).
     - files might change from public to private and thus not work even
       if the user has access to the once its private, unless they go
       via the webapp.

    This should be used anywhere we are outputting URL's to
    LibraryFileAliases other than directly in rendered pages. For
    rendered pages, using a LibraryFileAlias directly is OK as at that
    point the status of the file is known.

    Overrides `ILibraryFileAlias.http_url` to always point to the
    webapp URL, even when called from the webservice domain.
    """
    delegates(ILibraryFileAlias)

    def __init__(self, context, parent):
        self.context = context
        self.parent = parent

    @property
    def http_url(self):
        """Return the webapp URL for the context `LibraryFileAlias`.

        Preserve the `LibraryFileAlias.http_url` behavior for deleted
        files, returning None.  Webservice requests are masked so the
        returned URL is always relative to the parent's webapp URL.
        """
        if self.context.deleted:
            return None

        request = get_current_browser_request()
        if WebServiceLayer.providedBy(request):
            # Use the originating webapp request instead, so the URL is
            # built against the webapp vhost.
            request = IWebBrowserOriginatingRequest(request)

        parent_url = canonical_url(self.parent, request=request)
        files_url = urlappend(parent_url, '+files')
        encoded_filename = url_path_quote(
            self.context.filename.encode('utf-8'))
        return urlappend(files_url, encoded_filename)
class DistributionJobDerived(BaseRunnableJob):
    """Abstract class for deriving from DistributionJob."""
    __metaclass__ = EnumeratedSubclass
    delegates(IDistributionJob)

    def __init__(self, job):
        # `job` is a DistributionJob row; delegation exposes its
        # IDistributionJob attributes on this object.
        self.context = job

    @classmethod
    def get(cls, job_id):
        """Get a job by id.

        :return: the DistributionJob with the specified id, as the
            current DistributionJobDerived subclass.
        :raises: NotFoundError if there is no job with the specified id,
            or its job_type does not match the desired subclass.
        """
        job = DistributionJob.get(job_id)
        if job.job_type != cls.class_job_type:
            raise NotFoundError(
                'No object found with id %d and type %s' %
                (job_id, cls.class_job_type.title))
        return cls(job)

    @classmethod
    def iterReady(cls):
        """Iterate through all ready DistributionJobs."""
        jobs = IStore(DistributionJob).find(
            DistributionJob,
            And(DistributionJob.job_type == cls.class_job_type,
                DistributionJob.job == Job.id,
                Job.id.is_in(Job.ready_jobs)))
        return (cls(job) for job in jobs)

    def getOopsVars(self):
        """See `IRunnableJob`."""
        # Extra context recorded with any OOPS raised while running.
        vars = super(DistributionJobDerived, self).getOopsVars()
        vars.extend([
            ('distribution_id', self.context.distribution.id),
            ('distroseries_id', self.context.distroseries.id),
            ('distribution_job_id', self.context.id),
            ('distribution_job_type', self.context.job_type.title),
            ])
        return vars
class ProxiedPackageDiff:
    """A `PackageDiff` whose files are proxied through the webapp.

    Instead of `LibraryFileAlias` returns `ProxiedLibraryFileAlias`, so
    their 'http_url' attribute can be used in the template.
    """
    delegates(IPackageDiff)

    def __init__(self, context, parent):
        self.context = context
        self.parent = parent

    @property
    def diff_content(self):
        alias = self.context.diff_content
        return (None if alias is None
                else ProxiedLibraryFileAlias(alias, self.parent))
class CheckedCopy:
    """Representation of a copy that was checked and approved.

    Decorates `ISourcePackagePublishingHistory`, tweaking
    `getStatusSummaryForBuilds` to return `BuildSetStatus.NEEDSBUILD`
    for source-only copies.
    """
    delegates(ISourcePackagePublishingHistory)

    def __init__(self, context, include_binaries):
        self.context = context
        self.include_binaries = include_binaries

    def getStatusSummaryForBuilds(self):
        """Always `BuildSetStatus.NEEDSBUILD` for source-only copies."""
        if not self.include_binaries:
            return {'status': BuildSetStatus.NEEDSBUILD}
        return self.context.getStatusSummaryForBuilds()
class PackageTranslationsUploadJobDerived(BaseRunnableJob):
    """Base class for jobs that upload package translations.

    Wraps a generic `Job` row whose ``base_job_type`` is
    ``JobType.UPLOAD_PACKAGE_TRANSLATIONS``; the target release and
    file are identified by id in the job's JSON data.
    """

    __metaclass__ = EnumeratedSubclass

    delegates(IPackageTranslationsUploadJob)
    classProvides(IPackageTranslationsUploadJobSource)
    config = config.IPackageTranslationsUploadJobSource

    def __init__(self, job):
        # Only accept generic Job rows of the translations-upload type.
        assert job.base_job_type == JobType.UPLOAD_PACKAGE_TRANSLATIONS
        self.job = job
        # Delegated attribute lookups resolve against this object itself.
        self.context = self

    @classmethod
    def create(cls, sourcepackagerelease, libraryfilealias, requester):
        """Create and schedule an upload job.

        The release and file are recorded by id in the job's JSON data.
        """
        job = Job(
            base_job_type=JobType.UPLOAD_PACKAGE_TRANSLATIONS,
            requester=requester,
            base_json_data=simplejson.dumps(
                {'sourcepackagerelease': sourcepackagerelease.id,
                 'libraryfilealias': libraryfilealias.id}))
        derived = cls(job)
        derived.celeryRunOnCommit()
        return derived

    @classmethod
    def iterReady(cls):
        """Iterate over all translations-upload jobs ready to run."""
        jobs = IStore(Job).find(
            Job,
            Job.id.is_in(Job.ready_jobs),
            Job.base_job_type == JobType.UPLOAD_PACKAGE_TRANSLATIONS)
        return (cls(job) for job in jobs)

    def getErrorRecipients(self):
        """Notify the requester on failure, when there is one."""
        if self.requester is not None:
            return [format_address_for_person(self.requester)]
        return []
class LatestPersonSourcePackageReleaseCache(Storm):
    """See `ILatestPersonSourcePackageReleaseCache`."""
    implements(ILatestPersonSourcePackageReleaseCache)
    delegates(ISourcePackageRelease, context='sourcepackagerelease')

    __storm_table__ = 'LatestPersonSourcePackageReleaseCache'

    cache_id = Int(name='id', primary=True)
    publication_id = Int(name='publication')
    publication = Reference(
        publication_id, 'SourcePackagePublishingHistory.id')
    dateuploaded = DateTime(name='date_uploaded')
    # Person foreign keys; no Reference objects are declared for these.
    creator_id = Int(name='creator')
    maintainer_id = Int(name='maintainer')
    upload_archive_id = Int(name='upload_archive')
    upload_archive = Reference(upload_archive_id, 'Archive.id')
    archive_purpose = EnumCol(schema=ArchivePurpose)
    upload_distroseries_id = Int(name='upload_distroseries')
    upload_distroseries = Reference(
        upload_distroseries_id, 'DistroSeries.id')
    sourcepackagename_id = Int(name='sourcepackagename')
    sourcepackagename = Reference(
        sourcepackagename_id, 'SourcePackageName.id')
    # The release this cache row denormalises; ISourcePackageRelease
    # attribute access is delegated to it.
    sourcepackagerelease_id = Int(name='sourcepackagerelease')
    sourcepackagerelease = Reference(
        sourcepackagerelease_id, 'SourcePackageRelease.id')
class DistributionSourcePackageRelease:
    """This is a "Magic Distribution Source Package Release".

    It is not an SQLObject, but it represents the concept of a specific
    source package release in the distribution. You can then query it for
    useful information.
    """

    implements(IDistributionSourcePackageRelease)
    delegates(ISourcePackageRelease, context='sourcepackagerelease')

    def __init__(self, distribution, sourcepackagerelease):
        self.distribution = distribution
        self.sourcepackagerelease = sourcepackagerelease

    @property
    def sourcepackage(self):
        """See IDistributionSourcePackageRelease"""
        return self.distribution.getSourcePackage(
            self.sourcepackagerelease.sourcepackagename)

    @property
    def displayname(self):
        """See IDistributionSourcePackageRelease."""
        return '%s in %s' % (self.name, self.distribution.name)

    @property
    def title(self):
        """See IDistributionSourcePackageRelease."""
        return '"%s" %s source package in %s' % (
            self.name, self.version, self.distribution.displayname)

    @property
    def publishing_history(self):
        """See IDistributionSourcePackageRelease."""
        # Imported here to avoid a circular import.
        from lp.registry.model.distroseries import DistroSeries
        res = Store.of(self.distribution).find(
            SourcePackagePublishingHistory,
            SourcePackagePublishingHistory.archiveID.is_in(
                self.distribution.all_distro_archive_ids),
            SourcePackagePublishingHistory.distroseriesID == DistroSeries.id,
            DistroSeries.distribution == self.distribution,
            SourcePackagePublishingHistory.sourcepackagerelease ==
                self.sourcepackagerelease)
        # Newest publications first.
        return res.order_by(
            Desc(SourcePackagePublishingHistory.datecreated),
            Desc(SourcePackagePublishingHistory.id))

    @property
    def builds(self):
        """See IDistributionSourcePackageRelease."""
        # We want to return all the builds for this distribution that
        # were built for a main archive together with the builds for this
        # distribution that were built for a PPA but have been published
        # in a main archive.
        builds_for_distro_exprs = (
            (BinaryPackageBuild.source_package_release ==
                self.sourcepackagerelease),
            BinaryPackageBuild.distribution == self.distribution,
            )
        # First, get all the builds built in a main archive (this will
        # include new and failed builds.)
        builds_built_in_main_archives = Store.of(self.distribution).find(
            BinaryPackageBuild,
            builds_for_distro_exprs,
            BinaryPackageBuild.archive == Archive.id,
            Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES))
        # Next get all the builds that have a binary published in the
        # main archive... this will include many of those in the above
        # query, but not the new/failed ones. It will also include
        # ppa builds that have been published in main archives.
        builds_published_in_main_archives = Store.of(self.distribution).find(
            BinaryPackageBuild,
            builds_for_distro_exprs,
            BinaryPackageRelease.build == BinaryPackageBuild.id,
            BinaryPackagePublishingHistory.binarypackagerelease ==
                BinaryPackageRelease.id,
            BinaryPackagePublishingHistory.archive == Archive.id,
            Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES)).config(
                distinct=True)
        # Union the two sets and present the newest builds first.
        return builds_built_in_main_archives.union(
            builds_published_in_main_archives).order_by(
                Desc(BinaryPackageBuild.id))

    @property
    def binary_package_names(self):
        """See IDistributionSourcePackageRelease."""
        return BinaryPackageName.select("""
            BinaryPackageName.id =
                BinaryPackageRelease.binarypackagename AND
            BinaryPackageRelease.build = BinaryPackageBuild.id AND
            BinaryPackageBuild.source_package_release = %s
            """ % sqlvalues(self.sourcepackagerelease.id),
            clauseTables=['BinaryPackageRelease', 'BinaryPackageBuild'],
            orderBy='name', distinct=True)

    @property
    def sample_binary_packages(self):
        """See IDistributionSourcePackageRelease."""
        # Avoid circular imports.
        from lp.registry.model.distroseries import DistroSeries
        from lp.soyuz.model.distroarchseries import DistroArchSeries
        from lp.soyuz.model.distroseriespackagecache import (
            DistroSeriesPackageCache)
        archive_ids = list(self.distribution.all_distro_archive_ids)
        # DISTINCT ON picks one publication row per binary package name.
        result_row = (
            SQL('DISTINCT ON(BinaryPackageName.name) 0 AS ignore'),
            BinaryPackagePublishingHistory, DistroSeriesPackageCache,
            BinaryPackageRelease, BinaryPackageName)
        tables = (
            BinaryPackagePublishingHistory,
            Join(
                DistroArchSeries,
                DistroArchSeries.id ==
                    BinaryPackagePublishingHistory.distroarchseriesID),
            Join(
                DistroSeries,
                DistroArchSeries.distroseriesID == DistroSeries.id),
            Join(
                BinaryPackageRelease,
                BinaryPackageRelease.id ==
                    BinaryPackagePublishingHistory.binarypackagereleaseID),
            Join(
                BinaryPackageName,
                BinaryPackageName.id ==
                    BinaryPackageRelease.binarypackagenameID),
            Join(
                BinaryPackageBuild,
                BinaryPackageBuild.id == BinaryPackageRelease.buildID),
            # The package cache may be missing, hence the outer join.
            LeftJoin(
                DistroSeriesPackageCache,
                And(
                    DistroSeriesPackageCache.distroseries == DistroSeries.id,
                    DistroSeriesPackageCache.archiveID.is_in(archive_ids),
                    DistroSeriesPackageCache.binarypackagename ==
                        BinaryPackageName.id)))
        all_published = Store.of(self.distribution).using(*tables).find(
            result_row,
            DistroSeries.distribution == self.distribution,
            BinaryPackagePublishingHistory.archiveID.is_in(archive_ids),
            BinaryPackageBuild.source_package_release ==
                self.sourcepackagerelease)
        all_published = all_published.order_by(
            BinaryPackageName.name)

        def make_dsb_package(row):
            # Rows are (ignore, publishing, package_cache, release, name).
            publishing = row[1]
            package_cache = row[2]
            return DistroSeriesBinaryPackage(
                publishing.distroarchseries.distroseries,
                publishing.binarypackagerelease.binarypackagename,
                package_cache)
        return DecoratedResultSet(all_published, make_dsb_package)
class PackageCopyJobDerived(BaseRunnableJob):
    """Abstract class for deriving from PackageCopyJob."""

    __metaclass__ = EnumeratedSubclass

    delegates(IPackageCopyJob)

    def __init__(self, job):
        # `job` is a PackageCopyJob row; delegation exposes its
        # IPackageCopyJob attributes on this object.
        self.context = job
        self.logger = logging.getLogger()

    @classmethod
    def get(cls, job_id):
        """Get a job by id.

        :return: the PackageCopyJob with the specified id, as the
            current PackageCopyJobDerived subclass.
        :raises: NotFoundError if there is no job with the specified id,
            or its job_type does not match the desired subclass.
        """
        job = IStore(PackageCopyJob).get(PackageCopyJob, job_id)
        if job.job_type != cls.class_job_type:
            raise NotFoundError(
                'No object found with id %d and type %s' %
                (job_id, cls.class_job_type.title))
        return cls(job)

    @classmethod
    def iterReady(cls):
        """Iterate through all ready PackageCopyJobs.

        Even though it's slower, we repeat the query each time in order
        that very long queues of mass syncs can be pre-empted by other
        jobs.
        """
        seen = set()
        while True:
            # Re-query each iteration, excluding jobs already yielded.
            jobs = IStore(PackageCopyJob).find(
                PackageCopyJob,
                PackageCopyJob.job_type == cls.class_job_type,
                PackageCopyJob.job == Job.id,
                Job.id.is_in(Job.ready_jobs),
                Not(Job.id.is_in(seen)))
            jobs.order_by(PackageCopyJob.copy_policy)
            job = jobs.first()
            if job is None:
                break
            seen.add(job.job_id)
            yield cls(job)

    def getOopsVars(self):
        """See `IRunnableJob`."""
        # Extra context recorded with any OOPS raised while running.
        vars = super(PackageCopyJobDerived, self).getOopsVars()
        vars.extend([
            ('source_archive_id', self.context.source_archive_id),
            ('target_archive_id', self.context.target_archive_id),
            ('target_distroseries_id', self.context.target_distroseries_id),
            ('package_copy_job_id', self.context.id),
            ('package_copy_job_type', self.context.job_type.title),
            ])
        return vars

    def getOperationDescription(self):
        """See `IPlainPackageCopyJob`."""
        return "copying a package"

    def getErrorRecipients(self):
        """See `IPlainPackageCopyJob`."""
        return [format_address_for_person(self.requester)]

    @property
    def copy_policy(self):
        """See `PlainPackageCopyJob`."""
        return self.context.copy_policy
class SourcePackageRecipe(Storm):
    """See `ISourcePackageRecipe` and `ISourcePackageRecipeSource`.

    A recipe describes how to build a source package from one or more
    branches.  The parsed recipe text itself lives in a separate
    `SourcePackageRecipeData` row; this class delegates recipe-data
    attributes to it (see the `delegates` call below).
    """

    __storm_table__ = 'SourcePackageRecipe'

    def __str__(self):
        # Canonical short form, e.g. "owner/recipe-name".
        return '%s/%s' % (self.owner.name, self.name)

    implements(ISourcePackageRecipe)
    classProvides(ISourcePackageRecipeSource)
    # Attribute access for ISourcePackageRecipeData falls through to the
    # associated SourcePackageRecipeData row (the `_recipe_data` property).
    delegates(ISourcePackageRecipeData, context='_recipe_data')

    # Database columns and references.
    id = Int(primary=True)
    daily_build_archive_id = Int(name='daily_build_archive',
        allow_none=True)
    daily_build_archive = Reference(daily_build_archive_id, 'Archive.id')
    date_created = UtcDateTimeCol(notNull=True)
    date_last_modified = UtcDateTimeCol(notNull=True)
    owner_id = Int(name='owner', allow_none=True)
    owner = Reference(owner_id, 'Person.id')
    registrant_id = Int(name='registrant', allow_none=True)
    registrant = Reference(registrant_id, 'Person.id')
    # Many-to-many link to the distroseries this recipe builds for, via
    # the _SourcePackageRecipeDistroSeries linking table.
    distroseries = ReferenceSet(
        id, _SourcePackageRecipeDistroSeries.sourcepackagerecipe_id,
        _SourcePackageRecipeDistroSeries.distroseries_id,
        DistroSeries.id)
    build_daily = Bool()
    is_stale = Bool()

    @property
    def _sourcepackagename_text(self):
        # `sourcepackagename` is delegated to the recipe data.
        return self.sourcepackagename.name

    name = Unicode(allow_none=True)
    description = Unicode(allow_none=True)

    @cachedproperty
    def _recipe_data(self):
        # The SourcePackageRecipeData row that backs the delegated recipe
        # attributes.  Cached: it is consulted on every delegated access.
        return Store.of(self).find(
            SourcePackageRecipeData,
            SourcePackageRecipeData.sourcepackage_recipe == self).one()

    @property
    def builder_recipe(self):
        """Accesses of the recipe go to the SourcePackageRecipeData."""
        return self._recipe_data.getRecipe()

    @property
    def base_branch(self):
        # The branch the recipe is based on, stored on the recipe data.
        return self._recipe_data.base_branch

    @staticmethod
    def preLoadDataForSourcePackageRecipes(sourcepackagerecipes):
        """Bulk-load recipe data and branches for the given recipes.

        Populates each recipe's `_recipe_data` property cache so later
        delegated attribute access does not issue one query per recipe.
        """
        # Load the referencing SourcePackageRecipeData.
        spr_datas = load_referencing(SourcePackageRecipeData,
            sourcepackagerecipes, ['sourcepackage_recipe_id'])
        # Load the related branches.
        load_related(Branch, spr_datas, ['base_branch_id'])
        # Store the SourcePackageRecipeData in the sourcepackagerecipes
        # objects.
        for spr_data in spr_datas:
            cache = get_property_cache(spr_data.sourcepackage_recipe)
            cache._recipe_data = spr_data
        SourcePackageRecipeData.preLoadReferencedBranches(spr_datas)

    def setRecipeText(self, recipe_text):
        # Parse first so invalid text raises before anything is stored.
        parsed = SourcePackageRecipeData.getParsedRecipe(recipe_text)
        self._recipe_data.setRecipe(parsed)

    @property
    def recipe_text(self):
        # Round-trip the parsed recipe back to its textual form.
        return self.builder_recipe.get_recipe_text()

    def updateSeries(self, distroseries):
        """Replace the linked distroseries with the given collection.

        No-op when the new collection compares equal to the current one.
        """
        if distroseries != self.distroseries:
            self.distroseries.clear()
            for distroseries_item in distroseries:
                self.distroseries.add(distroseries_item)

    @staticmethod
    def new(registrant, owner, name, recipe, description, distroseries=None,
            daily_build_archive=None, build_daily=False,
            date_created=DEFAULT):
        """See `ISourcePackageRecipeSource.new`.

        Parses `recipe` (raising on invalid text before the row is
        added), creates the recipe plus its SourcePackageRecipeData, and
        adds it to the master store.
        """
        store = IMasterStore(SourcePackageRecipe)
        sprecipe = SourcePackageRecipe()
        builder_recipe = SourcePackageRecipeData.getParsedRecipe(recipe)
        # The data row links itself to the recipe in its constructor.
        SourcePackageRecipeData(builder_recipe, sprecipe)
        sprecipe.registrant = registrant
        sprecipe.owner = owner
        sprecipe.name = name
        if distroseries is not None:
            for distroseries_item in distroseries:
                sprecipe.distroseries.add(distroseries_item)
        sprecipe.description = description
        sprecipe.daily_build_archive = daily_build_archive
        sprecipe.build_daily = build_daily
        sprecipe.date_created = date_created
        sprecipe.date_last_modified = date_created
        store.add(sprecipe)
        return sprecipe

    @staticmethod
    def findStaleDailyBuilds():
        """Find daily-build recipes with no build in roughly the last day.

        Uses a LEFT JOIN against recent matching builds and keeps the
        rows where no such build exists (date_created IS NULL).
        """
        # Slightly under 24 hours, presumably so a job that runs once a
        # day with some jitter still picks the recipe up -- TODO confirm.
        one_day_ago = datetime.now(utc) - timedelta(hours=23, minutes=50)
        joins = (
            SourcePackageRecipe,
            LeftJoin(
                SourcePackageRecipeBuild,
                And(SourcePackageRecipeBuild.recipe_id ==
                        SourcePackageRecipe.id,
                    SourcePackageRecipeBuild.archive_id ==
                        SourcePackageRecipe.daily_build_archive_id,
                    SourcePackageRecipeBuild.date_created > one_day_ago)),
            )
        # NOTE: `== True` / `== None` are Storm SQL expressions, not
        # Python comparisons; do not "fix" them to `is`.
        return IStore(SourcePackageRecipe).using(*joins).find(
            SourcePackageRecipe,
            SourcePackageRecipe.is_stale == True,
            SourcePackageRecipe.build_daily == True,
            SourcePackageRecipeBuild.date_created == None,
            ).config(distinct=True)

    @staticmethod
    def exists(owner, name):
        """See `ISourcePackageRecipeSource.exists`."""
        store = IMasterStore(SourcePackageRecipe)
        recipe = store.find(
            SourcePackageRecipe,
            SourcePackageRecipe.owner == owner,
            SourcePackageRecipe.name == name).one()
        if recipe:
            return True
        else:
            return False

    def destroySelf(self):
        """Delete this recipe and detach its builds.

        Builds are kept but unlinked (recipe_id set to NULL); the link
        rows, instructions and recipe data are removed first so no
        foreign keys still reference this row when it is removed.
        """
        store = Store.of(self)
        self.distroseries.clear()
        self._recipe_data.instructions.find().remove()
        builds = store.find(
            SourcePackageRecipeBuild,
            SourcePackageRecipeBuild.recipe == self)
        builds.set(recipe_id=None)
        store.remove(self._recipe_data)
        store.remove(self)

    def isOverQuota(self, requester, distroseries):
        """See `ISourcePackageRecipe`."""
        # Quota: at most 5 recent builds per requester/recipe/series
        # (window defined by getRecentBuilds).
        return SourcePackageRecipeBuild.getRecentBuilds(
            requester, self, distroseries).count() >= 5

    def containsUnbuildableSeries(self, archive):
        # True when at least one linked series is not buildable for the
        # archive owner.
        buildable_distros = set(BuildableDistroSeries.findSeries(
            archive.owner))
        return len(
            set(self.distroseries).difference(buildable_distros)) >= 1

    def requestBuild(self, archive, requester, distroseries,
                     pocket=PackagePublishingPocket.RELEASE, manual=False):
        """See `ISourcePackageRecipe`.

        Validates the request (PPA-only, buildable series, upload rights,
        quota, no duplicate pending build) and then creates and queues a
        `SourcePackageRecipeBuild`.

        :raises NonPPABuildRequest: if `archive` is not a PPA.
        :raises BuildNotAllowedForDistro: if the series is not buildable.
        :raises TooManyBuilds: if the requester is over quota.
        :raises BuildAlreadyPending: if an identical build is queued.
        """
        if not archive.is_ppa:
            raise NonPPABuildRequest
        buildable_distros = BuildableDistroSeries.findSeries(archive.owner)
        if distroseries not in buildable_distros:
            raise BuildNotAllowedForDistro(self, distroseries)
        # Third argument is presumably the sourcepackagename (not known
        # at request time, hence None) -- TODO confirm against
        # IArchive.checkUpload.
        reject_reason = archive.checkUpload(
            requester, distroseries, None, archive.default_component,
            pocket)
        if reject_reason is not None:
            # checkUpload returns the exception to raise rather than
            # raising it itself.
            raise reject_reason
        if self.isOverQuota(requester, distroseries):
            raise TooManyBuilds(self, distroseries)
        pending = IStore(self).find(
            SourcePackageRecipeBuild,
            SourcePackageRecipeBuild.recipe_id == self.id,
            SourcePackageRecipeBuild.distroseries_id == distroseries.id,
            SourcePackageRecipeBuild.archive_id == archive.id,
            SourcePackageRecipeBuild.status == BuildStatus.NEEDSBUILD)
        if pending.any() is not None:
            raise BuildAlreadyPending(self, distroseries)
        build = getUtility(ISourcePackageRecipeBuildSource).new(
            distroseries, self, requester, archive)
        build.queueBuild()
        queue_record = build.buildqueue_record
        if manual:
            # Manually requested builds get a score boost over the
            # automatic daily ones.
            queue_record.manualScore(queue_record.lastscore + 100)
        return build

    def performDailyBuild(self):
        """See `ISourcePackageRecipe`.

        Clears the stale flag, then requests one build per linked series
        that is buildable in the daily build archive.  Series whose
        build is already pending are silently skipped.

        :return: the list of builds that were created.
        """
        builds = []
        self.is_stale = False
        buildable_distros = set(
            BuildableDistroSeries.findSeries(
                self.daily_build_archive.owner))
        build_for = set(self.distroseries).intersection(buildable_distros)
        for distroseries in build_for:
            try:
                build = self.requestBuild(
                    self.daily_build_archive, self.owner, distroseries,
                    PackagePublishingPocket.RELEASE)
                builds.append(build)
            except BuildAlreadyPending:
                continue
        return builds

    @property
    def builds(self):
        """See `ISourcePackageRecipe`."""
        # Most recent activity first; Greatest() handles builds that
        # have started but not yet finished.
        order_by = (
            Desc(Greatest(
                SourcePackageRecipeBuild.date_started,
                SourcePackageRecipeBuild.date_finished)),
            Desc(SourcePackageRecipeBuild.date_created),
            Desc(SourcePackageRecipeBuild.id))
        return self._getBuilds(None, order_by)

    @property
    def completed_builds(self):
        """See `ISourcePackageRecipe`."""
        # "Completed" here means no longer waiting to build; in-progress
        # builds are included -- TODO confirm this matches the interface.
        filter_term = (
            SourcePackageRecipeBuild.status != BuildStatus.NEEDSBUILD)
        order_by = (
            Desc(Greatest(
                SourcePackageRecipeBuild.date_started,
                SourcePackageRecipeBuild.date_finished)),
            Desc(SourcePackageRecipeBuild.id))
        return self._getBuilds(filter_term, order_by)

    @property
    def pending_builds(self):
        """See `ISourcePackageRecipe`."""
        filter_term = (
            SourcePackageRecipeBuild.status == BuildStatus.NEEDSBUILD)
        # We want to order by date_created but this is the same as ordering
        # by id (since id increases monotonically) and is less expensive.
        order_by = Desc(SourcePackageRecipeBuild.id)
        return self._getBuilds(filter_term, order_by)

    def _getBuilds(self, filter_term, order_by):
        """The actual query to get the builds.

        :param filter_term: an extra Storm expression to AND into the
            query, or None for no extra filtering.
        :param order_by: passed straight to ResultSet.order_by.
        """
        query_args = [
            SourcePackageRecipeBuild.recipe == self,
            SourcePackageRecipeBuild.archive_id == Archive.id,
            # Storm SQL expression; builds in disabled archives are
            # excluded.
            Archive._enabled == True,
            ]
        if filter_term is not None:
            query_args.append(filter_term)
        result = Store.of(self).find(
            SourcePackageRecipeBuild, *query_args)
        result.order_by(order_by)
        return result

    def getPendingBuildInfo(self):
        """See `ISourcePackageRecipe`."""
        # Summarize pending builds as plain dicts (series name plus
        # "owner/archive" reference).
        builds = self.pending_builds
        result = []
        for build in builds:
            result.append({
                "distroseries": build.distroseries.displayname,
                "archive": '%s/%s' %
                    (build.archive.owner.name, build.archive.name)})
        return result

    @property
    def last_build(self):
        """See `ISourcePackageRecipeBuild`."""
        # `True` as the filter term is a no-op condition; ordering by
        # date_finished descending makes first() the latest build.
        return self._getBuilds(
            True, Desc(SourcePackageRecipeBuild.date_finished)).first()

    def getMedianBuildDuration(self):
        """Return the median duration of builds of this recipe.

        Returns None when no finished builds exist.  With an even number
        of durations the lower median is returned (descending sort,
        Python 2 integer division for the middle index).
        """
        store = IStore(self)
        result = store.find(
            SourcePackageRecipeBuild,
            SourcePackageRecipeBuild.recipe == self.id,
            SourcePackageRecipeBuild.date_finished != None)
        durations = [
            build.date_finished - build.date_started for build in result]
        if len(durations) == 0:
            return None
        durations.sort(reverse=True)
        return durations[len(durations) / 2]
class DecoratedBranch(BzrIdentityMixin):
    """A branch wrapper that memoizes expensive accessor results.

    Each cached property realizes its underlying query once, so repeated
    reads of the same attribute do not hit the database again.
    """
    implements(IPrivacy)
    delegates([IBranch, IInformationType], 'branch')

    def __init__(self, branch):
        self.branch = branch

    @property
    def displayname(self):
        """Use the cached bzr identity as the display name.

        The underlying model property would compute bzr_identity on the
        wrapped branch, costing two database queries; routing through
        the cached value on this decorator avoids that.
        """
        return self.bzr_identity

    @cachedproperty
    def bzr_identity(self):
        """Memoize the bzr identity defined on `BzrIdentityMixin`.

        The superclass implementation consults the
        associatedProductSeries and associatedSuiteSourcePackages
        methods, both of which are backed by cached lists below.
        """
        return super(DecoratedBranch, self).bzr_identity

    @cachedproperty
    def is_series_branch(self):
        """Whether any series links (product or suite package) exist."""
        # Short-circuit on the product series so the suite source
        # package list is not realized unnecessarily.
        if self.associated_product_series:
            return True
        return len(self.suite_source_packages) > 0

    def associatedProductSeries(self):
        """Override `IBranch.associatedProductSeries` with the cache."""
        return self.associated_product_series

    def associatedSuiteSourcePackages(self):
        """Override `IBranch.associatedSuiteSourcePackages` (cached)."""
        return self.suite_source_packages

    @cachedproperty
    def associated_product_series(self):
        """Realize the product series links exactly once."""
        return list(self.branch.associatedProductSeries())

    @cachedproperty
    def suite_source_packages(self):
        """Realize the suite source package links exactly once."""
        return list(self.branch.associatedSuiteSourcePackages())

    @cachedproperty
    def upgrade_pending(self):
        """Memoized, because the wrapped property hits the database."""
        return self.branch.upgrade_pending

    @cachedproperty
    def subscriptions(self):
        """Realize the branch subscription objects exactly once."""
        return list(self.branch.subscriptions)

    def hasSubscription(self, user):
        """Check membership against the cached subscription list.

        The stock branch implementation queries the database; the full
        subscriber list is already cached here, so scan it instead.
        """
        if user is None:
            return False
        subscriber_ids = [sub.personID for sub in self.subscriptions]
        return user.id in subscriber_ids

    @cachedproperty
    def latest_revisions(self):
        """Memoize the revision query.

        A tal:repeat evaluates its sequence twice -- once to test for
        content and once to iterate -- so without caching the database
        would be hit twice.
        """
        return list(self.branch.latest_revisions())
class BranchJobDerived(BaseRunnableJob):
    """Intermediate base for runnable jobs backed by a `BranchJob` row."""

    __metaclass__ = EnumeratedSubclass
    delegates(IBranchJob)

    def __init__(self, branch_job):
        self.context = branch_job

    def __repr__(self):
        details = {
            'job_type': self.context.job_type.name,
            'id': self.context.id,
            'branch': self.branch.unique_name,
            }
        return '<%(job_type)s branch job (%(id)s) for %(branch)s>' % details

    # XXX: henninge 2009-02-20 bug=331919: These two standard operators
    # should be implemented by delegates().
    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        # 'other' may be security-proxied with its context attribute
        # hidden by design, so unwrap before comparing contexts.
        from zope.security.proxy import removeSecurityProxy
        return self.context == removeSecurityProxy(other).context

    def __ne__(self, other):
        return not (self == other)

    @classmethod
    def iterReady(cls):
        """See `IRevisionMailJobSource`."""
        ready_jobs = IMasterStore(Branch).find(
            BranchJob,
            And(BranchJob.job_type == cls.class_job_type,
                BranchJob.job == Job.id,
                Job.id.is_in(Job.ready_jobs)))
        return (cls(branch_job) for branch_job in ready_jobs)

    @classmethod
    def get(cls, key):
        """Return the instance of this class whose key is supplied.

        :raises: SQLObjectNotFound
        """
        branch_job = IStore(BranchJob).get(BranchJob, key)
        wrong_type = (
            branch_job is None or
            branch_job.job_type != cls.class_job_type)
        if wrong_type:
            raise SQLObjectNotFound(
                'No occurrence of %s has key %s' % (cls.__name__, key))
        return cls(branch_job)

    def getOopsVars(self):
        """See `IRunnableJob`."""
        oops_vars = BaseRunnableJob.getOopsVars(self)
        oops_vars.append(('branch_job_id', self.context.id))
        oops_vars.append(('branch_job_type', self.context.job_type.title))
        branch = self.context.branch
        if branch is not None:
            oops_vars.append(('branch_name', branch.unique_name))
        return oops_vars

    def getErrorRecipients(self):
        """Notify only the requester, when the job has one."""
        requester = self.requester
        if requester is None:
            return []
        return [format_address_for_person(requester)]
class BranchMergeProposalJobDerived(BaseRunnableJob):
    """Intermediate class for deriving from BranchMergeProposalJob."""

    __metaclass__ = EnumeratedSubclass
    delegates(IBranchMergeProposalJob)

    def __init__(self, job):
        self.context = job

    def __repr__(self):
        bmp = self.branch_merge_proposal
        return '<%(job_type)s job for merge %(merge_id)s on %(branch)s>' % {
            'job_type': self.context.job_type.name,
            'merge_id': bmp.id,
            'branch': bmp.source_branch.unique_name,
            }

    @classmethod
    def create(cls, bmp):
        """See `IMergeProposalCreationJob`."""
        return cls._create(bmp, {})

    @classmethod
    def _create(cls, bmp, metadata):
        # Create the underlying BranchMergeProposalJob row, wrap it in
        # this subclass, and arrange for celery dispatch on commit.
        base_job = BranchMergeProposalJob(bmp, cls.class_job_type,
            metadata)
        job = cls(base_job)
        job.celeryRunOnCommit()
        return job

    @classmethod
    def get(cls, job_id):
        """Get a job by id.

        :return: the BranchMergeProposalJob with the specified id, as the
            current BranchMergeProposalJobDerived subclass.
        :raises: SQLObjectNotFound if there is no job with the specified
            id, or its job_type does not match the desired subclass.
        """
        job = BranchMergeProposalJob.get(job_id)
        if job.job_type != cls.class_job_type:
            raise SQLObjectNotFound(
                'No object found with id %d and type %s' %
                (job_id, cls.class_job_type.title))
        return cls(job)

    @classmethod
    def iterReady(klass):
        """Iterate through all ready BranchMergeProposalJobs."""
        # Local import, presumably to avoid a circular import between
        # this module and lp.code.model.branch -- TODO confirm.
        from lp.code.model.branch import Branch
        jobs = IMasterStore(Branch).find(
            (BranchMergeProposalJob),
            And(
                BranchMergeProposalJob.job_type == klass.class_job_type,
                BranchMergeProposalJob.job == Job.id,
                Job.id.is_in(Job.ready_jobs),
                BranchMergeProposalJob.branch_merge_proposal ==
                BranchMergeProposal.id,
                BranchMergeProposal.source_branch == Branch.id,
                # A proposal isn't considered ready if it has no revisions,
                # or if it is hosted but pending a mirror.
                Branch.revision_count > 0,
                Or(Branch.next_mirror_time == None,
                   Branch.branch_type != BranchType.HOSTED)))
        return (klass(job) for job in jobs)

    def getOopsVars(self):
        """See `IRunnableJob`."""
        # Extend the base OOPS variables with proposal-specific context
        # for error reports.
        vars = BaseRunnableJob.getOopsVars(self)
        bmp = self.context.branch_merge_proposal
        vars.extend([
            ('branchmergeproposal_job_id', self.context.id),
            ('branchmergeproposal_job_type',
             self.context.job_type.title),
            ('source_branch', bmp.source_branch.unique_name),
            ('target_branch', bmp.target_branch.unique_name)])
        return vars
class PreviewDiff(Storm):
    """See `IPreviewDiff`.

    A merge preview diff for a branch merge proposal, recording which
    revision ids of the source/target (and optional prerequisite)
    branches it was generated from.  `IDiff` attributes are delegated to
    the referenced `Diff` row.
    """

    implements(IPreviewDiff)
    delegates(IDiff, context='diff')

    __storm_table__ = 'PreviewDiff'

    # Database columns and references.
    id = Int(primary=True)
    diff_id = Int(name='diff')
    diff = Reference(diff_id, 'Diff.id')
    source_revision_id = Unicode(allow_none=False)
    target_revision_id = Unicode(allow_none=False)
    # Historical column name: stored as 'dependent_revision_id'.
    prerequisite_revision_id = Unicode(name='dependent_revision_id')
    branch_merge_proposal_id = Int(
        name='branch_merge_proposal', allow_none=False)
    branch_merge_proposal = Reference(
        branch_merge_proposal_id, 'BranchMergeProposal.id')
    date_created = UtcDateTimeCol(
        dbName='date_created', default=UTC_NOW, notNull=True)
    conflicts = Unicode()

    @property
    def has_conflicts(self):
        # True only for a non-empty conflicts string.
        return self.conflicts is not None and self.conflicts != ''

    @classmethod
    def fromBranchMergeProposal(cls, bmp):
        """Create a `PreviewDiff` from a `BranchMergeProposal`.

        Includes a diff from the source to the target.

        :param bmp: The `BranchMergeProposal` to generate a `PreviewDiff`
            for.
        :return: A `PreviewDiff`.
        """
        source_branch = bmp.source_branch.getBzrBranch()
        source_revision = source_branch.last_revision()
        target_branch = bmp.target_branch.getBzrBranch()
        target_revision = target_branch.last_revision()
        if bmp.prerequisite_branch is not None:
            prerequisite_branch = bmp.prerequisite_branch.getBzrBranch()
        else:
            prerequisite_branch = None
        diff, conflicts = Diff.mergePreviewFromBranches(
            source_branch, source_revision, target_branch,
            prerequisite_branch)
        preview = cls()
        # bzr revision ids are byte strings; the columns are Unicode.
        preview.source_revision_id = source_revision.decode('utf-8')
        preview.target_revision_id = target_revision.decode('utf-8')
        preview.branch_merge_proposal = bmp
        preview.diff = diff
        preview.conflicts = u''.join(
            unicode(conflict) + '\n' for conflict in conflicts)
        # Invalidate the proposal's cached diff properties so they are
        # recomputed with this new preview.
        del get_property_cache(bmp).preview_diffs
        del get_property_cache(bmp).preview_diff
        return preview

    @classmethod
    def create(cls, bmp, diff_content, source_revision_id,
               target_revision_id, prerequisite_revision_id, conflicts):
        """Create a PreviewDiff with specified values.

        :param bmp: The `BranchMergeProposal` this diff references.
        :param diff_content: The text of the diff, as bytes.
        :param source_revision_id: The revision_id of the source branch.
        :param target_revision_id: The revision_id of the target branch.
        :param prerequisite_revision_id: The revision_id of the
            prerequisite branch.
        :param conflicts: The conflicts, as text.
        :return: A `PreviewDiff` with specified values.
        """
        # Store the content under a random, unique librarian filename.
        filename = str(uuid1()) + '.txt'
        size = len(diff_content)
        diff = Diff.fromFile(StringIO(diff_content), size, filename)
        preview = cls()
        preview.branch_merge_proposal = bmp
        preview.source_revision_id = source_revision_id
        preview.target_revision_id = target_revision_id
        preview.prerequisite_revision_id = prerequisite_revision_id
        preview.conflicts = conflicts
        preview.diff = diff
        return preview

    @property
    def stale(self):
        """See `IPreviewDiff`."""
        # A preview diff is stale if the revision ids used to make the diff
        # are different from the tips of the source or target branches.
        bmp = self.branch_merge_proposal
        if (self.source_revision_id != bmp.source_branch.last_scanned_id
            or self.target_revision_id !=
                bmp.target_branch.last_scanned_id):
            # This is the simple frequent case.
            return True
        # More complex involves the prerequisite branch too.
        if (bmp.prerequisite_branch is not None and
            (self.prerequisite_revision_id !=
             bmp.prerequisite_branch.last_scanned_id)):
            return True
        else:
            return False

    def getFileByName(self, filename):
        """See `IPreviewDiff`.

        Only 'preview.diff' is served, and only when diff text exists;
        anything else raises NotFoundError.
        """
        if filename == 'preview.diff' and self.diff_text is not None:
            return self.diff_text
        else:
            raise NotFoundError(filename)