class BuildFarmJob(Storm):
    """Common database implementation shared by `IBuildFarmJob` classes."""

    __storm_table__ = 'BuildFarmJob'

    id = Int(primary=True)

    date_created = DateTime(
        name='date_created', allow_none=False, tzinfo=pytz.UTC)
    date_finished = DateTime(
        name='date_finished', allow_none=True, tzinfo=pytz.UTC)

    builder_id = Int(name='builder', allow_none=True)
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', allow_none=False, enum=BuildStatus)
    job_type = DBEnum(
        name='job_type', allow_none=False, enum=BuildFarmJobType)

    archive_id = Int(name='archive')
    archive = Reference(archive_id, 'Archive.id')

    def __init__(self, job_type, status=BuildStatus.NEEDSBUILD,
                 date_created=None, builder=None, archive=None):
        """Create a build farm job record.

        `date_created` is only set when explicitly given, so the
        database default applies otherwise.
        """
        super(BuildFarmJob, self).__init__()
        self.job_type = job_type
        self.status = status
        self.builder = builder
        self.archive = archive
        if date_created is not None:
            self.date_created = date_created

    @classmethod
    def new(cls, job_type, status=BuildStatus.NEEDSBUILD, date_created=None,
            builder=None, archive=None):
        """See `IBuildFarmJobSource`."""
        job = BuildFarmJob(job_type, status, date_created, builder, archive)
        IMasterStore(BuildFarmJob).add(job)
        return job
class BugSubscription(StormBase):
    """The record linking a person to a bug they are subscribed to."""

    __storm_table__ = 'BugSubscription'

    id = Int(primary=True)

    person_id = Int("person", allow_none=False, validator=validate_person)
    person = Reference(person_id, "Person.id")

    bug_id = Int("bug", allow_none=False)
    bug = Reference(bug_id, "Bug.id")

    bug_notification_level = DBEnum(
        enum=BugNotificationLevel,
        default=BugNotificationLevel.COMMENTS,
        allow_none=False)

    date_created = DateTime(
        allow_none=False, default=UTC_NOW, tzinfo=pytz.UTC)

    subscribed_by_id = Int(
        "subscribed_by", allow_none=False, validator=validate_person)
    subscribed_by = Reference(subscribed_by_id, "Person.id")

    def __init__(self, bug=None, person=None, subscribed_by=None,
                 bug_notification_level=BugNotificationLevel.COMMENTS):
        super(BugSubscription, self).__init__()
        self.bug = bug
        self.person = person
        self.subscribed_by = subscribed_by
        self.bug_notification_level = bug_notification_level

    @property
    def display_subscribed_by(self):
        """See `IBugSubscription`."""
        # Self-subscriptions get the short form; otherwise credit the
        # person who made the subscription.
        if self.person_id == self.subscribed_by_id:
            return u'Self-subscribed'
        return u'Subscribed by %s (%s)' % (
            self.subscribed_by.displayname, self.subscribed_by.name)

    @property
    def display_duplicate_subscribed_by(self):
        """See `IBugSubscription`."""
        if self.person == self.subscribed_by:
            return u'Self-subscribed to bug %s' % (self.bug_id)
        return u'Subscribed to bug %s by %s (%s)' % (
            self.bug_id, self.subscribed_by.displayname,
            self.subscribed_by.name)

    def canBeUnsubscribedByUser(self, user):
        """See `IBugSubscription`."""
        # Anonymous users can never unsubscribe anyone.
        if user is None:
            return False
        # The subscriber themselves (or a member, for team
        # subscriptions), whoever created the subscription, or an
        # admin may remove it.
        if user.inTeam(self.person):
            return True
        if user.inTeam(self.subscribed_by):
            return True
        return IPersonRoles(user).in_admin
class BugSubscriptionFilterStatus(StormBase):
    """Statuses to filter."""

    __storm_table__ = "BugSubscriptionFilterStatus"
    # Composite key: at most one row per (filter, status) pair.
    __storm_primary__ = ('filter_id', 'status')

    filter_id = Int("filter", allow_none=False)
    filter = Reference(filter_id, "BugSubscriptionFilter.id")
    # The BugTaskStatus value this filter row selects.
    status = DBEnum(enum=BugTaskStatus, allow_none=False)
class BugSubscriptionFilterImportance(StormBase):
    """Importances to filter."""

    __storm_table__ = "BugSubscriptionFilterImportance"
    # Composite key: at most one row per (filter, importance) pair.
    __storm_primary__ = ('filter_id', 'importance')

    filter_id = Int("filter", allow_none=False)
    filter = Reference(filter_id, "BugSubscriptionFilter.id")
    # The BugTaskImportance value this filter row selects.
    importance = DBEnum(enum=BugTaskImportance, allow_none=False)
class BugSubscriptionFilterInformationType(StormBase):
    """Information types to filter."""

    __storm_table__ = "BugSubscriptionFilterInformationType"
    # Composite key: at most one row per (filter, information_type) pair.
    __storm_primary__ = ('filter_id', 'information_type')

    filter_id = Int("filter", allow_none=False)
    filter = Reference(filter_id, "BugSubscriptionFilter.id")
    # The InformationType value this filter row selects.
    information_type = DBEnum(enum=InformationType, allow_none=False)
class SourcePackageFormatSelection(Storm):
    """See ISourcePackageFormatSelection."""

    implements(ISourcePackageFormatSelection)

    __storm_table__ = 'sourcepackageformatselection'

    id = Int(primary=True)

    distroseries_id = Int(name="distroseries")
    distroseries = Reference(distroseries_id, 'DistroSeries.id')

    # The source package format accepted by the distroseries above.
    format = DBEnum(enum=SourcePackageFormat)
class SeriesSourcePackageBranch(Storm):
    """See `ISeriesSourcePackageBranch`."""

    __storm_table__ = 'SeriesSourcePackageBranch'

    id = Int(primary=True)

    distroseriesID = Int('distroseries')
    distroseries = Reference(distroseriesID, 'DistroSeries.id')

    pocket = DBEnum(enum=PackagePublishingPocket)

    sourcepackagenameID = Int('sourcepackagename')
    sourcepackagename = Reference(
        sourcepackagenameID, 'SourcePackageName.id')

    branchID = Int('branch')
    branch = Reference(branchID, 'Branch.id')

    registrantID = Int('registrant')
    registrant = Reference(registrantID, 'Person.id')

    date_created = DateTime(allow_none=False)

    def __init__(self, distroseries, pocket, sourcepackagename, branch,
                 registrant, date_created):
        """Construct an `ISeriesSourcePackageBranch`."""
        self.distroseries = distroseries
        self.pocket = pocket
        self.sourcepackagename = sourcepackagename
        self.branch = branch
        self.registrant = registrant
        self.date_created = date_created

    @property
    def sourcepackage(self):
        """The linked source package within `distroseries`."""
        series = self.distroseries
        return series.getSourcePackage(self.sourcepackagename)

    @property
    def suite_sourcepackage(self):
        """`sourcepackage` combined with this link's pocket."""
        package = self.sourcepackage
        return package.getSuiteSourcePackage(self.pocket)
class AccessPolicy(StormBase):
    """Storm model for the AccessPolicy table.

    A row is scoped either to a pillar (a product or a distribution,
    together with an information `type` — see `create`) or to a team
    (`person` — see `createForTeams`).
    """

    __storm_table__ = 'AccessPolicy'

    id = Int(primary=True)
    # For pillar policies exactly one of product/distribution is set
    # (see create()); team policies leave both None.
    product_id = Int(name='product')
    product = Reference(product_id, 'Product.id')
    distribution_id = Int(name='distribution')
    distribution = Reference(distribution_id, 'Distribution.id')
    type = DBEnum(allow_none=True, enum=InformationType)
    person_id = Int(name='person')
    person = Reference(person_id, 'Person.id')

    @property
    def pillar(self):
        """The product or distribution this policy is scoped to."""
        return self.product or self.distribution

    @classmethod
    def create(cls, policies):
        """Bulk-create a policy row per (pillar, type) pair.

        :param policies: iterable of (pillar, type) pairs where each
            pillar provides `IProduct` or `IDistribution`.
        :raises ValueError: if a pillar is of an unsupported kind.
        :return: the created `AccessPolicy` objects.
        """
        # Imported here, presumably to avoid circular imports at
        # module level — TODO confirm.
        from lp.registry.interfaces.distribution import IDistribution
        from lp.registry.interfaces.product import IProduct

        insert_values = []
        for pillar, type in policies:
            if IProduct.providedBy(pillar):
                insert_values.append((pillar, None, type))
            elif IDistribution.providedBy(pillar):
                insert_values.append((None, pillar, type))
            else:
                raise ValueError("%r is not a supported pillar" % pillar)
        return create(
            (cls.product, cls.distribution, cls.type), insert_values,
            get_objects=True)

    @classmethod
    def createForTeams(cls, teams):
        """Bulk-create a policy row per team.

        :raises ValueError: if an entry is None or not a team.
        :return: the created `AccessPolicy` objects.
        """
        insert_values = []
        for team in teams:
            if team is None or not team.is_team:
                raise ValueError("A team must be specified")
            # Pillar columns and type stay NULL for team policies.
            insert_values.append((None, None, None, team))
        return create(
            (cls.product, cls.distribution, cls.type, cls.person),
            insert_values, get_objects=True)

    @classmethod
    def _constraintForPillar(cls, pillar):
        """Return a Storm expression matching rows for `pillar`."""
        from lp.registry.interfaces.distribution import IDistribution
        from lp.registry.interfaces.product import IProduct
        if IProduct.providedBy(pillar):
            col = cls.product
        elif IDistribution.providedBy(pillar):
            col = cls.distribution
        else:
            raise ValueError("%r is not a supported pillar" % pillar)
        return col == pillar

    @classmethod
    def find(cls, pillars_and_types):
        """See `IAccessPolicySource`."""
        pillars_and_types = list(pillars_and_types)
        if len(pillars_and_types) == 0:
            # No criteria: return nothing rather than building an
            # Or() with no clauses.
            return EmptyResultSet()
        return IStore(cls).find(
            cls,
            Or(*(And(cls._constraintForPillar(pillar), cls.type == type)
                 for (pillar, type) in pillars_and_types)))

    @classmethod
    def findByID(cls, ids):
        """See `IAccessPolicySource`."""
        return IStore(cls).find(cls, cls.id.is_in(ids))

    @classmethod
    def findByPillar(cls, pillars):
        """See `IAccessPolicySource`."""
        return IStore(cls).find(
            cls,
            Or(*(cls._constraintForPillar(pillar) for pillar in pillars)))

    @classmethod
    def findByTeam(cls, teams):
        """See `IAccessPolicySource`."""
        return IStore(cls).find(
            cls, Or(*(cls.person == team for team in teams)))

    @classmethod
    def delete(cls, pillars_and_types):
        """See `IAccessPolicySource`."""
        cls.find(pillars_and_types).remove()
class OAuthRequestToken(OAuthBase, StormBase):
    """See `IOAuthRequestToken`."""

    __storm_table__ = 'OAuthRequestToken'

    id = Int(primary=True)

    consumer_id = Int(name='consumer', allow_none=False)
    consumer = Reference(consumer_id, 'OAuthConsumer.id')
    # The person who reviewed the token; None until review().
    person_id = Int(name='person', allow_none=True, default=None)
    person = Reference(person_id, 'Person.id')
    date_created = DateTime(
        tzinfo=pytz.UTC, allow_none=False, default=UTC_NOW)
    date_expires = DateTime(tzinfo=pytz.UTC, allow_none=True, default=None)
    key = Unicode(allow_none=False)
    # Only the SHA-256 digest of the secret is stored (see __init__).
    _secret = Unicode(name='secret', allow_none=True, default=u'')
    permission = DBEnum(enum=OAuthPermission, allow_none=True, default=None)
    date_reviewed = DateTime(tzinfo=pytz.UTC, allow_none=True, default=None)
    # Optional context, set by review(): at most one of product,
    # projectgroup, or distribution (possibly with sourcepackagename).
    product_id = Int(name='product', allow_none=True, default=None)
    product = Reference(product_id, 'Product.id')
    projectgroup_id = Int(name='project', allow_none=True, default=None)
    projectgroup = Reference(projectgroup_id, 'ProjectGroup.id')
    sourcepackagename_id = Int(
        name='sourcepackagename', allow_none=True, default=None)
    sourcepackagename = Reference(
        sourcepackagename_id, 'SourcePackageName.id')
    distribution_id = Int(name='distribution', allow_none=True, default=None)
    distribution = Reference(distribution_id, 'Distribution.id')

    def __init__(self, consumer, key, secret=u'', permission=None,
                 person=None, date_expires=None, product=None,
                 projectgroup=None, distribution=None,
                 sourcepackagename=None):
        super(OAuthRequestToken, self).__init__()
        self.consumer = consumer
        self.permission = permission
        self.key = key
        # Hash the secret immediately; the plaintext never reaches the
        # database.
        self._secret = sha256_digest(secret)
        self.person = person
        self.date_expires = date_expires
        self.product = product
        self.projectgroup = projectgroup
        self.distribution = distribution
        self.sourcepackagename = sourcepackagename

    @property
    def context(self):
        """See `IOAuthToken`."""
        # Precedence: product, then project group, then distribution
        # (narrowed to a source package when one is recorded).
        if self.product:
            return self.product
        elif self.projectgroup:
            return self.projectgroup
        elif self.distribution:
            if self.sourcepackagename:
                return self.distribution.getSourcePackage(
                    self.sourcepackagename)
            else:
                return self.distribution
        else:
            return None

    @property
    def is_expired(self):
        """Whether REQUEST_TOKEN_VALIDITY hours have elapsed since
        this token was created."""
        now = datetime.now(pytz.UTC)
        expires = self.date_created + timedelta(
            hours=REQUEST_TOKEN_VALIDITY)
        return expires <= now

    def isSecretValid(self, secret):
        """See `IOAuthToken`."""
        # Compare digests; only the digest is stored.
        return sha256_digest(secret) == self._secret

    def review(self, user, permission, context=None, date_expires=None):
        """See `IOAuthRequestToken`.

        :raises OAuthValidationError: if the token has already been
            reviewed, or has expired.
        """
        if self.is_reviewed:
            raise OAuthValidationError(
                "Request tokens can be reviewed only once.")
        if self.is_expired:
            raise OAuthValidationError(
                'This request token has expired and can no longer be '
                'reviewed.')
        self.date_reviewed = datetime.now(pytz.UTC)
        self.date_expires = date_expires
        self.person = user
        self.permission = permission
        # Record the context on the matching column(s); a distribution
        # source package sets both sourcepackagename and distribution.
        if IProduct.providedBy(context):
            self.product = context
        elif IProjectGroup.providedBy(context):
            self.projectgroup = context
        elif IDistribution.providedBy(context):
            self.distribution = context
        elif IDistributionSourcePackage.providedBy(context):
            self.sourcepackagename = context.sourcepackagename
            self.distribution = context.distribution
        else:
            assert context is None, ("Unknown context type: %r." % context)

    def createAccessToken(self):
        """See `IOAuthRequestToken`.

        :raises OAuthValidationError: if the token is unreviewed, was
            denied by the user, or has expired.
        :return: an (access token, secret) tuple.
        """
        if not self.is_reviewed:
            raise OAuthValidationError(
                'Cannot create an access token from an unreviewed request '
                'token.')
        if self.permission == OAuthPermission.UNAUTHORIZED:
            raise OAuthValidationError(
                'The user did not grant access to this consumer.')
        if self.is_expired:
            raise OAuthValidationError(
                'This request token has expired and can no longer be '
                'exchanged for an access token.')
        key, secret = create_token_key_and_secret(table=OAuthAccessToken)
        # Map the granted OAuthPermission to the AccessLevel item of
        # the same name.
        access_level = AccessLevel.items[self.permission.name]
        access_token = OAuthAccessToken(
            consumer=self.consumer, person=self.person, key=key,
            secret=secret, permission=access_level,
            date_expires=self.date_expires, product=self.product,
            projectgroup=self.projectgroup, distribution=self.distribution,
            sourcepackagename=self.sourcepackagename)
        OAuthAccessToken._getStore().add(access_token)

        # We want to notify the user that this oauth token has been generated
        # for them for security reasons.
        self.person.security_field_changed(
            "OAuth token generated in Launchpad",
            "A new OAuth token consumer was enabled in Launchpad.")

        # A request token is single-use: remove it once exchanged.
        self._getStore().remove(self)
        return access_token, secret

    @property
    def is_reviewed(self):
        """See `IOAuthRequestToken`."""
        return self.date_reviewed is not None
class BugSubscriptionFilter(StormBase):
    """A filter to specialize a *structural* subscription."""

    implements(IBugSubscriptionFilter)

    __storm_table__ = "BugSubscriptionFilter"

    id = Int(primary=True)

    structural_subscription_id = Int(
        "structuralsubscription", allow_none=False)
    structural_subscription = Reference(
        structural_subscription_id, "StructuralSubscription.id")

    bug_notification_level = DBEnum(
        enum=BugNotificationLevel,
        default=BugNotificationLevel.COMMENTS,
        allow_none=False)
    # Whether all given tags must match (vs. any); see _set_tags.
    find_all_tags = Bool(allow_none=False, default=False)
    # Wildcard tag flags, kept in sync with the "*" / "-*" pseudo-tags
    # by _set_tags.
    include_any_tags = Bool(allow_none=False, default=False)
    exclude_any_tags = Bool(allow_none=False, default=False)
    other_parameters = Unicode()
    description = Unicode('description')

    def _get_collection(self, cls, attribute):
        """Return a frozenset of `attribute` values over this filter's
        `cls` rows (shared by statuses/importances/information_types)."""
        kind = getattr(cls, attribute)
        return frozenset(
            IStore(cls).find(cls, cls.filter == self).values(kind))

    def _set_collection(self, cls, enum, attribute, current_set,
                        desired_set):
        """Reconcile this filter's `cls` rows so that the stored
        `attribute` values equal `desired_set`."""
        desired_set = frozenset(desired_set)
        if desired_set == frozenset(enum.items):
            # Setting all is the same as setting none, and setting none is
            # cheaper for reading and storage.
            desired_set = frozenset()
        # Add missing.
        store = IStore(cls)
        for kind in desired_set.difference(current_set):
            bsf = cls()
            bsf.filter = self
            setattr(bsf, attribute, kind)
            store.add(bsf)
        # Remove unused.
        kind = getattr(cls, attribute)
        store.find(
            cls, cls.filter == self,
            kind.is_in(current_set.difference(desired_set))).remove()

    def _get_statuses(self):
        return self._get_collection(BugSubscriptionFilterStatus, 'status')

    def _set_statuses(self, statuses):
        self._set_collection(
            BugSubscriptionFilterStatus, BugTaskStatus, 'status',
            self.statuses, statuses)

    statuses = property(
        _get_statuses, _set_statuses,
        doc=("A frozenset of statuses filtered on."))

    def _get_importances(self):
        return self._get_collection(
            BugSubscriptionFilterImportance, 'importance')

    def _set_importances(self, importances):
        self._set_collection(
            BugSubscriptionFilterImportance, BugTaskImportance,
            'importance', self.importances, importances)

    importances = property(
        _get_importances, _set_importances,
        doc=("A frozenset of importances filtered on."))

    def _get_tags(self):
        """Return a frozenset of tags to filter on."""
        # The wildcard flags are surfaced as "*" / "-*" pseudo-tags.
        wildcards = []
        if self.include_any_tags:
            wildcards.append(u"*")
        if self.exclude_any_tags:
            wildcards.append(u"-*")
        tags = (
            tag_filter.qualified_tag
            for tag_filter in IStore(BugSubscriptionFilterTag).find(
                BugSubscriptionFilterTag,
                BugSubscriptionFilterTag.filter == self))
        return frozenset(chain(wildcards, tags))

    def _set_tags(self, tags):
        """Update the tags to filter on.

        The tags can be qualified with a leading hyphen, and can be
        bundled in any iterable. If they are passed within a
        `searchbuilder.any` or `searchbuilder.all` object, the
        `find_all_tags` attribute will be updated to match. Wildcard
        tags - `*` and `-*` - can be given too, and will update
        `include_any_tags` and `exclude_any_tags`.
        """
        # Deal with searchbuilder terms.
        if isinstance(tags, searchbuilder.all):
            self.find_all_tags = True
            tags = frozenset(tags.query_values)
        elif isinstance(tags, searchbuilder.any):
            self.find_all_tags = False
            tags = frozenset(tags.query_values)
        else:
            # Leave find_all_tags unchanged.
            tags = frozenset(tags)
        wildcards = frozenset((u"*", u"-*")).intersection(tags)
        # Set wildcards.
        self.include_any_tags = "*" in wildcards
        self.exclude_any_tags = "-*" in wildcards
        # Deal with other tags: reconcile the stored rows against the
        # requested set.
        tags = tags - wildcards
        store = IStore(BugSubscriptionFilterTag)
        current_tag_filters = dict(
            (tag_filter.qualified_tag, tag_filter)
            for tag_filter in store.find(
                BugSubscriptionFilterTag,
                BugSubscriptionFilterTag.filter == self))
        # Remove unused tags.
        for tag in set(current_tag_filters).difference(tags):
            tag_filter = current_tag_filters.pop(tag)
            store.remove(tag_filter)
        # Add additional tags.
        for tag in tags.difference(current_tag_filters):
            tag_filter = BugSubscriptionFilterTag()
            tag_filter.filter = self
            # A leading hyphen means "exclude"; store the bare tag with
            # an include/exclude flag.
            tag_filter.include = not tag.startswith("-")
            tag_filter.tag = tag.lstrip("-")
            store.add(tag_filter)

    tags = property(
        _get_tags, _set_tags, doc=("A frozenset of tags filtered on."))

    def _get_information_types(self):
        return self._get_collection(
            BugSubscriptionFilterInformationType, 'information_type')

    def _set_information_types(self, information_types):
        self._set_collection(
            BugSubscriptionFilterInformationType, InformationType,
            'information_type', self.information_types, information_types)

    information_types = property(
        _get_information_types, _set_information_types,
        doc=("A frozenset of information_types filtered on."))

    def delete(self):
        """See `IBugSubscriptionFilter`."""
        BugSubscriptionFilter.deleteMultiple([self.id])
        Store.of(self).remove(self)

    @classmethod
    def deleteMultiple(cls, ids):
        """Delete the filters with the given ids, their dependent rows,
        and any structural subscriptions left with no filters."""
        # Local import, presumably to avoid a circular import — TODO
        # confirm.
        from lp.bugs.model.structuralsubscription import StructuralSubscription
        store = IStore(BugSubscriptionFilter)
        # Remember the owning subscriptions before the filters go away.
        structsub_ids = list(
            store.find(
                BugSubscriptionFilter.structural_subscription_id,
                BugSubscriptionFilter.id.is_in(ids)))
        kinds = [
            BugSubscriptionFilterImportance,
            BugSubscriptionFilterStatus,
            BugSubscriptionFilterTag,
            BugSubscriptionFilterInformationType]
        for kind in kinds:
            store.find(kind, kind.filter_id.is_in(ids)).remove()
        store.find(
            BugSubscriptionFilter,
            BugSubscriptionFilter.id.is_in(ids)).remove()
        # Now delete any structural subscriptions that have no filters.
        # Take out a SHARE lock on the filters that we use as evidence
        # for keeping structsubs, to ensure that they haven't been
        # deleted under us.
        filter_expr = Select(
            1, tables=[BugSubscriptionFilter],
            where=(BugSubscriptionFilter.structural_subscription_id ==
                   StructuralSubscription.id))
        locked_filter_expr = SQL(
            convert_storm_clause_to_string(filter_expr) + ' FOR SHARE')
        store.find(
            StructuralSubscription,
            StructuralSubscription.id.is_in(structsub_ids),
            Not(Exists(locked_filter_expr))).remove()

    def isMuteAllowed(self, person):
        """See `IBugSubscriptionFilter`."""
        subscriber = self.structural_subscription.subscriber
        # The person can mute the Subscription if the subscription is via a
        # team of which they are a member and the team doesn't have a contact
        # address (because if the team does, then the mute would be
        # ineffectual).
        return (
            subscriber.is_team and
            person.inTeam(subscriber) and
            subscriber.preferredemail is None)

    def muted(self, person):
        """Return the date this filter was muted for `person`, or None
        if no mute exists."""
        store = Store.of(self)
        existing_mutes = store.find(
            BugSubscriptionFilterMute,
            BugSubscriptionFilterMute.filter_id == self.id,
            BugSubscriptionFilterMute.person_id == person.id)
        if not existing_mutes.is_empty():
            return existing_mutes.one().date_created

    def mute(self, person):
        """See `IBugSubscriptionFilter`.

        :raises MuteNotAllowed: if muting is not permitted for
            `person` (see `isMuteAllowed`).
        """
        subscriber = self.structural_subscription.subscriber
        if subscriber.is_team and subscriber.preferredemail:
            raise MuteNotAllowed(
                "This subscription cannot be muted because team %s has a "
                "contact address." % subscriber.name)
        if not self.isMuteAllowed(person):
            raise MuteNotAllowed(
                "This subscription cannot be muted for %s" % person.name)
        store = Store.of(self)
        existing_mutes = store.find(
            BugSubscriptionFilterMute,
            BugSubscriptionFilterMute.filter_id == self.id,
            BugSubscriptionFilterMute.person_id == person.id)
        # Muting is idempotent: only add a row if none exists.
        if existing_mutes.is_empty():
            mute = BugSubscriptionFilterMute()
            mute.person = person
            # NOTE(review): this assigns the filter's id to the `filter`
            # reference rather than to `filter_id` — confirm this is
            # what Storm expects here.
            mute.filter = self.id
            store.add(mute)

    def unmute(self, person):
        """See `IBugSubscriptionFilter`."""
        store = Store.of(self)
        existing_mutes = store.find(
            BugSubscriptionFilterMute,
            BugSubscriptionFilterMute.filter_id == self.id,
            BugSubscriptionFilterMute.person_id == person.id)
        existing_mutes.remove()
class DistroSeriesDifference(StormBase): """See `DistroSeriesDifference`.""" implements(IDistroSeriesDifference) classProvides(IDistroSeriesDifferenceSource) __storm_table__ = 'DistroSeriesDifference' id = Int(primary=True) derived_series_id = Int(name='derived_series', allow_none=False) derived_series = Reference(derived_series_id, 'DistroSeries.id') parent_series_id = Int(name='parent_series', allow_none=False) parent_series = Reference(parent_series_id, 'DistroSeries.id') source_package_name_id = Int(name='source_package_name', allow_none=False) source_package_name = Reference(source_package_name_id, 'SourcePackageName.id') package_diff_id = Int(name='package_diff', allow_none=True) package_diff = Reference(package_diff_id, 'PackageDiff.id') parent_package_diff_id = Int(name='parent_package_diff', allow_none=True) parent_package_diff = Reference(parent_package_diff_id, 'PackageDiff.id') status = DBEnum(name='status', allow_none=False, enum=DistroSeriesDifferenceStatus) difference_type = DBEnum(name='difference_type', allow_none=False, enum=DistroSeriesDifferenceType) source_version = StringCol(dbName='source_version', notNull=False) parent_source_version = StringCol(dbName='parent_source_version', notNull=False) base_version = StringCol(dbName='base_version', notNull=False) @staticmethod def new(derived_series, source_package_name, parent_series): """See `IDistroSeriesDifferenceSource`.""" dsps = getUtility(IDistroSeriesParentSet) dsp = dsps.getByDerivedAndParentSeries(derived_series, parent_series) if dsp is None: raise NotADerivedSeriesError() store = IMasterStore(DistroSeriesDifference) diff = DistroSeriesDifference() diff.derived_series = derived_series diff.parent_series = parent_series diff.source_package_name = source_package_name # The status and type is set to default values - they will be # updated appropriately during the update() call. 
diff.status = DistroSeriesDifferenceStatus.NEEDS_ATTENTION diff.difference_type = DistroSeriesDifferenceType.DIFFERENT_VERSIONS diff.update() return store.add(diff) @staticmethod def getForDistroSeries(distro_series, difference_type=None, name_filter=None, status=None, child_version_higher=False, parent_series=None, packagesets=None, changed_by=None): """See `IDistroSeriesDifferenceSource`.""" if isinstance(status, DBItem): status = (status, ) if IPerson.providedBy(changed_by): changed_by = (changed_by, ) # Aliases, to improve readability. DSD = DistroSeriesDifference PSS = PackagesetSources SPN = SourcePackageName SPPH = SourcePackagePublishingHistory SPR = SourcePackageRelease TP = TeamParticipation conditions = [ DSD.derived_series == distro_series, DSD.source_package_name == SPN.id, # For ordering. ] if difference_type is not None: conditions.append(DSD.difference_type == difference_type) if status is not None: conditions.append(DSD.status.is_in(tuple(status))) if child_version_higher: conditions.append(DSD.source_version > DSD.parent_source_version) if parent_series: conditions.append(DSD.parent_series == parent_series.id) # Take a copy of the conditions specified thus far. 
basic_conditions = list(conditions) if name_filter: name_matches = [SPN.name == name_filter] try: packageset = getUtility(IPackagesetSet).getByName( name_filter, distroseries=distro_series) except NoSuchPackageSet: packageset = None if packageset is not None: name_matches.append( DSD.source_package_name_id.is_in( Select(PSS.sourcepackagename_id, PSS.packageset == packageset))) conditions.append(Or(*name_matches)) if packagesets is not None: set_ids = [packageset.id for packageset in packagesets] conditions.append( DSD.source_package_name_id.is_in( Select(PSS.sourcepackagename_id, PSS.packageset_id.is_in(set_ids)))) store = IStore(DSD) columns = (DSD, SPN.name) differences = store.find(columns, And(*conditions)) if changed_by is not None: # Identify all DSDs referring to SPRs created by changed_by for # this distroseries. The set of DSDs for the given distroseries # can then be discovered as the intersection between this set and # the already established differences. differences_changed_by_conditions = And( basic_conditions, SPPH.archiveID == distro_series.main_archive.id, SPPH.distroseriesID == distro_series.id, SPPH.sourcepackagereleaseID == SPR.id, SPPH.status.is_in(active_publishing_status), SPR.creatorID == TP.personID, SPR.sourcepackagenameID == DSD.source_package_name_id, TP.teamID.is_in(person.id for person in changed_by)) differences_changed_by = store.find( columns, differences_changed_by_conditions) differences = differences.intersection(differences_changed_by) differences = differences.order_by(SPN.name) def pre_iter_hook(rows): # Each row is (dsd, spn.name). Modify the results in place. rows[:] = (dsd for (dsd, spn_name) in rows) # Eager load everything to do with DSDs. 
return eager_load_dsds(rows) return DecoratedResultSet(differences, pre_iter_hook=pre_iter_hook) @staticmethod def getByDistroSeriesNameAndParentSeries(distro_series, source_package_name, parent_series): """See `IDistroSeriesDifferenceSource`.""" return IStore(DistroSeriesDifference).find( DistroSeriesDifference, DistroSeriesDifference.derived_series == distro_series, DistroSeriesDifference.parent_series == parent_series, DistroSeriesDifference.source_package_name == ( SourcePackageName.id), SourcePackageName.name == source_package_name).one() @staticmethod def getSimpleUpgrades(distro_series): """See `IDistroSeriesDifferenceSource`. Eager-load related `ISourcePackageName` records. """ differences = IStore(DistroSeriesDifference).find( (DistroSeriesDifference, SourcePackageName), DistroSeriesDifference.derived_series == distro_series, DistroSeriesDifference.difference_type == DistroSeriesDifferenceType.DIFFERENT_VERSIONS, DistroSeriesDifference.status == DistroSeriesDifferenceStatus.NEEDS_ATTENTION, DistroSeriesDifference.parent_source_version != DistroSeriesDifference.base_version, DistroSeriesDifference.source_version == DistroSeriesDifference.base_version, SourcePackageName.id == DistroSeriesDifference.source_package_name_id) return DecoratedResultSet(differences, itemgetter(0)) @property def sourcepackagename(self): """See `IDistroSeriesDifference`""" return self.source_package_name.name @cachedproperty def source_pub(self): """See `IDistroSeriesDifference`.""" return self._getLatestSourcePub() @cachedproperty def parent_source_pub(self): """See `IDistroSeriesDifference`.""" return self._getLatestSourcePub(for_parent=True) def _getLatestSourcePub(self, for_parent=False): """Helper to keep source_pub/parent_source_pub DRY.""" distro_series = self.derived_series if for_parent: distro_series = self.parent_series pubs = distro_series.getPublishedSources(self.source_package_name, include_pending=True) # The most recent published source is the first one. 
try: return pubs[0] except IndexError: return None @cachedproperty def base_source_pub(self): """See `IDistroSeriesDifference`.""" if self.base_version is not None: parent = self.parent_series result = parent.main_archive.getPublishedSources( name=self.source_package_name.name, version=self.base_version).first() if result is None: # If the base version isn't in the parent, it may be # published in the child distroseries. child = self.derived_series result = child.main_archive.getPublishedSources( name=self.source_package_name.name, version=self.base_version).first() return result return None @property def owner(self): """See `IDistroSeriesDifference`.""" return self.derived_series.owner @property def title(self): """See `IDistroSeriesDifference`.""" parent_name = self.parent_series.displayname return ("Difference between distroseries '%(parent_name)s' and " "'%(derived_name)s' for package '%(pkg_name)s' " "(%(parent_version)s/%(source_version)s)" % { 'parent_name': parent_name, 'derived_name': self.derived_series.displayname, 'pkg_name': self.source_package_name.name, 'parent_version': self.parent_source_version, 'source_version': self.source_version, }) def getAncestry(self, spr): """Return the version ancestry for the given SPR, or None.""" if spr.changelog is None: return None versions = set() # It would be nicer to use .versions() here, but it won't catch the # ValueError from malformed versions, and we don't want them leaking # into the ancestry. 
for raw_version in Changelog(spr.changelog.read())._raw_versions(): try: version = Version(raw_version) except ValueError: continue versions.add(version) return versions def _getPackageDiffURL(self, package_diff): """Check status and return URL if appropriate.""" if package_diff is None or (package_diff.status != PackageDiffStatus.COMPLETED): return None return package_diff.diff_content.getURL() @property def package_diff_url(self): """See `IDistroSeriesDifference`.""" return self._getPackageDiffURL(self.package_diff) @property def parent_package_diff_url(self): """See `IDistroSeriesDifference`.""" return self._getPackageDiffURL(self.parent_package_diff) @cachedproperty def packagesets(self): """See `IDistroSeriesDifference`.""" if self.derived_series is not None: return list( getUtility(IPackagesetSet).setsIncludingSource( self.source_package_name, self.derived_series)) else: return [] @cachedproperty def parent_packagesets(self): """See `IDistroSeriesDifference`.""" return list( getUtility(IPackagesetSet).setsIncludingSource( self.source_package_name, self.parent_series)) @property def package_diff_status(self): """See `IDistroSeriesDifference`.""" if self.package_diff is None: return None else: return self.package_diff.status @property def parent_package_diff_status(self): """See `IDistroSeriesDifference`.""" if self.parent_package_diff is None: return None else: return self.parent_package_diff.status @cachedproperty def parent_source_package_release(self): return self._package_release(self.parent_series, self.parent_source_version) @cachedproperty def source_package_release(self): return self._package_release(self.derived_series, self.source_version) def _package_release(self, distro_series, version): pubs = distro_series.main_archive.getPublishedSources( name=self.source_package_name.name, version=version, distroseries=distro_series, exact_match=True) # Get the most recent publication (pubs are ordered by # (name, id)). 
        # Tail of a lookup helper whose definition begins above this
        # excerpt: reduce the publication result set to a single release.
        pub = IResultSet(pubs).first()
        if pub is None:
            return None
        else:
            return DistroSeriesSourcePackageRelease(
                distro_series, pub.sourcepackagerelease)

    @cachedproperty
    def base_distro_source_package_release(self):
        """See `IDistroSeriesDifference`."""
        return DistributionSourcePackageRelease(
            self.parent_series.distribution,
            self.parent_source_package_release)

    def update(self, manual=False):
        """See `IDistroSeriesDifference`.

        :param manual: True if this is a user-requested update; passed
            through to `_updateVersionsAndStatus` to override
            auto-blacklisting.
        :return: True if any field of this difference was changed.
        """
        # Updating is expected to be a heavy operation (not called
        # during requests). We clear the cache beforehand - even though
        # it is not currently necessary - so that in the future it
        # won't cause a hard-to find bug if a script ever creates a
        # difference, copies/publishes a new version and then calls
        # update() (like the tests for this method do).
        clear_property_cache(self)
        self._updateType()
        updated = self._updateVersionsAndStatus(manual=manual)
        # Only recompute the (expensive) package diffs when something
        # actually changed.
        if updated is True:
            self._setPackageDiffs()
        return updated

    def _updateType(self):
        """Helper for update() interface method.

        Check whether the presence of a source in the derived or parent
        series has changed (which changes the type of difference).
        """
        if self.source_pub is None:
            new_type = DistroSeriesDifferenceType.MISSING_FROM_DERIVED_SERIES
        elif self.parent_source_pub is None:
            new_type = DistroSeriesDifferenceType.UNIQUE_TO_DERIVED_SERIES
        else:
            new_type = DistroSeriesDifferenceType.DIFFERENT_VERSIONS

        if new_type != self.difference_type:
            self.difference_type = new_type

    def _updateVersionsAndStatus(self, manual):
        """Helper for the update() interface method.

        Check whether the status of this difference should be updated.

        :param manual: Boolean, True if this is a user-requested change.
            This overrides auto-blacklisting.
        :return: True if the record was modified.
        """
        # XXX 2011-05-20 bigjools bug=785657
        # This method needs updating to use some sort of state
        # transition dictionary instead of this crazy mess of
        # conditions.

        updated = False
        new_source_version = new_parent_source_version = None
        if self.source_pub:
            new_source_version = self.source_pub.source_package_version
            # apt_pkg.version_compare returns 0 when the Debian version
            # strings compare equal.
            if self.source_version is None or apt_pkg.version_compare(
                    self.source_version, new_source_version) != 0:
                self.source_version = new_source_version
                updated = True
                # If the derived version has changed and the previous
                # version was blacklisted, then we remove the blacklist
                # now.
                if self.status == (
                        DistroSeriesDifferenceStatus.BLACKLISTED_CURRENT):
                    self.status = DistroSeriesDifferenceStatus.NEEDS_ATTENTION
        if self.parent_source_pub:
            new_parent_source_version = (
                self.parent_source_pub.source_package_version)
            if self.parent_source_version is None or apt_pkg.version_compare(
                    self.parent_source_version,
                    new_parent_source_version) != 0:
                self.parent_source_version = new_parent_source_version
                updated = True

        if not self.source_pub or not self.parent_source_pub:
            # This is unlikely to happen in reality but return early so
            # that bad data cannot make us OOPS.
            return updated

        # If this difference was resolved but now the versions don't match
        # then we re-open the difference.
        if self.status == DistroSeriesDifferenceStatus.RESOLVED:
            if apt_pkg.version_compare(
                    self.source_version, self.parent_source_version) < 0:
                # Higher parent version.
                updated = True
                self.status = DistroSeriesDifferenceStatus.NEEDS_ATTENTION
            elif (apt_pkg.version_compare(
                    self.source_version, self.parent_source_version) > 0
                    and not manual):
                # The child was updated with a higher version so it's
                # auto-blacklisted.
                updated = True
                self.status = DistroSeriesDifferenceStatus.BLACKLISTED_CURRENT
        # If this difference was needing attention, or the current version
        # was blacklisted and the versions now match we resolve it. Note:
        # we don't resolve it if this difference was blacklisted for all
        # versions.
        elif self.status in (
                DistroSeriesDifferenceStatus.NEEDS_ATTENTION,
                DistroSeriesDifferenceStatus.BLACKLISTED_CURRENT):
            if apt_pkg.version_compare(
                    self.source_version, self.parent_source_version) == 0:
                updated = True
                self.status = DistroSeriesDifferenceStatus.RESOLVED
            elif (apt_pkg.version_compare(
                    self.source_version, self.parent_source_version) > 0
                    and not manual):
                # The derived series has a *higher* version than the
                # parent's, so make sure the difference is blacklisted.
                # NOTE(review): unlike the analogous branches above,
                # this one does not set `updated`; confirm whether that
                # is intentional.
                self.status = DistroSeriesDifferenceStatus.BLACKLISTED_CURRENT

        if self._updateBaseVersion():
            updated = True

        return updated

    def _updateBaseVersion(self):
        """Check for the most-recently published common version.

        Return whether the record was updated or not.
        """
        # A common base version only makes sense when the package exists
        # in both series.
        if self.difference_type != (
                DistroSeriesDifferenceType.DIFFERENT_VERSIONS):
            return False

        ancestry = self.getAncestry(self.source_pub.sourcepackagerelease)
        parent_ancestry = self.getAncestry(
            self.parent_source_pub.sourcepackagerelease)

        # If the ancestry for the parent and the descendant is available, we
        # can reliably work out the most recent common ancestor using set
        # arithmetic.
        if ancestry is not None and parent_ancestry is not None:
            intersection = ancestry.intersection(parent_ancestry)
            if len(intersection) > 0:
                self.base_version = unicode(max(intersection))
                return True
        return False

    def _setPackageDiffs(self):
        """Set package diffs if they exist."""
        if self.base_version is None or self.base_source_pub is None:
            # Without a common base there is nothing to diff against.
            self.package_diff = None
            self.parent_package_diff = None
            return
        pds = getUtility(IPackageDiffSet)
        if self.source_pub is None:
            self.package_diff = None
        else:
            self.package_diff = pds.getDiffBetweenReleases(
                self.base_source_pub.sourcepackagerelease,
                self.source_pub.sourcepackagerelease)
        if self.parent_source_pub is None:
            self.parent_package_diff = None
        else:
            self.parent_package_diff = pds.getDiffBetweenReleases(
                self.base_source_pub.sourcepackagerelease,
                self.parent_source_pub.sourcepackagerelease)

    def addComment(self, commenter, comment):
        """See `IDistroSeriesDifference`."""
        return getUtility(IDistroSeriesDifferenceCommentSource).new(
            self, commenter, comment)

    @cachedproperty
    def latest_comment(self):
        """See `IDistroSeriesDifference`."""
        return self.getComments().first()

    def getComments(self):
        """See `IDistroSeriesDifference`."""
        DSDComment = DistroSeriesDifferenceComment
        # Newest comments first (descending id order).
        comments = IStore(DSDComment).find(
            DistroSeriesDifferenceComment,
            DSDComment.distro_series_difference == self)
        return comments.order_by(Desc(DSDComment.id))

    def _getCommentWithStatusChange(self, new_status, comment=None):
        # Render a comment body that records the status transition.
        return get_comment_with_status_change(
            self.status, new_status, comment)

    def blacklist(self, commenter, all=False, comment=None):
        """See `IDistroSeriesDifference`."""
        if all:
            new_status = DistroSeriesDifferenceStatus.BLACKLISTED_ALWAYS
        else:
            new_status = DistroSeriesDifferenceStatus.BLACKLISTED_CURRENT
        # Record the status change as a comment before applying it.
        new_comment = self._getCommentWithStatusChange(new_status, comment)
        dsd_comment = self.addComment(commenter, new_comment)
        self.status = new_status
        return dsd_comment

    def unblacklist(self, commenter, comment=None):
        """See `IDistroSeriesDifference`."""
        new_status = DistroSeriesDifferenceStatus.NEEDS_ATTENTION
        new_comment = self._getCommentWithStatusChange(new_status, comment)
        self.status = new_status
        dsd_comment = self.addComment(commenter, new_comment)
        # Re-evaluate the difference; manual=True overrides
        # auto-blacklisting.
        self.update(manual=True)
        return dsd_comment

    def requestPackageDiffs(self, requestor):
        """See `IDistroSeriesDifference`.

        :raises DistroSeriesDifferenceError: if any of the three
            publications is missing, or the difference is resolved.
        """
        if (self.base_source_pub is None or self.source_pub is None or
                self.parent_source_pub is None):
            raise DistroSeriesDifferenceError(
                "A derived, parent and base version are required to "
                "generate package diffs.")
        if self.status == DistroSeriesDifferenceStatus.RESOLVED:
            raise DistroSeriesDifferenceError(
                "Can not generate package diffs for a resolved difference.")
        base_spr = self.base_source_pub.sourcepackagerelease
        derived_spr = self.source_pub.sourcepackagerelease
        parent_spr = self.parent_source_pub.sourcepackagerelease
        # Only request a diff when the version actually differs from the
        # base.
        if self.source_version != self.base_version:
            self.package_diff = base_spr.requestDiffTo(
                requestor, to_sourcepackagerelease=derived_spr)
        if self.parent_source_version != self.base_version:
            self.parent_package_diff = base_spr.requestDiffTo(
                requestor, to_sourcepackagerelease=parent_spr)
class OAuthAccessToken(OAuthBase, StormBase):
    """See `IOAuthAccessToken`."""

    __storm_table__ = 'OAuthAccessToken'

    id = Int(primary=True)
    consumer_id = Int(name='consumer', allow_none=False)
    consumer = Reference(consumer_id, 'OAuthConsumer.id')
    person_id = Int(name='person', allow_none=False)
    person = Reference(person_id, 'Person.id')
    date_created = DateTime(
        tzinfo=pytz.UTC, allow_none=False, default=UTC_NOW)
    date_expires = DateTime(tzinfo=pytz.UTC, allow_none=True, default=None)
    key = Unicode(allow_none=False)
    _secret = Unicode(name='secret', allow_none=True, default=u'')
    permission = DBEnum(enum=AccessLevel, allow_none=False)

    product_id = Int(name='product', allow_none=True, default=None)
    product = Reference(product_id, 'Product.id')
    projectgroup_id = Int(name='project', allow_none=True, default=None)
    projectgroup = Reference(projectgroup_id, 'ProjectGroup.id')
    sourcepackagename_id = Int(
        name='sourcepackagename', allow_none=True, default=None)
    sourcepackagename = Reference(
        sourcepackagename_id, 'SourcePackageName.id')
    distribution_id = Int(name='distribution', allow_none=True, default=None)
    distribution = Reference(distribution_id, 'Distribution.id')

    def __init__(self, consumer, permission, key, secret=u'', person=None,
                 date_expires=None, product=None, projectgroup=None,
                 distribution=None, sourcepackagename=None):
        super(OAuthAccessToken, self).__init__()
        self.consumer = consumer
        self.permission = permission
        self.key = key
        # Only the SHA-256 digest of the secret is persisted; the
        # plaintext secret is never stored.
        self._secret = sha256_digest(secret)
        self.person = person
        self.date_expires = date_expires
        self.product = product
        self.projectgroup = projectgroup
        self.distribution = distribution
        self.sourcepackagename = sourcepackagename

    @property
    def context(self):
        """See `IOAuthToken`.

        Returns the product, project group, distribution source package
        or distribution this token is scoped to, or None if the token
        has no context.
        """
        if self.product:
            return self.product
        if self.projectgroup:
            return self.projectgroup
        if self.distribution:
            if self.sourcepackagename:
                return self.distribution.getSourcePackage(
                    self.sourcepackagename)
            return self.distribution
        return None

    @property
    def is_expired(self):
        """Whether this token's expiry date (if set) has passed."""
        if self.date_expires is None:
            # Tokens without an expiry date never expire.
            return False
        return self.date_expires <= datetime.now(pytz.UTC)

    def isSecretValid(self, secret):
        """See `IOAuthToken`."""
        # Compare digests, not plaintext; only the digest is stored.
        return sha256_digest(secret) == self._secret
class TranslationTemplatesBuild(SpecificBuildFarmJobSourceMixin,
                                BuildFarmJobMixin, Storm):
    """A `BuildFarmJob` extension for translation templates builds."""

    __storm_table__ = 'TranslationTemplatesBuild'

    job_type = BuildFarmJobType.TRANSLATIONTEMPLATESBUILD

    id = Int(name='id', primary=True)
    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
    branch_id = Int(name='branch', allow_none=False)
    branch = Reference(branch_id, 'Branch.id')

    processor_id = Int(name='processor')
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(
        name='date_first_dispatched', tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    failure_count = Int(name='failure_count', allow_none=False)

    @property
    def title(self):
        return u'Translation template build for %s' % (
            self.branch.displayname)

    def __init__(self, build_farm_job, branch, processor):
        """Construct a `TranslationTemplatesBuild`.

        :param build_farm_job: The `BuildFarmJob` record backing this
            build.
        :param branch: The `Branch` to generate templates from.
        :param processor: The `Processor` the build is queued for.
        """
        super(TranslationTemplatesBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.branch = branch
        self.status = BuildStatus.NEEDSBUILD
        self.processor = processor
        # Template builds always run on virtualized builders.
        self.virtualized = True

    def estimateDuration(self):
        """See `IBuildFarmJob`."""
        # Hard-coded estimate: template generation is quick.
        return timedelta(seconds=10)

    @classmethod
    def _getStore(cls, store=None):
        """Return `store` if given, or the default."""
        if store is None:
            return IStore(cls)
        else:
            return store

    @classmethod
    def _getBuildArch(cls):
        """Returns an `IProcessor` to queue a translation build for."""
        # XXX Danilo Segan bug=580429: we hard-code processor to the
        # Ubuntu default processor architecture.  This stops the
        # buildfarm from accidentally dispatching the jobs to private
        # builders.
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        return ubuntu.currentseries.nominatedarchindep.processor

    @classmethod
    def _hasPotteryCompatibleSetup(cls, branch):
        """Does `branch` look as if pottery can generate templates for it?

        :param branch: A `Branch` object.
        """
        bzr_branch = removeSecurityProxy(branch).getBzrBranch()
        return is_intltool_structure(bzr_branch.basis_tree())

    @classmethod
    def generatesTemplates(cls, branch):
        """See `ITranslationTemplatesBuildSource`."""
        logger = logging.getLogger('translation-templates-build')
        if branch.private:
            # We don't support generating template from private branches
            # at the moment.
            logger.debug("Branch %s is private.", branch.unique_name)
            return False

        utility = getUtility(IRosettaUploadJobSource)
        if not utility.providesTranslationFiles(branch):
            # Nobody asked for templates generated from this branch.
            logger.debug(
                "No templates requested for branch %s.", branch.unique_name)
            return False

        if not cls._hasPotteryCompatibleSetup(branch):
            # Nothing we could do with this branch if we wanted to.
            logger.debug(
                "Branch %s is not pottery-compatible.", branch.unique_name)
            return False

        # Yay!  We made it.
        return True

    @classmethod
    def create(cls, branch):
        """See `ITranslationTemplatesBuildSource`."""
        processor = cls._getBuildArch()
        build_farm_job = getUtility(IBuildFarmJobSource).new(
            BuildFarmJobType.TRANSLATIONTEMPLATESBUILD)
        build = TranslationTemplatesBuild(build_farm_job, branch, processor)
        store = cls._getStore()
        store.add(build)
        store.flush()
        return build

    @classmethod
    def scheduleTranslationTemplatesBuild(cls, branch):
        """See `ITranslationTemplatesBuildSource`."""
        logger = logging.getLogger('translation-templates-build')
        if not config.rosetta.generate_templates:
            # This feature is disabled by default.
            # Use the named logger, not the logging module (the root
            # logger), so this message is handled consistently with the
            # other messages in this class.
            logger.debug("Templates generation is disabled.")
            return

        try:
            if cls.generatesTemplates(branch):
                # This branch is used for generating templates.
                logger.info(
                    "Requesting templates build for branch %s.",
                    branch.unique_name)
                cls.create(branch).queueBuild()
        except Exception as e:
            logger.error(e)
            raise

    @classmethod
    def getByID(cls, build_id, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        return store.get(TranslationTemplatesBuild, build_id)

    @classmethod
    def getByBuildFarmJob(cls, buildfarmjob, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        match = store.find(
            TranslationTemplatesBuild, build_farm_job_id=buildfarmjob.id)
        return match.one()

    @classmethod
    def getByBuildFarmJobs(cls, buildfarmjobs, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        rows = store.find(
            TranslationTemplatesBuild,
            TranslationTemplatesBuild.build_farm_job_id.is_in(
                bfj.id for bfj in buildfarmjobs))
        return DecoratedResultSet(rows, pre_iter_hook=cls.preloadBuildsData)

    @classmethod
    def preloadBuildsData(cls, builds):
        """Eager-load data that build listings will need."""
        # Circular imports.
        from lp.services.librarian.model import LibraryFileAlias
        # Load the related branches.
        branches = load_related(Branch, builds, ['branch_id'])
        # Preload branches' cached associated targets, product series,
        # and suite source packages for all the related branches.
        GenericBranchCollection.preloadDataForBranches(branches)
        load_related(LibraryFileAlias, builds, ['log_id'])

    @classmethod
    def findByBranch(cls, branch, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        return store.find(
            TranslationTemplatesBuild,
            TranslationTemplatesBuild.branch == branch)

    @property
    def log_url(self):
        """See `IBuildFarmJob`."""
        if self.log is None:
            return None
        return self.log.http_url

    def calculateScore(self):
        """See `IBuildFarmJob`."""
        # Hard-code score for now.  Most PPA jobs start out at 2510;
        # TranslationTemplateBuild are fast so we want them at a higher
        # priority.
        return HARDCODED_TRANSLATIONTEMPLATESBUILD_SCORE
class ArchiveSubscriber(Storm):
    """See `IArchiveSubscriber`."""

    __storm_table__ = 'ArchiveSubscriber'

    id = Int(primary=True)

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    # The person who created this subscription.
    registrant_id = Int(name='registrant', allow_none=False)
    registrant = Reference(registrant_id, 'Person.id')

    date_created = DateTime(
        name='date_created', allow_none=False, tzinfo=pytz.UTC)

    # The person or team granted access to the archive.
    subscriber_id = Int(
        name='subscriber', allow_none=False, validator=validate_person)
    subscriber = Reference(subscriber_id, 'Person.id')

    date_expires = DateTime(
        name='date_expires', allow_none=True, tzinfo=pytz.UTC)

    status = DBEnum(
        name='status', allow_none=False, enum=ArchiveSubscriberStatus)

    description = Unicode(name='description', allow_none=True)

    date_cancelled = DateTime(
        name='date_cancelled', allow_none=True, tzinfo=pytz.UTC)

    cancelled_by_id = Int(name='cancelled_by', allow_none=True)
    cancelled_by = Reference(cancelled_by_id, 'Person.id')

    @property
    def displayname(self):
        """See `IArchiveSubscriber`."""
        return "%s's access to %s" % (
            self.subscriber.displayname, self.archive.displayname)

    def cancel(self, cancelled_by):
        """See `IArchiveSubscriber`."""
        # The bulk cancel normally has stricter permissions, but if we've
        # got this far then we know the caller has enough permissions to
        # cancel just this subscription.
        removeSecurityProxy(getUtility(IArchiveSubscriberSet)).cancel(
            [self.id], cancelled_by)

    def getNonActiveSubscribers(self):
        """See `IArchiveSubscriber`.

        Returns (Person, EmailAddress) pairs for everybody covered by
        this subscription who does not yet have an active
        `ArchiveAuthToken` for the archive.
        """
        store = Store.of(self)
        if self.subscriber.is_team:
            # We get all the people who already have active tokens for
            # this archive (for example, through separate subscriptions).
            # LeftJoin so that people *without* a token appear with NULL
            # token columns, which we filter on below.
            auth_token = LeftJoin(
                ArchiveAuthToken,
                And(ArchiveAuthToken.person_id == Person.id,
                    ArchiveAuthToken.archive_id == self.archive_id,
                    ArchiveAuthToken.date_deactivated == None))

            team_participation = Join(
                TeamParticipation, TeamParticipation.personID == Person.id)

            # Only return people with preferred email address set.
            preferred_email = Join(
                EmailAddress, EmailAddress.personID == Person.id)

            # We want to get all participants who are themselves
            # individuals, not teams:
            non_active_subscribers = store.using(
                Person, team_participation, preferred_email,
                auth_token).find(
                    (Person, EmailAddress),
                    EmailAddress.status == EmailAddressStatus.PREFERRED,
                    TeamParticipation.teamID == self.subscriber_id,
                    # Persons with teamowner set are teams; exclude them.
                    Person.teamowner == None,
                    # There is no existing archive auth token.
                    ArchiveAuthToken.person_id == None)
            non_active_subscribers.order_by(Person.name)
            return non_active_subscribers
        else:
            # Subscriber is not a team.
            token_set = getUtility(IArchiveAuthTokenSet)
            if token_set.getActiveTokenForArchiveAndPerson(
                    self.archive, self.subscriber) is not None:
                # There are active tokens, so return an empty result
                # set.
                return EmptyResultSet()

            # Otherwise return a result set containing only the
            # subscriber and their preferred email address.
            return store.find(
                (Person, EmailAddress),
                Person.id == self.subscriber_id,
                EmailAddress.personID == Person.id,
                EmailAddress.status == EmailAddressStatus.PREFERRED)
class SourcePackageRecipeBuild(SpecificBuildFarmJobSourceMixin,
                               PackageBuildMixin, Storm):
    """A build of a source package from a recipe, on the build farm."""

    __storm_table__ = 'SourcePackageRecipeBuild'

    job_type = BuildFarmJobType.RECIPEBRANCHBUILD

    id = Int(primary=True)

    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id)

    @property
    def binary_builds(self):
        """See `ISourcePackageRecipeBuild`."""
        return Store.of(self).find(
            BinaryPackageBuild,
            BinaryPackageBuild.source_package_release ==
                SourcePackageRelease.id,
            SourcePackageRelease.source_package_recipe_build == self.id)

    @property
    def current_component(self):
        # Only PPAs currently have a sane default component at the
        # moment, but we only support recipes for PPAs.
        component = self.archive.default_component
        assert component is not None
        return component

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    distroseries_id = Int(name='distroseries', allow_none=True)
    distroseries = Reference(distroseries_id, 'DistroSeries.id')
    # Alias kept for interface compatibility.
    distro_series = distroseries

    pocket = DBEnum(
        name='pocket', enum=PackagePublishingPocket, allow_none=False)

    @property
    def distribution(self):
        """See `IPackageBuild`."""
        return self.distroseries.distribution

    recipe_id = Int(name='recipe')
    recipe = Reference(recipe_id, 'SourcePackageRecipe.id')

    requester_id = Int(name='requester', allow_none=False)
    requester = Reference(requester_id, 'Person.id')

    upload_log_id = Int(name='upload_log')
    upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')

    dependencies = Unicode(name='dependencies')

    processor_id = Int(name='processor')
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(
        name='date_first_dispatched', tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    failure_count = Int(name='failure_count', allow_none=False)

    manifest = Reference(
        id, 'SourcePackageRecipeData.sourcepackage_recipe_build_id',
        on_remote=True)

    def setManifestText(self, text):
        """Store, replace or delete the recipe manifest for this build."""
        if text is None:
            if self.manifest is not None:
                IStore(self.manifest).remove(self.manifest)
        elif self.manifest is None:
            getUtility(ISourcePackageRecipeDataSource).createManifestFromText(
                text, self)
        else:
            parsed, recipe_branch_type = (
                getUtility(IRecipeBranchSource).getParsedRecipe(text))
            self.manifest.setRecipe(parsed, recipe_branch_type)

    def getManifestText(self):
        """Return the manifest as text, or None if there is none."""
        if self.manifest is None:
            return None
        return str(self.manifest.getRecipe())

    @property
    def source_package_release(self):
        """See `ISourcePackageRecipeBuild`."""
        return Store.of(self).find(
            SourcePackageRelease, source_package_recipe_build=self).one()

    @property
    def title(self):
        if self.recipe is None:
            branch_name = 'deleted'
        else:
            branch_name = self.recipe.base.unique_name
        return '%s recipe build in %s %s' % (
            branch_name, self.distribution.name, self.distroseries.name)

    def __init__(self, build_farm_job, distroseries, recipe, requester,
                 archive, pocket, date_created):
        """Construct a SourcePackageRecipeBuild."""
        processor = distroseries.nominatedarchindep.processor
        super(SourcePackageRecipeBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.distroseries = distroseries
        self.recipe = recipe
        self.requester = requester
        self.archive = archive
        self.pocket = pocket
        self.status = BuildStatus.NEEDSBUILD
        self.processor = processor
        self.virtualized = True
        if date_created is not None:
            self.date_created = date_created

    @classmethod
    def new(cls, distroseries, recipe, requester, archive, pocket=None,
            date_created=None, duration=None):
        """See `ISourcePackageRecipeBuildSource`."""
        store = IMasterStore(SourcePackageRecipeBuild)
        if pocket is None:
            pocket = PackagePublishingPocket.RELEASE
        if date_created is None:
            date_created = UTC_NOW
        build_farm_job = getUtility(IBuildFarmJobSource).new(
            cls.job_type, BuildStatus.NEEDSBUILD, date_created, None,
            archive)
        spbuild = cls(
            build_farm_job, distroseries, recipe, requester, archive,
            pocket, date_created)
        store.add(spbuild)
        return spbuild

    @staticmethod
    def makeDailyBuilds(logger=None):
        """Request builds for all stale daily-build recipes.

        :param logger: Optional logger; defaults to the root logger.
        :return: The list of builds that were successfully requested.
        """
        from lp.code.model.sourcepackagerecipe import SourcePackageRecipe
        recipes = SourcePackageRecipe.findStaleDailyBuilds()
        if logger is None:
            logger = logging.getLogger()
        builds = []
        for recipe in recipes:
            recipe.is_stale = False
            logger.debug(
                'Recipe %s/%s is stale', recipe.owner.name, recipe.name)
            if recipe.daily_build_archive is None:
                logger.debug(' - No daily build archive specified.')
                continue
            for distroseries in recipe.distroseries:
                series_name = distroseries.named_version
                try:
                    build = recipe.requestBuild(
                        recipe.daily_build_archive, recipe.owner,
                        distroseries, PackagePublishingPocket.RELEASE)
                except BuildAlreadyPending:
                    logger.debug(
                        ' - build already pending for %s', series_name)
                    continue
                except CannotUploadToArchive as e:
                    # This will catch all PPA related issues -
                    # disabled, security, wrong pocket etc
                    logger.debug(
                        ' - daily build failed for %s: %s',
                        series_name, repr(e))
                except BuildNotAllowedForDistro:
                    # Use lazy %-args like the other log calls here.
                    logger.debug(
                        ' - cannot build against %s.', series_name)
                except ProgrammingError:
                    raise
                except Exception:
                    # A bare "except:" would also swallow
                    # KeyboardInterrupt and SystemExit; catch Exception
                    # and log the traceback so daily builds keep going.
                    logger.exception(' - problem with %s', series_name)
                else:
                    logger.debug(
                        ' - build requested for %s', series_name)
                    builds.append(build)
        return builds

    @property
    def can_be_rescored(self):
        """See `IBuild`."""
        return self.status is BuildStatus.NEEDSBUILD

    @property
    def can_be_cancelled(self):
        """See `ISourcePackageRecipeBuild`."""
        if not self.buildqueue_record:
            return False

        cancellable_statuses = [
            BuildStatus.BUILDING,
            BuildStatus.NEEDSBUILD,
            ]
        return self.status in cancellable_statuses

    def cancel(self):
        """See `ISourcePackageRecipeBuild`."""
        if not self.can_be_cancelled:
            return
        # BuildQueue.cancel() will decide whether to go straight to
        # CANCELLED, or go through CANCELLING to let buildd-manager
        # clean up the slave.
        self.buildqueue_record.cancel()

    def destroySelf(self):
        """Delete this build, detaching any generated source releases."""
        if self.buildqueue_record is not None:
            self.buildqueue_record.destroySelf()
        store = Store.of(self)
        releases = store.find(
            SourcePackageRelease,
            SourcePackageRelease.source_package_recipe_build == self.id)
        for release in releases:
            release.source_package_recipe_build = None
        # Grab the backing BuildFarmJob before removing self so we do
        # not traverse the reference on an object marked for removal.
        build_farm_job = self.build_farm_job
        store.remove(self)
        store.remove(build_farm_job)

    def calculateScore(self):
        # Start from the standard PPA score and apply the archive's
        # relative adjustment.
        return 2510 + self.archive.relative_build_score

    @classmethod
    def getByID(cls, build_id):
        """See `ISourcePackageRecipeBuildSource`."""
        store = IMasterStore(SourcePackageRecipeBuild)
        return store.find(cls, cls.id == build_id).one()

    @classmethod
    def getByBuildFarmJob(cls, build_farm_job):
        """See `ISpecificBuildFarmJobSource`."""
        return Store.of(build_farm_job).find(
            cls, build_farm_job_id=build_farm_job.id).one()

    @classmethod
    def preloadBuildsData(cls, builds):
        """Eager-load data that build listings will need."""
        # Circular imports.
        from lp.code.model.sourcepackagerecipe import SourcePackageRecipe
        from lp.registry.model.distribution import Distribution
        from lp.registry.model.distroseries import DistroSeries
        from lp.services.librarian.model import LibraryFileAlias
        load_related(LibraryFileAlias, builds, ['log_id'])
        archives = load_related(Archive, builds, ['archive_id'])
        load_related(Person, archives, ['ownerID'])
        distroseries = load_related(
            DistroSeries, builds, ['distroseries_id'])
        load_related(Distribution, distroseries, ['distributionID'])
        sprs = load_related(SourcePackageRecipe, builds, ['recipe_id'])
        SourcePackageRecipe.preLoadDataForSourcePackageRecipes(sprs)

    @classmethod
    def getByBuildFarmJobs(cls, build_farm_jobs):
        """See `ISpecificBuildFarmJobSource`."""
        if len(build_farm_jobs) == 0:
            return EmptyResultSet()
        rows = Store.of(build_farm_jobs[0]).find(
            cls,
            cls.build_farm_job_id.is_in(
                bfj.id for bfj in build_farm_jobs))
        return DecoratedResultSet(rows, pre_iter_hook=cls.preloadBuildsData)

    def estimateDuration(self):
        """See `IPackageBuild`."""
        median = self.recipe.getMedianBuildDuration()
        if median is not None:
            return median
        return timedelta(minutes=10)

    def verifySuccessfulUpload(self):
        """See `IPackageBuild`."""
        return self.source_package_release is not None

    def notify(self, extra_info=None):
        """See `IPackageBuild`."""
        # If our recipe has been deleted, any notification will fail.
        if self.recipe is None:
            return
        if self.status == BuildStatus.FULLYBUILT:
            # Don't send mail for successful recipe builds; it can be
            # just too much.
            return
        mailer = SourcePackageRecipeBuildMailer.forStatus(self)
        mailer.sendAll()

    def lfaUrl(self, lfa):
        """Return the URL for a LibraryFileAlias, in the context of self.
        """
        if lfa is None:
            return None
        return ProxiedLibraryFileAlias(lfa, self).http_url

    @property
    def log_url(self):
        """See `IPackageBuild`.

        Overridden here so that it uses the SourcePackageRecipeBuild as
        context.
        """
        return self.lfaUrl(self.log)

    @property
    def upload_log_url(self):
        """See `IPackageBuild`.

        Overridden here so that it uses the SourcePackageRecipeBuild as
        context.
        """
        return self.lfaUrl(self.upload_log)

    def getFileByName(self, filename):
        """See `ISourcePackageRecipeBuild`.

        :raises NotFoundError: if `filename` matches neither the build
            log nor the upload log.
        """
        files = dict(
            (lfa.filename, lfa)
            for lfa in [self.log, self.upload_log]
            if lfa is not None)
        try:
            return files[filename]
        except KeyError:
            raise NotFoundError(filename)

    def getUploader(self, changes):
        """See `IPackageBuild`."""
        return self.requester
class LiveFSBuild(PackageBuildMixin, Storm):
    """See `ILiveFSBuild`."""

    __storm_table__ = 'LiveFSBuild'

    job_type = BuildFarmJobType.LIVEFSBUILD

    id = Int(name='id', primary=True)

    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')

    requester_id = Int(name='requester', allow_none=False)
    requester = Reference(requester_id, 'Person.id')

    livefs_id = Int(name='livefs', allow_none=False)
    livefs = Reference(livefs_id, 'LiveFS.id')

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    distro_arch_series_id = Int(name='distro_arch_series', allow_none=False)
    distro_arch_series = Reference(
        distro_arch_series_id, 'DistroArchSeries.id')

    pocket = DBEnum(enum=PackagePublishingPocket, allow_none=False)

    processor_id = Int(name='processor', allow_none=False)
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    unique_key = Unicode(name='unique_key')

    # Build-specific metadata overrides, stored in the
    # 'json_data_override' column.
    metadata_override = JSON('json_data_override')

    # Backing column for the `version` property below.
    _version = Unicode(name='version')

    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(
        name='date_first_dispatched', tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    upload_log_id = Int(name='upload_log')
    upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')

    dependencies = Unicode(name='dependencies')

    failure_count = Int(name='failure_count', allow_none=False)

    def __init__(self, build_farm_job, requester, livefs, archive,
                 distro_arch_series, pocket, processor, virtualized,
                 unique_key, metadata_override, version, date_created):
        """Construct a `LiveFSBuild`.

        :raises LiveFSFeatureDisabled: if the livefs feature flag is not
            set.
        """
        # Live filesystem builds are gated behind a feature flag.
        if not getFeatureFlag(LIVEFS_FEATURE_FLAG):
            raise LiveFSFeatureDisabled
        super(LiveFSBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.requester = requester
        self.livefs = livefs
        self.archive = archive
        self.distro_arch_series = distro_arch_series
        self.pocket = pocket
        self.processor = processor
        self.virtualized = virtualized
        self.unique_key = unique_key
        self.metadata_override = metadata_override
        self._version = version
        self.date_created = date_created
        self.status = BuildStatus.NEEDSBUILD

    @property
    def is_private(self):
        """See `IBuildFarmJob`."""
        # Private if either the owning livefs' owner or the archive is
        # private.
        return self.livefs.owner.private or self.archive.private

    @property
    def title(self):
        # Human-readable build description, including the unique key if
        # one was supplied.
        das = self.distro_arch_series
        name = self.livefs.name
        if self.unique_key is not None:
            name += " (%s)" % self.unique_key
        return "%s build of %s livefs in %s %s" % (
            das.architecturetag, name,
            das.distroseries.distribution.name,
            das.distroseries.getSuite(self.pocket))

    @property
    def distribution(self):
        """See `IPackageBuild`."""
        return self.distro_arch_series.distroseries.distribution

    @property
    def distro_series(self):
        """See `IPackageBuild`."""
        return self.distro_arch_series.distroseries

    @property
    def current_component(self):
        component = self.archive.default_component
        if component is not None:
            return component
        else:
            # XXX cjwatson 2014-04-22: Hardcode to universe for the time
            # being.
            return getUtility(IComponentSet)["universe"]

    @property
    def version(self):
        """See `ILiveFSBuild`."""
        if self._version is not None:
            return self._version
        else:
            # Fall back to a timestamp derived from the creation date.
            return self.date_created.strftime("%Y%m%d-%H%M%S")

    @property
    def score(self):
        """See `ILiveFSBuild`."""
        if self.buildqueue_record is None:
            return None
        else:
            return self.buildqueue_record.lastscore

    @property
    def can_be_rescored(self):
        """See `ILiveFSBuild`."""
        return (
            self.buildqueue_record is not None and
            self.status is BuildStatus.NEEDSBUILD)

    @property
    def can_be_cancelled(self):
        """See `ILiveFSBuild`."""
        if not self.buildqueue_record:
            return False

        cancellable_statuses = [
            BuildStatus.BUILDING,
            BuildStatus.NEEDSBUILD,
            ]
        return self.status in cancellable_statuses

    def rescore(self, score):
        """See `ILiveFSBuild`."""
        assert self.can_be_rescored, "Build %s cannot be rescored" % self.id
        self.buildqueue_record.manualScore(score)

    def cancel(self):
        """See `ILiveFSBuild`."""
        if not self.can_be_cancelled:
            return
        # BuildQueue.cancel() will decide whether to go straight to
        # CANCELLED, or go through CANCELLING to let buildd-manager clean
        # up the slave.
        self.buildqueue_record.cancel()

    def calculateScore(self):
        # Standard PPA base score plus the archive's and livefs'
        # relative adjustments.
        return (
            2510 + self.archive.relative_build_score +
            self.livefs.relative_build_score)

    def getMedianBuildDuration(self):
        """Return the median duration of our successful builds."""
        store = IStore(self)
        result = store.find(
            (LiveFSBuild.date_started, LiveFSBuild.date_finished),
            LiveFSBuild.livefs == self.livefs_id,
            LiveFSBuild.distro_arch_series == self.distro_arch_series_id,
            LiveFSBuild.status == BuildStatus.FULLYBUILT)
        result.order_by(Desc(LiveFSBuild.date_finished))
        # Only the nine most recent successful builds are considered.
        durations = [row[1] - row[0] for row in result[:9]]
        if len(durations) == 0:
            return None
        durations.sort()
        # For an even count this picks the upper of the two middle
        # values rather than their mean.
        return durations[len(durations) // 2]

    def estimateDuration(self):
        """See `IBuildFarmJob`."""
        median = self.getMedianBuildDuration()
        if median is not None:
            return median
        # No history to go on: assume half an hour.
        return timedelta(minutes=30)

    def getFiles(self):
        """See `ILiveFSBuild`."""
        result = Store.of(self).find(
            (LiveFSFile, LibraryFileAlias, LibraryFileContent),
            LiveFSFile.livefsbuild == self.id,
            LibraryFileAlias.id == LiveFSFile.libraryfile_id,
            LibraryFileContent.id == LibraryFileAlias.contentID)
        return result.order_by([LibraryFileAlias.filename, LiveFSFile.id])

    def getFileByName(self, filename):
        """See `ILiveFSBuild`.

        :raises NotFoundError: if no attached file matches `filename`.
        """
        if filename.endswith(".txt.gz"):
            # Build logs are stored gzipped.
            file_object = self.log
        elif filename.endswith("_log.txt"):
            # Upload processor logs.
            file_object = self.upload_log
        else:
            file_object = Store.of(self).find(
                LibraryFileAlias,
                LiveFSFile.livefsbuild == self.id,
                LibraryFileAlias.id == LiveFSFile.libraryfile_id,
                LibraryFileAlias.filename == filename).one()

        if file_object is not None and file_object.filename == filename:
            return file_object

        raise NotFoundError(filename)

    def addFile(self, lfa):
        """See `ILiveFSBuild`."""
        livefsfile = LiveFSFile(livefsbuild=self, libraryfile=lfa)
        IMasterStore(LiveFSFile).add(livefsfile)
        return livefsfile

    def verifySuccessfulUpload(self):
        """See `IPackageBuild`."""
        # A successful upload attached at least one file to this build.
        return not self.getFiles().is_empty()

    def notify(self, extra_info=None):
        """See `IPackageBuild`."""
        if not config.builddmaster.send_build_notification:
            return
        if self.status == BuildStatus.FULLYBUILT:
            # No mail for successful builds.
            return
        mailer = LiveFSBuildMailer.forStatus(self)
        mailer.sendAll()

    def lfaUrl(self, lfa):
        """Return the URL for a LibraryFileAlias in this context."""
        if lfa is None:
            return None
        return ProxiedLibraryFileAlias(lfa, self).http_url

    @property
    def log_url(self):
        """See `IBuildFarmJob`."""
        return self.lfaUrl(self.log)

    @property
    def upload_log_url(self):
        """See `IPackageBuild`."""
        return self.lfaUrl(self.upload_log)

    def getFileUrls(self):
        # URLs for every file attached to this build.
        return [self.lfaUrl(lfa) for _, lfa, _ in self.getFiles()]
class GitRuleGrant(StormBase, GitRuleGrantMixin):
    """See `IGitRuleGrant`.

    A grant of some combination of create/push/force-push permissions on
    a `GitRule`, made either to a specific person or to a symbolic
    grantee type.
    """

    __storm_table__ = 'GitRuleGrant'

    id = Int(primary=True)

    repository_id = Int(name='repository', allow_none=False)
    repository = Reference(repository_id, 'GitRepository.id')

    rule_id = Int(name='rule', allow_none=False)
    rule = Reference(rule_id, 'GitRule.id')

    grantee_type = DBEnum(
        name='grantee_type', enum=GitGranteeType, allow_none=False)

    # Only set when grantee_type is GitGranteeType.PERSON.
    grantee_id = Int(
        name='grantee', allow_none=True, validator=validate_person)
    grantee = Reference(grantee_id, 'Person.id')

    can_create = Bool(name='can_create', allow_none=False)
    can_push = Bool(name='can_push', allow_none=False)
    can_force_push = Bool(name='can_force_push', allow_none=False)

    grantor_id = Int(
        name='grantor', allow_none=False, validator=validate_public_person)
    grantor = Reference(grantor_id, 'Person.id')

    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_last_modified = DateTime(
        name='date_last_modified', tzinfo=pytz.UTC, allow_none=False)

    def __init__(self, rule, grantee, can_create, can_push, can_force_push,
                 grantor, date_created):
        """Construct a `GitRuleGrant`.

        :param rule: The `GitRule` being granted.
        :param grantee: An `IPerson`, or a `GitGranteeType` item other
            than `GitGranteeType.PERSON`.
        :param grantor: The `IPerson` creating the grant.
        :param date_created: Used for both date_created and
            date_last_modified.
        :raises ValueError: if grantee is GitGranteeType.PERSON, which
            must instead be expressed by passing a person object.
        """
        if isinstance(grantee, DBItem) and grantee.enum == GitGranteeType:
            if grantee == GitGranteeType.PERSON:
                raise ValueError(
                    "grantee may not be GitGranteeType.PERSON; pass a person "
                    "object instead")
            grantee_type = grantee
            grantee = None
        else:
            grantee_type = GitGranteeType.PERSON
        # Initialise the base classes before assigning columns, for
        # consistency with the other Storm-based model classes in this
        # file (e.g. BugSubscription, SnapBuild).
        super(GitRuleGrant, self).__init__()
        self.repository = rule.repository
        self.rule = rule
        self.grantee_type = grantee_type
        self.grantee = grantee
        self.can_create = can_create
        self.can_push = can_push
        self.can_force_push = can_force_push
        self.grantor = grantor
        self.date_created = date_created
        self.date_last_modified = date_created

    @property
    def combined_grantee(self):
        # Collapse the (grantee_type, grantee) pair into a single value:
        # the person for PERSON grants, otherwise the symbolic type.
        if self.grantee_type == GitGranteeType.PERSON:
            return self.grantee
        else:
            return self.grantee_type

    def __repr__(self):
        if self.grantee_type == GitGranteeType.PERSON:
            grantee_name = "~%s" % self.grantee.name
        else:
            grantee_name = self.grantee_type.title.lower()
        return "<GitRuleGrant [%s] to %s for %s:%s>" % (
            ", ".join(describe_git_permissions(self.permissions)),
            grantee_name, self.repository.unique_name, self.rule.ref_pattern)

    def toDataForJSON(self, media_type):
        """See `IJSONPublishable`."""
        if media_type != "application/json":
            raise ValueError("Unhandled media type %s" % media_type)
        request = get_current_browser_request()
        field = InlineObject(schema=IGitNascentRuleGrant).bind(self)
        marshaller = getMultiAdapter((field, request), IFieldMarshaller)
        return marshaller.unmarshall(None, self)

    def destroySelf(self, user=None):
        """See `IGitRuleGrant`."""
        if user is not None:
            getUtility(IGitActivitySet).logGrantRemoved(self, user)
        # The rule caches its grants; invalidate that cache before
        # removing this row.
        del get_property_cache(self.rule).grants
        Store.of(self).remove(self)
class TranslationTemplatesBuild(BuildFarmJobMixin, Storm):
    """A `BuildFarmJob` extension for translation templates builds."""

    implements(ITranslationTemplatesBuild)
    classProvides(ITranslationTemplatesBuildSource)

    __storm_table__ = 'TranslationTemplatesBuild'

    # All builds of this class share a single build-farm job type.
    job_type = BuildFarmJobType.TRANSLATIONTEMPLATESBUILD

    id = Int(name='id', primary=True)
    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
    branch_id = Int(name='branch', allow_none=False)
    branch = Reference(branch_id, 'Branch.id')

    processor_id = Int(name='processor')
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(
        name='date_first_dispatched', tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    failure_count = Int(name='failure_count', allow_none=False)

    @property
    def title(self):
        """Human-readable description of this build."""
        return u'Translation template build for %s' % (
            self.branch.displayname)

    def __init__(self, build_farm_job, branch, processor):
        """Construct a build record; new builds start in NEEDSBUILD."""
        super(TranslationTemplatesBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.branch = branch
        self.status = BuildStatus.NEEDSBUILD
        self.processor = processor

    def makeJob(self):
        """See `IBuildFarmJobOld`."""
        store = IStore(BranchJob)

        # Pass public HTTP URL for the branch.
        metadata = {
            'branch_url': self.branch.composePublicURL(),
            'build_id': self.id,
            }
        branch_job = BranchJob(
            self.branch, BranchJobType.TRANSLATION_TEMPLATES_BUILD, metadata)
        store.add(branch_job)
        return TranslationTemplatesBuildJob(branch_job)

    @classmethod
    def _getStore(cls, store=None):
        """Return `store` if given, or the default."""
        if store is None:
            return IStore(cls)
        else:
            return store

    @classmethod
    def _getBuildArch(cls):
        """Returns an `IProcessor` to queue a translation build for."""
        # XXX Danilo Segan bug=580429: we hard-code processor to the Ubuntu
        # default processor architecture.  This stops the buildfarm from
        # accidentally dispatching the jobs to private builders.
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        return ubuntu.currentseries.nominatedarchindep.processor

    @classmethod
    def create(cls, branch):
        """See `ITranslationTemplatesBuildSource`."""
        processor = cls._getBuildArch()
        # Each build owns a fresh BuildFarmJob row.
        build_farm_job = getUtility(IBuildFarmJobSource).new(
            BuildFarmJobType.TRANSLATIONTEMPLATESBUILD)
        build = TranslationTemplatesBuild(build_farm_job, branch, processor)
        store = cls._getStore()
        store.add(build)
        store.flush()
        return build

    @classmethod
    def getByID(cls, build_id, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        match = store.find(
            TranslationTemplatesBuild,
            TranslationTemplatesBuild.id == build_id)
        return match.one()

    @classmethod
    def getByBuildFarmJob(cls, buildfarmjob, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        match = store.find(
            TranslationTemplatesBuild, build_farm_job_id=buildfarmjob.id)
        return match.one()

    @classmethod
    def getByBuildFarmJobs(cls, buildfarmjobs, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        rows = store.find(
            TranslationTemplatesBuild,
            TranslationTemplatesBuild.build_farm_job_id.is_in(
                bfj.id for bfj in buildfarmjobs))
        # Eager-load related objects when the result set is iterated.
        return DecoratedResultSet(rows, pre_iter_hook=cls.preloadBuildsData)

    @classmethod
    def preloadBuildsData(cls, builds):
        """Pre-fetch objects related to `builds` to avoid N+1 queries."""
        # Circular imports.
        from lp.services.librarian.model import LibraryFileAlias
        # Load the related branches, products.
        branches = load_related(Branch, builds, ['branch_id'])
        load_related(Product, branches, ['productID'])
        # Preload branches cached associated product series and
        # suite source packages for all the related branches.
        GenericBranchCollection.preloadDataForBranches(branches)
        load_related(LibraryFileAlias, builds, ['log_id'])

    @classmethod
    def findByBranch(cls, branch, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        return store.find(
            TranslationTemplatesBuild,
            TranslationTemplatesBuild.branch == branch)

    @property
    def log_url(self):
        """See `IBuildFarmJob`."""
        if self.log is None:
            return None
        return self.log.http_url
class SnapBuild(PackageBuildMixin, Storm):
    """See `ISnapBuild`."""

    __storm_table__ = 'SnapBuild'

    # All builds of this class share a single build-farm job type.
    job_type = BuildFarmJobType.SNAPBUILD

    id = Int(name='id', primary=True)

    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')

    # Optional link to the SnapBuildRequest that created this build.
    build_request_id = Int(name='build_request', allow_none=True)

    requester_id = Int(name='requester', allow_none=False)
    requester = Reference(requester_id, 'Person.id')

    snap_id = Int(name='snap', allow_none=False)
    snap = Reference(snap_id, 'Snap.id')

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    distro_arch_series_id = Int(name='distro_arch_series', allow_none=False)
    distro_arch_series = Reference(
        distro_arch_series_id, 'DistroArchSeries.id')

    pocket = DBEnum(enum=PackagePublishingPocket, allow_none=False)

    channels = JSON('channels', allow_none=True)

    processor_id = Int(name='processor', allow_none=False)
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(
        name='date_first_dispatched', tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    # VCS revision that was built, reported by the build slave.
    revision_id = Unicode(name='revision_id')

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    upload_log_id = Int(name='upload_log')
    upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')

    dependencies = Unicode(name='dependencies')

    failure_count = Int(name='failure_count', allow_none=False)

    store_upload_metadata = JSON('store_upload_json_data', allow_none=True)

    def __init__(self, build_farm_job, requester, snap, archive,
                 distro_arch_series, pocket, channels, processor, virtualized,
                 date_created, store_upload_metadata=None,
                 build_request=None):
        """Construct a `SnapBuild`."""
        super(SnapBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.requester = requester
        self.snap = snap
        self.archive = archive
        self.distro_arch_series = distro_arch_series
        self.pocket = pocket
        self.channels = channels
        self.processor = processor
        self.virtualized = virtualized
        self.date_created = date_created
        self.store_upload_metadata = store_upload_metadata
        if build_request is not None:
            self.build_request_id = build_request.id
        # New builds always start in NEEDSBUILD.
        self.status = BuildStatus.NEEDSBUILD

    @property
    def build_request(self):
        """See `ISnapBuild`."""
        # Returns None implicitly when no build request is linked.
        if self.build_request_id is not None:
            return self.snap.getBuildRequest(self.build_request_id)

    @property
    def is_private(self):
        """See `IBuildFarmJob`."""
        # Private if any of the snap, its owner, or the archive is
        # private.
        return (
            self.snap.private or
            self.snap.owner.private or
            self.archive.private
        )

    @property
    def title(self):
        """Human-readable description of this build."""
        das = self.distro_arch_series
        snap_title = "%s snap package" % self.snap.name
        # Mention the store name when it differs from the snap's name.
        if (self.snap.store_name is not None and
                self.snap.store_name != self.snap.name):
            snap_title += " (%s)" % self.snap.store_name
        return "%s build of %s in %s %s" % (
            das.architecturetag, snap_title,
            das.distroseries.distribution.name,
            das.distroseries.getSuite(self.pocket))

    @property
    def distribution(self):
        """See `IPackageBuild`."""
        return self.distro_arch_series.distroseries.distribution

    @property
    def distro_series(self):
        """See `IPackageBuild`."""
        return self.distro_arch_series.distroseries

    @property
    def arch_tag(self):
        """See `ISnapBuild`."""
        return self.distro_arch_series.architecturetag

    @property
    def current_component(self):
        """Component to attribute this build to."""
        component = self.archive.default_component
        if component is not None:
            return component
        else:
            # XXX cjwatson 2015-07-17: Hardcode to multiverse for the time
            # being.
            return getUtility(IComponentSet)["multiverse"]

    @property
    def score(self):
        """See `ISnapBuild`."""
        if self.buildqueue_record is None:
            return None
        else:
            return self.buildqueue_record.lastscore

    @property
    def can_be_rescored(self):
        """See `ISnapBuild`."""
        # Only pending builds that are still queued can be rescored.
        return (
            self.buildqueue_record is not None and
            self.status is BuildStatus.NEEDSBUILD)

    @property
    def can_be_cancelled(self):
        """See `ISnapBuild`."""
        if not self.buildqueue_record:
            return False

        cancellable_statuses = [
            BuildStatus.BUILDING,
            BuildStatus.NEEDSBUILD,
            ]
        return self.status in cancellable_statuses

    def rescore(self, score):
        """See `ISnapBuild`."""
        assert self.can_be_rescored, "Build %s cannot be rescored" % self.id
        self.buildqueue_record.manualScore(score)

    def cancel(self):
        """See `ISnapBuild`."""
        if not self.can_be_cancelled:
            return
        # BuildQueue.cancel() will decide whether to go straight to
        # CANCELLED, or go through CANCELLING to let buildd-manager clean up
        # the slave.
        self.buildqueue_record.cancel()

    def calculateScore(self):
        # Base dispatch score plus any administrator-configured boost on
        # the archive.
        return 2510 + self.archive.relative_build_score

    def getMedianBuildDuration(self):
        """Return the median duration of our successful builds."""
        store = IStore(self)
        result = store.find(
            (SnapBuild.date_started, SnapBuild.date_finished),
            SnapBuild.snap == self.snap_id,
            SnapBuild.distro_arch_series == self.distro_arch_series_id,
            SnapBuild.status == BuildStatus.FULLYBUILT)
        result.order_by(Desc(SnapBuild.date_finished))
        # Only look at the nine most recent completed builds.
        durations = [row[1] - row[0] for row in result[:9]]
        if len(durations) == 0:
            return None
        durations.sort()
        return durations[len(durations) // 2]

    def estimateDuration(self):
        """See `IBuildFarmJob`."""
        median = self.getMedianBuildDuration()
        if median is not None:
            return median
        # No relevant build history; fall back to a fixed estimate.
        return timedelta(minutes=30)

    def getFiles(self):
        """See `ISnapBuild`."""
        result = Store.of(self).find(
            (SnapFile, LibraryFileAlias, LibraryFileContent),
            SnapFile.snapbuild == self.id,
            LibraryFileAlias.id == SnapFile.libraryfile_id,
            LibraryFileContent.id == LibraryFileAlias.contentID)
        return result.order_by([LibraryFileAlias.filename, SnapFile.id])

    def getFileByName(self, filename):
        """See `ISnapBuild`.

        :raises NotFoundError: if no file with this name is attached to
            the build.
        """
        if filename.endswith(".txt.gz"):
            # Compressed build log.
            file_object = self.log
        elif filename.endswith("_log.txt"):
            # Upload log.
            file_object = self.upload_log
        else:
            file_object = Store.of(self).find(
                LibraryFileAlias,
                SnapFile.snapbuild == self.id,
                LibraryFileAlias.id == SnapFile.libraryfile_id,
                LibraryFileAlias.filename == filename).one()
        # The log/upload_log attributes may be None or carry a different
        # filename; only return an exact match.
        if file_object is not None and file_object.filename == filename:
            return file_object
        raise NotFoundError(filename)

    def addFile(self, lfa):
        """See `ISnapBuild`."""
        snapfile = SnapFile(snapbuild=self, libraryfile=lfa)
        IMasterStore(SnapFile).add(snapfile)
        return snapfile

    def verifySuccessfulUpload(self):
        """See `IPackageBuild`."""
        # A successful upload must have attached at least one file.
        return not self.getFiles().is_empty()

    def updateStatus(self, status, builder=None, slave_status=None,
                     date_started=None, date_finished=None,
                     force_invalid_transition=False):
        """See `IBuildFarmJob`."""
        old_status = self.status
        super(SnapBuild, self).updateStatus(
            status, builder=builder, slave_status=slave_status,
            date_started=date_started, date_finished=date_finished,
            force_invalid_transition=force_invalid_transition)
        # Record the revision that the slave reports it actually built.
        if slave_status is not None:
            revision_id = slave_status.get("revision_id")
            if revision_id is not None:
                self.revision_id = unicode(revision_id)
        # Let subscribers (e.g. webhooks) know about status changes.
        if status != old_status:
            notify(SnapBuildStatusChangedEvent(self))

    def notify(self, extra_info=None):
        """See `IPackageBuild`."""
        if not config.builddmaster.send_build_notification:
            return
        # Successful builds do not generate mail.
        if self.status == BuildStatus.FULLYBUILT:
            return
        mailer = SnapBuildMailer.forStatus(self)
        mailer.sendAll()

    def lfaUrl(self, lfa):
        """Return the URL for a LibraryFileAlias in this context."""
        if lfa is None:
            return None
        return ProxiedLibraryFileAlias(lfa, self).http_url

    @property
    def log_url(self):
        """See `IBuildFarmJob`."""
        return self.lfaUrl(self.log)

    @property
    def upload_log_url(self):
        """See `IPackageBuild`."""
        return self.lfaUrl(self.upload_log)

    def getFileUrls(self):
        # Proxied URLs for every file attached to this build, in
        # getFiles() order.
        return [self.lfaUrl(lfa) for _, lfa, _ in self.getFiles()]

    @cachedproperty
    def eta(self):
        """The datetime when the build job is estimated to complete.

        This is the BuildQueue.estimated_duration plus the
        Job.date_started or BuildQueue.getEstimatedJobStartTime.
        """
        if self.buildqueue_record is None:
            return None
        queue_record = self.buildqueue_record
        if queue_record.status == BuildQueueStatus.WAITING:
            start_time = queue_record.getEstimatedJobStartTime()
        else:
            start_time = queue_record.date_started
        if start_time is None:
            return None
        duration = queue_record.estimated_duration
        return start_time + duration

    @property
    def estimate(self):
        """If true, the date value is an estimate."""
        if self.date_finished is not None:
            return False
        return self.eta is not None

    @property
    def date(self):
        """The date when the build completed or is estimated to complete."""
        if self.estimate:
            return self.eta
        return self.date_finished

    @property
    def store_upload_jobs(self):
        """Store-upload jobs for this build, newest first."""
        jobs = Store.of(self).find(
            SnapBuildJob,
            SnapBuildJob.snapbuild == self,
            SnapBuildJob.job_type == SnapBuildJobType.STORE_UPLOAD)
        jobs.order_by(Desc(SnapBuildJob.job_id))

        def preload_jobs(rows):
            # Avoid one query per row when accessing the underlying Job.
            load_related(Job, rows, ["job_id"])

        return DecoratedResultSet(
            jobs, lambda job: job.makeDerived(), pre_iter_hook=preload_jobs)

    @cachedproperty
    def last_store_upload_job(self):
        return self.store_upload_jobs.first()

    @property
    def store_upload_status(self):
        """Derived upload status based on the most recent upload job."""
        job = self.last_store_upload_job
        if job is None or job.job.status == JobStatus.SUSPENDED:
            return SnapBuildStoreUploadStatus.UNSCHEDULED
        elif job.job.status in (JobStatus.WAITING, JobStatus.RUNNING):
            return SnapBuildStoreUploadStatus.PENDING
        elif job.job.status == JobStatus.COMPLETED:
            return SnapBuildStoreUploadStatus.UPLOADED
        else:
            # A store_url means the upload itself succeeded but the
            # subsequent release failed.
            if job.store_url:
                return SnapBuildStoreUploadStatus.FAILEDTORELEASE
            else:
                return SnapBuildStoreUploadStatus.FAILEDTOUPLOAD

    @property
    def store_upload_url(self):
        job = self.last_store_upload_job
        return job and job.store_url

    @property
    def store_upload_revision(self):
        job = self.last_store_upload_job
        return job and job.store_revision

    @property
    def store_upload_error_message(self):
        job = self.last_store_upload_job
        return job and job.error_message

    @property
    def store_upload_error_messages(self):
        """Structured error messages from the last upload job, if any."""
        job = self.last_store_upload_job
        if job:
            if job.error_messages:
                return job.error_messages
            elif job.error_message:
                # Fall back to wrapping the single message in the
                # structured form.
                return [{"message": job.error_message}]
        return []

    def scheduleStoreUpload(self):
        """See `ISnapBuild`.

        :raises CannotScheduleStoreUpload: if the snap is not configured
            for store upload, the build has no files, or an upload is
            already in progress or complete.
        """
        if not self.snap.can_upload_to_store:
            raise CannotScheduleStoreUpload(
                "Cannot upload this package to the store because it is not "
                "properly configured.")
        if not self.was_built or self.getFiles().is_empty():
            raise CannotScheduleStoreUpload(
                "Cannot upload this package because it has no files.")
        if self.store_upload_status == SnapBuildStoreUploadStatus.PENDING:
            raise CannotScheduleStoreUpload(
                "An upload of this package is already in progress.")
        elif self.store_upload_status == SnapBuildStoreUploadStatus.UPLOADED:
            raise CannotScheduleStoreUpload(
                "Cannot upload this package because it has already been "
                "uploaded.")
        getUtility(ISnapStoreUploadJobSource).create(self)