class HWSubmission(SQLBase):
    """See `IHWSubmission`.

    A hardware-profile submission; records processing status, privacy
    flags and links to the raw submitted data and fingerprint.
    """

    _table = 'HWSubmission'

    # Both timestamps default to the database-side UTC_NOW at insert.
    date_created = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    date_submitted = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    format = EnumCol(enum=HWSubmissionFormat, notNull=True)
    status = EnumCol(enum=HWSubmissionProcessingStatus, notNull=True)
    private = BoolCol(notNull=True)
    contactable = BoolCol(notNull=True)
    submission_key = StringCol(notNull=True)
    owner = ForeignKey(dbName='owner', foreignKey='Person',
                       storm_validator=validate_public_person)
    distroarchseries = ForeignKey(dbName='distroarchseries',
                                  foreignKey='DistroArchSeries')
    # Nullable: the raw data file may not (yet) be stored in the librarian.
    raw_submission = ForeignKey(dbName='raw_submission',
                                foreignKey='LibraryFileAlias',
                                notNull=False, default=DEFAULT)
    system_fingerprint = ForeignKey(dbName='system_fingerprint',
                                    foreignKey='HWSystemFingerprint',
                                    notNull=True)
    raw_emailaddress = StringCol()

    @property
    def devices(self):
        # All devices recorded for this submission.
        return HWSubmissionDeviceSet().getDevices(submission=self)
class GPGKey(SQLBase):
    """Database record of an OpenPGP key owned by a person."""

    _table = 'GPGKey'
    _defaultOrder = ['owner', 'keyid']

    owner = ForeignKey(dbName='owner', foreignKey='Person', notNull=True)
    keyid = StringCol(dbName='keyid', notNull=True)
    fingerprint = StringCol(dbName='fingerprint', notNull=True)
    keysize = IntCol(dbName='keysize', notNull=True)
    algorithm = EnumCol(dbName='algorithm', enum=GPGKeyAlgorithm,
                        notNull=True)
    active = BoolCol(dbName='active', notNull=True)
    can_encrypt = BoolCol(dbName='can_encrypt', notNull=False)

    @property
    def keyserverURL(self):
        """URL of this key's public page on the configured keyserver."""
        handler = getUtility(IGPGHandler)
        return handler.getURLForKeyInServer(self.fingerprint, public=True)

    @property
    def displayname(self):
        """Short display form: <size><algorithm>/<fingerprint>."""
        parts = (self.keysize, self.algorithm.title, self.fingerprint)
        return '%s%s/%s' % parts
class Profile(SQLObject):
    """A set of SSH-certificate options; every field is nullable.

    NOTE(review): field names mirror OpenSSH certificate options
    (principals, force-command, permit-*-forwarding, ...) — confirm
    against the code that consumes this table.
    """
    principals = StringCol(default=None)
    force_command = StringCol(default=None)
    source_address = StringCol(default=None)
    agent_forwarding = BoolCol(default=None)
    x11_forwarding = BoolCol(default=None)
    port_forwarding = BoolCol(default=None)
    pty = BoolCol(default=None)
    user_rc = BoolCol(default=None)
    validity = StringCol(default=None)

    def delete_profile(self):
        # Remove this row via SQLObject's class-level delete.
        self.delete(self.id)
class Key(SQLObject):
    """A public key, optionally acting as a CA, with attached certs."""
    name = UnicodeCol(unique=True)
    bits = IntCol()
    pubkey = StringCol(unique=True)
    revoked = BoolCol(default=False)
    exported = BoolCol(default=False)
    certs = MultipleJoin("Cert", joinColumn="key_id")
    ca = ForeignKey("CA", default=None)
    is_ca = BoolCol(default=False)

    def delete_key(self):
        # Cascade: remove all dependent certs before deleting the key row
        # itself, so no Cert is left pointing at a missing key.
        for cert in self.certs:
            cert.delete_cert()
        self.delete(self.id)
class PillarName(SQLBase):
    """Namespace row mapping a unique name to one pillar object."""

    implements(IPillarName)

    _table = 'PillarName'
    _defaultOrder = 'name'

    name = StringCol(dbName='name', notNull=True, unique=True,
                     alternateID=True)
    product = ForeignKey(foreignKey='Product', dbName='product')
    project = ForeignKey(foreignKey='ProjectGroup', dbName='project')
    distribution = ForeignKey(foreignKey='Distribution',
                              dbName='distribution')
    active = BoolCol(dbName='active', notNull=True, default=True)
    alias_for = ForeignKey(foreignKey='PillarName', dbName='alias_for',
                           default=None)

    @property
    def pillar(self):
        """The distribution, project group or product this row names."""
        # Follow a single level of aliasing if this name is an alias.
        target = self.alias_for if self.alias_for is not None else self
        # Exactly one of these references is expected to be set; the
        # precedence order matches the historical checks.
        for attribute in ('distribution', 'project', 'product'):
            candidate = getattr(target, attribute)
            if candidate is not None:
                return candidate
        raise AssertionError("Unknown pillar type: %s" % target.name)
class BugNotification(SQLBase):
    """A textual representation about a bug change."""

    implements(IBugNotification)

    message = ForeignKey(dbName='message', notNull=True,
                         foreignKey='Message')
    activity = ForeignKey(dbName='activity', notNull=False,
                          foreignKey='BugActivity')
    bug = ForeignKey(dbName='bug', notNull=True, foreignKey='Bug')
    is_comment = BoolCol(notNull=True)
    # Null until the notification has actually been sent.
    date_emailed = UtcDateTimeCol(notNull=False)
    status = EnumCol(dbName='status', schema=BugNotificationStatus,
                     default=BugNotificationStatus.PENDING, notNull=True)

    @property
    def recipients(self):
        """See `IBugNotification`."""
        # Order by id so recipient iteration is deterministic.
        return BugNotificationRecipient.selectBy(bug_notification=self,
                                                 orderBy='id')

    @property
    def bug_filters(self):
        """See `IStructuralSubscription`."""
        # Join through BugNotificationFilter to reach the subscription
        # filters that caused this notification to be generated.
        return IStore(BugSubscriptionFilter).find(
            BugSubscriptionFilter,
            (BugSubscriptionFilter.id ==
                BugNotificationFilter.bug_subscription_filter_id),
            BugNotificationFilter.bug_notification == self)
class UserAccount(SQLObject):
    """A store of user information.

    A user account is created when a user is registered.
    """
    username = StringCol(length=20, notNone=True, unique=True,
                         alternateID=True)
    password = StringCol(length=40, notNone=True)  # Store SHA-1 hex hashes.
    allowLogin = BoolCol(default=True)  # If False, account login is disabled.
    email = StringCol(default=None, length=320)  # See RFC 2821 section 4.5.3.1.
    # Don't split name field - see http://people.w3.org/rishida/blog/?p=100
    realname = UnicodeCol(default=None, length=40)
    profile = UnicodeCol(default=None)
    #country = StringCol(length=2, default=None) # ISO 3166 country code.
    created = DateTimeCol(default=datetime.now)
    lastLogin = DateTimeCol(default=None)
    # friends = MultipleJoin('UserFriend', joinColumn='from_user')

    def _set_username(self, value):
        """Validate and store *value* as the username.

        Raises ValueError for a non-str value, a length outside 1..20,
        or any character outside [A-Za-z0-9_].
        """
        if not isinstance(value, str) or not (1 <= len(value) <= 20):
            raise ValueError("Invalid specification of username")
        # BUG FIX: the original pattern used the range A-z, which also
        # matches the punctuation between 'Z' and 'a' ([ \ ] ^ _ `) and
        # so let those characters through. Spell both cases explicitly.
        if re.search("[^A-Za-z0-9_]", value):
            raise ValueError("Username may only be alphanumeric characters")
        self._SO_set_username(value)

    def _set_password(self, value):
        """Validate and store *value* (a 1..40-char str) as the password."""
        if not isinstance(value, str) or not (1 <= len(value) <= 40):
            raise ValueError("Invalid specification of password")
        self._SO_set_password(value)

    def _set_email(self, value):
        """Validate and store *value* as the email address (None allowed)."""
        # This regexp matches virtually all well-formatted email addresses.
        if value and not email_re.search(value):
            raise ValueError("Invalid or ill-formatted email address")
        self._SO_set_email(value)
class CA(SQLObject):
    """A certificate authority backed by one signing `Key`."""
    name = UnicodeCol(unique=True)
    key = ForeignKey("Key")
    # Monotonic serial counter for issued certificates (starts at 0).
    serial = IntCol(default=0)
    hostca = BoolCol(default=False)
    # Keys signed by this CA (reverse of Key.ca).
    signed = MultipleJoin("Key", joinColumn="ca_id")
    # Key revocation list blob.
    krl = BLOBCol()
class Privilege(SQLObject):
    """An access-control entry granting a member/role rights on a repo."""
    repository = ForeignKey("Repository")
    tag = StringCol(length=255)
    branch = StringCol(length=255)
    # Rights encoded as up to four characters, presumably a subset of
    # C/R/U/D — confirm against the permission-checking code.
    crud = StringCol(length=4)
    member = ForeignKey("Member")
    role = ForeignKey("Role")
    public = BoolCol(default=False)
class SignedCodeOfConduct(SQLBase):
    """Code of Conduct."""

    _table = 'SignedCodeOfConduct'

    owner = ForeignKey(foreignKey="Person", dbName="owner", notNull=True)
    signedcode = StringCol(dbName='signedcode', notNull=False, default=None)
    signing_key_fingerprint = Unicode()
    datecreated = UtcDateTimeCol(dbName='datecreated', notNull=True,
                                 default=UTC_NOW)
    recipient = ForeignKey(foreignKey="Person", dbName="recipient",
                           notNull=False, default=None)
    admincomment = StringCol(dbName='admincomment', notNull=False,
                             default=None)
    active = BoolCol(dbName='active', notNull=True, default=False)

    @cachedproperty
    def signingkey(self):
        """The GPG key matching our fingerprint, or None if unsigned."""
        if self.signing_key_fingerprint is None:
            return None
        key_set = getUtility(IGPGKeySet)
        return key_set.getByFingerprint(self.signing_key_fingerprint)

    @property
    def displayname(self):
        """Build a Fancy Title for CoC."""
        date_part = self.datecreated.strftime('%Y-%m-%d')
        if self.signingkey:
            detail = ': digitally signed by %s (%s)' % (
                self.owner.displayname, self.signingkey.displayname)
        else:
            detail = (': paper submission accepted by %s'
                      % self.recipient.displayname)
        return date_part + detail

    def sendAdvertisementEmail(self, subject, content):
        """See ISignedCodeOfConduct."""
        assert self.owner.preferredemail
        fromaddress = format_address(
            "Launchpad Code Of Conduct System",
            config.canonical.noreply_from_address)
        template = get_email_template('signedcoc-acknowledge.txt',
                                      app='registry')
        message = template % {
            'user': self.owner.displayname,
            'content': content,
            }
        to_address = str(self.owner.preferredemail.email)
        simple_sendmail(fromaddress, to_address, subject, message)
class Config(SQLObject):
    """yokadi config: one name/value setting per row."""
    class sqlmeta:
        defaultOrder = "name"
    name = UnicodeCol(alternateID=True, notNone=True)
    value = UnicodeCol(default="", notNone=True)
    # True for settings managed by the application rather than the user.
    system = BoolCol(default=False, notNone=True)
    # Human-readable description of the setting.
    desc = UnicodeCol(default="", notNone=True)
class PodEntry(SQLObject):
    '''A lookup table of all your podcast subscriptions'''
    pod_title = StringCol()
    # Hash identifying the feed/entry — confirm derivation at the caller.
    hashed = UnicodeCol()
    pub_date = DateTimeCol()
    cover = StringCol()
    description = StringCol()
    category = StringCol()
    # Whether the subscription is currently active.
    active = BoolCol()
class Distribution(SQLObject):
    """A published ONS dataset distribution (table 'ons_distribution')."""
    class sqlmeta:
        table = 'ons_distribution'
    uri = StringCol()
    national_statistic = BoolCol()
    version = StringCol()
    edition = StringCol()
    release_date = DateTimeCol()
    # Date only (no time) for the announced next release.
    next_release = DateCol()
    contacts = RelatedJoin('Contact')
class SpecificationMessage(SQLBase):
    """A table linking specifictions and messages."""

    _table = 'SpecificationMessage'

    specification = ForeignKey(dbName='specification',
                               foreignKey='Specification', notNull=True)
    message = ForeignKey(dbName='message', foreignKey='Message',
                         notNull=True)
    # Hidden messages stay in the database but are not displayed.
    visible = BoolCol(notNull=True, default=True)
class Competition(SQLObject):
    """A timed competition with competitors and recorded splits."""
    name = UnicodeCol()
    place = UnicodeCol()
    plannedStartTime = DateTimeCol(default=None)
    startTime = DateTimeCol(default=None)
    finishTime = DateTimeCol(default=None)
    notes = UnicodeCol(default=None)
    # Tri-state: None (unset) / True / False.
    active = BoolCol(default=None)
    competitors = MultipleJoin('Competitor')
    splits = MultipleJoin('Split')
class PhysicalCardSet(SQLObject):
    """A named, possibly nested, set of physical cards."""
    # Schema version of this table definition.
    tableversion = 7
    name = UnicodeCol(alternateID=True, length=MAX_ID_LENGTH)
    author = UnicodeCol(default='')
    comment = UnicodeCol(default='')
    annotations = UnicodeCol(default='')
    inuse = BoolCol(default=False)
    # Card sets form a tree; None marks a root set.
    parent = ForeignKey('PhysicalCardSet', default=None)
    cards = RelatedJoin('PhysicalCard', intermediateTable='physical_map',
                        createRelatedTable=False)
    parentIndex = DatabaseIndex(parent)
class Competitor(SQLObject):
    """A participant in a `Competition`, with category and splits."""
    name = UnicodeCol(default=None)
    contact = UnicodeCol(default=None)
    team = UnicodeCol(default=None)
    number = IntCol(default=None)
    # Whether the competitor actually starts (default: not starting).
    starting = BoolCol(default=False)
    category = ForeignKey('Category')
    competition = ForeignKey('Competition')
    splits = MultipleJoin('Split')
class Project(SQLObject):
    """A named project carrying an arbitrary keyword=value mapping."""

    class sqlmeta:
        defaultOrder = "name"

    name = UnicodeCol(alternateID=True, notNone=True)
    active = BoolCol(default=True)
    keywords = RelatedJoin("Keyword", createRelatedTable=False,
                           intermediateTable="project_keyword",
                           joinColumn="project_id",
                           otherColumn="keyword_id")

    def __unicode__(self):
        keywords = self.getKeywordsAsString()
        # Append keywords in parentheses only when any exist.
        return "%s (%s)" % (self.name, keywords) if keywords else self.name

    def setKeywordDict(self, dct):
        """
        Defines keywords of a project.
        Dict is of the form: keywordName => value
        """
        # Replace semantics: drop every existing association first.
        for existing in ProjectKeyword.selectBy(project=self):
            existing.destroySelf()
        for name, value in dct.items():
            keyword = Keyword.selectBy(name=name)[0]
            ProjectKeyword(project=self, keyword=keyword, value=value)

    def getKeywordDict(self):
        """
        Returns all keywords of a project as a dict of the form:
        keywordName => value
        """
        associations = ProjectKeyword.selectBy(project=self)
        return dict((pk.keyword.name, pk.value) for pk in associations)

    def getKeywordsAsString(self):
        """
        Returns all keywords as a string like "key1=value1, key2=value2..."
        Value is not displayed if none
        """
        parts = [
            "%s=%s" % (key, value) if value else key
            for key, value in self.getKeywordDict().items()]
        return ", ".join(parts)
class PollOption(SQLBase):
    """See IPollOption."""

    _table = 'PollOption'
    _defaultOrder = ['title', 'id']

    poll = ForeignKey(dbName='poll', foreignKey='Poll', notNull=True)
    name = StringCol(notNull=True)
    title = StringCol(notNull=True)
    # Options default to inactive until explicitly enabled.
    active = BoolCol(notNull=True, default=False)
class SOValidation(SQLObject):
    """Fixture exercising column validators across SQLObject column types."""
    name = StringCol(validator=validators.PlainText(), default='x',
                     dbName='name_col')
    # NOTE(review): 'validator2' is a non-standard keyword here and on
    # name9 — presumably a secondary-validator hook; confirm it is
    # actually honoured by this SQLObject version.
    name2 = StringCol(validator2=validators.ConfirmType(type=str),
                      default='y')
    name3 = IntCol(validator=validators.Wrapper(fromPython=int), default=100)
    name4 = FloatCol(default=2.718)
    name5 = PickleCol(default=None)
    name6 = BoolCol(default=None)
    name7 = UnicodeCol(default=None)
    name8 = IntCol(default=None)
    name9 = IntCol(validator=validator1, validator2=validator2, default=0)
class system_modules(SQLObject):
    """A pluggable system module with averaged short/medium/long rates."""
    name = StringCol(alternateID=True, unique=True)
    type = StringCol()  # It's the old block column: Gatherer, MathsAnalysis ...
    version = FloatCol()
    active = BoolCol()
    advices = MultipleJoin('system_advices')
    marks = MultipleJoin('system_modules_marks')

    def _get_rates(self):
        """Average this module's recent marks into three rate buckets.

        Returns a dict with keys 'rateshort', 'ratemedium' and
        'ratelong'. Negative totals are clamped to zero before
        averaging. With no recent marks all rates are zero (the
        original code raised ZeroDivisionError in that case).
        """
        # NOTE(review): timedelta(3600 * 24) is 86,400 *days*; this was
        # almost certainly meant to be timedelta(seconds=3600 * 24),
        # i.e. one day. Behaviour kept as-is pending confirmation.
        actions = list(system_modules_marks.select(AND(
            system_modules_marks.q.moduleID == self.q.id,
            system_modules_marks.q.date >
                datetime.datetime.now() - datetime.timedelta(3600 * 24))))
        rates = {'rateshort': 0.00, 'ratemedium': 0.00, 'ratelong': 0.00}
        if not actions:
            # Guard against division by zero below.
            return rates
        for action in actions:
            rates['rateshort'] += action.actionS
            rates['ratemedium'] += action.actionM
            rates['ratelong'] += action.actionL
        count = len(actions)
        for key in rates:
            # Clamp negative totals to zero, then average over the marks.
            rates[key] = max(rates[key], 0) / count
        return rates

    def _get_rateshort(self):
        return self._get_rates()['rateshort']

    def _get_ratemedium(self):
        return self._get_rates()['ratemedium']

    def _get_ratelong(self):
        return self._get_rates()['ratelong']
class Asset(ICTVObject):
    """ Represents the metadata of a file stored by the StorageManager. """
    plugin_channel = ForeignKey('PluginChannel', notNone=True, cascade=True)
    user = ForeignKey('User')  # The user who uploaded the file, if known
    filename = StringCol(
        default=None
    )  # The original filename of the asset, beginning with a period
    mime_type = StringCol(
        default=None)  # The MIME type associated with the file
    extension = StringCol(default=None)
    file_size = BigIntCol(default=None)  # File size in kilobytes
    created = DateTimeCol(default=DateTimeCol.now)
    last_reference = DateTimeCol(default=DateTimeCol.now)
    in_flight = BoolCol(
        default=False)  # Is this asset being cached at the moment
    is_cached = BoolCol(
        default=False)  # Is this asset a cached asset from CacheManager

    def _get_path(self, force=False):
        """ Returns the path to the asset on the filesystem or None if the
        asset file is being cached.

        Side effect: touches last_reference on every call. With
        force=True the in-flight flag is cleared and a path is always
        returned.
        """
        self.last_reference = datetime.now()
        if not force and self.in_flight:
            return None
        elif force:
            self.in_flight = False  # Prevent failures in the caching process to block asset in flight mode
        # Path layout: static/storage/<channel id>/<asset id><extension>.
        return os.path.join(
            'static', 'storage', str(self.plugin_channel.id),
            str(self.id) + (self.extension if self.extension is not None
                            else ''))

    def write_to_asset_file(self, content):
        """ Writes the content to the asset file. """
        # self.path resolves through _get_path via SQLObject's property
        # magic; intermediate directories are created as needed.
        asset_path = os.path.join(get_root_path(), self.path)
        os.makedirs(os.path.dirname(asset_path), exist_ok=True)
        with open(asset_path, 'wb') as f:
            f.write(content)
class TranslationRelicensingAgreement(SQLBase):
    """A person's recorded decision on relicensing their translations."""

    _table = 'TranslationRelicensingAgreement'

    person = ForeignKey(foreignKey='Person', dbName='person', notNull=True,
                        storm_validator=validate_public_person)
    # The decision itself; defaults to allowing relicensing.
    allow_relicensing = BoolCol(dbName='allow_relicensing', notNull=True,
                                default=True)
    date_decided = UtcDateTimeCol(dbName='date_decided', notNull=True,
                                  default=UTC_NOW)
class PhysicalCardSet_PCv3(SQLObject):
    """Physical Card Set to update from Physical Card v2"""
    class sqlmeta:
        """meta class used to set the correct table"""
        # Reuse the live table name; disable the value cache during the
        # migration.
        table = PhysicalCardSet.sqlmeta.table
        cacheValues = False
    name = UnicodeCol(alternateID=True, length=MAX_ID_LENGTH)
    author = UnicodeCol(default='')
    comment = UnicodeCol(default='')
    annotations = UnicodeCol(default='')
    inuse = BoolCol(default=False)
    parent = ForeignKey('PhysicalCardSet_PCv3', default=None)
    # Joins against the v2 card table during the upgrade.
    cards = RelatedJoin('PhysicalCard_v2', intermediateTable='physical_map',
                        createRelatedTable=False)
class PersonLocation(SQLBase):
    """A person's location."""

    _defaultOrder = ['id']

    date_created = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    # One location row per person (unique=True).
    person = ForeignKey(
        dbName='person', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True, unique=True)
    # Coordinates are optional; time_zone is required.
    latitude = FloatCol(notNull=False)
    longitude = FloatCol(notNull=False)
    time_zone = StringCol(notNull=True)
    last_modified_by = ForeignKey(
        dbName='last_modified_by', foreignKey='Person',
        storm_validator=validate_public_person, notNull=True)
    date_last_modified = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    visible = BoolCol(notNull=True, default=True)
class SpecificationSubscription(SQLBase):
    """A subscription for person to a spec."""

    implements(ISpecificationSubscription)

    _table = 'SpecificationSubscription'

    specification = ForeignKey(dbName='specification',
                               foreignKey='Specification', notNull=True)
    person = ForeignKey(dbName='person', foreignKey='Person',
                        storm_validator=validate_person, notNull=True)
    essential = BoolCol(notNull=True, default=False)

    def canBeUnsubscribedByUser(self, user):
        """See `ISpecificationSubscription`."""
        if user is None:
            return False
        if not IPersonRoles.providedBy(user):
            user = IPersonRoles(user)
        # Spec owners, the subscriber themselves, and admins may always
        # unsubscribe.
        allowed = (
            user.inTeam(self.specification.owner)
            or user.inTeam(self.person)
            or user.in_admin)
        if allowed:
            return True
        # XXX Abel Deuring 2012-11-21, bug=1081677
        # People who subscribed users should be able to unsubscribe
        # them again, similar to branch subscriptions. This is
        # essential if somebody was erroneuosly subscribed to a
        # proprietary or embargoed specification. Unfortunately,
        # SpecificationSubscription does not record who subscribed
        # somebody else, but if the specification is private, we can
        # check who issued the artifact grant.
        artifacts = getUtility(IAccessArtifactSource).find(
            [self.specification])
        wanted = [(artifact, self.person) for artifact in artifacts]
        if not wanted:
            return False
        grants = getUtility(IAccessArtifactGrantSource).find(wanted)
        return any(user.inTeam(grant.grantor) for grant in grants)
class PluginParamAccessRights(ICTVObject):
    """Per-plugin, per-parameter read/write rights for each user level."""
    plugin = ForeignKey('Plugin', cascade=True)
    name = StringCol(notNone=True)
    # Defaults: contributors get nothing; channel admins may read;
    # site administrators may read and write.
    channel_contributor_read = BoolCol(default=False)
    channel_contributor_write = BoolCol(default=False)
    channel_administrator_read = BoolCol(default=True)
    channel_administrator_write = BoolCol(default=False)
    administrator_read = BoolCol(default=True)
    administrator_write = BoolCol(default=True)

    def get_access_rights_for(self, permission_level):
        """ Returns a tuple of booleans (read_access, write_access)
        indicating which type of rights this permission level gives on
        this. """
        # Identity comparison against the enum members; unknown levels
        # fall through to (False, False).
        if permission_level is UserPermissions.super_administrator:
            return True, True
        if permission_level is UserPermissions.administrator:
            return self.administrator_read, self.administrator_write
        if permission_level is UserPermissions.channel_administrator:
            return (self.channel_administrator_read,
                    self.channel_administrator_write)
        if permission_level is UserPermissions.channel_contributor:
            return (self.channel_contributor_read,
                    self.channel_contributor_write)
        return False, False
class BuildQueue(SQLBase):
    """A build-farm job waiting for, or running on, a builder.

    Wraps a generic `Job` row with scheduling metadata (score,
    processor, virtualization) and provides dispatch-time estimation.
    """

    implements(IBuildQueue)

    _table = "BuildQueue"
    _defaultOrder = "id"

    def __init__(self, job, job_type=DEFAULT, estimated_duration=DEFAULT,
                 virtualized=DEFAULT, processor=DEFAULT, lastscore=None):
        super(BuildQueue, self).__init__(
            job_type=job_type, job=job, virtualized=virtualized,
            processor=processor, estimated_duration=estimated_duration,
            lastscore=lastscore)
        # Score new entries automatically unless a score was supplied.
        if lastscore is None and self.specific_job is not None:
            self.score()

    job = ForeignKey(dbName='job', foreignKey='Job', notNull=True)
    job_type = EnumCol(
        enum=BuildFarmJobType, notNull=True,
        default=BuildFarmJobType.PACKAGEBUILD, dbName='job_type')
    builder = ForeignKey(dbName='builder', foreignKey='Builder',
                         default=None)
    logtail = StringCol(dbName='logtail', default=None)
    lastscore = IntCol(dbName='lastscore', default=0)
    manual = BoolCol(dbName='manual', default=False)
    estimated_duration = IntervalCol()
    processor = ForeignKey(dbName='processor', foreignKey='Processor')
    virtualized = BoolCol(dbName='virtualized')

    @cachedproperty
    def specific_job(self):
        """See `IBuildQueue`."""
        # Dispatch on job_type to the concrete build-farm job class.
        specific_class = specific_job_classes()[self.job_type]
        return specific_class.getByJob(self.job)

    def _clear_specific_job_cache(self):
        # Invalidate the cachedproperty above.
        del get_property_cache(self).specific_job

    @staticmethod
    def preloadSpecificJobData(queues):
        # Bulk-load the specific jobs for many queue entries, grouped by
        # job type so each class can preload its own data in one pass.
        key = attrgetter('job_type')
        for job_type, grouped_queues in groupby(queues, key=key):
            specific_class = specific_job_classes()[job_type]
            queue_subset = list(grouped_queues)
            job_subset = load_related(Job, queue_subset, ['jobID'])
            # We need to preload the build farm jobs early to avoid
            # the call to _set_build_farm_job to look up BuildFarmBuildJobs
            # one by one.
            specific_class.preloadBuildFarmJobs(job_subset)
            specific_jobs = list(specific_class.getByJobs(job_subset))
            if len(specific_jobs) == 0:
                continue
            specific_class.preloadJobsData(specific_jobs)
            specific_jobs_dict = dict(
                (specific_job.job, specific_job)
                for specific_job in specific_jobs)
            # Seed each queue entry's specific_job cache.
            for queue in queue_subset:
                cache = get_property_cache(queue)
                cache.specific_job = specific_jobs_dict[queue.job]

    @property
    def date_started(self):
        """See `IBuildQueue`."""
        return self.job.date_started

    @property
    def current_build_duration(self):
        """See `IBuildQueue`."""
        date_started = self.date_started
        if date_started is None:
            return None
        else:
            return self._now() - date_started

    def destroySelf(self):
        """Remove this record and associated job/specific_job."""
        # Capture references before the row disappears.
        job = self.job
        specific_job = self.specific_job
        builder = self.builder
        SQLBase.destroySelf(self)
        specific_job.cleanUp()
        job.destroySelf()
        if builder is not None:
            # The builder's currentjob cache is now stale.
            del get_property_cache(builder).currentjob
        self._clear_specific_job_cache()

    def manualScore(self, value):
        """See `IBuildQueue`."""
        self.lastscore = value
        # Mark as manually scored so score() will not overwrite it.
        self.manual = True

    def score(self):
        """See `IBuildQueue`."""
        if self.manual:
            return
        # Allow the `IBuildFarmJob` instance with the data/logic specific to
        # the job at hand to calculate the score as appropriate.
        self.lastscore = self.specific_job.score()

    def markAsBuilding(self, builder):
        """See `IBuildQueue`."""
        self.builder = builder
        if self.job.status != JobStatus.RUNNING:
            self.job.start()
        self.specific_job.jobStarted()
        if builder is not None:
            del get_property_cache(builder).currentjob

    def reset(self):
        """See `IBuildQueue`."""
        builder = self.builder
        self.builder = None
        if self.job.status != JobStatus.WAITING:
            self.job.queue()
        # Clear all progress so the job is re-queued from scratch.
        self.job.date_started = None
        self.job.date_finished = None
        self.logtail = None
        self.specific_job.jobReset()
        if builder is not None:
            del get_property_cache(builder).currentjob

    def cancel(self):
        """See `IBuildQueue`."""
        self.specific_job.jobCancel()
        self.destroySelf()

    def _getFreeBuildersCount(self, processor, virtualized):
        """How many builders capable of running jobs for the given processor
        and virtualization combination are idle/free at present?"""
        query = """
            SELECT COUNT(id) FROM builder
            WHERE
                builderok = TRUE AND manual = FALSE
                AND id NOT IN (
                    SELECT builder FROM BuildQueue
                    WHERE builder IS NOT NULL)
                AND virtualized = %s
            """ % sqlvalues(normalize_virtualization(virtualized))
        if processor is not None:
            query += """
                AND processor = %s
            """ % sqlvalues(processor)
        result_set = IStore(BuildQueue).execute(query)
        free_builders = result_set.get_one()[0]
        return free_builders

    def _estimateTimeToNextBuilder(self):
        """Estimate time until next builder becomes available.

        For the purpose of estimating the dispatch time of the job of
        interest (JOI) we need to know how long it will take until the
        job at the head of JOI's queue is dispatched.

        There are two cases to consider here: the head job is

            - processor dependent: only builders with the matching
              processor/virtualization combination should be considered.
            - *not* processor dependent: all builders with the matching
              virtualization setting should be considered.

        :return: The estimated number of seconds untils a builder capable
            of running the head job becomes available.
        """
        head_job_platform = self._getHeadJobPlatform()

        # Return a zero delay if we still have free builders available for the
        # given platform/virtualization combination.
        free_builders = self._getFreeBuildersCount(*head_job_platform)
        if free_builders > 0:
            return 0

        head_job_processor, head_job_virtualized = head_job_platform

        now = self._now()
        delay_query = """
            SELECT MIN(
              CASE WHEN
                EXTRACT(EPOCH FROM
                  (BuildQueue.estimated_duration -
                   (((%s AT TIME ZONE 'UTC') - Job.date_started)))) >= 0
              THEN
                EXTRACT(EPOCH FROM
                  (BuildQueue.estimated_duration -
                   (((%s AT TIME ZONE 'UTC') - Job.date_started))))
              ELSE
                -- Assume that jobs that have overdrawn their estimated
                -- duration time budget will complete within 2 minutes.
                -- This is a wild guess but has worked well so far.
                --
                -- Please note that this is entirely innocuous i.e. if our
                -- guess is off nothing bad will happen but our estimate will
                -- not be as good as it could be.
                120
              END)
            FROM
                BuildQueue, Job, Builder
            WHERE
                BuildQueue.job = Job.id
                AND BuildQueue.builder = Builder.id
                AND Builder.manual = False
                AND Builder.builderok = True
                AND Job.status = %s
                AND Builder.virtualized = %s
            """ % sqlvalues(
                now, now, JobStatus.RUNNING,
                normalize_virtualization(head_job_virtualized))

        if head_job_processor is not None:
            # Only look at builders with specific processor types.
            delay_query += """
                AND Builder.processor = %s
                """ % sqlvalues(head_job_processor)

        result_set = IStore(BuildQueue).execute(delay_query)
        head_job_delay = result_set.get_one()[0]
        return (0 if head_job_delay is None else int(head_job_delay))

    def _getPendingJobsClauses(self):
        """WHERE clauses for pending job queries, used for dipatch time
        estimation."""
        virtualized = normalize_virtualization(self.virtualized)
        clauses = """
            BuildQueue.job = Job.id
            AND Job.status = %s
            AND (
                -- The score must be either above my score or the
                -- job must be older than me in cases where the
                -- score is equal.
                BuildQueue.lastscore > %s OR
                (BuildQueue.lastscore = %s AND Job.id < %s))
            -- The virtualized values either match or the job
            -- does not care about virtualization and the job
            -- of interest (JOI) is to be run on a virtual builder
            -- (we want to prevent the execution of untrusted code
            -- on native builders).
            AND COALESCE(buildqueue.virtualized, TRUE) = %s
            """ % sqlvalues(
                JobStatus.WAITING, self.lastscore, self.lastscore, self.job,
                virtualized)
        processor_clause = """
            AND (
                -- The processor values either match or the candidate
                -- job is processor-independent.
                buildqueue.processor = %s OR
                buildqueue.processor IS NULL)
            """ % sqlvalues(self.processor)
        # We don't care about processors if the estimation is for a
        # processor-independent job.
        if self.processor is not None:
            clauses += processor_clause
        return clauses

    def _getHeadJobPlatform(self):
        """Find the processor and virtualization setting for the head job.

        Among the jobs that compete with the job of interest (JOI) for
        builders and are queued ahead of it the head job is the one in pole
        position i.e. the one to be dispatched to a builder next.

        :return: A (processor, virtualized) tuple which is the head job's
        platform or None if the JOI is the head job.
        """
        my_platform = (
            getattr(self.processor, 'id', None),
            normalize_virtualization(self.virtualized))
        query = """
            SELECT
                processor,
                virtualized
            FROM
                BuildQueue, Job
            WHERE
            """
        query += self._getPendingJobsClauses()
        query += """
            ORDER BY lastscore DESC, job LIMIT 1
            """
        result = IStore(BuildQueue).execute(query).get_one()
        return (my_platform if result is None else result)

    def _estimateJobDelay(self, builder_stats):
        """Sum of estimated durations for *pending* jobs ahead in queue.

        For the purpose of estimating the dispatch time of the job of
        interest (JOI) we need to know the delay caused by all the pending
        jobs that are ahead of the JOI in the queue and that compete with it
        for builders.

        :param builder_stats: A dictionary with builder counts where the
            key is a (processor, virtualized) combination (aka "platform") and
            the value is the number of builders that can take on jobs
            requiring that combination.
        :return: An integer value holding the sum of delays (in seconds)
            caused by the jobs that are ahead of and competing with the JOI.
        """
        def jobs_compete_for_builders(a, b):
            """True if the two jobs compete for builders."""
            a_processor, a_virtualized = a
            b_processor, b_virtualized = b
            if a_processor is None or b_processor is None:
                # If either of the jobs is platform-independent then the two
                # jobs compete for the same builders if the virtualization
                # settings match.
                if a_virtualized == b_virtualized:
                    return True
            else:
                # Neither job is platform-independent, match processor and
                # virtualization settings.
                return a == b

        my_platform = (
            getattr(self.processor, 'id', None),
            normalize_virtualization(self.virtualized))
        query = """
            SELECT
                BuildQueue.processor,
                BuildQueue.virtualized,
                COUNT(BuildQueue.job),
                CAST(EXTRACT(
                    EPOCH FROM
                        SUM(BuildQueue.estimated_duration)) AS INTEGER)
            FROM
                BuildQueue, Job
            WHERE
            """
        query += self._getPendingJobsClauses()
        query += """
            GROUP BY BuildQueue.processor, BuildQueue.virtualized
            """

        delays_by_platform = IStore(BuildQueue).execute(query).get_all()

        # This will be used to capture per-platform delay totals.
        delays = defaultdict(int)
        # This will be used to capture per-platform job counts.
        job_counts = defaultdict(int)

        # Divide the estimated duration of the jobs as follows:
        #   - if a job is tied to a processor TP then divide the estimated
        #     duration of that job by the number of builders that target TP
        #     since only these can build the job.
        #   - if the job is processor-independent then divide its estimated
        #     duration by the total number of builders with the same
        #     virtualization setting because any one of them may run it.
        for processor, virtualized, job_count, delay in delays_by_platform:
            virtualized = normalize_virtualization(virtualized)
            platform = (processor, virtualized)
            builder_count = builder_stats.get(platform, 0)
            if builder_count == 0:
                # There is no builder that can run this job, ignore it
                # for the purpose of dispatch time estimation.
                continue
            if jobs_compete_for_builders(my_platform, platform):
                # The jobs that target the platform at hand compete with
                # the JOI for builders, add their delays.
                delays[platform] += delay
                job_counts[platform] += job_count

        sum_of_delays = 0
        # Now divide the delays based on a jobs/builders comparison.
        for platform, duration in delays.iteritems():
            jobs = job_counts[platform]
            builders = builder_stats[platform]
            # If there are less jobs than builders that can take them on,
            # the delays should be averaged/divided by the number of jobs.
            denominator = (jobs if jobs < builders else builders)
            if denominator > 1:
                duration = int(duration / float(denominator))
            sum_of_delays += duration

        return sum_of_delays

    def getEstimatedJobStartTime(self):
        """See `IBuildQueue`.

        The estimated dispatch time for the build farm job at hand is
        calculated from the following ingredients:
            * the start time for the head job (job at the
              head of the respective build queue)
            * the estimated build durations of all jobs that
              precede the job of interest (JOI) in the build queue
              (divided by the number of machines in the respective
              build pool)
        """
        # This method may only be invoked for pending jobs.
        if self.job.status != JobStatus.WAITING:
            raise AssertionError(
                "The start time is only estimated for pending jobs.")

        builder_stats = get_builder_data()
        platform = (getattr(self.processor, 'id', None), self.virtualized)
        if builder_stats[platform] == 0:
            # No builders that can run the job at hand
            #   -> no dispatch time estimation available.
            return None

        # Get the sum of the estimated run times for *pending* jobs that are
        # ahead of us in the queue.
        sum_of_delays = self._estimateJobDelay(builder_stats)

        # Get the minimum time duration until the next builder becomes
        # available.
        min_wait_time = self._estimateTimeToNextBuilder()

        # A job will not get dispatched in less than 5 seconds no matter
        # what.
        start_time = max(5, min_wait_time + sum_of_delays)
        result = self._now() + timedelta(seconds=start_time)

        return result

    @staticmethod
    def _now():
        """Return current time (UTC).  Overridable for test purposes."""
        return datetime.now(pytz.UTC)
class BinaryPackageRelease(SQLBase):
    """A particular release of a binary package.

    See `IBinaryPackageRelease`.
    """
    implements(IBinaryPackageRelease)
    _table = 'BinaryPackageRelease'

    binarypackagename = ForeignKey(dbName='binarypackagename', notNull=True,
                                   foreignKey='BinaryPackageName')
    version = StringCol(dbName='version', notNull=True)
    summary = StringCol(dbName='summary', notNull=True, default="")
    description = StringCol(dbName='description', notNull=True)
    build = ForeignKey(
        dbName='build', foreignKey='BinaryPackageBuild', notNull=True)
    binpackageformat = EnumCol(dbName='binpackageformat', notNull=True,
                               schema=BinaryPackageFormat)
    component = ForeignKey(
        dbName='component', foreignKey='Component', notNull=True)
    section = ForeignKey(dbName='section', foreignKey='Section', notNull=True)
    priority = EnumCol(dbName='priority', notNull=True,
                       schema=PackagePublishingPriority)
    shlibdeps = StringCol(dbName='shlibdeps')
    depends = StringCol(dbName='depends')
    recommends = StringCol(dbName='recommends')
    suggests = StringCol(dbName='suggests')
    conflicts = StringCol(dbName='conflicts')
    replaces = StringCol(dbName='replaces')
    provides = StringCol(dbName='provides')
    pre_depends = StringCol(dbName='pre_depends')
    enhances = StringCol(dbName='enhances')
    breaks = StringCol(dbName='breaks')
    essential = BoolCol(dbName='essential', default=False)
    installedsize = IntCol(dbName='installedsize')
    architecturespecific = BoolCol(dbName='architecturespecific',
                                   notNull=True)
    homepage = StringCol(dbName='homepage')
    datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
    debug_package = ForeignKey(dbName='debug_package',
                               foreignKey='BinaryPackageRelease')
    _user_defined_fields = StringCol(dbName='user_defined_fields')

    def __init__(self, *args, **kwargs):
        # Callers pass the user-visible 'user_defined_fields' keyword; it is
        # serialised to JSON and stored via the private column before the
        # SQLBase constructor sees the kwargs.
        if 'user_defined_fields' in kwargs:
            kwargs['_user_defined_fields'] = simplejson.dumps(
                kwargs.pop('user_defined_fields'))
        super(BinaryPackageRelease, self).__init__(*args, **kwargs)

    @property
    def user_defined_fields(self):
        """See `IBinaryPackageRelease`."""
        raw = self._user_defined_fields
        if raw is None:
            return []
        return simplejson.loads(raw)

    @property
    def title(self):
        """See `IBinaryPackageRelease`."""
        package_name = self.binarypackagename.name
        return '%s-%s' % (package_name, self.version)

    @property
    def name(self):
        """See `IBinaryPackageRelease`."""
        return self.binarypackagename.name

    @property
    def distributionsourcepackagerelease(self):
        """See `IBinaryPackageRelease`."""
        # Imported here to avoid a circular import problem.
        from lp.soyuz.model.distributionsourcepackagerelease import (
            DistributionSourcePackageRelease)
        build = self.build
        return DistributionSourcePackageRelease(
            distribution=build.distribution,
            sourcepackagerelease=build.source_package_release)

    @property
    def sourcepackagename(self):
        """See `IBinaryPackageRelease`."""
        return self.build.source_package_release.sourcepackagename.name

    @property
    def is_new(self):
        """See `IBinaryPackageRelease`."""
        # New means no current release of this binary package name in the
        # build's distroarchseries.
        das = self.build.distro_arch_series
        das_binary_package = das.getBinaryPackage(self.binarypackagename)
        return das_binary_package.currentrelease is None

    @property
    def properties(self):
        """See `IBinaryPackageRelease`."""
        return {
            "name": self.name,
            "version": self.version,
            "is_new": self.is_new,
            "architecture": self.build.arch_tag,
            "component": self.component.name,
            "section": self.section.name,
            "priority": self.priority.name,
            }

    @cachedproperty
    def files(self):
        store = Store.of(self)
        return list(store.find(BinaryPackageFile, binarypackagerelease=self))

    def addFile(self, file):
        """See `IBinaryPackageRelease`."""
        # Map the filename suffix to a package file type; unknown suffixes
        # are a programming error.
        filetype_by_suffix = (
            (".deb", BinaryPackageFileType.DEB),
            (".rpm", BinaryPackageFileType.RPM),
            (".udeb", BinaryPackageFileType.UDEB),
            (".ddeb", BinaryPackageFileType.DDEB),
            )
        for suffix, filetype in filetype_by_suffix:
            if file.filename.endswith(suffix):
                determined_filetype = filetype
                break
        else:
            raise AssertionError('Unsupported file type: %s' % file.filename)
        # Invalidate the cached file list before attaching the new file.
        del get_property_cache(self).files
        return BinaryPackageFile(binarypackagerelease=self,
                                 filetype=determined_filetype,
                                 libraryfile=file)

    def override(self, component=None, section=None, priority=None):
        """See `IBinaryPackageRelease`."""
        # Only the overrides actually supplied (non-None) are applied.
        overrides = (
            ('component', component),
            ('section', section),
            ('priority', priority),
            )
        for attribute, value in overrides:
            if value is not None:
                setattr(self, attribute, value)
class Milestone(SQLBase, MilestoneData, StructuralSubscriptionTargetMixin,
                HasBugsBase):
    """A milestone attached to a product or distribution.

    See `IMilestone`.
    """
    implements(IHasBugs, IMilestone, IBugSummaryDimension)

    active = BoolCol(notNull=True, default=True)

    # XXX: EdwinGrubbs 2009-02-06 bug=326384:
    # The Milestone.dateexpected should be changed into a date column,
    # since the class defines the field as a DateCol, so that a list of
    # milestones can't have some dateexpected attributes that are
    # datetimes and others that are dates, which can't be compared.
    dateexpected = DateCol(notNull=False, default=None)

    # XXX: Guilherme Salgado 2007-03-27 bug=40978:
    # Milestones should be associated with productseries/distroseries
    # so these columns are not needed.
    product = ForeignKey(dbName='product',
        foreignKey='Product', default=None)
    distribution = ForeignKey(dbName='distribution',
        foreignKey='Distribution', default=None)

    productseries = ForeignKey(dbName='productseries',
        foreignKey='ProductSeries', default=None)
    distroseries = ForeignKey(dbName='distroseries',
        foreignKey='DistroSeries', default=None)
    name = StringCol(notNull=True)
    summary = StringCol(notNull=False, default=None)
    code_name = StringCol(dbName='codename', notNull=False, default=None)

    def _milestone_ids_expr(self, user):
        # This milestone is its own one-element id set (used by the
        # MilestoneData query helpers).
        return (self.id, )

    @property
    def target(self):
        """See IMilestone."""
        if self.product:
            return self.product
        elif self.distribution:
            return self.distribution
        # NOTE(review): implicitly returns None when neither is set; a
        # database constraint is said to prevent that (see userCanView).

    @property
    def product_release(self):
        """The single ProductRelease for this milestone, or None."""
        store = Store.of(self)
        result = store.find(ProductRelease,
                            ProductRelease.milestone == self.id)
        releases = list(result)
        if len(releases) == 0:
            return None
        else:
            return releases[0]

    @property
    def series_target(self):
        """See IMilestone."""
        if self.productseries:
            return self.productseries
        elif self.distroseries:
            return self.distroseries

    @property
    def title(self):
        """See IMilestone."""
        if not self.code_name:
            # XXX sinzui 2009-07-16 bug=400477: code_name may be None or ''.
            return self.displayname
        return '%s "%s"' % (self.displayname, self.code_name)

    def _customizeSearchParams(self, search_params):
        """Customize `search_params` for this milestone."""
        search_params.milestone = self

    @property
    def official_bug_tags(self):
        """See `IHasBugs`."""
        return self.target.official_bug_tags

    def createProductRelease(self, owner, datereleased,
                             changelog=None, release_notes=None):
        """See `IMilestone`.

        :raises MultipleProductReleases: if a release already exists for
            this milestone.
        """
        if self.product_release is not None:
            raise MultipleProductReleases()
        release = ProductRelease(
            owner=owner,
            changelog=changelog,
            release_notes=release_notes,
            datereleased=datereleased,
            milestone=self)
        # Invalidate the series' cached release list.
        del get_property_cache(self.productseries).releases
        return release

    def closeBugsAndBlueprints(self, user):
        """See `IMilestone`.

        Moves FIXCOMMITTED bugtasks targeted to this milestone to
        FIXRELEASED.
        """
        search = BugTaskSet().open_bugtask_search
        for bugtask in self.searchTasks(search):
            if bugtask.status == BugTaskStatus.FIXCOMMITTED:
                bugtask.bug.setStatus(bugtask.target,
                                      BugTaskStatus.FIXRELEASED, user)

    def destroySelf(self):
        """See `IMilestone`."""
        # NOTE(review): validation below uses assert, which is stripped
        # under python -O; kept as-is since callers may rely on
        # AssertionError.
        params = BugTaskSearchParams(milestone=self, user=None)
        bugtasks = getUtility(IBugTaskSet).search(params)
        subscriptions = IResultSet(self.getSubscriptions())
        assert subscriptions.is_empty(), (
            "You cannot delete a milestone which has structural "
            "subscriptions.")
        assert bugtasks.is_empty(), (
            "You cannot delete a milestone which has bugtasks targeted "
            "to it.")
        assert self.all_specifications.is_empty(), (
            "You cannot delete a milestone which has specifications targeted "
            "to it.")
        assert self.product_release is None, (
            "You cannot delete a milestone which has a product release "
            "associated with it.")
        super(Milestone, self).destroySelf()

    def getBugSummaryContextWhereClause(self):
        """See BugTargetBase."""
        # Circular fail.
        from lp.bugs.model.bugsummary import BugSummary
        return BugSummary.milestone_id == self.id

    def setTags(self, tags, user):
        """See IMilestone.

        :raises InvalidTags: if any supplied tag fails validation.
        """
        # Circular reference prevention.
        from lp.registry.model.milestonetag import MilestoneTag, validate_tags
        store = Store.of(self)
        if tags:
            if not validate_tags(tags):
                raise InvalidTags()
            current_tags = set(self.getTags())
            new_tags = set(tags)
            if new_tags == current_tags:
                return
            # Removing deleted tags.
            to_remove = current_tags.difference(new_tags)
            if to_remove:
                # Constrain the delete to this milestone: without the
                # milestone_id filter the find() would match -- and
                # delete -- identically-named tags on every milestone.
                store.find(
                    MilestoneTag,
                    MilestoneTag.milestone_id == self.id,
                    MilestoneTag.tag.is_in(to_remove)).remove()
            # Adding new tags.
            for tag in new_tags.difference(current_tags):
                store.add(MilestoneTag(self, tag, user))
        else:
            # No tags supplied: drop all of this milestone's tags.
            store.find(MilestoneTag,
                       MilestoneTag.milestone_id == self.id).remove()

    def getTagsData(self):
        """See IMilestone."""
        # Prevent circular references.
        from lp.registry.model.milestonetag import MilestoneTag
        store = Store.of(self)
        return store.find(
            MilestoneTag, MilestoneTag.milestone_id == self.id).order_by(
                MilestoneTag.tag)

    def getTags(self):
        """See IMilestone.

        Returns the tag names (sorted, via getTagsData's ordering).
        """
        # Prevent circular references.
        from lp.registry.model.milestonetag import MilestoneTag
        return list(self.getTagsData().values(MilestoneTag.tag))

    def userCanView(self, user):
        """See `IMilestone`."""
        # A database constraint ensures that either self.product
        # or self.distribution is not None.
        if self.product is None:
            # Distributions are always public, and so are their
            # milestones.
            return True
        # Delegate the permission check
        return self.product.userCanView(user)