class Anomalies_v_22(Model):
    """Storm model mapping the 'anomalies' table.

    The `_v_22` suffix marks a historical schema revision, presumably
    retained for database migrations -- confirm against the migration
    framework.
    """
    __storm_table__ = 'anomalies'

    stored_when = Unicode()
    alarm = Int()
    events = JSON()
class Notification(Model):
    """Singleton table with the node-wide notification settings.

    Mail templates are imported by the handler one at a time, while the
    SMTP settings are expected all at once.
    """
    # SMTP delivery configuration.
    server = Unicode(validator=shorttext_v, default=u"mail.headstrong.de")
    port = Int(default=587)
    username = Unicode(validator=shorttext_v, default=u"*****@*****.**")
    password = Unicode(validator=shorttext_v, default=u"sendaccount99")
    source_name = Unicode(validator=shorttext_v, default=u"Default GlobaLeaks sender")
    source_email = Unicode(validator=shorttext_v, default=u"*****@*****.**")
    security = Unicode(validator=shorttext_v, default=u"TLS")
    # security_types: 'TLS', 'SSL'
    torify = Int(default=True)

    # Admin Template
    admin_pgp_alert_mail_title = JSON(validator=longlocal_v)
    admin_pgp_alert_mail_template = JSON(validator=longlocal_v)
    admin_anomaly_mail_template = JSON(validator=longlocal_v)
    admin_anomaly_mail_title = JSON(validator=longlocal_v)
    admin_anomaly_disk_low = JSON(validator=longlocal_v)
    admin_anomaly_disk_medium = JSON(validator=longlocal_v)
    admin_anomaly_disk_high = JSON(validator=longlocal_v)
    admin_anomaly_activities = JSON(validator=longlocal_v)

    # Receiver Template
    tip_mail_template = JSON(validator=longlocal_v)
    tip_mail_title = JSON(validator=longlocal_v)
    file_mail_template = JSON(validator=longlocal_v)
    file_mail_title = JSON(validator=longlocal_v)
    comment_mail_template = JSON(validator=longlocal_v)
    comment_mail_title = JSON(validator=longlocal_v)
    message_mail_template = JSON(validator=longlocal_v)
    message_mail_title = JSON(validator=longlocal_v)
    tip_expiration_mail_template = JSON(validator=longlocal_v)
    tip_expiration_mail_title = JSON(validator=longlocal_v)
    pgp_alert_mail_title = JSON(validator=longlocal_v)
    pgp_alert_mail_template = JSON(validator=longlocal_v)
    receiver_notification_limit_reached_mail_template = JSON(validator=longlocal_v)
    receiver_notification_limit_reached_mail_title = JSON(validator=longlocal_v)
    zip_description = JSON(validator=longlocal_v)

    # Experimental Receiver template
    ping_mail_template = JSON(validator=longlocal_v)
    ping_mail_title = JSON(validator=longlocal_v)
    notification_digest_mail_title = JSON(validator=longlocal_v)

    # Delivery switches and throttling knobs.
    disable_admin_notification_emails = Bool(default=False)
    disable_receivers_notification_emails = Bool(default=False)
    send_email_for_every_event = Bool(default=True)
    notification_threshold_per_hour = Int(default=20)
    notification_suspension_time = Int(default=(2 * 3600))

    # Key groups consumed by the generic (de)serialization helpers.
    unicode_keys = [
        'server',
        'username',
        'password',
        'source_name',
        'source_email',
        'security',
    ]

    localized_strings = [
        'admin_anomaly_mail_title',
        'admin_anomaly_mail_template',
        'admin_anomaly_disk_low',
        'admin_anomaly_disk_medium',
        'admin_anomaly_disk_high',
        'admin_anomaly_activities',
        'admin_pgp_alert_mail_title',
        'admin_pgp_alert_mail_template',
        'pgp_alert_mail_title',
        'pgp_alert_mail_template',
        'tip_mail_template',
        'tip_mail_title',
        'file_mail_template',
        'file_mail_title',
        'comment_mail_template',
        'comment_mail_title',
        'message_mail_template',
        'message_mail_title',
        'tip_expiration_mail_template',
        'tip_expiration_mail_title',
        'notification_digest_mail_title',
        'zip_description',
        'ping_mail_template',
        'ping_mail_title',
        'receiver_notification_limit_reached_mail_template',
        'receiver_notification_limit_reached_mail_title',
    ]

    int_keys = [
        'port',
        'notification_threshold_per_hour',
        'notification_suspension_time',
    ]

    bool_keys = [
        'disable_admin_notification_emails',
        'disable_receivers_notification_emails',
        'send_email_for_every_event',
    ]
class FieldAnswer_v_29(ModelWithID):
    """Historical model (schema v29) for the 'fieldanswer' table."""
    __storm_table__ = 'fieldanswer'

    internaltip_id = Unicode()
    key = Unicode(default=u'')
    # Leaf answers carry a value; non-leaf answers group other answers.
    is_leaf = Bool(default=True)
    value = Unicode(default=u'')
class FieldAnswerGroupFieldAnswer_v_29(Model):
    """Historical (v29) join table between answer groups and answers."""
    __storm_table__ = 'fieldanswergroup_fieldanswer'
    # Composite primary key: one row per (group, answer) pair.
    __storm_primary__ = 'fieldanswergroup_id', 'fieldanswer_id'

    fieldanswergroup_id = Unicode()
    fieldanswer_id = Unicode()
class User_v_32(ModelWithID):
    """Historical model (schema v32) for the 'user' table."""
    __storm_table__ = 'user'

    creation_date = DateTime(default_factory=datetime_now)

    # Credentials; only the hashed password and its salt are stored.
    username = Unicode(validator=shorttext_v)
    password = Unicode()
    salt = Unicode()

    deletable = Bool(default=True)
    name = Unicode(validator=shorttext_v)
    description = JSON(validator=longlocal_v)
    public_name = Unicode(validator=shorttext_v)
    role = Unicode()
    state = Unicode()
    last_login = DateTime(default_factory=datetime_null)
    mail_address = Unicode()
    language = Unicode()
    timezone = Int()

    # Password lifecycle tracking.
    password_change_needed = Bool(default=True)
    password_change_date = DateTime(default_factory=datetime_null)

    # PGP key material associated with the user.
    pgp_key_info = Unicode(default=u'')
    pgp_key_fingerprint = Unicode(default=u'')
    pgp_key_public = Unicode(default=u'')
    pgp_key_expiration = DateTime(default_factory=datetime_null)
    pgp_key_status = Unicode(default=u'disabled')
class ArchivedSchema_v_23(ModelWithID):
    """Historical model (schema v23) for the 'archivedschema' table."""
    __storm_table__ = 'archivedschema'

    hash = Unicode()
    type = Unicode()
    language = Unicode()
    schema = JSON()
class OAuthConsumer(OAuthBase, StormBase):
    """See `IOAuthConsumer`.

    A consumer is an application accessing the web service on behalf of
    a user.  Only the SHA-256 digest of the consumer secret is stored
    (`_secret`); `isSecretValid` compares digests.
    """
    __storm_table__ = 'OAuthConsumer'

    id = Int(primary=True)
    date_created = DateTime(tzinfo=pytz.UTC, allow_none=False, default=UTC_NOW)
    disabled = Bool(allow_none=False, default=False)
    key = Unicode(allow_none=False)
    # Digest of the shared secret, never the plaintext.
    _secret = Unicode(name='secret', allow_none=True, default=u'')

    def __init__(self, key, secret):
        super(OAuthConsumer, self).__init__()
        self.key = key
        self._secret = sha256_digest(secret)

    # This regular expression singles out a consumer key that
    # represents any and all apps running on a specific computer. The
    # regular expression identifies the system type (eg. the OS) and
    # the name of the computer (eg. the hostname).
    #
    # A client can send whatever string they want, as long as it
    # matches the regular expression, but here are some values we've
    # seen from the lazr.restfulclient code for generating this
    # string.
    #
    # System-wide: Ubuntu (hostname)
    #  - An Ubuntu computer called "hostname"
    # System-wide: debian (hostname)
    #  - A Debian computer called "hostname"
    #    (A Nokia N900 phone also sends this string.)
    # System-wide: Windows (hostname)
    #  - A Windows computer called "hostname"
    # System-wide: Microsoft (hostname)
    #  - A Windows computer called "hostname", running an old version
    #    of Python
    # System-wide: Darwin (hostname)
    #  - A Mac OS X computer called "hostname"
    #    (Presumably an iPhone will also send this string,
    #     but we're not sure.)
    #
    # NOTE: the pattern is a raw string so that "\(" is a regex escape
    # rather than an invalid Python string escape sequence (which
    # raises a DeprecationWarning/SyntaxWarning on modern CPython).
    integrated_desktop_re = re.compile(r"^System-wide: (.*) \(([^)]*)\)$")

    def _integrated_desktop_match_group(self, position):
        """Return information about a desktop integration token.

        A convenience method that runs the desktop integration regular
        expression against the consumer key.

        :param position: The match group to return if the regular
            expression matches.
        :return: The value of one of the match groups, or None.
        """
        match = self.integrated_desktop_re.match(self.key)
        if match is None:
            return None
        return match.groups()[position]

    @property
    def is_integrated_desktop(self):
        """See `IOAuthConsumer`."""
        return self.integrated_desktop_re.match(self.key) is not None

    @property
    def integrated_desktop_type(self):
        """See `IOAuthConsumer`."""
        # Group 0 is the system type (eg. the OS name).
        return self._integrated_desktop_match_group(0)

    @property
    def integrated_desktop_name(self):
        """See `IOAuthConsumer`."""
        # Group 1 is the computer name (eg. the hostname).
        return self._integrated_desktop_match_group(1)

    def isSecretValid(self, secret):
        """See `IOAuthConsumer`."""
        return sha256_digest(secret) == self._secret

    def newRequestToken(self):
        """See `IOAuthConsumer`.

        :return: A (token, secret) tuple; the plaintext secret is only
            available here, since the token stores just its digest.
        """
        key, secret = create_token_key_and_secret(table=OAuthRequestToken)
        token = OAuthRequestToken(consumer=self, key=key, secret=secret)
        OAuthRequestToken._getStore().add(token)
        return token, secret

    def getAccessToken(self, key):
        """See `IOAuthConsumer`."""
        return OAuthAccessToken._getStore().find(
            OAuthAccessToken,
            OAuthAccessToken.key == key,
            OAuthAccessToken.consumer == self).one()

    def getRequestToken(self, key):
        """See `IOAuthConsumer`."""
        return OAuthRequestToken._getStore().find(
            OAuthRequestToken,
            OAuthRequestToken.key == key,
            OAuthRequestToken.consumer == self).one()
class Post(object):
    """Storm model for the "post" table."""
    __storm_table__ = "post"

    id = Int(primary=True)
    user_id = Int()
    header = Unicode()
    day = Unicode()
class OAuthAccessToken(OAuthBase, StormBase):
    """See `IOAuthAccessToken`."""
    __storm_table__ = 'OAuthAccessToken'

    id = Int(primary=True)
    consumer_id = Int(name='consumer', allow_none=False)
    consumer = Reference(consumer_id, 'OAuthConsumer.id')
    person_id = Int(name='person', allow_none=False)
    person = Reference(person_id, 'Person.id')
    date_created = DateTime(tzinfo=pytz.UTC, allow_none=False, default=UTC_NOW)
    date_expires = DateTime(tzinfo=pytz.UTC, allow_none=True, default=None)
    key = Unicode(allow_none=False)
    # Only the SHA-256 digest of the secret is stored; see isSecretValid().
    _secret = Unicode(name='secret', allow_none=True, default=u'')
    permission = DBEnum(enum=AccessLevel, allow_none=False)

    # Optional context restricting what the token grants access to:
    # at most one of product / projectgroup / distribution (optionally
    # narrowed by sourcepackagename) -- see the `context` property.
    product_id = Int(name='product', allow_none=True, default=None)
    product = Reference(product_id, 'Product.id')
    projectgroup_id = Int(name='project', allow_none=True, default=None)
    projectgroup = Reference(projectgroup_id, 'ProjectGroup.id')
    sourcepackagename_id = Int(name='sourcepackagename', allow_none=True,
                               default=None)
    sourcepackagename = Reference(sourcepackagename_id, 'SourcePackageName.id')
    distribution_id = Int(name='distribution', allow_none=True, default=None)
    distribution = Reference(distribution_id, 'Distribution.id')

    def __init__(self, consumer, permission, key, secret=u'', person=None,
                 date_expires=None, product=None, projectgroup=None,
                 distribution=None, sourcepackagename=None):
        """Create an access token; the secret is digested before storage."""
        super(OAuthAccessToken, self).__init__()
        self.consumer = consumer
        self.permission = permission
        self.key = key
        self._secret = sha256_digest(secret)
        self.person = person
        self.date_expires = date_expires
        self.product = product
        self.projectgroup = projectgroup
        self.distribution = distribution
        self.sourcepackagename = sourcepackagename

    @property
    def context(self):
        """See `IOAuthToken`."""
        # Precedence: product, then project group, then distribution
        # (narrowed to a source package when one is set).
        if self.product:
            return self.product
        elif self.projectgroup:
            return self.projectgroup
        elif self.distribution:
            if self.sourcepackagename:
                return self.distribution.getSourcePackage(
                    self.sourcepackagename)
            else:
                return self.distribution
        else:
            return None

    @property
    def is_expired(self):
        # A token with no expiry date never expires.
        now = datetime.now(pytz.UTC)
        return self.date_expires is not None and self.date_expires <= now

    def isSecretValid(self, secret):
        """See `IOAuthToken`."""
        return sha256_digest(secret) == self._secret
class OAuthRequestToken(OAuthBase, StormBase):
    """See `IOAuthRequestToken`."""
    __storm_table__ = 'OAuthRequestToken'

    id = Int(primary=True)
    consumer_id = Int(name='consumer', allow_none=False)
    consumer = Reference(consumer_id, 'OAuthConsumer.id')
    # Unset until the token is reviewed by a user; see review().
    person_id = Int(name='person', allow_none=True, default=None)
    person = Reference(person_id, 'Person.id')
    date_created = DateTime(tzinfo=pytz.UTC, allow_none=False, default=UTC_NOW)
    date_expires = DateTime(tzinfo=pytz.UTC, allow_none=True, default=None)
    key = Unicode(allow_none=False)
    # Only the SHA-256 digest of the secret is stored; see isSecretValid().
    _secret = Unicode(name='secret', allow_none=True, default=u'')
    # Permission granted at review time; None until reviewed.
    permission = DBEnum(enum=OAuthPermission, allow_none=True, default=None)
    date_reviewed = DateTime(tzinfo=pytz.UTC, allow_none=True, default=None)

    # Optional context restricting the token; at most one of
    # product / projectgroup / distribution (optionally narrowed by
    # sourcepackagename) -- see the `context` property.
    product_id = Int(name='product', allow_none=True, default=None)
    product = Reference(product_id, 'Product.id')
    projectgroup_id = Int(name='project', allow_none=True, default=None)
    projectgroup = Reference(projectgroup_id, 'ProjectGroup.id')
    sourcepackagename_id = Int(name='sourcepackagename', allow_none=True,
                               default=None)
    sourcepackagename = Reference(sourcepackagename_id, 'SourcePackageName.id')
    distribution_id = Int(name='distribution', allow_none=True, default=None)
    distribution = Reference(distribution_id, 'Distribution.id')

    def __init__(self, consumer, key, secret=u'', permission=None, person=None,
                 date_expires=None, product=None, projectgroup=None,
                 distribution=None, sourcepackagename=None):
        """Create a request token; the secret is digested before storage."""
        super(OAuthRequestToken, self).__init__()
        self.consumer = consumer
        self.permission = permission
        self.key = key
        self._secret = sha256_digest(secret)
        self.person = person
        self.date_expires = date_expires
        self.product = product
        self.projectgroup = projectgroup
        self.distribution = distribution
        self.sourcepackagename = sourcepackagename

    @property
    def context(self):
        """See `IOAuthToken`."""
        # Precedence: product, then project group, then distribution
        # (narrowed to a source package when one is set).
        if self.product:
            return self.product
        elif self.projectgroup:
            return self.projectgroup
        elif self.distribution:
            if self.sourcepackagename:
                return self.distribution.getSourcePackage(
                    self.sourcepackagename)
            else:
                return self.distribution
        else:
            return None

    @property
    def is_expired(self):
        # Unlike access tokens, request tokens expire a fixed interval
        # after creation, regardless of date_expires.
        now = datetime.now(pytz.UTC)
        expires = self.date_created + timedelta(hours=REQUEST_TOKEN_VALIDITY)
        return expires <= now

    def isSecretValid(self, secret):
        """See `IOAuthToken`."""
        return sha256_digest(secret) == self._secret

    def review(self, user, permission, context=None, date_expires=None):
        """See `IOAuthRequestToken`.

        Records the reviewing user, the granted permission and the
        optional context.  A token may be reviewed only once, and not
        after it has expired.
        """
        if self.is_reviewed:
            raise OAuthValidationError(
                "Request tokens can be reviewed only once.")
        if self.is_expired:
            raise OAuthValidationError(
                'This request token has expired and can no longer be '
                'reviewed.')
        self.date_reviewed = datetime.now(pytz.UTC)
        self.date_expires = date_expires
        self.person = user
        self.permission = permission
        if IProduct.providedBy(context):
            self.product = context
        elif IProjectGroup.providedBy(context):
            self.projectgroup = context
        elif IDistribution.providedBy(context):
            self.distribution = context
        elif IDistributionSourcePackage.providedBy(context):
            # A distribution source package is stored as the pair
            # (sourcepackagename, distribution).
            self.sourcepackagename = context.sourcepackagename
            self.distribution = context.distribution
        else:
            assert context is None, ("Unknown context type: %r."
                                     % context)

    def createAccessToken(self):
        """See `IOAuthRequestToken`.

        Exchanges this (reviewed, authorized, unexpired) request token
        for a new access token, deleting the request token and
        notifying the user for security reasons.

        :return: An (access_token, secret) tuple.
        """
        if not self.is_reviewed:
            raise OAuthValidationError(
                'Cannot create an access token from an unreviewed request '
                'token.')
        if self.permission == OAuthPermission.UNAUTHORIZED:
            raise OAuthValidationError(
                'The user did not grant access to this consumer.')
        if self.is_expired:
            raise OAuthValidationError(
                'This request token has expired and can no longer be '
                'exchanged for an access token.')
        key, secret = create_token_key_and_secret(table=OAuthAccessToken)
        # The request token's OAuthPermission maps to an equally-named
        # AccessLevel on the access token.
        access_level = AccessLevel.items[self.permission.name]
        access_token = OAuthAccessToken(
            consumer=self.consumer, person=self.person, key=key,
            secret=secret, permission=access_level,
            date_expires=self.date_expires, product=self.product,
            projectgroup=self.projectgroup, distribution=self.distribution,
            sourcepackagename=self.sourcepackagename)
        OAuthAccessToken._getStore().add(access_token)

        # We want to notify the user that this oauth token has been generated
        # for them for security reasons.
        self.person.security_field_changed(
            "OAuth token generated in Launchpad",
            "A new OAuth token consumer was enabled in Launchpad.")

        # Request tokens are single-use: remove this one now that it
        # has been exchanged.
        self._getStore().remove(self)
        return access_token, secret

    @property
    def is_reviewed(self):
        """See `IOAuthRequestToken`."""
        return self.date_reviewed is not None
class SnapBuild(PackageBuildMixin, Storm):
    """See `ISnapBuild`."""
    __storm_table__ = 'SnapBuild'

    job_type = BuildFarmJobType.SNAPBUILD

    id = Int(name='id', primary=True)

    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')

    # Id of the originating build request, if any; resolved lazily by
    # the `build_request` property.
    build_request_id = Int(name='build_request', allow_none=True)

    requester_id = Int(name='requester', allow_none=False)
    requester = Reference(requester_id, 'Person.id')

    snap_id = Int(name='snap', allow_none=False)
    snap = Reference(snap_id, 'Snap.id')

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    distro_arch_series_id = Int(name='distro_arch_series', allow_none=False)
    distro_arch_series = Reference(
        distro_arch_series_id, 'DistroArchSeries.id')

    pocket = DBEnum(enum=PackagePublishingPocket, allow_none=False)

    channels = JSON('channels', allow_none=True)

    processor_id = Int(name='processor', allow_none=False)
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    # Lifecycle timestamps of the build.
    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(
        name='date_first_dispatched', tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    revision_id = Unicode(name='revision_id')

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    upload_log_id = Int(name='upload_log')
    upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')

    dependencies = Unicode(name='dependencies')

    failure_count = Int(name='failure_count', allow_none=False)

    # NOTE: maps to the 'store_upload_json_data' column.
    store_upload_metadata = JSON('store_upload_json_data', allow_none=True)

    def __init__(self, build_farm_job, requester, snap, archive,
                 distro_arch_series, pocket, channels, processor, virtualized,
                 date_created, store_upload_metadata=None, build_request=None):
        """Construct a `SnapBuild`."""
        super(SnapBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.requester = requester
        self.snap = snap
        self.archive = archive
        self.distro_arch_series = distro_arch_series
        self.pocket = pocket
        self.channels = channels
        self.processor = processor
        self.virtualized = virtualized
        self.date_created = date_created
        self.store_upload_metadata = store_upload_metadata
        if build_request is not None:
            self.build_request_id = build_request.id
        # Every new build starts out waiting to be built.
        self.status = BuildStatus.NEEDSBUILD

    @property
    def build_request(self):
        """See `ISnapBuild`."""
        # Returns None when this build was not created from a request.
        if self.build_request_id is not None:
            return self.snap.getBuildRequest(self.build_request_id)

    @property
    def is_private(self):
        """See `IBuildFarmJob`."""
        # Private if the snap, its owner, or the archive is private.
        return (
            self.snap.private or
            self.snap.owner.private or
            self.archive.private
        )

    @property
    def title(self):
        # e.g. "amd64 build of foo snap package in ubuntu bionic RELEASE".
        das = self.distro_arch_series
        snap_title = "%s snap package" % self.snap.name
        if (self.snap.store_name is not None and
                self.snap.store_name != self.snap.name):
            snap_title += " (%s)" % self.snap.store_name
        return "%s build of %s in %s %s" % (
            das.architecturetag, snap_title,
            das.distroseries.distribution.name,
            das.distroseries.getSuite(self.pocket))

    @property
    def distribution(self):
        """See `IPackageBuild`."""
        return self.distro_arch_series.distroseries.distribution

    @property
    def distro_series(self):
        """See `IPackageBuild`."""
        return self.distro_arch_series.distroseries

    @property
    def arch_tag(self):
        """See `ISnapBuild`."""
        return self.distro_arch_series.architecturetag

    @property
    def current_component(self):
        component = self.archive.default_component
        if component is not None:
            return component
        else:
            # XXX cjwatson 2015-07-17: Hardcode to multiverse for the time
            # being.
            return getUtility(IComponentSet)["multiverse"]

    @property
    def score(self):
        """See `ISnapBuild`."""
        if self.buildqueue_record is None:
            return None
        else:
            return self.buildqueue_record.lastscore

    @property
    def can_be_rescored(self):
        """See `ISnapBuild`."""
        # Only queued, not-yet-dispatched builds can be rescored.
        return (
            self.buildqueue_record is not None and
            self.status is BuildStatus.NEEDSBUILD)

    @property
    def can_be_cancelled(self):
        """See `ISnapBuild`."""
        if not self.buildqueue_record:
            return False

        cancellable_statuses = [
            BuildStatus.BUILDING,
            BuildStatus.NEEDSBUILD,
        ]
        return self.status in cancellable_statuses

    def rescore(self, score):
        """See `ISnapBuild`."""
        assert self.can_be_rescored, "Build %s cannot be rescored" % self.id
        self.buildqueue_record.manualScore(score)

    def cancel(self):
        """See `ISnapBuild`."""
        if not self.can_be_cancelled:
            return
        # BuildQueue.cancel() will decide whether to go straight to
        # CANCELLED, or go through CANCELLING to let buildd-manager clean up
        # the slave.
        self.buildqueue_record.cancel()

    def calculateScore(self):
        # Fixed base score plus any per-archive adjustment.
        return 2510 + self.archive.relative_build_score

    def getMedianBuildDuration(self):
        """Return the median duration of our successful builds."""
        store = IStore(self)
        result = store.find(
            (SnapBuild.date_started, SnapBuild.date_finished),
            SnapBuild.snap == self.snap_id,
            SnapBuild.distro_arch_series == self.distro_arch_series_id,
            SnapBuild.status == BuildStatus.FULLYBUILT)
        result.order_by(Desc(SnapBuild.date_finished))
        # Consider at most the nine most recent successful builds.
        durations = [row[1] - row[0] for row in result[:9]]
        if len(durations) == 0:
            return None
        durations.sort()
        return durations[len(durations) // 2]

    def estimateDuration(self):
        """See `IBuildFarmJob`."""
        median = self.getMedianBuildDuration()
        if median is not None:
            return median
        # No history to go on: fall back to a 30 minute estimate.
        return timedelta(minutes=30)

    def getFiles(self):
        """See `ISnapBuild`."""
        result = Store.of(self).find(
            (SnapFile, LibraryFileAlias, LibraryFileContent),
            SnapFile.snapbuild == self.id,
            LibraryFileAlias.id == SnapFile.libraryfile_id,
            LibraryFileContent.id == LibraryFileAlias.contentID)
        return result.order_by([LibraryFileAlias.filename, SnapFile.id])

    def getFileByName(self, filename):
        """See `ISnapBuild`."""
        # Log files are resolved by suffix; anything else is looked up
        # among the build's output files.
        if filename.endswith(".txt.gz"):
            file_object = self.log
        elif filename.endswith("_log.txt"):
            file_object = self.upload_log
        else:
            file_object = Store.of(self).find(
                LibraryFileAlias,
                SnapFile.snapbuild == self.id,
                LibraryFileAlias.id == SnapFile.libraryfile_id,
                LibraryFileAlias.filename == filename).one()

        if file_object is not None and file_object.filename == filename:
            return file_object

        raise NotFoundError(filename)

    def addFile(self, lfa):
        """See `ISnapBuild`."""
        snapfile = SnapFile(snapbuild=self, libraryfile=lfa)
        IMasterStore(SnapFile).add(snapfile)
        return snapfile

    def verifySuccessfulUpload(self):
        """See `IPackageBuild`."""
        # A successful upload must have produced at least one file.
        return not self.getFiles().is_empty()

    def updateStatus(self, status, builder=None, slave_status=None,
                     date_started=None, date_finished=None,
                     force_invalid_transition=False):
        """See `IBuildFarmJob`."""
        old_status = self.status
        super(SnapBuild, self).updateStatus(
            status, builder=builder, slave_status=slave_status,
            date_started=date_started, date_finished=date_finished,
            force_invalid_transition=force_invalid_transition)
        # Record the built revision as reported by the slave, if any.
        if slave_status is not None:
            revision_id = slave_status.get("revision_id")
            if revision_id is not None:
                self.revision_id = unicode(revision_id)
        # Let subscribers (e.g. webhooks) know about status changes.
        if status != old_status:
            notify(SnapBuildStatusChangedEvent(self))

    def notify(self, extra_info=None):
        """See `IPackageBuild`."""
        if not config.builddmaster.send_build_notification:
            return
        # Successful builds need no notification mail.
        if self.status == BuildStatus.FULLYBUILT:
            return
        mailer = SnapBuildMailer.forStatus(self)
        mailer.sendAll()

    def lfaUrl(self, lfa):
        """Return the URL for a LibraryFileAlias in this context."""
        if lfa is None:
            return None
        return ProxiedLibraryFileAlias(lfa, self).http_url

    @property
    def log_url(self):
        """See `IBuildFarmJob`."""
        return self.lfaUrl(self.log)

    @property
    def upload_log_url(self):
        """See `IPackageBuild`."""
        return self.lfaUrl(self.upload_log)

    def getFileUrls(self):
        return [self.lfaUrl(lfa) for _, lfa, _ in self.getFiles()]

    @cachedproperty
    def eta(self):
        """The datetime when the build job is estimated to complete.

        This is the BuildQueue.estimated_duration plus the
        Job.date_started or BuildQueue.getEstimatedJobStartTime.
        """
        if self.buildqueue_record is None:
            return None
        queue_record = self.buildqueue_record
        if queue_record.status == BuildQueueStatus.WAITING:
            start_time = queue_record.getEstimatedJobStartTime()
        else:
            start_time = queue_record.date_started
        if start_time is None:
            return None
        duration = queue_record.estimated_duration
        return start_time + duration

    @property
    def estimate(self):
        """If true, the date value is an estimate."""
        if self.date_finished is not None:
            return False
        return self.eta is not None

    @property
    def date(self):
        """The date when the build completed or is estimated to complete."""
        if self.estimate:
            return self.eta
        return self.date_finished

    @property
    def store_upload_jobs(self):
        # Store-upload jobs for this build, newest first, with their
        # underlying Job rows preloaded.
        jobs = Store.of(self).find(
            SnapBuildJob,
            SnapBuildJob.snapbuild == self,
            SnapBuildJob.job_type == SnapBuildJobType.STORE_UPLOAD)
        jobs.order_by(Desc(SnapBuildJob.job_id))

        def preload_jobs(rows):
            load_related(Job, rows, ["job_id"])

        return DecoratedResultSet(
            jobs, lambda job: job.makeDerived(), pre_iter_hook=preload_jobs)

    @cachedproperty
    def last_store_upload_job(self):
        return self.store_upload_jobs.first()

    @property
    def store_upload_status(self):
        # Derive an upload status from the most recent upload job.
        job = self.last_store_upload_job
        if job is None or job.job.status == JobStatus.SUSPENDED:
            return SnapBuildStoreUploadStatus.UNSCHEDULED
        elif job.job.status in (JobStatus.WAITING, JobStatus.RUNNING):
            return SnapBuildStoreUploadStatus.PENDING
        elif job.job.status == JobStatus.COMPLETED:
            return SnapBuildStoreUploadStatus.UPLOADED
        else:
            # A store_url means the upload itself succeeded but the
            # release step failed.
            if job.store_url:
                return SnapBuildStoreUploadStatus.FAILEDTORELEASE
            else:
                return SnapBuildStoreUploadStatus.FAILEDTOUPLOAD

    @property
    def store_upload_url(self):
        job = self.last_store_upload_job
        return job and job.store_url

    @property
    def store_upload_revision(self):
        job = self.last_store_upload_job
        return job and job.store_revision

    @property
    def store_upload_error_message(self):
        job = self.last_store_upload_job
        return job and job.error_message

    @property
    def store_upload_error_messages(self):
        # Prefer the structured error_messages list; fall back to
        # wrapping the single error_message, if any.
        job = self.last_store_upload_job
        if job:
            if job.error_messages:
                return job.error_messages
            elif job.error_message:
                return [{"message": job.error_message}]
        return []

    def scheduleStoreUpload(self):
        """See `ISnapBuild`."""
        if not self.snap.can_upload_to_store:
            raise CannotScheduleStoreUpload(
                "Cannot upload this package to the store because it is not "
                "properly configured.")
        if not self.was_built or self.getFiles().is_empty():
            raise CannotScheduleStoreUpload(
                "Cannot upload this package because it has no files.")
        if self.store_upload_status == SnapBuildStoreUploadStatus.PENDING:
            raise CannotScheduleStoreUpload(
                "An upload of this package is already in progress.")
        elif self.store_upload_status == SnapBuildStoreUploadStatus.UPLOADED:
            raise CannotScheduleStoreUpload(
                "Cannot upload this package because it has already been "
                "uploaded.")
        getUtility(ISnapStoreUploadJobSource).create(self)
class WhistleblowerTip_v_34(models.ModelWithID):
    """Historical model (schema v34) for the 'whistleblowertip' table."""
    __storm_table__ = 'whistleblowertip'

    internaltip_id = Unicode()
    # Only the hash of the receipt is stored.
    receipt_hash = Unicode()
    access_counter = Int(default=0)
class FieldAnswerGroup(Model):
    """Group of answers belonging to a single field answer.

    :ivar number: ordinal position of the group.
    :ivar fieldanswer_id: id of the owning field answer.
    """
    # Every sibling model declares its table explicitly; this one was
    # missing it, which would break Storm's table mapping.
    __storm_table__ = 'fieldanswergroup'

    number = Int(default=0)
    fieldanswer_id = Unicode()

    # Key groups consumed by the generic (de)serialization helpers.
    unicode_keys = ['fieldanswer_id']
    int_keys = ['number']
class StepField_v_20(Model):
    """Historical (v20) join table linking steps to fields."""
    __storm_table__ = 'step_field'
    # Composite primary key: one row per (step, field) pair.
    __storm_primary__ = 'step_id', 'field_id'

    step_id = Unicode()
    field_id = Unicode()
class SourcePackageRecipe(Storm):
    """See `ISourcePackageRecipe` and `ISourcePackageRecipeSource`."""
    __storm_table__ = 'SourcePackageRecipe'

    def __str__(self):
        return '%s/%s' % (self.owner.name, self.name)

    id = Int(primary=True)

    daily_build_archive_id = Int(name='daily_build_archive', allow_none=True)
    daily_build_archive = Reference(daily_build_archive_id, 'Archive.id')

    date_created = UtcDateTimeCol(notNull=True)
    date_last_modified = UtcDateTimeCol(notNull=True)

    owner_id = Int(name='owner', allow_none=True)
    owner = Reference(owner_id, 'Person.id')

    registrant_id = Int(name='registrant', allow_none=True)
    registrant = Reference(registrant_id, 'Person.id')

    # Distro series this recipe builds for, via the linking table.
    distroseries = ReferenceSet(
        id, _SourcePackageRecipeDistroSeries.sourcepackagerecipe_id,
        _SourcePackageRecipeDistroSeries.distroseries_id, DistroSeries.id)

    build_daily = Bool()
    is_stale = Bool()

    name = Unicode(allow_none=True)
    description = Unicode(allow_none=True)

    @cachedproperty
    def _recipe_data(self):
        # The actual recipe text/branches live in a companion
        # SourcePackageRecipeData row.
        return Store.of(self).find(
            SourcePackageRecipeData,
            SourcePackageRecipeData.sourcepackage_recipe == self).one()

    @property
    def builder_recipe(self):
        """Accesses of the recipe go to the SourcePackageRecipeData."""
        return self._recipe_data.getRecipe()

    @property
    def base_branch(self):
        return self._recipe_data.base_branch

    @property
    def base_git_repository(self):
        return self._recipe_data.base_git_repository

    @property
    def base(self):
        # Exactly one of base_branch / base_git_repository is set.
        if self.base_branch is not None:
            return self.base_branch
        else:
            assert self.base_git_repository is not None
            return self.base_git_repository

    @staticmethod
    def preLoadDataForSourcePackageRecipes(sourcepackagerecipes):
        """Bulk-load related data to avoid per-recipe queries."""
        # Load the referencing SourcePackageRecipeData.
        spr_datas = load_referencing(
            SourcePackageRecipeData,
            sourcepackagerecipes, ['sourcepackage_recipe_id'])
        # Store the SourcePackageRecipeData in the sourcepackagerecipes
        # objects.
        for spr_data in spr_datas:
            cache = get_property_cache(spr_data.sourcepackage_recipe)
            cache._recipe_data = spr_data
        SourcePackageRecipeData.preLoadReferencedBranches(spr_datas)
        owner_ids = set(map(attrgetter('owner_id'), sourcepackagerecipes))
        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
            owner_ids, need_validity=True))

    def setRecipeText(self, recipe_text):
        """Parse and store new recipe text."""
        parsed, recipe_branch_type = (
            getUtility(IRecipeBranchSource).getParsedRecipe(recipe_text))
        self._recipe_data.setRecipe(parsed, recipe_branch_type)

    def getRecipeText(self, validate=False):
        """See `ISourcePackageRecipe`."""
        recipe_text = self.builder_recipe.get_recipe_text(validate=validate)
        # For git-based recipes, mangle the header line to say
        # "git-build-recipe" to reduce confusion; bzr-builder's recipe
        # parser will always round-trip this to "bzr-builder".
        if self.base_git_repository is not None:
            recipe_text = re.sub(
                r"^(#\s*)bzr-builder", r"\1git-build-recipe", recipe_text)
        return recipe_text

    @property
    def recipe_text(self):
        return self.getRecipeText()

    def updateSeries(self, distroseries):
        """Replace the associated distro series with `distroseries`."""
        if distroseries != self.distroseries:
            self.distroseries.clear()
            for distroseries_item in distroseries:
                self.distroseries.add(distroseries_item)

    @staticmethod
    def new(registrant, owner, name, recipe, description, distroseries=None,
            daily_build_archive=None, build_daily=False,
            date_created=DEFAULT):
        """See `ISourcePackageRecipeSource.new`."""
        store = IMasterStore(SourcePackageRecipe)
        sprecipe = SourcePackageRecipe()
        builder_recipe, recipe_branch_type = (
            getUtility(IRecipeBranchSource).getParsedRecipe(recipe))
        # Creating the data row links it to the recipe.
        SourcePackageRecipeData(builder_recipe, recipe_branch_type, sprecipe)
        sprecipe.registrant = registrant
        sprecipe.owner = owner
        sprecipe.name = name
        if distroseries is not None:
            for distroseries_item in distroseries:
                sprecipe.distroseries.add(distroseries_item)
        sprecipe.description = description
        sprecipe.daily_build_archive = daily_build_archive
        sprecipe.build_daily = build_daily
        sprecipe.date_created = date_created
        sprecipe.date_last_modified = date_created
        store.add(sprecipe)
        return sprecipe

    @staticmethod
    def findStaleDailyBuilds():
        """Find daily-build recipes with no build in roughly a day."""
        # Slightly under 24 hours so a daily cron job doesn't drift.
        one_day_ago = datetime.now(utc) - timedelta(hours=23, minutes=50)
        joins = (
            SourcePackageRecipe,
            LeftJoin(
                SourcePackageRecipeBuild,
                And(SourcePackageRecipeBuild.recipe_id ==
                    SourcePackageRecipe.id,
                    SourcePackageRecipeBuild.archive_id ==
                    SourcePackageRecipe.daily_build_archive_id,
                    SourcePackageRecipeBuild.date_created > one_day_ago)),
            )
        return IStore(SourcePackageRecipe).using(*joins).find(
            SourcePackageRecipe,
            SourcePackageRecipe.is_stale == True,
            SourcePackageRecipe.build_daily == True,
            # The LEFT JOIN produced no recent build.
            SourcePackageRecipeBuild.date_created == None,
            ).config(distinct=True)

    @staticmethod
    def exists(owner, name):
        """See `ISourcePackageRecipeSource.new`."""
        store = IMasterStore(SourcePackageRecipe)
        recipe = store.find(
            SourcePackageRecipe,
            SourcePackageRecipe.owner == owner,
            SourcePackageRecipe.name == name).one()
        return recipe is not None

    def destroySelf(self):
        """Delete this recipe, detaching (not deleting) its builds."""
        store = Store.of(self)
        self.distroseries.clear()
        self._recipe_data.instructions.find().remove()
        builds = store.find(
            SourcePackageRecipeBuild,
            SourcePackageRecipeBuild.recipe == self)
        builds.set(recipe_id=None)
        store.remove(self._recipe_data)
        store.remove(self)

    def containsUnbuildableSeries(self, archive):
        """Return True if any associated series cannot be built for."""
        buildable_distros = set(
            BuildableDistroSeries.findSeries(archive.owner))
        return len(
            set(self.distroseries).difference(buildable_distros)) >= 1

    def requestBuild(self, archive, requester, distroseries,
                     pocket=PackagePublishingPocket.RELEASE,
                     manual=False):
        """See `ISourcePackageRecipe`.

        :raises NonPPABuildRequest: if `archive` is not a PPA.
        :raises BuildNotAllowedForDistro: if `distroseries` is not
            buildable by the archive owner.
        :raises BuildAlreadyPending: if an identical build is queued.
        """
        if not archive.is_ppa:
            raise NonPPABuildRequest

        buildable_distros = BuildableDistroSeries.findSeries(archive.owner)
        if distroseries not in buildable_distros:
            raise BuildNotAllowedForDistro(self, distroseries)

        reject_reason = archive.checkUpload(
            requester, distroseries, None, archive.default_component,
            pocket)
        if reject_reason is not None:
            raise reject_reason

        pending = IStore(self).find(
            SourcePackageRecipeBuild,
            SourcePackageRecipeBuild.recipe_id == self.id,
            SourcePackageRecipeBuild.distroseries_id == distroseries.id,
            SourcePackageRecipeBuild.archive_id == archive.id,
            SourcePackageRecipeBuild.status == BuildStatus.NEEDSBUILD)
        if pending.any() is not None:
            raise BuildAlreadyPending(self, distroseries)

        build = getUtility(ISourcePackageRecipeBuildSource).new(
            distroseries, self, requester, archive)
        build.queueBuild()
        queue_record = build.buildqueue_record
        if manual:
            # Manual requests jump ahead of automatic ones.
            queue_record.manualScore(queue_record.lastscore + 100)
        return build

    def performDailyBuild(self):
        """See `ISourcePackageRecipe`."""
        builds = []
        self.is_stale = False
        buildable_distros = set(BuildableDistroSeries.findSeries(
            self.daily_build_archive.owner))
        build_for = set(self.distroseries).intersection(buildable_distros)
        for distroseries in build_for:
            try:
                build = self.requestBuild(
                    self.daily_build_archive, self.owner, distroseries,
                    PackagePublishingPocket.RELEASE)
                builds.append(build)
            except BuildAlreadyPending:
                # A pending build for this series already exists; skip.
                continue
        return builds

    @property
    def builds(self):
        """See `ISourcePackageRecipe`."""
        order_by = (
            NullsLast(Desc(Greatest(
                SourcePackageRecipeBuild.date_started,
                SourcePackageRecipeBuild.date_finished))),
            Desc(SourcePackageRecipeBuild.date_created),
            Desc(SourcePackageRecipeBuild.id))
        return self._getBuilds(None, order_by)

    @property
    def completed_builds(self):
        """See `ISourcePackageRecipe`."""
        filter_term = (
            SourcePackageRecipeBuild.status != BuildStatus.NEEDSBUILD)
        order_by = (
            NullsLast(Desc(Greatest(
                SourcePackageRecipeBuild.date_started,
                SourcePackageRecipeBuild.date_finished))),
            Desc(SourcePackageRecipeBuild.id))
        return self._getBuilds(filter_term, order_by)

    @property
    def pending_builds(self):
        """See `ISourcePackageRecipe`."""
        filter_term = (
            SourcePackageRecipeBuild.status == BuildStatus.NEEDSBUILD)
        # We want to order by date_created but this is the same as ordering
        # by id (since id increases monotonically) and is less expensive.
        order_by = Desc(SourcePackageRecipeBuild.id)
        return self._getBuilds(filter_term, order_by)

    def _getBuilds(self, filter_term, order_by):
        """The actual query to get the builds."""
        query_args = [
            SourcePackageRecipeBuild.recipe == self,
            SourcePackageRecipeBuild.archive_id == Archive.id,
            Archive._enabled == True,
            ]
        if filter_term is not None:
            query_args.append(filter_term)
        result = Store.of(self).find(SourcePackageRecipeBuild, *query_args)
        result.order_by(order_by)
        return result

    def getPendingBuildInfo(self):
        """See `ISourcePackageRecipe`."""
        builds = self.pending_builds
        result = []
        for build in builds:
            result.append(
                {"distroseries": build.distroseries.displayname,
                 "archive": build.archive.reference})
        return result

    @property
    def last_build(self):
        """See `ISourcePackageRecipeBuild`."""
        # NOTE(review): passes True as a trivially-true filter term --
        # presumably intentional; confirm Storm accepts it.
        return self._getBuilds(
            True, Desc(SourcePackageRecipeBuild.date_finished)).first()

    def getMedianBuildDuration(self):
        """Return the median duration of builds of this recipe."""
        store = IStore(self)
        result = store.find(
            (SourcePackageRecipeBuild.date_started,
             SourcePackageRecipeBuild.date_finished),
            SourcePackageRecipeBuild.recipe == self.id,
            SourcePackageRecipeBuild.status == BuildStatus.FULLYBUILT,
            SourcePackageRecipeBuild.date_finished != None)
        result.order_by(Desc(SourcePackageRecipeBuild.date_finished))
        # Consider at most the nine most recent successful builds.
        durations = [row[1] - row[0] for row in result[:9]]
        if len(durations) == 0:
            return None
        durations.sort(reverse=True)
        # Floor division: '/' on a list index is a latent bug under
        # Python 3 / future division (consistent with SnapBuild).
        return durations[len(durations) // 2]
class Person(object): __storm_table__ = "login" id = Int(primary=True) username = Unicode() password = Unicode()
class GitRule(StormBase):
    """See `IGitRule`.

    A ref-protection rule on a Git repository: a ref pattern at a given
    position in the repository's rule list, with a set of access grants.
    """

    __storm_table__ = 'GitRule'

    id = Int(primary=True)

    # Owning repository.
    repository_id = Int(name='repository', allow_none=False)
    repository = Reference(repository_id, 'GitRepository.id')

    # Ordering slot within the repository's rule list.
    position = Int(name='position', allow_none=False)

    # The ref glob this rule applies to.
    ref_pattern = Unicode(name='ref_pattern', allow_none=False)

    creator_id = Int(name='creator', allow_none=False,
                     validator=validate_public_person)
    creator = Reference(creator_id, 'Person.id')

    date_created = DateTime(
        name='date_created', tzinfo=pytz.UTC, allow_none=False)
    date_last_modified = DateTime(
        name='date_last_modified', tzinfo=pytz.UTC, allow_none=False)

    def __init__(self, repository, position, ref_pattern, creator,
                 date_created):
        super(GitRule, self).__init__()
        self.repository = repository
        self.position = position
        self.ref_pattern = ref_pattern
        self.creator = creator
        self.date_created = date_created
        self.date_last_modified = date_created
        # A new rule has no grants yet; prime the property cache so the
        # first access of `grants` does not hit the database.
        get_property_cache(self).grants = []

    def __repr__(self):
        return "<GitRule '%s' for %s>" % (
            self.ref_pattern, self.repository.unique_name)

    def toDataForJSON(self, media_type):
        """See `IJSONPublishable`.

        :raises ValueError: for any media type other than
            "application/json".
        """
        if media_type != "application/json":
            raise ValueError("Unhandled media type %s" % media_type)
        request = get_current_browser_request()
        field = InlineObject(schema=IGitNascentRule).bind(self)
        marshaller = getMultiAdapter((field, request), IFieldMarshaller)
        return marshaller.unmarshall(None, self)

    @cachedproperty
    def grants(self):
        """See `IGitRule`."""
        return list(Store.of(self).find(
            GitRuleGrant, GitRuleGrant.rule_id == self.id))

    def addGrant(self, grantee, grantor, can_create=False, can_push=False,
                 can_force_push=False, permissions=None):
        """See `IGitRule`.

        Either the three boolean flags or `permissions` (a set of
        `GitPermissionType`) may be supplied, never both.
        """
        if permissions is not None:
            if can_create or can_push or can_force_push:
                raise AssertionError(
                    "GitRule.addGrant takes either "
                    "can_create/can_push/can_force_push or permissions, "
                    "not both")
            # Derive the flags from the permission set.
            can_create = GitPermissionType.CAN_CREATE in permissions
            can_push = GitPermissionType.CAN_PUSH in permissions
            can_force_push = GitPermissionType.CAN_FORCE_PUSH in permissions
        grant = GitRuleGrant(
            rule=self, grantee=grantee, can_create=can_create,
            can_push=can_push, can_force_push=can_force_push,
            grantor=grantor, date_created=DEFAULT)
        getUtility(IGitActivitySet).logGrantAdded(grant, grantor)
        # The cached grants list is now stale; drop it.
        del get_property_cache(self).grants
        return grant

    def _validateGrants(self, grants):
        """Validate a new iterable of access grants.

        A PERSON-typed grant must name a grantee; any other grantee type
        must not.
        """
        for grant in grants:
            if grant.grantee_type == GitGranteeType.PERSON:
                if grant.grantee is None:
                    raise ValueError(
                        "Permission grant for %s has grantee_type 'Person' "
                        "but no grantee" % self.ref_pattern)
            else:
                if grant.grantee is not None:
                    raise ValueError(
                        "Permission grant for %s has grantee_type '%s', "
                        "contradicting grantee ~%s" % (
                            self.ref_pattern, grant.grantee_type,
                            grant.grantee.name))

    def setGrants(self, grants, user):
        """See `IGitRule`.

        Diff the requested grants against the existing ones: grants that
        disappeared are destroyed, new ones are added, and changed ones
        are edited in place (with modification notifications).
        """
        self._validateGrants(grants)
        existing_grants = {
            (grant.grantee_type, grant.grantee): grant
            for grant in self.grants}
        new_grants = OrderedDict(
            ((grant.grantee_type, grant.grantee), grant)
            for grant in grants)
        for grant_key, grant in existing_grants.items():
            if grant_key not in new_grants:
                grant.destroySelf(user)
        for grant_key, new_grant in new_grants.items():
            grant = existing_grants.get(grant_key)
            if grant is None:
                # PERSON grants identify the grantee by person; other
                # grantee types are identified by the type itself.
                new_grantee = (
                    new_grant.grantee
                    if new_grant.grantee_type == GitGranteeType.PERSON
                    else new_grant.grantee_type)
                grant = self.addGrant(
                    new_grantee, user,
                    can_create=new_grant.can_create,
                    can_push=new_grant.can_push,
                    can_force_push=new_grant.can_force_push)
            else:
                edited_fields = []
                # notify_modified sees the final edited_fields list,
                # populated as each differing flag is copied over.
                with notify_modified(grant, edited_fields):
                    for field in (
                            "can_create", "can_push", "can_force_push"):
                        if getattr(grant, field) != getattr(
                                new_grant, field):
                            setattr(grant, field, getattr(new_grant, field))
                            edited_fields.append(field)

    @staticmethod
    def preloadGrantsForRules(rules):
        """Preload the access grants related to an iterable of rules."""
        grants = load_referencing(GitRuleGrant, rules, ["rule_id"])
        grants_map = defaultdict(list)
        for grant in grants:
            grants_map[grant.rule_id].append(grant)
        for rule in rules:
            get_property_cache(rule).grants = grants_map[rule.id]
        # Also bulk-load the grantee persons to avoid later lazy loads.
        load_related(Person, grants, ["grantee_id"])

    def destroySelf(self, user):
        """See `IGitRule`.

        Removes the rule, its grants, and compacts the positions of the
        repository's remaining rules.
        """
        getUtility(IGitActivitySet).logRuleRemoved(self, user)
        for grant in self.grants:
            grant.destroySelf()
        # Snapshot the rule list before removal so the positions of the
        # survivors can be resynchronised without this rule.
        rules = list(self.repository.rules)
        Store.of(self).remove(self)
        rules.remove(self)
        removeSecurityProxy(self.repository)._syncRulePositions(rules)
class Texto(object): __storm_table__ = "texto" id = Int(primary=True) post_id = Int() conteudo = Unicode()
class User_v_24(ModelWithID): __storm_table__ = 'user' creation_date = DateTime() username = Unicode() password = Unicode() salt = Unicode() deletable = Bool() name = Unicode() description = JSON() role = Unicode() state = Unicode() last_login = DateTime() mail_address = Unicode() language = Unicode() timezone = Int() password_change_needed = Bool() password_change_date = DateTime() pgp_key_info = Unicode() pgp_key_fingerprint = Unicode() pgp_key_public = Unicode() pgp_key_expiration = DateTime() pgp_key_status = Unicode()
class Node_v_32(ModelWithID):
    # Schema snapshot of the singleton 'node' table as of database
    # version 32, kept for migration purposes.
    __storm_table__ = 'node'

    version = Unicode(default=unicode(__version__))
    version_db = Unicode(default=unicode(DATABASE_VERSION))

    name = Unicode(validator=shorttext_v, default=u'')

    # Optional HTTP basic-auth gate in front of the node.
    basic_auth = Bool(default=False)
    basic_auth_username = Unicode(default=u'')
    basic_auth_password = Unicode(default=u'')

    public_site = Unicode(validator=shorttext_v, default=u'')
    hidden_service = Unicode(validator=shorttext_v, default=u'')
    receipt_salt = Unicode(validator=shorttext_v)

    # Localisation.
    languages_enabled = JSON(default=LANGUAGES_SUPPORTED_CODES)
    default_language = Unicode(validator=shorttext_v, default=u'en')
    default_timezone = Int(default=0)
    default_password = Unicode(validator=longtext_v, default=u'globaleaks')

    # Localised presentation strings.
    description = JSON(validator=longlocal_v, default=empty_localization)
    presentation = JSON(validator=longlocal_v, default=empty_localization)
    footer = JSON(validator=longlocal_v, default=empty_localization)
    security_awareness_title = JSON(validator=longlocal_v,
                                    default=empty_localization)
    security_awareness_text = JSON(validator=longlocal_v,
                                   default=empty_localization)

    # Size limits.
    maximum_namesize = Int(default=128)
    maximum_textsize = Int(default=4096)
    maximum_filesize = Int(default=30)

    # Tor2web access policy per role.
    tor2web_admin = Bool(default=True)
    tor2web_custodian = Bool(default=True)
    tor2web_whistleblower = Bool(default=False)
    tor2web_receiver = Bool(default=True)
    tor2web_unauth = Bool(default=True)

    allow_unencrypted = Bool(default=False)
    disable_encryption_warnings = Bool(default=False)
    allow_iframes_inclusion = Bool(default=False)

    # Submission timing constraints (seconds).
    submission_minimum_delay = Int(default=10)
    submission_maximum_ttl = Int(default=10800)

    # Default privileges.
    can_postpone_expiration = Bool(default=False)
    can_delete_submission = Bool(default=False)
    can_grant_permissions = Bool(default=False)

    ahmia = Bool(default=False)
    allow_indexing = Bool(default=False)
    wizard_done = Bool(default=False)
    disable_submissions = Bool(default=False)
    disable_privacy_badge = Bool(default=False)
    disable_security_awareness_badge = Bool(default=False)
    disable_security_awareness_questions = Bool(default=False)
    disable_key_code_hint = Bool(default=False)
    disable_donation_panel = Bool(default=False)

    enable_captcha = Bool(default=True)
    enable_proof_of_work = Bool(default=True)
    enable_experimental_features = Bool(default=False)

    # Localised whistleblower-facing strings.
    whistleblowing_question = JSON(validator=longlocal_v,
                                   default=empty_localization)
    whistleblowing_button = JSON(validator=longlocal_v,
                                 default=empty_localization)
    whistleblowing_receipt_prompt = JSON(validator=longlocal_v,
                                         default=empty_localization)

    simplified_login = Bool(default=True)

    enable_custom_privacy_badge = Bool(default=False)
    custom_privacy_badge_tor = JSON(validator=longlocal_v,
                                    default=empty_localization)
    custom_privacy_badge_none = JSON(validator=longlocal_v,
                                     default=empty_localization)

    # Localised page headers.
    header_title_homepage = JSON(validator=longlocal_v,
                                 default=empty_localization)
    header_title_submissionpage = JSON(validator=longlocal_v,
                                       default=empty_localization)
    header_title_receiptpage = JSON(validator=longlocal_v,
                                    default=empty_localization)
    header_title_tippage = JSON(validator=longlocal_v,
                                default=empty_localization)

    # Localised widget titles.
    widget_comments_title = JSON(validator=shortlocal_v,
                                 default=empty_localization)
    widget_messages_title = JSON(validator=shortlocal_v,
                                 default=empty_localization)
    widget_files_title = JSON(validator=shortlocal_v,
                              default=empty_localization)

    landing_page = Unicode(default=u'homepage')

    contexts_clarification = JSON(validator=longlocal_v,
                                  default=empty_localization)
    show_small_context_cards = Bool(default=False)
    show_contexts_in_alphabetical_order = Bool(default=False)

    # Free-disk alert thresholds (absolute MB and percentage).
    threshold_free_disk_megabytes_high = Int(default=200)
    threshold_free_disk_megabytes_medium = Int(default=500)
    threshold_free_disk_megabytes_low = Int(default=1000)
    threshold_free_disk_percentage_high = Int(default=3)
    threshold_free_disk_percentage_medium = Int(default=5)
    threshold_free_disk_percentage_low = Int(default=10)

    context_selector_type = Unicode(validator=shorttext_v, default=u'list')
class ArchiveSubscriber(Storm):
    """See `IArchiveSubscriber`.

    A (possibly team) subscription granting access to a private archive.
    """
    implements(IArchiveSubscriber)
    __storm_table__ = 'ArchiveSubscriber'

    id = Int(primary=True)

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    # The person who created the subscription.
    registrant_id = Int(name='registrant', allow_none=False)
    registrant = Reference(registrant_id, 'Person.id')

    date_created = DateTime(name='date_created', allow_none=False,
                            tzinfo=pytz.UTC)

    # The person (or team) the subscription is for.
    subscriber_id = Int(name='subscriber', allow_none=False,
                        validator=validate_person)
    subscriber = Reference(subscriber_id, 'Person.id')

    date_expires = DateTime(name='date_expires', allow_none=True,
                            tzinfo=pytz.UTC)

    status = DBEnum(name='status', allow_none=False,
                    enum=ArchiveSubscriberStatus)

    description = Unicode(name='description', allow_none=True)

    # Cancellation bookkeeping.
    date_cancelled = DateTime(name='date_cancelled', allow_none=True,
                              tzinfo=pytz.UTC)
    cancelled_by_id = Int(name='cancelled_by', allow_none=True)
    cancelled_by = Reference(cancelled_by_id, 'Person.id')

    @property
    def displayname(self):
        """See `IArchiveSubscriber`."""
        return "%s's access to %s" % (
            self.subscriber.displayname, self.archive.displayname)

    def cancel(self, cancelled_by):
        """See `IArchiveSubscriber`.

        Marks the subscription CANCELLED, recording who and when.
        """
        self.date_cancelled = UTC_NOW
        self.cancelled_by = cancelled_by
        self.status = ArchiveSubscriberStatus.CANCELLED

    def getNonActiveSubscribers(self):
        """See `IArchiveSubscriber`.

        Return (Person, EmailAddress) pairs for subscribers covered by
        this subscription who do not yet hold an active auth token for
        the archive.
        """
        store = Store.of(self)
        if self.subscriber.is_team:
            # We get all the people who already have active tokens for
            # this archive (for example, through separate subscriptions).
            auth_token = LeftJoin(
                ArchiveAuthToken,
                And(ArchiveAuthToken.person_id == Person.id,
                    ArchiveAuthToken.archive_id == self.archive_id,
                    ArchiveAuthToken.date_deactivated == None))

            team_participation = Join(
                TeamParticipation,
                TeamParticipation.personID == Person.id)

            # Only return people with preferred email address set.
            preferred_email = Join(
                EmailAddress, EmailAddress.personID == Person.id)

            # We want to get all participants who are themselves
            # individuals, not teams:
            non_active_subscribers = store.using(
                Person, team_participation, preferred_email,
                auth_token).find(
                    (Person, EmailAddress),
                    EmailAddress.status == EmailAddressStatus.PREFERRED,
                    TeamParticipation.teamID == self.subscriber_id,
                    # Pick individuals only (teams have a teamowner).
                    Person.teamowner == None,
                    # There is no existing archive auth token.
                    ArchiveAuthToken.person_id == None)
            non_active_subscribers.order_by(Person.name)
            return non_active_subscribers
        else:
            # Subscriber is not a team.
            token_set = getUtility(IArchiveAuthTokenSet)
            if token_set.getActiveTokenForArchiveAndPerson(
                    self.archive, self.subscriber) is not None:
                # There are active tokens, so return an empty result
                # set.
                return EmptyResultSet()

            # Otherwise return a result set containing only the
            # subscriber and their preferred email address.
            return store.find(
                (Person, EmailAddress),
                Person.id == self.subscriber_id,
                EmailAddress.personID == Person.id,
                EmailAddress.status == EmailAddressStatus.PREFERRED)
class Node_v_29(ModelWithID): __storm_table__ = 'node' version = Unicode() version_db = Unicode() name = Unicode() public_site = Unicode() hidden_service = Unicode() receipt_salt = Unicode() languages_enabled = JSON() default_language = Unicode() default_timezone = Int() description = JSON() presentation = JSON() footer = JSON() security_awareness_title = JSON() security_awareness_text = JSON() context_selector_label = JSON() maximum_namesize = Int() maximum_textsize = Int() maximum_filesize = Int() tor2web_admin = Bool() tor2web_custodian = Bool() tor2web_whistleblower = Bool() tor2web_receiver = Bool() tor2web_unauth = Bool() allow_unencrypted = Bool() allow_iframes_inclusion = Bool() submission_minimum_delay = Int() submission_maximum_ttl = Int() can_postpone_expiration = Bool() can_delete_submission = Bool() can_grant_permissions = Bool() ahmia = Bool() wizard_done = Bool() disable_submissions = Bool() disable_privacy_badge = Bool() disable_security_awareness_badge = Bool() disable_security_awareness_questions = Bool() disable_key_code_hint = Bool() disable_donation_panel = Bool() enable_captcha = Bool() enable_proof_of_work = Bool() enable_experimental_features = Bool() whistleblowing_question = JSON() whistleblowing_button = JSON() simplified_login = Bool() enable_custom_privacy_badge = Bool() custom_privacy_badge_tor = JSON() custom_privacy_badge_none = JSON() header_title_homepage = JSON() header_title_submissionpage = JSON() header_title_receiptpage = JSON() header_title_tippage = JSON() widget_comments_title = JSON() widget_messages_title = JSON() widget_files_title = JSON() landing_page = Unicode() show_contexts_in_alphabetical_order = Bool() threshold_free_disk_megabytes_high = Int() threshold_free_disk_megabytes_medium = Int() threshold_free_disk_megabytes_low = Int() threshold_free_disk_percentage_high = Int() threshold_free_disk_percentage_medium = Int() threshold_free_disk_percentage_low = Int()
class IcepapDriver(Storm):
    # Storm model for one IcePAP driver board, keyed by (system name,
    # address).  Tracks its current configuration, an in-memory undo
    # list, and a conflict flag used by the configuration UI.
    __storm_table__ = "icepapdriver"
    __storm_primary__ = ("icepapsystem_name", "addr")

    icepapsystem_name = Unicode()
    addr = Int()
    name = Unicode()
    mode = Unicode()
    """
    references
    """
    icepap_system = Reference(icepapsystem_name, "IcepapSystem.name")
    historic_cfgs = ReferenceSet(
        (icepapsystem_name, addr),
        ("IcepapDriverCfg.icepapsystem_name",
         "IcepapDriverCfg.driver_addr"))
    log = logging.getLogger('{}.IcepapDriver'.format(__name__))

    @loggingInfo
    def __init__(self, icepap_name, addr):
        self.icepapsystem_name = str(icepap_name)
        self.addr = addr
        self.current_cfg = None
        self.initialize()

    @loggingInfo
    def __storm_loaded__(self):
        # Called by Storm after loading from DB: the current config is
        # the most recent historic one.
        self.current_cfg = self.historic_cfgs.order_by("date").last()
        self.initialize()

    @loggingInfo
    def initialize(self):
        # Derive crate/driver numbers from the address (two-digit
        # addressing: tens = crate, units = driver).
        self.drivernr = self.addr % 10
        self.cratenr = self.addr // 10
        self._undo_list = []
        self.startup_cfg = self.current_cfg
        self.conflict = Conflict.NO_CONFLICT

    @loggingInfo
    def addConfiguration(self, cfg, current=True):
        # When replacing the current config, remember the old one for
        # undo; otherwise this is the startup config being (re)set.
        if current:
            if self.current_cfg is not None:
                self._undo_list.append(self.current_cfg)
        else:
            self.startup_cfg = cfg
        self.current_cfg = cfg
        cfg.setDriver(self)
        self.historic_cfgs.add(cfg)

    @loggingInfo
    def setConflict(self, conflict):
        self.conflict = conflict

    @loggingInfo
    def getName(self):
        return self.name

    @loggingInfo
    def setName(self, name):
        self.name = str(name)

    @loggingInfo
    def setMode(self, mode):
        self.mode = str(mode)

    @loggingInfo
    def signDriver(self):
        # AS ESRF SAYS, WHEN SIGNING THE DRIVER CONFIG, THE COMMIT SHOULD
        # BE DONE IN THE DATABASE FIRST, AND IF NO ERRORS, THEN COMMUNICATE
        # THE DRIVER THAT THE VALUES SHOULD BE SIGNED.
        try:
            user = ConfigManager().username
            host = socket.gethostname()
            # Signature format: user@host_YYYY/MM/DD_HH:MM:SS
            signature = user + "@" + host + "_" + \
                datetime.now().strftime('%Y/%m/%d_%H:%M:%S')
            IcepapsManager().signDriverConfiguration(
                self.icepapsystem_name, self.addr, signature)
            self.mode = str(Mode.OPER)
            db = StormManager()
            db.commitTransaction()
            self.current_cfg.name = str(time.ctime())
            self.current_cfg.setSignature(signature)
            self.startup_cfg = self.current_cfg
            self.conflict = Conflict.NO_CONFLICT
        except Exception as e:
            # Best-effort: a failed signing is logged, not propagated.
            self.log.error(
                "some exception while trying to sign the driver %s", e)

    @loggingInfo
    def setStartupCfg(self):
        # Revert the current config back to the startup one.
        self.current_cfg = self.startup_cfg
        self.conflict = Conflict.NO_CONFLICT

    @loggingInfo
    def undo(self, config):
        self.addConfiguration(config)
        # THE CURRENT CONFIGURATION SHOULD NOT BE IN THE UNDO LIST
        return self._undo_list.pop()

    @loggingInfo
    def getUndoList(self):
        # NOTE(review): despite the "get" name this POPS (mutates) the
        # undo list and returns only its last entry -- confirm callers
        # expect that.
        return self._undo_list.pop()

    @loggingInfo
    def hasUndoList(self):
        return len(self._undo_list) > 0

    @loggingInfo
    def saveHistoricCfg(self, now, name, desc):
        # NOTE(review): the `now` parameter is unused here.
        self.current_cfg.name = str(name)
        self.current_cfg.description = str(desc)

    @loggingInfo
    def deleteHistoricCfg(self, cfg):
        self.historic_cfgs.remove(cfg)

    @loggingInfo
    def __ne__(self, other):
        return not self.__eq__(other)

    @loggingInfo
    def __eq__(self, other):
        # Equality compares configurations and, as a side effect, updates
        # this driver's conflict flag.
        if self.current_cfg == other.current_cfg:
            self.setConflict(Conflict.NO_CONFLICT)
            return True
        self.setConflict(Conflict.DRIVER_CHANGED)
        return False

    # TO SORT THE ICEPAP DRIVERS IN THE TREE
    @loggingInfo
    def __lt__(self, other):
        # NOTE(review): implicitly returns None (falsy) when `other` is
        # not an IcepapDriver -- confirm that is intended.
        if isinstance(other, IcepapDriver):
            return self.addr < other.addr
class FieldAnswerGroup_v_29(ModelWithID): __storm_table__ = 'fieldanswergroup' number = Int(default=0) fieldanswer_id = Unicode()
class UserToUserEmail(Storm):
    """See `IUserToUserEmail`.

    A record of one user-to-user email, constructed directly from an
    `email.message.Message` whose From/To/Date/Message-ID/Subject headers
    must all be present and resolvable to known persons.
    """

    implements(IUserToUserEmail)

    __storm_table__ = 'UserToUserEmail'

    id = Int(primary=True)

    sender_id = Int(name='sender')
    sender = Reference(sender_id, 'Person.id')

    recipient_id = Int(name='recipient')
    recipient = Reference(recipient_id, 'Person.id')

    date_sent = DateTime(allow_none=False)

    subject = Unicode(allow_none=False)

    message_id = Unicode(allow_none=False)

    def __init__(self, message):
        """Create a new user-to-user email entry.

        :param message: the message being sent
        :type message: `email.message.Message`

        NOTE(review): header validation below uses `assert`, which is
        stripped under ``python -O`` -- confirm that is acceptable here.
        """
        super(UserToUserEmail, self).__init__()
        person_set = getUtility(IPersonSet)
        # Find the person who is sending this message.
        realname, address = parseaddr(message['from'])
        assert address, 'Message has no From: field'
        sender = person_set.getByEmail(address)
        assert sender is not None, 'No person for sender email: %s' % address
        # Find the person who is the recipient.
        realname, address = parseaddr(message['to'])
        assert address, 'Message has no To: field'
        recipient = person_set.getByEmail(address)
        assert recipient is not None, (
            'No person for recipient email: %s' % address)
        # Convert the date string into a UTC datetime.
        date = message['date']
        assert date is not None, 'Message has no Date: field'
        self.date_sent = utcdatetime_from_field(date)
        # Find the subject and message-id.
        message_id = message['message-id']
        assert message_id is not None, 'Message has no Message-ID: field'
        subject = message['subject']
        assert subject is not None, 'Message has no Subject: field'
        # Initialize.
        self.sender = sender
        self.recipient = recipient
        self.message_id = unicode(message_id, 'ascii')
        # Decode any RFC 2047 encoded words in the subject.
        self.subject = unicode(make_header(decode_header(subject)))
        # Add the object to the store of the sender.  Our StormMigrationGuide
        # recommends against this saying "Note that the constructor should not
        # usually add the object to a store -- leave that for a FooSet.new()
        # method, or let it be inferred by a relation."
        #
        # On the other hand, we really don't need a UserToUserEmailSet for any
        # other purpose.  There isn't any other relationship that can be
        # inferred, so in this case I think it makes fine sense for the
        # constructor to add self to the store.  Also, this closely mimics
        # what the SQLObject compatibility layer does.
        Store.of(sender).add(self)
class DBLaunchpadIssueExt(object): """ """ __storm_table__ = 'issues_ext_launchpad' id = Int(primary=True) status = Unicode() issue_id = Int() description = Unicode() web_link = Unicode() bug_target_display_name = Unicode() bug_target_name = Unicode() date_assigned = DateTime() date_closed = DateTime() date_confirmed = DateTime() date_created = DateTime() date_fix_committed = DateTime() date_fix_released = DateTime() date_in_progress = DateTime() date_incomplete = DateTime() date_left_closed = DateTime() date_left_new = DateTime() date_triaged = DateTime() date_last_message = DateTime() date_last_updated = DateTime() #FIXME a new table for this would be better milestone_code_name = Unicode() milestone_data_targeted = Unicode() milestone_name = Unicode() milestone_summary = Unicode() milestone_title = Unicode() milestone_web_link = Unicode() heat = Int() linked_branches = Unicode() #messages tags = Unicode() title = Unicode() users_affected_count = Int() web_link_standalone = Unicode() issue = Reference(issue_id, DBIssue.id) def __init__(self, issue_id): self.issue_id = issue_id
class FieldOption_v_20(ModelWithID): __storm_table__ = 'fieldoption' field_id = Unicode() number = Int() attrs = JSON()
class FieldOption_v_22(Model): __storm_table__ = 'fieldoption' field_id = Unicode() presentation_order = Int() attrs = JSON()
class Node(Model):
    """
    This table has only one instance, has the "id", but would not exists a
    second element of this table. This table acts, more or less, like the
    configuration file of the previous GlobaLeaks release (and some of the
    GL 0.1 details are specified in Context)

    This table represent the System-wide settings
    """
    name = Unicode(validator=shorttext_v)
    public_site = Unicode(validator=shorttext_v)
    hidden_service = Unicode(validator=shorttext_v)
    email = Unicode(validator=shorttext_v)
    receipt_salt = Unicode(validator=shorttext_v)

    languages_enabled = JSON()
    default_language = Unicode(validator=shorttext_v)
    default_timezone = Int(default=0)

    # localized strings
    description = JSON(validator=longlocal_v)
    presentation = JSON(validator=longlocal_v)
    footer = JSON(validator=longlocal_v)
    security_awareness_title = JSON(validator=longlocal_v)
    security_awareness_text = JSON(validator=longlocal_v)
    context_selector_label = JSON(validator=longlocal_v)

    # Advanced settings
    maximum_namesize = Int()
    maximum_textsize = Int()
    maximum_filesize = Int()
    tor2web_admin = Bool()
    tor2web_submission = Bool()
    tor2web_receiver = Bool()
    tor2web_unauth = Bool()
    allow_unencrypted = Bool()
    allow_iframes_inclusion = Bool()
    submission_minimum_delay = Int(default=10)
    submission_maximum_ttl = Int(default=10800)

    # privileges configurable in node/context/receiver
    can_postpone_expiration = Bool(default=False)
    can_delete_submission = Bool(default=False)

    ahmia = Bool(default=False)
    wizard_done = Bool(default=False)
    disable_privacy_badge = Bool(default=False)
    disable_security_awareness_badge = Bool(default=False)
    disable_security_awareness_questions = Bool(default=False)
    disable_key_code_hint = Bool(default=False)

    whistleblowing_question = JSON(validator=longlocal_v)
    whistleblowing_button = JSON(validator=longlocal_v)

    enable_custom_privacy_badge = Bool(default=False)
    custom_privacy_badge_tor = JSON(validator=longlocal_v)
    custom_privacy_badge_none = JSON(validator=longlocal_v)

    header_title_homepage = JSON(validator=longlocal_v)
    header_title_submissionpage = JSON(validator=longlocal_v)
    header_title_receiptpage = JSON(validator=longlocal_v)

    landing_page = Unicode()

    show_contexts_in_alphabetical_order = Bool(default=False)

    exception_email = Unicode()

    # The key lists below drive generic (de)serialisation of settings:
    # each attribute name is coerced according to the list it appears in.
    unicode_keys = ['name', 'public_site', 'email', 'hidden_service',
                    'exception_email', 'default_language', 'landing_page']

    # NOTE(review): show_contexts_in_alphabetical_order is declared Bool
    # above but listed among the int keys -- confirm which is intended.
    int_keys = ['maximum_namesize', 'maximum_textsize',
                'maximum_filesize', 'default_timezone',
                'show_contexts_in_alphabetical_order',
                'submission_minimum_delay',
                'submission_maximum_ttl'
                ]

    bool_keys = ['tor2web_admin', 'tor2web_receiver', 'tor2web_submission',
                 'tor2web_unauth', 'can_postpone_expiration',
                 'can_delete_submission', 'ahmia', 'allow_unencrypted',
                 'allow_iframes_inclusion', 'disable_privacy_badge',
                 'disable_security_awareness_badge',
                 'disable_security_awareness_questions',
                 'enable_custom_privacy_badge', 'disable_key_code_hint']
    # wizard_done is not checked because it's set by the backend

    localized_strings = ['description', 'presentation', 'footer',
                         'security_awareness_title',
                         'security_awareness_text',
                         'whistleblowing_question', 'whistleblowing_button',
                         'custom_privacy_badge_tor',
                         'custom_privacy_badge_none',
                         'header_title_homepage',
                         'header_title_submissionpage',
                         'header_title_receiptpage',
                         'context_selector_label']