class SubScheduler(Item, SchedulerMixin):
    """
    Track and execute persistent timed events for a substore.
    """
    schemaVersion = 1
    typeName = 'axiom_subscheduler'

    implements(IScheduler)
    powerupInterfaces = (IScheduler, )

    # Count of events this scheduler has executed.
    eventsRun = integer(default=0)
    # Bookkeeping timestamps for the most recently run and next pending event.
    lastEventAt = timestamp()
    nextEventAt = timestamp()

    # Also testing hooks
    callLater = inmemory()
    now = inmemory()

    def __repr__(self):
        return '<SubScheduler for %r>' % (self.store, )

    def activate(self):
        # Default the test hooks to the real reactor and clock; tests may
        # replace these after activation.
        self.callLater = reactor.callLater
        self.now = Time

    def _transientSchedule(self, when, now):
        # Propagate scheduling up to the site store: ensure a parent hook
        # exists for this substore's login account and ask it to fire at
        # `when`.  NOTE(review): the `now` parameter is unused here.
        if self.store.parent is not None:
            loginAccount = self.store.parent.getItemByID(self.store.idInParent)
            hook = self.store.parent.findOrCreate(
                _SubSchedulerParentHook,
                lambda hook: installOn(hook, hook.store),
                loginAccount=loginAccount)
            hook._schedule(when)

    def migrateDown(self):
        """
        Remove the components in the site store for this SubScheduler.
        """
        loginAccount = self.store.parent.getItemByID(self.store.idInParent)
        ssph = self.store.parent.findUnique(
            _SubSchedulerParentHook,
            _SubSchedulerParentHook.loginAccount == loginAccount,
            default=None)
        if ssph is not None:
            # Delete the parent-store trigger event (if any) before the
            # hook itself.
            te = self.store.parent.findUnique(
                TimedEvent,
                TimedEvent.runnable == ssph,
                default=None)
            if te is not None:
                te.deleteFromStore()
            ssph.deleteFromStore()

    def migrateUp(self):
        """
        Recreate the hooks in the site store to trigger this SubScheduler.
        """
        # NOTE(review): the descending sort selects the *latest* timed
        # event; if the intent was to re-register for the soonest pending
        # event, ascending may have been intended -- confirm against the
        # parent hook's _schedule semantics.
        te = self.store.findFirst(TimedEvent, sort=TimedEvent.time.descending)
        if te is not None:
            self._transientSchedule(te.time, self.now)
class TimedEventFailureLog(Item):
    """
    Record of a L{TimedEvent} whose runnable raised an unhandled error.

    Created by the scheduler's default error handler so failures can be
    inspected after the offending event has been deleted.
    """
    typeName = 'timed_event_failure_log'
    schemaVersion = 1

    # When the event was scheduled to run.
    desiredTime = timestamp()
    # When it actually ran (and failed).
    actualTime = timestamp()
    # The item whose run() raised.
    runnable = reference()
    # Rendered traceback text of the failure.
    traceback = bytes()
class Call(Item):
    """
    Log entry for a single incoming or outgoing call.
    """
    typeName = "sine_call"
    schemaVersion = 1

    # Display name of the remote party.
    name = text()
    # SIP URI of the remote party, stored as text.
    uri = text()
    # When the call occurred.
    time = timestamp()
    # Direction marker: u'from' for incoming, u'to' for outgoing
    # (see TrivialContact.callIncoming / callOutgoing).
    kind = text()
class Paste(Item):
    """
    A stored paste: named text content with an optional language hint.
    """
    created = timestamp(defaultFactory=lambda: Time(),
                        doc=u'Creation timestamp')
    languageHint = text(doc=u'Paste content language hint')
    name = text(allowNone=False, indexed=True, doc=u'Paste name')
    content = text(allowNone=False, doc=u'Paste content')

    def run(self):
        """
        Scheduler callback: expire this paste by deleting it from the store.
        """
        self.deleteFromStore()

    def toJSON(self):
        """
        Describe the L{Paste} item as I{JSON}.

        @return: A JSON string of the item's persistent attributes, with
            the paste name duplicated under the C{'id'} key.
        """
        attrs = dict(self.persistentValues())
        attrs['id'] = attrs['name']
        return json.dumps(attrs, default=jsonSerialize)

    @classmethod
    def findByName(cls, store, name):
        """
        Get a L{Paste} item by name.

        @param store: The store to search.
        @param name: The paste name to look up.
        """
        # Fixed: use cls rather than the hard-coded Paste class so that
        # subclasses locate items of their own type.
        return store.findUnique(cls, cls.name == name)
class Recording(Item, website.PrefixURLMixin):
    """
    A stored audio recording (WAV file), served at a private URL.
    """
    typeName = "sine_confession_recording"
    schemaVersion = 1

    # URL path under which the WAV file is served.
    prefixURL = text()
    length = integer() #seconds in recording
    fromAddress = text()
    # When the recording was created.
    time = timestamp()

    sessioned = True
    sessionless = False

    def __init__(self, **args):
        super(Recording, self).__init__(**args)
        self.time = Time()
        # The URL incorporates the storeID, so it is only assignable
        # after the item has been inserted into a store.
        self.prefixURL = unicode("private/recordings/%s.wav"
                                 % str(self.storeID))

    def getFile(self):
        """
        Return the file path for this recording's WAV data, creating the
        recordings directory if necessary.
        """
        # Renamed from `dir` to avoid shadowing the builtin.
        recordingsDir = self.store.newDirectory("recordings")
        if not recordingsDir.exists():
            recordingsDir.makedirs()  # should i really have to do this?
        return recordingsDir.child("%s.wav" % self.storeID)
    file = property(getFile)

    def audioFromFile(self, filename):
        """
        Move the WAV file at C{filename} into place as this recording's
        content and record its duration in whole seconds.
        """
        f = self.file.path
        filepath.FilePath(filename).moveTo(self.file)
        w = wave.open(f)
        try:
            # Integer division is intentional: length is an integer
            # attribute of whole seconds.
            self.length = w.getnframes() / w.getframerate()
        finally:
            # Fixed: always release the file handle, even if reading the
            # WAV header data raises.
            w.close()

    def createResource(self):
        return static.File(self.file.path)
class SystemVersion(item.Item):
    """
    Represents a set of software package versions which, taken together,
    comprise a "system version" of the software that can have affected
    the contents of a Store.

    By recording the changes of these versions in the store itself we can
    better reconstruct its history later.
    """
    creation = attributes.timestamp(
        doc="When this system version set was recorded.",
        allowNone=False)

    def __repr__(self):
        return '<SystemVersion %s>' % (self.creation,)

    def longWindedRepr(self):
        """
        @return: A string representation of this SystemVersion suitable
            for display to the user.
        """
        # One line for this item, then one indented line per recorded
        # software version.
        parts = [repr(self)]
        versions = self.store.query(
            SoftwareVersion, SoftwareVersion.systemVersion == self)
        for version in versions:
            parts.append(repr(version))
        return '\n\t'.join(parts)
class PersistentSession(item.Item):
    """A session that persists on the database.

    These sessions should not store any state, but are used only to
    determine that the user has previously authenticated and should be
    given a transient session (a regular guard session, not database
    persistant) without providing credentials again.
    """

    typeName = 'persistent_session'
    schemaVersion = 1

    # Opaque key identifying this session (the persistent cookie value).
    sessionKey = attributes.bytes()
    # Updated by renew(); presumably used elsewhere for expiry -- confirm.
    lastUsed = attributes.timestamp()
    authenticatedAs = attributes.bytes()  # The username and domain
                                          # that this session was
                                          # authenticated as.

    def __init__(self, **kw):
        # NOTE(review): assert is stripped under python -O; an explicit
        # raise would validate sessionKey more robustly.
        assert kw.get(
            'sessionKey') is not None, "None cookie propogated to PersistentSession"
        kw['lastUsed'] = extime.Time()
        super(PersistentSession, self).__init__(**kw)

    def __repr__(self):
        return "PersistentSession(%r)" % (self.sessionKey, )

    def renew(self):
        """Renew the lifetime of this object. Call this when the user logs
        in so this session does not expire.
        """
        self.lastUsed = extime.Time()
class TimedEvent(Item):
    """
    A persistent scheduled call: at C{time}, the scheduler invokes
    C{runnable.run()} inside a transaction and reschedules or deletes
    this event according to the result.
    """
    typeName = 'timed_event'
    schemaVersion = 1

    # When this event should next run.
    time = timestamp(indexed=True)
    # The item whose run() method will be invoked.
    runnable = reference()
    running = inmemory(doc='True if this event is currently running.')

    def activate(self):
        self.running = False

    def _rescheduleFromRun(self, newTime):
        """
        Schedule this event to be run at the indicated time, or if the
        indicated time is None, delete this event.
        """
        if newTime is None:
            self.deleteFromStore()
        else:
            self.time = newTime

    def invokeRunnable(self):
        """
        Run my runnable, and reschedule or delete myself based on its
        result.  Must be run in a transaction.
        """
        runnable = self.runnable
        if runnable is None:
            # The runnable has been deleted; this event is useless.
            self.deleteFromStore()
        else:
            try:
                self.running = True
                newTime = runnable.run()
            finally:
                # Clear the flag even if run() raised.
                self.running = False
            self._rescheduleFromRun(newTime)

    def handleError(self, now, failureObj):
        """ An error occurred running my runnable.  Check my runnable for
        an error-handling method called 'timedEventErrorHandler' that will
        take the given failure as an argument, and execute that if
        available: otherwise, create a TimedEventFailureLog with
        information about what happened to this event.

        Must be run in a transaction.
        """
        errorHandler = getattr(self.runnable, 'timedEventErrorHandler', None)
        if errorHandler is not None:
            # The handler's return value reschedules (a Time) or deletes
            # (None) this event, just like run().
            self._rescheduleFromRun(errorHandler(self, failureObj))
        else:
            self._defaultErrorHandler(now, failureObj)

    def _defaultErrorHandler(self, now, failureObj):
        # Record the failure for later inspection, then drop the event.
        TimedEventFailureLog(store=self.store,
                             desiredTime=self.time,
                             actualTime=now,
                             runnable=self.runnable,
                             traceback=failureObj.getTraceback())
        self.deleteFromStore()
class EmailAddressExtract(SimpleExtractMixin, Item): typeName = 'quotient_email_address_extract' schemaVersion = 2 start = attributes.integer() end = attributes.integer() text = attributes.text(indexed=True) message = attributes.reference() part = attributes.reference() timestamp = attributes.timestamp() person = attributes.reference() regex = re.compile(ur'[\w\-\.]+@(?:[a-z0-9-]+\.)+[a-z]+', re.UNICODE | re.IGNORECASE) def worthStoring(message, extractedText): return not message.sender == extractedText worthStoring = staticmethod(worthStoring) def asStan(self): return tags.b[self.text]
class TrivialContact(Item):
    """
    Minimal SIP contact: tracks a single registered contact URL with an
    expiry time, an optional static fallback contact, and logs calls.
    """
    implements(sip.IContact, ixmantissa.INavigableElement)
    typeName = "sine_trivialcontact"
    schemaVersion = 1

    # Serialized SIP URL of the current registration (None when not
    # registered).
    physicalURL = bytes()
    # Fallback contact URL used when no live registration exists.
    altcontact = bytes()
    # When the current registration lapses.
    expiryTime = timestamp()
    installedOn = reference()

    powerupInterfaces = (ixmantissa.INavigableElement, sip.IContact)

    def registerAddress(self, physicalURL, expiryTime):
        """
        Record a registration of C{physicalURL} lasting C{expiryTime}
        seconds from now.
        """
        self.physicalURL = physicalURL.toString()
        self.expiryTime = Time.fromPOSIXTimestamp(time.time() + expiryTime)
        return [(physicalURL, self.expiryTime)]

    def unregisterAddress(self, physicalURL):
        """
        Drop the current registration.

        @raise ValueError: if C{physicalURL} is not the registered URL.
        """
        storedURL = sip.parseURL(self.physicalURL)
        if storedURL != physicalURL:
            # Fixed: modern raise syntax and a descriptive message in
            # place of `raise ValueError, "what"`.
            raise ValueError(
                "cannot unregister %r: registered URL is %r"
                % (physicalURL, storedURL))
        self.physicalURL = None
        return [(physicalURL, 0)]

    def getRegistrationInfo(self, caller):
        """
        Return the live registration (with remaining seconds), else the
        fallback contact (with -1), else fail with SIP 480.
        """
        registered = False
        if self.physicalURL is not None:
            now = time.time()
            if now < self.expiryTime.asPOSIXTimestamp():
                registered = True
        if registered:
            return [(sip.parseURL(self.physicalURL),
                     int(self.expiryTime.asPOSIXTimestamp() - now))]
        elif self.altcontact:
            # Fallback contact never expires.
            return [(sip.parseURL(self.altcontact), -1)]
        else:
            # 480: Temporarily Unavailable.
            return defer.fail(sip.RegistrationError(480))

    def placeCall(self, target):
        svc = self.store.parent.findUnique(SIPServer)
        svc.setupCallBetween(
            ("", self.getRegistrationInfo(target)[0][0], {}),
            ("", target, {}))

    def callIncoming(self, name, uri, caller):
        # Log an incoming call.
        Call(store=self.store, name=name, time=Time(),
             uri=unicode(str(uri)), kind=u'from')

    def callOutgoing(self, name, uri):
        # Log an outgoing call.
        Call(store=self.store, name=name, time=Time(),
             uri=unicode(str(uri)), kind=u'to')

    def getTabs(self):
        return [webnav.Tab('Voice', self.storeID, 0.25)]
class _SignupTracker(Item):
    """
    Signup-system private Item used to track which signup mechanisms
    have been created.
    """
    # The signup mechanism item that was created.
    signupItem = reference()
    # When it was created.
    createdOn = timestamp()
    # Who created it.
    createdBy = text()
def processor(forType):
    """
    Create an Axiom Item type which is suitable to use as a batch processor
    for the given Axiom Item type.

    Processors created this way depend on a L{iaxiom.IScheduler} powerup on
    the on which store they are installed.

    @type forType: L{item.MetaItem}
    @param forType: The Axiom Item type for which to create a batch
    processor type.

    @rtype: L{item.MetaItem}

    @return: An Axiom Item type suitable for use as a batch processor.  If
    such a type previously existed, it will be returned.  Otherwise, a new
    type is created.
    """
    MILLI = 1000
    try:
        # Processor types are memoized per work-unit type.
        processor = _processors[forType]
    except KeyError:
        def __init__(self, *a, **kw):
            item.Item.__init__(self, *a, **kw)
            # A newly created processor immediately powers up its store.
            self.store.powerUp(self, iaxiom.IBatchProcessor)

        attrs = {
            '__name__': 'Batch_' + forType.__name__,
            '__module__': forType.__module__,
            '__init__': __init__,
            '__repr__': lambda self: '<Batch of %s #%d>' % (
                reflect.qual(self.workUnitType), self.storeID),
            'schemaVersion': 2,
            'workUnitType': forType,
            'scheduled': attributes.timestamp(doc="""
            The next time at which this processor is scheduled to run.
            """, default=None),
            # MAGIC NUMBERS AREN'T THEY WONDERFUL?
            'busyInterval': attributes.integer(doc="", default=MILLI / 10),
            }
        _processors[forType] = processor = item.MetaItem(
            attrs['__name__'],
            (item.Item, _BatchProcessorMixin),
            attrs)

        # Register the schema upgrader only when the type is first
        # created, so it is never registered twice.
        registerUpgrader(upgradeProcessor1to2, _processors[forType].typeName, 1, 2)
    return processor
class Sample(Item):
    """
    A sample item which will be used as the rows in the table displayed.
    """
    # Indexed so the table can sort on it.
    quantity = integer(indexed=True)
    title = text()
    # Indexed so the table can sort on it.
    date = timestamp(indexed=True)
    color = text(allowNone=False)
class Recording(Item):
    """
    A certain recording.
    """
    # When the recording was made.
    created = timestamp()
    caller_id = text()
    filename = text()
    duration = integer() # in frames
def processor(forType):
    """
    Create an Axiom Item type which is suitable to use as a batch processor
    for the given Axiom Item type.

    Processors created this way depend on a L{iaxiom.IScheduler} powerup on
    the on which store they are installed.

    @type forType: L{item.MetaItem}
    @param forType: The Axiom Item type for which to create a batch
    processor type.

    @rtype: L{item.MetaItem}

    @return: An Axiom Item type suitable for use as a batch processor.  If
    such a type previously existed, it will be returned.  Otherwise, a new
    type is created.
    """
    MILLI = 1000
    try:
        # Processor types are memoized per work-unit type.
        processor = _processors[forType]
    except KeyError:
        def __init__(self, *a, **kw):
            item.Item.__init__(self, *a, **kw)
            # A newly created processor immediately powers up its store.
            self.store.powerUp(self, iaxiom.IBatchProcessor)

        attrs = {
            '__name__': 'Batch_' + forType.__name__,
            '__module__': forType.__module__,
            '__init__': __init__,
            '__repr__': lambda self: '<Batch of %s #%d>' % (
                reflect.qual(self.workUnitType), self.storeID),
            'schemaVersion': 2,
            'workUnitType': forType,
            'scheduled': attributes.timestamp(doc="""
            The next time at which this processor is scheduled to run.
            """, default=None),
            # MAGIC NUMBERS AREN'T THEY WONDERFUL?
            'busyInterval': attributes.integer(doc="", default=MILLI / 10),
            }
        _processors[forType] = processor = item.MetaItem(
            attrs['__name__'],
            (item.Item, _BatchProcessorMixin),
            attrs)

        # Register the schema upgrader only when the type is first
        # created, so it is never registered twice.
        registerUpgrader(
            upgradeProcessor1to2, _processors[forType].typeName, 1, 2)
    return processor
class Factoid(Item):
    """
    A factoid.

    The crux of this item is the key/value concept.  The goal is to have
    keys mapping to multiple values, which can be built up into a simple
    snippets of information tied to topics.
    """
    typeName = 'eridanus_plugins_factoid_factoid'
    schemaVersion = 1

    created = timestamp(doc="""
    Creation time of this Factoid.
    """, defaultFactory=lambda: Time())

    creator = text(doc="""
    The name of the original creator.
    """, allowNone=False)

    modified = timestamp(doc="""
    Modification time of this Factoid.
    """, defaultFactory=lambda: Time())

    editor = text(doc="""
    The name of the last person to modify this factoid.
    """, allowNone=False)

    key = text(doc="""
    The factoid key.
    """, indexed=True, allowNone=False)

    value = text(doc="""
    A factoid value.
    """, allowNone=False)

    def touchFactoid(self, editor):
        """
        Record a modification by C{editor}, updating the modification
        timestamp.
        """
        self.editor = editor
        self.modified = Time()
class _PendingUpload(Item):
    """
    Marker for a pending upload to a backend store.
    """
    # Identifier of the object awaiting upload.
    objectId = text(allowNone=False)
    backend = reference(allowNone=False)  # reftype=IBackendStore
    # When the next upload attempt should run; defaults to now.
    scheduled = timestamp(indexed=True, allowNone=False,
                          defaultFactory=lambda: Time())

    def _nextAttempt(self):
        """
        Determine the time to schedule the next attempt.
        """
        # Fixed two-minute delay between retries.
        return Time() + timedelta(minutes=2)

    def run(self):
        # Scheduler entry point.
        self.attemptUpload()

    def attemptUpload(self):
        """
        Attempt an upload of an object to a backend store.

        If the upload fails, it will be rescheduled; if it succeeds, this
        item will be deleted.
        """
        def _uploadObject(obj):
            # Fetch the object's content, then push it to the backend
            # under the same objectId.
            d = obj.getContent()
            d.addCallback(
                lambda content: self.backend.storeObject(
                    content,
                    obj.contentType,
                    obj.metadata,
                    obj.created,
                    objectId=self.objectId))
            return d

        def _reschedule(f):
            # We do this instead of returning a Time from attemptUpload,
            # because that can only be done synchronously.
            log.failure(
                'Error uploading object {objectId!r} '
                'to backend store {backend!r}',
                failure=f, objectId=self.objectId, backend=self.backend)
            self.scheduled = self._nextAttempt()
            self.schedule()

        d = succeed(None)
        d.addCallback(
            lambda ign: IContentStore(self.store).getObject(self.objectId))
        d.addCallback(_uploadObject)
        # On success delete this marker; on any failure reschedule.
        d.addCallbacks(lambda ign: self.deleteFromStore(), _reschedule)
        return d

    def schedule(self):
        IScheduler(self.store).schedule(self, self.scheduled)
class Post(item.Item):
    """
    A blog post.
    """
    typeName = "BlogenginePost"
    schemaVersion = 1

    id = attributes.integer(indexed=True, allowNone=False)
    created = attributes.timestamp(indexed=True)
    modified = attributes.timestamp(indexed=True)
    title = attributes.text(indexed=True, allowNone=False)
    author = attributes.text(indexed=True, allowNone=False)
    category = attributes.text(indexed=True)
    content = attributes.text(indexed=True)

    def __init__(self, **kw):
        # Both timestamps start equal.  NOTE(review): this silently
        # overwrites any caller-supplied created/modified values.
        now = Time()
        kw.update({'created': now, 'modified': now})
        super(Post, self).__init__(**kw)

    def setModified(self):
        """
        Bump the modification timestamp to now.
        """
        self.modified = Time()
class QueryStatBucket(item.Item):
    """
    Obsolete.  Only present for schema compatibility.  Do not use.
    """
    type = attributes.text("the SQL query string")
    value = attributes.ieee754_double(
        default=0.0, doc='Total number of events for this time period')
    interval = attributes.text(
        doc='A time period, e.g. "quarter-hour" or "minute" or "day"')
    index = attributes.integer(
        doc='The position in the round-robin list for non-daily stats')
    time = attributes.timestamp(doc='When this bucket was last updated')
    attributes.compoundIndex(interval, type, index)
class _PasswordResetAttempt(Item):
    """
    I represent as as-yet incomplete attempt at password reset
    """
    typeName = 'password_reset_attempt'
    schemaVersion = 1

    # Opaque key identifying this reset attempt.
    key = text()
    # The user who requested the reset.
    username = text()
    # When the attempt was made.  (The attribute intentionally shares the
    # name of the module-level timestamp() constructor.)
    timestamp = timestamp()
class KitchenSink(Item):
    """
    An item with one of everything, more or less.
    """
    # One attribute per attribute type, for exercising serialization.
    t = text()
    i = integer()
    ts = timestamp()
    tl = textlist()
    d = ieee754_double()
    p1d = point1decimal()
    m = money()
class Book(Item):
    """
    A book in a library's collection.
    """
    typeName = 'book'
    schemaVersion = 1

    title = text()
    author = text()
    isbn = text()
    pages = integer()
    datePublished = timestamp()
    # The borrower currently holding the book, if any.
    lentTo = reference()
    # The library that owns the book.
    library = reference()
class AuthenticationApplication(item.Item):
    """
    Per-user item backing credential management: password changes and
    persistent-session inspection/cancellation.
    """
    typeName = 'mantissa_web_authentication_application'
    schemaVersion = 1

    # When the user's credentials last changed.
    lastCredentialsChange = attributes.timestamp(allowNone=False)

    def __init__(self, **kw):
        if 'lastCredentialsChange' not in kw:
            kw['lastCredentialsChange'] = extime.Time()
        super(AuthenticationApplication, self).__init__(**kw)

    def _account(self):
        """
        Find the site-store LoginAccount whose avatars reference this
        user's substore.

        @raise NonExistentAccount: if no such account exists.
        """
        substore = self.store.parent.getItemByID(self.store.idInParent)
        for account in self.store.parent.query(
            userbase.LoginAccount,
            userbase.LoginAccount.avatars == substore):
            return account
        raise NonExistentAccount()

    def _username(self):
        # First account name, rendered as UTF-8 bytes.
        # NOTE(review): implicitly returns None when the store has no
        # account names -- confirm callers tolerate that.
        for (localpart, domain) in userbase.getAccountNames(self.store):
            return (localpart + '@' + domain).encode('utf-8')

    def hasCurrentPassword(self):
        """
        @return: a Deferred firing with whether a password is set.
        """
        return defer.succeed(self._account().password is not None)

    def changePassword(self, oldPassword, newPassword):
        """
        Change the account password.

        @raise InvalidPassword: if C{oldPassword} does not match the
            current password.
        """
        account = self._account()
        # NOTE(review): direct comparison implies passwords are stored in
        # plaintext here -- confirm against userbase.
        if account.password is not None and account.password != oldPassword:
            raise InvalidPassword()
        else:
            account.password = newPassword
            self.lastCredentialsChange = extime.Time()

    def persistentSessions(self):
        """
        Query the site store for this user's persistent sessions.
        """
        username = self._username()
        return self.store.parent.query(
            websession.PersistentSession,
            websession.PersistentSession.authenticatedAs == username)

    def cancelPersistentSession(self, uid):
        """
        Delete this user's persistent session with key C{uid}.

        @raise NoSuchSession: if no matching session exists.
        """
        username = self._username()
        # for/else: the else arm runs only when no session matched.
        for sess in self.store.parent.query(
            websession.PersistentSession,
            attributes.AND(
                websession.PersistentSession.authenticatedAs == username,
                websession.PersistentSession.sessionKey == uid)):
            sess.deleteFromStore()
            break
        else:
            raise NoSuchSession()
class ScheduleCallingItem(Item):
    """
    Item which invokes C{schedule} on its store's L{IScheduler} from its
    own C{run} method, then records that it ran.
    """
    ran = boolean(default=False)
    rescheduleFor = timestamp()

    def run(self):
        # Ask the store's scheduler to run us again, then note the run.
        IScheduler(self.store).schedule(self, self.rescheduleFor)
        self.ran = True
class Sample(Item):
    """
    A disk-usage measurement for one URL at one point in time.
    """
    # we didn't originally set typeName, so it was generated from the
    # fully-qualified classname ("diskwatcher.Sample"), then Axiom
    # automatically lowercases and un-dot-ifies it to get
    # "diskwatcher_sample". Now we explicitly provide a name.
    typeName = "diskwatcher_sample"

    # version 2 added the 'total' field
    schemaVersion = 2

    url = text(indexed=True)
    when = timestamp(indexed=True)
    # Sizes -- units not stated here; presumably bytes, confirm at the
    # point where samples are recorded.
    total = integer()
    used = integer()
    avail = integer()
class Traceback(Item):
    """
    Persistent record of a logged failure: the rendered traceback, the
    time it was captured, and the collector that captured it.
    """
    typeName = 'mantissa_traceback'
    schemaVersion = 1

    when = timestamp()
    traceback = bytes()
    collector = reference()

    def __init__(self, store, collector, failure):
        # Render the failure and stamp the capture time up front.
        super(Traceback, self).__init__(
            store=store,
            traceback=failure.getTraceback(),
            when=extime.Time(),
            collector=collector)
class ImmutableObject(Item):
    """
    An immutable object.

    Immutable objects are addressed by content hash, and consist of the
    object data as a binary blob, and object key/value metadata pairs.
    """
    implements(IContentObject)

    hash = text(allowNone=False)
    contentDigest = text(allowNone=False, indexed=True)
    content = path(allowNone=False)
    contentType = text(allowNone=False)
    created = timestamp(allowNone=False, defaultFactory=lambda: Time())

    _deferToThreadPool = inmemory()

    def activate(self):
        self._deferToThreadPool = execute

    @property
    def metadata(self):
        return {}

    @property
    def objectId(self):
        return u'%s:%s' % (self.hash, self.contentDigest)

    def _getDigest(self):
        """
        Read the stored content and compute its digest with the hash
        algorithm named by C{self.hash}.
        """
        fp = self.content.open()
        try:
            hexDigest = getHash(self.hash)(fp.read()).hexdigest()
        finally:
            fp.close()
        return unicode(hexDigest, 'ascii')

    def verify(self):
        """
        Recompute the content digest and compare it to the recorded one.

        @raise CorruptObject: if they differ.
        """
        expected = self.contentDigest
        actual = self._getDigest()
        if expected != actual:
            raise CorruptObject(
                'expected: %r actual: %r' % (expected, actual))

    def getContent(self):
        # File reads happen off the reactor thread.
        return self._deferToThreadPool(self.content.getContent)
class URLExtract(SimpleExtractMixin, Item):
    """
    An extract marking a URL found in a message part.
    """
    typeName = 'quotient_url_extract'
    schemaVersion = 2

    # Extent of the match within the part -- presumably character
    # offsets; confirm against SimpleExtractMixin.
    start = attributes.integer()
    end = attributes.integer()
    # The matched URL text itself.
    text = attributes.text(indexed=True)
    message = attributes.reference()
    part = attributes.reference()
    timestamp = attributes.timestamp()
    person = attributes.reference()

    # Matches scheme://... or www.-prefixed URLs, excluding trailing
    # punctuation from the final character.
    regex = re.compile(
        ur'(?:\w+:\/\/|www\.)[^\s\<\>\'\(\)\"]+[^\s\<\>\(\)\'\"\?\.]',
        re.UNICODE | re.IGNORECASE)

    def asStan(self):
        return tags.b[tags.a(href=self.text)[self.text]]
class PastBlurb(Item):
    """
    This is an old version of a blurb.  It contains the text as it used
    to be at a particular point in time.
    """
    typeName = 'hyperbola_past_blurb'
    schemaVersion = 1

    # When this revision was superseded.
    dateEdited = timestamp()
    title = text()
    body = text()
    hits = integer(
        doc="The number of times that this blurb has been displayed to users.")
    author = reference(reftype=Role, allowNone=False)
    # The current Blurb this is an old revision of.
    blurb = reference(reftype=Blurb)
class Status(item.Item):
    """
    Represents the latest status of a particular grabber.
    """
    when = attributes.timestamp(doc="""
    Time at which this status was set.
    """)

    message = attributes.text(doc="""
    A short string describing the current state of the grabber.
    """)

    success = attributes.boolean(doc="""
    Flag indicating whether this status indicates a successful action
    or not.
    """)

    changeObservers = attributes.inmemory(doc="""
    List of single-argument callables which will be invoked each time
    this status changes.
    """)

    def __repr__(self):
        return '<Status %r>' % (self.message, )

    def activate(self):
        # Observers are transient; reset to idle on load.
        self.changeObservers = []
        self.message = u"idle"

    def addChangeObserver(self, observer):
        """
        Register C{observer} to be called with each new message.

        @return: a zero-argument callable which removes the observer.
        """
        self.changeObservers.append(observer)
        return lambda: self.changeObservers.remove(observer)

    def setStatus(self, message, success=True):
        """
        Record a new status and notify all change observers.
        """
        self.when = extime.Time()
        self.message = message
        self.success = success
        for L in self.changeObservers:
            try:
                L(message)
            except Exception:
                # Fixed: was a bare `except:`, which also trapped
                # SystemExit and KeyboardInterrupt.  A broken observer
                # must not prevent the remaining observers from being
                # notified, so log and continue.
                log.err(None, "Failure in status update")
class Recording(Item):
    """
    A certain recording.
    """
    created = timestamp()
    caller_id = text()
    filename = text()
    duration = integer() # in frames
    use_in_ending = boolean()
    user_recording = boolean()

    def filenameAsPath(self, app):
        """ Return absolute filename without extension """
        return app.recordingsPath.child(self.filename).path

    def filenameAsURL(self):
        """ Return filename as MP3 url """
        return "/recordings/%s.mp3" % self.filename

    def filenameAsAsterisk(self):
        # Path relative to the Asterisk sounds directory.
        return "weareforests-recordings/" + self.filename

    @staticmethod
    def userRecordingFilename(app):
        """ Generate a new filename for a user recording. """
        # Probe "user-<ts>", then "user-<ts>-1", "user-<ts>-2", ... until
        # an unused name is found.
        base = "user-%d" % time.time()
        candidate = base
        suffix = 1
        while app.recordingsPath.child(candidate).exists():
            candidate = "%s-%d" % (base, suffix)
            suffix += 1
        return candidate
def nowAttribute(allowNone=False, defaultFactory=Time):
    """
    Build a timestamp attribute that defaults to the current time.
    """
    return A.timestamp(
        allowNone=allowNone,
        defaultFactory=defaultFactory,
        doc='Date the item was added.')
def setServiceParent(self, parent):
    """
    L{Scheduler} is no longer an L{IService}, but still provides this
    method as a no-op in case an instance which was still an L{IService}
    powerup is loaded (in which case it will be used like a service
    once).
    """

# Describe the schema of the pre-upgrade (version 1) Scheduler so old
# databases can still be opened and upgraded.
declareLegacyItem(
    Scheduler.typeName, 1,
    dict(eventsRun=integer(default=0),
         lastEventAt=timestamp(),
         nextEventAt=timestamp()))

def scheduler1to2(old):
    # Version 2 drops the IService and IScheduler powerups.
    new = old.upgradeVersion(Scheduler.typeName, 1, 2)
    new.store.powerDown(new, IService)
    new.store.powerDown(new, IScheduler)
    return new

registerUpgrader(scheduler1to2, Scheduler.typeName, 1, 2)

class _SubSchedulerParentHook(Item):
    # NOTE(review): class body continues beyond this excerpt.
    schemaVersion = 4
    typeName = 'axiom_subscheduler_parent_hook'
def test_timestamp(self):
    """
    L{attributes.timestamp} should be mapped to the AMP C{DateTime}
    argument type.
    """
    self._test_typeFor(attributes.timestamp(), amp.DateTime)