def __init__(self, **data):
    """ constructor of Superclass

    Generates a fresh object id, applies any keyword arguments that
    match ISuperclass schema fields, and initialises history, debug
    level and the event in/out queues.
    """
    Persistent.__init__(self)
    # Temporarily lift the schema-level readonly flag so the generated
    # oid can be written through the FieldProperty.
    # NOTE(review): this mutates the *shared* ISuperclass schema field,
    # so overlapping constructors could interfere -- confirm single-threaded use.
    ISuperclass['objectID'].readonly = False
    self.objectID = generateOid(self)
    self.ikName = self.objectID
    # Accept only keyword arguments that are declared in the schema.
    # NOTE(review): the readonly flag is restored only *after* this loop,
    # so a caller-supplied 'objectID' would overwrite the generated one -- confirm intended.
    for (name, value) in data.items():
        if name in ISuperclass.names():
            setattr(self, name, value)
    ISuperclass['objectID'].readonly = True
    self.ikAuthor = u""
    self.dbgLevel = NOTSET
    self.history = RingBuffer(20)   # bounded history of Entry objects
    self.inpEQueue = Queue()        # incoming event queue
    self.outEQueue = Queue()        # outgoing event queue
    self.outEReceiver = None        # oid of the event crossbar once connected
    self.workflows = {}
    self.wf_worklist = []
    # Record the titles of all interacting principals as the author string.
    interaction = queryInteraction()
    if interaction is not None:
        for participation in interaction.participations:
            #principalid = participation.principal.id
            principal_title = participation.principal.title
            self.ikAuthor += unicode(principal_title)
    # Dotted factory name extracted from repr of the class object.
    self.myFactory = str(self.__class__).split("'")[1]
    self.ikRevision = __version__
def makeNewObjQueue(self, senderObj):
    """ Create an input and an output event queue for the sender object.

    Registers ``senderObj`` (keyed by its object id) in both
    ``self.inpEQueues`` and ``self.outEQueues``.  Existing queues -- and
    any events still pending in them -- are preserved on repeated calls.

    Returns True.
    """
    objId = senderObj.getObjectId()
    # 'in' replaces the deprecated dict.has_key() (removed in Python 3);
    # only create a queue on first registration.
    if objId not in self.inpEQueues:
        self.inpEQueues[objId] = Queue()
    if objId not in self.outEQueues:
        self.outEQueues[objId] = Queue()
    return True
def __call__(self):
    """ Upgrade step: migrate publisher jobs into a fresh Queue storage.

    Reads the old 'publisher-queue' annotation on the portal, replaces it
    with a new Queue, and re-enqueues every job while logging progress.
    """
    self.install_upgrade_profile()
    annotations = IAnnotations(self.portal)
    jobs = annotations.get('publisher-queue', ())
    # The old storage may have been a mapping (id -> job); migrate its values.
    if hasattr(jobs, 'values'):
        jobs = jobs.values()
    queue = annotations['publisher-queue'] = Queue()
    # Explicit loop instead of map(): map() is lazy in Python 3, which would
    # silently skip the migration, and a for-loop is the idiom for side effects.
    for job in ProgressLogger('Migrate jobs to new queue storage', jobs):
        queue.put(job)
def __init__(self):
    """ Initialise the scheduler module's persistent data structures. """
    # Tasks waiting to be executed, ordered by timestamp.
    self._waitingQueue = PersistentWaitingQueue()
    # Tasks currently being executed.
    self._runningList = []
    # Failed tasks (there is an object still in the DB)
    self._failedIndex = IOIndex(IIndexableByArbitraryDateTime)
    # Finished tasks (no object data, just metadata)
    self._finishedIndex = IOIndex(IIndexableByArbitraryDateTime)
    # Stores all tasks, keyed by serial task id.
    self._taskIdx = IOBTree()
    # Conflict-free counter used to hand out serial task ids.
    self._taskCounter = Length(0)
    # Is the scheduler running
    self._schedulerStatus = False
    # Temporary area where all the tasks stay before being
    # added to the waiting list
    self._taskSpool = Queue()
class Superclass(Persistent):
    """ the superclass

    Base class of the persistent ict-ok components.  Provides object id
    generation, a bounded history log, debug levels, PDF/XML report
    generation and the event-queue interface used by the event crossbar
    utility (IAdmUtilEventCrossbar).
    """
    # implements(IKeyReference, ISuperclass)
    # implements(ISuperclass, IEventIfSuperclass)
    implements(ISuperclass)
    shortName = "generic"
    key_type_id = 'org.ict_ok.components.superclass.keyreference'
    objectID = FieldProperty(ISuperclass['objectID'])
    ikName = FieldProperty(ISuperclass['ikName'])
    ikComment = FieldProperty(ISuperclass['ikComment'])
    ikNotes = FieldProperty(ISuperclass['ikNotes'])
    ikAuthor = FieldProperty(ISuperclass['ikAuthor'])
    ikEventTarget = FieldProperty(ISuperclass['ikEventTarget'])
    #ref = FieldProperty(ISuperclass['ref'])
    # Attribute names offered to the full-text search machinery.
    fullTextSearchFields = ['objectID', 'ikName', 'ikComment', 'ikAuthor']
    # IEventIfSuperclass
    #eventInpObjs_Ping = FieldProperty(IEventIfSuperclass['eventInpObjs_Ping'])
    #eventOutObjs_Pong = FieldProperty(IEventIfSuperclass['eventOutObjs_Pong'])

    def __init__(self, **data):
        """ constructor of Superclass """
        Persistent.__init__(self)
        # Temporarily lift the schema-level readonly flag so the generated
        # oid can be written through the FieldProperty.
        # NOTE(review): mutates the shared ISuperclass schema field -- not
        # safe under concurrent constructors; confirm single-threaded use.
        ISuperclass['objectID'].readonly = False
        self.objectID = generateOid(self)
        self.ikName = self.objectID
        # Accept only keyword arguments declared in the schema.
        for (name, value) in data.items():
            if name in ISuperclass.names():
                setattr(self, name, value)
        ISuperclass['objectID'].readonly = True
        self.ikAuthor = u""
        self.dbgLevel = NOTSET
        self.history = RingBuffer(20)   # bounded history of Entry objects
        self.inpEQueue = Queue()        # incoming event queue
        self.outEQueue = Queue()        # outgoing event queue
        self.outEReceiver = None        # oid of the event crossbar once connected
        self.workflows = {}
        self.wf_worklist = []
        # Record the titles of all interacting principals as the author string.
        interaction = queryInteraction()
        if interaction is not None:
            for participation in interaction.participations:
                #principalid = participation.principal.id
                principal_title = participation.principal.title
                self.ikAuthor += unicode(principal_title)
        # Dotted factory name extracted from repr of the class object.
        self.myFactory = str(self.__class__).split("'")[1]
        self.ikRevision = __version__

    def __post_init__(self, **data):
        """ triggerd after constructor has been finished

        Writes the creation entries into the history, connects the
        object to the event crossbar and stamps Dublin Core dates.
        """
        newEntry = Entry(u"Object created", self, level=u"info")
        newEntry.setObjVersion(self.ikRevision)
        self.history.append(newEntry)
        self.connectToEventXbar()
        newEntry = Entry(u"Object connected to event crossbar",
                         self, level=u"info")
        newEntry.setObjVersion(self.ikRevision)
        self.history.append(newEntry)
        dc = IZopeDublinCore(self, None)
        if dc is not None:
            now = datetime.now(pytz.utc)
            dc.created = now
            dc.modified = now

    def canBeDeleted(self):
        """ a object can be deleted with normal delete permission

        special objects can overload this for special delete rules
        (e.g. IAdmUtilCatHostGroup)
        return True or False
        """
        return True

    def enabledDebug(self):
        """ is debug output enabled?

        return True or False
        """
        return self.dbgLevel > 0

    def getDebugLevel(self):
        """ get debug level of object

        0: none
        1: normal
        2: all
        """
        return self.dbgLevel

    def getObjectId(self):
        """ get 'Universe ID' of object

        returns str
        """
        return str(self.objectID)

    def setObjectId(self, arg_oid):
        """ set 'Universe ID' of object

        only for backup/restore functions
        """
        if oidIsValid(arg_oid):
            # readonly flag lifted only for the duration of the assignment
            ISuperclass['objectID'].readonly = False
            self.objectID = arg_oid
            ISuperclass['objectID'].readonly = True

    def getShortname(self):
        """ get a short class name of object

        returns str
        """
        return self.shortName

    def getParent(self):
        """ returns parent object """
        return zapi.getParent(self)

    def outputDebug(self):
        """ normal debug output """
        if self.enabledDebug():
            log(self.getDebugLevel(),
                "I'm Superclass: %s" % self.__name__)

    def setDebugLevel(self, dbgLevel):
        """ set debug level of object

        0: none
        1: normal
        2: all
        """
        self.__setattr__("dbgLevel", dbgLevel)

    def getDcTitle(self):
        """ get the Title from Dublin Core

        Falls back to ikName when the title is empty or the adapter
        lookup raises TypeError.
        """
        try:
            dcore = IWriteZopeDublinCore(self)
            if len(dcore.title) > 0:
                return dcore.title
            else:
                return self.ikName
        except TypeError:
            return self.ikName

    def setDcTitle(self, title):
        """ set the Title to Dublin Core """
        dcore = IWriteZopeDublinCore(self)
        dcore.title = unicode(title)

    def getDisplayTitle(self):
        """ display text for some views """
        return self.getDcTitle()

    def getLongTitle(self):
        """ display text for some views """
        return self.getDcTitle()

    def getModifiedTime(self):
        """ get the modified time from Dublin Core """
        return IDCTimes(self).modified

    def appendHistoryEntry(self, entryText, level=u"info", request=None,
                           withAuthor=False, dontCount=False):
        """ append an text entry to the history

        When withAuthor is set, the acting principal's title is appended
        to the text.  Identical consecutive entries are collapsed into a
        repeat counter unless dontCount is set.
        """
        if withAuthor and request is not None:
            principalId = request.principal.id.split('.')[1]
            pau_utility = queryUtility(IAuthentication)
            if pau_utility.has_key('principals'):
                internalPrincipal = pau_utility['principals'][principalId]
            # LDAP lookup (if configured) overrides the local principal.
            if pau_utility.has_key('LDAPAuthentication'):
                ldapAuth = pau_utility[u'LDAPAuthentication']
                internalPrincipal = ldapAuth.principalInfo(\
                    ldapAuth.prefix+principalId)
            entryText = u'%s (%s)' % (entryText, internalPrincipal.title)
        lastEntry = self.history.get()[-1]
        if not dontCount and entryText == lastEntry.getText():
            lastEntry.appendRepeatCounter()
            lastEntry._p_changed = 1
        else:
            newEntry = Entry(entryText, self, level)
            newEntry.setObjVersion(self.ikRevision)
            self.history.append(newEntry)
        self._p_changed = 1

    def isConnectedToEvent(self):
        """ True if any eventInpObjs_*/eventOutObjs_* set is non-empty. """
        for attrName in self.__dict__:
            attrObjsPrefix = "eventInpObjs_"
            if attrName.find(attrObjsPrefix) == 0:
                # attribute name starts with the prefix
                objs = getattr(self, attrName, None)
                if len(objs) > 0:
                    return True
            attrObjsPrefix = "eventOutObjs_"
            if attrName.find(attrObjsPrefix) == 0:
                # attribute name starts with the prefix
                objs = getattr(self, attrName, None)
                if len(objs) > 0:
                    return True
        return False

    def processEvents(self):
        """ Drain the input queue, dispatching each event to matching
        eventInp_* methods; unmatched events pass straight through to
        the output queue, already-seen events are stopped (cycle guard).
        """
        while len(self.inpEQueue) > 0: # temp. direct connect
            eventMsg = self.inpEQueue.pull()
            if not eventMsg.hasSeen(self):
                #print ">>> %s / %s [fnctName:%s]" % \
                    #(self.getObjectId(),
                    #eventMsg.oidEventObject,
                    #eventMsg.targetFunctionName)
                # call possible event input methods by name
                fnctList = []
                for attrName in self.__dict__:
                    attrObjsPrefix = "eventInpObjs_"
                    attrFnctPrefix = "eventInp_"
                    if attrName.find(attrObjsPrefix) == 0:
                        # attribute name starts with the prefix
                        fnctName = attrFnctPrefix + \
                                   attrName[len(attrObjsPrefix):]
                        objs = getattr(self, attrName, None)
                        fnct = getattr(self, fnctName, None)
                        if fnct is not None and \
                           objs is not None:
                            # find the RIGHT object list
                            if eventMsg.oidEventObject in objs:
                                fnctList.append(fnct)
                            elif eventMsg.targetFunctionName == \
                                 attrName[len(attrObjsPrefix):]:
                                fnctList.append(fnct)
                for fnct in fnctList:
                    fnct(eventMsg)
                if len(fnctList) == 0:
                    # direct input to output
                    self.outEQueue.put(eventMsg)
            else:
                eventMsg.stopit(self, "cycle!")

    def processOutEQueue(self):
        """ Hand queued outgoing events to the event crossbar utility. """
        if self.outEReceiver is not None:
            while len(self.outEQueue) > 0:
                utilXbar = queryUtility(IAdmUtilEventCrossbar)
                # peek first; only remove once the crossbar accepted it
                event = iter(self.outEQueue).next() # don't delete
                if utilXbar.injectEventFromObj(self, event):
                    self.outEQueue.pull() # now delete

    def processInpEQueue(self):
        """ hook; input-side processing happens in processEvents() """
        pass

    def injectInpEQueue(self, event):
        """ enqueue an incoming event; returns True """
        self.inpEQueue.put(event)
        return True

    def injectOutEQueue(self, event):
        """ enqueue an outgoing event (only when connected); returns True """
        if self.outEReceiver is not None:
            self.outEQueue.put(event)
        return True

    def tickerEvent(self):
        """ got ticker event from ticker thread """
        ## debug if queue not empty
        if len(self.inpEQueue) + len(self.outEQueue) > 0:
            log(INFO, "tickerEvent (n:%s, n(i):%s, n(o):%s)" % \
                (self.ikName, len(self.inpEQueue), len(self.outEQueue)))
        self.processOutEQueue()
        self.processEvents()
        self.processInpEQueue()
        ## TODO test purpose
        #import time
        #if time.gmtime()[5] == 10:
        #if self.getDcTitle()==u'Host1':
            #inst_event = MsgEvent(self)
            #self.injectOutEQueue(inst_event)

    def connectToEventXbar(self):
        """ Register this object's queue with the event crossbar;
        reconnects if already connected.  Returns True/False.
        """
        if self.outEReceiver is None:
            utilXbar = queryUtility(IAdmUtilEventCrossbar)
            if utilXbar is None:
                return False
            if utilXbar.makeNewObjQueue(self):
                self.outEReceiver = utilXbar.getObjectId()
                return True
            else: # can't create
                raise Exception, "connection failed"
        else: # already connected
            self.disconnectFromEventXbar()
            return self.connectToEventXbar()
        return False

    def disconnectFromEventXbar(self):
        """ Remove this object's queue from the event crossbar.
        Returns True/False.
        """
        if self.outEReceiver is not None:
            utilXbar = queryUtility(IAdmUtilEventCrossbar)
            if utilXbar is None:
                return False
            if utilXbar.destroyObjQueue(self):
                self.outEReceiver = None
                return True
            else: # can't destroy
                raise Exception, "destruction failed"
        return False

    def getAllOutEventObjs(self):
        """ returns a list of all active referenced event object oids
        for update purpose

        attribute name must start with 'eventOutObjs_'
        """
        retSet = set([])
        for attrName in self.__dict__:
            if attrName.find("eventOutObjs_") == 0:
                # attribute name starts with the prefix
                for tmpOid in self.__dict__[attrName]:
                    retSet.add(tmpOid)
        return retSet

    def getAllInpEventObjs(self):
        """ returns a list of all active referenced event object oids
        for update purpose

        attribute name must start with 'eventInpObjs_'
        """
        retSet = set([])
        for attrName in self.__dict__:
            if attrName.find("eventInpObjs_") == 0:
                # attribute name starts with the prefix
                for tmpOid in self.__dict__[attrName]:
                    retSet.add(tmpOid)
        # debug output
        print "getAllInpEventObjs:", retSet
        return retSet

    def getAllInpEventNames(self):
        """ returns a list of all input event methods

        attribute name must start with 'eventInpObjs_'
        """
        retDict = {}
        for attrName in self.__dict__:
            if attrName.find("eventInpObjs_") == 0:
                # attribute name starts with the prefix
                retDict[attrName[len('eventInpObjs_'):]] = \
                    self.__dict__[attrName]
                #retList.append(attrName[len('eventInpObjs_'):])
        # debug output
        print "getAllInpEventNames:", retDict
        return retDict

    def getAllOutEventNames(self):
        """ returns a list of all output event methods

        attribute name must start with 'eventOutObjs_'
        """
        retDict = {}
        for attrName in self.__dict__:
            if attrName.find("eventOutObjs_") == 0:
                # attribute name starts with the prefix
                retDict[attrName[len('eventOutObjs_'):]] = \
                    self.__dict__[attrName]
                #retList.append(attrName[len('eventOutObjs_'):])
        # debug output
        # NOTE(review): label says "Inp" but this is the Out variant -- the
        # runtime string is left unchanged here; looks like a copy/paste typo.
        print "getAllInpEventNames:", retDict
        return retDict

    def addToEventInpObjs(self, inputName, eventObj):
        """ add the event to the list of inp event object oids """
        if "eventInpObjs_" + inputName in self.__dict__:
            myObjIdSet = self.__dict__["eventInpObjs_" + inputName]
            print "myObjIdSet 1 :", myObjIdSet
            newOid = eventObj.getObjectId()
            if newOid not in myObjIdSet:
                myObjIdSet.add(newOid)
                # the set is mutated in place; flag the persistent object
                self._p_changed = True
            print "myObjIdSet 2 :", myObjIdSet

    def delFromEventInpObjs(self, inputName, eventObj):
        """ remove the event from the list of inp event object oids """
        if "eventInpObjs_" + inputName in self.__dict__:
            myObjIdSet = self.__dict__["eventInpObjs_" + inputName]
            print "myObjIdSet 1 :", myObjIdSet
            oldOid = eventObj.getObjectId()
            if oldOid in myObjIdSet:
                myObjIdSet.remove(oldOid)
                # the set is mutated in place; flag the persistent object
                self._p_changed = True
            print "myObjIdSet 2 :", myObjIdSet

    def generatePdf(self, absFilename, authorStr, versionStr, request=None):
        """ will generate a object pdf report

        steps to do:
        - toReportSet = set([])
        - 1. select of objects (e.g. locations), append toReportSet
        - 2. select of objects (e.g. buildings), append toReportSet
        - 3. select of objects (e.g. rooms), append toReportSet
        - generate Report (1st run, content)
        - generate Report (2nd run, references)
        """
        ##
        ## TODO:
        ## evil, very alpha, evaluation code for some kind of
        ## "Query Language" ... to be removed ...
        ## (a long pdb session transcript exploring the ZODB connection
        ## and gocept.objectquery internals was condensed out of this
        ## comment block; see VCS history for the full dump)
        ##
        from org.ict_ok.components.happliance.interfaces \
             import IHardwareAppliance
        if IHardwareAppliance.providedBy(self):
            # Experimental object-query exercise: indexes ancestors and
            # runs sample path queries, printing the indexes to stdout.
            # NOTE(review): 'request' defaults to None -- this line would
            # fail for appliance objects without a request; confirm callers.
            publ = request.publication
            from ZODB.interfaces import IConnection
            connection = IConnection(self)
            from gocept.objectquery.collection import ObjectCollection
            from gocept.objectquery.pathexpressions import RPEQueryParser
            from gocept.objectquery.processor import QueryProcessor
            from zope.app import zapi
            parser = RPEQueryParser()
            oc = ObjectCollection(connection)
            d1 = zapi.getParent(self)
            d2 = zapi.getParent(d1)
            d3 = zapi.getParent(d2)   # NOTE(review): computed but unused
            from pprint import pprint
            print "-" * 80
            print "class_index:"
            pprint(list(oc.class_index._index))
            # print "attribute_index:"
            # pprint(list(oc.attribute_index._index))
            # print "structure_index:"
            # pprint(list(oc.structure_index.paths))
            oc.index(connection.root())
            print "-" * 80
            print "class_index:"
            pprint(list(oc.class_index._index))
            print "attribute_index:"
            pprint(list(oc.attribute_index._index))
            print "structure_index:"
            pprint(list(oc.structure_index.paths))
            oc.index(d2)
            print "-" * 80
            print "class_index:"
            pprint(list(oc.class_index._index))
            print "attribute_index:"
            pprint(list(oc.attribute_index._index))
            print "structure_index:"
            pprint(list(oc.structure_index.paths))
            oc.index(d1)
            print "-" * 80
            print "class_index:"
            pprint(list(oc.class_index._index))
            print "attribute_index:"
            pprint(list(oc.attribute_index._index))
            print "structure_index:"
            pprint(list(oc.structure_index.paths))
            oc.index(self)
            print "-" * 80
            print "class_index:"
            pprint(list(oc.class_index._index))
            print "attribute_index:"
            pprint(list(oc.attribute_index._index))
            print "structure_index:"
            pprint(list(oc.structure_index.paths))
            print "-" * 80
            if0 = self.interfaces[0]
            oc.index(if0)
            oc3 = connection.root()['_oq_collection']
            query = QueryProcessor(parser, oc3)
            tt2 = oc3.is_child(self._p_oid, d1._p_oid)
            tt1 = oc3.is_child(d1._p_oid, d2._p_oid)
            tt3 = oc3.is_child(self._p_oid, if0._p_oid)
            tt4 = oc3.is_child(if0._p_oid, self._p_oid)
            ee = query('/Folder')
            ff = query('/Folder/HardwareApplianceFolder/HardwareAppliance')
            print "ff: ", ff
        # --- actual report generation ---
        files2delete = []
        document = RptDocument(absFilename)
        #document.setVolumeNo("1")
        document.setAuthorName(authorStr)
        document.setVersionStr(versionStr)
        adapterRptPdf = IRptPdf(self)
        if adapterRptPdf:
            adapterRptPdf.document = document
            adapterRptPdf.traverse4Rpt(1, True)
            files2delete.extend(adapterRptPdf.files2delete)
            del adapterRptPdf
        document.buildPdf()
        document.outConsoleTree(0)
        # best-effort cleanup of temporary files produced by the adapter
        for i_filename in files2delete:
            try:
                os.remove(i_filename)
            except OSError:
                pass

    def generateXML(self, absFilename, authorStr, versionStr):
        """ will generate a object XML report

        (docstring said "pdf" before; this method writes pretty-printed
        XML produced by the IRptXML adapter to absFilename)
        """
        #files2delete = []
        document = xml.dom.minidom.Document()
        #document.setVolumeNo("1")
        #document.setAuthorName(authorStr)
        #document.setVersionStr(versionStr)
        adapterRptXML = IRptXML(self)
        if adapterRptXML:
            adapterRptXML.document = document
            adapterRptXML.traverse4Rpt(1, True)
            #files2delete.extend(adapterRptXML.files2delete)
            del adapterRptXML
        #document.buildPdf()
        file_object = open(absFilename, "w")
        # what here?
        xml.dom.ext.PrettyPrint(document, file_object)
        file_object.close()
class Superclass(Persistent):
    """ the superclass

    Earlier/slimmer variant of the component base class: object id
    generation, history log, debug level handling and the event-queue
    interface of the event crossbar.
    """
    # implements(IKeyReference, ISuperclass)
    implements(ISuperclass, IEventIfSuperclass)
    key_type_id = 'org.ict_ok.components.superclass.keyreference'
    objectID = FieldProperty(ISuperclass['objectID'])
    ikName = FieldProperty(ISuperclass['ikName'])
    ikComment = FieldProperty(ISuperclass['ikComment'])
    ikNotes = FieldProperty(ISuperclass['ikNotes'])
    ikAuthor = FieldProperty(ISuperclass['ikAuthor'])
    ikEventTarget = FieldProperty(ISuperclass['ikEventTarget'])
    ref = FieldProperty(ISuperclass['ref'])
    # IEventIfSuperclass
    #eventInpObjs_Ping = FieldProperty(IEventIfSuperclass['eventInpObjs_Ping'])
    #eventOutObjs_Pong = FieldProperty(IEventIfSuperclass['eventOutObjs_Pong'])

    def __init__(self, **data):
        """ constructor of Superclass """
        Persistent.__init__(self)
        # readonly lifted only for the oid assignment (restored before the
        # data loop, unlike the other variant of this class in this file)
        ISuperclass['objectID'].readonly = False
        self.objectID = generateOid(self)
        ISuperclass['objectID'].readonly = True
        self.ikName = self.objectID
        # accept only keyword arguments declared in the schema
        for (name, value) in data.items():
            if name in ISuperclass.names():
                setattr(self, name, value)
        self.ikAuthor = u""
        self.dbgLevel = NOTSET
        self.history = RingBuffer(20)   # bounded history of Entry objects
        self.inpEQueue = Queue()        # incoming event queue
        self.outEQueue = Queue()        # outgoing event queue
        self.outEReceiver = None        # oid of the event crossbar once connected
        self.workflows = {}
        self.wf_worklist = []
        # record the titles of all interacting principals as author string
        interaction = queryInteraction()
        if interaction is not None:
            for participation in interaction.participations:
                #principalid = participation.principal.id
                principal_title = participation.principal.title
                self.ikAuthor += unicode(principal_title)
        # dotted factory name extracted from repr of the class object
        self.myFactory = str(self.__class__).split("'")[1]
        self.ikRevision = __version__

    def __post_init__(self, **data):
        """ triggerd after constructor has been finished """
        newEntry = Entry(u"Object created", self, level=u"info")
        newEntry.setObjVersion(self.ikRevision)
        self.history.append(newEntry)
        self.connectToEventXbar()
        newEntry = Entry(u"Object connected to event crossbar",
                         self, level=u"info")
        newEntry.setObjVersion(self.ikRevision)
        self.history.append(newEntry)

    def enabledDebug(self):
        """ is debug output enabled?

        return True or False
        """
        return self.dbgLevel > 0

    def getDebugLevel(self):
        """ get debug level of object

        0: none
        1: normal
        2: all
        """
        return self.dbgLevel

    def getObjectId(self):
        """ get 'Universe ID' of object

        returns str
        """
        return self.objectID

    def outputDebug(self):
        """ normal debug output """
        if self.enabledDebug():
            log(self.getDebugLevel(),
                "I'm Superclass: %s" % self.__name__)

    def setDebugLevel(self, dbgLevel):
        """ set debug level of object

        0: none
        1: normal
        2: all
        """
        self.__setattr__("dbgLevel", dbgLevel)

    def getDcTitle(self):
        """ get the Title from Dublin Core """
        dcore = IWriteZopeDublinCore(self)
        return dcore.title

    def setDcTitle(self, title):
        """ set the Title to Dublin Core """
        dcore = IWriteZopeDublinCore(self)
        dcore.title = unicode(title)

    def appendHistoryEntry(self, entryText):
        """ append an text entry to the history """
        newEntry = Entry(entryText, self, level=u"info")
        newEntry.setObjVersion(self.ikRevision)
        self.history.append(newEntry)

    def isConnectedToEvent(self):
        """ True if any eventInpObjs_*/eventOutObjs_* set is non-empty. """
        for attrName in self.__dict__:
            attrObjsPrefix = "eventInpObjs_"
            if attrName.find(attrObjsPrefix) == 0:
                # attribute name starts with the prefix
                objs = getattr(self, attrName, None)
                if len(objs) > 0:
                    return True
            attrObjsPrefix = "eventOutObjs_"
            if attrName.find(attrObjsPrefix) == 0:
                # attribute name starts with the prefix
                objs = getattr(self, attrName, None)
                if len(objs) > 0:
                    return True
        return False

    def processEvents(self):
        """ Drain the input queue, dispatching each event to matching
        eventInp_* methods; unmatched events pass straight through to
        the output queue, already-seen events are stopped (cycle guard).
        """
        while len(self.inpEQueue) > 0: # temp. direct connect
            eventMsg = self.inpEQueue.pull()
            if not eventMsg.hasSeen(self):
                #print ">>> %s / %s [fnctName:%s]" % \
                    #(self.getObjectId(),
                    #eventMsg.oidEventObject,
                    #eventMsg.targetFunctionName)
                # call possible event input methods by name
                fnctList = []
                for attrName in self.__dict__:
                    attrObjsPrefix = "eventInpObjs_"
                    attrFnctPrefix = "eventInp_"
                    if attrName.find(attrObjsPrefix) == 0:
                        # attribute name starts with the prefix
                        fnctName = attrFnctPrefix + \
                                   attrName[len(attrObjsPrefix):]
                        objs = getattr(self, attrName, None)
                        fnct = getattr(self, fnctName, None)
                        if fnct is not None and \
                           objs is not None:
                            # find the RIGHT object list
                            if eventMsg.oidEventObject in objs:
                                fnctList.append(fnct)
                            elif eventMsg.targetFunctionName == \
                                 attrName[len(attrObjsPrefix):]:
                                fnctList.append(fnct)
                for fnct in fnctList:
                    fnct(eventMsg)
                if len(fnctList) == 0:
                    # direct input to output
                    self.outEQueue.put(eventMsg)
            else:
                eventMsg.stopit(self, "cycle!")

    def processOutEQueue(self):
        """ Hand queued outgoing events to the event crossbar utility. """
        if self.outEReceiver is not None:
            while len(self.outEQueue) > 0:
                utilXbar = queryUtility(IAdmUtilEventCrossbar)
                # peek first; only remove once the crossbar accepted it
                event = iter(self.outEQueue).next() # don't delete
                if utilXbar.injectEventFromObj(self, event):
                    self.outEQueue.pull() # now delete

    def processInpEQueue(self):
        """ hook; input-side processing happens in processEvents() """
        pass

    def injectInpEQueue(self, event):
        """ enqueue an incoming event; returns True """
        self.inpEQueue.put(event)
        return True

    def injectOutEQueue(self, event):
        """ enqueue an outgoing event (only when connected); returns True """
        if self.outEReceiver is not None:
            self.outEQueue.put(event)
        return True

    def tickerEvent(self):
        """ got ticker event from ticker thread """
        ## debug if queue not empty
        if len(self.inpEQueue) + len(self.outEQueue) > 0:
            log(INFO, "tickerEvent (n:%s, n(i):%s, n(o):%s)" % \
                (self.getDcTitle(),
                 len(self.inpEQueue), len(self.outEQueue)))
        self.processOutEQueue()
        self.processEvents()
        self.processInpEQueue()
        ## TODO test purpose
        #import time
        #if time.gmtime()[5] == 10:
        #if self.getDcTitle()==u'Host1':
            #inst_event = MsgEvent(self)
            #self.injectOutEQueue(inst_event)

    def connectToEventXbar(self):
        """ Register this object's queue with the event crossbar;
        reconnects if already connected.  Returns True/False.
        """
        if self.outEReceiver is None:
            utilXbar = queryUtility(IAdmUtilEventCrossbar)
            if utilXbar is None:
                return False
            if utilXbar.makeNewObjQueue(self):
                self.outEReceiver = utilXbar.getObjectId()
                return True
            else: # can't create
                raise Exception, "connection failed"
        else: # already connected
            self.disconnectFromEventXbar()
            return self.connectToEventXbar()
        return False

    def disconnectFromEventXbar(self):
        """ Remove this object's queue from the event crossbar.
        Returns True/False.
        """
        if self.outEReceiver is not None:
            utilXbar = queryUtility(IAdmUtilEventCrossbar)
            if utilXbar is None:
                return False
            if utilXbar.destroyObjQueue(self):
                self.outEReceiver = None
                return True
            else: # can't destroy
                raise Exception, "destruction failed"
        return False

    def getAllOutEventObjs(self):
        """ returns a list of all active referenced event object oids
        for update purpose

        attribute name must start with 'eventOutObjs_'
        """
        retSet = set([])
        for attrName in self.__dict__:
            if attrName.find("eventOutObjs_") == 0:
                # attribute name starts with the prefix
                for tmpOid in self.__dict__[attrName]:
                    retSet.add(tmpOid)
        return retSet

    def getAllInpEventObjs(self):
        """ returns a list of all active referenced event object oids
        for update purpose

        attribute name must start with 'eventInpObjs_'
        """
        retSet = set([])
        for attrName in self.__dict__:
            if attrName.find("eventInpObjs_") == 0:
                # attribute name starts with the prefix
                for tmpOid in self.__dict__[attrName]:
                    retSet.add(tmpOid)
        return retSet

    def getAllInpEventNames(self):
        """ returns a list of all input event methods

        attribute name must start with 'eventInpObjs_'
        """
        retDict = {}
        for attrName in self.__dict__:
            if attrName.find("eventInpObjs_") == 0:
                # attribute name starts with the prefix
                retDict[attrName[len('eventInpObjs_'):]] = \
                    self.__dict__[attrName]
                #retList.append(attrName[len('eventInpObjs_'):])
        return retDict

    def getAllOutEventNames(self):
        """ returns a list of all output event methods

        attribute name must start with 'eventOutObjs_'
        """
        retDict = {}
        for attrName in self.__dict__:
            if attrName.find("eventOutObjs_") == 0:
                # attribute name starts with the prefix
                retDict[attrName[len('eventOutObjs_'):]] = \
                    self.__dict__[attrName]
                #retList.append(attrName[len('eventOutObjs_'):])
        return retDict

    def addToEventInpObjs(self, inputName, eventObj):
        """ add the event to the list of inp event object oids """
        if "eventInpObjs_" + inputName in self.__dict__:
            myObjIdSet = self.__dict__["eventInpObjs_" + inputName]
            print "myObjIdSet 1 :", myObjIdSet
            newOid = eventObj.getObjectId()
            if newOid not in myObjIdSet:
                myObjIdSet.add(newOid)
                # the set is mutated in place; flag the persistent object
                self._p_changed = True
            print "myObjIdSet 2 :", myObjIdSet

    def delFromEventInpObjs(self, inputName, eventObj):
        """ remove the event from the list of inp event object oids """
        if "eventInpObjs_" + inputName in self.__dict__:
            myObjIdSet = self.__dict__["eventInpObjs_" + inputName]
            print "myObjIdSet 1 :", myObjIdSet
            oldOid = eventObj.getObjectId()
            if oldOid in myObjIdSet:
                myObjIdSet.remove(oldOid)
                # the set is mutated in place; flag the persistent object
                self._p_changed = True
            print "myObjIdSet 2 :", myObjIdSet
class SchedulerModule(Module):
    """ Persistent state of the task scheduler: spool, waiting queue,
    running list, failure/finish indexes and the global task index.
    """
    id = "scheduler"

    def __init__(self):
        # Tasks waiting to be executed, ordered by timestamp.
        self._waitingQueue = PersistentWaitingQueue()
        # Tasks currently being executed.
        self._runningList = []
        # Failed tasks (there is an object still in the DB)
        self._failedIndex = IOIndex(IIndexableByArbitraryDateTime)
        # Finished tasks (no object data, just metadata)
        self._finishedIndex = IOIndex(IIndexableByArbitraryDateTime)
        # Stores all tasks, keyed by serial task id.
        self._taskIdx = IOBTree()
        self._taskCounter = Length(0)
        # Is the scheduler running
        self._schedulerStatus = False
        # Host/pid of the daemon currently running the scheduler (if any).
        self._hostname = None
        self._pid = None
        # Temporary area where all the tasks stay before being
        # added to the waiting list
        self._taskSpool = Queue()

    def _assertTaskStatus(self, task, status):
        """ Confirm the status of this task """
        if task.status != status:
            raise base.TaskInconsistentStatusException(
                "%s status is not %s" % (task, base.status(status)))
        if status == base.TASK_STATUS_RUNNING and \
               task not in self._runningList:
            raise base.TaskInconsistentStatusException(
                'task %s was not found in the running task list' % task)
        # TODO: remaining elifs

    def _indexTask(self, task):
        """ Provide the task with an id and add it to the task index """
        # give it a serial id
        task.initialize(self._taskCounter(), base.TASK_STATUS_SPOOLED)
        # index it and increase the count
        self._taskIdx[task.id] = task
        self._taskCounter.change(1)
        logging.getLogger('scheduler').debug(
            'Added %s to index..' % task)

    ## These are all interface methods, called by different modules

    def getStatus(self):
        """ Returns some basic info """
        # getattr with default: _hostname/_pid may be missing on
        # instances persisted before those attributes were introduced
        return {
            'state': self._schedulerStatus,
            'hostname': getattr(self, '_hostname', None),
            'pid': getattr(self, '_pid', None),
            'waiting': len(self._waitingQueue),
            'running': len(self._runningList),
            'spooled': len(self._taskSpool),
            'failed': self._failedIndex._num_objs(),
            'finished': self._finishedIndex._num_objs()
            }

    def getTaskById(self, taskId):
        """ look a task up by its serial id (raises KeyError if absent) """
        return self._taskIdx[taskId]

    def getSpool(self):
        """ return the spool queue itself """
        return self._taskSpool

    def clearSpool(self):
        """ drain the spool; returns the number of entries removed """
        i = 0
        try:
            # pull() raises IndexError once the spool is empty
            while(self._taskSpool.pull()):
                i += 1
        except IndexError:
            pass
        return i

    def spool(self, op, obj):
        """ Adds an 'instruction' to the spool, in the form (op, obj) """
        self._taskSpool.put((op, obj))
        logging.getLogger('scheduler').debug(
            'Added instruction %s to spool..' % ((op, obj),))
        return True

    def removeRunningTask(self, task):
        """ Remove a task from the running list """
        try:
            self._runningList.remove(task)
            # plain list mutation: flag the persistent object ourselves
            self._p_changed = True
        except ValueError:
            logging.getLogger('scheduler').exception(
                "Problem removing running task: %s" % self._runningList)

    def moveTask(self, task, moveFrom, status, occurrence=None,
                 nocheck=False):
        """ Move a task somewhere """
        if not occurrence:
            occurrence = task
        if not nocheck:
            self._assertTaskStatus(task, moveFrom)
        # remove the task from its current container
        if moveFrom == base.TASK_STATUS_RUNNING:
            # actually remove it from list
            self.removeRunningTask(task)
        elif moveFrom == base.TASK_STATUS_QUEUED:
            idx_timestamp = int_timestamp(task.getStartOn())
            self._waitingQueue.dequeue(idx_timestamp, task)
        elif moveFrom == base.TASK_STATUS_FAILED:
            self._failedIndex.unindex_obj(task)
        # index it either in finished or failed
        # (or queue it up again)
        if status == base.TASK_STATUS_FINISHED:
            self._finishedIndex.index_obj(occurrence)
        elif status in [base.TASK_STATUS_FAILED,
                        base.TASK_STATUS_TERMINATED]:
            self._failedIndex.index_obj(occurrence)
        elif status == base.TASK_STATUS_QUEUED:
            self.addTaskToWaitingQueue(occurrence)

    def changeTaskStartDate(self, oldTS, task):
        """ move a queued task from the oldTS bin to its new start time """
        newTS = int_timestamp(task.getStartOn())
        # enqueue-dequeue
        try:
            self._waitingQueue.dequeue(oldTS, task)
        # NOTE(review): bare except swallows *all* errors, not just
        # "task not in queue" -- should catch the specific exception
        except:
            logging.getLogger('scheduler').error(
                "%s was supposed to be changed but it was not "
                "found in the waiting queue!" % task)
            return
        self._waitingQueue.enqueue(newTS, task)
        logging.getLogger('scheduler').info(
            '%s moved from bin %s to %s...' % (task, oldTS, newTS))

    def addTaskToWaitingQueue(self, task, index=False):
        """ enqueue a task by its start timestamp; optionally index it first """
        if index:
            self._indexTask(task)
        # get an int timestamp
        if task.getStartOn():
            timestamp = int_timestamp(task.getStartOn())
            self._waitingQueue.enqueue(timestamp, task)
            # make it "officially" queued
            task.setStatus(base.TASK_STATUS_QUEUED)
            logging.getLogger('scheduler').debug(
                'Added %s to waitingQueue..' % task)

    def popNextWaitingTask(self):
        """ remove and return the earliest waiting task """
        return self._waitingQueue.pop()

    def peekNextWaitingTask(self):
        """ return (without removing) the earliest waiting task """
        return self._waitingQueue.peek()

    def removeWaitingTask(self, timestamp, task):
        """ remove a specific task from a specific timestamp bin """
        return self._waitingQueue.dequeue(timestamp, task)

    def getRunningList(self):
        return self._runningList

    def getWaitingQueue(self):
        return self._waitingQueue

    def getFailedIndex(self):
        return self._failedIndex

    def getFinishedIndex(self):
        return self._finishedIndex

    def getTaskIndex(self):
        return self._taskIdx

    def setSchedulerRunningStatus(self, status):
        """ record whether the scheduler daemon runs, and where (host/pid) """
        self._schedulerStatus = status
        self._hostname = socket.getfqdn() if status else None
        self._pid = os.getpid() if status else None

    def addTaskToRunningList(self, task):
        """ append a task to the running list """
        logging.getLogger('scheduler').debug(
            'Added task %s to runningList..' % task.id)
        self._runningList.append(task)
        # plain list mutation: flag the persistent object ourselves
        self._p_changed = True
class SchedulerModule(Module):
    """ Keeps the persistent state of the task scheduler: the waiting queue,
    the running list, the failed/finished indexes and the task spool. """

    id = "scheduler"

    def __init__(self):
        self._waitingQueue = PersistentWaitingQueue()
        self._runningList = []

        # Failed tasks (there is an object still in the DB)
        self._failedIndex = IOIndex(IIndexableByArbitraryDateTime)

        # Finished tasks (no object data, just metadata)
        self._finishedIndex = IOIndex(IIndexableByArbitraryDateTime)

        # Stores all tasks
        self._taskIdx = IOBTree()
        self._taskCounter = Length(0)

        # Is the scheduler running
        self._schedulerStatus = False

        # Temporary area where all the tasks stay before being
        # added to the waiting list
        self._taskSpool = Queue()

    def _assertTaskStatus(self, task, status):
        """ Confirm the status of this task """
        if task.status != status:
            raise base.TaskInconsistentStatusException(
                "%s status is not %s" % (task, base.status(status)))

        if status == base.TASK_STATUS_RUNNING and \
               task not in self._runningList:
            raise base.TaskInconsistentStatusException(
                'task %s was not found in the running task list' % task)

        # TODO: remaining elifs

    def _indexTask(self, task):
        """ Provide the task with an id and add it to the task index """
        # give it a serial id
        task.initialize(self._taskCounter(), base.TASK_STATUS_SPOOLED)

        # index it and increase the count
        self._taskIdx[task.id] = task
        self._taskCounter.change(1)

        logging.getLogger('scheduler').debug(
            'Added %s to index..' % task)

    ## These are all interface methods, called by different modules

    def getStatus(self):
        """ Returns some basic info """
        return {
            'state': self._schedulerStatus,
            'waiting': len(self._waitingQueue),
            'running': len(self._runningList),
            'spooled': len(self._taskSpool),
            'failed': self._failedIndex._num_objs(),
            'finished': self._finishedIndex._num_objs()
        }

    def getTaskById(self, taskId):
        """ Return the task indexed under `taskId` (raises KeyError if unknown). """
        return self._taskIdx[taskId]

    def getSpool(self):
        """ Return the temporary task spool queue. """
        return self._taskSpool

    def clearSpool(self):
        """ Discard all pending (op, obj) instructions from the spool.

        Returns the number of entries removed.
        """
        i = 0
        try:
            # pull() raises IndexError once the spool is empty; entries are
            # (op, obj) tuples, so they are always truthy and the loop only
            # terminates through the exception
            while self._taskSpool.pull():
                i += 1
        except IndexError:
            pass
        return i

    def spool(self, op, obj):
        """ Adds an 'instruction' to the spool, in the form (op, obj) """
        self._taskSpool.put((op, obj))
        logging.getLogger('scheduler').debug(
            'Added instruction %s to spool..' % ((op, obj),))
        return True

    def removeRunningTask(self, task):
        """ Remove a task from the running list """
        try:
            self._runningList.remove(task)
            self._p_changed = True
        except ValueError:
            # task was not in the list - log it, but don't propagate
            logging.getLogger('scheduler').exception(
                "Problem removing running task: %s" % self._runningList)

    def moveTask(self, task, moveFrom, status, occurrence=None, nocheck=False):
        """ Move a task somewhere """
        if not occurrence:
            occurrence = task

        if not nocheck:
            self._assertTaskStatus(task, moveFrom)

        if moveFrom == base.TASK_STATUS_RUNNING:
            # actually remove it from list
            self.removeRunningTask(task)
        elif moveFrom == base.TASK_STATUS_QUEUED:
            idx_timestamp = int_timestamp(task.getStartOn())
            self._waitingQueue.dequeue(idx_timestamp, task)
        elif moveFrom == base.TASK_STATUS_FAILED:
            self._failedIndex.unindex_obj(task)

        # index it either in finished or failed
        # (or queue it up again)
        if status == base.TASK_STATUS_FINISHED:
            self._finishedIndex.index_obj(occurrence)
        elif status in [base.TASK_STATUS_FAILED, base.TASK_STATUS_TERMINATED]:
            self._failedIndex.index_obj(occurrence)
        elif status == base.TASK_STATUS_QUEUED:
            self.addTaskToWaitingQueue(occurrence)

    def changeTaskStartDate(self, oldTS, task):
        """ Move `task` from its old time bin (`oldTS`) to the bin matching
        its current start date. """
        newTS = int_timestamp(task.getStartOn())

        # enqueue-dequeue
        try:
            self._waitingQueue.dequeue(oldTS, task)
        # NOTE(fix): was a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt - narrowed to Exception
        except Exception:
            logging.getLogger('scheduler').error(
                "%s was supposed to be changed but it was not "
                "found in the waiting queue!" % task)
            return

        self._waitingQueue.enqueue(newTS, task)
        logging.getLogger('scheduler').info(
            '%s moved from bin %s to %s...' % (task, oldTS, newTS))

    def addTaskToWaitingQueue(self, task, index=False):
        """ Put `task` in the waiting queue (indexing it first if `index`)
        and mark it as queued. """
        if index:
            self._indexTask(task)

        # get an int timestamp
        # NOTE(fix): guard against tasks with no start date, so that
        # int_timestamp() is not called on None (mirrors the sibling copy
        # of this method elsewhere in this file)
        if task.getStartOn():
            timestamp = int_timestamp(task.getStartOn())
            self._waitingQueue.enqueue(timestamp, task)

        # make it "officially" queued
        task.setStatus(base.TASK_STATUS_QUEUED)

        logging.getLogger('scheduler').debug(
            'Added %s to waitingQueue..' % task)

    def popNextWaitingTask(self):
        return self._waitingQueue.pop()

    def peekNextWaitingTask(self):
        return self._waitingQueue.peek()

    def removeWaitingTask(self, timestamp, task):
        return self._waitingQueue.dequeue(timestamp, task)

    def getRunningList(self):
        return self._runningList

    def getWaitingQueue(self):
        return self._waitingQueue

    def getFailedIndex(self):
        return self._failedIndex

    def getFinishedIndex(self):
        return self._finishedIndex

    def getTaskIndex(self):
        return self._taskIdx

    def setSchedulerRunningStatus(self, status):
        """ Record whether the scheduler daemon is running. """
        self._schedulerStatus = status

    def addTaskToRunningList(self, task):
        logging.getLogger('scheduler').debug(
            'Added task %s to runningList..' % task.id)
        self._runningList.append(task)
        # _runningList is a plain list, so the persistence machinery must be
        # told about the mutation explicitly
        self._p_changed = True