def __init__(self, cr):
    DistributedPartyActivity.__init__(
        self, cr,
        PartyGlobals.ActivityIds.PartyCatch,
        PartyGlobals.ActivityTypes.HostInitiated,
        wantRewardGui=True)
    self.setUsesSmoothing()
    self.setUsesLookAround()
    self._sNumGen = SerialNumGen()
class InputStateToken:
    _SerialGen = SerialNumGen()
    Inval = 'invalidatedToken'

    def __init__(self, inputState):
        self._id = InputStateToken._SerialGen.next()
        self._hash = self._id
        self._inputState = inputState

    def release(self):
        # subclasses will override
        assert False

    def isValid(self):
        return self._id != InputStateToken.Inval

    def invalidate(self):
        self._id = InputStateToken.Inval

    def __hash__(self):
        return self._hash

    #snake_case alias:
    is_valid = isValid
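# Usage sketch (illustrative; not part of the original module). _ExampleToken is a
# hypothetical subclass that implements release(); it shows the intended token
# lifecycle: the hash is frozen at construction, so the token keeps working as a
# dictionary key even after it has been released and invalidated.
class _ExampleToken(InputStateToken):
    def release(self):
        # a real subclass would tell self._inputState to drop the registration
        # here; this demo simply marks the token as spent
        self.invalidate()

token = _ExampleToken(inputState=None)   # inputState=None: demo only
registry = {token: 'forward'}
assert token.isValid()
token.release()
assert not token.isValid()
assert registry[token] == 'forward'      # hash unchanged by invalidation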
class SpeedMonitor:
    notify = directNotify.newCategory('SpeedMonitor')
    SerialGen = SerialNumGen()
    TrackingPeriod = 30.0

    def __init__(self, name):
        self._name = name
        self._nodepaths = {}
        self._maxSpeeds = {}
        self._prevPosQueue = {}
        self._speedLimits = {}
        self._trackTask = taskMgr.add(
            self._trackSpeedsTask,
            'speedMonitorTask-%s-%s' % (self._name, id(self)))

    def destroy(self):
        taskMgr.remove(self._trackTask)

    def _allocToken(self):
        return 'speedMonitorToken-%s-%s-%s' % (
            self._name, id(self), SpeedMonitor.SerialGen.next())

    def addNodepath(self, nodepath):
        token = self._allocToken()
        self._nodepaths[token] = nodepath
        self.resetMaxSpeed(token)
        return token

    def setSpeedLimit(self, token, limit, callback):
        self._speedLimits[token] = (limit, callback)

    def removeNodepath(self, token):
        del self._nodepaths[token]
        del self._maxSpeeds[token]
        del self._prevPosQueue[token]
        if token in self._speedLimits:
            self._speedLimits.pop(token)

    def getMaxSpeed(self, token):
        return self._maxSpeeds[token]

    def resetMaxSpeed(self, token):
        self._maxSpeeds[token] = 0.0
        nodepath = self._nodepaths[token]
        # seed the position queue with the current position, backdated by the
        # tracking period so a full measurement window is available immediately
        self._prevPosQueue[token] = [
            (nodepath.getPos(),
             globalClock.getFrameTime() - SpeedMonitor.TrackingPeriod,
             0.0)
        ]

    def _trackSpeedsTask(self, task=None):
        for (token, nodepath) in self._nodepaths.items():
            curT = globalClock.getFrameTime()
            curPos = nodepath.getPos()
            # drop samples that have aged out of the tracking period, always
            # keeping at least one previous sample
            while len(self._prevPosQueue[token]) > 1:
                (oldestPos, oldestT, oldestDistance) = self._prevPosQueue[token][1]
                if curT - oldestT > SpeedMonitor.TrackingPeriod:
                    self._prevPosQueue[token] = self._prevPosQueue[token][1:]
                else:
                    break
            # only record a new sample if the nodepath has actually moved
            storeCurPos = False
            if len(self._prevPosQueue[token]) == 0:
                storeCurPos = True
                curDistance = 0.0
            else:
                (prevPos, prevT, prevDistance) = self._prevPosQueue[token][-1]
                if curPos != prevPos:
                    storeCurPos = True
                    curDistance = (curPos - prevPos).length()
            if storeCurPos:
                self._prevPosQueue[token].append((curPos, curT, curDistance))
            if len(self._prevPosQueue[token]) > 1:
                (oldestPos, oldestT, oldestDistance) = self._prevPosQueue[token][0]
                (newestPos, newestT, newestDistance) = self._prevPosQueue[token][-1]
                tDelta = newestT - oldestT
                if tDelta >= SpeedMonitor.TrackingPeriod:
                    # average speed over the tracking window
                    totalDistance = 0.0
                    for (pos, t, distance) in self._prevPosQueue[token][1:]:
                        totalDistance += distance
                    speed = totalDistance / tDelta
                    if speed > self._maxSpeeds[token]:
                        if self.notify.getDebug():
                            self.notify.debug('new max speed(%s): %s' % (nodepath, speed))
                        self._maxSpeeds[token] = speed
                        # only enforce a limit if one has been set for this nodepath
                        if token in self._speedLimits:
                            (limit, callback) = self._speedLimits[token]
                            if speed > limit:
                                self.notify.warning(
                                    '%s over speed limit (%s, cur speed=%s)' %
                                    (nodepath, limit, speed))
                                callback(speed)
        return Task.cont
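# Usage sketch (illustrative; not part of the original module): monitor a
# NodePath and get a callback if its average speed over the tracking window
# exceeds a chosen limit. avatarNP and _reportSpeedViolation are placeholder
# names, not part of the class above.
monitor = SpeedMonitor('avatars')
token = monitor.addNodepath(avatarNP)
monitor.setSpeedLimit(token, 25.0, _reportSpeedViolation)  # callback receives the measured speed
# ... later, when the nodepath should no longer be tracked ...
monitor.removeNodepath(token)
monitor.destroy()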
class EventGroup(DirectObject.DirectObject):
    """This class allows you to group together multiple events and treat
    them as a single event. The EventGroup will not send out its event until
    all of its sub-events have occurred."""

    _SerialNumGen = SerialNumGen()

    def __init__(self, name, subEvents=None, doneEvent=None):
        """
        Provide a meaningful name to aid debugging.

        doneEvent is optional. If not provided, a unique done event will be
        generated and is available as EventGroup.getDoneEvent().

        Examples:

        # waits for gotRed and gotBlue, then sends out 'gotColors'
        EventGroup('getRedAndBlue', ('gotRed', 'gotBlue'),
                   doneEvent='gotColors')

        # waits for two interests to close, then calls self._handleBothInterestsClosed()
        # uses EventGroup.getDoneEvent() and EventGroup.newEvent() to generate unique,
        # disposable event names
        eGroup = EventGroup('closeInterests')
        self.acceptOnce(eGroup.getDoneEvent(), self._handleBothInterestsClosed)
        base.cr.closeInterest(interest1, event=eGroup.newEvent('closeInterest1'))
        base.cr.closeInterest(interest2, event=eGroup.newEvent('closeInterest2'))
        """
        self._name = name
        self._subEvents = set()
        self._completedEvents = set()
        if doneEvent is None:
            # no doneEvent provided, allocate a unique event name
            doneEvent = 'EventGroup-%s-%s-Done' % (
                EventGroup._SerialNumGen.next(), self._name)
        self._doneEvent = doneEvent
        self._completed = False

        if subEvents is not None:
            # add the events that were passed in to start with, more may be added
            # later via newEvent()
            for event in subEvents:
                self.addEvent(event)

    def destroy(self):
        if hasattr(self, '_name'):
            # keep this around
            #del self._doneEvent
            del self._name
            del self._subEvents
            del self._completedEvents
            self.ignoreAll()

    def getName(self):
        return self._name

    def getDoneEvent(self):
        return self._doneEvent

    def isCompleted(self):
        return self._completed

    def addEvent(self, eventName):
        """ Adds a new event to the list of sub-events that we're waiting on.
        Returns the name of the event. """
        if self._completed:
            self.notify.error(
                'addEvent(\'%s\') called on completed EventGroup \'%s\'' % (
                    eventName, self.getName()))
        if eventName in self._subEvents:
            self.notify.error(
                'addEvent(\'%s\'): event already in EventGroup \'%s\'' % (
                    eventName, self.getName()))
        self._subEvents.add(eventName)
        self.acceptOnce(eventName, Functor(self._subEventComplete, eventName))
        return eventName

    def newEvent(self, name):
        """ Pass in an event name and it will be unique-ified for you and added
        to this EventGroup. TIP: there's no need to repeat information in this
        event name that is already in the name of the EventGroup object.
        Returns the new event name. """
        return self.addEvent('%s-SubEvent-%s-%s' % (
            self._name, EventGroup._SerialNumGen.next(), name))

    def _subEventComplete(self, subEventName, *args, **kwArgs):
        if subEventName in self._completedEvents:
            self.notify.warning('_subEventComplete: \'%s\' already received' %
                                subEventName)
        else:
            self._completedEvents.add(subEventName)
            if self._completedEvents == self._subEvents:
                self._signalComplete()

    def _signalComplete(self):
        self._completed = True
        messenger.send(self._doneEvent)
        self.destroy()

    def __repr__(self):
        return '%s(\'%s\', %s, doneEvent=\'%s\') # completed=%s' % (
            self.__class__.__name__,
            self._name,
            tuple(self._subEvents),
            self._doneEvent,
            tuple(self._completedEvents))
class Job(DirectObject):
    """Base class for cpu-intensive or non-time-critical operations that
    are run through the :class:`.JobManager`.

    To use, subclass and override the `run()` method.
    """

    #: Yielded from the `run()` generator method when the job is done.
    Done = object()

    #: ``yield None`` is acceptable in place of ``yield Job.Continue``
    Continue = None

    #: Yield any remaining time for this job until next frame.
    Sleep = object()

    # These priorities determine how many timeslices a job gets relative to other
    # jobs. A job with priority of 1000 will run 10 times more often than a job
    # with priority of 100.
    Priorities = ScratchPad(Min=1, Low=100, Normal=1000, High=10000)

    _SerialGen = SerialNumGen()

    def __init__(self, name):
        self._name = name
        self._generator = None
        self._id = Job._SerialGen.next()
        self._printing = False
        self._priority = Job.Priorities.Normal
        self._finished = False
        if __debug__:
            self._pstats = PStatCollector("App:Show code:jobManager:%s" % self._name)

    def destroy(self):
        del self._name
        del self._generator
        del self._printing

    def getFinishedEvent(self):
        return 'job-finished-%s' % self._id

    def run(self):
        """This should be overridden with a generator that does the needful
        processing.

        yield `Job.Continue` when possible/reasonable, and try not to run
        longer than the JobManager's timeslice between yields.

        When done, yield `Job.Done`.
        """
        raise NotImplementedError("don't call down")

    def getPriority(self):
        return self._priority

    def setPriority(self, priority):
        self._priority = priority

    def printingBegin(self):
        self._printing = True

    def printingEnd(self):
        self._printing = False

    def resume(self):
        """Called every time JobManager is going to start running this job."""
        #if self._printing:
        #    # we may be suspended/resumed multiple times per frame, that gets spammy
        #    # if we need to pick out the output of a job, put a prefix onto each line
        #    # of the output
        #    print('JOB:%s:RESUME' % self._name)
        pass

    def suspend(self):
        """Called when JobManager is going to stop running this job for a
        while."""
        #if self._printing:
        #    print('JOB:%s:SUSPEND' % self._name)
        pass

    def _setFinished(self):
        self._finished = True
        self.finished()

    def isFinished(self):
        return self._finished

    def finished(self):
        # called when the job finishes and has been removed from the JobManager
        pass

    def getJobName(self):
        return self._name

    def _getJobId(self):
        return self._id

    def _getGenerator(self):
        if self._generator is None:
            self._generator = self.run()
        return self._generator

    def _cleanupGenerator(self):
        if self._generator is not None:
            self._generator = None
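# Usage sketch (illustrative; not part of the original module): a Job subclass
# that spreads its work across frames by yielding Job.Continue between small
# units of work and yielding Job.Done when complete. In a full program it would
# be handed to the JobManager for scheduling (e.g. jobMgr.add(CountJob(...)));
# only the Job side is shown here, and CountJob is a placeholder name.
class CountJob(Job):
    def __init__(self, name, items):
        Job.__init__(self, name)
        self._items = items
        self.count = 0

    def run(self):
        for item in self._items:
            self.count += 1          # one small unit of work
            yield Job.Continue       # hand the timeslice back to the JobManager
        yield Job.Done               # tell the JobManager we're finished

    def finished(self):
        # called once the JobManager has removed the completed job
        print('%s counted %s items' % (self.getJobName(), self.count))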
class ExclusiveObjectPool(DirectObject):
    # ObjectPool specialization that excludes particular objects

    # IDs of objects to globally exclude from reporting
    _ExclObjs = []
    _ExclObjIds = {}
    _SyncMaster = Sync('ExclusiveObjectPool.ExcludedObjectList')
    _SerialNumGen = SerialNumGen()

    @classmethod
    def addExclObjs(cls, *objs):
        for obj in makeList(objs):
            if id(obj) not in cls._ExclObjIds:
                cls._ExclObjs.append(obj)
            cls._ExclObjIds.setdefault(id(obj), 0)
            cls._ExclObjIds[id(obj)] += 1
        cls._SyncMaster.change()

    @classmethod
    def removeExclObjs(cls, *objs):
        for obj in makeList(objs):
            assert id(obj) in cls._ExclObjIds
            cls._ExclObjIds[id(obj)] -= 1
            if cls._ExclObjIds[id(obj)] == 0:
                del cls._ExclObjIds[id(obj)]
                cls._ExclObjs.remove(obj)
        cls._SyncMaster.change()

    def __init__(self, objects):
        self._objects = list(objects)
        self._postFilterObjs = []
        self._sync = Sync(
            '%s-%s' % (self.__class__.__name__, self._SerialNumGen.next()),
            self._SyncMaster)
        self._sync.invalidate()
        ExclusiveObjectPool.addExclObjs(self._objects, self._postFilterObjs,
                                        self._sync)

    def destroy(self):
        self.ignoreAll()
        ExclusiveObjectPool.removeExclObjs(self._objects, self._postFilterObjs,
                                           self._sync)
        del self._objects
        del self._postFilterObjs
        del self._sync

    def _resync(self):
        # rebuild the filtered pool if the global exclusion list has changed
        if self._sync.isSynced(self._SyncMaster):
            return
        if hasattr(self, '_filteredPool'):
            ExclusiveObjectPool.removeExclObjs(
                *self._filteredPool._getInternalObjs())
            ExclusiveObjectPool.removeExclObjs(self._filteredPool)
            del self._filteredPool
        del self._postFilterObjs[:]
        for obj in self._objects:
            if id(obj) not in ExclusiveObjectPool._ExclObjIds:
                self._postFilterObjs.append(obj)
        self._filteredPool = ExclusiveObjectPool(self._postFilterObjs)
        ExclusiveObjectPool.addExclObjs(self._filteredPool)
        ExclusiveObjectPool.addExclObjs(*self._filteredPool._getInternalObjs())
        self._sync.sync(self._SyncMaster)

    def getObjsOfType(self, type):
        self._resync()
        return self._filteredPool.getObjsOfType(type)

    def printObjsOfType(self, type):
        self._resync()
        return self._filteredPool.printObjsOfType(type)

    def diff(self, other):
        self._resync()
        return self._filteredPool.diff(other._filteredPool)

    def typeFreqStr(self):
        self._resync()
        return self._filteredPool.typeFreqStr()

    def __len__(self):
        self._resync()
        return len(self._filteredPool)
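# Usage sketch (illustrative; not part of the original module): the class-level
# exclusion registry is reference-counted, so an object stays excluded until
# every addExclObjs() call for it has been matched by a removeExclObjs() call.
# 'scratch' is a placeholder object; Sync and makeList come from the
# surrounding module.
scratch = {}
ExclusiveObjectPool.addExclObjs(scratch)
ExclusiveObjectPool.addExclObjs(scratch)                  # second reference
ExclusiveObjectPool.removeExclObjs(scratch)
assert id(scratch) in ExclusiveObjectPool._ExclObjIds     # still excluded
ExclusiveObjectPool.removeExclObjs(scratch)
assert id(scratch) not in ExclusiveObjectPool._ExclObjIds # fully released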