def __MainLoop(self): """ This function operates in it's own thread, outside of the rest of the class. """ blue.pyos.synchro.Yield() try: while self.__killLoop == False: now = blue.os.GetWallclockTime() if self.__debugging: try: if ClockThis(self.uniqueIDstring + '::SafeThreadLoop', self.SafeThreadLoop, now) == self.KILL_ME: self.__killLoop = True except SystemExit: raise except TaskletExit: self.__killLoop = True except Exception as inst: self.__killLoop = True print 'SafeThread.__MainLoop - Unhandled Exception: ' + self.uniqueIDstring print 'Repair attempts remaining:', self.MAX_REPAIR_ATTEMPTS - self.repairCount - 1, '\n' log.LogException() print traceback.print_tb(sys.exc_info()[2]) print inst print inst.__doc__ debug.startDebugging() uthread.new(self.__RestoreSafeThreadLoop) else: try: if ClockThis(self.uniqueIDstring + '::SafeThreadLoop', self.SafeThreadLoop, now) == self.KILL_ME: self.__killLoop = True except SystemExit: raise except TaskletExit: self.__killLoop = True except Exception as e: self.__killLoop = True print 'SafeThread.__MainLoop - Unhandled Exception: ' + self.uniqueIDstring, '\n Debug mode is off, skipping straight to repair' print 'Repair attempts remaining:', self.MAX_REPAIR_ATTEMPTS - self.repairCount - 1, '\n' print traceback.print_tb(sys.exc_info()[2]) log.LogException() uthread.new(self.__RestoreSafeThreadLoop) self.__thread = None self.__active = False self.__killLoop = True raise e blue.pyos.synchro.SleepWallclock(self.__sleepTime) self.__thread = None self.__active = False except AttributeError: sys.exc_clear()
def CallUp(self, packet):
    """
    Resolve the destination service of *packet* to a node and route upward.

    RESOLVE_REQ packets get a resolution response back; other packets are
    forwarded via ForwardCallUp when this node is the resolved destination.
    Raises UnMachoDestination when resolution fails, WrongMachoNode when the
    packet resolved to a different node.
    """
    if getattr(packet.destination, 'service', 0):
        if packet.payload[1] == 'MachoBindObject':
            # MachoBindObject calls skip resolution here (nodeID stays None)
            # -- presumably bound objects carry their own addressing; confirm.
            nodeID = None
        else:
            nodeID = ClockThis(
                'machoNet::GPCS::gpcs.Resolve::CallUp::MachoResolve',
                sm.StartServiceAndWaitForRunningState(
                    packet.destination.service).MachoResolve,
                session.GetActualSession())
        if type(nodeID) == types.StringType:
            # A string result from MachoResolve is a failure reason, not a node ID.
            if packet.command == const.cluster.MACHONETMSG_TYPE_RESOLVE_REQ:
                return packet.Response(0, nodeID)
            raise UnMachoDestination("Failed to resolve %s, reason='%s'" % (packet.destination.service, nodeID))
        elif nodeID is None:
            if packet.command == const.cluster.MACHONETMSG_TYPE_RESOLVE_REQ:
                # Resolved "anywhere": respond with a service-only address.
                rsp = packet.Response(1, '')
                rsp.source = macho.MachoAddress(
                    service=packet.destination.service)
                return rsp
        else:
            if packet.command == const.cluster.MACHONETMSG_TYPE_RESOLVE_REQ:
                # Resolved to a concrete node: echo it in the response source.
                rsp = packet.Response(1, '')
                rsp.source = macho.MachoAddress(
                    nodeID=nodeID, service=packet.destination.service)
                return rsp
        if nodeID != self.machoNet.GetNodeID():
            # Non-resolve packet addressed to a service living elsewhere.
            raise WrongMachoNode(nodeID)
    return self.ForwardCallUp(packet)
def HandlePython(self, request, response, filename, files):
    """
    Execute a server-side Python page and let its Execute() render the response.

    *files* is a sequence of (fullpath, file-object) pairs that make up the
    page.  Compiled globals are cached per *filename* in self.codeCache and
    invalidated when any source file's mtime moves forward.
    """
    oldctxt = bluepy.PushTimer('HTTP::Handle::Pages')
    try:
        # Dynamic pages must never be cached on the client side.
        response.header['Expires'] = '0'
        response.header['Cache-Control'] = 'private, no-cache'
        if request.method == 'HEAD':
            log.LogInfo('Got a HEAD request, not executing script...')
        else:
            # Newest mtime across all source files decides cache freshness.
            modified = max([ os.stat(fullpath).st_mtime for fullpath, f in files ])
            if filename not in self.codeCache or modified > self.codeCache[filename][0]:
                with bluepy.Timer('HTTP::Handle::ExecFile::' + filename):
                    import __builtin__
                    glob = {'__builtins__': __builtin__}
                    for fn, f in files:
                        # compile() wants \n-only newlines.
                        data = f.read().replace('\r\n', '\n')
                        f.close()
                        code = compile(data, fn, 'exec', 0, True)
                        # NOTE: pages run with full builtins -- trusted input only.
                        exec code in glob
                    self.codeCache[filename] = (modified, glob)
            else:
                glob = self.codeCache[filename][1]
            sess = request.session
            # Run the page as the page's session; always unmask afterwards.
            masque = sess.Masquerade()
            if not session.userid and macho.mode != 'client':
                raise RuntimeError('\n **********************************************************************\n * SESSION IS BROKED, HAS NO USERID. RESTART YOUR SERVER PAGE BROWSER *\n **********************************************************************\n ')
            try:
                ClockThis('HTTP::Handle::Pages::' + request.path, glob['Execute'], request, response, sess)
            finally:
                masque.UnMask()
    finally:
        bluepy.PopTimer(oldctxt)
def __init__(self, shared, objectID, cachedObject, objectVersion=None):
    """
    Wrap *cachedObject* for caching under the unique key *objectID*.

    *shared* is true when the object may be cached on proxies/servers for
    multiple users.  *objectVersion* is combined with objectID to identify
    this particular version; when None, a CRC of the pickle is used instead.
    """
    self.__shared__ = shared
    self.__objectID__ = objectID
    self.__nodeID__ = sm.GetService('machoNet').GetNodeID()
    self.__cachedObject__ = cachedObject
    self.__compressed__ = 0
    self.__thePickle__ = None
    # Version is (wallclock timestamp, caller-supplied version or CRC below).
    self.__objectVersion__ = (blue.os.GetWallclockTimeNow(), objectVersion)
    if (self.__shared__ or objectVersion is None) and macho.mode != 'client':
        self.__thePickle__ = blue.marshal.Save(cachedObject)
        if len(self.__thePickle__) > 170:
            # Only bother compressing payloads over 170 bytes.
            try:
                t = ClockThis('machoNet::util.CachedObject::compress', zlib.compress, self.__thePickle__, 1)
            except zlib.error as e:
                raise RuntimeError('Compression Failure: ' + strx(e))
            if len(t) < len(self.__thePickle__):
                # Keep the compressed form only when it is actually smaller.
                self.__thePickle__ = t
                self.__compressed__ = 1
        if objectVersion is None:
            # No explicit version: derive one from a CRC over the pickle.
            self.__objectVersion__ = (self.__objectVersion__[0], binascii.crc_hqx(
                self.__thePickle__, macho.version + 170472))
def _LookupConstValue(name, default, justChecking):
    """
    Resolve a legacy constant by name, lazily building the reverse map.

    When *justChecking* is true, only report existence.  Otherwise return
    the value from the reverse map or const module, falling back to
    *default* unless it is the sentinel '.exception', in which case an
    AttributeError is raised.
    """
    if not constMap:

        @telemetry.ZONE_FUNCTION
        def MakeReverseConstValues(constMap):
            # (rowset, name prefix, name column, id column) per legacy table.
            tables = (
                (cfg.invcategories, 'category', '_categoryName', 'categoryID'),
                (cfg.invgroups, 'group', '_groupName', 'groupID'),
                (cfg.invmetagroups.data.itervalues(), 'metaGroup',
                 '_metaGroupName', 'metaGroupID'),
                (cfg.invtypes, 'type', '_typeName', 'typeID'),
                (cfg.dgmattribs, 'attribute', 'attributeName', 'attributeID'),
                (cfg.dgmeffects, 'effect', 'effectName', 'effectID'),
            )
            for rows, prefix, nameCol, idCol in tables:
                for row in rows:
                    key = util.MakeConstantName(getattr(row, nameCol, ''), prefix)
                    constMap[key] = getattr(row, idCol, 0)

        ClockThis('^boot::MakeReverseConstValues', MakeReverseConstValues, constMap)
    if justChecking:
        return name in constMap or name in const.__dict__
    # Reverse map wins over the const module when both define the name.
    for mapping in (constMap, const.__dict__):
        if name in mapping:
            return mapping[name]
    if default != '.exception':
        return default
    raise AttributeError("There's no legacy const value called '%s'" % name)
def GetSession(self, request, response):
    """
    Return the session attached to *request*, looking one up when absent.

    A session already on the request (with a userid) is returned directly;
    otherwise the module-level GetSession helper is consulted and, on
    success, cached back onto the request.
    """
    try:
        if request.session.userid:
            return request.session
        # No userid yet: fall through to the lookup path below.
        raise AttributeError
    except AttributeError:
        sess = ClockThis('HTTP::Handle::GetSession', GetSession, self,
                         request, response, self.sessionsBySID,
                         self.sessionsByFlatkaka)
    if not sess:
        return None
    request.session = sess
    return sess
def __init__(self, shared, objectID, cachedObject, objectVersion=None):
    """
    shared is true if and only if this is an object that is generally speaking shared
    by multiple users, and thus worth keeping around in cache on proxies.
    objectID must be a unique, persistable, comparable identifier that totally uniquely
    identifies cachedObject.  objectVersion is added to objectID.  Basically,
    objectID+objectVersion says "it's this object, and this particular version of it."
    Only the most recently received version of an object is stored in cache, replacing
    any previous version.  cachedObject is the actual object that is going to be cached.
    It must be passable by value.
    """
    self.__shared__ = shared
    self.__objectID__ = objectID
    self.__nodeID__ = sm.GetService('machoNet').GetNodeID()
    self.__cachedObject__ = cachedObject
    self.__compressed__ = 0
    self.__thePickle__ = None
    # Version is (wallclock timestamp, caller-supplied version or CRC below).
    self.__objectVersion__ = (blue.os.GetWallclockTimeNow(), objectVersion)
    if (self.__shared__ or objectVersion is None) and macho.mode != 'client':
        self.__thePickle__ = blue.marshal.Save(cachedObject)
        if len(self.__thePickle__) > 170:
            # Only bother compressing payloads over 170 bytes.
            try:
                t = ClockThis('machoNet::util.CachedObject::compress', zlib.compress, self.__thePickle__, 1)
            except zlib.error as e:
                raise RuntimeError('Compression Failure: ' + strx(e))
            if len(t) < len(self.__thePickle__):
                # Keep the compressed form only when it is actually smaller.
                self.__thePickle__ = t
                self.__compressed__ = 1
        if objectVersion is None:
            # No explicit version: derive one from a CRC over the pickle.
            self.__objectVersion__ = (self.__objectVersion__[0], binascii.crc_hqx(
                self.__thePickle__, macho.version + 170472))
def GetCachableObject(self, shared, objectID, objectVersion, nodeID):
    """
    Returns a cached copy of object 'objectID' or returns None if 'objectID' is not
    found in cache.  If the cached version is older than 'objectVersion': acquire it
    from node nodeID (through our proxy, of course, if we're a client).  'shared' is
    true iff object has been cached in "shared" mode (meaning: a copy is cached on
    servers and proxies).  Returns a utilCachedObject or objectCaching.CachedObject.
    """
    callTimer = base.CallTimer('objectCaching::GetCachableObject')
    try:
        if macho.mode == 'client':
            gpcs = sm.services['machoNet']
        else:
            gpcs = sm.services['machoNet']
            gpcs.GetGPCS()
        # Proxy: drop cached entries whose owning node is no longer connected.
        if objectID in self.cachedObjects and isinstance(
                self.cachedObjects[objectID], utilCachedObject) and macho.mode == 'proxy':
            if sm.services['machoNet'].GetTransportOfNode(
                    self.cachedObjects[objectID].__nodeID__) is None:
                del self.cachedObjects[objectID]
        # A uthread.Semaphore stored in the slot means another tasklet is
        # already fetching this object; missing/stale entries trigger a fetch.
        if objectID in self.cachedObjects and isinstance(
                self.cachedObjects[objectID], uthread.Semaphore
                ) or objectID not in self.cachedObjects or not isinstance(
                self.cachedObjects[objectID],
                uthread.Semaphore) and self.__OlderVersion(
                self.cachedObjects[objectID].version, objectVersion):
            if objectID not in self.cachedObjects or not isinstance(
                    self.cachedObjects[objectID],
                    uthread.Semaphore) and self.__OlderVersion(
                    self.cachedObjects[objectID].version, objectVersion):
                # Claim the slot with a semaphore so concurrent callers queue up.
                self.cachedObjects[objectID] = uthread.Semaphore(
                    ('objectCaching', objectID))
            while 1:
                semaphore = self.cachedObjects[objectID]
                semaphore.acquire()
                try:
                    # Someone else may have completed the fetch while we waited.
                    if not isinstance(self.cachedObjects[objectID],
                                      uthread.Semaphore):
                        self.__UpdateStatisticsGetCachableObject(
                            objectID, objectVersion,
                            self.cachedObjects[objectID])
                        return self.cachedObjects[objectID]
                    if macho.mode == 'client':
                        if shared:
                            # Shared objects come via our proxy when we have one.
                            if not sm.services['machoNet'].myProxyNodeID:
                                proxyNodeID = nodeID
                            else:
                                proxyNodeID = sm.services[
                                    'machoNet'].myProxyNodeID
                            remoteObject = gpcs.RemoteServiceCall(
                                session, proxyNodeID, 'objectCaching',
                                'GetCachableObject',
                                shared, objectID, objectVersion, nodeID)
                        else:
                            remoteObject = gpcs.RemoteServiceCall(
                                session, nodeID, 'objectCaching',
                                'GetCachableObject', shared, objectID,
                                objectVersion, nodeID)
                        self.cachedObjects[objectID] = remoteObject
                        self.__CacheIsDirty('cachedObjects', objectID)
                    elif macho.mode == 'proxy':
                        remoteObject = gpcs.RemoteServiceCall(
                            self.session, nodeID, 'objectCaching',
                            'GetCachableObject', shared, objectID,
                            objectVersion, nodeID)
                        # Compress larger uncompressed pickles before caching.
                        if not remoteObject.compressed and len(
                                remoteObject.pickle) > 200:
                            try:
                                t = ClockThis('objectCaching::compress',
                                              zlib.compress,
                                              remoteObject.pickle, 1)
                            except zlib.error as e:
                                raise RuntimeError(
                                    'Compression Failure: ' + strx(e))
                            if len(t) < len(remoteObject.pickle):
                                remoteObject.compressed = 1
                                remoteObject.pickle = t
                        if remoteObject.shared:
                            self.cachedObjects[objectID] = remoteObject
                            self.__CacheIsDirty('cachedObjects', objectID)
                        else:
                            # Non-shared: don't keep it, just hand it through.
                            del self.cachedObjects[objectID]
                        return remoteObject
                    elif macho.mode == 'server':
                        # The server is the origin; a miss here is a logic error.
                        self.LogError(
                            "Some dude asked me for a cached object I just don't have, objectID=",
                            objectID, ', objectVersion=', objectVersion,
                            ', nodeID=', nodeID)
                        del self.cachedObjects[objectID]
                        raise RuntimeError(
                            "I don't have %s, which just don't make sense...."
                            % repr(objectID))
                finally:
                    semaphore.release()
                if not isinstance(self.cachedObjects[objectID],
                                  uthread.Semaphore):
                    self.__UpdateStatisticsGetCachableObject(
                        objectID, objectVersion, self.cachedObjects[objectID])
                    return self.cachedObjects[objectID]
        else:
            if macho.mode == 'server' and not self.cachedObjects[
                    objectID].shared:
                # Non-shared objects are handed out once, then evicted.
                tmp = self.cachedObjects[objectID]
                self.__UpdateStatisticsGetCachableObject(
                    objectID, objectVersion, self.cachedObjects[objectID])
                del self.cachedObjects[objectID]
                return tmp
            self.__UpdateStatisticsGetCachableObject(
                objectID, objectVersion, self.cachedObjects[objectID])
            return self.cachedObjects[objectID]
    finally:
        callTimer.Done()
def HandleConnection(self, ep):
    """
    Serve HTTP requests arriving on endpoint *ep* until the connection closes.

    Loops over keep-alive requests on one connection: parses the header,
    resolves the session, dispatches to caching / file handlers and flushes
    the response.  The endpoint is closed and session timeout monitoring is
    kicked off on the way out.
    """
    if not hasattr(self, 'caching'):
        # Lazy one-time service init on the first connection.
        ClockThis('HTTP::Handle::Init', self.Init)
    request = Request(self, ep)
    lastRequest = ''
    requestCount = 0
    sess = None
    self.openConnectionsInHttpService.Add()
    try:
        while 1:
            sys.exc_clear()
            if request.method != 'HEAD':
                response = Response(self, ep)
            else:
                response = HeadResponse(self, ep)
            errfile = None
            lastRequest = request.path
            try:
                ClockThis('HTTP::Handle::request.ParseHeader', request.ParseHeader)
                if request.method == 'OPTION':
                    # Unsupported verb: log the whole request for diagnosis.
                    tmp = request.DumpRequestToList()
                    self.LogError('OPTION REQUEST, sorry not really an error')
                    self.LogError('The client %s made this request' % ep.address)
                    for s in tmp:
                        self.LogError(s)
                    response.SendNotImplemented()
                    continue
                requestCount += 1
                sess = self.GetSession(request, response)
                if sess:
                    sess.requestCount += 1
                else:
                    # No session (e.g. redirect issued); skip to next request.
                    continue
                if self.HandleCaching(ep, request, response):
                    continue
                response.cookie['flatkaka'] = sess.esps.GetFlatkaka()
                filename, files = self.GetFileFromRequest(request)
                if not files:
                    errfile = filename
                    raise IOError('file %s not found in www roots' % filename)
                try:
                    self.HandleRequestFile(request, response, filename, files)
                finally:
                    for _, f in files:
                        f.Close()
            except GPSTransportClosed:
                self.LogInfo('closed retrieving [%s], before that [%s].'
                             % (request.path, lastRequest))
                self.LogInfo('Total requests served with this connection: %d' % requestCount)
                break
            except Exception as ex:
                self.HandleException(sess, request, response, ex, errfile)
            try:
                response.Flush()
                if request.proto == 'HTTP/1.0':
                    # HTTP/1.0 has no keep-alive: one request per connection.
                    break
            except GPSTransportClosed:
                sys.exc_clear()
                self.LogWarn('Trying to send response for [%s] but the connection was closed, prev: %s' % (request.path, lastRequest))
                self.LogWarn('Total requests served with this connection: %d' % requestCount)
                break
    finally:
        if not getattr(ep.socket, 'isFake', False):
            ep.Close()
        self.openConnectionsInHttpService.Dec()
        if sess:
            if sess.esps.contents.has_key('timeoutTimer'):
                if sess.esps.contents['timeoutTimer'] == None:
                    # Claim the timer slot, then watch for session timeout.
                    sess.esps.contents['timeoutTimer'] = 1
                    uthread.new(self.CheckSessionTimeout, sess, sess.requestCount)
def __init__(self):
    """Capture a tasklet snapshot: chains, thread/process times, and timers."""
    snapshot = ClockThis('TaskletSnapshot::GetChains', GetChains)
    chains = snapshot[0]
    self.threadTimes = snapshot[1]
    self.processTimes = snapshot[2]
    self.update(ClockThis('TaskletSnapshot::TimersFromChains',
                          TimersFromChains, chains))
def ConstValueExists(name):
    """Report whether *name* resolves to a legacy const value."""
    timerKey = 'SKITMIX::LookupConstValue'
    return ClockThis(timerKey, _LookupConstValue, name, '', True)
def LookupConstValue(name, default = '.exception'):
    """Look up a legacy const value; raise AttributeError when missing and no default given."""
    timerKey = 'SKITMIX::LookupConstValue'
    return ClockThis(timerKey, _LookupConstValue, name, default, False)