def stop(self): """ When called, causes the thread to exit, the next time the main loop runs. """ _loginfo("CACHE: Trying to stop cache") self.lock.acquire() try: self.stopme = True finally: self.lock.release()
def __init__(self, gui, rank, category, name, *args):
    """
    Create a new FileLight workspace with its site and group views,
    and register the workspace's javascript with the GUI.

    Scans gui.sources for the source whose plothook is 'filelight'.

    @raise Exception: if no FileLightSource is registered with the server.
    """
    _loginfo("FILELIGHT_W: Workspace created")
    CompWorkspace.__init__(self, gui, rank, category, 'filelight', name,
                           [self.FileLightSiteView(self),
                            self.FileLightGroupView(self)])
    gui._addJSFragment("%s/javascript/Overview/FileLight.js" % gui.contentpath)
    # Locate the matching data source; the last match wins if several exist.
    self.source = None
    for s in gui.sources:
        if s.plothook == 'filelight':
            self.source = s
    if self.source is None:
        _logerr("FILELIGHT_W: Couldn't find FileLightSource")
        # Call-form raise (the old 'raise Exception, msg' statement form
        # is Python-2-only syntax).
        raise Exception("FILELIGHT_W Couldn't find source")
def fast_requestor(cache, name):
    """
    Stress-test worker: issue 10000 randomised operations against the
    cache, mixing async fills, locked fetches and plain fetches, with
    both fast and slow producer functions.
    """
    for remaining in range(10000, 0, -1):
        _loginfo("%s: alive, remain %s" % (name, remaining))
        v1, v2 = random.random(), random.random()
        key = random.choice(keys)
        # 90% of operations use the fast producer, 10% the slow one.
        fill = fast_producer if v2 > 0.1 else slow_producer
        # Operation mix: 10% async fill, 60% locked fetch, 30% plain fetch.
        if v1 > 0.9:
            cache.asyncCacheFill(key, fill)
        elif v1 > 0.3:
            cache.lockedCacheFetch(key, fill)
        else:
            cache.cacheFetch(key, fill)
    _loginfo("%s: finished" % name)
def _plotMaker(self, query):
    """
    Cache maker function: build the plot tuple for a query when it is
    not already cached. Returns ((plot, legend, map, stitched, thumbnail),
    expiry-seconds).
    """
    _loginfo("PRODMON_S: _plotMaker: %s" % query.cacheKey('PA_PLOT'))
    # Plot type validity has already been checked, so the lookup
    # below cannot miss in practice.
    renderers = {
        'pie': self._plotPie,
        'bar': self._plotBar,
        'cumulative': self._plotCumulative,
        'baobab': self._plotBaobab,
    }
    plot_img, legend, imagemap = renderers[query['plot']](query)[:3]
    result = (plot_img, legend, imagemap,
              stitchPlotAndLegend(plot_img, legend),
              makeThumbnail(plot_img))
    return result, 1800
def __init__(self, gui, rank, category, name, *args):
    """
    Create a new ProdMonWorkspace instance.

    Note that we need to find ProdMonSource in the server source list,
    or we'll exit ingloriously.

    TODO: good mechanism for finding sources
    TODO: SiteDB source integration
    TODO: ProdRequest integration

    @raise Exception: if no source with plothook 'prodmon' is registered.
    """
    _loginfo("PRODMON_W: Workspace created")
    CompWorkspace.__init__(self, gui, rank, category, 'prodmon', name,
                           [ProdMonWorkspace.ProdMonSummary(self),
                            ProdMonWorkspace.ProdMonPlot(self)])
    gui._addJSFragment("%s/javascript/Overview/ProdMon.js" % gui.contentpath)
    gui._addCSSFragment("%s/css/Overview/ProdMon.css" % gui.contentpath)
    # Locate the matching data source; the last match wins if several exist.
    self.source = None
    for s in gui.sources:
        if s.plothook == 'prodmon':
            self.source = s
    if self.source is None:
        _logerr("PRODMON_W: Couldn't find ProdMonSource")
        # Call-form raise (the old 'raise Exception, msg' statement form
        # is Python-2-only syntax).
        raise Exception("PW: Failed to find prodmon source. Fatal.")
def __init__(self, gui, interval=3, sizelimit=100000000, itemlimit=1000,
             wait_expiry=120, wait_interval=5):
    """
    Create a new Overview cache thread. Note that it is not started
    until the .start() method is called (done at the end of __init__).

    @param interval: seconds between main-loop iterations (3)
    @param sizelimit: maximum size in bytes before old entries start
        getting culled. Note that this is done *imprecisely* and will
        not be strictly adhered to! (100MB)
    @param itemlimit: maximum number of cached items (1000)
    @param wait_expiry: seconds before a waiting fetch gives up (120)
    @param wait_interval: seconds between checks while waiting (5)
    """
    _loginfo("CACHE: Creating new Overview cache interval=%s sec, sizelimit=%s bytes, itemlimit=%s" % (interval, sizelimit, itemlimit))
    Thread.__init__(self, name="OverviewCache")
    # Concurrency primitives and shutdown flag.
    self.lock = Lock()
    self.stopme = False
    # Storage: objkey -> (expiry, size, data); cachesize tracks total bytes.
    self.cache = {}
    self.cachesize = 0
    # Events for requests waiting on in-flight fills.
    self.cacheevents = {}
    # Tuning parameters.
    self.interval = interval
    self.sizelimit = sizelimit
    self.itemlimit = itemlimit
    self.wait_expiry = wait_expiry
    self.wait_interval = wait_interval
    # Shut down cleanly with the server, then begin running immediately.
    engine.subscribe('stop', self.stop)
    self.start()
def __init__(self,server,*args): _loginfo("PRODMON_S: Source created.") self.cache = [e for e in server.extensions if e.exthook=="OverviewCache"][0] self.cache.asyncCacheFill('PA_WFSUMMARY',self._getWFSummary)
class EVDSnapshotUpload:
    """
    CherryPy handler for uploading event-display snapshot images.

    Mounts itself at <baseUrl>/iguana-snapshot and saves validated PNG
    uploads under a per-date directory inside `datadir`, reporting the
    outcome via custom 'evd-status-*' response headers.
    """
    # Status codes reported to the client in the 'evd-status-code' header.
    STATUS_OK = 100               # Requested operation succeeded
    STATUS_BAD_REQUEST = 200      # The request is malformed
    STATUS_ERROR_PARAMETER = 300  # Request parameter value is unacceptable.
    STATUS_ERROR_EXISTS = 301     # Cannot overwrite an existing object.
    STATUS_ERROR_NOT_EXISTS = 302 # Requested file does not exist.
    STATUS_FAIL_EXECUTE = 400     # Failed to execute the request.

    def refresh(self, *args):
        # No periodic work needed; present to satisfy the server's
        # component interface.
        pass

    def __init__(self, server, datadir):
        """
        Create the upload handler, ensure the data directory exists,
        and mount it on the CherryPy tree under the server's base URL.
        """
        self.lock = Lock()     # serialises file writes in store()
        self.server = server
        self.datadir = datadir # root directory for saved snapshots
        if not os.path.exists(datadir):
            os.makedirs(datadir)
        tree.mount(self, script_name=server.baseUrl + "/iguana-snapshot",
                   config={"/": {'request.show_tracebacks': False}})

    # Set response headers to indicate our status data.
    def _status(self, code, message, detail=None):
        response.headers['evd-status-code'] = str(code)
        response.headers['evd-status-message'] = message
        response.headers['evd-status-detail'] = detail

    # Set response headers to indicate an error, then get out.
    def _error(self, code, message, detail=None):
        """Log and report an error, then abort the request with HTTP 500."""
        _logerr("code=%d, message=%s, detail=%s" % (code, message, detail))
        self._status(code, message, detail)
        raise HTTPError(500, message)

    # Check that a required parameter has been given just once,
    # and the value matches the given regular expression.
    def _check(self, name, arg, rx):
        if not arg or not isinstance(arg, str):
            self._error(self.STATUS_BAD_REQUEST,
                        "Incorrect or missing %s parameter" % name,
                        "Must provide single argument")
        if not re.match(rx, arg):
            self._error(self.STATUS_BAD_REQUEST,
                        "Malformed %s argument" % name,
                        "Argument must match regular expression '%s'" % rx)

    # ------------------------------------------------------------------
    # Store an event display image to the server.  Validates all the
    # parameters then attempts to save the file safely.  Sets headers in
    # the response to indicate what happened and the operation success.
    @expose
    def store(self, file=None, size=None, meta=None, *args, **kwargs):
        """
        Save an uploaded snapshot image plus its metadata sidecar file.

        @param file: CherryPy file-upload object (.file, .filename).
        @param size: expected byte count of the upload, as a string.
        @param meta: metadata text written alongside as <name>.info.
        """
        # Basic argument validation.
        if file == None \
           or not getattr(file, "file", None) \
           or not getattr(file, "filename", None):
            self._error(self.STATUS_BAD_REQUEST,
                        "Incorrect or missing file argument",
                        "Must provide a single file-type argument")
        self._check("filename", file.filename,
                    r"^screenShot-[\d:.]+-[\d.]+\.png$")
        self._check("size", size, r"^\d+$")
        self._check("meta", meta, r"^.+\n")
        size = int(size)
        # Filename was validated above, so this match cannot fail.
        (base, time, date) = re.match(r"(.*)-([\d:.]+)-([\d.]+)\.png",
                                      file.filename).groups()

        # Determine where we would save this file.
        dir = "%s/%s" % (self.datadir, date)
        fname = "%s/%s" % (dir, file.filename)
        tmp = None
        if os.path.exists(fname) or os.path.exists(fname + ".info"):
            self._error(self.STATUS_ERROR_EXISTS,
                        "File exists, cannot overwrite", fname)

        # Try saving the file safely.  First we write the file to a
        # temporary directory, with a fake name, to the same directory as
        # the final file would be.  We then move it in place atomically
        # and leave a marker to indicate where the MSS replica of the file
        # can be found.  If anything goes wrong, clean up so the upload can
        # be re-attempted later without tripping over the checks above.
        try:
            self.lock.acquire()
            if not os.path.exists(dir):
                os.makedirs(dir)
            # Temp file in the destination directory so the final move
            # stays on one filesystem and is atomic.
            (fd, tmp) = tempfile.mkstemp(".upload", "", dir)
            nsaved = 0
            while True:
                data = file.file.read(32*1024)
                if not data:
                    break
                os.write(fd, data)
                nsaved += len(data)
            os.close(fd)
            os.chmod(tmp, 0644)
            if nsaved != size:
                self._error(self.STATUS_FAIL_EXECUTE,
                            "Failed to save file data",
                            "Wrote %d bytes, expected to write %d"
                            % (nsaved, size))
            shutil.move(tmp, fname)
            open(fname + ".info", "w").write(meta)
            self.lock.release()
        except Exception, e:
            # Release the lock first, then undo any partial results so a
            # retry does not trip the exists-check above.
            self.lock.release()
            if os.path.exists(fname):
                os.remove(fname)
            if os.path.exists(fname + ".info"):
                os.remove(fname + ".info")
            if tmp and os.path.exists(tmp):
                os.remove(tmp)
            # _error() already produced an HTTPError; pass it through.
            if isinstance(e, HTTPError):
                raise e
            self._error(self.STATUS_FAIL_EXECUTE,
                        "Failed to save file data",
                        str(e).replace("\n", "; "))

        # Indicate success.
        self._status(self.STATUS_OK, "File saved", "Wrote %d bytes" % nsaved)
        _loginfo("saved file path=%s size=%d" % (fname, size))
        return "Thanks.\n"
def __init__(self,server,*args): _loginfo("FILELIGHT_S: Source created.") self.cache = [e for e in server.extensions if e.exthook=="OverviewCache"][0] self.cache.asyncCacheFill('FL_NODENAMES',self._nodeNames) self.cache.asyncCacheFill('FL_GROUPNAMES',self._groupNames)