def __init__(self):
    """Set up the rolling-average-subtraction worker with empty data buffers."""
    # Worker name, used as the tag on every Logger call.
    self.name = "WorkerRollingAverageSubtraction" # For logging
    Logger.log(self.name, "Generated")
    # Raw per-frame data accumulated over the current sample run.
    self.allIntensities = []
    self.allQ = []
    self.allErrors = []
    # Rolling averages of the accumulated frames.
    self.aveIntensities = []
    self.aveQ = []
    self.aveErrors = []
    # Latest buffer-subtracted intensity curve.
    self.subtractedIntensities = []
    self.ave = AverageList.AverageList()
    self.datWriter = DatFileWriter.DatFileWriter()
    # True until the first sample frame arrives (triggers a buffer request).
    self.firstTime = True
    self.newSampleCheck = False
    # BUG FIX: the original re-assigned self.name = "" at this point,
    # clobbering the worker name set above and leaving every subsequent
    # Logger.log() call tagged with an empty string until setName() runs.
    # The name is kept instead; setName() still overwrites it later.
    self.user = ""
    self.experiment = ""
def subtract(self, buffer):
    """Subtract the averaged buffer curve from the rolling-average intensities.

    Indexing follows self.aveIntensities, so *buffer* must be at least as
    long; the result is stored in self.subtractedIntensities.
    """
    self.subtractedIntensities = [
        self.aveIntensities[idx] - buffer[idx]
        for idx in range(len(self.aveIntensities))
    ]
    Logger.log(self.name, "Subtraction Completed")
def eh(self): self.name = "WorkerBufferAverage" #For logging Logger.log(self.name, "Generated") self.allIntensities = [] self.aveIntensities = [] self.ave = AverageList.AverageList() self.user = "" self.experiment = ""
def readLatestLine(self):
    """Poll the experiment log file until a previously-unseen line appears.

    Blocks until a new line is read: the line is appended to self.lines and
    (parsed) to self.logLines, self.index is advanced, and the LogLine is
    pushed to the DB worker.  IOError (file missing/locked) and IndexError
    (no new line yet) are retried after a short sleep.
    """
    noLines = True
    while (noLines):
        try:
            Logger.log(self.name, "Opening LogFile")
            # FIX: context manager guarantees the handle is closed even if
            # an unexpected exception escapes the read below (the original
            # only closed on the success and IndexError paths).
            with open(self.logFile, "r") as logfile:
                try:
                    self.latestLine = logfile.readlines()[self.index]
                    if (self.latestLine in self.lines):
                        # Nothing new yet - back off briefly and re-poll.
                        time.sleep(0.05)
                    else:
                        self.lines.append(self.latestLine)
                        self.logLines.append(LogLine.LogLine(self.latestLine))
                        self.index = self.index + 1
                        # Send logline to be added to DB
                        self.dbWorker.send("logLine")
                        self.dbWorker.send_pyobj(self.logLines[self.index - 1])
                        noLines = False
                except IndexError:
                    # File exists but has no line at self.index yet.
                    Logger.log(self.name, "IndexError - trying to read last line from logfile")
        except IOError:
            Logger.log(self.name, "IOERROR - trying to read last line from logfile")
            Logger.log(self.name, self.logFile)
            time.sleep(0.2)
def clear(self):
    """Function for clearing data from the worker, for the next experiment"""
    # Drop every stored curve: raw frames, running averages, and the
    # buffer-subtracted result.  Fresh lists are bound (not cleared in
    # place), matching the original behaviour.
    for attrName in ("allIntensities", "allQ", "allErrors",
                     "aveIntensities", "aveQ", "aveErrors",
                     "subtractedIntensities"):
        setattr(self, attrName, [])
    # Force the next sample frame to request a freshly averaged buffer.
    self.firstTime = True
    Logger.log(self.name, "Worker Cleared - forgotten all previous buffers and datfiles")
def run(self, datFile, aveBuffer, outputDir="/home/ics/jack/beam/"):
    """Accumulate a sample frame, refresh the rolling averages, subtract the
    averaged buffer and write the result to disk.

    datFile   -- parsed dat file exposing .intensities, .q and .errors
    aveBuffer -- averaged buffer intensities passed to subtract()
    outputDir -- destination directory for the written .dat file; the
                 default preserves the previously hard-coded path so
                 existing callers are unaffected.
    """
    self.allIntensities.append(datFile.intensities)
    self.allQ.append(datFile.q)
    self.allErrors.append(datFile.errors)
    # Averaging out: rolling average over everything accumulated so far.
    self.aveIntensities = self.ave.average(self.allIntensities)
    self.aveQ = self.ave.average(self.allQ)
    self.aveErrors = self.ave.average(self.allErrors)
    Logger.log(self.name, "Averaging Completed")
    self.subtract(aveBuffer)
    # NOTE(review): this writes the *averaged* intensities, not
    # self.subtractedIntensities computed just above - confirm intended.
    self.datWriter.writeFile(outputDir, self.name,
                             {'q': self.aveQ,
                              'i': self.aveIntensities,
                              'errors': self.aveErrors})
def clear(self):
    """Clears out the Engine and all workers, should only occur when a new
    user has changed over."""
    Logger.log(self.name, "Commencing Clear out")
    # Reset engine-side bookkeeping.
    self.index = 0
    self.user = ""
    self.experiment = "EXPERIMENT_1"
    self.lines = []
    self.logLines = []
    self.lastLine = ""
    self.datFiles = []
    # Tell every worker process to drop its accumulated state as well.
    for workerSocket in (self.bufferWorker,
                         self.sampleWorker,
                         self.rollingAverageWorker):
        workerSocket.send("clear")
    self.lastDatFile = ""
    self.currentDatFile = ""
    Logger.log(self.name, "ENGINE and ALL WORKERS CLEARED")
def userChange(self, char_value, **kw):
    """Get the user_epn when a change over has occured, this will create a new DB for the user, create directory structure and clear out all workers"""
    Logger.log(self.name, "User change over initiated")
    self.clear() #Clear engine, and all workers
    user = self.getUser(char_value) #get new user
    self.user = user
    # NOTE(review): the rest of this statement is corrupted - the
    # "NEW USER: "******"Folder Structure Created" sequence is not valid
    # Python (it looks like a redaction/merge artifact).  Presumably it
    # once logged the new user, created the DB and folder structure
    # (cf. generateDB/updateWorkers) and then logged
    # "Folder Structure Created" - recover the original from version
    # control; left byte-identical here.
    Logger.log(self.name, "NEW USER: "******"Folder Structure Created")
def getDatFile(self): _pass = 0 """TODO: make better... forgot the better and faster way I had the imagename """ ##returns dat file location noDatFile = True #getting actual dat file name from the log line. It will only pick up that dat file dat = self.logLines[self.index-1].getValue("ImageLocation") #this needs to be fixed to os agnostic #change to os.path.basename dat = dat.split("/") dat = dat[-1] dat = dat.split(".") dat = dat[0] + ".dat" dat = str(dat) while (noDatFile): try: datFile = (self.datFileLocation + dat) self.datFiles.append(DatFile.DatFile(datFile)) self.datIndex = self.datIndex + 1 Logger.log(self.name, "INDEX DATFILES: " + str(self.datIndex)) noDatFile = False except IOError: if (_pass > 3): print "dunno doesnt exist ?" _pass = 0 #self.index = self.index - 1 break Logger.log(self.name, "IOERROR - trying to open latest datfile") Logger.log(self.name, "DATFILE - " + str(datFile)) time.sleep(0.3) _pass = _pass + 1
#TODO, possible sub/pub so only one socket for all samples samples = context.socket(zmq.PULL) samples.connect("tcp://127.0.0.1:7885") bufferReq = context.socket(zmq.REQ) bufferReq.connect("tcp://127.0.0.1:7883") aveBuffer = [] while True: filter = samples.recv() if (str(filter) == "sample"): datFile = samples.recv_pyobj() Logger.log(worker.name, "Recieved DatFile") if (worker.firstTime): bufferReq.send("REQ-AVEBUFFER") aveBuffer = bufferReq.recv_pyobj() worker.firstTime = False worker.setName(datFile) #shitty fix if (worker.newSampleCheck): worker.setName(datFile) worker.newSampleCheck = False worker.run(datFile, aveBuffer) if (str(filter) == 'new_buffer'): worker.firstTime = True; worker.newSample()
def updateWorkers(self):
    """Push the current user and experiment identifiers to every worker."""
    # Every worker receives the same three-part message - the "user"
    # filter, the user id, then the experiment id - only the log text
    # differs, so the three copies are folded into one loop (send order
    # is unchanged).
    targets = (
        (self.bufferWorker,
         "sent new user to WorkerBuffer",
         "sent new experiment to WorkerBuffer"),
        (self.sampleWorker,
         "Sent new user to WorkerStaticImage",
         "Sent new experiment to WorkerStaticImage"),
        (self.rollingAverageWorker,
         "Sent new user to WorkerRollingImage",
         "Sent new experiment to WorkerRollingImage"),
    )
    for sock, userMsg, expMsg in targets:
        sock.send("user")
        sock.send(self.user)
        Logger.log(self.name, userMsg)
        sock.send(self.experiment)
        Logger.log(self.name, expMsg)
def generateDB(self):
    """Ask the DB worker to create a database for the current user/experiment."""
    # Each field is sent as a filter string followed by its payload.
    for label, payload in (("user", self.user),
                           ("Experiment", self.experiment)):
        self.dbWorker.send(label)
        self.dbWorker.send(str(payload))
    Logger.log(self.name, "Database Created - " + self.user)
def imageTaken(self):
    """Handle a new image: read the latest log line, load its .dat file and
    dispatch it to the appropriate worker(s) by sample type.

    SampleType '0' is a buffer frame, '1' a static sample image.  A root
    name change signals a new buffer/sample series and clears the workers.
    """
    Logger.log(self.name, "Image Value Changed - Shot Taken")
    self.readLatestLine()
    Logger.log(self.name, "Read Latest line from LogFile")
    self.getDatFile()
    Logger.log(self.name, "Retrieved DatFile")
    # True when the image root name changed, indicating a series change.
    changeInName = self.checkName()
    # SampleType may be missing from older log formats; "12" is a sentinel
    # that matches neither the buffer nor the sample branches below.
    try:
        imageType = (self.logLines[self.index - 1].data['SampleType'])
    except KeyError:
        Logger.log(self.name, "KeyError on SampleType, probably nothing")
        imageType = "12"
    if ((imageType == '0') and (changeInName)):
        # New buffer series - workers must forget the previous buffer.
        Logger.log(self.name, "Root Name Change - Idicating Sample Change")
        Logger.log(self.name, "New Buffer Generated - Clearing Workers")
        self.bufferWorker.send("recalculate_buffer")
        self.sampleWorker.send("new_buffer")
        self.rollingAverageWorker.send("new_buffer")
    # BUG FIX: the original compared against the int 0, but imageType is
    # always a string ('0', '1', '12'), so "imageType != 0" was always
    # true and buffer frames with a name change also fired the
    # new-sample path.  Compare against the string '0' instead.
    if ((changeInName) and (imageType != '0')):
        Logger.log(self.name, "Root Name Change - Idicating Sample Change")
        self.sampleWorker.send("new_sample")
        self.rollingAverageWorker.send("new_sample")
    print(imageType)
    Logger.log(self.name, "INDEX: " + str(self.index))
    if (imageType == '0'):
        # Buffer frame -> buffer-average worker.
        Logger.log(self.name, "BUFFER")
        self.bufferWorker.send("datFile")
        self.bufferWorker.send_pyobj(self.datFiles[self.datIndex - 1])
        Logger.log(self.name, "sent DatFile to WorkerBuffer")
    if (imageType == '1'):
        # Static sample image -> static-image and rolling-average workers.
        Logger.log(self.name, "STATIC IMAGE")
        self.sampleWorker.send("sample")
        self.sampleWorker.send_pyobj(self.datFiles[self.datIndex - 1])
        Logger.log(self.name, "Sent DatFile to WorkerStaticImage")
        self.rollingAverageWorker.send("sample")
        self.rollingAverageWorker.send_pyobj(self.datFiles[self.datIndex - 1])
        Logger.log(self.name, "Sent DatFile to WorkerRollingImage")
def clear(self):
    """Clear out function for when a new experiment is starting"""
    # Forget every stored buffer curve and the running average together.
    self.allIntensities, self.aveIntensities = [], []
    Logger.log(self.name, "Worker Cleared - all buffers forgotten")
def run(self, datFile):
    """Accumulate one buffer curve and refresh the running average.

    datFile -- parsed dat file exposing an .intensities list.
    """
    self.allIntensities.append(datFile.intensities)
    # Running average over every buffer frame seen so far.
    self.aveIntensities = self.ave.average(self.allIntensities)
    Logger.log(self.name, "Average Buffer Generated")