def analyzeBtnClicked(self, *args):
    """Run every loaded trace through the analysis flowchart and populate the results table.

    For each trace, the flowchart's 'results' output is augmented with timestamp/time
    and directory-handle fields, appended to the results table, and cached on the
    trace record as t['results'].  Finally the table header's click signal is hooked
    up to column selection.
    """
    self.resultsTable.clear()
    with pg.ProgressDialog("Analyzing..", 0, len(self.traces)) as dlg:
        for i, t in enumerate(self.traces):
            results = self.flowchart.process(dataIn=t['fileHandle'])['results']
            ## make sure results has these fields regardless of what's in the flowchart
            results['timestamp'] = t['timestamp']
            results['time'] = results['timestamp'] - self.expStart
            self.resultsTable.appendData([results])
            ## attach directory handles so results can later be stored to the DB
            results['ProtocolDir'] = self.dataModel.getParent(t['fileHandle'], 'Protocol')
            results['ProtocolSequenceDir'] = self.dataModel.getParent(t['fileHandle'], 'ProtocolSequence')
            results['CellDir'] = self.dataModel.getParent(t['fileHandle'], 'Cell')
            t['results'] = results
            dlg += 1
            if dlg.wasCanceled():
                ## NOTE(review): connecting here on every click (and again below) stacks
                ## duplicate connections, so tableColumnSelected may fire multiple times
                ## per click after repeated analyses — confirm and consider disconnecting first.
                self.resultsTable.horizontalHeader().sectionClicked.connect(self.tableColumnSelected)
                return
    self.resultsTable.horizontalHeader().sectionClicked.connect(self.tableColumnSelected)
def populate(self):
    """Rebuild the map tree from the 'Photostim.maps' DB table.

    Each map record becomes a child item (with the record attached as item.rec)
    under a top-level item for its parent directory.  Raises if the user cancels
    mid-build, since the tree would then be incomplete.
    """
    self.tree.clear()
    mapTable = self.dbGui.getTableName('Photostim.maps')
    if mapTable == '':
        return
        #raise Exception("No table selected for %s" % ident)
    db = self.dbGui.getDb()
    maps = db.select(mapTable, ['rowid', '*'])
    paths = {}  # parent dirhandle -> top-level tree item
    with pg.ProgressDialog("Reading map table...", 0, len(maps)) as dlg:
        for rec in maps:
            if len(rec['scans']) == 0:
                continue
            ## convert (table, rowid) to (dirhandle, rowid) before creating Map
            rec['scans'] = [(db.getDir(*s), s[1]) for s in rec['scans']]
            path = rec['scans'][0][0].parent()
            if path not in paths:
                pathItem = pg.TreeWidgetItem([path.name(relativeTo=db.baseDir())])
                self.tree.addTopLevelItem(pathItem)
                paths[path] = pathItem
            item = pg.TreeWidgetItem([rec['description']])
            item.rec = rec
            paths[path].addChild(item)
            dlg += 1
            if dlg.wasCanceled():
                raise Exception("User cancelled map list construction; some maps may not be displayed.")
    self.tree.sortItems(0, QtCore.Qt.AscendingOrder)
def exportAll():
    """Export an SVG snapshot for every cell under man.baseDir that has a morphology image.

    Walks day/slice/cell directories, displays each cell via show(), and writes
    'day_slice_cell.svg' into the current working directory.  Raises if the user
    cancels the progress dialog.
    """
    global v
    with pg.ProgressDialog("exporting all..", 0, 1000) as dlg:
        for dayName in man.baseDir.ls():
            day = man.baseDir[dayName]
            for slName in day.ls():
                if 'slice' not in slName:
                    continue
                sl = day[slName]
                for cellName in sl.ls():
                    if 'cell' not in cellName:
                        continue
                    cell = sl[cellName]
                    try:
                        cell['morphology.png']  # existence check only; skip cells with no image
                    except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
                        continue
                    show(cell)
                    # process events twice so the scene is fully redrawn before export
                    Qt.QApplication.processEvents()
                    Qt.QApplication.processEvents()
                    name = day.shortName() + "_" + sl.shortName() + "_" + cell.shortName() + ".svg"
                    ex = pg.exporters.SVGExporter.SVGExporter(v.scene())
                    ex.export(name)
                    print(name)
                    if dlg.wasCanceled():
                        raise Exception("export cancelled")
def loadStubs(self):
    """Turn all stubs into fully-loaded scans.

    Each stub's directory is loaded through the host; single scans are attached to
    their tree items and registered in self.scans / self.scanItems.  Sequence scans
    (multiple results) are not yet supported.  On success the stub list is cleared
    and the scatter plot is rebuilt.
    """
    with pg.ProgressDialog("Loading scans...", 0, len(self.stubs), busyCursor=True) as dlg:
        dlg.setValue(0)
        for stub in self.stubs:
            Qt.QApplication.processEvents()  # keep the GUI responsive during long loads
            ### can we load a partial map if one scan fails? (should we?)
            newScans = self.host.loadScan(stub.dirHandle)
            if len(newScans) > 1:
                ## For sequence scans, we somehow need to decide how to reload the exact set of spots that were chosen for this scan before..
                raise Exception("Haven't implemented reloading sequence scans yet")
            elif len(newScans) == 1:
                newScan = newScans[0]
            else:
                raise Exception("Photostim.loadScan returned empty list for file '%s'" % str(stub.dirHandle))
            stub.treeItem.scan = newScan
            self.scans.append(newScan)
            self.scanItems[newScan] = stub.treeItem
            dlg += 1
            if dlg.wasCanceled():
                raise Exception("Map load canceled by user.")
    self.stubs = []
    ## decide on point set, generate scatter plot
    self.rebuildPlot()
def generateTargets(self):
    """Compute an ordering/timing of photostimulation targets via repeated 2-opt search.

    Runs optimize.opt2 up to nTries times (fewer tries for larger target lists) and
    keeps the solution with the smallest total time.  The result is stored in
    self.targets and the total time shown in the UI.
    """
    self.targets = []
    locations = self.getTargetList()
    bestTime = None
    bestSolution = None
    # fewer optimization attempts for large target lists, clamped to [1, 10]
    nTries = np.clip(int(10 - len(locations)/20), 1, 10)
    ## About to compute order/timing of targets; display a progress dialog
    deadTime = self.taskRunner.getParam('duration')
    state = self.stateGroup.state()
    minTime = state['minTime']
    minDist = state['minDist']
    with pg.ProgressDialog("Computing random target sequence...", 0, 1000, busyCursor=True) as dlg:
        for i in range(nTries):
            ## Run in a remote process for a little speedup
            # opt2 yields (n, m) progress pairs; the final yield has m=None and n=solution
            for n, m in optimize.opt2(locations, minTime, minDist, deadTime, greed=1.0):
                ## we can update the progress dialog here.
                if m is None:
                    solution = n
                else:
                    prg = int(((i/float(nTries)) + ((n/float(m))/float(nTries))) * 1000)
                    dlg.setValue(prg)
                    if dlg.wasCanceled():
                        raise Exception("Target sequence computation canceled by user.")
            # NOTE(review): 'time' shadows the time module here; also 'solution' would be
            # unbound if opt2 never yields an (n, None) pair — presumably it always does.
            time = sum([l[1] for l in solution])
            if bestTime is None or time < bestTime:
                bestTime = time
                bestSolution = solution[:]
    self.targets = bestSolution
    self.ui.timeLabel.setText('Total time: %0.1f sec'% bestTime)
def loadScanImage(self):
    """Build a composite image from all camera frames recorded during a scan.

    For each sub-protocol directory: read the camera frames, optionally subtract a
    background frame, normalize to a max of 1000, then merge all spot images by
    per-pixel maximum into a single ScanImageCanvasItem added to the canvas.
    """
    dh = self.opts['handle']
    dirs = [dh[d] for d in dh.subDirs()]
    if 'Camera' not in dirs[0].subDirs():
        print("No image data for this scan.")
        return
    spotFrame = self.ui.spotFrameSpin.value()
    bgFrame = self.ui.bgFrameSpin.value()
    images = []
    handles = []
    nulls = []  # names of dirs whose spot image was suspiciously dim
    with pg.ProgressDialog("Loading scan images..", 0, len(dirs)) as dlg:
        for d in dirs:
            if 'Camera' not in d.subDirs():
                continue
            fh = d['Camera']['frames.ma']
            handles.append(fh)
            frames = fh.read().asarray()
            if self.ui.bgFrameCheck.isChecked():
                image = frames[spotFrame] - frames[bgFrame]
                image[frames[bgFrame] > frames[spotFrame]] = 0.  ## unsigned type; avoid negative values
            else:
                image = frames[spotFrame]
            mx = image.max()
            # NOTE(review): divides by zero if the frame is entirely black (mx == 0) — confirm
            image *= (1000. / mx)
            images.append(image)
            if mx < 50:
                nulls.append(d.shortName())
            dlg += 1
            if dlg.wasCanceled():
                raise Exception("Processing canceled by user")
        ## second pass: merge all spot images by per-pixel maximum
        dlg.setLabelText("Processing scan images..")
        dlg.setValue(0)
        dlg.setMaximum(len(images))
        scanImages = np.zeros(images[0].shape)
        for im in images:
            mask = im > scanImages
            scanImages[mask] = im[mask]
            dlg += 1
            if dlg.wasCanceled():
                raise Exception("Processing canceled by user")
    image = ScanImageCanvasItem(scanImages, handles, z=self.opts['z'] - 1)
    item = self.canvas.addItem(image)
    self.scanImage = item
def runSequence(self, store=True):
    """Start execution of a parameter-sequence task.

    Builds the parameter space from the sequence parameter list, optionally creates
    a ProtocolSequence storage directory, asks devices to prepare, generates all task
    commands (with a progress dialog), then hands the command set to the task thread.
    Start buttons are disabled for the duration; re-enabled on failure.
    """
    ## Disable all start buttons
    self.enableStartBtns(False)
    # good time to collect garbage
    gc.collect()
    ## Find all top-level items in the sequence parameter list
    try:
        ## make sure all devices are reporting their correct sequence lists
        items = self.ui.sequenceParamList.listParams()
        ## Generate parameter space
        params = OrderedDict()
        paramInds = OrderedDict()
        linkedParams = {}
        pLen = 1  # total number of points in the parameter space
        for i in items:
            key = i[:2]
            params[key] = i[2]
            paramInds[key] = range(len(i[2]))
            pLen *= len(i[2])
            linkedParams[key] = i[3]
        ## Set storage dir
        if store:
            currentDir = self.manager.getCurrentDir()
            name = self.currentTask.name()
            if name is None:
                name = 'protocol'
            info = self.taskInfo(params)
            info['dirType'] = 'ProtocolSequence'
            dh = currentDir.mkdir(name, autoIncrement=True, info=info)
        else:
            dh = None
        ## Tell devices to prepare for task start.
        for d in self.currentTask.devices:
            if self.currentTask.deviceEnabled(d):
                self.docks[d].widget().prepareTaskStart()
        #print params, linkedParams
        ## Generate the complete array of command structures. This can take a long time, so we start a progress dialog.
        with pg.ProgressDialog("Generating task commands..", 0, pLen) as progressDlg:
            self.lastQtProcessTime = ptime.time()
            # NOTE: this calls the module-level runSequence() helper, which shadows
            # this method's name — not a recursive call.
            prot = runSequence(lambda p: self.generateTask(dh, p, progressDlg), paramInds, paramInds.keys(), linkedParams=linkedParams)
        if dh is not None:
            dh.flushSignals()  ## do this now rather than later when task is running
        self.sigTaskSequenceStarted.emit({})
        logMsg('Started %s task sequence of length %i' %(self.currentTask.name(),pLen), importance=6)
        self.taskThread.startTask(prot, paramInds)
    except:
        # re-enable UI before propagating any failure
        self.enableStartBtns(True)
        raise
def storeDBScan(self, scan, storeEvents=True):
    """Store all data for a scan, using cached values if possible.

    Collects per-spot events and stats (with a progress dialog), then writes them
    to the database in a single transaction.  If storeEvents is True, previously
    stored events for this scan are cleared first.

    Fixes: the except-branch used a Python 2 `print` statement (a SyntaxError on
    Python 3, which the rest of this file targets) and `xrange` (removed in Py3).
    """
    p = debug.Profiler("Photostim.storeDBScan", disabled=True)
    if storeEvents:
        self.clearDBScan(scan)
    with pg.BusyCursor():
        events = []
        stats = []
        spots = scan.spots()
        with pg.ProgressDialog("Preparing data for %s" % scan.name(), 0, len(spots) + 1) as dlg:
            ## collect events and stats from all spots in the scan
            for i in range(len(spots)):  # was xrange (Python 2)
                s = spots[i]
                fh = self.dataModel.getClampFile(s.data())
                try:
                    ev = scan.getEvents(fh)['events']
                    events.append(ev)
                except:
                    # dump the offending file/result for debugging, then re-raise
                    print(fh, scan.getEvents(fh))  # was a Py2 print statement
                    raise
                st = scan.getStats(s.data())
                stats.append(st)
                dlg.setValue(i)
                if dlg.wasCanceled():
                    raise HelpfulException("Scan store canceled by user.", msgType='status')
        p.mark("Prepared data")
        dbui = self.getElement('Database')
        db = dbui.getDb()
        with db.transaction():
            ## Store all events for this scan
            if storeEvents:
                events = [x for x in events if len(x) > 0]
                if len(events) > 0:
                    ev = np.concatenate(events)
                    p.mark("concatenate events")
                    self.detector.storeToDB(ev)
                    p.mark("stored all events")
            ## Store spot data
            self.storeStats(stats)
            p.mark("stored all stats")
    p.finish()
def storeStats(self, data):
    """Store per-spot stat records to the '<dbIdentity>.sites' table.

    data: a dict or list of dicts, one per spot:
        {'SourceFile': clamp file handle, 'xPos':, 'yPos':, ...other fields from stats flowchart...}
    Existing rows for each ProtocolDir present in `data` are deleted before insert.
    Raises if no DB is selected or the user cancels the insert.
    """
    ## If only one record was given, make it into a list of one record
    if isinstance(data, dict):
        data = [data]
    dbui = self.getElement('Database')
    identity = self.dbIdentity + '.sites'
    table = dbui.getTableName(identity)
    db = dbui.getDb()
    if db is None:
        raise Exception("No DB selected")
    ## determine the set of fields we expect to find in the table
    fields = db.describeData(data)
    ## override directory fields since describeData can't guess these for us
    fields['ProtocolDir'] = 'directory:Protocol'
    fields['ProtocolSequenceDir'] = 'directory:ProtocolSequence'
    with db.transaction():
        ## Make sure target table exists and has correct columns, links to input file
        db.checkTable(table, owner=identity, columns=fields, create=True,
                      addUnknownColumns=True, indexes=[['ProtocolDir'], ['ProtocolSequenceDir']])
        # delete old records for every protocol dir we are about to re-store
        for source in set([d['ProtocolDir'] for d in data]):
            db.delete(table, where={'ProtocolDir': source})
        # write new
        with pg.ProgressDialog("Storing spot stats...", 0, 100) as dlg:
            for n, nmax in db.iterInsert(table, data, chunkSize=30):
                dlg.setMaximum(nmax)
                dlg.setValue(n)
                if dlg.wasCanceled():
                    raise HelpfulException("Scan store canceled by user.", msgType='status')
def recolor(self, n=1, nMax=1, parallel=False):
    """Recompute the color of every visible spot on the scatter plot.

    Stats from all source scans contributing to a spot are merged (mean for 2
    sources, median for >2), then mapped to a color by the host.  Colors are
    applied only after all spots are computed so a cancel leaves the plot unchanged.
    n/nMax are only used for the progress dialog title.
    """
    ## ignore parallel here; it's plenty fast already.
    if not self.sPlotItem.isVisible():
        return
    spots = self.sPlotItem.points()
    colors = []
    with pg.ProgressDialog("Computing map %s (%d/%d)" % (self.name(), n, nMax), 0, len(spots)) as dlg:
        for i in range(len(spots)):
            s = spots[i]
            data = []
            sources = s.data()['sites']
            for scan, dh in sources:
                data.append(scan.getStats(dh))
            if len(data) == 0:
                continue
            if len(data) == 1:
                mergeData = data[0]
            else:
                ## merge stats from multiple sources, key by key
                mergeData = {}
                for k in data[0]:
                    vals = [d[k] for d in data if k in d]
                    try:
                        if len(data) == 2:
                            mergeData[k] = np.mean(vals)
                        elif len(data) > 2:
                            mergeData[k] = np.median(vals)
                        # NOTE(review): the two branches below are unreachable — this
                        # else-arm only runs when len(data) >= 2.
                        elif len(data) == 1:
                            mergeData[k] = vals[0]
                        else:
                            mergeData[k] = 0
                    except:
                        # non-numeric values: fall back to the first source's value
                        mergeData[k] = vals[0]
            color = self.host.getColor(mergeData, s.data())
            ## wait until after to set the colors
            colors.append((s, color))
            dlg.setValue(i)
            if dlg.wasCanceled():
                raise Exception("Process canceled by user.")
    for s, c in colors:
        s.setBrush(c)
def loadFileRequested(self, files):
    """Called by FileLoader when the load EPSP file button is clicked, once for each selected file.

    files - a list of the file currently selected in FileLoader

    Reads every protocol sub-directory of each file into a structured record array
    and appends the successfully read traces to self.traces.  Returns True on
    success, None if loading was canceled or files is None.

    Fix: the original concatenated arr[:maxi], which (a) dropped the last
    successfully read trace (off-by-one: maxi is the index, not the count) and
    (b) could include empty zero rows when unreadable entries were interleaved
    with good ones.  We now track the exact row indices that were filled.
    """
    if files is None:
        return
    n = len(files[0].ls())
    with pg.ProgressDialog("Loading data..", 0, n) as dlg:
        for f in files:
            arr = np.zeros((len(f.ls())), dtype=[('timestamp', float), ('data', object),
                                                 ('fileHandle', object), ('results', object)])
            goodRows = []  # indices of rows actually filled (may be non-contiguous)
            for i, protoDir in enumerate(f.ls()):
                if not f[protoDir].isDir():
                    print("Skipping file %s" % f[protoDir].name())
                    continue
                df = self.dataModel.getClampFile(f[protoDir])
                if df is None:
                    print('Error in reading data file %s' % f[protoDir].name())
                    continue
                data = df.read()
                timestamp = data.infoCopy()[-1]['startTime']
                arr[i]['fileHandle'] = df
                arr[i]['timestamp'] = timestamp
                arr[i]['data'] = data
                goodRows.append(i)
                dlg += 1
                if dlg.wasCanceled():
                    return
            # only concatenate successfully read traces
            self.traces = np.concatenate((self.traces, arr[goodRows]))
            self.files.append(f)
    self.expStart = self.traces['timestamp'].min()
    self.updateExptPlot()
    self.updateTracesPlot()
    return True
def storeDBScan(self):
    """Store every currently selected scan to the database.

    Shows progress while storing, clears the tree selection afterward, and flags
    success or failure on the store button.  Any error (including user cancel or
    an empty selection) is re-raised after marking the button as failed.
    """
    try:
        selected = self.selectedScans()
        if not selected:
            raise Exception("No scans selected.")
        with pg.ProgressDialog('Storing scan data to DB..', maximum=len(selected)) as progress:
            for item in selected:
                item.storeToDB()
                progress += 1
                if progress.wasCanceled():
                    raise Exception('Store canceled by user')
        ## We do this because it is too easy to forget to select the correct set of data before clicking store.
        self.ui.scanTree.clearSelection()
        self.ui.storeDBScanBtn.success("Stored.")
    except:
        self.ui.storeDBScanBtn.failure("Error.")
        raise
def renderStack(overlay=True):
    """Export label data as a 3D, RGBA image stack.

    If overlay is True, the RGB channels of each rendered label slice are
    attenuated by the mean intensity of the original data slice (alpha untouched).

    Fixes: the original multiplied a float array into the ubyte stack in place
    (`*=`), which raises a casting error on modern numpy — compute in float and
    cast back explicitly.  A leftover per-slice debug print(z) is removed.
    """
    stack = np.zeros(displayLabel.shape + (4, ), dtype=np.ubyte)
    with pg.ProgressDialog("Rendering label stack...", maximum=displayLabel.shape[0]) as dlg:
        for z in range(displayLabel.shape[0]):
            stack[z] = renderLabels(z)
            if overlay:
                ## multiply colors, not alpha.
                scale = displayData[z].mean(axis=2)[..., np.newaxis].astype(float) / 256.
                stack[z][..., :3] = (stack[z][..., :3] * scale).astype(np.ubyte)
            dlg += 1
            if dlg.wasCanceled():
                raise Exception("Stack render canceled.")
    return stack
def loadFileRequested(self, fhList):
    """Load the requested file handles into the canvas (files/Cell dirs) or as scans.

    Returns True on a successful load, False on error.
    """
    canvas = self.getElement('Canvas')
    model = self.dataModel
    with pg.ProgressDialog("Loading data..", 0, len(fhList)) as dlg:
        for fh in fhList:
            try:
                ## TODO: use more clever detection of Scan data here.
                if fh.isFile() or model.dirType(fh) == 'Cell':
                    canvas.addFile(fh)
                else:
                    self.loadScan(fh)
                # NOTE(review): returning here means only the FIRST entry of fhList is
                # ever loaded, and the dlg += 1 / cancel check below is unreachable —
                # confirm whether the return was meant to be outside the loop.
                return True
            except:
                debug.printExc("Error loading file %s" % fh.name())
                return False
            dlg += 1
            if dlg.wasCanceled():
                return
def storeToDBBtnClicked(self, *args):
    """Store all analyzed trace results to the '<dbIdentity>.traces' DB table.

    Existing rows for each ProtocolDir present in the results are deleted before
    insert.  Raises if no database is loaded or the user cancels the insert.
    """
    db = self.dbGui.getDb()
    if db is None:
        raise Exception("No database loaded.")
    identity = self.dbIdentity + '.traces'
    table = self.dbGui.getTableName(identity)
    data = list(self.traces['results'])
    fields = db.describeData(data)
    ## override directory fields since describeData can't guess these for us
    fields['CellDir'] = 'directory:Cell'
    fields['ProtocolDir'] = 'directory:Protocol'
    fields['ProtocolSequenceDir'] = 'directory:ProtocolSequence'
    with db.transaction():
        ## Make sure target table exists and has correct columns, links to input file
        db.checkTable(table, owner=identity, columns=fields, create=True,
                      addUnknownColumns=True,
                      indexes=[['ProtocolDir'], ['ProtocolSequenceDir'], ['CellDir']])
        # delete old records for every protocol dir we are about to re-store
        for source in set([d['ProtocolDir'] for d in data]):
            db.delete(table, where={'ProtocolDir': source})
        # write new
        with pg.ProgressDialog("Storing data...", 0, 100) as dlg:
            for n, nmax in db.iterInsert(table, data, chunkSize=30):
                dlg.setMaximum(nmax)
                dlg.setValue(n)
                if dlg.wasCanceled():
                    raise HelpfulException("Canceled by user.", msgType='status')
def exportAllMaps():
    """Export an SVG of the map view for every cell marked MapOK in the database.

    Files are written as 'map_day_slice_cell.svg' in the current directory.
    Raises if the user cancels the progress dialog.
    """
    global v
    dataManager = man.getModule('Data Manager')
    db = dataManager.currentDatabase()
    cells = db.select('DirTable_Cell', ['Dir'], where={'MapOK': 1})
    cells.sort(key=lambda c: c['Dir'].name())
    with pg.ProgressDialog("exporting all..", 0, 1000) as dlg:
        for row in cells:
            cell = row['Dir']
            sl = cell.parent()
            day = sl.parent()
            showMap(cell)
            # process events twice so the scene is fully redrawn before export
            Qt.QApplication.processEvents()
            Qt.QApplication.processEvents()
            name = "map_" + "_".join([day.shortName(), sl.shortName(), cell.shortName()]) + ".svg"
            exporter = pg.exporters.SVGExporter.SVGExporter(v.scene())
            exporter.export(name)
            print(name)
            if dlg.wasCanceled():
                raise Exception("export cancelled")
def scan(self):
    """Scan over x and y ranges in a nPts x nPts grid, return the image recorded at each location.

    Runs two hardware tasks: first records 10 background frames, then drives the
    scan mirrors (x sweeps back and forth, y steps once per sweep) while the
    camera records.  Returns (background, cameraResult, positions) where
    positions[i] is the [x, y] command active when frame i was exposed.

    Fix: `nPts / sweeps` was true (float) division, making the slice bound
    `stop` a float — a TypeError on Python 3.  Use integer division.
    """
    camera = str(self.ui.cameraCombo.currentText())
    laser = str(self.ui.laserCombo.currentText())
    ## Camera settings to use during scan
    camParams = self.dev.getCameraConfig(camera)
    duration = self.ui.scanDurationSpin.value()
    rate = 10000
    nPts = int(rate * duration)
    sweeps = 20
    xRange = (self.ui.xMinSpin.value(), self.ui.xMaxSpin.value())
    yRange = (self.ui.yMinSpin.value(), self.ui.yMaxSpin.value())
    xDiff = xRange[1] - xRange[0]
    yDiff = yRange[1] - yRange[0]
    ## x sweeps repeatedly across the full range; final sample returns to 0
    xCommand = np.fromfunction(lambda i: xRange[0] + ((xDiff * i * float(sweeps) / nPts) % xDiff), (nPts,), dtype=float)
    xCommand[-1] = 0.0
    ## y holds one level per sweep, stepping through the range once
    yCommand = np.empty((nPts,), dtype=float)
    start = 0
    for i in range(sweeps):
        stop = start + (nPts // sweeps)  # integer division: slice bounds must be ints
        yCommand[start:stop] = yRange[0] + yDiff * (float(i) / (sweeps - 1))
        start = stop
    yCommand[-1] = 0.0
    daqName = self.dev.config['XAxis']['device']
    ## Record 10 camera frames with the shutter closed
    cmd = {
        'protocol': {'duration': 0.0, 'timeout': 5.0},
        camera: {'record': True, 'minFrames': 10, 'params': camParams, 'pushState': 'scanProt'},
    }
    task = acq4.Manager.getManager().createTask(cmd)
    task.execute()
    result = task.getResult()
    ## pull result, convert to ndarray float, take average over all frames
    background = result[camera].asArray().astype(float).mean(axis=0)
    ## Record full scan.
    cmd = {
        'protocol': {'duration': duration, 'timeout': duration + 5.0},
        camera: {'record': True, 'triggerProtocol': True, 'params': camParams,
                 'channels': {'exposure': {'record': True}},
                 'popState': 'scanProt'},
        laser: {'alignMode': True},
        self.dev.name(): {'xCommand': xCommand, 'yCommand': yCommand},
        daqName: {'numPts': nPts, 'rate': rate, 'triggerDevice': camera}
    }
    task = acq4.Manager.getManager().createTask(cmd)
    task.execute(block=False)
    with pg.ProgressDialog("Calibrating scanner: Running scan protocol..", 0, 100) as dlg:
        while not task.isDone():
            dlg.setValue(100. * task.runTime() / task.duration())
            if dlg.wasCanceled():
                task.abort()
                raise HelpfulException('Calibration canceled by user.', msgType='warning')
            time.sleep(0.2)
    result = task.getResult()
    frames = result[camera].asMetaArray()
    if frames._info[-1]['preciseTiming'] is not True:
        raise HelpfulException("Calibration could not accurately measure camera frame timing.",
                               reasons=["The exposure signal from the camera was not recorded by the DAQ."])
    ## Generate a list of the scanner command values for each frame
    positions = []
    for i in range(frames.shape[0]):
        t = frames.xvals('Time')[i]
        ind = int((t / duration) * nPts)
        if ind >= len(xCommand):
            break
        positions.append([xCommand[ind], yCommand[ind]])
    if frames.ndim != 3 or frames.shape[0] < 5:
        raise Exception("Camera did not collect enough frames (data shape is %s)" % str(frames.shape))
    if background.shape != frames.shape[1:]:
        raise Exception("Background measurement frame has different shape %s from scan frames %s" % (str(background.shape), str(frames.shape[1:])))
    return (background, result[camera], positions)
def runCalibration(self):
    """The scanner calibration routine:
       1) Measure background frame, then scan mirrors while collecting frames as fast as possible (self.scan())
       2) Locate spot in every frame using gaussian fit
       3) Map image spot locations to coordinate system of Scanner device's parent
       3) Do parabolic fit to determine mapping between voltage and position

    Returns (mapParams, (spotAmplitude, spotWidth)).
    """
    camera = str(self.ui.cameraCombo.currentText())
    laser = str(self.ui.laserCombo.currentText())
    blurRadius = 5
    ## Do fast scan of entire allowed command range
    (background, cameraResult, positions) = self.scan()
    with pg.ProgressDialog("Calibrating scanner: Computing spot positions...", 0, 100) as dlg:
        dlg.show()
        dlg.raise_()  # Not sure why this is needed here..
        ## Forget first 2 frames since some cameras can't seem to get these right.
        frames = cameraResult.asArray()
        frames = frames[2:]
        positions = positions[2:]
        ## Do background subtraction
        ## take out half the data until it can do the calculation without having a MemoryError.
        finished = False
        gc.collect()
        while not finished:
            try:
                frames = frames.astype(np.float32)
                frames -= background.astype(np.float32)
                finished = True
            except MemoryError:
                # halve the data (every other frame) and retry
                frames = frames[::2, :, :]
                positions = positions[::2]
                finished = False
        ## Find a frame with a spot close to the center (within center 1/3)
        # NOTE(review): these rely on Python 2 integer division — on Python 3,
        # `/` yields floats and the slices below would raise; confirm `//` is intended.
        cx = frames.shape[1] / 3
        cy = frames.shape[2] / 3
        centerSlice = blur(frames[:, cx:cx*2, cy:cy*2], (0, 5, 5)).max(axis=1).max(axis=1)
        maxIndex = argmax(centerSlice)
        maxFrame = frames[maxIndex]
        dlg.setValue(5)
        ## Determine spot intensity and width
        mfBlur = blur(maxFrame, blurRadius)
        amp = mfBlur.max() - median(mfBlur)  ## guess intensity of spot
        (x, y) = argwhere(mfBlur == mfBlur.max())[0]  ## guess location of spot
        fit = fitGaussian2D(maxFrame, [amp, x, y, maxFrame.shape[0] / 10, 0.])[0]  ## gaussian fit to locate spot exactly
        # convert sigma to full width at 1/e
        fit[3] = abs(2 * (2 ** 0.5) * fit[3])  ## sometimes the fit for width comes out negative. *shrug*
        someFrame = cameraResult.frames()[0]
        frameTransform = pg.SRTTransform(someFrame.globalTransform())
        pixelSize = someFrame.info()['pixelSize'][0]
        spotAmplitude = fit[0]
        spotWidth = abs(fit[3] * pixelSize)
        size = self.spotSize(mfBlur)
        dlg.setValue(50)
        ## Determine location of spot within each frame,
        ## ignoring frames where the spot is too dim or too close to the frame edge
        spotLocations = []
        globalSpotLocations = []
        spotCommands = []
        spotFrames = []
        margin = fit[3]
        for i in range(len(positions)):
            dlg.setValue(50. + 50. * i / frames.shape[0])
            if dlg.wasCanceled():
                raise HelpfulException('Calibration canceled by user.', msgType='warning')
            frame = frames[i]
            fBlur = blur(frame.astype(np.float32), blurRadius)
            mx = fBlur.max()
            diff = mx - fBlur.min()
            ss = self.spotSize(fBlur)
            if ss < size * 0.6:
                # spot too dim relative to the reference frame; skip
                continue
            (x, y) = argwhere(fBlur == mx)[0]  # guess location of spot
            if x < margin or x > frame.shape[0] - margin:
                # too close to edge; skip
                continue
            if y < margin or y > frame.shape[1] - margin:
                # too close to edge; skip
                continue
            frame[x,y] = -1  ## mark location of peak in image
            ## convert pixel location to coordinate system of scanner's parent
            globalPos = frameTransform.map(pg.Point(x, y))  ## Map from frame pixel location to global coordinates
            localPos = self.dev.mapGlobalToParent(globalPos)  ## map from global to parent coordinate system. This is the position we calibrate to.
            spotLocations.append([localPos.x(), localPos.y()])
            globalSpotLocations.append([globalPos.x(), globalPos.y()])
            spotCommands.append(positions[i])
            spotFrames.append(frame[newaxis])
    ## sanity check on spot frame
    if len(spotFrames) == 0:
        self.ui.view.setImage(frames)
        raise HelpfulException('Calibration never detected laser spot! Looking for spots that are %f pixels wide.'% fit[3],
                               reasons=['shutter is disabled', 'mirrors are disabled', 'objective is not clean',
                                        'spot is not visible or not bright enough when shutter is open'])
    spotFrameMax = concatenate(spotFrames).max(axis=0)
    self.ui.view.setImage(spotFrameMax, transform=frameTransform)
    self.clearSpots()
    for sl in globalSpotLocations:
        self.addSpot(sl, spotWidth)
    self.ui.view.autoRange()
    if len(spotFrames) < 10:
        raise HelpfulException('Calibration detected only %d frames with laser spot; need minimum of 10.' % len(spotFrames),
                               reasons=['spot is too dim for camera sensitivity', 'objective is not clean',
                                        'mirrors are scanning too quickly',
                                        'mirror scanning region is not within the camera\'s view'])
    ## Fit all data to a map function
    mapParams = self.generateMap(array(spotLocations), array(spotCommands))
    if spotWidth < 0:
        raise Exception()
    return (mapParams, (spotAmplitude, spotWidth))
def loadCell(cell, reloadData=False):
    """Load (and cache) all event records for one cell into the module-level `events` dict.

    Selects all rows for the cell from eventView, augments each chunk with holding
    level and stereotaxic position columns, and stores
    events[cell] = (event array, numExSites, numInSites), where sites are classified
    excitatory/inhibitory by their holding potential.
    """
    global events
    if reloadData:
        events.pop(cell, None)
    if cell in events:
        return  # already cached
    db = man.getModule('Data Manager').currentDatabase()
    mod = man.dataModel
    allEvents = []
    hvals = {}          # ProtocolSequenceDir -> holding level
    nEv = 0
    positionCache = {}  # protocol dir -> (right, anterior, dorsal)
    tcache = {}
    print("Loading all events for cell", cell)
    tot = db.select(eventView, 'count()', where={'CellDir': cell})[0]['count()']
    print(tot, "total events..")
    with pg.ProgressDialog('Loading event data...', maximum=tot, wait=0) as dlg:
        for ev in db.iterSelect(eventView, [
                'ProtocolSequenceDir', 'SourceFile', 'fitAmplitude', 'fitTime',
                'fitDecayTau', 'fitRiseTau', 'fitTimeToPeak', 'fitLengthOverDecay',
                'fitFractionalError', 'userTransform', 'CellType', 'CellDir', 'ProtocolDir'
                ], where={'CellDir': cell}, toArray=True, chunkSize=200):
            extra = np.empty(ev.shape, dtype=[('right', float), ('anterior', float),
                                              ('dorsal', float), ('holding', float)])
            ## insert holding levels
            for i in range(len(ev)):
                sd = ev[i]['ProtocolSequenceDir']
                if sd not in hvals:
                    cf = ev[i]['SourceFile']
                    hvals[sd] = mod.getClampHoldingLevel(cf)
                extra[i]['holding'] = hvals[sd]
            ## insert positions
            for i in range(len(ev)):
                protoDir = ev[i]['SourceFile'].parent()
                key = protoDir
                if key not in positionCache:
                    rec = db.select('CochlearNucleus_Protocol', where={'ProtocolDir': protoDir})
                    if len(rec) == 0:
                        pos = (None, None, None)
                    elif len(rec) == 1:
                        pos = (rec[0]['right'], rec[0]['anterior'], rec[0]['dorsal'])
                    elif len(rec) == 2:
                        raise Exception("Multiple position records for %s!" % str(protoDir))
                    # NOTE(review): if len(rec) > 2, `pos` is unbound here and the next
                    # line raises NameError — confirm whether the elif should be `>= 2`.
                    positionCache[key] = pos
                extra[i]['right'] = positionCache[key][0]
                extra[i]['anterior'] = positionCache[key][1]
                extra[i]['dorsal'] = positionCache[key][2]
            ev = fn.concatenateColumns([ev, extra])
            allEvents.append(ev)
            nEv += len(ev)
            dlg.setValue(nEv)
            if dlg.wasCanceled():
                raise Exception('Canceled by user.')
    ev = np.concatenate(allEvents)
    ## classify sites by holding potential
    numExSites = 0
    numInSites = 0
    for site in db.select(siteView, 'ProtocolSequenceDir', where={'CellDir': cell}):
        h = hvals.get(site['ProtocolSequenceDir'], None)
        if h is None:
            continue
        if h > -0.02:
            numInSites += 1
        elif h < -0.04:
            numExSites += 1
    events[cell] = (ev, numExSites, numInSites)
def storeToDB(self, data=None):
    """Store detected event records to the detector's DB table.

    data: structured array of events (defaults to the flowchart's 'events' output).
    If empty, any previously stored events for the current file's protocol are
    deleted instead.  Each record is augmented with ProtocolDir and
    ProtocolSequenceDir handles before insertion.

    Fix: the ProtocolDir / ProtocolSequenceDir columns were built with bare
    map(...), which on Python 3 is a one-shot iterator with no len() — it breaks
    (or silently empties) on any second pass during insert.  Materialize as lists.
    """
    p = debug.Profiler("EventDetector.storeToDB", disabled=True)
    if data is None:
        data = self.flowchart.output()['events']
    dbui = self.getElement('Database')
    table = dbui.getTableName(self.dbIdentity)
    db = dbui.getDb()
    if db is None:
        raise Exception("No DB selected")
    p.mark("DB prep done")
    if len(data) == 0:
        ## if there is no event data, then we need to delete previous event data
        dh = self.currentFile.name(relativeTo=db.baseDir())
        if dh[-10:] == '/Clamp1.ma' or dh[-10:] == '/Clamp2.ma':
            dh = dh[:-10]
        # NOTE: string-built SQL; dh comes from managed storage, not user input,
        # but parameterized queries would still be safer here.
        protocolID = db('Select rowid, Dir from DirTable_Protocol where Dir="%s"' % dh)
        if len(protocolID) > 0:
            protocolID = protocolID[0]['rowid']
        else:
            return
        db('Delete from %s where ProtocolDir=%i' % (table, protocolID))
        return
    ## determine the set of fields we expect to find in the table
    columns = db.describeData(data)
    columns.update({
        'ProtocolSequenceDir': 'directory:ProtocolSequence',
        'ProtocolDir': 'directory:Protocol',
    })
    p.mark("field list done")
    with db.transaction():
        ## Make sure target table exists and has correct columns, links to input file
        db.checkTable(table, owner=self.dbIdentity, columns=columns, create=True,
                      addUnknownColumns=True, indexes=[['SourceFile'], ['ProtocolSequenceDir']])
        p.mark("data prepared")
        ## collect all protocol/Sequence dirs
        prots = {}
        seqs = {}
        for fh in set(data['SourceFile']):
            prots[fh] = fh.parent()
            seqs[fh] = self.dataModel.getParent(fh, 'ProtocolSequence')
        ## delete all records from table for current input files
        for fh in set(data['SourceFile']):
            db.delete(table, where={'SourceFile': fh})
        p.mark("previous records deleted")
        ## assemble final list of records
        records = {}
        for col in data.dtype.names:
            records[col] = data[col]
        # list(...) so these columns are real sequences, not one-shot iterators
        records['ProtocolSequenceDir'] = list(map(seqs.get, data['SourceFile']))
        records['ProtocolDir'] = list(map(prots.get, data['SourceFile']))
        p.mark("record list assembled")
        ## insert all data to DB
        with pg.ProgressDialog("Storing events...", 0, 100) as dlg:
            for n, nmax in db.iterInsert(table, records, chunkSize=50):
                dlg.setMaximum(nmax)
                dlg.setValue(n)
                if dlg.wasCanceled():
                    raise HelpfulException("Scan store canceled by user.", msgType='status')
        p.mark("records inserted")
    p.finish()
def storeToDB(self):
    """Store the current map's per-spot analysis results to the '<dbIdentity>.sites' table.

    Builds one record per spot (restricted to the declared field set), deletes any
    previous rows for this map, and inserts the new rows in one transaction.
    Flags success/failure on the store button and re-raises any error.
    """
    try:
        self.update()
        ## Determine currently selected table to store to
        dbui = self.getElement('Map Loader').dbGui
        identity = self.dbIdentity + '.sites'
        mapTable = dbui.getTableName('Photostim.maps')
        table = dbui.getTableName(identity)
        db = dbui.getDb()
        if db is None:
            raise Exception("No DB selected")
        ## declared schema for the sites table
        fields = OrderedDict([
            ('Map', {'Type': 'int', 'Link': mapTable}),
            ('FirstSite', 'directory:Protocol'),
            ('Sites', 'blob'),
            ('PoissonScore', 'real'),
            ('PoissonScore_Pre', 'real'),
            ('PoissonAmpScore', 'real'),
            ('PoissonAmpScore_Pre', 'real'),
            ('HasInput', 'int'),
            ('FirstLatency', 'real'),
            ('ZScore', 'real'),
            ('FitAmpSum', 'real'),
            ('FitAmpSum_Pre', 'real'),
            ('NumEvents', 'real'),
            ('SpontRate', 'real'),
            ('DirectPeak', 'real'),
            ('Region', 'text'),
        ])
        mapRec = self.currentMap.getRecord()
        data = []
        for spot in self.currentMap.spots:
            ## copy only the declared fields out of each spot's data
            rec = {}
            for k in fields:
                if k in spot['data']:
                    rec[k] = spot['data'][k]
            rec['Map'] = self.currentMap.rowID
            sites = [s[1] for s in spot['data']['sites']]
            rec['FirstSite'] = sites[0]
            rec['Sites'] = [db.getDirRowID(s) for s in sites]
            data.append(rec)
        with db.transaction():
            ## Make sure target table exists and has correct columns, links to input file
            db.checkTable(table, owner=identity, columns=fields, create=True,
                          addUnknownColumns=True, indexes=[['Map']])
            # delete old rows for this map
            db.delete(table, where={'Map': self.currentMap.rowID})
            # write new
            with pg.ProgressDialog("Storing map data...", 0, 100) as dlg:
                for n, nmax in db.iterInsert(table, data, chunkSize=100):
                    dlg.setMaximum(nmax)
                    dlg.setValue(n)
                    if dlg.wasCanceled():
                        raise HelpfulException("Scan store canceled by user.", msgType='status')
        self.storeBtn.success()
    except:
        self.storeBtn.failure()
        raise
def mapErrors(self, nSteps=(5, 5, 7), stepSize=(50e-6, 50e-6, 50e-6), padding=60e-6,
              threshold=0.4, speed='slow', show=False, intermediateDist=60e-6):
    """Move pipette tip randomly to locations in a grid and measure the position error
    at each location.

    All tip locations must be within the field of view.

    Parameters
    ----------
    nSteps : tuple of int
        Number of grid points along each axis (x, y, z).
    stepSize : tuple of float
        Grid spacing along each axis, in meters.
    padding, threshold :
        Passed through to `self.measureError`.
    speed : str
        Stage/focus movement speed for grid moves.
    show : bool
        If True, display each frame with markers at the target (yellow) and
        measured (red) tip positions.
    intermediateDist : float
        Length of the random intermediate jump (in meters) used to defeat
        mechanical hysteresis before settling on each grid point.

    Returns
    -------
    dict or None
        The error map (also stored as `self.errorMap` and saved to disk), or
        None if the user canceled the acquisition.
    """
    startTime = time.time()
    start = np.array(self.dev.globalPosition())
    npts = nSteps[0] * nSteps[1] * nSteps[2]
    inds = np.mgrid[0:nSteps[0], 0:nSteps[1], 0:nSteps[2]].reshape((3, npts)).transpose()
    order = np.arange(npts)
    np.random.shuffle(order)

    err = np.zeros(nSteps + (3,))

    stepSize = np.array(stepSize)

    if show:
        imv = pg.image()
        mark1 = pg.QtGui.QGraphicsEllipseItem(pg.QtCore.QRectF(-5, -5, 10, 10))
        mark1.setBrush(pg.mkBrush(255, 255, 0, 100))
        mark1.setZValue(100)
        imv.addItem(mark1)
        mark2 = pg.QtGui.QGraphicsEllipseItem(pg.QtCore.QRectF(-5, -5, 10, 10))
        mark2.setBrush(pg.mkBrush(255, 0, 0, 100))
        mark2.setZValue(100)
        imv.addItem(mark2)

    # loop over all points in random order, and such that we do heavy computation while
    # pipette is moving. Iteration i measures the frame captured at the end of
    # iteration i-1, so the loop runs len(order)+1 times.
    offsets = []
    try:
        with pg.ProgressDialog("Acquiring error map...", 0, len(order)) as dlg:
            for i in range(len(order) + 1):
                if i > 0:
                    lastPos = pos
                if i < len(order):
                    ind = inds[order[i]]
                    pos = start.copy() + (stepSize * ind)

                    # Jump to position + a random offset to avoid hysteresis
                    offset = np.random.normal(size=3)
                    offset *= intermediateDist / (offset ** 2).sum() ** 0.5
                    offsets.append(offset)

                    mfut = self.dev._moveToGlobal(pos + offset, speed)
                    ffut = self.dev.scopeDevice().setFocusDepth(pos[2], speed)
                if i > 0:
                    # Measure the error for the frame acquired on the previous pass
                    # while the stage is moving toward the next grid point.
                    ind = inds[order[i - 1]]
                    print("Frame: %d %s" % (i - 1, lastPos))
                    err[tuple(ind)] = self.measureError(padding=padding, threshold=threshold,
                                                        frame=frame, pos=lastPos)
                    print(" error: %s" % err[tuple(ind)])
                    dlg += 1

                    if show:
                        imv.setImage(frame.data()[0])
                        p1 = frame.globalTransform().inverted()[0].map(pg.Vector(lastPos))
                        p2 = frame.globalTransform().inverted()[0].map(pg.Vector(lastPos + err[tuple(ind)]))
                        mark1.setPos(p1.x(), p1.y())
                        mark2.setPos(p2.x(), p2.y())

                # wait for previous moves to complete
                mfut.wait(updates=True)
                ffut.wait(updates=True)

                # step back to actual target position
                self.dev._moveToGlobal(pos, speed).wait(updates=True)

                frame = self.takeFrame()

                if dlg.wasCanceled():
                    return None
    finally:
        # Always return the pipette and focus to the starting position.
        self.dev._moveToGlobal(start, 'fast')
        self.dev.scopeDevice().setFocusDepth(start[2], 'fast')

    self.errorMap = {
        'err': err,
        'nSteps': nSteps,
        'stepSize': stepSize,
        'order': order,
        'inds': inds,
        'offsets': offsets,
        'time': time.time() - startTime,
    }

    filename = self.dev.configFileName('error_map.np')
    # BUG FIX: use a context manager so the file handle is flushed and closed
    # even if np.save raises (the original left the handle open).
    with open(filename, 'wb') as fh:
        np.save(fh, self.errorMap)

    return self.errorMap
def takeReferenceFrames(self, zRange=None, zStep=None, imager=None, average=8, tipLength=None):
    """Collect a series of images of the pipette tip at various focal depths.

    The collected images are used as reference templates for determining the most likely location 
    and focal depth of the tip after the calibration is no longer valid.

    The focus first is moved in +z by half of *zRange*, then stepped downward by *zStep* until the
    entire *zRange* is covered. Images of the pipette tip are acquired and stored at each step.

    This method assumes that the tip is in focus near the center of the camera frame, and that its
    position is well-calibrated. Ideally, the illumination is flat and the area surrounding the tip
    is free of any artifacts.

    Images are filtered using `self.filterImage` before they are stored.
    """
    imager = self._getImager(imager)

    # Take an initial frame with the tip in focus.
    centerFrame = self.takeFrame()

    if tipLength is None:
        tipLength = self.suggestTipLength(centerFrame)

    if zRange is None:
        zRange = tipLength * 1.5
    if zStep is None:
        zStep = zRange / 30.

    minImgPos, maxImgPos, tipRelPos = self.getTipImageArea(centerFrame,
                                                           padding=tipLength * 0.15,
                                                           tipLength=tipLength)
    center = centerFrame.data()[0, minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]]
    center = self.filterImage(center)

    # Decide how many frames to collect and at what z depths
    nFrames = (int(zRange / zStep) // 2) * 2
    pos = self.dev.globalPosition()
    zStart = pos[2] + zStep * (nFrames // 2)
    frames = []
    bg_frames = []
    corr = []

    print("Collecting %d frames of %0.2fum tip length at %0.2fum resolution." % (nFrames, tipLength * 1e6, zStep * 1e6))

    # Stop camera if it is currently running
    restart = False
    if imager.isRunning():
        restart = True
        imager.stop()

    # BUG FIX: bind the scope device *before* entering the try block. The
    # original assigned `scope` inside the try, so an exception raised before
    # that point (e.g. while opening the progress dialog) caused the finally
    # clause to fail with NameError, masking the real error.
    scope = self.dev.scopeDevice()

    try:
        with pg.ProgressDialog('Acquiring reference frames...', 0, nFrames * 2 + 1) as dlg:
            # collect 2 stacks of images (second stack is for background subtraction)
            for j in range(2):
                # Set initial focus above start point to reduce hysteresis in focus mechanism
                scope.setFocusDepth(zStart + 10e-6)

                # Acquire multiple frames at different depths
                for i in range(nFrames):
                    scope.setFocusDepth(zStart - zStep * i).wait()
                    frame = imager.acquireFrames(average)
                    img = frame.data()[:, minImgPos[0]:maxImgPos[0],
                                       minImgPos[1]:maxImgPos[1]].astype(float).mean(axis=0)
                    img = self.filterImage(img)
                    if j == 0:
                        frames.append(img)
                        corr.append(self._matchTemplateSingle(img, center)[1])
                    else:
                        bg_frames.append(img)
                    dlg += 1
                    if dlg.wasCanceled():
                        return

                if j == 0:
                    # move tip out-of-frame to collect background images
                    self.dev._moveToLocal([-tipLength * 3, 0, 0], 'slow').wait()
                else:
                    self.dev._moveToLocal([tipLength * 3, 0, 0], 'slow')
    finally:
        # restart camera if it was running
        if restart:
            imager.start()
        scope.setFocusDepth(pos[2])

    # find the index of the frame that most closely matches the initial, tip-focused frame
    maxInd = np.argmax(corr)

    # stack all frames into a 3D array
    frames = np.dstack(frames).transpose((2, 0, 1))
    bg_frames = np.dstack(bg_frames).transpose((2, 0, 1))

    key = imager.getDeviceStateKey()
    self.reference[key] = {
        'frames': frames - bg_frames,   # background-subtracted reference stack
        'zStep': zStep,
        'centerInd': maxInd,
        'centerPos': tipRelPos,
        'pixelSize': frame.info()['pixelSize'],
        'tipLength': tipLength,
    }

    # Store with pickle because configfile does not support arrays.
    # Use a context manager so the file handle is closed even on error.
    with open(self.dev.configFileName('ref_frames.pk'), 'wb') as fh:
        pickle.dump(self.reference, fh)
def updateProfiles(self):
    """Fit a sloped gaussian to each row of the normalized image and plot the results.

    Each row of `self.normData` is fit to
    `v[0] * exp(-((x-v[1])**2) / (2*v[2]**2)) + v[3] + v[4]*x`
    using a center-weighted error function. Fits whose center is far from the
    image middle or whose baseline is inconsistent with the row median are
    rejected (zeroed). Accepted fits get a second pass with outliers masked.
    Results are plotted and stored as a MetaArray in `self.data`.

    Raises
    ------
    Exception
        If the user cancels processing from the progress dialog.
    """
    plots = self.getElement('profiles'), self.getElement('profile fits')
    for plot in plots:
        plot.clear()
        plot.setLabel('bottom', 'distance', units='m')

    width, height = self.normData.shape
    xVals = np.linspace(0, self.px[0] * width, width)
    fits = []

    def slopeGaussian(v, x):
        ## gaussian + slope
        return fn.gaussian(v[:4], x) + v[4] * x

    def gaussError(v, x, y):
        ## center-weighted error function for sloped gaussian
        err = abs(y - slopeGaussian(v, x))
        v2 = [2.0, v[1], v[2] * 0.3, 1.0, 0.0]
        return err * slopeGaussian(v2, x)

    with pg.ProgressDialog("Processing..", 0, height - 1, cancelText=None) as dlg:
        for i in range(height):
            row = self.normData[:, i]
            guess = [row.max() - row.min(), xVals[int(width / 2)],
                     self.px[0] * 3, row.max(), 0.0]
            fit = scipy.optimize.leastsq(gaussError, guess, args=(xVals, row))[0]
            fit[2] = abs(fit[2])
            dist = fit[1] / (self.px[0] * width / 2.)

            ## sanity check on fit: reject if the center drifts too far from the
            ## middle of the row, or the fitted baseline disagrees with the row
            ## median by more than a factor of 2 in either direction.
            ## BUG FIX: the original chained comparison `0.5 < ratio > 2.0` is
            ## equivalent to `ratio > 2.0` alone, so low baselines (< 0.5x the
            ## median) were never rejected.
            ratio = fit[3] / np.median(row)
            if abs(dist - 1) > 0.5 or not (0.5 < ratio < 2.0):
                fit = [0, 0, 0, 0, 0]
            else:
                # round 2: eliminate anomalous points and re-fit
                fitCurve = slopeGaussian(fit, xVals)
                diff = row - fitCurve
                std = diff.std()
                mask = abs(diff) < std * 1.5
                x2 = xVals[mask]
                y2 = row[mask]
                fit = fn.fit(slopeGaussian, xVals=x2, yVals=y2, guess=fit)[0]
            fits.append(fit)

            dlg += 1
            if dlg.wasCanceled():
                raise Exception("Processing canceled by user")

    for i in range(len(fits)):
        ## plot in reverse order
        pen = pg.intColor(height - i, height * 1.4)
        plots[0].plot(y=self.normData[:, -1 - i], x=xVals, pen=pen)
        plots[1].plot(y=slopeGaussian(fits[-1 - i], xVals), x=xVals, pen=pen)

    yVals = np.linspace(0, self.px[0] * height, height)
    arr = np.array(fits)
    info = [{
        'name': 'depth',
        'units': 'm',
        'values': yVals
    }, {
        'name': 'fitParams',
        'cols': [
            {'name': 'Amplitude'},
            {'name': 'X Offset'},
            {'name': 'Sigma', 'units': 'm'},
            {'name': 'Y Offset'},
            {'name': 'Slope'},
        ]
    }, {
        'sourceImage': self.fileHandle.name(),
        'dataRegion': self.dataRgn.saveState(),
        'backgroundRegion': self.bgRgn.saveState(),
        'description': """
            The source image was normalized for background fluorescence, then each row was fit to a sloped gaussian function:
                v[0] * np.exp(-((x-v[1])**2) / (2 * v[2]**2)) + v[3] + v[4] * x
            The fit parameters v[0..4] for each image row are stored in the columns of this data set.
            """
    }]
    self.data = MetaArray(arr, info=info)
    self.showResults(self.data)