def testWithWoblyData(self):
    """Wobbly axis values should snap onto the nominal grid when building the lookup table."""
    holder = ScanFileHolder()
    holder.addDataSet('i1', DatasetFactory.createFromObject([10.1, 10.4, 9.6, 20.1, 20.2, 19.9, 30, 30, 30]))
    holder.addDataSet('i2', DatasetFactory.createFromObject([1.09, 1.99, 3, 1.01, 2.099, 3, 1, 2, 3]))
    holder.addDataSet('e1', DatasetFactory.createFromObject([0, .1, .2, .3, .4, .5, .6, .7, .8, .9]))
    self.sfhi = SFHInterpolatorWithHashAccess(holder, {'i1': 10, 'i2': 0.2}, ('i1', 'i2'))
    # Each snapped i1 value maps to {snapped i2: row index}.
    expected = {10.0: {3.0: 2, 2.0: 1, 1.0: 0},
                20.0: {3.0: 5, 2.0: 4, 1.0: 3},
                30.0: {3.0: 8, 2.0: 7, 1.0: 6}}
    self.assertEquals(self.sfhi.lookupDict, expected)
def plotAxisToDataVectorPlot(self, plotName, xColumnName, yColumnName): print "plotAxisToDataVectorPlot", plotName, xColumnName, yColumnName xdataset = DatasetFactory.createFromObject(self.getColumn(xColumnName)) xdataset.setName(xColumnName) ydataset = DatasetFactory.createFromObject(self.getColumn(yColumnName)) ydataset.setName(yColumnName) if plotName is not None: Plotter.plot(plotName, xdataset, [ydataset])
def setUp(self):
    """Build a holder of wobbly grid data and wrap it in an interpolator and a scannable."""
    holder = ScanFileHolder()
    holder.addDataSet('i1', DatasetFactory.createFromObject([10.1, 10.4, 9.6, 20.1, 20.2, 19.9, 30, 30, 30]))
    holder.addDataSet('i2', DatasetFactory.createFromObject([1.09, 1.99, 3, 1.01, 2.099, 3, 1, 2, 3]))
    holder.addDataSet('e1', DatasetFactory.createFromObject([0, .1, .2, .3, .4, .5, .6, .7, .8, .9]))
    self.sfhi = SFHInterpolatorWithHashAccess(holder, {'i1': 10, 'i2': 0.2}, ('i1', 'i2'))
    self.sfhs = ScanFileHolderScannable('sfhs', holder, ('i1', 'i2'), ('e1',), {'i1': 10, 'i2': 0.2})
def createSFH():
    """Return a ScanFileHolder with linear axes i1/i2 and two triangular signal columns.

    e2 is the same triangular profile as e1 shifted up by 100 (stored as explicit
    literals to keep the exact float values).
    """
    holder = ScanFileHolder()
    holder.addDataSet('i1', DatasetFactory.createFromObject([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]))
    holder.addDataSet('i2', DatasetFactory.createFromObject([0, 10, 20, 30, 40, 50, 60, 70, 80, 90]))
    holder.addDataSet('e1', DatasetFactory.createFromObject([0, .1, .2, .3, .4, .5, .3, .2, .1, 0]))
    holder.addDataSet('e2', DatasetFactory.createFromObject([100., 100.1, 100.2, 100.3, 100.4, 100.5, 100.3, 100.2, 100.1, 100.]))
    return holder
def setUp(self):
    """Create a 17-point x axis (10..26) and a symmetric peak with apex 11 at x=18."""
    self.x = DatasetFactory.createFromObject([float(v) for v in range(10, 27)])
    rising = [1., 1.1, 1.5, 2., 3., 5., 7., 9.]
    self.peak = DatasetFactory.createFromObject(rising + [11.] + list(reversed(rising)))
    # Processor under test is assigned by each concrete test case.
    self.p = None
def reindet(self, thing): """ read in detector data you want to view per frame """ try: #print "opening x"+thing["filename"]+"x" file = open(thing["filename"], 'rb') size = thing["x"] * thing["y"] if (thing["endian"] == 0): # default big endian/motorola endstr = ">" else: # little endian/intel endstr = "<" for i in range(thing["frames"]): list = unpack(endstr + size.__str__() + 'f', file.read(size * 4)) ds = DatasetFactory.createFromObject(list) if thing["x"] > 1 and thing["y"] > 1: ds.setShape(thing["x"], thing["y"]) ds.setName( os.path.basename(thing["filename"]) + " frame " + i.__str__()) self.result.addDataSet(thing["name"] + i.__str__(), ds) file.close() self.detectors[thing["name"]] = thing["frames"] except IOError, message: print "Warning: Could not read (all of) the " + thing[ "name"] + " data: " + message.__str__()
def readout(self): if self.ds is None: if self.verbose: print "%s.readout() getting..." % self.name t = time.time() rawdata = self.pvs['DATA'].cagetArray() if self.verbose: dt, t = time.time()-t, time.time() print "%s.readout() (%fs) sign correction..." % (self.name, dt) data = map(unsign2, rawdata ) if self.verbose: dt, t = time.time()-t, time.time() print "%s.readout() (%fs) creating DataSet..." % (self.name, dt) self.ds = DatasetFactory.createFromObject(data, [int(float(self.pvs['HEIGHT'].caget())), int(float(self.pvs['WIDTH'].caget()))]) if self.verbose: dt, t = time.time()-t, time.time() print "%s.readout() (%fs) saving..." % (self.name, dt) if self.filepath is not None: self.saveImage(time.strftime("%Y%m%d%H%M%S.png", time.localtime())) if self.verbose: dt, t = time.time()-t, time.time() print "%s.readout() (%fs) ended" % (self.name, dt) return self.ds
def asynchronousMoveTo(self, newPos): ''' Moving the scanner to the newPos, Moving the flipper to position 0, then read out all detectors: [d00, d01, ... , d0n] Moving the flipper to position 1, then read out all detectors: [d10, d11, ... , d1n] ... Moving the flipper to position m, then read out all detectors: [dm0, dm1, ... , dmn] Create a DataSet that represents the about matirx for analysis ''' print 'move scanning device...' self.scanner.moveTo(newPos) self.readouts = [] self.flipPositionsReadback = [] for flipPos in self.flipPositions: print 'move ' + self.flipper.getName() + ' to ' + str(flipPos) self.flipper.moveTo(flipPos) self.flipPositionsReadback.append(self.flipper.getPosition()) print 'counting' readoutp = self.countOnce() self.readouts.append(readoutp) self.dataset = DatasetFactory.createFromObject( self.readouts ) #ds is a new DataSet with dimension [numberOfFlipPositions, NumberOfDetectors]; return
def test_process(self):
    """A flat-topped profile should centre at 14, height just over 1, width 5."""
    profile = [0, 0, 1, 1.000000001, 1, 1, 0, 1, 0, 0]
    cen, height, width = self.p._process(self.x, DatasetFactory.createFromObject(profile))
    self.assertAlmostEqual(cen, 14)
    self.assertAlmostEqual(height, 1.000000001)
    self.assertAlmostEqual(width, 5)
def test_process_ones_max_at_right_edge(self):
    """With the maximum on the last point the centre and width should still be found."""
    profile = [0, 0, 0, 0, 0, 0, 0, 1, 1, 1.000000001]
    cen, height, width = self.p._process(self.x, DatasetFactory.createFromObject(profile))
    self.assertAlmostEqual(cen, 18)
    self.assertAlmostEqual(height, 1.000000001)
    self.assertAlmostEqual(width, 3)
def getNewEpicsData(self, offset, size):
    """Fetch the latest waveform data from EPICS if new points are available.

    offset -- index of the last point already consumed; if it is beyond the
              current head there is nothing new and False is returned.
    size   -- requested number of points.  NOTE(review): currently unused;
              the caget below always reads head + 1 elements -- confirm
              whether that is intended.

    On success self.data[i] holds each detector's array, self.dataset is a
    new DataSet built from those arrays, and True is returned.
    """
    # The head marks the index of the newest sample in the EPICS waveform.
    head = self.getHead()
    if offset > head:
        # No new data available: offset already at or past the head.
        return False
    la = []
    # Read the first head + 1 elements of each detector's waveform channel.
    for i in range(self.numberOfDetectors):
        self.data[i] = self.chData[i].getController().cagetDoubleArray(
            self.chData[i].getChannel(), head + 1)
        la.append(self.data[i])
    # ds is a new DataSet with dimension [numberOfDetectors, size]
    ds = DatasetFactory.createFromObject(la)
    self.dataset = ds
    return True
def getCameraData(self):
    """Grab one frame from the camera PVs and publish it as a 2D dataset.

    Reads the frame geometry and raw byte frame over channel access, converts
    the bytes to doubles, reshapes to [height, width], stores the result under
    this object's name in self.data, and returns the dataset.
    """
    # Frame geometry from the camera's width/height PVs (caget returns strings).
    self.width = int(float(self.frameWidth.caget()));
    self.height = int(float(self.frameHeight.caget()));
    self.rawData = self.frameData.cagetArrayByte();
    # Cast the byte array to a double list so the dataset holds floats.
    tempDoubleList = [float(x) for x in self.rawData];
    self.dataset = DatasetFactory.createFromObject(tempDoubleList)
    self.dataset.shape = [self.height, self.width]
    # NOTE(review): self.data is assumed to exist already -- it is not created
    # in this method; confirm it is initialised elsewhere in the class.
    self.data.setDataSet(self.getName(), self.dataset);
    return self.dataset;
def _configureNewDatasetProvider(self, wait_for_exposure_callable=None):
    """Install a dataset provider for the wrapped detector's latest data.

    Detectors that write their own files get a LazyDataSetProvider pointing at
    the last saved file.  Otherwise the detector is read out directly:
    NXDetectorDataWithFilepathForSrs readouts are deferred to a lazy provider
    via their file path, NXDetectorData trees are unpacked into a plain
    dataset, and anything else is wrapped as-is in a BasicDataSetProvider.
    """
    def createDatasetProvider(path):
        # Build a lazy provider for the image file at 'path'.
        if path == '':
            # NOTE(review): "Could no load" typo is in the user-visible message;
            # left unchanged by this documentation-only pass.
            raise IOError(
                "Could no load dataset: %s does not have a record of the last file saved" % self.name)
        path = self.replacePartOfPath(path)
        if path[0] != '/':
            # if relative path then we have to assume it's from the data directory
            path = gda.data.PathConstructor.createFromDefaultProperty() + "/" + path
        self.datasetProvider = LazyDataSetProvider(
            path, self.iFileLoader, self.fileLoadTimout, self.printNfsTimes,
            wait_for_exposure_callable)
        self.logger.debug("datasetProvider is {}", self.datasetProvider)

    if self.det.createsOwnFiles():
        path = self.getFilepath()
        createDatasetProvider(path)
    else:
        dataset = self._readout()
        if isinstance(dataset, gda.device.detector.NXDetectorDataWithFilepathForSrs):
            # Readout carries a file path: defer loading to the lazy provider.
            path = dataset.getFilepath()
            self.logger.debug("dataset is NXDetectorDataWithFilepathForSrs: {}", dataset)
            createDatasetProvider(path)
            return
        elif isinstance(dataset, gda.device.detector.NXDetectorData):
            # Unpack the in-memory Nexus tree into a plain squeezed dataset.
            data = dataset.getNexusTree().getChildNode(1).getChildNode(1).getData()
            dataset = DatasetFactory.createFromObject(data.getBuffer())
            dataset.setShape(data.dimensions)
            dataset.squeeze()
            self.logger.debug("dataset is NXDetectorData: {}", dataset)
        else:
            self.logger.debug("dataset is neither: {}", dataset)
        self.datasetProvider = BasicDataSetProvider(dataset)
        self.logger.debug("datasetProvider is {}", self.datasetProvider)
def reincal(self, thing): """ read in calibration data you want to view per channel for all frames """ try: #print "opening x"+thing["filename"]+"x" file=open(thing["filename"],'rb') size=thing["x"]*thing["frames"] if (thing["endian"]==0): # default big endian/motorola endstr=">" else: # little endian/intel endstr="<" list=unpack(endstr+size.__str__()+'f', file.read(size*4)) ds = DatasetFactory.createFromObject(list) if thing["x"] > 1 and thing["frames"] > 1: ds.setShape(thing["x"], thing["frames"]) ds.setName(os.path.basename(thing["filename"])+" all frames") self.result.addDataSet(thing["name"], ds) file.close() self.detectors[thing["name"]]=thing["frames"] except IOError, message: print "Warning: Could not read (all of) the "+thing["name"]+" data: "+message.__str__()
def readout(self):
    """Return the detector image dataset, computing and caching it on first use.

    On first call the raw array from the DATA PV is mapped through unsign2
    (presumably an unsigned-value correction -- confirm), shaped to
    [HEIGHT, WIDTH] read from the camera PVs, and optionally saved as a
    timestamped PNG when self.filepath is set.  Verbose mode prints per-stage
    timings.  Subsequent calls return the cached self.ds.
    """
    if self.ds is None:
        if self.verbose:
            print "%s.readout() getting..." % self.name
            t = time.time()
        # Fetch the raw image array over channel access.
        rawdata = self.pvs['DATA'].cagetArray()
        if self.verbose:
            dt, t = time.time() - t, time.time()
            print "%s.readout() (%fs) sign correction..." % (self.name, dt)
        data = map(unsign2, rawdata)
        if self.verbose:
            dt, t = time.time() - t, time.time()
            print "%s.readout() (%fs) creating DataSet..." % (self.name, dt)
        # Shape comes from the HEIGHT/WIDTH PVs (caget returns strings).
        self.ds = DatasetFactory.createFromObject(data, [
            int(float(self.pvs['HEIGHT'].caget())),
            int(float(self.pvs['WIDTH'].caget()))
        ])
        if self.verbose:
            dt, t = time.time() - t, time.time()
            print "%s.readout() (%fs) saving..." % (self.name, dt)
        if self.filepath is not None:
            # Snapshot named after the current local time.
            self.saveImage(
                time.strftime("%Y%m%d%H%M%S.png", time.localtime()))
        if self.verbose:
            dt, t = time.time() - t, time.time()
            print "%s.readout() (%fs) ended" % (self.name, dt)
    return self.ds
def setUp(self):
    """Create the CenFromSPEC processor with a 10-point x axis and a sample list."""
    self.p = CenFromSPEC()  # _process -> (cen, height, width)
    self.x = DatasetFactory.createFromObject([float(v) for v in range(10, 20)])
    self.l = [0, 10, 20, 30, 40]
def setUp(self):
    """Interpolator over a perfectly regular 3x3 grid of (i1, i2) points."""
    holder = ScanFileHolder()
    holder.addDataSet('i1', DatasetFactory.createFromObject([10, 10, 10, 20, 20, 20, 30, 30, 30]))
    holder.addDataSet('i2', DatasetFactory.createFromObject([1, 2, 3, 1, 2, 3, 1, 2, 3]))
    holder.addDataSet('e1', DatasetFactory.createFromObject([0, .1, .2, .3, .4, .5, .6, .7, .8, .9]))
    self.sfhi = SFHInterpolatorWithHashAccess(holder, {'i1': 1, 'i2': 1}, ('i1', 'i2'))
def test_process_ones_max_at_right_edge(self):
    """The centre and width must still be reported when the peak touches the right edge."""
    signal = DatasetFactory.createFromObject(
        [0, 0, 0, 0, 0, 0, 0, 1, 1, 1.000000001])
    cen, height, width = self.p._process(self.x, signal)
    self.assertAlmostEqual(cen, 18)
    self.assertAlmostEqual(height, 1.000000001)
    self.assertAlmostEqual(width, 3)
def setUp(self):
    """Prepare a CenFromSPEC processor, its x axis (10..19) and a sample list."""
    # Processor returns (cen, height, width) from _process.
    self.p = CenFromSPEC()
    xs = [10., 11., 12., 13., 14., 15., 16., 17., 18., 19.]
    self.x = DatasetFactory.createFromObject(xs)
    self.l = [0, 10, 20, 30, 40]
def process(self, dataset):
    """Return the sum of all elements of *dataset*, wrapped in a new dataset."""
    # Renamed from 'sum', which shadowed the Python builtin.
    total = dataset.sum()
    return DatasetFactory.createFromObject(total)
def process(self, dataset):
    """Return the maximum element of *dataset*, wrapped in a new dataset."""
    # Renamed from 'max', which shadowed the Python builtin.  The two False
    # flags are passed through unchanged (presumably ignoreNaNs/ignoreInvalids
    # -- confirm against the dataset API).
    maximum = dataset.max(False, False)
    return DatasetFactory.createFromObject(maximum)
def plotdata(filename, dataType=MAC, plotPane="DataPlot", Overlay=True):
    '''Plot existing data on "MAC, PSD", or "SRS" (if any) Panel, the default is MAC data on DataPlot panel and overlay is True.
    syntax:
        plotdata(filename,[MAC|SRS|PSD|RAW],["MAC"|"Mythen"|"DataPlot"],[True|False])
    where:
        filename: the filename string in quote.
        dataType: the input data types or formats available
            MAC - plot MAC data on MAC panel
            PSD - plot PSD data on Mythen panel
            SRS - plot SRS data on SRS panel
            RAW - plot Mythen raw data (counts against channel number)
        plotPane: the graph panel to display the plot
        Overlay:
            'True': plot over the exist data on the graph (Default)
            'False': clear existing plot data from the graph before plotting new data
    '''
    print("Data plotting to " + plotPane + " panel, please wait ...")
    if dataType == MAC:
        # MAC scan file: axis 0 vs axis 1, curve named after the file.
        sfh = loadMacData(filename)
        dataset = sfh.getAxis(1)
        dataset.setName(filename)
        if Overlay:
            Plotter.plotOver(plotPane, sfh.getAxis(0), dataset)
        else:
            Plotter.plot(plotPane, sfh.getAxis(0), dataset)
    elif dataType == SRS:
        # SRS scan file: plot the first two axes directly.
        sfh = loadSRSData(filename)
        if Overlay:
            Plotter.plotOver(plotPane, sfh.getAxis(0), sfh.getAxis(1))
        else:
            Plotter.plot(plotPane, sfh.getAxis(0), sfh.getAxis(1))
    elif dataType == PSD:
        if not str(filename).find("mythen") == -1:
            # mythen data file (name contains "mythen"): counts vs angle.
            dataset = loadMythenData(filename)
            data = dataset.getCountDataSet()
            data.setName(filename)
            if Overlay:
                Plotter.plotOver(plotPane, dataset.getAngleDataSet(), data)
            else:
                Plotter.plot(plotPane, dataset.getAngleDataSet(), data)
        else:
            # Decide from the file name whether this is a Mythen SRS index file.
            parts = str(filename).split(File.separator)
            name = parts[-1]
            names = str(name).split(".")
            if representsInt(names[0]):
                # Mythen SRS file: plot every referenced .dat file it lists.
                for each in loadMythenSRSFile(filename):
                    dataset = loadMythenData(str(each) + ".dat")
                    data = dataset.getCountDataSet()
                    data.setName(each)
                    if Overlay:
                        Plotter.plotOver(plotPane, dataset.getAngleDataSet(), data)
                    else:
                        Plotter.plot(plotPane, dataset.getAngleDataSet(), data)
            else:
                # Single processed Mythen file: counts vs angle.
                dataset = loadMythenData(filename)
                data = dataset.getCountDataSet()
                data.setName(filename)
                if Overlay:
                    Plotter.plotOver(plotPane, dataset.getAngleDataSet(), data)
                else:
                    Plotter.plot(plotPane, dataset.getAngleDataSet(), data)
    elif dataType == RAW:
        # mythen raw data file: counts against channel number.
        dataset = loadMythenRawData(filename)
        data = DatasetFactory.createFromObject(dataset.getCountArray())
        channeldata = DatasetFactory.createFromObject(dataset.getChannelArray())
        data.setName(filename)
        if Overlay:
            Plotter.plotOver(plotPane, channeldata, data)
            SDAPlotter.addPlot(plotPane, "", channeldata, data, "delta", "counts")
        else:
            Plotter.plot(plotPane, channeldata, data)
            SDAPlotter.plot(plotPane, "", channeldata, data, "delta", "counts")
    else:
        print "Data Type is not recognised or supported."
    print "Plotting completed."
def setUp(self):
    """Prepare the shared x axis (10..26) and a symmetric test peak; tests set self.p."""
    xs = [10., 11., 12., 13., 14., 15., 16., 17., 18.,
          19., 20., 21., 22., 23., 24., 25., 26.]
    self.x = DatasetFactory.createFromObject(xs)
    ys = [1., 1.1, 1.5, 2., 3., 5., 7., 9., 11.,
          9., 7., 5., 3., 2., 1.5, 1.1, 1.]
    self.peak = DatasetFactory.createFromObject(ys)
    self.p = None
# Demonstration script for plotting to GDA client views via RCPPlotter.
from gda.analysis import RCPPlotter
from uk.ac.diamond.scisoft.analysis.plotserver import GuiBean
from uk.ac.diamond.scisoft.analysis.plotserver import GuiParameters
from org.eclipse.dawnsci.analysis.dataset.roi import RectangularROI, RectangularROIList
from org.eclipse.january.dataset import DatasetFactory

pp = RCPPlotter()
# 1D plot on the "Area Detector" PlotView.
pp.plot("Area Detector", DatasetFactory.createFromObject(range(100)))

# 2D image plot: build a 100x100 image from a 0..9999 ramp.
# (Legacy equivalents: d = DataSet.arange(10000); d.shape = [100,100]
#  or d = DataSet.arange(10000).reshape((100,100)) in GDA v8.12.)
d = DatasetFactory.createRange(10000)
d.shape = [100, 100]
pp.imagePlot("Area Detector", d)

# Grid plotting on the ImageExplorerView: from an in-memory dataset or a file.
pp.plotImageToGrid("Image Explorer", DatasetFactory.ones([20, 30]))
pp.plotImageToGrid("Image Explorer", "/home/xr56/temp/p100kImage26455.tif")
# RCPPlotter().plotImageToGrid("Image Explorer","/home/xr56/temp/pilatus100K/p686905.tif")
# Scan a directory of images into the grid view.
RCPPlotter().scanForImages("Image Explorer", "/dls/i06/data/2010/cm1895-1/demoImages")

# ROI
# Get ROI info
def process(self, dataset):
    """Return the arithmetic mean of *dataset*, wrapped in a new dataset."""
    return DatasetFactory.createFromObject(dataset.mean())
def test_process(self):
    """_process should report cen=14, height just above 1 and width 5 for this profile."""
    signal = DatasetFactory.createFromObject(
        [0, 0, 1, 1.000000001, 1, 1, 0, 1, 0, 0])
    cen, height, width = self.p._process(self.x, signal)
    self.assertAlmostEqual(cen, 14)
    self.assertAlmostEqual(height, 1.000000001)
    self.assertAlmostEqual(width, 5)