def testWithWoblyData(self):
		sfh = ScanFileHolder()
		sfh.addDataSet('i1', DatasetFactory.createFromObject([10.1,10.4,9.6, 20.1,20.2,19.9, 30,30,30]))
		sfh.addDataSet('i2', DatasetFactory.createFromObject([1.09,1.99,3, 1.01,2.099,3, 1,2,3]))
		sfh.addDataSet('e1', DatasetFactory.createFromObject([0,.1,.2,.3,.4,.5,.6, .7, .8, .9]))
		self.sfhi = SFHInterpolatorWithHashAccess(sfh, {'i1':10, 'i2':0.2}, ('i1','i2'))
		self.assertEquals(self.sfhi.lookupDict, {10.0: {3.0: 2, 2.0: 1, 1.0: 0}, 20.0: {3.0: 5, 2.0: 4, 1.0: 3}, 30.0: {3.0: 8, 2.0: 7, 1.0: 6}})
Example #2
 def plotAxisToDataVectorPlot(self, plotName, xColumnName, yColumnName):
     print "plotAxisToDataVectorPlot", plotName, xColumnName, yColumnName
     xdataset = DatasetFactory.createFromObject(self.getColumn(xColumnName))
     xdataset.setName(xColumnName)
     ydataset = DatasetFactory.createFromObject(self.getColumn(yColumnName))
     ydataset.setName(yColumnName)
     if plotName is not None:
         Plotter.plot(plotName, xdataset, [ydataset])
	def setUp(self):
		sfh = ScanFileHolder()
		sfh.addDataSet('i1', DatasetFactory.createFromObject([10.1,10.4,9.6, 20.1,20.2,19.9, 30,30,30]))
		sfh.addDataSet('i2', DatasetFactory.createFromObject([1.09,1.99,3, 1.01,2.099,3, 1,2,3]))
		sfh.addDataSet('e1', DatasetFactory.createFromObject([0,.1,.2,.3,.4,.5,.6, .7, .8, .9]))
		self.sfhi = SFHInterpolatorWithHashAccess(sfh, {'i1':10, 'i2':0.2}, ('i1','i2'))

		self.sfhs = ScanFileHolderScannable('sfhs', sfh, ('i1','i2'), ('e1',), {'i1':10, 'i2':0.2})
Example #4
 def loadFile(self):
     dataHolder = DataHolder()
     xDataSet = DatasetFactory.zeros(1,len(self.xData),self.xData)
     xDataSet.setName(self.xName)
     dataHolder.addDataSet(xDataSet.getName(), xDataSet);
     yDataSet = DatasetFactory.zeros(1,len(self.yData),self.yData)
     yDataSet.setName(self.yName)
     dataHolder.addDataSet(yDataSet.getName(), yDataSet);
     return dataHolder
Example #5
 def loadFile(self):
     dataHolder = DataHolder()
     xDataSet = DatasetFactory.zeros(1, len(self.xData), self.xData)
     xDataSet.setName(self.xName)
     dataHolder.addDataSet(xDataSet.getName(), xDataSet)
     yDataSet = DatasetFactory.zeros(1, len(self.yData), self.yData)
     yDataSet.setName(self.yName)
     dataHolder.addDataSet(yDataSet.getName(), yDataSet)
     return dataHolder
def createSFH():

	result = ScanFileHolder()
	result.addDataSet('i1', DatasetFactory.createFromObject([0,1,2,3,4,5,6,7,8,9]))
	result.addDataSet('i2', DatasetFactory.createFromObject([0,10,20,30,40,50,60,70,80,90]))
	result.addDataSet('e1', DatasetFactory.createFromObject([0,.1,.2,.3,.4,.5,.3,.2,.1,0]))
# 	result.addDataSet('e2', DatasetFactory.createFromObject([0,.1,.2,.3,.4,.5,.3,.2,.1,0])+100)	
	result.addDataSet('e2', DatasetFactory.createFromObject([100.,100.1,100.2,100.3,100.4,100.5,100.3,100.2,100.1,100.]))	
	return result
Example #7
 def setUp(self):
     self.x = DatasetFactory.createFromObject([
         10., 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21., 22.,
         23., 24., 25., 26.
     ])
     self.peak = DatasetFactory.createFromObject([
         1., 1.1, 1.5, 2., 3., 5., 7., 9., 11., 9., 7., 5., 3., 2., 1.5,
         1.1, 1.
     ])
     #self.dip = DatasetFactory.createFromObject([5.,4.,3.,2.,1.,0.,1.,2.,3.,4.])
     self.p = None
Example #8
	def _process(self, ds, xoffset=0, yoffset=0):###dsxaxis, dsyaxis):
##		assert(dsyaxis is None)		# STUB
##		assert(dsxaxis is None)		# STUB
		
		integrator = Integrate2D()
		dsy, dsx = integrator.value(ds)
		dsyaxis = DatasetFactory.createRange(dsy.shape[0])
		dsxaxis = DatasetFactory.createRange(dsx.shape[0])
		
		gaussian = Gaussian(dsyaxis.min(), dsyaxis.max(), dsyaxis.max()-dsyaxis.min(), (dsyaxis.max()-dsyaxis.min())*(dsy.max()-dsy.min()) )
		gaussian.getParameter(2).setLowerLimit(0)
		if self.maxwidth is not None:
			gaussian.getParameter(1).setUpperLimit(self.maxwidth)
		ansy = Fitter.fit(dsyaxis, dsy, GeneticAlg(0.001), [ gaussian, Offset( dsy.min(),dsy.max() ) ] )
# 		ansy = DataSetFunctionFitter().fit( dsyaxis, dsy, GeneticAlg(.001), [ gaussian, Offset( dsy.min(),dsy.max() ) ] )
# 		ansy = ansy.functionOutput
		
		gaussian = Gaussian(dsxaxis.min(), dsxaxis.max(), dsxaxis.max()-dsxaxis.min(), (dsxaxis.max()-dsxaxis.min())*(dsx.max()-dsx.min()) )
		gaussian.getParameter(2).setLowerLimit(0)
		if self.maxwidth is not None:
			gaussian.getParameter(1).setUpperLimit(self.maxwidth)
		try:
			ansx = fitplot( dsxaxis, dsx, GeneticAlg(.001), [ gaussian, Offset( dsx.min(),dsx.max() ) ] )
		except java.lang.Exception:
			# Probably cannot find Plot_Manager on the finder
			ansx = Fitter.fit(dsxaxis, dsx, GeneticAlg(0.001), [ gaussian, Offset( dsx.min(),dsx.max() ) ] )
		#dsyaxis = dsyaxis.subSampleMean(dsy.dimensions[0]/2)
		#dsy = dsy.subSampleMean(dsy.dimensions[0]/2)
		#dsxaxis = dsxaxis.subSampleMean(dsx.dimensions[0]/2)
		#dsx = dsx.subSampleMean(dsx.dimensions[0]/2)		
		
		peaky = ansy.getParameters()[0].getValue()
		fwhmy = ansy.getParameters()[1].getValue()
		areay = ansy.getParameters()[2].getValue()
		offsety = ansy.getParameters()[3].getValue() / dsx.shape[0]
		
		peakx = ansx.getParameters()[0].getValue()
		fwhmx = ansx.getParameters()[1].getValue()
		areax = ansx.getParameters()[2].getValue()
		offsetx = ansx.getParameters()[3].getValue() / dsy.shape[0]
		
		background = (offsetx+offsety)/2.
		fwhmarea = fwhmy*fwhmx*pi/4
		topy = areay / fwhmy
		topx = areax / fwhmx
		
		if xoffset is None:
			xoffset = 0
		
		if yoffset is None:
			yoffset = 0
		
		return background, peakx+xoffset, peaky+yoffset, topx, topy, fwhmx, fwhmy, fwhmarea
Example #9
 def readout(self):
     if self.ds is None:
         if self.verbose:
             print "%s.readout() getting..." % self.name
             t = time.time()
         
         rawdata = self.pvs['DATA'].cagetArray()
         
         if self.verbose:
             dt, t = time.time()-t, time.time()
             print "%s.readout() (%fs) sign correction..." % (self.name, dt)
         
         data = map(unsign2, rawdata )
         
         if self.verbose:
             dt, t = time.time()-t, time.time()
             print "%s.readout() (%fs) creating DataSet..." % (self.name, dt)
         
         self.ds = DatasetFactory.createFromObject(data, [int(float(self.pvs['HEIGHT'].caget())), int(float(self.pvs['WIDTH'].caget()))])
         
         if self.verbose:
             dt, t = time.time()-t, time.time()
             print "%s.readout() (%fs) saving..." % (self.name, dt)
         
         if self.filepath is not None:
             self.saveImage(time.strftime("%Y%m%d%H%M%S.png", time.localtime()))
         
         if self.verbose:
             dt, t = time.time()-t, time.time()
             print "%s.readout() (%fs) ended" % (self.name, dt)
     
     return self.ds
	def testPainter(self):
		line = LinePainter(0, 0, 0, 1)
		ds = line.paint(DatasetFactory.zeros(2, 3))
		expected = [
				[1., 1., 0.],
				[0., 0., 0.]]
		self.assertEquals(ds2lofl(ds), expected)
 def testRenderShapes(self):
     self.testAddShape()
     ds = DatasetFactory.zeros(4, 5)
     result = self.renderer.renderShapes(ds)
     expected = [[1., 1., 0., 0., 0.], [0., 1., 1., 1., 1.],
                 [0., 1., 0., 0., 1.], [0., 1., 1., 1., 1.]]
     self.assertEquals(ds2lofl(result), expected)
Example #12
    def asynchronousMoveTo(self, newPos):
        '''
		Move the scanner to newPos,
		move the flipper to position 0, then read out all detectors: [d00, d01, ... , d0n],
		move the flipper to position 1, then read out all detectors: [d10, d11, ... , d1n],
		...
		move the flipper to position m, then read out all detectors: [dm0, dm1, ... , dmn].

		Create a DataSet that represents the above matrix for analysis.
		'''

        print 'move scanning device...'
        self.scanner.moveTo(newPos)

        self.readouts = []
        self.flipPositionsReadback = []
        for flipPos in self.flipPositions:
            print 'move ' + self.flipper.getName() + ' to ' + str(flipPos)
            self.flipper.moveTo(flipPos)
            self.flipPositionsReadback.append(self.flipper.getPosition())
            print 'counting'
            readoutp = self.countOnce()
            self.readouts.append(readoutp)
        self.dataset = DatasetFactory.createFromObject(
            self.readouts
        )  #ds is a new DataSet with dimension [numberOfFlipPositions, NumberOfDetectors];
        return
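The docstring above describes stacking one detector readout list per flipper position. As a minimal sketch (illustrative values only), DatasetFactory.createFromObject turns such a nested list into a 2D dataset:

from org.eclipse.january.dataset import DatasetFactory

# Hypothetical readouts: 2 flipper positions x 3 detectors (values are made up)
readouts = [[1.0, 2.0, 3.0],
            [4.0, 5.0, 6.0]]
ds = DatasetFactory.createFromObject(readouts)
# ds should have shape [2, 3]: one row per flip position, one column per detector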
Example #13
 def test_process_ones_max_at_right_edge(self):
     y = DatasetFactory.createFromObject(
         [0, 0, 0, 0, 0, 0, 0, 1, 1, 1.000000001])
     cen, height, width = self.p._process(self.x, y)
     self.assertAlmostEqual(cen, 18)
     self.assertAlmostEqual(height, 1.000000001)
     self.assertAlmostEqual(width, 3)
Example #14
 def test_process(self):
     y = DatasetFactory.createFromObject(
         [0, 0, 1, 1.000000001, 1, 1, 0, 1, 0, 0])
     cen, height, width = self.p._process(self.x, y)
     self.assertAlmostEqual(cen, 14)
     self.assertAlmostEqual(height, 1.000000001)
     self.assertAlmostEqual(width, 5)
Example #15
    def getNewEpicsData(self, offset, size):
        #To check the head
        head = self.getHead()
        if offset > head:
            #			print " No new data available. Offset exceeds Head(" + str(head) + ").";
            return False

#		print "New data available, Offset does not exceed Head(" + str(head) + ").";

        la = []
        #To get the waveform data from EPICS
        #		print "---> Debug: get waveform: start at: " + ctime();
        for i in range(self.numberOfDetectors):
            #			self.data[i]=self.chData[i].cagetArrayDouble();
            #TODO: make sure that the self.data[i] is a list
            #			self.data[i]=self.chData[i].cagetArrayDouble();
            self.data[i] = self.chData[i].getController().cagetDoubleArray(
                self.chData[i].getChannel(), head + 1)
            #			print "The type of subarray data from caget is: ", type(self.data[i]);
            #			print "The subarray data from caget is: ", self.data[i];
            la.append(self.data[i])
#		print "---> Debug: get waveform: end at: " + ctime();

        ds = DatasetFactory.createFromObject(
            la)  #ds is a new DataSet with dimension [numberOfDetectors, size];

        self.dataset = ds
        return True
Example #16
def arange(start, stop=None, step=1, dtype=None):
    '''Create a 1D dataset of given type where values range from specified start up to
    but not including stop in given steps

    Arguments:
    start -- optional starting value, defaults to 0
    stop  -- exclusive stop value
    step  -- difference between neighbouring values, defaults to 1
    dtype -- defaults to None which means the type is inferred from given start, stop, step values
    '''
    if stop is None:
        stop = start
        start = 0
    dtype = _translatenativetype(dtype)
    if dtype is None:
        if type(start) is _types.ComplexType or type(stop) is _types.ComplexType or type(step) is _types.ComplexType: 
            dtype = complex128
        elif type(start) is _types.FloatType or type(stop) is _types.FloatType or type(step) is _types.FloatType: 
            dtype = float64
        elif type(start) is _types.IntType or type(stop) is _types.IntType or type(step) is _types.IntType: 
            dtype = int32
        else:
            raise ValueError, "Unknown or invalid type of input value"
    if dtype == bool:
        return None

    return _df.createRange(start, stop, step, dtype.value)
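A short usage sketch for the arange wrapper above; the expected contents follow from the docstring and the dtype-inference rules in the code, though the exact dataset representation depends on the underlying implementation.

a = arange(5)                 # int32 dataset: [0, 1, 2, 3, 4]
b = arange(1.0, 2.0, 0.25)    # float64 dataset: [1.0, 1.25, 1.5, 1.75]
c = arange(4, dtype=float64)  # float64 dataset: [0.0, 1.0, 2.0, 3.0]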
Example #17
    def testSimpleCreation(self):
        #         file = NexusUtils.createNexusFile(self.abspath)
        #         g = file.getGroup("/ScanFileHolder:NXentry/datasets:NXdata", True)
        #         lazy = NexusUtils.createLazyWriteableDataset("heading1", Dataset.FLOAT64, [10, 100000], None, None)
        #         file.createData(g, lazy)
        #         dataIn = DatasetFactory.createRange(lazy.getSize(), Dataset.FLOAT64)
        #         dataIn.shape = lazy.getShape()
        #         lazy.setSlice(None, dataIn, SliceND.createSlice(lazy, None, None))
        #         file.close()
        #         os.remove(self.abspath)

        # This cannot work as the saved file is _NOT_ a valid SRS format
        #        sfh.save(AsciiScanFileHolderSaver(self.abspath+"_srs"));
        #
        dataIn = DatasetFactory.createRange(1000000, Dataset.FLOAT64)
        dataIn.shape = [10, 100000]
        sfh = ScanFileHolder()
        sfh.addDataSet("heading1", dataIn)
        #        sfh.load(SRSLoader(self.abspath+"_srs"));#@UndefinedVariable
        #        os.remove(self.abspath)
        sfh.save(SimpleNexusSaver(self.abspath))

        file = NexusUtils.openNexusFileReadOnly(self.abspath)
        g = file.getGroup("/ScanFileHolder:NXentry/datasets:NXdata", False)
        dataOut = file.getData(g, "heading1").getDataset().getSlice()
        file.close()
        if dataIn != dataOut:
            self.fail("dataIn != dataOut")
Example #18
    def reindet(self, thing):
        """ read in detector data you want to view per frame """
        try:
            #print "opening x"+thing["filename"]+"x"
            file = open(thing["filename"], 'rb')
            size = thing["x"] * thing["y"]
            if (thing["endian"] == 0):
                # default big endian/motorola
                endstr = ">"
            else:
                # little endian/intel
                endstr = "<"
            for i in range(thing["frames"]):
                list = unpack(endstr + size.__str__() + 'f',
                              file.read(size * 4))
                ds = DatasetFactory.createFromObject(list)

                if thing["x"] > 1 and thing["y"] > 1:
                    ds.setShape(thing["x"], thing["y"])
                ds.setName(
                    os.path.basename(thing["filename"]) + " frame " +
                    i.__str__())
                self.result.addDataSet(thing["name"] + i.__str__(), ds)
            file.close()
            self.detectors[thing["name"]] = thing["frames"]
        except IOError, message:
            print "Warning: Could not read (all of) the " + thing[
                "name"] + " data: " + message.__str__()
Example #19
 def renderShapes(self, targetDataset):
     # Make a blank data set
     # beware bizarre Jython bug where it cannot call correct method with only a shape argument
     image = DatasetFactory.zeros(targetDataset.getShape(), Dataset.FLOAT64)
     for shapeDict in self.shapesToPaint.values():
         for shape in shapeDict.values():
             image = shape.paint(image)
     return image
	def testPainter(self):
		rect = RectPainter(1, 1, 3, 4)
		ds = rect.paint(DatasetFactory.zeros(4, 5))
		expected = [
				[0., 0., 0., 0., 0.],
				[0., 1., 1., 1., 1.],
				[0., 1., 0., 0., 1.],
				[0., 1., 1., 1., 1.]]											
		self.assertEquals(ds2lofl(ds), expected)		
Example #21
 def __init__(self, shape=None, dtype=None, buffer=None, copy=False):
     # check what buffer is and convert if necessary
     if buffer is not None:
         self.__dataset = __cvt_jobj(_jinput(buffer), dtype=dtype, copy=copy, force=True)
         if shape is not None:
             self.__dataset.setShape(asIterable(shape))
     else:
         dtype = _translatenativetype(dtype)
         self.__dataset = _df.zeros(dtype.elements, asIterable(shape), dtype.value)
	def testRenderShapes(self):
		self.testAddShape()
		ds = DatasetFactory.zeros(4, 5)
		result = self.renderer.renderShapes(ds)
		expected = [
				[1., 1., 0., 0., 0.],
				[0., 1., 1., 1., 1.],
				[0., 1., 0., 0., 1.],
				[0., 1., 1., 1., 1.]]											
		self.assertEquals(ds2lofl(result), expected)
 def testRenderShapesOntoDataset(self):
     self.testAddShape()
     ds = DatasetFactory.zeros(4, 5)
     ds.set(10, (2, 2))
     result = self.renderer.renderShapesOntoDataset(ds)
     expected = [[-10.0, -10.0, 0.0, 0.0, 0.0],
                 [0.0, -10.0, -10.0, -10.0, -10.0],
                 [0.0, -10.0, 10.0, 0.0, -10.0],
                 [0.0, -10.0, -10.0, -10.0, -10.0]]
     self.assertEquals(ds2lofl(result), expected)
	def testRenderShapesOntoDataset(self):
		self.testAddShape()
		ds = DatasetFactory.zeros(4, 5)
		ds.set(10, (2, 2))
		result = self.renderer.renderShapesOntoDataset(ds)
		expected = [
				[-10.0, -10.0, 0.0, 0.0, 0.0],
				[0.0, -10.0, -10.0, -10.0, -10.0],
				[0.0, -10.0, 10.0, 0.0, -10.0],
				[0.0, -10.0, -10.0, -10.0, -10.0]]
		self.assertEquals(ds2lofl(result), expected)
Example #25
def zeros(shape, dtype=float64, elements=None):
    '''Create a dataset filled with 0'''
    dtype = _translatenativetype(dtype)
    if elements is not None:
        if type(dtype) is _types.FunctionType:
            dtype = dtype(elements)
        else:
            dtype.elements = elements
    elif type(dtype) is _types.FunctionType:
        raise ValueError, "Given data-type is a function and needs elements defining"

    return _df.zeros(dtype.elements, asIterable(shape), dtype.value)
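A brief usage sketch for zeros; the dtype constants such as float64 and int32 are assumed to come from the same module.

z1 = zeros((2, 3))          # 2x3 float64 dataset of zeros
z2 = zeros(5, dtype=int32)  # 1D int32 dataset: [0, 0, 0, 0, 0]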
Example #26
def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None):
    '''Create a 1D dataset from start to stop in given number of steps
    
    Arguments:
    start    -- starting value
    stop     -- stopping value
    num      -- number of steps, defaults to 50
    endpoint -- if True (default), include the stop value
    retstep  -- if False (default), do not include the calculated step value as part of return tuple
    dtype    -- defaults to None which means the type is inferred from the start and stop values
    '''
    if not endpoint:
        stop = ((num - 1) * stop + start)/num

    dtype = _translatenativetype(dtype)
    if dtype is None:
        dtype = _getdtypefromobj(((start, stop)))

        if dtype.value < float64.value:
            dtype = float64

    if dtype.value >= complex64.value:
        dtype = complex128

        if type(start) is _types.IntType:
            start = start+0j
        if type(stop) is _types.IntType:
            stop = stop+0j
        rresult = _df.createLinearSpace(start.real, stop.real, num, float64.value)
        iresult = _df.createLinearSpace(start.imag, stop.imag, num, float64.value)
        result = Sciwrap(_dsutils.createCompoundDataset(complex128.value, (rresult, iresult)))
        del rresult, iresult
    else:
        result = Sciwrap(_df.createLinearSpace(start, stop, num, dtype.value))

    if retstep:
        step = result[1] - result[0]
        return (result, step)
    else:
        return result
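A brief usage sketch for linspace, with values worked by hand from the docstring and code above.

x = linspace(0.0, 1.0, 5)                      # [0.0, 0.25, 0.5, 0.75, 1.0]
x, step = linspace(0.0, 1.0, 5, retstep=True)  # step == 0.25
h = linspace(0.0, 1.0, 5, endpoint=False)      # [0.0, 0.2, 0.4, 0.6, 0.8]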
Example #27
	def collectData(self):
		#rawdata = self.pvs['DATA'].cagetArray()
		if self.determine_data_pv_based_on_zoom:
			zoom_ordinal = int(float(self.pvs['ZOOM'].caget()))
			rawdata = self.pv_data[zoom_ordinal].get()
		else:
			rawdata = self.pv_data[0].get()
		data = map(unsign2, rawdata )
		self.ds = DatasetFactory.zeros(int(float(self.pvs['HEIGHT'].caget())), int(float(self.pvs['WIDTH'].caget())), data)
		self.last_image_number += 1
		self.last_filename = self._generateCurrentFilename()
		if self.filepath is not None:
			self.saveImage(self.last_filename);
Example #28
def full(shape, fill_value, dtype=None, elements=None):
    '''Create a dataset filled with fill_value'''
    dtype = _translatenativetype(dtype)
    if dtype is None:
        dtype = _getdtypefromobj(fill_value)
    if elements is not None:
        if type(dtype) is _types.FunctionType:
            dtype = dtype(elements)
        else:
            dtype.elements = elements
    elif type(dtype) is _types.FunctionType:
        raise ValueError, "Given data-type is a function and needs elements defining"

    return _df.zeros(dtype.elements, asIterable(shape), dtype.value).fill(fill_value)
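A brief usage sketch for full; when dtype is omitted it is inferred from fill_value.

f1 = full((2, 2), 7)  # 2x2 integer dataset filled with 7
f2 = full(3, 1.5)     # float64 dataset: [1.5, 1.5, 1.5]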
Example #29
 def collectData(self):
     #rawdata = self.pvs['DATA'].cagetArray()
     if self.determine_data_pv_based_on_zoom:
         zoom_ordinal = int(float(self.pvs['ZOOM'].caget()))
         rawdata = self.pv_data[zoom_ordinal].get()
     else:
         rawdata = self.pv_data[0].get()
     data = map(unsign2, rawdata)
     self.ds = DatasetFactory.zeros(int(float(self.pvs['HEIGHT'].caget())),
                                    int(float(self.pvs['WIDTH'].caget())),
                                    data)
     self.last_image_number += 1
     self.last_filename = self._generateCurrentFilename()
     if self.filepath is not None:
         self.saveImage(self.last_filename)
Example #30
    def plotWholeData(self, numberOfPoints):
        arrayEnergyPGM = self.energyPGM.cagetArrayDouble()
        arrayEnergyIDGAP = self.energyIDGAP.cagetArrayDouble()
        arrayChannel01 = self.channel01.cagetArrayDouble()
        arrayChannel02 = self.channel02.cagetArrayDouble()
        arrayChannel03 = self.channel03.cagetArrayDouble()
        arrayChannel04 = self.channel04.cagetArrayDouble()

        dataSetPGM = DatasetFactory.zeros(numberOfPoints)

        for i in range(numberOfPoints):
            dataSetPGM.set(arrayEnergyPGM[i], i)

        dvp = Plotter()
        dvp.plotOver("Data Vector", dataSetPGM.getIndexDataSet(), dataSetPGM)
Example #31
def logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None):
    '''Create a 1D dataset of values equally spaced on a logarithmic scale'''
    if not endpoint:
        stop = ((num - 1) * stop + start)/num

    dtype = _translatenativetype(dtype)
    if complex(start).imag == 0 and complex(stop).imag == 0:
        if dtype is None:
            dtype = _getdtypefromobj(((start, stop)))

            if dtype.value < float64.value:
                dtype = float64

        return _df.createLogSpace(start, stop, num, base, dtype.value)
    else:
        result = linspace(start, stop, num, endpoint, False, dtype)
        return _maths.power(base, result)
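A brief usage sketch for logspace; the generated values run from base**start to base**stop.

p = logspace(0.0, 2.0, 5)            # [1.0, ~3.16, 10.0, ~31.6, 100.0]
q = logspace(0.0, 3.0, 4, base=2.0)  # [1.0, 2.0, 4.0, 8.0]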
Example #32
	def getCameraData(self):
#		self.rawData = self.frameData.getController().cagetByteArray(self.frameData.getChannel());
		self.width = int(float(self.frameWidth.caget()));
		self.height = int(float(self.frameHeight.caget()));
		self.rawData = self.frameData.cagetArrayByte();

		#cast the byte array to double array for dataset 
		tempDoubleList = [float(x) for x in self.rawData];

#		self.dataset=DataSet.array(self.frameData.cagetArrayDouble());
		self.dataset = DatasetFactory.createFromObject(tempDoubleList)
		self.dataset.shape = [self.height, self.width]
#		self.data = ScanFileHolder();
#		self.data.addDataSet(self.getName(), self.dataset);
		self.data.setDataSet(self.getName(), self.dataset);

		return self.dataset;
Example #33
    def plotData(self):
        newHead = self.getDataNumbers()
        if self.arrayHead >= newHead:
            print "No new data added for plotting"
            return
        self.arrayHead = newHead

        #to get new data
        arrayEnergyPGM = self.energyPGM.cagetArrayDouble()
        arrayEnergyIDGAP = self.energyIDGAP.cagetArrayDouble()
        arrayChannel01 = self.channel01.cagetArrayDouble()
        arrayChannel02 = self.channel02.cagetArrayDouble()
        arrayChannel03 = self.channel03.cagetArrayDouble()
        arrayChannel04 = self.channel04.cagetArrayDouble()

        dataSetEnergyPGM = DatasetFactory.zeros(newHead)
        dataSetEnergyPGM.setName("PGM Energy")

        dataSetEnergyIDGAP = DatasetFactory.zeros(newHead)
        dataSetEnergyIDGAP.setName("ID Gap Energy")

        dataSetChannel01 = DatasetFactory.zeros(newHead)
        dataSetChannel01.setName("Channel 1")

        dataSetChannel02 = DatasetFactory.zeros(newHead)
        dataSetChannel02.setName("Channel 2")

        dataSetChannel03 = DatasetFactory.zeros(newHead)
        dataSetChannel03.setName("Channel 3")

        dataSetChannel04 = DatasetFactory.zeros(newHead)
        dataSetChannel04.setName("Channel 4")

        for i in range(0, newHead):
            #print i, arrayEnergyPGM[i], arrayEnergyIDGAP[i], arrayChannel01[i], arrayChannel02[i], arrayChannel03[i], arrayChannel04[i];
            dataSetEnergyPGM[i] = arrayEnergyPGM[i]
            dataSetEnergyIDGAP[i] = arrayEnergyIDGAP[i]
            dataSetChannel01[i] = arrayChannel01[i]
            dataSetChannel02[i] = arrayChannel02[i]
            dataSetChannel03[i] = arrayChannel03[i]
            dataSetChannel04[i] = arrayChannel04[i]
            #print i, arrayEnergyPGM[i], arrayEnergyIDGAP[i], arrayChannel01[i], arrayChannel02[i], arrayChannel03[i], arrayChannel04[i];

        dvp = Plotter()
        indexDataSet = dataSetEnergyPGM.getIndexDataSet()
        #dvp.plot("Data Vector", indexDataSet, [dataSetChannel01, dataSetChannel02, dataSetChannel03, dataSetChannel04]);
        dvp.plot("Data Vector", dataSetEnergyPGM, [
            dataSetChannel01, dataSetChannel02, dataSetChannel03,
            dataSetChannel04
        ])
    def _configureNewDatasetProvider(self, wait_for_exposure_callable=None):
        def createDatasetProvider(path):
            if path == '':
                raise IOError(
                    "Could no load dataset: %s does not have a record of the last file saved"
                    % self.name)
            path = self.replacePartOfPath(path)
            if path[0] != '/':
                #if relative path then we have to assume it's from the data directory
                path = gda.data.PathConstructor.createFromDefaultProperty(
                ) + "/" + path
            self.datasetProvider = LazyDataSetProvider(
                path, self.iFileLoader, self.fileLoadTimout,
                self.printNfsTimes, wait_for_exposure_callable)
            self.logger.debug("datasetProvider is {}", self.datasetProvider)

        if self.det.createsOwnFiles():
            path = self.getFilepath()
            createDatasetProvider(path)
        else:
            #			if not isinstance(dataset, DataSet, gda.device.detector.NXDetectorData):
            #				raise Exception("If a detector does not write its own files, ProcessingDetectorWrapper %s only works with detectors that readout DataSets.")
            dataset = self._readout()
            if isinstance(
                    dataset,
                    gda.device.detector.NXDetectorDataWithFilepathForSrs):
                path = dataset.getFilepath()
                self.logger.debug(
                    "dataset is NXDetectorDataWithFilepathForSrs: {}", dataset)
                createDatasetProvider(path)
                return
            elif isinstance(dataset, gda.device.detector.NXDetectorData):
                data = dataset.getNexusTree().getChildNode(1).getChildNode(
                    1).getData()
                dataset = DatasetFactory.createFromObject(data.getBuffer())
                dataset.setShape(data.dimensions)
                dataset.squeeze()
                self.logger.debug("dataset is NXDetectorData: {}", dataset)
            else:
                self.logger.debug("dataset is neither: {}", dataset)
            self.datasetProvider = BasicDataSetProvider(dataset)
            self.logger.debug("datasetProvider is {}", self.datasetProvider)
Example #35
 def testSimpleCreation(self):
     abspath = os.path.abspath(TestFileFolder + "/1.nxs")
     parentPath = os.path.split(abspath)[0]
     if not os.path.exists(parentPath):
         os.makedirs(parentPath)
     file = NexusUtils.createNexusFile(abspath)
     g = file.getGroup("/ScanFileHolder:NXentry/datasets:NXdata", True)
     lazy = NexusUtils.createLazyWriteableDataset("heading1",
                                                  Dataset.FLOAT64,
                                                  [10, 100000], None, None)
     file.createData(g, lazy)
     dataIn = DatasetFactory.createRange(lazy.getSize(), Dataset.FLOAT64)
     dataIn.shape = lazy.getShape()
     lazy.setSlice(None, dataIn, SliceND.createSlice(lazy, None, None))
     file.close()
     file = NexusUtils.openNexusFileReadOnly(abspath)
     g = file.getGroup("/ScanFileHolder:NXentry/datasets:NXdata", False)
     dataOut = file.getData(g, "heading1").getDataset().getSlice()
     file.close()
     self.assertEqual(dataIn, dataOut)
Example #36
	def reincal(self, thing):
		""" read in calibration data you want to view per channel for all frames """
		try:
			#print "opening x"+thing["filename"]+"x"
			file=open(thing["filename"],'rb')
			size=thing["x"]*thing["frames"]
			if (thing["endian"]==0):
				# default big endian/motorola
				endstr=">"
			else:
				# little endian/intel
				endstr="<"
			list=unpack(endstr+size.__str__()+'f', file.read(size*4))
			ds = DatasetFactory.createFromObject(list)
			if thing["x"] > 1 and thing["frames"] > 1:
				ds.setShape(thing["x"], thing["frames"])
			ds.setName(os.path.basename(thing["filename"])+" all frames")
			self.result.addDataSet(thing["name"], ds)
			file.close()
			self.detectors[thing["name"]]=thing["frames"]
		except IOError, message:
			print "Warning: Could not read (all of) the "+thing["name"]+" data: "+message.__str__()
Example #37
    def readout(self):
        if self.ds is None:
            if self.verbose:
                print "%s.readout() getting..." % self.name
                t = time.time()

            rawdata = self.pvs['DATA'].cagetArray()

            if self.verbose:
                dt, t = time.time() - t, time.time()
                print "%s.readout() (%fs) sign correction..." % (self.name, dt)

            data = map(unsign2, rawdata)

            if self.verbose:
                dt, t = time.time() - t, time.time()
                print "%s.readout() (%fs) creating DataSet..." % (self.name,
                                                                  dt)

            self.ds = DatasetFactory.createFromObject(data, [
                int(float(self.pvs['HEIGHT'].caget())),
                int(float(self.pvs['WIDTH'].caget()))
            ])

            if self.verbose:
                dt, t = time.time() - t, time.time()
                print "%s.readout() (%fs) saving..." % (self.name, dt)

            if self.filepath is not None:
                self.saveImage(
                    time.strftime("%Y%m%d%H%M%S.png", time.localtime()))

            if self.verbose:
                dt, t = time.time() - t, time.time()
                print "%s.readout() (%fs) ended" % (self.name, dt)

        return self.ds
Example #38
	def setUp(self):
		self.p = CenFromSPEC() # -> cen, height, width
		self.x = DatasetFactory.createFromObject([10.,11.,12.,13.,14.,15.,16.,17.,18.,19.])
		self.l = [0, 10, 20, 30, 40]
Example #39
    def process(self, dataset):

        mean = dataset.mean()
        return DatasetFactory.createFromObject(mean)
Example #40
	def setUp(self):
		self.x =    DatasetFactory.createFromObject([10.,11.,12.,13.,14.,15.,16.,17.,18.,19.,20.,21.,22.,23.,24.,25.,26.])
		self.peak = DatasetFactory.createFromObject([1.,1.1,1.5,2.,3.,5.,7.,9.,11.,9.,7.,5.,3.,2.,1.5,1.1,1.])
		#self.dip = DatasetFactory.createFromObject([5.,4.,3.,2.,1.,0.,1.,2.,3.,4.])
		self.p = None
Example #41
def zeros_like(a, dtype=None):
    z = _df.zeros(a)
    if dtype is not None:
        dtype = _translatenativetype(dtype)
        z = z.cast(dtype.value)
    return z
	def setUp(self):
		sfh = ScanFileHolder()
		sfh.addDataSet('i1', DatasetFactory.createFromObject([10,10,10, 20,20,20, 30,30,30]))
		sfh.addDataSet('i2', DatasetFactory.createFromObject([1,2,3, 1,2,3, 1,2,3]))
		sfh.addDataSet('e1', DatasetFactory.createFromObject([0,.1,.2,.3,.4,.5,.6, .7, .8, .9]))
		self.sfhi = SFHInterpolatorWithHashAccess(sfh, {'i1':1, 'i2':1}, ('i1','i2'))
Example #43
def ones_like(a, dtype=None):
    o = _df.ones(a)
    if dtype is not None:
        dtype = _translatenativetype(dtype)
        o = o.cast(dtype.value)
    return o
Example #44
def ones(shape, dtype=float64):
    '''Create a dataset filled with 1'''
    dtype = _translatenativetype(dtype)
    return _df.ones(dtype.elements, asIterable(shape), dtype.value)
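A brief usage sketch for ones, assuming the int32 dtype constant from the same module.

o1 = ones(3)                    # float64 dataset: [1.0, 1.0, 1.0]
o2 = ones((2, 2), dtype=int32)  # 2x2 int32 dataset of ones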
Example #45
	def test_process_ones_max_at_right_edge(self):
		y = DatasetFactory.createFromObject([0, 0, 0, 0, 0, 0, 0, 1, 1, 1.000000001])
		cen, height, width = self.p._process(self.x, y)
		self.assertAlmostEqual(cen, 18)
		self.assertAlmostEqual(height, 1.000000001)
		self.assertAlmostEqual(width, 3)
Example #46
	def test_process(self):
		y = DatasetFactory.createFromObject([0, 0, 1, 1.000000001, 1, 1, 0, 1, 0, 0])
		cen, height, width = self.p._process(self.x, y)
		self.assertAlmostEqual(cen, 14)
		self.assertAlmostEqual(height, 1.000000001)
		self.assertAlmostEqual(width, 5)
Example #47
def plotdata(filename, dataType=MAC, plotPane="DataPlot", Overlay=True):
    '''Plot existing data on the "MAC", "PSD" or "SRS" panel (if any); the default is MAC data on the DataPlot panel with overlay enabled.

       syntax:
               plotdata(filename, [MAC|SRS|PSD|RAW], ["MAC"|"Mythen"|"DataPlot"], [True|False])

               where:
                    filename: the filename string, in quotes.

                    dataType: the input data type or format
                        MAC - plot MAC data on the MAC panel
                        PSD - plot PSD data on the Mythen panel
                        SRS - plot SRS data on the SRS panel
                        RAW - plot Mythen raw data on the Mythen panel

                    plotPane: the graph panel to display the plot

                    Overlay:
                        True:  plot over the existing data on the graph (default)
                        False: clear existing plot data from the graph before plotting new data

    '''
    print("Data plotting to " + plotPane + " panel, please wait ...")
    if dataType == MAC:
        sfh = loadMacData(filename)
        dataset=sfh.getAxis(1)
        dataset.setName(filename)
        if Overlay:
            Plotter.plotOver(plotPane, sfh.getAxis(0), dataset)
        else:
            Plotter.plot(plotPane, sfh.getAxis(0), dataset)
    elif dataType == SRS:
        sfh = loadSRSData(filename)
        if Overlay:
            Plotter.plotOver(plotPane, sfh.getAxis(0), sfh.getAxis(1))
        else:
            Plotter.plot(plotPane, sfh.getAxis(0), sfh.getAxis(1))
    elif dataType == PSD:
        if not str(filename).find("mythen") == -1:
            # mythen data file
            dataset = loadMythenData(filename)
            data=dataset.getCountDataSet()
            data.setName(filename)
            if Overlay:
                Plotter.plotOver(plotPane, dataset.getAngleDataSet(), data)
            else:
                Plotter.plot(plotPane, dataset.getAngleDataSet(), data)
        else:
            parts = str(filename).split(File.separator)
            name=parts[-1]
            names=str(name).split(".")
            if representsInt(names[0]):
                # Mythen SRS file
                for each in loadMythenSRSFile(filename):
                    dataset = loadMythenData(str(each) + ".dat")
                    data=dataset.getCountDataSet()
                    data.setName(each)
                    if Overlay:
                        Plotter.plotOver(plotPane, dataset.getAngleDataSet(), data)
                    else:
                        Plotter.plot(plotPane, dataset.getAngleDataSet(), data)
            else:
                dataset = loadMythenData(filename)
                data=dataset.getCountDataSet()
                data.setName(filename)
                if Overlay:
                    Plotter.plotOver(plotPane, dataset.getAngleDataSet(), data)
                else:
                    Plotter.plot(plotPane, dataset.getAngleDataSet(), data)
    elif dataType == RAW:
        # mythen raw data file
        dataset = loadMythenRawData(filename)
        data = DatasetFactory.createFromObject(dataset.getCountArray())
        channeldata = DatasetFactory.createFromObject(dataset.getChannelArray())
        data.setName(filename)
        if Overlay:
            Plotter.plotOver(plotPane, channeldata, data)
            SDAPlotter.addPlot(plotPane, "", channeldata, data, "delta", "counts")
        else:
            Plotter.plot(plotPane, channeldata, data)
            SDAPlotter.plot(plotPane, "", channeldata, data, "delta", "counts")
    else:
        print "Data Type is not recognised or supported."
    print "Plotting completed."