def findPeaksAndTroughs(self, ydataset, delta, xdataset=None):
     '''Returns the peaks and troughs found in ydataset as lists of
     (peak_position, peak_value) tuples. If no x data set is provided,
     (peak_index, peak_value) tuples are returned instead.'''
     
     if xdataset is not None:
         xdataset = dnp.asarray(xdataset)
     return peakdet(dnp.asarray(ydataset), delta, xdataset)
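A minimal usage sketch, assuming dnp is scisoftpy and that an instance of the surrounding class is available as analyser (the instance name and the synthetic data are made up):

import scisoftpy as dnp

x = dnp.linspace(0.0, 10.0, 101)
y = dnp.exp(-((x - 5.0) ** 2))   # synthetic single peak at x = 5
# delta is the minimum rise/fall that counts as a peak or trough
peaks, troughs = analyser.findPeaksAndTroughs(y, 0.1, xdataset=x)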
	def test_process_negative_edge(self):
		# outer = 148, left half (first half of data though!)
		xds, yds = self.getInnerAndDetDatasetForGivenOuterValue(148.0)
		x = dnp.asarray(xds.data[:(len(xds) // 2)])
		y = dnp.asarray(yds.data[:(len(yds) // 2)])
		expected = 0.000000,  0.000000, -3.896408,  0.006343,  1.699449,  0.006343
		check(TwoGaussianEdges()._process(x, y), expected, TwoGaussianEdges().labelList)
	def test_process_positive_edge(self):
		# outer = 148, right half (second half of data though!)
		xds, yds = self.getInnerAndDetDatasetForGivenOuterValue(148.0)
		x = dnp.asarray(xds.data[(len(xds) // 2):])
		y = dnp.asarray(yds.data[(len(yds) // 2):])
		expected = -3.944682,  0.006067,  0.000000,  0.000000,  1.683015,  0.006067
		check(TwoGaussianEdges()._process(x, y), expected, TwoGaussianEdges().labelList)
 def testAsArray(self):
     ta = np.array([1, 2])
     ata = np.asarray(ta)
     self.assertEquals(ata.dtype, np.int_)
     self.checkitems([1, 2], ata)
     ata = np.asarray(ta, np.float_)
     self.assertEquals(ata.dtype, np.float_)
     self.checkitems([1, 2], ata)
def getDatasetFromLoadedFile(loadedFile, fieldName, scanDataPointCache=None):
	'''
	Gets the dataset called fieldName from an already loaded file (see loadScanFile(scanOb)).
	Returns the dataset.
	'''

	logger.debug('Getting data for %s, from %s (with cache=%s)', fieldName, loadedFile, scanDataPointCache)

	# Check if the field name is a full local name; if so, take just the last part,
	# which should be the node name. Keep the original fieldName, it might
	# be useful later
	if '.' in fieldName:
		# with scnname.fieldname strip off scnname
		strippedFieldName = fieldName.split('.')[-1]
	else: # fieldname doesn't require stripping
		strippedFieldName = fieldName

	# If we have a scanDataPointCache, use it for performance
	if scanDataPointCache:
		return dnp.asarray(scanDataPointCache.getPositionsFor(strippedFieldName))

	# Check if it's a NeXus file
	if isinstance(loadedFile, NXroot):
		# Note: Using first node returned, this might fail if there are multiple nodes with the same name!
		# Might be possible to disambiguate this using the original fieldname?
		loadedNodes = loadedFile.getnodes(strippedFieldName, group=False, data=True)
		if len(loadedNodes) == 0:
			raise KeyError("%s not found in data file" % strippedFieldName)

		# Find nodes which have a local_name
		probableNodes = [loadedNodes[_n] for _n in xrange(len(loadedNodes))
			if 'local_name' in loadedNodes[_n].attrs]
		# Use the first local_name which matches the fieldName or fall back on using the first node
		for node in probableNodes:
			if node.attrs['local_name'] == fieldName:
				lazyDataset = node
				break
		else:
			lazyDataset = loadedNodes[0]

		# Use slicing to load the whole lazy dataset into an array, i.e. a non-lazy dataset
		dataset = lazyDataset[...]

		return dataset

	elif isinstance(loadedFile, DataHolder):
		datasetList = loadedFile[strippedFieldName]

		# Convert the dataset (returned as a list) into an array
		dataset = dnp.asarray(datasetList)

		return dataset

	# Not a supported file type
	else:
		raise TypeError("Unsupported file type: %s" % loadedFile.__class__)
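A hedged usage sketch for getDatasetFromLoadedFile; loadScanFile is only referenced in the docstring above, and the scan object and field name here are hypothetical:

loaded = loadScanFile(scanOb)                               # assumed helper, see docstring
i0 = getDatasetFromLoadedFile(loaded, 'counterTimer01.I0')  # full scannable.fieldname form
logger.debug('I0 dataset shape: %s', i0.shape)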
 def singlePeakProcess(self, xDataSet, yDataSet):
     xarray = dnp.asarray(xDataSet)
     yarray = dnp.asarray(yDataSet)
     ymax=yarray.max()
     ymaxindex=yarray.argmax()
     #print "y max index %d" % ymaxindex
     maxpos=xarray[ymaxindex]
     basey=self.baseline(xarray, yarray, 1)
     halfmax=ymax/2+basey/2
     xcrossingvalues=dnp.crossings(yarray, halfmax, xarray)
     #print xcrossingvalues, maxpos
     if len(xcrossingvalues)>2:
         print "multiple peaks exists in the data set!, only process the highest peak."
     fwhmvalue=find_gt(xcrossingvalues, maxpos)-find_lt(xcrossingvalues,maxpos)
     return [(maxpos,ymax,basey,fwhmvalue)]
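A usage sketch with synthetic data; dnp is assumed to be scisoftpy and processor stands for an instance of the surrounding class:

x = dnp.linspace(-5.0, 5.0, 201)
y = dnp.exp(-x * x / 0.5)   # synthetic Gaussian peak centred on zero
result = processor.singlePeakProcess(x, y)
print result   # [(peak_position, peak_height, baseline, FWHM)]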
 def baseline(self, xdataset, ydataset, smoothness):
     '''Find the baseline y value for a peak in the y dataset.'''
     xdataset = dnp.asarray(xdataset)
     ydataset = dnp.asarray(ydataset)
     ymaxindex=ydataset.argmax()
     #TODO
     result=dnp.gradient(ydataset,xdataset)
     #derivative(xdataset, ydataset, smoothness)
     # take the flattest point (smallest |gradient|) on each side of the peak
     leftresult=result[:ymaxindex]
     rightresult=result[ymaxindex+1:]
     leftminderivativeindex=dnp.abs(leftresult).argmin()
     rightminderivativeindex=dnp.abs(rightresult).argmin()
     leftbasey=ydataset[leftminderivativeindex]
     rightbasey=ydataset[rightminderivativeindex+1+leftresult.shape[0]]
     # the baseline is the mean of the two flattest-point y values
     basey=(leftbasey+rightbasey)/2
     return basey
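A small sketch of this baseline estimate on a peak sitting on a constant offset (dnp assumed to be scisoftpy, analyser a hypothetical instance of the surrounding class):

x = dnp.linspace(0.0, 10.0, 101)
y = 2.0 + dnp.exp(-((x - 5.0) ** 2))   # peak of height ~1 on a flat offset of 2
print analyser.baseline(x, y, 1)       # should come out close to 2.0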
    def findBasePoints(self, xdataset, ydataset, delta, smoothness):
        '''Find a base point (position, value) for each stretch of data between
        successive peaks, taken as the flattest point of that stretch.'''
        xdataset = dnp.asarray(xdataset)
        ydataset = dnp.asarray(ydataset)
        peaks=self.findPeaksAndTroughs(ydataset, delta)[0]
        #print peaks
        yslices=[]
        xslices=[]
        startindex=0
        for index,value in peaks: #@UnusedVariable
            yslices.append(ydataset[startindex:index])
            xslices.append(xdataset[startindex:index])
            startindex=index+1
        yslices.append(ydataset[startindex:])
        xslices.append(xdataset[startindex:])

        bases=[]
        for xset, yset in zip(xslices, yslices):
            result=dnp.gradient(yset, xset)
            minimumderivativeindex=dnp.abs(result).argmin()
            bases.append((xset[minimumderivativeindex],yset[minimumderivativeindex]))
        #print "Base Points (position, value)   : ", bases
        return bases
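A hedged usage sketch (synthetic data; analyser and the delta/smoothness values are assumptions):

x = dnp.linspace(0.0, 20.0, 201)
y = dnp.exp(-((x - 5.0) ** 2)) + dnp.exp(-((x - 15.0) ** 2))   # two synthetic peaks
bases = analyser.findBasePoints(x, y, 0.1, 1)
for xpos, yval in bases:
    print "base point at x=%s, y=%s" % (xpos, yval)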
 def getDataSet(self, fName):
     # load the NeXus file, pull out the processed data and drop any singleton dimensions
     nxsTree = dnp.io.load(fName)
     dataSet = dnp.asarray(nxsTree['/entry/result/data'])
     self.dataSet = dataSet.squeeze()
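Usage sketch; the file path is hypothetical and owner stands for an instance of the surrounding class:

owner.getDataSet('/scratch/processed/result.nxs')   # hypothetical path
print owner.dataSet.shape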