Example #1
    def _SetupContainers(self, ImportData):
        from DataContainer.StorageArray import ChannelizedArray
        from numpy import float64

        self.ADC_Intervals = ImportData.ADC_Intervals()
        self.RouteChannelCodes = ImportData.RouteChannelCodes()
        self.ChannelCount = len(self.RouteChannelCodes)
        self._RawBinned = ChannelizedArray(len(self.ADC_Intervals),
                                           self.ChannelCount, 'uint64')
        self.PhotonCount = ChannelizedArray(1, self.ChannelCount, 'uint64')
        self.ChannelDelayCont = ChannelizedArray(1, self.ChannelCount,
                                                 'float64')
        self.ChannelDelayDiscrete = ChannelizedArray(1, self.ChannelCount,
                                                     'uint64')

        if self.ChannelCount == 2:
            self.NormG = float64(1.0)
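The ChannelizedArray class from DataContainer.StorageArray is not reproduced on this page. A minimal stand-in covering only the interface the snippets on this page rely on (constructor, 'Channel_N' column access, len, keys, ChangeColName, Copy) might look like the sketch below; MiniChannelizedArray is a hypothetical name, not the real implementation.

# Hypothetical stand-in for ChannelizedArray -- only the behaviour the
# surrounding examples actually use; the real class is not shown here.
from copy import deepcopy
from numpy import zeros


class MiniChannelizedArray(object):
    Prefix = 'Channel_'

    def __init__(self, Length, ChannelCount, NumpyDataType):
        # One named column per channel: 'Channel_1', 'Channel_2', ...
        self._Columns = dict(
            (self.Prefix + str(i + 1), zeros(Length, dtype=NumpyDataType))
            for i in range(ChannelCount))

    def __len__(self):
        return len(self._Columns[self.Prefix + '1'])

    def __getitem__(self, Key):
        return self._Columns[Key]

    def __setitem__(self, Key, Value):
        self._Columns[Key] = Value

    def keys(self):
        return self._Columns.keys()

    def ChangeColName(self, OldName, NewName):
        self._Columns[NewName] = self._Columns.pop(OldName)

    @property
    def Copy(self):
        return deepcopy(self)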
Example #2
 def Normalize(self, RawData):
     from DataContainer.StorageArray import ChannelizedArray
     Normed = ChannelizedArray(len(self._RawBinned), self.ChannelCount,
                               'float64')
     from numpy import array
     for CC in RawData.keys():
         # Normalize each channel so its bins sum to 1.0
         Normed[CC] = array(1.0 * RawData[CC] / (1.0 * sum(RawData[CC])),
                            dtype='float64')
     return Normed.Copy
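Per channel, Normalize just divides the binned counts by their total so each column sums to 1.0; a plain-numpy illustration with hypothetical counts:

from numpy import array

Counts = array([4, 6, 10], dtype='float64')   # hypothetical binned counts
Normed = Counts / Counts.sum()                # -> [0.2, 0.3, 0.5], sums to 1.0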
Example #3
 def _CreateTACNormData(self):
     if self.HasNorm():
         from DataContainer.StorageArray import ChannelizedArray
         self._TACNormedData = ChannelizedArray(len(self._RawBinned),
                                                self.ChannelCount,
                                                'float64')
         from numpy import zeros
         for i in range(len(self.RouteChannelCodes)):
             CC = self._TACNormedData.Prefix + str(i + 1)
             NonZero = (self.Norm.Norm()[CC] != 0)
             self._TACNormedData[CC] = zeros(len(NonZero), dtype='float64')
             self._TACNormedData[CC][NonZero] = (
                 1.0 * self._RawBinned[CC][NonZero]) / (
                     1.0 * self.Norm._RawBinned[CC][NonZero])
             self._TACNormedData[CC] = (1.0 * self._TACNormedData[CC]) / (
                 1.0 * len(self._TACNormedData[CC]))
             try:
                 # Drop any stale cached normalization
                 del self._Normed
             except AttributeError:
                 pass
         return True
     else:
         print "No Norm Data Present"
         return False
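_CreateTACNormData divides the raw bins by the norm curve only where the norm is nonzero, then applies the same 1/len scaling; a plain-numpy restatement with hypothetical data:

from numpy import array, zeros

Raw = array([0., 8., 6., 0.])        # hypothetical raw bins
NormCurve = array([0., 4., 3., 2.])  # hypothetical normalization bins

NonZero = (NormCurve != 0)
Ratio = zeros(len(Raw), dtype='float64')
Ratio[NonZero] = Raw[NonZero] / NormCurve[NonZero]   # zero-norm bins stay 0
Ratio = Ratio / len(Ratio)                           # same 1/len scaling as above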
Example #4
    BackgroundR = BGIV * BackgroundAmp
    Result = SignalR + BackgroundR + Offset + BIV

    #return Result/max(Result)
    return Result, SignalR, BackgroundR


try:
    FitNormed
    Channel1
    Channel2
except NameError:
    from ModelTesting import *

from DataContainer.StorageArray import ChannelizedArray
PlotArray = ChannelizedArray(len(Time), 3, 'float64')
PlotArray.ChangeColName('Channel_1', 'r(t)_1')
PlotArray.ChangeColName('Channel_2', 'r(t)_2')
PlotArray.ChangeColName('Channel_3', 'r(t)_{Avg}')

#Vars=[0.0676060977225,
#    4.06009692902,
#    1.00602234087,
#    0.0,
#    -0.14037043243,
#    0.672045359399,
#    0.0,
#    0.0,
#    0.0,
#    0.0,
#    0.0]
Example #5
    def _LoadAllLocalVars(self, Group, PrintOutput='Failed'):
        """
		PrintOutput: All, Failed, False
		"""
        from DataContainer.StorageArray import ChannelizedArray
        from numpy import ndarray, copy
        for var in Group:
            if PrintOutput == 'All':
                print "\n" + str(var.name) + " : " + str(var.ndim)
                print str(var.name) + " : " + str(var.flavor)
                print str(var.name) + " : " + str(var.shape)
                print str(var.name) + " : " + str(len(var))
                print str(var.name) + " : " + str(var.get_attr('VarType'))
                print str(var.name) + " : " + str(var.dtype)

            try:
                if var.get_attr('VarType') == 'ndarray':
                    vars(locals()['self'])[var.name] = var.read()

                elif var.get_attr('VarType') == 'ChannelizedArray':
                    vars(locals()['self'])[var.name] = ChannelizedArray(
                        len(var),
                        ChannelCount=len(var.colnames),
                        NumpyDataType=var.dtype[0])
                    vars(locals()['self'])[var.name]._Data = var.read()
                    vars(locals()['self'])[var.name]._SetItems()
            except:
                if PrintOutput == 'Failed':
                    print 'Failed to load: ' + str(var.name)


        return True
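A minimal PyTables round-trip sketch (hypothetical file and group names) of the 'VarType' attribute convention that the save and load routines use to decide how each node is restored:

import tables
from numpy import arange

fh = tables.open_file('demo.h5', mode='w')        # hypothetical file name
grp = fh.create_group(fh.root, 'Run1')            # hypothetical group name
node = fh.create_array(grp, 'ADC_Intervals', obj=arange(10), title='')
node.set_attr('VarType', 'ndarray')               # tag consumed by the loader
fh.flush()

for var in grp:                                   # same iteration as the loader above
    if var.get_attr('VarType') == 'ndarray':
        print var.name + " : " + str(var.read())
fh.close()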
Example #6
        return Amp * exp(-Time / (1.0 * Lifetime))


def AddIRF(IRF, Decay, Shift=0):
    return Decay * cumsum(roll(IRF, -Shift))


def AddIRF2(IRF, Decay):
    return IRF * cumsum(Decay)


from Display import ADCPlot
from DataContainer.StorageArray import ChannelizedArray
from numpy import sum, roll, argmax

SummedScatter = ChannelizedArray(len(Data[0].ADC_Intervals), 2, 'float64')
for D in Data:
    print "-------------------------"
    Temp = ChannelizedArray(len(Data[0].ADC_Intervals), 2, 'float64')
    for i in range(D.ChannelCount):
        Nonzero = D._RawBinned['Channel_' + str(i + 1)].nonzero()
        Temp['Channel_' + str(i + 1)][Nonzero] = (
            1.0 * D._RawBinned['Channel_' + str(i + 1)][Nonzero])
        Temp['Channel_' +
             str(i + 1)] = roll(Temp['Channel_' + str(i + 1)],
                                (50 - argmax(Temp['Channel_' + str(i + 1)])))
        Temp['Channel_' + str(i + 1)] = Temp['Channel_' + str(i + 1)] / max(
            Temp['Channel_' + str(i + 1)])
        SummedScatter['Channel_' +
                      str(i + 1)] += Temp['Channel_' + str(i + 1)] / sum(
                          Temp['Channel_' + str(i + 1)])
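The loop above rolls each channel so its peak lands at bin 50 and rescales it before accumulating; on a single hypothetical trace the alignment step reduces to:

from numpy import exp, arange, roll, argmax

Trace = exp(-((arange(200) - 120.0) ** 2) / 50.0)  # hypothetical peaked curve
Aligned = roll(Trace, 50 - argmax(Trace))          # move the peak to bin 50
Aligned = Aligned / Aligned.sum()                  # unit area before summing channels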
Example #7
class BaseData(object):
    """
	Base Data Class : NOT FOR DIRECT USE
	"""
    def __init__(self, ImportData=None, PyTablesGroup=None):
        from numpy import int16
        from tables import Group
        self.BaseDataVersion = int16(1)
        self._DataType = 'BaseData'
        if ImportData is not None:
            self._SetupContainers(ImportData)
            self._SetupInfoTraits(ImportData)
            # Setup RawBinned Data in Case the ImportData Object is Garbage Collected
            self._BinData(ImportData)
            return None
        elif isinstance(PyTablesGroup, Group):
            self._LoadAllLocalVars(PyTablesGroup)

    def _SetupInfoTraits(self, ImportData):
        self.TotalCounts = ImportData.PhotonCount
        self.TotalTime = ImportData.TotalTime
        self.TimeCollected = ImportData.TimeCollected
        self.DateCollected = ImportData.DateCollected
        self.TAC_Gain = ImportData.TAC_Gain
        self.Desc = ""
        self.Filename = ImportData.Filename
        self.Folder = ImportData.Folder

    def _SetupContainers(self, ImportData):
        from DataContainer.StorageArray import ChannelizedArray
        from numpy import float64

        self.ADC_Intervals = ImportData.ADC_Intervals()
        self.RouteChannelCodes = ImportData.RouteChannelCodes()
        self.ChannelCount = len(self.RouteChannelCodes)
        self._RawBinned = ChannelizedArray(len(self.ADC_Intervals),
                                           self.ChannelCount, 'uint64')
        self.PhotonCount = ChannelizedArray(1, self.ChannelCount, 'uint64')
        self.ChannelDelayCont = ChannelizedArray(1, self.ChannelCount,
                                                 'float64')
        self.ChannelDelayDiscrete = ChannelizedArray(1, self.ChannelCount,
                                                     'uint64')

        if self.ChannelCount == 2:
            self.NormG = float64(1.0)

    def _BinData(self, ImportData):
        from numpy import bincount, array, uint32, float64, sum
        for i, CC in enumerate(self.PhotonCount.keys()):
            # Create the RawBinned Data
            self._RawBinned[CC] = array(bincount(
                ImportData.ADC[ImportData.Route == (
                    self.RouteChannelCodes[i])],
                minlength=ImportData.ADC_Bins - 1),
                                        dtype='uint64')
            # Set Total Photons per Channel
            self.PhotonCount[CC] = sum(self._RawBinned[CC],
                                       dtype=self.PhotonCount[CC].dtype)
            self.ChannelDelayCont[CC] = float64(0.0)
            self.ChannelDelayDiscrete[CC] = uint32(0)

    def _CompareDataProperties(self,
                               DataSet1,
                               DataSet2=None,
                               CompareList=None):
        if not isinstance(DataSet2, BaseData):
            DataSet2 = self
        if not isinstance(CompareList, list):
            CompareList = ['TAC_Gain', 'ChannelCount']
        CurrentState = True
        for item in CompareList:
            if getattr(DataSet1, item) != getattr(DataSet2, item):
                print item + " : Does Not Match!!!"
                CurrentState = False
        return CurrentState

    def RawData(self):
        return self._RawBinned.Copy

    def Normalize(self, RawData):
        from DataContainer.StorageArray import ChannelizedArray
        Normed = ChannelizedArray(len(self._RawBinned), self.ChannelCount,
                                  'float64')
        from numpy import array
        for CC in RawData.keys():
            # Normalize each channel so its bins sum to 1.0
            Normed[CC] = array(1.0 * RawData[CC] / (1.0 * sum(RawData[CC])),
                               dtype='float64')
        return Normed.Copy

    def ClearOutliers(self, DataSet, StdCutoff, Contiguous):
        from numpy import float64
        for CC in DataSet.keys():

            NormOutlierIndices = self.OutlierIndices(DataSet[CC],
                                                     StdCutoff=StdCutoff,
                                                     Contiguous=Contiguous)
            DataSet[CC][NormOutlierIndices] = float64(0.0)
        return self.Normalize(DataSet)

    def OutlierIndices(self, DataSet, StdCutoff, Contiguous):
        from numpy import abs, mean, std, ones, argmax, argmin, zeros
        Deviants = abs((DataSet - mean(
            DataSet[DataSet != 0]))) > StdCutoff * std(DataSet[DataSet != 0])
        if not Contiguous:
            if Deviants.all():
                return zeros(len(DataSet), dtype='bool')
            return Deviants
        FirstIndex = argmin(Deviants)
        # argmax returns an offset into the slice, so convert it to an absolute index
        LastIndex = FirstIndex + argmax(Deviants[FirstIndex:])
        Deviants = ones(len(Deviants), dtype='bool')
        Deviants[FirstIndex:LastIndex] = False
        if Deviants.all():
            return zeros(len(DataSet), dtype='bool')
        return Deviants

    def _SaveAllLocalVars(self, Filehandle, Group, PrintOutput='Failed'):
        """
		PrintOutput: All, Failed, False
		"""
        from DataContainer.StorageArray import ChannelizedArray
        from numpy import ndarray, array
        self._DataType
        for var in vars(self):
            if PrintOutput == 'All':
                print "\n" + str(var) + " : " + str(
                    type(vars(locals()['self'])[var]))
            if str(var) == 'Norm':
                continue
            try:
                if isinstance(vars(locals()['self'])[var], (ndarray, list)):
                    if PrintOutput == 'All':
                        print "Trying to save: " + str(var) + " : As a ndarray"
                    Filehandle.create_array(Group,
                                            var,
                                            obj=vars(locals()['self'])[var],
                                            title='')
                    Group._f_get_child(var).set_attr('VarType', 'ndarray')

                elif isinstance(vars(locals()['self'])[var], ChannelizedArray):
                    try:
                        CurrentVar = vars(locals()['self'])[var]
                        if PrintOutput == 'All':
                            print "Trying to save: " + str(
                                var) + " : As a ChannelizedArray"
                        if PrintOutput == 'All': print CurrentVar._Data
                        if PrintOutput == 'All': print CurrentVar
                        try:
                            CurrentLength = len(CurrentVar)
                        except:
                            CurrentLength = 1
                        if PrintOutput == 'All':
                            print "Expected Length: " + str(CurrentLength)
                        Filehandle.create_table(Group,
                                                var,
                                                filters=None,
                                                expectedrows=CurrentLength,
                                                description=CurrentVar._Data)
                        Group._f_get_child(var).set_attr(
                            'VarType', 'ChannelizedArray')
                        del CurrentLength
                        del CurrentVar
                    except:
                        import sys
                        e = sys.exc_info()
                        print e

                else:
                    if PrintOutput == 'All':
                        print "Trying to save: " + str(var) + " : As a Scalar"
                    Filehandle.create_array(Group,
                                            var,
                                            obj=array(
                                                vars(locals()['self'])[var]),
                                            title='')
                    Group._f_get_child(var).set_attr('VarType', 'ndarray')

            except:
                if (PrintOutput == 'All') or (PrintOutput == 'Failed'):
                    print "Failed to save: " + str(var)

        Filehandle.flush()
        return True

    def _LoadAllLocalVars(self, Group, PrintOutput='Failed'):
        """
		PrintOutput: All, Failed, False
		"""
        from DataContainer.StorageArray import ChannelizedArray
        from numpy import ndarray, copy
        for var in Group:
            if PrintOutput == 'All':
                print "\n" + str(var.name) + " : " + str(var.ndim)
                print str(var.name) + " : " + str(var.flavor)
                print str(var.name) + " : " + str(var.shape)
                print str(var.name) + " : " + str(len(var))
                print str(var.name) + " : " + str(var.get_attr('VarType'))
                print str(var.name) + " : " + str(var.dtype)

            try:
                if var.get_attr('VarType') == 'ndarray':
                    vars(locals()['self'])[var.name] = var.read()

                elif var.get_attr('VarType') == 'ChannelizedArray':
                    vars(locals()['self'])[var.name] = ChannelizedArray(
                        len(var),
                        ChannelCount=len(var.colnames),
                        NumpyDataType=var.dtype[0])
                    vars(locals()['self'])[var.name]._Data = var.read()
                    vars(locals()['self'])[var.name]._SetItems()
            except:
                if PrintOutput == 'Failed':
                    print 'Failed to load: ' + str(var.name)


        return True
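OutlierIndices flags bins whose distance from the mean of the nonzero bins exceeds StdCutoff standard deviations; with hypothetical data and StdCutoff = 1.5 the test reduces to:

from numpy import array, abs, mean, std

DataSet = array([5., 6., 0., 5., 40.])             # hypothetical channel bins
NonZero = DataSet[DataSet != 0]
Deviants = abs(DataSet - mean(NonZero)) > 1.5 * std(NonZero)
# Deviants -> [False, False, False, False, True]; ClearOutliers would zero bin 4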
Example #8
from re import search
from numpy import array
from TTSPCfromBH.DataCalc import ADCData

Data = []          # collected ADCData objects
ChannelCount = 0

for g in file_handle.walk_groups():
    if g == file_handle.root:
        continue
    Result = search('(.*(mg water).*) \(Group\)', str(g))
    if Result is None or Result.group(1) == '':
        continue

    if g.TAC_Gain.read() == 3 and g.ChannelCount.read() == 2:
        if g._DataType.read() == "ADCData":
            Data.append(ADCData(PyTablesGroup=g))
        ChannelCount += g.ChannelCount.read()

from DataContainer.StorageArray import ChannelizedArray
TotalNormInt = ChannelizedArray(len(Data[0].ADC_Intervals), 2, 'uint64')
TotalNormInt._Data = file_handle.get_node('/TotalNorm')._RawBinned.read()
TotalNormInt._SetItems()
TotalNorm = ChannelizedArray(len(Data[0].ADC_Intervals), 2, 'float64')
"""
Scatter Data:
(Removed) /2013-08-28 exp 4 mg water 150 degree light
(Removed) /2013-08-28 exp 5 mg water 150 degree light again last time objective dried out
(Removed) /2013-08-28 exp 6 mg water 150 degree light again
/2013-09-05 exp 4 mg water 150 pol
/2013-09-05 exp 5 mg water 240 pol
/2013-09-05 exp 6 mg water 200 pol
/2013-09-05 exp 7 mg water cp
"""

Data = array(Data[3:])
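The walk_groups filter above keeps only nodes whose repr matches the 'mg water ... (Group)' pattern; the same regex test on a hypothetical node string:

from re import search

NodeRepr = '/2013-09-05 exp 4 mg water 150 pol (Group)'   # hypothetical str(g)
Result = search(r'(.*(mg water).*) \(Group\)', NodeRepr)
if Result is not None and Result.group(1) != '':
    print Result.group(1)      # -> /2013-09-05 exp 4 mg water 150 pol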
Example #9
    exec(open("SetupModulePaths.py").read())

from TTSPCfromBH.DataCalc import ADCData
from TTSPCfromBH.NormCalc import Norm
from TTSPCfromBH.TTPhotonDataImport import Data
import numpy

from DataContainer.StorageArray import ChannelizedArray
try:
    PlotArray
except:
    PlotArray = ChannelizedArray(4095, 3, 'float64')

RunName = "C1"
PlotArray.ChangeColName('Channel_1', RunName)
RunName = "C2"
PlotArray.ChangeColName('Channel_2', RunName)
RunName = "Both"
PlotArray.ChangeColName('Channel_3', RunName)
#RunName = "488 High 200"
#PlotArray.ChangeColName('Channel_4', RunName)
#RunName = "H2O 200"
#PlotArray.ChangeColName('Channel_5', RunName)

#FileName = 'exp 10 ncp low count rate 150 pol very long'
#FolderName = 'Single_Molecule_Data/local_data/September/04/'
Example #10
BG = 'Water'
#DataSet = '/2013-09-01 exp 8 1 nm dna in buffer cp'; G = 1.0; BG = 'Buffer'
#DataSet = '/2013-09-04 exp 5 dna low count rate 200 pol'; G = 2.0; BG = 'Buffer'
#DataSet = '/2013-09-02 exp 12 buffer 200 pol'; G = 2.0; BG = 'Buffer'
#DataSet = '/2013-09-02 exp 13 buffer cp'; G = 1.0; BG = 'Buffer'
#DataSet = '/2013-09-03 exp 5 low count rate alexa 200 pol'; G = 2.0; BG = 'Buffer'
#DataSet = '/2013-09-03 exp 2 low count rate alexa cp'; G = 1.0; BG = 'Buffer'
#DataSet = '/2013-09-04 exp 6 dna low count rate cp'; G = 1.0; BG = 'Buffer'
#DataSet = '/2013-09-01 exp 5 1 nm dna in buffer 150 pol'; G = 2.0; BG = 'Buffer'
#DataSet = '/2013-09-06 exp 2 ncp high count rate 150 pol'; G = 2.0; BG = 'Buffer'

from DataContainer.StorageArray import ChannelizedArray

Length = len(Time)
Dtype = 'float64'
Data = ChannelizedArray(Length, 2, Dtype)
Data._Data = FileHandle.get_node(DataSet)._RawBinned.read()

AlignedRaw = ChannelizedArray(Length, 4, 'float64')
AlignedRawNonzero = dict()
for Index in Data.keys():
    Nonzero = Norm[Index].nonzero()

    AlignedRaw[Index][Nonzero] = Data[Index][Nonzero] / Norm[Index][Nonzero]
    AlignedRaw[Index] = roll(AlignedRaw[Index],
                             (Shift + DataShift - argmax(AlignedRaw[Index])))

    AlignedRawNonzero[Index] = AlignedRaw[Index].nonzero()[0]
    Nonzero = AlignedRaw[Index].nonzero()[0]
    NonzeroLength = len(Nonzero)
Example #11
        BestTime = Times[i]
    print "Roll Value: %s" % Rolls[i]
    print MethodList[i]
    print Result.x
    print Result.fun
    print "Time: %s" % Times[i]
    print "!------------------------------!"
print "Best Values: %s" % Vars
print "Best Roll Value: %s" % RollValue
print "LowestFuncValue: %s" % LowestFuncValue
print "Method: %s" % MethodValue
print "Time: %s" % BestTime
# ------------------------------------------------
from DataContainer.StorageArray import ChannelizedArray

PlotArray = ChannelizedArray(len(Time), 2, 'float64')
PlotArray.ChangeColName('Channel_1', 'Data')
PlotArray.ChangeColName('Channel_2', 'Fit')
PlotArray['Data'] = Data
PlotArray['Fit'] = FitNormed(Data,
                             Time,
                             Vars[0],
                             Vars[1],
                             Vars[2],
                             RollValue,
                             Start=0,
                             End=len(Time))

Plot = True

if Plot:
Example #12
#                     YAxis="Intensity (Counts)"))
#     Show = False
#     if Show:
#         for Plot in Plots:
#                 Plot.show()
#
#     Save = False
#     if Save:
#         for i, Plot in enumerate(Plots):
#             Plot.savefig(str(i)+'.png', dpi=300)

    from DataContainer.StorageArray import ChannelizedArray
    from numpy import arange, exp
    from numpy.random import rand
    Time = arange(0, 4095, 1) * 16.6666 / 4095.0
    Testing = ChannelizedArray(len(Time), 2, 'float64')
    Testing.ChangeColName('Channel_1', 'Data')
    Testing.ChangeColName('Channel_2', 'Fit')
    Testing['Fit'] = exp(-Time / 4.1)
    Testing['Data'] = exp(-Time / 4.1) + (rand(4095) - 0.5) * 0.1 * Time * 0.01

    print "\nPlotting:"
    ForkDisplay(Time,
                Testing,
                Title="Display Testing",
                YAxis="Intensity (Normalized)",
                Residuals=((Testing['Data'], Testing['Fit']), ),
                Corr=True)

    print "\nPlotting:"
    ForkDisplay(Time,
Example #13
for i in StoredValues:
    print i

print "Final Values:"
for i, name in enumerate(VarNames):
    print "    %s : %s" % (name, Vars[i])

print "!------------------------------!"
print DataSet
print "LowestFuncValue: %s" % LowestFuncValue
print "Method: %s" % MethodValue
print "Time: %s" % BestTime

# ------------------------------------------------
from DataContainer.StorageArray import ChannelizedArray
PlotArray = ChannelizedArray(len(Time), 6, 'float64')
PlotArray.ChangeColName('Channel_1', 'Data')
PlotArray.ChangeColName('Channel_2', 'Data_{Fit}')
PlotArray.ChangeColName('Channel_3', 'Lifetime_1')
PlotArray.ChangeColName('Channel_4', 'Lifetime_2')
PlotArray.ChangeColName('Channel_5', 'Background')
PlotArray.ChangeColName('Channel_6', 'Fit')

CurrentData = FitChannel1 + Vars[2] * FitChannel2
CurrentData = CurrentData / max(CurrentData)
PlotArray['Data_{Fit}'][:len(CurrentData)] = CurrentData[:len(CurrentData)]

Data = Channel1 + Vars[2] * Channel2
Data = Data / max(Data)

PlotArray['Data'] = Data
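The two channels are combined with the weight Vars[2] and peak-normalized before plotting; numerically, with hypothetical traces and a hypothetical weight:

from numpy import array

Channel1 = array([2., 10., 4.])       # hypothetical channel traces
Channel2 = array([1., 5., 2.])
Weight = 0.5                          # plays the role of Vars[2] above

Combined = Channel1 + Weight * Channel2
Combined = Combined / max(Combined)   # -> [0.2, 1.0, 0.4]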
Example #14
GIRF = exp(-(LongTime - StartTime)**2.0 / (2.0 * Width**2.0)) / sqrt(2.0 * pi)
GIRF = GIRF / sum(GIRF)

Decay = r_[zeros(len(Time)), Exp(Time, 1.0, 4.1)]
Convolved = RawConvolveFFT(Decay, AlignedIRF, None)
Convolved0 = RawConvolveFFT(Decay, AlignedIRF0, None)
GConvolved = RawConvolveFFT(Decay, GIRF, None)

TimeOffset = 0.0

IRFI = InterpolatedUnivariateSpline(LongTime, AlignedIRF)
IRFIV = IRFI(LongTime - TimeOffset)
IRFIV = IRFIV / sum(IRFIV)
IRFIVConv = RawConvolveFFT(Decay, IRFIV, None)

Plotting = ChannelizedArray(len(LongTime), 1, 'float64')

Plotting.ChangeColName("Channel_1", "Convolved")
Plotting['Convolved'] = roll(Convolved / max(Convolved), 4095)

Plotting = Plotting.AddCol('Gauss')
Plotting['Gauss'] = GIRF / max(GIRF)

Plotting = Plotting.AddCol('GConv')
Plotting['GConv'] = roll(GConvolved / max(GConvolved), 4095)

#Plotting = Plotting.AddCol('Scatter')
#Plotting['Scatter'] = AlignedIRF/max(AlignedIRF)

Plotting = Plotting.AddCol('IRFIV')
Plotting['IRFIV'] = IRFIV / max(IRFIV)
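RawConvolveFFT is project code not shown on this page; under the assumption that it performs a linear FFT convolution of the decay with a normalized IRF, an equivalent sketch with scipy (hypothetical IRF parameters and an assumed double-length time axis) would be:

from numpy import arange, exp, zeros, r_, roll
from scipy.signal import fftconvolve

Time = arange(0, 4095, 1) * 16.6666 / 4095.0            # same axis as Example #12
LongTime = arange(0, 2 * 4095, 1) * 16.6666 / 4095.0    # assumed double-length axis

# Hypothetical Gaussian IRF (StartTime = 2.0, Width = 0.1) and single-exponential decay
GIRF = exp(-(LongTime - 2.0) ** 2.0 / (2.0 * 0.1 ** 2.0))
GIRF = GIRF / GIRF.sum()
Decay = r_[zeros(len(Time)), exp(-Time / 4.1)]

# Stand-in for RawConvolveFFT: full linear convolution, truncated to the axis length
GConvolved = fftconvolve(Decay, GIRF)[:len(LongTime)]
GConvolved = roll(GConvolved / GConvolved.max(), 4095)  # same scaling/roll as above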