def evaluateSummationITD(fitLabels):
    # This function is specific to the summation fits.
    # For each (momentum, z3, gamma) key it forms the reduced ITD double ratio
    # from summation-fit matrix elements and stores bins/mean in
    # self.bins[fit][dkey] / self.mean[fit][dkey].
    def getOptimalSummKeys(z3_c, gamma):
        # Build the (z3, gamma) lookup key for every position tag in self.pzPos;
        # tags containing 'z0' use z3 = 0 instead of the current z3.
        z3_0 = 0
        kp = {}
        for pz in self.pzPos:
            zT = z3_0 if 'z0' in pz else z3_c
            kp[pz] = (zT, gamma)
        return kp
    #-------------------

    # Zero momentum
    mTag_0 = tags.momString([0, 0, 0])

    # Temporary variable: holds the matrix-element bins entering the double ratio
    summBins = {}
    for ri in self.RI:
        summBins[ri] = {}

    for fit in fitLabels:  # These are just the summation fit labels!
        for mom in self.momAvg:
            mTag = tags.momString(mom)
            dispListAvg = self.dispAvg[mTag]
            for z3 in dispListAvg:
                for gamma in self.gammaList:
                    dkey = (mTag, z3, gamma)
                    self.bins[fit][dkey] = {}
                    self.mean[fit][dkey] = {}

                    # Need separate for-loop for these because all values of ri
                    # are needed in each ri-iteration further down
                    tOpt = {}
                    dkeyS = {}
                    for ri in self.RI:
                        tOpt[ri] = getSelectedFits(fit, ri, mTag, z3)
                        dkeyS[ri] = getOptimalSummKeys(z3, gamma)

                    # The off-center values are only needed for the real part
                    for pz in self.pzPosOff:
                        # Tags containing 'p0' use the zero-momentum data
                        mT = mTag_0 if 'p0' in pz else mTag
                        fpT = 'M_tL%d' % (tOpt['Re'][pz])  # Tag of the matrix element at the selected fit time
                        summBins['Re'][pz] = self.summ.bins[fit][fpT]['Re'][mT][dkeyS['Re'][pz]]

                    # Evaluate the ITDs
                    for ri in self.RI:
                        # The 'center' value is needed for both real and imaginary
                        fpTc = 'M_tL%d' % (tOpt[ri]['c'])
                        summBins[ri]['c'] = self.summ.bins[fit][fpTc][ri][mTag][dkeyS[ri]['c']]

                        # Still use the Real part if z3 = 0 and/or mom = 0
                        self.bins[fit][dkey][ri] = ((summBins[ri]['c'] / summBins['Re']['z0']) *
                                                    (summBins['Re']['p0z0'] / summBins['Re']['p0']))
                        self.mean[fit][dkey][ri] = jackknife.mean(self.bins[fit][dkey][ri],
                                                                  Nbins=self.Nbins, Nspl=1)
            print('%s ITD for momentum %s completed' % (fit, mom))
def getDataHDF5():
    # Read the plain two-point correlator data from the input HDF5 file
    # into self.plainData[mTag][(t0, iop, row)].
    print('\nWill read data from HDF5')
    inputHDF5 = self.dataInfo['Input Data']['HDF5 File']
    # FIX: use a context manager so the file is closed even if a read raises,
    # instead of an unguarded open()/close() pair.
    with h5py.File(inputHDF5, 'r') as h5_file:
        for mom in self.moms:
            mTag = tags.momString(mom)
            mh5Tag = tags.momH5(mom)
            t0List = self.dSetAttr[mTag]['t0']
            Nrows = self.dSetAttr[mTag]['Nrows']
            self.plainData[mTag] = {}
            for t0 in t0List:
                t0Tag = tags.t0(t0)
                for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                    opTag = tags.src_snk(opPair)
                    for row in range(1, Nrows + 1):
                        rowTag = tags.row(row)
                        dkey = (t0, iop, row)
                        # Get the plain data
                        dset = 'plain/%s/%s/%s/%s/data' % (mh5Tag, t0Tag, opTag, rowTag)
                        self.plainData[mTag][dkey] = np.array(h5_file[dset])
            print('Reading two-point data for momentum %s completed.' % (mTag))
def writeHDF5(self):
    """Write the reduced ITD bins/means, and their Ioffe-time (nu) dependence,
    to the output HDF5 file given by self.info['HDF5 Output File']."""

    def computeNuITD(fit_, gamma_, ri_):
        # Collect [nu, mean, error] rows over all momenta and z3 values.
        nuArr = []
        for mom in self.momAvg:
            mTag = tags.momString(mom)
            dispListAvg = self.dispAvg[mTag]
            for z3 in dispListAvg:
                dkey_ = (mTag, z3, gamma_)
                nu = mom[2] * z3 * self.unitMom  # Ioffe-time, nu = Pz*z3*unitMom
                nuArr.append([nu,
                              self.mean[fit_][dkey_][ri_][0],
                              self.mean[fit_][dkey_][ri_][1]])
        return np.array(nuArr, dtype=np.float64)
    #---------------------------

    with h5py.File(self.info['HDF5 Output File'], 'w') as h5_file:
        for fType in self.fitTypes.keys():
            for fit in self.fitTypes[fType]:
                # Write momentum and z3 dependence
                for mom in self.momAvg:
                    mTag = tags.momString(mom)
                    mh5Tag = tags.momH5(mom)
                    dispListAvg = self.dispAvg[mTag]
                    for z3 in dispListAvg:
                        dispTag = tags.disp(z3)
                        for gamma in self.gammaList:
                            insTag = tags.insertion(gamma)
                            dkey = (mTag, z3, gamma)
                            for ri in self.RI:
                                group = '%s/%s/%s/%s/%s' % (fit, mh5Tag, dispTag, insTag, ri)
                                dset_name_bins = 'bins/' + group
                                dset_name_mean = 'mean/' + group
                                h5_file.create_dataset(dset_name_bins,
                                                       data=self.bins[fit][dkey][ri])
                                h5_file.create_dataset(dset_name_mean,
                                                       data=self.mean[fit][dkey][ri], dtype='f')
                # End for momentum

                # Write the nu-dependence of the ITD
                for gamma in self.gammaList:
                    # FIX: insTag was previously left over from the loop above
                    # (stale, always the last insertion), so every gamma was
                    # written under the same group path. Recompute it per gamma.
                    insTag = tags.insertion(gamma)
                    for ri in self.RI:
                        nuITD = computeNuITD(fit, gamma, ri)
                        group = '%s/%s/%s' % (fit, insTag, ri)
                        dset_name_nuMean = 'nuDep/' + group
                        h5_file.create_dataset(dset_name_nuMean, data=nuITD)
        # End for fType
    print('Reduced Ioffe-time distributions written in HDF5.')
def getSelectedFits(fit_, ri_, mTag_c, z3_c):
    """Return the selected fit time for every position tag in self.pzPos.

    Tags containing 'z0' look up with z3 = 0, tags containing 'p0' look up
    with zero momentum; otherwise the current (mTag_c, z3_c) pair is used.
    """
    zeroMomTag = tags.momString([0, 0, 0])
    selected = {}
    for posTag in self.pzPos:
        dispKey = 0 if 'z0' in posTag else z3_c
        momKey = zeroMomTag if 'p0' in posTag else mTag_c
        selected[posTag] = self.tSelFit[fit_][ri_][(momKey, dispKey)]
    return selected
def makeConstantFits(fitSeq):
    # Perform constant (plateau) fits of the ratio data over every candidate
    # fit window, per (tsep, z3, gamma) key and Re/Im part, storing the fitted
    # matrix element, its chi-square, and the first window that satisfies the
    # chi-square criterion as the "optimal" fit.
    fType = fitSeq['Type']
    fLabel = fitSeq['Label']
    chiCrit = fitSeq['Chi Criterion']  # acceptance threshold on the mean chi-square
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        tsepList = self.dSetAttr3pt[mTag]['tsep']
        dispListAvg = self.dispAvg[mTag]
        for ri in self.RI:
            self.Mbins[fLabel][ri][mTag] = {}
            self.Mmean[fLabel][ri][mTag] = {}
            self.chiBins[fLabel][ri][mTag] = {}
            self.chiMean[fLabel][ri][mTag] = {}
            self.optimalFit[fLabel][ri][mTag] = {}
            for tsep in tsepList:
                fAttr = self.fitAttr[mTag][tsep]
                for z3 in dispListAvg:
                    for gamma in self.gammaList:
                        dkey = (tsep, z3, gamma)
                        self.Mbins[fLabel][ri][mTag][dkey] = {}
                        self.Mmean[fLabel][ri][mTag][dkey] = {}
                        self.chiBins[fLabel][ri][mTag][dkey] = {}
                        self.chiMean[fLabel][ri][mTag][dkey] = {}
                        self.optimalFit[fLabel][ri][mTag][dkey] = -1  # Negative number means no Optimal Fit Found
                        optimalFitFound = False
                        for nf in range(fAttr['Nfits']):
                            tstart = fAttr['nf=%d' % (nf)]['tstart']
                            tstop = fAttr['nf=%d' % (nf)]['tstop']
                            self.Mbins[fLabel][ri][mTag][dkey][nf] = np.zeros(self.Nbins, dtype=np.float64)
                            self.chiBins[fLabel][ri][mTag][dkey][nf] = np.zeros(self.Nbins, dtype=np.float64)
                            # Fit each jackknife bin independently over [tstart, tstop]
                            for b in range(self.Nbins):
                                data = self.ratioBins[ri][mTag][dkey][b, tstart:tstop + 1]
                                err = self.ratioMean[ri][mTag][dkey][1][tstart:tstop + 1]
                                if fType == 'Constant':
                                    self.Mbins[fLabel][ri][mTag][dkey][nf][b] = constFit.fit(data, err)
                                    self.chiBins[fLabel][ri][mTag][dkey][nf][b] = constFit.chiSquare(
                                        data, err, self.Mbins[fLabel][ri][mTag][dkey][nf][b])
                            self.Mmean[fLabel][ri][mTag][dkey][nf] = jackknife.mean(
                                self.Mbins[fLabel][ri][mTag][dkey][nf], Nbins=self.Nbins, Nspl=1)
                            self.chiMean[fLabel][ri][mTag][dkey][nf] = jackknife.mean(
                                self.chiBins[fLabel][ri][mTag][dkey][nf], Nbins=self.Nbins, Nspl=1)

                            # Determine optimal plateau fit: first window whose
                            # mean chi-square is within the criterion wins.
                            if (self.chiMean[fLabel][ri][mTag][dkey][nf][0] <= chiCrit) and not optimalFitFound:
                                self.optimalFit[fLabel][ri][mTag][dkey] = nf
                                optimalFitFound = True
                        # End for Nfits
        print('%s fits, with label %s for momentum %s completed.' % (fType, fLabel, mTag))
def compute(self):
    """Compute the effective energy log(C(t)/C(t+1)) for the averaged, plain,
    and momentum-averaged two-point data, storing jackknife bins and means."""

    def logRatio(c2ptBins):
        # Effective-energy log-ratio per bin and timeslice; the (t+1)%Nt index
        # wraps the last timeslice around to t=0.
        Nt = self.dSetAttr[mTag]['Nt']
        binsArr = np.zeros((self.Nbins, Nt), dtype=np.float128)
        # FIX: the original wrapped np.log in `except RuntimeWarning`, but
        # warnings are not raised as exceptions by default, so the guard never
        # fired (and assigning None into a float array would raise TypeError).
        # Check the ratio explicitly and store NaN for non-positive arguments.
        for b in range(self.Nbins):
            for t in range(Nt):
                ratio = c2ptBins[b, t] / c2ptBins[b, (t + 1) % Nt]
                binsArr[b, t] = np.log(ratio) if ratio > 0 else np.nan
        meanArr = jackknife.mean(binsArr, self.Nbins, Nspl=Nt)
        return binsArr, meanArr
    #---------------------

    for mom in self.moms:
        mTag = tags.momString(mom)
        t0List = self.dSetAttr[mTag]['t0']
        Nrows = self.dSetAttr[mTag]['Nrows']

        # Effective Energy for averaged data
        self.avgBins[mTag], self.avgMean[mTag] = logRatio(self.c2pt.avgBins[mTag])

        self.plainBins[mTag] = {}
        self.plainMean[mTag] = {}
        for t0 in t0List:
            for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                for row in range(1, Nrows + 1):
                    dkey = (t0, iop, row)
                    # Effective Energy for plain data
                    self.plainBins[mTag][dkey], self.plainMean[mTag][dkey] = logRatio(
                        self.c2pt.plainBins[mTag][dkey])

    for mom in self.momAvg:
        mTag = tags.momString(mom)
        self.bins[mTag], self.mean[mTag] = logRatio(self.c2pt.bins[mTag])

    print('Effective Energy computed.')
def getOptimalPlatFits(fit_, ri_, mTag_c, dkey_):
    """Return the optimal plateau-fit index for each position tag in
    self.pzPos, falling back to the provided plateau ranges when no
    optimal fit was found (index == -1)."""
    zeroMomTag = tags.momString([0, 0, 0])
    chosen = {}
    for posTag in self.pzPos:
        momKey = zeroMomTag if 'p0' in posTag else mTag_c
        optIdx = self.plat.optimalFit[fit_][ri_][momKey][dkey_[posTag]]
        if optIdx != -1:
            # Valid optimal fit
            chosen[posTag] = optIdx
        else:
            # Fall back to the provided plateau ranges; these are the same
            # for all momenta and z. The tsep is the first entry of the key.
            chosen[posTag] = self.platRng[ri_][dkey_[posTag][0]]
    return chosen
def computeNuITD(fit_, gamma_, ri_):
    """Collect [nu, mean, error] rows of the ITD over all momenta and z3
    values, where nu = Pz*z3*unitMom is the Ioffe time."""
    rows = []
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        for z3 in self.dispAvg[mTag]:
            dkey_ = (mTag, z3, gamma_)
            nu = mom[2] * z3 * self.unitMom  # Ioffe-time, nu = Pz*z3*unitMom
            meanVal = self.mean[fit_][dkey_][ri_][0]
            errVal = self.mean[fit_][dkey_][ri_][1]
            rows.append([nu, meanVal, errVal])
    return np.array(rows, dtype=np.float64)
def getDataHDF5():
    # Read the plain three-point correlator data from the input HDF5 file
    # into self.plainData[ri][mTag][(tsep, t0, z3, iop, row, gamma)].
    print('\nWill read data from HDF5')
    inputHDF5 = self.dataInfo['Input Data']['HDF5 File']
    # FIX: the original never closed the HDF5 file (no h5_file.close() at the
    # end, unlike the two-point reader); a context manager guarantees closure
    # even if a read raises.
    with h5py.File(inputHDF5, 'r') as h5_file:
        for mom in self.moms:
            mTag = tags.momString(mom)
            mh5Tag = tags.momH5(mom)
            t0List = self.dSetAttr[mTag]['t0']
            tsepList = self.dSetAttr[mTag]['tsep']
            dispList = self.dSetAttr[mTag]['disp']
            Nrows = self.dSetAttr[mTag]['Nrows']
            for ri in self.RI:
                self.plainData[ri][mTag] = {}
                for z3 in dispList:
                    dispTag = tags.disp(z3)
                    for tsep in tsepList:
                        tsepTag = tags.tsep(tsep)
                        for t0 in t0List:
                            t0Tag = tags.t0(t0)
                            for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                                opTag = tags.src_snk(opPair)
                                for row in range(1, Nrows + 1):
                                    rowTag = tags.row(row)
                                    for gamma in self.gammaList:
                                        insTag = tags.insertion(gamma)
                                        dkey = (tsep, t0, z3, iop, row, gamma)
                                        dset = 'plain/%s/%s/%s/%s/%s/%s/%s/%s/data' % (
                                            mh5Tag, dispTag, tsepTag, t0Tag,
                                            opTag, rowTag, insTag, ri)
                                        self.plainData[ri][mTag][dkey] = np.array(h5_file[dset])
            print('Reading three-point data for momentum %s completed.' % (mTag))
def dumpHDF5(fitSeq, h5_file):
    # Write the plateau-fit results (optimal fit index, matrix-element
    # bins/means and chi-square bins/means for every fit window) to the
    # already-open HDF5 file handle.
    fType = fitSeq['Type']
    fLabel = fitSeq['Label']
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        tsepList = self.dSetAttr3pt[mTag]['tsep']
        dispListAvg = self.dispAvg[mTag]
        for z3 in dispListAvg:
            dispTag = tags.disp(z3)
            for gamma in self.gammaList:
                insTag = tags.insertion(gamma)
                for tsep in tsepList:
                    dkey = (tsep, z3, gamma)
                    tsepTag = tags.tsep(tsep)
                    fAttr = self.fitAttr[mTag][tsep]
                    for ri in self.RI:
                        # Write optimalFitValues (the index of the selected
                        # fit window; -1 means none satisfied the criterion)
                        group = '%s/%s/%s/%s/%s' % (ri, mh5Tag, dispTag, insTag, tsepTag)
                        dset_name = 'OptimalFitRanges/' + group
                        h5_file.create_dataset(dset_name,
                                               data=np.array([self.optimalFit[fLabel][ri][mTag][dkey]]))
                        for nf in range(fAttr['Nfits']):
                            tstart = fAttr['nf=%d' % (nf)]['tstart']
                            tstop = fAttr['nf=%d' % (nf)]['tstop']
                            # Encode the fit window in the group name, e.g. nf0_1-5
                            h5LabelNf = 'nf%d_%d-%d' % (nf, tstart, tstop)
                            group = '%s/%s/%s/%s/%s/%s' % (ri, mh5Tag, dispTag, insTag, tsepTag, h5LabelNf)
                            dset_name_Mbins = 'MatElem/bins/' + group
                            dset_name_Mmean = 'MatElem/mean/' + group
                            dset_name_chiBins = 'chiSquare/bins/' + group
                            dset_name_chiMean = 'chiSquare/mean/' + group
                            h5_file.create_dataset(dset_name_Mbins,
                                                   data=self.Mbins[fLabel][ri][mTag][dkey][nf])
                            h5_file.create_dataset(dset_name_Mmean,
                                                   data=self.Mmean[fLabel][ri][mTag][dkey][nf], dtype='f')
                            h5_file.create_dataset(dset_name_chiBins,
                                                   data=self.chiBins[fLabel][ri][mTag][dkey][nf])
                            h5_file.create_dataset(dset_name_chiMean,
                                                   data=self.chiMean[fLabel][ri][mTag][dkey][nf], dtype='f')
    # End for momentum
    print('Plateau fitting data for type = %s, label = %s written in HDF5.' % (fType, fLabel))
def __init__(self, ratio, ratioType, fitInfo, analysisInfo):
    # Initialize the plateau-fit container: copy ratio bins/means for the
    # requested ratio type, allocate per-label/per-part result dicts, and
    # precompute the candidate fit windows (fitAttr) for each tsep.
    self.ratioBins = ratio.bins[ratioType]
    self.ratioMean = ratio.mean[ratioType]
    self.fitInfo = fitInfo
    self.analysisInfo = analysisInfo

    # Real-Imaginary part
    self.RI = ['Re', 'Im']

    self.Mbins = {}       # The matrix element (constant fit value)
    self.Mmean = {}       # The matrix element (constant fit value)
    self.chiBins = {}     # Chi-square of the fit
    self.chiMean = {}     # Chi-square of the fit
    self.optimalFit = {}  # Structure that holds the optimal plateau fits

    for fitSeq in self.fitInfo:
        fType = fitSeq['Type']
        fLabel = fitSeq['Label']
        # NOTE(review): non-Constant types only print a warning here and
        # continue to be registered — confirm that is intended.
        if fType != 'Constant':
            print('PlateauFits: Supports only Constant fits for now!')
        self.Mbins[fLabel] = {}
        self.Mmean[fLabel] = {}
        self.chiBins[fLabel] = {}
        self.chiMean[fLabel] = {}
        self.optimalFit[fLabel] = {}
        for ri in self.RI:
            self.Mbins[fLabel][ri] = {}
            self.Mmean[fLabel][ri] = {}
            self.chiBins[fLabel][ri] = {}
            self.chiMean[fLabel][ri] = {}
            self.optimalFit[fLabel][ri] = {}

    self.momAvg = ratio.momAvg
    self.dispAvg = ratio.dispAvg
    self.Nbins = ratio.Nbins
    self.gammaList = ratio.gammaList
    self.dSetAttr3pt = ratio.dSetAttr3pt

    # Define required fit structures
    self.fitAttr = {}
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        tsepList = self.dSetAttr3pt[mTag]['tsep']
        self.fitAttr[mTag] = {}
        for tsep in tsepList:
            self.fitAttr[mTag][tsep] = {}
            self.fitAttr[mTag][tsep]['Nfits'] = tsep // 2 - 1  # How many fits for each tsep
            tini, tfin = 1, tsep - 1  # Full range, omit the source point, go up to the end
            self.fitAttr[mTag][tsep]['Rng'] = []
            for nf in range(self.fitAttr[mTag][tsep]['Nfits']):
                self.fitAttr[mTag][tsep]['nf=%d' % (nf)] = {}
                tstart, tstop = tini + nf, tfin - nf  # Range of each fit (shrinks symmetrically)
                Npts = tstop - tstart + 1  # How many points in each fit
                # NOTE(review): xdata has Npts+1 entries for Npts fit points —
                # confirm the extra point is intentional.
                self.fitAttr[mTag][tsep]['nf=%d' % (nf)]['xdata'] = np.arange(Npts + 1)
                self.fitAttr[mTag][tsep]['nf=%d' % (nf)]['tstart'] = tstart  # Range
                self.fitAttr[mTag][tsep]['nf=%d' % (nf)]['tstop'] = tstop    # of each fit
                self.fitAttr[mTag][tsep]['nf=%d' % (nf)]['Npts'] = Npts      # How many points in each fit
                self.fitAttr[mTag][tsep]['Rng'].append('%d-%d' % (tstart, tstop))
    print('Plateau Fits initialized')
def dumpLinearFitsHDF5(fitSeq, h5_file):
    # Write the summation (linear) fit results — chi-square, fit bands, and
    # fit parameters per lower-tsep cut — to the already-open HDF5 file handle.
    fType = fitSeq['Type']
    fLabel = fitSeq['Label']
    tsepLowList = fitSeq['tsepLow']  # list of lower tsep cuts, one fit per cut
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        dispListAvg = self.dispAvg[mTag]
        for z3 in dispListAvg:
            dispTag = tags.disp(z3)
            for gamma in self.gammaList:
                insTag = tags.insertion(gamma)
                dkeyF = (z3, gamma)
                for ri in self.RI:
                    for tL in tsepLowList:
                        sLTag = 'tL%d' % (tL)
                        # First and last tsep actually entering this fit
                        tini = self.tsepFitX[fLabel][mTag][sLTag][0]
                        tfin = self.tsepFitX[fLabel][mTag][sLTag][-1]
                        h5LabelT = 'tsep_%d-%d' % (tini, tfin)

                        # Write Chi^2
                        group = '%s/%s/%s/%s/%s' % (ri, mh5Tag, dispTag, insTag, h5LabelT)
                        dset_name_chiBins = 'chiSquare/bins/' + group
                        dset_name_chiMean = 'chiSquare/mean/' + group
                        h5_file.create_dataset(dset_name_chiBins,
                                               data=self.chiBins[fLabel][sLTag][ri][mTag][dkeyF])
                        h5_file.create_dataset(dset_name_chiMean,
                                               data=self.chiMean[fLabel][sLTag][ri][mTag][dkeyF],
                                               dtype='f')

                        # Write fit bands as an (x, value, error) triple
                        dset_name_fitBands = 'fitBands/' + group
                        h5_file.create_dataset(
                            dset_name_fitBands,
                            data=(self.fitBands[fLabel][sLTag][ri][mTag][dkeyF]['x'],
                                  self.fitBands[fLabel][sLTag][ri][mTag][dkeyF]['v'],
                                  self.fitBands[fLabel][sLTag][ri][mTag][dkeyF]['e']),
                            dtype='f')

                        # Write Fit parameters (paired with their HDF5 names)
                        for fP, fpH5 in zip(self.fitParams[fType], self.fitParamsH5[fType]):
                            fpTag = fP + '_%s' % (sLTag)
                            dset_name_bins = '%s/bins/' % (fpH5) + group
                            dset_name_mean = '%s/mean/' % (fpH5) + group
                            h5_file.create_dataset(dset_name_bins,
                                                   data=self.bins[fLabel][fpTag][ri][mTag][dkeyF])
                            h5_file.create_dataset(dset_name_mean,
                                                   data=self.mean[fLabel][fpTag][ri][mTag][dkeyF],
                                                   dtype='f')
    # End for momentum
    print('Summation fitting data for type = %s, label = %s written in HDF5.' % (fType, fLabel))
def writeHDF5(self):
    # Write the effective-energy data (averaged, plain, momentum-averaged)
    # and the fit results to the output HDF5 file.
    h5_file = h5py.File(self.dataInfo['HDF5 Output File'], 'w')
    for mom in self.moms:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        t0List = self.dSetAttr[mTag]['t0']
        Nrows = self.dSetAttr[mTag]['Nrows']

        # Write the averaged data
        avg_group = 'data/avg/%s' % (mh5Tag)
        dset_name_bins = avg_group + '/bins'
        dset_name_mean = avg_group + '/mean'
        h5_file.create_dataset(dset_name_bins, data=self.avgBins[mTag])
        h5_file.create_dataset(dset_name_mean, data=self.avgMean[mTag], dtype='f')

        for t0 in t0List:
            t0Tag = tags.t0(t0)
            for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                opTag = tags.src_snk(opPair)
                for row in range(1, Nrows + 1):
                    rowTag = tags.row(row)
                    dkey = (t0, iop, row)
                    # Write the plain data
                    plain_group = 'data/plain/%s/%s/%s/%s' % (mh5Tag, t0Tag, opTag, rowTag)
                    dset_name_plainBins = plain_group + '/bins'
                    dset_name_plainMean = plain_group + '/mean'
                    h5_file.create_dataset(dset_name_plainBins, data=self.plainBins[mTag][dkey])
                    h5_file.create_dataset(dset_name_plainMean, data=self.plainMean[mTag][dkey], dtype='f')
    #--------------------------------

    # Write the momentum-averaged data
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(tags.momVec(mTag))
        momAvg_group = 'data/momAvg/%s' % (mh5Tag)
        dset_name_bins = momAvg_group + '/bins'
        dset_name_mean = momAvg_group + '/mean'
        h5_file.create_dataset(dset_name_bins, data=self.bins[mTag])
        h5_file.create_dataset(dset_name_mean, data=self.mean[mTag], dtype='f')
    #--------------------------------

    # Write the Fit data (bins, mean, and chi-square per fit type and momentum)
    for fitSeq in self.fitInfo:
        fType = fitSeq['Type']
        for mTag in fitSeq['Ranges'].keys():
            mh5Tag = tags.momH5(tags.momVec(mTag))
            fit_group = 'fits/%s/momAvg/%s' % (fType, mh5Tag)
            dset_name_fitBins = fit_group + '/bins'
            dset_name_fitMean = fit_group + '/mean'
            dset_name_chiMean = fit_group + '/chiSquare'
            h5_file.create_dataset(dset_name_fitBins, data=self.fitBins[fType][mTag])
            h5_file.create_dataset(dset_name_fitMean, data=self.fitMean[fType][mTag], dtype='f')
            h5_file.create_dataset(dset_name_chiMean, data=self.chiMean[fType][mTag], dtype='f')
    #--------------------------------
    h5_file.close()
    print('Effective Energy data written in HDF5.')
def writeHDF5(self):
    # Write the two-point function data (averaged, covariance, plain, and
    # momentum-averaged) to the output HDF5 file.
    h5_file = h5py.File(self.dataInfo['HDF5 Output File'], 'w')
    for mom in self.moms:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        t0List = self.dSetAttr[mTag]['t0']
        Nrows = self.dSetAttr[mTag]['Nrows']

        # Write the averaged data (raw data, jackknife bins, and mean)
        avg_group = 'avg/%s' % (mh5Tag)
        dset_name_data = avg_group + '/data'
        dset_name_bins = avg_group + '/bins'
        dset_name_mean = avg_group + '/mean'
        h5_file.create_dataset(dset_name_data, data=self.avgData[mTag])
        h5_file.create_dataset(dset_name_bins, data=self.avgBins[mTag])
        h5_file.create_dataset(dset_name_mean, data=self.avgMean[mTag], dtype='f')

        for t0 in t0List:
            t0Tag = tags.t0(t0)
            # Write cov. matrix mean
            cov_group = 'cov/%s/%s' % (mh5Tag, t0Tag)
            dset_name_covMean = cov_group + '/mean'
            h5_file.create_dataset(dset_name_covMean, data=self.covMean[mTag][t0], dtype='f')
            for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                opTag = tags.src_snk(opPair)
                for row in range(1, Nrows + 1):
                    rowTag = tags.row(row)
                    dkey = (t0, iop, row)
                    # Write the plain data
                    plain_group = 'plain/%s/%s/%s/%s' % (mh5Tag, t0Tag, opTag, rowTag)
                    dset_name_plainData = plain_group + '/data'
                    dset_name_plainBins = plain_group + '/bins'
                    dset_name_plainMean = plain_group + '/mean'
                    h5_file.create_dataset(dset_name_plainData, data=self.plainData[mTag][dkey])
                    h5_file.create_dataset(dset_name_plainBins, data=self.plainBins[mTag][dkey])
                    h5_file.create_dataset(dset_name_plainMean, data=self.plainMean[mTag][dkey], dtype='f')

    # Write the momentum-averaged data
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        momAvg_group = 'momAvg/%s' % (mh5Tag)
        dset_name_momData = momAvg_group + '/data'
        dset_name_momBins = momAvg_group + '/bins'
        dset_name_momMean = momAvg_group + '/mean'
        h5_file.create_dataset(dset_name_momData, data=self.data[mTag])
        h5_file.create_dataset(dset_name_momBins, data=self.bins[mTag])
        h5_file.create_dataset(dset_name_momMean, data=self.mean[mTag], dtype='f')
    #--------------------------------
    h5_file.close()
    print('Two-point function data written in HDF5.')
def makeLinearFitBands(fitSeq):
    # Evaluate the linear-fit model on a dense grid of Npts points spanning
    # slightly beyond the fitted tsep range, producing value and jackknife
    # error bands per lower-tsep cut, Re/Im part, and (z3, gamma) key.
    Npts = fitSeq['Fit Bands']['Npoints']
    fType = fitSeq['Type']
    fLabel = fitSeq['Label']
    tsepLowList = fitSeq['tsepLow']
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        dispListAvg = self.dispAvg[mTag]
        for tL in tsepLowList:
            sLTag = 'tL%d' % (tL)
            MTag = 'M' + '_%s' % (sLTag)  # slope parameter tag
            bTag = 'b' + '_%s' % (sLTag)  # intercept parameter tag

            # Determine beginning and ending of bands, and interval
            # (one unit beyond the fitted range on each side)
            xStart = self.tsepFitX[fLabel][mTag][sLTag][0] - 1
            xEnd = self.tsepFitX[fLabel][mTag][sLTag][-1] + 1
            dx = (xEnd - xStart) / (Npts - 1)

            for ri in self.RI:
                self.fitBands[fLabel][sLTag][ri][mTag] = {}
                for z3 in dispListAvg:
                    for gamma in self.gammaList:
                        dkeyF = (z3, gamma)
                        self.fitBands[fLabel][sLTag][ri][mTag][dkeyF] = {
                            'x': np.zeros(Npts, dtype=np.float64),  # x
                            'v': np.zeros(Npts, dtype=np.float64),  # value
                            'e': np.zeros(Npts, dtype=np.float64)}  # error
                        for ix in range(Npts):
                            x = xStart + ix * dx  # Current point in the band

                            Mmean = self.mean[fLabel][MTag][ri][mTag][dkeyF][0]  # Matrix element (slope)
                            bmean = self.mean[fLabel][bTag][ri][mTag][dkeyF][0]  # Intersection

                            self.fitBands[fLabel][sLTag][ri][mTag][dkeyF]['x'][ix] = x
                            self.fitBands[fLabel][sLTag][ri][mTag][dkeyF]['v'][ix] = linearFit.model(x, Mmean, bmean)

                            # Determine error band: evaluate the model on each
                            # jackknife bin and take the jackknife error
                            errBand = np.zeros(self.Nbins, dtype=np.float64)
                            for ib in range(self.Nbins):
                                Mbins = self.bins[fLabel][MTag][ri][mTag][dkeyF][ib]
                                bbins = self.bins[fLabel][bTag][ri][mTag][dkeyF][ib]
                                errBand[ib] = linearFit.model(x, Mbins, bbins)
                            self.fitBands[fLabel][sLTag][ri][mTag][dkeyF]['e'][ix] = jackknife.mean(
                                errBand, self.Nbins, Nspl=1)[1]
        # End for tsepLow ------
        print('%s error bands for momentum %s completed' % (fType, mTag))
def writeHDF5(self):
    # Write the ratio data — plain, summed, and reduced-summed — to the
    # output HDF5 file. The summed-ratio means are packed as (tsep, mean,
    # error) arrays over the tsep list.
    h5_file = h5py.File(self.dataInfo['HDF5 Output File'], 'w')
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        tsepList = self.dSetAttr3pt[mTag]['tsep']
        # Reduced-summed ratio uses all but the last tsep
        tsepList_rs = self.dSetAttr3pt[mTag]['tsep'][:-1]
        dispListAvg = self.dispAvg[mTag]
        Ntsep = len(tsepList)
        Ntsep_rs = len(tsepList_rs)
        for z3 in dispListAvg:
            dispTag = tags.disp(z3)
            for gamma in self.gammaList:
                insTag = tags.insertion(gamma)
                for ri in self.RI:
                    sumRatioH5 = (np.zeros(Ntsep), np.zeros(Ntsep), np.zeros(Ntsep))
                    for its, tsep in enumerate(tsepList):
                        dkey = (tsep, z3, gamma)
                        tsepTag = tags.tsep(tsep)

                        # Write the plain ratio bins and mean
                        rType = 'plain'
                        group = '%s/%s/%s/%s/%s/%s' % (rType, mh5Tag, tsepTag, dispTag, insTag, ri)
                        dset_name_bins = 'bins/' + group
                        dset_name_mean = 'mean/' + group
                        h5_file.create_dataset(dset_name_bins, data=self.bins[rType][ri][mTag][dkey])
                        h5_file.create_dataset(dset_name_mean, data=self.mean[rType][ri][mTag][dkey], dtype='f')
                        #---------------------------------------------------------------

                        # Write the summed ratio bins
                        rType = 'sum'
                        group = '%s/%s/%s/%s/%s/%s' % (rType, mh5Tag, tsepTag, dispTag, insTag, ri)
                        dset_name_bins = 'bins/' + group
                        h5_file.create_dataset(dset_name_bins, data=self.bins[rType][ri][mTag][dkey])

                        # Convert the summed ratio mean into arrays that depend on tsep
                        sumRatioH5[0][its] = tsep                                # tsep (x)
                        sumRatioH5[1][its] = self.mean[rType][ri][mTag][dkey][0]  # ratio mean (y)
                        sumRatioH5[2][its] = self.mean[rType][ri][mTag][dkey][1]  # ratio error (y-error)
                    # End for tsep

                    # Write the summed ratio means
                    rType = 'sum'
                    group = '%s/%s/%s/%s/%s' % (rType, mh5Tag, dispTag, insTag, ri)
                    dset_name_mean = 'mean/' + group
                    h5_file.create_dataset(dset_name_mean, data=sumRatioH5, dtype='f')
                    #-----------------------------

                    # Reduced-summed ratio
                    rSumRatioH5 = (np.zeros(Ntsep_rs), np.zeros(Ntsep_rs), np.zeros(Ntsep_rs))
                    for its, tsep in enumerate(tsepList_rs):
                        dkey = (tsep, z3, gamma)
                        tsepTag = tags.tsep(tsep)

                        # Write the summed ratio bins
                        rType = 'r-sum'
                        group = '%s/%s/%s/%s/%s/%s' % (rType, mh5Tag, tsepTag, dispTag, insTag, ri)
                        dset_name_bins = 'bins/' + group
                        h5_file.create_dataset(dset_name_bins, data=self.bins[rType][ri][mTag][dkey])

                        # Convert the reduced-summed ratio mean into arrays that depend on tsep
                        rSumRatioH5[0][its] = tsep                                 # tsep (x)
                        rSumRatioH5[1][its] = self.mean[rType][ri][mTag][dkey][0]  # ratio mean (y)
                        rSumRatioH5[2][its] = self.mean[rType][ri][mTag][dkey][1]  # ratio error (y-error)
                    # End for tsep

                    # Write the reduced-summed ratio means
                    rType = 'r-sum'
                    group = '%s/%s/%s/%s/%s' % (rType, mh5Tag, dispTag, insTag, ri)
                    dset_name_mean = 'mean/' + group
                    h5_file.create_dataset(dset_name_mean, data=rSumRatioH5, dtype='f')
                    #-----------------------------
    # End for momentum
    h5_file.close()
    print('Ratio data written in HDF5.')
def doStatistics(self):
    # Jackknife-sample the plain three-point data, average over source-sink
    # operators, t0's, and rows, then average over momenta (+Pz/-Pz) and
    # displacements (+z3/-z3) with the sign conventions applied to the
    # imaginary part, and finally jackknife the fully averaged data.
    if not self.dataLoaded:
        raise ValueError(
            'Data must be loaded first, before doing Statistical Sampling')

    for mom in self.moms:
        mTag = tags.momString(mom)
        t0List = self.dSetAttr[mTag]['t0']
        tsepList = self.dSetAttr[mTag]['tsep']
        dispList = self.dSetAttr[mTag]['disp']
        Nrows = self.dSetAttr[mTag]['Nrows']
        Ncfg = self.dSetAttr[mTag]['Ncfg']
        Nt0 = len(t0List)
        Nop = self.dSetAttr[mTag]['Nop']

        # Determine the Jackknife sampling number of Bins
        self.Nbins = jackknife.Nbins(Ncfg, self.binsize)

        # Number of samples entering the operator/t0/row average
        Navg = Nrows * Nt0 * Nop

        # The plain data Bins and Mean
        for ri in self.RI:
            self.plainBins[ri][mTag] = {}
            self.plainMean[ri][mTag] = {}
            self.avgData[ri][mTag] = {}
            self.avgBins[ri][mTag] = {}
            self.avgMean[ri][mTag] = {}

        for tsep in tsepList:
            Nt = tsep  # the time extent of each dataset equals its tsep
            for z3 in dispList:
                for gamma in self.gammaList:
                    dkeyAvg = (tsep, z3, gamma)
                    for ri in self.RI:
                        self.avgData[ri][mTag][dkeyAvg] = np.zeros((Ncfg, Nt), dtype=np.float128)
                    # We are averaging for the following attributes
                    for t0 in t0List:
                        for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                            for row in range(1, Nrows + 1):
                                dkey = (tsep, t0, z3, iop, row, gamma)
                                for ri in self.RI:
                                    # Jackknife sampling on the Plain data
                                    self.plainBins[ri][mTag][dkey] = np.zeros((self.Nbins, Nt), dtype=np.float128)
                                    for t in range(Nt):
                                        self.plainBins[ri][mTag][dkey][:, t] = jackknife.sampling(
                                            self.plainData[ri][mTag][dkey][:, t],
                                            self.Nbins, self.binsize)
                                    self.plainMean[ri][mTag][dkey] = jackknife.mean(
                                        self.plainBins[ri][mTag][dkey], self.Nbins, Nspl=Nt)
                                    # Average over Source-Sink operators, t0's and rows
                                    self.avgData[ri][mTag][dkeyAvg] += self.plainData[ri][mTag][dkey]

                    # Average over Source-Sink operators, t0's and rows
                    for ri in self.RI:
                        self.avgData[ri][mTag][dkeyAvg] = self.avgData[ri][mTag][dkeyAvg] / Navg

                        # Jackknife sampling over the averaged data, for each momentum, tsep, z3 and gamma
                        self.avgBins[ri][mTag][dkeyAvg] = np.zeros((self.Nbins, Nt), dtype=np.float128)
                        for t in range(Nt):
                            self.avgBins[ri][mTag][dkeyAvg][:, t] = jackknife.sampling(
                                self.avgData[ri][mTag][dkeyAvg][:, t],
                                self.Nbins, self.binsize)
                        self.avgMean[ri][mTag][dkeyAvg] = jackknife.mean(
                            self.avgBins[ri][mTag][dkeyAvg], self.Nbins, Nspl=Nt)
        print('Jackknife analysis for momentum %s completed' % (mTag))
    # End for momentum

    # Perform average over momenta and z3 values
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        tsepList = self.dSetAttr[mTag]['tsep']
        dispList = self.dSetAttr[mTag]['disp']
        dispListAvg = self.dispAvg[mTag]
        Ncfg = self.dSetAttr[mTag]['Ncfg']
        for ri in self.RI:
            self.data[ri][mTag] = {}
            self.bins[ri][mTag] = {}
            self.mean[ri][mTag] = {}
        for tsep in tsepList:
            Nt = tsep
            for gamma in self.gammaList:
                for z3 in dispListAvg:  # Run over the z3>=0
                    dkey = (tsep, z3, gamma)
                    for ri in self.RI:
                        self.data[ri][mTag][dkey] = np.zeros((Ncfg, Nt), dtype=np.float128)
                    if mom == [0, 0, 0]:
                        if z3 == 0 or not (z3 in dispList and -z3 in dispList):
                            # Pz=0, z3=0, OR NOT both z3 and -z3 exist
                            dkeyAvg = (tsep, -z3, gamma) if -z3 in dispList else (tsep, z3, gamma)
                            for ri in self.RI:
                                self.data[ri][mTag][dkey] = self.avgData[ri][mTag][dkeyAvg]
                        else:  # Pz=0, z3!=0: average +z3 and -z3
                            dkeyAvgPosZ = (tsep, z3, gamma)
                            dkeyAvgNegZ = (tsep, -z3, gamma)
                            self.data['Re'][mTag][dkey] = 0.5 * (
                                self.avgData['Re'][mTag][dkeyAvgPosZ] +
                                self.avgData['Re'][mTag][dkeyAvgNegZ])
                            self.data['Im'][mTag][dkey] = 0.5 * (
                                self.avgData['Im'][mTag][dkeyAvgPosZ] +
                                self.avgData['Im'][mTag][dkeyAvgNegZ])
                    else:
                        momNeg = [mom[0], mom[1], -mom[2]]
                        if mom in self.moms and momNeg in self.moms:
                            # Negative momentum exists in global momentum list
                            mTagPos = mTag
                            mTagNeg = tags.momString(momNeg)
                            if z3 == 0:  # Pz!=0, z3=0: average +Pz and -Pz
                                self.data['Re'][mTag][dkey] = 0.5 * (
                                    self.avgData['Re'][mTagPos][dkey] +
                                    self.avgData['Re'][mTagNeg][dkey])
                                self.data['Im'][mTag][dkey] = 0.5 * (
                                    self.avgData['Im'][mTagPos][dkey] +
                                    self.avgData['Im'][mTagNeg][dkey])
                            else:  # Pz!=0, z3!=0
                                if z3 in dispList and -z3 in dispList:
                                    # Four-fold average; the imaginary part gets
                                    # relative signs under z3 -> -z3 / Pz -> -Pz
                                    dkeyAvgPosZ = (tsep, z3, gamma)
                                    dkeyAvgNegZ = (tsep, -z3, gamma)
                                    self.data['Re'][mTag][dkey] = 0.25 * (
                                        self.avgData['Re'][mTagPos][dkeyAvgPosZ] +
                                        self.avgData['Re'][mTagPos][dkeyAvgNegZ] +
                                        self.avgData['Re'][mTagNeg][dkeyAvgPosZ] +
                                        self.avgData['Re'][mTagNeg][dkeyAvgNegZ])
                                    self.data['Im'][mTag][dkey] = 0.25 * (
                                        self.avgData['Im'][mTagPos][dkeyAvgPosZ] -
                                        self.avgData['Im'][mTagPos][dkeyAvgNegZ] -
                                        self.avgData['Im'][mTagNeg][dkeyAvgPosZ] +
                                        self.avgData['Im'][mTagNeg][dkeyAvgNegZ])
                                elif z3 in dispList and -z3 not in dispList:
                                    dkeyAvg = (tsep, z3, gamma)
                                    self.data['Re'][mTag][dkey] = 0.5 * (
                                        self.avgData['Re'][mTagPos][dkeyAvg] +
                                        self.avgData['Re'][mTagNeg][dkeyAvg])
                                    self.data['Im'][mTag][dkey] = 0.5 * (
                                        self.avgData['Im'][mTagPos][dkeyAvg] -
                                        self.avgData['Im'][mTagNeg][dkeyAvg])
                                elif -z3 in dispList and z3 not in dispList:
                                    dkeyAvg = (tsep, -z3, gamma)
                                    self.data['Re'][mTag][dkey] = 0.5 * (
                                        self.avgData['Re'][mTagPos][dkeyAvg] +
                                        self.avgData['Re'][mTagNeg][dkeyAvg])
                                    self.data['Im'][mTag][dkey] = 0.5 * (
                                        self.avgData['Im'][mTagNeg][dkeyAvg] -
                                        self.avgData['Im'][mTagPos][dkeyAvg])
                                else:
                                    raise ValueError(
                                        '\n Error: Inconsistency with z3 values!!!'
                                    )
                        elif mom in self.moms and momNeg not in self.moms:
                            # Only the positive momentum is available
                            mTagPos = mTag
                            if z3 == 0 or not (z3 in dispList and -z3 in dispList):  # Pz!=0, z3=0
                                dkeyAvg = (tsep, -z3, gamma) if -z3 in dispList else (tsep, z3, gamma)
                                for ri in self.RI:
                                    self.data[ri][mTag][dkey] = self.avgData[ri][mTagPos][dkeyAvg]
                            else:  # Pz!=0, z3!=0
                                dkeyAvgPosZ = (tsep, z3, gamma)
                                dkeyAvgNegZ = (tsep, -z3, gamma)
                                self.data['Re'][mTag][dkey] = 0.5 * (
                                    self.avgData['Re'][mTagPos][dkeyAvgPosZ] +
                                    self.avgData['Re'][mTagPos][dkeyAvgNegZ])
                                self.data['Im'][mTag][dkey] = 0.5 * (
                                    self.avgData['Im'][mTagPos][dkeyAvgPosZ] -
                                    self.avgData['Im'][mTagPos][dkeyAvgNegZ])
                        elif momNeg in self.moms and mom not in self.moms:
                            # Only the negative momentum is available
                            mTagNeg = tags.momString(momNeg)
                            if z3 == 0 or not (z3 in dispList and -z3 in dispList):  # Pz!=0, z3=0
                                dkeyAvg = (tsep, -z3, gamma) if -z3 in dispList else (tsep, z3, gamma)
                                for ri in self.RI:
                                    self.data[ri][mTag][dkey] = self.avgData[ri][mTagNeg][dkeyAvg]
                            else:  # Pz!=0, z3!=0
                                dkeyAvgPosZ = (tsep, z3, gamma)
                                dkeyAvgNegZ = (tsep, -z3, gamma)
                                self.data['Re'][mTag][dkey] = 0.5 * (
                                    self.avgData['Re'][mTagNeg][dkeyAvgPosZ] +
                                    self.avgData['Re'][mTagNeg][dkeyAvgNegZ])
                                self.data['Im'][mTag][dkey] = 0.5 * (
                                    self.avgData['Im'][mTagNeg][dkeyAvgNegZ] -
                                    self.avgData['Im'][mTagNeg][dkeyAvgPosZ])
                        else:
                            raise ValueError(
                                '\n Error: Inconsistency with momenta values!!!'
                            )
                    # End if mom != 0

                    # Jackknife sampling over the fully averaged data, for each momentum, tsep, z3 and gamma
                    for ri in self.RI:
                        self.bins[ri][mTag][dkey] = np.zeros((self.Nbins, Nt), dtype=np.float128)
                        for t in range(Nt):
                            self.bins[ri][mTag][dkey][:, t] = jackknife.sampling(
                                self.data[ri][mTag][dkey][:, t], self.Nbins, self.binsize)
                        self.mean[ri][mTag][dkey] = jackknife.mean(
                            self.bins[ri][mTag][dkey], self.Nbins, Nspl=Nt)
        print('Averaging over z3 and momenta for momentum %s completed.' % (mTag))
def evaluatePlateauITD(fitLabels):
    """Evaluate the (reduced) ITD from plateau-fit matrix elements.

    For each plateau fit label, momentum, displacement z3 and insertion gamma,
    forms the double ratio
        M(P,z3)/M(P,0) * M(0,0)/M(0,z3)
    out of the jackknife bins of the plateau fits, and stores bins/mean in
    self.bins[fit][dkey][ri] / self.mean[fit][dkey][ri].

    NOTE(review): closure — reads self, getSelectedFits, tags, jackknife from
    the enclosing scope.
    """
    # This function is specific to the plateau fits
    def getOptimalPlatKeys(tOpt,z3_c,gamma):
        # Build the (tsep, z3, gamma) key for each of the four
        # momentum/displacement positions ('c','z0','p0','p0z0').
        # Positions containing 'z0' use displacement 0, otherwise z3_c.
        z3_0 = 0
        kp = {}
        for pz in self.pzPos:
            zT = z3_0 if 'z0' in pz else z3_c
            kp[pz] = (tOpt[pz],zT,gamma)
        return kp
    #-------------------
    # This function is specific to the plateau fits
    def getOptimalPlatFits(fit_,ri_,mTag_c,dkey_):
        # For each position, pick the optimal plateau fit index if one was
        # determined (!= -1); otherwise fall back to the user-provided
        # plateau ranges (self.platRng), keyed by tsep only.
        mTag_0 = tags.momString([0,0,0])
        f = {}
        for pz in self.pzPos:
            mT = mTag_0 if 'p0' in pz else mTag_c
            if self.plat.optimalFit[fit_][ri_][mT][dkey_[pz]] != -1: # Valid optimal fit
                f[pz] = self.plat.optimalFit[fit_][ri_][mT][dkey_[pz]]
            else: # Get value from provided plateau ranges
                # These are the same for all momenta and z
                tS = dkey_[pz][0] # Get the tsep from the key
                f[pz] = self.platRng[ri_][tS]
        return f
    #-------------------

    # Zero momentum
    mTag_0 = tags.momString([0,0,0])

    # Temporary variable
    platBins = {}
    for ri in self.RI:
        platBins[ri] = {}

    for fit in fitLabels: # These are just the plateau fit labels!
        for mom in self.momAvg:
            mTag = tags.momString(mom)
            dispListAvg = self.dispAvg[mTag]
            for z3 in dispListAvg:
                for gamma in self.gammaList:
                    dkey = (mTag,z3,gamma)
                    self.bins[fit][dkey] = {}
                    self.mean[fit][dkey] = {}

                    # Need separate for-loop for these because all values of ri
                    # are needed in each ri-iteration further down
                    tOpt = {}
                    dkeyP = {}
                    optFit = {}
                    for ri in self.RI:
                        tOpt[ri] = getSelectedFits(fit,ri,mTag,z3)
                        dkeyP[ri] = getOptimalPlatKeys(tOpt[ri],z3,gamma)
                        optFit[ri] = getOptimalPlatFits(fit,ri,mTag,dkeyP[ri])

                    # The off-center values are only needed for the real part
                    for pz in self.pzPosOff:
                        # 'p0*' positions come from the zero-momentum data
                        mT = mTag_0 if 'p0' in pz else mTag
                        platBins['Re'][pz] = self.plat.Mbins[fit]['Re'][mT][dkeyP['Re'][pz]][optFit['Re'][pz]]

                    # Evaluate the ITDs
                    for ri in self.RI:
                        # The 'center' value is needed for both real and imaginary
                        platBins[ri]['c'] = self.plat.Mbins[fit][ri][mTag][dkeyP[ri]['c']][optFit[ri]['c']]

                        # Still use the Real part if z3 = 0 and/or mom = 0
                        self.bins[fit][dkey][ri] = ( (platBins[ri]['c'] / platBins['Re']['z0']) *
                                                     (platBins['Re']['p0z0'] / platBins['Re']['p0']) )
                        self.mean[fit][dkey][ri] = jackknife.mean(self.bins[fit][dkey][ri],
                                                                  Nbins = self.Nbins, Nspl=1)
            print('%s ITD for momentum %s completed'%(fit,mom))
def __init__(self, plat = None, summ = None, ITDinfo = None, fitInfo = None, ensembleInfo = None):
    """Initialize the ITD evaluation object.

    Parameters
    ----------
    plat : plateau-fit object or None
    summ : summation-fit object or None
        At least one of `plat`/`summ` must be provided.
    ITDinfo : dict
        Must contain 'Optimal Fits'; may contain 'Plateau Ranges'.
    fitInfo : dict
        Fit configuration (stored as-is).
    ensembleInfo : dict
        Must contain the lattice extent 'L'.
    """

    if plat == None and summ == None:
        raise ValueError('All of the supported fit types are "None". Cannot define ITDs!')

    self.fitInfo = fitInfo
    self.info = ITDinfo
    self.ensInfo = ensembleInfo

    # Unit momentum = 2*pi/L
    # Will be needed when writing the ITD as a function of Ioffe time nu = Pz*z3*unitMom
    self.unitMom = 2.0*np.pi/self.ensInfo['L']

    # Real-Imaginary part
    self.RI = ['Re','Im']

    # Momentum-displacement position list
    # 'c'   : p = mom    , disp = z3 (Center point)
    # 'z0'  : p = mom    , disp = 0
    # 'p0'  : p = (0,0,0), disp = z3
    # 'p0z0': p = (0,0,0), disp = 0
    self.pzPos = ['c','z0','p0','p0z0']
    self.pzPosOff = ['z0','p0','p0z0'] # No center point

    # Types of fits that we are considering for the ITDs
    self.fitLabels = self.info['Optimal Fits'].keys()
    self.fitTypes = {'Plateau': [], 'Summation':[]}

    # Make sure that the input labels are included in the fits performed earlier
    self.plat = plat
    if self.plat != None:
        for fitSeq in self.plat.fitInfo:
            fLabel = fitSeq['Label']
            if fLabel not in self.fitLabels:
                raise ValueError('Fit Label %s not in Input Fit Labels'%(fLabel))
            self.fitTypes['Plateau'].append(fLabel)

    self.summ = summ
    if self.summ != None:
        for fitSeq in self.summ.fitInfo:
            fLabel = fitSeq['Label']
            if fLabel not in self.fitLabels:
                raise ValueError('Fit Label %s not in Input Fit Labels'%(fLabel))
            self.fitTypes['Summation'].append(fLabel)
    #--------------------------------------

    if self.plat != None:
        # If self.summ also != None then that's still OK, because these attributes are the same
        # by definition in plat and summ
        self.momAvg = self.plat.momAvg
        self.dispAvg = self.plat.dispAvg
        self.Nbins = self.plat.Nbins
        self.gammaList = self.plat.gammaList
        self.dSetAttr3pt = self.plat.dSetAttr3pt
    else:
        # Get these attributes from the summ fits instead, it MUST be defined otherwise ValueError is raised
        self.momAvg = self.summ.momAvg
        self.dispAvg = self.summ.dispAvg
        self.Nbins = self.summ.Nbins
        self.gammaList = self.summ.gammaList
        self.dSetAttr3pt = self.summ.dSetAttr3pt

    # The ITD bins and mean
    self.bins = {}
    self.mean = {}

    # Read-in the selected fits that will be used in ITD evaluation
    # tSelFit[fit][ri][(mTag,z3)] -> selected fit time (int), flattened from
    # the nested 'Optimal Fits' structure: {tOpt: [([moms],[z3s]), ...]}
    self.tSelFit = {}
    for fit in self.fitLabels:
        self.bins[fit] = {}
        self.mean[fit] = {}
        self.tSelFit[fit] = {}
        for ri in self.RI:
            self.tSelFit[fit][ri] = {}
            for tOpt,momDisp in self.info['Optimal Fits'][fit][ri].items():
                for md in momDisp:
                    for mom in md[0]:
                        mTag = tags.momString(mom)
                        for z3 in md[1]:
                            self.tSelFit[fit][ri][(mTag,z3)] = int(tOpt)
    #--------------------------

    # Determine if there are plateau ranges provided
    # Only Real-Imaginary and tsep dependence for now
    self.platRng = None
    if 'Plateau Ranges' in self.info.keys():
        self.platRng = {}
        for ri in self.RI:
            self.platRng[ri] = {}
            for t,r in self.info['Plateau Ranges'][ri].items():
                self.platRng[ri][int(t)] = r

    print('ITD initialized')
def getDataASCII():
    """Read the three-point correlator data from ASCII files.

    Fills self.plainData['Re'/'Im'][mTag][dkey] with (Ncfg, tsep)-shaped
    float arrays, one per key dkey = (tsep, t0, z3, iop, row, gamma).
    Files are expected to hold one line per (config, tins) sample with the
    real and imaginary parts in columns 1 and 2.

    NOTE(review): closure — reads self, tags, ioForm, gmat, np from the
    enclosing scope.
    """
    print('\nWill read data from ASCII files')

    mainDir = self.dataInfo['Input Data']['Main Directory']

    for mom in self.moms:
        mTag = tags.momString(mom)
        mFTag = tags.momFile(mom)
        t0List = self.dSetAttr[mTag]['t0']
        tsepList = self.dSetAttr[mTag]['tsep']
        dispList = self.dSetAttr[mTag]['disp']
        Nrows = self.dSetAttr[mTag]['Nrows']
        Ncfg = self.dSetAttr[mTag]['Ncfg']
        phaseInfo = self.dSetAttr[mTag]['Phase Info']

        # Determine phase tag based on momentum sign
        if list(phaseInfo.keys())[0] == 'unphased':
            phFile = phaseInfo['unphased']
            phDir = 'unphased'
        elif list(phaseInfo.keys())[0] == 'phased':
            phFile = phaseInfo['phased']['Plus'] if mom[2] >= 0 else phaseInfo['phased']['Minus']
            phDir = 'phased/' + phFile
        else:
            raise ValueError('Supported Phase Tag keys are ["unphased","phased"]')

        for ri in self.RI:
            self.plainData[ri][mTag] = {}

        for tsep in tsepList:
            # The three-point function has one insertion time per tsep
            Nt = tsep
            tsepTag = tags.tsep(tsep)
            for t0 in t0List:
                t0Tag = tags.t0(t0)
                fileDir = ioForm.getThreePointDirASCII(mainDir, phDir, t0Tag, tsepTag, mFTag)
                print('Reading three-point data for momentum %s, tsep = %d, t0 = %d' % (mTag, tsep, t0))
                for z3 in dispList:
                    dispTag = tags.disp(z3)
                    for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                        srcOp, snkOp = opPair
                        for row in range(1, Nrows + 1):
                            for gamma in self.gammaList:
                                dkey = (tsep, t0, z3, iop, row, gamma)

                                # Determine gamma matrix name and row
                                insOp, insRow = gmat.insertionMap(gamma)

                                fileName = ioForm.getThreePointFileNameASCII(
                                    phFile, t0Tag, tsepTag, srcOp, snkOp, row,
                                    insOp, insRow, mFTag, dispTag, self.Nvec)
                                fileRead = '%s/%s' % (fileDir, fileName)

                                rawData = np.zeros((Ncfg, Nt), dtype=np.complex128)
                                with open(fileRead) as fp:
                                    line = fp.readlines()
                                    # Lines are ordered config-major: row c holds
                                    # config c // Nt at insertion time c % Nt
                                    c = 0
                                    for n in line:
                                        it = c % Nt
                                        icfg = c // Nt
                                        rawData[icfg, it] = complex(
                                            np.float128(n.split()[1]),
                                            np.float128(n.split()[2]))
                                        c += 1
                                # Done reading file
                                self.plainData['Re'][mTag][dkey] = rawData.real
                                self.plainData['Im'][mTag][dkey] = rawData.imag
        print('Reading three-point data for momentum %s completed.\n' % (mTag))
def getDataASCII():
    """Read the two-point correlator data from ASCII files.

    Fills self.plainData[mTag][dkey] with (Ncfg, Nt)-shaped complex arrays,
    one per key dkey = (t0, iop, row). Files hold one line per
    (config, timeslice) sample with real/imaginary parts in columns 1 and 2.

    NOTE(review): closure — reads self, tags, ioForm, np from the enclosing
    scope.
    """
    print('\nWill read data from ASCII files')

    mainDir = self.dataInfo['Input Data']['Main Directory']

    for mom in self.moms:
        mTag = tags.momString(mom)
        mFTag = tags.momFile(mom)
        t0List = self.dSetAttr[mTag]['t0']
        Nrows = self.dSetAttr[mTag]['Nrows']
        phaseInfo = self.dSetAttr[mTag]['Phase Info']

        # Determine phase tag based on momentum sign
        if list(phaseInfo.keys())[0] == 'unphased':
            phFile = phaseInfo['unphased']
            phDir = 'unphased'
        elif list(phaseInfo.keys())[0] == 'phased':
            phFile = phaseInfo['phased']['Plus'] if mom[2] >= 0 else phaseInfo['phased']['Minus']
            phDir = 'phased/' + phFile
        else:
            raise ValueError('Supported Phase Tag keys are ["unphased","phased"]')

        # These are the dimensions of each dataset
        Ncfg = self.dSetAttr[mTag]['Ncfg']
        Nt = self.dSetAttr[mTag]['Nt']

        self.plainData[mTag] = {}
        for t0 in t0List:
            t0Tag = tags.t0(t0)
            fileDir = ioForm.getTwoPointDirASCII(mainDir, phDir, t0Tag, mFTag)
            print('Reading two-point data for momentum %s, t0 = %d' % (mTag, t0))
            for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                srcOp, snkOp = opPair
                for row in range(1, Nrows + 1):
                    dkey = (t0, iop, row)
                    fileName = ioForm.getTwoPointFileNameASCII(
                        phFile, t0Tag, srcOp, snkOp, row, mFTag, self.Nvec)
                    fullFileName = '%s/%s' % (fileDir, fileName)
                    self.plainData[mTag][dkey] = np.zeros((Ncfg, Nt), dtype=np.complex128)
                    with open(fullFileName) as fp:
                        line = fp.readlines()
                        # Lines are ordered config-major: row c holds
                        # config c // Nt at timeslice c % Nt
                        c = 0
                        for n in line:
                            it = c % Nt
                            icfg = c // Nt
                            self.plainData[mTag][dkey][icfg, it] = complex(
                                np.float128(n.split()[1]),
                                np.float128(n.split()[2]))
                            c += 1
        print('Reading two-point data for momentum %s completed.' % (mTag))
def __init__(self, dataInfo, analysisInfo):
    """Initialize the three-point function data container.

    Parameters
    ----------
    dataInfo : dict
        Must contain 'Input Data' (with 'Source' and, depending on the
        source, 'Main Directory' or 'HDF5 File'), 'Insertion Operators'
        and 'Datasets'.
    analysisInfo : dict
        Must contain 'Binsize' and 'Nvec'.

    Raises
    ------
    ValueError
        On unsupported data source, missing source-specific keys,
        cross-row datasets, or momenta that are not purely z-direction.
    """
    self.dataInfo = dataInfo
    self.analysisInfo = analysisInfo

    # The three-point function also has a significant real and imaginary part
    self.RI = ['Re', 'Im']

    # Data containers
    # The data that is read/loaded
    # "plain" means not averaged over t0,src-snk operators, rows, or momentum,
    # i.e. there's dependence on these attributes
    self.plainData = {}
    self.plainBins = {}
    self.plainMean = {}

    # The averaged data over t0, src-snk operators and rows
    self.avgData = {}
    self.avgBins = {}
    self.avgMean = {}

    # The momentum- and z3-averaged data, that will be used throughout the analysis
    self.data = {}
    self.bins = {}
    self.mean = {}

    # All containers are keyed first by 'Re'/'Im'
    for ri in self.RI:
        self.plainData[ri] = {}
        self.plainBins[ri] = {}
        self.plainMean[ri] = {}
        self.avgData[ri] = {}
        self.avgBins[ri] = {}
        self.avgMean[ri] = {}
        self.data[ri] = {}
        self.bins[ri] = {}
        self.mean[ri] = {}

    # The number of Jackknife bins, and the binsize (same for plain and averaged data)
    self.Nbins = 0
    self.binsize = self.analysisInfo['Binsize']

    self.dataLoaded = False

    self.supportedDataSources = ['ASCII', 'HDF5']

    # Determine data source, make some checks
    self.dataSource = self.dataInfo['Input Data']['Source']
    if self.dataSource not in self.supportedDataSources:
        raise ValueError('\nUnsupported "Data Source" = %s ' % (self.dataSource))
    if self.dataSource == 'ASCII' and 'Main Directory' not in self.dataInfo[
            'Input Data'].keys():
        raise ValueError(
            '\n"Main Directory" of data must be provided in "Input Data" when data source is "ASCII"'
        )
    if self.dataSource == 'HDF5' and 'HDF5 File' not in self.dataInfo[
            'Input Data'].keys():
        raise ValueError(
            '\n"HDF5 File" must be provided in "Input Data" when data source is "HDF5"'
        )

    # Fill in Attributes
    self.Nvec = self.analysisInfo['Nvec']

    # The list of insertion operators we are considering
    self.gammaList = self.dataInfo['Insertion Operators']

    self.moms = []
    self.dispAvg = {}
    self.dSetAttr = {}
    self.dSetList = self.dataInfo['Datasets']
    for dSet in self.dSetList:
        momList = dSet['Mom List']
        if dSet['Compute X-rows']:
            raise ValueError(
                'Does not support doing cross-rows in two-point function for now!'
            )
        for momVec in momList:
            # BUGFIX: was `and`, which only rejected momenta with BOTH x and y
            # non-zero. Any non-zero x OR y component is unsupported (the
            # momentum-averaging below assumes purely z-direction momenta).
            if momVec[0] != 0 or momVec[1] != 0:
                raise ValueError(
                    '\n Currently support non-zero momentum only in the z-direction!'
                )
            mTag = tags.momString(momVec)

            # Dataset Attributes are listed for each momentum
            self.dSetAttr[mTag] = {}
            self.moms.append(momVec)
            for attr in [
                    't0', 'Ncfg', 'tsep', 'disp', 'Nrows', 'Compute X-rows',
                    'Phase Info'
            ]:
                self.dSetAttr[mTag][attr] = dSet[attr]

            # Determine the values of z3 that we will average over
            # (unique |z3| values, sorted ascending)
            self.dispAvg[mTag] = list(
                dict.fromkeys(np.abs(self.dSetAttr[mTag]['disp'])))
            self.dispAvg[mTag].sort()

            # Read source-sink operators
            intOpFile = dSet['Interpolating Operators File']
            self.dSetAttr[mTag]['intOpList'] = []
            with open(intOpFile) as fp:
                ops = fp.readlines()
                for op in ops:
                    self.dSetAttr[mTag]['intOpList'].append(
                        (op.split()[0], op.split()[1]))
                self.dSetAttr[mTag]['Nop'] = len(ops)

    # Get the momenta that will be averaged over (unique |Pz| values)
    self.momAvg = [[
        0, 0, zm
    ] for zm in list(dict.fromkeys(np.abs([z for x, y, z in self.moms])))]
    self.momAvg.sort()

    # Make sure to include entries for the averaged momentum in the dataset attributes
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        if mTag not in self.dSetAttr.keys():
            mTagD = tags.momString([mom[0], mom[1], -mom[2]])
            self.dSetAttr[mTag] = self.dSetAttr[mTagD]
            print(
                'Momentum %s not in original dataset attributes. Adding from momentum %s'
                % (mTag, mTagD))
def doStatistics(self):
    """Jackknife statistics for the two-point function.

    First pass (per read momentum): jackknife-sample the plain data, average
    over source-sink operators, t0's and rows, and build the per-t0 standard
    mean/error needed for the covariance matrix. Second pass: average the
    data over +/-Pz into self.data/self.bins/self.mean keyed by the averaged
    momentum tag.

    Raises
    ------
    ValueError
        If the data has not been loaded yet.
    """

    if not self.dataLoaded:
        raise ValueError(
            'Data must be loaded first, before doing Statistical Sampling')

    for mom in self.moms:
        mTag = tags.momString(mom)
        t0List = self.dSetAttr[mTag]['t0']
        Nrows = self.dSetAttr[mTag]['Nrows']
        Nt0 = len(t0List)
        Nop = self.dSetAttr[mTag]['Nop']
        Ncfg = self.dSetAttr[mTag]['Ncfg']
        Nt = self.dSetAttr[mTag]['Nt']
        # Total number of samples averaged into avgData
        Navg = Nrows * Nt0 * Nop

        # Determine the Jackknife sampling number of Bins
        self.Nbins = jackknife.Nbins(Ncfg, self.binsize)

        # The plain data Bins and Mean
        self.plainBins[mTag] = {}
        self.plainMean[mTag] = {}

        # That's the averaged data
        self.avgData[mTag] = np.zeros((Ncfg, Nt), dtype=np.complex128)

        # The mean required for the covariant matrix
        self.covMean[mTag] = {}

        for t0 in t0List:
            covSum = np.zeros((Ncfg, Nt), dtype=np.float128)
            for iop, opPair in enumerate(self.dSetAttr[mTag]['intOpList']):
                for row in range(1, Nrows + 1):
                    dkey = (t0, iop, row)

                    # Jackknife sampling on the Plain data (real part only)
                    self.plainBins[mTag][dkey] = np.zeros(
                        (self.Nbins, Nt), dtype=np.float128)
                    for t in range(Nt):
                        self.plainBins[mTag][dkey][:, t] = jackknife.sampling(
                            self.plainData[mTag][dkey][:, t].real,
                            self.Nbins, self.binsize)
                    self.plainMean[mTag][dkey] = jackknife.mean(
                        self.plainBins[mTag][dkey], self.Nbins, Nspl=Nt)

                    # Sum over Source-Sink operators, t0's and rows
                    self.avgData[mTag] += self.plainData[mTag][dkey]

                    # Sum over Source-Sink operators and rows
                    covSum += self.plainData[mTag][dkey].real

            # Standard Mean and Error over source-sink operators and rows,
            # for each t0 (for covariant matrix)
            covAvg = covSum / (Nop * Nrows)  # Still a (Ncfg * Nt) array
            self.covMean[mTag][t0] = (np.mean(covAvg, axis=0),
                                      np.std(covAvg, axis=0) / np.sqrt(Ncfg))

        # Sum over Source-Sink operators, t0's and rows
        self.avgData[mTag] = self.avgData[mTag] / Navg

        # Jackknife sampling over the averaged data, for each momentum
        self.avgBins[mTag] = np.zeros((self.Nbins, Nt), dtype=np.float128)
        for t in range(Nt):
            self.avgBins[mTag][:, t] = jackknife.sampling(
                self.avgData[mTag][:, t].real, self.Nbins, self.binsize)
        self.avgMean[mTag] = jackknife.mean(self.avgBins[mTag], self.Nbins,
                                            Nspl=Nt)
    # End for momentum -------------

    # Perform averaging over momentum
    for mom in self.momAvg:
        momNeg = [mom[0], mom[1], -mom[2]]
        mTag = tags.momString(mom)
        mTagPos = mTag
        mTagNeg = tags.momString(momNeg)

        Ncfg = self.dSetAttr[mTag]['Ncfg']
        Nt = self.dSetAttr[mTag]['Nt']

        self.data[mTag] = np.zeros((Ncfg, Nt), dtype=np.complex128)
        self.bins[mTag] = np.zeros((self.Nbins, Nt), dtype=np.float128)

        # Average over +/-Pz when both exist; otherwise take whichever is present
        if mom in self.moms and momNeg in self.moms:
            self.data[mTag] = 0.5 * (self.avgData[mTagPos] +
                                     self.avgData[mTagNeg])
            self.bins[mTag] = 0.5 * (self.avgBins[mTagPos] +
                                     self.avgBins[mTagNeg])
        elif mom in self.moms and momNeg not in self.moms:
            self.data[mTag] = self.avgData[mTagPos]
            self.bins[mTag] = self.avgBins[mTagPos]
        elif mom not in self.moms and momNeg in self.moms:
            self.data[mTag] = self.avgData[mTagNeg]
            self.bins[mTag] = self.avgBins[mTagNeg]
        # NOTE(review): no else branch — if neither sign is present the
        # zero-initialized arrays are kept silently; confirm this is intended.

        self.mean[mTag] = jackknife.mean(self.bins[mTag], self.Nbins, Nspl=Nt)

    print('Statistical evaluation completed')
def writeHDF5(self):
    """Write the three-point function data to the output HDF5 file.

    Layout:
      fullavg/<mom>/<disp>/<tsep>/<ins>/<ri>/{data,bins,mean}  (Pz- and z3-averaged)
      avg/<mom>/<disp>/<tsep>/<ins>/<ri>/{data,bins,mean}      (t0/op/row-averaged)
      plain/<mom>/<disp>/<tsep>/<ins>/<t0>/<op>/<row>/<ri>/{data,bins,mean}
    Mean datasets are stored as 32-bit floats (dtype='f').
    """
    h5_file = h5py.File(self.dataInfo['HDF5 Output File'], 'w')

    # Write the Pz- and z3-averaged data
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        tsepList = self.dSetAttr[mTag]['tsep']
        dispListAvg = self.dispAvg[mTag]
        for z3 in dispListAvg:
            dispTag = tags.disp(z3)
            for tsep in tsepList:
                tsepTag = tags.tsep(tsep)
                for gamma in self.gammaList:
                    insTag = tags.insertion(gamma)
                    dkeyAvg = (tsep, z3, gamma)
                    # Write the averaged data
                    for ri in self.RI:
                        avg_group = 'fullavg/%s/%s/%s/%s/%s' % (
                            mh5Tag, dispTag, tsepTag, insTag, ri)
                        dset_name_data = avg_group + '/data'
                        dset_name_bins = avg_group + '/bins'
                        dset_name_mean = avg_group + '/mean'
                        h5_file.create_dataset(
                            dset_name_data,
                            data=self.data[ri][mTag][dkeyAvg])
                        h5_file.create_dataset(
                            dset_name_bins,
                            data=self.bins[ri][mTag][dkeyAvg])
                        h5_file.create_dataset(
                            dset_name_mean,
                            data=self.mean[ri][mTag][dkeyAvg],
                            dtype='f')
    #--------------------------------------

    # Write the avg and plain data for the originally-read momenta
    for mom in self.moms:
        mTag = tags.momString(mom)
        mh5Tag = tags.momH5(mom)
        t0List = self.dSetAttr[mTag]['t0']
        tsepList = self.dSetAttr[mTag]['tsep']
        dispList = self.dSetAttr[mTag]['disp']
        Nrows = self.dSetAttr[mTag]['Nrows']
        for z3 in dispList:
            dispTag = tags.disp(z3)
            for tsep in tsepList:
                tsepTag = tags.tsep(tsep)
                for gamma in self.gammaList:
                    insTag = tags.insertion(gamma)
                    dkeyAvg = (tsep, z3, gamma)
                    # Write Avg data
                    for ri in self.RI:
                        avg_group = 'avg/%s/%s/%s/%s/%s' % (
                            mh5Tag, dispTag, tsepTag, insTag, ri)
                        dset_name_avgData = avg_group + '/data'
                        dset_name_avgBins = avg_group + '/bins'
                        dset_name_avgMean = avg_group + '/mean'
                        h5_file.create_dataset(
                            dset_name_avgData,
                            data=self.avgData[ri][mTag][dkeyAvg])
                        h5_file.create_dataset(
                            dset_name_avgBins,
                            data=self.avgBins[ri][mTag][dkeyAvg])
                        h5_file.create_dataset(
                            dset_name_avgMean,
                            data=self.avgMean[ri][mTag][dkeyAvg],
                            dtype='f')
                    for t0 in t0List:
                        t0Tag = tags.t0(t0)
                        for iop, opPair in enumerate(
                                self.dSetAttr[mTag]['intOpList']):
                            opTag = tags.src_snk(opPair)
                            for row in range(1, Nrows + 1):
                                rowTag = tags.row(row)
                                dkey = (tsep, t0, z3, iop, row, gamma)
                                # Write the plain data
                                for ri in self.RI:
                                    plain_group = 'plain/%s/%s/%s/%s/%s/%s/%s/%s' % (
                                        mh5Tag, dispTag, tsepTag, insTag,
                                        t0Tag, opTag, rowTag, ri)
                                    dset_name_plainData = plain_group + '/data'
                                    dset_name_plainBins = plain_group + '/bins'
                                    dset_name_plainMean = plain_group + '/mean'
                                    h5_file.create_dataset(
                                        dset_name_plainData,
                                        data=self.plainData[ri][mTag][dkey])
                                    h5_file.create_dataset(
                                        dset_name_plainBins,
                                        data=self.plainBins[ri][mTag][dkey])
                                    h5_file.create_dataset(
                                        dset_name_plainMean,
                                        data=self.plainMean[ri][mTag][dkey],
                                        dtype='f')
    #--------------------------------------
    h5_file.close()
    print('Three-point function data written in HDF5.')
def __init__(self, dataInfo, analysisInfo):
    """Initialize the two-point function data container.

    Parameters
    ----------
    dataInfo : dict
        Must contain 'Input Data' (with 'Source' and, depending on the
        source, 'Main Directory' or 'HDF5 File') and 'Datasets'.
    analysisInfo : dict
        Must contain 'Binsize' and 'Nvec'.

    Raises
    ------
    ValueError
        On unsupported data source, missing source-specific keys,
        cross-row datasets, or momenta that are not purely z-direction.
    """
    self.dataInfo = dataInfo
    self.analysisInfo = analysisInfo

    # Data containers
    # "plain" means not averaged over t0,src-snk operators, rows, or momentum,
    # i.e. there's dependence on these attributes
    self.plainData = {}  # The data that is read/loaded
    self.plainBins = {}  # The Jackknife sampling bins of the plain data
    self.plainMean = {}  # The Jackknife mean of the plain data

    self.avgData = {}  # The averaged data
    self.avgBins = {}  # The Jackknife sampling bins of the averaged data
    self.avgMean = {}  # The Jackknife mean of the averaged data

    self.data = {}  # The momentum-averaged data
    self.bins = {
    }  # The Jackknife sampling bins of the momentum-averaged data
    self.mean = {}  # The Jackknife mean of the momentum-averaged data

    self.Nbins = 0  # The number of Jackknife bins (same for plain and averaged data)

    self.covMean = {
    }  # Average over all attributes but t0, needed for the Covariant Matrix

    self.dataLoaded = False

    self.supportedDataSources = ['ASCII', 'HDF5']

    # Determine data source, make some checks
    self.dataSource = self.dataInfo['Input Data']['Source']
    if self.dataSource not in self.supportedDataSources:
        raise ValueError('\nUnsupported "Data Source" = %s ' % (self.dataSource))
    if self.dataSource == 'ASCII' and 'Main Directory' not in self.dataInfo[
            'Input Data'].keys():
        raise ValueError(
            '\n"Main Directory" of data must be provided in "Input Data" when data source is "ASCII"'
        )
    if self.dataSource == 'HDF5' and 'HDF5 File' not in self.dataInfo[
            'Input Data'].keys():
        raise ValueError(
            '\n"HDF5 File" must be provided in "Input Data" when data source is "HDF5"'
        )

    # Fill in Attributes
    self.Nvec = self.analysisInfo['Nvec']
    self.binsize = self.analysisInfo['Binsize']

    self.moms = []
    self.dSetAttr = {}
    self.dSetList = self.dataInfo['Datasets']
    for dSet in self.dSetList:
        momList = dSet['Mom List']
        if dSet['Compute X-rows']:
            raise ValueError(
                'Does not support doing cross-rows in two-point function for now!'
            )
        for momVec in momList:
            # BUGFIX: was `and`, which only rejected momenta with BOTH x and y
            # non-zero. Any non-zero x OR y component is unsupported (the
            # momentum-averaging below assumes purely z-direction momenta).
            if momVec[0] != 0 or momVec[1] != 0:
                raise ValueError(
                    '\n Currently support non-zero momentum only in the z-direction!'
                )
            mTag = tags.momString(momVec)

            # Dataset Attributes are listed for each momentum
            self.dSetAttr[mTag] = {}
            self.moms.append(momVec)
            for attr in [
                    't0', 'Ncfg', 'Nt', 'Nrows', 'Compute X-rows', 'Phase Info'
            ]:
                self.dSetAttr[mTag][attr] = dSet[attr]

            # Read source-sink operators
            intOpFile = dSet['Interpolating Operators File']
            self.dSetAttr[mTag]['intOpList'] = []
            with open(intOpFile) as fp:
                ops = fp.readlines()
                for op in ops:
                    self.dSetAttr[mTag]['intOpList'].append(
                        (op.split()[0], op.split()[1]))
                self.dSetAttr[mTag]['Nop'] = len(ops)

    # Get the momenta that will be averaged over (unique |Pz| values)
    self.momAvg = [[
        0, 0, zm
    ] for zm in list(dict.fromkeys(np.abs([z for x, y, z in self.moms])))]
    self.momAvg.sort()

    # Make sure to include entries for the averaged momentum in the dataset attributes
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        if mTag not in self.dSetAttr.keys():
            mTagD = tags.momString([mom[0], mom[1], -mom[2]])
            self.dSetAttr[mTag] = self.dSetAttr[mTagD]
            print(
                'Momentum %s not in original dataset attributes. Adding from momentum %s'
                % (mTag, mTagD))
def evaluate(self):
    """Evaluate the plain, summed and reduced-summed ratios.

    For each averaged momentum, tsep, z3 and gamma:
      - 'plain': C3pt(tins)/C2pt(tsep) per insertion time, per jackknife bin
      - 'sum'  : sum of the plain ratio over tins = 1..tsep-1 (source contact
                 term at tins = 0 excluded)
      - 'r-sum': finite difference of consecutive summed ratios,
                 (S(tsepH) - S(tsepL)) / (tsepH - tsepL), keyed by the lower tsep
    Results are stored in self.bins/self.mean[ratioType][ri][mTag][dkey].
    """
    for mom in self.momAvg:
        mTag = tags.momString(mom)
        tsepList = self.dSetAttr3pt[mTag]['tsep']
        tsepList_rs = self.dSetAttr3pt[mTag][
            'tsep'][:-1]  # Need this for the reduced-summed ratio
        dispListAvg = self.dispAvg[mTag]

        for t in self.ratioTypes:
            for ri in self.RI:
                self.bins[t][ri][mTag] = {}
                self.mean[t][ri][mTag] = {}

        for its, tsep in enumerate(tsepList):
            # Number of insertion times equals the source-sink separation
            Ntins = tsep
            for z3 in dispListAvg:
                for gamma in self.gammaList:
                    dkey = (tsep, z3, gamma)
                    for ri in self.RI:
                        self.bins['plain'][ri][mTag][dkey] = np.zeros(
                            (self.Nbins, Ntins), dtype=np.float64)
                        self.bins['sum'][ri][mTag][dkey] = np.zeros(
                            self.Nbins, dtype=np.float64)
                        for tins in range(Ntins):
                            # Plain ratio
                            self.bins['plain'][ri][mTag][dkey][:, tins] = (
                                self.c3pt.bins[ri][mTag][dkey][:, tins] /
                                self.c2pt.bins[mTag][:, tsep])
                            # Summed ratio
                            if tins > 0:  # Exclude source contact term
                                self.bins['sum'][ri][mTag][
                                    dkey] += self.bins['plain'][ri][mTag][
                                        dkey][:, tins]
                        self.mean['plain'][ri][mTag][
                            dkey] = jackknife.mean(
                                self.bins['plain'][ri][mTag][dkey],
                                self.Nbins, Nspl=Ntins)
                        self.mean['sum'][ri][mTag][dkey] = jackknife.mean(
                            self.bins['sum'][ri][mTag][dkey],
                            self.Nbins, Nspl=1)
        # End for tsep

        # Evaluate reduced-summed ratio
        for its, tsep in enumerate(tsepList_rs):
            tsepL = tsepList[its]       # lower separation (== tsep)
            tsepH = tsepList[its + 1]   # next-higher separation
            for z3 in dispListAvg:
                for gamma in self.gammaList:
                    dkey = (tsep, z3, gamma)
                    dkeyL = (tsepL, z3, gamma)
                    dkeyH = (tsepH, z3, gamma)
                    for ri in self.RI:
                        self.bins['r-sum'][ri][mTag][dkey] = (
                            (self.bins['sum'][ri][mTag][dkeyH] -
                             self.bins['sum'][ri][mTag][dkeyL]) /
                            (tsepH - tsepL))
                        self.mean['r-sum'][ri][mTag][
                            dkey] = jackknife.mean(
                                self.bins['r-sum'][ri][mTag][dkey],
                                self.Nbins, Nspl=1)
        print('Ratio evaluation for %s completed' % (mTag))
def makeLinearFit(fitSeq):
    """Perform linear fits of the ratio data vs. tsep, per jackknife bin.

    For each starting separation tL in fitSeq['tsepLow'], fits
    linearFit.model to the ratio bins over tsep >= tL, for every momentum,
    z3 and gamma. Stores fit-parameter bins/means in self.bins/self.mean
    under tags '<param>_tL<tL>' and chi-square bins/means in
    self.chiBins/self.chiMean.

    NOTE(review): closure — reads self, tags, scipyOpt, linearFit,
    jackknife, np from the enclosing scope.
    """
    fType = fitSeq['Type']
    fLabel = fitSeq['Label']
    tsepLowList = fitSeq['tsepLow']
    fPrmList = self.fitParams[fType]

    for mom in self.momAvg:
        mTag = tags.momString(mom)
        tsepList = self.dSetAttr3pt[mTag]['tsep']
        dispListAvg = self.dispAvg[mTag]

        # Determine the x-data for each tLow
        self.tsepFitX[fLabel][mTag] = {}
        for tL in tsepLowList:
            sLTag = 'tL%d' % (tL)
            # x-data: all separations from tL upwards
            self.tsepFitX[fLabel][mTag][sLTag] = tsepList[tsepList.
                                                          index(tL):]
            xData = self.tsepFitX[fLabel][mTag][sLTag]
            Nfdata = len(xData)

            for ri in self.RI:
                self.chiBins[fLabel][sLTag][ri][mTag] = {}
                self.chiMean[fLabel][sLTag][ri][mTag] = {}
                for fP in fPrmList:
                    fpTag = fP + '_%s' % (sLTag)
                    self.bins[fLabel][fpTag][ri][mTag] = {}
                    self.mean[fLabel][fpTag][ri][mTag] = {}

                for z3 in dispListAvg:
                    for gamma in self.gammaList:
                        dkeyF = (z3, gamma)
                        self.chiBins[fLabel][sLTag][ri][mTag][
                            dkeyF] = np.zeros(self.Nbins,
                                              dtype=np.float64)
                        for fP in fPrmList:
                            fpTag = fP + '_%s' % (sLTag)
                            self.bins[fLabel][fpTag][ri][mTag][
                                dkeyF] = np.zeros(self.Nbins,
                                                  dtype=np.float64)

                        # Perform the fits for each bin
                        for b in range(self.Nbins):
                            ydata = np.zeros(Nfdata, dtype=np.float64)
                            yerr = np.zeros(Nfdata, dtype=np.float64)
                            # Fill in fit data
                            # NOTE(review): the inner `tL` shadows the outer
                            # tsepLow loop variable — harmless in Python since
                            # the outer loop reassigns it, but fragile.
                            for itL, tL in enumerate(
                                    self.tsepFitX[fLabel][mTag]
                                [sLTag]):
                                dkey = (tL, z3, gamma)
                                ydata[itL] = self.ratioBins[ri][mTag][
                                    dkey][b]
                                # Per-point weight: jackknife error of the
                                # full-sample mean (same for all bins b)
                                yerr[itL] = self.ratioMean[ri][mTag][
                                    dkey][1]
                            # Perform the fit
                            fprmRes, covRes = scipyOpt.curve_fit(
                                linearFit.model, xData, ydata,
                                sigma=yerr)
                            self.chiBins[fLabel][sLTag][ri][mTag][
                                dkeyF][b] = linearFit.chiSquare(
                                    xData, ydata, yerr, fprmRes[0],
                                    fprmRes[1])
                            for ifP, fP in enumerate(fPrmList):
                                fpTag = fP + '_%s' % (sLTag)
                                self.bins[fLabel][fpTag][ri][mTag][
                                    dkeyF][b] = fprmRes[ifP]
                        # End for bins

                        # Jackknife averages
                        self.chiMean[fLabel][sLTag][ri][mTag][
                            dkeyF] = jackknife.mean(
                                self.chiBins[fLabel][sLTag][ri][mTag]
                                [dkeyF],
                                Nbins=self.Nbins, Nspl=1)
                        for fP in fPrmList:
                            fpTag = fP + '_%s' % (sLTag)
                            self.mean[fLabel][fpTag][ri][mTag][
                                dkeyF] = jackknife.mean(
                                    self.bins[fLabel][fpTag][ri][mTag]
                                    [dkeyF],
                                    Nbins=self.Nbins, Nspl=1)
        # End for tsepLow ------
        print('%s fits for momentum %s completed' % (fType, mTag))