# Analysis plotting scripts, collected from several files (some function
# names repeat). Module-level imports assumed by the functions below;
# project-local helpers (getMids, getEbins, getMedian, load_sim, load_data,
# loadHists, histReader, getColor, getComps, unfold, smoother, NumToFlux,
# powerFit, bayes2, my, etc.) come from the surrounding analysis package.
import os
import glob
from copy import copy

import numpy as np
import matplotlib.pyplot as plt
import healpy as hp
from scipy import optimize

# Unit conversions (assumed module-level definitions)
degree = np.pi / 180.
deg2rad = np.pi / 180.


def test(config, log=False, kg='gauss'):

    if config in ['IC59', 'IC79']:
        nfiles = 5 if config == 'IC79' else 14
        for i in range(nfiles):
            s = load_sim(config, spline=False, part=i)
            if i == 0:
                y, bins = ang_hist(s)
                x = getMids(bins)
            else:
                h = ang_hist(s)
                y += h[0]

    if config in ['IC86', 'IC86-II', 'IC86-III']:
        s = load_sim(config, spline=False)
        y, bins = ang_hist(s)
        x = getMids(bins)

    # Normalize values
    y = y.astype('float')
    ycut = (y != 0)
    x = x[ycut]
    y = y[ycut]
    dy = 1 / np.sqrt(y)        # relative Poisson error from raw counts
    y /= y.sum()
    yerr = y * dy

    popt, pcov = fitter(x, y, yerr, kg=kg)
    print config, popt, np.sqrt(np.diagonal(pcov))
    f0 = np.array([gauss(i, popt[:2]) for i in x])
    f1 = np.array([gauss(i, popt[2:]) for i in x])
    fit = np.array([myfunc(i, *popt) for i in x])

    # Test fit for gaussian with median angular resolution as sigma
    #sig = 3.65
    #amp = 2 * 1 / (sig * np.sqrt(2*np.pi))
    #f2 = np.array([gauss(i, [amp, sig]) for i in x])

    # Get area of each gaussian
    a0, a1 = f0.sum(), f1.sum()
    atot = float(a0 + a1)
    print 'Fraction1: %.2f' % (a0 / atot)
    #print 'Unaccounted: %.2f' % (y.sum() - atot)

    fig, ax = plt.subplots()
    ax.errorbar(x, y, yerr, fmt='.')
    ax.plot(x, fit)
    #ax.plot(x, f0)
    #ax.plot(x, f1)
    #ax.plot(x, f2)
    ax.set_xlim(0, 20)
    if log:
        ax.set_yscale('log')
        ax.set_ylim(1e-6, 1)
    plt.show()
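# The fit helpers used above (fitter, gauss, myfunc) are defined elsewhere in
# the package. As a minimal standalone sketch of the same technique -- a
# weighted fit of a sum of two zero-centered Gaussians to the normalized
# angular-error distribution -- something like the function below should
# reproduce the idea. Names and starting values are illustrative assumptions,
# not the package's actual implementation.
def _demo_double_gauss_fit(x, y, yerr):
    import numpy as np
    from scipy import optimize

    # Sum of two zero-centered Gaussians: amplitude + width for each
    def double_gauss(x, a0, s0, a1, s1):
        return (a0 * np.exp(-x**2 / (2*s0**2)) +
                a1 * np.exp(-x**2 / (2*s1**2)))

    # Weighted least squares; p0 is a rough guess (narrow core + wide tail)
    popt, pcov = optimize.curve_fit(double_gauss, x, y, sigma=yerr,
                                    p0=[0.1, 2., 0.01, 8.])
    return popt, np.sqrt(np.diagonal(pcov))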
def eres2(config, out=False, batch=False):

    # Basic setup
    fig, ax = plt.subplots()
    f = np.load('/home/fmcnally/anisotropy/icesim/%s_hists.npy' % config)
    #lw = 2
    #ms = 7*lw
    #pltParams = {'fmt':'.', 'lw':lw, 'ms':ms}

    # Energy binning information
    ebins = np.arange(2.75, 9.01, 0.05)
    x = getMids(ebins)

    for i, y in enumerate(f):
        ntot = float(y.sum())
        ax.step(x, y/ntot, label=i+1)

    ax.set_xlim(2.75, 8.5)
    tPars = {'fontsize':16}
    ax.set_xlabel(r'True Energy ($\log_{10}(E/\mathrm{GeV})$)', **tPars)
    ax.set_ylabel('Fraction of Events', **tPars)
    #ax.set_title('Energy Distributions for Cuts', fontsize=16)
    plt.legend(loc='upper right')

    if out != False:
        plt.savefig(out, dpi=300, bbox_inches='tight')
    if not batch:
        plt.show()
def myplot2():

    histFiles = getFiles('IT81', '201204')
    dates = ['20120417', '20120418']
    histFiles = [f for f in histFiles if any([d in f for d in dates])]

    fig, ax = plt.subplots()
    x = getMids(getEbins(reco=True))
    ytot, yerrtot = np.zeros((2, len(x)))
    ttot = 0.
    c0 = (x >= 6.2)

    for f in histFiles:
        h = np.load(f)
        h = h.item()
        y, yerr = getValues(h)
        date = getDate(f)
        t = float(getRunTime('IT81', date=date))
        ax.errorbar(x, y/t, np.sqrt(yerr)/t, label=date)
        # Increment total values
        ytot += y
        yerrtot += yerr
        ttot += t

    #ax.errorbar(x, ytot/ttot, np.sqrt(yerrtot)/ttot, label='All', fmt='k')
    ax.set_xlim((6, 9.5))
    ax.set_yscale('log')
    ax.legend()
    plt.show()
def zfix(s, cut='llh', nbins=100, thetaMax=40., minE=5.0,
         plot=False, out=False):

    from icecube.photospline import spglam as glam

    thetaMax *= degree
    zbins = np.linspace(1, np.cos(thetaMax), nbins+1)[::-1]
    ebins = getEbins()

    t = np.log10(s['MC_energy'])
    r = np.log10(s['ML_energy'])
    z = np.pi - s['zenith']

    # Calculate cut values
    c0 = np.logical_not(np.isnan(r))
    ecut = r >= minE
    c0 *= ecut
    if cut != None:
        c0 *= s['cuts'][cut]

    # Store median and standard deviation info
    x1 = np.cos(z)[c0]
    if x1.min() > zbins.min():
        zbins = zbins[zbins >= x1.min()]
    y = (r - t)[c0]
    medians, sigL, sigR, vars = getMedian(x1, y, zbins)
    w = 1/vars

    # Spline fit to the medians as a function of cos(zenith)
    nknots = 30
    step_scale = 2/5.
    step = (zbins.max() - zbins.min()) * step_scale
    mids = (zbins[1:] + zbins[:-1]) / 2.
    axes = [mids]
    knots = [np.linspace(zbins.min()-step, zbins.max()+step, nknots)]
    tab = glam.fit(medians, w, axes, knots, order=(4), penalties={2:1e-4})

    if plot:
        fig, ax = plt.subplots()
        #ax.set_title('Energy Resolution vs Reconstructed Zenith', fontsize=18)
        ax.set_xlabel(r'$\cos(\mathrm{\theta_{reco}})$', fontsize=16)
        ax.set_ylabel(r'$\log_{10}(E_{\mathrm{LLH}}/E_{\mathrm{true}})$',
                      fontsize=16)
        lw = 2
        ms = 7*lw
        pltParams = dict(fmt='.', lw=lw, ms=ms)
        # Energy resolution vs zenith
        zbins = np.linspace(1, np.cos(thetaMax), 21)[::-1]
        x = getMids(zbins)
        medians, sigL, sigR, vars = getMedian(x1, y, zbins)
        ax.errorbar(x, medians, yerr=(sigL, sigR), **pltParams)
        # Spline fit
        fitx = np.linspace(x.min(), x.max(), len(x)*3)
        fit = glam.grideval(tab, [fitx])
        ax.plot(fitx, fit)
        ax.set_xlim(0.8, 1)
        if out:
            plt.savefig(out)
        plt.show()

    # Apply the zenith-dependent correction to every event
    fit = glam.grideval(tab, [np.cos(z)])
    return r - fit
def myplot(config, month, st, fin):

    histFiles = getFiles(config, month)
    fig, ax = plt.subplots()
    x = getMids(getEbins(reco=True))
    ytot, yerrtot = np.zeros((2, len(x)))
    ttot = 0.
    c0 = (x >= 6.2)

    for f in histFiles[st:fin]:
        h = np.load(f)
        h = h.item()
        y, yerr = getValues(h)
        date = getDate(f)
        t = float(getRunTime(config, date=date))
        ax.errorbar(x, y/t, np.sqrt(yerr)/t, label=date)
        # Increment total values
        ytot += y
        yerrtot += yerr
        ttot += t

    ax.errorbar(x, ytot/ttot, np.sqrt(yerrtot)/ttot, label='All', fmt='k')
    ax.set_xlim((6, 9.5))
    ax.set_yscale('log')
    ax.legend()
    plt.show()
def getEff(s, cut, comp='joint', reco=True):

    eff, sig, relerr = {}, {}, {}
    a = np.log10(s['MC_energy'])
    Ebins = getEbins()
    Emids = getMids(Ebins)
    erangeDict = getErange()

    c0 = cut
    if comp != 'joint':
        compcut = s['comp'] == comp
        c0 = cut * compcut

    # Set radii for finding effective area
    rDict = {}
    keys = ['low', 'mid', 'high']
    for key in keys:
        rDict[key] = np.array([600, 800, 1100, 1700, 2600, 2900])
    rDict['low'][1] = 600
    Ebreaks = np.array([4, 5, 6, 7, 8, 9])
    rgrp = np.digitize(Emids, Ebreaks) - 1

    for key in keys:

        # Get efficiency and sigma
        simcut = np.array([sim in erangeDict[key] for sim in s['sim']])
        k = np.histogram(a[c0*simcut], bins=Ebins)[0]
        #k = Nfinder(a, c0*simcut)
        n = s['MC'][comp][key].astype('float')

        eff[key], sig[key], relerr[key] = np.zeros((3, len(k)))
        with np.errstate(divide='ignore', invalid='ignore'):
            eff[key] = k / n
            var = (k+1)*(k+2)/((n+2)*(n+3)) - (k+1)**2/((n+2)**2)
        sig[key] = np.sqrt(var)

        # Multiply by throw area
        r = np.array([rDict[key][i] for i in rgrp])
        eff[key] *= np.pi*(r**2)
        sig[key] *= np.pi*(r**2)

        # Deal with parts of the arrays with no information
        for i in range(len(eff[key])):
            if n[i] == 0:
                eff[key][i] = 0
                sig[key][i] = np.inf

    # Combine low, mid, and high energy datasets
    eff_tot = (np.sum([eff[key]/sig[key] for key in keys], axis=0) /
               np.sum([1/sig[key] for key in keys], axis=0))
    sig_tot = np.sqrt(1 / np.sum([1/sig[key]**2 for key in keys], axis=0))
    with np.errstate(divide='ignore'):
        relerr = sig_tot / eff_tot

    # Restrict to the reconstructed energy range
    # (TODO: find a cleaner way to do this)
    if reco:
        eff_tot = eff_tot[20:]
        sig_tot = sig_tot[20:]
        relerr = relerr[20:]

    return eff_tot, sig_tot, relerr
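# getEff uses a Bayesian estimate for the binomial efficiency k/n,
#   var = (k+1)(k+2) / ((n+2)(n+3)) - (k+1)^2 / (n+2)^2,
# which stays finite even when k = 0 or k = n. A minimal standalone check of
# the formula (illustrative only):
def _demo_binomial_eff_error(k=3., n=100.):
    import numpy as np
    eff = k / n
    var = (k+1)*(k+2) / ((n+2)*(n+3)) - (k+1)**2 / (n+2)**2
    return eff, np.sqrt(var)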
def distro(configs=None, xaxis='energy', weight=False, zcorrect=False):

    # General setup
    labelDict = {'energy':r'$\log_{10}(E/\mathrm{GeV})$',
                 'zenith':r'$\cos(\theta)$',
                 'core':'Distance from center (m)'}

    h = loadHists()
    eList = ['p','h','o','f']
    if configs == None:
        configs = sorted(list(set([k.split('_')[0] for k in h.keys()])))

    # Total counts
    counts, bins = histReader(h, xaxis, configs=configs, w=weight, z=zcorrect)
    xlabel = labelDict[xaxis]
    if xaxis == 'core':
        # Normalize by the area of each radial annulus
        binArea = np.pi * (bins[1:]**2 - bins[:-1]**2)
        counts /= binArea

    # Plot
    fig, ax = plt.subplots()
    x = getMids(bins)
    width = bins[1] - bins[0]
    ax.plot(x, counts, ls='steps')
    ax.set_xlabel(xlabel)
    ax.set_ylabel('Counts')
    ax.set_yscale('log')
    plt.show()
def counts(configs=None, weight=False, zcorrect=False):

    h = loadHists()
    eList = ['p','h','o','f']
    if configs == None:
        configs = sorted(list(set([k.split('_')[0] for k in h.keys()])))
    hParams = {'configs':configs, 'w':weight, 'z':zcorrect}

    # Build histograms of desired information
    N, Err = {}, {}

    # Total counts
    N['All'], bins = histReader(h, 'energy', **hParams)
    Err['All'], bins = histReader(h, 'energy', err=True, **hParams)

    # Counts by composition
    for e in eList:
        N[e], bins = histReader(h, 'energy', e=e, **hParams)
        Err[e], bins = histReader(h, 'energy', e=e, err=True, **hParams)

    fig, ax = plt.subplots()
    ax.set_xlabel(r'$\log_{10}(E/\mathrm{GeV})$')
    ax.set_ylabel('Counts')

    # Plot reconstructions
    for e in eList + ['All']:
        pnt = getColor(e) + '.'
        ax.errorbar(getMids(bins), N[e], yerr=Err[e], fmt=pnt, label=e)

    ax.set_yscale('log')
    ax.legend(loc='lower left')
    plt.show()
def makeTable(bintype='logdist', plot=False):

    from icecube.photospline import spglam as glam
    from icecube.photospline import splinefitstable

    # Starting parameters
    my.setupShowerLLH(verbose=False)
    s = load_sim(bintype=bintype)
    outFile = '%s/IT73_sim/Zfix_%s.fits' % (my.llh_data, bintype)
    nbins = 100
    thetaMax = 40.
    minE = 4.0

    thetaMax *= np.pi / 180.
    zbins = np.linspace(1, np.cos(thetaMax), nbins+1)[::-1]
    ebins = getEbins()

    t = np.log10(s['MC_energy'])
    r = np.log10(s['ML_energy'])
    z = np.pi - s['zenith']

    # Calculate cut values
    c0 = s['cuts']['llh']
    ecut = r >= minE
    c0 *= ecut

    # Store median and standard deviation info
    x1 = np.cos(z)[c0]
    if x1.min() > zbins.min():
        zbins = zbins[zbins >= x1.min()]
    y = (r - t)[c0]
    medians, sigL, sigR, vars = getMedian(x1, y, zbins)
    w = 1/vars

    # Spline fit to the medians
    nknots = 30
    step_scale = 2/5.
    step = (zbins.max() - zbins.min()) * step_scale
    mids = (zbins[1:] + zbins[:-1]) / 2.
    axes = [mids]
    knots = [np.linspace(zbins.min()-step, zbins.max()+step, nknots)]
    tab = glam.fit(medians, w, axes, knots, order=(4), penalties={2:1e-4})

    if os.path.exists(outFile):
        os.remove(outFile)
    splinefitstable.write(tab, outFile)

    if plot:
        fig, ax = plt.subplots()
        ax.set_title('Energy Resolution vs Reconstructed Zenith', fontsize=18)
        ax.set_xlabel('Cos(zenith)', fontsize=16)
        ax.set_ylabel('Ereco - Etrue (median)', fontsize=16)
        lw = 2
        ms = 7*lw
        pltParams = dict(fmt='.', lw=lw, ms=ms)
        # Energy resolution vs zenith
        x = getMids(zbins)
        ax.errorbar(x, medians, yerr=(sigL, sigR), **pltParams)
        # Spline fit
        fitx = np.linspace(x.min(), x.max(), len(x)*3)
        fit = glam.grideval(tab, [fitx])
        ax.plot(fitx, fit)
        plt.show()
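# Sketch: reading the table written by makeTable back for evaluation. This
# assumes the photospline API of the same vintage as the glam/splinefitstable
# calls above (splinefitstable.read alongside splinefitstable.write); verify
# against the installed version before relying on it.
def _demo_read_spline(fitsfile):
    import numpy as np
    from icecube.photospline import spglam as glam
    from icecube.photospline import splinefitstable
    tab = splinefitstable.read(fitsfile)
    # Evaluate the zenith correction on a grid of cos(zenith) values
    cz = np.linspace(0.8, 1.0, 50)
    return glam.grideval(tab, [cz])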
def myCounts(spec=0, out=None):

    h = loadHists()
    configs = sorted(list(set([k.split('_')[0] for k in h.keys()])))
    hParams = {'configs':configs, 'w':False, 'z':True}
    tParams = {'fontsize':14}

    # Minimum energy
    emin = 6.2

    fig, ax = plt.subplots()
    ax.set_xlabel(r'$\log_{10}(E/\mathrm{GeV})$', **tParams)
    ax.set_ylabel('Normalized Counts', **tParams)

    # Spectrum for deficit (60 - 120)
    ND, bins = histReader(h, 'energy', ramin=60, ramax=120, **hParams)
    emids = getMids(bins)
    ecut = (emids >= emin)
    emids = emids[ecut]
    scale = (10**emids)**spec
    ND = ND[ecut]
    Err = np.sqrt(ND)
    Err[ND == 0] = 2.3        # ~90% Poisson upper limit for empty bins
    ntot = float(ND.sum())
    NDN = ND * scale / ntot
    Err = Err * scale / ntot

    # Custom errorbars: keep lower limits positive for the log scale
    Errmin = copy(Err)
    for i in range(len(Errmin)):
        if NDN[i] - Errmin[i] < 1e-8:
            Errmin[i] = NDN[i] - 1e-9
    ax.errorbar(emids, NDN, yerr=(Errmin, Err), fmt='.', label='Deficit')

    # Spectrum for other (0 - 60 + 120 - 360)
    N0, bins = histReader(h, 'energy', ramin=0, ramax=60, **hParams)
    N1, bins = histReader(h, 'energy', ramin=120, ramax=360, **hParams)
    N = (N0 + N1)[ecut]
    Err = np.sqrt(N)
    Err[N == 0] = 2.3
    ntot = float(N.sum())
    NN = N * scale / ntot
    Err = Err * scale / ntot

    # Custom errorbars
    Errmin = copy(Err)
    for i in range(len(Errmin)):
        if NN[i] - Errmin[i] < 1e-8:
            Errmin[i] = NN[i] - 1e-9
    ax.errorbar(emids, NN, yerr=(Errmin, Err), fmt='.', label='Rest of sky')

    if spec == 0:
        ax.set_ylim((1e-8, 1))
    ax.set_yscale('log')
    ax.legend()

    print 'Bayes factor:', bayes2(NDN, NN)

    if out != None:
        plt.savefig(out, dpi=300, bbox_inches='tight')
    plt.show()
def fakeSpec(s, cut, testSpec, reco=True, emin=None):

    print 'Creating fake spectrum...'
    t = np.log10(s['MC_energy'])
    Ebins = getEbins(reco=reco)
    Emids = getMids(Ebins)

    if emin != None:
        Emids = Emids[Emids > emin]
        Ebins = Ebins[-len(Emids)-1:]
        for e in testSpec.keys():
            testSpec[e] = testSpec[e][-len(Emids):]
    #st = len(Emids) - len(testSpec[testSpec.keys()[0]])

    samples = np.zeros(len(t), dtype='bool')
    passindex = []
    #binList = np.digitize(t, Ebins[st:]) - 1
    binList = np.digitize(t, Ebins) - 1

    # testSpec should contain spectrum shape for proton and iron
    for e in testSpec.keys():

        compCut = np.array([True for i in range(len(cut))])
        if e in ['P','He','O','Fe']:
            compCut = s['comp'] == e
        c0 = cut * compCut
        #N_passed = Nfinder(t, c0)[st:]
        N_passed = np.histogram(t[c0], bins=Ebins)[0]

        # Fit the distribution to our available data
        diff = N_passed / testSpec[e]
        shift = N_passed[diff.argmin()] / testSpec[e][diff.argmin()]
        testSpec[e] *= shift

        # Wiggle within poisson errors
        testSpec[e] = np.around(testSpec[e])
        for i in range(len(testSpec[e])):
            if i != diff.argmin():
                testSpec[e][i] = np.random.poisson(testSpec[e][i])

        # From distribution, pull out events to create spectrum
        #for i in range(len(Emids[st:])):
        for i in range(len(Emids)):
            # Get events in desired energy bin
            ecut = (binList == i)
            mc = (ecut * c0).sum()
            # Build array of randomly selected indexes
            temp = np.zeros(mc, dtype='bool')
            temp[:int(testSpec[e][i])] = True
            np.random.shuffle(temp)
            # Find indexes of the events that passed
            # (np.where returns a tuple -- index into its first element)
            passindex.append(np.where(ecut * c0)[0][temp])

    # Fill samples with events that passed
    for index in passindex:
        samples[index] = True

    return samples
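# The core trick in fakeSpec is per-bin downsampling: in each energy bin,
# keep a random subset of the simulated events so the surviving counts follow
# a target spectrum, with Poisson fluctuations applied bin by bin. A minimal
# standalone version of that resampling step (names are illustrative):
def _demo_resample_to_spectrum(energies, target_counts, bins):
    import numpy as np
    keep = np.zeros(len(energies), dtype=bool)
    binIdx = np.digitize(energies, bins) - 1
    for i, n in enumerate(target_counts):
        idx = np.where(binIdx == i)[0]
        # Fluctuate the target count, capped by the available statistics
        nkeep = min(int(np.random.poisson(n)), len(idx))
        keep[np.random.choice(idx, nkeep, replace=False)] = True
    return keep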
def counts(config, cut='llh', bintype='logdist', weight=False, zcorrect=False):

    dataList = getDataList(config, bintype)
    bins = getEbins(reco=True)

    # Build histograms of desired information
    N, Err = {}, {}
    for cfg, date in dataList[:2]:

        d = load_data(cfg, date, bintype)
        eList = getComps(d)
        c0 = d['cuts'][cut]
        r = np.log10(d['ML_energy'])
        if zcorrect:
            r -= zfix(d['zenith'], bintype=bintype)

        # Total counts
        w = d['weights'][c0] if weight else None
        w2 = d['weights'][c0]**2 if weight else None
        counts = np.histogram(r[c0], bins=bins, weights=w)[0]
        errors = np.sqrt(np.histogram(r[c0], bins=bins, weights=w2)[0])
        try:
            N['All'] += counts
            Err['All'] += errors
        except KeyError:
            N['All'] = counts
            Err['All'] = errors

        # Counts by composition
        for e in eList:
            ecut = d['llh_comp'] == e
            c1 = c0 * ecut
            w = d['weights'][c1] if weight else None
            w2 = d['weights'][c1]**2 if weight else None
            counts = np.histogram(r[c1], bins=bins, weights=w)[0]
            errors = np.sqrt(np.histogram(r[c1], bins=bins, weights=w2)[0])
            try:
                N[e] += counts
                Err[e] += errors
            except KeyError:
                N[e] = counts
                Err[e] = errors

    fig, ax = plt.subplots()
    ax.set_xlabel(r'$\log_{10}(E/\mathrm{GeV})$')
    ax.set_ylabel('Counts')

    # Plot reconstructions
    for e in eList + ['All']:
        pnt = getColor(e) + '.'
        ax.errorbar(getMids(bins), N[e], yerr=Err[e], fmt=pnt, label=e)

    ax.set_yscale('log')
    ax.legend(loc='lower left')
    plt.show()
def getTest(x, xerr, args):

    if args.name in ['llh','llhcut']:
        bins = np.linspace(-20, 20, 151)
    if args.name == 'energy':
        bins = np.arange(6.25, 9.501, 0.05)
    mids = getMids(bins)

    # Weighted-average bin value for each pixel
    with np.errstate(invalid='ignore'):
        x = np.sum((mids * x) / np.sum(x, axis=1)[:,np.newaxis], axis=1)
    x[x!=x] = 0.    # replace NaNs from empty pixels
    ave_x = x.mean()
    return x - ave_x
def histMedian(h, bins):

    # Assume you only want to operate on the last dimension
    nx, ny = h.shape
    median, sigL, sigR, var = np.zeros((4, nx))
    binMids = getMids(bins, infvalue=100)
    for i in range(nx):
        median[i] = histPercentile(h[i], 50, binMids)
        sigL[i] = histPercentile(h[i], 16, binMids)
        sigR[i] = histPercentile(h[i], 84, binMids)
        var[i] = histVar(h[i], binMids)

    return median, sigL, sigR, var
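# histPercentile (used above) presumably walks the cumulative distribution of
# the histogram. A minimal standalone equivalent, using bin centers as the
# representative values and assuming a non-empty histogram:
def _demo_hist_percentile(counts, binMids, q=50):
    import numpy as np
    cdf = np.cumsum(counts) / float(np.sum(counts))
    return binMids[np.searchsorted(cdf, q / 100.)]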
def line_fit(s, cut, comp='joint', st=45):

    lineFit = lambda x, b: b
    Emids = getMids(getEbins())
    eff, sigma, relerr = getEff(s, cut, comp=comp)
    Emids, eff, sigma, relerr = Emids[st:], eff[st:], sigma[st:], relerr[st:]

    x = Emids.astype('float64')
    y = eff
    popt, pcov = optimize.curve_fit(lineFit, x, y, sigma=sigma)
    yfit = lineFit(x, *popt)
    yfit = np.array([yfit for i in range(len(x))])
    chi2 = np.sum(((yfit - y)/sigma)**2) / len(x)
    print 'Chi2:', chi2

    return yfit, relerr
def powerFit(s0, sTable=False, st=False, reco=True):

    Emids = getMids(getEbins(reco=reco))
    #if st:
    #    Emids = Emids[st:]
    Tf = (10**Emids)**s0

    if sTable:
        for e, spec in sTable:
            start = np.nonzero(Emids >= e)[0][0]
            shift = Tf[start] / ((10**Emids[start])**spec)
            Tf[start:] = shift * (10**Emids[start:])**spec

    sumprobs = Tf.sum() * 2
    Tf /= sumprobs

    return Tf
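# powerFit builds an E^s0 shape and, via sTable, stitches in new spectral
# indices above given break energies, matching the normalization at each
# break so the curve stays continuous. A standalone sketch with a single
# break (simplified normalization; illustrative only):
def _demo_broken_power_law(logE, s0=-0.25, ebreak=7.5, s1=-0.75):
    import numpy as np
    logE = np.asarray(logE)
    E = 10.**logE
    f = E**s0
    c0 = (logE >= ebreak)
    if c0.any():
        i0 = np.argmax(c0)            # first bin above the break
        shift = f[i0] / (E[i0]**s1)   # enforce continuity at the break
        f[c0] = shift * E[c0]**s1
    return f / f.sum()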
def new():

    fig, ax = plt.subplots()
    f = np.load('IC59_hists_test.npy')
    f = f.item()
    zbins = np.linspace(0, 1, 151)
    x = getMids(zbins)

    for i, y in enumerate(f['zenith']):
        ntot = float(y.sum())
        ax.step(x, y/ntot, label=i)

    ax.set_ylabel('Fraction of Events')
    ax.set_xlabel('Cos(zenith)')
    ax.legend(loc='upper left')
    plt.show()
def percentOverlap():

    files = glob.glob('IC*_hists.npy')
    files.sort()

    for f in files:
        h = np.load(f)
        nbins = 3
        bins = np.arange(2.75, 9.01, 0.05)
        binMids = getMids(bins, infvalue=100)
        print f
        median, sigL, sigR, var = histMedian(h, bins)
        for i in range(1, len(h)):
            print 'Bin %i max = %f' % (i-1, sigR[i-1])
            ecut = (binMids > sigR[i-1])
            myfrac = float(h[i][ecut].sum()) / h[i].sum()
            print 'Fraction of events in bin %i > u.l.: %f' % (i, myfrac)
def singlePlot(x, xerr, args):

    fig, ax = plt.subplots()
    if args.name in ['llh','llhcut']:
        bins = np.linspace(-20, 20, 151)
        xyDict = {'Lighter':(33, 97), 'Heavier':(33, 269)}
    if args.name == 'energy':
        bins = np.arange(6.25, 9.501, 0.05)
        xyDict = {'Excess':(150, 185)}
    mids = getMids(bins)

    degree = np.pi / 180.
    npix = len(x)
    nside = hp.npix2nside(npix)

    for i, key in enumerate(xyDict.keys()):

        # Plot distribution for pixel
        thetaPix, phiPix = xyDict[key][0]*degree, xyDict[key][1]*degree
        pix = hp.ang2pix(nside, thetaPix, phiPix)
        normFactor = float(x[pix].sum())
        ax.errorbar(mids, x[pix]/normFactor,
                    yerr=np.sqrt(xerr[pix])/normFactor, label=key)

        theta, phi = hp.pix2ang(nside, range(npix))
        thetaRing, phiRing = hp.pix2ang(nside, pix)
        thetaCut = (theta == thetaRing)
        print key, ':', bayes2(x[pix], np.sum(x[thetaCut], axis=0))

        # Plot distribution for rest of declination band
        normFactor = float(x[thetaCut].sum())
        ax.errorbar(mids, np.sum(x[thetaCut], axis=0)/normFactor,
                    yerr=np.sqrt(np.sum(xerr[thetaCut], axis=0))/normFactor,
                    label='Average')

    #ledge, redge = getSmallRange(x[pix])
    #ax.errorbar(mids[ledge:redge], x[pix][ledge:redge],
    #        yerr=np.sqrt(xerr[pix][ledge:redge]), label=key)

    ax.set_xlabel(r'Energy ($\log_{10}(E/\mathrm{GeV})$)')
    ax.set_ylabel(r'Normalized Counts')
    ax.set_yscale('log')
    plt.legend()

    if args.out:
        plt.savefig(args.out, dpi=300, bbox_inches='tight')
    if not args.batch:
        plt.show()
def distro(config, bintype='logdist', cut='llh', xaxis='energy', weight=False):

    # General setup
    labelDict = {'energy':r'$\log_{10}(E/\mathrm{GeV})$',
                 'zenith':r'$\cos(\theta)$',
                 'core':'Distance from center (m)'}
    binDict = {'energy':getEbins(),
               'zenith':np.linspace(0.8, 1, 41),
               'core':np.linspace(0, 700, 71)}

    dataList = getDataList(config, bintype)
    bins = binDict[xaxis]
    xlabel = labelDict[xaxis]
    #fbins = fineBins(bins)

    # Build histograms of desired information
    for cfg, date in dataList[:1]:

        d = load_data(cfg, date, bintype)
        c0 = d['cuts'][cut]
        w = d['weights'][c0] if weight else None
        if xaxis == 'energy':
            y = np.log10(d['ML_energy'])
        if xaxis == 'zenith':
            y = np.cos(d['zenith'])
        if xaxis == 'core':
            y = np.sqrt(d['ML_x']**2 + d['ML_y']**2)

        counts = np.histogram(y[c0], bins=bins, weights=w)[0]
        try:
            h += counts
        except NameError:
            h = counts

    # Plot
    fig, ax = plt.subplots()
    x = getMids(bins)
    width = bins[1] - bins[0]
    ax.plot(x, h, ls='steps')
    ax.set_xlabel(xlabel)
    ax.set_ylabel('Counts')
    ax.set_yscale('log')
    plt.show()
def monthCheck(config):

    histFiles = getFiles(config)
    dateList = [getDate(f) for f in histFiles]
    monthList = sorted(list(set([d[:6] for d in dateList])))

    # Build total histograms
    x = getMids(getEbins(reco=True))
    c0 = (x >= 6.2)
    ycfg, yerrcfg = np.zeros((2, len(x)))
    tcfg = 0.
    for f in histFiles:
        h = np.load(f)
        h = h.item()
        y, yerr = getValues(h)
        date = getDate(f)
        t = float(getRunTime(config, date=date))
        ycfg += y
        yerrcfg += yerr
        tcfg += t

    for month in monthList:

        tempFiles = [f for f in histFiles if getDate(f)[:6]==month]
        ymonth, yerrmonth = np.zeros((2, len(x)))
        tmonth = 0.
        for f in tempFiles:
            h = np.load(f)
            h = h.item()
            y, yerr = getValues(h)
            date = getDate(f)
            t = float(getRunTime(config, date=date))
            ymonth += y
            yerrmonth += yerr
            tmonth += t

        # Chi-squared comparison of monthly vs full-config rates
        fmonth = (ymonth/tmonth)[c0]
        ferrmonth = (yerrmonth/tmonth)[c0]
        ferrmonth[ferrmonth==0] = np.inf
        fcfg = (ycfg/tcfg)[c0]
        ferrcfg = (yerrcfg/tcfg)[c0]
        ferrcfg[ferrcfg==0] = np.inf
        chi2 = np.sum((fmonth - fcfg)**2 / (ferrmonth**2 + ferrcfg**2))
        print '%s : %s' % (month, chi2)
def core_res(s, cut=None, nbins=40, minE=4, title=True, zcorrect=False,
             out=False):

    # Setup plot
    fig, ax = plt.subplots()
    if title:
        ax.set_title('Core Resolution vs True Energy', fontsize=18)
    ax.set_xlabel(r'$\log_{10}(E/\mathrm{GeV})$', fontsize=16)
    ax.set_ylabel(r'$\vec{x}_{\mathrm{LLH}} - \vec{x}_{\mathrm{true}}$ [m]',
                  fontsize=16)
    lw = 2
    ms = 7*lw
    pltParams = dict(fmt='.', lw=lw, ms=ms)

    # Group into larger bins in energy
    Ebins = getEbins()
    ebins = np.linspace(Ebins.min(), Ebins.max(), nbins+1)
    if ebins[-1] != Ebins[-1]:
        ebins = np.append(ebins, Ebins[-1])

    # Plot Ereco histograms for each energy bin in Etrue
    r = np.log10(s['ML_energy'])
    if zcorrect:
        r = zfix(s)
    c0 = np.logical_not(np.isnan(s['ML_energy']))
    ecut = (r >= minE)
    c0 *= ecut
    if cut != None:
        c0 *= s['cuts'][cut]

    t = np.log10(s['MC_energy'])[c0]
    tx, ty = s['MC_x'][c0], s['MC_y'][c0]
    rx, ry = s['ML_x'][c0], s['ML_y'][c0]

    # Store median and standard deviation info
    x = getMids(ebins)
    y = np.sqrt((rx-tx)**2 + (ry-ty)**2)
    medians, sigL, sigR, vars = getMedian(t, y, ebins)
    ax.errorbar(x, medians, yerr=(sigL, sigR), **pltParams)

    if out:
        plt.savefig(out)
    plt.show()
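# getMedian (used here and in eres/zfix) bins y by x and returns the median
# with one-sided spreads from the 16th/84th percentiles. A standalone sketch
# of that behavior (the project version also returns a variance estimate):
def _demo_binned_median(x, y, bins):
    import numpy as np
    idx = np.digitize(x, bins) - 1
    med, sigL, sigR = np.zeros((3, len(bins)-1))
    for i in range(len(bins)-1):
        yi = y[idx == i]
        if len(yi) == 0:
            continue
        q16, q50, q84 = np.percentile(yi, [16, 50, 84])
        med[i] = q50
        sigL[i], sigR[i] = q50 - q16, q84 - q50
    return med, sigL, sigR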
def counts(d, param, logx, logy):

    binList = [d['bins'][i][0] for i in d['bins']]
    idx = binList.index(param)

    # Sum the counts array over every axis except the requested parameter
    axes = [i for i in d['bins']]
    axes.remove(idx)
    axes = tuple(axes)

    bins = d['bins'][idx][1]
    x = getMids(bins)
    y = d['counts'].sum(axis=axes)

    fig, ax = plt.subplots()
    ax.plot(x, y, '.')
    if logx:
        ax.set_xscale('log')
    if logy:
        ax.set_yscale('log')
    plt.show()
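# The axis bookkeeping above marginalizes the stored N-dimensional histogram
# down to a single parameter by summing over every other axis. The same
# operation in isolation (illustrative):
def _demo_marginalize(counts, keep_axis):
    import numpy as np
    axes = tuple(i for i in range(counts.ndim) if i != keep_axis)
    return counts.sum(axis=axes)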
def plotter(s, cut, comp='joint', emin=6.2, ndiv=False, out=False, fit=False):

    fig, ax = plt.subplots()
    #ax.set_title('Effective Area vs Energy')
    ax.set_xlabel(r'$\log_{10}(E/\mathrm{GeV})$')
    ax.set_ylabel(r'Effective Area ($m^2$)')

    Emids = getMids(getEbins(reco=True))
    eff, sigma, relerr = getEff(s, cut, comp=comp)
    #eff2, relerr2 = line_fit(s, cut, comp=comp, st=st)

    # Fit a constant above emin
    lineFit = lambda x, b: b
    x = Emids.astype('float64')
    y = eff
    c0 = x >= emin
    popt, pcov = optimize.curve_fit(lineFit, x[c0], y[c0], sigma=sigma[c0])
    yfit = lineFit(x[c0], *popt)
    yfit = np.array([yfit for i in range(len(x[c0]))])
    chi2 = np.sum(((yfit - y[c0])/sigma[c0])**2) / len(x[c0])
    print 'Chi2:', chi2
    eff2 = yfit

    # Give the option for combined bins
    if ndiv:
        eff_joint, en_joint = [], []
        for j in range(len(eff)/ndiv):
            start = ndiv*j
            end = ndiv*(j+1)
            eff_joint.append(np.mean(eff[start:end]))
            en_joint.append(np.mean(Emids[start:end]))
        ax.plot(en_joint, eff_joint, 'o', label=comp)
    else:
        ax.errorbar(Emids, eff, yerr=sigma, fmt='.', label=comp)

    if fit:
        ax.plot(Emids[c0], eff2)

    #ax.legend(loc='upper left')
    #ax.set_yscale('log')
    if out:
        plt.savefig(out)
    plt.show()
def original():

    files = glob.glob('IC*_hists.npy')
    files.sort()

    for fname in files:
        fig, ax = plt.subplots()
        f = np.load(fname)
        ebins = np.arange(2.75, 9.01, 0.05)
        x = getMids(ebins)
        for i, y in enumerate(f):
            ntot = float(y.sum())
            ax.step(x, y/ntot, label=i)
        ax.set_title(os.path.basename(fname)[:-4])
        ax.set_ylabel('Fraction of Events')
        ax.set_xlabel(r'$\log_{10}(E/\mathrm{GeV})$')
        #ax.legend(loc='upper right')
        #ax.set_yscale('log')
        plt.show()
def myEnergy():

    h = loadHists()
    configs = sorted(list(set([k.split('_')[0] for k in h.keys()])))
    hParams = {'configs':configs, 'w':False, 'z':True}

    N = {}
    for e in ['p','h','o','f']:

        N[e], bins = histReader(h, 'energy', e=e, **hParams)
        emids = getMids(bins)
        ecut = (emids >= 6.2)
        N[e] = N[e][ecut]
        emids = emids[ecut]
        print e, N[e].sum()

        # Walk the cumulative counts to find the median energy bin
        ntemp, i = 0, 0
        check = False
        print N[e]
        while not check:
            ntemp += N[e][i]
            if ntemp >= sum(N[e])/2:
                check = True
                emedian = emids[i]
            i += 1
        print emedian
def eres(s, cut=None, xaxis='energy', nbins=20, minE=4.0, rt='t',
         thetaMax=55., zcorrect=False, title=True, out=False, ax=None):

    titleDict = {'zenith':'Zenith Angle', 'energy':'Energy',
                 'core':'Core Position'}
    xDict = {'zenith':r'$\cos(\mathrm{\theta_{true}})$',
             'energy':r'$\log_{10}(E/\mathrm{GeV})$',
             'core':'Core Position (m)'}
    rtDict = {'r':'Reconstructed', 't':'True'}

    # Setup plot
    if ax == None:
        fig, ax = plt.subplots()
    if title:
        ax.set_title('Energy Resolution vs %s %s' %
                     (rtDict[rt], titleDict[xaxis]), fontsize=18)
    ax.set_xlabel(xDict[xaxis], fontsize=16)
    ax.set_ylabel(r'$\log_{10}(E_{\mathrm{LLH}}/E_{\mathrm{true}})$',
                  fontsize=16)
    lw = 2
    ms = 7*lw
    pltParams = dict(fmt='.', lw=lw, ms=ms)

    if xaxis == 'energy':
        Ebins = getEbins()
        bins = np.linspace(Ebins.min(), Ebins.max(), nbins+1)
    if xaxis == 'zenith':
        bins = np.linspace(1, np.cos(thetaMax * degree), nbins+1)[::-1]
    if xaxis == 'core':
        bins = np.linspace(0, 1000, nbins+1)

    t = np.log10(s['MC_energy'])
    r = np.log10(s['ML_energy'])
    if zcorrect:
        r = zfix(s)
    if rt == 't':
        e = t
        z = s['MC_zenith']
        cx, cy = s['MC_x'], s['MC_y']
    if rt == 'r':
        e = r
        z = np.pi - s['zenith']
        cx, cy = s['ML_x'], s['ML_y']

    # Calculate cut values
    c0 = np.logical_not(np.isnan(r))
    #ecut = (t >= minE) if rt=='t' else (r >= minE)
    ecut = (r >= minE)
    c0 *= ecut
    if cut != None:
        c0 *= s['cuts'][cut]

    # Store median and standard deviation info
    x = getMids(bins)
    x1 = {'energy':e, 'zenith':np.cos(z), 'core':np.sqrt(cx**2+cy**2)}
    x1 = x1[xaxis]
    y = r - t
    medians, sigL, sigR, vars = getMedian(x1[c0], y[c0], bins)
    #if xaxis == 'zenith':
    #    x = (np.arccos(x) / degree)[::-1]
    #    medians, sigL, sigR = medians[::-1], sigL[::-1], sigR[::-1]
    ax.errorbar(x, medians, yerr=(sigL, sigR), **pltParams)

    if out:
        plt.savefig(out)
    #if ax == None:
    plt.show()
# Module-level plotting code: one-dimensional right-ascension projection of a
# relative intensity map (expects parsed command-line `args` and map-reading
# options `opts` from earlier in the script).
fig, ax = plt.subplots()

# Get relative intensity map
relint = getMap(args.file, **opts)

# Setup right ascension bins
rabins = np.linspace(0, 2*np.pi, args.nbins+1)
print('rabins = {}'.format(rabins))

# Calculate phi for each pixel
npix = len(relint)
nside = hp.npix2nside(npix)
theta, phi = hp.pix2ang(nside, range(npix))

# Bin in right ascension
phiBins = np.digitize(phi, rabins) - 1

# UNSEEN cut
cut = (relint != hp.UNSEEN)

x = getMids(rabins) / deg2rad
y, yerr = np.zeros((2, len(x)))
for i in range(len(x)):
    phiCut = (phiBins == i)
    c0 = cut * phiCut
    y[i] = np.mean(relint[c0])
    yerr[i] = np.sqrt(np.var(relint[c0]))

plt.errorbar(x, y, yerr=yerr, fmt='.')
tPars = {'fontsize':16}
ax.set_xlabel(r'Right Ascension', **tPars)
ax.set_ylabel(r'Relative Intensity', **tPars)
ax.set_xlim(0., 360.)
ax.invert_xaxis()
plt.show()
def flux(niter=5, configs=None, spec=0, smooth=True, zcorrect=False,
         weight=False, orig=True, bakh=True, emin=None, linear=False,
         tax=None, tlabel=None, comps=True, months=None,
         decmin=None, decmax=None, ramin=None, ramax=None):

    # Starting information
    Ebins = getEbins(reco=True)
    Emids = getMids(Ebins)
    scale = (10**Emids)**spec
    h = loadHists()
    hParams = {'configs':configs, 'months':months,
               'decmin':decmin, 'decmax':decmax,
               'ramin':ramin, 'ramax':ramax,
               'w':weight, 'z':zcorrect}
    eList = ['p','h','o','f']
    if configs == None:
        configs = sorted(list(set([k.split('_')[0] for k in h.keys()])))

    # Load simulation information
    effarea, sigma, relerr = getEff_fast(linear)    # Effective area
    p = getProbs_fast(zcorrect=zcorrect)            # Probs for unfolding

    # Relative error due to unfolding...
    #fl = open('unfold_err.pkl', 'rb')
    #chi2, unrel = pickle.load(fl)
    #fl.close()

    # Get detector runtime
    t = 0.
    for cfg in configs:
        t += getRunTime(cfg, months=months)

    # Total counts
    N, Err = {}, {}
    N['All'], bins = histReader(h, x='energy', **hParams)
    Err['All'], bins = histReader(h, x='energy', err=True, **hParams)
    # Counts by composition
    for e in eList:
        N[e], bins = histReader(h, x='energy', e=e, **hParams)
        Err[e], bins = histReader(h, x='energy', e=e, err=True, **hParams)

    # Get starting probabilities
    Nun, Rel, Flux, Err = {}, {}, {}, {}
    N_passed = float(np.sum([N[e] for e in eList]))
    for e in eList:
        p['R'+e] = N[e] / N_passed
        p['T'+e] = powerFit.powerFit(-2.7)

    # Setup plot
    if tax == None:
        fig, ax = plt.subplots()
        ax.set_title('Energy Spectum')
        ax.set_xlabel('Log10(Energy/GeV)')
        ax.set_ylabel('Flux (counts/(m^2 s ster GeV)) * E^' + str(spec))
    else:
        ax = tax

    # Bayesian unfolding
    for i in range(niter):
        p = unfold(p)
        Nun['All'] = np.sum([p['T'+e] for e in eList], axis=0) * N_passed
        #Rel['All'] = np.sqrt(1/Nun['All'] + relerr**2 + unrel[i]**2)
        with np.errstate(divide='ignore'):
            Rel['All'] = np.sqrt(1/Nun['All'] + relerr**2)
        Flux['All'] = NumToFlux(Nun['All'], effarea, t) * scale
        Err['All'] = Flux['All'] * Rel['All']
        #ax.errorbar(Emids, Flux['All'], yerr=Err['All'], fmt='k.',
        #        label='Unfolded')
        # Smooth prior before the next iteration (except the last)
        if i < niter-1:
            for e in eList:
                p['T'+e] = smoother(p['T'+e])

    # Find bin values and errors
    for e in eList:
        Nun[e] = p['T'+e] * N_passed
        Rel[e] = np.sqrt(1/Nun[e] + relerr**2)
        Flux[e] = NumToFlux(Nun[e], effarea, t) * scale
        Err[e] = Flux[e] * Rel[e]

    # Plot
    pltList = ['All']
    if comps:
        pltList += eList
    for e in pltList:
        pnt = getColor(e) + '.' if tax == None else '-'
        label = e if tlabel == None else tlabel
        ax.errorbar(Emids, Flux[e], yerr=Err[e], fmt=pnt, label=label)

    # Plot original (not unfolded) spectrum
    if orig:
        O_N = np.sum([N[e] for e in eList], axis=0)
        O_relerr = np.sqrt(1/O_N + relerr**2)
        O_flux = NumToFlux(O_N, effarea, t) * scale
        O_err = O_flux * O_relerr
        ax.errorbar(Emids, O_flux, yerr=O_err, fmt='kx', label='Orig')

    # Plot Bakhtiyar's spectrum
    if bakh:
        B_mids, B_flux, B_relup, B_reldn = bakhPlot()
        B_flux *= ((10**B_mids)**spec)
        B_errup = (B_flux * B_relup)
        B_errdn = (B_flux * B_reldn)
        ax.errorbar(B_mids, B_flux, yerr=(B_errup, B_errdn), fmt='gx',
                    label='Bakhtiyar')

    # Plot IT26 spectrum
    #IT26_data = bakh.points['unfolded_twocomponent_th0-110412-shifted']
    #IT26_relerr = bakh.geterrorbars('unfolded_twocomponent_th0-110412-shifted')
    #IT26_mids = np.log10(IT26_data['E'])
    #IT26_flux = np.asarray(IT26_data['dN/dE'])
    #IT26_flux *= ((10**IT26_mids)**spec)
    #IT26_err = IT26_flux * IT26_relerr
    #ax.errorbar(IT26_mids, IT26_flux, yerr=IT26_err, fmt='rx',
    #        label='IT-26 Two-Component')

    if tax == None:
        ax.set_yscale('log')
        ax.legend(loc='lower left')
        if emin:
            ax.set_xlim((emin, 9.5))
        #ax.set_ylim((10**(3), 10**(5)))
        #ax.set_ylim((10**(-22), 10**(-10)))
        #plt.savefig('collab/pics/test.png')
        plt.show()
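# The unfold() step above implements iterative Bayesian (D'Agostini-style)
# unfolding. A minimal single-composition sketch with an explicit response
# matrix R[r,t] = P(reco bin r | true bin t); the project's unfold() works on
# the same principle but tracks several compositions at once and folds in
# efficiency, which is omitted here for brevity.
def _demo_bayes_unfold(R, n_obs, n_iter=5):
    import numpy as np
    nt = R.shape[1]
    prior = np.ones(nt) / nt                    # flat starting spectrum
    unfolded = np.zeros(nt)
    for _ in range(n_iter):
        # P(t|r) from Bayes' theorem with the current prior
        joint = R * prior                       # shape (nr, nt)
        ptr = joint / joint.sum(axis=1)[:, np.newaxis]
        # Redistribute observed counts back into true-energy bins
        unfolded = np.dot(n_obs, ptr)
        prior = unfolded / unfolded.sum()       # becomes the next prior
    return unfolded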
def counts(s, niter, sDict={'':[-0.25, [[7.5, -0.75]]]},
           smooth=True, spl=False, out=False, reco=True, zcorrect=False):

    r = np.log10(s['ML_energy'])
    rEbins = getEbins(reco=True)
    tEbins = getEbins(reco=reco)
    rEmids = getMids(rEbins)
    tEmids = getMids(tEbins)
    cutName = 'llh'
    cut = s['cuts'][cutName]
    eList = getComps(s)
    tList = getComps(s, reco=False)

    fig, ax = plt.subplots()
    ax.set_title('Energy Spectum using '+cutName+' cut')
    ax.set_xlabel('Log10(Energy/GeV)')
    ax.set_ylabel('Counts')

    # Load probability tables
    p = getProbs(s, cut, reco=reco, zcorrect=zcorrect)

    # Option for splining
    if spl:
        for key in p.keys():
            p[key] = 10**(spline.spline(s, p[key], nk=2, npoly=3))
        print np.sum(p['Rf|Tp']+p['Rp|Tp'], axis=0)
        print np.sum(p['Rf|Tf']+p['Rp|Tf'], axis=0)

    # Create our toy MC spectrum
    temp = {}
    for key in sDict.keys():
        s0 = sDict[key][0]
        sTable = sDict[key][1]
        temp[key] = powerFit.powerFit(s0, sTable=sTable, reco=reco)
    specCut = fakeSpec(s, cut, temp, reco=reco)
    #specCut = np.array([True for i in range(len(specCut))])

    # Create starting arrays
    N_passed = float(np.histogram(r[cut*specCut], bins=rEbins)[0].sum())
    N = {}
    for e in eList:
        recocut = s['llh_comp'] == e
        N[e] = np.histogram(r[cut*recocut*specCut], bins=rEbins)[0]
        p['R'+e] = N[e] / N_passed
        p['T'+e] = powerFit.powerFit(-2.7, reco=reco)

    # Get relative errors due to unfolding
    #chi2, unrel = load('unfold_err.npy')

    # Due to efficiency
    #effarea, sigma, relerr = eff.getEff(s, cut, smooth=smooth)
    effarea, sigma, relerr = eff.getEff(s, cut, reco=reco)

    # Bayesian unfolding
    for i in range(niter):
        p = unfold(p, reco=reco)
        #All_unfold = (p['Tf']+p['Tp']) * N_passed
        #ax.errorbar(Emids, All_unfold, yerr=unrel[i]*All_unfold, label=str(i))
        # Smooth prior before next iteration (except last time)
        if i < niter-1:
            for e in eList:
                p['T'+e] = smoother(p['T'+e])

    # Find bin values
    Nun, Rel, Err = {}, {}, {}
    for e in eList:
        Nun[e] = p['T'+e] * N_passed
    Nun['All'] = np.sum([Nun[e] for e in eList], axis=0)

    # Calculate errors
    for e in eList + ['All']:
        ## NOTE: I don't think you can use the relative errors like this ##
        #Rel[e] = np.sqrt(1/Nun[e] + relerr**2 + unrel[niter-1]**2)
        Rel[e] = np.sqrt(1/Nun[e] + relerr**2)
        Err[e] = Nun[e] * Rel[e]
        # And plot
        pnt = getColor(e) + '.'
        ax.errorbar(tEmids, Nun[e], yerr=Err[e], fmt=pnt, label=e)

    # Attempt to fit with broken power law
    #p0 = [1, 10**(-4.5), 7.5, -0.5]
    #yfit = powerFit.pow_fit(10**Emids, Nun['f'], Err['f'], p0)
    #ax.plot(Emids, yfit, label='test')

    # Plot error bars nicely
    #err = {}
    #err[0] = 1/np.sqrt(All_Nunfold)
    #err[1] = relerr
    #err[2] = unrel[niter-1]
    #errCalc = lambda i: np.sqrt(np.sum([err[j]**2 for j in range(i+1)],
    #        axis=0))
    #uperr, dnerr = {}, {}
    #for i in range(len(err)):
    #    uperr[i] = All_Nunfold * (1 + errCalc(i))
    #    dnerr[i] = All_Nunfold * (1 - errCalc(i))
    #    for j in range(len(dnerr[i])):
    #        if dnerr[i][j] < 0:
    #            dnerr[i][j] = 10**(-3)
    #ax.errorbar(Emids, All_Nunfold, yerr=All_err, fmt='gx', label='Unfold')
    #ax.plot(Emids, All_Nunfold, 'b.', label='Unfold')
    #for i in range(len(err)):
    #    ax.fill_between(Emids, dnerr[i], uperr[i], facecolor='blue',
    #            alpha=0.2)

    # Plot true spectrum
    MC = {}
    etrue = np.log10(s['MC_energy'])
    MC['All'] = np.histogram(etrue[cut*specCut], bins=tEbins)[0]
    for t in tList:
        truecut = s['comp'] == t
        MC[t] = np.histogram(etrue[cut*specCut*truecut], bins=tEbins)[0]
    for t in tList + ['All']:
        pnt = getColor(t) + 'x'
        ax.plot(tEmids, MC[t], pnt, label='MC_'+t)

    # Plot original (not unfolded) spectrum
    O_N = np.sum([N[e] for e in eList], axis=0)
    # Just for now: match the relative-error array to the reco energy range
    temperr = relerr if reco else relerr[20:]
    O_relerr = np.sqrt(1/O_N + temperr**2)
    O_err = O_N * O_relerr
    ax.errorbar(rEmids, O_N, yerr=O_err, fmt='gx', label='Original')

    ax.set_yscale('log')
    #ax.legend(loc='lower left')
    #ax.set_ylim((10**(-1), 10**(4)))
    if out:
        plt.savefig('collab/pics/'+out+'.png')
    plt.show()