def Fitfcn_max_likelihood(self, npar, gin, fcnVal, par, iflag):
    likelihood = 0
    mf = ROOT.MyMassSpectrum()
    mf.SetParameters(par)
    ig = GaussIntegrator()
    ig.SetFunction(mf)
    ig.SetRelTolerance(0.00001)
    for i in range(0, self.num_bins):
        # skip bins that fall inside an excluded region
        if any(lower < self.xmins[i] < higher for lower, higher in self.exclude_regions):
            continue
        model_val = ig.Integral(self.xmins[i], self.xmaxes[i]) / (
            self.xmaxes[i] - self.xmins[i])
        self.background_fit_only[i] = model_val
        model_val += self.model_scale_values[i] * par[4]
        self.data_fits[i] = model_val
        mv = model_val
        di = self.data[i]
        if di > 1e10 or math.isinf(mv) or math.isnan(mv):
            self.fit_failed = True
            return
        # Poisson negative log-likelihood (up to constants): mv - di + di*ln(di/mv)
        likelihood += mv - di
        if di > 0 and mv > 0:
            likelihood += di * (TMath.Log(di) - TMath.Log(mv))
    fcnVal[0] = likelihood
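# A minimal sketch of wiring an FCN with this five-argument Minuit signature into ROOT's
# TMinuit, assuming the classic PyROOT bindings. The toy objective and parameter names are
# placeholders, not the analysis's actual setup.
import ROOT

def toy_fcn(npar, gin, fcnVal, par, iflag):
    # placeholder objective: a simple paraboloid instead of the Poisson likelihood above
    fcnVal[0] = (par[0] - 1.0)**2 + (par[1] + 2.0)**2

minuit = ROOT.TMinuit(2)                 # two free parameters (assumed)
minuit.SetFCN(toy_fcn)                   # PyROOT accepts a Python callable here
minuit.DefineParameter(0, "p0", 0.0, 0.1, -10.0, 10.0)
minuit.DefineParameter(1, "p1", 0.0, 0.1, -10.0, 10.0)
minuit.Migrad()                          # minimize; results retrieved via GetParameter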
def GetTestStatistics(h_mass_data, h_temp_bgd, h_temp_sig):
    # Compute likelihood
    Loglik_bgr = 0.
    Loglik_sb = 0.
    for i in range(1, h_mass_data.GetNbinsX() + 1):
        # \mu = 0 (no signal)
        Loglik_bgr += TMath.Log(TMath.Poisson(h_mass_data.GetBinContent(i),
                                              h_temp_bgd.GetBinContent(i)))
        # \mu = 1 (signal + background)
        Loglik_sb += TMath.Log(TMath.Poisson(h_mass_data.GetBinContent(i),
                                             h_temp_sig.GetBinContent(i) + h_temp_bgd.GetBinContent(i)))
    # Get likelihood ratio
    X = 2 * (Loglik_bgr - Loglik_sb)
    return X
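# To turn X into a significance, the observed value is usually compared with its distribution
# under the background-only hypothesis. A hedged sketch of such a toy study, assuming
# h_mass_data, h_temp_bgd and h_temp_sig are TH1 histograms with identical binning
# (the names are reused from the function above, not defined here).
import ROOT

rng = ROOT.TRandom3(12345)
X_obs = GetTestStatistics(h_mass_data, h_temp_bgd, h_temp_sig)

X_bkg_only = []
h_toy = h_temp_bgd.Clone("h_toy")
for itoy in range(1000):
    for i in range(1, h_toy.GetNbinsX() + 1):
        h_toy.SetBinContent(i, rng.Poisson(h_temp_bgd.GetBinContent(i)))
    X_bkg_only.append(GetTestStatistics(h_toy, h_temp_bgd, h_temp_sig))

# smaller X is more signal-like here, so the p-value is the fraction of
# background-only toys at least as signal-like as the data
p_value = sum(1 for x in X_bkg_only if x <= X_obs) / float(len(X_bkg_only))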
def __call__(self, x, par):
    # Eq. 93.16
    # photon energy and angle
    w = x[0]
    d = x[1]
    # initial and final electron E and E'
    E = self.gen.Ee_n
    Efin = E - w
    t1 = 8. * self.gen.Z * self.gen.Z * self.gen.ar2 * (1. / w) * (
        Efin / E) * d / ((1 + d**2)**2)
    t2 = ((E / Efin) + (Efin / E) - 4 * d * d / ((1 + d**2)**2)) * TMath.Log(
        2. * E * Efin / (self.gen.me * w))
    t3 = 0.5 * ((E / Efin) + (Efin / E) + 2 - 16 * d * d / ((1 + d**2)**2))
    sig = t1 * (t2 - t3)
    return sig
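# Functors with this (x, par) signature can be wrapped directly in a ROOT TF2 and sampled.
# A minimal sketch, assuming `brems_dd` is an instance of the class above; the kinematic
# ranges used below are placeholders, not values from the source.
import ctypes
import ROOT

f2 = ROOT.TF2("d2SigDwDd", brems_dd, 1e-3, 10., 0., 5., 0)   # 0 fit parameters
f2.SetNpx(200)
f2.SetNpy(200)

w = ctypes.c_double(0.)
d = ctypes.c_double(0.)
f2.GetRandom2(w, d)            # draw one (photon energy, angle) pair from the density
print("sampled w, delta:", w.value, d.value)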
def __call__(self, ndim, x):
    # Eq. 93.16 from Lifshitz QED for bremsstrahlung, version for TFoam
    # photon energy and angle, FOAM input ranges from 0 to 1
    w = x[0] * self.gen.Ee
    d = x[1] * self.gen.dmax
    if w < self.gen.emin:
        return 0.
    # initial and final electron E and E'
    E = self.gen.Ee
    Efin = E - w
    t1 = 8. * self.gen.Z * self.gen.Z * self.gen.ar2 * (1. / w) * (
        Efin / E) * d / ((1 + d**2)**2)
    t2 = ((E / Efin) + (Efin / E) - 4 * d * d / ((1 + d**2)**2)) * TMath.Log(
        2. * E * Efin / (self.gen.me * w))
    t3 = 0.5 * ((E / Efin) + (Efin / E) + 2 - 16 * d * d / ((1 + d**2)**2))
    sig = t1 * (t2 - t3)
    return sig
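# Because this TFoam-style density is defined on the unit square (both inputs scaled to 0..1),
# it can also be sampled with plain acceptance-rejection when a foam is not set up. A hedged
# sketch, assuming `density` is an instance of the class above; the maximum is estimated by a
# coarse scan, which is an assumption of this sketch, not part of the source.
import ROOT

rng = ROOT.TRandom3(987)

# crude estimate of the density maximum on the unit square (scan granularity is arbitrary)
fmax = 0.
for i in range(200):
    for j in range(200):
        fmax = max(fmax, density(2, [(i + 0.5) / 200., (j + 0.5) / 200.]))
fmax *= 1.2   # safety margin

def sample_one():
    # von Neumann acceptance-rejection: propose uniformly, accept with probability f/fmax
    while True:
        u, v = rng.Uniform(0., 1.), rng.Uniform(0., 1.)
        if rng.Uniform(0., fmax) < density(2, [u, v]):
            return u, v   # still in the scaled (0..1) coordinates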
def __call__(self, x, par):
    # formula for dSigma/dy with y = Eg/Ee
    y = x[0]
    t1 = self.gen.ar2 / y
    t2 = 1. + (1. - y)**2 - (2. / 3) * (1 - y)
    t3 = TMath.Log(self.gen.s * (1 - y) / (self.gen.mep * y)) - 0.5
    return t1 * t2 * t3
def eq1(self, x):
    # formula for dSigma/dy with y = Eg/Ee
    y = x[0]
    t1 = self.ar2 / y
    t2 = 1. + (1. - y)**2 - (2. / 3) * (1 - y)
    t3 = TMath.Log(self.s * (1 - y) / (self.mep * y)) - 0.5
    return t1 * t2 * t3
def LogLikelihood(histo, template, scale, minST, maxST):
    result = 0.0
    firstbin = histo.FindBin(minST)
    lastbin = histo.FindBin(maxST)
    for i in range(firstbin, lastbin):
        n = histo.GetBinContent(i)
        f = scale * template.GetBinContent(i)
        # f = scale * template.Eval(histo.GetBinCenter(i))
        log_f = TMath.Log(f)
        # Poisson log-likelihood per bin, dropping the n-independent ln(n!) term
        result += n * log_f - f
    return result
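# A hedged sketch of how a likelihood like this is typically used: scan the template scale
# and keep the value that maximizes the log-likelihood. The histogram names, scan range and
# ST window below are placeholders, not taken from the analysis.
best_scale, best_ll = None, None
for step in range(1, 301):
    s = 0.01 * step                                                  # scan 0.01 .. 3.00
    ll = LogLikelihood(h_ST_data, h_ST_template, s, 1000., 3000.)    # assumed inputs
    if best_ll is None or ll > best_ll:
        best_scale, best_ll = s, ll
print("best-fit template scale:", best_scale)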
def eq1(self, x):
    # E_gamma
    Eg = x[0]
    # electron and proton energy
    Ee = self.Ee
    Ep = self.Ep
    # scattered electron Ee'
    Escat = Ee - Eg
    # if Escat < 1e-5: return 0.
    t1 = Escat / (Eg * Ee)
    t2 = (Ee / Escat) + (Escat / Ee) - 2. / 3
    t3 = TMath.Log(4 * Ep * Ee * Escat / (self.mep * Eg)) - 1. / 2
    return self.ar2 * t1 * t2 * t3
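# Single-variable spectra like eq1 are commonly wrapped in a TF1 for integration and sampling.
# A minimal sketch, assuming `gen` is the object that owns eq1; the photon-energy range is a
# placeholder, and the lambda only adapts eq1 to the (x, par) signature TF1 expects.
import ROOT

f1 = ROOT.TF1("dSigDEg", lambda x, p: gen.eq1(x), 0.1, 10., 0)   # range in GeV, assumed
f1.SetNpx(1000)

sigma_tot = f1.Integral(0.1, 10.)   # cross section integrated over the chosen Eg range
Eg = f1.GetRandom()                 # draw one photon energy from the spectrum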
def BackgroundFit_f1(x, par):
    # smoothly falling background parameterization; x[0] is scaled by 8.e3
    return par[0] * (1. - (x[0] / 8.e3))**par[1] / (
        (x[0] / 8.e3)**(par[2] + par[3] * TMath.Log(x[0] / 8.e3)))
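# One common way to use such a parameterization (a sketch, not the analysis's actual fit code):
# bind it to a TF1 with four parameters and fit a mass spectrum. The histogram `h_mjj`, the
# fit range and the starting values are placeholders.
import ROOT

f_bkg = ROOT.TF1("f_bkg", BackgroundFit_f1, 1000., 6000., 4)   # 4 shape parameters
f_bkg.SetParameters(1e-3, 10., 5., 0.)                         # starting values, assumed
# h_mjj.Fit(f_bkg, "RL")   # "R": respect the range, "L": binned likelihood fit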
def __init__(self, parse, tree, hepmc_attrib):
    print("Quasi-real configuration:")

    # electron and proton beam energy, GeV
    self.Ee = parse.getfloat("main", "Ee")
    self.Ep = parse.getfloat("main", "Ep")
    print("Ee =", self.Ee, "GeV")
    print("Ep =", self.Ep, "GeV")

    # electron and proton mass
    self.me = TDatabasePDG.Instance().GetParticle(11).Mass()
    mp = TDatabasePDG.Instance().GetParticle(2212).Mass()

    # boost vector pbvec of proton beam
    pbeam = TLorentzVector()
    pbeam.SetPxPyPzE(0, 0, TMath.Sqrt(self.Ep**2 - mp**2), self.Ep)
    self.pbvec = pbeam.BoostVector()

    # electron beam energy Ee_p in proton beam rest frame
    ebeam = TLorentzVector()
    ebeam.SetPxPyPzE(0, 0, -TMath.Sqrt(self.Ee**2 - self.me**2), self.Ee)
    ebeam.Boost(-self.pbvec.x(), -self.pbvec.y(), -self.pbvec.z())  # transform to proton beam frame
    self.Ee_p = ebeam.E()

    # center-of-mass squared s, GeV^2
    self.s = self.get_s(self.Ee, self.Ep)
    print("s =", self.s, "GeV^2")
    print("sqrt(s) =", TMath.Sqrt(self.s), "GeV")

    # range in x
    xmin = parse.getfloat("main", "xmin")
    xmax = parse.getfloat("main", "xmax")
    print("xmin =", xmin)
    print("xmax =", xmax)

    # range in u = log_10(x)
    umin = TMath.Log10(xmin)
    umax = TMath.Log10(xmax)
    print("umin =", umin)
    print("umax =", umax)

    # range in y
    ymin = parse.getfloat("main", "ymin")
    ymax = parse.getfloat("main", "ymax")

    # range in W
    wmin = -1.
    wmax = -1.
    if parse.has_option("main", "Wmin"):
        wmin = parse.getfloat("main", "Wmin")
        print("Wmin =", wmin)
    if parse.has_option("main", "Wmax"):
        wmax = parse.getfloat("main", "Wmax")
        print("Wmax =", wmax)

    # adjust range in y according to W
    if wmin > 0 and ymin < wmin**2 / self.s:
        ymin = wmin**2 / self.s
    if wmax > 0 and ymax > wmax**2 / self.s:
        ymax = wmax**2 / self.s
    print("ymin =", ymin)
    print("ymax =", ymax)

    # range in v = log_10(y)
    vmin = TMath.Log10(ymin)
    vmax = TMath.Log10(ymax)
    print("vmin =", vmin)
    print("vmax =", vmax)

    # range in Q2
    self.Q2min = parse.getfloat("main", "Q2min")
    self.Q2max = parse.getfloat("main", "Q2max")
    print("Q2min =", self.Q2min)
    print("Q2max =", self.Q2max)

    # constant term in the cross section
    self.const = TMath.Log(10) * TMath.Log(10) * (1. / 137) / (2. * math.pi)

    # cross section formula for d^2 sigma / dxdy, Eq. II.6,
    # transformed as x -> u = log_10(x) and y -> v = log_10(y)
    self.eq_II6_uv_par = self.eq_II6_uv(self)
    self.eq = TF2("d2SigDuDvII6", self.eq_II6_uv_par, umin, umax, vmin, vmax)
    self.eq.SetNpx(1000)
    self.eq.SetNpy(1000)

    # uniform generator for azimuthal angles
    self.rand = TRandom3()
    self.rand.SetSeed(5572323)

    # generator event variables in output tree
    tnam = ["gen_u", "gen_v", "true_x", "true_y", "true_Q2", "true_W2"]
    tnam += ["true_el_Q2"]
    tnam += ["true_el_pT", "true_el_theta", "true_el_phi", "true_el_E"]

    # create the tree variables
    tcmd = "struct gen_out { Double_t "
    for i in tnam:
        tcmd += i + ", "
    tcmd = tcmd[:-2] + ";};"
    gROOT.ProcessLine(tcmd)
    self.out = rt.gen_out()

    # put zero to all variables
    for i in tnam:
        exec("self.out." + i + "=0")

    # set the variables in the tree
    if tree is not None:
        for i in tnam:
            tree.Branch(i, addressof(self.out, i), i + "/D")

    # event attributes for hepmc
    self.hepmc_attrib = hepmc_attrib

    # counters for all generated and selected events
    self.nall = 0
    self.nsel = 0

    # print generator statistics at the end
    atexit.register(self.show_stat)

    # total integrated cross section
    self.sigma_tot = self.eq.Integral(umin, umax, vmin, vmax)
    print("Total integrated cross section for a given x and y range:", self.sigma_tot, "mb")

    print("Quasi-real photoproduction initialized")
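# For reference, a minimal sketch of the [main] configuration block this constructor reads
# (ConfigParser syntax). The numerical values are placeholders, and the class name in the
# commented line is an assumption, not taken from the source.
import configparser

parse = configparser.ConfigParser()
parse["main"] = {
    "Ee": "18", "Ep": "275",            # beam energies in GeV (placeholder values)
    "xmin": "1e-6", "xmax": "1e-2",     # x range
    "ymin": "0.05", "ymax": "0.95",     # y range, tightened further if Wmin/Wmax are given
    "Q2min": "1e-9", "Q2max": "1e-2",   # Q2 range in GeV^2
    # "Wmin": "2.", "Wmax": "20.",      # optional W range in GeV
}
# gen = gen_quasi_real(parse, tree=None, hepmc_attrib={})   # class name assumed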
def MuFit(Nbins, irebin=1.):
    # Get histograms
    h_bgr = GetMassDistribution(1)
    h_data = GetMassDistribution(2)
    h_sig = GetMassDistribution(125)

    h_bgr.Rebin(irebin)
    h_data.Rebin(irebin)
    h_sig.Rebin(irebin)

    h_sf = TH2D("scalefactor", "title", Nbins, 0.5, 2., Nbins, 0., 5.)

    for i in range(1, h_sf.GetNbinsX() + 1):
        for j in range(1, h_sf.GetNbinsY() + 1):
            sf_bgr = h_sf.GetXaxis().GetBinCenter(i)
            sf_sig = h_sf.GetYaxis().GetBinCenter(j)

            # Loop over bins, compute likelihood
            loglik = 0.
            for iDataBin in range(1, h_data.GetNbinsX() + 1):
                m4lepBin = h_data.GetBinCenter(iDataBin)
                NObsBin = h_data.GetBinContent(iDataBin)
                MeanBin = sf_bgr * h_bgr.GetBinContent(iDataBin) + sf_sig * h_sig.GetBinContent(iDataBin)
                if MeanBin > 0:
                    loglik += TMath.Log(TMath.Poisson(NObsBin, MeanBin))
            h_sf.SetBinContent(i, j, -2. * loglik)

    # Get best SF parameters
    x = ctypes.c_int(0)
    y = ctypes.c_int(0)
    z = ctypes.c_int(0)
    h_sf.GetBinXYZ(h_sf.GetMinimumBin(), x, y, z)
    Minimum = h_sf.GetBinContent(x.value, y.value)
    best_alpha = h_sf.GetXaxis().GetBinCenter(x.value)
    best_mu = h_sf.GetYaxis().GetBinCenter(y.value)
    print(Minimum, best_alpha, best_mu)

    # Rescale histogram so the minimum of -2 ln L is at zero
    for i in range(1, h_sf.GetNbinsX() + 1):
        for j in range(1, h_sf.GetNbinsY() + 1):
            h_sf.SetBinContent(i, j, h_sf.GetBinContent(i, j) - Minimum)

    canvas = TCanvas("canvas", "Standard Canvas", 600, 400)
    canvas.cd()
    Min = TMarker(best_alpha, best_mu, 29)
    Min.SetMarkerSize(2)
    h_sf.SetStats(kFALSE)
    h_sf.SetTitle("HIGGS PRODUCTION CROSS SECTION - PARAMETERS")
    h_sf.GetXaxis().SetTitle(r"#alpha")
    h_sf.GetYaxis().SetTitle(r"#mu")
    h_sf.Draw("COLZ")

    # Mark the bins with Delta(-2 ln L) <= 1
    h_sigma = h_sf.Clone("h_sigma")
    h_sigma.Reset()
    for i in range(1, h_sigma.GetNbinsX() + 1):
        for j in range(1, h_sigma.GetNbinsY() + 1):
            if h_sf.GetBinContent(i, j) <= 1.:
                h_sigma.SetBinContent(i, j, 1.)
    h_sigma.SetMarkerColorAlpha(kRed, 0.40)
    h_sigma.SetMarkerSize(10)
    h_sigma.Draw("same L")
    Min.Draw()

    leg1 = TLegend(0.65, 0.85, 0.85, 0.75)
    leg1.SetBorderSize(1)
    leg1.SetFillColor(0)
    leg1a = leg1.AddEntry(Min, r"optimal (#alpha,#mu)", "p")
    leg1a.SetTextSize(0.04)
    leg1.Draw()

    canvas.Print("Plots/MuFit.pdf")
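# The minimum of the -2 ln L map gives the best-fit (alpha, mu); after subtracting the minimum,
# the bins with Delta(-2 ln L) <= 1 form the region highlighted above. A hedged usage example;
# the grid size and rebinning factor are arbitrary choices, not values from the source.
MuFit(100, irebin=20)   # 100x100 scan in (alpha, mu) after rebinning the mass histograms by 20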
def myAnalyzer( dictSamples, listCuts, signalName, RANGE, UNC ): outputFileName = 'Rootfiles/RUNMiniBoostedAnalysis_'+grooming+'_'+signalName+UNC+'_'+RANGE+'_'+args.version+'p7.root' outputFile = TFile( outputFileName, 'RECREATE' ) ###################################### output Tree #tree = TTree('RUNAFinTree'+grooming, 'RUNAFinTree'+grooming) #AvgMass = array( 'f', [ 0. ] ) #tree.Branch( 'AvgMass', AvgMass, 'AvgMass/F' ) #Scale = array( 'f', [ 0. ] ) #tree.Branch( 'Scale', Scale, 'Scale/F' ) ################################################################################################## Histos massBins = 100 massXmin = 0. massXmax = 500. listOfOptions = [ [ j,k] for j in range(len(listCuts)-1) for k in range(1, len(listCuts) ) if k > j ] for sam in dictSamples: allHistos[ "cutFlow_"+sam ] = TH1F( "cutflow_"+sam, "cutflow_"+sam, len(listCuts), 0., len(listCuts) ) allHistos[ "cutFlow_Scaled_"+sam ] = TH1F( "cutflow_scaled_"+sam, "cutflow_scaled_"+sam, len(listCuts), 0., len(listCuts) ) allHistos[ "cutFlow_Scaled_Weights_"+sam ] = TH1F( "cutflow_scaled_weights_"+sam, "cutflow_scaled_weights_"+sam, len(listCuts), 0., len(listCuts) ) allHistos[ "HT_"+sam ] = TH1F( "HT_"+sam, "HT_"+sam, 5000, 0., 5000 ) allHistos[ "HT_"+sam ].Sumw2() allHistos[ "MET_"+sam ] = TH1F( "MET_"+sam, "MET_"+sam, 500, 0., 500 ) allHistos[ "MET_"+sam ].Sumw2() allHistos[ "massAve_"+sam ] = TH1F( "massAve_"+sam, "massAve_"+sam, 500, 0., 500 ) allHistos[ "massAve_"+sam ].Sumw2() allHistos[ "numJets_"+sam ] = TH1F( "numJets_"+sam, "numJets_"+sam, 20, 0., 20 ) allHistos[ "numJets_"+sam ].Sumw2() allHistos[ "jet1Pt_"+sam ] = TH1F( "jet1Pt_"+sam, "jet1Pt_"+sam, 2000, 0., 2000 ) allHistos[ "jet1Pt_"+sam ].Sumw2() allHistos[ "jet2Pt_"+sam ] = TH1F( "jet2Pt_"+sam, "jet2Pt_"+sam, 2000, 0., 2000 ) allHistos[ "jet2Pt_"+sam ].Sumw2() allHistos[ "jet1CosThetaStar_"+sam ] = TH1F( "jet1CosThetaStar_"+sam, "jet1CosThetaStar_"+sam, 20, 0., 1 ) allHistos[ "jet1CosThetaStar_"+sam ].Sumw2() allHistos[ "jet2CosThetaStar_"+sam ] = TH1F( "jet2CosThetaStar_"+sam, "jet2CosThetaStar_"+sam, 20, 0., 1 ) allHistos[ "jet2CosThetaStar_"+sam ].Sumw2() allHistos[ "jet1Tau32_"+sam ] = TH1F( "jet1Tau32_"+sam, "jet1Tau32_"+sam, 20, 0., 1 ) allHistos[ "jet1Tau32_"+sam ].Sumw2() allHistos[ "jet2Tau32_"+sam ] = TH1F( "jet2Tau32_"+sam, "jet2Tau32_"+sam, 20, 0., 1 ) allHistos[ "jet2Tau32_"+sam ].Sumw2() allHistos[ "jet1RhoDDT_"+sam ] = TH1F( "jet1RhoDDT_"+sam, "jet1RhoDDT_"+sam, 200, -10, 10 ) allHistos[ "jet1RhoDDT_"+sam ].Sumw2() allHistos[ "jet2RhoDDT_"+sam ] = TH1F( "jet2RhoDDT_"+sam, "jet2RhoDDT_"+sam, 200, -10, 10 ) allHistos[ "jet2RhoDDT_"+sam ].Sumw2() allHistos[ "jet1Tau21VsRhoDDT_"+sam ] = TH2F( "jet1Tau21VsRhoDDT_"+sam, "jet1Tau21VsRhoDDT_"+sam, 20, 0., 1., 200, -10, 10 ) allHistos[ "jet1Tau21VsRhoDDT_"+sam ].Sumw2() allHistos[ "jet2Tau21VsRhoDDT_"+sam ] = TH2F( "jet2Tau21VsRhoDDT_"+sam, "jet2Tau21VsRhoDDT_"+sam, 20, 0., 1., 200, -10, 10 ) allHistos[ "jet2Tau21VsRhoDDT_"+sam ].Sumw2() allHistos[ "jet1Tau21DDT_"+sam ] = TH1F( "jet1Tau21DDT_"+sam, "jet1Tau21DDT_"+sam, 30, -1, 2 ) allHistos[ "jet1Tau21DDT_"+sam ].Sumw2() allHistos[ "jet2Tau21DDT_"+sam ] = TH1F( "jet2Tau21DDT_"+sam, "jet2Tau21DDT_"+sam, 30, -1, 2 ) allHistos[ "jet2Tau21DDT_"+sam ].Sumw2() allHistos[ "jet1Tau21DDTVsRhoDDT_"+sam ] = TH2F( "jet1Tau21DDTVsRhoDDT_"+sam, "jet1Tau21DDTVsRhoDDT_"+sam, 20, 0., 1., 200, -10, 10 ) allHistos[ "jet1Tau21DDTVsRhoDDT_"+sam ].Sumw2() allHistos[ "jet2Tau21DDTVsRhoDDT_"+sam ] = TH2F( "jet2Tau21DDTVsRhoDDT_"+sam, "jet2Tau21DDTVsRhoDDT_"+sam, 20, 0., 1., 
200, -10, 10 ) allHistos[ "jet2Tau21DDTVsRhoDDT_"+sam ].Sumw2() #if 'high' in args.RANGE: allHistos[ "jet1Tau31_"+sam ] = TH1F( "jet1Tau31_"+sam, "jet1Tau31_"+sam, 20, 0., 1 ) allHistos[ "jet1Tau31_"+sam ].Sumw2() allHistos[ "jet2Tau31_"+sam ] = TH1F( "jet2Tau31_"+sam, "jet2Tau31_"+sam, 20, 0., 1 ) allHistos[ "jet2Tau31_"+sam ].Sumw2() allHistos[ "jet1SubjetPtRatio_"+sam ] = TH1F( "jet1SubjetPtRatio_"+sam, "jet1SubjetPtRatio_"+sam, 20, 0., 1 ) allHistos[ "jet1SubjetPtRatio_"+sam ].Sumw2() allHistos[ "jet2SubjetPtRatio_"+sam ] = TH1F( "jet2SubjetPtRatio_"+sam, "jet2SubjetPtRatio_"+sam, 20, 0., 1 ) allHistos[ "jet2SubjetPtRatio_"+sam ].Sumw2() allHistos[ "jet1BtagCSV_"+sam ] = TH1F( "jet1BtagCSV_"+sam, "jet1BtagCSV_"+sam, 5, 0., 5 ) allHistos[ "jet1BtagCSV_"+sam ].Sumw2() allHistos[ "jet2BtagCSV_"+sam ] = TH1F( "jet2BtagCSV_"+sam, "jet2BtagCSV_"+sam, 5, 0., 5 ) allHistos[ "jet2BtagCSV_"+sam ].Sumw2() allHistos[ "jetsBtagCSV_"+sam ] = TH1F( "jetsBtagCSV_"+sam, "jetsBtagCSV_"+sam, 5, 0., 5 ) allHistos[ "jetsBtagCSV_"+sam ].Sumw2() allHistos[ "deltaEtaDijet_n-1_"+sam ] = TH1F( "deltaEtaDijet_n-1_"+sam, "deltaEtaDijet_n-1_"+sam, 50, 0., 5 ) allHistos[ "deltaEtaDijet_n-1_"+sam ].Sumw2() allHistos[ "prunedMassAsym_n-1_"+sam ] = TH1F( "prunedMassAsym_n-1_"+sam, "prunedMassAsym_n-1_"+sam, 20, 0., 1 ) allHistos[ "prunedMassAsym_n-1_"+sam ].Sumw2() allHistos[ "jet1Tau21_n-1_"+sam ] = TH1F( "jet1Tau21_n-1_"+sam, "jet1Tau21_n-1_"+sam, 20, 0., 1 ) allHistos[ "jet1Tau21_n-1_"+sam ].Sumw2() allHistos[ "jet2Tau21_n-1_"+sam ] = TH1F( "jet2Tau21_n-1_"+sam, "jet2Tau21_n-1_"+sam, 20, 0., 1 ) allHistos[ "jet2Tau21_n-1_"+sam ].Sumw2() ''' if 'low' in args.RANGE: allHistos[ "jet1Tau31_n-1_"+sam ] = TH1F( "jet1Tau31_n-1_"+sam, "jet1Tau31_n-1_"+sam, 20, 0., 1 ) allHistos[ "jet1Tau31_n-1_"+sam ].Sumw2() allHistos[ "jet2Tau31_n-1_"+sam ] = TH1F( "jet2Tau31_n-1_"+sam, "jet2Tau31_n-1_"+sam, 20, 0., 1 ) allHistos[ "jet2Tau31_n-1_"+sam ].Sumw2() ''' listCuts.append( [ 'btag' ] ) for var in listCuts: if 'deltaEta' in var[0]: allHistos[ var[0]+'_'+sam ] = TH1F( var[0]+'_'+sam, var[0]+'_'+sam, 50, 0., 5. ) for var1 in listCuts: allHistos[ var[0]+'_'+var1[0]+"_"+sam ] = TH1F( var[0]+'_'+var1[0]+"_"+sam, var[0]+'_'+var1[0]+"_"+sam, 50, 0., 5. ) else: allHistos[ var[0]+'_'+sam ] = TH1F( var[0]+'_'+sam, var[0]+'_'+sam, 20, 0., 1. ) for var1 in listCuts: allHistos[ var[0]+'_'+var1[0]+"_"+sam ] = TH1F( var[0]+'_'+var1[0]+"_"+sam, var[0]+'_'+var1[0]+"_"+sam, 20, 0., 1. 
) allHistos[ var[0]+'_'+sam ].Sumw2() allHistos[ "massAve_"+var[0]+'_'+sam ] = TH1F( "massAve_"+var[0]+'_'+sam, "massAve_"+var[0]+'_'+sam, massBins, massXmin, massXmax ) allHistos[ "massAve_"+var[0]+'_'+sam ].Sumw2() allHistos[ "HT_"+var[0]+"_"+sam ] = TH1F( "HT_"+var[0]+"_"+sam, "HT_"+var[0]+"_"+sam, 5000, 0., 5000 ) allHistos[ "HT_"+var[0]+"_"+sam ].Sumw2() allHistos[ "MET_"+var[0]+"_"+sam ] = TH1F( "MET_"+var[0]+"_"+sam, "MET_"+var[0]+"_"+sam, 500, 0., 500 ) allHistos[ "MET_"+var[0]+"_"+sam ].Sumw2() allHistos[ "numJets_"+var[0]+"_"+sam ] = TH1F( "numJets_"+var[0]+"_"+sam, "numJets_"+var[0]+"_"+sam, 20, 0., 20 ) allHistos[ "numJets_"+var[0]+"_"+sam ].Sumw2() allHistos[ "jet1Pt_"+var[0]+"_"+sam ] = TH1F( "jet1Pt_"+var[0]+"_"+sam, "jet1Pt_"+var[0]+"_"+sam, 2000, 0., 2000 ) allHistos[ "jet1Pt_"+var[0]+"_"+sam ].Sumw2() allHistos[ "jet2Pt_"+var[0]+"_"+sam ] = TH1F( "jet2Pt_"+var[0]+"_"+sam, "jet2Pt_"+var[0]+"_"+sam, 2000, 0., 2000 ) allHistos[ "jet2Pt_"+var[0]+"_"+sam ].Sumw2() listCuts.remove( ['btag'] ) for ind in listOfOptions: tmpName = listCuts[ind[0]][0]+'Vs'+listCuts[ind[1]][0]+'_'+sam allHistos[ tmpName ] = TH2F( tmpName, tmpName, (50 if 'deltaEta' in listCuts[ind[0]][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[ind[0]][0] else 1. ), (50 if 'deltaEta' in listCuts[ind[1]][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[ind[1]][0] else 1. ) ) allHistos[ tmpName ].Sumw2() #tmpNameSam = listCuts[-2][0]+'Vs'+listCuts[-1][0]+'_'+sam tmpNameSam = tmpName #listCuts[-2][0]+'Vs'+listCuts[-1][0]+'_'+sam #if 'RPV' in sam: massBins = 50 #allHistos[ "massAve_"+tmpNameSam+'_ABCDProj' ] = TH1F( "massAve_"+tmpNameSam+'_ABCDProj', "massAve_"+tmpNameSam+'_ABCDProj', len(boostedMassAveBins)-1, boostedMassAveBins) #allHistos[ "massAve_"+tmpNameSam+'_BC' ] = TH1F( "massAve_"+tmpNameSam+'_BC', "massAve_"+tmpNameSam+'_BC', len(boostedMassAveBins)-1, boostedMassAveBins ) #else: allHistos[ "massAve_"+tmpNameSam+'_ABCDProj' ] = TH1F( "massAve_"+tmpNameSam+'_ABCDProj', "massAve_"+tmpNameSam+'_ABCDProj', massBins, massXmin, massXmax ) allHistos[ "massAve_"+tmpNameSam+'_BC' ] = TH1F( "massAve_"+tmpNameSam+'_BC', "massAve_"+tmpNameSam+'_BC', massBins, massXmin, massXmax ) allHistos[ "massAve_"+tmpNameSam+'_ABCDProj' ].Sumw2() allHistos[ "massAve_"+tmpNameSam+'_BC' ].Sumw2() allHistos[ tmpNameSam+'_Bkg' ] = TH2F( tmpNameSam+'_Bkg', tmpNameSam+'_Bkg', #(50 if 'deltaEta' in listCuts[-2][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[-2][0] else 1. ), #(50 if 'deltaEta' in listCuts[-1][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[-1][0] else 1. ) (50 if 'deltaEta' in listCuts[ind[0]][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[ind[0]][0] else 1. ), (50 if 'deltaEta' in listCuts[ind[1]][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[ind[1]][0] else 1. ) ) allHistos[ tmpNameSam+'_Bkg' ].Sumw2() for k in [ 'A', 'B', 'C', 'D' ]: #allHistos[ "massAve_"+tmpNameSam+'_'+k ] = TH1F( "massAve_"+tmpNameSam+'_'+k, "massAve_"+tmpNameSam+'_'+k, len(boostedMassAveBins)-1, boostedMassAveBins ) allHistos[ "massAve_"+tmpNameSam+'_'+k ] = TH1F( "massAve_"+tmpNameSam+'_'+k, "massAve_"+tmpNameSam+'_'+k, massBins, massXmin, massXmax ) allHistos[ "massAve_"+tmpNameSam+'_'+k ].Sumw2() allHistos[ tmpNameSam+'_'+k ] = TH2F( tmpNameSam+'_'+k, tmpNameSam+'_'+k, #(50 if 'deltaEta' in listCuts[-2][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[-2][0] else 1. ), #(50 if 'deltaEta' in listCuts[-1][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[-1][0] else 1. ) (50 if 'deltaEta' in listCuts[ind[0]][0] else 20 ), 0., (5. 
if 'deltaEta' in listCuts[ind[0]][0] else 1. ), (50 if 'deltaEta' in listCuts[ind[1]][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[ind[1]][0] else 1. ) ) allHistos[ tmpNameSam+'_'+k ].Sumw2() allHistos[ "massAve_"+tmpNameSam+'_btag_'+k ] = TH1F( "massAve_"+tmpNameSam+'_btag_'+k, "massAve_"+tmpNameSam+'_btag_'+k, massBins, massXmin, massXmax ) allHistos[ "massAve_"+tmpNameSam+'_btag_'+k ].Sumw2() allHistos[ tmpNameSam+'_btag_'+k ] = TH2F( tmpNameSam+'_btag_'+k, tmpNameSam+'_btag_'+k, #(50 if 'deltaEta' in listCuts[-2][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[-2][0] else 1. ), #(50 if 'deltaEta' in listCuts[-1][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[-1][0] else 1. ) (50 if 'deltaEta' in listCuts[ind[0]][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[ind[0]][0] else 1. ), (50 if 'deltaEta' in listCuts[ind[1]][0] else 20 ), 0., (5. if 'deltaEta' in listCuts[ind[1]][0] else 1. ) ) allHistos[ tmpNameSam+'_btag_'+k ].Sumw2() #print allHistos ################################################################################################## Running the Analysis for sample in dictSamples: ####### Get GenTree inputFile, events, numEntries = getTree( dictSamples[ sample ], ('BoostedAnalysisPlotsPuppi'+( '' if 'PDF' in UNC else UNC)+'/RUNATree' if 'Puppi' in args.grooming else 'BoostedAnalysisPlots'+( '' if 'PDF' in UNC else UNC)+'/RUNATree' ) ) print '-'*40 print '------> ', sample print '------> Number of events: '+str(numEntries) d = 0 cutFlowList = OrderedDict() cutFlowScaledList = OrderedDict() cutFlowScaledListWeights = OrderedDict() cutFlowList[ 'Process' ] = 0 cutFlowList[ 'Preselection' ] = 0 cutFlowScaledList[ 'Process' ] = 0 cutFlowScaledList[ 'Preselection' ] = 0 cutFlowScaledListWeights[ 'Process' ] = 0 cutFlowScaledListWeights[ 'Preselection' ] = 0 for k in listCuts: cutFlowList[ k[0] ] = 0 cutFlowScaledList[ k[0] ] = 0 cutFlowScaledListWeights[ k[0] ] = 0 cutFlowList[ 'btag' ] = 0 cutFlowScaledList[ 'btag' ] = 0 cutFlowScaledListWeights[ 'btag' ] = 0 for i in xrange(numEntries): events.GetEntry(i) #---- progress of the reading -------- fraction = 10.*i/(1.*numEntries) if TMath.FloorNint(fraction) > d: print str(10*TMath.FloorNint(fraction))+'%' d = TMath.FloorNint(fraction) #if ( i > 100000 ): break Run = events.run Lumi = events.lumi NumEvent = events.event puWeight = events.puWeight if 'v06' in args.version: pdfWeight = events.pdfWeight lumiWeight = events.lumiWeight HT = events.HT MET = events.MET numJets = events.numJets massAve = getattr( events, (args.grooming+"MassAve").replace('Puppi','') ) jet1Mass = getattr( events, 'jet1'+(args.grooming+"Mass").replace('pruned','Pruned').replace('soft','Soft').replace('Puppi','')) jet2Mass = getattr( events, 'jet2'+(args.grooming+"Mass").replace('pruned','Pruned').replace('soft','Soft').replace('Puppi','')) jet1Pt = events.jet1Pt jet2Pt = events.jet2Pt jet1Eta = events.jet1Eta jet2Eta = events.jet2Eta jet1CosThetaStar = events.jet1CosThetaStar jet2CosThetaStar = events.jet2CosThetaStar jet1BtagCSV = ( events.jet1btagCSVv2 > 0.800 ) jet2BtagCSV = ( events.jet2btagCSVv2 > 0.800 ) #print 'Entry ', Run, ':', Lumi, ':', NumEvent if 'DATA' in sample: scale = 1 #elif 'RPV' in sample: scale = 2606 * puWeight * SF else: scale = 2666 * puWeight * lumiWeight if 'PDF' in UNC: if 'Up' in UNC: scale = scale*(1+pdfWeight) else: scale = scale*(1-pdfWeight) cutFlowList[ 'Process' ] += 1 cutFlowScaledList[ 'Process' ] += scale cutFlowScaledList[ 'Process' ] += (puWeight*puWeight) ########## DDT jet1RhoDDT = TMath.Log( 
jet1Mass*jet1Mass/jet1Pt ) jet2RhoDDT = TMath.Log( jet2Mass*jet2Mass/jet2Pt ) jet1Tau21DDT = events.jet1Tau21 + 0.063 * jet1RhoDDT jet2Tau21DDT = events.jet2Tau21 + 0.063 * jet2RhoDDT #### Pre-selection HTCut = ( HT > 900 ) dijetCut = ( numJets > 1 ) #jetPtCut = ( jet1Pt > 500 ) and ( jet2Pt > 450 ) jetPtCut = ( jet1Pt > 150 ) and ( jet2Pt > 150 ) #if HTCut and dijetCut and jetPtCut: if HTCut and dijetCut : cutFlowList[ 'Preselection' ] += 1 cutFlowScaledList[ 'Preselection' ] += scale cutFlowScaledList[ 'Preselection' ] += (puWeight*puWeight) sigCutsList = [] allHistos[ "HT_"+sam ].Fill( HT, scale ) allHistos[ "MET_"+sam ].Fill( MET, scale ) allHistos[ "massAve_"+sam ].Fill( massAve, scale ) allHistos[ "numJets_"+sam ].Fill( numJets, scale ) allHistos[ "jet1Pt_"+sam ].Fill( jet1Pt, scale ) allHistos[ "jet2Pt_"+sam ].Fill( jet2Pt, scale ) allHistos[ "jet1RhoDDT_"+sam ].Fill( jet1RhoDDT, scale ) allHistos[ "jet2RhoDDT_"+sam ].Fill( jet2RhoDDT, scale ) allHistos[ "jet1Tau21VsRhoDDT_"+sam ].Fill( events.jet1Tau21, jet1RhoDDT, scale ) allHistos[ "jet2Tau21VsRhoDDT_"+sam ].Fill( events.jet2Tau21, jet2RhoDDT, scale ) allHistos[ "jet1Tau21DDT_"+sam ].Fill( jet1Tau21DDT, scale ) allHistos[ "jet2Tau21DDT_"+sam ].Fill( jet2Tau21DDT, scale ) allHistos[ "jet1Tau21DDTVsRhoDDT_"+sam ].Fill( jet1Tau21DDT, jet1RhoDDT, scale ) allHistos[ "jet2Tau21DDTVsRhoDDT_"+sam ].Fill( jet2Tau21DDT, jet2RhoDDT, scale ) allHistos[ "prunedMassAsym_"+sam ].Fill( events.prunedMassAsym, scale ) allHistos[ "deltaEtaDijet_"+sam ].Fill( events.deltaEtaDijet, scale ) allHistos[ "jet1CosThetaStar_"+sam ].Fill( jet1CosThetaStar, scale ) allHistos[ "jet2CosThetaStar_"+sam ].Fill( jet2CosThetaStar, scale ) allHistos[ "jet1Tau21_"+sam ].Fill( events.jet1Tau21, scale ) allHistos[ "jet2Tau21_"+sam ].Fill( events.jet2Tau21, scale ) allHistos[ "jet1Tau31_"+sam ].Fill( events.jet1Tau31, scale ) allHistos[ "jet2Tau31_"+sam ].Fill( events.jet2Tau31, scale ) allHistos[ "jet1Tau32_"+sam ].Fill( events.jet1Tau32, scale ) allHistos[ "jet2Tau32_"+sam ].Fill( events.jet2Tau32, scale ) allHistos[ "jet1SubjetPtRatio_"+sam ].Fill( events.jet1SubjetPtRatio, scale ) allHistos[ "jet2SubjetPtRatio_"+sam ].Fill( events.jet2SubjetPtRatio, scale ) allHistos[ "jet1BtagCSV_"+sam ].Fill( 1 if jet1BtagCSV else 0 ) allHistos[ "jet2BtagCSV_"+sam ].Fill( 1 if jet2BtagCSV else 0 ) bothBtag = ( jet1BtagCSV and jet2BtagCSV ) oneBtag = ( jet1BtagCSV or jet2BtagCSV ) if bothBtag: allHistos[ "jetsBtagCSV_"+sam ].Fill( 2 ) elif oneBtag: allHistos[ "jetsBtagCSV_"+sam ].Fill( 1 ) else: allHistos[ "jetsBtagCSV_"+sam ].Fill( 0 ) for var in listCuts: #allHistos[ var[0]+'_'+sample ].Fill( getattr( events, var[0] ), scale ) nextCut = False if ( getattr( events, var[0] ) < var[1] ): nextCut = True else: nextCut = False sigCutsList.append( nextCut ) if all(sigCutsList): allHistos[ 'massAve_'+var[0]+'_'+sample ].Fill( massAve, scale ) ### adding two prong scale factor allHistos[ 'jet1Tau21_'+var[0]+'_'+sample ].Fill( events.jet1Tau21, scale ) allHistos[ 'jet2Tau21_'+var[0]+'_'+sample ].Fill( events.jet2Tau21, scale ) #if 'low' in args.RANGE: allHistos[ 'jet1Tau31_'+var[0]+'_'+sample ].Fill( events.jet1Tau31, scale ) #if 'low' in args.RANGE: allHistos[ 'jet2Tau31_'+var[0]+'_'+sample ].Fill( events.jet2Tau31, scale ) allHistos[ 'prunedMassAsym_'+var[0]+'_'+sample ].Fill( events.prunedMassAsym, scale ) allHistos[ 'deltaEtaDijet_'+var[0]+'_'+sample ].Fill( events.deltaEtaDijet, scale ) allHistos[ "HT_"+var[0]+"_"+sam ].Fill( HT, scale ) allHistos[ "MET_"+var[0]+"_"+sam ].Fill( MET, 
scale ) allHistos[ "numJets_"+var[0]+"_"+sam ].Fill( numJets, scale ) allHistos[ "jet1Pt_"+var[0]+"_"+sam ].Fill( jet1Pt, scale ) allHistos[ "jet2Pt_"+var[0]+"_"+sam ].Fill( jet2Pt, scale ) cutFlowList[ var[0] ] += 1 cutFlowScaledList[ var[0] ] += scale cutFlowScaledList[ var[0] ] += (puWeight*puWeight) #if oneBtag and all(sigCutsList): if bothBtag and all(sigCutsList): allHistos[ 'massAve_btag_'+sample ].Fill( massAve, scale ) ### adding two prong scale factor allHistos[ 'jet1Tau21_btag_'+sample ].Fill( events.jet1Tau21, scale ) allHistos[ 'jet2Tau21_btag_'+sample ].Fill( events.jet2Tau21, scale ) #if 'low' in args.RANGE: allHistos[ 'jet1Tau31_btag_'+sample ].Fill( events.jet1Tau31, scale ) #if 'low' in args.RANGE: allHistos[ 'jet2Tau31_btag_'+sample ].Fill( events.jet2Tau31, scale ) allHistos[ 'prunedMassAsym_btag_'+sample ].Fill( events.prunedMassAsym, scale ) allHistos[ 'deltaEtaDijet_btag_'+sample ].Fill( events.deltaEtaDijet, scale ) allHistos[ "HT_btag_"+sam ].Fill( HT, scale ) allHistos[ "MET_btag_"+sam ].Fill( MET, scale ) allHistos[ "numJets_btag_"+sam ].Fill( numJets, scale ) allHistos[ "jet1Pt_btag_"+sam ].Fill( jet1Pt, scale ) allHistos[ "jet2Pt_btag_"+sam ].Fill( jet2Pt, scale ) cutFlowList[ 'btag' ] += 1 cutFlowScaledList[ 'btag' ] += scale cutFlowScaledList[ 'btag' ] += (puWeight*puWeight) #### n-1 plots ''' if ( 'low' in args.RANGE ): if ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ) and ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ) and ( getattr( events, listCuts[2][0] ) < listCuts[2][1] ) and ( getattr( events, listCuts[3][0] ) < listCuts[3][1] ) and ( getattr( events, listCuts[4][0] ) < listCuts[4][1] ): allHistos[ 'deltaEtaDijet_n-1_'+sample ].Fill( events.deltaEtaDijet, scale ) if ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ) and ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ) and ( getattr( events, listCuts[2][0] ) < listCuts[2][1] ) and ( getattr( events, listCuts[3][0] ) < listCuts[3][1] ) and ( getattr( events, listCuts[5][0] ) < listCuts[5][1] ): allHistos[ 'prunedMassAsym_n-1_'+sample ].Fill( events.prunedMassAsym, scale ) if ( getattr( events, listCuts[2][0] ) < listCuts[2][1] ) and ( getattr( events, listCuts[3][0] ) < listCuts[3][1] ) and ( getattr( events, listCuts[4][0] ) < listCuts[4][1] ) and ( getattr( events, listCuts[5][0] ) < listCuts[5][1] ): allHistos[ 'jet1Tau21_n-1_'+sample ].Fill( events.jet1Tau21, scale ) allHistos[ 'jet2Tau21_n-1_'+sample ].Fill( events.jet2Tau21, scale ) if ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ) and ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ) and ( getattr( events, listCuts[4][0] ) < listCuts[4][1] ) and ( getattr( events, listCuts[5][0] ) < listCuts[5][1] ): allHistos[ 'jet1Tau31_n-1_'+sample ].Fill( events.jet1Tau31, scale ) allHistos[ 'jet2Tau31_n-1_'+sample ].Fill( events.jet2Tau31, scale ) else: ''' if ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ) and ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ) and ( getattr( events, listCuts[3][0] ) < listCuts[3][1] ): allHistos[ 'prunedMassAsym_n-1_'+sample ].Fill( events.prunedMassAsym, scale ) if ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ) and ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ) and ( getattr( events, listCuts[2][0] ) < listCuts[2][1] ): allHistos[ 'deltaEtaDijet_n-1_'+sample ].Fill( events.deltaEtaDijet, scale ) if ( getattr( events, listCuts[2][0] ) < listCuts[2][1] ) and ( getattr( events, listCuts[3][0] ) < listCuts[3][1] ): allHistos[ 'jet1Tau21_n-1_'+sample ].Fill( 
events.jet1Tau21, scale ) allHistos[ 'jet2Tau21_n-1_'+sample ].Fill( events.jet2Tau21, scale ) ########## for Ind in listOfOptions: allHistos[ listCuts[Ind[0]][0]+'Vs'+listCuts[Ind[1]][0]+'_'+sample ].Fill( getattr( events, listCuts[Ind[0]][0] ), getattr( events, listCuts[Ind[1]][0] ), scale ) tmpSigCutsList = [ x for i,x in enumerate(sigCutsList) if i not in Ind ] ##### Bkg estimation/ABCD method if ( all(sigCutsList[:-2]) ): # and ( getattr( events, listCuts[5][0] ) > (listCuts[5][1]*2) )): allHistos[ listCuts[-2][0]+'Vs'+listCuts[-1][0]+'_'+sample+'_Bkg' ].Fill( getattr( events, listCuts[0][0] ), getattr( events, listCuts[1][0] ), scale ) plotABCD( [ ( getattr( events, listCuts[-2][0] ) < listCuts[-2][1] ), ( getattr( events, listCuts[-1][0] ) < listCuts[-1][1] ) ], [ listCuts[-2][0], listCuts[-1][0] ], events, massAve, scale, sample ) if bothBtag: plotABCD( [ ( getattr( events, listCuts[-2][0] ) < listCuts[-2][1] ), ( getattr( events, listCuts[-1][0] ) < listCuts[-1][1] ) ], [ listCuts[-2][0], listCuts[-1][0] ], events, massAve, scale, sample+'_btag' ) ####### bkg estimation alternatives if sigCutsList[2]: allHistos[ 'jet1Tau21VsdeltaEtaDijet_'+sample+'_Bkg' ].Fill( getattr( events, 'jet1Tau21' ), getattr( events, 'deltaEtaDijet' ), scale ) allHistos[ 'jet2Tau21VsdeltaEtaDijet_'+sample+'_Bkg' ].Fill( getattr( events, 'jet2Tau21' ), getattr( events, 'deltaEtaDijet' ), scale ) plotABCDv2( [ ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ), ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ), ( getattr( events, listCuts[-1][0] ) < listCuts[-1][1] ) ], [ listCuts[0][0], listCuts[-1][0] ], events, massAve, scale, sample ) plotABCDv2( [ ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ), ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ), ( getattr( events, listCuts[-1][0] ) < listCuts[-1][1] ) ], [ listCuts[1][0], listCuts[-1][0] ], events, massAve, scale, sample ) ''' if sigCutsList[-1]: allHistos[ 'jet1Tau21VsprunedMassAsym_'+sample+'_Bkg' ].Fill( getattr( events, 'jet1Tau21' ), getattr( events, 'prunedMassAsym' ), scale ) allHistos[ 'jet2Tau21VsprunedMassAsym_'+sample+'_Bkg' ].Fill( getattr( events, 'jet2Tau21' ), getattr( events, 'prunedMassAsym' ), scale ) plotABCDv2( [ ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ), ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ), ( getattr( events, listCuts[-2][0] ) < listCuts[-2][1] ) ], [ listCuts[0][0], listCuts[-2][0] ], events, massAve, scale, sample ) plotABCDv2( [ ( getattr( events, listCuts[0][0] ) < listCuts[0][1] ), ( getattr( events, listCuts[1][0] ) < listCuts[1][1] ), ( getattr( events, listCuts[-2][0] ) < listCuts[-2][1] ) ], [ listCuts[1][0], listCuts[-2][0] ], events, massAve, scale, sample ) ''' dummy = 1 for q in cutFlowList: allHistos[ 'cutFlow_'+sample ].SetBinContent( dummy, cutFlowList[q] ) allHistos[ 'cutFlow_'+sample ].GetXaxis().SetBinLabel( dummy, q ) allHistos[ 'cutFlow_Scaled_'+sample ].SetBinContent( dummy, cutFlowScaledList[q] ) allHistos[ 'cutFlow_Scaled_'+sample ].GetXaxis().SetBinLabel( dummy, q ) allHistos[ 'cutFlow_Scaled_Weights_'+sample ].SetBinContent( dummy, cutFlowScaledListWeights[q] ) allHistos[ 'cutFlow_Scaled_Weights_'+sample ].GetXaxis().SetBinLabel( dummy, q ) dummy+=1 for sample in dictSamples: nameABCD = listCuts[-2][0]+'Vs'+listCuts[-1][0]+'_'+sample allHistos[ 'massAve_'+nameABCD+'_BC' ].Multiply( allHistos[ 'massAve_'+nameABCD+'_B' ], allHistos[ 'massAve_'+nameABCD+'_C' ], 1, 1, '') allHistos[ 'massAve_'+nameABCD+'_ABCDProj' ].Divide( allHistos[ 
'massAve_'+nameABCD+'_BC' ], allHistos[ 'massAve_'+nameABCD+'_D' ], 1, 1, '') ''' ### The two lines above are doing exactly the following: for ibin in range( 0, allHistos[ 'massAve_'+nameABCD+'_B' ].GetNbinsX() ): Bcont = allHistos[ 'massAve_'+nameABCD+'_B' ].GetBinContent( ibin ) Berr = allHistos[ 'massAve_'+nameABCD+'_B' ].GetBinError( ibin ) Ccont = allHistos[ 'massAve_'+nameABCD+'_C' ].GetBinContent( ibin ) Cerr = allHistos[ 'massAve_'+nameABCD+'_C' ].GetBinError( ibin ) Dcont = allHistos[ 'massAve_'+nameABCD+'_D' ].GetBinContent( ibin ) Derr = allHistos[ 'massAve_'+nameABCD+'_D' ].GetBinError( ibin ) try: Nbkg = ( Bcont * Ccont ) / Dcont except ZeroDivisionError: Nbkg = 0 allHistos[ "massAve_"+nameABCD+'_ABCDProj' ].SetBinContent( ibin, Nbkg ) #try: NbkgErr = Nbkg * TMath.Sqrt( TMath.Power( Berr / Bcont, 2 ) + TMath.Power( Cerr / Ccont, 2 ) + TMath.Power( Derr / Dcont, 2 ) ) try: NbkgErr = Nbkg * TMath.Sqrt( TMath.Power( TMath.Sqrt(Bcont) / Bcont, 2 ) + TMath.Power( TMath.Sqrt(Ccont) / Ccont, 2 ) + TMath.Power( TMath.Sqrt(Dcont) / Dcont, 2 ) ) except ZeroDivisionError: NbkgErr = 0 allHistos[ "massAve_"+nameABCD+'_ABCDProj' ].SetBinError( ibin, NbkgErr ) ''' outputFile.Write() ##### Closing print 'Writing output file: '+ outputFileName outputFile.Close()
def PoissonError(nObs, ErrorType, plot=""): # Prepare histograms LambdaMin = 0. LambdaMax = nObs + 6*np.sqrt(nObs) Nsteps = 1000 h_likelihood = TH1D( "h_likelihood","",Nsteps,LambdaMin,LambdaMax ) h_2loglik = TH1D( "h_2loglik","",Nsteps,LambdaMin,LambdaMax ) h_pdf_full = TH1D( "h_pdf_full","",Nsteps,LambdaMin,LambdaMax ) IntFraction =ROOT.Math.gaussian_cdf(-1,1,0) # loop over possible Lambda values for iBin in range(1,Nsteps+1): Lambda=h_likelihood.GetBinCenter(iBin) PoissonProb = TMath.Poisson(nObs,Lambda) LogLikelihood = -2.*TMath.Log(PoissonProb) h_likelihood.Fill( Lambda,PoissonProb ) h_2loglik.Fill( Lambda, LogLikelihood ) h_pdf_full.Fill( Lambda, PoissonProb*1. ) # get characteristic values bin_central = h_2loglik.GetMinimumBin() LoglikMin = h_2loglik.GetBinContent(bin_central) Lambda_central = h_2loglik.GetBinCenter(bin_central) LambdaLow = -1. LambdaUp = -1. if ErrorType=="ClassicalCentral": # Frequentist NobsMax = nObs+100 for iBin in range(1,h_pdf_full.GetNbinsX()+1): Lambda = h_pdf_full.GetBinCenter(iBin) PoissonSumLow = 0. PoissonSumUp = 0. for i in range(nObs,NobsMax+1): PoissonSumLow += TMath.Poisson(i,Lambda) if( PoissonSumLow>IntFraction and LambdaLow<0 ): LambdaLow = Lambda for i in range(0,nObs+1): PoissonSumUp += TMath.Poisson(i,Lambda) if( PoissonSumUp<IntFraction and LambdaUp<0 ): LambdaUp = Lambda cvs = TCanvas("Standard Canvas","",600,600) cvs.Divide(1,2) cvs.cd(1) hLow = TH1D("hLow","CLASSICAL CENTRAL",1000,0,17) for i in range(1,hLow.GetNbinsX()+1): hLow.SetBinContent(i, TMath.Poisson( hLow.GetBinCenter(i),LambdaLow )) hLow.SetLineColor(1) hLow.GetXaxis().SetTitle(r"#mu") h_conf = hLow.Clone("h_conf") for i in range(1,h_conf.GetNbinsX()+1): if(h_conf.GetBinCenter(i)<=nObs): h_conf.SetBinContent(i, 0) h_conf.SetFillColorAlpha(9,0.5) h_conf.SetLineWidth(1) h_conf.SetLineColor(1) LowLine = TLine(LambdaLow,0,LambdaLow,hLow.GetBinContent(hLow.FindBin(LambdaLow))) hLow.SetStats(kFALSE) hLow.Draw("l") h_conf.Draw("l same") LowLine.Draw() text = TLatex() text.SetTextSize(0.5) text.SetTextColor(1) text.DrawLatex(0.5,0.8, "LOL") cvs.cd(2) hUp = TH1D("hUp","",1000,0,17) for i in range(1,hUp.GetNbinsX()+1): hUp.SetBinContent(i, TMath.Poisson( hLow.GetBinCenter(i),LambdaUp )) hUp.SetLineColor(1) hUp.GetXaxis().SetTitle(r"#mu") h_conf1 = hUp.Clone("h_conf") for i in range(1,h_conf.GetNbinsX()+1): if(h_conf.GetBinCenter(i)>=nObs): h_conf1.SetBinContent(i, 0) h_conf1.SetFillColorAlpha(9,0.5) h_conf1.SetLineWidth(1) h_conf1.SetLineColor(1) UpLine = TLine(LambdaUp,0,LambdaUp,hUp.GetBinContent(hUp.FindBin(LambdaUp))) hUp.SetStats(kFALSE) hUp.Draw("l") h_conf1.Draw("l same") UpLine.Draw() cvs.cd() leg1 = TLegend(0.35,0.70,0.90,0.85) leg1.SetBorderSize(1); leg1.SetFillColor(0); h_conf.SetMarkerColor(0) leg1a = leg1.AddEntry(h_conf, r"(#mu_{low}, #mu_{up}) = ("+str(round(LambdaLow,2))+","+str(round(LambdaUp,2))+")","p"); leg1a.SetTextSize(0.04); leg1.Draw() cvs.Print("Plots/Errors_"+ErrorType+".eps") if ErrorType=="LikelihoodRatio": for i in range(1,h_2loglik.GetNbinsX()+1): if (h_2loglik.GetBinCenter(i)<LoglikMin and h_2loglik.GetBinContent(i)-LoglikMin>=1.): LambdaLow=h_2loglik.GetBinCenter(i) if (h_2loglik.GetBinCenter(i)>LoglikMin and h_2loglik.GetBinContent(i)-LoglikMin<=1.): LambdaUp=h_2loglik.GetBinCenter(i) cvs = TCanvas("Standard Canvas","",600,400) cvs.cd() LowLine = TLine(LambdaLow,2,LambdaLow,h_2loglik.GetBinContent(h_2loglik.FindBin(LambdaLow))); LowLine.SetLineWidth(1) UpLine = TLine(LambdaUp,2,LambdaUp,h_2loglik.GetBinContent(h_2loglik.FindBin(LambdaUp))); 
UpLine.SetLineWidth(1) ObsLine = TLine(nObs,2,nObs,h_2loglik.GetBinContent(h_2loglik.FindBin(nObs))); ObsLine.SetLineWidth(1); ObsLine.SetLineColor(2); ObsLine.SetLineStyle(7) h_2loglik.SetFillColor(0) h_2loglik.SetStats(kFALSE) h_2loglik.SetTitle("LIKELIHOOD RATIO") h_2loglik.SetAxisRange(0.8,8,"X") h_2loglik.SetAxisRange(2,7,"Y") h_2loglik.Draw("hist lp") h_2loglik.GetXaxis().SetTitle(r"#mu") h_conf = h_2loglik.Clone("h_conf") for i in range(1,h_conf.GetNbinsX()+1): if(h_conf.GetBinCenter(i)>LambdaLow and h_conf.GetBinCenter(i)<LambdaUp): h_conf.SetBinContent(i,0) h_conf.SetFillColor(9) h_conf.Draw("hist same") leg1 = TLegend(0.55,0.70,0.90,0.85) leg1.SetBorderSize(1); leg1.SetFillColor(0); h_conf.SetMarkerColor(0) leg1a = leg1.AddEntry(h_conf, r"(#mu_{low}, #mu_{up}) = ("+str(round(LambdaLow,2))+","+str(round(LambdaUp,2))+")","p"); leg1a.SetTextSize(0.04); leg1.Draw() LowLine.Draw() UpLine.Draw() ObsLine.Draw() cvs.Print("Plots/Errors_"+ErrorType+".eps") if ErrorType=="BayesCentral": # Work on likelihood as PDF Integral = h_likelihood.Integral() for i in range(1,h_likelihood.GetNbinsX()+1): h_likelihood.SetBinContent(i,h_likelihood.GetBinContent(i)/Integral) # Sum over bins until reached CL Sum_low = 0. Sum_up = 0. for i in range(1,bin_central+1): if Sum_low<=IntFraction: Sum_low += h_likelihood.GetBinContent(i) LambdaLow = h_likelihood.GetBinCenter(i) for i in range(h_likelihood.GetNbinsX(),bin_central,-1): if Sum_up<=IntFraction: Sum_up += h_likelihood.GetBinContent(i) LambdaUp = h_likelihood.GetBinCenter(i) cvs = TCanvas("Standard Canvas","",600,400) cvs.cd() LowLine = TLine(LambdaLow,0,LambdaLow,h_likelihood.GetBinContent(h_likelihood.FindBin(LambdaLow))); LowLine.SetLineWidth(1) UpLine = TLine(LambdaUp,0,LambdaUp,h_likelihood.GetBinContent(h_likelihood.FindBin(LambdaUp))); UpLine.SetLineWidth(1) ObsLine = TLine(nObs,0,nObs,h_likelihood.GetBinContent(h_2loglik.FindBin(nObs))); ObsLine.SetLineWidth(1); ObsLine.SetLineColor(2); ObsLine.SetLineStyle(7) h_likelihood.SetFillColor(0) h_likelihood.SetStats(kFALSE) h_likelihood.SetTitle("BAYES CENTRAL") #h_likelihood.SetAxisRange(0.8,8,"X") #h_likelihood.SetAxisRange(2,7,"Y") h_likelihood.Draw("hist lp") h_likelihood.GetXaxis().SetTitle(r"#mu") h_conf = h_likelihood.Clone("h_conf") for i in range(1,h_conf.GetNbinsX()+1): if(h_conf.GetBinCenter(i)>LambdaLow and h_conf.GetBinCenter(i)<LambdaUp): h_conf.SetBinContent(i,0) h_conf.SetFillColor(9) h_conf.Draw("hist same") leg1 = TLegend(0.55,0.70,0.90,0.85) leg1.SetBorderSize(1); leg1.SetFillColor(0); h_conf.SetMarkerColor(0) leg1a = leg1.AddEntry(h_conf, r"(#mu_{low}, #mu_{up}) = ("+str(round(LambdaLow,2))+","+str(round(LambdaUp,2))+")","p"); leg1a.SetTextSize(0.04); leg1.Draw() LowLine.Draw() UpLine.Draw() ObsLine.Draw() cvs.Print("Plots/Errors_"+ErrorType+".eps") if ErrorType=="BayesHDI": # Work on likelihood as PDF Integral = h_likelihood.Integral() for i in range(1,h_likelihood.GetNbinsX()+1): h_likelihood.SetBinContent(i,h_likelihood.GetBinContent(i)/Integral) Area = 0. 
RightIndex=bin_central while Area<=(1-2*IntFraction): RightIndex+=1 # find corresponding bin on left side for i in range(1,bin_central): if h_likelihood.GetBinContent(i)<=h_likelihood.GetBinContent(RightIndex): LeftIndex=i Area = h_likelihood.Integral( LeftIndex,RightIndex ) LambdaLow = h_likelihood.GetBinCenter(LeftIndex) LambdaUp = h_likelihood.GetBinCenter(RightIndex) cvs = TCanvas("Standard Canvas","",600,400) cvs.cd() LowLine = TLine(LambdaLow,0,LambdaLow,h_likelihood.GetBinContent(h_likelihood.FindBin(LambdaLow))); LowLine.SetLineWidth(1) UpLine = TLine(LambdaUp,0,LambdaUp,h_likelihood.GetBinContent(h_likelihood.FindBin(LambdaUp))); UpLine.SetLineWidth(1) ObsLine = TLine(nObs,0,nObs,h_likelihood.GetBinContent(h_2loglik.FindBin(nObs))); ObsLine.SetLineWidth(1); ObsLine.SetLineColor(2); ObsLine.SetLineStyle(7) h_likelihood.SetFillColor(0) h_likelihood.SetStats(kFALSE) h_likelihood.SetTitle("BAYES SHORTEST") h_likelihood.Draw("hist lp") h_likelihood.GetXaxis().SetTitle(r"#mu") h_conf = h_likelihood.Clone("h_conf") for i in range(1,h_conf.GetNbinsX()+1): if(h_conf.GetBinCenter(i)>LambdaLow and h_conf.GetBinCenter(i)<LambdaUp): h_conf.SetBinContent(i,0) h_conf.SetFillColor(9) h_conf.Draw("hist same") leg1 = TLegend(0.55,0.70,0.90,0.85) leg1.SetBorderSize(1); leg1.SetFillColor(0); h_conf.SetMarkerColor(0) leg1a = leg1.AddEntry(h_conf, r"(#mu_{low}, #mu_{up}) = ("+str(round(LambdaLow,2))+","+str(round(LambdaUp,2))+")","p"); leg1a.SetTextSize(0.04); leg1.Draw() LowLine.Draw() UpLine.Draw() ObsLine.Draw() cvs.Print("Plots/Errors_"+ErrorType+".eps") return [LambdaLow, LambdaUp] return
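# A hedged usage sketch of PoissonError for 4 observed events (an arbitrary choice). Each call
# draws Plots/Errors_<ErrorType>.eps; at least the Bayesian shortest-interval branch also
# returns the [LambdaLow, LambdaUp] pair.
interval = PoissonError(4, "BayesHDI")        # shortest ~68% Bayesian interval on the mean
print("interval on the Poisson mean:", interval)
PoissonError(4, "LikelihoodRatio")            # also produces Plots/Errors_LikelihoodRatio.eps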
def BackgroundFit_f1_trigcorr_bbh(x, par):
    # background shape from BackgroundFit_f1 times the bbH trigger efficiency
    return par[0] * (1. - (x[0] / 8.e3))**par[1] / (
        (x[0] / 8.e3)**(par[2] + par[3] * TMath.Log(x[0] / 8.e3))) \
        * trigger_efficiency.trigger_efficiency_bbh(x[0])
def create_off_interpolations(OffDir, plot=False): global log_ene_off global numtotoff global inter_func off_en_edges = [] off_en_center = [] log_off_en_center = np.empty(shape=(numsamples, config.NumBckgEnBins), dtype='double') y_log_off_en_center = np.empty(shape=(numsamples, config.NumBckgEnBins), dtype='double') binsize = math.log( config.EnCutHighBG / config.EnCutLowBG) / config.NumBckgEnBins for ebin in range(0, config.NumBckgEnBins + 1): off_en_edges.append(config.EnCutLowBG * math.exp(binsize * ebin)) for ebin in range(0, config.NumBckgEnBins): off_en_center.append( math.sqrt(off_en_edges[ebin + 1] * off_en_edges[ebin])) # create here offbins with the (larger) background limits offbins = array('d', off_en_edges) for k in range(0, numsamples): print("\n\n--- INITIALIZING VALUES from:", OffDir[k], "\n") DoHisto = TFile.Open(OffDir[k], "READ") print(DoHisto) log_ene_off.append(np.empty( shape=(config.EvtMaxOff), dtype='double')) # array of log(energies) from all OFF events noff = np.array([DoHisto.Get("fOffSample")]) numtotoff[k] = noff[0].GetEntries() print("The total number of entries in the normalization histogram is:", numtotoff[k]) isto = ROOT.TH1F("interpolation", "interpolation", config.NumBckgEnBins, offbins) num = 0 norm = 0. for i in range(0, numtotoff[k]): noff[0].GetEntry(i) energy = noff[0].GetArgs()[0] isto.Fill(energy) if (not pass_energy_cuts(energy)): continue log_ene_off[k][num] = math.log(energy) num += 1 numtotoff[k] = num log_ene_off[k] = np.resize(log_ene_off[k], num) print('The number of off entries (after energy cuts) is:', num) print(log_ene_off[k]) print('The sum of ', np.sum(log_ene_off[k])) ebin_cnt = 0 for ebin in range(0, config.NumBckgEnBins): bin = isto.GetXaxis().FindBin(off_en_center[ebin]) bin_content = isto.GetBinContent(bin) if (bin_content < 1.): print('ebin: ', ebin, ' bin: ', bin, 'skipped because of zero entries') continue # calculate here the logarithm of dN/dE (which normalized will provide dP/dE) log_off_en_center[k][ebin_cnt] = math.log(off_en_center[ebin]) y_log_off_en_center[k][ebin_cnt] = math.log( bin_content / (off_en_edges[ebin + 1] - off_en_edges[ebin])) print('ebin: ', ebin, ' ebin_cnt=', ebin_cnt, ' log_off_center: ', log_off_en_center[k][ebin_cnt], ' content: ', y_log_off_en_center[k][ebin_cnt], 'bin= ', bin, ' content=', bin_content) ebin_cnt += 1 inter_func.append( interp1d( log_off_en_center[k], y_log_off_en_center[k], kind='linear')) #inter_func = interp1d(x, y, kind='linear') print(math.exp(inter_func[k](math.log(90)))) print('found sum: ', np.sum(inter_func[k](log_ene_off[k]))) if plot: xnew = np.linspace(TMath.Log(config.energy_cut), TMath.Log(config.energy_upper_cut), num=200, endpoint=True) plt.plot(log_off_en_center[k], y_log_off_en_center[k], 'o', xnew, inter_func[k](xnew), '-') plt.xlabel('Log Eest') plt.ylabel('Log events') plt.legend(['data', 'linear'], loc='best') plt.show() plt.savefig('interpolation_{}.png'.format(k)) # for k in range (0, 2): #k in range 0,2 ??? 
## if k < NumSubSamples: # hOffHeight[k] = ROOT.TH1F(ROOT.Form("hOffHeight_%s" % SampleName[k]), ROOT.Form("Background Level_%s" % SampleName[k]), config.NumBckgEnBins, off_en_edges) ## else: ## hOffHeight[k] = ROOT.TH1F("hOffHeight", "Background Level", config.NumBckgEnBins, off_en_edges) ## if k < NumSubSamples: ## hOnHeight[k] = ROOT.TH1F(ROOT.Form("hOnHeight_%s" % SampleName[k]), ROOT.Form("Signal Level_%s" % SampleName[k]), config.NumBckgEnBins, off_en_edges) ## else: ## hOnHeight[k] = ROOT.TH1F("hOnHeight", "Signal Level", config.NumBckgEnBins, off_en_edges) return
def calc_doubling(slope):
    return 1. / (slope * 24 * 3600) * TMath.Log(2.)
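# A worked example, under the assumption that `slope` is an exponential growth rate per second
# (the 24*3600 factor then converts the doubling time into days).
# slope of 1e-5 per second -> doubling time ln(2) / (1e-5 * 86400) ~= 0.80 days
print(calc_doubling(1e-5))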
def main(): if len(sys.argv) < 3: print("Usage: ToyMC [numberEvents] [randomSeed]") return numberEvents = int(sys.argv[1]) seed = int(sys.argv[2]) print( "==================================== TRAIN ====================================" ) f = root_open( "legotrain_350_20161117-2106_LHCb4_fix_CF_pPb_MC_ptHardMerged.root", "read" ) hJetPt = f.Get("AliJJetJtTask/AliJJetJtHistManager/JetPt/JetPtNFin{:02d}".format(2)) hZ = f.Get("AliJJetJtTask/AliJJetJtHistManager/Z/ZNFin{:02d}".format(2)) FillFakes = False dummy_variable = True weight = True NBINS = 50 LimL = 0.1 LimH = 500 logBW = (TMath.Log(LimH) - TMath.Log(LimL)) / NBINS LogBinsX = [LimL * math.exp(ij * logBW) for ij in range(0, NBINS + 1)] hJetPtMeas = Hist(LogBinsX) hJetPtTrue = Hist(LogBinsX) myRandom = TRandom3(seed) fEff = TF1("fEff", "1-0.5*exp(-x)") jetBinBorders = [5, 10, 20, 30, 40, 60, 80, 100, 150, 500] hJetPtMeasCoarse = Hist(jetBinBorders) hJetPtTrueCoarse = Hist(jetBinBorders) NBINSJt = 64 low = 0.01 high = 10 BinW = (TMath.Log(high) - TMath.Log(low)) / NBINSJt LogBinsJt = [low * math.exp(i * BinW) for i in range(NBINSJt + 1)] hJtTrue = Hist(LogBinsJt) hJtMeas = Hist(LogBinsJt) hJtFake = Hist(LogBinsJt) LogBinsPt = jetBinBorders jetPtBins = [(a, b) for a, b in zip(jetBinBorders, jetBinBorders[1:])] hJtTrue2D = Hist2D(LogBinsJt, LogBinsPt) hJtMeas2D = Hist2D(LogBinsJt, LogBinsPt) hJtFake2D = Hist2D(LogBinsJt, LogBinsPt) hJtMeasBin = [Hist(LogBinsJt) for i in jetBinBorders] hJtTrueBin = [Hist(LogBinsJt) for i in jetBinBorders] response = RooUnfoldResponse(hJtMeas, hJtTrue) response2D = RooUnfoldResponse(hJtMeas2D, hJtTrue2D) responseBin = [RooUnfoldResponse(hJtMeas, hJtTrue) for i in jetBinBorders] responseJetPt = RooUnfoldResponse(hJetPtMeas, hJetPtTrue) responseJetPtCoarse = RooUnfoldResponse(hJetPtMeasCoarse, hJetPtTrueCoarse) # Histogram index is jet pT index, Bin 0 is 5-10 GeV # Histogram X axis is observed jT, Bin 0 is underflow # Histogram Y axis is observed jet Pt, Bin 0 is underflow # Histogram Z axis is True jT, Bin 0 is underflow responses = [Hist3D(LogBinsJt, LogBinsPt, LogBinsJt) for i in jetPtBins] misses = Hist2D(LogBinsJt, LogBinsPt) fakes2D = Hist2D(LogBinsJt, LogBinsPt) outFile = TFile("tuple.root", "recreate") responseTuple = TNtuple( "responseTuple", "responseTuple", "jtObs:ptObs:jtTrue:ptTrue" ) hMultiTrue = Hist(50, 0, 50) hMultiMeas = Hist(50, 0, 50) hZMeas = Hist(50, 0, 1) hZTrue = Hist(50, 0, 1) hZFake = Hist(50, 0, 1) responseMatrix = Hist2D(LogBinsJt, LogBinsJt) numberJets = 0 numberFakes = 0 numberJetsMeasBin = [0 for i in jetBinBorders] numberJetsTrueBin = [0 for i in jetBinBorders] numberFakesBin = [0 for i in jetBinBorders] ieout = numberEvents / 10 if ieout > 10000: ieout = 10000 fakeRate = 1 start_time = datetime.now() print("Processing Training Events") for ievt in range(numberEvents): tracksTrue = [] tracksMeas = [0 for x in range(100)] if ievt % ieout == 0 and ievt > 0: time_elapsed = datetime.now() - start_time time_left = timedelta( seconds=time_elapsed.total_seconds() * 1.0 * (numberEvents - ievt) / ievt ) print( "Event {} [{:.2f}%] Time Elapsed: {} ETA: {}".format( ievt, 100.0 * ievt / numberEvents, fmtDelta(time_elapsed), fmtDelta(time_left), ) ) jetTrue = TVector3(0, 0, 0) jetMeas = TVector3(0, 0, 0) jetPt = hJetPt.GetRandom() remainder = jetPt if jetPt < 5: continue nt = 0 nt_meas = 0 while remainder > 0: trackPt = hZ.GetRandom() * jetPt if trackPt < remainder: track = TVector3() remainder = remainder - trackPt else: trackPt = remainder remainder = -1 if trackPt > 0.15: 
track.SetPtEtaPhi( trackPt, myRandom.Gaus(0, 0.1), myRandom.Gaus(math.pi, 0.2) ) tracksTrue.append(track) jetTrue += track if fEff.Eval(trackPt) > myRandom.Uniform(0, 1): tracksMeas[nt] = 1 jetMeas += track nt_meas += 1 else: tracksMeas[nt] = 0 nt += 1 fakes = [] for it in range(fakeRate * 100): if myRandom.Uniform(0, 1) > 0.99: fake = TVector3() fake.SetPtEtaPhi( myRandom.Uniform(0.15, 1), myRandom.Gaus(0, 0.1), myRandom.Gaus(math.pi, 0.2), ) fakes.append(fake) jetMeas += fake hJetPtMeas.Fill(jetMeas.Pt()) hJetPtTrue.Fill(jetTrue.Pt()) responseJetPt.Fill(jetMeas.Pt(), jetTrue.Pt()) responseJetPtCoarse.Fill(jetMeas.Pt(), jetTrue.Pt()) hMultiTrue.Fill(nt) hMultiMeas.Fill(nt_meas) ij_meas = GetBin(jetBinBorders, jetMeas.Pt()) ij_true = GetBin(jetBinBorders, jetTrue.Pt()) if nt < 5 or nt_meas < 5: continue numberJets += 1 if ij_meas >= 0: numberJetsMeasBin[ij_meas] += 1 hJetPtMeasCoarse.Fill(jetMeas.Pt()) if ij_true >= 0: numberJetsTrueBin[ij_true] += 1 hJetPtTrueCoarse.Fill(jetTrue.Pt()) for track, it in zip(tracksTrue, range(100)): zTrue = (track * jetTrue.Unit()) / jetTrue.Mag() jtTrue = (track - scaleJet(jetTrue, zTrue)).Mag() hZTrue.Fill(zTrue) if ij_true >= 0: if weight: hJtTrue.Fill(jtTrue, 1.0 / jtTrue) hJtTrueBin[ij_true].Fill(jtTrue, 1.0 / jtTrue) hJtTrue2D.Fill(jtTrue, jetTrue.Pt(), 1.0 / jtTrue) else: hJtTrue.Fill(jtTrue) hJtTrueBin[ij_true].Fill(jtTrue) hJtTrue2D.Fill(jtTrue, jetTrue.Pt()) if ij_meas >= 0: if tracksMeas[it] == 1: zMeas = (track * jetMeas.Unit()) / jetMeas.Mag() jtMeas = (track - scaleJet(jetMeas, zMeas)).Mag() hZMeas.Fill(zMeas) if weight: hJtMeasBin[ij_meas].Fill(jtMeas, 1.0 / jtMeas) hJtMeas.Fill(jtMeas, 1.0 / jtMeas) hJtMeas2D.Fill(jtMeas, jetMeas.Pt(), 1.0 / jtMeas) else: hJtMeas.Fill(jtMeas) hJtMeasBin[ij_meas].Fill(jtMeas) hJtMeas2D.Fill(jtMeas, jetMeas.Pt()) response.Fill(jtMeas, jtTrue) responseBin[ij_true].Fill(jtMeas, jtTrue) response2D.Fill(jtMeas, jetMeas.Pt(), jtTrue, jetTrue.Pt()) responseMatrix.Fill(jtMeas, jtTrue) responses[ij_true].Fill(jtMeas, jetMeas.Pt(), jtTrue) responseTuple.Fill(jtMeas, jetMeas.Pt(), jtTrue, jetTrue.Pt()) else: response.Miss(jtTrue) responseBin[ij_true].Miss(jtTrue) response2D.Miss(jtTrue, jetTrue.Pt()) misses.Fill(jtTrue, jetTrue.Pt()) responseTuple.Fill(-1, -1, jtTrue, jetTrue.Pt()) if ij_meas >= 0: for fake in fakes: zFake = (fake * jetMeas.Unit()) / jetMeas.Mag() jtFake = (fake - scaleJet(jetMeas, zFake)).Mag() hZMeas.Fill(zFake) hZFake.Fill(zFake) if weight: hJtMeas.Fill(jtFake, 1.0 / jtFake) hJtMeasBin[ij_meas].Fill(jtFake, 1.0 / jtFake) hJtMeas2D.Fill(jtFake, jetMeas.Pt(), 1.0 / jtFake) hJtFake2D.Fill(jtFake, jetMeas.Pt(), 1.0 / jtFake) hJtFake.Fill(jtFake, 1.0 / jtFake) else: hJtMeas.Fill(jtFake) hJtMeasBin[ij_meas].Fill(jtFake) hJtMeas2D.Fill(jtFake, jetMeas.Pt()) hJtFake2D.Fill(jtFake, jetMeas.Pt()) hJtFake.Fill(jtFake) if FillFakes: response.Fake(jtFake) responseBin[ij_true].Fake(jtFake) response2D.Fake(jtFake, jetMeas.Pt()) fakes2D.Fill(jtFake, jetMeas.Pt()) responseTuple.Fill(jtFake, jetMeas.Pt(), -1, -1) numberFakes += 1 numberFakesBin[ij_true] += 1 response2Dtest = make2Dresponse( responses, jetPtBins, hJtMeas2D, hJtTrue2D, misses=misses, fakes=fakes2D ) if dummy_variable: hJetPtMeas.Reset() hJetPtTrue.Reset() hMultiTrue.Reset() hMultiMeas.Reset() hJetPtMeasCoarse.Reset() hJetPtTrueCoarse.Reset() hZTrue.Reset() hZMeas.Reset() hJtTrue.Reset() hJtTrue2D.Reset() hJtMeas.Reset() hJtMeas2D.Reset() hJtFake.Reset() hJtFake2D.Reset() for h, h2 in zip(hJtTrueBin, hJtMeasBin): h.Reset() h2.Reset() numberJetsMeasBin = 
[0 for i in jetBinBorders]
numberJetsTrueBin = [0 for i in jetBinBorders]
numberJets = 0
print("Create testing data")
start_time = datetime.now()
numberEvents = numberEvents // 2  # integer division so range() below gets an int
for ievt in range(numberEvents):
    tracksTrue = []
    tracksMeas = [0 for x in range(100)]
    if ievt % ieout == 0 and ievt > 0:
        time_elapsed = datetime.now() - start_time
        time_left = timedelta(
            seconds=time_elapsed.total_seconds() * 1.0 * (numberEvents - ievt) / ievt
        )
        print(
            "Event {} [{:.2f}%] Time Elapsed: {} ETA: {}".format(
                ievt,
                100.0 * ievt / numberEvents,
                fmtDelta(time_elapsed),
                fmtDelta(time_left),
            )
        )
    jetTrue = TVector3(0, 0, 0)
    jetMeas = TVector3(0, 0, 0)
    jetPt = hJetPt.GetRandom()
    remainder = jetPt
    if jetPt < 5:
        continue
    nt = 0
    nt_meas = 0
    while remainder > 0:
        trackPt = hZ.GetRandom() * jetPt
        track = TVector3()  # fresh vector each pass so the previously appended track is not overwritten
        if trackPt < remainder:
            remainder = remainder - trackPt
        else:
            trackPt = remainder
            remainder = -1
        if trackPt > 0.15:
            track.SetPtEtaPhi(
                trackPt, myRandom.Gaus(0, 0.1), myRandom.Gaus(math.pi, 0.2)
            )
            tracksTrue.append(track)
            jetTrue += track
            if fEff.Eval(trackPt) > myRandom.Uniform(0, 1):
                tracksMeas[nt] = 1
                jetMeas += track
                nt_meas += 1
            else:
                tracksMeas[nt] = 0
            nt += 1
    fakes = []
    for it in range(fakeRate * 100):
        if myRandom.Uniform(0, 1) > 0.99:
            fake = TVector3()
            fake.SetPtEtaPhi(
                myRandom.Uniform(0.15, 1),
                myRandom.Gaus(0, 0.1),
                myRandom.Gaus(math.pi, 0.2),
            )
            fakes.append(fake)
            jetMeas += fake
    hJetPtMeas.Fill(jetMeas.Pt())
    hJetPtTrue.Fill(jetTrue.Pt())
    hMultiTrue.Fill(nt)
    hMultiMeas.Fill(nt_meas)
    ij_meas = GetBin(jetBinBorders, jetMeas.Pt())
    ij_true = GetBin(jetBinBorders, jetTrue.Pt())
    if nt < 5 or nt_meas < 5:
        continue
    numberJets += 1
    if ij_meas >= 0:
        numberJetsMeasBin[ij_meas] += 1
        hJetPtMeasCoarse.Fill(jetMeas.Pt())
    if ij_true >= 0:
        numberJetsTrueBin[ij_true] += 1
        hJetPtTrueCoarse.Fill(jetTrue.Pt())
    for track, it in zip(tracksTrue, range(100)):
        zTrue = (track * jetTrue.Unit()) / jetTrue.Mag()
        jtTrue = (track - scaleJet(jetTrue, zTrue)).Mag()
        hZTrue.Fill(zTrue)
        if ij_true >= 0:
            if weight:
                hJtTrue.Fill(jtTrue, 1.0 / jtTrue)
                hJtTrueBin[ij_true].Fill(jtTrue, 1.0 / jtTrue)
                hJtTrue2D.Fill(jtTrue, jetTrue.Pt(), 1.0 / jtTrue)
            else:
                hJtTrue.Fill(jtTrue)
                hJtTrueBin[ij_true].Fill(jtTrue)
                hJtTrue2D.Fill(jtTrue, jetTrue.Pt())
        if ij_meas >= 0:
            if tracksMeas[it] == 1:
                zMeas = (track * jetMeas.Unit()) / jetMeas.Mag()
                jtMeas = (track - scaleJet(jetMeas, zMeas)).Mag()
                hZMeas.Fill(zMeas)
                if weight:
                    hJtMeasBin[ij_meas].Fill(jtMeas, 1.0 / jtMeas)
                    hJtMeas.Fill(jtMeas, 1.0 / jtMeas)
                    hJtMeas2D.Fill(jtMeas, jetMeas.Pt(), 1.0 / jtMeas)
                else:
                    hJtMeas.Fill(jtMeas)
                    hJtMeasBin[ij_meas].Fill(jtMeas)
                    hJtMeas2D.Fill(jtMeas, jetMeas.Pt())
    if ij_meas >= 0:
        for fake in fakes:
            zFake = (fake * jetMeas.Unit()) / jetMeas.Mag()
            jtFake = (fake - scaleJet(jetMeas, zFake)).Mag()
            hZMeas.Fill(zFake)
            hZFake.Fill(zFake)
            if weight:
                hJtMeas.Fill(jtFake, 1.0 / jtFake)
                hJtMeasBin[ij_meas].Fill(jtFake, 1.0 / jtFake)
                hJtMeas2D.Fill(jtFake, jetMeas.Pt(), 1.0 / jtFake)
                hJtFake2D.Fill(jtFake, jetMeas.Pt(), 1.0 / jtFake)
                hJtFake.Fill(jtFake, 1.0 / jtFake)
            else:
                hJtMeas.Fill(jtFake)
                hJtMeasBin[ij_meas].Fill(jtFake)
                hJtMeas2D.Fill(jtFake, jetMeas.Pt())
                hJtFake2D.Fill(jtFake, jetMeas.Pt())
                hJtFake.Fill(jtFake)
time_elapsed = datetime.now() - start_time
print(
    "Event {} [{:.2f}%] Time Elapsed: {}".format(
        numberEvents, 100.0, fmtDelta(time_elapsed)
    )
)
if not FillFakes:
    hJtMeas.Add(hJtFake, -1)
    hJtMeas2D.Add(hJtFake2D, -1)
responseTuple.Print()
outFile.Write()
# printTuple(responseTuple)
hJtMeasProjBin = [
    makeHist(hJtMeas2D.ProjectionX("histMeas{}".format(i), i, i), bins=LogBinsJt)
    for i in range(1, len(jetBinBorders))
]
hJtMeasProj = makeHist(hJtMeas2D.ProjectionX("histMeas"), bins=LogBinsJt)
hJtTrueProjBin = [
    makeHist(hJtTrue2D.ProjectionX("histTrue{}".format(i), i, i), bins=LogBinsJt)
    for i in range(1, len(jetBinBorders))
]
hJtTrueProj = makeHist(hJtTrue2D.ProjectionX("histTrue"), bins=LogBinsJt)
hJtFakeProjBin = [
    makeHist(hJtFake2D.ProjectionX("histFake{}".format(i), i, i), bins=LogBinsJt)
    for i in range(1, len(jetBinBorders))
]
if not FillFakes:
    for h, h2 in zip(hJtMeasBin, hJtFakeProjBin):
        h.Add(h2, -1)
for h in (
    hJtMeasProj,
    hJtTrueProj,
    hJtMeas,
    hJtTrue,
    hJtFake,
    hZFake,
    hZMeas,
    hZTrue,
):
    h.Scale(1.0 / numberJets, "width")
for meas, true, n_meas, n_true in zip(
    hJtMeasBin, hJtTrueBin, numberJetsMeasBin, numberJetsTrueBin
):
    if n_meas > 0:
        meas.Scale(1.0 / n_meas, "width")
    if n_true > 0:
        true.Scale(1.0 / n_true, "width")
numberJetsMeasFromHist = [
    hJetPtMeasCoarse.GetBinContent(i)
    for i in range(1, hJetPtMeasCoarse.GetNbinsX() + 1)
]
numberJetsTrueFromHist = [
    hJetPtTrueCoarse.GetBinContent(i)
    for i in range(1, hJetPtTrueCoarse.GetNbinsX() + 1)
]
print("Total number of jets: {}".format(numberJets))
print("Total number of fakes: {}".format(numberFakes))
print("Measured jets by bin")
print(numberJetsMeasBin)
print(numberJetsMeasFromHist)
print("True jets by bin")
print(numberJetsTrueBin)
print(numberJetsTrueFromHist)
hRecoJetPtCoarse = unfoldJetPt(hJetPtMeasCoarse, responseJetPtCoarse, jetBinBorders)
numberJetsFromReco = [
    hRecoJetPtCoarse.GetBinContent(i)
    for i in range(1, hRecoJetPtCoarse.GetNbinsX() + 1)  # include the last bin
]
print("Unfolded jet numbers by bin:")
print(numberJetsFromReco)
print("Fakes by bin")
print(numberFakesBin)
print(
    "==================================== UNFOLD ==================================="
)
unfold = RooUnfoldBayes(response, hJtMeas, 4)  # OR
unfoldSVD = RooUnfoldSvd(response, hJtMeas, 20)  # OR
unfold2D = RooUnfoldBayes(response2D, hJtMeas2D, 4)
for u in (unfold, unfoldSVD, unfold2D):
    u.SetVerbose(0)
# response2Dtest = makeResponseFromTuple(responseTuple, hJtMeas2D, hJtTrue2D)
unfold2Dtest = RooUnfoldBayes(response2Dtest, hJtMeas2D, 4)
unfoldBin = [
    RooUnfoldBayes(responseBin[i], hJtMeasBin[i]) for i in range(len(jetBinBorders))
]
for u in unfoldBin:
    u.SetVerbose(0)
hRecoBayes = makeHist(unfold.Hreco(), bins=LogBinsJt)
hRecoSVD = makeHist(unfoldSVD.Hreco(), bins=LogBinsJt)
hRecoBin = [
    makeHist(unfoldBin[i].Hreco(), bins=LogBinsJt) for i in range(len(jetBinBorders))
]
hReco2D = make2DHist(unfold2D.Hreco(), xbins=LogBinsJt, ybins=LogBinsPt)
hReco2Dtest = make2DHist(unfold2Dtest.Hreco(), xbins=LogBinsJt, ybins=LogBinsPt)
hRecoJetPt = unfoldJetPt(hJetPtMeas, responseJetPt, LogBinsX)
hReco2DProjBin = [
    makeHist(hReco2D.ProjectionX("histReco{}".format(i), i, i), bins=LogBinsJt)
    for i in range(1, len(jetBinBorders))
]
hReco2DTestProjBin = [
    makeHist(
        hReco2Dtest.ProjectionX("histRecoTest{}".format(i), i, i), bins=LogBinsJt
    )
    for i in range(1, len(jetBinBorders))
]
hReco2DProj = makeHist(hReco2D.ProjectionX("histReco"), bins=LogBinsJt)
hReco2DProj.Scale(1.0 / numberJets, "width")
for h, h2, n in zip(hReco2DProjBin, hReco2DTestProjBin, numberJetsFromReco):
    if n > 0:
        h.Scale(1.0 / n, "width")
        h2.Scale(1.0 / n, "width")
# unfold.PrintTable(cout, hJtTrue)
for h, h2, nj in zip(hJtMeasProjBin, hJtFakeProjBin, numberJetsMeasBin):
    if nj > 0:
        h.Scale(1.0 / nj, "width")
        h2.Scale(1.0 / nj, "width")
    # else:
    #     print("nj is 0 for {}".format(h.GetName()))
for h, nj in zip(hJtTrueProjBin, numberJetsTrueBin):
    if nj > 0:
        h.Scale(1.0 / nj, "width")
# draw8grid(hJtMeasBin[1:], hJtTrueBin[1:], jetPtBins[1:], xlog=True, ylog=True, name="newfile.pdf",
#           proj=hJtMeasProjBin[2:], unf2d=hReco2DProjBin[2:], unf=hRecoBin[1:])
if numberEvents > 1000:
    if numberEvents > 1000000:
        filename = "ToyMC_{}M_events.pdf".format(numberEvents // 1000000)
    else:
        filename = "ToyMC_{}k_events.pdf".format(numberEvents // 1000)
else:
    filename = "ToyMC_{}_events.pdf".format(numberEvents)
draw8gridcomparison(
    hJtMeasBin,
    hJtTrueBin,
    jetPtBins,
    xlog=True,
    ylog=True,
    name=filename,
    proj=None,
    unf2d=hReco2DProjBin,
    unf2dtest=hReco2DTestProjBin,
    unf=hRecoBin,
    fake=hJtFakeProjBin,
    start=1,
    stride=1,
)
drawQA(
    hJtMeas,
    hJtTrue,
    hJtFake,
    hRecoBayes,
    hRecoSVD,
    hReco2DProj,
    hZ,
    hZTrue,
    hZMeas,
    hZFake,
    hMultiMeas,
    hMultiTrue,
    hJetPt,
    hJetPtTrue,
    hJetPtMeas,
    hRecoJetPt,
    responseMatrix,
)
outFile.Close()
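
# The toy-MC loops above call a few small helpers (GetBin, scaleJet, fmtDelta) that are
# defined elsewhere in the analysis code. The sketches below only illustrate the behaviour
# the loops assume; the names carry a "_sketch" suffix because the real implementations
# may differ.
from ROOT import TVector3


def GetBin_sketch(borders, value):
    # Return the index of the bin of `borders` that contains `value`, or -1 if outside.
    for ib, (low, high) in enumerate(zip(borders, borders[1:])):
        if low <= value < high:
            return ib
    return -1


def scaleJet_sketch(jet, scale):
    # z * jet: the jet momentum scaled by the fragmentation variable z, so that
    # (track - scaleJet(jet, z)) is the track component transverse to the jet axis (jT).
    return jet * scale


def fmtDelta_sketch(delta):
    # Format a datetime.timedelta as H:MM:SS for the progress printout.
    total = int(delta.total_seconds())
    return "{}:{:02d}:{:02d}".format(total // 3600, (total % 3600) // 60, total % 60)
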
def integrand_init(interp, mass, Elow, Eup):
    print("\n\n--- INITIALIZING MASS:", mass, " Low Limit ", Elow, " , Up Lim: ", Eup, "\n")
    global inter_mean1
    global inter_mean2
    global inter_sigma1
    global inter_sigma2
    global inter_part
    global ene_on
    global integrand
    global normon
    global numtoton
    # integrand = []
    for k in range(0, config.nsamples):  # k = sample index
        integrand.append(
            np.empty(shape=(numtoton[k], config.BinsIntegrand),
                     dtype=config.ArrayDataType))
        pbar = tqdm(total=numtoton[k] * config.BinsIntegrand)
        for i in range(0, numtoton[k]):
            energy = ene_on[k][i]
            mean1 = energy - inter_mean1[k](energy) * energy  # pEnBiasVsEnEst = ( E' - E ) / E'
            mean2 = energy - inter_mean2[k](energy) * energy  # bias of the second Gaussian component
            sigma1 = inter_sigma1[k](energy) * energy
            sigma2 = inter_sigma2[k](energy) * energy
            part = inter_part[k](energy)
            # define the integration limits for true energy
            lowl = mean2 - config.SigmaLimits * sigma2
            highl = mean2 + config.SigmaLimits * sigma2
            if lowl < config.energy_absmin:
                lowl = config.energy_absmin
            # define the bins in true energy
            binwidth = (highl - lowl) / config.BinsIntegrand
            for j in range(0, config.BinsIntegrand):
                entrue = lowl + (j + 0.5) * binwidth
                pbar.update(1)
                area = get_area(entrue, k)
                flux = 0.
                if Elow < entrue < Eup:
                    flux = get_phi(entrue, interp, mass)
                enmig = part * gaussian(entrue, mean1, sigma1) + (
                    1. - part) * gaussian(entrue, mean2, sigma2)
                integrand[k][i][j] = flux * area * enmig * binwidth
        pbar.close()
        print("Starting Normalization\n")
        integrand_norm.append(
            np.empty(shape=(config.BinsIntegrandNorm, config.BinsIntegrand),
                     dtype=config.ArrayDataType))
        on_en_edges = []
        binsize = TMath.Log(config.energy_upper_cut / config.energy_cut) / config.BinsIntegrandNorm
        for ebin in range(0, config.BinsIntegrandNorm + 1):
            on_en_edges.append(config.energy_cut * TMath.Exp(binsize * ebin))
        for ebin in range(0, config.BinsIntegrandNorm):
            energy = math.sqrt(on_en_edges[ebin + 1] * on_en_edges[ebin])
            mean1 = energy - inter_mean1[k](energy) * energy  # pEnBiasVsEnEst = ( E' - E ) / E'
            mean2 = energy - inter_mean2[k](energy) * energy  # bias of the second Gaussian component
            sigma1 = inter_sigma1[k](energy) * energy
            sigma2 = inter_sigma2[k](energy) * energy
            part = inter_part[k](energy)
            # define the integration limits for true energy
            lowl = mean2 - config.SigmaLimits * sigma2
            highl = mean2 + config.SigmaLimits * sigma2
            if lowl < config.energy_absmin:
                lowl = config.energy_absmin
            # define the bins in true energy
            binwidth = (highl - lowl) / config.BinsIntegrand
            for j in range(0, config.BinsIntegrand):
                entrue = lowl + (j + 0.5) * binwidth
                area = get_area(entrue, k)
                flux = 0.
                if Elow < entrue < Eup:
                    flux = get_phi(entrue, interp, mass)
                enmig = part * gaussian(entrue, mean1, sigma1) + (
                    1. - part) * gaussian(entrue, mean2, sigma2)
                integrand_norm[k][ebin][j] = flux * area * enmig * (
                    on_en_edges[ebin + 1] - on_en_edges[ebin])
        normon[k] = np.sum(integrand_norm[k])
        print("Normalization=", normon[k])
        # enmig.append(part*gaussian(entrue,mean1,sigma1)+(1.-part)*gaussian(entrue,mean2,sigma2))
        # print('mean1=', mean1, ' mean2= ', mean2, ' sigma1= ', sigma1, ' sigma2= ', sigma2, ' part=', part)
    print("--- INITIALIZATION FINISHED!\n")
    return
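
# integrand_init folds the flux with a double-Gaussian energy-migration kernel
# gaussian(E_true, mean, sigma), which is defined elsewhere. A minimal sketch of such a
# kernel, assuming it is a normalized Gaussian pdf (the actual helper may differ):
import math


def gaussian_sketch(x, mean, sigma):
    # Normalized Gaussian probability density evaluated at x.
    return math.exp(-0.5 * ((x - mean) / sigma) ** 2) / (sigma * math.sqrt(2.0 * math.pi))
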
def off_energy_distr(OffDir, NumBckgEnBins=40, EnCutHighBG=8000, EnCutLowBG=100):
    """
    This function:
    - reads the OFF-sample input files
    - fills the estimated-energy distribution of the OFF events in logarithmic bins
    - integrates it between EnCutLowBG and EnCutHighBG to obtain the normalization
    """
    global normoff
    global numtotoff
    global numsamples
    for k in range(0, numsamples):
        print("\n\n--- INITIALIZING VALUES from:", OffDir[k], "\n")
        DoHisto = TFile.Open(OffDir[k], "READ")
        print(DoHisto)
        noff = np.array([DoHisto.Get("fOffSample")])
        numtotoff[k] = noff[0].GetEntries()
        print("The total number of entries in the normalization histogram is:",
              numtotoff[k])
        off_en_edges = []
        binsize = TMath.Log(EnCutHighBG / EnCutLowBG) / config.NumBckgEnBins
        for ebin in range(0, config.NumBckgEnBins + 1):
            off_en_edges.append(EnCutLowBG * TMath.Exp(binsize * ebin))
        # create here offbins with the (larger) background limits
        offbins = array('d', off_en_edges)
        isto = ROOT.TH1F("integral", "integral", config.NumBckgEnBins, offbins)
        for i in range(0, numtotoff[k]):
            noff[0].GetEntry(i)
            energy = noff[0].GetArgs()[0]
            isto.Fill(energy)
        f = TFile('normalization_histogram_sample_{}.root'.format(k), "RECREATE")
        gramma = TCanvas("EOff_Histogram", 'EOFF_Histogram sample {}'.format(k))
        gramma.SetCanvasSize(800, 800)
        gramma.SetWindowSize(1000, 1000)
        gramma.SetLogx()
        gramma.SetLogy()
        isto.SetXTitle("Log Eest")
        isto.SetYTitle("Log events")
        isto.Draw()
        ll = isto.GetXaxis().FindBin(EnCutLowBG)
        hh = isto.GetXaxis().FindBin(EnCutHighBG)
        line = TLine()
        line.SetLineColor(ROOT.kRed)
        line.DrawLine(ll, isto.GetMinimum(), ll, isto.GetMaximum())
        line.DrawLine(hh, isto.GetMinimum(), hh, isto.GetMaximum())
        # gPad.Update()
        normoff[k] = isto.Integral(ll, hh)
        print("The integral from ", ll, " to ", hh,
              " of the normalization histogram is:", normoff[k])
        latex = TLatex()
        latex.DrawLatex(
            ll, isto.GetMaximum() / 2,
            "The integral of the normalization histogram is: {}".format(normoff[k]))
        gramma.Write()
        f.Close()
    return
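
# Both integrand_init and off_energy_distr build logarithmically spaced energy-bin edges
# as E_low * exp(i * ln(E_high / E_low) / N). For reference, the same edges can be obtained
# with numpy; this is a standalone check with illustrative values only:
import numpy as np
from ROOT import TMath

EnCutLowBG_demo, EnCutHighBG_demo, NumBckgEnBins_demo = 100.0, 8000.0, 40  # example values
binsize_demo = TMath.Log(EnCutHighBG_demo / EnCutLowBG_demo) / NumBckgEnBins_demo
edges_loop = [EnCutLowBG_demo * TMath.Exp(binsize_demo * i)
              for i in range(NumBckgEnBins_demo + 1)]
edges_np = np.geomspace(EnCutLowBG_demo, EnCutHighBG_demo, NumBckgEnBins_demo + 1)
assert np.allclose(edges_loop, edges_np)
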
def createToyTraining(self, rootFile, numberEvents):
    self._f = root_open(rootFile, "read")
    self._hJetPtIn = self._f.Get(
        "AliJJetJtTask/AliJJetJtHistManager/JetPt/JetPtNFin{:02d}".format(2))
    self._hZIn = self._f.Get(
        "AliJJetJtTask/AliJJetJtHistManager/Z/ZNFin{:02d}".format(2))
    LimL = 0.1
    LimH = 500
    logBW = (TMath.Log(LimH) - TMath.Log(LimL)) / self._NBINS
    self._LogBinsX = [
        LimL * math.exp(ij * logBW) for ij in range(0, self._NBINS + 1)
    ]
    self._hJetPtMeas = Hist(self._LogBinsX)
    self._hJetPtTrue = Hist(self._LogBinsX)
    self._myRandom = TRandom3(self._randomSeed)
    if self._fEff is None:
        self._fEff = TF1("fEff", "1-0.5*exp(-x)")
    if self._jetBinBorders is None:
        self._jetBinBorders = [5, 10, 20, 30, 40, 60, 80, 100, 150, 500]
    self._hJetPtMeasCoarse = Hist(self._jetBinBorders)
    self._hJetPtTrueCoarse = Hist(self._jetBinBorders)
    low = 0.01
    high = 10
    BinW = (TMath.Log(high) - TMath.Log(low)) / self._NBINSJt
    self._LogBinsJt = [
        low * math.exp(i * BinW) for i in range(self._NBINSJt + 1)
    ]
    self._jetPtBins = [
        (a, b) for a, b in zip(self._jetBinBorders, self._jetBinBorders[1:])
    ]
    self._hJtTrue2D = Hist2D(self._LogBinsJt, self._jetBinBorders)
    self._hJtMeas2D = Hist2D(self._LogBinsJt, self._jetBinBorders)
    self._hJtFake2D = Hist2D(self._LogBinsJt, self._jetBinBorders)
    # Histograms to store jT with respect to the leading hadron
    self._hJtTestMeas2D = Hist2D(
        self._LogBinsJt, self._jetBinBorders)  # Needs a better name
    self._hJtTestTrue2D = Hist2D(
        self._LogBinsJt, self._jetBinBorders)  # Needs a better name
    self._responseJetPt = RooUnfoldResponse(self._hJetPtMeas, self._hJetPtTrue)
    self._responseJetPtCoarse = RooUnfoldResponse(self._hJetPtMeasCoarse,
                                                  self._hJetPtTrueCoarse)
    self._hresponseJetPt = Hist2D(self._jetBinBorders, self._jetBinBorders)
    self._hresponseJetPtCoarse = Hist2D(self._jetBinBorders, self._jetBinBorders)
    # Histogram index is jet pT index, Bin 0 is 5-10 GeV
    # Histogram X axis is observed jT, Bin 0 is underflow
    # Histogram Y axis is observed jet Pt, Bin 0 is underflow
    # Histogram Z axis is True jT, Bin 0 is underflow
    self._2Dresponses = [
        Hist3D(self._LogBinsJt, self._jetBinBorders, self._LogBinsJt)
        for i in self._jetPtBins
    ]
    self._misses2D = Hist2D(self._LogBinsJt, self._jetBinBorders)
    self._fakes2D = Hist2D(self._LogBinsJt, self._jetBinBorders)
    self._numberJetsMeasBin = [0 for i in self._jetBinBorders]
    self._numberJetsTrueBin = [0 for i in self._jetBinBorders]
    self._numberJetsTestMeasBin = [0 for i in self._jetBinBorders]
    self._numberJetsTestTrueBin = [0 for i in self._jetBinBorders]
    self._numberFakesBin = [0 for i in self._jetBinBorders]
    ieout = numberEvents // 10  # integer division so the progress modulo stays an int
    if ieout > 20000:
        ieout = 20000
    start_time = datetime.now()
    print("Processing MC Training Events")
    for ievt in range(numberEvents):
        tracksTrue = []
        tracksMeas = [0 for x in range(100)]
        if ievt % ieout == 0 and ievt > 0:
            time_elapsed = datetime.now() - start_time
            time_left = timedelta(seconds=time_elapsed.total_seconds() * 1.0
                                  * (numberEvents - ievt) / ievt)
            print("Event {} [{:.2f}%] Time Elapsed: {} ETA: {}".format(
                ievt,
                100.0 * ievt / numberEvents,
                fmtDelta(time_elapsed),
                fmtDelta(time_left),
            ))
        jetTrue = TVector3(0, 0, 0)
        jetMeas = TVector3(0, 0, 0)
        jetPt = self._hJetPtIn.GetRandom()
        remainder = jetPt
        if jetPt < 5:
            continue
        nt = 0
        nt_meas = 0
        while remainder > 0:
            trackPt = self._hZIn.GetRandom() * jetPt
            track = TVector3()  # fresh vector each pass so the previously appended track is not overwritten
            if trackPt < remainder:
                remainder = remainder - trackPt
            else:
                trackPt = remainder
                remainder = -1
            if trackPt > 0.15:
                track.SetPtEtaPhi(
                    trackPt,
                    self._myRandom.Gaus(0, 0.1),
                    self._myRandom.Gaus(math.pi, 0.2),
                )
                tracksTrue.append(track)
                jetTrue += track
                if self._fEff.Eval(trackPt) > self._myRandom.Uniform(0, 1):
                    tracksMeas[nt] = 1
                    jetMeas += track
                    nt_meas += 1
                else:
                    tracksMeas[nt] = 0
                nt += 1
        fakes = []
        for it in range(self._fakeRate * 100):
            if self._myRandom.Uniform(0, 1) > 0.99:
                fake = TVector3()
                fake.SetPtEtaPhi(
                    self._myRandom.Uniform(0.15, 1),
                    self._myRandom.Gaus(0, 0.1),
                    self._myRandom.Gaus(math.pi, 0.2),
                )
                fakes.append(fake)
                jetMeas += fake
        self._responseJetPt.Fill(jetMeas.Pt(), jetTrue.Pt())
        self._responseJetPtCoarse.Fill(jetMeas.Pt(), jetTrue.Pt())
        self._hresponseJetPt.Fill(jetMeas.Pt(), jetTrue.Pt())
        self._hresponseJetPtCoarse.Fill(jetMeas.Pt(), jetTrue.Pt())
        ij_meas = GetBin(self._jetBinBorders, jetMeas.Pt())
        ij_true = GetBin(self._jetBinBorders, jetTrue.Pt())
        if nt < 5 or nt_meas < 5:
            continue
        if ij_meas >= 0:
            self._numberJetsMeasBin[ij_meas] += 1
        if ij_true >= 0:
            self._numberJetsTrueBin[ij_true] += 1
        for track, it in zip(tracksTrue, range(100)):
            zTrue = (track * jetTrue.Unit()) / jetTrue.Mag()
            jtTrue = (track - scaleJet(jetTrue, zTrue)).Mag()
            if ij_meas >= 0:
                if tracksMeas[it] == 1:
                    zMeas = (track * jetMeas.Unit()) / jetMeas.Mag()
                    jtMeas = (track - scaleJet(jetMeas, zMeas)).Mag()
                    self._2Dresponses[ij_true].Fill(jtMeas, jetMeas.Pt(), jtTrue)
                else:
                    self._misses2D.Fill(jtTrue, jetTrue.Pt())
        if ij_meas >= 0:
            for fake in fakes:
                zFake = (fake * jetMeas.Unit()) / jetMeas.Mag()
                jtFake = (fake - scaleJet(jetMeas, zFake)).Mag()
                if self._weight:
                    self._hJtFake2D.Fill(jtFake, jetMeas.Pt(), 1.0 / jtFake)
                else:
                    self._hJtFake2D.Fill(jtFake, jetMeas.Pt())
                if self._fillFakes:
                    self._fakes2D.Fill(jtFake, jetMeas.Pt())
    time_elapsed = datetime.now() - start_time
    print("Event {} [{:.2f}%] Time Elapsed: {}".format(
        numberEvents, 100.0, fmtDelta(time_elapsed)))
    self._numberJetsMeasTrain = sum(self._numberJetsMeasBin)
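
# The default single-track efficiency used above is fEff(pT) = 1 - 0.5*exp(-pT): roughly
# 0.57 at the 0.15 GeV/c track threshold and approaching 1 at high pT. A quick standalone
# check of that parametrization (illustration only, not part of the training code):
from ROOT import TF1

fEff_check = TF1("fEffCheck", "1-0.5*exp(-x)", 0.0, 100.0)
for pt in (0.15, 0.5, 1.0, 5.0):
    print("pT = {:.2f} GeV/c -> efficiency = {:.3f}".format(pt, fEff_check.Eval(pt)))
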
def SideBandFit(irebin=1):
    print("\n Side-band fit \n\n")
    # prepare histograms
    h_bgr = GetMassDistribution(1)
    h_data = GetMassDistribution(2)
    # rebin
    h_bgr.Rebin(irebin)
    h_data.Rebin(irebin)
    print("INFO: Rebinning histograms with factor", irebin,
          ". Binwidth: ", h_data.GetBinWidth(1))

    # Loop over the background scale factor (alpha_bgr)
    h_sf_bgr = TH1D("h_sf_bgr", "", 100, 0.8, 1.5)
    for i in range(1, h_sf_bgr.GetNbinsX() + 1):
        sf_bgr = h_sf_bgr.GetBinCenter(i)
        # Loop over bins, compute the log-likelihood, save it in the histogram
        loglik = 0.
        for j in range(1, h_data.GetNbinsX() + 1):
            # Signal-free region
            m4lepBin = h_data.GetBinCenter(j)
            NObsBin = h_data.GetBinContent(j)
            if m4lepBin >= 150. and m4lepBin <= 400.:
                MeanBgdBin = sf_bgr * h_bgr.GetBinContent(j)
                if MeanBgdBin > 0.:
                    loglik += TMath.Log(TMath.Poisson(NObsBin, MeanBgdBin))
        h_sf_bgr.SetBinContent(i, -2. * loglik)

    # Interpret the likelihood distribution
    h_sf_bgr_rescaled = h_sf_bgr.Clone("h_sf_bgr_rescaled")
    # find the minimum
    MinBin = h_sf_bgr.GetMinimumBin()
    Minimum = h_sf_bgr.GetBinContent(MinBin)
    BestSF_bgd = h_sf_bgr.GetBinCenter(MinBin)
    # Rescale and find the +-1 sigma errors
    for i in range(1, h_sf_bgr.GetNbinsX() + 1):
        h_sf_bgr_rescaled.SetBinContent(i, Minimum - h_sf_bgr.GetBinContent(i))
    LeftLim = h_sf_bgr.GetBinCenter(h_sf_bgr_rescaled.FindFirstBinAbove(-1) - 1)
    RightLim = h_sf_bgr.GetBinCenter(h_sf_bgr_rescaled.FindLastBinAbove(-1) + 1)
    LeftError = BestSF_bgd - LeftLim
    RightError = RightLim - BestSF_bgd

    # Print summary
    print(" ----------\n", " Result fit: \n", " ----------",
          "Background scale factor from sideband fit: ", BestSF_bgd,
          " - ", LeftError, " + ", RightError)

    # Plot histogram
    canvas1 = TCanvas("canvas1", "Standard Canvas", 600, 400)
    canvas1.SetLeftMargin(0.175)
    canvas1.SetBottomMargin(0.125)
    canvas1.cd()
    h_sf_bgr_rescaled.SetStats(kFALSE)
    h_sf_bgr_rescaled.SetTitle("Background scale factor")
    h_sf_bgr_rescaled.GetXaxis().SetTitle("scale factor")
    h_sf_bgr_rescaled.GetYaxis().SetTitle("2 #Delta ln L")
    # h_sf_bgr_rescaled.SetAxisRange(1, -1, "Y")
    h_sf_bgr_rescaled.Draw("C")
    CenterLine = TLine(BestSF_bgd, 0, BestSF_bgd, -31)
    CenterLine.SetLineColor(2)
    LeftLine = TLine(
        LeftLim,
        Minimum - h_sf_bgr.GetBinContent(h_sf_bgr_rescaled.FindFirstBinAbove(-1) - 1),
        LeftLim, -31)
    LeftLine.SetLineColor(40)
    RightLine = TLine(
        RightLim,
        Minimum - h_sf_bgr.GetBinContent(h_sf_bgr_rescaled.FindLastBinAbove(-1) + 1),
        RightLim, -31)
    RightLine.SetLineColor(40)
    CenterLine.Draw("same axis")
    LeftLine.Draw("same axis")
    RightLine.Draw("same axis")
    canvas1.Print("Plots/SideBandFit.pdf")

    # Find the expected background, signal and observed counts in the mass window 125 +- 0.5*7.15 GeV
    bgr = h_bgr.Integral(h_bgr.FindBin(125. - 0.5 * 7.15),
                         h_bgr.FindBin(125. + 0.5 * 7.15))
    sig = GetMassDistribution(125).Integral(h_bgr.FindBin(125. - 0.5 * 7.15),
                                            h_bgr.FindBin(125. + 0.5 * 7.15))
    obs = h_data.Integral(h_bgr.FindBin(125. - 0.5 * 7.15),
                          h_bgr.FindBin(125. + 0.5 * 7.15))
    print("BACKGROUND - without rescaling : ", bgr)
    print("Best scalefactor: ", BestSF_bgd)
    print("BACKGROUND - with rescaling : ", BestSF_bgd * bgr,
          " - ", LeftError * bgr, " + ", RightError * bgr)
    print("SIGNAL EVENTS: ", sig)
    print("OBSERVED EVENTS: ", obs)
    return
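
# SideBandFit extracts the +-1 sigma interval from the points where -2 ln L rises by one
# unit above its minimum (i.e. where the rescaled curve drops below -1). A standalone
# illustration of the same rule on a parabolic -2 ln L curve, where the recovered interval
# equals the Gaussian sigma (toy numbers only):
import numpy as np

alpha = np.linspace(0.8, 1.5, 701)
best, sigma = 1.1, 0.05                      # toy truth
minus2lnL = ((alpha - best) / sigma) ** 2    # parabolic -2 ln L, minimum shifted to 0
inside = alpha[minus2lnL < 1.0]              # Delta(-2 ln L) < 1  ->  68% interval
print("best = {:.3f}  -1 sigma = {:.3f}  +1 sigma = {:.3f}".format(
    best, best - inside.min(), inside.max() - best))
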