def createPlotsReco_(reco_file, label, debug=False):
    """Cumulative material budget from reconstruction.

    Internal function that will produce a cumulative profile of the
    material budget in the reconstruction starting from the single
    detectors that compose the tracker. It will iterate over all
    existing detectors contained in the sDETS dictionary. The function
    will automatically stop everytime it encounters a non-existent
    detector, until no more detectors are left to try. For this reason
    the keys in the sDETS dictionary can be as inclusive as possible.

    Parameters:
    reco_file -- name of the DQM ROOT file holding the reco profiles
    label     -- suffix used in the names of the saved PNG files
    debug     -- if True, report every profile that is not found

    Returns the cumulative TH1D built from the "RadLen_vs_Eta_"
    profiles (detached from any ROOT directory), or None if that
    prefix produced no histograms.
    """
    cumulative_matbdg = None
    sPREF = ["Original_RadLen_vs_Eta_", "RadLen_vs_Eta_"]

    c = TCanvas("c", "c", 1024, 1024)
    diffs = []
    if not checkFile_(reco_file):
        print("Error: missing file %s" % reco_file)
        raise RuntimeError("Missing input file %s" % reco_file)
    # Avoid shadowing the 'file' builtin; keep the TFile alive for the
    # whole function so the fetched profiles stay valid.
    reco_tfile = TFile(reco_file)
    prefix = "/DQMData/Run 1/Tracking/Run summary/RecoMaterial/"
    for s in sPREF:
        hs = THStack("hs", "")
        histos = []
        for det, color in six.iteritems(sDETS):
            layer_number = 0
            while True:
                layer_number += 1
                name = "%s%s%s%d" % (prefix, s, det, layer_number)
                prof = reco_tfile.Get(name)
                # If we miss an object, since we are incrementally
                # searching for consecutive layers, we may safely
                # assume that there are no additional layers and skip
                # to the next detector.
                if not prof:
                    if debug:
                        print("Missing profile %s" % name)
                    break
                else:
                    histos.append(prof.ProjectionX("_px", "hist"))
                    diffs.append(histos[-1])
                    histos[-1].SetFillColor(color + layer_number)
                    histos[-1].SetLineColor(color + layer_number + 1)

        name = "CumulativeRecoMatBdg_%s" % s
        if s == "RadLen_vs_Eta_":
            # Clone the binning of the first projection for the
            # cumulative histogram and detach it from the file.
            cumulative_matbdg = TH1D(name, name, histos[0].GetNbinsX(),
                                     histos[0].GetXaxis().GetXmin(),
                                     histos[0].GetXaxis().GetXmax())
            cumulative_matbdg.SetDirectory(0)
        for h in histos:
            hs.Add(h)
            if cumulative_matbdg:
                cumulative_matbdg.Add(h, 1.)
        hs.Draw()
        hs.GetYaxis().SetTitle("RadLen")
        c.Update()
        c.Modified()
        c.SaveAs("%sstacked_%s.png" % (s, label))

    # diffs holds the "Original" histograms in its first half and the
    # reco ones in its second half: subtract pairwise and stack the
    # differences. NOTE: use floor division so this also works under
    # Python 3 (plain '/' yields a float and breaks range()/indexing).
    hs = THStack("diff", "")
    half = len(diffs) // 2
    for d in range(0, half):
        diffs[d + half].Add(diffs[d], -1.)
        hs.Add(diffs[d + half])
    hs.Draw()
    hs.GetYaxis().SetTitle("RadLen")
    c.Update()
    c.Modified()
    c.SaveAs("RadLen_difference_%s.png" % label)
    return cumulative_matbdg
def FindHotPixel(self, threshold, Nevents=-1, filename="hotpixels.txt"):
    # Calculate the frequency with which each pixel fires over (up to)
    # Nevents unique frames; any pixel firing in more than `threshold`
    # (0 -> 1) of the unique frames is declared "hot".
    # Hot pixel coordinates are appended to self.hotpixels and also
    # dumped as a Python-literal list to `filename`.
    # Returns (histo_nhits, histo_hitpixel, histo_hotpixel, histo_frequency).
    n_max = 0
    # Sentinel that cannot match a real frame's column list, so the
    # first frame is always counted as new.
    prev_pixel_xhits = [999, 999]
    unique_events = 0
    histo_nhits = TH1D("nhit", "N Pixel Fires", 40, 0, 40)
    histo_hitpixel = TH2D("hit", "Hit Pixel Map", 256, 0, 256, 256, 0, 256)
    histo_frequency = TH1D("freq", "Pixel Firing Frequency", 10000, 0, 1)
    histo_hotpixel = TH2D("hot", "Hot Pixel Map", 256, 0, 256, 256, 0, 256)
    # Clamp the requested number of events: a meaningful frequency at
    # threshold ~0.01 needs at least 10000 events, so smaller requests
    # are overridden (bounded by what the run actually contains).
    if Nevents > self.p_nEntries or Nevents == -1:
        n_max = self.p_nEntries
    elif Nevents < 10000 and self.p_nEntries >= 10000:
        print "FindHotPixel over-riding requested nevents"
        print "FindHotPixel must be run on atleast 10000 events (for a threshold of 0.01) to be accurate"
        print "FindHotPixel will use 10000 events"
        n_max = 10000
    elif Nevents < 10000 and self.p_nEntries < 10000:
        print "FindHotPixel over-riding requested nevents"
        print "FindHotPixel must be run on atleast 10000 events (for a threshold of 0.01) to be accurate"
        print "FindHotPixel will use as many events as exist in this run"
        n_max = self.p_nEntries
    else:
        n_max = Nevents
    # loop through events to find unique events
    # for each fired pixel in each event, increment hit map
    for i in range(n_max):
        self.getEvent(i)
        if i % 10000 == 0:
            print " [Hot Pixel Finder] Parsing event %i" % i
        # is this a new frame, or the next event in the same frame?
        npixels_hit = len(self.p_col)
        pixel_x_hits = []
        for k in xrange(npixels_hit):
            pixel_x_hits.append(self.p_col[k])
        if (pixel_x_hits == prev_pixel_xhits):
            # another track in the same event
            continue
        else:
            # this is a new event
            unique_events = unique_events + 1
            prev_pixel_xhits = pixel_x_hits
            for j in range(len(self.p_row)):
                # hit_map is indexed [column][row]
                self.hit_map[self.p_col[j]][self.p_row[j]] += 1
                histo_hitpixel.Fill(self.p_col[j], self.p_row[j])
    # loop through hitmap
    # fill freq map with hits / nevents
    print "Ran over", n_max, "events, found", unique_events, "unique pixel maps"
    # npix_X / npix_Y are module-level sensor dimensions (not visible in
    # this chunk) — presumably 256 x 256 to match the maps above.
    for i in range(npix_X):
        for j in range(npix_Y):
            self.frequency_map[i][j] = self.hit_map[i][j] * (
                1.0 / float(unique_events))
            histo_nhits.Fill(self.hit_map[i][j])
            histo_frequency.Fill(self.frequency_map[i][j])
            # if freq > threshold, make a hotpixel
            if (self.frequency_map[i][j] > threshold):
                histo_hotpixel.Fill(i, j, self.frequency_map[i][j])
                self.hotpixels.append([i, j])
    # Persist the hot pixel list as its repr() so it can be eval'd back.
    f = open(filename, 'w')
    f.write("%s" % self.hotpixels)
    f.close()
    print "##### Hot Pixel Report #####"
    print " %i Hot pixel found at : " % (len(self.hotpixels))
    print self.hotpixels
    print "############################"
    return histo_nhits, histo_hitpixel, histo_hotpixel, histo_frequency
# display_name: Python 2 # --- # # Demonstration of the central limit theorem # # First import what is needed from ROOT import TH1D, gRandom, TCanvas from math import sqrt # Define histograms with sum of random numbers points = [1, 2, 3, 6, 12, 20, 40] histos = {} for p in points: histos[p] = TH1D(str(p), str(p), 100, -6, 6) # Fill the histograms with random numbers for n in range(100000): for p in points: x = 0. for n2 in range(p): x += gRandom.Uniform() histos[p].Fill((x - p / 2.) / sqrt(p / 12.)) # Plot Histograms separately c = [] for p in points[:3]: c.append(TCanvas())
def beginJob(self):
    """Prepare output file, analysis trees, and bookkeeping histograms.

    Creates self.outfile (RECREATE), the per-jet tree self.jet and the
    per-fake tree self.jtf, plus TH1Ds for jet counts and parton/hadron
    flavor of j -> tau_h fakes. All branch buffers are 1-element numpy
    arrays whose dtype matches the ROOT leaflist type ('f' <-> /F,
    'i' <-> /I).
    """
    # FILE
    self.outfile = TFile(self.fname, 'RECREATE')
    self.njet = TH1D('njet', 'Number of jets (p_{T}>20, |#eta|<2.4)', 15, 0, 15)
    self.njtf = TH1D('njtf', 'Number of j #rightarrow #tau_{h} fakes', 10, 0, 10)
    self.pflav_all = TH1D(
        'pflav_all', 'Jet parton flav;Jet parton flavor;Number of jets', 25, 0, 25)
    self.pflav_jtf = TH1D(
        'pflav_jtf',
        "Jet parton flav of j #rightarrow #tau_{h} fake (Medium VSjet, VVVLoose VSe, VLoose VSmu);Jet parton flavor;Number of j #rightarrow #tau_{h}",
        25, 0, 25)
    self.pflav_eff = TH1D(
        'pflav_eff',
        "j #rightarrow #tau_{h} fake rate (Medium VSjet, VVVLoose VSe, VLoose VSmu);Jet parton flavor; Medium VSjet j #rightarrow #tau_{h} fake rate",
        25, 0, 25)
    self.hflav_all = TH1D(
        'hflav_all', 'Jet hadron flav;Jet hadron flavor;Number of jets', 8, 0, 8)
    self.hflav_jtf = TH1D(
        'hflav_jtf',
        "Jet hadron flav of j #rightarrow #tau_{h} fake (Medium VSjet, VVVLoose VSe, VLoose VSmu);Jet hadron flavor;Number of j #rightarrow #tau_{h}",
        8, 0, 8)
    self.hflav_eff = TH1D(
        'hflav_eff',
        "j #rightarrow #tau_{h} fake rate (Medium VSjet, VVVLoose VSe, VLoose VSmu);Jet hadron flavor; Medium VSjet j #rightarrow #tau_{h} fake rate",
        8, 0, 8)
    self.jet = TTree('jet', 'jet')  # save per jet
    self.jtf = TTree('jtf', 'jtf')  # save per j -> tauh fake

    # JET BRANCHES
    self.jet_pt = np.zeros(1, dtype='f')
    self.jet_eta = np.zeros(1, dtype='f')
    self.jet_pflav = np.zeros(1, dtype='i')
    self.jet_hflav = np.zeros(1, dtype='i')
    self.jet_tpt = np.zeros(1, dtype='f')
    self.jet_teta = np.zeros(1, dtype='f')
    self.jet_tdm = np.zeros(1, dtype='f')
    self.jet_VSjet = np.zeros(1, dtype='i')
    self.jet_VSmu = np.zeros(1, dtype='i')
    # FIX: was dtype='f', but the branch leaflist below is 'VSe/I';
    # a float buffer read as Int_t would store garbage.
    self.jet_VSe = np.zeros(1, dtype='i')
    self.jet_dR = np.zeros(1, dtype='f')
    self.jet.Branch('pt', self.jet_pt, 'pt/F').SetTitle("jet pt")
    self.jet.Branch('eta', self.jet_eta, 'eta/F').SetTitle("jet eta")
    self.jet.Branch('pflav', self.jet_pflav, 'pflav/I').SetTitle("jet parton flavor")
    self.jet.Branch('hflav', self.jet_hflav, 'hflav/I').SetTitle("jet hadron flavor")
    self.jet.Branch('tpt', self.jet_tpt, 'tpt/F').SetTitle("matched tau pt")
    self.jet.Branch('teta', self.jet_teta, 'teta/F').SetTitle("matched tau eta")
    self.jet.Branch('tdm', self.jet_tdm, 'tdm/F').SetTitle("matched tau decay mode")
    self.jet.Branch('VSjet', self.jet_VSjet, 'VSjet/I').SetTitle("matched tau DeepTauVSjet")
    self.jet.Branch('VSmu', self.jet_VSmu, 'VSmu/I').SetTitle("matched tau DeepTauVSmu")
    self.jet.Branch('VSe', self.jet_VSe, 'VSe/I').SetTitle("matched tau DeepTauVSe")
    self.jet.Branch('dR', self.jet_dR, 'dR/F').SetTitle("DeltaR(jet,tau)")

    # TAU BRANCHES
    self.jtf_pt = np.zeros(1, dtype='f')
    self.jtf_eta = np.zeros(1, dtype='f')
    self.jtf_dm = np.zeros(1, dtype='i')
    self.jtf_VSjet = np.zeros(1, dtype='i')
    # FIX: was dtype='f' for both VSmu and VSe; branches are /I ints.
    self.jtf_VSmu = np.zeros(1, dtype='i')
    self.jtf_VSe = np.zeros(1, dtype='i')
    self.jtf_jpt = np.zeros(1, dtype='f')
    self.jtf_jeta = np.zeros(1, dtype='f')
    self.jtf_pflav = np.zeros(1, dtype='i')
    self.jtf_hflav = np.zeros(1, dtype='i')
    self.jtf_dR = np.zeros(1, dtype='f')
    self.jtf.Branch('pt', self.jtf_pt, 'pt/F').SetTitle("tau pt")
    self.jtf.Branch('eta', self.jtf_eta, 'eta/F').SetTitle("tau eta")
    self.jtf.Branch('dm', self.jtf_dm, 'dm/I').SetTitle("tau decay mode")
    self.jtf.Branch('VSjet', self.jtf_VSjet, 'VSjet/I').SetTitle("tau DeepTauVSjet")
    self.jtf.Branch('VSmu', self.jtf_VSmu, 'VSmu/I').SetTitle("tau DeepTauVSmu")
    self.jtf.Branch('VSe', self.jtf_VSe, 'VSe/I').SetTitle("tau DeepTauVSe")
    self.jtf.Branch('jpt', self.jtf_jpt, 'jpt/F').SetTitle("matched jet pt")
    self.jtf.Branch('jeta', self.jtf_jeta, 'jeta/F').SetTitle("matched jet eta")
    self.jtf.Branch('pflav', self.jtf_pflav, 'pflav/I').SetTitle("matched jet parton flavor")
    self.jtf.Branch('hflav', self.jtf_hflav, 'hflav/I').SetTitle("matched jet hadron flavor")
    self.jtf.Branch('dR', self.jtf_dR, 'dR/F').SetTitle("DeltaR(jet,tau)")
def MakeHistogram( self ):
    # Build the FEWZ mass-binned cross-section histogram, convert it to
    # a differential cross section, draw it, and write both to a ROOT
    # file named after self.PDF. Returns None (early-returns on a
    # bin-count mismatch).
    h_Xsec = TH1D("h_Xsec", "", len(self.MassBinEdges)-1, array("d", self.MassBinEdges) )
    ###############################################################
    # -- collect Xsec information from FEWZ outputs (txt file) -- #
    ###############################################################
    # Each element is (bin center, xsec, integration err, PDF err),
    # one per mass bin, accumulated over the per-file bins.
    BinCenter_Xsec = []
    for i in range(0, len(self.MassBinEdges_File)-1):
        LowerEdge = self.MassBinEdges_File[i]
        UpperEdge = self.MassBinEdges_File[i+1]
        FileName = self.MakeFileName( LowerEdge, UpperEdge )
        BinCenter_Xsec += self.GetXsec_SingleBin( FileName )
    # -- check consistency -- #
    if len(BinCenter_Xsec) != len(self.MassBinEdges)-1:
        print "(# x-seciton, # mass bins) = (%d, %d) -> they are different! check the details" % ( len(BinCenter_Xsec), len(self.MassBinEdges)-1 )
        return
    ############################
    # -- fill the histogram -- #
    ############################
    for i in range(0, len(self.MassBinEdges)-1):
        LowerEdge = self.MassBinEdges[i]
        UpperEdge = self.MassBinEdges[i+1]
        BinCenter_Xsec_ithBin = BinCenter_Xsec[i]
        BinCenter = BinCenter_Xsec_ithBin[0]
        # Sanity check: the FEWZ bin center must fall inside this bin.
        if not(BinCenter > LowerEdge and BinCenter < UpperEdge):
            print "(LowerEdge, UpperEdge) = (%.0lf, %.0lf) ... BinCenter=%.0lf is not in this bin!" \
                % (LowerEdge, UpperEdge, BinCenter)
            break
        i_bin = i+1
        CentralValue = BinCenter_Xsec_ithBin[1]
        h_Xsec.SetBinContent( i_bin, CentralValue )
        IntgErr = BinCenter_Xsec_ithBin[2]
        PDFErr = BinCenter_Xsec_ithBin[3]
        # Error = BinCenter_Xsec_ithBin[2] # -- Integration error only -- #
        # Total error: integration and PDF uncertainties in quadrature.
        Error = math.sqrt( IntgErr*IntgErr + PDFErr*PDFErr )
        h_Xsec.SetBinError( i_bin, Error )
    # NOTE(review): calling Sumw2() AFTER SetBinError typically resets
    # the stored errors to sqrt(content) — verify this ordering is
    # intentional against ROOT's TH1::Sumw2 semantics.
    h_Xsec.Sumw2()
    h_DiffXsec = self.MakeDiffXsecHist( h_Xsec )
    h_DiffXsec.Sumw2()
    # self.DrawHistogram(h_DiffXsec, 15, 3000)
    # self.DrawHistogram(h_DiffXsec, 15, 60)
    # self.DrawHistogram(h_DiffXsec, 50, 70)
    # self.DrawHistogram(h_DiffXsec, 60, 120)
    # self.DrawHistogram(h_DiffXsec, 110, 130)
    # self.DrawHistogram(h_DiffXsec, 120, 3000)
    self.DrawHistogram(h_DiffXsec, 15, 1500)
    f_output = TFile("ROOTFile_DY_FEWZ_NNLO_%s.root" % (self.PDF), "RECREATE")
    f_output.cd()
    h_Xsec.Write()
    h_DiffXsec.Write()
    f_output.Close()
'_pt_10_eta_2', '_pt_10_eta_2p4', '_pt_20_eta_1p5', '_pt_20_eta_2', '_pt_20_eta_2p4' ] hSVPt = [] pt_type = ['Pt', 'Pt_gen', 'Pt_f', 'Pt_gen_f'] hSVEta = [] eta_type = ['Eta', 'Eta_gen', 'Eta_f', 'Eta_gen_f'] hSVMass = [] mass_type = ['Mass', 'Mass_gen', 'Mass_f', 'Mass_gen_f'] hCutflow = [] for c in bNOTb: h = TH1D("hCutflow" + c, ";;Events;", 5, 0.5, 5.5) setAxisBins(h, cutsName) hCutflow.append(h) for s in eta_range: h = TH1D("hCutflow" + s + c, ";;Events;", 5, 0.5, 5.5) setAxisBins(h, cutsName) hCutflow_eta.append(h) for s in pt_range: h = TH1D("hCutflow" + s + c, ";;Events;", 5, 0.5, 5.5) setAxisBins(h, cutsName) hCutflow_pt.append(h) for s in pt_eta_range: h = TH1D("hCutflow" + s + c, ";;Events;", 5, 0.5, 5.5)
fullRange = pdf.IntegralOneDim(0.040 - 1.4 * mres.Eval(0.02), 0.130 + 1.4 * mres.Eval(0.06), 1e-12, 1e-12, ROOT.Double(blahh)) / 2.8 print "full range has {0} independent regions".format(fullRange) rand = TRandom() c = TCanvas("c", "c", 800, 600) c.Print(remainder[0] + ".pdf[") outfile = TFile(remainder[0] + ".root", "RECREATE") c.SetLogy(0) n_massbins = 50 minmass = 0.040 maxmass = 0.130 data = TH1D("data", "data", 200, 0, 0.17) #pvalHist = TH1D("pval","pval",100000,0,1.0) #allpvalHist = TH1D("allpval","allpval",100000,0,1.0) #pvalCdfHist = TH1D("pvalCdf","pvalCdf",100000,0,1.0) pvalHist = TH1D("pval", "pval", 100, 0, 1.0) allpvalHist = TH1D("allpval", "allpval", 100, 0, 1.0) pvalCdfHist = TH1D("pvalCdf", "pvalCdf", 100, 0, 1.0) lastTime = time.time() for runnum in xrange(0, n): newTime = time.time() if newTime - lastTime > 10: print runnum lastTime = newTime #print runnum data.Reset()
try: root_file_name = sys.argv[1] except IndexError: root_file_name = "muon_reco24.1.root" set_gstyle(gStyle) root_file = TFile(root_file_name) dir_name = "PatAnalyzerSkeleton" """open tree""" tree = "ass_muons" tree = os.path.join(dir_name, tree) tree = root_file.Get(tree) """Transverse momentum""" hist_stats = 100, 0, 50 B_pt_hist = TH1D("B_pt", "B", *hist_stats) D_pt_hist = TH1D("D_pt", "D", *hist_stats) B_pt_hist.SetStats(0) D_pt_hist.SetLineColor(2) tree.Project("B_pt", "pt", "abs(mother_pdgid) == 521") tree.Project("D_pt", "pt", "abs(mother_pdgid) == 411") [ set_histogram(x, """p_{t} #[]{GeV/c}""", """entries / 0.5 #[]{GeV/c}""") for x in [B_pt_hist, D_pt_hist] ] title = "p_{t} distribution" pt_can = TCanvas("p_{t} distribution", title) pt_can.cd()
template = "ntuple_new/qq_ILD{}/analyzers.ZqqIPJetsWithMCTreeProducer.ZqqIPJetsWithMCTreeProducer_1/tree.root" formats = ["", "_20res", "_0res", "_0res_ecal_hcal"] legend_names = ["hcal 50%", "hcal 20%", "hcal 0%", "hcal and ecal 0%"] file_names = [template.format(x) for x in formats] files = [TFile.Open(file_name) for file_name in file_names] trees = [tree_file.Get("events") for tree_file in files] canvas = TCanvas("c_" + variable, title, 800, 600) canvas.SetGrid() legend = TLegend(0.7, 0.6, 0.9, 0.8) legend.SetFillStyle(0) legend.SetBorderSize(0) histogram_list = [] for tree, form, legend_name in zip(trees, formats, legend_names): histogram = TH1D("h" + form, title, 60, -0.2, 0.2) histogram.SetStats(kFALSE) histogram.GetYaxis().SetTitle("Entries/ {:3.2f}".format( histogram.GetBinWidth(1))) histogram.GetXaxis().SetTitle(xtitle) tree.Project(histogram.GetName(), variable) histogram.SetLineColor(len(histogram_list) + 2) histogram.Fit("gaus", "0", "", -0.04, 0.1) histogram.Fit("gaus", "0", "", -histogram.GetFunction("gaus").GetParameter(2) * 2, 0.1) histogram.Fit("gaus", "0", "", -histogram.GetFunction("gaus").GetParameter(2) * 2, 0.1) legend.AddEntry( histogram, legend_name + # " RMS: {:.2f}".format(histogram.GetRMS()),
def plotEfficiency(self, doFit=False):
    # Draw the trigger efficiency (Numerator/Denominator histograms) as
    # a TGraphAsymmErrors with Clopper-Pearson errors, decorate the
    # canvas, save a PDF under plots_<sample>/, and write the graph
    # (and optional fit) to self._fout. Returns None on success, 0.0 if
    # Numerator/Denominator are missing.
    if hasattr(self, "Denominator") and hasattr(self, "Numerator"):
        if self._rebinFactor != 1:
            self.Denominator["hist"].Rebin(self._rebinFactor)
            self.Numerator["hist"].Rebin(self._rebinFactor)
        # Clamp pathological bins where numerator > denominator (would
        # give efficiency > 1); includes under/overflow via the -1 start.
        for ibin in range(-1, self.Numerator["hist"].GetNbinsX() + 1):
            if self.Numerator["hist"].GetBinContent(
                    ibin + 1) > self.Denominator["hist"].GetBinContent(ibin + 1):
                print 'Fixing bad bin:', (ibin + 1)
                self.Numerator["hist"].SetBinContent(
                    ibin + 1, self.Denominator["hist"].GetBinContent(ibin + 1))
                self.Numerator["hist"].SetBinError(
                    ibin + 1, self.Denominator["hist"].GetBinError(ibin + 1))
        # "cp" = Clopper-Pearson interval.
        efficiencyGraph = TGraphAsymmErrors(self.Numerator["hist"],
                                            self.Denominator["hist"], "cp")
        pt_cmsPrelim = TPaveText(0.132832, 0.859453, 0.486216, 0.906716,
                                 "brNDC")
        pt_cmsPrelim.SetBorderSize(0)
        pt_cmsPrelim.SetFillStyle(0)
        pt_cmsPrelim.SetTextFont(62)
        pt_cmsPrelim.SetTextSize(0.0374065)
        pt_cmsPrelim.AddText("CMS Preliminary")
        pt_lumi = TPaveText(0.744361, 0.92928, 0.860902, 0.977667, "brNDC")
        pt_lumi.SetBorderSize(0)
        pt_lumi.SetFillStyle(0)
        pt_lumi.SetTextFont(42)
        pt_lumi.SetTextSize(0.0374065)
        pt_lumi.AddText(self._luminosityLabel)
        pt_leg = TPaveText(0.160401, 0.768657, 0.342105, 0.863184, "brNDC")
        pt_leg.SetBorderSize(0)
        pt_leg.SetFillStyle(0)
        pt_leg.SetTextFont(42)
        pt_leg.SetTextSize(0.0349127)
        pt_leg.SetTextAlign(12)
        # Per-leg legend text: the MET filter list, the trigger path,
        # and the dataset label, arranged per leg type.
        if self._leg == "METLeg":
            legLabel = ""
            for filt in self._metHLTFilters[:-1]:
                legLabel = legLabel + filt + ", "
            legLabel = legLabel + self._metHLTFilters[-1]
            pt_leg.AddText(legLabel)
            pt_leg.AddText(self._datasetLabel)
        if self._leg == "TrackLeg":
            pt_leg.AddText(self._path + "*")
            pt_leg.AddText(self._datasetLabel)
            legLabel = ""
            for filt in self._metHLTFilters[:-1]:
                legLabel = legLabel + filt + ", "
            legLabel = legLabel + self._metHLTFilters[-1] + " applied"
            pt_leg.AddText(legLabel)
        if self._leg == "METPath":
            if self._path == "GrandOr":
                pt_leg.AddText("OR of Signal Paths")
            else:
                pt_leg.AddText(self._path + "*")
            pt_leg.AddText(self._datasetLabel)
        lumiLabel = TPaveText(0.66416, 0.937339, 0.962406, 0.992894, "brNDC")
        lumiLabel.SetBorderSize(0)
        lumiLabel.SetFillStyle(0)
        lumiLabel.SetTextFont(42)
        lumiLabel.SetTextSize(0.0387597)
        lumiLabel.AddText(str(self._luminosityLabel))
        # xlo/xhi/ylo/yhi are module-level axis limits (not in view).
        oneLine = TLine(xlo, 1.0, xhi, 1.0)
        oneLine.SetLineWidth(3)
        oneLine.SetLineStyle(2)
        # Empty 1-bin histogram used only to define the axes.
        backgroundHist = TH1D("backgroundHist", "backgroundHist", 1, xlo, xhi)
        backgroundHist.GetYaxis().SetTitle("Trigger Efficiency")
        backgroundHist.GetYaxis().SetRangeUser(ylo, yhi)
        if self._leg == "METLeg" or self._leg == "METPath":
            backgroundHist.GetXaxis().SetTitle(self._metLegAxisTitle)
        elif self._leg == "TrackLeg":
            backgroundHist.GetXaxis().SetTitle(self._trackLegAxisTitle)
        SetStyle(backgroundHist)
        SetStyle(efficiencyGraph)
        self._canvas.cd()
        backgroundHist.Draw()
        efficiencyGraph.Draw("P")
        pt_cmsPrelim.Draw("same")
        pt_lumi.Draw("same")
        pt_leg.Draw("same")
        oneLine.Draw("same")
        if doFit:
            (fitFunc, fitText) = self.PlotFit(efficiencyGraph)
            fitFunc.Draw("same")
            fitText.Draw("same")
        if not os.path.exists('plots_' + self.Denominator["sample"]):
            os.mkdir('plots_' + self.Denominator["sample"])
        if self._leg == "METPath":
            self._canvas.SaveAs('plots_' + self.Denominator["sample"] + '/' +
                                self._path + "_Efficiency.pdf")
        else:
            self._canvas.SaveAs('plots_' + self.Denominator["sample"] + '/' +
                                self._path + "_" + self._leg + ".pdf")
        self._fout.cd()
        if self._tgraphSuffix is not None:
            efficiencyGraph.Write(self._path + "_" + self._leg + "_" +
                                  self._tgraphSuffix)
            if doFit:
                fitFunc.Write(self._path + "_" + self._leg + "_" +
                              self._tgraphSuffix + "_fitResult")
        else:
            efficiencyGraph.Write(self._path + "_" + self._leg)
            if doFit:
                fitFunc.Write(self._path + "_" + self._leg + "_fitResult")
    else:
        print "Denominator and Numerator must be defined for path ", self._path, ", leg ", self._leg
        return 0.0
def T_to_H(filename, outputpath='', NBins=100, IBin=-10, FBin=10):
    # Convert every branch of every TTree in a ROOT file into a TH1D
    # (NBins bins over [IBin, FBin]) and write them to
    # <outputpath>/<name>_HIST.root.
    # NOTE(review): the Fill() calls below are commented out, so the
    # written histograms are empty — this function looks unfinished.
    from ROOT import TFile, TCanvas, TPad, TH1D, TH1F
    import os
    import numpy
    if (filename[0] == "/"):
        filename = filename
    else:
        filename = os.getcwd(
        ) + "/" + filename  # get the path included filename
    loca = len(filename)
    for i in range(1, len(filename) + 1):  # find the "/" location
        if (filename[-i] == "/"):
            loca = i - 1
            break
    FILENAME = filename.replace(
        filename[:-loca], "")  # this is the shorten filename, excluded path
    # print(filename) # path included file name
    # print(FILENAME) # file name only
    # NOTE(review): when outputpath is '' the 'pass' branch leaves
    # filetxt unbound, and the final "filetxt = filetxt + ..." raises
    # NameError — confirm whether an empty outputpath is ever used.
    if (outputpath == ''):
        pass
    else:
        if (outputpath[0] == "/"):
            filetxt = outputpath + "/" + FILENAME.replace(".root", "")
            filetxt = filetxt.replace("//", "/")
        elif (outputpath[0] == "~"):
            filetxt = outputpath.replace(
                "~", os.environ['HOME']) + "/" + FILENAME.replace(".root", "")
            filetxt = filetxt.replace("//", "/")
        else:
            filetxt = os.getcwd() + "/" + outputpath + "/" + FILENAME.replace(
                ".root", "")
            filetxt = filetxt.replace("//", "/")
    # print(filetxt)
    f = TFile(filename, "READ")  # read file
    dirlist = f.GetListOfKeys()  # make TList consisting of keys
    # print(dirlist.GetSize()) # printing the number of TTree included in the file
    ITER = dirlist.MakeIterator(
    )  # MakeIterator() : Return a list iterator -> TIterator
    key = ITER.Next(
    )  # Next() : returning TObject, but returning TKey.. why??
    # px = numpy.zeros(1, dtype=float)
    histoNum = 0
    histList = []
    NamehistList = []
    while key:
        tree = key.ReadObj()  # TTree
        # print(tree.GetName())
        branchlist = tree.GetListOfBranches()  # TObjArray
        # NOTE(review): 'continue' here does not advance 'key', so an
        # empty tree makes this loop spin forever — confirm and fix.
        if (branchlist.IsEmpty()):  # continue if tree is empty
            continue
        ITER_b = branchlist.MakeIterator()  # TIterator
        key_b = ITER_b.Next()  # TBranch
        while key_b:
            Namehist = FILENAME.replace(
                ".root", "") + "_" + tree.GetName() + "_" + key_b.GetName()
            NamehistList.append(Namehist)
            BranchName = key_b.GetName()
            #print(BranchName)
            vaName = key_b.GetName()
            # NOTE(review): exec() creates a local numpy buffer, but the
            # next line passes the *string* vaName (not the buffer) to
            # SetBranchAddress — the branch is never actually bound.
            exec(vaName + '= numpy.zeros(1, dtype=float)')
            tree.SetBranchAddress(BranchName, vaName)
            # print(Namehist)
            for i in range(tree.GetEntries()):
                tree.GetEntry(i)
            hist = TH1D(Namehist, Namehist, NBins, IBin, FBin)
            # hist.Fill(vaName)
            histList.append(hist)
            histoNum = histoNum + 1
            # print(key_b.GetName())
            key_b = ITER_b.Next()
        key = ITER.Next()
    #### After upper "while", all of the histograms are defined. (Is it possible to automatically set BranchAdress???)
    # Second pass over the same keys; the Fill is commented out, so this
    # pass currently only re-reads the entries.
    dirlist = f.GetListOfKeys()  # make TList consisting of keys
    # print(dirlist.GetSize()) # printing the number of TTree included in the file
    ITER = dirlist.MakeIterator(
    )  # MakeIterator() : Return a list iterator -> TIterator
    key = ITER.Next()
    while key:
        HN = 0
        tree = key.ReadObj()  # TTree
        branchlist = tree.GetListOfBranches()  # TObjArray
        if (branchlist.IsEmpty()):  # continue if tree is empty
            continue
        ITER_b = branchlist.MakeIterator()  # TIterator
        key_b = ITER_b.Next()  # TBranch
        while key_b:
            for i in range(tree.GetEntries()):
                tree.GetEntry(i)
                # histList[HN].Fill(px)
            HN = HN + 1
            key_b = ITER_b.Next()
        key = ITER.Next()
    filetxt = filetxt + "_HIST.root"
    outputROOT = TFile(filetxt, "RECREATE")
    for jj in range(len(histList)):
        histList[jj].Write()
    outputROOT.Close()
def compare(trigger, leg, data, mc, axisTitle, canvas, dataLumi,
            metHLTFilters):
    # Overlay a data and an MC trigger-efficiency graph (read from
    # triggerEfficiency_<sample>.root files) for one trigger/leg,
    # decorate the canvas, and save a PDF into a data-era-dependent
    # plots_compare* directory.
    dataFile = TFile.Open("triggerEfficiency_" + data + ".root", "read")
    mcFile = TFile.Open("triggerEfficiency_" + mc + ".root", "read")
    dataEff = dataFile.Get(trigger + "_" + leg)
    mcEff = mcFile.Get(trigger + "_" + leg)
    SetStyle(dataEff)
    SetStyle(mcEff)
    mcEff.SetLineColor(600)
    mcEff.SetMarkerColor(600)
    # xlo/xhi/ylo/yhi are module-level axis limits (not in view).
    oneLine = TLine(xlo, 1.0, xhi, 1.0)
    oneLine.SetLineWidth(3)
    oneLine.SetLineStyle(2)
    # Empty 1-bin histogram used only to define the axes.
    backgroundHist = TH1D("backgroundHist", "backgroundHist", 1, xlo, xhi)
    backgroundHist.GetYaxis().SetTitle("Trigger Efficiency")
    backgroundHist.GetYaxis().SetRangeUser(ylo, yhi)
    backgroundHist.GetXaxis().SetTitle(axisTitle)
    SetStyle(backgroundHist)
    canvas.cd()
    backgroundHist.Draw()
    dataEff.Draw("CP same")
    mcEff.Draw("CP same")
    #oneLine.Draw("same")
    pt_cmsPrelim = TPaveText(0.132832, 0.859453, 0.486216, 0.906716, "brNDC")
    pt_cmsPrelim.SetBorderSize(0)
    pt_cmsPrelim.SetFillStyle(0)
    pt_cmsPrelim.SetTextFont(62)
    pt_cmsPrelim.SetTextSize(0.0374065)
    pt_cmsPrelim.AddText("CMS Preliminary")
    pt_cmsPrelim.Draw("same")
    pt_lumi = TPaveText(0.744361, 0.92928, 0.860902, 0.977667, "brNDC")
    pt_lumi.SetBorderSize(0)
    pt_lumi.SetFillStyle(0)
    pt_lumi.SetTextFont(42)
    pt_lumi.SetTextSize(0.0374065)
    # dataLumi is in /pb; displayed in /fb.
    pt_lumi.AddText("{:.2f}".format(dataLumi / 1000.0) + " fb^{-1}, 13 TeV")
    pt_lumi.Draw("same")
    pt_leg = TPaveText(0.160401, 0.768657, 0.342105, 0.863184, "brNDC")
    pt_leg.SetBorderSize(0)
    pt_leg.SetFillStyle(0)
    pt_leg.SetTextFont(42)
    pt_leg.SetTextSize(0.025)
    pt_leg.SetTextAlign(12)
    # Per-leg label: MET filter list, trigger path, or the OR of paths.
    if leg == "METLeg":
        legLabel = ""
        for filt in metHLTFilters[:-1]:
            legLabel = legLabel + filt + ", "
        legLabel = legLabel + metHLTFilters[-1]
        pt_leg.AddText(legLabel)
    if leg == "TrackLeg":
        pt_leg.AddText(trigger + "*")
        legLabel = ""
        for filt in metHLTFilters[:-1]:
            legLabel = legLabel + filt + ", "
        legLabel = legLabel + metHLTFilters[-1] + " applied"
        pt_leg.AddText(legLabel)
    if leg == "METPath":
        if trigger == "GrandOr":
            pt_leg.AddText("OR of Signal Paths")
        else:
            # Long path names are split over two lines at '_' borders.
            if len(trigger) > 25 and len(trigger.split("_")) > 2:
                firstLine = trigger.split("_")[0] + "_" + trigger.split(
                    "_")[1] + "_"
                pt_leg.AddText(firstLine)
                secondLine = ""
                for line in trigger.split("_")[2:-1]:
                    secondLine += line + "_"
                secondLine += trigger.split("_")[-1] + "*"
                pt_leg.AddText(secondLine)
            else:
                pt_leg.AddText(trigger + "*")
    pt_leg.Draw("same")
    # Human-readable era label inferred from the sample name.
    dataLabel = '2015 data'
    if '2016BC' in data:
        dataLabel = '2016 B+C data'
    if '2016DEFGH' in data:
        dataLabel = '2016 D-H data'
    if '2017' in data:
        dataLabel = '2017 data'
    if '2018' in data:
        dataLabel = '2018 data'
    legendLabel = trigger
    legend = TLegend(0.65, 0.75, 0.93, 0.88)
    legend.SetBorderSize(0)
    legend.SetFillColor(0)
    legend.SetFillStyle(0)
    legend.SetTextFont(42)
    if leg == 'METLeg':
        legend.SetHeader('MET Leg')
    elif leg == 'TrackLeg':
        legend.SetHeader('Track Leg')
    legend.AddEntry(dataEff, dataLabel, 'P')
    legend.AddEntry(mcEff, 'W #rightarrow l#nu MC', 'P')
    legend.Draw("same")
    outputDirectory = 'plots_compare'
    if 'BC' in data:
        outputDirectory = 'plots_compareBC'
    if 'DEFGH' in data:
        outputDirectory = 'plots_compareDEFGH'
    if not os.path.exists(outputDirectory):
        os.mkdir(outputDirectory)
    canvas.SaveAs(outputDirectory + '/' + trigger + '_' + leg + '.pdf')
    mcFile.Close()
    dataFile.Close()
    return
def compareDatasets(trigger,
                    leg,
                    datasets,
                    lineColors,
                    legendLabels,
                    axisTitle,
                    canvas,
                    dataLumi,
                    metHLTFilters,
                    outputSuffix=None):
    # Overlay the trigger-efficiency graphs of several datasets (read
    # from triggerEfficiency_<dataset>.root) for one trigger/leg,
    # decorate the canvas, and save compareDatasets_<trigger>_<leg>*.pdf.
    inputFiles = [
        TFile.Open("triggerEfficiency_" + x + ".root") for x in datasets
    ]
    efficiencies = [x.Get(trigger + "_" + leg) for x in inputFiles]
    for iEff in range(len(efficiencies)):
        SetStyle(efficiencies[iEff])
        efficiencies[iEff].SetLineColor(lineColors[iEff])
        efficiencies[iEff].SetMarkerColor(lineColors[iEff])
    # xlo/xhi/ylo/yhi are module-level axis limits (not in view).
    oneLine = TLine(xlo, 1.0, xhi, 1.0)
    oneLine.SetLineWidth(3)
    oneLine.SetLineStyle(2)
    # Empty 1-bin histogram used only to define the axes.
    backgroundHist = TH1D("backgroundHist", "backgroundHist", 1, xlo, xhi)
    backgroundHist.GetYaxis().SetTitle("Trigger Efficiency")
    backgroundHist.GetYaxis().SetRangeUser(ylo, yhi)
    backgroundHist.GetXaxis().SetTitle(axisTitle)
    SetStyle(backgroundHist)
    canvas.cd()
    backgroundHist.Draw()
    for eff in efficiencies:
        eff.Draw("CP same")
    #oneLine.Draw("same")
    pt_cmsPrelim = TPaveText(0.132832, 0.859453, 0.486216, 0.906716, "brNDC")
    pt_cmsPrelim.SetBorderSize(0)
    pt_cmsPrelim.SetFillStyle(0)
    pt_cmsPrelim.SetTextFont(62)
    pt_cmsPrelim.SetTextSize(0.0374065)
    # dataLumi <= 0 marks an MC-only comparison.
    if dataLumi > 0:
        pt_cmsPrelim.AddText("CMS Preliminary")
    else:
        pt_cmsPrelim.AddText("CMS Simulation")
    pt_cmsPrelim.Draw("same")
    if dataLumi > 0:
        pt_lumi = TPaveText(0.744361, 0.92928, 0.860902, 0.977667, "brNDC")
        pt_lumi.SetBorderSize(0)
        pt_lumi.SetFillStyle(0)
        pt_lumi.SetTextFont(42)
        pt_lumi.SetTextSize(0.0374065)
        # dataLumi is in /pb; displayed in /fb.
        pt_lumi.AddText("{:.2f}".format(dataLumi / 1000.0) +
                        " fb^{-1}, 13 TeV")
        pt_lumi.Draw("same")
    pt_leg = TPaveText(0.160401, 0.768657, 0.342105, 0.863184, "brNDC")
    pt_leg.SetBorderSize(0)
    pt_leg.SetFillStyle(0)
    pt_leg.SetTextFont(42)
    pt_leg.SetTextSize(0.025)
    pt_leg.SetTextAlign(12)
    # Per-leg label: MET filter list, trigger path, or the OR of paths.
    if leg == "METLeg":
        legLabel = ""
        for filt in metHLTFilters[:-1]:
            legLabel = legLabel + filt + ", "
        legLabel = legLabel + metHLTFilters[-1]
        pt_leg.AddText(legLabel)
    if leg == "TrackLeg":
        pt_leg.AddText(trigger + "*")
        legLabel = ""
        for filt in metHLTFilters[:-1]:
            legLabel = legLabel + filt + ", "
        legLabel = legLabel + metHLTFilters[-1] + " applied"
        pt_leg.AddText(legLabel)
    if leg == "METPath":
        if trigger == "GrandOr":
            pt_leg.AddText("OR of Signal Paths")
        else:
            # Long path names are split over two lines at '_' borders.
            if len(trigger) > 25 and len(trigger.split("_")) > 2:
                firstLine = trigger.split("_")[0] + "_" + trigger.split(
                    "_")[1] + "_"
                pt_leg.AddText(firstLine)
                secondLine = ""
                for line in trigger.split("_")[2:-1]:
                    secondLine += line + "_"
                secondLine += trigger.split("_")[-1] + "*"
                pt_leg.AddText(secondLine)
            else:
                pt_leg.AddText(trigger + "*")
    pt_leg.Draw("same")
    legendLabel = trigger
    legend = TLegend(0.65, 0.75, 0.93, 0.88)
    legend.SetBorderSize(0)
    legend.SetFillColor(0)
    legend.SetFillStyle(0)
    legend.SetTextFont(42)
    if leg == 'METLeg':
        legend.SetHeader('MET Leg')
    elif leg == 'TrackLeg':
        legend.SetHeader('Track Leg')
    for iEff in range(len(efficiencies)):
        legend.AddEntry(efficiencies[iEff], legendLabels[iEff], 'P')
    legend.Draw("same")
    if outputSuffix is not None:
        canvas.SaveAs('compareDatasets_' + trigger + '_' + leg + '_' +
                      outputSuffix + '.pdf')
    else:
        canvas.SaveAs('compareDatasets_' + trigger + '_' + leg + '_.pdf')
    for inputFile in inputFiles:
        inputFile.Close()
    return
def createPlots_(plot):
    """Cumulative material budget from simulation.

    Internal function that will produce a cumulative profile of the
    material budget inferred from the simulation starting from the
    single detectors that compose the tracker. It will iterate over
    all existing detectors contained in the DETECTORS dictionary. The
    function will automatically skip non-existent detectors.

    Parameter:
    plot -- key into the module-level `plots` dictionary selecting
            which profile number / axis configuration to use.

    Returns the cumulative TH1D of the material budget summed over all
    subdetectors (detached from any ROOT directory), or None if `plot`
    is unknown.
    """
    IBs = ["InnerServices", "Phase2PixelBarrel", "TIB", "TIDF", "TIDB"]
    theDirname = "Figures"

    if plot not in plots.keys():
        print("Error: chosen plot name not known %s" % plot)
        return

    hist_X0_IB = None
    # FIX: this accumulator was never initialized, so the assignment
    # inside the detector loop below raised a NameError on first use.
    hist_X0_detectors = OrderedDict()

    # We need to keep the file content alive for the lifetime of the
    # full function....
    subDetectorFiles = []

    hist_X0_elements = OrderedDict()
    prof_X0_elements = OrderedDict()
    for subDetector, color in six.iteritems(DETECTORS):
        subDetectorFilename = "matbdg_%s.root" % subDetector
        if not checkFile_(subDetectorFilename):
            print("Error opening file: %s" % subDetectorFilename)
            continue

        subDetectorFiles.append(TFile(subDetectorFilename))
        subDetectorFile = subDetectorFiles[-1]
        print("Opening file: %s" % subDetectorFilename)
        prof_X0_XXX = subDetectorFile.Get("%d" % plots[plot].plotNumber)

        # Merge together the "inner barrel detectors".
        if subDetector in IBs:
            hist_X0_IB = assignOrAddIfExists_(hist_X0_IB, prof_X0_XXX)

        hist_X0_detectors[subDetector] = prof_X0_XXX.ProjectionX()

        # category profiles
        for label, [num, color, leg] in six.iteritems(hist_label_to_num):
            prof_X0_elements[label] = subDetectorFile.Get(
                "%d" % (num + plots[plot].plotNumber))
            hist_X0_elements[label] = assignOrAddIfExists_(
                hist_X0_elements.setdefault(label, None),
                prof_X0_elements[label])

    # Cumulative histogram cloned from the inner-barrel binning and
    # detached from the files so it survives their closure.
    cumulative_matbdg = TH1D("CumulativeSimulMatBdg",
                             "CumulativeSimulMatBdg",
                             hist_X0_IB.GetNbinsX(),
                             hist_X0_IB.GetXaxis().GetXmin(),
                             hist_X0_IB.GetXaxis().GetXmax())
    cumulative_matbdg.SetDirectory(0)

    # colors
    for det, color in six.iteritems(DETECTORS):
        setColorIfExists_(hist_X0_detectors, det, color)

    for label, [num, color, leg] in six.iteritems(hist_label_to_num):
        hist_X0_elements[label].SetFillColor(color)

    # First Plot: BeamPipe + Pixel + TIB/TID + TOB + TEC + Outside
    # stack
    stackTitle_SubDetectors = "Tracker Material Budget;%s;%s" % (
        plots[plot].abscissa, plots[plot].ordinate)
    stack_X0_SubDetectors = THStack("stack_X0", stackTitle_SubDetectors)
    for det, histo in six.iteritems(hist_X0_detectors):
        stack_X0_SubDetectors.Add(histo)
        cumulative_matbdg.Add(histo, 1)

    # canvas
    can_SubDetectors = TCanvas("can_SubDetectors", "can_SubDetectors", 800,
                               800)
    can_SubDetectors.Range(0, 0, 25, 25)
    can_SubDetectors.SetFillColor(kWhite)

    # Draw
    stack_X0_SubDetectors.SetMinimum(plots[plot].ymin)
    stack_X0_SubDetectors.SetMaximum(plots[plot].ymax)
    stack_X0_SubDetectors.Draw("HIST")
    stack_X0_SubDetectors.GetXaxis().SetLimits(plots[plot].xmin,
                                               plots[plot].xmax)

    # Legenda
    theLegend_SubDetectors = TLegend(0.180, 0.8, 0.98, 0.92)
    theLegend_SubDetectors.SetNColumns(3)
    theLegend_SubDetectors.SetFillColor(0)
    theLegend_SubDetectors.SetFillStyle(0)
    theLegend_SubDetectors.SetBorderSize(0)
    for det, histo in six.iteritems(hist_X0_detectors):
        theLegend_SubDetectors.AddEntry(histo, det, "f")
    theLegend_SubDetectors.Draw()

    # text
    text_SubDetectors = TPaveText(0.180, 0.727, 0.402, 0.787, "NDC")
    text_SubDetectors.SetFillColor(0)
    text_SubDetectors.SetBorderSize(0)
    text_SubDetectors.AddText("CMS Simulation")
    text_SubDetectors.SetTextAlign(11)
    text_SubDetectors.Draw()

    # Store
    can_SubDetectors.Update()
    if not checkFile_(theDirname):
        os.mkdir(theDirname)
    can_SubDetectors.SaveAs("%s/Tracker_SubDetectors_%s.pdf" % (theDirname,
                                                                plot))
    can_SubDetectors.SaveAs("%s/Tracker_SubDetectors_%s.root" % (theDirname,
                                                                 plot))

    # Second Plot: BeamPipe + SEN + ELE + CAB + COL + SUP + OTH/AIR +
    # Outside stack
    stackTitle_Materials = "Tracker Material Budget;%s;%s" % (
        plots[plot].abscissa, plots[plot].ordinate)
    stack_X0_Materials = THStack("stack_X0", stackTitle_Materials)
    stack_X0_Materials.Add(hist_X0_detectors["BeamPipe"])
    for label, [num, color, leg] in six.iteritems(hist_label_to_num):
        stack_X0_Materials.Add(hist_X0_elements[label])

    # canvas
    can_Materials = TCanvas("can_Materials", "can_Materials", 800, 800)
    can_Materials.Range(0, 0, 25, 25)
    can_Materials.SetFillColor(kWhite)

    # Draw
    stack_X0_Materials.SetMinimum(plots[plot].ymin)
    stack_X0_Materials.SetMaximum(plots[plot].ymax)
    stack_X0_Materials.Draw("HIST")
    stack_X0_Materials.GetXaxis().SetLimits(plots[plot].xmin,
                                            plots[plot].xmax)

    # Legenda
    theLegend_Materials = TLegend(0.180, 0.8, 0.95, 0.92)
    theLegend_Materials.SetNColumns(3)
    theLegend_Materials.SetFillColor(0)
    theLegend_Materials.SetBorderSize(0)
    theLegend_Materials.AddEntry(hist_X0_detectors["BeamPipe"], "Beam Pipe",
                                 "f")
    for label, [num, color, leg] in six.iteritems(hist_label_to_num):
        theLegend_Materials.AddEntry(hist_X0_elements[label], leg, "f")
    theLegend_Materials.Draw()

    # text
    text_Materials = TPaveText(0.180, 0.727, 0.402, 0.787, "NDC")
    text_Materials.SetFillColor(0)
    text_Materials.SetBorderSize(0)
    text_Materials.AddText("CMS Simulation")
    text_Materials.SetTextAlign(11)
    text_Materials.Draw()

    # Store
    can_Materials.Update()
    can_Materials.SaveAs("%s/Tracker_Materials_%s.pdf" % (theDirname, plot))
    can_Materials.SaveAs("%s/Tracker_Materials_%s.root" % (theDirname, plot))

    return cumulative_matbdg
"--outFileName", default='', help="File to be used for output.") parser.add_argument("-n", "--nEvents", default=0, type=int, help="Number of events to process.") return parser.parse_args() args = getArgs() print("args={0:s}".format(str(args))) maxPrint = 10 hWPL = TH1D('Loose', 'Loose', 50, -1., 1.) hWPL.GetXaxis().SetTitle('Electron_mvaFall17noIso') hWPL.GetYaxis().SetTitle('Entries') hWP80 = TH1D('WP80', 'WP80', 50, -1., 1.) hWP80.GetXaxis().SetTitle('Electron_mvaFall17noIso') hWP80.GetYaxis().SetTitle('Entries') hWP90 = TH1D('WP90', 'WP90', 50, -1., 1.) hWP90.GetXaxis().SetTitle('Electron_mvaFall17noIso') hWP90.GetYaxis().SetTitle('Entries') hNotWP90 = TH1D('WP90', 'WP90', 50, -1., 1.) hNotWP90.GetXaxis().SetTitle('Electron_mvaFall17noIso') hNotWP90.GetYaxis().SetTitle('Entries')
import sys #GET THE STUFF FROM THE TOP DIRECTORY sys.path.append('../') from ROOT import gROOT, gDirectory, TCanvas, TF1, TH1F, TH1D, TH1I, TFile, TKey, TString, THStack, TList, TLegend, TPaveText, TIter from Plot_DataBkgSigPlot import * from Plot_PlottingStyles import * from Plot_SetRootObjectsOptions import GetColor from Plot_RootObjects import GetUnits #AN EXAMPLE OF A DATA PLOT VERSUS 3 STACKED MC PLOTS # # #Create the histograms dataH = TH1D('muon_pt', 'the pt of a gluino', 50, 0.0, 100.0) ttjets = TH1D('ttjets_muon_pt', 'the pt of a gluino', 50, 0.0, 100.0) wjets = TH1D('wjets_muon_pt', 'the pt of a gluino', 50, 0.0, 100.0) singletop = TH1D('singletop_pt', 'the pt of a gluino', 50, 0.0, 100.0) lm6 = TH1D('lm6_pt', 'the pt of a gluino', 50, 0.0, 100.0) # #Fill them gaus = gROOT.GetFunction("gaus") gaus.SetParameter("Mean", 50) gaus.SetParameter("Sigma", 50) dataH.FillRandom("gaus", 15000) ttjets.FillRandom("gaus", 10000) wjets.FillRandom("gaus", 6000) singletop.FillRandom("gaus", 4000) landau = gROOT.GetFunction("landau") landau.SetParameter("Sigma", 30)
from ROOT import TFile, TH1D, TDirectory, TCanvas, TList, gPad, gStyle, TLegend import math Data = TFile('user.luadamek.20219352._000196.hist-output.root') Monte = TFile('user.luadamek.20219380._000083.hist-output.root') histTreeData = Data.Get( 'LA_EoverP_InDetTrackParticlesSortedLooseIsolatedVertexAssociated_tree') histTreeMonte = Monte.Get( 'LA_EoverP_InDetTrackParticlesSortedLooseIsolatedVertexAssociated_tree') #Data entries = histTreeData.GetEntriesFast() gd1 = TH1D("trk_p_data", "Track p", 100, 0.5, 20) gd2 = TH1D("trk_pE_data", "Track p with Energy", 100, 0.5, 20) gd3 = TH1D("efficiency_data", "Efficiency", 100, 0.5, 20) legd = TLegend(0.7, 0.1) for jentry in xrange(entries): nb = histTreeData.GetEntry(jentry) if nb <= 0: continue p = histTreeData.trk_p totalClusterEnergy = histTreeData.trk_nclusters_EM_200 + histTreeData.trk_nclusters_HAD_200 nTRT = histTreeData.trk_nTRT eta = histTreeData.trk_etaID gd1.Fill(p) if (totalClusterEnergy > 0 and nTRT >= 20): gd2.Fill(p)
elif (file.endswith(".root") and file.startswith("MC8TeV_TTJets_178v5")): listoffiles[5].insert(0, file) listofnames = [] listofnames.append("166v5") listofnames.append("169v5") listofnames.append("172v5") listofnames.append("173v5") listofnames.append("175v5") listofnames.append("178v5") for x in listofnames: ts_pratio_2ntr.append( TH1D("secbhad_pratio_emu_2ntr_" + x, "ratio between |p| of secvtx and bhadron in eMu channel, " + x, 50, 0., 1.)) ts_pratio_3ntr.append( TH1D("secbhad_pratio_emu_3ntr_" + x, "ratio between |p| of secvtx and bhadron in eMu channel, " + x, 50, 0., 1.)) ts_pratio_4ntr.append( TH1D("secbhad_pratio_emu_4ntr_" + x, "ratio between |p| of secvtx and bhadron in eMu channel, " + x, 50, 0., 1.)) ts_pratio_5ntr.append( TH1D("secbhad_pratio_emu_5ntr_" + x, "ratio between |p| of secvtx and bhadron in eMu channel, " + x, 50, 0., 1.)) ts_pratio_6ntr.append( TH1D("secbhad_pratio_emu_6ntr_" + x,
intervals = { 1: ((100, 130), (600, 750), (1500, 1700)), 2: ((120, 150), (720, 870), (1650, 1850)), 3: ((100, 140), (620, 770), (1500, 1750)), 4: ((0, 0), (700, 850), (1650, 1900)) } energies = array("d", (88, 511, 1274.5)) #keV energy_error = array("d", (0, 0, 0)) for i in range(1, 5): hist_name = base_name + str(i) file_name = base_name + str(i) + extension root_file = TFile(file_name) tree = root_file.Get("pjmca") hist = TH1D(hist_name, hist_name, 2000, 0, 2000) branch = branch_name.substitute(ch=(i - 1)) tree.Project(hist_name, branch, "{0} > 2".format(branch)) canvas = TCanvas(hist_name + "canvas", hist_name) means = array("d") errors = array("d") for interval in intervals[i]: if (interval[1] - interval[0]): result = hist.Fit("gaus", "QSR+", "", *interval) mean = result.Parameter(1) error = result.ParError(1) sigma = result.Parameter(2) means.append(mean) errors.append(error) print("gaussian mean = ", mean, "\pm", error)
def main():
    """Run the bias study: fit pull distributions per mass point and per
    injected signal strength, then plot the fitted pull means vs mass.

    Reads fitDiagnostics toy-fit trees produced with several seeds, builds a
    pull histogram per (signal_strength, mass), fits it with a Gaussian, and
    collects the fitted mean/width into TGraphErrors that are drawn and saved
    as PNG/PDF/ROOT output under plots/bias/.

    Relies on module-level globals: args, global_paths, SIGNAL_STRENGTH,
    COLORS, LEGEND, drawCMS, drawMass (all defined elsewhere in this file).
    """
    gStyle.SetOptStat(0)
    BIAS_DIR = global_paths.BIASDIR + args.btagging + "/"
    if args.year == 'run2c':
        # the two {} placeholders are filled later with (signal_strength, suffix)
        BIAS_DIR += "combined_run2_r{}{}/"
        ## individual plots stored in run2c_masspoints

    ## extract pulls
    pulls = {}
    for signal_strength in ['0', '2sigma', '5sigma']:
        print
        print
        print "--------------------------------------------------"
        print "r = " + signal_strength
        print "--------------------------------------------------"
        pulls[signal_strength] = TGraphErrors()
        for m in range(1600, 8001, 100):
            # low-mass points for injected signals use datacards with a
            # corrected signal strength (different input directory suffix)
            if (signal_strength == '2sigma' and m < 2600) or (signal_strength == '5sigma' and m < 4100):  ##FIXME FIXME
                datacard_correction = True
            else:
                datacard_correction = False
            print
            print "m = " + str(m)
            if datacard_correction:
                print "correcting signal strength in the datacard"
                print
            # injected signal strength for this point (the "truth" the pull is taken against)
            pull0 = int(SIGNAL_STRENGTH[signal_strength][m])
            tree = TChain("tree_fit_sb")
            # merge toy fits from all seeds into one chain
            for seed in [
                    '123456', '234567', '345678', '456789', '567891',
                    '678912', '789123', '891234', '912345', '123459'
            ]:
                tree.Add(
                    BIAS_DIR.format(signal_strength,
                                    "_lowm" if datacard_correction else "") +
                    "fitDiagnostics_M{mass}_{seed}.root".format(
                        mass=m, seed=seed))  ##FIXME FIXME
            hist = TH1D("s_pulls",
                        ";%s/#sigma_{r};Number of toys" % ("#Deltar"), 25, -5,
                        +5)
            for i in range(tree.GetEntries()):
                # cap the statistics per mass point at 1000 toys
                if hist.GetEntries() >= 1000: continue
                tree.GetEntry(i)
                #print "r = {} (+{}, -{})".format(tree.r, tree.rHiErr, tree.rLoErr)
                ##if tree.rLoErr < 0.: continue
                # reject fits stuck at the boundary / degenerate values
                if abs(tree.r + 1.) < 0.001: continue
                if abs(tree.r - 1.) < 0.001: continue
                if abs(tree.r - 0.) < 0.001: continue
                if tree.rHiErr == 0. or tree.rLoErr == 0.: continue
                # reject fits whose r +/- error lands exactly on an integer
                # (symptom of a failed/limited minimization)
                if abs(tree.r + abs(tree.rHiErr) -
                       round(tree.r + abs(tree.rHiErr))) < 0.0001: continue
                if abs(tree.r - abs(tree.rLoErr) -
                       round(tree.r - abs(tree.rLoErr))) < 0.0001: continue
                #print "r = {} (+{}, -{})".format(tree.r, tree.rHiErr, tree.rLoErr)
                # asymmetric pull: use the error on the side the deviation points to
                pull = (tree.r - pull0) / abs(
                    tree.rHiErr) if tree.r - pull0 > 0. else (
                        tree.r - pull0) / abs(tree.rLoErr)  ## my own approach
                hist.Fill(pull)
            ## individual plots for checking the fit quality
            c1 = TCanvas("c1", "Pulls", 600, 600)
            c1.cd()
            hist.GetXaxis().SetTitleSize(0.045)
            hist.GetYaxis().SetTitleSize(0.045)
            hist.GetYaxis().SetTitleOffset(1.1)
            hist.GetXaxis().SetTitleOffset(1.05)
            hist.GetXaxis().SetLimits(-6, 6.)
            hist.GetYaxis().SetLimits(0, 200.)
            hist.SetMinimum(0.)
            hist.SetMaximum(190.)
            c1.SetTopMargin(0.05)
            ##print "@ m= {}: \t mean = {}".format(m, hist.GetMean())
            #pulls[signal_strength].SetPoint(pulls[signal_strength].GetN(), m, hist.GetMean()) ## get actual mean of histogram
            # fit the core of the pull distribution with a Gaussian
            fit_func = TF1("gaussfit", "gaus", -3., 3.)
            hist.Fit(fit_func, "E")
            hist.Draw()
            drawCMS(-1, "Simulation Preliminary", year='run2')
            drawMass("m_{Z'} = " + str(m) + " GeV")
            c1.Print("plots/bias/run2c_masspoints/r" + signal_strength +
                     "/bias_fit_" + str(m) + "_" + args.year + ".pdf")
            c1.Print("plots/bias/run2c_masspoints/r" + signal_strength +
                     "/bias_fit_" + str(m) + "_" + args.year + ".png")
            n = pulls[signal_strength].GetN()
            pulls[signal_strength].SetPoint(
                n, m, fit_func.GetParameter(1))  ## get fitted gaussian mean
            pulls[signal_strength].SetPointError(
                n, 0., fit_func.GetParError(1))  ## set gaussian width as error
            # explicit cleanup so ROOT does not accumulate same-named objects
            fit_func.Delete()
            hist.Delete()
            c1.Delete()
            #except:
            #    print "something went wrong in m =", m

    ## draw pulls
    outfile = TFile("plots/bias/bias_study_new_" + args.year + ".root",
                    "RECREATE")
    c = TCanvas("canvas", "canvas", 800, 600)
    leg = TLegend(0.65, 0.7, 0.95, 0.9)
    leg.SetBorderSize(0)
    leg.SetFillStyle(0)  #1001
    leg.SetFillColor(0)
    for i, signal_strength in enumerate(['0', '2sigma', '5sigma']):
        pulls[signal_strength].SetMarkerStyle(2)
        pulls[signal_strength].SetMarkerColor(COLORS[signal_strength])
        pulls[signal_strength].SetLineColor(COLORS[signal_strength])
        pulls[signal_strength].SetLineWidth(2)
        pulls[signal_strength].SetMinimum(-0.7)
        pulls[signal_strength].SetMaximum(0.7)
        # first graph defines the axes ("A"), the rest overlay
        pulls[signal_strength].Draw("APL" if i == 0 else "PL")
        leg.AddEntry(pulls[signal_strength], LEGEND[signal_strength])
    # horizontal zero reference line across the mass range
    zeroline = TGraph()
    zeroline.SetPoint(zeroline.GetN(), 1000, 0)
    zeroline.SetPoint(zeroline.GetN(), 8600, 0)
    zeroline.SetMarkerStyle(7)
    zeroline.SetMarkerSize(0)
    zeroline.SetLineStyle(15)
    zeroline.SetLineColor(1)
    zeroline.Draw("PL")
    c.SetGrid()
    # axis cosmetics are set on the first-drawn graph, which owns the frame
    pulls['0'].SetTitle(";m_{Z'} (GeV);mean #Deltar/#sigma_{r}")
    pulls['0'].GetXaxis().SetTitleSize(0.045)
    pulls['0'].GetYaxis().SetTitleSize(0.045)
    pulls['0'].GetYaxis().SetTitleOffset(1.1)
    pulls['0'].GetXaxis().SetTitleOffset(1.05)
    pulls['0'].GetXaxis().SetLimits(1350., 8150.)
    c.SetTopMargin(0.05)
    leg.Draw()
    drawCMS(-1, "Simulation Preliminary", year='run2')
    c.Print("plots/bias/bias_study_new_" + args.year + ".png")
    c.Print("plots/bias/bias_study_new_" + args.year + ".pdf")
    c.Write()
    outfile.Close()
listoffiles = [[] for x in xrange(0, 1)] for file in os.listdir(path): if (file.endswith(".root") and file.startswith("MC8TeV_TTJets_MSDecays_172v5")): listoffiles[0].insert(0, file) # Open the files -> adds all samples together listofnames = [] listofnames.append("172v5") for x in listofnames: ts_correct.append( TH1D( "deltar_correct" + x, "inv.m. of lepton and secvtx (deltar, correct charge) for diff. topweights", 50, 0., 150.)) ts_correctmass.append( TH1D( "minmass_correct" + x, "inv.m. of lepton and secvtx (minmass correct charge) for diff. topweights", 50, 0., 150.)) ts_correct_topweight.append( TH1D( "deltar_correct_topweight" + x, "inv.m. of lepton and secvtx (deltar, correct charge) for diff. topweights", 50, 0., 150.)) ts_correctmass_topweight.append( TH1D( "minmass_correct_topweight" + x,
def run(self, selections, dv, dv2d, ch='', name='', nevents=-1):
    """Book histograms for each selection and fill them from the event tree.

    Parameters:
        selections: list of selection strings; a selection may be prefixed
            with a weight name using '**' (e.g. 'weight_2tagex ** formula').
        dv: dict of 1D variable definitions; each entry needs keys
            "name", "title", "bin", "xmin", "xmax" (optional "divide").
        dv2d: dict of 2D variable definitions with keys
            "namex"/"namey", "titlex"/"titley", "binx"/"biny",
            "xmin"/"xmax", "ymin"/"ymax".
        ch: unused here -- TODO confirm whether callers rely on it.
        name: histogram name prefix; defaults to self.name when empty.
        nevents: number of entries to process, -1 for all.

    Fills self.sv[s][v] (TH1D) and self.sv2d[s][v] (TH2D) in place.
    """
    # initialize dictionary selection: list of histograms
    if name=='':
        name = self.name
    nsel = 0
    for s in selections:
        self.sv[s] = collections.OrderedDict()
        self.sv2d[s] = collections.OrderedDict()
        # per-selection tag used in histogram names, e.g. "sel0", "sel1", ...
        selstr = 'sel{}'.format(int(nsel))
        nsel += 1
        for v in dv.keys():
            hname = '{}_{}_{}'.format(name, selstr, v)
            self.sv[s][v] = TH1D(hname,hname+";"+dv[v]["title"]+";",dv[v]["bin"],dv[v]["xmin"],dv[v]["xmax"])
            self.sv[s][v].Sumw2()
        for v in dv2d.keys():
            hname = '{}_{}_{}'.format(name, selstr, v)
            self.sv2d[s][v] = TH2D(hname,hname+";"+dv2d[v]["titlex"]+";"+dv2d[v]["titley"]+";",
                                   dv2d[v]["binx"],dv2d[v]["xmin"],dv2d[v]["xmax"],
                                   dv2d[v]["biny"],dv2d[v]["ymin"],dv2d[v]["ymax"],
                                   )
            self.sv2d[s][v].Sumw2()
    rf = TFile(self.rt)
    t = rf.Get("events")
    # decide how many entries to loop over
    if nevents == -1:
        numberOfEntries = t.GetEntries()
        print 'running over the full entries %i'%numberOfEntries
    else:
        numberOfEntries = nevents
        if t.GetEntries()<nevents:
            numberOfEntries = t.GetEntries()
            print 'running over the full entries %i'%numberOfEntries
        else:
            print 'running over a subset of entries %i'%numberOfEntries
    for s in selections:
        weighttrf_name=''
        weighttrfin_name=[]
        weighttrfless_name=[]
        sformula=s
        # a selection of the form "weightname ** formula" applies an extra
        # per-event weight taken from a tree branch
        if '**' in s:
            s_split=s.split('**')
            sformula=s_split[1]
            weighttrf_name=s_split[0]
            weighttrf_name=weighttrf_name.strip()
            # 'tagin': weight is 1 minus the sum of the first N exclusive-tag weights
            if 'tagin' in weighttrf_name:
                nbtagex = int(filter(str.isdigit, weighttrf_name))
                for i in range(nbtagex):
                    weighttrfin_name.append('weight_%itagex'%(i))
            # 'tagless': weight is the sum of the first N exclusive-tag weights
            if 'tagless' in weighttrf_name:
                nbtagex = int(filter(str.isdigit, weighttrf_name))
                for i in range(nbtagex):
                    weighttrfless_name.append('weight_%itagex'%(i))
        formula = TTreeFormula("",sformula,t)
        # loop over events
        print 'number of events:', numberOfEntries
        for entry in xrange(numberOfEntries):
            if (entry+1)%500 == 0:
                sys.stdout.write( '... %i events processed ...\r'%(entry+1))
                sys.stdout.flush()
            t.GetEntry(entry)
            weight = self.w * getattr(t,"weight")
            weighttrf=1.
            # resolve the extra weight: direct branch, 1-sum (tagin), or sum (tagless)
            if weighttrf_name!='' and len(weighttrfin_name)==0 and len(weighttrfless_name)==0:
                weighttrf = getattr(t,weighttrf_name)
            elif weighttrf_name!='' and len(weighttrfin_name)!=0 and len(weighttrfless_name)==0:
                weighttrf = 1.
                for i in weighttrfin_name:
                    weighttrf -= getattr(t,i)
            elif weighttrf_name!='' and len(weighttrfin_name)==0 and len(weighttrfless_name)!=0:
                weighttrf = 0.
                for i in weighttrfless_name:
                    weighttrf += getattr(t,i)
            weight=weight*weighttrf
            # apply selection
            result = formula.EvalInstance()
            # fill histos on selected events
            if result > 0.:
                for v in dv.keys():
                    # optional per-variable divisor (e.g. unit conversion)
                    divide=1
                    try:
                        divide=dv[v]["divide"]
                    except KeyError, e:
                        divide=1
                    self.sv[s][v].Fill(getattr(t,dv[v]["name"])/divide, weight)
                for v in dv2d.keys():
                    self.sv2d[s][v].Fill(getattr(t,dv2d[v]["namex"]),
                                         getattr(t,dv2d[v]["namey"]), weight)
import os #from ROOT import * from ROOT import TStyle, TF1, TFile, TCanvas, gDirectory, TTree, TH1D, TH1F, THStack, TLegend, gROOT import ROOT from style import * scaleHct = 0.02 scaleHut = 0.003 c1 = TCanvas( 'c1', 'c1', 450, 450 ) hct_gendR = TH1D('hct_gendR','#Delta R of gen b jets (Hct)', 30, 0, 4) hut_gendR = TH1D('hut_gendR','#Delta R of gen b jets (Hut)', 30, 0, 4) hct_recodR = TH1D('hct_recodR','#Delta R of reco b jets (Hct)', 30, 0, 4) hut_recodR = TH1D('hut_recodR','#Delta R of reco b jets (Hut)', 30, 0, 4) hct_genHm = TH1D('hct_genHm','Mass of gen H (Hct)', 30, 0, 250) hut_genHm = TH1D('hut_genHm','Mass of gen H (Hut)', 30, 0, 250) hct_recoHm = TH1D('hct_recoHm','Mass of reco H (Hct)', 30, 0, 250) hut_recoHm = TH1D('hut_recoHm','Mass of reco H (Hut)', 30, 0, 250) hct_matchHm = TH1D('hct_matchHm','Mass of gen matched H (Hct)', 30, 0, 250) hut_matchHm = TH1D('hut_matchHm','Mass of gen matched H (Hut)', 30, 0, 250) tch = TFile.Open('/home/minerva1993/fcnc/analysis_2017/fullAna/hist_singleTopHct.root') tuh = TFile.Open('/home/minerva1993/fcnc/analysis_2017/fullAna/hist_singleTopHut.root ') label = TPaveText() label.SetX1NDC(gStyle.GetPadLeftMargin()) label.SetY1NDC(1.0-gStyle.GetPadTopMargin()) label.SetX2NDC(1.0-gStyle.GetPadRightMargin()+0.03)
def Book(process):
    """Book all histograms into the module-level `histos` dict.

    Parameters:
        process: process tag; "bppp" gets wider energy/pz ranges, finer
            binning, electron counters, and tighter vertex windows.

    NOTE(review): the E and pz sections test the global `proc` while the
    rest of the function uses the `process` argument -- presumably `proc`
    is a module-level global with the same value; confirm, or these
    sections silently follow a different setting.
    Uses module-level globals `histos` and `um2cm` (defined elsewhere).
    """
    # --- event/track counters; bppp also counts electrons (bins 5-7) ---
    ntotmax = 6 + 1 if (process == "bppp") else 3 + 1
    histos.update({"h_ntot": TH1D("h_ntot", ";;N", ntotmax, 0, ntotmax)})
    histos["h_ntot"].GetXaxis().SetBinLabel(1, "N_{BXs}")
    histos["h_ntot"].GetXaxis().SetBinLabel(2, "N_{e+} tru_all")
    histos["h_ntot"].GetXaxis().SetBinLabel(3, "N_{e+} tru_acc")
    histos["h_ntot"].GetXaxis().SetBinLabel(4, "N_{e+} rec_emu")
    if (process == "bppp"):
        histos["h_ntot"].GetXaxis().SetBinLabel(5, "N_{e-} tru_all")
        histos["h_ntot"].GetXaxis().SetBinLabel(6, "N_{e-} tru_acc")
        histos["h_ntot"].GetXaxis().SetBinLabel(7, "N_{e-} rec_emu")
    # --- track multiplicity, full range (0-200) ---
    ntrkmin = 0
    ntrkmax = 200
    ntrkbins = 200
    histos.update({"h_ntrks_positrons": TH1D("h_ntrks_positrons", ";Positron multiplicity;N_{e+}/BX/Shot", ntrkbins, ntrkmin, ntrkmax)})
    histos.update({"h_ntrks_electrons": TH1D("h_ntrks_electrons", ";Electron multiplicity;N_{e-}/BX/Shot", ntrkbins, ntrkmin, ntrkmax)})
    histos.update({"h_ntrks_positrons_rec_emul": TH1D("h_ntrks_positrons_rec_emul", ";Positron multiplicity;N_{e+}/BX/Shot", ntrkbins, ntrkmin, ntrkmax)})
    histos.update({"h_ntrks_electrons_rec_emul": TH1D("h_ntrks_electrons_rec_emul", ";Electron multiplicity;N_{e-}/BX/Shot", ntrkbins, ntrkmin, ntrkmax)})
    # --- track multiplicity, zoomed ranges (0-50 "small", 0-25 "tiny") ---
    ntrkmax_small = 50
    ntrkbins_small = 50
    histos.update({"h_ntrks_positrons_small": TH1D("h_ntrks_positrons_small", ";Positron multiplicity;N_{e+}/BX/Shot", ntrkbins_small, ntrkmin, ntrkmax_small)})
    histos.update({"h_ntrks_electrons_small": TH1D("h_ntrks_electrons_small", ";Electron multiplicity;N_{e-}/BX/Shot", ntrkbins_small, ntrkmin, ntrkmax_small)})
    histos.update({"h_ntrks_positrons_rec_emul_small": TH1D("h_ntrks_positrons_rec_emul_small", ";Positron multiplicity;N_{e+}/BX/Shot", ntrkbins_small, ntrkmin, ntrkmax_small)})
    histos.update({"h_ntrks_electrons_rec_emul_small": TH1D("h_ntrks_electrons_rec_emul_small", ";Electron multiplicity;N_{e-}/BX/Shot", ntrkbins_small, ntrkmin, ntrkmax_small)})
    ntrkmax_tiny = 25
    ntrkbins_tiny = 25
    histos.update({"h_ntrks_positrons_tiny": TH1D("h_ntrks_positrons_tiny", ";Positron multiplicity;N_{e+}/BX/Shot", ntrkbins_tiny, ntrkmin, ntrkmax_tiny)})
    histos.update({"h_ntrks_electrons_tiny": TH1D("h_ntrks_electrons_tiny", ";Electron multiplicity;N_{e-}/BX/Shot", ntrkbins_tiny, ntrkmin, ntrkmax_tiny)})
    histos.update({"h_ntrks_positrons_rec_emul_tiny": TH1D("h_ntrks_positrons_rec_emul_tiny", ";Positron multiplicity;N_{e+}/BX/Shot", ntrkbins_tiny, ntrkmin, ntrkmax_tiny)})
    histos.update({"h_ntrks_electrons_rec_emul_tiny": TH1D("h_ntrks_electrons_rec_emul_tiny", ";Electron multiplicity;N_{e-}/BX/Shot", ntrkbins_tiny, ntrkmin, ntrkmax_tiny)})
    # --- energy spectra (range/binning depend on the global `proc`, see NOTE) ---
    Emin = 0
    Emax = 20 if (proc == "bppp") else 10
    Emax_full = 20
    Ebins = 80 if (proc == "bppp") else 40
    Ebins_fine = 200 if (proc == "bppp") else 100
    Ebins_full = 200
    histos.update({"h_E_positrons": TH1D("h_E_positrons", ";#it{E} [GeV];N_{e+}/BX/Shot", Ebins, Emin, Emax)})
    histos.update({"h_E_positrons_fine": TH1D("h_E_positrons_fine", ";#it{E} [GeV];N_{e+}/BX/Shot", Ebins_fine, Emin, Emax)})
    histos.update({"h_E_electrons": TH1D("h_E_electrons", ";#it{E} [GeV];N_{e-}/BX/Shot", Ebins, Emin, Emax)})
    histos.update({"h_E_electrons_fine": TH1D("h_E_electrons_fine", ";#it{E} [GeV];N_{e-}/BX/Shot", Ebins_fine, Emin, Emax)})
    histos.update({"h_E_positrons_rec_emul": TH1D("h_E_positrons_rec_emul", ";#it{E} [GeV];N_{e+}/BX/Shot", Ebins, Emin, Emax)})
    histos.update({"h_E_positrons_rec_emul_fine": TH1D("h_E_positrons_rec_emul_fine", ";#it{E} [GeV];N_{e+}/BX/Shot", Ebins_fine, Emin, Emax)})
    histos.update({"h_E_electrons_rec_emul": TH1D("h_E_electrons_rec_emul", ";#it{E} [GeV];N_{e-}/BX/Shot", Ebins, Emin, Emax)})
    histos.update({"h_E_electrons_rec_emul_fine": TH1D("h_E_electrons_rec_emul_fine", ";#it{E} [GeV];N_{e-}/BX/Shot", Ebins_fine, Emin, Emax)})
    histos.update({"h_E_positrons_full": TH1D("h_E_positrons_full", ";#it{E} [GeV];N_{e+}/BX/Shot", Ebins_full, Emin, Emax_full)})
    histos.update({"h_E_electrons_full": TH1D("h_E_electrons_full", ";#it{E} [GeV];N_{e-}/BX/Shot", Ebins_full, Emin, Emax_full)})
    histos.update({"h_E_positrons_rec_emul_full": TH1D("h_E_positrons_rec_emul_full", ";#it{E} [GeV];N_{e+}/BX/Shot", Ebins_full, Emin, Emax_full)})
    histos.update({"h_E_electrons_rec_emul_full": TH1D("h_E_electrons_rec_emul_full", ";#it{E} [GeV];N_{e-}/BX/Shot", Ebins_full, Emin, Emax_full)})
    # --- longitudinal momentum pz (same proc-dependent ranges as energy) ---
    pzmin = 0
    pzmax = 20 if (proc == "bppp") else 10
    pzmax_full = 20
    pzbins = 80 if (proc == "bppp") else 40
    pzbins_fine = 200 if (proc == "bppp") else 100
    pzbins_full = 200
    histos.update({"h_pz_positrons": TH1D("h_pz_positrons", ";#it{p}_{z} [GeV];N_{e+}/BX/Shot", pzbins, pzmin, pzmax)})
    histos.update({"h_pz_positrons_fine": TH1D("h_pz_positrons_fine", ";#it{p}_{z} [GeV];N_{e+}/BX/Shot", pzbins_fine, pzmin, pzmax)})
    histos.update({"h_pz_positrons_full": TH1D("h_pz_positrons_full", ";#it{p}_{z} [GeV];N_{e+}/BX/Shot", pzbins_full, pzmin, pzmax_full)})
    histos.update({"h_pz_electrons": TH1D("h_pz_electrons", ";#it{p}_{z} [GeV];N_{e-}/BX/Shot", pzbins, pzmin, pzmax)})
    histos.update({"h_pz_electrons_fine": TH1D("h_pz_electrons_fine", ";#it{p}_{z} [GeV];N_{e-}/BX/Shot", pzbins_fine, pzmin, pzmax)})
    histos.update({"h_pz_electrons_full": TH1D("h_pz_electrons_full", ";#it{p}_{z} [GeV];N_{e-}/BX/Shot", pzbins_full, pzmin, pzmax_full)})
    # --- transverse momentum py (narrow, symmetric window) ---
    pymin = -0.005
    pymax = +0.005
    pybins = 100
    pybins_fine = 200
    histos.update({"h_py_positrons": TH1D("h_py_positrons", ";#it{p}_{y} [GeV];N_{e+}/BX/Shot", pybins, pymin, pymax)})
    histos.update({"h_py_positrons_fine": TH1D("h_py_positrons_fine", ";#it{p}_{y} [GeV];N_{e+}/BX/Shot", pybins_fine, pymin, pymax)})
    histos.update({"h_py_electrons": TH1D("h_py_electrons", ";#it{p}_{y} [GeV];N_{e-}/BX/Shot", pybins, pymin, pymax)})
    histos.update({"h_py_electrons_fine": TH1D("h_py_electrons_fine", ";#it{p}_{y} [GeV];N_{e-}/BX/Shot", pybins_fine, pymin, pymax)})
    # --- transverse momentum px (same window as py) ---
    pxmin = -0.005
    pxmax = +0.005
    pxbins = 100
    pxbins_fine = 200
    histos.update({"h_px_positrons": TH1D("h_px_positrons", ";#it{p}_{x} [GeV];N_{e+}/BX/Shot", pxbins, pxmin, pxmax)})
    histos.update({"h_px_positrons_fine": TH1D("h_px_positrons_fine", ";#it{p}_{x} [GeV];N_{e+}/BX/Shot", pxbins_fine, pxmin, pxmax)})
    histos.update({"h_px_electrons": TH1D("h_px_electrons", ";#it{p}_{x} [GeV];N_{e-}/BX/Shot", pxbins, pxmin, pxmax)})
    histos.update({"h_px_electrons_fine": TH1D("h_px_electrons_fine", ";#it{p}_{x} [GeV];N_{e-}/BX/Shot", pxbins_fine, pxmin, pxmax)})
    # --- positron vertex maps (windows in cm via um2cm, tighter for bppp) ---
    rmax = 0.05 * um2cm if (process == "bppp") else 30. * um2cm
    zmax = 30 * um2cm if (process == "bppp") else 200. * um2cm
    histos.update({"h_xyVtx_positrons": TH2D("h_xyVtx_positrons", "Positrons vertex x:y;Vertex(x) [cm];Vertex(y) [cm];N_{e+}/BX/Shot", 200, -rmax, +rmax, 200, -rmax, +rmax)})
    histos.update({"h_zxVtx_positrons": TH2D("h_zxVtx_positrons", "Positrons vertex z:x;Vertex(z) [cm];Vertex(x) [cm];N_{e+}/BX/Shot", 200, -zmax, +zmax, 200, -rmax, +rmax)})
    histos.update({"h_zyVtx_positrons": TH2D("h_zyVtx_positrons", "Positrons vertex z:y;Vertex(z) [cm];Vertex(y) [cm];N_{e+}/BX/Shot", 200, -zmax, +zmax, 200, -rmax, +rmax)})
    ## sumw2
    # enable per-bin sum of squared weights for correct errors on all histograms
    for hname, hist in histos.items():
        hist.Sumw2()
def processFile(sample_name, verbose=False):
    """Add a per-event 'eventweightlumi' branch to the sample's Events tree.

    Opens <origin>/<sample>.root in UPDATE mode; for MC, computes the
    luminosity-equivalent scale Leq = LUMI * XS / genEv from the Runs tree
    and multiplies it with the per-event lhe/btag/trigger weights; for data
    (samples whose name contains '201') the weight is 1. The tree is
    rewritten in place with TObject.kOverwrite.

    Uses module-level globals: origin, LUMI, getXsec (defined elsewhere).
    """
    sample = sample_name.replace(".root", "")
    # data samples are identified by a year (e.g. '2017') in the name
    isMC = not '201' in sample
    # Unweighted input
    # treeFile_name = origin + '/' + sample_name
    # if not os.path.exists(treeFile_name):
    #     print ' WARNING: file', treeFile_name, 'does not exist, continuing'
    #     return True
    # Weighted output
    treeFile_name = origin + '/' + sample + '.root'
    #if os.path.exists(treeFile_name):
    #    print ' WARNING: weighted file exists, overwriting'
    #return True
    # Open file
    treeFile = TFile(treeFile_name, 'UPDATE')
    treeFile.cd()
    if isMC:
        # number of events: sum the per-run generated-event counts by
        # projecting the Runs tree into a single-bin histogram
        runTree = treeFile.Get('Runs')
        genH = TH1D("genH_%s" % sample, "", 1, 0, 0)
        genH.Sumw2()
        #runTree.Draw("genEventSumw>>genH_%s" % sample, "", "goff")
        runTree.Draw("genEventCount>>genH_%s" % sample, "", "goff")
        # mean * entries == sum of the projected branch values
        genEv = genH.GetMean() * genH.GetEntries()
        # Cross section
        XS = getXsec(sample)
        #SF = getSF(sample)
        Leq = LUMI * XS / genEv if genEv > 0 else 0.
    else:
        Leq = 1.
    print sample, ": Leq =", Leq
    # Variables declaration
    eventweightlumi = array('f', [1.0])  # global event weight with lumi
    # Looping over file content
    # Tree
    tree = treeFile.Get('Events')
    nev = tree.GetEntriesFast()
    # New branches
    eventweightlumiBranch = tree.Branch('eventweightlumi', eventweightlumi,
                                        'eventweightlumi/F')
    # looping over events
    for event in range(0, tree.GetEntries()):
        if verbose and (event % 10000 == 0 or event == nev - 1):
            print ' = TTree:', tree.GetName(), 'events:', nev, '\t', int(
                100 * float(event + 1) / float(nev)), '%\r',
        #print '.',#*int(20*float(event)/float(nev)),#printProgressBar(event, nev)
        tree.GetEntry(event)
        # Initialize
        eventweightlumi[0] = 1.
        # Weights
        if isMC:
            eventweightlumi[
                0] = Leq * tree.lheweight * tree.btagweight * tree.triggerweight  #tree.puweight
        else:
            eventweightlumi[0] = 1.
        # Fill the branches (only the new branch, keeping the rest intact)
        eventweightlumiBranch.Fill()
    tree.Write("", TObject.kOverwrite)
    if verbose:
        print ' '
    treeFile.Close()
def sample_pmt(df, ismcp, ishq, num):
    """Augment measured PMT parameter samples with `num` extra draws.

    Fills histograms from the measured distributions returned by
    read_alldata, draws `num` additional pseudo-samples from them (2D draws
    for the correlated gain/resolution and PDE/DCR pairs, 1D draws for the
    rest), and returns the measured+sampled arrays concatenated.

    Returns the serial-number list unchanged plus one concatenated array per
    parameter, in the same order as read_alldata.

    NOTE(review): ROOT.Double is used for GetRandom2 output arguments; it
    was removed in newer ROOT releases (ctypes.c_double is the replacement)
    -- confirm the ROOT version this runs against.
    """
    sn2, gain2, rsl2, pde2, dcr2, tts2, amp2, hv2, pvsv2, svsn2, riset2, fallt2, fwhm2 = read_alldata(
        df, ismcp, ishq)
    """ HQMCP require 3456 more added """
    """ DynodeMCP require 5 more added """
    """ Prepare sampling hists """
    # 2D hists capture correlations: gain vs resolution, PDE vs dark-count rate
    hqmcp_gainrsl_hist = TH2D("hqmcp_gainrsl_hist", "", 100, 9000000,
                              13000000, 30, 0.2, 0.5)
    hqmcp_pdedcr_hist = TH2D("hqmcp_pdedcr_hist", "", 100, 0, 50, 100, 0, 100)
    hqmcp_tts_hist = TH1D("tts", "", 100, 0, 50)
    hqmcp_amp_hist = TH1D("amp", "", 100, 0, 50)
    hqmcp_pvsv_hist = TH1D("pvsv", "", 100, 0, 30)
    hqmcp_svsn_hist = TH1D("svsn", "", 100, 0, 0.2)
    hqmcp_rise_hist = TH1D("rise", "", 100, 0, 50)
    hqmcp_fall_hist = TH1D("fall", "", 100, 0, 50)
    hqmcp_fwhm_hist = TH1D("fwhm", "", 100, 0, 50)
    hqmcp_hv_hist = TH1D("hv", "", 100, 0, 3000)
    # fill every histogram from the measured per-PMT values in lockstep
    for i, j, k, l, a, b, c, d, e, f, g, h in zip(gain2, rsl2, pde2, dcr2,
                                                  tts2, amp2, hv2, pvsv2,
                                                  svsn2, riset2, fallt2,
                                                  fwhm2):
        hqmcp_gainrsl_hist.Fill(i, j)
        hqmcp_pdedcr_hist.Fill(k, l)
        hqmcp_tts_hist.Fill(a)
        hqmcp_amp_hist.Fill(b)
        hqmcp_hv_hist.Fill(c)
        hqmcp_pvsv_hist.Fill(d)
        hqmcp_svsn_hist.Fill(e)
        hqmcp_rise_hist.Fill(f)
        hqmcp_fall_hist.Fill(g)
        hqmcp_fwhm_hist.Fill(h)
    gain2_add, rsl2_add, pde2_add, dcr2_add = [], [], [], []
    tts2_add, amp2_add, hv2_add, pvsv2_add, svsn2_add, riset2_add, fallt2_add, fwhm2_add = [], [], [], [], [], [], [], []
    # draw `num` pseudo-samples; GetRandom2 writes into its arguments by reference
    for idx in range(num):
        pde_sample, dcr_sample = ROOT.Double(0), ROOT.Double(0)
        hqmcp_pdedcr_hist.GetRandom2(pde_sample, dcr_sample)
        gain_sample, rsl_sample = ROOT.Double(0), ROOT.Double(0)
        hqmcp_gainrsl_hist.GetRandom2(gain_sample, rsl_sample)
        gain2_add.append(gain_sample)
        rsl2_add.append(rsl_sample)
        pde2_add.append(pde_sample)
        dcr2_add.append(dcr_sample)
        tts2_add.append(hqmcp_tts_hist.GetRandom())
        amp2_add.append(hqmcp_amp_hist.GetRandom())
        hv2_add.append(hqmcp_hv_hist.GetRandom())
        pvsv2_add.append(hqmcp_pvsv_hist.GetRandom())
        svsn2_add.append(hqmcp_svsn_hist.GetRandom())
        riset2_add.append(hqmcp_rise_hist.GetRandom())
        fallt2_add.append(hqmcp_fall_hist.GetRandom())
        fwhm2_add.append(hqmcp_fwhm_hist.GetRandom())
    # convert each sampled list to a numpy array and append to the measured data
    gain2_add = np.array(gain2_add)
    gain2_all = np.concatenate((gain2, gain2_add))
    rsl2_add = np.array(rsl2_add)
    rsl2_all = np.concatenate((rsl2, rsl2_add))
    pde2_add = np.array(pde2_add)
    pde2_all = np.concatenate((pde2, pde2_add))
    dcr2_add = np.array(dcr2_add)
    dcr2_all = np.concatenate((dcr2, dcr2_add))
    tts2_add = np.array(tts2_add)
    tts2_all = np.concatenate((tts2, tts2_add))
    amp2_add = np.array(amp2_add)
    amp2_all = np.concatenate((amp2, amp2_add))
    hv2_add = np.array(hv2_add)
    hv2_all = np.concatenate((hv2, hv2_add))
    pvsv2_add = np.array(pvsv2_add)
    pvsv2_all = np.concatenate((pvsv2, pvsv2_add))
    svsn2_add = np.array(svsn2_add)
    svsn2_all = np.concatenate((svsn2, svsn2_add))
    riset2_add = np.array(riset2_add)
    riset2_all = np.concatenate((riset2, riset2_add))
    fallt2_add = np.array(fallt2_add)
    fallt2_all = np.concatenate((fallt2, fallt2_add))
    fwhm2_add = np.array(fwhm2_add)
    fwhm2_all = np.concatenate((fwhm2, fwhm2_add))
    return sn2, gain2_all, rsl2_all, pde2_all, dcr2_all, tts2_all, amp2_all, hv2_all, pvsv2_all, svsn2_all, riset2_all, fallt2_all, fwhm2_all
class EudetData:
    """A container for TBTrack data.

    Wraps the ROOT track/pixel trees of one test-beam run and provides
    clustering, hot-pixel masking, track-cluster matching, residual
    computation, and reading/writing of reconstructed ntuples.
    """

    # NOTE(review): all of the attributes below are class-level (shared)
    # defaults; most are rebound per-instance in __init__/getEvent, but the
    # mutable ones (hit_map, frequency_map, hotpixels) are shared across
    # instances until explicitly reassigned.
    RunNumber = 0
    tbtrack_file = 0            # TFile handle, set in __init__
    pixelTree = ROOT.TTree()
    TrackTree = ROOT.TTree()
    Chi2 = TH1D()
    Chi2ndof = TH1D()
    Chi2_Cut = 10000000         # tracks with chi2 above this are dropped
    EnergyCut = 0.
    scale = 1.
    p_nEntries = 0              # entries in the pixel tree
    t_nEntries = 0              # entries in the track tree
    entry = 0
    sigmaX = 0.005
    sigmaY = 0.005

    # Track data holders (rebound to tree branches by getEvent)
    t_nTrackParams = 0
    t_euEv = 0
    t_posX = 0
    t_posY = 0
    t_dxdz = 0
    t_dydz = 0
    t_iden = 0
    t_trackNum = 0
    t_chi2 = 0
    t_ndof = 0

    # Pixel data holders (rebound to tree branches by getEvent)
    p_nHits = 0
    p_col = 0
    p_row = 0
    p_tot = 0
    p_lv1 = 0
    p_chip = 0
    p_iden = 0
    p_euEv = 0
    p_energyGC = []             # per-pixel energy, global calibration
    p_energyPbPC = []           # per-pixel energy, pixel-by-pixel calibration
    edge = 0

    # Hot-pixel firing matrices.
    # NOTE(review): built as [row-list for each of npix_Y] but indexed as
    # hit_map[col][row] elsewhere — only correct for square sensors
    # (npix_X == npix_Y); confirm.
    hit_map = [[0 for x in xrange(npix_X)] for x in xrange(npix_Y)]
    frequency_map = [[0 for x in xrange(npix_X)] for x in xrange(npix_Y)]
    hotpixels = []
    mode = ""

    def __init__(self, filename, ECut, edge=0, scale=1.0, Run=0,
                 mode="tbtrack"):
        """Open `filename` and attach the track/pixel trees.

        mode "tbtrack" reads raw trees (names depend on SensorType);
        mode "pyEudetNTuple" reads already-reconstructed tracks/clusters.
        Any other mode terminates the process.
        """
        self.RunNumber = Run
        self.edge = edge
        self.AllClusters = []   # AllClusters[event] -> list of Cluster
        self.AllTracks = []     # AllTracks[event]  -> list of Track
        self.mode = mode
        self.scale = scale
        self.tbtrack_file = TFile(filename)
        print "Opening %s" % filename
        if (self.mode == "tbtrack"):
            print "Reading in tbtrack mode"
            if (SensorType == "Timepix3" or SensorType == "CLICpix"):
                self.TrackTree = self.tbtrack_file.Get("tracks")
                self.pixelTree = self.tbtrack_file.Get("rawdata")
            else:
                self.TrackTree = self.tbtrack_file.Get("eutracks")
                self.pixelTree = self.tbtrack_file.Get("zspix")
            self.p_nEntries = self.pixelTree.GetEntries()
            self.t_nEntries = self.TrackTree.GetEntries()
        elif (self.mode == "pyEudetNTuple"):
            print "Reading in pyEudetNTuple mode"
            self.TrackTree = self.tbtrack_file.Get("tracks")
            self.pixelTree = self.tbtrack_file.Get("clusters")
            self.TrackTree.Print()
            self.pixelTree.Print()
            self.p_nEntries = self.pixelTree.GetEntries()
            self.t_nEntries = self.TrackTree.GetEntries()
        else:
            print "Wrong mode. Exiting ..."
            exit()
        self.EnergyCut = ECut
        # NOTE(review): [[0.0]*npix_Y]*npix_X makes npix_X references to ONE
        # shared row, so frequency_map[i][j] writes alias across all i —
        # likely a bug; a nested comprehension (as for hit_map) would fix it.
        self.frequency_map = [[0.0] * npix_Y] * npix_X
        # Reset the (class-level, shared) hit map for this run.
        for i in range(len(self.hit_map)):
            for j in range(len(self.hit_map[0])):
                self.hit_map[i][j] = 0

    def GetChi2Cut(self, reduction_factor=0.95, applyChiCut=True):
        """Compute the chi2 value that keeps `reduction_factor` of all tracks.

        Fills self.Chi2 / self.Chi2ndof from the track tree and, when
        applyChiCut is True, sets self.Chi2_Cut accordingly (otherwise it is
        left effectively disabled at 1e9). Returns (Chi2, Chi2ndof) histos.
        """
        self.TrackTree.Draw("chi2 >> chi2plot(1000,0,1000)", "", "goff")
        self.Chi2 = gROOT.FindObject("chi2plot")
        self.TrackTree.Draw("chi2/ndof >> chi2ndofplot(1000,0,100)", "", "goff")
        self.Chi2ndof = gROOT.FindObject("chi2ndofplot")
        totIntegral = reduction_factor * self.Chi2.Integral()
        aBin = 0
        # Walk up the cumulative distribution until the target fraction.
        while (self.Chi2.Integral(0, aBin) < totIntegral):
            aBin += 1
        self.Chi2_Cut = 1000000000
        if (applyChiCut == True):
            print "Cutting at Chi2 = %f" % (aBin * self.Chi2.GetBinWidth(0))
            self.Chi2_Cut = aBin * self.Chi2.GetBinWidth(0)
        return self.Chi2, self.Chi2ndof

    def FindHotPixel(self, threshold, Nevents=-1, filename="hotpixels.txt"):
        # will calculate the frequency with which each pixel fires
        # threshold (0 -> 1) defines hot pixel cut
        # saves hot pixels to text file
        n_max = 0
        prev_pixel_xhits = [999, 999]
        unique_events = 0
        histo_nhits = TH1D("nhit", "N Pixel Fires", 40, 0, 40)
        histo_hitpixel = TH2D("hit", "Hit Pixel Map", 256, 0, 256, 256, 0, 256)
        histo_frequency = TH1D("freq", "Pixel Firing Frequency", 10000, 0, 1)
        histo_hotpixel = TH2D("hot", "Hot Pixel Map", 256, 0, 256, 256, 0, 256)
        # Pick how many events to parse; at least 10000 are needed for a
        # statistically meaningful 0.01 threshold.
        if Nevents > self.p_nEntries or Nevents == -1:
            n_max = self.p_nEntries
        elif Nevents < 10000 and self.p_nEntries >= 10000:
            print "FindHotPixel over-riding requested nevents"
            print "FindHotPixel must be run on atleast 10000 events (for a threshold of 0.01) to be accurate"
            print "FindHotPixel will use 10000 events"
            n_max = 10000
        elif Nevents < 10000 and self.p_nEntries < 10000:
            print "FindHotPixel over-riding requested nevents"
            print "FindHotPixel must be run on atleast 10000 events (for a threshold of 0.01) to be accurate"
            print "FindHotPixel will use as many events as exist in this run"
            n_max = self.p_nEntries
        else:
            n_max = Nevents
        # loop through events to find unique events
        # for each fired pixel in each event, increment hit map
        for i in range(n_max):
            self.getEvent(i)
            if i % 10000 == 0:
                print " [Hot Pixel Finder] Parsing event %i" % i
            # is this a new frame, or the next event in the same frame?
            npixels_hit = len(self.p_col)
            pixel_x_hits = []
            for k in xrange(npixels_hit):
                pixel_x_hits.append(self.p_col[k])
            if (pixel_x_hits == prev_pixel_xhits):
                # another track in the same event
                continue
            else:
                # this is a new event
                unique_events = unique_events + 1
                prev_pixel_xhits = pixel_x_hits
                for j in range(len(self.p_row)):
                    self.hit_map[self.p_col[j]][self.p_row[j]] += 1
                    histo_hitpixel.Fill(self.p_col[j], self.p_row[j])
        # loop through hitmap
        # fill freq map with hits / nevents
        print "Ran over", n_max, "events, found", unique_events, "unique pixel maps"
        for i in range(npix_X):
            for j in range(npix_Y):
                self.frequency_map[i][j] = self.hit_map[i][j] * (
                    1.0 / float(unique_events))
                histo_nhits.Fill(self.hit_map[i][j])
                histo_frequency.Fill(self.frequency_map[i][j])
                # if freq > threshold, make a hotpixel
                if (self.frequency_map[i][j] > threshold):
                    histo_hotpixel.Fill(i, j, self.frequency_map[i][j])
                    self.hotpixels.append([i, j])
        # Persist the hot pixel list as a Python literal (read back with
        # ast.literal_eval in LoadHotPixel).
        f = open(filename, 'w')
        f.write("%s" % self.hotpixels)
        f.close()
        print "##### Hot Pixel Report #####"
        print " %i Hot pixel found at : " % (len(self.hotpixels))
        print self.hotpixels
        print "############################"
        return histo_nhits, histo_hitpixel, histo_hotpixel, histo_frequency

    def LoadHotPixel(self, filename):
        # to load the hot pixels recorded in a text file
        f = open(filename, 'r')
        hotpixels_as_str = f.readline()
        self.hotpixels = ast.literal_eval(hotpixels_as_str)
        f.close()
        print "##### Hot Pixels set from file #####"
        print " %i Hot pixel set : " % (len(self.hotpixels))
        print self.hotpixels
        print "####################################"

    def getEvent(self, i):
        """Load entry `i` of both trees into the t_* / p_* holders.

        Also shifts the track positions by half a pitch (in place, on the
        tree's own buffers) and computes per-pixel calibrated energies.
        """
        self.entry = self.TrackTree.GetEntry(i)
        self.t_nTrackParams = self.TrackTree.nTrackParams
        self.t_euEv = self.TrackTree.euEvt
        self.t_posX = self.TrackTree.xPos
        self.t_posY = self.TrackTree.yPos
        self.t_dxdz = self.TrackTree.dxdz
        self.t_dydz = self.TrackTree.dydz
        self.t_iden = self.TrackTree.iden
        self.t_trackNum = self.TrackTree.trackNum
        self.t_chi2 = self.TrackTree.chi2
        self.t_ndof = self.TrackTree.ndof
        # Re-centre track coordinates on pixel centres (mutates the branch
        # buffers in place).
        for index, Xval in enumerate(self.t_posX):
            self.t_posX[index] += pitchX / 2.
            self.t_posY[index] += pitchY / 2.
        self.entry = self.pixelTree.GetEntry(i)
        self.p_nHits = self.pixelTree.nPixHits
        self.p_col = self.pixelTree.col
        self.p_row = self.pixelTree.row
        self.p_tot = self.pixelTree.tot
        #self.p_lv1= self.pixelTree.lv1 removing for Timepix3
        #self.p_chip= self.pixelTree.chip
        self.p_iden = self.pixelTree.iden
        self.p_euEv = self.pixelTree.euEvt
        self.p_energyGC = []
        self.p_energyPbPC = []
        if Assembly != "AssemblyNotDefined":
            # Convert TOT to energy with the surrogate-function inversion,
            # once with global and once with per-pixel calibration constants.
            for tot, col, row in zip(self.p_tot, self.p_col, self.p_row):
                self.p_energyGC.append(
                    (globalCalib_t * globalCalib_a + tot - globalCalib_b +
                     sqrt((globalCalib_b + globalCalib_t * globalCalib_a -
                           tot)**2 + 4 * globalCalib_a * globalCalib_c)) /
                    (2 * globalCalib_a))  # energy in keV
                calib_denom = 2 * pixelCalib_a[col][row]
                calib_sqrtterm = (
                    pixelCalib_b[col][row] +
                    pixelCalib_t[col][row] * pixelCalib_a[col][row] - tot
                )**2 + 4 * pixelCalib_a[col][row] * pixelCalib_c[col][row]
                # Guard against negative discriminant / zero denominator.
                if (calib_sqrtterm > 0.) and (calib_denom != 0):
                    self.p_energyPbPC.append(
                        (pixelCalib_t[col][row] * pixelCalib_a[col][row] +
                         tot - pixelCalib_b[col][row] +
                         sqrt(calib_sqrtterm)) / calib_denom)  # energy in keV
                else:
                    self.p_energyPbPC.append(0.)
                    print "Math error during pixel calibration, this pixel energy set to 0."
        else:
            # No calibration available: zero energies, one entry per pixel.
            for tot, col, row in zip(self.p_tot, self.p_col, self.p_row):
                self.p_energyGC.append(0.)
                self.p_energyPbPC.append(0.)
        # for index,totvalue in enumerate(self.p_tot) :
        #     self.p_tot[index]=float(totvalue)/self.scale

    def PlotFrame(self, i, c, n_pix_min=0):
        """Interactively display the pixel frame of event `i` on canvas `c`."""
        plot = TH2D("frame %i" % i, "frame %i" % i, npix_X, 0, npix_X,
                    npix_Y, 0, npix_Y)
        self.getEvent(i)
        if (len(self.p_col) > n_pix_min):
            for j in xrange(len(self.p_col)):
                plot.Fill(self.p_col[j], self.p_row[j], self.p_tot[j])
            plot.Draw("colz")
            c.Update()
            print "press enter for next frame, ctrl-D to exit"
            a = raw_input()
        else:
            print "Skipping event %i, does not have more than minimum number of hits (%i)" % (
                i, n_pix_min)

    def WriteReconstructedData(self, filename, dut=6):
        """Write tracks and clusters to a new ROOT file `filename`."""
        outfile = TFile(filename, 'recreate')
        self.DumpTrackTree(outfile, dut)
        self.DumpClusterTree(outfile, dut)
        outfile.Close()

    def ReadReconstructedData(self, NEvents=-1):
        """Populate AllTracks/AllClusters from a pyEudetNTuple file."""
        print "Reading data ..."
        self.ReadTrackTree()
        self.ReadClusterTree()

    def ReadTrackTree(self, NEvents=-1):
        """Fill self.AllTracks (indexed by event) from the track tree."""
        event = 0
        nentries = self.TrackTree.GetEntriesFast()
        events = []
        # First pass: find the largest event number to size AllTracks.
        for i in xrange(nentries):
            self.entry = self.TrackTree.GetEntry(i)
            events.append(self.TrackTree.event)
        for i in range(max(events) + 1):
            self.AllTracks.append([])
        print "%i events in track file" % max(events)
        # Second pass: one Track object per tree entry.
        for i in xrange(nentries):
            self.entry = self.TrackTree.GetEntry(i)
            track = Track()
            for i in range(self.TrackTree.size):
                track.trackX.append(self.TrackTree.trackX[i])
                track.trackY.append(self.TrackTree.trackY[i])
                track.chi2.append(self.TrackTree.chi2[i])
                track.ndof.append(self.TrackTree.ndof[i])
                track.trackNum.append(self.TrackTree.trackNum[i])
                track.dxdz.append(self.TrackTree.dxdz[i])
                track.dydz.append(self.TrackTree.dydz[i])
                track.iden.append(self.TrackTree.iden[i])
            event = self.TrackTree.event
            track.cluster = self.TrackTree.cluster
            self.AllTracks[event].append(track)

    def ReadClusterTree(self):
        """Fill self.AllClusters (indexed by event) from the cluster tree."""
        event = 0
        events = []
        nentries = self.pixelTree.GetEntriesFast()
        # First pass: size AllClusters by the largest event number.
        for i in xrange(nentries):
            self.entry = self.pixelTree.GetEntry(i)
            events.append(self.pixelTree.event)
        for i in range(max(events) + 1):
            self.AllClusters.append([])
        print "%i events in cluster file" % max(events)
        # Second pass: one Cluster object per tree entry.
        for i in xrange(nentries):
            self.entry = self.pixelTree.GetEntry(i)
            cluster = Cluster()
            for i in range(self.pixelTree.size):
                cluster.col.append(self.pixelTree.col[i])
                cluster.row.append(self.pixelTree.row[i])
                cluster.tot.append(self.pixelTree.tot[i])
                cluster.energyGC.append(self.pixelTree.energyGC[i])
                cluster.energyPbPC.append(self.pixelTree.energyPbPC[i])
            event = self.pixelTree.event
            cluster.sizeX = self.pixelTree.sizeX
            cluster.sizeY = self.pixelTree.sizeY
            cluster.size = self.pixelTree.size
            cluster.totalTOT = self.pixelTree.totalTOT
            cluster.totalEnergyGC = self.pixelTree.totalEnergyGC
            cluster.totalEnergyPbPC = self.pixelTree.totalEnergyPbPC
            cluster.aspectRatio = self.pixelTree.aspectRatio
            cluster.relX = self.pixelTree.relX
            cluster.relY = self.pixelTree.relY
            cluster.absX = self.pixelTree.absX
            cluster.absY = self.pixelTree.absY
            cluster.resX = self.pixelTree.resX
            cluster.resY = self.pixelTree.resY
            cluster.relX_energyGC = self.pixelTree.relX_energyGC
            cluster.relY_energyGC = self.pixelTree.relY_energyGC
            cluster.absX_energyGC = self.pixelTree.absX_energyGC
            cluster.absY_energyGC = self.pixelTree.absY_energyGC
            cluster.resX_energyGC = self.pixelTree.resX_energyGC
            cluster.resY_energyGC = self.pixelTree.resY_energyGC
            cluster.relX_energyPbPC = self.pixelTree.relX_energyPbPC
            cluster.relY_energyPbPC = self.pixelTree.relY_energyPbPC
            cluster.absX_energyPbPC = self.pixelTree.absX_energyPbPC
            cluster.absY_energyPbPC = self.pixelTree.absY_energyPbPC
            cluster.resX_energyPbPC = self.pixelTree.resX_energyPbPC
            cluster.resY_energyPbPC = self.pixelTree.resY_energyPbPC
            cluster.id = self.pixelTree.id
            cluster.trackNum = self.pixelTree.trackNum
            self.AllClusters[event].append(cluster)

    def DumpTrackTree(self, outfile, dut=6):
        """Write self.AllTracks to a 'tracks' TTree in `outfile`."""
        outfile.cd()
        trackTree = TTree('tracks', 'TestBeam track tree')
        nplanes = 7
        # Fixed-size array buffers backing the branches.
        trackX = array('f', nplanes * [0.])
        trackY = array('f', nplanes * [0.])
        iden = array('i', nplanes * [0])
        chi2 = array('f', nplanes * [0.])
        event = array('i', [0])
        ndof = array('f', nplanes * [0.])
        trackNum = array('i', nplanes * [0])
        dxdz = array('f', nplanes * [0.])
        dydz = array('f', nplanes * [0.])
        cluster = array('i', [0])
        clusterX = array('f', [0.])
        clusterY = array('f', [0.])
        size = array('i', [0])
        trackTree.Branch('size', size, 'size/I')
        trackTree.Branch('trackX', trackX, 'trackX[size]/F')
        trackTree.Branch('trackY', trackY, 'trackY[size]/F')
        trackTree.Branch('iden', iden, 'iden[size]/I')
        trackTree.Branch('chi2', chi2, 'chi2[size]/F')
        trackTree.Branch('event', event, 'event/I')
        trackTree.Branch('ndof', ndof, 'ndof[size]/F')
        trackTree.Branch('trackNum', trackNum, 'trackNum[size]/I')
        trackTree.Branch('dxdz', dxdz, 'dxdz[size]/F')
        trackTree.Branch('dydz', dydz, 'dydz[size]/F')
        trackTree.Branch('cluster', cluster, 'cluster/I')
        trackTree.Branch('clusterX', clusterX, 'clusterX/F')
        trackTree.Branch('clusterY', clusterY, 'clusterY/F')
        for j, tracks in enumerate(self.AllTracks):
            for track in tracks:
                size[0] = nplanes
                for index, t in enumerate(track.trackX):
                    trackX[index] = t
                for index, t in enumerate(track.trackY):
                    trackY[index] = t
                for index, t in enumerate(track.chi2):
                    chi2[index] = t
                for index, t in enumerate(track.ndof):
                    ndof[index] = t
                for index, t in enumerate(track.trackNum):
                    trackNum[index] = t
                for index, t in enumerate(track.dxdz):
                    dxdz[index] = t
                for index, t in enumerate(track.dydz):
                    dydz[index] = t
                for index, t in enumerate(track.iden):
                    iden[index] = t
                event[0] = j
                cluster[0] = track.cluster
                # -11 marks "no matched cluster" (see FindMatchedCluster).
                if track.cluster != -11 and len(self.AllClusters[j]) != 0:
                    clusterX[0] = self.AllClusters[j][track.cluster].absX
                    clusterY[0] = self.AllClusters[j][track.cluster].absY
                else:
                    clusterX[0] = -1000
                    clusterY[0] = -1000
                trackTree.Fill()
        outfile.Write()

    def DumpClusterTree(self, outfile, dut=6):
        """Write self.AllClusters to a 'clusters' TTree in `outfile`."""
        outfile.cd()
        clusterTree = TTree('clusters', 'Timepix cluster tree')
        maxn = 500  # per-cluster pixel buffers are truncated at this size
        col = array('i', maxn * [0])
        row = array('i', maxn * [0])
        tot = array('f', maxn * [0.])
        energyGC = array('f', maxn * [0.])
        energyPbPC = array('f', maxn * [0.])
        event = array('i', [0])
        sizeX = array('i', [0])
        sizeY = array('i', [0])
        size = array('i', [0])
        totalTOT = array('f', [0.])
        totalEnergyGC = array('f', [0.])
        totalEnergyPbPC = array('f', [0.])
        aspectRatio = array('f', [0.])
        relX = array('f', [0.])
        relY = array('f', [0.])
        absX = array('f', [0.])
        absY = array('f', [0.])
        resX = array('f', [0.])
        resY = array('f', [0.])
        relX_energyGC = array('f', [0.])
        relY_energyGC = array('f', [0.])
        absX_energyGC = array('f', [0.])
        absY_energyGC = array('f', [0.])
        resX_energyGC = array('f', [0.])
        resY_energyGC = array('f', [0.])
        relX_energyPbPC = array('f', [0.])
        relY_energyPbPC = array('f', [0.])
        absX_energyPbPC = array('f', [0.])
        absY_energyPbPC = array('f', [0.])
        resX_energyPbPC = array('f', [0.])
        resY_energyPbPC = array('f', [0.])
        trackX = array('f', [0.])
        trackY = array('f', [0.])
        # NOTE(review): 'id' buffer uses typecode 'f' but its branch is
        # declared 'id/I' below — type mismatch, confirm intended.
        id = array('f', [0.])
        trackNum = array('i', [0])
        clusterTree.Branch('event', event, 'event/I')
        clusterTree.Branch('size', size, 'size/I')
        clusterTree.Branch('sizeX', sizeX, 'sizeX/I')
        clusterTree.Branch('sizeY', sizeY, 'sizeY/I')
        clusterTree.Branch('totalTOT', totalTOT, 'totalTOT/F')
        clusterTree.Branch('totalEnergyGC', totalEnergyGC, 'totalEnergyGC/F')
        clusterTree.Branch('totalEnergyPbPC', totalEnergyPbPC,
                           'totalEnergyPbPC/F')
        clusterTree.Branch('aspectRatio', aspectRatio, 'aspectRatio/F')
        clusterTree.Branch('relX', relX, 'relX/F')
        clusterTree.Branch('relY', relY, 'relY/F')
        clusterTree.Branch('absX', absX, 'absX/F')
        clusterTree.Branch('absY', absY, 'absY/F')
        clusterTree.Branch('resX', resX, 'resX/F')
        clusterTree.Branch('resY', resY, 'resY/F')
        clusterTree.Branch('relX_energyGC', relX_energyGC, 'relX_energyGC/F')
        clusterTree.Branch('relY_energyGC', relY_energyGC, 'relY_energyGC/F')
        clusterTree.Branch('absX_energyGC', absX_energyGC, 'absX_energyGC/F')
        clusterTree.Branch('absY_energyGC', absY_energyGC, 'absY_energyGC/F')
        clusterTree.Branch('resX_energyGC', resX_energyGC, 'resX_energyGC/F')
        clusterTree.Branch('resY_energyGC', resY_energyGC, 'resY_energyGC/F')
        clusterTree.Branch('relX_energyPbPC', relX_energyPbPC,
                           'relX_energyPbPC/F')
        clusterTree.Branch('relY_energyPbPC', relY_energyPbPC,
                           'relY_energyPbPC/F')
        clusterTree.Branch('absX_energyPbPC', absX_energyPbPC,
                           'absX_energyPbPC/F')
        clusterTree.Branch('absY_energyPbPC', absY_energyPbPC,
                           'absY_energyPbPC/F')
        clusterTree.Branch('resX_energyPbPC', resX_energyPbPC,
                           'resX_energyPbPC/F')
        clusterTree.Branch('resY_energyPbPC', resY_energyPbPC,
                           'resY_energyPbPC/F')
        clusterTree.Branch('trackX', trackX, 'trackX/F')
        clusterTree.Branch('trackY', trackY, 'trackY/F')
        clusterTree.Branch('id', id, 'id/I')
        clusterTree.Branch('trackNum', trackNum, 'trackNum/I')
        clusterTree.Branch('col', col, 'col[size]/I')
        clusterTree.Branch('row', row, 'row[size]/I')
        clusterTree.Branch('tot', tot, 'tot[size]/F')
        clusterTree.Branch('energyGC', energyGC, 'energyGC[size]/F')
        clusterTree.Branch('energyPbPC', energyPbPC, 'energyPbPC[size]/F')
        for j, clusters in enumerate(self.AllClusters):
            for cluster in clusters:
                # Copy at most maxn pixels into the branch buffers.
                if (len(cluster.col) < maxn):
                    for i in range(len(cluster.col)):
                        col[i] = cluster.col[i]
                    for i in range(len(cluster.row)):
                        row[i] = cluster.row[i]
                    for i in range(len(cluster.tot)):
                        tot[i] = cluster.tot[i]
                    for i in range(len(cluster.energyGC)):
                        energyGC[i] = cluster.energyGC[i]
                    for i in range(len(cluster.energyPbPC)):
                        energyPbPC[i] = cluster.energyPbPC[i]
                else:
                    for i in range(maxn):
                        col[i] = cluster.col[i]
                    for i in range(maxn):
                        row[i] = cluster.row[i]
                    for i in range(maxn):
                        tot[i] = cluster.tot[i]
                    for i in range(maxn):
                        energyGC[i] = cluster.energyGC[i]
                    for i in range(maxn):
                        energyPbPC[i] = cluster.energyPbPC[i]
                sizeX[0] = cluster.sizeX
                sizeY[0] = cluster.sizeY
                size[0] = cluster.size
                totalTOT[0] = cluster.totalTOT
                totalEnergyGC[0] = cluster.totalEnergyGC
                totalEnergyPbPC[0] = cluster.totalEnergyPbPC
                aspectRatio[0] = cluster.aspectRatio
                relX[0] = cluster.relX
                relY[0] = cluster.relY
                resX[0] = cluster.resX
                resY[0] = cluster.resY
                absX[0] = cluster.absX
                absY[0] = cluster.absY
                relX_energyGC[0] = cluster.relX_energyGC
                relY_energyGC[0] = cluster.relY_energyGC
                resX_energyGC[0] = cluster.resX_energyGC
                resY_energyGC[0] = cluster.resY_energyGC
                absX_energyGC[0] = cluster.absX_energyGC
                absY_energyGC[0] = cluster.absY_energyGC
                relX_energyPbPC[0] = cluster.relX_energyPbPC
                relY_energyPbPC[0] = cluster.relY_energyPbPC
                resX_energyPbPC[0] = cluster.resX_energyPbPC
                resY_energyPbPC[0] = cluster.resY_energyPbPC
                absX_energyPbPC[0] = cluster.absX_energyPbPC
                absY_energyPbPC[0] = cluster.absY_energyPbPC
                id[0] = cluster.id
                event[0] = j
                trackNum[0] = cluster.tracknum
                if (cluster.tracknum != -1):
                    # Record the matched track's position at the DUT plane;
                    # fall back to 0 if the DUT plane is absent.
                    try:
                        trackX[0] = self.AllTracks[j][cluster.tracknum].trackX[
                            self.AllTracks[j][cluster.tracknum].iden.index(
                                dut)]
                        trackY[0] = self.AllTracks[j][cluster.tracknum].trackY[
                            self.AllTracks[j][cluster.tracknum].iden.index(
                                dut)]
                    except:
                        trackX[0] = 0
                        trackY[0] = 0
                clusterTree.Fill()
        outfile.Write()

    def IsInEdges(self, track, dut=6):
        """True when the track hits the edge band around the main matrix."""
        is_in = False
        if (fabs(track.trackX[track.iden.index(dut)]) <=
            (halfChip_X + self.edge) and
                fabs(track.trackY[track.iden.index(dut)]) <=
                (halfChip_Y + self.edge)):
            is_in = True
        # Exclude the main matrix itself.
        if (fabs(track.trackX[track.iden.index(dut)]) <= (halfChip_X) and
                fabs(track.trackY[track.iden.index(dut)]) <= (halfChip_Y)):
            is_in = False
        return is_in

    def IsInMain(self, track, dut=6):
        """True when the track hits the main pixel matrix at the DUT."""
        if (fabs(track.trackX[track.iden.index(dut)]) <= (halfChip_X) and
                fabs(track.trackY[track.iden.index(dut)]) <= (halfChip_Y)):
            return True
        else:
            return False

    def ComputeResiduals(self, i, dut=6):
        """Compute residuals for matched track/cluster pairs of event `i`.

        Returns (n matched in main matrix, n matched in edge region).
        """
        nmatch_in_main = 0.
        nmatch_in_edge = 0.
        for track in self.AllTracks[i]:
            if (i < len(self.AllClusters)):
                for cluster in self.AllClusters[i]:
                    if cluster.id == track.cluster:
                        cluster.GetResiduals(
                            track.trackX[track.iden.index(dut)],
                            track.trackY[track.iden.index(dut)])
                        if (self.IsInMain(track, dut)):
                            nmatch_in_main += 1.
                        elif (self.IsInEdges(track, dut)):
                            nmatch_in_edge += 1.
        return nmatch_in_main, nmatch_in_edge

    def PrintResiduals(self, i):
        """Print residuals of all clusters of event `i` w.r.t. plane 3."""
        print "###################### Event : %d ######################" % i
        self.getEvent(i)
        for cluster in self.AllClusters[i]:
            cluster.GetResiduals(self.t_posX[3], self.t_posY[3])
            print "resX = %f resY = %f" % (cluster.resX, cluster.resY)
        print "#######################################################"

    def PrintEvent(self, i):
        """Print the plane-3 track position of event `i`."""
        self.getEvent(i)
        print "###################### Event : %d ######################" % i
        outstr = ""
        print "posX = %f posY = %f" % (self.t_posX[3], self.t_posY[3])
        print "#######################################################"

    def GetTrack(self, i):
        """Split entry `i` of the track tree into per-track Track objects.

        Positions are shifted to chip-centred coordinates; tracks failing
        self.Chi2_Cut are dropped. The result is appended to self.AllTracks.
        """
        self.getEvent(i)
        posX_tmp = []
        posY_tmp = []
        dxdz_tmp = []
        dydz_tmp = []
        iden_tmp = []
        chi2_tmp = []
        ndof_tmp = []
        trackNum_tmp = []
        nTrackParams_tmp = 0
        tracks = []
        # Re-bind the branch buffers (redundant with getEvent, kept as-is).
        self.t_nTrackParams = self.TrackTree.nTrackParams
        self.t_euEv= self.TrackTree.euEvt
        self.t_posX = self.TrackTree.xPos
        self.t_posY= self.TrackTree.yPos
        self.t_dxdz= self.TrackTree.dxdz
        self.t_dydz= self.TrackTree.dydz
        self.t_iden= self.TrackTree.iden
        self.t_trackNum= self.TrackTree.trackNum
        self.t_chi2= self.TrackTree.chi2
        self.t_ndof = self.TrackTree.ndof
        # Copy the flat per-plane branch vectors into Python lists.
        for data in self.t_posX:
            posX_tmp.append(data)
        for data in self.t_posY:
            posY_tmp.append(data)
        for data in self.t_iden:
            iden_tmp.append(data)
        for data in self.t_dxdz:
            dxdz_tmp.append(data)
        for data in self.t_dydz:
            dydz_tmp.append(data)
        for data in self.t_chi2:
            chi2_tmp.append(data)
        for data in self.t_ndof:
            ndof_tmp.append(data)
        for data in self.t_trackNum:
            trackNum_tmp.append(data)
        nTrackParams_tmp = self.t_nTrackParams
        if len(trackNum_tmp) > 0:
            # The flat vectors hold all tracks back to back; ndata is the
            # number of planes per track.
            for j in range(max(trackNum_tmp) + 1):
                aTrack = Track()
                ndata = nTrackParams_tmp / (max(trackNum_tmp) + 1)
                aTrack.trackX = posX_tmp[j * ndata:j * ndata + ndata]
                aTrack.trackY = posY_tmp[j * ndata:j * ndata + ndata]
                # Shift to chip-centred coordinates.
                for index, element in enumerate(aTrack.trackX):
                    aTrack.trackX[index] = aTrack.trackX[
                        index] - halfChip_X - pitchX / 2.
                    aTrack.trackY[index] = aTrack.trackY[
                        index] - halfChip_Y - pitchY / 2.
                aTrack.iden = iden_tmp[j * ndata:j * ndata + ndata]
                aTrack.chi2 = chi2_tmp[j * ndata:j * ndata + ndata]
                aTrack.trackNum = trackNum_tmp[j * ndata:j * ndata + ndata]
                aTrack.ndof = ndof_tmp[j * ndata:j * ndata + ndata]
                aTrack.dxdz = dxdz_tmp[j * ndata:j * ndata + ndata]
                aTrack.dydz = dydz_tmp[j * ndata:j * ndata + ndata]
                if (aTrack.chi2[0] < self.Chi2_Cut):
                    tracks.append(aTrack)
        self.AllTracks.append(tracks)

    # Spare per-track data holders; nothing in this file reads them.
    # NOTE(review): presumably leftovers — confirm before removing.
    trackX=[]
    trackY=[]
    chi2=0.
    event=0
    ndof=0
    iden=[]
    trackNum=[]
    cluster=0

    def FindMatchedCluster(self, i, r_max, dut=6, distances_histo=None,
                           filter_cluster=True, TrackingRes=0.003):
        # find the clusters closest to the tracks in this event
        # clusters are matched to tracks using GetPixelResiduals
        # r_max: maximum radial distance allowed between track and any pixel of the cluster
        try:
            clusters_tmp = self.AllClusters[i]
        except:
            clusters_tmp = []
        matched_clusters = []
        good_count = 0
        for track in self.AllTracks[i]:
            if len(clusters_tmp) != 0:
                dut_iden = track.iden.index(dut)
                distances = []
                for cluster in clusters_tmp:
                    mdr, mdx, mdy = cluster.GetPixelResiduals(
                        track.trackX[dut_iden], track.trackY[dut_iden])
                    distances.append(mdr)
                    if distances_histo:
                        distances_histo.Fill(mdr)
                cluster = clusters_tmp[distances.index(min(distances))]
                if (min(distances) < r_max):
                    # matched cluster
                    cluster.id = good_count
                    track.cluster = cluster.id
                    cluster.tracknum = track.trackNum[dut_iden]
                    matched_clusters.append(cluster)
                    good_count += 1
                else:
                    # unmatched cluster
                    track.cluster = -11
            else:
                # no clusters
                track.cluster = -11
        if (filter_cluster):
            # Keep only matched clusters for this event.
            if (i < len(self.AllClusters)):
                self.AllClusters[i] = matched_clusters
        else:
            self.AllClusters[i] = clusters_tmp

    def DoPatternRecognition(self, i, tolerance, scale=1):
        """Prototype pattern-recognition between tracks and clusters.

        NOTE(review): unfinished — uses undefined name 'scaler' (presumably
        'scale'), so calling this raises NameError; the matching body is
        still to be implemented.
        """
        trackDistX = []
        clusterDistX = []
        trackDistY = []
        clusterDistY = []
        tmp_track_X = []
        tmp_track_Y = []
        for ind in range(i, i + scaler):
            for track in self.AllTracks[ind]:
                tmp_track_X.append(track.trackX[3])
                tmp_track_Y.append(track.trackY[3])
        allTrack_tmp = []
        allCluster_tmp = []
        for tracks in self.AllTracks[i:i + scale]:
            for track in tracks:
                allTrack_tmp.append(track)
        allCluster_tmp = self.AllClusters[i]
        pattern = TH2D("", "", 14000, -npix_Y * pitchY / 2,
                       npix_Y * pitchY / 2,
                       len(allCluster_tmp) + len(allTrack_tmp), 0,
                       len(allCluster_tmp) + len(allTrack_tmp))
        count = 0
        for index, cluster in enumerate(allCluster_tmp):
            pass

    def ComputePosition(self, i, method="QWeighted", sigma=0.003,
                        sigmaGC=0.003, sigmaPbPC=0.003):
        """Recompute centroids of event `i` clusters with the given method."""
        if (i < len(self.AllClusters)):
            for cluster in self.AllClusters[i]:
                cluster.Statistics()
                if (method == "QWeighted"):
                    cluster.GetQWeightedCentroid()
                elif (method == "DigitalCentroid"):
                    cluster.GetDigitalCentroid()
                elif (method == "maxTOT"):
                    cluster.GetMaxTOTCentroid()
                elif (method == "EtaCorrection"):
                    cluster.GetEtaCorrectedQWeightedCentroid(
                        sigma, sigmaGC, sigmaPbPC)

    def ClusterEvent(self, i, method="QWeighted", sigma=0.003, sigmaGC=0.003,
                     sigmaPbPC=0.003):
        """Cluster the pixels of event `i` and append to self.AllClusters."""
        self.getEvent(i)
        row_tmp = [s for s in self.p_row]
        col_tmp = [s for s in self.p_col]
        tot_tmp = [s for s in self.p_tot]
        energyGC_tmp = [s for s in self.p_energyGC]
        energyPbPC_tmp = [s for s in self.p_energyPbPC]
        # ------------------------------------------------------------------------------------#
        # Temporary solution for pixels hit several times. Include TOA in the future analysis
        # ------------------------------------------------------------------------------------#
        if (SensorType == "Timepix3" or SensorType == "CLICpix"):
            # Drop every pixel address that occurs more than once.
            indexPixelsToRemove = []
            for index in range(0, len(row_tmp)):
                row_temp = row_tmp[index]
                col_temp = col_tmp[index]
                for index2 in range(index + 1, len(row_tmp)):
                    if (row_temp == row_tmp[index2]
                            and col_temp == col_tmp[index2]):
                        indexPixelsToRemove.append(index)
                        indexPixelsToRemove.append(index2)
            row_tmp = [
                row_tmp[k] for k in range(0, len(row_tmp))
                if k not in indexPixelsToRemove
            ]
            col_tmp = [
                col_tmp[k] for k in range(0, len(col_tmp))
                if k not in indexPixelsToRemove
            ]
            tot_tmp = [
                tot_tmp[k] for k in range(0, len(tot_tmp))
                if k not in indexPixelsToRemove
            ]
            energyGC_tmp = [
                energyGC_tmp[k] for k in range(0, len(energyGC_tmp))
                if k not in indexPixelsToRemove
            ]
            energyPbPC_tmp = [
                energyPbPC_tmp[k] for k in range(0, len(energyPbPC_tmp))
                if k not in indexPixelsToRemove
            ]
        # ------------------------------------------------------------------------------------#
        # remove hot pixels
        hpindex = 0
        if len(self.hotpixels) > 0:
            while (hpindex < len(row_tmp)):
                if ([col_tmp[hpindex], row_tmp[hpindex]] in self.hotpixels):
                    col_tmp.pop(hpindex)
                    row_tmp.pop(hpindex)
                    tot_tmp.pop(hpindex)
                    energyGC_tmp.pop(hpindex)
                    energyPbPC_tmp.pop(hpindex)
                else:
                    hpindex += 1
        # set a maximum number of hit pixels to be clustered (skips large events)
        if len(col_tmp) < 5000:
            # NOTE(review): the bare except also hides SciPyClustering's
            # unbound-'clusters' NameError for empty events — confirm.
            try:
                clusters = self.SciPyClustering(col_tmp, row_tmp, tot_tmp,
                                                energyGC_tmp, energyPbPC_tmp)
            except:
                clusters = []
        else:
            print "Event", i, "not beng clustered,", len(col_tmp), "hit pixels"
            clusters = []
        for cluster in clusters:
            cluster.Statistics()
        # Keep only clusters with charge.
        clusters = [cluster for cluster in clusters if cluster.totalTOT > 0]
        clusterid = 0
        for cluster in clusters:
            if (method == "QWeighted"):
                cluster.GetQWeightedCentroid()
            elif (method == "DigitalCentroid"):
                cluster.GetDigitalCentroid()
            elif (method == "maxTOT"):
                cluster.GetMaxTOTCentroid()
            elif (method == "EtaCorrection"):
                cluster.GetEtaCorrectedQWeightedCentroid(
                    sigma, sigmaGC, sigmaPbPC)
            cluster.id = clusterid
            clusterid += 1
        cluster = 0
        self.AllClusters.append(clusters)
        del clusters

    def SciPyClustering(self, col, row, tot, energyGC, energyPbPC):
        """Cluster pixels via scipy hierarchical clustering (8-connectivity).

        NOTE(review): with zero pixels 'clusters' is never bound and the
        return raises NameError (swallowed by ClusterEvent's except).
        """
        pixels = [[col[i], row[i]] for i, x in enumerate(col)]
        if (len(pixels) > 1):
            # sqrt(2) distance groups diagonal neighbours into one cluster.
            result = fclusterdata(pixels, sqrt(2.), criterion="distance")
            clusters = [Cluster() for i in range(max(result))]
            [
                clusters[x - 1].addPixel(col[j], row[j], tot[j], energyGC[j],
                                         energyPbPC[j])
                for j, x in enumerate(result)
            ]
        else:
            if (len(pixels) == 1):
                c = Cluster()
                c.addPixel(col[0], row[0], tot[0], energyGC[0], energyPbPC[0])
                clusters = [c]
        return clusters

    def RecursiveClustering(self, row, col, tot):
        """Seed-and-grow clustering; consumes the input lists in place."""
        clusters = []
        while (len(row) != 0):
            cluster = Cluster()
            cluster.addPixel(col[0], row[0], tot[0])
            row.pop(0)
            col.pop(0)
            tot.pop(0)
            # Grow until no neighbouring pixel is left to absorb.
            while (self.addNeighbor(cluster, col, row, tot) > 0):
                pass
            clusters.append(cluster)
        return clusters

    def FixedFrameClustering(self, X, Y, TOT):
        """Label-propagation clustering on a fixed 256x256 frame."""
        frame = [[0 for i in xrange(256)] for j in xrange(256)]
        totframe = [[0 for i in xrange(256)] for j in xrange(256)]
        # Mark hit pixels with -1 and remember their TOT.
        for i, x in enumerate(X):
            frame[X[i]][Y[i]] = -1
            totframe[X[i]][Y[i]] = TOT[i]
        cluster_number = 1
        # Assign cluster labels by scanning each hit pixel's 8-neighbourhood.
        for i, j in [[i, j] for i, j in product(xrange(256), xrange(256))
                     if frame[i][j] == -1]:
            for u, v in [
                [u, v] for u, v in product([-1, 0, 1], [-1, 0, 1])
                    if (((i + u >= 0 and i + u <= 255) and
                         (j + v >= 0 and j + v <= 255)) and (u != 0 or v != 0))
            ]:
                if (frame[i + u][j + v] == -1):
                    frame[i][j] = cluster_number
                    frame[i + u][j + v] = cluster_number
                    cluster_number += 1
                elif (frame[i + u][j + v] > 0):
                    frame[i][j] = frame[i + u][j + v]
        # Collect labelled pixels into Cluster objects.
        clusters = {}
        for i, j in [[i, j] for i, j in product(xrange(256), xrange(256))
                     if frame[i][j] > 0]:
            try:
                clusters[frame[i][j]].addPixel(i, j, totframe[i][j])
            except KeyError:
                clusters[frame[i][j]] = Cluster()
                clusters[frame[i][j]].addPixel(i, j, totframe[i][j])
        del frame
        del totframe
        return clusters.values()

    def addNeighbor(self, cluster, col, row, tot):
        """Move every pixel adjacent to `cluster` from the input lists into it.

        Returns the number of pixels absorbed (0 terminates the caller's
        grow loop). Mutates col/row/tot in place.
        """
        counter = 0
        i = 0
        j = 0
        len_col = len(col)
        len_clu_col = len(cluster.col)
        while (i < len_col):
            j = 0
            while (j < len_clu_col):
                # Squared differences > 1 mean not adjacent on that axis.
                if ((col[i] - cluster.col[j])**2 > 1):
                    j += 1
                    continue
                if ((row[i] - cluster.row[j])**2 > 1):
                    j += 1
                    continue
                cluster.addPixel(col[i], row[i], tot[i])
                col.pop(i)
                row.pop(i)
                tot.pop(i)
                counter += 1
                # Compensate for the pop so the next pixel is not skipped.
                i += -1
                len_col = len(col)
                len_clu_col = len(cluster.col)
                break
            i += 1
        return counter

    def PrintClusters(self, i):
        """Print clusters of event `i` whose total TOT is below EnergyCut."""
        print "########## Event %d ##########" % i
        for j, c in enumerate(self.AllClusters[i]):
            if (c.totalTOT < self.EnergyCut):
                print "##### Cluster %d #####" % j
                c.Print()

    def PrintTBranchElement(self):
        """Debug dump of xPos/trackNum/nTrackParams for every tree entry."""
        for event_i in self.TrackTree:
            print "new event........................"
            print self.TrackTree.xPos
            print "number of entries : "
            print self.TrackTree.xPos.size()
            for entry_j in range(0, self.TrackTree.xPos.size()):
                print self.TrackTree.xPos[entry_j]
            print "trackNum : "
            print self.TrackTree.trackNum.size()
            for entry_j in range(0, self.TrackTree.trackNum.size()):
                print self.TrackTree.trackNum[entry_j]
            print "nTrackParams : "
            print self.TrackTree.nTrackParams
listoffiles[4].insert(0, file) elif (file.endswith(".root") and file.startswith("MC8TeV_TTJets_178v5")): listoffiles[5].insert(0, file) elif (file.endswith(".root") and file.startswith("MC8TeV_TTJets_MSDecays_scaleup")): listoffiles[6].insert(0, file) elif (file.endswith(".root") and file.startswith("MC8TeV_TTJets_MSDecays_scaledown")): listoffiles[7].insert(0, file) for x in range(0, 6): ts_correct.append( TH1D("emu_deltar_correct_" + listofnames[x], "test stacked histograms", 50, 0., 150.)) ts_wrong.append( TH1D("emu_deltar_wrong_" + listofnames[x], "test stacked histograms", 50, 0., 150.)) ts_correctmass.append( TH1D("emu_minmass_correct_" + listofnames[x], "test stacked histograms", 50, 0., 150.)) ts_wrongmass.append( TH1D("emu_minmass_wrong_" + listofnames[x], "test stackhsed histograms", 50, 0., 150.)) ts_correct_topweight.append( TH1D("emu_deltar_topweight_correct_" + listofnames[x], "test stacked histograms", 50, 0., 150.)) ts_wrong_topweight.append( TH1D("emu_deltar_topweight_wrong_" + listofnames[x],
def main(argv):
    """Produce reaction-code stacked momentum plots for the P0D samples.

    Reads an MC (prefit/postfit) ROOT file and a data ROOT file, builds a
    per-sample THStack of MC reaction-code groups with the data overlaid and
    a data/MC ratio panel beneath, and prints every canvas into one
    multi-page PDF plus stand-alone pdf/png/eps copies per sample.

    NOTE(review): the `argv` parameter is never used — the files are opened
    from sys.argv[0] and sys.argv[1].  sys.argv[0] is normally the script
    path, not a ROOT file; this looks like it should be sys.argv[1] and
    sys.argv[2] (or argv[0]/argv[1]).  Confirm against the call site before
    changing.
    """
    # Install the shared plotting style (INTERFACE is a module-level helper).
    P0DBANFFStyle = INTERFACE.GetThisStyle()
    INTERFACE.SetStyle(P0DBANFFStyle.GetName())
    OUTPUTFILENAME = "BANFFReactionCodeStacksOfficial_Joint2018_Numode_Prefit_P0DOnly_Momentum.pdf"
    inputFile = TFile(sys.argv[0])
    dataFile = TFile(sys.argv[1])
    # The trailing "[" opens the multi-page PDF without emitting a page.
    dummyPage = TCanvas()
    dummyPage.Print(OUTPUTFILENAME + "[")
    # Histogram name stems for the four selections stored in the input files.
    sampleNames = [
        "P0D_Air_NuMu_CC_CC1Track", "P0D_Air_NuMu_CC_CCNTracks",
        "P0D_Water_NuMu_CC1Track", "P0D_Water_NuMu_CCNTracks"
    ]
    # Stems used for the per-sample output file names.
    fileNameRoots = [
        "P0D_Air_NuMu_CC1Track", "P0D_Air_NuMu_CCNTracks",
        "P0D_Water_NuMu_CC1Track", "P0D_Water_NuMu_CCNTracks"
    ]
    # Legend labels; one combined stack histogram per reaction-code group.
    stackHistos = [
        "#nu CCQE", "#nu CC 2p-2h", "#nu CC Res 1#pi", "#nu CC Coh 1#pi",
        "#nu CC Other", "#nu NC modes", "#bar{#nu} modes"
    ]
    P0DBANFFStyle.SetTitleBorderSize(0)
    P0DBANFFStyle.SetTitleX(0.02)
    P0DBANFFStyle.SetTitleY(0.95)
    P0DBANFFStyle.SetTitleW(0.3)
    P0DBANFFStyle.SetTitleH(0.07)
    # Align left, centered in y)
    P0DBANFFStyle.SetTitleAlign(11)
    # Custom palette indices (presumably registered by the style) + solid fill.
    stackColours = [1300, 1302, 1303, 1304, 1305, 1308, 1310]
    stackFillStyle = [1001, 1001, 1001, 1001, 1001, 1001, 1001]
    """ The graphs will be of the form <sampleName>_rxnPredMC_<rxnCode+100>
    So for each sample, we'll loop from 0-200, and if the THnD is not NULL
    when we try and get it, add it to a list for stacking.
    for sampleName in sampleNames:
    """
    for ii in xrange(0, len(sampleNames)):
        sampleName = sampleNames[ii]
        # Total the number of events for this sample going into the histogram.
        # (Remember: Sand is excluded here.)
        mcSampleTotal = 0.0
        # Now make the set of histograms to show, which combine several
        # interaction types.
        # Load in the data histogram and use it to assemble the TH2Ds for the
        # MC, as well as making it one of the histograms on the stack.
        dataHist = dataFile.Get(sampleName + "_data").Projection(1, 0)
        # Momentum (x) projections feeding the data/MC ratio panel.
        dataMomProj = dataFile.Get(sampleName + "_data").Projection(
            1, 0).ProjectionX("dataratio" + sampleName + "_px", 1,
                              dataHist.GetNbinsX())
        dataMomProj.SetDirectory(0)
        dataMomProj.SetTitle("")
        prefitMomProj = inputFile.Get(sampleName + "_prefit").Projection(
            1, 0).ProjectionX("prefitratio" + sampleName + "_px", 1,
                              dataHist.GetNbinsX())
        prefitMomProj.SetDirectory(0)
        # NOTE(review): postfitMomProj is built here but never used below.
        postfitMomProj = inputFile.Get(sampleName + "_postfit_0_0").Projection(
            1, 0).ProjectionX("postfitratio" + sampleName + "_px", 1,
                              dataHist.GetNbinsX())
        postfitMomProj.SetDirectory(0)
        # Convert dataMomProj in place into the data/prefit-MC ratio.
        for ibx in xrange(1, dataMomProj.GetNbinsX() + 1):
            dataMomProj.SetBinContent(
                ibx,
                dataMomProj.GetBinContent(ibx) /
                prefitMomProj.GetBinContent(ibx))
            # NOTE(review): the content was already divided on the line above,
            # so this error is sqrt(ratio)/MC rather than sqrt(N_data)/N_MC,
            # and the "Data"/"Ratio" labels in the print below are likewise
            # stale ("Data" already holds the ratio).  Confirm intent.
            dataMomProj.SetBinError(
                ibx,
                ROOT.Math.sqrt(dataMomProj.GetBinContent(ibx)) /
                prefitMomProj.GetBinContent(ibx))
            print "Data = " + str(
                dataMomProj.GetBinContent(ibx)) + " MC = " + str(
                    prefitMomProj.GetBinContent(ibx)) + " Ratio = " + str(
                        dataMomProj.GetBinContent(ibx) /
                        prefitMomProj.GetBinContent(ibx))
        dataMomProj.GetYaxis().SetRangeUser(0.75, 1.25)
        dataMomProj.GetXaxis().SetRangeUser(MINMOMENTUM, MAXMOMENTUM)
        dataMomProj.GetXaxis().SetTitle("Reconstructed muon momentum (MeV/c)")
        dataMomProj.GetYaxis().SetTitle("Data / Sim.")
        # There is only one data plot for each stack, so:
        # Momentum projection
        # Assemble them here.
        dataOneDimPlots = []
        dataOneDimPlots.append(
            dataHist.ProjectionX("data" + sampleName + "_px", 1,
                                 dataHist.GetNbinsY()))
        dataNXBins = dataHist.GetXaxis().GetNbins()
        dataxarray = dataHist.GetXaxis().GetXbins().GetArray()
        # Now, loop through all the theta bins and add the slices in.
        for iby in xrange(1, dataHist.GetNbinsY() + 1):
            thetaLow = dataHist.GetYaxis().GetBinLowEdge(iby)
            thetaHigh = dataHist.GetYaxis().GetBinUpEdge(iby)
            thetaString = " < cos# theta < "
            thetaString = str(thetaLow) + thetaString + str(thetaHigh)
            dataOneDimPlots.append(
                TH1D("dataslice_" + sampleName + str(iby), thetaString,
                     dataNXBins, dataxarray))
            # Now, go through the x bins in this slice and fill the plot we
            # just created.
            for ibx in xrange(1, dataHist.GetNbinsX() + 1):
                # Get the bin dimensions for dividing purposes to area
                # normalize.
                dataOneDimPlots[len(dataOneDimPlots) - 1].SetBinContent(
                    ibx, dataHist.GetBinContent(ibx, iby))
        # Now that the dataOneDimPlots array is filled, loop through and area
        # normalize the bins.
        for dataOneDimHist in dataOneDimPlots:
            for ibx in xrange(1, dataOneDimHist.GetNbinsX() + 1):
                # Bin width in units of 100 MeV/c -> "Events/(100 MeV/c)".
                mpdim = (dataOneDimHist.GetXaxis().GetBinUpEdge(ibx) -
                         dataOneDimHist.GetXaxis().GetBinLowEdge(ibx)) / 100.0
                dataOneDimHist.SetBinError(
                    ibx, math.sqrt(dataOneDimHist.GetBinContent(ibx)) / mpdim)
                dataOneDimHist.SetBinContent(
                    ibx, dataOneDimHist.GetBinContent(ibx) / mpdim)
        # One empty TH2D per reaction-code group, binned exactly like data.
        histosToStack = []
        for histo in stackHistos:
            histosToStack.append(
                TH2D(sampleName + "_" + histo, sampleName + "_" + histo,
                     dataHist.GetXaxis().GetNbins(),
                     dataHist.GetXaxis().GetXbins().GetArray(),
                     dataHist.GetYaxis().GetNbins(),
                     dataHist.GetYaxis().GetXbins().GetArray()))
        # Now pull the histograms from the file, and add their content to the
        # relevant combined histogram.  CorrespondingIndex (defined elsewhere)
        # maps a raw reaction code to its stack-group slot.
        for i in xrange(0, 200):
            if inputFile.Get(sampleName + "_rxnPredMC_" + str(i)):
                histosToStack[CorrespondingIndex(i)].Add(
                    inputFile.Get(sampleName + "_rxnPredMC_" +
                                  str(i)).Projection(1, 0))
        """
        # OK, so now we have an array of 2D histograms for this sample that we'd
        # like to stack. Need to divide it up into individual ones. Do Momentum
        # Projection, then momentum in theta slices. Normalize by bin area.
        # For storing all the one dimensional plots for this sample.
        """
        oneDimPlots = []
        # Add nothing for the momentum projection, will append the thetaStrings
        # as needed.
        stackTitles = []
        for i in xrange(0, len(stackHistos)):
            # For storing all the one dimensional plots for this reaction code.
            oneDimPlotsThisReac = []
            nameBase = histosToStack[i].GetTitle()
            oneDimPlotsThisReac.append(histosToStack[i].ProjectionX(
                nameBase + "_px", 1, histosToStack[i].GetNbinsY()))
            stackTitles.append("Momentum projection")
            # Area normalize this now.
            NXBins = histosToStack[i].GetXaxis().GetNbins()
            for ibx in xrange(1, NXBins + 1):
                mpdim = (histosToStack[i].GetXaxis().GetBinUpEdge(ibx) -
                         histosToStack[i].GetXaxis().GetBinLowEdge(ibx)) / 100.0
                oneDimPlotsThisReac[0].SetBinContent(
                    ibx, oneDimPlotsThisReac[0].GetBinContent(ibx) / mpdim)
            mcSampleTotal += oneDimPlotsThisReac[0].GetSumOfWeights()
            # Now that the x bins are filled, loop on to the next histogram.
            # With all the y-slices done, oneDimPlotsThisReac is now full.
            # Append it to oneDimPlots.
            oneDimPlots.append(oneDimPlotsThisReac)
        """" At this point in time, oneDimPlots has a momentum projection and
        theta slices for each reaction code grouping in this sample. So now we
        just need to make TStacks for them and print them out to PDF. The stack
        contains the plot from the same index of each entry of oneDimPlots.
        They should all have the same length, so use the first one to loop
        through (number of slices plus 1 full momenutm projection.)
        """
        for odi in xrange(0, len(oneDimPlots[0])):
            # For each one create a TStack
            sampleStack = THStack(
                "mpStack_" + sampleName + str(odi),
                ";Reconstructed muon momentum (MeV/c);Events/(100 MeV/c)")
            leg = TLegend(0.7, 0.15, 0.94, 0.93)
            leg.SetFillColor(0)
            # Make data the first entry.
            leg.AddEntry(dataOneDimPlots[odi], "Data", "LEP")
            dataOneDimPlots[odi].SetMarkerStyle(20)
            dataOneDimPlots[odi].SetMarkerColor(kBlack)
            dataOneDimPlots[odi].SetLineColor(kBlack)
            for odj in xrange(0, len(oneDimPlots)):
                # Apply the appropriate plot options from the beginning.
                oneDimPlots[odj][odi].SetFillColor(stackColours[odj])
                oneDimPlots[odj][odi].SetFillStyle(stackFillStyle[odj])
                # Don't want P0DBANFFStyle to interfere.
                oneDimPlots[odj][odi].SetMarkerStyle(1)
                sampleStack.Add(oneDimPlots[odj][odi])
                print oneDimPlots[odj][odi].GetTitle() + " " + str(
                    oneDimPlots[odj][odi].GetSumOfWeights())
                leg.AddEntry(oneDimPlots[odj][odi], stackHistos[odj], "F")
            # Two pads: stacked MC + data on top, data/MC ratio below.
            c1 = TCanvas()
            upperPad = TPad("upperPad", "upperPad", .05, .35, .96, .95)
            lowerPad = TPad("lowerPad", "lowerPad", .05, .1, .96, .35)
            upperPad.Draw()
            lowerPad.Draw()
            upperPad.cd()
            gPad.SetBottomMargin(1e-5)
            c1.SetTicks(1, 1)
            # Draw first: a THStack's axes only exist after Draw().
            sampleStack.Draw("")
            dataOneDimPlots[odi].Draw("PEsame")
            sampleStack.SetMaximum(
                max(sampleStack.GetMaximum(),
                    FindPlotMax(dataOneDimPlots[odi], dataOneDimPlots[odi])))
            leg.Draw()
            sampleStack.GetXaxis().SetRangeUser(MINMOMENTUM, MAXMOMENTUM)
            sampleStack.GetXaxis().SetTitle("")
            # Hide the upper pad's x axis; the ratio pad carries it instead.
            sampleStack.GetXaxis().SetTickLength(0)
            sampleStack.GetXaxis().SetLabelSize(0)
            sampleStack.SetTitle("# nu-mode")
            lowerPad.cd()
            ROOT.gPad.SetTopMargin(1e-5)
            ROOT.gPad.SetBottomMargin(0.35)
            ROOT.gPad.SetTickx()
            dataMomProj.SetLineColor(2)
            dataMomProj.SetMarkerColor(2)
            dataMomProj.GetYaxis().SetNdivisions(8, 1)
            # Enlarge labels/titles because the ratio pad is much shorter.
            dataMomProj.GetYaxis().SetLabelSize(
                dataMomProj.GetYaxis().GetLabelSize() + 0.07)
            dataMomProj.GetYaxis().SetTitleSize(
                dataMomProj.GetYaxis().GetLabelSize() + 0.05)
            dataMomProj.GetYaxis().SetTitleOffset(0.335)
            dataMomProj.GetXaxis().SetLabelSize(
                dataMomProj.GetXaxis().GetLabelSize() + 0.08)
            dataMomProj.GetXaxis().SetTitleSize(
                dataMomProj.GetXaxis().GetLabelSize() + 0.04)
            dataMomProj.GetXaxis().SetTitleOffset(0.9)
            dataMomProj.GetXaxis().SetTickLength(0.04)
            dataMomProj.Draw("PE")
            # Reference line at ratio = 1.
            line = TLine(0.0, 1.0, 5000.0, 1.0)
            line.SetLineWidth(2)
            line.SetLineColor(1)
            line.Draw("SAME")
            c1.Modified()
            c1.Update()
            # Append this canvas as one page of the multi-page PDF...
            c1.Print(OUTPUTFILENAME)
            # ...and write stand-alone copies.  NOTE(review): outFileName does
            # not include `odi`, so every slice iteration overwrites the same
            # pdf/png/eps files and only the last one survives — the full set
            # is only preserved inside OUTPUTFILENAME.  Confirm intended.
            outFileName = fileNameRoots[sampleNames.index(
                sampleName)] + "_mumom_rxn_postfit"
            c1.Print(outFileName + ".pdf")
            c1.Print(outFileName + ".png")
            c1.Print(outFileName + ".eps")
    # The trailing "]" closes the multi-page PDF.
    dummyPage.Print(OUTPUTFILENAME + ']')
elif (file.endswith(".root") and file.startswith("MC8TeV_TTJets_MSDecays_172v5")): listoffiles[1].insert(0, file) elif (file.endswith(".root") and file.startswith("MC8TeV_TTJets_MSDecays_scaleup")): listoffiles[2].insert(0, file) listofnames = [] listofnames.append("scaledown") listofnames.append("172v5") listofnames.append("scaleup") for x in listofnames: ts_correct.append( TH1D("deltar_correct" + x, "test stacked histograms", 50, 0., 150.)) ts_correctmass.append( TH1D("minmass_correct" + x, "test stacked histograms", 50, 0., 150.)) # Open the files -> adds all samples together for i in range(0, len(listoffiles)): ts_correct[i].Sumw2() ts_correctmass[i].Sumw2() rootfile1 = [] for x in listoffiles[i]: #inv.m. l + sv rootfile1.append(TFile.Open("./../lxyplots/" + x, "READ")) #deltar histo_correct = []