def getTH3F(self, lumi, name, var, nbinx, xmin, xmax, nbiny, ymin, ymax,
            nbinz, zmin, zmax, cut, options, xlabel, ylabel, zlabel, extraWeight):
    if (xmin == xmax) and (ymax == ymin) and (zmax == zmin):
        h = TH3F(name, "", len(nbinx) - 1, array('d', nbinx),
                 len(nbiny) - 1, array('d', nbiny),
                 len(nbinz) - 1, array('d', nbinz))
    else:
        h = TH3F(name, "", nbinx, xmin, xmax, nbiny, ymin, ymax, nbinz, zmin, zmax)
    h.Sumw2()
    h.GetXaxis().SetTitle(xlabel)
    h.GetYaxis().SetTitle(ylabel)
    h.GetZaxis().SetTitle(zlabel)
    for s in self.samples:
        AuxName = "auxT3_block" + s.name
        haux = s.getTH3F(lumi, AuxName, var, nbinx, xmin, xmax, nbiny, ymin, ymax,
                         nbinz, zmin, zmax, cut, options, xlabel, ylabel, zlabel, extraWeight)
        h.Add(haux)
        del haux
    return h
def drawSignleClosure(filenames, plot, commonCut, info):
    from ROOT import TH3F
    import array

    ptBins = readAxisConf("photons[0].ptJet()")[2]
    htBins = readAxisConf("ht")[2]
    metBins = readAxisConf("met")[2]
    ptBinsAr = array.array('d', ptBins)
    htBinsAr = array.array('d', htBins)
    metBinsAr = array.array('d', metBins)

    gname = randomName()
    fname = randomName()
    sname = randomName()
    goHist3d = TH3F(gname, "3d prediction", len(ptBins) - 1, ptBinsAr,
                    len(htBins) - 1, htBinsAr, len(metBins) - 1, metBinsAr)
    foHist3d = TH3F(fname, "3d prediction", len(ptBins) - 1, ptBinsAr,
                    len(htBins) - 1, htBinsAr, len(metBins) - 1, metBinsAr)
    sHist3d = TH3F(sname, "3d prediction", len(ptBins) - 1, ptBinsAr,
                   len(htBins) - 1, htBinsAr, len(metBins) - 1, metBinsAr)
    for h in goHist3d, foHist3d, sHist3d:
        h.Sumw2()

    for filename in filenames:
        goTree = readTree(filename, "photonTree")
        goTree.Draw("met:ht:photons[0].ptJet()>>+%s" % gname, "weight", "goff")
        foTree = readTree(filename, "photonJetTree")
        foTree.AddFriend("foWeights", filename)
        foTree.Draw("met:ht:photons[0].ptJet()>>+%s" % fname, "weight*w_qcd", "goff")
        foTree.Draw("met:ht:photons[0].ptJet()>>+%s" % sname, "weight*w_qcd_error", "goff")

    for ptIndex in range(0, len(ptBins) - 1):
        for htIndex in range(0, len(htBins) - 1):
            gHist = goHist3d.ProjectionZ(randomName(), ptIndex + 1, ptIndex + 1,
                                         htIndex + 1, htIndex + 1, "e")
            fHist = foHist3d.ProjectionZ(randomName(), ptIndex + 1, ptIndex + 1,
                                         htIndex + 1, htIndex + 1, "e")
            sHist = sHist3d.ProjectionZ(randomName(), ptIndex + 1, ptIndex + 1,
                                        htIndex + 1, htIndex + 1, "e")
            drawTwoHists(gHist, fHist, sHist, "test_%02d_%02d" % (ptIndex, htIndex),
                         (ptBins[ptIndex], ptBins[ptIndex + 1],
                          htBins[htIndex], htBins[htIndex + 1]))
def getTH3F(self, lumi, name, var, nbinx, xmin, xmax, nbiny, ymin, ymax,
            nbinz, zmin, zmax, cut, options, xlabel, ylabel, zlabel):
    if (xmin == xmax) and (ymax == ymin) and (zmax == zmin):
        h = TH3F(name, "", len(nbinx) - 1, array('d', nbinx),
                 len(nbiny) - 1, array('d', nbiny),
                 len(nbinz) - 1, array('d', nbinz))
    else:
        h = TH3F(name, "", nbinx, xmin, xmax, nbiny, ymin, ymax, nbinz, zmin, zmax)
    h.Sumw2()
    h.GetXaxis().SetTitle(xlabel)
    h.GetYaxis().SetTitle(ylabel)
    h.GetZaxis().SetTitle(zlabel)
    if self.isData == 0:
        cut = cut + "* ( " + str(self.lumWeight * lumi) + " * genWeight/abs(genWeight) )"
    self.ttree.Project(name, var, cut, options)
    return h
def _parallel_mocker(process, region, expressions, binnings):
    # Extract binnings
    edges = tuple((b.edges() for b in binnings))

    # Create a unique name and title for the histogram
    name = title = uuid4().hex

    # Create an empty histogram
    # NOTE: When specifying explicit bin edges, you aren't passing a length
    # argument, you are passing an nbins argument, which is length - 1, hence
    # the code below. If you pass length for n bins, then you'll get garbage
    # for the last bin's upper edge and things go nuts in ROOT.
    dimensionality = len(edges)
    if dimensionality == 1:
        return TH1F(name, title, len(edges[0]) - 1, edges[0])
    elif dimensionality == 2:
        return TH2F(name, title,
                    len(edges[0]) - 1, edges[0],
                    len(edges[1]) - 1, edges[1])
    elif dimensionality == 3:
        return TH3F(name, title,
                    len(edges[0]) - 1, edges[0],
                    len(edges[1]) - 1, edges[1],
                    len(edges[2]) - 1, edges[2])
    else:
        raise ValueError('ROOT can only histogram 1 - 3 dimensions')
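# A minimal, standalone illustration of the NOTE above: with explicit bin
# edges, ROOT's histogram constructors take nbins = len(edges) - 1 plus the
# edge array. The edge values below are purely illustrative.
from array import array
from ROOT import TH1F

edges = array('d', [0., 10., 25., 50., 100.])
h_ok = TH1F("h_ok", "variable-width bins", len(edges) - 1, edges)  # 4 bins, last upper edge 100
# Passing len(edges) instead of len(edges) - 1 would make ROOT read one value
# past the end of the buffer, leaving the last bin with a garbage upper edge.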
def __init__(self, run=22011, sourceDir='./', outputDir=''):
    print 'Creating AnalyseSelectionArea instance for run:', run
    self.run = run
    self.sourceDir = sourceDir
    self.outputDir = outputDir if outputDir != '' else '{s}/{r}/selectionAnalysis/'.format(
        r=self.run, s=self.sourceDir)
    self.rootFile = TFile(
        sourceDir + '/{r}/selectionAnalysis/root/histograms.{r}.{r}.root'.format(r=self.run))
    self.histo3D = TH3F(self.rootFile.Get('hChargeVsFidCut'))
    self.histo3D.GetXaxis().SetTitle('Silicon X/ch')
    self.histo3D.GetYaxis().SetTitle('Silicon Y/ch')
    self.histo1D = 0
    self.map = 0
    self.map_fid = 0
    self.sel_old = {
        'x_low': self.histo3D.GetXaxis().GetXmin(),
        'x_high': self.histo3D.GetXaxis().GetXmax(),
        'y_low': self.histo3D.GetYaxis().GetXmin(),
        'y_high': self.histo3D.GetYaxis().GetXmax()
    }
    self.fidcut = 0
    self.fidpoints = []
    self.nameFid = ''
    if not os.path.isdir('{dir}/Plots'.format(dir=self.outputDir)):
        os.makedirs('{dir}/Plots'.format(dir=self.outputDir))
    if not os.path.isdir('{dir}/root'.format(dir=self.outputDir)):
        os.makedirs('{dir}/root'.format(dir=self.outputDir))
    gStyle.SetPalette(55)
    gStyle.SetNumberContours(999)
    self.bla = []
def bookHist(h, key=None, title='', nbinsx=100, xmin=0, xmax=1,
             nbinsy=0, ymin=0, ymax=1, nbinsz=0, zmin=0, zmax=1):
    if key is None:
        print 'missing key'
        return
    rkey = str(key)  # in case somebody wants to use integers, or floats as keys
    if h.has_key(key):
        h[key].Reset()
    elif nbinsz > 0:
        h[key] = TH3F(rkey, title, nbinsx, xmin, xmax, nbinsy, ymin, ymax, nbinsz, zmin, zmax)
    elif nbinsy > 0:
        h[key] = TH2F(rkey, title, nbinsx, xmin, xmax, nbinsy, ymin, ymax)
    else:
        h[key] = TH1F(rkey, title, nbinsx, xmin, xmax)
    h[key].SetDirectory(gROOT)
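# A hedged usage sketch for bookHist: it assumes bookHist and the ROOT classes
# (TH1F/TH2F/TH3F, gROOT) are already in scope in the same module. The
# dictionary, keys, titles and bin choices are illustrative only.
histos = {}
bookHist(histos, key='nTracks', title='Track multiplicity;N_{tracks};Events',
         nbinsx=50, xmin=0, xmax=50)                                      # books a TH1F
bookHist(histos, key='xy', title='Hit map;x;y',
         nbinsx=100, xmin=-10, xmax=10, nbinsy=100, ymin=-10, ymax=10)    # books a TH2F
histos['nTracks'].Fill(12)
bookHist(histos, key='nTracks')  # calling again with an existing key resets the histogram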
def copy_histo_dimensions(self, h, name, title):
    return TH3F(
        name,
        title,
        h.GetNbinsX(), h.GetXaxis().GetXmin(), h.GetXaxis().GetXmax(),
        h.GetNbinsY(), h.GetYaxis().GetXmin(), h.GetYaxis().GetXmax(),
        h.GetNbinsZ(), h.GetZaxis().GetXmin(), h.GetZaxis().GetXmax(),
    )
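# Hedged usage sketch: `obj` stands in for an instance of whatever class
# defines copy_histo_dimensions above (the class itself is not shown here);
# the source histogram and names are illustrative.
from ROOT import TH3F

source = TH3F("source", "source", 10, 0., 1., 20, -5., 5., 30, 0., 100.)
empty_clone = obj.copy_histo_dimensions(source, "clone", "same axes, empty content")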
def make_histo(title, args, include_signal=True):
    """
    Make and return a histogram described by the given parameters
    """
    histo = TH3F(
        title,
        "3D BumpHunter Test %s" % title,
        args.nbins, 0, args.nbins * args.binsize,
        args.nbins, 0, args.nbins * args.binsize,
        args.nbins, 0, args.nbins * args.binsize,
    )
    histo.GetXaxis().SetTitle("X")
    histo.GetXaxis().SetTitleOffset(2)
    histo.GetYaxis().SetTitle("Y")
    histo.GetYaxis().SetTitleOffset(2)
    histo.GetZaxis().SetTitle("Z")
    histo.GetZaxis().SetTitleOffset(2)
    # Fill the exponentially falling background
    for i in range(args.nbkg):
        histo.Fill(gRandom.Exp(args.bkg_mean),
                   gRandom.Exp(args.bkg_mean),
                   gRandom.Exp(args.bkg_mean))
    # Optionally add a Gaussian signal bump
    if include_signal:
        for i in range(args.nsig):
            x = gRandom.Gaus(args.sig_x, args.sig_spread_x)
            y = gRandom.Gaus(args.sig_y, args.sig_spread_y)
            z = gRandom.Gaus(args.sig_z, args.sig_spread_z)
            histo.Fill(x, y, z)
    return histo
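# Hedged usage sketch for make_histo: the numeric values are illustrative,
# but the attribute names on `args` match exactly what make_histo reads.
# An argparse.Namespace is used here simply as a stand-in for parsed options.
from argparse import Namespace

args = Namespace(nbins=20, binsize=5.0, nbkg=100000, bkg_mean=20.0,
                 nsig=500, sig_x=40.0, sig_y=40.0, sig_z=40.0,
                 sig_spread_x=5.0, sig_spread_y=5.0, sig_spread_z=5.0)
h_sig = make_histo("with_signal", args, include_signal=True)
h_bkg = make_histo("background_only", args, include_signal=False)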
def getTH3F(self, lumi, name, var, nbinx, xmin, xmax, nbiny, ymin, ymax,
            nbinz, zmin, zmax, cut, options, xlabel, ylabel, zlabel):
    if cut == '':
        cut = '(1)'
    if (xmin == xmax) and (ymax == ymin) and (zmax == zmin):
        h = TH3F(name, "", len(nbinx) - 1, array('d', nbinx),
                 len(nbiny) - 1, array('d', nbiny),
                 len(nbinz) - 1, array('d', nbinz))
    else:
        h = TH3F(name, "", nbinx, xmin, xmax, nbiny, ymin, ymax, nbinz, zmin, zmax)
    h.Sumw2()
    h.GetXaxis().SetTitle(xlabel)
    h.GetYaxis().SetTitle(ylabel)
    h.GetZaxis().SetTitle(zlabel)
    for b in self.blocks:
        AuxName = "aux_block" + name + "_" + b.name
        haux = b.getTH3F(lumi, AuxName, var, nbinx, xmin, xmax, nbiny, ymin, ymax,
                         nbinz, zmin, zmax, cut, options, xlabel, ylabel, zlabel)
        h.Add(haux)
        del haux
    return h
def makefill3dhist(df_, titlehist, arrayx, arrayy, arrayz, nvar1, nvar2, nvar3):
    """
    Create a TH3F histogram and fill it with three variables from a dataframe.
    """
    lenx = len(arrayx) - 1
    leny = len(arrayy) - 1
    lenz = len(arrayz) - 1
    histo = TH3F(titlehist, titlehist, lenx, arrayx, leny, arrayy, lenz, arrayz)
    histo.Sumw2()
    df_rd = df_[[nvar1, nvar2, nvar3]]
    arr3 = df_rd.values
    fill_hist(histo, arr3)
    return histo
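# Hedged usage sketch for makefill3dhist: the dataframe and bin edges below
# are illustrative. The edge arrays are passed as array('d', ...) so the
# variable-bin TH3F constructor accepts them; fill_hist (root_numpy) is
# assumed to be imported in the module that defines makefill3dhist.
import array
import pandas as pd

df = pd.DataFrame({"pt_cand": [2.1, 5.3, 8.7],
                   "pt_jet": [12.0, 25.0, 40.0],
                   "z": [0.3, 0.6, 0.9]})
bins_ptc = array.array('d', [0., 4., 8., 12.])
bins_ptjet = array.array('d', [10., 20., 30., 50.])
bins_z = array.array('d', [0., 0.25, 0.5, 0.75, 1.0])
h3 = makefill3dhist(df, "h3_ptc_ptjet_z", bins_ptc, bins_ptjet, bins_z,
                    "pt_cand", "pt_jet", "z")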
def preStitching():
    print " Running pre-stitching"
    for d in os.listdir(origin):
        sample = d.replace(ANALYSIS + ".", "").replace(".root", "")
        if not '.root' in d: continue
        #d = d.replace(ANALYSIS+".", "").replace(".root", "")
        if not sample.startswith('DY'): continue
        if not 'LL' in sample: continue
        if 'BJets' in sample: continue
        #if sample.startswith('DYBBJets'): continue
        #if 'Zpt' in sample: continue
        nEvents[sample] = {}
        for i in range(len(binsNb)):
            nEvents[sample][i] = TH3F("nEvents_%s_nB%d" % (sample, i), ";HT;Z pT;n partons",
                                      len(binsHT) - 1, array('f', binsHT),
                                      len(binsV_pt) - 1, array('f', binsV_pt),
                                      len(binsNj) - 1, array('f', binsNj))
            nEvents[sample][i].SetDirectory(0)
        ref_file = TFile(origin + '/' + d, 'READ')
        obj = ref_file.Get("tree")
        for event in range(0, obj.GetEntries()):
            obj.GetEntry(event)
            nB = min(int(obj.LheNb), binsNb[-1])
            nEvents[sample][nB].Fill(min(obj.LheHT, binsHT[-1] - 1),
                                     min(obj.LheV_pt, binsV_pt[-1] - 1),
                                     min(obj.LheNj, binsNj[-1]),
                                     1. if obj.eventWeight > 0 else -1.)
        print " Done for", d.replace(ANALYSIS + ".", "").replace(".root", "")

    total = nEvents[nEvents.keys()[0]][i].Clone("nEvents")
    total.Reset("MICES")
    #total.SetDirectory(0)
    for name, h in nEvents.iteritems():
        for i in range(len(binsNb)):
            total.Add(h[i])

    outFile = TFile("stitching.root", "RECREATE")
    outFile.cd()
    total.Write()
    for name, h in nEvents.iteritems():
        for i in range(len(binsNb)):
            h[i].Write()
    outFile.Close()
    print " Pre-stitching complete"
def getTH3F(self, lumi, name, var, nbinx, xmin, xmax, nbiny, ymin, ymax,
            nbinz, zmin, zmax, cut, options, xlabel, ylabel, zlabel, extraWeight):
    if (xmin == xmax) and (ymax == ymin) and (zmax == zmin):
        h = TH3F(name, "", len(nbinx) - 1, array('d', nbinx),
                 len(nbiny) - 1, array('d', nbiny),
                 len(nbinz) - 1, array('d', nbinz))
    else:
        h = TH3F(name, "", nbinx, xmin, xmax, nbiny, ymin, ymax, nbinz, zmin, zmax)
    h.Sumw2()
    h.GetXaxis().SetTitle(xlabel)
    h.GetYaxis().SetTitle(ylabel)
    h.GetZaxis().SetTitle(zlabel)
    if not self.isData:
        # weight simulated events by the per-event weight times the target luminosity
        cut = cut + "* ( eventW * " + str(lumi) + " )"
    else:
        cut = cut + "* ( " + str(self.eventW) + " )"
    self.ttree.Project(name, var, cut, options)
    return h
def __BookTrueHistograms(self):
    """ Book the true histograms used for drawing """
    nhit = len(self.TrueHits())
    scale = self.Pr * 0.5
    lbin = -700 / scale
    ubin = 700 / scale
    self.txz = TH2F("txz", "True: x vs z", nhit, lbin, ubin, nhit, lbin, ubin)
    self.tyz = TH2F("tyz", "True: y vs z", nhit, lbin, ubin, nhit, lbin, ubin)
    self.txyz = TH3F("tyxz", "True: x vs y vs z",
                     nhit / 50, lbin, ubin,
                     nhit / 50, lbin, ubin,
                     nhit / 50, lbin, ubin)
    return
def __BookSmearHistograms(self):
    """ Book the smear histograms used for drawing """
    nhit = len(self.SmearedHits())
    scale = self.Pr * 0.5
    lbin = -700 / scale
    ubin = 700 / scale
    self.sxz = TH2F("sxz", "Smeared: x vs z", nhit, lbin, ubin, nhit, lbin, ubin)
    self.syz = TH2F("syz", "Smeared: y vs z", nhit, lbin, ubin, nhit, lbin, ubin)
    self.sxyz = TH3F("syxz", "Smeared: x vs y vs z",
                     nhit / 10, lbin, ubin,
                     nhit / 10, lbin, ubin,
                     nhit / 10, lbin, ubin)
    return
def buildhisto(h_name, h_tit, arrayx, arrayy=None, arrayz=None):
    """
    Create a histogram of size 1D, 2D, 3D, depending on the number of arguments given
    """
    histo = None

    def binning(binning_array):
        return len(binning_array) - 1, binning_array

    if arrayz:
        histo = TH3F(h_name, h_tit, *binning(arrayx), *binning(arrayy), *binning(arrayz))
    elif arrayy:
        histo = TH2F(h_name, h_tit, *binning(arrayx), *binning(arrayy))
    else:
        histo = TH1F(h_name, h_tit, *binning(arrayx))
    histo.Sumw2()
    return histo
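# Hedged usage sketch for buildhisto: the edges are illustrative and are
# passed as array('d', ...) so the variable-bin TH1F/TH2F/TH3F constructors
# accept them. Omitting arrayy/arrayz selects the lower-dimensional case.
from array import array

edges_x = array('d', [0., 1., 2., 5., 10.])
edges_y = array('d', [0., 0.5, 1.0])
h1 = buildhisto("h1", "1D example", edges_x)            # TH1F with 4 bins
h2 = buildhisto("h2", "2D example", edges_x, edges_y)   # TH2F, 4 x 2 bins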
def create_histogram(dimensionality, name, binnings):
    # Create the bare histogram
    if dimensionality == 1:
        h = TH1F(name, name, *_rootify_binning(*binnings[0]))
    elif dimensionality == 2:
        flat_binnings = \
            _rootify_binning(*binnings[0]) + \
            _rootify_binning(*binnings[1])
        h = TH2F(name, name, *flat_binnings)
    elif dimensionality == 3:
        flat_binnings = \
            _rootify_binning(*binnings[0]) + \
            _rootify_binning(*binnings[1]) + \
            _rootify_binning(*binnings[2])
        h = TH3F(name, name, *flat_binnings)
    else:
        raise ValueError('ROOT can only histogram 1 - 3 dimensions')
    h.Sumw2()
    return h
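# Hypothetical sketch of a _rootify_binning helper: the real helper is not
# shown in this snippet, so both its accepted input formats and its behaviour
# are assumptions. All it must guarantee for create_histogram above is that it
# returns a tuple that ROOT's constructors accept, with nbins = len(edges) - 1
# for explicit edges.
from array import array

def _rootify_binning(*binning):
    # fixed-width case: (nbins, low, high) can be forwarded unchanged
    if len(binning) == 3 and all(isinstance(v, (int, float)) for v in binning):
        return (int(binning[0]), float(binning[1]), float(binning[2]))
    # explicit-edge case: accept either a single edge list or edges as args
    edges = array('d', binning[0]) if len(binning) == 1 else array('d', binning)
    return (len(edges) - 1, edges)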
def __BookMeasHistograms(self):
    """ Book the measured-hit histograms used for drawing """
    xl = self.lbin[0]
    yl = self.lbin[1]
    zl = self.lbin[2]
    xu = self.ubin[0]
    yu = self.ubin[1]
    zu = self.ubin[2]
    nhit = len(self.Meas)
    self.mxz = TH2F("mxz", "Meas: x vs z", nhit, zl, zu, nhit, xl, xu)
    self.myz = TH2F("myz", "Meas: y vs z", nhit, zl, zu, nhit, yl, yu)
    self.mxyz = TH3F("myxz", "Meas: x vs y vs z",
                     nhit / 50, xl, xu,
                     nhit / 50, yl, yu,
                     nhit / 50, zl, zu)
    return
def __BookHitHistograms(self):
    """ Book the hit histograms used for drawing """
    xl = self.lbin[0]
    yl = self.lbin[1]
    zl = self.lbin[2]
    xu = self.ubin[0]
    yu = self.ubin[1]
    zu = self.ubin[2]
    nhit = len(self.Hits)
    self.hxz = TH2F("hxz", "Meas: x vs z", nhit, zl, zu, nhit, xl, xu)
    self.hyz = TH2F("hyz", "Meas: y vs z", nhit, zl, zu, nhit, yl, yu)
    self.hxyz = TH3F("hyxz", "Meas: x vs y vs z",
                     nhit / 10, xl, xu,
                     nhit / 10, yl, yu,
                     nhit / 10, zl, zu)
    return
def process_response(self):
    list_df_mc_reco = []
    list_df_mc_gen = []
    for iptskim, _ in enumerate(self.lpt_anbinmin):
        df_mc_reco = pickle.load(openfile(self.lpt_recodecmerged[iptskim], "rb"))
        if "pt_jet" not in df_mc_reco.columns:
            print("Jet variables not found in the dataframe. Skipping process_response.")
            return
        if self.s_evtsel is not None:
            df_mc_reco = df_mc_reco.query(self.s_evtsel)
        if self.s_trigger is not None:
            df_mc_reco = df_mc_reco.query(self.s_trigger)
        df_mc_reco = selectdfrunlist(df_mc_reco,
                                     self.run_param[self.runlistrigger[self.triggerbit]],
                                     "run_number")
        if self.doml is True:
            df_mc_reco = df_mc_reco.query(self.l_selml[iptskim])
        else:
            print("Doing std analysis")
        list_df_mc_reco.append(df_mc_reco)

        df_mc_gen = pickle.load(openfile(self.lpt_gendecmerged[iptskim], "rb"))
        df_mc_gen = selectdfrunlist(df_mc_gen,
                                    self.run_param[self.runlistrigger[self.triggerbit]],
                                    "run_number")
        df_mc_gen = df_mc_gen.query(self.s_presel_gen_eff)
        list_df_mc_gen.append(df_mc_gen)

    df_rec = pd.concat(list_df_mc_reco)
    df_gen = pd.concat(list_df_mc_gen)

    his_njets = TH1F("his_njets_gen", "Number of MC jets", 1, 0, 1)
    his_njets.SetBinContent(1, len(df_gen.index))  # total number of generated & selected jets for normalisation

    df_rec = df_rec[df_rec.ismcfd == 1]  # reconstructed & selected non-prompt jets
    df_gen = df_gen[df_gen.ismcfd == 1]  # generated & selected non-prompt jets

    out_file = TFile.Open(self.n_fileeff, "update")

    # Bin arrays
    # pt_cand
    n_bins_ptc = len(self.lpt_finbinmin)
    bins_ptc_temp = self.lpt_finbinmin.copy()
    bins_ptc_temp.append(self.lpt_finbinmax[n_bins_ptc - 1])
    bins_ptc = array.array('d', bins_ptc_temp)
    # pt_jet
    n_bins_ptjet = len(self.lvar2_binmin)
    bins_ptjet_temp = self.lvar2_binmin.copy()
    bins_ptjet_temp.append(self.lvar2_binmax[n_bins_ptjet - 1])
    bins_ptjet = array.array('d', bins_ptjet_temp)
    # z
    bins_z_temp = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1]
    n_bins_z = len(bins_z_temp) - 1
    bins_z = array.array('d', bins_z_temp)

    # Detector response matrix of pt_jet of non-prompt jets
    df_resp_jet_fd = df_rec.loc[:, ["pt_gen_jet", "pt_jet"]]
    his_resp_jet_fd = TH2F("his_resp_jet_fd",
                           "Response matrix of #it{p}_{T}^{jet, ch} of non-prompt jets;"
                           "#it{p}_{T}^{jet, ch, gen.} (GeV/#it{c});"
                           "#it{p}_{T}^{jet, ch, rec.} (GeV/#it{c})",
                           100, 0, 100, 100, 0, 100)
    fill_hist(his_resp_jet_fd, df_resp_jet_fd)

    # Simulated pt_cand vs. pt_jet of non-prompt jets
    df_ptc_ptjet_fd = df_gen.loc[:, ["pt_cand", "pt_jet"]]
    his_ptc_ptjet_fd = TH2F("his_ptc_ptjet_fd",
                            "Simulated #it{p}_{T}^{cand.} vs. #it{p}_{T}^{jet} of non-prompt jets;"
                            "#it{p}_{T}^{cand., gen.} (GeV/#it{c});"
                            "#it{p}_{T}^{jet, ch, gen.} (GeV/#it{c})",
                            n_bins_ptc, bins_ptc, 100, 0, 100)
    fill_hist(his_ptc_ptjet_fd, df_ptc_ptjet_fd)

    # z_gen of reconstructed feed-down jets (for response)
    arr_z_gen_resp = z_gen_calc(df_rec.pt_gen_jet, df_rec.phi_gen_jet, df_rec.eta_gen_jet,
                                df_rec.pt_gen_cand, df_rec.delta_phi_gen_jet,
                                df_rec.delta_eta_gen_jet)
    # z_rec of reconstructed feed-down jets (for response)
    arr_z_rec_resp = z_calc(df_rec.pt_jet, df_rec.phi_jet, df_rec.eta_jet,
                            df_rec.pt_cand, df_rec.phi_cand, df_rec.eta_cand)
    # z_gen of simulated feed-down jets
    arr_z_gen_sim = z_calc(df_gen.pt_jet, df_gen.phi_jet, df_gen.eta_jet,
                           df_gen.pt_cand, df_gen.phi_cand, df_gen.eta_cand)
    df_rec["z_gen"] = arr_z_gen_resp
    df_rec["z"] = arr_z_rec_resp
    df_gen["z"] = arr_z_gen_sim

    # Simulated pt_cand vs. pt_jet vs. z of non-prompt jets
    df_ptc_ptjet_z_fd = df_gen.loc[:, ["pt_cand", "pt_jet", "z"]]
    his_ptc_ptjet_z_fd = TH3F("his_ptc_ptjet_z_fd",
                              "Simulated #it{p}_{T}^{cand.} vs. #it{p}_{T}^{jet} vs. #it{z} of non-prompt jets;"
                              "#it{p}_{T}^{cand., gen.} (GeV/#it{c});"
                              "#it{p}_{T}^{jet, ch, gen.} (GeV/#it{c});"
                              "#it{z}",
                              n_bins_ptc, bins_ptc, n_bins_ptjet, bins_ptjet, n_bins_z, bins_z)
    fill_hist(his_ptc_ptjet_z_fd, df_ptc_ptjet_z_fd)

    # Create response matrix for feed-down smearing
    # x axis = z, y axis = pt_jet
    his_resp_rec = TH2F("his_resp_rec", "his_resp_rec", n_bins_z, bins_z, n_bins_ptjet, bins_ptjet)
    his_resp_gen = TH2F("his_resp_gen", "his_resp_gen", n_bins_z, bins_z, n_bins_ptjet, bins_ptjet)
    resp_z = RooUnfoldResponse(his_resp_rec, his_resp_gen)
    for row in df_rec.itertuples():
        resp_z.Fill(row.z, row.pt_jet, row.z_gen, row.pt_gen_jet)

    out_file.cd()
    his_resp_jet_fd.Write()
    his_ptc_ptjet_fd.Write()
    his_ptc_ptjet_z_fd.Write()
    his_njets.Write()
    resp_z.Write("resp_z")
    out_file.Close()
def main():
    """
    xmin xmax ymin ymax zmin zmax val rel_err
    """
    f = open(sys.argv[1])
    x = []
    y = []
    z = []
    val = {}     # dictionary of bin indices
    relerr = {}  # dictionary of bin indices
    nline = 0    # line number
    i = 0        # bin x
    j = 0        # bin y
    k = 0        # bin z
    title, xtitle, ytitle, ztitle = "title", "x-title", "y-title", "z-title"

    # first make a list of all coordinates:
    for i, line in enumerate(f.readlines()):
        if re.search("^#", line):
            if i == 0:
                title = line[1:].strip()
            elif i == 1:
                xtitle = line[1:].strip()
            elif i == 2:
                ytitle = line[1:].strip()
            elif i == 3:
                ztitle = line[1:].strip()
            continue  # skip comments
        w = line.split()
        if nline == 0:
            x.append(w[0])
            y.append(w[2])
            z.append(w[4])
            x.append(w[1])
            y.append(w[3])
            z.append(w[5])
        else:
            # now we do not care about being unique - fix it later
            x.append(w[1])
            y.append(w[3])
            z.append(w[5])
        nline = nline + 1

    vx = sorted(str2float(list(set(x))))
    # vx = [x * -1 for x in vx[::-1]] + vx[1:]
    # print("\n!!! vx: added reversed values - used only for polar plots !!!\n")
    vy = sorted(str2float(list(set(y))))
    vz = sorted(str2float(list(set(z))))
    # vz = [-600] + [z * -1 for z in vz[::-1]] + vz[1:] + [600]  # !!! add reversed values - used only for polar plots !!!
    # print("\n!!! vz: added reversed values - used only for polar plots !!!\n")
    # print(i, j, k)
    # val[(i, j, k)] = w[6]
    # relerr[(i, j, k)] = w[7]

    print("x: ", vx)
    print("y: ", vy)
    print("z: ", vz)
    nx = len(vx) - 1
    ny = len(vy) - 1
    nz = len(vz) - 1
    print(nx, ny, nz)

    h = TH3F("neutron", "%s;%s;%s;%s" % (title, xtitle, ytitle, ztitle),
             nx, array('f', vx), ny, array('f', vy), nz, array('f', vz))

    f.seek(0)
    x0, y0, z0 = 0, 0, 0
    bin0 = 0
    for line in f.readlines():
        if re.search("^#", line):
            continue  # skip comments
        w = line.split()
        x0 = (float(w[0]) + float(w[1])) / 2.0
        y0 = (float(w[2]) + float(w[3])) / 2.0
        z0 = (float(w[4]) + float(w[5])) / 2.0
        bin0 = h.FindBin(x0, y0, z0)
        h.SetBinContent(bin0, float(w[6]))
        h.SetBinError(bin0, float(w[6]) * float(w[7]))
    f.close()

    # # arrays of float
    # vx = str2float(x)
    # vy = str2float(y)
    # vz = str2float(z)
    # # number of bins
    # for i in range(nx):
    #     for j in range(ny):
    #         for k in range(nz):
    #             if (i, j, k) in val:
    #                 h.SetBinContent(i + 1, j + 1, k + 1, float(val[(i, j, k)]))
    #                 h.SetBinError(i + 1, j + 1, k + 1, float(relerr[(i, j, k)]) * float(val[(i, j, k)]))
    #                 # h.SetBinError(i + 1, j + 1, k + 1, float(relerr[(i, j, k)]))

    # h.Print()
    out = TFile("out.root", "recreate")
    h.Write()
    out.Close()
                               par1GridMin, par1GridMax)
        newFormatInput = TH1D('bin_content_par1_' + str(i), 'bincontent',
                              nGridPointsForNewF, par1GridMin, par1GridMax)
    elif (model == "par1par2_TH2" or model == "par1par2_TF2"):
        theBaseData = TH2F('theBaseData_' + section + '_' + str(i),
                           'Base Histogram for RooDataHist',
                           nGridPar1Bins, par1GridMin, par1GridMax,
                           nGridPar2Bins, par2GridMin, par2GridMax)
        newFormatInput = TH2D('bin_content_par1_par2_' + str(i), 'bincontent',
                              nGridPointsForNewF, par1GridMin, par1GridMax,
                              nGridPointsForNewF, par2GridMin, par2GridMax)
    elif (model == "par1par2par3_TH3" or model == "par1par2par3_TF3"):
        theBaseData = TH3F('theBaseData_' + section + '_' + str(i),
                           'Base Histogram for RooDataHist',
                           nGridPar1Bins, par1GridMin, par1GridMax,
                           nGridPar2Bins, par2GridMin, par2GridMax,
                           nGridPar3Bins, par3GridMin, par3GridMax)
        newFormatInput = TH3D('bin_content_par1_par2_par3_' + str(i), 'bincontent',
                              nGridPointsForNewF, par1GridMin, par1GridMax,
                              nGridPointsForNewF, par2GridMin, par2GridMax,
                              nGridPointsForNewF, par3GridMin, par3GridMax)

    if i != len(bins) - 1:
        binMin = bins[i - 1]
        binMax = bins[i]
        if (model == "par1_TH1" or model == "par1_TF1"):
            sigObj.Draw(par1Name + ' >> theBaseData_' + section + '_' + str(i),
def defineHistograms(postfix="_"):
    TDCmap = TH2F('TDCmap' + postfix, 'TDCmap' + postfix, 40, -20, 20, 40, -20, 20)
    allhisto = {'TDCmap': TDCmap}

    h_TDCxVsTDCy_withoutAmpCut = TH2F('TDCxVsTDCy_withoutAmpCut' + postfix,
                                      'TDCxVsTDCy_withoutAmpCut', 100, -30, 30, 100, -30, 30)
    allhisto['TDCmapNoAmpCut'] = h_TDCxVsTDCy_withoutAmpCut

    h_HotCellTime = TH1F('h_HotCellTime', 'h_HotCellTime', 4000, -20., 20.)
    allhisto['h_HotCellTime'] = h_HotCellTime

    for iampTh in config.relativeAmpThreshold_:
        ampThStr = '_AmpTh_' + str(int(iampTh * 100))
        h_Totaltime = TH1F('h_Totaltime' + postfix + ampThStr, 'h_Totaltime', 400, -2, 2)
        allhisto['Totaltime' + ampThStr] = h_Totaltime
        h_NPads = TH1F('h_NPads' + postfix + ampThStr, 'h_NPads', 20, 1, 21)
        allhisto['h_NPads' + ampThStr] = h_NPads
        for ipad in [2, 3, 4, 5, 6, 7]:
            ipad_ = '_' + str(ipad)
            h_Totaltime_NPads = TH1F('h_Totaltime' + postfix + ampThStr + ipad_,
                                     'h_Totaltime', 400, -2, 2)
            #print 'Totaltime'+ampThStr+'_'+ipad_
            allhisto['Totaltime' + ampThStr + '_' + ipad_] = h_Totaltime_NPads

    h_Totaltime_Quad = TH1F('h_Totaltime_Quad' + postfix, 'h_Totaltime_Quad', 400, -2, 2.)
    allhisto['Totaltime_Quad'] = h_Totaltime_Quad
    h_Totaltime_Log = TH1F('h_Totaltime_Log' + postfix, 'h_Totaltime_Log', 400, -2, 2.)
    allhisto['Totaltime_Log'] = h_Totaltime_Log

    h3_amp_time_cell = TH3F('h3_amp_time_cell' + postfix, 'h3_amp_time_cell',
                            120, 0.0, 0.6, 200, 9.0, 11.0, 7, 17, 24)
    allhisto['amp_time_cell_'] = h3_amp_time_cell

    h_TDCxVsTDCy_withAmpCut = []
    h_Timeplot = []
    h_TimeCorrected = []
    h2_TDCx_vs_amp = []
    h2_TDCy_vs_amp = []
    h2_TDCx_vs_time = []
    h2_TDCy_vs_time = []
    h2_amp_vs_time = []
    h_TimeOffsetCorrected = []
    for icell in range(17, 24):
        cellnumber = str(icell)
        h_TDCxVsTDCy_withAmpCut.append(TH2F("TDCxVsTDCy_withAmpCut_" + cellnumber + postfix,
                                            "TDCxVsTDCy_withAmpCut_" + cellnumber,
                                            100, -30, 30, 100, -30, 30))
        allhisto['TDCmapWithAmpCut_' + cellnumber] = h_TDCxVsTDCy_withAmpCut[icell - 17]
        h_Timeplot.append(TH1F("TimePlot_" + cellnumber + postfix,
                               "TimePlot_" + cellnumber, 100, 9.5, 10.5))
        allhisto['time_' + cellnumber] = h_Timeplot[icell - 17]
        h_TimeCorrected.append(TH1F('TimeCorrected_' + cellnumber + postfix,
                                    'TimeCorrected_' + cellnumber, 100, -0.5, 0.5))
        allhisto['timecorrected_' + cellnumber] = h_TimeCorrected[icell - 17]
        h_TimeOffsetCorrected.append(TH1F('TimeOffsetCorrected_' + cellnumber + postfix,
                                          'TimeOffsetCorrected_' + cellnumber, 200, 10, 30))
        allhisto['timeOffsetcorrected_' + cellnumber] = h_TimeOffsetCorrected[icell - 17]
        h2_TDCx_vs_amp.append(TH2F('h2_TDCx_vs_amp_' + cellnumber + postfix,
                                   'h2_TDCx_vs_amp_' + cellnumber, 120, -30.0, 30.0, 100, 0.0, 1.0))
        h2_TDCy_vs_amp.append(TH2F('h2_TDCy_vs_amp_' + cellnumber + postfix,
                                   'h2_TDCy_vs_amp_' + cellnumber, 120, -30.0, 30.0, 100, 0.0, 1.0))
        h2_TDCx_vs_time.append(TH2F('h2_TDCx_vs_time_' + cellnumber + postfix,
                                    'h2_TDCx_vs_time_' + cellnumber, 120, -30.0, 30.0, 200, -1.0, 1.0))
        h2_TDCy_vs_time.append(TH2F('h2_TDCy_vs_time_' + cellnumber + postfix,
                                    'h2_TDCy_vs_time_' + cellnumber, 120, -30.0, 30.0, 200, -1.0, 1.0))
        h2_amp_vs_time.append(TH2F('h2_amp_vs_time_' + cellnumber + postfix,
                                   'h2_amp_vs_time_' + cellnumber, 120, 0.0, 0.6, 200, 9.0, 11.0))
        allhisto['h2_TDCx_vs_amp_' + cellnumber] = h2_TDCx_vs_amp[icell - 17]
        allhisto['h2_TDCy_vs_amp_' + cellnumber] = h2_TDCy_vs_amp[icell - 17]
        allhisto['h2_TDCx_vs_time_' + cellnumber] = h2_TDCx_vs_time[icell - 17]
        allhisto['h2_TDCy_vs_time_' + cellnumber] = h2_TDCy_vs_time[icell - 17]
        allhisto['h2_amp_vs_time_' + cellnumber] = h2_amp_vs_time[icell - 17]
    return allhisto
def th3(n, t):
    return TH3F(n, t, 100, -50., 50., 100, -50., 50., 100, -50., 50.)
                     bin_ang, 0, 180)
h_theta_geant = TH1F("h_theta_geant", ";#theta [#circ]; N. Entries / 15#circ", bin_ang, 0, 180)
h_phi_reco = TH1F("h_phi_reco", ";#phi [#circ]; N. Entries / 15#circ", bin_ang, -180, 0)
h_phi_geant = TH1F("h_phi_geant", ";#phi [#circ]; N. Entries / 15#circ", bin_ang, -180, 0)
h_l_geant = TH1F("h_l_geant", ";L [cm]; N. Entries / 80 cm", bin_len, fidvol, 500)
h_l_reco = TH1F("h_l_reco", ";L [cm]; N. Entries / 80 cm", bin_len, fidvol, 500)
h_theta_phi_l_reco = TH3F("h_theta_phi_l_reco", ";#theta [#circ]; #phi [#circ]; L [cm]",
                          bin_ang, 0, 180, bin_ang, -180, 0, bin_len, fidvol, 500)
h_theta_phi_l_geant = TH3F("h_theta_phi_l_geant", ";#theta [#circ]; #phi [#circ]; L [cm]",
                           bin_ang, 0, 180, bin_ang, -180, 0, bin_len, fidvol, 500)
h_dist = TH1F("h_dist", ";Distance [cm]; N. Entries / 2 cm", 25, 0, 50)

anode_plane = [0, 0, 0]
anode_no = [1, 0, 0]
cathode_plane = [x_end, 0, 0]
cathode_no = anode_no
top_plane = [0, y_end, 0]
top_no = [0, 1, 0]
bottom_plane = [0, y_start, 0]
bottom_no = top_no
print(tanBetas)
print 'cos(beta-alpha)s:'
print sinB_As
#print cosB_As
# print sinB_As_bin
#print "len mH bin: ", str(len(mH_bin)-1)
#print "mH bin: ", str(mH_bin)
#print "len tan beta bin: ", str(len(tanBetas_bin)-1)
#print "tan beta bin: ", str(tanBetas_bin)
#print "len cos bins: ", str(len(cosB_As_bin)-1)
#print "cos bins: ", str(cosB_As_bin)
#print "len hists: ", str(len(histNames))

hists = [
    TH3F(histNames[i], ";mH;tan(#beta);cos(#beta-#alpha)",
         len(mH_bin) - 1, mH_bin,
         len(tanBetas_bin) - 1, tanBetas_bin,
         len(cosB_As_bin) - 1, cosB_As_bin)
    for i in range(5, len(histNames))
]

# Output file
output_name = '/nfs/dust/cms/user/asmusspa/public/CMSSW_9_2_15/src/Analysis/MssmHbb/SusHi/FullRun_100PerJob_AllTypesAndBosons/rootFiles/Histograms3D_' + type_boson + '.root'
f = TFile(output_name, 'recreate')

for i in range(1, len(lines)):
    for j in range(5, len(histNames)):
        massH = float(lines[i][3])
        #print massH
        tanBeta = float(lines[i][0])
        #print tanBeta
        sinB_A = float(lines[i][1])
        #Round
chain.Add("../root_files/MuCSRun7348_Group181_MergedTree.Root")
chain.Add("../root_files/MuCSRun7702_Group182_MergedTree.Root")
chain.Add("../root_files/MuCSRun7703_Group183_MergedTree.Root")

fidvol = 20
x_start = 0
x_end = 256.35
y_start = -116.5
y_end = 116.5
z_start = 0
z_end = 1036.8
bin_ang = 12
bin_len = 8

h_theta_phi_l_tpc = TH3F("h_theta_phi_l_tpc", ";#theta [#circ]; #phi [#circ]; L [cm]",
                         bin_ang, 0, 180, bin_ang, -180, 0, bin_len, fidvol, 500)
h_theta_phi_l_mucs = TH3F("h_theta_phi_l_mucs", ";#theta [#circ]; #phi [#circ]; L [cm]",
                          bin_ang, 0, 180, bin_ang, -180, 0, bin_len, fidvol, 500)
h_dist = TH1F("h_dist", ";Distance [cm]; N. Entries / 1 cm", 60, 0, 60)

entries = chain.GetEntries()
print(entries)
wrong, right = [0] * 5, [0] * 5
for entry in range(entries):
    if entry % 1000 == 0:
        print(entry)
    ientry = chain.LoadTree(entry)
    nb = chain.GetEntry(entry)
    l_mucs = chain.MuCS_TPC_len
def histo_maker(name, x_bins=x_bins, y_bins=y_bins, z_bins=z_bins):
    return TH3F(name, name,
                len(x_bins) - 1, x_bins,
                len(y_bins) - 1, y_bins,
                len(z_bins) - 1, z_bins)
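# Hedged usage sketch for histo_maker: the module-level x_bins/y_bins/z_bins
# defaults are assumed to already be edge arrays; the explicit overrides below
# are illustrative and must be array('d', ...) so the variable-bin TH3F
# constructor accepts them.
from array import array

my_x = array('d', [0., 1., 2., 4.])
my_y = array('d', [0., 5., 10.])
my_z = array('d', [0., 100., 200., 400.])
h = histo_maker("h_custom", x_bins=my_x, y_bins=my_y, z_bins=my_z)
h.Fill(0.5, 7.0, 150.0)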
from ROOT import THelix, TH3F, gPad

helix = THelix(0, 0, 0, 2, 0, 1, 4)
hframe = TH3F("hframe", "", 10, -2, 2, 10, -2, 2, 10, -2, 2)
hframe.Draw()
helix.SetRange(0, 0.1, 0)
helix.Draw("same")
gPad.Update()
def _histogram(process, region, expressions, binnings, load_hints=None):
    """Generates a ROOT histogram of a distribution of a process in a region.

    Args:
        process: The process whose events should be histogrammed
        region: The region whose weighting/selection should be applied
        expressions: A tuple of expression strings
        binnings: A tuple of Binning instances
        distribution: The distribution to histogram
        load_hints: If provided, this argument will hint to _histogram that it
            should load additional properties when loading data and that it
            should use the _caching_loader. This facilitates cached loading of
            data across multiple calls to _histogram with the same process.
            This is particularly useful for parallelized histogramming, where
            the jobs are grouped by process.

    Returns:
        A ROOT histogram, of the TH1F, TH2F, or TH3F variety.
    """
    # Compute weighted selection
    selection, weight = region.selection_weight()

    # Expand binnings to edge lists
    edges = tuple((b.edges() for b in binnings))

    # Load data
    if load_hints is not None:
        # If load_hints have been provided, just use those with the
        # _caching_loader
        data = _caching_loader(process, load_hints)
    else:
        # Otherwise manually create the set of necessary properties
        # NOTE: All we need to do are region and expression properties - patch
        # properties are handled internally by the process
        required_properties = set()

        # Add those properties necessary to evaluate region selection/weight
        required_properties.update(properties(selection))
        required_properties.update(properties(weight))

        # Add in those properties necessary to evaluate expressions
        required_properties.update(*(properties(e) for e in expressions))

        # Load data
        data = process.load(required_properties)

    # Apply selection if specified
    if selection != '':
        data = data[data.eval(normalized(selection))]

    # Evaluate each variable expression, converting the resultant Pandas
    # Series to a NumPy array
    # HACK: TH1::FillN only supports 64-bit floating point values, so convert
    # things. Would be nice to find a better approach.
    samples = tuple((data.eval(normalized(e)).values.astype(numpy.float64)
                     for e in expressions))

    # Evaluate weights, converting the resultant Pandas Series to a NumPy
    # array
    # HACK: TH1::FillN only supports 64-bit floating point values, so convert
    # things. Would be nice to find a better approach.
    if weight != '':
        weights = data.eval(normalized(weight)).values.astype(numpy.float64)
    else:
        weights = nullptr

    # Create a unique name and title for the histogram
    name = title = uuid4().hex

    # Create a histogram based on dimensionality
    # NOTE: When specifying explicit bin edges, you aren't passing a length
    # argument, you are passing an nbins argument, which is length - 1, hence
    # the code below. If you pass length for n bins, then you'll get garbage
    # for the last bin's upper edge and things go nuts in ROOT.
    dimensionality = len(expressions)
    count = len(data)
    if dimensionality == 1:
        # Create a one-dimensional histogram
        result = TH1F(name, title, len(edges[0]) - 1, edges[0])

        # Fill the histogram
        # HACK: TH1::FillN will die if N == 0
        if count > 0:
            result.FillN(count, samples[0], weights)
    elif dimensionality == 2:
        # Create a two-dimensional histogram
        result = TH2F(name, title,
                      len(edges[0]) - 1, edges[0],
                      len(edges[1]) - 1, edges[1])

        # Fill the histogram
        # HACK: TH1::FillN will die if N == 0
        if count > 0:
            result.FillN(count, samples[0], samples[1], weights)
    elif dimensionality == 3:
        # Create a three-dimensional histogram
        result = TH3F(name, title,
                      len(edges[0]) - 1, edges[0],
                      len(edges[1]) - 1, edges[1],
                      len(edges[2]) - 1, edges[2])

        # HACK: TH3 doesn't have a FillN method, so we have to do things the
        # slow way.
        # TODO: We may want to put a warning about this slowness
        if weights is nullptr:
            weights = numpy.ones(count, dtype=numpy.float64)
        for x, y, z, w in zip(samples[0], samples[1], samples[2], weights):
            result.Fill(x, y, z, w)
    else:
        raise ValueError('ROOT can only histogram 1 - 3 dimensions')

    # All done
    return result
def main(args):
    lumi = 40e6  # = 40 fb^-1 (there are 6 orders of magnitude between femto- and nano-)
    normalization = {
        # i: xsec (nb) * filt_eff * kfactor / nevents * lumi (nb^-1)
        7: .016215 * 3.9216e-4 * 1 / 1770193 * lumi,
        6: .25753 * 9.4106e-4 * 1 / 1893389 * lumi,
        5: 4.5535 * 9.2196e-4 * 1 / 7977567 * lumi,
        4: 254.63 * 5.3015e-4 * 1 / 7975217 * lumi,
        3: 26454 * 3.1956e-4 * 1 / 7349799 * lumi,
    }
    TH1.SetDefaultSumw2()

    # "Regular" histograms
    m1 = TH1F("jet1m", "Leading Jet Mass", mj_max / mj_binsize, 0, mj_max)
    m2 = TH1F("jet2m", "Subleading Jet Mass", mj_max / mj_binsize, 0, mj_max)
    mjj = TH1F("dijetmass", "Dijet Mass", mjj_max / mjj_binsize, 0, mjj_max)
    jetm = TH3F("jetmass", "Jet Masses",
                mj_max / mj_binsize, 0, mj_max,
                mj_max / mj_binsize, 0, mj_max,
                mjj_max / mjj_binsize, 0, mjj_max)
    pt1 = TH1F("jet1pt", "Leading Jet pT", 50, 0, 1500)
    pt2 = TH1F("jet2pt", "Subleading Jet pT", 50, 0, 1500)

    # histograms for events where m1 ~ m2
    m_avg = TH1F("jetm_avg", "Avg jetmass where m1 ~ m2", mj_max / mj_binsize, 0, mj_max)
    mjj_avg = TH1F("mjj_avg", "Dijet mass where m1 ~ m2", mjj_max / mjj_binsize, 0, mjj_max)
    jetm_avg = TH2F("jetmass_avg", "Jet masses where m1 ~ m2",
                    mj_max / mj_binsize, 0, mj_max,
                    mjj_max / mjj_binsize, 0, mjj_max)

    # histograms separated by qq/qg/gg events. Index is number of gluons (0, 1, or 2)
    separated_jetm = {
        0: TH2F("jetm_qq", "Avg jet mass (m1 ~ m2) for qq events",
                mj_max / mj_binsize, 0, mj_max, mjj_max / mjj_binsize, 0, mjj_max),
        1: TH2F("jetm_qg", "Avg jet mass (m1 ~ m2) for qg events",
                mj_max / mj_binsize, 0, mj_max, mjj_max / mjj_binsize, 0, mjj_max),
        2: TH2F("jetm_gg", "Avg jet mass (m1 ~ m2) for gg events",
                mj_max / mj_binsize, 0, mj_max, mjj_max / mjj_binsize, 0, mjj_max),
    }
    separated_mjj = {
        0: TH1F("mjj_qq", "Dijet mass for qq events", mjj_max / mjj_binsize, 0, mjj_max),
        1: TH1F("mjj_qg", "Dijet mass for qg events", mjj_max / mjj_binsize, 0, mjj_max),
        2: TH1F("mjj_gg", "Dijet mass for gg events", mjj_max / mjj_binsize, 0, mjj_max),
    }
    separated_m1 = {
        0: TH1F("jet1m_qq", "Leading jet mass for qq events", mj_max / mj_binsize, 0, mj_max),
        1: TH1F("jet1m_qg", "Leading jet mass for qg events", mj_max / mj_binsize, 0, mj_max),
        2: TH1F("jet1m_gg", "Leading jet mass for gg events", mj_max / mj_binsize, 0, mj_max),
    }
    separated_m2 = {
        0: TH1F("jet2m_qq", "Subleading jet mass for qq events", mj_max / mj_binsize, 0, mj_max),
        1: TH1F("jet2m_qg", "Subleading jet mass for qg events", mj_max / mj_binsize, 0, mj_max),
        2: TH1F("jet2m_gg", "Subleading jet mass for gg events", mj_max / mj_binsize, 0, mj_max),
    }
    separated_pt1 = {
        0: TH1F("jet1pt_qq", "Leading jet pT for qq events", 50, 0, 1500),
        1: TH1F("jet1pt_qg", "Leading jet pT for qg events", 50, 0, 1500),
        2: TH1F("jet1pt_gg", "Leading jet pT for gg events", 50, 0, 1500),
    }
    separated_pt2 = {
        0: TH1F("jet2pt_qq", "Subleading jet pT for qq events", 50, 0, 1500),
        1: TH1F("jet2pt_qg", "Subleading jet pT for qg events", 50, 0, 1500),
        2: TH1F("jet2pt_gg", "Subleading jet pT for gg events", 50, 0, 1500),
    }

    for sample, sample_norm in normalization.iteritems():
        directory = "data/user.vbaratha.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ%sW.%s_JZ%s_histOutput.root" % (
            sample, args.jobname, sample)
        print "doing one dir"
        for f in iter_outfiles(directory):
            print "doing one file"
            tree = f.Get("aTree")
            for evt in tree:
                try:
                    i, j = leading_subleading(evt)
                    _m1, _m2, _mjj = masses(evt, i, j)
                    p1, p2 = evt.JetPt[i], evt.JetPt[j]
                    wt = evt.weight * sample_norm

                    # Fill "regular" histograms
                    m1.Fill(_m1, wt)
                    m2.Fill(_m2, wt)
                    mjj.Fill(_mjj, wt)
                    jetm.Fill(_m1, _m2, _mjj, wt)
                    pt1.Fill(p1, wt)
                    pt2.Fill(p2, wt)

                    # Fill histograms separated by qq/qg/gg events
                    num_gluons = sum(1 for pdgid in [evt.JetType[i], evt.JetType[j]]
                                     if pdgid == 21)
                    separated_mjj[num_gluons].Fill(_mjj, wt)
                    separated_m1[num_gluons].Fill(_m1, wt)
                    separated_m2[num_gluons].Fill(_m2, wt)
                    separated_pt1[num_gluons].Fill(p1, wt)
                    separated_pt2[num_gluons].Fill(p2, wt)

                    # Fill histograms for events where m_j1 ~ m_j2
                    if abs(_m1 - _m2) < MASS_DIFF_CUTOFF:
                        mavg = (_m1 + _m2) / 2
                        m_avg.Fill(mavg, wt)
                        mjj_avg.Fill(_mjj, wt)
                        jetm_avg.Fill(mavg, _mjj, wt)
                        separated_jetm[num_gluons].Fill(mavg, _mjj, wt)
                except IndexError:
                    pass

    f = TFile(args.outfile, "RECREATE")
    m1.Write()
    m2.Write()
    mjj.Write()
    jetm.Write()
    pt1.Write()
    pt2.Write()
    m_avg.Write()
    mjj_avg.Write()
    jetm_avg.Write()
    for i in range(3):
        separated_jetm[i].Write()
        separated_mjj[i].Write()
        separated_m1[i].Write()
        separated_m2[i].Write()
        separated_pt1[i].Write()
        separated_pt2[i].Write()
    f.Close()