def __init__(self, data=None, *args):  # title='hist', description='hist', nbins=100, xmin=0.0, xmax=100.0
    """Creates a ROOT TH1F from data (an iterable) with title, description,
    nbins, xmin and xmax. The number of bins defaults to 100; if the range
    values aren't set they are derived from the data."""
    if data:
        # Range derived from the data; note it is only applied if args is
        # rebuilt as in the commented-out line below.
        xmin = float(min(data))
        xmax = float(max(data))
        # args = (title, description, nbins, xmin, xmax)
        TH1F.__init__(self, *args)  # title, description, nbins, xmin, xmax
        self.addList(data)
    else:
        # args = (title, description, nbins, xmin, xmax)
        TH1F.__init__(self, *args)  # title, description, nbins, xmin, xmax
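# A minimal self-contained sketch of the same idea (axis range derived from the
# data when none is given). The names below are illustrative only and not part
# of the class above.
from ROOT import TH1F

values = [1.2, 3.4, 2.2, 5.1, 4.8]
h_example = TH1F('hist', 'example histogram', 100, float(min(values)), float(max(values)))
for v in values:
    h_example.Fill(v)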
def closePDF(outfile, canvas):
    canvas.Print(outfile + ".pdf]")


gStyle.SetOptStat(0)
c = TCanvas("c", "c", 800, 600)
outfile = remainder[0]
outfileroot = TFile(remainder[0] + ".root", "RECREATE")
infile = TFile(remainder[1])

histo = infile.Get("myana/myana_trigNLay")
nevents = histo.GetEntries()
#h = TH1F("h", "h", histo.GetNbinsX(), histo.GetXaxis().GetBinLowEdge(1), histo.GetXaxis().GetBinUpEdge(histo.GetNbinsX()))
h = TH1F("h", "h", 20, 0, 20)

pulse = 37  # MHz
mu = 1.     # average electrons / bunch
for i in range(2, h.GetNbinsX()):
    n = histo.Integral(i, histo.GetNbinsX())
    rate = n * pulse * mu / 1000  # kHz
    h.SetBinContent(i, rate)
    error = 0
    if n != 0:
        error = np.sqrt(1 / n) * rate
    else:
        error = np.sqrt(1 / 2.) * pulse * mu / 1000.
    h.SetBinError(i, error)
if "plus" in filename: tree = tree + "_JESUp" # tree = tree+weight print "Current Tree Path : ", tree rootfile = basepath + f if not os.path.exists(rootfile): print 'File Does Not exist' break else: print 'Adding File : ', rootfile rtfile = TFile(rootfile) if "PUWeight" in weight: tmp = TH1F('tmp', weight + ' Unfolding; SF; Number of Events', 50, 0, 5) else: tmp = TH1F('tmp', weight + ' Unfolding; SF; Number of Events', 50, 0, 2) hist = rtfile.Get(tree) canvas = TCanvas("CanvasRef", "CanvasTitle", 800, 600) canvas.SetGridy() drawparams = weight + ">>tmp" hist.Draw(drawparams) canvas.Update() canvas.SaveAs(outpath + filename + '/' + weight + ' ' + hist.GetTitle() + '.pdf') canvas.Close() rtfile.Close()
def plot_clusterdb(dbfilename=None, histofilename="MyHistos.root"): if dbfilename == None: return None dbfile = gROOT.FindObject(dbfilename) if dbfile: dbfile.Close() dbfile = TFile(dbfilename, 'READ') h_Weight = gROOT.FindObject("hDB_Weight") h_Weight.SetDirectory(0) h_U = gROOT.FindObject("hDB_U") h_U.SetDirectory(0) h_V = gROOT.FindObject("hDB_V") h_V.SetDirectory(0) h_Var_U = gROOT.FindObject("hDB_Sigma2_U") h_Var_U.SetDirectory(0) h_Var_V = gROOT.FindObject("hDB_Sigma2_V") h_Var_V.SetDirectory(0) dbfile.Close() # First, we want to compute a list of all different cluster # types found in the db typeset = [] for bin in range(1, h_Weight.GetNbinsX() + 1): # The bin label decodes the cluster shape. The # label contains tokens seperated by "D". The # first token is the cluster size, all other # are digits. Each digits contains three tokens # seperated by "."; nameley iu, iv and signal. # These can be converted to integers. label = h_Weight.GetXaxis().GetBinLabel(bin) current_type = "" for tok in re.split('D', label): addr = re.split('\.', tok) if len(addr) == 1: current_type += tok else: current_type += "D" + addr[0] + '.' + addr[1] if not current_type in typeset: typeset.append(current_type) print("Number of labels in clusterDB is ", h_Weight.GetNbinsX()) print("Number of types in clusterDB is ", len(typeset)) histofile = gROOT.FindObject(histofilename) if histofile: histofile.Close() histofile = TFile(histofilename, 'RECREATE', 'Resolution plots created from ' + dbfilename) histomap_weight = {} histomap_u = {} histomap_v = {} histomap_sigu = {} histomap_sigv = {} for currenttype in typeset: map_weight = {} map_u = {} map_v = {} map_sigu = {} map_sigv = {} for bin in range(1, h_Weight.GetNbinsX() + 1): # The bin label decodes the cluster shape. The # label contains tokens seperated by "D". The # first token is the cluster size, all other # are digits. Each digits contains three tokens # seperated by "."; nameley iu, iv and signal. # These can be converted to integers. label = h_Weight.GetXaxis().GetBinLabel(bin) # We compute the type string by stripping all # signal information from the label current_type = "" for tok in re.split('D', label): addr = re.split('\.', tok) if len(addr) == 1: current_type += tok else: current_type += "D" + addr[0] + '.' 
+ addr[1] if current_type == currenttype: map_weight[label] = h_Weight.GetBinContent(bin) map_u[label] = h_U.GetBinContent(bin) map_v[label] = h_V.GetBinContent(bin) map_sigu[label] = TMath.Sqrt(h_Var_U.GetBinContent(bin)) map_sigv[label] = TMath.Sqrt(h_Var_V.GetBinContent(bin)) histofile.cd("") histofile.mkdir(currenttype) histofile.cd(currenttype) # these are the reprocessed histos for viewing LABELS = len(map_weight) histomap_weight[currenttype] = TH1F("hweight_" + currenttype, "", LABELS, 0, LABELS) histomap_weight[currenttype].SetStats(0) histomap_weight[currenttype].SetFillColor(38) histomap_weight[currenttype].SetYTitle("weight") histomap_u[currenttype] = TH1F("hu_" + currenttype, "", LABELS, 0, LABELS) histomap_u[currenttype].SetStats(0) histomap_u[currenttype].SetFillColor(38) histomap_u[currenttype].SetYTitle("offset u [mm]") histomap_v[currenttype] = TH1F("hv_" + currenttype, "", LABELS, 0, LABELS) histomap_v[currenttype].SetStats(0) histomap_v[currenttype].SetFillColor(38) histomap_v[currenttype].SetYTitle("offset v [mm]") histomap_sigu[currenttype] = TH1F("hsigu_" + currenttype, "", LABELS, 0, LABELS) histomap_sigu[currenttype].SetStats(0) histomap_sigu[currenttype].SetFillColor(38) histomap_sigu[currenttype].SetYTitle("cluster sigma u [mm]") histomap_sigv[currenttype] = TH1F("hsigv_" + currenttype, "", LABELS, 0, LABELS) histomap_sigv[currenttype].SetStats(0) histomap_sigv[currenttype].SetFillColor(38) histomap_sigv[currenttype].SetYTitle("cluster sigma v [mm]") for i, label in enumerate(map_weight.keys()): histomap_weight[currenttype].SetBinContent(i + 1, map_weight[label]) histomap_u[currenttype].SetBinContent(i + 1, map_u[label]) histomap_v[currenttype].SetBinContent(i + 1, map_v[label]) histomap_sigu[currenttype].SetBinContent(i + 1, map_sigu[label]) histomap_sigv[currenttype].SetBinContent(i + 1, map_sigv[label]) histomap_weight[currenttype].GetXaxis().SetBinLabel(i + 1, label) histomap_u[currenttype].GetXaxis().SetBinLabel(i + 1, label) histomap_v[currenttype].GetXaxis().SetBinLabel(i + 1, label) histomap_sigu[currenttype].GetXaxis().SetBinLabel(i + 1, label) histomap_sigv[currenttype].GetXaxis().SetBinLabel(i + 1, label) # summary histograms on type resolution histofile.cd("") TYPES = len(typeset) htypes_sigu = TH1F("htypes_sigu", "", TYPES, 0, TYPES) htypes_sigu.SetStats(0) htypes_sigu.SetFillColor(38) htypes_sigu.SetYTitle("weighted cluster sigma u [mm]") htypes_sigv = TH1F("htypes_sigv", "", TYPES, 0, TYPES) htypes_sigv.SetStats(0) htypes_sigv.SetFillColor(38) htypes_sigv.SetYTitle("weighted cluster sigma v [mm]") htypes_weight = TH1F("htypes_weight", "", TYPES, 0, TYPES) htypes_weight.SetStats(0) htypes_weight.SetFillColor(38) htypes_weight.SetYTitle("weight") for j, currenttype in enumerate(typeset): htypes_sigu.GetXaxis().SetBinLabel(j + 1, currenttype) htypes_sigv.GetXaxis().SetBinLabel(j + 1, currenttype) htypes_weight.GetXaxis().SetBinLabel(j + 1, currenttype) weightedTypeVarU = 0.0 weightedTypeVarV = 0.0 typeNorm = 0.0 for bin in range(1, histomap_weight[currenttype].GetNbinsX() + 1): w = histomap_weight[currenttype].GetBinContent(bin) typeNorm += w weightedTypeVarU += w * TMath.Power( histomap_sigu[currenttype].GetBinContent(bin), 2) weightedTypeVarV += w * TMath.Power( histomap_sigv[currenttype].GetBinContent(bin), 2) htypes_weight.SetBinContent(j + 1, typeNorm) if typeNorm > 0: weightedTypeVarU /= typeNorm weightedTypeVarV /= typeNorm htypes_sigu.SetBinContent(j + 1, TMath.Sqrt(weightedTypeVarU)) htypes_sigv.SetBinContent(j + 1, 
TMath.Sqrt(weightedTypeVarV)) else: htypes_sigu.SetBinContent(j + 1, 0) # invalid htypes_sigv.SetBinContent(j + 1, 0) # invalid # Standardized plots SDTYPES = len(standard_types) hsdtypes_sigu = TH1F("hsdtypes_sigu", "", SDTYPES, 0, SDTYPES) hsdtypes_sigu.SetStats(0) hsdtypes_sigu.SetFillColor(38) hsdtypes_sigu.SetYTitle("weighted cluster sigma u [mm]") hsdtypes_sigv = TH1F("hsdtypes_sigv", "", SDTYPES, 0, SDTYPES) hsdtypes_sigv.SetStats(0) hsdtypes_sigv.SetFillColor(38) hsdtypes_sigv.SetYTitle("weighted cluster sigma v [mm]") hsdtypes_weight = TH1F("hsdtypes_weight", "", SDTYPES, 0, SDTYPES) hsdtypes_weight.SetStats(0) hsdtypes_weight.SetFillColor(38) hsdtypes_weight.SetYTitle("weight") for j, currenttype in enumerate(standard_types): hsdtypes_sigu.GetXaxis().SetBinLabel(j + 1, currenttype) hsdtypes_sigv.GetXaxis().SetBinLabel(j + 1, currenttype) hsdtypes_weight.GetXaxis().SetBinLabel(j + 1, currenttype) weightedTypeVarU = 0.0 weightedTypeVarV = 0.0 typeNorm = 0.0 if currenttype in histomap_weight: for bin in range(1, histomap_weight[currenttype].GetNbinsX() + 1): w = histomap_weight[currenttype].GetBinContent(bin) typeNorm += w weightedTypeVarU += w * TMath.Power( histomap_sigu[currenttype].GetBinContent(bin), 2) weightedTypeVarV += w * TMath.Power( histomap_sigv[currenttype].GetBinContent(bin), 2) hsdtypes_weight.SetBinContent(j + 1, typeNorm) if typeNorm > 0: weightedTypeVarU /= typeNorm weightedTypeVarV /= typeNorm hsdtypes_sigu.SetBinContent(j + 1, TMath.Sqrt(weightedTypeVarU)) hsdtypes_sigv.SetBinContent(j + 1, TMath.Sqrt(weightedTypeVarV)) else: hsdtypes_sigu.SetBinContent(j + 1, 0) # invalid hsdtypes_sigv.SetBinContent(j + 1, 0) # invalid # summary histograms on overall resolution hweighted_sigma_sensor = TH1F("hweighted_sigma_sensor", "", 2, 0, 2) hweighted_sigma_sensor.SetStats(0) hweighted_sigma_sensor.SetFillColor(38) hweighted_sigma_sensor.SetYTitle("cluster sigma [mm]") hweighted_sigma_sensor.GetXaxis().SetBinLabel(1, "sigma u") hweighted_sigma_sensor.GetXaxis().SetBinLabel(2, "sigma v") weightedVarU = 0.0 weightedVarV = 0.0 norm = 0.0 for bin in range(1, h_Weight.GetNbinsX() + 1): w = h_Weight.GetBinContent(bin) norm += w weightedVarU += w * h_Var_U.GetBinContent(bin) weightedVarV += w * h_Var_V.GetBinContent(bin) print("Number of tracks used for calibration is ", norm) if norm > 0: weightedVarU /= norm weightedVarV /= norm hweighted_sigma_sensor.SetBinContent(1, TMath.Sqrt(weightedVarU)) hweighted_sigma_sensor.SetBinContent(2, TMath.Sqrt(weightedVarV)) print("Weighted clusterDB sigmaU [mm]: ", TMath.Sqrt(weightedVarU)) print("Weighted clusterDB sigmaV [mm]: ", TMath.Sqrt(weightedVarV)) else: hweighted_sigma_sensor.SetBinContent(1, 0) # invalid hweighted_sigma_sensor.SetBinContent(2, 0) # invalid histofile.Write() histofile.Close()
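# A small standalone sketch of the label-to-type stripping used in plot_clusterdb
# above. The example label is hypothetical; its format (size token, then
# 'D'-separated digits of the form iu.iv.signal) is inferred from the comments.
import re

def strip_signal(label):
    """Reduce a clusterDB bin label to its type by dropping the signal token
    from every digit while keeping iu and iv."""
    current_type = ""
    for tok in re.split('D', label):
        addr = re.split(r'\.', tok)
        if len(addr) == 1:
            current_type += tok                            # leading cluster-size token
        else:
            current_type += "D" + addr[0] + '.' + addr[1]  # keep iu.iv, drop signal
    return current_type

# e.g. a hypothetical label "2D0.0.15D0.1.20" maps to the type "2D0.0D0.1"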
fName = sys.argv[1]
f = TFile(fName, "UPDATE")
f.mkdir("MuMuChannel")
f.mkdir("EEChannel")
#data=TH1F("data_obs125","data_obs125",28,-0.2,1.2)
#dataprod=TH1F("proddata_obs125","proddata_obs125",28,-0.2,1.2)
data = None

for c in channels:
    for l in list:
        h_ = {}
        p_ = {}
        f.cd()
        if "DY" in l and True:
            tmp = f.Get(c[:2] + "/" + l)
            h = TH1F("DY125", "DY125", tmp.GetNbinsX(),
                     tmp.GetXaxis().GetXmin(), tmp.GetXaxis().GetXmax())
            p = TH1F("prodDY125", "prodDY125", tmp.GetNbinsX(),
                     tmp.GetXaxis().GetXmin(), tmp.GetXaxis().GetXmax())
            for bin in DYbins:
                h_[bin] = f.Get(c[:2] + "/DY" + bin + "125")
                p_[bin] = f.Get(c[:2] + "/prodDY" + bin + "125")
                binScale = 1.
                if bin != "0to50":
                    binScale = float(DYrew[c + sDYs[DYbins.index(bin) - 1] + "Extra_norm"])
                for sdy in sDYs:
                    h_[bin].Add(f.Get(c[:2] + "/" + sdy + bin + "125"))
                    p_[bin].Add(f.Get(c[:2] + "/prod" + sdy + bin + "125"))
                h_[bin].Scale(binScale)
bins = [float(i) for i in cfg.get(section, 'obsBins').split(',')]
# print bins
# break;

outfile_newF = TFile.Open('signal_proc_' + section + '.root', 'RECREATE')
# print ('outfile_newF = signal_proc'+section+'.root');

for i in range(1, len(bins)):
    if (model == "par1_TH1" or model == "par1_TF1"):
        theBaseData = TH1F('theBaseData_' + section + '_' + str(i), 'Base Histogram for RooDataHist',
                           nGridPar1Bins, par1GridMin, par1GridMax)
        newFormatInput = TH1D('bin_content_par1_' + str(i), 'bincontent',
                              nGridPointsForNewF, par1GridMin, par1GridMax)
    elif (model == "par1par2_TH2" or model == "par1par2_TF2"):
        theBaseData = TH2F('theBaseData_' + section + '_' + str(i), 'Base Histogram for RooDataHist',
                           nGridPar1Bins, par1GridMin, par1GridMax,
                           nGridPar2Bins, par2GridMin, par2GridMax)
        newFormatInput = TH2D('bin_content_par1_par2_' + str(i), 'bincontent',
                              nGridPointsForNewF, par1GridMin, par1GridMax,
                              nGridPointsForNewF, par2GridMin, par2GridMax)
    elif (model == "par1par2par3_TH3" or model == "par1par2par3_TF3"):
        theBaseData = TH3F('theBaseData_' + section + '_' + str(i), 'Base Histogram for RooDataHist',
                           nGridPar1Bins, par1GridMin, par1GridMax,
                           nGridPar2Bins, par2GridMin, par2GridMax,
                           nGridPar3Bins, par3GridMin, par3GridMax)
        newFormatInput = TH3D('bin_content_par1_par2_par3_' + str(i), 'bincontent',
def main():
    gStyle.SetOptStat(0)
    c1 = TCanvas('c1', 'c1', 3)
    gPad.SetTickx()
    gPad.SetTicky()
    c1.SetLeftMargin(0.12)

    h_frame = TH1F('frame', '', 50, 0, 1000)
    h_frame.SetXTitle('P^{miss}_{T} (GeV)')
    h_frame.SetYTitle('Arbitrary units')
    h_frame.SetMaximum(0.4)

    h_higgsPt = TH1F('h_higgsPt', 'h_higgsPt', 50, 0, 1000)
    h_higgsPtList = []
    for i in range(6):
        h_higgsPtList.append(TH1F(histName_zp2HDM[i], '', 50, 0, 1000))
        h_higgsPtList[i].SetLineWidth(2)
    h_higgsPt_BarList = []
    for i in range(6):
        h_higgsPt_BarList.append(TH1F(histName_bar[i], '', 40, 0, 800))
        h_higgsPt_BarList[i].SetLineWidth(2)

    ## test code
    '''
    ivVectList = getPtList(hApath)
    for fourV in ivVectList:
        h_higgsPt.Fill(fourV.pt)
    '''

    ## loop all combination
    leg = TLegend(0.32, 0.57, 0.87, 0.87)
    leg.SetBorderSize(0)
    for i in range(3):
        ivVectList = getPtList(hApathList[i])
        h_higgsPtList[i].SetLineColor(87 + 4 * i)
        leg.AddEntry(h_higgsPtList[i], legtext[i])
        for fourV in ivVectList:
            h_higgsPtList[i].Fill(fourV.pt)
    for i in range(3):
        ivVectList = getPtList(hApathList_5[i])
        h_higgsPtList[i + 3].SetLineColor(61 + 4 * i)
        leg.AddEntry(h_higgsPtList[i + 3], legtext_5[i])
        for fourV in ivVectList:
            h_higgsPtList[i + 3].Fill(fourV.pt)

    h_frame.Draw('hist')
    #h_higgsPt.Draw('histsame')
    for i in range(6):
        h_higgsPtList[i].DrawNormalized('histsame')
    leg.Draw()
    c1.Print('Zp2HDM_higgsPt.pdf')

    colorList = [95, 91, 87, 61, 65, 69]
    #colorList = [61,95,65,91,69,87]

    ## Baryonic
    info_bar = TLatex(0, 0.255, 'CMS')
    info_bar_2 = TLatex(750, 0.255, '35.9 fb^{-1} (13 TeV)')
    info_bar.SetTextSize(0.03)
    info_bar_2.SetTextSize(0.03)
    info_bar_2.SetTextAlign(31)
    h_frame.SetMaximum(0.25)
    h_frame.SetAxisRange(0., 750., "X")
    leg.Clear()
    for i in range(6):
        ivVectList = getPtList('BaryonicFile/' + hApath_barList[i])
        h_higgsPt_BarList[i].SetLineColor(colorList[i])
        #h_higgsPt_BarList[i].SetLineColor(90-6*i)
        leg.AddEntry(h_higgsPt_BarList[i], legtext_bar[i])
        for fourV in ivVectList:
            h_higgsPt_BarList[i].Fill(fourV.pt)
    h_frame.Draw('hist')
    for i in range(5, -1, -1):
        h_higgsPt_BarList[i].DrawNormalized('histsame')
    leg.Draw()
    info_bar.Draw()
    info_bar_2.Draw()
    c1.Print('Baryonic_higgsPt.pdf')

    f = TFile('rootFile/Zp2HDM_missPt.root', 'recreate')
    for i in range(6):
        h_higgsPtList[i].SetLineColor(1)
        h_higgsPtList[i].Write()
    f.Close()
    f = TFile('rootFile/BaryonicZp_missPt.root', 'recreate')
    for i in range(6):
        h_higgsPt_BarList[i].SetLineColor(1)
        h_higgsPt_BarList[i].Write()
    f.Close()
gStyle.SetPadTopMargin(0.06)
gStyle.SetPadRightMargin(0.04)
gStyle.SetPadLeftMargin(0.15)

f = ROOT.TFile.Open("/afs/cern.ch/work/n/nchernya/VBFZll/plotter/output_txt14/EWK_LLJJ_%s_QCDScalenom_JESnom_v25_bdt_alldata4_qglweightsnorm_vetoeff_reminiaod.root" % channel)
path = '/afs/cern.ch/user/n/nchernya/eos_mount/0/cms/store/group/phys_higgs/vbfHbb/V25_passall/EWK_LLJJ_MLL-50_MJJ-120_13TeV-madgraph-pythia8/VHBB_HEPPY_V25/170401_193326/0000/tree*.root'

hist_sel = f.Get("hPVs")
hist_sel.Scale(1. / hist_sel.Integral())

chain = TChain("tree")
chain.Add(path)
hnPVs = TH1F("hPVs_all", "", 50, 0, 50)
hnPVs.GetXaxis().SetTitle("nPVs")
chain.Draw("nPVs>>hPVs_all", "puWeight*genWeight/TMath::Abs(genWeight)")
hnPVs.Scale(1. / hnPVs.Integral())
hnPVs.SetLineColor(2)
hnPVs.SetLineWidth(2)

hist_sel.SetLineColor(ROOT.kBlue)
hist_sel.SetLineWidth(2)
hist_sel.SetLineStyle(7)

hratio = hist_sel.Clone("new")
hratio.Divide(hnPVs)
from ROOT import TFile, TH1F, TCanvas
from papas_analysis_gael.tools.style import papas_style, cms_style, cms_style2, papas_style2
from papas_analysis_gael.tools.HistComparator import HistComparator

file = TFile('./rootfiles/charged_hadron_tree4.root')
tree = file.Get('events')

Erec_Egen = HistComparator(tree,
                           style1=cms_style,
                           style2=papas_style,
                           nbin=500,
                           xmin=0.,
                           xmax=3.,
                           xvar='E_{rec}/E_{gen}',
                           var1='cmsjet_e/gen_jet_e',
                           cut='gen_jet_e>20 && simtrack_len==1 && abs(gen_jet_eta)<1.3 && gen_jet_e<100',
                           var2='papasjet_e/gen_jet_e')
Erec_Egen.canva.SetLogy()

no_region = TH1F("no_region", "no region", 300, 0, 2)
tree.Project("no_region", "cmsjet_e/gen_jet_e",
             "gen_jet_e>20 && simtrack_len==1 && abs(gen_jet_eta)<1.3 && gen_jet_e<100 && (cmsjet_e/gen_jet_e<1.03 || cmsjet_e/gen_jet_e>1.3)")

region_no130 = TH1F("region_no130", "region_no130", 300, 0, 2)
tree.Project("region_no130", "cmsjet_e/gen_jet_e",
             "gen_jet_e>20 && simtrack_len==1 && abs(gen_jet_eta)<1.3 && gen_jet_e<100 && cmsjet_e/gen_jet_e>1.03 && cmsjet_e/gen_jet_e<1.3 && tagged_pdgid!=130")

region_corrected = TH1F("region_corrected", "region_corrected", 300, 0, 2)
tree.Project("region_corrected", "(cmsjet_e-tagged_e)/gen_jet_e",
             "gen_jet_e>20 && simtrack_len==1 && abs(gen_jet_eta)<1.3 && gen_jet_e<100 && cmsjet_e/gen_jet_e>1.03 && cmsjet_e/gen_jet_e<1.3 && tagged_pdgid==130")

total = TH1F("total", "total", 300, 0, 2)
total.Add(no_region, region_no130)
total.Add(region_corrected)
total.SetLineColor(1)

can0 = TCanvas()
previous = TH1F("previous", "previous", 300, 0, 2)
tree.Project("previous", 'cmsjet_e/gen_jet_e',
             'gen_jet_e>20 && simtrack_len==1 && abs(gen_jet_eta)<1.3 && gen_jet_e<100')
previous.SetLineColor(4)
def main(argv):
    # Usage controls from OptionParser
    parser_usage = "outputfile.root filename1.root ... filenameN.root"
    parser = OptionParser(usage=parser_usage)
    (options, args) = parser.parse_args(argv)
    if (len(args) == 0):
        parser.print_help()
        return

    if (SCAN_OVER_ENERGY and SCAN_OVER_THETA):
        print "ERROR: both scan flags turned on! Please select either energy or theta to scan over..."
        return
    if (not SCAN_OVER_ENERGY and not SCAN_OVER_THETA):
        print "ERROR: neither scan flags turned on! Please select either energy or theta to scan over..."
        return

    c1 = TCanvas("c1", "c1", 1600, 900)
    file_list = []
    curr_P_val = 0.1
    for i in range(1, len(args)):
        file_list.append(TFile.Open(argv[i], 'read'))

    scan_val_arr = array('d', [])
    scan_val_arr_arr_err = array('d', [])
    effic_gauscore_arr = array('d', [])
    effic_gauscore_err_arr = array('d', [])
    effic_anyquality_arr = array('d', [])
    effic_anyquality_err_arr = array('d', [])
    effic_gauscore_1show_arr = array('d', [])
    effic_gauscore_1show_err_arr = array('d', [])
    effic_anyquality_1show_arr = array('d', [])
    effic_anyquality_1show_err_arr = array('d', [])

    # curr_P_val = 0.1
    for i in range(0, len(file_list)):
        print "Current file: " + argv[i+1]
        # For normalization
        # h_curr_showers = file_list[i].Get("pi_pm_gun_hists/h_NFCALShowers")
        h_ThrownPionP_curr = TH1F()
        h_ThrownPhotonTheta_curr = TH1F()
        h_ThrownPionP_curr = file_list[i].Get("pi_pm_gun_hists/h_ThrownPionP")
        h_ThrownPhotonTheta_curr = file_list[i].Get("pi_pm_gun_hists/h_ThrownPionTheta")
        norm_count = h_ThrownPionP_curr.GetEntries()
        if (norm_count < MIN_EVENTS):
            continue
        # atleast_one_shower = norm_count-h_curr_showers.GetBinContent(1) #Bin 1 corresponds to 0 showers
        # curr_P_val = my_gaus_fit.GetParameter(1)
        curr_P_val = h_ThrownPionP_curr.GetBinLowEdge(h_ThrownPionP_curr.GetMaximumBin() + 1)
        curr_theta_val = h_ThrownPhotonTheta_curr.GetBinLowEdge(h_ThrownPhotonTheta_curr.GetMaximumBin() + 1)
        print "Current P: " + str(curr_P_val)
        print "Current theta: " + str(curr_theta_val) + "\n"

        BELOW_MEAN_TO_FIT = E_BELOWTHROWN_TOFIT * curr_P_val / 2
        ABOVE_MEAN_TO_FIT = E_BELOWTHROWN_TOFIT * curr_P_val / 2

        h_curr = file_list[i].Get("pi_pm_gun_hists/h_ChargedTrackHypothesis_P")
        my_gaus_fit = TF1("my_gaus_fit", "gausn", 0.001, 12.)
        my_gaus_fit.SetParLimits(0, 0, 100000)
        my_gaus_fit.SetParLimits(1, curr_P_val - 0.2, curr_P_val + 0.1)
        my_gaus_fit.SetParLimits(2, 0.005, 0.4)
        my_gaus_fit.SetNpx(1000)
        h_curr.GetXaxis().SetRangeUser(curr_P_val - 3, curr_P_val + 3.0)
        h_curr.Fit(my_gaus_fit, "Q", "", curr_P_val - BELOW_MEAN_TO_FIT, curr_P_val + ABOVE_MEAN_TO_FIT)
        h_curr.Fit(my_gaus_fit, "QL", "", curr_P_val - BELOW_MEAN_TO_FIT, curr_P_val + ABOVE_MEAN_TO_FIT)

        if (POLY_ORDER >= 1):
            gaus_fit_amplitude = my_gaus_fit.GetParameter(0)
            gaus_fit_mean = my_gaus_fit.GetParameter(1)
            gaus_fit_sigma = my_gaus_fit.GetParameter(2)
            gaus_plus_poly_fit = TF1("gaus_plus_poly_fit", "gausn+pol" + str(POLY_ORDER) + "(3)", 0.001, 12.)
            gaus_plus_poly_fit.SetParameter(0, gaus_fit_amplitude)
            gaus_plus_poly_fit.SetParameter(1, gaus_fit_mean)
            gaus_plus_poly_fit.SetParameter(2, gaus_fit_sigma)
            gaus_plus_poly_fit.SetParLimits(0, 0, 100000)
            gaus_plus_poly_fit.SetParLimits(1, curr_P_val - 0.2, curr_P_val + 0.1)
            gaus_plus_poly_fit.SetParLimits(2, 0.005, 0.4)
            h_curr.Fit(gaus_plus_poly_fit, "Q", "", curr_P_val - BELOW_MEAN_TO_FIT, curr_P_val + ABOVE_MEAN_TO_FIT)
            h_curr.Fit(gaus_plus_poly_fit, "QL", "", curr_P_val - BELOW_MEAN_TO_FIT, curr_P_val + ABOVE_MEAN_TO_FIT)

            twogaus_plus_poly_fit = TF1("twogaus_plus_poly_fit", "gausn+gausn(3)+pol" + str(POLY_ORDER) + "(6)", 0.001, 12.)
            twogaus_plus_poly_fit.SetParameter(0, gaus_plus_poly_fit.GetParameter(0))
            twogaus_plus_poly_fit.SetParameter(1, gaus_plus_poly_fit.GetParameter(1))
            twogaus_plus_poly_fit.SetParameter(2, gaus_plus_poly_fit.GetParameter(2))
            twogaus_plus_poly_fit.SetParameter(6, gaus_plus_poly_fit.GetParameter(3))
            twogaus_plus_poly_fit.SetParameter(7, gaus_plus_poly_fit.GetParameter(4))
            twogaus_plus_poly_fit.SetParameter(8, gaus_plus_poly_fit.GetParameter(5))
            twogaus_plus_poly_fit.SetParLimits(0, 0, 100000)
            twogaus_plus_poly_fit.SetParLimits(1, curr_P_val - 0.2, curr_P_val + 0.1)
            twogaus_plus_poly_fit.SetParLimits(2, 0.005, 0.4)
            twogaus_plus_poly_fit.SetParLimits(3, 0, 100000)
            twogaus_plus_poly_fit.SetParLimits(4, curr_P_val - 0.2, curr_P_val + 0.1)
            twogaus_plus_poly_fit.SetParLimits(5, 0.005, 0.4)
            h_curr.Fit(twogaus_plus_poly_fit, "Q", "", curr_P_val - BELOW_MEAN_TO_FIT, curr_P_val + ABOVE_MEAN_TO_FIT)
            h_curr.Fit(twogaus_plus_poly_fit, "QL", "", curr_P_val - BELOW_MEAN_TO_FIT, curr_P_val + ABOVE_MEAN_TO_FIT)

        c1.SaveAs(".plots/FitE_" + str(curr_P_val) + ".png")

        effic_gauscore_arr.append((my_gaus_fit.GetParameter(0) / h_curr.GetBinWidth(0)) / norm_count)
        effic_gauscore_err_arr.append(my_gaus_fit.GetParError(0) / h_curr.GetBinWidth(0) / norm_count)
        # effic_anyquality_arr.append(atleast_one_shower/norm_count)
        # effic_anyquality_err_arr.append(0)

        if (SCAN_OVER_ENERGY):
            scan_val_arr.append(curr_P_val)
        if (SCAN_OVER_THETA):
            scan_val_arr.append(curr_theta_val)
        scan_val_arr_arr_err.append(0)
        # curr_P_val+=0.05

    gr_gauscore_effic = TGraphErrors(len(file_list), scan_val_arr, effic_gauscore_arr,
                                     scan_val_arr_arr_err, effic_gauscore_err_arr)
    gr_gauscore_effic.SetMarkerStyle(15)
    gr_gauscore_effic.SetMarkerSize(1.2)
    gr_gauscore_effic.SetMarkerColor(kBlue)
    gr_gauscore_effic.SetName("gr_gauscore_effic")
    gr_gauscore_effic.SetTitle("Efficiency at 1 GeV")
    gr_gauscore_effic.GetXaxis().SetTitle("Photon #theta (degrees)")
    gr_gauscore_effic.GetYaxis().SetTitle("Efficiency")
    # gr_gauscore_effic.GetXaxis().SetRangeUser(0,12.)
    gr_gauscore_effic.Draw("AP")
    c1.SaveAs("GaussianCoreEfficiency.png")
    # gr_anyquality_effic.Draw("AP")
    # c1.SaveAs("AnyQualityEfficiency.png")

    f_out = TFile(argv[0], "RECREATE")
    f_out.cd()
    gr_gauscore_effic.Write()
    # gr_anyquality_effic.Write()
    # gr_gauscore_1show_effic.Write()
    # gr_anyquality_1show_effic.Write()
    f_out.Close()
    print("Done ")
parser.add_option("-b", "--batch", action="store_true", default=False, dest="batch") (options, args) = parser.parse_args() if options.batch: gROOT.SetBatch(True) #scp lxplus.cern.ch:/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions16/13TeV/PileUp/pileup_latest.txt data/JSON/ #pileupCalc.py -i data/JSON/Cert_271036-276811_13TeV_PromptReco_Collisions16_JSON_NoL1T.txt --inputLumiJSON data/JSON/pileup_latest.txt --calcMode true --minBiasXsec 71300 --maxPileupBin 60 --numPileupBins 60 data/PU_71300.root # https://raw.githubusercontent.com/cms-sw/cmssw/CMSSW_7_4_X/SimGeneral/MixingModule/python/mix_2015_25ns_Startup_PoissonOOTPU_cfi.py #probValue = [4.8551E-07, 1.74806E-06, 3.30868E-06, 1.62972E-05, 4.95667E-05, 0.000606966, 0.003307249, 0.010340741, 0.022852296, 0.041948781, 0.058609363, 0.067475755, 0.072817826, 0.075931405, 0.076782504, 0.076202319, 0.074502547, 0.072355135, 0.069642102, 0.064920999, 0.05725576, 0.047289348, 0.036528446, 0.026376131, 0.017806872, 0.011249422, 0.006643385, 0.003662904, 0.001899681, 0.00095614, 0.00050028, 0.000297353, 0.000208717, 0.000165856, 0.000139974, 0.000120481, 0.000103826, 8.88868E-05, 7.53323E-05, 6.30863E-05, 5.21356E-05, 4.24754E-05, 3.40876E-05, 2.69282E-05, 2.09267E-05, 1.5989E-05, 4.8551E-06, 2.42755E-06, 4.8551E-07, 2.42755E-07, 1.21378E-07, 4.8551E-08] # https://github.com/cms-sw/cmssw/blob/CMSSW_8_1_X/SimGeneral/MixingModule/python/mix_2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU_cfi.py probValue = [0.000829312873542, 0.00124276120498, 0.00339329181587, 0.00408224735376, 0.00383036590008, 0.00659159288946, 0.00816022734493, 0.00943640833116, 0.0137777376066, 0.017059392038, 0.0213193035468, 0.0247343174676, 0.0280848773878, 0.0323308476564, 0.0370394341409, 0.0456917721191, 0.0558762890594, 0.0576956187107, 0.0625325287017, 0.0591603758776, 0.0656650815128, 0.0678329011676, 0.0625142146389, 0.0548068448797, 0.0503893295063, 0.040209818868, 0.0374446988111, 0.0299661572042, 0.0272024759921, 0.0219328403791, 0.0179586571619, 0.0142926728247, 0.00839941654725, 0.00522366397213, 0.00224457976761, 0.000779274977993, 0.000197066585944, 7.16031761328e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] mc = TH1F("2016_25ns_SpringMC_PUScenarioV1", "True nPV distribution", 60, 0, 60) mc.Sumw2() for i in range(60): mc.SetBinContent(i+1, probValue[i]) mc.SetLineWidth(3) mc.SetLineColor(1) mc.SetLineStyle(2) mc.Scale(1./mc.Integral()) if options.save: outFile = TFile("../data/PU_MC.root", "RECREATE") outFile.cd() mc.Write() outFile.Close() print "Histograms written to ../data/PU_MC.root file" exit()
m_H2 = []
m_H3 = []
b_pT = []
b_eta = []
b_phi = []
bbar_pT = []
bbar_eta = []
bbar_phi = []
a_pT = []
a_eta = []
a_phi = []

# NB: every histogram below is booked with the same ROOT name ('run_' + i) and
# title ("H3(1MeV)"), so ROOT will warn about replacing objects with that name.
h_m_H1 = TH1F('run_' + i, "H3(1MeV)", 100, 100, 150)
h_m_H2 = TH1F('run_' + i, "H3(1MeV)", 1000, 0, 1000)
h_b_pT = TH1F('run_' + i, "H3(1MeV)", 100, 0, 600)
h_b_eta = TH1F('run_' + i, "H3(1MeV)", 100, -5, 5)
h_b_phi = TH1F('run_' + i, "H3(1MeV)", 10, -4, 4)
h_bbar_pT = TH1F('run_' + i, "H3(1MeV)", 100, 0, 600)
h_bbar_eta = TH1F('run_' + i, "H3(1MeV)", 100, -5, 5)
h_bbar_phi = TH1F('run_' + i, "H3(1MeV)", 10, -4, 4)
h_a_pT = TH1F('run_' + i, "H3(1MeV)", 100, 0, 600)
h_a_eta = TH1F('run_' + i, "H3(1MeV)", 100, -5, 5)
h_a_phi = TH1F('run_' + i, "H3(1MeV)", 10, -4, 4)

files = glob.glob(path + 'run_' + i + '/*.lhe')
def main(debug=True, real_data=False, plots=False):
    start = time.time()
    if (debug):
        dframe = pickle.load(
            openfile(
                "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_data/260_20191004-0008/skpkldecmerged/AnalysisResultsReco4_6_0.65.pkl.lz4",
                "rb"))
    else:
        if (real_data):
            dfreco0 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_data/260_20191004-0008/skpkldecmerged/AnalysisResultsReco1_2_0.75.pkl.lz4",
                    "rb"))
            dfreco1 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_data/260_20191004-0008/skpkldecmerged/AnalysisResultsReco2_4_0.75.pkl.lz4",
                    "rb"))
            dfreco2 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_data/260_20191004-0008/skpkldecmerged/AnalysisResultsReco4_6_0.65.pkl.lz4",
                    "rb"))
            dfreco3 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_data/260_20191004-0008/skpkldecmerged/AnalysisResultsReco6_8_0.65.pkl.lz4",
                    "rb"))
            dfreco4 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_data/260_20191004-0008/skpkldecmerged/AnalysisResultsReco8_24_0.45.pkl.lz4",
                    "rb"))
            frames = [dfreco0, dfreco1, dfreco2, dfreco3, dfreco4]
            dframe = pd.concat(frames)
        else:
            dfreco0 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_mc_prodD2H/261_20191004-0007/skpkldecmerged/AnalysisResultsReco1_2_0.75.pkl.lz4",
                    "rb"))
            dfreco1 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_mc_prodD2H/261_20191004-0007/skpkldecmerged/AnalysisResultsReco2_4_0.75.pkl.lz4",
                    "rb"))
            dfreco2 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_mc_prodD2H/261_20191004-0007/skpkldecmerged/AnalysisResultsReco4_6_0.65.pkl.lz4",
                    "rb"))
            dfreco3 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_mc_prodD2H/261_20191004-0007/skpkldecmerged/AnalysisResultsReco6_8_0.65.pkl.lz4",
                    "rb"))
            dfreco4 = pickle.load(
                openfile(
                    "/data/Derived/D0kINT7HighMultwithJets/vAN-20191003_ROOT6-1/pp_2018_mc_prodD2H/261_20191004-0007/skpkldecmerged/AnalysisResultsReco8_24_0.45.pkl.lz4",
                    "rb"))
            frames = [dfreco0, dfreco1, dfreco2, dfreco3, dfreco4]
            dframe = pd.concat(frames)
    #dframe = dframe.query("y_test_probxgboost>0.5")
    #dframe = dframe.query("pt_cand > 10")
    #dframe = dframe.query("pt_cand < 10")
    dfreco = dframe.reset_index(drop=True)
    end = time.time()
    print("Data loaded in", end - start, "sec")
    if (debug):
        print("Debug mode: reduced data")
        dfreco = dfreco[:1000000]
    print("Size of data", dfreco.shape)
    print(dfreco.columns)

    binning = 200
    hfile = TFile('pre_selection_histos.root', 'RECREATE', 'ROOT file with histograms')
    cYields = TCanvas('cYields', 'The Fit Canvas')
    fit_fun1 = TF1("fit_fun1", "expo", 1.64, 1.82)
    fit_fun2 = TF1("fit_fun2", "gaus", 1.82, 1.92)
    fit_total = TF1("fit_total", "expo(0) + gaus(2) + expo(5)", 1.64, 2.1)
    h_invmass = TH1F("invariant mass", "", binning, dfreco.inv_mass.min(), dfreco.inv_mass.max())
    fill_hist(h_invmass, dfreco.inv_mass)
    h_invmass.Fit(fit_fun1, "R")
    par1 = fit_fun1.GetParameters()
    h_invmass.Fit(fit_fun2, "R+")
    par2 = fit_fun2.GetParameters()
    fit_total.SetParameters(par1[0], par1[1], par2[0], par2[1], par2[2], par1[0], par1[1])
    h_invmass.Fit(fit_total, "R+")
    par = fit_total.GetParameters()
    h_invmass.Draw()
    cYields.SaveAs("h_invmass.png")

    if (plots):
        cYields.SetLogy(True)
        h_d_len = TH1F("d_len", "", 200, dfreco.d_len.min(),
                       dfreco.d_len.max())
        fill_hist(h_d_len, dfreco.d_len)
        h_d_len.Draw()
        cYields.SaveAs("h_d_len.png")

        h_norm_dl = TH1F("norm dl", "", 200, dfreco.norm_dl.min(),
                         dfreco.norm_dl.max())
        fill_hist(h_norm_dl, dfreco.norm_dl)
        h_norm_dl.Draw()
        cYields.SaveAs("h_norm_dl.png")

        cYields.SetLogy(False)
        h_cos_p = TH1F("cos_p", "", 200, dfreco.cos_p.min(), dfreco.cos_p.max())
        fill_hist(h_cos_p, dfreco.cos_p)
        h_cos_p.Draw()
        cYields.SaveAs("h_cos_p.png")

        cYields.SetLogy(True)
        h_nsigTPC_K_0 = TH1F("nsigma TPC K_0", "", 200, dfreco.nsigTPC_K_0.min(),
                             dfreco.nsigTPC_K_0.max())
        fill_hist(h_nsigTPC_K_0, dfreco.nsigTPC_K_0)
        h_nsigTPC_K_0.Draw()
        cYields.SaveAs("nsigTPC_K_0.png")

        h_nsigTPC_K_1 = TH1F("nsigTPC_K_1 ", "", 200, dfreco.nsigTPC_K_1.min(),
                             dfreco.nsigTPC_K_1.max())
        fill_hist(h_nsigTPC_K_1, dfreco.nsigTPC_K_1)
        h_nsigTPC_K_1.Draw()
        cYields.SaveAs("h_nsigTPC_K_1.png")

        h_nsigTOF_K_0 = TH1F("nsigma TOF K_0", "", 200, dfreco.nsigTOF_K_0.min(),
                             dfreco.nsigTOF_K_0.max())
        fill_hist(h_nsigTOF_K_0, dfreco.nsigTOF_K_0)
        h_nsigTOF_K_0.Draw()
        cYields.SaveAs("nsigTOF_K_0.png")

        h_nsigTOF_K_1 = TH1F("nsigTOF_K_1 ", "", 200, dfreco.nsigTOF_K_1.min(),
                             dfreco.nsigTOF_K_1.max())
        fill_hist(h_nsigTOF_K_1, dfreco.nsigTOF_K_1)
        h_nsigTOF_K_1.Draw()
        cYields.SaveAs("h_nsigTOF_K_1.png")

        cYields.SetLogy(False)
        h_pt_prong0 = TH1F("pt prong_0", "", 200, dfreco.pt_prong0.min(),
                           dfreco.pt_prong0.max())
        fill_hist(h_pt_prong0, dfreco.pt_prong0)
        h_pt_prong0.Draw()
        cYields.SaveAs("h_pt_prong0.png")

        h_pt_prong1 = TH1F("pt prong_1", "", 200, dfreco.pt_prong1.min(),
                           dfreco.pt_prong1.max())
        fill_hist(h_pt_prong1, dfreco.pt_prong1)
        h_pt_prong1.Draw()
        cYields.SaveAs("h_pt_prong1.png")

        h_eta_prong0 = TH1F("eta prong_0", "", 200, dfreco.eta_prong0.min(),
                            dfreco.eta_prong0.max())
        fill_hist(h_eta_prong0, dfreco.eta_prong0)
        h_eta_prong0.Draw()
        cYields.SaveAs("h_eta_prong0.png")

        h_eta_prong1 = TH1F("eta prong_1", "", 200, dfreco.eta_prong1.min(),
                            dfreco.eta_prong1.max())
        fill_hist(h_eta_prong1, dfreco.eta_prong1)
        h_eta_prong1.Draw()
        cYields.SaveAs("h_eta_prong1.png")

        h_eta_cand = TH1F("eta cand", "", 200, dfreco.eta_cand.min(),
                          dfreco.eta_cand.max())
        fill_hist(h_eta_cand, dfreco.eta_cand)
        h_eta_cand.Draw()
        cYields.SaveAs("h_eta_cand.png")

        h_phi_cand = TH1F("phi cand", "", 200, dfreco.phi_cand.min(),
                          dfreco.phi_cand.max())
        fill_hist(h_phi_cand, dfreco.phi_cand)
        h_phi_cand.Draw()
        cYields.SaveAs("h_phi_cand.png")

        h_pt_cand = TH1F("pt cand", "", 200, dfreco.pt_cand.min(),
                         dfreco.pt_cand.max())
        fill_hist(h_pt_cand, dfreco.pt_cand)
        h_pt_cand.Draw()
        cYields.SaveAs("h_pt_cand.png")

    grouped = dfreco.groupby(["run_number", "ev_id"])
    grouplen = pd.array(grouped.size())
    gmin = grouplen.min()
    gmax = grouplen.max()
    g_bins = gmax - gmin
    print("creating grouplen array", end - start, "sec")
    h_grouplen = TH1F("group_length", "", int(g_bins), gmin, gmax)
    fill_hist(h_grouplen, grouplen)
    cYields.SetLogy(True)
    h_grouplen.Draw()
    cYields.SaveAs("h_grouplen.png")

    hfile.Write()
    return dfreco
def dqm_getSingleHist_json(server, run, dataset, hist, rootContent=False): postfix = "?rootcontent=1" if rootContent else "" datareq = urllib2.Request(('%s/jsonfairy/archive/%s/%s/%s%s') % (server, run, dataset, hist, postfix)) datareq.add_header('User-agent', ident) # Get data data = eval( re.sub(r"\bnan\b", "0", urllib2.build_opener(X509CertOpen()).open(datareq).read()), {"__builtins__": None}, {}) histo = data['hist'] # Now convert into real ROOT histogram object if 'TH1' in histo['type']: # The following assumes a TH1F object contents = histo['bins']['content'] nbins = len(contents) xmin = histo['xaxis']['first']['value'] xmax = histo['xaxis']['last']['value'] roothist = TH1F(histo['stats']['name'], histo['title'], nbins, xmin, xmax) for xx in range(1, nbins + 1): roothist.SetBinContent(xx, contents[xx - 1]) roothist.SetBinError(xx, histo['bins']['error'][xx - 1]) roothist.SetEntries(histo['stats']['entries']) stats = array('d') stats.append(histo['stats']['entries']) stats.append(histo['stats']['entries']) stats.append(histo['stats']['entries'] * histo['stats']['mean']['X']['value']) stats.append( (histo['stats']['rms']['X']['value'] * histo['stats']['rms']['X']['value'] + histo['stats']['mean']['X']['value'] * histo['stats']['mean']['X']['value']) * histo['stats']['entries']) roothist.PutStats(stats) elif (histo['type'] == 'TProfile'): contents = histo['bins']['content'] nbins = len(contents) xmin = histo['xaxis']['first']['value'] xmax = histo['xaxis']['last']['value'] roothist = TProfile(histo['stats']['name'], histo['title'], nbins, xmin, xmax) roothist.SetErrorOption("g") for xx in range(0, nbins): if (histo['bins']['error'][xx] != 0): ww = 1. / (histo['bins']['error'][xx] * histo['bins']['error'][xx]) else: ww = 0. roothist.Fill( xmin + (2 * xx + 1) * ((xmax - xmin) / (nbins * 2.0)), contents[xx], ww) # roothist.SetBinContent(xx, contents[xx-1]) # roothist.SetBinError(xx, histo['bins']['error'][xx-1]) roothist.SetEntries(histo['stats']['entries']) stats = array('d') for i in range(0, 6): stats.append(i) roothist.GetStats(stats) stats[0] = (histo['stats']['entries']) stats[1] = (histo['stats']['entries']) stats[2] = (histo['stats']['entries'] * histo['stats']['mean']['X']['value']) stats[3] = ((histo['stats']['rms']['X']['value'] * histo['stats']['rms']['X']['value'] + histo['stats']['mean']['X']['value'] * histo['stats']['mean']['X']['value']) * histo['stats']['entries']) roothist.PutStats(stats) elif 'TH2' in histo['type']: contents = histo['bins']['content'] nbinsx = histo['xaxis']['last']['id'] xmin = histo['xaxis']['first']['value'] xmax = histo['xaxis']['last']['value'] nbinsy = histo['yaxis']['last']['id'] ymin = histo['yaxis']['first']['value'] ymax = histo['yaxis']['last']['value'] roothist = TH2F(histo['stats']['name'], histo['title'], nbinsx, xmin, xmax, nbinsy, ymin, ymax) for xx in range(1, nbinsx + 1): for yy in range(1, nbinsy + 1): roothist.SetBinContent(xx, yy, contents[yy - 1][xx - 1]) roothist.SetEntries(histo['stats']['entries']) stats = array('d') stats.append(histo['stats']['entries']) stats.append(histo['stats']['entries']) stats.append(histo['stats']['entries'] * histo['stats']['mean']['X']['value']) stats.append( (histo['stats']['rms']['X']['value'] * histo['stats']['rms']['X']['value'] + histo['stats']['mean']['X']['value'] * histo['stats']['mean']['X']['value']) * histo['stats']['entries']) stats.append(histo['stats']['entries'] * histo['stats']['mean']['Y']['value']) stats.append( (histo['stats']['rms']['Y']['value'] * 
histo['stats']['rms']['Y']['value'] + histo['stats']['mean']['Y']['value'] * histo['stats']['mean']['Y']['value']) * histo['stats']['entries']) roothist.PutStats(stats) elif (histo['type'] == 'TProfile2D'): contents = histo['bins']['content'] nbinsx = histo['xaxis']['last']['id'] xmin = histo['xaxis']['first']['value'] xmax = histo['xaxis']['last']['value'] nbinsy = histo['yaxis']['last']['id'] ymin = histo['yaxis']['first']['value'] ymax = histo['yaxis']['last']['value'] roothist = TProfile2D(histo['stats']['name'], histo['title'], nbinsx, xmin, xmax, nbinsy, ymin, ymax) for xx in range(0, nbinsx): for yy in range(0, nbinsy): roothist.Fill( xmin + (2 * xx + 1) * ((xmax - xmin) / (nbinsx * 2.0)), ymin + (2 * yy + 1) * ((ymax - ymin) / (nbinsy * 2.0)), 0, 1) for xx in range(1, nbinsx + 1): for yy in range(1, nbinsy + 1): roothist.SetBinContent(xx, yy, contents[yy - 1][xx - 1]) roothist.SetEntries(histo['stats']['entries']) return roothist
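# Example call for the function above. The server URL, run number, dataset and
# histogram path below are placeholders, not real values; left commented out.
# roothist = dqm_getSingleHist_json("https://<dqm-server>/dqm/offline", 123456,
#                                   "/SomeDataset/SomeEra/DQMIO",
#                                   "Folder/Subfolder/histName",
#                                   rootContent=True)
# roothist.Draw()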
from ROOT import TCanvas, TFile, TProfile, TNtuple, TH1F, TH2F
from ROOT import gROOT, gBenchmark, gRandom, gSystem, Double

# Create a new canvas, and customize it.
c1 = TCanvas( 'c1', 'Dynamic Filling Example', 200, 10, 700, 500 )
c1.SetFillColor( 42 )
c1.GetFrame().SetFillColor( 21 )
c1.GetFrame().SetBorderSize( 6 )
c1.GetFrame().SetBorderMode( -1 )

# Create a new ROOT binary machine-independent file. Note that this file may
# contain any kind of ROOT objects: histograms, pictures, graphics objects,
# detector geometries, tracks, events, etc. This file is now becoming the
# current directory.
hfile = gROOT.FindObject( 'py-hsimple.root' )
if hfile:
    hfile.Close()
hfile = TFile( 'py-hsimple.root', 'RECREATE', 'Demo ROOT file with histograms' )

# Create some histograms, a profile histogram and an ntuple.
hpx = TH1F( 'hpx', 'This is the px distribution', 100, -4, 4 )
hpxpy = TH2F( 'hpxpy', 'py vs px', 40, -4, 4, 40, -4, 4 )
hprof = TProfile( 'hprof', 'Profile of pz versus px', 100, -4, 4, 0, 20 )
ntuple = TNtuple( 'ntuple', 'Demo ntuple', 'px:py:pz:random:i' )

# Set canvas/frame attributes.
hpx.SetFillColor( 48 )

gBenchmark.Start( 'hsimple' )

# Initialize random number generator.
gRandom.SetSeed()
rannor, rndm = gRandom.Rannor, gRandom.Rndm

# For speed, bind and cache the Fill member functions.
histos = [ 'hpx', 'hpxpy', 'hprof', 'ntuple' ]
for name in histos:
    exec('%sFill = %s.Fill' % (name, name))

# Fill histograms randomly.
px, py = Double(), Double()
kUPDATE = 1000
for i in range( 25000 ):
    # Generate random values.
    rannor( px, py )
    pz = px*px + py*py
    random = rndm(1)

    # Fill histograms.
    hpx.Fill( px )
    hpxpy.Fill( px, py )
    hprof.Fill( px, pz )
    ntuple.Fill( px, py, pz, random, i )

    # Update display every kUPDATE events.
    if i and i%kUPDATE == 0:
# Simple example of generating a histogram from a gaussian function
# and fitting it
#
# Author: Izaak Neutelings (August 2017)
# https://root.cern.ch/root/htmldoc/guides/users-guide/FittingHistograms.html
# https://root.cern.ch/doc/master/classTFormula.html
# https://root.cern.ch/doc/master/classTF1.html
import ROOT
from ROOT import TF1, TH1F, TCanvas, TLegend, kBlack, kBlue, kRed, kViolet, kGreen
from math import exp

print ">>> generating signal..."
function1 = TF1("f1", "gaus", 0, 10)
function1.SetParameters(1, 2, 0.5)
hist = TH1F("signal", "signal", 50, 0, 5)
hist.FillRandom("f1", 1000)

print ">>> make fit functions..."
# pol2 = [0]+[1]*x+[2]*x**2
# gaus = [0]*exp(-0.5*((x-[1])/[2])**2)
norm = hist.GetMaximum()
function2 = TF1("f2", "gaus", 0, 10)
function3 = TF1("f3", "pol2", 0, 10)
function2.SetParNames("N", "mu", "sigma")
function3.SetParNames("C", "B", "A")
function2.SetParameters(1.3 * norm, 1, 0.4)           # start values to help fit
function3.SetParameters(-3 * norm, 4 * norm, -norm)   # start values to help fit
function2.SetParLimits(0, 0.2 * norm, 1.5 * norm)     # set limits on parameter 1
function3.SetParLimits(1, 0.5 * norm, 10.0 * norm)    # set limits on parameter 1
function3.SetParLimits(2, -1.4 * norm,
def calc_punzi_FOM_vs_ctau(cutlist, labellist=[], mass_point=40, additional_string="", alpha=2, CL=5, FOM='punzi', header=""): file = {} nevt = {} tree = {} effs = {} chain = {} hist = {} eff_dict = {k: {} for k in cutlist} back_int = {k: {} for k in cutlist} back_int_weight = {k: {} for k in cutlist} back_eff = {k: {} for k in cutlist} punzi_dict = {k: {} for k in cutlist} graph = {} back_graph = {} ncuts = len(cutlist) if labellist == []: labellist = cutlist print NTUPLEDIR print "............." #prepare ctau ordered array for 1D plot mass_array = [] ctau_array = [] #for signal we have the normal efficiency for i, s in enumerate(sign): file[s] = TFile(NTUPLEDIR + samples[s]['files'][0] + ".root", "READ") # Read TFile tree[s] = file[s].Get("ntuple/tree") # Read TTree nevt[s] = (file[s].Get('counter/c_nEvents')).GetBinContent( 1) # all gen events before cuts! #tree[s] = file[s].Get("skim") # Read TTree #nevt[s] = tree[s].GetEntries("")#if the tree is skimmed, this becomes a relative denominator #nevt[s] = (file[s].Get('c_nEvents')).GetBinContent(1)# all gen events before cuts! filename = TFile(NTUPLEDIR + samples[s]['files'][0] + ".root", "READ") if verbose_add: print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' if verbose_add: print filename if verbose_add: print "x-check: n gen events in counter, first bin:" if verbose_add: print(filename.Get('c_nEvents')).GetBinContent(1) if verbose_add: print "x-check: n entries in counter:" if verbose_add: print(filename.Get('c_nEvents')).GetEntries() effs[s] = [0] * (ncuts + 1) effs[s] = [0] * (ncuts + 1) weight = "1" #"EventWeight" var = "isMC" if samples[s]['mass'] not in mass_array: mass_array.append(samples[s]['mass']) if samples[s]['ctau'] not in ctau_array: ctau_array.append(samples[s]['ctau']) for j, c in enumerate(cutlist): tot_gen = nevt[s] n = tree[s].GetEntries("(" + cutlist[j] + ")") #wat?#test_op = cutlist[j] + " && number_of_matched_Jets>=1" #wat?#n = tree[s].GetEntries("(" + test_op + ")") ###BUGFIX: efficiency should be computed w.r.t. histo integral #hist[s+"_cut"+str(j)] = TH1F(s+"_cut"+str(j), ";"+variable[var]['title'], variable[var]['nbins'], variable[var]['min'], variable[var]['max']) #hist[s+"_cut"+str(j)].Sumw2() #cutstring = "("+weight+")" + ("*("+cutlist[j]+")" if len(cutlist[j])>0 else "") #tree[s].Project(s+"_cut"+str(j), var, cutstring) #hist[s+"_cut"+str(j)].SetOption("%s" % tree[s].GetTree().GetEntriesFast()) if verbose_add: print '\n' if verbose_add: print '**********************************************' if verbose_add: print "cut: ", c if verbose_add: print 'over signal ', s if verbose_add: print '\n' if verbose_add: print "signal num: ", n if verbose_add: print "signal den: ", tot_gen #if verbose_add: print "BUGFIX!!!!!!!!!!!" #if verbose: print "BUGFIX!!!!!!!!!!!" #if verbose_add: print "signal num from integral: ", hist[s+"_cut"+str(j)].Integral() #if verbose_add: print "signal den from generator: ", tot_gen #if verbose: print "BUGFIX!!!!!!!!!!!" if verbose_add: print("signal eff %.2f") % (float(n) / (tot_gen) * 100) if tot_gen == 0: effs[s][j] = float(0.) 
else: effs[s][j] = (float(n) / (tot_gen)) eff_dict[c][s] = { 'mass': samples[s]['mass'], 'ctau': samples[s]['ctau'], 'eff': effs[s][j], 'nevents': n } #sort mass array masses = np.array(mass_array) masses.sort() ctaus = np.array(ctau_array) ctaus.sort() #define multigraph mg = TMultiGraph() #leg = TLegend(0.78, 0.7, 0.98, 0.98) #leg2 = TLegend(0., 0.4, 0.98, 0.98) #leg2 = TLegend(0.3, 0.11, 0.65, 0.45)#DCMS,gen matching leg2 = TLegend(0.4, 0.11, 0.85, 0.45) #DCMS,summary plot leg2 = TLegend(0.4 - 0.3, 0.11 + 0.43, 0.85 + 0.05 - 0.3, 0.45 + 0.43) #EXO,summary plot leg2 = TLegend(0.4, 0.11, 0.85 + 0.05, 0.45) #EXO,summary plot leg3 = TLegend(0., 0.5, 0.5, 1.) #2 plots leg = TLegend(0., 0.4, 0.98, 0.98) leg.SetTextSize(0.03) leg2.SetTextSize(0.03) leg2.SetTextSize(0.025) leg.SetBorderSize(0) leg2.SetBorderSize(0) leg.SetHeader("Signal: m_{#pi}=" + str(mass_point) + " GeV") leg2.SetHeader("Signal: m_{#pi}=" + str(mass_point) + " GeV") leg3.SetTextSize(0.03) leg3.SetTextSize(0.025) leg3.SetBorderSize(0) leg3.SetHeader("Signal: m_{#pi}=" + str(mass_point) + " GeV") #for background let's first consider the cut for j, c in enumerate(cutlist): print '\n' print "cut: ", c print 'over background' print '\n' #then loop over background integral = 0 weighted_integral = 0 back_tot_events = 0 for i, s in enumerate(back): chain[s] = TChain("ntuple/tree") #chain[s] = TChain("skim") #print "back: ", s back_file = {} for p, ss in enumerate(samples[s]['files']): back_file[ss] = TFile(NTUPLEDIR + ss + ".root", "READ") # Read TFile #?#if verbose: print "file: ", ss #?#if verbose: print "gen events: ", (back_file[ss].Get('counter/c_nEvents')).GetBinContent(1) #?#if verbose: print "tree events: ", (back_file[ss].Get('ntuple/tree')).GetEntries() back_tot_events += ( back_file[ss].Get('counter/c_nEvents')).GetBinContent(1) #back_tot_events += (back_file[ss].Get('c_nEvents')).GetBinContent(1) chain[s].Add(NTUPLEDIR + ss + ".root") #print "MODIFIED WEIGHT!!!!!!" #weight = ("EventWeight*%s/5000." % str(back_tot_events)) weight = "EventWeight" #var = "nCHSJets" var = "isMC" hist[s] = TH1F(s, ";" + variable[var]['title'], variable[var]['nbins'], variable[var]['min'], variable[var]['max']) hist[s].Sumw2() cutstring = "(" + weight + ")" + ("*(" + cutlist[j] + ")" if len(cutlist[j]) > 0 else "") chain[s].Project(s, var, "") #"1*"+"("+weight+")") hist[s].SetOption("%s" % chain[s].GetTree().GetEntriesFast()) #if verbose: print "Hist content, no cut:" #if verbose: print hist[s].Print() #?#if verbose: print "events in the histo with get entries with empty project: ", hist[s].GetEntries() #?#if verbose: print "area under histo with empty project: ", hist[s].Integral() chain[s].Project(s, var, cutstring) #"1*"+"("+weight+")") hist[s].SetOption("%s" % chain[s].GetTree().GetEntriesFast()) hist[s].Scale( samples[s]['weight'] if hist[s].Integral() >= 0 else 0) #?#if verbose: print "events in the histo with get entries after project: ", hist[s].GetEntries() #?#if verbose: print "area under histo after project: ", hist[s].Integral() if verbose: print "Hist content, with cut:" if verbose: print hist[s].Print() integral += hist[s].GetEntries() weighted_integral += hist[s].Integral() back_int[c] = integral back_int_weight[c] = weighted_integral if back_tot_events == 0: back_eff[c] = float(0.) 
else: back_eff[c] = float(integral) / float(back_tot_events) if verbose: print "cut: ", c if verbose: print "back tot events (unweighted):", back_tot_events if verbose: print "back integral (unweighted): ", back_int[c] if verbose: print "back integral (weighted): ", back_int_weight[c] if verbose: print "back eff (unweighted): ", back_eff[c] * 100 if FOM == "signaleff": punzi_dict[c]['back'] = {'back': back_eff[c] * 100} for i, s in enumerate(sign): if verbose: print "signal efficiency: ", eff_dict[c][s]['eff'] * 100 if FOM == "punzi": punzi_dict[c][s] = { 'sign': eff_dict[c][s]['eff'] / (CL**2 / 2. + alpha * math.sqrt(back_int_weight[c]) + (CL / 2.) * math.sqrt(CL**2 + 4 * alpha * math.sqrt( back_int_weight[c]) + 4 * back_int_weight[c])) } elif FOM == "signaleff": punzi_dict[c][s] = {'sign': eff_dict[c][s]['eff'] * 100} elif FOM == "entries": punzi_dict[c][s] = {'sign': eff_dict[c][s]['nevents']} else: print "not punzi FOM, aborting!" exit() if FOM == "signaleff": dummy = TGraph() #len(ct),ct, np.array(ct)) dummy.SetMarkerStyle(0) dummy.SetLineWidth(2) dummy.SetMarkerSize(1.) dummy.SetLineColor(15) dummy.SetLineStyle(2) if header != "": leg2.AddEntry(dummy, header, '') leg3.AddEntry(dummy, header, '') #for each cut, we need a graph for j, c in enumerate(cutlist): #first let's build the ordered punzi vector w.r.t. masses, for a chosen ctau punzi_array = [] back_array = [] for la in ctaus: #la = str(a) if la == 0.001: st = CHANNEL + "_M" + str(mass_point) + "_ctau0" elif la == 0.05 or la == 0.1: st = CHANNEL + "_M" + str(mass_point) + "_ctau" + str( str(la).replace("0.", "0p")) else: st = CHANNEL + "_M" + str(mass_point) + "_ctau" + str(int(la)) #st = "VBFH_M"+str(mass_point)+"_ctau"+str(a) punzi_array.append(punzi_dict[c][st]['sign']) mass = array('d', masses) ct = array('d', ctaus) p_array = array('d', punzi_array) #graph[c] = TGraph(len(mass),mass, np.array(p_array)) graph[c] = TGraph(len(ct), ct, np.array(p_array)) graph[c].SetMarkerStyle(markers[j]) #21 graph[c].SetLineWidth(3) graph[c].SetMarkerSize(1.2) graph[c].SetMarkerColor(colors[j]) graph[c].SetLineColor(colors[j]) graph[c].SetFillColor(colors[j]) #graph[c].SetLogx() leg.AddEntry(graph[c], labellist[j], 'PL') leg2.AddEntry(graph[c], labellist[j], 'PL') leg3.AddEntry(graph[c], labellist[j], 'PL') mg.Add(graph[c]) if FOM == "signaleff": #add plot for background for a in ctaus: back_array.append(punzi_dict[c]['back']['back']) mass = array('d', masses) ct = array('d', ctaus) e_array = array('d', back_array) #back_graph[c] = TGraph(len(mass),mass, np.array(e_array)) back_graph[c] = TGraph(len(ct), ct, np.array(e_array)) back_graph[c].SetMarkerStyle(0) back_graph[c].SetLineWidth(2) back_graph[c].SetMarkerSize(1.) back_graph[c].SetMarkerColor(colors[j]) back_graph[c].SetLineColor(colors[j]) back_graph[c].SetLineStyle(2) back_graph[c].SetFillColor(colors[j]) #back_graph[c].SetLogx() #leg.AddEntry(back_graph[c],labellist[j]+" bkg.",'PL') #w#leg2.AddEntry(back_graph[c],labellist[j]+" bkg.",'PL') #w#mg.Add(back_graph[c]) if FOM == "signaleff": dummy = TGraph(len(ct), ct, np.array(e_array)) dummy.SetMarkerStyle(0) dummy.SetLineWidth(2) dummy.SetMarkerSize(1.) 
dummy.SetLineColor(15) dummy.SetLineStyle(2) #w#leg2.AddEntry(dummy, 'cuts on bkg.','PL') #cmg = TCanvas("cmg", "cmg", 2000, 1400) #cmg = TCanvas("cmg", "cmg", 2000, 800)#best #cmg = TCanvas("cmg", "cmg", 1200, 1000) cmg = TCanvas("cmg", "cmg", 1300, 800) #DCMS cmg.cd() cmg.SetGrid() cmg.SetLogx() #if FOM=="signaleff": # cmg.SetLogx() #pad1 = TPad("pad1", "pad1", 0, 0., 0.85, 1.0) #pad1 = TPad("pad1", "pad1", 0, 0., 0.7, 1.0) #pad1.SetGrid() #pad1.SetLogx() if FOM == "signaleff": print "LOL" #pad1.SetLogy() #pad1.SetLogy() #pad1.Draw() #pad1.cd() #W#if FOM=="signaleff": #w#mg.SetMaximum(101) #mg.SetMinimum(1.e-50) mg.SetMinimum(0.) #!! mg.Draw("APL") mg.GetXaxis().SetTitleSize(0.05) mg.GetYaxis().SetTitleSize(0.05) mg.GetXaxis().SetTitle('c#tau_{#pi} (mm)') mg.GetYaxis().SetTitleOffset(0.9) if FOM == "punzi": mg.GetYaxis().SetTitle('Punzi significance @ ' + str(alpha) + ' #sigma, ' + CHANNEL + ' cuts') #mg.GetYaxis().SetTitleOffset(1.5) elif FOM == "signaleff": #mg.GetYaxis().SetTitle('Signal efficiency, '+CHANNEL+' cuts (%)') mg.GetYaxis().SetTitle('Signal gen-matching efficiency, ' + CHANNEL + ' (%)') elif FOM == "entries": mg.GetYaxis().SetTitle('Signal entries surviving cuts') else: print "not punzi FOM, aborting" latex = TLatex() latex.SetNDC() latex.SetTextSize(0.05) latex.SetTextColor(1) latex.SetTextFont(42) latex.SetTextAlign(33) latex.SetTextFont(62) latex.DrawLatex(0.25, 0.96, "CMS") latex.SetTextFont(52) latex.DrawLatex(0.66, 0.96, "Simulation Preliminary") cmg.Update() cmg.cd() leg2.SetTextSize(0.04) #leg.Clear()#????????? #w#leg2.Draw() cmgL = TCanvas("cmgL", "cmgL", 2000, 800) #DCMS cmgL.cd() #pad2 = TPad("pad2", "pad2", 0.85, 0., 1, 1.0) #pad2 = TPad("pad2", "pad2", 0.7, 0., 1, 1.0) #pad2.SetGrid() #pad2.SetLogx()macro/VBF_punzi_LLP_AOD.py #pad2.Draw() #pad2.cd() leg3.SetTextSize(0.04) #leg.Clear()#????????? leg3.Draw() cmgL.Update() if FOM == "punzi": cmg.Print(OUTPUTDIR + "Punzi_correct_" + CHANNEL + "_m" + str(mass_point) + "_" + str(alpha) + "sigma" + additional_string + ".pdf") cmg.Print(OUTPUTDIR + "Punzi_correct_" + CHANNEL + "_m" + str(mass_point) + "_" + str(alpha) + "sigma" + additional_string + ".png") cmgL.Print(OUTPUTDIR + "Punzi_correct_" + CHANNEL + "_m" + str(mass_point) + "_" + str(alpha) + "sigma" + additional_string + "_L.pdf") cmgL.Print(OUTPUTDIR + "Punzi_correct_" + CHANNEL + "_m" + str(mass_point) + "_" + str(alpha) + "sigma" + additional_string + "_L.png") elif FOM == "signaleff": cmg.Print(OUTPUTDIR + "SignalEff_" + CHANNEL + "_m" + str(mass_point) + additional_string + ".pdf") cmg.Print(OUTPUTDIR + "SignalEff_" + CHANNEL + "_m" + str(mass_point) + additional_string + ".png") cmgL.Print(OUTPUTDIR + "SignalEff_" + CHANNEL + "_m" + str(mass_point) + additional_string + "_L.pdf") cmgL.Print(OUTPUTDIR + "SignalEff_" + CHANNEL + "_m" + str(mass_point) + additional_string + "_L.png") elif FOM == "entries": cmg.Print(OUTPUTDIR + "SignalEntries_" + CHANNEL + "_m" + str(mass_point) + additional_string + ".pdf") cmg.Print(OUTPUTDIR + "SignalEntries_" + CHANNEL + "_m" + str(mass_point) + additional_string + ".png") cmgL.Print(OUTPUTDIR + "SignalEntries_" + CHANNEL + "_m" + str(mass_point) + additional_string + "_L.pdf") cmgL.Print(OUTPUTDIR + "SignalEntries_" + CHANNEL + "_m" + str(mass_point) + additional_string + "_L.png") else: print "not punzi FOM, aborting" if not options.bash: raw_input("Press Enter to continue...") cmg.Close()
# ROOT imports
import ROOT
from ROOT import TChain, TH1F, TFile, vector, gROOT
# custom ROOT classes
from ROOT import alp, ComposableSelector, CounterOperator, TriggerOperator, JetFilterOperator, BTagFilterOperator, JetPairingOperator, DiJetPlotterOperator
from ROOT import BaseOperator, EventWriterOperator, IsoMuFilterOperator, MetFilterOperator, JetPlotterOperator, FolderOperator, MiscellPlotterOperator
from ROOT import ThrustFinderOperator, HemisphereProducerOperator, HemisphereWriterOperator, DiHiggsFilterOperator

# imports from ../python
from Analysis.alp_analysis.alpSamples import samples
from Analysis.alp_analysis.samplelists import samlists
from Analysis.alp_analysis.triggerlists import triggerlists
from Analysis.alp_analysis.workingpoints import wps

TH1F.AddDirectory(0)

# parsing parameters
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-e", "--numEvts", help="number of events", type=int, default='-1')
parser.add_argument("-s", "--samList", help="sample list", default="")
parser.add_argument("--btag", help="which btag algo", default='cmva')
parser.add_argument("-i", "--iDir", help="input directory",
def pullsVertical(fileName): content = filterPullFile(fileName) nbins, off = len(content), 0.10 b_pulls = TH1F("b_pulls", ";;Pulls", nbins, 0.-off, nbins-off) s_pulls = TH1F("s_pulls", ";;Pulls", nbins, 0.+off, nbins+off) # for i, s in enumerate(content): l = s.split() b_pulls.GetXaxis().SetBinLabel(i+1, l[0]) s_pulls.GetXaxis().SetBinLabel(i+1, l[0]) b_pulls.SetBinContent(i+1, float(l[1])) b_pulls.SetBinError(i+1, float(l[2])) s_pulls.SetBinContent(i+1, float(l[3])) s_pulls.SetBinError(i+1, float(l[4])) b_pulls.SetFillStyle(3005) b_pulls.SetFillColor(923) b_pulls.SetLineColor(923) b_pulls.SetLineWidth(1) b_pulls.SetMarkerStyle(20) b_pulls.SetMarkerSize(1.25) s_pulls.SetLineColor(602) s_pulls.SetMarkerColor(602) s_pulls.SetMarkerStyle(24) #24 s_pulls.SetLineWidth(1) b_pulls.GetYaxis().SetRangeUser(-2.5, 2.5) # Graphs h_pulls = TH2F("pulls", "", 6, -3., 3., nbins, 0, nbins) B_pulls = TGraphAsymmErrors(nbins) S_pulls = TGraphAsymmErrors(nbins) boxes = [] canvas = TCanvas("canvas", "Pulls", 600, 150+nbins*10)#nbins*20) canvas.cd() canvas.SetGrid(0, 1) canvas.GetPad(0).SetTopMargin(0.01) canvas.GetPad(0).SetRightMargin(0.01) canvas.GetPad(0).SetBottomMargin(0.05) canvas.GetPad(0).SetLeftMargin(0.25)#(0.25)#(0.065) canvas.GetPad(0).SetTicks(1, 1) for i, s in enumerate(content): l = s.split() if "1034h" in l[0]: l[0]="CMS_PDF_13TeV" h_pulls.GetYaxis().SetBinLabel(i+1, l[0].replace('CMS2016_', ''))#C #y1 = gStyle.GetPadBottomMargin() #y2 = 1. - gStyle.GetPadTopMargin() #h = (y2 - y1) / float(nbins) #y1 = y1 + float(i) * h #y2 = y1 + h #box = TPaveText(0, y1, 1, y2, 'NDC') #box.SetFillColor(0) #box.SetTextSize(0.02) #box.SetBorderSize(0) #box.SetTextAlign(12) #box.SetMargin(0.005) #if i % 2 == 0: # box.SetFillColor(18) #box.Draw() #boxes.append(box) B_pulls.SetPoint(i+1,float(l[1]),float(i+1)-0.3)#C B_pulls.SetPointError(i+1,float(l[2]),float(l[2]),0.,0.)#C for i, s in enumerate(content): l = s.split() S_pulls.SetPoint(i+1,float(l[3]),float(i+1)-0.7)#C S_pulls.SetPointError(i+1,float(l[4]),float(l[4]),0.,0.)#C h_pulls.GetXaxis().SetTitle("(#hat{#theta} - #theta_{0}) / #Delta#theta") h_pulls.GetXaxis().SetLabelOffset(-0.01) h_pulls.GetXaxis().SetTitleOffset(.6) h_pulls.GetYaxis().SetNdivisions(nbins, 0, 0) B_pulls.SetFillColor(1) B_pulls.SetLineColor(1) B_pulls.SetLineStyle(1) B_pulls.SetLineWidth(2) B_pulls.SetMarkerColor(1) B_pulls.SetMarkerStyle(20) B_pulls.SetMarkerSize(1)#(0.75) S_pulls.SetFillColor(629) S_pulls.SetLineColor(629) S_pulls.SetMarkerColor(629) S_pulls.SetLineWidth(2) S_pulls.SetMarkerStyle(20) S_pulls.SetMarkerSize(1) box1 = TBox(-1., 0., 1., nbins) box1.SetFillStyle(3001) #box1.SetFillStyle(0) box1.SetFillColor(417) box1.SetLineWidth(2) box1.SetLineStyle(2) box1.SetLineColor(417) box2 = TBox(-2., 0., 2., nbins) box2.SetFillStyle(3001) #box2.SetFillStyle(0) box2.SetFillColor(800) box2.SetLineWidth(2) box2.SetLineStyle(2) box2.SetLineColor(800) leg = TLegend(0.1, -0.05, 0.7, 0.08) leg.SetBorderSize(0) leg.SetFillStyle(0) leg.SetFillColor(0) leg.SetNColumns(2) leg.AddEntry(B_pulls, "B-only fit", "lp") leg.AddEntry(S_pulls, "S+B fit", "lp") if text: leg.AddEntry(0, text, "") h_pulls.Draw("") box2.Draw() box1.Draw() B_pulls.Draw("P6SAME") S_pulls.Draw("P6SAME") leg.Draw() # drawCMS(35867, "Preliminary") # drawAnalysis("VH") # drawRegion(outName) canvas.Print(outName+".png") canvas.Print(outName+".pdf") if not gROOT.IsBatch(): raw_input("Press Enter to continue...")
px1 = float(chi[0].split()[6])
py1 = float(chi[0].split()[7])
pz1 = float(chi[0].split()[8])
e1 = float(chi[0].split()[9])
p1 = TLorentzVector(px1, py1, pz1, e1)
if chi and b:
    pi = []
    pi = p + p1
    m_A0.append(pi.M())
    phi_A0.append(pi.Phi())
    eta_A0.append(pi.Eta())
    pt_A0.append(pi.Pt())

h_mass1 = TH1F("Inv.Mass of A0", "", 200, 200, 600)
for i in m_A0:
    h_mass1.Fill(i)
h_pt1 = TH1F("pT of A0", "", 100, -100, 1000)
for i in pt_A0:
    h_pt1.Fill(i)
h_phi1 = TH1F("Phi of A0", "", 10, -5, 5)
for i in phi_A0:
    h_phi1.Fill(i)
h_eta1 = TH1F("Eta of A0", "", 10, -8, 8)
for i in eta_A0:
    h_eta1.Fill(i)
def pulls(fileName):
    content = filterPullFile(fileName)
    nbins, off = len(content), 0.10

    b_pulls = TH1F("b_pulls", ";;Pulls", nbins, 0.-off, nbins-off)
    s_pulls = TH1F("s_pulls", ";;Pulls", nbins, 0.+off, nbins+off)

    for i, s in enumerate(content):
        l = s.split()
        b_pulls.GetXaxis().SetBinLabel(i+1, l[0])
        s_pulls.GetXaxis().SetBinLabel(i+1, l[0])
        b_pulls.SetBinContent(i+1, float(l[1]))
        b_pulls.SetBinError(i+1, float(l[2]))
        s_pulls.SetBinContent(i+1, float(l[3]))
        s_pulls.SetBinError(i+1, float(l[4]))

    b_pulls.SetFillStyle(3005)
    b_pulls.SetFillColor(923)
    b_pulls.SetLineColor(923)
    b_pulls.SetLineWidth(2)
    b_pulls.SetMarkerStyle(20)
    b_pulls.SetMarkerSize(1.25)
    s_pulls.SetLineColor(602)
    s_pulls.SetMarkerColor(602)
    s_pulls.SetMarkerStyle(24)  # 24
    s_pulls.SetLineWidth(2)
    b_pulls.GetYaxis().SetRangeUser(-2.5, 2.5)

    canvas = TCanvas("canvas", "Pulls", 1600, 800)
    canvas.cd()
    canvas.GetPad(0).SetTopMargin(0.06)
    canvas.GetPad(0).SetRightMargin(0.05)
    canvas.GetPad(0).SetBottomMargin(0.15)
    canvas.GetPad(0).SetTicks(1, 1)

    # box = TBox(950., 105., 2000., 200.)
    # box.SetFillStyle(3354)
    # #box.SetFillStyle(0)
    # box.SetFillColor(1)
    # box.SetLineWidth(2)
    # box.SetLineStyle(2)
    # box.SetLineColor(1)
    # box.Draw()

    # Draw
    b_pulls.Draw("PE1")
    #b_pulls.Draw("B")
    s_pulls.Draw("SAME, PE1")

    leg = TLegend(0.25, 0.95, 0.75, 0.995)
    leg.SetBorderSize(0)
    leg.SetFillStyle(0)
    leg.SetFillColor(0)
    leg.SetNColumns(2)
    leg.AddEntry(b_pulls, "background-only fit", "flp")
    leg.AddEntry(s_pulls, "signal+background fit", "lp")
    if text:
        leg.AddEntry(0, text, "")

    line = TLine()
    line.DrawLine(0., 0., nbins, 0.)
    line.SetLineStyle(7)
    line.SetLineWidth(2)
    line.SetLineColor(417)
    line.DrawLine(0., 1., nbins, 1.)
    line.DrawLine(0., -1., nbins, -1.)
    line.SetLineColor(800)
    line.DrawLine(0., 2., nbins, 2.)
    line.DrawLine(0., -2., nbins, -2.)
    leg.Draw()
    # drawCMS(LUMI, "Simulation")
    # drawAnalysis("DM")
    # drawRegion(channel)

    # canvas.Print(outName+".jpg")
    canvas.Print(outName+".png")
    canvas.Print(outName+".pdf")

    if not gROOT.IsBatch():
        raw_input("Press Enter to continue...")
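# Note on the expected input (an assumption inferred from the parsing loop above,
# not from any documentation): filterPullFile is expected to return one entry per
# nuisance parameter with five whitespace-separated fields,
#   name  b-only_pull  b-only_error  s+b_pull  s+b_error
# which map to l[0]..l[4] when each line is split.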
def createH2():
    h2 = TH1F("h2", "h2", 100, -5, 5)
    h2.FillRandom("gaus")
    h2.SetLineColor(kRed)
    h2.SetLineWidth(2)
    return h2
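# A minimal usage sketch (an addition, not part of the original snippet): draw the
# histogram returned by createH2 above and save it; the output file name "h2.png"
# is only an example. It assumes "from ROOT import TH1F, kRed" is already in scope
# for createH2 itself.
from ROOT import TCanvas

c = TCanvas("c_h2", "createH2 demo", 800, 600)
h = createH2()      # Gaussian-filled TH1F with 100 bins in [-5, 5]
h.Draw("HIST")      # draw as a line histogram
c.SaveAs("h2.png")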
list_PT_T = ["T", "PT"]
energy = ["5", "10", "20", "40"]
for E in range(4):
    f1 = ROOT.TFile.Open(
        "/Users/ms08962476/singularity/TIming_Studies/tev" + str(energy[E]) +
        "mm_pythia6_zprime" + str(energy[E]) +
        "tev_qq_with_Eta_cut_for_component_check_1_reco.root", 'r')
    f2 = ROOT.TFile.Open(
        "/Users/ms08962476/singularity/TIming_Studies/tev" + str(energy[E]) +
        "mm_pythia6_zprime" + str(energy[E]) +
        "tev_ww_with_Eta_cut_for_component_check_1_reco.root", 'r')
    myTree_QQ = f1.Get("BDT_variables_Reco")
    myTree_WW = f2.Get("BDT_variables_Reco")
    for j in range(2):
        h1 = TH1F("QQ_plot0", "QQ_plot0", 20, 0, 0.1)
        h2 = TH1F("WW_plot0", "WW_plot0", 20, 0, 0.1)
        h3 = TH1F("QQ_plot1", "QQ_plot1", 20, 0, 0.1)
        h4 = TH1F("WW_plot1", "WW_plot1", 20, 0, 0.1)
        h5 = TH1F("QQ_plot2", "QQ_plot2", 20, 0, 0.1)
        h6 = TH1F("WW_plot2", "WW_plot2", 20, 0, 0.1)
        h7 = TH1F("QQ_plot3", "QQ_plot3", 20, 0, 0.1)
        h8 = TH1F("WW_plot3", "WW_plot3", 20, 0, 0.1)
        h9 = TH1F("QQ_plot4", "QQ_plot4", 20, 0, 0.1)
        h10 = TH1F("WW_plot4", "WW_plot4", 20, 0, 0.1)
        for iii in range(int(myTree_QQ.GetEntriesFast())):
            myTree_QQ.GetEntry(iii)
            A = myTree_QQ.dR_Tr0T_HPt_Reco
            B = myTree_QQ.dR_Tr1T_HPt_Reco
            C = myTree_QQ.dR_Tr2T_HPt_Reco
histo = []
for iMPA in range(0, numMPA):
    c1.cd(iMPA + 1)
    memory = open('data/asynchronous_data_noise_MPA' + str(iMPA + 1)).read().splitlines()
    threshold = int(memory[1])
    # Skip first 3 lines, here the threshold is stored
    events = [int(event) for event in memory[3:]]
    # Calculate how often each number of hits occurs
    histData = [[hits, events.count(hits)] for hits in set(events)]
    print histData
    histo.append(
        TH1F("h1",
             "Common noise analysis for MPA" + str(iMPA + 1) + " at Threshold " + str(threshold),
             max(events), 0, max(events)))
    for hits in histData:
        for i in range(0, hits[1]):
            histo[iMPA].Fill(hits[0])
    XRange = int(histo[iMPA].GetMean() * 2)
    histo[iMPA].SetBins(XRange, 0, XRange)
    histo[iMPA].GetYaxis().SetTitle("Number of events")
    histo[iMPA].GetXaxis().SetTitle("Number of hits")
    # Gauss fit
    fit = histo[iMPA].Fit("gaus")
    histo[iMPA].Draw()
    gPad.Update()
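# A short follow-up sketch (an assumption, not in the original): the "gaus" fit
# result can be read back from the histogram to quote the common-noise mean and
# width, e.g. for the last MPA processed in the loop above.
func = histo[iMPA].GetFunction("gaus")
if func:
    noise_mean = func.GetParameter(1)   # Gaussian mean
    noise_sigma = func.GetParameter(2)  # Gaussian sigma
    print "MPA%d: mean = %.2f hits, sigma = %.2f hits" % (iMPA + 1, noise_mean, noise_sigma)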
### dataframe for output
df_out = DataFrame(columns=[
    'run', 'lumi', 'event', 'MET', 'MT', 'Njets_PassID', 'Nbjets_PassID',
    'ElePt', 'EleEta', 'ElePhi', 'Jet1Pt', 'Jet1Eta', 'Jet1Phi',
    'Jet2Pt', 'Jet2Eta', 'Jet2Phi', 'Jet3Pt', 'Jet3Eta', 'Jet3Phi',
    'Jet1Idx', 'Jet2Idx', 'Jet3Idx', 'weight'
])

df_out_wmunu_cr = DataFrame(columns=[
    'run', 'lumi', 'event', 'MET', 'MT', 'Njets_PassID', 'Nbjets_PassID',
    'Jet1Pt', 'Jet1Eta', 'Jet1Phi', 'Jet2Pt', 'Jet2Eta', 'Jet2Phi',
    'Jet3Pt', 'Jet3Eta', 'Jet3Phi', 'Jet1Idx', 'Jet2Idx', 'Jet3Idx',
    'MuPt', 'MuEta', 'MuPhi', 'weight'
])

recoil_den = TH1F("recoil_den", "recoil_den", 100, 0.0, 1000.)
recoil_num = TH1F("recoil_num", "recoil_num", 100, 0.0, 1000.)


def WriteHisto():
    outputfilename = infile.split("/")[-1]
    #fout = TFile(outputfilename, "RECREATE")
    fout = TFile("tempout.root", "RECREATE")
    fout.cd()
    recoil_den.Write()
    recoil_num.Write()


jetvariables = [
    'st_THINnJet', 'st_THINjetPx', 'st_THINjetPy', 'st_THINjetPz',
    'st_THINjetEnergy', 'st_THINjetCISVV2', 'st_THINjetHadronFlavor',
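# A minimal sketch (an assumption, not part of the original): once recoil_num and
# recoil_den are filled, the per-bin efficiency with proper binomial errors can be
# obtained from TEfficiency before writing it out alongside the two histograms.
from ROOT import TEfficiency

recoil_eff = TEfficiency(recoil_num, recoil_den)  # (passed, total)
recoil_eff.SetName("recoil_eff")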
def compare_clusterdb(dblist=[], histofilename="MyHistos.root", compare_type="2D0.0D0.1"): if dblist == []: return None dict_sigu = {} dict_sigv = {} dict_nlabels = {} dict_ntypes = {} dict_coverage = {} dict_trafo = {} for dbfilename in dblist: # open cluster db dbfile = gROOT.FindObject(dbfilename) if dbfile: dbfile.Close() dbfile = TFile(dbfilename, 'READ') # read data for comparison plots swADCSteps = dbfile.Get("DB_swADCSteps") adclabel = '' if swADCSteps: for index in range(0, swADCSteps.GetNrows()): adclabel = adclabel + 'S' + str(int(swADCSteps[index])) dict_trafo[dbfilename] = adclabel print("db sw adc ", dict_trafo[dbfilename]) coverageobj = dbfile.Get("hDB_Coverage") if coverageobj: dict_coverage[dbfilename] = coverageobj.GetBinContent(1) else: dict_coverage[dbfilename] = -1 print("db coverage ", dict_coverage[dbfilename]) weights = dbfile.Get("hDB_Weight") dict_nlabels[dbfilename] = weights.GetNbinsX() print("db labels ", dict_nlabels[dbfilename]) typeset = [] for bin in range(1, weights.GetNbinsX() + 1): current_type = get_labeltype( label=weights.GetXaxis().GetBinLabel(bin)) if not current_type in typeset: typeset.append(current_type) dict_ntypes[dbfilename] = len(typeset) print("db types ", dict_ntypes[dbfilename]) histo_sigma2U = dbfile.Get("hDB_Sigma2_U") histo_sigma2V = dbfile.Get("hDB_Sigma2_V") weightedSigma2U = 0.0 weightedSigma2V = 0.0 labelNorm = 0.0 for bin in range(1, weights.GetNbinsX() + 1): current_type = get_labeltype( label=weights.GetXaxis().GetBinLabel(bin)) if current_type == compare_type: w = weights.GetBinContent(bin) labelNorm += w weightedSigma2U += w * histo_sigma2U.GetBinContent(bin) weightedSigma2V += w * histo_sigma2V.GetBinContent(bin) if labelNorm > 0: weightedSigma2U /= labelNorm weightedSigma2V /= labelNorm dict_sigu[dbfilename] = TMath.Sqrt(weightedSigma2U) dict_sigv[dbfilename] = TMath.Sqrt(weightedSigma2V) else: dict_sigu[dbfilename] = 0 dict_sigv[dbfilename] = 0 print("db sigma2 u ", dict_sigu[dbfilename]) print("db sigma2 v ", dict_sigv[dbfilename]) dbfile.Close() histofile = gROOT.FindObject(histofilename) if histofile: histofile.Close() histofile = TFile(histofilename, 'RECREATE', 'Resolution plots created from ' + dbfilename) # summary histograms on type resolution histofile.cd("") NDB = len(dblist) hcoverage = TH1F("hcoverage", "", NDB, 0, NDB) hcoverage.SetStats(0) hcoverage.SetFillColor(38) hcoverage.SetYTitle("cluster coverage [%]") hcoverage.SetXTitle("cluster db") hntypes = TH1F("hntypes", "", NDB, 0, NDB) hntypes.SetStats(0) hntypes.SetFillColor(38) hntypes.SetYTitle("number of cluster types") hntypes.SetXTitle("cluster db") hnlabels = TH1F("hnlabels", "", NDB, 0, NDB) hnlabels.SetStats(0) hnlabels.SetFillColor(38) hnlabels.SetYTitle("number of cluster labels") hnlabels.SetXTitle("cluster db") hsigmau = TH1F("hsigmau", "", NDB, 0, NDB) hsigmau.SetStats(0) hsigmau.SetFillColor(38) hsigmau.SetYTitle("cluster sigma u [mm]") hsigmau.SetXTitle("cluster db") hsigmav = TH1F("hsigmav", "", NDB, 0, NDB) hsigmav.SetStats(0) hsigmav.SetFillColor(38) hsigmav.SetYTitle("cluster sigma v [mm]") hsigmav.SetXTitle("cluster db") for j, dbfilename in enumerate(dblist): hcoverage.GetXaxis().SetBinLabel(j + 1, str(dict_trafo[dbfilename])) hntypes.GetXaxis().SetBinLabel(j + 1, str(dict_trafo[dbfilename])) hnlabels.GetXaxis().SetBinLabel(j + 1, str(dict_trafo[dbfilename])) hsigmau.GetXaxis().SetBinLabel(j + 1, str(dict_trafo[dbfilename])) hsigmav.GetXaxis().SetBinLabel(j + 1, str(dict_trafo[dbfilename])) hcoverage.SetBinContent(j + 1, 
                               dict_coverage[dbfilename])
        hntypes.SetBinContent(j + 1, dict_ntypes[dbfilename])
        hnlabels.SetBinContent(j + 1, dict_nlabels[dbfilename])
        hsigmau.SetBinContent(j + 1, dict_sigu[dbfilename])
        hsigmav.SetBinContent(j + 1, dict_sigv[dbfilename])

    histofile.Write()
    histofile.Close()
def plot(path, ecms, xmin, xmax, num_charm):
    try:
        f_data = TFile(path[0])
        t_data = f_data.Get('save')
        entries_data = t_data.GetEntries()
        logging.info('data entries :' + str(entries_data))
    except:
        logging.error(path[0] + ' is invalid!')
        sys.exit()

    try:
        f_side1 = TFile(path[1])
        t_side1 = f_side1.Get('save')
        entries_side1 = t_side1.GetEntries()
        logging.info('data(side1) entries :' + str(entries_side1))
    except:
        logging.error(path[1] + ' is invalid!')
        sys.exit()

    try:
        f_side2 = TFile(path[2])
        t_side2 = f_side2.Get('save')
        entries_side2 = t_side2.GetEntries()
        logging.info('data(side2) entries :' + str(entries_side2))
    except:
        logging.error(path[2] + ' is invalid!')
        sys.exit()

    try:
        f_side3 = TFile(path[3])
        t_side3 = f_side3.Get('save')
        entries_side3 = t_side3.GetEntries()
        logging.info('data(side3) entries :' + str(entries_side3))
    except:
        logging.error(path[3] + ' is invalid!')
        sys.exit()

    try:
        f_side4 = TFile(path[4])
        t_side4 = f_side4.Get('save')
        entries_side4 = t_side4.GetEntries()
        logging.info('data(side4) entries :' + str(entries_side4))
    except:
        logging.error(path[4] + ' is invalid!')
        sys.exit()

    mbc = TCanvas('mbc', 'mbc', 800, 600)
    set_canvas_style(mbc)

    xbins = 50
    ytitle = 'Entries'
    xtitle = 'Cos(OtherShws)'

    h_data = TH1F('data', 'data', xbins, xmin, xmax)
    set_histo_style(h_data, xtitle, ytitle, 1, -1)
    cos_othershws_fill(t_data, h_data, num_charm)

    h_side1 = TH1F('side1', 'side1', xbins, xmin, xmax)
    set_histo_style(h_side1, xtitle, ytitle, 3, 3004)
    cos_othershws_fill(t_side1, h_side1, num_charm)

    h_side2 = TH1F('side2', 'side2', xbins, xmin, xmax)
    set_histo_style(h_side2, xtitle, ytitle, 3, 3004)
    cos_othershws_fill(t_side2, h_side2, num_charm)

    h_side3 = TH1F('side3', 'side3', xbins, xmin, xmax)
    set_histo_style(h_side3, xtitle, ytitle, 3, 3004)
    cos_othershws_fill(t_side3, h_side3, num_charm)

    h_side4 = TH1F('side4', 'side4', xbins, xmin, xmax)
    set_histo_style(h_side4, xtitle, ytitle, 3, 3004)
    cos_othershws_fill(t_side4, h_side4, num_charm)

    # sideband combination: (side1 + side2)/2 - (side3 + side4)/4
    h_side1.Add(h_side2)
    h_side1.Scale(0.5)
    h_side3.Add(h_side4)
    h_side3.Scale(0.25)
    h_side1.Add(h_side3, -1)

    h_data.Draw('E1')
    hs = THStack('hs', 'Stacked')
    hs.Add(h_side1)
    hs.Draw('same')
    h_data.Draw('sameE1')

    legend = TLegend(0.5, 0.6, 0.8, 0.85)
    leg_title = str(ecms) + ' MeV'
    set_legend(legend, h_data, h_side1, leg_title)
    legend.Draw()

    if not os.path.exists('./figs/'):
        os.makedirs('./figs/')
    mbc.SaveAs('./figs/cos_othershws_' + str(ecms) + '_' + str(num_charm) + '.pdf')

    raw_input('Enter anything to end...')
def findElectrons(opts): ### Load Python modules import os import math import numpy as np from array import array from os.path import isdir, abspath ### Load ROOT modules from ROOT import TClonesArray, TFile, TTree, gSystem, gROOT, AddressOf from ROOT import TH2F, TH1F, TMath, TGraphAsymmErrors ###Load DAMPE libs gSystem.Load("libDmpEvent.so") gSystem.Load("libDmpEventFilter.so") gSystem.Load("libDmpKernel.so") gSystem.Load("libDmpService.so") ###Load DAMPE modules from ROOT import DmpChain, DmpEvent, DmpFilterOrbit, DmpPsdBase, DmpCore from ROOT import DmpSvcPsdEposCor, DmpVSvc #DmpRecPsdManager import DMPSW gROOT.SetBatch(True) ############################# Searching for electrons ####### Reading input files #Creating DAMPE chain for input files dmpch = DmpChain("CollectionTree") #Reading input files if not opts.input: files = [f.replace("\n","") for f in open(opts.list,'r').readlines()] for ifile, f in enumerate(files): DMPSW.IOSvc.Set("InData/Read" if ifile == 0 else "InData/ReadMore",f) if os.path.isfile(f): dmpch.Add(f) if opts.verbose: print('\nInput file read: {} -> {}'.format(ifile,f)) else: DMPSW.IOSvc.Set("InData/Read",opts.input) if os.path.isfile(opts.input): dmpch.Add(opts.input) if opts.verbose: print('\nInput file read: {}'.format(opts.input)) #Defining the total number of events nevents = dmpch.GetEntries() if opts.verbose: print('\nTotal number of events: {}'.format(nevents)) print("\nPrinting the chain...\n") dmpch.Print() ####### Setting the output directory to the chain dmpch.SetOutputDir(abspath(opts.outputDir),"electrons") ####### Processing input files ###Histos #Defining log binning #np.logspace binning nBins=1000 eMax=6 eMin=0 eBinning = np.logspace(eMin, eMax, num=(nBins+1)) #custom binning ''' nBins = 1000 eMin=0.1 eMax=1000000 EDmax = [] EDEdge = [] EDstepX=np.log10(eMax/eMin)/nBins for iedge in range(0, nBins): EDEdge.append(eMin*pow(10,iedge*EDstepX)) EDmax.append(eMin*pow(10,(iedge+1)*EDstepX)) EDEdge.append(EDmax[-1]) Edges= array('d',EDEdge) # this makes a bound array for TH1F ''' #Pointing h_terrestrial_lat_vs_long = TH2F("h_terrestrial_lat_vs_long","latitude vs longitude",360,0,360,180,-90,90) ## Energy h_energy_all = TH1F("h_energy_all","all particle energy",nBins,eBinning) h_energyCut = TH1F("h_energyCut","all particle energy - 20 GeV cut",nBins,eBinning) h_energyCut_SAAcut = TH1F("h_energyCut_SAAcut","all particle energy - 20 GeV cut (no SAA)",nBins,eBinning) h_energyCut_noTrack = TH1F("h_energyCut_noTrack","all particle energy - 20 GeV cut (NO TRACK)",nBins,eBinning) h_energyCut_Track = TH1F("h_energyCut_Track","all particle energy - 20 GeV cut (TRACK)",nBins,eBinning) h_energyCut_TrackMatch = TH1F("h_energyCut_TrackMatch","all particle energy - 20 GeV cut (TRACK match)",nBins,eBinning) ##BGO h_energyBGOl=[] #energy of BGO vertical layer (single vertical plane) for BGO_idxl in range(14): histoName = "h_energyBGOl_" + str(BGO_idxl) histoTitle = "BGO energy deposit layer " + str(BGO_idxl) tmpHisto = TH1F(histoName,histoTitle,1000,0,1e+6) h_energyBGOl.append(tmpHisto) h_energyBGOb = [] #energy of BGO lateral layer (single bars of a plane) h_BGOb_maxEnergyFraction = [] #fraction of the maximum released energy for each bar on each layer of the BGO calorimeter for BGO_idxl in range(14): tmp_eLayer = [] for BGO_idxb in range(23): histoName = "h_energyBGOl_" + str(BGO_idxl) + "_BGOb_" + str(BGO_idxb) histoTitle = "BGO energy deposit layer " + str(BGO_idxl) + " bar " + str(BGO_idxb) tmpHisto = TH1F(histoName,histoTitle,1000,0,1e+6) tmp_eLayer.append(tmpHisto) 
maxhistoName = "h_BGO_maxEnergyFraction_l_" + str(BGO_idxl) maxhistoTitle = "fraction of the maximum released energy layer " + str(BGO_idxl) tmpMaxHisto = TH1F(maxhistoName,maxhistoTitle,100,0,1) h_BGOb_maxEnergyFraction.append(tmpMaxHisto) h_energyBGOb.append(tmp_eLayer) h_BGOl_maxEnergyFraction = TH1F("h_BGOl_maxEnergyFraction","Fraction of the maximum released energy",100,0,1) h_thetaBGO = TH1F("h_thetaBGO","theta BGO",100,0,90) ##STK h_STK_nTracks = TH1F("h_STK_nTracks","number of tracks",1000,0,1000) h_STK_trackChi2norm = TH1F("h_STK_trackChi2norm","\chi^2/n track",100,0,200) h_STK_nTracksChi2Cut = TH1F("h_STK_nTracksChi2Cut","number of tracks (\chi^2 cut)",1000,0,1000) h_stk_cluster_XvsY = [] for iLayer in range(6): hName = 'h_stkCluster_XvsY_l_'+str(iLayer) hTitle = 'cluster X vs Y - plane '+str(iLayer) tmpHisto = TH2F(hName,hTitle,1000,-500,500,1000,-500,500) h_stk_cluster_XvsY.append(tmpHisto) h_ThetaSTK = TH1F("h_ThetaSTK","theta STK",100,0,90) h_deltaTheta = TH1F("h_deltaTheta","\Delta theta",500,-100,100) h_resX_STK_BGO = TH1F("h_resX_STK_BGO","BGO/STK residue layer X",200,-1000,1000) h_resY_STK_BGO = TH1F("h_resY_STK_BGO","BGO/STK residue layer Y",200,-1000,1000) h_imapctPointSTK = TH2F("h_imapctPointSTK","STK impact point",1000,-500,500,1000,-500,500) h_stk_chargeClusterX = TH1F("h_stk_chargeClusterX","STK charge on cluster X",10000,0,10000) h_stk_chargeClusterY = TH1F("h_stk_chargeClusterY","STK charge on cluster Y",10000,0,10000) ##PSD h_psd_ChargeX = [] for lidx in range (2): histoName = "h_psd_ChargeX_l" + str(lidx) histoTitle = "PSD X charge layer " + str(lidx) tmpHisto = TH1F(histoName,histoTitle,10000,0,10000) h_psd_ChargeX.append(tmpHisto) h_psd_ChargeY = [] for lidx in range (2): histoName = "h_psd_ChargeY_l" + str(lidx) histoTitle = "PSD Y charge layer " + str(lidx) tmpHisto = TH1F(histoName,histoTitle,10000,0,10000) h_psd_ChargeY.append(tmpHisto) ### ### Analysis cuts eCut = 50 #Energy cut in GeV ### DAMPE geometry BGOzTop = 46. BGOzBot = 448. #Filtering for SAA if not opts.mc: DMPSW.IOSvc.Set("OutData/NoOutput", "True") DMPSW.IOSvc.Initialize() pFilter = DmpFilterOrbit("EventHeader") pFilter.ActiveMe() #Starting loop on files if opts.debug: if opts.verbose: print('\nDebug mode activated... the number of chain events is limited to 1000') nevents = 1000 for iev in xrange(nevents): if opts.mc: DmpVSvc.gPsdECor.SetMCflag(1) pev=dmpch.GetDmpEvent(iev) #Get latitude and longitude longitude = pev.pEvtAttitude().lon_geo latitude = pev.pEvtAttitude().lat_geo #Get particle total energy etot=pev.pEvtBgoRec().GetTotalEnergy()/1000. 
h_energy_all.Fill(etot) if etot < eCut: continue h_energyCut.Fill(etot) #Get BGO energy deposit for each layer (vertical BGO shower profile) v_bgolayer = np.array([pev.pEvtBgoRec().GetELayer(ibgo) for ibgo in range(14)]) for BGO_idxl in range(14): h_energyBGOl[BGO_idxl].Fill(v_bgolayer[BGO_idxl]) #Get BGO energy deposit for each bar (lateral BGO shower profile) of each layer for ilay in xrange(0,14): v_bgolayer_bars = np.array([pev.pEvtBgoRec().GetEdepPos(ilay,ibar) for ibar in xrange(0,23)]) #Fraction of the maximum energy deposit of the particle crossing the BGO on a certain layer (single bars) h_BGOb_maxEnergyFraction[ilay].Fill(np.max(v_bgolayer_bars)/1000./etot) for idx_BGOb in range (23): h_energyBGOb[ilay][idx_BGOb].Fill(v_bgolayer_bars[idx_BGOb]) #Fraction of the maximum energy deposit of the particle crossing the BGO h_BGOl_maxEnergyFraction.Fill(np.max(v_bgolayer)/1000./etot) #BGO acceptance projection projectionX_BGO_BGOTop = pev.pEvtBgoRec().GetInterceptXZ() +BGOzTop * pev.pEvtBgoRec().GetSlopeXZ() projectionY_BGO_BGOTop = pev.pEvtBgoRec().GetInterceptYZ() +BGOzTop * pev.pEvtBgoRec().GetSlopeYZ() #SAA filter if not opts.mc: inSAA = pFilter.IsInSAA(pev.pEvtHeader().GetSecond()) #inSAA = False if (inSAA): continue h_energyCut_SAAcut.Fill(etot) h_terrestrial_lat_vs_long.Fill(longitude,latitude) tgZ = math.atan(np.sqrt( (pev.pEvtBgoRec().GetSlopeXZ()*pev.pEvtBgoRec().GetSlopeXZ()) + (pev.pEvtBgoRec().GetSlopeYZ()*pev.pEvtBgoRec().GetSlopeYZ()) ) ); theta_bgo = tgZ*180./math.pi h_thetaBGO.Fill(theta_bgo) #Tracks ntracks = pev.NStkKalmanTrack() if ntracks < 0: print "\nTRACK ERROR: number of tracks < 0 - ABORTING\n" break if ntracks == 0: h_energyCut_noTrack.Fill(etot) h_STK_nTracks.Fill(ntracks) h_energyCut_Track.Fill(etot) res_X_min = 1000 res_Y_min = 1000 trackID_X = -9 trackID_Y = -9 lTrackIDX = [] lTrackIDY = [] residueXmin = [] residueYmin = [] #Loop on STK tracks to get the STK charge measurement for iTrack in range(ntracks): tmpTrack = pev.pStkKalmanTrack(iTrack) chi2_norm = tmpTrack.getChi2()/(tmpTrack.getNhitX()+tmpTrack.getNhitY()-4) h_STK_trackChi2norm.Fill(chi2_norm) if chi2_norm > 25: continue h_STK_nTracksChi2Cut.Fill(ntracks) l0ClusterX = l0ClusterY = False for iCluster in range(tmpTrack.GetNPoints()): clux = tmpTrack.pClusterX(iCluster) cluy = tmpTrack.pClusterY(iCluster) if clux and clux.getPlane() == 0: l0ClusterX = True if cluy and cluy.getPlane() == 0: l0ClusterY = True # check plot for the dead region of STK if(clux and cluy): h_stk_cluster_XvsY[clux.getPlane()].Fill(clux.GetX(),cluy.GetY()) if l0ClusterX == False and l0ClusterY == False: continue #### Tracks characteristics theta_stk =math.acos(tmpTrack.getDirection().CosTheta())*180./math.pi; delta_theta_STK_BGO = theta_stk - theta_bgo #STK impact point trackImpactPointX = tmpTrack.getImpactPoint().x() trackImpactPointY = tmpTrack.getImpactPoint().y() #Track projections trackProjX = tmpTrack.getDirection().x()*(BGOzTop - tmpTrack.getImpactPoint().z()) + tmpTrack.getImpactPoint().x() trackProjY = tmpTrack.getDirection().y()*(BGOzTop - tmpTrack.getImpactPoint().z()) + tmpTrack.getImpactPoint().y() #Track residues resX_STK_BGO = projectionX_BGO_BGOTop - trackProjX resY_STK_BGO = projectionY_BGO_BGOTop - trackProjY resX_STK_BGO_top = trackImpactPointX - (pev.pEvtBgoRec().GetInterceptXZ() + tmpTrack.getImpactPoint().z() * pev.pEvtBgoRec().GetSlopeXZ()) resY_STK_BGO_top = trackImpactPointY - (pev.pEvtBgoRec().GetInterceptYZ() + tmpTrack.getImpactPoint().z() * pev.pEvtBgoRec().GetSlopeYZ()) #### 
h_ThetaSTK.Fill(theta_stk) h_deltaTheta.Fill(delta_theta_STK_BGO) h_imapctPointSTK.Fill(trackImpactPointX,trackImpactPointY) h_resX_STK_BGO.Fill(tmpTrack.getImpactPoint().x() - (pev.pEvtBgoRec().GetInterceptXZ() + tmpTrack.getImpactPoint().z() * pev.pEvtBgoRec().GetSlopeXZ())) h_resY_STK_BGO.Fill(tmpTrack.getImpactPoint().y() - (pev.pEvtBgoRec().GetInterceptYZ() + tmpTrack.getImpactPoint().z() * pev.pEvtBgoRec().GetSlopeYZ())) if abs(theta_stk - theta_bgo) > 25: continue #Selecting good tracks for charge measurement if abs(resX_STK_BGO_top) < 200 and abs(resX_STK_BGO) < 60: lTrackIDX.append(tmpTrack) residueXmin.append(res_X_min) if res_X_min > abs(resX_STK_BGO_top): res_X_min = abs(resX_STK_BGO_top) trackID_X = iTrack if abs(resY_STK_BGO_top) < 200 and abs(resY_STK_BGO) < 60: lTrackIDY.append(tmpTrack) residueYmin.append(res_Y_min) if res_Y_min > abs(resY_STK_BGO_top): res_Y_min = abs(resY_STK_BGO_top) trackID_Y = iTrack if(trackID_X == -9): continue if(trackID_Y == -9): continue track_ID = -9 #print trackID_X if(trackID_X == trackID_Y): track_ID = trackID_X else: trackX = pev.pStkKalmanTrack(trackID_X) trackY = pev.pStkKalmanTrack(trackID_Y) chi2X = trackX.getChi2() /(trackX.getNhitX()+trackX.getNhitY()-4); chi2Y = trackY.getChi2() /(trackY.getNhitX()+trackY.getNhitY()-4); npointX = trackX.GetNPoints() npointY = trackY.GetNPoints() if(npointX == npointY or abs(npointX - npointY) == 1): if(chi2X < chi2Y): if trackID_X in lTrackIDY: track_ID = trackID_X elif trackID_Y in lTrackIDX: track_ID = trackID_Y else: common_id = list(set(lTrackIDX).intersection(lTrackIDY)) searchForTrack( common_id, lTrackIDX, lTrackIDY, residueXmin, residueYmin, track_ID ) else: if trackID_Y in lTrackIDX: track_ID = trackID_Y elif trackID_X in lTrackIDY: track_ID = trackID_X else: common_id = list(set(lTrackIDX).intersection(lTrackIDY)) searchForTrack( common_id, lTrackIDX, lTrackIDY, residueXmin, residueYmin, track_ID ) else: if(npointX > npointY): if trackID_X in lTrackIDY: track_ID = trackID_X elif trackID_Y in lTrackIDX: track_ID = trackID_Y else: common_id = list(set(lTrackIDX).intersection(lTrackIDY)) searchForTrack( common_id, lTrackIDX, lTrackIDY, residueXmin, residueYmin, track_ID ) else: if trackID_Y in lTrackIDX: track_ID = trackID_Y elif trackID_X in lTrackIDY: track_ID = trackID_X else: common_id = list(set(lTrackIDX).intersection(lTrackIDY)) searchForTrack( common_id, lTrackIDX, lTrackIDY, residueXmin, residueYmin, track_ID ) if(track_ID == -9): continue h_energyCut_TrackMatch.Fill(etot) #Select the matched track track_sel = pev.pStkKalmanTrack(track_ID) theta_track_sel =math.acos(track_sel.getDirection().CosTheta())*180./math.pi; deltaTheta_rec_sel = theta_bgo - theta_track_sel track_correction = track_sel.getDirection().CosTheta(); cluChargeX = -1000 cluChargeY = -1000 for iclu in xrange(0,track_sel.GetNPoints()): clux = track_sel.pClusterX(iclu) cluy = track_sel.pClusterY(iclu) if (clux and clux.getPlane() == 0): cluChargeX = clux.getEnergy()*track_correction if (cluy and cluy.getPlane() == 0): cluChargeY = cluy.getEnergy()*track_correction h_stk_chargeClusterX.Fill(cluChargeX) h_stk_chargeClusterY.Fill(cluChargeY) #Loop on PSD hits to get PSD charge measurement ''' #PSD fiducial volume cut psd_YZ_top = -324.7 psd_XZ_top = -298.5 stk_to_psd_topY = (track_sel.getDirection().y()*(psd_YZ_top - track_sel.getImpactPoint().z()) + track_sel.getImpactPoint().y()) stk_to_psd_topX = (track_sel.getDirection().x()*(psd_XZ_top - track_sel.getImpactPoint().z()) + track_sel.getImpactPoint().x()) 
if(abs(stk_to_psd_topX) > 400.): continue if(abs(stk_to_psd_topY) > 400.): continue ''' PSDXlayer0 = -298.5 PSDXlayer1 = -284.5 PSDYlayer0 = -324.7 PSDYlayer1 = -310.7 psdChargeX = [[]for _ in range(2)] psdGIDX = [[]for _ in range(2)] psdPathlengthX = [[]for _ in range(2)] psdPositionX = [[]for _ in range(2)] psdChargeY = [[]for _ in range(2)] psdGIDY = [[]for _ in range(2)] psdPathlengthY = [[]for _ in range(2)] psdPositionY = [[]for _ in range(2)] for lPSD in xrange(0,pev.NEvtPsdHits()): if pev.pEvtPsdHits().IsHitMeasuringX(lPSD): crossingX = False lenghtX = [-99999.,-99999.] array_lenghtX = array('d',lenghtX) if(pev.pEvtPsdHits().GetHitZ(lPSD) == PSDXlayer0): npsdX = 0 if(pev.pEvtPsdHits().GetHitZ(lPSD)== PSDXlayer1): npsdX = 1 if not opts.mc: crossingX = DmpVSvc.gPsdECor.GetPathLengthPosition(pev.pEvtPsdHits().fGlobalBarID[lPSD],track_sel.getDirection(),track_sel.getImpactPoint(), array_lenghtX) if crossingX: psdChargeX[npsdX].append(pev.pEvtPsdHits().fEnergy[lPSD]) psdGIDX[npsdX].append(pev.pEvtPsdHits().fGlobalBarID[lPSD]) psdPathlengthX[npsdX].append(array_lenghtX[1]) psdPositionX[npsdX].append(pev.pEvtPsdHits().GetHitX(lPSD)) elif pev.pEvtPsdHits().IsHitMeasuringY(lPSD): crossingY = False lenghtY = [-99999.,-99999.] array_lenghtY = array('d',lenghtY) if(pev.pEvtPsdHits().GetHitZ(lPSD) == PSDYlayer0): npsdY = 0 if(pev.pEvtPsdHits().GetHitZ(lPSD)== PSDYlayer1): npsdY = 1 if not opts.mc: crossingY = DmpVSvc.gPsdECor.GetPathLengthPosition(pev.pEvtPsdHits().fGlobalBarID[lPSD],track_sel.getDirection(),track_sel.getImpactPoint(), array_lenghtY) if crossingY: psdChargeY[npsdY].append(pev.pEvtPsdHits().fEnergy[lPSD]) psdGIDY[npsdY].append(pev.pEvtPsdHits().fGlobalBarID[lPSD]) psdPathlengthY[npsdY].append(array_lenghtY[1]) psdPositionY[npsdY].append(pev.pEvtPsdHits().GetHitY(lPSD)) ''' print psdChargeX print psdGIDX print psdPathlengthX print psdPositionX print psdChargeY print psdGIDY print psdPathlengthY print psdPositionY ''' psdFinalChargeX = [-999,-999] psdFinalChargeY = [-999,-999] #psdFinalChargeX_corr = [-999,-999] #psdFinalChargeY_corr = [-999,-999] psdFinalChargeX_proj = [-999,-999] psdFinalChargeY_proj = [-999,-999] psdX_pathlength = [-999,-999] psdY_pathlength = [-999,-999] psdX_position = [-999,-999] psdY_position = [-999,-999] PsdEC_tmpX = 0. PsdEC_tmpY = 0. for ipsd in xrange(0,2): if(len(psdChargeY[ipsd]) > 0): pos_max_len = np.argmax(psdPathlengthY[ipsd]) lenghtY = [-99999.,-99999.] array_lenghtY = array('d',lenghtY) test_pos = False if not opts.mc: test_pos = DmpVSvc.gPsdECor.GetPathLengthPosition(psdGIDY[ipsd][pos_max_len],track_sel.getDirection(),track_sel.getImpactPoint(), array_lenghtY) ''' PsdEC_tmpY = -1. if test_pos: PsdEC_tmpY = DmpVSvc.gPsdECor.GetPsdECorSp3(psdGIDY[ipsd][pos_max_len], array_lenghtY[0]) ''' psdFinalChargeY[ipsd] = psdChargeY[ipsd][pos_max_len] h_psd_ChargeY[ipsd].Fill(psdFinalChargeY[ipsd]) #psdFinalChargeY_corr[ipsd] = psdChargeY[ipsd][pos_max_len]*PsdEC_tmpY psdFinalChargeY_proj[ipsd] = array_lenghtY[0] psdY_pathlength[ipsd] = array_lenghtY[1] psdY_position[ipsd] = psdPositionY[ipsd][pos_max_len] if(len(psdChargeX[ipsd]) > 0): pos_max_len = np.argmax(psdPathlengthX[ipsd]) lenghtX = [-99999.,-99999.] array_lenghtX = array('d',lenghtX) test_pos = False if not opts.mc: test_pos = DmpVSvc.gPsdECor.GetPathLengthPosition(psdGIDX[ipsd][pos_max_len],track_sel.getDirection(),track_sel.getImpactPoint(), array_lenghtY) ''' PsdEC_tmpY = -1. 
if test_pos: PsdEC_tmpX = DmpVSvc.gPsdECor.GetPsdECorSp3(psdGIDX[ipsd][pos_max_len], array_lenghtX[0]) ''' psdFinalChargeX[ipsd] = psdChargeX[ipsd][pos_max_len] h_psd_ChargeX[ipsd].Fill(psdFinalChargeX[ipsd]) #psdFinalChargeX_corr[ipsd] = psdChargeX[ipsd][pos_max_len]*PsdEC_tmpX psdFinalChargeX_proj[ipsd] = array_lenghtX[0] psdX_pathlength[ipsd] = array_lenghtX[1] psdX_position[ipsd] = psdPositionX[ipsd][pos_max_len] ### Writing output files to file if opts.data: tf_skim = TFile(opts.outputFile,"RECREATE") h_energy_all.Write() h_energyCut.Write() h_energyCut_SAAcut.Write() h_energyCut_noTrack.Write() h_energyCut_Track.Write() h_energyCut_TrackMatch.Write() for BGO_idxl in range(14): h_energyBGOl[BGO_idxl].Write() h_BGOb_maxEnergyFraction[BGO_idxl].Write() for BGO_idxb in range(23): h_energyBGOb[BGO_idxl][BGO_idxb].Write() h_thetaBGO.Write() h_BGOl_maxEnergyFraction.Write() h_terrestrial_lat_vs_long.Write() h_STK_nTracks.Write() h_STK_trackChi2norm.Write() h_STK_nTracksChi2Cut.Write() for iLayer in range(6): h_stk_cluster_XvsY[iLayer].Write() h_ThetaSTK.Write() h_deltaTheta.Write() h_imapctPointSTK.Write() h_resX_STK_BGO.Write() h_resY_STK_BGO.Write() h_stk_chargeClusterX.Write() h_stk_chargeClusterY.Write() h_psd_ChargeX[0].Write() h_psd_ChargeX[1].Write() h_psd_ChargeY[0].Write() h_psd_ChargeY[1].Write() tf_skim.Close()
def get_result(result, f, model):
    try:
        if match('^overlapIntegral$', result):
            val = f.get('overlap_true').GetVal()
            err = f.get('overlap_rms').GetVal()
            if val < 0.0:
                hist = TH1F('thisoverlap', '', 1000, 0.0, 1.0)
                f.Get('corrTree').Draw('overlapTrue>>thisoverlap', '', 'goff')
                val = hist.GetMean()
                err = hist.GetRMS()
            return '{:.3e}<br />±{:.3e}'.format(val, err)
        elif match('^overlapDiff$', result):
            try:
                hist = f.get('overlap_diff')
            except NameError:
                f.Get('corrTree').Draw('overlapDiff>>hnew', '', 'goff')
                val = gDirectory.Get('hnew').GetMean()
                mini, maxi = val-0.5, val+0.5
                hist = TH1F('myoverlap', '', 1000, mini, maxi)
                f.Get('corrTree').Draw('overlapDiff>>myoverlap', '', 'goff')
            val = hist.GetMean()*100.0
            err = hist.GetRMS()*100.0
            return '{:.3f}%<br />±{:.3f}%'.format(val, err)
        elif match('^randomized ovDiff$', result):
            hist = TH1F('myoverlap', '', 1000, -0.5, 0.5)
            f.Get('rndmzd_corrTree').Draw('overlapDiff>>myoverlap', '', 'goff')
            val = hist.GetMean()*100.0
            err = hist.GetRMS()*100.0
            return '{:.3f}%<br />±{:.3f}%'.format(val, err)
        elif match('^chiSq$', result):
            val = (f.get('chisqX1').GetVal() + f.get('chisqX2').GetVal() +
                   f.get('chisqY1').GetVal() + f.get('chisqY2').GetVal())
            return '{:.0f}'.format(val)
        elif match('^d.o.f.$', result):
            val = (f.get('dofX1').GetVal() + f.get('dofX2').GetVal() +
                   f.get('dofY1').GetVal() + f.get('dofY2').GetVal())
            return '{:.0f}'.format(val)
        elif match('chiSq/dof', result):
            val = (f.get('chisqX1').GetVal() + f.get('chisqX2').GetVal() +
                   f.get('chisqY1').GetVal() + f.get('chisqY2').GetVal()) / (
                   f.get('dofX1').GetVal() + f.get('dofX2').GetVal() +
                   f.get('dofY1').GetVal() + f.get('dofY2').GetVal() - model.dof())
            return '{:.4f}'.format(val)
        elif match('^time of fit$', result):
            return f.get('timestamp').GetTitle()
        elif match('^time of simulation$', result):
            return f.get('corrTimestamp').GetTitle()
        elif match('^time of residuals$', result):
            return f.get('resTimestamp').GetTitle()
        elif match('^time of randomization$', result):
            return f.get('rndmzdTimestamp').GetTitle()
        elif match('^neg.log.lik.$', result):
            val = f.get('fitResult').minNll()
            return '{:e}'.format(val)
        elif match('^scaling$', result):
            val = f.get('scaling').GetVal()*1.0e4
            return '1={:.2f}µm'.format(val)
        else:
            raise NameError()
    except NameError:
        return ''
    except TypeError:
        return ''
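# Hypothetical usage sketch (not taken from the original source): 'f' is assumed
# to be the object wrapping the fit-output file that exposes both .get() (raising
# NameError for missing keys) and the usual TFile Get(); 'model' is only needed
# for the 'chiSq/dof' entry, so None is passed for the keys used here. The key
# names mirror those matched above, and the output is an HTML table row, matching
# the "<br />" markup returned by get_result.
columns = ['overlapDiff', 'chiSq', 'd.o.f.']
cells = [get_result(c, f, None) for c in columns]
html_row = '<tr><td>' + '</td><td>'.join(cells) + '</td></tr>'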
def plot(sample, n, v, sel, hbins, hmin, hmax, hlog, xlabel, ylabel, dim):
    #hlog=False;
    global output
    file = {}
    tree = {}
    hist = {}
    leaf = {}
    xsec = {}
    max = 0
    min = 1e99
    ROOT.gStyle.SetOptStat(1111)
    ROOT.gROOT.SetBatch(True)

    for i, s in enumerate(sample):
        file[s] = TFile(input + s + ".root", "READ")
        tree[s] = file[s].Get("Physics")
        if dim == 1:
            hist[s] = TH1F(s, ";"+v, hbins, hmin, hmax)
            tree[s].Project(s, v, "%s" % sel)
            leaf[s] = tree[s].GetLeaf("xsec1")
            leaf[s].GetBranch().GetEntry(1)
            xsec[s] = leaf[s].GetValue()
            hist[s].SetLineColor(colour[i])
            hist[s].SetLineWidth(2)  # 3
            hist[s].SetFillColorAlpha(colour[i], 0.35)
            hist[s].SetFillStyle(3005)
            if hist[s].GetMaximum() > max:
                max = hist[s].GetMaximum()*6
            if hist[s].GetMinimum() < min:
                min = hist[s].GetMinimum()
            #leg = TLegend(0.4, 0.9-0.035*len(sample), 0.68, 0.89)
            c1 = TCanvas("c1", "Gen", 1600, 1200)
            c1.cd()
            hist[sample[0]].SetMaximum(max*1.2)
            hist[sample[0]].SetMinimum(min+1.e6)
            hist[sample[0]].GetXaxis().SetTitle("%s" % xlabel)
            hist[sample[0]].GetYaxis().SetTitle("%s" % ylabel)
            hist[sample[0]].SetTitle("%s" % n)
            if len(sample) > 1:
                for i, s in enumerate(sample):
                    hist[s].Draw("HIST" if i == 0 else "HIST, SAME")
            else:
                hist[s].Draw("HIST")
            if hlog:
                c1.GetPad(0).SetLogy()
        elif dim == 2:
            if len(hbins) != 2 or len(hmin) != 2 or len(hmax) != 2:
                print "dimension of hbins, hmin, hmax does not correspond to 2 dimensional histogram parameters."
                exit()
            else:
                # X axis parameters followed by Y axis parameters; v in the form of x:y
                hist[s] = TH2F(s, ";"+v, hbins[0], hmin[0], hmax[0], hbins[1], hmin[1], hmax[1])
                tree[s].Project(s, v, "%s" % sel, "colz")
                c1 = TCanvas("c1", "Gen", 1600, 1200)
                c1.cd()
                hist[s].Draw("COLZ")
                #if hlog:
                #    c1.GetPad(0).SetLogy()
                #    c1.GetPad(0).SetLogx()
        else:
            print "Unknown dimension"
            exit()

    #leg.Draw()
    c1.Update()
    drawlabel(0.37, 0.934, "CMS Simulation")

    output += "VH/" + sample[0] + "/"
    if not hlog:
        output += "Lin/"
    elif hlog:
        output += "Log/"
    if not os.path.exists(output):
        os.makedirs(output)
    c1.Print(output + n + ".pdf")
    c1.Print(output + n + ".png")
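# Hypothetical call (an addition; the sample name, variable and selection are
# placeholders, not taken from the original). It assumes the module-level globals
# used inside plot() ('input', 'output', 'colour', 'drawlabel') are already set,
# and draws one 1D distribution with a log-scale y axis.
plot(["ZH125"], "jet1_pt", "Jet1.Pt()", "Jet1.Pt()>30",
     50, 0., 500., True, "jet p_{T} [GeV]", "Events", 1)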
def defineHisto(self): self.h_total.append(TH1F('h_total', 'h_total', 2, 0, 2)) self.h_total_weight.append( TH1F('h_total_weight', 'h_total_weight', 2, 0, 2)) self.h_npass.append(TH1F('h_npass', 'h_nass', 2, 0, 2)) # self.h_cutflow=TH1F('h_cutflow_','h_cutflow_',7, 0, 7) # Cutflow self.h_met.append(TH1F('h_met_', 'h_met_', 1000, 0., 1000.)) #metbins_ = [200,350,500,1000] #self.h_met_rebin.append(TH1F('h_met_rebin_'+postname, 'h_met_rebin'+postname, 3, array(('d'),metbins_))) #self.h_mass.append(TH1F('h_mass_'+postname, 'h_mass_'+postname, 400,0.,400.)) self.h_met_vs_mass.append( TH2F('h_met_vs_mass_', 'h_met_vs_mass_', 1000, 0., 1000., 250, 0, 250.)) # self.h_csv1.append(TH1F('h_csv1_', 'h_csv1_', 20,0.,1.)) # self.h_csv2.append(TH1F('h_csv2_', 'h_csv2_', 20,0.,1.)) #self.h_mt.append(TH1F('h_mt_'+postname,'h_mt_'+postname,100,400.,1400.)) #self.h_dPhi.append(TH1F('h_dPhi_'+postname,'h_dPhi_'+postname,70, -3.5, 3.5 )) self.h_N_e.append(TH1F('h_N_e_', 'h_N_e_', 5, 0, 5)) self.h_N_mu.append(TH1F('h_N_mu_', 'h_N_mu_', 5, 0, 5)) self.h_N_tau.append(TH1F('h_N_tau_', 'h_N_tau_', 5, 0, 5)) self.h_N_Pho.append(TH1F('h_N_Pho_', 'h_N_Pho_', 5, 0, 5)) self.h_N_b.append(TH1F('h_N_b_', 'h_N_b_', 10, 0, 10)) self.h_N_j.append(TH1F('h_N_j_', 'h_N_j_', 10, 0, 10)) allquantlist = AllQuantList.getAll() preselquantlist = AllQuantList.getPresel() regquants = AllQuantList.getRegionQuants() def getBins(quant): if 'eta' in quant: bins = '30' low = '-3' high = '3' elif 'dPhi' in quant: bins = '32' low = '0' high = '3.2' elif 'phi' in quant: bins = '64' low = '-3.2' high = '3.2' elif 'csv' in quant: bins = '50' low = '0.' high = '1.' elif 'iso' in quant: bins = '50' low = '0.' high = '0.25' elif 'Zmass' in quant: bins = '60' low = '70.' high = '110.' elif 'Wmass' in quant: bins = '80' low = '0.' high = '400.' elif 'met' in quant: bins = '40' low = '0.' high = '2000.' elif 'nca15jet' in quant: bins = '5' low = '0' high = '5' elif 'nak8jet' in quant: bins = '5' low = '0' high = '5' elif 'bb_Mass' in quant: bins = '25' low = '0.0' high = '250.' elif 'chf' in quant or 'nhf' in quant or 'EF' in quant: bins = '40' low = '0.' high = '1.' elif 'njet' in quant: bins = '12' low = '0' high = '12' elif 'ntau' in quant or 'npho' in quant or 'nele' in quant or 'nmu' in quant or 'nUnclean' in quant: bins = '6' low = '0' high = '6' elif 'recoil' in quant: bins = '40' low = '0.' high = '2000.' elif '_dR_' in quant: bins = '60' low = '0.' high = '6.' elif 'lep1_pT' in quant or 'jet2_pT' in quant: bins = '100' low = '0.' high = '1000.' elif 'lep2_pT' in quant: bins = '200' low = '0.' high = '1000.' elif 'dr_jet_sr2' in quant or 'dr_jet_sr1' in quant: bins = '400' low = '0.' high = '4.' elif 'PV' in quant: bins = '100' low = '0.' high = '100.' elif 'syst' in quant: bins = '40' low = '0.' high = '2000.' else: # for pT, mass, etc. bins = '50' low = '0.' high = '1000.' return bins, low, high for quant in allquantlist: bins, low, high = getBins(quant) exec("self.h_" + quant + ".append(TH1F('h_" + quant + "_','h_" + quant + "_'," + bins + "," + low + "," + high + "))") for quant in preselquantlist: bins, low, high = getBins(quant) exec("self.h_" + quant + ".append(TH1F('h_" + quant + "_','h_" + quant + "_'," + bins + "," + low + "," + high + "))") for quant in regquants: bins, low, high = getBins(quant) exec("self.h_" + quant + ".append(TH1F('h_" + quant + "_','h_" + quant + "_'," + bins + "," + low + "," + high + "))") def getBins2D(quant): ZpTbins = '50' ZpTlow = '0.' ZpThigh = '500.' Rbins = '15' Rlow = '200.' Rhigh = '500.' 
        Mbins = '20'
        Mlow = '0.'
        Mhigh = '500.'
        if 'ZpT_Recoil' in quant:
            return ZpTbins, ZpTlow, ZpThigh, Rbins, Rlow, Rhigh
        elif 'ZpT_MET' in quant:
            return ZpTbins, ZpTlow, ZpThigh, Mbins, Mlow, Mhigh
        elif 'MET_Recoil' in quant:
            return Mbins, Mlow, Mhigh, Rbins, Rlow, Rhigh

    Histos2D = AllQuantList.getHistos2D()
    for quant in Histos2D:
        xbins, xlow, xhigh, ybins, ylow, yhigh = getBins2D(quant)
        exec("self.h_" + quant + ".append(TH2F('h_" + quant + "_','h_" + quant +
             "_'," + xbins + "," + xlow + "," + xhigh + "," + ybins + "," +
             ylow + "," + yhigh + "))")

    h_met_pdf_tmp = []
    for ipdf in range(2):
        midname = str(ipdf)
        h_met_pdf_tmp.append(
            TH1F('h_met_pdf' + '_' + midname + '_', 'h_met_pdf', 1000, 0., 1000.))
    self.h_met_pdf.append(h_met_pdf_tmp)

    h_met_muR_tmp = []
    for imuR in range(2):
        midname = str(imuR)
        h_met_muR_tmp.append(
            TH1F('h_met_muR' + '_' + midname + '_', 'h_met_muR', 1000, 0., 1000.))
    self.h_met_muR.append(h_met_muR_tmp)

    h_met_muF_tmp = []
    for imuF in range(2):
        midname = str(imuF)
        h_met_muF_tmp.append(
            TH1F('h_met_muF' + '_' + midname + '_', 'h_met_muF', 1000, 0., 1000.))
    self.h_met_muF.append(h_met_muF_tmp)

    print "Histograms defined"
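# A hedged alternative sketch (an addition, not in the original): the exec-based
# booking used above can be written with getattr instead, assuming each
# "self.h_<quant>" list already exists as an attribute and that getBins returns
# the bin count and range as strings, exactly as in defineHisto.
for quant in allquantlist:
    bins, low, high = getBins(quant)
    getattr(self, 'h_' + quant).append(
        TH1F('h_' + quant + '_', 'h_' + quant + '_',
             int(bins), float(low), float(high)))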