def main():
    f1 = TFile("/afs/cern.ch/user/m/muell149/workb/HLTONLINE/CMSSW_7_0_0_pre1/src/DQMOffline/Trigger/test/700pre1DQM.root")
    f2 = TFile("/afs/cern.ch/user/m/muell149/workb/HLTONLINE/CMSSW_7_0_0_pre1/src/DQMOffline/Trigger/test/700pre2DQM.root")
    r1 = "700pre1"
    r2 = "700pre2"
    savePlotDir = "DQM_plots/" + r1 + "_" + r2 + "/"
    cmd = "mkdir " + savePlotDir
    subprocess.call(cmd, shell=True)

    path1 = "DQMData/Run 1/HLT/Run summary/GeneralHLTOffline"
    f1.cd(path1)
    dir = f1.Get(path1)
    nextkey = dir.GetListOfKeys()
    for key in nextkey:
        h1 = key.ReadObj()
        #print "h1 before=",type(h1)
        if isinstance(h1, TH1F):
            print h1.GetName()
            if h1 is not None:
                hist_name = h1.GetName()
                path = "DQMData/Run 1/HLT/Run summary/GeneralHLTOffline/" + hist_name
                h2 = f2.Get(path)
                if h2 is not None:
                    #print "h2=",h2.GetName()
                    #print type(h1), type(h2)
                    #break
                    plot(h1, h2, r1, r2, savePlotDir)
def create_feature_array(input_root_file_name, input_tree_name=None, input_tree_class=None, feature_list=None):
    """ Store a subset of a .root ntuple as a numpy array. """
    input_root_file = TFile(input_root_file_name, 'READ')
    # The following convention assumes a label of 0 for background and 1 for signal.
    # Check function arguments.
    if (not input_tree_name) or (not input_root_file.GetListOfKeys().FindObject(input_tree_name)):
        print "\nUnspecified or incorrect TTree name. Consider the file contents below.\n"
        input_root_file.ls()
        print "\n"
        return
    elif (input_tree_class == None):
        print "\nUnspecified class label. The options are 'signal' or 'background'. Eventually implement 'fromVar'."
        return
    elif (feature_list == None):
        print "\nUnspecified list of features. Provide a list of features in the format ['feature1_name', 'feature2_name', ...]."
        return
    else:
        input_tree = input_root_file.Get(input_tree_name)
        input_tree_n_entries = input_tree.GetEntriesFast()
        n_features = len(feature_list)
        # Initialize the feature array with number of rows equal to the
        # number of examples and number of columns equal to the number of features.
        feature_array = np.zeros((input_tree_n_entries, n_features))
def main(): # Open input files file_data = TFile("Hists_Data.root", "read") file_signal = TFile("Hists_PhotonJet.root", "read") file_backgd = TFile("Hists_QCD.root", "read") # Get input TH1F hist_data = file_data.Get('g_pass_Liso_barrel/h_photon_sieie') hist_McSig = file_signal.Get('g_pass_Liso_barrel/h_photon_sieie') hist_McBkg = file_backgd.Get('g_pass_Liso_barrel/h_photon_sieie') # Determine amount of signal/background in data num_sig, num_sig_error, num_bkg, num_bkg_error = \ get_num_sig_bkg(hist_data, hist_McSig, hist_McBkg,0.006,0.02) # Save scaled MC plots to output root file file_output = TFile("Hists_purityFits.root","recreate") file_output.cd() hist_McBkg.Scale(num_bkg/hist_McBkg.Integral()) hist_McSig.Scale(num_sig/hist_McSig.Integral()) hist_data.SetName("data") hist_McBkg.SetName("MC_bgd") hist_McSig.SetName("MC_sig") hist_data.Write() hist_McBkg.Write() hist_McSig.Write() file_output.Close() print "Created %s" % file_output.GetName()
def GetTriggerScalers(filename):
    inputfile = TFile.Open(filename)
    inputfile.cd("PtEMCalTriggerTask")
    gDirectory.ls()
    tasklist = gDirectory.Get("results")
    histlist = tasklist.FindObject("histosPtEMCalTriggerHistograms")
    gROOT.cd()
    triggerhist = histlist.FindObject("hEventTriggers")
    inputfile.Close()

    # Get number of Min. Bias counts
    mbcounts = GetCounts(triggerhist, "MinBias")
    print "MinBias counts: %d" % (mbcounts)
    triggerhist.GetAxis(0).SetRange(2, 2)
    triggercounts = {}
    for trigger in triggerlookup.keys():
        if trigger == "MinBias":
            continue
        triggercounts[trigger] = GetCounts(triggerhist, trigger)
        print "Number of events for trigger %s: %d" % (trigger, triggercounts[trigger])

    hScalers = TH1D("triggerScalers", "trigger scalers", len(triggercounts), -0.5, len(triggercounts) - 0.5)
    counter = 1
    for trigger in triggercounts.keys():
        scaler = float(mbcounts) / float(triggercounts[trigger])
        print "Scaler for trigger %s: %f" % (trigger, scaler)
        hScalers.GetXaxis().SetBinLabel(counter, trigger)
        hScalers.SetBinContent(counter, scaler)
        counter += 1

    outputfile = TFile("TriggerScalers.root", "RECREATE")
    outputfile.cd()
    hScalers.Write()
    outputfile.Close()
def processRootFiles(descmap):
    ifiles = dict()
    ofiles = dict()
    # odirs = []
    for input, output in descmap.iteritems():
        ifnam, idnam = file_dir_names(input)
        ofnam, odnam = file_dir_names(output)
        ifile = ifiles.get(ifnam, None)
        if ifile is None:
            ifile = TFile(ifnam)
            ifiles[ifnam] = ifile
        idir = ifile
        if idnam:
            idir = ifile.Get(idnam)
        ofile = ofiles.get(ofnam, None)
        if ofile is None:
            ofile = TFile(ofnam, 'recreate')
            ofiles[ofnam] = ofile
        odir = ofile
        # import pdb; pdb.set_trace()
        if odnam:
            odir = ofile.Get(odnam)
            if odir == None:
                print 'mkdir', odnam
                odir = ofile.mkdir(odnam)
        copyDirItems(idir, odir)
        ofile.cd()
    for file in ofiles.values():
        file.Write()
    pprint.pprint(ifiles)
    pprint.pprint(ofiles)
def analyze_pulser_data(outputfile='temp.root', force_overwrite=False): server = SoudanServer() file_list = server.get_accepted_runs() time_list = [] mean_list = [] sigma_list = [] for id in file_list: print id rundoc = server.get_run(id.id) if len(time_list) == 0: first_time = rundoc.time_of_start_of_run time_list.append(rundoc.time_of_start_of_run - first_time) pd = rundoc.pulser_data mean_list.append((pd.mean, \ pd.mean_err)) sigma_list.append((pd.sigma, \ pd.sigma_err)) file_to_output = TFile(outputfile, 'recreate'); objects_to_write = [] # generate final plots list_to_analyze = [ ("Mean of pulser signal", "Mean (keV)", mean_list),\ ("Sigma of pulser signal", "Sigma (keV)", sigma_list) ] for name, axis_name, data_list in list_to_analyze: file_to_output.cd() new_graph = TGraphErrors(len(data_list)) new_graph.SetNameTitle(name.replace(' ' , ''),\ name.replace(' ' , '')) new_hist = TH1F(name.replace(' ', '') + "hist",\ name, 100, time_list[0].days, \ time_list[len(time_list)-1].days + 1) new_hist.GetXaxis().SetTitle("Run start time (days)") new_hist.GetYaxis().SetTitle(axis_name) new_hist.GetYaxis().SetTitleOffset(1.17) maximum = data_list[0][0] minimum = data_list[0][0] for i in range(len(data_list)): new_graph.SetPoint(i, time_list[i].days + time_list[i].seconds/(24*3600.),\ data_list[i][0]) new_graph.SetPointError(i, 0,\ data_list[i][1]) if minimum > data_list[i][0]: minimum = data_list[i][0] if maximum < data_list[i][0]: maximum = data_list[i][0] ten_percent = (maximum - minimum)*0.1 new_hist.SetMaximum(maximum + ten_percent) new_hist.SetMinimum(minimum - ten_percent) #new_graph.SetHistogram(new_hist) objects_to_write.append(new_graph) objects_to_write.append(new_hist) file_to_output.cd() for object in objects_to_write: object.Write(object.GetName(), TObject.kOverwrite) file_to_output.Close()
def save_stacks(self):
    """ Saves finished stacks. No canvas... """
    output_dir = root_style.DIR_PLOTS + "/stacks"
    if not os.path.exists(root_style.DIR_PLOTS):
        os.mkdir(root_style.DIR_PLOTS)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    filename = output_dir + "/" + self.ana_histo_name + ".root"
    file = TFile(filename, "RECREATE")
    file.cd()
    name = self.ana_histo_name
    if self.stack_data:
        histo = self.stack_data.histo
        histo.Write(name + "_data")
        add_histo_stack_up(histo).Write(name + "_data_sum")
    if self.stack_overlay_mc:
        histo = self.stack_overlay_mc.histo
        histo.Write(name + "_overlay_mc")
        add_histo_stack_up(histo).Write(name + "_overlay_mc_sum")
    if self.stack_mc:
        histo = self.stack_mc.histo
        histo.Write(name + "_mc")
        add_histo_stack_up(histo).Write(name + "_mc_sum")
    file.Close()
def rewtOneHist(dataset, hwts):
    fileName = condor_dir + "/" + dataset + ".root"
    if not os.path.exists(fileName):
        print "WARNING: didn't find ", fileName
        return
    print "About to reweight histogram in " + fileName
    inFile = TFile(fileName, "UPDATE")
    if inFile.IsZombie() or not inFile.GetNkeys():
        return
    inFile.cd()
    h = inFile.Get(str(arguments.histToBeReWeighted)).Clone()
    if not h:
        print " Could not find hist named " + arguments.histToBeReWeighted + " in " + inFile.GetName()
        return
    h.SetDirectory(0)
    newName = h.GetName() + str(arguments.suffixRename)
    h.SetName(newName)
    dir = arguments.histToBeReWeighted
    dir = dir[:dir.rfind("/")]
    print "Will write hist to directory " + dir
    inFile.cd(dir)
    tdir = inFile.GetDirectory(dir)
    tdir.Delete(newName + ";*")
    for i in range(1, h.GetNbinsX() + 1):
        val = h.GetBinContent(i)
        err = h.GetBinError(i)
        binCtr = h.GetBinCenter(i)
        wt = hwts.GetBinContent(hwts.FindBin(binCtr))
        h.SetBinContent(i, val * wt)
        h.SetBinError(i, err * wt)
    h.Write()
    inFile.Close()
def main(): # Open input files file_data = TFile("data_bg_file.root", "read") file_signal = TFile("sig_gmbs600.root", "read") file_backgd = TFile("data_bg_file.root", "read") # Get input TH1F hist_data = file_data.Get('h_dataTime') hist_McSig = file_signal.Get('h_sgTime__ctau6000_hehb') hist_McBkg = file_backgd.Get('h_bgTime') # Determine amount of signal/background in data num_sig, num_sig_error, num_bkg, num_bkg_error = \ get_num_sig_bkg(hist_data, hist_McSig, hist_McBkg,0.006,0.02) # Save scaled MC plots to output root file file_output = TFile("Hists_purityFits.root","recreate") file_output.cd() hist_McBkg.Scale(num_bkg/hist_McBkg.Integral()) hist_McSig.Scale(num_sig/hist_McSig.Integral()) hist_data.SetName("data") hist_McBkg.SetName("MC_bgd") hist_McSig.SetName("MC_sig") hist_data.Write() hist_McBkg.Write() hist_McSig.Write() file_output.Close() print "Created %s" % file_output.GetName()
def make_hists_rooplot(list_of_roodatahists, x_variable, path_to_save_plot, key_and_colors = None): """Makes a basic RooPlot of plotable objects (RooDataSets or RooPdfs)""" # declare the RooPlot object nbins = list_of_roodatahists[0].createHistogram("temp", x_variable).GetNbinsX() the_plot = x_variable.frame(nbins) # sanity check if key_and_colors: if len(list_of_roodatahists) != len(key_and_colors): print("error: length mismatch with provided draw options") print("len(list_of_roodatahists):", len(list_of_roodatahists)) print("len(key_and_colors):", len(key_and_colors)) print("will draw without key/legend") from ROOT import RooFit as rf from ROOT import TLegend legend = None # plot things on if key_and_colors: legend = TLegend() # plot the histos colorfully with legend for histo, key in zip(list_of_roodatahists, key_and_colors): legname, colorcode = key ma = rf.MarkerColor(colorcode) li = rf.LineColor(colorcode) histo.plotOn(the_plot, ma, li) legend.AddEntry(histo, legname) else: # or just plot them on for histo in list_of_roodatahists: histo.plotOn(the_plot) # declare root TCanvas and draw onto from ROOT import TCanvas canvas = TCanvas() the_plot.Draw() if legend: legend.Draw() canvas.SaveAs(path_to_save_plot) # make rootfile path_to_save_rootfile = path_to_save_plot.replace(".eps", ".root") path_to_save_rootfile = path_to_save_rootfile.replace(".pdf", ".root") path_to_save_rootfile = path_to_save_rootfile.replace(".png", ".root") path_to_save_rootfile = path_to_save_rootfile.replace(".gif", ".root") from ROOT import TFile rootfile = TFile(path_to_save_rootfile, "RECREATE") rootfile.cd() the_plot.Write() if legend: legend.Write() rootfile.Close() return
def merge(fileNames):
    lastchan = None
    files = []
    categories = {}
    for fnam in fileNames:
        chan = fnam.split("_", 1)[0]
        categ = fnam.split("_", 1)[1].split(".")[0]
        categories[fnam] = categ
        if lastchan is not None and chan != lastchan:
            print lastchan, chan
            raise ValueError("cannot add different channels:", str(fileNames))
        files.append(TFile(fnam))
        lastchan = chan
    output = TFile(".".join([lastchan, "root"]), "recreate")
    for file in files:
        print file.GetName()
        categdir = None
        for key in file.GetListOfKeys():
            output.cd()
            obj = file.Get(key.GetName())
            if type(obj) is TDirectoryFile:
                subdir = output.mkdir(key.GetName())
                subdir.cd()
                subobjs = getobjs(obj)
                for subobj in subobjs:
                    subobj.Write()
            else:
                if categdir is None:
                    categdir = output.mkdir(categories[file.GetName()])
                categdir.cd()
                obj.Write(key.GetName())
    output.Close()
def main(): # Open input files file_data = TFile("ZMassLessThan10GeV_TimePerf-plots.root", "read") file_signal = TFile("ZMassLessThan5GeV_TimePerf-plots.root", "read") file_backgd = TFile("ZMass_MoreThan5GeV_TimePerf-plots.root", "read") # Get input TH1F hist_data = file_data.Get('EBEB/seed time') hist_McSig = file_signal.Get('EBEB/seed time') hist_McBkg = file_backgd.Get('EBEB/seed time') # Determine amount of signal/background in data num_sig, num_sig_error, num_bkg, num_bkg_error = \ get_num_sig_bkg(hist_data, hist_McSig, hist_McBkg,0.006,0.02) # Save scaled MC plots to output root file file_output = TFile("Hists_purityFits.root","recreate") file_output.cd() hist_McBkg.Scale(num_bkg/hist_McBkg.Integral()) hist_McSig.Scale(num_sig/hist_McSig.Integral()) hist_data.SetName("data") hist_McBkg.SetName("MC_bgd") hist_McSig.SetName("MC_sig") hist_data.Write() hist_McBkg.Write() hist_McSig.Write() file_output.Close() print "Created %s" % file_output.GetName()
def make_histos(dataset, channel, var, jettag):
    best_fits = []
    ups = []
    downs = []
    outdir = "histos"
    outfile = TFile(outdir + "/%s/%s_%s_%s_pdf.root" % (channel, jettag, dataset, var), "RECREATE")
    for pdf in pdfsets:
        print pdf, dataset, channel, var, jettag
        (best_fit, up, down) = make_pdf_histos(pdf, dataset, channel, var, jettag)
        best_fits.append(best_fit)
        ups.append(up)
        downs.append(down)
        best_fit.SetNameTitle("%s_best" % pdf, "%s_best" % pdf)
        up.SetNameTitle("%s_up" % pdf, "%s_up" % pdf)
        down.SetNameTitle("%s_down" % pdf, "%s_down" % pdf)
        outfile.cd()
        best_fit.Write()
        up.Write()
        down.Write()
        print best_fit.Integral(), up.Integral(), down.Integral()
    (env_best, env_up, env_down) = make_envelope(ups, downs)
    outfile.cd()
    best_name = "%s_%s__%s__pdf__%s" % (jettag, var.replace("cos_theta", "cos_theta_lj"), groups[dataset], "bestfit")
    env_best.SetNameTitle(best_name, best_name)
    #env_best.Write()
    up_name = "%s_%s__%s__pdf__%s" % (jettag, var.replace("cos_theta", "cos_theta_lj"), groups[dataset], "up")
    env_up.SetNameTitle(up_name, up_name)
    env_up.Write()
    down_name = "%s_%s__%s__pdf__%s" % (jettag, var.replace("cos_theta", "cos_theta_lj"), groups[dataset], "down")
    env_down.SetNameTitle(down_name, down_name)
    env_down.Write()
    outfile.Close()
class TaggedFile:
    def __init__(self, name):
        self.file = TFile(self.makeFileName(name), "recreate")

    def Close(self):
        self.file.Close()

    def ls(self):
        self.file.ls()

    def makeFileName(self, name):
        stamp = datetime.today().strftime("%d%b%yT%H%M%S")
        tmpName = name.replace(".root", "_" + stamp + ".root")
        num = 0
        pattern = re.compile(".*(_\d.root$)")
        while os.path.isfile(tmpName):
            num += 1
            match = pattern.match(tmpName)
            if match != None:
                # print match.group(1)
                tmpName = tmpName.replace(match.group(1), "_%d.root" % num)
            else:
                tmpName = tmpName.replace(".root", "_%d.root" % num)
        return tmpName

    def tag(self, name, content):
        named = TNamed(name, content)
        oldDir = gDirectory
        self.file.cd()
        named.Write()
        oldDir.cd()
def make_histos(dataset, channel, var):
    best_fits = []
    ups = []
    downs = []
    outdir = "histos_gen"
    print "OUT", outdir + "/%s_%s_%s_pdf_antitop.root" % (channel, dataset, var)
    outfile = TFile(outdir + "/%s_%s_%s_pdf_antitop.root" % (channel, dataset, var), "RECREATE")
    for pdf in pdfsets:
        print pdf, dataset, channel, var
        (best_fit, up, down) = make_pdf_histos(pdf, dataset, channel, var)
        best_fits.append(best_fit)
        ups.append(up)
        downs.append(down)
        best_fit.SetNameTitle("%s_best" % pdf, "%s_best" % pdf)
        up.SetNameTitle("%s_up" % pdf, "%s_up" % pdf)
        down.SetNameTitle("%s_down" % pdf, "%s_down" % pdf)
        outfile.cd()
        #best_fit.Write()
        #up.Write()
        #down.Write()
        print "integrals", pdf, best_fit.Integral(), up.Integral(), down.Integral()
    (env_best, env_up, env_down) = make_envelope(ups, downs)
    outfile.cd()
    best_name = "%s__%s__pdf__%s" % ("cos_theta_lj_gen", groups[dataset], "bestfit")
    env_best.SetNameTitle(best_name, best_name)
    env_best.Write()
    up_name = "%s__%s__pdf__%s" % ("cos_theta_lj_gen", groups[dataset], "up")
    env_up.SetNameTitle(up_name, up_name)
    env_up.Write()
    down_name = "%s__%s__pdf__%s" % ("cos_theta_lj_gen", groups[dataset], "down")
    env_down.SetNameTitle(down_name, down_name)
    env_down.Write()
    outfile.Close()
def Write(self, prefix='', name=None, update=False):
    """ Write out all (modified) hists, or those in name if given, to a new root file """
    # Short cut for lazy people
    name = [name] if isinstance(name, str) else name
    if not prefix:
        outfile = self.Label + '.root'
    else:
        outfile = prefix.replace('.root', '') + '.root'
    f = TFile(outfile, 'RECREATE') if not update else TFile(outfile, "UPDATE")
    # Special case for passing a hist in (bit of a hack)
    if isinstance(name, TH1):
        name.Write()
        return
    for key, hist in self._HistDict.iteritems():
        if name and not key in name:
            continue
        # Make dirs (last part is hist name)
        for d in key.split('/')[:-1]:
            if not f.GetKey(d):
                f.mkdir(d)
            f.cd(d)
        hist.Write()
    f.Close()
    return
def returnHist(option=None): __hist__ = [[[0 for i in range(len(config.hist_dict))] for j in range(len(config.dirname))] for k in range(len(filelist))] for ienergy, file in enumerate(filelist): if(option==None): Myroot = TFile(file) for icategory, catid in config.dirname.items(): if(option==None): Myroot.cd(icategory) for id in config.hist_dict: hnew = 0 hid = config.hist_dict[id]['hid'] if(option==None): name = configclass.returnName(id, mass) _h_ = gDirectory.Get(name) if(_h_ == None): print 'ERROR : No histogram for ', id hnew = copy.deepcopy(_h_) h_hist[ienergy][catid][hid] = copy.deepcopy(_h_) else: hnew = copy.deepcopy(h_hist[ienergy][catid][hid]) # configclass.DecoHist(id, hnew) __hist__[ienergy][catid][hid] = hnew __hist__[ienergy][catid][hid].SetTitle(icategory) __hist__[ienergy][catid][hid].SetName("h_" + str(ienergy) + "_" + icategory + "_" + id) setUncertainty(__hist__[ienergy][catid][hid], icategory, id, ienergy) return __hist__
def saveHistos():
    outdataset = TFile("rooDS_" + str(options.fileName) + ".root", "recreate")
    outdataset.cd()
    rds.Write()
    tree = rds.tree()
    tree.SetName('atree')
    tree.Write()
    for h in hlist:
        h.Write()
def writeHistos(self, name):
    hfile = TFile(name, "RECREATE")
    hfile.cd()
    for h in self.histos.values():
        h.SetDirectory(hfile)
        h.Write()
    hfile.Write()
    hfile.Close()
def joinRootFiles(files, output): outputFile = TFile(output, "RECREATE") print "Joining Files" print files totalArrSize = 0 if (len(files) == 0): return for file in files: print file f = TFile(file) key = f.FindKey("volts") arr = gatherArray(f, TString("volts")) totalArrSize += arr.GetSize() print totalArrSize f.Close() joinedArr = TArrayF(totalArrSize) pos = 0 minmaxarr = TArrayF(2) minmaxarr.AddAt(0.0, 0) minmaxarr.AddAt(0.0, 1) for file in files: print file f = TFile(file) source = gatherArray(f, TString("volts")) pos = joinArrays(joinedArr, source, pos, minmaxarr) print pos if pos > totalArrSize: print "Uh oh...array too big quitting!" exit() f.Close() f = TFile(files[0]) outputFile.cd() T0 = TH1D(f.Get("T0")) dT = TH1D(f.Get("dT")) sign = TH1F(f.Get("sign")) dV = TH1F(f.Get("dV")) min = minmaxarr.At(0) max = minmaxarr.At(1) maxTH1 = TH1F("vMax", "vMax", 1,-1,1) maxTH1.Fill(0.0, max) minTH1 = TH1F("vMin", "vMin", 1,-1,1) minTH1.Fill(0.0, min) outputFile.WriteObject(joinedArr, "volts") outputFile.Write() outputFile.Close() f.Close()
def ConvertCorrelToRate(xs, histfile, histfileMain):
    outfile = histfile.replace("combined", "rates")
    outf = TFile(outfile, "RECREATE")
    SetOwnership(outf, False)  # tell python not to take ownership
    print "Rate Histogram written to: ", outfile
    infileNEVTS = TFile.Open(histfileMain)
    histNevt = infileNEVTS.Get("NEVTS")
    nevt = histNevt.GetBinContent(1)
    infileNEVTS.Close()
    infile = TFile.Open(histfile)
    histPie = infile.Get("h_pie_QCD")
    histSharedRate = infile.Get("h_shared_rate_QCD")
    histPie_cl = histPie.Clone()
    histSharedRate_cl = histSharedRate.Clone()
    nbins = histPie.GetNbinsX()
    binx = histSharedRate.GetNbinsX()
    biny = histSharedRate.GetNbinsY()
    nbinsSh = histSharedRate.GetBin(binx, biny)
    print "Sample: ", Sample, " Cross section: ", xs / 1.e-36, "N: ", nevt
    for b in xrange(1, nbins + 1):
        #Label = histPie.GetXaxis().GetBinLabel(b)
        CountPie = histPie.GetBinContent(b)
        RatePie = Rate(CountPie, nevt, xs)
        RatePieErr = RateErr(CountPie, nevt, xs)
        histPie_cl.SetBinContent(b, RatePie)
        histPie_cl.SetBinError(b, RatePieErr)
    for b in range(1, nbinsSh + 1):
        CountShared = histSharedRate.GetBinContent(b)
        RateSharedRate = Rate(CountShared, nevt, xs)
        RateSharedRateErr = RateErr(CountShared, nevt, xs)
        histSharedRate_cl.SetBinContent(b, RateSharedRate)
        histSharedRate_cl.SetBinError(b, RateSharedRateErr)
    histSharedRate_cl.SetTitle("QCD")
    outf.cd()
    histPie_cl.Write()
    histSharedRate_cl.Write()
    outf.Close()
    infile.Close()
    return outfile
def __init__(self, root_file_name, wheel, parent=None):
    print root_file_name
    self.RootFile = TFile(root_file_name)
    Frame.__init__(self, parent)
    self.pack()
    self.NewRootFile = TFile('prova.root', 'RECREATE')
    self.createEffHisto(self.fillDetList())
    self.NewRootFile.Write()
    self.NewRootFile.Close()
def ConvertToRate(xs, histfile):
    outfile = histfile.replace("combined", "rates")
    outf = TFile(outfile, "RECREATE")
    SetOwnership(outf, False)  # tell python not to take ownership
    print "Rate Histogram written to: ", outfile
    infile = TFile.Open(histfile)
    histInd = infile.Get("individual")
    histCum = infile.Get("cumulative")
    histHLTpresc = infile.Get("HLTPrescale")
    histL1presc = infile.Get("L1Prescale")
    histL1names = infile.Get("L1Trignames")
    histInd_cl = histInd.Clone()
    histCum_cl = histCum.Clone()
    histNevt = infile.Get("NEVTS")
    nevt = histNevt.GetBinContent(1)
    nbins = histInd.GetNbinsX()
    print "Sample: ", Sample, " Cross section: ", xs / 1.e-36, "N: ", nevt
    for b in xrange(1, nbins + 1):
        Label = histInd.GetXaxis().GetBinLabel(b)
        CountInd = histInd.GetBinContent(b)
        CountCum = histCum.GetBinContent(b)
        RateInd = Rate(CountInd, nevt, xs)
        RateIndErr = RateErr(CountInd, nevt, xs)
        RateCum = Rate(CountCum, nevt, xs)
        RateCumErr = RateErr(CountCum, nevt, xs)
        histInd_cl.SetBinContent(b, RateInd)
        histInd_cl.SetBinError(b, RateIndErr)
        histCum_cl.SetBinContent(b, RateCum)
        histCum_cl.SetBinError(b, RateCumErr)
        # print Label, " ", RateInd, " +- ", RateIndErr, " ", RateCum, " +- ", RateCumErr
    outf.cd()
    histInd_cl.Write()
    histCum_cl.Write()
    histHLTpresc.Write()
    histL1presc.Write()
    histL1names.Write()
    outf.Close()
    infile.Close()
    return outfile
def make_pdf_histos(var, weight, samples, sn, sampn, cuts, cuts_antiiso, outdir, channel, coupling, binning=None, plot_range=None, asymmetry=None, mtmetcut=None): if sn == "qcd" and coupling == "powheg": hname = "%s__%s__%s__%s" % (var, sn, "pdf", "up") write_histogram(var, hname, str(weight), samples, sn, sampn, cuts, cuts_antiiso, outdir, channel, coupling, binning=binning, plot_range=plot_range, asymmetry=asymmetry, mtmetcut=mtmetcut) hname = "%s__%s__%s__%s" % (var, sn, "pdf", "down") write_histogram(var, hname, str(weight), samples, sn, sampn, cuts, cuts_antiiso, outdir, channel, coupling, binning=binning, plot_range=plot_range, asymmetry=asymmetry, mtmetcut=mtmetcut) return if sampn.startswith("Single") or coupling != "powheg": return nPDFSet_size = 44 weight_str = str(weight) samp = samples[sampn] hname_up = "%s__%s__pdf__up" % (var, sn) hname_down = "%s__%s__pdf__down" % (var, sn) #outfile = File(outdir + "/%s_%s.root" % (sampn,hname), "RECREATE") outfile = TFile(outdir + "/%s_%s_pdf.root" % (sampn, var), "RECREATE") if sn=="DATA": weight_str = "1" if var == "eta_lj": var = "abs("+var+")" hist_orig = create_histogram_for_fit(sn, samp, str(weight), cuts, cuts_antiiso, channel, coupling, var, binning=binning, plot_range=plot_range, asymmetry=asymmetry, qcd_extra=None, mtmetcut=mtmetcut) hist_std = create_histogram_for_fit(sn, samp, weight_str, cuts, cuts_antiiso, channel, coupling, var, binning=binning, plot_range=plot_range, asymmetry=asymmetry, qcd_extra=None, mtmetcut=mtmetcut) hist_plus = hist_orig.Clone(hname_up) hist_minus = hist_orig.Clone(hname_down) print sn, samp weighted_histos = [] for i in range(nPDFSet_size): #print "pdf nr = ", i #weight_str = str(weight * Weights.pdf_refweight * Weight("pdf_weights_MSTW2008nlo68cl["+str(i)+"]")) #weight_str = str(weight * Weights.pdf_refweight * Weight("pdf_weights_CT10.pdf_weights_CT10["+str(i)+"]")) weight_str = str(weight * Weight("pdf_weights_cteq66["+str(i)+"]")) hist = create_histogram_for_fit(sn, samp, weight_str, cuts, cuts_antiiso, channel, coupling, var, binning=binning, plot_range=plot_range, asymmetry=asymmetry, qcd_extra=None, mtmetcut=mtmetcut) hist.SetDirectory(0) weighted_histos.append(hist) outfile.cd() #Must cd after histogram creation (hist_plus, hist_minus) = calculate_PDF_uncertainties(hist_std, weighted_histos, hist_plus, hist_minus, orig=hist_orig) #hist_std.Write() hist_plus.Write() hist_minus.Write() #Write histogram to file #logging.info("Writing histogram %s to file %s" % (hist.GetName(), outfile.GetPath())) #logging.info("%i entries, %.2f events" % (hist.GetEntries(), hist.Integral())) #(a,b) = hist.GetName().split("_")[0], hist.GetName().split("_")[1] #print "YIELD", a+"_"+b, hist.Integral() #hist.SetName(hname) #hist.SetDirectory(outfile) outfile.Write() outfile.Close() samples = None
def WriteResults(self):
    outputfile = TFile(self.CreateOutputFilename(), "RECREATE")
    outputfile.cd()
    self._weights.Write(self._weights.GetName(), TObject.kSingleKey)
    for trigger in self._nevents.values():
        trigger.Write(trigger.GetName(), TObject.kSingleKey)
    for mybin in self._pthardbins:
        bindata = self._pthardbins[mybin].MakeROOTPrimitive("bin%d" % (mybin))
        bindata.Write(bindata.GetName(), TObject.kSingleKey)
    outputfile.Close()
def mergeCompositeDataset (composite_dataset, sema): sema.acquire () os.nice (arguments.increment) component_datasets_list = "" component_weights_list = "" component_dataset_file_path = "" composite_dataset_dir = "%s/%s" % (condor_dir,composite_dataset) command = "mkdir -p %s/logMerge" % condor_dir # Create logMerge directory if it does not already exist os.system(command) logMerge = "%s/logMerge/%s.out" % (condor_dir,composite_dataset) # the logfile for the composite dataset for component_dataset in composite_dataset_definitions[composite_dataset]: component_dataset_dir = "%s/%s" % (condor_dir,component_dataset) component_dataset_file_path = component_dataset_dir + ".root" if os.path.isfile(component_dataset_file_path): component_datasets_list += " " + component_dataset_file_path if isinstance (composite_dataset_definitions[composite_dataset], dict): if len (component_weights_list): component_weights_list += "," + str (composite_dataset_definitions[composite_dataset][component_dataset]) else: component_weights_list += str (composite_dataset_definitions[composite_dataset][component_dataset]) else: if len (component_weights_list): component_weights_list += ",1" else: component_weights_list += "1" command = "mergeHists -w %s -p %s %s" % (component_weights_list, composite_dataset_dir, component_datasets_list) if arguments.ttree: command += " -T" command += " >> " + logMerge + " 2>&1" output = open (logMerge, "w") output.write ("\n\n\n") output.write ("Merging component datasets for " + composite_dataset + " dataset\n") output.close () os.system(command) if arguments.verbose: print "Finished executing: " + command fcntl.lockf (sys.stdout, fcntl.LOCK_EX) output = open (logMerge, "r") sys.stdout.write (output.read ()) output.close () fcntl.lockf (sys.stdout, fcntl.LOCK_UN) command = "cat " + logMerge + " >> " + condor_dir + "/mergeAll.out" os.system(command) fout = TFile (composite_dataset_dir + ".root", "update") fout.cd () flags.Write () fout.Close () sema.release ()
def dump(ifn, ofn, hn):
    fi = TFile(ifn)
    t = fi.Get("output")
    print type(t)
    d = TH2F(hn, "", 600, -3, 3, 100, 0, 0.1)
    t.Project(hn, "se:etaSC")
    fo = TFile(ofn, "RECREATE")
    fo.cd()
    d.Write()
    fo.Close()
    fi.Close()
def Write(self, rootfilename):
    """ Write Structure to file """
    writer = TFile(rootfilename, "Recreate")
    writer.cd()
    for triggername, triggerdata in self.__data.iteritems():
        rootprim = triggerdata.GetRootPrimitive(triggername)
        rootprim.Write(triggername, TObject.kSingleKey)
    if self.__mctruth:
        self.__mctruth.GetRootPrimitive().Write("MCTruth", TObject.kSingleKey)
    writer.Close()
def getChannels(condor_dir, dataset):
    # open first input file and re-make its directory structure in the output file
    channels = []
    testFile = TFile(condor_dir + "/" + dataset + ".root")
    testFile.cd()
    for key in testFile.GetListOfKeys():
        if (key.GetClassName() != "TDirectoryFile"):
            continue
        if not "CutFlow" in key.GetName():
            continue
        channels.append(key.GetName())
    return channels
def main(): """ Testing procedure: plot all graphs in a ROOT file. """ TFile.getkeynames = getkeynames myfile = TFile('input_data.root') gROOT.SetBatch(True) for name in myfile.getkeynames(): graph = myfile.Get(name) if type(graph) is not TGraphErrors: continue graph.Draw() gPad.SaveAs("images/" + name + '.png') gPad.Clear()
class BTagSF(object): '''Translate heppy run 1 BTagSF class to python, and update to 2012. ''' def __init__ (self, seed, wp='medium', measurement='central') : self.randm = TRandom3(seed) self.mc_eff_file = TFile('$CMSSW_BASE/src/CMGTools/H2TauTau/data/tagging_efficiencies.root') # MC b-tag efficiencies as measured in HTT by Adinda self.btag_eff_b = self.mc_eff_file.Get('btag_eff_b') self.btag_eff_c = self.mc_eff_file.Get('btag_eff_c') self.btag_eff_oth = self.mc_eff_file.Get('btag_eff_oth') # b-tag SFs from POG calib = ROOT.BTagCalibration("csvv2", os.path.expandvars("$CMSSW_BASE/src/CMGTools/H2TauTau/data/CSVv2_ichep.csv")) op_dict = { 'loose':0, 'medium':1, 'tight':2 } print 'Booking b/c reader' v_sys = getattr(ROOT, 'vector<string>')() v_sys.push_back('up') v_sys.push_back('down') # self.reader_bc = ROOT.BTagCalibrationReader(calib, op_dict[wp], "mujets", measurement) self.reader_bc = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_bc.load(calib, 0, 'mujets') print 'Booking light reader' # self.reader_light = ROOT.BTagCalibrationReader(calib, op_dict[wp], "incl", measurement) self.reader_light = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_light.load(calib, 2, 'incl') @staticmethod def getBTVJetFlav(flav): if abs(flav) == 5: return 0 elif abs(flav) == 4: return 1 return 2 def getMCBTagEff(self, pt, eta, flavor): hist = self.btag_eff_oth if flavor == 5: hist = self.btag_eff_b elif flavor == 4: hist = self.btag_eff_c binx = hist.GetXaxis().FindFixBin(pt) biny = hist.GetYaxis().FindFixBin(abs(eta)) eff = hist.GetBinContent(binx, biny) return eff def getPOGSFB(self, pt, eta, flavor): if flavor in [4, 5]: return self.reader_bc.eval_auto_bounds('central', self.getBTVJetFlav(flavor), eta, pt) return self.reader_light.eval_auto_bounds('central', self.getBTVJetFlav(flavor), eta, pt) def isBTagged(self, pt, eta, csv, jetflavor, is_data, csv_cut=0.8): jetflavor = abs(jetflavor) if is_data or pt < 20. or abs(eta) > 2.4: if csv > csv_cut: return True else: return False SFb = self.getPOGSFB(pt, abs(eta), jetflavor) eff_b = self.getMCBTagEff(pt, abs(eta), jetflavor) # if pt < 30.: # print 'pt, eta:', pt, eta # print 'SFb', SFb # print 'eff_b', eff_b promoteProb_btag = 0. # probability to promote to tagged demoteProb_btag = 0. #probability to demote from tagged self.randm.SetSeed((int)((eta+5)*100000)) btagged = False if SFb < 1.: demoteProb_btag = abs(1. - SFb) else: if eff_b == 0.: promoteProb_btag = 0. else: promoteProb_btag = abs(SFb - 1.)/((SFb/eff_b) - 1.) if csv > csv_cut: btagged = True if demoteProb_btag > 0. and self.randm.Uniform() < demoteProb_btag: btagged = False else: btagged = False if promoteProb_btag > 0. and self.randm.Uniform() < promoteProb_btag: btagged = True return btagged
background = sys.argv[1] background = background.upper () # '' will gives you Dataset_2016.root for the whole year #runPeriods = ['B', 'C', 'D', 'E', 'F', 'G', 'H'] runPeriods = ['BC', 'DEFGH', ''] if background == "FAKE" or background == "ALL": for runPeriod in runPeriods: print "********************************************************************************" print "evaluating fake track systematic (2016", runPeriod, ")" print "--------------------------------------------------------------------------------" fout = TFile.Open ("fakeTrackSystematic_2016" + runPeriod + ".root", "recreate") fakeTrackSystematic = FakeTrackSystematic () fakeTrackSystematic.addTFile (fout) fakeTrackSystematic.addTCanvas (canvas) fakeTrackSystematic.addLuminosityLabel (str (round (lumi["MET_2016" + runPeriod] / 1000.0, 2)) + " fb^{-1} (13 TeV)") fakeTrackSystematic.addChannel ("Basic", "BasicSelection", "MET_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/basicSelection_new") fakeTrackSystematic.addChannel ("DisTrkNHits3", "DisTrkSelectionSidebandD0CutNHits3", "MET_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackSystematic_d0Sideband_new_v2") fakeTrackSystematic.addChannel ("DisTrkNHits3NoD0Cut", "DisTrkSelectionNoD0CutNHits3", "MET_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackSystematic_d0Sideband_new_v2") fakeTrackSystematic.addChannel ("DisTrkNHits4", "DisTrkSelectionSidebandD0CutNHits4", "MET_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackSystematic_d0Sideband_new_v2") fakeTrackSystematic.addChannel ("DisTrkNHits5", "DisTrkSelectionSidebandD0CutNHits5", "MET_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackSystematic_d0Sideband_new_v2") fakeTrackSystematic.addChannel ("DisTrkNHits6", "DisTrkSelectionSidebandD0CutNHits6", "MET_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackSystematic_d0Sideband_new_v2") fakeTrackSystematic.addChannel ("ZtoLL", "ZtoMuMu", "SingleMu_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/zToMuMu_new") fakeTrackSystematic.addChannel ("ZtoMuMuDisTrkNHits3", "ZtoMuMuDisTrkSidebandD0CutNHits3", "SingleMu_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackBackground_d0Sideband_new") fakeTrackSystematic.addChannel ("ZtoMuMuDisTrkNHits3NoD0Cut", "ZtoMuMuDisTrkNoD0CutNHits3", "SingleMu_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackBackground_d0Sideband_new") fakeTrackSystematic.addChannel ("ZtoMuMuDisTrkNHits4", "ZtoMuMuDisTrkSidebandD0CutNHits4", "SingleMu_2016" + runPeriod, dirs['Andrew']+"2016_final_prompt/fakeTrackBackground_d0Sideband_new")
and fileName2_short == "TrackerAlignment.root"): print("Plotting Residuals from Alignment Tracks!") regime = "align" else: print("Not expected input file name!") print("expected both files named:TrackerAlignment.root") print("Please provide TrackerAlingment analysis-level plots.") print( "Run alignment FHICL with 'monitor : false' and output gm2tracker_reco.root file" ) print( "RunAlignmentPlots.fcl on that file, and use the result (TrackerAlignment.root) it in this script" ) sys.exit() f1 = TFile.Open(fileName1) f2 = TFile.Open(fileName2) if f1 and f2: print(str(fileName1) + " and " + str(fileName2) + " are open") else: print(str(fileName1) + " or " + str(fileName2) + " not found") if (regime == "align" ): # only alignment data has Pz/P and implicit station number label_mean_1 = f1.Get("TrackerAlignment/Hits/Labels").GetMean() label_mean_2 = f2.Get("TrackerAlignment/Hits/Labels").GetMean() #print("Mean label 1:", round(label_mean_1)) #print("Mean label 2:", round(label_mean_2)) label_mean = (label_mean_1 + label_mean_2) / 2 if (label_mean < 1280 and label_mean > 1210): stationN = "S12"
gStyle.SetPadLeftMargin(0.15) gStyle.SetPadTopMargin(0.05) gStyle.SetTitleSize(0.045, 'xy') gStyle.SetLabelSize(0.040, 'xy') gStyle.SetPadTickX(1) gStyle.SetPadTickY(1) gStyle.SetLegendBorderSize(0) gStyle.SetOptStat(0) leg = TLegend(0.3, 0.68, 0.75, 0.83) leg.SetFillStyle(0) leg.SetBorderSize(0) leg.SetTextSize(0.04) for iFile in range(len(inputfilenames)): inputfile = TFile('%s/%s' % (inputdir, inputfilenames[iFile])) hCorrYield.append(inputfile.Get(histonames[iFile])) gCorrYield.append(inputfile.Get(graphnames[iFile])) hCorrYield[iFile].SetDirectory(0) hCorrYield[iFile].SetLineColor(linecolors[iFile]) hCorrYield[iFile].SetLineWidth(2) hCorrYield[iFile].SetLineStyle(1) hCorrYield[iFile].SetMarkerSize(1.5) hCorrYield[iFile].SetMarkerColor(colors[iFile]) hCorrYield[iFile].SetMarkerStyle(markers[iFile]) gCorrYield[iFile].SetLineColor(colors[iFile]) gCorrYield[iFile].SetLineWidth(2) gCorrYield[iFile].SetFillStyle(0) leg.AddEntry(hCorrYield[iFile], legendnames[iFile], 'p') hCorrYieldRatio.append(hCorrYield[iFile].Clone("hCorrYield%d" % iFile)) hCorrYieldRatio[iFile].SetDirectory(0)
bulk_thick_um=700 airbox_thick_um=1000 # _gamma=np.array([]) # all_integrals_elec=np.array([]) # all_integrals_elecphs1=np.array([]) # for mat in material: for sims in sim_type: all_integrals=np.array([]) for field_sz in tqdm.tqdm(field_s): for thick in cu_thick_um: for filenum in range(1,201,1): filename=f"/work/lb8075/PhaseSpaces/PS3/PhS3BigSensor_v4_{sims}_{field_sz}/Epi-{thick}_um_Copper-{filenum}-Edep.root" fi=TFile(filename) try: histo=fi.Get("histo") inte=histo.Integral(105,145,105,145) histo.Delete() # del histo # print(inte,sims,field_sz,thick,filenum) all_integrals=np.append(all_integrals,inte) # if("gamma" in sims): # all_integrals_gamma=np.append(all_integrals_gamma,inte) # elif("PhS1" in sims): # all_integrals_elecphs1=np.append(all_integrals_elecphs1,inte) # else: # all_integrals_elec=np.append(all_integrals_elec,inte) # print("exists") except:
options.add_option('-n', '--maxEvts', dest="maxEvts", default=-1, type="int") (options, args) = options.parse_args() # ==========end: options ============= ##options.add_option_group(evtsel) #opt, remainder = options.parse_args() print options maxEvts = options.maxEvts # Define the output histograms fname = options.files.rstrip() ftemp = fname.split("//")[2] fout = TFile(ftemp.split("/")[5].replace('.root', '_out.root'), 'RECREATE') print 'here is something: ', ftemp.split("/")[5].replace('.root', '_out.root') fout.cd() hCutflow = TH1D("hCutflow", ";;Events;", 10, 0.5, 10.5) cutsName = [ 'Total', '== 1 lep', '2D lep Iso', 'N(jet) #geq 3', 'N(fjet) #geq 1', 'leading jet pt > 200', '2nd jet pt > 80', 'N(b jet) #geq 1', 'MET #geq 20', 'N(Higgs) #geq 1' ] ibin = 0 for n in cutsName: ibin = ibin + 1 hCutflow.GetXaxis().SetBinLabel(ibin, n) hNGenEvents = TH1D("hNGenEvents", "Total Events; Total Events; Events", 2, 0.5,
passedTrig = array('i', [0]) #dataset = "WJetsToLNu" dataset = str(sys.argv[1]) isMC = sys.argv[2] isSignal = 0 if "To3l_M" in sys.argv[1]: isSignal = 1 if isSignal == 1: out_file = "/cmsuf/data/store/user/t2/users/nikmenendez/skimmed/NanoAOD/2017/signal/control_sel/" + dataset + ".root" elif isMC == "1": out_file = "/cmsuf/data/store/user/t2/users/nikmenendez/skimmed/NanoAOD/2017/control_sel/" + dataset + ".root" else: out_file = "/cmsuf/data/store/user/t2/users/nikmenendez/skimmed/NanoAOD/2017/data/control_sel/" + dataset + ".root" f_out = TFile(out_file, 'RECREATE') out_tree = TTree("passedEvents", "Events that passed skimmer") out_tree.Branch("Run", Run, "Run/L") out_tree.Branch("Event", Event, "Event/L") out_tree.Branch("LumiSect", LumiSect, "LumiSect/L") out_tree.Branch("nLeptons", nLep, "nLeptons/I") out_tree.Branch("nMuons", nMuons, "nMuons/I") out_tree.Branch("nElectrons", nElectrons, "nElectrons/I") out_tree.Branch("pTL1", pTL1, "pTL1/F") out_tree.Branch("pTL2", pTL2, "pTL2/F") out_tree.Branch("pTL3", pTL3, "pTL3/F") out_tree.Branch("idL1", idL1, "idL1/F") out_tree.Branch("idL2", idL2, "idL2/F") out_tree.Branch("idL3", idL3, "idL3/F") out_tree.Branch("etaL1", etaL1, "etaL1/F")
fitran = [-60, 110] binned = True #fraction of events with valid ZDC vertex #f_4s = 0.575 f_4s = 1. #colM = rt.kMagenta colM = rt.kBlue col0 = rt.kRed #colLR = rt.kGreen colLR = rt.kGreen + 1 #get input inp = TFile.Open(basedir + "/" + infile) tree = inp.Get("jAllTree") gROOT.SetBatch() #output log file out = open("out.txt", "w") #log fit parameters loglist1 = [(x, eval(x)) for x in ["infile", "vbin", "vmin", "vmax"]] loglist2 = [(x, eval(x)) for x in ["fitran", "binned", "f_4s"]] strlog = ut.make_log_string(loglist1, loglist2) ut.log_results(out, strlog + "\n") #input data nbins, vmax = ut.get_nbins(vbin, vmin, vmax) z = RooRealVar("jZDCVtxZ", "z", vmin, vmax)
ROOT.kBlack, ROOT.kBlack, ROOT.kBlack, ROOT.kBlack, ROOT.kBlack, ROOT.kBlack, ROOT.kBlack, ROOT.kBlack, ROOT.kBlack, ROOT.kRed, ROOT.kGreen, ROOT.kBlue ] hist_noCuts = [] hist_allCuts = [] hist_allOtherCuts = [] LUMI = 1000. ## ---- CERN ------- PATH = '/cmshome/gdimperi/Dijet/CMSDIJETrepo/CMSSW_7_1_0_pre9_DiJet/src/CMSDIJET/DijetRootTreeAnalyzer/data/output/' #---- open the files -------------------- i_f = 0 for f in fileNames: inf = TFile.Open(PATH + 'rootFile_' + f + '.root') print inf.GetName() Nev = inf.Get('cutHisto_noCuts_________________ptHat').GetEntries() wt = 1.0 #if i_f < 3: wt = LUMI * xsections[i_f] / Nev h_noCuts = inf.Get('cutHisto_noCuts_________________' + var) h_noCuts.Scale(wt) h_noCuts.Rebin(rebin) h_noCuts.SetDirectory(0) h_noCuts.SetFillColor(colorF[i_f]) h_noCuts.SetLineColor(colorL[i_f]) h_noCuts.SetMarkerColor(colorL[i_f]) hist_noCuts.append(h_noCuts)
histName = calo_init.args.histogramName print("Draw linearity: ", not calo_init.args.noLinearity) from ROOT import gSystem, gROOT, TCanvas, TGraphErrors, TF1, gStyle, kRed, kBlue, kGray, TFile, TTree, TPad from draw_functions import prepare_graph, prepare_second_graph, prepare_single_canvas, prepare_double_canvas, draw_text import numpy from math import sqrt gRes = TGraphErrors() gLin = TGraphErrors() # first get all the resolutions and prepare graphs for ifile, filename in enumerate(calo_init.filenamesIn): energy = calo_init.energy(ifile) f = TFile(filename, "READ") htotal = f.Get(histName) #myfunPre = TF1("firstGaus","gaus", htotal.GetMean() - 2. * htotal.GetRMS(), # htotal.GetMean() + 2. * htotal.GetRMS()) myfunPre = TF1("firstGaus","gaus", energy - 0.2 * energy, energy + 0.2 * energy) resultPre = htotal.Fit(myfunPre, "SRQN") myfun = TF1("finalGaus", "gaus", resultPre.Get().Parameter(1) - 2. * resultPre.Get().Parameter(2), resultPre.Get().Parameter(1) + 2. * resultPre.Get().Parameter(2) ) result = htotal.Fit(myfun, "SRQN") resolution = result.Get().Parameter(2) / result.Get().Parameter(1) resolutionErrorSigma = result.Get().Error(2) / result.Get().Parameter(1) resolutionErrorMean = result.Get().Error(1) * result.Get().Parameter(2) / ( result.Get().Parameter(1) ** 2) resolutionError = sqrt( resolutionErrorSigma ** 2 + resolutionErrorMean ** 2 ) linearity = ( result.Get().Parameter(1) - energy ) / energy linearityError = result.Get().Error(1) / energy
#can't make NewPage for display numplots = len(parsedPlots) if (options.outputfile == "DISPLAY") and (numplots > maxperlist) : printfunc ("ERROR: too many hists to print to display") sys.exit(1) from ROOT import TFile #opening root files for rootopt in parsedRoots : if not isfile(rootopt.filename) : printfunc ("ERROR: unexistent file:",rootopt.filename) sys.exit(1) root = TFile(rootopt.filename,"read") if root.IsOpen() == 0 : printfunc ("ERROR: can't open the file:",rootopt.filename) sys.exit(1) rootopt.rootfile = root rootopt.tree = root.Get("COL/1") printfunc ("Creating plots...") plots = createPlots(parsedPlots,parsedRoots) printfunc ("Filling plots...") fillPlots(plots,parsedPlots,parsedRoots,eventext) if (options.divide) : printfunc ("Calculating ratio") rootopt1 = parsedRoots.pop(0)
def main():
    signal_rootfile = TFile('')
    histo_outputFile = open('signal_control_distributions.xml', 'w+')
def limit(): method = '' channel = "bb" if INCLUDEACC: particleP = "X" else: particleP = "Z'" particle = 'b#bar{b}' multF = ZPTOBB THEORY = ['bstar'] if INCLUDEACC: THEORY.append('SSM') suffix = "_" + BTAGGING if ISMC: suffix += "_MC" if SY: suffix += "_comb" #if method=="cls": suffix="_CLs" if INCLUDEACC: suffix += "_acc" if SY: filename = "./combine/limits/bstar/" + BTAGGING + "/combined_run2/" + YEAR + "_M%d.txt" else: filename = "./combine/limits/bstar/" + BTAGGING + "/" + YEAR + "_M%d.txt" if CATEGORY != "": if SY: filename = filename.replace( BTAGGING + "/combined_run2/", BTAGGING + "/single_category/combined_run2/" + CATEGORY + "_") else: filename = filename.replace( BTAGGING + "/", BTAGGING + "/single_category/" + CATEGORY + "_") suffix += "_" + CATEGORY if ISMC: filename = filename.replace(".txt", "_MC.txt") mass, val = fillValues(filename) #print "mass =",mass #print "val =", val Obs0s = TGraph() Exp0s = TGraph() Exp1s = TGraphAsymmErrors() Exp2s = TGraphAsymmErrors() Sign = TGraph() pVal = TGraph() Best = TGraphAsymmErrors() Theory = {} for i, m in enumerate(mass): if not m in val: print "Key Error:", m, "not in value map" continue if INCLUDEACC: acc_factor = ACCEPTANCE[m] else: acc_factor = 1. n = Exp0s.GetN() Obs0s.SetPoint(n, m, val[m][0] * multF * acc_factor) Exp0s.SetPoint(n, m, val[m][3] * multF * acc_factor) Exp1s.SetPoint(n, m, val[m][3] * multF * acc_factor) Exp1s.SetPointError(n, 0., 0., (val[m][3] - val[m][2]) * multF * acc_factor, (val[m][4] - val[m][3]) * multF * acc_factor) Exp2s.SetPoint(n, m, val[m][3] * multF * acc_factor) Exp2s.SetPointError(n, 0., 0., (val[m][3] - val[m][1]) * multF * acc_factor, (val[m][5] - val[m][3]) * multF * acc_factor) if len(val[m]) > 6: Sign.SetPoint(n, m, val[m][6]) if len(val[m]) > 7: pVal.SetPoint(n, m, val[m][7]) if len(val[m]) > 8: Best.SetPoint(n, m, val[m][8]) if len(val[m]) > 10: Best.SetPointError(n, 0., 0., abs(val[m][9]), val[m][10]) for t in THEORY: Theory[t] = TGraphAsymmErrors() if 'bstar' == t: x = [] y = [] with open('bstar_deta1p1_lhc13TeV2.txt') as fin: for line in fin.readlines(): x.append(float(line.split()[0])) y.append(float(line.split()[1]) * 1000) for ind in range(len(x)): Theory[t].SetPoint(ind + 1, x[ind], y[ind]) Theory[t].SetLineColor(theoryLineColor[t]) Theory[t].SetFillColor(theoryFillColor[t]) Theory[t].SetFillStyle(theoryFillStyle[t]) Theory[t].SetLineWidth(2) continue Xs_dict = HVT[t]['Z']['XS'] if t != 'SSM' else SSM['Z'] for m in sorted(Xs_dict.keys()): if INCLUDEACC and t != 'SSM': acc_factor = ACCEPTANCE[m] else: acc_factor = 1. if m < SIGNALS[0] or m > SIGNALS[-1]: continue #if m < mass[0] or m > mass[-1]: continue #if t!= 'SSM' and m>4500: continue ## I don't have the higher mass xs if m > 4500: continue XsZ, XsZ_Up, XsZ_Down = 0., 0., 0. if t != 'SSM': XsZ = 1000. * HVT[t]['Z']['XS'][m] * SSM["BrZ"][ m] #assuming the same BR as the SSM Z' one XsZ_Up = XsZ * (1. + math.hypot(HVT[t]['Z']['QCD'][m][0] - 1., HVT[t]['Z']['PDF'][m][0] - 1.)) XsZ_Down = XsZ * (1. - math.hypot(1. - HVT[t]['Z']['QCD'][m][0], 1. - HVT[t]['Z']['PDF'][m][0])) else: XsZ = 1000. * SSM['Z'][m] * SSM["BrZ"][m] XsZ_Up = XsZ * (1. + math.hypot(HVT['A1']['Z']['QCD'][m][0] - 1., HVT['A1']['Z']['PDF'][m][0] - 1.)) XsZ_Down = XsZ * (1. - math.hypot(1. - HVT['A1']['Z']['QCD'][m][0], 1. 
- HVT['A1']['Z']['PDF'][m][0])) n = Theory[t].GetN() Theory[t].SetPoint(n, m, XsZ * acc_factor) Theory[t].SetPointError(n, 0., 0., (XsZ - XsZ_Down) * acc_factor, (XsZ_Up - XsZ) * acc_factor) Theory[t].SetLineColor(theoryLineColor[t]) Theory[t].SetFillColor(theoryFillColor[t]) Theory[t].SetFillStyle(theoryFillStyle[t]) Theory[t].SetLineWidth(2) #Theory[t].SetLineStyle(7) Exp2s.SetLineWidth(2) Exp2s.SetLineStyle(1) Obs0s.SetLineWidth(3) Obs0s.SetMarkerStyle(0) Obs0s.SetLineColor(1) Exp0s.SetLineStyle(2) Exp0s.SetLineWidth(3) Exp1s.SetFillColor(417) #kGreen+1 Exp1s.SetLineColor(417) #kGreen+1 Exp2s.SetFillColor(800) #kOrange Exp2s.SetLineColor(800) #kOrange Exp2s.GetXaxis().SetTitle("m_{" + particleP + "} (GeV)") Exp2s.GetXaxis().SetTitleSize(Exp2s.GetXaxis().GetTitleSize() * 1.25) Exp2s.GetXaxis().SetNoExponent(True) Exp2s.GetXaxis().SetMoreLogLabels(True) Exp2s.GetYaxis().SetTitle( "#sigma(" + particleP + ") #bf{#it{#Beta}}(" + particleP + " #rightarrow " + particle + "){} (fb)".format(" #times #Alpha" if INCLUDEACC else "")) Exp2s.GetYaxis().SetTitleOffset(1.5) Exp2s.GetYaxis().SetNoExponent(True) Exp2s.GetYaxis().SetMoreLogLabels() Sign.SetLineWidth(2) Sign.SetLineColor(629) Sign.GetXaxis().SetTitle("m_{" + particleP + "} (GeV)") Sign.GetXaxis().SetTitleSize(Sign.GetXaxis().GetTitleSize() * 1.1) Sign.GetYaxis().SetTitle("Significance") pVal.SetLineWidth(2) pVal.SetLineColor(629) pVal.GetXaxis().SetTitle("m_{" + particleP + "} (GeV)") pVal.GetXaxis().SetTitleSize(pVal.GetXaxis().GetTitleSize() * 1.1) pVal.GetYaxis().SetTitle("local p-Value") Best.SetLineWidth(2) Best.SetLineColor(629) Best.SetFillColor(629) Best.SetFillStyle(3003) Best.GetXaxis().SetTitle("m_{" + particleP + "} (GeV)") Best.GetXaxis().SetTitleSize(Best.GetXaxis().GetTitleSize() * 1.1) Best.GetYaxis().SetTitle("Best Fit (pb)") c1 = TCanvas("c1", "Exclusion Limits", 800, 600) c1.cd() #SetPad(c1.GetPad(0)) c1.GetPad(0).SetTopMargin(0.06) c1.GetPad(0).SetRightMargin(0.05) c1.GetPad(0).SetLeftMargin(0.12) c1.GetPad(0).SetTicks(1, 1) #c1.GetPad(0).SetGridx() #c1.GetPad(0).SetGridy() c1.GetPad(0).SetLogy() Exp2s.Draw("A3") Exp1s.Draw("SAME, 3") for t in THEORY: Theory[t].Draw("SAME, L3") Theory[t].Draw("SAME, L3X0Y0") Exp0s.Draw("SAME, L") if not options.blind: Obs0s.Draw("SAME, L") #setHistStyle(Exp2s) Exp2s.GetXaxis().SetTitleSize(0.050) Exp2s.GetYaxis().SetTitleSize(0.050) Exp2s.GetXaxis().SetLabelSize(0.045) Exp2s.GetYaxis().SetLabelSize(0.045) Exp2s.GetXaxis().SetTitleOffset(0.90) Exp2s.GetYaxis().SetTitleOffset(1.25) Exp2s.GetYaxis().SetMoreLogLabels(True) Exp2s.GetYaxis().SetNoExponent(True) if INCLUDEACC: Exp2s.GetYaxis().SetRangeUser(0.05, 5.e3) else: Exp2s.GetYaxis().SetRangeUser(0.1, 5.e3) #else: Exp2s.GetYaxis().SetRangeUser(0.1, 1.e2) #Exp2s.GetXaxis().SetRangeUser(mass[0], min(mass[-1], MAXIMUM[channel] if channel in MAXIMUM else 1.e6)) Exp2s.GetXaxis().SetRangeUser(SIGNALS[0], SIGNALS[-1]) #drawAnalysis(channel) drawAnalysis("") #drawRegion(channel, True) drawRegion("", True) #drawCMS(LUMI, "Simulation Preliminary") #Preliminary if CATEGORY == "": #drawCMS(LUMI, "Work in Progress", suppressCMS=True) drawCMS(LUMI, "", suppressCMS=True) else: #drawCMS(LUMI, "Work in Progress, "+CAT_LABELS[CATEGORY], suppressCMS=True) drawCMS(LUMI, CAT_LABELS[CATEGORY], suppressCMS=True) # legend top = 0.9 nitems = 4 + len(THEORY) leg = TLegend(0.55, top - nitems * 0.3 / 5., 0.98, top) #leg = TLegend(0.45, top-nitems*0.3/5., 0.98, top) leg.SetBorderSize(0) leg.SetFillStyle(0) #1001 leg.SetFillColor(0) leg.SetHeader("95% CL upper 
limits") leg.AddEntry(Obs0s, "Observed", "l") leg.AddEntry(Exp0s, "Expected", "l") leg.AddEntry(Exp1s, "#pm 1 std. deviation", "f") leg.AddEntry(Exp2s, "#pm 2 std. deviation", "f") for t in THEORY: leg.AddEntry(Theory[t], theoryLabel[t], "fl") leg.Draw() latex = TLatex() latex.SetNDC() latex.SetTextSize(0.045) latex.SetTextFont(42) #latex.DrawLatex(0.66, leg.GetY1()-0.045, particleP+" #rightarrow "+particle+"h") leg2 = TLegend(0.12, 0.225 - 2 * 0.25 / 5., 0.65, 0.225) leg2.SetBorderSize(0) leg2.SetFillStyle(0) #1001 leg2.SetFillColor(0) c1.GetPad(0).RedrawAxis() leg2.Draw() if not options.blind: Obs0s.Draw("SAME, L") c1.GetPad(0).Update() if not gROOT.IsBatch(): raw_input("Press Enter to continue...") c1.Print("combine/plotsLimit/ExclusionLimits/" + YEAR + suffix + ".png") c1.Print("combine/plotsLimit/ExclusionLimits/" + YEAR + suffix + ".pdf") if 'ah' in channel or 'sl' in channel: c1.Print("combine/plotsLimit/ExclusionLimits/" + YEAR + suffix + ".C") c1.Print("combine/plotsLimit/ExclusionLimits/" + YEAR + suffix + ".root") for t in THEORY: print "Model", t, ":", for m in range(mass[0], mass[-1], 1): if not (Theory[t].Eval(m) > Obs0s.Eval(m)) == ( Theory[t].Eval(m + 1) > Obs0s.Eval(m + 1)): print m, print "" return ##FIXME # ---------- Significance ---------- c2 = TCanvas("c2", "Significance", 800, 600) c2.cd() c2.GetPad(0).SetTopMargin(0.06) c2.GetPad(0).SetRightMargin(0.05) c2.GetPad(0).SetTicks(1, 1) c2.GetPad(0).SetGridx() c2.GetPad(0).SetGridy() Sign.GetYaxis().SetRangeUser(0., 5.) Sign.Draw("AL3") #drawCMS(LUMI, "Preliminary") drawCMS(LUMI, "Work in Progress", suppressCMS=True) drawAnalysis(channel[1:3]) c2.Print("combine/plotsLimit/Significance/" + YEAR + suffix + ".png") c2.Print("combine/plotsLimit/Significance/" + YEAR + suffix + ".pdf") # c2.Print("plotsLimit/Significance/"+YEAR+suffix+".root") # c2.Print("plotsLimit/Significance/"+YEAR+suffix+".C") # ---------- p-Value ---------- c3 = TCanvas("c3", "p-Value", 800, 600) c3.cd() c3.GetPad(0).SetTopMargin(0.06) c3.GetPad(0).SetRightMargin(0.05) c3.GetPad(0).SetTicks(1, 1) c3.GetPad(0).SetGridx() c3.GetPad(0).SetGridy() c3.GetPad(0).SetLogy() pVal.Draw("AL3") pVal.GetYaxis().SetRangeUser(2.e-7, 0.5) ci = [ 1., 0.317310508, 0.045500264, 0.002699796, 0.00006334, 0.000000573303, 0.000000001973 ] line = TLine() line.SetLineColor(922) line.SetLineStyle(7) text = TLatex() text.SetTextColor(922) text.SetTextSize(0.025) text.SetTextAlign(12) for i in range(1, len(ci) - 1): line.DrawLine(pVal.GetXaxis().GetXmin(), ci[i] / 2, pVal.GetXaxis().GetXmax(), ci[i] / 2) text.DrawLatex(pVal.GetXaxis().GetXmax() * 1.01, ci[i] / 2, "%d #sigma" % i) #drawCMS(LUMI, "Preliminary") drawCMS(LUMI, "Work in Progress", suppressCMS=True) drawAnalysis(channel[1:3]) c3.Print("combine/plotsLimit/pValue/" + YEAR + suffix + ".png") c3.Print("combine/plotsLimit/pValue/" + YEAR + suffix + ".pdf") # c3.Print("plotsLimit/pValue/"+YEAR+suffix+".root") # c3.Print("plotsLimit/pValue/"+YEAR+suffix+".C") # --------- Best Fit ---------- c4 = TCanvas("c4", "Best Fit", 800, 600) c4.cd() c4.GetPad(0).SetTopMargin(0.06) c4.GetPad(0).SetRightMargin(0.05) c4.GetPad(0).SetTicks(1, 1) c4.GetPad(0).SetGridx() c4.GetPad(0).SetGridy() Best.Draw("AL3") #drawCMS(LUMI, "Preliminary") drawCMS(LUMI, "Work in Progress", suppressCMS=True) drawAnalysis(channel[1:3]) c4.Print("combine/plotsLimit/BestFit/" + YEAR + suffix + ".png") c4.Print("combine/plotsLimit/BestFit/" + YEAR + suffix + ".pdf") # c4.Print("plotsLimit/BestFit/"+YEAR+suffix+".root") # 
c4.Print("plotsLimit/BestFit/"+YEAR+suffix+".C") if not gROOT.IsBatch(): raw_input("Press Enter to continue...") if 'ah' in channel: outFile = TFile("bands.root", "RECREATE") outFile.cd() pVal.Write("graph") Best.Write("best") outFile.Close()
'''
import ROOT
from ROOT import gROOT, TFile, TH1F
import sys
import json

inputFiles = sys.argv[1:]
outputJson = {}

for file in inputFiles:
    f = file
    # print file
    # f = glob.glob(file+'/*.root')[0]
    inputFile = TFile(f)
    tree = inputFile.Get('nTupleTree/tree')
    for event in tree:
        run = event.__getattr__('Event.Run')
        lumi = event.__getattr__('Event.LumiSection')
        if str(run) in outputJson.keys():
            lumisSoFar = outputJson[str(run)]
            newLumi = True
            for l in lumisSoFar:
                if lumi == l[0]:
                    newLumi = False
            if newLumi:
                outputJson[str(run)].append([lumi, lumi])
        else:
            outputJson[str(run)] = [[lumi, lumi]]
        # print run,lumi
import h5py
import numpy as np
from ROOT import TFile

f = TFile('ArCube_0000.root')
argon = f.Get('argon')
file = h5py.File('ArCube_0000.hdf5', 'w')
g1 = file.create_group('incoming_neutrino')
g2 = file.create_group('primary_interaction')
g3 = file.create_group('4D_charge_distribution')

incoming_variables = ['pida', 'xa', 'ya', 'za', 'ta', 'pxa', 'pya', 'pza', 'ekina', 'ma']
primary_interactions = ['pidi', 'xi', 'yi', 'zi', 'pyi', 'ti', 'pzi', 'ekini', 'mi']
charge_dist_vars = ['tidq', 'pidq', 'sidq', 'dq', 'xq', 'yq', 'zq']

for a1 in incoming_variables:
    array = np.zeros(1000)
    for b1 in range(1000):
        argon.GetEntry(b1)
        # read the branch value after loading the entry; evaluating it once
        # before the loop would store the same value for every entry
        array[b1] = getattr(argon, a1)
    g1.create_dataset(a1, data=array)

ni_array = np.zeros(1000, dtype=int)
for a2 in range(argon.GetEntries()):
    argon.GetEntry(a2)
    ni_array[a2] = int(argon.ni)
g2.create_dataset('ni', data=ni_array)
class PrepareHistos(object): """ Class to define histogram preparation methods """ def __init__(self, useCache=False, useCacheToTreeFallback=False): """ Initialise the preparation object @param useCache Read out histograms from the cache file rather than trees @param useCacheToTreeFallBack If reading from histograms, fall back to trees in case they are not found """ from configManager import configMgr self.configMgr = configMgr self.var = "" self.cuts = "" self.weights = "" self.channel = None self.cutDict = {} self.cutList = [] self.histList = [] self.nameList = [] # histos or trees? self.useCache = useCache # for trees self.currentChainName = '' # for histos self.cacheFileName = '' self.cache2FileName = '' # fallback? self.useCacheToTreeFallback = useCacheToTreeFallback def __del__(self): if self.cacheFile != None: self.cacheFile.Close() if self.cache2File != None: self.cache2File.Close() def setUseCacheToTreeFallback(useCacheToTreeFallback): """ Set the use of fallback to trees to the argument @param useCacheToTreeFallback Boolean to determine fallback """ self.useCacheToTreeFallback = useCacheToTreeFallback def setHistoPaths(self, filepath, file2path=''): """ Set histogram paths @param filepath Name of the cache file @param file2path Optional path of extra file (used in conjunction with fallback) """ self.cacheFileName = filepath self.cache2FileName = file2path if os.path.isfile(file2path): self.cache2File = TFile(file2path, "READ") else: self.cache2File = None if os.path.isfile(filepath): if not os.path.isfile(file2path): # default, no archive file self.cacheFile = TFile(filepath, "READ") self.recreate = False else: self.cacheFile = TFile(filepath, "UPDATE") self.recreate = True else: self.cacheFile = TFile(filepath, "RECREATE") self.recreate = True def checkTree(self, treeName, fileList): """ Check existence of a tree in a list of files @param treeName Name of the tree @param fileList List of files @retval Returns true if the tree has been found """ if self.useCache and not self.useCacheToTreeFallback: log.debug("Not using cache or cache fallback: no trees") return False if len(fileList) == 0 or len(treeName) == 0: return False for f in fileList: file = TFile.Open(f) if file is None: continue tree = file.Get(treeName) if not tree or tree is None: file.Close() continue if tree is not None and tree.ClassName() != 'TTree': file.Close() continue file.Close() return True return False def read(self, treeName, fileList): """ Read in the root object that will make histograms and set the TChain objects in ConfigManager @param treeName Name of the tree to use @param fileList List of files to use """ if self.useCache and not self.useCacheToTreeFallback and treeName == '': log.info("Not using trees, will read histograms from %s" % (fileList)) return if not self.useCache and treeName == '': log.fatal("No tree name provided") return if self.useCache and self.useCacheToTreeFallback and treeName == '': log.warning( "No tree name provided, cache fallback to trees will not work") return if not self.currentChainName == '' and not ( self.currentChainName.find(treeName) > -1): del self.configMgr.chains[self.currentChainName] chainID = treeName for fileName in fileList: chainID += '_' + fileName self.currentChainName = chainID ## MB : no need to recreate chain if it already exists if not self.configMgr.chains.has_key(chainID): self.configMgr.chains[chainID] = TChain(treeName) for fileName in fileList: self.configMgr.chains[self.currentChainName].Add(fileName) return def addHisto(self, name, nBins=0, 
binLow=0., binHigh=0., nBinsY=0, binLowY=0., binHighY=0., useOverflow=False, useUnderflow=False, forceNoFallback=False): """ Make histogram and add it to the dictionary of prepared histograms @param name Name of the histogram @param nBins Number of X bins @param binLow Lower edge of left X bin @param binHigh Higher edge of rigth X bin @param nBinsY Number of Y bins @param binLowY Lower edge of left Y bin @param binHighY Higher edge of right Y bin @param useOverflow Use the overflow bins or not? @param useUnderflow Use the underflow bins or not ? @param forceNoFallBack If true, never use the fallback mechanism for this histogram @retval The constructed histogram """ if self.useCache: return self.__addHistoFromCache(name, nBins, binLow, binHigh, useOverflow, useUnderflow, forceNoFallback) return self.__addHistoFromTree(name, nBins, binLow, binHigh, nBinsY, binLowY, binHighY, useOverflow, useUnderflow) def __addHistoFromTree(self, name, nBins=0, binLow=0., binHigh=0., nBinsY=0, binLowY=0., binHighY=0., useOverflow=False, useUnderflow=False): """ Use the TTree::Draw method to create the histograms for var from cuts and weights defined in instance Recover from ROOT memory and add to dictionary of histograms @param name Name of the histogram @param nBins Number of X bins @param binLow Lower edge of left X bin @param binHigh Higher edge of rigth X bin @param nBinsY Number of Y bins @param binLowY Lower edge of left Y bin @param binHighY Higher edge of right Y bin @param useOverflow Use the overflow bins or not? @param useUnderflow Use the underflow bins or not ? @retval The constructed histogram """ if self.var == "cuts": if self.configMgr.hists[name] is None: self.configMgr.hists[name] = TH1F( name, name, len(self.channel.regions), self.channel.binLow, float(len(self.channel.regions)) + self.channel.binLow) for (iReg, reg) in enumerate(self.channel.regions): self.cuts = self.configMgr.cutsDict[reg] tempName = "%stemp%s" % (name, str(iReg)) tempHist = TH1F(tempName, tempName, 1, 0.5, 1.5) self.configMgr.chains[self.currentChainName].Project( tempName, self.cuts, self.weights) error = Double() integral = tempHist.IntegralAndError( 1, tempHist.GetNbinsX(), error) self.configMgr.hists[name].SetBinContent( iReg + 1, integral) self.configMgr.hists[name].SetBinError(iReg + 1, error) self.configMgr.hists[name].GetXaxis().SetBinLabel( iReg + 1, reg) tempHist.Delete() for iBin in xrange( 1, self.configMgr.hists[name].GetNbinsX() + 1): binVal = self.configMgr.hists[name].GetBinContent(iBin) binErr = self.configMgr.hists[name].GetBinError(iBin) if binVal < 0.0: self.configMgr.hists[name].SetBinContent(iBin, 0.0) else: if self.configMgr.hists[name] is None: if self.var.find(":") == -1: self.configMgr.hists[name] = TH1F(name, name, self.channel.nBins, self.channel.binLow, self.channel.binHigh) else: self.configMgr.hists[name] = TH2F( name, name, self.channel.nBins, self.channel.binLow, self.channel.binHigh, self.channelnBinsY, self.channel.binLowY, self.channel.binHighY) for (iReg, reg) in enumerate(self.channel.regions): tempName = "%stemp%s" % (name, str(iReg)) #self.cuts = self.configMgr.cutsDict[reg] if self.var.find(":") == -1: tempHist = TH1F(tempName, tempName, self.channel.nBins, self.channel.binLow, self.channel.binHigh) else: tempHist = TH2F(tempName, tempName, self.channel.nBins, self.channel.binLow, self.channel.binHigh, self.channelnBinsY, self.channel.binLowY, self.channel.binHighY) #print "!!!!!! PROJECTING",name+"temp"+str(iReg) #print "!!!!!! VAR",self.var #print "!!!!!! 
WEIGHTS",self.weights #print "!!!!!! CUTS",self.cuts nCuts = self.configMgr.chains[ self.currentChainName].Project( tempName, self.var, self.weights + " * (" + self.cuts + ")") self.configMgr.hists[name].Add(tempHist.Clone()) tempHist.Delete() for iBin in xrange( 1, self.configMgr.hists[name].GetNbinsX() + 1): binVal = self.configMgr.hists[name].GetBinContent(iBin) binErr = self.configMgr.hists[name].GetBinError(iBin) if binVal < 0.: self.configMgr.hists[name].SetBinContent(iBin, 0.) #if binErr==0: # self.configMgr.hists[name].SetBinError(iBin,1E-8) self.name = name #Over/Underflow bins if useOverflow or useUnderflow: self.updateOverflowBins(self.configMgr.hists[name], useOverflow, useUnderflow) return self.configMgr.hists[name] def __addHistoFromCacheWithoutFallback(self, name, nBins=None, binLow=None, binHigh=None, useOverflow=False, useUnderflow=False): """ simple helper to prevent specifying all the defaults """ return self.__addHistoFromCache(name, nBins, binLow, binHigh, useOverflow, useUnderflow, True, True) def __addHistoFromCache(self, name, nBins=None, binLow=None, binHigh=None, useOverflow=False, useUnderflow=False, forceNoFallback=False, forceReturn=False): """ Add this histogram to the dictionary of histograms. """ #Note: useOverflow and useUnderflow has no effect. It's there just for symmetry with TreePrepare above. if self.configMgr.hists[name] is None: try: self.configMgr.hists[name] = self.cache2File.Get(name) testsum = self.configMgr.hists[name].GetSum() except: # IOError: log.info( "Could not get histogram <%s> from backupCacheFile %s, trying cacheFile" % (name, self.cache2FileName)) try: self.configMgr.hists[name] = self.cacheFile.Get(name) testsum = self.configMgr.hists[name].GetSum() except: # IOError: if forceNoFallback or not self.useCacheToTreeFallback: self.configMgr.hists[name] = None if forceReturn: # used for QCD histograms log.info("Could not find histogram <" + name + "> in " + self.cacheFileName + " ! Force return.") return None log.debug( "__addHistoFromCache(): forceNoFallback=%s useCacheToTreeFallback=%s" % (forceNoFallback, self.useCacheToTreeFallback)) log.error("Could not find histogram <" + name + "> in " + self.cacheFileName + " ! ") raise #Exception("Could not find histogram <"+name+"> in "+self.cacheFileName) else: log.info("Could not find histogram <" + name + "> in " + self.cacheFileName + ", trying from tree ") self.configMgr.hists[name] = None return self.__addHistoFromTree(name, nBins, binLow, binHigh, nBins, binLow, binHigh, useOverflow, useUnderflow) if not (self.configMgr.hists[name] is None): if not (int(self.channel.nBins) == int(self.configMgr.hists[name].GetNbinsX())) or \ ( abs(self.channel.binLow - self.configMgr.hists[name].GetBinLowEdge(1))>0.00001 ) or \ ( abs(self.channel.binHigh - self.configMgr.hists[name].GetXaxis().GetBinUpEdge(self.configMgr.hists[name].GetNbinsX())) > 0.00001): if forceNoFallback or not self.useCacheToTreeFallback: self.configMgr.hists[name] = None if forceReturn: # used for QCD histograms log.info("Could not find histogram <" + name + "> in " + self.cacheFileName + " ! Force return.") return None log.debug( "__addHistoFromCache(): forceNoFallback=%s useCacheToTreeFallback=%s" % (forceNoFallback, self.useCacheToTreeFallback)) log.error("Could not find histogram <" + name + "> in " + self.cacheFileName + " ! 
") raise #Exception("Could not find histogram <"+name+"> in "+self.cacheFileName) else: log.info("Histogram has different binning <" + name + "> in " + self.cacheFileName + ", trying from tree ") log.info( "addHistoFromCache: required binning %d,%f,%f, while histo has %d,%f,%f" % (self.channel.nBins, self.channel.binLow, self.channel.binHigh, self.configMgr.hists[name].GetNbinsX(), self.configMgr.hists[name].GetBinLowEdge(1), self.configMgr.hists[name].GetXaxis().GetBinUpEdge( self.configMgr.hists[name].GetNbinsX()))) self.configMgr.hists[name] = None return self.__addHistoFromTree(name, self.channel.nBins, self.channel.binLow, self.channel.binHigh, nBins, binLow, binHigh, useOverflow, useUnderflow) self.name = name return self.configMgr.hists[name] def addQCDHistos(self, sample, useOverflow=False, useUnderflow=False): """ Make the nominal QCD histogram and its errors @param sample The sample to use @param useOverflow Use the overflow bins or not @param useUnderflow Use the underflow bins or not """ if self.useCache: return self.__addQCDHistosFromCache(sample, useOverflow, useUnderflow) return self.__addQCDHistosFromTree(sample, useOverflow, useUnderflow) def __addQCDHistosFromTree(self, sample, useOverflow=False, useUnderflow=False): """ Make the nominal QCD histogram and its up and down fluctuations @param sample The sample to use @param useOverflow Use the overflow bins or not @param useUnderflow Use the underflow bins or not """ regString = "".join(self.channel.regions) prefixNom = "h%sNom_%s_obs_%s" % ( sample.name, regString, self.channel.variableName.replace("/", "")) prefixHigh = "h%sHigh_%s_obs_%s" % ( sample.name, regString, self.channel.variableName.replace("/", "")) prefixLow = "h%sLow_%s_obs_%s" % ( sample.name, regString, self.channel.variableName.replace("/", "")) if self.channel.hasBQCD: self.weights = self.configMgr.weightsQCDWithB weightsQCD = self.configMgr.weightsQCDWithB else: self.weights = self.configMgr.weightsQCD weightsQCD = self.configMgr.weightsQCD self.__addHistoFromTree(prefixNom) self.__addHistoFromTree(prefixHigh) self.__addHistoFromTree(prefixLow) self.configMgr.hists[prefixNom].SetCanExtend(0) self.configMgr.hists[prefixHigh].SetCanExtend(0) self.configMgr.hists[prefixLow].SetCanExtend(0) systName = "%sSyst" % self.name statName = "%sStat" % self.name qcdHistoSyst = TH1F(systName, systName, self.channel.nBins, self.channel.binLow, self.channel.binHigh) qcdHistoStat = TH1F(statName, statName, self.channel.nBins, self.channel.binLow, self.channel.binHigh) if self.var == "cuts": for (iReg, reg) in enumerate(self.channel.regions): if self.configMgr.hists[prefixNom + "_" + str(iReg + 1)] is None: tempNameSyst = "%sSyst%s" % (self.name, str(iReg + 1)) qcdHistoSystTemp = TH1F(tempNameSyst, tempNameSyst, self.channel.nBins, self.channel.binLow, self.channel.binHigh) self.configMgr.chains[self.currentChainName].Project( tempNameSyst, self.configMgr.cutsDict[reg], self.weights + "Syst") qcdHistoSyst.SetBinContent( iReg + 1, qcdHistoSystTemp.GetBinContent(1)) tempNameStat = "%sStat%s" % (self.name, str(iReg + 1)) qcdHistoStatTemp = TH1F(tempNameStat, tempNameStat, self.channel.nBins, self.channel.binLow, self.channel.binHigh) self.configMgr.chains[self.currentChainName].Project( tempNameStat, self.configMgr.cutsDict[reg], self.weights + "Stat") qcdHistoStat.SetBinContent( iReg + 1, qcdHistoStatTemp.GetBinContent(1)) else: if self.weights == "1.0": sysWeightStat = "0.01" #rough average of Dan's results sysWeightSyst = "0.25" #rough average of Dan's results else: 
sysWeightStat = self.weights + "Stat" sysWeightSyst = self.weights + "Syst" if self.configMgr.hists[prefixNom + "_" + str(1)] is None: self.configMgr.chains[self.currentChainName].Project( systName, self.var, sysWeightSyst + " * (" + self.cuts + ")") self.configMgr.chains[self.currentChainName].Project( statName, self.var, sysWeightStat + " * (" + self.cuts + ")") ## correct nominal bins (not overflow) for iBin in xrange(1, self.configMgr.hists[prefixNom].GetNbinsX() + 1): # if self.configMgr.hists[prefixNom + "_" + str(iBin)] is None: if self.channel.variableName == "cuts": self.configMgr.hists[prefixNom + "_" + str(iBin)] = TH1F( prefixNom + "_" + str(iBin), prefixNom + "_" + str(iBin), len(self.channel.regions), self.channel.binLow, float(len(self.channel.regions)) + self.channel.binLow) else: self.configMgr.hists[prefixNom + "_" + str(iBin)] = TH1F( prefixNom + "_" + str(iBin), prefixNom + "_" + str(iBin), self.channel.nBins, self.channel.binLow, self.channel.binHigh) binVal = self.configMgr.hists[prefixNom].GetBinContent(iBin) #binError = sqrt(qcdHistoSyst.GetBinContent(iBin)**2+qcdHistoStat.GetBinContent(iBin)**2) #binStatError = qcdHistoStat.GetBinContent(iBin) if qcdHistoStat.GetBinContent( iBin ) < -1 * qcdHistoSyst.GetBinContent( iBin)**2: # Exception for folks using negative weights binError = sqrt(-qcdHistoSyst.GetBinContent(iBin)**2 - qcdHistoStat.GetBinContent(iBin)) else: binError = sqrt( qcdHistoSyst.GetBinContent(iBin)**2 + qcdHistoStat.GetBinContent(iBin)) if qcdHistoStat.GetBinContent( iBin ) < 0: # Check for negative weights (possible in QCD!) binStatError = sqrt(-qcdHistoStat.GetBinContent(iBin)) else: binStatError = sqrt(qcdHistoStat.GetBinContent(iBin)) binSystError = qcdHistoSyst.GetBinContent(iBin) ##self.configMgr.hists[prefixNom+"_"+str(iBin)].SetBinContent(iBin,self.configMgr.hists[prefixNom].GetBinContent(iBin)) # #print "GREPME %s bin %g content %.2g stat error %.2g syst error %.2g total error %.2g" % (prefixNom,iBin,self.configMgr.hists[prefixNom].GetBinContent(iBin),binStatError,binSystError,binError) if binVal > 0.: #self.configMgr.hists[prefixNom].SetBinContent(iBin,binVal) self.configMgr.hists[prefixNom + "_" + str(iBin)].SetBinContent( iBin, self.configMgr.hists[prefixNom]. GetBinContent(iBin)) else: self.configMgr.hists[prefixNom + "_" + str(iBin)].SetBinContent(iBin, 0.) self.configMgr.hists[prefixNom + "_" + str(iBin)].SetBinError( iBin, binError) self.configMgr.hists[prefixNom].SetBinContent(iBin, 0.) self.configMgr.hists[prefixNom].SetBinError(iBin, binError) # if self.configMgr.hists[prefixHigh + "_" + str(iBin)] is None: if self.channel.variableName == "cuts": self.configMgr.hists[prefixHigh + "_" + str(iBin)] = TH1F( prefixHigh + "_" + str(iBin), prefixHigh + "_" + str(iBin), len(self.channel.regions), self.channel.binLow, float(len(self.channel.regions)) + self.channel.binLow) else: self.configMgr.hists[prefixHigh + "_" + str(iBin)] = TH1F( prefixHigh + "_" + str(iBin), prefixHigh + "_" + str(iBin), self.channel.nBins, self.channel.binLow, self.channel.binHigh) if binVal + binError > 0.: # self.configMgr.hists[prefixNom].GetBinContent(iBin) > 0.: self.configMgr.hists[prefixHigh + "_" + str( iBin )].SetBinContent( iBin, binVal + binError ) #self.configMgr.hists[prefixNom].GetBinContent(iBin)+binError) self.configMgr.hists[prefixHigh].SetBinContent( iBin, binVal + binError ) #self.configMgr.hists[prefixNom].GetBinContent(iBin)+binError) else: self.configMgr.hists[prefixHigh + "_" + str(iBin)].SetBinContent(iBin, 0.) 
self.configMgr.hists[prefixHigh + "_" + str(iBin)].SetBinError( iBin, binError) self.configMgr.hists[prefixHigh].SetBinContent(iBin, 0.) self.configMgr.hists[prefixHigh].SetBinError( iBin, binError) # if self.configMgr.hists[prefixLow + "_" + str(iBin)] is None: if self.channel.variableName == "cuts": self.configMgr.hists[prefixLow + "_" + str(iBin)] = TH1F( prefixLow + "_" + str(iBin), prefixLow + "_" + str(iBin), len(self.channel.regions), self.channel.binLow, float(len(self.channel.regions)) + self.channel.binLow) else: self.configMgr.hists[prefixLow + "_" + str(iBin)] = TH1F( prefixLow + "_" + str(iBin), prefixLow + "_" + str(iBin), self.channel.nBins, self.channel.binLow, self.channel.binHigh) if ( binVal - binError ) > 0.: # ( self.configMgr.hists[prefixNom].GetBinContent(iBin) - binError ) > 0.: self.configMgr.hists[prefixLow + "_" + str( iBin )].SetBinContent( iBin, binVal - binError ) # self.configMgr.hists[prefixNom].GetBinContent(iBin)-binError) self.configMgr.hists[prefixLow].SetBinContent( iBin, binVal - binError ) # self.configMgr.hists[prefixNom].GetBinContent(iBin)-binError) else: self.configMgr.hists[prefixLow + "_" + str(iBin)].SetBinContent(iBin, 0.) self.configMgr.hists[prefixLow + "_" + str(iBin)].SetBinError( iBin, binError) self.configMgr.hists[prefixLow].SetBinContent(iBin, 0.) self.configMgr.hists[prefixLow].SetBinError(iBin, binError) ## MB : also correct the overflow bin! for iBin in xrange(self.configMgr.hists[prefixNom].GetNbinsX() + 1, self.configMgr.hists[prefixNom].GetNbinsX() + 2): # binVal = self.configMgr.hists[prefixNom].GetBinContent(iBin) binError = sqrt( qcdHistoSyst.GetBinContent(iBin)**2 + qcdHistoStat.GetBinContent(iBin)) binStatError = sqrt(qcdHistoStat.GetBinContent(iBin)) ##binError = sqrt(qcdHistoSyst.GetBinContent(iBin)**2+qcdHistoStat.GetBinContent(iBin)**2) ##binStatError = qcdHistoStat.GetBinContent(iBin) binSystError = qcdHistoSyst.GetBinContent(iBin) #print "GREPME %s bin %g content %.2g stat error %.2g syst error %.2g total error %.2g" % (prefixNom,iBin,self.configMgr.hists[prefixNom].GetBinContent(iBin),binStatError,binSystError,binError) if binVal > 0.: # self.configMgr.hists[prefixNom].GetBinContent(iBin) > 0.: pass else: self.configMgr.hists[prefixNom].SetBinContent(iBin, 0.) # if binVal + binError > 0.: # self.configMgr.hists[prefixNom].GetBinContent(iBin) > 0.: self.configMgr.hists[prefixHigh].SetBinContent( iBin, binVal + binError ) #self.configMgr.hists[prefixNom].GetBinContent(iBin)+binError) else: self.configMgr.hists[prefixHigh].SetBinContent(iBin, 0.) self.configMgr.hists[prefixHigh].SetBinError( iBin, binStatError) # if ( binVal - binError ) > 0.: # ( self.configMgr.hists[prefixNom].GetBinContent(iBin) - binError ) > 0.: self.configMgr.hists[prefixLow].SetBinContent( iBin, binVal - binError ) # self.configMgr.hists[prefixNom].GetBinContent(iBin)-binError) else: self.configMgr.hists[prefixLow].SetBinContent(iBin, 0.) self.configMgr.hists[prefixLow].SetBinError(iBin, binStatError) #Over/Underflow bins if useOverflow or useUnderflow: self.updateOverflowBins(self.configMgr.hists[prefixNom], useOverflow, useUnderflow) self.updateOverflowBins(self.configMgr.hists[prefixLow], useOverflow, useUnderflow) self.updateOverflowBins(self.configMgr.hists[prefixHigh], useOverflow, useUnderflow) return def __addQCDHistosFromCache(self, sample, useOverflow=False, useUnderflow=False): #Note: useOverflow and useUnderflow has no effect. It's there just for symmetry with TreePrepare above. """ Read the nominal, high and low QCD histograms. 
Fallback only in case nominals not present. @param sample The sample to use @param useOverflow Use the overflow bins or not. Note: has no effect, only present for symmetry with TreePrepare @param useUnderflow Use the underflow bins or not. Note: has no effect, only present for symmetry with TreePrepare """ regString = "".join(self.channel.regions) prefixNom = "h%sNom_%s_obs_%s" % ( sample.name, regString, self.channel.variableName.replace("/", "")) prefixHigh = "h%sHigh_%s_obs_%s" % ( sample.name, regString, self.channel.variableName.replace("/", "")) prefixLow = "h%sLow_%s_obs_%s" % ( sample.name, regString, self.channel.variableName.replace("/", "")) # NOTE: these histograms should NOT fallback to trees, but we fallback this entire function! self.__addHistoFromCacheWithoutFallback(prefixNom) self.__addHistoFromCacheWithoutFallback(prefixHigh) self.__addHistoFromCacheWithoutFallback(prefixLow) # if _any_ of them don't exist, just return the tree function if self.configMgr.hists[prefixNom] == None or self.configMgr.hists[ prefixHigh] == None or self.configMgr.hists[prefixLow] == None: return self.__addQCDHistosFromTree(sample, useUnderflow, useOverflow) if self.channel.variableName == "cuts": nHists = len(self.channel.regions) else: nHists = self.channel.nBins for iBin in xrange(1, nHists + 1): self.__addHistoFromCacheWithoutFallback(prefixNom + "_" + str(iBin)) self.__addHistoFromCacheWithoutFallback(prefixHigh + "_" + str(iBin)) self.__addHistoFromCacheWithoutFallback(prefixLow + "_" + str(iBin)) return self.configMgr.hists[prefixNom], self.configMgr.hists[ prefixLow], self.configMgr.hists[prefixHigh] def updateHistBin(self, h, binIn, binOver): """ Update a histogram bin with the overflow information @param h The histogram @param binIn The bin to add the content to @param binOver The overflow bin touse """ newVal = h.GetBinContent(binIn) + h.GetBinContent(binOver) h.SetBinContent(binIn, newVal) h.SetBinContent(binOver, 0.0) e1 = h.GetBinError(binIn) e2 = h.GetBinError(binOver) newErr = sqrt(e1 * e1 + e2 * e2) h.SetBinError(binIn, newErr) h.SetBinError(binOver, 0.0) return def updateOverflowBins(self, h, useOverflow, useUnderflow): """ Update all underflow and overflow bins for the histogram depending on the parameters. Calls updateHistBin(). @param h The histogram @param useOverflow Use the overflow bin? @param useUnderflow Use the underflow bin? """ if useOverflow: binIn = h.GetNbinsX() binOver = binIn + 1 self.updateHistBin(h, binIn, binOver) if useUnderflow: binIn = 1 binOver = 0 self.updateHistBin(h, binIn, binOver) return
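# Standalone illustration of the overflow folding performed by updateHistBin():
# the overflow content is added into the last visible bin, the errors are
# combined in quadrature, and the overflow bin is zeroed. This is only a
# demonstration on a throw-away histogram, not part of the class above.
from math import sqrt
from ROOT import TH1F

h_demo = TH1F("h_demo", "h_demo", 10, 0., 10.)
h_demo.Fill(12.)   # lands in the overflow bin
h_demo.Fill(9.5)   # lands in the last visible bin

lastBin = h_demo.GetNbinsX()
overBin = lastBin + 1
h_demo.SetBinContent(lastBin, h_demo.GetBinContent(lastBin) + h_demo.GetBinContent(overBin))
h_demo.SetBinError(lastBin, sqrt(h_demo.GetBinError(lastBin)**2 + h_demo.GetBinError(overBin)**2))
h_demo.SetBinContent(overBin, 0.)
h_demo.SetBinError(overBin, 0.)

print h_demo.GetBinContent(lastBin)  # 2.0: the overflow entry has been folded in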
#!/usr/bin/env python
import sys
import os
import re
#from uncertainty_unfold import *
import ROOT
from ROOT import gROOT, THStack, TH1D, TList, TFile, TH2D
from math import fabs, sqrt

# Command-line arguments: two sideband files, the combined sideband file, and the output directory.
sideband1 = sys.argv[1]
sideband2 = sys.argv[2]
sideband_total = sys.argv[3]
outputdir = sys.argv[4]
os.system("mkdir -p " + outputdir)

f_in_1 = TFile.Open(sideband1)
f_in_2 = TFile.Open(sideband2)
f_in_3 = TFile.Open(sideband_total)

hist_sideband1_barrel = f_in_1.Get("barrel_fraction")
hist_sideband1_endcap = f_in_1.Get("endcap_fraction")
hist_sideband2_barrel = f_in_2.Get("barrel_fraction")
hist_sideband2_endcap = f_in_2.Get("endcap_fraction")
hist_sideband3_barrel = f_in_3.Get("barrel_fraction")
hist_sideband3_endcap = f_in_3.Get("endcap_fraction")

uncer_barrel = []
uncer_endcap = []
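# Hypothetical continuation (not part of the original excerpt, which stops
# before the lists are filled): one way to fill the uncertainty lists is the
# bin-by-bin relative spread of the two sidebands around the combined sideband.
# Assumes all three fraction histograms share the same binning; the helper name
# fill_uncertainty is illustrative only.
def fill_uncertainty(h_sb1, h_sb2, h_total, uncer_list):
    for ibin in range(1, h_total.GetNbinsX() + 1):
        nominal = h_total.GetBinContent(ibin)
        if nominal == 0.:
            uncer_list.append(0.)
            continue
        spread = max(fabs(h_sb1.GetBinContent(ibin) - nominal),
                     fabs(h_sb2.GetBinContent(ibin) - nominal))
        uncer_list.append(spread / nominal)

fill_uncertainty(hist_sideband1_barrel, hist_sideband2_barrel, hist_sideband3_barrel, uncer_barrel)
fill_uncertainty(hist_sideband1_endcap, hist_sideband2_endcap, hist_sideband3_endcap, uncer_endcap)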
class SimpleTreeProducer(Analyzer):

    def beginLoop(self, setup):
        super(SimpleTreeProducer, self).beginLoop(setup)
        self.rootfile = TFile('/'.join([self.dirName, 'simple_tree.root']), 'recreate')
        self.tree = Tree(self.cfg_ana.tree_name, self.cfg_ana.tree_title)

        # List of dicts describing which containers to save and how many objects per event.
        # Note: AttributeError: 'Event' object has no attribute ( pfjetsFlavor04, pfbTags04, weights )
        #       from: getattr(event, self.cfg_ana.weights)
        self.raw_vars_to_save = list()
        self.raw_vars_to_save.append({'container_name': 'electrons', 'save_name': 'electrons_', 'max_number': 6})
        self.raw_vars_to_save.append({'container_name': 'muons', 'save_name': 'muons_', 'max_number': 6})
        self.raw_vars_to_save.append({'container_name': 'photons', 'save_name': 'photons_', 'max_number': 6})
        self.raw_vars_to_save.append({'container_name': 'pfjets02', 'save_name': 'pfjets02_', 'max_number': 6})
        self.raw_vars_to_save.append({'container_name': 'pfjets04', 'save_name': 'pfjets04_', 'max_number': 6})
        self.raw_vars_to_save.append({'container_name': 'pfjets08', 'save_name': 'pfjets08_', 'max_number': 6})
        # self.raw_vars_to_save.append({'container_name': 'pfbTags04',
        #                               'save_name': 'pfbTags04_',
        #                               'max_number': 6})

        #self.tree.var('weights', float)
        bookMet(self.tree, 'met')
        for container_i in self.raw_vars_to_save:
            max_number = container_i['max_number']
            save_name = container_i['save_name']
            for index in range(max_number):
                bookParticle(self.tree, '{}{}'.format(save_name, index))
        # e.g.:
        # bookParticle(self.tree, 'electron_0')
        # bookParticle(self.tree, 'electron_1')
        # ...
        # bookParticle(self.tree, 'electron_5')

    def fill_particles_by_index(self, event, max_number=None, container_name=None, save_name=None):
        # Look up the collection name configured on cfg_ana (e.g. cfg_ana.electrons)
        # and fetch that collection from the event; getattr is used instead of eval.
        event_particles = getattr(event, getattr(self.cfg_ana, container_name))
        for index, particle in enumerate(event_particles):
            if index == max_number:
                break
            fillParticle(self.tree, '{}{}'.format(save_name, index), particle)

    def process(self, event):
        #weights = getattr(event, self.cfg_ana.weights)
        #self.tree.fill('weights', weights)
        met = getattr(event, self.cfg_ana.met)
        fillMet(self.tree, 'met', met)

        for container_i in self.raw_vars_to_save:
            max_number = container_i['max_number']
            container_name = container_i['container_name']
            save_name = container_i['save_name']
            self.fill_particles_by_index(event,
                                         max_number=max_number,
                                         container_name=container_name,
                                         save_name=save_name)
        # e.g.
        # electrons = getattr(event, self.cfg_ana.electrons)
        # fillParticle(self.tree, electron_0, electrons[0])
        # ...

        self.tree.tree.Fill()

    def write(self, setup):
        self.rootfile.Write()
        self.rootfile.Close()
# Specifically, at any given point of a ROOT application, the ROOT.gDirectory
# object tells which is the current directory where objects will be attached to.
# The next line will print 'PyROOT' as the name of the current directory.
# That is the global directory created when using ROOT from Python, which is
# the ROOT.gROOT object.
print("Current directory: '{}'.\n".format(ROOT.gDirectory.GetName()))

# We can check to which directory a newly created histogram is attached.
histo_1 = ROOT.TH1F("histo_1", "histo_1", 10, 0, 10)
print("Histogram '{}' is attached to: '{}'.\n".format(histo_1.GetName(), histo_1.GetDirectory().GetName()))

# For quick saving and forgetting of objects into ROOT files, it is possible to
# open a TFile as a Python context manager. In the context, objects can be
# created, modified and finally written to the file. At the end of the context,
# the file will be automatically closed.
with TFile.Open("pyroot005_file_1.root", "recreate") as f:
    histo_2 = ROOT.TH1F("histo_2", "histo_2", 10, 0, 10)
    # Inside the context, the current directory is the open file
    print("Current directory: '{}'.\n".format(ROOT.gDirectory.GetName()))
    # And the created histogram is automatically attached to the file
    print("Histogram '{}' is attached to: '{}'.\n".format(histo_2.GetName(), histo_2.GetDirectory().GetName()))
    # Before exiting the context, objects can be written to the file
    f.WriteObject(histo_2, "my_histogram")

# When the TFile.Close method is called, the current directory is automatically
# set again to ROOT.gROOT. Objects that were attached to the file inside the
# context are automatically deleted and made 'None' when the file is closed.
print("Status after the first TFile context manager:")
print("  Current directory: '{}'.".format(ROOT.gDirectory.GetName()))
print("  Accessing 'histo_2' gives: '{}'.\n".format(histo_2))
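# Companion sketch (not from the original tutorial): read the histogram back,
# again using the context manager. Assumes a ROOT version in which TFile
# supports the context-manager protocol, as above. SetDirectory(0) detaches the
# histogram from the file so it survives the automatic close.
with TFile.Open("pyroot005_file_1.root", "read") as f:
    histo_read = f.Get("my_histogram")
    histo_read.SetDirectory(0)  # keep the object alive after the file closes

print("Read back '{}' with {} entries.".format(histo_read.GetName(), int(histo_read.GetEntries())))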
import ROOT
from ROOT import TFile, TTree, gStyle, TF1, gROOT
import plotting_utils as plotting
reload(plotting)
import pandas as pd
import root_pandas as rpd

gROOT.SetBatch(True)
gROOT.ProcessLineSync(".x /afs/cern.ch/work/n/nchernya/Hbb/setTDRStyle.C")
gROOT.ForceStyle()
gStyle.SetPadTopMargin(0.06)
gStyle.SetPadRightMargin(0.04)
gStyle.SetPadLeftMargin(0.15)

name = "/mnt/t3nfs01/data01/shome/nchernya/HHbbgg_ETH_devel/root_files/heppy_05_10_2017/ttbar_RegressionPerJet_heppy_energyRings3_forTraining_LargeAll.root"
f = TFile.Open(name, "read")
t = f.Get("tree")

hist = ROOT.TH1F("hist", "hist", 100, 0, 1500)
hist_ratio = ROOT.TH1F("hist_ratio", "hist_ratio", 100, 0, 1500)

c = ROOT.TCanvas("c", "c", 600, 600)
c.SetLogy()
c.SetBottomMargin(0.3)

# b-jet selection (Jet_mcFlavour == +-5) used to fill the pT spectrum
cuts = '(Jet_pt > 20) & (Jet_mcFlavour==5 | Jet_mcFlavour==-5) & (Jet_eta<2.5 & Jet_eta>-2.5) & (Jet_mcPt>0) & (Jet_mcPt<6000)'
t.Draw("Jet_pt>>hist", cuts)

xmin = 0
xmax = 1500
frame = ROOT.TH1F("hframe", "hframe", 1000, xmin, xmax)
frame.SetStats(0)
frame.GetXaxis().SetLabelSize(0)
frame.GetXaxis().SetTitleOffset(0.91)
def plotInputComparison(files, runs, processes, path):
    # clear memory
    gROOT.Reset()
    # make canvas to save plots to
    c1 = TCanvas('c1')

    Tfiles = []
    for i in range(len(files)):
        print "Adding file: %s to list of files to run with Run Number: %s and Process Name: %s" % (
            files[i], runs[i], processes[i])
        Tfiles.append(TFile(files[i]))

    Thists = []
    for j in range(len(Tfiles)):
        dirname = "DQMData/Run %s/HLT/Run summary/TimerService/Running 1 processes/process %s/Paths/%s_module_total" % (
            runs[j], processes[j], path)
        print dirname
        hist = Tfiles[j].Get(dirname)
        Thists.append(hist)

    leg = TLegend(0.4, 0.6, 0.9, 0.9, "")
    leg.SetFillStyle(0)
    leg.SetBorderSize(0)

    for k in range(len(Thists)):
        if k == 0:
            Thists[k].Scale(1.0 / Thists[k].Integral())
            Thists[k].GetYaxis().SetRangeUser(0.000008, 0.2)
            print "lower bound of bin 100: %f" % Thists[k].GetBinCenter(100)
            print "fraction of events running particle flow: %f" % Thists[k].Integral(100, 500)
            Thists[k].SetLineWidth(2)
            Thists[k].SetLineColor(k + 1)
            if args.ext:
                Thists[k].GetXaxis().SetRangeUser(0, 2000)
            else:
                Thists[k].GetXaxis().SetRangeUser(0, 400)
            Thists[k].Draw()
        else:
            Thists[k].Scale(1.0 / Thists[k].Integral())
            Thists[k].SetLineWidth(2)
            Thists[k].SetLineColor(k + 1)
            Thists[k].Draw("same")
            # legend entry showing the histogram mean
            name = "Mean: %f" % Thists[k].GetMean()
            if args.ext:
                Thists[k].GetXaxis().SetRangeUser(0, 2000)
            else:
                Thists[k].GetXaxis().SetRangeUser(0, 400)
            leg.AddEntry(Thists[k], name, "l")

    leg.Draw("same")
    filename = 'ModuleTotal_%s.pdf' % path
    c1.Print(filename)
from ROOT import TFile, TH1F, TLorentzVector, TCanvas

f = TFile('tag_1_delphes_events.root', 'read')
tree = f.Delphes

muon1 = TLorentzVector()
muon2 = TLorentzVector()
muon3 = TLorentzVector()
muon4 = TLorentzVector()
zBoson1 = TLorentzVector()
zBoson2 = TLorentzVector()
vectList = list()


def getFourVector(zBoson):
    return [zBoson.Pt(), zBoson.Eta(), zBoson.Phi(), zBoson.M()]


def reconstruct(muon1, muon2, muon3, muon4):
    zBoson1 = muon1 + muon2
    zBoson2 = muon3 + muon4
    print('zBoson1: ' + str(getFourVector(zBoson1)))
    print('zBoson2: ' + str(getFourVector(zBoson2)))
    print('\n')
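# Illustrative continuation (not part of the original snippet): loop over the
# Delphes tree, build TLorentzVectors for the first four muons of each event and
# pair them into Z candidates with reconstruct(). Assumes the standard Delphes
# 'Muon' collection with PT/Eta/Phi leaves; the pairing by index is illustrative.
MUON_MASS = 0.1056584  # GeV

for event in tree:
    if event.Muon.GetEntries() < 4:
        continue
    muons = []
    for i in range(4):
        mu = TLorentzVector()
        mu.SetPtEtaPhiM(event.Muon[i].PT, event.Muon[i].Eta, event.Muon[i].Phi, MUON_MASS)
        muons.append(mu)
    reconstruct(muons[0], muons[1], muons[2], muons[3])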
from ROOT import TFile
from root_numpy import root2array, tree2array, array2root
from root_numpy import testdata
from keras import callbacks
from keras.models import Sequential
from keras.layers import Input
from keras.layers.core import Dense, Activation, Dropout
from keras.layers import BatchNormalization
from keras.regularizers import l1, l2
from keras import initializers
from keras import layers
from keras.optimizers import SGD, Adam
from keras.constraints import maxnorm

#data = TFile.Open('input_TTZ_Delphes_small_new.root')
data = TFile.Open('input_TTZ_DelphesEvalGen_5275k.root')
tree = data.Get('Tree')

#ADAM = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0, amsgrad=False)

# Choose between the MEM and the kinematic (KIN) input variables.
#MEM = True
MEM = False
KIN = not MEM

upper_limit = 280000
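# Minimal sketch (not the author's actual model): the excerpt stops before the
# data conversion and the network definition, so this only shows how the
# imported pieces are typically assembled. Branch names ('var1', 'var2', 'var3',
# 'is_signal') and all hyperparameters are placeholders.
import numpy as np

branches = ['var1', 'var2', 'var3']   # placeholder feature branches
label_branch = 'is_signal'            # placeholder label branch

raw = tree2array(tree, branches=branches, stop=upper_limit)
features = np.vstack([raw[b] for b in branches]).T.astype(np.float32)
labels = tree2array(tree, branches=label_branch, stop=upper_limit).astype(np.float32)

model = Sequential()
model.add(Dense(64, activation='relu', input_dim=len(branches)))
model.add(Dropout(0.2))
model.add(Dense(32, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer=Adam(lr=0.0001), loss='binary_crossentropy', metrics=['accuracy'])
model.fit(features, labels, epochs=10, batch_size=256, validation_split=0.2)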
# if var in mytree.variables:print var, mytree.variables[var][0] # else:print var, MVA_Only[var] # print '#'*80 # print ' BDT_CWoLa' # print '#'*80 # for var in useQCD: # if var in mytree.variables:print var, mytree.variables[var][0] # else:print var, MVA_Only[var] # print '#'*80 # print "End of nominal" if pass_nominal > 0 or pass_sys > 0: mytree.variables['pass_sys'][0] = pass_sys mytree.variables['pass_nom'][0] = pass_nominal tkin.Fill() mytree.ZeroArray() # if pass_nominal > 0 or pass_sys > 0: # print 'should all be reset' # mytree.Print() if e > emax: break watch.Print() if e != emin: foutname = 'Skim_' foutname += sample + '_' foutname += str(emax) + '.root' fout = TFile(foutname, 'recreate') tkin.Write() fout.Write() fout.Close()
    default=None,
    help='Name of the combined WS input root file.')
parser.add_argument(
    '-o', '--overallSysFileName',
    default="overallsystematics13TevZtt.txt",
    help='Name of the overall sys file for global yield variations')
args = parser.parse_args()

# have to import ROOT after the parser
from ROOT import TFile, TH1D, TCanvas, Double, kBlack, kRed, kGreen, kBlue
from wsPostProcess import *
from math import sqrt

# open the WS input file
f = TFile.Open(args.wsInputFile)
print "Loading WS input folder tree..."
tree = readFolderTree(f)
print "...done"

if args.secondaryWsInputFile:
    print "Loading WS secondary input folder tree..."
    f2 = TFile.Open(args.secondaryWsInputFile)
    tree2 = readFolderTree(f2)
    tree = dict(tree.items() + tree2.items())
    print "...done"

# names of the systematic variation histograms to be processed
# (without the _high/_low suffix) and the corresponding samples
variations = [
labels = ["BPix L1: ", "BPix L2: ", "BPix L3: ", "FPix tot: "] histonames = [ path + "pix_bar Occ_roc_ontracksiPixelDigis_layer_1", path + "pix_bar Occ_roc_ontracksiPixelDigis_layer_2", path + "pix_bar Occ_roc_ontracksiPixelDigis_layer_3", path + "ROC_endcap_occupancy" ] TotROCs = [ 2560 - 256, 4096 - 256, 5632 - 256, 4320 ] #total number of ROCs in the Pixel detector layers and the FPix, the factor 256 for BPix Layer derive by half modules, left there as a reminder DeadROCs = [0, 0, 0, 0] fin = TFile(fname) #print type(fname) outname = "PixZeroOccROCs_run" + runNumber + ".txt" out_file = open(outname, "w") out_file.write("Pixel Zero Occupancy ROCs \n\n") bpixtot = 0 for k in range(0, 4): GetNonZeroOccNumber(histonames[k]) if k == 3: nrocs = nrocs / 2 #in FPix the histo is filled twice to have it symmetric DeadROCs[k] = TotROCs[k] - nrocs if k < 3: bpixtot += DeadROCs[k]
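# Sketch of the helper used above (its real implementation is not shown in this
# excerpt, and in the actual script it would be defined before use): count the
# ROCs with non-zero occupancy in a 2D occupancy map and store the result in the
# global counter nrocs that the loop above reads.
def GetNonZeroOccNumber(histoname):
    global nrocs
    nrocs = 0
    histo = fin.Get(histoname)
    if not histo:
        print "Histogram not found:", histoname
        return
    for xbin in range(1, histo.GetNbinsX() + 1):
        for ybin in range(1, histo.GetNbinsY() + 1):
            if histo.GetBinContent(xbin, ybin) > 0:
                nrocs += 1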
compute_propagation_length(cos_z_lower_bin_edges[-1]))

cos_z_lower_bin_edges = sorted(cos_z_lower_bin_edges)
prop_length_bin_edges = sorted(prop_length_bin_edges)
# print(energy_lower_bin_edges)
# print(cos_z_lower_bin_edges)

cos_z_lower_bin_edges_array = array('d', cos_z_lower_bin_edges)
prop_length_bin_edges_array = array('d', prop_length_bin_edges)
energy_lower_bin_edges_array = array('d', energy_lower_bin_edges)
print(prop_length_bin_edges_array)

print(toolbox.info + "Output file will be written at", output_file_path)
output_tfile = TFile.Open(output_file_path, "RECREATE")

# Book one flux histogram vs (E, cos(theta_z)) and one vs (E, propagation length) per name.
for histogram_name in histogram_names_list:
    histograms_dict[histogram_name] = TH2D(histogram_name, histogram_name,
                                           len(energy_lower_bin_edges) - 1,
                                           energy_lower_bin_edges_array,
                                           len(cos_z_lower_bin_edges) - 1,
                                           cos_z_lower_bin_edges_array)
    histograms_dict[histogram_name].GetXaxis().SetTitle("E_{#nu} (MeV)")
    histograms_dict[histogram_name].GetYaxis().SetTitle("cos(#theta_{z})")
    histograms_dict[histogram_name].GetZaxis().SetTitle("Neutrino Flux (m^{2}.s.sr.GeV)^{-1}")

    histograms_L_dict[histogram_name] = TH2D(histogram_name + "_L", histogram_name + "_L",
                                             len(energy_lower_bin_edges) - 1,
                                             energy_lower_bin_edges_array,
                                             len(prop_length_bin_edges) - 1,
                                             prop_length_bin_edges_array)
    histograms_L_dict[histogram_name].GetXaxis().SetTitle("E_{#nu} (MeV)")
def __init__(self, yearData=2018, yearMC=2017, sigma='central'):
    """Load data and MC pileup profiles."""
    assert (yearMC in [2016, 2017, 2018]), "You must choose a year from: 2016, 2017, or 2018."
    assert (yearData in [2016, 2017, 2018]), "You must choose a year from: 2016, 2017, or 2018."
    assert (sigma in ['central', 'up', 'down']), "You must choose a s.d. variation from: 'central', 'up', or 'down'."

    minbias = '69p2'
    if sigma == 'down':
        minbias = '66p0168'  # -4.6%
    elif sigma == 'up':
        minbias = '72p3832'  # +4.6%

    if yearData == 2016:
        self.datafile = TFile(path + 'Data_PileUp_2016_%s.root' % (minbias), 'READ')
    elif yearData == 2017:
        self.datafile = TFile(path + 'Data_PileUp_2017_%s.root' % (minbias), 'READ')
    elif yearData == 2018:
        self.datafile = TFile(path + 'Data_PileUp_2018_%s.root' % (minbias), 'READ')

    if yearMC == 2016:
        self.mcfile = TFile(path + 'MC_PileUp_2016_Moriond17.root', 'READ')
    elif yearMC == 2017:
        self.mcfile = TFile(path + 'MC_PileUp_2017_Winter17_V2.root', 'READ')
    elif yearMC == 2018:
        self.mcfile = TFile(path + 'MC_PileUp_2018_Autumn18.root', 'READ')

    self.datahist = self.datafile.Get('pileup')
    self.mchist = self.mcfile.Get('pileup')
    # Detach the histograms from their files so they survive Close(), then
    # normalise both profiles to unit area.
    self.datahist.SetDirectory(0)
    self.mchist.SetDirectory(0)
    self.datahist.Scale(1. / self.datahist.Integral())
    self.mchist.Scale(1. / self.mchist.Integral())
    self.datafile.Close()
    self.mcfile.Close()
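# Sketch of the corresponding weight lookup (the method name and exact behaviour
# are assumptions, not taken from the excerpt above): with both profiles
# normalised to unit area, the per-event pileup weight is the ratio of the data
# and MC profiles evaluated at the true number of interactions.
def getWeight(self, npu):
    bin_data = self.datahist.GetXaxis().FindBin(npu)
    bin_mc = self.mchist.GetXaxis().FindBin(npu)
    data = self.datahist.GetBinContent(bin_data)
    mc = self.mchist.GetBinContent(bin_mc)
    if mc > 0.:
        return data / mc
    return 1.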
    f_out_name = 'back_ntuple_abr.root'
else:
    print 'Must provide argument'
    exit()

# Get all valid files and add them to TChain to be read together
f_list = glob(f_loc)
print 'Files found: ', len(f_list)
t = ROOT.TChain("tobTree")
for f_name in f_list:
    t.AddFile(f_name)
entries = t.GetEntries()
print 'Total entries: ', entries

# Create TFile
f_out = TFile(f_out_name, 'recreate')

# Create description TString and store in TFile
t_string = ROOT.TString("""
Production script: {}
Production script version: 1
Events source: {}
Source production script: https://gitlab.cern.ch/will/L1CaloUpgrade
""".format(sys.argv[0], f_loc))
f_out.WriteObject(t_string, 'File Details')

# Create output TTree
t_out = TTree('mytree', 'Full event file')

# Initialize variables to be written to tree
l0_cells = np.array([0] * 9, dtype=np.float32)
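# Illustrative continuation (the branch name and leaf list are assumptions, as
# the excerpt stops here): attach the fixed-size numpy buffer to the output tree
# as a float-array branch, then fill the tree once per input entry and write out.
t_out.Branch('l0_cells', l0_cells, 'l0_cells[9]/F')

for i_entry in range(entries):
    t.GetEntry(i_entry)
    # ... copy the relevant quantities from the input chain into l0_cells ...
    t_out.Fill()

f_out.Write()
f_out.Close()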
# Settings:
hits = 300000
tries = 20
peaks = "Test"
binning = 200
min_bincontent = 50
pathExtension = ""

PerfResults = "MC/Performance_Results/"
# foldername = "2015-05-04 16-24-09.965268/"
foldername = "_" + str(min_bincontent) + "_" + str(binning) + "_" + str(hits) + "_" + pathExtension + "/"
filename = "MCPerformanceLog.root"
filepath = PerfResults + foldername + filename

file = TFile(filepath)
LogTree = file.Get("LogTree")

success_graph = TGraphErrors()
ghost_graph = TGraph()
minimas_graph = TGraph()
RecSA_MinMax_graph = TGraphErrors()
RecSA_Quantiles_graph = TGraphErrors()

success = []
real_amplitude = []
All_RecSA_MinMax = []
All_RecSA_Quantiles = []
tmp_success = np.zeros(tries)
tmp_RecSA_Q = np.zeros(tries)