def createNtupleInputMaker(filename, treename="", inputprefix="GUESS", Nvar="", momentumVars=(), inputType=None, masslessMode=False, inputsuffix=""): """ Create & configure a NtupleInputMaker class The function will try to guess eveything from the given filename TODO: Figure out input name in more cases. """ from ROOT import TFile, TTree syncMessageLevel() f = TFile(filename) if not f.IsOpen(): m_log.error('Could not open ROOT file with name ', filename) return # --------------------------------- # get tree tree = None if treename == "": # use the first TTree found keys = [k.GetName() for k in f.GetListOfKeys()] for k in keys: t = f.Get(k) if isinstance(t, TTree): tree = t treename = k break else: tree = f.Get(treename) if not bool(tree): m_log.error("Couldn't find tree in ", filename) return # --------------------------------- branches = [b.GetName() for b in tree.GetListOfBranches()] # --------------------------------- # Guess input prefix if not set if inputprefix == "GUESS": # branches, lower case for bn in branches: # we'll look for vars containing 'input' if 'input' in bn.lower() and '_' in bn: inputprefix = bn[:bn.find('_')] # get the part before '_' break # we'll look for vars without prefix' if bn.lower() == 'eta' or bn.lower() == 'px': inputprefix = '' break if inputprefix == "GUESS": m_log.error("Couldn't guess proper prefix for input variables") return else: m_log.info("Found prefix input = " + inputprefix) # --------------------------------- if inputprefix == '': # retrieve all variables starting with inputprefix branches = [b for b in branches if '_' not in b] # retrieve all vars from the branch name above : the XX part in bla_XX vars = dict([(b.lower(), b) for b in branches]) else: if not inputprefix.endswith('_'): inputprefix += '_' # retrieve all variables starting with inputprefix branches = [b for b in branches if b.startswith(inputprefix)] # retrieve all vars from the branch name above : the XX part in bla_XX vars = dict([(b[b.find('_') + 1:].lower(), b[b.find('_') + 1:]) for b in branches]) # --------------------------------- # Guess kinematic variables if momentumVars == (): # try px,py,pz,e vars_set = set(vars.keys()) if vars_set.issuperset(set(['px', 'py', 'pz', 'e'])): momentumVars = tuple([vars[k] for k in ('px', 'py', 'pz', 'e')]) elif vars_set.issuperset(set(['eta', 'phi', 'pt', 'e'])): momentumVars = tuple([vars[k] for k in ('eta', 'phi', 'pt', 'e')]) elif vars_set.issuperset(set(['eta', 'phi', 'p_t', 'e'])): momentumVars = tuple([vars[k] for k in ('eta', 'phi', 'p_t', 'e')]) elif vars_set.issuperset(set(['eta', 'phi', 'pt', 'm'])): momentumVars = tuple([vars[k] for k in ('eta', 'phi', 'pt', 'm')]) elif vars_set.issuperset(set(['eta', 'phi', 'p_t'])): momentumVars = tuple([vars[k] for k in ('eta', 'phi', 'p_t')]) elif vars_set.issuperset(set(['eta', 'phi', 'pt'])): momentumVars = tuple([vars[k] for k in ('eta', 'phi', 'pt')]) if momentumVars == (): m_log.error("Couldn't guess kinematic input variables") return else: m_log.info("Found kinematic input = " + str(momentumVars)) # --------------------------------- # Guess input type if inputType == None: vtype = _branchType(tree.GetBranch(inputprefix + momentumVars[0])) momkey = (momentumVars[0] + momentumVars[-1]).lower() inputType = { 'pxevector_double': SJ.NtupleInputMaker.PxPyPzE_vector_double, 'pxevector_float': SJ.NtupleInputMaker.PxPyPzE_vector_float, 'pxearray_double': SJ.NtupleInputMaker.PxPyPzE_array_double, 'pxearray_float': SJ.NtupleInputMaker.PxPyPzE_array_float, 'etaevector_double': 
SJ.NtupleInputMaker.EtaPhiPtE_vector_double, 'etaevector_float': SJ.NtupleInputMaker.EtaPhiPtE_vector_float, 'etaearray_double': SJ.NtupleInputMaker.EtaPhiPtE_array_double, 'etaearray_float': SJ.NtupleInputMaker.EtaPhiPtE_array_float, 'etamvector_double': SJ.NtupleInputMaker.EtaPhiPtM_vector_double, 'etamvector_float': SJ.NtupleInputMaker.EtaPhiPtM_vector_float, 'etamarray_double': SJ.NtupleInputMaker.EtaPhiPtM_array_double, 'etamarray_float': SJ.NtupleInputMaker.EtaPhiPtM_array_float, 'etaptvector_double': SJ.NtupleInputMaker.EtaPhiPt_vector_double, 'etaptvector_float': SJ.NtupleInputMaker.EtaPhiPt_vector_float, 'etaptarray_double': SJ.NtupleInputMaker.EtaPhiPt_array_double, 'etaptarray_float': SJ.NtupleInputMaker.EtaPhiPt_array_float }[momkey + vtype] m_log.info("Input variables type = " + vtype) # --------------------------------- # Guess variable N # only needed if array input if Nvar == "": for nName in ['n', 'num', 'nparticle']: if nName in vars: Nvar = vars[nName] if Nvar == "": if 'array' in vtype: m_log.error( "Couldn't guess proper input_n variable, please define manually." ) return else: m_log.info( "Couldn't guess proper input_n variable, input_n will be read from vector size." ) else: m_log.info("Found prefix input_n = " + inputprefix + Nvar) # --------------------------------- # guess if PDG Ids are stored pdgName = None for b in branches: if 'pdg' in b.lower(): pdgName = b[b.find('_') + 1:] break input = SJ.NtupleInputMaker(inputType) input.set_prefix(inputprefix) input.set_n_name(Nvar) input.set_variables(*momentumVars) input.setFileTree(filename, treename) input.set_name("InputJet") input.set_masslessMode(masslessMode) # if (pdgName is not None): input.read_pdgId(True) input.set_pdgId_name(pdgName) return input
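# A minimal, self-contained sketch of the prefix-guessing idea used above:
# scan the branch names of a TTree and return the part before the first '_'
# of any branch whose name contains 'input' (or '' when the kinematic
# branches are stored without a prefix). The file and tree names in the
# usage comment are hypothetical, not taken from this project.
def guess_input_prefix(tree):
    """Return the guessed branch prefix, or None if nothing matches."""
    for bn in (b.GetName() for b in tree.GetListOfBranches()):
        low = bn.lower()
        if 'input' in low and '_' in bn:
            return bn[:bn.find('_')]   # e.g. 'jetinput_px' -> 'jetinput'
        if low in ('eta', 'px'):
            return ''                  # unprefixed kinematic branches
    return None

# Hypothetical usage:
#   f = TFile("events.root"); t = f.Get("particles")
#   print guess_input_prefix(t)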
def readTree():
    # open the input file
    file = TFile(fileName, "READ")
    if not file.IsOpen():
        print "File", fileName, "does not exist. WILL ABORT!!!"
        assert (False)
    # open the tree
    tree = file.Get(treeName)
    if tree == None:
        print "tree", treeName, "doesn't exist in file", fileName, ". WILL ABORT!!!"
        assert (False)
    # determine how many entries to run on
    nrEntries = tree.GetEntries()
    if desiredNrEntries < 0 or desiredNrEntries > nrEntries:
        actualNrEntries = nrEntries
    else:
        actualNrEntries = desiredNrEntries
    if debug:
        print "We will run over", actualNrEntries, "entries."
    # create a file to store the histograms
    outputfile = TFile(outputfileName, "RECREATE")
    dict_scale_hist = get_dict_scale_hist(list_scale, debug)
    # run over the entries of the tree
    # unlike in C++, there is no need to define the branches in advance
    for i, entry in enumerate(tree):
        if i >= actualNrEntries:
            continue
        if debug or i % 1000 == 0:
            print "******* new entry", i, " **********"
        # we are looping over jets; each jet carries Pt, Eta, Phi, E
        # at different calibration stages:
        # Nominal, OneMu, PtRecoBukin, PtRecoGauss, Regression
        # we also know the correct simulated information: Parton
        # we want to compare the different calibrations and see which one
        # better models the Higgs boson mass
        # all Pt, E, M are in GeV
        # Higgs -> bb, meaning the b1 and b2 in this tree
        # this is how we get a variable from the tree
        for scale in list_scale:
            M = get_M(entry, scale, debug)
            dict_scale_hist[scale].Fill(M)
            #dict_scale_hist[scale].Fit("gaus")
    # done looping over all the entries in the tree
    #Gaussian = TF1("Gauss", Gauss(), 48.5, 168.5, 3)
    Bukins = TF1("Bukin", Bukin(), 48.5, 168.5, 6)
    Bukins.SetLineColor(3)
    #Stack = THStack("Stack", "Stacked Histograms")
    #Leg = TLegend(x1, y1, x2, y2)
    for scale in list_scale:
        #Gaussian.SetParameters(80, dict_scale_hist[scale].GetMean(), dict_scale_hist[scale].GetRMS())
        #dict_scale_hist[scale].Fit(Gaussian, "+")
        Bukins.SetParameters(80, dict_scale_hist[scale].GetMean(),
                             dict_scale_hist[scale].GetRMS(), 0, 0, 0)
        dict_scale_hist[scale].Fit(Bukins, "+")
        #Stack.Add(dict_scale_hist[scale])
    #Stack.Draw("option")
    #Leg.Draw()
    outputfile.Write()
    outputfile.Close()
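# The Bukin functor, get_M() and get_dict_scale_hist() used above are helpers
# defined elsewhere in this module. As a minimal sketch of the same
# fill-then-fit pattern, the commented-out Gaussian alternative would look
# roughly like this (the histogram name, binning, and the list of mass values
# are placeholders, not taken from the script):
def fill_and_fit_gaussian(mass_values, name="h_mass"):
    from ROOT import TH1D
    h = TH1D(name, ";m [GeV];entries", 60, 48.5, 168.5)
    for m in mass_values:
        h.Fill(m)
    # "gaus" is ROOT's built-in Gaussian; "Q" keeps the fit quiet
    h.Fit("gaus", "Q")
    return h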
def main():
    gROOT.SetBatch(1)
    sampleNames = [
        #'Tag1_Top1',
        #'Tag1_Top2',
        'Tag1_Top1_lhood',
        'Tag1_Top2_lhood',
        #'Tag1_TopLepHad_lhood',
        'Tag1_SystemPt',
        'Tag1_SystemMass',
        'Tag1_SystemRapidity',
    ]
    channels = ['el', 'mu']
    generators = ['Alpgen', 'McAtNlo']
    toys = 'toy5000'
    systematic = 'nominal'
    method_forFile = 'svd'
    method_forHist = 'SVD'
    regValue = 'reg4'
    baseRecoFileName = 'RecoClosure'
    baseFilePath = '../data/March_4/Alpgen/MCClosure'

    can = TCanvas("can", "can", 0, 0, 800, 600)
    can.SetMargin(0.2, 0.05, 0.15, 0.03)

    # loop over the sample names since each one needs its own plots
    for sampleName in sampleNames:
        print 'loop', sampleName
        x_title = ''
        y_title = "ratio of unfolded over truth #frac{dN}{d"
        y_max = 1.2
        y_min = 0.8
        latex_label = ''
        if sampleName == 'Tag1_SystemMass':
            x_title = 'M_{t#bar{t}} [GeV]'
            y_title += 'M_{t#bar{t}}}'
            latex_label = 't\\bar{t} Mass: '
        elif sampleName == 'Tag1_SystemPt':
            x_title = 'p_{T}^{t#bar{t}} [GeV]'
            y_title += 'p_{T}^{t#bar{t}}}'
            latex_label = 't\\bar{t} p_{T}: '
            y_max = 1.6
            y_min = 0.6
        elif sampleName == 'Tag1_SystemRapidity':
            x_title = 'y_{t#bar{t}}'
            y_title += 'y_{t#bar{t}}}'
            latex_label = 't\\bar{t} Rapidity: '
        elif sampleName == 'Tag1_Top1_lhood':
            x_title = 'leptonic top p_{T} [GeV]'
            y_title += 'p_{T}^{t}}'
            latex_label = 'top p_{T}: '
        elif sampleName == 'Tag1_Top2_lhood':
            x_title = 'hadronic top p_{T} [GeV]'
            y_title += 'p_{T}^{t}}'
            latex_label = 'top p_{T}: '

        # loop over the channels since they will also each get their own plots
        for channel in channels:
            print 'loop', channel
            outFilename = baseFilePath + '/alpgenMcAtNloClosureTest_' + sampleName + '_' + channel + '.eps'

            # loop over the generator files and load plots
            genhistos = {}
            for generator in generators:
                # filename where plots are stored
                inFilename = (baseFilePath + '/' + baseRecoFileName + '_' + sampleName +
                              '_' + channel + '_unfoldWith' + generator + '_' + toys +
                              '_' + method_forFile + '_' + regValue + '.root')
                inFile = TFile(inFilename)
                if not inFile.IsOpen():
                    print 'ERROR opening input file:', inFilename
                    return
                # build histogram names to retrieve
                baseHistoPath = ('unfolding/toys/' + systematic + '/' + channel + '/' +
                                 sampleName + '/' + method_forHist + '/' + regValue +
                                 '/' + toys + '/')
                baseHistoName = ('H_' + channel + '_' + sampleName + '_' + method_forHist +
                                 '_' + regValue + '_' + toys + '_')

                histos = {}
                truth_name = baseHistoPath + baseHistoName + 'mc_truth'
                histos['truth'] = inFile.Get(truth_name)
                if not histos['truth']:
                    print 'ERROR could not load histogram', truth_name, 'from file', inFilename
                    return
                histos['truth'].SetDirectory(0)

                reco_name = baseHistoPath + baseHistoName + 'mc_reco_diffxs'
                histos['reco'] = inFile.Get(reco_name)
                if not histos['reco']:
                    print 'ERROR could not load histogram', reco_name, 'from file', inFilename
                    return
                histos['reco'].SetDirectory(0)

                measured_name = baseHistoPath + baseHistoName + 'data_measured'
                histos['measured'] = inFile.Get(measured_name)
                if not histos['measured']:
                    print 'ERROR could not load histogram', measured_name, 'from file', inFilename
                    return
                histos['measured'].SetDirectory(0)

                diffxs_name = baseHistoPath + baseHistoName + 'data_unfolded_diffxs'
                histos['diffxs'] = inFile.Get(diffxs_name)
                if not histos['diffxs']:
                    print 'ERROR could not load histogram', diffxs_name, 'from file', inFilename
                    return
                histos['diffxs'].SetDirectory(0)

                diffnx_name = baseHistoPath + baseHistoName + 'data_unfolded'
                histos['diffnx'] = inFile.Get(diffnx_name)
                if not histos['diffnx']:
                    print 'ERROR could not load histogram', diffnx_name, 'from file', inFilename
                    return
                histos['diffnx'].SetDirectory(0)

                genhistos[generator] = histos
                inFile.Close()
            # end generators

            # create ratio plot for Alpgen[0] unfolding MC@NLO[1], divided by MC@NLO truth
            ratio01 = TH1F(genhistos[generators[0]]['truth'])
            ratio01.SetDirectory(0)
            ratio01.SetName('ratio_' + generators[0] + 'Unfolding' + generators[1] +
                            '_over_' + generators[1] + 'Truth')
            # divide the unfolded result by the truth from the generator that corresponds
            # to the distribution that was unfolded
            ratio01.Divide(genhistos[generators[0]]['diffnx'],
                           genhistos[generators[1]]['truth'])
            ratio01.GetYaxis().SetTitle(y_title)
            ratio01.GetXaxis().SetTitle(x_title)
            ratio01.SetMaximum(y_max)
            ratio01.SetMinimum(y_min)

            # create ratio plot for MC@NLO[1] unfolding Alpgen[0], divided by Alpgen truth
            ratio10 = TH1F(genhistos[generators[0]]['truth'])
            ratio10.SetDirectory(0)
            ratio10.SetName('ratio_' + generators[1] + 'Unfolding' + generators[0] +
                            '_over_' + generators[0] + 'Truth')
            ratio10.Divide(genhistos[generators[1]]['diffnx'],
                           genhistos[generators[0]]['truth'])
            ratio10.GetYaxis().SetTitle(y_title)
            ratio10.GetXaxis().SetTitle(x_title)
            ratio10.SetMaximum(y_max)
            ratio10.SetMinimum(y_min)

            can.cd()
            ratio01.SetMarkerColor(kBlack)
            ratio01.SetMarkerStyle(20)
            ratio01.SetLineWidth(2)
            ratio01.Draw()
            ratio10.SetMarkerColor(kRed)
            ratio10.SetLineColor(kRed)
            ratio10.SetLineWidth(1)
            ratio10.SetMarkerStyle(23)
            ratio10.Draw('same')

            channel_label = latex_label
            if channel == 'el':
                channel_label += ' e+jets'
            elif channel == 'mu':
                channel_label += ' \\mu+jets'

            legend = TLegend(0.22, 0.95, 0.7, 0.75, channel_label)
            legend.SetFillStyle(0)
            legend.SetBorderSize(0)
            legend.AddEntry(ratio01, generators[0] + ' unfolding ' + generators[1], "lp")
            legend.AddEntry(ratio10, generators[1] + ' unfolding ' + generators[0], "lp")
            legend.Draw('same')
            #tex = TLatex()
            #tex.SetNDC()
            #tex.SetTextFont(62)
            #tex.DrawLatex(0.23, 0.88, channel_label)

            can.SaveAs(outFilename)
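# The closure ratios above all follow one pattern: take a histogram with the
# right binning, fill it with unfolded/truth via TH1::Divide, and fix the axis
# range. A minimal sketch of that pattern, using Clone() instead of the TH1F
# copy constructor; the arguments are placeholders:
def make_ratio(template, numerator, denominator, name, y_min=0.8, y_max=1.2):
    ratio = template.Clone(name)
    ratio.SetDirectory(0)          # keep the ratio alive after the file closes
    ratio.Divide(numerator, denominator)
    ratio.SetMinimum(y_min)
    ratio.SetMaximum(y_max)
    return ratio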
def ReweightFile(filepath, LumiData, rwOptions):
    paraFile = rwOptions.get('parametersFile', 'para_config.txt')
    entriesFromFile = rwOptions.get('entriesFromFile', True)
    entriesFromHistogram = rwOptions.get('entriesFromHistogram', False)
    if entriesFromHistogram:
        entriesFromFile = False
    #raw_input('going to make the call with '+str(filepath))
    FileName, InDir, Sample, SubSample, Estimation, Tail, AbsPath = BreakDownInputPath(filepath)

    #=====the output should be the same as
    #=====the filename + '_RW2X', where X is the lumi
    OutFileName = filepath
    print ""
    print "---------------------------------------------"
    print "INSIDE ReweightFile"
    print '  filepath= ', str(filepath)
    print '  filename= ', OutFileName
    OutFileName = OutFileName.replace('.root', '_RWTo' + str(int(LumiData)) + '.root')
    #OutFileName=AbsPath.replace(FileName,OutFileName)
    print ''
    print " Going to reweight the file", filepath, "to the lumi", LumiData
    print ""
    print " the outFileName will be ", OutFileName

    #INPUT FILE#
    infile = TFile(filepath, "READ")
    #except:
    #    print "Unexpected error:", sys.exc_info()[0]
    #    print 'infile is ', str(infile)
    #    return
    infile.cd()
    if not infile.IsOpen():
        print " the file could not be opened. Bye"
        return
    #============
    outfile = TFile(OutFileName, "RECREATE")
    outfile.cd()

    #=======PARAMETERS FOR THE REWEIGHTING
    parameters_dict = {}
    config = open(paraFile, 'r')
    for line in config:
        if line.find('#') == -1:
            thisline = line.split()
            if len(thisline) == 0:
                continue
            try:
                parameters_dict[thisline[0]] = float(thisline[2])
            except IndexError:
                print "this is not going to work"
                print "thisline is", thisline
                print 'and the index is ', thisline[0]
                raise
        else:
            pass
    #===========
    filtereff_key = 'FE_' + Sample + '_' + SubSample
    try:
        FE = parameters_dict[filtereff_key]
    except KeyError:
        print filtereff_key, "does not exist in the dictionary"
        return

    xsec_key = 'xs_' + Sample + '_' + SubSample
    try:
        XS = parameters_dict[xsec_key]
    except KeyError:
        print xsec_key, "does not exist in the dictionary"
        return

    if entriesFromFile:
        tnoe_key = 'TNoE_' + Sample + '_' + SubSample
        try:
            TNOE = parameters_dict[tnoe_key]
        except KeyError:
            print tnoe_key, "does not exist in the dictionary"
            return
    elif entriesFromHistogram:
        entriesHistoPath = rwOptions['entriesHistoPath']
        entriesHisto = infile.Get(entriesHistoPath)
        if str(entriesHisto).find('nil') != -1:
            print 'the entries histo ', entriesHistoPath, ' was not found in ', infile.GetPath()
            return
        if entriesHisto.GetNbinsX() > 1:
            print 'the histo has more than one bin!! ', entriesHisto.GetNbinsX()
        TNOE = entriesHisto.GetBinContent(1)

    #COMPUTE THE WEIGHTS
    #if LumiData==-1:
    #    LumiData=5097.
    print "----Reweighting information----"
    print '  XS   =', XS
    print '  FE   =', FE
    print '  TNOE =', TNOE
    Weight = float(LumiData) * XS * FE / TNOE
    print " LumiData =", LumiData
    print " cross section is", XS
    print " FE is ", FE
    print " number of entries ", TNOE
    print " the weight is ", Weight
    #raw_input("ready to continue?")

    #REWEIGHT ALL THE HISTOS IN THE FILE:
    LoopAndScale(infile, Weight)
    #outfile.Write()

    #MAKE THE ROOT FILE REMEMBER
    outfile.cd()
    histoname = 'RWto' + str(int(LumiData))
    isrw = infile.Get(histoname)
    if str(isrw).find('nil') != -1:
        # create it
        newh = TH1D(histoname, "", 1, 0.5, 1.5)
        newh.Fill(1.0)
        newh.Write()

    datehist = outfile.Get("date_of_reweighting_to_" + str(int(LumiData)))
    date = commands.getoutput('date +%s')
    if str(datehist).find('nil') != -1:
        dateh = TH1D("date_of_reweighting_to_" + str(int(LumiData)), date, 1, 0.5, 1.5)
        dateh.Fill(1.0)
        dateh.Write()
    else:
        datehist.SetTitle(date)
        datehist.Write("", TObject.kOverwrite)

    outfile.Write()
    print "EXITING ReweightFile. Bye"
    print "----------------------------------"
    print ""
    return OutFileName
from sys import argv
from os import path
from glob import glob

from ROOT import TFile, TCanvas, gROOT, gStyle

gROOT.SetBatch(True)

files = argv[1:]
for word in files:
    for f in glob(word):
        if not path.exists(f):
            print "could not find file", f
            continue
        if not f.endswith(".root"):
            print "input must be a .root file!"
            continue
        dirname = path.dirname(path.abspath(f))
        print "saving pdf in", dirname
        infile = TFile(f)
        if infile.IsOpen() and not infile.IsZombie() and not infile.TestBit(
                TFile.kRecovered):
            tree_fit_sb = infile.Get("tree_fit_sb")
            gStyle.SetOptStat(221112211)
            c = TCanvas()
            tree_fit_sb.Draw("r")
            outname = dirname + "/r_" + path.basename(f)
            outname = outname.replace(".root", ".pdf")
            c.SaveAs(outname)
            infile.Close()
        else:
            print "file is broken: ", f
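# The IsOpen/IsZombie/kRecovered combination above is a common way to detect
# broken ROOT files. A small reusable sketch of the same test (the function
# name is ours, not part of any library):
def is_healthy_root_file(path_to_file):
    from ROOT import TFile
    f = TFile(path_to_file)
    ok = f.IsOpen() and not f.IsZombie() and not f.TestBit(TFile.kRecovered)
    if f.IsOpen():
        f.Close()
    return ok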
import sys
import ROOT
from ROOT import TFile, TTree, TH1D, TCanvas, gROOT, TPad, TGaxis, TColor, TLegend
#import matplotlib

file1_path = "/uboone/data/users/guzowski/numi_flux/fgd.root"
file1 = TFile(file1_path, 'READ')
if file1.IsOpen():
    print 'File ', file1_path, ' is open'
else:
    quit()

# these are the corrected gSimple flux histograms
flux_nue_000 = file1.Get("nueFluxHisto000")
flux_nuebar_000 = file1.Get("anueFluxHisto000")
flux_nue_555 = file1.Get("nueFluxHisto555")
flux_nuebar_555 = file1.Get("anueFluxHisto555")
flux_nue_999 = file1.Get("nueFluxHisto999")
flux_nuebar_999 = file1.Get("anueFluxHisto999")

c1 = TCanvas("c1", "c1", 800, 600)
c1.cd()
flux_nue_555.SetLineColor(46)
flux_nue_999.SetLineColor(32)
flux_nue_000.Draw("hist")
flux_nue_555.Draw("hist same")
flux_nue_999.Draw("hist same")
c1.Print("plots/nue_flux_inside_detector_position.pdf")
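# TLegend is imported above but never used; a legend for the three overlaid
# flux histograms could be added like this. The entry labels simply reuse the
# histogram names (what 000/555/999 denote is not stated here), and the second
# output file name is ours:
leg = TLegend(0.55, 0.65, 0.88, 0.88)
leg.AddEntry(flux_nue_000, "nueFluxHisto000", "l")
leg.AddEntry(flux_nue_555, "nueFluxHisto555", "l")
leg.AddEntry(flux_nue_999, "nueFluxHisto999", "l")
leg.Draw("same")
c1.Print("plots/nue_flux_inside_detector_position_with_legend.pdf")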
def main(): gROOT.SetBatch(1) SetAtlasStyle() channels = ["ejets", "mujets"] #channels = ["mujets"] variables = [ "Top1_lhood", "Top2_lhood", "SystemMass", "SystemPt", "SystemRapidity" ] #variables = ["Top1_lhood"] for j in range(len(variables)): variable = variables[j] print j, variable can = TCanvas(variable, variable, 0, 0, 800, 600) can.SetMargin(0.2, 0.05, 0.15, 0.03) label = '' regnumber = '' if variable is 'Top1_lhood': label = 'Leptonic top p^{t}_{T}' regnumber = "reg4" elif variable is 'Top2_lhood': label = 'Hadronic top p^{t}_{T}' regnumber = "reg4" elif variable is 'SystemMass': label = 't#bar{t} System mass' regnumber = "reg3" elif variable is 'SystemPt': label = 't#bar{t} System p_{T}' regnumber = "reg3" elif variable is 'SystemRapidity': label = 't#bar{t} System rapidity' regnumber = "reg4" legend = TLegend(0.6, 0.95, 0.95, 0.75, label) legend.SetFillStyle(0) legend.SetBorderSize(0) ratioPlots = [] for i in range(len(channels)): channel = channels[i] print i, channel postfix = '' labelChannel = '' if channel is 'ejets': postfix = 'el' labelChannel = 'e + jets' elif channel is 'mujets': postfix = 'mu' labelChannel = '#mu + jets' elif channel is 'combined': postfix = 'co' filename = "../data/data_Unfolded_14_02_2013/Alpgen/nominal_Tag1_" + variable + "_" + postfix + "_toy5000_svd_" + regnumber + ".root" fileDi = TFile(filename) if not fileDi.IsOpen(): print 'ERROR opening ', filename m_histo = fileDi.Get("unfolding/toys/nominal/" + postfix + "/Tag1_" + variable + "/SVD/" + regnumber + "/toy5000/H_" + postfix + "_Tag1_" + variable + "_SVD_" + regnumber + "_toy5000_Regularization") m_histo.SetName("diFactor" + variable + postfix) nbins = m_histo.GetNbinsX() - 1 diFactor = m_histo.Clone() diFactor.SetDirectory(0) diFactor.SetNdivisions(508) diFactor.GetXaxis().SetRangeUser(1, nbins + 1) for bin in range(nbins): bindiFactor = m_histo.GetBinContent(bin + 1) print bin, bindiFactor diFactor.SetBinContent(bin + 2, bindiFactor) diFactor.SetTitle("") diFactor.SetMarkerColor(kBlack + i) diFactor.SetMarkerStyle(20 + i) diFactor.SetLineColor(kBlack + i) diFactor.GetYaxis().SetRangeUser(0.005, 400) diFactor.GetYaxis().SetTitle("|d_{i}|") diFactor.GetXaxis().SetTitle("bin number") ratioPlots.append(diFactor) legend.AddEntry(diFactor, labelChannel, "p") if i > 0: diFactor.Draw('histosame') else: diFactor.Draw("histo") can.cd() horizontal = TF1("line", "pol1", 0, 2800) horizontal.SetParameters(1, 0) horizontal.SetLineColor(kBlack) horizontal.SetLineStyle(2) horizontal.Draw('same') legend.Draw('same') can.SetLogy() can.SaveAs("regularizationValue_" + variable + ".eps")
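# In SVD unfolding the regularisation parameter is usually chosen from the
# |d_i| spectrum plotted above: k is taken around the last bin where |d_i|
# stays above 1 (the dashed horizontal line drawn on the canvas). A sketch of
# that scan, under the assumption that the histogram bins hold |d_i|:
def suggest_k_from_di(h_di):
    k = 1
    for b in range(1, h_di.GetNbinsX() + 1):
        if abs(h_di.GetBinContent(b)) > 1.0:
            k = b
    return k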
def updatestatus(jobstatus, outdir, name):
    from ROOT import TFile

    print("Updating job status")
    print("Total: " + str(len(jobstatus)))

    # get the qstat job listing
    proccommand = 'qstat | grep dx5412'
    proc = subprocess.Popen(proccommand, stdout=subprocess.PIPE, shell=True)
    qstat_result = proc.stdout.read()

    for key in jobstatus:
        index = jobstatus[key][0]

        # if job is completed, we don't need to check again
        if jobstatus[key][1] == 2:
            continue

        # check if the job is still underway
        jobinprocess = qstat_result.find((name + str(index) + ' ').encode())
        if jobinprocess >= 0:
            jobstatus[key][1] = 1
            continue

        # if the job is not still underway,
        # check to see if the job has completed properly;
        # if not, mark it for resubmission
        outDirMod = ''
        if key[1] == 'mtrack':
            outDirMod = 'tow_0_track_-1'
        if key[1] == 'ptrack':
            outDirMod = 'tow_0_track_1'
        if key[1] == 'mtow':
            outDirMod = 'tow_-1_track_0'
        if key[1] == 'ptow':
            outDirMod = 'tow_1_track_0'
        if key[1] == 'nom':
            outDirMod = 'tow_0_track_0'

        if outdir.startswith('/'):
            filename = outdir + '/' + outDirMod + \
                '/' + name + str(index) + '.root'
        else:
            filename = os.getcwd() + '/' + outdir + '/' + outDirMod + \
                '/' + name + str(index) + '.root'

        if os.path.isfile(filename):
            outputfile = TFile(filename, "READ")
            if outputfile.IsZombie():
                print("job " + str(index + 1) + " of " + str(len(jobstatus)) +
                      " complete: file is zombie, resubmit")
                jobstatus[key][1] = 0
                os.remove(filename)
            elif outputfile.IsOpen():
                print("job " + str(index + 1) + " of " + str(len(jobstatus)) +
                      " complete: ROOT file healthy")
                print(filename)
                jobstatus[key][1] = 2
                outputfile.Close()
            else:
                print("job " + str(index + 1) + " of " + str(len(jobstatus)) +
                      " undefined file status, resubmit")
                jobstatus[key][1] = 0
        else:
            print("undefined status: job " + str(index + 1) + " of " +
                  str(len(jobstatus)) + " marked for submission")
            jobstatus[key][1] = 0

    return jobstatus
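# jobstatus maps each job key to [index, status], with status 0 = (re)submit,
# 1 = running, 2 = done (as set above). A small sketch of how a caller might
# summarise the returned dictionary to decide whether to keep polling; the
# helper name is ours:
def summarize_jobstatus(jobstatus):
    counts = {0: 0, 1: 0, 2: 0}
    for index, status in jobstatus.values():
        counts[status] = counts.get(status, 0) + 1
    print("to submit: %d  running: %d  done: %d" %
          (counts.get(0, 0), counts.get(1, 0), counts.get(2, 0)))
    # True once nothing is left to submit or still running
    return counts.get(0, 0) == 0 and counts.get(1, 0) == 0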
def main(): gROOT.SetBatch(1) SetAtlasStyle() #channels = ["ejets","mujets"] channels = ["ejets"] variables = [ "Top1_lhood", "Top2_lhood", "SystemMass", "SystemPt", "SystemRapidity" ] for j in range(len(variables)): variable = variables[j] print j, variable can = TCanvas(variable, variable, 0, 0, 800, 600) can.SetMargin(0.2, 0.05, 0.15, 0.03) ratioPlots = [] label = '' latexlabel = "" if variable is 'Top1_lhood': label = 'Leptonic top p^{t}_{T}' latexlabel = "#frac{d#sigma}{dp^{t}_{T}}_{rew} #times #left[#frac{d#sigma}{dp^{t}_{T}}_{std}#right]^{-1}" elif variable is 'Top2_lhood': label = 'Hadronic top p^{t}_{T}' latexlabel = "#frac{d#sigma}{dp_{T}^{t}}_{rew} #times #left[#frac{d#sigma}{dp_{T}^{t}}_{std}#right]^{-1}" elif variable is 'SystemMass': label = 't#bar{t} System mass' latexlabel = "#frac{d#sigma}{dM_{t#bar{t}}}_{rew} #times #left[#frac{d#sigma}{dM_{t#bar{t}}}_{std}#right]^{-1}" elif variable is 'SystemRapidity': label = 't#bar{t} System rapidity' latexlabel = "#frac{d#sigma}{dY_{t#bar{t}}}_{rew} #times #left[#frac{d#sigma}{dY_{t#bar{t}}}_{std}#right]^{-1}" elif variable is 'SystemPt': label = 't#bar{t} System p_{T}' latexlabel = "#frac{d#sigma}{dp^{t#bar{t}}_{T}}_{rew} #times #left[#frac{d#sigma}{d}p^{t#bar{t}}_{T}_{std}#right]^{-1}" legend = TLegend(0.25, 0.95, 0.5, 0.75, label) legend.SetFillStyle(0) legend.SetBorderSize(0) for i in range(len(channels)): channel = channels[i] print i, channel postfix = '' if channel is 'ejets': postfix = '_el' elif channel is 'mujets': postfix = '_mu' elif channel is 'combined': postfix = '' filename = "../data/data_Unfolded_14_02_2013/Alpgen/SVD/unfoldedResult_Tag1_" + variable + postfix + ".root" file_std = TFile(filename) if not file_std.IsOpen(): print 'ERROR opening ', filename filename = "../data/test_Unfolded_08_03_2013/Alpgen/SVD/unfoldedResult_Tag1_" + variable + postfix + ".root" file_rew = TFile(filename) if not file_rew.IsOpen(): print 'ERROR opening ', filename diffxs_std = file_std.Get("diffxs_stat") diffxs_std.SetName("diffxs_stat_std" + postfix) measured_std = file_std.Get("measured") measured_std.SetName("measured_std" + postfix) diffxs_rew = file_rew.Get("diffxs_stat") measured_rew = file_rew.Get("measured") measured_rew.SetName("measured_rew" + postfix) diffxs_rew.SetName("diffxs_stat_rew" + postfix) ratio = diffxs_rew.Clone("diffxs_stat_ratio" + postfix) ratio.SetDirectory(0) ratio.GetYaxis().SetTitle(latexlabel) ratio.GetYaxis().SetTitleOffset(1.6) ratio.Divide(diffxs_std) ratio.GetYaxis().SetRangeUser(0.9, 1.2) ratio.SetMarkerColor(kBlack + i) ratio.SetMarkerStyle(20 + i) ratio.SetLineColor(kBlack + i) ratio1 = measured_rew.Clone("measured_ratio" + postfix) ratio1.SetDirectory(0) ratio1.GetYaxis().SetTitle(latexlabel) ratio1.GetYaxis().SetTitleOffset(1.6) ratio1.Divide(measured_std) ratio1.GetYaxis().SetRangeUser(0.7, 1.2) if variable is 'SystemRapidity': ratio.GetYaxis().SetRangeUser(-0.07, 0.04) ratio1.SetMarkerColor(kRed + i) ratio1.SetMarkerStyle(20 + i) ratio1.SetLineColor(kRed + i) ratioPlots.append(ratio) ratioPlots.append(ratio1) legend.AddEntry(ratio, "unfolded", "p") legend.AddEntry(ratio1, "measured", "p") can.cd() if i > 0: ratio.Draw('same') else: ratio.Draw() ratio1.Draw('same') can.cd() horizontal = TF1("line", "pol1", 200, 2800) horizontal.SetParameters(1, 0) horizontal.SetLineColor(kBlack) horizontal.SetLineStyle(2) horizontal.Draw('same') legend.Draw('same') can.SaveAs(variable + ".eps") controlPlots = [ 'InclusiveJetBinLeptonPt', 'Top1Pt', 'Top2Pt', 'SystemMass', 'SystemPt' ] ratioPlots1 = [] for j in 
range(len(controlPlots)): controlPlot = controlPlots[j] can1 = TCanvas(controlPlot, controlPlot, 0, 0, 800, 600) can1.SetMargin(0.2, 0.05, 0.15, 0.03) legend1 = TLegend(0.45, 0.95, 0.90, 0.75, "") legend1.SetFillStyle(0) legend1.SetBorderSize(0) labelx = '' if controlPlot is 'InclusiveJetBinLeptonPt': labelx = 'lepton p_{T}, all-jet bin[GeV/c]' elif controlPlot is 'Top1Pt': labelx = 'Leptonic top p^{t}_{T}[GeV/c]' elif controlPlot is 'Top2Pt': labelx = 'Hadronic top p^{t}_{T}[GeV/c]' elif controlPlot is 'SystemMass': labelx = 'M_{t#bar{t}}[GeV/c^{2}]' elif controlPlot is 'SystemPt': labelx = 'p^{t#bar{t}}_{T}[GeV/c]' for i in range(len(channels)): channel = channels[i] print i, channel postfix = '' if channel is 'ejets': postfix = '_el' elif channel is 'mujets': postfix = '_mu' elif channel is 'combined': postfix = '' filename = "../data/data_14_02_2013/nominal/ToUnfold_nominal_AlpgenJimmy/tagged_" + channel + ".root" cfile_std = TFile(filename) if not cfile_std.IsOpen(): print 'ERROR opening ', filename filename = "../data/test_08_03_2013/nominal/ToUnfold_nominal_AlpgenJimmy/tagged_" + channel + ".root" cfile_rew = TFile(filename) if not cfile_rew.IsOpen(): print 'ERROR opening ', filename control_std = cfile_std.Get(controlPlot + "_Data") control_std.SetName("control_std" + postfix) control_rew = cfile_rew.Get(controlPlot + "_Data") control_rew.SetName("control_rew" + postfix) control_std.SetDirectory(0) control_std.SetTitle("") control_std.GetYaxis().SetTitle("Events") control_std.GetXaxis().SetTitle(labelx) control_std.GetYaxis().SetTitleOffset(1.6) #control_std.GetYaxis().SetRangeUser(0.7,1.3) control_std.SetMarkerColor(kBlack + i) control_std.SetMarkerStyle(20 + i) control_std.SetLineColor(kBlack + i) control_rew.SetMarkerColor(kRed + i) control_rew.SetMarkerStyle(20 + i) control_rew.SetLineColor(kRed + i) if i > 0: control_std.Draw('same') control_rew.Draw('same') else: control_std.Draw() control_rew.Draw('same') legend1.AddEntry(control_std, "nominal", "p") legend1.AddEntry(control_rew, "Lepton p_{T} reweighting", "p") legend1.Draw('same') can1.SetLogy() can1.SaveAs(controlPlot + "_log.eps")
def CombinedRootFiles(self, path='', VBF_cut='invMAk4sel_1p0'): #pT_suffix = 'pT' mjj_suffix = '_mjj' VBF = (self.LastCut == VBF_cut) if (VBF): name_suffix = '_afterVBFsel' else: name_suffix = '' for i in range(len(self.SHists)): filename = self.getFileName(i) if (VBF): oFile = TFile(path + "/%s_mjj.root" % filename, "UPDATE") #pT_file = TFile(path+"/%s.root"%filename,"UPDATE"); # file= TFile(path+"/%s_SignalInjection.root"%filename,"UPDATE"); # file= TFile(path+"/%s_SidebandData.root"%filename,"UPDATE"); else: oFile = TFile(path + "/%s_mjj.root" % filename, "RECREATE") # file= TFile(path+"/%s_SignalInjection.root"%filename,"RECREATE"); # file= TFile(path+"/%s_SidebandData.root"%filename,"RECREATE"); if (not oFile.IsOpen()): print("Error: Could not open File No. %i" % i) # r_newbinning=range(0,14000,100) # d_newbinning=array('d') # for b in r_newbinning: # d_newbinning.append(b) # radion=self.SHists[i].Rebin(len(d_newbinning)-1,"new binning",d_newbinning) # qcd_data=self.BHist.Rebin(len(d_newbinning)-1,"new binning",d_newbinning) # radion.Write('radion_invMass'+name_suffix) # qcd_data.Write('qcd_invMass'+name_suffix) # qcd_data.Write('data_invMass'+name_suffix) #With SidebandData # signalHist=self.SHists[i] # backgroundHist=self.BHist # sidebandDataHist=self.sidebandDataHist # signalHist.Write('radion_invMass'+name_suffix) # backgroundHist.Write('qcd_invMass'+name_suffix) # sidebandDataHist.Write('data_invMass'+name_suffix) import numpy as np binning = np.linspace(0, 10000, 10001) signalHist = self.SHists[i].Rebin(len(binning) - 1, '', binning) backgroundHist = self.BHist.Rebin(len(binning) - 1, '', binning) # #For SignalInjectionTest #try this -> sig+bg shapes to pseudo sig+bg points #signalHist=self.SHists[i] #backgroundHist=self.BHist #fakedataHist=backgroundHist.Clone() #fakedataHist.Add(signalHist) #signalHist.Write('radion_invMass'+name_suffix) #backgroundHist.Write('qcd_invMass'+name_suffix) #fakedataHist.Write('data_invMass'+name_suffix) signalHist.Scale(4.178272981) backgroundHist.Scale(4.178272981) #Standard (with Background as FakeData)-> sig +bg shape to bg(pseudodata)points signalHist.Write('radion_invMass' + name_suffix) backgroundHist.Write('qcd_invMass' + name_suffix) backgroundHist.Write('data_invMass' + name_suffix) update_progress(i + 1, len(self.SHists)) oFile.Close()
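# TH1::Rebin with an explicit bin-edge array expects a C array of doubles; the
# commented-out code above builds one with array('d'), while the active code
# passes a numpy float64 array, which PyROOT also converts. A minimal sketch
# of the same uniform rebinning (the new histogram name and the assumption of
# a 0-10000 axis are ours):
import numpy as np
from array import array

def rebin_to_uniform_edges(hist, x_min=0.0, x_max=10000.0, width=1.0):
    edges = array('d', np.arange(x_min, x_max + width, width))
    return hist.Rebin(len(edges) - 1, hist.GetName() + "_rebinned", edges)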
    h = dict_scale_hist[scale]
    if DrawBothHistAndFit:
        h.Draw("same")
    if DrawJustFit:
        h.Draw("func same")

    # Set Canvas Title
    pave = TPaveText(0.00, 0.9, 0.3, 1.0, "tblrNDC")
    pave.SetTextColor(1)
    pave.SetTextSize(0.05)
    pave.AddText("Histogram Fits")
    pave.Draw("same")

    # Draw legend
    mylegend.Draw("same")
    c.Print("output/fitted.pdf")


####### Execute ########

# open file
file = TFile(fileName, "READ")
if not file.IsOpen():
    print "File", fileName, "does not exist. WILL ABORT!!!"
    assert(False)

scalestring = "Nominal,OneMu,OneMuNu,AllMu,AllMuNu,PtRecoBukin,PtRecoGauss,Regression"  # Does not include Parton

Overlay(scalestring)
Fitting(scalestring, Fit_id)
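# mylegend above is built elsewhere in this script; a sketch of how such a
# legend could be assembled from the per-scale histograms (coordinates and the
# helper name are assumptions):
def build_scale_legend(dict_scale_hist, scales):
    from ROOT import TLegend
    leg = TLegend(0.65, 0.6, 0.95, 0.9)
    leg.SetBorderSize(0)
    for scale in scales:
        leg.AddEntry(dict_scale_hist[scale], scale, "l")
    return leg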
class AnalysisSuiteGainMap: __slots__ = [ 'ADCPKPOS_SECTOR_AVG', 'ADCPKPOS_SECTOR_STDDEV', 'ANA_UNI_GRANULARITY', 'AVGCLUSTSIZE_SECTOR_AVG', 'AVGCLUSTSIZE_SECTOR_STDDEV', 'DEBUG', 'DETECTOR', 'DET_IMON_QC5_RESP_UNI', 'DET_IMON_POINTS', 'FILE_IN', 'FILE_OUT', 'GAIN_CALCULATOR', 'GAIN_LAMBDA', 'GAIN_LAMBDA_ERR', 'GAIN_AVG_POINTS', 'GAIN_STDDEV_POINTS', 'GAIN_MAX_POINTS', 'G2D_MAP_ABS_RESP_UNI', 'G2D_MAP_AVG_CLUST_SIZE_ORIG', 'G2D_MAP_AVG_CLUST_SIZE_NORM', 'G2D_MAP_GAIN_ORIG', 'PD_CALCULATOR', 'PD_AVG_POINTS', 'PD_STDDEV_POINTS', 'PD_MAX_POINTS', 'PD_MIN_POINTS' ] def __init__(self, file_out, inputfilename="", params_gain=PARAMS_GAIN(), params_det=PARAMS_DET(), params_discharge=PARAMS_PD(), debug=False): self.ADCPKPOS_SECTOR_AVG = 0. #Average of the fitted cluster ADC PkPos in defined (ieta,iphi) sector self.ADCPKPOS_SECTOR_STDDEV = 0. #Std. Dev. of the fitted cluster ADC PkPos in defined (ieta,iphi) sector self.ANA_UNI_GRANULARITY = 32 self.AVGCLUSTSIZE_SECTOR_AVG = 0. #Average of Average Cluster Size distributions in defined (ieta,iphi) sector self.AVGCLUSTSIZE_SECTOR_STDDEV = 0. #Std. Dev. of Average Cluster Size distributions in defined (ieta,iphi) sector self.DEBUG = debug self.DETECTOR = params_det self.DET_IMON_QC5_RESP_UNI = params_det.DET_IMON_QC5_RESP_UNI self.DET_IMON_POINTS = [] self.FILE_IN = [] if len(inputfilename) > 0: self.FILE_IN = TFile(str(inputfilename), "READ", "", 1) self.FILE_OUT = file_out self.GAIN_CALCULATOR = params_gain self.GAIN_LAMBDA = 1. self.GAIN_LAMBDA_ERR = 0. self.GAIN_AVG_POINTS = [] #Average Gain over the entire detector self.GAIN_STDDEV_POINTS = [ ] #Std. Dev of Gain over the entire detector self.GAIN_MAX_POINTS = [] #Max Gain over the entire detector self.GAIN_MIN_POINTS = [] #Min Gain over the entire detector self.G2D_MAP_ABS_RESP_UNI = TGraph2D( ) #Absolute Response Uniformity Map self.G2D_MAP_AVG_CLUST_SIZE_ORIG = TGraph2D( ) #Absolute Avg Cluster Size Map self.G2D_MAP_AVG_CLUST_SIZE_NORM = TGraph2D( ) #Normalized " " self.G2D_MAP_GAIN_ORIG = TGraph2D() #Effective Gain Map self.PD_CALCULATOR = params_discharge self.PD_AVG_POINTS = [] #Avg P_D over entire detector self.PD_STDDEV_POINTS = [] #Std. Dev of P_D over entire detector self.PD_MAX_POINTS = [] #Max P_D over the entire detector self.PD_MIN_POINTS = [] #Min P_D over the entire detector return def reset(self, debug=False): #Close TFiles self.closeTFiles(debug) #Reset Variables self.DEBUG = debug self.ADCPKPOS_SECTOR_AVG = 0. self.ADCPKPOS_SECTOR_STDDEV = 0. self.ANA_UNI_GRANULARITY = 32 self.AVGCLUSTSIZE_SECTOR_AVG = 0. self.AVGCLUSTSIZE_SECTOR_STDDEV = 0. self.DET_IMON_QC5_RESP_UNI = 0. self.GAIN_LAMBDA = 1. self.GAIN_LAMBDA_ERR = 0. #Reset classes self.DETECTOR.reset() #Clear Lists del self.DET_IMON_POINTS[:] del self.GAIN_AVG_POINTS[:] del self.GAIN_STDDEV_POINTS[:] del self.GAIN_MAX_POINTS[:] del self.GAIN_MIN_POINTS[:] del self.PD_AVG_POINTS[:] del self.PD_STDDEV_POINTS[:] del self.PD_MAX_POINTS[:] del self.PD_MIN_POINTS[:] #Clear TObjects? #self.G2D_MAP_ABS_RESP_UNI #self.G2D_MAP_AVG_CLUST_SIZE_ORIG #self.G2D_MAP_AVG_CLUST_SIZE_NORM #self.G2D_MAP_GAIN_ORIG return #Determines the Average & Std. Dev. 
ADC PkPos in the (DETPOS_IETA, DETPOS_IPHI) sector def avgROSectorADCPkPos(self): #Load the plot strPlotName = "SectorEta{0}/g_iEta{0}_clustADC_Fit_PkPos".format( self.DETECTOR.DETPOS_IETA) gSector_clustADC_Fit_PkPos = self.FILE_IN.Get(strPlotName) #Calculate the iphi sector boundaries list_sectBoundary = self.DETECTOR.calcROSectorBoundariesByEta( self.DETECTOR.DETPOS_IETA) #Print to user - Section Boundaries #if self.DEBUG == True: #for i in range(0,len(list_sectBoundary)): #print list_sectBoundary[i] #Loop over points in the plot list_clustADC_Fit_PkPos = [] for i in range(0, gSector_clustADC_Fit_PkPos.GetN()): #Get the i^th point in this plot fPx = Double(0.0) fPy = Double(0.0) gSector_clustADC_Fit_PkPos.GetPoint(i, fPx, fPy) #Check if this point is within the defined (ieta,iphi) sector, if so store it for later use if list_sectBoundary[self.DETECTOR.DETPOS_IPHI - 1] <= fPx and fPx <= list_sectBoundary[ self.DETECTOR.DETPOS_IPHI]: #Print to user - selected data points #if self.DEBUG == True: #print "{0}\t{1}\t{2}".format(i, fPx, fPy) #store data point list_clustADC_Fit_PkPos.append(fPy) #Store this list as a numpy array and then remove all outliers array_clustADC_Fit_PkPos = np.array(list_clustADC_Fit_PkPos) array_clustADC_Fit_PkPos = rejectOutliers(array_clustADC_Fit_PkPos) if self.DEBUG: print "np.mean(list_clustADC_Fit_PkPos) = {0}".format( np.mean(list_clustADC_Fit_PkPos)) print "np.mean(array_clustADC_Fit_PkPos) = {0}\t No Outliers".format( str(np.mean(array_clustADC_Fit_PkPos))) #Calculate Average self.ADCPKPOS_SECTOR_AVG = np.mean( array_clustADC_Fit_PkPos ) #Average of the fitted cluster ADC PkPos in defined (ieta,iphi) sector self.ADCPKPOS_SECTOR_STDDEV = np.std( array_clustADC_Fit_PkPos ) #Std Dev of the fitted cluster ADC PkPos in defined (ieta,iphi) sector print "Avg PkPos = {0}+/-{1}".format(self.ADCPKPOS_SECTOR_AVG, self.ADCPKPOS_SECTOR_STDDEV) return #Determine the average of the average cluster sizes within a single readout sector def avgROSectorAvgClustSize(self): #Load the plot strPlotName = "SectorEta{0}/h_iEta{0}_clustSize_v_clustPos".format( self.DETECTOR.DETPOS_IETA) hSector_clustSize_v_clustPos = self.FILE_IN.Get(strPlotName) #Calculate the iphi sector boundaries list_sectBoundary = self.DETECTOR.calcROSectorBoundariesByEta( self.DETECTOR.DETPOS_IETA) #Print to user - Section Boundaries if self.DEBUG == True: for i in range(0, len(list_sectBoundary)): print list_sectBoundary[i] #Loop over points in the plot list_avgClustSize = [] for i in range(1, hSector_clustSize_v_clustPos.GetNbinsX() + 1): fBinCenter = hSector_clustSize_v_clustPos.GetXaxis().GetBinCenter( i) #Check if this point is within the defined (ieta,iphi) sector, if so store it for later use if list_sectBoundary[ self.DETECTOR.DETPOS_IPHI - 1] <= fBinCenter and fBinCenter <= list_sectBoundary[ self.DETECTOR.DETPOS_IPHI]: #Project out cluster size distribution for *this* slice strPlotName = "h_iEta{0}Slice{1}_clustSize".format( self.DETECTOR.DETPOS_IETA, i) h_clustSize = hSector_clustSize_v_clustPos.ProjectionY( strPlotName, i, i, "") fAvgClustSize = h_clustSize.GetMean() #store data point list_avgClustSize.append(fAvgClustSize) #Print to user - selected data points if self.DEBUG == True: print "{0}\t{1}\t{2}".format(i, fBinCenter, fAvgClustSize) #Store this list as a numpy array and then remove all outliers array_avgClustSize = np.array(list_avgClustSize) array_avgClustSize = rejectOutliers(array_avgClustSize) if self.DEBUG: print "np.mean(list_avgClustSize) = {0}".format( np.mean(list_avgClustSize)) 
print "np.mean(array_avgClustSize) = {0}\t No Outliers".format( np.mean(array_avgClustSize)) #Calculate Average self.AVGCLUSTSIZE_SECTOR_AVG = np.mean( array_avgClustSize ) #Average of the fitted cluster ADC PkPos in defined (ieta,iphi) sector self.AVGCLUSTSIZE_SECTOR_STDDEV = np.std( array_avgClustSize ) #Std. Dev. of the fitted cluster ADC PkPos in defined (ieta,iphi) sector print "Avg of Avg Clust Size = {0}+/-{1}".format( self.AVGCLUSTSIZE_SECTOR_AVG, self.AVGCLUSTSIZE_SECTOR_STDDEV) return #alpha(x) = exp([0]*(x-x0) ) where x is hvPt and x0 is self.DET_IMON_QC5_RESP_UNI def calcAlpha(self, hvPt): return np.exp(self.GAIN_CALCULATOR.GAIN_CURVE_P0 * (hvPt - self.DETECTOR.DET_IMON_QC5_RESP_UNI)) #Determines the linear correlation factor lambda which relates Gain to ADC counts def calcROSectorLambda(self): gain = self.GAIN_CALCULATOR.calcGain(self.DET_IMON_QC5_RESP_UNI) gain_err = self.GAIN_CALCULATOR.calcGainErr(self.DET_IMON_QC5_RESP_UNI) self.GAIN_LAMBDA = gain / self.ADCPKPOS_SECTOR_AVG self.GAIN_LAMBDA_ERR = (1. / self.ADCPKPOS_SECTOR_AVG) * np.sqrt( np.square(gain_err) + np.square(self.ADCPKPOS_SECTOR_STDDEV * gain / self.ADCPKPOS_SECTOR_AVG) - 2. * gain_err * self.ADCPKPOS_SECTOR_STDDEV * gain / self.ADCPKPOS_SECTOR_AVG) print "lambda = {0}+/-{1}".format(self.GAIN_LAMBDA, self.GAIN_LAMBDA_ERR) return #Determines the gain map from the absolute response uniformity map def calcGainMap(self, strDetName): #Load the absolute response uniformity map strPlotName = "Summary/g2D_{0}_ResponseFitPkPos_AllEta".format( strDetName) if self.DEBUG: print "Attempted to Load:" print strPlotName self.G2D_MAP_ABS_RESP_UNI = self.FILE_IN.Get(strPlotName) #Setup the gain map self.G2D_MAP_GAIN_ORIG.Set(self.G2D_MAP_ABS_RESP_UNI.GetN()) self.G2D_MAP_GAIN_ORIG.SetName("g2D_{0}_EffGain_AllEta_{1}".format( strDetName, int(self.DET_IMON_QC5_RESP_UNI))) #Get the arrays that make the response uniformity map array_fPx = self.G2D_MAP_ABS_RESP_UNI.GetX() array_fPy = self.G2D_MAP_ABS_RESP_UNI.GetY() array_fPz = self.G2D_MAP_ABS_RESP_UNI.GetZ() #Loop Over all Points of self.G2D_MAP_ABS_RESP_UNI array_Gain_Vals = np.zeros(self.G2D_MAP_ABS_RESP_UNI.GetN()) array_PD_Vals = np.zeros(self.G2D_MAP_ABS_RESP_UNI.GetN()) for i in range(0, self.G2D_MAP_ABS_RESP_UNI.GetN()): #Set the i^th point in self.G2D_MAP_GAIN_ORIG array_Gain_Vals[i] = array_fPz[i] * self.GAIN_LAMBDA array_PD_Vals[i] = self.PD_CALCULATOR.calcPD(array_fPz[i] * self.GAIN_LAMBDA) self.G2D_MAP_GAIN_ORIG.SetPoint(i, array_fPx[i], array_fPy[i], array_fPz[i] * self.GAIN_LAMBDA) #Store Average, Std. Dev., Max, & Min Gain array_Gain_Vals = rejectOutliers(array_Gain_Vals) self.DET_IMON_POINTS.append(self.DET_IMON_QC5_RESP_UNI) self.GAIN_AVG_POINTS.append(np.mean(array_Gain_Vals)) self.GAIN_STDDEV_POINTS.append(np.std(array_Gain_Vals)) self.GAIN_MAX_POINTS.append(np.max(array_Gain_Vals)) self.GAIN_MIN_POINTS.append(np.min(array_Gain_Vals)) #Store Average, Std. 
Dev., Max & Min P_D array_PD_Vals = rejectOutliers(array_PD_Vals) self.PD_AVG_POINTS.append(np.mean(array_PD_Vals)) self.PD_STDDEV_POINTS.append(np.std(array_PD_Vals)) self.PD_MAX_POINTS.append(np.max(array_PD_Vals)) self.PD_MIN_POINTS.append(np.min(array_PD_Vals)) #Draw the effective gain map canv_Gain_Map_Orig = TCanvas( "canv_{0}_EffGain_AllEta_{1}".format( strDetName, int(self.DET_IMON_QC5_RESP_UNI)), "Gain Map - Original {0}".format(self.DET_IMON_QC5_RESP_UNI), 600, 600) canv_Gain_Map_Orig.cd() canv_Gain_Map_Orig.cd().SetLogz(1) self.G2D_MAP_GAIN_ORIG.Draw("TRI2Z") #Write the effective gain map to the output file dir_hvOrig = self.FILE_OUT.mkdir("GainMap_HVPt{0}".format( int(self.DET_IMON_QC5_RESP_UNI))) dir_hvOrig.cd() canv_Gain_Map_Orig.Write() self.G2D_MAP_GAIN_ORIG.Write() return #Determines the gain map from the absolute response uniformity map for an arbitrary voltage def calcGainMapHV(self, strDetName, hvPt): #Create the new TGraph2D - Gain g2D_Map_Gain_hvPt = TGraph2D(self.G2D_MAP_GAIN_ORIG.GetN()) g2D_Map_Gain_hvPt.SetName("g2D_{0}_EffGain_AllEta_{1}".format( strDetName, int(hvPt))) #Create the new TGraph2D - Discharge Probability g2D_Map_PD_hvPt = TGraph2D(self.G2D_MAP_GAIN_ORIG.GetN()) g2D_Map_PD_hvPt.SetName("g2D_{0}_PD_AllEta_{1}".format( strDetName, int(hvPt))) #Get the arrays that make the response uniformity map array_fPx = self.G2D_MAP_GAIN_ORIG.GetX() array_fPy = self.G2D_MAP_GAIN_ORIG.GetY() array_fPz = self.G2D_MAP_GAIN_ORIG.GetZ() #Calculate alpha alpha = self.calcAlpha(hvPt) #Loop Over all Points of self.G2D_MAP_ABS_RESP_UNI array_Gain_Vals = np.zeros(self.G2D_MAP_ABS_RESP_UNI.GetN()) array_PD_Vals = np.zeros(self.G2D_MAP_ABS_RESP_UNI.GetN()) for i in range(0, self.G2D_MAP_ABS_RESP_UNI.GetN()): #Set the i^th point in self.G2D_MAP_GAIN_ORIG array_Gain_Vals[i] = array_fPz[i] * alpha array_PD_Vals[i] = self.PD_CALCULATOR.calcPD(array_fPz[i] * alpha) g2D_Map_Gain_hvPt.SetPoint(i, array_fPx[i], array_fPy[i], array_fPz[i] * alpha) g2D_Map_PD_hvPt.SetPoint( i, array_fPx[i], array_fPy[i], self.PD_CALCULATOR.calcPD(array_fPz[i] * alpha)) #Store Average, Std. Dev., Max, & Min Gain array_Gain_Vals = rejectOutliers(array_Gain_Vals) self.DET_IMON_POINTS.append(hvPt) self.GAIN_AVG_POINTS.append(np.mean(array_Gain_Vals)) self.GAIN_STDDEV_POINTS.append(np.std(array_Gain_Vals)) self.GAIN_MAX_POINTS.append(np.max(array_Gain_Vals)) self.GAIN_MIN_POINTS.append(np.min(array_Gain_Vals)) #Store Average, Std. 
Dev., Max & Min P_D array_PD_Vals = rejectOutliers(array_PD_Vals) self.PD_AVG_POINTS.append(np.mean(array_PD_Vals)) self.PD_STDDEV_POINTS.append(np.std(array_PD_Vals)) self.PD_MAX_POINTS.append(np.max(array_PD_Vals)) self.PD_MIN_POINTS.append(np.min(array_PD_Vals)) #Draw the effective gain map canv_Gain_Map_hvPt = TCanvas( "canv_{0}_EffGain_AllEta_{1}".format(strDetName, int(hvPt)), "Gain Map - hvPt = {0}".format(hvPt), 600, 600) canv_Gain_Map_hvPt.cd() canv_Gain_Map_hvPt.cd().SetLogz(1) g2D_Map_Gain_hvPt.Draw("TRI2Z") #Draw the discharge probability map canv_PD_Map_hvPt = TCanvas( "canv_{0}_PD_AllEta_{1}".format(strDetName, int(hvPt)), "Discharge Probability Map - hvPt = {0}".format(hvPt), 600, 600) canv_PD_Map_hvPt.cd() canv_PD_Map_hvPt.cd().SetLogz(1) g2D_Map_PD_hvPt.Draw("TRI2Z") #Write the effective gain map to the output file dir_hvPt = self.FILE_OUT.mkdir("GainMap_HVPt{0}".format(int(hvPt))) dir_hvPt.cd() canv_Gain_Map_hvPt.Write() g2D_Map_Gain_hvPt.Write() canv_PD_Map_hvPt.Write() g2D_Map_PD_hvPt.Write() return g2D_Map_Gain_hvPt #Determines the average cluster size map for the entire detector def calcClusterSizeMap(self, strDetName): #Create the container which will store the clusterSize iNEtaSectors = len(self.DETECTOR.LIST_DET_GEO_PARAMS) iNBinNum = self.ANA_UNI_GRANULARITY * iNEtaSectors * self.DETECTOR.LIST_DET_GEO_PARAMS[ 0].NBCONNECT array_shape = (iNBinNum, 3) array_clustSize = np.zeros(array_shape) #Create the average cluster size map strPlotName = "g2D_{0}_AvgClustSize_AllEta_{1}".format( strDetName, int(self.DET_IMON_QC5_RESP_UNI)) self.G2D_MAP_AVG_CLUST_SIZE_ORIG.Set( iNBinNum) #Set number of pts, see comments above self.G2D_MAP_AVG_CLUST_SIZE_ORIG.SetName(strPlotName) self.G2D_MAP_AVG_CLUST_SIZE_ORIG.SetTitle("") #Create the average cluster size map strPlotName = "g2D_{0}_AvgClustSizeNormalized_AllEta_{1}".format( strDetName, int(self.DET_IMON_QC5_RESP_UNI)) self.G2D_MAP_AVG_CLUST_SIZE_NORM.Set( iNBinNum) #Set number of pts, see comments above self.G2D_MAP_AVG_CLUST_SIZE_NORM.SetName(strPlotName) self.G2D_MAP_AVG_CLUST_SIZE_NORM.SetTitle("") for iEta in range(1, iNEtaSectors + 1): #Get the Eta Sector etaSector = self.DETECTOR.LIST_DET_GEO_PARAMS[iEta - 1] #Load the cluster size vs cluster position plot for this iEta value strPlotName = "SectorEta{0}/h_iEta{0}_clustSize_v_clustPos".format( iEta) if self.DEBUG: print "Attempted to Load:" print strPlotName h_clustSize_v_clustPos = self.FILE_IN.Get(strPlotName) #Loop over the x-bins of this plot for iSlice in range(1, h_clustSize_v_clustPos.GetNbinsX() + 1): #Project out cluster size distribution for *this* slice strPlotName = "h_iEta{0}Slice{1}_clustSize".format( iEta, iSlice) h_clustSize = h_clustSize_v_clustPos.ProjectionY( strPlotName, iSlice, iSlice, "") #Store average cluster size, y-position and x-position array_clustSize[ (iEta - 1) * h_clustSize_v_clustPos.GetNbinsX() + iSlice - 1] = ( h_clustSize_v_clustPos.GetXaxis().GetBinCenter(iSlice), etaSector.SECTPOS, h_clustSize.GetMean()) #Set this point in the plot - Absolute self.G2D_MAP_AVG_CLUST_SIZE_ORIG.SetPoint( (iEta - 1) * h_clustSize_v_clustPos.GetNbinsX() + iSlice - 1, h_clustSize_v_clustPos.GetXaxis().GetBinCenter(iSlice), etaSector.SECTPOS, h_clustSize.GetMean()) #Set this point in the plot - Normalized self.G2D_MAP_AVG_CLUST_SIZE_NORM.SetPoint( (iEta - 1) * h_clustSize_v_clustPos.GetNbinsX() + iSlice - 1, h_clustSize_v_clustPos.GetXaxis().GetBinCenter(iSlice), etaSector.SECTPOS, h_clustSize.GetMean() / self.AVGCLUSTSIZE_SECTOR_AVG) #Print the cluster map to 
the user if requested if self.DEBUG: print "Average Cluster Size Map:" print array_clustSize #Draw the average cluster size map - Absolute canv_AvgClustSize_Map_Orig = TCanvas( "canv_{0}_AvgClustSize_AllEta_{1}".format( strDetName, int(self.DET_IMON_QC5_RESP_UNI)), "Average Cluster Size Map - Original {0}".format( self.DET_IMON_QC5_RESP_UNI), 600, 600) canv_AvgClustSize_Map_Orig.cd() self.G2D_MAP_AVG_CLUST_SIZE_ORIG.Draw("TRI2Z") #Draw the average cluster size map - Normalized canv_AvgClustSize_Map_Norm = TCanvas( "canv_{0}_AvgClustSizeNormalized_AllEta_{1}".format( strDetName, int(self.DETECTOR.DET_IMON_QC5_RESP_UNI)), "Average Cluster Size Map - Normalized {0}".format( self.DETECTOR.DET_IMON_QC5_RESP_UNI), 600, 600) canv_AvgClustSize_Map_Norm.cd() self.G2D_MAP_AVG_CLUST_SIZE_NORM.Draw("TRI2Z") #Write the average cluster size map to the output file dir_hvOrig = self.FILE_OUT.GetDirectory( "GainMap_HVPt{0}".format(int(self.DETECTOR.DET_IMON_QC5_RESP_UNI)), False, "GetDirectory") dir_hvOrig.cd() canv_AvgClustSize_Map_Orig.Write() self.G2D_MAP_AVG_CLUST_SIZE_ORIG.Write() canv_AvgClustSize_Map_Norm.Write() self.G2D_MAP_AVG_CLUST_SIZE_NORM.Write() return #Closes TFiles def closeTFiles(self, debug=False): if self.FILE_IN.IsOpen(): self.FILE_IN.Close() if self.FILE_OUT.IsOpen(): self.FILE_OUT.Close() return #Plot Average Gain Over Entire Detector Area def plotGainSummary(self, strDetName): #Create the Plot - Average gDet_AvgEffGain = TGraphErrors(len(self.GAIN_AVG_POINTS)) gDet_AvgEffGain.SetName("g_{0}_EffGainAvg".format(strDetName)) #Create the Plot - Max Gain gDet_MaxEffGain = TGraphErrors(len(self.GAIN_MAX_POINTS)) gDet_MaxEffGain.SetName("g_{0}_EffGainMax".format(strDetName)) #Create the Plot - Min Gain gDet_MinEffGain = TGraphErrors(len(self.GAIN_MIN_POINTS)) gDet_MinEffGain.SetName("g_{0}_EffGainMin".format(strDetName)) #Set and print the points #print "===============Printing Gain Data===============" #print "[BEGIN_DATA]" #print "\tVAR_INDEP,VAR_DEP,VAR_DEP_ERR" for i in range(0, len(self.GAIN_AVG_POINTS)): #Average gDet_AvgEffGain.SetPoint(i, self.DET_IMON_POINTS[i], self.GAIN_AVG_POINTS[i]) gDet_AvgEffGain.SetPointError(i, 0, self.GAIN_STDDEV_POINTS[i]) #print "\t%f,%f,%f"%(self.DET_IMON_POINTS[i],self.GAIN_AVG_POINTS[i],self.GAIN_STDDEV_POINTS[i]) #Max gDet_MaxEffGain.SetPoint(i, self.DET_IMON_POINTS[i], self.GAIN_MAX_POINTS[i]) #Min gDet_MinEffGain.SetPoint(i, self.DET_IMON_POINTS[i], self.GAIN_MIN_POINTS[i]) pass #print "[END_DATA]" #print "" #Draw canv_AvgEffGain = TCanvas( "canv_{0}_EffGainAvg".format(strDetName), "{0} Average Effective Gain".format(strDetName), 600, 600) canv_AvgEffGain.cd() canv_AvgEffGain.cd().SetLogy() gDet_AvgEffGain.GetXaxis().SetTitle("HV") gDet_AvgEffGain.GetYaxis().SetTitle("#LT Effective Gain #GT") gDet_AvgEffGain.GetYaxis().SetRangeUser(1e2, 1e6) gDet_AvgEffGain.SetMarkerStyle(21) gDet_AvgEffGain.Draw("AP") gDet_MaxEffGain.Draw("sameL") gDet_MinEffGain.Draw("sameL") #Write dir_Summary = self.FILE_OUT.mkdir("Summary") dir_Summary.cd() canv_AvgEffGain.Write() gDet_AvgEffGain.Write() gDet_MaxEffGain.Write() gDet_MinEffGain.Write() return #Plot Average Gain Over Entire Detector Area def plotPDSummary(self, strDetName): #Create the Plot - Average gDet_AvgPD = TGraphErrors(len(self.PD_AVG_POINTS)) gDet_AvgPD.SetName("g_{0}_PDAvg".format(strDetName)) #Create the Plot - Max Gain gDet_MaxPD = TGraphErrors(len(self.PD_MAX_POINTS)) gDet_MaxPD.SetName("g_{0}_PDMax".format(strDetName)) #Create the Plot - Min Gain gDet_MinPD = TGraphErrors(len(self.PD_MIN_POINTS)) 
gDet_MinPD.SetName("g_" + strDetName + "_PDMin") gDet_MinPD.SetName("g_{0}_PDMin".format(strDetName)) #Set the points for i in range(0, len(self.PD_AVG_POINTS)): #Average gDet_AvgPD.SetPoint(i, self.GAIN_AVG_POINTS[i], self.PD_AVG_POINTS[i]) gDet_AvgPD.SetPointError(i, self.GAIN_STDDEV_POINTS[i], self.PD_STDDEV_POINTS[i]) #Max gDet_MaxPD.SetPoint(i, self.GAIN_AVG_POINTS[i], self.PD_MAX_POINTS[i]) #Min gDet_MinPD.SetPoint(i, self.GAIN_AVG_POINTS[i], self.PD_MIN_POINTS[i]) #Draw canv_AvgPD = TCanvas("canv_{0}_PDAvg".format(strDetName), "{0} Discharge Probability".format(strDetName), 600, 600) canv_AvgPD.cd() canv_AvgPD.cd().SetLogx() canv_AvgPD.cd().SetLogy() gDet_AvgPD.GetXaxis().SetTitle("#LT Effective Gain #GT") gDet_AvgPD.GetYaxis().SetTitle("Discharge Probability P_{D}") gDet_AvgPD.GetYaxis().SetRangeUser(1e-11, 1e-6) gDet_AvgPD.SetMarkerStyle(21) gDet_AvgPD.Draw("AP") gDet_MaxPD.Draw("sameL") gDet_MinPD.Draw("sameL") #Write dir_Summary = self.FILE_OUT.GetDirectory("Summary") dir_Summary.cd() canv_AvgPD.Write() gDet_AvgPD.Write() gDet_MaxPD.Write() gDet_MinPD.Write() return #Open Input File def openInputFile(self, inputfilename): self.FILE_IN = TFile(str(inputfilename), "READ", "", 1) return #Set the detector def setDetector(self, params_det=PARAMS_DET()): self.DETECTOR = params_det return
#can't make NewPage for display
numplots = len(parsedPlots)
if (options.outputfile == "DISPLAY") and (numplots > maxperlist):
    printfunc("ERROR: too many hists to print to display")
    sys.exit(1)

from ROOT import TFile

#opening root files
for rootopt in parsedRoots:
    if not isfile(rootopt.filename):
        printfunc("ERROR: nonexistent file:", rootopt.filename)
        sys.exit(1)
    root = TFile(rootopt.filename, "read")
    if root.IsOpen() == 0:
        printfunc("ERROR: can't open the file:", rootopt.filename)
        sys.exit(1)
    rootopt.rootfile = root
    rootopt.tree = root.Get("COL/1")

printfunc("Creating plots...")
plots = createPlots(parsedPlots, parsedRoots)

printfunc("Filling plots...")
fillPlots(plots, parsedPlots, parsedRoots, eventext)

if (options.divide):
    printfunc("Calculating ratio")
    rootopt1 = parsedRoots.pop(0)
    dividePlots(plots, rootopt1)
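# createPlots/fillPlots/dividePlots are defined elsewhere in this tool; purely
# as an illustration, a ratio step like dividePlots() typically divides each
# filled histogram by the one belonging to the reference file popped above.
# The attribute layout (plot.histos keyed by rootopt) is an assumption, not
# the tool's actual data structure:
def divide_plots_sketch(plots, reference_rootopt):
    for plot in plots:
        ref_hist = plot.histos[reference_rootopt]
        for rootopt, hist in plot.histos.items():
            if rootopt is not reference_rootopt:
                hist.Divide(ref_hist)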