def loopDatasets(dataS,silent=False): plts = {} for l,j in dataS.iteritems(): if not silent: print "dataSet job ",l crabJ = json.load(open(j["crabJson"])) plts[l] = {'plots':{},'additive':[]}; if crabJ.has_key('sample') and crabJ['sample'].has_key('xSec'): plts[l]['color'] = crabJ['sample']['color'] plts[l]['label'] = crabJ['sample']['label'] plts[l]['xSec'] = crabJ['sample']['xSec'];plts[l]['additive'].append('xSec') plts[l]['inputEvents'] = j['EventsRead'];plts[l]['additive'].append('inputEvents') if not silent: print "adding additional information ","color ",crabJ['sample']['color']," label ",crabJ['sample']['label']," xSec ",crabJ['sample']['xSec']," inputEvts ",j['EventsRead'] if j['EventsRead'] != int(j['dasNeventsInput']): print "warning ",l," input events differ ",j['EventsRead']," ",j['dasNeventsInput'] if j.has_key('crabIntLumi'): plts[l]['intLumi'] = j['crabIntLumi']; plts[l]['additive'].append('intLumi') mergedFile = str(crabJ['mergedFilename']) if not silent: print "mergedFile: ",mergedFile events = Events(mergedFile) if not silent: print "events ",events.size() dists = fillDistributions(events,l) dists.loop() plts[l]['plots'].update(copy.deepcopy(dists.plots)) return plts
class EventLooper(object):
    """Drive a sequence of analyzer objects over FWLite events.

    Each analyzer must provide beginJob(), applySelection(event),
    analyze(event) and endJob().
    """

    def __init__(self, sequence, inputPath=None):
        self.sequence = sequence
        # Normalise inputPath to a list of file names.  The original code
        # left self.inputPath unset when a non-empty list was passed,
        # which raised AttributeError later in loadEvents().
        if inputPath is None:
            inputPath = []
        elif not isinstance(inputPath, list):
            inputPath = [inputPath]
        self.inputPath = inputPath

    def loadEvents(self, nEvents=-1):
        # nEvents is accepted for interface compatibility; the FWLite
        # Events object is always opened on the full input list.
        self.events = Events(self.inputPath)

    def loop(self, nEvents=-1):
        """Run the full sequence over at most nEvents events (-1 = all)."""
        self.loadEvents(nEvents)
        print("Total Number of events to be run: %s" % self.events.size())
        for ana in self.sequence:
            ana.beginJob()
        for i, event in enumerate(self.events):
            if (i + 1) % 10000 == 0:
                print("Processed events: %s" % i)
            # Fixed off-by-one: the original tested i > nEvents and
            # therefore processed nEvents + 1 events.
            if nEvents != -1 and i >= nEvents:
                break
            for ana in self.sequence:
                if not ana.applySelection(event):
                    continue
                ana.analyze(event)
        for ana in self.sequence:
            ana.endJob()
def getEventsLumisInFile(infile):
    """Return (number of events, {run: [lumiBlock, ...]}) for an EDM file."""
    from DataFormats.FWLite import Lumis, Handle, Events
    events = Events(infile)
    lumis = Lumis(infile)
    runLumis = {}
    # Collect the luminosity blocks seen for each run number.
    for lum in lumis:
        aux = lum.aux()
        runLumis.setdefault(aux.run(), []).append(aux.id().luminosityBlock())
    return events.size(), runLumis
class Events(object):
    """Random-access wrapper around FWLite events.

    tree_name is accepted for interface compatibility but is not used.
    Unknown attributes are forwarded to the underlying FWLite object.
    """

    def __init__(self, files, tree_name):
        self.events = FWLiteEvents(files)

    def __len__(self):
        return self.events.size()

    def __getattr__(self, key):
        # Delegate everything else (getByLabel, eventAuxiliary, ...) to
        # the wrapped FWLite events object.
        return getattr(self.events, key)

    def __getitem__(self, iEv):
        # Position the underlying cursor on event iEv and return self so
        # handle lookups can be chained on the result.
        self.events.to(iEv)
        return self
class Events(object):
    """Iterable wrapper around the EDM events of a dataset.

    Iterating yields this same object; the current EDM event and the
    running counters are exposed as attributes.
    """

    def __init__(self, dataset):
        self._edmEvents = EDMEvents(dataset.files)
        self.iEvent = -1                       # index of the current event
        self.nEvents = self._edmEvents.size()  # total number of events

    def __iter__(self):
        # Equivalent to the manual next()/StopIteration loop: walk the
        # underlying EDM events, refresh the bookkeeping attributes and
        # yield self for each one.
        for edmEvent in self._edmEvents:
            self.edm_event = edmEvent
            self.iEvent = edmEvent._eventCounts
            self.nEvents = edmEvent.size()
            yield self
def loop(fname) :
    """Fill energy-loss histograms for generalTracks in file fname and
    draw them on a 2x3 canvas saved as eloss<fname>.png.

    Relies on module-level histograms (rhoH, zH, etaH, xyH, xyeH, phiH)
    and ROOT globals (TCanvas, gStyle) defined elsewhere in the file.
    """
    # Handle/label pair for the reco::Track collection.
    mus = [Handle("vector<reco::Track> "), "generalTracks"]
    eventsRef = Events(fname)
    for i in range(0, eventsRef.size()):
        a= eventsRef.to(i)
        a=eventsRef.getByLabel(mus[1],mus[0])
        pmus = []  # NOTE(review): filled nowhere, appears to be dead code
        for mu in mus[0].product() :
            # Track quality cuts: minimum pt and a valid inner state.
            if (mu.pt()<5) : continue
            if (not mu.innerOk()) : continue
            # Momentum lost between inner and outer state, in MeV.
            e = 1000*(mu.momentum().r()-mu.outerMomentum().r())
            if (e<0) : continue
            print e
            z = abs(mu.outerPosition().z())
            r = mu.outerPosition().rho()
            #rhoH.Fill(mu.outerPosition().rho(),e)
            zH.Fill(mu.outerPosition().z(),e)
            etaH.Fill(mu.outerPosition().eta(),e)
            # Endcap region: fill the weighted (xyeH) and unweighted (xyH)
            # maps; xyeH is divided by xyH below to get the average loss.
            if (z>240) :
                xyeH.Fill(mu.outerPosition().x(),mu.outerPosition().y(),e)
                xyH.Fill(mu.outerPosition().x(),mu.outerPosition().y(),1)
            # Barrel-like radii only for the phi profile.
            if (r<120) :phiH.Fill(mu.outerPosition().phi(),e)
            rhoH.Fill(r,e)
    # Draw all histograms on one canvas and save it as a PNG.
    c1 = TCanvas( 'c1', fname, 200, 10, 1000, 1400 )
    gStyle.SetOptStat(111111)
    gStyle.SetHistLineWidth(2)
    c1.Divide(2,3)
    c1.cd(1)
    rhoH.Draw()
    c1.cd(2)
    zH.Draw()
    c1.cd(3)
    xyH.Draw("COLZ")
    c1.cd(4)
    # Normalise the energy map by the occupancy map before drawing.
    xyeH.Divide(xyH)
    xyeH.Draw("COLZ")
    c1.cd(5)
    etaH.Draw()
    c1.cd(6)
    phiH.Draw()
    c1.Print("eloss"+fname+".png")
def processSample(self, sample, maxEv=-1):
    """Process up to maxEv events of `sample` (all events when maxEv < 0).

    For every event passing self.analyze the event is stored via
    self.addEvent and the histograms are filled with unit weight.
    """
    print('Processing Files')
    print(sample.files)
    events = Events(sample.files)
    print("%s events available for processing" % events.size())
    ts = time.time()
    # Count events actually processed so the summary is correct even when
    # the loop breaks early or the sample is empty (the original raised
    # NameError on N for an empty sample).
    processed = 0
    for N, event in enumerate(events):
        # Fixed off-by-one: the original broke at (N + 1) >= maxEv and
        # therefore processed only maxEv - 1 events.
        if maxEv >= 0 and N >= maxEv:
            break
        processed += 1
        if N % 1000000 == 0:
            t2 = time.time()
            print("%s events processed in %s seconds" % (N + 1, t2 - ts))
        weight = 1
        box = EventBox()
        self.readCollections(event, box)
        if not self.analyze(box):
            continue
        self.addEvent(box)
        self.fillHistos(box, sample.type, weight)
    tf = time.time()
    print("%s events processed in %s seconds" % (processed, tf - ts))
class Events(object):
    """Random-access FWLite events wrapper with optional VarParsing options.

    tree_name is accepted for interface compatibility but is not used.
    """

    def __init__(self, files, tree_name, options=None):
        # Plain file-list mode when no options object is supplied.
        if options is None:
            self.events = FWLiteEvents(files)
            return
        # Options mode: fill in the attributes FWLiteEvents expects when
        # the caller did not set them.
        for attr, default in (("inputFiles", files),
                              ("maxEvents", 0),
                              ("secondaryInputFiles", [])):
            if not hasattr(options, attr):
                setattr(options, attr, default)
        self.events = FWLiteEvents(options=options)

    def __len__(self):
        return self.events.size()

    def __getattr__(self, key):
        # Delegate everything else to the wrapped FWLite events object.
        return getattr(self.events, key)

    def __getitem__(self, iEv):
        # Position the cursor on event iEv and return self for chaining.
        self.events.to(iEv)
        return self
class Events(object):
    """Random-access FWLite events wrapper that also bootstraps the ROOT /
    FWLite environment (library loading, reco namespace) on construction.

    tree_name is accepted for interface compatibility but is not used.
    """
    def __init__(self, files, tree_name, options=None):
        from DataFormats.FWLite import Events as FWLiteEvents
        #TODO not sure we still need the stuff below
        from ROOT import gROOT, gSystem, AutoLibraryLoader
        print "Loading FW Lite"
        # NOTE(review): libFWCoreFWLite is loaded twice below; the second
        # Load is redundant but harmless -- confirm before removing.
        gSystem.Load("libFWCoreFWLite");
        gROOT.ProcessLine('FWLiteEnabler::enable();')
        gSystem.Load("libFWCoreFWLite");
        gSystem.Load("libDataFormatsPatCandidates");
        # Make reco types and the edm ref helper visible to the ROOT
        # interpreter for interactive handle lookups.
        from ROOT import gInterpreter
        gInterpreter.ProcessLine("using namespace reco;")
        gInterpreter.ProcessLine("using edm::refhelper::FindUsingAdvance;")
        if options is not None :
            # Fill in the attributes FWLiteEvents expects when the caller
            # did not set them on the options object.
            if not hasattr(options,"inputFiles"):
                options.inputFiles=files
            if not hasattr(options,"maxEvents"):
                options.maxEvents = 0
            if not hasattr(options,"secondaryInputFiles"):
                options.secondaryInputFiles = []
            self.events = FWLiteEvents(options=options)
        else :
            self.events = FWLiteEvents(files)
    def __len__(self):
        return self.events.size()
    def __getattr__(self, key):
        # Delegate everything else to the wrapped FWLite events object.
        return getattr(self.events, key)
    def __getitem__(self, iEv):
        # Position the cursor on event iEv and return self for chaining.
        self.events.to(iEv)
        return self
'GENMC_08TeV/My_RunIIFall17GS_Lambdab_49.root', 'GENMC_08TeV/My_RunIIFall17GS_Lambdab_5.root', 'GENMC_08TeV/My_RunIIFall17GS_Lambdab_50.root', 'GENMC_08TeV/My_RunIIFall17GS_Lambdab_51.root', 'GENMC_08TeV/My_RunIIFall17GS_Lambdab_6.root', 'GENMC_08TeV/My_RunIIFall17GS_Lambdab_7.root', 'GENMC_08TeV/My_RunIIFall17GS_Lambdab_8.root', 'GENMC_08TeV/My_RunIIFall17GS_Lambdab_9.root']) handleGen = Handle("std::vector<reco::GenParticle>") labelGen = ("genParticles") ######################## # Loop over the events # ######################## print "Total number of events to analyze: ", events.size() for it, event in enumerate(events): if it%100 == 0: print "Processed events: ", it event.getByLabel(labelGen, handleGen) gen_p = handleGen.product() for br in branchNames: branches[br] = -99. b0 = TLorentzVector() mum = TLorentzVector() mup = TLorentzVector() num = TLorentzVector() nup = TLorentzVector() kst = TLorentzVector()
br_l.append([]) for y in xrange(len(br_c)): br_l[x].append(array("d", [0.0])) tr_l[x].Branch(sys_e[x]+"_"+br_c[y], br_l[x][y], sys_e[x]+"_"+br_c[y]+"/D") ### ntuple booking end root_l = [] root_fath = open(in_f) for x in root_fath: root_l.append(x[:-1]) print root_l for rf in root_l: events = Events(rf) in_put_num_event += events.size() jetsLabel, jets = "catJets", Handle("std::vector<cat::Jet>") goodVTXLabel, GVTX = "goodOfflinePrimaryVertices", Handle("vector<reco::Vertex>") metLabel, mets = "catMETs", Handle("vector<cat::MET>") hlt80preL, hlt80pre = ("recoEventInfo","HLTPFJet80", "CAT"), Handle("int") hlt140preL, hlt140pre = ("recoEventInfo","HLTPFJet140","CAT"), Handle("int") hlt320preL, hlt320pre = ("recoEventInfo","HLTPFJet320", "CAT"), Handle("int") if mc: puwLabel, puw = ("pileupWeight", ""), Handle("double") puwupLabel, puwup = ("pileupWeight", "up"), Handle("double") puwdownLabel, puwdown = ("pileupWeight", "dn"), Handle("double") for iev,event in enumerate(events):
def topbnv_fwlite(argv):
    """FWLite analysis entry point: books a vertex TTree and fills it
    once per event with a good primary vertex.

    argv is forwarded to fwlite_tools.getUserOptions; output goes to the
    ROOT file named by options.output.
    """
    options = fwlite_tools.getUserOptions(argv)
    ROOT.gROOT.Macro("rootlogon.C")
    vertices, vertexLabel = Handle("std::vector<reco::Vertex>"), "offlineSlimmedPrimaryVertices"
    f = ROOT.TFile(options.output, "RECREATE")
    f.cd()
    outtree = ROOT.TTree("T", "Our tree of everything")
    ############################################################################
    # Vertex branches
    ############################################################################
    vertexdata = {}
    vertexdata['nvertex'] = ['vertexX', 'vertexY', 'vertexZ', 'vertexndof']
    outdata = {}
    # Book one integer counter branch per key and, for each associated
    # variable, a fixed-size (64) float array branch indexed by that counter.
    for key in vertexdata.keys():
        outdata[key] = array('i', [-1])
        outtree.Branch(key, outdata[key], key+"/I")
        for branch in vertexdata[key]:
            outdata[branch] = array('f', 64*[-1.])
            outtree.Branch(branch, outdata[branch], '{0}[{1}]/F'.format(branch,key))
    '''
    njet = array('i', [-1])
    outtree.Branch('njet', njet, 'njet/I')
    jetpt = array('f', 16*[-1.])
    outtree.Branch('jetpt', jetpt, 'jetpt[njet]/F')
    '''
    # IMPORTANT : Run one FWLite instance per file. Otherwise,
    # FWLite aggregates ALL of the information immediately, which
    # can take a long time to parse.
    #################################################################################
    def processEvent(iev, event):
        # Fill the vertex branches for one event; returns 1 when the event
        # was kept (tree filled), 0 when rejected.
        event.getByLabel(vertexLabel, vertices)
        PV,NPV = fwlite_tools.process_vertices(vertices, outdata, verbose=options.verbose)
        # Should do this first. We shouldn't analyze events that don't have a
        # good primary vertex
        if PV is None:
            return 0
        outtree.Fill()
        #print("Made it to end!")
        return 1
    #########################################
    # Main event loop
    nevents = 0
    maxevents = int(options.maxevents)
    for ifile in fwlite_tools.getInputFiles(options):
        print ('Processing file ' + ifile)
        events = Events (ifile)
        if maxevents > 0 and nevents > maxevents:
            break
        # loop over events in this file
        print('Tot events in this file: ' + str(events.size()))
        for iev, event in enumerate(events):
            #print(iev)
            if maxevents > 0 and nevents > maxevents:
                break
            nevents += 1
            #if nevents % 1000 == 0:
            if nevents % 100 == 0:
                print ('===============================================')
                print (' ---> Event ' + str(nevents))
            elif options.verbose:
                print (' ---> Event ' + str(nevents))
            # NOTE(review): this passes the Events container rather than the
            # current `event`; FWLite forwards getByLabel to the current
            # event so it appears to work, but it looks unintended -- confirm.
            genOut = processEvent(iev, events)
    #outtree.Print()
    # Close the output ROOT file
    f.cd()
    f.Write()
    f.Close()
class METProducerTest(unittest.TestCase): def setUp(self): self.exEvents = Events([options.expectedPath]) self.acEvents = Events([options.actualPath]) self.exHandleGenMETs = Handle("std::vector<reco::GenMET>") self.exHandlePFMETs = Handle("std::vector<reco::PFMET>") self.exHandleCaloMETs = Handle("std::vector<reco::CaloMET>") self.exHandleMETs = Handle("std::vector<reco::MET>") self.exHandlePFClusterMETs = Handle("std::vector<reco::PFClusterMET>") self.acHandleGenMETs = Handle("std::vector<reco::GenMET>") self.acHandlePFMETs = Handle("std::vector<reco::PFMET>") self.acHandleCaloMETs = Handle("std::vector<reco::CaloMET>") self.acHandleMETs = Handle("std::vector<reco::MET>") self.acHandlePFClusterMETs = Handle("std::vector<reco::PFClusterMET>") def test_n_events(self): self.assertEqual(self.exEvents.size(), self.acEvents.size()) def test_recoPFMETs_pfMet(self): label = ("pfMet" ,"" ,"METP") exHandle = self.exHandlePFMETs acHandle = self.acHandlePFMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoPFMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoGenMETs_genMetTrue(self): label = ("genMetTrue" ,"" ,"METP") exHandle = self.exHandleGenMETs acHandle = self.acHandleGenMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoGenMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoGenMETs_genMetCalo(self): label = ("genMetCalo" ,"" ,"METP") exHandle = self.exHandleGenMETs acHandle = self.acHandleGenMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoGenMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoGenMETs_genMetCaloAndNonPrompt(self): label = ("genMetCaloAndNonPrompt" ,"" ,"METP") exHandle = self.exHandleGenMETs acHandle = self.acHandleGenMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoGenMET') 
self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_genMetIC5GenJets(self): label = ("genMetIC5GenJets", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_tcMet(self): label = ("tcMet", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_tcMetCST(self): label = ("tcMetCST", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_tcMetRft2(self): label = ("tcMetRft2", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_tcMetVedu(self): label = ("tcMetVedu", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_tcMetWithPFclusters(self): label = ("tcMetWithPFclusters", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_htMetAK5(self): label = ("htMetAK5", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, 
candidateAssertMethods) # def test_recoMETs_htMetAK7(self): # label = ("htMetAK7", "", "METP") # exHandle = self.exHandleMETs # acHandle = self.acHandleMETs # candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') # self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) # def test_recoMETs_htMetKT4(self): # label = ("htMetKT4", "", "METP") # exHandle = self.exHandleMETs # acHandle = self.acHandleMETs # candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') # self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoMETs_htMetKT6(self): label = ("htMetKT6", "", "METP") exHandle = self.exHandleMETs acHandle = self.acHandleMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) # def test_recoMETs_htMetIC5(self): # label = ("htMetIC5", "", "METP") # exHandle = self.exHandleMETs # acHandle = self.acHandleMETs # candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') # self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_met(self): label = ("met", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_metHO(self): label = ("metHO", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_metNoHF(self): label = ("metNoHF", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, 
exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_metNoHFHO(self): label = ("metNoHFHO", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_metOpt(self): label = ("metOpt", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_metOptHO(self): label = ("metOptHO", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_metOptNoHF(self): label = ("metOptNoHF", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_metOptNoHFHO(self): label = ("metOptNoHFHO", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoCaloMETs_corMetGlobalMuons(self): label = ("corMetGlobalMuons", "" ,"METP") exHandle = self.exHandleCaloMETs acHandle = self.exHandleCaloMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoCaloMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoPFClusterMETs_pfClusterMet(self): label = ("pfClusterMet", "", "METP") exHandle = 
self.exHandlePFClusterMETs acHandle = self.acHandlePFClusterMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def test_recoPFMETs_pfChargedMET(self): label = ("pfChargedMET" ,"" ,"METP") exHandle = self.exHandlePFMETs acHandle = self.acHandlePFMETs candidateAssertMethods = ('assert_recoLeafCandidate', 'assert_recoMET', 'assert_recoPFMET') self.assert_collection(label, exHandle, acHandle, candidateAssertMethods) def assert_collection(self, label, exHandle, acHandle, candidateAssertMethods): exEventIter = self.exEvents.__iter__() acEventIter = self.acEvents.__iter__() nevents = min(self.exEvents.size(), self.acEvents.size()) for i in range(nevents): exEvent = exEventIter.next() acEvent = acEventIter.next() exEvent.getByLabel(label, exHandle) exMETs = exHandle.product() exMET = exMETs.front() acEvent.getByLabel(label, acHandle) acMETs = acHandle.product() self.assertEqual(acMETs.size(), 1) acMET = acMETs.front() for method in candidateAssertMethods: getattr(self, method)(acMET, exMET) def assert_recoPFMET(self, actual, expected): # double self.assertEqual(actual.photonEtFraction() , expected.photonEtFraction() ) self.assertAlmostEqual(actual.photonEt() , expected.photonEt() , 10) self.assertEqual(actual.neutralHadronEtFraction() , expected.neutralHadronEtFraction() ) self.assertAlmostEqual(actual.neutralHadronEt() , expected.neutralHadronEt() , 10) self.assertEqual(actual.electronEtFraction() , expected.electronEtFraction() ) self.assertAlmostEqual(actual.electronEt() , expected.electronEt() , 10) self.assertEqual(actual.chargedHadronEtFraction() , expected.chargedHadronEtFraction() ) self.assertAlmostEqual(actual.chargedHadronEt() , expected.chargedHadronEt() , 10) self.assertEqual(actual.muonEtFraction() , expected.muonEtFraction() ) self.assertAlmostEqual(actual.muonEt() , expected.muonEt() , 10) self.assertEqual(actual.HFHadronEtFraction() , 
expected.HFHadronEtFraction() ) self.assertAlmostEqual(actual.HFHadronEt() , expected.HFHadronEt() , 10) self.assertEqual(actual.HFEMEtFraction() , expected.HFEMEtFraction() ) self.assertAlmostEqual(actual.HFEMEt() , expected.HFEMEt() , 10) def assert_recoGenMET(self, actual, expected): # double self.assertEqual(actual.NeutralEMEtFraction() , expected.NeutralEMEtFraction() ) self.assertEqual(actual.NeutralEMEt() , expected.NeutralEMEt() ) self.assertEqual(actual.ChargedEMEtFraction() , expected.ChargedEMEtFraction() ) self.assertEqual(actual.ChargedEMEt() , expected.ChargedEMEt() ) self.assertEqual(actual.NeutralHadEtFraction() , expected.NeutralHadEtFraction() ) self.assertEqual(actual.NeutralHadEt() , expected.NeutralHadEt() ) self.assertEqual(actual.ChargedHadEtFraction() , expected.ChargedHadEtFraction() ) self.assertEqual(actual.ChargedHadEt() , expected.ChargedHadEt() ) self.assertEqual(actual.MuonEtFraction() , expected.MuonEtFraction() ) self.assertEqual(actual.MuonEt() , expected.MuonEt() ) self.assertEqual(actual.InvisibleEtFraction() , expected.InvisibleEtFraction() ) self.assertEqual(actual.InvisibleEt() , expected.InvisibleEt() ) def assert_recoCaloMET(self, actual, expected): # double self.assertEqual(actual.maxEtInEmTowers() , expected.maxEtInEmTowers() ) self.assertEqual(actual.maxEtInHadTowers() , expected.maxEtInHadTowers() ) self.assertAlmostEqual(actual.etFractionHadronic() , expected.etFractionHadronic(), 15 ) self.assertAlmostEqual(actual.emEtFraction() , expected.emEtFraction() , 15 ) self.assertEqual(actual.hadEtInHB() , expected.hadEtInHB() ) self.assertEqual(actual.hadEtInHO() , expected.hadEtInHO() ) self.assertEqual(actual.hadEtInHE() , expected.hadEtInHE() ) self.assertEqual(actual.hadEtInHF() , expected.hadEtInHF() ) self.assertEqual(actual.emEtInEB() , expected.emEtInEB() ) self.assertEqual(actual.emEtInEE() , expected.emEtInEE() ) self.assertEqual(actual.emEtInHF() , expected.emEtInHF() ) self.assertEqual(actual.metSignificance() , 
expected.metSignificance() ) self.assertEqual(actual.CaloSETInpHF() , expected.CaloSETInpHF() ) self.assertEqual(actual.CaloSETInmHF() , expected.CaloSETInmHF() ) self.assertEqual(actual.CaloMETInpHF() , expected.CaloMETInpHF() ) self.assertEqual(actual.CaloMETInmHF() , expected.CaloMETInmHF() ) self.assertEqual(actual.CaloMETPhiInpHF() , expected.CaloMETPhiInpHF() ) self.assertEqual(actual.CaloMETPhiInmHF() , expected.CaloMETPhiInmHF() ) def assert_recoMET(self, actual, expected): # double self.assertAlmostEqual(actual.sumEt() , expected.sumEt() , 10) self.assertAlmostEqual(actual.mEtSig() , expected.mEtSig() , 10) self.assertEqual(actual.significance() , expected.significance() ) self.assertEqual(actual.e_longitudinal() , expected.e_longitudinal() ) self.assertEqual(actual.dmEx().size() , expected.dmEx().size()) for a, e in zip(actual.dmEx(), expected.dmEx()): self.assertEqual(a , e) self.assertEqual(actual.dmEy().size() , expected.dmEy().size()) for a, e in zip(actual.dmEy(), expected.dmEy()): self.assertEqual(a , e) self.assertEqual(actual.dsumEt().size() , expected.dsumEt().size()) for a, e in zip(actual.dsumEt(), expected.dsumEt()): self.assertEqual(a , e) self.assertEqual(actual.dSignificance().size() , expected.dSignificance().size()) for a, e in zip(actual.dSignificance(), expected.dSignificance()): self.assertEqual(a , e) self.assertEqual(actual.mEtCorr().size(), expected.mEtCorr().size()) for a, e in zip(actual.mEtCorr(), expected.mEtCorr()): # self.assertEqual(a.mex , e.mex) # self.assertEqual(a.mey , e.mey) # self.assertEqual(a.sumet , e.sumet) # self.assertEqual(a.significance , e.significance) pass actualSigMatrix = actual.getSignificanceMatrix() expectedSigMatrix = expected.getSignificanceMatrix() self.assertEqual(actualSigMatrix.GetNrows(), expectedSigMatrix.GetNrows()) self.assertEqual(actualSigMatrix.GetNcols(), expectedSigMatrix.GetNcols()) self.assertEqual(actualSigMatrix.GetNoElements(), expectedSigMatrix.GetNoElements()) for irow in 
range(actualSigMatrix.GetNrows()): for icol in range(actualSigMatrix.GetNcols()): self.assertEqual(actualSigMatrix(irow, icol), expectedSigMatrix(irow, icol)) def assert_recoLeafCandidate(self, actual, expected): # size_t self.assertEqual(actual.numberOfDaughters() , expected.numberOfDaughters() ) self.assertEqual(actual.numberOfMothers() , expected.numberOfMothers() ) # int self.assertEqual(actual.charge() , expected.charge() ) self.assertEqual(actual.threeCharge() , expected.threeCharge() ) # double self.assertEqual(actual.p() , expected.p() ) self.assertEqual(actual.energy() , expected.energy() ) self.assertEqual(actual.et() , expected.et() ) self.assertEqual(actual.mass() , expected.mass() ) self.assertEqual(actual.massSqr() , expected.massSqr() ) self.assertEqual(actual.mt() , expected.mt() ) self.assertEqual(actual.mtSqr() , expected.mtSqr() ) self.assertEqual(actual.px() , expected.px() ) self.assertEqual(actual.py() , expected.py() ) self.assertEqual(actual.pz() , expected.pz() ) self.assertAlmostEqual(actual.pt(), expected.pt(), 5) self.assertEqual(actual.phi() , expected.phi() ) self.assertEqual(actual.theta() , expected.theta() ) self.assertEqual(actual.eta() , expected.eta() ) # self.assertEqual(actual.rapidity() , expected.rapidity() ) # self.assertEqual(actual.y() , expected.y() ) self.assertEqual(actual.vx() , expected.vx() ) self.assertEqual(actual.vy() , expected.vy() ) self.assertEqual(actual.vz() , expected.vz() ) # int self.assertEqual(actual.pdgId() , expected.pdgId() ) self.assertEqual(actual.status() , expected.status() ) # bool self.assertEqual(actual.longLived() , expected.longLived() ) self.assertEqual(actual.massConstraint() , expected.massConstraint() ) # double self.assertEqual(actual.vertexChi2() , expected.vertexChi2() ) self.assertEqual(actual.vertexNdof() , expected.vertexNdof() ) self.assertEqual(actual.vertexNormalizedChi2() , expected.vertexNormalizedChi2() ) # bool self.assertEqual(actual.hasMasterClone() , 
expected.hasMasterClone() ) self.assertEqual(actual.hasMasterClonePtr() , expected.hasMasterClonePtr() ) self.assertEqual(actual.isElectron() , expected.isElectron() ) self.assertEqual(actual.isMuon() , expected.isMuon() ) self.assertEqual(actual.isStandAloneMuon() , expected.isStandAloneMuon() ) self.assertEqual(actual.isGlobalMuon() , expected.isGlobalMuon() ) self.assertEqual(actual.isTrackerMuon() , expected.isTrackerMuon() ) self.assertEqual(actual.isCaloMuon() , expected.isCaloMuon() ) self.assertEqual(actual.isPhoton() , expected.isPhoton() ) self.assertEqual(actual.isConvertedPhoton() , expected.isConvertedPhoton() ) self.assertEqual(actual.isJet() , expected.isJet() )
ptall = TH1F("pt all","pt all",100,-10.,10.) ptmiss = TH1F("pt miss","pt miss",100,-10.,10.) ptr = TH1F("pt ratio","pt ratio",100,-10.,10.) d0all = TH1F("d0 all","d0 all",100,-5.,5.) d0miss = TH1F("d0 miss","d0 miss",100,-5.,5.) d0r = TH1F("d0 ratio","d0 ratio",100,-5.,5.) dpall = TH1F("dp all","dp all",100,-0.5,0.5) dpmiss = TH1F("dp miss","dp miss",100,-0.5,0.5) dpr = TH1F("dp ratio","dp ratio",100,-0.5,0.5) algoall = TH1F("algo all","algo all",20,0,20) algomiss = TH1F("algo miss","algo miss",20,0,20) algor = TH1F("algo ratio","algo ratio",20,0,20) # for event in eventsRef: for i in range(0, eventsRef.size()): a= eventsRef.to(i) a= eventsNew.to(i) print "Event", i a=eventsRef.getByLabel(label, tracksRef) a=eventsNew.getByLabel(label, tracksNew) trRef = [] j = 0 for track in tracksRef.product(): if (track.found()<8) : continue if (track.quality(track.qualityByName(quality))) : dp = track.outerPosition().phi()-track.outerMomentum().phi() trRef.append((j,track.charge()*track.pt(), track.phi()+track.eta(),track.eta(),track.found(), track.hitPattern(), track.ndof(), track.chi2(), track.dxy(),dp, track.algo() )) j += 1 a = trRef.sort(key=lambda tr: tr[2]) print j
sys.exit(2) ###################################################### # Handles and labels for recovering RECO variables ###################################################### halosummaryH = Handle('reco::BeamHaloSummary') halosummaryL = ('BeamHaloSummary', '', 'RECO') cschalodataH = Handle('reco::CSCHaloData') cschalodataL = ('CSCHaloData', '', 'RECO') ###################################################### # Loop over events ###################################################### events = Events(inname) nEvents = events.size() for event in events: # Search for the input event eventAux = event.eventAuxiliary() id = eventAux.id() run = id.run() lumi = id.luminosityBlock() eventnum = id.event() if not (event_run == run and event_lb == lumi and event_id == eventnum): continue # Correct event: print out CSCHaloData event.getByLabel(halosummaryL, halosummaryH)
inputDir += '/' + os.listdir(inputDir)[0] + '/0000' file_count = 0 print('Starting analyzing files in %s' % inputDir) for inputFile in os.listdir(inputDir): if args.short: if file_count == 25: break #For quick tests if 'inLHE' not in inputFile: inFile = os.path.join(inputDir, inputFile) file_count += 1 events = Events(inFile) nevents = int(events.size()) if file_count % 10 == 0: print("Analyzing file # %d" % file_count) #print("Number of events: %d" % nevents) for event in events: event.getByLabel(metLabel, mets) event.getByLabel(genParticleLabel, genParticles) MET_hist.Fill(mets.product()[0].pt()) numLQ = [0, 0, 0] if args.LQhist:
def loop(fname) :
    """Muon impact-parameter-significance (SIP) study on a MiniAOD file.

    Matches reconstructed slimmed muons to generator-level muons, fills
    SIP2D/SIP3D/track dxy/dz significance histograms, and draws them all
    on one canvas saved as 'sipall<fname>.png'.
    """
    # Handles / labels for the MiniAOD collections we read
    genPars = Handle("vector<reco::GenParticle>")
    genParsLabel = "prunedGenParticles"
    gPar = [Handle("vector<pat::PackedGenParticle>"), "packedGenParticles"]
    vertices = [Handle("vector<reco::Vertex>"), "offlineSlimmedPrimaryVertices" ]
    mus = [Handle("vector<pat::Muon>"), "slimmedMuons"]
    # Histograms (40 bins, significance range -10..10 unless noted)
    sip2d = TH1F("SIP2D","SIP2D",40,-10.,10.)
    sip3d = TH1F("SIP3D","SIP3D",40,-10.,10.)
    sipxy = TH1F("tk2d","TK SIPXY",40,-10.,10.)
    sipz = TH1F("tk3z","TK SIPZ",40,-10.,10.)
    sip3d_l = TH1F("SIP2D l","SIP2D l",40,-10.,10.)
    sip3d_h = TH1F("SIP2D h","SIP2D h",40,-10.,10.)
    sip3d_best = TH1F("SIP2D best","SIP2D best",40,-10.,10.)
    vert = TH1F("zpv","zpv",100,-10.,10.)
    sip_v = TProfile("SIP2D vs zpv","SIP2D best vs zpv",50,0.,5.,0.,10.)
    #
    eventsRef = Events(fname)
    # counters: wrong-PV events overall / with >1 gen muon / passing channel cuts
    nw=0
    nehpt=0
    nwhpt=0
    nech=0
    nwch=0
    #
    for i in range(0, eventsRef.size()):
        a= eventsRef.to(i)
        print "Event", i
        a=eventsRef.getByLabel(genParsLabel, genPars)
        # generator primary vertex z: first status-1 particle with vz != 0
        zpv=0
        gpvp = genPars.product()[0].vertex()
        for part in genPars.product():
            if (part.vz()!=0) :
                zpv = part.vz()
                gpvp = part.vertex()
                break
        print "zpv " , zpv
        #
        # count gen leptons in various pt/eta acceptance windows
        nmu=0
        nel=0
        nch1=0
        nch2=0
        gmu = []
        for part in genPars.product():
            if (part.status()!=1) : continue
            if (abs(part.pdgId())==13 and part.pt()>5 and abs(part.eta())<2.4) :
                gmu.append((part.phi(),part.eta(), part.charge()*part.pt()))
            if (abs(part.pdgId())==13 and part.pt()>5 and abs(part.eta())<2.4) : nmu+=1
            if (abs(part.pdgId())==11 and part.pt()>7 and abs(part.eta())<2.4) : nel+=1
            if (abs(part.pdgId())==13 and part.pt()>8 and abs(part.eta())<2.4) : nch1+=1
            if (abs(part.pdgId())==11 and part.pt()>10 and abs(part.eta())<2.4) : nch1+=1
            if (abs(part.pdgId())==13 and part.pt()>20 and abs(part.eta())<2.4) : nch2+=1
            if (abs(part.pdgId())==11 and part.pt()>20 and abs(part.eta())<2.4) : nch2+=1
            # if (abs(part.pdgId())==13):
            #     print "part", part.phi(),part.eta(), part.pt(), part.vz(), part.vx(), part.vy(), part.mass(), part.pdgId(), part.status()
        # print "nmu ", nmu,nel
        # print gmu
        # find the reconstructed vertex closest in z to the gen vertex
        a=eventsRef.getByLabel(vertices[1],vertices[0])
        minz=99999.
        iv=0
        ii=0
        pv = vertices[0].product()[0]
        pvp = vertices[0].product()[0].position()
        nv = vertices[0].product().size()
        for v in vertices[0].product() :
            if (abs(v.z()-zpv) < minz) :
                minz=abs(v.z()-zpv)
                iv = ii
            ii+=1
        print "pv ", iv, minz
        # iv != 0 means the leading reco vertex is not the gen one ("wrong PV")
        if (iv!=0) : nw+=1
        #
        # if (nmu+nel>3) :
        if (nmu>1) :
            nehpt+=1
            if (iv!=0) : nwhpt+=1
        #
        # channel selection on the acceptance counters
        # if (nch1>0 and nch2>0) :
        if (nch1<1) : continue
        #
        nech+=1
        if (iv!=0) : nwch+=1
        # collect global muons (pt > 5) with their IP significances:
        # indices: 3 = dB(2)/edB(2) (3D), 4 = dB(1)/edB(1) (2D),
        #          5 = dxy signif wrt gen vertex, 6 = dz signif, 7 = pixel layers
        a=eventsRef.getByLabel(mus[1],mus[0])
        pmus = []
        for mu in mus[0].product() :
            if (mu.pt()<5) : continue
            # if ( mu.isTrackerMuon() or mu.isGlobalMuon()) :
            if ( mu.isGlobalMuon()) :
                pmus.append(( mu.phi(), mu.eta(), mu.pt()*mu.charge(), mu.dB(2)/mu.edB(2), mu.dB(1)/mu.edB(1), mu.track().dxy(gpvp)/mu.track().dxyError(), mu.track().dz(gpvp)/mu.track().dzError(), mu.track().hitPattern().pixelLayersWithMeasurement() ))
                # print 'mu', iv, mu.phi(), mu.eta(), mu.pt(), mu.dB(2)/mu.edB(2), mu.dB(1)/mu.edB(1), mu.isTrackerMuon(), mu.isGlobalMuon()
        # print pmus
        # gen-reco matching: charge*pt ratio within [0.5, 2.0] and dR2 < 0.04
        matches = []
        i=0
        for g in gmu :
            j = 0
            for mu in pmus :
                j+=1
                if ( g[2]/mu[2] < 0.5 or g[2]/mu[2] > 2.0 ) : continue
                dr = dR2(g[0],g[1],mu[0],mu[1])
                if ( dr > 0.04 ) : continue
                matches.append((i,j-1, dr, abs(1.-g[2]/mu[2])))
                #print "matched mu", mu
            i+=1
        if (len(matches)<1 ) : continue
        vert.Fill((pv.z()-zpv)/pv.zError())
        # fill per-match histograms; keep the best (smallest pt-ratio distance)
        # match per gen muon for sip3d_best
        k=matches[0][0]
        best = 99999
        dr = 999999
        for m in matches :
            # if (abs(pv.z()-zpv)<3*pv.zError()) :
            # split by number of pixel layers (>2 vs <=2)
            if(pmus[m[1]][7]>2) :
                # if (nv<21) :
                sip3d_l.Fill(pmus[m[1]][3])
            else :
                sip3d_h.Fill(pmus[m[1]][3])
            sip2d.Fill(pmus[m[1]][4])
            sip3d.Fill(pmus[m[1]][3])
            sipxy.Fill(pmus[m[1]][5])
            sipz.Fill(pmus[m[1]][6])
            sip_v.Fill(abs(pv.z()-zpv)/pv.zError(),abs(pmus[m[1]][5]))
            if (m[0]!=k) :
                # new gen muon: flush the previous one's best value
                sip3d_best.Fill(best)
                k=m[0]
                best = pmus[m[1]][4]
                dr = m[3]
            else :
                if (m[3]<dr ):
                    dr = m[3]
                    best = pmus[m[1]][4]
        if (dr<9999) : sip3d_best.Fill(best)
    print "wrong pv", nw, nehpt, nwhpt,nech,nwch
    # draw all histograms on one 2x4 canvas with a unit-gaussian overlay
    c1 = TCanvas( 'c1', fname, 200, 10, 1000, 1400 )
    gStyle.SetOptStat(111111)
    gStyle.SetHistLineWidth(2)
    c1.Divide(2,4)
    c1.cd(1).SetLogy()
    sip2d.DrawNormalized()
    e = TF1("q","0.5*exp(-0.5*x*x)/sqrt(6.28)",-10.,10.)
    e.Draw("same")
    c1.cd(2).SetLogy()
    sip3d.DrawNormalized()
    e.Draw("same")
    c1.cd(3).SetLogy()
    sipxy.DrawNormalized()
    e.Draw("same")
    c1.cd(4).SetLogy()
    sipz.DrawNormalized()
    e.Draw("same")
    c1.cd(5).SetLogy()
    sip3d_l.DrawNormalized()
    e.Draw("same")
    c1.cd(6).SetLogy()
    sip3d_h.DrawNormalized()
    e.Draw("same")
    # sip3d_best.DrawNormalized()
    c1.cd(7).SetLogy()
    vert.DrawNormalized()
    # ev = TF1("qv","0.2*exp(-0.5*x*x)/sqrt(6.28)",-10.,10.)
    # ev.Draw("same")
    c1.cd(8)
    sip_v.Draw()
    c1.Print("sipall"+fname+".png")
# FWLite navigation smoke test: stream through all events printing the
# 'Thing' product, rewind twice, then random-access every index.
# `events` and `thingHandle` are created earlier in the file.
indicies = events.fileIndicies()
for event in events:
    current = event.fileIndicies()
    if current != indicies:
        # crossed a file boundary in the multi-file event stream
        print("new file")
        indicies = current
    event.getByLabel('Thing', thingHandle)
    product = thingHandle.product()
    for idx in range(product.size()):
        print(product.at(idx).a)

# Rewind and make one pass without touching any product.
events.toBegin()
for event in events:
    pass

# Rewind again and re-read the same product to check repeatability.
events.toBegin()
for event in events:
    event.getByLabel('Thing', thingHandle)
    product = thingHandle.product()
    for idx in range(product.size()):
        print(product.at(idx).a)

# Random access: every index must be reachable.
for i in range(events.size()):
    if not events.to(i):
        print("failed to go to index ", i)
        exit(1)
print("Python test succeeded!")
def main():
    """Compare CHS / Puppi / re-run-Puppi jets against gen jets.

    Fills matched/unmatched jet counts vs. number of primary vertices,
    then plots purity and efficiency with TEfficiency.
    """
    # All the inputs we need to retrieve the EDM collections from MiniAOD
    jet_handle, jet_label = Handle("std::vector<pat::Jet>"), "slimmedJets"
    pjet_handle, pjet_label = Handle(
        "std::vector<pat::Jet>"), "slimmedJetsPuppi"
    njet_handle, njet_label = Handle("std::vector<pat::Jet>"), "patJetsPuppi"
    vertex_handle, vertex_label = Handle(
        "std::vector<reco::Vertex>"), "offlineSlimmedPrimaryVertices"
    genjet_handle, genjet_label = Handle(
        "std::vector<reco::GenJet>"), "slimmedGenJets"

    # Initialize histograms: 10 NPV bins of width 7 in [0, 63)
    npv_bins = np.arange(0, 70, 7)

    def make_npv_hist(title):
        # SetDirectory(0) detaches the histogram from the current ROOT file
        # so it survives file closure
        h = ROOT.TH1F(title, title, len(npv_bins) - 1, array('d', npv_bins))
        h.SetDirectory(0)
        return h

    # for purity
    h_chsjet_npv = make_npv_hist("recojets_npv_CHS")
    h_chsjet_matched_npv = make_npv_hist("matchedrecojets_npv_CHS")
    h_puppijet_npv = make_npv_hist("recojets_npv_PUPPI")
    h_puppijet_matched_npv = make_npv_hist("matchedrecojets_npv_PUPPI")
    h_newpuppijet_npv = make_npv_hist("recojets_npv_newPUPPI")
    h_newpuppijet_matched_npv = make_npv_hist("matchedrecojets_npv_newPUPPI")
    # for efficiency
    h_gen_chsjet_npv = make_npv_hist("genjets_npv_CHS")
    h_gen_chsjet_matched_npv = make_npv_hist("matchedgenjets_npv_CHS")
    h_gen_puppijet_npv = make_npv_hist("genjets_npv_PUPPI")
    h_gen_puppijet_matched_npv = make_npv_hist("matchedgenjets_npv_PUPPI")
    h_gen_newpuppijet_npv = make_npv_hist("genjets_npv_newPUPPI")
    h_gen_newpuppijet_matched_npv = make_npv_hist(
        "matchedgenjets_npv_newPUPPI")

    events = Events(
        "root://cmsxrootd.fnal.gov//store/user/aandreas/share/jumpshot/ReminiAOD.root"
    )
    nevents = int(events.size())
    print "total events: ", nevents

    for ievent, event in enumerate(events):
        # progress indicator (percentage, overwritten in place)
        print_same_line(str(round(100. * ievent / nevents, 2)) + '%')

        # Retrieve collections
        event.getByLabel(jet_label, jet_handle)
        event.getByLabel(pjet_label, pjet_handle)
        event.getByLabel(njet_label, njet_handle)
        event.getByLabel(genjet_label, genjet_handle)
        event.getByLabel(vertex_label, vertex_handle)

        # Unpack for easier handling
        nvtx = vertex_handle.product().size()
        genjets = genjet_handle.product()
        jets = jet_handle.product()
        pjets = pjet_handle.product()
        njets = njet_handle.product()

        # CHS jets
        fill_histograms(jets, genjets, nvtx, h_chsjet_npv,
                        h_chsjet_matched_npv, h_gen_chsjet_npv,
                        h_gen_chsjet_matched_npv)
        # Puppi jets
        fill_histograms(pjets, genjets, nvtx, h_puppijet_npv,
                        h_puppijet_matched_npv, h_gen_puppijet_npv,
                        h_gen_puppijet_matched_npv)
        # New puppi jets
        fill_histograms(njets, genjets, nvtx, h_newpuppijet_npv,
                        h_newpuppijet_matched_npv, h_gen_newpuppijet_npv,
                        h_gen_newpuppijet_matched_npv)

    # Plug our numerator and denominator histograms into TEfficiency
    # and run a quick plotting code
    prt_npv = []
    prt_npv.append(ROOT.TEfficiency(h_chsjet_matched_npv, h_chsjet_npv))
    prt_npv.append(ROOT.TEfficiency(h_puppijet_matched_npv, h_puppijet_npv))
    prt_npv.append(
        ROOT.TEfficiency(h_newpuppijet_matched_npv, h_newpuppijet_npv))
    plot(prt_npv, "purity_npv", "number of reconstructed vertices", "Purity")

    # Same for efficiency
    eff_npv = []
    eff_npv.append(ROOT.TEfficiency(h_gen_chsjet_matched_npv,
                                    h_gen_chsjet_npv))
    eff_npv.append(
        ROOT.TEfficiency(h_gen_puppijet_matched_npv, h_gen_puppijet_npv))
    eff_npv.append(
        ROOT.TEfficiency(h_gen_newpuppijet_matched_npv,
                         h_gen_newpuppijet_npv))
    plot(eff_npv, "efficiency_npv", "number of reconstructed vertices",
         "Efficiency")
# Split the input file list into totalSec sections, keep only the section
# selected by --sec, and run the mistag analyzer over those events.
# NOTE(review): fragment is truncated at the trailing `if`; its body continues
# beyond this view. Section indexing appears 1-based (the == case takes the
# remainder slice) — confirm against the option defaults.
totalSection = options.totalSec
section = options.sec
numFiles = len(files) / totalSection  # Python 2 integer division
if section == totalSection:
    secFiles = files[section*numFiles:]
else :
    secFiles = files[section*numFiles:(section+1)*numFiles]
files = secFiles
print files
events = Events (files)
ntotal = events.size()
analyzer = mistag_maker(options.outfile, options.seed, False)
count = 0
print "Start looping"
for event in events:
    count = count + 1
    # progress printout on the first event and every 10k after
    if count % 10000 == 0 or count == 1:
        percentDone = float(count) / float(ntotal) * 100.0
        print 'Processing Job {0:2.0f} {1:10.0f}/{2:10.0f} : {3:5.2f} %'.format(section, count, ntotal, percentDone )
    error = analyzer.analyze(event)
    analyzer.reset()
    if count > options.Nevents and options.Nevents > 0:
def run(self, runParams):
    """Run the analysis once per pT cut over the configured input files.

    For each cut in runParams.pTCuts, iterates the FWLite events, honours
    the optional event whitelist / maxEvents limit / modulo job splitting,
    and delegates per-event work to self.processEvent. Progress is shown
    as a dot bar (modulo == 0) or percentage lines (modulo != 0).
    """
    self.runParams = runParams
    InitializeFWLite()
    outputFileObject = TFile(runParams.outputFilePath, "RECREATE")
    totalEventCount = 0
    Break = False  # set inside the event loop to abort the outer cut loop
    startTime = time.time()
    # NOTE(review): these assign attributes on the namedtuple *class* object,
    # not on instances — it works as a plain attribute bag here, but a simple
    # class or instance would be the conventional choice.
    genJetsObj = namedtuple('Obj', ['handle', 'label'])
    genJetsObj.handle = Handle('std::vector<reco::GenJet>')
    genJetsObj.label = "ak5GenJets"
    infoObj = namedtuple('Obj', ['handle', 'label'])
    infoObj.handle = Handle('<GenEventInfoProduct>')
    infoObj.label = "generator"
    genParticlesObj = namedtuple('Obj', ['handle', 'label'])
    genParticlesObj.handle = Handle('std::vector<reco::GenParticle>')
    genParticlesObj.label = "genParticles"
    for currentCutIndex, currentCut in enumerate(runParams.pTCuts):
        if Break:
            break
        events = Events(self.runParams.inputFileList)
        histos = Histos(str(currentCut), outputFileObject)
        if runParams.modulo == 0:
            print 'Processing ' + str(events.size(
            )) + ' events @ pTCut=' + str(currentCut) + 'GeV'
            if not runParams.useDebugOutput:
                # open the progress bar frame; dots are appended below
                sys.stdout.write( "[ ]\r[" )
                sys.stdout.flush()
        percentage50 = 0  # progress steps already drawn (50 = 100%)
        for currentEventIndex, currentEvent in enumerate(events):
            # optional explicit event whitelist
            if len(runParams.events) > 0:
                if currentEventIndex not in runParams.events:
                    continue
            totalEventCount = totalEventCount + 1
            if runParams.maxEvents > -1 and totalEventCount > runParams.maxEvents:
                Break = True
                break
            if runParams.useDebugOutput:
                print "Event #" + str(currentEventIndex)
            else:
                # one progress tick per 2% of the sample
                percentageNow = 50. * currentEventIndex / events.size()
                if percentageNow >= percentage50 + 1:
                    percentage50 = percentage50 + 1
                    if runParams.modulo == 0:
                        sys.stdout.write('.')
                        sys.stdout.flush()
                    else:
                        print "pT" + str(currentCut) + " P" + str(
                            runParams.moduloRest + 1) + "/" + str(
                                runParams.modulo) + ": " + str(
                                    100. * currentEventIndex / events.size()) + "%"
            # modulo job splitting: process only events whose index matches
            # moduloRest (Python 2 `<>` is the old not-equal operator)
            if (runParams.modulo <> 0):
                currentModIndex = currentEventIndex % runParams.modulo
                if currentModIndex <> runParams.moduloRest:
                    continue
            self.processEvent(infoObj, genJetsObj, genParticlesObj,
                              currentCut, currentCutIndex, currentEventIndex,
                              currentEvent, histos)
        endTime = time.time()
        totalTime = endTime - startTime
        histos.finalize()
        del histos
    if not runParams.useDebugOutput:
        print(".\n")
    print "%i events in %.2fs (%.2f events/sec)" % (
        totalEventCount, totalTime, totalEventCount / totalTime)
eventsRef = Events("run258425_reco.root") # "run259721_reco.root") #"step3.root") clusRef = Handle("edmNew::DetSetVector<SiStripCluster>") label = "siStripClusters" vRef = Handle("vector<reco::Vertex>") vl = "offlinePrimaryVertices" clusHIP = [0,0,0,0,0,0,0,0,0] nv=0 for i in range(0, eventsRef.size()): #for i in range(0, 20): if (i%100 == 0) : print i a= eventsRef.to(i) # print "Event", i a=eventsRef.getByLabel(label, clusRef) a=eventsRef.getByLabel(vl,vRef) nvtx = vRef.product().size() if (nvtx<2) : continue; nv+=nvtx clusters = clusRef.product().data() for ids in clusRef.product().ids() : for k in range(0,ids.size): clus = clusters[ids.offset+k] if (clus.amplitudes().size()>6 and clus.charge()/clus.amplitudes().size()>250) : clusHIP[(ids.id >>25)&0x7]+=1
# FWLite navigation smoke test (Python 2 variant): stream through all events
# printing the 'Thing' product, rewind twice, then random-access every index.
# `events`, `thingHandle` and the initial `indicies` are set before this
# fragment in the original file.
for event in events:
    newIndicies = event.fileIndicies()
    if indicies != newIndicies:
        # crossed a file boundary in the multi-file event stream
        print "new file"
        indicies = newIndicies
    event.getByLabel ('Thing', thingHandle)
    thing = thingHandle.product()
    for loop in range (thing.size()):
        print thing.at (loop).a
# rewind and make one pass without reading products
events.toBegin()
for event in events:
    pass
# rewind again and re-read the same product to check repeatability
events.toBegin()
for event in events:
    event.getByLabel ('Thing', thingHandle)
    thing = thingHandle.product()
    for loop in range (thing.size()):
        print thing.at (loop).a
# random access: every index must be reachable
for i in xrange(events.size()):
    if not events.to(i):
        print "failed to go to index ",i
        exit(1)
print "Python test succeeded!"
class Loop: '''Manages looping and navigation on a set of events.''' def __init__(self, name, component, cfg): '''Build a loop object. listOfFiles can be "*.root". name will be used to make the output directory''' self.name = name self.cmp = component self.cfg = cfg self.events = Events(glob.glob(self.cmp.files)) self.triggerList = TriggerList(self.cmp.triggers) if self.cmp.isMC or self.cmp.isEmbed: self.trigEff = TriggerEfficiency() self.trigEff.tauEff = None self.trigEff.lepEff = None if self.cmp.tauEffWeight is not None: self.trigEff.tauEff = getattr(self.trigEff, self.cmp.tauEffWeight) if self.cmp.muEffWeight is not None: self.trigEff.lepEff = getattr(self.trigEff, self.cmp.muEffWeight) # here create outputs self.regions = H2TauTauRegions(self.cfg.cuts) self.output = Output(self.name, self.regions) if self.cmp.name == 'DYJets': self.outputFakes = Output(self.name + '_Fakes', self.regions) self.logger = logging.getLogger(self.name) self.logger.addHandler( logging.FileHandler('/'.join([self.name, 'log.txt']))) self.counters = Counters() self.averages = {} # self.histograms = [] self.InitHandles() self.InitCounters() def LoadCollections(self, event): '''Load all collections''' for str, handle in self.handles.iteritems(): handle.Load(event) # could do something clever to get the products... a setattr maybe? 
if self.cmp.isMC: for str, handle in self.mchandles.iteritems(): handle.Load(event) if self.cmp.isEmbed: for str, handle in self.embhandles.iteritems(): handle.Load(event) def InitHandles(self): '''Initialize all handles for the products we want to read''' self.handles = {} self.mchandles = {} self.embhandles = {} #MUON self.handles['cmgTauMuCorFullSelSVFit'] = AutoHandle( 'cmgTauMuCorSVFitFullSel', 'std::vector<cmg::DiObject<cmg::Tau,cmg::Muon>>') ## self.handles['cmgTauMu'] = AutoHandle( 'cmgTauMu', ## 'std::vector<cmg::DiObject<cmg::Tau,cmg::Muon>>') self.handles['cmgTriggerObjectSel'] = AutoHandle( 'cmgTriggerObjectSel', 'std::vector<cmg::TriggerObject>>') if self.cmp.isMC and self.cmp.vertexWeight is not None: self.handles['vertexWeight'] = AutoHandle(self.cmp.vertexWeight, 'double') self.handles['vertices'] = AutoHandle('offlinePrimaryVertices', 'std::vector<reco::Vertex>') #MUON self.handles['leptons'] = AutoHandle('cmgMuonSel', 'std::vector<cmg::Muon>') self.handles['jets'] = AutoHandle('cmgPFJetSel', 'std::vector<cmg::PFJet>') self.mchandles['genParticles'] = AutoHandle( 'genParticlesStatus3', 'std::vector<reco::GenParticle>') self.embhandles['generatorWeight'] = AutoHandle( ('generator', 'weight'), 'double') def InitCounters(self): '''Initialize histograms physics objects, counters.''' # declaring counters and averages self.counters = Counters() self.counters.addCounter('triggerPassed') self.counters.addCounter('exactlyOneDiTau') self.counters.addCounter('singleDiTau') self.counters.addCounter('VBF') # self.averages['triggerWeight']=Average('triggerWeight') self.averages['lepEffWeight'] = Average('lepEffWeight') self.averages['tauEffWeight'] = Average('tauEffWeight') self.averages['vertexWeight'] = Average('vertexWeight') self.averages['generatorWeight'] = Average('generatorWeight') self.averages['eventWeight'] = Average('eventWeight') def ToEvent(self, iEv): '''Navigate to a given event and process it.''' # output event structure self.event = Event() # 
navigating to the correct FWLite event self.iEvent = iEv self.events.to(iEv) self.LoadCollections(self.events) # reading CMG objects from the handle #COLIN this kind of stuff could be automatized cmgDiTaus = self.handles['cmgTauMuCorFullSelSVFit'].product() cmgLeptons = self.handles['leptons'].product() self.event.triggerObject = self.handles['cmgTriggerObjectSel'].product( )[0] self.event.vertices = self.handles['vertices'].product() cmgJets = self.handles['jets'].product() if self.cmp.isMC: genParticles = self.mchandles['genParticles'].product() self.event.genParticles = map(GenParticle, genParticles) # converting them into my own python objects self.event.diTaus = [DiTau(diTau) for diTau in cmgDiTaus] self.event.leptons = [Lepton(lepton) for lepton in cmgLeptons] # self.event.dirtyJets = [ Jet(jet) for jet in cmgJets if testJet(jet, self.cfg.cuts) ] self.event.dirtyJets = [] for cmgJet in cmgJets: jet = Jet(cmgJet) if self.cmp.isMC: scale = random.gauss(self.cmp.jetScale, self.cmp.jetSmear) jet.scaleEnergy(scale) if not testJet(cmgJet, self.cfg.cuts): continue self.event.dirtyJets.append(jet) self.counters.counter('triggerPassed').inc('a: All events') if not self.triggerList.triggerPassed(self.event.triggerObject): return False self.counters.counter('triggerPassed').inc('b: Trig OK ') self.counters.counter('exactlyOneDiTau').inc('a: any # of di-taus ') if len(self.event.diTaus) == 0: print 'Event %d : No tau mu.' % i return False if len(self.event.diTaus) > 1: # print 'Event %d : Too many tau-mus: n = %d' % (iEv, len(self.event.diTaus)) #COLIN could be nice to have a counter class # which knows why events are rejected. make histograms with that. 
self.logger.warning('Ev %d: more than 1 di-tau : n = %d' % (iEv, len(self.event.diTaus))) self.counters.counter('exactlyOneDiTau').inc('b: at least 1 di-tau ') #MUONS if not leptonAccept(self.event.leptons): return False self.counters.counter('exactlyOneDiTau').inc('c: exactly one lepton ') self.event.diTau = self.event.diTaus[0] if len(self.event.diTaus) > 1: self.event.diTau = bestDiTau(self.event.diTaus) elif len(self.event.diTaus) == 1: self.counters.counter('exactlyOneDiTau').inc( 'd: exactly 1 di-tau ') else: raise ValueError('should not happen!') cuts = self.cfg.cuts self.counters.counter('singleDiTau').inc('a: best di-tau') self.event.tau = Tau(self.event.diTau.leg1()) if self.event.tau.decayMode() == 0 and \ self.event.tau.calcEOverP() < 0.2: return False else: self.counters.counter('singleDiTau').inc('b: E/p > 0.2 ') if self.event.tau.pt() > cuts.tauPt: self.counters.counter('singleDiTau').inc( 'c: tau pt > {ptCut:3.1f}'.format(ptCut=cuts.tauPt)) else: return False self.event.lepton = Lepton(self.event.diTau.leg2()) if self.event.lepton.pt() > cuts.lepPt: self.counters.counter('singleDiTau').inc( 'd: lep pt > {ptCut:3.1f}'.format(ptCut=cuts.lepPt)) else: return False if abs(self.event.lepton.eta()) < cuts.lepEta: self.counters.counter('singleDiTau').inc( 'e: lep |eta| <{etaCut:3.1f}'.format(etaCut=cuts.lepEta)) else: return False ################## Starting from here, we have the di-tau ############### # clean up jet collection self.event.jets = cleanObjectCollection( self.event.dirtyJets, masks=[self.event.diTau.leg1(), self.event.diTau.leg2()], deltaRMin=0.5) # print '-----------' # if len(self.event.dirtyJets)>0: # print 'Dirty:' # print '\n\t'.join( map(str, self.event.dirtyJets) ) # print self.event.diTau # print 'Clean:' # print '\n\t'.join( map(str, self.event.jets) ) self.counters.counter('VBF').inc('a: all events ') if len(self.event.jets) > 1: self.counters.counter('VBF').inc('b: at least 2 jets ') self.event.vbf = VBF(self.event.jets) if 
self.event.vbf.mjj > cuts.VBF_Mjj: self.counters.counter('VBF').inc( 'c: Mjj > {mjj:3.1f}'.format(mjj=cuts.VBF_Mjj)) if abs(self.event.vbf.deta) > cuts.VBF_Deta: self.counters.counter('VBF').inc( 'd: deta > {deta:3.1f}'.format(deta=cuts.VBF_Deta)) if len(self.event.vbf.centralJets) == 0: self.counters.counter('VBF').inc('e: no central jet ') matched = None if self.cmp.name == 'DYJets': leg1DeltaR, leg2DeltaR = self.event.diTau.match( self.event.genParticles) if leg1DeltaR>-1 and leg1DeltaR < 0.1 and \ leg2DeltaR>-1 and leg2DeltaR < 0.1: matched = True else: matched = False self.event.eventWeight = 1 # self.event.triggerWeight = 1 self.event.vertexWeight = 1 self.event.tauEffWeight = 1 self.event.lepEffWeight = 1 self.event.generatorWeight = 1 if self.cmp.isMC: self.event.vertexWeight = self.handles['vertexWeight'].product()[0] if self.cmp.isEmbed: self.event.generatorWeight = self.embhandles[ 'generatorWeight'].product()[0] if self.cmp.isMC or self.cmp.isEmbed: if self.trigEff.tauEff is not None: self.event.tauEffWeight = self.trigEff.tauEff( self.event.tau.pt()) #MUONS if self.trigEff.lepEff is not None: self.event.lepEffWeight = self.trigEff.lepEff( self.event.lepton.pt(), self.event.lepton.eta()) self.event.eventWeight = self.event.vertexWeight * \ self.event.tauEffWeight * \ self.event.lepEffWeight * \ self.event.generatorWeight # self.averages['triggerWeight'].add( self.event.triggerWeight ) self.averages['tauEffWeight'].add(self.event.tauEffWeight) self.averages['lepEffWeight'].add(self.event.lepEffWeight) self.averages['vertexWeight'].add(self.event.vertexWeight) self.averages['generatorWeight'].add(self.event.generatorWeight) self.averages['eventWeight'].add(self.event.eventWeight) regionName = self.regions.test(self.event) if matched is None or matched is True: self.output.Fill(self.event, regionName) elif matched is False: self.outputFakes.Fill(self.event, regionName) else: raise ValueError('should not happen!') return True def Loop(self, nEvents=-1): 
'''Loop on a given number of events, and call ToEvent for each event.''' print 'starting loop' # self.InitCounters() nEvents = int(nEvents) for iEv in range(0, self.events.size()): if iEv == nEvents: break if iEv % 1000 == 0: print 'event', iEv try: self.ToEvent(iEv) except ValueError: #COLIN should not be a value error break self.logger.warning(str(self)) def Write(self): '''Write all histograms to their root files''' # for hist in self.histograms: # hist.Write() self.output.Write() if self.cmp.name == 'DYJets': self.outputFakes.Write() def __str__(self): name = 'Loop %s' % self.name component = str(self.cmp) counters = map(str, self.counters.counters) strave = map(str, self.averages.values()) return '\n'.join([name, component] + counters + strave)
from DataFormats.FWLite import Handle, Events from ROOT import gROOT, gStyle, TCanvas, TF1, TFile, TTree, gRandom, TH1F, TH2F import os eventsOri = Events("step3_ori.root") eventsNew = Events("step3.root") tracksOri = Handle("std::vector<reco::Track>") tracksNew = Handle("std::vector<reco::Track>") label = "generalTracks" quality = "highPurity" for i in range(0, eventsOri.size()): a = eventsOri.to(i) a = eventsNew.to(i) # print "Event", i a = eventsOri.getByLabel(label, tracksOri) a = eventsNew.getByLabel(label, tracksNew) ntOri = tracksOri.product().size() ntNew = tracksOri.product().size() if (ntOri != ntNew): print i, ntOri, ntNew
def convertGENSIM(infiles, outfilename, Nmax=-1, isPythia=False): """Loop over GENSIM events and save custom trees.""" start1 = time.time() lqids = [46] if isPythia else [9000002, 9000006] print ">>> loading files..." events = Events(infiles) outfile = TFile(outfilename, 'RECREATE') print ">>> creating trees and branches..." tree_event = TTree('event', 'event') tree_jet = TTree('jet', 'jet') tree_mother = TTree('mother', 'mother') tree_decay = TTree('decay', 'decay') tree_assoc = TTree('assoc', 'assoc') # EVENT tree_event.addBranch('nbgen', 'i') tree_event.addBranch('nbcut', 'i') tree_event.addBranch('ntgen', 'i') tree_event.addBranch('njet', 'i') tree_event.addBranch('nlepton', 'i') tree_event.addBranch('ntau', 'i') tree_event.addBranch('ntaucut', 'i') tree_event.addBranch('nnu', 'i') tree_event.addBranch('nlq', 'i') tree_event.addBranch('ntau_assoc', 'i') tree_event.addBranch('ntau_decay', 'i') tree_event.addBranch('nbgen_decay', 'i') tree_event.addBranch('met', 'f') tree_event.addBranch('jpt1', 'f') tree_event.addBranch('jpt2', 'f') tree_event.addBranch('sumjet', 'f') tree_event.addBranch('dphi_jj', 'f') tree_event.addBranch('deta_jj', 'f') tree_event.addBranch('dr_jj', 'f') tree_event.addBranch('ncentral', 'i') tree_event.addBranch('mjj', 'f') tree_event.addBranch('lq1_mass', 'f') tree_event.addBranch('lq2_mass', 'f') tree_event.addBranch('lq1_pt', 'f') tree_event.addBranch('lq2_pt', 'f') tree_event.addBranch('tau1_pt', 'f') tree_event.addBranch('tau1_eta', 'f') tree_event.addBranch('tau2_pt', 'f') tree_event.addBranch('tau2_eta', 'f') tree_event.addBranch('st', 'f') # scalar sum pT tree_event.addBranch('st_met', 'f') # scalar sum pT with MET tree_event.addBranch('weight', 'f') # LQ DECAY tree_mother.addBranch('pid', 'i') tree_mother.addBranch('moth', 'i') tree_mother.addBranch('status', 'i') tree_mother.addBranch('pt', 'f') tree_mother.addBranch('eta', 'f') tree_mother.addBranch('phi', 'f') tree_mother.addBranch('mass', 'f') tree_mother.addBranch('inv', 'f') 
tree_mother.addBranch('ndau', 'i') tree_mother.addBranch('dau', 'i') tree_mother.addBranch('dphi_ll', 'f') tree_mother.addBranch('deta_ll', 'f') tree_mother.addBranch('dr_ll', 'f') tree_mother.addBranch('st', 'f') # scalar sum pT tree_mother.addBranch('st_met', 'f') # scalar sum pT with MET tree_mother.addBranch('weight', 'f') # FROM LQ DECAY tree_decay.addBranch('pid', 'i') tree_decay.addBranch('pt', 'f') tree_decay.addBranch('eta', 'f') tree_decay.addBranch('phi', 'f') tree_decay.addBranch('lq_mass', 'f') tree_decay.addBranch('ptvis', 'f') tree_decay.addBranch('type', 'i') tree_decay.addBranch('isBrem', 'i') tree_decay.addBranch('weight', 'f') # NOT FROM LQ DECAY (ASSOCIATED) tree_assoc.addBranch('pid', 'i') tree_assoc.addBranch('moth', 'i') tree_assoc.addBranch('pt', 'f') tree_assoc.addBranch('ptvis', 'f') tree_assoc.addBranch('eta', 'f') tree_assoc.addBranch('phi', 'f') tree_assoc.addBranch('weight', 'f') # JETS tree_jet.addBranch('pt', 'f') tree_jet.addBranch('eta', 'f') tree_jet.addBranch('phi', 'f') tree_jet.addBranch('weight', 'f') hist_LQ_decay = TH1F('LQ_decay', "LQ decay", 60, -30, 30) handle_gps, label_gps = Handle( 'std::vector<reco::GenParticle>'), 'genParticles' handle_jets, label_jets = Handle('std::vector<reco::GenJet>'), 'ak4GenJets' handle_met, label_met = Handle('vector<reco::GenMET>'), 'genMetTrue' handle_weight, label_weight = Handle('GenEventInfoProduct'), 'generator' evtid = 0 sec_per_evt = 0.023 # seconds per event Ntot = Nmax if Nmax > 0 else events.size() step = stepsize(Ntot) print ">>> start processing %d events, ETA %s..." 
% ( Ntot, formatTimeShort(sec_per_evt * Ntot)) start_proc = time.time() # LOOP OVER EVENTS for event in events: #print '='*30 #print evtid if Nmax > 0 and evtid >= Nmax: break if evtid > 0 and evtid % step == 0: print ">>> processed %4s/%d events, ETA %s" % ( evtid, Ntot, ETA(start_proc, evtid + 1, Ntot)) evtid += 1 event.getByLabel(label_gps, handle_gps) gps = handle_gps.product() event.getByLabel(label_jets, handle_jets) jets = handle_jets.product() event.getByLabel(label_met, handle_met) met = handle_met.product() event.getByLabel(label_weight, handle_weight) gweight = handle_weight.product() weight = gweight.weight() # GEN PARTICLES gps_mother = [p for p in gps if isFinal(p) and abs(p.pdgId()) in [42]] gps_final = [ p for p in gps if isFinal(p) and abs(p.pdgId()) in [5, 6, 15, 16] + lqids ] gps_mother = [ p for p in gps_final if abs(p.pdgId()) in lqids and p.status() > 60 ] #not(moth.numberOfDaughters()==2 and abs(moth.daughter(0).pdgId()) in lqids) gps_bgen = [ p for p in gps_final if abs(p.pdgId()) == 5 and p.status() == 71 ] gps_bcut = [p for p in gps_bgen if p.pt() > 20 and abs(p.eta()) < 2.5] gps_tgen = [p for p in gps_final if abs(p.pdgId()) == 6] #[-1:] gps_nugen = [p for p in gps_final if abs(p.pdgId()) == 16] gps_tau = [ p for p in gps_final if abs(p.pdgId()) == 15 and p.status() == 2 ] gps_tau.sort(key=lambda p: p.pt(), reverse=True) gps_taucut = [p for p in gps_tau if p.pt() > 20 and abs(p.eta()) < 2.5] #print '-'*10 #for p in gps_tgen: # printParticle(p) #if gps_tgen: # print "has top" #for p in gps_nugen: # printParticle(p) # REMOVE TOP QUARK if its final daughter is also in the list for top in gps_tgen[:]: dau = top while abs(dau.daughter(0).pdgId()) == 6: dau = dau.daughter(0) if dau != top and dau in gps_tgen: gps_tgen.remove(top) # REMOVE JET-LEPTON OVERLAP jets, dummy = cleanObjectCollection(jets, gps_tau, dRmin=0.5) njets = 0 sumjet = 0 jets30 = [] for jet in jets: if jet.pt() > 30 and abs(jet.eta()) < 5: sumjet += jet.pt() njets += 1 
tree_jet.pt[0] = jet.pt() tree_jet.eta[0] = jet.eta() tree_jet.phi[0] = jet.phi() tree_jet.weight[0] = weight tree_jet.Fill() jets30.append(jet) # MULTIPLICITIES tree_event.nlq[0] = len(gps_mother) tree_event.nbcut[0] = len(gps_bcut) tree_event.nbgen[0] = len(gps_bgen) tree_event.ntgen[0] = len(gps_tgen) tree_event.njet[0] = njets tree_event.nlepton[0] = len(gps_tau) tree_event.ntau[0] = len(gps_tau) tree_event.ntaucut[0] = len(gps_taucut) tree_event.nnu[0] = len(gps_nugen) # JETS tree_event.met[0] = met[0].pt() tree_event.sumjet[0] = sumjet if len(jets30) >= 2: centrajpt1s = findCentrajpt1s(jets30[:2], jets30[2:]) tree_event.ncentral[0] = len(centrajpt1s) else: tree_event.ncentral[0] = -9 if (len(jets30) >= 2): tree_event.jpt1[0] = jets30[0].pt() tree_event.jpt2[0] = jets30[1].pt() tree_event.dphi_jj[0] = deltaPhi(jets30[0].phi(), jets30[1].phi()) tree_event.deta_jj[0] = jets30[0].eta() - jets30[1].eta() tree_event.dr_jj[0] = deltaR(jets30[0].eta(), jets30[0].phi(), jets30[1].eta(), jets30[1].phi()) dijetp4 = jets30[0].p4() + jets30[1].p4() tree_event.mjj[0] = dijetp4.M() elif (len(jets30) == 1): tree_event.jpt1[0] = jets30[0].pt() tree_event.jpt2[0] = -1 tree_event.dphi_jj[0] = -9 tree_event.deta_jj[0] = -9 tree_event.dr_jj[0] = -1 tree_event.mjj[0] = -1 else: tree_event.jpt1[0] = -1 tree_event.jpt2[0] = -1 tree_event.dphi_jj[0] = -9 tree_event.deta_jj[0] = -9 tree_event.dr_jj[0] = -1 tree_event.mjj[0] = -1 # SCALAR SUM PT if len(gps_taucut) >= 2 and len(gps_bcut) >= 1: st = 0 #gps_taucut.sort(key=lambda p: p.pt(), reverse=True) gps_bcut.sort(key=lambda p: p.pt(), reverse=True) #taus_assoc.sort(key=lambda p: p.pt(), reverse=True) #taus_decay.sort(key=lambda p: p.pt(), reverse=True) #bgen_decay.sort(key=lambda p: p.pt(), reverse=True) for part in gps_taucut[2:] + gps_bcut[1:]: st += part.pt() stmet = st + met[0].pt() else: st = -1 stmet = -1 tree_event.tau1_pt[0] = gps_tau[0].pt() tree_event.tau1_eta[0] = gps_tau[0].eta() tree_event.tau2_pt[0] = gps_tau[1].pt() 
tree_event.tau2_eta[0] = gps_tau[1].eta() tree_event.st[0] = st tree_event.st_met[0] = stmet tree_mother.st[0] = st tree_mother.st_met[0] = stmet tree_event.weight[0] = weight #print 'len, gps_mother = ', len(gps_mother) #if len(gps_mother)==1: # print gps_mother[0].pdgId(), gps_mother[0].status(), gps_mother[0].pt(), gps_mother[0].eta(), gps_mother[0].phi() # print '1 (ndaughter, daughter pdgid) =', gps_mother[0].numberOfDaughters(), gps_mother[0].daughter(0).pdgId(), '(pdgId, status, pt, eta, phi) = ', gps_mother[0].pdgId(), gps_mother[0].status(), gps_mother[0].pt(), gps_mother[0].eta(), gps_mother[0].phi() #if len(gps_mother)>=2: # print '2 (ndaughter, daughter 1/2 pdgid) =', gps_mother[0].numberOfDaughters(), gps_mother[0].daughter(0).pdgId(), gps_mother[0].daughter(1).pdgId(), '(pdgId, status, pt, eta, phi) = ', gps_mother[0].pdgId(), gps_mother[0].status(), gps_mother[0].pt(), gps_mother[0].eta(), gps_mother[0].phi() # print '2 (ndaughter, daughter 1/2 pdgid) =', gps_mother[1].numberOfDaughters(), gps_mother[1].daughter(0).pdgId(), gps_mother[1].daughter(1).pdgId(), '(pdgId, status, pt, eta, phi) = ', gps_mother[1].pdgId(), gps_mother[1].status(), gps_mother[1].pt(), gps_mother[1].eta(), gps_mother[1].phi() # TAU taus_assoc = [] for gentau in gps_tau: while gentau.status() != 2: gentau = gentau.daughter(0) genfinDaughters = finalDaughters(gentau, []) genptvis = p4sumvis(genfinDaughters).pt() # CHECK MOTHER taumoth = gentau.mother(0) mothpid = abs(taumoth.pdgId()) from_LQ = False #from_had = False # from hadron decay #print '-'*30 while mothpid != 2212: #print taumoth.pdgId() if mothpid in lqids: from_LQ = True break elif 100 < mothpid < 10000: #and mothpid!=2212: #from_had = True break taumoth = taumoth.mother(0) mothpid = abs(taumoth.pdgId()) # ASSOC if not from_LQ: tree_assoc.pt[0] = gentau.pt() tree_assoc.ptvis[0] = genptvis tree_assoc.eta[0] = gentau.eta() tree_assoc.phi[0] = gentau.phi() tree_assoc.pid[0] = gentau.pdgId() tree_assoc.moth[0] = 
taumoth.pdgId() tree_assoc.weight[0] = weight tree_assoc.Fill() #if not from_had: taus_assoc.append(gentau) # B QUARK for genb in gps_bgen: bmoth = genb.mother(0) mothpid = abs(bmoth.pdgId()) from_LQ = False while mothpid != 2212: if mothpid in lqids: from_LQ = True break bmoth = bmoth.mother(0) mothpid = abs(bmoth.pdgId()) if not from_LQ: tree_assoc.pt[0] = genb.pt() tree_assoc.ptvis[0] = -1 tree_assoc.eta[0] = genb.eta() tree_assoc.phi[0] = genb.phi() tree_assoc.pid[0] = genb.pdgId() tree_assoc.moth[0] = bmoth.pdgId() tree_assoc.weight[0] = weight tree_assoc.Fill() # MOTHER LQ #print '-'*80 taus_decay = [] bgen_decay = [] gps_mother.sort(key=lambda p: p.pt(), reverse=True) for moth in gps_mother: dau_pid = 0 pair = [] if moth.numberOfDaughters() == 2: if moth.daughter(0).pdgId() in [ 21, 22 ] or moth.daughter(1).pdgId() in [21, 22]: continue if abs(moth.daughter(0).pdgId() ) in lqids: # single production with t-channel LQ continue lq_moth = moth.mother(0) while abs(lq_moth.pdgId()) in lqids: lq_moth = lq_moth.mother(0) for i in range(moth.numberOfDaughters()): #print '\t', dau.pdgId() dau = moth.daughter(i) # TAU isBrem = False if abs(dau.pdgId()) == 15: while dau.status() != 2: dau = dau.daughter(0) if dau.numberOfDaughters() == 2 and abs( dau.daughter(0).pdgId()) == 15 and dau.daughter( 1).pdgId() == 22: #print "This is brems !?!" 
isBrem = True else: taus_decay.append(dau) # BOTTOM QUARK elif abs(dau.pdgId()) == 5: dau_pid = dau.pdgId() bgen_decay.append(dau) # TOP QUARK elif abs(dau.pdgId()) == 6: dau_pid = dau.pdgId() newdau = dau while abs(newdau.daughter(0).pdgId()) == 6: newdau = newdau.daughter(0) if isFinal(newdau): dau = newdau pair.append(dau.p4()) tree_decay.lq_mass[0] = moth.mass() tree_decay.pid[0] = dau.pdgId() tree_decay.pt[0] = dau.pt() tree_decay.eta[0] = dau.eta() tree_decay.phi[0] = dau.phi() tree_decay.isBrem[0] = isBrem if abs(dau.pdgId()) == 15: finDaughters = finalDaughters(dau, []) ptvis = p4sumvis(finDaughters).pt() tree_decay.ptvis[0] = ptvis decaymode = tauDecayMode(dau) tree_decay.type[0] = decaydict[decaymode] #print decaymode, 'vis pt = ', ptvis , 'tau pt = ', dau.pt() if ptvis > dau.pt(): print "%s, vis pt = %s, tau pt = %s " % ( decaymode, ptvis, dau.pt()) + '!' * 30 else: tree_decay.ptvis[0] = dau.pt() tree_decay.type[0] = -1 tree_decay.weight[0] = weight tree_decay.Fill() if abs(moth.pdgId()) in lqids: hist_LQ_decay.Fill(dau.pdgId()) if len(pair) == 2: tree_mother.inv[0] = (pair[0] + pair[1]).mass() tree_mother.dphi_ll[0] = deltaPhi(pair[0].phi(), pair[1].phi()) tree_mother.deta_ll[0] = pair[0].eta() - pair[1].eta() tree_mother.dr_ll[0] = deltaR(pair[0].eta(), pair[0].phi(), pair[1].eta(), pair[1].phi()) else: tree_mother.inv[0] = -1 tree_mother.dphi_ll[0] = -99 tree_mother.deta_ll[0] = -99 tree_mother.dr_ll[0] = -99 tree_mother.pid[0] = moth.pdgId() tree_mother.moth[0] = lq_moth.pdgId() tree_mother.status[0] = moth.status() tree_mother.mass[0] = moth.mass() tree_mother.pt[0] = moth.pt() tree_mother.eta[0] = moth.eta() tree_mother.phi[0] = moth.phi() tree_mother.ndau[0] = len(pair) tree_mother.dau[0] = dau_pid # save PDG ID for quark daughter tree_mother.weight[0] = weight tree_mother.Fill() if len(gps_mother) == 1: tree_event.lq1_mass[0] = gps_mother[0].mass() tree_event.lq1_pt[0] = gps_mother[0].pt() tree_event.lq2_mass[0] = -1 tree_event.lq2_pt[0] = -1 
elif len(gps_mother) >= 2: tree_event.lq1_mass[0] = gps_mother[0].mass() tree_event.lq1_pt[0] = gps_mother[0].pt() tree_event.lq2_mass[0] = gps_mother[1].mass() tree_event.lq2_pt[0] = gps_mother[1].pt() else: tree_event.lq1_mass[0] = -1 tree_event.lq1_pt[0] = -1 tree_event.lq2_mass[0] = -1 tree_event.lq2_pt[0] = -1 tree_event.ntau_assoc[0] = len(taus_assoc) tree_event.ntau_decay[0] = len(taus_decay) tree_event.nbgen_decay[0] = len(bgen_decay) tree_event.Fill() print ">>> processed %4s events in %s" % ( evtid, formatTime(time.time() - start_proc)) print ">>> writing to output file %s..." % (outfilename) outfile.Write() outfile.Close() print ">>> done in in %s" % (formatTime(time.time() - start1))
def main(): identifier = inputFiles[0][inputFiles[0].rfind('/') + 1:].replace( '.root', '').replace('_step2', '').replace('_AODSIM', '').replace('_*', '').replace('*', '') identifier += 'nFiles' + str(len(inputFiles)) fnew = TFile('histsEDM_' + identifier + '.root', 'recreate') events = Events(inputFiles) #events = Events('/uscms_data/d3/sbein/LongLiveTheChi/22Apr2017/pMSSM12_MCMC1_27_200970_step2_AODSIM.root') hGenChiEtaPos = TH1F("hGenChiEtaPos", "hGenChiEtaPos", 50, 0, 5) hGenChiEtaNeg = TH1F("hGenChiEtaNeg", "hGenChiEtaNeg", 50, 0, 5) hDrChipmTrack = TH1F("hDrChipmTrack", "hDrChipmTrack", 50, 0, .2) hDrChipmPFCand = TH1F("hDrChipmPFCand", "hDrChipmPFCand", 50, 0, .2) hDrRandomTrackTrack = TH1F("hDrRandomTrackTrack", "hDrRandomTrackTrack", 50, 0, .2) hChipmLabLengthAll = TH1F("hChipmLabLengthAll", "hChipmLabLengthAll", 50, -0.5, 3.5) hChipmLabLengthPass = TH1F("hChipmLabLengthPass", "hChipmLabLengthPass", 50, -0.5, 3.5) hDrMinVsChipmLabLength = TH2F("hDrMinVsChipmLabLength", "hDrMinVsChipmLabLength", 25, -1.0, 3.0, 20, 0, 0.2) hDrMinPFVsChipmLabLength = TH2F("hDrMinPFVsChipmLabLength", "hDrMinPFVsChipmLabLength", 25, -1.0, 3.0, 20, 0, 0.2) hChipmLabLengthVsEtaAll = TH2F("hChipmLabLengthVsEtaAll", "hChipmLabLengthVsEtaAll", 100, -2.4, 2.4, 40, .5, 3.5) hChipmLabLengthVsEta2MohPass = TH2F("hChipmLabLengthVsEta2MohPass", "hChipmLabLengthVsEta2MohPass", 100, -2.4, 2.4, 40, .5, 3.5) hChipmLabLengthVsEta5MohPass = TH2F("hChipmLabLengthVsEtaEta5MohPass", "hChipmLabLengthVsEtaEta5MohPass", 100, -2.4, 2.4, 40, .5, 3.5) hChipmLabLengthVsEtaPixelOnlyPass = TH2F( "hChipmLabLengthVsEtaPixelOnlyPass", "hChipmLabLengthVsEtaPixelOnlyPass", 100, -2.4, 2.4, 40, .5, 3.5) hChipmLabLengthVsEtaPixelOnly0MohPass = TH2F( "hChipmLabLengthVsEtaPixelOnly0MohPass", "hChipmLabLengthVsEtaPixelOnly0MohPass", 100, -2.4, 2.4, 40, .5, 3.5) hSigDeDx = TH1F("hSigDeDx", "hSigDeDx", 100, 0, 25) histoStyler(hSigDeDx, kBlack) hBkgDeDx_ = TH1F("hBkgDeDx", "hBkgDeDx", 100, 0, 25) histoStyler(hBkgDeDx, 
kBlack) hSigIsolation = TH1F("hSigIsolation", "hSigIsolation", 100, 0, 2.5) histoStyler(hSigIsolation, kBlack) hBkgIsolation = TH1F("hBkgIsolation", "hBkgIsolation", 100, 0, 2.5) histoStyler(hBkgIsolation, kBlack) hSigMiniIsolation = TH1F("hSigMiniIsolation", "hSigMiniIsolation", 100, 0, 2.5) histoStyler(hSigMiniIsolation, kBlack) hBkgMiniIsolation = TH1F("hBkgMiniIsolation", "hBkgMiniIsolation", 100, 0, 2.5) histoStyler(hBkgMiniIsolation, kBlack) hSigChi2oNdof = TH1F("hSigChi2oNdof", "hSigChi2oNdof", 100, 0, 10) histoStyler(hSigChi2oNdof, kBlack) hBkgChi2oNdof = TH1F("hBkgChi2oNdof", "hBkgChi2oNdof", 100, 0, 10) histoStyler(hBkgChi2oNdof, kBlack) hSigDeDxVsP = TH2F("hSigDeDxVsP", "hSigDeDxVsP", 50, 0, 1500, 40, 0, 20) hBkgDeDxVsP = TH2F("hBkgDeDxVsP", "hBkgDeDxVsP", 50, 0, 1500, 40, 0, 20) #handle_muons = Handle ("std::vector<reco::Muon>") #label_muons = ('muons') #handle_tracks = Handle ("vector<reco::TrackExtra>") handle_tracks = Handle("vector<reco::Track>") label_tracks = ('generalTracks') handle_pfcands = Handle("std::vector<reco::PFCandidate>") label_pfcands = ('particleFlow') dEdxTrackHandle = Handle("edm::ValueMap<reco::DeDxData>") label_dEdXtrack = 'dedxHarmonic2' handle_genparticles = Handle("vector<reco::GenParticle>") label_genparticles = ('genParticlePlusGeant') liboffuturehists = {} listoffuturehists = [] nevents = events.size() #nevents = 100 for ievent, event in enumerate(events): if ievent >= nevents: break if ievent % 20 == 0: print 'analyzing event %d of %d' % (ievent, nevents) #event.getByLabel (label_muons, handle_muons) event.getByLabel(label_tracks, handle_tracks) event.getByLabel(label_pfcands, handle_pfcands) event.getByLabel(label_dEdXtrack, dEdxTrackHandle) event.getByLabel(label_genparticles, handle_genparticles) # get the product #muons = handle_muons.product() tracks = handle_tracks.product() pfcands = handle_pfcands.product() dEdxTrack = dEdxTrackHandle.product() genparticles = handle_genparticles.product() #print '='*10 if not 
(dEdxTrack.size() == tracks.size()): print 'bad times' exit(0) listOfOffLimitFakes = [] for gp in genparticles: #print dir(gp) #exit(0) if not (abs(gp.pdgId()) == 1000024 and gp.status()): continue if gp.eta() > 0: hGenChiEtaPos.Fill(gp.eta()) elif gp.eta() < 0: hGenChiEtaNeg.Fill(-gp.eta()) if not (gp.pt() > 15): continue try: log10decaylength = TMath.Log10( TMath.Sqrt( pow(gp.daughter(0).vx() - gp.vx(), 2) + pow(gp.daughter(0).vy() - gp.vy(), 2))) except: print 'no daughters!' log10decaylength = -1 chipmTlv = TLorentzVector() chipmTlv.SetPxPyPzE(gp.px(), gp.py(), gp.pz(), gp.energy()) hChipmLabLengthAll.Fill(log10decaylength) fillth2(hChipmLabLengthVsEtaAll, gp.eta(), log10decaylength) #===#pfcandidates drmin = 10 idx = -1 eta = -11 for ipfc, pfc in enumerate(pfcands): #print dir(pfc) pfcTlv = TLorentzVector() pfcTlv.SetPxPyPzE(pfc.px(), pfc.py(), pfc.pz(), pfc.pt()) dr = pfcTlv.DeltaR(chipmTlv) if dr < drmin: drmin = dr idx = ipfc hDrMinPFVsChipmLabLength.Fill(log10decaylength, drmin) hDrChipmPFCand.Fill(drmin) #===# drmin = 10 idx = -1 eta = -11 for itrack, track in enumerate(tracks): if not track.pt() > 10: continue if track.numberOfValidHits() == 0: continue if track.ndof() == 0: continue trkTlv = TLorentzVector() trkTlv.SetPxPyPzE(track.px(), track.py(), track.pz(), track.pt()) dr = trkTlv.DeltaR(chipmTlv) if dr < 0.05: listOfOffLimitFakes.append(itrack) if dr < drmin: drmin = dr idx = itrack if idx == -1: continue if verbose: print '+++++++++++++' print 'chargino pt=', gp.pt() print 'best matched pt = ', tracks[idx].pt() print 'drmin', drmin hDrMinVsChipmLabLength.Fill(log10decaylength, drmin) hDrChipmTrack.Fill(drmin) hitpattern = tracks[idx].hitPattern() if not (idx == reco.TrackRef(tracks, idx).index()): print 'highly unusual but no guaranteed concern' exit(0) if not (tracks[idx].numberOfValidHits() == hitpattern.numberOfValidHits()): print 'strangeness!' 
exit(0) #dedx = dEdxTrack.get(reco.TrackRef(tracks, idx).index()).dEdx() try: dedx = dEdxTrack.get(idx).dEdx() except: dedx = 1 print 'no dedx for index', idx chi2ondof = tracks[idx].chi2() / tracks[idx].ndof() if ievent == 0: #print dir(tracks[idx]) #print methods a = 1 if not drmin < 0.02: continue if not log10decaylength > 0: continue hChipmLabLengthPass.Fill(log10decaylength) hSigDeDx.Fill(dedx) hSigDeDxVsP.Fill(gp.p(), dedx) hSigChi2oNdof.Fill(chi2ondof) trkIso = calcTrackIso(track, tracks) trkJetIso = True #calcTrackJetIso(track, jets) trkMiniIso = calcMiniIso(track, tracks) if trkJetIso: hSigIsolation.Fill(trkIso) else: hSigIsolation.Fill(2.4) hSigMiniIsolation.Fill(trkMiniIso) moh = hitpattern.trackerLayersWithoutMeasurement( hitpattern.MISSING_OUTER_HITS) if hitpattern.numberOfValidTrackerHits( ) == hitpattern.numberOfValidPixelHits(): fillth2(hChipmLabLengthVsEtaPixelOnlyPass, gp.eta(), log10decaylength) if moh == 0: fillth2(hChipmLabLengthVsEtaPixelOnly0MohPass, gp.eta(), log10decaylength) if not moh >= 2: continue fillth2(hChipmLabLengthVsEta2MohPass, gp.eta(), log10decaylength) if not moh >= 5: continue fillth2(hChipmLabLengthVsEta5MohPass, gp.eta(), log10decaylength) for itrack, track in enumerate(tracks): if not track.pt() > 10: continue if itrack in listOfOffLimitFakes: continue if track.ndof() == 0: continue #dedx = dEdxTrack.get(reco.TrackRef(tracks, itrack).index()).dEdx() try: dedx = dEdxTrack.get(itrack).dEdx() except: dedx = 1 print 'no bkg dedx for index', idx hBkgDeDx_.Fill(dedx) hBkgDeDxVsP.Fill(track.p(), dedx) chi2ondof = track.chi2() / track.ndof() hBkgChi2oNdof.Fill(chi2ondof) hitpattern = track.hitPattern() trkIso = calcTrackIso(track, tracks) trkJetIso = True #calcTrackJetIso(track, jets) trkMiniIso = calcMiniIso(track, tracks) if trkJetIso: hBkgIsolation.Fill(trkIso) else: hBkgIsolation.Fill(2.4) hBkgMiniIsolation.Fill(trkMiniIso) fnew.cd() hGenChiEtaPos.Write() hGenChiEtaNeg.Write() hSigIsolation.Write() hSigMiniIsolation.Write() 
hBkgIsolation.Write() hBkgMiniIsolation.Write() hDrChipmTrack.Write() hDrChipmPFCand.Write() hDrRandomTrackTrack.Write() hChipmLabLengthAll.Write() hChipmLabLengthPass.Write() hDrMinVsChipmLabLength.Write() hDrMinPFVsChipmLabLength.Write() hSigDeDx.Write() hBkgDeDx_.Write() hSigChi2oNdof.Write() hBkgChi2oNdof.Write() hSigDeDxVsP.Write() hBkgDeDxVsP.Write() print 'just created', fnew.GetName() hChipmLabLengthVsEtaAll.Write() hChipmLabLengthVsEta2MohPass.Write() hChipmLabLengthVsEta5MohPass.Write() hChipmLabLengthVsEtaPixelOnlyPass.Write() hChipmLabLengthVsEtaPixelOnly0MohPass.Write() fnew.Close()
class TreeProducer: def __init__(self, DY='CC', verbose=False, debug=True, filenames=[], postfix='test', save_tree=True, save_histo=False): print "****** TreeProducer *****" print 'Running for ' + DY + ' Drell-Yan' self.DY = DY self.verbose = verbose self.debug = debug # save the TTree in the output file self.save_tree = save_tree # save the histograms in the output file self.save_histo = save_histo # open output file self.outfile = None if self.debug: self.outfile = ROOT.TFile( os.environ['CMSSW_BASE'] + '/src/Wmass/test/' + 'tree_' + postfix + '.root', "RECREATE") else: self.outfile = ROOT.TFile('tree.root', "RECREATE") # out tree self.outtree = None if self.save_tree: self.outtree = ROOT.TTree('tree', 'tree') # add histos #self.weights_for_histos = [0] self.weights_for_histos = range(109) self.coefficients_for_histos = [ 'A0', 'A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7' ] if self.save_histo: self.histos = add_histo2D(charges=['Wminus', 'Wplus'], var=['Wdress'], coeff=self.coefficients_for_histos, weights=self.weights_for_histos) # add branches to tree (needed even if self.save_tree=False) self.variables = add_vars(self.outtree) self.filenames = filenames if self.debug: if self.DY == 'NC': self.filenames.append( 'root://xrootd-cms.infn.it//store/mc/RunIISummer16MiniAODv2/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v2/120000/02A210D6-F5C3-E611-B570-008CFA197BD4.root' ) else: self.filenames.append( 'root://xrootd-cms.infn.it//store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0AF0207B-EFBE-E611-B4BE-0CC47A7FC858.root' ) print "Opening file..." self.events = Events(self.filenames) print "File opened.... Tot. 
num of events:", self.events.size() def run(self): # handles genH, genN = Handle( "std::vector<reco::GenParticle>"), "prunedGenParticles" infoH, infoN = Handle("GenEventInfoProduct"), "generator" lheH, lheN = Handle("LHEEventProduct"), "externalLHEProducer" start = time.time() # loop over the events ############################################### for i, event in enumerate(self.events): if i % 1000 == 0: print "Processing event", i, '/', self.events.size() if self.debug: if i % 100 == 0: print "Processing event", i, '/', self.events.size() if i > 1000: break # fill with default values fill_default(self.variables) # load LHE product event.getByLabel(lheN, lheH) lhe = lheH.product() hepeup = lhe.hepeup() self.variables['scale'][0] = hepeup.SCALUP self.variables['alphaQCD'][0] = hepeup.AQCDUP self.variables['alphaQED'][0] = hepeup.AQEDUP isW = False isWToMuNu = False Wp4_lhe = [0., 0., 0., 0., 0.] for p in range(hepeup.NUP): if self.verbose: print 'HEPEUP...', p, 'pdg:', hepeup.IDUP[ p], '(', hepeup.PUP[p][0], hepeup.PUP[p][ 1], hepeup.PUP[p][2], hepeup.PUP[p][3], ')' # first parton if p == 0: self.variables['id1'][0] = hepeup.IDUP[p] self.variables['x1'][0] = hepeup.PUP[p][3] / 6500. elif p == 1: self.variables['id2'][0] = hepeup.IDUP[p] self.variables['x2'][0] = hepeup.PUP[p][3] / 6500. 
if abs(hepeup.IDUP[p]) == 13: isWToMuNu = True if abs(hepeup.IDUP[p]) == (24 if self.DY == 'CC' else 23): isW = True for x in range(5): Wp4_lhe[x] = hepeup.PUP[p][x] Wp4 = ROOT.TLorentzVector(Wp4_lhe[0], Wp4_lhe[1], Wp4_lhe[2], Wp4_lhe[3]) self.variables['lhe_y'][0] = Wp4.Rapidity() self.variables['lhe_qt'][0] = Wp4.Pt() self.variables['lhe_mass'][0] = Wp4.M() self.variables['lhe_phi'][0] = Wp4.Phi() # consider only W->munu events if not (isW and isWToMuNu): continue # LHE weights norm = abs(lhe.originalXWGTUP()) wid = 0 for w in lhe.weights(): if wid >= 109: continue #if self.verbose: # print w.id, w.wgt self.variables['weights'][wid] = w.wgt / norm wid += 1 # read gen particles event.getByLabel(genN, genH) genParticles = list(genH.product()) # filter muons, neutrinos and gammas muons = [p for p in genParticles if isMuon(p)] neutrinos = [p for p in genParticles if isNeutrino(p)] gammas = [p for p in genParticles if isPhoton(p)] # sort by pt muons.sort(reverse=True) neutrinos.sort(reverse=True) # CC: consider events with 0 muons to study acceptance if self.DY == 'CC' and len(muons) == 0: if self.verbose: print "No muon in W>munu for event", i, ". Try to understand why:" print " > W rapidity: ", self.variables['lhe_y'][0] self.variables['muLost'][0] += 1.0 if len(neutrinos) > 0: self.variables[t + '_nu_pt'][0] = neutrinos[0].p4().Pt() self.variables[t + '_nu_eta'][0] = neutrinos[0].p4().Eta() self.variables[t + '_nu_phi'][0] = neutrinos[0].p4().Phi() # consider events with 0 neutrinos to study acceptance if self.DY == 'CC' and len(neutrinos) == 0: if self.verbose: print "No muon neutrinos for event", i, ". 
Try to understand why:" print " > W rapidity: ", self.variables['lhe_y'][0] print " > Muon rapidity: ", muons[0].p4().Rapidity() self.variables['nuLost'][0] += 1.0 if len(muons) > 0: self.variables[t + '_mu_pt'][0] = muons[0].p4().Pt() self.variables[t + '_mu_eta'][0] = muons[0].p4().Eta() self.variables[t + '_mu_phi'][0] = muons[0].p4().Phi() # NC: consider events with 1 muons to study acceptance if self.DY == 'NC' and len(muons) < 2: if self.verbose: print "No muons in Z>mumu for event", i, ". Try to understand why:" print " > Z rapidity: ", self.variables['lhe_y'][0] self.variables['muLost'][0] += 1.0 if len(muons) == 1: self.variables[t + '_nu_pt'][0] = muons[0].p4().Pt() self.variables[t + '_nu_eta'][0] = muons[0].p4().Eta() self.variables[t + '_nu_phi'][0] = muons[0].p4().Phi() # if no muons or no neutrinos, save the event and continue if self.DY == 'CC': if (len(muons) == 0 or len(neutrinos) == 0): if self.save_tree: self.outtree.Fill() continue elif self.DY == 'NC': if len(muons) < 2: if self.save_tree: self.outtree.Fill() continue # the muon is the first ranked by pt if CC else the mu+ mu = muons[0] if self.DY == 'NC': mu = (muons[0] if muons[0].pdgId() == -13 else muons[1]) self.variables['isFromW'][0] += int( isFromW(mu) if self.DY == 'CC' else isFromZ(mu)) self.variables['mu_charge'][0] = mu.pdgId() if self.verbose: printp('muon', mu, '') # the neutrino is the first ranked by pt if CC else the mu- nu = None if self.DY == 'CC': nu = neutrinos[0] elif self.DY == 'NC': nu = (muons[0] if muons[0].pdgId() == +13 else muons[1]) self.variables['isFromW'][0] += int( isFromW(nu) if self.DY == 'CC' else isFromZ(nu)) self.variables['nu_charge'][0] = nu.pdgId() if self.verbose: printp('neut', nu, '') # pre-FSR mother = mu mu_prefsr = mu while (mother.numberOfMothers() > 0): if self.verbose: printp('MOTH', mother, '') if abs(mother.pdgId()) == 13 and mother.statusFlags( ).isLastCopyBeforeFSR(): mu_prefsr = mother mother = mother.mother(0) mother = nu nu_prefsr = nu 
while (mother.numberOfMothers() > 0): if self.verbose: printp('MOTH', mother, '') if abs(mother.pdgId()) == 13 and mother.statusFlags( ).isLastCopyBeforeFSR(): nu_prefsr = mother mother = mother.mother(0) # standard dressing alorithm mu_fsr, nu_fsr = [], [] gammas.sort(reverse=True) for ng, g in enumerate(gammas): dR_mu = deltaR(mu, g) dR_nu = deltaR(nu, g) dR = (dR_mu if self.DY == 'CC' else min(dR_mu, dR_nu)) if dR < 0.1: if self.DY == 'CC': mu_fsr.append(g.p4()) if self.verbose: printp('>gam', g, 'dR:{:03.2f}'.format(dR) + ' to muon') elif self.DY == 'NC': if dR_mu < dR_nu: mu_fsr.append(g.p4()) if self.verbose: printp('>gam', g, 'dR:{:03.2f}'.format(dR) + ' to muon-') else: nu_fsr.append(g.p4()) if self.verbose: printp('>gam', g, 'dR:{:03.2f}'.format(dR) + ' to mu+') # bare mup4 = ROOT.TLorentzVector(mu.p4().Px(), mu.p4().Py(), mu.p4().Pz(), mu.p4().E()) nup4 = ROOT.TLorentzVector(nu.p4().Px(), nu.p4().Py(), nu.p4().Pz(), nu.p4().E()) # pre-FSR mup4_prefsr = ROOT.TLorentzVector(mu_prefsr.p4().Px(), mu_prefsr.p4().Py(), mu_prefsr.p4().Pz(), mu_prefsr.p4().E()) nup4_prefsr = copy.deepcopy( nup4) if self.DY == 'CC' else ROOT.TLorentzVector( nu_prefsr.p4().Px(), nu_prefsr.p4().Py(), nu_prefsr.p4().Pz(), nu_prefsr.p4().E()) # dressed mup4_recfsr = copy.deepcopy(mup4) for g in mu_fsr: mup4_recfsr += ROOT.TLorentzVector(g.Px(), g.Py(), g.Pz(), g.E()) nup4_recfsr = copy.deepcopy(nup4) if self.DY == 'NC': for g in nu_fsr: nup4_recfsr += ROOT.TLorentzVector(g.Px(), g.Py(), g.Pz(), g.E()) # list of p4 for W Wp4 = {} Wp4['Wbare'] = mup4 + nup4 # the mu+nu p4 after FSR Wp4['WpreFSR'] = mup4_prefsr + nup4_prefsr # the mu+nu p4 pre-FSR Wp4['Wdress'] = mup4_recfsr + nup4_recfsr # the mu+nu p4 w/ dressed mu for t in ['Wbare', 'Wdress', 'WpreFSR']: self.variables[t + '_mass'][0] = Wp4[t].M() self.variables[t + '_qt'][0] = Wp4[t].Pt() self.variables[t + '_y'][0] = Wp4[t].Rapidity() self.variables[t + '_phi'][0] = Wp4[t].Phi() mup4LV = mup4 nup4LV = nup4 if t == 'Wdress': mup4LV = 
mup4_recfsr nup4LV = nup4_recfsr elif t == 'WpreFSR': mup4LV = mup4_prefsr nup4LV = nup4_prefsr self.variables[t + '_mu_pt'][0] = mup4LV.Pt() self.variables[t + '_mu_eta'][0] = mup4LV.Eta() self.variables[t + '_mu_phi'][0] = mup4LV.Phi() self.variables[t + '_nu_pt'][0] = nup4LV.Pt() self.variables[t + '_nu_eta'][0] = nup4LV.Eta() self.variables[t + '_nu_phi'][0] = nup4LV.Phi() # the CS variables ps = boost_to_CS_root(Lp4=mup4LV, Wp4=Wp4[t]) self.variables[t + '_ECS'][0] = ps[0] self.variables[t + '_cosCS'][0] = ps[1] self.variables[t + '_phiCS'][0] = ps[2] if self.save_histo: if t not in ['Wdress']: continue for w in self.weights_for_histos: fill_coefficients( histos=self.histos, charge=self.variables['mu_charge'][0], coefficients_for_histos=self. coefficients_for_histos, var='Wdress', weight_name=w, ps_W=(Wp4[t].Rapidity(), Wp4[t].Pt()), ps_CS=(ps[1], ps[2]), weight=self.variables['weights'][w]) if self.DY == 'NC': fill_coefficients( histos=self.histos, charge=self.variables['nu_charge'][0], coefficients_for_histos=self. 
coefficients_for_histos, var='Wdress', weight_name=w, ps_W=(Wp4[t].Rapidity(), Wp4[t].Pt()), ps_CS=(-ps[1], ps[2] + math.pi - (2 * math.pi if (ps[2] + math.pi) > 2 * math.pi else 0.0)), weight=self.variables['weights'][w]) # fill the tree if self.save_tree: self.outtree.Fill() ############################################### stop = time.time() # save and close self.outfile.cd() if self.save_tree: self.outtree.Write("tree", ROOT.TObject.kOverwrite) if self.save_histo: for kq, q in self.histos.items(): print 'Charge: ' + kq self.outfile.mkdir(kq) for kv, v in q.items(): print '\tVar: ' + kv self.outfile.mkdir(kq + '/' + kv) for kc, c in v.items(): print '\t\tVar: ' + kc self.outfile.mkdir(kq + '/' + kv + '/' + kc) for kw, w in c.items(): print '\t\t\tWeight: ' + kw + '.....', w[ 0].GetEntries(), 'entries' self.outfile.cd(kq + '/' + kv + '/' + kc) w[0].Write('', ROOT.TObject.kOverwrite) w[1].Write('', ROOT.TObject.kOverwrite) self.outfile.cd() if self.debug and self.save_tree: add_vars(tree=self.outtree, debug=True) self.outfile.Close() print "Output file closed. Processed ", i, "events in " + "{:03.0f}".format( stop - start) + " sec.(" + "{:03.0f}".format( i / (stop - start)) + " Hz)"
def topbnv_fwlite(argv): ######## ## ## ## #### ######## ######## ###### ######## ## ## ######## ######## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ###### ## ## ## ## ## ## ###### ###### ## ## ## ###### ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ### ######## #### ## ######## ###### ## ####### ## ## options = getUserOptions(argv) ROOT.gROOT.Macro("rootlogon.C") #print argv #print options jets, jetLabel = Handle("std::vector<pat::Jet>"), "slimmedJets" muons, muonLabel = Handle("std::vector<pat::Muon>"), "slimmedMuons" electrons, electronLabel = Handle("std::vector<pat::Electron>"), "slimmedElectrons" gens, genLabel = Handle("std::vector<reco::GenParticle>"), "prunedGenParticles" #packedgens, packedgenLabel = Handle("std::vector<reco::packedGenParticle>"), "PACKEDgENpARTICLES" packedgens, packedgenLabel = Handle("std::vector<pat::PackedGenParticle>"), "packedGenParticles" rhos, rhoLabel = Handle("double"), "fixedGridRhoAll" vertices, vertexLabel = Handle("std::vector<reco::Vertex>"), "offlineSlimmedPrimaryVertices" genInfo, genInfoLabel = Handle("GenEventInfoProduct"), "generator" mets, metLabel = Handle("std::vector<pat::MET>"), "slimmedMETs" pileups, pileuplabel = Handle("std::vector<PileupSummaryInfo>"), "slimmedAddPileupInfo" # NEED HLT2 for 80x 2016 (maybe only TTBar? 
# https://twiki.cern.ch/twiki/bin/view/CMS/TopTrigger#Summary_for_2016_Run2016B_H_25_n triggerBits, triggerBitLabel = Handle("edm::TriggerResults"), ("TriggerResults","", "HLT") #triggerPrescales, triggerPrescalesLabel = Handle("pat::PackedTriggerPrescales"), ("PackedTriggerPrescales","", "HLT") #triggerPrescales, triggerPrescalesLabel = Handle("pat::PackedTriggerPrescales"), ("TriggerUserData","", "HLT") triggerPrescales, triggerPrescalesLabel = Handle("pat::PackedTriggerPrescales"), "patTrigger" f = ROOT.TFile(options.output, "RECREATE") f.cd() outtree = ROOT.TTree("T", "Our tree of everything") def bookFloatBranch(name, default): tmp = array('f', [default]) outtree.Branch(name, tmp, '%s/F' %name) return tmp def bookIntBranch(name, default): tmp = array('i', [default]) outtree.Branch(name, tmp, '%s/I' %name) return tmp def bookLongIntBranch(name, default): tmp = array('l', [default]) outtree.Branch(name, tmp, '%s/L' %name) return tmp ################################################################################# ################################################################################# # Jets jetdata = {} jetdata['njet'] = ['jetpt', 'jeteta', 'jetphi', 'jete', 'jetpx', 'jetpy', 'jetpz'] jetdata['njet'] += ['jetbtag0', 'jetbtag1', 'jetbtagsum'] jetdata['njet'] += ['jetarea', 'jetjec', 'jetNHF', 'jetNEMF', 'jetCHF', 'jetCHM', 'jetMUF', 'jetCEMF'] jetdata['njet'] += ['jetNumConst', 'jetNumNeutralParticles'] outJets = {} for key in jetdata.keys(): outJets[key] = array('i', [-1]) outtree.Branch(key, outJets[key], key+"/I") for branch in jetdata[key]: outJets[branch] = array('f', 16*[-1.]) outtree.Branch(branch, outJets[branch], '{0}[{1}]/F'.format(branch,key)) ################################################################################# ################################################################################# # Muons # https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideCMSDataAnalysisSchoolLPC2018Muons # 
https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideMuonIdRun2 # https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideMuonIdRun2#Muon_Identification muondata = {} muondata['nmuon'] = ['muonpt', 'muoneta', 'muonphi', 'muone', 'muonpx', 'muonpy', 'muonpz', 'muonq'] muondata['nmuon'] += ['muonsumchhadpt', 'muonsumnhadpt', 'muonsumphotEt', 'muonsumPUPt'] muondata['nmuon'] += ['muonIsLoose', 'muonIsMedium', 'muonIsTight', 'muonPFiso'] muondata['nmuon'] += ['muonPFIsoLoose', 'muonPFIsoMedium', 'muonPFIsoTight'] muondata['nmuon'] += ['muonMvaLoose', 'muonMvaMedium', 'muonMvaTight'] outMuons = {} for key in muondata.keys(): outMuons[key] = array('i', [-1]) outtree.Branch(key, outMuons[key], key+"/I") for branch in muondata[key]: outMuons[branch] = array('f', 16*[-1.]) outtree.Branch(branch, outMuons[branch], '{0}[{1}]/F'.format(branch,key)) ################################################################################# ################################################################################# # Electrons # https://twiki.cern.ch/twiki/bin/view/CMS/CutBasedElectronIdentificationRun2 # https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideCMSDataAnalysisSchoolLPC2018egamma electrondata = {} electrondata['nelectron'] = {} electrondata['nelectron']['F'] = ['electronpt', 'electroneta', 'electronphi', 'electrone', 'electronpx', 'electronpy', 'electronpz', 'electronq'] electrondata['nelectron']['F'] += ['electronTkIso', 'electronHCIso', 'electronECIso'] electrondata['nelectron']['I'] = ['electronIsLoose', 'electronIsMedium', 'electronIsTight'] # These are nominally integers datatypes = {"F":['f',-1.0], "I":['i',-1]} outElectrons = {} for key in electrondata.keys(): outElectrons[key] = array('i', [-1]) outtree.Branch(key, outElectrons[key], key+"/I") print(key) for datatype in electrondata[key]: print(datatype) for branch in electrondata[key][datatype]: outElectrons[branch] = array(datatypes[datatype][0], 16*[datatypes[datatype][1]]) outtree.Branch(branch, outElectrons[branch], 
'{0}[{1}]/{2}'.format(branch,key,datatype)) ############################################################################ ################################################################################# # MET metdata = ['metpt', 'metphi'] outMET = {} for key in metdata: outMET[key] = array('f', [-1]) outtree.Branch(key, outMET[key], key+"/F") ################################################################################# ################################################################################# # Pileup pudata = ['pu_wt'] outPileup = {} for key in pudata: outPileup[key] = array('f', [-1]) outtree.Branch(key, outPileup[key], key+"/F") ''' njet = array('i', [-1]) outtree.Branch('njet', njet, 'njet/I') jetpt = array('f', 16*[-1.]) outtree.Branch('jetpt', jetpt, 'jetpt[njet]/F') ''' purw = None # pileup reweighting histogram if options.isMC and not options.disablePileup: pileupReweightFile = ROOT.TFile('purw.root', 'READ') purw = pileupReweightFile.Get('pileup') ################################################################################# ################################################################################# # Trigger trigdata = {} trigdata['ntrig_muon'] = ['trig_muon'] trigdata['ntrig_electron'] = ['trig_electron'] trigdata['ntrig_dilepmue'] = ['trig_dilepmue'] trigdata['ntrig_dilepemu'] = ['trig_dilepemu'] trigdata['ntrig_dilepmumu'] = ['trig_dilepmumu'] trigdata['ntrig_dilepee'] = ['trig_dilepee'] outTriggers = {} for key in trigdata.keys(): print(key) outTriggers[key] = array('i', [-1]) outtree.Branch(key, outTriggers[key], key+"/I") for branch in trigdata[key]: # Save them as integers outTriggers[branch] = array('i', 8*[-1]) outtree.Branch(branch, outTriggers[branch], '{0}[{1}]/I'.format(branch,key)) trigger_tree_branches = { "SingleMuon":outTriggers['trig_muon'], "SingleElectron":outTriggers['trig_electron'], "DileptonMuE":outTriggers['trig_dilepmue'], "DileptonEMu":outTriggers['trig_dilepemu'], 
"DileptonMuMu":outTriggers['trig_dilepmumu'], "DileptonEE":outTriggers['trig_dilepee'] } ################################################################################# ################################################################################# # Vertex vertexdata = {} vertexdata['nvertex'] = ['vertexX', 'vertexY', 'vertexZ', 'vertexndof'] outVertex = {} for key in vertexdata.keys(): outVertex[key] = array('i', [-1]) outtree.Branch(key, outVertex[key], key+"/I") for branch in vertexdata[key]: outVertex[branch] = array('f', 64*[-1.]) outtree.Branch(branch, outVertex[branch], '{0}[{1}]/F'.format(branch,key)) ## ######## ######## ###### ####### ######## ######## ######## ###### ######## #### ####### ## ## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ###### ## ## ## ## ######## ######## ###### ## ## ## ## ## ## ## ## ###### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #### ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ### ## ## ###### ######## ## ###### ####### ## ## ## ## ######## ###### ## #### ####### ## ## ###### ROOT.gSystem.Load('libCondFormatsJetMETObjects') if options.isMC: # CHANGE THIS FOR DIFFERENT MCs down the road jecAK4 = createJEC('JECs/Summer/Summer16_23Sep2016V4_MC', ['L1FastJet', 'L2Relative', 'L3Absolute'], 'AK4PFchs') jecAK8 = createJEC('JECs/Summer/Summer16_23Sep2016V4_MC', ['L1FastJet', 'L2Relative', 'L3Absolute'], 'AK8PFchs') jecUncAK4 = ROOT.JetCorrectionUncertainty(ROOT.std.string('JECs/Summer/Summer16_23Sep2016V4_MC_Uncertainty_AK4PFchs.txt')) jecUncAK8 = ROOT.JetCorrectionUncertainty(ROOT.std.string('JECs/Summer/Summer16_23Sep2016V4_MC_Uncertainty_AK8PFchs.txt')) else: # CHANGE THIS FOR DATA DataJECs = DataJEC(jet_energy_corrections) # from within CMSSW: ROOT.gSystem.Load('libCondFormatsBTauObjects') ROOT.gSystem.Load('libCondToolsBTau') ######## ## ## ######## ## ## ######## ## ####### ####### ######## ## ## ## ## ### 
#################################################################################
# Event loop
#
# IMPORTANT : Run one FWLite instance per file. Otherwise,
# FWLite aggregates ALL of the information immediately, which
# can take a long time to parse.
#################################################################################

def processEvent(iev, event):
    """Process one event: apply trigger/vertex gates, fill all per-object
    branches, and fill the output tree.

    iev   -- event index within the current file (used only for printouts
             and the returned string)
    event -- FWLite event-like object providing getByLabel()/eventAuxiliary()
             (NOTE(review): the caller passes the Events container itself,
             which FWLite treats as "the current event" while iterating —
             confirm this is intentional)

    Returns 0 when the event is rejected (failed trigger, no good primary
    vertex, or no rho product); otherwise returns the string "Event <iev>\n".
    Side effect: fills `outtree` and the various out* branch arrays
    (all module-level globals set up earlier in this script).
    """
    ###########################################################################
    # Trigger
    event.getByLabel(triggerBitLabel, triggerBits)
    event.getByLabel(triggerPrescalesLabel, triggerPrescales)
    trigger_names = event.object().triggerNames(triggerBits.product())

    FLAG_passed_trigger = fwlite_tools.process_triggers(
        triggerBits, triggerPrescales, trigger_names,
        trigger_tree_branches, outTriggers, options,
        verbose=options.verbose)

    # Should do this early. We shouldn't analyze events that don't
    # pass any of the relevant triggers
    if FLAG_passed_trigger is False:
        return 0

    ###########################################################################
    # If trigger test is passed, process event
    ###########################################################################
    # Vertex
    event.getByLabel(vertexLabel, vertices)
    PV, NPV = fwlite_tools.process_vertices(vertices, outVertex, verbose=options.verbose)

    # Should do this first. We shouldn't analyze events that don't have a
    # good primary vertex
    if PV is None:
        return 0

    ###########################################################################
    # Muons
    event.getByLabel(muonLabel, muons)
    fwlite_tools.process_muons(muons, outMuons, verbose=options.verbose)

    ###########################################################################
    # Electrons
    event.getByLabel(electronLabel, electrons)
    fwlite_tools.process_electrons(electrons, outElectrons, verbose=options.verbose)

    ###########################################################################
    # Pileup (MC only: data has no pileup truth to reweight against)
    if options.isMC:
        event.getByLabel(pileuplabel, pileups)
        fwlite_tools.process_pileup(pileups, outPileup, purw, options, verbose=options.verbose)

    ###########################################################################
    # MET
    event.getByLabel(metLabel, mets)
    fwlite_tools.process_mets(mets, outMET, verbose=options.verbose)

    ###########################################################################
    # Rhos (event energy density; needed below for the jet corrections)
    event.getByLabel(rhoLabel, rhos)
    rho = fwlite_tools.process_rhos(rhos, verbose=options.verbose)

    if rho is None:
        return 0

    ###########################################################################
    # Jets: MC uses the fixed Summer16 JECs + uncertainties, data looks up
    # the run-dependent corrections through DataJECs.
    runnumber = event.eventAuxiliary().run()

    event.getByLabel(vertexLabel, vertices)
    event.getByLabel(jetLabel, jets)

    if options.isMC:
        fwlite_tools.process_jets(jets, outJets, options,
                                  jecAK4=jecAK4, jecUncAK4=jecUncAK4,
                                  runnumber=runnumber, rho=rho, NPV=NPV,
                                  verbose=options.verbose)
    else:
        fwlite_tools.process_jets(jets, outJets, options,
                                  DataJECs=DataJECs,
                                  runnumber=runnumber, rho=rho, NPV=NPV,
                                  verbose=options.verbose)

    genOut = "Event %d\n" % (iev)

    if options.verbose:
        print( "\nProcessing %d: run %6d, lumi %4d, event %12d" % \
            (iev,event.eventAuxiliary().run(), \
            event.eventAuxiliary().luminosityBlock(), \
            event.eventAuxiliary().event()))

    outtree.Fill()
    #print("Made it to end!")

    return genOut


###########################################################################
###########################################################################
# Main event loop: iterate over input files, gate on maxevents, and hand
# each event to processEvent().

#genoutputfile = open("generator_information.dat",'w')

nevents = 0
maxevents = int(options.maxevents)
for ifile in getInputFiles(options):
    print ('Processing file ' + ifile)
    events = Events (ifile)
    if maxevents > 0 and nevents > maxevents:
        break

    # loop over events in this file
    print('Tot events in this file: ' + str(events.size()))
    for iev, event in enumerate(events):
        #print(iev)
        if maxevents > 0 and nevents > maxevents:
            break
        nevents += 1

        #if nevents % 1000 == 0:
        if nevents % 100 == 0:
            print ('===============================================')
            print (' ---> Event ' + str(nevents))
        elif options.verbose:
            print (' ---> Event ' + str(nevents))

        # NOTE(review): `events` (the container) is passed, not `event` —
        # FWLite's Events behaves as the current event during iteration,
        # but confirm this is what was intended.
        genOut = processEvent(iev, events)
        #print type(genOut)
        #print genOut
        #if genOut is not None:
            #genoutputfile.write(genOut)

#outtree.Print()
# Close the output ROOT file
f.cd()
f.Write()
f.Close()
#import EcalDetId #from DataFormats.EcalDetId import * # import sys, os #from __future__ import print_function file = '/eos/uscms/store/user/skalafut/WR/13TeV/RunIISpring15_MiniAODSignalSamples/WRToNuEToEEJJ_MW-6000_MNu-3000_TuneCUETP8M1_pythia8_13TeV.root' events = Events(file) print file handleGenParticles = Handle('std::vector<reco::GenParticle>') genParticleTAG = 'prunedGenParticles' print "numEvents = ", events.size() from collections import * nWR = Counter() nWRdaughter0 = Counter() nNuR = Counter() nWstarR = Counter() nNuRdaughter0 = Counter() nNuRdaughter3 = Counter() class bcolors: HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m'
''' timestart = time.time() events = Events('lhe.root') handle = Handle('LHEEventProduct') label = ("source") ROOT.gROOT.SetStyle('Plain') # white background histolist = [] cuts = [0, 0, 0, 0, 0, 0, 0] first = True n_event = 0 n_used = 0 n_tot = events.size() n_base = 0 for event in events: n_event += 1 if n_event % 5000 == 0: print str(n_used) + ' / ' + str(n_event) + ' / ' + str(n_tot) event.getByLabel(label, handle) lhe = handle.product() hepeup = lhe.hepeup() if first: first = False for i in xrange(lhe.weights().size()):
count = 0 jobiter = 0 print "Start looping" #initialize the ttree variables tree_vars = { "bpt": array('d', [0.]), "bmass": array('d', [0.]), "btag": array('d', [0.]), "tpt": array('d', [0.]), "tmass": array('d', [0.]), "nsubjets": array('d', [0.]), "sjbtag": array('d', [0.]) } Tree = Make_Trees(tree_vars) totevents = events.size() print str(totevents) + ' Events total' usegenweight = False if options.set == "QCDFLAT7000": usegenweight = True print "Using gen weight" for event in events: count = count + 1 weightSFb = 1.0 errorSFb = 0.0 #Uncomment for a low count test run #if count > 300000: #break
outpath = sys.argv[1] outfile = os.path.join(outpath, os.path.basename(filename).replace("AOD", "ntuple")) if os.path.isfile(outfile): print("Output file {0} exists, exiting".format(outfile), file=sys.stderr) sys.exit(0) events = Events(filename) print("Reading input file {0}".format(filename)) print("Saving output to file {0}".format(outfile)) evdesc = EventDesc() output = Output(outfile) num_events = events.size() # loop over events for iev, event in enumerate(events): #if iev > 10: # break eid = event.object().id() if iev % 10 == 0: print("Event {0}/{1}".format(iev, num_events)) eventId = (eid.run(), eid.luminosityBlock(), int(eid.event())) evdesc.get(event) output.clear() genpart = evdesc.genparticle.product() ngenparticles = 0
from Firefighter.ffLite.dataSample import samples ROOT.gROOT.SetBatch() plt.style.use("default") plt.rcParams["grid.linestyle"] = ":" plt.rcParams["savefig.dpi"] = 120 plt.rcParams["savefig.bbox"] = "tight" bkgType = "WWZ" # WWZ fn = samples[bkgType] events = Events(fn) print("- Sample: {}".format(fn)) print("- Number of events: {}".format(events.size())) genHdl = Handle("std::vector<reco::GenParticle>") genLbl = ("genParticles", "", "HLT") recoMuHdl = Handle("std::vector<reco::Muon>") recoMuLbl = ("muons", "", "RECO") res_pt = defaultdict(list) res_eta = defaultdict(list) res_phi = defaultdict(list) res_dR = defaultdict(list) recoMu_n = list() recoMu_pt = list() recoMu_eta = list()
def HarvestMiniAOD(inFilePath, outFilePath):
    """Read a MiniAOD file and dump AK4 jet userFloat/userInt content.

    inFilePath  -- path of the input MiniAOD ROOT file
    outFilePath -- path of the output ROOT file (recreated)

    Books a TTree "TreeFatJet" with FatJet branches, but the AK8 fat-jet
    filling (and TreeFatJet.Fill) is currently commented out, so the tree
    is written out empty; the function mainly prints the per-jet user
    variables of the slimmedJets collection for the first few events.
    """
    #
    # Create the output file
    #
    print "Create Output File: %s" % (outFilePath)
    f = ROOT.TFile(outFilePath, "RECREATE")
    f.cd()
    #
    # Initialize the tree jet
    #
    treeName = "TreeFatJet"
    print "Create Output Tree: %s" % (treeName)
    TreeFatJet = ROOT.TTree(treeName, treeName)
    #
    # FatJet branch (fixed-size arrays, counted by nFatJet)
    #
    nFatJetSizeMax = 10
    nFatJetString = 'nFatJet'
    nFatJet = bookIntBranch(TreeFatJet, nFatJetString)
    FatJetPt = bookFloatArrayBranch(TreeFatJet, 'FatJet_pt', nFatJetString, nFatJetSizeMax)
    FatJetEta = bookFloatArrayBranch(TreeFatJet, 'FatJet_eta', nFatJetString, nFatJetSizeMax)
    FatJetPhi = bookFloatArrayBranch(TreeFatJet, 'FatJet_phi', nFatJetString, nFatJetSizeMax)
    FatJetM = bookFloatArrayBranch(TreeFatJet, 'FatJet_m', nFatJetString, nFatJetSizeMax)
    # FatJetSoftdropMass = bookFloatArrayBranch(TreeFatJet, 'FatJet_msoftdrop', nFatJetString, nFatJetSizeMax)
    # FatJetTau1 = bookFloatArrayBranch(TreeFatJet, 'FatJet_tau1', nFatJetString, nFatJetSizeMax)
    # FatJetTau2 = bookFloatArrayBranch(TreeFatJet, 'FatJet_tau2', nFatJetString, nFatJetSizeMax)
    # FatJetTau3 = bookFloatArrayBranch(TreeFatJet, 'FatJet_tau3', nFatJetString, nFatJetSizeMax)
    #
    # FWLite handles for the jet collections
    #
    ak8jetLabel = "slimmedJetsAK8"
    ak8jets = Handle("std::vector<pat::Jet>")
    ak4jetLabel = "slimmedJets"
    ak4jets = Handle("std::vector<pat::Jet>")
    #
    # Read in the MiniAOD file
    #
    events = Events(inFilePath)
    #
    # Get number of events in MiniAOD file
    #
    numEvents = events.size()
    #
    # Set max number of events to process
    # Set to -1 if you want to run over all events
    #
    # maxevents = -1
    maxevents = 1
    #
    # The Event Loop
    #
    print "Looping over %d events " % (numEvents)
    for iev, event in enumerate(events):
        #
        # Stop once maxevents have been processed (unless maxevents <= 0)
        #
        if maxevents > 0 and iev > maxevents:
            break
        if (iev + 1) % 100 == 0:
            print "Processing event %d out of %d" % (iev, numEvents)
        #
        # Get jets
        #
        event.getByLabel(ak4jetLabel, ak4jets)
        #
        # Loop over jets and print every userFloat/userInt stored on them
        #
        for i, jet in enumerate(ak4jets.product()):
            print "New Jet"
            for ufl in jet.userFloatNames():
                print "\t%s %s" % (ufl, jet.userFloat(ufl))
            for uil in jet.userIntNames():
                print "\t%s %s" % (uil, jet.userInt(uil))
        # --- AK8 fat-jet harvesting: currently disabled ---
        # event.getByLabel (ak8jetLabel, ak8jets)
        # nFatJet[0]=0
        # for i,jet in enumerate(ak8jets.product()):
        #     jetP4 = ROOT.TLorentzVector( jet.px(), jet.py(), jet.pz(), jet.energy() )
        #     FatJetPt[i] = jetP4.Pt()
        #     FatJetEta[i] = jetP4.Eta()
        #     FatJetPhi[i] = jetP4.Phi()
        #     FatJetM[i] = jetP4.M()
        #     print "New Jet"
        #     for ufl in jet.userFloatNames():
        #         print "\t%s %s" % (ufl, jet.userFloat(ufl))
        #     # For MiniAODv2
        #     # https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookMiniAOD2016#Jets
        #     # https://twiki.cern.ch/twiki/bin/viewauth/CMS/JetWtagging#Recipes_to_obtain_the_PUPPI_soft
        #     # FatJetSoftdropMass [i] = jet.userFloat("ak8PFJetsPuppiValueMap:mass");
        #     # FatJetTau1[i] = jet.userFloat("ak8PFJetsPuppiValueMap:NjettinessAK8PuppiTau1");
        #     # FatJetTau2[i] = jet.userFloat("ak8PFJetsPuppiValueMap:NjettinessAK8PuppiTau2");
        #     # FatJetTau3[i] = jet.userFloat("ak8PFJetsPuppiValueMap:NjettinessAK8PuppiTau3");
        #     nFatJet[0] += 1
        #
        # Fill the tree for this event (disabled together with the AK8 block)
        #
        # TreeFatJet.Fill()
    #
    # Save the output ttree in the output file
    #
    print "Write tree to file"
    f.Write()
    #
    # Gracefully close the output file
    #
    print "Closing output file"
    f.Close()
from array import array from collections import OrderedDict from DataFormats.FWLite import Events, Handle from PhysicsTools.Heppy.physicsobjects.PhysicsObjects import Jet, GenJet from CMGTools.HNL.physicsobjects.Electron import Electron # events = Events('output.root') # events = Events('output_ttbar2017_102X_mc2017_realistic_v7.root') # events = Events('output_ttbar2017_94X_mc2017_realistic_v17.root') # events = Events('../python/output.root') # events = Events('output_2016_mc.root') events = Events('output_2018_mc.root') # events = Events('/afs/cern.ch/work/m/manzoni/HNL/instructions/CMSSW_10_6_12/src/CMGTools/HNL/cfg/2018/test/HN3L_M_5_V_0p00178044938148_mu_Dirac_cc_massiveAndCKM_LO_3/cmsswPreProcessing.root') maxevents = -1 # max events to process totevents = events.size() # total number of events in the files label_jets = ('slimmedJets', '', 'PAT') handle_jets = Handle('std::vector<pat::Jet>') label_jets_up = ('selectedUpdatedPatJetsNewDFTraining', '', 'NEWDF') handle_jets_up = Handle('std::vector<pat::Jet>') # label_old_ele = ('slimmedElectrons', '', 'PAT') # handle_old_ele = Handle('std::vector<pat::Electron>') label_new_ele = ('slimmedElectrons', '', 'NEWDF') handle_new_ele = Handle('std::vector<pat::Electron>') # label_new_ele = ('updatedElectrons', '', 'NEWDF') # handle_new_ele = Handle('std::vector<pat::Electron>')
f = ROOT.TFile(options.outputFile, 'recreate') f.cd() t = ROOT.TTree( "Events", "Events", 1 ) gDir.cd() for v in vars: name, type=v.split('/') t.Branch(name, ROOT.AddressOf(s, name), v) handles={k:Handle(edmCollections[k][0]) for k in edmCollections.keys()} res={} events = Events(inputFiles) events.toBegin() products={} size=events.size() if not small else 10 missingCollections=[] counter=0 for nev in range(size): if nev%1000==0:print nev,'/',size events.to(nev) eaux=events.eventAuxiliary() run=eaux.run() if options.run>0 and not run==options.run: # print run, options.run continue counter+=1 if options.maxEvents>0 and counter>options.maxEvents: break event=eaux.event() lumi=eaux.luminosityBlock()
# 'ctau_weight_central', # 'ctau_weight_up', # 'ctau_weight_down', # # 'ctau_weight_central_lhe', # 'ctau_weight_up_lhe', # 'ctau_weight_down_lhe', ] fout = ROOT.TFile('%s/flat_tree_bc_newtaubranches.root' % (destination), 'recreate') ntuple = ROOT.TNtuple('tree', 'tree', ':'.join(branches)) tofill = OrderedDict(zip(branches, [np.nan] * len(branches))) start = time() maxevents = maxevents if maxevents >= 0 else events.size( ) # total number of events in the files for i, event in enumerate(events): if (i + 1) > maxevents: break if i % 1000 == 0: percentage = float(i) / maxevents * 100. speed = float(i) / (time() - start) eta = datetime.now() + timedelta(seconds=(maxevents - i) / max(0.1, speed)) print( '\t===> processing %d / %d event \t completed %.1f%s \t %.1f ev/s \t ETA %s s' % (i, maxevents, percentage, '%', speed, eta.strftime('%Y-%m-%d %H:%M:%S'))) #
class METProducerTest(unittest.TestCase):
    """Compare MET collections between an "expected" and an "actual" EDM file.

    setUp opens both files (paths come from the module-level ``options``) and
    books one FWLite Handle per MET flavour and per side.  Each ``test_*``
    method compares one corrected-MET module's output, event by event, using
    the ``assert_reco*`` comparison helpers.

    BUG FIX: the two CaloMET tests previously passed ``self.exHandleCaloMETs``
    as *both* the expected and the actual handle, so they compared the
    expected file against itself and could never fail.  They now use
    ``self.acHandleCaloMETs`` for the actual side.
    """

    def setUp(self):
        # "ex" = expected (reference) file, "ac" = actual (file under test).
        self.exEvents = Events([options.expectedPath])
        self.acEvents = Events([options.actualPath])
        self.exHandleGenMETs = Handle("std::vector<reco::GenMET>")
        self.exHandlePFMETs = Handle("std::vector<reco::PFMET>")
        self.exHandleCaloMETs = Handle("std::vector<reco::CaloMET>")
        self.exHandleMETs = Handle("std::vector<reco::MET>")
        self.exHandlePFClusterMETs = Handle("std::vector<reco::PFClusterMET>")
        self.acHandleGenMETs = Handle("std::vector<reco::GenMET>")
        self.acHandlePFMETs = Handle("std::vector<reco::PFMET>")
        self.acHandleCaloMETs = Handle("std::vector<reco::CaloMET>")
        self.acHandleMETs = Handle("std::vector<reco::MET>")
        self.acHandlePFClusterMETs = Handle("std::vector<reco::PFClusterMET>")

    def test_n_events(self):
        self.assertEqual(self.exEvents.size(), self.acEvents.size())

    # ------------------------------------------------------------------ #
    # Shared drivers for the per-module tests (one public test_* method  #
    # per correction module is kept so unittest discovery is unchanged). #
    # ------------------------------------------------------------------ #

    def _assert_pfmet_module(self, module):
        """Compare one reco::PFMET producer's output between the two files."""
        label = (module, "", "TEST")
        candidateAssertMethods = ('assert_recoLeafCandidate',
                                  'assert_recoMET',
                                  'assert_recoPFMET')
        self.assert_collection(label, self.exHandlePFMETs,
                               self.acHandlePFMETs, candidateAssertMethods)

    def _assert_calomet_module(self, module):
        """Compare one reco::CaloMET producer's output between the two files."""
        label = (module, "", "TEST")
        candidateAssertMethods = ('assert_recoLeafCandidate',
                                  'assert_recoMET',
                                  'assert_recoCaloMET')
        # Fixed: use the *actual* handle for the actual side (was exHandle).
        self.assert_collection(label, self.exHandleCaloMETs,
                               self.acHandleCaloMETs, candidateAssertMethods)

    def test_recoPFMETs_pfMetT0rt(self):
        self._assert_pfmet_module("pfMetT0rt")

    def test_recoPFMETs_pfMetT0rtT1(self):
        self._assert_pfmet_module("pfMetT0rtT1")

    def test_recoPFMETs_pfMetT0rtT1T2(self):
        self._assert_pfmet_module("pfMetT0rtT1T2")

    def test_recoPFMETs_pfMetT0rtT2(self):
        self._assert_pfmet_module("pfMetT0rtT2")

    def test_recoPFMETs_pfMetT0pc(self):
        self._assert_pfmet_module("pfMetT0pc")

    def test_recoPFMETs_pfMetT0pcT1(self):
        self._assert_pfmet_module("pfMetT0pcT1")

    def test_recoPFMETs_pfMetT1(self):
        self._assert_pfmet_module("pfMetT1")

    def test_recoPFMETs_pfMetT1T2(self):
        self._assert_pfmet_module("pfMetT1T2")

    def test_recoPFMETs_pfMetT0rtTxy(self):
        self._assert_pfmet_module("pfMetT0rtTxy")

    def test_recoPFMETs_pfMetT0rtT1Txy(self):
        self._assert_pfmet_module("pfMetT0rtT1Txy")

    def test_recoPFMETs_pfMetT0rtT1T2Txy(self):
        self._assert_pfmet_module("pfMetT0rtT1T2Txy")

    def test_recoPFMETs_pfMetT0rtT2Txy(self):
        self._assert_pfmet_module("pfMetT0rtT2Txy")

    def test_recoPFMETs_pfMetT0pcTxy(self):
        self._assert_pfmet_module("pfMetT0pcTxy")

    def test_recoPFMETs_pfMetT0pcT1Txy(self):
        self._assert_pfmet_module("pfMetT0pcT1Txy")

    def test_recoPFMETs_pfMetT1Txy(self):
        self._assert_pfmet_module("pfMetT1Txy")

    def test_recoPFMETs_pfMetT1T2Txy(self):
        self._assert_pfmet_module("pfMetT1T2Txy")

    def test_recoCaloMETs_caloMetT1(self):
        self._assert_calomet_module("caloMetT1")

    def test_recoCaloMETs_caloMetT1T2(self):
        self._assert_calomet_module("caloMetT1T2")

    def assert_collection(self, label, exHandle, acHandle, candidateAssertMethods):
        """Walk both files in lockstep and compare the front MET of `label`.

        Also asserts the actual collection has exactly one entry per event.
        """
        exEventIter = iter(self.exEvents)
        acEventIter = iter(self.acEvents)
        nevents = min(self.exEvents.size(), self.acEvents.size())
        for _ in range(nevents):
            exEvent = next(exEventIter)
            acEvent = next(acEventIter)
            exEvent.getByLabel(label, exHandle)
            exMET = exHandle.product().front()
            acEvent.getByLabel(label, acHandle)
            acMETs = acHandle.product()
            self.assertEqual(acMETs.size(), 1)
            acMET = acMETs.front()
            for method in candidateAssertMethods:
                getattr(self, method)(acMET, exMET)

    # ------------------------------------------------------------------ #
    # Per-type comparison helpers.  Fractions and raw quantities must    #
    # match exactly; derived Et values only to a fixed number of places  #
    # (they are recomputed as fraction*sumEt and pick up rounding).      #
    # ------------------------------------------------------------------ #

    def _assert_accessors_equal(self, actual, expected, names):
        """assertEqual on each zero-arg accessor in `names`."""
        for name in names:
            self.assertEqual(getattr(actual, name)(), getattr(expected, name)())

    def _assert_accessors_close(self, actual, expected, names_places):
        """assertAlmostEqual on each (accessor, places) pair."""
        for name, places in names_places:
            self.assertAlmostEqual(getattr(actual, name)(),
                                   getattr(expected, name)(), places)

    def assert_recoPFMET(self, actual, expected):
        # double
        self._assert_accessors_equal(actual, expected, (
            'photonEtFraction', 'neutralHadronEtFraction', 'electronEtFraction',
            'chargedHadronEtFraction', 'muonEtFraction', 'HFHadronEtFraction',
            'HFEMEtFraction'))
        self._assert_accessors_close(actual, expected, (
            ('photonEt', 12), ('neutralHadronEt', 12), ('electronEt', 12),
            ('chargedHadronEt', 12), ('muonEt', 12), ('HFHadronEt', 12),
            ('HFEMEt', 12)))

    def assert_recoGenMET(self, actual, expected):
        # double
        self._assert_accessors_equal(actual, expected, (
            'NeutralEMEtFraction', 'NeutralEMEt',
            'ChargedEMEtFraction', 'ChargedEMEt',
            'NeutralHadEtFraction', 'NeutralHadEt',
            'ChargedHadEtFraction', 'ChargedHadEt',
            'MuonEtFraction', 'MuonEt',
            'InvisibleEtFraction', 'InvisibleEt'))

    def assert_recoCaloMET(self, actual, expected):
        # double
        self._assert_accessors_equal(actual, expected, (
            'maxEtInEmTowers', 'maxEtInHadTowers'))
        self._assert_accessors_close(actual, expected, (
            ('etFractionHadronic', 15), ('emEtFraction', 15)))
        self._assert_accessors_equal(actual, expected, (
            'hadEtInHB', 'hadEtInHO', 'hadEtInHE', 'hadEtInHF',
            'emEtInEB', 'emEtInEE', 'emEtInHF', 'metSignificance',
            'CaloSETInpHF', 'CaloSETInmHF', 'CaloMETInpHF', 'CaloMETInmHF',
            'CaloMETPhiInpHF', 'CaloMETPhiInmHF'))

    def assert_recoMET(self, actual, expected):
        # double
        self._assert_accessors_close(actual, expected, (
            ('sumEt', 12), ('mEtSig', 12)))
        self._assert_accessors_equal(actual, expected, (
            'significance', 'e_longitudinal'))
        # element-wise comparison of the correction vectors
        for name in ('dmEx', 'dmEy', 'dsumEt', 'dSignificance'):
            actualVec = getattr(actual, name)()
            expectedVec = getattr(expected, name)()
            self.assertEqual(actualVec.size(), expectedVec.size())
            for a, e in zip(actualVec, expectedVec):
                self.assertEqual(a, e)
        # mEtCorr: only the size is compared; per-entry comparison was
        # deliberately disabled in the original implementation.
        self.assertEqual(actual.mEtCorr().size(), expected.mEtCorr().size())

    def assert_recoLeafCandidate(self, actual, expected):
        # size_t / int / double / bool accessors compared exactly
        self._assert_accessors_equal(actual, expected, (
            'numberOfDaughters', 'numberOfMothers',
            'charge', 'threeCharge',
            'p', 'px', 'py', 'pz', 'phi', 'theta', 'eta',
            'vx', 'vy', 'vz',
            'pdgId', 'status',
            'longLived', 'massConstraint',
            'vertexChi2', 'vertexNdof', 'vertexNormalizedChi2',
            'hasMasterClone', 'hasMasterClonePtr',
            'isElectron', 'isMuon', 'isStandAloneMuon', 'isGlobalMuon',
            'isTrackerMuon', 'isCaloMuon', 'isPhoton', 'isConvertedPhoton',
            'isJet'))
        # derived kinematics compared to a limited precision
        self._assert_accessors_close(actual, expected, (
            ('energy', 10), ('et', 10), ('mt', 10), ('mtSqr', 10), ('pt', 5)))
class EdmDataAccessor(BasicDataAccessor, RelativeDataAccessor, ParticleDataAccessor, EventFileAccessor):
    """Data accessor that exposes the contents of an EDM ROOT (or branch-list
    .txt) file as a browsable object tree.

    Objects are tracked by their ``id()`` in a set of parallel dictionaries
    (label, parent, children, mother/daughter relations, cached child data).
    Branches are read lazily on demand unless ``_readOnDemand`` is False.
    NOTE(review): many parameters are named ``object``/``property``, shadowing
    Python builtins — kept as-is to preserve the original interface.
    """
    def __init__(self):
        logging.debug(__name__ + ": __init__")
        self._dataObjects = []                 # top-level objects of the current event (BranchDummy or product)
        self._edmLabel={}                      # id(obj) -> full dotted label
        self._edmParent={}                     # id(obj) -> parent object
        self._edmChildren={}                   # id(obj) -> list of child objects
        self._edmMotherRelations={}            # id(obj) -> candidate mothers
        self._edmDaughterRelations={}          # id(obj) -> candidate daughters
        self._edmChildrenObjects={}            # id(obj) -> cached (name,value,ref,type) tuples
        self._eventIndex = 0                   # zero-based index of the current event
        self._numEvents = 0
        self._filename=""
        self._branches=[]                      # all (name,handle,label,instance,process) tuples
        self._filteredBranches=[]              # branches surviving the validity filter
        self._events=None                      # FWLite Events, set by open()
        self._readOnDemand=True                # defer branch reads until requested
        self._underscore=False                 # include attributes with underscores?
        self._filterBranches=True              # hide invalid branches?
        self.maxLevels=2                       # default recursion depth for reads
        self.maxDaughters=1000                 # safety cap per object
    def isRead(self,object,levels=1):
        """Return True if the object (and, for levels>1, its children) has cached child data."""
        if not id(object) in self._edmChildrenObjects.keys():
            return False
        if levels>1 and id(object) in self._edmChildren.keys():
            for child in self._edmChildren[id(object)]:
                if not self.isRead(child, levels-1):
                    return False
        return True
    def children(self,object):
        """ Get children of an object """
        if id(object) in self._edmChildren.keys() and self.isRead(object):
            return self._edmChildren[id(object)]
        else:
            return ()
    def isContainer(self,object):
        """ Return True if the object has (or, when not yet read, may have) children """
        if id(object) in self._edmChildren.keys() and self.isRead(object):
            return len(self._edmChildren[id(object)])>0
        else:
            # not read yet: assume it is a container so the GUI offers expansion
            return True
    def motherRelations(self,object):
        """ Get motherRelations of an object """
        if id(object) in self._edmMotherRelations.keys():
            return self._edmMotherRelations[id(object)]
        else:
            return ()
    def daughterRelations(self,object):
        """ Get daughterRelations of an object """
        if id(object) in self._edmDaughterRelations.keys():
            return self._edmDaughterRelations[id(object)]
        else:
            return ()
    def label(self,object):
        return self.getShortLabel(object)
    def getShortLabel(self,object):
        """Return the last component of the dotted label, or "" if unknown."""
        if id(object) in self._edmLabel.keys():
            splitlabel=self._edmLabel[id(object)].strip(".").split(".")
            return splitlabel[len(splitlabel)-1]
        else:
            return ""
    def getShortLabelWithType(self,object):
        return self.getShortLabel(object)+" <"+self.getShortType(object)+">"
    def getObjectLabel(self,object):
        """Return the middle components of the dotted label (without branch and leaf)."""
        splitlabel=self._edmLabel[id(object)].strip(".").split(".")
        return ".".join(splitlabel[1:-1])
    def getType(self,object):
        """Return the Python/C++ class name of the object, without module prefix."""
        typ=str(object.__class__)
        if "\'" in typ:
            typ=typ.split("\'")[1]
        if "." in typ:
            typ=typ.split(".")[len(typ.split("."))-1]
        return typ.strip(" ")
    def getShortType(self,object):
        # strip C++ template arguments, e.g. "vector<int>" -> "vector"
        typ=self.getType(object).split("<")[0].strip(" ")
        return typ
    def getBranch(self,object):
        """Walk up the parent chain to the top-level branch object."""
        entry=object
        while id(entry) in self._edmParent.keys() and self._edmParent[id(entry)]!=None:
            entry=self._edmParent[id(entry)]
        return entry
    def getDepth(self,object):
        """Return the number of parents above the object."""
        entry=object
        i=0
        while id(entry) in self._edmParent.keys() and self._edmParent[id(entry)]!=None:
            entry=self._edmParent[id(entry)]
            i+=1
        return i
    def getObjectProperties(self,object):
        """ get all method properties of an object """
        objects=[]
        for attr in dir(object):
            prop=getattr(object,attr)
            # skip dunders; skip underscore-decorated names unless enabled
            if not attr.startswith("__") and (self._underscore or attr.strip("_")==attr):
                objects+=[(attr,prop)]
        return objects
    def getObjectRef(self,object):
        """ get object and resolve references """
        typshort=self.getShortType(object)
        ref_types=["edm::Ref","edm::RefProd","edm::RefToBase","edm::RefToBaseProd","edm::Ptr"]
        value=object
        ref=False
        if typshort in ref_types:
            try:
                if hasattr(object, "isNull") and object.isNull():
                    value="ERROR: "+self.getType(object)+" object is null"
                elif hasattr(object, "isAvailable") and not object.isAvailable():
                    value="ERROR: "+self.getType(object)+" object is not available"
                else:
                    value=object.get()
                    if isinstance(value, type(None)):
                        value="ERROR: Could not get "+self.getType(object)
                    else:
                        ref=True
            except Exception as message:
                value="ERROR: "+str(message)
        return value,ref
    def getObjectContent(self,object):
        """ get string value of a method

        Non-callables are returned unchanged. For callables, the PyROOT
        __doc__ string (the C++ signature) is parsed: only parameterless
        methods are invoked, and methods returning hidden/unloadable types
        yield None or an "ERROR: ..." string.
        """
        if not callable(object):
            return object
        else:
            typ=""
            if not object.__doc__ or str(object.__doc__)=="":
                return "ERROR: Empty __doc__ string"
            docs=str(object.__doc__).split("\n")
            for doc in docs:
                # collect the non-defaulted parameters of this overload
                parameters=[]
                for p in doc[doc.find("(")+1:doc.find(")")].split(","):
                    if p!="" and not "=" in p:
                        parameters+=[p]
                if len(parameters)!=0:
                    continue
                # extract the return type, honouring template brackets
                typestring=doc[:doc.find("(")]
                split_typestring=typestring.split(" ")
                templates=0
                end_typestring=0
                for i in reversed(range(len(split_typestring))):
                    templates+=split_typestring[i].count("<")
                    templates-=split_typestring[i].count(">")
                    if templates==0:
                        end_typestring=i
                        break
                typ=" ".join(split_typestring[:end_typestring])
            hidden_types=["iterator","Iterator"]
            root_types=["ROOT::"]
            if typ=="" or "void" in typ or True in [t in typ for t in hidden_types]:
                return None
            from ROOT import TClass
            if True in [t in typ for t in root_types] and TClass.GetClass(typ)==None:
                return "ERROR: Cannot display object of type "+typ
            try:
                object=object()
                value=object
            except Exception as message:
                value="ERROR: "+str(message)
            if "Buffer" in str(type(value)):
                return "ERROR: Cannot display object of type "+typ
            else:
                return value
    def isVectorObject(self,object):
        # heuristics: type name or presence of a size() method
        typ=self.getShortType(object)
        return typ=="list" or typ[-6:].lower()=="vector" or typ[-3:].lower()=="map" or typ[-10:].lower()=="collection" or hasattr(object,"size")
    def compareObjects(self,a,b):
        """Compare two candidates by their four-momentum components only."""
        same=False
        if hasattr(a,"px") and hasattr(a,"py") and hasattr(a,"pz") and hasattr(a,"energy") and \
           hasattr(b,"px") and hasattr(b,"py") and hasattr(b,"pz") and hasattr(b,"energy"):
            same=a.px()==b.px() and a.py()==b.py() and a.pz()==b.pz() and a.energy()==b.energy()
        return same
    def getDaughterObjects(self,object):
        """ get list of daughter objects from properties """
        objects=[]
        # subobjects
        objectdict={}
        hidden_attr=["front","back","IsA","clone","masterClone","masterClonePtr","mother","motherRef","motherPtr","daughter","daughterRef","daughterPtr","is_back_safe"]
        broken_attr=[]#["jtaRef"]
        for attr1,property1 in self.getObjectProperties(object):
            if attr1 in hidden_attr:
                pass
            elif attr1 in broken_attr:
                objectdict[attr1]=("ERROR: Cannot read property",False)
            else:
                (value,ref)=self.getObjectRef(self.getObjectContent(property1))
                if not isinstance(value,type(None)) and (not self.isVectorObject(object) or self._propertyType(value)!=None):
                    objectdict[attr1]=(value,ref)
        for name in sorted(objectdict.keys()):
            objects+=[(name,objectdict[name][0],objectdict[name][1],self._propertyType(objectdict[name][0]))]
        # entries in vector
        if self.isVectorObject(object):
            n=0
            # NOTE(review): 'all' here must be a module-level iteration helper
            # defined elsewhere in this file — the builtin all() returns a bool
            # and is not iterable. Verify against the imports/top of file.
            for o in all(object):
                (value,ref)=self.getObjectRef(o)
                typ=self._propertyType(value)
                if typ!=None:
                    name="["+str(n)+"]"
                elif "GenParticle" in str(value):
                    name=defaultParticleDataList.getNameFromId(value.pdgId())
                else:
                    name=self.getType(value)+" ["+str(n)+"]"
                objects+=[(name,value,ref,typ)]
                n+=1
        # read candidate relations
        for name,mother,ref,propertyType in objects:
            if hasattr(mother,"numberOfDaughters") and hasattr(mother,"daughter"):
                try:
                    for n in range(mother.numberOfDaughters()):
                        daughter=mother.daughter(n)
                        found=False
                        # map the daughter back onto an already-listed object if possible
                        # (NOTE(review): loop variable 're' shadows the regex module)
                        for na,da,re,st in objects:
                            if self.compareObjects(daughter,da):
                                daughter=da
                                found=True
                        if not id(mother) in self._edmDaughterRelations.keys():
                            self._edmDaughterRelations[id(mother)]=[]
                        self._edmDaughterRelations[id(mother)]+=[daughter]
                        if not id(daughter) in self._edmMotherRelations.keys():
                            self._edmMotherRelations[id(daughter)]=[]
                        self._edmMotherRelations[id(daughter)]+=[mother]
                except Exception as message:
                    logging.error("Cannot read candidate relations: "+str(message))
        return objects
    def _propertyType(self,value):
        """Map a Python value to a property-type tag, or None for complex objects."""
        if type(value) in (bool,):
            return "Boolean"
        elif type(value) in (int, long):
            return "Integer"
        elif type(value) in (float,):
            return "Double"
        elif type(value) in (complex,str,unicode):
            return "String"
        else:
            return None
    def properties(self,object):
        """ Make list of all properties """
        logging.debug(__name__ + ": properties: "+self.label(object))
        properties=[]
        objectproperties={}
        objectproperties_sorted=[]
        # collect the simple-typed children as displayable properties
        if id(object) in self._edmChildrenObjects.keys():
            for name,value,ref,propertyType in self._edmChildrenObjects[id(object)]:
                if propertyType!=None:
                    objectproperties[name]=(value,propertyType)
                    objectproperties_sorted+=[name]
        properties+=[("Category","Object info","")]
        shortlabel=self.getShortLabel(object)
        properties+=[("String","label",shortlabel)]
        properties+=[("String","type",self.getType(object))]
        objectlabel=self.getObjectLabel(object)
        if objectlabel!="":
            properties+=[("String","object",objectlabel)]
        branchlabel=self.label(self.getBranch(object))
        if shortlabel.strip(".")!=branchlabel.strip("."):
            properties+=[("String","branch",branchlabel)]
        else:
            # top-level branch: decompose the friendlyName_label_instance_process string
            properties+=[("Category","Branch info","")]
            properties+=[("String","Type",branchlabel.split("_")[0])]
            properties+=[("String","Label",branchlabel.split("_")[1])]
            properties+=[("String","Product",branchlabel.split("_")[2])]
            properties+=[("String","Process",branchlabel.split("_")[3])]
        # well-known particle properties first
        for property in ["pdgId","charge","status"]:
            if property in objectproperties.keys():
                properties+=[(objectproperties[property][1],property,objectproperties[property][0])]
                del objectproperties[property]
        # four-vector block
        if "px" in objectproperties.keys():
            properties+=[("Category","Vector","")]
            for property in ["energy","px","py","pz","mass","pt","eta","phi","p","theta","y","rapidity","et","mt","mtSqr","massSqr"]:
                if property in objectproperties.keys():
                    properties+=[(objectproperties[property][1],property,objectproperties[property][0])]
                    del objectproperties[property]
        # position block
        if "x" in objectproperties.keys():
            properties+=[("Category","Vector","")]
            for property in ["x","y","z"]:
                if property in objectproperties.keys():
                    properties+=[(objectproperties[property][1],property,objectproperties[property][0])]
                    del objectproperties[property]
        # remaining readable values, then errors
        if False in [str(value[0]).startswith("ERROR") for value in objectproperties.values()]:
            properties+=[("Category","Values","")]
            for property in objectproperties_sorted:
                if property in objectproperties.keys():
                    if not str(objectproperties[property][0]).startswith("ERROR"):
                        properties+=[(objectproperties[property][1],property,objectproperties[property][0])]
                        del objectproperties[property]
        if len(objectproperties.keys())>0:
            properties+=[("Category","Errors","")]
            for property in objectproperties_sorted:
                if property in objectproperties.keys():
                    properties+=[(objectproperties[property][1],property,objectproperties[property][0])]
        return tuple(properties)
    def readObjectsRecursive(self,mother,label,edmobject,levels=1):
        """ read edm objects recursive """
        logging.debug(__name__ + ": readObjectsRecursive (levels="+str(levels)+"): "+label)
        # save object information
        if not id(edmobject) in self._edmLabel.keys():
            if not isinstance(edmobject,(int,float,long,complex,str,unicode,bool)):
                # override comparison operator of object
                try:
                    type(edmobject).__eq__=eq
                    type(edmobject).__ne__=ne
                except:
                    pass
            self._edmLabel[id(edmobject)]=label
            self._edmParent[id(edmobject)]=mother
            self._edmChildren[id(edmobject)]=[]
            if not id(mother) in self._edmChildren.keys():
                self._edmChildren[id(mother)]=[]
            self._edmChildren[id(mother)]+=[edmobject]
        if levels==0:
            # do not read more daughters
            return [edmobject],True
        else:
            # read daughters
            return self.readDaughtersRecursive(edmobject,[edmobject],levels)
    def readDaughtersRecursive(self,edmobject,objects,levels=1):
        """ read daughter objects of an edmobject """
        logging.debug(__name__ + ": readDaughtersRecursive (levels="+str(levels)+"): "+str(edmobject))
        # read children information
        if not id(edmobject) in self._edmChildrenObjects.keys():
            self._edmChildrenObjects[id(edmobject)]=self.getDaughterObjects(edmobject)
        # analyze children information
        ok=True
        daughters=self._edmChildrenObjects[id(edmobject)]
        i=0
        for name,daughter,ref,propertyType in daughters:
            # create children objects
            if propertyType==None:
                if ref:
                    label="* "+name
                else:
                    label=name
                if id(edmobject) in self._edmLabel.keys() and self._edmLabel[id(edmobject)]!="":
                    label=self._edmLabel[id(edmobject)]+"."+label
                (res,ok)=self.readObjectsRecursive(edmobject,label,daughter,levels-1)
                objects+=res
            i+=1
            if i>self.maxDaughters:
                # safety cap: avoid runaway recursion on huge collections
                logging.warning("Did not read all daughter objects. Maximum is set to "+str(self.maxDaughters)+".")
                return objects,False
        return objects,ok
    def read(self,object,levels=1):
        """ reads contents of a branch """
        logging.debug(__name__ + ": read")
        if isinstance(object,BranchDummy):
            if hasattr(object,"product"):
                # branch already materialized earlier
                return object.product
            if not self._events:
                return object
            try:
                # branchtuple: (name, handle, moduleLabel, instance, process)
                self._events.getByLabel(object.branchtuple[2],object.branchtuple[3],object.branchtuple[4],object.branchtuple[1])
                if object.branchtuple[1].isValid():
                    product=object.branchtuple[1].product()
                    if not isinstance(product,(int,float,long,complex,str,unicode,bool)):
                        # override comparison operator of object
                        try:
                            type(product).__eq__=eq
                            type(product).__ne__=ne
                        except:
                            pass
                    # swap the dummy for the real product in the top-level list
                    self._dataObjects.insert(self._dataObjects.index(object),product)
                    self._dataObjects.remove(object)
                    self._edmLabel[id(product)]=object.branchtuple[0]
                    object.product=product
                    object=product
                else:
                    self._edmChildrenObjects[id(object)]=[("ERROR","ERROR: Branch is not valid.",False,True)]
                    logging.info("Branch is not valid: "+object.branchtuple[0]+".")
                    object.invalid=True
                    return object
            except Exception as e:
                self._edmChildrenObjects[id(object)]=[("ERROR","ERROR: Unable to read branch : "+str(e),False,True)]
                object.unreadable=True
                logging.warning("Unable to read branch "+object.branchtuple[0]+" : "+exception_traceback())
                return object
        if self.isRead(object,levels):
            return object
        if levels>0:
            self.readDaughtersRecursive(object,[],levels)
        return object
    def goto(self, index):
        """ Goto event number index in file. """
        self._eventIndex=index-1
        # drop all per-event caches
        self._edmLabel={}
        self._edmChildren={}
        self._edmMotherRelations={}
        self._edmDaughterRelations={}
        self._edmChildrenObjects={}
        if self._events:
            self._events.to(self._eventIndex)
        self._dataObjects=[]
        i=0
        for branchtuple in self._filteredBranches:
            branch=BranchDummy(branchtuple)
            self._dataObjects+=[branch]
            self._edmLabel[id(branch)]=branchtuple[0]
            if not self._readOnDemand:
                self.read(branch,self.maxLevels)
            i+=1
        if self._filterBranches and self._events:
            self.setFilterBranches(True)
        return True
    def eventNumber(self):
        # external event numbers are 1-based
        return self._eventIndex+1
    def numberOfEvents(self):
        return self._numEvents
    def topLevelObjects(self):
        return self._dataObjects
    def open(self, filename=None):
        """ Open edm file and show first event """
        self._filename=filename
        self._branches=[]
        if os.path.splitext(filename)[1].lower()==".txt":
            # text file with one branch description per line
            # (NOTE(review): local 'file' shadows the Py2 builtin and is never closed)
            file = open(filename)
            for line in file.readlines():
                if "\"" in line:
                    linecontent=[l.strip(" \n").rstrip(".") for l in line.split("\"")]
                    self._branches+=[(linecontent[0]+"_"+linecontent[1]+"_"+linecontent[3]+"_"+linecontent[5],None,linecontent[1],linecontent[3],linecontent[5])]
                else:
                    linecontent=line.strip("\n").split(" ")[0].split("_")
                    if len(linecontent)>3:
                        self._branches+=[(linecontent[0]+"_"+linecontent[1]+"_"+linecontent[2]+"_"+linecontent[3],None,linecontent[1],linecontent[2],linecontent[3])]
        elif os.path.splitext(filename)[1].lower()==".root":
            from DataFormats.FWLite import Events, Handle
            self._events = Events(self._filename)
            self._numEvents=self._events.size()
            branches=self._events.object().getBranchDescriptions()
            for branch in branches:
                try:
                    branchname=branch.friendlyClassName()+"_"+branch.moduleLabel()+"_"+branch.productInstanceName()+"_"+branch.processName()
                    handle=Handle(branch.fullClassName())
                    self._branches+=[(branchname,handle,branch.moduleLabel(),branch.productInstanceName(),branch.processName())]
                except Exception as e:
                    logging.warning("Cannot read branch "+branchname+":"+str(e))
        self._branches.sort(lambda x, y: cmp(x[0], y[0]))
        self._filteredBranches=self._branches[:]
        return self.goto(1)
    def particleId(self, object):
        charge=self.property(object,"pdgId")
        if charge==None:
            charge=0
        return charge
    def isQuark(self, object):
        particleId = self.particleId(object)
        if not particleId:
            return False
        return defaultParticleDataList.isQuarkId(particleId)
    def isLepton(self, object):
        particleId = self.particleId(object)
        if not particleId:
            return False
        return defaultParticleDataList.isLeptonId(particleId)
    def isGluon(self, object):
        particleId = self.particleId(object)
        if not particleId:
            return False
        return defaultParticleDataList.isGluonId(particleId)
    def isBoson(self, object):
        particleId = self.particleId(object)
        if not particleId:
            return False
        return defaultParticleDataList.isBosonId(particleId)
    def isPhoton(self, object):
        particleId = self.particleId(object)
        if not particleId:
            return False
        # older particle data lists may not provide isPhotonId
        if not hasattr(defaultParticleDataList,"isPhotonId"):
            return False
        return defaultParticleDataList.isPhotonId(particleId)
    def isHiggs(self, object):
        particleId = self.particleId(object)
        if not particleId:
            return False
        # older particle data lists may not provide isHiggsId
        if not hasattr(defaultParticleDataList,"isHiggsId"):
            return False
        return defaultParticleDataList.isHiggsId(particleId)
    def lineStyle(self, object):
        """Pick the Feynman-style line style from the particle id."""
        particleId = self.particleId(object)
        if hasattr(defaultParticleDataList,"isPhotonId") and defaultParticleDataList.isPhotonId(particleId):
            return self.LINE_STYLE_WAVE
        elif defaultParticleDataList.isGluonId(particleId):
            return self.LINE_STYLE_SPIRAL
        elif defaultParticleDataList.isBosonId(particleId):
            return self.LINE_STYLE_DASH
        return self.LINE_STYLE_SOLID
    def color(self, object):
        """Pick the display color from the particle id."""
        particleId = self.particleId(object)
        if defaultParticleDataList.isLeptonId(particleId):
            return QColor(244, 164, 96)
        elif defaultParticleDataList.isQuarkId(particleId):
            return QColor(0, 100, 0)
        elif hasattr(defaultParticleDataList,"isHiggsId") and defaultParticleDataList.isHiggsId(particleId):
            return QColor(247, 77, 251)
        elif defaultParticleDataList.isBosonId(particleId):
            return QColor(253, 74, 74)
        return QColor(176, 179, 177)
    def charge(self, object):
        charge=self.property(object,"charge")
        if charge==None:
            charge=0
        return charge
    def linkMother(self, object, mother):
        # relations are discovered while reading; explicit linking is a no-op
        pass
    def linkDaughter(self, object, daughter):
        # relations are discovered while reading; explicit linking is a no-op
        pass
    def underscoreProperties(self):
        return self._underscore
    def setUnderscoreProperties(self,check):
        self._underscore=check
    def filterBranches(self):
        return self._filterBranches
    def setFilterBranches(self,check):
        """Enable/disable hiding of invalid branches; returns the new state."""
        if not self._events:
            return True
        self._filterBranches=check
        if check:
            # probe each branch; drop dummies, and remove invalid ones permanently
            for branch in self._dataObjects[:]:
                result=self.read(branch,0)
                if isinstance(result,BranchDummy):
                    self._dataObjects.remove(result)
                    if hasattr(result,"invalid"):
                        self._filteredBranches.remove(result.branchtuple)
            return True
        else:
            self._filteredBranches=self._branches[:]
            self.goto(self.eventNumber())
            return False
    def filteredBranches(self):
        return self._filteredBranches
# make one output file per number of vertices # maps from number of vertices to output file fout = {} # maps from number of vertices to output tuple output_tuple = {} for num_vertices in range(max_num_vertices+1): fout[num_vertices] = ROOT.TFile(output_data_dir + "/all-sizes-%dvtx.root" % num_vertices, "RECREATE") import time total_num_events = events.size() start_time = time.time() max_num_vertices_seen = 0 # loop over events for event in events: event.getByLabel (("fedSizeData"), handle) # get the product fedsizedata = handle.product() num_vertices = fedsizedata.getNumPrimaryVertices() max_num_vertices_seen = max(num_vertices, max_num_vertices_seen)
class Looper(object):
    '''Creates a set of analyzers, and schedules the event processing.'''
    def __init__( self, name, cfg_comp, sequence, nEvents=None, firstEvent=0, nPrint=0):
        '''Handles the processing of an event sample.
        An Analyzer is built for each Config.Analyzer present
        in sequence. The Looper can then be used to process an event,
        or a collection of events in the sample.

        name    : name of the Looper, will be used as the output directory name
        cfg_comp: information for the input sample, see Config
        sequence: an ordered list of Config.Analyzer
        nEvents : number of events to process (None = all)
        firstEvent: index of the first event to process
        nPrint  : number of events to print at the beginning
        '''
        # _prepareOutput creates a fresh, unique output directory
        self.name = self._prepareOutput(name)
        self.outDir = self.name
        self.logger = logging.getLogger( self.name )
        self.logger.addHandler(logging.FileHandler('/'.join([self.name, 'log.txt'])))
        self.logger.addHandler( logging.StreamHandler(sys.stdout) )
        self.cfg_comp = cfg_comp
        self.classes = {} #TODO: should be a diclist?
        self.analyzers = map( self._buildAnalyzer, sequence )
        self.nEvents = nEvents
        self.firstEvent = firstEvent
        self.nPrint = int(nPrint)
        # initialize FWLite chain on input file:
        # import pdb ; pdb.set_trace()
        try:
            self.events = Events( self.cfg_comp.files )
        except RuntimeError:
            #import pdb ; pdb.set_trace()
            print 'cannot find any file matching pattern', self.cfg_comp.files
            raise
    def _prepareOutput(self, name):
        """Create the output directory, appending _1, _2, ... if it exists."""
        index = 0
        tmpname = name
        while True:
            try:
                # print 'mkdir', self.name
                os.mkdir( tmpname )
                break
            except OSError:
                index += 1
                tmpname = '%s_%d' % (name, index)
        return tmpname
    def _buildAnalyzer(self, cfg_ana):
        """Instantiate the analyzer class named by cfg_ana, importing and
        caching its module on first use."""
        obj = None
        className = cfg_ana.name.split('_')[0]
        theClass = None
        try:
            # obviously, can't load a module twice
            # so keep track of the needed classes, instead several instances are built
            theClass = self.classes[className]
            print 'found class', theClass
            obj = theClass( cfg_ana, self.cfg_comp, self.outDir )
        except KeyError:
            file = None
            try:
                file, path, desc = imp.find_module( className )
                mod = imp.load_module( className , file, path, desc )
                # getting the analyzer class object
                theClass = mod.__dict__[ className ]
                self.classes[className] = theClass
                # creating an analyzer
                #if hasattr( cfg_ana, 'instanceName'):
                #    cfg_ana.name = cfg_ana.instanceName
                print 'loading class', theClass
                print ' from', file
                obj = theClass( cfg_ana, self.cfg_comp, self.outDir )
            finally:
                # NOTE(review): if find_module raised, 'file' is still None and
                # file.close() raises AttributeError — the except below doubles
                # as the "module not found" error report.
                try:
                    file.close()
                except AttributeError:
                    print 'problem loading module', cfg_ana.name
                    print 'please make sure that the module name is correct.'
                    print 'if it is, is this module in your path, as defined below?'
                    pprint.pprint( sorted( sys.path ))
        return obj
    def loop(self):
        '''Loop on a given number of events.

        At the beginning of the loop, Analyzer.beginLoop is called for each Analyzer.
        At each event, self.process is called.
        At the end of the loop, Analyzer.endLoop is called.'''
        nEvents = self.nEvents
        firstEvent = self.firstEvent
        iEv = firstEvent
        # clamp the requested range to the available number of events
        if nEvents is None or int (nEvents) > int (self.events.size()) :
            nEvents = self.events.size()
        else:
            nEvents = int(nEvents)
        eventSize = nEvents
        self.logger.warning('starting loop at event {firstEvent} to process {eventSize} events.'.format(firstEvent=firstEvent, eventSize=eventSize))
        self.logger.warning( str( self.cfg_comp ) )
        for analyzer in self.analyzers:
            analyzer.beginLoop()
        try:
            for iEv in range(firstEvent, firstEvent+eventSize):
                # if iEv == nEvents:
                #     break
                if iEv%100 ==0:
                    print 'event', iEv
                self.process( iEv )
                if iEv<self.nPrint:
                    print self.event
        except UserWarning:
            # analyzers may raise UserWarning to abort the loop cleanly
            print 'Stopped loop following a UserWarning exception'
        for analyzer in self.analyzers:
            analyzer.endLoop()
        self.logger.warning('')
        self.logger.warning( self.cfg_comp )
        self.logger.warning('')
        self.logger.warning( 'number of events processed: {nEv}'.format(nEv=iEv+1) )
    def process(self, iEv ):
        '''Run event processing for all analyzers in the sequence.

        This function is called by self.loop,
        but can also be called directly from
        the python interpreter, to jump to a given event.

        Returns (False, analyzer.name) as soon as one analyzer rejects the
        event, (True, last analyzer name) otherwise.
        NOTE(review): with an empty analyzer sequence the final return
        references an unbound 'analyzer' — confirm sequences are never empty.

        TODO: add an example for event investigation.
        '''
        self.event = Event( iEv )
        self.iEvent = iEv
        self.events.to(iEv)
        for analyzer in self.analyzers:
            if not analyzer.beginLoopCalled:
                analyzer.beginLoop()
            if analyzer.process( self.events, self.event ) == False:
                return (False, analyzer.name)
        return (True, analyzer.name)
    def write(self):
        '''Writes all analyzers.

        See Analyzer.Write for more information.'''
        for analyzer in self.analyzers:
            analyzer.write()
        pass
class Loop: '''Manages looping and navigation on a set of events.''' def __init__(self, name, component, cfg): '''Build a loop object. listOfFiles can be "*.root". name will be used to make the output directory''' self.name = name self.cmp = component self.cfg = cfg self.events = Events( glob.glob( self.cmp.files) ) self.triggerList = TriggerList( self.cmp.triggers ) if self.cmp.isMC: self.trigEff = TriggerEfficiency() self.trigEff.tauEff = None self.trigEff.lepEff = None if self.cmp.tauEffWeight is not None: self.trigEff.tauEff = getattr( self.trigEff, self.cmp.tauEffWeight ) if self.cmp.muEffWeight is not None: self.trigEff.lepEff = getattr( self.trigEff, self.cmp.muEffWeight ) #self.cmp.turnOnCurve = None #if self.cmp.isMC: # if self.cmp.tauTriggerTOC is not None: # self.cmp.turnOnCurve = TurnOnCurve( self.cmp.tauTriggerTOC ) self._MakeOutputDir() self.counters = Counters() self.averages = {} # self.histograms = [] self.InitHandles() def _MakeOutputDir(self): index = 0 name = self.name while True: try: # print 'mkdir', self.name os.mkdir( name ) break except OSError: index += 1 name = '%s_%d' % (self.name, index) self.logger = logging.getLogger(self.name) self.logger.addHandler(logging.FileHandler('/'.join([self.name, 'log.txt']))) def LoadCollections(self, event ): '''Load all collections''' for str,handle in self.handles.iteritems(): handle.Load( event ) # could do something clever to get the products... a setattr maybe? 
def InitHandles(self): '''Initialize all handles for the products we want to read''' self.handles = {} self.handles['cmgTauMuCorFullSelSVFit'] = AutoHandle( 'cmgTauMuCorSVFitFullSel', 'std::vector<cmg::DiObject<cmg::Tau,cmg::Muon>>') ## self.handles['cmgTauMu'] = AutoHandle( 'cmgTauMu', ## 'std::vector<cmg::DiObject<cmg::Tau,cmg::Muon>>') self.handles['cmgTriggerObjectSel'] = AutoHandle( 'cmgTriggerObjectSel', 'std::vector<cmg::TriggerObject>>') if self.cmp.isMC and self.cmp.vertexWeight is not None: self.handles['vertexWeight'] = AutoHandle( self.cmp.vertexWeight, 'double' ) self.handles['vertices'] = AutoHandle( 'offlinePrimaryVertices', 'std::vector<reco::Vertex>' ) self.handles['leptons'] = AutoHandle( 'cmgMuonSel', 'std::vector<cmg::Muon>' ) self.handles['jets'] = AutoHandle( 'cmgPFJetSel', 'std::vector<cmg::PFJet>' ) def InitOutput(self): '''Initialize histograms physics objects, counters.''' #COLIN do I really need to declare them? # declaring physics objects self.diTau = None self.triggerObject = None # declaring counters and averages self.counters = Counters() self.counters.addCounter('triggerPassed') self.counters.addCounter('exactlyOneDiTau') self.counters.addCounter('singleDiTau') self.counters.addCounter('VBF') # self.averages['triggerWeight']=Average('triggerWeight') self.averages['lepEffWeight']=Average('lepEffWeight') self.averages['tauEffWeight']=Average('tauEffWeight') self.averages['vertexWeight']=Average('vertexWeight') self.averages['eventWeight']=Average('eventWeight') self.regions = H2TauTauRegions( self.cfg.cuts ) self.histoLists = {} inclusiveRegions = set() for regionName in self.regions.regionNames(): self.histoLists[ regionName ] = H2TauTauHistogramList( '/'.join([self.name, regionName])) incRegName = inclusiveRegionName( regionName ) inclusiveRegions.add( incRegName ) for regionName in inclusiveRegions: self.histoLists[ regionName ] = H2TauTauHistogramList( '/'.join([self.name, regionName ])) def ToEvent( self, iEv ): '''Navigate to a 
given event and process it.''' # output event structure self.event = Event() # navigating to the correct FWLite event self.iEvent = iEv self.events.to(iEv) self.LoadCollections(self.events) # reading CMG objects from the handle #COLIN this kind of stuff could be automatized cmgDiTaus = self.handles['cmgTauMuCorFullSelSVFit'].product() cmgLeptons = self.handles['leptons'].product() self.event.triggerObject = self.handles['cmgTriggerObjectSel'].product()[0] self.event.vertices = self.handles['vertices'].product() cmgJets = self.handles['jets'].product() # converting them into my own python objects self.event.diTaus = [ DiTau(diTau) for diTau in cmgDiTaus ] self.event.leptons = [ Lepton(lepton) for lepton in cmgLeptons ] self.event.jets = [ Jet(jet) for jet in cmgJets if testJet(jet, self.cfg.cuts) ] self.event.jets = [] for cmgJet in cmgJets: if not testJet( jet, self.cfg.cuts): continue jet = Jet( cmgJet ) # print jet.energy() jet.scaleEnergy( 1 ) # print jet.energy() self.event.jets.append(jet) self.counters.counter('triggerPassed').inc('a: All events') if not self.triggerList.triggerPassed(self.event.triggerObject): return False self.counters.counter('triggerPassed').inc('b: Trig OK ') self.counters.counter('exactlyOneDiTau').inc('a: any # of di-taus ') if len(self.event.diTaus)==0: print 'Event %d : No tau mu.' % i return False if len(self.event.diTaus)>1: # print 'Event %d : Too many tau-mus: n = %d' % (iEv, len(self.event.diTaus)) #COLIN could be nice to have a counter class # which knows why events are rejected. make histograms with that. 
self.logger.warning('Ev %d: more than 1 di-tau : n = %d' % (iEv, len(self.event.diTaus))) self.counters.counter('exactlyOneDiTau').inc('b: at least 1 di-tau ') if not leptonAccept(self.event.leptons): return False self.counters.counter('exactlyOneDiTau').inc('c: exactly one lepton ') self.event.diTau = self.event.diTaus[0] if len(self.event.diTaus)>1: self.event.diTau = bestDiTau( self.event.diTaus ) elif len(self.event.diTaus)==1: self.counters.counter('exactlyOneDiTau').inc('d: exactly 1 di-tau ') else: raise ValueError('should not happen!') cuts = self.cfg.cuts self.counters.counter('singleDiTau').inc('a: best di-tau') self.event.tau = Tau( self.event.diTau.leg1() ) if self.event.tau.calcEOverP() > 0.2: self.counters.counter('singleDiTau').inc('b: E/p > 0.2 ') else: return False if self.event.tau.pt()>cuts.tauPt: self.counters.counter('singleDiTau').inc('c: tau pt > {ptCut:3.1f}'.format(ptCut = cuts.tauPt)) else: return False self.event.lepton = Lepton( self.event.diTau.leg2() ) if self.event.lepton.pt()>cuts.lepPt: self.counters.counter('singleDiTau').inc('d: lep pt > {ptCut:3.1f}'.format(ptCut = cuts.lepPt)) else: return False if abs( self.event.lepton.eta() ) < cuts.lepEta: self.counters.counter('singleDiTau').inc('e: lep |eta| <{etaCut:3.1f}'.format(etaCut = cuts.lepEta)) else: return False self.counters.counter('VBF').inc('a: all events ') if len(self.event.jets)>1: self.counters.counter('VBF').inc('b: at least 2 jets ') self.event.vbf = VBF( self.event.jets ) if self.event.vbf.mjj > cuts.VBF_Mjj: self.counters.counter('VBF').inc('c: Mjj > {mjj:3.1f}'.format(mjj = cuts.VBF_Mjj)) if abs(self.event.vbf.deta) > cuts.VBF_Deta: self.counters.counter('VBF').inc('d: deta > {deta:3.1f}'.format(deta = cuts.VBF_Deta)) if len(self.event.vbf.centralJets)==0: self.counters.counter('VBF').inc('e: no central jet ') # print self.event.vbf self.event.eventWeight = 1 # self.event.triggerWeight = 1 self.event.vertexWeight = 1 self.event.tauEffWeight = 1 
self.event.lepEffWeight = 1 if self.cmp.isMC: self.event.vertexWeight = self.handles['vertexWeight'].product()[0] self.event.eventWeight *= self.event.vertexWeight if self.trigEff.tauEff is not None: self.event.tauEffWeight = self.trigEff.tauEff(self.event.tau.pt()) if self.trigEff.lepEff is not None: self.event.lepEffWeight = self.trigEff.lepEff( self.event.lepton.pt(), self.event.lepton.eta() ) self.event.eventWeight = self.event.vertexWeight * \ self.event.tauEffWeight * \ self.event.lepEffWeight # if self.cmp.turnOnCurve is not None: # self.event.triggerWeight = self.cmp.turnOnCurve.weight( # self.event.tau.pt() ) # self.event.eventWeight *= self.event.triggerWeight # self.averages['triggerWeight'].add( self.event.triggerWeight ) self.averages['tauEffWeight'].add( self.event.tauEffWeight ) self.averages['lepEffWeight'].add( self.event.lepEffWeight ) self.averages['vertexWeight'].add( self.event.vertexWeight ) self.averages['eventWeight'].add( self.event.eventWeight ) # exclusive analysis regionName = self.regions.test( self.event ) histoList = self.histoLists[regionName] histoList.Fill( self.event, self.event.eventWeight ) # inclusive analysis incRegionName = inclusiveRegionName( regionName ) histoList = self.histoLists[incRegionName] histoList.Fill( self.event, self.event.eventWeight ) return True def Loop(self, nEvents=-1 ): '''Loop on a given number of events, and call ToEvent for each event.''' print 'starting loop' self.InitOutput() nEvents = int(nEvents) for iEv in range(0, self.events.size() ): if iEv == nEvents: break if iEv%1000 ==0: print 'event', iEv try: self.ToEvent( iEv ) except ValueError: #COLIN should not be a value error break self.logger.warning( str(self) ) def Write(self): '''Write all histograms to their root files''' # for hist in self.histograms: # hist.Write() for histoList in self.histoLists.values(): histoList.Write() def __str__(self): name = 'Loop %s' % self.name component = str(self.cmp) counters = map(str, self.counters.counters) 
strave = map(str, self.averages.values()) # triggers = ': '.join( ['triggers', str(self.triggers)] ) # trigs = str( self.triggerList ) # vertexWeight = ': '.join( ['vertex weight', str(self.cmp.vertexWeight) ]) return '\n'.join([name, component] + counters + strave )
# Shorthand aliases into the stpol variable tree.
# NOTE(review): channel, events, outfilename and sys come from earlier in
# this script (outside this chunk) — verify before reuse.
e = stpol.stable.event
sigmu = stpol.stable.tchan.muon
sigele = stpol.stable.tchan.electron
# NOTE(review): no else branch — siglepton stays undefined for any other
# channel value; confirm channel is validated upstream.
if channel == "mu":
    siglepton = sigmu
elif channel == "ele":
    siglepton = sigele
bjet = stpol.stable.tchan.bjet
ljet = stpol.stable.tchan.specjet1
top = stpol.stable.tchan.top
ffile = stpol.stable.file
weight = stpol.stable.weights
total_events = events.size()
print "Total ev:", total_events
# nothing to do on an empty input
if total_events < 1:
    sys.exit(0)
"""total_events = 0
for f in infile_list:
    print strftime("%Y-%m-%d %H:%M:%S", gmtime())
    print f, ffile.total_processed(f)
    total_events += ffile.total_processed(f)
strftime("%Y-%m-%d %H:%M:%S", gmtime())
"""
outfile = TFile(outfilename, "RECREATE")
# mydir = ROOT.gDirectory.GetDirectory(out"/home/andres/single_top/stpol/src/qcd_ntuples/histos/mu/histos_cut_T_tW_iso_0.root:/")
# print "dir0 ", mydir, ROOT.gDirectory.pwd()
# print mydir
sys.exit(2) ###################################################### # Handles and labels for recovering RECO variables ###################################################### halosummaryH = Handle('reco::BeamHaloSummary') halosummaryL = ('BeamHaloSummary','','RECO') cschalodataH = Handle('reco::CSCHaloData') cschalodataL = ('CSCHaloData','','RECO') ###################################################### # Loop over events ###################################################### events = Events(inname) nEvents = events.size() for event in events: # Search for the input event eventAux = event.eventAuxiliary() id = eventAux.id() run = id.run() lumi = id.luminosityBlock() eventnum = id.event() if not (event_run == run and event_lb == lumi and event_id == eventnum): continue # Correct event: print out CSCHaloData event.getByLabel(halosummaryL, halosummaryH)
def main():
    """Fill and save the cosmic-muon linearity histogram for a fixed event list."""
    # Default input; overridable by the first command-line argument.
    input_files = [
        # '/uscms_data/d3/wsi/lpcdm/CMSSW_10_2_14/src/Firefighter/ffConfig/crabGarage/190825/results/pickevents_merged_ABC.root',
        '/uscms_data/d3/wsi/lpcdm/CMSSW_10_2_14/src/Firefighter/ffConfig/crabGarage/190825/results/pickevents_merged_D.root',
    ]
    is_debug = True
    if len(sys.argv) > 1:
        input_files = sys.argv[1]
        # assert os.path.isfile(input_files)

    # product key -> (EDM input tag, FWLite handle)
    muon_collections = {
        'recomuons': (("muons", "", "RECO"),
                      Handle("vector<reco::Muon>")),
        'dsamuons': (("displacedStandAloneMuons", "", "RECO"),
                     Handle("vector<reco::Track>")),
        'samuons': (("standAloneMuons", "", "RECO"),
                    Handle("vector<reco::Track>")),
        'cosmicmuons': (('cosmicMuons', '', "RECO"),
                        Handle("vector<reco::Track>")),
    }

    # Event numbers previously tagged as cosmics (the duplicate is intentional).
    cosmic_event_numbers = [
        171885587, 588925175, 638569038, 1194219475, 1843404323, 767319972,
        578254197,
        99710070, 131158592, 310216566, 12032414, 1382227313, 499511766,
        499511766, 599103865, 988893070, 1645464771, 200924923,
    ]

    events = Events(input_files)
    print("InputFile:", input_files)
    print("num_events:", events.size())

    histograms = {}
    histograms['linear'] = ROOT.TH1F(
        'linearity', ';\pi-\Delta\Phi_{0,1}+|\eta_{0}+\eta_{1}|;counts',
        50, 0, 1)

    for ievent, event in enumerate(events):
        # Debug runs look at only the first ~100 events.
        if is_debug and ievent > 100:
            break
        if not is_debug and ievent % 10000 == 0:
            print(ievent)
        aux_id = event.object().id()
        _run = aux_id.run()
        _lumi = event.object().luminosityBlock()
        _event = aux_id.event()
        if _event not in cosmic_event_numbers:
            continue
        if is_debug:
            print(" {} : {} : {} ".format(_run, _lumi,
                                          _event).center(79, "*"))
        ## tasks
        check_dsamuons(event, muon_collections, histograms, debug=is_debug)
        if is_debug:
            print("_" * 79)

    # Draw every booked histogram on a shared canvas and save one PDF each.
    canvas = ROOT.TCanvas('c', '', 500, 400)
    for hist_name in histograms:
        histograms[hist_name].Draw()
        canvas.SaveAs('{}_wsi.pdf'.format(hist_name))
        canvas.Clear()
'pfCandidates':("vector<reco::PFCandidate>", "particleFlow", "") } from categories import * inputFiles = options.inputFiles.split(',') #inputFiles = inputFiles[:1] if not small else inputFiles print "Running over files:",inputFiles print "plotDir:",options.plotDir handles={k:Handle(edmCollections[k][0]) for k in edmCollections.keys()} res={} events = Events(inputFiles) events.toBegin() products={} size=events.size() #if not small else 2000 missingCollections=[] for nev in range(size): if nev%1000==0:print nev,'/',size events.to(nev) eaux=events.eventAuxiliary() run=eaux.run() if options.run>0 and not run==options.run: # print run, options.run continue for k in [ x for x in edmCollections.keys() if x not in missingCollections]: try: events.getByLabel(edmCollections[k][1:],handles[k]) products[k]=handles[k].product()
#! /usr/bin/env python import ROOT from DataFormats.FWLite import Events, Handle import FWCore.ParameterSet.VarParsing as VarParsing options = VarParsing.VarParsing ('analysis') options.parseArguments() events = Events(options) print "In total there are %d events" % events.size() print "Trying an event loop" for i,event in enumerate(events): print "I am processing an event" if i > 10: break print "Done with the event loops"
100, 0, 100) profile1.GetXaxis().SetBinLabel(1, "Lepton+Lepton") profile1.GetXaxis().SetBinLabel(2, "Lepton+generalTracks") profile1.GetXaxis().SetBinLabel(3, "miniAOD's SecVtx") profile1.GetXaxis().SetBinLabel(4, "pfLepton+pfLepton") profile1.GetXaxis().SetBinLabel(5, "pfLepton+pfChargedHadron") profile1.GetXaxis().SetBinLabel(6, "pfChargedHadron+pfChargedHadron") profile2.GetXaxis().SetBinLabel(1, "Lepton+Lepton") profile2.GetXaxis().SetBinLabel(2, "Lepton+generalTracks") profile2.GetXaxis().SetBinLabel(3, "miniAOD's SecVtx") profile2.GetXaxis().SetBinLabel(4, "pfCandidates+pfCandidates") profile2.GetXaxis().SetBinLabel(5, "pfLepton+pfChargedHadron") profile2.GetXaxis().SetBinLabel(6, "pfChargedHadron+pfChargedHadron") print events.size() for i, event in enumerate(events): event.getByLabel(genParticleLabel, genParticleHandle) event.getByLabel(V1Label, V1Handle) #event.getByLabel( V2Label, V2Handle) event.getByLabel(V3Label, V3Handle) event.getByLabel(V40Label, V40Handle) event.getByLabel(V41Label, V41Handle) #event.getByLabel( V42Label, V42Handle) jpsi_idx = 0 genJpsis = [] for gen in genParticleHandle.product(): if (abs(gen.pdgId()) == 443 and abs(gen.eta()) < 2.4 and gen.numberOfDaughters() == 2 and (abs(gen.daughter(0).pdgId()) == 13 or abs(gen.daughter(0).pdgId()) == 11)):
#'pfRecHitsHBHE':{ 'label':("particleFlowRecHitHBHE"), 'type':"vector<reco::PFRecHit>"}, #'caloRecHits': { 'label':("reducedHcalRecHits"), 'type':'edm::SortedCollection<HBHERecHit,edm::StrictWeakOrdering<HBHERecHit> >'}, # 'clusterHCAL': { 'label': "particleFlowClusterHCAL", "type":"vector<reco::PFCluster>"}, # 'pf': { 'label':('particleFlow'), 'type':'vector<reco::PFCandidate>'}, #'ecalBadCalibFilter':{'label':( "ecalBadCalibFilter", "", "USER"), 'type':'bool'} } r = s2.fwliteReader(products = edmCollections) r.start() runs = set() # add handles for k, v in edmCollections.iteritems(): v['handle'] = Handle(v['type']) nevents = 1 if small else events.size() histo = ROOT.TH1F("histo","Stops Transverse decay length (13 TeV);Lxy[cm];number of events",10,0.04,0.05) histol = ROOT.TH1F("histol","Leptons Transverse decay length (13 TeV);Lxy[cm];number of events",50,0.0,0.2) histo.Sumw2() histol.Sumw2() canvas= ROOT.TCanvas("canvas", "Stops decay length ", 1000, 600) mothers = [] while r.run(): #for i in range(nevents): # events.to(i) runs.add(r.evt[0]) eaux = events.eventAuxiliary() print r.event.evt, r.event.lumi, r.event.run genparticles = r.event.genParticles secondaryVertices = r.event.inclusiveSecondaryVertices primaryVertices = r.event.offlinePrimaryVerticesWithBS
files.append(dirEos+dir+"00037C53-AAD1-E111-B1BE-003048D45F38.root"); elif nPU == 70 and nBx == 25: dir = "GJet_Pt40_doubleEMEnriched_TuneZ2star_14TeV-pythia6--UpgradePhase1Age0DES_DR61SLHCx_PU70bx25_DES17_61_V5-v1/" files.append(dirEos+dir+"0204D05E-F6EF-E211-B304-002354EF3BE1.root"); files.append(dirEos+dir+"047BA908-4AF0-E211-B9B4-002618943852.root"); files.append(dirEos+dir+"04C4DEEF-2FF0-E211-9C7A-00261894395C.root"); files.append(dirEos+dir+"04DB1901-3BF0-E211-9DCA-00248C55CC3C.root"); else: print "invalid file settings..."; sys.exit(); events = Events( files ); # loop over events count = 0 ntotal = events.size() print "Nevents = "+str(ntotal) print "Start looping" photonHandle = Handle( "vector<reco::Photon>" ); photonLabel = "photons"; calojetHandle = Handle( "vector<reco::CaloJet>" ); calojetLabel = "ak5CaloJets"; pfjetHandle = Handle( "vector<reco::PFJet>" ); pfjetLabel = "ak5PFJets"; genjetHandle = Handle( "vector<reco::GenJet>" ); genjetLabel = "ak5GenJets";
args = parser.parse_args() #this is a handy class we have made which manages all the handles and labels needed to access products #it gets the products on demand so its fine in this case to declare products you wont use products = [] add_product(products, "eles", "std::vector<pat::Electron>", "slimmedElectrons") add_product(products, "phos", "std::vector<pat::Photon>", "slimmedPhotons") add_product(products, "trigRes", "edm::TriggerResults", "TriggerResults::HLT") add_product(products, "gen", "std::vector<pat::PackedGenParticle>", "packedGenParticles") evtdata = EvtData(products, verbose=True) events = Events(args.in_filenames) nr_events = events.size() print("nrEvents: {}".format(nr_events)) """ now we can play around with the electrons interactively, for example to get the electrons of the 3rd event, do events.to(3) evtdata.get_handles(events) eles = evtdata.get("eles") a simply function get_eles() is defined above to simplify this so you can switch events easily eles = get_eles(evtdata,events,3) note, interactive investigation here is more to help you understand how electrons work in general or look at a specific event, it is not how you do a serious analysis
options.file2 = args[2] # Parse object name and label out of Charles format tName, objName, lName = options.label[0].split('^') label = lName.split(',') ROOT.gROOT.SetBatch() ROOT.gSystem.Load("libFWCoreFWLite.so") ROOT.gSystem.Load("libDataFormatsFWLite.so") ROOT.FWLiteEnabler.enable() chain1 = Events ([options.file1], forceEvent=True) chain2 = Events ([options.file2], forceEvent=True) if chain1.size() != chain1.size(): raise RuntimeError("Files have different #'s of events") numEvents = min(options.numEvents, chain1.size()) # Parameters to this script are the same regardless if the # product is double or vector<double> so have to try both productsCompared = 0 totalCount = 0 mismatches = 0 for handleName in typeMap[objName]: try: chain1.toBegin() chain2.toBegin() logging.info("Testing identity for handle=%s, label=%s" % (handleName, label)) # Use itertools to iterate over lists in || for ev1, ev2, count in itertools.izip(chain1, chain2, range(numEvents)):
(st - 1)] outputDict[fracname]['hTimeRes_MB%i_Wh%i_q5' % (st, wh)] = hTimeRes_MB_q5[wh + 2 + 5 * (st - 1)] outputDict[fracname]['hTimeRes_MB%i_Wh%i_q8' % (st, wh)] = hTimeRes_MB_q8[wh + 2 + 5 * (st - 1)] # loop over events count = 0 if (dumpToFile): f = open("EventDumpList_StdToBayes.log", "w+") for ev in events: if not count % 1000: print count, events.size() count = count + 1 ev.getByLabel(muoBayesLabel, muoBayesHandle) ev.getByLabel(muoStdLabel, muoStdHandle) ev.getByLabel(genLabel, genHandle) muon_bayes = muoBayesHandle.product().getContainer() muon_std = muoStdHandle.product().getContainer() if (dumpToFile): f.write("\nInspecting Event Number %i \n" % (ev.eventAuxiliary().id().event())) f.write( " Wh Se St | w1 w2 w3 w4 w5 w6 w7 w8 | tdc1 tdc2 tdc3 tdc4 tdc5 tdc6 tdc7 tdc8 | lat1 lat2 lat3 lat4 lat5 lat6 lat7 lat8 | Q phi phib phi_cmssw phib_cmssw bX Chi2 x tanPsi t0 \n" )
MtbbptcomparepostSB1e3.Sumw2() #---------------------------------------------------------------------------------------------------------------------# # loop over events #---------------------------------------------------------------------------------------------------------------------# count = 0 jobiter = 0 print "Start looping" #initialize the ttree variables tree_vars = {"bpt":array('d',[0.]),"bmass":array('d',[0.]),"btag":array('d',[0.]),"tpt":array('d',[0.]),"tmass":array('d',[0.]),"nsubjets":array('d',[0.]),"sjbtag":array('d',[0.])} Tree = Make_Trees(tree_vars) totevents = events.size() print str(totevents) + ' Events total' usegenweight = False if options.set == "QCDFLAT7000": usegenweight = True print "Using gen weight" for event in events: count = count + 1 weightSFb = 1.0 errorSFb = 0.0 #Uncomment for a low count test run #if count > 300000: #break
rhochargedpileup_, rhochargedpileupLabel = Handle( "double"), "fixedGridRhoFastjetCentralChargedPileUp" #in order to print out the progress def print_same_line(s): sys.stdout.write(s) # just print sys.stdout.flush() # needed for flush when using \x08 sys.stdout.write((b'\x08' * len(s)).decode()) # back n chars #time.sleep(0.2) # open file (you can use 'edmFileUtil -d /store/whatever.root' to get the physical file name) #events = Events("file:/eos/cms/store/relval/CMSSW_9_4_0_pre3/RelValTTbar_13/MINIAODSIM/PU25ns_94X_mc2017_realistic_PixFailScenario_Run305081_FIXED_HS_AVE50-v1/10000/02B605A1-86C2-E711-A445-4C79BA28012B.root") events = Events(options) nevents = int(events.size()) print "total events: ", events.size() outfile = ROOT.TFile("hists.root", "recreate") ROOT.TH1.SetDefaultSumw2() #Init histograms in npv,eta,pt _eta = n.arange(-5, 5, 0.2) eff_eta = [] #efficiency eff_eta.append( ROOT.TH1F("eff_eta_CHS", "pf+CHS", len(_eta) - 1, array('d', _eta))) eff_eta.append( ROOT.TH1F("eff_eta_PUPPI", "PUPPI", len(_eta) - 1, array('d', _eta))) prt_eta = [] #Purity
# Book per-module energy / multiplicity histograms (fragment: 'binning',
# 'small', 'verbose', and getProd are defined above this view; the cluster
# loop continues below it).
m23_energy = ROOT.TH1F("m23_energy","m23_energy",len(binning)-1,array('d',binning))
m0_multiplicity = ROOT.TH1F("m0_multiplicity","m0_multiplicity",len(binning)-1,array('d',binning))
m21_multiplicity = ROOT.TH1F("m21_multiplicity","m21_multiplicity",len(binning)-1,array('d',binning))
m23_multiplicity = ROOT.TH1F("m23_multiplicity","m23_multiplicity",len(binning)-1,array('d',binning))

from files import *
#events = Events(RelValZMM_13_CMSSW_7_4_1_MCRUN2_74_V9_gensim_740pre7_v1_PhilFixRecHitFlag[:1])
#prefix="RelValZMM_13_CMSSW_7_4_1-MCRUN2_74_V9_gensim_740pre7-v1_PhilFixRecHitFlag"
#events = Events(RelValZMM_13_CMSSW_7_4_1_PU25ns_MCRUN2_74_V9_gensim_740pre7_v1_PhilFixRecHitFlag)
#prefix="RelValZMM_13_CMSSW_7_4_1_PU25ns_MCRUN2_74_V9_gensim_740pre7_v1_PhilFixRecHitFlag"
events = Events(RelValZMM_13_CMSSW_7_4_1_PU50ns_MCRUN2_74_V8_gensim_740pre7_v1_PhilFixRecHitFlag)
prefix="RelValZMM_13_CMSSW_7_4_1_PU50ns_MCRUN2_74_V8_gensim_740pre7_v1_PhilFixRecHitFlag"

# In 'small' test mode only look at the first 10 events.
nevents = 10 if small else events.size()
for i in range(nevents):
    events.to(i)
    if i%10==0:print "At event %i/%i"%(i,nevents)
    # eaux=events.eventAuxiliary()
    # run=eaux.run()
    # event=eaux.event()
    # lumi=eaux.luminosityBlock()
    pfClusters = getProd('pfClusters')
    caloRecHits = getProd('caloRecHits')
    for i_c, c in enumerate(pfClusters):
        hitsAndFractions = c.hitsAndFractions()
        recHitMultiplicity = len(hitsAndFractions)
        if verbose:print "\nCluster",i_c,"energy",c.energy(),"recHitMultiplicity",recHitMultiplicity
        # Accumulator for the summed rec-hit energy of this cluster.
        clusterE=0.
import os eventsRef = Events("step3.root") tracksRef = Handle("std::vector<reco::Track>") label = "generalTracks" quality = "highPurity" #quality = "tight" #quality = "loose" mcMatchRef = Handle("std::vector<float>") nmu = 0 nmuOnly = 0 nmuNotConf = 0 for i in range(0, eventsRef.size()): #for i in range(0, 200): a = eventsRef.to(i) print "Event", i a = eventsRef.getByLabel(label, tracksRef) a = eventsRef.getByLabel("trackMCQuality", mcMatchRef) mcMatch = mcMatchRef.product() trVal = [] k = -1 for track in tracksRef.product(): k += 1 # if (track.phi()<0) : continue if (abs(track.eta()) > 2.3): continue if (track.pt() < 4): continue # if (track.quality(track.qualityByName(quality))) : trVal.append([
# Loop over events total = 0 has_pair = 0 has_pairx2 = 0 pass_emiso = 0 pass_neutraliso = 0 pass_chargediso = 0 pass_photonpt = 0 pass_full = 0 pass_fullx2 = 0 pass_matched = 0 pass_matchedx2 = 0 pass_matchedonly = 0 pass_matchedonlyx2 = 0 print datetime.datetime.now(), " Calculating total number of events..." print datetime.datetime.now(), " Total Events: " + str(events.size()) for event in events: total = total + 1 if (total % 500) == 0 or total == 1: print datetime.datetime.now(), " Processing event " + str(total) + "..." if total == MAX_EVENTS: break if not DUMP_EVENT_BY_NUM == (): if not event.object().id().event() in DUMP_EVENT_BY_NUM: continue # Get Event info event.getByLabel(ak4label, ak4handle) event.getByLabel(pfcandlabel, pfcandhandle) event.getByLabel(pvlabel, pvhandle) if RUNNING_ON_SIGNAL:
options.file2 = args[2] # Parse object name and label out of Charles format tName, objName, lName = options.label[0].split('^') label = lName.split(',') ROOT.gROOT.SetBatch() ROOT.gSystem.Load("libFWCoreFWLite.so") ROOT.gSystem.Load("libDataFormatsFWLite.so") ROOT.FWLiteEnabler.enable() chain1 = Events([options.file1], forceEvent=True) chain2 = Events([options.file2], forceEvent=True) if chain1.size() != chain1.size(): raise RuntimeError("Files have different #'s of events") numEvents = min(options.numEvents, chain1.size()) # Parameters to this script are the same regardless if the # product is double or vector<double> so have to try both productsCompared = 0 totalCount = 0 mismatches = 0 for handleName in typeMap[objName]: try: chain1.toBegin() chain2.toBegin() logging.info("Testing identity for handle=%s, label=%s" % (handleName, label)) # Use itertools to iterate over lists in ||