def main(): parser = argparse.ArgumentParser() parser.add_argument( 'inputfiles', metavar='N', type=str, nargs='+', help='List of input files to use') args = parser.parse_args() numofevts = 0 numoflumis = 0 numofruns = 0 # Using wc style output format print "{:>8s} {:>8s} {:>8s} {}".format("Events","Lumis", "Runs", "Files") fmtstr = "{:>8d} {:>8d} {:>8d} {}" for inputfile in args.inputfiles: ev = Events( inputfile ) lm = Lumis( inputfile ) rn = Runs( inputfile ) numofevts = numofevts + ev.size() numoflumis = numoflumis + lm._lumi.size() numofruns = numofruns + rn._run.size() print fmtstr.format( ev.size(), lm._lumi.size(), rn._run.size(), inputfile ) print fmtstr.format( numofevts, numoflumis, numofruns, "total({} files)".format(len(args.inputfiles)) )
def beginLoop(self):
    """Set up the 'SkimReport' counters and, when possible, pre-fill the
    total event count by reading the 'prePathCounter' product stored in
    the input luminosity blocks instead of looping over events.

    Falls back to per-event counting (self.useLumiBlocks = False) if any
    lumi block lacks the counter product.
    """
    super(skimAnalyzerCount, self).beginLoop()
    self.counters.addCounter('SkimReport')
    self.count = self.counters.counter('SkimReport')
    self.count.register('All Events')
    if self.cfg_comp.isMC:
        # MC also tracks the sum of generator weights.
        self.count.register('Sum Weights')
    if not self.useLumiBlocks:
        print 'Will actually count events instead of accessing lumi blocks'
        return True
    print 'Counting the total events before the skim by accessing luminosity blocks'
    lumis = Lumis(self.cfg_comp.files)
    totalEvents = 0
    for lumi in lumis:
        if lumi.getByLabel('prePathCounter', self.counterHandle):
            totalEvents += self.counterHandle.product().value
        else:
            # Counter product missing: this input is not a CMG-style
            # dataset, so disable the lumi-block shortcut entirely.
            self.useLumiBlocks = False
            break
    if self.useLumiBlocks:
        self.count.inc('All Events', totalEvents)
        if self.cfg_comp.isMC:
            # NOTE(review): weights are approximated by the raw event
            # count here -- confirm this is intended for weighted MC.
            self.count.inc('Sum Weights', totalEvents)
        print 'Done -> proceeding with the analysis'
    else:
        print 'Failed -> will have to actually count events (this can happen if the input dataset is not a CMG one)'
def beginLoop(self, setup):
    """Register the 'events' counter and, for MC with LHE/lumi info
    enabled, collect the generator configuration description from each
    luminosity block into self.LHEInfos.
    """
    super(susyParameterScanAnalyzer, self).beginLoop(setup)
    self.counters.addCounter('events')
    if not self.cfg_comp.isMC:
        # Nothing LHE-related to do on data.
        return True
    if self.cfg_ana.doLHE and self.cfg_ana.useLumiInfo:
        lumis = Lumis(self.cfg_comp.files)
        for lumi in lumis:
            if lumi.getByLabel('generator', self.genLumiHandle):
                self.LHEInfos.append(
                    self.genLumiHandle.product().configDescription())
def beginLoop(self, setup):
    """Read the LHE weight names from the first luminosity block that
    carries a 'generator' product and store copies in
    self.LHEWeightsNames.
    """
    super(LHEWeightAnalyzer, self).beginLoop(setup)
    if self.cfg_ana.useLumiInfo:
        lumis = Lumis(self.cfg_comp.files)
        for lumi in lumis:
            if lumi.getByLabel('generator', self.genLumiHandle):
                weightNames = self.genLumiHandle.product().weightNames()
                for wn in weightNames:
                    #direct cast is not working properly, copy of elements is needed
                    self.LHEWeightsNames.append(wn)
                # Only the first lumi block with the product is needed.
                break
def getEventsLumisInFile(infile):
    """Return (number of events, run -> [lumi numbers]) for an EDM file.

    ``infile`` is a file name (or anything FWLite's Events/Lumis accept).
    The returned dict maps each run number to the list of luminosity
    block numbers seen for that run, in iteration order.
    """
    from DataFormats.FWLite import Lumis, Handle, Events
    events = Events(infile)
    lumis = Lumis(infile)
    ret = {}
    for lum in lumis:
        run = lum.aux().run()
        # setdefault replaces the deprecated has_key() check-then-insert.
        ret.setdefault(run, []).append(lum.aux().id().luminosityBlock())
    return events.size(), ret
def beginLoop(self, setup):
    """Register the 'JSON' counters and, for data with a JSON lumi mask,
    record every good (run, lumi) pair from the input files in
    self.rltInfo.
    """
    super(JSONAnalyzer, self).beginLoop(setup)
    self.counters.addCounter('JSON')
    self.count = self.counters.counter('JSON')
    self.count.register('All Events')
    self.count.register('Passed Events')
    if self.useLumiBlocks and not self.cfg_comp.isMC and not self.lumiList is None:
        lumis = Lumis(self.cfg_comp.files)
        # The original reused the name `lumi` both for the loop variable
        # (a lumi block) and the unpacked lumi number; renamed the loop
        # variable to avoid the shadowing.
        for lumiBlock in lumis:
            lumiid = lumiBlock.luminosityBlockAuxiliary().id()
            run, lumi = lumiid.run(), lumiid.luminosityBlock()
            if self.lumiList.contains(run, lumi):
                self.rltInfo.add('dummy', run, lumi)
def beginLoop(self):
    """Count pre-skim events, overall and per generator channel
    (4mu/4e/2e2mu/ZH/WH/ttH), by reading the 'prePathCounter' and
    'genInfo' products from the input luminosity blocks, then expose the
    totals through the 'SkimReport' counters.
    """
    super(SkimCountAnalyzer, self).beginLoop()
    print 'Counting the total events before the skim by accessing luminosity blocks'
    lumis = Lumis(self.cfg_comp.files)
    totalEvents = 0
    totalFourMu = 0
    totalFourEle = 0
    totalTwoEleTwoMu = 0
    totalZH = 0
    totalWH = 0
    totalTTH = 0
    for lumi in lumis:
        # NOTE(review): unlike skimAnalyzerCount.beginLoop, the
        # getByLabel return codes are not checked here, so product()
        # would fail on inputs missing these counters -- confirm the
        # inputs always carry them.
        lumi.getByLabel('prePathCounter', self.counterHandle)
        totalEvents += self.counterHandle.product().value
        lumi.getByLabel('genInfo', '4mu', self.counterHandle)
        totalFourMu += self.counterHandle.product().value
        lumi.getByLabel('genInfo', '4e', self.counterHandle)
        totalFourEle += self.counterHandle.product().value
        lumi.getByLabel('genInfo', '2e2mu', self.counterHandle)
        totalTwoEleTwoMu += self.counterHandle.product().value
        lumi.getByLabel('genInfo', 'ZH', self.counterHandle)
        totalZH += self.counterHandle.product().value
        lumi.getByLabel('genInfo', 'WH', self.counterHandle)
        totalWH += self.counterHandle.product().value
        lumi.getByLabel('genInfo', 'ttH', self.counterHandle)
        totalTTH += self.counterHandle.product().value
    self.counters.addCounter('SkimReport')
    self.count = self.counters.counter('SkimReport')
    self.count.register('All Events')
    self.count.register('4e')
    self.count.register('4mu')
    self.count.register('2e2mu')
    self.count.register('ZH')
    self.count.register('WH')
    self.count.register('ttH')
    self.count.inc('All Events', totalEvents)
    self.count.inc('2e2mu', totalTwoEleTwoMu)
    self.count.inc('4mu', totalFourMu)
    self.count.inc('4e', totalFourEle)
    self.count.inc('ZH', totalZH)
    self.count.inc('WH', totalWH)
    self.count.inc('ttH', totalTTH)
    print 'Done -> proceeding with the analysis'
# Build the list of input files (one path per line, prefixed with
# options.prefix) and start the PhiSym lumi-block loop.
options.fileList = "/tmp/"+os.environ['USER']+"/filelist.dat"
with open(options.fileList,'r') as textfile:
    files = [line.strip() for line in textfile]
fullpath_files = []
if options.debug:
    print "Reading files: "
for aline in files:
    fullpath_files.append( options.prefix+aline )
    if options.debug:
        print options.prefix+aline

lumis = Lumis(fullpath_files)
handlePhiSymInfo = Handle ("std::vector<PhiSymInfo>")
labelPhiSymInfo = ("PhiSymProducer")
timeMap={}
# JSON mask of certified luminosity sections.
lumiList = LumiList(os.path.expandvars(options.jsonFile))
# NOTE(review): the body of this loop appears to continue beyond this
# chunk of the file.
for i,lumi in enumerate(lumis):
    lumi.getByLabel (labelPhiSymInfo,handlePhiSymInfo)
    phiSymInfo = handlePhiSymInfo.product()
    #skipping BAD lumiSections
    if not lumiList.contains(phiSymInfo.back().getStartLumi().run(),phiSymInfo.back().getStartLumi().luminosityBlock()):
        continue
import ROOT
from DataFormats.FWLite import Events, Handle, Lumis
import time
import numpy

# Single HDFS-resident single-top input file for this FWLite study.
file_list = [
    "/hdfs/cms/store/user/joosep/T_t-channel_TuneZ2star_8TeV-powheg-tauola/stpol_step1_04_19/c9249c44a215ffeb8c9ba40f59092334/output_1_1_VMY.root"
]
events = Events(file_list)
lumis = Lumis(file_list)

# FWLite handles and the module labels of the collections read below.
jetH = Handle('std::vector <pat::Jet>')
jetL = ("smearedPatJetsWithOwnRef")
eleH = Handle('std::vector <pat::Electron>')
eleL = ("electronsWithID")
muH = Handle('std::vector <pat::Muon>')
muL = ("muonsWithID")
muH1 = Handle(
    'edm::OwnVector<reco::Candidate,edm::ClonePolicy<reco::Candidate> >')
muL1 = ("muons1")

nEv = 0
t0 = time.time()  # wall-clock start, presumably for timing/throughput -- confirm use downstream
trigH = Handle("edm::TriggerResults")
nMuon_distr = []  # presumably per-event muon multiplicities -- confirm where filled
import argparse from FWCore.PythonUtilities.LumiList import LumiList parser = argparse.ArgumentParser() parser.add_argument('-i', '--input', type=str, help="file containing the list of input files", required=True) args = parser.parse_args() goodLumis = LumiList( url= 'http://opendata.cern.ch/record/1002/files/Cert_190456-208686_8TeV_22Jan2013ReReco_Collisions12_JSON.txt' ) print "all good lumis" print goodLumis lumis = Lumis(args.input) for lum in lumis: runsLumisDict = {} runList = runsLumisDict.setdefault(lum.aux().run(), []) runList.append(lum.aux().id().luminosityBlock()) myLumis = LumiList(runsAndLumis=runsLumisDict) if myLumis.getLumis() in goodLumis.getLumis(): print "good lumi" print myLumis else: print "bad lumi" print myLumis
def main(args):
    """Inspect a (micro)AOD file and write a JSON summary to args[1].

    args[0] is the input file name (a "/store" path is tried through two
    xrootd/EOS prefixes), args[1] the output JSON name. For MC the total
    event count and weight sum are read from the LuminosityBlocks tree
    (FLASHgg merged counters), with a fallback to the Events tree; for
    data the list of processed (run, lumi) sections is recorded instead.
    Exits the process with 1 on open failure, 0 on success.
    """
    import ROOT
    fName = args[0]
    jsonName = args[1]
    if fName.startswith("/store"):
        prepend = ["root://eoscms//eos/cms", "root://xrootd-cms.infn.it/"]
    else:
        prepend = [""]
    # Try each prefix until the file opens cleanly.
    for pp in prepend:
        fin = ROOT.TFile.Open("%s/%s" % (pp, fName))
        if fin and not fin.IsZombie():
            break
    if not fin or fin.IsZombie():
        sys.exit(1)
    events = fin.Get("Events")
    isData = False
    if events.GetEntriesFast() > 0:
        events.GetEntry(0)
        isData = events.EventAuxiliary.isRealData()
    elif "Run20" in fName:  # try to guess from file name
        isData = True
    if not isData:
        lumi = fin.Get("LuminosityBlocks")
        # Sum the merged FLASHgg event/weight counters over all lumi blocks.
        ne = lumi.Draw(
            "1>>totEvents(1,0,2)",
            "edmMergeableCounter_eventCount__FLASHggMicroAOD.obj.value",
            "goff")
        ne = lumi.Draw(
            "1>>totWeights(1,0,2)",
            "edmMergeableDouble_weightsCount_totalWeight_FLASHggMicroAOD.obj.value",
            "goff")
        if ne > 0:
            totEvents = ROOT.gDirectory.Get("totEvents").Integral()
            totWeights = ROOT.gDirectory.Get("totWeights").Integral()
        else:
            # Counters absent: fall back to the Events tree.
            totEvents = events.GetEntries()
            ne = events.Draw(
                "1>>totWeights(1,0,2)",
                "GenEventInfoProduct_generator__GEN.obj.weights_[0]",
                "goff")
            if ne < 0:
                ne = events.Draw(
                    "1>>totWeights(1,0,2)",
                    "GenEventInfoProduct_generator__SIM.obj.weights_[0]",
                    "goff")
            if ne > 0:
                totWeights = ROOT.gDirectory.Get("totWeights").Integral()
            else:
                totWeights = 0.
        # True-pileup histogram stored as a mergeable histo in the lumi tree.
        lumi.GetEntry(0)
        minpu = lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.min
        maxpu = lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.max
        npu = lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.values.size(
        )
        print minpu, maxpu, npu
        puhisto = [0.] * npu
        # NOTE(review): this loop never calls lumi.GetEntry(il), so it
        # accumulates entry 0's values GetEntries() times -- confirm
        # whether a GetEntry(il) is missing here.
        for il in xrange(lumi.GetEntries()):
            for ibin in xrange(npu):
                puhisto[
                    ibin] += lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.values[
                        ibin]
        print puhisto
        tot = sum(puhisto)
        print tot
        print map(lambda x: float(x) / float(tot), puhisto)
    else:
        totEvents = events.GetEntriesFast()
        totWeights = totEvents
    output = {
        "events": int(events.GetEntriesFast()),
        "totEvents": int(totEvents),
        "weights": totWeights,
    }
    ### get list of processed lumi sections in data
    if isData:
        from DataFormats.FWLite import Lumis
        runsLumisDict = {}
        print fin.GetName()
        lumis = Lumis([fin.GetName()])
        delivered = recorded = 0
        for lum in lumis:
            runList = runsLumisDict.setdefault(lum.aux().run(), [])
            runList.append(lum.aux().id().luminosityBlock())
        output["lumis"] = runsLumisDict
    jf = open(jsonName, "w+")
    jf.write(json.dumps(output))
    jf.close()
    sys.exit(0)
def loop(model, m, nFiles):
    """Loop over nFiles files of a SUSY scan sample and fill/print
    diagnostic histograms.

    model -- sample name used to format the file paths (via fileBase)
    m     -- per-model configuration object (binning, nconfigs, and
             optionally scanTup = (("nameX", pdgId), ("nameY", pdgId)))
    nFiles -- number of files to process

    Returns (number of files in the list, number of events processed).
    """
    fileList = [fileBase.format(model, i) for i in range(1, nFiles + 1)]
    if len(fileList) == 0:
        raise ValueError("File list is empty!")
    plotScan = True
    if getattr(m, "scanTup", None) == None:
        print "[WARNING] You have to define scanTup=((\"nameX\",pdgId),(\"nameY\",pdgId)) for model {0} in order to plot the scan.".format(
            model)
        plotScan = False
    # Histograms gen particles
    histos = {}
    histos["susy_pdgId_1"] = r.TH1D(
        "h_susy_pdgId_1", "SUSY particle in the event (id < 1000000)", 40,
        0., 40.)
    histos["susy_pdgId_2"] = r.TH1D(
        "h_susy_pdgId_2", "SUSY particle in the event (id > 1000000)", 40,
        0., 40.)
    histos["susy_scan"] = r.TH2D("h2_susy_scan", "SUSY scan", m.nbinsx,
                                 m.xmin, m.xmax, m.nbinsy, m.ymin, m.ymax)
    histos["sm_pdgId"] = r.TH1D("h_sm_pdgId", "SM particles in the event",
                                40, 0., 40.)
    histos["sm_z_mass"] = r.TH1D("h_sm_z_mass", "Mass of the Z(*) boson",
                                 120, 0., 120.)
    histos["sm_w_mass"] = r.TH1D("h_sm_w_mass", "Mass of the W(*) boson",
                                 120, 0., 120.)
    # Histograms lumi-block
    histos["lumi_cfgId"] = r.TH1D("h_lumi_cfgId", "Config ID", m.nconfigs,
                                  0., float(m.nconfigs) + 0.5)
    nEvts = 0
    # Loop over files
    for file in fileList:
        print ' Running on file {0}'.format(file)
        if not os.path.exists(file):
            print "[WARNING] File {0} doesn't exist. Skipping".format(file)
            continue
        lumis = Lumis(file)
        events = Events(file)
        handle = Handle('std::vector<reco::GenParticle>')
        label = 'genParticles'
        handleHead = Handle('GenLumiInfoHeader')
        labelHead = ('generator')
        # Loop over lumi blocks: record which scan configuration each
        # lumi block was generated with.
        for i, lum in enumerate(lumis):
            lum.getByLabel(labelHead, handleHead)
            genHeader = handleHead.product()
            cfgId = genHeader.randomConfigIndex()
            histos["lumi_cfgId"].Fill(cfgId)
        # Loop over events
        for iev, ev in enumerate(events):
            nEvts += 1
            ev.getByLabel(label, handle)
            genps = handle.product()
            # print "Event n. ",iev
            # Loop over gen particles
            mX, mY = 0., 0.  # masses picked up for the 2D scan plot
            for p in genps:
                id = abs(p.pdgId())
                status = p.status()
                # Keep only hard-process (21-24) or final-state (1) particles.
                if (not status in range(21, 25) + [1]):
                    continue
                # SUSY particles
                if id > 1000000:
                    if id > 2000000:
                        idPruned = id - 2000000
                        histos["susy_pdgId_2"].Fill(idPruned)
                    else:
                        idPruned = id - 1000000
                        histos["susy_pdgId_1"].Fill(idPruned)
                    if plotScan:
                        # First matching particle of each scan axis wins.
                        if id == m.scanTup[0][1] and mX == 0.:
                            mX = p.mass()
                        elif id == m.scanTup[1][1] and mY == 0.:
                            mY = p.mass()
                # SM particles
                elif id in SM_ids:
                    histos["sm_pdgId"].Fill(id)
                    if id == 23 and status == 22:
                        histos["sm_z_mass"].Fill(p.mass())
                    if id == 24 and status == 22:
                        histos["sm_w_mass"].Fill(p.mass())
            # 2-D plot of the scan
            # NOTE(review): indentation was lost in this file; the axis
            # titles are kept with the Fill under the same `if plotScan`
            # as in the flattened original -- confirm against history.
            if plotScan:
                histos["susy_scan"].Fill(mX, mY)
                histos["susy_scan"].GetXaxis().SetTitle(m.scanTup[0][0])
                histos["susy_scan"].GetYaxis().SetTitle(m.scanTup[1][0])
    # Print plots
    for name, histo in histos.iteritems():
        printPlot(histo)
    return len(fileList), nEvts
def main():
    """Accumulate per-model gen-filter pass/total counts over 100 files
    and plot the filter efficiency per mass point (FilterEff.pdf/.png).

    Returns True on completion.
    """
    fileList = [fileBase.format(i) for i in range(1, 101)]
    modelDict = {}
    for file in fileList:
        print ' Inspecting file {0}'.format(file)
        lumis = Lumis(file)
        events = Events(file)
        handle = Handle('GenFilterInfo')
        label = ('genFilterEfficiencyProducer')
        handleHead = Handle('GenLumiInfoHeader')
        labelHead = ('generator')
        for i, lum in enumerate(lumis):
            lum.getByLabel(labelHead, handleHead)
            genHeader = handleHead.product()
            lum.getByLabel(label, handle)
            genFilter = handle.product()
            model = genHeader.configDescription()
            # Keep only the last two '_'-separated tokens of the config
            # description -- presumably the mass pair; confirm format.
            modelShort = "_".join(model.split("_")[-2:])
            if not modelShort in modelDict.keys():
                modelDict[modelShort] = {}
                modelDict[modelShort]["pass"] = float(
                    genFilter.numEventsPassed())
                modelDict[modelShort]["total"] = float(
                    genFilter.numEventsTotal())
            else:
                modelDict[modelShort]["pass"] += float(
                    genFilter.numEventsPassed())
                modelDict[modelShort]["total"] += float(
                    genFilter.numEventsTotal())
    oModelDict = OrderedDict(sorted(modelDict.items()))
    print "Total mass points: ", len(oModelDict.keys())
    hFilterEff = r.TH1D('hFilterEff', 'Gen filter efficiency',
                        len(oModelDict.keys()), 0.,
                        float(len(oModelDict.keys())))
    count = 1
    evtsTotAll = 0
    evtsPassAll = 0
    for model, evtsDict in oModelDict.iteritems():
        eff = evtsDict["pass"] / evtsDict["total"]
        evtsTotAll += evtsDict["total"]
        evtsPassAll += evtsDict["pass"]
        hFilterEff.GetXaxis().SetBinLabel(count, model)
        hFilterEff.SetBinContent(count, eff)
        count += 1
    print "Average efficiency: {0:.2f}".format(evtsPassAll / evtsTotAll)
    c = r.TCanvas("c", "c")
    c.cd()
    hFilterEff.Draw()
    c.SaveAs("FilterEff.pdf")
    c.SaveAs("FilterEff.png")
    return True
def main(): fileList = [] # .txt filename where you saved the samples files from dasgoclient samplesFileName = "T2tt_" + YEAR + ".txt" number_of_lines = len(open(samplesFileName).readlines()) samplesFile = open(samplesFileName) samples = samplesFile.readlines() for sample in samples: # Change servers in case the script fails in a specific file #fileList += ["root://cms-xrd-global.cern.ch/"+sample.replace('\n', '')] fileList += ["root://xrootd-cms.infn.it/" + sample.replace('\n', '')] #print fileList #fileList = [fileBase.format(i) for i in range(1,101)] # for samples saved locally #fileList = [fileBase] # in case you're using only 1 file remotely, good to debug modelDict = {} prog = 0.0 for file in fileList: print ' Inspecting file {0}'.format(file) print "Progress: %.2f" % (prog / number_of_lines * 100) + " %" lumis = Lumis(file) events = Events(file) handle = Handle('GenFilterInfo') label = ('genFilterEfficiencyProducer') handleHead = Handle('GenLumiInfoHeader') labelHead = ('generator') for i, lum in enumerate(lumis): lum.getByLabel(labelHead, handleHead) genHeader = handleHead.product() lum.getByLabel(label, handle) genFilter = handle.product() model = genHeader.configDescription() modelShort = "_".join(model.split("_")[-2:]) if not modelShort in modelDict.keys(): modelDict[modelShort] = {} modelDict[modelShort]["pass"] = float( genFilter.numEventsPassed()) modelDict[modelShort]["total"] = float( genFilter.numEventsTotal()) else: modelDict[modelShort]["pass"] += float( genFilter.numEventsPassed()) modelDict[modelShort]["total"] += float( genFilter.numEventsTotal()) prog += 1 oModelDict = OrderedDict(sorted(modelDict.items())) print "Total mass points: ", len(oModelDict.keys()) hFilterEff = r.TH1D('hFilterEff', 'Gen filter efficiency', len(oModelDict.keys()), 0., float(len(oModelDict.keys()))) count = 1 evtsTotAll = 0 evtsPassAll = 0 for model, evtsDict in oModelDict.iteritems(): eff = evtsDict["pass"] / evtsDict["total"] evtsTotAll += evtsDict["total"] 
evtsPassAll += evtsDict["pass"] # print "{0} {1:.2f}".format(model,eff) hFilterEff.GetXaxis().SetBinLabel(count, model) hFilterEff.SetBinContent(count, eff) count += 1 print "Average efficiency: {0:.2f}".format(evtsPassAll / evtsTotAll) c = r.TCanvas("c", "c") c.cd() hFilterEff.Draw() c.SaveAs("FilterEff_" + YEAR + ".pdf") c.SaveAs("FilterEff_" + YEAR + ".png") hfile = r.TFile(hfilename, 'RECREATE') # Save the effs hist in root format # You can later access the effs by: hFilterEff->GetBinContent(hFilterEff->GetXaxis()->FindBin("375_315")) hFilterEff.Write() return True
# Register an extra command-line option limiting how many dataset files
# are read, then open the selected files as luminosity blocks.
options.register(
    'maxFiles',
    -1,  # default value
    VarParsing.multiplicity.singleton,  # singleton or list
    VarParsing.varType.int,  # string, int, or float
    "The maximum number of files to read")
options.parseArguments()
files = getFiles([options.datasetName], 'wreece', 'susy_tree_CMG_[0-9]+.root')
if options.maxFiles > 0:
    options.inputFiles = files[0:options.maxFiles]
else:
    options.inputFiles = files
lumis = Lumis(options)
# Running totals of the SIM and PAT filter statistics.
totalSimInput = 0
totalSimOutput = 0
totalPatInput = 0
totalPatOutput = 0
simFilterInfoH = Handle('GenFilterInfo')
patFilterInfoH = Handle('GenFilterInfo')
weights = []
# NOTE(review): the body of this loop appears to continue beyond this
# chunk of the file.
for lumi in lumis:
    lumi.getByLabel(('genFilterEfficiencyProducer', '', 'SIM'),
                    simFilterInfoH)
    lumi.getByLabel(('preSelFilterEfficiencyProducer', '', 'PAT'),
                    patFilterInfoH)
def main(args):
    """Inspect a FLASHgg (micro)AOD file and write a JSON summary.

    args[0] is the input file (tried through two xrootd prefixes for
    "/store" paths), args[1] the output JSON name. Optionally args[4] is
    a comma-separated list of LHE weight ids to sum and args[5] the LHE
    product branch name. For MC, totals come from the merged FLASHgg
    lumi-block counters (with an Events-tree fallback); for data the
    processed (run, lumi) sections are recorded. Exits with 1 on open
    failure, 0 on success.
    """
    #print "fggCheckFile", args
    import ROOT
    fName = args[0]
    jsonName = args[1]
    if fName.startswith("/store"):
        prepend = [
            "root://cms-xrd-global.cern.ch/", "root://eoscms.cern.ch//eos/cms"
        ]
    else:
        prepend = [""]
    # Try each prefix until the file opens cleanly.
    for pp in prepend:
        fin = ROOT.TFile.Open("%s/%s" % (pp, fName))
        if fin and not fin.IsZombie():
            break
    if not fin or fin.IsZombie():
        sys.exit(1)
    events = fin.Get("Events")
    isData = False
    if events.GetEntriesFast() > 0:
        events.GetEntry(0)
        isData = events.EventAuxiliary.isRealData()
    elif "Run20" in fName:  # try to guess from file name
        isData = True
    # Map weight index -> [weight id, accumulated relative weight sum].
    weights_included = {}
    LHE_Branch_Name = ""
    if len(args) > 4 and events.GetEntriesFast() > 0:
        weights_to_include_str = args[4]
        weights_to_include_names = weights_to_include_str.split(",")
        LHE_Branch_Name = args[5]
        events.GetEntry(0)
        weightNames = getattr(events, LHE_Branch_Name).weights()
        #weightNames = events.LHEEventProduct_source__LHEFile.weights()
        for i in range(0, weightNames.size()):
            if weightNames[i].id in weights_to_include_names:
                weights_included[i] = [weightNames[i].id, 0.0]
        print weights_included
    if not isData:
        if len(weights_included) > 0:
            # Sum each requested weight relative to originalXWGTUP().
            for weight_index in weights_included:
                events.Draw(
                    "1>>totWeights(1,0,2)",
                    "{1}.obj.weights_.wgt[{0:d}]/{1}.obj.originalXWGTUP()".
                    format(weight_index, LHE_Branch_Name), "goff")
                weights_included[weight_index][1] += ROOT.gDirectory.Get(
                    "totWeights").Integral()
        lumi = fin.Get("LuminosityBlocks")
        # Sum the merged FLASHgg event/weight counters over lumi blocks.
        ne = lumi.Draw(
            "1>>totEvents(1,0,2)",
            "edmMergeableCounter_eventCount__FLASHggMicroAOD.obj.value",
            "goff")
        ne = lumi.Draw(
            "1>>totWeights(1,0,2)",
            "edmMergeableDouble_weightsCount_totalWeight_FLASHggMicroAOD.obj.value",
            "goff")
        if ne > 0:
            totEvents = ROOT.gDirectory.Get("totEvents").Integral()
            totWeights = ROOT.gDirectory.Get("totWeights").Integral()
        else:
            # Counters absent: fall back to the Events tree.
            totEvents = events.GetEntries()
            ne = events.Draw(
                "1>>totWeights(1,0,2)",
                "GenEventInfoProduct_generator__GEN.obj.weights_[0]",
                "goff")
            if ne < 0:
                ne = events.Draw(
                    "1>>totWeights(1,0,2)",
                    "GenEventInfoProduct_generator__SIM.obj.weights_[0]",
                    "goff")
            if ne > 0:
                totWeights = ROOT.gDirectory.Get("totWeights").Integral()
            else:
                totWeights = 0.
        # True-pileup histogram stored as a mergeable histo in the lumi tree.
        lumi.GetEntry(0)
        minpu = lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.min
        maxpu = lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.max
        npu = lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.values.size(
        )
        print minpu, maxpu, npu
        puhisto = [0.] * npu
        # NOTE(review): this loop never calls lumi.GetEntry(il), so it
        # accumulates entry 0's values GetEntries() times -- confirm
        # whether a GetEntry(il) is missing here.
        for il in xrange(lumi.GetEntries()):
            for ibin in xrange(npu):
                puhisto[
                    ibin] += lumi.floatedmMergeableHisto_weightsCount_truePileup_FLASHggMicroAOD.values[
                        ibin]
        print puhisto
        tot = sum(puhisto)
        print tot
        print map(lambda x: float(x) / float(tot), puhisto)
    else:
        totEvents = events.GetEntriesFast()
        totWeights = totEvents
    output = {
        "events": int(events.GetEntriesFast()),
        "totEvents": int(totEvents),
        "weights": totWeights
    }
    PrintWeightsList(weights_included, output)
    ### get list of processed lumi sections in data
    if isData:
        from DataFormats.FWLite import Lumis
        runsLumisDict = {}
        print fin.GetName()
        lumis = Lumis([fin.GetName()])
        delivered = recorded = 0
        for lum in lumis:
            runList = runsLumisDict.setdefault(lum.aux().run(), [])
            runList.append(lum.aux().id().luminosityBlock())
        output["lumis"] = runsLumisDict
    jf = open(jsonName, "w+")
    jf.write(json.dumps(output))
    jf.close()
    sys.exit(0)
sys.argv = oldargv # load FWLite C++ librarie ROOT.gSystem.Load("libFWCoreFWLite.so") ROOT.gSystem.Load("libDataFormatsFWLite.so") ROOT.gSystem.Load("libDataFormatsEcalDetId.so") ROOT.gSystem.Load("libPhiSymEcalCalibDataFormats.so") ROOT.AutoLibraryLoader.enable() # load FWlite python libraries from DataFormats.FWLite import Handle, Events, Lumis # open file (you can use 'edmFileUtil -d /store/whatever.root' to get the physical file name) #lumis = Lumis("file:phisym.root") lumis = Lumis( "root://xrootd-cms.infn.it//store/user/spigazzi/AlCaPhiSym/crab_PHISYM-CMSSW_741-weights-GR_P_V56-Run2015B_v1/150714_150558/0000/phisym_weights_1lumis_13.root" ) handlePhiSymInfo = Handle("std::vector<PhiSymInfo>") handlePhiSymRecHitsEB = Handle("std::vector<PhiSymRecHit>") handlePhiSymRecHitsEE = Handle("std::vector<PhiSymRecHit>") labelPhiSymInfo = ("PhiSymProducer") labelPhiSymRecHitsEB = ("PhiSymProducer", "EB") labelPhiSymRecHitsEE = ("PhiSymProducer", "EE") histos = {} histos["EB_OccupancyMap"] = ROOT.TH2F("EB_OccupancyMap", "EB_OccupancyMap", 360, 0.5, 360.5, 171, -85.5, 85.5) histos["EB_EtMap"] = ROOT.TH2F("EB_EtMap", "EB_EtMap", 360, 0.5, 360.5, 171, -85.5, 85.5)
(options, args) = parser.parse_args() # put this here after parsing the arguments since ROOT likes to # grab command line arguments even when it shouldn't. from DataFormats.FWLite import Lumis, Handle if not args: raise RuntimeError, "Must provide at least one input file" # do we want to get the luminosity summary? if options.intLumi: handle = Handle('LumiSummary') label = ('lumiProducer') else: handle, lable = None, None runsLumisDict = {} lumis = Lumis(args) delivered = recorded = 0 for lum in lumis: runList = runsLumisDict.setdefault(lum.aux().run(), []) runList.append(lum.aux().id().luminosityBlock()) # get the summary and keep track of the totals if options.intLumi: lum.getByLabel(label, handle) summary = handle.product() delivered += summary.avgInsDelLumi() recorded += summary.avgInsRecLumi() # print out lumi sections in JSON format jsonList = LumiList(runsAndLumis=runsLumisDict) if options.output: jsonList.writeJSON(options.output)
#maxEvents=100 sample.files = sample.files[:1] # output directory directory = os.path.join(args.targetDir, 'signalWeights', sub_directory) output_directory = os.path.join(directory, sample.name) if not os.path.exists(output_directory): os.makedirs(output_directory) logger.info("Created output directory %s", output_directory) variables = ["run/I", "luminosityBlock/I", "event/l"] variables += ["LHE_weight_original/F", "LHE[weight/F]"] # read weight names lumis = Lumis(sample.files) genLumiHandle = Handle('GenLumiInfoHeader') LHEWeightsNames = [] for lumi in lumis: if lumi.getByLabel('generator', genLumiHandle): weightNames = genLumiHandle.product().weightNames() for wn in weightNames: #direct cast is not working properly, copy of elements is needed LHEWeightsNames.append(wn) break # Run only job number "args.job" from total of "args.nJobs" if args.nJobs > 1: n_files_before = len(sample.files) sample = sample.split(args.nJobs)[args.job] n_files_after = len(sample.files) logger.info("Running job %i/%i over %i files from a total of %i.",