def tauPt(h, name, rebin=1, ratio=False, opts={}, opts2={}):
    """Draw the tau-jet pT distribution on a logarithmic scale.

    h     -- plot object (wraps a histogram manager)
    name  -- base name for the output canvas ("_log" is appended)
    rebin -- rebinning factor applied before drawing (no-op if <= 1)
    ratio -- if True, add a data/MC ratio pane below the main frame
    opts  -- overrides for the main frame options
    opts2 -- overrides for the ratio frame options

    NOTE: callers must not mutate opts/opts2 defaults; they are only read
    here (copied into local dicts), so the shared-default pitfall is benign,
    but pass explicit dicts when in doubt.
    """
    if rebin > 1:
        h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "p_{T}^{#tau jet} (GeV/c)"
    ylabel = "Events / %.0f GeV/c" % h.binWidth()
    # Apply the embedding normalization before stacking the MC.
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    # h.addMCUncertainty()
    # Defaults first, then caller overrides win.
    _opts = {"ymin": 0.01, "ymaxfactor": 2}
    _opts2 = {"ymin": 0.5, "ymax": 1.5}
    _opts.update(opts)
    _opts2.update(opts2)
    name = name + "_log"
    if ratio:
        h.createFrameFraction(name, opts=_opts, opts2=_opts2)
    else:
        h.createFrame(name, opts=_opts)
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def tauPt(h, name, rebin=1, ratio=False, opts=None, opts2=None):
    """Draw the tau-jet pT distribution on a logarithmic scale.

    h     -- plot object (wraps a histogram manager)
    name  -- base name for the output canvas ("_log" is appended)
    rebin -- rebinning factor applied before drawing (no-op if <= 1)
    ratio -- if True, add a data/MC ratio pane below the main frame
    opts  -- optional overrides for the main frame options
    opts2 -- optional overrides for the ratio frame options
    """
    # None sentinels instead of mutable {} defaults (shared across calls).
    if opts is None:
        opts = {}
    if opts2 is None:
        opts2 = {}
    if rebin > 1:
        h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "p_{T}^{#tau jet} (GeV/c)"
    ylabel = "Events / %.0f GeV/c" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    # h.addMCUncertainty()
    # Defaults first, caller overrides win.
    _opts = {"ymin": 0.01, "ymaxfactor": 2}
    _opts2 = {"ymin": 0.5, "ymax": 1.5}
    _opts.update(opts)
    _opts2.update(opts2)
    name = name + "_log"
    if ratio:
        h.createFrameFraction(name, opts=_opts, opts2=_opts2)
    else:
        h.createFrame(name, opts=_opts)
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def jetPt(h, name, rebin=2, ratio=True):
    """Draw the jet (or b-jet) pT distribution on a logarithmic scale.

    h     -- plot object
    name  -- base canvas name; "bjet" in the name switches the x label
    rebin -- rebinning factor
    ratio -- if True, add a data/MC ratio pane
    """
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    particle = "jet"
    if "bjet" in name:
        particle = "bjet"
    xlabel = "p_{T}^{%s} (GeV/c)" % particle
    # Space after '/' for consistency with the other plot labels in this file.
    ylabel = "Events / %.0f GeV/c" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    opts = {"ymin": 0.01, "ymaxfactor": 10}
    opts2 = {"ymin": 0.5, "ymax": 1.5}
    name = name + "_log"
    if ratio:
        h.createFrameFraction(name, opts=opts, opts2=opts2)
    else:
        h.createFrame(name, opts=opts)
    h.getPad().SetLogy(True)
    # Legend intentionally omitted (matches the original plot style).
    common(h, xlabel, ylabel)
def met(h, rebin=5, ratio=True, opts=None, opts2=None):
    """Draw the MET distribution on a logarithmic scale.

    The canvas name is derived from the histogram path via flipName();
    "embedding"/"original" in that name prefixes the x-axis label.

    h     -- plot object
    rebin -- rebinning factor
    ratio -- if True, add a data/MC ratio pane
    opts  -- optional overrides for the main frame options
    opts2 -- optional overrides for the ratio frame options
    """
    # None sentinels instead of mutable {} defaults (shared across calls).
    if opts is None:
        opts = {}
    if opts2 is None:
        opts2 = {}
    name = flipName(h.getRootHistoPath())
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "MET (GeV)"
    if "embedding" in name:
        xlabel = "Embedded " + xlabel
    elif "original" in name:
        xlabel = "Original " + xlabel
    ylabel = "Events / %.0f GeV" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    # Defaults first, caller overrides win.
    _opts = {"ymin": 0.001, "ymaxfactor": 2}
    _opts2 = {"ymin": 0.5, "ymax": 1.5}
    _opts.update(opts)
    _opts2.update(opts2)
    name = name + "_log"
    if ratio:
        h.createFrameFraction(name, opts=_opts, opts2=_opts2)
    else:
        h.createFrame(name, opts=_opts)
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def met(h, rebin=5, ratio=True, opts=None, opts2=None):
    """Draw the MET distribution (log scale); see the sibling met() above.

    h     -- plot object
    rebin -- rebinning factor
    ratio -- if True, add a data/MC ratio pane
    opts  -- optional overrides for the main frame options
    opts2 -- optional overrides for the ratio frame options
    """
    # Avoid the mutable-default-argument pitfall.
    opts = {} if opts is None else opts
    opts2 = {} if opts2 is None else opts2
    name = flipName(h.getRootHistoPath())
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "MET (GeV)"
    if "embedding" in name:
        xlabel = "Embedded " + xlabel
    elif "original" in name:
        xlabel = "Original " + xlabel
    ylabel = "Events / %.0f GeV" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    _opts = {"ymin": 0.001, "ymaxfactor": 2}
    _opts2 = {"ymin": 0.5, "ymax": 1.5}
    _opts.update(opts)
    _opts2.update(opts2)
    name = name + "_log"
    if ratio:
        h.createFrameFraction(name, opts=_opts, opts2=_opts2)
    else:
        h.createFrame(name, opts=_opts)
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def jetPt(h, name, rebin=2, ratio=True):
    """Draw the jet (or b-jet) pT distribution on a logarithmic scale.

    h     -- plot object
    name  -- base canvas name; "bjet" in the name switches the x label
    rebin -- rebinning factor
    ratio -- if True, add a data/MC ratio pane
    """
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    particle = "bjet" if "bjet" in name else "jet"
    xlabel = "p_{T}^{%s} (GeV/c)" % particle
    # Space after '/' for consistency with the other plot labels in this file.
    ylabel = "Events / %.0f GeV/c" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    frameOpts = {"ymin": 0.01, "ymaxfactor": 10}
    ratioOpts = {"ymin": 0.5, "ymax": 1.5}
    name = name + "_log"
    if ratio:
        h.createFrameFraction(name, opts=frameOpts, opts2=ratioOpts)
    else:
        h.createFrame(name, opts=frameOpts)
    h.getPad().SetLogy(True)
    # Legend intentionally omitted (matches the original plot style).
    common(h, xlabel, ylabel)
def doCounters(datasets):
    """Build the main counter table and return its deltaPhi<160 row.

    Uses the module-level analysisEmb/counters/mcEvents/onlyWjets/lumi
    configuration globals.
    """
    eventCounter = counter.EventCounter(datasets, counters=analysisEmb + counters)
    if not mcEvents:
        # Normalize MC either to a fixed luminosity (W+jets-only mode)
        # or to the data luminosity, then apply embedding normalization.
        if onlyWjets:
            eventCounter.normalizeMCToLuminosity(lumi)
        else:
            eventCounter.normalizeMCByLuminosity()
        tauEmbedding.scaleNormalization(eventCounter)
    mainTable = eventCounter.getMainCounterTable()
    ewkDatasets = ["WJets", "TTJets", "DYJetsToLL", "SingleTop", "Diboson"]

    def ewkSum(table):
        # Insert a summed EWK MC column right after the first column.
        columns = [table.getColumn(name=dsName) for dsName in ewkDatasets]
        table.insertColumn(1, counter.sumColumn("EWKMCsum", columns))

    if not onlyWjets and not mcEvents:
        ewkSum(mainTable)
    return mainTable.getRow(name="deltaPhiTauMET<160")
def __init__(self, datasetsMany, scaleNormalization=True, *args, **kwargs):
    """Create one EventCounter per dataset manager in datasetsMany.

    Each counter is luminosity-normalized; embedding normalization is
    applied unless scaleNormalization is False.
    """
    self.eventCounters = []
    for manager in datasetsMany.datasetManagers:
        ec = counter.EventCounter(manager, *args, **kwargs)
        ec.normalizeMCToLuminosity(datasetsMany.getLuminosity())
        if scaleNormalization:
            tauEmbedding.scaleNormalization(ec)
        self.eventCounters.append(ec)
def __init__(self, datasetsMany, scaleNormalization=True, *args, **kwargs):
    """Build a per-trial list of EventCounters from datasetsMany.

    Every counter is normalized to the common luminosity; the embedding
    normalization step can be disabled via scaleNormalization=False.
    """
    counters_ = []
    for dsMgr in datasetsMany.datasetManagers:
        eventCounter = counter.EventCounter(dsMgr, *args, **kwargs)
        eventCounter.normalizeMCToLuminosity(datasetsMany.getLuminosity())
        if scaleNormalization:
            tauEmbedding.scaleNormalization(eventCounter)
        counters_.append(eventCounter)
    self.eventCounters = counters_
def getHistograms(self, datasetName, name):
    """Collect the named histogram from every trial dataset manager.

    Returns a list of ROOT histograms, one per trial, renamed
    "Trial 1", "Trial 2", ...
    """
    collected = []
    for index, manager in enumerate(self.datasetManagers):
        ds = manager.getDataset(datasetName)
        drh = ds.getDatasetRootHisto(name)
        if drh.isMC() and self.normalizeMCByLuminosity:
            drh.normalizeToLuminosity(self.lumi)
        # Wrapping is only needed so scaleNormalization() can be applied.
        wrapped = histograms.HistoWithDataset(ds, drh.getHistogram(), "dummy")
        tauEmbedding.scaleNormalization(wrapped)
        rootHisto = wrapped.getRootHisto()
        rootHisto.SetName("Trial %d" % (index + 1))
        collected.append(rootHisto)
    return collected  # list of individual histograms
def getHistograms(self, datasetName, name):
    """Return one ROOT histogram per trial for the given dataset/plot name.

    Histograms are luminosity-normalized (MC only, when enabled),
    embedding-scaled, and renamed "Trial N".
    """
    result = []
    trial = 0
    for dm in self.datasetManagers:
        trial += 1
        dset = dm.getDataset(datasetName)
        histo = dset.getDatasetRootHisto(name)
        if histo.isMC() and self.normalizeMCByLuminosity:
            histo.normalizeToLuminosity(self.lumi)
        # The wrapper exists only so scaleNormalization() can operate on it.
        wrapper = histograms.HistoWithDataset(dset, histo.getHistogram(), "dummy")
        tauEmbedding.scaleNormalization(wrapper)
        th1 = wrapper.getRootHisto()
        th1.SetName("Trial %d" % trial)
        result.append(th1)
    return result  # list of individual histograms
def drawPlot(h, name, xlabel, ylabel="Events / %.0f GeV/c", rebin=1, log=True,
             addMCUncertainty=True, ratio=True, opts=None, opts2=None,
             moveLegend=None, normalize=True, cutLine=None, cutBox=None,
             function=None):
    """Generic plot drawer.

    h                -- plot object
    name             -- canvas name ("_log" appended when log=True)
    xlabel, ylabel   -- axis labels; a '%' in ylabel is filled with bin width
    rebin            -- rebinning factor (no-op if <= 1)
    log              -- logarithmic y axis
    addMCUncertainty -- draw the stacked-MC uncertainty band
    ratio            -- add a data/MC ratio pane
    opts, opts2      -- optional frame/ratio option overrides
    moveLegend       -- optional kwargs forwarded to histograms.moveLegend
    normalize        -- apply embedding normalization
    cutLine, cutBox  -- value or list of values/kwarg-dicts for cut markers
    function         -- optional callable applied to h before finishing
    """
    # None sentinels instead of mutable {} defaults (shared across calls).
    if opts is None:
        opts = {}
    if opts2 is None:
        opts2 = {}
    if moveLegend is None:
        moveLegend = {}
    if rebin > 1:
        h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    ylab = ylabel
    if "%" in ylabel:
        ylab = ylabel % h.binWidth()
    if normalize:
        tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    if addMCUncertainty:
        h.addMCUncertainty()
    _opts = {"ymin": 0.01, "ymaxfactor": 2}
    if not log:
        _opts["ymin"] = 0
        _opts["ymaxfactor"] = 2
    _opts2 = {"ymin": 0.5, "ymax": 1.5}
    _opts.update(opts)
    _opts2.update(opts2)
    if log:
        name = name + "_log"
    h.createFrame(name, createRatio=ratio, opts=_opts, opts2=_opts2)
    h.getPad().SetLogy(log)
    h.setLegend(histograms.moveLegend(histograms.createLegend(), **moveLegend))
    # Add cut line and/or box; scalars are promoted to one-element lists.
    if cutLine is not None:
        lines = cutLine if isinstance(cutLine, list) else [cutLine]
        for line in lines:
            h.addCutBoxAndLine(line, box=False, line=True)
    if cutBox is not None:
        boxes = cutBox if isinstance(cutBox, list) else [cutBox]
        for box in boxes:
            h.addCutBoxAndLine(**box)
    if function is not None:
        function(h)
    common(h, xlabel, ylab)
def leadingTrack(h, rebin=5, ratio=True):
    """Draw the leading-track pT distribution on a logarithmic scale.

    NOTE(review): the ratio argument is accepted but currently unused;
    a plain frame is always created.
    """
    name = flipName(h.getRootHistoPath())
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "p_{T}^{leading track} (GeV/c)"
    ylabel = "Events / %.0f GeV/c" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    frameOpts = {"ymin": 0.01, "ymaxfactor": 2}
    name += "_log"
    h.createFrame(name, opts=frameOpts)
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def doCounters(datasets):
    """Return the "deltaPhiTauMET<160" row of the main counter table.

    Relies on the module-level analysisEmb/counters/mcEvents/onlyWjets/lumi
    configuration globals.
    """
    eventCounter = counter.EventCounter(datasets, counters=analysisEmb + counters)
    if not mcEvents:
        # Pick the normalization mode, then apply embedding scaling.
        if onlyWjets:
            eventCounter.normalizeMCToLuminosity(lumi)
        else:
            eventCounter.normalizeMCByLuminosity()
        tauEmbedding.scaleNormalization(eventCounter)
    mainTable = eventCounter.getMainCounterTable()
    ewkDatasets = ["WJets", "TTJets", "DYJetsToLL", "SingleTop", "Diboson"]

    def ewkSum(table):
        # Summed EWK MC column, inserted right after the first column.
        table.insertColumn(1, counter.sumColumn(
            "EWKMCsum",
            [table.getColumn(name=name) for name in ewkDatasets]))

    if not onlyWjets and not mcEvents:
        ewkSum(mainTable)
    return mainTable.getRow(name="deltaPhiTauMET<160")
def leadingTrack(h, rebin=5, ratio=True):
    """Draw the leading-track pT (log scale).

    NOTE(review): ratio is currently ignored; only a plain frame is drawn.
    """
    canvasName = flipName(h.getRootHistoPath()) + "_log"
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "p_{T}^{leading track} (GeV/c)"
    ylabel = "Events / %.0f GeV/c" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    h.createFrame(canvasName, opts={"ymin": 0.01, "ymaxfactor": 2})
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def deltaPhi(h, rebin=40):
    """Draw #Delta#phi(tau-or-mu, MET) on a linear scale."""
    name = flipName(h.getRootHistoPath())
    # "Original" histograms are muon-level; embedded ones are tau-level.
    particle = "#mu" if "Original" in name else "#tau jet"
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "#Delta#phi(%s, MET) (rad)" % particle
    ylabel = "Events / %.2f rad" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    h.createFrame(name)
    h.setLegend(histograms.createLegend(0.2, 0.6, 0.4, 0.9))
    common(h, xlabel, ylabel)
def deltaPhi(h, rebin=40):
    """Plot the azimuthal angle between the tau jet (or muon) and MET."""
    canvasName = flipName(h.getRootHistoPath())
    particle = "#tau jet"
    if "Original" in canvasName:
        # Pre-embedding histograms are made with the muon.
        particle = "#mu"
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    h.createFrame(canvasName)
    h.setLegend(histograms.createLegend(0.2, 0.6, 0.4, 0.9))
    common(h,
           "#Delta#phi(%s, MET) (rad)" % particle,
           "Events / %.2f rad" % h.binWidth())
def tauCandPhi(h, name, rebin=1, ratio=True):
    """Draw the phi distribution of tau-jet candidates (log scale)."""
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "#phi^{#tau-jet candidate}"
    ylabel = "Events / %.2f" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    # h.addMCUncertainty()
    frameOpts = {"ymin": 1.0, "ymaxfactor": 5}
    ratioOpts = {"ymin": 0.5, "ymax": 1.5}
    name += "_log"
    if ratio:
        h.createFrameFraction(name, opts=frameOpts, opts2=ratioOpts)
    else:
        h.createFrame(name, opts=frameOpts)
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def rtau(h, name, rebin=2, ratio=True):
    """Draw the R_tau distribution (log scale, no legend)."""
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "R_{#tau}"
    ylabel = "Events / %.2f" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    frameOpts = {"ymin": 0.01, "ymaxfactor": 10}
    ratioOpts = {"ymin": 0.5, "ymax": 1.5}
    name += "_log"
    if ratio:
        h.createFrameFraction(name, opts=frameOpts, opts2=ratioOpts)
    else:
        h.createFrame(name, opts=frameOpts)
    h.getPad().SetLogy(True)
    # Legend intentionally omitted (matches the original plot style).
    common(h, xlabel, ylabel)
def numberOfJets(h, name, rebin=1, ratio=True):
    """Draw the jet (or b-jet) multiplicity on a linear scale."""
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    particle = "bjet" if "Btagged" in name else "jet"
    xlabel = "Number of %ss" % particle
    ylabel = "Events / %.2f" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    frameOpts = {"ymin": 0.0, "ymaxfactor": 1.2}
    ratioOpts = {"ymin": 0.5, "ymax": 1.5}
    if ratio:
        h.createFrameFraction(name, opts=frameOpts, opts2=ratioOpts)
    else:
        h.createFrame(name, opts=frameOpts)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def numberOfJets(h, name, rebin=1, ratio=True):
    """Plot jet multiplicity; "Btagged" in the name selects the b-jet label."""
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    particle = "jet"
    if "Btagged" in name:
        particle = "bjet"
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    if ratio:
        h.createFrameFraction(name,
                              opts={"ymin": 0.0, "ymaxfactor": 1.2},
                              opts2={"ymin": 0.5, "ymax": 1.5})
    else:
        h.createFrame(name, opts={"ymin": 0.0, "ymaxfactor": 1.2})
    h.setLegend(histograms.createLegend())
    common(h, "Number of %ss" % particle, "Events / %.2f" % h.binWidth())
def muonEta(h, rebin=5, ratio=False):
    """Draw the muon eta distribution on a logarithmic scale."""
    name = flipName(h.getRootHistoPath())
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "#eta^{#mu}"
    ylabel = "Events"
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    frameOpts = {"ymin": 0.01, "ymaxfactor": 2}
    ratioOpts = {"ymin": 0.5, "ymax": 1.5}
    name += "_log"
    if ratio:
        h.createFrameFraction(name, opts=frameOpts, opts2=ratioOpts)
    else:
        h.createFrame(name, opts=frameOpts)
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, xlabel, ylabel)
def muonEta(h, rebin=5, ratio=False):
    """Plot the eta of the embedded muon (log-scale y axis)."""
    canvasName = flipName(h.getRootHistoPath()) + "_log"
    h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    if ratio:
        h.createFrameFraction(canvasName,
                              opts={"ymin": 0.01, "ymaxfactor": 2},
                              opts2={"ymin": 0.5, "ymax": 1.5})
    else:
        h.createFrame(canvasName, opts={"ymin": 0.01, "ymaxfactor": 2})
    h.getPad().SetLogy(True)
    h.setLegend(histograms.createLegend())
    common(h, "#eta^{#mu}", "Events")
def transverseMass(h, name, rebin=1, opts=None, opts_log=None, ratio=False):
    """Draw the transverse mass on both linear and logarithmic scales.

    h        -- plot object
    name     -- base canvas name (log version gets "_log" appended)
    rebin    -- rebinning factor (no-op if <= 1)
    opts     -- optional overrides for the linear frame options
    opts_log -- optional extra overrides for the log frame options
    ratio    -- if True, add a data/MC ratio pane to both frames
    """
    # None sentinels instead of mutable {} defaults (shared across calls).
    if opts is None:
        opts = {}
    if opts_log is None:
        opts_log = {}
    if rebin > 1:
        h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "m_{T}(#tau jet, E_{T}^{miss}) (GeV/c^{2})"
    ylabel = "Events / %.0f GeV/c^{2}" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    # Linear options, then log options layered on top of them.
    _opts = {"ymaxfactor": 1.5}
    _opts.update(opts)
    _opts_log = {"ymin": 1e-2, "ymaxfactor": 2}
    _opts_log.update(_opts)
    _opts_log.update(opts_log)
    _opts2 = {"ymin": 0, "ymax": 2}
    # Linear-scale frame.
    h.createFrame(name, opts=_opts, opts2=_opts2, createRatio=ratio)
    h.setLegend(histograms.createLegend())
    deltaPhi = "#Delta#phi(#tau jet, E_{T}^{miss})"
    coord = {"x": 0.5, "y": 0.55, "size": 20}
    # Annotate which delta-phi selection stage the histogram corresponds to.
    if "AfterBTagging" in name:
        histograms.addText(text="Without %s cut" % deltaPhi, **coord)
    elif "AfterDeltaPhi160" in name:
        histograms.addText(text="%s < 160^{#circ}" % deltaPhi, **coord)
    elif "AfterDeltaPhi130" in name:
        histograms.addText(text="%s < 130^{#circ}" % deltaPhi, **coord)
    common(h, xlabel, ylabel)
    # Log-scale frame.
    name += "_log"
    h.createFrame(name, opts=_opts_log, opts2=_opts2, createRatio=ratio)
    h.setLegend(histograms.createLegend())
    ROOT.gPad.SetLogy(True)
    common(h, xlabel, ylabel)
def transverseMass(h, name, rebin=1, opts=None, opts_log=None, ratio=False):
    """Draw m_T(tau jet, MET) on linear and log scales.

    opts/opts_log default to None rather than {} to avoid the shared
    mutable-default pitfall.
    """
    opts = {} if opts is None else opts
    opts_log = {} if opts_log is None else opts_log
    if rebin > 1:
        h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    xlabel = "m_{T}(#tau jet, E_{T}^{miss}) (GeV/c^{2})"
    ylabel = "Events / %.0f GeV/c^{2}" % h.binWidth()
    tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    h.addMCUncertainty()
    _opts = {"ymaxfactor": 1.5}
    _opts.update(opts)
    # Log options inherit linear overrides, then apply log-specific ones.
    _opts_log = {"ymin": 1e-2, "ymaxfactor": 2}
    _opts_log.update(_opts)
    _opts_log.update(opts_log)
    _opts2 = {"ymin": 0, "ymax": 2}
    h.createFrame(name, opts=_opts, opts2=_opts2, createRatio=ratio)
    h.setLegend(histograms.createLegend())
    deltaPhi = "#Delta#phi(#tau jet, E_{T}^{miss})"
    coord = {"x": 0.5, "y": 0.55, "size": 20}
    if "AfterBTagging" in name:
        histograms.addText(text="Without %s cut" % deltaPhi, **coord)
    elif "AfterDeltaPhi160" in name:
        histograms.addText(text="%s < 160^{#circ}" % deltaPhi, **coord)
    elif "AfterDeltaPhi130" in name:
        histograms.addText(text="%s < 130^{#circ}" % deltaPhi, **coord)
    common(h, xlabel, ylabel)
    name += "_log"
    h.createFrame(name, opts=_opts_log, opts2=_opts2, createRatio=ratio)
    h.setLegend(histograms.createLegend())
    ROOT.gPad.SetLogy(True)
    common(h, xlabel, ylabel)
def doCounters(datasetsEmb):
    """Print a combined event-count table for the embedded samples.

    Stitches together four counter tables (all embedded events, muon-eff +
    W->tau->mu corrected, pre-tau-trigger-eff, post-tau-trigger-eff) into a
    single table and prints it in TeX cell format.
    """
    # All embedded events
    eventCounterAll = counter.EventCounter(
        datasetsEmb.getFirstDatasetManager(),
        counters=analysisEmbAll + counters)
    eventCounterAll.normalizeMCByLuminosity()
    tableAll = eventCounterAll.getMainCounterTable()
    tableAll.keepOnlyRows([
        "All events",
    ])
    tableAll.renameRows({"All events": "All embedded events"})
    # Mu eff + Wtau mu
    eventCounterMuEff = counter.EventCounter(
        datasetsEmb.getFirstDatasetManager(),
        counters=analysisEmbNoTauEff + counters)
    eventCounterMuEff.normalizeMCByLuminosity()
    tauEmbedding.scaleNormalization(eventCounterMuEff)
    tableMuEff = eventCounterMuEff.getMainCounterTable()
    tableMuEff.keepOnlyRows(["All events"])
    tableMuEff.renameRows({"All events": "mu eff + Wtaumu"})
    # Event counts after embedding normalization, before tau trigger eff,
    # switch to calculate uncertainties of the mean of 10 trials
    eventCounterNoTauEff = tauEmbedding.EventCounterMany(
        datasetsEmb, counters=analysisEmbNoTauEff + counters)
    tableNoTauEff = eventCounterNoTauEff.getMainCounterTable()
    tableNoTauEff.keepOnlyRows([
        "Trigger and HLT_MET cut",
        "njets",
    ])
    tableNoTauEff.renameRows({
        "Trigger and HLT_MET cut": "caloMET > 60",
        "njets": "tau ID"
    })
    # Event counts after tau trigger eff
    eventCounter = tauEmbedding.EventCounterMany(datasetsEmb,
                                                 counters=analysisEmb + counters)
    table = eventCounter.getMainCounterTable()
    table.keepOnlyRows([
        "njets", "MET", "btagging scale factor", "deltaPhiTauMET<160",
        "deltaPhiTauMET<130"
    ])
    table.renameRows({
        "njets": "Tau trigger efficiency",
        "btagging scale factor": "b tagging"
    })
    # Combine the rows to one table
    result = counter.CounterTable()
    for tbl in [tableAll, tableMuEff, tableNoTauEff, table]:
        for iRow in xrange(tbl.getNrows()):
            result.appendRow(tbl.getRow(index=iRow))
    addMcSum(result)
    cellFormat = counter.TableFormatText(
        counter.CellFormatTeX(valueFormat='%.4f', withPrecision=2))
    print result.format(cellFormat)
def drawPlot(h, name, xlabel, ylabel="Events / %.0f GeV/c", rebin=1, log=True,
             addMCUncertainty=True, ratio=True, opts=None, opts2=None,
             moveLegend=None, normalize=True, cutLine=None, cutBox=None,
             function=None):
    """Generic plot drawer (see parameter summary in the sibling drawPlot).

    Fixes: mutable {} defaults replaced by None sentinels, `!= None`
    comparisons replaced with `is not None`.
    """
    if opts is None:
        opts = {}
    if opts2 is None:
        opts2 = {}
    if moveLegend is None:
        moveLegend = {}
    if rebin > 1:
        h.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(rebin))
    # Fill the bin width into the y label only when a format hole exists.
    ylab = ylabel
    if "%" in ylabel:
        ylab = ylabel % h.binWidth()
    if normalize:
        tauEmbedding.scaleNormalization(h)
    h.stackMCHistograms()
    if addMCUncertainty:
        h.addMCUncertainty()
    _opts = {"ymin": 0.01, "ymaxfactor": 2}
    if not log:
        _opts["ymin"] = 0
        _opts["ymaxfactor"] = 2
    _opts2 = {"ymin": 0.5, "ymax": 1.5}
    _opts.update(opts)
    _opts2.update(opts2)
    if log:
        name = name + "_log"
    h.createFrame(name, createRatio=ratio, opts=_opts, opts2=_opts2)
    h.getPad().SetLogy(log)
    h.setLegend(histograms.moveLegend(histograms.createLegend(), **moveLegend))
    # Add cut line and/or box; scalar arguments become one-element lists.
    if cutLine is not None:
        lines = cutLine if isinstance(cutLine, list) else [cutLine]
        for line in lines:
            h.addCutBoxAndLine(line, box=False, line=True)
    if cutBox is not None:
        boxes = cutBox if isinstance(cutBox, list) else [cutBox]
        for box in boxes:
            h.addCutBoxAndLine(**box)
    if function is not None:
        function(h)
    common(h, xlabel, ylab)
def doCounters(datasetsEmb):
    """Print a combined embedded-event counter table.

    Merges four stages into one printed table: all embedded events,
    muon-eff + W->tau->mu corrected counts, counts before the tau trigger
    efficiency (mean over trials), and counts after it.
    """
    # All embedded events
    eventCounterAll = counter.EventCounter(datasetsEmb.getFirstDatasetManager(),
                                           counters=analysisEmbAll+counters)
    eventCounterAll.normalizeMCByLuminosity()
    tableAll = eventCounterAll.getMainCounterTable()
    tableAll.keepOnlyRows([
        "All events",
    ])
    tableAll.renameRows({"All events": "All embedded events"})
    # Mu eff + Wtau mu
    eventCounterMuEff = counter.EventCounter(datasetsEmb.getFirstDatasetManager(),
                                             counters=analysisEmbNoTauEff+counters)
    eventCounterMuEff.normalizeMCByLuminosity()
    tauEmbedding.scaleNormalization(eventCounterMuEff)
    tableMuEff = eventCounterMuEff.getMainCounterTable()
    tableMuEff.keepOnlyRows([
        "All events"
    ])
    tableMuEff.renameRows({"All events": "mu eff + Wtaumu"})
    # Event counts after embedding normalization, before tau trigger eff,
    # switch to calculate uncertainties of the mean of 10 trials
    eventCounterNoTauEff = tauEmbedding.EventCounterMany(datasetsEmb,
                                                         counters=analysisEmbNoTauEff+counters)
    tableNoTauEff = eventCounterNoTauEff.getMainCounterTable()
    tableNoTauEff.keepOnlyRows([
        "Trigger and HLT_MET cut",
        "njets",
    ])
    tableNoTauEff.renameRows({"Trigger and HLT_MET cut": "caloMET > 60",
                              "njets": "tau ID"
                              })
    # Event counts after tau trigger eff
    eventCounter = tauEmbedding.EventCounterMany(datasetsEmb,
                                                 counters=analysisEmb+counters)
    table = eventCounter.getMainCounterTable()
    table.keepOnlyRows([
        "njets",
        "MET",
        "btagging scale factor",
        "deltaPhiTauMET<160",
        "deltaPhiTauMET<130"
    ])
    table.renameRows({"njets": "Tau trigger efficiency",
                      "btagging scale factor": "b tagging"
                      })
    # Combine the rows to one table
    result = counter.CounterTable()
    for tbl in [tableAll, tableMuEff, tableNoTauEff, table]:
        for iRow in xrange(tbl.getNrows()):
            result.appendRow(tbl.getRow(index=iRow))
    addMcSum(result)
    cellFormat = counter.TableFormatText(counter.CellFormatTeX(valueFormat='%.4f',
                                                               withPrecision=2))
    print result.format(cellFormat)
def doCounters(datasets, mcLumi=None):
    """Print counter tables, per-counter efficiencies and trigger-weight
    uncertainties for the embedded analysis.

    datasets -- dataset manager
    mcLumi   -- if not None, normalize MC to this luminosity instead of
                normalizing by the data luminosity
    """
    createPlot = lambda name: createPlotCommon(name, datasets, mcLumi)
    eventCounter = counter.EventCounter(datasets, counters=countersWeighted)
    # Transverse-mass window selections combined into the tree-draw cuts.
    sels = [
        # "(sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))) < 20)",
        # "(20 < sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))))",
        "(sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))) < 80)",
        # "(80 < sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))))",
        "(sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))) < 120)",
        # "(120 < sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))))",
    ]
    tdCount = treeDraw.clone(weight=weightBTagging)
    tdCountMET = tdCount.clone(weight=weight, selection="&&".join(sels + [metCut]))
    tdCountBTagging = tdCount.clone(selection="&&".join(sels + [metCut, bTaggingCut]))
    tdCountDeltaPhi160 = tdCount.clone(
        selection="&&".join(sels + [metCut, bTaggingCut, deltaPhi160Cut]))
    tdCountDeltaPhi130 = tdCount.clone(
        selection="&&".join(sels + [metCut, bTaggingCut, deltaPhi130Cut]))
    tdCountDeltaPhi90 = tdCount.clone(
        selection="&&".join(sels + [metCut, bTaggingCut, deltaPhi90Cut]))
    # eventCounter.getMainCounter().appendRow("JetsForEffs", tdCount.clone(weight=weight, selection="&&".join(sels)))
    # eventCounter.getMainCounter().appendRow("METForEffs", tdCountMET)
    # eventCounter.getMainCounter().appendRow("BTagging", tdCountBTagging)
    # eventCounter.getMainCounter().appendRow("DeltaPhi < 160", tdCountDeltaPhi160)
    # eventCounter.getMainCounter().appendRow("DeltaPhi < 130", tdCountDeltaPhi130)
    # eventCounter.getMainCounter().appendRow("DeltaPhi < 90", tdCountDeltaPhi90)
    # Calo-MET cross-checks; td3 uses the HF-less variant except for the
    # listed run ranges, where the regular calo MET is used.
    td1 = tdCount.clone(selection=metCut + "&&" + bTaggingCut +
                        "&& (tecalometNoHF_p4.Pt() > 60)")
    td2 = tdCount.clone(selection=metCut + "&&" + bTaggingCut +
                        "&& (tecalomet_p4.Pt() > 60)")
    td3 = dataset.TreeDrawCompound(
        td1, {
            "SingleMu_Mu_170722-172619_Aug05": td2,
            "SingleMu_Mu_172620-173198_Prompt": td2,
            "SingleMu_Mu_173236-173692_Prompt": td2,
        })
    # eventCounter.getMainCounter().appendRow("BTagging+CaloMetNoHF", td1)
    # eventCounter.getMainCounter().appendRow("BTagging+CaloMet", td2)
    # eventCounter.getMainCounter().appendRow("BTagging+CaloMet(NoHF)", td3)
    if mcLumi != None:
        eventCounter.normalizeMCToLuminosity(mcLumi)
    else:
        eventCounter.normalizeMCByLuminosity()
    tauEmbedding.scaleNormalization(eventCounter)
    ewkDatasets = ["WJets", "TTJets", "DYJetsToLL", "SingleTop", "Diboson"]
    table = eventCounter.getMainCounterTable()
    mainTable = table
    #muonAnalysis.addSumColumn(table)
    #mainTable.insertColumn(2, counter.sumColumn("EWKMCsum", [mainTable.getColumn(name=name) for name in ewkDatasets]))
    #muonAnalysis.addDataMcRatioColumn(table)
    if datasets.hasDataset("EWKSignal"):
        # Fraction of the kept signal relative to the full EWK+signal sample.
        mainTable.insertColumn(
            7,
            counter.divideColumn(
                "SignalFraction",
                mainTable.getColumn(name="TTToHplus_" + keepSignal),
                mainTable.getColumn(name="EWKSignal")))
    datasets.printInfo()
    print "============================================================"
    print "Main counter (%s)" % eventCounter.getNormalizationString()
    cellFormat = counter.TableFormatText(
        counter.CellFormatTeX(valueFormat='%.3f'))
    print table.format(cellFormat)
    tauTable = eventCounter.getSubCounterTable(
        "TauIDPassedEvt::TauSelection_HPS")
    #muonAnalysis.addSumColumn(tauTable)
    tauTable.insertColumn(
        2,
        counter.sumColumn(
            "EWKMCsum",
            [tauTable.getColumn(name=name) for name in ewkDatasets]))
    print tauTable.format(cellFormat)
    # print eventCounter.getSubCounterTable("TauIDPassedJets::tauID_HPSTight").format()
    # table = eventCounter.getSubCounterTable("Trigger")
    # muonAnalysis.addSumColumn(table)
    # print table.format(cellFormat)
    mainTable.keepOnlyRows([
        "All events",
        "Trigger and HLT_MET cut",
        "taus == 1",
        # "trigger scale factor",
        "electron veto",
        "muon veto",
        "MET",
        "njets",
        "btagging",
        "btagging scale factor",
        "JetsForEffs",
        "METForEffs",
        "BTagging",
        "DeltaPhi < 160",
        "DeltaPhi < 130"
    ])
    tauTable.keepOnlyRows([
        "AllTauCandidates",
        "DecayModeFinding",
        "TauJetPt",
        "TauJetEta",
        "TauLdgTrackExists",
        "TauLdgTrackPtCut",
        "TauECALFiducialCutsCracksAndGap",
        "TauAgainstElectronCut",
        "TauAgainstMuonCut",
        #"EMFractionCut",
        "HPS",
        "TauOneProngCut",
        "TauRtauCut",
    ])
    #effFormat = counter.TableFormatText(counter.CellFormatText(valueFormat='%.4f'))
    effFormat = counter.TableFormatText(
        counter.CellFormatTeX(valueFormat='%.4f'))
    #effFormat = counter.TableFormatConTeXtTABLE(counter.CellFormatTeX(valueFormat='%.4f'))
    # Per-row efficiencies for data and the summed EWK MC.
    for name, table in [("Main", mainTable), ("Tau ID", tauTable)]:
        effTable = counter.CounterTable()
        col = table.getColumn(name="Data")
        effTable.appendColumn(col)
        effTable.appendColumn(
            counter.efficiencyColumn(col.getName() + " eff", col))
        col = table.getColumn(name="EWKMCsum")
        effTable.appendColumn(col)
        effTable.appendColumn(
            counter.efficiencyColumn(col.getName() + " eff", col))
        print "%s counter efficiencies" % name
        print effTable.format(effFormat)
    print "Trigger uncertainties"
    bins = [40, 50, 60, 80]
    tauPtPrototype = ROOT.TH1F("tauPtTrigger", "Tau pt",
                               len(bins) - 1, array.array("d", bins))
    runs = [
        "(160431 <= run && run <= 167913)",
        "(170722 <= run && run <= 173198)",
        "(173236 <= run && run <= 173692)",
        #"(160431 <= run && run <= 173692)",
    ]
    # Sum the trigger-weight uncertainty in quadrature over the run ranges.
    for name, td in [("BTagging", tdCountBTagging),
                     ("DeltaPhi160", tdCountDeltaPhi160),
                     ("DeltaPhi130", tdCountDeltaPhi130),
                     ("DeltaPhi90", tdCountDeltaPhi90)]:
        t = td.clone(varexp="tau_p4.Pt() >>tauPtTrigger")
        NallSum = 0
        NSum = 0
        absUncSquareSum = 0
        for runRegion in runs:
            #neventsPlot = createPlot(dataset.treeDrawToNumEntries(t.clone(weight="weightTrigger")))
            #uncertaintyPlot = createPlot(dataset.treeDrawToNumEntries(t.clone(weight="weightTriggerAbsUnc*weightTriggerAbsUnc/(weightTrigger*weightTrigger)")))
            tmp = t.clone(selection=t.selection + "&&" + runRegion)
            nallPlot = createPlot(tmp.clone(weight=""))
            neventsPlot = createPlot(tmp.clone(weight="weightTrigger"))
            uncertaintyPlot = createPlot(
                tmp.clone(weight="weightTriggerAbsUnc"))
            th1all = nallPlot.histoMgr.getHisto("Data").getRootHisto()
            th1 = neventsPlot.histoMgr.getHisto("Data").getRootHisto()
            th12 = uncertaintyPlot.histoMgr.getHisto("Data").getRootHisto()
            # Integrals include under/overflow bins (0 .. N+1).
            Nall = th1all.Integral(0, th1all.GetNbinsX() + 1)
            N = th1.Integral(0, th1.GetNbinsX() + 1)
            #absSum2 = th12.Integral(0, th12.GetNbinsX()+1)
            #absUnc = math.sqrt(absSum2)
            #absUnc = th12.Integral(0, 2)
            NallSum += Nall
            NSum += N
            absUnc = tauEmbedding.squareSum(th12)
            absUncSquareSum += absUnc
            absUnc = math.sqrt(absUnc)
            relUnc = 0
            if N > 0:
                relUnc = absUnc / N
            print "%-15s for runs %s Nall = %.2f, N = %.2f, absolute uncertainty %.2f, relative uncertainty %.4f" % (
                name, runRegion, Nall, N, absUnc, relUnc)
        absUnc = math.sqrt(absUncSquareSum)
        relUnc = absUnc / NSum
        print "%-15s Nall = %.2f, N = %.2f, absolute uncertainty %.2f, relative uncertainty %.4f" % (
            name, NallSum, NSum, absUnc, relUnc)
        print
def doCounters(datasets, mcLumi=None):
    """Print counter tables, efficiencies and trigger-weight uncertainties.

    Variant of the sibling doCounters: the extra counter rows (JetsForEffs,
    BTagging, DeltaPhi cuts, calo-MET cross-checks) are appended here, the
    sum/ratio helper columns are active, and the tau sub-counter is
    "TauIDPassedEvt::tauID_HPSTight".

    datasets -- dataset manager
    mcLumi   -- if not None, normalize MC to this luminosity instead of
                normalizing by the data luminosity
    """
    createPlot = lambda name: createPlotCommon(name, datasets, mcLumi)
    eventCounter = counter.EventCounter(datasets, counters=countersWeighted)
    # Transverse-mass window selections combined into the tree-draw cuts.
    sels = [
        # "(sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))) < 20)",
        # "(20 < sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))))",
        "(sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))) < 80)",
        # "(80 < sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))))",
        "(sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))) < 120)",
        # "(120 < sqrt(2 * tau_p4.Pt() * met_p4.Et() * (1-cos(tau_p4.Phi()-met_p4.Phi()))))",
    ]
    tdCount = treeDraw.clone(weight=weightBTagging)
    tdCountMET = tdCount.clone(weight=weight, selection="&&".join(sels+[metCut]))
    tdCountBTagging = tdCount.clone(selection="&&".join(sels+[metCut, bTaggingCut]))
    tdCountDeltaPhi160 = tdCount.clone(selection="&&".join(sels+[metCut, bTaggingCut, deltaPhi160Cut]))
    tdCountDeltaPhi130 = tdCount.clone(selection="&&".join(sels+[metCut, bTaggingCut, deltaPhi130Cut]))
    tdCountDeltaPhi90 = tdCount.clone(selection="&&".join(sels+[metCut, bTaggingCut, deltaPhi90Cut]))
    # Append the cut-flow rows for the efficiency calculations below.
    eventCounter.getMainCounter().appendRow("JetsForEffs", tdCount.clone(weight=weight, selection="&&".join(sels)))
    eventCounter.getMainCounter().appendRow("METForEffs", tdCountMET)
    eventCounter.getMainCounter().appendRow("BTagging", tdCountBTagging)
    eventCounter.getMainCounter().appendRow("DeltaPhi < 160", tdCountDeltaPhi160)
    eventCounter.getMainCounter().appendRow("DeltaPhi < 130", tdCountDeltaPhi130)
    eventCounter.getMainCounter().appendRow("DeltaPhi < 90", tdCountDeltaPhi90)
    # Calo-MET cross-checks; td3 uses the HF-less variant except for the
    # listed run ranges, where the regular calo MET is used.
    td1 = tdCount.clone(selection=metCut+"&&"+bTaggingCut+"&& (tecalometNoHF_p4.Pt() > 60)")
    td2 = tdCount.clone(selection=metCut+"&&"+bTaggingCut+"&& (tecalomet_p4.Pt() > 60)")
    td3 = dataset.TreeDrawCompound(td1, {
        "SingleMu_Mu_170722-172619_Aug05": td2,
        "SingleMu_Mu_172620-173198_Prompt": td2,
        "SingleMu_Mu_173236-173692_Prompt": td2,
    })
    eventCounter.getMainCounter().appendRow("BTagging+CaloMetNoHF", td1)
    eventCounter.getMainCounter().appendRow("BTagging+CaloMet", td2)
    eventCounter.getMainCounter().appendRow("BTagging+CaloMet(NoHF)", td3)
    if mcLumi != None:
        eventCounter.normalizeMCToLuminosity(mcLumi)
    else:
        eventCounter.normalizeMCByLuminosity()
    tauEmbedding.scaleNormalization(eventCounter)
    ewkDatasets = [
        "WJets", "TTJets", "DYJetsToLL",
        "SingleTop", "Diboson"
    ]
    table = eventCounter.getMainCounterTable()
    mainTable = table
    muonAnalysis.addSumColumn(table)
    mainTable.insertColumn(2, counter.sumColumn("EWKMCsum", [mainTable.getColumn(name=name) for name in ewkDatasets]))
    # table = eventCounter.getSubCounterTable("Trigger")
    # muonAnalysis.reorderCounterTable(table)
    muonAnalysis.addDataMcRatioColumn(table)
    if datasets.hasDataset("EWKSignal"):
        # Fraction of the kept signal relative to the full EWK+signal sample.
        mainTable.insertColumn(7, counter.divideColumn("SignalFraction", mainTable.getColumn(name="TTToHplus_"+keepSignal), mainTable.getColumn(name="EWKSignal")))
    datasets.printInfo()
    print "============================================================"
    print "Main counter (%s)" % eventCounter.getNormalizationString()
    cellFormat = counter.TableFormatText(counter.CellFormatTeX(valueFormat='%.3f'))
    print table.format(cellFormat)
    tauTable = eventCounter.getSubCounterTable("TauIDPassedEvt::tauID_HPSTight")
    #muonAnalysis.addSumColumn(tauTable)
    tauTable.insertColumn(2, counter.sumColumn("EWKMCsum", [tauTable.getColumn(name=name) for name in ewkDatasets]))
    print tauTable.format(cellFormat)
    # print eventCounter.getSubCounterTable("TauIDPassedJets::tauID_HPSTight").format()
    # table = eventCounter.getSubCounterTable("Trigger")
    # muonAnalysis.addSumColumn(table)
    # print table.format(cellFormat)
    mainTable.keepOnlyRows([
        "All events",
        "Trigger and HLT_MET cut",
        "taus == 1",
        # "trigger scale factor",
        "electron veto",
        "muon veto",
        "MET",
        "njets",
        "btagging",
        "btagging scale factor",
        "JetsForEffs",
        "METForEffs",
        "BTagging",
        "DeltaPhi < 160",
        "DeltaPhi < 130"
    ])
    tauTable.keepOnlyRows([
        "AllTauCandidates",
        "DecayModeFinding",
        "TauJetPt",
        "TauJetEta",
        "TauLdgTrackExists",
        "TauLdgTrackPtCut",
        "TauECALFiducialCutsCracksAndGap",
        "TauAgainstElectronCut",
        "TauAgainstMuonCut",
        #"EMFractionCut",
        "HPS",
        "TauOneProngCut",
        "TauRtauCut",
    ])
    #effFormat = counter.TableFormatText(counter.CellFormatText(valueFormat='%.4f'))
    effFormat = counter.TableFormatText(counter.CellFormatTeX(valueFormat='%.4f'))
    #effFormat = counter.TableFormatConTeXtTABLE(counter.CellFormatTeX(valueFormat='%.4f'))
    # Per-row efficiencies for data and the summed EWK MC.
    for name, table in [("Main", mainTable), ("Tau ID", tauTable)]:
        effTable = counter.CounterTable()
        col = table.getColumn(name="Data")
        effTable.appendColumn(col)
        effTable.appendColumn(counter.efficiencyColumn(col.getName()+" eff", col))
        col = table.getColumn(name="EWKMCsum")
        effTable.appendColumn(col)
        effTable.appendColumn(counter.efficiencyColumn(col.getName()+" eff", col))
        print "%s counter efficiencies" % name
        print effTable.format(effFormat)
    print "Trigger uncertainties"
    bins = [40, 50, 60, 80]
    tauPtPrototype = ROOT.TH1F("tauPtTrigger", "Tau pt",
                               len(bins)-1, array.array("d", bins))
    runs = [
        "(160431 <= run && run <= 167913)",
        "(170722 <= run && run <= 173198)",
        "(173236 <= run && run <= 173692)",
        #"(160431 <= run && run <= 173692)",
    ]
    # Sum the trigger-weight uncertainty in quadrature over the run ranges.
    for name, td in [("BTagging", tdCountBTagging),
                     ("DeltaPhi160", tdCountDeltaPhi160),
                     ("DeltaPhi130", tdCountDeltaPhi130),
                     ("DeltaPhi90", tdCountDeltaPhi90)]:
        t = td.clone(varexp="tau_p4.Pt() >>tauPtTrigger")
        NallSum = 0
        NSum = 0
        absUncSquareSum = 0
        for runRegion in runs:
            #neventsPlot = createPlot(dataset.treeDrawToNumEntries(t.clone(weight="weightTrigger")))
            #uncertaintyPlot = createPlot(dataset.treeDrawToNumEntries(t.clone(weight="weightTriggerAbsUnc*weightTriggerAbsUnc/(weightTrigger*weightTrigger)")))
            tmp = t.clone(selection=t.selection+"&&"+runRegion)
            nallPlot = createPlot(tmp.clone(weight=""))
            neventsPlot = createPlot(tmp.clone(weight="weightTrigger"))
            uncertaintyPlot = createPlot(tmp.clone(weight="weightTriggerAbsUnc"))
            th1all = nallPlot.histoMgr.getHisto("Data").getRootHisto()
            th1 = neventsPlot.histoMgr.getHisto("Data").getRootHisto()
            th12 = uncertaintyPlot.histoMgr.getHisto("Data").getRootHisto()
            # Integrals include under/overflow bins (0 .. N+1).
            Nall = th1all.Integral(0, th1all.GetNbinsX()+1)
            N = th1.Integral(0, th1.GetNbinsX()+1)
            #absSum2 = th12.Integral(0, th12.GetNbinsX()+1)
            #absUnc = math.sqrt(absSum2)
            #absUnc = th12.Integral(0, 2)
            NallSum += Nall
            NSum += N
            absUnc = squareSum(th12)
            absUncSquareSum += absUnc
            absUnc = math.sqrt(absUnc)
            relUnc = 0
            if N > 0:
                relUnc = absUnc/N
            print "%-15s for runs %s Nall = %.2f, N = %.2f, absolute uncertainty %.2f, relative uncertainty %.4f" % (name, runRegion, Nall, N, absUnc, relUnc)
        absUnc = math.sqrt(absUncSquareSum)
        relUnc = absUnc/NSum
        print "%-15s Nall = %.2f, N = %.2f, absolute uncertainty %.2f, relative uncertainty %.4f" % (name, NallSum, NSum, absUnc, relUnc)
        print