def printMuonOriginHelper(datasets, selection): mother = histograms.HistoManager(datasets, selection+"/muon_genMother") mother.normalizeMCByCrossSection() grandMother = histograms.HistoManager(datasets, selection+"/muon_genGrandMother") grandMother.normalizeMCByCrossSection() for datasetName in datasets.getMCDatasetNames(): m = mother.getHisto(datasetName).getRootHisto() gm = grandMother.getHisto(datasetName).getRootHisto() allMus = m.Integral(0, m.GetNbinsX()+1) muFromW = m.GetBinContent(25) muFromZ = m.GetBinContent(24) muFromTau = m.GetBinContent(16) muFromTauW = gm.GetBinContent(25) muFromTauZ = gm.GetBinContent(24) otherMus = allMus - muFromW - muFromZ - muFromTauW - muFromTauZ print "Dataset %s" % datasetName # print " All mus %f" % allMus print " Mus from W %f" % (muFromW/allMus) print " Mus from Z %f" % (muFromZ/allMus) print " Mus from tau %f" % (muFromTau/allMus) print " Mus from W->tau %f" % (muFromTauW/allMus) print " Mus from Z->tau %f" % (muFromTauZ/allMus) print " Other mus %f" % (otherMus/allMus)
def addCurve(self, name, label):
    """Append a signal-efficiency vs. background-fraction TGraph to self.histoMgr.

    The signal efficiency is derived from the "Signal" dataset's histogram
    *name*, and the background fraction from the "Background" dataset
    divided by the sum of all MC, everything normalized by cross section.
    The graph is added with legend label *label*.
    """
    # Signal efficiency, one value per cut point
    signalDRH = self.datasets.getDataset("Signal").getDatasetRootHisto(name)
    #signalDRH.normalizeToLuminosity(46)
    signalDRH.normalizeByCrossSection()
    signalEffValues = dataset.histoToList(
        muonAnalysis.dist2eff(signalDRH.getHistogram()))

    # Background fraction: cumulated Background / cumulated sum of all MC
    mgr = histograms.HistoManager(self.datasets, name)
    mgr.normalizeMCByCrossSection()
    #mgr.normalizeMCByLuminosity()
    mcRootHistos = [muonAnalysis.getSumOrRootHisto(h)
                    for h in mgr.getHistos() if h.isMC()]
    mcSum = muonAnalysis.dist2pass(histograms.sumRootHistos(mcRootHistos))

    bkg = muonAnalysis.getSumOrRootHisto(mgr.getHisto("Background")).Clone(name + "_bkgfraction")
    bkg = muonAnalysis.dist2pass(bkg)
    bkg.Divide(mcSum)
    bkgValues = dataset.histoToList(bkg)

    graph = ROOT.TGraph(len(signalEffValues),
                        array.array("d", signalEffValues),
                        array.array("d", bkgValues))
    self.histoMgr.appendHisto(histograms.HistoGraph(graph, label))
def main(): datasets = dataset.getDatasetsFromMulticrabCfg() datasets.loadLuminosities() #mc = "WJets" mc = "QCD" data = "2010" #data = "2011" # maxVtx = 15 maxVtx = 20 if data == "2010": datasets.remove( filter(lambda name: "Prompt" in name, datasets.getAllDatasetNames())) elif data == "2011": datasets.remove( filter(lambda name: "Dec22" in name, datasets.getAllDatasetNames())) plots.mergeRenameReorderForDataMC(datasets) style = tdrstyle.TDRStyle() if mc == "QCD": datasets.remove(["WJets"]) elif mc == "WJets": datasets.remove(["QCD"]) h = histograms.HistoManager(datasets, "signalAnalysis/verticesBeforeWeight") h.normalizeToOne() h.forEachMCHisto(styles.generator()) h.forHisto("Data", styles.getDataStyle()) h.setHistoDrawStyle("Data", "EP") h.setHistoLegendStyle("Data", "p") cf = histograms.CanvasFrame(h, "vertex_%s_%s" % (data, mc), xmax=maxVtx) cf.frame.GetXaxis().SetTitle("N(vtx)") cf.frame.GetYaxis().SetTitle("A.u.") legend = histograms.createLegend() h.addToLegend(legend) h.draw() legend.Draw() cf.canvas.SaveAs(".png") cf.canvas.SaveAs(".eps") cf.canvas.SaveAs(".C") # Weight dataHisto = h.getHisto("Data").getRootHisto() mcHisto = h.getHisto(mc).getRootHisto() # For normalization, see https://twiki.cern.ch/twiki/bin/view/CMS/PileupReweighting weightHisto = dataHisto.Clone("weights") weightHisto.Divide(mcHisto) print "Weight histo integral", weightHisto.Integral() #weightHisto.Scale(1/dataHisto.Integral()) #weightHisto.Scale(1/weightHisto.Integral()) print "Weight histo integral", weightHisto.Integral() print "Sum of [weight*prob]", sum([ weightHisto.GetBinContent(bin) * mcHisto.GetBinContent(bin) for bin in xrange(1, weightHisto.GetNbinsX()) ]) print "weights = cms.vdouble(%s)" % ", ".join([ "%.8f" % weightHisto.GetBinContent(bin) for bin in xrange(1, min(maxVtx, weightHisto.GetNbinsX()) + 1) ]) h = histograms.HistoManager(datasetRootHistos=[]) h.appendHisto(histograms.Histo(weightHisto, "Weight", "", "HIST")) h.forEachHisto(styles.generator()) cf = 
histograms.CanvasFrame(h, "vertex_weight_%s_%s" % (data, mc), xmax=maxVtx) cf.frame.GetXaxis().SetTitle("N(vtx)") cf.frame.GetYaxis().SetTitle("Weight") h.draw() cf.canvas.SaveAs(".png") cf.canvas.SaveAs(".eps") cf.canvas.SaveAs(".C")
# Example how to merge histograms of several datasets #datasets.merge("QCD", ["QCD_Pt30to50_TuneZ2_Winter10", # "QCD_Pt50to80_TuneZ2_Winter10", # "QCD_Pt80to120_TuneZ2_Winter10", # "QCD_Pt120to170_TuneZ2_Winter10", # "QCD_Pt170to300_TuneZ2_Winter10", # "QCD_Pt300to470_TuneZ2_Winter10"]) datasets.merge("QCD", ["QCD_Pt30to50_TuneZ2_Winter10", "QCD_Pt80to120_TuneZ2_Winter10", "QCD_Pt120to170_TuneZ2_Winter10", "QCD_Pt170to300_TuneZ2_Winter10", "QCD_Pt300to470_TuneZ2_Winter10"]) # Get set of histograms with the given path. The returned object is of # type HistoManager, which contains a histogram from each dataset in # DatasetManager. The histograms can be e.g. merged/stacked or normalized # in various ways before drawing. tauPts = histograms.HistoManager(datasets, "qcdMeasurementMethod2Part1/TauSelection_all_tau_candidates_pt") # The default normalization is no normalization (i.e. number of MC # events for MC, and number of events for data) # Normalize MC histograms to their cross section #tauPts.normalizeMCByCrossSection() #ylabel = "Cross section (pb)" # Normalize MC histograms to the luminosity of the collision data in # the HistoManager #tauPts.normalizeMCByLuminosity() #tauPts.normalizeMCByCrossSection() tauPts.normalizeToOne() #tauPts.normalizeMCToLuminosity(33.69) ylabel = "#tau cands / 1 GeV/c"