# Fragment (truncated at both ends): applies the data selection (event filters, HEM
# reweight veto, lepton-mode cut, analysis selection) to the era-split data samples
# dataE/dataF, then draws-or-loads cached 1D histograms for dataB and dataC keyed on
# (sample name, region tag, variable, binning, weight/selection strings).
# NOTE(review): the trailing "if dirDB.contains(key)..." for dataD is cut off here.
dataE.setSelectionString([ filterCutData, "reweightHEM>0", cutInterpreter.cutString(args.mode), selection ]) dataE.setWeightString("weight") dataF.setSelectionString([ filterCutData, "reweightHEM>0", cutInterpreter.cutString(args.mode), selection ]) dataF.setWeightString("weight") key = (dataB.name, "B", args.variable, "_".join(map(str, args.binning)), dataB.weightString, dataB.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHistB = dirDB.get(key).Clone("B") else: dataHistB = dataB.get1DHistoFromDraw(args.variable, binning=args.binning) dirDB.add(key, dataHistB.Clone("B"), overwrite=True) key = (dataC.name, "C", args.variable, "_".join(map(str, args.binning)), dataC.weightString, dataC.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHistC = dirDB.get(key).Clone("C") else: dataHistC = dataC.get1DHistoFromDraw(args.variable, binning=args.binning) dirDB.add(key, dataHistC.Clone("C"), overwrite=True) key = (dataD.name, "D", args.variable, "_".join(map(str, args.binning)), dataD.weightString, dataD.selectionString, selection) if dirDB.contains(key) and not args.overwrite:
# Fragment: decides which data-taking years to plot ("combined" expands to all three
# unless a single --plotYear is requested), then for each year builds underscore-joined
# cache keys and fetches from dirDB: the data histogram, the (optionally bkg-subtracted)
# signal/total histogram with full error, the stat-only variant, and the
# experimental_Up/Down uncertainty histograms. Python 2 print statements echo each key.
# NOTE(review): "bkgSubstracted" spelling comes from the argparse option name elsewhere.
if args.year == "combined" and not args.plotYear: years = ["2016","2017","2018"] elif args.year == "combined" and args.plotYear: years = [args.plotYear] else: years = [None] for i, year in enumerate(years): if year: addon = addons + [year] else: addon = addons # data histogram name = ["bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "data"] print "_".join(name+addon) data_tmp = dirDB.get( "_".join(name+addon) ) # bkg substracted total histogram (signal) with total error name = ["bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "signal" if args.bkgSubstracted else "total"] print "_".join(name+addon) signal_tmp = dirDB.get( "_".join(name+addon) ) # bkg substracted total histogram (signal) with stat error name = ["bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "signal_stat" if args.bkgSubstracted else "total_stat"] print "_".join(name+addon) stat_tmp = dirDB.get( "_".join(name+addon) ) # experimental uncertainty name = ["bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "experimental_Up"] print "_".join(name+addon) experimentalUp_tmp = dirDB.get( "_".join(name+addon) ) name = ["bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "experimental_Down"] print "_".join(name+addon) experimentalDown_tmp = dirDB.get( "_".join(name+addon) )
# Fragment: builds the QCD-estimate selection string from the 2016 Setup (photon
# selection intentionally off), appends the trigger requirement and any --addCut,
# then draws-or-loads cached per-year data histograms for 2018/2017/2016.
# NOTE(review): the 2016 clone is named "mudataAR16" while 2018/2017 use
# "edataAR18"/"edataAR17" — looks like a copy-paste slip; confirm the intended
# ROOT object name before relying on it (truncated before the 2016 else-branch).
setup = Setup( year=2016, photonSelection=False, checkOnly=False, runOnLxPlus=False ) #photonselection always false for qcd estimate setup = setup.sysClone( parameters=allRegions[args.selection]["parameters"] ) selection = setup.selection( "MC", channel="all", **setup.defaultParameters() )["prefix"] selection = cutInterpreter.cutString( selection ) selection += "&&triggered==1" print selection if args.addCut: selection += "&&" + cutInterpreter.cutString( args.addCut ) print( "Using selection string: %s"%selection ) key = (data2018.name, "18", args.variable, "_".join(map(str,args.binning)), data2018.weightString, data2018.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist2018 = dirDB.get(key).Clone("edataAR18") else: dataHist2018 = data2018.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection ) dirDB.add(key, dataHist2018.Clone("edataAR18"), overwrite=True) key = (data2017.name, "17", args.variable, "_".join(map(str,args.binning)), data2017.weightString, data2017.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist2017 = dirDB.get(key).Clone("edataAR17") else: dataHist2017 = data2017.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection ) dirDB.add(key, dataHist2017.Clone("edataAR17"), overwrite=True) key = (data2016.name, "16", args.variable, "_".join(map(str,args.binning)), data2016.weightString, data2016.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist2016 = dirDB.get(key).Clone("mudataAR16") else:
# Fragment: instantiates the data-driven QCD estimator, picks the inverted-isolation
# lepton pt/eta cut variables (electron eta includes the supercluster offset), caches
# the anti-isolated-region (AR) data histogram, and prepares the photon-category
# split used when --photonCat is set.
# NOTE(review): catSettings defines a merged key "noChgIsoNoSieiephotoncat134" but
# genCat lists cat1/cat3/cat4 individually — verify downstream lookup handles this
# (the commented-out cat1 entry suggests the merge was a later edit).
estimators = EstimatorList( setup, processes=["QCD-DD"] ) estimate = getattr(estimators, "QCD-DD") estimate.initCache(setup.defaultCacheDir()) # Accounting for leptonPtCutVar = "LeptonTightInvIso0_pt" if args.mode == "e": leptonEtaCutVar = "abs(LeptonTightInvIso0_eta+LeptonTightInvIso0_deltaEtaSC)" else: leptonEtaCutVar = "abs(LeptonTightInvIso0_eta)" QCDTF_updates_2J = copy.deepcopy(QCDTF_updates) key = (data_sample.name, "AR", args.variable, "_".join(map(str,args.binning)), data_sample.weightString, data_sample.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist = dirDB.get(key).Clone("dataAR") else: dataHist = data_sample.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection, addOverFlowBin="upper" ) dirDB.add(key, dataHist.Clone("dataAR"), overwrite=True) dataHist_SB = dataHist.Clone("data_SB") dataHist_SB.Scale(0) genCat = [None] if args.photonCat: hists = {} genCat = ["noChgIsoNoSieiephotoncat0","noChgIsoNoSieiephotoncat2","noChgIsoNoSieiephotoncat1","noChgIsoNoSieiephotoncat3","noChgIsoNoSieiephotoncat4"] catSettings = { "noChgIsoNoSieiephotoncat0":{"texName":"Genuine #gamma", "color":color.gen }, "noChgIsoNoSieiephotoncat2":{"texName":"Misid. e", "color":color.misID}, "noChgIsoNoSieiephotoncat134":{"texName":"Hadronic #gamma/fake", "color":color.had }, } # "noChgIsoNoSieiephotoncat1":{"texName":"had #gamma", "color":color.had },
"binning": "_".join(map(str, reconstructionBinning)), "ttgSingleLepton": str(args.ttgSingleLep), "small": str(args.small) } resRes = "_".join(["_".join([k, v]) for k, v in dresRes.iteritems()]) dresData = { "name": "DataHistogram", "year": str(args.year), "mode": args.mode, "selection": args.recoSelection, "recoBin": "_".join(map(str, args.recoBinning)) } resData = "_".join(["_".join([k, v]) for k, v in dresData.iteritems()]) if dirDB.contains(resData) and not args.noData: dataHist = dirDB.get(resData) logger.info("Getting cached data histograms!") elif not args.noData: raise Exception("No input data histogram found!") if dirDB.contains(resMatrix) and dirDB.contains(resRes) and not args.overwrite: matrix = dirDB.get(resMatrix) resHisto = dirDB.get(resRes) logger.info("Re-using cached histograms!") else: if args.year == 2016: from TTGammaEFT.Samples.nanoTuples_Summer16_private_incl_postProcessed import TTGSemiLep, TTG elif args.year == 2017: from TTGammaEFT.Samples.nanoTuples_Fall17_private_incl_postProcessed import TTGSemiLep, TTG elif args.year == 2018:
# Fragment: composes the MC event-weight expression — per-year luminosity times the
# chain of scale-factor reweights — plus per-year additive terms that rescale the
# misidentified-electron photon category (photonCatMagic==2) by (misIDSF-1).
# The combined weightStringAR is applied to each MC sample (times its own
# sampleWeight) and histograms are drawn-or-loaded from dirDB (cut off mid-draw).
QCDSF_val = QCD4pSF_val lumiString = "(35.92*(year==2016)+41.53*(year==2017)+59.74*(year==2018))" ws = "(%s*weight*reweightHEM*reweightTrigger*reweightL1Prefire*reweightPU*reweightLeptonTightSF*reweightLeptonTrackingTightSF*reweightPhotonSF*reweightPhotonElectronVetoSF*reweightBTag_SF)" % lumiString ws16 = "+(%s*(PhotonNoChgIsoNoSieie0_photonCatMagic==2)*(%f-1)*(year==2016))" % ( ws, misIDSF_val[2016].val) ws17 = "+(%s*(PhotonNoChgIsoNoSieie0_photonCatMagic==2)*(%f-1)*(year==2017))" % ( ws, misIDSF_val[2017].val) ws18 = "+(%s*(PhotonNoChgIsoNoSieie0_photonCatMagic==2)*(%f-1)*(year==2018))" % ( ws, misIDSF_val[2018].val) weightStringAR = ws + ws16 + ws17 + ws18 key = (data_sample.name, "AR", args.variable, "_".join(map(str, args.binning)), data_sample.weightString, data_sample.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist = dirDB.get(key).Clone("data") else: dataHist = data_sample.get1DHistoFromDraw(args.variable, binning=args.binning, selectionString=selection) dirDB.add(key, dataHist.Clone("data"), overwrite=True) for s in mc: s.setWeightString(weightStringAR + "*" + sampleWeight) key = (s.name, "AR", args.variable, "_".join(map(str, args.binning)), s.weightString, s.selectionString, selection) if dirDB.contains(key) and not args.overwrite: s.hist = copy.deepcopy(dirDB.get(key).Clone(s.name)) else: s.hist = s.get1DHistoFromDraw(args.variable, binning=args.binning,
# Fragment: collects cached 68%/95% limit results per PDF working point from
# hepSample, skipping a few hard-coded points ("35"/"42"/"45") and any missing
# cache entries. Then begins toGraph(), which fills a ROOT TGraph from a dict
# sorted by descending x (Python 2 tuple-unpacking lambda); the function body
# continues past this fragment, so its full contract is not visible here.
res = {} res[68] = {} res[95] = {} for i, (pdf, val) in enumerate(hepSample.root_samples_dict.iteritems()): pdf = pdf.split("_")[0] pdfVal = float(pdf.split("-")[1]) print pdfVal if pdf.endswith("35"): continue if pdf.endswith("42"): continue if pdf.endswith("45"): continue sConfig = "_".join([args.sample, args.selection, pdf]) if not limitCache.contains(sConfig): continue tmpRes = limitCache.get(sConfig) res[68][pdfVal] = tmpRes[68] res[95][pdfVal] = tmpRes[95] print res[68] print res[95] def toGraph(name, title, data): result = ROOT.TGraph(len(data)) result.SetName(name) result.SetTitle(title) data = data.items() data.sort(key=lambda (x, val): -x) for j, (x, val) in enumerate(data): result.SetPoint(j, x, val)
estimate = getattr(estimators, "QCD-DD") estimate.initCache(setup.defaultCacheDir()) # Accounting for leptonPtCutVar = "LeptonTightInvIso0_pt" if args.mode == "e": leptonEtaCutVar = "abs(LeptonTightInvIso0_eta+LeptonTightInvIso0_deltaEtaSC)" else: leptonEtaCutVar = "abs(LeptonTightInvIso0_eta)" QCDTF_updates_2J = copy.deepcopy(QCDTF_updates) key = (data_sample.name, "AR", args.variable, "_".join(map(str, args.binning)), data_sample.weightString, data_sample.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist = dirDB.get(key.Clone("dataAR")) else: dataHist = data_sample.get1DHistoFromDraw(args.variable, binning=args.binning, selectionString=selection) dirDB.add(key, dataHist.Clone("dataAR"), overwrite=True) dataHist_SB = dataHist.Clone("data_SB") dataHist_SB.Scale(0) qcdHist = dataHist.Clone("qcd") qcdHist.Scale(0) for s in mc: s.setWeightString(weightStringAR + "*" + sampleWeight) key = (s.name, "AR", args.variable, "_".join(map(str, args.binning)),
# Fragment (starts mid-if): for each histogram label h and year y, builds the long
# underscore-joined dirDB keys for the bkg-subtracted data, signal, and
# signal-stat histograms — with a "_%i" year suffix only in the "combined" case —
# fetches all three, and prints keys/objects for debugging (Python 2 prints).
# The "3p"->"3"/"4p" replacements encode the njet-region naming convention.
else: years = [year] for h in hists: for y in years: if year == "combined": data_key = "bkgSubtracted_%s_addDYSF_addPtBinnedUnc_%s_%s_VG3_VG4p_misDY3_misDY4p_addDYSF_addPtBinnedUnc_data_%i" % ( h, h.replace("3p", "3"), h.replace("3p", "4p"), y) signal_key = "bkgSubtracted_%s_addDYSF_addPtBinnedUnc_%s_%s_VG3_VG4p_misDY3_misDY4p_addDYSF_addPtBinnedUnc_signal_%i" % ( h, h.replace("3p", "3"), h.replace("3p", "4p"), y) signal_stat_key = "bkgSubtracted_%s_addDYSF_addPtBinnedUnc_%s_%s_VG3_VG4p_misDY3_misDY4p_addDYSF_addPtBinnedUnc_signal_stat_%i" % ( h, h.replace("3p", "3"), h.replace("3p", "4p"), y) else: data_key = "bkgSubtracted_%s_addDYSF_addPtBinnedUnc_%s_%s_VG3_VG4p_misDY3_misDY4p_addDYSF_addPtBinnedUnc_data" % ( h, h.replace("3p", "3"), h.replace("3p", "4p")) signal_key = "bkgSubtracted_%s_addDYSF_addPtBinnedUnc_%s_%s_VG3_VG4p_misDY3_misDY4p_addDYSF_addPtBinnedUnc_signal" % ( h, h.replace("3p", "3"), h.replace("3p", "4p")) signal_stat_key = "bkgSubtracted_%s_addDYSF_addPtBinnedUnc_%s_%s_VG3_VG4p_misDY3_misDY4p_addDYSF_addPtBinnedUnc_signal_stat" % ( h, h.replace("3p", "3"), h.replace("3p", "4p")) data = dirDB.get(data_key) signal = dirDB.get(signal_key) signal_stat = dirDB.get(signal_stat_key) print cache_dir print h, y print data_key print signal_key print signal_stat_key print data, signal, signal_stat
# Fragment: same selection-building pattern as the data version above, but adds the
# pTStitching requirement (MC-only stitching flag) and draws-or-loads cached MC
# histograms per year (18/17/16); the 2016 else-branch is cut off at fragment end.
setup = Setup( year=2016, photonSelection=False, checkOnly=False, runOnLxPlus=False ) #photonselection always false for qcd estimate setup = setup.sysClone( parameters=allRegions[args.selection]["parameters"] ) selection = setup.selection( "MC", channel="all", **setup.defaultParameters() )["prefix"] selection = cutInterpreter.cutString( selection ) selection += "&&pTStitching==1&&triggered==1" print selection if args.addCut: selection += "&&" + cutInterpreter.cutString( args.addCut ) print( "Using selection string: %s"%selection ) key = (mc18.name, "18", args.variable, "_".join(map(str,args.binning)), mc18.weightString, mc18.selectionString, selection) if dirDB.contains(key) and not args.overwrite: mcHist18 = dirDB.get(key).Clone("18") else: mcHist18 = mc18.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection ) dirDB.add(key, mcHist18.Clone("18"), overwrite=True) key = (mc17.name, "17", args.variable, "_".join(map(str,args.binning)), mc17.weightString, mc17.selectionString, selection) if dirDB.contains(key) and not args.overwrite: mcHist17 = dirDB.get(key).Clone("17") else: mcHist17 = mc17.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection ) dirDB.add(key, mcHist17.Clone("17"), overwrite=True) key = (mc16.name, "16", args.variable, "_".join(map(str,args.binning)), mc16.weightString, mc16.selectionString, selection) if dirDB.contains(key) and not args.overwrite: mcHist16 = dirDB.get(key).Clone("16") else:
"reweightPhotonElectronVetoSF/F", "reweightBTag_SF/F", "Flag_goodVertices/I", "Flag_globalSuperTightHalo2016Filter/I", "Flag_HBHENoiseFilter/I", "Flag_HBHENoiseIsoFilter/I", "Flag_EcalDeadCellTriggerPrimitiveFilter/I", "Flag_BadPFMuonFilter/I", "PV_ndof/F", "PV_x/F", "PV_y/F", "PV_z/F" ] extra_read_variables = { "2016": [], "2017": ["Flag_ecalBadCalibFilter/I", "Flag_ecalBadCalibFilterV2/I"], "2018": ["Flag_ecalBadCalibFilter/I", "Flag_ecalBadCalibFilterV2/I"], } loop_key = (cfg_key, "result") if dirDB.contains(loop_key) and not args.overwrite: matrix, fiducial_spectrum, reco_spectrum = dirDB.get(loop_key) else: # Define stuff fiducial_spectrum = ROOT.TH1D("fiducial_spectrum", "fiducial_spectrum", len(fiducial_thresholds) - 1, array.array('d', fiducial_thresholds)) fiducial_spectrum.GetXaxis().SetTitle(tex_gen) reco_spectrum = ROOT.TH1D("reco_spectrum", "reco_spectrum", len(reco_thresholds_years) - 1, array.array('d', reco_thresholds_years)) reco_spectrum.GetXaxis().SetTitle(tex_gen) matrix = ROOT.TH2D("unfolding_matrix", "unfolding_matrix", len(reco_thresholds_years) - 1,
# Fragment: builds the trigger-augmented selection for args.year, then
# draws-or-loads cached anti-isolated-region histograms for the electron and muon
# data samples and assigns plot styles (black errors for mu, red for e; the e
# legend text is cut off at fragment end).
setup = Setup( year=args.year, photonSelection=False, checkOnly=False, runOnLxPlus=False ) #photonselection always false for qcd estimate setup = setup.sysClone( parameters=allRegions[args.selection]["parameters"] ) selection = setup.selection( "MC", channel="all", **setup.defaultParameters() )["prefix"] selection = cutInterpreter.cutString( selection ) selection += "&&triggered==1" print selection if args.addCut: selection += "&&" + cutInterpreter.cutString( args.addCut ) print( "Using selection string: %s"%selection ) key = (data_sample_e.name, "AR", args.variable, "_".join(map(str,args.binning)), data_sample_e.weightString, data_sample_e.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist_e = dirDB.get(key).Clone("edataAR") else: dataHist_e = data_sample_e.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection ) dirDB.add(key, dataHist_e.Clone("edataAR"), overwrite=True) key = (data_sample_mu.name, "AR", args.variable, "_".join(map(str,args.binning)), data_sample_mu.weightString, data_sample_mu.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist_mu = dirDB.get(key).Clone("mudataAR") else: dataHist_mu = data_sample_mu.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection ) dirDB.add(key, dataHist_mu.Clone("mudataAR"), overwrite=True) dataHist_mu.style = styles.errorStyle( ROOT.kBlack ) dataHist_mu.legendText = "data (#mu)" dataHist_e.style = styles.errorStyle( ROOT.kRed )
# Fragment (starts mid-call): finishes applying selections/weights to the low and
# high sideband data samples, then draws-or-loads cached MC histograms for the
# high and low regions with explicit binning and upper overflow (low draw cut off).
# Note these cache keys omit the trailing `selection` element used elsewhere —
# the mc_* selectionString presumably already encodes it.
cutInterpreter.cutString(low + "-" + args.mode), copy.deepcopy(dataselection) ]) data_low.setWeightString("weight") data_high.setSelectionString([ filterCutData, "reweightHEM>0", cutInterpreter.cutString(high + "-" + args.mode), copy.deepcopy(dataselection) ]) data_high.setWeightString("weight") key = (mc_high.name, "high", args.variable, "_".join(map(str, bins)), mc_high.weightString, mc_high.selectionString) if dirDB.contains(key) and not args.overwrite: mcHist_high = dirDB.get(key).Clone("high") else: mcHist_high = mc_high.get1DHistoFromDraw(args.variable, binning=bins, binningIsExplicit=True, addOverFlowBin="upper") dirDB.add(key, mcHist_high.Clone("high"), overwrite=True) key = (mc_low.name, "low", args.variable, "_".join(map(str, bins)), mc_low.weightString, mc_low.selectionString) if dirDB.contains(key) and not args.overwrite: mcHist_low = dirDB.get(key).Clone("low") else: mcHist_low = mc_low.get1DHistoFromDraw(args.variable, binning=bins, binningIsExplicit=True,
selection += "&&triggered==1" if args.addCut: selection += "&&" + cutInterpreter.cutString(args.addCut) print("Using selection string: %s" % selection) data_sample_4p.setSelectionString([ filterCutData, "reweightHEM>0", cutInterpreter.cutString(args.mode), selection ]) data_sample_4p.setWeightString("weight") key = (data_sample_4p.name, "4p", args.variable, "_".join(map(str, args.binning)), data_sample_4p.weightString, data_sample_4p.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist_4p = dirDB.get(key).Clone("4p") else: dataHist_4p = data_sample_4p.get1DHistoFromDraw(args.variable, binning=args.binning) dirDB.add(key, dataHist_4p.Clone("4p"), overwrite=True) key = (data_sample_3.name, "3", args.variable, "_".join(map(str, args.binning)), data_sample_3.weightString, data_sample_3.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist_3 = dirDB.get(key).Clone("3") else: dataHist_3 = data_sample_3.get1DHistoFromDraw(args.variable, binning=args.binning) dirDB.add(key, dataHist_3.Clone("3"), overwrite=True)
"cos(JetGood0_phi-MET_phi)": "cos(JetGoodInvLepIso0_phi-MET_phi)", "cos(JetGood0_phi-JetGood1_phi)": "cos(JetGoodInvLepIso0_phi-JetGoodInvLepIso1_phi)", } invVariable = replaceVariable[ args.variable] if args.variable in replaceVariable.keys( ) else args.variable # histos key = (data_sample.name, "Binv", args.variable, "_".join(map(str, args.binning)), data_sample.weightString, data_sample.selectionString, preSelection) if dirDB.contains(key) and not args.overwrite: dataHist_SB = dirDB.get(key) else: dataHist_SB = data_sample.get1DHistoFromDraw(invVariable, binning=args.binning, selectionString=preSelection, addOverFlowBin="upper") dirDB.add(key, dataHist_SB, overwrite=True) key = (data_sample.name, "AR", args.variable, "_".join(map(str, args.binning)), data_sample.weightString, data_sample.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist = dirDB.get(key) else: dataHist = data_sample.get1DHistoFromDraw(args.variable, binning=args.binning, selectionString=selection,
# DataObservation: cache-backed wrapper that yields the observed data count for a
# (region, channel, setup) via MergingDirDB. uniqueKey serializes the setup (sys,
# parameters, lumi, optional blinding) into a hashable key; cachedEstimate is an
# alias for cachedObservation so callers can treat data like an MC estimate.
# Negative cached results are mapped to u_float(0, 0) unless checkOnly.
# NOTE(review): initCache joins cache_directory into self.cacheDir but then calls
# os.makedirs(cacheDir) and os.path.join(cacheDir, self.name) on the *relative*
# name — confirm MergingDirDB expects a relative path; the bare except around
# makedirs silently ignores all errors, not just "already exists".
# (writeToCache's signature is cut off at the end of this fragment.)
class DataObservation(): def __init__(self, name, process, cacheDir=None): self.name = name self.process = process self.initCache(cacheDir) def initCache(self, cacheDir="dataObs"): if cacheDir: self.cacheDir = os.path.join(cache_directory, cacheDir) try: os.makedirs(cacheDir) except: pass cacheDirName = os.path.join(cacheDir, self.name) self.cache = MergingDirDB(cacheDirName) if not self.cache: raise else: self.cache = None def uniqueKey(self, region, channel, setup): ## this is used in MCBasedEstimate if hasattr(setup, "blinding"): return str(region), channel, json.dumps( setup.sys, sort_keys=True), json.dumps( setup.parameters, sort_keys=True), json.dumps(setup.lumi, sort_keys=True), setup.blinding else: return str(region), channel, json.dumps( setup.sys, sort_keys=True), json.dumps( setup.parameters, sort_keys=True), json.dumps(setup.lumi, sort_keys=True) # alias for cachedObservation to make it easier to call the same function as for the mc"s def cachedEstimate(self, region, channel, setup, signalAddon=None, save=True, overwrite=False, checkOnly=False): return self.cachedObservation(region, channel, setup, overwrite=overwrite, checkOnly=checkOnly) def cachedObservation(self, region, channel, setup, save=True, overwrite=False, checkOnly=False): key = self.uniqueKey(region, channel, setup) if (self.cache and self.cache.contains(key)) and not overwrite: res = self.cache.get(key) logger.debug("Loading cached %s result for %r : %r" % (self.name, key, res)) elif self.cache and not checkOnly: res = self.observation(region, channel, setup, overwrite) _res = self.cache.add(key, res, overwrite=True) logger.debug("Adding cached %s result for %r" % (self.name, key)) elif not checkOnly: res = self.observation(region, channel, setup, overwrite) else: res = u_float(-1, 0) return res if res >= 0 or checkOnly else u_float(0, 0) def writeToCache(self, region, channel, setup, value, signalAddon=None, save=True, overwrite=False, checkOnly=False): key = self.uniqueKey(region, 
# Fragment (continues writeToCache from the previous line): on a cache hit it warns
# (Python 2 print) when the cached value differs from the value being written but
# keeps the cached one; otherwise it stores the new value. observation() recursively
# splits a "3p" nJet setup into exclusive 3-jet and 4-or-more-jet setups, sums
# lepton/dilepton channels for "all"/"SFtight", and finally draws the data yield
# with the region+preselection cut (blinding weight appended when present).
channel, setup) if (self.cache and self.cache.contains(key)) and not overwrite: res = self.cache.get(key) if res.val != value.val: print "Warning, caches estimate not equal to input value: have %s, got %s" % ( res, value) logger.debug("Loading cached %s result for %r : %r" % (self.name, key, res)) elif self.cache and not checkOnly: _res = self.cache.add(key, value, overwrite=True) res = value logger.debug("Adding cached %s result for %r" % (self.name, key)) else: res = u_float(-1, 0) return res if res >= 0 or checkOnly else u_float(0, 0) def observation(self, region, channel, setup, overwrite): if setup.nJet == "3p": setup4p = setup.sysClone(parameters={"nJet": (4, -1)}) setup3 = setup.sysClone(parameters={"nJet": (3, 3)}) return sum([ self.cachedEstimate(region, channel, s, overwrite=overwrite) for s in [setup3, setup4p] ]) if channel == "all": return sum([ self.cachedEstimate(region, c, setup, overwrite=overwrite) for c in lepChannels ]) elif channel == "SFtight": return sum([ self.cachedEstimate(region, c, setup, overwrite=overwrite) for c in dilepChannels ]) else: preSelection = setup.preselection("Data", channel=channel) # cut = "&&".join([region.cutString(setup.sys['selectionModifier']), preSelection['cut']]) cut = "&&".join([region.cutString(), preSelection['cut']]) logger.debug("Using cut %s" % cut) weight = preSelection['weightStr'] if hasattr(setup, "blinding") and setup.blinding: weight += "*" + setup.blinding return u_float(**self.process.getYieldFromDraw( selectionString=cut, weightString=weight))
"reweightPhotonElectronVetoSF/F", "reweightBTag_SF/F", "Flag_goodVertices/I", "Flag_globalSuperTightHalo2016Filter/I", "Flag_HBHENoiseFilter/I", "Flag_HBHENoiseIsoFilter/I", "Flag_EcalDeadCellTriggerPrimitiveFilter/I", "Flag_BadPFMuonFilter/I", "PV_ndof/F", "PV_x/F", "PV_y/F", "PV_z/F" ] extra_read_variables = { "2016": [], "2017": ["Flag_ecalBadCalibFilter/I", "Flag_ecalBadCalibFilterV2/I"], "2018": ["Flag_ecalBadCalibFilter/I", "Flag_ecalBadCalibFilterV2/I"], } loop_key = (cfg_key, "result") if dirDB.contains(loop_key) and not args.overwrite: matrix, fiducial_spectrum, reco_spectrum, yield_fid, yield_fid_reco, yield_reco = dirDB.get( loop_key) else: # Define stuff fiducial_spectrum = ROOT.TH1D( "fiducial_spectrum", "fiducial_spectrum", len(settings.fiducial_thresholds) - 1, array.array('d', settings.fiducial_thresholds)) fiducial_spectrum.GetXaxis().SetTitle(settings.tex_gen) reco_spectrum = ROOT.TH1D("reco_spectrum", "reco_spectrum", len(settings.reco_thresholds_years) - 1, array.array('d', settings.reco_thresholds_years)) reco_spectrum.GetXaxis().SetTitle(settings.tex_gen) matrix = ROOT.TH2D("unfolding_matrix", "unfolding_matrix",
reg += "4p" if "4p" in args.selection else "3" setup = setup.sysClone(parameters=allRegions[reg]["parameters"]) estimators = EstimatorList(setup, processes=["QCD-DD"]) estimate = getattr(estimators, "QCD-DD") estimate.initCache(setup.defaultCacheDir()) # Accounting for leptonPtCutVar = "LeptonTightInvIsoNoSieie0_pt" leptonEtaCutVar = "abs(LeptonTightInvIsoNoSieie0_eta+LeptonTightInvIsoNoSieie0_deltaEtaSC)" QCDTF_updates_2J = copy.deepcopy(QCDTF_updates) key = (data_sample.name, "AR", args.variable, "_".join(map(str, args.binning)), data_sample.weightString, data_sample.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist = dirDB.get(key).Clone("data") else: dataHist = data_sample.get1DHistoFromDraw(args.variable, binning=args.binning, selectionString=selection) dirDB.add(key, dataHist.Clone("data"), overwrite=True) dataHist_SB = dataHist.Clone("data_SB") dataHist_SB.Scale(0) genCat = [None] if args.photonCat: hists = {} genCat = [ "noChgIsoNoSieiephotoncat0", "noChgIsoNoSieiephotoncat1", "noChgIsoNoSieiephotoncat2", "noChgIsoNoSieiephotoncat3",
# Fragment (starts mid-if): translates the optional extra cut into the inverted-
# isolation sideband by string-replacing each iso variable with its invIso
# counterpart, appends it to preSelection, then draws-or-loads cached mT
# histograms: inverted-mT in the sideband and nominal mT in the signal region.
# The sideband clone seeds the QCD template and gets data-style plotting.
else: addSel = args.addCut if addSel: addSel = cutInterpreter.cutString( addSel ) print addSel for iso, invIso in replaceSelection.iteritems(): addSel = addSel.replace(iso,invIso) print preSelection print addSel preSelection += "&&" + addSel else: raise Exception("Region not implemented") key = (data_sample.name, "mTinv", "_".join(map(str,binning)), data_sample.weightString, data_sample.selectionString, preSelection) if dirDB.contains(key) and not args.overwrite: dataHist_SB = dirDB.get(key).Clone("mTinv") else: dataHist_SB = data_sample.get1DHistoFromDraw( mTinv, binning=binning, selectionString=preSelection, addOverFlowBin="upper" ) dirDB.add(key, dataHist_SB.Clone("mTinv"), overwrite=True) key = (data_sample.name, "mT", "_".join(map(str,binning)), data_sample.weightString, data_sample.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist = dirDB.get(key).Clone("mT") else: dataHist = data_sample.get1DHistoFromDraw( "mT", binning=binning, selectionString=selection, addOverFlowBin="upper" ) dirDB.add(key, dataHist.Clone("mT"), overwrite=True) qcdHist = dataHist_SB.Clone("QCD") dataHist_SB.style = styles.errorStyle( ROOT.kBlack ) dataHist_SB.legendText = "Observed (%s)"%args.mode.replace("mu","#mu")
class SystematicEstimator:
    """Base class for background/signal estimators with systematic variations.

    Subclasses implement ``_estimate`` (and, for data-driven estimators, the
    transfer-factor / fake-factor hooks) and inherit the caching machinery plus
    the per-source systematic helpers defined here.
    """
    # python2-style ABC declaration (this file is python2 elsewhere: print statements, iteritems)
    __metaclass__ = abc.ABCMeta

    def __init__(self, name, cacheDir=None):
        """Store the estimator name, set up the caches, and derive the photon-category cut from the name suffix."""
        logger.info("Initializing Systematic Estimator for %s"%name)
        self.name = name
        self.initCache(cacheDir)

        # Map name suffixes onto photon-category process cuts (first match wins).
        self.processCut = None
        if   "_gen"    in name: self.processCut = "cat0"    # photoncat0
        elif "_misID"  in name: self.processCut = "cat2"    # photoncat2
        elif "_had"    in name: self.processCut = "cat134"  # photoncat134
        elif "_prompt" in name: self.processCut = "cat02"   # photoncat02
        elif "_np"     in name: self.processCut = "cat134"  # photoncat134
        elif "_hp"     in name: self.processCut = "cat1"    # photoncat1
        elif "_fake"   in name: self.processCut = "cat3"    # photoncat3
        elif "_PU"     in name: self.processCut = "cat4"    # photoncat4

    def initCache(self, cacheDir="systematics"):
        """Create the main result cache; DD estimators additionally get helper/histo/tf caches."""
        logger.info("Initializing cache for %s in directory %s"%(self.name, cacheDir))
        if cacheDir:
            self.cacheDir = os.path.join(cache_directory, cacheDir)
            # NOTE(review): the directory is created relative to the cwd while
            # self.cacheDir points below cache_directory, and the MergingDirDB
            # paths below also use the relative cacheDir -- confirm intended.
            try:
                os.makedirs(cacheDir)
            except OSError:
                pass  # directory already exists

            cacheDirName = os.path.join(cacheDir, self.name)
            self.cache = MergingDirDB(cacheDirName)
            if not self.cache:
                # fixed typo: was "Exeption", which would have raised a NameError here
                raise Exception("Cache not initiated!")

            if self.name.count("DD"):
                # Data-driven estimators cache helper yields, histograms and transfer factors.
                # The bare "raise" statements here were invalid outside an except
                # block (RuntimeError: no active exception); raise explicitly instead.
                helperCacheDirName = os.path.join(cacheDir, self.name+"_helper")
                self.helperCache = MergingDirDB(helperCacheDirName)
                if not self.helperCache:
                    raise Exception("Helper cache not initiated!")
                histoHelperCacheDirName = os.path.join(cacheDir, self.name+"_histo")
                self.histoHelperCache = MergingDirDB(histoHelperCacheDirName)
                if not self.histoHelperCache:
                    raise Exception("Histo helper cache not initiated!")
                tfCacheDirName = os.path.join(cacheDir, self.name+"_tf")
                self.tfCache = MergingDirDB(tfCacheDirName)
                if not self.tfCache:
                    raise Exception("Transfer factor cache not initiated!")
            elif self.name.count("had"):
                helperCacheDirName = os.path.join(cacheDir, "had_helper")
                self.helperCache = MergingDirDB(helperCacheDirName)
                if not self.helperCache:
                    raise Exception("Helper cache not initiated!")
            else:
                self.helperCache = None
                self.tfCache = None
        else:
            self.cache = None
            self.helperCache = None
            self.tfCache = None

    # For the datadriven subclasses which often need the same getYieldFromDraw we write those yields to a cache
    def yieldFromCache(self, setup, process, c, selectionString, weightString, overwrite=False):
        """Return the yield for (process, channel, selection, weight), served from helperCache when possible."""
        s = (process, c, selectionString, weightString)
        if self.helperCache and self.helperCache.contains(s) and not overwrite:
            return self.helperCache.get(s)
        else:
            yieldFromDraw = u_float(**setup.processes[process].getYieldFromDraw(selectionString, weightString))
            if self.helperCache:
                self.helperCache.add(s, yieldFromDraw, overwrite=True)
            return yieldFromDraw

    # For the datadriven subclasses which often need the same mT histos we write those yields to a cache
    def histoFromCache(self, var, binning, setup, process, c, selectionString, weightString, overwrite=False):
        """Return a clone of the 1D histogram for the given draw, served from histoHelperCache when possible."""
        s = (var, "_".join(map(str,binning)), process, c, selectionString, weightString)
        if self.histoHelperCache and self.histoHelperCache.contains(s) and not overwrite:
            return self.histoHelperCache.get(s).Clone(process+c+var)
        else:
            histo = setup.processes[process].get1DHistoFromDraw( var, binning=binning, selectionString=selectionString, weightString=weightString, addOverFlowBin="upper" )
            if self.histoHelperCache:
                self.histoHelperCache.add(s, histo.Clone(process+c+var), overwrite=True)
            return histo.Clone(process+c+var)

    def uniqueKey(self, region, channel, setup, signalAddon=None, qcdUpdates=None):
        """Build the deterministic cache key for (region, channel, setup[, qcdUpdates][, signalAddon]).

        qcdUpdates default changed from a (harmless but unidiomatic) mutable {} to None;
        both are falsy, so behavior is identical.
        """
        sysForKey = setup.sys.copy()
        sysForKey["reweight"] = "TEMP"
        # little hack to preserve order of list when being dumped into json
        reweightKey = '["' + '", "'.join(sorted([i for i in setup.sys['reweight']])) + '"]'
        key = region, channel, json.dumps(sysForKey, sort_keys=True).replace('"TEMP"',reweightKey), json.dumps(setup.parameters, sort_keys=True), json.dumps(setup.lumi, sort_keys=True)
        # fixed: tuple(<str>) exploded the string into single characters, bloating the
        # key into a char tuple; append as 1-tuples instead.
        # NOTE(review): this changes the key layout, so entries written by the old
        # code will not be found and will be recomputed once.
        if qcdUpdates:  key += (json.dumps(qcdUpdates, sort_keys=True),)
        if signalAddon: key += (signalAddon,)
        return key

    def replace(self, i, r):
        """Best effort: apply the replacement pair r=(old, new) to reweight strings; return i unchanged otherwise."""
        try:
            if i.count("reweight"):
                return i.replace(r[0], r[1])
            else:
                return i
        except:
            # i may not be a string (deliberate best-effort fall-through)
            return i

    def cachedEstimate(self, region, channel, setup, signalAddon=None, save=True, overwrite=False, checkOnly=False):
        """Return the (possibly cached) estimate; negative results are clipped to 0 unless checkOnly."""
        key = self.uniqueKey(region, channel, setup, signalAddon=signalAddon)
        if (self.cache and self.cache.contains(key)) and not overwrite:
            res = self.cache.get(key)
            logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
        elif self.cache and not checkOnly:
            logger.debug( "Calculating %s result for %r"%(self.name, key) )
            res = self._estimate( region, channel, setup, signalAddon=signalAddon, overwrite=overwrite )
            _res = self.cache.add( key, res, overwrite=True )
            logger.debug( "Adding cached %s result for %r : %r" %(self.name, key, res) )
        elif not checkOnly:
            res = self._estimate( region, channel, setup, signalAddon=signalAddon, overwrite=overwrite)
        else:
            res = u_float(-1,0)
        return res if res >= 0 or checkOnly else u_float(0,0)

    def writeToCache(self, region, channel, setup, value, signalAddon=None, save=True, overwrite=False, checkOnly=False):
        """Store value under the unique key (unless already cached and not overwrite)."""
        key = self.uniqueKey(region, channel, setup, signalAddon=signalAddon)
        if (self.cache and self.cache.contains(key)) and not overwrite:
            res = self.cache.get(key)
            # if res.val != value.val: print "Warning, caches estimate not equal to input value: have %s, got %s"%(res, value)
            # logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
        elif self.cache and not checkOnly:
            _res = self.cache.add( key, value, overwrite=True )
            res = value
            logger.debug( "Adding cached %s result for %r : %r" %(self.name, key, res) )
        else:
            res = u_float(-1,0)
        return res if res >= 0 or checkOnly else u_float(0,0)

    def cachedTransferFactor(self, channel, setup, qcdUpdates=None, save=True, overwrite=False, checkOnly=False):
        """Return the (possibly cached) fitted QCD transfer factor; non-positive results clipped to 0 unless checkOnly."""
        key = self.uniqueKey("region", channel, setup, qcdUpdates=qcdUpdates)
        if (self.tfCache and self.tfCache.contains(key)) and not overwrite:
            res = self.tfCache.get(key)
            logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
        elif self.tfCache and not checkOnly:
            logger.debug( "Calculating %s result for %r"%(self.name, key) )
            # res = self._dataDrivenTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            res = self._fittedTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            _res = self.tfCache.add( key, res, overwrite=True )
            logger.debug( "Adding cached transfer factor for %r : %r" %(key, res) )
        elif not checkOnly:
            # res = self._dataDrivenTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            res = self._fittedTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
        else:
            res = u_float(-1,0)
        return res if res > 0 or checkOnly else u_float(0,0)

    def cachedQCDMCTransferFactor(self, channel, setup, qcdUpdates=None, save=True, overwrite=False, checkOnly=False):
        """Same as cachedTransferFactor but uses the QCD-MC based _transferFactor under a separate key."""
        key = self.uniqueKey("regionQCDMC", channel, setup, qcdUpdates=qcdUpdates)
        if (self.tfCache and self.tfCache.contains(key)) and not overwrite:
            res = self.tfCache.get(key)
            logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
        elif self.tfCache and not checkOnly:
            logger.debug( "Calculating %s result for %r"%(self.name, key) )
            # res = self._dataDrivenTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            res = self._transferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            _res = self.tfCache.add( key, res, overwrite=True )
            logger.debug( "Adding cached transfer factor for %r : %r" %(key, res) )
        elif not checkOnly:
            # res = self._dataDrivenTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            res = self._transferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
        else:
            res = u_float(-1,0)
        return res if res > 0 or checkOnly else u_float(0,0)

    def cachedFakeFactor(self, region, channel, setup, overwrite=False, checkOnly=False):
        """Return the (possibly cached) hadronic fake correction factor; non-positive results clipped to 0 unless checkOnly."""
        key = self.uniqueKey(region, channel, setup)
        if (self.helperCache and self.helperCache.contains(key)) and not overwrite:
            res = self.helperCache.get(key)
            logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
        elif self.helperCache and not checkOnly:
            logger.debug( "Calculating %s result for %r"%(self.name, key) )
            res = self._dataDrivenFakeCorrectionFactor( region, channel, setup, overwrite=overwrite )
            _res = self.helperCache.add( key, res, overwrite=True )
            logger.debug( "Adding cached transfer factor for %r : %r" %(key, res) )
        elif not checkOnly:
            res = self._dataDrivenFakeCorrectionFactor( region, channel, setup, overwrite=overwrite )
        else:
            res = u_float(-1,0)
        return res if res > 0 or checkOnly else u_float(0,0)

    @abc.abstractmethod
    def _estimate(self, region, channel, setup, signalAddon=None, overwrite=False):
        """Estimate yield in "region" using setup"""
        return

    # qcdUpdates kwarg added (default None) so the base stub matches the call in
    # cachedQCDMCTransferFactor, which passes qcdUpdates=...; backward compatible.
    def _transferFactor(self, channel, setup, qcdUpdates=None, overwrite=False):
        """Estimate transfer factor for QCD in "region" using setup"""
        return

    def _dataDrivenTransferFactor(self, channel, setup, qcdUpdates=None, overwrite=False):
        """Estimate transfer factor for QCD in "region" using setup"""
        return

    def _fittedTransferFactor(self, channel, setup, qcdUpdates=None, overwrite=False):
        """Estimate transfer factor for QCD in "region" using setup"""
        return

    def _dataDrivenFakeCorrectionFactor(self, region, channel, setup, overwrite=False):
        """Estimate fake factor for hadronic fakes in "region" using setup"""
        return

    def TransferFactorStatistic(self, region, channel, setup):
        """Relative statistical uncertainty of the transfer factor (symmetrized +-1 sigma)."""
        ref  = self.cachedTransferFactor(channel, setup)
        up   = u_float(ref.val + ref.sigma)
        down = u_float(ref.val - ref.sigma)
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def TuneSystematic(self, region, channel, setup):
        """Symmetrized relative variation between the TuneUp and TuneDown signalAddon samples."""
        up   = self.cachedEstimate(region, channel, setup, signalAddon="TuneUp")
        down = self.cachedEstimate(region, channel, setup, signalAddon="TuneDown")
        ref  = self.cachedEstimate(region, channel, setup)
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def topPtSystematic(self, region, channel, setup):
        """One-sided relative variation from the reweightTopPt reweighting."""
        ref = self.cachedEstimate(region, channel, setup)
        up  = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightTopPt"]}))
        return abs((up-ref)/ref) if ref > 0 else up

    def ErdOnSystematic(self, region, channel, setup):
        """One-sided relative variation from the "erdOn" signalAddon sample."""
        ref = self.cachedEstimate(region, channel, setup)
        up  = self.cachedEstimate(region, channel, setup, signalAddon="erdOn")
        return abs((up-ref)/ref) if ref > 0 else up

    def QCDbasedSystematic(self, region, channel, setup):
        """One-sided relative variation from the "QCDbased" signalAddon sample."""
        ref = self.cachedEstimate(region, channel, setup)
        up  = self.cachedEstimate(region, channel, setup, signalAddon="QCDbased")
        return abs((up-ref)/ref) if ref > 0 else up

    def GluonMoveSystematic(self, region, channel, setup):
        """One-sided relative variation from the "GluonMove" signalAddon sample."""
        ref = self.cachedEstimate(region, channel, setup)
        up  = self.cachedEstimate(region, channel, setup, signalAddon="GluonMove")
        return abs((up-ref)/ref) if ref > 0 else up

    def PUSystematic(self, region, channel, setup):
        """Symmetrized relative reweightPUUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPUUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPUDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def EERSystematic(self, region, channel, setup):
        """Symmetrized relative eResUp/Down selection variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"eResUp"}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"eResDown"}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def EESSystematic(self, region, channel, setup):
        """Symmetrized relative eScaleUp/Down selection variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"eScaleUp"}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"eScaleDown"}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    # def MERSystematic(self, region, channel, setup):
    #     ref  = self.cachedEstimate(region, channel, setup)
    #     up   = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"muTotalUp"}))
    #     down = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"muTotalDown"}))
    #     return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def JERSystematic(self, region, channel, setup):
        """Symmetrized relative jerUp/Down selection variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"jerUp"}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"jerDown"}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def JECSystematic(self, region, channel, setup, jes="Total"):
        """Symmetrized relative jes<jes>Up/Down variation; falls back to the larger
        one-sided variation when the symmetrized one is exactly zero."""
        ref = self.cachedEstimate(region, channel, setup)
        if ref == 0: return u_float(0,0)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"jes%sUp"%jes}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"jes%sDown"%jes}))
        unc  = abs(0.5*(up-down)/ref)
        if unc.val == 0:
            uncUp   = abs((ref-up)/ref)
            uncDown = abs((ref-down)/ref)
            unc     = uncUp if uncUp.val >= uncDown.val else uncDown
            if unc.val == 0: return u_float(0,0)
        return unc

    def unclusteredSystematic(self, region, channel, setup):
        """Symmetrized relative unclustEnUp/Down selection variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"unclustEnUp"}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"selectionModifier":"unclustEnDown"}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def L1PrefireSystematic(self, region, channel, setup):
        """Symmetrized relative reweightL1PrefireUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightL1PrefireUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightL1PrefireDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def btaggingSFbSystematic(self, region, channel, setup):
        """Symmetrized relative reweightBTag_SF_b_Up/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightBTag_SF_b_Up"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightBTag_SF_b_Down"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def btaggingSFlSystematic(self, region, channel, setup):
        """Symmetrized relative reweightBTag_SF_l_Up/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightBTag_SF_l_Up"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightBTag_SF_l_Down"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def leptonSFSystematic(self, region, channel, setup):
        """Symmetrized relative reweightLeptonTightSFUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def leptonSFStatSystematic(self, region, channel, setup):
        """Symmetrized relative reweightLeptonTightSFStatUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFStatUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFStatDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def leptonSFSystSystematic(self, region, channel, setup):
        """Symmetrized relative reweightLeptonTightSFSystUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFSystUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFSystDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def leptonTrackingSFSystematic(self, region, channel, setup):
        """Symmetrized relative reweightLeptonTrackingTightSFUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTrackingTightSFUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightLeptonTrackingTightSFDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def photonSFSystematic(self, region, channel, setup):
        """Symmetrized relative reweightPhotonSFUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPhotonSFUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPhotonSFDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def photonSFAltSigSystematic(self, region, channel, setup):
        """Symmetrized relative reweightPhotonSFAltSigUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPhotonSFAltSigUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPhotonSFAltSigDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def photonElectronVetoSFSystematic(self, region, channel, setup):
        """Symmetrized relative reweightPhotonElectronVetoSFUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPhotonElectronVetoSFUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightPhotonElectronVetoSFDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def triggerSystematic(self, region, channel, setup):
        """Symmetrized relative reweightTriggerUp/Down variation."""
        ref  = self.cachedEstimate(region, channel, setup)
        up   = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightTriggerUp"]}))
        down = self.cachedEstimate(region, channel, setup.sysClone({"reweight":["reweightTriggerDown"]}))
        return abs(0.5*(up-down)/ref) if ref > 0 else u_float(0,0)

    def getBkgSysJobs(self, region, channel, setup):
        """Return the list of (region, channel, varied setup, signalAddon) jobs for all background systematics."""
        l = [
            (region, channel, setup.sysClone({"reweight":["reweightTopPt"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPUUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPUDown"]}), None),
            (region, channel, setup.sysClone({"selectionModifier":"eScaleUp"}), None),
            (region, channel, setup.sysClone({"selectionModifier":"eScaleDown"}), None),
            (region, channel, setup.sysClone({"selectionModifier":"eResUp"}), None),
            (region, channel, setup.sysClone({"selectionModifier":"eResDown"}), None),
            # (region, channel, setup.sysClone({"selectionModifier":"muTotalUp"}), None),
            # (region, channel, setup.sysClone({"selectionModifier":"muTotalDown"}), None),
            (region, channel, setup.sysClone({"selectionModifier":"jerUp"}), None),
            (region, channel, setup.sysClone({"selectionModifier":"jerDown"}), None),
            # (region, channel, setup.sysClone({"selectionModifier":"jesTotalUp"}), None),
            # (region, channel, setup.sysClone({"selectionModifier":"jesTotalDown"}), None),
            # (region, channel, setup.sysClone({"selectionModifier":"unclustEnUp"}), None),
            # (region, channel, setup.sysClone({"selectionModifier":"unclustEnDown"}), None),
            (region, channel, setup.sysClone({"reweight":["reweightL1PrefireUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightL1PrefireDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightBTag_SF_b_Up"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightBTag_SF_b_Down"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightBTag_SF_l_Up"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightBTag_SF_l_Down"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTrackingTightSFUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTrackingTightSFDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPhotonSFUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPhotonSFDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPhotonSFAltSigUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPhotonSFAltSigDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPhotonElectronVetoSFUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightPhotonElectronVetoSFDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightTriggerUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightTriggerDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFStatUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFStatDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFSystUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFSystDown"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFUp"]}), None),
            (region, channel, setup.sysClone({"reweight":["reweightLeptonTightSFDown"]}), None),
        ]
        # JEC Tags, (standard is "Total")
        jesTags = ['FlavorQCD', 'RelativeBal', 'HF', 'BBEC1', 'EC2', 'Absolute', 'Absolute_%i'%setup.year, 'HF_%i'%setup.year, 'EC2_%i'%setup.year, 'RelativeSample_%i'%setup.year, 'BBEC1_%i'%setup.year]
        for jes in jesTags:
            l += [
                (region, channel, setup.sysClone({"selectionModifier":"jes%sUp"%jes}), None),
                (region, channel, setup.sysClone({"selectionModifier":"jes%sDown"%jes}), None),
            ]
        return l

    def getSigSysJobs(self, region, channel, setup):
        """Background jobs plus the signal-only signalAddon variations."""
        # in case there is a difference, enter it here (originally for fastSim)
        l = self.getBkgSysJobs(region = region, channel = channel, setup = setup)
        l += [
            (region, channel, setup, "TuneUp"),
            (region, channel, setup, "TuneDown"),
            (region, channel, setup, "erdOn"),
            (region, channel, setup, "QCDbased"),
            (region, channel, setup, "GluonMove"),
        ]
        return l

    def getTexName(self, channel, rootTex=True):
        """Best-effort TeX label: own texName, the channel process' texName,
        a texName common to all channels, or finally self.name."""
        try:
            name = self.texName
        except:
            try:
                name = self.process[channel].texName
            except:
                try:
                    texNames = [self.process[c].texName for c in allChannels]
                    # If all, only take texName if it is the same for all lepChannels
                    if texNames.count(texNames[0]) == len(texNames):
                        name = texNames[0]
                    else:
                        name = self.name
                except:
                    name = self.name
        if not rootTex:
            name = "$" + name.replace("#","\\") + "$" # Make it tex format
        return name
reweightPhotonElectronVetoSF * event.reweightBTag_SF) * ( misIDSF_val[2016].val if event.PhotonNoChgIsoNoSieie0_photonCatMagic == 2 and event.year == 2016 else 1. ) * (misIDSF_val[2017].val if event.PhotonNoChgIsoNoSieie0_photonCatMagic == 2 and event.year == 2017 else 1.) * (misIDSF_val[2018].val if event.PhotonNoChgIsoNoSieie0_photonCatMagic == 2 and event.year == 2018 else 1.) for s in mc: s.setWeightString(weightStringAR) key = (s.name, "AR", args.variable, "_".join(map(str, args.binning)), s.weightString, s.selectionString, selection) if dirDB.contains(key) and not args.overwrite: s.hist = copy.deepcopy(dirDB.get(key).Clone(s.name + "AR")) else: s.hist = s.get1DHistoFromDraw( args.variable, binning=args.binning, selectionString=selection) #, addOverFlowBin="upper" ) dirDB.add(key, s.hist.Clone("%s_AR" % s.name)) s.style = styles.lineStyle(s.color, width=2, dotted=False, dashed=False, errors=True) s.hist.style = styles.lineStyle(s.color, width=2, dotted=False, dashed=False,
years = [args.plotYear] else: years = [None] for i, year in enumerate(years): if year: addon = addons + [year] else: addon = addons # bkg substracted total histogram (signal) with total error name = [ "bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "signal" if args.bkgSubstracted else "total" ] print "_".join(name + addon) signal_tmp = dirDB.get("_".join(name + addon)) # bkg substracted total histogram (signal) with stat error name = [ "bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "signal_stat" if args.bkgSubstracted else "total_stat" ] print "_".join(name + addon) stat_tmp = dirDB.get("_".join(name + addon)) # background uncertainty name = [ "bkgSubtracted" if args.bkgSubstracted else "total", args.substituteCard, args.cardfile, "background_Up" ] print "_".join(name + addon) backgroundUp_tmp = dirDB.get("_".join(name + addon))
def getPSUnc(name, r, channel, setup): key = uniqueKey(name, r, channel, setup) + tuple(str(args.year)) PSUnc = psUncCache.get(key) return max(0.001, PSUnc) # check if already cached configlist = regionNames + EFTparams configlist.append("incl" if args.inclRegion else "diff") configlist.append("expected" if args.expected else "observed") sConfig = "_".join(configlist) print sConfig if args.checkOnly and nllCache.contains(sConfig): print eft, nllCache.get(sConfig) sys.exit(0) elif args.checkOnly and not nllCache.contains(sConfig): print eft, -1 sys.exit(0) elif not args.overwrite and nllCache.contains(sConfig): sys.exit(0) def wrapper(): c = cardFileWriter.cardFileWriter() c.releaseLocation = combineReleaseLocation cardFileNameTxt = os.path.join(limitDir, "_".join(regionNames + EFTparams) + ".txt") cardFileNameShape = cardFileNameTxt.replace(".txt", "_shape.root")
runOnLxPlus=False) #photonselection always false for qcd estimate setup = setup.sysClone(parameters=allRegions[args.selection]["parameters"]) selection = setup.selection("MC", channel="all", **setup.defaultParameters())["prefix"] selection = cutInterpreter.cutString(selection) selection += "&&pTStitching==1&&triggered==1" print selection if args.addCut: selection += "&&" + cutInterpreter.cutString(args.addCut) print("Using selection string: %s" % selection) key = (mc_e.name, "AR", args.variable, "_".join(map(str, args.binning)), mc_e.weightString, mc_e.selectionString, selection) if dirDB.contains(key) and not args.overwrite: mcHist_e = dirDB.get(key.Clone("e")) else: mcHist_e = mc_e.get1DHistoFromDraw(args.variable, binning=args.binning, selectionString=selection) dirDB.add(key, mcHist_e.Clone("e"), overwrite=True) key = (mc_mu.name, "AR", args.variable, "_".join(map(str, args.binning)), mc_mu.weightString, mc_mu.selectionString, selection) if dirDB.contains(key) and not args.overwrite: mcHist_mu = dirDB.get(key.Clone("mu")) else: mcHist_mu = mc_mu.get1DHistoFromDraw(args.variable, binning=args.binning, selectionString=selection) dirDB.add(key, mcHist_mu.Clone("mu"), overwrite=True)
preSelection = preSelection.replace("nBTagGoodInvLepIso==0", "nBTagGoodInvLepIso>=1") print "inv sel", preSelection if args.addCut: addSel = cutInterpreter.cutString(args.addCut) for iso, invIso in replaceSelection.iteritems(): preSelection = preSelection.replace(iso, invIso) addSel = addSel.replace(iso, invIso) preSelection += "&&" + addSel else: raise Exception("Region not implemented") key = (data_sample.name, "mTinv", "_".join(map(str, binning)), data_sample.weightString, data_sample.selectionString, preSelection) if dirDB.contains(key) and not args.overwrite: dataHist_SB = dirDB.get(key) else: dataHist_SB = data_sample.get1DHistoFromDraw(mTinv, binning=binning, selectionString=preSelection, addOverFlowBin="upper") dirDB.add(key, dataHist_SB) key = (data_sample.name, "mT", "_".join(map(str, binning)), data_sample.weightString, data_sample.selectionString, selection) if dirDB.contains(key) and not args.overwrite: dataHist = dirDB.get(key) else: dataHist = data_sample.get1DHistoFromDraw("mT", binning=binning, selectionString=selection,