#process.tagDumper.throwOnUnclassified= False process.tagDumper.dumpTrees = True process.tagDumper.dumpWorkspace = False process.tagDumper.quietRooFit = True process.flashggDiPhotonMVA.diphotonMVAweightfile = customize.diphoxml minmass=100. maxmass=180. if customize.runOnZ: minmass=70. maxmass=120. cfgTools.addCategory(process.tagDumper, "Reject", "diPhoton.mass< %f || diPhoton.mass> %f" %(minmass, maxmass), -1 ## if nSubcat is -1 do not store anythings ) cfgTools.addCategories(process.tagDumper, [## cuts are applied in cascade ("All","1",0), ], variables=[ "leadptom := diPhotonMVA.leadptom ", "subleadptom := diPhotonMVA.subleadptom ", "leadmva := diPhotonMVA.leadmva ", "subleadmva := diPhotonMVA.subleadmva ", "leadeta := diPhotonMVA.leadeta ", "subleadeta := diPhotonMVA.subleadeta", "sigmarv := diPhotonMVA.sigmarv", "sigmawv := diPhotonMVA.sigmawv",
process.tagDumper.dumpTrees = True process.tagDumper.dumpWorkspace = False process.tagDumper.quietRooFit = True process.flashggDiPhotonMVA.diphotonMVAweightfile = customize.diphoxml minmass = 100. maxmass = 180. if customize.runOnZ: minmass = 70. maxmass = 120. cfgTools.addCategory( process.tagDumper, "Reject", "diPhoton.mass< %f || diPhoton.mass> %f" % (minmass, maxmass), -1 ## if nSubcat is -1 do not store anythings ) cfgTools.addCategories( process.tagDumper, [ ## cuts are applied in cascade ("All", "1", 0), ], variables=[ "leadptom := diPhotonMVA.leadptom ", "subleadptom := diPhotonMVA.subleadptom ", "leadmva := diPhotonMVA.leadmva ", "subleadmva := diPhotonMVA.subleadmva ", "leadeta := diPhotonMVA.leadeta ", "subleadeta := diPhotonMVA.subleadeta",
process.diphotonDumper.src = "flashggSystTagMerger" process.diphotonDumper.processId = "test" process.diphotonDumper.dumpTrees = False process.diphotonDumper.dumpWorkspace = True process.diphotonDumper.dumpHistos = False process.diphotonDumper.quietRooFit = True process.diphotonDumper.systLabel = "" for systlabel in systlabels: cutstring = "hasSyst(\"%s\")"%systlabel #print "syst label ", systlabel cfgTools.addCategory(process.diphotonDumper, "flashggUntaggedTag__%s"%systlabel, cutbased=cutstring, systLabel=systlabel, subcats=5, variables = minimalVariables, histograms = minimalHistograms ) cfgTools.addCategory(process.diphotonDumper, "flashggVBFTag__%s"%systlabel, cutbased=cutstring, systLabel=systlabel, subcats=3, variables = minimalVariables, histograms = minimalHistograms ) cfgTools.addCategory(process.diphotonDumper, "flashggVHTightTag__%s"%systlabel,
nAlphaSWeights = 2 nScaleWeights = 9 else: print "Data, background MC, or non-central value: no PDF weights" dumpPdfWeights = False nPdfWeights = -1 nAlphaSWeights = -1 nScaleWeights = -1 cfgTools.addCategory(process.tagsDumper, systlabel, classname=tagName, cutbased=cutstring, subcats=tagCats, variables=currentVariables, histograms=minimalHistograms, binnedOnly=isBinnedOnly, dumpPdfWeights=dumpPdfWeights, nPdfWeights=nPdfWeights, nAlphaSWeights=nAlphaSWeights, nScaleWeights=nScaleWeights ) process.flashggDiPhotonSystematics.src = "flashggDiPhotons" from HLTrigger.HLTfilters.hltHighLevel_cfi import hltHighLevel process.hltHighLevel= hltHighLevel.clone(HLTPaths = cms.vstring("HLT_Diphoton30_18_R9Id_OR_IsoCaloId_AND_HE_R9Id_Mass95_v1") ) process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) ) process.load('RecoMET.METFilters.eeBadScFilter_cfi') process.eeBadScFilter.EERecHitSource = cms.InputTag("reducedEgamma","reducedEERecHits") # Saved MicroAOD Collection (data only)
process.diphotonDumper.dumpWorkspace = True process.diphotonDumper.dumpHistos = True process.diphotonDumper.quietRooFit = True ## define categories and associated objects to dump cfgTools.addCategory(process.diphotonDumper, "flashggUntaggedTag", subcats=5, variables=["CMS_hgg_mass[160,100,180]:=diPhoton().mass", "leadPt :=diPhoton().leadingPhoton.pt", "subleadPt :=diPhoton().subLeadingPhoton.pt", "diphoMVA :=diPhotonMVA().result", "maxEta :=max(abs(diPhoton().leadingPhoton.superCluster.eta),abs(diPhoton().leadingPhoton.superCluster.eta))", "genZ :=tagTruth().genPV().z", "vtxZ :=diPhoton().vtx().z", "dZ :=abs(tagTruth().genPV().z-diPhoton().vtx().z)" ], histograms=["CMS_hgg_mass>>mass(160,100,180)", "subleadPt:leadPt>>ptLeadvsSub(180,20,200:180,20,200)", "diphoMVA>>diphoMVA(50,0,1)", "maxEta>>maxEta[0.,0.1,0.2,0.3,0.4,0.6,0.8,1.0,1.2,1.4442,1.566,1.7,1.8,2.,2.2,2.3,2.5]" ] ) cfgTools.addCategory(process.diphotonDumper, "flashggVBFTag", subcats=3, variables=["CMS_hgg_mass[160,100,180]:=diPhoton().mass", "leadPt :=diPhoton().leadingPhoton.pt",
## Dumper process.analyzer = diphotonDumpConfig process.analyzer.dumpTrees = True process.analyzer.dumpWorkspace = False process.analyzer.quietRooFit = True # split tree, histogram and datasets by process process.analyzer.nameTemplate = "$PROCESS_$SQRTS_$LABEL_$SUBCAT" ## do not split by process ## process.analyzer.nameTemplate = "minitree_$SQRTS_$LABEL_$SUBCAT" ## define categories and associated objects to dump cfgTools.addCategory( process.analyzer, "Reject", "abs(leadingPhoton.superCluster.eta)>=1.4442&&abs(leadingPhoton.superCluster.eta)<=1.566||abs(leadingPhoton.superCluster.eta)>=2.5" "||abs(subLeadingPhoton.superCluster.eta)>=1.4442 && abs(subLeadingPhoton.superCluster.eta)<=1.566||abs(subLeadingPhoton.superCluster.eta)>=2.5", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories( process.analyzer, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [ ("EBHighR9", "max(abs(leadingPhoton.superCluster.eta),abs(leadingPhoton.superCluster.eta))<1.4442" "&& min(leadingPhoton.r9,subLeadingPhoton.r9)>0.94", 0), ## EB high R9 ("EBLowR9",
"sigmaIetaIeta := photon.sigmaIetaIeta", "r9 := photon.r9", "esEffSigmaRR := photon.esEffSigmaRR", "s4 := photon.s4", "covIEtaIPhi := photon.sieip" ] ## list of histograms to be plotted histograms=["r9>>r9(110,0,1.1)", "scEta>>scEta(100,-2.5,2.5)" ] ## define categories and associated objects to dump cfgTools.addCategory(process.photonViewDumper, "Reject", "abs(photon.superCluster.eta)>=1.4442&&abs(photon.superCluster.eta)<=1.566||abs(photon.superCluster.eta)>=2.5", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories(process.photonViewDumper, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [("promptPhotonsEB","abs(photon.superCluster.eta)<1.5 && photon.genMatchType == 1",0), ("fakePhotonsEB", "abs(photon.superCluster.eta)<1.5 && photon.genMatchType != 1",0), ], ## variables to be dumped in trees/datasets. Same variables for all categories ## if different variables wanted for different categories, can add categorie one by one with cfgTools.addCategory variables=variables, ## histograms to be plotted. ## the variables need to be defined first
def addGenAnalysis(self):
    """Wire up the gen-level bbgg dumper and its categories.

    No-op on data (there is no gen information to dump).  Otherwise loads the
    gen di-photon/di-bjet sequence, matches gen candidates to the reco tag
    decision, and books one dumper category per (tag, sub-category) pair plus
    a "NoTag" category, all dumping the same gen-level kinematic variables.
    """
    # Gen analysis only makes sense on simulation.
    if self.customize.processId == "Data":
        return
    import flashgg.Taggers.dumperConfigTools as cfgTools
    ## gen-level bbgg candidates
    self.process.load("flashgg.MicroAOD.flashggGenDiPhotonDiBJetsSequence_cff")
    ## match gen-level candidates to the reco tag
    self.process.load("flashgg.Taggers.flashggTaggedGenDiphotons_cfi")
    self.process.flashggTaggedGenDiphotons.src = "flashggSelectedGenDiPhotonDiBJets"
    self.process.flashggTaggedGenDiphotons.tags = "flashggTagSorter"
    # reuse the same tag-name remapping as the reco-level tags dumper
    self.process.flashggTaggedGenDiphotons.remap = self.process.tagsDumper.classifierCfg.remap

    ## gen-level dumper: trees only, no workspace
    self.process.load("flashgg.Taggers.genDiphotonDumper_cfi")
    self.process.genDiphotonDumper.dumpTrees = True
    self.process.genDiphotonDumper.dumpWorkspace = False
    self.process.genDiphotonDumper.src = "flashggTaggedGenDiphotons"

    from flashgg.Taggers.globalVariables_cff import globalVariables
    self.process.genDiphotonDumper.dumpGlobalVariables = True
    self.process.genDiphotonDumper.globalVariables = globalVariables

    # four-vectors of the diphoton, dijet and their four constituents
    genVariables = [
        "mgg := mass",
        "mbb := dijet.mass",
        "mhh := sqrt( pow(energy+dijet.energy,2) - pow(px+dijet.px,2) - pow(py+dijet.py,2) - pow(pz+dijet.pz,2))",

        "leadPho_px := leadingPhoton.px",
        "leadPho_py := leadingPhoton.py",
        "leadPho_pz := leadingPhoton.pz",
        "leadPho_e  := leadingPhoton.energy",
        "subleadPho_px := subLeadingPhoton.px",
        "subleadPho_py := subLeadingPhoton.py",
        "subleadPho_pz := subLeadingPhoton.pz",
        "subleadPho_e  := subLeadingPhoton.energy",

        "leadJet_px := leadingJet.px",
        "leadJet_py := leadingJet.py",
        "leadJet_pz := leadingJet.pz",
        "leadJet_e  := leadingJet.energy",
        "subleadJet_px := subLeadingJet.px",
        "subleadJet_py := subLeadingJet.py",
        "subleadJet_pz := subLeadingJet.pz",
        "subleadJet_e  := subLeadingJet.energy",
    ]
    # if self.customize.HHWWggReweight > 0:
    #     for num in range(0,12):
    #         genVariables += ["benchmark_reweight_%d := getHHbbggBenchmarkReweight(%d)"%(num,num)]
    #     genVariables += ["benchmark_reweight_SM := getHHbbggBenchmarkReweight(12)"]
    #     genVariables += ["benchmark_reweight_box := getHHbbggBenchmarkReweight(13)"]
    #     genVariables += ["benchmark_reweight_2017fake := getHHbbggBenchmarkReweight(14)"]

    ## category for events with no reco-level tag
    cfgTools.addCategory(
        self.process.genDiphotonDumper,
        "NoTag",
        'isTagged("flashggNoTag")',
        1,
        variables=genVariables,
    )

    for tag in self.tagList:
        ## tagged events
        tagName, subCats = tag
        # cut-based classifiers do not look at the sub-category number, so
        # each sub-category needs its own explicitly defined cut
        for isub in xrange(subCats):
            cfgTools.addCategory(
                self.process.genDiphotonDumper,
                "%s_%d" % (tagName, isub),
                'isTagged("%s") && categoryNumber == %d' % (tagName, isub),
                0,
                variables=genVariables  ##+recoVariables
            )

    self.process.genp = cms.Path(
        self.process.flashggGenDiPhotonDiBJetsSequence
        * self.process.flashggTaggedGenDiphotons
        * self.process.genDiphotonDumper)
#process.mumugammaDumper.src = "kinPreselDiPhotons" process.mumugammaDumper.dumpTrees = True process.mumugammaDumper.dumpWorkspace = False process.mumugammaDumper.quietRooFit = True # split tree, histogram and datasets by process process.mumugammaDumper.nameTemplate = "$PROCESS_$SQRTS_$LABEL_$SUBCAT" ## do not split by process ## process.diphotonDumper.nameTemplate = "minitree_$SQRTS_$LABEL_$SUBCAT" ## define categories and associated objects to dump cfgTools.addCategory( process.mumugammaDumper, "Reject", " !Is2012FSRZMMG ", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories( process.mumugammaDumper, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [ ("EB", "abs(MMG_Photon.superCluster.eta)<1.5", 0), ## ("EE", "abs(MMG_Photon.superCluster.eta)>1.5", 0), ##("EE","1",0), ## evereything elese is EB+EE ], ## variables to be dumped in trees/datasets. Same variables for all categories ## if different variables wanted for different categories, can add categorie one by one with cfgTools.addCategory
## Dumper process.analyzer = photonDumpConfig process.analyzer.dumpTrees = True process.analyzer.dumpWorkspace = False process.analyzer.quietRooFit = True # split tree, histogram and datasets by process ## process.analyzer.nameTemplate ="$PROCESS_$SQRTS_$LABEL_$SUBCAT" ## do not split by process ## process.analyzer.nameTemplate = "minitree_$SQRTS_$LABEL_$SUBCAT" process.analyzer.nameTemplate ="$PROCESS_$LABEL" ## define categories and associated objects to dump cfgTools.addCategory(process.analyzer, "Reject", "abs(superCluster.eta)>=1.4442&&abs(superCluster.eta)<=1.566||abs(superCluster.eta)>=2.5", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories(process.analyzer, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [("promptPhotons","genMatchType == 1",0), ("fakePhotons", "genMatchType != 1",0), ], ## variables to be dumped in trees/datasets. Same variables for all categories ## if different variables wanted for different categories, can add categorie one by one with cfgTools.addCategory variables=["pt","energy","eta","phi","scEta:=superCluster.eta", "r9","chgIsoWrtWorstVtx := getpfChgIsoWrtWorstVtx03", "genIso := userFloat('genIso')", "eTrue := ? hasMatchedGenPhoton ? matchedGenPhoton.energy : 0"
["VHLooseTag", 0], ["VHEtTag", 0], ["VHHadronicTag", 0], ["TTHHadronicTag", 0], ["TTHLeptonicTag", 0]] definedSysts = set() process.tagsDumper.classifierCfg.remap = cms.untracked.VPSet() for tag in tagList: tagName = tag[0] tagCats = tag[1] # remap return value of class-based classifier process.tagsDumper.classifierCfg.remap.append( cms.untracked.PSet(src=cms.untracked.string("flashgg%s" % tagName), dst=cms.untracked.string(tagName))) for systlabel in systlabels: if not systlabel in definedSysts: # the cut corresponding to the systematics can be defined just once cutstring = "hasSyst(\"%s\") " % (systlabel) definedSysts.add(systlabel) else: cutstring = None cfgTools.addCategory(process.tagsDumper, systlabel, classname=tagName, cutbased=cutstring, subcats=tagCats, variables=minimalVariables, histograms=minimalHistograms) process.p1 = cms.Path(process.tagsDumper) print process.p1
"phoRndConeChIso6 := 999", "phoRndConeChIso7 := 999", "phoRndConeChIso8 := 999", ]) if ":" in customize.massCut: massCutEB,massCutEE = map(float,customize.massCut.split(":")) massCut = min(massCutEB,massCutEE) else: massCutEB,massCutEE = None,None massCut = float(customize.massCut) if massCutEB or massCutEE: cfgTools.addCategory(diphotonDumper,"RejectLowMass", " (max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))<1.4442 && mass <= %f)" "|| (max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))>1.566 && mass <= %f)" % (massCutEB,massCutEE),-1) cfgTools.addCategories(diphotonDumper, [## cuts are applied in cascade ## ("all","1"), ("EBHighR9","max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))<1.4442" "&& min(leadingPhoton.r9,subLeadingPhoton.r9)>0.94",0), ("EBLowR9","max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))<1.4442",0), ("EEHighR9","min(leadingPhoton.r9,subLeadingPhoton.r9)>0.94",0), ("EELowR9","1",0), ], variables=variables, histograms=histograms ) # single photon dumpoer
def addGenOnlyAnalysis(process,processId,acceptance,tagList,systlabels,pdfWeights=None,recoJetCollections=None):
    """Book the gen-level-only dumper path for acceptance studies.

    ``acceptance`` selects which gen events are kept: "IN" keeps events
    passing the gen acceptance cut, "OUT" keeps the complement, anything
    else keeps everything.  One dumper category is defined per
    (tag, sub-category) pair plus a "NoTag" category; PDF/alphaS/scale
    weight dumping is enabled only when ``pdfWeights`` is supplied.
    """
    import itertools
    import flashgg.Taggers.dumperConfigTools as cfgTools

    accCut = getAccGenCut()
    cut = "1"
    if acceptance == "IN":
        cut = accCut
    elif acceptance == "OUT":
        cut = "!(%s)" % accCut

    process.load("flashgg.MicroAOD.flashggGenDiPhotonsSequence_cff")
    process.flashggSelectedGenDiPhotons.cut = cut
    process.flashggSortedGenDiPhotons.maxNumber = 1

    # associate the sorted gen diphoton with the reco tag decision
    process.load("flashgg.Taggers.flashggTaggedGenDiphotons_cfi")
    process.flashggTaggedGenDiphotons.src = "flashggSortedGenDiPhotons"
    process.flashggTaggedGenDiphotons.tags = "flashggTagSorter"
    process.flashggTaggedGenDiphotons.remap = process.tagsDumper.classifierCfg.remap
    ## process.flashggTaggedGenDiphotons.tags = "flashggSystTagMerger"

    # dumper: trees only, no workspace
    process.load("flashgg.Taggers.genDiphotonDumper_cfi")
    process.genDiphotonDumper.dumpTrees = True
    process.genDiphotonDumper.dumpWorkspace = False
    process.genDiphotonDumper.src = "flashggTaggedGenDiphotons"

    from flashgg.Taggers.globalVariables_cff import globalVariables
    process.genDiphotonDumper.dumpGlobalVariables = True
    process.genDiphotonDumper.globalVariables = globalVariables

    bookHadronicActivityProducers(process,processId,"flashggTagSorter",
                                  "flashggTaggedGenDiphotons",recoJetCollections,
                                  genJetCollection="slimmedGenJets")
    addGenGlobalVariables(process,process.genDiphotonDumper)
    addRecoGlobalVariables(process,process.genDiphotonDumper)

    # weights are disabled unless the caller provides the 4-tuple
    dumpPdfWeights,nPdfWeights,nAlphaSWeights,nScaleWeights = False,-1,-1,-1
    if pdfWeights:
        dumpPdfWeights,nPdfWeights,nAlphaSWeights,nScaleWeights = pdfWeights

    genVariables = getGenVariables(False)
    recoVariables = getRecoVariables(False)

    ## events that did not end up in any reco tag
    cfgTools.addCategory(process.genDiphotonDumper,
                         "NoTag",
                         'isTagged("")', 1,
                         variables=genVariables,
                         dumpPdfWeights=dumpPdfWeights,
                         nPdfWeights=nPdfWeights,
                         nAlphaSWeights=nAlphaSWeights,
                         nScaleWeights=nScaleWeights
                         )

    for tag in tagList:
        tagName,subCats = tag
        # cut-based classifiers do not look at the sub-category number, so
        # every sub-category needs its own explicitly defined cut
        for isub in xrange(subCats):
            cfgTools.addCategory(process.genDiphotonDumper,
                                 "%s_%d" % ( tagName, isub ),
                                 'isTagged("%s") && categoryNumber == %d' % (tagName, isub), 0,
                                 variables=genVariables+recoVariables,
                                 dumpPdfWeights=dumpPdfWeights,
                                 nPdfWeights=nPdfWeights,
                                 nAlphaSWeights=nAlphaSWeights,
                                 nScaleWeights=nScaleWeights
                                 )

    ## process.pfid = cms.Path(process.genFilter*process.flashggGenDiPhotonsSequence*process.flashggTaggedGenDiphotons*process.genDiphotonDumper)
    process.pfid = cms.Path(process.genFilter*process.genDiphotonDumper)
"sublead_superCluster_eta := subLeadingPhoton.superCluster.eta", "sublead_phi := subLeadingPhoton.phi", "sublead_energy := subLeadingPhoton.p4.energy", "sublead_5x5_Energy := subLeadingPhoton.full5x5_e5x5", "sublead_pfChIso03WrtChosenVtx := subLeadingView.pfChIso03WrtChosenVtx", "sublead_pfPhoIso03 := subLeadingPhoton.pfPhoIso03", "sublead_pfChgIsoWrtWorstVtx03 := subLeadingPhoton.pfChgIsoWrtWorstVtx03", "sublead_phoIdMvaWrtChosenVtx := subLeadingView.phoIdMvaWrtChosenVtx", "sublead_genMatchType := subLeadingPhoton.genMatchType", "sublead_genIso := ?subLeadingPhoton.hasUserFloat('genIso')?subLeadingPhoton.userFloat('genIso'):0", "sublead_egChargedHadronIso := subLeadingPhoton.egChargedHadronIso", "sublead_egPhotonIso := subLeadingPhoton.egPhotonIso", "sublead_egNeutralHadronIso := subLeadingPhoton.egNeutralHadronIso", "sublead_hadTowOverEm := subLeadingPhoton.hadTowOverEm", "sublead_full5x5_sigmaIetaIeta := subLeadingPhoton.full5x5_sigmaIetaIeta", "sublead_hasPixelSeed := subLeadingPhoton.hasPixelSeed", "sublead_passElectronVeto := subLeadingPhoton.passElectronVeto", "sublead_sigEOverE := subLeadingPhoton.sigEOverE", # "sublead_unsmearedSigmaEoE := subLeadingPhoton.userFloat('unsmearedSigmaEoE')", # "sublead_uncorr_r9 := ? subLeadingPhoton.hasUserFloat('uncorr_r9') ? subLeadingPhoton.userFloat('uncorr_r9') : -1.", # "sublead_uncorr_etaWidth := ? subLeadingPhoton.hasUserFloat('uncorr_etaWidth') ? subLeadingPhoton.userFloat('uncorr_etaWidth') : -1.", # "sublead_uncorr_s4 := ? subLeadingPhoton.hasUserFloat('uncorr_s4') ? subLeadingPhoton.userFloat('uncorr_s4') : -1.", "sublead_PhotonId := subLeadPhotonId()" ] cfgTools.addCategory(diphotonDumper, "all", "1", variables=validation_variables, )
process.diphotonDumper.src = "kinPreselDiPhotons" process.diphotonDumper.dumpTrees = True process.diphotonDumper.dumpWorkspace = False process.diphotonDumper.quietRooFit = True # split tree, histogram and datasets by process process.diphotonDumper.nameTemplate ="$PROCESS_$SQRTS_$LABEL_$SUBCAT" ## do not split by process ## process.diphotonDumper.nameTemplate = "minitree_$SQRTS_$LABEL_$SUBCAT" ## define categories and associated objects to dump cfgTools.addCategory(process.diphotonDumper, "Reject", "abs(leadingPhoton.superCluster.eta)>=1.4442&&abs(leadingPhoton.superCluster.eta)<=1.566||abs(leadingPhoton.superCluster.eta)>=2.5" "||abs(subLeadingPhoton.superCluster.eta)>=1.4442 && abs(subLeadingPhoton.superCluster.eta)<=1.566||abs(subLeadingPhoton.superCluster.eta)>=2.5", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories(process.diphotonDumper, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [("EBHighR9","max(abs(leadingPhoton.superCluster.eta),abs(leadingPhoton.superCluster.eta))<1.4442" "&& min(leadingPhoton.full5x5_r9,subLeadingPhoton.full5x5_r9)>0.94",0), ## EB high R9 ("EBLowR9","max(abs(leadingPhoton.superCluster.eta),abs(leadingPhoton.superCluster.eta))<1.4442",0), ## remaining EB is low R9 ("EEHighR9","min(leadingPhoton.full5x5_r9,subLeadingPhoton.full5x5_r9)>0.94",0), ## then EE high R9 ("EELowR9","1",0), ## evereything elese is EE low R9 ], ## variables to be dumped in trees/datasets. Same variables for all categories ## if different variables wanted for different categories, can add categorie one by one with cfgTools.addCategory
def addGenOnlyAnalysis(process, processId, acceptance, tagList, systlabels, pdfWeights=None, recoJetCollections=None):
    """Configure the gen-only dumper path used for acceptance studies.

    The ``acceptance`` flag chooses the gen selection ("IN" = inside the
    gen acceptance cut, "OUT" = its complement, otherwise no cut).  A
    dumper category is booked for the untagged case and for every
    (tag, sub-category) combination; PDF/alphaS/scale weights are dumped
    only when ``pdfWeights`` supplies the corresponding 4-tuple.
    """
    import itertools
    import flashgg.Taggers.dumperConfigTools as cfgTools

    # Build the gen-level selection string.
    accSelection = getAccGenCut()
    genCut = "1"
    if acceptance == "IN":
        genCut = accSelection
    elif acceptance == "OUT":
        genCut = "!(%s)" % accSelection

    process.load("flashgg.MicroAOD.flashggGenDiPhotonsSequence_cff")
    process.flashggSelectedGenDiPhotons.cut = genCut
    process.flashggSortedGenDiPhotons.maxNumber = 1

    # Match the leading gen diphoton to the reco-level tag decision.
    process.load("flashgg.Taggers.flashggTaggedGenDiphotons_cfi")
    process.flashggTaggedGenDiphotons.src = "flashggSortedGenDiPhotons"
    process.flashggTaggedGenDiphotons.tags = "flashggTagSorter"
    process.flashggTaggedGenDiphotons.remap = process.tagsDumper.classifierCfg.remap
    ## process.flashggTaggedGenDiphotons.tags = "flashggSystTagMerger"

    # Dumper writes flat trees only (no RooWorkspace).
    process.load("flashgg.Taggers.genDiphotonDumper_cfi")
    process.genDiphotonDumper.dumpTrees = True
    process.genDiphotonDumper.dumpWorkspace = False
    process.genDiphotonDumper.src = "flashggTaggedGenDiphotons"

    from flashgg.Taggers.globalVariables_cff import globalVariables
    process.genDiphotonDumper.dumpGlobalVariables = True
    process.genDiphotonDumper.globalVariables = globalVariables

    bookHadronicActivityProducers(process, processId, "flashggTagSorter",
                                  "flashggTaggedGenDiphotons", recoJetCollections,
                                  genJetCollection="slimmedGenJets")
    addGenGlobalVariables(process, process.genDiphotonDumper)
    addRecoGlobalVariables(process, process.genDiphotonDumper)

    # Weight dumping defaults to off; the caller may override via pdfWeights.
    if pdfWeights:
        dumpPdfWeights, nPdfWeights, nAlphaSWeights, nScaleWeights = pdfWeights
    else:
        dumpPdfWeights, nPdfWeights, nAlphaSWeights, nScaleWeights = False, -1, -1, -1

    genVariables = getGenVariables(False)
    recoVariables = getRecoVariables(False)

    # Category for events not matched to any reco tag.
    cfgTools.addCategory(process.genDiphotonDumper,
                         "NoTag", 'isTagged("")', 1,
                         variables=genVariables,
                         dumpPdfWeights=dumpPdfWeights,
                         nPdfWeights=nPdfWeights,
                         nAlphaSWeights=nAlphaSWeights,
                         nScaleWeights=nScaleWeights)

    for tagName, subCats in tagList:
        # Cut-based classifiers ignore the sub-category number, so every
        # sub-category is defined explicitly with its own cut.
        for isub in xrange(subCats):
            cfgTools.addCategory(process.genDiphotonDumper,
                                 "%s_%d" % (tagName, isub),
                                 'isTagged("%s") && categoryNumber == %d' % (tagName, isub), 0,
                                 variables=genVariables + recoVariables,
                                 dumpPdfWeights=dumpPdfWeights,
                                 nPdfWeights=nPdfWeights,
                                 nAlphaSWeights=nAlphaSWeights,
                                 nScaleWeights=nScaleWeights)

    ## process.pfid = cms.Path(process.genFilter*process.flashggGenDiPhotonsSequence*process.flashggTaggedGenDiphotons*process.genDiphotonDumper)
    process.pfid = cms.Path(process.genFilter * process.genDiphotonDumper)
nPdfWeights = 60 nAlphaSWeights = 2 nScaleWeights = 9 else: print "Data, background MC, or non-central value, or no systematics: no PDF weights" dumpPdfWeights = False nPdfWeights = -1 nAlphaSWeights = -1 nScaleWeights = -1 cfgTools.addCategory(process.tagsDumper, systlabel, classname=tagName, cutbased=cutstring, subcats=tagCats, variables=currentVariables, histograms=minimalHistograms, binnedOnly=isBinnedOnly, dumpPdfWeights=dumpPdfWeights, nPdfWeights=nPdfWeights, nAlphaSWeights=nAlphaSWeights, nScaleWeights=nScaleWeights, splitPdfByStage0Cat=customize.doHTXS) # Require standard diphoton trigger # debugging # print'customize = ',customize # print'customize.datasetName() = ',customize.datasetName() #
for systlabel in systlabels: cutstring = "hasSyst(\"%s\")"%systlabel #print "syst label ", systlabel cfgTools.addCategory(process.diphotonDumper, "flashggUntaggedTag__%s"%systlabel, cutbased=cutstring, systLabel=systlabel, subcats=5, variables=["CMS_hgg_mass[160,100,180]:=diPhoton().mass", "leadPt :=diPhoton().leadingPhoton.pt", "subleadPt :=diPhoton().subLeadingPhoton.pt", "diphoMVA :=diPhotonMVA().result", "maxEta :=max(abs(diPhoton().leadingPhoton.superCluster.eta),abs(diPhoton().leadingPhoton.superCluster.eta))", "genZ :=tagTruth().genPV().z", "vtxZ :=diPhoton().vtx().z", "dZ :=abs(tagTruth().genPV().z-diPhoton().vtx().z)" ], histograms=["CMS_hgg_mass>>mass(160,100,180)", "subleadPt:leadPt>>ptLeadvsSub(180,20,200:180,20,200)", "diphoMVA>>diphoMVA(50,0,1)", "maxEta>>maxEta[0.,0.1,0.2,0.3,0.4,0.6,0.8,1.0,1.2,1.4442,1.566,1.7,1.8,2.,2.2,2.3,2.5]" ] ) cfgTools.addCategory(process.diphotonDumper, "flashggVBFTag__%s"%systlabel, cutbased=cutstring, systLabel=systlabel,
#process.mumugammaDumper.src = "kinPreselDiPhotons" process.mumugammaDumper.dumpTrees = True process.mumugammaDumper.dumpWorkspace = False process.mumugammaDumper.quietRooFit = True # split tree, histogram and datasets by process process.mumugammaDumper.nameTemplate ="$PROCESS_$SQRTS_$LABEL_$SUBCAT" ## do not split by process ## process.diphotonDumper.nameTemplate = "minitree_$SQRTS_$LABEL_$SUBCAT" ## define categories and associated objects to dump cfgTools.addCategory(process.mumugammaDumper, "Reject", " !Is2012FSRZMMG ", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories(process.mumugammaDumper, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [("EB","abs(MMG_Photon.superCluster.eta)<1.5",0), ## ("EE","abs(MMG_Photon.superCluster.eta)>1.5",0),##("EE","1",0), ## evereything elese is EB+EE ], ## variables to be dumped in trees/datasets. Same variables for all categories ## if different variables wanted for different categories, can add categorie one by one with cfgTools.addCategory variables=["Mass_mmg :=mass", "Mass_mumu :=DiMuPtr.mass", "PT_mumu :=DiMuPtr.pt",
process.diphotonDumper.src = "flashggSystTagMerger" process.diphotonDumper.processId = processId process.diphotonDumper.dumpTrees = True process.diphotonDumper.dumpWorkspace = True process.diphotonDumper.dumpHistos = True process.diphotonDumper.quietRooFit = True process.diphotonDumper.systLabel = "" for systlabel in systlabels: cutstring = "hasSyst(\"%s\")"%systlabel #print "syst label ", systlabel cfgTools.addCategory(process.diphotonDumper, "flashggUntaggedTag__%s"%systlabel, cutbased=cutstring, systLabel=systlabel, subcats=5, variables = defaultVariables, histograms = defaultHistograms ) cfgTools.addCategory(process.diphotonDumper, "flashggVBFTag__%s"%systlabel, cutbased=cutstring, systLabel=systlabel, subcats=3, variables = defaultVariables + [ "leadJetPt :=leadingJet().pt", "subleadJetPt :=subLeadingJet().pt", "VBFMVA :=VBFMVA().VBFMVAValue()", ], histograms = defaultHistograms + [
## Add trigger-bit dumping to the single-photon dumper when requested.
if doSinglePho:
    photonDumper.globalVariables.addTriggerBits = cms.PSet(
        tag=cms.InputTag("TriggerResults", "", "RECO"), bits=cms.vstring(dumpBits)
    )

# categories definition
# customize.massCut is either a single threshold or an "EB:EE" pair of
# eta-dependent thresholds separated by a colon.
if ":" in customize.massCut:
    massCutEB, massCutEE = map(float, customize.massCut.split(":"))
    massCut = min(massCutEB, massCutEE)
else:
    massCutEB, massCutEE = None, None
    massCut = float(customize.massCut)

# With eta-dependent thresholds, reject low-mass events up front
# (nSubcat == -1 means the category stores nothing).
if massCutEB or massCutEE:
    cfgTools.addCategory(diphotonDumper, "RejectLowMass",
                         " (max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))<1.4442 && mass <= %f)"
                         "|| (max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))>1.566 && mass <= %f)" % (massCutEB, massCutEE),
                         -1)

cfgTools.addCategories(diphotonDumper,
                       [  ## cuts are applied in cascade
                           ## ("all","1"),
                           ("EBHighR9",
                            "max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))<1.4442"
                            "&& min(leadingPhoton.r9,subLeadingPhoton.r9)>0.94", 0),
                           ("EBLowR9",
                            "max(abs(leadingPhoton.superCluster.eta),abs(subLeadingPhoton.superCluster.eta))<1.4442", 0),
                           ("EEHighR9",
                            "min(leadingPhoton.r9,subLeadingPhoton.r9)>0.94", 0),
                           ("EELowR9", "1", 0),
                       ],
                       variables=variables,
                       histograms=histograms
                       )
# single photon dumper
## Book one tags-dumper category per (tag, systematic) combination and
## remap the class-based classifier's return value ("flashggXyzTag") to the
## plain tag name ("XyzTag").
definedSysts = set()
process.tagsDumper.classifierCfg.remap = cms.untracked.VPSet()
for tag in tagList:
    tagName = tag[0]
    tagCats = tag[1]
    # remap return value of class-based classifier
    process.tagsDumper.classifierCfg.remap.append(
        cms.untracked.PSet(src=cms.untracked.string("flashgg%s" % tagName),
                           dst=cms.untracked.string(tagName)))
    for systlabel in systlabels:
        if systlabel not in definedSysts:
            # the cut corresponding to the systematic can be defined just once
            cutstring = "hasSyst(\"%s\") " % (systlabel)
            definedSysts.add(systlabel)
        else:
            cutstring = None
        cfgTools.addCategory(process.tagsDumper,
                             systlabel,
                             classname=tagName,
                             cutbased=cutstring,
                             subcats=tagCats,
                             variables=minimalVariables,
                             histograms=minimalHistograms)

process.p1 = cms.Path(process.tagsDumper)
print(process.p1)
] ## list of histograms to be plotted histograms=["r9>>r9(110,0,1.1)", "scEta>>scEta(100,-2.5,2.5)", "rndConePhoIso>>rndConePhoIso(60,-10,50)", "rndConeChIso>>rndConeChIso(60,-10,50)", "stdPhoIso>>stdPhoIso(60,-10,50)", "stdChIso>>stdChIso(60,-10,50)", ] ## define categories and associated objects to dump cfgTools.addCategory(process.photonDumper, "Reject", " abs(superCluster.eta)>=1.4442&&abs(superCluster.eta)<=1.566 " "|| abs(superCluster.eta)>=2.5 " "|| pt<100", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories(process.photonDumper, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [("promptTree","genMatchType == 1",0), ("fakesTree", "genMatchType != 1",0), ], ## variables to be dumped in trees/datasets. Same variables for all categories ## if different variables wanted for different categories, can add categorie one by one with cfgTools.addCategory variables=variables, ## histograms to be plotted.
#"esEnergyPlane1 := photon.esEnergyPlane1", #"esEnergyPlane2 := photon.esEnergyPlane2", #"e1x3 := photon.e1x3", #"e2x5max := photon.e2x5max", #"e5x5 := photon.e5x5" ] ## list of histograms to be plotted histograms=["r9>>r9(110,0,1.1)", "scEta>>scEta(100,-2.5,2.5)" ] ## define categories and associated objects to be rejected cfgTools.addCategory(process.photonViewDumper, "Reject", "abs(photon.superCluster.eta)>=1.4442&&abs(photon.superCluster.eta)<=1.566||abs(photon.superCluster.eta)>=2.5||photon.pt<15", -1 ## if nSubcat is -1 do not store anythings ) # interestng categories cfgTools.addCategories(process.photonViewDumper, ## categories definition ## cuts are applied in cascade. Events getting to these categories have already failed the "Reject" selection [("promptPhotons","photon.genMatchType == 1",0), ("fakePhotons", "photon.genMatchType != 1",0), ], ## variables to be dumped in trees/datasets. Same variables for all categories ## if different variables wanted for different categories, can add categorie one by one with cfgTools.addCategory variables=variables, ## histograms to be plotted. ## the variables need to be defined first