Example #1
def buildSequence(process, patArgs):
    sequence = cms.Sequence()

    # Produce HPS taus from AK5 PF jets already in AOD
    process.load("RecoTauTag.Configuration.RecoPFTauTag_cff")
    sequence *= process.PFTau
    # Produce pat taus, assuming patDefaultSequence is already included in the process
    if not hasattr(process, "patTausHpsPFTau"):
        tauTools.addTauCollection(process,
                                  cms.InputTag('hpsPFTauProducer'),
                                  algoLabel="hps",
                                  typeLabel="PFTau")

    process.patTausHpsPFTauForPV = process.patTausHpsPFTau.clone(
        addGenJetMatch=False,
        embedGenJetMatch=False,
        addGenMatch=False,
        embedGenMatch=False,
        userIsolation=cms.PSet(),
        isoDeposits=cms.PSet(),
    )
    process.patTausHpsPFTauForPV.tauIDSources.byRawCombinedIsolationDeltaBetaCorr = cms.InputTag(
        "hpsPFTauDiscriminationByRawCombinedIsolationDBSumPtCorr")

    sequence *= process.patTausHpsPFTauForPV

    # Trigger matching
    sequence *= HChTriggerMatching.addTauHLTMatching(
        process,
        patArgs["matchingTauTrigger"],
        collections=["patTausHpsPFTauForPV"],
        postfix="ForPV")

    # Require decay mode finding
    process.selectedPatTausHpsPFTauForPV = cms.EDFilter(
        "PATTauSelector",
        src=cms.InputTag("patTausHpsPFTauForPVTriggerMatchedForPV"),
        cut=cms.string("tauID('decayModeFinding')"))
    sequence *= process.selectedPatTausHpsPFTauForPV

    # Obtain the index of the vertex of the most isolated tau
    process.selectedPrimaryVertexIndex = cms.EDProducer(
        "HPlusVertexIndexTauMostIsolatedProducer",
        vertexSrc=cms.InputTag("offlinePrimaryVertices"),
        tauSrc=cms.InputTag("selectedPatTausHpsPFTauForPV"),
        tauDiscriminator=cms.string("byRawCombinedIsolationDeltaBetaCorr"),
        dz=cms.double(0.2),
    )
    sequence *= process.selectedPrimaryVertexIndex

    return sequence
def buildSequence(process, patArgs):
    sequence = cms.Sequence()

    # Produce HPS taus from AK5 PF jets already in AOD
    process.load("RecoTauTag.Configuration.RecoPFTauTag_cff")
    sequence *= process.PFTau
    # Produce pat taus, assuming patDefaultSequence is already included in the process
    if not hasattr(process, "patTausHpsPFTau"):
        tauTools.addTauCollection(process, cms.InputTag('hpsPFTauProducer'),
                                  algoLabel = "hps",
                                  typeLabel = "PFTau")

    process.patTausHpsPFTauForPV = process.patTausHpsPFTau.clone(
        addGenJetMatch = False,
        embedGenJetMatch = False,
        addGenMatch = False,
        embedGenMatch = False,
        userIsolation = cms.PSet(),
        isoDeposits = cms.PSet(),
    )
    process.patTausHpsPFTauForPV.tauIDSources.byRawCombinedIsolationDeltaBetaCorr = cms.InputTag("hpsPFTauDiscriminationByRawCombinedIsolationDBSumPtCorr")

    sequence *= process.patTausHpsPFTauForPV

    # Trigger matching
    sequence *= HChTriggerMatching.addTauHLTMatching(process, patArgs["matchingTauTrigger"], collections=["patTausHpsPFTauForPV"], postfix="ForPV")

    # Require decay mode finding
    process.selectedPatTausHpsPFTauForPV = cms.EDFilter("PATTauSelector",
        src = cms.InputTag("patTausHpsPFTauForPVTriggerMatchedForPV"),
        cut = cms.string("tauID('decayModeFinding')")
    )
    sequence *= process.selectedPatTausHpsPFTauForPV

    # Obtain the index of the vertex of the most isolated tau
    process.selectedPrimaryVertexIndex = cms.EDProducer("HPlusVertexIndexTauMostIsolatedProducer",
        vertexSrc = cms.InputTag("offlinePrimaryVertices"),
        tauSrc = cms.InputTag("selectedPatTausHpsPFTauForPV"),
        tauDiscriminator = cms.string("byRawCombinedIsolationDeltaBetaCorr"),
        dz = cms.double(0.2),
    )
    sequence *= process.selectedPrimaryVertexIndex

    return sequence
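
For orientation, a minimal usage sketch of the buildSequence function above; the process setup, the PAT default sequence, and the trigger path name are assumptions, not taken from the original configuration:

import FWCore.ParameterSet.Config as cms

process = cms.Process("PVSELECTION")  # hypothetical process name
# ... source, GlobalTag and patDefaultSequence are assumed to be configured here ...
patArgs = {"matchingTauTrigger": "HLT_IsoPFTau35_Trk20_v2"}  # assumed HLT path name
process.pvSelectionSequence = buildSequence(process, patArgs)
process.pvSelectionPath = cms.Path(process.pvSelectionSequence)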
Example #3
def addPF2PAT(process, dataVersion, postfix="PFlow",
              doTauHLTMatching=True, matchingTauTrigger=None, 
              ):
#    if hasattr(process, "patDefaultSequence"):
#        raise Exception("PAT should not exist before calling addPF2PAT at the moment")

    # Hack to avoid crashing if something in PAT assumes process.out
    hasOut = hasattr(process, "out")
    outputCommands = []
    outputCommandsBackup = []
    if hasOut:
        outputCommandsBackup = process.out.outputCommands[:]
    else:
        process.out = cms.OutputModule("PoolOutputModule",
            fileName = cms.untracked.string('dummy.root'),
            outputCommands = cms.untracked.vstring()
        )

    outputCommands = []

    # Jet modifications
    # PhysicsTools/PatExamples/test/patTuple_42x_jec_cfg.py
    jetCorrFactors = patJetCorrLevels(dataVersion, True)
    jetCorrPayload = "AK5PFchs"

    process.load("PhysicsTools.PatAlgos.patSequences_cff")
    pfTools.usePF2PAT(process, runPF2PAT=True, jetAlgo="AK5", jetCorrections=(jetCorrPayload, jetCorrFactors),
                      runOnMC=dataVersion.isMC(), postfix=postfix)

    outputCommands = [
#        "keep *_selectedPatPhotons%s_*_*" % postfix,
#        'keep *_selectedPatElectrons%s_*_*' % postfix, 
        'keep *_selectedPatMuons%s_*_*' % postfix,
        'keep *_selectedPatJets%s*_*_*' % postfix,
        'keep *_selectedPatTaus%s_*_*' % postfix,
        'keep *_selectedPatPFParticles%s_*_*' % postfix,
        'keep *_selectedPatJets%s_pfCandidates_*' % postfix,
        'drop *_*PF_caloTowers_*',
        'drop *_*JPT_pfCandidates_*',
        'drop *_*Calo_pfCandidates_*',
        'keep *_patMETs%s_*_*' % postfix,
        ]

    # Enable PFnoPU
    getattr(process, "pfPileUp"+postfix).Enable = True
    getattr(process, "pfPileUp"+postfix).checkClosestZVertex = False
    getattr(process, "pfPileUp"+postfix).Vertices = "offlinePrimaryVertices"

    # Jet modifications
    # L1FastJet
    # https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookJetEnergyCorrections#OffsetJEC
    # https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookJetEnergyCorrections#JetEnCor2011
    # https://hypernews.cern.ch/HyperNews/CMS/get/jes/184.html
    kt6name = "kt6PFJets"+postfix
    process.load('RecoJets.Configuration.RecoPFJets_cff')
    from RecoJets.JetProducers.kt4PFJets_cfi import kt4PFJets
    setattr(process, kt6name, kt4PFJets.clone(
        rParam = 0.6,
        src = 'pfNoElectron'+postfix,
        doRhoFastjet = True,
        doAreaFastjet = cms.bool(True),
    ))
    getattr(process, "patPF2PATSequence"+postfix).replace(
        getattr(process, "pfNoElectron"+postfix),
        getattr(process, "pfNoElectron"+postfix) * getattr(process, kt6name))
    getattr(process, "patJetCorrFactors"+postfix).rho = cms.InputTag(kt6name, "rho")
    getattr(process, "patJetCorrFactors"+postfix).useRho = True

    # ak5PFJets
    getattr(process, "pfJets"+postfix).doAreaFastjet = cms.bool(True)
    getattr(process, "pfJets"+postfix).doRhoFastjet = False
#    getattr(process, "pfJets"+postfix).Vertices = cms.InputTag("goodPrimaryVertices")

    setPatJetDefaults(getattr(process, "patJets"+postfix))


    # Use HPS taus
    # Add and recalculate the discriminators
    addHChTauDiscriminators()
    if not hasattr(process, "hpsPFTauDiscriminationForChargedHiggsByLeadingTrackPtCut"):
        import RecoTauTag.RecoTau.PFRecoTauDiscriminationForChargedHiggs_cfi as HChPFTauDiscriminators
        import RecoTauTag.RecoTau.CaloRecoTauDiscriminationForChargedHiggs_cfi as HChCaloTauDiscriminators

        tauAlgos = ["hpsPFTau"]
#        tauAlgos = ["pfTaus"+postfix]
        HChPFTauDiscriminators.addPFTauDiscriminationSequenceForChargedHiggs(process, tauAlgos)
        HChPFTauDiscriminatorsCont.addPFTauDiscriminationSequenceForChargedHiggsCont(process, tauAlgos)
        PFTauTestDiscrimination.addPFTauTestDiscriminationSequence(process, tauAlgos)

        fixFlightPath(process, tauAlgos[0])
        fixFlightPath(process, tauAlgos[0], "Cont")
    
    patHelpers.cloneProcessingSnippet(process, process.hpsPFTauHplusDiscriminationSequence, postfix)
    patHelpers.cloneProcessingSnippet(process, process.hpsPFTauHplusDiscriminationSequenceCont, postfix)
    patHelpers.cloneProcessingSnippet(process, process.hpsPFTauHplusTestDiscriminationSequence, postfix)

    patTauSeq = cms.Sequence(
        getattr(process, "hpsPFTauHplusDiscriminationSequence"+postfix) *
        getattr(process, "hpsPFTauHplusDiscriminationSequenceCont"+postfix) * 
        getattr(process, "hpsPFTauHplusTestDiscriminationSequence"+postfix)
#        getattr(process, "pfTaus"+postfix+"HplusDiscriminationSequence") *
#        getattr(process, "pfTaus"+postfix+"HplusDiscriminationSequenceCont") * 
#        getattr(process, "pfTaus"+postfix+"HplusTestDiscriminationSequence")
    )
    setattr(process, "hplusPatTauSequence"+postfix, patTauSeq)
    patHelpers.massSearchReplaceParam(patTauSeq, "PFTauProducer", cms.InputTag("hpsPFTauProducer"), cms.InputTag("pfTaus"+postfix))
    patHelpers.massSearchReplaceAnyInputTag(patTauSeq, cms.InputTag("hpsPFTauDiscriminationByDecayModeFinding"), cms.InputTag("hpsPFTauDiscriminationByDecayModeFinding"+postfix))

    pfTools.adaptPFTaus(process, "hpsPFTau", postfix=postfix)

    setPatTauDefaults(getattr(process, "patTaus"+postfix), False)
    addPatTauIsolationEmbedding(process, getattr(process, "patDefaultSequence"+postfix), postfix)
    getattr(process, "selectedPatTaus"+postfix).cut = tauPreSelection

    # The prediscriminant of pfTausBaseDiscriminationByLooseIsolation
    # is missing from the default sequence, but since we don't want to
    # apply any tau selections as a part of PF2PAT anyway, let's just
    # remove this too
    getattr(process, "pfTaus"+postfix).discriminators = cms.VPSet()
#    getattr(process, "pfTauSequence"+postfix).remove(getattr(process, "pfTaus"+postfix))
#    delattr(process, "pfTaus"+postfix)
#    getattr(process, "pfTausBaseSequence"+postfix).remove(getattr(process, "pfTausBaseDiscriminationByLooseIsolation"+postfix))
    

    # Remove the shrinking cone altogether, we don't care about it
#    getattr(process, "patDefaultSequence"+postfix).remove(getattr(process, "patShrinkingConePFTauDiscrimination"+postfix))

    # Override the tau source (this is WRONG in the standard PF2PAT, the experts should know it already)
#    getattr(process, "patTaus"+postfix).tauSource = "hpsPFTauProducer"+postfix
#    patHelpers.massSearchReplaceAnyInputTag(getattr(process, "patHPSPFTauDiscrimination"+postfix),
#                                            cms.InputTag("pfTaus"+postfix),
#                                            cms.InputTag("hpsPFTauProducer"+postfix))
#    getattr(process, "pfNoTau"+postfix).topCollection = cms.InputTag("hpsPFTauProducer"+postfix)

    # Disable iso deposits, they take a LOT of space
    getattr(process, "patTaus"+postfix).isoDeposits = cms.PSet()

    # Disable tau top projection, the taus are identified and removed
    # from jets as a part of the analysis
    getattr(process, "pfNoTau"+postfix).enable = False



    # Lepton modifications
    setPatLeptonDefaults(getattr(process, "patMuons"+postfix), False)
    #setPatLeptonDefaults(getattr(process, "patElectrons"+postfix), False)
    #addPatElectronID(process, getattr(process, "patElectrons"+postfix), getattr(process, "makePatElectrons"+postfix))

    # PATElectronProducer segfaults, and we don't really need them now
    getattr(process, "patDefaultSequence"+postfix).remove(getattr(process, "makePatElectrons"+postfix))
    getattr(process, "patDefaultSequence"+postfix).remove(getattr(process, "selectedPatElectrons"+postfix))
    getattr(process, "patDefaultSequence"+postfix).remove(getattr(process, "countPatElectrons"+postfix))
    getattr(process, "patDefaultSequence"+postfix).remove(getattr(process, "countPatLeptons"+postfix))

    # Disable muon and electron top projections, needs wider
    # discussion about lepton definitions
    getattr(process, "pfNoMuon"+postfix).enable = False
    getattr(process, "pfNoElectron"+postfix).enable = False

    # Remove photon MC matcher in order to avoid keeping photons in the event content
    #process.patDefaultSequencePFlow.remove(process.photonMatchPFlow)

    if hasOut:
        process.out.outputCommands = outputCommandsBackup
        process.out.outputCommands.extend(outputCommands)
    else:
        del process.out

    getattr(process, "patDefaultSequence"+postfix).replace(
        getattr(process, "patTaus"+postfix),
        patTauSeq *
        getattr(process, "patTaus"+postfix)
    )

    sequence = cms.Sequence(
        getattr(process, "patPF2PATSequence"+postfix)
    )

    if doTauHLTMatching:
        sequence *= HChTriggerMatching.addTauHLTMatching(process, matchingTauTrigger, collections=["selectedPatTaus"+postfix], postfix=postfix)

    return sequence
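
A similar hedged sketch for addPF2PAT; here 'process' and 'dataVersion' are assumed to be set up elsewhere, and the HLT path name is a placeholder:

# Illustrative only: process, dataVersion and the trigger name are assumptions.
process.pf2patSequence = addPF2PAT(process, dataVersion, postfix="PFlow",
                                   doTauHLTMatching=True,
                                   matchingTauTrigger="HLT_IsoPFTau35_Trk20_v2")
process.pf2patPath = cms.Path(process.pf2patSequence)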
Example #4
def addPatOnTheFly(process, options, dataVersion,
                   doPlainPat=True, doPF2PAT=False,
                   plainPatArgs={}, pf2patArgs={},
                   doMcPreselection=False):
    def setPatArg(args, name, value):
        if name in args:
            print "Overriding PAT arg '%s' from '%s' to '%s'" % (name, str(args[name]), str(value))
        args[name] = value
    def setPatArgs(args, d):
        for name, value in d.iteritems():
            setPatArg(args, name, value)

    counters = []
    if dataVersion.isData():
        counters.extend(HChDataSelection.dataSelectionCounters[:])
    
    if options.tauEmbeddingInput != 0:
        import HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.PFEmbeddingSource_cff as PFEmbeddingSource
        counters.extend(MuonSelection.muonSelectionCounters[:])
        counters.extend(PFEmbeddingSource.muonSelectionCounters)
    elif dataVersion.isMC() and doMcPreselection:
        counters = HChMcSelection.mcSelectionCounters[:]
    
    if options.doPat == 0:
        process.load("HiggsAnalysis.HeavyChHiggsToTauNu.HChPrimaryVertex_cfi")
        seq = cms.Sequence(
#            process.goodPrimaryVertices10
        )
        if dataVersion.isMC() and doMcPreselection:
            process.eventPreSelection = HChMcSelection.addMcSelection(process, dataVersion, options.trigger)
            seq *= process.eventPreSelection
        if options.doTauHLTMatchingInAnalysis != 0:
            process.patTausHpsPFTauTauTriggerMatched = HChTriggerMatching.createTauTriggerMatchingInAnalysis(options.trigger, "selectedPatTausHpsPFTau")
            seq *= process.patTausHpsPFTauTauTriggerMatched
        return (seq, counters)

    print "Running PAT on the fly"

    process.eventPreSelection = cms.Sequence()
    if options.tauEmbeddingInput != 0:
        if doPF2PAT or not doPlainPat:
            raise Exception("Only plainPat can be done for tau embedding input at the moment")

        # Hack to avoid crashing if something in PAT assumes process.out
        hasOut = hasattr(process, "out")
        if not hasOut:
            process.out = cms.OutputModule("PoolOutputModule",
                fileName = cms.untracked.string('dummy.root'),
                outputCommands = cms.untracked.vstring()
            )
        setPatArgs(plainPatArgs, {"doPatTrigger": False,
                             "doTauHLTMatching": False,
                             "doPatCalo": False,
                             "doBTagging": True,
                             "doPatElectronID": False})

        process.patSequence = addPat(process, dataVersion, plainPatArgs=plainPatArgs)
        # FIXME: this is broken at the moment
        #removeSpecificPATObjects(process, ["Muons", "Electrons", "Photons"], False)
        process.patDefaultSequence.remove(process.patMuons)
        process.patDefaultSequence.remove(process.selectedPatMuons)
        #process.selectedPatCandidates.remove(process.selectedPatMuons)
        process.patDefaultSequence.remove(process.muonMatch)
        process.patDefaultSequence.remove(process.patElectrons)
        process.patDefaultSequence.remove(process.selectedPatElectrons)
        #process.selectedPatCandidates.remove(process.selectedPatElectrons)
        process.patDefaultSequence.remove(process.electronMatch)
        process.patDefaultSequence.remove(process.patPhotons)
        process.patDefaultSequence.remove(process.selectedPatPhotons)
        #process.selectedPatCandidates.remove(process.selectedPatPhotons)
        process.patDefaultSequence.remove(process.photonMatch)

        del process.patMuons
        del process.selectedPatMuons
        del process.muonMatch
        del process.patElectrons
        del process.selectedPatElectrons
        del process.electronMatch
        del process.patPhotons
        del process.selectedPatPhotons
        del process.photonMatch

        # Remove soft muon b tagging discriminators as they are not
        # well defined, cause technical problems and we don't use
        # them.
        process.patJets.discriminatorSources = filter(lambda tag: "softMuon" not in tag.getModuleLabel(), process.patJets.discriminatorSources)
        for seq in [process.btagging, process.btaggingJetTagsAOD, process.btaggingTagInfosAOD]:
            softMuonRemover = RemoveSoftMuonVisitor.RemoveSoftMuonVisitor()
            seq.visit(softMuonRemover)
            softMuonRemover.removeFound(process, seq)

        # Use the merged track collection
        process.ak5PFJetTracksAssociatorAtVertex.tracks.setModuleLabel("tmfTracks")
        process.jetTracksAssociatorAtVertex.tracks.setModuleLabel("tmfTracks")

        # Do jet-parton matching with the genParticles of the original event
        if dataVersion.isMC():
            process.patJetPartons.src.setProcessName(dataVersion.getTriggerProcess())
            process.patJetPartonMatch.matched.setProcessName(dataVersion.getTriggerProcess())
            # in v13_3 embeddings the GenJets are done from the tau part, hence they are meaningless
            process.patJets.addGenJetMatch = False
            process.patJets.genJetMatch = ""

        # Another part of the PAT process.out hack
        if not hasOut:
            del process.out

        # Add PV selection, if not yet done by PAT
#        if dataVersion.isData():
#            process.load("HiggsAnalysis.HeavyChHiggsToTauNu.HChPrimaryVertex_cfi")
#            process.patSequence *= process.goodPrimaryVertices
    else:
        if dataVersion.isData():
            process.eventPreSelection = HChDataSelection.addDataSelection(process, dataVersion, options.trigger)
        elif dataVersion.isMC() and doMcPreselection:
            process.eventPreSelection = HChMcSelection.addMcSelection(process, dataVersion, options.trigger)

        pargs = plainPatArgs.copy()
        pargs2 = pf2patArgs.copy()

        argsList = []
        if doPlainPat:
            argsList.append(pargs)
        if doPF2PAT:
            argsList.append(pargs2)

        for args in argsList:
            if args.get("doTauHLTMatching", True):
                if not "matchingTauTrigger" in args:
                    if options.trigger == "":
                        raise Exception("Command line argument 'trigger' is missing")
                    args["matchingTauTrigger"] = options.trigger
                print "Trigger used for tau matching:", args["matchingTauTrigger"]

        process.patSequence = addPat(process, dataVersion,
                                     doPlainPat=doPlainPat, doPF2PAT=doPF2PAT,
                                     plainPatArgs=pargs, pf2patArgs=pargs2,)
    
    # Add selection of PVs with sumPt > 10
#    process.patSequence *= process.goodPrimaryVertices10

    dataPatSequence = cms.Sequence(
        process.eventPreSelection *
        process.patSequence
    )

    if options.tauEmbeddingInput != 0:
        from HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.customisations import addTauEmbeddingMuonTaus
        process.patMuonTauSequence = addTauEmbeddingMuonTaus(process)
        process.patSequence *= process.patMuonTauSequence
    
    return (dataPatSequence, counters)
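
A hedged sketch of calling addPatOnTheFly; the argument dictionaries are illustrative assumptions, and a real call with default arguments appears in Example #10 below:

# Illustrative only: the argument dictionaries are assumptions, not from the original.
process.commonSequence, additionalCounters = addPatOnTheFly(
    process, options, dataVersion,
    doPlainPat=True, doPF2PAT=False,
    plainPatArgs={"doPatTaus": True, "doTauHLTMatching": True})
process.commonPath = cms.Path(process.commonSequence)
# 'additionalCounters' lists the EventCountProducer labels accumulated during the setup.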
Example #5
def addPlainPat(process, dataVersion, doPatTrigger=True, doPatTaus=True, doHChTauDiscriminators=True, doPatMET=True, doPatElectronID=True,
                doPatCalo=True, doBTagging=True, doPatMuonPFIsolation=False, doPatTauIsoDeposits=False,
                doTauHLTMatching=True, matchingTauTrigger=None, matchingJetTrigger=None,
                includePFCands=False):
    out = None
    outdict = process.outputModules_()
    if outdict.has_key("out"):
        out = outdict["out"]

    outputCommands = []

    # Tau Discriminators
    process.hplusPatTauSequence = cms.Sequence()
    if doPatTaus:
        process.hplusPatTauSequence = addPFTausAndDiscriminators(process, dataVersion, doPatCalo, doHChTauDiscriminators)

    # PAT Layer 0+1
    process.load("PhysicsTools.PatAlgos.patSequences_cff")

    sequence = cms.Sequence(
        process.hplusPatTauSequence
    )

    # Restrict input to AOD
    restrictInputToAOD(process, ["All"])

    # Remove MC stuff if we have collision data (has to be done before any add*Collection!)
    # This also adds the L2L3Residual JEC correction to the process.patJetCorrFactors
    if dataVersion.isData():
        runOnData(process, outputInProcess = out!=None)

    # Jets
    # Produce kt6 rho for L1Fastjet
    process.load('RecoJets.Configuration.RecoPFJets_cff')
    process.kt6PFJets.doRhoFastjet = True
    process.ak5PFJets.doAreaFastjet = True
    process.ak5PFJetSequence = cms.Sequence(process.kt6PFJets * process.ak5PFJets)
   
    # Set defaults
    process.patJets.jetSource = cms.InputTag("ak5CaloJets")
    process.patJets.trackAssociationSource = cms.InputTag("ak5JetTracksAssociatorAtVertex")
    setPatJetDefaults(process.patJets)
    setPatJetCorrDefaults(process.patJetCorrFactors, dataVersion)
    process.patDefaultSequence.replace(process.patJetCorrFactors,
                                       process.ak5PFJetSequence*process.patJetCorrFactors)
    process.selectedPatJets.cut = jetPreSelection

    # The default JEC to be embedded to pat::Jets are L2Relative,
    # L3Absolute, L5Flavor and L7Parton. The default JEC to be applied
    # is L2L3Residual, or L3Absolute, or Uncorrected (in this order).

    if doPatCalo:
        # Add JPT jets
        # FIXME: Disabled for now until the JEC for JPT works again (with the latest JEC)
#        addJetCollection(process, cms.InputTag('JetPlusTrackZSPCorJetAntiKt5'),
#                         'AK5', 'JPT',
#                         doJTA        = True,
#                         doBTagging   = doBTagging,
#                         jetCorrLabel = ('AK5JPT', process.patJetCorrFactors.levels),
#                         doType1MET   = False,
#                         doL1Cleaning = False,
#                         doL1Counters = True,
#                         genJetCollection = cms.InputTag("ak5GenJets"),
#                         doJetID      = True
#        )
    
        # Add PF jets
        addJetCollection(process, cms.InputTag('ak5PFJets'),
                         'AK5', 'PF',
                         doJTA        = True,
                         doBTagging   = doBTagging,
                         jetCorrLabel = ('AK5PF', process.patJetCorrFactors.levels),
                         doType1MET   = False,
                         doL1Cleaning = False,
                         doL1Counters = True,
                         genJetCollection = cms.InputTag("ak5GenJets"),
                         doJetID      = True
        )
        setPatJetCorrDefaults(process.patJetCorrFactorsAK5PF, dataVersion, True)

    else:
        setPatJetCorrDefaults(process.patJetCorrFactors, dataVersion, True)
        switchJetCollection(process, cms.InputTag('ak5PFJets'),
                            doJTA        = True,
                            doBTagging   = doBTagging,
                            jetCorrLabel = ('AK5PF', process.patJetCorrFactors.levels),
                            doType1MET   = False,
                            genJetCollection = cms.InputTag("ak5GenJets"),
                            doJetID      = True
        )
    
    outputCommands.extend([
            "keep *_selectedPatJets_*_*",
            "keep *_selectedPatJetsAK5JPT_*_*",
            "keep *_selectedPatJetsAK5PF_*_*",
            'drop *_selectedPatJets_pfCandidates_*', ## drop for default patJets which are CaloJets
            'drop *_*PF_caloTowers_*',
            'drop *_*JPT_pfCandidates_*',
            'drop *_*Calo_pfCandidates_*',
            ])

    # Taus

    # Set default PATTauProducer options here, they should be
    # replicated to all added tau collections (and the first call to
    # addTauCollection should replace the default producer modified
    # here)
    setPatTauDefaults(process.patTaus, includePFCands)
    process.selectedPatTaus.cut = tauPreSelection

    if doPatTaus:
        if doHChTauDiscriminators:
            addHChTauDiscriminators()

        # Don't enable TCTau nor shrinking cone tau
        # if doPatCalo:
        #     tauTools.addTauCollection(process,cms.InputTag('caloRecoTauProducer'),
        #                      algoLabel = "caloReco",
        #                      typeLabel = "Tau")
        #     setPatTauDefaults(process.patTausCaloRecoTau, True)
        #     process.patTausCaloRecoTau.embedLeadTrack = not includePFCands
        #     process.patTausCaloRecoTau.embedLeadPFChargedHadrCand = False
    
        # tauTools.addTauCollection(process,cms.InputTag('shrinkingConePFTauProducer'),
        #                  algoLabel = "shrinkingCone",
        #                  typeLabel = "PFTau")
        # # Disable isoDeposits like this until the problem with doPFIsoDeposits is fixed 
        # if not doPatTauIsoDeposits:
        #     process.patTausShrinkingConePFTau.isoDeposits = cms.PSet()

        tauTools.addTauCollection(process,cms.InputTag('hpsPFTauProducer'),
                         algoLabel = "hps",
                         typeLabel = "PFTau")
        if not doPatTauIsoDeposits:
            process.patTausHpsPFTau.isoDeposits = cms.PSet()
        addPatTauIsolationEmbedding(process, process.patDefaultSequence, "HpsPFTau")

        tauTools.addTauCollection(process,cms.InputTag('hpsTancTaus'),
                                  algoLabel = "hpsTanc",
                                  typeLabel = "PFTau")
        if not doPatTauIsoDeposits:
            process.patTausHpsTancPFTau.isoDeposits = cms.PSet()
        # Disable discriminators which are not in AOD
#        del process.patTausHpsTancPFTau.tauIDSources.againstCaloMuon
#        del process.patTausHpsTancPFTau.tauIDSources.byHPSvloose
        addPatTauIsolationEmbedding(process, process.patDefaultSequence, "HpsTancPFTau")

        # Add visible taus    
        if dataVersion.isMC():
            process.VisibleTaus = cms.EDProducer("HLTTauMCProducer",
                GenParticles  = cms.untracked.InputTag("genParticles"),
                ptMinTau      = cms.untracked.double(3),
                ptMinMuon     = cms.untracked.double(3),
                ptMinElectron = cms.untracked.double(3),
                BosonID       = cms.untracked.vint32(23),
                EtaMax         = cms.untracked.double(2.5)
            )
            sequence *= process.VisibleTaus
            outputCommands.append("keep *_VisibleTaus_*_*")

    else:
        # FIXME: this is broken at the moment
        #removeSpecificPATObjects(process, ["Taus"], outputInProcess= out != None)
        process.patDefaultSequence.remove(process.patTaus)
        process.patDefaultSequence.remove(process.selectedPatTaus)

    outputCommands.extend(["drop *_selectedPatTaus_*_*",
#                           "keep *_selectedPatTausCaloRecoTau_*_*",
#                           "keep *_selectedPatTausShrinkingConePFTau_*_*",
                           "keep *_selectedPatTausHpsPFTau_*_*",
                           "keep *_selectedPatTausHpsTancPFTau_*_*",
                           #"keep *_cleanPatTaus_*_*",
                           #"drop *_cleanPatTaus_*_*",
                           #"keep *_patTaus*_*_*",
                           #"keep *_patPFTauProducerFixedCone_*_*",
                           # keep these until the embedding problem with pat::Tau is fixed
                           #"keep recoPFCandidates_particleFlow_*_*",
                           #"keep recoTracks_generalTracks_*_*"
                           ])

    # MET
    addPfMET(process, 'PF')
    if doPatCalo:
        addTcMET(process, 'TC')
    else:
        # FIXME: This is broken at the moment...
        #removeSpecificPATObjects(process, ["METs"], outputInProcess= out != None)
        #process.patDefaultSequen
        process.patDefaultSequence.remove(process.patMETCorrections)
        process.patDefaultSequence.remove(process.patMETs)
        del process.patMETCorrections
        del process.patMETs

    outputCommands.extend([
            "keep *_patMETs_*_*",
            "keep *_patMETsTC_*_*",
            "keep *_patMETsPF_*_*",
            "keep *_genMetTrue_*_*",
            ])

    # Muons
    setPatLeptonDefaults(process.patMuons, includePFCands)
    if doPatMuonPFIsolation:
        addPFMuonIsolation(process, process.patMuons, sequence, verbose=True)

    outputCommands.extend([
            "keep *_selectedPatMuons_*_*"
            ])

    # Electrons
    # In order to calculate the transverse impact parameter w.r.t.
    # beam spot instead of primary vertex, see
    setPatLeptonDefaults(process.patMuons, includePFCands)

    # Electron ID, see
    # https://twiki.cern.ch/twiki/bin/view/CMS/SimpleCutBasedEleID
    if doPatElectronID:
        addPatElectronID(process, process.patElectrons, process.patDefaultSequence)

    outputCommands.extend([
            "keep *_selectedPatElectrons_*_*"
            ])

    # Photons
#    process.patPhotons.embedGenMatch = False
    outputCommands.extend([
            "keep *_selectedPatPhotons_*_*"
            ])

    # Trigger
    if doPatTrigger:
        outMod= ''
        if out != None:
            outMod  = 'out'
        switchOnTrigger(process, hltProcess=dataVersion.getTriggerProcess(), outputModule=outMod)
        process.patTrigger.addL1Algos = cms.bool(True)
        process.patTrigger.l1ExtraMu = cms.InputTag("l1extraParticles")
        process.patTrigger.l1ExtraCenJet = cms.InputTag("l1extraParticles", "Central")
        process.patTrigger.l1ExtraTauJet = cms.InputTag("l1extraParticles", "Tau")
        process.patTrigger.l1ExtraForJet = cms.InputTag("l1extraParticles", "Forward")
        process.patTrigger.l1ExtraETM = cms.InputTag("l1extraParticles", "MET")
        process.patTrigger.l1ExtraHTM = cms.InputTag("l1extraParticles", "MHT")


        # Keep StandAlone trigger objects for enabling trigger
        # matching in the analysis phase with PAT tools
        outputCommands.extend(patTriggerStandAloneEventContent)


    # Remove cleaning step and set the event content
    if out == None:
        myRemoveCleaning(process)
    else:
        backup = out.outputCommands[:]
        myRemoveCleaning(process)
#        backup_pat = out.outputCommands[:]

        # Remove PFParticles here, they are explicitly included when needed
#        backup_pat = filter(lambda n: "selectedPatPFParticles" not in n, backup_pat)

        out.outputCommands = backup
#        out.outputCommands.extend(backup_pat)
        out.outputCommands.extend(outputCommands)

    # Build sequence
    sequence *= process.patDefaultSequence

    # Tau+HLT matching
    if doTauHLTMatching:
        sequence *= HChTriggerMatching.addTauHLTMatching(process, matchingTauTrigger, matchingJetTrigger)

    return sequence
)
muons = "preselectedMuons" # this is common to embedding muon selection and muon veto
process.preselectedMuonsFilter = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("preselectedMuons"),
    minNumber = cms.uint32(1)
)
process.preselectedMuonsCount = cms.EDProducer("EventCountProducer")
process.commonSequence += (
    process.preselectedMuons +
    process.preselectedMuonsFilter +
    process.preselectedMuonsCount
)
additionalCounters.append("preselectedMuonsCount")
# Trigger matching
import HiggsAnalysis.HeavyChHiggsToTauNu.HChTriggerMatching as HChTriggerMatching
process.preselectedMuonsMatched = HChTriggerMatching.createMuonTriggerMatchingInAnalysis(options.trigger, "preselectedMuons")
process.preselectedMuonsMatchedFilter = cms.EDFilter("CandViewCountFilter",
    src = cms.InputTag("preselectedMuonsMatched"),
    minNumber = cms.uint32(1)
)
process.preselectedMuonsMatchedCount = cms.EDProducer("EventCountProducer")
process.commonSequence += (
    process.preselectedMuonsMatched +
    process.preselectedMuonsMatchedFilter +
    process.preselectedMuonsMatchedCount
)
additionalCounters.append("preselectedMuonsMatchedCount")
# Kinematic cuts
process.preselectedMuons41 = cms.EDFilter("PATMuonSelector",
    src = cms.InputTag("preselectedMuonsMatched"),
    cut = cms.string("pt() > 41 && abs(eta) < 2.1")
# Uncomment the following in order to print the counters at the end of
# the job (note that if many other modules are being run in the same
# job, their INFO messages are printed too)
#process.MessageLogger.cerr.threshold = cms.untracked.string("INFO")

# Fragment to run PAT on the fly if requested from command line
from HiggsAnalysis.HeavyChHiggsToTauNu.HChPatTuple import addPatOnTheFly
process.commonSequence, additionalCounters = addPatOnTheFly(
    process, options, dataVersion)

# Re-run trigger matching
if doRerunTriggerMatching:
    import HiggsAnalysis.HeavyChHiggsToTauNu.HChTriggerMatching as TriggerMatching
    process.triggerMatching = TriggerMatching.addTauTriggerMatching(
        process,
        options.trigger,
        "Tau",
        #pathFilterMap={} # by default, use filter name in trigger matching re-running
    )
    process.commonSequence *= process.triggerMatching

# Add configuration information to histograms.root
from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addConfigInfo
process.infoPath = addConfigInfo(process, options, dataVersion)

# MC Filter
import HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.customisations as tauEmbeddingCustomisations
if filterGenTaus:
    additionalCounters.extend(
        tauEmbeddingCustomisations.addGeneratorTauFilter(
            process,
            process.commonSequence,
Example #8
        "&& innerTrack().hitPattern().numberOfValidPixelHits() > 0"
        "&& track().hitPattern().trackerLayersWithMeasurement() > 8"))
muons = "preselectedMuons"  # this is common to embedding muon selection and muon veto
process.preselectedMuonsFilter = cms.EDFilter(
    "CandViewCountFilter",
    src=cms.InputTag("preselectedMuons"),
    minNumber=cms.uint32(1))
process.preselectedMuonsCount = cms.EDProducer("EventCountProducer")
process.commonSequence += (process.preselectedMuons +
                           process.preselectedMuonsFilter +
                           process.preselectedMuonsCount)
additionalCounters.append("preselectedMuonsCount")
# Trigger matching
import HiggsAnalysis.HeavyChHiggsToTauNu.HChTriggerMatching as HChTriggerMatching

process.preselectedMuonsMatched = HChTriggerMatching.createMuonTriggerMatchingInAnalysis(
    options.trigger, "preselectedMuons")
process.preselectedMuonsMatchedFilter = cms.EDFilter(
    "CandViewCountFilter",
    src=cms.InputTag("preselectedMuonsMatched"),
    minNumber=cms.uint32(1))
process.preselectedMuonsMatchedCount = cms.EDProducer("EventCountProducer")
process.commonSequence += (process.preselectedMuonsMatched +
                           process.preselectedMuonsMatchedFilter +
                           process.preselectedMuonsMatchedCount)
additionalCounters.append("preselectedMuonsMatchedCount")
# Kinematic cuts
process.preselectedMuons41 = cms.EDFilter(
    "PATMuonSelector",
    src=cms.InputTag("preselectedMuonsMatched"),
    cut=cms.string("pt() > 41 && abs(eta) < 2.1"))
process.preselectedMuons41Filter = cms.EDFilter(
Example #9
def customise(process):
    # Catch the case where this config is run from cmsDriver; it won't work due to VarParsing
    # First protect against crab job creation, then the no-argument case
    if hasattr(sys, "argv") and len(sys.argv) > 0:
        if "cmsDriver" in sys.argv[0]:
            print "Running pf_customise from cmsDriver, not executing running() further due to VarParsing"
            return
        else:
            print "Running pf_customise"

    # Command line arguments
    import FWCore.ParameterSet.VarParsing as VarParsing
    options = VarParsing.VarParsing('analysis')
    options.register(
        'overrideBeamSpot',
        0,  # default value, false
        VarParsing.VarParsing.multiplicity.singleton,
        VarParsing.VarParsing.varType.int,
        "should I override beamspot in globaltag?")
    options.register(
        "tauDecayMode",
        0,  # Default is all decays
        VarParsing.VarParsing.multiplicity.singleton,
        VarParsing.VarParsing.varType.int,
        "Tau decay mode (0=all, 230=hadronic)")
    options.register(
        "tauMinVisPt",
        -1,  # Disabled
        VarParsing.VarParsing.multiplicity.singleton,
        VarParsing.VarParsing.varType.int,
        "Minimum visible pt of tau decay (-1 disabled, >= 0 cut value in GeV)")

    options, dataVersion = getOptionsDataVersion("53XmcS10", options)

    hltProcessName = dataVersion.getTriggerProcess()
    recoProcessName = dataVersion.getRecoProcess()
    processName = process.name_()

    # Setup trigger matching
    if not (dataVersion.isMC() and options.triggerMC == 0
            and options.triggerMCInAnalysis == 0):
        HChTriggerMatching.setMuonTriggerMatchingInAnalysis(
            process.tightenedMuonsMatched, options.trigger)

    # Setup MuScleFit
    if dataVersion.isMC():
        process.muscleCorrectedMuons.identifier = "Summer12_DR53X_smearReReco"
        process.muscleCorrectedMuons.applySmearing = True
    else:
        process.muscleCorrectedMuons.identifier = "Data2012_53X_ReReco"

    # Setup output
    outputModule = None
    outdict = process.outputModules_()
    if len(outdict) == 1:
        outputModule = outdict.values()[0]
    elif outdict.has_key("RECOSIMoutput"):
        outputModule = outdict["RECOSIMoutput"]

    print "Adjusting event content to GEN-SIM-RECO+misc"
    process.load(
        "HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.HChEventContent_cff")
    #outputModule.outputCommands = cms.untracked.vstring("keep *")
    outputModule.outputCommands = cms.untracked.vstring("drop *")
    outputModule.outputCommands.extend(
        process.RECOSIMEventContent.outputCommands)
    outputModule.outputCommands.extend([
        "drop *_*_*_%s" % recoProcessName,
        "keep *_generalTracks_*_%s" % recoProcessName,
        "keep *_muons_*_%s" % recoProcessName,
        "keep *_globalMuons_*_%s" % recoProcessName,
        "keep recoGsfElectronCores_*_*_%s" % recoProcessName,
        "keep *_gsfElectrons_*_%s" % recoProcessName,
        "keep *_photons_*_%s" % recoProcessName,
        "keep *_photonCore_*_%s" % recoProcessName,
        "drop *_*_*_%s" % processName,
        "keep *_particleFlow*_*_%s" % processName,
        "keep *_generalTracks_*_%s" % processName,
        "keep *_muons_*_%s" % processName,
        "keep *_globalMuons_*_%s" % processName,
        "keep *_*Electron*_*_%s" % processName,
        "keep *_eid*_*_*",
    ])
    outputModule.outputCommands.extend(
        eventContent(hltProcessName, recoProcessName, processName))
    #    re_procName = re.compile("_\*$")
    #    outputModule.outputCommands.extend([re_procName.sub("_"+processName, x) for x in process.RECOSIMEventContent.outputCommands])
    outputModule.outputCommands.extend(process.HChEventContent.outputCommands)
    #outputModule.outputCommands.extend(process.RecoParticleFlowRECO.outputCommands)
    #outputModule.outputCommands.extend(["keep *_%s_*_%s" % (x, processName) for x in [
    #]])

    # Remove duplicate "drop *"
    index = 0
    for item in outputModule.outputCommands[:]:
        if item == "drop *" and index > 0:
            del outputModule.outputCommands[index]
            index -= 1
        index += 1

    # Disable gen vertex smearing
    process.VtxSmeared = cms.EDProducer("FlatEvtVtxGenerator",
                                        MaxZ=cms.double(0.0),
                                        MaxX=cms.double(0.0),
                                        MaxY=cms.double(0.0),
                                        MinX=cms.double(0.0),
                                        MinY=cms.double(0.0),
                                        MinZ=cms.double(0.0),
                                        TimeOffset=cms.double(0.0),
                                        src=cms.InputTag("generator"))

    # Set up tau decay options
    process.generator.ZTauTau.TauolaOptions.InputCards.mdtau = options.tauDecayMode
    if options.tauMinVisPt >= 0:
        process.generator.ZTauTau.minVisibleTransverseMomentum = "%d" % options.tauMinVisPt

    print "TAUOLA mdtau =", process.generator.ZTauTau.TauolaOptions.InputCards.mdtau

    # Do we have to override the beam spot for data?
    if options.overrideBeamSpot != 0:
        bs = cms.string(
            "BeamSpotObjects_2009_LumiBased_SigmaZ_v25_offline")  # 44x data gt
        #bs = cms.string("BeamSpotObjects_2009_LumiBased_SigmaZ_v21_offline") # 42x data gt
        process.GlobalTag.toGet = cms.VPSet(
            cms.PSet(record=cms.string("BeamSpotObjectsRcd"),
                     tag=bs,
                     connect=cms.untracked.string(
                         "frontier://FrontierProd/CMS_COND_31X_BEAMSPOT")))
        print "BeamSpot in globaltag set to ", bs
    else:
        print "BeamSpot in globaltag not changed"

    # Merge tracks
    process.tmfTracks = cms.EDProducer(
        "RecoTracksMixer",
        trackCol1=cms.InputTag("dimuonsGlobal", "tracks"),
        trackCol2=cms.InputTag("generalTracks", "", processName))
    process.offlinePrimaryVerticesWithBS.TrackLabel = cms.InputTag("tmfTracks")
    process.offlinePrimaryVertices.TrackLabel = cms.InputTag("tmfTracks")
    #print process.muons
    if hasattr(process.muons, "TrackExtractorPSet"):
        # <= 42X
        process.muons.TrackExtractorPSet.inputTrackCollection = cms.InputTag(
            "tmfTracks")
    elif hasattr(process, "muons1stStep") and hasattr(process.muons1stStep,
                                                      "TrackExtractorPSet"):
        # >= 44X
        process.muons1stStep.TrackExtractorPSet.inputTrackCollection = cms.InputTag(
            "tmfTracks")
    else:
        raise Exception(
            "Problem in overriding track collection for reco::Muon producer")

    # Ensure that tmfTracks is always run after generalTracks (to mix the original and embedded tracks)
    for p in process.paths:
        pth = getattr(process, p)
        if "generalTracks" in pth.moduleNames():
            pth.replace(process.generalTracks,
                        process.generalTracks * process.tmfTracks)

    # it should be the best solution to take the original beam spot for the
    # reconstruction of the new primary vertex
    # use the  one produced earlier, do not produce your own
    for s in process.sequences:
        seq = getattr(process, s)
        seq.remove(process.offlineBeamSpot)

    # Remove beam halo Id
    try:
        process.metreco.remove(process.BeamHaloId)
    except:
        pass

    # Disable lumi producer
    process.localreco_HcalNZS.remove(process.lumiProducer)
    process.localreco.remove(process.lumiProducer)

    # PFCandidate embedding
    process.particleFlowORG = process.particleFlow.clone()
    # Since CMSSW 4_4 the particleFlow reco works a bit differently. The step is
    # twofold, first particleFlowTmp is created and then the final particleFlow
    # collection. What we do in this case is that we merge the final ParticleFlow
    # collection. For the muon reconstruction, we also merge particleFlowTmp in
    # order to get PF-based isolation right.
    if hasattr(process, 'particleFlowTmp'):
        process.particleFlowTmpMixed = cms.EDProducer(
            'PFCandidateMixer',
            col1=cms.untracked.InputTag("dimuonsGlobal", "pfCands"),
            col2=cms.untracked.InputTag("particleFlowTmp", ""),
            trackCol=cms.untracked.InputTag("tmfTracks"),
            # Don't produce value maps:
            muons=cms.untracked.InputTag(""),
            gsfElectrons=cms.untracked.InputTag(""))
        process.muons.PFCandidates = cms.InputTag("particleFlowTmpMixed")

        for p in process.paths:
            pth = getattr(process, p)
            if "particleFlow" in pth.moduleNames():
                pth.replace(process.particleFlow,
                            process.particleFlowORG * process.particleFlow)
            if "muons" in pth.moduleNames():
                pth.replace(process.muons,
                            process.particleFlowTmpMixed * process.muons)
    else:
        # CMSSW_4_2
        if hasattr(process, "famosParticleFlowSequence"):
            process.famosParticleFlowSequence.remove(
                process.pfPhotonTranslatorSequence)
            process.famosParticleFlowSequence.remove(
                process.pfElectronTranslatorSequence)
            process.famosParticleFlowSequence.remove(process.particleFlow)
            process.famosParticleFlowSequence.__iadd__(process.particleFlowORG)
            process.famosParticleFlowSequence.__iadd__(process.particleFlow)
            process.famosParticleFlowSequence.__iadd__(
                process.pfElectronTranslatorSequence)
            process.famosParticleFlowSequence.__iadd__(
                process.pfPhotonTranslatorSequence)
        elif hasattr(process, "particleFlowReco"):
            process.particleFlowReco.remove(process.pfPhotonTranslatorSequence)
            process.particleFlowReco.remove(
                process.pfElectronTranslatorSequence)
            process.particleFlowReco.remove(process.particleFlow)
            process.particleFlowReco.__iadd__(process.particleFlowORG)
            process.particleFlowReco.__iadd__(process.particleFlow)
            process.particleFlowReco.__iadd__(
                process.pfElectronTranslatorSequence)
            process.particleFlowReco.__iadd__(
                process.pfPhotonTranslatorSequence)
        else:
            raise "Cannot find pflow sequence"
        process.pfSelectedElectrons.src = cms.InputTag("particleFlowORG")
        process.pfSelectedPhotons.src = cms.InputTag("particleFlowORG")

    process.particleFlow = cms.EDProducer(
        'PFCandidateMixer',
        col1=cms.untracked.InputTag("dimuonsGlobal", "pfCands"),
        col2=cms.untracked.InputTag("particleFlowORG", ""),
        trackCol=cms.untracked.InputTag("tmfTracks"),
        muons=cms.untracked.InputTag("muons"),
        gsfElectrons=cms.untracked.InputTag(
            "gsfElectrons", "", recoProcessName)  # FIXME does this work?
        #gsfElectrons = cms.untracked.InputTag("")
    )

    # Set the empty event filter source
    process.filterEmptyEv.src.setProcessName(processName)

    # Find all modules having particleFlow as their input
    pfInputNeeded = {}
    for p in process.paths:
        i = getattr(process, p)
        target = process.particleFlow

        lookForPFInput = ["particleFlow"]

        seqVis = SeqVisitor(target)
        seqVis.prepareSearch()
        seqVis.setLookFor(target)
        i.visit(seqVis)
        while seqVis.catch != 1 and seqVis.found == 1:
            target = seqVis.giveNext()

            pfInput = []

            targetAttributes = dir(target)
            for targetAttribute in targetAttributes:
                attr = getattr(target, targetAttribute)  # get the actual attribute, not just the name
                if isinstance(attr, cms.InputTag):
                    if (attr.getModuleLabel() == "particleFlow"
                            and attr.getProductInstanceLabel() != ""):
                        print "Changing: ", target, " ", targetAttribute, " ", attr, " to particleFlowORG"
                        attr.setModuleLabel("particleFlowORG")
                    if attr.getModuleLabel() in lookForPFInput:
                        pfInput.append(attr)

            if len(pfInput) > 0:
                lookForPFInput.append(target.label())
                pfInputNeeded[target.label()] = pfInput

            #i.replace(target, source)
            seqVis.prepareSearch()
            seqVis.setLookFor(target)
            i.visit(seqVis)

        #if (seqVis.catch==1):
        #seqVis.catch=0
        #i.__iadd__(source)

    pfOutputCommands = []
    for label in pfInputNeeded.keys():
        print "particleFlow as input in module %s, InputTags: %s" % (
            label, ", ".join(str(x) for x in pfInputNeeded[label]))
        pfOutputCommands.append("keep *_%s_*_%s" % (label, processName))
    outputModule.outputCommands.extend(pfOutputCommands)

    #process.pfSelectedElectrons.src = "particleFlowORG" # 4_2 legacy, already included above
    #process.pfSelectedPhotons.src = "particleFlowORG"   # 4_2 legacy, already included above

    # Setup/remove some HLT/DQM stuff which doesn't work
    if hasattr(process, "hltTrigReport"):
        process.hltTrigReport.HLTriggerResults.setProcessName(processName)
    if hasattr(process, "DQM_FEDIntegrity_v2"):
        process.schedule.remove(process.DQM_FEDIntegrity_v2)
    if hasattr(process, "DQM_FEDIntegrity_v3"):
        process.schedule.remove(process.DQM_FEDIntegrity_v3)
    if hasattr(process, "DQM_FEDIntegrity_v5"):
        process.schedule.remove(process.DQM_FEDIntegrity_v5)
    if hasattr(process, "HLTAnalyzerEndpath"):
        process.schedule.remove(process.HLTAnalyzerEndpath)
        del process.HLTAnalyzerEndpath

    #process.load("HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.printGenParticles_cff")
    #process.generation_step *= process.printGenParticles

    print "#############################################################"
    print " Warning! PFCandidates 'electron' collection is not mixed, "
    print "  and probably shouldnt be used. "
    print "#############################################################"

    addPAT(process, options, dataVersion)

    f = open("configDumpEmbed.py", "w")
    f.write(process.dumpPython())
    f.close()

    return process
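
A sketch of how a customise function like the one above is typically hooked into a cmsDriver-generated configuration; the import path in the comment is an assumption, not the original file name:

# At the end of a cmsDriver-generated _cfg.py (hypothetical module path):
# from HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.pf_customise import customise
process = customise(process)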
Example #10
# job, their INFO messages are printed too)
# process.MessageLogger.cerr.threshold = cms.untracked.string("INFO")

# Fragment to run PAT on the fly if requested from command line
from HiggsAnalysis.HeavyChHiggsToTauNu.HChPatTuple import addPatOnTheFly

process.commonSequence, additionalCounters = addPatOnTheFly(process, options, dataVersion)


# Re-run trigger matching
if doRerunTriggerMatching:
    import HiggsAnalysis.HeavyChHiggsToTauNu.HChTriggerMatching as TriggerMatching

    process.triggerMatching = TriggerMatching.addTauTriggerMatching(
        process,
        options.trigger,
        "Tau",
        # pathFilterMap={} # by default, use filter name in trigger matching re-running
    )
    process.commonSequence *= process.triggerMatching


# Add configuration information to histograms.root
from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addConfigInfo

process.infoPath = addConfigInfo(process, options, dataVersion)

# MC Filter
import HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.customisations as tauEmbeddingCustomisations

if filterGenTaus:
    additionalCounters.extend(
Example #11
def customise(process):
    # Catch the case where this config is run from cmsDriver; it won't work due to VarParsing
    # First protect against crab job creation, then the no-argument case
    if hasattr(sys, "argv") and len(sys.argv) > 0:
        if "cmsDriver" in sys.argv[0]:
            print "Running pf_customise from cmsDriver, not executing running() further due to VarParsing"
            return
        else:
            print "Running pf_customise"
  
    # Command line arguments
    import FWCore.ParameterSet.VarParsing as VarParsing
    options = VarParsing.VarParsing ('analysis')
    options.register ('overrideBeamSpot',
                      0, # default value, false
                      VarParsing.VarParsing.multiplicity.singleton,
                      VarParsing.VarParsing.varType.int,
                      "should I override beamspot in globaltag?")
    options.register("tauDecayMode", 0, # Default is all decays
                     VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.int,
                     "Tau decay mode (0=all, 230=hadronic)")
    options.register("tauMinVisPt", -1, # Disabled
                     VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.int,
                     "Minimum visible pt of tau decay (-1 disabled, >= 0 cut value in GeV)")

    options, dataVersion = getOptionsDataVersion("53XmcS10", options)

    hltProcessName = dataVersion.getTriggerProcess()
    recoProcessName = dataVersion.getRecoProcess()
    processName = process.name_()

    # Setup trigger matching
    if not (dataVersion.isMC() and options.triggerMC == 0 and options.triggerMCInAnalysis == 0):
        HChTriggerMatching.setMuonTriggerMatchingInAnalysis(process.tightenedMuonsMatched, options.trigger)

    # Setup MuScleFit
    if dataVersion.isMC():
        process.muscleCorrectedMuons.identifier = "Summer12_DR53X_smearReReco"
        process.muscleCorrectedMuons.applySmearing = True
    else:
        process.muscleCorrectedMuons.identifier = "Data2012_53X_ReReco"

    # Setup output
    outputModule = None
    outdict = process.outputModules_()
    if len(outdict) == 1:
        outputModule = outdict.values()[0]
    elif outdict.has_key("RECOSIMoutput"):
        outputModule = outdict["RECOSIMoutput"]

    print "Adjusting event content to GEN-SIM-RECO+misc"
    process.load("HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.HChEventContent_cff")
    #outputModule.outputCommands = cms.untracked.vstring("keep *")
    outputModule.outputCommands = cms.untracked.vstring("drop *")
    outputModule.outputCommands.extend(process.RECOSIMEventContent.outputCommands)
    outputModule.outputCommands.extend([
            "drop *_*_*_%s" % recoProcessName,
            "keep *_generalTracks_*_%s" % recoProcessName,
            "keep *_muons_*_%s" % recoProcessName,
            "keep *_globalMuons_*_%s" % recoProcessName,
            "keep recoGsfElectronCores_*_*_%s" % recoProcessName,
            "keep *_gsfElectrons_*_%s" % recoProcessName,
            "keep *_photons_*_%s" % recoProcessName,
            "keep *_photonCore_*_%s" % recoProcessName,

            "drop *_*_*_%s" % processName,
            "keep *_particleFlow*_*_%s" % processName,
            "keep *_generalTracks_*_%s" % processName,
            "keep *_muons_*_%s" % processName,
            "keep *_globalMuons_*_%s" % processName,

            "keep *_*Electron*_*_%s" % processName,
            "keep *_eid*_*_*",
    ])
    outputModule.outputCommands.extend(eventContent(hltProcessName, recoProcessName, processName))
#    re_procName = re.compile("_\*$")
#    outputModule.outputCommands.extend([re_procName.sub("_"+processName, x) for x in process.RECOSIMEventContent.outputCommands])
    outputModule.outputCommands.extend(process.HChEventContent.outputCommands)
    #outputModule.outputCommands.extend(process.RecoParticleFlowRECO.outputCommands)
    #outputModule.outputCommands.extend(["keep *_%s_*_%s" % (x, processName) for x in [
    #]])


    # Remove duplicate "drop *"
    index = 0
    for item in outputModule.outputCommands[:]:
        if item == "drop *" and index > 0:
            del outputModule.outputCommands[index]
            index -= 1
        index += 1


    # Disable gen vertex smearing
    process.VtxSmeared = cms.EDProducer("FlatEvtVtxGenerator", 
        MaxZ = cms.double(0.0),
        MaxX = cms.double(0.0),
        MaxY = cms.double(0.0),
        MinX = cms.double(0.0),
        MinY = cms.double(0.0),
        MinZ = cms.double(0.0),
        TimeOffset = cms.double(0.0),
        src = cms.InputTag("generator")
    )

    # Set up tau decay options
    process.generator.ZTauTau.TauolaOptions.InputCards.mdtau = options.tauDecayMode
    if options.tauMinVisPt >= 0:
        process.generator.ZTauTau.minVisibleTransverseMomentum = "%d"%options.tauMinVisPt

    print "TAUOLA mdtau =", process.generator.ZTauTau.TauolaOptions.InputCards.mdtau

    # Do we have to override the beam spot for data?
    if options.overrideBeamSpot !=  0:
        bs = cms.string("BeamSpotObjects_2009_LumiBased_SigmaZ_v25_offline") # 44x data gt
        #bs = cms.string("BeamSpotObjects_2009_LumiBased_SigmaZ_v21_offline") # 42x data gt
        process.GlobalTag.toGet = cms.VPSet(
            cms.PSet(record = cms.string("BeamSpotObjectsRcd"),
                     tag = bs,
                     connect = cms.untracked.string("frontier://FrontierProd/CMS_COND_31X_BEAMSPOT")
            )
        )
        print "BeamSpot in globaltag set to ", bs
    else:
        print "BeamSpot in globaltag not changed"


    # Merge tracks
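    # tmfTracks mixes the tracks stored with the selected dimuon
    # ("dimuonsGlobal:tracks", i.e. the original event) with the generalTracks of
    # the embedded simulation. The primary-vertex producers below are re-pointed
    # to the merged collection, as is the track collection used by the reco::Muon
    # producer for its isolation deposits.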
    process.tmfTracks = cms.EDProducer("RecoTracksMixer",
        trackCol1 = cms.InputTag("dimuonsGlobal", "tracks"),
        trackCol2 = cms.InputTag("generalTracks", "", processName)
    )
    process.offlinePrimaryVerticesWithBS.TrackLabel = cms.InputTag("tmfTracks")
    process.offlinePrimaryVertices.TrackLabel = cms.InputTag("tmfTracks")
    #print process.muons
    if hasattr(process.muons, "TrackExtractorPSet"):
        # <= 42X
        process.muons.TrackExtractorPSet.inputTrackCollection = cms.InputTag("tmfTracks")
    elif hasattr(process, "muons1stStep") and hasattr(process.muons1stStep, "TrackExtractorPSet"):
        # >= 44X
        process.muons1stStep.TrackExtractorPSet.inputTrackCollection = cms.InputTag("tmfTracks")
    else:
        raise Exception("Problem in overriding track collection for reco::Muon producer")

    # Ensure that tmfTracks is always run after generalTracks (to mix the original and embedded tracks)
    for p in process.paths:
        pth = getattr(process, p)
        if "generalTracks" in pth.moduleNames():
            pth.replace(process.generalTracks, process.generalTracks*process.tmfTracks)


    # It is best to use the original beam spot for the reconstruction of the
    # new primary vertex: take the one produced earlier rather than producing
    # a new one, so drop offlineBeamSpot from all sequences.
    for s in process.sequences:
        seq = getattr(process, s)
        seq.remove(process.offlineBeamSpot)

    # Remove beam halo Id (the module is not present in all configurations)
    try:
        process.metreco.remove(process.BeamHaloId)
    except AttributeError:
        pass

    # Disable lumi producer
    process.localreco_HcalNZS.remove(process.lumiProducer)
    process.localreco.remove(process.lumiProducer)

    # PFCandidate embedding
    process.particleFlowORG = process.particleFlow.clone()
    # Since CMSSW 4_4 the particleFlow reco works a bit differently: the step is
    # twofold, first particleFlowTmp is created and then the final particleFlow
    # collection. In that case we mix the final particleFlow collection, and for
    # the muon reconstruction we also mix particleFlowTmp in order to get the
    # PF-based isolation right.
    if hasattr(process, 'particleFlowTmp'):
        process.particleFlowTmpMixed = cms.EDProducer('PFCandidateMixer',
            col1 = cms.untracked.InputTag("dimuonsGlobal", "pfCands"),
            col2 = cms.untracked.InputTag("particleFlowTmp", ""),
            trackCol = cms.untracked.InputTag("tmfTracks"),
            # Don't produce value maps:
            muons = cms.untracked.InputTag(""),
            gsfElectrons = cms.untracked.InputTag("")
        )
        process.muons.PFCandidates = cms.InputTag("particleFlowTmpMixed")

        for p in process.paths:
            pth = getattr(process, p)
            if "particleFlow" in pth.moduleNames():
                pth.replace(process.particleFlow, process.particleFlowORG*process.particleFlow)
            if "muons" in pth.moduleNames():
                pth.replace(process.muons, process.particleFlowTmpMixed*process.muons)
    else:
        # CMSSW_4_2
        if hasattr(process,"famosParticleFlowSequence"):
            process.famosParticleFlowSequence.remove(process.pfPhotonTranslatorSequence)
            process.famosParticleFlowSequence.remove(process.pfElectronTranslatorSequence)
            process.famosParticleFlowSequence.remove(process.particleFlow)
            process.famosParticleFlowSequence.__iadd__(process.particleFlowORG)
            process.famosParticleFlowSequence.__iadd__(process.particleFlow)
            process.famosParticleFlowSequence.__iadd__(process.pfElectronTranslatorSequence)
            process.famosParticleFlowSequence.__iadd__(process.pfPhotonTranslatorSequence)
        elif hasattr(process,"particleFlowReco"):
            process.particleFlowReco.remove(process.pfPhotonTranslatorSequence)
            process.particleFlowReco.remove(process.pfElectronTranslatorSequence)
            process.particleFlowReco.remove(process.particleFlow)
            process.particleFlowReco.__iadd__(process.particleFlowORG)
            process.particleFlowReco.__iadd__(process.particleFlow)
            process.particleFlowReco.__iadd__(process.pfElectronTranslatorSequence)
            process.particleFlowReco.__iadd__(process.pfPhotonTranslatorSequence)
        else:
            raise "Cannot find pflow sequence"
        process.pfSelectedElectrons.src = cms.InputTag("particleFlowORG")
        process.pfSelectedPhotons.src   = cms.InputTag("particleFlowORG")

    process.particleFlow =  cms.EDProducer('PFCandidateMixer',
        col1 = cms.untracked.InputTag("dimuonsGlobal", "pfCands"),
        col2 = cms.untracked.InputTag("particleFlowORG", ""),
        trackCol = cms.untracked.InputTag("tmfTracks"),
        muons = cms.untracked.InputTag("muons"),
        gsfElectrons = cms.untracked.InputTag("gsfElectrons","",recoProcessName) # FIXME does this work?
        #gsfElectrons = cms.untracked.InputTag("")
    )
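    # Registering the mixer under the original "particleFlow" label lets
    # downstream consumers pick up the mixed candidates transparently, while the
    # unmixed clone remains available as particleFlowORG.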

    # Set the empty event filter source
    process.filterEmptyEv.src.setProcessName(processName)

    # Find all modules having particleFlow as their input
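    # The SeqVisitor walk below follows each path starting from particleFlow.
    # Any InputTag that refers to a named product instance of "particleFlow" is
    # redirected to "particleFlowORG" (presumably because the mixer provides only
    # the main, unnamed collection), and every module that consumes particleFlow
    # output, directly or via another recorded module, is collected in
    # pfInputNeeded so that its products can be kept in the output below.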
    pfInputNeeded = {}
    for p in process.paths:
        i = getattr(process, p)
        target = process.particleFlow

        lookForPFInput = ["particleFlow"]

        seqVis = SeqVisitor(target)
        seqVis.prepareSearch()
        seqVis.setLookFor(target)
        i.visit(seqVis)
        while seqVis.catch != 1 and seqVis.found == 1: 
            target = seqVis.giveNext()

            pfInput = []

            targetAttributes = dir(target)
            for targetAttribute in targetAttributes:
                attr = getattr(target, targetAttribute) # get the actual attribute, not just its name
                if isinstance(attr, cms.InputTag):
                    if attr.getModuleLabel() == "particleFlow" and attr.getProductInstanceLabel() != "":
                        print "Changing:", target, targetAttribute, attr, "to particleFlowORG"
                        attr.setModuleLabel("particleFlowORG")
                    if attr.getModuleLabel() in lookForPFInput:
                        pfInput.append(attr)

            if len(pfInput) > 0:
                lookForPFInput.append(target.label())
                pfInputNeeded[target.label()] = pfInput


            #i.replace(target, source) 
            seqVis.prepareSearch()
            seqVis.setLookFor(target)
            i.visit(seqVis)

        #if (seqVis.catch==1):
            #seqVis.catch=0
            #i.__iadd__(source)

    pfOutputCommands = []
    for label in pfInputNeeded.keys():
        print "particleFlow as input in module %s, InputTags: %s" % (label, ", ".join(str(x) for x in pfInputNeeded[label]))
        pfOutputCommands.append("keep *_%s_*_%s" % (label, processName))
    outputModule.outputCommands.extend(pfOutputCommands)
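    # These keep statements make sure the products of the recorded particleFlow
    # consumers are written out from the embedding process.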

    #process.pfSelectedElectrons.src = "particleFlowORG" # 4_2 legacy, already included above
    #process.pfSelectedPhotons.src = "particleFlowORG"   # 4_2 legacy, already included above


    # Set up / remove some HLT/DQM stuff which doesn't work
    if hasattr(process, "hltTrigReport"):
        process.hltTrigReport.HLTriggerResults.setProcessName(processName)
    if hasattr(process, "DQM_FEDIntegrity_v2"):
        process.schedule.remove(process.DQM_FEDIntegrity_v2)
    if hasattr(process, "DQM_FEDIntegrity_v3"):
        process.schedule.remove(process.DQM_FEDIntegrity_v3)
    if hasattr(process, "DQM_FEDIntegrity_v5"):
        process.schedule.remove(process.DQM_FEDIntegrity_v5)
    if hasattr(process, "HLTAnalyzerEndpath"):
        process.schedule.remove(process.HLTAnalyzerEndpath)
        del process.HLTAnalyzerEndpath

    #process.load("HiggsAnalysis.HeavyChHiggsToTauNu.tauEmbedding.printGenParticles_cff")
    #process.generation_step *= process.printGenParticles

    print "#############################################################"
    print " Warning! PFCandidates 'electron' collection is not mixed, "
    print "  and probably shouldnt be used. "
    print "#############################################################"


    addPAT(process, options, dataVersion)

    f = open("configDumpEmbed.py", "w")
    f.write(process.dumpPython())
    f.close()

    return process