Example #1
def addPUWeightVariation(name):
    # Up variation
    module = getattr(process, name).clone()
    module.Tree.fill = False
    param.setPileupWeight(dataVersion,
                          process,
                          process.commonSequence,
                          pset=module.vertexWeight,
                          psetReader=module.pileupWeightReader,
                          era=puweight,
                          suffix="up")
    addAnalysis(process,
                name + "PUWeightPlus",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)
    # Down variation
    module = module.clone()
    param.setPileupWeight(dataVersion,
                          process,
                          process.commonSequence,
                          pset=module.vertexWeight,
                          psetReader=module.pileupWeightReader,
                          era=puweight,
                          suffix="down")
    addAnalysis(process,
                name + "PUWeightMinus",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)
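A minimal usage sketch for the helper above (not part of the original snippet): it relies on globals from the enclosing _cfg.py (process, param, dataVersion, puweight, additionalCounters, addAnalysis); the guard flag and the analysis name below are assumptions.

# Hypothetical call site; doSystematics and "signalAnalysis" are assumed names.
if doSystematics:
    addPUWeightVariation("signalAnalysis")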
Example #2
def addTauIdAnalyses(process, dataVersion, prefix, prototype, commonSequence,
                     additionalCounters):
    from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
    import HiggsAnalysis.HeavyChHiggsToTauNu.HChMetCorrection as MetCorrection

    selections = tauSelections[:]
    names = tauSelectionNames[:]
    # Remove TCTau from list (it can be missing in the tau embedding case)
    try:
        tctauIndex = selections.index(tauSelectionCaloTauCutBased)
        del selections[tctauIndex]
        del names[tctauIndex]
    except ValueError:
        pass
    # Remove PF shrinking cone from list
    pfShrinkingConeIndex = selections.index(tauSelectionShrinkingConeCutBased)
    del selections[pfShrinkingConeIndex]
    del names[pfShrinkingConeIndex]
    # Remove TaNC from list
    tancIndex = selections.index(tauSelectionShrinkingConeTaNCBased)
    del selections[tancIndex]
    del names[tancIndex]
    # HPS loose
    hpsLoose = selections.index(tauSelectionHPSLooseTauBased)
    #del selections[hpsLoose]
    #del names[hpsLoose]
    # Remove combined HPS TaNC from list
    combinedHPSTaNCIndex = selections.index(
        tauSelectionCombinedHPSTaNCTauBased)
    del selections[combinedHPSTaNCIndex]
    del names[combinedHPSTaNCIndex]

    for selection, name in zip(selections, names):
        module = prototype.clone()
        module.tauSelection = selection.clone()

        # Calculate type 1 MET
        (type1Sequence,
         type1Met) = MetCorrection.addCorrectedMet(process,
                                                   dataVersion,
                                                   module.tauSelection,
                                                   module.jetSelection,
                                                   postfix=name)
        module.MET.type1Src = type1Met

        seq = cms.Sequence(commonSequence * type1Sequence)
        setattr(process, "commonSequence" + name, seq)

        addAnalysis(process,
                    prefix + name,
                    module,
                    preSequence=seq,
                    additionalCounters=additionalCounters)
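A hedged usage sketch for addTauIdAnalyses (not part of the original snippet); the prefix string and prototype module are assumptions modelled on the signalAnalysis-style configuration used elsewhere in this file.

# Hypothetical call site; only the argument order follows the definition above.
addTauIdAnalyses(process, dataVersion,
                 "signalAnalysisTauSelection",  # assumed prefix
                 process.signalAnalysis,        # assumed prototype module
                 process.commonSequence,
                 additionalCounters)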
def addTauIdAnalyses(process, dataVersion, prefix, prototype, commonSequence, additionalCounters):
    from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
    import HiggsAnalysis.HeavyChHiggsToTauNu.HChMetCorrection as MetCorrection

    selections = tauSelections[:]
    names = tauSelectionNames[:]
    # Remove TCTau from list (it can be missing in the tau embedding case)
    try:
        tctauIndex = selections.index(tauSelectionCaloTauCutBased)
        del selections[tctauIndex]
        del names[tctauIndex]
    except ValueError:
        pass
    # Remove PF shrinking cone from list
    pfShrinkingConeIndex = selections.index(tauSelectionShrinkingConeCutBased)
    del selections[pfShrinkingConeIndex]
    del names[pfShrinkingConeIndex]
    # Remove TaNC from list
    tancIndex = selections.index(tauSelectionShrinkingConeTaNCBased)
    del selections[tancIndex]
    del names[tancIndex]
    # HPS loose
    hpsLoose = selections.index(tauSelectionHPSLooseTauBased)
    #del selections[hpsLoose]
    #del names[hpsLoose]
    # Remove combined HPS TaNC from list
    combinedHPSTaNCIndex = selections.index(tauSelectionCombinedHPSTaNCTauBased)
    del selections[combinedHPSTaNCIndex]
    del names[combinedHPSTaNCIndex]

    for selection, name in zip(selections, names):
        module = prototype.clone()
        module.tauSelection = selection.clone()

        # Calculate type 1 MET
        (type1Sequence, type1Met) = MetCorrection.addCorrectedMet(process, dataVersion, module.tauSelection, module.jetSelection, postfix=name)
        module.MET.type1Src = type1Met

        seq = cms.Sequence(
            commonSequence *
            type1Sequence
        )
        setattr(process, "commonSequence"+name, seq)

        addAnalysis(process, prefix+name, module,
                    preSequence=seq,
                    additionalCounters=additionalCounters)
def addPUWeightVariation(name):
    # Up variation
    module = getattr(process, name).clone()
    module.Tree.fill = False
    param.setPileupWeight(dataVersion, process, process.commonSequence, pset=module.vertexWeight, psetReader=module.pileupWeightReader, era=puweight, suffix="up")
    addAnalysis(process, name+"PUWeightPlus", module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)
    # Down variation
    module = module.clone()
    param.setPileupWeight(dataVersion, process, process.commonSequence, pset=module.vertexWeight, psetReader=module.pileupWeightReader, era=puweight, suffix="down")
    addAnalysis(process, name+"PUWeightMinus", module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)
 def doVariation(self, depth, idxlist, process, additionalCounters, commonSequence, nominalAnalysis, analysisName):
     myModuleNames = []
     if depth == len(idxlist):
         # Top reached, create module
         myVariationName = self.getVariationName(analysisName, idxlist)
         addAnalysis(process,
                     myVariationName,
                     self.createVariationModule(idxlist, nominalAnalysis),
                     preSequence=commonSequence,
                     additionalCounters=additionalCounters,
                     signalAnalysisCounters=True)
         myModuleNames.append(myVariationName)
         #print "Added module:",myVariationName
     else:
         for i in range(0, self._variationItems[depth].getNumberOfVariations()):
             # Enter recursion
             idxlist[depth] = i
             myModuleNames.extend(self.doVariation(depth+1, idxlist, process, additionalCounters, commonSequence, nominalAnalysis, analysisName))
     return myModuleNames
def addPUWeightVariation(name):
    module = getattr(process, name).clone()
    module.Tree.fill = False
    module.vertexWeight.shiftMean = True
    module.vertexWeight.shiftMeanAmount = PUWeightVariation
    addAnalysis(process,
                name + "PUWeightPlus",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)

    module = module.clone()
    module.vertexWeight.shiftMeanAmount = -PUWeightVariation
    addAnalysis(process,
                name + "PUWeightMinus",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)
Example #7
 def doVariation(self, depth, idxlist, process, additionalCounters,
                 commonSequence, nominalAnalysis, analysisName):
     myModuleNames = []
     if depth == len(idxlist):
         # Top reached, create module
         myVariationName = self.getVariationName(analysisName, idxlist)
         addAnalysis(process,
                     myVariationName,
                     self.createVariationModule(idxlist, nominalAnalysis),
                     preSequence=commonSequence,
                     additionalCounters=additionalCounters,
                     signalAnalysisCounters=True)
         myModuleNames.append(myVariationName)
         #print "Added module:",myVariationName
     else:
         for i in range(
                 0, self._variationItems[depth].getNumberOfVariations()):
             # Enter recursion
             idxlist[depth] = i
             myModuleNames.extend(
                 self.doVariation(depth + 1, idxlist, process,
                                  additionalCounters, commonSequence,
                                  nominalAnalysis, analysisName))
     return myModuleNames
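A hedged driver sketch for the recursion above, as it might be called from another method of the same class; apart from doVariation itself and variationModuleNames (which appears later in this file), every name here is an assumption.

# Hypothetical driver inside another method of the same class: one index
# slot per variation dimension, all seeded to zero before recursing.
idxlist = [0] * len(self._variationItems)
variationModuleNames = self.doVariation(0, idxlist, process,
                                        additionalCounters,
                                        process.commonSequence,
                                        process.signalAnalysis,
                                        "signalAnalysis")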
Example #8
def addPUWeightVariation(name):
    module = getattr(process, name).clone()
    module.Tree.fill = False
    module.vertexWeight.shiftMean = True
    module.vertexWeight.shiftMeanAmount = PUWeightVariation
    addAnalysis(
        process,
        name + "PUWeightPlus",
        module,
        preSequence=process.commonSequence,
        additionalCounters=additionalCounters,
        signalAnalysisCounters=True,
    )

    module = module.clone()
    module.vertexWeight.shiftMeanAmount = -PUWeightVariation
    addAnalysis(
        process,
        name + "PUWeightMinus",
        module,
        preSequence=process.commonSequence,
        additionalCounters=additionalCounters,
        signalAnalysisCounters=True,
    )
Example #9
if doMETResolution:
    process.load("HiggsAnalysis.HeavyChHiggsToTauNu.METResolutionAnalysis_cfi")
    process.signalAnalysisPath += process.metResolutionAnalysis

# Summer PAS cuts
from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis

if doSummerPAS:
    module = process.signalAnalysis.clone()
    module.tauSelection.rtauCut = 0
    module.MET.METCut = 70
    module.jetSelection.EMfractionCut = 999  # disable
    addAnalysis(
        process,
        "signalAnalysisRtau0MET70",
        module,
        preSequence=process.commonSequence,
        additionalCounters=additionalCounters,
        signalAnalysisCounters=True,
    )


# b tagging testing
if doBTagScan:
    module = process.signalAnalysis.clone()
    #    module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 2.0
    module.Tree.fill = False
    addAnalysis(
        process,
        "signalAnalysisBtaggingTest",
        module,
        preSequence=process.commonSequence,
        additionalCounters=additionalCounters,
        signalAnalysisCounters=True,
    )
Example #10
        modules.append("signalAnalysisRtau0MET70")
    if doRtau0:
        modules.append("signalAnalysisRtau0")
    if doOptimisation:
        modules.extend(variationModuleNames)
    return modules

# To have a tau-embedding-like preselection
if doTauEmbeddingLikePreselection:
    # Preselection similar to tau embedding selection (genuine tau+3 jets+lepton vetoes), no tau+MET trigger required
    process.tauEmbeddingLikeSequence = cms.Sequence(process.commonSequence)
    module = process.signalAnalysis.clone()
    counters = additionalCounters[:]
    counters.extend(tauEmbeddingCustomisations.addEmbeddingLikePreselection(process, process.tauEmbeddingLikeSequence, module))
    addAnalysis(process, "signalAnalysisTauEmbeddingLikePreselection", module,
                preSequence=process.tauEmbeddingLikeSequence,
                additionalCounters=counters, signalAnalysisCounters=True)

    # Preselection similar to tau embedding selection (genuine tau+3 jets+lepton vetoes), tau+MET trigger required
    process.tauEmbeddingLikeTriggeredSequence = cms.Sequence(process.commonSequence)
    module = process.signalAnalysis.clone()
    counters = additionalCounters[:]
    counters.extend(tauEmbeddingCustomisations.addEmbeddingLikePreselection(process, process.tauEmbeddingLikeTriggeredSequence, module, prefix="embeddingLikeTriggeredPreselection", disableTrigger=False))
    addAnalysis(process, "signalAnalysisTauEmbeddingLikeTriggeredPreselection", module,
                preSequence=process.tauEmbeddingLikeTriggeredSequence,
                additionalCounters=counters, signalAnalysisCounters=True)    

    process.genuineTauSequence = cms.Sequence(process.commonSequence)
    module = process.signalAnalysis.clone()
    counters = additionalCounters[:]
    counters.extend(tauEmbeddingCustomisations.addGenuineTauPreselection(process, process.genuineTauSequence, module))
Example #11
process.signalOptimisationPath = cms.Path(
    process.commonSequence  # supposed to be empty, unless "doPat=1" command line argument is given
    * process.signalOptimisation
    * process.signalOptimisationCounters
    * process.PickEvents
)


# b tagging testing
if doBTagScan:
    from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis

    module = process.signalOptimisation.clone()
    # module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 3.0
    addAnalysis(
        process,
        "signalOptimisationBtaggingTest",
        module,
        preSequence=process.commonSequence,
        additionalCounters=additionalCounters,
        signalOptimisationCounters=True,
    )


################################################################################
# The signal analysis with different tau ID algorithms
#
# Run the analysis for the different tau ID algorithms in the same job
# as the golden analysis. It is significantly more efficient to run
# many analyses in a single job than in many separate jobs (this avoids
# some of the I/O and grid overhead). The fragment below creates the
# following histogram directories:
# signalOptimisationTauSelectionShrinkingConeCutBased
# signalOptimisationTauSelectionShrinkingConeTaNCBased
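The fragment the comment refers to is not included in this snippet; below is a hedged sketch of the kind of call it might contain, modelled on addTauIdAnalyses from Example #2. The prefix and prototype names are assumptions inferred from the directory names above, and the exact set of directories depends on which tau selections that helper keeps.

# Hypothetical fragment; directory names are built as prefix + selection name
# inside addTauIdAnalyses.
addTauIdAnalyses(process, dataVersion,
                 "signalOptimisationTauSelection", process.signalOptimisation,
                 process.commonSequence, additionalCounters)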
process.load("HiggsAnalysis.HeavyChHiggsToTauNu.PickEventsDumper_cfi")
process.alphatAnalysisPath = cms.Path(
    process.commonSequence
    *  # supposed to be empty, unless "doPat=1" command line argument is given
    process.alphatAnalysis * process.alphatAnalysisCounters *
    process.PickEvents)

# b tagging testing
from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
if doBTagScan:
    module = process.alphatAnalysis.clone()
    module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 1.2
    addAnalysis(process,
                "alphatAnalysisBtaggingTest",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                alphatAnalysisCounters=True)
if doBTagScan:
    from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
    module = process.alphatAnalysis.clone()
    module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 1.8
    addAnalysis(process,
                "alphatAnalysisBtaggingTest2",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                alphatAnalysisCounters=True)

################################################################################
Example #13
process.alphatAnalysisPath = cms.Path(
    process.commonSequence * # supposed to be empty, unless "doPat=1" command line argument is given
    process.alphatAnalysis *
    process.alphatAnalysisCounters *
    process.PickEvents
)


# b tagging testing
from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
if doBTagScan:
    module = process.alphatAnalysis.clone()
    module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 1.2
    addAnalysis(process, "alphatAnalysisBtaggingTest", module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                alphatAnalysisCounters=True)
if doBTagScan:
    from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
    module = process.alphatAnalysis.clone()
    module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 1.8
    addAnalysis(process, "alphatAnalysisBtaggingTest2", module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                alphatAnalysisCounters=True)


################################################################################
# The alphat analysis with different tau ID algorithms
#
process.load("HiggsAnalysis.HeavyChHiggsToTauNu.PickEventsDumper_cfi")
process.signalOptimisationPath = cms.Path(
    process.commonSequence
    *  # supposed to be empty, unless "doPat=1" command line argument is given
    process.signalOptimisation * process.signalOptimisationCounters *
    process.PickEvents)

# b tagging testing
if doBTagScan:
    from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
    module = process.signalOptimisation.clone()
    #module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 3.0
    addAnalysis(process,
                "signalOptimisationBtaggingTest",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalOptimisationCounters=True)

################################################################################
# The signal analysis with different tau ID algorithms
#
# Run the analysis for the different tau ID algorithms in the same job
# as the golden analysis. It is significantly more efficient to run
# many analyses in a single job than in many separate jobs (this avoids
# some of the I/O and grid overhead). The fragment below creates the
# following histogram directories:
# signalOptimisationTauSelectionShrinkingConeCutBased
# signalOptimisationTauSelectionShrinkingConeTaNCBased
# signalOptimisationTauSelectionCaloTauCutBased
# signalOptimisationTauSelectionHPSTightTauBased
    process.PickEvents
)

if doMETResolution:
    process.load("HiggsAnalysis.HeavyChHiggsToTauNu.METResolutionAnalysis_cfi")
    process.signalAnalysisPath += process.metResolutionAnalysis

# Summer PAS cuts
from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
if doSummerPAS:
    module = process.signalAnalysis.clone()
    module.tauSelection.rtauCut = 0
    module.MET.METCut = 70
    module.jetSelection.EMfractionCut = 999 # disable
    addAnalysis(process, "signalAnalysisRtau0MET70", module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)


# b tagging testing
if doBTagScan:
    module = process.signalAnalysis.clone()
#    module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 2.0
    module.Tree.fill = False
    addAnalysis(process, "signalAnalysisBtaggingTest", module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)

    from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
    process.PickEvents)

if doMETResolution:
    process.load("HiggsAnalysis.HeavyChHiggsToTauNu.METResolutionAnalysis_cfi")
    process.signalAnalysisPath += process.metResolutionAnalysis

# Summer PAS cuts
from HiggsAnalysis.HeavyChHiggsToTauNu.HChTools import addAnalysis
if doSummerPAS:
    module = process.signalAnalysis.clone()
    module.tauSelection.rtauCut = 0
    module.MET.METCut = 70
    module.jetSelection.EMfractionCut = 999  # disable
    addAnalysis(process,
                "signalAnalysisRtau0MET70",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)

# b tagging testing
if doBTagScan:
    module = process.signalAnalysis.clone()
    #    module.bTagging.discriminator = "trackCountingHighPurBJetTags"
    module.bTagging.discriminatorCut = 2.0
    module.Tree.fill = False
    addAnalysis(process,
                "signalAnalysisBtaggingTest",
                module,
                preSequence=process.commonSequence,
                additionalCounters=additionalCounters,
                signalAnalysisCounters=True)
Example #18
        modules.extend(variationModuleNames)
    return modules


# To have a tau-embedding-like preselection
if doTauEmbeddingLikePreselection:
    # Preselection similar to tau embedding selection (genuine tau+3 jets+lepton vetoes), no tau+MET trigger required
    process.tauEmbeddingLikeSequence = cms.Sequence(process.commonSequence)
    module = process.signalAnalysis.clone()
    counters = additionalCounters[:]
    counters.extend(
        tauEmbeddingCustomisations.addEmbeddingLikePreselection(
            process, process.tauEmbeddingLikeSequence, module))
    addAnalysis(process,
                "signalAnalysisTauEmbeddingLikePreselection",
                module,
                preSequence=process.tauEmbeddingLikeSequence,
                additionalCounters=counters,
                signalAnalysisCounters=True)

    # Preselection similar to tau embedding selection (genuine tau+3 jets+lepton vetoes), tau+MET trigger required
    process.tauEmbeddingLikeTriggeredSequence = cms.Sequence(
        process.commonSequence)
    module = process.signalAnalysis.clone()
    counters = additionalCounters[:]
    counters.extend(
        tauEmbeddingCustomisations.addEmbeddingLikePreselection(
            process,
            process.tauEmbeddingLikeTriggeredSequence,
            module,
            prefix="embeddingLikeTriggeredPreselection",
            disableTrigger=False))