def addAnalyzer(era):
    """Run the single-tau trigger efficiency analysis for *era*.

    Side effect: refreshes the module-level (runmin, runmax) run range.
    """
    global runmin, runmax
    job = Process(outputPrefix="singleTauEfficiency_"+era)
    job.addDatasetsFromMulticrab(sys.argv[1])
    job.setDatasets(getDatasetsForEras(job.getDatasets(), eras[era]))
    runmin, runmax = job.getRuns()
    # Analyzer is built lazily per dataVersion via the lambda factory.
    job.addAnalyzer("SingleTau"+tauThreshold+"_"+era,
                    lambda dv: createAnalyzer(dv, era))
    job.run()
示例#2
0
def addAnalyzer(era):
    """Run the L1 trigger study for *era*, restricted to the 2016H tasks.

    Side effect: refreshes the module-level (runmin, runmax) run range.
    """
    proc = Process(outputPrefix="L1Study")
    # Only the 2016H tasks of the multicrab directory given on the command line.
    proc.addDatasetsFromMulticrab(sys.argv[1], includeOnlyTasks="2016H")
    proc.setDatasets(getDatasetsForEras(proc.getDatasets(), eras[era]))
    global runmin, runmax
    runmin, runmax = proc.getRuns()
    proc.addAnalyzer("L1Study_"+eras[era], lambda dv: createAnalyzer(dv, era))
    proc.run()
示例#3
0
def addAnalyzer(era, onlineSelection):
    """Run the MET-leg trigger efficiency analysis for one era/selection pair.

    Side effect: refreshes the module-level (runmin, runmax) run range.
    """
    global runmin, runmax
    proc = Process(outputPrefix="metLegEfficiency_"+era)
    proc.addDatasetsFromMulticrab(sys.argv[1])
    eraDatasets = getDatasetsForEras(proc.getDatasets(), eras[era])
    proc.setDatasets(eraDatasets)
    runmin, runmax = proc.getRuns()
    proc.addAnalyzer(
        "METLeg_"+era+"_"+onlineSelection,
        lambda dv: createAnalyzer(dv, era, onlineSelection),
    )
    proc.run()
def addAnalyzer(era, onlineSelection):
    """MET-leg efficiency driver (duplicate of the definition above).

    Side effect: refreshes the module-level (runmin, runmax) run range.
    """
    analysis = Process(outputPrefix="metLegEfficiency_"+era)
    analysis.addDatasetsFromMulticrab(sys.argv[1])
    analysis.setDatasets(getDatasetsForEras(analysis.getDatasets(), eras[era]))
    global runmin, runmax
    runmin, runmax = analysis.getRuns()
    label = "METLeg_"+era+"_"+onlineSelection
    analysis.addAnalyzer(label, lambda dv: createAnalyzer(dv, era, onlineSelection))
    analysis.run()
def addAnalyzer(era):
    """Run the tau-leg trigger efficiency analysis for *era*.

    Side effect: refreshes the module-level (runmin, runmax) run range.
    """
    global runmin, runmax
    proc = Process(outputPrefix="tauLegEfficiency_"+era)
    proc.addDatasetsFromMulticrab(sys.argv[1])
    proc.setDatasets(getDatasetsForEras(proc.getDatasets(), eras[era]))
    runmin, runmax = proc.getRuns()
    # The lambda defers analyzer construction until the dataVersion is known.
    proc.addAnalyzer("TauLeg_"+era, lambda dv: createAnalyzer(dv, era))
    proc.run()
示例#6
0
def addAnalyzer(era):
    """Tau-leg efficiency driver (reformatted twin of the definition above).

    Side effect: refreshes the module-level (runmin, runmax) run range.
    """
    job = Process(outputPrefix="tauLegEfficiency_" + era)
    job.addDatasetsFromMulticrab(sys.argv[1])
    selected = getDatasetsForEras(job.getDatasets(), eras[era])
    job.setDatasets(selected)
    global runmin, runmax
    runmin, runmax = job.getRuns()
    job.addAnalyzer("TauLeg_" + era, lambda dv: createAnalyzer(dv, era))
    job.run()
from HiggsAnalysis.NtupleAnalysis.main import Process, PSet, Analyzer

process = Process()

# Example of adding a dataset which has its files defined in data/<dataset_name>.txt file
#process.addDatasets(["TTbar_HBWB_HToTauNu_M_160_13TeV_pythia6"])

# Example of adding datasets from a multicrab directory
import sys
if len(sys.argv) != 2:
    # NOTE(review): usage error exits with status 0; nonzero would be more conventional — confirm intent
    print "Usage: ./metAnalysis.py <path-to-multicrab-directory>"
    sys.exit(0)
process.addDatasetsFromMulticrab(sys.argv[1])

# Example of adding an analyzer with a fixed configuration
process.addAnalyzer("test", Analyzer("MetAnalysis", tauPtCut=10))


# Example of adding an analyzer whose configuration depends on dataVersion
def createAnalyzer(dataVersion):
    """Return a MetAnalysis analyzer; the tau pT cut depends on data vs. MC."""
    analyzer = Analyzer("MetAnalysis")
    analyzer.tauPtCut = 10 if dataVersion.isMC() else 20
    return analyzer


# Register the dataVersion-dependent factory defined above.
process.addAnalyzer("test2", createAnalyzer)

# Run the analysis
示例#8
0
process = Process()

# Example of adding a dataset which has its files defined in data/<dataset_name>.txt file
#process.addDatasets(["TTbar_HBWB_HToTauNu_M_160_13TeV_pythia6"])

# Example of adding datasets from a multicrab directory
import sys
if len(sys.argv) != 2:
    # NOTE(review): usage error exits with status 0; nonzero would be more conventional — confirm intent
    print "Usage: ./exampleAnalysis.py <path-to-multicrab-directory>"
    sys.exit(0)
process.addDatasetsFromMulticrab(sys.argv[1])

# Example of adding an analyzer with a fixed configuration
process.addAnalyzer("test", Analyzer("ExampleAnalysis",
                                     tauPtCut = 10
))

# Example of adding an analyzer whose configuration depends on dataVersion
def createAnalyzer(dataVersion):
    """Return an ExampleAnalysis analyzer configured for the given dataVersion."""
    cfg = Analyzer("ExampleAnalysis")
    if not dataVersion.isMC():
        cfg.tauPtCut = 20
    else:
        cfg.tauPtCut = 10
    return cfg
# Register the dataVersion-dependent factory defined above.
process.addAnalyzer("test2", createAnalyzer)

# Run the analysis
process.run()
from HiggsAnalysis.NtupleAnalysis.main import Process, PSet, Analyzer

process = Process()

# Example of adding a dataset which has its files defined in data/<dataset_name>.txt file
#process.addDatasets(["TTbar_HBWB_HToTauNu_M_160_13TeV_pythia6"])

# Example of adding datasets from a multicrab directory
import sys
if len(sys.argv) != 2:
    # NOTE(review): usage error exits with status 0; nonzero would be more conventional — confirm intent
    print "Usage: ./metAnalysis.py <path-to-multicrab-directory>"
    sys.exit(0)
process.addDatasetsFromMulticrab(sys.argv[1])

# Example of adding an analyzer with a fixed configuration
process.addAnalyzer("test", Analyzer("CorrelationAnalysis", tauPtCut=10))


# Example of adding an analyzer whose configuration depends on dataVersion
def createAnalyzer(dataVersion):
    """Return a CorrelationAnalysis analyzer; MC gets a looser tau pT cut than data."""
    result = Analyzer("CorrelationAnalysis")
    result.tauPtCut = 10 if dataVersion.isMC() else 20
    return result


# Register the dataVersion-dependent factory defined above.
process.addAnalyzer("test2", createAnalyzer)

# Run the analysis
# NOTE(review): no process.run() follows before `process` is re-created below —
# confirm this example is intentionally truncated.
process = Process()

# Example of adding a dataset which has its files defined in data/<dataset_name>.txt file
#process.addDatasets(["TTbar_HBWB_HToTauNu_M_160_13TeV_pythia6"])

# Example of adding datasets from a multicrab directory
import sys
if len(sys.argv) != 2:
    # NOTE(review): usage error exits with status 0; nonzero would be more conventional — confirm intent
    print "Usage: ./metAnalysis.py <path-to-multicrab-directory>"
    sys.exit(0)
process.addDatasetsFromMulticrab(sys.argv[1])

# Example of adding an analyzer with a fixed configuration
process.addAnalyzer("test", Analyzer("CorrelationAnalysis",
                                     tauPtCut = 10
))

# Example of adding an analyzer whose configuration depends on dataVersion
def createAnalyzer(dataVersion):
    """dataVersion-dependent factory: tau pT cut is 10 for MC, 20 for data."""
    ana = Analyzer("CorrelationAnalysis")
    cut = 10
    if not dataVersion.isMC():
        cut = 20
    ana.tauPtCut = cut
    return ana
# Register the dataVersion-dependent factory defined above.
process.addAnalyzer("test2", createAnalyzer)

# Run the analysis
process.run()
示例#11
0
def main():
    """Build and run the analysis Process from the command-line options.

    Relies on module-level names defined elsewhere in this file:
    opts, prefix, postfix, dataEras, searchModes, allowedAnalysis,
    Print, PrintOptions, Process, AnalysisBuilder, Analyzer, sys.
    """

    # Require at least two arguments (script-name, path to multicrab)
    if len(sys.argv) < 2:
        Print(
            "Not enough arguments passed to script execution. Printing docstring & EXIT."
        )
        print __doc__
        sys.exit(0)
    else:
        pass

    # ================================================================================================
    # Setup the process
    # ================================================================================================
    maxEvents = {}
    maxEvents["All"] = opts.nEvts  # event cap applied under the "All" key
    process = Process(prefix, postfix, maxEvents)

    # ================================================================================================
    # Add the datasets (according to user options)
    # ================================================================================================
    if (opts.includeOnlyTasks):
        Print("Adding only dataset \"%s\" from multiCRAB directory %s" %
              (opts.includeOnlyTasks, opts.mcrab))
        process.addDatasetsFromMulticrab(
            opts.mcrab, includeOnlyTasks=opts.includeOnlyTasks)
    elif (opts.excludeTasks):
        Print("Adding all datasets except \"%s\" from multiCRAB directory %s" %
              (opts.excludeTasks, opts.mcrab))
        Print(
            "If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc..."
        )
        process.addDatasetsFromMulticrab(opts.mcrab,
                                         excludeTasks=opts.excludeTasks)
    else:
        # Default: add everything, minus a per-analysis blacklist.
        myBlackList = []
        if opts.analysisType == "HToTB":
            myBlackList = [
                "QCD_b", "ChargedHiggs", "DY", "WZ", "WW", "ZZ", "TTTT", "ST",
                "TTWJets", "TTZ"
            ]

        Print("Adding all datasets from multiCRAB directory %s" % (opts.mcrab))
        Print(
            "If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc..."
        )
        # excludeTasks accepts a regex; OR together the blacklist entries.
        regex = "|".join(myBlackList)
        if len(myBlackList) > 0:
            process.addDatasetsFromMulticrab(opts.mcrab, excludeTasks=regex)
        else:
            process.addDatasetsFromMulticrab(opts.mcrab)

    # ================================================================================================
    # Selection customisations
    # ================================================================================================
    # allSelections is imported conditionally depending on the analysis type.
    if opts.analysisType == "HToTauNu":
        from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import allSelections
        # Disable rtau
        allSelections.TauSelection.prongs = 1
        allSelections.TauSelection.rtau = 0.0
    elif opts.analysisType == "HToTB":
        from HiggsAnalysis.NtupleAnalysis.parameters.hplus2tbAnalysis import allSelections
    else:
        raise Exception(
            "Invalid analysis selection \"%s\"! Valid options are: %s" %
            (opts.analysisType, ", ".join(allowedAnalysis)))

    # Jet cut values
    # NOTE(review): direct __setattr__ calls; plain attribute assignment would
    # normally be equivalent — confirm the PSet class does not special-case this.
    allSelections.__setattr__("jetPtCutMin", 0.0)
    allSelections.__setattr__("jetPtCutMax", 99990.0)
    allSelections.__setattr__("jetEtaCutMin", -2.5)
    allSelections.__setattr__("jetEtaCutMax", 2.5)
    # Disabled scan over b-tagging algorithm/working-point combinations; flip to 1 to re-enable.
    if 0:
        for algo in ["combinedInclusiveSecondaryVertexV2BJetTags"]:
            for wp in ["Loose", "Medium", "Tight"]:
                selections = allSelections.clone()
                selections.BJetSelection.bjetDiscr = algo
                selections.BJetSelection.bjetDiscrWorkingPoint = wp
                suffix = "_%s_%s" % (algo, wp)
                print "Added analyzer for algo/wp: %s" % suffix
                process.addAnalyzer(
                    "BTagEfficiency" + suffix,
                    Analyzer("BTagEfficiencyAnalysis",
                             config=selections,
                             silent=False))

    # Set the analysis type
    allSelections.__setattr__("AnalysisType", opts.analysisType)

    # Overwrite verbosity
    allSelections.verbose = opts.verbose

    # Overwrite histo ambient level (Options: Systematics, Vital, Informative, Debug)
    allSelections.histogramAmbientLevel = opts.histoLevel

    #================================================================================================
    # Build analysis modules
    #================================================================================================
    PrintOptions(opts)
    builder = AnalysisBuilder(
        prefix,
        dataEras,
        searchModes,
        #### Options ####
        usePUreweighting=opts.usePUreweighting,
        useTopPtReweighting=opts.useTopPtReweighting,
        doSystematicVariations=opts.doSystematics)

    # ================================================================================================
    # Add Analysis Variations
    # ================================================================================================
    builder.addVariation("BJetSelection.bjetDiscr",
                         ["pfCombinedInclusiveSecondaryVertexV2BJetTags"])
    builder.addVariation("BJetSelection.bjetDiscrWorkingPoint",
                         ["Loose", "Medium", "Tight"])

    # ================================================================================================
    # Build the builder
    # ================================================================================================
    builder.build(process, allSelections)

    # ================================================================================================
    # Run the analysis
    # ================================================================================================
    Print("Running process", True)
    process.run()
示例#12
0
    sys.exit(0)

#print sys.argv[1]

process.addDatasetsFromMulticrab(sys.argv[1])

import HiggsAnalysis.NtupleAnalysis.tools.aux as aux
# Location of the pileup-weight histograms shipped with the package.
PileupHistogramPath = os.path.join(aux.higgsAnalysisPath(), "NtupleAnalysis", "data", "PUWeights")

# Generator-level comparison analyzer with fixed offline cuts.
process.addAnalyzer("generatorComparison", 
    Analyzer("GeneratorComparison",
        histogramAmbientLevel = "Informative",
        tauPtCut = 41.0,
        tauEtaCut = 2.1,
        bjetEtCut = 30.0,
        bjetEtaCut = 2.4,

        # NOTE(review): presumably the 2012D run range and its integrated
        # luminosity (units not stated here) — confirm against the data set.
        lumi    = 7274,
        runMin  = 202807,
        runMax  = 208686,
    ),
    #includeOnlyTasks="TauPlusX_\S+_2012D_Jan22"
)

# Run the analysis
process.run()


# Run the analysis with PROOF
# By default it uses all cores, but you can give proofWorkers=<N> as a parameter
#process.run(proof=True)
示例#13
0
def main():
    """Build and run the analysis Process from the command-line options.

    Unformatted twin of the main() defined earlier in this file; relies on the
    same module-level names (opts, prefix, postfix, dataEras, searchModes,
    allowedAnalysis, Print, PrintOptions, Process, AnalysisBuilder, Analyzer, sys).
    """

    # Require at least two arguments (script-name, path to multicrab)
    if len(sys.argv) < 2:
        Print("Not enough arguments passed to script execution. Printing docstring & EXIT.")
        print __doc__
        sys.exit(0)
    else:
        pass


    # ================================================================================================
    # Setup the process
    # ================================================================================================
    maxEvents = {}
    maxEvents["All"] = opts.nEvts  # event cap applied under the "All" key
    process = Process(prefix, postfix, maxEvents)


    # ================================================================================================
    # Add the datasets (according to user options)
    # ================================================================================================
    if (opts.includeOnlyTasks):
        Print("Adding only dataset \"%s\" from multiCRAB directory %s" % (opts.includeOnlyTasks, opts.mcrab))
        process.addDatasetsFromMulticrab(opts.mcrab, includeOnlyTasks=opts.includeOnlyTasks)
    elif (opts.excludeTasks):
        Print("Adding all datasets except \"%s\" from multiCRAB directory %s" % (opts.excludeTasks, opts.mcrab))
        Print("If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc...")
        process.addDatasetsFromMulticrab(opts.mcrab, excludeTasks=opts.excludeTasks)
    else:
        # Default: add everything, minus a per-analysis blacklist.
        myBlackList = []
        if opts.analysisType == "HToTB":
            myBlackList = ["QCD_b", "ChargedHiggs", "DY", "WZ", "WW", "ZZ", "TTTT", "ST", "TTWJets", "TTZ"]

        Print("Adding all datasets from multiCRAB directory %s" % (opts.mcrab))
        Print("If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc...")
        # excludeTasks accepts a regex; OR together the blacklist entries.
        regex =  "|".join(myBlackList)
        if len(myBlackList)>0:
            process.addDatasetsFromMulticrab(opts.mcrab, excludeTasks=regex)
        else:
            process.addDatasetsFromMulticrab(opts.mcrab)

    # ================================================================================================
    # Selection customisations
    # ================================================================================================
    # allSelections is imported conditionally depending on the analysis type.
    if opts.analysisType == "HToTauNu":
        from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import allSelections
        # Disable rtau
        allSelections.TauSelection.prongs = 1
        allSelections.TauSelection.rtau = 0.0
    elif opts.analysisType == "HToTB":
        from HiggsAnalysis.NtupleAnalysis.parameters.hplus2tbAnalysis import allSelections
    else:
        raise Exception("Invalid analysis selection \"%s\"! Valid options are: %s" % (opts.analysisType, ", ".join(allowedAnalysis)))

    # Jet cut values
    # NOTE(review): direct __setattr__ calls; plain attribute assignment would
    # normally be equivalent — confirm the PSet class does not special-case this.
    allSelections.__setattr__("jetPtCutMin", 0.0)
    allSelections.__setattr__("jetPtCutMax", 99990.0)
    allSelections.__setattr__("jetEtaCutMin", -2.5)
    allSelections.__setattr__("jetEtaCutMax", 2.5)
    # Disabled scan over b-tagging algorithm/working-point combinations; flip to 1 to re-enable.
    if 0:
        for algo in ["combinedInclusiveSecondaryVertexV2BJetTags"]:
            for wp in ["Loose", "Medium", "Tight"]:
                selections = allSelections.clone()
                selections.BJetSelection.bjetDiscr = algo
                selections.BJetSelection.bjetDiscrWorkingPoint = wp
                suffix = "_%s_%s"%(algo,wp)
                print "Added analyzer for algo/wp: %s"%suffix
                process.addAnalyzer("BTagEfficiency"+suffix, Analyzer("BTagEfficiencyAnalysis", config=selections, silent=False))

    # Set the analysis type
    allSelections.__setattr__("AnalysisType", opts.analysisType)

    # Overwrite verbosity
    allSelections.verbose = opts.verbose

    # Overwrite histo ambient level (Options: Systematics, Vital, Informative, Debug)
    allSelections.histogramAmbientLevel = opts.histoLevel

    #================================================================================================
    # Build analysis modules
    #================================================================================================
    PrintOptions(opts)
    builder = AnalysisBuilder(prefix,
                              dataEras,
                              searchModes,
                              #### Options ####
                              usePUreweighting       = opts.usePUreweighting,
                              useTopPtReweighting    = opts.useTopPtReweighting,
                              doSystematicVariations = opts.doSystematics)

    # ================================================================================================
    # Add Analysis Variations
    # ================================================================================================
    builder.addVariation("BJetSelection.bjetDiscr", ["pfCombinedInclusiveSecondaryVertexV2BJetTags"])
    builder.addVariation("BJetSelection.bjetDiscrWorkingPoint", ["Loose", "Medium", "Tight"])

    # ================================================================================================
    # Build the builder
    # ================================================================================================
    builder.build(process, allSelections)

    # ================================================================================================
    # Run the analysis
    # ================================================================================================
    Print("Running process", True)
    process.run()