예제 #1
0
def main():
    """Configure and run the HToHW background analysis.

    Expects the path to a multicrab directory as the first positional
    command-line argument. Reads module-level configuration (prefix,
    blacklist, whitelist, dataEras, searchModes, allSelections).
    """
    # Require at least two arguments (script-name, path to multicrab)
    if len(sys.argv) < 2:
        Print(
            "Not enough arguments passed to script execution. Printing docstring & EXIT."
        )
        sys.exit(0)

    ###################
    ## SETUP THE PROCESS
    ###################

    # Empty dict => no per-dataset event limit (process all events)
    maxEvents = {}

    process = Process(prefix, maxEvents=maxEvents)

    ###################
    ## ADD DATASETS
    ###################

    process.addDatasetsFromMulticrab(sys.argv[1],
                                     blacklist=blacklist,
                                     whitelist=whitelist)

    # Enable genuine tau histograms for common plots (needed for calculating N_QCD)
    allSelections.CommonPlots.enableGenuineTauHistograms = True

    # Set splitting of phase space (first bin is below first edge value and last bin is above last edge value)
    allSelections.CommonPlots.histogramSplitting = [
        PSet(label="tauPt_1", binLowEdges=[40, 60], useAbsoluteValues=False),
        PSet(label="decayMode_1", binLowEdges=[2, 3], useAbsoluteValues=False),
        PSet(label="tauPt_2",
             binLowEdges=[20, 40, 60],
             useAbsoluteValues=False),
        PSet(label="decayMode_2",
             binLowEdges=[1, 2, 3],
             useAbsoluteValues=False),
    ]

    ###################
    ## BUILD ANALYSIS MODULES
    ###################

    builder = AnalysisBuilder(
        prefix,
        dataEras,
        searchModes,
        ### OPTIONS ###
        usePUreweighting=True,
        useTopPtReweighting=False,
        doSystematicVariations=True,
        analysisType="HToHW_background")

    builder.build(process, allSelections)

    process.run()
예제 #2
0
def _setupQGL(jsonname):
    """Read a QGL (quark-gluon likelihood ratio) json file into PSets.

    \param jsonname  Name of the json file under NtupleAnalysis/data/QGLR.
    \return list of PSet objects, one per row of the json file.
    \raises Exception if the file does not exist.
    """
    # Build the full path to the json file
    _jsonpath = os.path.join(os.getenv("HIGGSANALYSIS_BASE"), "NtupleAnalysis",
                             "data", "QGLR")
    filename = os.path.join(_jsonpath, jsonname)

    if not os.path.exists(filename):
        raise Exception("Error: file '%s' does not exist!" % filename)

    # Context manager guarantees the file is closed even on a parse error
    with open(filename) as f:
        contents = json.load(f)

    # Loop over the contents to convert as list of PSets the requested information
    psetList = []
    for row in contents:
        p = PSet(
            jetType=row["Jet"],
            prob=float(row["prob"]),
            probError=float(row["probError"]),
            QGLmin=float(row["QGLmin"]),
            QGLmax=float(row["QGLmax"]),
            Ptmin=float(row["Ptmin"]),
            Ptmax=float(row["Ptmax"]),
        )
        psetList.append(p)

    return psetList
예제 #3
0
def _setupBtagEfficiency(btagPset, btagEfficiencyFilename, direction,
                         variationInfo):
    """Attach b-tagging efficiency PSets (read from json) to btagPset.

    Only rows whose discriminator and working point match the ones
    configured in btagPset are kept; the result is stored in
    btagPset.btagEfficiency (empty list if nothing matches).
    \raises Exception if the json file does not exist.
    """
    fullname = os.path.join(os.getenv("HIGGSANALYSIS_BASE"), "NtupleAnalysis",
                            "data", btagEfficiencyFilename)
    if not os.path.exists(fullname):
        raise Exception(
            "Error: Could not find the btag efficiency json file! (tried: %s)"
            % fullname)
    # Read the json file; context manager guarantees the file is closed
    with open(fullname) as f:
        contents = json.load(f)
    # Use the builtin getattr (falls through to PSet.__getattr__ anyway)
    # and hoist the lookups out of the loop
    discr = getattr(btagPset, "bjetDiscr")
    workingPoint = getattr(btagPset, "bjetDiscrWorkingPoint")
    # Loop over the contents to convert as list of PSets the requested information
    psetList = []
    for row in contents:
        # Require that the btag discriminator and working points match
        if row["discr"] == discr and row["workingPoint"] == workingPoint:
            p = PSet(jetFlavor=row["flavor"],
                     ptMin=row["ptMin"],
                     ptMax=row["ptMax"],
                     eff=float(row["eff"]),
                     effDown=float(row["effDown"]),
                     effUp=float(row["effUp"]))
            psetList.append(p)
    btagPset.btagEfficiency = psetList
예제 #4
0
def createAnalyzer(dataVersion, era):
    """Build the TriggerEfficiency Analyzer for the given data version and era.

    Uses module-level configuration (leg, tauThreshold, binning, labels,
    runmin/runmax). The run range is applied only to collision data.
    """
    a = Analyzer(
        "TriggerEfficiency",
        name=era,
        Trigger=PSet(triggerOR=[], triggerOR2=[]),
        usePileupWeights=True,
        offlineSelection=leg,
        MuonSelection=PSet(
            #            discriminators = ["muIDMedium"],
            #            discriminators = ["TrgMatch_IsoMu20_eta2p1"],
            #            discriminators = ["Muons_TrgMatch_IsoMu16_eta2p1"],
        ),
        TauSelection=PSet(
            discriminators=[
                "byLooseCombinedIsolationDeltaBetaCorr3Hits",  #"byMediumIsolationMVA3newDMwLT",
                "againstMuonTight3",
                "againstElectronMediumMVA6"
            ],
            nprongs=1,
            relaxedOfflineSelection=False,
        ),
        binning=binning,
        xLabel=xLabel,
        yLabel=yLabel,
    )

    # The 2016 trigger configuration is identical for data and MC
    # (the original duplicated this block in both branches)
    if "2016" in era:
        a.Trigger.triggerOR = ["HLT_IsoMu22_eta2p1_vx"]
        a.Trigger.triggerOR2 = [
            "HLT_VLooseIsoPFTau" + tauThreshold + "_Trk50_eta2p1_vx"
        ]
        if tauThreshold == "50":
            a.Trigger.triggerOR2 = ["HLT_LooseIsoPFTau50_Trk30_eta2p1_vx"]

    # Run range is applied only to collision data
    if isData(dataVersion):
        a.runMin = runmin
        a.runMax = runmax

    return a
예제 #5
0
def pileupWeight(data=None, mc=None, enabled=None):
    """Build the pileup-weighting PSet.

    When 'enabled' is left as None, weighting is switched on automatically
    if both 'data' and 'mc' eras are given, otherwise it stays disabled.
    """
    # Auto-enable only when the caller supplied both eras
    if enabled is None:
        enabled = data is not None and mc is not None

    pset = PSet(enabled=enabled)
    if not enabled:
        return pset

    # Both era names are mandatory once weighting is enabled
    if data is None:
        raise Exception("If pileupWeight is enabled, must give parameter 'data' for the data era")
    if mc is None:
        raise Exception("If pileupWeight is enabled, must give parameter 'mc' for the MC era")

    dataHisto = os.path.join(_pileupHistogramPath, "PileupHistogramData"+data+".root")
    mcHisto = os.path.join(_pileupHistogramPath, "PileupHistogramMC"+mc+".root")
    pset.data = File(dataHisto)
    pset.mc = File(mcHisto)
    return pset
예제 #6
0
def _assignTrgSF(name, binEdges, SF, SFup, SFdown, pset, direction):
    """Attach trigger scale factors for the requested variation to pset.

    \param name       Attribute name to set on pset.
    \param binEdges   Left edges of the bins.
    \param SF/SFup/SFdown  Nominal/up/down scale factors (parallel to binEdges).
    \param pset       PSet modified in place.
    \param direction  One of "nominal", "up", "down".
    \raises Exception for an unknown direction.
    """
    if direction not in ["nominal", "up", "down"]:
        raise Exception("Error: unknown option for SF direction('%s')!" %
                        direction)
    # Copy the list matching the variation so the caller's lists are
    # never shared with the stored PSet
    myScaleFactors = SF[:]
    if direction == "up":
        myScaleFactors = SFup[:]
    elif direction == "down":
        myScaleFactors = SFdown[:]
    setattr(pset, name,
            PSet(binLeftEdges=binEdges[:], scaleFactors=myScaleFactors))
예제 #7
0
def pileupWeight(data=None, mc=None, enabled=None):
    """Construct the pileup-weighting PSet.

    Leaving 'enabled' as None enables weighting exactly when both the
    'data' and 'mc' eras are supplied; otherwise it stays disabled.
    """
    if enabled is None:
        # Enable automatically only if both eras were provided
        enabled = data is not None and mc is not None

    pset = PSet(enabled=enabled)
    if not enabled:
        return pset

    # Era names are mandatory once enabled
    if data is None:
        raise Exception(
            "If pileupWeight is enabled, must give parameter 'data' for the data era"
        )
    if mc is None:
        raise Exception(
            "If pileupWeight is enabled, must give parameter 'mc' for the MC era"
        )

    dataHisto = os.path.join(_pileupHistogramPath,
                             "PileupHistogramData" + data + ".root")
    mcHisto = os.path.join(_pileupHistogramPath,
                           "PileupHistogramMC" + mc + ".root")
    pset.data = File(dataHisto)
    pset.mc = File(mcHisto)
    return pset
예제 #8
0
def main():
    """Set up and run the HToTB analysis modules on a multicrab directory.

    The multicrab directory path must be given as the first positional
    command-line argument; further behaviour is steered by the parsed
    option object `opts` and module-level settings (prefix, postfix,
    dataEras, searchModes).
    """
    # Save start time (epoch seconds)
    tStart = time.time()
    Verbose("Started @ " + str(tStart), True)

    # Require at least two arguments (script-name, path to multicrab)
    if len(sys.argv) < 2:
        Print(
            "Not enough arguments passed to script execution. Printing docstring & EXIT."
        )
        print(__doc__)
        sys.exit(0)

    # ================================================================================================
    # Setup the process
    # ================================================================================================
    # Per-dataset event limits. "All" applies to every dataset; individual
    # datasets can be limited by task name, e.g. maxEvents["TT"] = 1
    maxEvents = {}
    maxEvents["All"] = opts.nEvts
    process = Process(prefix, postfix, maxEvents)

    # ================================================================================================
    # Add the datasets (according to user options)
    # ================================================================================================
    if (opts.includeOnlyTasks):
        Verbose("Adding only dataset %s from multiCRAB directory %s" %
                (opts.includeOnlyTasks, opts.mcrab))
        process.addDatasetsFromMulticrab(
            opts.mcrab, includeOnlyTasks=opts.includeOnlyTasks)
    elif (opts.excludeTasks):
        Verbose("Adding all datasets except %s from multiCRAB directory %s" %
                (opts.excludeTasks, opts.mcrab))
        Print(
            "If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc..."
        )
        process.addDatasetsFromMulticrab(opts.mcrab,
                                         excludeTasks=opts.excludeTasks)
    else:
        # Default: exclude the signal mass points and QCD samples
        myBlackList = [
            "M_180", "M_200", "M_220", "M_250", "M_300", "M_350", "M_400",
            "M_500", "M_650", "M_800", "M_1000", "M_1500", "M_2000", "M_2500",
            "M_3000", "M_5000", "M_7000", "M_10000", "QCD"
        ]
        Verbose("Adding all datasets from multiCRAB directory %s except %s" %
                (opts.mcrab, (",".join(myBlackList))))
        Verbose(
            "Vertex reweighting is done according to the chosen data era (%s)"
            % (",".join(dataEras)))
        if len(myBlackList) > 0:
            # excludeTasks takes a regular expression; OR all blacklisted names
            regex = "|".join(myBlackList)
            process.addDatasetsFromMulticrab(opts.mcrab, excludeTasks=regex)
        else:
            process.addDatasetsFromMulticrab(opts.mcrab)

    # ================================================================================================
    # Overwrite Default Settings
    # ================================================================================================
    from HiggsAnalysis.NtupleAnalysis.parameters.hplus2tbAnalysis import allSelections

    allSelections.verbose = opts.verbose
    allSelections.histogramAmbientLevel = opts.histoLevel

    # Set splitting of phase-space (first bin is below first edge value and
    # last bin is above last edge value). Binning per AN v4.
    allSelections.CommonPlots.histogramSplitting = [
        PSet(label="TetrajetBjetEta",
             binLowEdges=[0.4, 0.8, 1.6, 2.0, 2.2],
             useAbsoluteValues=True),
    ]

    # ================================================================================================
    # Build analysis modules
    # ================================================================================================
    PrintOptions(opts)
    builder = AnalysisBuilder(prefix,
                              dataEras,
                              searchModes,
                              usePUreweighting=opts.usePUreweighting,
                              useTopPtReweighting=opts.useTopPtReweighting,
                              doSystematicVariations=opts.doSystematics,
                              analysisType="HToTB",
                              verbose=opts.verbose)

    # Optimisation scans can be added here with
    # builder.addVariation("<PSet>.<parameter>", [value1, value2, ...])

    # Build the builder
    builder.build(process, allSelections)

    # ================================================================================================
    # Run the analysis
    # ================================================================================================
    # Run the analysis with PROOF? You can give proofWorkers=<N> as a parameter
    if opts.jCores:
        Print("Running process with PROOF (proofWorkes=%s)" %
              (str(opts.jCores)))
        process.run(proof=True, proofWorkers=opts.jCores)
    else:
        Print("Running process (no PROOF)")
        process.run()

    # Print total time elapsed
    tFinish = time.time()
    dt = int(tFinish) - int(tStart)
    days, rem = divmod(dt, 86400)
    hours, rem = divmod(rem, 3600)
    mins, secs = divmod(rem, 60)
    Print(
        "Total elapsed time is %s days, %s hours, %s mins, %s secs" %
        (days, hours, mins, secs), True)
    return
예제 #9
0
from HiggsAnalysis.NtupleAnalysis.main import PSet
import HiggsAnalysis.NtupleAnalysis.parameters.scaleFactors as scaleFactors

##########
## General parameters
##########

##########
## Trigger
##########

# Trigger configuration; leg efficiencies are read from the named json file
trg = PSet(
    # No need to specify version numbers, they are automatically scanned in range 1--100 (remove the '_v' suffix)
    MuontriggerEfficiencyJsonName="muonPAGEff.json",
    #  METtriggerEfficiencyJsonName = "metLegTriggerEfficiency_2016_MET90_fit.json",
    #  L1ETM = 80,
    triggerOR=["HLT_Ele27_eta2p1_WPTight_Gsf"],
    triggerOR2=[],
)

##########
## MET filter
##########

metFilter = PSet(
    discriminators=[  #"hbheNoiseTokenRun2Loose", # Loose is recommended
        #                    "hbheIsoNoiseToken", # under scrutiny
        "Flag_HBHENoiseFilter",
        "Flag_HBHENoiseIsoFilter",
        "Flag_EcalDeadCellTriggerPrimitiveFilter",
        #                    "Flag_CSCTightHaloFilter",
    print "Usage: ./QCDMeasurementAnalysis.py <path-to-multicrab-directory> <1pr> <2pr> <3pr>"
    sys.exit(0)

from HiggsAnalysis.NtupleAnalysis.main import Process, PSet, Analyzer
from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import obtainAnalysisSuffix
# Create the process; the analysis suffix is derived from the command line
process = Process("QCDMeasurement" + obtainAnalysisSuffix(sys.argv))
# First positional argument is the multicrab directory; signal samples are excluded
process.addDatasetsFromMulticrab(sys.argv[1], blacklist=["ChargedHiggs"])

# Add config
from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import allSelections, applyAnalysisCommandLineOptions, setAngularCutsWorkingPoint
# Enable genuine tau histograms for common plots (needed for calculating N_QCD)
allSelections.CommonPlots.enableGenuineTauHistograms = True
# Set splitting of phase space (first bin is below first edge value and last bin is above last edge value)
allSelections.CommonPlots.histogramSplitting = [
    PSet(label="tauPt",
         binLowEdges=[60.0, 80.0, 100.0],
         useAbsoluteValues=False),
]
#===== Selection customisations
allSelections.TauSelection.prongs = 1
allSelections.TauSelection.tauPtCut = 50.0
allSelections.METSelection.METCutValue = 90.0
# Back-to-back angular cuts for up to four jets -- presumably degrees;
# confirm against the AngularCuts implementation
allSelections.AngularCutsBackToBack.cutValueJet1 = 40.0
allSelections.AngularCutsBackToBack.cutValueJet2 = 40.0
allSelections.AngularCutsBackToBack.cutValueJet3 = 40.0
allSelections.AngularCutsBackToBack.cutValueJet4 = 40.0
allSelections.TauSelection.rtau = 0.7
allSelections.BJetSelection.bjetDiscrWorkingPoint = "Medium"

#allSelections.AngularCutsCollinear.cutValueJet1 = 80.0
#allSelections.AngularCutsCollinear.cutValueJet2 = 80.0
예제 #11
0
def _setupToptagEfficiency(topTagPset, topTagEfficiencyFilename, direction,
                           variationInfo):
    '''
    Helper function accessed through setupToptagSFInformation

    Reads data and MC top-tag efficiencies from a json file, calculates the
    per-pT-bin scale factor SF = Eff(Data)/Eff(MC) with symmetrised up/down
    variations, and stores the result as a list of PSets in
    topTagPset.topTagEfficiency.
    '''
    runrange = "runs_273150_284044"  #fixme
    era = "2016"  #fixme
    fileName = topTagEfficiencyFilename
    fullname = os.path.join(os.getenv("HIGGSANALYSIS_BASE"), "NtupleAnalysis",
                            "data", fileName)
    if not os.path.exists(fullname):
        raise Exception(
            "Could not find the top-tag efficiency json file! (tried: %s)" %
            fullname)

    # Read the json file; context manager guarantees the file is closed
    Print(
        "Opening file \"%s\" for reading the top-tag efficiencies" %
        (fullname), True)
    with open(fullname) as f:
        contents = json.load(f)

    # Obtain data efficiencies
    param = "dataParameters"
    if param not in contents.keys():
        raise Exception(
            "Missing key '%s' in json '%s'! Options: %s" %
            (param, fileName, ", ".join(map(str, contents.keys()))))
    if runrange not in contents[param].keys():
        # NOTE: the original code was missing the '%' operator here, which
        # would have raised "TypeError: 'str' object is not callable"
        # instead of the intended message
        raise Exception(
            "Missing run range '%s' for data in json '%s'! Options: %s" %
            (runrange, fileName, ", ".join(map(str,
                                               contents[param].keys()))))
    datadict = readValues(contents[param][runrange], "data")

    # Obtain MC efficiencies
    param = "mcParameters"
    if param not in contents.keys():
        raise Exception(
            "Missing key '%s' in json '%s'! Options: %s" %
            (param, fileName, ", ".join(map(str, contents.keys()))))
    if era not in contents[param].keys():
        # Same missing-'%' bug fixed here; also report the era (the original
        # interpolated runrange into the message by mistake)
        raise Exception(
            "Error: missing era '%s' for mc in json '%s'! Options: %s" %
            (era, fileName, ", ".join(map(str, contents[param].keys()))))
    mcdict = readValues(contents[param][era], "mc")

    # Calculate the SF = Eff(Data)/Eff(MC)
    keys = sorted(datadict.keys())  # works in both py2 and py3
    if len(keys) != len(mcdict.keys()):
        raise Exception(
            "Different number of bins for data and mc in json '%s'!" %
            fileName)

    result = {}
    result["binEdges"] = []
    result["SF"] = []
    result["SFUp"] = []
    result["SFDown"] = []
    psetList = []

    # For-loop: All keys (sorted bin lower edges)
    for i, pT in enumerate(keys):

        # First edge is implicit (underflow bin)
        if i > 0:
            result["binEdges"].append(pT)

        pTMin = pT
        if i == len(keys) - 1:
            pTMax = 100000.0  # overflow bin (fixme?)
        else:
            pTMax = keys[i + 1]

        # Get the efficiencies and their errors
        effData = datadict[pT]["dataeff"]
        effDataUp = datadict[pT]["dataeffup"]
        effDataDown = datadict[pT]["dataeffdown"]
        effMC = mcdict[pT]["mceff"]
        effMCUp = mcdict[pT]["mceffup"]
        effMCDown = mcdict[pT]["mceffdown"]

        # Define the Scale Factor (SF) as: SF = Eff_Data / Eff_MC
        sf = effData / effMC
        sfUp = effDataUp / effMC
        dsf = (sfUp - sf)
        sfDown = sf - dsf  # gives symmetric shape
        #sfDown = effDataDown / effMC  # gives asymmetric shape
        Verbose(
            "pT = %.1f, sf = %0.3f, sf+ = %0.3f, sf- = %0.3f" %
            (pT, sf, sfUp, sfDown), i == 0)

        result["SF"].append(sf)
        result["SFUp"].append(sfUp)
        result["SFDown"].append(sfDown)
        if abs(mcdict[pT]["mceffdown"]) < 0.00001:
            raise Exception("Down variation in bin '%s' is zero in json '%s'" %
                            (pT, fileName))

        # Sanity check: a (near-)zero SF indicates a broken json
        if result["SF"][-1] < 0.00001:
            raise Exception("In file '%s' bin %s the SF is zero! Please fix!" %
                            (fileName, pT))

        # Define the PSet
        p = PSet(
            ptMin=pTMin,
            ptMax=pTMax,
            effMC=effMC,
            effMCUp=effMCUp,
            effMCDown=effMCDown,
            effData=effData,
            effDataUp=effDataUp,
            effDataDown=effDataDown,
            sf=sf,
            sfUp=sfUp,
            sfDown=sfDown,
        )
        psetList.append(p)

    # Store the full list once (the original re-assigned it every iteration)
    topTagPset.topTagEfficiency = psetList
    return
예제 #12
0
def _setupToptagEffUncertainties(topTagPset, topTagEffUncertaintiesFilename,
                                 direction, variationInfo):
    '''
    Helper function accessed through setupToptagSFInformation

    Reads per-pT-bin top-tagging efficiency uncertainties from a json file,
    adds the listed systematic sources in quadrature and stores the result
    (symmetric up/down) as a list of PSets in
    topTagPset.topTagEffUncertainties.
    '''
    fullname = os.path.join(os.getenv("HIGGSANALYSIS_BASE"), "NtupleAnalysis",
                            "data", topTagEffUncertaintiesFilename)
    if not os.path.exists(fullname):
        raise Exception(
            "Could not find the top-tagging eff. uncertainties file! (tried: %s)"
            % fullname)

    # Read the json file; context manager guarantees the file is closed
    Print(
        "Opening file \"%s\" for reading the top-tag efficiency ucertainties" %
        (fullname), True)
    with open(fullname) as f:
        contents = json.load(f)

    # Obtain uncertainties (Use top-mass systematic only once! Take maximum deviation (31 July 2018)
    params = [
        "TT_hdampUP", "TT_fsrdown", "TT_fsrup", "TT_isrdown", "TT_TuneEE5C",
        "TT_hdampDOWN", "TT_evtgen", "TT_isrup", "TT_mtop1665", "matching"
    ]

    # Sanity check: every systematic source must be present in the json
    for param in params:
        if param not in contents.keys():
            raise Exception(
                "Missing key '%s' in json '%s'! Options: %s" %
                (param, fullname, ", ".join(map(str, contents.keys()))))

    psetList = []
    firstBins = contents[params[0]]["bins"]

    # For-loop: all pT bins (bin structure taken from the first source)
    for i in range(len(firstBins)):

        pt = firstBins[i]["pt"]

        # Add the uncertainties of all sources in quadrature
        dSF2 = sum(contents[param]["bins"][i]["uncertainty"]**2
                   for param in params)
        dSF = math.sqrt(dSF2)

        # Find pTMin, pTMax (last bin acts as overflow)
        pTMin = pt
        if i == len(firstBins) - 1:
            pTMax = 100000.0  # overflow bin (fixme?)
        else:
            pTMax = firstBins[i + 1]["pt"]

        # Symmetric up/down variation
        p = PSet(
            ptMin=pTMin,
            ptMax=pTMax,
            dsfUp=dSF,
            dsfDown=-dSF,
        )
        psetList.append(p)

    # Save the PSet
    topTagPset.topTagEffUncertainties = psetList
    return
예제 #13
0
    btagPayloadFilename="CSVv2.csv",
    btagEfficiencyFilename="btageff_HToTB.json",
    direction="nominal")

#fakeBTopSelectionBDT = hplus2tbAnalysis.fakeBTopSelectionBDT

# Reuse the fake-b measurement settings from the hplus2tbAnalysis parameters
fakeBMeasurement = hplus2tbAnalysis.fakeBMeasurement

# Selection for the top-BDT systematics measurement.
# NOTE(review): cut values are given as strings here -- confirm the consumer
# parses them (other PSets in this file use numeric literals).
systTopBDTSelection = PSet(
    MuTrijetDRCutValue="2.0",
    MuTrijetDRCutDirection=">",
    MuBJetDRCutValue="1.5",
    MuBJetDRCutDirection="<=",
    MiniIsoCutValue="0.1",
    MiniIsoCutDirection="<=",
    MiniIsoInvCutValue="0.1",
    MiniIsoInvCutDirection=">",
    METCutValue="50",
    METCutDirection=">=",
    METInvCutValue="20",
    METInvCutDirection="<",
    MVACutValue="0.4",
    MVACutDirection=">=",
)

#================================================================================================
# Common plots options
#================================================================================================
commonPlotsOptions = hplus2tbAnalysis.commonPlotsOptions

#================================================================================================
# Build all selections group
예제 #14
0
import HiggsAnalysis.NtupleAnalysis.parameters.scaleFactors as scaleFactors
import HiggsAnalysis.NtupleAnalysis.parameters.jsonReader as jsonReader

#================================================================================================
# General parameters
#================================================================================================
verbose = True
# Controls which histograms are produced (increasing verbosity left-to-right).
histogramAmbientLevel = "Debug"  # (options: "Systematics", "Vital", "Informative", "Debug")

#================================================================================================
# Trigger [scanned in range _v1--_v100 (=>remove the '_v' suffix)]
#================================================================================================
# triggerOR lists HLT paths, presumably OR'd together by the framework;
# triggerOR2 is a second, independent group (unused here).
trigger = PSet(
    triggerOR=[
        "HLT_PFHT400_SixJet30_DoubleBTagCSV_p056",
        "HLT_PFHT450_SixJet40_BTagCSV_p056",
        "HLT_PFJet450",  #for trg eff recovery in 2016H
    ],
    triggerOR2=[],
)

#================================================================================================
# MET filter
#================================================================================================
# Standard 2016 MET cleaning filters (event quality flags).
metFilter = PSet(discriminators=[
    "Flag_HBHENoiseFilter", "Flag_HBHENoiseIsoFilter",
    "Flag_EcalDeadCellTriggerPrimitiveFilter", "Flag_eeBadScFilter",
    "Flag_goodVertices", "Flag_globalTightHalo2016Filter", "badPFMuonFilter",
    "badChargedCandidateFilter"
])

#================================================================================================
예제 #15
0
def main():
    """Configure and run the hplus2tb analysis on a multiCRAB directory.

    The multiCRAB path and all run settings come from the module-level
    option container 'opts' (parsed outside this function); sys.argv is
    inspected only to verify that a path argument was supplied at all.
    """

    # Require at least two arguments (script-name, path to multicrab)
    if len(sys.argv) < 2:
        Print(
            "Not enough arguments passed to script execution. Printing docstring & EXIT."
        )
        print __doc__
        sys.exit(0)
    else:
        pass

    # ================================================================================================
    # Setup the process
    # ================================================================================================
    # Event-count limits keyed by dataset name; "All" presumably caps every
    # dataset. The commented entries below document per-dataset caps used
    # while debugging individual samples.
    maxEvents = {}
    maxEvents["All"] = opts.nEvts
    # maxEvents["2016"] = 1
    # maxEvents["ZZTo4Q"] = -1
    # maxEvents["ZJetsToQQ_HT600toInf"] = 1
    # maxEvents["WZ_ext1"] = 1
    # maxEvents["WZ"] = 1
    # maxEvents["WWTo4Q"] = 1
    # maxEvents["WJetsToQQ_HT_600ToInf"] = 1
    # maxEvents["TTZToQQ"] = 1
    # maxEvents["TTWJetsToQQ"] = 1
    # maxEvents["TTTT"] = 1
    # maxEvents["TT"] = 1
    # maxEvents["ST_t_channel_top_4f_inclusiveDecays"] = 1
    # maxEvents["ST_t_channel_antitop_4f_inclusiveDecays"] = 1
    # maxEvents["ST_tW_top_5f_inclusiveDecays_ext1"] = 1
    # maxEvents["ST_tW_top_5f_inclusiveDecays"] = 1
    # maxEvents["ST_tW_antitop_5f_inclusiveDecays_ext1"] = 1
    # maxEvents["ST_tW_antitop_5f_inclusiveDecays"] = 1
    # maxEvents["ST_s_channel_4f_InclusiveDecays"] = 1
    # maxEvents["QCD_HT700to1000_ext1"] = 1
    # maxEvents["QCD_HT700to1000"] = 1
    # maxEvents["QCD_HT50to100"] = 1
    # maxEvents["QCD_HT500to700_ext1"] = 1
    # maxEvents["QCD_HT500to700"] = 1
    # maxEvents["QCD_HT300to500_ext1"] = 1
    # maxEvents["QCD_HT300to500"] = 1
    # maxEvents["QCD_HT200to300_ext1"] = 1
    # maxEvents["QCD_HT200to300"] = 1
    # maxEvents["QCD_HT2000toInf_ext1"] = 1
    # maxEvents["QCD_HT2000toInf"] = 1
    # maxEvents["QCD_HT1500to2000_ext1"] = 1
    # maxEvents["QCD_HT1500to2000"] = 1
    # maxEvents["QCD_HT100to200"] = 1
    # maxEvents["QCD_HT1000to1500_ext1"] = 1
    # maxEvents["QCD_HT1000to1500"] = 1
    # maxEvents["JetHT_Run2016H_03Feb2017_ver3_v1_284036_284044"] = 1
    # maxEvents["JetHT_Run2016H_03Feb2017_ver2_v1_281613_284035"] = 1
    # maxEvents["JetHT_Run2016G_03Feb2017_v1_278820_280385"] = 1
    # maxEvents["JetHT_Run2016F_03Feb2017_v1_278801_278808"] = 1
    # maxEvents["JetHT_Run2016F_03Feb2017_v1_277932_278800"] = 1
    # maxEvents["JetHT_Run2016E_03Feb2017_v1_276831_277420"] = 1
    # maxEvents["JetHT_Run2016D_03Feb2017_v1_276315_276811"] = 1
    # maxEvents["JetHT_Run2016C_03Feb2017_v1_275656_276283"] = 1
    # maxEvents["JetHT_Run2016B_03Feb2017_ver2_v2_273150_275376"] = 1
    # maxEvents["DYJetsToQQ_HT180"] = 1
    # maxEvents["ChargedHiggs_HplusTB_HplusToTB_M_500"] = 1
    process = Process(prefix, postfix, maxEvents)

    # ================================================================================================
    # Add the datasets (according to user options)
    # ================================================================================================
    if (opts.includeOnlyTasks):
        Print("Adding only dataset %s from multiCRAB directory %s" %
              (opts.includeOnlyTasks, opts.mcrab))
        process.addDatasetsFromMulticrab(
            opts.mcrab, includeOnlyTasks=opts.includeOnlyTasks)
    elif (opts.excludeTasks):
        Print("Adding all datasets except %s from multiCRAB directory %s" %
              (opts.excludeTasks, opts.mcrab))
        Print(
            "If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc..."
        )
        process.addDatasetsFromMulticrab(opts.mcrab,
                                         excludeTasks=opts.excludeTasks)
    else:
        # Default: take everything except heavy/unused signal mass points and QCD.
        myBlackList = [
            "M_180", "M_2500", "M_3000", "M_5000", "M_7000", "M_10000", "QCD"
        ]
        Print("Adding all datasets from multiCRAB directory %s except %s" %
              (opts.mcrab, (",".join(myBlackList))))
        Print(
            "Vertex reweighting is done according to the chosen data era (%s)"
            % (",".join(dataEras)))
        # process.addDatasetsFromMulticrab(opts.mcrab, blacklist=myBlackList)
        if len(myBlackList) > 0:
            # Exclude all blacklisted tasks via a single regex alternation.
            regex = "|".join(myBlackList)
            process.addDatasetsFromMulticrab(opts.mcrab, excludeTasks=regex)
        else:
            process.addDatasetsFromMulticrab(opts.mcrab)

    # ================================================================================================
    # Overwrite Default Settings
    # ================================================================================================
    from HiggsAnalysis.NtupleAnalysis.parameters.hplus2tbAnalysis import allSelections

    allSelections.verbose = opts.verbose
    allSelections.histogramAmbientLevel = opts.histoLevel

    # Set splitting of phase-space (first bin is below first edge value and last bin is above last edge value)
    allSelections.CommonPlots.histogramSplitting = [
        PSet(label="TetrajetBjetEta",
             binLowEdges=[0.4, 1.2, 1.8],
             useAbsoluteValues=True),
        #PSet(label="TetrajetBjetEta", binLowEdges=[0.4, 1.2], useAbsoluteValues=True),
        #PSet(label="TetrajetBjetEta", binLowEdges=[1.2], useAbsoluteValues=True),
        #PSet(label="TetrajetBjetEta", binLowEdges=[1.3], useAbsoluteValues=True), # good but perhaps limit to 1.2
        #PSet(label="TetrajetBjetEta", binLowEdges=[0.6, 1.4], useAbsoluteValues=True), #middle bin is bad
    ]

    # allSelections.BJetSelection.triggerMatchingApply = True # at least 1 trg b-jet matched to offline b-jets
    # allSelections.Trigger.triggerOR = ["HLT_PFHT400_SixJet30_DoubleBTagCSV_p056"]
    # allSelections.Trigger.triggerOR = ["HLT_PFHT450_SixJet40_BTagCSV_p056"]

    # ================================================================================================
    # Command Line Options
    # ================================================================================================
    # from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import applyAnalysisCommandLineOptions
    # applyAnalysisCommandLineOptions(sys.argv, allSelections)

    # ================================================================================================
    # Build analysis modules
    # ================================================================================================
    PrintOptions(opts)
    builder = AnalysisBuilder(prefix,
                              dataEras,
                              searchModes,
                              usePUreweighting=opts.usePUreweighting,
                              useTopPtReweighting=opts.useTopPtReweighting,
                              doSystematicVariations=opts.doSystematics)

    # Add variations (e.g. for optimisation)
    # builder.addVariation("BJetSelection.triggerMatchingApply", [True, False]) # At least 1 trg b-jet dR-matched to offline b-jets
    # builder.addVariation("TestQGLR.prelimTopFitChiSqrCutValue", [100, 20])
    # builder.addVariation("TestQGLR.prelimTopFitChiSqrCutDirection", ["<=", "==", ">="])
    # builder.addVariation("TestQGLR.numberOfBJetsCutValue", [0, 1])
    # builder.addVariation("TestQGLR.numberOfBJetsCutDirection", ["=="])
    # builder.addVariation("TestQGLR.numberOfBJetsCutDirection", ["<=", "==", ">="])
    # builder.addVariation("TestQGLR.numberOfInvertedBJetsCutValue", [0, 1])
    # builder.addVariation("TestQGLR.numberOfInvertedBJetsCutDirection", [">="])
    # builder.addVariation("TestQGLR.invertedBJetDiscr", "")
    # builder.addVariation("TestQGLR.invertedBJetDiscrWorkingPoint", "Loose")
    # builder.addVariation("TestQGLR.invertedBJetsSortType", ["Random", "DescendingBDiscriminator"])
    # builder.addVariation("TestQGLR.invertedBJetsDiscrMaxCutValue", [0.82, 0.80, 0.75, 0.70])
    # builder.addVariation("TopSelection.ChiSqrCutValue", [100])
    # builder.addVariation("Trigger.triggerOR", [["HLT_PFHT450_SixJet40"], ["HLT_PFHT400_SixJet30"]])
    # builder.addVariation("TopologySelection.FoxWolframMomentCutValue", [0.5, 0.7])
    # builder.addVariation("Trigger.triggerOR", [["HLT_PFHT400_SixJet30_DoubleBTagCSV_p056"], ["HLT_PFHT450_SixJet40_BTagCSV_p056"]])
    # builder.addVariation("Trigger.triggerOR", [["HLT_PFHT400_SixJet30_DoubleBTagCSV_p056", "HLT_PFHT450_SixJet40_BTagCSV_p056"]])

    # Build the builder
    builder.build(process, allSelections)

    # ================================================================================================
    # Example of adding an analyzer whose configuration depends on dataVersion
    # ================================================================================================
    # def createAnalyzer(dataVersion):
    # a = Analyzer("ExampleAnalysis")
    # if dataVersion.isMC():
    # a.tauPtCut = 10
    # else:
    # a.tauPtCut = 20
    # return a
    # process.addAnalyzer("test2", createAnalyzer)

    # ================================================================================================
    # Pick events
    # ================================================================================================
    # process.addOptions(EventSaver = PSet(enabled = True,pickEvents = True))

    # ================================================================================================
    # Run the analysis
    # ================================================================================================
    # Run the analysis with PROOF? You can give proofWorkers=<N> as a parameter
    if opts.jCores:
        Print("Running process with PROOF (proofWorkes=%s)" %
              (str(opts.jCores)))
        process.run(proof=True, proofWorkers=opts.jCores)
    else:
        Print("Running process (no PROOF)")
        process.run()

    return
예제 #16
0
def main():

    # Save start time (epoch seconds)
    tStart = time.time()
    Verbose("Started @ " + str(tStart), True)

    # Require at least two arguments (script-name, path to multicrab)
    if len(sys.argv) < 2:
        Print(
            "Not enough arguments passed to script execution. Printing docstring & EXIT."
        )
        print __doc__
        sys.exit(0)
    else:
        pass

    # ================================================================================================
    # Setup the process
    # ================================================================================================
    maxEvents = {}
    maxEvents["All"] = opts.nEvts
    process = Process(prefix, postfix, maxEvents)

    # ================================================================================================
    # Add the datasets (according to user options)
    # ================================================================================================
    if (opts.includeOnlyTasks):
        Print("Adding only dataset %s from multiCRAB directory %s" %
              (opts.includeOnlyTasks, opts.mcrab))
        process.addDatasetsFromMulticrab(
            opts.mcrab, includeOnlyTasks=opts.includeOnlyTasks)
    elif (opts.excludeTasks):
        Print("Adding all datasets except %s from multiCRAB directory %s" %
              (opts.excludeTasks, opts.mcrab))
        Print(
            "If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc..."
        )
        process.addDatasetsFromMulticrab(opts.mcrab,
                                         excludeTasks=opts.excludeTasks)
    else:
        myBlackList = [
            "QCD_b"
            "ChargedHiggs_HplusTB_HplusToTB_M_180_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_200_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_220_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_250_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_300_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_350_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_400_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_500_ext1",
            #"ChargedHiggs_HplusTB_HplusToTB_M_650",  #10M events!
            "ChargedHiggs_HplusTB_HplusToTB_M_800_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_1000_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_2000_ext1"
            "ChargedHiggs_HplusTB_HplusToTB_M_2500_ext1",
            "ChargedHiggs_HplusTB_HplusToTB_M_3000_ext1",
            # "ChargedHiggs_HplusTB_HplusToTB_M_1000",
            "ChargedHiggs_HplusTB_HplusToTB_M_1500",  # Speeed things up
            "ChargedHiggs_HplusTB_HplusToTB_M_2000",  # Speeed things up
            "ChargedHiggs_HplusTB_HplusToTB_M_2500",  # Speeed things up
            "ChargedHiggs_HplusTB_HplusToTB_M_3000",  # Speeed things up
            "ChargedHiggs_HplusTB_HplusToTB_M_5000",  # Speeed things up
            "ChargedHiggs_HplusTB_HplusToTB_M_7000",  # Speeed things up  
            "ChargedHiggs_HplusTB_HplusToTB_M_10000",  # Speeed things up
        ]
        if opts.doSystematics:
            myBlackList.append("QCD")

        Print("Adding all datasets from multiCRAB directory %s" % (opts.mcrab))
        Print(
            "If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc..."
        )
        regex = "|".join(myBlackList)
        if len(myBlackList) > 0:
            process.addDatasetsFromMulticrab(opts.mcrab, excludeTasks=regex)
        else:
            process.addDatasetsFromMulticrab(opts.mcrab)

    # ================================================================================================
    # Overwrite Default Settings
    # ================================================================================================
    from HiggsAnalysis.NtupleAnalysis.parameters.jetTriggers import allSelections
    from HiggsAnalysis.NtupleAnalysis.main import PSet
    import HiggsAnalysis.NtupleAnalysis.parameters.scaleFactors as scaleFactors

    allSelections.verbose = opts.verbose
    allSelections.histogramAmbientLevel = opts.histoLevel

    #==========================
    #  Systematics selections
    #==========================

    # marina
    allSelections.SystTopBDTSelection.MVACutValue = 0.40
    allSelections.TopSelectionBDT.TopMVACutValue = 0.40

    # BDT MisID SF
    MisIDSF = PSet(
        MisIDSFJsonName="topMisID_BDT0p40_TopMassCut400.json",
        ApplyMisIDSF=False,
    )

    scaleFactors.assignMisIDSF(MisIDSF, "nominal", MisIDSF.MisIDSFJsonName)
    allSelections.MisIDSF = MisIDSF

    allSelections.SystTopBDTSelection.MiniIsoCutValue = 0.1
    allSelections.SystTopBDTSelection.MiniIsoInvCutValue = 0.1
    allSelections.SystTopBDTSelection.METCutValue = 50.0
    allSelections.SystTopBDTSelection.METInvCutValue = 20.0

    # Muon
    allSelections.MuonSelection.muonPtCut = 30

    # Jets
    allSelections.JetSelection.numberOfJetsCutValue = 4
    allSelections.JetSelection.jetPtCuts = [40.0, 40.0, 40.0, 40.0]

    # Trigger
    allSelections.Trigger.triggerOR = ["HLT_Mu50"]

    # Bjets
    allSelections.BJetSelection.jetPtCuts = [40.0, 40.0]
    allSelections.BJetSelection.numberOfBJetsCutValue = 2

    # ================================================================================================
    # Add Analysis Variations
    # ================================================================================================
    # selections = allSelections.clone()
    # process.addAnalyzer(prefix, Analyzer(prefix, config=selections, silent=False) ) #trigger passed from selections

    # ================================================================================================
    # Command Line Options
    # ================================================================================================
    # from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import applyAnalysisCommandLineOptions
    # applyAnalysisCommandLineOptions(sys.argv, allSelections)

    #================================================================================================
    # Build analysis modules
    #================================================================================================
    PrintOptions(opts)
    builder = AnalysisBuilder(prefix,
                              dataEras,
                              searchModes,
                              usePUreweighting=opts.usePUreweighting,
                              useTopPtReweighting=opts.useTopPtReweighting,
                              doSystematicVariations=opts.doSystematics,
                              analysisType="HToTB")

    # Add variations (e.g. for optimisation)
    # builder.addVariation("METSelection.METCutValue", [100,120,140])
    # builder.addVariation("AngularCutsBackToBack.workingPoint", ["Loose","Medium","Tight"])
    # builder.addVariation("BJetSelection.triggerMatchingApply", [False])
    # builder.addVariation("TopSelection.ChiSqrCutValue", [5, 10, 15, 20])

    # Build the builder
    builder.build(process, allSelections)

    # ================================================================================================
    # Example of adding an analyzer whose configuration depends on dataVersion
    # ================================================================================================
    #def createAnalyzer(dataVersion):
    #a = Analyzer("ExampleAnalysis")
    #if dataVersion.isMC():
    #a.tauPtCut = 10
    #else:
    #a.tauPtCut = 20
    #return a
    #process.addAnalyzer("test2", createAnalyzer)

    # ================================================================================================
    # Pick events
    # ================================================================================================
    #process.addOptions(EventSaver = PSet(enabled = True,pickEvents = True))
    # ================================================================================================
    # Run the analysis
    # ================================================================================================
    # Run the analysis with PROOF? You can give proofWorkers=<N> as a parameter
    if opts.jCores:
        Print("Running process with PROOF (proofWorkes=%s)" %
              (str(opts.jCores)))
        process.run(proof=True, proofWorkers=opts.jCores)
    else:
        Print("Running process")
        process.run()

    # Print total time elapsed
    tFinish = time.time()
    dt = int(tFinish) - int(tStart)
    days = divmod(dt, 86400)  # days
    hours = divmod(days[1], 3600)  # hours
    mins = divmod(hours[1], 60)  # minutes
    secs = mins[1]  # seconds
    Print(
        "Total elapsed time is %s days, %s hours, %s mins, %s secs" %
        (days[0], hours[0], mins[0], secs), True)
    return
예제 #17
0
import HiggsAnalysis.NtupleAnalysis.parameters.scaleFactors as scaleFactors

#================================================================================================
# General parameters
#================================================================================================
verbose = True
histogramAmbientLevel = "Debug"  # Options: Systematics, Vital, Informative, Debug

#================================================================================================
# Trigger
#================================================================================================
trigger = PSet(
    triggerOR=[
        "HLT_PFHT400_SixJet30_DoubleBTagCSV_p056",  # scanned in range _v1--_v100 (=>remove the '_v' suffix)
        "HLT_PFHT450_SixJet40_BTagCSV_p056",  # scanned in range _v1--_v100 (=>remove the '_v' suffix)
        #"HLT_PFHT400_SixJet30", #Prescale 110 at inst. lumi 1.35E+34
        #"HLT_PFHT450_SixJet40", #Prescale 26 at inst. lumi 1.35E+34
    ],
    triggerOR2=[],
)

#================================================================================================
# Tau selection (sync with HToTauNu analysis)
#================================================================================================
tauSelection = PSet(
    applyTriggerMatching=False,
    triggerMatchingCone=0.1,  # DeltaR for matching offline tau with trigger tau
    tauPtCut=20.0,  #
    tauEtaCut=2.3,  #
    tauLdgTrkPtCut=0.0,  #
    prongs=-1,  # options: 1, 2, 3, 12, 13, 23, 123 or -1 (all)
def createAnalyzer(dataVersion, era):
    """Build a 'TriggerEfficiency' analyzer configured for the given era.

    The trigger OR-lists start empty and are filled below: collision data
    gets era-specific HLT path lists (plus run-range limits), while MC gets
    a fixed reference set. 'leg', 'metFilter', 'binning', 'xLabel', 'yLabel',
    'isData', 'runmin' and 'runmax' are presumably module-level names defined
    elsewhere in this file — TODO confirm (see the commented-out
    process.getRuns() call near the bottom for runmin/runmax).
    """
    a = Analyzer(
        "TriggerEfficiency",
        name=era,
        Trigger=PSet(triggerOR=[], triggerOR2=[]),
        METFilter=metFilter,
        usePileupWeights=True,
        offlineSelection=leg,
        MuonSelection=PSet(
            #            discriminators = ["muIDMedium"],
            #            discriminators = ["TrgMatch_IsoMu20_eta2p1"],
            discriminators=["Muons_TrgMatch_IsoMu16_eta2p1"], ),
        TauSelection=PSet(
            discriminators=[
                "byLooseCombinedIsolationDeltaBetaCorr3Hits",  #"byMediumIsolationMVA3newDMwLT",
                "againstMuonTight3",
                "againstElectronMediumMVA6"
            ],
            nprongs=1,
            relaxedOfflineSelection=False),
        binning=binning,
        xLabel=xLabel,
        yLabel=yLabel,
    )

    if isData(dataVersion):
        # Default (2012-style) data trigger lists; overwritten per era below.
        a.Trigger.triggerOR = [
            "HLT_IsoMu15_eta2p1_L1ETM20_v3", "HLT_IsoMu15_eta2p1_L1ETM20_v4",
            "HLT_IsoMu15_eta2p1_L1ETM20_v5", "HLT_IsoMu15_eta2p1_L1ETM20_v6",
            "HLT_IsoMu15_eta2p1_L1ETM20_v7"
        ]
        a.Trigger.triggerOR2 = [
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v2",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v4",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v6",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v7",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v9",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v10"
        ]
        if era == "2015C":
            #            a.Trigger.triggerOR = ["HLT_IsoMu16_eta2p1_CaloMET30_v1",
            #                                   "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v1"]
            #            a.Trigger.triggerOR2 = ["HLT_IsoMu16_eta2p1_CaloMET30_LooseIsoPFTau50_Trk30_eta2p1_v1",
            #                                    "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v1"]
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]
#            a.Trigger.triggerOR = ["HLT_IsoMu20_eta2p1_v2"]
#            a.Trigger.triggerOR2 = ["HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v2"]
        if era == "2015D":
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v3",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_vx",
                "HLT_IsoMu16_eta2p1_MET30_v1", "HLT_IsoMu16_eta2p1_MET30_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v3",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_vx",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]
        if era == "2015CD":
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v3",
                "HLT_IsoMu16_eta2p1_MET30_v1"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v3",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_v1"
            ]
        # NOTE: "2016" matches by substring, so the two exact-match blocks
        # below ("2016ICHEP", "2016HIPFIXED") further overwrite this choice.
        if "2016" in era:
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_MET30_vx", "HLT_IsoMu22_eta2p1_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_vx",
                "HLT_IsoMu21_eta2p1_LooseIsoPFTau50_Trk30_eta2p1_SingleL1_vx"
            ]

        if era == "2016ICHEP":
            a.Trigger.triggerOR = ["HLT_IsoMu16_eta2p1_MET30_vx"]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]

        if era == "2016HIPFIXED":
            a.Trigger.triggerOR = ["HLT_IsoMu22_eta2p1_vx"]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu21_eta2p1_LooseIsoPFTau50_Trk30_eta2p1_SingleL1_vx"
            ]

#            a.Trigger.triggerOR = ["HLT_IsoMu20_eta2p1_v1",
#                                   "HLT_IsoMu20_eta2p1_v2",
#                                   "HLT_IsoMu17_eta2p1_v2"]
#            a.Trigger.triggerOR2= ["HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v1",
#                                   "HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v2"]

#        runmin,runmax = process.getRuns()
#        a.lumi    = lumi
        # NOTE(review): runmin/runmax are not defined in this function;
        # presumably module-level globals (cf. the commented-out
        # process.getRuns() line above) — confirm before reuse.
        a.runMin = runmin
        a.runMax = runmax
    else:
        # Monte Carlo: fixed reference triggers (era-adjusted below).
        a.Trigger.triggerOR = ["HLT_IsoMu15_eta2p1_L1ETM20_v5"]
        a.Trigger.triggerOR2 = [
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v6"
        ]
        if era == "2015C" or era == "2015D" or era == "2015CD" or "2016" in era:
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_CaloMET30_v1",
                "HLT_IsoMu16_eta2p1_MET30_vx",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_CaloMET30_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_vx",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]


#            a.Trigger.triggerOR = ["HLT_IsoMu20_eta2p1_v1"]
#            a.Trigger.triggerOR2 = ["HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v1"]

#print "check triggerOR",a.Trigger.triggerOR
    return a
예제 #19
0
import HiggsAnalysis.NtupleAnalysis.parameters.jsonReader as jsonReader

#================================================================================================
# General parameters
#================================================================================================
verbose = True
# Controls which histograms are produced (increasing verbosity left-to-right).
histogramAmbientLevel = "Debug"  # Options: Systematics, Vital, Informative, Debug

#================================================================================================
# Trigger
#================================================================================================
# triggerOR lists HLT paths, presumably OR'd together by the framework;
# triggerOR2 is a second, independent group (unused here).
trigger = PSet(
    # scanned in range _v1--_v100 (=>remove the '_v' suffix)
    triggerOR=[
        "HLT_PFHT400_SixJet30_DoubleBTagCSV_p056",
        "HLT_PFHT450_SixJet40_BTagCSV_p056",
        "HLT_PFJet450",  #for trg eff recovery in 2016H
    ],
    triggerOR2=[],
)

#================================================================================================
# MET filter
#================================================================================================
# Event-quality (MET cleaning) filter discriminators.
metFilter = PSet(discriminators=[
    "hbheNoiseTokenRun2Loose", "Flag_HBHENoiseIsoFilter",
    "Flag_EcalDeadCellTriggerPrimitiveFilter", "Flag_CSCTightHaloFilter",
    "Flag_eeBadScFilter", "Flag_goodVertices"
])

#================================================================================================
#!/usr/bin/env python

from HiggsAnalysis.NtupleAnalysis.main import PSet
import HiggsAnalysis.NtupleAnalysis.parameters.scaleFactors as scaleFactors


#====== General parameters
histoLevel = "Debug"  # Options: Systematics, Vital, Informative, Debug

#====== Trigger
# Single-tau + MET trigger with an L1 ETM threshold of 80 GeV.
trg = PSet(
  # No need to specify version numbers, they are automatically scanned in range 1--100 (remove the '_v' suffix)
  L1ETM = 80,
  triggerOR = ["HLT_LooseIsoPFTau50_Trk30_eta2p1_MET90"
               ],
  triggerOR2 = [],
)

#====== MET filter
metFilter = PSet(
  discriminators = [#"hbheNoiseTokenRun2Loose", # Loose is recommended
#                    "hbheIsoNoiseToken", # under scrutiny
                    "Flag_HBHENoiseFilter",
                    "Flag_HBHENoiseIsoFilter",
                    "Flag_EcalDeadCellTriggerPrimitiveFilter",
#                    "Flag_CSCTightHaloFilter",
                    "Flag_eeBadScFilter",
                    "Flag_goodVertices",
                    "Flag_globalTightHalo2016Filter",
                    "badPFMuonFilter",
                    "badChargedCandidateFilter"]
예제 #21
0
def createAnalyzer(dataVersion, era):
    """Build a TriggerEfficiency analyzer for the muon+L1ETM cross trigger.

    The offline selection uses a trigger-matched muon plus a loosely
    isolated tau.  For collision data (per isData) the explicitly
    versioned HLT paths of the given era are attached together with the
    era's integrated luminosity and run range; for simulation a single
    representative path version is used.

    Relies on module-level names: Analyzer, PSet, isData, runRange, leg,
    binning, xLabel, yLabel.

    Args:
        dataVersion: dataset version string, tested with isData().
        era: data-taking era label, e.g. "2015C", "2015D" or "2015CD".

    Returns:
        The configured Analyzer instance.
    """
    a = Analyzer(
        "TriggerEfficiency",
        name=era,
        # Trigger paths are filled in below, depending on data/MC and era
        Trigger=PSet(triggerOR=[], triggerOR2=[]),
        usePileupWeights=True,
        offlineSelection=leg,
        MuonSelection=PSet(
            #            discriminators = ["muIDMedium"],
            #            discriminators = ["TrgMatch_IsoMu20_eta2p1"],
            discriminators=["Muons_TrgMatch_IsoMu16_eta2p1"], ),
        TauSelection=PSet(discriminators=[
            "byLooseCombinedIsolationDeltaBetaCorr3Hits", "againstMuonTight3",
            "againstElectronMediumMVA5"
        ], ),
        binning=binning,
        xLabel=xLabel,
        yLabel=yLabel,
    )

    # Collision data: enumerate every versioned HLT path seen in the dataset
    if isData(dataVersion):
        a.Trigger.triggerOR = [
            "HLT_IsoMu15_eta2p1_L1ETM20_v3", "HLT_IsoMu15_eta2p1_L1ETM20_v4",
            "HLT_IsoMu15_eta2p1_L1ETM20_v5", "HLT_IsoMu15_eta2p1_L1ETM20_v6",
            "HLT_IsoMu15_eta2p1_L1ETM20_v7"
        ]
        a.Trigger.triggerOR2 = [
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v2",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v4",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v6",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v7",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v9",
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v10"
        ]
        # Era-specific overrides of the path lists
        if era == "2015C":
            #            a.Trigger.triggerOR = ["HLT_IsoMu16_eta2p1_CaloMET30_v1",
            #                                   "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v1"]
            #            a.Trigger.triggerOR2 = ["HLT_IsoMu16_eta2p1_CaloMET30_LooseIsoPFTau50_Trk30_eta2p1_v1",
            #                                    "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v1"]
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]
#            a.Trigger.triggerOR = ["HLT_IsoMu20_eta2p1_v2"]
#            a.Trigger.triggerOR2 = ["HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v2"]
        if era == "2015D":
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v3",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_vx",
                "HLT_IsoMu16_eta2p1_MET30_v1", "HLT_IsoMu16_eta2p1_MET30_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v3",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_vx",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]
        if era == "2015CD":
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_v3",
                "HLT_IsoMu16_eta2p1_MET30_v1"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v2",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_v3",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_v1"
            ]
#            a.Trigger.triggerOR = ["HLT_IsoMu20_eta2p1_v1",
#                                   "HLT_IsoMu20_eta2p1_v2",
#                                   "HLT_IsoMu17_eta2p1_v2"]
#            a.Trigger.triggerOR2= ["HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v1",
#                                   "HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v2"]

        # Attach luminosity and run range of the selected era
        lumi, runmin, runmax = runRange(era)
        a.lumi = lumi
        a.runMin = runmin
        a.runMax = runmax
    else:
        # Simulation: a single representative path version is sufficient
        a.Trigger.triggerOR = ["HLT_IsoMu15_eta2p1_L1ETM20_v5"]
        a.Trigger.triggerOR2 = [
            "HLT_IsoMu15_eta2p1_LooseIsoPFTau35_Trk20_Prong1_L1ETM20_v6"
        ]
        if era == "2015C" or era == "2015D" or era == "2015CD":
            a.Trigger.triggerOR = [
                "HLT_IsoMu16_eta2p1_CaloMET30_v1",
                "HLT_IsoMu16_eta2p1_MET30_vx",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_IsoMu16_eta2p1_CaloMET30_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_IsoMu16_eta2p1_MET30_LooseIsoPFTau50_Trk30_eta2p1_vx",
                "HLT_IsoMu16_eta2p1_MET30_JetIdCleaned_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]


#            a.Trigger.triggerOR = ["HLT_IsoMu20_eta2p1_v1"]
#            a.Trigger.triggerOR2 = ["HLT_IsoMu17_eta2p1_LooseIsoPFTau20_v1"]

#print "check triggerOR",a.Trigger.triggerOR
    return a
# ---- 예제 #22 (Example #22) ----
# 0
def _setupBtagSFDatabase(btagPset, btagPayloadFilename, direction,
                         variationInfo):
    """Load a btag POG scale-factor payload (csv) into btagPset.btagSF.

    The payload file is read from NtupleAnalysis/data, its header is
    validated against the configured b-tagging algorithm, and only the
    rows matching the configured working point are kept.  Each surviving
    row is converted into a PSet and the list is stored as
    btagPset.btagSF.

    Args:
        btagPset: b-jet selection PSet; its 'bjetDiscr' and
            'bjetDiscrWorkingPoint' attributes steer the filtering, and
            its 'btagSF' attribute is overwritten with the result.
        btagPayloadFilename: csv payload filename under NtupleAnalysis/data.
        direction: systematic direction; must be one of 'nominal', 'down',
            'up'.  NOTE(review): only validated here — rows of all sysTypes
            are kept and stored with their sysType column, presumably
            filtered downstream; confirm.
        variationInfo: unused by this function (kept for call-site
            compatibility).

    Raises:
        Exception: missing payload file, unsupported algorithm, unknown
            direction or working point, header/algorithm mismatch, missing
            header column, or an empty payload.
    """
    fullname = os.path.join(os.getenv("HIGGSANALYSIS_BASE"), "NtupleAnalysis",
                            "data", btagPayloadFilename)
    if not os.path.exists(fullname):
        raise Exception(
            "Error: Could not find btag POG btag SF payload csv file! (tried: %s)"
            % fullname)
    # Supported algorithms and the payload header tag each one must carry
    validAlgoHeaderPairs = {
        "pfCombinedInclusiveSecondaryVertexV2BJetTags": "CSVv2"
    }
    bjetDiscr = getattr(btagPset, "bjetDiscr")
    if bjetDiscr not in validAlgoHeaderPairs:
        raise Exception(
            "Error: No valid payload header ID has been specified for btag algo %s"
            % bjetDiscr)
    directionLUT = {"nominal": " central", "down": " down", "up": " up"}
    if direction not in directionLUT:
        raise Exception("Error: direction '%s' is unknown! Valid options: %s" %
                        (direction, ", ".join(map(str, directionLUT))))
    # Working point name -> value of the payload's 'OperatingPoint' column
    workingPointLUT = {
        "Loose": "0",
        "Medium": "1",
        "Tight": "2",
    }
    workingPoint = getattr(btagPset, "bjetDiscrWorkingPoint")
    if workingPoint not in workingPointLUT:
        raise Exception(
            "Error: Btag working point '%s' is not defined in the look-up table!"
            % (workingPoint))
    # Column names expected in the payload header; filled with column indices
    headerColumnIndices = {
        "OperatingPoint": None,
        "measurementType": None,
        "sysType": None,
        "jetFlavor": None,
        "etaMin": None,
        "etaMax": None,
        "ptMin": None,
        "ptMax": None,
        "discrMin": None,
        "discrMax": None,
        "formula": None
    }
    headerRow = None
    rows = []
    # 'rb' is the mode the python2 csv module expects; use 'r' with
    # newline='' if this is ever ported to python3.
    with open(fullname, 'rb') as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            if headerRow is None:
                # First row: "<algo tag>, col1, col2, ..." — validate the tag
                headerRow = row[1:]
                if validAlgoHeaderPairs[bjetDiscr] != row[0]:
                    raise Exception(
                        "Error: btag algo = %s is incompatible with btag SF payload file header '%s' (expected %s)!"
                        % (bjetDiscr, row[0],
                           validAlgoHeaderPairs[bjetDiscr]))
                # Locate each required column; header entries may carry
                # surrounding whitespace, hence the strip
                for key in headerColumnIndices.keys():
                    for i, columnName in enumerate(headerRow):
                        if columnName.strip() == key:
                            headerColumnIndices[key] = i
                    if headerColumnIndices[key] is None:
                        raise Exception(
                            "Error: could not find column '%s' in file %s:\n  header = %s"
                            % (key, fullname, headerRow))
            else:
                # Keep only the rows for the requested working point
                opColumn = headerColumnIndices["OperatingPoint"]
                if row[opColumn] == workingPointLUT[workingPoint]:
                    rows.append(row)
    if len(rows) == 0:
        raise Exception(
            "Error: for unknown reason, no entries found from the btag SF payload (%s)!"
            % fullname)
    # Convert the selected rows into a vector of PSets
    psetList = []
    for row in rows:
        p = PSet(jetFlavor=int(row[headerColumnIndices["jetFlavor"]]),
                 ptMin=float(row[headerColumnIndices["ptMin"]]),
                 ptMax=float(row[headerColumnIndices["ptMax"]]),
                 etaMin=float(row[headerColumnIndices["etaMin"]]),
                 etaMax=float(row[headerColumnIndices["etaMax"]]),
                 discrMin=float(row[headerColumnIndices["discrMin"]]),
                 discrMax=float(row[headerColumnIndices["discrMax"]]),
                 sysType=row[headerColumnIndices["sysType"]],
                 formula=row[headerColumnIndices["formula"]])
        psetList.append(p)
    btagPset.btagSF = psetList
# ---- 예제 #23 (Example #23) ----
# 0
    print "Usage: ./QCDMeasurementAnalysis.py <path-to-multicrab-directory> <1pr> <2pr> <3pr>"
    sys.exit(0)

from HiggsAnalysis.NtupleAnalysis.main import Process, PSet, Analyzer
from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import obtainAnalysisSuffix
# Build the QCD measurement process; the analysis suffix is derived from the
# command line (sys is assumed to be imported earlier in this script)
process = Process("QCDMeasurement" + obtainAnalysisSuffix(sys.argv))
# Use all datasets from the multicrab directory except charged Higgs signal
process.addDatasetsFromMulticrab(sys.argv[1], blacklist=["ChargedHiggs"])

# Add config
from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import allSelections, applyAnalysisCommandLineOptions, setAngularCutsWorkingPoint
# Enable genuine tau histograms for common plots (needed for calculating N_QCD)
allSelections.CommonPlots.enableGenuineTauHistograms = True
# Set splitting of phase space (first bin is below first edge value and last bin is above last edge value)
allSelections.CommonPlots.histogramSplitting = [
    PSet(label="tauPt",
         binLowEdges=[60.0, 80.0, 100.0],
         useAbsoluteValues=False),
    # useAbsoluteValues=True folds the eta sign, i.e. splits in |eta|
    PSet(label="tauEta", binLowEdges=[0.6, 1.4], useAbsoluteValues=True),
]
#===== Selection customisations
allSelections.TauSelection.prongs = 1
allSelections.TauSelection.tauPtCut = 50.0
allSelections.METSelection.METCutValue = 90.0
# Back-to-back angular cuts applied to the four leading jets
allSelections.AngularCutsBackToBack.cutValueJet1 = 40.0
allSelections.AngularCutsBackToBack.cutValueJet2 = 40.0
allSelections.AngularCutsBackToBack.cutValueJet3 = 40.0
allSelections.AngularCutsBackToBack.cutValueJet4 = 40.0
allSelections.TauSelection.rtau = 0.75
#allSelections.TauSelection.rtauSmallerThan = 0.75
allSelections.BJetSelection.bjetDiscrWorkingPoint = "Medium"
# ---- 예제 #24 (Example #24) ----
# 0
def createAnalyzer(dataVersion, era, onlineSelection="MET80"):
    """Build a TriggerEfficiency analyzer for the tau+MET cross trigger.

    An era label carrying the "CaloMET" suffix selects the calo-MET
    variant: the suffix (8 characters, presumably "_CaloMET" — confirm
    against callers) is stripped and triggerOR2 is cleared at the end.
    For collision data the explicitly versioned HLT paths and the era's
    luminosity / run range are attached.

    Relies on module-level names: Analyzer, PSet, signalAnalysis,
    runRange, leg, binning, xLabel, yLabel.

    Args:
        dataVersion: dataset version object, tested with .isData().
        era: data-taking era label (optionally with a CaloMET suffix).
        onlineSelection: online MET threshold tag spliced into the
            triggerOR2 path names (default "MET80").

    Returns:
        The configured Analyzer instance.
    """
    useCaloMET = False
    if "CaloMET" in era:
        useCaloMET = True
        era = era[:-8]

    a = Analyzer(
        "TriggerEfficiency",
        name=era,
        # Trigger paths are filled in below, depending on data/MC and era
        Trigger=PSet(triggerOR=[], triggerOR2=[]),
        usePileupWeights=True,
        #        usePileupWeights = False,
        onlineSelection=onlineSelection,
        offlineSelection=leg,
        TauSelection=signalAnalysis.tauSelection,
        #        TauSelection = PSet(
        #            discriminators = ["byLooseCombinedIsolationDeltaBetaCorr3Hits",
        #                             "againstMuonTight3",
        #                             "againstElectronMediumMVA5"],
        #        ),
        ElectronSelection=signalAnalysis.eVeto,
        MuonSelection=signalAnalysis.muVeto,
        JetSelection=signalAnalysis.jetSelection,
        BJetSelection=signalAnalysis.bjetSelection,
        binning=binning,
        xLabel=xLabel,
        yLabel=yLabel,
    )
    # Tighten the jet/b-jet counting relative to the shared signal selections
    #    a.TauSelection.applyTriggerMatching = False
    a.JetSelection.numberOfJetsCutValue = 3
    #    a.BJetSelection.bjetDiscrWorkingPoint = "Medium"
    a.BJetSelection.numberOfBJetsCutValue = 1

    # Collision data: enumerate every versioned HLT path seen in the dataset
    if dataVersion.isData():
        a.Trigger.triggerOR = [
            "HLT_LooseIsoPFTau35_Trk20_Prong1_v2",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_v3",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_v4",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_v6",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_v7",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_v9",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_v10"
        ]
        a.Trigger.triggerOR2 = [
            "HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v2",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v3",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v4",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v6",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v7",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v9",
            "HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v10"
        ]
        if era == "2015C" or era == "2015D" or era == "2015CD":
            a.Trigger.triggerOR = [
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_v2",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_v3",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]
            # The online MET threshold tag is spliced into the path names
            a.Trigger.triggerOR2 = [
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_" + onlineSelection +
                "_JetIdCleaned_v1", "HLT_LooseIsoPFTau50_Trk30_eta2p1_" +
                onlineSelection + "_JetIdCleaned_v2",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_" + onlineSelection + "_v1",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_" + onlineSelection +
                "_JetIdCleaned_vx",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_" + onlineSelection + "_vx"
            ]

        # Attach luminosity and run range of the selected era
        lumi, runmin, runmax = runRange(era)
        a.lumi = lumi
        a.runMin = runmin
        a.runMax = runmax
    else:
        # Simulation: a single representative path version is sufficient
        a.Trigger.triggerOR = ["HLT_LooseIsoPFTau35_Trk20_Prong1_v6"]
        a.Trigger.triggerOR2 = ["HLT_LooseIsoPFTau35_Trk20_Prong1_MET70_v6"]
        if era == "2015C" or era == "2015D" or era == "2015CD":
            a.Trigger.triggerOR = [
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_v1",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_vx"
            ]
            a.Trigger.triggerOR2 = [
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_" + onlineSelection + "_v1",
                "HLT_LooseIsoPFTau50_Trk30_eta2p1_" + onlineSelection + "_vx"
            ]

    # CaloMET variant measures the tau leg only: drop the MET-leg paths
    if useCaloMET:
        a.Trigger.triggerOR2 = []

    return a
# ---- 예제 #25 (Example #25) ----
# 0
def main():
    """Run the HToTB analysis over a multiCRAB directory.

    Builds the Process from the dataset white/black lists, adds datasets
    according to the command-line include/exclude options, overwrites
    selected default selections, constructs the analysis modules via
    AnalysisBuilder, runs them, and prints the total elapsed time.

    Relies on module-level globals: opts (parsed command-line options),
    prefix, postfix, dataEras, searchModes, and the helper functions
    (GetDatasetCompleteList, GetDatasetWhitelist, GetDatasetBlackList,
    PrintOptions, Print, Verbose).
    """

    # Save start time (epoch seconds)
    tStart = time.time()
    Verbose("Started @ " + str(tStart), True)

    # Require at least two arguments (script-name, path to multicrab)
    if len(sys.argv) < 2:
        Print(
            "Not enough arguments passed to script execution. Printing docstring & EXIT."
        )
        print __doc__
        sys.exit(0)
    else:
        pass

    # ================================================================================================
    # Setup the process
    # ================================================================================================
    completeList = GetDatasetCompleteList()
    whiteList = GetDatasetWhitelist(opts)
    blackList = GetDatasetBlackList(completeList, whiteList)
    maxEvents = {}
    for d in whiteList:
        # presumably -1 == no event-count limit for this dataset — confirm
        # against the Process implementation
        maxEvents[d] = -1
        #if  d == "ChargedHiggs_HplusTB_HplusToTB_M_650":
        #    maxEvents[d] = 4000000
    process = Process(prefix, postfix, maxEvents)

    # ================================================================================================
    # Add the datasets (according to user options)
    # ================================================================================================
    if (opts.includeOnlyTasks):
        Verbose("Adding only dataset %s from multiCRAB directory %s" %
                (opts.includeOnlyTasks, opts.mcrab))
        process.addDatasetsFromMulticrab(
            opts.mcrab, includeOnlyTasks=opts.includeOnlyTasks)
    elif (opts.excludeTasks):
        Verbose("Adding all datasets except %s from multiCRAB directory %s" %
                (opts.excludeTasks, opts.mcrab))
        Print(
            "If collision data are present, then vertex reweighting is done according to the chosen data era (era=2015C, 2015D, 2015) etc..."
        )
        process.addDatasetsFromMulticrab(opts.mcrab,
                                         excludeTasks=opts.excludeTasks)
    else:
        # Default: everything except the black-listed datasets
        myBlackList = blackList
        #myBlackList = ["M_180", "M_200" , "M_220" , "M_250" , "M_300" , "M_350" , "M_400" , "M_500" , "M_650",
        #               "M_800", "M_1000", "M_1500", "M_2000", "M_2500", "M_3000", "M_5000", "M_7000", "M_10000"]

        # Extend the blacklist with datasets not in the group
        #myBlackList.extend(blackList)

        Verbose("Adding all datasets from multiCRAB directory %s except %s" %
                (opts.mcrab, (",".join(myBlackList))))
        Verbose(
            "Vertex reweighting is done according to the chosen data era (%s)"
            % (",".join(dataEras)))
        # The blacklist is matched against task names as an OR-regex
        regex = "|".join(myBlackList)
        if len(myBlackList) > 0:
            process.addDatasetsFromMulticrab(opts.mcrab, excludeTasks=regex)
        else:
            process.addDatasetsFromMulticrab(opts.mcrab)

    # ================================================================================================
    # Overwrite Default Settings
    # ================================================================================================
    from HiggsAnalysis.NtupleAnalysis.parameters.hplus2tbAnalysis import allSelections

    allSelections.verbose = opts.verbose
    allSelections.histogramAmbientLevel = opts.histoLevel

    # Set splitting of phase-space (first bin is below first edge value and last bin is above last edge value)
    allSelections.CommonPlots.histogramSplitting = [
        # PSet(label="TetrajetBjetPt" , binLowEdges=[50, 160,  300], useAbsoluteValues=False), # last (not bad)
        # PSet(label="TetrajetBjetPt" , binLowEdges=[50, 120,  300], useAbsoluteValues=False), # last (not bad)
        #PSet(label="TetrajetBjetPt" , binLowEdges=[50, 150,  300], useAbsoluteValues=False), # last (quite good)
        # PSet(label="TetrajetBjetPt" , binLowEdges=[50, 60,  150], useAbsoluteValues=False),
        # PSet(label="TetrajetBjetPt" , binLowEdges=[60, 90, 160, 300], useAbsoluteValues=False), # BEST (v1)
        # PSet(label="TetrajetBjetPt" , binLowEdges=[50, 60, 90, 160, 330], useAbsoluteValues=False),
        PSet(label="TetrajetBjetPt",
             binLowEdges=[60, 90, 160, 300],
             useAbsoluteValues=False),  # BEST (v2)
        #PSet(label="TetrajetBjetPt" , binLowEdges=[60, 90, 160, 300], useAbsoluteValues=False), # BEST (v3)
        # PSet(label="TetrajetBjetPt" , binLowEdges=[50, 65, 130, 330], useAbsoluteValues=False), # BEST (v4)
        # PSet(label="TetrajetBjetPt" , binLowEdges=[50, 65, 95, 130, 330], useAbsoluteValues=False), # BEST (v5)
        # PSet(label="TetrajetBjetPt" , binLowEdges=[45, 50, 60, 90, 160, 330], useAbsoluteValues=False), # BEST (v5)
        #
        PSet(label="TetrajetBjetEta",
             binLowEdges=[0.8, 1.4, 2.0],
             useAbsoluteValues=True),  # optimised
    ]

    # Overwrite values
    # allSelections.TopSelectionBDT.CSV_bDiscCutDirection  = ">="
    allSelections.TopSelectionBDT.CSV_bDiscCutValue = 0.54  # allow CSVv2-L for inverted top

    # ================================================================================================
    # Command Line Options
    # ================================================================================================
    # from HiggsAnalysis.NtupleAnalysis.parameters.signalAnalysisParameters import applyAnalysisCommandLineOptions
    # applyAnalysisCommandLineOptions(sys.argv, allSelections)

    # ================================================================================================
    # Build analysis modules
    # ================================================================================================
    PrintOptions(opts)
    builder = AnalysisBuilder(prefix,
                              dataEras,
                              searchModes,
                              usePUreweighting=opts.usePUreweighting,
                              useTopPtReweighting=opts.useTopPtReweighting,
                              doSystematicVariations=opts.doSystematics,
                              analysisType="HToTB",
                              verbose=opts.verbose,
                              systVarsList=opts.systVarsList)

    # Add variations (e.g. for optimisation)
    # builder.addVariation("BJetSelection.triggerMatchingApply", [True, False]) # At least 1 trg b-jet dR-matched to offline b-jets
    # builder.addVariation("FakeBMeasurement.prelimTopFitChiSqrCutValue", [100, 20])
    # builder.addVariation("FakeBMeasurement.prelimTopFitChiSqrCutDirection", ["<=", "==", ">="])
    # builder.addVariation("FakeBMeasurement.numberOfBJetsCutValue", [0, 1])
    # builder.addVariation("FakeBMeasurement.numberOfBJetsCutDirection", ["=="])
    # builder.addVariation("FakeBMeasurement.numberOfBJetsCutDirection", ["<=", "==", ">="])
    # builder.addVariation("FakeBMeasurement.numberOfInvertedBJetsCutValue", [0, 1])
    # builder.addVariation("FakeBMeasurement.numberOfInvertedBJetsCutDirection", [">="])
    # builder.addVariation("FakeBMeasurement.invertedBJetDiscr", "")
    # builder.addVariation("FakeBMeasurement.invertedBJetDiscrWorkingPoint", "Loose")
    # builder.addVariation("FakeBMeasurement.invertedBJetsSortType", ["Random", "DescendingBDiscriminator"])
    # builder.addVariation("FakeBMeasurement.invertedBJetsDiscrMaxCutValue", [0.82, 0.80, 0.75, 0.70])
    # builder.addVariation("TopSelection.ChiSqrCutValue", [100])
    # builder.addVariation("Trigger.triggerOR", [["HLT_PFHT450_SixJet40"], ["HLT_PFHT400_SixJet30"]])
    # builder.addVariation("TopologySelection.FoxWolframMomentCutValue", [0.5, 0.7])
    # builder.addVariation("Trigger.triggerOR", [["HLT_PFHT400_SixJet30_DoubleBTagCSV_p056"], ["HLT_PFHT450_SixJet40_BTagCSV_p056"]])
    # builder.addVariation("Trigger.triggerOR", [["HLT_PFHT400_SixJet30_DoubleBTagCSV_p056", "HLT_PFHT450_SixJet40_BTagCSV_p056"]])

    # Build the builder
    builder.build(process, allSelections)

    # ================================================================================================
    # Example of adding an analyzer whose configuration depends on dataVersion
    # ================================================================================================
    # def createAnalyzer(dataVersion):
    # a = Analyzer("ExampleAnalysis")
    # if dataVersion.isMC():
    # a.tauPtCut = 10
    # else:
    # a.tauPtCut = 20
    # return a
    # process.addAnalyzer("test2", createAnalyzer)

    # ================================================================================================
    # Pick events
    # ================================================================================================
    # process.addOptions(EventSaver = PSet(enabled = True,pickEvents = True))

    # ================================================================================================
    # Run the analysis
    # ================================================================================================
    # Run the analysis with PROOF? You can give proofWorkers=<N> as a parameter
    if opts.jCores:
        Print("Running process with PROOF (proofWorkes=%s)" %
              (str(opts.jCores)))
        process.run(proof=True, proofWorkers=opts.jCores)
    else:
        Print("Running process (no PROOF)")
        process.run()

    # Print total time elapsed
    tFinish = time.time()
    dt = int(tFinish) - int(tStart)
    days = divmod(dt, 86400)  # days
    hours = divmod(days[1], 3600)  # hours
    mins = divmod(hours[1], 60)  # minutes
    secs = mins[1]  # seconds
    Print(
        "Total elapsed time is %s days, %s hours, %s mins, %s secs" %
        (days[0], hours[0], mins[0], secs), True)
    return