Example #1
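# Datacard generation for the four-muon (mmmm) final state of the doubly
# charged Higgs (DblH) search, for a single mass hypothesis. `Limits` and
# `logger` are defined elsewhere in the analysis framework and are not part
# of this snippet.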
def mmmm_100(mass):
    logger.info("Processing mass-point %i" % mass)

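    # Signal-region selection: h1mass within +/-10% of the mass hypothesis,
    # a mass-dependent sT threshold, and the mmmm channel only.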
    cuts = '(%f < h1mass) & (h1mass < %f)' % (0.9*mass, 1.1*mass)
    cuts += '& (%f < sT)' % (0.6*mass + 130.0)
    cuts += '& (channel == "mmmm")'

    limits = Limits("DblH", cuts, "./ntuples", "./datacards/mmmm100/%i" % mass,
            channels=["dblh4l"], lumi=19.7, blinded=True)

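    # Register the signal sample (with an extra scale factor of 36.0), the
    # simulated backgrounds, and the data.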
    limits.add_group("hpp%i" % mass, "HPlus*%i*" % mass, isSignal=True, scale=36.0)
    limits.add_group("dyjets", "DYJets*")
    limits.add_group("zz", "ZZTo*")
    limits.add_group("top", "T*")
    limits.add_group("data", "data_*", isData=True)

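    # Log-normal luminosity (2.6%) and muon-efficiency (4.3%) uncertainties,
    # correlated across all processes.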
    lumi = {'hpp%i' % mass: 1.026,
            'dyjets':       1.026,
            'zz':           1.026,
            'top':          1.026}
    limits.add_systematics("lumi", "lnN", **lumi)

    mu_eff = {'hpp%i' % mass: 1.043,
              'dyjets':       1.043,
              'zz':           1.043,
              'top':          1.043}
    limits.add_systematics("mu_eff", "lnN", **mu_eff)

    limits.gen_card("test.txt")
Example #2
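# Same datacard setup as Example #1, but summing over all sixteen
# light-lepton (e/mu) four-lepton channels, without the extra signal scale
# factor, and with additional electron-efficiency (10.1%) and signal-MC (15%)
# uncertainties.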
def fourl(mass):
    logger.info("Processing mass-point %i" % mass)

    cuts = '(%f < h1mass) & (h1mass < %f)' % (0.9*mass, 1.1*mass)
    cuts += '& (%f < sT)' % (0.6*mass + 130.0)
    cuts += ('& ((channel == "mmmm") | (channel == "eeee") | (channel == "eemm") |'
             '(channel == "mmee") | (channel == "meme") | (channel == "emem") |'
             '(channel == "emme") | (channel == "meem") |'
             '(channel == "eeem") | (channel == "eeme") | (channel == "emee") | (channel == "meee") |'
             '(channel == "emmm") | (channel == "memm") | (channel == "mmem") | (channel == "mmme"))')

    limits = Limits("DblH", cuts, "./ntuples", "./datacards/light_lep_all/%i" % mass,
            channels=["dblh4l"], lumi=19.7, blinded=True)

    limits.add_group("hpp%i" % mass, "HPlus*%i*" % mass, isSignal=True)
    limits.add_group("dyjets", "DYJets*")
    limits.add_group("zz", "ZZTo*")
    limits.add_group("top", "T*")
    limits.add_group("data", "data_*", isData=True)

    lumi = {'hpp%i' % mass: 1.026,
            'dyjets':       1.026,
            'zz':           1.026,
            'top':          1.026}
    limits.add_systematics("lumi", "lnN", **lumi)

    mu_eff = {'hpp%i' % mass: 1.043,
              'dyjets':       1.043,
              'zz':           1.043,
              'top':          1.043}

    e_eff = {'hpp%i' % mass: 1.101,
             'dyjets':       1.101,
             'zz':           1.101,
             'top':          1.101}

    limits.add_systematics("mu_eff", "lnN", **mu_eff)
    limits.add_systematics("e_eff", "lnN", **e_eff)

    hpp_sys = {'hpp%i' % mass: 1.15}

    limits.add_systematics("mc_err", "lnN", **hpp_sys)

    limits.gen_card("4l.txt")
Example #3
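# Generic MC-based datacards: loops over every mass point in _4L_MASSES for
# an arbitrary channel list, picking the muon/electron efficiency
# uncertainties per channel via efficiency_systematic(). Both of these names
# are defined elsewhere in the framework.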
def four_lepton_mc(name, channels, directory, scale=1.0):
    for mass in _4L_MASSES:
        cuts = '(%f < h1mass) & (h1mass < %f)' % (0.9*mass, 1.1*mass)
        cuts += '& (%f < h2mass) & (h2mass < %f)' % (0.9*mass, 1.1*mass)
        cuts += '& (%f < sT)' % (0.6*mass + 130.0)
        cuts += '& (%s)' % ' | '.join(['(channel == "%s")' % channel for channel in channels])

        limits = Limits("DblH", cuts, "./ntuples", "%s/%i" % (directory, mass),
                channels=["dblh4l"], lumi=19.7, blinded=True)

        limits.add_group("hpp%i" % mass, "HPlus*%i*" % mass, isSignal=True, scale=scale)
        limits.add_group("dyjets", "DYJets*")
        limits.add_group("zz", "ZZTo*")
        limits.add_group("top", "T*")
        limits.add_group("data", "data_*", isData=True)

        lumi = {'hpp%i' % mass: 1.026,
                'dyjets':       1.026,
                'zz':           1.026,
                'top':          1.026}
        limits.add_systematics("lumi", "lnN", **lumi)

        hpp_sys = {'hpp%i' % mass: 1.15}
        limits.add_systematics("sig_mc_err", "lnN", **hpp_sys)

        eff_syst = efficiency_systematic(name)

        if eff_syst[0]:
            mu_eff = {'hpp%i' % mass: eff_syst[0],
                      'dyjets':       eff_syst[0],
                      'zz':           eff_syst[0],
                      'top':          eff_syst[0]}
            limits.add_systematics("mu_eff", "lnN", **mu_eff)

        if eff_syst[1]:
            e_eff = {'hpp%i' % mass: eff_syst[1],
                     'dyjets':       eff_syst[1],
                     'zz':           eff_syst[1],
                     'top':          eff_syst[1]}
            limits.add_systematics("e_eff", "lnN", **e_eff)

        limits.gen_card("%s.txt" % name)
Example #4
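# Like four_lepton_mc, but the background is estimated from data in a mass
# sideband (via the mky helper module) rather than from simulation, with a
# looser selection for final states containing taus.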
def four_lepton(name, channels, directory, scale=1.0, fs=None, tau=False):
    for mass in _4L_MASSES:

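        # Tau final states: wider mass window (0.5m-1.1m), a Z-separation cut,
        # and a relaxed sT requirement; light-lepton states keep the tighter
        # cuts used in the previous examples.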
        if tau:
            cuts = '(%f < h1mass) & (h1mass < %f)' % (0.5*mass, 1.1*mass)
            cuts += '& (%f < h2mass) & (h2mass < %f)' % (0.5*mass, 1.1*mass)
            cuts += '& (z_sep > 10)'
            cuts += '& ((%f < sT) | (400 < sT))' % (mass + 100.0)
        else:
            cuts = '(%f < h1mass) & (h1mass < %f)' % (0.9*mass, 1.1*mass)
            cuts += '& (%f < h2mass) & (h2mass < %f)' % (0.9*mass, 1.1*mass)
            #cuts += '& (z_sep > 20)'
            cuts += '& (%f < sT)' % (0.6*mass + 130.0)

        cuts += '& (%s)' % ' | '.join(['(channel == "%s")' % channel for channel in channels])

        limits = Limits("DblH", cuts, "./ntuples", "%s/%i" % (directory, mass),
                channels=["dblh4l"], lumi=19.7, blinded=True)

        limits.add_group("hpp%i" % mass, "HPlus*%i*" % mass, isSignal=True,
                scale=scale, allowed_decays=fs)
        limits.add_group("data", "data_*", isData=True)

        lumi = {'hpp%i' % mass: 1.026}
        limits.add_systematics("lumi", "lnN", **lumi)

        hpp_sys = {'hpp%i' % mass: 1.15}
        limits.add_systematics("sig_mc_err", "lnN", **hpp_sys)

        eff_syst = efficiency_systematic(name)

        # Add the muon efficiency systematic if it exists for this channel
        if eff_syst[0]:
            mu_eff = {'hpp%i' % mass: eff_syst[0]}
            limits.add_systematics("mu_eff", "lnN", **mu_eff)

        # Add the electron efficiency systematic if it exists for this channel
        if eff_syst[1]:
            e_eff = {'hpp%i' % mass: eff_syst[1]}
            limits.add_systematics("e_eff", "lnN", **e_eff)

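        # Data-driven background: count data events in the mass sideband and
        # extrapolate into the signal region with the transfer factor alpha.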
        if tau:
            N_db_data = mky.data_sideband(
                mass,
                '(%s)' % ' | '.join(['(channel == "%s")' % channel for channel in channels]),
                cuts='(10 < z_sep) & ((%f < sT) | (400 < sT))' % (mass + 100.0),
                tau=True)

            alpha = mky.alpha(
                mass,
                '(%s)' % ' | '.join(['(channel == "%s")' % channel for channel in channels]),
                tau=True)
        else:
            N_db_data = mky.data_sideband(
                mass,
                '(%s)' % ' | '.join(['(channel == "%s")' % channel for channel in channels]),
                #cuts='(z_sep > 80) & (%f < sT)' % (0.6*mass + 130.0))
                #cuts='(z_sep > 20) & (mass > 0)')
                cuts='(%f < sT)' % (0.6*mass + 130.0))

            alpha = mky.alpha(
                mass,
                '(%s)' % ' | '.join(['(channel == "%s")' % channel for channel in channels]))

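        # Expected background = sideband count * alpha; the gmN systematic
        # carries the Poisson uncertainty of the sideband count, and a 10%
        # log-normal uncertainty covers alpha itself.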
        limits.add_bkg_rate("bkg_sb_%s" % channels[0], float(N_db_data) * alpha)
        kwargs = {"bkg_sb_%s" % channels[0]: alpha}
        limits.add_systematics("bkg_err_%s" % channels[0], "gmN %i" % N_db_data, **kwargs)

        kwargs = {"bkg_sb_%s" % channels[0]: 1.10}
        limits.add_systematics("alph_err_%s" % channels[0], "lnN", **kwargs)

        limits.gen_card("%s.txt" % name)
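# Hypothetical driver sketch: the channel list, card name, and output
# directories below are illustrative only and are not taken from the
# original configuration.
if __name__ == "__main__":
    four_lepton_mc("mmmm", ["mmmm"], "./datacards/mc/mmmm")
    four_lepton("mmmm", ["mmmm"], "./datacards/sideband/mmmm")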