Example 1
def makeSysSamplesBTagWeight():
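    # for every MC sample, create four b-tag weight systematic variants and
    # append config lines switching process.bTagWeight.errorModeBC / errorModeUDSG
    # to -1 or +1 (down / up variation)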
    mc_samples = settings.mc_samples()
    for name in mc_samples.iterkeys():
        makeSysSample(
            name,
            name + "_BTagWeightBCMinus",
            {}
        )
        makeSysSample(
            name,
            name + "_BTagWeightBCPlus",
            {}
        )
        makeSysSample(
            name,
            name + "_BTagWeightUDSGMinus",
            {}
        )
        makeSysSample(
            name,
            name + "_BTagWeightUDSGPlus",
            {}
        )
        settings.samples[name + "_BTagWeightBCMinus"].cfg_add_lines += (
            "process.bTagWeight.errorModeBC = -1",
        )
        settings.samples[name + "_BTagWeightBCPlus"].cfg_add_lines += (
            "process.bTagWeight.errorModeBC = +1",
        )
        settings.samples[name + "_BTagWeightUDSGMinus"].cfg_add_lines += (
            "process.bTagWeight.errorModeUDSG = -1",
        )
        settings.samples[name + "_BTagWeightUDSGPlus"].cfg_add_lines += (
            "process.bTagWeight.errorModeUDSG = +1",
        )
Example 2
    def configure(self):
        super(TemplateFitToolChHadIso, self).configure()
        self.fitter = Fitter()
        self.fitbox_bounds = 0.33, 0.62, settings.defaults_Legend["y_pos"]

        # here the stacked templates are taken for purity calculation
        # but they are replaced in fetch_mc_templates(..)
        self.mc_tmplts      = gen.filter(
            settings.post_proc_dict["TemplateStacks"], {
            "analyzer"  : ("TemplateChHadIsoreal", "TemplateChHadIsofake"),
        })
        self.fitted         = rebin_chhadiso(
            gen.fs_filter_active_sort_load({
                "analyzer"  : "TemplateChHadIso",
                "is_data"   : True,
            })
        )
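        # look up the inclusive ttbar sample by its legend entry; next() without
        # a default raises StopIteration if no such sample is configured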
        ttbar_sample = next((
            s.name
            for s in settings.mc_samples().values()
            if s.legend == "t#bar{t} inclusive"
        ))
        self.gen_bkg_tmplt = rebin_chhadiso(
            gen.gen_norm_to_data_lumi(
                gen.fs_filter_active_sort_load({
                    "analyzer"  : "TemplateChHadIsofake",
                    "sample"    : ttbar_sample,
                })))
        self.gen_sig_tmplt = rebin_chhadiso(
            gen.gen_norm_to_data_lumi(
                gen.fs_filter_active_sort_load({
                    "analyzer"  : "TemplateChHadIsoreal",
                    "sample"    : re.compile("whiz2to5"),
                })))
Example 3
def makeSysSamplesPU():
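    # for every MC sample, create a pile-up systematic variant that uses an
    # alternative pile-up weight input file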
    mc_samples = settings.mc_samples()
    for name in mc_samples.iterkeys():
        makeSysSample(
            name,
            name + "_PU",
            {"puWeightInput": "PU_Run2012_73500.root"}
        )
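Examples 1 and 3 show the call-site pattern of makeSysSample: the name of an existing sample, a name for the new systematic variant, and a dict of config-parameter overrides. The helper itself is not part of these examples, so the sketch below is only a guess at its behaviour based on those call sites; the cfg_builtin attribute and the deepcopy/registration steps are assumptions, not the actual cmstoolsac3b implementation.

import copy
import cmstoolsac3b.settings as settings

def makeSysSample(base_name, new_name, cfg_overrides):
    # hypothetical sketch: clone the registered base sample under a new name
    sample = copy.deepcopy(settings.samples[base_name])
    sample.name = new_name
    # apply the given parameter overrides (the attribute name is an assumption)
    for key, value in cfg_overrides.items():
        sample.cfg_builtin[key] = value
    # register the variant so it can be activated for the systematic run
    settings.samples[new_name] = sample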
Example 4
    def get_shilp_counts(self, c):
        # scale each MC sample's event counts to the summed data luminosity
        data_lumi_sum = settings.data_lumi_sum()
        for smp in settings.mc_samples().itervalues():
            norm = data_lumi_sum / smp.lumi
            tight = smp.log_event_counts[self.tight_cnt] * norm
            real  = smp.log_event_counts["realFullTightIDCount,"] * norm
            c.tight += tight
            c.shilp += tight - real
Example 5
def makeSysSamplesTrig():
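    # for every MC sample, create trigger-weight systematic variants and append
    # config lines setting process.trigWeight.uncertMode to +1 / -1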
    mc_samples = settings.mc_samples()
    for name in mc_samples.iterkeys():
        makeSysSample(
            name,
            name + "_TrigPlus",
            {}
        )
        makeSysSample(
            name,
            name + "_TrigMinus",
            {}
        )
        settings.samples[name + "_TrigPlus"].cfg_add_lines += (
            "process.trigWeight.uncertMode = 1",
        )
        settings.samples[name + "_TrigMinus"].cfg_add_lines += (
            "process.trigWeight.uncertMode = -1",
        )
Example 6
    def get_purity_counts(self, c):
        # sum luminosity-normalized counts over all MC samples, and separately
        # for the ttgamma signal sample
        for smp in settings.mc_samples().itervalues():
            legend = smp.legend
            c.mc_sum  += smp.log_event_counts[self.cnt_name] / smp.lumi
            if legend == "t#bar{t}#gamma (Signal)":
                c.mc_ttgam  += smp.log_event_counts[self.cnt_name] / smp.lumi
Example 7
    def run(self):
        self.configure()

        if not self.n_sig_ttgam_wrp:
            self.message("WARNING Did not find result in post_proc_dict. Skipping...")
            return

        # store results in wrapper
        r = copy.deepcopy(self.n_sig_ttgam_wrp)
        r.name = self.name
        self.result = r

        # prepare mc counts
        class counts(object): pass
        c = counts()
        c.sig_pre   = 0.
        c.sig_fid   = 0.
        c.sig_post  = 0.
        c.bkg_pre   = 0.
        c.bkg_post  = 0.
        c.tt_pre    = 0.
        c.tt_post   = 0.
        for smp in settings.mc_samples().itervalues():
            legend = smp.legend
            if legend == "t#bar{t}#gamma (Signal)":
                c.sig_pre  += smp.log_event_counts[self.get_sig_count_name(self.pre_count_name)] / smp.lumi
                c.sig_fid  += smp.log_event_counts[self.get_sig_count_name(self.fid_count_name)] / smp.lumi
                c.sig_post += smp.log_event_counts[self.get_sig_count_name(self.post_count_name)] / smp.lumi
            else:
                c.bkg_pre  += smp.log_event_counts[self.pre_count_name] / smp.lumi
                c.bkg_post += smp.log_event_counts[self.post_count_name] / smp.lumi
            if legend == "t#bar{t} inclusive":
                c.tt_pre  += smp.log_event_counts[self.pre_count_name] / smp.lumi
                c.tt_post += smp.log_event_counts[self.post_count_name] / smp.lumi
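        # the sums above are expected events per unit luminosity;
        # scale them to the integrated data luminosity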
        data_lumi_sum = settings.data_lumi_sum()
        for k in c.__dict__.keys():
            c.__dict__[k] *= data_lumi_sum

        # prepare data counts
        c.data_pre  = 0.
        c.data_post = 0.
        for smp in settings.data_samples().itervalues():
            c.data_pre  += smp.log_event_counts[self.pre_count_name]
            c.data_post += smp.log_event_counts[self.post_count_name]

        # selection performance
        r.eff_gamma     = c.sig_post / c.sig_fid
        r.eff_gamma_fid = c.sig_fid / c.sig_pre
        r.pur_tt        = (c.tt_pre + c.sig_pre) / (c.bkg_pre + c.sig_pre)
        r.N_presel_data = c.data_pre
        r.N_sel_data    = c.data_post
        r.StoB_gamma    = c.sig_post / c.bkg_post
        r.StoB_presel   = c.tt_pre   / (c.bkg_pre - c.tt_pre)

        # background-subtracted number of ttgamma signal events
        # r.n_sig_ttgam   = self.n_sig_ttgam_wrp.n_sig_ttgam

        # R_fid
        R_fid_denom     = r.eff_gamma * r.N_presel_data * r.pur_tt
        r.R_fid         = r.n_sig_ttgam     / R_fid_denom
        r.R_fid_err_stat= r.n_sig_ttgam_err / R_fid_denom

        # R
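        # (ratio of the ttgamma to the inclusive ttbar cross section;
        #  converted to an absolute cross section below via settings.ttbar_xsec_cms)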
        R_denom         = r.eff_gamma_fid * r.eff_gamma * r.N_presel_data * r.pur_tt
        r.R             = r.n_sig_ttgam     / R_denom
        r.R_err_stat    = r.n_sig_ttgam_err / R_denom

        # xsec
        r.xsec          = r.R * settings.ttbar_xsec_cms
        r.xsec_err_stat = r.xsec * r.R_err_stat / r.R

        self.message(str(r))
Example 8
    def prepare_for_systematic(self):
        # activate the "_TrigMinus" variants of all MC samples, plus the data samples
        mc_samples = settings.mc_samples().keys()
        da_samples = settings.data_samples().keys()
        trig_samples = list(s + "_TrigMinus" for s in mc_samples)
        settings.active_samples = trig_samples + da_samples
        super(SysTrigMinus, self).prepare_for_systematic()
Example 9
    def prepare_for_systematic(self):
        mc_samples = settings.mc_samples().keys()
        da_samples = settings.data_samples().keys()
        btag_samples = list(s + "_BTagWeightUDSGPlus" for s in mc_samples)
        settings.active_samples = btag_samples + da_samples
        super(SysBTagWeightUDSGPlus, self).prepare_for_systematic()
Example 10
    def get_shilp_counts(self, c):
        data_lumi_sum = settings.data_lumi_sum()
        for smp in settings.mc_samples().itervalues():
            norm = data_lumi_sum / smp.lumi
            c.tight += smp.log_event_counts[self.tight_cnt] * norm
            c.shilp += smp.log_event_counts[self.shilp_cnt] * norm
Example 11

import cmstoolsac3b.main as main
import cmstoolsac3b.settings as settings
settings.ttbar_xsec = 245.8
settings.ttbar_xsec_err = 2.6 * settings.ttbar_xsec

from cmstoolsac3b.sample import load_samples
import MyPackage.TtGamma8TeV.samples_cern as samples_cern
settings.samples = {}
settings.samples.update(load_samples(samples_cern))
settings.active_samples = settings.samples.keys() # add all MC and data
settings.samples = dict(settings.mc_samples())
settings.active_samples = settings.samples.keys()
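# note: the two lines above replace the sample dict with MC samples only,
# overriding the "all MC and data" selection made just before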

settings.cfg_main_import_path = "MyPackage.ShilpiNTuple.cfg_produce_ntuple"

settings.try_reuse_results = True
settings.max_num_processes = 4

if __name__ == '__main__':
    main.main()