def configure(self):
        """Set up the theta-based ChHadIso template fit.

        Configures the fitter, the fit-box position, the MC template
        stacks, the data histogram to be fitted and the background /
        signal input templates.
        """
        super(TemplateFitToolChHadIso, self).configure()

        self.fitter = ThetaFitter()
        self.fitbox_bounds = (0.33, 0.62, 0.88)

        # MC template stacks: real photons + sideband-ID fakes.
        stack_select = {
            "analyzer": ("TemplateChHadIsoreal", "PlotSBIDfake"),
        }
        self.mc_tmplts = gen.filter(
            settings.post_proc_dict["TemplateStacks"], stack_select)

        # Data distribution that the templates are fitted to.
        data_select = {
            "analyzer": "TemplateChHadIso",
            "is_data": True,
        }
        self.fitted = rebin_chhadiso(
            gen.fs_filter_active_sort_load(data_select))

        # Background template was produced by an earlier post-proc tool.
        self.gen_bkg_tmplt = iter(
            settings.post_proc_dict["TemplateFitToolChHadIsoSBIDInputBkg"])

        # Signal template: real-photon whizard MC, normalized to data lumi.
        sig_select = {
            "analyzer": "TemplateChHadIsoreal",
            "sample": re.compile("whiz2to5"),
        }
        self.gen_sig_tmplt = rebin_chhadiso(
            gen.gen_norm_to_data_lumi(
                gen.fs_filter_active_sort_load(sig_select)))
    def configure(self):
        super(TemplateFitToolChHadIso, self).configure()
        self.fitter = Fitter()
        self.fitbox_bounds = 0.33, 0.62, settings.defaults_Legend["y_pos"]

        # here the stacked templates are taken for purity calculation
        # but they are replaced in fetch_mc_templates(..)
        self.mc_tmplts      = gen.filter(
            settings.post_proc_dict["TemplateStacks"], {
            "analyzer"  : ("TemplateChHadIsoreal", "TemplateChHadIsofake"),
        })
        self.fitted         = rebin_chhadiso(
            gen.fs_filter_active_sort_load({
                "analyzer"  : "TemplateChHadIso",
                "is_data"   : True,
            })
        )
        ttbar_sample = next((
            s.name
            for s in settings.mc_samples().values()
            if s.legend == "t#bar{t} inclusive"
        ))
        self.gen_bkg_tmplt = rebin_chhadiso(
            gen.gen_norm_to_data_lumi(
                gen.fs_filter_active_sort_load({
                    "analyzer"  : "TemplateChHadIsofake",
                    "sample"    : ttbar_sample,
                })))
        self.gen_sig_tmplt = rebin_chhadiso(
            gen.gen_norm_to_data_lumi(
                gen.fs_filter_active_sort_load({
                    "analyzer"  : "TemplateChHadIsoreal",
                    "sample"    : re.compile("whiz2to5"),
                })))
    def run(self):

        RandCone = gen.fs_filter_active_sort_load(
            {"sample":"whiz2to5",
             "analyzer":"PlotRandCone"}
        )
        ChHad = gen.fs_filter_active_sort_load(
            { "sample":"whiz2to5",
             "analyzer":"TemplateChHadIsoreal"}
        )

        zipped = itertools.izip(RandCone, ChHad)
        #zipped = (gen.callback(z, lambda x: x.histo.SetBinContent(1,0.)) for z in zipped) # remove first bin
        zipped = (gen.apply_histo_linecolor(z) for z in zipped)
        zipped = (gen.apply_histo_linewidth(z) for z in zipped)
        zipped = list(list(z) for z in zipped) # load all to memory

        if not (zipped and zipped[0]):
            self.message("WARNING Histograms not found!! Quitting..")
            return

        zipped[0][0].legend = "Rand. Cone Iso."
        zipped[0][1].legend = "Charged Had. Iso."
	zipped[0][1].primary_object().SetLineColor(1)

        def save_canvas(wrps, postfix):
            canvas = gen.canvas(
                wrps,
                [rnd.BottomPlotRatio, rnd.Legend, com.SimpleTitleBox]
            )
            canvas = gen.save(
                canvas,
                lambda c: self.plot_output_dir + c.name + postfix
            )
            canvas = gen.switch_log_scale(canvas)
            canvas = gen.save(
                canvas,
                lambda c: self.plot_output_dir + c.name + postfix + "_log"
            )
            gen.consume_n_count(canvas)

        # norm to integral / lumi and save
       # save_canvas(
       #     (gen.gen_norm_to_lumi(z) for z in zipped),
       #     "_lumi"
       # )
	save_canvas(
	 (gen.gen_norm_to_integral(z) for z in zipped),
	 "_int"
	)
# ---- Example #4 ----
 def configure(self):
     """Load the summed data sihih histograms (signal and background)."""
     def _sum_data(analyzer, name):
         # Sum all matching data histograms into one wrapper.
         return gen.op.sum(
             gen.fs_filter_active_sort_load({
                 "analyzer": analyzer,
                 "name": name,
                 "is_data": True,
             })
         )

     # sihih in the barrel, signal selection
     self.data_sihih = _sum_data("dataTemplateFitHistoSihih", "sihihEB")
     # sihih with inverted charged-hadron-iso cut (background enriched)
     self.data_sihih_bkg = _sum_data("Nm1PlotSihihChHadIsoInv", "histo")
    def run(self):
        """Build the background input template from the sideband analyzers."""
        loaded = gen.fs_filter_active_sort_load({
            "analyzer": sb_anzlrs,
            "is_data": True,
        })
        wrp = next(rebin_chhadiso(gen.gen_sum([loaded])))

        # multiply with weight
        if do_dist_reweighting:
            weight = settings.post_proc_dict[
                "TemplateFitToolChHadIsoSbBkgInputBkgWeight"]
            wrp = gen.op.prod((weight, wrp))

        wrp.lumi = settings.data_lumi_sum()
        self.result = [wrp]

        # save a control plot of the resulting template
        canvas = gen.canvas((self.result,))
        canvas = gen.save(canvas, lambda c: self.plot_output_dir + c.name)
        gen.consume_n_count(canvas)
 def configure(self):
     """Set up the sihih-shift template fit."""
     super(TemplateFitToolSihihShift, self).configure()
     # fit info box position
     self.fitbox_bounds = (0.63, 0.93, 0.60)
     # pre-shifted MC templates produced by post-processing
     self.mc_tmplts = settings.post_proc_dict["mc_templates_sihih_shift"]
     # data distribution to be fitted
     self.fitted = gen.fs_filter_active_sort_load({
         "analyzer": "TemplateSihih",
         "is_data": True,
     })
    def run(self):
        """Compare photons removed by overlap-merging with whizard photons.

        Loads the photons kicked by ttbarPhotonMergerSingleCall and the
        whizard matrix-element photons, zeroes the first bin, overlays
        them with a ratio pad and saves linear and log-scale, lumi-
        normalized plots.

        NOTE(fix): the name pattern is now a raw string — "\\S" in a plain
        string literal relies on unknown escapes passing through, which is
        deprecated and an error in newer Pythons.
        """
        kicked = gen.fs_filter_active_sort_load(
            {"sample": "TTJetsSignal",
             "analyzer": "ttbarPhotonMergerSingleCall",
             "name": re.compile(r"\S*Kicked")}
        )
        whizard = gen.fs_filter_active_sort_load(
            {"sample": "whiz2to5",
             "analyzer": "photonsSignalMEanalyzer"}
        )

        zipped = itertools.izip(kicked, whizard)
        # remove first bin
        zipped = (gen.callback(z, lambda x: x.histo.SetBinContent(1, 0.))
                  for z in zipped)
        zipped = (gen.apply_histo_linecolor(z) for z in zipped)
        zipped = (gen.apply_histo_linewidth(z) for z in zipped)
        zipped = list(list(z) for z in zipped)  # load all to memory

        if not (zipped and zipped[0]):
            self.message("WARNING Histograms not found!! Quitting..")
            return

        zipped[0][0].legend = "removed (madgraph)"
        zipped[0][1].legend = "tt#gamma (whizard)"

        def save_canvas(wrps, postfix):
            # Draw ratio canvas, save it, then save a log-scale version.
            canvas = gen.canvas(
                wrps,
                [rnd.BottomPlotRatio, rnd.LegendRight, com.SimpleTitleBox]
            )
            canvas = gen.save(
                canvas,
                lambda c: self.plot_output_dir + c.name + postfix
            )
            canvas = gen.switch_log_scale(canvas)
            canvas = gen.save(
                canvas,
                lambda c: self.plot_output_dir + c.name + postfix + "_log"
            )
            gen.consume_n_count(canvas)

        # norm to lumi and save
        save_canvas(
            (gen.gen_norm_to_lumi(z) for z in zipped),
            "_lumi"
        )
    def configure(self):
        """Configure the sihih template fit (real vs. fake MC templates)."""
        super(TemplateFitToolSihih, self).configure()
        # alternative fitter, currently disabled:
        # self.fitter = FractionFitter()
        self.fitbox_bounds = (0.63, 0.93, 0.60)
        template_select = {
            "analyzer": ("TemplateSihihreal", "TemplateSihihfake"),
        }
        self.mc_tmplts = gen.filter(
            settings.post_proc_dict["TemplateStacks"], template_select)
        self.fitted = gen.fs_filter_active_sort_load({
            "analyzer": "TemplateSihih",
            "is_data": True,
        })
# ---- Example #9 ----
 def configure(self):
     """Load MC sihih histograms merged and scaled to the data lumi."""
     data_lumi = settings.data_lumi_sum_wrp()

     def _merge_scaled(analyzer, name):
         # Merge all MC contributions, then scale to data luminosity.
         merged = gen.op.merge(
             gen.fs_filter_active_sort_load({
                 "analyzer": analyzer,
                 "name": name,
                 "is_data": False,
             })
         )
         return gen.op.prod((merged, data_lumi))

     # sihih in the barrel, signal selection
     self.data_sihih = _merge_scaled("dataTemplateFitHistoSihih", "sihihEB")
     # sihih with inverted charged-hadron-iso cut (background enriched)
     self.data_sihih_bkg = _merge_scaled("Nm1PlotSihihChHadIsoInv", "histo")
# ---- Example #10 ----
    def run(self):
        """Build the random-cone signal template input.

        Loads the summed data random-cone distribution, scales it to the
        MC expectation for real photons, optionally applies a stored
        distribution reweighting and saves a control plot of the result.
        """
        # summed data random-cone histogram, rebinned for the fit
        wrp = next(rebin_chhadiso(
            gen.gen_sum(
                [gen.fs_filter_active_sort_load({
                    "analyzer"  : "TemplateRandConereal",
                    "is_data"   : True
                })]
            )
        ))
        # normalize to mc expectation:
        # integral of the lumi-scaled real-photon template stack
        integral_real = next(
            gen.gen_integral(
                gen.gen_norm_to_data_lumi(
                    gen.filter(
                        settings.post_proc_dict["TemplateStacks"],
                        {"analyzer": "TemplateRandConereal"}
                    )
                )
            )
        )
        print integral_real
        # norm to unit area, then scale by the MC integral
        wrp = gen.op.prod((
            gen.op.norm_to_integral(wrp),
            integral_real
        ))

        # multiply with weight
        if do_dist_reweighting:
            wrp = gen.op.prod((
                settings.post_proc_dict["TemplateFitToolRandConeIsoInputSigWeight"],
                wrp,
            ))

        wrp.lumi = settings.data_lumi_sum()
        self.result = [wrp]
        # save a control plot of the template
        gen.consume_n_count(
            gen.save(
                gen.canvas((self.result,)),
                lambda c: self.plot_output_dir + c.name
            )
        )
# ---- Example #11 ----
    def set_up_stacking(self):
        """Stack lumi-normalized ChHadIso MC templates with purity info."""
        # load, rebin and lumi-normalize all MC templates, then group them
        mc_tmplts = rebin_chhadiso(
            gen.fs_filter_active_sort_load({
                "analyzer": all_analyzers,
            })
        )
        mc_tmplts = gen.gen_norm_to_lumi(mc_tmplts)
        mc_tmplts = gen.group(mc_tmplts)

        def stack_with_purity_info(grps):
            # Stack each group and attach its ttgamma photon counts.
            for grp in grps:
                sub_tot_list = [0., 0.]
                counted = list(com.count_ttgamma_photons(grp, sub_tot_list))
                stack = gen.mc_stack((counted,)).next()
                stack.sub_tot_list = sub_tot_list
                yield stack

        mc_tmplts = list(stack_with_purity_info(mc_tmplts))

        # only the plotting analyzers go into the result / stream
        mc_tmplts_plot = list(
            gen.filter(mc_tmplts, {"analyzer": analyzers_mc})
        )
        self.result = mc_tmplts_plot
        self.stream_stack = mc_tmplts_plot
# ---- Example #12 ----
    def run(self):
        """Count real and fake photons in data via the sihih sideband.

        Splits the data sihih distribution at 0.011, normalizes a
        background template taken from the inverted charged-hadron-iso
        sideband in the region above 0.011, corrects for the signal
        contamination of that sideband, and extracts the number of
        correctly / wrongly identified photons in the signal region.
        Results are accumulated on ``self.result``.
        """
        self.configure()

        # store result
        # (deep copy so the counts from the previous tool are not mutated)
        self.result = copy.deepcopy(
            settings.post_proc_dict["RealTightIdPurityCount"]
        )
        r = self.result

        # fetch all histograms
        data_sihih          = self.data_sihih
        data_sihih_bkg      = self.data_sihih_bkg
        mc_chhadiso_real    =  gen.op.merge(
            gen.fs_filter_active_sort_load({
                "analyzer"  : "realNm2PlotchargedHadronIsoEB",
                "name"      : "histo",
            })
        )
        mc_chhadiso_fake    = gen.op.merge(
            gen.fs_filter_active_sort_load({
                "analyzer"  : "fakeNm2PlotchargedHadronIsoEB",
                "name"      : "histo",
            })
        )

        # throw away wrappers
        # (work on the bare ROOT histograms from here on)
        data_sihih          = data_sihih.histo
        data_sihih_bkg      = data_sihih_bkg.histo
        mc_chhadiso_real    = mc_chhadiso_real.histo
        mc_chhadiso_fake    = mc_chhadiso_fake.histo

        # get bin numbers (c: chhadiso, s: sihih)
        b_c_low     = mc_chhadiso_real.FindBin(2.)
        b_c_high    = mc_chhadiso_real.FindBin(6.)
        b_s_high    = data_sihih.GetNbinsX()
        b_s_011p    = data_sihih.FindBin(0.011001) # bin above 0.011
        b_s_011m    = b_s_011p - 1
        b_s_low     = 1

        # signal contamination in chhadiso sideband region
        # (data_sihih_bkg is filled from this region)
        r.chhad_sb_real = mc_chhadiso_real.Integral(b_c_low, b_c_high)
        r.chhad_sb_fake = mc_chhadiso_fake.Integral(b_c_low, b_c_high)
        r.sb_sig_cont   = r.chhad_sb_real / (r.chhad_sb_real + r.chhad_sb_fake)

        # sihih integrals, lower and higher than 0.011
        r.data_gt_011 = data_sihih.Integral(b_s_011p, b_s_high)
        r.data_lt_011 = data_sihih.Integral(b_s_low, b_s_011m)
        r.bkg_gt_011  = data_sihih_bkg.Integral(b_s_011p, b_s_high)
        r.bkg_lt_011  = data_sihih_bkg.Integral(b_s_low, b_s_011m)

        # background normalization factor from sihih sideband
        r.bkg_norm    = r.data_gt_011 / r.bkg_gt_011

        # number of misidentified photons in signal region
        # (Poisson error on the raw count, scaled like the count itself)
        r.n_bkg       = r.bkg_lt_011     * r.bkg_norm * (1 - r.sb_sig_cont)
        r.n_bkg_err   = r.bkg_lt_011**.5 * r.bkg_norm * (1 - r.sb_sig_cont)

        # number of correctly identified photons in signal region
        r.n_sig       = r.data_lt_011 - r.n_bkg
        r.n_sig_err   = (r.data_lt_011 + r.n_bkg_err)**.5

        # now correct for the non-ttgamma events with a real photon
        # (pur_ttgam presumably set by an earlier tool — see post_proc_dict)
        r.n_sig_ttgam = r.n_sig * r.pur_ttgam
        r.n_sig_ttgam_err = r.n_sig_err * r.pur_ttgam