def run(self):
        wrp = next(rebin_chhadiso(
            gen.gen_sum(
                [
                    gen.fs_filter_active_sort_load({
                        "analyzer"  : sb_anzlrs,
                        "is_data"   : True
                    })
                ]
            )
        ))
        # multiply with weight
        if do_dist_reweighting:
            wrp = gen.op.prod((
                settings.post_proc_dict["TemplateFitToolChHadIsoSbBkgInputBkgWeight"],
                wrp,
            ))
        wrp.lumi = settings.data_lumi_sum()

        self.result = [wrp]
        gen.consume_n_count(
            gen.save(
                gen.canvas((self.result,)),
                lambda c: self.plot_output_dir + c.name
            )
        )
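
# The example above shows the recurring pattern in this listing: a chain of
# lazy gen.* generator steps that ends in gen.consume_n_count, which pulls the
# whole stream through. A minimal sketch of that terminal step, assuming (as
# the test cases below suggest) that it simply exhausts the iterable and
# returns how many wrappers passed through:
def consume_n_count_sketch(iterable):
    count = 0
    for _ in iterable:
        count += 1
    return count
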
 def run(self):
         top_sample = next(s for s in settings.active_samples if s[:2] == "TT")
         wrp_fake = next(rebin_chhadiso(gen.fs_filter_sort_load({
             "analyzer": "TemplateChHadIsoreal",
             "sample":   top_sample,#"TTMadG",
         })))
         wrp_sb = gen.op.merge(rebin_chhadiso(gen.fs_filter_sort_load({
             "analyzer": "TemplateRandConereal",
             "sample": top_sample,#"TTMadG",
         })))
         wrp = gen.op.div((
             gen.op.norm_to_integral(wrp_fake),
             gen.op.norm_to_integral(wrp_sb),
         ))
         wrp.lumi = 1.
         wrp.draw_option = "E1"
         self.result = wrp
         cnvs = list(gen.canvas(((wrp,),)))
         cnvs[0].canvas.SetGridy(1)
         gen.consume_n_count(
             gen.save(
                 cnvs,
                 lambda c: self.plot_output_dir + c.name
             )
         )
         del wrp.draw_option
 def store_results(self):
     cnv = itertools.chain(
         gen.canvas(
             [self.plot_chi2] + self.plots_truth_vs_fitted,
             [rnd.Legend(None, True, y_pos=0.25)]
         ),
         gen.canvas(self.plots_diffs)
     )
     cnv = gen.save(cnv, lambda c: self.plot_output_dir + c.name)
     gen.consume_n_count(cnv)
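
# store_results above uses itertools.chain only to concatenate the two canvas
# generators, so one gen.save / gen.consume_n_count pass handles both groups.
# A plain-Python illustration of that stdlib behaviour:
import itertools
assert list(itertools.chain(iter([1, 2]), iter([3, 4]))) == [1, 2, 3, 4]
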
    def test_gen_filter(self):
        import re

        aliases = list(gen.fs_content())
        data = gen.filter(aliases, {"is_data": True})
        tmplt = gen.filter(aliases, {"analyzer": "fakeTemplate"})
        crtlplt = gen.filter(aliases, {"analyzer": re.compile("CrtlFilt*")})
        crtlplt2 = gen.filter(aliases, {"analyzer": [re.compile("CrtlFilt*")]})
        ttgam_cf = gen.filter(aliases, {"name": "cutflow", "sample": ["ttgamma", "tt"]})
        self.assertEqual(gen.consume_n_count(data), 52)
        self.assertEqual(gen.consume_n_count(tmplt), 9)
        self.assertEqual(gen.consume_n_count(crtlplt), 39)
        self.assertEqual(gen.consume_n_count(crtlplt2), 39)
        self.assertEqual(gen.consume_n_count(ttgam_cf), 2)
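
# The test above exercises gen.filter with plain strings, booleans, compiled
# regexes and lists of values. A rough sketch of the matching rule this implies
# (an assumption inferred from the test, not the framework's actual code): each
# key names a wrapper attribute, and a wrapper passes if every attribute
# matches one of the given alternatives.
def filter_sketch(wrps, key_dict):
    def matches(value, pattern):
        if isinstance(pattern, list):
            return any(matches(value, p) for p in pattern)
        if hasattr(pattern, "search"):   # compiled regular expression
            return bool(pattern.search(value))
        return value == pattern          # plain equality (strings, booleans)
    for wrp in wrps:
        if all(matches(getattr(wrp, k), v) for k, v in key_dict.items()):
            yield wrp
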
 def save_canvas(wrps, postfix):
     canvas = gen.canvas(
         wrps,
         [rnd.BottomPlotRatio, rnd.Legend, com.SimpleTitleBox]
     )
     canvas = gen.save(
         canvas,
         lambda c: self.plot_output_dir + c.name + postfix
     )
     canvas = gen.switch_log_scale(canvas)
     canvas = gen.save(
         canvas,
         lambda c: self.plot_output_dir + c.name + postfix + "_log"
     )
     gen.consume_n_count(canvas)
    def test_gen_save(self):
        wrps = gen.fs_filter_sort_load({"name": "cutflow", "sample": ["zjets", "ttgamma"]})

        # create dir and go
        if not os.path.exists("test"):
            os.mkdir("test")
        gen.consume_n_count(gen.save(wrps, lambda w: "test/" + w.name + "_" + w.sample))

        # check the new files
        self.assertTrue(os.path.exists("test/cutflow_ttgamma.root"))
        self.assertTrue(os.path.exists("test/cutflow_ttgamma.info"))
        self.assertTrue(os.path.exists("test/cutflow_zjets.root"))
        self.assertTrue(os.path.exists("test/cutflow_zjets.info"))
        self.tfile = TFile.Open("test/cutflow_ttgamma.root")
        self.assertTrue(self.tfile.GetKey("histo"))
 def run(self):
     self.result = settings.post_proc_dict["XsecCalculatorChHadIsoSBID"]
     for quantity in ["R", "R_fid", "xsec"]:
         self.calc_variation(quantity)
         histo = wrappers.HistoWrapper(
             util.list2histogram(
                 self.values,
                 "PDF_uncert_distr_" + quantity,
                 ";#Delta("+quantity+");CTEQ61 PDF eigenvector evaluation",
                 60
             )
         )
         del self.values[:]
         cnv = gen.canvas([[histo]])
         cnv = gen.save(cnv, lambda c: self.plot_output_dir + c.name)
         gen.consume_n_count(cnv)
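
# util.list2histogram above turns the list of per-eigenvector variations into a
# 1D histogram for HistoWrapper to wrap. A hedged sketch of what such a helper
# might do, assuming plain ROOT TH1D booking over the value range (the actual
# binning and implementation may differ):
from ROOT import TH1D

def list2histogram_sketch(values, name, title, n_bins):
    histo = TH1D(name, title, n_bins, min(values), max(values))
    for value in values:
        histo.Fill(value)
    return histo
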
    def run(self):
        """
        Load, stack, print and save histograms in a stream.
        """
        # combined operation for loading, filtering, stacking, etc..
        # the output looks like: [(stack1, data1), (stack2, data2), ...]
        stream_stack_n_data = gen.fs_mc_stack_n_data_sum({
            "name": "histo",
            "analyzer": ["CrtlFiltEt", "CrtlFiltEta"]
        })

        # plot (stack, data) pairs into canvases, with legend
        stream_canvas = gen.canvas(stream_stack_n_data,
                                   [cmstoolsac3b.rendering.Legend])

        # store into dir of this tool
        stream_canvas = gen.save(
            stream_canvas,
            lambda wrp: self.plot_output_dir + wrp.name,  # this function returns a path without postfix
            settings.rootfile_postfixes
        )

        # pull everything through the stream
        count = gen.consume_n_count(stream_canvas)

        # make a nice statement
        self.message("INFO: " + self.name + " produced " + count +
                     " canvases.")
    def run(self):
        """
        Load, stack, print and save histograms in a stream.
        """
        # combined operation for loading, filtering, stacking, etc..
        # the output looks like: [(stack1, data1), (stack2, data2), ...]
        stream_stack_n_data = gen.fs_mc_stack_n_data_sum(
            self.histo_filter_dict
        )

        # can be saved for later use.
        if self.store_stack_and_data_in_pool:
            stream_stack_n_data = self.store_to_pool(stream_stack_n_data)

        # plot (stack, data) pairs into canvases, with decorators
        stream_canvas = gen.canvas(
            stream_stack_n_data,
            self.canvas_decorators
        )

        # store into dir of this tool
        stream_canvas = gen.save(
            stream_canvas,
            lambda wrp: self.plot_output_dir + wrp.analyzer
        )

        # pull everything through the stream
        count = gen.consume_n_count(stream_canvas)

        # make a nice statement
        self.message("INFO: "+self.name+" produced "+count+" canvases.")
        def run(self):
            wrp = tmpl_fit.get_merged_sbbkg_histo(sample)

            # multiply with weight
            if tmpl_fit.do_dist_reweighting:
                wrp = gen.op.prod((
                    settings.post_proc_dict["TemplateFitToolChHadIsoSbBkgInputBkgWeight"],
                    wrp,
                ))

            wrps = gen.gen_norm_to_data_lumi((wrp,))
            wrps = list(wrps)
            self.result = wrps
            gen.consume_n_count(
                gen.save(
                    gen.canvas((wrps,)),
                    lambda c: self.plot_output_dir + c.name
                )
            )
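
# gen.gen_norm_to_data_lumi, used above and in other examples, is assumed to
# rescale each wrapper to the recorded data luminosity, consistent with the
# snippets in this listing that set wrp.lumi = settings.data_lumi_sum() by
# hand. A sketch under that assumption (the .histo attribute on the wrapper is
# an assumption as well):
def gen_norm_to_data_lumi_sketch(wrps):
    data_lumi = settings.data_lumi_sum()
    for wrp in wrps:
        wrp.histo.Scale(data_lumi / wrp.lumi)
        wrp.lumi = data_lumi
        yield wrp
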
    def run(self):
        wrp = next(rebin_chhadiso(
            gen.gen_sum(
                [gen.fs_filter_active_sort_load({
                    "analyzer"  : "TemplateRandConereal",
                    "is_data"   : True
                })]
            )
        ))
        # normalize to mc expectation
        integral_real = next(
            gen.gen_integral(
                gen.gen_norm_to_data_lumi(
                    gen.filter(
                        settings.post_proc_dict["TemplateStacks"],
                        {"analyzer": "TemplateRandConereal"}
                    )
                )
            )
        )
        print(integral_real)
        wrp = gen.op.prod((
            gen.op.norm_to_integral(wrp),
            integral_real
        ))

        # multiply with weight
        if do_dist_reweighting:
            wrp = gen.op.prod((
                settings.post_proc_dict["TemplateFitToolRandConeIsoInputSigWeight"],
                wrp,
            ))

        wrp.lumi = settings.data_lumi_sum()
        self.result = [wrp]
        gen.consume_n_count(
            gen.save(
                gen.canvas((self.result,)),
                lambda c: self.plot_output_dir + c.name
            )
        )
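
# In the example above, gen.op.norm_to_integral followed by gen.op.prod with
# integral_real scales the data-driven template to the yield expected from
# simulation. The same arithmetic expressed directly on a ROOT TH1 (a sketch
# with hypothetical names, not the framework's operation code):
def scale_to_mc_expectation_sketch(histo, integral_real):
    histo.Scale(integral_real / histo.Integral())
    return histo
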
 def do_evaluation(self, wrp_list, maker_func=None):
     if not maker_func:
         maker_func = self.make_histo_1d
     self.wrp_dict = dict((w.histo_key, w) for w in wrp_list)
     min_wrp = min(wrp_list, key=lambda w: w.float)
     histo_in_s = maker_func(min_wrp, "sieie")
     histo_in_p = maker_func(min_wrp, "phoiso")
     histo_in_n = maker_func(min_wrp, "neuiso")
     histo_in_s.min_token = min_wrp.histo_key
     histo_in_p.min_token = min_wrp.histo_key
     histo_in_n.min_token = min_wrp.histo_key
     self.result = [
         histo_in_s,
         histo_in_p,
         histo_in_n,
     ]
     gen.consume_n_count(
         gen.save(
             gen.canvas((h,) for h in self.result),
             lambda c: self.plot_output_dir + c.name
         )
     )
        def run(self):
            wrps = tmpl_fit.rebin_chhadiso(gen.fs_filter_sort_load({
                "analyzer": "PlotSBID",
                "sample": sample,
            }))
            wrp = gen.op.merge(wrps)

            # multiply with weight
            if tmpl_fit.do_dist_reweighting:
                wrp = gen.op.prod((
                    settings.post_proc_dict["TemplateFitToolChHadIsoSBIDInputBkgWeight"],
                    wrp,
                ))

            wrps = gen.gen_norm_to_data_lumi((wrp,))
            wrps = list(wrps)
            self.result = wrps
            gen.consume_n_count(
                gen.save(
                    gen.canvas((wrps,)),
                    lambda c: self.plot_output_dir + c.name
                )
            )
    def test_gen_special_treat(self):
        sample = ["tt", "zjets"]
        name = "cutflow"

        class TreatCls(object):
            def __init__(self, test):
                self.test = test
                self.n_times_called = 0

            def __call__(self, alias):
                self.n_times_called += 1
                self.test.assertTrue(alias.sample in sample)
                self.test.assertEqual(alias.name, name)

        treat_func = TreatCls(self)
        treated = gen.callback(gen.fs_content(), treat_func, {"sample": sample, "name": name})
        self.assertEqual(gen.consume_n_count(treated), 150)
        self.assertEqual(treat_func.n_times_called, 2)
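
# The test above pins down the behaviour of gen.callback: all 150 aliases are
# passed through unchanged, and the callable is invoked only for the 2 aliases
# that match the filter dict. A sketch of that behaviour (an assumption about
# the implementation, covering only the exact-value and list-of-values cases
# the test uses):
def callback_sketch(wrps, func, filter_dict):
    for wrp in wrps:
        if all(getattr(wrp, k) in (v if isinstance(v, list) else [v])
               for k, v in filter_dict.items()):
            func(wrp)
        yield wrp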