Ejemplo n.º 1
0
    def test_declination_sensitivity(self):
        """Check 'fixed_weight' minimisation reproduces reference fits.

        For each configured energy PDF, unblind with a fixed-energy
        likelihood and compare every fitted parameter against the stored
        reference values to within 0.1.
        """

        logging.info("Testing 'fixed_weight' MinimisationHandler class")

        for pdf_idx, energy_pdf in enumerate(energy_pdfs):

            # Fixed-energy likelihood with steady signal/background time
            # PDFs; only the energy PDF varies between iterations.
            llh_conf = {
                "llh_name": "fixed_energy",
                "llh_sig_time_pdf": {"time_pdf_name": "steady"},
                "llh_bkg_time_pdf": {"time_pdf_name": "steady"},
                "llh_energy_pdf": energy_pdf,
            }

            unblind_conf = {
                "name": "tests/test_energy_pdfs/",
                "mh_name": "fixed_weights",
                "dataset": icecube_ps_3_year.get_seasons(
                    'IC79-2010', 'IC86-2011'),
                "catalogue": catalogue,
                "llh_dict": llh_conf,
            }

            unblinder = create_unblinder(unblind_conf)
            # The result dict holds one non-"TS" entry with the fit output.
            fit_key = next(k for k in unblinder.res_dict if k != "TS")
            fit_res = unblinder.res_dict[fit_key]

            logging.info("Best fit values {0}".format(list(fit_res["x"])))
            logging.info("Reference best fit {0}".format(
                true_parameters[pdf_idx]))

            for par_idx, best_fit in enumerate(list(fit_res["x"])):
                self.assertAlmostEqual(
                    best_fit, true_parameters[pdf_idx][par_idx], delta=0.1)
Ejemplo n.º 2
0
    def test_declination_sensitivity(self):
        """Check the 'standard' LLH reproduces reference fits per declination."""

        logging.info("Testing 'standard' LLH class")

        # Test three declinations

        for dec_idx, sindec in enumerate(sindecs):

            # One unblinding per declination; the catalogue path encodes
            # the sin(dec) value.
            cfg = {
                "name": "tests/test_llh_standard/",
                "mh_name": "fixed_weights",
                "dataset": icecube_ps_3_year.get_seasons("IC86-2011"),
                "catalogue": ps_catalogue_name(sindec),
                "llh_dict": llh_dict,
            }

            unblinder = create_unblinder(cfg)
            # Single non-"TS" key carries the fit result.
            fit_key = next(k for k in unblinder.res_dict if k != "TS")
            fit_res = unblinder.res_dict[fit_key]

            logging.info("Best fit values {0}".format(list(fit_res["x"])))
            logging.info("Reference best fit {0}".format(
                true_parameters[dec_idx]))

            for par_idx, best_fit in enumerate(fit_res["x"]):
                self.assertAlmostEqual(
                    best_fit, true_parameters[dec_idx][par_idx], delta=0.1)
    def test_declination_sensitivity(self):
        """Check the 'large_catalogue' handler on a stacked-source unblinding."""

        # NOTE(review): the message mentions IC40 data, but the dataset
        # below is IC79-2010 — confirm which is intended.
        logging.info("Testing 'large_catalogue' MinimisationHandler class "
                     "with {0} sources and IC40 data".format(n_sources))

        # Test stacking

        stacking_cfg = {
            "name": "test/test_large_catalogue/",
            "mh_name": "large_catalogue",
            "dataset": icecube_ps_3_year.get_seasons("IC79-2010"),
            "catalogue": catalogue,
            "llh_dict": llh_dict,
            "inj_dict": {},
        }

        unblinder = create_unblinder(stacking_cfg)
        # The result dict holds one non-"TS" entry with the fit output.
        fit_key = next(k for k in unblinder.res_dict if k != "TS")
        fit_res = unblinder.res_dict[fit_key]

        logging.info("Best fit values {0}".format(list(fit_res["x"])))
        logging.info("Reference best fit {0}".format(true_parameters[0]))

        for par_idx, best_fit in enumerate(fit_res["x"]):
            self.assertAlmostEqual(
                best_fit, true_parameters[0][par_idx], delta=0.1)
Ejemplo n.º 4
0
    def test_declination_sensitivity(self):
        """Check 'fit_weights' fits, then run a short simulated trial chain.

        First unblinds with the fit-weights handler and compares fitted
        parameters to the stored references; then builds an injection
        configuration, runs a single simulated trial, and produces a
        corner-style likelihood scan of the result.
        """

        logging.info("Testing 'fit_weight' MinimisationHandler class")

        mh_name = "fit_weights"

        # Test three declinations

        cfg = {
            "name": "tests/test_mh_fit_weights",
            "mh_name": mh_name,
            "dataset": icecube_ps_3_year.get_seasons("IC86-2011"),
            "catalogue": catalogue,
            "llh_dict": llh_dict,
        }

        unblinder = create_unblinder(cfg)
        # Single non-"TS" key carries the fit result.
        fit_key = next(k for k in unblinder.res_dict if k != "TS")
        fit_res = unblinder.res_dict[fit_key]

        logging.info("Best fit values {0}".format(list(fit_res["x"])))
        logging.info("Reference best fit {0}".format(true_parameters))

        for par_idx, best_fit in enumerate(fit_res["x"]):
            self.assertAlmostEqual(
                best_fit, true_parameters[par_idx], delta=0.1)

        # Steady-time E^-2 power-law injection for the simulated trial.
        injection_cfg = {
            "injection_sig_time_pdf": {"time_pdf_name": "steady"},
            "injection_bkg_time_pdf": {"time_pdf_name": "steady"},
            "injection_energy_pdf": {
                "energy_pdf_name": "power_law", "gamma": 2.0},
        }

        trial_cfg = dict(cfg)
        trial_cfg["inj_dict"] = injection_cfg
        trial_cfg["n_trials"] = 1.0
        trial_cfg["n_steps"] = 3.0
        trial_cfg["scale"] = 5.0

        minimiser = MinimisationHandler.create(trial_cfg)
        sim_res = minimiser.simulate_and_run(5.0)
        analyse(trial_cfg, cluster=False)

        minimiser.corner_likelihood_scan(
            save=True,
            res_dict=sim_res,
        )
    "injection_energy_pdf": injection_energy,
    "injection_time_pdf": injection_time,
}

# The likelihood uses the same energy PDF as the injection.
llh_energy = injection_energy

llh_dict = {
    "name": "standard",
    "llh_energy_pdf": llh_energy,
    "llh_time_pdf": llh_time,
}

# Four evenly spaced sin(dec) values from 0.75 down to 0.0.
sindecs = np.linspace(0.75, 0.0, 4)

datasets = [
    # Fix: label previously read "IceCube (One Year" with an unbalanced
    # parenthesis (compare "Simcube (One year)" below).
    ("IceCube (One Year)", icecube_ps_3_year.get_seasons("IC86-2012")),
    # ("Simcube (One year)", simcube_dataset.get_seasons()),
]

# plt.figure()
# ax1 = plt.subplot2grid((4, 1), (0, 0), colspan=3, rowspan=3)
# ax2 = plt.subplot2grid((4, 1), (3, 0), colspan=3, rowspan=1, sharex=ax1)
# refs = reference_7year_discovery_potential(sindecs, injection_gamma)
#
# ax1.plot(sindecs, refs, label=r"7-year Point Source analysis", color="k")

for i, (label, dataset) in enumerate(datasets):

    for sindec in sindecs:

        cat_path = ps_catalogue_name(sindec)
Ejemplo n.º 6
0
        sim_true_e = interp1d(fluence_cumulative, log_e_range)

        true_e_vals = np.array(
            [10**sim_true_e(random.random()) for _ in range(n_sim)])

        new_events["logE"] = self.energy_proxy_map(true_e_vals)

        new_events["sigma"] = self.angular_res_f(new_events["logE"]).copy()
        new_events["raw_sigma"] = new_events["sigma"].copy()

        return new_events


simcube_dataset = SimDataset()

for (name, season) in icecube_ps_3_year.get_seasons().items():

    def ideal_energy_proxy(e):
        return np.log10(e)

    def wrapper_f(bkg_time_pdf_dict,
                  bkg_flux_model,
                  energy_proxy_map=None,
                  sim_name=None,
                  **kwargs):

        if np.logical_and(energy_proxy_map is None, sim_name is None):
            energy_proxy_map = ideal_energy_proxy
            sim_name = "default"

        if np.logical_and(energy_proxy_map != ideal_energy_proxy,
Ejemplo n.º 7
0
# Likelihood settings used when unblinding the flare search.
unblind_llh = dict(
    llh_name="standard",
    llh_sig_time_pdf=llh_time,
    llh_bkg_time_pdf=dict(time_pdf_name="steady"),
    llh_energy_pdf=llh_energy,
)

# Point-source catalogue at sin(dec) = 0.5.
cat_path = ps_catalogue_name(0.5)

# Flare-search unblinding configuration (IC86-2011 season only).
unblind_dict = dict(
    name="tests/test_flare_search/",
    mh_name="flare",
    dataset=icecube_ps_3_year.get_seasons("IC86-2011"),
    catalogue=cat_path,
    llh_dict=unblind_llh,
)

# Inspecting the neutrino lightcurve for this fixed-seed scramble confirms
# that the most significant flare is in a 14 day window. The best-fit
# parameters are shown below. As both the scrambling and fitting is
# deterministic, these values should be returned every time this test is run.

true_parameters = [
    2.455898386344462,
    3.764204148466931,
    55761.7435891,
    55764.59807937,
    2.8544902700014063,
Ejemplo n.º 8
0
    def test_full_chain(self):
        """Exercise the full analysis chain end to end.

        For two injection spectral indices, runs trials, builds results
        handlers (twice, to cover the already-combined path), unblinds
        with and without a background TS, and re-runs the minimisation
        handler directly.
        """

        logging.info("Testing MinimisationHandler analysis chain")

        base_name = "tests/test_analysis_chain"

        try:
            for gamma_idx, gamma in enumerate([2.0, 2.5]):
                # Initialise Injectors/LLHs

                injection_cfg = {
                    "injection_sig_time_pdf": {"time_pdf_name": "steady"},
                    "injection_energy_pdf": {
                        "energy_pdf_name": "power_law",
                        "gamma": gamma,
                    },
                }

                llh_cfg = {
                    "llh_name": "standard",
                    "llh_sig_time_pdf": {"time_pdf_name": "steady"},
                    "llh_bkg_time_pdf": {"time_pdf_name": "steady"},
                    "llh_energy_pdf": {"energy_pdf_name": "power_law"},
                }

                # Test three declinations

                mh_dict = {
                    "name": f"{base_name}/{gamma}/",
                    "mh_name": "fixed_weights",
                    "dataset": icecube_ps_3_year.get_seasons("IC86-2011"),
                    "catalogue": catalogue,
                    "inj_dict": injection_cfg,
                    "llh_dict": llh_cfg,
                    "n_steps": 5,
                    "n_trials": 10,
                    # Harder spectrum gets a much larger injection scale.
                    "scale": (3.0, 500.0)[gamma_idx],
                }

                analyse(mh_dict, n_cpu=24, cluster=False)

                rh = ResultsHandler(mh_dict)

                # Deliberately test a second time, to see performance once
                # results have been combined
                rh = ResultsHandler(mh_dict)

            # Reuse the last loop iteration's config for the unblinding tests.
            ub_dict = dict(mh_dict)

            # Test without background TS
            ub = create_unblinder(ub_dict, full_plots=True)

            # Test with background TS
            ub_dict["background_ts"] = base_name
            ub = create_unblinder(ub_dict, full_plots=True, scan_2d=True)

            mh = MinimisationHandler.create(mh_dict)
            mh.iterate_run(scale=1.0, n_steps=3, n_trials=1)

        except OverfluctuationError:
            # An over-fluctuating trial is an accepted stochastic outcome
            # of this chain, not a test failure.
            pass