Example 1
    def calculate_single_source(self, source, scale):
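        """Calculates the expected number of neutrinos injected for a single
        source at a given flux scale, as the product of the source fluence
        and the energy-integrated effective area.

        :param source: Source to be calculated
        :param scale: Flux scale
        :return: Expected number of injected neutrinos
        """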

        # Calculate the effective injection time for simulation. Equal to
        # the overlap between the season and the injection time PDF for
        # the source, scaled if the injection PDF is not uniform in time.
        eff_inj_time = self.sig_time_pdf.effective_injection_time(source)

        # All injection fluxes are given in terms of k, equal to 1e-9
        inj_flux = k_to_flux(source["injection_weight_modifier"] * scale)

        # Fraction of the total flux allocated to the given source, assuming
        # standard candles with flux proportional to 1/d^2, multiplied by
        # the source's weight.

        weight = calculate_source_weight(source) / self.weight_scale

        # Calculate the fluence, using the effective injection time.
        fluence = inj_flux * eff_inj_time * weight

        def source_eff_area(e):
            return self.effective_area_f(np.log10(e), np.sin(
                source["dec_rad"])) * self.energy_pdf.f(e)

        int_eff_a = self.energy_pdf.integrate_over_E(source_eff_area)

        # Effective areas are given in m^2, but the flux is per cm^2

        int_eff_a *= 10**4

        n_inj = fluence * int_eff_a

        return n_inj
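
For orientation, the value returned above is just the product of the per-source fluence and the energy-integrated effective area. A minimal standalone sketch of that arithmetic follows; all numbers are illustrative stand-ins, not outputs of the method.

# Illustrative stand-in numbers only.
eff_inj_time = 100. * 86400.   # effective injection time in seconds (100 days)
inj_flux = 2.0e-9              # stand-in for k_to_flux(weight_modifier * scale)
weight = 0.25                  # this source's share of the total flux
int_eff_a = 300. * 10**4       # stand-in energy-integrated effective area, m^2 -> cm^2

fluence = inj_flux * eff_inj_time * weight
n_inj = fluence * int_eff_a    # expected number of injected events
print(n_inj)
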
Example 2
    def __init__(self, season, sources, **kwargs):

        kwargs = read_injector_dict(kwargs)
        self.inj_kwargs = kwargs

        logger.info("Initialising Injector for {0}".format(season.season_name))
        self.injection_band_mask = dict()
        self.season = season
        self.season.load_background_model()

        self.sources = sources

        if len(sources) > 0:
            self.weight_scale = calculate_source_weight(self.sources)

        try:
            self.sig_time_pdf = TimePDF.create(
                kwargs["injection_sig_time_pdf"], season.get_time_pdf()
            )
            # self.bkg_time_pdf = TimePDF.create(kwargs["injection_bkg_time_pdf"],
            #                                    season.get_time_pdf())
            self.energy_pdf = EnergyPDF.create(kwargs["injection_energy_pdf"])
            self.spatial_pdf = SpatialPDF(kwargs["injection_spatial_pdf"], season)
        except KeyError:
            raise Exception(
                "Injection Arguments missing. \n "
                "'injection_energy_pdf', 'injection_time_pdf',"
                "and 'injection_spatial_pdf' are required. \n"
                "Found: \n {0}".format(kwargs)
            )

        if "poisson_smear_bool" in list(kwargs.keys()):
            self.poisson_smear = kwargs["poisson_smear_bool"]
        else:
            self.poisson_smear = True

        self.n_exp = np.nan

        try:
            self.fixed_n = kwargs["fixed_n"]
        except KeyError:
            self.fixed_n = np.nan
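
The constructor requires the three PDF dictionaries named in the KeyError message above. A minimal sketch of the expected keyword arguments is shown below; the empty inner dictionaries are placeholders, since the fields they accept depend on the EnergyPDF, TimePDF and SpatialPDF implementations.

import numpy as np

inj_kwargs = {
    "injection_energy_pdf": {},    # placeholder for an EnergyPDF configuration
    "injection_sig_time_pdf": {},  # placeholder for a signal TimePDF configuration
    "injection_spatial_pdf": {},   # placeholder for a SpatialPDF configuration
    "poisson_smear_bool": True,    # optional, defaults to True
    "fixed_n": np.nan,             # optional, defaults to np.nan
}
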
Example 3
    def calculate_fluence(self, source, scale, source_mc, band_mask, omega):
        """Function to calculate the fluence for a given source, and multiply
        the oneweights by this. After this step, the oneweight sum is equal
        to the expected neutrino number.

        :param source: Source to be calculated
        :param scale: Flux scale
        :param source_mc: MC that is close to source
        :param band_mask: Closeness mask for MC
        :param omega: Solid angle covered by MC mask
        :return: Modified source MC
        """
        # Calculate the effective injection time for simulation. Equal to
        # the overlap between the season and the injection time PDF for
        # the source, scaled if the injection PDF is not uniform in time.
        eff_inj_time = self.sig_time_pdf.effective_injection_time(source)

        # All injection fluxes are given in terms of k, equal to 1e-9
        inj_flux = k_to_flux(source["injection_weight_modifier"] * scale)

        # Fraction of the total flux allocated to the given source, assuming
        # standard candles with flux proportional to 1/d^2, multiplied by
        # the source's weight.

        weight = calculate_source_weight(source) / self.weight_scale

        # Calculate the fluence, using the effective injection time.
        fluence = inj_flux * eff_inj_time * weight

        # Recalculate the oneweights to account for the declination
        # band and the relative distance of the sources.
        # Multiply by the fluence, so that the sum of the oneweights
        # gives n_inj, the expected number of injected events.

        source_mc["ow"] = fluence * self.mc_weights[band_mask] / omega

        return source_mc
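
As the docstring notes, after this rescaling the sum of the oneweights equals the expected number of injected events. The hypothetical helper below illustrates that relation; the injector, source, MC array, band mask and solid angle are assumed to come from the surrounding injection code.

import numpy as np

def expected_n_inj(injector, source, scale, source_mc, band_mask, omega):
    # Hypothetical helper: rescale the oneweights via calculate_fluence, then
    # sum them to obtain the expected number of injected events.
    source_mc = injector.calculate_fluence(source, scale, source_mc, band_mask, omega)
    return np.sum(source_mc["ow"])
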
Example 4
def estimate_discovery_potential(seasons,
                                 inj_dict,
                                 sources,
                                 llh_dict,
                                 raw_scale=1.0):
    """Function to estimate discovery potential given an injection model. It
    assumes an optimal LLH construction, i.e aligned time windows and correct
    energy weighting etc. Takes injectors and seasons.

    :param injectors: Injectors to be used
    :param sources: Sources to be evaluated
    :return: An estimate for the discovery potential
    """
    logging.info("Trying to guess scale using AsimovEstimator.")

    season_bkg = []
    season_sig = []

    def weight_f(n_s, n_bkg):
        metric = np.array(n_s)  #/np.sqrt(np.array(n_bkg))
        return 1.  #metric #/ np.mean(metric)#/ max(metric)

    def ts_weight(n_s):
        return 1.
        # return n_s / np.sum(n_s)

    # def weight_ts(ts, n_s)

    weight_scale = calculate_source_weight(sources)

    livetime = 0.

    n_s_tot = 0.
    n_tot = 0.
    n_tot_coincident = 0.

    all_ns = []
    all_nbkg = []

    all_ts = []
    all_bkg_ts = []

    final_ts = []

    new_n_s = 0.
    new_n_bkg = 0.

    for season in seasons.values():

        new_llh_dict = dict(llh_dict)
        new_llh_dict["llh_name"] = "fixed_energy"
        new_llh_dict["llh_energy_pdf"] = inj_dict["injection_energy_pdf"]
        llh = LLH.create(season, sources, new_llh_dict)

        data = season.get_background_model()
        n_tot += np.sum(data["weight"])
        livetime += llh.bkg_time_pdf.livetime * 60 * 60 * 24

        def signalness(sig_over_background):
            """Converts a signal over background ratio into a signal
            probability. This is ratio/(1 + ratio)

            :param sig_over_background: Ratio of signal to background
            probability
            :return: Probability of signal (between 0 and 1)
            """

            return sig_over_background / (1. + sig_over_background)

        n_sigs = []
        n_bkgs = []

        ts_vals = []
        bkg_vals = []
        n_s_season = 0.

        # n_exp = np.sum(inj.n_exp["n_exp"]) * raw_scale

        sig_times = np.array(
            [llh.sig_time_pdf.effective_injection_time(x) for x in sources])
        source_weights = np.array(
            [calculate_source_weight(x) for x in sources])
        mean_time = np.sum(sig_times * source_weights) / weight_scale

        # print(source_weights)

        fluences = np.array(
            [x * sig_times[i]
             for i, x in enumerate(source_weights)]) / weight_scale
        # print(sources.dtype.names)
        # print(sources["dec_rad"], np.sin(sources["dec_rad"]))
        # print(fluences)
        # input("?")
        res = np.histogram(np.sin(sources["dec_rad"]),
                           bins=season.sin_dec_bins,
                           weights=fluences)

        dummy_sources = []
        bounds = []
        n_eff_sources = []

        for i, w in enumerate(res[0]):
            if w > 0:
                lower = res[1][i]
                upper = res[1][i + 1]
                mid = np.mean([upper, lower])

                mask = np.logical_and(
                    np.sin(sources["dec_rad"]) > lower,
                    np.sin(sources["dec_rad"]) < upper)

                n_eff_sources.append(
                    (np.sum(fluences[mask])**2. / np.sum(fluences[mask]**2)))

                # print(n_eff_sources)
                # print(fluences[mask])
                #
                # tester = np.array([1.5, 1.5, 1.5])
                #
                #
                # print(np.sum(tester**2)/(np.mean(tester)**2.))
                # input("?")

                dummy_sources.append((np.arcsin(mid), res[0][i], 1., 1.,
                                      "dummy_{0}".format(mid)))
                bounds.append((lower, upper))

        dummy_sources = np.array(dummy_sources,
                                 dtype=np.dtype([("dec_rad", float),
                                                 ("base_weight", float),
                                                 ("distance_mpc", float),
                                                 ("injection_weight_modifier",
                                                  float),
                                                 ("source_name", str)]))
        inj = season.make_injector(dummy_sources, **inj_dict)

        for j, dummy_source in enumerate(dummy_sources):

            lower, upper = bounds[j]

            n_eff = n_eff_sources[j]

            source_mc = inj.calculate_single_source(dummy_source,
                                                    scale=raw_scale)

            if len(source_mc) == 0:
                logging.warning(
                    "No MC found for dummy source with declination between "
                    "{0:.3f} and {1:.3f} rad.".format(
                        np.arcsin(lower), np.arcsin(upper)))
                ts_vals.append(0.0)
                n_sigs.append(0.0)
                n_bkgs.append(0.0)
            else:
                # Gives the solid angle coverage of the sky for the band
                omega = 2. * np.pi * (upper - lower)

                data_mask = np.logical_and(
                    np.greater(data["dec"], np.arcsin(lower)),
                    np.less(data["dec"], np.arcsin(upper)))
                local_data = data[data_mask]

                data_weights = signalness(
                    llh.energy_weight_f(local_data)) * local_data["weight"]

                # print("source_mc", source_mc)

                mc_weights = signalness(llh.energy_weight_f(source_mc))

                true_errors = angular_distance(source_mc["ra"],
                                               source_mc["dec"],
                                               source_mc["trueRa"],
                                               source_mc["trueDec"])

                # median_sigma = weighted_quantile(
                #             true_errors, 0.5, source_mc["ow"] * mc_weights)

                median_sigma = np.mean(local_data["sigma"])

                area = np.pi * (2.0 * median_sigma)**2 / np.cos(
                    dummy_source["dec_rad"])

                local_rate = np.sum(data_weights)

                # n_bkg = local_rate * area  # * source_weight
                n_bkg = np.sum(local_data["weight"])

                n_tot_coincident += n_bkg

                ratio_time = livetime / mean_time

                sig_spatial = signalness(
                    (1. / (2. * np.pi * source_mc["sigma"] ** 2.) *
                     np.exp(-0.5 * ((true_errors / source_mc["sigma"]) ** 2.)))
                    / llh.spatial_pdf.background_spatial(source_mc))

                ra_steps = np.linspace(-np.pi, np.pi, 100)
                dec_steps = np.linspace(lower, upper, 10)

                mean_dec = np.mean(
                    signalness(
                        norm.pdf(dec_steps,
                                 scale=median_sigma /
                                 np.cos(dummy_source["dec_rad"]),
                                 loc=np.mean([lower, upper])) *
                        (upper - lower)))

                mean_ra = np.mean(
                    signalness(
                        norm.pdf(ra_steps, scale=median_sigma, loc=0.) * 2. *
                        np.pi))

                bkg_spatial = mean_dec * mean_ra  # * n_eff

                n_s_tot += np.sum(source_mc["ow"])
                n_s_season += np.sum(source_mc["ow"])

                med_sig = np.mean(sig_spatial * mc_weights) * signalness(
                    ratio_time) * np.sum(source_mc["ow"])
                med_bkg = np.mean(bkg_spatial * data_weights) * (
                    1. - signalness(ratio_time)) * n_bkg

                new_n_s += med_sig
                new_n_bkg += med_bkg

    scaler_ratio = new_n_s / n_s_tot

    scaler_ratio = new_n_bkg / n_tot_coincident

    print("Scaler Ratio", scaler_ratio)

    disc_count = norm.ppf(norm.cdf(5.0), loc=0.,
                          scale=np.sqrt(new_n_bkg))  # * scaler_ratio

    simple = 5. * np.sqrt(new_n_bkg)  # * scaler_ratio
    #
    # disc_count = simple

    # print(disc_count, simple, simple/disc_count, n_s_tot)
    #
    # print("testerer", new_n_s, new_n_bkg)
    #
    print("Disc count", disc_count, disc_count / scaler_ratio)
    scale = disc_count / new_n_s
    print(scale)

    # Convert from scale factor to flux units

    scale = k_to_flux(scale) * raw_scale

    logging.info(
        "Estimated Discovery Potential is: {:.3g} GeV sr^-1 s^-1 cm^-2".format(
            scale))

    return scale
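
The signalness helper defined inside the function above maps a signal-over-background ratio onto a probability via ratio / (1 + ratio). A standalone numeric check of that mapping:

def signalness(sig_over_background):
    # ratio / (1 + ratio): converts a likelihood ratio into a 0-1 probability
    return sig_over_background / (1. + sig_over_background)

print(signalness(3.0))   # 0.75: a 3:1 signal-to-background ratio is 75% signal-like
print(signalness(1.0))   # 0.5: equal signal and background probability
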
Example 5
def calculate_astronomy(flux, e_pdf_dict, catalogue):
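    """Calculates derived astrophysical quantities for a given flux
    normalisation, energy PDF and source catalogue, including the total
    energy flux, the flux from the nearest source, and the corresponding
    neutrino luminosity.

    :param flux: Flux normalisation, interpreted per GeV per cm^2 per s
    :param e_pdf_dict: Dictionary describing the energy PDF
    :param catalogue: Source catalogue
    :return: Dictionary of derived astronomy values
    """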

    flux /= (u.GeV * u.cm**2 * u.s)

    energy_PDF = EnergyPDF.create(e_pdf_dict)

    astro_res = dict()

    phi_integral = energy_PDF.flux_integral() * u.GeV

    e_integral = energy_PDF.fluence_integral() * u.GeV**2

    # Calculate fluence

    tot_fluence = (flux * e_integral)

    astro_res["Energy Flux (GeV cm^{-2} s^{-1})"] = tot_fluence.value

    logger.debug("Energy Flux:{0}".format(tot_fluence))

    src_1 = np.sort(catalogue, order="distance_mpc")[0]

    frac = calculate_source_weight(src_1) / calculate_source_weight(catalogue)

    si = flux * frac

    astro_res["Flux from nearest source"] = si

    logger.debug("Total flux: {0}".format(flux))
    logger.debug("Fraction from nearest source: {0}".format(frac))
    logger.debug("Flux from nearest source: {0}".format(flux * frac))

    lumdist = src_1["distance_mpc"] * u.Mpc

    area = (4 * math.pi * (lumdist.to(u.cm))**2)

    dNdA = (si * phi_integral).to(u.s**-1 * u.cm**-2)

    # int_dNdA += dNdA

    N = dNdA * area

    logger.debug("There would be {:.3g} neutrinos emitted.".format(N))
    logger.debug(
        "The energy range was assumed to be between {0} and {1}".format(
            energy_PDF.integral_e_min, energy_PDF.integral_e_max))
    # Energy requires a 1/(1+z) factor

    zfactor = find_zfactor(lumdist)
    etot = (si * area * e_integral).to(u.erg / u.s) * zfactor

    astro_res["Mean Luminosity (erg/s)"] = etot.value

    logger.debug("The required neutrino luminosity was {0}".format(etot))

    cr_e = etot / f_cr_to_nu

    logger.debug(
        "Assuming {0:.3g}% was transferred from CR to neutrinos, we would require a total CR luminosity of {1}"
        .format(100 * f_cr_to_nu, cr_e))

    return astro_res
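
The luminosity step above can be reproduced in isolation with astropy units. A minimal standalone sketch follows, in which the flux normalisation, fluence integral and luminosity distance are stand-in numbers rather than real results, and the redshift factor applied in the original is omitted.

import math
from astropy import units as u

flux = 1e-9 / (u.GeV * u.cm**2 * u.s)   # stand-in flux normalisation
e_integral = 1e5 * u.GeV**2             # stand-in fluence integral
lumdist = 10. * u.Mpc                   # stand-in luminosity distance

# Spherical area at the source distance, then total energy output per second
area = 4 * math.pi * (lumdist.to(u.cm))**2
etot = (flux * area * e_integral).to(u.erg / u.s)
print(etot)
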