Example 1
def test_spectrum_like_with_background_model():
    energies = np.logspace(1, 3, 51)

    low_edge = energies[:-1]
    high_edge = energies[1:]

    sim_K = 1E-1
    sim_kT = 20.

    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)

    # power law background function
    background_function = Powerlaw(K=5, index=-1.5, piv=100.)

    spectrum_generator = SpectrumLike.from_function(
        'fake',
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge)

    background_plugin = SpectrumLike.from_background('background',
                                                     spectrum_generator)

    bb = Blackbody()

    pl = Powerlaw()
    pl.piv = 100

    bkg_ps = PointSource('bkg', 0, 0, spectral_shape=pl)

    bkg_model = Model(bkg_ps)

    jl_bkg = JointLikelihood(bkg_model, DataList(background_plugin))

    _ = jl_bkg.fit()

    plugin_bkg_model = SpectrumLike('full',
                                    spectrum_generator.observed_spectrum,
                                    background=background_plugin)

    pts = PointSource('mysource', 0, 0, spectral_shape=bb)

    model = Model(pts)

    # MLE fitting

    jl = JointLikelihood(model, DataList(plugin_bkg_model))

    result = jl.fit()

    K_variates = jl.results.get_variates('mysource.spectrum.main.Blackbody.K')

    kT_variates = jl.results.get_variates(
        'mysource.spectrum.main.Blackbody.kT')

    assert np.all(
        np.isclose([K_variates.average, kT_variates.average], [sim_K, sim_kT],
                   rtol=0.5))
Example 2
def test_input_output():

    tm = TemplateModel('__test')
    tm.alpha = -0.95
    tm.beta = -2.23

    fake_source = PointSource("test", ra=0.0, dec=0.0, spectral_shape=tm)

    fake_model = Model(fake_source)

    clone = clone_model(fake_model)

    assert clone.get_number_of_point_sources() == 1
    assert tm.data_file == clone.test.spectrum.main.shape.data_file

    assert clone.test.spectrum.main.shape.alpha.value == tm.alpha.value
    assert clone.test.spectrum.main.shape.beta.value == tm.beta.value

    xx = np.linspace(1, 10, 100)

    assert np.allclose(clone.test.spectrum.main.shape(xx), fake_model.test.spectrum.main.shape(xx))

    # Test pickling
    dump = pickle.dumps(clone)

    clone2 = pickle.loads(dump)

    assert clone2.get_number_of_point_sources() == 1
    assert tm.data_file == clone2.test.spectrum.main.shape.data_file
    assert np.allclose(clone2.test.spectrum.main.shape(xx), fake_model.test.spectrum.main.shape(xx))

    # Test pickling with other functions
    new_shape = tm * Powerlaw()

    new_shape.index_2 = -2.256

    dump2 = pickle.dumps(new_shape)

    clone3 = pickle.loads(dump2)

    assert clone3.index_2.value == new_shape.index_2.value

    # Now save to disk and reload
    fake_source2 = PointSource("test", ra=0.0, dec=0.0, spectral_shape=new_shape)

    fake_model2 = Model(fake_source2)

    fake_model2.save("__test.yml", overwrite=True)

    # Now try to reload
    reloaded_model = load_model("__test.yml")

    assert reloaded_model.get_number_of_point_sources() == 1
    assert np.allclose(fake_model2.test.spectrum.main.shape(xx), reloaded_model.test.spectrum.main.shape(xx))

    os.remove("__test.yml")
Example 3
    def _create_plugin(self):
        quiet_mode()

        if self._demo_plugin is None:

            self._demo_plugin = OGIPLike("tmp",
                                         observation="data/grb050401/apcsource.pi",
                                         background="data/grb050401/apcback.pi",
                                         response="data/grb050401/apc.rmf",
                                         arf_file="data/grb050401/apc.arf",
                                         verbose=False)

        spec = (Powerlaw_Eflux(F=self._eflux, index=self._index, a=.4, b=15)
                * TbAbs(NH=self._mw_nh, redshift=0)
                * TbAbs(NH=self._host_nh, redshift=self._z))

        if (self._whim_n0 is not None) and (self._whim_T is not None):

            spec = spec * Integrate_Absori(n0=self._whim_n0,
                                           temp=self._whim_T,
                                           redshift=self._z)

        ps = PointSource("tmp", self._ra, self._dec, spectral_shape=spec)
        model = Model(ps)

        self._demo_plugin.set_model(model)

        simulation = self._demo_plugin.get_simulated_dataset()

        self._simulated_data = simulation
Example 4
def test_dispersion_spectrum_constructor(loaded_response):

    rsp = loaded_response

    pl = Powerlaw()

    ps = PointSource("fake", 0, 0, spectral_shape=pl)

    model = Model(ps)

    obs_spectrum = BinnedSpectrumWithDispersion(counts=np.ones(128),
                                                exposure=1,
                                                response=rsp,
                                                is_poisson=True)
    bkg_spectrum = BinnedSpectrumWithDispersion(counts=np.ones(128),
                                                exposure=1,
                                                response=rsp,
                                                is_poisson=True)

    specLike = DispersionSpectrumLike("fake",
                                      observation=obs_spectrum,
                                      background=bkg_spectrum)
    specLike.set_model(model)
    specLike.get_model()

    specLike.write_pha("test_from_dispersion", overwrite=True)

    assert os.path.exists("test_from_dispersion.pha")
    assert os.path.exists("test_from_dispersion_bak.pha")

    os.remove("test_from_dispersion.pha")
    os.remove("test_from_dispersion_bak.pha")
Example 5
    def fit(self, function, minimizer='minuit', verbose=False):
        """
        Fit the data with the provided function (an astromodels function)

        :param function: astromodels function
        :param minimizer: the minimizer to use
        :param verbose: print every step of the fit procedure
        :return: best fit results
        """

        # This is a wrapper to give an easier way to fit simple data without having to go through the definition
        # of sources
        pts = PointSource("source", 0.0, 0.0, function)

        model = Model(pts)

        self.set_model(model)

        self._joint_like_obj = JointLikelihood(model,
                                               DataList(self),
                                               verbose=verbose)

        self._joint_like_obj.set_minimizer(minimizer)

        return self._joint_like_obj.fit()
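A minimal usage sketch of the wrapper above, assuming it is a method of an XYLike-style plugin; the data and parameter values below are purely illustrative, not from the original source.

import numpy as np
from threeML import XYLike, Powerlaw

x = np.linspace(1, 10, 50)
y = Powerlaw(K=10., index=-1.0)(x)

xy = XYLike("data", x, y, yerr=np.sqrt(y))

# fit() builds the PointSource/Model wrapper internally and returns the fit results
results = xy.fit(Powerlaw(), minimizer="minuit")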
Example 6
def photometry_data_model(grond_plugin):

    spec = Powerlaw()  # * XS_zdust() * XS_zdust()

    datalist = DataList(grond_plugin)

    model = Model(PointSource("grb", 0, 0, spectral_shape=spec))

    yield model, datalist
Example 7
def get_grb_model(spectrum):

    triggerName = "bn090217206"
    ra = 204.9
    dec = -8.4

    GRB = PointSource(triggerName, ra, dec, spectral_shape=spectrum)

    model = Model(GRB)

    return model
Example 8
def test_astromodels_model_with_units():

    x = SynchrotronNumerical()

    ene = np.logspace(1,5, 5) * u.keV

    model = Model(PointSource('test',0,0,spectral_shape=x))

    model.get_point_source_fluxes(0,ene)


    model.get_point_source_fluxes(0, 1*u.keV)
Example 9
    def from_function(cls, name, function, x, yerr):

        y = function(x)

        xyl_gen = XYLike("generator", x, y, yerr)

        pts = PointSource("fake", 0.0, 0.0, function)

        model = Model(pts)

        xyl_gen.set_model(model)

        return xyl_gen.get_simulated_dataset(name)
Example 10
def test_spectrumlike_fit():

    energies = np.logspace(1, 3, 51)

    low_edge = energies[:-1]
    high_edge = energies[1:]

    sim_K = 1e-1
    sim_kT = 20.0

    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)

    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.0)

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    bb = Blackbody()

    pts = PointSource("mysource", 0, 0, spectral_shape=bb)

    model = Model(pts)

    # MLE fitting

    jl = JointLikelihood(model, DataList(spectrum_generator))

    result = jl.fit()

    K_variates = jl.results.get_variates("mysource.spectrum.main.Blackbody.K")

    kT_variates = jl.results.get_variates(
        "mysource.spectrum.main.Blackbody.kT")

    assert np.all(
        np.isclose([K_variates.average, kT_variates.average], [sim_K, sim_kT],
                   atol=1))
Example 11
def get_joint_likelihood_object_complex_likelihood():

    minus_log_L = Complex()

    # Instantiate a plugin (in this case a special one, for illustrative purposes)
    plugin = CustomLikelihoodLike("custom")
    # Set the log likelihood function explicitly. This is not needed for any other
    # plugin
    plugin.set_minus_log_likelihood(minus_log_L)

    # Make the data list (in this case just one dataset)
    data = DataList(plugin)

    src = PointSource("test", ra=0.0, dec=0.0, spectral_shape=minus_log_L)
    model = Model(src)

    jl = JointLikelihoodWrap(model, data, verbose=False)

    return jl, model
Example 12
def test_xspec_saving():

    s = XS_powerlaw() + XS_bbody()

    ps = PointSource("test", 0, 0, spectral_shape=s)

    model = Model(ps)

    cloned_model = clone_model(model)

    filename = "_test_xspec_model.yml"

    model.save(filename)

    reloaded_model = load_model(filename)

    p = Path(filename)

    p.unlink()
Example 13
def get_bayesian_analysis_object_complex_likelihood():
    minus_log_L = Complex()

    minus_log_L.mu.set_uninformative_prior(Log_uniform_prior)

    # Instantiate a plugin (in this case a special one, for illustrative purposes)
    plugin = CustomLikelihoodLike("custom")
    # Set the log likelihood function explicitly. This is not needed for any other
    # plugin
    plugin.set_minus_log_likelihood(minus_log_L)

    # Make the data list (in this case just one dataset)
    data = DataList(plugin)

    src = PointSource("test", ra=0.0, dec=0.0, spectral_shape=minus_log_L)
    model = Model(src)

    bayes = BayesianAnalysisWrap(model, data, verbose=False)

    return bayes, model
Example 14
def test_dispersionspectrumlike_fit():

    response = OGIPResponse(
        get_path_of_data_file("datasets/ogip_powerlaw.rsp"))

    sim_K = 1e-1
    sim_kT = 20.0

    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)

    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.0)

    spectrum_generator = DispersionSpectrumLike.from_function(
        "test",
        source_function=source_function,
        response=response,
        background_function=background_function,
    )

    bb = Blackbody()

    pts = PointSource("mysource", 0, 0, spectral_shape=bb)

    model = Model(pts)

    # MLE fitting

    jl = JointLikelihood(model, DataList(spectrum_generator))

    result = jl.fit()

    K_variates = jl.results.get_variates("mysource.spectrum.main.Blackbody.K")

    kT_variates = jl.results.get_variates(
        "mysource.spectrum.main.Blackbody.kT")

    assert np.all(
        np.isclose([K_variates.average, kT_variates.average], [sim_K, sim_kT],
                   atol=1))
Example 15
def xy_model_and_datalist():

    y = np.array(poiss_sig)

    xy = XYLike("test", x, y, poisson_data=True)

    fitfun = Line() + Gaussian()

    fitfun.b_1.bounds = (-10, 10.0)
    fitfun.a_1.bounds = (-100, 100.0)
    fitfun.F_2 = 60.0
    fitfun.F_2.bounds = (1e-3, 200.0)
    fitfun.mu_2 = 5.0
    fitfun.mu_2.bounds = (0.0, 100.0)
    fitfun.sigma_2.bounds = (1e-3, 10.0)

    model = Model(PointSource("fake", 0.0, 0.0, fitfun))

    data = DataList(xy)

    return model, data
Example 16
def test_all():
    response = OGIPResponse(
        get_path_of_data_file("datasets/ogip_powerlaw.rsp"))

    np.random.seed(1234)

    # rescale the functions for the response
    source_function = Blackbody(K=1e-7, kT=500.0)
    background_function = Powerlaw(K=1, index=-1.5, piv=1.0e3)
    spectrum_generator = DispersionSpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        response=response)

    source_function.K.prior = Log_normal(mu=np.log(1e-7), sigma=1)
    source_function.kT.prior = Log_normal(mu=np.log(300), sigma=2)

    ps = PointSource("demo", 0, 0, spectral_shape=source_function)

    model = Model(ps)

    ba = BayesianAnalysis(model, DataList(spectrum_generator))

    ba.set_sampler()

    ba.sample(quiet=True)

    ppc = compute_ppc(ba,
                      ba.results,
                      n_sims=500,
                      file_name="my_ppc.h5",
                      overwrite=True,
                      return_ppc=True)

    ppc.fake.plot(bkg_subtract=True)

    ppc.fake.plot(bkg_subtract=False)
Example 17
    def from_function(cls, name, function, x, yerr, **kwargs):
        """
        Generate an XYLike plugin from an astromodels function instance
        
        :param name: name of plugin
        :param function: astromodels function instance
        :param x: where to simulate
        :param yerr: y errors or None for Poisson data
        :param kwargs: keyword arguments passed to the XYLike constructor
        :return: XYLike plugin
        """

        y = function(x)

        xyl_gen = XYLike("generator", x, y, yerr, **kwargs)

        pts = PointSource("fake", 0.0, 0.0, function)

        model = Model(pts)

        xyl_gen.set_model(model)

        return xyl_gen.get_simulated_dataset(name)
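A hedged usage sketch of the classmethod above; the grid and function values are illustrative.

import numpy as np
from threeML import XYLike, Powerlaw

x_grid = np.linspace(1, 50, 100)
shape = Powerlaw(K=10, index=-1.5)

# simulate a new dataset named "sim" with Gaussian errors around the model
sim_data = XYLike.from_function("sim", function=shape, x=x_grid,
                                yerr=0.1 * shape(x_grid))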
Example 18
    if args.dec_roi is None:

        args.dec_roi = args.dec

    roi = HealpixConeROI(args.data_radius,
                         args.model_radius,
                         ra=args.ra_roi,
                         dec=args.dec_roi)

    # Build point source model

    # Get spectrum function
    spectrum = astromodels.get_function_class(args.spectrum)()

    src = PointSource("pts", ra=args.ra, dec=args.dec, spectral_shape=spectrum)

    model = Model(src)

    # Parse parameters
    tokens = args.params.split(",")

    for token in tokens:

        key, value = token.split("=")

        # The path of this parameter in the final model will be:
        path = "pts.spectrum.main.%s.%s" % (args.spectrum, key.strip())

        assert path in model, "Could not find parameter %s in function %s" % (
            key, args.spectrum)
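For illustration, a hypothetical continuation of the snippet above: with args.spectrum == "Powerlaw" and args.params == "K=1e-11,index=-2.3", each token resolves to a parameter path that can then be set on the model (path indexing on the astromodels Model is an assumption here, not shown in the original).

    for token in "K=1e-11,index=-2.3".split(","):

        key, value = token.split("=")

        # e.g. "pts.spectrum.main.Powerlaw.K"
        path = "pts.spectrum.main.%s.%s" % ("Powerlaw", key.strip())

        model[path].value = float(value)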
Example 19
def test_spectrum_constructor():

    ebounds = ChannelSet.from_list_of_edges(np.array([1, 2, 3, 4, 5, 6]))

    pl = Powerlaw()

    ps = PointSource("fake", 0, 0, spectral_shape=pl)

    model = Model(ps)

    obs_spectrum = BinnedSpectrum(counts=np.ones(len(ebounds)),
                                  exposure=1,
                                  ebounds=ebounds,
                                  is_poisson=True)
    bkg_spectrum = BinnedSpectrum(counts=np.ones(len(ebounds)),
                                  exposure=1,
                                  ebounds=ebounds,
                                  is_poisson=True)

    assert np.all(obs_spectrum.counts == obs_spectrum.rates)
    assert np.all(bkg_spectrum.counts == bkg_spectrum.rates)

    specLike = SpectrumLike("fake",
                            observation=obs_spectrum,
                            background=bkg_spectrum)
    specLike.set_model(model)
    specLike.get_model()

    specLike.get_simulated_dataset()

    specLike.rebin_on_background(min_number_of_counts=1e-1)
    specLike.remove_rebinning()

    specLike.significance
    specLike.significance_per_channel

    obs_spectrum = BinnedSpectrum(
        counts=np.ones(len(ebounds)),
        count_errors=np.ones(len(ebounds)),
        exposure=1,
        ebounds=ebounds,
        is_poisson=False,
    )
    bkg_spectrum = BinnedSpectrum(counts=np.ones(len(ebounds)),
                                  exposure=1,
                                  ebounds=ebounds,
                                  is_poisson=True)

    with pytest.raises(NotImplementedError):

        specLike = SpectrumLike("fake",
                                observation=obs_spectrum,
                                background=bkg_spectrum)

    # gaussian source only

    obs_spectrum = BinnedSpectrum(
        counts=np.ones(len(ebounds)),
        count_errors=np.ones(len(ebounds)),
        exposure=1,
        ebounds=ebounds,
    )

    specLike = SpectrumLike("fake", observation=obs_spectrum, background=None)
    specLike.set_model(model)
    specLike.get_model()

    specLike.get_simulated_dataset()

    with pytest.raises(AssertionError):
        specLike.rebin_on_background(min_number_of_counts=1e-1)
Example 20
def test_ubinned_poisson_full(event_observation_contiguous, event_observation_split):

    s = Line()

    ps = PointSource("s", 0, 0, spectral_shape=s)

    s.a.bounds = (0, None)
    s.a.value = .1
    s.b.value = .1

    s.a.prior = Log_normal(mu=np.log(10), sigma=1)
    s.b.prior = Gaussian(mu=0, sigma=1)

    m = Model(ps)

    ######
    ######
    ######

    
    ub1 = UnbinnedPoissonLike("test", observation=event_observation_contiguous)

    jl = JointLikelihood(m, DataList(ub1))

    jl.fit(quiet=True)

    np.testing.assert_allclose([s.a.value, s.b.value], [6.11, 1.45], rtol=.5)

    ba = BayesianAnalysis(m, DataList(ub1))

    ba.set_sampler("emcee")

    ba.sampler.setup(n_burn_in=100, n_walkers=20, n_iterations=500)

    ba.sample(quiet=True)

    ba.restore_median_fit()

    np.testing.assert_allclose([s.a.value, s.b.value], [6.11, 1.45], rtol=.5)

    ######
    ######
    ######

    ub2 = UnbinnedPoissonLike("test", observation=event_observation_split)

    jl = JointLikelihood(m, DataList(ub2))

    jl.fit(quiet=True)

    np.testing.assert_allclose([s.a.value, s.b.value], [2., .2], rtol=.5)

    ba = BayesianAnalysis(m, DataList(ub2))

    ba.set_sampler("emcee")

    ba.sampler.setup(n_burn_in=100, n_walkers=20, n_iterations=500)

    ba.sample(quiet=True)

    ba.restore_median_fit()

    np.testing.assert_allclose([s.a.value, s.b.value], [2., .2], rtol=.5)
Example 21
def polyfit(x: Iterable[float], y: Iterable[float], grade: int, exposure: Iterable[float], bayes: bool = False) -> Tuple[Polynomial, float]:
    """ 
    function to fit a polynomial to data. 
    not a member to allow parallel computation

    :param x: the x coord of the data
    :param y: teh y coord of the data
    :param grade: the polynomical order or grade
    :param expousure: the exposure of the interval
    :param bayes: to do a bayesian fit or not


    """

    # Check that we have enough counts to perform the fit, otherwise
    # return a "zero polynomial"

    log.debug(f"starting polyfit with grade {grade} ")

    if threeML_config.time_series.default_fit_method is not None:

        bayes = threeML_config.time_series.default_fit_method
        log.debug("using a default poly fit method")

    nan_mask = np.isnan(y)

    y = y[~nan_mask]
    x = x[~nan_mask]
    exposure = exposure[~nan_mask]

    non_zero_mask = y > 0
    n_non_zero = non_zero_mask.sum()
    if n_non_zero == 0:

        log.debug("no counts, return 0")

        # No data, nothing to do!
        return Polynomial([0.0]*(grade+1)), 0.0

    # create 3ML plugins and fit them with 3ML!
    # should eventually allow better config

    # select the model based on the grade

    shape = _grade_model_lookup[grade]()

    ps = PointSource("_dummy", 0, 0, spectral_shape=shape)

    model = Model(ps)

    avg = np.mean(y/exposure)

    log.debug(f"starting polyfit with avg norm {avg}")

    with silence_console_log():

        xy = XYLike("series", x=x, y=y, exposure=exposure,
                    poisson_data=True, quiet=True)

        if not bayes:

            # make sure the model is positive

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)

                    v.value = avg

                else:

                    v.value = 0.0

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            jl: JointLikelihood = JointLikelihood(model, DataList(xy))

            jl.set_minimizer("minuit")

            # if the fit fails, retry and then just accept

            try:

                jl.fit(quiet=True)

            except(FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                log.debug("1st fit failed")

                try:

                    jl.fit(quiet=True)

                except(FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                    log.debug("all MLE fits failed")

                    pass

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            try:
                final_polynomial.set_covariace_matrix(
                    jl.results.covariance_matrix)

            except Exception:

                log.exception("Fit failed in channel")
                raise FitFailed()

            min_log_likelihood = xy.get_log_like()

        else:

            # set smart priors

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.prior = Log_normal(
                        mu=np.log(avg), sigma=np.max([np.log(avg/2), 1]))
                    v.value = 1

                else:

                    v.prior = Gaussian(mu=0, sigma=2)
                    v.value = 1e-2

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            ba: BayesianAnalysis = BayesianAnalysis(model, DataList(xy))

            ba.set_sampler("emcee")

            ba.sampler.setup(n_iterations=500, n_burn_in=200, n_walkers=20)

            ba.sample(quiet=True)

            ba.restore_median_fit()

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(
                ba.results.estimate_covariance_matrix())

            min_log_likelihood = xy.get_log_like()

    log.debug(f"-min loglike: {-min_log_likelihood}")

    return final_polynomial, -min_log_likelihood
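The helper above relies on a module-level _grade_model_lookup that is not shown in the snippet. A plausible reconstruction, offered only as an assumption (and assuming astromodels exposes Line, Quadratic, Cubic and Quartic shapes), consistent with the grade-0 special case that fits a Line with b fixed to 0:

from astromodels import Line, Quadratic, Cubic, Quartic

# assumed mapping from polynomial grade to an astromodels shape (not shown above);
# grade 0 also uses a Line, with b fixed to 0 inside the fit
_grade_model_lookup = (Line, Line, Quadratic, Cubic, Quartic)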
Example 22
def unbinned_polyfit(events: Iterable[float], grade: int, t_start: float, t_stop: float, exposure: float, bayes: bool) -> Tuple[Polynomial, float]:
    """
    function to fit a polynomial to unbinned event data. 
    not a member to allow parallel computation

    :param events: the events to fit
    :param grade: the polynomical order or grade
    :param t_start: the start time to fit over
    :param t_stop: the end time to fit over
    :param expousure: the exposure of the interval
    :param bayes: to do a bayesian fit or not

    """

    log.debug(f"starting unbinned_polyfit with grade {grade}")
    log.debug(f"have {len(events)} events with {exposure} exposure")

    # create 3ML plugins and fit them with 3ML!
    # should eventually allow better config

    # select the model based on the grade

    if threeML_config.time_series.default_fit_method is not None:

        bayes = threeML_config.time_series.default_fit_method
        log.debug("using a default poly fit method")

    if len(events) == 0:

        log.debug("no events! returning zero")

        return Polynomial([0] * (grade + 1)), 0

    shape = _grade_model_lookup[grade]()

    with silence_console_log():

        ps = PointSource("dummy", 0, 0, spectral_shape=shape)

        model = Model(ps)

        observation = EventObservation(events, exposure, t_start, t_stop)

        xy = UnbinnedPoissonLike("series", observation=observation)

        if not bayes:

            # make sure the model is positive

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)

                    v.value = 10

                else:

                    v.value = 0.0

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            jl: JointLikelihood = JointLikelihood(model, DataList(xy))

            grid_minimizer = GlobalMinimization("grid")

            local_minimizer = LocalMinimization("minuit")

            my_grid = {
                model.dummy.spectrum.main.shape.a: np.logspace(0, 3, 10)}

            grid_minimizer.setup(
                second_minimization=local_minimizer, grid=my_grid)

            jl.set_minimizer(grid_minimizer)

            # if the fit fails, retry and then just accept

            try:

                jl.fit(quiet=True)

            except(FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                try:

                    jl.fit(quiet=True)

                except(FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                    log.debug("all MLE fits failed, returning zero")

                    return Polynomial([0]*(grade + 1)), 0

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(jl.results.covariance_matrix)

            min_log_likelihood = xy.get_log_like()

        else:

            # set smart priors

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.prior = Log_normal(mu=np.log(5), sigma=np.log(5))
                    v.value = 1

                else:

                    v.prior = Gaussian(mu=0, sigma=.5)
                    v.value = 0.1

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            ba: BayesianAnalysis = BayesianAnalysis(model, DataList(xy))

            ba.set_sampler("emcee")

            ba.sampler.setup(n_iterations=500, n_burn_in=200, n_walkers=20)

            ba.sample(quiet=True)

            ba.restore_median_fit()

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(
                ba.results.estimate_covariance_matrix())

            min_log_likelihood = xy.get_log_like()

    log.debug(f"-min loglike: {-min_log_likelihood}")

    return final_polynomial, -min_log_likelihood
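A brief usage sketch of unbinned_polyfit; the event list and interval below are made up for illustration.

import numpy as np

# illustrative only: fit a first-order polynomial background rate to
# unbinned arrival times over a 0-100 s interval
events = np.sort(np.random.uniform(0., 100., size=500))

poly, neg_log_like = unbinned_polyfit(events, grade=1, t_start=0.,
                                      t_stop=100., exposure=100., bayes=False)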
Example 23
def test_model_residual_maps(geminga_maptree, geminga_response, geminga_roi):

    #data_radius = 5.0
    #model_radius = 7.0
    output = dirname(geminga_maptree)

    ra_src, dec_src = 101.7, 16.0
    maptree, response, roi = geminga_maptree, geminga_response, geminga_roi

    hawc = HAL("HAWC", maptree, response, roi)

    # Use from bin 1 to bin 9
    hawc.set_active_measurements(1, 9)

    # Display information about the data loaded and the ROI
    hawc.display()
    '''
    Define model: Two sources, 1 point, 1 extended

    Same declination, but offset in RA

    Different spectral index, but both power laws

    '''
    pt_shift = 3.0
    ext_shift = 2.0

    # First source
    spectrum1 = Powerlaw()
    source1 = PointSource("point",
                          ra=ra_src + pt_shift,
                          dec=dec_src,
                          spectral_shape=spectrum1)

    spectrum1.K = 1e-12 / (u.TeV * u.cm**2 * u.s)
    spectrum1.piv = 1 * u.TeV
    spectrum1.index = -2.3

    spectrum1.piv.fix = True
    spectrum1.K.fix = True
    spectrum1.index.fix = True

    # Second source
    shape = Gaussian_on_sphere(lon0=ra_src - ext_shift,
                               lat0=dec_src,
                               sigma=0.3)
    spectrum2 = Powerlaw()
    source2 = ExtendedSource("extended",
                             spatial_shape=shape,
                             spectral_shape=spectrum2)

    spectrum2.K = 1e-12 / (u.TeV * u.cm**2 * u.s)
    spectrum2.piv = 1 * u.TeV
    spectrum2.index = -2.0

    shape.lon0.fix = True
    shape.lat0.fix = True
    shape.sigma.fix = True

    spectrum2.piv.fix = True
    spectrum2.K.fix = True
    spectrum2.index.fix = True

    # Define model with both sources
    model = Model(source1, source2)

    # Define the data we are using
    data = DataList(hawc)

    # Define the JointLikelihood object (glue the data to the model)
    jl = JointLikelihood(model, data, verbose=False)

    # This has the effect of loading the model cache
    fig = hawc.display_spectrum()

    # the test file names
    model_file_name = "{0}/test_model.hdf5".format(output)
    residual_file_name = "{0}/test_residual.hdf5".format(output)

    # Write the map trees for testing
    model_map_tree = hawc.write_model_map(model_file_name,
                                          poisson_fluctuate=True,
                                          test_return_map=True)
    residual_map_tree = hawc.write_residual_map(residual_file_name,
                                                test_return_map=True)

    # Read the maps back in
    hawc_model = map_tree_factory(model_file_name, roi)
    hawc_residual = map_tree_factory(residual_file_name, roi)

    check_map_trees(hawc_model, model_map_tree)
    check_map_trees(hawc_residual, residual_map_tree)
Example 24
def test_all_statistics():

    energies = np.logspace(1, 3, 51)

    low_edge = energies[:-1]
    high_edge = energies[1:]

    # get a blackbody source function
    source_function = Blackbody(K=9e-2, kT=20)

    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.0)

    pts = PointSource("mysource", 0, 0, spectral_shape=source_function)

    model = Model(pts)

    # test Poisson no bkg

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )
    spectrum_generator.set_model(model)

    spectrum_generator.get_log_like()

    # test Poisson w/ Poisson bkg

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    spectrum_generator.set_model(model)

    spectrum_generator.get_log_like()

    spectrum_generator._background_counts = -np.ones_like(
        spectrum_generator._background_counts)

    with pytest.raises(NegativeBackground):
        spectrum_generator._probe_noise_models()

    # test Poisson w/ ideal bkg

    spectrum_generator.background_noise_model = "ideal"

    spectrum_generator.get_log_like()

    # test Poisson w/ gauss bkg

    # test Poisson w/ Poisson bkg

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        background_errors=0.1 * background_function(low_edge),
        energy_min=low_edge,
        energy_max=high_edge,
    )

    spectrum_generator.set_model(model)

    spectrum_generator.get_log_like()

    # test Gaussian w/ no bkg

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        source_errors=0.5 * source_function(low_edge),
        energy_min=low_edge,
        energy_max=high_edge,
    )

    spectrum_generator.set_model(model)

    spectrum_generator.get_log_like()
Example 25
def test_spectrum_like_with_background_model():
    energies = np.logspace(1, 3, 51)

    low_edge = energies[:-1]
    high_edge = energies[1:]

    sim_K = 1e-1
    sim_kT = 20.0

    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)

    # power law background function
    background_function = Powerlaw(K=5, index=-1.5, piv=100.0)

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    background_plugin = SpectrumLike.from_background("background",
                                                     spectrum_generator)

    bb = Blackbody()

    pl = Powerlaw()
    pl.piv = 100

    bkg_ps = PointSource("bkg", 0, 0, spectral_shape=pl)

    bkg_model = Model(bkg_ps)

    jl_bkg = JointLikelihood(bkg_model, DataList(background_plugin))

    _ = jl_bkg.fit()

    plugin_bkg_model = SpectrumLike("full",
                                    spectrum_generator.observed_spectrum,
                                    background=background_plugin)

    pts = PointSource("mysource", 0, 0, spectral_shape=bb)

    model = Model(pts)

    # MLE fitting

    jl = JointLikelihood(model, DataList(plugin_bkg_model))

    result = jl.fit()

    K_variates = jl.results.get_variates("mysource.spectrum.main.Blackbody.K")

    kT_variates = jl.results.get_variates(
        "mysource.spectrum.main.Blackbody.kT")

    assert np.all(
        np.isclose([K_variates.average, kT_variates.average], [sim_K, sim_kT],
                   rtol=0.5))

    # test with OGIPLike
    with within_directory(__example_dir):
        ogip = OGIPLike("test_ogip",
                        observation="test.pha{1}",
                        background=background_plugin)
Example 26
def test_assigning_source_name():

    energies = np.logspace(1, 3, 51)

    low_edge = energies[:-1]
    high_edge = energies[1:]

    sim_K = 1e-1
    sim_kT = 20.0

    # get a blackbody source function
    source_function = Blackbody(K=sim_K, kT=sim_kT)

    # power law background function
    background_function = Powerlaw(K=1, index=-1.5, piv=100.0)

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    # good name setting

    bb = Blackbody()

    pts = PointSource("good_name", 0, 0, spectral_shape=bb)

    model = Model(pts)

    # before setting model

    spectrum_generator.assign_to_source("good_name")

    jl = JointLikelihood(model, DataList(spectrum_generator))

    _ = jl.fit()

    # after setting model

    pts = PointSource("good_name", 0, 0, spectral_shape=bb)

    model = Model(pts)

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    jl = JointLikelihood(model, DataList(spectrum_generator))

    spectrum_generator.assign_to_source("good_name")

    # after with bad name

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    jl = JointLikelihood(model, DataList(spectrum_generator))

    with pytest.raises(RuntimeError):

        spectrum_generator.assign_to_source("bad_name")

        # before with bad name

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    spectrum_generator.assign_to_source("bad_name")

    with pytest.raises(RuntimeError):

        jl = JointLikelihood(model, DataList(spectrum_generator))

    # multisource model

    spectrum_generator = SpectrumLike.from_function(
        "fake",
        source_function=source_function,
        background_function=background_function,
        energy_min=low_edge,
        energy_max=high_edge,
    )

    ps1 = PointSource("ps1", 0, 0, spectral_shape=Blackbody())
    ps2 = PointSource("ps2", 0, 0, spectral_shape=Powerlaw())

    model = Model(ps1, ps2)

    model.ps2.spectrum.main.Powerlaw.K.fix = True
    model.ps2.spectrum.main.Powerlaw.index.fix = True

    spectrum_generator.assign_to_source("ps1")

    dl = DataList(spectrum_generator)

    jl = JointLikelihood(model, dl)

    _ = jl.fit()
Example 27
def test_healpixRoi(geminga_maptree, geminga_response):

    # test to make sure writing a model with HealpixMapROI works fine
    ra, dec = 101.7, 16.
    data_radius = 9.
    model_radius = 24.

    m = np.zeros(hp.nside2npix(NSIDE))
    vec = Sky2Vec(ra, dec)
    m[hp.query_disc(NSIDE,
                    vec, (data_radius * u.degree).to(u.radian).value,
                    inclusive=False)] = 1

    #hp.fitsfunc.write_map("roitemp.fits" , m, nest=False, coord="C", partial=False, overwrite=True )

    map_roi = HealpixMapROI(data_radius=data_radius,
                            ra=ra,
                            dec=dec,
                            model_radius=model_radius,
                            roimap=m)
    #fits_roi = HealpixMapROI(data_radius=data_radius, ra=ra, dec=dec, model_radius=model_radius, roifile="roitemp.fits")
    hawc = HAL("HAWC", geminga_maptree, geminga_response, map_roi)
    hawc.set_active_measurements(1, 9)
    '''
    Define model: Two sources, 1 point, 1 extended

    Same declination, but offset in RA

    Different spectral index, but both power laws
    '''
    pt_shift = 3.0
    ext_shift = 2.0

    # First source
    spectrum1 = Powerlaw()
    source1 = PointSource("point",
                          ra=ra + pt_shift,
                          dec=dec,
                          spectral_shape=spectrum1)

    spectrum1.K = 1e-12 / (u.TeV * u.cm**2 * u.s)
    spectrum1.piv = 1 * u.TeV
    spectrum1.index = -2.3

    spectrum1.piv.fix = True
    spectrum1.K.fix = True
    spectrum1.index.fix = True

    # Second source
    shape = Gaussian_on_sphere(lon0=ra - ext_shift, lat0=dec, sigma=0.3)
    spectrum2 = Powerlaw()
    source2 = ExtendedSource("extended",
                             spatial_shape=shape,
                             spectral_shape=spectrum2)

    spectrum2.K = 1e-12 / (u.TeV * u.cm**2 * u.s)
    spectrum2.piv = 1 * u.TeV
    spectrum2.index = -2.0

    spectrum2.piv.fix = True
    spectrum2.K.fix = True
    spectrum2.index.fix = True

    shape.lon0.fix = True
    shape.lat0.fix = True
    shape.sigma.fix = True

    model = Model(source1, source2)

    hawc.set_model(model)

    # Write the model map
    model_map_tree = hawc.write_model_map("test.hd5", test_return_map=True)

    # Read the model back
    hawc_model = map_tree_factory('test.hd5', map_roi)

    # Check written model and read model are the same
    check_map_trees(hawc_model, model_map_tree)

    os.remove("test.hd5")