Example #1
def completed_bn090217206_bayesian_analysis_multicomp(
        fitted_joint_likelihood_bn090217206_nai_multicomp):

    jl, _, _ = fitted_joint_likelihood_bn090217206_nai_multicomp

    # This is necessary because other tests/functions might have modified
    # the model stored within
    jl.restore_best_fit()

    model = jl.likelihood_model
    data_list = jl.data_list
    spectrum = jl.likelihood_model.bn090217206.spectrum.main.shape

    spectrum.index_1.prior = Uniform_prior(lower_bound=-5.0, upper_bound=5.0)
    spectrum.K_1.prior = Log_uniform_prior(lower_bound=1.0, upper_bound=10)
    spectrum.K_2.prior = Log_uniform_prior(lower_bound=1e-20, upper_bound=10)
    spectrum.kT_2.prior = Log_uniform_prior(lower_bound=1e0, upper_bound=1e3)

    bayes = BayesianAnalysis(model, data_list)

    bayes.set_sampler("emcee")

    bayes.sampler.setup(n_walkers=50,
                        n_burn_in=50,
                        n_iterations=100,
                        seed=1234)

    bayes.sample()

    return bayes, bayes.samples
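These fixtures assume the usual module-level imports; a minimal preamble sketch, using the standard 3ML/astromodels names:

# presumed imports for the fixtures in these examples
from threeML import BayesianAnalysis
from astromodels import Uniform_prior, Log_uniform_prior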
Example #2
def xy_completed_bayesian_analysis(xy_fitted_joint_likelihood):

    jl, _, _ = xy_fitted_joint_likelihood

    jl.restore_best_fit()

    model = jl.likelihood_model
    data = jl.data_list

    model.fake.spectrum.main.composite.a_1.set_uninformative_prior(
        Uniform_prior)
    model.fake.spectrum.main.composite.b_1.set_uninformative_prior(
        Uniform_prior)
    model.fake.spectrum.main.composite.F_2.set_uninformative_prior(
        Log_uniform_prior)
    model.fake.spectrum.main.composite.mu_2.set_uninformative_prior(
        Uniform_prior)
    model.fake.spectrum.main.composite.sigma_2.set_uninformative_prior(
        Log_uniform_prior)

    bs = BayesianAnalysis(model, data)

    bs.set_sampler("emcee")

    bs.sampler.setup(n_burn_in=100, n_iterations=100, n_walkers=20)

    samples = bs.sample()

    return bs, samples
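A short sketch of how a test might consume this fixture; corner_plot is assumed from the standard 3ML results API:

def test_posterior_inspection(xy_completed_bayesian_analysis):

    bs, samples = xy_completed_bayesian_analysis

    # visualize the posterior (assumed 3ML results API)
    fig = bs.results.corner_plot()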
Example #3
def xy_completed_bayesian_analysis(xy_fitted_joint_likelihood):

    jl, _, _ = xy_fitted_joint_likelihood

    jl.restore_best_fit()

    model = jl.likelihood_model
    data = jl.data_list

    model.fake.spectrum.main.composite.a_1.set_uninformative_prior(
        Uniform_prior)
    model.fake.spectrum.main.composite.b_1.set_uninformative_prior(
        Uniform_prior)
    model.fake.spectrum.main.composite.F_2.set_uninformative_prior(
        Log_uniform_prior)
    model.fake.spectrum.main.composite.mu_2.set_uninformative_prior(
        Uniform_prior)
    model.fake.spectrum.main.composite.sigma_2.set_uninformative_prior(
        Log_uniform_prior)

    bs = BayesianAnalysis(model, data)
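
    # positional arguments: n_walkers, burn_in, n_samples
    # (older 3ML sample() signature; cf. the keyword call in Example #4)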

    samples = bs.sample(20, 100, 1000)

    return bs, samples
Example #4
def completed_bn090217206_bayesian_analysis(fitted_joint_likelihood_bn090217206_nai):

    jl, _, _ = fitted_joint_likelihood_bn090217206_nai

    jl.restore_best_fit()

    model = jl.likelihood_model
    data_list = jl.data_list
    powerlaw = jl.likelihood_model.bn090217206.spectrum.main.Powerlaw

    powerlaw.index.prior = Uniform_prior(lower_bound=-5.0, upper_bound=5.0)
    powerlaw.K.prior = Log_uniform_prior(lower_bound=1.0, upper_bound=10)

    bayes = BayesianAnalysis(model, data_list)

    samples = bayes.sample(n_walkers=50, burn_in=50, n_samples=100, seed=1234)

    return bayes, samples
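This fixture uses the older single-call sampling API; with the newer sampler interface shown in Example #1, the equivalent (same parameters) would be:

    bayes.set_sampler("emcee")

    bayes.sampler.setup(n_walkers=50, n_burn_in=50, n_iterations=100, seed=1234)

    bayes.sample()

    samples = bayes.samples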
Example #5
def bayes_fitter(fitted_joint_likelihood_bn090217206_nai):
    jl, fit_results, like_frame = fitted_joint_likelihood_bn090217206_nai
    datalist = jl.data_list
    model = jl.likelihood_model

    jl.restore_best_fit()

    set_priors(model)

    bayes = BayesianAnalysis(model, datalist)

    return bayes
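set_priors is a helper defined elsewhere in the test suite; a hypothetical sketch, mirroring the priors set on the same model in Example #4:

def set_priors(model):

    # hypothetical helper; priors copied from Example #4
    powerlaw = model.bn090217206.spectrum.main.Powerlaw

    powerlaw.index.prior = Uniform_prior(lower_bound=-5.0, upper_bound=5.0)
    powerlaw.K.prior = Log_uniform_prior(lower_bound=1.0, upper_bound=10)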
Example #6
def completed_bn090217206_bayesian_analysis_multicomp(fitted_joint_likelihood_bn090217206_nai_multicomp):

    jl, _, _ = fitted_joint_likelihood_bn090217206_nai_multicomp

    # This is necessary because other tests/functions might have modified
    # the model stored within
    jl.restore_best_fit()

    model = jl.likelihood_model
    data_list = jl.data_list
    spectrum = jl.likelihood_model.bn090217206.spectrum.main.shape

    spectrum.index_1.prior = Uniform_prior(lower_bound=-5.0, upper_bound=5.0)
    spectrum.K_1.prior = Log_uniform_prior(lower_bound=1.0, upper_bound=10)
    spectrum.K_2.prior = Log_uniform_prior(lower_bound=1e-20, upper_bound=10)
    spectrum.kT_2.prior = Log_uniform_prior(lower_bound=1e0, upper_bound=1e3)

    bayes = BayesianAnalysis(model, data_list)

    samples = bayes.sample(n_walkers=50, burn_in=50, n_samples=100, seed=1234)

    return bayes, samples
Example #7
def unbinned_polyfit(events: Iterable[float], grade: int, t_start: float, t_stop: float, exposure: float, bayes: bool) -> Tuple[Polynomial, float]:
    """
    Fit a polynomial to unbinned event data.
    Not a member function, to allow parallel computation.

    :param events: the events to fit
    :param grade: the polynomial order or grade
    :param t_start: the start time to fit over
    :param t_stop: the end time to fit over
    :param exposure: the exposure of the interval
    :param bayes: whether to do a Bayesian fit

    """

    log.debug(f"starting unbinned_polyfit with grade {grade}")
    log.debug(f"have {len(events)} events with {exposure} exposure")

    # create 3ML plugins and fit them with 3ML!
    # should eventually allow better config

    # select the model based on the grade

    if threeML_config.time_series.default_fit_method is not None:

        bayes = threeML_config.time_series.default_fit_method
        log.debug("using a default poly fit method")

    if len(events) == 0:

        log.debug("no events! returning zero")

        return Polynomial([0] * (grade + 1)), 0

    shape = _grade_model_lookup[grade]()

    with silence_console_log():

        ps = PointSource("dummy", 0, 0, spectral_shape=shape)

        model = Model(ps)

        observation = EventObservation(events, exposure, t_start, t_stop)

        xy = UnbinnedPoissonLike("series", observation=observation)

        if not bayes:

            # make sure the model is positive

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)

                    v.value = 10

                else:

                    v.value = 0.0

            # we actually use a line here, because
            # a constant returns a single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            jl: JointLikelihood = JointLikelihood(model, DataList(xy))

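            # seed the normalization from a coarse grid of starting values,
            # then refine each grid point with a local minuit minimization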
            grid_minimizer = GlobalMinimization("grid")

            local_minimizer = LocalMinimization("minuit")

            my_grid = {
                model.dummy.spectrum.main.shape.a: np.logspace(0, 3, 10)}

            grid_minimizer.setup(
                second_minimization=local_minimizer, grid=my_grid)

            jl.set_minimizer(grid_minimizer)

            # if the fit fails, retry and then just accept

            try:

                jl.fit(quiet=True)

            except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                try:

                    jl.fit(quiet=True)

                except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                    log.debug("all MLE fits failed, returning zero")

                    return Polynomial([0]*(grade + 1)), 0

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(jl.results.covariance_matrix)

            min_log_likelihood = xy.get_log_like()

        else:

            # set smart priors

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.prior = Log_normal(mu=np.log(5), sigma=np.log(5))
                    v.value = 1

                else:

                    v.prior = Gaussian(mu=0, sigma=.5)
                    v.value = 0.1

            # we actually use a line here, because
            # a constant returns a single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            ba: BayesianAnalysis = BayesianAnalysis(model, DataList(xy))

            ba.set_sampler("emcee")

            ba.sampler.setup(n_iterations=500, n_burn_in=200, n_walkers=20)

            ba.sample(quiet=True)

            ba.restore_median_fit()

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(
                ba.results.estimate_covariance_matrix())

            min_log_likelihood = xy.get_log_like()

    log.debug(f"-min loglike: {-min_log_likelihood}")

    return final_polynomial, -min_log_likelihood
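A minimal usage sketch with hypothetical toy data (the function relies on module-level helpers such as _grade_model_lookup):

import numpy as np

# hypothetical toy data: ~200 events uniform over a 10 s window
events = np.sort(np.random.uniform(0.0, 10.0, size=200))

background_poly, neg_log_like = unbinned_polyfit(
    events, grade=1, t_start=0.0, t_stop=10.0, exposure=10.0, bayes=True
)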
Example #8
def polyfit(x: Iterable[float], y: Iterable[float], grade: int, exposure: Iterable[float], bayes: bool = False) -> Tuple[Polynomial, float]:
    """ 
    Fit a polynomial to binned data.
    Not a member function, to allow parallel computation.

    :param x: the x coord of the data
    :param y: the y coord of the data
    :param grade: the polynomial order or grade
    :param exposure: the exposure of the interval
    :param bayes: whether to do a Bayesian fit

    """

    # Check that we have enough counts to perform the fit, otherwise
    # return a "zero polynomial"

    log.debug(f"starting polyfit with grade {grade} ")

    if threeML_config.time_series.default_fit_method is not None:

        bayes = threeML_config.time_series.default_fit_method
        log.debug("using a default poly fit method")

    nan_mask = np.isnan(y)

    y = y[~nan_mask]
    x = x[~nan_mask]
    exposure = exposure[~nan_mask]

    non_zero_mask = y > 0
    n_non_zero = non_zero_mask.sum()
    if n_non_zero == 0:

        log.debug("no counts, return 0")

        # No data, nothing to do!
        return Polynomial([0.0]*(grade+1)), 0.0

    # create 3ML plugins and fit them with 3ML!
    # should eventually allow better config

    # select the model based on the grade

    shape = _grade_model_lookup[grade]()

    ps = PointSource("_dummy", 0, 0, spectral_shape=shape)

    model = Model(ps)

    avg = np.mean(y/exposure)

    log.debug(f"starting polyfit with avg norm {avg}")

    with silence_console_log():

        xy = XYLike("series", x=x, y=y, exposure=exposure,
                    poisson_data=True, quiet=True)

        if not bayes:

            # make sure the model is positive

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)

                    v.value = avg

                else:

                    v.value = 0.0

            # we actually use a line here, because
            # a constant returns a single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            jl: JointLikelihood = JointLikelihood(model, DataList(xy))

            jl.set_minimizer("minuit")

            # if the fit fails, retry and then just accept

            try:

                jl.fit(quiet=True)

            except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                log.debug("1st fit failed")

                try:

                    jl.fit(quiet=True)

                except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                    log.debug("all MLE fits failed")

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            try:
                final_polynomial.set_covariace_matrix(
                    jl.results.covariance_matrix)

            except Exception:

                log.exception("Fit failed in channel")
                raise FitFailed()

            min_log_likelihood = xy.get_log_like()

        else:

            # set smart priors

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.prior = Log_normal(
                        mu=np.log(avg), sigma=np.max([np.log(avg/2), 1]))
                    v.value = 1

                else:

                    v.prior = Gaussian(mu=0, sigma=2)
                    v.value = 1e-2

            # we actually use a line here, because
            # a constant returns a single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            ba: BayesianAnalysis = BayesianAnalysis(model, DataList(xy))

            ba.set_sampler("emcee")

            ba.sampler.setup(n_iterations=500, n_burn_in=200, n_walkers=20)

            ba.sample(quiet=True)

            ba.restore_median_fit()

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(
                ba.results.estimate_covariance_matrix())

            min_log_likelihood = xy.get_log_like()

    log.debug(f"-min loglike: {-min_log_likelihood}")

    return final_polynomial, -min_log_likelihood
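A minimal usage sketch with hypothetical binned counts:

import numpy as np

# hypothetical toy light curve: 100 one-second bins of Poisson counts
x = np.arange(100) + 0.5
exposure = np.ones_like(x)
y = np.random.poisson(5.0, size=x.size)

background_poly, neg_log_like = polyfit(x, y, grade=1, exposure=exposure, bayes=False)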
Example #9
def test_unbinned_poisson_full(event_observation_contiguous, event_observation_split):

    s = Line()

    ps = PointSource("s", 0, 0, spectral_shape=s)

    s.a.bounds = (0, None)
    s.a.value = .1
    s.b.value = .1

    s.a.prior = Log_normal(mu=np.log(10), sigma=1)
    s.b.prior = Gaussian(mu=0, sigma=1)

    m = Model(ps)

    ######
    ######
    ######

    ub1 = UnbinnedPoissonLike("test", observation=event_observation_contiguous)

    jl = JointLikelihood(m, DataList(ub1))

    jl.fit(quiet=True)

    np.testing.assert_allclose([s.a.value, s.b.value], [6.11, 1.45], rtol=.5)

    ba = BayesianAnalysis(m, DataList(ub1))

    ba.set_sampler("emcee")

    ba.sampler.setup(n_burn_in=100, n_walkers=20, n_iterations=500)

    ba.sample(quiet=True)

    ba.restore_median_fit()

    np.testing.assert_allclose([s.a.value, s.b.value], [6.11, 1.45], rtol=.5)

    ######
    ######
    ######

    ub2 = UnbinnedPoissonLike("test", observation=event_observation_split)

    jl = JointLikelihood(m, DataList(ub2))

    jl.fit(quiet=True)

    np.testing.assert_allclose([s.a.value, s.b.value], [2., .2], rtol=.5)

    ba = BayesianAnalysis(m, DataList(ub2))

    ba.set_sampler("emcee")

    ba.sampler.setup(n_burn_in=100, n_walkers=20, n_iterations=500)

    ba.sample(quiet=True)

    ba.restore_median_fit()

    np.testing.assert_allclose([s.a.value, s.b.value], [2., .2], rtol=.5)
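The event_observation_* fixtures are built elsewhere in the suite; a hypothetical construction, consistent with the EventObservation usage in Example #7:

import numpy as np

# hypothetical contiguous observation: 500 events over a single 10 s window
events = np.sort(np.random.uniform(0.0, 10.0, size=500))

# arguments as in Example #7: (events, exposure, t_start, t_stop)
event_observation_contiguous = EventObservation(events, 10.0, 0.0, 10.0)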