def get_simulated_data(self, id: int):

    # Generate a new data set for each plugin contained in the data list

    new_datas = []

    for dataset in list(self._joint_likelihood_instance0.data_list.values()):

        # Make sure that the active likelihood model is the null hypothesis
        # This is needed if the user has used the same DataList instance for both
        # JointLikelihood instances
        dataset.set_model(self._joint_likelihood_instance0.likelihood_model)

        new_data = dataset.get_simulated_dataset("%s_sim" % dataset.name)

        new_datas.append(new_data)

    new_data_list = DataList(*new_datas)

    if self._save_pha:

        self._data_container.append(new_data_list)

    return new_data_list
def data_list_bn090217206_nai6():

    NaI6 = get_dataset()

    data_list = DataList(NaI6)

    return data_list
def fit(self, function, minimizer='minuit', verbose=False):
    """
    Fit the data with the provided function (an astromodels function)

    :param function: astromodels function
    :param minimizer: the minimizer to use
    :param verbose: print every step of the fit procedure
    :return: best fit results
    """

    # This is a wrapper to give an easier way to fit simple data without having
    # to go through the definition of sources
    pts = PointSource("source", 0.0, 0.0, function)

    model = Model(pts)

    self.set_model(model)

    self._joint_like_obj = JointLikelihood(model, DataList(self), verbose=verbose)

    self._joint_like_obj.set_minimizer(minimizer)

    return self._joint_like_obj.fit()
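A minimal usage sketch for the wrapper above, assuming a 3ML XYLike dataset and an astromodels Powerlaw; the toy data values are illustrative, not from the source:

# Illustrative only: toy x/y values, not from the source.
x = [1.0, 2.0, 5.0, 10.0]
y = [110, 45, 18, 9]

xy = XYLike("obs", x, y, poisson_data=True)

# The wrapper builds the PointSource, Model and JointLikelihood internally
# and returns the results of JointLikelihood.fit().
results = xy.fit(Powerlaw(), minimizer="minuit", verbose=False)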
def photometry_data_model(grond_plugin):

    spec = Powerlaw()  # * XS_zdust() * XS_zdust()

    datalist = DataList(grond_plugin)

    model = Model(PointSource("grb", 0, 0, spectral_shape=spec))

    yield model, datalist
def data_list_bn090217206_nai6_nai9_bgo1():

    p_list = []
    p_list.append(get_dataset_det("n6"))
    p_list.append(get_dataset_det("n9"))
    p_list.append(get_dataset_det("b1"))

    data_list = DataList(*p_list)

    return data_list
def get_model_and_datalist():

    grond = get_plugin()

    spec = Powerlaw()  # * XS_zdust() * XS_zdust()

    datalist = DataList(grond)

    model = Model(PointSource("grb", 0, 0, spectral_shape=spec))

    return model, datalist
def get_simulated_data(self, id):

    # Make sure we start from the best fit model
    self._jl_instance.restore_best_fit()

    # Generate a new data set for each plugin contained in the data list
    new_datas = []

    for dataset in list(self._jl_instance.data_list.values()):

        new_data = dataset.get_simulated_dataset("%s_sim" % dataset.name)

        new_datas.append(new_data)

    new_data_list = DataList(*new_datas)

    return new_data_list
def get_joint_likelihood_object_complex_likelihood():

    minus_log_L = Complex()

    # Instance a plugin (in this case a special one for illustrative purposes)
    plugin = CustomLikelihoodLike("custom")

    # Set the log likelihood function explicitly. This is not needed for any other
    # plugin
    plugin.set_minus_log_likelihood(minus_log_L)

    # Make the data list (in this case just one dataset)
    data = DataList(plugin)

    src = PointSource("test", ra=0.0, dec=0.0, spectral_shape=minus_log_L)

    model = Model(src)

    jl = JointLikelihoodWrap(model, data, verbose=False)

    return jl, model
def get_bayesian_analysis_object_complex_likelihood():

    minus_log_L = Complex()

    minus_log_L.mu.set_uninformative_prior(Log_uniform_prior)

    # Instance a plugin (in this case a special one for illustrative purposes)
    plugin = CustomLikelihoodLike("custom")

    # Set the log likelihood function explicitly. This is not needed for any other
    # plugin
    plugin.set_minus_log_likelihood(minus_log_L)

    # Make the data list (in this case just one dataset)
    data = DataList(plugin)

    src = PointSource("test", ra=0.0, dec=0.0, spectral_shape=minus_log_L)

    model = Model(src)

    bayes = BayesianAnalysisWrap(model, data, verbose=False)

    return bayes, model
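A sketch of how the two factory functions above would typically be driven, assuming the wrapped JointLikelihood/BayesianAnalysis objects expose the standard 3ML interface; the minimizer and sampler settings are illustrative only:

# Maximum-likelihood side: choose a minimizer and fit.
jl, model = get_joint_likelihood_object_complex_likelihood()
jl.set_minimizer("minuit")
best_fit, like_values = jl.fit()

# Bayesian side: pick a sampler, configure it, and sample (settings are illustrative).
bayes, model = get_bayesian_analysis_object_complex_likelihood()
bayes.set_sampler("emcee")
bayes.sampler.setup(n_iterations=500, n_burn_in=200, n_walkers=20)
bayes.sample(quiet=True)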
def xy_model_and_datalist():

    y = np.array(poiss_sig)

    xy = XYLike("test", x, y, poisson_data=True)

    fitfun = Line() + Gaussian()

    fitfun.b_1.bounds = (-10, 10.0)
    fitfun.a_1.bounds = (-100, 100.0)
    fitfun.F_2 = 60.0
    fitfun.F_2.bounds = (1e-3, 200.0)
    fitfun.mu_2 = 5.0
    fitfun.mu_2.bounds = (0.0, 100.0)
    fitfun.sigma_2.bounds = (1e-3, 10.0)

    model = Model(PointSource("fake", 0.0, 0.0, fitfun))

    data = DataList(xy)

    return model, data
def test_energy_time_fit():

    # Let's generate our dataset of 4 spectra with a normalization that follows
    # a powerlaw in time

    def generate_one(K):

        # Let's generate some data with y = Powerlaw(x)
        gen_function = Powerlaw()
        gen_function.K = K

        # Generate a dataset using the power law, and a
        # constant 30% error
        x = np.logspace(0, 2, 50)

        xyl_generator = XYLike.from_function(
            "sim_data", function=gen_function, x=x, yerr=0.3 * gen_function(x)
        )

        y = xyl_generator.y
        y_err = xyl_generator.yerr

        # xyl = XYLike("data", x, y, y_err)
        # xyl.plot(x_scale='log', y_scale='log')

        return x, y, y_err

    time_tags = np.array([1.0, 2.0, 5.0, 10.0])

    # This is the power law that defines the normalization as a function of time
    normalizations = 0.23 * time_tags ** (-1.2)

    datasets = list(map(generate_one, normalizations))

    # Now set up the fit and fit it

    time = IndependentVariable("time", 1.0, u.s)

    plugins = []

    for i, dataset in enumerate(datasets):

        x, y, y_err = dataset

        xyl = XYLike("data%i" % i, x, y, y_err)

        xyl.tag = (time, time_tags[i])

        assert xyl.tag == (time, time_tags[i], None)

        plugins.append(xyl)

    data = DataList(*plugins)

    spectrum = Powerlaw()
    spectrum.K.bounds = (0.01, 1000.0)

    src = PointSource("test", 0.0, 0.0, spectrum)

    model = Model(src)

    model.add_independent_variable(time)

    time_po = Powerlaw()
    time_po.K.bounds = (0.01, 1000)
    time_po.K.value = 2.0
    time_po.index = -1.5

    model.link(spectrum.K, time, time_po)

    jl = JointLikelihood(model, data)

    jl.set_minimizer("minuit")

    best_fit_parameters, likelihood_values = jl.fit()

    # Make sure we are within 10% of the expected result
    assert np.allclose(
        best_fit_parameters["value"].values,
        [0.25496115, -1.2282951, -2.01508341],
        rtol=0.1,
    )
def unbinned_polyfit(events: Iterable[float], grade: int, t_start: float,
                     t_stop: float, exposure: float, bayes: bool) -> Tuple[Polynomial, float]:
    """
    Function to fit a polynomial to unbinned event data.
    Not a member to allow parallel computation.

    :param events: the events to fit
    :param grade: the polynomial order or grade
    :param t_start: the start time to fit over
    :param t_stop: the end time to fit over
    :param exposure: the exposure of the interval
    :param bayes: whether to do a Bayesian fit or not
    """

    log.debug(f"starting unbinned_polyfit with grade {grade}")
    log.debug(f"have {len(events)} events with {exposure} exposure")

    # create 3ML plugins and fit them with 3ML!
    # should eventually allow better config

    # select the model based on the grade

    if threeML_config.time_series.default_fit_method is not None:

        bayes = threeML_config.time_series.default_fit_method
        log.debug("using a default poly fit method")

    if len(events) == 0:

        log.debug("no events! returning zero")

        return Polynomial([0] * (grade + 1)), 0

    shape = _grade_model_lookup[grade]()

    with silence_console_log():

        ps = PointSource("dummy", 0, 0, spectral_shape=shape)

        model = Model(ps)

        observation = EventObservation(events, exposure, t_start, t_stop)

        xy = UnbinnedPoissonLike("series", observation=observation)

        if not bayes:

            # make sure the model is positive

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.value = 10

                else:

                    v.value = 0.0

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            jl: JointLikelihood = JointLikelihood(model, DataList(xy))

            grid_minimizer = GlobalMinimization("grid")

            local_minimizer = LocalMinimization("minuit")

            my_grid = {model.dummy.spectrum.main.shape.a: np.logspace(0, 3, 10)}

            grid_minimizer.setup(second_minimization=local_minimizer, grid=my_grid)

            jl.set_minimizer(grid_minimizer)

            # if the fit fails, retry and then just accept

            try:

                jl.fit(quiet=True)

            except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                try:

                    jl.fit(quiet=True)

                except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                    log.debug("all MLE fits failed, returning zero")

                    return Polynomial([0] * (grade + 1)), 0

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(jl.results.covariance_matrix)

            min_log_likelihood = xy.get_log_like()

        else:

            # set smart priors

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.prior = Log_normal(mu=np.log(5), sigma=np.log(5))
                    v.value = 1

                else:

                    v.prior = Gaussian(mu=0, sigma=.5)
                    v.value = 0.1

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            ba: BayesianAnalysis = BayesianAnalysis(model, DataList(xy))

            ba.set_sampler("emcee")

            ba.sampler.setup(n_iterations=500, n_burn_in=200, n_walkers=20)

            ba.sample(quiet=True)

            ba.restore_median_fit()

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(ba.results.estimate_covariance_matrix())

            min_log_likelihood = xy.get_log_like()

    log.debug(f"-min loglike: {-min_log_likelihood}")

    return final_polynomial, -min_log_likelihood
def polyfit(x: Iterable[float], y: Iterable[float], grade: int,
            exposure: Iterable[float], bayes: bool = False) -> Tuple[Polynomial, float]:
    """
    Function to fit a polynomial to data.
    Not a member to allow parallel computation.

    :param x: the x coord of the data
    :param y: the y coord of the data
    :param grade: the polynomial order or grade
    :param exposure: the exposure of the interval
    :param bayes: whether to do a Bayesian fit or not
    """

    # Check that we have enough counts to perform the fit, otherwise
    # return a "zero polynomial"

    log.debug(f"starting polyfit with grade {grade}")

    if threeML_config.time_series.default_fit_method is not None:

        bayes = threeML_config.time_series.default_fit_method
        log.debug("using a default poly fit method")

    nan_mask = np.isnan(y)

    y = y[~nan_mask]
    x = x[~nan_mask]
    exposure = exposure[~nan_mask]

    non_zero_mask = y > 0
    n_non_zero = non_zero_mask.sum()

    if n_non_zero == 0:

        log.debug("no counts, return 0")

        # No data, nothing to do!
        return Polynomial([0.0] * (grade + 1)), 0.0

    # create 3ML plugins and fit them with 3ML!
    # should eventually allow better config

    # select the model based on the grade

    shape = _grade_model_lookup[grade]()

    ps = PointSource("_dummy", 0, 0, spectral_shape=shape)

    model = Model(ps)

    avg = np.mean(y / exposure)

    log.debug(f"starting polyfit with avg norm {avg}")

    with silence_console_log():

        xy = XYLike("series", x=x, y=y, exposure=exposure,
                    poisson_data=True, quiet=True)

        if not bayes:

            # make sure the model is positive

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.value = avg

                else:

                    v.value = 0.0

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            jl: JointLikelihood = JointLikelihood(model, DataList(xy))

            jl.set_minimizer("minuit")

            # if the fit fails, retry and then just accept

            try:

                jl.fit(quiet=True)

            except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                log.debug("1st fit failed")

                try:

                    jl.fit(quiet=True)

                except (FitFailed, BadCovariance, AllFitFailed, CannotComputeCovariance):

                    log.debug("all MLE fits failed")

                    pass

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            try:

                final_polynomial.set_covariace_matrix(jl.results.covariance_matrix)

            except:

                log.exception("Fit failed in channel")
                raise FitFailed()

            min_log_likelihood = xy.get_log_like()

        else:

            # set smart priors

            for i, (k, v) in enumerate(model.free_parameters.items()):

                if i == 0:

                    v.bounds = (0, None)
                    v.prior = Log_normal(mu=np.log(avg),
                                         sigma=np.max([np.log(avg / 2), 1]))
                    v.value = 1

                else:

                    v.prior = Gaussian(mu=0, sigma=2)
                    v.value = 1e-2

            # we actually use a line here
            # because a constant returns a
            # single number

            if grade == 0:

                shape.b = 0
                shape.b.fix = True

            ba: BayesianAnalysis = BayesianAnalysis(model, DataList(xy))

            ba.set_sampler("emcee")

            ba.sampler.setup(n_iterations=500, n_burn_in=200, n_walkers=20)

            ba.sample(quiet=True)

            ba.restore_median_fit()

            coeff = [v.value for _, v in model.free_parameters.items()]

            log.debug(f"got coeff: {coeff}")

            final_polynomial = Polynomial(coeff)

            final_polynomial.set_covariace_matrix(ba.results.estimate_covariance_matrix())

            min_log_likelihood = xy.get_log_like()

    log.debug(f"-min loglike: {-min_log_likelihood}")

    return final_polynomial, -min_log_likelihood
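A minimal sketch of calling polyfit on a small binned light curve, assuming the module-level _grade_model_lookup and log are configured as above; the bin values here are illustrative, not from the source:

# Illustrative only: a tiny binned light curve (bin centers, counts, exposures).
x = np.array([0.5, 1.5, 2.5, 3.5])
y = np.array([12.0, 9.0, 11.0, 10.0])
exposure = np.ones_like(x)

# grade=0 fits a constant (internally a Line with the slope fixed to zero);
# the function returns the fitted Polynomial and -log(likelihood) at the fit.
background_poly, neg_log_like = polyfit(x, y, grade=0, exposure=exposure, bayes=False)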
def test_ubinned_poisson_full(event_observation_contiguous, event_observation_split):

    s = Line()

    ps = PointSource("s", 0, 0, spectral_shape=s)

    s.a.bounds = (0, None)
    s.a.value = .1
    s.b.value = .1

    s.a.prior = Log_normal(mu=np.log(10), sigma=1)
    s.b.prior = Gaussian(mu=0, sigma=1)

    m = Model(ps)

    ######
    ######
    ######

    ub1 = UnbinnedPoissonLike("test", observation=event_observation_contiguous)

    jl = JointLikelihood(m, DataList(ub1))

    jl.fit(quiet=True)

    np.testing.assert_allclose([s.a.value, s.b.value], [6.11, 1.45], rtol=.5)

    ba = BayesianAnalysis(m, DataList(ub1))

    ba.set_sampler("emcee")

    ba.sampler.setup(n_burn_in=100, n_walkers=20, n_iterations=500)

    ba.sample(quiet=True)

    ba.restore_median_fit()

    np.testing.assert_allclose([s.a.value, s.b.value], [6.11, 1.45], rtol=.5)

    ######
    ######
    ######

    ub2 = UnbinnedPoissonLike("test", observation=event_observation_split)

    jl = JointLikelihood(m, DataList(ub2))

    jl.fit(quiet=True)

    np.testing.assert_allclose([s.a.value, s.b.value], [2., .2], rtol=.5)

    ba = BayesianAnalysis(m, DataList(ub2))

    ba.set_sampler("emcee")

    ba.sampler.setup(n_burn_in=100, n_walkers=20, n_iterations=500)

    ba.sample(quiet=True)

    ba.restore_median_fit()

    np.testing.assert_allclose([s.a.value, s.b.value], [2., .2], rtol=.5)