def test_XYLike_chi2():

    # Get fake data with Gaussian noise

    yerr = np.array(gauss_sigma)
    y = np.array(gauss_signal)

    # Fit

    xy = XYLike("test", x, y, yerr)

    fitfun = Line() + Gaussian()
    fitfun.F_2 = 60.0
    fitfun.mu_2 = 4.5

    res = xy.fit(fitfun)

    # Verify that the fit converged where it should have
    assert np.allclose(
        res[0]["value"].values,
        [0.82896119, 40.20269202, 62.80359114, 5.04080011, 0.27286713],
        rtol=0.05,
    )

    # Test not setting yerr: errors default to ones
    xy = XYLike("test", x, y)

    assert np.all(xy.yerr == np.ones_like(y))

    fitfun = Line() + Gaussian()
    fitfun.F_2 = 60.0
    fitfun.mu_2 = 4.5

    res = xy.fit(fitfun)
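# NOTE (assumption, not part of the original module): the fixtures x, gauss_signal,
# gauss_sigma and poiss_sig used by these tests are defined elsewhere in this test
# module. The hypothetical helper below only sketches how such fake data could be
# built with the same Line() + Gaussian() model the tests fit; the actual fixture
# values (and the parameter choices here) may well differ.
def _sketch_fake_data(seed=1234):

    np.random.seed(seed)

    # Generating model: a line plus a Gaussian bump (illustrative parameter values)
    generator = Line() + Gaussian()
    generator.a_1 = 1.0
    generator.b_1 = 40.0
    generator.F_2 = 60.0
    generator.mu_2 = 5.0

    x_grid = np.linspace(0, 10, 50)
    expectation = generator(x_grid)

    # Gaussian-noise dataset: errors scale with the model expectation
    sigma = np.sqrt(expectation)
    signal = np.random.normal(expectation, sigma)

    # Poisson dataset: integer counts drawn around the same expectation
    poisson_signal = np.random.poisson(expectation)

    return x_grid, signal, sigma, poisson_signal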
def test_goodness_of_fit():

    # Let's generate some data with y = Powerlaw(x)

    gen_function = Powerlaw()

    # Generate a dataset using the power law, and a
    # constant 30% error

    x = np.logspace(0, 2, 50)

    xyl_generator = XYLike.from_function(
        "sim_data", function=gen_function, x=x, yerr=0.3 * gen_function(x)
    )

    y = xyl_generator.y
    y_err = xyl_generator.yerr

    fit_function = Powerlaw()

    xyl = XYLike("data", x, y, y_err)

    parameters, like_values = xyl.fit(fit_function)

    gof, all_results, all_like_values = xyl.goodness_of_fit()

    # Compute the number of degrees of freedom
    n_dof = len(xyl.x) - len(fit_function.free_parameters)

    # Get the observed value for chi2
    obs_chi2 = 2 * like_values["-log(likelihood)"]["data"]

    theoretical_gof = scipy.stats.chi2(n_dof).sf(obs_chi2)

    assert np.isclose(theoretical_gof, gof["total"], rtol=0.1)
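# NOTE: goodness_of_fit() estimates the p-value by Monte Carlo, simulating synthetic
# datasets from the best-fit model and refitting each one. For Gaussian errors the
# statistic is a chi2, so the Monte Carlo estimate should agree with the analytic
# survival function used in the assert above. A hypothetical standalone version of
# that analytic expectation (names here are illustrative, not part of the module):
def _analytic_chi2_pvalue(n_points, n_free_parameters, minus_log_likelihood):

    # Degrees of freedom: data points minus free parameters of the fit function
    n_dof = n_points - n_free_parameters

    # The chi2 statistic is twice the minimum of -log(likelihood)
    observed_chi2 = 2.0 * minus_log_likelihood

    # Probability of obtaining a chi2 at least this large by chance
    return scipy.stats.chi2(n_dof).sf(observed_chi2)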
def test_xy_plot():

    # Get fake data with Gaussian noise

    yerr = np.array(gauss_sigma)
    y = np.array(gauss_signal)

    # Plot the data alone, then fit and plot again with the model overlaid

    xy = XYLike("test", x, y, yerr)

    xy.plot()

    fitfun = Line() + Gaussian()
    fitfun.F_2 = 60.0
    fitfun.mu_2 = 4.5

    res = xy.fit(fitfun)

    xy.plot()
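# NOTE (assumption): test_xy_plot only verifies that plotting a dataset, before and
# after a fit, does not raise. When running headless (e.g. on CI) a non-interactive
# matplotlib backend is typically selected before pyplot is imported, e.g.:
#
#     import matplotlib
#     matplotlib.use("Agg")
#
# This module is assumed to rely on the backend configuration provided by the test
# runner or by the framework itself.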
def test_XYLike_poisson():

    # Now the Poisson case: with poisson_data=True the plugin uses a Poisson
    # likelihood instead of the Gaussian (chi2) one, so no yerr is passed and the
    # variance follows from the model expectation

    y = np.array(poiss_sig)

    xy = XYLike("test", x, y, poisson_data=True)

    fitfun = Line() + Gaussian()
    fitfun.F_2 = 60.0
    fitfun.F_2.bounds = (0, 200.0)
    fitfun.mu_2 = 5.0
    fitfun.a_1.bounds = (0.1, 5.0)
    fitfun.b_1.bounds = (0.1, 100.0)

    res = xy.fit(fitfun)

    # Verify that the fit converged where it should have
    # print(res[0]["value"])
    assert np.allclose(
        res[0]["value"],
        [0.783748, 40.344599, 71.560055, 4.989727, 0.330570],
        rtol=0.05,
    )