def test_anisotropic_limit():
    """Test that AnisotropicRBF with isotropic covariance equals RBF"""
    kernel1 = "RBF(0.45)"
    kernel2 = "AnisotropicRBF(scale_length=[0.45, 0.45])"

    gp1 = piff.GPInterp(kernel=kernel1)
    gp2 = piff.GPInterp(kernel=kernel2)

    X = np.random.rand(1000, 2)
    np.testing.assert_allclose(gp1.gp.kernel(X), gp2.gp.kernel(X))
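
# The identity exercised above does not depend on piff: an RBF kernel whose per-dimension
# scale lengths are all equal is exactly the isotropic RBF.  A minimal sketch of the same
# check using scikit-learn's RBF directly (illustrative only, not part of the test suite;
# piff's AnisotropicRBF is not used here):
def _sketch_isotropic_limit():
    from sklearn.gaussian_process.kernels import RBF
    X = np.random.rand(100, 2)
    k_iso = RBF(length_scale=0.45)            # one scalar length scale
    k_aniso = RBF(length_scale=[0.45, 0.45])  # equal per-dimension length scales
    # The two kernel matrices agree to machine precision.
    np.testing.assert_allclose(k_iso(X), k_aniso(X))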
def test_guess():
    rng = galsim.BaseDeviate(8675309)

    ntrain, nvalidate, nvisualize = 100, 1, 21
    training_data, validation_data, visualization_data = \
        make_grf_psf_params(ntrain, nvalidate, nvisualize)

    inferred_scale_length = []
    if __name__ == '__main__':
        guesses = [0.03, 0.1, 0.3, 1.0, 3.0]
        rtol = 0.02
    else:
        guesses = [0.03, 0.3, 3.0]
        rtol = 0.03
    for guess in guesses:
        # noise of 0.3 turns out to be pretty significant here.
        stars = params_to_stars(training_data, noise=0.3, rng=rng)
        kernel = "1*RBF({0}, (1e-1, 1e1))".format(guess)
        kernel += " + WhiteKernel(1e-5, (1e-7, 1e-1))"
        interp = piff.GPInterp(kernel=kernel)
        stars = [mod.fit(s) for s in stars]  # mod is the model defined at module scope in this test file
        stars = interp.initialize(stars)
        interp.solve(stars)
        # A bit complicated, but this extracts the scale length from the optimized kernel.
        inferred_scale_length.append(np.exp(interp.gp.kernel_.theta[1]))

    # Check that the inferred scale length is close to the input value of 0.3.
    np.testing.assert_allclose(inferred_scale_length, 0.3, rtol=0.15)
    # More interesting, however, is how independent the optimization is of the initial value.
    # So check that the standard deviation of the results is much smaller than the value.
    np.testing.assert_array_less(np.std(inferred_scale_length), 0.3 * rtol)
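
# The extraction np.exp(interp.gp.kernel_.theta[1]) above relies on how scikit-learn packs
# kernel hyperparameters: kernel.theta holds the log of each free hyperparameter,
# concatenated in kernel-tree order, so for "1*RBF(...) + WhiteKernel(...)" index 1 is the
# RBF length scale.  A small sketch of that layout using scikit-learn alone (illustrative
# only, not part of the test suite):
def _sketch_theta_layout():
    from sklearn.gaussian_process.kernels import ConstantKernel, RBF, WhiteKernel
    # Same structure as the kernel string built in test_guess: 1*RBF(0.3) + WhiteKernel(1e-5)
    kernel = ConstantKernel(1.0) * RBF(0.3) + WhiteKernel(1e-5)
    print([h.name for h in kernel.hyperparameters])  # constant value, then length scale, then noise level
    print(np.exp(kernel.theta))                      # [1.0, 0.3, 1e-5]; index 1 is the scale length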
def check_gp(training_data, validation_data, visualization_data, kernel,
             npca=0, optimize=False, filename=None, rng=None, visualize=False,
             check_config=False):
    """Solve for global PSF model, test it, and optionally display it."""
    stars = params_to_stars(training_data, noise=0.03, rng=rng)
    validate_stars = params_to_stars(validation_data, noise=0.0, rng=rng)
    interp = piff.GPInterp(kernel=kernel, optimize=optimize, npca=npca)
    interp.initialize(stars)

    iterate(stars, interp)
    if visualize:
        display(training_data, visualization_data, interp)
    validate(validate_stars, interp)

    if check_config:
        config = {
            'interp': {
                'type': 'GPInterp',
                'kernel': kernel,
                'npca': npca,
                'optimize': optimize
            }
        }
        logger = piff.config.setup_logger()
        interp3 = piff.Interp.process(config['interp'], logger)
        iterate(stars, interp3)
        validate(validate_stars, interp3)

    # Check that we can write interp to disk and read back in.
    if filename is not None:
        testfile = os.path.join('output', filename)
        with fitsio.FITS(testfile, 'rw', clobber=True) as f:
            interp.write(f, 'interp')
        with fitsio.FITS(testfile, 'r') as f:
            interp2 = piff.GPInterp.read(f, 'interp')
        print("Revalidating after i/o.")
        X = np.vstack([training_data['u'], training_data['v']]).T
        np.testing.assert_allclose(interp.gp.kernel(X), interp2.gp.kernel(X))
        np.testing.assert_allclose(interp.gp.kernel.theta, interp2.gp.kernel.theta)
        np.testing.assert_allclose(interp.gp.kernel_.theta, interp2.gp.kernel_.theta)
        np.testing.assert_allclose(interp.gp.alpha_, interp2.gp.alpha_, rtol=1e-6)
        np.testing.assert_allclose(interp.gp.X_train_, interp2.gp.X_train_)
        np.testing.assert_allclose(interp.gp.y_train_mean, interp2.gp.y_train_mean)
        validate(validate_stars, interp2)
def test_anisotropic_guess():
    rng = galsim.BaseDeviate(8675309)

    ntrain, nvalidate, nvisualize = 100, 1, 1
    training_data, validation_data, visualization_data = \
        make_anisotropic_grf_psf_params(ntrain, nvalidate, nvisualize)

    var1s = []
    var2s = []
    corrs = []
    if __name__ == '__main__':
        guesses = [0.03, 0.1, 0.3, 1.0, 3.0]
        rtol = 0.05
    else:
        guesses = [0.03, 0.3, 3.0]
        rtol = 0.10
    for guess in guesses:
        stars = params_to_stars(training_data, noise=0.03, rng=rng)
        kernel = "1*AnisotropicRBF(scale_length={0!r})".format([guess, guess])
        kernel += " + WhiteKernel(1e-5, (1e-7, 1e-1))"
        interp = piff.GPInterp(kernel=kernel)
        stars = [mod.fit(s) for s in stars]
        stars = interp.initialize(stars)
        interp.solve(stars)
        # invLam is the inverse covariance matrix of the AnisotropicRBF kernel; invert it
        # to get the variances and the correlation coefficient.
        invLam = interp.gp.kernel_.get_params()['k1__k2__invLam']
        Lam = np.linalg.inv(invLam)
        var1s.append(Lam[0, 0])
        var2s.append(Lam[1, 1])
        corrs.append(Lam[0, 1] / np.sqrt(Lam[0, 0] * Lam[1, 1]))
        print(var1s[-1], var2s[-1], corrs[-1])

    # Check that the inferred covariance is close to the input values:
    # var1 = 0.1**2, var2 = 0.2**2, corr = 0.7
    np.testing.assert_allclose(var1s, 0.1**2, rtol=1.0)  # Only get right order-of-magnitude or so
    np.testing.assert_allclose(var2s, 0.2**2, rtol=1.0)  # Only get right order-of-magnitude or so
    np.testing.assert_allclose(corrs, 0.7, rtol=0.1)     # This one works much better

    # More interesting, however, is how independent the optimization is of the initial value.
    # So check that the standard deviation of the results is small.
    np.testing.assert_array_less(np.std(var1s), 0.1**2 * rtol)
    np.testing.assert_array_less(np.std(var2s), 0.2**2 * rtol)
    np.testing.assert_array_less(np.std(corrs), 0.7 * rtol)
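
# The loop above recovers the fitted covariance by inverting the kernel's invLam parameter
# and then reads off the two variances and the correlation coefficient.  A self-contained
# sketch of that bookkeeping, using the test's input values (var1=0.1**2, var2=0.2**2,
# corr=0.7) as an assumed covariance (illustrative only, not part of the test suite):
def _sketch_invlam_to_corr():
    var1, var2, corr = 0.1**2, 0.2**2, 0.7
    cov = corr * np.sqrt(var1 * var2)
    Lam_true = np.array([[var1, cov], [cov, var2]])
    invLam = np.linalg.inv(Lam_true)   # the form stored by the anisotropic kernel
    Lam = np.linalg.inv(invLam)        # the form reconstructed by the test
    print(Lam[0, 0], Lam[1, 1])                         # ~0.01, ~0.04
    print(Lam[0, 1] / np.sqrt(Lam[0, 0] * Lam[1, 1]))   # ~0.7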
def test_meanify():
    if __name__ == '__main__':
        rtol = 4.e-1
        atol = 5.e-2
        bin_spacing = 30  # arcsec
    else:
        rtol = 1.e-1
        atol = 3.e-2
        bin_spacing = 150  # arcsec

    psf_file = 'test_mean_*.piff'
    average_file = 'average.fits'
    psfs_list = sorted(glob.glob(os.path.join('output', 'test_mean_*.piff')))

    config0 = {
        'output': {
            'file_name': psfs_list,
        },
        'hyper': {
            'file_name': 'output/' + average_file,
        }
    }
    config1 = {
        'output': {
            'file_name': psf_file,
            'dir': 'output',
        },
        'hyper': {
            'file_name': average_file,
            'dir': 'output',
            'bin_spacing': bin_spacing,
            'statistic': 'mean',
            'params_fitted': [0, 2]
        }
    }
    config2 = {
        'output': {
            'file_name': psf_file,
            'dir': 'output',
        },
        'hyper': {
            'file_name': average_file,
            'dir': 'output',
            'bin_spacing': bin_spacing,
            'statistic': 'median',
        }
    }

    for config in [config0, config1, config2]:
        piff.meanify(config)

        ## Test that meanify recovered the initial average.
        average = fitsio.read(os.path.join('output', average_file))
        params0 = make_average(coord=average['COORDS0'][0] / 0.26, gp=False)
        keys = ['hlr', 'g1', 'g2']
        for i, key in enumerate(keys):
            if config == config1 and i == 1:
                # 'g1' (index 1) is excluded from params_fitted in config1, so its stored
                # average should be identically zero.
                np.testing.assert_allclose(np.zeros(len(average['PARAMS0'][0][:, i])),
                                           average['PARAMS0'][0][:, i], rtol=0, atol=0)
            else:
                np.testing.assert_allclose(params0[key], average['PARAMS0'][0][:, i],
                                           rtol=rtol, atol=atol)

    ## Gaussian process test using the meanified average.
    np.random.seed(68)
    x = np.random.uniform(0, 2048, size=1000)
    y = np.random.uniform(0, 2048, size=1000)
    coord = np.array([x, y]).T
    average = make_average(coord=coord)
    stars = params_to_stars(average, noise=0.0, rng=None)
    stars_training = stars[:900]
    stars_validation = stars[900:]

    fit_hyp = ['none', 'isotropic']
    for fit in fit_hyp:
        gp = piff.GPInterp(kernel="0.009 * RBF(300.*0.26)", optimizer=fit,
                           white_noise=1e-5, average_fits='output/average.fits')
        gp.initialize(stars_training)
        gp.solve(stars_training)
        stars_interp = gp.interpolateList(stars_validation)

        params_interp = np.array([s.fit.params for s in stars_interp])
        params_validation = np.array([s.fit.params for s in stars_validation])
        params_training = np.array([s.fit.params for s in stars_training])

        np.testing.assert_allclose(params_interp, params_validation, rtol=rtol, atol=atol)
def check_gp(stars_training, stars_validation, kernel, optimizer,
             min_sep=None, max_sep=None, nbins=20, l0=3000., rows=None,
             plotting=False, atol=4e-2, rtol=1e-3, test_star_fit=False):
    """Solve for global PSF model, test it, and optionally display it."""
    interp = piff.GPInterp(kernel=kernel, optimizer=optimizer,
                           normalize=True, white_noise=0., l0=l0,
                           n_neighbors=4, average_fits=None, rows=rows,
                           nbins=nbins, min_sep=min_sep, max_sep=max_sep,
                           logger=None)

    interp.initialize(stars_training)
    interp.solve(stars=stars_training, logger=None)

    if not test_star_fit:
        stars_test = interp.interpolateList(stars_validation)
    else:
        stars_v = copy.deepcopy(stars_validation)
        for s in stars_v:
            s.fit = None
        stars_test = interp.interpolateList(stars_v)

    xtest = np.array([interp.getProperties(star) for star in stars_validation])
    y_validation = np.array([star.fit.params for star in stars_validation])
    y_err = np.sqrt(np.array([star.fit.params_var for star in stars_validation]))

    y_test = np.array([star.fit.params for star in stars_test])

    np.testing.assert_allclose(y_test, y_validation, atol=atol)

    if optimizer != 'none':
        truth_hyperparameters = np.exp(interp._init_theta)
        fitted_hyperparameters = np.exp(
            np.array([gp._optimizer._kernel.theta for gp in interp.gps]))
        np.testing.assert_allclose(np.mean(fitted_hyperparameters, axis=0),
                                   np.mean(truth_hyperparameters, axis=0),
                                   rtol=rtol)

    # Invalid kernel (can't use an instantiated kernel object for the kernel here)
    with np.testing.assert_raises(TypeError):
        piff.GPInterp(kernel=interp.gps[0].kernel, optimizer=optimizer)
    # Invalid optimizer
    with np.testing.assert_raises(ValueError):
        piff.GPInterp(kernel=kernel, optimizer='invalid')
    # Invalid number of kernels. (Can't tell until initialize)
    if isinstance(kernel, str):
        interp2 = piff.GPInterp(kernel=[kernel] * 4, optimizer=optimizer)
        with np.testing.assert_raises(ValueError):
            interp2.initialize(stars_training)

    # Check I/O.
    file_name = os.path.join('output', 'test_gp.fits')
    with fitsio.FITS(file_name, 'rw', clobber=True) as fout:
        interp.write(fout, extname='gp')
    with fitsio.FITS(file_name, 'r') as fin:
        interp2 = piff.Interp.read(fin, extname='gp')

    stars_test = interp2.interpolateList(stars_validation)
    y_test = np.array([star.fit.params for star in stars_test])
    np.testing.assert_allclose(y_test, y_validation, atol=atol)

    if plotting:
        import matplotlib.pyplot as plt
        title = ["size", "$g_1$", "$g_2$"]
        for j in range(3):
            plt.figure()
            plt.title('%s validation' % (title[j]), fontsize=18)
            plt.scatter(xtest[:, 0], xtest[:, 1], c=y_validation[:, j],
                        vmin=-4e-2, vmax=4e-2, cmap=plt.cm.seismic)
            plt.colorbar()

            plt.figure()
            plt.title('%s test (gp interp)' % (title[j]), fontsize=18)
            plt.scatter(xtest[:, 0], xtest[:, 1], c=y_test[:, j],
                        vmin=-4e-2, vmax=4e-2, cmap=plt.cm.seismic)
            plt.colorbar()

        if optimizer in ['isotropic', 'anisotropic']:
            if optimizer == 'isotropic':
                for gp in interp.gps:
                    plt.figure()
                    plt.scatter(gp._optimizer._2pcf_dist, gp._optimizer._2pcf)
                    plt.plot(gp._optimizer._2pcf_dist, gp._optimizer._2pcf_fit)
                    plt.plot(gp._optimizer._2pcf_dist,
                             np.ones_like(gp._optimizer._2pcf_dist) * 4e-4, 'b--')
                    plt.ylim(0, 7e-4)
            else:
                for gp in interp.gps:
                    EXT = [np.min(gp._optimizer._2pcf_dist[:, 0]),
                           np.max(gp._optimizer._2pcf_dist[:, 0]),
                           np.min(gp._optimizer._2pcf_dist[:, 1]),
                           np.max(gp._optimizer._2pcf_dist[:, 1])]
                    CM = plt.cm.seismic
                    MAX = np.max(gp._optimizer._2pcf)
                    N = int(np.sqrt(len(gp._optimizer._2pcf)))

                    plt.figure(figsize=(10, 5), frameon=False)
                    plt.subplots_adjust(wspace=0.5, left=0.07, right=0.95,
                                        bottom=0.15, top=0.85)
                    plt.subplot(1, 2, 1)
                    plt.imshow(gp._optimizer._2pcf.reshape(N, N), extent=EXT,
                               interpolation='nearest', origin='lower',
                               vmin=-MAX, vmax=MAX, cmap=CM)
                    cbar = plt.colorbar()
                    cbar.formatter.set_powerlimits((0, 0))
                    cbar.update_ticks()
                    cbar.set_label('$\\xi$', fontsize=20)
                    plt.xlabel('$\\theta_X$', fontsize=20)
                    plt.ylabel('$\\theta_Y$', fontsize=20)
                    plt.title('Measured 2-PCF', fontsize=16)

                    plt.subplot(1, 2, 2)
                    plt.imshow(gp._optimizer._2pcf_fit.reshape(N, N), extent=EXT,
                               interpolation='nearest', origin='lower',
                               vmin=-MAX, vmax=MAX, cmap=CM)
                    cbar = plt.colorbar()
                    cbar.formatter.set_powerlimits((0, 0))
                    cbar.update_ticks()
                    cbar.set_label("$\\xi'$", fontsize=20)
                    plt.xlabel('$\\theta_X$', fontsize=20)
                    plt.ylabel('$\\theta_Y$', fontsize=20)

        plt.show()
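
# Note on the hyperparameter check in check_gp above: option strings such as optimizer are
# compared with ==/!= (optimizer != 'none').  Using 'is'/'is not' would compare object
# identity, which for strings built at runtime can disagree with value equality and draws
# a SyntaxWarning on recent Pythons when used against a literal.  A tiny illustration
# (not part of the test suite):
def _sketch_is_vs_eq():
    target = 'none'
    opt = ''.join(['no', 'ne'])   # value 'none', but built at runtime as a new object
    print(opt == target)          # True: value equality, what option checks should use
    print(opt is target)          # False here: 'is' compares object identity, not value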