Code example #1
def test_anisotropic_rbf_kernel():
    import numpy as np
    import treegp
    from scipy.spatial.distance import pdist, squareform
    # get_correlation_length_matrix is a helper defined at module level in
    # the treegp test suite (a sketch of it follows this example).

    corr_length = [1., 30., 30., 30., 30.]
    g1 = [0, 0.4, 0.4, -0.4, -0.4]
    g2 = [0, 0.4, -0.4, 0.4, -0.4]
    kernel_amp = [1e-4, 1e-3, 1e-2, 1., 1.]
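    # 100 points along the 2-D diagonal for the kernel matrix test.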
    dist = np.linspace(0, 10, 100)
    coord = np.array([dist, dist]).T

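    # 21 x 21 grid of separations for the correlation-function test.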
    dist = np.linspace(-10, 10, 21)

    X, Y = np.meshgrid(dist, dist)
    x = X.reshape(len(dist)**2)
    y = Y.reshape(len(dist)**2)
    coord_corr = np.array([x, y]).T

    def _anisotropic_rbf_kernel(x, sigma, corr_length, g1, g2):
        L = get_correlation_length_matrix(corr_length, g1, g2)
        invL = np.linalg.inv(L)
        dists = pdist(x, metric='mahalanobis', VI=invL)
        K = np.exp(-0.5 * dists**2)
        lim0 = 1.
        K = squareform(K)
        np.fill_diagonal(K, lim0)
        K *= sigma**2
        return K

    def _anisotropic_rbf_corr_function(x, y, sigma, corr_length, g1, g2):
        L = get_correlation_length_matrix(corr_length, g1, g2)
        l = np.linalg.inv(L)
        dist_a = (l[0, 0] * x * x) + (2 * l[0, 1] * x * y) + (l[1, 1] * y * y)
        z = np.exp(-0.5 * dist_a)
        return z * sigma**2

    for i in range(5):
        L = get_correlation_length_matrix(corr_length[i], g1[i], g2[i])
        inv_L = np.linalg.inv(L)
        ker = kernel_amp[i]**2 * treegp.AnisotropicRBF(invLam=inv_L)
        ker_treegp = ker(coord)
        corr_treegp = ker(coord_corr, Y=np.zeros_like(coord_corr))[:, 0]
        ker_test = _anisotropic_rbf_kernel(coord, kernel_amp[i],
                                           corr_length[i], g1[i], g2[i])
        corr_test = _anisotropic_rbf_corr_function(x, y, kernel_amp[i],
                                                   corr_length[i], g1[i],
                                                   g2[i])
        np.testing.assert_allclose(ker_treegp, ker_test, atol=1e-12)
        np.testing.assert_allclose(corr_treegp, corr_test, atol=1e-12)

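        # ker.theta packs log(amplitude**2) first, then the Cholesky factor
        # of invLam: two log-diagonal entries followed by the off-diagonal
        # entry, which the block below uses to reconstruct invLam.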
        hyperparameter = ker.theta
        theta = hyperparameter[1:]
        L1 = np.zeros_like(inv_L)
        L1[np.diag_indices(2)] = np.exp(theta[:2])
        L1[np.tril_indices(2, -1)] = theta[2:]
        invLam = np.dot(L1, L1.T)
        np.testing.assert_allclose(inv_L, invLam, atol=1e-12)
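
All of these examples lean on get_correlation_length_matrix(size, g1, g2) from the treegp test suite. A minimal sketch of what such a helper plausibly does, building the anisotropic correlation-length matrix L by rotating a diagonal matrix whose scales come from the size and the ellipticity (g1, g2); names and details here are illustrative, not treegp's actual code:

import numpy as np

def get_correlation_length_matrix_sketch(size, g1, g2):
    # Ellipticity modulus and position angle from the two shear components.
    e = np.sqrt(g1**2 + g2**2)
    q = (1. - e) / (1. + e)          # axis ratio
    phi = 0.5 * np.arctan2(g2, g1)   # orientation angle
    rot = np.array([[np.cos(phi), -np.sin(phi)],
                    [np.sin(phi),  np.cos(phi)]])
    ell = np.array([[size**2, 0.],
                    [0., (size * q)**2]])
    return rot @ ell @ rot.T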
Code example #2
File: test_meanify.py  Project: jmeyers314/treegp
def test_gpinterp_meanify():
    import os
    import numpy as np
    import treegp
    # make_2d_grf, make_average and get_correlation_length_matrix are
    # helpers assumed to be defined elsewhere in test_meanify.py.
    optimizer = ['log-likelihood', 'anisotropic']
    npoints = [600, 2000]
    noise = 0.01
    sigma = 2.
    size = 0.5
    g1 = 0.2
    g2 = 0.2
    ker = 'AnisotropicRBF'

    # Generate 2D gaussian random fields.
    L = get_correlation_length_matrix(size, g1, g2)
    invL = np.linalg.inv(L)
    kernel = "%f**2*%s"%((sigma, ker))
    kernel += "(invLam={0!r})".format(invL)
    kernel_skl = treegp.eval_kernel(kernel)

    for n, opt in enumerate(optimizer):

        x, y, y_err = make_2d_grf(kernel_skl,
                                  noise=noise,
                                  seed=42, npoints=npoints[n])
        # add mean function
        coords0, y0 = make_average(coord=x, gp=False)
        y += y0

        # Do gp interpolation without hyperparameters
        # fitting (truth is put initially).
        gp = treegp.GPInterpolation(kernel=kernel, optimizer=opt,
                                    normalize=True, nbins=21, min_sep=0.,
                                    max_sep=3., p0=[0.5, 0, 0],
                                    average_fits=os.path.join('inputs',
                                                              'mean_gp_stat_mean.fits'))
        gp.initialize(x, y, y_err=y_err)
        gp.solve()
        # Test that the fitted hyperparameters are close to the true ones.
        np.testing.assert_allclose(kernel_skl.theta, gp.kernel.theta, atol=5e-1)

        # Predict at the same positions as the simulated data.
        # Predictions are strictly equal to the input data in the
        # no-noise case. With noise, expect a pull distribution with
        # mean around 0 and std < 1 (the same data are used to train
        # and validate, and the data are well sampled compared to the
        # input correlation length).
        y_predict, y_cov = gp.predict(x, return_cov=True)
        y_std = np.sqrt(np.diag(y_cov))
        pull = y - y_predict
        pull /= np.sqrt(y_err**2 + y_std**2)
        mean_pull = np.mean(pull)
        std_pull = np.std(pull)

        # Test that the mean of the pull is close to zero and its std is below 1.
        np.testing.assert_allclose(0., mean_pull, atol=3.*(std_pull)/np.sqrt(npoints[n]))
        if std_pull > 1.:
            raise ValueError("std_pull is > 1. Current value std_pull = %f"%(std_pull))
Code example #3
def _anisotropic_rbf_kernel(x, sigma, corr_length, g1, g2):
    L = get_correlation_length_matrix(corr_length, g1, g2)
    invL = np.linalg.inv(L)
    dists = pdist(x, metric='mahalanobis', VI=invL)
    K = np.exp(-0.5 * dists**2)
    lim0 = 1.
    K = squareform(K)
    np.fill_diagonal(K, lim0)
    K *= sigma**2
    return K
Code example #4
def _anisotropic_vonkarman_kernel(x, sigma, corr_length, g1, g2):
    L = get_correlation_length_matrix(corr_length, g1, g2)
    invL = np.linalg.inv(L)
    dists = pdist(x, metric='mahalanobis', VI=invL)
    K = dists**(5. / 6.) * special.kv(5. / 6., 2 * np.pi * dists)
    lim0 = special.gamma(5. / 6.) / (2 * ((np.pi)**(5. / 6.)))
    K = squareform(K)
    np.fill_diagonal(K, lim0)
    K /= lim0
    K *= sigma**2
    return K
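
The constant lim0 above is the d -> 0 limit of d**(5/6) * K_{5/6}(2*pi*d): from the small-argument form K_nu(z) ~ Gamma(nu)/2 * (2/z)**nu, the limit works out to Gamma(5/6) / (2 * pi**(5/6)). A quick standalone check with scipy:

import numpy as np
from scipy import special

d = 1e-8  # small separation
lim0 = special.gamma(5. / 6.) / (2. * np.pi**(5. / 6.))
approx = d**(5. / 6.) * special.kv(5. / 6., 2. * np.pi * d)
print(np.isclose(approx, lim0, rtol=1e-6))  # True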
Code example #5
def _anisotropic_vonkarman_corr_function(x, y, sigma, corr_length, g1, g2):
    L = get_correlation_length_matrix(corr_length, g1, g2)
    l = np.linalg.inv(L)
    dist_a = (l[0, 0] * x * x) + (2 * l[0, 1] * x * y) + (l[1, 1] * y * y)
    z = np.zeros_like(dist_a)
    Filter = dist_a != 0.
    z[Filter] = dist_a[Filter]**(5. / 12.) * special.kv(
        5. / 6., 2 * np.pi * np.sqrt(dist_a[Filter]))
    lim0 = special.gamma(5. / 6.) / (2 * ((np.pi)**(5. / 6.)))
    if np.sum(Filter) != len(z):
        z[~Filter] = lim0
    z /= lim0
    return z * sigma**2
Code example #6
File: test_gp_interp.py  Project: jmeyers314/treegp
def test_gp_interp_2d():
    import numpy as np
    import treegp
    # make_2d_grf and get_correlation_length_matrix are helpers assumed
    # to be defined elsewhere in test_gp_interp.py.
    npoints = 200
    noise = [None, 0.1]
    # When there is no noise, a small "magic" white-noise term is needed
    # to keep the kernel matrix numerically positive definite (its
    # determinant is close to 0) so that the GP interpolation can
    # proceed; see the jitter sketch after this example.
    white_noise = [1e-5, 0.]
    sigma = [1., 1.]
    size = [2., 4.]
    g1 = [0., 0.2]
    g2 = [0., 0.2]
    atols_on_data = [0., 1e-3]
    kernels = ['AnisotropicRBF', 'AnisotropicVonKarman']

    for ker in kernels:
        for i in range(2):
            # Generate 2D gaussian random fields.
            L = get_correlation_length_matrix(size[i], g1[i], g2[i])
            invL = np.linalg.inv(L)
            kernel = "%f**2*%s" % ((sigma[i], ker))
            kernel += "(invLam={0!r})".format(invL)
            kernel_skl = treegp.eval_kernel(kernel)

            x, y, y_err = make_2d_grf(kernel_skl,
                                      noise=noise[i],
                                      seed=42,
                                      npoints=npoints)

            # Do gp interpolation without hyperparameters
            # fitting (truth is put initially).
            gp = treegp.GPInterpolation(kernel=kernel,
                                        optimizer="none",
                                        white_noise=white_noise[i])
            gp.initialize(x, y, y_err=y_err)

            # Predict at the same positions as the simulated data.
            # Predictions are strictly equal to the input data in the
            # no-noise case. With noise, expect a pull distribution with
            # mean around 0 and std < 1 (the same data are used to train
            # and validate, and the data are well sampled compared to
            # the input correlation length).
            y_predict, y_cov = gp.predict(x, return_cov=True)
            y_std = np.sqrt(np.diag(y_cov))
            pull = y - y_predict
            if noise[i] is not None:
                pull /= np.sqrt(y_err**2 + y_std**2)
            else:
                # Test that the prediction equals the data at the data
                # positions, and that the diagonal of the predicted
                # covariance is zero there, in the no-noise case.
                np.testing.assert_allclose(y,
                                           y_predict,
                                           atol=3. * white_noise[i])
                np.testing.assert_allclose(np.zeros_like(y_std),
                                           y_std,
                                           atol=3. * white_noise[i])

            mean_pull = np.mean(pull)
            std_pull = np.std(pull)

            # Test that the mean of the pull is close to zero and its std is below 1.
            np.testing.assert_allclose(0.,
                                       mean_pull,
                                       atol=3. * (std_pull) / np.sqrt(npoints))
            if std_pull > 1.:
                raise ValueError(
                    "std_pull is > 1. Current value std_pull = %f" %
                    (std_pull))

            # Test that under extrapolation the interpolation returns the
            # mean function (0 here) and that the diagonal of the covariance
            # matrix is close to the hyperparameter that sets the amplitude
            # of the Gaussian random field fluctuations. (Note: the uniform
            # bounds below are equal, so every new point lands at the same
            # far-away position, which is enough to exercise extrapolation.)

            np.random.seed(42)
            x1 = np.random.uniform(
                np.max(x) + 6. * size[i],
                np.max(x) + 6. * size[i], npoints)
            x2 = np.random.uniform(
                np.max(x) + 6. * size[i],
                np.max(x) + 6. * size[i], npoints)
            new_x = np.array([x1, x2]).T

            gp = treegp.GPInterpolation(kernel=kernel,
                                        optimizer="none",
                                        normalize=False,
                                        white_noise=white_noise[i])
            gp.initialize(x, y, y_err=y_err)
            y_predict, y_cov = gp.predict(new_x, return_cov=True)
            y_std = np.sqrt(np.diag(y_cov))

            np.testing.assert_allclose(np.zeros_like(y_predict),
                                       y_predict,
                                       atol=1e-5)
            np.testing.assert_allclose(sigma[i] * np.ones_like(y_std),
                                       y_std,
                                       atol=1e-5)
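
The white_noise term used above can be seen in isolation: adding a tiny diagonal "jitter" to a nearly singular kernel matrix restores numerical positive definiteness. A standalone illustration (plain numpy, not treegp code):

import numpy as np

# Two nearly coincident points make an RBF kernel matrix numerically singular.
x = np.array([0.0, 1e-9, 1.0])
K = np.exp(-0.5 * (x[:, None] - x[None, :])**2)
try:
    np.linalg.cholesky(K)                    # typically fails: K is singular
except np.linalg.LinAlgError:
    pass
np.linalg.cholesky(K + 1e-5**2 * np.eye(3))  # succeeds with the jitter term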
Code example #7
def _anisotropic_rbf_corr_function(x, y, sigma, corr_length, g1, g2):
    L = get_correlation_length_matrix(corr_length, g1, g2)
    l = np.linalg.inv(L)
    dist_a = (l[0, 0] * x * x) + (2 * l[0, 1] * x * y) + (l[1, 1] * y * y)
    z = np.exp(-0.5 * dist_a)
    return z * sigma**2
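
This helper is the closed-form counterpart of the pdist-based kernel in example #3: for a separation vector d = (x, y), the quadratic form it expands is exactly the squared Mahalanobis distance d.T @ invL @ d. A quick numpy check of that identity (the matrix here is illustrative):

import numpy as np

l = np.array([[2.0, 0.3], [0.3, 1.0]])   # symmetric positive-definite example
x, y = 0.7, -1.2
d = np.array([x, y])
quad = l[0, 0] * x * x + 2 * l[0, 1] * x * y + l[1, 1] * y * y
np.testing.assert_allclose(quad, d @ l @ d)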
Code example #8
def test_anisotropic_vonkarman_kernel():
    import numpy as np
    import treegp
    from scipy import special
    from scipy.spatial.distance import pdist, squareform

    corr_length = [1., 30., 30., 30., 30.]
    g1 = [0, 0.4, 0.4, -0.4, -0.4]
    g2 = [0, 0.4, -0.4, 0.4, -0.4]
    kernel_amp = [1e-4, 1e-3, 1e-2, 1., 1.]
    dist = np.linspace(0, 10, 100)
    coord = np.array([dist, dist]).T

    dist = np.linspace(-10, 10, 21)

    X, Y = np.meshgrid(dist, dist)
    x = X.reshape(len(dist)**2)
    y = Y.reshape(len(dist)**2)
    coord_corr = np.array([x, y]).T

    def _anisotropic_vonkarman_kernel(x, sigma, corr_length, g1, g2):
        L = get_correlation_length_matrix(corr_length, g1, g2)
        invL = np.linalg.inv(L)
        dists = pdist(x, metric='mahalanobis', VI=invL)
        K = dists**(5. / 6.) * special.kv(5. / 6., 2 * np.pi * dists)
        lim0 = special.gamma(5. / 6.) / (2 * ((np.pi)**(5. / 6.)))
        K = squareform(K)
        np.fill_diagonal(K, lim0)
        K /= lim0
        K *= sigma**2
        return K

    def _anisotropic_vonkarman_corr_function(x, y, sigma, corr_length, g1, g2):
        L = get_correlation_length_matrix(corr_length, g1, g2)
        l = np.linalg.inv(L)
        dist_a = (l[0, 0] * x * x) + (2 * l[0, 1] * x * y) + (l[1, 1] * y * y)
        z = np.zeros_like(dist_a)
        Filter = dist_a != 0.
        z[Filter] = dist_a[Filter]**(5. / 12.) * special.kv(
            5. / 6., 2 * np.pi * np.sqrt(dist_a[Filter]))
        lim0 = special.gamma(5. / 6.) / (2 * ((np.pi)**(5. / 6.)))
        if np.sum(Filter) != len(z):
            z[~Filter] = lim0
        z /= lim0
        return z * sigma**2

    for i in range(5):
        L = get_correlation_length_matrix(corr_length[i], g1[i], g2[i])
        inv_L = np.linalg.inv(L)
        ker = kernel_amp[i]**2 * treegp.AnisotropicVonKarman(invLam=inv_L)
        ker_treegp = ker(coord)
        corr_treegp = ker(coord_corr, Y=np.zeros_like(coord_corr))[:, 0]
        ker_test = _anisotropic_vonkarman_kernel(coord, kernel_amp[i],
                                                 corr_length[i], g1[i], g2[i])
        corr_test = _anisotropic_vonkarman_corr_function(
            x, y, kernel_amp[i], corr_length[i], g1[i], g2[i])
        np.testing.assert_allclose(ker_treegp, ker_test, atol=1e-12)
        np.testing.assert_allclose(corr_treegp, corr_test, atol=1e-12)

        hyperparameter = ker.theta
        theta = hyperparameter[1:]
        L1 = np.zeros_like(inv_L)
        L1[np.diag_indices(2)] = np.exp(theta[:2])
        L1[np.tril_indices(2, -1)] = theta[2:]
        invLam = np.dot(L1, L1.T)
        np.testing.assert_allclose(inv_L, invLam, atol=1e-12)
Code example #9
File: test_hyp_search.py  Project: jmeyers314/treegp
def test_hyperparameter_search_2d():
    import numpy as np
    import treegp
    # make_2d_grf and get_correlation_length_matrix are helpers assumed
    # to be defined elsewhere in test_hyp_search.py.
    optimizer = ['log-likelihood', 'anisotropic']
    npoints = [400, 2000]

    noise = 0.01
    sigma = 2.
    size = 0.5
    g1 = 0.2
    g2 = 0.2
    ker = 'AnisotropicRBF'

    # Generate 2D gaussian random fields.
    L = get_correlation_length_matrix(size, g1, g2)
    invL = np.linalg.inv(L)
    kernel = "%f**2*%s"%((sigma, ker))
    kernel += "(invLam={0!r})".format(invL)
    kernel_skl = treegp.eval_kernel(kernel)

    for n, opt in enumerate(optimizer):
        x, y, y_err = make_2d_grf(kernel_skl,
                                  noise=noise,
                                  seed=42, npoints=npoints[n])

        # Do gp interpolation without hyperparameters
        # fitting (truth is put initially).
        gp = treegp.GPInterpolation(kernel=kernel, optimizer=opt,
                                    normalize=True, nbins=21, min_sep=0.,
                                    max_sep=1., p0=[0.3, 0., 0.])
        gp.initialize(x, y, y_err=y_err)
        gp.solve()
        # Test that the fitted hyperparameters are close to the true ones.
        np.testing.assert_allclose(kernel_skl.theta, gp.kernel.theta, atol=5e-1)

        # Predict at the same positions as the simulated data.
        # Predictions are strictly equal to the input data in the
        # no-noise case. With noise, expect a pull distribution with
        # mean around 0 and std < 1 (the same data are used to train
        # and validate, and the data are well sampled compared to the
        # input correlation length).
        y_predict, y_cov = gp.predict(x, return_cov=True)
        y_std = np.sqrt(np.diag(y_cov))
        pull = y - y_predict
        pull /= np.sqrt(y_err**2 + y_std**2)
        mean_pull = np.mean(pull)
        std_pull = np.std(pull)

        # Test that the mean of the pull is close to zero and its std is below 1.
        np.testing.assert_allclose(0., mean_pull, atol=3.*(std_pull)/np.sqrt(npoints[n]))
        if std_pull > 1.:
            raise ValueError("std_pull is > 1. Current value std_pull = %f"%(std_pull))

        # Test that under extrapolation the interpolation returns the mean
        # function (here np.mean(y), since normalize=True) and that the
        # diagonal of the covariance matrix is close to the hyperparameter
        # that sets the amplitude of the Gaussian random field fluctuations.
        # (Note: the uniform bounds below are equal, so every new point
        # lands at the same far-away position.)

        np.random.seed(42)
        x1 = np.random.uniform(np.max(x)+6.*size,
                               np.max(x)+6.*size, npoints[n])
        x2 = np.random.uniform(np.max(x)+6.*size,
                               np.max(x)+6.*size, npoints[n])
        new_x = np.array([x1, x2]).T

        y_predict, y_cov = gp.predict(new_x, return_cov=True)
        y_std = np.sqrt(np.diag(y_cov))

        np.testing.assert_allclose(np.mean(y), y_predict, atol=1e-5)
        sig = np.sqrt(np.exp(gp.kernel.theta[0]))
        np.testing.assert_allclose(sig*np.ones_like(y_std), y_std, atol=1e-5)
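
The line sig = np.sqrt(np.exp(gp.kernel.theta[0])) relies on the scikit-learn kernel convention, which these treegp kernels appear to follow: hyperparameters are stored in log space, with the first entry of theta being log(sigma**2). A quick check with a plain scikit-learn kernel:

import numpy as np
from sklearn.gaussian_process.kernels import ConstantKernel, RBF

sigma = 2.0
ker = ConstantKernel(sigma**2) * RBF(length_scale=0.5)
# For a product kernel, theta concatenates the factors' log-parameters,
# so theta[0] is log(sigma**2).
np.testing.assert_allclose(np.sqrt(np.exp(ker.theta[0])), sigma)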