Code Example #1
File: test.py  Project: FNTwin/GPGO
import numpy as np

# Assumed imports: GP, RBF and generate_grid belong to the GPGO package that
# this test file is part of; adjust the path to match the installed layout.
from GPGO import GP, RBF, generate_grid


def test_GP_2D(optimize=True, function=np.linspace):
    # `function`, dim_out and n_train_p mirror the original test signature;
    # only dim_test is actually used below.
    dim_test = 2
    dim_out = 1
    n_train_p = 7
    # 14 random training points in [-1, 1]^2 with noiseless targets.
    X = np.random.uniform(-1, 1, (14, 2))
    Z = ((X[:, 1] ** 2 * X[:, 0] ** 2)
         * np.sin(X[:, 1] ** 2 + X[:, 0] ** 2))[:, None]
    # Alternative target tried during development:
    # Z = np.sin(X[:, 1] ** 2 + X[:, 0] ** 2)[:, None]
    gp = GP(X, Z, kernel=RBF())
    gp.fit()
    # Dense evaluation grid: 100 points per dimension on [-1, 1]^2.
    plot = generate_grid(dim_test, 100, [[-1, 1] for _ in range(dim_test)])
    print(plot.shape)

    if optimize:
        # Restart-based hyperparameter optimization, then predict and plot
        # on the evaluation grid.
        gp.optimize(n_restarts=30)
        pred = gp.predict(plot)
        gp.plot(plot)
        print("New marg likelihood :", gp.get_marg(), "\n Hyperparameters: ",
              gp.get_kernel().gethyper())
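
These examples all follow the same pattern: fit the GP, read off the log marginal likelihood and kernel hyperparameters, optimize, and read them off again. A compact sketch of that loop, assuming GP, RBF and generate_grid are importable from the GPGO package (the import path is a guess; every call mirrors the tests on this page):

import numpy as np
from GPGO import GP, RBF, generate_grid  # assumed import path

X = np.random.uniform(-1, 1, (20, 2))
y = np.sin(X[:, 0] ** 2 + X[:, 1] ** 2)[:, None]

gp = GP(X, y, kernel=RBF())
gp.fit()
print("before:", gp.get_marg(), gp.get_kernel().gethyper())

gp.optimize(n_restarts=10)      # restart-based marginal likelihood optimization
print("after:", gp.get_marg(), gp.get_kernel().gethyper())

grid = generate_grid(2, 50, [[-1, 1], [-1, 1]])
mean, var = gp.predict(grid)    # posterior mean and variance on the grid
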
Code Example #2
File: test.py  Project: FNTwin/GPGO
def test_GP_4D(optimize=False):
    # Random 4D training design on [-2, 2]^4 (3 points per dimension).
    x = generate_grid(4, 3, [[-2, 2] for i in range(4)], np.random.uniform)

    def f(x):
        return x[:, 1] ** 2 - x[:, 3] * x[:, 0]

    y = f(x)[:, None]
    # Regular evaluation grid with 5 points per dimension.
    plot = generate_grid(4, 5, [[-2, 2] for i in range(4)], np.linspace)
    gp = GP(x, y, kernel=RBF(sigma_l=2, l=2))
    gp.fit()
    mean, var = gp.predict(plot)
    print("Old marg likelihood :", gp.get_marg(), "\n Hyperparameters: ",
          gp.get_kernel().gethyper())

    if optimize:
        # Grid/random search over the hyperparameter bounds ("constrains" is
        # the keyword spelling expected by the GPGO API).
        gp.optimize_grid(constrains=[[1, 3], [2, 100], [0, 30]],
                         n_points=100,
                         function=np.random.uniform)
        mean, var = gp.predict(plot)
        print("New marg likelihood :", gp.get_marg(), "\n Hyperparameters: ",
              gp.get_kernel().gethyper())

    return mean, var
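
test_GP_4D is the only test here that returns its predictions, so it can be called directly to inspect the posterior. A hedged usage sketch (it assumes the imports shown in Example #1 are in scope):

mean, var = test_GP_4D(optimize=True)
print("posterior mean shape:", mean.shape)
print("posterior variance shape:", var.shape)
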
Code Example #3
File: test.py  Project: FNTwin/GPGO
def test_GP_1D(optimize=True):
    # 16 random 1D training points in [-3, 3].
    x = np.random.uniform(-3, 3, 16)[:, None]

    def f(X):
        # Other 1D test functions were tried during development, e.g.
        # (6 * X - 2) ** 2 * np.sin(12 * X - 4) - X
        return np.sin(X)

    def noise(x, alpha=1):
        # Optionally corrupt the targets with Gaussian noise (unused below).
        return f(x) + np.random.randn(*x.shape) * alpha

    y = f(x)
    print(y)

    gp = GP(x, y, kernel=RBF(gradient=False), normalize_y=False)
    gp.fit()

    # Evaluation points far outside the training range to show extrapolation.
    plot = np.linspace(-20, 20, 1000)

    gp.log_marginal_likelihood()
    print("Old marg likelihood :", gp.get_marg(), "\n Hyperparameters: ",
          gp.get_kernel().gethyper())
    if optimize:
        # Gradient-based hyperparameter optimization with 10 restarts.
        gp.optimize(n_restarts=10, optimizer="L-BFGS-B", verbose=False)
        gp.plot(plot[:, None])
        gp.log_marginal_likelihood()
        # log_gp is presumably a logging helper defined elsewhere in test.py.
        log_gp(gp)
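
A minimal driver for running the three tests as a script; matplotlib is required for the gp.plot calls, and log_gp is assumed to be a helper defined elsewhere in test.py:

if __name__ == "__main__":
    test_GP_1D(optimize=True)          # 1D fit, optimization and plot
    test_GP_2D(optimize=True)          # 2D fit, optimization and plot
    mean, var = test_GP_4D(optimize=False)
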