Example #1
def GPLVM_model(name):
    Q = 5  # latent dimensionality
    # GPLVM is a full (non-sparse) model, so no inducing points are needed.
    X_mean = gpflow.models.PCA_reduce(Y, Q)  # initialise latent means via PCA

    # Additive kernel: ARD RBF on the first three latent dimensions,
    # linear kernel on the remaining two (slice(0, 3) / slice(3, 5)
    # work equally well as active_dims here).
    k = (kernels.RBF(3, ARD=True, active_dims=[0, 1, 2]) +
         kernels.Linear(2, ARD=False, active_dims=[3, 4]))

    # GPLVM
    GPLVM = gpflow.models.GPLVM(Y=Y, latent_dim=Q, X_mean=X_mean, kern=k)

    opt = gpflow.train.ScipyOptimizer()
    GPLVM.compile()
    opt.minimize(GPLVM)  # e.g. opt.minimize(GPLVM, disp=True, maxiter=100)

    # Sensitivity of the output to each latent input: for an ARD RBF,
    # the amplitude divided by the per-dimension lengthscale.
    kern = GPLVM.kern.kernels[0]  # RBF component of the additive kernel
    sens = np.sqrt(kern.variance.read_value()) / kern.lengthscales.read_value()
    print(GPLVM.kern)
    print(sens)
    # fig, ax = plt.subplots()
    # ax.bar(np.arange(len(kern.lengthscales.read_value())) , sens, 0.1, color='y')
    # ax.set_title('Sensitivity to latent inputs')
    # plt.savefig("../res/oils_sen.png")
    # plt.close(fig)

    return GPLVM, sens
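
Usage note: GPLVM_model relies on a module-level data matrix Y and on imports the snippet does not show. A minimal driver sketch, assuming GPflow 1.x (TensorFlow 1.x backend) and a synthetic placeholder Y; the data shape and the "demo" name are illustrative assumptions, not part of the original:

import numpy as np
import gpflow
from gpflow import kernels

# Placeholder data: N=100 observations, D=12 features (assumed, not original).
np.random.seed(0)
Y = np.random.randn(100, 12)

model, sens = GPLVM_model("demo")
print(sens.shape)  # (3,): one sensitivity per ARD RBF lengthscale
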
Example #2
def BGPLVM_model(name, Q=10, M=20):
    np.random.seed(22)
    # Create a Bayesian GPLVM model with an ARD RBF kernel
    N = Y.shape[0]
    X_mean = gpflow.models.PCA_reduce(Y, Q)  # initialise latent means via PCA
    # X_mean = np.random.normal(size=[N, Q])  # alternative: random initialisation
    Z = np.random.permutation(X_mean.copy())[:M]  # M random rows as inducing inputs

    # ARD RBF kernel over all Q latent dimensions; an additive
    # RBF + Linear kernel (as in Example #1) is a possible alternative.
    k = kernels.RBF(Q, ARD=True)

    # Bayesian GPLVM
    BGPLVM = gpflow.models.BayesianGPLVM(X_mean=X_mean,
                                         X_var=0.1 * np.ones((N, Q)),
                                         Y=Y,
                                         kern=k,
                                         M=M,
                                         Z=Z)
    BGPLVM.likelihood.variance = 0.01  # small initial observation-noise variance

    opt = gpflow.train.ScipyOptimizer()
    BGPLVM.compile()
    opt.minimize(BGPLVM, disp=False, maxiter=1000)

    # Sensitivity of the output to each latent input: ARD RBF amplitude
    # divided by the per-dimension lengthscale.
    kern = BGPLVM.kern
    sens = np.sqrt(kern.variance.read_value()) / kern.lengthscales.read_value()
    print(BGPLVM.kern)
    print(sens)
    fig, ax = plt.subplots()
    ax.bar(np.arange(len(kern.lengthscales.read_value())),
           sens,
           0.1,
           color='y')
    ax.set_title('Sensitivity to latent inputs')
    plt.savefig("../res/{}_sen_bgplvm_Q{}_M{}.png".format(name, Q, M))
    plt.close(fig)
    with open("../res/{}_bgplvm_Q{}_M{}.pickle".format(name, Q, M),
              "wb") as res:
        pickle.dump(BGPLVM.X_mean.read_value(), res)

    return BGPLVM, sens
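
The sens vector in these helpers is the usual ARD relevance heuristic: for an RBF kernel with signal variance sigma^2 and per-dimension lengthscales l_q, the sensitivity of latent dimension q is sigma / l_q, so dimensions with large lengthscales are effectively switched off. A standalone, model-independent sketch of the same computation (plain NumPy; the hyperparameter values are made up for illustration):

import numpy as np

def ard_sensitivity(variance, lengthscales):
    # sigma / l_q for each latent dimension q; higher = more relevant
    return np.sqrt(variance) / np.asarray(lengthscales)

# Hypothetical learned hyperparameters for Q=5 latent dimensions
sens = ard_sensitivity(2.5, [0.4, 8.0, 0.9, 30.0, 1.2])
order = np.argsort(sens)[::-1]  # most relevant dimension first
print(order[:2])                # the two dimensions worth plotting
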
Example #3
def RVBGPLVM_model(name, Q=10, M=20, lamb=20, verbose=False):
    np.random.seed(22)
    # Create a regularized variational Bayesian GPLVM with an ARD RBF kernel
    N, D = Y.shape
    X_mean = gpflow.models.PCA_reduce(Y, Q)  # initialise latent means via PCA
    Z = np.random.permutation(X_mean.copy())[:M]  # M random rows as inducing inputs

    # ARD RBF kernel over all Q latent dimensions; an additive
    # RBF + Linear kernel (as in Example #1) is a possible alternative.
    k = kernels.RBF(Q, ARD=True)

    # Regularized GPLVM
    RVBGPLVM = gpflow.models.RegularizedVBGPLVM(X_mean=X_mean,
                                                X_var=0.1 * np.ones((N, Q)),
                                                U_mean=np.zeros((M, D)),
                                                U_var=0.01 * np.ones((M, D)),
                                                Y=Y,
                                                kern=k,
                                                M=M,
                                                Z=Z,
                                                lamb=lamb)
    RVBGPLVM.likelihood.variance = 0.01  # small initial observation-noise variance

    opt = gpflow.train.ScipyOptimizer()
    RVBGPLVM.compile()
    opt.minimize(RVBGPLVM, disp=False)

    # Sensitivity of the output to each latent input: ARD RBF amplitude
    # divided by the per-dimension lengthscale.
    kern = RVBGPLVM.kern
    sens = np.sqrt(kern.variance.read_value()) / kern.lengthscales.read_value()
    print(RVBGPLVM.kern)
    print(sens)
    # Computed here rather than inside the verbose block, because the
    # pickle below needs them even when verbose=False.
    sens_order = np.argsort(sens)
    colors = cm.rainbow(np.linspace(0, 1, len(np.unique(labels))))
    if verbose:
        fig, ax = plt.subplots()
        ax.bar(np.arange(len(kern.lengthscales.read_value())),
               sens,
               0.1,
               color='y')
        ax.set_title('Sensitivity to latent inputs')
        plt.savefig("../res/test/{}_sen_rvbgplvm_Q{}_M{}_LAM{}.png".format(
            name, Q, M, lamb))
        plt.close(fig)

        fig, ax = plt.subplots()
        # Scatter each class in the two most sensitive latent dimensions
        for i, c in zip(np.unique(labels), colors):
            ax.scatter(RVBGPLVM.X_mean.read_value()[labels == i,
                                                    sens_order[-1]],
                       RVBGPLVM.X_mean.read_value()[labels == i,
                                                    sens_order[-2]],
                       color=c,
                       label=i)
        ax.set_title('RVBGPLVM LAM = {}'.format(lamb))
        # Overlay the inducing-point locations once, outside the loop
        ax.scatter(RVBGPLVM.feature.Z.read_value()[:, sens_order[-1]],
                   RVBGPLVM.feature.Z.read_value()[:, sens_order[-2]],
                   label="IP",
                   marker='x')
        plt.savefig("../res/test/RVBGPLVM_LAM{}.png".format(lamb))
        loglik = RVBGPLVM.compute_log_likelihood()
        np.savetxt("../res/test/RVBGPLVM_LAM{}.csv".format(lamb),
                   np.asarray([loglik]))

    with open(
            "../res/{}_rvbgplvm_Q{}_M{}_LAM{}.pickle".format(name, Q, M, lamb),
            "wb") as res:
        pickle.dump([
            RVBGPLVM.X_mean.read_value(),
            RVBGPLVM.feature.Z.read_value(), labels, colors, sens_order
        ], res)

    return RVBGPLVM, sens
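
Usage note: the pickle written by RVBGPLVM_model bundles the latent means, inducing inputs, labels, colors, and the sensitivity ordering. A sketch of reading such a file back and re-plotting the two most sensitive latent dimensions; the path follows the format string above with placeholder arguments and is an assumption, not an actual result file:

import pickle
import numpy as np
import matplotlib.pyplot as plt

# Path mirrors "../res/{name}_rvbgplvm_Q{Q}_M{M}_LAM{lamb}.pickle"; adjust as needed.
with open("../res/demo_rvbgplvm_Q10_M20_LAM20.pickle", "rb") as res:
    X_mean, Z, labels, colors, sens_order = pickle.load(res)

fig, ax = plt.subplots()
for i, c in zip(np.unique(labels), colors):
    ax.scatter(X_mean[labels == i, sens_order[-1]],
               X_mean[labels == i, sens_order[-2]],
               color=c, label=i)
ax.legend()
plt.show()
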