Example #1
    # imports assumed by this snippet (it begins mid-script):
    #   import numpy as np
    #   import pyGPs
    #   from pyGPs.Validation import valid
    #   from pyGPs.GraphExtensions import graphUtil
    model = pyGPs.GPC()   # binary GP classification model (assumed setup)
    k = pyGPs.cov.RBF()
    model.setPrior(kernel=k)

    # split training and test data
    x_train = x[index_train, :]
    y_train = y[index_train, :]
    x_test = x[index_test, :]
    y_test = y[index_test, :]

    # gp
    model.optimize(x_train, y_train)
    model.predict(x_test)

    # evaluation: model.ym holds the predictive mean; its sign gives the class label
    predictive_class_rbf = np.sign(model.ym)
    ACC_rbf = valid.ACC(predictive_class_rbf, y_test)

    ## DIFFUSION Kernel
    # compute the kernel matrix and initialize the GP with the precomputed kernel
    model = pyGPs.GPC()
    M1, M2 = graphUtil.formKernelMatrix(Matrix, index_train, index_test)
    k = pyGPs.cov.Pre(M1, M2)
    model.setPrior(kernel=k)

    # if you use only a precomputed kernel matrix, no training data is needed,
    # but you still need to specify x_train (due to the general structure of pyGPs);
    # e.g. you can use the following:
    n = len(index_train)
    x_train = np.zeros((n, 1))

    # gp
    # normalize kernel matrix (not useful for MUTAG)
    # Matrix = graphUtil.normalizeKernel(Matrix)
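    # (For reference: kernel normalization conventionally rescales entries to
    # unit self-similarity, K'[i,j] = K[i,j] / sqrt(K[i,i] * K[j,j]), so that
    # K'[i,i] = 1 for every graph; that graphUtil.normalizeKernel follows
    # exactly this convention is an assumption.)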

    # start cross-validation for this t (the diffusion-kernel parameter)
    ACC = []  # collect the accuracy of each fold
    for index_train, index_test in valid.k_fold_index(N, K=10):

        y_train = graph_label[index_train, :]
        y_test = graph_label[index_test, :]

        n1 = len(index_train)
        n2 = len(index_test)

        model = pyGPs.GPC()
        M1, M2 = graphUtil.formKernelMatrix(Matrix, index_train, index_test)
        k = pyGPs.cov.Pre(M1, M2)
        model.setPrior(kernel=k)

        # gp
        x_train = np.zeros((n1, 1))
        x_test = np.zeros((n2, 1))
        model.fit(x_train, y_train)
        model.predict(x_test)
        predictive_class = np.sign(model.ym)

        # evaluation
        acc = valid.ACC(predictive_class, y_test)
        ACC.append(acc)

    print('Accuracy:', np.round(np.mean(ACC), 2),
          '(' + str(np.round(np.std(ACC), 2)) + ')')
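
valid.k_fold_index(N, K) above yields (index_train, index_test) pairs over range(N).
A minimal sketch of that behavior, assuming contiguous folds (the helper
make_fold_indices below is hypothetical, not part of pyGPs):

import numpy as np

def make_fold_indices(N, K=10):
    # Split range(N) into K roughly equal, contiguous folds; each fold serves
    # once as the test set while the remaining folds form the training set.
    folds = np.array_split(np.arange(N), K)
    for i in range(K):
        index_test = folds[i]
        index_train = np.concatenate([folds[j] for j in range(K) if j != i])
        yield index_train, index_test
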
Example #3
# Multi-class GP classification (GPMC) model with 10 classes
model = pyGPs.GPMC(10)

# Set data to model
model.setData(x, y)

# optimize a default GPC model (see demo_GPC) for each binary classification problem,
# and decide the label for the test patterns of handwritten digits;
# predictive_vote[i,j] is the probability of being class j for test pattern i
predictive_vote = model.optimizeAndPredict(xs)

predictive_class = np.argmax(predictive_vote, axis=1)
predictive_class = np.reshape(predictive_class, (predictive_class.shape[0],1))
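# (reshape to an (n,1) column vector so its shape matches the labels ys)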

# Accuracy of digit recognition
acc = valid.ACC(predictive_class, ys)
print("Accuracy of recognizing handwritten digits:", round(acc, 2))


#----------------------------------------------------------------------
# A bit more things you can do
#----------------------------------------------------------------------
# Just like we did for GP classification,
# you can specify the settings for each binary classification problem:
m = pyGPs.mean.Zero()
k = pyGPs.cov.RBF()
model.setPrior(mean=m, kernel=k)
model.useInference("Laplace")

# Besides optimizeAndPredict(xs),
# there is also an option to predict without optimization:
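# If your pyGPs version exposes GPMC.fitAndPredict (it appears alongside
# optimizeAndPredict in the pyGPs demos, but treat its availability as an
# assumption), the non-optimizing counterpart would be:
# predictive_vote = model.fitAndPredict(xs)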
Example #4
K = 10     # number of cross-validation folds (assumed value)
ACC = []   # collect the accuracy of each fold
RMSE = []  # collect the RMSE of each fold
cv_run = 0
for x_train, x_test, y_train, y_test in valid.k_fold_validation(x, y, K):
    print('Run:', cv_run)
    # This is a binary classification problem
    model = pyGPs.GPC()
    # Since we have no prior knowledge, leave everything at its defaults
    model.optimize(x_train, y_train)
    # Predict
    ymu, ys2, fmu, fs2, lp = model.predict(x_test, ys=y_test)

    # For classification, ymu is a continuous value between -1 and +1.
    # To assign the prediction to one of the two classes, take the sign of ymu.
    ymu_class = np.sign(ymu)

    # Evaluation
    acc = valid.ACC(ymu_class, y_test)
    print('   accuracy =', round(acc, 2))
    rmse = valid.RMSE(ymu_class, y_test)
    print('   rmse =', round(rmse, 2))
    ACC.append(acc)
    RMSE.append(rmse)

    # Toward next run
    cv_run += 1

print('\nAccuracy: ', np.round(np.mean(ACC), 2),
      '(' + str(np.round(np.std(ACC), 2)) + ')')
print('Root-Mean-Square Error: ', np.round(np.mean(RMSE), 2))

#----------------------------------------------------------------------
# Evaluation measures
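
For reference, the two measures used above, valid.ACC and valid.RMSE, follow
the standard definitions; the sketch below shows those formulas and is not
pyGPs' exact implementation:

import numpy as np

def ACC(predict, target):
    # Fraction of predictions that exactly match the targets.
    return np.sum(predict == target) / float(len(target))

def RMSE(predict, target):
    # Root-mean-square error between predictions and targets.
    return np.sqrt(np.mean((predict - target) ** 2))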