Example 1
import numpy as np
import pvml


def ksvm_cross_validation(k,
                          X,
                          Y,
                          kfun,
                          kparam,
                          lambda_,
                          lr=1e-3,
                          steps=10000):
    m = X.shape[0]
    # Randomly assign a fold index to each sample.
    folds = np.arange(m) % k
    np.random.shuffle(folds)
    correct_predictions = 0
    # For each fold...
    for fold in range(k):
        Xtrain = X[folds != fold, :]
        Ytrain = Y[folds != fold]
        # Train a model
        alpha, b = pvml.ksvm_train(Xtrain,
                                   Ytrain,
                                   kfun,
                                   kparam,
                                   lambda_,
                                   lr=lr,
                                   steps=steps)
        # Evaluate the model on the left-out fold
        Xval = X[folds == fold, :]
        Yval = Y[folds == fold]
        pred, _ = pvml.ksvm_inference(Xval, Xtrain, alpha, b, kfun, kparam)
        print((pred == Yval).mean())
        correct_predictions += (pred == Yval).sum()
    return correct_predictions / m
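
A small driver script can call this function directly. The sketch below is only an illustration, assuming pvml exposes ksvm_train and ksvm_inference as used above; the synthetic two-blob data set, the RBF kernel parameter and the regularization value are all assumptions, not part of the original example.

import numpy as np

# Illustrative data: two Gaussian blobs with binary labels (assumed setup).
rng = np.random.default_rng(0)
X = np.vstack([rng.normal(-1.0, 1.0, (50, 2)), rng.normal(1.0, 1.0, (50, 2))])
Y = np.concatenate([np.zeros(50), np.ones(50)])

# 5-fold cross-validated accuracy with an RBF kernel.
accuracy = ksvm_cross_validation(5, X, Y, "rbf", 0.1, 1e-3)
print("cross-validated accuracy:", accuracy)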
Example 2
def inference(self, X):
    # Kernel SVM inference needs the stored training samples together with
    # the learned coefficients alpha and the bias b.
    ret = pvml.ksvm_inference(X, self.Xtrain, self.alpha, self.b,
                              self.kfun, self.kparam)
    labels, logits = ret
    return labels, logits + 0.5
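
The method above reads self.Xtrain, self.alpha, self.b, self.kfun and self.kparam, so it presumably belongs to a small classifier wrapper. The class below is a minimal sketch of such a wrapper, assuming a training step based on pvml.ksvm_train; the class name, constructor and train signature are assumptions, not part of the original snippet.

import pvml


class KernelSVM:
    """Hypothetical wrapper around pvml's kernel SVM functions."""

    def __init__(self, kfun="rbf", kparam=0.1, lambda_=0):
        self.kfun = kfun
        self.kparam = kparam
        self.lambda_ = lambda_

    def train(self, X, Y, lr=1e-3, steps=10000):
        # Keep the training samples: kernel inference needs them later.
        self.Xtrain = X
        self.alpha, self.b = pvml.ksvm_train(X, Y, self.kfun, self.kparam,
                                             self.lambda_, lr=lr, steps=steps)

    def inference(self, X):
        # Same body as the snippet above.
        labels, logits = pvml.ksvm_inference(X, self.Xtrain, self.alpha,
                                             self.b, self.kfun, self.kparam)
        return labels, logits + 0.5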
Example 3
def test_wrong_bias(self):
    X = np.linspace(-1, 1, 10).reshape(5, 2)
    alpha = np.linspace(-1, 1, 5)
    # The bias should be a scalar: passing an array must raise an error.
    b = np.linspace(-1, 1, 5)
    with self.assertRaises(ValueError):
        pvml.ksvm_inference(X, X, alpha, b, "polynomial", 2)
Example 4
def test_wrong_coefficients1(self):
    # Inconsistent argument shapes must raise a ValueError.
    X = np.linspace(-1, 1, 10)
    alpha = np.linspace(-1, 1, 10)
    b = 0
    with self.assertRaises(ValueError):
        pvml.ksvm_inference(X, X, alpha, b, "polynomial", 2)
Example 5
def test_train2(self):
    X, Y = test_data.separable_hypercubes_data_set(12, 2)
    alpha, b = pvml.ksvm_train(X, Y, "polynomial", 2, 0, lr=10, steps=1000)
    Yhat, P = pvml.ksvm_inference(X, X, alpha, b, "polynomial", 2)
    # On separable data the training set must be classified perfectly,
    # and the labels must agree with the sign of the decision values.
    self.assertListEqual(Y.tolist(), Yhat.tolist())
    self.assertListEqual(Yhat.tolist(), (P > 0).tolist())
Example 6
def test_train1(self):
    X, Y = test_data.separable_circle_data_set(50, 2)
    alpha, b = pvml.ksvm_train(X, Y, "rbf", 0.1, 0, lr=1e-1, steps=1000)
    Yhat, P = pvml.ksvm_inference(X, X, alpha, b, "rbf", 0.1)
    # Again, perfect training accuracy is expected on separable data.
    self.assertListEqual(Y.tolist(), Yhat.tolist())
    self.assertListEqual(Yhat.tolist(), (P > 0).tolist())
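
Examples 3 to 6 are unittest methods (they use self.assertRaises and self.assertListEqual), so they would normally live inside a unittest.TestCase subclass. Below is a minimal sketch of that scaffolding; the class name is an assumption, and test_data is the helper module the tests already reference.

import unittest

import numpy as np
import pvml
import test_data  # helper module providing the synthetic data sets


class TestKernelSVM(unittest.TestCase):
    # The test_* methods from Examples 3-6 go in this class body, e.g.:

    def test_wrong_bias(self):
        X = np.linspace(-1, 1, 10).reshape(5, 2)
        alpha = np.linspace(-1, 1, 5)
        b = np.linspace(-1, 1, 5)
        with self.assertRaises(ValueError):
            pvml.ksvm_inference(X, X, alpha, b, "polynomial", 2)


if __name__ == "__main__":
    unittest.main()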