def test_model_1d():
    # 10 1d datapoints between 0 and 1
    np.random.seed(0)
    X = np.random.uniform(size=(10, 1))
    # linearly separable labels
    Y = 1 - 2 * (X.ravel() < .5)

    pbl = BinarySVMModel(n_features=2)

    # we have to add a constant 1 feature by hand :-/
    X = np.hstack([X, np.ones((X.shape[0], 1))])

    w = [1, -.5]
    Y_pred = np.hstack([pbl.inference(x, w) for x in X])
    assert_array_equal(Y, Y_pred)

    # check that the sign of psi and inference agree:
    # the true label must score higher than the flipped one
    for x, y in zip(X, Y):
        assert_true(np.dot(w, pbl.psi(x, y)) > np.dot(w, pbl.psi(x, -y)))

    # check that psi is antisymmetric in y:
    # flipping the label flips the sign of the score
    for x, y in zip(X, Y):
        assert_true(np.dot(w, pbl.psi(x, y)) == -np.dot(w, pbl.psi(x, -y)))
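
# The assertions above pin down the contract this test relies on: psi(x, y)
# must be antisymmetric in y (so w . psi(x, y) == -w . psi(x, -y)) and
# inference must return the label whose psi scores highest under w.  The
# class below is NOT pystruct's implementation, just a hypothetical minimal
# stand-in that satisfies this contract, to make the assertions easier to read.

import numpy as np


class _ReferenceBinarySVM(object):
    """Hypothetical reference model: psi(x, y) = y * x, inference = sign(w . x)."""

    def __init__(self, n_features):
        self.size_psi = n_features

    def psi(self, x, y):
        # antisymmetric joint feature: flipping y flips the feature vector
        return y * np.asarray(x)

    def inference(self, x, w):
        # argmax over y in {-1, +1} of np.dot(w, self.psi(x, y)),
        # which reduces to the sign of the linear score
        return 1 if np.dot(w, x) >= 0 else -1
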
def test_blobs_batch():
    # make two gaussian blobs
    X, Y = make_blobs(n_samples=80, centers=2, random_state=1)
    Y = 2 * Y - 1

    pbl = BinarySVMModel(n_features=2)

    # test psi: batch_psi should equal the sum of the per-example psi vectors
    psi_sum = pbl.batch_psi(X, Y)
    psi_sum2 = np.sum([pbl.psi(x, y) for x, y in zip(X, Y)], axis=0)
    assert_array_equal(psi_sum, psi_sum2)

    # test inference
    w = np.random.uniform(-1, 1, size=pbl.size_psi)
    Y_hat = pbl.batch_inference(X, w)
    for x, y_hat in zip(X, Y_hat):
        assert_array_equal(y_hat, pbl.inference(x, w))

    # test loss-augmented inference
    Y_hat = pbl.batch_loss_augmented_inference(X, Y, w)
    for x, y, y_hat in zip(X, Y, Y_hat):
        assert_array_equal(y_hat, pbl.loss_augmented_inference(x, y, w))
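
# What the batch assertions encode: batch_psi must equal the sum of the
# per-example psi vectors, and both batch inference routines must agree
# elementwise with their single-example counterparts.  The helpers below are
# a hypothetical naive fallback with that behaviour (names and signatures are
# assumptions, not pystruct API); a model may instead provide vectorized
# versions, which is exactly what this test guards against breaking.

import numpy as np


def naive_batch_psi(model, X, Y):
    # sum of per-example joint features, as asserted in test_blobs_batch
    return np.sum([model.psi(x, y) for x, y in zip(X, Y)], axis=0)


def naive_batch_inference(model, X, w):
    # per-example inference, stacked into one array
    return np.array([model.inference(x, w) for x in X])


def naive_batch_loss_augmented_inference(model, X, Y, w):
    # per-example loss-augmented inference, stacked into one array
    return np.array([model.loss_augmented_inference(x, y, w)
                     for x, y in zip(X, Y)])
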