Example No. 1
def test_GM_fit_lowiterations():
    X, y = data.categorical_2Dmatrix_data_big()
    np.random.seed(0)
    GM = gaussianmixture.GaussianMixture()
    GM.fit(X, iterations=1)
    assert GM.learned
    np.testing.assert_array_almost_equal(GM.mus[0], [2.22, 2.5], decimal=1)
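For reference, a minimal sketch of the expectation-maximisation loop that a GaussianMixture like the one tested above presumably runs. This is an illustrative diagonal-covariance version in plain NumPy; the function name, defaults, and initialisation are assumptions, not the library's implementation.

import numpy as np

def em_gaussian_mixture(X, n_components=2, iterations=50, seed=0):
    """Illustrative EM for a diagonal-covariance Gaussian mixture (not the library code)."""
    rng = np.random.RandomState(seed)
    X = np.asarray(X, dtype=float)
    n, d = X.shape
    weights = np.full(n_components, 1.0 / n_components)
    mus = X[rng.choice(n, n_components, replace=False)]
    variances = np.var(X, axis=0) * np.ones((n_components, d)) + 1e-6

    for _ in range(iterations):
        # E-step: responsibility of each component for each sample.
        resp = np.empty((n, n_components))
        for k in range(n_components):
            diff = X - mus[k]
            log_pdf = -0.5 * np.sum(diff ** 2 / variances[k]
                                    + np.log(2 * np.pi * variances[k]), axis=1)
            resp[:, k] = weights[k] * np.exp(log_pdf)
        resp /= resp.sum(axis=1, keepdims=True)

        # M-step: re-estimate weights, means and variances from the responsibilities.
        nk = resp.sum(axis=0)
        weights = nk / n
        mus = (resp.T @ X) / nk[:, None]
        for k in range(n_components):
            diff = X - mus[k]
            variances[k] = (resp[:, k] @ (diff ** 2)) / nk[k] + 1e-6
    return weights, mus, variances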
def test_LVQ():
    X, y = data.categorical_2Dmatrix_data_big()
    lvq = prototypemthods.LearningVectorQuantization()
    lvq.fit(X, y, n_prototypes=3)
    assert [0, 1] == sorted(lvq.prototypes.keys())
    assert (3, 2) == lvq.prototypes[0].shape
    assert (3, 2) == lvq.prototypes[1].shape
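For context, a minimal LVQ1 training sketch showing the usual prototype update (pull the nearest prototype toward same-class samples, push it away otherwise). The helper name and defaults are illustrative; prototypemthods.LearningVectorQuantization may differ in its details.

import numpy as np

def lvq1_fit(X, y, n_prototypes=3, learning_rate=0.1, epochs=20, seed=0):
    """Illustrative LVQ1: per-class prototypes, nearest prototype updated per sample."""
    rng = np.random.RandomState(seed)
    X, y = np.asarray(X, dtype=float), np.asarray(y)
    prototypes = {}
    for label in np.unique(y):
        members = X[y == label]
        idx = rng.choice(len(members), n_prototypes, replace=len(members) < n_prototypes)
        prototypes[label] = members[idx].copy()

    for _ in range(epochs):
        for xi, yi in zip(X, y):
            # Find the globally nearest prototype and its class.
            best_label, best_k, best_dist = None, None, np.inf
            for label, protos in prototypes.items():
                dists = np.linalg.norm(protos - xi, axis=1)
                k = int(np.argmin(dists))
                if dists[k] < best_dist:
                    best_label, best_k, best_dist = label, k, dists[k]
            # LVQ1 rule: attract the prototype if the classes match, repel it otherwise.
            sign = 1.0 if best_label == yi else -1.0
            prototypes[best_label][best_k] += sign * learning_rate * (xi - prototypes[best_label][best_k])
    return prototypes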
Example No. 3
def test_test_train_splitter():
    X, y = data.categorical_2Dmatrix_data_big()
    X_train, X_test, y_train, y_test = modelselection.test_train_splitter(X, y)
    assert X_train.shape == (9, 2)
    assert X_test.shape == (2, 2)
    assert len(y_train) == 9
    assert len(y_test) == 2
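A minimal sketch of what a test_train_splitter like the one above presumably does: shuffle indices and cut at a fixed fraction. The 80/20 default here is an assumption inferred from the 9/2 shapes asserted in the test.

import numpy as np

def train_test_split_sketch(X, y, train_fraction=0.8, seed=0):
    """Illustrative shuffled split; not the library's exact implementation."""
    X, y = np.asarray(X), np.asarray(y)
    rng = np.random.RandomState(seed)
    indices = rng.permutation(len(X))
    cut = int(np.ceil(train_fraction * len(X)))  # 11 samples -> 9 train / 2 test
    train_idx, test_idx = indices[:cut], indices[cut:]
    return X[train_idx], X[test_idx], y[train_idx], y[test_idx]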
Example No. 4
def test_Perceptron():
    X, y = data.categorical_2Dmatrix_data_big()
    y = (y * 2) - 1  # remap the {0, 1} labels to {-1, +1}
    perceptron = svm.Perceptron()
    perceptron.fit(X, y)
    predictions = perceptron.predict(X)
    assert (predictions == y).sum() > 7
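A minimal textbook perceptron sketch for {-1, +1} labels, matching the label remapping done in the test above; the names and update schedule are illustrative, not necessarily svm.Perceptron's internals.

import numpy as np

def perceptron_fit(X, y, learning_rate=1.0, epochs=100):
    """Illustrative perceptron: update the weights only on misclassified samples."""
    X = np.asarray(X, dtype=float)
    w = np.zeros(X.shape[1])
    b = 0.0
    for _ in range(epochs):
        for xi, yi in zip(X, y):
            if yi * (xi @ w + b) <= 0:  # misclassified (or on the boundary)
                w += learning_rate * yi * xi
                b += learning_rate * yi
    return w, b

def perceptron_predict(X, w, b):
    return np.where(np.asarray(X, dtype=float) @ w + b >= 0, 1, -1)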
def test_KMediods():
    X, y = data.categorical_2Dmatrix_data_big()
    km = prototypemthods.KMediods()
    km.fit(X)
    assignments = km.sample_assignments
    reversed_assignments = (assignments - 1) * -1  # cluster ids are arbitrary, so also accept the flipped labelling
    assert np.array_equal(assignments, y) or \
        np.array_equal(reversed_assignments, y)
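A minimal k-medoids sketch (alternate assignment and medoid update) for reference; prototypemthods.KMediods may use a different update strategy, and every name here is illustrative.

import numpy as np

def kmedoids_fit(X, n_clusters=2, iterations=100, seed=0):
    """Illustrative k-medoids: medoids are actual samples minimising within-cluster distance."""
    rng = np.random.RandomState(seed)
    X = np.asarray(X, dtype=float)
    distances = np.linalg.norm(X[:, None, :] - X[None, :, :], axis=2)  # pairwise distance matrix
    medoids = rng.choice(len(X), n_clusters, replace=False)

    for _ in range(iterations):
        # Assign every sample to its nearest medoid.
        assignments = np.argmin(distances[:, medoids], axis=1)
        # For each cluster, pick the member minimising total distance to the other members.
        new_medoids = medoids.copy()
        for k in range(n_clusters):
            members = np.where(assignments == k)[0]
            if len(members) == 0:
                continue
            within = distances[np.ix_(members, members)].sum(axis=1)
            new_medoids[k] = members[np.argmin(within)]
        if np.array_equal(new_medoids, medoids):
            break
        medoids = new_medoids
    assignments = np.argmin(distances[:, medoids], axis=1)
    return medoids, assignments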
def test_LVQ_prediction():
    X, y = data.categorical_2Dmatrix_data_big()
    lvq = prototypemthods.LearningVectorQuantization()
    lvq.fit(X, y, n_prototypes=3)
    prediction = lvq.predict(X[0])
    assert prediction == y[0]
    prediction = lvq.predict(X[-1])
    assert prediction == y[-1]
def test_DANN_prediction():
    X, y = data.categorical_2Dmatrix_data_big()
    dann = prototypemthods.DANN()
    dann.fit(X, y, neighborhood_size=3)
    prediction = dann.predict(X[0])
    assert prediction == y[0]
    prediction = dann.predict(X[-1])
    assert prediction == y[-1]
def test_KMediods_prediction():
    X, y = data.categorical_2Dmatrix_data_big()
    km = prototypemthods.KMediods()
    km.fit(X)
    assignments = km.sample_assignments
    reversed_assignments = (assignments - 1) * -1  # flipped labelling (cluster ids are arbitrary); not used by the check below
    prediction = km.predict(X[0])
    np.testing.assert_array_almost_equal(prediction, [3.0, 3.0])
Example No. 13
def test_SupportVectorMachine():
    X, y = data.categorical_2Dmatrix_data_big()
    y = (y * 2) - 1  # remap the {0, 1} labels to {-1, +1}
    SVM = svm.SupportVectorMachine()
    SVM.fit(X, y)
    prediction = SVM.predict(X[0])
    assert prediction == y[0]
    prediction = SVM.predict(X[-1])
    assert prediction == y[-1]
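A minimal sketch of a linear soft-margin SVM trained by stochastic sub-gradient descent on the hinge loss (Pegasos-style). The library's svm.SupportVectorMachine may instead solve the dual or use a kernel, so treat this purely as an illustration.

import numpy as np

def linear_svm_fit(X, y, lam=0.01, epochs=200, seed=0):
    """Illustrative Pegasos-style training for labels in {-1, +1}."""
    rng = np.random.RandomState(seed)
    X = np.asarray(X, dtype=float)
    w = np.zeros(X.shape[1])
    b = 0.0
    t = 0
    for _ in range(epochs):
        for i in rng.permutation(len(X)):
            t += 1
            eta = 1.0 / (lam * t)            # decaying step size
            if y[i] * (X[i] @ w + b) < 1:    # inside the margin: hinge sub-gradient step
                w = (1 - eta * lam) * w + eta * y[i] * X[i]
                b += eta * y[i]              # bias left unregularised
            else:
                w = (1 - eta * lam) * w
    return w, b

def linear_svm_predict(x, w, b):
    return 1 if float(np.dot(x, w) + b) >= 0 else -1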
Example No. 14
def test_GM_predict_probs():
    X, y = data.categorical_2Dmatrix_data_big()
    np.random.seed(0)
    GM = gaussianmixture.GaussianMixture()
    GM.fit(X)
    max_class, class_probs = GM.predict(X[0], probs=True)
    assert max_class == 0
    assert np.isclose(class_probs[0], 1)
    assert np.isclose(class_probs[1], 0)
Example No. 15
def test_GM_predict():
    X, y = data.categorical_2Dmatrix_data_big()
    np.random.seed(0)
    GM = gaussianmixture.GaussianMixture()
    GM.fit(X)
    pred_one = GM.predict(X[0])
    pred_two = GM.predict(X[-1])
    assert pred_one == y[0]
    assert pred_two == y[-1]
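For completeness, a prediction step that pairs with the EM sketch after Example No. 1: evaluate each component's weighted density at the query point and return the most responsible component, optionally with the normalised probabilities (mirroring the probs=True branch tested in Example No. 14). Again an illustrative sketch, not the library's code.

import numpy as np

def gm_predict(x, weights, mus, variances, probs=False):
    """Illustrative prediction for the diagonal-covariance mixture fitted above."""
    x = np.asarray(x, dtype=float)
    log_pdf = -0.5 * np.sum((x - mus) ** 2 / variances
                            + np.log(2 * np.pi * variances), axis=1)
    class_probs = weights * np.exp(log_pdf)
    class_probs /= class_probs.sum()
    max_class = int(np.argmax(class_probs))
    return (max_class, class_probs) if probs else max_class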
Example No. 16
def test_RegressionTree():
    tree = treemethods.RegressionTree()
    X, y = data.categorical_2Dmatrix_data_big()
    tree.fit(X, y, 3)
    assert tree.predict(X[0]) == y[0]
    assert tree.predict(X[-1]) == y[-1]
Example No. 17
def test_PrimRegression():
    tree = treemethods.PrimRegression()
    X, y = data.categorical_2Dmatrix_data_big()
    tree.fit(X, y, 1)
    assert tree.predict(X[1]) == y[1]
    assert np.isclose(tree.predict(X[-1]), 0.6666, 1)
Example No. 18
def test_DiscreteAdaBoost():
    tree = treemethods.DiscreteAdaBoost()
    X, y = data.categorical_2Dmatrix_data_big()
    tree.fit(X, y, 3)
    assert tree.predict(X[0]) == y[0]
    assert tree.predict(X[-1]) == y[-1]
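A minimal sketch of discrete AdaBoost with axis-aligned decision stumps, included only to illustrate the idea behind treemethods.DiscreteAdaBoost; the sketch uses the conventional {-1, +1} labels and hypothetical helper names.

import numpy as np

def _best_stump(X, y, sample_weights):
    """Pick the (feature, threshold, polarity) stump with minimum weighted error."""
    best, best_err, best_pred = None, np.inf, None
    for feature in range(X.shape[1]):
        for threshold in np.unique(X[:, feature]):
            for polarity in (1, -1):
                pred = np.where(polarity * (X[:, feature] - threshold) >= 0, 1, -1)
                err = np.sum(sample_weights[pred != y])
                if err < best_err:
                    best, best_err, best_pred = (feature, threshold, polarity), err, pred
    return best, best_pred

def adaboost_fit(X, y, n_rounds=3):
    """Illustrative discrete AdaBoost; y must be in {-1, +1}."""
    X, y = np.asarray(X, dtype=float), np.asarray(y)
    sample_weights = np.full(len(X), 1.0 / len(X))
    stumps = []
    for _ in range(n_rounds):
        stump, pred = _best_stump(X, y, sample_weights)
        err = np.clip(np.sum(sample_weights[pred != y]), 1e-10, 1 - 1e-10)
        alpha = 0.5 * np.log((1 - err) / err)
        sample_weights *= np.exp(-alpha * y * pred)   # up-weight the misclassified samples
        sample_weights /= sample_weights.sum()
        stumps.append((alpha, stump))
    return stumps

def adaboost_predict(x, stumps):
    score = sum(alpha * (1 if polarity * (x[feature] - threshold) >= 0 else -1)
                for alpha, (feature, threshold, polarity) in stumps)
    return 1 if score >= 0 else -1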
def test_DANN():
    X, y = data.categorical_2Dmatrix_data_big()
    dann = prototypemthods.DANN()
    dann.fit(X, y)
    assert dann.learned
Example No. 20
def test_GradientBoostingRegression():
    tree = treemethods.GradientBoostingRegression()
    X, y = data.categorical_2Dmatrix_data_big()
    tree.fit(X, y, 3)
    assert np.isclose(tree.predict(X[0]), 0.3976, 1)
    assert np.isclose(tree.predict(X[-1]), y[-1], 1)
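A minimal sketch of gradient boosting for squared error using one-split regression stumps; treemethods.GradientBoostingRegression presumably uses deeper trees and different defaults, so this only illustrates the residual-fitting idea.

import numpy as np

def _regression_stump(X, residuals):
    """Single axis-aligned split minimising squared error of per-leaf means
    (assumes at least one feature takes more than one value)."""
    best, best_sse = None, np.inf
    for feature in range(X.shape[1]):
        for threshold in np.unique(X[:, feature]):
            left = X[:, feature] <= threshold
            if left.all() or (~left).all():
                continue
            left_mean, right_mean = residuals[left].mean(), residuals[~left].mean()
            sse = ((residuals[left] - left_mean) ** 2).sum() + ((residuals[~left] - right_mean) ** 2).sum()
            if sse < best_sse:
                best, best_sse = (feature, threshold, left_mean, right_mean), sse
    return best

def gradient_boosting_fit(X, y, n_rounds=3, learning_rate=0.5):
    X, y = np.asarray(X, dtype=float), np.asarray(y, dtype=float)
    base = y.mean()
    predictions = np.full(len(y), base)
    stumps = []
    for _ in range(n_rounds):
        residuals = y - predictions   # negative gradient of squared error
        feature, threshold, left_mean, right_mean = _regression_stump(X, residuals)
        predictions += learning_rate * np.where(X[:, feature] <= threshold, left_mean, right_mean)
        stumps.append((feature, threshold, left_mean, right_mean))
    return base, stumps

def gradient_boosting_predict(x, base, stumps, learning_rate=0.5):
    value = base
    for feature, threshold, left_mean, right_mean in stumps:
        value += learning_rate * (left_mean if x[feature] <= threshold else right_mean)
    return value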
def test_knearestneighbor_regression():
    X, y = data.categorical_2Dmatrix_data_big()
    knn = prototypemthods.KNearestNeighbor()
    knn.fit(X, y)
    prediction = knn.predict(X[0])
    assert prediction == y[0]
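Finally, a minimal k-nearest-neighbour sketch matching the fit/predict usage in the test above; the value of k, the distance metric, and the regression-style averaging are all assumptions here.

import numpy as np

def knn_predict(X_train, y_train, x, k=3):
    """Illustrative k-NN: average the targets of the k closest training samples."""
    X_train = np.asarray(X_train, dtype=float)
    distances = np.linalg.norm(X_train - np.asarray(x, dtype=float), axis=1)
    nearest = np.argsort(distances)[:k]
    neighbours = np.asarray(y_train)[nearest]
    # For regression return the mean; classification would take a majority vote instead.
    return neighbours.mean()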