# Example 1
def test_gradient_descent_correctness_nonsep_data(non_linear_separable_data):
    """A short GradientDescent run on non-separable data should misclassify
    exactly the one sample whose label the fixture deliberately flipped."""
    features, labels, flipped_idx = non_linear_separable_data

    weights = GradientDescent(lr=0.01).execute(features, labels, epochs=10)
    predictions = sign(add_cons(features) @ weights)

    mismatches = np.where(labels != predictions)[0]
    # Exactly one error, and it is the intentionally flipped sample.
    assert len(mismatches.tolist()) == 1
    assert flipped_idx[0] == mismatches[0]
# Example 2
def linear_separable_data():
    """Generate linearly separable data: 30 random 2-D points labelled by the
    side of a random hyperplane they fall on.

    NOTE(review): presumably registered as a pytest fixture — confirm the
    ``@pytest.fixture`` decorator is present in the real file.
    """
    np.random.seed(123)  # deterministic fixture data
    features = np.random.rand(30, 2)
    weights = np.random.rand(2)
    bias = np.random.rand(1)
    labels = sign(features @ weights - bias)
    return features, labels
# Example 3
def test_pocket_correctness_nonsep_data(non_linear_separable_data):
    """Pocket on non-separable data should end up misclassifying only the
    single sample whose label the fixture flipped on purpose."""
    features, labels, flipped_idx = non_linear_separable_data

    weights = Pocket().execute(features, labels, updates=100)
    predictions = sign(add_cons(features) @ weights)

    mismatches = np.where(labels != predictions)[0]
    # Exactly one error, and it is the intentionally flipped sample.
    assert len(mismatches.tolist()) == 1
    assert flipped_idx[0] == mismatches[0]
# Example 4
def test_adaboost_stump():
    """AdaBoost over decision stumps should fit the moons data perfectly."""
    features, raw_labels = make_moons(20, random_state=123)
    labels = sign(raw_labels)

    clf = AdaBoostStump(n_estimators=10)
    clf.fit(features, labels)
    predictions = clf.predict(features)
    score = clf.evaluate(features, labels)[0]

    # predict() reproduces the training labels exactly ...
    assert np.array_equal(predictions, labels)
    # ... so evaluate() reports perfect accuracy.
    assert score == 1.0
# Example 5
def non_linear_separable_data():
    """Generate non-separable data: random points kept only if they lie at
    least 0.03 from a random hyperplane, then exactly one label is flipped
    so the set cannot be separated.

    NOTE(review): presumably registered as a pytest fixture — confirm the
    ``@pytest.fixture`` decorator is present in the real file.
    """
    np.random.seed(123)  # deterministic fixture data
    features = np.random.rand(40, 2)
    weights = np.random.rand(2)
    bias = np.random.rand(1)
    # Keep only points whose distance from the hyperplane exceeds 0.03,
    # leaving a clean margin around the decision boundary.
    far_enough = np.abs((features @ weights - bias) /
                        np.sqrt(np.sum(weights**2) + bias**2)) > 0.03
    features = features[far_enough, :]
    labels = sign(features @ weights - bias)
    # Flip a single random label — the one intentional mistake.
    flipped_idx = np.random.randint(0, len(features), 1)
    labels[flipped_idx] = -labels[flipped_idx]
    return features, labels, flipped_idx
# Example 6
def test_bagging_pocket():
    """Bagged Pocket perceptrons should classify the pruned moons perfectly."""
    features, raw_labels = make_moons(20, noise=0.05, random_state=4,
                                      shuffle=False)
    # Drop one point from each moon before converting labels to +/-1.
    features = np.delete(features, [0, 10], axis=0)
    labels = sign(np.delete(raw_labels, [0, 10]))

    ensemble = BaggingPerceptron(n_estimators=15,
                                 optimizer=Pocket(updates=2000))
    ensemble.fit(features, labels)
    predictions = ensemble.predict(features)

    # test predict()
    assert np.array_equal(predictions, labels)
    # test evaluate()
    assert ensemble.evaluate(features, labels)[0] == 1.0  # Accuracy == 1.0
# Example 7
def test_gradient_descent_correctness_linsep_data(linear_separable_data):
    """GradientDescent run without an epoch bound must classify linearly
    separable data perfectly."""
    x_test, y_test = linear_separable_data

    # np.inf, not np.Inf: the capitalized alias was removed in NumPy 2.0,
    # so this test would crash on modern NumPy. np.inf works on all versions.
    w_ans = GradientDescent(lr=0.01).execute(x_test, y_test, epochs=np.inf)
    y_pred = sign(add_cons(x_test) @ w_ans)
    assert np.array_equal(y_pred, y_test)
# Example 8
def test_pocket_correctness_linsep_data(linear_separable_data):
    """Pocket run without an update bound must classify linearly separable
    data perfectly."""
    x_test, y_test = linear_separable_data

    # np.inf, not np.Inf: the capitalized alias was removed in NumPy 2.0,
    # so this test would crash on modern NumPy. np.inf works on all versions.
    w_ans = Pocket().execute(x_test, y_test, updates=np.inf)
    y_pred = sign(add_cons(x_test) @ w_ans)
    assert np.array_equal(y_pred, y_test)
# Example 9
def test_linear_seperable_correctness(linear_separable_data):
    """The LinearSeparable solver must classify separable data perfectly."""
    features, labels = linear_separable_data

    weights = LinearSeparable().execute(features, labels)
    predictions = sign(add_cons(features) @ weights)
    assert np.array_equal(predictions, labels)
# Example 10
def test_data():
    """Build a labelled classification dataset with +/-1 targets.

    NOTE(review): despite the ``test_`` prefix this returns data and asserts
    nothing — it looks like a fixture or helper. Confirm whether it should be
    decorated with ``@pytest.fixture`` (pytest warns on test functions that
    return non-None values).
    """
    features, raw_labels = make_classification(random_state=1)
    labels = sign(raw_labels, zero=-1)
    return features, labels