Пример #1
0
def test_0_1_class():
    """SGD-trained Adaline on 0/1-encoded targets reaches known weights."""
    model = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    model.fit(X_std, y0)
    expected = np.array([0.51, -0.04, 0.51])
    np.testing.assert_almost_equal(model.w_, expected, decimal=2)
    # Every training sample must be classified correctly.
    assert np.all(model.predict(X_std) == y0)
Пример #2
0
def test_stochastic_gradient_descent():
    """SGD learning converges to the reference weight vector in 30 epochs."""
    clf = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    clf.fit(X_std, y1)
    np.testing.assert_almost_equal(
        clf.w_, np.array([0.03, -0.09, 1.02]), decimal=2)
    assert (clf.predict(X_std) == y1).all()
Пример #3
0
def test_stochastic_gradient_descent():
    """SGD learning reproduces the reference weight vector."""
    expected = np.array([0.03, -0.09, 1.02])
    learner = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    learner.fit(X_std, y1)
    np.testing.assert_almost_equal(learner.w_, expected, 2)
    assert np.all(y1 == learner.predict(X_std))
Пример #4
0
def test_0_1_class():
    """Adaline with SGD handles 0/1 class labels and fits known weights."""
    expected = np.array([0.51, -0.04, 0.51])
    clf = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    clf.fit(X_std, y0)
    np.testing.assert_almost_equal(clf.w_, expected, decimal=2)
    assert (clf.predict(X_std) == y0).all()
Пример #5
0
def test_print_progress_2():
    """Fitting with print_progress=2 completes without raising (smoke test)."""
    model = Adaline(
        epochs=30, eta=0.01, minibatches=1, print_progress=2, random_seed=1)
    model.fit(X_std, y1)
Пример #6
0
def test_gradient_descent():
    """Batch gradient descent ('gd') reproduces the reference weights."""
    clf = Adaline(epochs=30, eta=0.01, learning='gd', random_seed=1)
    clf.fit(X_std, y1)
    reference = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    np.testing.assert_almost_equal(clf.w_, reference, decimal=2)
    assert (clf.predict(X_std) == y1).all()
Пример #7
0
def test_gradient_descent():
    """Batch gradient descent converges to the known weight vector."""
    expected = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    model = Adaline(epochs=30, eta=0.01, learning='gd', random_seed=1)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(model.w_, expected, 2)
    assert np.all(y1 == model.predict(X_std))
Пример #8
0
def test_print_progress_3():
    """Fitting with print_progress=3 completes without raising (smoke test)."""
    model = Adaline(
        epochs=30, eta=0.01, minibatches=1, print_progress=3, random_seed=1)
    model.fit(X_std, y1)
Пример #9
0
def test_refit_weights():
    """A refit with init_params=False warm-starts from the learned weights."""
    expected = np.array([[-0.08], [1.02]])
    model = Adaline(epochs=15, eta=0.01, minibatches=1, random_seed=1)
    model.fit(X_std, y1, init_params=True)
    model.fit(X_std, y1, init_params=False)  # continue from previous weights
    np.testing.assert_almost_equal(model.w_, expected, decimal=2)
    assert np.all(model.predict(X_std) == y1)
Пример #10
0
def test_refit_weights():
    """Refitting with init_weights=False keeps the converged solution."""
    model = Adaline(epochs=15, eta=0.01, solver='gd', random_seed=1)
    model.fit(X_std, y1, init_weights=True)
    model.fit(X_std, y1, init_weights=False)  # warm start from first fit
    expected = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    np.testing.assert_almost_equal(model.w_, expected, decimal=2)
    assert (model.predict(X_std) == y1).all()
Пример #11
0
def test_refit_weights():
    """Two consecutive fits (warm start on the second) stay at the optimum."""
    clf = Adaline(epochs=15, eta=0.01, solver='gd', random_seed=1)
    for fresh in (True, False):
        clf.fit(X_std, y1, init_weights=fresh)
    np.testing.assert_almost_equal(
        clf.w_, np.array([-5.21e-16, -7.86e-02, 1.02e+00]), 2)
    assert np.all(y1 == clf.predict(X_std))
Пример #12
0
def test_gradient_descent():
    """Full-batch GD (minibatches=1) recovers reference weights and bias."""
    model = Adaline(epochs=30, eta=0.01, minibatches=1, random_seed=1)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(
        model.w_, np.array([[-0.08], [1.02]]), decimal=2)
    np.testing.assert_almost_equal(model.b_, np.array([0.00]), decimal=2)
    assert np.all(model.predict(X_std) == y1)
Пример #13
0
def test_ary_persistency_in_shuffling():
    """SGD shuffling must not mutate the caller's feature array."""
    snapshot = X_std.copy()
    model = Adaline(
        epochs=30, eta=0.01, minibatches=len(y), random_seed=1)
    model.fit(X_std, y1)
    # X_std should match its pre-fit contents (to 6 decimals).
    np.testing.assert_almost_equal(snapshot, X_std, 6)
Пример #14
0
def test_invalid_class():
    """fit() must raise ValueError for an invalid class encoding in y2."""
    ada = Adaline(epochs=40, eta=0.01, random_seed=1)
    try:
        ada.fit(X, y2)  # 0, 1 class
    except ValueError:
        pass
    else:
        # Replaces the original opaque `assert (1 == 2)` sentinel: an
        # explicit raise gives a readable failure and survives `python -O`.
        raise AssertionError('fit() did not raise ValueError')
Пример #15
0
def test_normal_equation():
    """Closed-form solution (minibatches=None) matches reference params."""
    model = Adaline(epochs=30, eta=0.01, minibatches=None, random_seed=None)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(
        model.w_, np.array([[-0.08], [1.02]]), decimal=2)
    np.testing.assert_almost_equal(model.b_, np.array([0.00]), decimal=2)
    preds = model.predict(X_std)
    assert (preds == y1).all(), preds
Пример #16
0
def test_score_function():
    """score() reports perfect accuracy on the training data."""
    model = Adaline(epochs=30, eta=0.01, minibatches=1, random_seed=1)
    model.fit(X_std, y1)
    accuracy = model.score(X_std, y1)
    assert accuracy == 1.0, accuracy
Пример #17
0
def test_invalid_class():
    """fit() must raise ValueError for an invalid class encoding in y2."""
    ada = Adaline(epochs=40, eta=0.01, random_seed=1)
    try:
        ada.fit(X, y2)  # 0, 1 class
    except ValueError:
        pass
    else:
        # Replaces the original opaque `assert(1==2)` sentinel: an explicit
        # raise gives a readable failure and survives `python -O`.
        raise AssertionError('fit() did not raise ValueError')
Пример #18
0
def test_stochastic_gradient_descent():
    """Per-sample minibatches (len(y)) act as SGD and hit reference weights."""
    clf = Adaline(
        epochs=30, eta=0.01, minibatches=len(y), random_seed=1)
    clf.fit(X_std, y1)
    np.testing.assert_almost_equal(
        clf.w_, np.array([[-0.08], [1.02]]), decimal=2)
    assert (clf.predict(X_std) == y1).all()
Пример #19
0
def test_score_function():
    """score() reports perfect accuracy on the training data.

    The reference weight array `t1` declared in the original body was never
    used by any assertion, so it has been removed.
    """
    ada = Adaline(epochs=30, eta=0.01, minibatches=1, random_seed=1)
    ada.fit(X_std, y1)
    acc = ada.score(X_std, y1)
    assert acc == 1.0, acc
Пример #20
0
def test_normal_equation():
    """Closed-form fit (minibatches=None) matches the reference weights."""
    expected = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    model = Adaline(
        epochs=30, eta=0.01, minibatches=None, random_seed=1)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(model.w_, expected, decimal=2)
    assert np.all(model.predict(X_std) == y1)
Пример #21
0
def test_standardized_iris_data_with_shuffle():
    """GD with shuffling enabled still converges to the reference weights."""
    clf = Adaline(
        epochs=30, eta=0.01, solver='gd', random_seed=1, shuffle=True)
    clf.fit(X_std, y1)
    np.testing.assert_almost_equal(
        clf.w_, np.array([-5.21e-16, -7.86e-02, 1.02e+00]), decimal=2)
    assert (clf.predict(X_std) == y1).all()
Пример #22
0
def test_refit_weights():
    """A second fit with init_params=False warm-starts from prior weights."""
    model = Adaline(epochs=15, eta=0.01, minibatches=1, random_seed=1)
    for fresh in (True, False):
        model.fit(X_std, y1, init_params=fresh)
    np.testing.assert_almost_equal(
        model.w_, np.array([[-0.08], [1.02]]), decimal=2)
    assert np.all(model.predict(X_std) == y1)
Пример #23
0
def test_standardized_iris_data_with_zero_weights():
    """Zero weight initialization converges to the same reference solution."""
    expected = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    model = Adaline(epochs=30, eta=0.01, minibatches=1, random_seed=1,
                    zero_init_weight=True)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(model.w_, expected, decimal=2)
    assert (model.predict(X_std) == y1).all()
Пример #24
0
def test_standardized_iris_data_with_zero_weights():
    """GD from all-zero initial weights still reaches the known optimum."""
    clf = Adaline(epochs=30, eta=0.01, solver='gd', random_seed=1,
                  zero_init_weight=True)
    clf.fit(X_std, y1)
    reference = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    np.testing.assert_almost_equal(clf.w_, reference, 2)
    assert np.all(y1 == clf.predict(X_std))
Пример #25
0
def test_normal_equation():
    """Closed-form solution matches reference weights, bias, and labels."""
    expected_w = np.array([[-0.08], [1.02]])
    expected_b = np.array([0.00])
    clf = Adaline(epochs=30, eta=0.01, minibatches=None, random_seed=None)
    clf.fit(X_std, y1)
    np.testing.assert_almost_equal(clf.w_, expected_w, decimal=2)
    np.testing.assert_almost_equal(clf.b_, expected_b, decimal=2)
    assert (y1 == clf.predict(X_std)).all(), clf.predict(X_std)
Пример #26
0
def test_gradient_descent():
    """Full-batch GD recovers the reference weights and zero bias."""
    expected_w = np.array([[-0.08], [1.02]])
    expected_b = np.array([0.00])
    model = Adaline(epochs=30, eta=0.01, minibatches=1, random_seed=1)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(model.w_, expected_w, decimal=2)
    np.testing.assert_almost_equal(model.b_, expected_b, decimal=2)
    assert np.all(model.predict(X_std) == y1)
Пример #27
0
def test_normal_equation():
    """solver='normal equation' recovers the analytic weight vector."""
    model = Adaline(epochs=30, eta=0.01, solver='normal equation',
                    random_seed=1)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(
        model.w_, np.array([-5.21e-16, -7.86e-02, 1.02e+00]), 2)
    assert np.all(y1 == model.predict(X_std))
Пример #28
0
def test_array_dimensions():
    """Smoke test: fit() accepts a 1-D feature array and a list target."""
    model = Adaline(epochs=15, eta=0.01, random_seed=1)
    model = model.fit(np.array([1, 2, 3]), [-1])
Пример #29
0
def test_invalid_solver():
    """Exercise fit() with an unknown solver name ('bla').

    The reference weight array `t1` declared in the original body was never
    used, so it has been removed. NOTE(review): presumably fit() raises for
    an unknown solver and a `raises` decorator lives outside this snippet —
    confirm against the test runner.
    """
    ada = Adaline(epochs=30, eta=0.01, solver='bla', random_seed=1)
    ada.fit(X_std, y1)
Пример #30
0
def test_ary_persistency_in_shuffling():
    """Fitting with per-sample minibatches must leave X_std unchanged."""
    before = X_std.copy()
    Adaline(epochs=30, eta=0.01, minibatches=len(y), random_seed=1).fit(
        X_std, y1)
    np.testing.assert_almost_equal(before, X_std, 6)
Пример #31
0
def test_stochastic_gradient_descent():
    """SGD (minibatches=len(y)) converges to the reference weights."""
    expected = np.array([[-0.08], [1.02]])
    model = Adaline(epochs=30, eta=0.01, minibatches=len(y), random_seed=1)
    model.fit(X_std, y1)
    np.testing.assert_almost_equal(model.w_, expected, decimal=2)
    assert np.all(model.predict(X_std) == y1)
Пример #32
0
def test_array_dimensions():
    """Smoke test: a 1-D feature array with a single-label list is accepted."""
    clf = Adaline(epochs=15, eta=0.01, random_seed=1)
    clf = clf.fit(np.array([1, 2, 3]), [-1])
Пример #33
0
# Load iris, keep two features (cols 0 and 3) and the first two classes.
X, y = iris_data()
X = X[:, [0, 3]]
X = X[0:100]
y = y[0:100]

# Standardize both feature columns to zero mean / unit variance.
for col in (0, 1):
    X[:, col] = (X[:, col] - X[:, col].mean()) / X[:, col].std()


# Closed Form Solution
ada = Adaline(epochs=30, eta=0.01, minibatches=None, random_seed=1)
ada.fit(X, y)
plot_decision_regions(X, y, clf=ada)
plt.title('Adaline - Stochastic Gradient Descent')
plt.show()


# (Stochastic) Gradient Descent
ada2 = Adaline(epochs=30,
               eta=0.01,
               minibatches=1,  # 1 for GD learning
               # minibatches=len(y), # len(y) for SGD learning
               # minibatches=5, # for SGD learning w. minibatch size 20
               random_seed=1,
               print_progress=3)
ada2.fit(X, y)
Пример #34
0
def test_invalid_solver():
    """Exercise fit() with an unknown solver name ('bla').

    The reference weight array `t1` declared in the original body was never
    used, so it has been removed. NOTE(review): presumably fit() raises for
    an unknown solver and a `raises` decorator lives outside this snippet —
    confirm against the test runner.
    """
    ada = Adaline(epochs=30, eta=0.01, solver='bla', random_seed=1)
    ada.fit(X_std, y1)
Пример #35
0
def test_score_function():
    """Training accuracy reported by score() must be perfect."""
    model = Adaline(epochs=30, eta=0.01,
                    minibatches=1, random_seed=1)
    model.fit(X_std, y1)
    accuracy = model.score(X_std, y1)
    assert accuracy == 1.0, accuracy
from mlxtend.classifier import Adaline
import matplotlib.pyplot as plt

# Loading Data

X, y = iris_data()
X = X[:, [0, 3]]  # sepal length and petal width
X = X[0:100]  # class 0 and class 1
y = y[0:100]  # class 0 and class 1

# standardize: zero mean / unit variance for both feature columns
for col in (0, 1):
    X[:, col] = (X[:, col] - X[:, col].mean()) / X[:, col].std()

ada = Adaline(epochs=30, eta=0.01, minibatches=None, random_seed=1)
ada.fit(X, y)
plot_decision_regions(X, y, clf=ada)
plt.title('Adaline - Stochastic Gradient Descent')

plt.show()

# ## Example 2 - Gradient Descent

# In[4]:

from mlxtend.data import iris_data
from mlxtend.plotting import plot_decision_regions
from mlxtend.classifier import Adaline
import matplotlib.pyplot as plt

# Loading Data