Example 1
def test_logistic_regression_gd():
    t = np.array([0.51, 1.18, 4.40])
    lr = LogisticRegression(epochs=100, eta=0.01, learning='gd', random_seed=0)

    lr.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(lr.w_, t, 2)
    assert ((y == lr.predict(X)).all())
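Note that these test snippets reference module-level fixtures (`X`, `y`, `np`, `assert_raises`) that the excerpts omit. Judging from the standalone scripts in Examples 17 and 18, the shared setup is roughly the sketch below; the exact fixture code is an assumption, not taken from the test module itself.

# Assumed shared test fixtures (sketch inferred from Examples 17-18, not the
# actual test module): two standardized Iris features, classes 0 and 1 only.
import sys
import numpy as np
from mlxtend.data import iris_data
from mlxtend.classifier import LogisticRegression
from mlxtend.utils import assert_raises

X, y = iris_data()
X = X[:, [0, 3]]           # sepal length and petal width
X, y = X[:100], y[:100]    # first 100 samples: class 0 and class 1
X = (X - X.mean(axis=0)) / X.std(axis=0)   # standardize each feature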
Example 2
def test_print_progress_3():
    lr = LogisticRegression(epochs=100,
                            eta=0.01,
                            minibatches=1,
                            print_progress=3,
                            random_seed=1)
    lr.fit(X, y)
Example 3
def test_predict_proba():
    lr = LogisticRegression(epochs=100, eta=0.01, minibatches=1, random_seed=1)

    lr.fit(X, y)
    idx = [0, 48, 99]  # sample labels: 0, 0, 1
    y_pred = lr.predict_proba(X[idx])
    expect = np.array([0.009, 0.012, 0.993])
    np.testing.assert_almost_equal(y_pred, expect, 3)
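In this binary setup, `predict_proba` returns the probability of class 1 for each sample; thresholding at 0.5 reproduces the hard labels from `predict`. A minimal sketch, assuming the fixtures above:

proba = lr.predict_proba(X[idx])      # P(y=1) per sample, roughly [0.009, 0.012, 0.993]
labels = (proba >= 0.5).astype(int)   # 0.5 threshold -> hard 0/1 labels
# labels should agree with lr.predict(X[idx])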
Example 4
def test_ary_persistency_in_shuffling():
    orig = X.copy()
    lr = LogisticRegression(eta=0.01,
                            epochs=100,
                            minibatches=len(y),
                            l2_lambda=1.0,
                            random_seed=1)
    lr.fit(X, y)
    np.testing.assert_almost_equal(orig, X, 6)
Example 5
def test_invalid_labels_2():
    y1 = np.where(y == 0, -1, 1)
    lr = LogisticRegression(epochs=15, eta=0.01, random_seed=1)
    assert_raises(AttributeError,
                  'y array must not contain negative labels.\nFound [-1  1]',
                  lr.fit,
                  X,
                  y1,
                  {(-1, 1)})
Example 6
def test_score_function():
    lr = LogisticRegression(epochs=100,
                            eta=0.01,
                            minibatches=1,
                            random_seed=1)

    lr.fit(X, y)
    acc = lr.score(X, y)
    assert acc == 1.0, "Acc: %s" % acc
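The `score` call here is plain classification accuracy, the same quantity the other tests compute by hand. A minimal sketch of the equivalence, assuming the fixtures above:

acc_manual = np.mean(lr.predict(X) == y)   # fraction of correctly classified samples
acc_score = lr.score(X, y)                 # expected to match acc_manual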
Example 7
def test_logistic_regression_sgd():
    t = np.array([0.50, 1.16, 4.38])
    lr = LogisticRegression(epochs=100,
                            eta=0.01,
                            minibatches=len(y),
                            random_seed=0)

    lr.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(lr.w_, t, 2)
    assert ((y == lr.predict(X)).all())
Example 8
def test_logistic_regression_gd():
    w = np.array([[1.2], [4.4]])
    b = np.array([0.52])
    lr = LogisticRegression(epochs=100, eta=0.01, minibatches=1, random_seed=1)

    lr.fit(X, y)
    np.testing.assert_almost_equal(lr.w_, w, 2)
    np.testing.assert_almost_equal(lr.b_, b, 2)
    y_pred = lr.predict(X)
    acc = np.sum(y == y_pred, axis=0) / float(X.shape[0])
    assert acc == 1.0, "Acc: %s" % acc
Example 9
def test_invalid_labels_1():
    y1 = np.where(y == 0, 2, 1)
    lr = LogisticRegression(epochs=15, eta=0.01, random_seed=1)

    if sys.version_info >= (3, 0):
        objtype = '{(0, 1)}'
    else:
        objtype = 'set([(0, 1)])'

    expect = 'Labels not in %s.\nFound (1, 2)' % objtype

    assert_raises(AttributeError, expect, lr.fit, X, y1, {(0, 1)})
Example 10
def test_logistic_regression_sgd():
    w = np.array([[1.18], [4.38]])
    lr = LogisticRegression(epochs=100,
                            eta=0.01,
                            minibatches=len(y),
                            random_seed=1)

    lr.fit(X, y)  # 0, 1 class
    np.testing.assert_almost_equal(lr.w_, w, 2)
    y_pred = lr.predict(X)
    acc = np.sum(y == y_pred, axis=0) / float(X.shape[0])
    assert acc == 1.0, "Acc: %s" % acc
Example 11
def test_l2_regularization_gd():
    lr = LogisticRegression(eta=0.01,
                            epochs=20,
                            minibatches=1,
                            l2_lambda=1.0,
                            regularization='l2',
                            random_seed=0)
    lr.fit(X, y)
    y_pred = lr.predict(X)
    expect_weights = np.array([0.115, 1.032, 2.272])

    np.testing.assert_almost_equal(lr.w_, expect_weights, 3)
    acc = sum(y_pred == y) / len(y)
    assert (acc == 1.0)
Example 12
def test_l2_regularization_sgd():
    lr = LogisticRegression(eta=0.01,
                            epochs=100,
                            minibatches=len(y),
                            l2_lambda=1.0,
                            random_seed=1)
    lr.fit(X, y)
    y_pred = lr.predict(X)
    expect_weights = np.array([[0.24], [0.35]])

    np.testing.assert_almost_equal(lr.w_, expect_weights, 2)
    y_pred = lr.predict(X)
    acc = np.sum(y == y_pred, axis=0) / float(X.shape[0])
    assert acc == 0.97, "Acc: %s" % acc
Example 13
def test_refit_weights():
    w = np.array([[1.2], [4.4]])
    b = np.array([0.52])
    lr = LogisticRegression(epochs=50, eta=0.01, minibatches=1, random_seed=1)

    lr.fit(X, y)
    w1 = lr.w_[0][0]
    w2 = lr.w_[1][0]
    lr.fit(X, y, init_params=False)

    assert w1 != lr.w_[0][0]
    assert w2 != lr.w_[1][0]
    np.testing.assert_almost_equal(lr.w_, w, 2)
    np.testing.assert_almost_equal(lr.b_, b, 2)
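The `init_params=False` flag used above continues training from the weights of the previous fit instead of re-initializing them, which is why the test records the fitted weights, refits, and checks that they moved while still converging to the same solution. A hedged usage sketch:

lr.fit(X, y)                      # first fit: initialize and train w_, b_
lr.fit(X, y, init_params=False)   # second fit: resume training from the current w_, b_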
Example 14
def test_l2_regularization_sgd():
    lr = LogisticRegression(eta=0.01,
                            epochs=20,
                            learning='sgd',
                            l2_lambda=1.0,
                            regularization='l2',
                            random_seed=0)
    lr.fit(X, y)
    y_pred = lr.predict(X)
    expect_weights = np.array([0.09, 0.232, 0.35])

    np.testing.assert_almost_equal(lr.w_, expect_weights, 2)
    acc = sum(y_pred == y) / len(y)
    assert (acc == 1.0)
Example 15
def test_l2_regularization_sgd():
    lr = LogisticRegression(eta=0.01,
                            epochs=100,
                            minibatches=len(y),
                            l2_lambda=1.0,
                            regularization='l2',
                            random_seed=0)
    lr.fit(X, y)
    y_pred = lr.predict(X)
    expect_weights = np.array([0., 0.24, 0.35])

    np.testing.assert_almost_equal(lr.w_, expect_weights, 2)
    acc = sum(y_pred == y) / float(len(y))

    assert (acc == 0.97)
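Compared with the unregularized runs, the L2-penalized fits above end with noticeably smaller weights. A minimal sketch of the penalized gradient-descent step these tests exercise (generic notation, not lifted from the mlxtend source):

# One L2-regularized gradient step (sketch): eta = learning rate,
# grad = gradient of the logistic loss, l2_lambda = regularization strength.
w = w - eta * (grad + l2_lambda * w)   # the l2_lambda * w term shrinks weights toward zero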
Example 16
def test_invalid_labels_1():
    y1 = np.where(y == 0, 2, 1)
    lr = LogisticRegression(epochs=15, eta=0.01, random_seed=1)
    assert_raises(AttributeError, 'Labels not in {(0, 1)}.\nFound (1, 2)',
                  lr.fit, X, y1, {(0, 1)})
Example 17
from mlxtend.data import iris_data
from mlxtend.plotting import plot_decision_regions
from mlxtend.classifier import LogisticRegression
import matplotlib.pyplot as plt

# Loading Data
X, y = iris_data()
X = X[:, [0, 3]]  # sepal length and petal width
X = X[0:100]  # class 0 and class 1
y = y[0:100]  # class 0 and class 1

# standardize
X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

lr = LogisticRegression(
    eta=0.1,
    l2_lambda=0.0,
    epochs=100,
    minibatches=1,  # for Gradient Descent
    random_seed=1,
    print_progress=3)
lr.fit(X, y)

plot_decision_regions(X, y, clf=lr)
plt.title('Logistic Regression - Gradient Descent')
plt.show()

plt.plot(range(len(lr.cost_)), lr.cost_)
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
Example 18
from mlxtend.data import iris_data
from mlxtend.plotting import plot_decision_regions
from mlxtend.classifier import LogisticRegression
import matplotlib.pyplot as plt

X, y = iris_data()

X = X[:, [0, 3]]
X = X[0:100]
y = y[0:100]

X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

lr = LogisticRegression(
    eta=0.1,
    l2_lambda=0.0,
    epochs=500,
    # minibatches=1,       # 1 for gradient descent
    # minibatches=len(y),  # len(y) for stochastic gradient descent (SGD)
    minibatches=5,  # 100 samples / 5 minibatches = 20 samples per minibatch
    random_seed=1,
    print_progress=3)
lr.fit(X, y)

y_pred = lr.predict(X)
print('Last 3 Class Labels: %s' % y_pred[-3:])
y_proba = lr.predict_proba(X)
print('Last 3 Class Probabilities: %s' % y_proba[-3:])

plot_decision_regions(X, y, clf=lr)
plt.title('Logistic Regression - Stochastic Gradient Descent (minibatches)')
plt.show()
Example 19
from sklearn.base import clone

def test_clone():
    # scikit-learn compatibility: the estimator must survive clone()
    log = LogisticRegression()
    clone(log)